/*
 * Copyright (C) 2023 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

// #define LOG_NDEBUG 0
#define LOG_TAG "VirtualCameraSession"
#include "VirtualCameraSession.h"

#include <algorithm>
#include <atomic>
#include <chrono>
#include <cmath>
#include <cstddef>
#include <cstdint>
#include <cstring>
#include <map>
#include <memory>
#include <mutex>
#include <numeric>
#include <optional>
#include <tuple>
#include <unordered_set>
#include <utility>
#include <vector>

#include "CameraMetadata.h"
#include "EGL/egl.h"
#include "VirtualCameraDevice.h"
#include "VirtualCameraRenderThread.h"
#include "VirtualCameraStream.h"
#include "aidl/android/companion/virtualcamera/SupportedStreamConfiguration.h"
#include "aidl/android/hardware/camera/common/Status.h"
#include "aidl/android/hardware/camera/device/BufferCache.h"
#include "aidl/android/hardware/camera/device/BufferStatus.h"
#include "aidl/android/hardware/camera/device/CameraMetadata.h"
#include "aidl/android/hardware/camera/device/CaptureRequest.h"
#include "aidl/android/hardware/camera/device/HalStream.h"
#include "aidl/android/hardware/camera/device/NotifyMsg.h"
#include "aidl/android/hardware/camera/device/RequestTemplate.h"
#include "aidl/android/hardware/camera/device/ShutterMsg.h"
#include "aidl/android/hardware/camera/device/Stream.h"
#include "aidl/android/hardware/camera/device/StreamBuffer.h"
#include "aidl/android/hardware/camera/device/StreamConfiguration.h"
#include "aidl/android/hardware/camera/device/StreamRotation.h"
#include "aidl/android/hardware/graphics/common/BufferUsage.h"
#include "aidl/android/hardware/graphics/common/PixelFormat.h"
#include "android/hardware_buffer.h"
#include "android/native_window_aidl.h"
#include "fmq/AidlMessageQueue.h"
#include "system/camera_metadata.h"
#include "ui/GraphicBuffer.h"
#include "util/EglDisplayContext.h"
#include "util/EglFramebuffer.h"
#include "util/EglProgram.h"
#include "util/JpegUtil.h"
#include "util/MetadataUtil.h"
#include "util/Util.h"

namespace android {
namespace companion {
namespace virtualcamera {

using ::aidl::android::companion::virtualcamera::Format;
using ::aidl::android::companion::virtualcamera::IVirtualCameraCallback;
using ::aidl::android::companion::virtualcamera::SupportedStreamConfiguration;
using ::aidl::android::hardware::camera::common::Status;
using ::aidl::android::hardware::camera::device::BufferCache;
using ::aidl::android::hardware::camera::device::CameraMetadata;
using ::aidl::android::hardware::camera::device::CameraOfflineSessionInfo;
using ::aidl::android::hardware::camera::device::CaptureRequest;
using ::aidl::android::hardware::camera::device::HalStream;
using ::aidl::android::hardware::camera::device::ICameraDeviceCallback;
using ::aidl::android::hardware::camera::device::ICameraOfflineSession;
using ::aidl::android::hardware::camera::device::RequestTemplate;
using ::aidl::android::hardware::camera::device::Stream;
using ::aidl::android::hardware::camera::device::StreamBuffer;
using ::aidl::android::hardware::camera::device::StreamConfiguration;
using ::aidl::android::hardware::camera::device::StreamRotation;
using ::aidl::android::hardware::common::fmq::MQDescriptor;
using ::aidl::android::hardware::common::fmq::SynchronizedReadWrite;
using ::aidl::android::hardware::graphics::common::BufferUsage;
using ::aidl::android::hardware::graphics::common::PixelFormat;
using ::android::base::unique_fd;

namespace {

using metadata_ptr =
    std::unique_ptr<camera_metadata_t, void (*)(camera_metadata_t*)>;

using namespace std::chrono_literals;

// Size of the request/result metadata fast message queue.
// Setting this to 0 always disables the FMQ.
constexpr size_t kMetadataMsgQueueSize = 0;

// Maximum number of buffers to use per single stream.
constexpr size_t kMaxStreamBuffers = 2;

// Thumbnail size (0,0) corresponds to disabling the thumbnail.
const Resolution kDefaultJpegThumbnailSize(0, 0);

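// Maps a capture request template to the corresponding capture intent
// metadata value.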
camera_metadata_enum_android_control_capture_intent_t requestTemplateToIntent(
    const RequestTemplate type) {
  switch (type) {
    case RequestTemplate::PREVIEW:
      return ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
    case RequestTemplate::STILL_CAPTURE:
      return ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
    case RequestTemplate::VIDEO_RECORD:
      return ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
    case RequestTemplate::VIDEO_SNAPSHOT:
      return ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
    default:
      // Return PREVIEW by default.
      return ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
  }
}

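// Returns the highest maxFps value among the supported input configurations.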
int getMaxFps(const std::vector<SupportedStreamConfiguration>& configs) {
  return std::transform_reduce(
      configs.begin(), configs.end(), 0,
      [](const int a, const int b) { return std::max(a, b); },
      [](const SupportedStreamConfiguration& config) { return config.maxFps; });
}

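// Builds the default request settings for the given request template, using
// the highest supported frame rate as the AE target FPS range.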
CameraMetadata createDefaultRequestSettings(
    const RequestTemplate type,
    const std::vector<SupportedStreamConfiguration>& inputConfigs) {
  int maxFps = getMaxFps(inputConfigs);
  auto metadata =
      MetadataBuilder()
          .setAberrationCorrectionMode(
              ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF)
          .setControlCaptureIntent(requestTemplateToIntent(type))
          .setControlMode(ANDROID_CONTROL_MODE_AUTO)
          .setControlAeMode(ANDROID_CONTROL_AE_MODE_ON)
          .setControlAeExposureCompensation(0)
          .setControlAeTargetFpsRange(FpsRange{maxFps, maxFps})
          .setControlAeAntibandingMode(ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO)
          .setControlAePrecaptureTrigger(
              ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE)
          .setControlAfTrigger(ANDROID_CONTROL_AF_TRIGGER_IDLE)
          .setControlAfMode(ANDROID_CONTROL_AF_MODE_OFF)
          .setControlAwbMode(ANDROID_CONTROL_AWB_MODE_AUTO)
          .setControlEffectMode(ANDROID_CONTROL_EFFECT_MODE_OFF)
          .setFaceDetectMode(ANDROID_STATISTICS_FACE_DETECT_MODE_OFF)
          .setFlashMode(ANDROID_FLASH_MODE_OFF)
          .setFlashState(ANDROID_FLASH_STATE_UNAVAILABLE)
          .setJpegQuality(VirtualCameraDevice::kDefaultJpegQuality)
          .setJpegThumbnailQuality(VirtualCameraDevice::kDefaultJpegQuality)
          .setJpegThumbnailSize(0, 0)
          .setNoiseReductionMode(ANDROID_NOISE_REDUCTION_MODE_OFF)
          .build();
  if (metadata == nullptr) {
    ALOGE("%s: Failed to construct metadata for default request type %s",
          __func__, toString(type).c_str());
    return CameraMetadata();
  } else {
    ALOGV("%s: Successfully created metadata for request type %s", __func__,
          toString(type).c_str());
  }
  return *metadata;
}

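// Builds the HAL-side stream description for a requested stream, overriding
// an implementation-defined pixel format and adding camera-output and GPU
// render target usage flags.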
HalStream getHalStream(const Stream& stream) {
  HalStream halStream;
  halStream.id = stream.id;
  halStream.physicalCameraId = stream.physicalCameraId;
  halStream.maxBuffers = kMaxStreamBuffers;

  if (stream.format == PixelFormat::IMPLEMENTATION_DEFINED) {
    // If the format is implementation-defined, we need to override it
    // with the actual format.
    // TODO(b/301023410) Override with the format based on the
    // camera configuration, once we support more formats.
    halStream.overrideFormat = PixelFormat::YCBCR_420_888;
  } else {
    halStream.overrideFormat = stream.format;
  }
  halStream.overrideDataSpace = stream.dataSpace;

  halStream.producerUsage = static_cast<BufferUsage>(
      static_cast<int64_t>(stream.usage) |
      static_cast<int64_t>(BufferUsage::CAMERA_OUTPUT) |
      static_cast<int64_t>(BufferUsage::GPU_RENDER_TARGET));

  halStream.supportOffline = false;
  return halStream;
}

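// Returns the requested stream with the largest pixel count.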
Stream getHighestResolutionStream(const std::vector<Stream>& streams) {
  return *(std::max_element(streams.begin(), streams.end(),
                            [](const Stream& a, const Stream& b) {
                              return a.width * a.height < b.width * b.height;
                            }));
}

Resolution resolutionFromStream(const Stream& stream) {
  return Resolution(stream.width, stream.height);
}

Resolution resolutionFromInputConfig(
    const SupportedStreamConfiguration& inputConfig) {
  return Resolution(inputConfig.width, inputConfig.height);
}

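// Queries the native window backing the surface for its current resolution.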
std::optional<Resolution> resolutionFromSurface(const sp<Surface> surface) {
  Resolution res{0, 0};
  if (surface == nullptr) {
    ALOGE("%s: Cannot get resolution from null surface", __func__);
    return std::nullopt;
  }

  int status = surface->query(NATIVE_WINDOW_WIDTH, &res.width);
  if (status != NO_ERROR) {
    ALOGE("%s: Failed to get width from surface", __func__);
    return std::nullopt;
  }

  status = surface->query(NATIVE_WINDOW_HEIGHT, &res.height);
  if (status != NO_ERROR) {
    ALOGE("%s: Failed to get height from surface", __func__);
    return std::nullopt;
  }
  return res;
}

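// Picks the supported input configuration that best serves all requested
// output streams: approximately the same aspect ratio as the largest
// requested stream and the closest equal-or-higher resolution. Returns
// nullopt if no configuration qualifies.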
std::optional<SupportedStreamConfiguration> pickInputConfigurationForStreams(
    const std::vector<Stream>& requestedStreams,
    const std::vector<SupportedStreamConfiguration>& supportedInputConfigs) {
  Stream maxResolutionStream = getHighestResolutionStream(requestedStreams);
  Resolution maxResolution = resolutionFromStream(maxResolutionStream);

  // Find the best fitting input configuration to satisfy all requested
  // streams: same or higher resolution than the largest requested stream,
  // with the lowest pixel count difference and the same aspect ratio.
  auto isBetterInputConfig = [maxResolution](
                                 const SupportedStreamConfiguration& configA,
                                 const SupportedStreamConfiguration& configB) {
    int maxResPixelCount = maxResolution.width * maxResolution.height;
    int pixelCountDiffA =
        std::abs((configA.width * configA.height) - maxResPixelCount);
    int pixelCountDiffB =
        std::abs((configB.width * configB.height) - maxResPixelCount);

    return pixelCountDiffA < pixelCountDiffB;
  };

  std::optional<SupportedStreamConfiguration> bestConfig;
  for (const SupportedStreamConfiguration& inputConfig : supportedInputConfigs) {
    Resolution inputConfigResolution = resolutionFromInputConfig(inputConfig);
    if (inputConfigResolution < maxResolution ||
        !isApproximatellySameAspectRatio(inputConfigResolution, maxResolution)) {
      // Skip configurations with lower resolution (we don't want to upscale)
      // or a different aspect ratio.
      continue;
    }

    if (!bestConfig.has_value() ||
        isBetterInputConfig(inputConfig, bestConfig.value())) {
      bestConfig = inputConfig;
    }
  }

  return bestConfig;
}

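// Extracts the per-capture request settings (JPEG parameters, FPS range,
// capture intent, GPS coordinates, AE precapture trigger) from the request
// metadata.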
RequestSettings createSettingsFromMetadata(const CameraMetadata& metadata) {
  return RequestSettings{
      .jpegQuality = getJpegQuality(metadata).value_or(
          VirtualCameraDevice::kDefaultJpegQuality),
      .jpegOrientation = getJpegOrientation(metadata),
      .thumbnailResolution =
          getJpegThumbnailSize(metadata).value_or(Resolution(0, 0)),
      .thumbnailJpegQuality = getJpegThumbnailQuality(metadata).value_or(
          VirtualCameraDevice::kDefaultJpegQuality),
      .fpsRange = getFpsRange(metadata),
      .captureIntent = getCaptureIntent(metadata).value_or(
          ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW),
      .gpsCoordinates = getGpsCoordinates(metadata),
      .aePrecaptureTrigger = getPrecaptureTrigger(metadata)};
}

}  // namespace

VirtualCameraSession::VirtualCameraSession(
    std::shared_ptr<VirtualCameraDevice> cameraDevice,
    std::shared_ptr<ICameraDeviceCallback> cameraDeviceCallback,
    std::shared_ptr<IVirtualCameraCallback> virtualCameraClientCallback)
    : mCameraDevice(cameraDevice),
      mCameraDeviceCallback(cameraDeviceCallback),
      mVirtualCameraClientCallback(virtualCameraClientCallback) {
  mRequestMetadataQueue = std::make_unique<RequestMetadataQueue>(
      kMetadataMsgQueueSize, false /* non blocking */);
  if (!mRequestMetadataQueue->isValid()) {
    ALOGE("%s: invalid request fmq", __func__);
  }

  mResultMetadataQueue = std::make_shared<ResultMetadataQueue>(
      kMetadataMsgQueueSize, false /* non blocking */);
  if (!mResultMetadataQueue->isValid()) {
    ALOGE("%s: invalid result fmq", __func__);
  }
}

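// Closes the session: notifies the client callback about the closed input
// stream, stops the render thread and releases all session streams.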
ndk::ScopedAStatus VirtualCameraSession::close() {
  ALOGV("%s", __func__);
  {
    std::lock_guard<std::mutex> lock(mLock);

    if (mVirtualCameraClientCallback != nullptr) {
      mVirtualCameraClientCallback->onStreamClosed(mCurrentInputStreamId);
    }

    if (mRenderThread != nullptr) {
      mRenderThread->stop();
      mRenderThread = nullptr;
    }
  }

  mSessionContext.closeAllStreams();
  return ndk::ScopedAStatus::ok();
}

ndk::ScopedAStatus VirtualCameraSession::configureStreams(
    const StreamConfiguration& in_requestedConfiguration,
    std::vector<HalStream>* _aidl_return) {
  ALOGV("%s: requestedConfiguration: %s", __func__,
        in_requestedConfiguration.toString().c_str());

  if (_aidl_return == nullptr) {
    return cameraStatus(Status::ILLEGAL_ARGUMENT);
  }

  std::shared_ptr<VirtualCameraDevice> virtualCamera = mCameraDevice.lock();
  if (virtualCamera == nullptr) {
    ALOGW("%s: configure called on already unregistered camera", __func__);
    return cameraStatus(Status::CAMERA_DISCONNECTED);
  }

  mSessionContext.removeStreamsNotInStreamConfiguration(
      in_requestedConfiguration);

  auto& streams = in_requestedConfiguration.streams;
  auto& halStreams = *_aidl_return;
  halStreams.clear();
  halStreams.resize(in_requestedConfiguration.streams.size());

  if (!virtualCamera->isStreamCombinationSupported(in_requestedConfiguration)) {
    ALOGE("%s: Requested stream configuration is not supported", __func__);
    return cameraStatus(Status::ILLEGAL_ARGUMENT);
  }

  sp<Surface> inputSurface = nullptr;
  int inputStreamId = -1;
  std::optional<SupportedStreamConfiguration> inputConfig;
  {
    std::lock_guard<std::mutex> lock(mLock);
    for (int i = 0; i < in_requestedConfiguration.streams.size(); ++i) {
      halStreams[i] = getHalStream(streams[i]);
      if (mSessionContext.initializeStream(streams[i])) {
        ALOGV("Configured new stream: %s", streams[i].toString().c_str());
      }
    }

    inputConfig = pickInputConfigurationForStreams(
        streams, virtualCamera->getInputConfigs());
    if (!inputConfig.has_value()) {
      ALOGE(
          "%s: Failed to pick any input configuration for stream configuration "
          "request: %s",
          __func__, in_requestedConfiguration.toString().c_str());
      return cameraStatus(Status::ILLEGAL_ARGUMENT);
    }

    if (mRenderThread != nullptr) {
      // If there's already a render thread, this is not the first
      // configuration call. If the surface has the same resolution and pixel
      // format as the picked config, we don't need to do anything; the
      // current render thread can serve the new set of streams. However, if
      // it differs, we need to discard the current surface and reinitialize
      // the render thread.

      std::optional<Resolution> currentInputResolution =
          resolutionFromSurface(mRenderThread->getInputSurface());
      if (currentInputResolution.has_value() &&
          *currentInputResolution == resolutionFromInputConfig(*inputConfig)) {
        ALOGI(
            "%s: Newly configured set of streams matches existing client "
            "surface (%dx%d)",
            __func__, currentInputResolution->width,
            currentInputResolution->height);
        return ndk::ScopedAStatus::ok();
      }

      if (mVirtualCameraClientCallback != nullptr) {
        mVirtualCameraClientCallback->onStreamClosed(mCurrentInputStreamId);
      }

      ALOGV(
          "%s: Newly requested output streams are not suitable for "
          "pre-existing surface (%dx%d), creating new surface (%dx%d)",
          __func__, currentInputResolution->width,
          currentInputResolution->height, inputConfig->width,
          inputConfig->height);

      mRenderThread->flush();
      mRenderThread->stop();
    }

    mRenderThread = std::make_unique<VirtualCameraRenderThread>(
        mSessionContext, resolutionFromInputConfig(*inputConfig),
        virtualCamera->getMaxInputResolution(), mCameraDeviceCallback);
    mRenderThread->start();
    inputSurface = mRenderThread->getInputSurface();
    inputStreamId = mCurrentInputStreamId =
        virtualCamera->allocateInputStreamId();
  }

  if (mVirtualCameraClientCallback != nullptr && inputSurface != nullptr) {
    // TODO(b/301023410) Pass streamId based on client input stream id once
    // support for multiple input streams is implemented. For now we always
    // create a single texture.
    mVirtualCameraClientCallback->onStreamConfigured(
        inputStreamId, aidl::android::view::Surface(inputSurface.get()),
        inputConfig->width, inputConfig->height, inputConfig->pixelFormat);
  }

  return ndk::ScopedAStatus::ok();
}

ndk::ScopedAStatus VirtualCameraSession::constructDefaultRequestSettings(
    RequestTemplate in_type, CameraMetadata* _aidl_return) {
  ALOGV("%s: type %d", __func__, static_cast<int32_t>(in_type));

  std::shared_ptr<VirtualCameraDevice> camera = mCameraDevice.lock();
  if (camera == nullptr) {
    ALOGW(
        "%s: constructDefaultRequestSettings called on already unregistered "
        "camera",
        __func__);
    return cameraStatus(Status::CAMERA_DISCONNECTED);
  }

  switch (in_type) {
    case RequestTemplate::PREVIEW:
    case RequestTemplate::STILL_CAPTURE:
    case RequestTemplate::VIDEO_RECORD:
    case RequestTemplate::VIDEO_SNAPSHOT: {
      *_aidl_return =
          createDefaultRequestSettings(in_type, camera->getInputConfigs());
      return ndk::ScopedAStatus::ok();
    }
    case RequestTemplate::MANUAL:
    case RequestTemplate::ZERO_SHUTTER_LAG:
      // MANUAL and ZERO_SHUTTER_LAG templates are not supported.
      return ndk::ScopedAStatus::fromServiceSpecificError(
          static_cast<int32_t>(Status::ILLEGAL_ARGUMENT));
    default:
      ALOGE("%s: unknown request template type %d", __FUNCTION__,
            static_cast<int>(in_type));
      return ndk::ScopedAStatus::fromServiceSpecificError(
          static_cast<int32_t>(Status::ILLEGAL_ARGUMENT));
  }
}

ndk::ScopedAStatus VirtualCameraSession::flush() {
  ALOGV("%s", __func__);
  std::lock_guard<std::mutex> lock(mLock);
  if (mRenderThread != nullptr) {
    mRenderThread->flush();
  }
  return ndk::ScopedAStatus::ok();
}

ndk::ScopedAStatus VirtualCameraSession::getCaptureRequestMetadataQueue(
    MQDescriptor<int8_t, SynchronizedReadWrite>* _aidl_return) {
  ALOGV("%s", __func__);
  *_aidl_return = mRequestMetadataQueue->dupeDesc();
  return ndk::ScopedAStatus::ok();
}

ndk::ScopedAStatus VirtualCameraSession::getCaptureResultMetadataQueue(
    MQDescriptor<int8_t, SynchronizedReadWrite>* _aidl_return) {
  ALOGV("%s", __func__);
  *_aidl_return = mResultMetadataQueue->dupeDesc();
  return ndk::ScopedAStatus::ok();
}

ndk::ScopedAStatus VirtualCameraSession::isReconfigurationRequired(
    const CameraMetadata& in_oldSessionParams,
    const CameraMetadata& in_newSessionParams, bool* _aidl_return) {
  ALOGV("%s: oldSessionParams: %s newSessionParams: %s", __func__,
        in_newSessionParams.toString().c_str(),
        in_oldSessionParams.toString().c_str());

  if (_aidl_return == nullptr) {
    return ndk::ScopedAStatus::fromServiceSpecificError(
        static_cast<int32_t>(Status::ILLEGAL_ARGUMENT));
  }

  *_aidl_return = true;
  return ndk::ScopedAStatus::ok();
}

ndk::ScopedAStatus VirtualCameraSession::processCaptureRequest(
    const std::vector<CaptureRequest>& in_requests,
    const std::vector<BufferCache>& in_cachesToRemove, int32_t* _aidl_return) {
  ALOGV("%s", __func__);

  if (!in_cachesToRemove.empty()) {
    mSessionContext.removeBufferCaches(in_cachesToRemove);
  }

  for (const auto& captureRequest : in_requests) {
    auto status = processCaptureRequest(captureRequest);
    if (!status.isOk()) {
      return status;
    }
  }
  *_aidl_return = in_requests.size();
  return ndk::ScopedAStatus::ok();
}

ndk::ScopedAStatus VirtualCameraSession::signalStreamFlush(
    const std::vector<int32_t>& in_streamIds, int32_t in_streamConfigCounter) {
  ALOGV("%s", __func__);

  (void)in_streamIds;
  (void)in_streamConfigCounter;
  return ndk::ScopedAStatus::ok();
}

ndk::ScopedAStatus VirtualCameraSession::switchToOffline(
    const std::vector<int32_t>& in_streamsToKeep,
    CameraOfflineSessionInfo* out_offlineSessionInfo,
    std::shared_ptr<ICameraOfflineSession>* _aidl_return) {
  ALOGV("%s", __func__);

  (void)in_streamsToKeep;
  (void)out_offlineSessionInfo;

  if (_aidl_return == nullptr) {
    return ndk::ScopedAStatus::fromServiceSpecificError(
        static_cast<int32_t>(Status::ILLEGAL_ARGUMENT));
  }

  *_aidl_return = nullptr;
  return cameraStatus(Status::OPERATION_NOT_SUPPORTED);
}

ndk::ScopedAStatus VirtualCameraSession::repeatingRequestEnd(
    int32_t in_frameNumber, const std::vector<int32_t>& in_streamIds) {
  ALOGV("%s", __func__);
  (void)in_frameNumber;
  (void)in_streamIds;
  return ndk::ScopedAStatus::ok();
}

std::set<int> VirtualCameraSession::getStreamIds() const {
  return mSessionContext.getStreamIds();
}

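// Handles a single capture request: resolves the effective request settings,
// imports the output buffers, enqueues the capture task to the render thread
// and notifies the virtual camera client callback.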
ndk::ScopedAStatus VirtualCameraSession::processCaptureRequest(
    const CaptureRequest& request) {
  ALOGV("%s: request: %s", __func__, request.toString().c_str());

  std::shared_ptr<ICameraDeviceCallback> cameraCallback = nullptr;
  RequestSettings requestSettings;
  int currentInputStreamId;
  {
    std::lock_guard<std::mutex> lock(mLock);

    // If the metadata is empty, the last received metadata applies; if it's
    // non-empty, update it.
    if (!request.settings.metadata.empty()) {
      mCurrentRequestMetadata = request.settings;
    }

    // If we don't have any metadata at this point, none was received with the
    // first request, which is an error state.
    if (mCurrentRequestMetadata.metadata.empty()) {
      return cameraStatus(Status::ILLEGAL_ARGUMENT);
    }

    requestSettings = createSettingsFromMetadata(mCurrentRequestMetadata);

    cameraCallback = mCameraDeviceCallback;
    currentInputStreamId = mCurrentInputStreamId;
  }

  if (cameraCallback == nullptr) {
    ALOGE(
        "%s: processCaptureRequest called, but there's no camera callback "
        "configured",
        __func__);
    return cameraStatus(Status::INTERNAL_ERROR);
  }

  if (!mSessionContext.importBuffersFromCaptureRequest(request)) {
    ALOGE("Failed to import buffers from capture request.");
    return cameraStatus(Status::INTERNAL_ERROR);
  }

  std::vector<CaptureRequestBuffer> taskBuffers;
  taskBuffers.reserve(request.outputBuffers.size());
  for (const StreamBuffer& streamBuffer : request.outputBuffers) {
    taskBuffers.emplace_back(streamBuffer.streamId, streamBuffer.bufferId,
                             importFence(streamBuffer.acquireFence));
  }

  {
    std::lock_guard<std::mutex> lock(mLock);
    if (mRenderThread == nullptr) {
      ALOGE(
          "%s: processCaptureRequest (frameNumber %d) called before configure "
          "(render thread not initialized)",
          __func__, request.frameNumber);
      return cameraStatus(Status::INTERNAL_ERROR);
    }
    mRenderThread->enqueueTask(std::make_unique<ProcessCaptureRequestTask>(
        request.frameNumber, taskBuffers, requestSettings));
  }

  if (mVirtualCameraClientCallback != nullptr) {
    auto status = mVirtualCameraClientCallback->onProcessCaptureRequest(
        currentInputStreamId, request.frameNumber);
    if (!status.isOk()) {
      ALOGE(
          "Failed to invoke onProcessCaptureRequest client callback for frame "
          "%d",
          request.frameNumber);
    }
  }

  return ndk::ScopedAStatus::ok();
}

}  // namespace virtualcamera
}  // namespace companion
}  // namespace android