/*
 * Copyright (C) 2023 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

// #define LOG_NDEBUG 0
#define LOG_TAG "VirtualCameraSession"
#include "VirtualCameraSession.h"

#include <algorithm>
#include <atomic>
#include <chrono>
#include <cstddef>
#include <cstdint>
#include <cstring>
#include <map>
#include <memory>
#include <mutex>
#include <optional>
#include <tuple>
#include <unordered_set>
#include <utility>
#include <vector>

#include "CameraMetadata.h"
#include "EGL/egl.h"
#include "VirtualCameraDevice.h"
#include "VirtualCameraRenderThread.h"
#include "VirtualCameraStream.h"
#include "aidl/android/hardware/camera/common/Status.h"
#include "aidl/android/hardware/camera/device/BufferCache.h"
#include "aidl/android/hardware/camera/device/BufferStatus.h"
#include "aidl/android/hardware/camera/device/CaptureRequest.h"
#include "aidl/android/hardware/camera/device/HalStream.h"
#include "aidl/android/hardware/camera/device/NotifyMsg.h"
#include "aidl/android/hardware/camera/device/ShutterMsg.h"
#include "aidl/android/hardware/camera/device/StreamBuffer.h"
#include "aidl/android/hardware/camera/device/StreamConfiguration.h"
#include "aidl/android/hardware/camera/device/StreamRotation.h"
#include "aidl/android/hardware/graphics/common/BufferUsage.h"
#include "aidl/android/hardware/graphics/common/PixelFormat.h"
#include "android/hardware_buffer.h"
#include "android/native_window_aidl.h"
#include "fmq/AidlMessageQueue.h"
#include "system/camera_metadata.h"
#include "ui/GraphicBuffer.h"
#include "util/EglDisplayContext.h"
#include "util/EglFramebuffer.h"
#include "util/EglProgram.h"
#include "util/JpegUtil.h"
#include "util/MetadataBuilder.h"
#include "util/TestPatternHelper.h"
#include "util/Util.h"

namespace android {
namespace companion {
namespace virtualcamera {

using ::aidl::android::companion::virtualcamera::Format;
using ::aidl::android::companion::virtualcamera::IVirtualCameraCallback;
using ::aidl::android::hardware::camera::common::Status;
using ::aidl::android::hardware::camera::device::BufferCache;
using ::aidl::android::hardware::camera::device::CameraMetadata;
using ::aidl::android::hardware::camera::device::CameraOfflineSessionInfo;
using ::aidl::android::hardware::camera::device::CaptureRequest;
using ::aidl::android::hardware::camera::device::HalStream;
using ::aidl::android::hardware::camera::device::ICameraDeviceCallback;
using ::aidl::android::hardware::camera::device::ICameraOfflineSession;
using ::aidl::android::hardware::camera::device::RequestTemplate;
using ::aidl::android::hardware::camera::device::Stream;
using ::aidl::android::hardware::camera::device::StreamBuffer;
using ::aidl::android::hardware::camera::device::StreamConfiguration;
using ::aidl::android::hardware::camera::device::StreamRotation;
using ::aidl::android::hardware::common::fmq::MQDescriptor;
using ::aidl::android::hardware::common::fmq::SynchronizedReadWrite;
using ::aidl::android::hardware::graphics::common::BufferUsage;
using ::aidl::android::hardware::graphics::common::PixelFormat;
using ::android::base::unique_fd;

namespace {

using metadata_ptr =
    std::unique_ptr<camera_metadata_t, void (*)(camera_metadata_t*)>;

using namespace std::chrono_literals;

// Size of the request/result metadata fast message queue.
// Setting this to 0 always disables the FMQ.
static constexpr size_t kMetadataMsgQueueSize = 0;

// Maximum number of buffers to use per single stream.
static constexpr size_t kMaxStreamBuffers = 2;

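// Builds default request settings for the given request template by mapping
// the template to the corresponding ANDROID_CONTROL_CAPTURE_INTENT value.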
CameraMetadata createDefaultRequestSettings(RequestTemplate type) {
  hardware::camera::common::V1_0::helper::CameraMetadata metadataHelper;

  camera_metadata_enum_android_control_capture_intent_t intent =
      ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
  switch (type) {
    case RequestTemplate::PREVIEW:
      intent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
      break;
    case RequestTemplate::STILL_CAPTURE:
      intent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
      break;
    case RequestTemplate::VIDEO_RECORD:
      intent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
      break;
    case RequestTemplate::VIDEO_SNAPSHOT:
      intent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
      break;
    default:
      // Leave default.
      break;
  }

  auto metadata = MetadataBuilder().setControlCaptureIntent(intent).build();
  return (metadata != nullptr) ? std::move(*metadata) : CameraMetadata();
}

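// Derives the HalStream returned to the camera framework from a requested
// Stream, overriding implementation-defined pixel formats and capping the
// number of buffers per stream.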
HalStream getHalStream(const Stream& stream) {
  HalStream halStream;
  halStream.id = stream.id;
  halStream.physicalCameraId = stream.physicalCameraId;
  halStream.maxBuffers = kMaxStreamBuffers;

  if (stream.format == PixelFormat::IMPLEMENTATION_DEFINED) {
    // If the requested format is implementation-defined, we need to override
    // it with the actual format.
    // TODO(b/301023410) Override with the format based on the
    // camera configuration, once we support more formats.
    halStream.overrideFormat = PixelFormat::YCBCR_420_888;
  } else {
    halStream.overrideFormat = stream.format;
  }
  halStream.overrideDataSpace = stream.dataSpace;

  halStream.producerUsage = BufferUsage::GPU_RENDER_TARGET;
  halStream.supportOffline = false;
  return halStream;
}

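// Returns the stream with the largest pixel area; used to size the input
// surface of the render thread.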
Stream getHighestResolutionStream(const std::vector<Stream>& streams) {
  return *(std::max_element(streams.begin(), streams.end(),
                            [](const Stream& a, const Stream& b) {
                              return a.width * a.height < b.width * b.height;
                            }));
}

}  // namespace

VirtualCameraSession::VirtualCameraSession(
    std::shared_ptr<VirtualCameraDevice> cameraDevice,
    std::shared_ptr<ICameraDeviceCallback> cameraDeviceCallback,
    std::shared_ptr<IVirtualCameraCallback> virtualCameraClientCallback)
    : mCameraDevice(cameraDevice),
      mCameraDeviceCallback(cameraDeviceCallback),
      mVirtualCameraClientCallback(virtualCameraClientCallback) {
  mRequestMetadataQueue = std::make_unique<RequestMetadataQueue>(
      kMetadataMsgQueueSize, false /* non blocking */);
  if (!mRequestMetadataQueue->isValid()) {
    ALOGE("%s: invalid request fmq", __func__);
  }

  mResultMetadataQueue = std::make_shared<ResultMetadataQueue>(
      kMetadataMsgQueueSize, false /* non blocking */);
  if (!mResultMetadataQueue->isValid()) {
    ALOGE("%s: invalid result fmq", __func__);
  }
}

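// Closes the session: notifies the client that its input stream is closed,
// stops the render thread and releases all configured streams.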
ndk::ScopedAStatus VirtualCameraSession::close() {
  ALOGV("%s", __func__);

  if (mVirtualCameraClientCallback != nullptr) {
    mVirtualCameraClientCallback->onStreamClosed(/*streamId=*/0);
  }

  {
    std::lock_guard<std::mutex> lock(mLock);
    if (mRenderThread != nullptr) {
      mRenderThread->stop();
      mRenderThread = nullptr;
    }
  }

  mSessionContext.closeAllStreams();
  return ndk::ScopedAStatus::ok();
}

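// Configures the requested output streams and lazily starts the render
// thread backing this session.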
ndk::ScopedAStatus VirtualCameraSession::configureStreams(
    const StreamConfiguration& in_requestedConfiguration,
    std::vector<HalStream>* _aidl_return) {
  ALOGV("%s: requestedConfiguration: %s", __func__,
        in_requestedConfiguration.toString().c_str());

  if (_aidl_return == nullptr) {
    return cameraStatus(Status::ILLEGAL_ARGUMENT);
  }

  std::shared_ptr<VirtualCameraDevice> virtualCamera = mCameraDevice.lock();
  if (virtualCamera == nullptr) {
    ALOGW("%s: configure called on already unregistered camera", __func__);
    return cameraStatus(Status::CAMERA_DISCONNECTED);
  }

  mSessionContext.removeStreamsNotInStreamConfiguration(
      in_requestedConfiguration);

  auto& streams = in_requestedConfiguration.streams;
  auto& halStreams = *_aidl_return;
  halStreams.clear();
  halStreams.resize(in_requestedConfiguration.streams.size());

  sp<Surface> inputSurface = nullptr;
  int inputWidth;
  int inputHeight;

  if (!virtualCamera->isStreamCombinationSupported(in_requestedConfiguration)) {
    ALOGE("%s: Requested stream configuration is not supported", __func__);
    return cameraStatus(Status::ILLEGAL_ARGUMENT);
  }

  {
    std::lock_guard<std::mutex> lock(mLock);
    for (int i = 0; i < in_requestedConfiguration.streams.size(); ++i) {
      halStreams[i] = getHalStream(streams[i]);
      if (mSessionContext.initializeStream(streams[i])) {
        ALOGV("Configured new stream: %s", streams[i].toString().c_str());
      }
    }

    Stream maxResStream = getHighestResolutionStream(streams);
    inputWidth = maxResStream.width;
    inputHeight = maxResStream.height;
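    // Start the render thread on first configuration, sized to the
    // highest-resolution requested stream.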
    if (mRenderThread == nullptr) {
      // If there's no client callback, start camera in test mode.
      const bool testMode = mVirtualCameraClientCallback == nullptr;
      mRenderThread = std::make_unique<VirtualCameraRenderThread>(
          mSessionContext, inputWidth, inputHeight, mCameraDeviceCallback,
          testMode);
      mRenderThread->start();
      inputSurface = mRenderThread->getInputSurface();
    }
  }

  if (mVirtualCameraClientCallback != nullptr && inputSurface != nullptr) {
    // TODO(b/301023410) Pass streamId based on client input stream id once
    // support for multiple input streams is implemented. For now we always
    // create a single texture.
    mVirtualCameraClientCallback->onStreamConfigured(
        /*streamId=*/0, aidl::android::view::Surface(inputSurface.get()),
        inputWidth, inputHeight, Format::YUV_420_888);
  }

  mFirstRequest.store(true);
  return ndk::ScopedAStatus::ok();
}

ndk::ScopedAStatus VirtualCameraSession::constructDefaultRequestSettings(
    RequestTemplate in_type, CameraMetadata* _aidl_return) {
  ALOGV("%s: type %d", __func__, static_cast<int32_t>(in_type));

  switch (in_type) {
    case RequestTemplate::PREVIEW:
    case RequestTemplate::STILL_CAPTURE:
    case RequestTemplate::VIDEO_RECORD:
    case RequestTemplate::VIDEO_SNAPSHOT: {
      *_aidl_return = createDefaultRequestSettings(in_type);
      return ndk::ScopedAStatus::ok();
    }
    case RequestTemplate::MANUAL:
    case RequestTemplate::ZERO_SHUTTER_LAG:
      // MANUAL and ZERO_SHUTTER_LAG templates are not supported.
      return ndk::ScopedAStatus::fromServiceSpecificError(
          static_cast<int32_t>(Status::ILLEGAL_ARGUMENT));
    default:
      ALOGE("%s: unknown request template type %d", __FUNCTION__,
            static_cast<int>(in_type));
      return ndk::ScopedAStatus::fromServiceSpecificError(
          static_cast<int32_t>(Status::ILLEGAL_ARGUMENT));
  }
}

ndk::ScopedAStatus VirtualCameraSession::flush() {
  ALOGV("%s", __func__);
  std::lock_guard<std::mutex> lock(mLock);
  if (mRenderThread != nullptr) {
    mRenderThread->flush();
  }
  return ndk::ScopedAStatus::ok();
}

ndk::ScopedAStatus VirtualCameraSession::getCaptureRequestMetadataQueue(
    MQDescriptor<int8_t, SynchronizedReadWrite>* _aidl_return) {
  ALOGV("%s", __func__);
  *_aidl_return = mRequestMetadataQueue->dupeDesc();
  return ndk::ScopedAStatus::ok();
}

ndk::ScopedAStatus VirtualCameraSession::getCaptureResultMetadataQueue(
    MQDescriptor<int8_t, SynchronizedReadWrite>* _aidl_return) {
  ALOGV("%s", __func__);
  *_aidl_return = mResultMetadataQueue->dupeDesc();
  return ndk::ScopedAStatus::ok();
}

ndk::ScopedAStatus VirtualCameraSession::isReconfigurationRequired(
    const CameraMetadata& in_oldSessionParams,
    const CameraMetadata& in_newSessionParams, bool* _aidl_return) {
  ALOGV("%s: oldSessionParams: %s newSessionParams: %s", __func__,
        in_oldSessionParams.toString().c_str(),
        in_newSessionParams.toString().c_str());

  if (_aidl_return == nullptr) {
    return ndk::ScopedAStatus::fromServiceSpecificError(
        static_cast<int32_t>(Status::ILLEGAL_ARGUMENT));
  }

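  // Conservatively report that reconfiguration is always required.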
  *_aidl_return = true;
  return ndk::ScopedAStatus::ok();
}

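// Processes a batch of capture requests: releases retired buffer caches
// first, then forwards each request to the per-request handler below.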
ndk::ScopedAStatus VirtualCameraSession::processCaptureRequest(
    const std::vector<CaptureRequest>& in_requests,
    const std::vector<BufferCache>& in_cachesToRemove, int32_t* _aidl_return) {
  ALOGV("%s", __func__);

  if (!in_cachesToRemove.empty()) {
    mSessionContext.removeBufferCaches(in_cachesToRemove);
  }

  for (const auto& captureRequest : in_requests) {
    auto status = processCaptureRequest(captureRequest);
    if (!status.isOk()) {
      return status;
    }
  }
  *_aidl_return = in_requests.size();
  return ndk::ScopedAStatus::ok();
}

ndk::ScopedAStatus VirtualCameraSession::signalStreamFlush(
    const std::vector<int32_t>& in_streamIds, int32_t in_streamConfigCounter) {
  ALOGV("%s", __func__);

  (void)in_streamIds;
  (void)in_streamConfigCounter;
  return ndk::ScopedAStatus::ok();
}

ndk::ScopedAStatus VirtualCameraSession::switchToOffline(
    const std::vector<int32_t>& in_streamsToKeep,
    CameraOfflineSessionInfo* out_offlineSessionInfo,
    std::shared_ptr<ICameraOfflineSession>* _aidl_return) {
  ALOGV("%s", __func__);

  (void)in_streamsToKeep;
  (void)out_offlineSessionInfo;

  if (_aidl_return == nullptr) {
    return ndk::ScopedAStatus::fromServiceSpecificError(
        static_cast<int32_t>(Status::ILLEGAL_ARGUMENT));
  }

  *_aidl_return = nullptr;
  return cameraStatus(Status::OPERATION_NOT_SUPPORTED);
}

ndk::ScopedAStatus VirtualCameraSession::repeatingRequestEnd(
    int32_t in_frameNumber, const std::vector<int32_t>& in_streamIds) {
  ALOGV("%s", __func__);
  (void)in_frameNumber;
  (void)in_streamIds;
  return ndk::ScopedAStatus::ok();
}

std::set<int> VirtualCameraSession::getStreamIds() const {
  return mSessionContext.getStreamIds();
}

ndk::ScopedAStatus VirtualCameraSession::processCaptureRequest(
    const CaptureRequest& request) {
  ALOGD("%s: request: %s", __func__, request.toString().c_str());

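  // The first request after a stream configuration must carry settings
  // metadata.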
  if (mFirstRequest.exchange(false) && request.settings.metadata.empty()) {
    return cameraStatus(Status::ILLEGAL_ARGUMENT);
  }

  std::shared_ptr<ICameraDeviceCallback> cameraCallback = nullptr;
  {
    std::lock_guard<std::mutex> lock(mLock);
    cameraCallback = mCameraDeviceCallback;
  }

  if (cameraCallback == nullptr) {
    ALOGE(
        "%s: processCaptureRequest called, but there's no camera callback "
        "configured",
        __func__);
    return cameraStatus(Status::INTERNAL_ERROR);
  }

  if (!mSessionContext.importBuffersFromCaptureRequest(request)) {
    ALOGE("Failed to import buffers from capture request.");
    return cameraStatus(Status::INTERNAL_ERROR);
  }

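  // Wrap the requested output buffers into task buffers for the render
  // thread, importing each buffer's acquire fence.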
  std::vector<CaptureRequestBuffer> taskBuffers;
  taskBuffers.reserve(request.outputBuffers.size());
  for (const StreamBuffer& streamBuffer : request.outputBuffers) {
    taskBuffers.emplace_back(streamBuffer.streamId, streamBuffer.bufferId,
                             importFence(streamBuffer.acquireFence));
  }

  {
    std::lock_guard<std::mutex> lock(mLock);
    if (mRenderThread == nullptr) {
      ALOGE(
          "%s: processCaptureRequest (frameNumber %d) called before configure "
          "(render thread not initialized)",
          __func__, request.frameNumber);
      return cameraStatus(Status::INTERNAL_ERROR);
    }
    mRenderThread->enqueueTask(std::make_unique<ProcessCaptureRequestTask>(
        request.frameNumber, taskBuffers));
  }

  if (mVirtualCameraClientCallback != nullptr) {
    auto status = mVirtualCameraClientCallback->onProcessCaptureRequest(
        /*streamId=*/0, request.frameNumber);
    if (!status.isOk()) {
      ALOGE(
          "Failed to invoke onProcessCaptureRequest client callback for frame "
          "%d",
          request.frameNumber);
    }
  }

  return ndk::ScopedAStatus::ok();
}

}  // namespace virtualcamera
}  // namespace companion
}  // namespace android