Add missing camera characteristics and fix metadata inconsistencies
* Fix bug causing accidental reporting of (0,0) FPS range
* Fix FPS range ordering
* Add missing characteristics / metadata required by CTS
Bug: 301023410
Bug: 322965201
Test: atest CtsCameraTestCases
Test: atest virtual_camera_tests
Change-Id: Ic274a804da09436640970d57fd3b31d42256fa1b
diff --git a/services/camera/virtualcamera/VirtualCameraDevice.cc b/services/camera/virtualcamera/VirtualCameraDevice.cc
index fccbbc9..947b355 100644
--- a/services/camera/virtualcamera/VirtualCameraDevice.cc
+++ b/services/camera/virtualcamera/VirtualCameraDevice.cc
@@ -72,6 +72,13 @@
constexpr int32_t kMaxJpegSize = 3 * 1024 * 1024 /*3MiB*/;
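+// Lowest frame rate advertised in the AE target FPS ranges; also determines
+// the reported maximum frame duration below.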
+constexpr int32_t kMinFps = 15;
+
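+// Longest frame duration reported via ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
+// derived from kMinFps (1s / 15 is roughly 66.7ms).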
+constexpr std::chrono::nanoseconds kMaxFrameDuration =
+ std::chrono::duration_cast<std::chrono::nanoseconds>(1e9ns / kMinFps);
+
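+// Maximum number of frames in flight, reported via
+// ANDROID_REQUEST_PIPELINE_MAX_DEPTH.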
+constexpr uint8_t kPipelineMaxDepth = 2;
+
constexpr MetadataBuilder::ControlRegion kDefaultEmptyControlRegion{};
const std::array<PixelFormat, 3> kOutputFormats{
@@ -86,10 +93,20 @@
std::vector<MetadataBuilder::FpsRange> fpsRangesForInputConfig(
const std::vector<SupportedStreamConfiguration>& configs) {
std::set<MetadataBuilder::FpsRange> availableRanges;
+
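+ // For every supported input configuration, advertise both a variable
+ // [kMinFps, maxFps] range and a fixed [maxFps, maxFps] range.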
for (const SupportedStreamConfiguration& config : configs) {
+ availableRanges.insert({.minFps = kMinFps, .maxFps = config.maxFps});
availableRanges.insert({.minFps = config.maxFps, .maxFps = config.maxFps});
}
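+ // If any configuration can reach 30fps, also advertise the common
+ // [kMinFps, 30] and [30, 30] ranges.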
+ if (std::any_of(configs.begin(), configs.end(),
+ [](const SupportedStreamConfiguration& config) {
+ return config.maxFps >= 30;
+ })) {
+ availableRanges.insert({.minFps = kMinFps, .maxFps = 30});
+ availableRanges.insert({.minFps = 30, .maxFps = 30});
+ }
+
return std::vector<MetadataBuilder::FpsRange>(availableRanges.begin(),
availableRanges.end());
}
@@ -159,11 +176,14 @@
.setSensorTimestampSource(ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_UNKNOWN)
.setSensorPhysicalSize(36.0, 24.0)
.setAvailableFaceDetectModes({ANDROID_STATISTICS_FACE_DETECT_MODE_OFF})
+ .setAvailableTestPatternModes({ANDROID_SENSOR_TEST_PATTERN_MODE_OFF})
.setAvailableMaxDigitalZoom(1.0)
.setControlAvailableModes({ANDROID_CONTROL_MODE_AUTO})
.setControlAfAvailableModes({ANDROID_CONTROL_AF_MODE_OFF})
.setControlAvailableSceneModes({ANDROID_CONTROL_SCENE_MODE_DISABLED})
.setControlAvailableEffects({ANDROID_CONTROL_EFFECT_MODE_OFF})
+ .setControlAvailableVideoStabilizationModes(
+ {ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF})
.setControlAeAvailableModes({ANDROID_CONTROL_AE_MODE_ON})
.setControlAeAvailableAntibandingModes(
{ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO})
@@ -182,10 +202,12 @@
// TODO(b/301023410) Add JPEG Exif + thumbnail support.
.setJpegAvailableThumbnailSizes({Resolution(0, 0)})
.setMaxJpegSize(kMaxJpegSize)
+ .setMaxFrameDuration(kMaxFrameDuration)
.setMaxNumberOutputStreams(
VirtualCameraDevice::kMaxNumberOfRawStreams,
VirtualCameraDevice::kMaxNumberOfProcessedStreams,
VirtualCameraDevice::kMaxNumberOfStallStreams)
+ .setPipelineMaxDepth(kPipelineMaxDepth)
.setSyncMaxLatency(ANDROID_SYNC_MAX_LATENCY_UNKNOWN)
.setAvailableRequestKeys(
{ANDROID_CONTROL_CAPTURE_INTENT, ANDROID_CONTROL_AE_MODE,
@@ -194,12 +216,17 @@
ANDROID_CONTROL_AE_ANTIBANDING_MODE,
ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, ANDROID_CONTROL_AF_TRIGGER,
ANDROID_CONTROL_AF_MODE, ANDROID_CONTROL_AWB_MODE,
- ANDROID_STATISTICS_FACE_DETECT_MODE, ANDROID_FLASH_MODE})
+ ANDROID_SCALER_CROP_REGION, ANDROID_CONTROL_EFFECT_MODE,
+ ANDROID_CONTROL_MODE, ANDROID_CONTROL_SCENE_MODE,
+ ANDROID_CONTROL_VIDEO_STABILIZATION_MODE,
+ ANDROID_CONTROL_ZOOM_RATIO, ANDROID_STATISTICS_FACE_DETECT_MODE,
+ ANDROID_FLASH_MODE})
.setAvailableResultKeys(
{ANDROID_CONTROL_AE_MODE, ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER,
ANDROID_CONTROL_AF_MODE, ANDROID_CONTROL_AWB_MODE,
- ANDROID_FLASH_STATE, ANDROID_SENSOR_TIMESTAMP,
- ANDROID_LENS_FOCAL_LENGTH})
+ ANDROID_CONTROL_EFFECT_MODE, ANDROID_CONTROL_MODE,
+ ANDROID_FLASH_MODE, ANDROID_FLASH_STATE,
+ ANDROID_SENSOR_TIMESTAMP, ANDROID_LENS_FOCAL_LENGTH})
.setAvailableCapabilities(
{ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE});
@@ -426,6 +453,19 @@
return mSupportedInputConfigurations;
}
+Resolution VirtualCameraDevice::getMaxInputResolution() const {
+ std::optional<Resolution> maxResolution =
+ getMaxResolution(mSupportedInputConfigurations);
+ if (!maxResolution.has_value()) {
+ ALOGE(
+ "%s: Cannot determine sensor size for virtual camera - input "
+ "configurations empty?",
+ __func__);
+ return Resolution(0, 0);
+ }
+ return maxResolution.value();
+}
+
std::shared_ptr<VirtualCameraDevice> VirtualCameraDevice::sharedFromThis() {
// SharedRefBase which BnCameraDevice inherits from breaks
// std::enable_shared_from_this. This is recommended replacement for
diff --git a/services/camera/virtualcamera/VirtualCameraDevice.h b/services/camera/virtualcamera/VirtualCameraDevice.h
index fbd9095..720f02e 100644
--- a/services/camera/virtualcamera/VirtualCameraDevice.h
+++ b/services/camera/virtualcamera/VirtualCameraDevice.h
@@ -24,6 +24,7 @@
#include "aidl/android/companion/virtualcamera/SupportedStreamConfiguration.h"
#include "aidl/android/companion/virtualcamera/VirtualCameraConfiguration.h"
#include "aidl/android/hardware/camera/device/BnCameraDevice.h"
+#include "util/Util.h"
namespace android {
namespace companion {
@@ -98,6 +99,9 @@
aidl::android::companion::virtualcamera::SupportedStreamConfiguration>&
getInputConfigs() const;
+ // Returns largest supported input resolution.
+ Resolution getMaxInputResolution() const;
+
// Maximal number of RAW streams - virtual camera doesn't support RAW streams.
static const int32_t kMaxNumberOfRawStreams = 0;
diff --git a/services/camera/virtualcamera/VirtualCameraRenderThread.cc b/services/camera/virtualcamera/VirtualCameraRenderThread.cc
index cd36c6d..7bbc6ea 100644
--- a/services/camera/virtualcamera/VirtualCameraRenderThread.cc
+++ b/services/camera/virtualcamera/VirtualCameraRenderThread.cc
@@ -18,7 +18,6 @@
#include "VirtualCameraRenderThread.h"
#include <chrono>
-#include <cstddef>
#include <cstdint>
#include <future>
#include <memory>
@@ -72,17 +71,29 @@
static constexpr std::chrono::milliseconds kAcquireFenceTimeout = 500ms;
+// See REQUEST_PIPELINE_DEPTH in CaptureResult.java.
+// This roughly corresponds to frame latency; we set it to the
+// documented minimum of 2.
+static constexpr uint8_t kPipelineDepth = 2;
+
CameraMetadata createCaptureResultMetadata(
- const std::chrono::nanoseconds timestamp) {
+ const std::chrono::nanoseconds timestamp,
+ const Resolution reportedSensorSize) {
std::unique_ptr<CameraMetadata> metadata =
MetadataBuilder()
.setControlAeMode(ANDROID_CONTROL_AE_MODE_ON)
.setControlAePrecaptureTrigger(
ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE)
- .setControlAfMode(ANDROID_CONTROL_AF_MODE_AUTO)
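+ // AF is reported as OFF to match the only mode advertised in
+ // ANDROID_CONTROL_AF_AVAILABLE_MODES.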
+ .setControlAfMode(ANDROID_CONTROL_AF_MODE_OFF)
.setControlAwbMode(ANDROID_CONTROL_AWB_MODE_AUTO)
+ .setControlEffectMode(ANDROID_CONTROL_EFFECT_MODE_OFF)
+ .setControlMode(ANDROID_CONTROL_MODE_AUTO)
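+ // Report a crop region covering the full reported sensor area
+ // (no zoom applied).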
+ .setCropRegion(0, 0, reportedSensorSize.width,
+ reportedSensorSize.height)
+ .setFaceDetectMode(ANDROID_STATISTICS_FACE_DETECT_MODE_OFF)
.setFlashState(ANDROID_FLASH_STATE_UNAVAILABLE)
.setFocalLength(VirtualCameraDevice::kFocalLength)
+ .setPipelineDepth(kPipelineDepth)
.setSensorTimestamp(timestamp)
.build();
if (metadata == nullptr) {
@@ -180,12 +191,12 @@
}
VirtualCameraRenderThread::VirtualCameraRenderThread(
- VirtualCameraSessionContext& sessionContext, const int mWidth,
- const int mHeight,
+ VirtualCameraSessionContext& sessionContext,
+ const Resolution inputSurfaceSize, const Resolution reportedSensorSize,
std::shared_ptr<ICameraDeviceCallback> cameraDeviceCallback, bool testMode)
: mCameraDeviceCallback(cameraDeviceCallback),
- mInputSurfaceWidth(mWidth),
- mInputSurfaceHeight(mHeight),
+ mInputSurfaceSize(inputSurfaceSize),
+ mReportedSensorSize(reportedSensorSize),
mTestMode(testMode),
mSessionContext(sessionContext) {
}
@@ -273,8 +284,8 @@
std::make_unique<EglTextureProgram>(EglTextureProgram::TextureFormat::YUV);
mEglTextureRgbProgram = std::make_unique<EglTextureProgram>(
EglTextureProgram::TextureFormat::RGBA);
- mEglSurfaceTexture = std::make_unique<EglSurfaceTexture>(mInputSurfaceWidth,
- mInputSurfaceHeight);
+ mEglSurfaceTexture = std::make_unique<EglSurfaceTexture>(
+ mInputSurfaceSize.width, mInputSurfaceSize.height);
mInputSurfacePromise.set_value(mEglSurfaceTexture->getSurface());
while (std::unique_ptr<ProcessCaptureRequestTask> task = dequeueTask()) {
@@ -297,7 +308,8 @@
captureResult.partialResult = 1;
captureResult.inputBuffer.streamId = -1;
captureResult.physicalCameraMetadata.resize(0);
- captureResult.result = createCaptureResultMetadata(timestamp);
+ captureResult.result =
+ createCaptureResultMetadata(timestamp, mReportedSensorSize);
const std::vector<CaptureRequestBuffer>& buffers = request.getBuffers();
captureResult.outputBuffers.resize(buffers.size());
diff --git a/services/camera/virtualcamera/VirtualCameraRenderThread.h b/services/camera/virtualcamera/VirtualCameraRenderThread.h
index b3aaed8..c8f61f4 100644
--- a/services/camera/virtualcamera/VirtualCameraRenderThread.h
+++ b/services/camera/virtualcamera/VirtualCameraRenderThread.h
@@ -22,6 +22,7 @@
#include <memory>
#include <thread>
+#include "VirtualCameraDevice.h"
#include "VirtualCameraSessionContext.h"
#include "aidl/android/hardware/camera/device/ICameraDeviceCallback.h"
#include "android/binder_auto_utils.h"
@@ -77,14 +78,14 @@
// Create VirtualCameraRenderThread instance:
// * sessionContext - VirtualCameraSessionContext reference for shared access
// to mapped buffers.
- // * inputWidth - requested width of input surface ("virtual camera sensor")
- // * inputHeight - requested height of input surface ("virtual camera sensor")
+ // * inputSurfaceSize - requested size of input surface.
+ // * reportedSensorSize - reported static sensor size of virtual camera.
// * cameraDeviceCallback - callback for corresponding camera instance
// * testMode - when set to true, test pattern is rendered to input surface
// before each capture request is processed to simulate client input.
VirtualCameraRenderThread(
- VirtualCameraSessionContext& sessionContext, int inputWidth,
- int inputHeight,
+ VirtualCameraSessionContext& sessionContext, Resolution inputSurfaceSize,
+ Resolution reportedSensorSize,
std::shared_ptr<
::aidl::android::hardware::camera::device::ICameraDeviceCallback>
cameraDeviceCallback,
@@ -149,8 +150,8 @@
::aidl::android::hardware::camera::device::ICameraDeviceCallback>
mCameraDeviceCallback;
- const int mInputSurfaceWidth;
- const int mInputSurfaceHeight;
+ const Resolution mInputSurfaceSize;
+ const Resolution mReportedSensorSize;
const int mTestMode;
VirtualCameraSessionContext& mSessionContext;
diff --git a/services/camera/virtualcamera/VirtualCameraSession.cc b/services/camera/virtualcamera/VirtualCameraSession.cc
index 8449f41..d1ec763 100644
--- a/services/camera/virtualcamera/VirtualCameraSession.cc
+++ b/services/camera/virtualcamera/VirtualCameraSession.cc
@@ -144,10 +144,12 @@
.setControlAePrecaptureTrigger(
ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE)
.setControlAfTrigger(ANDROID_CONTROL_AF_TRIGGER_IDLE)
- .setControlAfMode(ANDROID_CONTROL_AF_MODE_AUTO)
+ .setControlAfMode(ANDROID_CONTROL_AF_MODE_OFF)
.setControlAwbMode(ANDROID_CONTROL_AWB_MODE_AUTO)
+ .setControlEffectMode(ANDROID_CONTROL_EFFECT_MODE_OFF)
.setFaceDetectMode(ANDROID_STATISTICS_FACE_DETECT_MODE_OFF)
.setFlashMode(ANDROID_FLASH_MODE_OFF)
+ .setFlashState(ANDROID_FLASH_STATE_UNAVAILABLE)
.build();
if (metadata == nullptr) {
ALOGE("%s: Failed to construct metadata for default request type %s",
@@ -279,7 +281,8 @@
// If there's no client callback, start camera in test mode.
const bool testMode = mVirtualCameraClientCallback == nullptr;
mRenderThread = std::make_unique<VirtualCameraRenderThread>(
- mSessionContext, inputWidth, inputHeight, mCameraDeviceCallback,
+ mSessionContext, Resolution(inputWidth, inputHeight),
+ virtualCamera->getMaxInputResolution(), mCameraDeviceCallback,
testMode);
mRenderThread->start();
inputSurface = mRenderThread->getInputSurface();
diff --git a/services/camera/virtualcamera/tests/VirtualCameraRenderThreadTest.cc b/services/camera/virtualcamera/tests/VirtualCameraRenderThreadTest.cc
index 5f899b8..ddcb789 100644
--- a/services/camera/virtualcamera/tests/VirtualCameraRenderThreadTest.cc
+++ b/services/camera/virtualcamera/tests/VirtualCameraRenderThreadTest.cc
@@ -33,6 +33,7 @@
#include "android/binder_auto_utils.h"
#include "gmock/gmock.h"
#include "gtest/gtest.h"
+#include "util/Util.h"
namespace android {
namespace companion {
@@ -62,6 +63,7 @@
constexpr int kInputWidth = 640;
constexpr int kInputHeight = 480;
+const Resolution kInputResolution(kInputWidth, kInputHeight);
Matcher<StreamBuffer> IsStreamBufferWithStatus(const int streamId,
const int bufferId,
@@ -102,7 +104,8 @@
mMockCameraDeviceCallback =
ndk::SharedRefBase::make<MockCameraDeviceCallback>();
mRenderThread = std::make_unique<VirtualCameraRenderThread>(
- *mSessionContext, kInputWidth, kInputHeight, mMockCameraDeviceCallback);
+ *mSessionContext, kInputResolution,
+ /*reportedSensorSize*/ kInputResolution, mMockCameraDeviceCallback);
}
protected:
diff --git a/services/camera/virtualcamera/util/MetadataBuilder.cc b/services/camera/virtualcamera/util/MetadataBuilder.cc
index db5a5dd..2bbd58c 100644
--- a/services/camera/virtualcamera/util/MetadataBuilder.cc
+++ b/services/camera/virtualcamera/util/MetadataBuilder.cc
@@ -137,6 +137,14 @@
return *this;
}
+MetadataBuilder& MetadataBuilder::setAvailableTestPatternModes(
+ const std::vector<camera_metadata_enum_android_sensor_test_pattern_mode>&
+ testPatternModes) {
+ mEntryMap[ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES] =
+ convertTo<int32_t>(testPatternModes);
+ return *this;
+}
+
MetadataBuilder& MetadataBuilder::setFaceDetectMode(
const camera_metadata_enum_android_statistics_face_detect_mode_t
faceDetectMode) {
@@ -175,6 +183,21 @@
return *this;
}
+MetadataBuilder& MetadataBuilder::setControlEffectMode(
+ const camera_metadata_enum_android_control_effect_mode_t effectMode) {
+ mEntryMap[ANDROID_CONTROL_EFFECT_MODE] = asVectorOf<uint8_t>(effectMode);
+ return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setControlAvailableVideoStabilizationModes(
+ const std::vector<
+ camera_metadata_enum_android_control_video_stabilization_mode_t>&
+ videoStabilizationModes) {
+ mEntryMap[ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES] =
+ convertTo<uint8_t>(videoStabilizationModes);
+ return *this;
+}
+
MetadataBuilder& MetadataBuilder::setControlAfAvailableModes(
const std::vector<camera_metadata_enum_android_control_af_mode_t>&
availableModes) {
@@ -199,7 +222,7 @@
MetadataBuilder& MetadataBuilder::setControlAeAvailableFpsRanges(
const std::vector<FpsRange>& fpsRanges) {
std::vector<int32_t> ranges;
- ranges.resize(2 * fpsRanges.size());
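+ // Use reserve(), not resize(); resize() would pre-fill the vector with
+ // zeros and the push_backs below would append after them, producing a
+ // spurious (0,0) FPS range.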
+ ranges.reserve(2 * fpsRanges.size());
for (const FpsRange fpsRange : fpsRanges) {
ranges.push_back(fpsRange.minFps);
ranges.push_back(fpsRange.maxFps);
@@ -341,11 +364,26 @@
return *this;
}
+MetadataBuilder& MetadataBuilder::setCropRegion(const int32_t x, const int32_t y,
+ const int32_t width,
+ const int32_t height) {
+ mEntryMap[ANDROID_SCALER_CROP_REGION] =
+ std::vector<int32_t>({x, y, width, height});
+ return *this;
+}
+
MetadataBuilder& MetadataBuilder::setMaxJpegSize(const int32_t size) {
mEntryMap[ANDROID_JPEG_MAX_SIZE] = asVectorOf<int32_t>(size);
return *this;
}
+MetadataBuilder& MetadataBuilder::setMaxFrameDuration(
+ const std::chrono::nanoseconds duration) {
+ mEntryMap[ANDROID_SENSOR_INFO_MAX_FRAME_DURATION] =
+ asVectorOf<int64_t>(duration.count());
+ return *this;
+}
+
MetadataBuilder& MetadataBuilder::setJpegAvailableThumbnailSizes(
const std::vector<Resolution>& thumbnailSizes) {
std::vector<int32_t> sizes;
@@ -372,6 +410,16 @@
return *this;
}
+MetadataBuilder& MetadataBuilder::setPipelineMaxDepth(const uint8_t maxDepth) {
+ mEntryMap[ANDROID_REQUEST_PIPELINE_MAX_DEPTH] = asVectorOf<uint8_t>(maxDepth);
+ return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setPipelineDepth(const uint8_t depth) {
+ mEntryMap[ANDROID_REQUEST_PIPELINE_DEPTH] = asVectorOf<uint8_t>(depth);
+ return *this;
+}
+
MetadataBuilder& MetadataBuilder::setAvailableRequestCapabilities(
const std::vector<camera_metadata_enum_android_request_available_capabilities_t>&
requestCapabilities) {
diff --git a/services/camera/virtualcamera/util/MetadataBuilder.h b/services/camera/virtualcamera/util/MetadataBuilder.h
index fdc35fd..df99089 100644
--- a/services/camera/virtualcamera/util/MetadataBuilder.h
+++ b/services/camera/virtualcamera/util/MetadataBuilder.h
@@ -64,7 +64,8 @@
int32_t maxFps;
bool operator<(const FpsRange& other) const {
- return std::tuple(minFps, maxFps) < std::tuple(other.minFps, other.maxFps);
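+ // Order primarily by maxFps and then by minFps so that containers of
+ // ranges iterate in ascending order of the upper bound.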
+ return maxFps == other.maxFps ? minFps < other.minFps
+ : maxFps < other.maxFps;
}
};
@@ -129,6 +130,11 @@
const std::vector<camera_metadata_enum_android_statistics_face_detect_mode_t>&
faceDetectMode);
+ // See SENSOR_AVAILABLE_TEST_PATTERN_MODES in CameraCharacteristics.java.
+ MetadataBuilder& setAvailableTestPatternModes(
+ const std::vector<camera_metadata_enum_android_sensor_test_pattern_mode>&
+ testPatternModes);
+
// See ANDROID_STATISTICS_FACE_DETECT_MODE in CaptureRequest.java.
MetadataBuilder& setFaceDetectMode(
camera_metadata_enum_android_statistics_face_detect_mode_t faceDetectMode);
@@ -161,6 +167,16 @@
const std::vector<camera_metadata_enum_android_control_effect_mode>&
availableEffects);
+ // See CONTROL_EFFECT_MODE in CaptureRequest.java.
+ MetadataBuilder& setControlEffectMode(
+ camera_metadata_enum_android_control_effect_mode_t effectMode);
+
+ // See CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES in CameraCharacteristics.java.
+ MetadataBuilder& setControlAvailableVideoStabilizationModes(
+ const std::vector<
+ camera_metadata_enum_android_control_video_stabilization_mode_t>&
+ videoStabilizationModes);
+
// See CONTROL_AE_AVAILABLE_ANTIBANDING_MODES in CameraCharacteristics.java.
MetadataBuilder& setControlAeAvailableAntibandingModes(
const std::vector<camera_metadata_enum_android_control_ae_antibanding_mode_t>&
@@ -242,6 +258,10 @@
MetadataBuilder& setControlAwbRegions(
const std::vector<ControlRegion>& awbRegions);
+ // See ANDROID_SCALER_CROP_REGION in CaptureRequest.java.
+ MetadataBuilder& setCropRegion(int32_t x, int32_t y, int32_t width,
+ int32_t height);
+
// See ANDROID_CONTROL_AF_REGIONS in CameraMetadataTag.aidl.
MetadataBuilder& setControlAfRegions(
const std::vector<ControlRegion>& afRegions);
@@ -251,6 +271,9 @@
// See ANDROID_JPEG_SIZE in CameraMetadataTag.aidl.
MetadataBuilder& setMaxJpegSize(int32_t size);
+ // See SENSOR_INFO_MAX_FRAME_DURATION in CameraCharacteristics.java.
+ MetadataBuilder& setMaxFrameDuration(std::chrono::nanoseconds duration);
+
// See JPEG_AVAILABLE_THUMBNAIL_SIZES in CameraCharacteristic.java.
MetadataBuilder& setJpegAvailableThumbnailSizes(
const std::vector<Resolution>& thumbnailSizes);
@@ -267,6 +290,12 @@
MetadataBuilder& setSyncMaxLatency(
camera_metadata_enum_android_sync_max_latency setSyncMaxLatency);
+ // See REQUEST_PIPELINE_MAX_DEPTH in CameraCharacteristics.java.
+ MetadataBuilder& setPipelineMaxDepth(uint8_t maxDepth);
+
+ // See REQUEST_PIPELINE_DEPTH in CaptureResult.java.
+ MetadataBuilder& setPipelineDepth(uint8_t depth);
+
// See ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM in CameraMetadataTag.aidl.
MetadataBuilder& setAvailableMaxDigitalZoom(const float maxZoom);