Camera: Add stream use case API

A camera stream use case indicates the purpose of a camera stream,
independent of its direct consumer target. The stream use case is
plumbed from the app down to the camera HAL.
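
For illustration, a minimal standalone sketch of the validation rule this change
adds in isStreamUseCaseSupported() (plain C++ with stand-in types and illustrative
enum values, not the real camera_metadata entries): DEFAULT is always accepted when
a device advertises no use cases; otherwise the requested value must appear in the
advertised list. In the patch below, a failed check rejects the output stream with
ERROR_ILLEGAL_ARGUMENT.

    #include <cstdint>
    #include <iostream>
    #include <vector>

    // Stand-in for ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT.
    constexpr int32_t kStreamUseCaseDefault = 0;

    // Mirrors the helper below: the advertised list comes from the static
    // ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES entry of the (physical) camera.
    bool isStreamUseCaseSupportedSketch(int32_t streamUseCase,
            const std::vector<int32_t>& availableStreamUseCases) {
        // A device that lists no use cases implicitly supports only DEFAULT.
        if (availableStreamUseCases.empty() &&
                streamUseCase == kStreamUseCaseDefault) {
            return true;
        }
        // Otherwise the requested use case must be one of the advertised values.
        for (int32_t available : availableStreamUseCases) {
            if (available == streamUseCase) {
                return true;
            }
        }
        return false;
    }

    int main() {
        std::vector<int32_t> advertised = {0 /*DEFAULT*/, 1, 2};  // illustrative values
        std::cout << isStreamUseCaseSupportedSketch(1, advertised) << "\n";  // 1: advertised
        std::cout << isStreamUseCaseSupportedSketch(5, advertised) << "\n";  // 0: not advertised
        std::cout << isStreamUseCaseSupportedSketch(0, {}) << "\n";          // 1: DEFAULT always ok
        return 0;
    }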

Test: Camera CTS
Bug: 200307880
Change-Id: I15f9e63c4dc41ce355a509c154686fc06c1ab72d
diff --git a/services/camera/libcameraservice/utils/SessionConfigurationUtils.cpp b/services/camera/libcameraservice/utils/SessionConfigurationUtils.cpp
index f826d83..548fb0b 100644
--- a/services/camera/libcameraservice/utils/SessionConfigurationUtils.cpp
+++ b/services/camera/libcameraservice/utils/SessionConfigurationUtils.cpp
@@ -31,6 +31,7 @@
 using android::hardware::camera2::ICameraDeviceUser;
 using android::hardware::camera::metadata::V3_6::CameraMetadataEnumAndroidSensorPixelMode;
 using android::hardware::camera::metadata::V3_8::CameraMetadataEnumAndroidRequestAvailableDynamicRangeProfilesMap;
+using android::hardware::camera::metadata::V3_8::CameraMetadataEnumAndroidScalerAvailableStreamUseCases;
 
 namespace android {
 namespace camera3 {
@@ -310,11 +311,30 @@
     }
 }
 
+bool isStreamUseCaseSupported(int streamUseCase,
+        const CameraMetadata &deviceInfo) {
+    camera_metadata_ro_entry_t availableStreamUseCases =
+            deviceInfo.find(ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES);
+
+    if (availableStreamUseCases.count == 0 &&
+            streamUseCase == ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT) {
+        return true;
+    }
+
+    for (size_t i = 0; i < availableStreamUseCases.count; i++) {
+        if (availableStreamUseCases.data.i32[i] == streamUseCase) {
+            return true;
+        }
+    }
+    return false;
+}
+
 binder::Status createSurfaceFromGbp(
         OutputStreamInfo& streamInfo, bool isStreamInfoValid,
         sp<Surface>& surface, const sp<IGraphicBufferProducer>& gbp,
         const String8 &logicalCameraId, const CameraMetadata &physicalCameraMetadata,
-        const std::vector<int32_t> &sensorPixelModesUsed, int dynamicRangeProfile){
+        const std::vector<int32_t> &sensorPixelModesUsed, int dynamicRangeProfile,
+        int streamUseCase) {
     // bufferProducer must be non-null
     if (gbp == nullptr) {
         String8 msg = String8::format("Camera %s: Surface is NULL", logicalCameraId.string());
@@ -427,6 +447,13 @@
         ALOGE("%s: %s", __FUNCTION__, msg.string());
         return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
     }
+    if (!SessionConfigurationUtils::isStreamUseCaseSupported(streamUseCase,
+            physicalCameraMetadata)) {
+        String8 msg = String8::format("Camera %s: stream use case %d not supported,"
+                " failed to create output stream", logicalCameraId.string(), streamUseCase);
+        ALOGE("%s: %s", __FUNCTION__, msg.string());
+        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
+    }
 
     if (!isStreamInfoValid) {
         streamInfo.width = width;
@@ -436,6 +463,7 @@
         streamInfo.consumerUsage = consumerUsage;
         streamInfo.sensorPixelModesUsed = overriddenSensorPixelModes;
         streamInfo.dynamicRangeProfile = dynamicRangeProfile;
+        streamInfo.streamUseCase = streamUseCase;
         return binder::Status::ok();
     }
     if (width != streamInfo.width) {
@@ -506,6 +534,8 @@
     stream->dynamicRangeProfile =
         static_cast<CameraMetadataEnumAndroidRequestAvailableDynamicRangeProfilesMap> (
                 streamInfo.dynamicRangeProfile);
+    stream->useCase = static_cast<CameraMetadataEnumAndroidScalerAvailableStreamUseCases>(
+            streamInfo.streamUseCase);
 }
 
 binder::Status checkPhysicalCameraId(
@@ -666,6 +696,7 @@
             return res;
         }
 
+        int streamUseCase = it.getStreamUseCase();
         if (deferredConsumer) {
             streamInfo.width = it.getWidth();
             streamInfo.height = it.getHeight();
@@ -686,6 +717,7 @@
                         return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT,
                                 "Deferred surface sensor pixel modes not valid");
             }
+            streamInfo.streamUseCase = streamUseCase;
             mapStreamInfo(streamInfo, camera3::CAMERA_STREAM_ROTATION_0, physicalCameraId, groupId,
                     &streamConfiguration.streams[streamIdx++]);
             isStreamInfoValid = true;
@@ -698,7 +730,8 @@
         for (auto& bufferProducer : bufferProducers) {
             sp<Surface> surface;
             res = createSurfaceFromGbp(streamInfo, isStreamInfoValid, surface, bufferProducer,
-                    logicalCameraId, metadataChosen, sensorPixelModesUsed, dynamicRangeProfile);
+                    logicalCameraId, metadataChosen, sensorPixelModesUsed, dynamicRangeProfile,
+                    streamUseCase);
 
             if (!res.isOk())
                 return res;
@@ -851,6 +884,11 @@
             // image
             return false;
         }
+        if (static_cast<int32_t>(streamConfigV38.streams[i].useCase) !=
+                ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT) {
+            // ICameraDevice older than 3.8 doesn't support stream use case
+            return false;
+        }
         streamConfigV37.streams[i] = streamConfigV38.streams[i].v3_7;
     }
     streamConfigV37.operationMode = streamConfigV38.operationMode;
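
A similarly minimal sketch (simplified stand-in struct, not the generated HIDL
types) of the downgrade guard in the last hunk: a pre-3.8 ICameraDevice has no
useCase field, so a V3.8 stream configuration can only be converted to V3.7 when
every stream uses the DEFAULT use case.

    #include <cstdint>
    #include <vector>

    // Stand-in for ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT.
    constexpr int32_t kStreamUseCaseDefault = 0;

    // Hypothetical simplified stream struct; the real V3.8 stream also carries
    // the V3.7 fields plus the dynamic range profile.
    struct StreamV38 { int32_t useCase; };

    // Returns false when any stream requests a non-DEFAULT use case, mirroring
    // the "ICameraDevice older than 3.8 doesn't support stream use case" check.
    bool canDowngradeToV37(const std::vector<StreamV38>& streams) {
        for (const auto& s : streams) {
            if (s.useCase != kStreamUseCaseDefault) {
                return false;
            }
        }
        return true;
    }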