Camera: Add support for Jpeg/R output

Initial set of changes required to support Jpeg/R outputs via
composite streams. The modifications do not include the Jpeg/R
codec compression sequence; the encoding functionality will be
added in subsequent patches.

Bug: 241284696
Test: atest -c -d
cts/tests/camera/src/android/hardware/camera2/cts/ImageReaderTest.java#testJpegR
atest -c -d
cts/tests/camera/src/android/hardware/camera2/cts/ImageReaderTest.java#testJpegRDisplayP3

Change-Id: Ia010bcb8a9df899f674009e4f5e2928b8b6ae842
diff --git a/camera/ndk/include/camera/NdkCameraMetadataTags.h b/camera/ndk/include/camera/NdkCameraMetadataTags.h
index 2869669..152b786 100644
--- a/camera/ndk/include/camera/NdkCameraMetadataTags.h
+++ b/camera/ndk/include/camera/NdkCameraMetadataTags.h
@@ -75,6 +75,7 @@
     ACAMERA_AUTOMOTIVE,
     ACAMERA_AUTOMOTIVE_LENS,
     ACAMERA_EXTENSION,
+    ACAMERA_JPEGR,
     ACAMERA_SECTION_COUNT,
 
     ACAMERA_VENDOR = 0x8000
@@ -121,6 +122,7 @@
     ACAMERA_AUTOMOTIVE_START       = ACAMERA_AUTOMOTIVE        << 16,
     ACAMERA_AUTOMOTIVE_LENS_START  = ACAMERA_AUTOMOTIVE_LENS   << 16,
     ACAMERA_EXTENSION_START        = ACAMERA_EXTENSION         << 16,
+    ACAMERA_JPEGR_START            = ACAMERA_JPEGR             << 16,
     ACAMERA_VENDOR_START           = ACAMERA_VENDOR            << 16
 } acamera_metadata_section_start_t;
 
@@ -7513,6 +7515,145 @@
             ACAMERA_AUTOMOTIVE_LENS_START,
     ACAMERA_AUTOMOTIVE_LENS_END,
 
+    /**
+     * <p>The available Jpeg/R stream
+     * configurations that this camera device supports
+     * (i.e. format, width, height, output/input stream).</p>
+     *
+     * <p>Type: int32[n*4] (acamera_metadata_enum_android_jpegr_available_jpeg_r_stream_configurations_t)</p>
+     *
+     * <p>This tag may appear in:
+     * <ul>
+     *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+     * </ul></p>
+     *
+     * <p>The configurations are listed as <code>(format, width, height, input?)</code> tuples.</p>
+     * <p>If the camera device supports Jpeg/R, it will support the same stream combinations with
+     * Jpeg/R as it does with P010. The stream combinations with Jpeg/R (or P010) supported
+     * by the device are determined by the device's hardware level and capabilities.</p>
+     * <p>All the static, control, and dynamic metadata tags related to JPEG apply to Jpeg/R formats.
+     * Configuring JPEG and Jpeg/R streams at the same time is not supported.</p>
+     * <p>All the configuration tuples <code>(format, width, height, input?)</code> will contain
+     * AIMAGE_FORMAT_JPEGR format as OUTPUT only.</p>
+     */
+    ACAMERA_JPEGR_AVAILABLE_JPEG_R_STREAM_CONFIGURATIONS =      // int32[n*4] (acamera_metadata_enum_android_jpegr_available_jpeg_r_stream_configurations_t)
+            ACAMERA_JPEGR_START,
+    /**
+     * <p>This lists the minimum frame duration for each
+     * format/size combination for Jpeg/R output formats.</p>
+     *
+     * <p>Type: int64[4*n]</p>
+     *
+     * <p>This tag may appear in:
+     * <ul>
+     *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+     * </ul></p>
+     *
+     * <p>This should correspond to the frame duration when only that
+     * stream is active, with all processing (typically in android.*.mode)
+     * set to either OFF or FAST.</p>
+     * <p>When multiple streams are used in a request, the minimum frame
+     * duration will be max(individual stream min durations).</p>
+     * <p>See ACAMERA_SENSOR_FRAME_DURATION and
+     * ACAMERA_SCALER_AVAILABLE_STALL_DURATIONS for more details about
+     * calculating the max frame rate.</p>
+     *
+     * @see ACAMERA_SCALER_AVAILABLE_STALL_DURATIONS
+     * @see ACAMERA_SENSOR_FRAME_DURATION
+     */
+    ACAMERA_JPEGR_AVAILABLE_JPEG_R_MIN_FRAME_DURATIONS =        // int64[4*n]
+            ACAMERA_JPEGR_START + 1,
+    /**
+     * <p>This lists the maximum stall duration for each
+     * output format/size combination for Jpeg/R streams.</p>
+     *
+     * <p>Type: int64[4*n]</p>
+     *
+     * <p>This tag may appear in:
+     * <ul>
+     *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+     * </ul></p>
+     *
+     * <p>A stall duration is how much extra time would get added
+     * to the normal minimum frame duration for a repeating request
+     * that has streams with non-zero stall.</p>
+     * <p>This functions similarly to
+     * ACAMERA_SCALER_AVAILABLE_STALL_DURATIONS for Jpeg/R
+     * streams.</p>
+     * <p>All Jpeg/R output stream formats may have a nonzero stall
+     * duration.</p>
+     *
+     * @see ACAMERA_SCALER_AVAILABLE_STALL_DURATIONS
+     */
+    ACAMERA_JPEGR_AVAILABLE_JPEG_R_STALL_DURATIONS =            // int64[4*n]
+            ACAMERA_JPEGR_START + 2,
+    /**
+     * <p>The available Jpeg/R stream
+     * configurations that this camera device supports
+     * (i.e. format, width, height, output/input stream).</p>
+     *
+     * <p>Type: int32[n*4] (acamera_metadata_enum_android_jpegr_available_jpeg_r_stream_configurations_maximum_resolution_t)</p>
+     *
+     * <p>This tag may appear in:
+     * <ul>
+     *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+     * </ul></p>
+     *
+     * <p>Refer to ACAMERA_JPEGR_AVAILABLE_JPEG_R_STREAM_CONFIGURATIONS for details.</p>
+     * <p>All the configuration tuples <code>(format, width, height, input?)</code> will contain
+     * AIMAGE_FORMAT_JPEGR format as OUTPUT only.</p>
+     *
+     * @see ACAMERA_JPEGR_AVAILABLE_JPEG_R_STREAM_CONFIGURATIONS
+     */
+    ACAMERA_JPEGR_AVAILABLE_JPEG_R_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION = 
+                                                                // int32[n*4] (acamera_metadata_enum_android_jpegr_available_jpeg_r_stream_configurations_maximum_resolution_t)
+            ACAMERA_JPEGR_START + 3,
+    /**
+     * <p>This lists the minimum frame duration for each
+     * format/size combination for Jpeg/R output formats for CaptureRequests where
+     * ACAMERA_SENSOR_PIXEL_MODE is set to
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION">CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION</a>.</p>
+     *
+     * @see ACAMERA_SENSOR_PIXEL_MODE
+     *
+     * <p>Type: int64[4*n]</p>
+     *
+     * <p>This tag may appear in:
+     * <ul>
+     *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+     * </ul></p>
+     *
+     * <p>Refer to ACAMERA_JPEGR_AVAILABLE_JPEG_R_MIN_FRAME_DURATIONS for details.</p>
+     *
+     * @see ACAMERA_JPEGR_AVAILABLE_JPEG_R_MIN_FRAME_DURATIONS
+     */
+    ACAMERA_JPEGR_AVAILABLE_JPEG_R_MIN_FRAME_DURATIONS_MAXIMUM_RESOLUTION = 
+                                                                // int64[4*n]
+            ACAMERA_JPEGR_START + 4,
+    /**
+     * <p>This lists the maximum stall duration for each
+     * output format/size combination for Jpeg/R streams for CaptureRequests where
+     * ACAMERA_SENSOR_PIXEL_MODE is set to
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION">CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION</a>.</p>
+     *
+     * @see ACAMERA_SENSOR_PIXEL_MODE
+     *
+     * <p>Type: int64[4*n]</p>
+     *
+     * <p>This tag may appear in:
+     * <ul>
+     *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+     * </ul></p>
+     *
+     * <p>Refer to ACAMERA_JPEGR_AVAILABLE_JPEG_R_STALL_DURATIONS for details.</p>
+     *
+     * @see ACAMERA_JPEGR_AVAILABLE_JPEG_R_STALL_DURATIONS
+     */
+    ACAMERA_JPEGR_AVAILABLE_JPEG_R_STALL_DURATIONS_MAXIMUM_RESOLUTION = 
+                                                                // int64[4*n]
+            ACAMERA_JPEGR_START + 5,
+    ACAMERA_JPEGR_END,
+
 } acamera_metadata_tag_t;
 
 /**
@@ -10991,6 +11132,25 @@
 
 
 
+// ACAMERA_JPEGR_AVAILABLE_JPEG_R_STREAM_CONFIGURATIONS
+typedef enum acamera_metadata_enum_acamera_jpegr_available_jpeg_r_stream_configurations {
+    ACAMERA_JPEGR_AVAILABLE_JPEG_R_STREAM_CONFIGURATIONS_OUTPUT      = 0,
+
+    ACAMERA_JPEGR_AVAILABLE_JPEG_R_STREAM_CONFIGURATIONS_INPUT       = 1,
+
+} acamera_metadata_enum_android_jpegr_available_jpeg_r_stream_configurations_t;
+
+// ACAMERA_JPEGR_AVAILABLE_JPEG_R_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION
+typedef enum acamera_metadata_enum_acamera_jpegr_available_jpeg_r_stream_configurations_maximum_resolution {
+    ACAMERA_JPEGR_AVAILABLE_JPEG_R_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION_OUTPUT
+                                                                      = 0,
+
+    ACAMERA_JPEGR_AVAILABLE_JPEG_R_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION_INPUT
+                                                                      = 1,
+
+} acamera_metadata_enum_android_jpegr_available_jpeg_r_stream_configurations_maximum_resolution_t;
+
+
 
 __END_DECLS
 
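The new tags above are queried like any other static metadata. The following is a minimal,
hypothetical sketch (not part of this patch) that lists the advertised Jpeg/R output sizes
through the NDK; it assumes a valid ACameraManager and camera id, and omits error handling.

#include <stdio.h>
#include <camera/NdkCameraManager.h>
#include <camera/NdkCameraMetadata.h>
#include <camera/NdkCameraMetadataTags.h>

static void printJpegRSizes(ACameraManager* manager, const char* cameraId) {
    ACameraMetadata* chars = nullptr;
    if (ACameraManager_getCameraCharacteristics(manager, cameraId, &chars) != ACAMERA_OK) {
        return;
    }

    ACameraMetadata_const_entry entry;
    // The tag is only present when the device advertises Jpeg/R support.
    if (ACameraMetadata_getConstEntry(chars,
            ACAMERA_JPEGR_AVAILABLE_JPEG_R_STREAM_CONFIGURATIONS, &entry) == ACAMERA_OK) {
        // Entries are (format, width, height, input?) tuples; Jpeg/R is advertised as OUTPUT only.
        for (uint32_t i = 0; i + 3 < entry.count; i += 4) {
            if (entry.data.i32[i + 3] ==
                    ACAMERA_JPEGR_AVAILABLE_JPEG_R_STREAM_CONFIGURATIONS_OUTPUT) {
                printf("Jpeg/R output: %dx%d\n", entry.data.i32[i + 1], entry.data.i32[i + 2]);
            }
        }
    }
    ACameraMetadata_free(chars);
}
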
diff --git a/services/camera/libcameraservice/Android.bp b/services/camera/libcameraservice/Android.bp
index d52e540..ede6cf7 100644
--- a/services/camera/libcameraservice/Android.bp
+++ b/services/camera/libcameraservice/Android.bp
@@ -65,6 +65,7 @@
         "api2/DepthCompositeStream.cpp",
         "api2/HeicEncoderInfoManager.cpp",
         "api2/HeicCompositeStream.cpp",
+        "api2/JpegRCompositeStream.cpp",
         "device3/BufferUtils.cpp",
         "device3/Camera3Device.cpp",
         "device3/Camera3OfflineSession.cpp",
diff --git a/services/camera/libcameraservice/api2/CameraDeviceClient.cpp b/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
index d80b6ec..c08aff3 100644
--- a/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
+++ b/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
@@ -36,6 +36,7 @@
 
 #include "DepthCompositeStream.h"
 #include "HeicCompositeStream.h"
+#include "JpegRCompositeStream.h"
 
 // Convenience methods for constructing binder::Status objects for error returns
 
@@ -955,19 +956,24 @@
     bool isDepthCompositeStream =
             camera3::DepthCompositeStream::isDepthCompositeStream(surfaces[0]);
     bool isHeicCompisiteStream = camera3::HeicCompositeStream::isHeicCompositeStream(surfaces[0]);
-    if (isDepthCompositeStream || isHeicCompisiteStream) {
+    bool isJpegRCompositeStream =
+        camera3::JpegRCompositeStream::isJpegRCompositeStream(surfaces[0]);
+    if (isDepthCompositeStream || isHeicCompisiteStream || isJpegRCompositeStream) {
         sp<CompositeStream> compositeStream;
         if (isDepthCompositeStream) {
             compositeStream = new camera3::DepthCompositeStream(mDevice, getRemoteCallback());
-        } else {
+        } else if (isHeicCompisiteStream) {
             compositeStream = new camera3::HeicCompositeStream(mDevice, getRemoteCallback());
+        } else {
+            compositeStream = new camera3::JpegRCompositeStream(mDevice, getRemoteCallback());
         }
 
         err = compositeStream->createStream(surfaces, deferredConsumer, streamInfo.width,
                 streamInfo.height, streamInfo.format,
                 static_cast<camera_stream_rotation_t>(outputConfiguration.getRotation()),
                 &streamId, physicalCameraId, streamInfo.sensorPixelModesUsed, &surfaceIds,
-                outputConfiguration.getSurfaceSetID(), isShared, isMultiResolution);
+                outputConfiguration.getSurfaceSetID(), isShared, isMultiResolution,
+                streamInfo.colorSpace, streamInfo.dynamicRangeProfile, streamInfo.streamUseCase);
         if (err == OK) {
             Mutex::Autolock l(mCompositeLock);
             mCompositeStreamMap.add(IInterface::asBinder(surfaces[0]->getIGraphicBufferProducer()),
@@ -1823,7 +1829,8 @@
         for (const auto& gbp : mConfiguredOutputs.valueAt(index).getGraphicBufferProducers()) {
             sp<Surface> s = new Surface(gbp, false /*controlledByApp*/);
             isCompositeStream = camera3::DepthCompositeStream::isDepthCompositeStream(s) ||
-                camera3::HeicCompositeStream::isHeicCompositeStream(s);
+                camera3::HeicCompositeStream::isHeicCompositeStream(s) ||
+                camera3::JpegRCompositeStream::isJpegRCompositeStream(s);
             if (isCompositeStream) {
                 auto compositeIdx = mCompositeStreamMap.indexOfKey(IInterface::asBinder(gbp));
                 if (compositeIdx == NAME_NOT_FOUND) {
diff --git a/services/camera/libcameraservice/api2/CompositeStream.cpp b/services/camera/libcameraservice/api2/CompositeStream.cpp
index 4b840fc..8cc47ee 100644
--- a/services/camera/libcameraservice/api2/CompositeStream.cpp
+++ b/services/camera/libcameraservice/api2/CompositeStream.cpp
@@ -49,7 +49,8 @@
         camera_stream_rotation_t rotation, int * id, const String8& physicalCameraId,
         const std::unordered_set<int32_t> &sensorPixelModesUsed,
         std::vector<int> * surfaceIds,
-        int streamSetId, bool isShared, bool isMultiResolution) {
+        int streamSetId, bool isShared, bool isMultiResolution, int32_t colorSpace,
+        int64_t dynamicProfile, int64_t streamUseCase) {
     if (hasDeferredConsumer) {
         ALOGE("%s: Deferred consumers not supported in case of composite streams!",
                 __FUNCTION__);
@@ -75,7 +76,8 @@
     }
 
     return createInternalStreams(consumers, hasDeferredConsumer, width, height, format, rotation,
-            id, physicalCameraId, sensorPixelModesUsed, surfaceIds, streamSetId, isShared);
+            id, physicalCameraId, sensorPixelModesUsed, surfaceIds, streamSetId, isShared,
+            colorSpace, dynamicProfile, streamUseCase);
 }
 
 status_t CompositeStream::deleteStream() {
diff --git a/services/camera/libcameraservice/api2/CompositeStream.h b/services/camera/libcameraservice/api2/CompositeStream.h
index 600bd28..99067dd 100644
--- a/services/camera/libcameraservice/api2/CompositeStream.h
+++ b/services/camera/libcameraservice/api2/CompositeStream.h
@@ -46,7 +46,8 @@
             camera_stream_rotation_t rotation, int *id, const String8& physicalCameraId,
             const std::unordered_set<int32_t> &sensorPixelModesUsed,
             std::vector<int> *surfaceIds,
-            int streamSetId, bool isShared, bool isMultiResolution);
+            int streamSetId, bool isShared, bool isMultiResolution, int32_t colorSpace,
+            int64_t dynamicProfile, int64_t streamUseCase);
 
     status_t deleteStream();
 
@@ -59,7 +60,8 @@
             camera_stream_rotation_t rotation, int *id, const String8& physicalCameraId,
             const std::unordered_set<int32_t> &sensorPixelModesUsed,
             std::vector<int> *surfaceIds,
-            int streamSetId, bool isShared) = 0;
+            int streamSetId, bool isShared, int32_t colorSpace,
+            int64_t dynamicProfile, int64_t streamUseCase) = 0;
 
     // Release all internal streams and corresponding resources.
     virtual status_t deleteInternalStreams() = 0;
diff --git a/services/camera/libcameraservice/api2/DepthCompositeStream.cpp b/services/camera/libcameraservice/api2/DepthCompositeStream.cpp
index 048d85d..2746289 100644
--- a/services/camera/libcameraservice/api2/DepthCompositeStream.cpp
+++ b/services/camera/libcameraservice/api2/DepthCompositeStream.cpp
@@ -581,7 +581,8 @@
         camera_stream_rotation_t rotation, int *id, const String8& physicalCameraId,
         const std::unordered_set<int32_t> &sensorPixelModesUsed,
         std::vector<int> *surfaceIds,
-        int /*streamSetId*/, bool /*isShared*/) {
+        int /*streamSetId*/, bool /*isShared*/, int32_t /*colorSpace*/,
+        int64_t /*dynamicProfile*/, int64_t /*streamUseCase*/) {
     if (mSupportedDepthSizes.empty()) {
         ALOGE("%s: This camera device doesn't support any depth map streams!", __FUNCTION__);
         return INVALID_OPERATION;
diff --git a/services/camera/libcameraservice/api2/DepthCompositeStream.h b/services/camera/libcameraservice/api2/DepthCompositeStream.h
index c1c75c1..b4a4b05 100644
--- a/services/camera/libcameraservice/api2/DepthCompositeStream.h
+++ b/services/camera/libcameraservice/api2/DepthCompositeStream.h
@@ -53,7 +53,8 @@
             camera_stream_rotation_t rotation, int *id, const String8& physicalCameraId,
             const std::unordered_set<int32_t> &sensorPixelModesUsed,
             std::vector<int> *surfaceIds,
-            int streamSetId, bool isShared) override;
+            int streamSetId, bool isShared, int32_t colorSpace,
+            int64_t dynamicProfile, int64_t streamUseCase) override;
     status_t deleteInternalStreams() override;
     status_t configureStream() override;
     status_t insertGbp(SurfaceMap* /*out*/outSurfaceMap, Vector<int32_t>* /*out*/outputStreamIds,
diff --git a/services/camera/libcameraservice/api2/HeicCompositeStream.cpp b/services/camera/libcameraservice/api2/HeicCompositeStream.cpp
index cd57299..52ab22f 100644
--- a/services/camera/libcameraservice/api2/HeicCompositeStream.cpp
+++ b/services/camera/libcameraservice/api2/HeicCompositeStream.cpp
@@ -120,7 +120,8 @@
         camera_stream_rotation_t rotation, int *id, const String8& physicalCameraId,
         const std::unordered_set<int32_t> &sensorPixelModesUsed,
         std::vector<int> *surfaceIds,
-        int /*streamSetId*/, bool /*isShared*/) {
+        int /*streamSetId*/, bool /*isShared*/, int32_t /*colorSpace*/,
+        int64_t /*dynamicProfile*/, int64_t /*streamUseCase*/) {
 
     sp<CameraDeviceBase> device = mDevice.promote();
     if (!device.get()) {
diff --git a/services/camera/libcameraservice/api2/HeicCompositeStream.h b/services/camera/libcameraservice/api2/HeicCompositeStream.h
index 1077a1f..cdcaded 100644
--- a/services/camera/libcameraservice/api2/HeicCompositeStream.h
+++ b/services/camera/libcameraservice/api2/HeicCompositeStream.h
@@ -48,7 +48,8 @@
             camera_stream_rotation_t rotation, int *id, const String8& physicalCameraId,
             const std::unordered_set<int32_t> &sensorPixelModesUsed,
             std::vector<int> *surfaceIds,
-            int streamSetId, bool isShared) override;
+            int streamSetId, bool isShared, int32_t colorSpace,
+            int64_t dynamicProfile, int64_t streamUseCase) override;
 
     status_t deleteInternalStreams() override;
 
diff --git a/services/camera/libcameraservice/api2/JpegRCompositeStream.cpp b/services/camera/libcameraservice/api2/JpegRCompositeStream.cpp
new file mode 100644
index 0000000..367a0c8
--- /dev/null
+++ b/services/camera/libcameraservice/api2/JpegRCompositeStream.cpp
@@ -0,0 +1,786 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "hardware/gralloc.h"
+#include "system/graphics-base-v1.0.h"
+#include "system/graphics-base-v1.1.h"
+#define LOG_TAG "Camera3-JpegRCompositeStream"
+#define ATRACE_TAG ATRACE_TAG_CAMERA
+//#define LOG_NDEBUG 0
+
+#include <aidl/android/hardware/camera/device/CameraBlob.h>
+#include <aidl/android/hardware/camera/device/CameraBlobId.h>
+
+#include "common/CameraProviderManager.h"
+#include <gui/Surface.h>
+#include <utils/ExifUtils.h>
+#include <utils/Log.h>
+#include "utils/SessionConfigurationUtils.h"
+#include <utils/Trace.h>
+
+#include "JpegRCompositeStream.h"
+
+namespace android {
+namespace camera3 {
+
+using aidl::android::hardware::camera::device::CameraBlob;
+using aidl::android::hardware::camera::device::CameraBlobId;
+
+JpegRCompositeStream::JpegRCompositeStream(sp<CameraDeviceBase> device,
+        wp<hardware::camera2::ICameraDeviceCallbacks> cb) :
+        CompositeStream(device, cb),
+        mBlobStreamId(-1),
+        mBlobSurfaceId(-1),
+        mP010StreamId(-1),
+        mP010SurfaceId(-1),
+        mBlobWidth(0),
+        mBlobHeight(0),
+        mP010BufferAcquired(false),
+        mBlobBufferAcquired(false),
+        mOutputColorSpace(ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED),
+        mProducerListener(new ProducerListener()),
+        mMaxJpegBufferSize(-1),
+        mUHRMaxJpegBufferSize(-1),
+        mStaticInfo(device->info()) {
+    auto entry = mStaticInfo.find(ANDROID_JPEG_MAX_SIZE);
+    if (entry.count > 0) {
+        mMaxJpegBufferSize = entry.data.i32[0];
+    } else {
+        ALOGW("%s: Maximum jpeg size absent from camera characteristics", __FUNCTION__);
+    }
+
+    mUHRMaxJpegSize =
+            SessionConfigurationUtils::getMaxJpegResolution(mStaticInfo,
+                    /*ultraHighResolution*/true);
+    mDefaultMaxJpegSize =
+            SessionConfigurationUtils::getMaxJpegResolution(mStaticInfo,
+                    /*isUltraHighResolution*/false);
+
+    mUHRMaxJpegBufferSize =
+        SessionConfigurationUtils::getUHRMaxJpegBufferSize(mUHRMaxJpegSize, mDefaultMaxJpegSize,
+                mMaxJpegBufferSize);
+}
+
+JpegRCompositeStream::~JpegRCompositeStream() {
+    mBlobConsumer.clear();
+    mBlobSurface.clear();
+    mBlobStreamId = -1;
+    mBlobSurfaceId = -1;
+    mP010Consumer.clear();
+    mP010Surface.clear();
+    mP010Consumer = nullptr;
+    mP010Surface = nullptr;
+}
+
+void JpegRCompositeStream::compilePendingInputLocked() {
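+    // Incoming P010 and (optional) jpeg buffers are keyed by timestamp and collected into
+    // 'mPendingInputFrames'; capture results and frame numbers arrive separately and are
+    // attached to the matching pending entry below.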
+    CpuConsumer::LockedBuffer imgBuffer;
+
+    while (mSupportInternalJpeg && !mInputJpegBuffers.empty() && !mBlobBufferAcquired) {
+        auto it = mInputJpegBuffers.begin();
+        auto res = mBlobConsumer->lockNextBuffer(&imgBuffer);
+        if (res == NOT_ENOUGH_DATA) {
+            // Can not lock any more buffers.
+            break;
+        } else if (res != OK) {
+            ALOGE("%s: Error locking blob image buffer: %s (%d)", __FUNCTION__,
+                    strerror(-res), res);
+            mPendingInputFrames[*it].error = true;
+            mInputJpegBuffers.erase(it);
+            continue;
+        }
+
+        if (*it != imgBuffer.timestamp) {
+            ALOGW("%s: Expecting jpeg buffer with time stamp: %" PRId64 " received buffer with "
+                    "time stamp: %" PRId64, __FUNCTION__, *it, imgBuffer.timestamp);
+        }
+
+        if ((mPendingInputFrames.find(imgBuffer.timestamp) != mPendingInputFrames.end()) &&
+                (mPendingInputFrames[imgBuffer.timestamp].error)) {
+            mBlobConsumer->unlockBuffer(imgBuffer);
+        } else {
+            mPendingInputFrames[imgBuffer.timestamp].jpegBuffer = imgBuffer;
+            mBlobBufferAcquired = true;
+        }
+        mInputJpegBuffers.erase(it);
+    }
+
+    while (!mInputP010Buffers.empty() && !mP010BufferAcquired) {
+        auto it = mInputP010Buffers.begin();
+        auto res = mP010Consumer->lockNextBuffer(&imgBuffer);
+        if (res == NOT_ENOUGH_DATA) {
+            // Can not lock any more buffers.
+            break;
+        } else if (res != OK) {
+            ALOGE("%s: Error receiving P010 image buffer: %s (%d)", __FUNCTION__,
+                    strerror(-res), res);
+            mPendingInputFrames[*it].error = true;
+            mInputP010Buffers.erase(it);
+            continue;
+        }
+
+        if (*it != imgBuffer.timestamp) {
+            ALOGW("%s: Expecting P010 buffer with time stamp: %" PRId64 " received buffer with "
+                    "time stamp: %" PRId64, __FUNCTION__, *it, imgBuffer.timestamp);
+        }
+
+        if ((mPendingInputFrames.find(imgBuffer.timestamp) != mPendingInputFrames.end()) &&
+                (mPendingInputFrames[imgBuffer.timestamp].error)) {
+            mP010Consumer->unlockBuffer(imgBuffer);
+        } else {
+            mPendingInputFrames[imgBuffer.timestamp].p010Buffer = imgBuffer;
+            mP010BufferAcquired = true;
+        }
+        mInputP010Buffers.erase(it);
+    }
+
+    while (!mCaptureResults.empty()) {
+        auto it = mCaptureResults.begin();
+        // Negative timestamp indicates that something went wrong during the capture result
+        // collection process.
+        if (it->first >= 0) {
+            mPendingInputFrames[it->first].frameNumber = std::get<0>(it->second);
+            mPendingInputFrames[it->first].result = std::get<1>(it->second);
+        }
+        mCaptureResults.erase(it);
+    }
+
+    while (!mFrameNumberMap.empty()) {
+        auto it = mFrameNumberMap.begin();
+        mPendingInputFrames[it->second].frameNumber = it->first;
+        mFrameNumberMap.erase(it);
+    }
+
+    auto it = mErrorFrameNumbers.begin();
+    while (it != mErrorFrameNumbers.end()) {
+        bool frameFound = false;
+        for (auto &inputFrame : mPendingInputFrames) {
+            if (inputFrame.second.frameNumber == *it) {
+                inputFrame.second.error = true;
+                frameFound = true;
+                break;
+            }
+        }
+
+        if (frameFound) {
+            it = mErrorFrameNumbers.erase(it);
+        } else {
+            ALOGW("%s: Not able to find failing input with frame number: %" PRId64, __FUNCTION__,
+                    *it);
+            it++;
+        }
+    }
+}
+
+bool JpegRCompositeStream::getNextReadyInputLocked(int64_t *currentTs /*inout*/) {
+    if (currentTs == nullptr) {
+        return false;
+    }
+
+    bool newInputAvailable = false;
+    for (const auto& it : mPendingInputFrames) {
+        if ((!it.second.error) && (it.second.p010Buffer.data != nullptr) &&
+                ((it.second.jpegBuffer.data != nullptr) || !mSupportInternalJpeg) &&
+                (it.first < *currentTs)) {
+            *currentTs = it.first;
+            newInputAvailable = true;
+        }
+    }
+
+    return newInputAvailable;
+}
+
+int64_t JpegRCompositeStream::getNextFailingInputLocked(int64_t *currentTs /*inout*/) {
+    int64_t ret = -1;
+    if (currentTs == nullptr) {
+        return ret;
+    }
+
+    for (const auto& it : mPendingInputFrames) {
+        if (it.second.error && !it.second.errorNotified && (it.first < *currentTs)) {
+            *currentTs = it.first;
+            ret = it.second.frameNumber;
+        }
+    }
+
+    return ret;
+}
+
+status_t JpegRCompositeStream::processInputFrame(nsecs_t ts, const InputFrame &inputFrame) {
+    status_t res;
+    sp<ANativeWindow> outputANW = mOutputSurface;
+    ANativeWindowBuffer *anb;
+    int fenceFd;
+    void *dstBuffer;
+
+    size_t maxJpegBufferSize = 0;
+    if (mMaxJpegBufferSize > 0) {
+        // If this is an ultra high resolution sensor and the input frames size
+        // is > default res jpeg.
+        if (mUHRMaxJpegSize.width != 0 &&
+                inputFrame.jpegBuffer.width * inputFrame.jpegBuffer.height >
+                mDefaultMaxJpegSize.width * mDefaultMaxJpegSize.height) {
+            maxJpegBufferSize = mUHRMaxJpegBufferSize;
+        } else {
+            maxJpegBufferSize = mMaxJpegBufferSize;
+        }
+    } else {
+        maxJpegBufferSize = inputFrame.p010Buffer.width * inputFrame.p010Buffer.height;
+    }
+
+    uint8_t jpegQuality = 100;
+    auto entry = inputFrame.result.find(ANDROID_JPEG_QUALITY);
+    if (entry.count > 0) {
+        jpegQuality = entry.data.u8[0];
+    }
+
+    uint8_t jpegOrientation = 0;
+    entry = inputFrame.result.find(ANDROID_JPEG_ORIENTATION);
+    if (entry.count > 0) {
+        jpegOrientation = entry.data.i32[0];
+    }
+
+    if ((res = native_window_set_buffers_dimensions(mOutputSurface.get(), maxJpegBufferSize, 1))
+            != OK) {
+        ALOGE("%s: Unable to configure stream buffer dimensions"
+                " %zux%u for stream %d", __FUNCTION__, maxJpegBufferSize, 1U, mP010StreamId);
+        return res;
+    }
+
+    res = outputANW->dequeueBuffer(mOutputSurface.get(), &anb, &fenceFd);
+    if (res != OK) {
+        ALOGE("%s: Error retrieving output buffer: %s (%d)", __FUNCTION__, strerror(-res),
+                res);
+        return res;
+    }
+
+    sp<GraphicBuffer> gb = GraphicBuffer::from(anb);
+    GraphicBufferLocker gbLocker(gb);
+    res = gbLocker.lockAsync(&dstBuffer, fenceFd);
+    if (res != OK) {
+        ALOGE("%s: Error trying to lock output buffer fence: %s (%d)", __FUNCTION__,
+                strerror(-res), res);
+        outputANW->cancelBuffer(mOutputSurface.get(), anb, /*fence*/ -1);
+        return res;
+    }
+
+    if ((gb->getWidth() < maxJpegBufferSize) || (gb->getHeight() != 1)) {
+        ALOGE("%s: Blob buffer size mismatch, expected %zux%u received %dx%d", __FUNCTION__,
+                maxJpegBufferSize, 1, gb->getWidth(), gb->getHeight());
+        outputANW->cancelBuffer(mOutputSurface.get(), anb, /*fence*/ -1);
+        return BAD_VALUE;
+    }
+
+    if (mOutputColorSpace == ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_DISPLAY_P3) {
+        // Configure Jpeg/R for P3 output and possibly input in case of concurrent SDR Jpeg support
+    } else {
+        // Configure Jpeg/R for SRGB output
+    }
+
+    size_t actualJpegSize = 0;
+    if (mSupportInternalJpeg) {
+        actualJpegSize = android::camera2::JpegProcessor::findJpegSize(inputFrame.jpegBuffer.data,
+                inputFrame.jpegBuffer.width);
+        if (actualJpegSize == 0) {
+            ALOGW("%s: Failed to find input jpeg size, default to using entire buffer!",
+                    __FUNCTION__);
+            actualJpegSize = inputFrame.jpegBuffer.width;
+        }
+        if (actualJpegSize <= maxJpegBufferSize) {
+            memcpy(dstBuffer, inputFrame.jpegBuffer.data, actualJpegSize);
+        }
+    } else {
+        const uint8_t* exifBuffer = nullptr;
+        size_t exifBufferSize = 0;
+        std::unique_ptr<ExifUtils> utils(ExifUtils::create());
+        utils->initializeEmpty();
+        utils->setFromMetadata(inputFrame.result, mStaticInfo, inputFrame.p010Buffer.width,
+                inputFrame.p010Buffer.height);
+        if (utils->generateApp1()) {
+            exifBuffer = utils->getApp1Buffer();
+            exifBufferSize = utils->getApp1Length();
+        } else {
+            ALOGE("%s: Unable to generate App1 buffer", __FUNCTION__);
+        }
+    }
+
+    //TODO: Process JpegR here and retrieve the final jpeg/r size
+
+    size_t finalJpegSize = actualJpegSize + sizeof(CameraBlob);
+    if (finalJpegSize > maxJpegBufferSize) {
+        ALOGE("%s: Final jpeg buffer not large enough for the jpeg blob header", __FUNCTION__);
+        outputANW->cancelBuffer(mOutputSurface.get(), anb, /*fence*/ -1);
+        return NO_MEMORY;
+    }
+
+    res = native_window_set_buffers_timestamp(mOutputSurface.get(), ts);
+    if (res != OK) {
+        ALOGE("%s: Stream %d: Error setting timestamp: %s (%d)", __FUNCTION__,
+                getStreamId(), strerror(-res), res);
+        return res;
+    }
+
+    ALOGV("%s: Final jpeg size: %zu", __func__, finalJpegSize);
+    uint8_t* header = static_cast<uint8_t *> (dstBuffer) +
+        (gb->getWidth() - sizeof(CameraBlob));
+    CameraBlob blobHeader = {
+        .blobId = CameraBlobId::JPEG,
+        .blobSizeBytes = static_cast<int32_t>(actualJpegSize)
+    };
+    memcpy(header, &blobHeader, sizeof(CameraBlob));
+    outputANW->queueBuffer(mOutputSurface.get(), anb, /*fence*/ -1);
+
+    return res;
+}
+
+void JpegRCompositeStream::releaseInputFrameLocked(InputFrame *inputFrame /*out*/) {
+    if (inputFrame == nullptr) {
+        return;
+    }
+
+    if (inputFrame->p010Buffer.data != nullptr) {
+        mP010Consumer->unlockBuffer(inputFrame->p010Buffer);
+        inputFrame->p010Buffer.data = nullptr;
+        mP010BufferAcquired = false;
+    }
+
+    if (inputFrame->jpegBuffer.data != nullptr) {
+        mBlobConsumer->unlockBuffer(inputFrame->jpegBuffer);
+        inputFrame->jpegBuffer.data = nullptr;
+        mBlobBufferAcquired = false;
+    }
+
+    if ((inputFrame->error || mErrorState) && !inputFrame->errorNotified) {
+        //TODO: Figure out correct requestId
+        notifyError(inputFrame->frameNumber, -1 /*requestId*/);
+        inputFrame->errorNotified = true;
+    }
+}
+
+void JpegRCompositeStream::releaseInputFramesLocked(int64_t currentTs) {
+    auto it = mPendingInputFrames.begin();
+    while (it != mPendingInputFrames.end()) {
+        if (it->first <= currentTs) {
+            releaseInputFrameLocked(&it->second);
+            it = mPendingInputFrames.erase(it);
+        } else {
+            it++;
+        }
+    }
+}
+
+bool JpegRCompositeStream::threadLoop() {
+    int64_t currentTs = INT64_MAX;
+    bool newInputAvailable = false;
+
+    {
+        Mutex::Autolock l(mMutex);
+
+        if (mErrorState) {
+            // In case we landed in error state, return any pending buffers and
+            // halt all further processing.
+            compilePendingInputLocked();
+            releaseInputFramesLocked(currentTs);
+            return false;
+        }
+
+        while (!newInputAvailable) {
+            compilePendingInputLocked();
+            newInputAvailable = getNextReadyInputLocked(&currentTs);
+            if (!newInputAvailable) {
+                auto failingFrameNumber = getNextFailingInputLocked(&currentTs);
+                if (failingFrameNumber >= 0) {
+                    // We cannot erase 'mPendingInputFrames[currentTs]' at this point because it is
+                    // possible for two internal stream buffers to fail. In such scenario the
+                    // composite stream should notify the client about a stream buffer error only
+                    // once and this information is kept within 'errorNotified'.
+                    // Any present failed input frames will be removed on a subsequent call to
+                    // 'releaseInputFramesLocked()'.
+                    releaseInputFrameLocked(&mPendingInputFrames[currentTs]);
+                    currentTs = INT64_MAX;
+                }
+
+                auto ret = mInputReadyCondition.waitRelative(mMutex, kWaitDuration);
+                if (ret == TIMED_OUT) {
+                    return true;
+                } else if (ret != OK) {
+                    ALOGE("%s: Timed wait on condition failed: %s (%d)", __FUNCTION__,
+                            strerror(-ret), ret);
+                    return false;
+                }
+            }
+        }
+    }
+
+    auto res = processInputFrame(currentTs, mPendingInputFrames[currentTs]);
+    Mutex::Autolock l(mMutex);
+    if (res != OK) {
+        ALOGE("%s: Failed processing frame with timestamp: %" PRIu64 ": %s (%d)", __FUNCTION__,
+                currentTs, strerror(-res), res);
+        mPendingInputFrames[currentTs].error = true;
+    }
+
+    releaseInputFramesLocked(currentTs);
+
+    return true;
+}
+
+bool JpegRCompositeStream::isJpegRCompositeStream(const sp<Surface> &surface) {
+    if (CameraProviderManager::kFrameworkJpegRDisabled) {
+        return false;
+    }
+    ANativeWindow *anw = surface.get();
+    status_t err;
+    int format;
+    if ((err = anw->query(anw, NATIVE_WINDOW_FORMAT, &format)) != OK) {
+        ALOGE("%s: Failed to query Surface format: %s (%d)", __FUNCTION__, strerror(-err),
+                err);
+        return false;
+    }
+
+    int dataspace;
+    if ((err = anw->query(anw, NATIVE_WINDOW_DEFAULT_DATASPACE, &dataspace)) != OK) {
+        ALOGE("%s: Failed to query Surface dataspace: %s (%d)", __FUNCTION__, strerror(-err),
+                err);
+        return false;
+    }
+
+    if ((format == HAL_PIXEL_FORMAT_BLOB) && (dataspace == static_cast<int>(kJpegRDataSpace))) {
+        return true;
+    }
+
+    return false;
+}
+
+void JpegRCompositeStream::deriveDynamicRangeAndDataspace(int64_t dynamicProfile,
+        int64_t* /*out*/dynamicRange, int64_t* /*out*/dataSpace) {
+    if ((dynamicRange == nullptr) || (dataSpace == nullptr)) {
+        return;
+    }
+
+    switch (dynamicProfile) {
+        case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_HDR10:
+        case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_HDR10_PLUS:
+            *dynamicRange = dynamicProfile;
+            *dataSpace = HAL_DATASPACE_BT2020_ITU_PQ;
+            break;
+        case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_10B_HDR_REF:
+        case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_10B_HDR_REF_PO:
+        case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_10B_HDR_OEM:
+        case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_10B_HDR_OEM_PO:
+            *dynamicRange = dynamicProfile;
+            *dataSpace = HAL_DATASPACE_BT2020_ITU_HLG;
+            break;
+        default:
+            *dynamicRange = kP010DefaultDynamicRange;
+            *dataSpace = kP010DefaultDataSpace;
+    }
+
+}
+
+status_t JpegRCompositeStream::createInternalStreams(const std::vector<sp<Surface>>& consumers,
+        bool /*hasDeferredConsumer*/, uint32_t width, uint32_t height, int format,
+        camera_stream_rotation_t rotation, int *id, const String8& physicalCameraId,
+        const std::unordered_set<int32_t> &sensorPixelModesUsed,
+        std::vector<int> *surfaceIds,
+        int /*streamSetId*/, bool /*isShared*/, int32_t colorSpace,
+        int64_t dynamicProfile, int64_t streamUseCase) {
+    sp<CameraDeviceBase> device = mDevice.promote();
+    if (!device.get()) {
+        ALOGE("%s: Invalid camera device!", __FUNCTION__);
+        return NO_INIT;
+    }
+
+    deriveDynamicRangeAndDataspace(dynamicProfile, &mP010DynamicRange, &mP010DataSpace);
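+    // An internal SDR jpeg stream is only created when the device can capture the P010 dynamic
+    // range profile and a standard dynamic range stream concurrently; otherwise only the
+    // internal P010 stream is configured below.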
+    mSupportInternalJpeg = CameraProviderManager::isConcurrentDynamicRangeCaptureSupported(
+            mStaticInfo, mP010DynamicRange,
+            ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD);
+
+    sp<IGraphicBufferProducer> producer;
+    sp<IGraphicBufferConsumer> consumer;
+    BufferQueue::createBufferQueue(&producer, &consumer);
+    mP010Consumer = new CpuConsumer(consumer, /*maxLockedBuffers*/1, /*controlledByApp*/ true);
+    mP010Consumer->setFrameAvailableListener(this);
+    mP010Consumer->setName(String8("Camera3-P010CompositeStream"));
+    mP010Surface = new Surface(producer);
+
+    auto ret = device->createStream(mP010Surface, width, height, kP010PixelFormat,
+            static_cast<android_dataspace>(mP010DataSpace), rotation,
+            id, physicalCameraId, sensorPixelModesUsed, surfaceIds,
+            camera3::CAMERA3_STREAM_SET_ID_INVALID, false /*isShared*/, false /*isMultiResolution*/,
+            GRALLOC_USAGE_SW_READ_OFTEN,
+            mP010DynamicRange,
+            streamUseCase);
+    if (ret == OK) {
+        mP010StreamId = *id;
+        mP010SurfaceId = (*surfaceIds)[0];
+        mOutputSurface = consumers[0];
+    } else {
+        return ret;
+    }
+
+    if (mSupportInternalJpeg) {
+        BufferQueue::createBufferQueue(&producer, &consumer);
+        mBlobConsumer = new CpuConsumer(consumer, /*maxLockedBuffers*/ 1, /*controlledByApp*/ true);
+        mBlobConsumer->setFrameAvailableListener(this);
+        mBlobConsumer->setName(String8("Camera3-JpegRCompositeStream"));
+        mBlobSurface = new Surface(producer);
+        std::vector<int> blobSurfaceId;
+        ret = device->createStream(mBlobSurface, width, height, format,
+                kJpegDataSpace, rotation, &mBlobStreamId, physicalCameraId, sensorPixelModesUsed,
+                &blobSurfaceId,
+                /*streamSetId*/ camera3::CAMERA3_STREAM_SET_ID_INVALID,
+                /*isShared*/  false,
+                /*isMultiResolution*/ false,
+                /*consumerUsage*/ GRALLOC_USAGE_SW_READ_OFTEN,
+                /*dynamicProfile*/ ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD,
+                streamUseCase,
+                /*timestampBase*/ OutputConfiguration::TIMESTAMP_BASE_DEFAULT,
+                /*mirrorMode*/ OutputConfiguration::MIRROR_MODE_AUTO,
+                /*colorSpace*/ colorSpace);
+        if (ret == OK) {
+            mBlobSurfaceId = blobSurfaceId[0];
+        } else {
+            return ret;
+        }
+
+        ret = registerCompositeStreamListener(mBlobStreamId);
+        if (ret != OK) {
+            ALOGE("%s: Failed to register jpeg stream listener!", __FUNCTION__);
+            return ret;
+        }
+    }
+
+    ret = registerCompositeStreamListener(getStreamId());
+    if (ret != OK) {
+        ALOGE("%s: Failed to register P010 stream listener!", __FUNCTION__);
+        return ret;
+    }
+
+    mOutputColorSpace = colorSpace;
+    mBlobWidth = width;
+    mBlobHeight = height;
+
+    return ret;
+}
+
+status_t JpegRCompositeStream::configureStream() {
+    if (isRunning()) {
+        // Processing thread is already running, nothing more to do.
+        return NO_ERROR;
+    }
+
+    if (mOutputSurface.get() == nullptr) {
+        ALOGE("%s: No valid output surface set!", __FUNCTION__);
+        return NO_INIT;
+    }
+
+    auto res = mOutputSurface->connect(NATIVE_WINDOW_API_CAMERA, mProducerListener);
+    if (res != OK) {
+        ALOGE("%s: Unable to connect to native window for stream %d",
+                __FUNCTION__, mP010StreamId);
+        return res;
+    }
+
+    if ((res = native_window_set_buffers_format(mOutputSurface.get(), HAL_PIXEL_FORMAT_BLOB))
+            != OK) {
+        ALOGE("%s: Unable to configure stream buffer format for stream %d", __FUNCTION__,
+                mP010StreamId);
+        return res;
+    }
+
+    int maxProducerBuffers;
+    ANativeWindow *anw = mP010Surface.get();
+    if ((res = anw->query(anw, NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS, &maxProducerBuffers)) != OK) {
+        ALOGE("%s: Unable to query consumer undequeued"
+                " buffer count for stream %d", __FUNCTION__, mP010StreamId);
+        return res;
+    }
+
+    ANativeWindow *anwConsumer = mOutputSurface.get();
+    int maxConsumerBuffers;
+    if ((res = anwConsumer->query(anwConsumer, NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS,
+                    &maxConsumerBuffers)) != OK) {
+        ALOGE("%s: Unable to query consumer undequeued"
+                " buffer count for stream %d", __FUNCTION__, mP010StreamId);
+        return res;
+    }
+
+    if ((res = native_window_set_buffer_count(
+                    anwConsumer, maxProducerBuffers + maxConsumerBuffers)) != OK) {
+        ALOGE("%s: Unable to set buffer count for stream %d", __FUNCTION__, mP010StreamId);
+        return res;
+    }
+
+    run("JpegRCompositeStreamProc");
+
+    return NO_ERROR;
+}
+
+status_t JpegRCompositeStream::deleteInternalStreams() {
+    // The 'CameraDeviceClient' parent will delete the P010 stream
+    requestExit();
+
+    auto ret = join();
+    if (ret != OK) {
+        ALOGE("%s: Failed to join with the main processing thread: %s (%d)", __FUNCTION__,
+                strerror(-ret), ret);
+    }
+
+    if (mBlobStreamId >= 0) {
+        // Camera devices may not be valid after switching to offline mode.
+        // In this case, all offline streams including internal composite streams
+        // are managed and released by the offline session.
+        sp<CameraDeviceBase> device = mDevice.promote();
+        if (device.get() != nullptr) {
+            ret = device->deleteStream(mBlobStreamId);
+        }
+
+        mBlobStreamId = -1;
+    }
+
+    if (mOutputSurface != nullptr) {
+        mOutputSurface->disconnect(NATIVE_WINDOW_API_CAMERA);
+        mOutputSurface.clear();
+    }
+
+    return ret;
+}
+
+void JpegRCompositeStream::onFrameAvailable(const BufferItem& item) {
+    if (item.mDataSpace == kJpegDataSpace) {
+        ALOGV("%s: Jpeg buffer with ts: %" PRIu64 " ms. arrived!",
+                __func__, ns2ms(item.mTimestamp));
+
+        Mutex::Autolock l(mMutex);
+        if (!mErrorState) {
+            mInputJpegBuffers.push_back(item.mTimestamp);
+            mInputReadyCondition.signal();
+        }
+    } else if (item.mDataSpace == static_cast<android_dataspace_t>(mP010DataSpace)) {
+        ALOGV("%s: P010 buffer with ts: %" PRIu64 " ms. arrived!", __func__,
+                ns2ms(item.mTimestamp));
+
+        Mutex::Autolock l(mMutex);
+        if (!mErrorState) {
+            mInputP010Buffers.push_back(item.mTimestamp);
+            mInputReadyCondition.signal();
+        }
+    } else {
+        ALOGE("%s: Unexpected data space: 0x%x", __FUNCTION__, item.mDataSpace);
+    }
+}
+
+status_t JpegRCompositeStream::insertGbp(SurfaceMap* /*out*/outSurfaceMap,
+        Vector<int32_t> * /*out*/outputStreamIds, int32_t* /*out*/currentStreamId) {
+    if (outputStreamIds == nullptr) {
+        return BAD_VALUE;
+    }
+
+    if (outSurfaceMap->find(mP010StreamId) == outSurfaceMap->end()) {
+        outputStreamIds->push_back(mP010StreamId);
+    }
+    (*outSurfaceMap)[mP010StreamId].push_back(mP010SurfaceId);
+
+    if (mSupportInternalJpeg) {
+        if (outSurfaceMap->find(mBlobStreamId) == outSurfaceMap->end()) {
+            outputStreamIds->push_back(mBlobStreamId);
+        }
+        (*outSurfaceMap)[mBlobStreamId].push_back(mBlobSurfaceId);
+    }
+
+    if (currentStreamId != nullptr) {
+        *currentStreamId = mP010StreamId;
+    }
+
+    return NO_ERROR;
+}
+
+status_t JpegRCompositeStream::insertCompositeStreamIds(
+        std::vector<int32_t>* compositeStreamIds /*out*/) {
+    if (compositeStreamIds == nullptr) {
+        return BAD_VALUE;
+    }
+
+    compositeStreamIds->push_back(mP010StreamId);
+    if (mSupportInternalJpeg) {
+        compositeStreamIds->push_back(mBlobStreamId);
+    }
+
+    return OK;
+}
+
+void JpegRCompositeStream::onResultError(const CaptureResultExtras& resultExtras) {
+    // Processing can continue even in case of result errors.
+    // At the moment Jpeg/R composite stream processing relies mainly on static camera
+    // characteristics data. The actual result data can be used for the jpeg quality but
+    // in case it is absent we can default to maximum.
+    eraseResult(resultExtras.frameNumber);
+}
+
+bool JpegRCompositeStream::onStreamBufferError(const CaptureResultExtras& resultExtras) {
+    bool ret = false;
+    // Buffer errors concerning internal composite streams should not be directly visible to
+    // camera clients. They must only receive a single buffer error with the public composite
+    // stream id.
+    if ((resultExtras.errorStreamId == mP010StreamId) ||
+            (resultExtras.errorStreamId == mBlobStreamId)) {
+        flagAnErrorFrameNumber(resultExtras.frameNumber);
+        ret = true;
+    }
+
+    return ret;
+}
+
+status_t JpegRCompositeStream::getCompositeStreamInfo(const OutputStreamInfo &streamInfo,
+            const CameraMetadata& staticInfo,
+            std::vector<OutputStreamInfo>* compositeOutput /*out*/) {
+    if (compositeOutput == nullptr) {
+        return BAD_VALUE;
+    }
+
+    int64_t dynamicRange, dataSpace;
+    deriveDynamicRangeAndDataspace(streamInfo.dynamicRangeProfile, &dynamicRange, &dataSpace);
+
+    compositeOutput->clear();
+    compositeOutput->push_back({});
+    (*compositeOutput)[0].width = streamInfo.width;
+    (*compositeOutput)[0].height = streamInfo.height;
+    (*compositeOutput)[0].format = kP010PixelFormat;
+    (*compositeOutput)[0].dataSpace = static_cast<android_dataspace_t>(dataSpace);
+    (*compositeOutput)[0].consumerUsage = GRALLOC_USAGE_SW_READ_OFTEN;
+    (*compositeOutput)[0].dynamicRangeProfile = dynamicRange;
+    (*compositeOutput)[0].colorSpace =
+        ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED;
+
+    if (CameraProviderManager::isConcurrentDynamicRangeCaptureSupported(staticInfo,
+                streamInfo.dynamicRangeProfile,
+                ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD)) {
+        compositeOutput->push_back({});
+        (*compositeOutput)[1].width = streamInfo.width;
+        (*compositeOutput)[1].height = streamInfo.height;
+        (*compositeOutput)[1].format = HAL_PIXEL_FORMAT_BLOB;
+        (*compositeOutput)[1].dataSpace = kJpegDataSpace;
+        (*compositeOutput)[1].consumerUsage = GRALLOC_USAGE_SW_READ_OFTEN;
+        (*compositeOutput)[1].dynamicRangeProfile =
+            ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD;
+        (*compositeOutput)[1].colorSpace = streamInfo.colorSpace;
+    }
+
+    return NO_ERROR;
+}
+
+}; // namespace camera3
+}; // namespace android
diff --git a/services/camera/libcameraservice/api2/JpegRCompositeStream.h b/services/camera/libcameraservice/api2/JpegRCompositeStream.h
new file mode 100644
index 0000000..181a05d
--- /dev/null
+++ b/services/camera/libcameraservice/api2/JpegRCompositeStream.h
@@ -0,0 +1,145 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_SERVERS_CAMERA_CAMERA3_JPEG_R_COMPOSITE_STREAM_H
+#define ANDROID_SERVERS_CAMERA_CAMERA3_JPEG_R_COMPOSITE_STREAM_H
+
+#include <gui/CpuConsumer.h>
+#include "aidl/android/hardware/graphics/common/Dataspace.h"
+#include "system/graphics-base-v1.1.h"
+
+#include "api1/client2/JpegProcessor.h"
+
+#include "CompositeStream.h"
+
+namespace android {
+
+class CameraDeviceClient;
+class CameraMetadata;
+class Surface;
+
+namespace camera3 {
+
+class JpegRCompositeStream : public CompositeStream, public Thread,
+        public CpuConsumer::FrameAvailableListener {
+
+public:
+    JpegRCompositeStream(sp<CameraDeviceBase> device,
+            wp<hardware::camera2::ICameraDeviceCallbacks> cb);
+    ~JpegRCompositeStream() override;
+
+    static bool isJpegRCompositeStream(const sp<Surface> &surface);
+
+    // CompositeStream overrides
+    status_t createInternalStreams(const std::vector<sp<Surface>>& consumers,
+            bool hasDeferredConsumer, uint32_t width, uint32_t height, int format,
+            camera_stream_rotation_t rotation, int *id, const String8& physicalCameraId,
+            const std::unordered_set<int32_t> &sensorPixelModesUsed,
+            std::vector<int> *surfaceIds,
+            int streamSetId, bool isShared, int32_t colorSpace,
+            int64_t dynamicProfile, int64_t streamUseCase) override;
+    status_t deleteInternalStreams() override;
+    status_t configureStream() override;
+    status_t insertGbp(SurfaceMap* /*out*/outSurfaceMap, Vector<int32_t>* /*out*/outputStreamIds,
+            int32_t* /*out*/currentStreamId) override;
+    status_t insertCompositeStreamIds(std::vector<int32_t>* compositeStreamIds /*out*/) override;
+    int getStreamId() override { return mP010StreamId; }
+
+    // CpuConsumer listener implementation
+    void onFrameAvailable(const BufferItem& item) override;
+
+    // Return stream information about the internal camera streams
+    static status_t getCompositeStreamInfo(const OutputStreamInfo &streamInfo,
+            const CameraMetadata& ch, std::vector<OutputStreamInfo>* compositeOutput /*out*/);
+
+protected:
+
+    bool threadLoop() override;
+    bool onStreamBufferError(const CaptureResultExtras& resultExtras) override;
+    void onResultError(const CaptureResultExtras& resultExtras) override;
+
+private:
+    struct InputFrame {
+        CpuConsumer::LockedBuffer p010Buffer;
+        CpuConsumer::LockedBuffer jpegBuffer;
+        CameraMetadata            result;
+        bool                      error;
+        bool                      errorNotified;
+        int64_t                   frameNumber;
+        int32_t                   requestId;
+
+        InputFrame() : error(false), errorNotified(false), frameNumber(-1), requestId(-1) { }
+    };
+
+    status_t processInputFrame(nsecs_t ts, const InputFrame &inputFrame);
+
+    // Buffer/Results handling
+    void compilePendingInputLocked();
+    void releaseInputFrameLocked(InputFrame *inputFrame /*out*/);
+    void releaseInputFramesLocked(int64_t currentTs);
+
+    // Find first complete and valid frame with smallest timestamp
+    bool getNextReadyInputLocked(int64_t *currentTs /*inout*/);
+
+    // Find next failing frame number with smallest timestamp and return respective frame number
+    int64_t getNextFailingInputLocked(int64_t *currentTs /*inout*/);
+
+    static void deriveDynamicRangeAndDataspace(int64_t dynamicProfile, int64_t* /*out*/dynamicRange,
+            int64_t* /*out*/dataSpace);
+
+    static const nsecs_t kWaitDuration = 10000000; // 10 ms
+    static const auto kP010PixelFormat = HAL_PIXEL_FORMAT_YCBCR_P010;
+    static const auto kP010DefaultDataSpace = HAL_DATASPACE_BT2020_ITU_HLG;
+    static const auto kP010DefaultDynamicRange =
+        ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_HLG10;
+    static const auto kJpegDataSpace = HAL_DATASPACE_V0_JFIF;
+    static const auto kJpegRDataSpace =
+        aidl::android::hardware::graphics::common::Dataspace::JPEG_R;
+
+    bool                 mSupportInternalJpeg = false;
+    int64_t              mP010DataSpace = HAL_DATASPACE_BT2020_HLG;
+    int64_t              mP010DynamicRange =
+        ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_HLG10;
+    int                  mBlobStreamId, mBlobSurfaceId, mP010StreamId, mP010SurfaceId;
+    size_t               mBlobWidth, mBlobHeight;
+    sp<CpuConsumer>      mBlobConsumer, mP010Consumer;
+    bool                 mP010BufferAcquired, mBlobBufferAcquired;
+    sp<Surface>          mP010Surface, mBlobSurface, mOutputSurface;
+    int32_t              mOutputColorSpace;
+    sp<ProducerListener> mProducerListener;
+
+    ssize_t              mMaxJpegBufferSize;
+    ssize_t              mUHRMaxJpegBufferSize;
+
+    camera3::Size        mDefaultMaxJpegSize;
+    camera3::Size        mUHRMaxJpegSize;
+
+    // Keep all incoming P010 buffer timestamps pending further processing.
+    std::vector<int64_t> mInputP010Buffers;
+
+    // Keep all incoming Jpeg/Blob buffer timestamps pending further processing.
+    std::vector<int64_t> mInputJpegBuffers;
+
+    // Map of all input frames pending further processing.
+    std::unordered_map<int64_t, InputFrame> mPendingInputFrames;
+
+    const CameraMetadata mStaticInfo;
+};
+
+}; //namespace camera3
+}; //namespace android
+
+#endif
diff --git a/services/camera/libcameraservice/common/CameraProviderManager.cpp b/services/camera/libcameraservice/common/CameraProviderManager.cpp
index 89c7459..5b8e3a1 100644
--- a/services/camera/libcameraservice/common/CameraProviderManager.cpp
+++ b/services/camera/libcameraservice/common/CameraProviderManager.cpp
@@ -14,6 +14,8 @@
  * limitations under the License.
  */
 
+#include "system/graphics-base-v1.0.h"
+#include "system/graphics-base-v1.1.h"
 #define LOG_TAG "CameraProviderManager"
 #define ATRACE_TAG ATRACE_TAG_CAMERA
 //#define LOG_NDEBUG 0
@@ -40,6 +42,7 @@
 #include <cutils/properties.h>
 #include <hwbinder/IPCThreadState.h>
 #include <utils/Trace.h>
+#include <ui/PublicFormat.h>
 
 #include "api2/HeicCompositeStream.h"
 #include "device3/ZoomRatioMapper.h"
@@ -59,6 +62,8 @@
 } // anonymous namespace
 
 const float CameraProviderManager::kDepthARTolerance = .1f;
+const bool CameraProviderManager::kFrameworkJpegRDisabled =
+        property_get_bool("ro.camera.disableJpegR", false);
 
 CameraProviderManager::HidlServiceInteractionProxyImpl
 CameraProviderManager::sHidlServiceInteractionProxy{};
@@ -1071,6 +1076,209 @@
     }
 }
 
+bool CameraProviderManager::isConcurrentDynamicRangeCaptureSupported(
+        const CameraMetadata& deviceInfo, int64_t profile, int64_t concurrentProfile) {
+    auto entry = deviceInfo.find(ANDROID_REQUEST_AVAILABLE_CAPABILITIES);
+    if (entry.count == 0) {
+        return false;
+    }
+
+    const auto it = std::find(entry.data.u8, entry.data.u8 + entry.count,
+            ANDROID_REQUEST_AVAILABLE_CAPABILITIES_DYNAMIC_RANGE_TEN_BIT);
+    if (it == entry.data.u8 + entry.count) {
+        return false;
+    }
+
+    entry = deviceInfo.find(ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP);
+    if (entry.count == 0 || ((entry.count % 3) != 0)) {
+        return false;
+    }
+
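+    // The dynamic range profiles map is a flat list of (profile, concurrent profile bitmask,
+    // extra lookahead latency) triples; the bitmask advertises which other profiles can be
+    // captured within the same session.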
+    for (size_t i = 0; i < entry.count; i += 3) {
+        if (entry.data.i64[i] == profile) {
+            if (entry.data.i64[i+1] & concurrentProfile) {
+                return true;
+            }
+        }
+    }
+
+    return false;
+}
+
+status_t CameraProviderManager::ProviderInfo::DeviceInfo3::deriveJpegRTags(bool maxResolution) {
+    if (kFrameworkJpegRDisabled) {
+        return OK;
+    }
+
+    const int32_t scalerSizesTag =
+              SessionConfigurationUtils::getAppropriateModeTag(
+                      ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS, maxResolution);
+    const int32_t scalerMinFrameDurationsTag = SessionConfigurationUtils::getAppropriateModeTag(
+            ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS, maxResolution);
+    const int32_t scalerStallDurationsTag =
+            SessionConfigurationUtils::getAppropriateModeTag(
+                    ANDROID_SCALER_AVAILABLE_STALL_DURATIONS, maxResolution);
+
+    const int32_t jpegRSizesTag =
+            SessionConfigurationUtils::getAppropriateModeTag(
+                    ANDROID_JPEGR_AVAILABLE_JPEG_R_STREAM_CONFIGURATIONS, maxResolution);
+    const int32_t jpegRStallDurationsTag =
+            SessionConfigurationUtils::getAppropriateModeTag(
+                    ANDROID_JPEGR_AVAILABLE_JPEG_R_STALL_DURATIONS, maxResolution);
+    const int32_t jpegRMinFrameDurationsTag =
+            SessionConfigurationUtils::getAppropriateModeTag(
+                    ANDROID_JPEGR_AVAILABLE_JPEG_R_MIN_FRAME_DURATIONS, maxResolution);
+
+    auto& c = mCameraCharacteristics;
+    std::vector<int32_t> supportedChTags;
+    auto chTags = c.find(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS);
+    if (chTags.count == 0) {
+        ALOGE("%s: No supported camera characteristics keys!", __FUNCTION__);
+        return BAD_VALUE;
+    }
+
+    std::vector<std::tuple<size_t, size_t>> supportedP010Sizes, supportedBlobSizes;
+    auto capabilities = c.find(ANDROID_REQUEST_AVAILABLE_CAPABILITIES);
+    if (capabilities.count == 0) {
+        ALOGE("%s: Supported camera capabilities is empty!", __FUNCTION__);
+        return BAD_VALUE;
+    }
+
+    auto end = capabilities.data.u8 + capabilities.count;
+    bool isTenBitOutputSupported = std::find(capabilities.data.u8, end,
+            ANDROID_REQUEST_AVAILABLE_CAPABILITIES_DYNAMIC_RANGE_TEN_BIT) != end;
+    if (!isTenBitOutputSupported) {
+        // No 10-bit support, nothing more to do.
+        return OK;
+    }
+
+    if (!isConcurrentDynamicRangeCaptureSupported(c,
+                ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_HLG10,
+                ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD)) {
+        // Advertise Jpeg/R only when concurrent 10-bit and 8-bit capture is supported.
+        // This can be removed once 10-bit to 8-bit tonemapping is available.
+        return OK;
+    }
+
+    getSupportedSizes(c, scalerSizesTag,
+            static_cast<android_pixel_format_t>(HAL_PIXEL_FORMAT_BLOB), &supportedBlobSizes);
+    getSupportedSizes(c, scalerSizesTag,
+            static_cast<android_pixel_format_t>(HAL_PIXEL_FORMAT_YCBCR_P010), &supportedP010Sizes);
+    auto it = supportedP010Sizes.begin();
+    while (it != supportedP010Sizes.end()) {
+        // Skip P010 sizes that have no matching Blob size, as well as sizes whose width
+        // is not aligned to 32 pixels, which Jpeg/R does not support.
+        // The alignment restriction can be removed once the encoder limitation is lifted.
+        if ((std::find(supportedBlobSizes.begin(), supportedBlobSizes.end(), *it) ==
+                supportedBlobSizes.end()) || ((std::get<0>(*it) % 32) != 0)) {
+            it = supportedP010Sizes.erase(it);
+        } else {
+            it++;
+        }
+    }
+    if (supportedP010Sizes.empty()) {
+        // Nothing to do in this case.
+        return OK;
+    }
+
+    std::vector<int32_t> jpegREntries;
+    for (const auto& it : supportedP010Sizes) {
+        int32_t entry[4] = {HAL_PIXEL_FORMAT_BLOB, static_cast<int32_t> (std::get<0>(it)),
+                static_cast<int32_t> (std::get<1>(it)),
+                ANDROID_JPEGR_AVAILABLE_JPEG_R_STREAM_CONFIGURATIONS_OUTPUT };
+        jpegREntries.insert(jpegREntries.end(), entry, entry + 4);
+    }
+
+    std::vector<int64_t> blobMinDurations, blobStallDurations;
+    std::vector<int64_t> jpegRMinDurations, jpegRStallDurations;
+
+    // We use the JPEG stall and min frame durations to approximate the respective
+    // Jpeg/R durations.
+    getSupportedDurations(c, scalerMinFrameDurationsTag, HAL_PIXEL_FORMAT_BLOB,
+            supportedP010Sizes, &blobMinDurations);
+    getSupportedDurations(c, scalerStallDurationsTag, HAL_PIXEL_FORMAT_BLOB,
+            supportedP010Sizes, &blobStallDurations);
+    if (blobStallDurations.empty() || blobMinDurations.empty() ||
+            (blobMinDurations.size() != blobStallDurations.size())) {
+        ALOGE("%s: Unexpected number of available blob durations! %zu vs. %zu",
+                __FUNCTION__, blobMinDurations.size(), blobStallDurations.size());
+        return BAD_VALUE;
+    }
+
+    auto itDuration = blobMinDurations.begin();
+    auto itSize = supportedP010Sizes.begin();
+    while (itDuration != blobMinDurations.end()) {
+        int64_t entry[4] = {HAL_PIXEL_FORMAT_BLOB, static_cast<int32_t> (std::get<0>(*itSize)),
+                static_cast<int32_t> (std::get<1>(*itSize)), *itDuration};
+        jpegRMinDurations.insert(jpegRMinDurations.end(), entry, entry + 4);
+        itDuration++; itSize++;
+    }
+
+    itDuration = blobStallDurations.begin();
+    itSize = supportedP010Sizes.begin();
+    while (itDuration != blobStallDurations.end()) {
+        int64_t entry[4] = {HAL_PIXEL_FORMAT_BLOB, static_cast<int32_t> (std::get<0>(*itSize)),
+                static_cast<int32_t> (std::get<1>(*itSize)), *itDuration};
+        jpegRStallDurations.insert(jpegRStallDurations.end(), entry, entry + 4);
+        itDuration++; itSize++;
+    }
+
+    supportedChTags.reserve(chTags.count + 3);
+    supportedChTags.insert(supportedChTags.end(), chTags.data.i32,
+            chTags.data.i32 + chTags.count);
+    supportedChTags.push_back(jpegRSizesTag);
+    supportedChTags.push_back(jpegRMinFrameDurationsTag);
+    supportedChTags.push_back(jpegRStallDurationsTag);
+    c.update(jpegRSizesTag, jpegREntries.data(), jpegREntries.size());
+    c.update(jpegRMinFrameDurationsTag, jpegRMinDurations.data(), jpegRMinDurations.size());
+    c.update(jpegRStallDurationsTag, jpegRStallDurations.data(), jpegRStallDurations.size());
+    c.update(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS, supportedChTags.data(),
+            supportedChTags.size());
+
+    auto colorSpaces = c.find(ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP);
+    if (colorSpaces.count > 0 && !maxResolution) {
+        bool displayP3Support = false;
+        int64_t dynamicRange = ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD;
+        for (size_t i = 0; i < colorSpaces.count; i += 3) {
+            auto colorSpace = colorSpaces.data.i64[i];
+            auto format = colorSpaces.data.i64[i+1];
+            bool formatMatch = (format == static_cast<int64_t>(PublicFormat::JPEG)) ||
+                    (format == static_cast<int64_t>(PublicFormat::UNKNOWN));
+            bool colorSpaceMatch =
+                colorSpace == ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_DISPLAY_P3;
+            if (formatMatch && colorSpaceMatch) {
+                displayP3Support = true;
+            }
+
+            // Jpeg/R will support the same dynamic range profiles as P010
+            if (format == static_cast<int64_t>(PublicFormat::YCBCR_P010)) {
+                dynamicRange |= colorSpaces.data.i64[i+2];
+            }
+        }
+        if (displayP3Support) {
+            std::vector<int64_t> supportedColorSpaces;
+            // Jpeg/R must support the default system color space as well as Display P3
+            supportedColorSpaces.reserve(colorSpaces.count + 3*2);
+            supportedColorSpaces.insert(supportedColorSpaces.end(), colorSpaces.data.i64,
+                    colorSpaces.data.i64 + colorSpaces.count);
+
+            supportedColorSpaces.push_back(static_cast<int64_t>(
+                    ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_SRGB));
+            supportedColorSpaces.push_back(static_cast<int64_t>(PublicFormat::JPEG_R));
+            supportedColorSpaces.push_back(dynamicRange);
+
+            supportedColorSpaces.push_back(static_cast<int64_t>(
+                    ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_DISPLAY_P3));
+            supportedColorSpaces.push_back(static_cast<int64_t>(PublicFormat::JPEG_R));
+            supportedColorSpaces.push_back(dynamicRange);
+            c.update(ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP,
+                    supportedColorSpaces.data(), supportedColorSpaces.size());
+        }
+    }
+
+    return OK;
+}
+
 status_t CameraProviderManager::ProviderInfo::DeviceInfo3::addDynamicDepthTags(
         bool maxResolution) {
     const int32_t depthExclTag = ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE;
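For illustration only (not part of this patch): deriveJpegRTags() above publishes the stream configurations as (format, width, height, direction) int32 quadruples and the duration tags as (format, width, height, duration) int64 quadruples, mirroring the scaler and HEIC layout. A hedged sketch of how a consumer could walk the derived entries; 'staticInfo' is a placeholder for the camera characteristics:

    camera_metadata_ro_entry_t e =
            staticInfo.find(ANDROID_JPEGR_AVAILABLE_JPEG_R_STREAM_CONFIGURATIONS);
    for (size_t i = 0; i + 3 < e.count; i += 4) {
        int32_t format    = e.data.i32[i];     // HAL_PIXEL_FORMAT_BLOB
        int32_t width     = e.data.i32[i + 1];
        int32_t height    = e.data.i32[i + 2];
        int32_t direction = e.data.i32[i + 3]; // ..._STREAM_CONFIGURATIONS_OUTPUT
    }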
diff --git a/services/camera/libcameraservice/common/CameraProviderManager.h b/services/camera/libcameraservice/common/CameraProviderManager.h
index ab1b389..acf511b 100644
--- a/services/camera/libcameraservice/common/CameraProviderManager.h
+++ b/services/camera/libcameraservice/common/CameraProviderManager.h
@@ -407,7 +407,11 @@
 
     status_t notifyUsbDeviceEvent(int32_t eventId, const std::string &usbDeviceId);
 
+    static bool isConcurrentDynamicRangeCaptureSupported(const CameraMetadata& deviceInfo,
+            int64_t profile, int64_t concurrentProfile);
+
     static const float kDepthARTolerance;
+    static const bool kFrameworkJpegRDisabled;
 private:
     // All private members, unless otherwise noted, expect mInterfaceMutex to be locked before use
     mutable std::mutex mInterfaceMutex;
@@ -675,6 +679,7 @@
             status_t fixupTorchStrengthTags();
             status_t addDynamicDepthTags(bool maxResolution = false);
             status_t deriveHeicTags(bool maxResolution = false);
+            status_t deriveJpegRTags(bool maxResolution = false);
             status_t addRotateCropTags();
             status_t addAutoframingTags();
             status_t addPreCorrectionActiveArraySize();
diff --git a/services/camera/libcameraservice/common/aidl/AidlProviderInfo.cpp b/services/camera/libcameraservice/common/aidl/AidlProviderInfo.cpp
index 5a449b6..84fe3a5 100644
--- a/services/camera/libcameraservice/common/aidl/AidlProviderInfo.cpp
+++ b/services/camera/libcameraservice/common/aidl/AidlProviderInfo.cpp
@@ -501,6 +501,11 @@
         ALOGE("%s: Unable to derive HEIC tags based on camera and media capabilities: %s (%d)",
                 __FUNCTION__, strerror(-res), res);
     }
+    res = deriveJpegRTags();
+    if (OK != res) {
+        ALOGE("%s: Unable to derive Jpeg/R tags based on camera and media capabilities: %s (%d)",
+                __FUNCTION__, strerror(-res), res);
+    }
 
     if (camera3::SessionConfigurationUtils::isUltraHighResolutionSensor(mCameraCharacteristics)) {
         status_t status = addDynamicDepthTags(/*maxResolution*/true);
@@ -514,6 +519,12 @@
             ALOGE("%s: Unable to derive HEIC tags based on camera and media capabilities for"
                     "maximum resolution mode: %s (%d)", __FUNCTION__, strerror(-status), status);
         }
+
+        status = deriveJpegRTags(/*maxResolution*/true);
+        if (OK != status) {
+            ALOGE("%s: Unable to derive Jpeg/R tags based on camera and media capabilities for"
+                    "maximum resolution mode: %s (%d)", __FUNCTION__, strerror(-status), status);
+        }
     }
 
     res = addRotateCropTags();
diff --git a/services/camera/libcameraservice/hidl/VndkVersionMetadataTags.h b/services/camera/libcameraservice/hidl/VndkVersionMetadataTags.h
index 8b9b92b..9098fe8 100644
--- a/services/camera/libcameraservice/hidl/VndkVersionMetadataTags.h
+++ b/services/camera/libcameraservice/hidl/VndkVersionMetadataTags.h
@@ -77,6 +77,12 @@
       {34, {
           ANDROID_CONTROL_AUTOFRAMING_AVAILABLE,
           ANDROID_CONTROL_AVAILABLE_SETTINGS_OVERRIDES,
+          ANDROID_JPEGR_AVAILABLE_JPEG_R_MIN_FRAME_DURATIONS,
+          ANDROID_JPEGR_AVAILABLE_JPEG_R_MIN_FRAME_DURATIONS_MAXIMUM_RESOLUTION,
+          ANDROID_JPEGR_AVAILABLE_JPEG_R_STALL_DURATIONS,
+          ANDROID_JPEGR_AVAILABLE_JPEG_R_STALL_DURATIONS_MAXIMUM_RESOLUTION,
+          ANDROID_JPEGR_AVAILABLE_JPEG_R_STREAM_CONFIGURATIONS,
+          ANDROID_JPEGR_AVAILABLE_JPEG_R_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION,
           ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP,
         } },
 };
diff --git a/services/camera/libcameraservice/utils/SessionConfigurationUtils.cpp b/services/camera/libcameraservice/utils/SessionConfigurationUtils.cpp
index f9afd41..f786b79 100644
--- a/services/camera/libcameraservice/utils/SessionConfigurationUtils.cpp
+++ b/services/camera/libcameraservice/utils/SessionConfigurationUtils.cpp
@@ -19,6 +19,8 @@
 #include "SessionConfigurationUtils.h"
 #include "../api2/DepthCompositeStream.h"
 #include "../api2/HeicCompositeStream.h"
+#include "aidl/android/hardware/graphics/common/Dataspace.h"
+#include "api2/JpegRCompositeStream.h"
 #include "common/CameraDeviceBase.h"
 #include "common/HalConversionsTemplated.h"
 #include "../CameraService.h"
@@ -26,6 +28,7 @@
 #include "device3/hidl/HidlCamera3Device.h"
 #include "device3/Camera3OutputStream.h"
 #include "system/graphics-base-v1.1.h"
+#include <ui/PublicFormat.h>
 
 using android::camera3::OutputStreamInfo;
 using android::camera3::OutputStreamInfo;
@@ -209,11 +212,18 @@
 }
 
 //check if format is 10-bit compatible
-bool is10bitCompatibleFormat(int32_t format) {
+bool is10bitCompatibleFormat(int32_t format, android_dataspace_t dataSpace) {
     switch(format) {
         case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
         case HAL_PIXEL_FORMAT_YCBCR_P010:
             return true;
+        case HAL_PIXEL_FORMAT_BLOB:
+            if (dataSpace == static_cast<android_dataspace_t>(
+                        ::aidl::android::hardware::graphics::common::Dataspace::JPEG_R)) {
+                return true;
+            }
+
+            return false;
         default:
             return false;
     }
@@ -316,6 +326,10 @@
         return false; // RAW_DEPTH, not applicable
     } else if (format == HAL_PIXEL_FORMAT_RAW10 && dataSpace == HAL_DATASPACE_DEPTH) {
         return false; // RAW_DEPTH10, not applicable
+    } else if (format == HAL_PIXEL_FORMAT_BLOB && dataSpace ==
+            static_cast<android_dataspace>(
+                ::aidl::android::hardware::graphics::common::Dataspace::JPEG_R)) {
+        format64 = static_cast<int64_t>(PublicFormat::JPEG_R);
     }
 
     camera_metadata_ro_entry_t entry =
@@ -499,7 +513,7 @@
         return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
     }
     if (SessionConfigurationUtils::is10bitDynamicRangeProfile(dynamicRangeProfile) &&
-            !SessionConfigurationUtils::is10bitCompatibleFormat(format)) {
+            !SessionConfigurationUtils::is10bitCompatibleFormat(format, dataSpace)) {
         String8 msg = String8::format("Camera %s: No 10-bit supported stream configurations with "
                 "format %#x defined and profile %" PRIx64 ", failed to create output stream",
                 logicalCameraId.string(), format, dynamicRangeProfile);
@@ -772,7 +786,9 @@
                         camera3::DepthCompositeStream::isDepthCompositeStream(surface);
                 bool isHeicCompositeStream =
                         camera3::HeicCompositeStream::isHeicCompositeStream(surface);
-                if (isDepthCompositeStream || isHeicCompositeStream) {
+                bool isJpegRCompositeStream =
+                        camera3::JpegRCompositeStream::isJpegRCompositeStream(surface);
+                if (isDepthCompositeStream || isHeicCompositeStream || isJpegRCompositeStream) {
                     // We need to take in to account that composite streams can have
                     // additional internal camera streams.
                     std::vector<OutputStreamInfo> compositeStreams;
@@ -780,10 +796,14 @@
                       // TODO: Take care of composite streams.
                         ret = camera3::DepthCompositeStream::getCompositeStreamInfo(streamInfo,
                                 deviceInfo, &compositeStreams);
-                    } else {
+                    } else if (isHeicCompositeStream) {
                         ret = camera3::HeicCompositeStream::getCompositeStreamInfo(streamInfo,
                             deviceInfo, &compositeStreams);
+                    } else {
+                        ret = camera3::JpegRCompositeStream::getCompositeStreamInfo(streamInfo,
+                            deviceInfo, &compositeStreams);
                     }
+
                     if (ret != OK) {
                         String8 msg = String8::format(
                                 "Camera %s: Failed adding composite streams: %s (%d)",
diff --git a/services/camera/libcameraservice/utils/SessionConfigurationUtils.h b/services/camera/libcameraservice/utils/SessionConfigurationUtils.h
index 264045e..b5654ac 100644
--- a/services/camera/libcameraservice/utils/SessionConfigurationUtils.h
+++ b/services/camera/libcameraservice/utils/SessionConfigurationUtils.h
@@ -102,7 +102,7 @@
         int32_t colorSpace);
 
 //check if format is 10-bit output compatible
-bool is10bitCompatibleFormat(int32_t format);
+bool is10bitCompatibleFormat(int32_t format, android_dataspace_t dataSpace);
 
 // check if the dynamic range requires 10-bit output
 bool is10bitDynamicRangeProfile(int64_t dynamicRangeProfile);
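For illustration only (not part of this patch): with the widened signature, a BLOB stream counts as 10-bit compatible only when its dataspace is JPEG_R. A quick sketch of the expected behaviour, based on the switch added in SessionConfigurationUtils.cpp above:

    using ::aidl::android::hardware::graphics::common::Dataspace;
    const auto jpegR = static_cast<android_dataspace_t>(Dataspace::JPEG_R);
    is10bitCompatibleFormat(HAL_PIXEL_FORMAT_YCBCR_P010, HAL_DATASPACE_UNKNOWN); // true
    is10bitCompatibleFormat(HAL_PIXEL_FORMAT_BLOB, jpegR);                       // true
    is10bitCompatibleFormat(HAL_PIXEL_FORMAT_BLOB, HAL_DATASPACE_V0_JFIF);       // false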
diff --git a/services/camera/libcameraservice/utils/SessionConfigurationUtilsHost.cpp b/services/camera/libcameraservice/utils/SessionConfigurationUtilsHost.cpp
index 250ac63..28a22e1 100644
--- a/services/camera/libcameraservice/utils/SessionConfigurationUtilsHost.cpp
+++ b/services/camera/libcameraservice/utils/SessionConfigurationUtilsHost.cpp
@@ -49,6 +49,12 @@
             return ANDROID_HEIC_AVAILABLE_HEIC_MIN_FRAME_DURATIONS_MAXIMUM_RESOLUTION;
         case ANDROID_HEIC_AVAILABLE_HEIC_STALL_DURATIONS:
             return ANDROID_HEIC_AVAILABLE_HEIC_STALL_DURATIONS_MAXIMUM_RESOLUTION;
+        case ANDROID_JPEGR_AVAILABLE_JPEG_R_STREAM_CONFIGURATIONS:
+            return ANDROID_JPEGR_AVAILABLE_JPEG_R_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION;
+        case ANDROID_JPEGR_AVAILABLE_JPEG_R_MIN_FRAME_DURATIONS:
+            return ANDROID_JPEGR_AVAILABLE_JPEG_R_MIN_FRAME_DURATIONS_MAXIMUM_RESOLUTION;
+        case ANDROID_JPEGR_AVAILABLE_JPEG_R_STALL_DURATIONS:
+            return ANDROID_JPEGR_AVAILABLE_JPEG_R_STALL_DURATIONS_MAXIMUM_RESOLUTION;
         case ANDROID_SENSOR_OPAQUE_RAW_SIZE:
             return ANDROID_SENSOR_OPAQUE_RAW_SIZE_MAXIMUM_RESOLUTION;
         case ANDROID_LENS_INTRINSIC_CALIBRATION:
@@ -97,4 +103,4 @@
 
 } // namespace SessionConfigurationUtils
 } // namespace camera3
-} // namespace android
\ No newline at end of file
+} // namespace android
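For illustration only (not part of this patch): with the new switch cases above, getAppropriateModeTag() resolves the Jpeg/R tags to their maximum-resolution variants when requested, e.g.:

    int32_t tag = SessionConfigurationUtils::getAppropriateModeTag(
            ANDROID_JPEGR_AVAILABLE_JPEG_R_STREAM_CONFIGURATIONS, /*maxResolution*/ true);
    // tag == ANDROID_JPEGR_AVAILABLE_JPEG_R_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION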