Camera2: Add jpeg encoding support for all camera extensions

Enable consistent JPEG output support for all available camera
extensions. Extensions that only support YUV_420_888 output will
get an extra SW encoding pass before the processed results
can be queued back to the client surface.

Bug: 179818844
Test: Camera CTS

Change-Id: I461e54024f150925ca1a5a57ff7b327712ce0e96
diff --git a/core/java/android/hardware/camera2/CameraDevice.java b/core/java/android/hardware/camera2/CameraDevice.java
index f9eecae..ac6ba0a 100644
--- a/core/java/android/hardware/camera2/CameraDevice.java
+++ b/core/java/android/hardware/camera2/CameraDevice.java
@@ -378,9 +378,10 @@
      * released, continuous repeating requests stopped and any pending
      * multi-frame capture requests flushed.</p>
      *
-     * <p>Note that the CameraExtensionSession currently supports at most two
-     * multi frame capture surface formats: ImageFormat.YUV_420_888 and
-     * ImageFormat.JPEG. Clients must query the multi-frame capture format support using
+     * <p>Note that the CameraExtensionSession currently supports at most two
+     * multi-frame capture surface formats: ImageFormat.JPEG is supported by
+     * all extensions, while ImageFormat.YUV_420_888 may or may not be supported.
+     * Clients must query the multi-frame capture format support using
      * {@link CameraExtensionCharacteristics#getExtensionSupportedSizes(int, int)}.
      * For repeating requests CameraExtensionSession supports only
      * {@link android.graphics.SurfaceTexture} as output. Clients can query the supported resolution
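
For illustration, a minimal sketch of the query the documentation above asks clients to
perform. ExtensionFormatQuery and pickCaptureFormat are hypothetical names; the camera2
calls are the public APIs referenced above, and cameraId/extension are placeholders:

    import android.graphics.ImageFormat;
    import android.hardware.camera2.CameraAccessException;
    import android.hardware.camera2.CameraExtensionCharacteristics;
    import android.hardware.camera2.CameraManager;
    import android.util.Size;
    import java.util.List;

    class ExtensionFormatQuery {
        // Sketch: pick a still-capture format for an extension session. JPEG is
        // expected to be available for every extension after this change, while
        // YUV_420_888 is only available when the extension reports sizes for it.
        static int pickCaptureFormat(CameraManager manager, String cameraId, int extension)
                throws CameraAccessException {
            CameraExtensionCharacteristics extChars =
                    manager.getCameraExtensionCharacteristics(cameraId);
            List<Size> yuvSizes =
                    extChars.getExtensionSupportedSizes(extension, ImageFormat.YUV_420_888);
            // Whether to prefer uncompressed output is application policy.
            return yuvSizes.isEmpty() ? ImageFormat.JPEG : ImageFormat.YUV_420_888;
        }
    }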
diff --git a/core/java/android/hardware/camera2/CameraExtensionCharacteristics.java b/core/java/android/hardware/camera2/CameraExtensionCharacteristics.java
index d3eb377..6121cd2 100644
--- a/core/java/android/hardware/camera2/CameraExtensionCharacteristics.java
+++ b/core/java/android/hardware/camera2/CameraExtensionCharacteristics.java
@@ -35,6 +35,7 @@
 import android.util.Pair;
 import android.util.Size;
 
+import java.util.HashSet;
 import java.lang.annotation.Retention;
 import java.lang.annotation.RetentionPolicy;
 import java.util.ArrayList;
@@ -153,12 +154,8 @@
         mChars = chars;
     }
 
-    private static List<Size> generateSupportedSizes(List<SizeList> sizesList,
-                                                     Integer format,
-                                                     StreamConfigurationMap streamMap) {
-        // Per API contract it is assumed that the extension is able to support all
-        // camera advertised sizes for a given format in case it doesn't return
-        // a valid non-empty size list.
+    private static ArrayList<Size> getSupportedSizes(List<SizeList> sizesList,
+            Integer format) {
         ArrayList<Size> ret = new ArrayList<>();
         if ((sizesList != null) && (!sizesList.isEmpty())) {
             for (SizeList entry : sizesList) {
@@ -170,13 +167,36 @@
                 }
             }
         }
+
+        return ret;
+    }
+
+    private static List<Size> generateSupportedSizes(List<SizeList> sizesList,
+                                                     Integer format,
+                                                     StreamConfigurationMap streamMap) {
+    // Per the API contract, the extension is assumed to support all
+    // camera-advertised sizes for a given format whenever it doesn't return
+    // a valid, non-empty size list.
+        ArrayList<Size> ret = getSupportedSizes(sizesList, format);
         Size[] supportedSizes = streamMap.getOutputSizes(format);
-        if (supportedSizes != null) {
+        if ((ret.isEmpty()) && (supportedSizes != null)) {
             ret.addAll(Arrays.asList(supportedSizes));
         }
         return ret;
     }
 
+    private static List<Size> generateJpegSupportedSizes(List<SizeList> sizesList,
+            StreamConfigurationMap streamMap) {
+        ArrayList<Size> extensionSizes = getSupportedSizes(sizesList, ImageFormat.YUV_420_888);
+        HashSet<Size> supportedSizes = extensionSizes.isEmpty() ? new HashSet<>(Arrays.asList(
+                streamMap.getOutputSizes(ImageFormat.YUV_420_888))) : new HashSet<>(extensionSizes);
+        HashSet<Size> supportedJpegSizes = new HashSet<>(Arrays.asList(streamMap.getOutputSizes(
+                ImageFormat.JPEG)));
+        supportedSizes.retainAll(supportedJpegSizes);
+
+        return new ArrayList<>(supportedSizes);
+    }
+
     /**
      * A per-process global camera extension manager instance, to track and
      * initialize/release extensions depending on client activity.
@@ -488,8 +508,8 @@
      * {@link StreamConfigurationMap#getOutputSizes}.</p>
      *
      * <p>Device-specific extensions currently support at most two
-     * multi-frame capture surface formats, ImageFormat.YUV_420_888 or
-     * ImageFormat.JPEG.</p>
+     * multi-frame capture surface formats: ImageFormat.JPEG is supported by all
+     * extensions, while ImageFormat.YUV_420_888 may or may not be supported.</p>
      *
      * @param extension the extension type
      * @param format    device-specific extension output format
@@ -526,14 +546,17 @@
                             format, streamMap);
                 } else if (format == ImageFormat.JPEG) {
                     extenders.second.init(mCameraId, mChars.getNativeMetadata());
-                    if (extenders.second.getCaptureProcessor() == null) {
+                    if (extenders.second.getCaptureProcessor() != null) {
+                        // The framework will perform the additional encoding pass on the
+                        // processed YUV_420 buffers.
+                        return generateJpegSupportedSizes(
+                                extenders.second.getSupportedResolutions(), streamMap);
+                    } else {
                         return generateSupportedSizes(null, format, streamMap);
                     }
-
-                    return new ArrayList<>();
+                } else {
+                    throw new IllegalArgumentException("Unsupported format: " + format);
                 }
-
-                throw new IllegalArgumentException("Unsupported format: " + format);
             } finally {
                 unregisterClient(clientId);
             }
diff --git a/core/java/android/hardware/camera2/CameraExtensionSession.java b/core/java/android/hardware/camera2/CameraExtensionSession.java
index 877dfbc..e1b8177 100644
--- a/core/java/android/hardware/camera2/CameraExtensionSession.java
+++ b/core/java/android/hardware/camera2/CameraExtensionSession.java
@@ -238,8 +238,10 @@
      * from the camera device, to produce a single high-quality output result.
      *
      * <p>Note that single capture requests currently do not support
-     * client parameters. Settings included in the request will
-     * be entirely overridden by the device-specific extension. </p>
+     * client parameters except for {@link CaptureRequest#JPEG_ORIENTATION orientation} and
+     * {@link CaptureRequest#JPEG_QUALITY quality} in the case of an ImageFormat.JPEG output
+     * target. All other settings included in the request will be entirely overridden by
+     * the device-specific extension.</p>
      *
      * <p>The {@link CaptureRequest.Builder#addTarget} supports only one
      * ImageFormat.YUV_420_888 or ImageFormat.JPEG target surface. {@link CaptureRequest}
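
A short usage sketch of the relaxed contract described above, assuming an already opened
CameraDevice, an active CameraExtensionSession, a JPEG ImageReader, and an Executor and
callback supplied by the caller:

    import android.hardware.camera2.CameraAccessException;
    import android.hardware.camera2.CameraDevice;
    import android.hardware.camera2.CameraExtensionSession;
    import android.hardware.camera2.CaptureRequest;
    import android.media.ImageReader;
    import java.util.concurrent.Executor;

    class ExtensionJpegCapture {
        // Sketch: issue a multi-frame extension capture into a JPEG target surface.
        static void captureJpeg(CameraDevice device, CameraExtensionSession session,
                ImageReader jpegReader, Executor executor,
                CameraExtensionSession.ExtensionCaptureCallback callback)
                throws CameraAccessException {
            CaptureRequest.Builder builder =
                    device.createCaptureRequest(CameraDevice.TEMPLATE_STILL_CAPTURE);
            builder.addTarget(jpegReader.getSurface());
            // The only client settings honored for ImageFormat.JPEG targets; all other
            // request settings are still overridden by the device-specific extension.
            builder.set(CaptureRequest.JPEG_ORIENTATION, 90);
            builder.set(CaptureRequest.JPEG_QUALITY, (byte) 95);
            session.capture(builder.build(), executor, callback);
        }
    }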
diff --git a/core/java/android/hardware/camera2/impl/CameraExtensionJpegProcessor.java b/core/java/android/hardware/camera2/impl/CameraExtensionJpegProcessor.java
new file mode 100644
index 0000000..936734b
--- /dev/null
+++ b/core/java/android/hardware/camera2/impl/CameraExtensionJpegProcessor.java
@@ -0,0 +1,312 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.hardware.camera2.impl;
+
+import android.annotation.NonNull;
+import android.graphics.ImageFormat;
+import android.hardware.camera2.CaptureResult;
+import android.hardware.camera2.extension.CaptureBundle;
+import android.hardware.camera2.extension.ICaptureProcessorImpl;
+import android.media.Image;
+import android.media.Image.Plane;
+import android.media.ImageReader;
+import android.media.ImageWriter;
+import android.os.Handler;
+import android.os.HandlerThread;
+import android.os.IBinder;
+import android.os.RemoteException;
+import android.util.Log;
+import android.view.Surface;
+
+import java.nio.ByteBuffer;
+import java.util.HashSet;
+import java.util.Iterator;
+import java.util.List;
+import java.util.concurrent.ConcurrentLinkedQueue;
+
+// Compresses processed YUV_420_888 input to JPEG and queues the result to the client surface.
+public class CameraExtensionJpegProcessor implements ICaptureProcessorImpl {
+    public final static String TAG = "CameraExtensionJpeg";
+    private final static int JPEG_QUEUE_SIZE = 1;
+    private final static int JPEG_DEFAULT_QUALITY = 100;
+    private final static int JPEG_DEFAULT_ROTATION = 0;
+
+    private final Handler mHandler;
+    private final HandlerThread mHandlerThread;
+    private final ICaptureProcessorImpl mProcessor;
+
+    private ImageReader mYuvReader = null;
+    private android.hardware.camera2.extension.Size mResolution = null;
+    private int mFormat = -1;
+    private Surface mOutputSurface = null;
+    private ImageWriter mOutputWriter = null;
+
+    private static final class JpegParameters {
+        public HashSet<Long> mTimeStamps = new HashSet<>();
+        public int mRotation = JPEG_DEFAULT_ROTATION; // CCW multiple of 90 degrees
+        public int mQuality = JPEG_DEFAULT_QUALITY; // [0..100]
+    }
+
+    private ConcurrentLinkedQueue<JpegParameters> mJpegParameters = new ConcurrentLinkedQueue<>();
+
+    public CameraExtensionJpegProcessor(@NonNull ICaptureProcessorImpl processor) {
+        mProcessor = processor;
+        mHandlerThread = new HandlerThread(TAG);
+        mHandlerThread.start();
+        mHandler = new Handler(mHandlerThread.getLooper());
+    }
+
+    public void close() {
+        mHandlerThread.quitSafely();
+
+        if (mOutputWriter != null) {
+            mOutputWriter.close();
+            mOutputWriter = null;
+        }
+
+        if (mYuvReader != null) {
+            mYuvReader.close();
+            mYuvReader = null;
+        }
+    }
+
+    private static JpegParameters getJpegParameters(List<CaptureBundle> captureBundles) {
+        JpegParameters ret = new JpegParameters();
+        if (!captureBundles.isEmpty()) {
+            // The quality and orientation settings must be equal for requests in a burst
+
+            Byte jpegQuality = captureBundles.get(0).captureResult.get(CaptureResult.JPEG_QUALITY);
+            if (jpegQuality != null) {
+                ret.mQuality = jpegQuality;
+            } else {
+                Log.w(TAG, "No jpeg quality set, using default: " + JPEG_DEFAULT_QUALITY);
+            }
+
+            Integer orientation = captureBundles.get(0).captureResult.get(
+                    CaptureResult.JPEG_ORIENTATION);
+            if (orientation != null) {
+                ret.mRotation = orientation / 90;
+            } else {
+                Log.w(TAG, "No jpeg rotation set, using default: " + JPEG_DEFAULT_ROTATION);
+            }
+
+            for (CaptureBundle bundle : captureBundles) {
+                Long timeStamp = bundle.captureResult.get(CaptureResult.SENSOR_TIMESTAMP);
+                if (timeStamp != null) {
+                    ret.mTimeStamps.add(timeStamp);
+                } else {
+                    Log.e(TAG, "Capture bundle without valid sensor timestamp!");
+                }
+            }
+        }
+
+        return ret;
+    }
+
+    /**
+     * Compresses a YCbCr image to jpeg, applying a crop and rotation.
+     * <p>
+     * The input is defined as a set of 3 planes of 8-bit samples, one plane for
+     * each channel of Y, Cb, Cr.<br>
+     * The Y plane is assumed to have the same width and height of the entire
+     * image.<br>
+     * The Cb and Cr planes are assumed to be downsampled by a factor of 2, to
+     * have dimensions (floor(width / 2), floor(height / 2)).<br>
+     * Each plane is specified by a direct java.nio.ByteBuffer, a pixel-stride,
+     * and a row-stride. So, the sample at coordinate (x, y) can be retrieved
+     * from byteBuffer[x * pixel_stride + y * row_stride].
+     * <p>
+     * The pre-compression transformation is applied as follows:
+     * <ol>
+     * <li>The image is cropped to the rectangle from (cropLeft, cropTop) to
+     * (cropRight - 1, cropBottom - 1). So, a cropping-rectangle of (0, 0) -
+     * (width, height) is a no-op.</li>
+     * <li>The rotation is applied counter-clockwise relative to the coordinate
+     * space of the image, so a CCW rotation will appear CW when the image is
+     * rendered in scanline order. Only rotations which are multiples of
+     * 90-degrees are supported, so the parameter 'rot90' specifies which
+     * multiple of 90 to rotate the image.</li>
+     * </ol>
+     *
+     * @param width          the width of the image to compress
+     * @param height         the height of the image to compress
+     * @param yBuf           the buffer containing the Y component of the image
+     * @param yPStride       the stride between adjacent pixels in the same row in
+     *                       yBuf
+     * @param yRStride       the stride between adjacent rows in yBuf
+     * @param cbBuf          the buffer containing the Cb component of the image
+     * @param cbPStride      the stride between adjacent pixels in the same row in
+     *                       cbBuf
+     * @param cbRStride      the stride between adjacent rows in cbBuf
+     * @param crBuf          the buffer containing the Cr component of the image
+     * @param crPStride      the stride between adjacent pixels in the same row in
+     *                       crBuf
+     * @param crRStride      the stride between adjacent rows in crBuf
+     * @param outBuf         a direct java.nio.ByteBuffer to hold the compressed jpeg.
+     *                       This must have enough capacity to store the result, or an
+     *                       error code will be returned.
+     * @param outBufCapacity the capacity of outBuf
+     * @param quality        the jpeg-quality (1-100) to use
+     * @param cropLeft       left-edge of the bounds of the image to crop to before
+     *                       rotation
+     * @param cropTop        top-edge of the bounds of the image to crop to before
+     *                       rotation
+     * @param cropRight      right-edge of the bounds of the image to crop to before
+     *                       rotation
+     * @param cropBottom     bottom-edge of the bounds of the image to crop to
+     *                       before rotation
+     * @param rot90          the multiple of 90 to rotate the image CCW (after cropping)
+     */
+    private static native int compressJpegFromYUV420pNative(
+            int width, int height,
+            ByteBuffer yBuf, int yPStride, int yRStride,
+            ByteBuffer cbBuf, int cbPStride, int cbRStride,
+            ByteBuffer crBuf, int crPStride, int crRStride,
+            ByteBuffer outBuf, int outBufCapacity,
+            int quality,
+            int cropLeft, int cropTop, int cropRight, int cropBottom,
+            int rot90);
+
+    public void process(List<CaptureBundle> captureBundle) throws RemoteException {
+        JpegParameters jpegParams = getJpegParameters(captureBundle);
+        try {
+            mJpegParameters.add(jpegParams);
+            mProcessor.process(captureBundle);
+        } catch (Exception e) {
+            mJpegParameters.remove(jpegParams);
+            throw e;
+        }
+    }
+
+    public void onOutputSurface(Surface surface, int format) throws RemoteException {
+        if (format != ImageFormat.JPEG) {
+            Log.e(TAG, "Unsupported output format: " + format);
+            return;
+        }
+        mOutputSurface = surface;
+        initializePipeline();
+    }
+
+    @Override
+    public void onResolutionUpdate(android.hardware.camera2.extension.Size size)
+            throws RemoteException {
+        mResolution = size;
+        initializePipeline();
+    }
+
+    public void onImageFormatUpdate(int format) throws RemoteException {
+        if (format != ImageFormat.YUV_420_888) {
+            Log.e(TAG, "Unsupported input format: " + format);
+            return;
+        }
+        mFormat = format;
+        initializePipeline();
+    }
+
+    private void initializePipeline() throws RemoteException {
+        if ((mFormat != -1) && (mOutputSurface != null) && (mResolution != null) &&
+                (mYuvReader == null)) {
+            // Jpeg/blobs are expected to be configured with (w*h)x1
+            mOutputWriter = ImageWriter.newInstance(mOutputSurface, 1 /*maxImages*/,
+                    ImageFormat.JPEG, mResolution.width * mResolution.height, 1);
+            mYuvReader = ImageReader.newInstance(mResolution.width, mResolution.height, mFormat,
+                    JPEG_QUEUE_SIZE);
+            mYuvReader.setOnImageAvailableListener(new YuvCallback(), mHandler);
+            mProcessor.onOutputSurface(mYuvReader.getSurface(), mFormat);
+            mProcessor.onResolutionUpdate(mResolution);
+            mProcessor.onImageFormatUpdate(mFormat);
+        }
+    }
+
+    @Override
+    public IBinder asBinder() {
+        throw new UnsupportedOperationException("Binder IPC not supported!");
+    }
+
+    private class YuvCallback implements ImageReader.OnImageAvailableListener {
+        @Override
+        public void onImageAvailable(ImageReader reader) {
+            Image yuvImage = null;
+            Image jpegImage = null;
+            try {
+                yuvImage = mYuvReader.acquireNextImage();
+                jpegImage = mOutputWriter.dequeueInputImage();
+            } catch (IllegalStateException e) {
+                if (yuvImage != null) {
+                    yuvImage.close();
+                }
+                if (jpegImage != null) {
+                    jpegImage.close();
+                }
+                Log.e(TAG, "Failed to acquire processed yuv image or jpeg image!");
+                return;
+            }
+
+            ByteBuffer jpegBuffer = jpegImage.getPlanes()[0].getBuffer();
+            jpegBuffer.clear();
+            // Jpeg/blobs are expected to be configured with (w*h)x1
+            int jpegCapacity = jpegImage.getWidth();
+
+            Plane lumaPlane = yuvImage.getPlanes()[0];
+            Plane crPlane = yuvImage.getPlanes()[1];
+            Plane cbPlane = yuvImage.getPlanes()[2];
+
+            Iterator<JpegParameters> jpegIter = mJpegParameters.iterator();
+            JpegParameters jpegParams = null;
+            while(jpegIter.hasNext()) {
+                JpegParameters currentParams = jpegIter.next();
+                if (currentParams.mTimeStamps.contains(yuvImage.getTimestamp())) {
+                    jpegParams = currentParams;
+                    jpegIter.remove();
+                    break;
+                }
+            }
+            if (jpegParams == null) {
+                if (mJpegParameters.isEmpty()) {
+                    Log.w(TAG, "Empty jpeg settings queue! Using default jpeg orientation"
+                            + " and quality!");
+                    jpegParams = new JpegParameters();
+                    jpegParams.mRotation = JPEG_DEFAULT_ROTATION;
+                    jpegParams.mQuality = JPEG_DEFAULT_QUALITY;
+                } else {
+                    Log.w(TAG, "No jpeg settings found with matching timestamp for current"
+                            + " processed input!");
+                    Log.w(TAG, "Using values from the top of the queue!");
+                    jpegParams = mJpegParameters.poll();
+                }
+            }
+
+            compressJpegFromYUV420pNative(
+                    yuvImage.getWidth(), yuvImage.getHeight(),
+                    lumaPlane.getBuffer(), lumaPlane.getPixelStride(), lumaPlane.getRowStride(),
+                    crPlane.getBuffer(), crPlane.getPixelStride(), crPlane.getRowStride(),
+                    cbPlane.getBuffer(), cbPlane.getPixelStride(), cbPlane.getRowStride(),
+                    jpegBuffer, jpegCapacity, jpegParams.mQuality,
+                    0, 0, yuvImage.getWidth(), yuvImage.getHeight(),
+                    jpegParams.mRotation);
+            yuvImage.close();
+
+            try {
+                mOutputWriter.queueInputImage(jpegImage);
+            } catch (IllegalStateException e) {
+                Log.e(TAG, "Failed to queue encoded result!");
+            } finally {
+                jpegImage.close();
+            }
+        }
+    }
+}
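
For orientation, a sketch of how the session is expected to drive this wrapper (the actual
wiring is in CameraExtensionSessionImpl below; extensionProcessor, clientJpegSurface and
resolution are placeholders supplied by the session):

    import android.graphics.ImageFormat;
    import android.hardware.camera2.extension.ICaptureProcessorImpl;
    import android.hardware.camera2.extension.Size;
    import android.hardware.camera2.impl.CameraExtensionJpegProcessor;
    import android.os.RemoteException;
    import android.view.Surface;

    class JpegProcessorWiring {
        // Sketch: wrap the extension's YUV capture processor and point it at the
        // client's JPEG surface. The wrapper creates an internal YUV ImageReader,
        // hands that surface to the wrapped processor, and encodes every processed
        // frame to JPEG before queueing it to clientJpegSurface.
        static ICaptureProcessorImpl wire(ICaptureProcessorImpl extensionProcessor,
                Surface clientJpegSurface, Size resolution) throws RemoteException {
            CameraExtensionJpegProcessor jpegProcessor =
                    new CameraExtensionJpegProcessor(extensionProcessor);
            jpegProcessor.onResolutionUpdate(resolution);
            jpegProcessor.onImageFormatUpdate(ImageFormat.YUV_420_888);
            jpegProcessor.onOutputSurface(clientJpegSurface, ImageFormat.JPEG);
            return jpegProcessor;
        }
    }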
diff --git a/core/java/android/hardware/camera2/impl/CameraExtensionSessionImpl.java b/core/java/android/hardware/camera2/impl/CameraExtensionSessionImpl.java
index 8fe7158..850dd44 100644
--- a/core/java/android/hardware/camera2/impl/CameraExtensionSessionImpl.java
+++ b/core/java/android/hardware/camera2/impl/CameraExtensionSessionImpl.java
@@ -91,6 +91,7 @@
     private ImageReader mStubCaptureImageReader = null;
     private ImageWriter mRepeatingRequestImageWriter = null;
 
+    private CameraExtensionJpegProcessor mImageJpegProcessor = null;
     private ICaptureProcessorImpl mImageProcessor = null;
     private CameraExtensionForwardProcessor mPreviewImageProcessor = null;
     private IRequestUpdateProcessorImpl mPreviewRequestUpdateProcessor = null;
@@ -413,6 +414,10 @@
         if (mImageProcessor != null) {
             if (mClientCaptureSurface != null) {
                 SurfaceInfo surfaceInfo = querySurface(mClientCaptureSurface);
+                if (surfaceInfo.mFormat == ImageFormat.JPEG) {
+                    mImageJpegProcessor = new CameraExtensionJpegProcessor(mImageProcessor);
+                    mImageProcessor = mImageJpegProcessor;
+                }
                 mBurstCaptureImageReader = ImageReader.newInstance(surfaceInfo.mWidth,
                         surfaceInfo.mHeight, CameraExtensionCharacteristics.PROCESSING_INPUT_FORMAT,
                         mImageExtender.getMaxCaptureStage());
@@ -570,14 +575,16 @@
                 return null;
             }
 
-            // Set user supported jpeg quality and rotation parameters
+            // This will override the extension capture stage jpeg parameters with the user set
+            // jpeg quality and rotation. This will guarantee that client configured jpeg
+            // parameters always have highest priority.
             Integer jpegRotation = clientRequest.get(CaptureRequest.JPEG_ORIENTATION);
             if (jpegRotation != null) {
-                requestBuilder.set(CaptureRequest.JPEG_ORIENTATION, jpegRotation);
+                captureStage.parameters.set(CaptureRequest.JPEG_ORIENTATION, jpegRotation);
             }
             Byte jpegQuality = clientRequest.get(CaptureRequest.JPEG_QUALITY);
             if (jpegQuality != null) {
-                requestBuilder.set(CaptureRequest.JPEG_QUALITY, jpegQuality);
+                captureStage.parameters.set(CaptureRequest.JPEG_QUALITY, jpegQuality);
             }
 
             requestBuilder.addTarget(target);
@@ -753,6 +760,11 @@
                 mPreviewImageProcessor = null;
             }
 
+            if (mImageJpegProcessor != null) {
+                mImageJpegProcessor.close();
+                mImageJpegProcessor = null;
+            }
+
             mCaptureSession = null;
             mImageProcessor = null;
             mCameraRepeatingSurface = mClientRepeatingRequestSurface = null;
@@ -1014,7 +1026,10 @@
                 mCaptureRequestMap.clear();
                 mCapturePendingMap.clear();
                 boolean processStatus = true;
-                List<CaptureBundle> captureList = initializeParcelable(mCaptureStageMap);
+                Byte jpegQuality = mClientRequest.get(CaptureRequest.JPEG_QUALITY);
+                Integer jpegOrientation = mClientRequest.get(CaptureRequest.JPEG_ORIENTATION);
+                List<CaptureBundle> captureList = initializeParcelable(mCaptureStageMap,
+                        jpegOrientation, jpegQuality);
                 try {
                     mImageProcessor.process(captureList);
                 } catch (RemoteException e) {
@@ -1437,10 +1452,8 @@
             }
             for (int i = idx; i >= 0; i--) {
                 if (previewMap.valueAt(i).first != null) {
-                    Log.w(TAG, "Discard pending buffer with timestamp: " + previewMap.keyAt(i));
                     previewMap.valueAt(i).first.close();
                 } else {
-                    Log.w(TAG, "Discard pending result with timestamp: " + previewMap.keyAt(i));
                     if (mClientNotificationsEnabled && ((i != idx) || notifyCurrentIndex)) {
                         Log.w(TAG, "Preview frame drop with timestamp: " + previewMap.keyAt(i));
                         final long ident = Binder.clearCallingIdentity();
@@ -1632,7 +1645,8 @@
     }
 
     private static List<CaptureBundle> initializeParcelable(
-            HashMap<Integer, Pair<Image, TotalCaptureResult>> captureMap) {
+            HashMap<Integer, Pair<Image, TotalCaptureResult>> captureMap, Integer jpegOrientation,
+            Byte jpegQuality) {
         ArrayList<CaptureBundle> ret = new ArrayList<>();
         for (Integer stagetId : captureMap.keySet()) {
             Pair<Image, TotalCaptureResult> entry = captureMap.get(stagetId);
@@ -1641,6 +1655,12 @@
             bundle.captureImage = initializeParcelImage(entry.first);
             bundle.sequenceId = entry.second.getSequenceId();
             bundle.captureResult = entry.second.getNativeMetadata();
+            if (jpegOrientation != null) {
+                bundle.captureResult.set(CaptureResult.JPEG_ORIENTATION, jpegOrientation);
+            }
+            if (jpegQuality != null) {
+                bundle.captureResult.set(CaptureResult.JPEG_QUALITY, jpegQuality);
+            }
             ret.add(bundle);
         }
 
diff --git a/core/jni/Android.bp b/core/jni/Android.bp
index 8edc8a1..b9d7ee1 100644
--- a/core/jni/Android.bp
+++ b/core/jni/Android.bp
@@ -156,6 +156,7 @@
                 "android_hardware_Camera.cpp",
                 "android_hardware_camera2_CameraMetadata.cpp",
                 "android_hardware_camera2_DngCreator.cpp",
+                "android_hardware_camera2_impl_CameraExtensionJpegProcessor.cpp",
                 "android_hardware_camera2_utils_SurfaceUtils.cpp",
                 "android_hardware_display_DisplayManagerGlobal.cpp",
                 "android_hardware_display_DisplayViewport.cpp",
@@ -210,6 +211,7 @@
                 "audioclient-types-aidl-cpp",
                 "audioflinger-aidl-cpp",
                 "av-types-aidl-cpp",
+                "android.hardware.camera.device@3.2",
                 "libandroidicu",
                 "libbpf_android",
                 "libnetdbpf",
@@ -239,6 +241,7 @@
                 "libdataloader",
                 "libvulkan",
                 "libETC1",
+                "libjpeg",
                 "libhardware",
                 "libhardware_legacy",
                 "libselinux",
diff --git a/core/jni/AndroidRuntime.cpp b/core/jni/AndroidRuntime.cpp
index 8879111..14d1393 100644
--- a/core/jni/AndroidRuntime.cpp
+++ b/core/jni/AndroidRuntime.cpp
@@ -74,6 +74,7 @@
 extern int register_android_hardware_Camera(JNIEnv *env);
 extern int register_android_hardware_camera2_CameraMetadata(JNIEnv *env);
 extern int register_android_hardware_camera2_DngCreator(JNIEnv *env);
+extern int register_android_hardware_camera2_impl_CameraExtensionJpegProcessor(JNIEnv* env);
 extern int register_android_hardware_camera2_utils_SurfaceUtils(JNIEnv* env);
 extern int register_android_hardware_display_DisplayManagerGlobal(JNIEnv* env);
 extern int register_android_hardware_HardwareBuffer(JNIEnv *env);
@@ -1532,6 +1533,7 @@
         REG_JNI(register_android_hardware_Camera),
         REG_JNI(register_android_hardware_camera2_CameraMetadata),
         REG_JNI(register_android_hardware_camera2_DngCreator),
+        REG_JNI(register_android_hardware_camera2_impl_CameraExtensionJpegProcessor),
         REG_JNI(register_android_hardware_camera2_utils_SurfaceUtils),
         REG_JNI(register_android_hardware_display_DisplayManagerGlobal),
         REG_JNI(register_android_hardware_HardwareBuffer),
diff --git a/core/jni/android_hardware_camera2_impl_CameraExtensionJpegProcessor.cpp b/core/jni/android_hardware_camera2_impl_CameraExtensionJpegProcessor.cpp
new file mode 100644
index 0000000..1390759
--- /dev/null
+++ b/core/jni/android_hardware_camera2_impl_CameraExtensionJpegProcessor.cpp
@@ -0,0 +1,629 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <array>
+#include <cstring>
+#include <cstdio>
+#include <inttypes.h>
+#include <memory.h>
+#include <vector>
+
+#include <setjmp.h>
+
+#include <android/hardware/camera/device/3.2/types.h>
+
+#include "core_jni_helpers.h"
+#include "jni.h"
+#include <nativehelper/JNIHelp.h>
+
+#define CAMERA_PROCESSOR_CLASS_NAME "android/hardware/camera2/impl/CameraExtensionJpegProcessor"
+
+extern "C" {
+#include "jpeglib.h"
+}
+
+using namespace std;
+using namespace android;
+
+using android::hardware::camera::device::V3_2::CameraBlob;
+using android::hardware::camera::device::V3_2::CameraBlobId;
+
+class Transform;
+struct Plane;
+
+inline int sgn(int val) { return (0 < val) - (val < 0); }
+
+inline int min(int a, int b) { return a < b ? a : b; }
+
+inline int max(int a, int b) { return a > b ? a : b; }
+
+/**
+ * Represents a combined cropping and rotation transformation.
+ *
+ * The transformation maps the coordinates (mOrigX, mOrigY) and (mOneX, mOneY)
+ * in the input image to the origin and (mOutputWidth, mOutputHeight)
+ * respectively.
+ */
+class Transform {
+    public:
+        Transform(int origX, int origY, int oneX, int oneY);
+
+        static Transform forCropFollowedByRotation(int cropLeft, int cropTop,
+                int cropRight, int cropBottom, int rot90);
+
+        inline int getOutputWidth() const { return mOutputWidth; }
+
+        inline int getOutputHeight() const { return mOutputHeight; }
+
+        bool operator==(const Transform& other) const;
+
+        /**
+         * Transforms the input coordinates.  Coordinates outside the cropped region
+         * are clamped to valid values.
+         */
+        void map(int x, int y, int* outX, int* outY) const;
+
+    private:
+        int mOutputWidth;
+        int mOutputHeight;
+
+        // The coordinates of the point to map the origin to.
+        const int mOrigX, mOrigY;
+        // The coordinates of the point to map the point (getOutputWidth(),
+        // getOutputHeight()) to.
+        const int mOneX, mOneY;
+
+        // A matrix for the rotational component.
+        int mMat00, mMat01;
+        int mMat10, mMat11;
+};
+
+/**
+ * Represents a model for accessing pixel data for a single plane of an image.
+ * Note that the actual data is not owned by this class, and the underlying
+ * data does not need to be stored in separate planes.
+ */
+struct Plane {
+    // The dimensions of this plane of the image
+    int width;
+    int height;
+
+    // A pointer to raw pixel data
+    const unsigned char* data;
+    // The difference in address between consecutive pixels in the same row
+    int pixelStride;
+    // The difference in address between the start of consecutive rows
+    int rowStride;
+};
+
+/**
+ * Provides an interface for simultaneously reading a certain number of rows of
+ * an image plane as contiguous arrays, suitable for use with libjpeg.
+ */
+template <unsigned int ROWS>
+class RowIterator {
+    public:
+        /**
+         * Creates a new RowIterator which will crop and rotate with the given
+         * transform.
+         *
+         * @param plane the plane to iterate over
+         * @param transform the transformation to map output values into the
+         * coordinate space of the plane
+         * @param rowLength the length of the rows returned via loadAt().  If this is
+         * longer than the width of the output (after applying the transform), then
+         * the right-most value is repeated.
+         */
+        inline RowIterator(Plane plane, Transform transform, int rowLength);
+
+        /**
+         * Returns an array of pointers into consecutive rows of contiguous image
+         * data starting at y.  That is, samples within each row are contiguous.
+         * However, the individual arrays pointed-to may be separate.
+         * When the end of the image is reached, the last row of the image is
+         * repeated.
+         * The returned pointers are valid until the next call to loadAt().
+         */
+        inline const std::array<unsigned char*, ROWS> loadAt(int baseY);
+
+    private:
+        Plane mPlane;
+        Transform mTransform;
+        // The length of a row, with padding to the next multiple of 64.
+        int mPaddedRowLength;
+        std::vector<unsigned char> mBuffer;
+};
+
+template <unsigned int ROWS>
+RowIterator<ROWS>::RowIterator(Plane plane, Transform transform,
+                                         int rowLength)
+        : mPlane(plane), mTransform(transform) {
+    mPaddedRowLength = rowLength;
+    mBuffer = std::vector<unsigned char>(rowLength * ROWS);
+}
+
+template <unsigned int ROWS>
+const std::array<unsigned char*, ROWS> RowIterator<ROWS>::loadAt(int baseY) {
+    std::array<unsigned char*, ROWS> bufPtrs;
+    for (unsigned int i = 0; i < ROWS; i++) {
+        bufPtrs[i] = &mBuffer[mPaddedRowLength * i];
+    }
+
+    if (mPlane.width == 0 || mPlane.height == 0) {
+        return bufPtrs;
+    }
+
+    for (unsigned int i = 0; i < ROWS; i++) {
+        int y = i + baseY;
+        y = min(y, mTransform.getOutputHeight() - 1);
+
+        int output_width = mPaddedRowLength;
+        output_width = min(output_width, mTransform.getOutputWidth());
+        output_width = min(output_width, mPlane.width);
+
+        // Each row in the output image will be copied into buf_ by gathering pixels
+        // along an axis-aligned line in the plane.
+        // The line is defined by (startX, startY) -> (endX, endY), computed via the
+        // current Transform.
+        int startX;
+        int startY;
+        mTransform.map(0, y, &startX, &startY);
+
+        int endX;
+        int endY;
+        mTransform.map(output_width - 1, y, &endX, &endY);
+
+        // Clamp (startX, startY) and (endX, endY) to the valid bounds of the plane.
+        startX = min(startX, mPlane.width - 1);
+        startY = min(startY, mPlane.height - 1);
+        endX = min(endX, mPlane.width - 1);
+        endY = min(endY, mPlane.height - 1);
+        startX = max(startX, 0);
+        startY = max(startY, 0);
+        endX = max(endX, 0);
+        endY = max(endY, 0);
+
+        // To reduce work inside the copy-loop, precompute the start, end, and
+        // stride relating the values to be gathered from mPlane into buf
+        // for this particular scan-line.
+        int dx = sgn(endX - startX);
+        int dy = sgn(endY - startY);
+        if (!(dx == 0 || dy == 0)) {
+            ALOGE("%s: Unexpected bounds: %dx%d %dx%d!", __FUNCTION__, startX, endX, startY, endY);
+            return bufPtrs;
+        }
+
+        // The index into mPlane.data of (startX, startY)
+        int plane_start = startX * mPlane.pixelStride + startY * mPlane.rowStride;
+        // The index into mPlane.data of (endX, endY)
+        int plane_end = endX * mPlane.pixelStride + endY * mPlane.rowStride;
+        // The stride, in terms of indices in plane_data, required to enumerate the
+        // samples between the start and end points.
+        int stride = dx * mPlane.pixelStride + dy * mPlane.rowStride;
+        // In the degenerate-case of a 1x1 plane, startX and endX are equal, so
+        // stride would be 0, resulting in an infinite-loop.  To avoid this case,
+        // use a stride of at-least 1.
+        if (stride == 0) {
+            stride = 1;
+        }
+
+        int outX = 0;
+        for (int idx = plane_start; idx >= min(plane_start, plane_end) &&
+                idx <= max(plane_start, plane_end); idx += stride) {
+            bufPtrs[i][outX] = mPlane.data[idx];
+            outX++;
+        }
+
+        // Fill the remaining right-edge of the buffer by extending the last
+        // value.
+        unsigned char right_padding_value = bufPtrs[i][outX - 1];
+        for (; outX < mPaddedRowLength; outX++) {
+            bufPtrs[i][outX] = right_padding_value;
+        }
+    }
+
+    return bufPtrs;
+}
+
+template <typename T>
+void safeDelete(T& t) {
+    delete t;
+    t = nullptr;
+}
+
+template <typename T>
+void safeDeleteArray(T& t) {
+    delete[] t;
+    t = nullptr;
+}
+
+Transform::Transform(int origX, int origY, int oneX, int oneY)
+    : mOrigX(origX), mOrigY(origY), mOneX(oneX), mOneY(oneY) {
+    if (origX == oneX || origY == oneY) {
+        // Handle the degenerate case of cropping to a 0x0 rectangle.
+        mMat00 = 0;
+        mMat01 = 0;
+        mMat10 = 0;
+        mMat11 = 0;
+        return;
+    }
+
+    if (oneX > origX && oneY > origY) {
+        // 0-degree rotation
+        mMat00 = 1;
+        mMat01 = 0;
+        mMat10 = 0;
+        mMat11 = 1;
+        mOutputWidth = abs(oneX - origX);
+        mOutputHeight = abs(oneY - origY);
+    } else if (oneX < origX && oneY > origY) {
+        // 90-degree CCW rotation
+        mMat00 = 0;
+        mMat01 = -1;
+        mMat10 = 1;
+        mMat11 = 0;
+        mOutputWidth = abs(oneY - origY);
+        mOutputHeight = abs(oneX - origX);
+    } else if (oneX > origX && oneY < origY) {
+        // 270-degree CCW rotation
+        mMat00 = 0;
+        mMat01 = 1;
+        mMat10 = -1;
+        mMat11 = 0;
+        mOutputWidth = abs(oneY - origY);
+        mOutputHeight = abs(oneX - origX);
+    } else if (oneX < origX && oneY < origY) {
+        // 180-degree CCW rotation
+        mMat00 = -1;
+        mMat01 = 0;
+        mMat10 = 0;
+        mMat11 = -1;
+        mOutputWidth = abs(oneX - origX);
+        mOutputHeight = abs(oneY - origY);
+    }
+}
+
+Transform Transform::forCropFollowedByRotation(int cropLeft, int cropTop, int cropRight,
+        int cropBottom, int rot90) {
+    // The input crop-region excludes cropRight and cropBottom, so transform the
+    // crop rect such that it defines the entire valid region of pixels
+    // inclusively.
+    cropRight -= 1;
+    cropBottom -= 1;
+
+    int cropXLow = min(cropLeft, cropRight);
+    int cropYLow = min(cropTop, cropBottom);
+    int cropXHigh = max(cropLeft, cropRight);
+    int cropYHigh = max(cropTop, cropBottom);
+    rot90 %= 4;
+    if (rot90 == 0) {
+        return Transform(cropXLow, cropYLow, cropXHigh + 1, cropYHigh + 1);
+    } else if (rot90 == 1) {
+        return Transform(cropXHigh, cropYLow, cropXLow - 1, cropYHigh + 1);
+    } else if (rot90 == 2) {
+        return Transform(cropXHigh, cropYHigh, cropXLow - 1, cropYLow - 1);
+    } else if (rot90 == 3) {
+        return Transform(cropXLow, cropYHigh, cropXHigh + 1, cropYLow - 1);
+    }
+    // Impossible case.
+    return Transform(cropXLow, cropYLow, cropXHigh + 1, cropYHigh + 1);
+}
+
+bool Transform::operator==(const Transform& other) const {
+    return other.mOrigX == mOrigX &&  //
+           other.mOrigY == mOrigY &&  //
+           other.mOneX == mOneX &&    //
+           other.mOneY == mOneY;
+}
+
+/**
+ * Transforms the input coordinates.  Coordinates outside the cropped region
+ * are clamped to valid values.
+ */
+void Transform::map(int x, int y, int* outX, int* outY) const {
+    x = max(x, 0);
+    y = max(y, 0);
+    x = min(x, getOutputWidth() - 1);
+    y = min(y, getOutputHeight() - 1);
+    *outX = x * mMat00 + y * mMat01 + mOrigX;
+    *outY = x * mMat10 + y * mMat11 + mOrigY;
+}
+
+int compress(int img_width, int img_height, RowIterator<16>& y_row_generator,
+        RowIterator<8>& cb_row_generator, RowIterator<8>& cr_row_generator,
+        unsigned char* out_buf, size_t out_buf_capacity, std::function<void(size_t)> flush,
+        int quality) {
+    // libjpeg requires the use of setjmp/longjmp to recover from errors.  Since
+    // this doesn't play well with RAII, we must use pointers and manually call
+    // delete. See POSIX documentation for longjmp() for details on why the
+    // volatile keyword is necessary.
+    volatile jpeg_compress_struct cinfov;
+
+    jpeg_compress_struct& cinfo =
+            *const_cast<struct jpeg_compress_struct*>(&cinfov);
+
+    JSAMPROW* volatile yArr = nullptr;
+    JSAMPROW* volatile cbArr = nullptr;
+    JSAMPROW* volatile crArr = nullptr;
+
+    JSAMPARRAY imgArr[3];
+
+    // Error handling
+
+    struct my_error_mgr {
+        struct jpeg_error_mgr pub;
+        jmp_buf setjmp_buffer;
+    } err;
+
+    cinfo.err = jpeg_std_error(&err.pub);
+
+    // Default error_exit will call exit(), so override
+    // to return control via setjmp/longjmp.
+    err.pub.error_exit = [](j_common_ptr cinfo) {
+        my_error_mgr* myerr = reinterpret_cast<my_error_mgr*>(cinfo->err);
+
+        (*cinfo->err->output_message)(cinfo);
+
+        // Return control to the setjmp point (see call to setjmp()).
+        longjmp(myerr->setjmp_buffer, 1);
+    };
+
+    cinfo.err = (struct jpeg_error_mgr*)&err;
+
+    // Set the setjmp point to return to in case of error.
+    if (setjmp(err.setjmp_buffer)) {
+        // If libjpeg hits an error, control will jump to this point (see call to
+        // longjmp()).
+        jpeg_destroy_compress(&cinfo);
+
+        safeDeleteArray(yArr);
+        safeDeleteArray(cbArr);
+        safeDeleteArray(crArr);
+
+        return -1;
+    }
+
+    // Create jpeg compression context
+    jpeg_create_compress(&cinfo);
+
+    // Stores data needed by our c-style callbacks into libjpeg
+    struct ClientData {
+        unsigned char* out_buf;
+        size_t out_buf_capacity;
+        std::function<void(size_t)> flush;
+        int totalOutputBytes;
+    } clientData{out_buf, out_buf_capacity, flush, 0};
+
+    cinfo.client_data = &clientData;
+
+    // Initialize destination manager
+    jpeg_destination_mgr dest;
+
+    dest.init_destination = [](j_compress_ptr cinfo) {
+        ClientData& cdata = *reinterpret_cast<ClientData*>(cinfo->client_data);
+
+        cinfo->dest->next_output_byte = cdata.out_buf;
+        cinfo->dest->free_in_buffer = cdata.out_buf_capacity;
+    };
+
+    dest.empty_output_buffer = [](j_compress_ptr cinfo) -> boolean {
+        ClientData& cdata = *reinterpret_cast<ClientData*>(cinfo->client_data);
+
+        size_t numBytesInBuffer = cdata.out_buf_capacity;
+        cdata.flush(numBytesInBuffer);
+        cdata.totalOutputBytes += numBytesInBuffer;
+
+        // Reset the buffer
+        cinfo->dest->next_output_byte = cdata.out_buf;
+        cinfo->dest->free_in_buffer = cdata.out_buf_capacity;
+
+        return true;
+    };
+
+    dest.term_destination = [](j_compress_ptr cinfo __unused) {
+        // do nothing to terminate the output buffer
+    };
+
+    cinfo.dest = &dest;
+
+    // Set jpeg parameters
+    cinfo.image_width = img_width;
+    cinfo.image_height = img_height;
+    cinfo.input_components = 3;
+
+    // Set defaults based on the above values
+    jpeg_set_defaults(&cinfo);
+
+    jpeg_set_quality(&cinfo, quality, true);
+
+    cinfo.dct_method = JDCT_IFAST;
+
+    cinfo.raw_data_in = true;
+
+    jpeg_set_colorspace(&cinfo, JCS_YCbCr);
+
+    cinfo.comp_info[0].h_samp_factor = 2;
+    cinfo.comp_info[0].v_samp_factor = 2;
+    cinfo.comp_info[1].h_samp_factor = 1;
+    cinfo.comp_info[1].v_samp_factor = 1;
+    cinfo.comp_info[2].h_samp_factor = 1;
+    cinfo.comp_info[2].v_samp_factor = 1;
+
+    jpeg_start_compress(&cinfo, true);
+
+    yArr = new JSAMPROW[cinfo.comp_info[0].v_samp_factor * DCTSIZE];
+    cbArr = new JSAMPROW[cinfo.comp_info[1].v_samp_factor * DCTSIZE];
+    crArr = new JSAMPROW[cinfo.comp_info[2].v_samp_factor * DCTSIZE];
+
+    imgArr[0] = const_cast<JSAMPARRAY>(yArr);
+    imgArr[1] = const_cast<JSAMPARRAY>(cbArr);
+    imgArr[2] = const_cast<JSAMPARRAY>(crArr);
+
+    for (int y = 0; y < img_height; y += DCTSIZE * 2) {
+        std::array<unsigned char*, 16> yData = y_row_generator.loadAt(y);
+        std::array<unsigned char*, 8> cbData = cb_row_generator.loadAt(y / 2);
+        std::array<unsigned char*, 8> crData = cr_row_generator.loadAt(y / 2);
+
+        for (int row = 0; row < DCTSIZE * 2; row++) {
+            yArr[row] = yData[row];
+        }
+        for (int row = 0; row < DCTSIZE; row++) {
+            cbArr[row] = cbData[row];
+            crArr[row] = crData[row];
+        }
+
+        jpeg_write_raw_data(&cinfo, imgArr, DCTSIZE * 2);
+    }
+
+    jpeg_finish_compress(&cinfo);
+
+    int numBytesInBuffer = cinfo.dest->next_output_byte - out_buf;
+
+    flush(numBytesInBuffer);
+
+    clientData.totalOutputBytes += numBytesInBuffer;
+
+    safeDeleteArray(yArr);
+    safeDeleteArray(cbArr);
+    safeDeleteArray(crArr);
+
+    jpeg_destroy_compress(&cinfo);
+
+    return clientData.totalOutputBytes;
+}
+
+int compress(
+        /** Input image dimensions */
+        int width, int height,
+        /** Y Plane */
+        unsigned char* yBuf, int yPStride, int yRStride,
+        /** Cb Plane */
+        unsigned char* cbBuf, int cbPStride, int cbRStride,
+        /** Cr Plane */
+        unsigned char* crBuf, int crPStride, int crRStride,
+        /** Output */
+        unsigned char* outBuf, size_t outBufCapacity,
+        /** Jpeg compression parameters */
+        int quality,
+        /** Crop */
+        int cropLeft, int cropTop, int cropRight, int cropBottom,
+        /** Rotation (multiple of 90).  For example, rot90 = 1 implies a 90 degree
+         * rotation. */
+        int rot90) {
+    int finalWidth;
+    int finalHeight;
+    finalWidth = cropRight - cropLeft;
+    finalHeight = cropBottom - cropTop;
+
+    rot90 %= 4;
+    // for 90 and 270-degree rotations, flip the final width and height
+    if (rot90 == 1) {
+        finalWidth = cropBottom - cropTop;
+        finalHeight = cropRight - cropLeft;
+    } else if (rot90 == 3) {
+        finalWidth = cropBottom - cropTop;
+        finalHeight = cropRight - cropLeft;
+    }
+
+    const Plane yP = {width, height, yBuf, yPStride, yRStride};
+    const Plane cbP = {width / 2, height / 2, cbBuf, cbPStride, cbRStride};
+    const Plane crP = {width / 2, height / 2, crBuf, crPStride, crRStride};
+
+    auto flush = [](size_t numBytes __unused) {
+        // do nothing
+    };
+
+    // Pad the row length by 16 samples and round up to the next multiple of 64.
+    int y_row_length = (finalWidth + 16 + 63) & ~63;
+    int cb_row_length = (finalWidth / 2 + 16 + 63) & ~63;
+    int cr_row_length = (finalWidth / 2 + 16 + 63) & ~63;
+
+    Transform yTrans = Transform::forCropFollowedByRotation(
+            cropLeft, cropTop, cropRight, cropBottom, rot90);
+
+    Transform chromaTrans = Transform::forCropFollowedByRotation(
+            cropLeft / 2, cropTop / 2, cropRight / 2, cropBottom / 2, rot90);
+
+    RowIterator<16> yIter(yP, yTrans, y_row_length);
+    RowIterator<8> cbIter(cbP, chromaTrans, cb_row_length);
+    RowIterator<8> crIter(crP, chromaTrans, cr_row_length);
+
+    return compress(finalWidth, finalHeight, yIter, cbIter, crIter, outBuf, outBufCapacity, flush,
+            quality);
+}
+
+extern "C" {
+
+static jint CameraExtensionJpegProcessor_compressJpegFromYUV420p(
+        JNIEnv* env, jclass clazz __unused,
+        /** Input image dimensions */
+        jint width, jint height,
+        /** Y Plane */
+        jobject yBuf, jint yPStride, jint yRStride,
+        /** Cb Plane */
+        jobject cbBuf, jint cbPStride, jint cbRStride,
+        /** Cr Plane */
+        jobject crBuf, jint crPStride, jint crRStride,
+        /** Output */
+        jobject outBuf, jint outBufCapacity,
+        /** Jpeg compression parameters */
+        jint quality,
+        /** Crop */
+        jint cropLeft, jint cropTop, jint cropRight, jint cropBottom,
+        /** Rotation (multiple of 90).  For example, rot90 = 1 implies a 90 degree
+         * rotation. */
+        jint rot90) {
+    jbyte* y = (jbyte*)env->GetDirectBufferAddress(yBuf);
+    jbyte* cb = (jbyte*)env->GetDirectBufferAddress(cbBuf);
+    jbyte* cr = (jbyte*)env->GetDirectBufferAddress(crBuf);
+    jbyte* out = (jbyte*)env->GetDirectBufferAddress(outBuf);
+
+    size_t actualJpegSize = compress(width, height,
+            (unsigned char*)y, yPStride, yRStride,
+            (unsigned char*)cb, cbPStride, cbRStride,
+            (unsigned char*)cr, crPStride, crRStride,
+            (unsigned char*)out, (size_t)outBufCapacity,
+            quality, cropLeft, cropTop, cropRight, cropBottom, rot90);
+
+    size_t finalJpegSize = actualJpegSize + sizeof(CameraBlob);
+    if (finalJpegSize > outBufCapacity) {
+        ALOGE("%s: Final jpeg buffer %zu not large enough for the jpeg blob header with "\
+                "capacity %d", __FUNCTION__, finalJpegSize, outBufCapacity);
+        return actualJpegSize;
+    }
+
+    int8_t* header = static_cast<int8_t *> (out) +
+            (outBufCapacity - sizeof(CameraBlob));
+    CameraBlob *blob = reinterpret_cast<CameraBlob *> (header);
+    blob->blobId = CameraBlobId::JPEG;
+    blob->blobSize = actualJpegSize;
+
+    return actualJpegSize;
+}
+
+} // extern "C"
+
+static const JNINativeMethod gCameraExtensionJpegProcessorMethods[] = {
+    {"compressJpegFromYUV420pNative",
+    "(IILjava/nio/ByteBuffer;IILjava/nio/ByteBuffer;IILjava/nio/ByteBuffer;IILjava/nio/ByteBuffer;IIIIIII)I",
+    (void*)CameraExtensionJpegProcessor_compressJpegFromYUV420p}};
+
+// Get all the required offsets in java class and register native functions
+int register_android_hardware_camera2_impl_CameraExtensionJpegProcessor(JNIEnv* env) {
+    // Register native functions
+    return RegisterMethodsOrDie(env, CAMERA_PROCESSOR_CLASS_NAME,
+            gCameraExtensionJpegProcessorMethods, NELEM(gCameraExtensionJpegProcessorMethods));
+}
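
For readers of the Transform class above, the mapping implemented by Transform::map() can be
summarized as the affine form below, where (x_out, y_out) are clamped output-image
coordinates and the matrix depends on the requested rotation:

    \begin{pmatrix} x_{in} \\ y_{in} \end{pmatrix}
        = M_{rot90} \begin{pmatrix} x_{out} \\ y_{out} \end{pmatrix}
        + \begin{pmatrix} origX \\ origY \end{pmatrix},
    \qquad
    M_0 = \begin{pmatrix} 1 & 0 \\ 0 & 1 \end{pmatrix}, \;
    M_1 = \begin{pmatrix} 0 & -1 \\ 1 & 0 \end{pmatrix}, \;
    M_2 = \begin{pmatrix} -1 & 0 \\ 0 & -1 \end{pmatrix}, \;
    M_3 = \begin{pmatrix} 0 & 1 \\ -1 & 0 \end{pmatrix}

with (origX, origY) chosen by Transform::forCropFollowedByRotation() so that the appropriate
corner of the crop rectangle maps to the output origin.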
diff --git a/media/java/android/media/ImageWriter.java b/media/java/android/media/ImageWriter.java
index 92db946..89760cd 100644
--- a/media/java/android/media/ImageWriter.java
+++ b/media/java/android/media/ImageWriter.java
@@ -130,7 +130,59 @@
      */
     public static @NonNull ImageWriter newInstance(@NonNull Surface surface,
             @IntRange(from = 1) int maxImages) {
-        return new ImageWriter(surface, maxImages, ImageFormat.UNKNOWN);
+        return new ImageWriter(surface, maxImages, ImageFormat.UNKNOWN, -1 /*width*/,
+                -1 /*height*/);
+    }
+
+    /**
+     * <p>
+     * Create a new ImageWriter with given number of max Images, format and producer dimension.
+     * </p>
+     * <p>
+     * The {@code maxImages} parameter determines the maximum number of
+     * {@link Image} objects that can be be dequeued from the
+     * {@code ImageWriter} simultaneously. Requesting more buffers will use up
+     * more memory, so it is important to use only the minimum number necessary.
+     * </p>
+     * <p>
+     * The format specifies the image format of this ImageWriter. The format
+     * from the {@code surface} will be overridden with this format. For example,
+     * if the surface is obtained from a {@link android.graphics.SurfaceTexture}, the default
+     * format may be {@link PixelFormat#RGBA_8888}. If the application creates an ImageWriter
+     * with this surface and {@link ImageFormat#PRIVATE}, this ImageWriter will be able to operate
+     * with {@link ImageFormat#PRIVATE} Images.
+     * </p>
+     * <p>
+     * Note that the consumer end-point may or may not be able to support Images with a
+     * different format. The application should only use this method if the consumer is able
+     * to consume such images.
+     * </p>
+     * <p> The input Image size can also be set by the client. </p>
+     *
+     * @param surface The destination Surface this writer produces Image data
+     *            into.
+     * @param maxImages The maximum number of Images the user will want to
+     *            access simultaneously for producing Image data. This should be
+     *            as small as possible to limit memory use. Once maxImages
+     *            Images are dequeued by the user, one of them has to be queued
+     *            back before a new Image can be dequeued for access via
+     *            {@link #dequeueInputImage()}.
+     * @param format The format of this ImageWriter. It can be any valid format specified by
+     *            {@link ImageFormat} or {@link PixelFormat}.
+     *
+     * @param width Input size width.
+     * @param height Input size height.
+     *
+     * @return a new ImageWriter instance.
+     *
+     * @hide
+     */
+    public static @NonNull ImageWriter newInstance(@NonNull Surface surface,
+            @IntRange(from = 1) int maxImages, @Format int format, int width, int height) {
+        if (!ImageFormat.isPublicFormat(format) && !PixelFormat.isPublicFormat(format)) {
+            throw new IllegalArgumentException("Invalid format is specified: " + format);
+        }
+        return new ImageWriter(surface, maxImages, format, width, height);
     }
 
     /**
@@ -179,13 +231,13 @@
         if (!ImageFormat.isPublicFormat(format) && !PixelFormat.isPublicFormat(format)) {
             throw new IllegalArgumentException("Invalid format is specified: " + format);
         }
-        return new ImageWriter(surface, maxImages, format);
+        return new ImageWriter(surface, maxImages, format, -1 /*width*/, -1 /*height*/);
     }
 
     /**
      * @hide
      */
-    protected ImageWriter(Surface surface, int maxImages, int format) {
+    protected ImageWriter(Surface surface, int maxImages, int format, int width, int height) {
         if (surface == null || maxImages < 1) {
             throw new IllegalArgumentException("Illegal input argument: surface " + surface
                     + ", maxImages: " + maxImages);
@@ -195,7 +247,8 @@
 
         // Note that the underlying BufferQueue is working in synchronous mode
         // to avoid dropping any buffers.
-        mNativeContext = nativeInit(new WeakReference<>(this), surface, maxImages, format);
+        mNativeContext = nativeInit(new WeakReference<>(this), surface, maxImages, format, width,
+                height);
 
         // nativeInit internally overrides UNKNOWN format. So does surface format query after
         // nativeInit and before getEstimatedNativeAllocBytes().
@@ -899,7 +952,7 @@
 
     // Native implemented ImageWriter methods.
     private synchronized native long nativeInit(Object weakSelf, Surface surface, int maxImgs,
-            int format);
+            int format, int width, int height);
 
     private synchronized native void nativeClose(long nativeCtx);
 
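
A minimal sketch of the new hidden overload in use, mirroring how CameraExtensionJpegProcessor
sizes its JPEG blob writer. jpegSurface, width, height and jpegBytes are placeholders, and a
real camera consumer would additionally expect the CameraBlob transport header written by the
native encoder above:

    import android.graphics.ImageFormat;
    import android.media.Image;
    import android.media.ImageWriter;
    import android.view.Surface;

    class JpegBlobWriterSketch {
        // Sketch: produce JPEG blob buffers with capacity width * height bytes on a
        // surface whose native format and size may differ (e.g. a camera JPEG output).
        static void writeJpegBlob(Surface jpegSurface, int width, int height, byte[] jpegBytes) {
            ImageWriter writer = ImageWriter.newInstance(jpegSurface, 1 /*maxImages*/,
                    ImageFormat.JPEG, width * height, 1);
            Image image = writer.dequeueInputImage();
            image.getPlanes()[0].getBuffer().put(jpegBytes);
            writer.queueInputImage(image);
            writer.close();
        }
    }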
diff --git a/media/jni/android_media_ImageWriter.cpp b/media/jni/android_media_ImageWriter.cpp
index 5d959a3..b291ac95b 100644
--- a/media/jni/android_media_ImageWriter.cpp
+++ b/media/jni/android_media_ImageWriter.cpp
@@ -364,7 +364,7 @@
 }
 
 static jlong ImageWriter_init(JNIEnv* env, jobject thiz, jobject weakThiz, jobject jsurface,
-        jint maxImages, jint userFormat) {
+        jint maxImages, jint userFormat, jint userWidth, jint userHeight) {
     status_t res;
 
     ALOGV("%s: maxImages:%d", __FUNCTION__, maxImages);
@@ -405,20 +405,38 @@
     // Get the dimension and format of the producer.
     sp<ANativeWindow> anw = producer;
     int32_t width, height, surfaceFormat;
-    if ((res = anw->query(anw.get(), NATIVE_WINDOW_WIDTH, &width)) != OK) {
-        ALOGE("%s: Query Surface width failed: %s (%d)", __FUNCTION__, strerror(-res), res);
-        jniThrowRuntimeException(env, "Failed to query Surface width");
-        return 0;
+    if (userWidth < 0) {
+        if ((res = anw->query(anw.get(), NATIVE_WINDOW_WIDTH, &width)) != OK) {
+            ALOGE("%s: Query Surface width failed: %s (%d)", __FUNCTION__, strerror(-res), res);
+            jniThrowRuntimeException(env, "Failed to query Surface width");
+            return 0;
+        }
+    } else {
+        width = userWidth;
     }
+
     ctx->setBufferWidth(width);
 
-    if ((res = anw->query(anw.get(), NATIVE_WINDOW_HEIGHT, &height)) != OK) {
-        ALOGE("%s: Query Surface height failed: %s (%d)", __FUNCTION__, strerror(-res), res);
-        jniThrowRuntimeException(env, "Failed to query Surface height");
-        return 0;
+    if (userHeight < 0) {
+        if ((res = anw->query(anw.get(), NATIVE_WINDOW_HEIGHT, &height)) != OK) {
+            ALOGE("%s: Query Surface height failed: %s (%d)", __FUNCTION__, strerror(-res), res);
+            jniThrowRuntimeException(env, "Failed to query Surface height");
+            return 0;
+        }
+    } else {
+        height = userHeight;
     }
     ctx->setBufferHeight(height);
 
+    if ((userWidth > 0) && (userHeight > 0)) {
+        res = native_window_set_buffers_user_dimensions(anw.get(), userWidth, userHeight);
+        if (res != OK) {
+            ALOGE("%s: Set buffer dimensions failed: %s (%d)", __FUNCTION__, strerror(-res), res);
+            jniThrowRuntimeException(env, "Set buffer dimensions failed");
+            return 0;
+        }
+    }
+
     // Query surface format if no valid user format is specified, otherwise, override surface format
     // with user format.
     if (userFormat == IMAGE_FORMAT_UNKNOWN) {
@@ -1045,7 +1063,7 @@
 
 static JNINativeMethod gImageWriterMethods[] = {
     {"nativeClassInit",         "()V",                        (void*)ImageWriter_classInit },
-    {"nativeInit",              "(Ljava/lang/Object;Landroid/view/Surface;II)J",
+    {"nativeInit",              "(Ljava/lang/Object;Landroid/view/Surface;IIII)J",
                                                               (void*)ImageWriter_init },
     {"nativeClose",              "(J)V",                      (void*)ImageWriter_close },
     {"nativeAttachAndQueueImage", "(JJIJIIIIII)I",          (void*)ImageWriter_attachAndQueueImage },