Merge "CameraService check sensor privacy if the appop is ignored" into sc-dev
diff --git a/MainlineFiles.cfg b/MainlineFiles.cfg
index f694a41..490bbbf 100644
--- a/MainlineFiles.cfg
+++ b/MainlineFiles.cfg
@@ -1,10 +1,13 @@
-# 
+#
 # mainline files for frameworks/av
+# this list is used by tools/mainline_hook_*.sh to help separate
+# mainline changes vs framework changes, which release at different paces.
+#
 #
 # ignore comment (#) lines and blank lines
 # rest are path prefixes starting at root of the project
 # (so OWNERS, not frameworks/av/OWNERS)
-# 
+#
 # path
 # INCLUDE path
 # EXCLUDE path
@@ -24,4 +27,5 @@
 media/codec2/components/
 media/codecs/
 media/extractors/
-media/libstagefright/mpeg2ts
+media/libstagefright/mpeg2ts/
+media/libstagefright/flac/
diff --git a/apex/Android.bp b/apex/Android.bp
index d198be0..6c45749 100644
--- a/apex/Android.bp
+++ b/apex/Android.bp
@@ -51,7 +51,10 @@
         },
     },
     // JNI
-    native_shared_libs: ["libmediaparser-jni"],
+    native_shared_libs: [
+        "libmediaparser-jni",
+        "libmediaformatshaper",
+    ],
     compile_multilib: "both",
     prebuilts: [
         "code_coverage.policy",
@@ -74,6 +77,9 @@
     // - build artifacts (lib/javalib/bin) against Android 10 SDK
     //   so that the artifacts can run.
     min_sdk_version: "29",
+    // Indicates that the pre-installed version of this apex can be compressed.
+    // Whether it actually will be compressed is controlled on a per-device basis.
+    compressible: true,
 }
 
 apex {
@@ -105,6 +111,9 @@
         "mediaswcodec",
     ],
     native_shared_libs: [
+        "libcodec2_hidl@1.0",
+        "libcodec2_hidl@1.1",
+        "libcodec2_hidl@1.2",
         "libstagefright_foundation",
     ],
     prebuilts: [
@@ -129,6 +138,9 @@
     // - build artifacts (lib/javalib/bin) against Android 10 SDK
     //   so that the artifacts can run.
     min_sdk_version: "29",
+    // Indicates that the pre-installed version of this apex can be compressed.
+    // Whether it actually will be compressed is controlled on a per-device basis.
+    compressible: true,
 }
 
 prebuilt_etc {
diff --git a/apex/testing/Android.bp b/apex/testing/Android.bp
index 4ff4d06..8b81090 100644
--- a/apex/testing/Android.bp
+++ b/apex/testing/Android.bp
@@ -18,8 +18,6 @@
     // all of the 'license_kinds' from "frameworks_av_license"
     // to get the below license kinds:
     //   SPDX-license-identifier-Apache-2.0
-    //   SPDX-license-identifier-MIT
-    //   SPDX-license-identifier-Unicode-DFS
     default_applicable_licenses: ["frameworks_av_license"],
 }
 
diff --git a/camera/Android.bp b/camera/Android.bp
index 71c88ab..2c01496 100644
--- a/camera/Android.bp
+++ b/camera/Android.bp
@@ -81,6 +81,7 @@
     ],
 
     shared_libs: [
+        "libbase",
         "libcutils",
         "libutils",
         "liblog",
diff --git a/camera/CameraUtils.cpp b/camera/CameraUtils.cpp
index f9b1b37..af3c492 100644
--- a/camera/CameraUtils.cpp
+++ b/camera/CameraUtils.cpp
@@ -20,14 +20,16 @@
 #include <camera/CameraUtils.h>
 #include <media/hardware/HardwareAPI.h>
 
+#include <android-base/properties.h>
 #include <system/window.h>
 #include <system/graphics.h>
 
-#include <cutils/properties.h>
 #include <utils/Log.h>
 
 namespace android {
 
+const char *kCameraServiceDisabledProperty = "config.disable_cameraservice";
+
 status_t CameraUtils::getRotationTransform(const CameraMetadata& staticInfo,
                 /*out*/int32_t* transform) {
     ALOGV("%s", __FUNCTION__);
@@ -124,9 +126,7 @@
 }
 
 bool CameraUtils::isCameraServiceDisabled() {
-    char value[PROPERTY_VALUE_MAX];
-    property_get("config.disable_cameraservice", value, "0");
-    return (strncmp(value, "0", 2) != 0 && strncasecmp(value, "false", 6) != 0);
+    return base::GetBoolProperty(kCameraServiceDisabledProperty, false);
 }
 
 } /* namespace android */
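The change above swaps a hand-rolled property_get()/strncmp() check for android::base::GetBoolProperty(), which only returns true for an explicit true literal and otherwise falls back to the supplied default. A minimal illustrative sketch of the parsing this relies on (an approximation of the libbase behavior, not the actual implementation):

    #include <string>

    // Approximates android::base::GetBoolProperty() parsing for the
    // "config.disable_cameraservice" check above: unrecognized values
    // fall back to the default instead of being treated as "enabled".
    static bool parseBoolOrDefault(const std::string& value, bool defaultValue) {
        if (value == "1" || value == "y" || value == "yes" ||
            value == "on" || value == "true") {
            return true;
        }
        if (value == "0" || value == "n" || value == "no" ||
            value == "off" || value == "false") {
            return false;
        }
        return defaultValue;
    }

Note the subtle behavioral change: the old code treated any value other than "0" or "false" as true, while the new call treats unrecognized values as the default (false here).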
diff --git a/camera/aidl/android/hardware/ICameraService.aidl b/camera/aidl/android/hardware/ICameraService.aidl
index 8af704d..459ad15 100644
--- a/camera/aidl/android/hardware/ICameraService.aidl
+++ b/camera/aidl/android/hardware/ICameraService.aidl
@@ -174,6 +174,13 @@
     oneway void notifySystemEvent(int eventId, in int[] args);
 
     /**
+     * Notify the camera service of a display configuration change.
+     *
+     * Callers require the android.permission.CAMERA_SEND_SYSTEM_EVENTS permission.
+     */
+    oneway void notifyDisplayConfigurationChange();
+
+    /**
      * Notify the camera service of a device physical status change. May only be called from
      * a privileged process.
      *
diff --git a/camera/aidl/android/hardware/ICameraServiceProxy.aidl b/camera/aidl/android/hardware/ICameraServiceProxy.aidl
index d428b4e..bbb0289 100644
--- a/camera/aidl/android/hardware/ICameraServiceProxy.aidl
+++ b/camera/aidl/android/hardware/ICameraServiceProxy.aidl
@@ -35,4 +35,10 @@
      * Update the status of a camera device.
      */
     oneway void notifyCameraState(in CameraSessionStats cameraSessionStats);
+
+    /**
+     * Reports whether the top activity needs a rotate and crop override.
+     */
+    boolean isRotateAndCropOverrideNeeded(String packageName, int sensorOrientation,
+            int lensFacing);
 }
diff --git a/camera/aidl/android/hardware/camera2/ICameraDeviceUser.aidl b/camera/aidl/android/hardware/camera2/ICameraDeviceUser.aidl
index 28a57bd..8e1fcc0 100644
--- a/camera/aidl/android/hardware/camera2/ICameraDeviceUser.aidl
+++ b/camera/aidl/android/hardware/camera2/ICameraDeviceUser.aidl
@@ -119,10 +119,11 @@
      * @param width Width of the input buffers
      * @param height Height of the input buffers
      * @param format Format of the input buffers. One of HAL_PIXEL_FORMAT_*.
+     * @param isMultiResolution Whether the input stream supports variable resolution images.
      *
      * @return new stream ID
      */
-    int createInputStream(int width, int height, int format);
+    int createInputStream(int width, int height, int format, boolean isMultiResolution);
 
     /**
      * Get the surface of the input stream.
diff --git a/camera/camera2/OutputConfiguration.cpp b/camera/camera2/OutputConfiguration.cpp
index 4e9b27d..d6642f3 100644
--- a/camera/camera2/OutputConfiguration.cpp
+++ b/camera/camera2/OutputConfiguration.cpp
@@ -68,6 +68,14 @@
     return mPhysicalCameraId;
 }
 
+bool OutputConfiguration::isMultiResolution() const {
+    return mIsMultiResolution;
+}
+
+const std::vector<int32_t> &OutputConfiguration::getSensorPixelModesUsed() const {
+    return mSensorPixelModesUsed;
+}
+
 OutputConfiguration::OutputConfiguration() :
         mRotation(INVALID_ROTATION),
         mSurfaceSetID(INVALID_SET_ID),
@@ -75,7 +83,8 @@
         mWidth(0),
         mHeight(0),
         mIsDeferred(false),
-        mIsShared(false) {
+        mIsShared(false),
+        mIsMultiResolution(false) {
 }
 
 OutputConfiguration::OutputConfiguration(const android::Parcel& parcel) :
@@ -145,6 +154,17 @@
 
     parcel->readString16(&mPhysicalCameraId);
 
+    int isMultiResolution = 0;
+    if ((err = parcel->readInt32(&isMultiResolution)) != OK) {
+        ALOGE("%s: Failed to read surface isMultiResolution flag from parcel", __FUNCTION__);
+        return err;
+    }
+
+    std::vector<int32_t> sensorPixelModesUsed;
+    if ((err = parcel->readParcelableVector(&sensorPixelModesUsed)) != OK) {
+        ALOGE("%s: Failed to read sensor pixel mode(s) from parcel", __FUNCTION__);
+        return err;
+    }
     mRotation = rotation;
     mSurfaceSetID = setID;
     mSurfaceType = surfaceType;
@@ -152,6 +172,7 @@
     mHeight = height;
     mIsDeferred = isDeferred != 0;
     mIsShared = isShared != 0;
+    mIsMultiResolution = isMultiResolution != 0;
     for (auto& surface : surfaceShims) {
         ALOGV("%s: OutputConfiguration: %p, name %s", __FUNCTION__,
                 surface.graphicBufferProducer.get(),
@@ -159,9 +180,11 @@
         mGbps.push_back(surface.graphicBufferProducer);
     }
 
+    mSensorPixelModesUsed = std::move(sensorPixelModesUsed);
+
     ALOGV("%s: OutputConfiguration: rotation = %d, setId = %d, surfaceType = %d,"
-          " physicalCameraId = %s", __FUNCTION__, mRotation, mSurfaceSetID,
-          mSurfaceType, String8(mPhysicalCameraId).string());
+          " physicalCameraId = %s, isMultiResolution = %d", __FUNCTION__, mRotation,
+          mSurfaceSetID, mSurfaceType, String8(mPhysicalCameraId).string(), mIsMultiResolution);
 
     return err;
 }
@@ -175,6 +198,7 @@
     mIsDeferred = false;
     mIsShared = isShared;
     mPhysicalCameraId = physicalId;
+    mIsMultiResolution = false;
 }
 
 OutputConfiguration::OutputConfiguration(
@@ -183,7 +207,7 @@
     int width, int height, bool isShared)
   : mGbps(gbps), mRotation(rotation), mSurfaceSetID(surfaceSetID), mSurfaceType(surfaceType),
     mWidth(width), mHeight(height), mIsDeferred(false), mIsShared(isShared),
-    mPhysicalCameraId(physicalCameraId) { }
+    mPhysicalCameraId(physicalCameraId), mIsMultiResolution(false) { }
 
 status_t OutputConfiguration::writeToParcel(android::Parcel* parcel) const {
 
@@ -224,24 +248,54 @@
     err = parcel->writeString16(mPhysicalCameraId);
     if (err != OK) return err;
 
+    err = parcel->writeInt32(mIsMultiResolution ? 1 : 0);
+    if (err != OK) return err;
+
+    err = parcel->writeParcelableVector(mSensorPixelModesUsed);
+    if (err != OK) return err;
+
     return OK;
 }
 
+template <typename T>
+static bool simpleVectorsEqual(T first, T second) {
+    if (first.size() != second.size()) {
+        return false;
+    }
+
+    for (size_t i = 0; i < first.size(); i++) {
+        if (first[i] != second[i]) {
+            return false;
+        }
+    }
+    return true;
+}
+
 bool OutputConfiguration::gbpsEqual(const OutputConfiguration& other) const {
     const std::vector<sp<IGraphicBufferProducer> >& otherGbps =
             other.getGraphicBufferProducers();
+    return simpleVectorsEqual(otherGbps, mGbps);
+}
 
-    if (mGbps.size() != otherGbps.size()) {
-        return false;
+bool OutputConfiguration::sensorPixelModesUsedEqual(const OutputConfiguration& other) const {
+    const std::vector<int32_t>& othersensorPixelModesUsed = other.getSensorPixelModesUsed();
+    return simpleVectorsEqual(othersensorPixelModesUsed, mSensorPixelModesUsed);
+}
+
+bool OutputConfiguration::sensorPixelModesUsedLessThan(const OutputConfiguration& other) const {
+    const std::vector<int32_t>& spms = other.getSensorPixelModesUsed();
+
+    if (mSensorPixelModesUsed.size() !=  spms.size()) {
+        return mSensorPixelModesUsed.size() < spms.size();
     }
 
-    for (size_t i = 0; i < mGbps.size(); i++) {
-        if (mGbps[i] != otherGbps[i]) {
-            return false;
+    for (size_t i = 0; i < spms.size(); i++) {
+        if (mSensorPixelModesUsed[i] != spms[i]) {
+            return mSensorPixelModesUsed[i] < spms[i];
         }
     }
 
-    return true;
+    return false;
 }
 
 bool OutputConfiguration::gbpsLessThan(const OutputConfiguration& other) const {
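The new sensorPixelModesUsedLessThan() helper orders pixel-mode lists by size first and element-wise second, so OutputConfiguration keeps a strict weak ordering and stays usable as a key in ordered containers. A standalone sketch of that ordering over plain std::vector<int32_t> values (the function name here is illustrative, mirroring the member function above):

    #include <cstdint>
    #include <vector>

    // Shorter vectors sort first; equal-length vectors are compared
    // element by element. Equal vectors are not less-than each other.
    static bool pixelModesLessThan(const std::vector<int32_t>& a,
                                   const std::vector<int32_t>& b) {
        if (a.size() != b.size()) {
            return a.size() < b.size();
        }
        for (size_t i = 0; i < a.size(); i++) {
            if (a[i] != b[i]) {
                return a[i] < b[i];
            }
        }
        return false;
    }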
diff --git a/camera/camera2/SessionConfiguration.cpp b/camera/camera2/SessionConfiguration.cpp
index a431a33..7cf6087 100644
--- a/camera/camera2/SessionConfiguration.cpp
+++ b/camera/camera2/SessionConfiguration.cpp
@@ -55,6 +55,12 @@
         return err;
     }
 
+    bool inputIsMultiResolution = false;
+    if ((err = parcel->readBool(&inputIsMultiResolution)) != OK) {
+        ALOGE("%s: Failed to read input multi-resolution flag from parcel", __FUNCTION__);
+        return err;
+    }
+
     std::vector<OutputConfiguration> outputStreams;
     if ((err = parcel->readParcelableVector(&outputStreams)) != OK) {
         ALOGE("%s: Failed to read output configurations from parcel", __FUNCTION__);
@@ -65,6 +71,7 @@
     mInputWidth = inputWidth;
     mInputHeight = inputHeight;
     mInputFormat = inputFormat;
+    mInputIsMultiResolution = inputIsMultiResolution;
     for (auto& stream : outputStreams) {
         mOutputStreams.push_back(stream);
     }
@@ -90,6 +97,9 @@
     err = parcel->writeInt32(mInputFormat);
     if (err != OK) return err;
 
+    err = parcel->writeBool(mInputIsMultiResolution);
+    if (err != OK) return err;
+
     err = parcel->writeParcelableVector(mOutputStreams);
     if (err != OK) return err;
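The new mInputIsMultiResolution field is written right after mInputFormat and before the output stream vector, and readFromParcel() consumes it at the same position; a mismatch in this ordering would corrupt every field that follows. A minimal round-trip sketch of that constraint using the libbinder Parcel API (values are placeholders):

    #include <binder/Parcel.h>

    // Write order must equal read order: the new bool sits between the
    // input format and the output stream vector on both sides.
    void roundTripSketch() {
        android::Parcel p;
        p.writeInt32(/*inputFormat*/ 0x23);
        p.writeBool(/*inputIsMultiResolution*/ false);

        p.setDataPosition(0);
        int32_t format = 0;
        bool multiRes = false;
        p.readInt32(&format);
        p.readBool(&multiRes);
    }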
 
diff --git a/camera/cameraserver/Android.bp b/camera/cameraserver/Android.bp
index 9398ec3..8ca8920 100644
--- a/camera/cameraserver/Android.bp
+++ b/camera/cameraserver/Android.bp
@@ -42,6 +42,7 @@
         "android.hardware.camera.provider@2.4",
         "android.hardware.camera.provider@2.5",
         "android.hardware.camera.provider@2.6",
+        "android.hardware.camera.provider@2.7",
         "android.hardware.camera.device@1.0",
         "android.hardware.camera.device@3.2",
         "android.hardware.camera.device@3.4",
diff --git a/camera/include/camera/camera2/OutputConfiguration.h b/camera/include/camera/camera2/OutputConfiguration.h
index 95c4f39..f80ed3a 100644
--- a/camera/include/camera/camera2/OutputConfiguration.h
+++ b/camera/include/camera/camera2/OutputConfiguration.h
@@ -47,6 +47,10 @@
     bool                       isDeferred() const;
     bool                       isShared() const;
     String16                   getPhysicalCameraId() const;
+    bool                       isMultiResolution() const;
+
+    // Set of sensor pixel modes allowed: {MAX_RESOLUTION, DEFAULT_MODE}
+    const std::vector<int32_t>&            getSensorPixelModesUsed() const;
     /**
      * Keep impl up-to-date with OutputConfiguration.java in frameworks/base
      */
@@ -83,7 +87,9 @@
                 mIsDeferred == other.mIsDeferred &&
                 mIsShared == other.mIsShared &&
                 gbpsEqual(other) &&
-                mPhysicalCameraId == other.mPhysicalCameraId );
+                mPhysicalCameraId == other.mPhysicalCameraId &&
+                mIsMultiResolution == other.mIsMultiResolution &&
+                sensorPixelModesUsedEqual(other));
     }
     bool operator != (const OutputConfiguration& other) const {
         return !(*this == other);
@@ -114,13 +120,22 @@
         if (mPhysicalCameraId != other.mPhysicalCameraId) {
             return mPhysicalCameraId < other.mPhysicalCameraId;
         }
+        if (mIsMultiResolution != other.mIsMultiResolution) {
+            return mIsMultiResolution < other.mIsMultiResolution;
+        }
+        if (!sensorPixelModesUsedEqual(other)) {
+            return sensorPixelModesUsedLessThan(other);
+        }
         return gbpsLessThan(other);
     }
+
     bool operator > (const OutputConfiguration& other) const {
         return (*this != other && !(*this < other));
     }
 
     bool gbpsEqual(const OutputConfiguration& other) const;
+    bool sensorPixelModesUsedEqual(const OutputConfiguration& other) const;
+    bool sensorPixelModesUsedLessThan(const OutputConfiguration& other) const;
     bool gbpsLessThan(const OutputConfiguration& other) const;
     void addGraphicProducer(sp<IGraphicBufferProducer> gbp) {mGbps.push_back(gbp);}
 private:
@@ -133,6 +148,8 @@
     bool                       mIsDeferred;
     bool                       mIsShared;
     String16                   mPhysicalCameraId;
+    bool                       mIsMultiResolution;
+    std::vector<int32_t>       mSensorPixelModesUsed;
 };
 } // namespace params
 } // namespace camera2
diff --git a/camera/include/camera/camera2/SessionConfiguration.h b/camera/include/camera/camera2/SessionConfiguration.h
index 64288ed..29913f6 100644
--- a/camera/include/camera/camera2/SessionConfiguration.h
+++ b/camera/include/camera/camera2/SessionConfiguration.h
@@ -38,6 +38,7 @@
     int getInputHeight() const { return mInputHeight; }
     int getInputFormat() const { return mInputFormat; }
     int getOperatingMode() const { return mOperatingMode; }
+    bool inputIsMultiResolution() const { return mInputIsMultiResolution; }
 
     virtual status_t writeToParcel(android::Parcel* parcel) const override;
     virtual status_t readFromParcel(const android::Parcel* parcel) override;
@@ -61,7 +62,8 @@
                 mInputWidth == other.mInputWidth &&
                 mInputHeight == other.mInputHeight &&
                 mInputFormat == other.mInputFormat &&
-                mOperatingMode == other.mOperatingMode);
+                mOperatingMode == other.mOperatingMode &&
+                mInputIsMultiResolution == other.mInputIsMultiResolution);
     }
 
     bool operator != (const SessionConfiguration& other) const {
@@ -83,6 +85,10 @@
             return mInputFormat < other.mInputFormat;
         }
 
+        if (mInputIsMultiResolution != other.mInputIsMultiResolution) {
+            return mInputIsMultiResolution < other.mInputIsMultiResolution;
+        }
+
         if (mOperatingMode != other.mOperatingMode) {
             return mOperatingMode < other.mOperatingMode;
         }
@@ -104,6 +110,7 @@
 
     std::vector<OutputConfiguration> mOutputStreams;
     int                              mInputWidth, mInputHeight, mInputFormat, mOperatingMode;
+    bool                             mInputIsMultiResolution = false;
 };
 } // namespace params
 } // namespace camera2
diff --git a/camera/ndk/impl/ACameraMetadata.cpp b/camera/ndk/impl/ACameraMetadata.cpp
index 895514e..dab2fef 100644
--- a/camera/ndk/impl/ACameraMetadata.cpp
+++ b/camera/ndk/impl/ACameraMetadata.cpp
@@ -24,6 +24,28 @@
 
 using namespace android;
 
+// Formats not listed in the public API, but still available to AImageReader
+// Enum value must match corresponding enum in ui/PublicFormat.h (which is not
+// available to VNDK)
+enum AIMAGE_PRIVATE_FORMATS {
+    /**
+     * Unprocessed implementation-dependent raw
+     * depth measurements, opaque with 16 bit
+     * samples.
+     *
+     */
+
+    AIMAGE_FORMAT_RAW_DEPTH = 0x1002,
+
+    /**
+     * Device-specific 10-bit depth RAW image format.
+     *
+     * <p>Unprocessed implementation-dependent raw depth measurements, opaque with 10 bit samples
+     * and device specific bit layout.</p>
+     */
+    AIMAGE_FORMAT_RAW_DEPTH10 = 0x1003,
+};
+
 /**
  * ACameraMetadata Implementation
  */
@@ -290,6 +312,10 @@
             format = AIMAGE_FORMAT_DEPTH_POINT_CLOUD;
         } else if (format == HAL_PIXEL_FORMAT_Y16) {
             format = AIMAGE_FORMAT_DEPTH16;
+        } else if (format == HAL_PIXEL_FORMAT_RAW16) {
+            format = static_cast<int32_t>(AIMAGE_FORMAT_RAW_DEPTH);
+        } else if (format == HAL_PIXEL_FORMAT_RAW10) {
+            format = static_cast<int32_t>(AIMAGE_FORMAT_RAW_DEPTH10);
         }
 
         filteredDepthStreamConfigs.push_back(format);
@@ -534,6 +560,7 @@
         case ACAMERA_SENSOR_SENSITIVITY:
         case ACAMERA_SENSOR_TEST_PATTERN_DATA:
         case ACAMERA_SENSOR_TEST_PATTERN_MODE:
+        case ACAMERA_SENSOR_PIXEL_MODE:
         case ACAMERA_SHADING_MODE:
         case ACAMERA_STATISTICS_FACE_DETECT_MODE:
         case ACAMERA_STATISTICS_HOT_PIXEL_MAP_MODE:
@@ -584,6 +611,7 @@
     ANDROID_SENSOR_PROFILE_HUE_SAT_MAP,
     ANDROID_SENSOR_PROFILE_TONE_CURVE,
     ANDROID_SENSOR_OPAQUE_RAW_SIZE,
+    ANDROID_SENSOR_OPAQUE_RAW_SIZE_MAXIMUM_RESOLUTION,
     ANDROID_SHADING_STRENGTH,
     ANDROID_STATISTICS_HISTOGRAM_MODE,
     ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
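Because 0x1002 and 0x1003 are not part of the public AIMAGE_FORMAT enum, an NDK client that wants to discover these RAW depth configurations has to compare against the raw integer values after the filtering above has run. A hedged usage sketch; 'chars' is assumed to be an ACameraMetadata obtained from ACameraManager_getCameraCharacteristics(), and error handling is omitted:

    ACameraMetadata_const_entry entry = {};
    if (ACameraMetadata_getConstEntry(chars,
            ACAMERA_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS, &entry) == ACAMERA_OK) {
        // Entries are packed as (format, width, height, input/output) quadruples.
        for (uint32_t i = 0; i + 3 < entry.count; i += 4) {
            int32_t format = entry.data.i32[i];
            int32_t width  = entry.data.i32[i + 1];
            int32_t height = entry.data.i32[i + 2];
            bool isOutput  = (entry.data.i32[i + 3] ==
                    ACAMERA_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS_OUTPUT);
            if (isOutput && format == 0x1002 /* AIMAGE_FORMAT_RAW_DEPTH above */) {
                // A width x height RAW16-based depth output is available.
            }
        }
    }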
diff --git a/camera/ndk/include/camera/NdkCameraCaptureSession.h b/camera/ndk/include/camera/NdkCameraCaptureSession.h
index 6c1cf33..2b7f040 100644
--- a/camera/ndk/include/camera/NdkCameraCaptureSession.h
+++ b/camera/ndk/include/camera/NdkCameraCaptureSession.h
@@ -61,6 +61,10 @@
  */
 typedef void (*ACameraCaptureSession_stateCallback)(void* context, ACameraCaptureSession *session);
 
+/**
+ * Capture session state callbacks used in {@link ACameraDevice_createCaptureSession} and
+ * {@link ACameraDevice_createCaptureSessionWithSessionParameters}
+ */
 typedef struct ACameraCaptureSession_stateCallbacks {
     /// optional application context.
     void*                               context;
@@ -246,6 +250,10 @@
         void* context, ACameraCaptureSession* session,
         ACaptureRequest* request, ACameraWindowType* window, int64_t frameNumber);
 
+/**
+ * ACameraCaptureSession_captureCallbacks structure used in
+ * {@link ACameraCaptureSession_capture} and {@link ACameraCaptureSession_setRepeatingRequest}.
+ */
 typedef struct ACameraCaptureSession_captureCallbacks {
     /// optional application context.
     void*                                               context;
@@ -413,7 +421,10 @@
  */
 void ACameraCaptureSession_close(ACameraCaptureSession* session);
 
-struct ACameraDevice;
+/**
+ * ACameraDevice is an opaque type that provides access to a camera device.
+ * A pointer can be obtained using the {@link ACameraManager_openCamera} method.
+ */
 typedef struct ACameraDevice ACameraDevice;
 
 /**
@@ -591,6 +602,10 @@
 camera_status_t ACameraCaptureSession_abortCaptures(ACameraCaptureSession* session)
         __INTRODUCED_IN(24);
 
+/**
+ * Opaque object for capture session output, use {@link ACaptureSessionOutput_create} or
+ * {@link ACaptureSessionSharedOutput_create} to create an instance.
+ */
 typedef struct ACaptureSessionOutput ACaptureSessionOutput;
 
 /**
@@ -604,9 +619,9 @@
  *
  * <p>Native windows that get removed must not be part of any active repeating or single/burst
  * request or have any pending results. Consider updating repeating requests via
- * {@link ACaptureSessionOutput_setRepeatingRequest} and then wait for the last frame number
+ * {@link ACameraCaptureSession_setRepeatingRequest} and then wait for the last frame number
  * when the sequence completes
- * {@link ACameraCaptureSession_captureCallback#onCaptureSequenceCompleted}.</p>
+ * {@link ACameraCaptureSession_captureCallbacks#onCaptureSequenceCompleted}.</p>
  *
  * <p>Native windows that get added must not be part of any other registered ACaptureSessionOutput
  * and must be compatible. Compatible windows must have matching format, rotation and
@@ -713,7 +728,15 @@
      * Same as ACameraCaptureSession_captureCallbacks
      */
     void*                                               context;
+
+    /**
+     * Same as {@link ACameraCaptureSession_captureCallbacks#onCaptureStarted}.
+     */
     ACameraCaptureSession_captureCallback_start         onCaptureStarted;
+
+    /**
+     * Same as {@link ACameraCaptureSession_captureCallbacks#onCaptureProgressed}.
+     */
     ACameraCaptureSession_captureCallback_result        onCaptureProgressed;
 
     /**
@@ -751,10 +774,18 @@
     ACameraCaptureSession_logicalCamera_captureCallback_failed onLogicalCameraCaptureFailed;
 
     /**
-     * Same as ACameraCaptureSession_captureCallbacks
+     * Same as {@link ACameraCaptureSession_captureCallbacks#onCaptureSequenceCompleted}.
      */
     ACameraCaptureSession_captureCallback_sequenceEnd   onCaptureSequenceCompleted;
+
+    /**
+     * Same as {@link ACameraCaptureSession_captureCallbacks#onCaptureSequenceAborted}.
+     */
     ACameraCaptureSession_captureCallback_sequenceAbort onCaptureSequenceAborted;
+
+    /**
+     * Same as {@link ACameraCaptureSession_captureCallbacks#onCaptureBufferLost}.
+     */
     ACameraCaptureSession_captureCallback_bufferLost    onCaptureBufferLost;
 } ACameraCaptureSession_logicalCamera_captureCallbacks;
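The callback structs documented above are plain C structs of optional function pointers; unused slots may be left null. A hedged sketch of wiring up only onCaptureCompleted for a repeating request ('session' and 'request' are assumed to come from ACameraDevice_createCaptureSession() and ACameraDevice_createCaptureRequest() elsewhere):

    static void onCompleted(void* /*context*/, ACameraCaptureSession* /*session*/,
                            ACaptureRequest* /*request*/, const ACameraMetadata* result) {
        // Inspect per-frame result metadata here, e.g. via ACameraMetadata_getConstEntry().
        (void)result;
    }

    ACameraCaptureSession_captureCallbacks callbacks = {};  // all other callbacks stay null
    callbacks.onCaptureCompleted = onCompleted;

    ACameraCaptureSession_setRepeatingRequest(session, &callbacks,
                                              /*numRequests*/ 1, &request,
                                              /*captureSequenceId*/ nullptr);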
 
diff --git a/camera/ndk/include/camera/NdkCameraDevice.h b/camera/ndk/include/camera/NdkCameraDevice.h
index f72fe8d..7be4bd3 100644
--- a/camera/ndk/include/camera/NdkCameraDevice.h
+++ b/camera/ndk/include/camera/NdkCameraDevice.h
@@ -124,6 +124,10 @@
  */
 typedef void (*ACameraDevice_ErrorStateCallback)(void* context, ACameraDevice* device, int error);
 
+/**
+ * Applications' callbacks for camera device state changes, registered with
+ * {@link ACameraManager_openCamera}.
+ */
 typedef struct ACameraDevice_StateCallbacks {
     /// optional application context.
     void*                             context;
@@ -198,6 +202,10 @@
  */
 const char* ACameraDevice_getId(const ACameraDevice* device) __INTRODUCED_IN(24);
 
+/**
+ * Capture request pre-defined template types, used in {@link ACameraDevice_createCaptureRequest}
+ * and {@link ACameraDevice_createCaptureRequest_withPhysicalIds}.
+ */
 typedef enum {
     /**
      * Create a request suitable for a camera preview window. Specifically, this
@@ -301,10 +309,12 @@
         const ACameraDevice* device, ACameraDevice_request_template templateId,
         /*out*/ACaptureRequest** request) __INTRODUCED_IN(24);
 
-
+/**
+ * Opaque object for CaptureSessionOutput container, use
+ * {@link ACaptureSessionOutputContainer_create} to create an instance.
+ */
 typedef struct ACaptureSessionOutputContainer ACaptureSessionOutputContainer;
 
-typedef struct ACaptureSessionOutput ACaptureSessionOutput;
 
 /**
  * Create a capture session output container.
@@ -844,7 +854,7 @@
         /*out*/ACaptureRequest** request) __INTRODUCED_IN(29);
 
 /**
- * Check whether a particular {@ACaptureSessionOutputContainer} is supported by
+ * Check whether a particular {@link ACaptureSessionOutputContainer} is supported by
  * the camera device.
  *
  * <p>This method performs a runtime check of a given {@link
@@ -875,6 +885,7 @@
  *                                                         device.</li>
  *        <li>{@link ACAMERA_ERROR_UNSUPPORTED_OPERATION} if the query operation is not
  *                                                        supported by the camera device.</li>
+ *        </ul>
  */
 camera_status_t ACameraDevice_isSessionConfigurationSupported(
         const ACameraDevice* device,
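ACameraDevice_isSessionConfigurationSupported() gives a cheap preflight check before actually configuring streams. A hedged sketch of the call pattern; 'device' and 'outputs' are assumed to already exist (from ACameraManager_openCamera() and ACaptureSessionOutputContainer_create()/_add() respectively):

    camera_status_t status =
            ACameraDevice_isSessionConfigurationSupported(device, outputs);
    if (status == ACAMERA_OK) {
        // Supported: safe to proceed with ACameraDevice_createCaptureSession().
    } else if (status == ACAMERA_ERROR_UNSUPPORTED_OPERATION) {
        // The query itself is not supported; fall back to simply trying the session.
    } else {
        // The combination was rejected (or another error occurred); adjust the outputs.
    }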
diff --git a/camera/ndk/include/camera/NdkCameraError.h b/camera/ndk/include/camera/NdkCameraError.h
index 9d77eb4..26db7f2 100644
--- a/camera/ndk/include/camera/NdkCameraError.h
+++ b/camera/ndk/include/camera/NdkCameraError.h
@@ -40,7 +40,13 @@
 
 __BEGIN_DECLS
 
+/**
+ * Camera status enum types.
+ */
 typedef enum {
+    /**
+     * Camera operation has succeeded.
+     */
     ACAMERA_OK = 0,
 
     ACAMERA_ERROR_BASE                  = -10000,
diff --git a/camera/ndk/include/camera/NdkCameraManager.h b/camera/ndk/include/camera/NdkCameraManager.h
index be32b11..729182e 100644
--- a/camera/ndk/include/camera/NdkCameraManager.h
+++ b/camera/ndk/include/camera/NdkCameraManager.h
@@ -326,7 +326,7 @@
  * @see ACameraManager_registerExtendedAvailabilityCallback
  */
 typedef struct ACameraManager_ExtendedAvailabilityListener {
-    ///
+    /// Called when a camera becomes available or unavailable
     ACameraManager_AvailabilityCallbacks availabilityCallbacks;
 
     /// Called when there is camera access permission change
diff --git a/camera/ndk/include/camera/NdkCameraMetadata.h b/camera/ndk/include/camera/NdkCameraMetadata.h
index 0d5e6c4..b331d50 100644
--- a/camera/ndk/include/camera/NdkCameraMetadata.h
+++ b/camera/ndk/include/camera/NdkCameraMetadata.h
@@ -256,10 +256,12 @@
 
 /**
  * Return a {@link ACameraMetadata} that references the same data as
- * {@link cameraMetadata}, which is an instance of
- * {@link android.hardware.camera2.CameraMetadata} (e.g., a
- * {@link android.hardware.camera2.CameraCharacteristics} or
- * {@link android.hardware.camera2.CaptureResult}).
+ * <a href="/reference/android/hardware/camera2/CameraMetadata">
+ *     android.hardware.camera2.CameraMetadata</a> from Java API. (e.g., a
+ * <a href="/reference/android/hardware/camera2/CameraCharacteristics">
+ *     android.hardware.camera2.CameraCharacteristics</a>
+ * or <a href="/reference/android/hardware/camera2/CaptureResult">
+ *     android.hardware.camera2.CaptureResult</a>).
  *
  * <p>The returned ACameraMetadata must be freed by the application by {@link ACameraMetadata_free}
  * after application is done using it.</p>
@@ -269,11 +271,13 @@
  * the Java metadata is garbage collected.
  *
  * @param env the JNI environment.
- * @param cameraMetadata the source {@link android.hardware.camera2.CameraMetadata} from which the
+ * @param cameraMetadata the source <a href="/reference/android/hardware/camera2/CameraMetadata">
+                         android.hardware.camera2.CameraMetadata</a> from which the
  *                       returned {@link ACameraMetadata} is a view.
  *
- * @return a valid ACameraMetadata pointer or NULL if {@link cameraMetadata} is null or not a valid
- *         instance of {@link android.hardware.camera2.CameraMetadata}.
+ * @return a valid ACameraMetadata pointer or NULL if cameraMetadata is null or not a valid
+ *         instance of <a href="/reference/android/hardware/camera2/CameraMetadata">
+ *         android.hardware.camera2.CameraMetadata</a>.
  *
  */
 ACameraMetadata* ACameraMetadata_fromCameraMetadata(JNIEnv* env, jobject cameraMetadata)
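A hedged JNI-side sketch of the helper documented above; 'env' and 'jCharacteristics' (a jobject that is really an android.hardware.camera2.CameraCharacteristics) are assumed to be supplied by the JNI call site:

    ACameraMetadata* chars = ACameraMetadata_fromCameraMetadata(env, jCharacteristics);
    if (chars != nullptr) {
        ACameraMetadata_const_entry entry = {};
        ACameraMetadata_getConstEntry(chars, ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE, &entry);
        // ... read entry.data.i32 while the native view is alive ...
        ACameraMetadata_free(chars);  // release the native view when done
    }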
diff --git a/camera/ndk/include/camera/NdkCameraMetadataTags.h b/camera/ndk/include/camera/NdkCameraMetadataTags.h
index c1b2712..20ffd48 100644
--- a/camera/ndk/include/camera/NdkCameraMetadataTags.h
+++ b/camera/ndk/include/camera/NdkCameraMetadataTags.h
@@ -527,6 +527,13 @@
      * scene as they do before. See ACAMERA_CONTROL_ZOOM_RATIO for details. Whether to use
      * activeArraySize or preCorrectionActiveArraySize still depends on distortion correction
      * mode.</p>
+     * <p>For camera devices with the
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR">CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR</a>
+     * capability,
+     * ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE_MAXIMUM_RESOLUTION /
+     * ACAMERA_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE_MAXIMUM_RESOLUTION must be used as the
+     * coordinate system for requests where ACAMERA_SENSOR_PIXEL_MODE is set to
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION">CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION</a>.</p>
      * <p>The data representation is <code>int[5 * area_count]</code>.
      * Every five elements represent a metering region of <code>(xmin, ymin, xmax, ymax, weight)</code>.
      * The rectangle is defined to be inclusive on xmin and ymin, but exclusive on xmax and
@@ -536,7 +543,10 @@
      * @see ACAMERA_DISTORTION_CORRECTION_MODE
      * @see ACAMERA_SCALER_CROP_REGION
      * @see ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE
+     * @see ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE_MAXIMUM_RESOLUTION
      * @see ACAMERA_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE
+     * @see ACAMERA_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE_MAXIMUM_RESOLUTION
+     * @see ACAMERA_SENSOR_PIXEL_MODE
      */
     ACAMERA_CONTROL_AE_REGIONS =                                // int32[5*area_count]
             ACAMERA_CONTROL_START + 4,
@@ -718,6 +728,12 @@
      * scene as they do before. See ACAMERA_CONTROL_ZOOM_RATIO for details. Whether to use
      * activeArraySize or preCorrectionActiveArraySize still depends on distortion correction
      * mode.</p>
+     * <p>For camera devices with the
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR">CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR</a>
+     * capability, ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE_MAXIMUM_RESOLUTION /
+     * ACAMERA_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE_MAXIMUM_RESOLUTION must be used as the
+     * coordinate system for requests where ACAMERA_SENSOR_PIXEL_MODE is set to
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION">CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION</a>.</p>
      * <p>The data representation is <code>int[5 * area_count]</code>.
      * Every five elements represent a metering region of <code>(xmin, ymin, xmax, ymax, weight)</code>.
      * The rectangle is defined to be inclusive on xmin and ymin, but exclusive on xmax and
@@ -727,7 +743,10 @@
      * @see ACAMERA_DISTORTION_CORRECTION_MODE
      * @see ACAMERA_SCALER_CROP_REGION
      * @see ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE
+     * @see ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE_MAXIMUM_RESOLUTION
      * @see ACAMERA_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE
+     * @see ACAMERA_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE_MAXIMUM_RESOLUTION
+     * @see ACAMERA_SENSOR_PIXEL_MODE
      */
     ACAMERA_CONTROL_AF_REGIONS =                                // int32[5*area_count]
             ACAMERA_CONTROL_START + 8,
@@ -904,6 +923,12 @@
      * the scene as they do before. See ACAMERA_CONTROL_ZOOM_RATIO for details. Whether to use
      * activeArraySize or preCorrectionActiveArraySize still depends on distortion correction
      * mode.</p>
+     * <p>For camera devices with the
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR">CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR</a>
+     * capability, ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE_MAXIMUM_RESOLUTION /
+     * ACAMERA_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE_MAXIMUM_RESOLUTION must be used as the
+     * coordinate system for requests where ACAMERA_SENSOR_PIXEL_MODE is set to
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION">CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION</a>.</p>
      * <p>The data representation is <code>int[5 * area_count]</code>.
      * Every five elements represent a metering region of <code>(xmin, ymin, xmax, ymax, weight)</code>.
      * The rectangle is defined to be inclusive on xmin and ymin, but exclusive on xmax and
@@ -913,7 +938,10 @@
      * @see ACAMERA_DISTORTION_CORRECTION_MODE
      * @see ACAMERA_SCALER_CROP_REGION
      * @see ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE
+     * @see ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE_MAXIMUM_RESOLUTION
      * @see ACAMERA_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE
+     * @see ACAMERA_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE_MAXIMUM_RESOLUTION
+     * @see ACAMERA_SENSOR_PIXEL_MODE
      */
     ACAMERA_CONTROL_AWB_REGIONS =                               // int32[5*area_count]
             ACAMERA_CONTROL_START + 12,
@@ -1840,7 +1868,7 @@
      * <li>If the camera device has BURST_CAPTURE capability, the frame rate requirement of
      * BURST_CAPTURE must still be met.</li>
      * <li>All streams not larger than the maximum streaming dimension for BOKEH_STILL_CAPTURE mode
-     * (queried via {@link ACAMERA_CONTROL_AVAILABLE_EXTENDED_SCENE_MODE_CAPABILITIES })
+     * (queried via {@link ACAMERA_CONTROL_AVAILABLE_EXTENDED_SCENE_MODE_MAX_SIZES })
      * will have preview bokeh effect applied.</li>
      * </ul>
      * <p>When set to BOKEH_CONTINUOUS mode, configured streams dimension should not exceed this mode's
@@ -2801,6 +2829,51 @@
      */
     ACAMERA_LENS_DISTORTION =                                   // float[5]
             ACAMERA_LENS_START + 13,
+    /**
+     * <p>The correction coefficients to correct for this camera device's
+     * radial and tangential lens distortion for a
+     * CaptureRequest with ACAMERA_SENSOR_PIXEL_MODE set to
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION">CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION</a>.</p>
+     *
+     * @see ACAMERA_SENSOR_PIXEL_MODE
+     *
+     * <p>Type: float[5]</p>
+     *
+     * <p>This tag may appear in:
+     * <ul>
+     *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+     * </ul></p>
+     *
+     * <p>Analogous to ACAMERA_LENS_DISTORTION, when ACAMERA_SENSOR_PIXEL_MODE is set to
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION">CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION</a>.</p>
+     *
+     * @see ACAMERA_LENS_DISTORTION
+     * @see ACAMERA_SENSOR_PIXEL_MODE
+     */
+    ACAMERA_LENS_DISTORTION_MAXIMUM_RESOLUTION =                // float[5]
+            ACAMERA_LENS_START + 14,
+    /**
+     * <p>The parameters for this camera device's intrinsic
+     * calibration when ACAMERA_SENSOR_PIXEL_MODE is set to
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION">CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION</a>.</p>
+     *
+     * @see ACAMERA_SENSOR_PIXEL_MODE
+     *
+     * <p>Type: float[5]</p>
+     *
+     * <p>This tag may appear in:
+     * <ul>
+     *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+     * </ul></p>
+     *
+     * <p>Analogous to ACAMERA_LENS_INTRINSIC_CALIBRATION, when ACAMERA_SENSOR_PIXEL_MODE is set to
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION">CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION</a>.</p>
+     *
+     * @see ACAMERA_LENS_INTRINSIC_CALIBRATION
+     * @see ACAMERA_SENSOR_PIXEL_MODE
+     */
+    ACAMERA_LENS_INTRINSIC_CALIBRATION_MAXIMUM_RESOLUTION =     // float[5]
+            ACAMERA_LENS_START + 15,
     ACAMERA_LENS_END,
 
     /**
@@ -3428,6 +3501,12 @@
      * coordinate system is post-zoom, meaning that the activeArraySize or
      * preCorrectionActiveArraySize covers the camera device's field of view "after" zoom.  See
      * ACAMERA_CONTROL_ZOOM_RATIO for details.</p>
+     * <p>For camera devices with the
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR">CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR</a>
+     * capability, ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE_MAXIMUM_RESOLUTION /
+     * ACAMERA_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE_MAXIMUM_RESOLUTION must be used as the
+     * coordinate system for requests where ACAMERA_SENSOR_PIXEL_MODE is set to
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION">CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION</a>.</p>
      * <p>The data representation is int[4], which maps to (left, top, width, height).</p>
      *
      * @see ACAMERA_CONTROL_AE_TARGET_FPS_RANGE
@@ -3436,7 +3515,10 @@
      * @see ACAMERA_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM
      * @see ACAMERA_SCALER_CROPPING_TYPE
      * @see ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE
+     * @see ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE_MAXIMUM_RESOLUTION
      * @see ACAMERA_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE
+     * @see ACAMERA_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE_MAXIMUM_RESOLUTION
+     * @see ACAMERA_SENSOR_PIXEL_MODE
      */
     ACAMERA_SCALER_CROP_REGION =                                // int32[4]
             ACAMERA_SCALER_START,
@@ -3538,8 +3620,6 @@
      * set to either OFF or FAST.</p>
      * <p>When multiple streams are used in a request, the minimum frame
      * duration will be max(individual stream min durations).</p>
-     * <p>The minimum frame duration of a stream (of a particular format, size)
-     * is the same regardless of whether the stream is input or output.</p>
      * <p>See ACAMERA_SENSOR_FRAME_DURATION and
      * ACAMERA_SCALER_AVAILABLE_STALL_DURATIONS for more details about
      * calculating the max frame rate.</p>
@@ -3868,6 +3948,122 @@
      */
     ACAMERA_SCALER_DEFAULT_SECURE_IMAGE_SIZE =                  // int32[2]
             ACAMERA_SCALER_START + 18,
+    /**
+     * <p>The available multi-resolution stream configurations that this
+     * physical camera device supports
+     * (i.e. format, width, height, output/input stream).</p>
+     *
+     * <p>Type: int32[n*4] (acamera_metadata_enum_android_scaler_physical_camera_multi_resolution_stream_configurations_t)</p>
+     *
+     * <p>This tag may appear in:
+     * <ul>
+     *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+     * </ul></p>
+     *
+     * <p>This list contains a subset of the parent logical camera's multi-resolution stream
+     * configurations which belong to this physical camera, and it will only advertise the
+     * maximum supported resolutions for a particular format.</p>
+     * <p>If this camera device isn't a physical camera device constituting a logical camera,
+     * but a standalone <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR">CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR</a>
+     * camera, this field represents the multi-resolution input/output stream configurations of
+     * default mode and max resolution modes. The sizes will be the maximum resolution of a
+     * particular format for default mode and max resolution mode.</p>
+     * <p>This field will only be advertised if the device is a physical camera of a
+     * logical multi-camera device or an ultra high resolution sensor camera. For a logical
+     * multi-camera, the camera API will derive the logical camera’s multi-resolution stream
+     * configurations from all physical cameras. For an ultra high resolution sensor camera, this
+     * is used directly as the camera’s multi-resolution stream configurations.</p>
+     */
+    ACAMERA_SCALER_PHYSICAL_CAMERA_MULTI_RESOLUTION_STREAM_CONFIGURATIONS = 
+                                                                // int32[n*4] (acamera_metadata_enum_android_scaler_physical_camera_multi_resolution_stream_configurations_t)
+            ACAMERA_SCALER_START + 19,
+    /**
+     * <p>The available stream configurations that this
+     * camera device supports (i.e. format, width, height, output/input stream) for a
+     * CaptureRequest with ACAMERA_SENSOR_PIXEL_MODE set to
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION">CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION</a>.</p>
+     *
+     * @see ACAMERA_SENSOR_PIXEL_MODE
+     *
+     * <p>Type: int32[n*4] (acamera_metadata_enum_android_scaler_available_stream_configurations_maximum_resolution_t)</p>
+     *
+     * <p>This tag may appear in:
+     * <ul>
+     *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+     * </ul></p>
+     *
+     * <p>Analogous to ACAMERA_SCALER_AVAILABLE_STREAM_CONFIGURATIONS, for configurations
+     * which are applicable when ACAMERA_SENSOR_PIXEL_MODE is set to
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION">CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION</a>.</p>
+     * <p>Not all output formats may be supported in a configuration with
+     * an input stream of a particular format. For more details, see
+     * android.scaler.availableInputOutputFormatsMapMaximumResolution.</p>
+     *
+     * @see ACAMERA_SCALER_AVAILABLE_STREAM_CONFIGURATIONS
+     * @see ACAMERA_SENSOR_PIXEL_MODE
+     */
+    ACAMERA_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION = 
+                                                                // int32[n*4] (acamera_metadata_enum_android_scaler_available_stream_configurations_maximum_resolution_t)
+            ACAMERA_SCALER_START + 20,
+    /**
+     * <p>This lists the minimum frame duration for each
+     * format/size combination when the camera device is sent a CaptureRequest with
+     * ACAMERA_SENSOR_PIXEL_MODE set to
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION">CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION</a>.</p>
+     *
+     * @see ACAMERA_SENSOR_PIXEL_MODE
+     *
+     * <p>Type: int64[4*n]</p>
+     *
+     * <p>This tag may appear in:
+     * <ul>
+     *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+     * </ul></p>
+     *
+     * <p>Analogous to ACAMERA_SCALER_AVAILABLE_MIN_FRAME_DURATIONS, for configurations
+     * which are applicable when ACAMERA_SENSOR_PIXEL_MODE is set to
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION">CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION</a>.</p>
+     * <p>When multiple streams are used in a request (if supported, when ACAMERA_SENSOR_PIXEL_MODE
+     * is set to
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION">CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION</a>), the
+     * minimum frame duration will be max(individual stream min durations).</p>
+     * <p>See ACAMERA_SENSOR_FRAME_DURATION and
+     * ACAMERA_SCALER_AVAILABLE_STALL_DURATIONS_MAXIMUM_RESOLUTION for more details about
+     * calculating the max frame rate.</p>
+     *
+     * @see ACAMERA_SCALER_AVAILABLE_MIN_FRAME_DURATIONS
+     * @see ACAMERA_SCALER_AVAILABLE_STALL_DURATIONS_MAXIMUM_RESOLUTION
+     * @see ACAMERA_SENSOR_FRAME_DURATION
+     * @see ACAMERA_SENSOR_PIXEL_MODE
+     */
+    ACAMERA_SCALER_AVAILABLE_MIN_FRAME_DURATIONS_MAXIMUM_RESOLUTION = 
+                                                                // int64[4*n]
+            ACAMERA_SCALER_START + 21,
+    /**
+     * <p>This lists the maximum stall duration for each
+     * output format/size combination when CaptureRequests are submitted with
+     * ACAMERA_SENSOR_PIXEL_MODE set to
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION">CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION</a></p>
+     *
+     * @see ACAMERA_SENSOR_PIXEL_MODE
+     *
+     * <p>Type: int64[4*n]</p>
+     *
+     * <p>This tag may appear in:
+     * <ul>
+     *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+     * </ul></p>
+     *
+     * <p>Analogous to ACAMERA_SCALER_AVAILABLE_STALL_DURATIONS, for configurations
+     * which are applicable when ACAMERA_SENSOR_PIXEL_MODE is set to
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION">CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION</a>.</p>
+     *
+     * @see ACAMERA_SCALER_AVAILABLE_STALL_DURATIONS
+     * @see ACAMERA_SENSOR_PIXEL_MODE
+     */
+    ACAMERA_SCALER_AVAILABLE_STALL_DURATIONS_MAXIMUM_RESOLUTION = 
+                                                                // int64[4*n]
+            ACAMERA_SCALER_START + 22,
     ACAMERA_SCALER_END,
 
     /**
@@ -4654,6 +4850,67 @@
      */
     ACAMERA_SENSOR_DYNAMIC_WHITE_LEVEL =                        // int32
             ACAMERA_SENSOR_START + 29,
+    /**
+     * <p>Switches sensor pixel mode between maximum resolution mode and default mode.</p>
+     *
+     * <p>Type: byte (acamera_metadata_enum_android_sensor_pixel_mode_t)</p>
+     *
+     * <p>This tag may appear in:
+     * <ul>
+     *   <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
+     *   <li>ACaptureRequest</li>
+     * </ul></p>
+     *
+     * <p>This key controls whether the camera sensor operates in
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION">CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION</a>
+     * mode or not. By default, all camera devices operate in
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_DEFAULT">CameraMetadata#SENSOR_PIXEL_MODE_DEFAULT</a> mode.
+     * When operating in
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_DEFAULT">CameraMetadata#SENSOR_PIXEL_MODE_DEFAULT</a> mode, sensors
+     * with <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR">CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR</a>
+     * capability would typically perform pixel binning in order to improve low light
+     * performance, noise reduction etc. However, in
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION">CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION</a>
+     * mode (supported only
+     * by <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR">CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR</a>
+     * sensors), sensors typically operate in unbinned mode allowing for a larger image size.
+     * The stream configurations supported in
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION">CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION</a>
+     * mode are also different from those of
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_DEFAULT">CameraMetadata#SENSOR_PIXEL_MODE_DEFAULT</a> mode.
+     * They can be queried through
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraCharacteristics.html#get">CameraCharacteristics#get</a> with
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraCharacteristics.html#SCALER_STREAM_CONFIGURATION_MAP_MAXIMUM_RESOLUTION">CameraCharacteristics#SCALER_STREAM_CONFIGURATION_MAP_MAXIMUM_RESOLUTION</a>.
+     * Unless reported by both
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/params/StreamConfigurationMap.html">StreamConfigurationMap</a>s, the outputs from
+     * <code>android.scaler.streamConfigurationMapMaximumResolution</code> and
+     * <code>android.scaler.streamConfigurationMap</code>
+     * must not be mixed in the same CaptureRequest. In other words, these outputs are
+     * exclusive to each other.
+     * This key does not need to be set for reprocess requests.</p>
+     */
+    ACAMERA_SENSOR_PIXEL_MODE =                                 // byte (acamera_metadata_enum_android_sensor_pixel_mode_t)
+            ACAMERA_SENSOR_START + 32,
+    /**
+     * <p>Whether <code>RAW</code> images requested have their bayer pattern as described by
+     * ACAMERA_SENSOR_INFO_BINNING_FACTOR.</p>
+     *
+     * @see ACAMERA_SENSOR_INFO_BINNING_FACTOR
+     *
+     * <p>Type: byte (acamera_metadata_enum_android_sensor_raw_binning_factor_used_t)</p>
+     *
+     * <p>This tag may appear in:
+     * <ul>
+     *   <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
+     * </ul></p>
+     *
+     * <p>This key will only be present in devices advertising the
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR">CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR</a>
+     * capability which also advertise the <code>REMOSAIC_REPROCESSING</code> capability. On all other devices
+     * RAW targets will have a regular bayer pattern.</p>
+     */
+    ACAMERA_SENSOR_RAW_BINNING_FACTOR_USED =                    // byte (acamera_metadata_enum_android_sensor_raw_binning_factor_used_t)
+            ACAMERA_SENSOR_START + 33,
     ACAMERA_SENSOR_END,
 
     /**
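A hedged sketch of opting a request into the maximum resolution mode described by ACAMERA_SENSOR_PIXEL_MODE above; 'request' is assumed to come from ACameraDevice_createCaptureRequest(), and ACAMERA_SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION is assumed to be the corresponding enum value defined further down this header:

    uint8_t pixelMode = ACAMERA_SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION;
    ACaptureRequest_setEntry_u8(request, ACAMERA_SENSOR_PIXEL_MODE, /*count*/ 1, &pixelMode);

The targets attached to such a request must come from the maximum-resolution stream configuration map, since the two configuration maps are exclusive per the documentation above.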
@@ -4955,6 +5212,120 @@
      */
     ACAMERA_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE =      // int32[4]
             ACAMERA_SENSOR_INFO_START + 10,
+    /**
+     * <p>The area of the image sensor which corresponds to active pixels after any geometric
+     * distortion correction has been applied, when the sensor runs in maximum resolution mode.</p>
+     *
+     * <p>Type: int32[4]</p>
+     *
+     * <p>This tag may appear in:
+     * <ul>
+     *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+     * </ul></p>
+     *
+     * <p>Analogous to ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE, when ACAMERA_SENSOR_PIXEL_MODE
+     * is set to
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION">CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION</a>.
+     * Refer to ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE for details, with sensor array related keys
+     * replaced with their
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION">CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION</a>
+     * counterparts.
+     * This key will only be present for devices which advertise the
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR">CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR</a>
+     * capability.</p>
+     * <p>The data representation is <code>int[4]</code>, which maps to <code>(left, top, width, height)</code>.</p>
+     *
+     * @see ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE
+     * @see ACAMERA_SENSOR_PIXEL_MODE
+     */
+    ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE_MAXIMUM_RESOLUTION =  // int32[4]
+            ACAMERA_SENSOR_INFO_START + 11,
+    /**
+     * <p>Dimensions of the full pixel array, possibly
+     * including black calibration pixels, when the sensor runs in maximum resolution mode.
+     * Analogous to ACAMERA_SENSOR_INFO_PIXEL_ARRAY_SIZE, when ACAMERA_SENSOR_PIXEL_MODE is
+     * set to
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION">CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION</a>.</p>
+     *
+     * @see ACAMERA_SENSOR_INFO_PIXEL_ARRAY_SIZE
+     * @see ACAMERA_SENSOR_PIXEL_MODE
+     *
+     * <p>Type: int32[2]</p>
+     *
+     * <p>This tag may appear in:
+     * <ul>
+     *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+     * </ul></p>
+     *
+     * <p>The pixel count of the full pixel array of the image sensor, which covers
+     * ACAMERA_SENSOR_INFO_PHYSICAL_SIZE area. This represents the full pixel dimensions of
+     * the raw buffers produced by this sensor, when it runs in maximum resolution mode. That
+     * is, when ACAMERA_SENSOR_PIXEL_MODE is set to
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION">CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION</a>.
+     * This key will only be present for devices which advertise the
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR">CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR</a>
+     * capability.</p>
+     *
+     * @see ACAMERA_SENSOR_INFO_PHYSICAL_SIZE
+     * @see ACAMERA_SENSOR_PIXEL_MODE
+     */
+    ACAMERA_SENSOR_INFO_PIXEL_ARRAY_SIZE_MAXIMUM_RESOLUTION =   // int32[2]
+            ACAMERA_SENSOR_INFO_START + 12,
+    /**
+     * <p>The area of the image sensor which corresponds to active pixels prior to the
+     * application of any geometric distortion correction, when the sensor runs in maximum
+     * resolution mode. This key must be used for crop / metering regions, only when
+     * ACAMERA_SENSOR_PIXEL_MODE is set to
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION">CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION</a>.</p>
+     *
+     * @see ACAMERA_SENSOR_PIXEL_MODE
+     *
+     * <p>Type: int32[4]</p>
+     *
+     * <p>This tag may appear in:
+     * <ul>
+     *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+     * </ul></p>
+     *
+     * <p>Analogous to ACAMERA_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE,
+     * when ACAMERA_SENSOR_PIXEL_MODE is set to
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION">CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION</a>.
+     * This key will only be present for devices which advertise the
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR">CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR</a>
+     * capability.</p>
+     * <p>The data representation is <code>int[4]</code>, which maps to <code>(left, top, width, height)</code>.</p>
+     *
+     * @see ACAMERA_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE
+     * @see ACAMERA_SENSOR_PIXEL_MODE
+     */
+    ACAMERA_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE_MAXIMUM_RESOLUTION = 
+                                                                // int32[4]
+            ACAMERA_SENSOR_INFO_START + 13,
+    /**
+     * <p>Dimensions of the group of pixels which are under the same color filter.
+     * This specifies the width and height (pair of integers) of the group of pixels which fall
+     * under the same color filter for ULTRA_HIGH_RESOLUTION sensors.</p>
+     *
+     * <p>Type: int32[2]</p>
+     *
+     * <p>This tag may appear in:
+     * <ul>
+     *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+     * </ul></p>
+     *
+     * <p>Sensors can have pixels grouped together under the same color filter in order
+     * to improve various aspects of imaging, such as noise reduction and low-light
+     * performance. These groups can come in various sizes, such as 2x2 (quad Bayer) or
+     * 3x3 (nona Bayer). This key specifies the width and height of the pixels grouped under
+     * the same color filter.</p>
+     * <p>This key will not be present if REMOSAIC_REPROCESSING is not supported, since RAW images
+     * will have a regular Bayer pattern.</p>
+     * <p>This key will not be present for sensors which don't have the
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR">CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR</a>
+     * capability.</p>
+     */
+    ACAMERA_SENSOR_INFO_BINNING_FACTOR =                        // int32[2]
+            ACAMERA_SENSOR_INFO_START + 14,
     ACAMERA_SENSOR_INFO_END,
 
     /**
@@ -5355,7 +5726,7 @@
      * </ul></p>
      *
      * <p>Since optical image stabilization generally involves motion much faster than the duration
-     * of individualq image exposure, multiple OIS samples can be included for a single capture
+     * of individual image exposure, multiple OIS samples can be included for a single capture
      * result. For example, if the OIS reporting operates at 200 Hz, a typical camera operating
      * at 30fps may have 6-7 OIS samples per capture result. This information can be combined
      * with the rolling shutter skew to account for lens motion during image exposure in
@@ -6160,6 +6531,162 @@
      */
     ACAMERA_DEPTH_AVAILABLE_DYNAMIC_DEPTH_STALL_DURATIONS =     // int64[4*n]
             ACAMERA_DEPTH_START + 8,
+    /**
+     * <p>The available depth dataspace stream
+     * configurations that this camera device supports
+     * (i.e. format, width, height, output/input stream) when a CaptureRequest is submitted with
+     * ACAMERA_SENSOR_PIXEL_MODE set to
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION">CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION</a>.</p>
+     *
+     * @see ACAMERA_SENSOR_PIXEL_MODE
+     *
+     * <p>Type: int32[n*4] (acamera_metadata_enum_android_depth_available_depth_stream_configurations_maximum_resolution_t)</p>
+     *
+     * <p>This tag may appear in:
+     * <ul>
+     *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+     * </ul></p>
+     *
+     * <p>Analogous to ACAMERA_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS, for configurations which
+     * are applicable when ACAMERA_SENSOR_PIXEL_MODE is set to
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION">CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION</a>.</p>
+     *
+     * @see ACAMERA_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS
+     * @see ACAMERA_SENSOR_PIXEL_MODE
+     */
+    ACAMERA_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION = 
+                                                                // int32[n*4] (acamera_metadata_enum_android_depth_available_depth_stream_configurations_maximum_resolution_t)
+            ACAMERA_DEPTH_START + 9,
+    /**
+     * <p>This lists the minimum frame duration for each
+     * format/size combination for depth output formats when a CaptureRequest is submitted with
+     * ACAMERA_SENSOR_PIXEL_MODE set to
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION">CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION</a>.</p>
+     *
+     * @see ACAMERA_SENSOR_PIXEL_MODE
+     *
+     * <p>Type: int64[4*n]</p>
+     *
+     * <p>This tag may appear in:
+     * <ul>
+     *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+     * </ul></p>
+     *
+     * <p>Analogous to ACAMERA_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS, for configurations which
+     * are applicable when ACAMERA_SENSOR_PIXEL_MODE is set to
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION">CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION</a>.</p>
+     * <p>See ACAMERA_SENSOR_FRAME_DURATION and
+     * ACAMERA_SCALER_AVAILABLE_STALL_DURATIONS_MAXIMUM_RESOLUTION for more details about
+     * calculating the max frame rate.</p>
+     *
+     * @see ACAMERA_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS
+     * @see ACAMERA_SCALER_AVAILABLE_STALL_DURATIONS_MAXIMUM_RESOLUTION
+     * @see ACAMERA_SENSOR_FRAME_DURATION
+     * @see ACAMERA_SENSOR_PIXEL_MODE
+     */
+    ACAMERA_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS_MAXIMUM_RESOLUTION = 
+                                                                // int64[4*n]
+            ACAMERA_DEPTH_START + 10,
+    /**
+     * <p>This lists the maximum stall duration for each
+     * output format/size combination for depth streams for CaptureRequests where
+     * ACAMERA_SENSOR_PIXEL_MODE is set to
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION">CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION</a>.</p>
+     *
+     * @see ACAMERA_SENSOR_PIXEL_MODE
+     *
+     * <p>Type: int64[4*n]</p>
+     *
+     * <p>This tag may appear in:
+     * <ul>
+     *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+     * </ul></p>
+     *
+     * <p>Analogous to ACAMERA_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS, for configurations which
+     * are applicable when ACAMERA_SENSOR_PIXEL_MODE is set to
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION">CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION</a>.</p>
+     *
+     * @see ACAMERA_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS
+     * @see ACAMERA_SENSOR_PIXEL_MODE
+     */
+    ACAMERA_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS_MAXIMUM_RESOLUTION = 
+                                                                // int64[4*n]
+            ACAMERA_DEPTH_START + 11,
+    /**
+     * <p>The available dynamic depth dataspace stream
+     * configurations that this camera device supports (i.e. format, width, height,
+     * output/input stream) for CaptureRequests where ACAMERA_SENSOR_PIXEL_MODE is set to
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION">CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION</a>.</p>
+     *
+     * @see ACAMERA_SENSOR_PIXEL_MODE
+     *
+     * <p>Type: int32[n*4] (acamera_metadata_enum_android_depth_available_dynamic_depth_stream_configurations_maximum_resolution_t)</p>
+     *
+     * <p>This tag may appear in:
+     * <ul>
+     *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+     * </ul></p>
+     *
+     * <p>Analogous to ACAMERA_DEPTH_AVAILABLE_DYNAMIC_DEPTH_STREAM_CONFIGURATIONS, for configurations
+     * which are applicable when ACAMERA_SENSOR_PIXEL_MODE is set to
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION">CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION</a>.</p>
+     *
+     * @see ACAMERA_DEPTH_AVAILABLE_DYNAMIC_DEPTH_STREAM_CONFIGURATIONS
+     * @see ACAMERA_SENSOR_PIXEL_MODE
+     */
+    ACAMERA_DEPTH_AVAILABLE_DYNAMIC_DEPTH_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION = 
+                                                                // int32[n*4] (acamera_metadata_enum_android_depth_available_dynamic_depth_stream_configurations_maximum_resolution_t)
+            ACAMERA_DEPTH_START + 12,
+    /**
+     * <p>This lists the minimum frame duration for each
+     * format/size combination for dynamic depth output streams for CaptureRequests where
+     * ACAMERA_SENSOR_PIXEL_MODE is set to
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION">CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION</a>.</p>
+     *
+     * @see ACAMERA_SENSOR_PIXEL_MODE
+     *
+     * <p>Type: int64[4*n]</p>
+     *
+     * <p>This tag may appear in:
+     * <ul>
+     *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+     * </ul></p>
+     *
+     * <p>Analogous to ACAMERA_DEPTH_AVAILABLE_DYNAMIC_DEPTH_MIN_FRAME_DURATIONS, for configurations
+     * which are applicable when ACAMERA_SENSOR_PIXEL_MODE is set to
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION">CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION</a>.</p>
+     *
+     * @see ACAMERA_DEPTH_AVAILABLE_DYNAMIC_DEPTH_MIN_FRAME_DURATIONS
+     * @see ACAMERA_SENSOR_PIXEL_MODE
+     */
+    ACAMERA_DEPTH_AVAILABLE_DYNAMIC_DEPTH_MIN_FRAME_DURATIONS_MAXIMUM_RESOLUTION = 
+                                                                // int64[4*n]
+            ACAMERA_DEPTH_START + 13,
+    /**
+     * <p>This lists the maximum stall duration for each
+     * output format/size combination for dynamic depth streams for CaptureRequests where
+     * ACAMERA_SENSOR_PIXEL_MODE is set to
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION">CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION</a>.</p>
+     *
+     * @see ACAMERA_SENSOR_PIXEL_MODE
+     *
+     * <p>Type: int64[4*n]</p>
+     *
+     * <p>This tag may appear in:
+     * <ul>
+     *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+     * </ul></p>
+     *
+     * <p>Analogous to ACAMERA_DEPTH_AVAILABLE_DYNAMIC_DEPTH_STALL_DURATIONS, for configurations
+     * which are applicable when ACAMERA_SENSOR_PIXEL_MODE is set to
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION">CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION</a>.</p>
+     *
+     * @see ACAMERA_DEPTH_AVAILABLE_DYNAMIC_DEPTH_STALL_DURATIONS
+     * @see ACAMERA_SENSOR_PIXEL_MODE
+     */
+    ACAMERA_DEPTH_AVAILABLE_DYNAMIC_DEPTH_STALL_DURATIONS_MAXIMUM_RESOLUTION = 
+                                                                // int64[4*n]
+            ACAMERA_DEPTH_START + 14,
     ACAMERA_DEPTH_END,
 
     /**
@@ -6380,6 +6907,71 @@
      */
     ACAMERA_HEIC_AVAILABLE_HEIC_STALL_DURATIONS =               // int64[4*n]
             ACAMERA_HEIC_START + 2,
+    /**
+     * <p>The available HEIC (ISO/IEC 23008-12) stream
+     * configurations that this camera device supports
+     * (i.e. format, width, height, output/input stream).</p>
+     *
+     * <p>Type: int32[n*4] (acamera_metadata_enum_android_heic_available_heic_stream_configurations_maximum_resolution_t)</p>
+     *
+     * <p>This tag may appear in:
+     * <ul>
+     *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+     * </ul></p>
+     *
+     * <p>Refer to ACAMERA_HEIC_AVAILABLE_HEIC_STREAM_CONFIGURATIONS for details.</p>
+     * <p>All the configuration tuples <code>(format, width, height, input?)</code> will contain
+     * AIMAGE_FORMAT_HEIC format as OUTPUT only.</p>
+     *
+     * @see ACAMERA_HEIC_AVAILABLE_HEIC_STREAM_CONFIGURATIONS
+     */
+    ACAMERA_HEIC_AVAILABLE_HEIC_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION = 
+                                                                // int32[n*4] (acamera_metadata_enum_android_heic_available_heic_stream_configurations_maximum_resolution_t)
+            ACAMERA_HEIC_START + 3,
+    /**
+     * <p>This lists the minimum frame duration for each
+     * format/size combination for HEIC output formats for CaptureRequests where
+     * ACAMERA_SENSOR_PIXEL_MODE is set to
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION">CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION</a>.</p>
+     *
+     * @see ACAMERA_SENSOR_PIXEL_MODE
+     *
+     * <p>Type: int64[4*n]</p>
+     *
+     * <p>This tag may appear in:
+     * <ul>
+     *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+     * </ul></p>
+     *
+     * <p>Refer to ACAMERA_HEIC_AVAILABLE_HEIC_MIN_FRAME_DURATIONS for details.</p>
+     *
+     * @see ACAMERA_HEIC_AVAILABLE_HEIC_MIN_FRAME_DURATIONS
+     */
+    ACAMERA_HEIC_AVAILABLE_HEIC_MIN_FRAME_DURATIONS_MAXIMUM_RESOLUTION = 
+                                                                // int64[4*n]
+            ACAMERA_HEIC_START + 4,
+    /**
+     * <p>This lists the maximum stall duration for each
+     * output format/size combination for HEIC streams for CaptureRequests where
+     * ACAMERA_SENSOR_PIXEL_MODE is set to
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION">CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION</a>.</p>
+     *
+     * @see ACAMERA_SENSOR_PIXEL_MODE
+     *
+     * <p>Type: int64[4*n]</p>
+     *
+     * <p>This tag may appear in:
+     * <ul>
+     *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+     * </ul></p>
+     *
+     * <p>Refer to ACAMERA_HEIC_AVAILABLE_HEIC_STALL_DURATIONS for details.</p>
+     *
+     * @see ACAMERA_HEIC_AVAILABLE_HEIC_STALL_DURATIONS
+     */
+    ACAMERA_HEIC_AVAILABLE_HEIC_STALL_DURATIONS_MAXIMUM_RESOLUTION = 
+                                                                // int64[4*n]
+            ACAMERA_HEIC_START + 5,
     ACAMERA_HEIC_END,
 
 } acamera_metadata_tag_t;
@@ -8330,6 +8922,20 @@
      */
     ACAMERA_REQUEST_AVAILABLE_CAPABILITIES_SYSTEM_CAMERA             = 14,
 
+    /**
+     * <p>This camera device is capable of producing ultra high resolution images in
+     * addition to the image sizes described in
+     * <code>android.scaler.streamConfigurationMap</code>.
+     * It can operate in 'default' mode and 'max resolution' mode. It generally achieves this
+     * by binning pixels in 'default' mode and not binning them in 'max resolution' mode.
+     * <code>android.scaler.streamConfigurationMap</code> describes the streams supported in
+     * 'default' mode.
+     * The stream configurations supported in 'max resolution' mode are described by
+     * <code>android.scaler.streamConfigurationMapMaximumResolution</code>.</p>
+     */
+    ACAMERA_REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR
+                                                                      = 16,
+
 } acamera_metadata_enum_android_request_available_capabilities_t;
 
 
@@ -8475,6 +9081,26 @@
 
 } acamera_metadata_enum_android_scaler_rotate_and_crop_t;
 
+// ACAMERA_SCALER_PHYSICAL_CAMERA_MULTI_RESOLUTION_STREAM_CONFIGURATIONS
+typedef enum acamera_metadata_enum_acamera_scaler_physical_camera_multi_resolution_stream_configurations {
+    ACAMERA_SCALER_PHYSICAL_CAMERA_MULTI_RESOLUTION_STREAM_CONFIGURATIONS_OUTPUT
+                                                                      = 0,
+
+    ACAMERA_SCALER_PHYSICAL_CAMERA_MULTI_RESOLUTION_STREAM_CONFIGURATIONS_INPUT
+                                                                      = 1,
+
+} acamera_metadata_enum_android_scaler_physical_camera_multi_resolution_stream_configurations_t;
+
+// ACAMERA_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION
+typedef enum acamera_metadata_enum_acamera_scaler_available_stream_configurations_maximum_resolution {
+    ACAMERA_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION_OUTPUT
+                                                                      = 0,
+
+    ACAMERA_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION_INPUT
+                                                                      = 1,
+
+} acamera_metadata_enum_android_scaler_available_stream_configurations_maximum_resolution_t;
+
 
 // ACAMERA_SENSOR_REFERENCE_ILLUMINANT1
 typedef enum acamera_metadata_enum_acamera_sensor_reference_illuminant1 {
@@ -8633,6 +9259,42 @@
 
 } acamera_metadata_enum_android_sensor_test_pattern_mode_t;
 
+// ACAMERA_SENSOR_PIXEL_MODE
+typedef enum acamera_metadata_enum_acamera_sensor_pixel_mode {
+    /**
+     * <p>This is the default sensor pixel mode. This is the only sensor pixel mode
+     * supported unless a camera device advertises
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR">CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR</a>.</p>
+     */
+    ACAMERA_SENSOR_PIXEL_MODE_DEFAULT                                = 0,
+
+    /**
+     * <p>This sensor pixel mode is offered by devices with capability
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR">CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR</a>.
+     * In this mode, sensors typically do not bin pixels and, as a result, can offer
+     * larger image sizes.</p>
+     */
+    ACAMERA_SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION                     = 1,
+
+} acamera_metadata_enum_android_sensor_pixel_mode_t;
+
+// ACAMERA_SENSOR_RAW_BINNING_FACTOR_USED
+typedef enum acamera_metadata_enum_acamera_sensor_raw_binning_factor_used {
+    /**
+     * <p>The <code>RAW</code> targets in this capture have ACAMERA_SENSOR_INFO_BINNING_FACTOR as the
+     * Bayer pattern.</p>
+     *
+     * @see ACAMERA_SENSOR_INFO_BINNING_FACTOR
+     */
+    ACAMERA_SENSOR_RAW_BINNING_FACTOR_USED_TRUE                      = 0,
+
+    /**
+     * <p>The <code>RAW</code> targets have a regular Bayer pattern in this capture.</p>
+     */
+    ACAMERA_SENSOR_RAW_BINNING_FACTOR_USED_FALSE                     = 1,
+
+} acamera_metadata_enum_android_sensor_raw_binning_factor_used_t;
+
 
 // ACAMERA_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT
 typedef enum acamera_metadata_enum_acamera_sensor_info_color_filter_arrangement {
@@ -9117,6 +9779,26 @@
 
 } acamera_metadata_enum_android_depth_available_dynamic_depth_stream_configurations_t;
 
+// ACAMERA_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION
+typedef enum acamera_metadata_enum_acamera_depth_available_depth_stream_configurations_maximum_resolution {
+    ACAMERA_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION_OUTPUT
+                                                                      = 0,
+
+    ACAMERA_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION_INPUT
+                                                                      = 1,
+
+} acamera_metadata_enum_android_depth_available_depth_stream_configurations_maximum_resolution_t;
+
+// ACAMERA_DEPTH_AVAILABLE_DYNAMIC_DEPTH_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION
+typedef enum acamera_metadata_enum_acamera_depth_available_dynamic_depth_stream_configurations_maximum_resolution {
+    ACAMERA_DEPTH_AVAILABLE_DYNAMIC_DEPTH_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION_OUTPUT
+                                                                      = 0,
+
+    ACAMERA_DEPTH_AVAILABLE_DYNAMIC_DEPTH_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION_INPUT
+                                                                      = 1,
+
+} acamera_metadata_enum_android_depth_available_dynamic_depth_stream_configurations_maximum_resolution_t;
+
 
 // ACAMERA_LOGICAL_MULTI_CAMERA_SENSOR_SYNC_TYPE
 typedef enum acamera_metadata_enum_acamera_logical_multi_camera_sensor_sync_type {
@@ -9168,6 +9850,16 @@
 
 } acamera_metadata_enum_android_heic_available_heic_stream_configurations_t;
 
+// ACAMERA_HEIC_AVAILABLE_HEIC_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION
+typedef enum acamera_metadata_enum_acamera_heic_available_heic_stream_configurations_maximum_resolution {
+    ACAMERA_HEIC_AVAILABLE_HEIC_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION_OUTPUT
+                                                                      = 0,
+
+    ACAMERA_HEIC_AVAILABLE_HEIC_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION_INPUT
+                                                                      = 1,
+
+} acamera_metadata_enum_android_heic_available_heic_stream_configurations_maximum_resolution_t;
+
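A minimal, illustrative sketch (not part of this patch) of how an NDK client could use the
tags added above: it checks for the ULTRA_HIGH_RESOLUTION_SENSOR capability, reads the
maximum-resolution pixel array size and the binning factor from the static characteristics,
and asks for the unbinned readout by setting ACAMERA_SENSOR_PIXEL_MODE on a capture request.
It assumes a valid ACameraManager, camera id, and an already-created ACaptureRequest; most
error handling is omitted.

#include <stdint.h>
#include <camera/NdkCameraManager.h>
#include <camera/NdkCameraMetadata.h>
#include <camera/NdkCameraMetadataTags.h>
#include <camera/NdkCaptureRequest.h>

// Returns true if the request was switched to maximum resolution mode.
static bool requestMaximumResolution(ACameraManager* manager, const char* cameraId,
                                     ACaptureRequest* request) {
    ACameraMetadata* chars = nullptr;
    if (ACameraManager_getCameraCharacteristics(manager, cameraId, &chars) != ACAMERA_OK) {
        return false;
    }

    // Check whether the device advertises the ultra high resolution sensor capability.
    bool isUltraHighRes = false;
    ACameraMetadata_const_entry caps;
    if (ACameraMetadata_getConstEntry(chars, ACAMERA_REQUEST_AVAILABLE_CAPABILITIES,
                                      &caps) == ACAMERA_OK) {
        for (uint32_t i = 0; i < caps.count; i++) {
            if (caps.data.u8[i] ==
                    ACAMERA_REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR) {
                isUltraHighRes = true;
                break;
            }
        }
    }

    if (isUltraHighRes) {
        // Full (unbinned) pixel array dimensions: data.i32[0] x data.i32[1].
        ACameraMetadata_const_entry pixelArray;
        (void) ACameraMetadata_getConstEntry(
                chars, ACAMERA_SENSOR_INFO_PIXEL_ARRAY_SIZE_MAXIMUM_RESOLUTION, &pixelArray);

        // Color filter group size (e.g. 2x2 for quad Bayer sensors); may be absent
        // when REMOSAIC_REPROCESSING is not supported.
        ACameraMetadata_const_entry binning;
        (void) ACameraMetadata_getConstEntry(chars, ACAMERA_SENSOR_INFO_BINNING_FACTOR, &binning);

        // Ask for the unbinned readout on this request.
        uint8_t pixelMode = ACAMERA_SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION;
        ACaptureRequest_setEntry_u8(request, ACAMERA_SENSOR_PIXEL_MODE, 1, &pixelMode);
    }

    ACameraMetadata_free(chars);
    return isUltraHighRes;
}

For such a request, the output streams would need to come from the maximum-resolution stream
configuration tags (the *_MAXIMUM_RESOLUTION scaler/depth/HEIC keys above) rather than the
default stream configuration maps.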
 
 
 
diff --git a/camera/ndk/include/camera/NdkCameraWindowType.h b/camera/ndk/include/camera/NdkCameraWindowType.h
index df977da..0838fba 100644
--- a/camera/ndk/include/camera/NdkCameraWindowType.h
+++ b/camera/ndk/include/camera/NdkCameraWindowType.h
@@ -50,4 +50,6 @@
 typedef ANativeWindow ACameraWindowType;
 #endif
 
+/** @} */
+
 #endif //_NDK_CAMERA_WINDOW_TYPE_H
diff --git a/camera/ndk/include/camera/NdkCaptureRequest.h b/camera/ndk/include/camera/NdkCaptureRequest.h
index a4dc374..d83c5b3 100644
--- a/camera/ndk/include/camera/NdkCaptureRequest.h
+++ b/camera/ndk/include/camera/NdkCaptureRequest.h
@@ -44,10 +44,10 @@
 
 __BEGIN_DECLS
 
-// Container for output targets
+/** Container for output targets */
 typedef struct ACameraOutputTargets ACameraOutputTargets;
 
-// Container for a single output target
+/** Container for a single output target */
 typedef struct ACameraOutputTarget ACameraOutputTarget;
 
 /**
@@ -383,10 +383,10 @@
  * Set/change a camera capture control entry with unsigned 8 bits data type for
  * a physical camera backing a logical multi-camera device.
  *
- * <p>Same as ACaptureRequest_setEntry_u8, except that if {@link tag} is contained
+ * <p>Same as ACaptureRequest_setEntry_u8, except that if tag is contained
  * in {@link ACAMERA_REQUEST_AVAILABLE_PHYSICAL_CAMERA_REQUEST_KEYS}, this function
  * sets the entry for a particular physical sub-camera backing the logical multi-camera.
- * If {@link tag} is not contained in
+ * If tag is not contained in
  * {@link ACAMERA_REQUEST_AVAILABLE_PHYSICAL_CAMERA_REQUEST_KEYS}, the key will be ignored
  * by the camera device.</p>
  *
@@ -413,10 +413,10 @@
  * Set/change a camera capture control entry with signed 32 bits data type for
  * a physical camera of a logical multi-camera device.
  *
- * <p>Same as ACaptureRequest_setEntry_i32, except that if {@link tag} is contained
+ * <p>Same as ACaptureRequest_setEntry_i32, except that if tag is contained
  * in {@link ACAMERA_REQUEST_AVAILABLE_PHYSICAL_CAMERA_REQUEST_KEYS}, this function
  * sets the entry for a particular physical sub-camera backing the logical multi-camera.
- * If {@link tag} is not contained in
+ * If tag is not contained in
  * {@link ACAMERA_REQUEST_AVAILABLE_PHYSICAL_CAMERA_REQUEST_KEYS}, the key will be ignored
  * by the camera device.</p>
  *
@@ -443,10 +443,10 @@
  * Set/change a camera capture control entry with float data type for
  * a physical camera of a logical multi-camera device.
  *
- * <p>Same as ACaptureRequest_setEntry_float, except that if {@link tag} is contained
+ * <p>Same as ACaptureRequest_setEntry_float, except that if tag is contained
  * in {@link ACAMERA_REQUEST_AVAILABLE_PHYSICAL_CAMERA_REQUEST_KEYS}, this function
  * sets the entry for a particular physical sub-camera backing the logical multi-camera.
- * If {@link tag} is not contained in
+ * If tag is not contained in
  * {@link ACAMERA_REQUEST_AVAILABLE_PHYSICAL_CAMERA_REQUEST_KEYS}, the key will be ignored
  * by the camera device.</p>
  *
@@ -473,10 +473,10 @@
  * Set/change a camera capture control entry with signed 64 bits data type for
  * a physical camera of a logical multi-camera device.
  *
- * <p>Same as ACaptureRequest_setEntry_i64, except that if {@link tag} is contained
+ * <p>Same as ACaptureRequest_setEntry_i64, except that if tag is contained
  * in {@link ACAMERA_REQUEST_AVAILABLE_PHYSICAL_CAMERA_REQUEST_KEYS}, this function
  * sets the entry for a particular physical sub-camera backing the logical multi-camera.
- * If {@link tag} is not contained in
+ * If tag is not contained in
  * {@link ACAMERA_REQUEST_AVAILABLE_PHYSICAL_CAMERA_REQUEST_KEYS}, the key will be ignored
  * by the camera device.</p>
  *
@@ -503,10 +503,10 @@
  * Set/change a camera capture control entry with double data type for
  * a physical camera of a logical multi-camera device.
  *
- * <p>Same as ACaptureRequest_setEntry_double, except that if {@link tag} is contained
+ * <p>Same as ACaptureRequest_setEntry_double, except that if tag is contained
  * in {@link ACAMERA_REQUEST_AVAILABLE_PHYSICAL_CAMERA_REQUEST_KEYS}, this function
  * sets the entry for a particular physical sub-camera backing the logical multi-camera.
- * If {@link tag} is not contained in
+ * If tag is not contained in
  * {@link ACAMERA_REQUEST_AVAILABLE_PHYSICAL_CAMERA_REQUEST_KEYS}, the key will be ignored
  * by the camera device.</p>
  *
@@ -533,10 +533,10 @@
  * Set/change a camera capture control entry with rational data type for
  * a physical camera of a logical multi-camera device.
  *
- * <p>Same as ACaptureRequest_setEntry_rational, except that if {@link tag} is contained
+ * <p>Same as ACaptureRequest_setEntry_rational, except that if tag is contained
  * in {@link ACAMERA_REQUEST_AVAILABLE_PHYSICAL_CAMERA_REQUEST_KEYS}, this function
  * sets the entry for a particular physical sub-camera backing the logical multi-camera.
- * If {@link tag} is not contained in
+ * If tag is not contained in
  * {@link ACAMERA_REQUEST_AVAILABLE_PHYSICAL_CAMERA_REQUEST_KEYS}, the key will be ignored
  * by the camera device.</p>
  *
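For the per-physical-camera setters documented in this header, a short sketch (illustrative,
not part of this patch): it assumes the ACaptureRequest was created with the desired physical
camera ids and that the key is listed in ACAMERA_REQUEST_AVAILABLE_PHYSICAL_CAMERA_REQUEST_KEYS
for the device; otherwise the camera device ignores the entry, as described above.

#include <stdint.h>
#include <camera/NdkCameraMetadataTags.h>
#include <camera/NdkCaptureRequest.h>

// Apply exposure compensation to one physical sub-camera of a logical multi-camera.
static camera_status_t setPhysicalAeCompensation(ACaptureRequest* request,
                                                 const char* physicalCameraId,
                                                 int32_t compensation) {
    // Ignored by the device unless this key is advertised in
    // ACAMERA_REQUEST_AVAILABLE_PHYSICAL_CAMERA_REQUEST_KEYS.
    return ACaptureRequest_setEntry_physicalCamera_i32(
            request, physicalCameraId, ACAMERA_CONTROL_AE_EXPOSURE_COMPENSATION,
            /*count*/ 1, &compensation);
}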
diff --git a/cmds/stagefright/Android.mk b/cmds/stagefright/Android.mk
index ef4c568..03a8dc9 100644
--- a/cmds/stagefright/Android.mk
+++ b/cmds/stagefright/Android.mk
@@ -16,6 +16,9 @@
         libstagefright_foundation libjpeg libui libgui libcutils liblog \
         libhidlbase libdatasource libaudioclient \
         android.hardware.media.omx@1.0 \
+        media_permission-aidl-cpp
+
+LOCAL_STATIC_LIBRARIES := media_permission-aidl-cpp
 
 LOCAL_C_INCLUDES:= \
         frameworks/av/media/libstagefright \
@@ -48,7 +51,8 @@
 
 LOCAL_SHARED_LIBRARIES := \
         libstagefright libmedia liblog libutils libbinder \
-        libstagefright_foundation libdatasource libaudioclient
+        libstagefright_foundation libdatasource libaudioclient \
+        media_permission-aidl-cpp
 
 LOCAL_C_INCLUDES:= \
         frameworks/av/camera/include \
@@ -85,7 +89,8 @@
 LOCAL_C_INCLUDES:= \
         frameworks/av/media/libstagefright \
         frameworks/native/include/media/openmax \
-        frameworks/native/include/media/hardware
+        frameworks/native/include/media/hardware \
+        media_permission-aidl-cpp
 
 LOCAL_CFLAGS += -Wno-multichar -Werror -Wall
 
@@ -113,7 +118,8 @@
 
 LOCAL_SHARED_LIBRARIES := \
         libstagefright libmedia liblog libutils libbinder \
-        libstagefright_foundation libaudioclient
+        libstagefright_foundation libaudioclient \
+        media_permission-aidl-cpp
 
 LOCAL_C_INCLUDES:= \
         frameworks/av/media/libstagefright \
diff --git a/cmds/stagefright/audioloop.cpp b/cmds/stagefright/audioloop.cpp
index 84a6d6b..c86a611 100644
--- a/cmds/stagefright/audioloop.cpp
+++ b/cmds/stagefright/audioloop.cpp
@@ -24,6 +24,7 @@
 
 #include <utils/String16.h>
 
+#include <android/media/permission/Identity.h>
 #include <binder/ProcessState.h>
 #include <media/mediarecorder.h>
 #include <media/stagefright/foundation/ADebug.h>
@@ -38,6 +39,8 @@
 
 using namespace android;
 
+using media::permission::Identity;
+
 static void usage(const char* name)
 {
     fprintf(stderr, "Usage: %s [-d du.ration] [-m] [-w] [-N name] [<output-file>]\n", name);
@@ -110,9 +113,10 @@
         audio_attributes_t attr = AUDIO_ATTRIBUTES_INITIALIZER;
         attr.source = AUDIO_SOURCE_MIC;
 
+        // TODO b/182392769: use identity util
         source = new AudioSource(
                 &attr,
-                String16(),
+                Identity(),
                 sampleRate,
                 channels);
     } else {
diff --git a/drm/libmediadrm/CryptoHal.cpp b/drm/libmediadrm/CryptoHal.cpp
index 9d39f83..3257f71 100644
--- a/drm/libmediadrm/CryptoHal.cpp
+++ b/drm/libmediadrm/CryptoHal.cpp
@@ -146,6 +146,9 @@
                 plugin = hPlugin;
             }
         );
+    if (!hResult.isOk()) {
+        mInitCheck = DEAD_OBJECT;
+    }
     return plugin;
 }
 
@@ -179,10 +182,8 @@
         }
     }
 
-    if (mPlugin == NULL) {
-        mInitCheck = ERROR_UNSUPPORTED;
-    } else {
-        mInitCheck = OK;
+    if (mInitCheck == NO_INIT) {
+        mInitCheck = mPlugin == NULL ? ERROR_UNSUPPORTED : OK;
     }
 
     return mInitCheck;
@@ -342,6 +343,7 @@
 
     Return<void> hResult;
 
+    mLock.unlock();
     if (mPluginV1_2 != NULL) {
         hResult = mPluginV1_2->decrypt_1_2(secure, toHidlArray16(keyId), toHidlArray16(iv),
                 hMode, hPattern, hSubSamples, hSource, offset, hDestination,
diff --git a/drm/libmediadrm/DrmHal.cpp b/drm/libmediadrm/DrmHal.cpp
index f8f2bc6..253a1fa 100644
--- a/drm/libmediadrm/DrmHal.cpp
+++ b/drm/libmediadrm/DrmHal.cpp
@@ -16,13 +16,9 @@
 
 //#define LOG_NDEBUG 0
 #define LOG_TAG "DrmHal"
-#include <iomanip>
-
-#include <utils/Log.h>
-
-#include <android/binder_manager.h>
 
 #include <aidl/android/media/BnResourceManagerClient.h>
+#include <android/binder_manager.h>
 #include <android/hardware/drm/1.2/types.h>
 #include <android/hidl/manager/1.2/IServiceManager.h>
 #include <hidl/ServiceManagement.h>
@@ -40,7 +36,9 @@
 #include <mediadrm/DrmSessionManager.h>
 #include <mediadrm/IDrmMetricsConsumer.h>
 #include <mediadrm/DrmUtils.h>
+#include <utils/Log.h>
 
+#include <iomanip>
 #include <vector>
 
 using drm::V1_0::KeyedVector;
@@ -319,8 +317,7 @@
     closeOpenSessions();
 
     Mutex::Autolock autoLock(mLock);
-    reportPluginMetrics();
-    reportFrameworkMetrics();
+    reportFrameworkMetrics(reportPluginMetrics());
 
     setListener(NULL);
     mInitCheck = NO_INIT;
@@ -340,15 +337,15 @@
 }
 
 std::vector<sp<IDrmFactory>> DrmHal::makeDrmFactories() {
-    std::vector<sp<IDrmFactory>> factories(DrmUtils::MakeDrmFactories());
+    static std::vector<sp<IDrmFactory>> factories(DrmUtils::MakeDrmFactories());
     if (factories.size() == 0) {
         // must be in passthrough mode, load the default passthrough service
         auto passthrough = IDrmFactory::getService();
         if (passthrough != NULL) {
-            ALOGI("makeDrmFactories: using default passthrough drm instance");
+            DrmUtils::LOG2BI("makeDrmFactories: using default passthrough drm instance");
             factories.push_back(passthrough);
         } else {
-            ALOGE("Failed to find any drm factories");
+            DrmUtils::LOG2BE("Failed to find any drm factories");
         }
     }
     return factories;
@@ -364,7 +361,7 @@
     Return<void> hResult = factory->createPlugin(uuid, appPackageName.string(),
             [&](Status status, const sp<IDrmPlugin>& hPlugin) {
                 if (status != Status::OK) {
-                    ALOGE("Failed to make drm plugin");
+                    DrmUtils::LOG2BE(uuid, "Failed to make drm plugin: %d", status);
                     return;
                 }
                 plugin = hPlugin;
@@ -372,7 +369,8 @@
         );
 
     if (!hResult.isOk()) {
-        ALOGE("createPlugin remote call failed");
+        DrmUtils::LOG2BE(uuid, "createPlugin remote call failed: %s",
+                         hResult.description().c_str());
     }
 
     return plugin;
@@ -566,7 +564,8 @@
     Mutex::Autolock autoLock(mLock);
 
     for (ssize_t i = mFactories.size() - 1; i >= 0; i--) {
-        if (mFactories[i]->isCryptoSchemeSupported(uuid)) {
+        auto hResult = mFactories[i]->isCryptoSchemeSupported(uuid);
+        if (hResult.isOk() && hResult) {
             auto plugin = makeDrmPlugin(mFactories[i], uuid, appPackageName);
             if (plugin != NULL) {
                 mPlugin = plugin;
@@ -579,6 +578,7 @@
     }
 
     if (mPlugin == NULL) {
+        DrmUtils::LOG2BE(uuid, "No supported hal instance found");
         mInitCheck = ERROR_UNSUPPORTED;
     } else {
         mInitCheck = OK;
@@ -1463,7 +1463,7 @@
     return hResult.isOk() ? err : DEAD_OBJECT;
 }
 
-void DrmHal::reportFrameworkMetrics() const
+std::string DrmHal::reportFrameworkMetrics(const std::string& pluginMetrics) const
 {
     mediametrics_handle_t item(mediametrics_create("mediadrm"));
     mediametrics_setUid(item, mMetrics.GetAppUid());
@@ -1492,21 +1492,26 @@
     if (!b64EncodedMetrics.empty()) {
         mediametrics_setCString(item, "serialized_metrics", b64EncodedMetrics.c_str());
     }
+    if (!pluginMetrics.empty()) {
+        mediametrics_setCString(item, "plugin_metrics", pluginMetrics.c_str());
+    }
     if (!mediametrics_selfRecord(item)) {
         ALOGE("Failed to self record framework metrics");
     }
     mediametrics_delete(item);
+    return serializedMetrics;
 }
 
-void DrmHal::reportPluginMetrics() const
+std::string DrmHal::reportPluginMetrics() const
 {
     Vector<uint8_t> metricsVector;
     String8 vendor;
     String8 description;
+    std::string metricsString;
     if (getPropertyStringInternal(String8("vendor"), vendor) == OK &&
             getPropertyStringInternal(String8("description"), description) == OK &&
             getPropertyByteArrayInternal(String8("metrics"), metricsVector) == OK) {
-        std::string metricsString = toBase64StringNoPad(metricsVector.array(),
+        metricsString = toBase64StringNoPad(metricsVector.array(),
                                                         metricsVector.size());
         status_t res = android::reportDrmPluginMetrics(metricsString, vendor,
                                                        description, mMetrics.GetAppUid());
@@ -1514,6 +1519,7 @@
             ALOGE("Metrics were retrieved but could not be reported: %d", res);
         }
     }
+    return metricsString;
 }
 
 bool DrmHal::requiresSecureDecoder(const char *mime) const {
diff --git a/drm/libmediadrm/DrmUtils.cpp b/drm/libmediadrm/DrmUtils.cpp
index 82eadd9..ed3848d 100644
--- a/drm/libmediadrm/DrmUtils.cpp
+++ b/drm/libmediadrm/DrmUtils.cpp
@@ -27,6 +27,8 @@
 #include <android/hardware/drm/1.2/IDrmFactory.h>
 #include <android/hardware/drm/1.3/ICryptoFactory.h>
 #include <android/hardware/drm/1.3/IDrmFactory.h>
+#include <android/hardware/drm/1.4/ICryptoFactory.h>
+#include <android/hardware/drm/1.4/IDrmFactory.h>
 #include <android/hidl/manager/1.2/IServiceManager.h>
 #include <hidl/HidlSupport.h>
 
@@ -41,6 +43,9 @@
 #include <mediadrm/ICrypto.h>
 #include <mediadrm/IDrm.h>
 
+#include <map>
+#include <string>
+
 using HServiceManager = ::android::hidl::manager::V1_2::IServiceManager;
 using ::android::hardware::hidl_array;
 using ::android::hardware::hidl_string;
@@ -64,19 +69,19 @@
     return obj;
 }
 
-template <typename Hal, typename V>
-void MakeHidlFactories(const uint8_t uuid[16], V &factories) {
+template <typename Hal, typename V, typename M>
+void MakeHidlFactories(const uint8_t uuid[16], V &factories, M& instances) {
     sp<HServiceManager> serviceManager = HServiceManager::getService();
     if (serviceManager == nullptr) {
-        ALOGE("Failed to get service manager");
-        exit(-1);
+        LOG2BE("Failed to get service manager");
+        return;
     }
 
     serviceManager->listManifestByInterface(Hal::descriptor, [&](const hidl_vec<hidl_string> &registered) {
         for (const auto &instance : registered) {
             auto factory = Hal::getService(instance);
             if (factory != nullptr) {
-                ALOGI("found %s %s", Hal::descriptor, instance.c_str());
+                instances[instance.c_str()] = Hal::descriptor;
                 if (!uuid || factory->isCryptoSchemeSupported(uuid)) {
                     factories.push_back(factory);
                 }
@@ -85,6 +90,12 @@
     });
 }
 
+template <typename Hal, typename V>
+void MakeHidlFactories(const uint8_t uuid[16], V &factories) {
+    std::map<std::string, std::string> instances;
+    MakeHidlFactories<Hal>(uuid, factories, instances);
+}
+
 hidl_vec<uint8_t> toHidlVec(const void *ptr, size_t size) {
     hidl_vec<uint8_t> vec(size);
     if (ptr != nullptr) {
@@ -106,6 +117,7 @@
     factory->createPlugin(toHidlArray16(uuid), hidl_string(appPackageName),
                           [&](::V1_0::Status status, const sp<::V1_0::IDrmPlugin> &hPlugin) {
                               if (status != ::V1_0::Status::OK) {
+                                  LOG2BE(uuid, "MakeDrmPlugin failed: %d", status);
                                   return;
                               }
                               plugin = hPlugin;
@@ -120,6 +132,7 @@
     factory->createPlugin(toHidlArray16(uuid), toHidlVec(initData, initDataSize),
                           [&](::V1_0::Status status, const sp<::V1_0::ICryptoPlugin> &hPlugin) {
                               if (status != ::V1_0::Status::OK) {
+                                  LOG2BE(uuid, "MakeCryptoPlugin failed: %d", status);
                                   return;
                               }
                               plugin = hPlugin;
@@ -143,10 +156,15 @@
 
 std::vector<sp<::V1_0::IDrmFactory>> MakeDrmFactories(const uint8_t uuid[16]) {
     std::vector<sp<::V1_0::IDrmFactory>> drmFactories;
-    MakeHidlFactories<::V1_0::IDrmFactory>(uuid, drmFactories);
-    MakeHidlFactories<::V1_1::IDrmFactory>(uuid, drmFactories);
-    MakeHidlFactories<::V1_2::IDrmFactory>(uuid, drmFactories);
-    MakeHidlFactories<::V1_3::IDrmFactory>(uuid, drmFactories);
+    std::map<std::string, std::string> instances;
+    MakeHidlFactories<::V1_0::IDrmFactory>(uuid, drmFactories, instances);
+    MakeHidlFactories<::V1_1::IDrmFactory>(uuid, drmFactories, instances);
+    MakeHidlFactories<::V1_2::IDrmFactory>(uuid, drmFactories, instances);
+    MakeHidlFactories<::V1_3::IDrmFactory>(uuid, drmFactories, instances);
+    MakeHidlFactories<::V1_4::IDrmFactory>(uuid, drmFactories, instances);
+    for (auto const& entry : instances) {
+        LOG2BI("found instance=%s version=%s", entry.first.c_str(), entry.second.c_str());
+    }
     return drmFactories;
 }
 
@@ -165,6 +183,7 @@
     MakeHidlFactories<::V1_1::ICryptoFactory>(uuid, cryptoFactories);
     MakeHidlFactories<::V1_2::ICryptoFactory>(uuid, cryptoFactories);
     MakeHidlFactories<::V1_3::ICryptoFactory>(uuid, cryptoFactories);
+    MakeHidlFactories<::V1_4::ICryptoFactory>(uuid, cryptoFactories);
     return cryptoFactories;
 }
 
@@ -249,6 +268,8 @@
         return ERROR_DRM_PROVISIONING_CONFIG;
     case ::V1_4::Status::PROVISIONING_PARSE_ERROR:
         return ERROR_DRM_PROVISIONING_PARSE;
+    case ::V1_4::Status::PROVISIONING_REQUEST_REJECTED:
+        return ERROR_DRM_PROVISIONING_REQUEST_REJECTED;
     case ::V1_4::Status::RETRYABLE_PROVISIONING_ERROR:
         return ERROR_DRM_PROVISIONING_RETRY;
     case ::V1_4::Status::SECURE_STOP_RELEASE_ERROR:
@@ -265,5 +286,65 @@
     return ERROR_DRM_UNKNOWN;
 }
 
+namespace {
+char logPriorityToChar(::V1_4::LogPriority priority) {
+    char p = 'U';
+    switch (priority) {
+        case ::V1_4::LogPriority::VERBOSE:  p = 'V'; break;
+        case ::V1_4::LogPriority::DEBUG:    p = 'D'; break;
+        case ::V1_4::LogPriority::INFO:     p = 'I'; break;
+        case ::V1_4::LogPriority::WARN:     p = 'W'; break;
+        case ::V1_4::LogPriority::ERROR:    p = 'E'; break;
+        case ::V1_4::LogPriority::FATAL:    p = 'F'; break;
+        default: p = 'U'; break;
+    }
+    return p;
+}
+}  // namespace
+
+std::string GetExceptionMessage(status_t err, const char *msg,
+                                const Vector<::V1_4::LogMessage> &logs) {
+    String8 msg8;
+    if (msg) {
+        msg8 += msg;
+        msg8 += ": ";
+    }
+    auto errStr = StrCryptoError(err);
+    msg8 += errStr.c_str();
+
+    for (auto log : logs) {
+        time_t seconds = log.timeMs / 1000;
+        int ms = log.timeMs % 1000;
+        char buf[64] = {0};
+        std::string timeStr = "00-00 00:00:00";
+        if (strftime(buf, sizeof buf, "%m-%d %H:%M:%S", std::localtime(&seconds))) {
+            timeStr = buf;
+        }
+
+        char p = logPriorityToChar(log.priority);
+        msg8 += String8::format("\n%s.%03d %c %s", timeStr.c_str(), ms, p, log.message.c_str());
+    }
+
+    return msg8.c_str();
+}
+
+void LogBuffer::addLog(const ::V1_4::LogMessage &log) {
+    std::unique_lock<std::mutex> lock(mMutex);
+    mBuffer.push_back(log);
+    while (mBuffer.size() > MAX_CAPACITY) {
+        mBuffer.pop_front();
+    }
+}
+
+Vector<::V1_4::LogMessage> LogBuffer::getLogs() {
+    std::unique_lock<std::mutex> lock(mMutex);
+    Vector<::V1_4::LogMessage> logs;
+    for (auto log : mBuffer) {
+        logs.push_back(log);
+    }
+    return logs;
+}
+
+LogBuffer gLogBuf;
 }  // namespace DrmUtils
 }  // namespace android
diff --git a/drm/libmediadrm/include/mediadrm/DrmHal.h b/drm/libmediadrm/include/mediadrm/DrmHal.h
index a0aac30..c5206fa 100644
--- a/drm/libmediadrm/include/mediadrm/DrmHal.h
+++ b/drm/libmediadrm/include/mediadrm/DrmHal.h
@@ -241,8 +241,8 @@
 
     void writeByteArray(Parcel &obj, const hidl_vec<uint8_t>& array);
 
-    void reportPluginMetrics() const;
-    void reportFrameworkMetrics() const;
+    std::string reportPluginMetrics() const;
+    std::string reportFrameworkMetrics(const std::string& pluginMetrics) const;
     status_t getPropertyStringInternal(String8 const &name, String8 &value) const;
     status_t getPropertyByteArrayInternal(String8 const &name,
                                           Vector<uint8_t> &value) const;
diff --git a/drm/libmediadrm/interface/mediadrm/DrmUtils.h b/drm/libmediadrm/interface/mediadrm/DrmUtils.h
index 4a169ee..ec0b878 100644
--- a/drm/libmediadrm/interface/mediadrm/DrmUtils.h
+++ b/drm/libmediadrm/interface/mediadrm/DrmUtils.h
@@ -27,7 +27,16 @@
 #include <utils/String8.h>
 #include <utils/StrongPointer.h>
 #include <utils/Vector.h>
+#include <algorithm>
+#include <chrono>
+#include <cstddef>
+#include <cstdint>
 #include <ctime>
+#include <deque>
+#include <endian.h>
+#include <iterator>
+#include <mutex>
+#include <string>
 #include <vector>
 
 
@@ -42,6 +51,61 @@
 
 namespace DrmUtils {
 
+// Log APIs
+class LogBuffer {
+  public:
+    static const int MAX_CAPACITY = 100;
+    void addLog(const ::V1_4::LogMessage &log);
+    Vector<::V1_4::LogMessage> getLogs();
+
+  private:
+    std::deque<::V1_4::LogMessage> mBuffer;
+    std::mutex mMutex;
+};
+
+extern LogBuffer gLogBuf;
+
+static inline int formatBuffer(char *buf, size_t size, const char *msg) {
+    return snprintf(buf, size, "%s", msg);
+}
+
+template <typename First, typename... Args>
+static inline int formatBuffer(char *buf, size_t size, const char *fmt, First first, Args... args) {
+    return snprintf(buf, size, fmt, first, args...);
+}
+
+template <typename... Args>
+void LogToBuffer(android_LogPriority level, const char *fmt, Args... args) {
+    const int LOG_BUF_SIZE = 256;
+    char buf[LOG_BUF_SIZE];
+    int len = formatBuffer(buf, LOG_BUF_SIZE, fmt, args...);
+    if (len <= 0) {
+        return;
+    }
+    __android_log_write(level, LOG_TAG, buf);
+    if (level >= ANDROID_LOG_INFO) {
+        int64_t epochTimeMs =
+                std::chrono::system_clock::now().time_since_epoch() / std::chrono::milliseconds(1);
+        gLogBuf.addLog({epochTimeMs, static_cast<::V1_4::LogPriority>(level), buf});
+    }
+}
+
+template <typename... Args>
+void LogToBuffer(android_LogPriority level, const uint8_t uuid[16], const char *fmt, Args... args) {
+    const uint64_t* uuid2 = reinterpret_cast<const uint64_t*>(uuid);
+    std::string uuidFmt("uuid=[%lx %lx] ");
+    uuidFmt += fmt;
+    LogToBuffer(level, uuidFmt.c_str(), htobe64(uuid2[0]), htobe64(uuid2[1]), args...);
+}
+
+#ifndef LOG2BE
+#define LOG2BE(...) LogToBuffer(ANDROID_LOG_ERROR, __VA_ARGS__)
+#define LOG2BW(...) LogToBuffer(ANDROID_LOG_WARN, __VA_ARGS__)
+#define LOG2BI(...) LogToBuffer(ANDROID_LOG_INFO, __VA_ARGS__)
+#define LOG2BD(...) LogToBuffer(ANDROID_LOG_DEBUG, __VA_ARGS__)
+#define LOG2BV(...) LogToBuffer(ANDROID_LOG_VERBOSE, __VA_ARGS__)
+#endif
+
 bool UseDrmService();
 
 sp<IDrm> MakeDrm(status_t *pstatus = nullptr);
@@ -119,74 +183,54 @@
 template<typename T, typename U>
 status_t GetLogMessages(const sp<U> &obj, Vector<::V1_4::LogMessage> &logs) {
     sp<T> plugin = T::castFrom(obj);
-    if (plugin == NULL) {
-        return ERROR_UNSUPPORTED;
+    if (obj == NULL) {
+        LOG2BW("%s obj is null", U::descriptor);
+    } else if (plugin == NULL) {
+        LOG2BW("Cannot cast %s obj to %s plugin", U::descriptor, T::descriptor);
     }
 
     ::V1_4::Status err{};
+    std::vector<::V1_4::LogMessage> pluginLogs;
     ::V1_4::IDrmPlugin::getLogMessages_cb cb = [&](
             ::V1_4::Status status,
             hidl_vec<::V1_4::LogMessage> hLogs) {
-        if (::V1_4::Status::OK == status) {
+        if (::V1_4::Status::OK != status) {
             err = status;
             return;
         }
-        logs.appendArray(hLogs.data(), hLogs.size());
+        pluginLogs.assign(hLogs.data(), hLogs.data() + hLogs.size());
     };
 
-    Return<void> hResult = plugin->getLogMessages(cb);
-    if (!hResult.isOk()) {
-        return DEAD_OBJECT;
+    Return<void> hResult;
+    if (plugin != NULL) {
+        hResult = plugin->getLogMessages(cb);
     }
-    return toStatusT(err);
+    if (!hResult.isOk()) {
+        LOG2BW("%s::getLogMessages remote call failed %s",
+               T::descriptor, hResult.description().c_str());
+    }
+
+    auto allLogs(gLogBuf.getLogs());
+    LOG2BD("framework logs size %zu; plugin logs size %zu",
+           allLogs.size(), pluginLogs.size());
+    std::copy(pluginLogs.begin(), pluginLogs.end(), std::back_inserter(allLogs));
+    std::sort(allLogs.begin(), allLogs.end(),
+              [](const ::V1_4::LogMessage &a, const ::V1_4::LogMessage &b) {
+                  return a.timeMs < b.timeMs;
+              });
+
+    logs.appendVector(allLogs);
+    return OK;
 }
 
-namespace {
-static inline char logPriorityToChar(::V1_4::LogPriority priority) {
-    char p = 'U';
-    switch (priority) {
-        case ::V1_4::LogPriority::VERBOSE:  p = 'V'; break;
-        case ::V1_4::LogPriority::DEBUG:    p = 'D'; break;
-        case ::V1_4::LogPriority::INFO:     p = 'I'; break;
-        case ::V1_4::LogPriority::WARN:     p = 'W'; break;
-        case ::V1_4::LogPriority::ERROR:    p = 'E'; break;
-        case ::V1_4::LogPriority::FATAL:    p = 'F'; break;
-        default: p = 'U'; break;
-    }
-    return p;
-}
-}
+std::string GetExceptionMessage(status_t err, const char *msg,
+                                const Vector<::V1_4::LogMessage> &logs);
 
 template<typename T>
 std::string GetExceptionMessage(status_t err, const char *msg, const sp<T> &iface) {
-    String8 msg8;
-    if (msg) {
-        msg8 += msg;
-        msg8 += ": ";
-    }
-    auto errStr = StrCryptoError(err);
-    msg8 += errStr.c_str();
-
     Vector<::V1_4::LogMessage> logs;
-    if (iface->getLogMessages(logs) != NO_ERROR) {
-        return msg8.c_str();
-    }
-
-    for (auto log: logs) {
-        time_t seconds = log.timeMs / 1000;
-        int ms = log.timeMs % 1000;
-        char buf[64] = {0};
-        std::string timeStr = "00-00 00:00:00";
-        if (strftime(buf, sizeof buf, "%m-%d %H:%M:%S", std::localtime(&seconds))) {
-            timeStr = buf;
-        }
-
-        char p = logPriorityToChar(log.priority);
-        msg8 += String8::format("\n%s.%03d %c %s",
-                timeStr.c_str(), ms, p, log.message.c_str());
-    }
-
-    return msg8.c_str();
+    iface->getLogMessages(logs);
+    return GetExceptionMessage(err, msg, logs);
 }
 
 } // namespace DrmUtils
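A small usage sketch for the logging helpers added above (illustrative only; assumes a caller
inside frameworks/av that defines LOG_TAG and links against libmediadrm): messages logged at
INFO or above through the LOG2B* macros are mirrored into the in-process gLogBuf ring buffer
(capped at MAX_CAPACITY entries), and GetExceptionMessage() folds the buffered logs and an
error code into one diagnostic string.

#define LOG_TAG "DrmUtilsExample"
#include <mediadrm/DrmUtils.h>
#include <utils/Log.h>

using namespace android;

static void logDrmFailure(status_t err) {
    // Written to logcat and, because the priority is >= INFO, also to DrmUtils::gLogBuf.
    DrmUtils::LOG2BE("provision request failed, status=%d", err);

    // Combine the buffered framework-side logs with the error code into one message.
    auto logs = DrmUtils::gLogBuf.getLogs();
    std::string detail = DrmUtils::GetExceptionMessage(err, "provisioning failed", logs);
    ALOGE("%s", detail.c_str());
}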
diff --git a/drm/mediacas/plugins/clearkey/ClearKeySessionLibrary.h b/drm/mediacas/plugins/clearkey/ClearKeySessionLibrary.h
index a537e63..7c6d86c 100644
--- a/drm/mediacas/plugins/clearkey/ClearKeySessionLibrary.h
+++ b/drm/mediacas/plugins/clearkey/ClearKeySessionLibrary.h
@@ -22,7 +22,6 @@
 #include <openssl/aes.h>
 #include <utils/KeyedVector.h>
 #include <utils/Mutex.h>
-#include <utils/RefBase.h>
 
 namespace android {
 struct ABuffer;
@@ -30,7 +29,7 @@
 namespace clearkeycas {
 class KeyFetcher;
 
-class ClearKeyCasSession : public RefBase {
+class ClearKeyCasSession {
 public:
     explicit ClearKeyCasSession(CasPlugin *plugin);
 
diff --git a/drm/mediadrm/plugins/clearkey/default/DrmPlugin.cpp b/drm/mediadrm/plugins/clearkey/default/DrmPlugin.cpp
index 6ac3510..089eb1c 100644
--- a/drm/mediadrm/plugins/clearkey/default/DrmPlugin.cpp
+++ b/drm/mediadrm/plugins/clearkey/default/DrmPlugin.cpp
@@ -207,6 +207,7 @@
     }
 
     infoMap.clear();
+    android::Mutex::Autolock lock(mPlayPolicyLock);
     for (size_t i = 0; i < mPlayPolicy.size(); ++i) {
         infoMap.add(mPlayPolicy.keyAt(i), mPlayPolicy.valueAt(i));
     }
diff --git a/drm/mediadrm/plugins/clearkey/default/JsonWebKey.cpp b/drm/mediadrm/plugins/clearkey/default/JsonWebKey.cpp
index 53ffae4..a2d506d 100644
--- a/drm/mediadrm/plugins/clearkey/default/JsonWebKey.cpp
+++ b/drm/mediadrm/plugins/clearkey/default/JsonWebKey.cpp
@@ -61,7 +61,7 @@
     // all the base64 encoded keys. Each key is also stored separately as
     // a JSON object in mJsonObjects[1..n] where n is the total
     // number of keys in the set.
-    if (!isJsonWebKeySet(mJsonObjects[0])) {
+    if (mJsonObjects.size() == 0 || !isJsonWebKeySet(mJsonObjects[0])) {
         return false;
     }
 
diff --git a/drm/mediadrm/plugins/clearkey/default/include/DrmPlugin.h b/drm/mediadrm/plugins/clearkey/default/include/DrmPlugin.h
index aa9b59d..95f15ca 100644
--- a/drm/mediadrm/plugins/clearkey/default/include/DrmPlugin.h
+++ b/drm/mediadrm/plugins/clearkey/default/include/DrmPlugin.h
@@ -262,7 +262,7 @@
     void initProperties();
     void setPlayPolicy();
 
-    android::Mutex mPlayPolicyLock;
+    mutable android::Mutex mPlayPolicyLock;
     android::KeyedVector<String8, String8> mPlayPolicy;
     android::KeyedVector<String8, String8> mStringProperties;
     android::KeyedVector<String8, Vector<uint8_t>> mByteArrayProperties;
diff --git a/drm/mediadrm/plugins/clearkey/hidl/Android.bp b/drm/mediadrm/plugins/clearkey/hidl/Android.bp
index e6e1f80..c49d5fe 100644
--- a/drm/mediadrm/plugins/clearkey/hidl/Android.bp
+++ b/drm/mediadrm/plugins/clearkey/hidl/Android.bp
@@ -50,7 +50,7 @@
 
     relative_install_path: "hw",
 
-    cflags: ["-Wall", "-Werror"],
+    cflags: ["-Wall", "-Werror", "-Wthread-safety"],
 
     shared_libs: [
         "android.hardware.drm@1.0",
diff --git a/drm/mediadrm/plugins/clearkey/hidl/CryptoPlugin.cpp b/drm/mediadrm/plugins/clearkey/hidl/CryptoPlugin.cpp
index d278633..302dd39 100644
--- a/drm/mediadrm/plugins/clearkey/hidl/CryptoPlugin.cpp
+++ b/drm/mediadrm/plugins/clearkey/hidl/CryptoPlugin.cpp
@@ -37,6 +37,8 @@
     sp<IMemory> hidlMemory = mapMemory(base);
     ALOGE_IF(hidlMemory == nullptr, "mapMemory returns nullptr");
 
+    std::lock_guard<std::mutex> shared_buffer_lock(mSharedBufferLock);
+
     // allow mapMemory to return nullptr
     mSharedBufferMap[bufferId] = hidlMemory;
     return Void();
@@ -94,6 +96,7 @@
         return Void();
     }
 
+    std::unique_lock<std::mutex> shared_buffer_lock(mSharedBufferLock);
     if (mSharedBufferMap.find(source.bufferId) == mSharedBufferMap.end()) {
       _hidl_cb(Status_V1_2::ERROR_DRM_CANNOT_HANDLE, 0,
                "source decrypt buffer base not set");
@@ -142,12 +145,17 @@
 
     base = static_cast<uint8_t *>(static_cast<void *>(destBase->getPointer()));
 
-    if (destBuffer.offset + destBuffer.size > destBase->getSize()) {
+    totalSize = 0;
+    if (__builtin_add_overflow(destBuffer.offset, destBuffer.size, &totalSize) ||
+        totalSize > destBase->getSize()) {
+        android_errorWriteLog(0x534e4554, "176444622");
         _hidl_cb(Status_V1_2::ERROR_DRM_FRAME_TOO_LARGE, 0, "invalid buffer size");
         return Void();
     }
-    destPtr = static_cast<void *>(base + destination.nonsecureMemory.offset);
+    destPtr = static_cast<void*>(base + destination.nonsecureMemory.offset);
 
+    // release mSharedBufferLock
+    shared_buffer_lock.unlock();
 
     // Calculate the output buffer size and determine if any subsamples are
     // encrypted.
diff --git a/drm/mediadrm/plugins/clearkey/hidl/DrmPlugin.cpp b/drm/mediadrm/plugins/clearkey/hidl/DrmPlugin.cpp
index a77759e..6f69110 100644
--- a/drm/mediadrm/plugins/clearkey/hidl/DrmPlugin.cpp
+++ b/drm/mediadrm/plugins/clearkey/hidl/DrmPlugin.cpp
@@ -220,6 +220,7 @@
         if (requestString.find(kOfflineLicense) != std::string::npos) {
             std::string emptyResponse;
             std::string keySetIdString(keySetId.begin(), keySetId.end());
+            Mutex::Autolock lock(mFileHandleLock);
             if (!mFileHandle.StoreLicense(keySetIdString,
                     DeviceFiles::kLicenseStateReleasing,
                     emptyResponse)) {
@@ -335,6 +336,7 @@
         }
         *keySetId = kKeySetIdPrefix + ByteArrayToHexString(
                 reinterpret_cast<const uint8_t*>(randomData.data()), randomData.size());
+        Mutex::Autolock lock(mFileHandleLock);
         if (mFileHandle.LicenseExists(*keySetId)) {
             // collision, regenerate
             ALOGV("Retry generating KeySetId");
@@ -392,6 +394,7 @@
     if (status == Status::OK) {
         if (isOfflineLicense) {
             if (isRelease) {
+                Mutex::Autolock lock(mFileHandleLock);
                 mFileHandle.DeleteLicense(keySetId);
                 mSessionLibrary->destroySession(session);
             } else {
@@ -400,6 +403,7 @@
                     return Void();
                 }
 
+                Mutex::Autolock lock(mFileHandleLock);
                 bool ok = mFileHandle.StoreLicense(
                         keySetId,
                         DeviceFiles::kLicenseStateActive,
@@ -454,6 +458,7 @@
         DeviceFiles::LicenseState licenseState;
         std::string offlineLicense;
         Status status = Status::OK;
+        Mutex::Autolock lock(mFileHandleLock);
         if (!mFileHandle.RetrieveLicense(std::string(keySetId.begin(), keySetId.end()),
                 &licenseState, &offlineLicense)) {
             ALOGE("Failed to restore offline license");
@@ -576,7 +581,6 @@
 Return<void> DrmPlugin::queryKeyStatus(
         const hidl_vec<uint8_t>& sessionId,
         queryKeyStatus_cb _hidl_cb) {
-
     if (sessionId.size() == 0) {
         // Returns empty key status KeyValue pair
         _hidl_cb(Status::BAD_VALUE, hidl_vec<KeyValue>());
@@ -586,12 +590,14 @@
     std::vector<KeyValue> infoMapVec;
     infoMapVec.clear();
 
+    mPlayPolicyLock.lock();
     KeyValue keyValuePair;
     for (size_t i = 0; i < mPlayPolicy.size(); ++i) {
         keyValuePair.key = mPlayPolicy[i].key;
         keyValuePair.value = mPlayPolicy[i].value;
         infoMapVec.push_back(keyValuePair);
     }
+    mPlayPolicyLock.unlock();
     _hidl_cb(Status::OK, toHidlVec(infoMapVec));
     return Void();
 }
@@ -704,6 +710,8 @@
 }
 
 Return<void> DrmPlugin::getOfflineLicenseKeySetIds(getOfflineLicenseKeySetIds_cb _hidl_cb) {
+    Mutex::Autolock lock(mFileHandleLock);
+
     std::vector<std::string> licenseNames = mFileHandle.ListLicenses();
     std::vector<KeySetId> keySetIds;
     if (mMockError != Status_V1_2::OK) {
@@ -724,6 +732,7 @@
         return toStatus_1_0(mMockError);
     }
     std::string licenseName(keySetId.begin(), keySetId.end());
+    Mutex::Autolock lock(mFileHandleLock);
     if (mFileHandle.DeleteLicense(licenseName)) {
         return Status::OK;
     }
@@ -732,6 +741,8 @@
 
 Return<void> DrmPlugin::getOfflineLicenseState(const KeySetId& keySetId,
         getOfflineLicenseState_cb _hidl_cb) {
+    Mutex::Autolock lock(mFileHandleLock);
+
     std::string licenseName(keySetId.begin(), keySetId.end());
     DeviceFiles::LicenseState state;
     std::string license;
diff --git a/drm/mediadrm/plugins/clearkey/hidl/JsonWebKey.cpp b/drm/mediadrm/plugins/clearkey/hidl/JsonWebKey.cpp
index d93777d..99668a7 100644
--- a/drm/mediadrm/plugins/clearkey/hidl/JsonWebKey.cpp
+++ b/drm/mediadrm/plugins/clearkey/hidl/JsonWebKey.cpp
@@ -65,7 +65,7 @@
     // all the base64 encoded keys. Each key is also stored separately as
     // a JSON object in mJsonObjects[1..n] where n is the total
     // number of keys in the set.
-    if (!isJsonWebKeySet(mJsonObjects[0])) {
+    if (mJsonObjects.size() == 0 || !isJsonWebKeySet(mJsonObjects[0])) {
         return false;
     }
 
diff --git a/drm/mediadrm/plugins/clearkey/hidl/MemoryFileSystem.cpp b/drm/mediadrm/plugins/clearkey/hidl/MemoryFileSystem.cpp
index 051a968..32cf2dc 100644
--- a/drm/mediadrm/plugins/clearkey/hidl/MemoryFileSystem.cpp
+++ b/drm/mediadrm/plugins/clearkey/hidl/MemoryFileSystem.cpp
@@ -24,11 +24,13 @@
 }
 
 bool MemoryFileSystem::FileExists(const std::string& fileName) const {
+    std::lock_guard<std::mutex> lock(mMemoryFileSystemLock);
     auto result = mMemoryFileSystem.find(fileName);
     return result != mMemoryFileSystem.end();
 }
 
 ssize_t MemoryFileSystem::GetFileSize(const std::string& fileName) const {
+    std::lock_guard<std::mutex> lock(mMemoryFileSystemLock);
     auto result = mMemoryFileSystem.find(fileName);
     if (result != mMemoryFileSystem.end()) {
         return static_cast<ssize_t>(result->second.getFileSize());
@@ -40,6 +42,7 @@
 
 std::vector<std::string> MemoryFileSystem::ListFiles() const {
     std::vector<std::string> list;
+    std::lock_guard<std::mutex> lock(mMemoryFileSystemLock);
     for (const auto& filename : mMemoryFileSystem) {
         list.push_back(filename.first);
     }
@@ -48,6 +51,7 @@
 
 size_t MemoryFileSystem::Read(const std::string& path, std::string* buffer) {
     std::string key = GetFileName(path);
+    std::lock_guard<std::mutex> lock(mMemoryFileSystemLock);
     auto result = mMemoryFileSystem.find(key);
     if (result != mMemoryFileSystem.end()) {
         std::string serializedHashFile = result->second.getContent();
@@ -61,6 +65,7 @@
 
 size_t MemoryFileSystem::Write(const std::string& path, const MemoryFile& memoryFile) {
     std::string key = GetFileName(path);
+    std::lock_guard<std::mutex> lock(mMemoryFileSystemLock);
     auto result = mMemoryFileSystem.find(key);
     if (result != mMemoryFileSystem.end()) {
         mMemoryFileSystem.erase(key);
@@ -70,6 +75,7 @@
 }
 
 bool MemoryFileSystem::RemoveFile(const std::string& fileName) {
+    std::lock_guard<std::mutex> lock(mMemoryFileSystemLock);
     auto result = mMemoryFileSystem.find(fileName);
     if (result != mMemoryFileSystem.end()) {
         mMemoryFileSystem.erase(result);
@@ -81,6 +87,7 @@
 }
 
 bool MemoryFileSystem::RemoveAllFiles() {
+    std::lock_guard<std::mutex> lock(mMemoryFileSystemLock);
     mMemoryFileSystem.clear();
     return mMemoryFileSystem.empty();
 }
diff --git a/drm/mediadrm/plugins/clearkey/hidl/include/CryptoPlugin.h b/drm/mediadrm/plugins/clearkey/hidl/include/CryptoPlugin.h
index 8680f0c..23a64fa 100644
--- a/drm/mediadrm/plugins/clearkey/hidl/include/CryptoPlugin.h
+++ b/drm/mediadrm/plugins/clearkey/hidl/include/CryptoPlugin.h
@@ -20,6 +20,8 @@
 #include <android/hardware/drm/1.2/ICryptoPlugin.h>
 #include <android/hidl/memory/1.0/IMemory.h>
 
+#include <mutex>
+
 #include "ClearKeyTypes.h"
 #include "Session.h"
 #include "Utils.h"
@@ -93,7 +95,7 @@
             const SharedBuffer& source,
             uint64_t offset,
             const DestinationBuffer& destination,
-            decrypt_1_2_cb _hidl_cb);
+            decrypt_1_2_cb _hidl_cb) NO_THREAD_SAFETY_ANALYSIS; // uses unique_lock, which thread-safety analysis cannot track
 
     Return<void> setSharedBufferBase(const hidl_memory& base,
             uint32_t bufferId);
@@ -105,7 +107,8 @@
 private:
     CLEARKEY_DISALLOW_COPY_AND_ASSIGN(CryptoPlugin);
 
-    std::map<uint32_t, sp<IMemory> > mSharedBufferMap;
+    std::mutex mSharedBufferLock;
+    std::map<uint32_t, sp<IMemory>> mSharedBufferMap GUARDED_BY(mSharedBufferLock);
     sp<Session> mSession;
     Status mInitStatus;
 };
diff --git a/drm/mediadrm/plugins/clearkey/hidl/include/DrmPlugin.h b/drm/mediadrm/plugins/clearkey/hidl/include/DrmPlugin.h
index 076beb8..894985b 100644
--- a/drm/mediadrm/plugins/clearkey/hidl/include/DrmPlugin.h
+++ b/drm/mediadrm/plugins/clearkey/hidl/include/DrmPlugin.h
@@ -416,7 +416,8 @@
         mMockError = Status_V1_2::OK;
     }
 
-    DeviceFiles mFileHandle;
+    DeviceFiles mFileHandle GUARDED_BY(mFileHandleLock);
+    Mutex mFileHandleLock;
     Mutex mSecureStopLock;
 
     CLEARKEY_DISALLOW_COPY_AND_ASSIGN_AND_NEW(DrmPlugin);
diff --git a/drm/mediadrm/plugins/clearkey/hidl/include/MemoryFileSystem.h b/drm/mediadrm/plugins/clearkey/hidl/include/MemoryFileSystem.h
index bcd9fd6..6ac0e2c 100644
--- a/drm/mediadrm/plugins/clearkey/hidl/include/MemoryFileSystem.h
+++ b/drm/mediadrm/plugins/clearkey/hidl/include/MemoryFileSystem.h
@@ -5,7 +5,9 @@
 #ifndef CLEARKEY_MEMORY_FILE_SYSTEM_H_
 #define CLEARKEY_MEMORY_FILE_SYSTEM_H_
 
+#include <android-base/thread_annotations.h>
 #include <map>
+#include <mutex>
 #include <string>
 
 #include "ClearKeyTypes.h"
@@ -49,10 +51,12 @@
     size_t Write(const std::string& pathName, const MemoryFile& memoryFile);
 
  private:
+    mutable std::mutex mMemoryFileSystemLock;
+
     // License file name is made up of a unique keySetId, therefore,
     // the filename can be used as the key to locate licenses in the
     // memory file system.
-    std::map<std::string, MemoryFile> mMemoryFileSystem;
+    std::map<std::string, MemoryFile> mMemoryFileSystem GUARDED_BY(mMemoryFileSystemLock);
 
     std::string GetFileName(const std::string& path);
 
diff --git a/include/private/media/AudioTrackShared.h b/include/private/media/AudioTrackShared.h
index 9cabd8b..200e92d 100644
--- a/include/private/media/AudioTrackShared.h
+++ b/include/private/media/AudioTrackShared.h
@@ -182,6 +182,7 @@
                 // This is set by AudioTrack.setBufferSizeInFrames().
                 // A write will not fill the buffer above this limit.
     volatile    uint32_t   mBufferSizeInFrames;  // effective size of the buffer
+    volatile    uint32_t   mStartThresholdInFrames; // min frames in buffer to start streaming
 
 public:
 
@@ -216,6 +217,8 @@
     };
 
     size_t frameCount() const { return mFrameCount; }
+    uint32_t getStartThresholdInFrames() const;
+    uint32_t setStartThresholdInFrames(uint32_t startThresholdInFrames);
 
 protected:
     // These refer to shared memory, and are virtual addresses with respect to the current process.
diff --git a/media/audioserver/Android.bp b/media/audioserver/Android.bp
index ca3c81c..be25ffb 100644
--- a/media/audioserver/Android.bp
+++ b/media/audioserver/Android.bp
@@ -1,3 +1,12 @@
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
 cc_binary {
     name: "audioserver",
 
diff --git a/media/codec2/components/aac/C2SoftAacDec.cpp b/media/codec2/components/aac/C2SoftAacDec.cpp
index 3e6b0ff..332696d 100644
--- a/media/codec2/components/aac/C2SoftAacDec.cpp
+++ b/media/codec2/components/aac/C2SoftAacDec.cpp
@@ -55,6 +55,8 @@
 namespace android {
 
 constexpr char COMPONENT_NAME[] = "c2.android.aac.decoder";
+constexpr size_t kDefaultOutputPortDelay = 2;
+constexpr size_t kMaxOutputPortDelay = 16;
 
 class C2SoftAacDec::IntfImpl : public SimpleInterface<void>::BaseParams {
 public:
@@ -73,7 +75,9 @@
 
         addParameter(
                 DefineParam(mActualOutputDelay, C2_PARAMKEY_OUTPUT_DELAY)
-                .withConstValue(new C2PortActualDelayTuning::output(2u))
+                .withDefault(new C2PortActualDelayTuning::output(kDefaultOutputPortDelay))
+                .withFields({C2F(mActualOutputDelay, value).inRange(0, kMaxOutputPortDelay)})
+                .withSetter(Setter<decltype(*mActualOutputDelay)>::StrictValueWithNoDeps)
                 .build());
 
         addParameter(
@@ -263,6 +267,7 @@
       mAACDecoder(nullptr),
       mStreamInfo(nullptr),
       mSignalledError(false),
+      mOutputPortDelay(kDefaultOutputPortDelay),
       mOutputDelayRingBuffer(nullptr) {
 }
 
@@ -915,6 +920,29 @@
 
     int32_t outputDelay = mStreamInfo->outputDelay * mStreamInfo->numChannels;
 
+    size_t numSamplesInOutput = mStreamInfo->frameSize * mStreamInfo->numChannels;
+    if (numSamplesInOutput > 0) {
+        size_t actualOutputPortDelay = (outputDelay + numSamplesInOutput - 1) / numSamplesInOutput;
+        if (actualOutputPortDelay > mOutputPortDelay) {
+            mOutputPortDelay = actualOutputPortDelay;
+            ALOGV("New Output port delay %zu ", mOutputPortDelay);
+
+            C2PortActualDelayTuning::output outputPortDelay(mOutputPortDelay);
+            std::vector<std::unique_ptr<C2SettingResult>> failures;
+            c2_status_t err =
+                mIntf->config({&outputPortDelay}, C2_MAY_BLOCK, &failures);
+            if (err == OK) {
+                work->worklets.front()->output.configUpdate.push_back(
+                    C2Param::Copy(outputPortDelay));
+            } else {
+                ALOGE("Cannot set output delay");
+                mSignalledError = true;
+                work->workletsProcessed = 1u;
+                work->result = C2_CORRUPTED;
+                return;
+            }
+        }
+    }
     mBuffersInfo.push_back(std::move(inInfo));
     work->workletsProcessed = 0u;
     if (!eos && mOutputDelayCompensated < outputDelay) {
diff --git a/media/codec2/components/aac/C2SoftAacDec.h b/media/codec2/components/aac/C2SoftAacDec.h
index 965c29e..986187c 100644
--- a/media/codec2/components/aac/C2SoftAacDec.h
+++ b/media/codec2/components/aac/C2SoftAacDec.h
@@ -57,6 +57,7 @@
     size_t mInputBufferCount;
     size_t mOutputBufferCount;
     bool mSignalledError;
+    size_t mOutputPortDelay;
     struct Info {
         uint64_t frameIndex;
         size_t bufferSize;
diff --git a/media/codec2/components/aac/C2SoftAacEnc.cpp b/media/codec2/components/aac/C2SoftAacEnc.cpp
index ea76cbb..d865ab2 100644
--- a/media/codec2/components/aac/C2SoftAacEnc.cpp
+++ b/media/codec2/components/aac/C2SoftAacEnc.cpp
@@ -272,8 +272,9 @@
         return UNKNOWN_ERROR;
     }
 
-    if (sbrMode != -1 && aacProfile == C2Config::PROFILE_AAC_ELD) {
-        if (AACENC_OK != aacEncoder_SetParam(mAACEncoder, AACENC_SBR_MODE, sbrMode)) {
+    if (sbrMode != C2Config::AAC_SBR_AUTO && aacProfile == C2Config::PROFILE_AAC_ELD) {
+        int aacSbrMode = sbrMode != C2Config::AAC_SBR_OFF;
+        if (AACENC_OK != aacEncoder_SetParam(mAACEncoder, AACENC_SBR_MODE, aacSbrMode)) {
             ALOGE("Failed to set AAC encoder parameters");
             return UNKNOWN_ERROR;
         }
diff --git a/media/codec2/components/avc/C2SoftAvcDec.cpp b/media/codec2/components/avc/C2SoftAvcDec.cpp
index 0207311..e8287f9 100644
--- a/media/codec2/components/avc/C2SoftAvcDec.cpp
+++ b/media/codec2/components/avc/C2SoftAvcDec.cpp
@@ -26,7 +26,6 @@
 #include <SimpleC2Interface.h>
 
 #include "C2SoftAvcDec.h"
-#include "ih264d.h"
 
 namespace android {
 
@@ -391,12 +390,14 @@
     }
 
     while (true) {
-        ivd_video_decode_ip_t s_decode_ip;
-        ivd_video_decode_op_t s_decode_op;
+        ih264d_video_decode_ip_t s_h264d_decode_ip = {};
+        ih264d_video_decode_op_t s_h264d_decode_op = {};
+        ivd_video_decode_ip_t *ps_decode_ip = &s_h264d_decode_ip.s_ivd_video_decode_ip_t;
+        ivd_video_decode_op_t *ps_decode_op = &s_h264d_decode_op.s_ivd_video_decode_op_t;
 
-        setDecodeArgs(&s_decode_ip, &s_decode_op, nullptr, nullptr, 0, 0, 0);
-        (void) ivdec_api_function(mDecHandle, &s_decode_ip, &s_decode_op);
-        if (0 == s_decode_op.u4_output_present) {
+        setDecodeArgs(ps_decode_ip, ps_decode_op, nullptr, nullptr, 0, 0, 0);
+        (void) ivdec_api_function(mDecHandle, &s_h264d_decode_ip, &s_h264d_decode_op);
+        if (0 == ps_decode_op->u4_output_present) {
             resetPlugin();
             break;
         }
@@ -411,8 +412,8 @@
 }
 
 status_t C2SoftAvcDec::createDecoder() {
-    ivdext_create_ip_t s_create_ip;
-    ivdext_create_op_t s_create_op;
+    ivdext_create_ip_t s_create_ip = {};
+    ivdext_create_op_t s_create_op = {};
 
     s_create_ip.s_ivd_create_ip_t.u4_size = sizeof(ivdext_create_ip_t);
     s_create_ip.s_ivd_create_ip_t.e_cmd = IVD_CMD_CREATE;
@@ -438,8 +439,8 @@
 }
 
 status_t C2SoftAvcDec::setNumCores() {
-    ivdext_ctl_set_num_cores_ip_t s_set_num_cores_ip;
-    ivdext_ctl_set_num_cores_op_t s_set_num_cores_op;
+    ivdext_ctl_set_num_cores_ip_t s_set_num_cores_ip = {};
+    ivdext_ctl_set_num_cores_op_t s_set_num_cores_op = {};
 
     s_set_num_cores_ip.u4_size = sizeof(ivdext_ctl_set_num_cores_ip_t);
     s_set_num_cores_ip.e_cmd = IVD_CMD_VIDEO_CTL;
@@ -458,22 +459,26 @@
 }
 
 status_t C2SoftAvcDec::setParams(size_t stride, IVD_VIDEO_DECODE_MODE_T dec_mode) {
-    ivd_ctl_set_config_ip_t s_set_dyn_params_ip;
-    ivd_ctl_set_config_op_t s_set_dyn_params_op;
+    ih264d_ctl_set_config_ip_t s_h264d_set_dyn_params_ip = {};
+    ih264d_ctl_set_config_op_t s_h264d_set_dyn_params_op = {};
+    ivd_ctl_set_config_ip_t *ps_set_dyn_params_ip =
+        &s_h264d_set_dyn_params_ip.s_ivd_ctl_set_config_ip_t;
+    ivd_ctl_set_config_op_t *ps_set_dyn_params_op =
+        &s_h264d_set_dyn_params_op.s_ivd_ctl_set_config_op_t;
 
-    s_set_dyn_params_ip.u4_size = sizeof(ivd_ctl_set_config_ip_t);
-    s_set_dyn_params_ip.e_cmd = IVD_CMD_VIDEO_CTL;
-    s_set_dyn_params_ip.e_sub_cmd = IVD_CMD_CTL_SETPARAMS;
-    s_set_dyn_params_ip.u4_disp_wd = (UWORD32) stride;
-    s_set_dyn_params_ip.e_frm_skip_mode = IVD_SKIP_NONE;
-    s_set_dyn_params_ip.e_frm_out_mode = IVD_DISPLAY_FRAME_OUT;
-    s_set_dyn_params_ip.e_vid_dec_mode = dec_mode;
-    s_set_dyn_params_op.u4_size = sizeof(ivd_ctl_set_config_op_t);
+    ps_set_dyn_params_ip->u4_size = sizeof(ih264d_ctl_set_config_ip_t);
+    ps_set_dyn_params_ip->e_cmd = IVD_CMD_VIDEO_CTL;
+    ps_set_dyn_params_ip->e_sub_cmd = IVD_CMD_CTL_SETPARAMS;
+    ps_set_dyn_params_ip->u4_disp_wd = (UWORD32) stride;
+    ps_set_dyn_params_ip->e_frm_skip_mode = IVD_SKIP_NONE;
+    ps_set_dyn_params_ip->e_frm_out_mode = IVD_DISPLAY_FRAME_OUT;
+    ps_set_dyn_params_ip->e_vid_dec_mode = dec_mode;
+    ps_set_dyn_params_op->u4_size = sizeof(ih264d_ctl_set_config_op_t);
     IV_API_CALL_STATUS_T status = ivdec_api_function(mDecHandle,
-                                                     &s_set_dyn_params_ip,
-                                                     &s_set_dyn_params_op);
+                                                     &s_h264d_set_dyn_params_ip,
+                                                     &s_h264d_set_dyn_params_op);
     if (status != IV_SUCCESS) {
-        ALOGE("error in %s: 0x%x", __func__, s_set_dyn_params_op.u4_error_code);
+        ALOGE("error in %s: 0x%x", __func__, ps_set_dyn_params_op->u4_error_code);
         return UNKNOWN_ERROR;
     }
 
@@ -481,8 +486,8 @@
 }
 
 void C2SoftAvcDec::getVersion() {
-    ivd_ctl_getversioninfo_ip_t s_get_versioninfo_ip;
-    ivd_ctl_getversioninfo_op_t s_get_versioninfo_op;
+    ivd_ctl_getversioninfo_ip_t s_get_versioninfo_ip = {};
+    ivd_ctl_getversioninfo_op_t s_get_versioninfo_op = {};
     UWORD8 au1_buf[512];
 
     s_get_versioninfo_ip.u4_size = sizeof(ivd_ctl_getversioninfo_ip_t);
@@ -538,7 +543,7 @@
         if (OK != setParams(mStride, IVD_DECODE_FRAME)) return false;
     }
 
-    ps_decode_ip->u4_size = sizeof(ivd_video_decode_ip_t);
+    ps_decode_ip->u4_size = sizeof(ih264d_video_decode_ip_t);
     ps_decode_ip->e_cmd = IVD_CMD_VIDEO_DECODE;
     if (inBuffer) {
         ps_decode_ip->u4_ts = tsMarker;
@@ -567,14 +572,14 @@
         ps_decode_ip->s_out_buffer.pu1_bufs[2] = mOutBufferFlush + lumaSize + chromaSize;
     }
     ps_decode_ip->s_out_buffer.u4_num_bufs = 3;
-    ps_decode_op->u4_size = sizeof(ivd_video_decode_op_t);
+    ps_decode_op->u4_size = sizeof(ih264d_video_decode_op_t);
 
     return true;
 }
 
 bool C2SoftAvcDec::getVuiParams() {
-    ivdext_ctl_get_vui_params_ip_t s_get_vui_params_ip;
-    ivdext_ctl_get_vui_params_op_t s_get_vui_params_op;
+    ivdext_ctl_get_vui_params_ip_t s_get_vui_params_ip = {};
+    ivdext_ctl_get_vui_params_op_t s_get_vui_params_op = {};
 
     s_get_vui_params_ip.u4_size = sizeof(ivdext_ctl_get_vui_params_ip_t);
     s_get_vui_params_ip.e_cmd = IVD_CMD_VIDEO_CTL;
@@ -622,8 +627,8 @@
 }
 
 status_t C2SoftAvcDec::setFlushMode() {
-    ivd_ctl_flush_ip_t s_set_flush_ip;
-    ivd_ctl_flush_op_t s_set_flush_op;
+    ivd_ctl_flush_ip_t s_set_flush_ip = {};
+    ivd_ctl_flush_op_t s_set_flush_op = {};
 
     s_set_flush_ip.u4_size = sizeof(ivd_ctl_flush_ip_t);
     s_set_flush_ip.e_cmd = IVD_CMD_VIDEO_CTL;
@@ -641,8 +646,8 @@
 }
 
 status_t C2SoftAvcDec::resetDecoder() {
-    ivd_ctl_reset_ip_t s_reset_ip;
-    ivd_ctl_reset_op_t s_reset_op;
+    ivd_ctl_reset_ip_t s_reset_ip = {};
+    ivd_ctl_reset_op_t s_reset_op = {};
 
     s_reset_ip.u4_size = sizeof(ivd_ctl_reset_ip_t);
     s_reset_ip.e_cmd = IVD_CMD_VIDEO_CTL;
@@ -671,8 +676,8 @@
 
 status_t C2SoftAvcDec::deleteDecoder() {
     if (mDecHandle) {
-        ivdext_delete_ip_t s_delete_ip;
-        ivdext_delete_op_t s_delete_op;
+        ivdext_delete_ip_t s_delete_ip = {};
+        ivdext_delete_op_t s_delete_op = {};
 
         s_delete_ip.s_ivd_delete_ip_t.u4_size = sizeof(ivdext_delete_ip_t);
         s_delete_ip.s_ivd_delete_ip_t.e_cmd = IVD_CMD_DELETE;
@@ -837,8 +842,10 @@
             return;
         }
 
-        ivd_video_decode_ip_t s_decode_ip;
-        ivd_video_decode_op_t s_decode_op;
+        ih264d_video_decode_ip_t s_h264d_decode_ip = {};
+        ih264d_video_decode_op_t s_h264d_decode_op = {};
+        ivd_video_decode_ip_t *ps_decode_ip = &s_h264d_decode_ip.s_ivd_video_decode_ip_t;
+        ivd_video_decode_op_t *ps_decode_op = &s_h264d_decode_op.s_ivd_video_decode_op_t;
         {
             C2GraphicView wView = mOutBlock->map().get();
             if (wView.error()) {
@@ -846,7 +853,7 @@
                 work->result = wView.error();
                 return;
             }
-            if (!setDecodeArgs(&s_decode_ip, &s_decode_op, &rView, &wView,
+            if (!setDecodeArgs(ps_decode_ip, ps_decode_op, &rView, &wView,
                                inOffset + inPos, inSize - inPos, workIndex)) {
                 mSignalledError = true;
                 work->workletsProcessed = 1u;
@@ -862,26 +869,27 @@
             WORD32 delay;
             GETTIME(&mTimeStart, nullptr);
             TIME_DIFF(mTimeEnd, mTimeStart, delay);
-            (void) ivdec_api_function(mDecHandle, &s_decode_ip, &s_decode_op);
+            (void) ivdec_api_function(mDecHandle, &s_h264d_decode_ip, &s_h264d_decode_op);
             WORD32 decodeTime;
             GETTIME(&mTimeEnd, nullptr);
             TIME_DIFF(mTimeStart, mTimeEnd, decodeTime);
             ALOGV("decodeTime=%6d delay=%6d numBytes=%6d", decodeTime, delay,
-                  s_decode_op.u4_num_bytes_consumed);
+                  ps_decode_op->u4_num_bytes_consumed);
         }
-        if (IVD_MEM_ALLOC_FAILED == (s_decode_op.u4_error_code & IVD_ERROR_MASK)) {
+        if (IVD_MEM_ALLOC_FAILED == (ps_decode_op->u4_error_code & IVD_ERROR_MASK)) {
             ALOGE("allocation failure in decoder");
             mSignalledError = true;
             work->workletsProcessed = 1u;
             work->result = C2_CORRUPTED;
             return;
-        } else if (IVD_STREAM_WIDTH_HEIGHT_NOT_SUPPORTED == (s_decode_op.u4_error_code & IVD_ERROR_MASK)) {
+        } else if (IVD_STREAM_WIDTH_HEIGHT_NOT_SUPPORTED ==
+                (ps_decode_op->u4_error_code & IVD_ERROR_MASK)) {
             ALOGE("unsupported resolution : %dx%d", mWidth, mHeight);
             mSignalledError = true;
             work->workletsProcessed = 1u;
             work->result = C2_CORRUPTED;
             return;
-        } else if (IVD_RES_CHANGED == (s_decode_op.u4_error_code & IVD_ERROR_MASK)) {
+        } else if (IVD_RES_CHANGED == (ps_decode_op->u4_error_code & IVD_ERROR_MASK)) {
             ALOGV("resolution changed");
             drainInternal(DRAIN_COMPONENT_NO_EOS, pool, work);
             resetDecoder();
@@ -890,16 +898,16 @@
 
             /* Decode header and get new dimensions */
             setParams(mStride, IVD_DECODE_HEADER);
-            (void) ivdec_api_function(mDecHandle, &s_decode_ip, &s_decode_op);
-        } else if (IS_IVD_FATAL_ERROR(s_decode_op.u4_error_code)) {
-            ALOGE("Fatal error in decoder 0x%x", s_decode_op.u4_error_code);
+            (void) ivdec_api_function(mDecHandle, ps_decode_ip, ps_decode_op);
+        } else if (IS_IVD_FATAL_ERROR(ps_decode_op->u4_error_code)) {
+            ALOGE("Fatal error in decoder 0x%x", ps_decode_op->u4_error_code);
             mSignalledError = true;
             work->workletsProcessed = 1u;
             work->result = C2_CORRUPTED;
             return;
         }
-        if (s_decode_op.i4_reorder_depth >= 0 && mOutputDelay != s_decode_op.i4_reorder_depth) {
-            mOutputDelay = s_decode_op.i4_reorder_depth;
+        if (ps_decode_op->i4_reorder_depth >= 0 && mOutputDelay != ps_decode_op->i4_reorder_depth) {
+            mOutputDelay = ps_decode_op->i4_reorder_depth;
             ALOGV("New Output delay %d ", mOutputDelay);
 
             C2PortActualDelayTuning::output outputDelay(mOutputDelay);
@@ -917,16 +925,16 @@
                 return;
             }
         }
-        if (0 < s_decode_op.u4_pic_wd && 0 < s_decode_op.u4_pic_ht) {
+        if (0 < ps_decode_op->u4_pic_wd && 0 < ps_decode_op->u4_pic_ht) {
             if (mHeaderDecoded == false) {
                 mHeaderDecoded = true;
-                mStride = ALIGN32(s_decode_op.u4_pic_wd);
+                mStride = ALIGN32(ps_decode_op->u4_pic_wd);
                 setParams(mStride, IVD_DECODE_FRAME);
             }
-            if (s_decode_op.u4_pic_wd != mWidth || s_decode_op.u4_pic_ht != mHeight) {
-                mWidth = s_decode_op.u4_pic_wd;
-                mHeight = s_decode_op.u4_pic_ht;
-                CHECK_EQ(0u, s_decode_op.u4_output_present);
+            if (ps_decode_op->u4_pic_wd != mWidth || ps_decode_op->u4_pic_ht != mHeight) {
+                mWidth = ps_decode_op->u4_pic_wd;
+                mHeight = ps_decode_op->u4_pic_ht;
+                CHECK_EQ(0u, ps_decode_op->u4_output_present);
 
                 C2StreamPictureSizeInfo::output size(0u, mWidth, mHeight);
                 std::vector<std::unique_ptr<C2SettingResult>> failures;
@@ -945,11 +953,11 @@
             }
         }
         (void)getVuiParams();
-        hasPicture |= (1 == s_decode_op.u4_frame_decoded_flag);
-        if (s_decode_op.u4_output_present) {
-            finishWork(s_decode_op.u4_ts, work);
+        hasPicture |= (1 == ps_decode_op->u4_frame_decoded_flag);
+        if (ps_decode_op->u4_output_present) {
+            finishWork(ps_decode_op->u4_ts, work);
         }
-        inPos += s_decode_op.u4_num_bytes_consumed;
+        inPos += ps_decode_op->u4_num_bytes_consumed;
     }
     if (eos) {
         drainInternal(DRAIN_COMPONENT_WITH_EOS, pool, work);
@@ -987,16 +995,18 @@
             ALOGE("graphic view map failed %d", wView.error());
             return C2_CORRUPTED;
         }
-        ivd_video_decode_ip_t s_decode_ip;
-        ivd_video_decode_op_t s_decode_op;
-        if (!setDecodeArgs(&s_decode_ip, &s_decode_op, nullptr, &wView, 0, 0, 0)) {
+        ih264d_video_decode_ip_t s_h264d_decode_ip = {};
+        ih264d_video_decode_op_t s_h264d_decode_op = {};
+        ivd_video_decode_ip_t *ps_decode_ip = &s_h264d_decode_ip.s_ivd_video_decode_ip_t;
+        ivd_video_decode_op_t *ps_decode_op = &s_h264d_decode_op.s_ivd_video_decode_op_t;
+        if (!setDecodeArgs(ps_decode_ip, ps_decode_op, nullptr, &wView, 0, 0, 0)) {
             mSignalledError = true;
             work->workletsProcessed = 1u;
             return C2_CORRUPTED;
         }
-        (void) ivdec_api_function(mDecHandle, &s_decode_ip, &s_decode_op);
-        if (s_decode_op.u4_output_present) {
-            finishWork(s_decode_op.u4_ts, work);
+        (void) ivdec_api_function(mDecHandle, &s_h264d_decode_ip, &s_h264d_decode_op);
+        if (ps_decode_op->u4_output_present) {
+            finishWork(ps_decode_op->u4_ts, work);
         } else {
             fillEmptyWork(work);
             break;
diff --git a/media/codec2/components/avc/C2SoftAvcDec.h b/media/codec2/components/avc/C2SoftAvcDec.h
index bd84de0..5c07d29 100644
--- a/media/codec2/components/avc/C2SoftAvcDec.h
+++ b/media/codec2/components/avc/C2SoftAvcDec.h
@@ -25,8 +25,7 @@
 #include <SimpleC2Component.h>
 
 #include "ih264_typedefs.h"
-#include "iv.h"
-#include "ivd.h"
+#include "ih264d.h"
 
 namespace android {
 
diff --git a/media/codec2/components/avc/C2SoftAvcEnc.cpp b/media/codec2/components/avc/C2SoftAvcEnc.cpp
index cfaeb66..fc5b75d 100644
--- a/media/codec2/components/avc/C2SoftAvcEnc.cpp
+++ b/media/codec2/components/avc/C2SoftAvcEnc.cpp
@@ -19,6 +19,8 @@
 #include <log/log.h>
 #include <utils/misc.h>
 
+#include <algorithm>
+
 #include <media/hardware/VideoAPI.h>
 #include <media/stagefright/MediaDefs.h>
 #include <media/stagefright/MediaErrors.h>
@@ -121,6 +123,19 @@
                 .build());
 
         addParameter(
+                DefineParam(mPictureQuantization, C2_PARAMKEY_PICTURE_QUANTIZATION)
+                .withDefault(C2StreamPictureQuantizationTuning::output::AllocShared(
+                        0 /* flexCount */, 0u /* stream */))
+                .withFields({C2F(mPictureQuantization, m.values[0].type_).oneOf(
+                                {C2Config::picture_type_t(I_FRAME),
+                                  C2Config::picture_type_t(P_FRAME),
+                                  C2Config::picture_type_t(B_FRAME)}),
+                             C2F(mPictureQuantization, m.values[0].min).any(),
+                             C2F(mPictureQuantization, m.values[0].max).any()})
+                .withSetter(PictureQuantizationSetter)
+                .build());
+
+        addParameter(
                 DefineParam(mActualInputDelay, C2_PARAMKEY_INPUT_DELAY)
                 .withDefault(new C2PortActualDelayTuning::input(DEFAULT_B_FRAMES))
                 .withFields({C2F(mActualInputDelay, value).inRange(0, MAX_B_FRAMES)})
@@ -220,6 +235,7 @@
         return res;
     }
 
+
     static C2R SizeSetter(bool mayBlock, const C2P<C2StreamPictureSizeInfo::input> &oldMe,
                           C2P<C2StreamPictureSizeInfo::input> &me) {
         (void)mayBlock;
@@ -336,6 +352,13 @@
         return C2R::Ok();
     }
 
+    static C2R PictureQuantizationSetter(bool mayBlock,
+                                         C2P<C2StreamPictureQuantizationTuning::output> &me) {
+        (void)mayBlock;
+        (void)me;
+        return C2R::Ok();
+    }
+
     IV_PROFILE_T getProfile_l() const {
         switch (mProfileLevel->profile) {
         case PROFILE_AVC_CONSTRAINED_BASELINE:  [[fallthrough]];
@@ -393,6 +416,8 @@
     std::shared_ptr<C2StreamBitrateInfo::output> getBitrate_l() const { return mBitrate; }
     std::shared_ptr<C2StreamRequestSyncFrameTuning::output> getRequestSync_l() const { return mRequestSync; }
     std::shared_ptr<C2StreamGopTuning::output> getGop_l() const { return mGop; }
+    std::shared_ptr<C2StreamPictureQuantizationTuning::output> getPictureQuantization_l() const
+    { return mPictureQuantization; }
 
 private:
     std::shared_ptr<C2StreamUsageTuning::input> mUsage;
@@ -404,6 +429,7 @@
     std::shared_ptr<C2StreamProfileLevelInfo::output> mProfileLevel;
     std::shared_ptr<C2StreamSyncFrameIntervalTuning::output> mSyncFramePeriod;
     std::shared_ptr<C2StreamGopTuning::output> mGop;
+    std::shared_ptr<C2StreamPictureQuantizationTuning::output> mPictureQuantization;
 };
 
 #define ive_api_function  ih264e_api_function
@@ -428,11 +454,19 @@
 
 }  // namespace
 
+static IV_COLOR_FORMAT_T GetIvColorFormat() {
+    static IV_COLOR_FORMAT_T sColorFormat =
+        (GetYuv420FlexibleLayout() == FLEX_LAYOUT_SEMIPLANAR_UV) ? IV_YUV_420SP_UV :
+        (GetYuv420FlexibleLayout() == FLEX_LAYOUT_SEMIPLANAR_VU) ? IV_YUV_420SP_VU :
+        IV_YUV_420P;
+    return sColorFormat;
+}
+
 C2SoftAvcEnc::C2SoftAvcEnc(
         const char *name, c2_node_id_t id, const std::shared_ptr<IntfImpl> &intfImpl)
     : SimpleC2Component(std::make_shared<SimpleInterface<IntfImpl>>(name, id, intfImpl)),
       mIntf(intfImpl),
-      mIvVideoColorFormat(IV_YUV_420P),
+      mIvVideoColorFormat(GetIvColorFormat()),
       mAVCEncProfile(IV_PROFILE_BASE),
       mAVCEncLevel(41),
       mStarted(false),
@@ -664,20 +698,67 @@
     ive_ctl_set_qp_op_t s_qp_op;
     IV_STATUS_T status;
 
+    ALOGV("in setQp()");
+
+    // set the defaults
     s_qp_ip.e_cmd = IVE_CMD_VIDEO_CTL;
     s_qp_ip.e_sub_cmd = IVE_CMD_CTL_SET_QP;
 
-    s_qp_ip.u4_i_qp = DEFAULT_I_QP;
-    s_qp_ip.u4_i_qp_max = DEFAULT_QP_MAX;
-    s_qp_ip.u4_i_qp_min = DEFAULT_QP_MIN;
+    // these are the ones we're going to set, so we want them to default
+    // to the DEFAULT values for the codec instead of the CODEC_ bounds
+    int32_t iMin = INT32_MIN, pMin = INT32_MIN, bMin = INT32_MIN;
+    int32_t iMax = INT32_MAX, pMax = INT32_MAX, bMax = INT32_MAX;
 
-    s_qp_ip.u4_p_qp = DEFAULT_P_QP;
-    s_qp_ip.u4_p_qp_max = DEFAULT_QP_MAX;
-    s_qp_ip.u4_p_qp_min = DEFAULT_QP_MIN;
+    std::shared_ptr<C2StreamPictureQuantizationTuning::output> qp =
+                    mIntf->getPictureQuantization_l();
+    for (size_t i = 0; i < qp->flexCount(); ++i) {
+        const C2PictureQuantizationStruct &layer = qp->m.values[i];
 
-    s_qp_ip.u4_b_qp = DEFAULT_P_QP;
-    s_qp_ip.u4_b_qp_max = DEFAULT_QP_MAX;
-    s_qp_ip.u4_b_qp_min = DEFAULT_QP_MIN;
+        if (layer.type_ == C2Config::picture_type_t(I_FRAME)) {
+            iMax = layer.max;
+            iMin = layer.min;
+            ALOGV("iMin %d iMax %d", iMin, iMax);
+        } else if (layer.type_ == C2Config::picture_type_t(P_FRAME)) {
+            pMax = layer.max;
+            pMin = layer.min;
+            ALOGV("pMin %d pMax %d", pMin, pMax);
+        } else if (layer.type_ == C2Config::picture_type_t(B_FRAME)) {
+            bMax = layer.max;
+            bMin = layer.min;
+            ALOGV("bMin %d bMax %d", bMin, bMax);
+        }
+    }
+
+    // INT32_{MIN,MAX} means unspecified, so use the codec's default
+    if (iMax == INT32_MAX) iMax = DEFAULT_I_QP_MAX;
+    if (iMin == INT32_MIN) iMin = DEFAULT_I_QP_MIN;
+    if (pMax == INT32_MAX) pMax = DEFAULT_P_QP_MAX;
+    if (pMin == INT32_MIN) pMin = DEFAULT_P_QP_MIN;
+    if (bMax == INT32_MAX) bMax = DEFAULT_B_QP_MAX;
+    if (bMin == INT32_MIN) bMin = DEFAULT_B_QP_MIN;
+
+    // ensure we have legal values
+    iMax = std::clamp(iMax, CODEC_QP_MIN, CODEC_QP_MAX);
+    iMin = std::clamp(iMin, CODEC_QP_MIN, CODEC_QP_MAX);
+    pMax = std::clamp(pMax, CODEC_QP_MIN, CODEC_QP_MAX);
+    pMin = std::clamp(pMin, CODEC_QP_MIN, CODEC_QP_MAX);
+    bMax = std::clamp(bMax, CODEC_QP_MIN, CODEC_QP_MAX);
+    bMin = std::clamp(bMin, CODEC_QP_MIN, CODEC_QP_MAX);
+
+    s_qp_ip.u4_i_qp_max = iMax;
+    s_qp_ip.u4_i_qp_min = iMin;
+    s_qp_ip.u4_p_qp_max = pMax;
+    s_qp_ip.u4_p_qp_min = pMin;
+    s_qp_ip.u4_b_qp_max = bMax;
+    s_qp_ip.u4_b_qp_min = bMin;
+
+    // ensure initial qp values are within our newly configured bounds...
+    s_qp_ip.u4_i_qp = std::clamp(DEFAULT_I_QP, iMin, iMax);
+    s_qp_ip.u4_p_qp = std::clamp(DEFAULT_P_QP, pMin, pMax);
+    s_qp_ip.u4_b_qp = std::clamp(DEFAULT_B_QP, bMin, bMax);
+
+    ALOGV("setting QP: i %d-%d p %d-%d b %d-%d", iMin, iMax, pMin, pMax, bMin, bMax);
+
 
     s_qp_ip.u4_timestamp_high = -1;
     s_qp_ip.u4_timestamp_low = -1;
@@ -953,8 +1034,7 @@
     // Assume worst case output buffer size to be equal to number of bytes in input
     mOutBufferSize = std::max(width * height * 3 / 2, kMinOutBufferSize);
 
-    // TODO
-    mIvVideoColorFormat = IV_YUV_420P;
+    mIvVideoColorFormat = GetIvColorFormat();
 
     ALOGD("Params width %d height %d level %d colorFormat %d bframes %d", width,
             height, mAVCEncLevel, mIvVideoColorFormat, mBframes);
@@ -1009,29 +1089,31 @@
 
     /* Getting MemRecords Attributes */
     {
-        iv_fill_mem_rec_ip_t s_fill_mem_rec_ip;
-        iv_fill_mem_rec_op_t s_fill_mem_rec_op;
+        ih264e_fill_mem_rec_ip_t s_ih264e_mem_rec_ip = {};
+        ih264e_fill_mem_rec_op_t s_ih264e_mem_rec_op = {};
+        iv_fill_mem_rec_ip_t *ps_fill_mem_rec_ip = &s_ih264e_mem_rec_ip.s_ive_ip;
+        iv_fill_mem_rec_op_t *ps_fill_mem_rec_op = &s_ih264e_mem_rec_op.s_ive_op;
 
-        s_fill_mem_rec_ip.u4_size = sizeof(iv_fill_mem_rec_ip_t);
-        s_fill_mem_rec_op.u4_size = sizeof(iv_fill_mem_rec_op_t);
+        ps_fill_mem_rec_ip->u4_size = sizeof(ih264e_fill_mem_rec_ip_t);
+        ps_fill_mem_rec_op->u4_size = sizeof(ih264e_fill_mem_rec_op_t);
 
-        s_fill_mem_rec_ip.e_cmd = IV_CMD_FILL_NUM_MEM_REC;
-        s_fill_mem_rec_ip.ps_mem_rec = mMemRecords;
-        s_fill_mem_rec_ip.u4_num_mem_rec = mNumMemRecords;
-        s_fill_mem_rec_ip.u4_max_wd = width;
-        s_fill_mem_rec_ip.u4_max_ht = height;
-        s_fill_mem_rec_ip.u4_max_level = mAVCEncLevel;
-        s_fill_mem_rec_ip.e_color_format = DEFAULT_INP_COLOR_FORMAT;
-        s_fill_mem_rec_ip.u4_max_ref_cnt = DEFAULT_MAX_REF_FRM;
-        s_fill_mem_rec_ip.u4_max_reorder_cnt = DEFAULT_MAX_REORDER_FRM;
-        s_fill_mem_rec_ip.u4_max_srch_rng_x = DEFAULT_MAX_SRCH_RANGE_X;
-        s_fill_mem_rec_ip.u4_max_srch_rng_y = DEFAULT_MAX_SRCH_RANGE_Y;
+        ps_fill_mem_rec_ip->e_cmd = IV_CMD_FILL_NUM_MEM_REC;
+        ps_fill_mem_rec_ip->ps_mem_rec = mMemRecords;
+        ps_fill_mem_rec_ip->u4_num_mem_rec = mNumMemRecords;
+        ps_fill_mem_rec_ip->u4_max_wd = width;
+        ps_fill_mem_rec_ip->u4_max_ht = height;
+        ps_fill_mem_rec_ip->u4_max_level = mAVCEncLevel;
+        ps_fill_mem_rec_ip->e_color_format = DEFAULT_INP_COLOR_FORMAT;
+        ps_fill_mem_rec_ip->u4_max_ref_cnt = DEFAULT_MAX_REF_FRM;
+        ps_fill_mem_rec_ip->u4_max_reorder_cnt = DEFAULT_MAX_REORDER_FRM;
+        ps_fill_mem_rec_ip->u4_max_srch_rng_x = DEFAULT_MAX_SRCH_RANGE_X;
+        ps_fill_mem_rec_ip->u4_max_srch_rng_y = DEFAULT_MAX_SRCH_RANGE_Y;
 
-        status = ive_api_function(nullptr, &s_fill_mem_rec_ip, &s_fill_mem_rec_op);
+        status = ive_api_function(nullptr, &s_ih264e_mem_rec_ip, &s_ih264e_mem_rec_op);
 
         if (status != IV_SUCCESS) {
             ALOGE("Fill memory records failed = 0x%x\n",
-                    s_fill_mem_rec_op.u4_error_code);
+                    ps_fill_mem_rec_op->u4_error_code);
             return C2_CORRUPTED;
         }
     }
@@ -1060,48 +1142,51 @@
 
     /* Codec Instance Creation */
     {
-        ive_init_ip_t s_init_ip;
-        ive_init_op_t s_init_op;
+        ih264e_init_ip_t s_enc_ip = {};
+        ih264e_init_op_t s_enc_op = {};
+
+        ive_init_ip_t *ps_init_ip = &s_enc_ip.s_ive_ip;
+        ive_init_op_t *ps_init_op = &s_enc_op.s_ive_op;
 
         mCodecCtx = (iv_obj_t *)mMemRecords[0].pv_base;
         mCodecCtx->u4_size = sizeof(iv_obj_t);
         mCodecCtx->pv_fxns = (void *)ive_api_function;
 
-        s_init_ip.u4_size = sizeof(ive_init_ip_t);
-        s_init_op.u4_size = sizeof(ive_init_op_t);
+        ps_init_ip->u4_size = sizeof(ih264e_init_ip_t);
+        ps_init_op->u4_size = sizeof(ih264e_init_op_t);
 
-        s_init_ip.e_cmd = IV_CMD_INIT;
-        s_init_ip.u4_num_mem_rec = mNumMemRecords;
-        s_init_ip.ps_mem_rec = mMemRecords;
-        s_init_ip.u4_max_wd = width;
-        s_init_ip.u4_max_ht = height;
-        s_init_ip.u4_max_ref_cnt = DEFAULT_MAX_REF_FRM;
-        s_init_ip.u4_max_reorder_cnt = DEFAULT_MAX_REORDER_FRM;
-        s_init_ip.u4_max_level = mAVCEncLevel;
-        s_init_ip.e_inp_color_fmt = mIvVideoColorFormat;
+        ps_init_ip->e_cmd = IV_CMD_INIT;
+        ps_init_ip->u4_num_mem_rec = mNumMemRecords;
+        ps_init_ip->ps_mem_rec = mMemRecords;
+        ps_init_ip->u4_max_wd = width;
+        ps_init_ip->u4_max_ht = height;
+        ps_init_ip->u4_max_ref_cnt = DEFAULT_MAX_REF_FRM;
+        ps_init_ip->u4_max_reorder_cnt = DEFAULT_MAX_REORDER_FRM;
+        ps_init_ip->u4_max_level = mAVCEncLevel;
+        ps_init_ip->e_inp_color_fmt = mIvVideoColorFormat;
 
         if (mReconEnable || mPSNREnable) {
-            s_init_ip.u4_enable_recon = 1;
+            ps_init_ip->u4_enable_recon = 1;
         } else {
-            s_init_ip.u4_enable_recon = 0;
+            ps_init_ip->u4_enable_recon = 0;
         }
-        s_init_ip.e_recon_color_fmt = DEFAULT_RECON_COLOR_FORMAT;
-        s_init_ip.e_rc_mode = DEFAULT_RC_MODE;
-        s_init_ip.u4_max_framerate = DEFAULT_MAX_FRAMERATE;
-        s_init_ip.u4_max_bitrate = DEFAULT_MAX_BITRATE;
-        s_init_ip.u4_num_bframes = mBframes;
-        s_init_ip.e_content_type = IV_PROGRESSIVE;
-        s_init_ip.u4_max_srch_rng_x = DEFAULT_MAX_SRCH_RANGE_X;
-        s_init_ip.u4_max_srch_rng_y = DEFAULT_MAX_SRCH_RANGE_Y;
-        s_init_ip.e_slice_mode = mSliceMode;
-        s_init_ip.u4_slice_param = mSliceParam;
-        s_init_ip.e_arch = mArch;
-        s_init_ip.e_soc = DEFAULT_SOC;
+        ps_init_ip->e_recon_color_fmt = DEFAULT_RECON_COLOR_FORMAT;
+        ps_init_ip->e_rc_mode = DEFAULT_RC_MODE;
+        ps_init_ip->u4_max_framerate = DEFAULT_MAX_FRAMERATE;
+        ps_init_ip->u4_max_bitrate = DEFAULT_MAX_BITRATE;
+        ps_init_ip->u4_num_bframes = mBframes;
+        ps_init_ip->e_content_type = IV_PROGRESSIVE;
+        ps_init_ip->u4_max_srch_rng_x = DEFAULT_MAX_SRCH_RANGE_X;
+        ps_init_ip->u4_max_srch_rng_y = DEFAULT_MAX_SRCH_RANGE_Y;
+        ps_init_ip->e_slice_mode = mSliceMode;
+        ps_init_ip->u4_slice_param = mSliceParam;
+        ps_init_ip->e_arch = mArch;
+        ps_init_ip->e_soc = DEFAULT_SOC;
 
-        status = ive_api_function(mCodecCtx, &s_init_ip, &s_init_op);
+        status = ive_api_function(mCodecCtx, &s_enc_ip, &s_enc_op);
 
         if (status != IV_SUCCESS) {
-            ALOGE("Init encoder failed = 0x%x\n", s_init_op.u4_error_code);
+            ALOGE("Init encoder failed = 0x%x\n", ps_init_op->u4_error_code);
             return C2_CORRUPTED;
         }
     }
@@ -1247,7 +1332,6 @@
               mSize->width, input->height(), mSize->height);
         return C2_BAD_VALUE;
     }
-    ALOGV("width = %d, height = %d", input->width(), input->height());
     const C2PlanarLayout &layout = input->layout();
     uint8_t *yPlane = const_cast<uint8_t *>(input->data()[C2PlanarLayout::PLANE_Y]);
     uint8_t *uPlane = const_cast<uint8_t *>(input->data()[C2PlanarLayout::PLANE_U]);
@@ -1284,7 +1368,8 @@
                 return C2_BAD_VALUE;
             }
 
-            if (layout.planes[layout.PLANE_Y].colInc == 1
+            if (mIvVideoColorFormat == IV_YUV_420P
+                    && layout.planes[layout.PLANE_Y].colInc == 1
                     && layout.planes[layout.PLANE_U].colInc == 1
                     && layout.planes[layout.PLANE_V].colInc == 1
                     && uStride == vStride
@@ -1292,21 +1377,61 @@
                 // I420 compatible - already set up above
                 break;
             }
+            if (mIvVideoColorFormat == IV_YUV_420SP_UV
+                    && layout.planes[layout.PLANE_Y].colInc == 1
+                    && layout.planes[layout.PLANE_U].colInc == 2
+                    && layout.planes[layout.PLANE_V].colInc == 2
+                    && uStride == vStride
+                    && yStride == vStride
+                    && uPlane + 1 == vPlane) {
+                // NV12 compatible - already set up above
+                break;
+            }
+            if (mIvVideoColorFormat == IV_YUV_420SP_VU
+                    && layout.planes[layout.PLANE_Y].colInc == 1
+                    && layout.planes[layout.PLANE_U].colInc == 2
+                    && layout.planes[layout.PLANE_V].colInc == 2
+                    && uStride == vStride
+                    && yStride == vStride
+                    && uPlane == vPlane + 1) {
+                // NV21 compatible - already set up above
+                break;
+            }
 
             // copy to I420
             yStride = width;
             uStride = vStride = yStride / 2;
             MemoryBlock conversionBuffer = mConversionBuffers.fetch(yPlaneSize * 3 / 2);
             mConversionBuffersInUse.emplace(conversionBuffer.data(), conversionBuffer);
-            MediaImage2 img = CreateYUV420PlanarMediaImage2(width, height, yStride, height);
+            MediaImage2 img;
+            switch (mIvVideoColorFormat) {
+                case IV_YUV_420P:
+                    img = CreateYUV420PlanarMediaImage2(width, height, yStride, height);
+                    yPlane = conversionBuffer.data();
+                    uPlane = yPlane + yPlaneSize;
+                    vPlane = uPlane + yPlaneSize / 4;
+                    break;
+                case IV_YUV_420SP_VU:
+                    img = CreateYUV420SemiPlanarMediaImage2(width, height, yStride, height);
+                    img.mPlane[MediaImage2::U].mOffset++;
+                    img.mPlane[MediaImage2::V].mOffset--;
+                    yPlane = conversionBuffer.data();
+                    vPlane = yPlane + yPlaneSize;
+                    uPlane = vPlane + 1;
+                    break;
+                case IV_YUV_420SP_UV:
+                default:
+                    img = CreateYUV420SemiPlanarMediaImage2(width, height, yStride, height);
+                    yPlane = conversionBuffer.data();
+                    uPlane = yPlane + yPlaneSize;
+                    vPlane = uPlane + 1;
+                    break;
+            }
             status_t err = ImageCopy(conversionBuffer.data(), &img, *input);
             if (err != OK) {
                 ALOGE("Buffer conversion failed: %d", err);
                 return C2_BAD_VALUE;
             }
-            yPlane = conversionBuffer.data();
-            uPlane = yPlane + yPlaneSize;
-            vPlane = uPlane + yPlaneSize / 4;
             break;
 
         }
@@ -1328,13 +1453,13 @@
             ps_inp_raw_buf->apv_bufs[1] = uPlane;
             ps_inp_raw_buf->apv_bufs[2] = vPlane;
 
-            ps_inp_raw_buf->au4_wd[0] = input->width();
-            ps_inp_raw_buf->au4_wd[1] = input->width() / 2;
-            ps_inp_raw_buf->au4_wd[2] = input->width() / 2;
+            ps_inp_raw_buf->au4_wd[0] = mSize->width;
+            ps_inp_raw_buf->au4_wd[1] = mSize->width / 2;
+            ps_inp_raw_buf->au4_wd[2] = mSize->width / 2;
 
-            ps_inp_raw_buf->au4_ht[0] = input->height();
-            ps_inp_raw_buf->au4_ht[1] = input->height() / 2;
-            ps_inp_raw_buf->au4_ht[2] = input->height() / 2;
+            ps_inp_raw_buf->au4_ht[0] = mSize->height;
+            ps_inp_raw_buf->au4_ht[1] = mSize->height / 2;
+            ps_inp_raw_buf->au4_ht[2] = mSize->height / 2;
 
             ps_inp_raw_buf->au4_strd[0] = yStride;
             ps_inp_raw_buf->au4_strd[1] = uStride;
@@ -1352,18 +1477,20 @@
             break;
         }
 
-        case IV_YUV_420SP_UV:
         case IV_YUV_420SP_VU:
+            uPlane = vPlane;
+            [[fallthrough]];
+        case IV_YUV_420SP_UV:
         default:
         {
             ps_inp_raw_buf->apv_bufs[0] = yPlane;
             ps_inp_raw_buf->apv_bufs[1] = uPlane;
 
-            ps_inp_raw_buf->au4_wd[0] = input->width();
-            ps_inp_raw_buf->au4_wd[1] = input->width();
+            ps_inp_raw_buf->au4_wd[0] = mSize->width;
+            ps_inp_raw_buf->au4_wd[1] = mSize->width / 2;
 
-            ps_inp_raw_buf->au4_ht[0] = input->height();
-            ps_inp_raw_buf->au4_ht[1] = input->height() / 2;
+            ps_inp_raw_buf->au4_ht[0] = mSize->height;
+            ps_inp_raw_buf->au4_ht[1] = mSize->height / 2;
 
             ps_inp_raw_buf->au4_strd[0] = yStride;
             ps_inp_raw_buf->au4_strd[1] = uStride;
@@ -1429,15 +1556,17 @@
     }
     // while (!mSawOutputEOS && !outQueue.empty()) {
     c2_status_t error;
-    ive_video_encode_ip_t s_encode_ip;
-    ive_video_encode_op_t s_encode_op;
-    memset(&s_encode_op, 0, sizeof(s_encode_op));
+    ih264e_video_encode_ip_t s_video_encode_ip = {};
+    ih264e_video_encode_op_t s_video_encode_op = {};
+    ive_video_encode_ip_t *ps_encode_ip = &s_video_encode_ip.s_ive_ip;
+    ive_video_encode_op_t *ps_encode_op = &s_video_encode_op.s_ive_op;
+    memset(ps_encode_op, 0, sizeof(*ps_encode_op));
 
     if (!mSpsPpsHeaderReceived) {
         constexpr uint32_t kHeaderLength = MIN_STREAM_SIZE;
         uint8_t header[kHeaderLength];
         error = setEncodeArgs(
-                &s_encode_ip, &s_encode_op, nullptr, header, kHeaderLength, workIndex);
+                ps_encode_ip, ps_encode_op, nullptr, header, kHeaderLength, workIndex);
         if (error != C2_OK) {
             ALOGE("setEncodeArgs failed: %d", error);
             mSignalledError = true;
@@ -1445,22 +1574,22 @@
             work->workletsProcessed = 1u;
             return;
         }
-        status = ive_api_function(mCodecCtx, &s_encode_ip, &s_encode_op);
+        status = ive_api_function(mCodecCtx, ps_encode_ip, ps_encode_op);
 
         if (IV_SUCCESS != status) {
             ALOGE("Encode header failed = 0x%x\n",
-                    s_encode_op.u4_error_code);
+                    ps_encode_op->u4_error_code);
             work->workletsProcessed = 1u;
             return;
         } else {
             ALOGV("Bytes Generated in header %d\n",
-                    s_encode_op.s_out_buf.u4_bytes);
+                    ps_encode_op->s_out_buf.u4_bytes);
         }
 
         mSpsPpsHeaderReceived = true;
 
         std::unique_ptr<C2StreamInitDataInfo::output> csd =
-            C2StreamInitDataInfo::output::AllocUnique(s_encode_op.s_out_buf.u4_bytes, 0u);
+            C2StreamInitDataInfo::output::AllocUnique(ps_encode_op->s_out_buf.u4_bytes, 0u);
         if (!csd) {
             ALOGE("CSD allocation failed");
             mSignalledError = true;
@@ -1468,7 +1597,7 @@
             work->workletsProcessed = 1u;
             return;
         }
-        memcpy(csd->m.value, header, s_encode_op.s_out_buf.u4_bytes);
+        memcpy(csd->m.value, header, ps_encode_op->s_out_buf.u4_bytes);
         work->worklets.front()->output.configUpdate.push_back(std::move(csd));
 
         DUMP_TO_FILE(
@@ -1562,7 +1691,7 @@
         }
 
         error = setEncodeArgs(
-                &s_encode_ip, &s_encode_op, view.get(), wView.base(), wView.capacity(), workIndex);
+                ps_encode_ip, ps_encode_op, view.get(), wView.base(), wView.capacity(), workIndex);
         if (error != C2_OK) {
             ALOGE("setEncodeArgs failed : %d", error);
             mSignalledError = true;
@@ -1579,17 +1708,17 @@
         /* Compute time elapsed between end of previous decode()
          * to start of current decode() */
         TIME_DIFF(mTimeEnd, mTimeStart, timeDelay);
-        status = ive_api_function(mCodecCtx, &s_encode_ip, &s_encode_op);
+        status = ive_api_function(mCodecCtx, &s_video_encode_ip, &s_video_encode_op);
 
         if (IV_SUCCESS != status) {
-            if ((s_encode_op.u4_error_code & 0xFF) == IH264E_BITSTREAM_BUFFER_OVERFLOW) {
+            if ((ps_encode_op->u4_error_code & 0xFF) == IH264E_BITSTREAM_BUFFER_OVERFLOW) {
                 // TODO: use IVE_CMD_CTL_GETBUFINFO for proper max input size?
                 mOutBufferSize *= 2;
                 mOutBlock.reset();
                 continue;
             }
             ALOGE("Encode Frame failed = 0x%x\n",
-                    s_encode_op.u4_error_code);
+                    ps_encode_op->u4_error_code);
             mSignalledError = true;
             work->result = C2_CORRUPTED;
             work->workletsProcessed = 1u;
@@ -1599,7 +1728,7 @@
 
     // Hold input buffer reference
     if (inputBuffer) {
-        mBuffers[s_encode_ip.s_inp_buf.apv_bufs[0]] = inputBuffer;
+        mBuffers[ps_encode_ip->s_inp_buf.apv_bufs[0]] = inputBuffer;
     }
 
     GETTIME(&mTimeEnd, nullptr);
@@ -1607,9 +1736,9 @@
     TIME_DIFF(mTimeStart, mTimeEnd, timeTaken);
 
     ALOGV("timeTaken=%6d delay=%6d numBytes=%6d", timeTaken, timeDelay,
-            s_encode_op.s_out_buf.u4_bytes);
+            ps_encode_op->s_out_buf.u4_bytes);
 
-    void *freed = s_encode_op.s_inp_buf.apv_bufs[0];
+    void *freed = ps_encode_op->s_inp_buf.apv_bufs[0];
     /* If encoder frees up an input buffer, mark it as free */
     if (freed != nullptr) {
         if (mBuffers.count(freed) == 0u) {
@@ -1621,17 +1750,17 @@
         }
     }
 
-    if (s_encode_op.output_present) {
-        if (!s_encode_op.s_out_buf.u4_bytes) {
+    if (ps_encode_op->output_present) {
+        if (!ps_encode_op->s_out_buf.u4_bytes) {
             ALOGE("Error: Output present but bytes generated is zero");
             mSignalledError = true;
             work->result = C2_CORRUPTED;
             work->workletsProcessed = 1u;
             return;
         }
-        uint64_t workId = ((uint64_t)s_encode_op.u4_timestamp_high << 32) |
-                      s_encode_op.u4_timestamp_low;
-        finishWork(workId, work, &s_encode_op);
+        uint64_t workId = ((uint64_t)ps_encode_op->u4_timestamp_high << 32) |
+                      ps_encode_op->u4_timestamp_low;
+        finishWork(workId, work, ps_encode_op);
     }
     if (mSawInputEOS) {
         drainInternal(DRAIN_COMPONENT_WITH_EOS, pool, work);
@@ -1671,9 +1800,11 @@
             ALOGE("graphic view map failed %d", wView.error());
             return C2_CORRUPTED;
         }
-        ive_video_encode_ip_t s_encode_ip;
-        ive_video_encode_op_t s_encode_op;
-        if (C2_OK != setEncodeArgs(&s_encode_ip, &s_encode_op, nullptr,
+        ih264e_video_encode_ip_t s_video_encode_ip = {};
+        ih264e_video_encode_op_t s_video_encode_op = {};
+        ive_video_encode_ip_t *ps_encode_ip = &s_video_encode_ip.s_ive_ip;
+        ive_video_encode_op_t *ps_encode_op = &s_video_encode_op.s_ive_op;
+        if (C2_OK != setEncodeArgs(ps_encode_ip, ps_encode_op, nullptr,
                                    wView.base(), wView.capacity(), 0)) {
             ALOGE("setEncodeArgs failed for drainInternal");
             mSignalledError = true;
@@ -1681,9 +1812,9 @@
             work->workletsProcessed = 1u;
             return C2_CORRUPTED;
         }
-        (void)ive_api_function(mCodecCtx, &s_encode_ip, &s_encode_op);
+        (void)ive_api_function(mCodecCtx, &s_video_encode_ip, &s_video_encode_op);
 
-        void *freed = s_encode_op.s_inp_buf.apv_bufs[0];
+        void *freed = ps_encode_op->s_inp_buf.apv_bufs[0];
         /* If encoder frees up an input buffer, mark it as free */
         if (freed != nullptr) {
             if (mBuffers.count(freed) == 0u) {
@@ -1695,10 +1826,10 @@
             }
         }
 
-        if (s_encode_op.output_present) {
-            uint64_t workId = ((uint64_t)s_encode_op.u4_timestamp_high << 32) |
-                          s_encode_op.u4_timestamp_low;
-            finishWork(workId, work, &s_encode_op);
+        if (ps_encode_op->output_present) {
+            uint64_t workId = ((uint64_t)ps_encode_op->u4_timestamp_high << 32) |
+                          ps_encode_op->u4_timestamp_low;
+            finishWork(workId, work, ps_encode_op);
         } else {
             if (work->workletsProcessed != 1u) {
                 work->worklets.front()->output.flags = work->input.flags;
diff --git a/media/codec2/components/avc/C2SoftAvcEnc.h b/media/codec2/components/avc/C2SoftAvcEnc.h
index 555055b..673a282 100644
--- a/media/codec2/components/avc/C2SoftAvcEnc.h
+++ b/media/codec2/components/avc/C2SoftAvcEnc.h
@@ -24,8 +24,7 @@
 #include <SimpleC2Component.h>
 
 #include "ih264_typedefs.h"
-#include "iv2.h"
-#include "ive2.h"
+#include "ih264e.h"
 
 namespace android {
 
@@ -100,6 +99,11 @@
 #define STRLENGTH                   500
 #define DEFAULT_CONSTRAINED_INTRA   0
 
+/** QP limits as specified by H.264 */
+#define CODEC_QP_MIN                0
+#define CODEC_QP_MAX                51
+
+
 #define MIN(a, b) ((a) < (b))? (a) : (b)
 #define MAX(a, b) ((a) > (b))? (a) : (b)
 #define ALIGN16(x) ((((x) + 15) >> 4) << 4)
diff --git a/media/codec2/components/flac/Android.bp b/media/codec2/components/flac/Android.bp
index 1143bed..38dfce4 100644
--- a/media/codec2/components/flac/Android.bp
+++ b/media/codec2/components/flac/Android.bp
@@ -42,11 +42,8 @@
 
     srcs: ["C2SoftFlacEnc.cpp"],
 
-    shared_libs: [
-        "libaudioutils",
-    ],
-
     static_libs: [
         "libFLAC",
+        "libaudioutils",
     ],
 }
diff --git a/media/codec2/components/hevc/C2SoftHevcDec.cpp b/media/codec2/components/hevc/C2SoftHevcDec.cpp
index 56dd26b..6bcf3a2 100644
--- a/media/codec2/components/hevc/C2SoftHevcDec.cpp
+++ b/media/codec2/components/hevc/C2SoftHevcDec.cpp
@@ -26,7 +26,6 @@
 #include <SimpleC2Interface.h>
 
 #include "C2SoftHevcDec.h"
-#include "ihevcd_cxa.h"
 
 namespace android {
 
@@ -380,12 +379,14 @@
     }
 
     while (true) {
-        ivd_video_decode_ip_t s_decode_ip;
-        ivd_video_decode_op_t s_decode_op;
+        ihevcd_cxa_video_decode_ip_t s_hevcd_decode_ip = {};
+        ihevcd_cxa_video_decode_op_t s_hevcd_decode_op = {};
+        ivd_video_decode_ip_t *ps_decode_ip = &s_hevcd_decode_ip.s_ivd_video_decode_ip_t;
+        ivd_video_decode_op_t *ps_decode_op = &s_hevcd_decode_op.s_ivd_video_decode_op_t;
 
-        setDecodeArgs(&s_decode_ip, &s_decode_op, nullptr, nullptr, 0, 0, 0);
-        (void) ivdec_api_function(mDecHandle, &s_decode_ip, &s_decode_op);
-        if (0 == s_decode_op.u4_output_present) {
+        setDecodeArgs(ps_decode_ip, ps_decode_op, nullptr, nullptr, 0, 0, 0);
+        (void) ivdec_api_function(mDecHandle, ps_decode_ip, ps_decode_op);
+        if (0 == ps_decode_op->u4_output_present) {
             resetPlugin();
             break;
         }
@@ -400,8 +401,8 @@
 }
 
 status_t C2SoftHevcDec::createDecoder() {
-    ivdext_create_ip_t s_create_ip;
-    ivdext_create_op_t s_create_op;
+    ivdext_create_ip_t s_create_ip = {};
+    ivdext_create_op_t s_create_op = {};
 
     s_create_ip.s_ivd_create_ip_t.u4_size = sizeof(ivdext_create_ip_t);
     s_create_ip.s_ivd_create_ip_t.e_cmd = IVD_CMD_CREATE;
@@ -427,8 +428,8 @@
 }
 
 status_t C2SoftHevcDec::setNumCores() {
-    ivdext_ctl_set_num_cores_ip_t s_set_num_cores_ip;
-    ivdext_ctl_set_num_cores_op_t s_set_num_cores_op;
+    ivdext_ctl_set_num_cores_ip_t s_set_num_cores_ip = {};
+    ivdext_ctl_set_num_cores_op_t s_set_num_cores_op = {};
 
     s_set_num_cores_ip.u4_size = sizeof(ivdext_ctl_set_num_cores_ip_t);
     s_set_num_cores_ip.e_cmd = IVD_CMD_VIDEO_CTL;
@@ -447,22 +448,26 @@
 }
 
 status_t C2SoftHevcDec::setParams(size_t stride, IVD_VIDEO_DECODE_MODE_T dec_mode) {
-    ivd_ctl_set_config_ip_t s_set_dyn_params_ip;
-    ivd_ctl_set_config_op_t s_set_dyn_params_op;
+    ihevcd_cxa_ctl_set_config_ip_t s_hevcd_set_dyn_params_ip = {};
+    ihevcd_cxa_ctl_set_config_op_t s_hevcd_set_dyn_params_op = {};
+    ivd_ctl_set_config_ip_t *ps_set_dyn_params_ip =
+        &s_hevcd_set_dyn_params_ip.s_ivd_ctl_set_config_ip_t;
+    ivd_ctl_set_config_op_t *ps_set_dyn_params_op =
+        &s_hevcd_set_dyn_params_op.s_ivd_ctl_set_config_op_t;
 
-    s_set_dyn_params_ip.u4_size = sizeof(ivd_ctl_set_config_ip_t);
-    s_set_dyn_params_ip.e_cmd = IVD_CMD_VIDEO_CTL;
-    s_set_dyn_params_ip.e_sub_cmd = IVD_CMD_CTL_SETPARAMS;
-    s_set_dyn_params_ip.u4_disp_wd = (UWORD32) stride;
-    s_set_dyn_params_ip.e_frm_skip_mode = IVD_SKIP_NONE;
-    s_set_dyn_params_ip.e_frm_out_mode = IVD_DISPLAY_FRAME_OUT;
-    s_set_dyn_params_ip.e_vid_dec_mode = dec_mode;
-    s_set_dyn_params_op.u4_size = sizeof(ivd_ctl_set_config_op_t);
+    ps_set_dyn_params_ip->u4_size = sizeof(ihevcd_cxa_ctl_set_config_ip_t);
+    ps_set_dyn_params_ip->e_cmd = IVD_CMD_VIDEO_CTL;
+    ps_set_dyn_params_ip->e_sub_cmd = IVD_CMD_CTL_SETPARAMS;
+    ps_set_dyn_params_ip->u4_disp_wd = (UWORD32) stride;
+    ps_set_dyn_params_ip->e_frm_skip_mode = IVD_SKIP_NONE;
+    ps_set_dyn_params_ip->e_frm_out_mode = IVD_DISPLAY_FRAME_OUT;
+    ps_set_dyn_params_ip->e_vid_dec_mode = dec_mode;
+    ps_set_dyn_params_op->u4_size = sizeof(ihevcd_cxa_ctl_set_config_op_t);
     IV_API_CALL_STATUS_T status = ivdec_api_function(mDecHandle,
-                                                     &s_set_dyn_params_ip,
-                                                     &s_set_dyn_params_op);
+                                                     ps_set_dyn_params_ip,
+                                                     ps_set_dyn_params_op);
     if (status != IV_SUCCESS) {
-        ALOGE("error in %s: 0x%x", __func__, s_set_dyn_params_op.u4_error_code);
+        ALOGE("error in %s: 0x%x", __func__, ps_set_dyn_params_op->u4_error_code);
         return UNKNOWN_ERROR;
     }
 
@@ -470,8 +475,8 @@
 }
 
 status_t C2SoftHevcDec::getVersion() {
-    ivd_ctl_getversioninfo_ip_t s_get_versioninfo_ip;
-    ivd_ctl_getversioninfo_op_t s_get_versioninfo_op;
+    ivd_ctl_getversioninfo_ip_t s_get_versioninfo_ip = {};
+    ivd_ctl_getversioninfo_op_t s_get_versioninfo_op = {};
     UWORD8 au1_buf[512];
 
     s_get_versioninfo_ip.u4_size = sizeof(ivd_ctl_getversioninfo_ip_t);
@@ -529,7 +534,7 @@
         if (OK != setParams(mStride, IVD_DECODE_FRAME)) return false;
     }
 
-    ps_decode_ip->u4_size = sizeof(ivd_video_decode_ip_t);
+    ps_decode_ip->u4_size = sizeof(ihevcd_cxa_video_decode_ip_t);
     ps_decode_ip->e_cmd = IVD_CMD_VIDEO_DECODE;
     if (inBuffer) {
         ps_decode_ip->u4_ts = tsMarker;
@@ -558,15 +563,15 @@
         ps_decode_ip->s_out_buffer.pu1_bufs[2] = mOutBufferFlush + lumaSize + chromaSize;
     }
     ps_decode_ip->s_out_buffer.u4_num_bufs = 3;
-    ps_decode_op->u4_size = sizeof(ivd_video_decode_op_t);
+    ps_decode_op->u4_size = sizeof(ihevcd_cxa_video_decode_op_t);
     ps_decode_op->u4_output_present = 0;
 
     return true;
 }
 
 bool C2SoftHevcDec::getVuiParams() {
-    ivdext_ctl_get_vui_params_ip_t s_get_vui_params_ip;
-    ivdext_ctl_get_vui_params_op_t s_get_vui_params_op;
+    ivdext_ctl_get_vui_params_ip_t s_get_vui_params_ip = {};
+    ivdext_ctl_get_vui_params_op_t s_get_vui_params_op = {};
 
     s_get_vui_params_ip.u4_size = sizeof(ivdext_ctl_get_vui_params_ip_t);
     s_get_vui_params_ip.e_cmd = IVD_CMD_VIDEO_CTL;
@@ -614,8 +619,8 @@
 }
 
 status_t C2SoftHevcDec::setFlushMode() {
-    ivd_ctl_flush_ip_t s_set_flush_ip;
-    ivd_ctl_flush_op_t s_set_flush_op;
+    ivd_ctl_flush_ip_t s_set_flush_ip = {};
+    ivd_ctl_flush_op_t s_set_flush_op = {};
 
     s_set_flush_ip.u4_size = sizeof(ivd_ctl_flush_ip_t);
     s_set_flush_ip.e_cmd = IVD_CMD_VIDEO_CTL;
@@ -633,8 +638,8 @@
 }
 
 status_t C2SoftHevcDec::resetDecoder() {
-    ivd_ctl_reset_ip_t s_reset_ip;
-    ivd_ctl_reset_op_t s_reset_op;
+    ivd_ctl_reset_ip_t s_reset_ip = {};
+    ivd_ctl_reset_op_t s_reset_op = {};
 
     s_reset_ip.u4_size = sizeof(ivd_ctl_reset_ip_t);
     s_reset_ip.e_cmd = IVD_CMD_VIDEO_CTL;
@@ -662,8 +667,8 @@
 
 status_t C2SoftHevcDec::deleteDecoder() {
     if (mDecHandle) {
-        ivdext_delete_ip_t s_delete_ip;
-        ivdext_delete_op_t s_delete_op;
+        ivdext_delete_ip_t s_delete_ip = {};
+        ivdext_delete_op_t s_delete_op = {};
 
         s_delete_ip.s_ivd_delete_ip_t.u4_size = sizeof(ivdext_delete_ip_t);
         s_delete_ip.s_ivd_delete_ip_t.e_cmd = IVD_CMD_DELETE;
@@ -835,9 +840,11 @@
             work->result = wView.error();
             return;
         }
-        ivd_video_decode_ip_t s_decode_ip;
-        ivd_video_decode_op_t s_decode_op;
-        if (!setDecodeArgs(&s_decode_ip, &s_decode_op, &rView, &wView,
+        ihevcd_cxa_video_decode_ip_t s_hevcd_decode_ip = {};
+        ihevcd_cxa_video_decode_op_t s_hevcd_decode_op = {};
+        ivd_video_decode_ip_t *ps_decode_ip = &s_hevcd_decode_ip.s_ivd_video_decode_ip_t;
+        ivd_video_decode_op_t *ps_decode_op = &s_hevcd_decode_op.s_ivd_video_decode_op_t;
+        if (!setDecodeArgs(ps_decode_ip, ps_decode_op, &rView, &wView,
                            inOffset + inPos, inSize - inPos, workIndex)) {
             mSignalledError = true;
             work->workletsProcessed = 1u;
@@ -852,26 +859,26 @@
         WORD32 delay;
         GETTIME(&mTimeStart, nullptr);
         TIME_DIFF(mTimeEnd, mTimeStart, delay);
-        (void) ivdec_api_function(mDecHandle, &s_decode_ip, &s_decode_op);
+        (void) ivdec_api_function(mDecHandle, ps_decode_ip, ps_decode_op);
         WORD32 decodeTime;
         GETTIME(&mTimeEnd, nullptr);
         TIME_DIFF(mTimeStart, mTimeEnd, decodeTime);
         ALOGV("decodeTime=%6d delay=%6d numBytes=%6d", decodeTime, delay,
-              s_decode_op.u4_num_bytes_consumed);
-        if (IVD_MEM_ALLOC_FAILED == (s_decode_op.u4_error_code & IVD_ERROR_MASK)) {
+              ps_decode_op->u4_num_bytes_consumed);
+        if (IVD_MEM_ALLOC_FAILED == (ps_decode_op->u4_error_code & IVD_ERROR_MASK)) {
             ALOGE("allocation failure in decoder");
             mSignalledError = true;
             work->workletsProcessed = 1u;
             work->result = C2_CORRUPTED;
             return;
         } else if (IVD_STREAM_WIDTH_HEIGHT_NOT_SUPPORTED ==
-                   (s_decode_op.u4_error_code & IVD_ERROR_MASK)) {
+                   (ps_decode_op->u4_error_code & IVD_ERROR_MASK)) {
             ALOGE("unsupported resolution : %dx%d", mWidth, mHeight);
             mSignalledError = true;
             work->workletsProcessed = 1u;
             work->result = C2_CORRUPTED;
             return;
-        } else if (IVD_RES_CHANGED == (s_decode_op.u4_error_code & IVD_ERROR_MASK)) {
+        } else if (IVD_RES_CHANGED == (ps_decode_op->u4_error_code & IVD_ERROR_MASK)) {
             ALOGV("resolution changed");
             drainInternal(DRAIN_COMPONENT_NO_EOS, pool, work);
             resetDecoder();
@@ -880,16 +887,16 @@
 
             /* Decode header and get new dimensions */
             setParams(mStride, IVD_DECODE_HEADER);
-            (void) ivdec_api_function(mDecHandle, &s_decode_ip, &s_decode_op);
-        } else if (IS_IVD_FATAL_ERROR(s_decode_op.u4_error_code)) {
-            ALOGE("Fatal error in decoder 0x%x", s_decode_op.u4_error_code);
+            (void) ivdec_api_function(mDecHandle, ps_decode_ip, ps_decode_op);
+        } else if (IS_IVD_FATAL_ERROR(ps_decode_op->u4_error_code)) {
+            ALOGE("Fatal error in decoder 0x%x", ps_decode_op->u4_error_code);
             mSignalledError = true;
             work->workletsProcessed = 1u;
             work->result = C2_CORRUPTED;
             return;
         }
-        if (s_decode_op.i4_reorder_depth >= 0 && mOutputDelay != s_decode_op.i4_reorder_depth) {
-            mOutputDelay = s_decode_op.i4_reorder_depth;
+        if (ps_decode_op->i4_reorder_depth >= 0 && mOutputDelay != ps_decode_op->i4_reorder_depth) {
+            mOutputDelay = ps_decode_op->i4_reorder_depth;
             ALOGV("New Output delay %d ", mOutputDelay);
 
             C2PortActualDelayTuning::output outputDelay(mOutputDelay);
@@ -906,17 +913,16 @@
                 work->result = C2_CORRUPTED;
                 return;
             }
-            continue;
         }
-        if (0 < s_decode_op.u4_pic_wd && 0 < s_decode_op.u4_pic_ht) {
+        if (0 < ps_decode_op->u4_pic_wd && 0 < ps_decode_op->u4_pic_ht) {
             if (mHeaderDecoded == false) {
                 mHeaderDecoded = true;
-                setParams(ALIGN32(s_decode_op.u4_pic_wd), IVD_DECODE_FRAME);
+                setParams(ALIGN32(ps_decode_op->u4_pic_wd), IVD_DECODE_FRAME);
             }
-            if (s_decode_op.u4_pic_wd != mWidth ||  s_decode_op.u4_pic_ht != mHeight) {
-                mWidth = s_decode_op.u4_pic_wd;
-                mHeight = s_decode_op.u4_pic_ht;
-                CHECK_EQ(0u, s_decode_op.u4_output_present);
+            if (ps_decode_op->u4_pic_wd != mWidth ||  ps_decode_op->u4_pic_ht != mHeight) {
+                mWidth = ps_decode_op->u4_pic_wd;
+                mHeight = ps_decode_op->u4_pic_ht;
+                CHECK_EQ(0u, ps_decode_op->u4_output_present);
 
                 C2StreamPictureSizeInfo::output size(0u, mWidth, mHeight);
                 std::vector<std::unique_ptr<C2SettingResult>> failures;
@@ -936,15 +942,15 @@
             }
         }
         (void) getVuiParams();
-        hasPicture |= (1 == s_decode_op.u4_frame_decoded_flag);
-        if (s_decode_op.u4_output_present) {
-            finishWork(s_decode_op.u4_ts, work);
+        hasPicture |= (1 == ps_decode_op->u4_frame_decoded_flag);
+        if (ps_decode_op->u4_output_present) {
+            finishWork(ps_decode_op->u4_ts, work);
         }
-        if (0 == s_decode_op.u4_num_bytes_consumed) {
+        if (0 == ps_decode_op->u4_num_bytes_consumed) {
             ALOGD("Bytes consumed is zero. Ignoring remaining bytes");
             break;
         }
-        inPos += s_decode_op.u4_num_bytes_consumed;
+        inPos += ps_decode_op->u4_num_bytes_consumed;
         if (hasPicture && (inSize - inPos)) {
             ALOGD("decoded frame in current access nal, ignoring further trailing bytes %d",
                   (int)inSize - (int)inPos);
@@ -986,16 +992,18 @@
             ALOGE("graphic view map failed %d", wView.error());
             return C2_CORRUPTED;
         }
-        ivd_video_decode_ip_t s_decode_ip;
-        ivd_video_decode_op_t s_decode_op;
-        if (!setDecodeArgs(&s_decode_ip, &s_decode_op, nullptr, &wView, 0, 0, 0)) {
+        ihevcd_cxa_video_decode_ip_t s_hevcd_decode_ip = {};
+        ihevcd_cxa_video_decode_op_t s_hevcd_decode_op = {};
+        ivd_video_decode_ip_t *ps_decode_ip = &s_hevcd_decode_ip.s_ivd_video_decode_ip_t;
+        ivd_video_decode_op_t *ps_decode_op = &s_hevcd_decode_op.s_ivd_video_decode_op_t;
+        if (!setDecodeArgs(ps_decode_ip, ps_decode_op, nullptr, &wView, 0, 0, 0)) {
             mSignalledError = true;
             work->workletsProcessed = 1u;
             return C2_CORRUPTED;
         }
-        (void) ivdec_api_function(mDecHandle, &s_decode_ip, &s_decode_op);
-        if (s_decode_op.u4_output_present) {
-            finishWork(s_decode_op.u4_ts, work);
+        (void) ivdec_api_function(mDecHandle, ps_decode_ip, ps_decode_op);
+        if (ps_decode_op->u4_output_present) {
+            finishWork(ps_decode_op->u4_ts, work);
         } else {
             fillEmptyWork(work);
             break;
diff --git a/media/codec2/components/hevc/C2SoftHevcDec.h b/media/codec2/components/hevc/C2SoftHevcDec.h
index 600d7c1..b9b0a48 100644
--- a/media/codec2/components/hevc/C2SoftHevcDec.h
+++ b/media/codec2/components/hevc/C2SoftHevcDec.h
@@ -23,8 +23,7 @@
 #include <SimpleC2Component.h>
 
 #include "ihevc_typedefs.h"
-#include "iv.h"
-#include "ivd.h"
+#include "ihevcd_cxa.h"
 
 namespace android {
 
diff --git a/media/codec2/components/mpeg4_h263/C2SoftMpeg4Enc.cpp b/media/codec2/components/mpeg4_h263/C2SoftMpeg4Enc.cpp
index e1cc6b3..3c87531 100644
--- a/media/codec2/components/mpeg4_h263/C2SoftMpeg4Enc.cpp
+++ b/media/codec2/components/mpeg4_h263/C2SoftMpeg4Enc.cpp
@@ -448,6 +448,20 @@
         work->worklets.front()->output.configUpdate.push_back(std::move(csd));
     }
 
+    // handle dynamic bitrate change
+    {
+        IntfImpl::Lock lock = mIntf->lock();
+        std::shared_ptr<C2StreamBitrateInfo::output> bitrate = mIntf->getBitrate_l();
+        lock.unlock();
+
+        if (bitrate != mBitrate) {
+            mBitrate = bitrate;
+            int layerBitrate[2] = {static_cast<int>(mBitrate->value), 0};
+            ALOGV("Calling PVUpdateBitRate %d", layerBitrate[0]);
+            PVUpdateBitRate(mHandle, layerBitrate);
+        }
+    }
+
     std::shared_ptr<const C2GraphicView> rView;
     std::shared_ptr<C2Buffer> inputBuffer;
     bool eos = ((work->input.flags & C2FrameData::FLAG_END_OF_STREAM) != 0);
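Illustrative sketch, not part of this change: the hunk above makes the encoder re-read the interface's bitrate each frame and call PVUpdateBitRate() when it changes. A client would trigger that by reconfiguring the standard bitrate param; the interface handle `intf` and the bitrate value below are assumptions for illustration only.

    // Sketch only: reconfigure the target bitrate at runtime; the encoder's
    // process() loop above applies it via PVUpdateBitRate() on the next frame.
    void updateEncoderBitrate(const std::shared_ptr<C2ComponentInterface> &intf, uint32_t bps) {
        C2StreamBitrateInfo::output newBitrate(0u /* stream */, bps);
        std::vector<std::unique_ptr<C2SettingResult>> failures;
        (void)intf->config_vb({&newBitrate}, C2_MAY_BLOCK, &failures);
    }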
diff --git a/media/codec2/components/mpeg4_h263/TEST_MAPPING b/media/codec2/components/mpeg4_h263/TEST_MAPPING
new file mode 100644
index 0000000..93fba22
--- /dev/null
+++ b/media/codec2/components/mpeg4_h263/TEST_MAPPING
@@ -0,0 +1,6 @@
+// mappings for frameworks/av/media/codec2/components/mpeg4_h263
+{
+  "presubmit": [
+    { "name": "C2SoftMpeg4DecTest" }
+  ]
+}
diff --git a/media/codec2/components/tests/Android.bp b/media/codec2/components/tests/Android.bp
new file mode 100644
index 0000000..3c68eee
--- /dev/null
+++ b/media/codec2/components/tests/Android.bp
@@ -0,0 +1,68 @@
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
+cc_defaults {
+    name: "C2SoftCodecTest-defaults",
+    gtest: true,
+    host_supported: false,
+    srcs: [
+        "C2SoftCodecTest.cpp",
+    ],
+
+    static_libs: [
+        "liblog",
+        "libion",
+        "libfmq",
+        "libbase",
+        "libutils",
+        "libcutils",
+        "libcodec2",
+        "libhidlbase",
+        "libdmabufheap",
+        "libcodec2_vndk",
+        "libnativewindow",
+        "libcodec2_soft_common",
+        "libsfplugin_ccodec_utils",
+        "libstagefright_foundation",
+        "libstagefright_bufferpool@2.0.1",
+        "android.hardware.graphics.mapper@2.0",
+        "android.hardware.graphics.mapper@3.0",
+        "android.hardware.media.bufferpool@2.0",
+        "android.hardware.graphics.allocator@2.0",
+        "android.hardware.graphics.allocator@3.0",
+        "android.hardware.graphics.bufferqueue@2.0",
+    ],
+
+    shared_libs: [
+        "libui",
+        "libdl",
+        "libhardware",
+        "libvndksupport",
+        "libprocessgroup",
+    ],
+
+    cflags: [
+        "-Wall",
+        "-Werror",
+    ],
+}
+
+cc_test {
+    name: "C2SoftMpeg4DecTest",
+    defaults: ["C2SoftCodecTest-defaults"],
+
+    static_libs: [
+        "libstagefright_m4vh263dec",
+        "libcodec2_soft_mpeg4dec",
+    ],
+
+    test_suites: [
+        "general-tests",
+    ],
+}
diff --git a/media/codec2/components/tests/C2SoftCodecTest.cpp b/media/codec2/components/tests/C2SoftCodecTest.cpp
new file mode 100644
index 0000000..84c2562
--- /dev/null
+++ b/media/codec2/components/tests/C2SoftCodecTest.cpp
@@ -0,0 +1,105 @@
+/******************************************************************************
+ *
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at:
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ *****************************************************************************
+ * Originally developed and contributed by Ittiam Systems Pvt. Ltd, Bangalore
+ */
+#include <C2Config.h>
+#include <C2ComponentFactory.h>
+#include <gtest/gtest.h>
+#include <log/log.h>
+
+using namespace android;
+extern "C" ::C2ComponentFactory* CreateCodec2Factory();
+extern "C" void DestroyCodec2Factory(::C2ComponentFactory* factory);
+
+class C2SoftCodecTest : public ::testing::Test {
+public:
+  void SetUp() override {
+    mFactory = CreateCodec2Factory();
+  }
+
+  void TearDown() override {
+    if (mFactory) {
+      DestroyCodec2Factory(mFactory);
+    }
+  }
+
+  c2_status_t createComponent(
+        std::shared_ptr<C2Component>* const comp) {
+    if (!mFactory) {
+      return C2_NO_INIT;
+    }
+    return mFactory->createComponent(
+        kPlaceholderId, comp, std::default_delete<C2Component>());
+  }
+
+  c2_status_t createInterface(
+      std::shared_ptr<C2ComponentInterface>* const intf) {
+    if (!mFactory) {
+      return C2_NO_INIT;
+    }
+    return mFactory->createInterface(
+        kPlaceholderId, intf, std::default_delete<C2ComponentInterface>());
+  }
+
+  ::C2ComponentFactory *getFactory() { return mFactory; }
+
+private:
+  static constexpr ::c2_node_id_t kPlaceholderId = 0;
+
+  ::C2ComponentFactory *mFactory;
+};
+
+TEST_F(C2SoftCodecTest, PictureSizeInfoTest) {
+  std::shared_ptr<C2ComponentInterface> interface;
+  c2_status_t status = createInterface(&interface);
+  ASSERT_EQ(status, C2_OK) << "Error in createInterface";
+  ASSERT_NE(interface, nullptr) << "interface is null";
+
+  std::unique_ptr<C2StreamPictureSizeInfo::output> param =
+      std::make_unique<C2StreamPictureSizeInfo::output>();
+  std::vector<C2FieldSupportedValuesQuery> validValueInfos = {
+      C2FieldSupportedValuesQuery::Current(
+          C2ParamField(param.get(), &C2StreamPictureSizeInfo::width)),
+      C2FieldSupportedValuesQuery::Current(
+          C2ParamField(param.get(), &C2StreamPictureSizeInfo::height))};
+  status = interface->querySupportedValues_vb(validValueInfos, C2_MAY_BLOCK);
+  ASSERT_EQ(status, C2_OK) << "Error in querySupportedValues_vb";
+  ASSERT_EQ(validValueInfos.size(), 2) << "querySupportedValues_vb didn't return 2 values";
+
+  ASSERT_EQ(validValueInfos[0].values.range.max.ref<uint32_t>(), 1920)
+      << "Incorrect maximum value for width";
+  ASSERT_EQ(validValueInfos[1].values.range.max.ref<uint32_t>(), 1920)
+      << "Incorrect maximum value for height";
+  ASSERT_EQ(validValueInfos[0].values.range.min.ref<uint32_t>(), 2)
+      << "Incorrect minimum value for width";
+  ASSERT_EQ(validValueInfos[1].values.range.min.ref<uint32_t>(), 2)
+      << "Incorrect minimum value for height";
+  ASSERT_EQ(validValueInfos[0].values.range.step.ref<uint32_t>(), 2)
+      << "Incorrect alignment value for width";
+  ASSERT_EQ(validValueInfos[1].values.range.step.ref<uint32_t>(), 2)
+      << "Incorrect alignment value for height";
+
+  return;
+}
+
+int main(int argc, char** argv) {
+  ::testing::InitGoogleTest(&argc, argv);
+  int status = RUN_ALL_TESTS();
+  ALOGV("Test result = %d\n", status);
+  return status;
+}
diff --git a/media/codec2/core/include/C2Buffer.h b/media/codec2/core/include/C2Buffer.h
index fe37b05..a5d6fbf 100644
--- a/media/codec2/core/include/C2Buffer.h
+++ b/media/codec2/core/include/C2Buffer.h
@@ -642,7 +642,8 @@
      * \retval C2_REFUSED   no permission to complete the allocation
      * \retval C2_BAD_VALUE capacity or usage are not supported (invalid) (caller error)
      * \retval C2_OMITTED   this allocator does not support 1D allocations
-     * \retval C2_CORRUPTED some unknown, unrecoverable error occured during allocation (unexpected)
+     * \retval C2_CORRUPTED some unknown, unrecoverable error occurred during allocation
+     *                      (unexpected)
      */
     virtual c2_status_t newLinearAllocation(
             uint32_t capacity __unused, C2MemoryUsage usage __unused,
@@ -666,7 +667,8 @@
      * \retval C2_REFUSED   no permission to recreate the allocation
      * \retval C2_BAD_VALUE invalid handle (caller error)
      * \retval C2_OMITTED   this allocator does not support 1D allocations
-     * \retval C2_CORRUPTED some unknown, unrecoverable error occured during allocation (unexpected)
+     * \retval C2_CORRUPTED some unknown, unrecoverable error occurred during allocation
+     *                      (unexpected)
      */
     virtual c2_status_t priorLinearAllocation(
             const C2Handle *handle __unused,
@@ -699,7 +701,8 @@
      * \retval C2_REFUSED   no permission to complete the allocation
      * \retval C2_BAD_VALUE width, height, format or usage are not supported (invalid) (caller error)
      * \retval C2_OMITTED   this allocator does not support 2D allocations
-     * \retval C2_CORRUPTED some unknown, unrecoverable error occured during allocation (unexpected)
+     * \retval C2_CORRUPTED some unknown, unrecoverable error occurred during allocation
+     *                      (unexpected)
      */
     virtual c2_status_t newGraphicAllocation(
             uint32_t width __unused, uint32_t height __unused, uint32_t format __unused,
@@ -724,7 +727,8 @@
      * \retval C2_REFUSED   no permission to recreate the allocation
      * \retval C2_BAD_VALUE invalid handle (caller error)
      * \retval C2_OMITTED   this allocator does not support 2D allocations
-     * \retval C2_CORRUPTED some unknown, unrecoverable error occured during recreation (unexpected)
+     * \retval C2_CORRUPTED some unknown, unrecoverable error occurred during recreation
+     *                      (unexpected)
      */
     virtual c2_status_t priorGraphicAllocation(
             const C2Handle *handle __unused,
@@ -908,7 +912,8 @@
      * \retval C2_REFUSED   no permission to complete any required allocation
      * \retval C2_BAD_VALUE capacity or usage are not supported (invalid) (caller error)
      * \retval C2_OMITTED   this pool does not support linear blocks
-     * \retval C2_CORRUPTED some unknown, unrecoverable error occured during operation (unexpected)
+     * \retval C2_CORRUPTED some unknown, unrecoverable error occurred during operation
+     *                      (unexpected)
      */
     virtual c2_status_t fetchLinearBlock(
             uint32_t capacity __unused, C2MemoryUsage usage __unused,
@@ -937,7 +942,8 @@
      * \retval C2_REFUSED   no permission to complete any required allocation
      * \retval C2_BAD_VALUE capacity or usage are not supported (invalid) (caller error)
      * \retval C2_OMITTED   this pool does not support circular blocks
-     * \retval C2_CORRUPTED some unknown, unrecoverable error occured during operation (unexpected)
+     * \retval C2_CORRUPTED some unknown, unrecoverable error occurred during operation
+     *                      (unexpected)
      */
     virtual c2_status_t fetchCircularBlock(
             uint32_t capacity __unused, C2MemoryUsage usage __unused,
@@ -969,7 +975,8 @@
      * \retval C2_BAD_VALUE width, height, format or usage are not supported (invalid) (caller
      *                      error)
      * \retval C2_OMITTED   this pool does not support 2D blocks
-     * \retval C2_CORRUPTED some unknown, unrecoverable error occured during operation (unexpected)
+     * \retval C2_CORRUPTED some unknown, unrecoverable error occurred during operation
+     *                      (unexpected)
      */
     virtual c2_status_t fetchGraphicBlock(
             uint32_t width __unused, uint32_t height __unused, uint32_t format __unused,
@@ -980,6 +987,90 @@
     }
 
     virtual ~C2BlockPool() = default;
+
+    /**
+     * Blocking fetch for linear block. Obtains a linear writable block of given |capacity|
+     * and |usage|. If a block can be successfully obtained, the block is stored in |block|,
+     * |fence| is set to a null-fence and C2_OK is returned.
+     *
+     * If a block temporarily cannot be obtained, |block| is set to nullptr, a waitable fence
+     * is stored into |fence| and C2_BLOCKING is returned. The fence is signalled when the
+     * temporary restriction on fetch is lifted, e.g. when more memory becomes available
+     * because prior blocks were released.
+     *
+     * Otherwise, |block| is set to nullptr and |fence| is set to a null-fence.
+     *
+     * \param capacity the size of requested block.
+     * \param usage    the memory usage info for the requested block. Returned blocks will be
+     *                 optimized for this usage, but may be used with any usage. One exception:
+     *                 protected blocks/buffers can only be used in a protected scenario.
+     * \param block    pointer to where the obtained block shall be stored on success. nullptr will
+     *                 be stored here on failure
+     * \param fence    pointer to where the fence shall be stored on C2_BLOCKING error.
+     *
+     * \retval C2_OK        the operation was successful
+     * \retval C2_NO_MEMORY not enough memory to complete any required allocation
+     * \retval C2_TIMED_OUT the operation timed out
+     * \retval C2_BLOCKING  the operation is blocked
+     * \retval C2_REFUSED   no permission to complete any required allocation
+     * \retval C2_BAD_VALUE capacity or usage are not supported (invalid) (caller error)
+     * \retval C2_OMITTED   this pool does not support linear blocks or fences.
+     * \retval C2_CORRUPTED some unknown, unrecoverable error occurred during operation
+     *                      (unexpected)
+     */
+    virtual c2_status_t fetchLinearBlock(
+            uint32_t capacity __unused, C2MemoryUsage usage __unused,
+            std::shared_ptr<C2LinearBlock> *block /* nonnull */,
+            C2Fence *fence /* nonnull */) {
+        *block = nullptr;
+        (void) fence;
+        return C2_OMITTED;
+    }
+
+    /**
+     * Blocking fetch for 2D graphic block. Obtains a 2D graphic writable block of given |capacity|
+     * and |usage|. If a block can be successfully obtained, the block is stored in |block|,
+     * |fence| is set to a null-fence and C2_OK is returned.
+     *
+     * If a block temporarily cannot be obtained, |block| is set to nullptr, a waitable fence
+     * is stored into |fence| and C2_BLOCKING is returned. The fence is signalled when the
+     * temporary restriction on fetch is lifted, e.g. when more memory becomes available
+     * because prior blocks were released.
+     *
+     * Otherwise, |block| is set to nullptr and |fence| is set to a null-fence.
+     *
+     * \param width  the width of requested block (the obtained block could be slightly larger, e.g.
+     *               to accommodate any system-required alignment)
+     * \param height the height of requested block (the obtained block could be slightly larger,
+     *               e.g. to accommodate any system-required alignment)
+     * \param format the pixel format of requested block. This could be a vendor specific format.
+     * \param usage  the memory usage info for the requested block. Returned blocks will be
+     *               optimized for this usage, but may be used with any usage. One exception:
+     *               protected blocks/buffers can only be used in a protected scenario.
+     * \param block  pointer to where the obtained block shall be stored on success. nullptr
+     *               will be stored here on failure
+     * \param fence  pointer to where the fence shall be stored on C2_BLOCKING error.
+     *
+     * \retval C2_OK        the operation was successful
+     * \retval C2_NO_MEMORY not enough memory to complete any required allocation
+     * \retval C2_TIMED_OUT the operation timed out
+     * \retval C2_BLOCKING  the operation is blocked
+     * \retval C2_REFUSED   no permission to complete any required allocation
+     * \retval C2_BAD_VALUE width, height, format or usage are not supported (invalid) (caller
+     *                      error)
+     * \retval C2_OMITTED   this pool does not support 2D blocks
+     * \retval C2_CORRUPTED some unknown, unrecoverable error occurred during operation
+     *                      (unexpected)
+     */
+    virtual c2_status_t fetchGraphicBlock(
+            uint32_t width __unused, uint32_t height __unused, uint32_t format __unused,
+            C2MemoryUsage usage __unused,
+            std::shared_ptr<C2GraphicBlock> *block /* nonnull */,
+            C2Fence *fence /* nonnull */) {
+        *block = nullptr;
+        (void) fence;
+        return C2_OMITTED;
+    }
 protected:
     C2BlockPool() = default;
 };
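Illustrative sketch, not part of this change: one way a caller might use the new fence-returning fetch variants added above, assuming the existing C2Fence::wait() API; the pool handle and capacity are hypothetical.

    // Sketch only: retry the fetch while the pool reports C2_BLOCKING, waiting on
    // the returned fence until the temporary restriction is lifted.
    c2_status_t fetchLinearWithWait(const std::shared_ptr<C2BlockPool> &pool,
                                    std::shared_ptr<C2LinearBlock> *block) {
        constexpr uint32_t kCapacity = 1 << 20;  // assumed size for the example
        C2MemoryUsage usage{C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE};
        C2Fence fence;
        c2_status_t err = pool->fetchLinearBlock(kCapacity, usage, block, &fence);
        while (err == C2_BLOCKING) {
            (void)fence.wait(-1 /* no timeout */);  // signalled when prior blocks are released
            err = pool->fetchLinearBlock(kCapacity, usage, block, &fence);
        }
        return err;  // C2_OK with *block set, otherwise *block == nullptr
    }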
diff --git a/media/codec2/core/include/C2Config.h b/media/codec2/core/include/C2Config.h
index 752140a..f8aa672 100644
--- a/media/codec2/core/include/C2Config.h
+++ b/media/codec2/core/include/C2Config.h
@@ -187,6 +187,8 @@
     kParamIndexPictureType,
     kParamIndexHdr10PlusMetadata,
 
+    kParamIndexPictureQuantization,
+
     /* ------------------------------------ video components ------------------------------------ */
 
     kParamIndexFrameRate = C2_PARAM_INDEX_VIDEO_PARAM_START,
@@ -1699,6 +1701,31 @@
 constexpr char C2_PARAMKEY_GOP[] = "coding.gop";
 
 /**
+ * Picture quantization
+ *
+ * Specifies the minimum and maximum quantization parameter (QP) for each picture type.
+ */
+struct C2PictureQuantizationStruct {
+    C2PictureQuantizationStruct() : type_((C2Config::picture_type_t)0),
+                                    min(INT32_MIN), max(INT32_MAX) {}
+    C2PictureQuantizationStruct(C2Config::picture_type_t type, int32_t min_, int32_t max_)
+        : type_(type), min(min_), max(max_) { }
+
+    C2Config::picture_type_t type_;
+    int32_t min;      // INT32_MIN == 'no lower bound specified'
+    int32_t max;      // INT32_MAX == 'no upper bound specified'
+
+    DEFINE_AND_DESCRIBE_C2STRUCT(PictureQuantization)
+    C2FIELD(type_, "type")
+    C2FIELD(min, "min")
+    C2FIELD(max, "max")
+};
+
+typedef C2StreamParam<C2Tuning, C2SimpleArrayStruct<C2PictureQuantizationStruct>,
+        kParamIndexPictureQuantization> C2StreamPictureQuantizationTuning;
+constexpr char C2_PARAMKEY_PICTURE_QUANTIZATION[] = "coding.qp";
+
+/**
  * Sync frame can be requested on demand by the client.
  *
  * If true, the next I frame shall be encoded as a sync frame. This config can be passed
diff --git a/media/codec2/hidl/1.0/utils/Android.bp b/media/codec2/hidl/1.0/utils/Android.bp
index 008def8..122aacd 100644
--- a/media/codec2/hidl/1.0/utils/Android.bp
+++ b/media/codec2/hidl/1.0/utils/Android.bp
@@ -15,7 +15,6 @@
     defaults: ["hidl_defaults"],
 
     srcs: [
-        "OutputBufferQueue.cpp",
         "types.cpp",
     ],
 
diff --git a/media/codec2/hidl/1.0/utils/InputSurfaceConnection.cpp b/media/codec2/hidl/1.0/utils/InputSurfaceConnection.cpp
index 5ec88ec..7c2e014 100644
--- a/media/codec2/hidl/1.0/utils/InputSurfaceConnection.cpp
+++ b/media/codec2/hidl/1.0/utils/InputSurfaceConnection.cpp
@@ -201,6 +201,8 @@
         c2_status_t err = mAllocator->priorGraphicAllocation(handle, &alloc);
         mAllocatorMutex.unlock();
         if (err != OK) {
+            native_handle_close(handle);
+            native_handle_delete(handle);
             return UNKNOWN_ERROR;
         }
         std::shared_ptr<C2GraphicBlock> block =
diff --git a/media/codec2/hidl/1.0/vts/.clang-format b/media/codec2/hidl/1.0/vts/.clang-format
new file mode 120000
index 0000000..136279c
--- /dev/null
+++ b/media/codec2/hidl/1.0/vts/.clang-format
@@ -0,0 +1 @@
+../../../../../../../build/soong/scripts/system-clang-format
\ No newline at end of file
diff --git a/media/codec2/hidl/1.0/vts/functional/audio/VtsHalMediaC2V1_0TargetAudioDecTest.cpp b/media/codec2/hidl/1.0/vts/functional/audio/VtsHalMediaC2V1_0TargetAudioDecTest.cpp
index 3a47ae9..efc5813 100644
--- a/media/codec2/hidl/1.0/vts/functional/audio/VtsHalMediaC2V1_0TargetAudioDecTest.cpp
+++ b/media/codec2/hidl/1.0/vts/functional/audio/VtsHalMediaC2V1_0TargetAudioDecTest.cpp
@@ -33,14 +33,40 @@
 using android::C2AllocatorIon;
 
 #include "media_c2_hidl_test_common.h"
+using DecodeTestParameters = std::tuple<std::string, std::string, uint32_t, bool>;
+static std::vector<DecodeTestParameters> kDecodeTestParameters;
 
-static std::vector<std::tuple<std::string, std::string, std::string, std::string>>
-        kDecodeTestParameters;
+using CsdFlushTestParameters = std::tuple<std::string, std::string, bool>;
+static std::vector<CsdFlushTestParameters> kCsdFlushTestParameters;
 
-static std::vector<std::tuple<std::string, std::string, std::string>> kCsdFlushTestParameters;
+struct CompToURL {
+    std::string mime;
+    std::string mURL;
+    std::string info;
+};
 
-// Resource directory
-static std::string sResourceDir = "";
+std::vector<CompToURL> kCompToURL = {
+        {"mp4a-latm", "bbb_aac_stereo_128kbps_48000hz.aac", "bbb_aac_stereo_128kbps_48000hz.info"},
+        {"mp4a-latm", "bbb_aac_stereo_128kbps_48000hz.aac",
+         "bbb_aac_stereo_128kbps_48000hz_multi_frame.info"},
+        {"audio/mpeg", "bbb_mp3_stereo_192kbps_48000hz.mp3", "bbb_mp3_stereo_192kbps_48000hz.info"},
+        {"audio/mpeg", "bbb_mp3_stereo_192kbps_48000hz.mp3",
+         "bbb_mp3_stereo_192kbps_48000hz_multi_frame.info"},
+        {"3gpp", "sine_amrnb_1ch_12kbps_8000hz.amrnb", "sine_amrnb_1ch_12kbps_8000hz.info"},
+        {"3gpp", "sine_amrnb_1ch_12kbps_8000hz.amrnb",
+         "sine_amrnb_1ch_12kbps_8000hz_multi_frame.info"},
+        {"amr-wb", "bbb_amrwb_1ch_14kbps_16000hz.amrwb", "bbb_amrwb_1ch_14kbps_16000hz.info"},
+        {"amr-wb", "bbb_amrwb_1ch_14kbps_16000hz.amrwb",
+         "bbb_amrwb_1ch_14kbps_16000hz_multi_frame.info"},
+        {"vorbis", "bbb_vorbis_stereo_128kbps_48000hz.vorbis",
+         "bbb_vorbis_stereo_128kbps_48000hz.info"},
+        {"opus", "bbb_opus_stereo_128kbps_48000hz.opus", "bbb_opus_stereo_128kbps_48000hz.info"},
+        {"g711-alaw", "bbb_g711alaw_1ch_8khz.raw", "bbb_g711alaw_1ch_8khz.info"},
+        {"g711-mlaw", "bbb_g711mulaw_1ch_8khz.raw", "bbb_g711mulaw_1ch_8khz.info"},
+        {"gsm", "bbb_gsm_1ch_8khz_13kbps.raw", "bbb_gsm_1ch_8khz_13kbps.info"},
+        {"raw", "bbb_raw_1ch_8khz_s32le.raw", "bbb_raw_1ch_8khz_s32le.info"},
+        {"flac", "bbb_flac_stereo_680kbps_48000hz.flac", "bbb_flac_stereo_680kbps_48000hz.info"},
+};
 
 class LinearBuffer : public C2Buffer {
   public:
@@ -76,33 +102,17 @@
         mLinearPool = std::make_shared<C2PooledBlockPool>(mLinearAllocator, mBlockPoolId++);
         ASSERT_NE(mLinearPool, nullptr);
 
-        mCompName = unknown_comp;
-        struct StringToName {
-            const char* Name;
-            standardComp CompName;
-        };
-        const StringToName kStringToName[] = {
-                {"xaac", xaac},          {"mp3", mp3}, {"amrnb", amrnb},
-                {"amrwb", amrwb},        {"aac", aac}, {"vorbis", vorbis},
-                {"opus", opus},          {"pcm", pcm}, {"g711.alaw", g711alaw},
-                {"g711.mlaw", g711mlaw}, {"gsm", gsm}, {"raw", raw},
-                {"flac", flac},
-        };
-        const size_t kNumStringToName = sizeof(kStringToName) / sizeof(kStringToName[0]);
+        std::vector<std::unique_ptr<C2Param>> queried;
+        mComponent->query({}, {C2PortMediaTypeSetting::input::PARAM_TYPE}, C2_DONT_BLOCK, &queried);
+        ASSERT_GT(queried.size(), 0);
 
-        // Find the component type
-        for (size_t i = 0; i < kNumStringToName; ++i) {
-            if (strcasestr(mComponentName.c_str(), kStringToName[i].Name)) {
-                mCompName = kStringToName[i].CompName;
-                break;
-            }
-        }
+        mMime = ((C2PortMediaTypeSetting::input*)queried[0].get())->m.value;
+
         mEos = false;
         mFramesReceived = 0;
         mTimestampUs = 0u;
         mWorkResult = C2_OK;
         mTimestampDevTest = false;
-        if (mCompName == unknown_comp) mDisableTest = true;
         if (mDisableTest) std::cout << "[   WARN   ] Test Disabled \n";
     }
 
@@ -119,6 +129,8 @@
 
     virtual void validateTimestampList(int32_t* bitStreamInfo);
 
+    void GetURLForComponent(char* mURL, char* info, size_t streamIndex = 0);
+
     struct outputMetaData {
         uint64_t timestampUs;
         uint32_t rangeLength;
@@ -158,29 +170,12 @@
         }
     }
 
-    enum standardComp {
-        xaac,
-        mp3,
-        amrnb,
-        amrwb,
-        aac,
-        vorbis,
-        opus,
-        pcm,
-        g711alaw,
-        g711mlaw,
-        gsm,
-        raw,
-        flac,
-        unknown_comp,
-    };
-
+    std::string mMime;
     std::string mInstanceName;
     std::string mComponentName;
     bool mEos;
     bool mDisableTest;
     bool mTimestampDevTest;
-    standardComp mCompName;
 
     int32_t mWorkResult;
     uint64_t mTimestampUs;
@@ -207,9 +202,8 @@
     }
 };
 
-class Codec2AudioDecHidlTest
-    : public Codec2AudioDecHidlTestBase,
-      public ::testing::WithParamInterface<std::tuple<std::string, std::string>> {
+class Codec2AudioDecHidlTest : public Codec2AudioDecHidlTestBase,
+                               public ::testing::WithParamInterface<TestParameters> {
     void getParams() {
         mInstanceName = std::get<0>(GetParam());
         mComponentName = std::get<1>(GetParam());
@@ -217,7 +211,7 @@
 };
 
 void validateComponent(const std::shared_ptr<android::Codec2Client::Component>& component,
-                       Codec2AudioDecHidlTest::standardComp compName, bool& disableTest) {
+                       bool& disableTest) {
     // Validate its a C2 Component
     if (component->getName().find("c2") == std::string::npos) {
         ALOGE("Not a c2 component");
@@ -244,13 +238,6 @@
             return;
         }
     }
-
-    // Validates component name
-    if (compName == Codec2AudioDecHidlTest::unknown_comp) {
-        ALOGE("Component InValid");
-        disableTest = true;
-        return;
-    }
     ALOGV("Component Valid");
 }
 
@@ -271,7 +258,7 @@
 // parsing the header of elementary stream. Client needs to collect this
 // information and reconfigure
 void getInputChannelInfo(const std::shared_ptr<android::Codec2Client::Component>& component,
-                         Codec2AudioDecHidlTest::standardComp compName, int32_t* bitStreamInfo) {
+                         std::string mime, int32_t* bitStreamInfo) {
     // query nSampleRate and nChannels
     std::initializer_list<C2Param::Index> indices{
             C2StreamSampleRateInfo::output::PARAM_TYPE,
@@ -288,89 +275,29 @@
             C2Param* param = inParams[i].get();
             bitStreamInfo[i] = *(int32_t*)((uint8_t*)param + offset);
         }
-        switch (compName) {
-            case Codec2AudioDecHidlTest::amrnb: {
-                ASSERT_EQ(bitStreamInfo[0], 8000);
-                ASSERT_EQ(bitStreamInfo[1], 1);
-                break;
-            }
-            case Codec2AudioDecHidlTest::amrwb: {
-                ASSERT_EQ(bitStreamInfo[0], 16000);
-                ASSERT_EQ(bitStreamInfo[1], 1);
-                break;
-            }
-            case Codec2AudioDecHidlTest::gsm: {
-                ASSERT_EQ(bitStreamInfo[0], 8000);
-                break;
-            }
-            default:
-                break;
+        if (mime.find("3gpp") != std::string::npos) {
+            ASSERT_EQ(bitStreamInfo[0], 8000);
+            ASSERT_EQ(bitStreamInfo[1], 1);
+        } else if (mime.find("amr-wb") != std::string::npos) {
+            ASSERT_EQ(bitStreamInfo[0], 16000);
+            ASSERT_EQ(bitStreamInfo[1], 1);
+        } else if (mime.find("gsm") != std::string::npos) {
+            ASSERT_EQ(bitStreamInfo[0], 8000);
         }
     }
 }
 
-// number of elementary streams per component
-#define STREAM_COUNT 2
-
 // LookUpTable of clips and metadata for component testing
-void GetURLForComponent(Codec2AudioDecHidlTest::standardComp comp, char* mURL, char* info,
-                        size_t streamIndex = 0) {
-    struct CompToURL {
-        Codec2AudioDecHidlTest::standardComp comp;
-        const char mURL[STREAM_COUNT][512];
-        const char info[STREAM_COUNT][512];
-    };
-    ASSERT_TRUE(streamIndex < STREAM_COUNT);
-
-    static const CompToURL kCompToURL[] = {
-            {Codec2AudioDecHidlTest::standardComp::xaac,
-             {"bbb_aac_stereo_128kbps_48000hz.aac", "bbb_aac_stereo_128kbps_48000hz.aac"},
-             {"bbb_aac_stereo_128kbps_48000hz.info",
-              "bbb_aac_stereo_128kbps_48000hz_multi_frame.info"}},
-            {Codec2AudioDecHidlTest::standardComp::mp3,
-             {"bbb_mp3_stereo_192kbps_48000hz.mp3", "bbb_mp3_stereo_192kbps_48000hz.mp3"},
-             {"bbb_mp3_stereo_192kbps_48000hz.info",
-              "bbb_mp3_stereo_192kbps_48000hz_multi_frame.info"}},
-            {Codec2AudioDecHidlTest::standardComp::aac,
-             {"bbb_aac_stereo_128kbps_48000hz.aac", "bbb_aac_stereo_128kbps_48000hz.aac"},
-             {"bbb_aac_stereo_128kbps_48000hz.info",
-              "bbb_aac_stereo_128kbps_48000hz_multi_frame.info"}},
-            {Codec2AudioDecHidlTest::standardComp::amrnb,
-             {"sine_amrnb_1ch_12kbps_8000hz.amrnb", "sine_amrnb_1ch_12kbps_8000hz.amrnb"},
-             {"sine_amrnb_1ch_12kbps_8000hz.info",
-              "sine_amrnb_1ch_12kbps_8000hz_multi_frame.info"}},
-            {Codec2AudioDecHidlTest::standardComp::amrwb,
-             {"bbb_amrwb_1ch_14kbps_16000hz.amrwb", "bbb_amrwb_1ch_14kbps_16000hz.amrwb"},
-             {"bbb_amrwb_1ch_14kbps_16000hz.info",
-              "bbb_amrwb_1ch_14kbps_16000hz_multi_frame.info"}},
-            {Codec2AudioDecHidlTest::standardComp::vorbis,
-             {"bbb_vorbis_stereo_128kbps_48000hz.vorbis", ""},
-             {"bbb_vorbis_stereo_128kbps_48000hz.info", ""}},
-            {Codec2AudioDecHidlTest::standardComp::opus,
-             {"bbb_opus_stereo_128kbps_48000hz.opus", ""},
-             {"bbb_opus_stereo_128kbps_48000hz.info", ""}},
-            {Codec2AudioDecHidlTest::standardComp::g711alaw,
-             {"bbb_g711alaw_1ch_8khz.raw", ""},
-             {"bbb_g711alaw_1ch_8khz.info", ""}},
-            {Codec2AudioDecHidlTest::standardComp::g711mlaw,
-             {"bbb_g711mulaw_1ch_8khz.raw", ""},
-             {"bbb_g711mulaw_1ch_8khz.info", ""}},
-            {Codec2AudioDecHidlTest::standardComp::gsm,
-             {"bbb_gsm_1ch_8khz_13kbps.raw", ""},
-             {"bbb_gsm_1ch_8khz_13kbps.info", ""}},
-            {Codec2AudioDecHidlTest::standardComp::raw,
-             {"bbb_raw_1ch_8khz_s32le.raw", ""},
-             {"bbb_raw_1ch_8khz_s32le.info", ""}},
-            {Codec2AudioDecHidlTest::standardComp::flac,
-             {"bbb_flac_stereo_680kbps_48000hz.flac", ""},
-             {"bbb_flac_stereo_680kbps_48000hz.info", ""}},
-    };
-
-    for (size_t i = 0; i < sizeof(kCompToURL) / sizeof(kCompToURL[0]); ++i) {
-        if (kCompToURL[i].comp == comp) {
-            strcat(mURL, kCompToURL[i].mURL[streamIndex]);
-            strcat(info, kCompToURL[i].info[streamIndex]);
-            return;
+void Codec2AudioDecHidlTestBase::GetURLForComponent(char* mURL, char* info, size_t streamIndex) {
+    size_t streamCount = 0;
+    for (size_t i = 0; i < kCompToURL.size(); ++i) {
+        if (mMime.find(kCompToURL[i].mime) != std::string::npos) {
+            if (streamCount == streamIndex) {
+                strcat(mURL, kCompToURL[i].mURL.c_str());
+                strcat(info, kCompToURL[i].info.c_str());
+                return;
+            }
+            streamCount++;
         }
     }
 }
@@ -461,7 +388,7 @@
 void Codec2AudioDecHidlTestBase::validateTimestampList(int32_t* bitStreamInfo) {
     uint32_t samplesReceived = 0;
     // Update SampleRate and ChannelCount
-    ASSERT_NO_FATAL_FAILURE(getInputChannelInfo(mComponent, mCompName, bitStreamInfo));
+    ASSERT_NO_FATAL_FAILURE(getInputChannelInfo(mComponent, mMime, bitStreamInfo));
     int32_t nSampleRate = bitStreamInfo[0];
     int32_t nChannels = bitStreamInfo[1];
     std::list<uint64_t>::iterator itIn = mTimestampUslist.begin();
@@ -486,7 +413,7 @@
 TEST_P(Codec2AudioDecHidlTest, validateCompName) {
     if (mDisableTest) GTEST_SKIP() << "Test is disabled";
     ALOGV("Checks if the given component is a valid audio component");
-    validateComponent(mComponent, mCompName, mDisableTest);
+    validateComponent(mComponent, mDisableTest);
     ASSERT_EQ(mDisableTest, false);
 }
 
@@ -495,15 +422,13 @@
     if (mDisableTest) GTEST_SKIP() << "Test is disabled";
     ASSERT_EQ(mComponent->start(), C2_OK);
     int32_t bitStreamInfo[2] = {0};
-    ASSERT_NO_FATAL_FAILURE(getInputChannelInfo(mComponent, mCompName, bitStreamInfo));
+    ASSERT_NO_FATAL_FAILURE(getInputChannelInfo(mComponent, mMime, bitStreamInfo));
     setupConfigParam(mComponent, bitStreamInfo);
     ASSERT_EQ(mComponent->stop(), C2_OK);
 }
 
-class Codec2AudioDecDecodeTest
-    : public Codec2AudioDecHidlTestBase,
-      public ::testing::WithParamInterface<
-              std::tuple<std::string, std::string, std::string, std::string>> {
+class Codec2AudioDecDecodeTest : public Codec2AudioDecHidlTestBase,
+                                 public ::testing::WithParamInterface<DecodeTestParameters> {
     void getParams() {
         mInstanceName = std::get<0>(GetParam());
         mComponentName = std::get<1>(GetParam());
@@ -514,16 +439,15 @@
     description("Decodes input file");
     if (mDisableTest) GTEST_SKIP() << "Test is disabled";
 
-    uint32_t streamIndex = std::stoi(std::get<2>(GetParam()));
-    ;
-    bool signalEOS = !std::get<3>(GetParam()).compare("true");
+    uint32_t streamIndex = std::get<2>(GetParam());
+    bool signalEOS = std::get<3>(GetParam());
     mTimestampDevTest = true;
     char mURL[512], info[512];
     android::Vector<FrameInfo> Info;
 
     strcpy(mURL, sResourceDir.c_str());
     strcpy(info, sResourceDir.c_str());
-    GetURLForComponent(mCompName, mURL, info, streamIndex);
+    GetURLForComponent(mURL, info, streamIndex);
     if (!strcmp(mURL, sResourceDir.c_str())) {
         ALOGV("EMPTY INPUT sResourceDir.c_str() %s mURL  %s ", sResourceDir.c_str(), mURL);
         return;
@@ -536,11 +460,11 @@
     mFramesReceived = 0;
     mTimestampUs = 0;
     int32_t bitStreamInfo[2] = {0};
-    if (mCompName == raw) {
+    if (mMime.find("raw") != std::string::npos) {
         bitStreamInfo[0] = 8000;
         bitStreamInfo[1] = 1;
     } else {
-        ASSERT_NO_FATAL_FAILURE(getInputChannelInfo(mComponent, mCompName, bitStreamInfo));
+        ASSERT_NO_FATAL_FAILURE(getInputChannelInfo(mComponent, mMime, bitStreamInfo));
     }
     if (!setupConfigParam(mComponent, bitStreamInfo)) {
         std::cout << "[   WARN   ] Test Skipped \n";
@@ -591,17 +515,17 @@
 
     strcpy(mURL, sResourceDir.c_str());
     strcpy(info, sResourceDir.c_str());
-    GetURLForComponent(mCompName, mURL, info);
+    GetURLForComponent(mURL, info);
 
     int32_t numCsds = populateInfoVector(info, &Info, mTimestampDevTest, &mTimestampUslist);
     ASSERT_GE(numCsds, 0) << "Error in parsing input info file: " << info;
 
     int32_t bitStreamInfo[2] = {0};
-    if (mCompName == raw) {
+    if (mMime.find("raw") != std::string::npos) {
         bitStreamInfo[0] = 8000;
         bitStreamInfo[1] = 1;
     } else {
-        ASSERT_NO_FATAL_FAILURE(getInputChannelInfo(mComponent, mCompName, bitStreamInfo));
+        ASSERT_NO_FATAL_FAILURE(getInputChannelInfo(mComponent, mMime, bitStreamInfo));
     }
     if (!setupConfigParam(mComponent, bitStreamInfo)) {
         std::cout << "[   WARN   ] Test Skipped \n";
@@ -683,17 +607,17 @@
 
     strcpy(mURL, sResourceDir.c_str());
     strcpy(info, sResourceDir.c_str());
-    GetURLForComponent(mCompName, mURL, info);
+    GetURLForComponent(mURL, info);
 
     int32_t numCsds = populateInfoVector(info, &Info, mTimestampDevTest, &mTimestampUslist);
     ASSERT_GE(numCsds, 0) << "Error in parsing input info file: " << info;
 
     int32_t bitStreamInfo[2] = {0};
-    if (mCompName == raw) {
+    if (mMime.find("raw") != std::string::npos) {
         bitStreamInfo[0] = 8000;
         bitStreamInfo[1] = 1;
     } else {
-        ASSERT_NO_FATAL_FAILURE(getInputChannelInfo(mComponent, mCompName, bitStreamInfo));
+        ASSERT_NO_FATAL_FAILURE(getInputChannelInfo(mComponent, mMime, bitStreamInfo));
     }
     if (!setupConfigParam(mComponent, bitStreamInfo)) {
         std::cout << "[   WARN   ] Test Skipped \n";
@@ -768,7 +692,7 @@
 
     strcpy(mURL, sResourceDir.c_str());
     strcpy(info, sResourceDir.c_str());
-    GetURLForComponent(mCompName, mURL, info);
+    GetURLForComponent(mURL, info);
 
     eleInfo.open(info);
     ASSERT_EQ(eleInfo.is_open(), true) << mURL << " - file not found";
@@ -798,11 +722,11 @@
     }
     eleInfo.close();
     int32_t bitStreamInfo[2] = {0};
-    if (mCompName == raw) {
+    if (mMime.find("raw") != std::string::npos) {
         bitStreamInfo[0] = 8000;
         bitStreamInfo[1] = 1;
     } else {
-        ASSERT_NO_FATAL_FAILURE(getInputChannelInfo(mComponent, mCompName, bitStreamInfo));
+        ASSERT_NO_FATAL_FAILURE(getInputChannelInfo(mComponent, mMime, bitStreamInfo));
     }
     if (!setupConfigParam(mComponent, bitStreamInfo)) {
         std::cout << "[   WARN   ] Test Skipped \n";
@@ -833,9 +757,8 @@
     ASSERT_EQ(mComponent->stop(), C2_OK);
 }
 
-class Codec2AudioDecCsdInputTests
-    : public Codec2AudioDecHidlTestBase,
-      public ::testing::WithParamInterface<std::tuple<std::string, std::string, std::string>> {
+class Codec2AudioDecCsdInputTests : public Codec2AudioDecHidlTestBase,
+                                    public ::testing::WithParamInterface<CsdFlushTestParameters> {
     void getParams() {
         mInstanceName = std::get<0>(GetParam());
         mComponentName = std::get<1>(GetParam());
@@ -853,7 +776,7 @@
 
     strcpy(mURL, sResourceDir.c_str());
     strcpy(info, sResourceDir.c_str());
-    GetURLForComponent(mCompName, mURL, info);
+    GetURLForComponent(mURL, info);
     if (!strcmp(mURL, sResourceDir.c_str())) {
         ALOGV("EMPTY INPUT sResourceDir.c_str() %s mURL  %s ", sResourceDir.c_str(), mURL);
         return;
@@ -864,11 +787,11 @@
     ASSERT_GE(numCsds, 0) << "Error in parsing input info file";
 
     int32_t bitStreamInfo[2] = {0};
-    if (mCompName == raw) {
+    if (mMime.find("raw") != std::string::npos) {
         bitStreamInfo[0] = 8000;
         bitStreamInfo[1] = 1;
     } else {
-        ASSERT_NO_FATAL_FAILURE(getInputChannelInfo(mComponent, mCompName, bitStreamInfo));
+        ASSERT_NO_FATAL_FAILURE(getInputChannelInfo(mComponent, mMime, bitStreamInfo));
     }
     if (!setupConfigParam(mComponent, bitStreamInfo)) {
         std::cout << "[   WARN   ] Test Skipped \n";
@@ -881,7 +804,7 @@
     ASSERT_EQ(eleStream.is_open(), true);
 
     bool signalEOS = false;
-    bool flushCsd = !std::get<2>(GetParam()).compare("true");
+    bool flushCsd = std::get<2>(GetParam());
     ALOGV("sending %d csd data ", numCsds);
     int framesToDecode = numCsds;
     ASSERT_NO_FATAL_FAILURE(decodeNFrames(mComponent, mQueueLock, mQueueCondition, mWorkQueue,
@@ -937,44 +860,36 @@
 }
 
 INSTANTIATE_TEST_SUITE_P(PerInstance, Codec2AudioDecHidlTest, testing::ValuesIn(kTestParameters),
-                         android::hardware::PrintInstanceTupleNameToString<>);
+                         PrintInstanceTupleNameToString<>);
 
 // DecodeTest with StreamIndex and EOS / No EOS
 INSTANTIATE_TEST_SUITE_P(StreamIndexAndEOS, Codec2AudioDecDecodeTest,
                          testing::ValuesIn(kDecodeTestParameters),
-                         android::hardware::PrintInstanceTupleNameToString<>);
+                         PrintInstanceTupleNameToString<>);
 
 INSTANTIATE_TEST_SUITE_P(CsdInputs, Codec2AudioDecCsdInputTests,
                          testing::ValuesIn(kCsdFlushTestParameters),
-                         android::hardware::PrintInstanceTupleNameToString<>);
+                         PrintInstanceTupleNameToString<>);
 
 }  // anonymous namespace
 
 int main(int argc, char** argv) {
+    parseArgs(argc, argv);
     kTestParameters = getTestParameters(C2Component::DOMAIN_AUDIO, C2Component::KIND_DECODER);
     for (auto params : kTestParameters) {
         kDecodeTestParameters.push_back(
-                std::make_tuple(std::get<0>(params), std::get<1>(params), "0", "false"));
+                std::make_tuple(std::get<0>(params), std::get<1>(params), 0, false));
         kDecodeTestParameters.push_back(
-                std::make_tuple(std::get<0>(params), std::get<1>(params), "0", "true"));
+                std::make_tuple(std::get<0>(params), std::get<1>(params), 0, true));
         kDecodeTestParameters.push_back(
-                std::make_tuple(std::get<0>(params), std::get<1>(params), "1", "false"));
+                std::make_tuple(std::get<0>(params), std::get<1>(params), 1, false));
         kDecodeTestParameters.push_back(
-                std::make_tuple(std::get<0>(params), std::get<1>(params), "1", "true"));
+                std::make_tuple(std::get<0>(params), std::get<1>(params), 1, true));
 
         kCsdFlushTestParameters.push_back(
-                std::make_tuple(std::get<0>(params), std::get<1>(params), "true"));
+                std::make_tuple(std::get<0>(params), std::get<1>(params), true));
         kCsdFlushTestParameters.push_back(
-                std::make_tuple(std::get<0>(params), std::get<1>(params), "false"));
-    }
-
-    // Set the resource directory based on command line args.
-    // Test will fail to set up if the argument is not set.
-    for (int i = 1; i < argc; i++) {
-        if (strcmp(argv[i], "-P") == 0 && i < argc - 1) {
-            sResourceDir = argv[i + 1];
-            break;
-        }
+                std::make_tuple(std::get<0>(params), std::get<1>(params), false));
     }
 
     ::testing::InitGoogleTest(&argc, argv);
diff --git a/media/codec2/hidl/1.0/vts/functional/audio/VtsHalMediaC2V1_0TargetAudioEncTest.cpp b/media/codec2/hidl/1.0/vts/functional/audio/VtsHalMediaC2V1_0TargetAudioEncTest.cpp
index e3a4f68..562c77f 100644
--- a/media/codec2/hidl/1.0/vts/functional/audio/VtsHalMediaC2V1_0TargetAudioEncTest.cpp
+++ b/media/codec2/hidl/1.0/vts/functional/audio/VtsHalMediaC2V1_0TargetAudioEncTest.cpp
@@ -35,11 +35,9 @@
 
 #include "media_c2_hidl_test_common.h"
 
-static std::vector<std::tuple<std::string, std::string, std::string, std::string>>
-        kEncodeTestParameters;
+using EncodeTestParameters = std::tuple<std::string, std::string, bool, int32_t>;
 
-// Resource directory
-static std::string sResourceDir = "";
+static std::vector<EncodeTestParameters> kEncodeTestParameters;
 
 class LinearBuffer : public C2Buffer {
   public:
@@ -75,30 +73,17 @@
         mLinearPool = std::make_shared<C2PooledBlockPool>(mLinearAllocator, mBlockPoolId++);
         ASSERT_NE(mLinearPool, nullptr);
 
-        mCompName = unknown_comp;
-        struct StringToName {
-            const char* Name;
-            standardComp CompName;
-        };
-        const StringToName kStringToName[] = {
-                {"aac", aac}, {"flac", flac}, {"opus", opus}, {"amrnb", amrnb}, {"amrwb", amrwb},
-        };
-        const size_t kNumStringToName = sizeof(kStringToName) / sizeof(kStringToName[0]);
+        std::vector<std::unique_ptr<C2Param>> queried;
+        mComponent->query({}, {C2PortMediaTypeSetting::output::PARAM_TYPE}, C2_DONT_BLOCK,
+                          &queried);
+        ASSERT_GT(queried.size(), 0);
 
-        // Find the component type
-        for (size_t i = 0; i < kNumStringToName; ++i) {
-            if (strcasestr(mComponentName.c_str(), kStringToName[i].Name)) {
-                mCompName = kStringToName[i].CompName;
-                break;
-            }
-        }
+        mMime = ((C2PortMediaTypeSetting::output*)queried[0].get())->m.value;
         mEos = false;
         mCsd = false;
         mFramesReceived = 0;
         mWorkResult = C2_OK;
         mOutputSize = 0u;
-        if (mCompName == unknown_comp) mDisableTest = true;
-        if (mDisableTest) std::cout << "[   WARN   ] Test Disabled \n";
         getInputMaxBufSize();
     }
 
@@ -113,6 +98,8 @@
     // Get the test parameters from GetParam call.
     virtual void getParams() {}
 
+    void GetURLForComponent(char* mURL);
+
     // callback function to process onWorkDone received by Listener
     void handleWorkDone(std::list<std::unique_ptr<C2Work>>& workItems) {
         for (std::unique_ptr<C2Work>& work : workItems) {
@@ -133,21 +120,13 @@
             }
         }
     }
-    enum standardComp {
-        aac,
-        flac,
-        opus,
-        amrnb,
-        amrwb,
-        unknown_comp,
-    };
 
+    std::string mMime;
     std::string mInstanceName;
     std::string mComponentName;
     bool mEos;
     bool mCsd;
     bool mDisableTest;
-    standardComp mCompName;
 
     int32_t mWorkResult;
     uint32_t mFramesReceived;
@@ -192,9 +171,8 @@
     }
 };
 
-class Codec2AudioEncHidlTest
-    : public Codec2AudioEncHidlTestBase,
-      public ::testing::WithParamInterface<std::tuple<std::string, std::string>> {
+class Codec2AudioEncHidlTest : public Codec2AudioEncHidlTestBase,
+                               public ::testing::WithParamInterface<TestParameters> {
     void getParams() {
         mInstanceName = std::get<0>(GetParam());
         mComponentName = std::get<1>(GetParam());
@@ -202,7 +180,7 @@
 };
 
 void validateComponent(const std::shared_ptr<android::Codec2Client::Component>& component,
-                       Codec2AudioEncHidlTest::standardComp compName, bool& disableTest) {
+                       bool& disableTest) {
     // Validate its a C2 Component
     if (component->getName().find("c2") == std::string::npos) {
         ALOGE("Not a c2 component");
@@ -229,13 +207,6 @@
             return;
         }
     }
-
-    // Validates component name
-    if (compName == Codec2AudioEncHidlTest::unknown_comp) {
-        ALOGE("Component InValid");
-        disableTest = true;
-        return;
-    }
     ALOGV("Component Valid");
 }
 
@@ -253,56 +224,48 @@
 }
 
 // Get config params for a component
-bool getConfigParams(Codec2AudioEncHidlTest::standardComp compName, int32_t* nChannels,
-                     int32_t* nSampleRate, int32_t* samplesPerFrame) {
-    switch (compName) {
-        case Codec2AudioEncHidlTest::aac:
-            *nChannels = 2;
-            *nSampleRate = 48000;
-            *samplesPerFrame = 1024;
-            break;
-        case Codec2AudioEncHidlTest::flac:
-            *nChannels = 2;
-            *nSampleRate = 48000;
-            *samplesPerFrame = 1152;
-            break;
-        case Codec2AudioEncHidlTest::opus:
-            *nChannels = 2;
-            *nSampleRate = 48000;
-            *samplesPerFrame = 960;
-            break;
-        case Codec2AudioEncHidlTest::amrnb:
-            *nChannels = 1;
-            *nSampleRate = 8000;
-            *samplesPerFrame = 160;
-            break;
-        case Codec2AudioEncHidlTest::amrwb:
-            *nChannels = 1;
-            *nSampleRate = 16000;
-            *samplesPerFrame = 160;
-            break;
-        default:
-            return false;
-    }
+bool getConfigParams(std::string mime, int32_t* nChannels, int32_t* nSampleRate,
+                     int32_t* samplesPerFrame) {
+    if (mime.find("mp4a-latm") != std::string::npos) {
+        *nChannels = 2;
+        *nSampleRate = 48000;
+        *samplesPerFrame = 1024;
+    } else if (mime.find("flac") != std::string::npos) {
+        *nChannels = 2;
+        *nSampleRate = 48000;
+        *samplesPerFrame = 1152;
+    } else if (mime.find("opus") != std::string::npos) {
+        *nChannels = 2;
+        *nSampleRate = 48000;
+        *samplesPerFrame = 960;
+    } else if (mime.find("3gpp") != std::string::npos) {
+        *nChannels = 1;
+        *nSampleRate = 8000;
+        *samplesPerFrame = 160;
+    } else if (mime.find("amr-wb") != std::string::npos) {
+        *nChannels = 1;
+        *nSampleRate = 16000;
+        *samplesPerFrame = 160;
+    } else {
+        return false;
+    }
+
     return true;
 }
 
 // LookUpTable of clips and metadata for component testing
-void GetURLForComponent(Codec2AudioEncHidlTest::standardComp comp, char* mURL) {
+void Codec2AudioEncHidlTestBase::GetURLForComponent(char* mURL) {
     struct CompToURL {
-        Codec2AudioEncHidlTest::standardComp comp;
+        std::string mime;
         const char* mURL;
     };
     static const CompToURL kCompToURL[] = {
-            {Codec2AudioEncHidlTest::standardComp::aac, "bbb_raw_2ch_48khz_s16le.raw"},
-            {Codec2AudioEncHidlTest::standardComp::amrnb, "bbb_raw_1ch_8khz_s16le.raw"},
-            {Codec2AudioEncHidlTest::standardComp::amrwb, "bbb_raw_1ch_16khz_s16le.raw"},
-            {Codec2AudioEncHidlTest::standardComp::flac, "bbb_raw_2ch_48khz_s16le.raw"},
-            {Codec2AudioEncHidlTest::standardComp::opus, "bbb_raw_2ch_48khz_s16le.raw"},
+            {"mp4a-latm", "bbb_raw_2ch_48khz_s16le.raw"}, {"3gpp", "bbb_raw_1ch_8khz_s16le.raw"},
+            {"amr-wb", "bbb_raw_1ch_16khz_s16le.raw"},    {"flac", "bbb_raw_2ch_48khz_s16le.raw"},
+            {"opus", "bbb_raw_2ch_48khz_s16le.raw"},
     };
 
     for (size_t i = 0; i < sizeof(kCompToURL) / sizeof(kCompToURL[0]); ++i) {
-        if (kCompToURL[i].comp == comp) {
+        if (mMime.find(kCompToURL[i].mime) != std::string::npos) {
             strcat(mURL, kCompToURL[i].mURL);
             return;
         }
@@ -395,14 +358,12 @@
 TEST_P(Codec2AudioEncHidlTest, validateCompName) {
     if (mDisableTest) GTEST_SKIP() << "Test is disabled";
     ALOGV("Checks if the given component is a valid audio component");
-    validateComponent(mComponent, mCompName, mDisableTest);
+    validateComponent(mComponent, mDisableTest);
     ASSERT_EQ(mDisableTest, false);
 }
 
-class Codec2AudioEncEncodeTest
-    : public Codec2AudioEncHidlTestBase,
-      public ::testing::WithParamInterface<
-              std::tuple<std::string, std::string, std::string, std::string>> {
+class Codec2AudioEncEncodeTest : public Codec2AudioEncHidlTestBase,
+                                 public ::testing::WithParamInterface<EncodeTestParameters> {
     void getParams() {
         mInstanceName = std::get<0>(GetParam());
         mComponentName = std::get<1>(GetParam());
@@ -414,17 +375,17 @@
     if (mDisableTest) GTEST_SKIP() << "Test is disabled";
     char mURL[512];
     strcpy(mURL, sResourceDir.c_str());
-    GetURLForComponent(mCompName, mURL);
-    bool signalEOS = !std::get<2>(GetParam()).compare("true");
+    GetURLForComponent(mURL);
+    bool signalEOS = std::get<2>(GetParam());
     // Ratio w.r.t to mInputMaxBufSize
-    int32_t inputMaxBufRatio = std::stoi(std::get<3>(GetParam()));
+    int32_t inputMaxBufRatio = std::get<3>(GetParam());
 
     int32_t nChannels;
     int32_t nSampleRate;
     int32_t samplesPerFrame;
 
-    if (!getConfigParams(mCompName, &nChannels, &nSampleRate, &samplesPerFrame)) {
-        std::cout << "Failed to get the config params for " << mCompName << " component\n";
+    if (!getConfigParams(mMime, &nChannels, &nSampleRate, &samplesPerFrame)) {
+        std::cout << "Failed to get the config params for " << mComponentName << "\n";
         std::cout << "[   WARN   ] Test Skipped \n";
         return;
     }
@@ -464,11 +425,9 @@
         ALOGE("framesReceived : %d inputFrames : %u", mFramesReceived, numFrames);
         ASSERT_TRUE(false);
     }
-    if ((mCompName == flac || mCompName == opus || mCompName == aac)) {
-        if (!mCsd) {
-            ALOGE("CSD buffer missing");
-            ASSERT_TRUE(false);
-        }
+    if ((mMime.find("flac") != std::string::npos) || (mMime.find("opus") != std::string::npos) ||
+        (mMime.find("mp4a-latm") != std::string::npos)) {
+        ASSERT_TRUE(mCsd) << "CSD buffer missing";
     }
     ASSERT_EQ(mEos, true);
     ASSERT_EQ(mComponent->stop(), C2_OK);
@@ -522,15 +481,15 @@
 
     char mURL[512];
     strcpy(mURL, sResourceDir.c_str());
-    GetURLForComponent(mCompName, mURL);
+    GetURLForComponent(mURL);
 
     mFlushedIndices.clear();
     int32_t nChannels;
     int32_t nSampleRate;
     int32_t samplesPerFrame;
 
-    if (!getConfigParams(mCompName, &nChannels, &nSampleRate, &samplesPerFrame)) {
-        std::cout << "Failed to get the config params for " << mCompName << " component\n";
+    if (!getConfigParams(mMime, &nChannels, &nSampleRate, &samplesPerFrame)) {
+        std::cout << "Failed to get the config params for " << mComponentName << "\n";
         std::cout << "[   WARN   ] Test Skipped \n";
         return;
     }
@@ -587,7 +546,7 @@
 
     char mURL[512];
     strcpy(mURL, sResourceDir.c_str());
-    GetURLForComponent(mCompName, mURL);
+    GetURLForComponent(mURL);
 
     std::ifstream eleStream;
     eleStream.open(mURL, std::ifstream::binary);
@@ -600,8 +559,8 @@
     int32_t numFrames = 16;
     int32_t maxChannelCount = 8;
 
-    if (!getConfigParams(mCompName, &nChannels, &nSampleRate, &samplesPerFrame)) {
-        std::cout << "Failed to get the config params for " << mCompName << " component\n";
+    if (!getConfigParams(mMime, &nChannels, &nSampleRate, &samplesPerFrame)) {
+        std::cout << "Failed to get the config params for " << mComponentName << "\n";
         std::cout << "[   WARN   ] Test Skipped \n";
         return;
     }
@@ -611,7 +570,7 @@
 
     // Looping through the maximum number of channel count supported by encoder
     for (nChannels = 1; nChannels < maxChannelCount; nChannels++) {
-        ALOGV("Configuring %u encoder for channel count = %d", mCompName, nChannels);
+        ALOGV("Configuring encoder %s  for channel count = %d", mComponentName.c_str(), nChannels);
         if (!setupConfigParam(mComponent, nChannels, nSampleRate)) {
             std::cout << "[   WARN   ] Test Skipped \n";
             return;
@@ -668,7 +627,9 @@
             ALOGE("framesReceived : %d inputFrames : %u", mFramesReceived, numFrames);
             ASSERT_TRUE(false);
         }
-        if ((mCompName == flac || mCompName == opus || mCompName == aac)) {
+        if ((mMime.find("flac") != std::string::npos) ||
+            (mMime.find("opus") != std::string::npos) ||
+            (mMime.find("mp4a-latm") != std::string::npos)) {
             ASSERT_TRUE(mCsd) << "CSD buffer missing";
         }
         ASSERT_TRUE(mEos);
@@ -687,7 +648,7 @@
 
     char mURL[512];
     strcpy(mURL, sResourceDir.c_str());
-    GetURLForComponent(mCompName, mURL);
+    GetURLForComponent(mURL);
 
     std::ifstream eleStream;
     eleStream.open(mURL, std::ifstream::binary);
@@ -699,8 +660,8 @@
     int32_t nChannels;
     int32_t numFrames = 16;
 
-    if (!getConfigParams(mCompName, &nChannels, &nSampleRate, &samplesPerFrame)) {
-        std::cout << "Failed to get the config params for " << mCompName << " component\n";
+    if (!getConfigParams(mMime, &nChannels, &nSampleRate, &samplesPerFrame)) {
+        std::cout << "Failed to get the config params for " << mComponentName << "\n";
         std::cout << "[   WARN   ] Test Skipped \n";
         return;
     }
@@ -711,7 +672,7 @@
     uint32_t prevSampleRate = 0u;
 
     for (int32_t nSampleRate : sampleRateValues) {
-        ALOGV("Configuring %u encoder for SampleRate = %d", mCompName, nSampleRate);
+        ALOGV("Configuring encoder %s  for SampleRate = %d", mComponentName.c_str(), nSampleRate);
         if (!setupConfigParam(mComponent, nChannels, nSampleRate)) {
             std::cout << "[   WARN   ] Test Skipped \n";
             return;
@@ -772,7 +733,9 @@
             ALOGE("framesReceived : %d inputFrames : %u", mFramesReceived, numFrames);
             ASSERT_TRUE(false);
         }
-        if ((mCompName == flac || mCompName == opus || mCompName == aac)) {
+        if ((mMime.find("flac") != std::string::npos) ||
+            (mMime.find("opus") != std::string::npos) ||
+            (mMime.find("mp4a-latm") != std::string::npos)) {
             ASSERT_TRUE(mCsd) << "CSD buffer missing";
         }
         ASSERT_TRUE(mEos);
@@ -786,36 +749,28 @@
 }
 
 INSTANTIATE_TEST_SUITE_P(PerInstance, Codec2AudioEncHidlTest, testing::ValuesIn(kTestParameters),
-                         android::hardware::PrintInstanceTupleNameToString<>);
+                         PrintInstanceTupleNameToString<>);
 
 // EncodeTest with EOS / No EOS and inputMaxBufRatio
 // inputMaxBufRatio is ratio w.r.t. to mInputMaxBufSize
 INSTANTIATE_TEST_SUITE_P(EncodeTest, Codec2AudioEncEncodeTest,
                          testing::ValuesIn(kEncodeTestParameters),
-                         android::hardware::PrintInstanceTupleNameToString<>);
+                         PrintInstanceTupleNameToString<>);
 
 }  // anonymous namespace
 
 int main(int argc, char** argv) {
+    parseArgs(argc, argv);
     kTestParameters = getTestParameters(C2Component::DOMAIN_AUDIO, C2Component::KIND_ENCODER);
     for (auto params : kTestParameters) {
         kEncodeTestParameters.push_back(
-                std::make_tuple(std::get<0>(params), std::get<1>(params), "false", "1"));
+                std::make_tuple(std::get<0>(params), std::get<1>(params), false, 1));
         kEncodeTestParameters.push_back(
-                std::make_tuple(std::get<0>(params), std::get<1>(params), "false", "2"));
+                std::make_tuple(std::get<0>(params), std::get<1>(params), false, 2));
         kEncodeTestParameters.push_back(
-                std::make_tuple(std::get<0>(params), std::get<1>(params), "true", "1"));
+                std::make_tuple(std::get<0>(params), std::get<1>(params), true, 1));
         kEncodeTestParameters.push_back(
-                std::make_tuple(std::get<0>(params), std::get<1>(params), "true", "2"));
-    }
-
-    // Set the resource directory based on command line args.
-    // Test will fail to set up if the argument is not set.
-    for (int i = 1; i < argc; i++) {
-        if (strcmp(argv[i], "-P") == 0 && i < argc - 1) {
-            sResourceDir = argv[i + 1];
-            break;
-        }
+                std::make_tuple(std::get<0>(params), std::get<1>(params), true, 2));
     }
 
     ::testing::InitGoogleTest(&argc, argv);
diff --git a/media/codec2/hidl/1.0/vts/functional/common/media_c2_hidl_test_common.cpp b/media/codec2/hidl/1.0/vts/functional/common/media_c2_hidl_test_common.cpp
index 0251ec2..1f1681d 100644
--- a/media/codec2/hidl/1.0/vts/functional/common/media_c2_hidl_test_common.cpp
+++ b/media/codec2/hidl/1.0/vts/functional/common/media_c2_hidl_test_common.cpp
@@ -22,6 +22,48 @@
 
 #include <android/hardware/media/c2/1.0/IComponentStore.h>
 
+std::string sResourceDir = "";
+
+std::string sComponentNamePrefix = "";
+
+static constexpr struct option kArgOptions[] = {
+        {"res", required_argument, 0, 'P'},
+        {"prefix", required_argument, 0, 'p'},
+        {"help", required_argument, 0, 'h'},
+        {nullptr, 0, nullptr, 0},
+};
+
+void printUsage(char* me) {
+    std::cerr << "VTS tests to test codec2 components \n";
+    std::cerr << "Usage: " << me << " [options] \n";
+    std::cerr << "\t -P,  --res:    Mandatory path to a folder that contains test resources \n";
+    std::cerr << "\t -p,  --prefix: Optional prefix to select component/s to be tested \n";
+    std::cerr << "\t                    All codecs are tested by default \n";
+    std::cerr << "\t                    Eg: c2.android - test codecs starting with c2.android \n";
+    std::cerr << "\t                    Eg: c2.android.aac.decoder - test a specific codec \n";
+    std::cerr << "\t -h,  --help:   Print usage \n";
+}
+
+void parseArgs(int argc, char** argv) {
+    int arg;
+    int option_index;
+    while ((arg = getopt_long(argc, argv, ":P:p:h", kArgOptions, &option_index)) != -1) {
+        switch (arg) {
+            case 'P':
+                sResourceDir = optarg;
+                break;
+            case 'p':
+                sComponentNamePrefix = optarg;
+                break;
+            case 'h':
+                printUsage(argv[0]);
+                break;
+            default:
+                break;
+        }
+    }
+}
+
 // Test the codecs for NullBuffer, Empty Input Buffer with(out) flags set
 void testInputBuffer(const std::shared_ptr<android::Codec2Client::Component>& component,
                      std::mutex& queueLock, std::list<std::unique_ptr<C2Work>>& workQueue,
@@ -92,8 +134,7 @@
         for (size_t i = 0; i < updates.size(); ++i) {
             C2Param* param = updates[i].get();
             if (param->index() == C2StreamInitDataInfo::output::PARAM_TYPE) {
-                C2StreamInitDataInfo::output* csdBuffer =
-                        (C2StreamInitDataInfo::output*)(param);
+                C2StreamInitDataInfo::output* csdBuffer = (C2StreamInitDataInfo::output*)(param);
                 size_t csdSize = csdBuffer->flexCount();
                 if (csdSize > 0) csd = true;
             } else if ((param->index() == C2StreamSampleRateInfo::output::PARAM_TYPE) ||
@@ -118,8 +159,7 @@
             typedef std::unique_lock<std::mutex> ULock;
             ULock l(queueLock);
             workQueue.push_back(std::move(work));
-            if (!flushedIndices.empty() &&
-                (frameIndexIt != flushedIndices.end())) {
+            if (!flushedIndices.empty() && (frameIndexIt != flushedIndices.end())) {
                 flushedIndices.erase(frameIndexIt);
             }
             queueCondition.notify_all();
@@ -136,15 +176,15 @@
 }
 
 // Return all test parameters, a list of tuple of <instance, component>
-const std::vector<std::tuple<std::string, std::string>>& getTestParameters() {
+const std::vector<TestParameters>& getTestParameters() {
     return getTestParameters(C2Component::DOMAIN_OTHER, C2Component::KIND_OTHER);
 }
 
 // Return all test parameters, a list of tuple of <instance, component> with matching domain and
 // kind.
-const std::vector<std::tuple<std::string, std::string>>& getTestParameters(
-        C2Component::domain_t domain, C2Component::kind_t kind) {
-    static std::vector<std::tuple<std::string, std::string>> parameters;
+const std::vector<TestParameters>& getTestParameters(C2Component::domain_t domain,
+                                                     C2Component::kind_t kind) {
+    static std::vector<TestParameters> parameters;
 
     auto instances = android::Codec2Client::GetServiceNames();
     for (std::string instance : instances) {
@@ -157,11 +197,18 @@
                 (traits.domain != domain || traits.kind != kind)) {
                 continue;
             }
-
+            if (traits.name.rfind(sComponentNamePrefix, 0) != 0) {
+                ALOGD("Skipping tests for %s. Prefix specified is %s", traits.name.c_str(),
+                      sComponentNamePrefix.c_str());
+                continue;
+            }
             parameters.push_back(std::make_tuple(instance, traits.name));
         }
     }
 
+    if (parameters.empty()) {
+        ALOGE("No test parameters added. Verify component prefix passed to the test");
+    }
     return parameters;
 }
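
The prefix filter in getTestParameters() above uses std::string::rfind with a start position of 0; a backwards search anchored at position 0 can only match at the very beginning of the string, so rfind(prefix, 0) == 0 is effectively a starts-with check. An illustrative, standalone C++ sketch of the idiom (the component and prefix names here are made-up examples):

    #include <cassert>
    #include <string>

    int main() {
        std::string name = "c2.android.aac.decoder";
        // rfind(prefix, 0) returns 0 only when 'name' begins with 'prefix'.
        assert(name.rfind("c2.android", 0) == 0);  // prefix matches
        assert(name.rfind("c2.vendor", 0) != 0);   // prefix does not match
        return 0;
    }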
 
diff --git a/media/codec2/hidl/1.0/vts/functional/common/media_c2_hidl_test_common.h b/media/codec2/hidl/1.0/vts/functional/common/media_c2_hidl_test_common.h
index 50e3ac5..e74f247 100644
--- a/media/codec2/hidl/1.0/vts/functional/common/media_c2_hidl_test_common.h
+++ b/media/codec2/hidl/1.0/vts/functional/common/media_c2_hidl_test_common.h
@@ -40,7 +40,14 @@
 
 using namespace ::std::chrono;
 
-static std::vector<std::tuple<std::string, std::string>> kTestParameters;
+using TestParameters = std::tuple<std::string, std::string>;
+static std::vector<TestParameters> kTestParameters;
+
+// Resource directory
+extern std::string sResourceDir;
+
+// Component name prefix
+extern std::string sComponentNamePrefix;
 
 struct FrameInfo {
     int bytesCount;
@@ -48,6 +55,18 @@
     int64_t timestamp;
 };
 
+template <typename... T>
+static inline std::string PrintInstanceTupleNameToString(
+        const testing::TestParamInfo<std::tuple<T...>>& info) {
+    std::stringstream ss;
+    std::apply([&ss](auto&&... elems) { ((ss << elems << '_'), ...); }, info.param);
+    ss << info.index;
+    std::string param_string = ss.str();
+    auto isNotAlphaNum = [](char c) { return !std::isalnum(c); };
+    std::replace_if(param_string.begin(), param_string.end(), isNotAlphaNum, '_');
+    return param_string;
+}
+
 /*
  * Handle Callback functions onWorkDone(), onTripped(),
  * onError(), onDeath(), onFramesRendered()
@@ -105,13 +124,15 @@
     std::function<void(std::list<std::unique_ptr<C2Work>>& workItems)> callBack;
 };
 
+void parseArgs(int argc, char** argv);
+
 // Return all test parameters, a list of tuple of <instance, component>.
-const std::vector<std::tuple<std::string, std::string>>& getTestParameters();
+const std::vector<TestParameters>& getTestParameters();
 
 // Return all test parameters, a list of tuple of <instance, component> with matching domain and
 // kind.
-const std::vector<std::tuple<std::string, std::string>>& getTestParameters(
-        C2Component::domain_t domain, C2Component::kind_t kind);
+const std::vector<TestParameters>& getTestParameters(C2Component::domain_t domain,
+                                                     C2Component::kind_t kind);
 
 /*
  * common functions declarations
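
The PrintInstanceTupleNameToString helper declared above builds the gtest parameter name by streaming every tuple element separated by '_', appending the parameter index, and then replacing every non-alphanumeric character with '_'. An illustrative, standalone sketch of what it produces (the instance and component strings are made-up examples):

    #include <gtest/gtest.h>

    #include <iostream>
    #include <string>
    #include <tuple>

    #include "media_c2_hidl_test_common.h"  // declares TestParameters and the helper

    int main() {
        testing::TestParamInfo<TestParameters> info(
                std::make_tuple("software", "c2.android.aac.encoder"), /*index=*/3);
        // Elements joined with '_', index appended, '.' replaced by '_':
        // prints "software_c2_android_aac_encoder_3".
        std::cout << PrintInstanceTupleNameToString(info) << std::endl;
        return 0;
    }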
diff --git a/media/codec2/hidl/1.0/vts/functional/component/VtsHalMediaC2V1_0TargetComponentTest.cpp b/media/codec2/hidl/1.0/vts/functional/component/VtsHalMediaC2V1_0TargetComponentTest.cpp
index 6122225..29acd33 100644
--- a/media/codec2/hidl/1.0/vts/functional/component/VtsHalMediaC2V1_0TargetComponentTest.cpp
+++ b/media/codec2/hidl/1.0/vts/functional/component/VtsHalMediaC2V1_0TargetComponentTest.cpp
@@ -53,9 +53,8 @@
     }
 
 namespace {
-
-static std::vector<std::tuple<std::string, std::string, std::string, std::string>>
-        kInputTestParameters;
+using InputTestParameters = std::tuple<std::string, std::string, uint32_t, bool>;
+static std::vector<InputTestParameters> kInputTestParameters;
 
 // google.codec2 Component test setup
 class Codec2ComponentHidlTestBase : public ::testing::Test {
@@ -120,9 +119,8 @@
     }
 };
 
-class Codec2ComponentHidlTest
-    : public Codec2ComponentHidlTestBase,
-      public ::testing::WithParamInterface<std::tuple<std::string, std::string>> {
+class Codec2ComponentHidlTest : public Codec2ComponentHidlTestBase,
+                                public ::testing::WithParamInterface<TestParameters> {
     void getParams() {
         mInstanceName = std::get<0>(GetParam());
         mComponentName = std::get<1>(GetParam());
@@ -317,10 +315,8 @@
     ASSERT_EQ(err, C2_OK);
 }
 
-class Codec2ComponentInputTests
-    : public Codec2ComponentHidlTestBase,
-      public ::testing::WithParamInterface<
-              std::tuple<std::string, std::string, std::string, std::string>> {
+class Codec2ComponentInputTests : public Codec2ComponentHidlTestBase,
+                                  public ::testing::WithParamInterface<InputTestParameters> {
     void getParams() {
         mInstanceName = std::get<0>(GetParam());
         mComponentName = std::get<1>(GetParam());
@@ -330,8 +326,8 @@
 TEST_P(Codec2ComponentInputTests, InputBufferTest) {
     description("Tests for different inputs");
 
-    uint32_t flags = std::stoul(std::get<2>(GetParam()));
-    bool isNullBuffer = !std::get<3>(GetParam()).compare("true");
+    uint32_t flags = std::get<2>(GetParam());
+    bool isNullBuffer = std::get<3>(GetParam());
     if (isNullBuffer)
         ALOGD("Testing for null input buffer with flag : %u", flags);
     else
@@ -350,31 +346,28 @@
 }
 
 INSTANTIATE_TEST_SUITE_P(PerInstance, Codec2ComponentHidlTest, testing::ValuesIn(kTestParameters),
-                         android::hardware::PrintInstanceTupleNameToString<>);
+                         PrintInstanceTupleNameToString<>);
 
 INSTANTIATE_TEST_CASE_P(NonStdInputs, Codec2ComponentInputTests,
-                        testing::ValuesIn(kInputTestParameters),
-                        android::hardware::PrintInstanceTupleNameToString<>);
+                        testing::ValuesIn(kInputTestParameters), PrintInstanceTupleNameToString<>);
 }  // anonymous namespace
 
 // TODO: Add test for Invalid work,
 // TODO: Add test for Invalid states
 int main(int argc, char** argv) {
+    parseArgs(argc, argv);
     kTestParameters = getTestParameters();
     for (auto params : kTestParameters) {
         kInputTestParameters.push_back(
-                std::make_tuple(std::get<0>(params), std::get<1>(params), "0", "true"));
+                std::make_tuple(std::get<0>(params), std::get<1>(params), 0, true));
+        kInputTestParameters.push_back(std::make_tuple(std::get<0>(params), std::get<1>(params),
+                                                       C2FrameData::FLAG_END_OF_STREAM, true));
         kInputTestParameters.push_back(
-                std::make_tuple(std::get<0>(params), std::get<1>(params),
-                                std::to_string(C2FrameData::FLAG_END_OF_STREAM), "true"));
-        kInputTestParameters.push_back(
-                std::make_tuple(std::get<0>(params), std::get<1>(params), "0", "false"));
-        kInputTestParameters.push_back(
-                std::make_tuple(std::get<0>(params), std::get<1>(params),
-                                std::to_string(C2FrameData::FLAG_CODEC_CONFIG), "false"));
-        kInputTestParameters.push_back(
-                std::make_tuple(std::get<0>(params), std::get<1>(params),
-                                std::to_string(C2FrameData::FLAG_END_OF_STREAM), "false"));
+                std::make_tuple(std::get<0>(params), std::get<1>(params), 0, false));
+        kInputTestParameters.push_back(std::make_tuple(std::get<0>(params), std::get<1>(params),
+                                                       C2FrameData::FLAG_CODEC_CONFIG, false));
+        kInputTestParameters.push_back(std::make_tuple(std::get<0>(params), std::get<1>(params),
+                                                       C2FrameData::FLAG_END_OF_STREAM, false));
     }
 
     ::testing::InitGoogleTest(&argc, argv);
diff --git a/media/codec2/hidl/1.0/vts/functional/video/VtsHalMediaC2V1_0TargetVideoDecTest.cpp b/media/codec2/hidl/1.0/vts/functional/video/VtsHalMediaC2V1_0TargetVideoDecTest.cpp
index b520c17..d0a1c31 100644
--- a/media/codec2/hidl/1.0/vts/functional/video/VtsHalMediaC2V1_0TargetVideoDecTest.cpp
+++ b/media/codec2/hidl/1.0/vts/functional/video/VtsHalMediaC2V1_0TargetVideoDecTest.cpp
@@ -40,13 +40,44 @@
 #include "media_c2_hidl_test_common.h"
 #include "media_c2_video_hidl_test_common.h"
 
-static std::vector<std::tuple<std::string, std::string, std::string, std::string>>
-        kDecodeTestParameters;
+using DecodeTestParameters = std::tuple<std::string, std::string, uint32_t, bool>;
+static std::vector<DecodeTestParameters> kDecodeTestParameters;
 
-static std::vector<std::tuple<std::string, std::string, std::string>> kCsdFlushTestParameters;
+using CsdFlushTestParameters = std::tuple<std::string, std::string, bool>;
+static std::vector<CsdFlushTestParameters> kCsdFlushTestParameters;
 
-// Resource directory
-static std::string sResourceDir = "";
+struct CompToURL {
+    std::string mime;
+    std::string mURL;
+    std::string info;
+    std::string chksum;
+};
+std::vector<CompToURL> kCompToURL = {
+        {"avc", "bbb_avc_176x144_300kbps_60fps.h264", "bbb_avc_176x144_300kbps_60fps.info",
+         "bbb_avc_176x144_300kbps_60fps_chksum.md5"},
+        {"avc", "bbb_avc_640x360_768kbps_30fps.h264", "bbb_avc_640x360_768kbps_30fps.info",
+         "bbb_avc_640x360_768kbps_30fps_chksum.md5"},
+        {"hevc", "bbb_hevc_176x144_176kbps_60fps.hevc", "bbb_hevc_176x144_176kbps_60fps.info",
+         "bbb_hevc_176x144_176kbps_60fps_chksum.md5"},
+        {"hevc", "bbb_hevc_640x360_1600kbps_30fps.hevc", "bbb_hevc_640x360_1600kbps_30fps.info",
+         "bbb_hevc_640x360_1600kbps_30fps_chksum.md5"},
+        {"mpeg2", "bbb_mpeg2_176x144_105kbps_25fps.m2v", "bbb_mpeg2_176x144_105kbps_25fps.info",
+         ""},
+        {"mpeg2", "bbb_mpeg2_352x288_1mbps_60fps.m2v", "bbb_mpeg2_352x288_1mbps_60fps.info", ""},
+        {"3gpp", "bbb_h263_352x288_300kbps_12fps.h263", "bbb_h263_352x288_300kbps_12fps.info", ""},
+        {"mp4v-es", "bbb_mpeg4_352x288_512kbps_30fps.m4v", "bbb_mpeg4_352x288_512kbps_30fps.info",
+         ""},
+        {"vp8", "bbb_vp8_176x144_240kbps_60fps.vp8", "bbb_vp8_176x144_240kbps_60fps.info", ""},
+        {"vp8", "bbb_vp8_640x360_2mbps_30fps.vp8", "bbb_vp8_640x360_2mbps_30fps.info",
+         "bbb_vp8_640x360_2mbps_30fps_chksm.md5"},
+        {"vp9", "bbb_vp9_176x144_285kbps_60fps.vp9", "bbb_vp9_176x144_285kbps_60fps.info", ""},
+        {"vp9", "bbb_vp9_640x360_1600kbps_30fps.vp9", "bbb_vp9_640x360_1600kbps_30fps.info",
+         "bbb_vp9_640x360_1600kbps_30fps_chksm.md5"},
+        {"vp9", "bbb_vp9_704x480_280kbps_24fps_altref_2.vp9",
+         "bbb_vp9_704x480_280kbps_24fps_altref_2.info", ""},
+        {"av01", "bbb_av1_640_360.av1", "bbb_av1_640_360.info", "bbb_av1_640_360_chksum.md5"},
+        {"av01", "bbb_av1_176_144.av1", "bbb_av1_176_144.info", "bbb_av1_176_144_chksm.md5"},
+};
 
 class LinearBuffer : public C2Buffer {
   public:
@@ -85,26 +116,11 @@
         mLinearPool = std::make_shared<C2PooledBlockPool>(mLinearAllocator, mBlockPoolId++);
         ASSERT_NE(mLinearPool, nullptr);
 
-        mCompName = unknown_comp;
-        struct StringToName {
-            const char* Name;
-            standardComp CompName;
-        };
+        std::vector<std::unique_ptr<C2Param>> queried;
+        mComponent->query({}, {C2PortMediaTypeSetting::input::PARAM_TYPE}, C2_DONT_BLOCK, &queried);
+        ASSERT_GT(queried.size(), 0);
 
-        const StringToName kStringToName[] = {
-                {"h263", h263}, {"avc", avc}, {"mpeg2", mpeg2}, {"mpeg4", mpeg4},
-                {"hevc", hevc}, {"vp8", vp8}, {"vp9", vp9},     {"av1", av1},
-        };
-
-        const size_t kNumStringToName = sizeof(kStringToName) / sizeof(kStringToName[0]);
-
-        // Find the component type
-        for (size_t i = 0; i < kNumStringToName; ++i) {
-            if (strcasestr(mComponentName.c_str(), kStringToName[i].Name)) {
-                mCompName = kStringToName[i].CompName;
-                break;
-            }
-        }
+        mMime = ((C2PortMediaTypeSetting::input*)queried[0].get())->m.value;
         mEos = false;
         mFramesReceived = 0;
         mTimestampUs = 0u;
@@ -114,11 +130,11 @@
         mMd5Offset = 0;
         mMd5Enable = false;
         mRefMd5 = nullptr;
-        if (mCompName == unknown_comp) mDisableTest = true;
 
         C2SecureModeTuning secureModeTuning{};
         mComponent->query({&secureModeTuning}, {}, C2_MAY_BLOCK, nullptr);
-        if (secureModeTuning.value == C2Config::SM_READ_PROTECTED) {
+        if (secureModeTuning.value == C2Config::SM_READ_PROTECTED ||
+            secureModeTuning.value == C2Config::SM_READ_PROTECTED_WITH_ENCRYPTED) {
             mDisableTest = true;
         }
 
@@ -136,6 +152,9 @@
     // Get the test parameters from GetParam call.
     virtual void getParams() {}
 
+    void GetURLChksmForComponent(char* mURL, char* info, char* chksum, size_t streamIndex);
+    void GetURLForComponent(char* mURL, char* info, size_t streamIndex = 0);
+
     /* Calculate the CKSUM for the data in inbuf */
     void calc_md5_cksum(uint8_t* pu1_inbuf, uint32_t u4_stride, uint32_t u4_width,
                         uint32_t u4_height, uint8_t* pu1_cksum_p) {
@@ -220,8 +239,7 @@
                 if (!codecConfig && !work->worklets.front()->output.buffers.empty()) {
                     if (mReorderDepth < 0) {
                         C2PortReorderBufferDepthTuning::output reorderBufferDepth;
-                        mComponent->query({&reorderBufferDepth}, {}, C2_MAY_BLOCK,
-                                          nullptr);
+                        mComponent->query({&reorderBufferDepth}, {}, C2_MAY_BLOCK, nullptr);
                         mReorderDepth = reorderBufferDepth.value;
                         if (mReorderDepth > 0) {
                             // TODO: Add validation for reordered output
@@ -267,18 +285,7 @@
         }
     }
 
-    enum standardComp {
-        h263,
-        avc,
-        mpeg2,
-        mpeg4,
-        hevc,
-        vp8,
-        vp9,
-        av1,
-        unknown_comp,
-    };
-
+    std::string mMime;
     std::string mInstanceName;
     std::string mComponentName;
 
@@ -291,7 +298,6 @@
     char* mRefMd5;
     std::list<uint64_t> mTimestampUslist;
     std::list<uint64_t> mFlushedIndices;
-    standardComp mCompName;
 
     int32_t mWorkResult;
     int32_t mReorderDepth;
@@ -314,9 +320,8 @@
     }
 };
 
-class Codec2VideoDecHidlTest
-    : public Codec2VideoDecHidlTestBase,
-      public ::testing::WithParamInterface<std::tuple<std::string, std::string>> {
+class Codec2VideoDecHidlTest : public Codec2VideoDecHidlTestBase,
+                               public ::testing::WithParamInterface<TestParameters> {
     void getParams() {
         mInstanceName = std::get<0>(GetParam());
         mComponentName = std::get<1>(GetParam());
@@ -324,7 +329,7 @@
 };
 
 void validateComponent(const std::shared_ptr<android::Codec2Client::Component>& component,
-                       Codec2VideoDecHidlTest::standardComp compName, bool& disableTest) {
+                       bool& disableTest) {
     // Validate its a C2 Component
     if (component->getName().find("c2") == std::string::npos) {
         ALOGE("Not a c2 component");
@@ -351,83 +356,32 @@
             return;
         }
     }
-
-    // Validates component name
-    if (compName == Codec2VideoDecHidlTest::unknown_comp) {
-        ALOGE("Component InValid");
-        disableTest = true;
-        return;
-    }
     ALOGV("Component Valid");
 }
 
 // number of elementary streams per component
 #define STREAM_COUNT 3
 // LookUpTable of clips, metadata and chksum for component testing
-void GetURLChksmForComponent(Codec2VideoDecHidlTest::standardComp comp, char* mURL, char* info,
-                             char* chksum, size_t streamIndex = 1) {
-    struct CompToURL {
-        Codec2VideoDecHidlTest::standardComp comp;
-        const char mURL[STREAM_COUNT][512];
-        const char info[STREAM_COUNT][512];
-        const char chksum[STREAM_COUNT][512];
-    };
-    ASSERT_TRUE(streamIndex < STREAM_COUNT);
-
-    static const CompToURL kCompToURL[] = {
-            {Codec2VideoDecHidlTest::standardComp::avc,
-             {"bbb_avc_176x144_300kbps_60fps.h264", "bbb_avc_640x360_768kbps_30fps.h264", ""},
-             {"bbb_avc_176x144_300kbps_60fps.info", "bbb_avc_640x360_768kbps_30fps.info", ""},
-             {"bbb_avc_176x144_300kbps_60fps_chksum.md5",
-              "bbb_avc_640x360_768kbps_30fps_chksum.md5", ""}},
-            {Codec2VideoDecHidlTest::standardComp::hevc,
-             {"bbb_hevc_176x144_176kbps_60fps.hevc", "bbb_hevc_640x360_1600kbps_30fps.hevc", ""},
-             {"bbb_hevc_176x144_176kbps_60fps.info", "bbb_hevc_640x360_1600kbps_30fps.info", ""},
-             {"bbb_hevc_176x144_176kbps_60fps_chksum.md5",
-              "bbb_hevc_640x360_1600kbps_30fps_chksum.md5", ""}},
-            {Codec2VideoDecHidlTest::standardComp::mpeg2,
-             {"bbb_mpeg2_176x144_105kbps_25fps.m2v", "bbb_mpeg2_352x288_1mbps_60fps.m2v", ""},
-             {"bbb_mpeg2_176x144_105kbps_25fps.info", "bbb_mpeg2_352x288_1mbps_60fps.info", ""},
-             {"", "", ""}},
-            {Codec2VideoDecHidlTest::standardComp::h263,
-             {"", "bbb_h263_352x288_300kbps_12fps.h263", ""},
-             {"", "bbb_h263_352x288_300kbps_12fps.info", ""},
-             {"", "", ""}},
-            {Codec2VideoDecHidlTest::standardComp::mpeg4,
-             {"", "bbb_mpeg4_352x288_512kbps_30fps.m4v", ""},
-             {"", "bbb_mpeg4_352x288_512kbps_30fps.info", ""},
-             {"", "", ""}},
-            {Codec2VideoDecHidlTest::standardComp::vp8,
-             {"bbb_vp8_176x144_240kbps_60fps.vp8", "bbb_vp8_640x360_2mbps_30fps.vp8", ""},
-             {"bbb_vp8_176x144_240kbps_60fps.info", "bbb_vp8_640x360_2mbps_30fps.info", ""},
-             {"", "bbb_vp8_640x360_2mbps_30fps_chksm.md5", ""}},
-            {Codec2VideoDecHidlTest::standardComp::vp9,
-             {"bbb_vp9_176x144_285kbps_60fps.vp9", "bbb_vp9_640x360_1600kbps_30fps.vp9",
-              "bbb_vp9_704x480_280kbps_24fps_altref_2.vp9"},
-             {"bbb_vp9_176x144_285kbps_60fps.info", "bbb_vp9_640x360_1600kbps_30fps.info",
-              "bbb_vp9_704x480_280kbps_24fps_altref_2.info"},
-             {"", "bbb_vp9_640x360_1600kbps_30fps_chksm.md5", ""}},
-            {Codec2VideoDecHidlTest::standardComp::av1,
-             {"bbb_av1_640_360.av1", "bbb_av1_176_144.av1", ""},
-             {"bbb_av1_640_360.info", "bbb_av1_176_144.info", ""},
-             {"bbb_av1_640_360_chksum.md5", "bbb_av1_176_144_chksm.md5", ""}},
-    };
-
-    for (size_t i = 0; i < sizeof(kCompToURL) / sizeof(kCompToURL[0]); ++i) {
-        if (kCompToURL[i].comp == comp) {
-            strcat(mURL, kCompToURL[i].mURL[streamIndex]);
-            strcat(info, kCompToURL[i].info[streamIndex]);
-            strcat(chksum, kCompToURL[i].chksum[streamIndex]);
-            return;
+void Codec2VideoDecHidlTestBase::GetURLChksmForComponent(char* mURL, char* info, char* chksum,
+                                                         size_t streamIndex) {
+    int streamCount = 0;
+    for (size_t i = 0; i < kCompToURL.size(); ++i) {
+        if (mMime.find(kCompToURL[i].mime) != std::string::npos) {
+            if (streamCount == streamIndex) {
+                strcat(mURL, kCompToURL[i].mURL.c_str());
+                strcat(info, kCompToURL[i].info.c_str());
+                strcat(chksum, kCompToURL[i].chksum.c_str());
+                return;
+            }
+            streamCount++;
         }
     }
 }
 
-void GetURLForComponent(Codec2VideoDecHidlTest::standardComp comp, char* mURL, char* info,
-                        size_t streamIndex = 1) {
+void Codec2VideoDecHidlTestBase::GetURLForComponent(char* mURL, char* info, size_t streamIndex) {
     char chksum[512];
     strcpy(chksum, sResourceDir.c_str());
-    GetURLChksmForComponent(comp, mURL, info, chksum, streamIndex);
+    GetURLChksmForComponent(mURL, info, chksum, streamIndex);
 }
 
 void decodeNFrames(const std::shared_ptr<android::Codec2Client::Component>& component,
@@ -517,7 +471,7 @@
 TEST_P(Codec2VideoDecHidlTest, validateCompName) {
     if (mDisableTest) GTEST_SKIP() << "Test is disabled";
     ALOGV("Checks if the given component is a valid video component");
-    validateComponent(mComponent, mCompName, mDisableTest);
+    validateComponent(mComponent, mDisableTest);
     ASSERT_EQ(mDisableTest, false);
 }
 
@@ -573,10 +527,8 @@
     return false;
 }
 
-class Codec2VideoDecDecodeTest
-    : public Codec2VideoDecHidlTestBase,
-      public ::testing::WithParamInterface<
-              std::tuple<std::string, std::string, std::string, std::string>> {
+class Codec2VideoDecDecodeTest : public Codec2VideoDecHidlTestBase,
+                                 public ::testing::WithParamInterface<DecodeTestParameters> {
     void getParams() {
         mInstanceName = std::get<0>(GetParam());
         mComponentName = std::get<1>(GetParam());
@@ -588,8 +540,8 @@
     description("Decodes input file");
     if (mDisableTest) GTEST_SKIP() << "Test is disabled";
 
-    uint32_t streamIndex = std::stoi(std::get<2>(GetParam()));
-    bool signalEOS = !std::get<2>(GetParam()).compare("true");
+    uint32_t streamIndex = std::get<2>(GetParam());
+    bool signalEOS = std::get<3>(GetParam());
     mTimestampDevTest = true;
 
     char mURL[512], info[512], chksum[512];
@@ -599,7 +551,7 @@
     strcpy(info, sResourceDir.c_str());
     strcpy(chksum, sResourceDir.c_str());
 
-    GetURLChksmForComponent(mCompName, mURL, info, chksum, streamIndex);
+    GetURLChksmForComponent(mURL, info, chksum, streamIndex);
     if (!(strcmp(mURL, sResourceDir.c_str())) || !(strcmp(info, sResourceDir.c_str()))) {
         ALOGV("Skipping Test, Stream not available");
         return;
@@ -688,9 +640,11 @@
 TEST_P(Codec2VideoDecHidlTest, AdaptiveDecodeTest) {
     description("Adaptive Decode Test");
     if (mDisableTest) GTEST_SKIP() << "Test is disabled";
-    if (!(mCompName == avc || mCompName == hevc || mCompName == vp8 || mCompName == vp9 ||
-          mCompName == mpeg2))
+    if (!(strcasestr(mMime.c_str(), "avc") || strcasestr(mMime.c_str(), "hevc") ||
+          strcasestr(mMime.c_str(), "vp8") || strcasestr(mMime.c_str(), "vp9") ||
+          strcasestr(mMime.c_str(), "mpeg2"))) {
         return;
+    }
 
     typedef std::unique_lock<std::mutex> ULock;
     ASSERT_EQ(mComponent->start(), C2_OK);
@@ -705,7 +659,7 @@
 
         strcpy(mURL, sResourceDir.c_str());
         strcpy(info, sResourceDir.c_str());
-        GetURLForComponent(mCompName, mURL, info, i % STREAM_COUNT);
+        GetURLForComponent(mURL, info, i % STREAM_COUNT);
         if (!(strcmp(mURL, sResourceDir.c_str())) || !(strcmp(info, sResourceDir.c_str()))) {
             ALOGV("Stream not available, skipping this index");
             continue;
@@ -801,7 +755,7 @@
 
     strcpy(mURL, sResourceDir.c_str());
     strcpy(info, sResourceDir.c_str());
-    GetURLForComponent(mCompName, mURL, info);
+    GetURLForComponent(mURL, info);
 
     int32_t numCsds = populateInfoVector(info, &Info, mTimestampDevTest, &mTimestampUslist);
     ASSERT_GE(numCsds, 0) << "Error in parsing input info file: " << info;
@@ -888,7 +842,7 @@
 
     strcpy(mURL, sResourceDir.c_str());
     strcpy(info, sResourceDir.c_str());
-    GetURLForComponent(mCompName, mURL, info);
+    GetURLForComponent(mURL, info);
 
     mFlushedIndices.clear();
 
@@ -964,7 +918,7 @@
 
     strcpy(mURL, sResourceDir.c_str());
     strcpy(info, sResourceDir.c_str());
-    GetURLForComponent(mCompName, mURL, info);
+    GetURLForComponent(mURL, info);
 
     eleInfo.open(info);
     ASSERT_EQ(eleInfo.is_open(), true) << mURL << " - file not found";
@@ -1017,9 +971,8 @@
     }
 }
 
-class Codec2VideoDecCsdInputTests
-    : public Codec2VideoDecHidlTestBase,
-      public ::testing::WithParamInterface<std::tuple<std::string, std::string, std::string>> {
+class Codec2VideoDecCsdInputTests : public Codec2VideoDecHidlTestBase,
+                                    public ::testing::WithParamInterface<CsdFlushTestParameters> {
     void getParams() {
         mInstanceName = std::get<0>(GetParam());
         mComponentName = std::get<1>(GetParam());
@@ -1038,7 +991,7 @@
 
     strcpy(mURL, sResourceDir.c_str());
     strcpy(info, sResourceDir.c_str());
-    GetURLForComponent(mCompName, mURL, info);
+    GetURLForComponent(mURL, info);
 
     int32_t numCsds = populateInfoVector(info, &Info, mTimestampDevTest, &mTimestampUslist);
     ASSERT_GE(numCsds, 0) << "Error in parsing input info file";
@@ -1052,7 +1005,7 @@
     bool flushedDecoder = false;
     bool signalEOS = false;
     bool keyFrame = false;
-    bool flushCsd = !std::get<2>(GetParam()).compare("true");
+    bool flushCsd = std::get<2>(GetParam());
 
     ALOGV("sending %d csd data ", numCsds);
     int framesToDecode = numCsds;
@@ -1122,49 +1075,41 @@
 }
 
 INSTANTIATE_TEST_SUITE_P(PerInstance, Codec2VideoDecHidlTest, testing::ValuesIn(kTestParameters),
-                         android::hardware::PrintInstanceTupleNameToString<>);
+                         PrintInstanceTupleNameToString<>);
 
 // DecodeTest with StreamIndex and EOS / No EOS
 INSTANTIATE_TEST_SUITE_P(StreamIndexAndEOS, Codec2VideoDecDecodeTest,
                          testing::ValuesIn(kDecodeTestParameters),
-                         android::hardware::PrintInstanceTupleNameToString<>);
+                         PrintInstanceTupleNameToString<>);
 
 INSTANTIATE_TEST_SUITE_P(CsdInputs, Codec2VideoDecCsdInputTests,
                          testing::ValuesIn(kCsdFlushTestParameters),
-                         android::hardware::PrintInstanceTupleNameToString<>);
+                         PrintInstanceTupleNameToString<>);
 
 }  // anonymous namespace
 
 // TODO : Video specific configuration Test
 int main(int argc, char** argv) {
+    parseArgs(argc, argv);
     kTestParameters = getTestParameters(C2Component::DOMAIN_VIDEO, C2Component::KIND_DECODER);
     for (auto params : kTestParameters) {
         kDecodeTestParameters.push_back(
-                std::make_tuple(std::get<0>(params), std::get<1>(params), "0", "false"));
+                std::make_tuple(std::get<0>(params), std::get<1>(params), 0, false));
         kDecodeTestParameters.push_back(
-                std::make_tuple(std::get<0>(params), std::get<1>(params), "0", "true"));
+                std::make_tuple(std::get<0>(params), std::get<1>(params), 0, true));
         kDecodeTestParameters.push_back(
-                std::make_tuple(std::get<0>(params), std::get<1>(params), "1", "false"));
+                std::make_tuple(std::get<0>(params), std::get<1>(params), 1, false));
         kDecodeTestParameters.push_back(
-                std::make_tuple(std::get<0>(params), std::get<1>(params), "1", "true"));
+                std::make_tuple(std::get<0>(params), std::get<1>(params), 1, true));
         kDecodeTestParameters.push_back(
-                std::make_tuple(std::get<0>(params), std::get<1>(params), "2", "false"));
+                std::make_tuple(std::get<0>(params), std::get<1>(params), 2, false));
         kDecodeTestParameters.push_back(
-                std::make_tuple(std::get<0>(params), std::get<1>(params), "2", "true"));
+                std::make_tuple(std::get<0>(params), std::get<1>(params), 2, true));
 
         kCsdFlushTestParameters.push_back(
-                std::make_tuple(std::get<0>(params), std::get<1>(params), "true"));
+                std::make_tuple(std::get<0>(params), std::get<1>(params), true));
         kCsdFlushTestParameters.push_back(
-                std::make_tuple(std::get<0>(params), std::get<1>(params), "false"));
-    }
-
-    // Set the resource directory based on command line args.
-    // Test will fail to set up if the argument is not set.
-    for (int i = 1; i < argc; i++) {
-        if (strcmp(argv[i], "-P") == 0 && i < argc - 1) {
-            sResourceDir = argv[i + 1];
-            break;
-        }
+                std::make_tuple(std::get<0>(params), std::get<1>(params), false));
     }
 
     ::testing::InitGoogleTest(&argc, argv);
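
For reference, GetURLChksmForComponent() above walks the flattened kCompToURL table, counting only the entries whose mime substring occurs in the component's media type, and returns the streamIndex-th match. An illustrative, standalone sketch of that lookup (using a trimmed two-entry table and the standard VP9 media type string):

    #include <cstdio>
    #include <string>
    #include <vector>

    struct Clip { std::string mime; std::string url; };

    // Returns the streamIndex-th clip whose mime substring is contained in
    // 'mime', or an empty string when there are not enough matches.
    static std::string lookup(const std::vector<Clip>& clips, const std::string& mime,
                              size_t streamIndex) {
        size_t matches = 0;
        for (const Clip& c : clips) {
            if (mime.find(c.mime) == std::string::npos) continue;
            if (matches == streamIndex) return c.url;
            ++matches;
        }
        return "";
    }

    int main() {
        std::vector<Clip> clips = {{"vp9", "bbb_vp9_176x144_285kbps_60fps.vp9"},
                                   {"vp9", "bbb_vp9_640x360_1600kbps_30fps.vp9"}};
        // A VP9 decoder reports the media type "video/x-vnd.on2.vp9".
        printf("%s\n", lookup(clips, "video/x-vnd.on2.vp9", 1).c_str());  // second clip
        return 0;
    }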
diff --git a/media/codec2/hidl/1.0/vts/functional/video/VtsHalMediaC2V1_0TargetVideoEncTest.cpp b/media/codec2/hidl/1.0/vts/functional/video/VtsHalMediaC2V1_0TargetVideoEncTest.cpp
index 5bcea5b..23ceff4 100644
--- a/media/codec2/hidl/1.0/vts/functional/video/VtsHalMediaC2V1_0TargetVideoEncTest.cpp
+++ b/media/codec2/hidl/1.0/vts/functional/video/VtsHalMediaC2V1_0TargetVideoEncTest.cpp
@@ -41,13 +41,11 @@
         : C2Buffer({block->share(C2Rect(block->width(), block->height()), ::C2Fence())}) {}
 };
 
-static std::vector<std::tuple<std::string, std::string, std::string, std::string, std::string>>
-        kEncodeTestParameters;
-static std::vector<std::tuple<std::string, std::string, std::string, std::string>>
-        kEncodeResolutionTestParameters;
+using EncodeTestParameters = std::tuple<std::string, std::string, bool, bool, bool>;
+static std::vector<EncodeTestParameters> kEncodeTestParameters;
 
-// Resource directory
-static std::string sResourceDir = "";
+using EncodeResolutionTestParameters = std::tuple<std::string, std::string, int32_t, int32_t>;
+static std::vector<EncodeResolutionTestParameters> kEncodeResolutionTestParameters;
 
 namespace {
 
@@ -78,26 +76,13 @@
         mGraphicPool = std::make_shared<C2PooledBlockPool>(mGraphicAllocator, mBlockPoolId++);
         ASSERT_NE(mGraphicPool, nullptr);
 
-        mCompName = unknown_comp;
-        struct StringToName {
-            const char* Name;
-            standardComp CompName;
-        };
+        std::vector<std::unique_ptr<C2Param>> queried;
+        mComponent->query({}, {C2PortMediaTypeSetting::output::PARAM_TYPE}, C2_DONT_BLOCK,
+                          &queried);
+        ASSERT_GT(queried.size(), 0);
 
-        const StringToName kStringToName[] = {
-                {"h263", h263}, {"avc", avc}, {"mpeg4", mpeg4},
-                {"hevc", hevc}, {"vp8", vp8}, {"vp9", vp9},
-        };
-
-        const size_t kNumStringToName = sizeof(kStringToName) / sizeof(kStringToName[0]);
-
-        // Find the component type
-        for (size_t i = 0; i < kNumStringToName; ++i) {
-            if (strcasestr(mComponentName.c_str(), kStringToName[i].Name)) {
-                mCompName = kStringToName[i].CompName;
-                break;
-            }
-        }
+        mMime = ((C2PortMediaTypeSetting::output*)queried[0].get())->m.value;
+        std::cout << "mime : " << mMime << "\n";
         mEos = false;
         mCsd = false;
         mConfigBPictures = false;
@@ -106,11 +91,11 @@
         mTimestampUs = 0u;
         mOutputSize = 0u;
         mTimestampDevTest = false;
-        if (mCompName == unknown_comp) mDisableTest = true;
 
         C2SecureModeTuning secureModeTuning{};
         mComponent->query({&secureModeTuning}, {}, C2_MAY_BLOCK, nullptr);
-        if (secureModeTuning.value == C2Config::SM_READ_PROTECTED) {
+        if (secureModeTuning.value == C2Config::SM_READ_PROTECTED ||
+            secureModeTuning.value == C2Config::SM_READ_PROTECTED_WITH_ENCRYPTED) {
             mDisableTest = true;
         }
 
@@ -187,16 +172,7 @@
         }
     }
 
-    enum standardComp {
-        h263,
-        avc,
-        mpeg4,
-        hevc,
-        vp8,
-        vp9,
-        unknown_comp,
-    };
-
+    std::string mMime;
     std::string mInstanceName;
     std::string mComponentName;
     bool mEos;
@@ -204,7 +180,6 @@
     bool mDisableTest;
     bool mConfigBPictures;
     bool mTimestampDevTest;
-    standardComp mCompName;
     uint32_t mFramesReceived;
     uint32_t mFailedWorkReceived;
     uint64_t mTimestampUs;
@@ -231,9 +206,8 @@
     }
 };
 
-class Codec2VideoEncHidlTest
-    : public Codec2VideoEncHidlTestBase,
-      public ::testing::WithParamInterface<std::tuple<std::string, std::string>> {
+class Codec2VideoEncHidlTest : public Codec2VideoEncHidlTestBase,
+                               public ::testing::WithParamInterface<TestParameters> {
     void getParams() {
         mInstanceName = std::get<0>(GetParam());
         mComponentName = std::get<1>(GetParam());
@@ -241,7 +215,7 @@
 };
 
 void validateComponent(const std::shared_ptr<android::Codec2Client::Component>& component,
-                       Codec2VideoEncHidlTest::standardComp compName, bool& disableTest) {
+                       bool& disableTest) {
     // Validate its a C2 Component
     if (component->getName().find("c2") == std::string::npos) {
         ALOGE("Not a c2 component");
@@ -268,13 +242,6 @@
             return;
         }
     }
-
-    // Validates component name
-    if (compName == Codec2VideoEncHidlTest::unknown_comp) {
-        ALOGE("Component InValid");
-        disableTest = true;
-        return;
-    }
     ALOGV("Component Valid");
 }
 
@@ -405,14 +372,12 @@
 TEST_P(Codec2VideoEncHidlTest, validateCompName) {
     if (mDisableTest) GTEST_SKIP() << "Test is disabled";
     ALOGV("Checks if the given component is a valid video component");
-    validateComponent(mComponent, mCompName, mDisableTest);
+    validateComponent(mComponent, mDisableTest);
     ASSERT_EQ(mDisableTest, false);
 }
 
-class Codec2VideoEncEncodeTest
-    : public Codec2VideoEncHidlTestBase,
-      public ::testing::WithParamInterface<
-              std::tuple<std::string, std::string, std::string, std::string, std::string>> {
+class Codec2VideoEncEncodeTest : public Codec2VideoEncHidlTestBase,
+                                 public ::testing::WithParamInterface<EncodeTestParameters> {
     void getParams() {
         mInstanceName = std::get<0>(GetParam());
         mComponentName = std::get<1>(GetParam());
@@ -426,10 +391,10 @@
     char mURL[512];
     int32_t nWidth = ENC_DEFAULT_FRAME_WIDTH;
     int32_t nHeight = ENC_DEFAULT_FRAME_HEIGHT;
-    bool signalEOS = !std::get<2>(GetParam()).compare("true");
+    bool signalEOS = std::get<3>(GetParam());
     // Send an empty frame to receive CSD data from encoder.
-    bool sendEmptyFirstFrame = !std::get<3>(GetParam()).compare("true");
-    mConfigBPictures = !std::get<4>(GetParam()).compare("true");
+    bool sendEmptyFirstFrame = std::get<3>(GetParam());
+    mConfigBPictures = std::get<4>(GetParam());
 
     strcpy(mURL, sResourceDir.c_str());
     GetURLForComponent(mURL);
@@ -517,9 +482,9 @@
         ASSERT_TRUE(false);
     }
 
-    if (mCompName == vp8 || mCompName == h263) {
+    if ((mMime.find("vp8") != std::string::npos) || (mMime.find("3gpp") != std::string::npos)) {
         ASSERT_FALSE(mCsd) << "CSD Buffer not expected";
-    } else if (mCompName != vp9) {
+    } else if (mMime.find("vp9") == std::string::npos) {
         ASSERT_TRUE(mCsd) << "CSD Buffer not received";
     }
 
@@ -697,8 +662,7 @@
 
 class Codec2VideoEncResolutionTest
     : public Codec2VideoEncHidlTestBase,
-      public ::testing::WithParamInterface<
-              std::tuple<std::string, std::string, std::string, std::string>> {
+      public ::testing::WithParamInterface<EncodeResolutionTestParameters> {
     void getParams() {
         mInstanceName = std::get<0>(GetParam());
         mComponentName = std::get<1>(GetParam());
@@ -710,8 +674,8 @@
     if (mDisableTest) GTEST_SKIP() << "Test is disabled";
 
     std::ifstream eleStream;
-    int32_t nWidth = std::stoi(std::get<2>(GetParam()));
-    int32_t nHeight = std::stoi(std::get<3>(GetParam()));
+    int32_t nWidth = std::get<2>(GetParam());
+    int32_t nHeight = std::get<3>(GetParam());
     ALOGD("Trying encode for width %d height %d", nWidth, nHeight);
     mEos = false;
 
@@ -743,14 +707,16 @@
 }
 
 INSTANTIATE_TEST_SUITE_P(PerInstance, Codec2VideoEncHidlTest, testing::ValuesIn(kTestParameters),
-                         android::hardware::PrintInstanceTupleNameToString<>);
+                         PrintInstanceTupleNameToString<>);
 
 INSTANTIATE_TEST_SUITE_P(NonStdSizes, Codec2VideoEncResolutionTest,
-                         ::testing::ValuesIn(kEncodeResolutionTestParameters));
+                         ::testing::ValuesIn(kEncodeResolutionTestParameters),
+                         PrintInstanceTupleNameToString<>);
 
 // EncodeTest with EOS / No EOS
 INSTANTIATE_TEST_SUITE_P(EncodeTestwithEOS, Codec2VideoEncEncodeTest,
-                         ::testing::ValuesIn(kEncodeTestParameters));
+                         ::testing::ValuesIn(kEncodeTestParameters),
+                         PrintInstanceTupleNameToString<>);
 
 TEST_P(Codec2VideoEncHidlTest, AdaptiveBitrateTest) {
     description("Encodes input file for different bitrates");
@@ -841,38 +807,26 @@
 }  // anonymous namespace
 
 int main(int argc, char** argv) {
+    parseArgs(argc, argv);
     kTestParameters = getTestParameters(C2Component::DOMAIN_VIDEO, C2Component::KIND_ENCODER);
     for (auto params : kTestParameters) {
-        constexpr char const* kBoolString[] = { "false", "true" };
         for (size_t i = 0; i < 1 << 3; ++i) {
             kEncodeTestParameters.push_back(std::make_tuple(
-                    std::get<0>(params), std::get<1>(params),
-                    kBoolString[i & 1],
-                    kBoolString[(i >> 1) & 1],
-                    kBoolString[(i >> 2) & 1]));
+                    std::get<0>(params), std::get<1>(params), i & 1, (i >> 1) & 1, (i >> 2) & 1));
         }
 
         kEncodeResolutionTestParameters.push_back(
-                std::make_tuple(std::get<0>(params), std::get<1>(params), "52", "18"));
+                std::make_tuple(std::get<0>(params), std::get<1>(params), 52, 18));
         kEncodeResolutionTestParameters.push_back(
-                std::make_tuple(std::get<0>(params), std::get<1>(params), "365", "365"));
+                std::make_tuple(std::get<0>(params), std::get<1>(params), 365, 365));
         kEncodeResolutionTestParameters.push_back(
-                std::make_tuple(std::get<0>(params), std::get<1>(params), "484", "362"));
+                std::make_tuple(std::get<0>(params), std::get<1>(params), 484, 362));
         kEncodeResolutionTestParameters.push_back(
-                std::make_tuple(std::get<0>(params), std::get<1>(params), "244", "488"));
+                std::make_tuple(std::get<0>(params), std::get<1>(params), 244, 488));
         kEncodeResolutionTestParameters.push_back(
-                std::make_tuple(std::get<0>(params), std::get<1>(params), "852", "608"));
+                std::make_tuple(std::get<0>(params), std::get<1>(params), 852, 608));
         kEncodeResolutionTestParameters.push_back(
-                std::make_tuple(std::get<0>(params), std::get<1>(params), "1400", "442"));
-    }
-
-    // Set the resource directory based on command line args.
-    // Test will fail to set up if the argument is not set.
-    for (int i = 1; i < argc; i++) {
-        if (strcmp(argv[i], "-P") == 0 && i < argc - 1) {
-            sResourceDir = argv[i + 1];
-            break;
-        }
+                std::make_tuple(std::get<0>(params), std::get<1>(params), 1400, 442));
     }
 
     ::testing::InitGoogleTest(&argc, argv);
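
The 1 << 3 loop in main() above enumerates every combination of the three boolean encode parameters by reading one bit of the loop counter per parameter. An illustrative, standalone sketch of the same idiom:

    #include <cstddef>
    #include <cstdio>

    int main() {
        // i runs 0..7; bit 0 -> signalEOS, bit 1 -> sendEmptyFirstFrame,
        // bit 2 -> configBPictures, so all 2^3 combinations are produced.
        for (size_t i = 0; i < (1 << 3); ++i) {
            bool signalEOS = i & 1;
            bool sendEmptyFirstFrame = (i >> 1) & 1;
            bool configBPictures = (i >> 2) & 1;
            printf("%zu: eos=%d empty=%d b=%d\n", i, signalEOS, sendEmptyFirstFrame,
                   configBPictures);
        }
        return 0;
    }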
diff --git a/media/codec2/hidl/1.1/utils/Android.bp b/media/codec2/hidl/1.1/utils/Android.bp
index 839a910..0eeedb6 100644
--- a/media/codec2/hidl/1.1/utils/Android.bp
+++ b/media/codec2/hidl/1.1/utils/Android.bp
@@ -15,7 +15,6 @@
     defaults: ["hidl_defaults"],
 
     srcs: [
-        "OutputBufferQueue.cpp",
         "types.cpp",
     ],
 
@@ -176,14 +175,3 @@
     ],
 }
 
-// Alias to the latest "defaults" for Codec 2.0 HAL service implementations
-cc_defaults {
-    name: "libcodec2-hidl-defaults",
-    defaults: ["libcodec2-hidl-defaults@1.1"],
-}
-
-// Alias to the latest "defaults" for Codec 2.0 HAL client
-cc_defaults {
-    name: "libcodec2-hidl-client-defaults",
-    defaults: ["libcodec2-hidl-client-defaults@1.1"],
-}
diff --git a/media/codec2/hidl/1.1/utils/ComponentStore.cpp b/media/codec2/hidl/1.1/utils/ComponentStore.cpp
index 163686d..d47abdd 100644
--- a/media/codec2/hidl/1.1/utils/ComponentStore.cpp
+++ b/media/codec2/hidl/1.1/utils/ComponentStore.cpp
@@ -366,6 +366,9 @@
             mStore->createComponent(name, &c2component));
 
     if (status == Status::OK) {
+#ifndef __ANDROID_APEX__
+        c2component = GetFilterWrapper()->maybeWrapComponent(c2component);
+#endif
         onInterfaceLoaded(c2component->intf());
         component = new Component(c2component, listener, this, pool);
         if (!component) {
diff --git a/media/codec2/hidl/1.2/utils/Android.bp b/media/codec2/hidl/1.2/utils/Android.bp
new file mode 100644
index 0000000..e4e4ad5
--- /dev/null
+++ b/media/codec2/hidl/1.2/utils/Android.bp
@@ -0,0 +1,206 @@
+// DO NOT DEPEND ON THIS DIRECTLY
+// use libcodec2-hidl-client-defaults instead
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
+cc_library {
+    name: "libcodec2_hidl_client@1.2",
+
+    defaults: ["hidl_defaults"],
+
+    srcs: [
+        "types.cpp",
+    ],
+
+    header_libs: [
+        "libcodec2_internal", // private
+    ],
+
+    shared_libs: [
+        "android.hardware.media.bufferpool@2.0",
+        "android.hardware.media.c2@1.0",
+        "android.hardware.media.c2@1.1",
+        "android.hardware.media.c2@1.2",
+        "libbase",
+        "libcodec2",
+        "libcodec2_hidl_client@1.0",
+        "libcodec2_hidl_client@1.1",
+        "libcodec2_vndk",
+        "libcutils",
+        "libgui",
+        "libhidlbase",
+        "liblog",
+        "libstagefright_bufferpool@2.0.1",
+        "libui",
+        "libutils",
+    ],
+
+    export_include_dirs: [
+        "include",
+    ],
+
+    export_shared_lib_headers: [
+        "android.hardware.media.c2@1.0",
+        "android.hardware.media.c2@1.1",
+        "android.hardware.media.c2@1.2",
+        "libcodec2",
+        "libcodec2_hidl_client@1.0",
+        "libcodec2_hidl_client@1.1",
+        "libgui",
+        "libstagefright_bufferpool@2.0.1",
+        "libui",
+    ],
+
+    // Device does not boot when global ThinLTO is enabled for this library.
+    // http://b/170595429
+    lto: {
+        never: true,
+    },
+}
+
+
+// DO NOT DEPEND ON THIS DIRECTLY
+// use libcodec2-hidl-defaults instead
+cc_library {
+    name: "libcodec2_hidl@1.2",
+    vendor_available: true,
+    min_sdk_version: "29",
+    apex_available: [
+        "//apex_available:platform",
+        "com.android.media.swcodec",
+        "test_com.android.media.swcodec",
+    ],
+
+    defaults: ["hidl_defaults"],
+
+    srcs: [
+        "Component.cpp",
+        "ComponentInterface.cpp",
+        "ComponentStore.cpp",
+        "Configurable.cpp",
+        "InputBufferManager.cpp",
+        "InputSurface.cpp",
+        "InputSurfaceConnection.cpp",
+        "types.cpp",
+    ],
+
+    header_libs: [
+        "libbinder_headers",
+        "libsystem_headers",
+        "libcodec2_internal", // private
+    ],
+
+    shared_libs: [
+        "android.hardware.graphics.bufferqueue@1.0",
+        "android.hardware.graphics.bufferqueue@2.0",
+        "android.hardware.graphics.common@1.0",
+        "android.hardware.media@1.0",
+        "android.hardware.media.bufferpool@2.0",
+        "android.hardware.media.c2@1.0",
+        "android.hardware.media.c2@1.1",
+        "android.hardware.media.c2@1.2",
+        "android.hardware.media.omx@1.0",
+        "libbase",
+        "libcodec2",
+        "libcodec2_hidl@1.0",
+        "libcodec2_hidl@1.1",
+        "libcodec2_hidl_plugin_stub",
+        "libcodec2_vndk",
+        "libcutils",
+        "libhidlbase",
+        "liblog",
+        "libstagefright_bufferpool@2.0.1",
+        "libstagefright_bufferqueue_helper_novndk",
+        "libui",
+        "libutils",
+    ],
+
+    target: {
+        vendor: {
+            exclude_shared_libs: [
+                "libstagefright_bufferqueue_helper_novndk",
+                "libcodec2_hidl_plugin_stub",
+            ],
+            shared_libs: [
+                "libstagefright_bufferqueue_helper",
+                "libcodec2_hidl_plugin",
+            ],
+        },
+        apex: {
+            exclude_shared_libs: [
+                "libcodec2_hidl_plugin_stub",
+                "libcodec2_hidl_plugin",
+            ],
+        },
+    },
+
+    export_include_dirs: [
+        "include",
+    ],
+
+    export_shared_lib_headers: [
+        "android.hardware.media.c2@1.0",
+        "android.hardware.media.c2@1.1",
+        "android.hardware.media.c2@1.2",
+        "libcodec2",
+        "libcodec2_hidl@1.0",
+        "libcodec2_hidl@1.1",
+        "libcodec2_vndk",
+        "libhidlbase",
+        "libstagefright_bufferpool@2.0.1",
+        "libui",
+    ],
+}
+
+// public dependency for Codec 2.0 HAL service implementations
+cc_defaults {
+    name: "libcodec2-hidl-defaults@1.2",
+    defaults: ["libcodec2-impl-defaults"],
+
+    shared_libs: [
+        "android.hardware.media.c2@1.0",
+        "android.hardware.media.c2@1.1",
+        "android.hardware.media.c2@1.2",
+        "libcodec2_hidl@1.0",
+        "libcodec2_hidl@1.1",
+        "libcodec2_hidl@1.2",
+        "libcodec2_vndk",
+        "libhidlbase",
+    ],
+}
+
+// public dependency for Codec 2.0 HAL client
+cc_defaults {
+    name: "libcodec2-hidl-client-defaults@1.2",
+    defaults: ["libcodec2-impl-defaults"],
+
+    shared_libs: [
+        "android.hardware.media.c2@1.0",
+        "android.hardware.media.c2@1.1",
+        "android.hardware.media.c2@1.2",
+        "libcodec2_hidl_client@1.0",
+        "libcodec2_hidl_client@1.1",
+        "libcodec2_hidl_client@1.2",
+        "libcodec2_vndk",
+        "libhidlbase",
+    ],
+}
+
+// Alias to the latest "defaults" for Codec 2.0 HAL service implementations
+cc_defaults {
+    name: "libcodec2-hidl-defaults",
+    defaults: ["libcodec2-hidl-defaults@1.2"],
+}
+
+// Alias to the latest "defaults" for Codec 2.0 HAL client
+cc_defaults {
+    name: "libcodec2-hidl-client-defaults",
+    defaults: ["libcodec2-hidl-client-defaults@1.2"],
+}
+
diff --git a/media/codec2/hidl/1.2/utils/Component.cpp b/media/codec2/hidl/1.2/utils/Component.cpp
new file mode 100644
index 0000000..8924e6d
--- /dev/null
+++ b/media/codec2/hidl/1.2/utils/Component.cpp
@@ -0,0 +1,535 @@
+/*
+ * Copyright 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "Codec2-Component@1.2"
+#include <android-base/logging.h>
+
+#include <codec2/hidl/1.2/Component.h>
+#include <codec2/hidl/1.2/ComponentStore.h>
+#include <codec2/hidl/1.2/InputBufferManager.h>
+
+#ifndef __ANDROID_APEX__
+#include <FilterWrapper.h>
+#endif
+
+#include <hidl/HidlBinderSupport.h>
+#include <utils/Timers.h>
+
+#include <C2BqBufferPriv.h>
+#include <C2Debug.h>
+#include <C2PlatformSupport.h>
+
+#include <chrono>
+#include <thread>
+
+namespace android {
+namespace hardware {
+namespace media {
+namespace c2 {
+namespace V1_2 {
+namespace utils {
+
+using namespace ::android;
+
+// ComponentListener wrapper
+struct Component::Listener : public C2Component::Listener {
+
+    Listener(const sp<Component>& component) :
+        mComponent(component),
+        mListener(component->mListener) {
+    }
+
+    virtual void onError_nb(
+            std::weak_ptr<C2Component> /* c2component */,
+            uint32_t errorCode) override {
+        sp<IComponentListener> listener = mListener.promote();
+        if (listener) {
+            Return<void> transStatus = listener->onError(Status::OK, errorCode);
+            if (!transStatus.isOk()) {
+                LOG(ERROR) << "Component::Listener::onError_nb -- "
+                           << "transaction failed.";
+            }
+        }
+    }
+
+    virtual void onTripped_nb(
+            std::weak_ptr<C2Component> /* c2component */,
+            std::vector<std::shared_ptr<C2SettingResult>> c2settingResult
+            ) override {
+        sp<IComponentListener> listener = mListener.promote();
+        if (listener) {
+            hidl_vec<SettingResult> settingResults(c2settingResult.size());
+            size_t ix = 0;
+            for (const std::shared_ptr<C2SettingResult> &c2result :
+                    c2settingResult) {
+                if (c2result) {
+                    if (!objcpy(&settingResults[ix++], *c2result)) {
+                        break;
+                    }
+                }
+            }
+            settingResults.resize(ix);
+            Return<void> transStatus = listener->onTripped(settingResults);
+            if (!transStatus.isOk()) {
+                LOG(ERROR) << "Component::Listener::onTripped_nb -- "
+                           << "transaction failed.";
+            }
+        }
+    }
+
+    virtual void onWorkDone_nb(
+            std::weak_ptr<C2Component> /* c2component */,
+            std::list<std::unique_ptr<C2Work>> c2workItems) override {
+        for (const std::unique_ptr<C2Work>& work : c2workItems) {
+            if (work) {
+                if (work->worklets.empty()
+                        || !work->worklets.back()
+                        || (work->worklets.back()->output.flags &
+                            C2FrameData::FLAG_INCOMPLETE) == 0) {
+                    InputBufferManager::
+                            unregisterFrameData(mListener, work->input);
+                }
+            }
+        }
+
+        sp<IComponentListener> listener = mListener.promote();
+        if (listener) {
+            WorkBundle workBundle;
+
+            sp<Component> strongComponent = mComponent.promote();
+            beginTransferBufferQueueBlocks(c2workItems, true);
+            if (!objcpy(&workBundle, c2workItems, strongComponent ?
+                    &strongComponent->mBufferPoolSender : nullptr)) {
+                LOG(ERROR) << "Component::Listener::onWorkDone_nb -- "
+                           << "received corrupted work items.";
+                endTransferBufferQueueBlocks(c2workItems, false, true);
+                return;
+            }
+            Return<void> transStatus = listener->onWorkDone(workBundle);
+            if (!transStatus.isOk()) {
+                LOG(ERROR) << "Component::Listener::onWorkDone_nb -- "
+                           << "transaction failed.";
+                endTransferBufferQueueBlocks(c2workItems, false, true);
+                return;
+            }
+            endTransferBufferQueueBlocks(c2workItems, true, true);
+        }
+    }
+
+protected:
+    wp<Component> mComponent;
+    wp<IComponentListener> mListener;
+};
+
+// Component::Sink
+struct Component::Sink : public IInputSink {
+    std::shared_ptr<Component> mComponent;
+    sp<IConfigurable> mConfigurable;
+
+    virtual Return<Status> queue(const WorkBundle& workBundle) override {
+        return mComponent->queue(workBundle);
+    }
+
+    virtual Return<sp<IConfigurable>> getConfigurable() override {
+        return mConfigurable;
+    }
+
+    Sink(const std::shared_ptr<Component>& component);
+    virtual ~Sink() override;
+
+    // Process-wide map: Component::Sink -> C2Component.
+    static std::mutex sSink2ComponentMutex;
+    static std::map<IInputSink*, std::weak_ptr<C2Component>> sSink2Component;
+
+    static std::shared_ptr<C2Component> findLocalComponent(
+            const sp<IInputSink>& sink);
+};
+
+std::mutex
+        Component::Sink::sSink2ComponentMutex{};
+std::map<IInputSink*, std::weak_ptr<C2Component>>
+        Component::Sink::sSink2Component{};
+
+Component::Sink::Sink(const std::shared_ptr<Component>& component)
+        : mComponent{component},
+          mConfigurable{[&component]() -> sp<IConfigurable> {
+              Return<sp<IComponentInterface>> ret1 = component->getInterface();
+              if (!ret1.isOk()) {
+                  LOG(ERROR) << "Sink::Sink -- component's transaction failed.";
+                  return nullptr;
+              }
+              Return<sp<IConfigurable>> ret2 =
+                      static_cast<sp<IComponentInterface>>(ret1)->
+                      getConfigurable();
+              if (!ret2.isOk()) {
+                  LOG(ERROR) << "Sink::Sink -- interface's transaction failed.";
+                  return nullptr;
+              }
+              return static_cast<sp<IConfigurable>>(ret2);
+          }()} {
+    std::lock_guard<std::mutex> lock(sSink2ComponentMutex);
+    sSink2Component.emplace(this, component->mComponent);
+}
+
+Component::Sink::~Sink() {
+    std::lock_guard<std::mutex> lock(sSink2ComponentMutex);
+    sSink2Component.erase(this);
+}
+
+std::shared_ptr<C2Component> Component::Sink::findLocalComponent(
+        const sp<IInputSink>& sink) {
+    std::lock_guard<std::mutex> lock(sSink2ComponentMutex);
+    auto i = sSink2Component.find(sink.get());
+    if (i == sSink2Component.end()) {
+        return nullptr;
+    }
+    return i->second.lock();
+}
+
+// Component
+Component::Component(
+        const std::shared_ptr<C2Component>& component,
+        const sp<IComponentListener>& listener,
+        const sp<ComponentStore>& store,
+        const sp<::android::hardware::media::bufferpool::V2_0::
+        IClientManager>& clientPoolManager)
+      : mComponent{component},
+        mInterface{new ComponentInterface(component->intf(),
+                                          store->getParameterCache())},
+        mListener{listener},
+        mStore{store},
+        mBufferPoolSender{clientPoolManager} {
+    // Retrieve supported parameters from store
+    // TODO: We could cache this per component/interface type
+    mInit = mInterface->status();
+}
+
+c2_status_t Component::status() const {
+    return mInit;
+}
+
+// Methods from ::android::hardware::media::c2::V1_1::IComponent
+Return<Status> Component::queue(const WorkBundle& workBundle) {
+    std::list<std::unique_ptr<C2Work>> c2works;
+
+    if (!objcpy(&c2works, workBundle)) {
+        return Status::CORRUPTED;
+    }
+
+    // Register input buffers.
+    for (const std::unique_ptr<C2Work>& work : c2works) {
+        if (work) {
+            InputBufferManager::
+                    registerFrameData(mListener, work->input);
+        }
+    }
+
+    return static_cast<Status>(mComponent->queue_nb(&c2works));
+}
+
+Return<void> Component::flush(flush_cb _hidl_cb) {
+    std::list<std::unique_ptr<C2Work>> c2flushedWorks;
+    c2_status_t c2res = mComponent->flush_sm(
+            C2Component::FLUSH_COMPONENT,
+            &c2flushedWorks);
+
+    // Unregister input buffers.
+    for (const std::unique_ptr<C2Work>& work : c2flushedWorks) {
+        if (work) {
+            if (work->worklets.empty()
+                    || !work->worklets.back()
+                    || (work->worklets.back()->output.flags &
+                        C2FrameData::FLAG_INCOMPLETE) == 0) {
+                InputBufferManager::
+                        unregisterFrameData(mListener, work->input);
+            }
+        }
+    }
+
+    WorkBundle flushedWorkBundle;
+    Status res = static_cast<Status>(c2res);
+    beginTransferBufferQueueBlocks(c2flushedWorks, true);
+    if (c2res == C2_OK) {
+        if (!objcpy(&flushedWorkBundle, c2flushedWorks, &mBufferPoolSender)) {
+            res = Status::CORRUPTED;
+        }
+    }
+    _hidl_cb(res, flushedWorkBundle);
+    endTransferBufferQueueBlocks(c2flushedWorks, true, true);
+    return Void();
+}
+
+Return<Status> Component::drain(bool withEos) {
+    return static_cast<Status>(mComponent->drain_nb(withEos ?
+            C2Component::DRAIN_COMPONENT_WITH_EOS :
+            C2Component::DRAIN_COMPONENT_NO_EOS));
+}
+
+Return<Status> Component::setOutputSurface(
+        uint64_t blockPoolId,
+        const sp<HGraphicBufferProducer2>& surface) {
+    std::shared_ptr<C2BlockPool> pool;
+    GetCodec2BlockPool(blockPoolId, mComponent, &pool);
+    if (pool && pool->getAllocatorId() == C2PlatformAllocatorStore::BUFFERQUEUE) {
+        std::shared_ptr<C2BufferQueueBlockPool> bqPool =
+                std::static_pointer_cast<C2BufferQueueBlockPool>(pool);
+        C2BufferQueueBlockPool::OnRenderCallback cb =
+            [this](uint64_t producer, int32_t slot, int64_t nsecs) {
+                // TODO: batch this
+                hidl_vec<IComponentListener::RenderedFrame> rendered;
+                rendered.resize(1);
+                rendered[0] = { producer, slot, nsecs };
+                (void)mListener->onFramesRendered(rendered).isOk();
+        };
+        if (bqPool) {
+            bqPool->setRenderCallback(cb);
+            bqPool->configureProducer(surface);
+        }
+    }
+    return Status::OK;
+}
+
+Return<void> Component::connectToInputSurface(
+        const sp<IInputSurface>& inputSurface,
+        connectToInputSurface_cb _hidl_cb) {
+    Status status;
+    sp<IInputSurfaceConnection> connection;
+    auto transStatus = inputSurface->connect(
+            asInputSink(),
+            [&status, &connection](
+                    Status s, const sp<IInputSurfaceConnection>& c) {
+                status = s;
+                connection = c;
+            }
+        );
+    _hidl_cb(status, connection);
+    return Void();
+}
+
+Return<void> Component::connectToOmxInputSurface(
+        const sp<HGraphicBufferProducer1>& producer,
+        const sp<::android::hardware::media::omx::V1_0::
+        IGraphicBufferSource>& source,
+        connectToOmxInputSurface_cb _hidl_cb) {
+    (void)producer;
+    (void)source;
+    (void)_hidl_cb;
+    return Void();
+}
+
+Return<Status> Component::disconnectFromInputSurface() {
+    // TODO implement
+    return Status::OK;
+}
+
+namespace /* unnamed */ {
+
+struct BlockPoolIntf : public ConfigurableC2Intf {
+    BlockPoolIntf(const std::shared_ptr<C2BlockPool>& pool)
+          : ConfigurableC2Intf{
+                "C2BlockPool:" +
+                    (pool ? std::to_string(pool->getLocalId()) : "null"),
+                0},
+            mPool{pool} {
+    }
+
+    virtual c2_status_t config(
+            const std::vector<C2Param*>& params,
+            c2_blocking_t mayBlock,
+            std::vector<std::unique_ptr<C2SettingResult>>* const failures
+            ) override {
+        (void)params;
+        (void)mayBlock;
+        (void)failures;
+        return C2_OK;
+    }
+
+    virtual c2_status_t query(
+            const std::vector<C2Param::Index>& indices,
+            c2_blocking_t mayBlock,
+            std::vector<std::unique_ptr<C2Param>>* const params
+            ) const override {
+        (void)indices;
+        (void)mayBlock;
+        (void)params;
+        return C2_OK;
+    }
+
+    virtual c2_status_t querySupportedParams(
+            std::vector<std::shared_ptr<C2ParamDescriptor>>* const params
+            ) const override {
+        (void)params;
+        return C2_OK;
+    }
+
+    virtual c2_status_t querySupportedValues(
+            std::vector<C2FieldSupportedValuesQuery>& fields,
+            c2_blocking_t mayBlock) const override {
+        (void)fields;
+        (void)mayBlock;
+        return C2_OK;
+    }
+
+protected:
+    std::shared_ptr<C2BlockPool> mPool;
+};
+
+} // unnamed namespace
+
+Return<void> Component::createBlockPool(
+        uint32_t allocatorId,
+        createBlockPool_cb _hidl_cb) {
+    std::shared_ptr<C2BlockPool> blockPool;
+#ifdef __ANDROID_APEX__
+    c2_status_t status = CreateCodec2BlockPool(
+            static_cast<C2PlatformAllocatorStore::id_t>(allocatorId),
+            mComponent,
+            &blockPool);
+#else
+    c2_status_t status = ComponentStore::GetFilterWrapper()->createBlockPool(
+            static_cast<C2PlatformAllocatorStore::id_t>(allocatorId),
+            mComponent,
+            &blockPool);
+#endif
+    if (status != C2_OK) {
+        blockPool = nullptr;
+    }
+    if (blockPool) {
+        mBlockPoolsMutex.lock();
+        mBlockPools.emplace(blockPool->getLocalId(), blockPool);
+        mBlockPoolsMutex.unlock();
+    } else if (status == C2_OK) {
+        status = C2_CORRUPTED;
+    }
+
+    _hidl_cb(static_cast<Status>(status),
+            blockPool ? blockPool->getLocalId() : 0,
+            new CachedConfigurable(
+            std::make_unique<BlockPoolIntf>(blockPool)));
+    return Void();
+}
+
+Return<Status> Component::destroyBlockPool(uint64_t blockPoolId) {
+    std::lock_guard<std::mutex> lock(mBlockPoolsMutex);
+    return mBlockPools.erase(blockPoolId) == 1 ?
+            Status::OK : Status::CORRUPTED;
+}
+
+Return<Status> Component::start() {
+    return static_cast<Status>(mComponent->start());
+}
+
+Return<Status> Component::stop() {
+    InputBufferManager::unregisterFrameData(mListener);
+    return static_cast<Status>(mComponent->stop());
+}
+
+Return<Status> Component::reset() {
+    Status status = static_cast<Status>(mComponent->reset());
+    {
+        std::lock_guard<std::mutex> lock(mBlockPoolsMutex);
+        mBlockPools.clear();
+    }
+    InputBufferManager::unregisterFrameData(mListener);
+    return status;
+}
+
+Return<Status> Component::release() {
+    Status status = static_cast<Status>(mComponent->release());
+    {
+        std::lock_guard<std::mutex> lock(mBlockPoolsMutex);
+        mBlockPools.clear();
+    }
+    InputBufferManager::unregisterFrameData(mListener);
+    return status;
+}
+
+Return<sp<IComponentInterface>> Component::getInterface() {
+    return sp<IComponentInterface>(mInterface);
+}
+
+Return<sp<IInputSink>> Component::asInputSink() {
+    std::lock_guard<std::mutex> lock(mSinkMutex);
+    if (!mSink) {
+        mSink = new Sink(shared_from_this());
+    }
+    return {mSink};
+}
+
+Return<void> Component::configureVideoTunnel(
+        uint32_t avSyncHwId, configureVideoTunnel_cb _hidl_cb) {
+    (void)avSyncHwId;
+    _hidl_cb(Status::OMITTED, hidl_handle{});
+    return Void();
+}
+
+Return<Status> Component::setOutputSurfaceWithSyncObj(
+        uint64_t blockPoolId, const sp<HGraphicBufferProducer2>& surface,
+        const SurfaceSyncObj& syncObject) {
+    std::shared_ptr<C2BlockPool> pool;
+    GetCodec2BlockPool(blockPoolId, mComponent, &pool);
+    if (pool && pool->getAllocatorId() == C2PlatformAllocatorStore::BUFFERQUEUE) {
+        std::shared_ptr<C2BufferQueueBlockPool> bqPool =
+                std::static_pointer_cast<C2BufferQueueBlockPool>(pool);
+        C2BufferQueueBlockPool::OnRenderCallback cb =
+            [this](uint64_t producer, int32_t slot, int64_t nsecs) {
+                // TODO: batch this
+                hidl_vec<IComponentListener::RenderedFrame> rendered;
+                rendered.resize(1);
+                rendered[0] = { producer, slot, nsecs };
+                (void)mListener->onFramesRendered(rendered).isOk();
+        };
+        if (bqPool) {
+            const native_handle_t *h = syncObject.syncMemory;
+            native_handle_t *syncMemory = h ? native_handle_clone(h) : nullptr;
+            uint64_t bqId = syncObject.bqId;
+            uint32_t generationId = syncObject.generationId;
+            uint64_t consumerUsage = syncObject.consumerUsage;
+
+            bqPool->setRenderCallback(cb);
+            bqPool->configureProducer(surface, syncMemory, bqId,
+                                      generationId, consumerUsage);
+        }
+    }
+    return Status::OK;
+}
+
+std::shared_ptr<C2Component> Component::findLocalComponent(
+        const sp<IInputSink>& sink) {
+    return Component::Sink::findLocalComponent(sink);
+}
+
+void Component::initListener(const sp<Component>& self) {
+    std::shared_ptr<C2Component::Listener> c2listener =
+            std::make_shared<Listener>(self);
+    c2_status_t res = mComponent->setListener_vb(c2listener, C2_DONT_BLOCK);
+    if (res != C2_OK) {
+        mInit = res;
+    }
+}
+
+Component::~Component() {
+    InputBufferManager::unregisterFrameData(mListener);
+    mStore->reportComponentDeath(this);
+}
+
+} // namespace utils
+} // namespace V1_2
+} // namespace c2
+} // namespace media
+} // namespace hardware
+} // namespace android
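
Editor's note, not part of the patch: Component::Listener above deliberately holds wp<> (weak) references to the component and the HIDL listener and promotes them before every callback, so callbacks arriving after the client has gone away are dropped instead of dereferencing a dead object. A small standalone sketch of that promote-before-use pattern, written with std::weak_ptr purely for illustration:

#include <cstdint>
#include <iostream>
#include <memory>

struct HidlListener {                       // stand-in for IComponentListener
    void onError(uint32_t code) { std::cout << "error " << code << "\n"; }
};

struct ListenerWrapper {                    // stand-in for Component::Listener
    std::weak_ptr<HidlListener> mListener;  // weak: does not extend lifetime

    void onError_nb(uint32_t errorCode) {
        // Promote the weak reference; skip the callback if the client is gone.
        if (auto listener = mListener.lock()) {
            listener->onError(errorCode);
        }
    }
};

int main() {
    auto client = std::make_shared<HidlListener>();
    ListenerWrapper wrapper{client};
    wrapper.onError_nb(1);   // delivered
    client.reset();
    wrapper.onError_nb(2);   // dropped -- no dangling call
    return 0;
}
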
diff --git a/media/codec2/hidl/1.1/utils/OutputBufferQueue.cpp b/media/codec2/hidl/1.2/utils/ComponentInterface.cpp
similarity index 84%
copy from media/codec2/hidl/1.1/utils/OutputBufferQueue.cpp
copy to media/codec2/hidl/1.2/utils/ComponentInterface.cpp
index 65756e8..30fe4d6 100644
--- a/media/codec2/hidl/1.1/utils/OutputBufferQueue.cpp
+++ b/media/codec2/hidl/1.2/utils/ComponentInterface.cpp
@@ -1,5 +1,5 @@
 /*
- * Copyright 2019 The Android Open Source Project
+ * Copyright 2021 The Android Open Source Project
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
@@ -14,4 +14,4 @@
  * limitations under the License.
  */
 
-#include <codec2/hidl/1.1/OutputBufferQueue.h>
+#include <codec2/hidl/1.2/ComponentInterface.h>
diff --git a/media/codec2/hidl/1.2/utils/ComponentStore.cpp b/media/codec2/hidl/1.2/utils/ComponentStore.cpp
new file mode 100644
index 0000000..9fac5d5
--- /dev/null
+++ b/media/codec2/hidl/1.2/utils/ComponentStore.cpp
@@ -0,0 +1,562 @@
+/*
+ * Copyright 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "Codec2-ComponentStore@1.2"
+#include <android-base/logging.h>
+
+#include <codec2/hidl/1.2/ComponentStore.h>
+#include <codec2/hidl/1.2/InputSurface.h>
+#include <codec2/hidl/1.2/types.h>
+
+#include <android-base/file.h>
+#include <media/stagefright/bqhelper/GraphicBufferSource.h>
+#include <utils/Errors.h>
+
+#include <C2PlatformSupport.h>
+#include <util/C2InterfaceHelper.h>
+
+#include <chrono>
+#include <ctime>
+#include <iomanip>
+#include <ostream>
+#include <sstream>
+
+#ifndef __ANDROID_APEX__
+#include <codec2/hidl/plugin/FilterPlugin.h>
+#include <dlfcn.h>
+#include <C2Config.h>
+#include <DefaultFilterPlugin.h>
+#include <FilterWrapper.h>
+#endif
+
+namespace android {
+namespace hardware {
+namespace media {
+namespace c2 {
+namespace V1_2 {
+namespace utils {
+
+using namespace ::android;
+using ::android::GraphicBufferSource;
+using namespace ::android::hardware::media::bufferpool::V2_0::implementation;
+
+namespace /* unnamed */ {
+
+struct StoreIntf : public ConfigurableC2Intf {
+    StoreIntf(const std::shared_ptr<C2ComponentStore>& store)
+          : ConfigurableC2Intf{store ? store->getName() : "", 0},
+            mStore{store} {
+    }
+
+    virtual c2_status_t config(
+            const std::vector<C2Param*> &params,
+            c2_blocking_t mayBlock,
+            std::vector<std::unique_ptr<C2SettingResult>> *const failures
+            ) override {
+        // Assume all params are blocking
+        // TODO: Filter for supported params
+        if (mayBlock == C2_DONT_BLOCK && params.size() != 0) {
+            return C2_BLOCKING;
+        }
+        return mStore->config_sm(params, failures);
+    }
+
+    virtual c2_status_t query(
+            const std::vector<C2Param::Index> &indices,
+            c2_blocking_t mayBlock,
+            std::vector<std::unique_ptr<C2Param>> *const params) const override {
+        // Assume all params are blocking
+        // TODO: Filter for supported params
+        if (mayBlock == C2_DONT_BLOCK && indices.size() != 0) {
+            return C2_BLOCKING;
+        }
+        return mStore->query_sm({}, indices, params);
+    }
+
+    virtual c2_status_t querySupportedParams(
+            std::vector<std::shared_ptr<C2ParamDescriptor>> *const params
+            ) const override {
+        return mStore->querySupportedParams_nb(params);
+    }
+
+    virtual c2_status_t querySupportedValues(
+            std::vector<C2FieldSupportedValuesQuery> &fields,
+            c2_blocking_t mayBlock) const override {
+        // Assume all params are blocking
+        // TODO: Filter for supported params
+        if (mayBlock == C2_DONT_BLOCK && fields.size() != 0) {
+            return C2_BLOCKING;
+        }
+        return mStore->querySupportedValues_sm(fields);
+    }
+
+protected:
+    std::shared_ptr<C2ComponentStore> mStore;
+};
+
+} // unnamed namespace
+
+struct ComponentStore::StoreParameterCache : public ParameterCache {
+    std::mutex mStoreMutex;
+    ComponentStore* mStore;
+
+    StoreParameterCache(ComponentStore* store): mStore{store} {
+    }
+
+    virtual c2_status_t validate(
+            const std::vector<std::shared_ptr<C2ParamDescriptor>>& params
+            ) override {
+        std::scoped_lock _lock(mStoreMutex);
+        return mStore ? mStore->validateSupportedParams(params) : C2_NO_INIT;
+    }
+
+    void onStoreDestroyed() {
+        std::scoped_lock _lock(mStoreMutex);
+        mStore = nullptr;
+    }
+};
+
+ComponentStore::ComponentStore(const std::shared_ptr<C2ComponentStore>& store)
+      : mConfigurable{new CachedConfigurable(std::make_unique<StoreIntf>(store))},
+        mParameterCache{std::make_shared<StoreParameterCache>(this)},
+        mStore{store} {
+
+    std::shared_ptr<C2ComponentStore> platformStore = android::GetCodec2PlatformComponentStore();
+    SetPreferredCodec2ComponentStore(store);
+
+    // Retrieve struct descriptors
+    mParamReflector = mStore->getParamReflector();
+
+    // Retrieve supported parameters from store
+    using namespace std::placeholders;
+    mInit = mConfigurable->init(mParameterCache);
+}
+
+ComponentStore::~ComponentStore() {
+    mParameterCache->onStoreDestroyed();
+}
+
+c2_status_t ComponentStore::status() const {
+    return mInit;
+}
+
+c2_status_t ComponentStore::validateSupportedParams(
+        const std::vector<std::shared_ptr<C2ParamDescriptor>>& params) {
+    c2_status_t res = C2_OK;
+
+    for (const std::shared_ptr<C2ParamDescriptor> &desc : params) {
+        if (!desc) {
+            // All descriptors should be valid
+            res = res ? res : C2_BAD_VALUE;
+            continue;
+        }
+        C2Param::CoreIndex coreIndex = desc->index().coreIndex();
+        std::lock_guard<std::mutex> lock(mStructDescriptorsMutex);
+        auto it = mStructDescriptors.find(coreIndex);
+        if (it == mStructDescriptors.end()) {
+            std::shared_ptr<C2StructDescriptor> structDesc =
+                    mParamReflector->describe(coreIndex);
+            if (!structDesc) {
+                // All supported params must be described
+                res = C2_BAD_INDEX;
+            }
+            mStructDescriptors.insert({ coreIndex, structDesc });
+        }
+    }
+    return res;
+}
+
+std::shared_ptr<ParameterCache> ComponentStore::getParameterCache() const {
+    return mParameterCache;
+}
+
+#ifndef __ANDROID_APEX__
+// static
+std::shared_ptr<FilterWrapper> ComponentStore::GetFilterWrapper() {
+    constexpr const char kPluginPath[] = "libc2filterplugin.so";
+    static std::shared_ptr<FilterWrapper> wrapper = FilterWrapper::Create(
+            std::make_unique<DefaultFilterPlugin>(kPluginPath));
+    return wrapper;
+}
+#endif
+
+// Methods from ::android::hardware::media::c2::V1_0::IComponentStore
+Return<void> ComponentStore::createComponent(
+        const hidl_string& name,
+        const sp<IComponentListener>& listener,
+        const sp<IClientManager>& pool,
+        createComponent_cb _hidl_cb) {
+
+    sp<Component> component;
+    std::shared_ptr<C2Component> c2component;
+    Status status = static_cast<Status>(
+            mStore->createComponent(name, &c2component));
+
+    if (status == Status::OK) {
+#ifndef __ANDROID_APEX__
+        c2component = GetFilterWrapper()->maybeWrapComponent(c2component);
+#endif
+        onInterfaceLoaded(c2component->intf());
+        component = new Component(c2component, listener, this, pool);
+        if (!component) {
+            status = Status::CORRUPTED;
+        } else {
+            reportComponentBirth(component.get());
+            if (component->status() != C2_OK) {
+                status = static_cast<Status>(component->status());
+            } else {
+                component->initListener(component);
+                if (component->status() != C2_OK) {
+                    status = static_cast<Status>(component->status());
+                }
+            }
+        }
+    }
+    _hidl_cb(status, component);
+    return Void();
+}
+
+Return<void> ComponentStore::createInterface(
+        const hidl_string& name,
+        createInterface_cb _hidl_cb) {
+    std::shared_ptr<C2ComponentInterface> c2interface;
+    c2_status_t res = mStore->createInterface(name, &c2interface);
+    sp<IComponentInterface> interface;
+    if (res == C2_OK) {
+#ifndef __ANDROID_APEX__
+        c2interface = GetFilterWrapper()->maybeWrapInterface(c2interface);
+#endif
+        onInterfaceLoaded(c2interface);
+        interface = new ComponentInterface(c2interface, mParameterCache);
+    }
+    _hidl_cb(static_cast<Status>(res), interface);
+    return Void();
+}
+
+Return<void> ComponentStore::listComponents(listComponents_cb _hidl_cb) {
+    std::vector<std::shared_ptr<const C2Component::Traits>> c2traits =
+            mStore->listComponents();
+    hidl_vec<IComponentStore::ComponentTraits> traits(c2traits.size());
+    size_t ix = 0;
+    for (const std::shared_ptr<const C2Component::Traits> &c2trait : c2traits) {
+        if (c2trait) {
+            if (objcpy(&traits[ix], *c2trait)) {
+                ++ix;
+            } else {
+                break;
+            }
+        }
+    }
+    traits.resize(ix);
+    _hidl_cb(Status::OK, traits);
+    return Void();
+}
+
+Return<void> ComponentStore::createInputSurface(createInputSurface_cb _hidl_cb) {
+    sp<GraphicBufferSource> source = new GraphicBufferSource();
+    if (source->initCheck() != OK) {
+        _hidl_cb(Status::CORRUPTED, nullptr);
+        return Void();
+    }
+    using namespace std::placeholders;
+    sp<InputSurface> inputSurface = new InputSurface(
+            mParameterCache,
+            std::make_shared<C2ReflectorHelper>(),
+            source->getHGraphicBufferProducer(),
+            source);
+    _hidl_cb(inputSurface ? Status::OK : Status::NO_MEMORY,
+             inputSurface);
+    return Void();
+}
+
+void ComponentStore::onInterfaceLoaded(const std::shared_ptr<C2ComponentInterface> &intf) {
+    // invalidate unsupported struct descriptors if a new interface is loaded as it may have
+    // exposed new descriptors
+    std::lock_guard<std::mutex> lock(mStructDescriptorsMutex);
+    if (!mLoadedInterfaces.count(intf->getName())) {
+        mUnsupportedStructDescriptors.clear();
+        mLoadedInterfaces.emplace(intf->getName());
+    }
+}
+
+Return<void> ComponentStore::getStructDescriptors(
+        const hidl_vec<uint32_t>& indices,
+        getStructDescriptors_cb _hidl_cb) {
+    hidl_vec<StructDescriptor> descriptors(indices.size());
+    size_t dstIx = 0;
+    Status res = Status::OK;
+    for (size_t srcIx = 0; srcIx < indices.size(); ++srcIx) {
+        std::lock_guard<std::mutex> lock(mStructDescriptorsMutex);
+        const C2Param::CoreIndex coreIndex = C2Param::CoreIndex(indices[srcIx]).coreIndex();
+        const auto item = mStructDescriptors.find(coreIndex);
+        if (item == mStructDescriptors.end()) {
+            // not in the cache, and not known to be unsupported, query local reflector
+            if (!mUnsupportedStructDescriptors.count(coreIndex)) {
+                std::shared_ptr<C2StructDescriptor> structDesc =
+                    mParamReflector->describe(coreIndex);
+                if (!structDesc) {
+                    mUnsupportedStructDescriptors.emplace(coreIndex);
+                } else {
+                    mStructDescriptors.insert({ coreIndex, structDesc });
+                    if (objcpy(&descriptors[dstIx], *structDesc)) {
+                        ++dstIx;
+                        continue;
+                    }
+                    res = Status::CORRUPTED;
+                    break;
+                }
+            }
+            res = Status::NOT_FOUND;
+        } else if (item->second) {
+            if (objcpy(&descriptors[dstIx], *item->second)) {
+                ++dstIx;
+                continue;
+            }
+            res = Status::CORRUPTED;
+            break;
+        } else {
+            res = Status::NO_MEMORY;
+            break;
+        }
+    }
+    descriptors.resize(dstIx);
+    _hidl_cb(res, descriptors);
+    return Void();
+}
+
+Return<sp<IClientManager>> ComponentStore::getPoolClientManager() {
+    return ClientManager::getInstance();
+}
+
+Return<Status> ComponentStore::copyBuffer(const Buffer& src, const Buffer& dst) {
+    // TODO implement
+    (void)src;
+    (void)dst;
+    return Status::OMITTED;
+}
+
+Return<sp<IConfigurable>> ComponentStore::getConfigurable() {
+    return mConfigurable;
+}
+
+// Methods from ::android::hardware::media::c2::V1_1::IComponentStore
+Return<void> ComponentStore::createComponent_1_1(
+        const hidl_string& name,
+        const sp<IComponentListener>& listener,
+        const sp<IClientManager>& pool,
+        createComponent_1_1_cb _hidl_cb) {
+
+    sp<Component> component;
+    std::shared_ptr<C2Component> c2component;
+    Status status = static_cast<Status>(
+            mStore->createComponent(name, &c2component));
+
+    if (status == Status::OK) {
+#ifndef __ANDROID_APEX__
+        c2component = GetFilterWrapper()->maybeWrapComponent(c2component);
+#endif
+        onInterfaceLoaded(c2component->intf());
+        component = new Component(c2component, listener, this, pool);
+        if (!component) {
+            status = Status::CORRUPTED;
+        } else {
+            reportComponentBirth(component.get());
+            if (component->status() != C2_OK) {
+                status = static_cast<Status>(component->status());
+            } else {
+                component->initListener(component);
+                if (component->status() != C2_OK) {
+                    status = static_cast<Status>(component->status());
+                }
+            }
+        }
+    }
+    _hidl_cb(status, component);
+    return Void();
+}
+
+// Methods from ::android::hardware::media::c2::V1_2::IComponentStore
+Return<void> ComponentStore::createComponent_1_2(
+        const hidl_string& name,
+        const sp<IComponentListener>& listener,
+        const sp<IClientManager>& pool,
+        createComponent_1_2_cb _hidl_cb) {
+
+    sp<Component> component;
+    std::shared_ptr<C2Component> c2component;
+    Status status = static_cast<Status>(
+            mStore->createComponent(name, &c2component));
+
+    if (status == Status::OK) {
+#ifndef __ANDROID_APEX__
+        c2component = GetFilterWrapper()->maybeWrapComponent(c2component);
+#endif
+        onInterfaceLoaded(c2component->intf());
+        component = new Component(c2component, listener, this, pool);
+        if (!component) {
+            status = Status::CORRUPTED;
+        } else {
+            reportComponentBirth(component.get());
+            if (component->status() != C2_OK) {
+                status = static_cast<Status>(component->status());
+            } else {
+                component->initListener(component);
+                if (component->status() != C2_OK) {
+                    status = static_cast<Status>(component->status());
+                }
+            }
+        }
+    }
+    _hidl_cb(status, component);
+    return Void();
+}
+
+// Called from createComponent() after a successful creation of `component`.
+void ComponentStore::reportComponentBirth(Component* component) {
+    ComponentStatus componentStatus;
+    componentStatus.c2Component = component->mComponent;
+    componentStatus.birthTime = std::chrono::system_clock::now();
+
+    std::lock_guard<std::mutex> lock(mComponentRosterMutex);
+    mComponentRoster.emplace(component, componentStatus);
+}
+
+// Called from within the destructor of `component`. No virtual function calls
+// are made on `component` here.
+void ComponentStore::reportComponentDeath(Component* component) {
+    std::lock_guard<std::mutex> lock(mComponentRosterMutex);
+    mComponentRoster.erase(component);
+}
+
+// Dumps component traits.
+std::ostream& ComponentStore::dump(
+        std::ostream& out,
+        const std::shared_ptr<const C2Component::Traits>& comp) {
+
+    constexpr const char indent[] = "    ";
+
+    out << indent << "name: " << comp->name << std::endl;
+    out << indent << "domain: " << comp->domain << std::endl;
+    out << indent << "kind: " << comp->kind << std::endl;
+    out << indent << "rank: " << comp->rank << std::endl;
+    out << indent << "mediaType: " << comp->mediaType << std::endl;
+    out << indent << "aliases:";
+    for (const auto& alias : comp->aliases) {
+        out << ' ' << alias;
+    }
+    out << std::endl;
+
+    return out;
+}
+
+// Dumps component status.
+std::ostream& ComponentStore::dump(
+        std::ostream& out,
+        ComponentStatus& compStatus) {
+
+    constexpr const char indent[] = "    ";
+
+    // Print birth time.
+    std::chrono::milliseconds ms =
+            std::chrono::duration_cast<std::chrono::milliseconds>(
+                compStatus.birthTime.time_since_epoch());
+    std::time_t birthTime = std::chrono::system_clock::to_time_t(
+            compStatus.birthTime);
+    std::tm tm = *std::localtime(&birthTime);
+    out << indent << "Creation time: "
+        << std::put_time(&tm, "%Y-%m-%d %H:%M:%S")
+        << '.' << std::setfill('0') << std::setw(3) << ms.count() % 1000
+        << std::endl;
+
+    // Print name and id.
+    std::shared_ptr<C2ComponentInterface> intf = compStatus.c2Component->intf();
+    if (!intf) {
+        out << indent << "Unknown component -- null interface" << std::endl;
+        return out;
+    }
+    out << indent << "Name: " << intf->getName() << std::endl;
+    out << indent << "Id: " << intf->getId() << std::endl;
+
+    return out;
+}
+
+// Dumps information when lshal is called.
+Return<void> ComponentStore::debug(
+        const hidl_handle& handle,
+        const hidl_vec<hidl_string>& /* args */) {
+    LOG(INFO) << "debug -- dumping...";
+    const native_handle_t *h = handle.getNativeHandle();
+    if (!h || h->numFds != 1) {
+       LOG(ERROR) << "debug -- dumping failed -- "
+               "invalid file descriptor to dump to";
+       return Void();
+    }
+    std::ostringstream out;
+
+    { // Populate "out".
+
+        constexpr const char indent[] = "  ";
+
+        // Show name.
+        out << "Beginning of dump -- C2ComponentStore: "
+                << mStore->getName() << std::endl << std::endl;
+
+        // Retrieve the list of supported components.
+        std::vector<std::shared_ptr<const C2Component::Traits>> traitsList =
+                mStore->listComponents();
+
+        // Dump the traits of supported components.
+        out << indent << "Supported components:" << std::endl << std::endl;
+        if (traitsList.size() == 0) {
+            out << indent << indent << "NONE" << std::endl << std::endl;
+        } else {
+            for (const auto& traits : traitsList) {
+                dump(out, traits) << std::endl;
+            }
+        }
+
+        // Dump active components.
+        {
+            out << indent << "Active components:" << std::endl << std::endl;
+            std::lock_guard<std::mutex> lock(mComponentRosterMutex);
+            if (mComponentRoster.size() == 0) {
+                out << indent << indent << "NONE" << std::endl << std::endl;
+            } else {
+                for (auto& pair : mComponentRoster) {
+                    dump(out, pair.second) << std::endl;
+                }
+            }
+        }
+
+        out << "End of dump -- C2ComponentStore: "
+                << mStore->getName() << std::endl;
+    }
+
+    if (!android::base::WriteStringToFd(out.str(), h->data[0])) {
+        PLOG(WARNING) << "debug -- dumping failed -- write()";
+    } else {
+        LOG(INFO) << "debug -- dumping succeeded";
+    }
+    return Void();
+}
+
+} // namespace utils
+} // namespace V1_2
+} // namespace c2
+} // namespace media
+} // namespace hardware
+} // namespace android
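
Editor's note, not part of the patch: getStructDescriptors() above keeps two caches behind mStructDescriptorsMutex -- descriptors already described, and core indices known to be unsupported -- and onInterfaceLoaded() drops the negative cache when a not-yet-seen interface is loaded, since it may expose descriptors that previously looked unsupported. A standalone sketch of that caching scheme using plain std:: containers; DescriptorCache, CoreIndex, and queryReflector() are hypothetical stand-ins:

#include <cstdint>
#include <map>
#include <memory>
#include <mutex>
#include <optional>
#include <set>
#include <string>

using CoreIndex = uint32_t;                     // stand-in for C2Param::CoreIndex
struct StructDescriptor { std::string name; };  // stand-in for C2StructDescriptor

class DescriptorCache {
public:
    // Serve from cache, remember misses, and only query the (slow) reflector
    // for indices not yet known either way.
    std::optional<StructDescriptor> describe(CoreIndex index) {
        std::lock_guard<std::mutex> lock(mMutex);
        auto it = mKnown.find(index);
        if (it != mKnown.end()) {
            return *it->second;
        }
        if (mUnsupported.count(index)) {
            return std::nullopt;                // cached negative result
        }
        std::optional<StructDescriptor> desc = queryReflector(index);
        if (desc) {
            mKnown.emplace(index, std::make_shared<StructDescriptor>(*desc));
        } else {
            mUnsupported.emplace(index);
        }
        return desc;
    }

    // A newly loaded interface may expose descriptors that previously looked
    // unsupported, so only the negative cache is dropped.
    void onInterfaceLoaded() {
        std::lock_guard<std::mutex> lock(mMutex);
        mUnsupported.clear();
    }

private:
    std::optional<StructDescriptor> queryReflector(CoreIndex index) {
        // Placeholder for mParamReflector->describe(coreIndex).
        if (index % 2) {
            return StructDescriptor{"demo"};
        }
        return std::nullopt;
    }

    std::mutex mMutex;
    std::map<CoreIndex, std::shared_ptr<StructDescriptor>> mKnown;
    std::set<CoreIndex> mUnsupported;
};
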
diff --git a/media/codec2/hidl/1.1/utils/OutputBufferQueue.cpp b/media/codec2/hidl/1.2/utils/Configurable.cpp
similarity index 84%
copy from media/codec2/hidl/1.1/utils/OutputBufferQueue.cpp
copy to media/codec2/hidl/1.2/utils/Configurable.cpp
index 65756e8..243870e 100644
--- a/media/codec2/hidl/1.1/utils/OutputBufferQueue.cpp
+++ b/media/codec2/hidl/1.2/utils/Configurable.cpp
@@ -1,5 +1,5 @@
 /*
- * Copyright 2019 The Android Open Source Project
+ * Copyright 2021 The Android Open Source Project
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
@@ -14,4 +14,4 @@
  * limitations under the License.
  */
 
-#include <codec2/hidl/1.1/OutputBufferQueue.h>
+#include <codec2/hidl/1.2/Configurable.h>
diff --git a/media/codec2/hidl/1.1/utils/OutputBufferQueue.cpp b/media/codec2/hidl/1.2/utils/InputBufferManager.cpp
similarity index 84%
rename from media/codec2/hidl/1.1/utils/OutputBufferQueue.cpp
rename to media/codec2/hidl/1.2/utils/InputBufferManager.cpp
index 65756e8..1120075 100644
--- a/media/codec2/hidl/1.1/utils/OutputBufferQueue.cpp
+++ b/media/codec2/hidl/1.2/utils/InputBufferManager.cpp
@@ -1,5 +1,5 @@
 /*
- * Copyright 2019 The Android Open Source Project
+ * Copyright 2021 The Android Open Source Project
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
@@ -14,4 +14,4 @@
  * limitations under the License.
  */
 
-#include <codec2/hidl/1.1/OutputBufferQueue.h>
+#include <codec2/hidl/1.2/InputBufferManager.h>
diff --git a/media/codec2/hidl/1.1/utils/OutputBufferQueue.cpp b/media/codec2/hidl/1.2/utils/InputSurface.cpp
similarity index 84%
copy from media/codec2/hidl/1.1/utils/OutputBufferQueue.cpp
copy to media/codec2/hidl/1.2/utils/InputSurface.cpp
index 65756e8..7c4d28b 100644
--- a/media/codec2/hidl/1.1/utils/OutputBufferQueue.cpp
+++ b/media/codec2/hidl/1.2/utils/InputSurface.cpp
@@ -1,5 +1,5 @@
 /*
- * Copyright 2019 The Android Open Source Project
+ * Copyright 2021 The Android Open Source Project
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
@@ -14,4 +14,4 @@
  * limitations under the License.
  */
 
-#include <codec2/hidl/1.1/OutputBufferQueue.h>
+#include <codec2/hidl/1.2/InputSurface.h>
diff --git a/media/codec2/hidl/1.1/utils/OutputBufferQueue.cpp b/media/codec2/hidl/1.2/utils/InputSurfaceConnection.cpp
similarity index 84%
copy from media/codec2/hidl/1.1/utils/OutputBufferQueue.cpp
copy to media/codec2/hidl/1.2/utils/InputSurfaceConnection.cpp
index 65756e8..1bd58c2 100644
--- a/media/codec2/hidl/1.1/utils/OutputBufferQueue.cpp
+++ b/media/codec2/hidl/1.2/utils/InputSurfaceConnection.cpp
@@ -1,5 +1,5 @@
 /*
- * Copyright 2019 The Android Open Source Project
+ * Copyright 2021 The Android Open Source Project
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
@@ -14,4 +14,4 @@
  * limitations under the License.
  */
 
-#include <codec2/hidl/1.1/OutputBufferQueue.h>
+#include <codec2/hidl/1.2/InputSurfaceConnection.h>
diff --git a/media/codec2/hidl/1.2/utils/include/codec2/hidl/1.2/Component.h b/media/codec2/hidl/1.2/utils/include/codec2/hidl/1.2/Component.h
new file mode 100644
index 0000000..7937664
--- /dev/null
+++ b/media/codec2/hidl/1.2/utils/include/codec2/hidl/1.2/Component.h
@@ -0,0 +1,154 @@
+/*
+ * Copyright 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef CODEC2_HIDL_V1_2_UTILS_COMPONENT_H
+#define CODEC2_HIDL_V1_2_UTILS_COMPONENT_H
+
+#include <android/hardware/media/bufferpool/2.0/IClientManager.h>
+#include <android/hardware/media/c2/1.2/IComponent.h>
+#include <android/hardware/media/c2/1.0/IComponentInterface.h>
+#include <android/hardware/media/c2/1.0/IComponentListener.h>
+#include <android/hardware/media/c2/1.2/IComponentStore.h>
+#include <android/hardware/media/c2/1.0/IInputSink.h>
+#include <codec2/hidl/1.2/ComponentInterface.h>
+#include <codec2/hidl/1.2/Configurable.h>
+#include <codec2/hidl/1.2/types.h>
+#include <hidl/Status.h>
+#include <hwbinder/IBinder.h>
+
+#include <C2Component.h>
+#include <C2Buffer.h>
+#include <C2.h>
+
+#include <map>
+#include <memory>
+#include <mutex>
+
+namespace android {
+namespace hardware {
+namespace media {
+namespace c2 {
+namespace V1_2 {
+
+using ::android::hardware::media::c2::V1_2::IComponent;
+using ::android::hardware::media::c2::V1_0::IComponentListener;
+
+namespace utils {
+
+using ::android::hardware::hidl_array;
+using ::android::hardware::hidl_memory;
+using ::android::hardware::hidl_string;
+using ::android::hardware::hidl_vec;
+using ::android::hardware::Return;
+using ::android::hardware::Void;
+using ::android::hardware::IBinder;
+using ::android::sp;
+using ::android::wp;
+
+struct ComponentStore;
+
+struct Component : public IComponent,
+                   public std::enable_shared_from_this<Component> {
+    Component(
+            const std::shared_ptr<C2Component>&,
+            const sp<IComponentListener>& listener,
+            const sp<ComponentStore>& store,
+            const sp<::android::hardware::media::bufferpool::V2_0::
+                IClientManager>& clientPoolManager);
+    c2_status_t status() const;
+
+    typedef ::android::hardware::graphics::bufferqueue::V1_0::
+            IGraphicBufferProducer HGraphicBufferProducer1;
+    typedef ::android::hardware::graphics::bufferqueue::V2_0::
+            IGraphicBufferProducer HGraphicBufferProducer2;
+
+    // Methods from IComponent follow.
+    virtual Return<Status> queue(const WorkBundle& workBundle) override;
+    virtual Return<void> flush(flush_cb _hidl_cb) override;
+    virtual Return<Status> drain(bool withEos) override;
+    virtual Return<Status> setOutputSurface(
+            uint64_t blockPoolId,
+            const sp<HGraphicBufferProducer2>& surface) override;
+    virtual Return<void> connectToInputSurface(
+            const sp<IInputSurface>& inputSurface,
+            connectToInputSurface_cb _hidl_cb) override;
+    virtual Return<void> connectToOmxInputSurface(
+            const sp<HGraphicBufferProducer1>& producer,
+            const sp<::android::hardware::media::omx::V1_0::
+            IGraphicBufferSource>& source,
+            connectToOmxInputSurface_cb _hidl_cb) override;
+    virtual Return<Status> disconnectFromInputSurface() override;
+    virtual Return<void> createBlockPool(
+            uint32_t allocatorId,
+            createBlockPool_cb _hidl_cb) override;
+    virtual Return<Status> destroyBlockPool(uint64_t blockPoolId) override;
+    virtual Return<Status> start() override;
+    virtual Return<Status> stop() override;
+    virtual Return<Status> reset() override;
+    virtual Return<Status> release() override;
+    virtual Return<sp<IComponentInterface>> getInterface() override;
+    virtual Return<sp<IInputSink>> asInputSink() override;
+    virtual Return<void> configureVideoTunnel(
+            uint32_t avSyncHwId, configureVideoTunnel_cb _hidl_cb) override;
+    virtual Return<Status> setOutputSurfaceWithSyncObj(
+            uint64_t blockPoolId,
+            const sp<HGraphicBufferProducer2>& surface,
+            const SurfaceSyncObj& syncObject) override;
+
+
+    // Returns a C2Component associated to the given sink if the sink is indeed
+    // a local component. Returns nullptr otherwise.
+    //
+    // This function is used by InputSurface::connect().
+    static std::shared_ptr<C2Component> findLocalComponent(
+            const sp<IInputSink>& sink);
+
+protected:
+    c2_status_t mInit;
+    std::shared_ptr<C2Component> mComponent;
+    sp<ComponentInterface> mInterface;
+    sp<IComponentListener> mListener;
+    sp<ComponentStore> mStore;
+    ::android::hardware::media::c2::V1_2::utils::DefaultBufferPoolSender
+            mBufferPoolSender;
+
+    struct Sink;
+    std::mutex mSinkMutex;
+    sp<Sink> mSink;
+
+    std::mutex mBlockPoolsMutex;
+    // This map keeps C2BlockPool objects that are created by createBlockPool()
+    // alive. These C2BlockPool objects can be deleted by calling
+    // destroyBlockPool(), reset() or release(), or by destroying the component.
+    std::map<uint64_t, std::shared_ptr<C2BlockPool>> mBlockPools;
+
+    void initListener(const sp<Component>& self);
+
+    virtual ~Component() override;
+
+    friend struct ComponentStore;
+
+    struct Listener;
+};
+
+} // namespace utils
+} // namespace V1_2
+} // namespace c2
+} // namespace media
+} // namespace hardware
+} // namespace android
+
+#endif // CODEC2_HIDL_V1_2_UTILS_COMPONENT_H
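
Editor's note, not part of the patch: the header above documents that mBlockPools is what keeps pools returned by createBlockPool() alive until destroyBlockPool(), reset(), release(), or component destruction. A standalone sketch of that ownership rule using std::shared_ptr; PoolRegistry and BlockPool are hypothetical stand-ins for the Component/C2BlockPool pair:

#include <cstdint>
#include <map>
#include <memory>
#include <mutex>

struct BlockPool {                       // stand-in for C2BlockPool
    explicit BlockPool(uint64_t id) : localId(id) {}
    uint64_t localId;
};

class PoolRegistry {                     // stand-in for the Component side
public:
    // createBlockPool(): the map entry is the only strong reference, so the
    // pool stays alive exactly as long as the entry does.
    uint64_t createBlockPool() {
        std::lock_guard<std::mutex> lock(mMutex);
        uint64_t id = mNextId++;
        mBlockPools.emplace(id, std::make_shared<BlockPool>(id));
        return id;
    }

    // destroyBlockPool(): dropping one entry releases that pool.
    bool destroyBlockPool(uint64_t id) {
        std::lock_guard<std::mutex> lock(mMutex);
        return mBlockPools.erase(id) == 1;
    }

    // reset()/release()/destruction: clearing the map releases every pool.
    void reset() {
        std::lock_guard<std::mutex> lock(mMutex);
        mBlockPools.clear();
    }

private:
    std::mutex mMutex;
    uint64_t mNextId = 1;
    std::map<uint64_t, std::shared_ptr<BlockPool>> mBlockPools;
};
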
diff --git a/media/codec2/hidl/1.1/utils/include/codec2/hidl/1.1/OutputBufferQueue.h b/media/codec2/hidl/1.2/utils/include/codec2/hidl/1.2/ComponentInterface.h
similarity index 65%
copy from media/codec2/hidl/1.1/utils/include/codec2/hidl/1.1/OutputBufferQueue.h
copy to media/codec2/hidl/1.2/utils/include/codec2/hidl/1.2/ComponentInterface.h
index f77852d..09d9f93 100644
--- a/media/codec2/hidl/1.1/utils/include/codec2/hidl/1.1/OutputBufferQueue.h
+++ b/media/codec2/hidl/1.2/utils/include/codec2/hidl/1.2/ComponentInterface.h
@@ -1,5 +1,5 @@
 /*
- * Copyright 2019 The Android Open Source Project
+ * Copyright 2021 The Android Open Source Project
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
@@ -14,26 +14,26 @@
  * limitations under the License.
  */
 
-#ifndef CODEC2_HIDL_V1_1_UTILS_OUTPUT_BUFFER_QUEUE
-#define CODEC2_HIDL_V1_1_UTILS_OUTPUT_BUFFER_QUEUE
+#ifndef CODEC2_HIDL_V1_2_UTILS_COMPONENT_INTERFACE_H
+#define CODEC2_HIDL_V1_2_UTILS_COMPONENT_INTERFACE_H
 
-#include <codec2/hidl/1.0/OutputBufferQueue.h>
-#include <codec2/hidl/1.1/types.h>
+#include <codec2/hidl/1.0/ComponentInterface.h>
+#include <codec2/hidl/1.2/types.h>
 
 namespace android {
 namespace hardware {
 namespace media {
 namespace c2 {
-namespace V1_1 {
+namespace V1_2 {
 namespace utils {
 
-using ::android::hardware::media::c2::V1_0::utils::OutputBufferQueue;
+using ::android::hardware::media::c2::V1_0::utils::ComponentInterface;
 
 } // namespace utils
-} // namespace V1_1
+} // namespace V1_2
 } // namespace c2
 } // namespace media
 } // namespace hardware
 } // namespace android
 
-#endif // CODEC2_HIDL_V1_1_UTILS_OUTPUT_BUFFER_QUEUE
+#endif // CODEC2_HIDL_V1_2_UTILS_COMPONENT_INTERFACE_H
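
Editor's note, not part of the patch: this header gives the 1.2 namespace its ComponentInterface by re-exporting the 1.0 utils implementation with a using-declaration rather than duplicating it, the same trick the other 1.2 forwarding headers use. A tiny self-contained illustration of versioned-namespace aliasing; the widget namespaces are hypothetical:

#include <iostream>

namespace widget {
namespace v1_0 {
struct Codec {
    void hello() const { std::cout << "v1_0 implementation\n"; }
};
}  // namespace v1_0

namespace v1_2 {
// The newer version re-exports the older implementation unchanged, just like
// ComponentInterface in the header above.
using ::widget::v1_0::Codec;
}  // namespace v1_2
}  // namespace widget

int main() {
    widget::v1_2::Codec c;   // resolves to the v1_0 type
    c.hello();
    return 0;
}
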
diff --git a/media/codec2/hidl/1.2/utils/include/codec2/hidl/1.2/ComponentStore.h b/media/codec2/hidl/1.2/utils/include/codec2/hidl/1.2/ComponentStore.h
new file mode 100644
index 0000000..e95a651
--- /dev/null
+++ b/media/codec2/hidl/1.2/utils/include/codec2/hidl/1.2/ComponentStore.h
@@ -0,0 +1,177 @@
+/*
+ * Copyright 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef CODEC2_HIDL_V1_2_UTILS_COMPONENT_STORE_H
+#define CODEC2_HIDL_V1_2_UTILS_COMPONENT_STORE_H
+
+#include <codec2/hidl/1.2/Component.h>
+#include <codec2/hidl/1.2/ComponentInterface.h>
+#include <codec2/hidl/1.2/Configurable.h>
+#include <codec2/hidl/1.2/types.h>
+
+#include <android/hardware/media/bufferpool/2.0/IClientManager.h>
+#include <android/hardware/media/c2/1.2/IComponentStore.h>
+#include <hidl/Status.h>
+
+#include <C2Component.h>
+#include <C2Param.h>
+#include <C2.h>
+
+#include <chrono>
+#include <map>
+#include <memory>
+#include <mutex>
+#include <set>
+#include <vector>
+
+namespace android {
+class FilterWrapper;
+
+namespace hardware {
+namespace media {
+namespace c2 {
+namespace V1_2 {
+namespace utils {
+
+using ::android::hardware::media::bufferpool::V2_0::IClientManager;
+
+using ::android::hardware::hidl_handle;
+using ::android::hardware::hidl_string;
+using ::android::hardware::hidl_vec;
+using ::android::hardware::Return;
+using ::android::hardware::Void;
+using ::android::sp;
+
+struct ComponentStore : public IComponentStore {
+    ComponentStore(const std::shared_ptr<C2ComponentStore>& store);
+    virtual ~ComponentStore();
+
+    /**
+     * Returns the status of the construction of this object.
+     */
+    c2_status_t status() const;
+
+    /**
+     * This function is called by CachedConfigurable::init() to validate
+     * supported parameters.
+     */
+    c2_status_t validateSupportedParams(
+            const std::vector<std::shared_ptr<C2ParamDescriptor>>& params);
+
+    /**
+     * Returns the store's ParameterCache. This is used for validation by
+     * Configurable::init().
+     */
+    std::shared_ptr<ParameterCache> getParameterCache() const;
+
+    static std::shared_ptr<FilterWrapper> GetFilterWrapper();
+
+    // Methods from ::android::hardware::media::c2::V1_0::IComponentStore.
+    virtual Return<void> createComponent(
+            const hidl_string& name,
+            const sp<IComponentListener>& listener,
+            const sp<IClientManager>& pool,
+            createComponent_cb _hidl_cb) override;
+    virtual Return<void> createInterface(
+            const hidl_string& name,
+            createInterface_cb _hidl_cb) override;
+    virtual Return<void> listComponents(listComponents_cb _hidl_cb) override;
+    virtual Return<void> createInputSurface(
+            createInputSurface_cb _hidl_cb) override;
+    virtual Return<void> getStructDescriptors(
+            const hidl_vec<uint32_t>& indices,
+            getStructDescriptors_cb _hidl_cb) override;
+    virtual Return<sp<IClientManager>> getPoolClientManager() override;
+    virtual Return<Status> copyBuffer(
+            const Buffer& src,
+            const Buffer& dst) override;
+    virtual Return<sp<IConfigurable>> getConfigurable() override;
+
+    // Methods from ::android::hardware::media::c2::V1_1::IComponentStore.
+    virtual Return<void> createComponent_1_1(
+            const hidl_string& name,
+            const sp<IComponentListener>& listener,
+            const sp<IClientManager>& pool,
+            createComponent_1_1_cb _hidl_cb) override;
+
+    // Methods from ::android::hardware::media::c2::V1_2::IComponentStore.
+    virtual Return<void> createComponent_1_2(
+            const hidl_string& name,
+            const sp<IComponentListener>& listener,
+            const sp<IClientManager>& pool,
+            createComponent_1_2_cb _hidl_cb) override;
+
+    /**
+     * Dumps information when lshal is called.
+     */
+    virtual Return<void> debug(
+            const hidl_handle& handle,
+            const hidl_vec<hidl_string>& args) override;
+
+protected:
+    sp<CachedConfigurable> mConfigurable;
+    struct StoreParameterCache;
+    std::shared_ptr<StoreParameterCache> mParameterCache;
+
+    // Does bookkeeping for an interface that has been loaded.
+    void onInterfaceLoaded(const std::shared_ptr<C2ComponentInterface> &intf);
+
+    c2_status_t mInit;
+    std::shared_ptr<C2ComponentStore> mStore;
+    std::shared_ptr<C2ParamReflector> mParamReflector;
+
+    std::map<C2Param::CoreIndex, std::shared_ptr<C2StructDescriptor>> mStructDescriptors;
+    std::set<C2Param::CoreIndex> mUnsupportedStructDescriptors;
+    std::set<C2String> mLoadedInterfaces;
+    mutable std::mutex mStructDescriptorsMutex;
+
+    // ComponentStore keeps track of live Components.
+
+    struct ComponentStatus {
+        std::shared_ptr<C2Component> c2Component;
+        std::chrono::system_clock::time_point birthTime;
+    };
+
+    mutable std::mutex mComponentRosterMutex;
+    std::map<Component*, ComponentStatus> mComponentRoster;
+
+    // Called whenever Component is created.
+    void reportComponentBirth(Component* component);
+    // Called only from the destructor of Component.
+    void reportComponentDeath(Component* component);
+
+    friend Component;
+
+    // Helper functions for dumping.
+
+    std::ostream& dump(
+            std::ostream& out,
+            const std::shared_ptr<const C2Component::Traits>& comp);
+
+    std::ostream& dump(
+            std::ostream& out,
+            ComponentStatus& compStatus);
+
+};
+
+} // namespace utils
+} // namespace V1_2
+} // namespace c2
+} // namespace media
+} // namespace hardware
+} // namespace android
+
+#endif // CODEC2_HIDL_V1_2_UTILS_COMPONENT_STORE_H
diff --git a/media/codec2/hidl/1.2/utils/include/codec2/hidl/1.2/Configurable.h b/media/codec2/hidl/1.2/utils/include/codec2/hidl/1.2/Configurable.h
new file mode 100644
index 0000000..2efad31
--- /dev/null
+++ b/media/codec2/hidl/1.2/utils/include/codec2/hidl/1.2/Configurable.h
@@ -0,0 +1,41 @@
+/*
+ * Copyright 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef CODEC2_HIDL_V1_2_UTILS_CONFIGURABLE_H
+#define CODEC2_HIDL_V1_2_UTILS_CONFIGURABLE_H
+
+#include <codec2/hidl/1.0/Configurable.h>
+#include <codec2/hidl/1.2/types.h>
+
+namespace android {
+namespace hardware {
+namespace media {
+namespace c2 {
+namespace V1_2 {
+namespace utils {
+
+using ::android::hardware::media::c2::V1_0::utils::ConfigurableC2Intf;
+using ::android::hardware::media::c2::V1_0::utils::ParameterCache;
+using ::android::hardware::media::c2::V1_0::utils::CachedConfigurable;
+
+} // namespace utils
+} // namespace V1_2
+} // namespace c2
+} // namespace media
+} // namespace hardware
+} // namespace android
+
+#endif // CODEC2_HIDL_V1_2_UTILS_CONFIGURABLE_H
diff --git a/media/codec2/hidl/1.1/utils/include/codec2/hidl/1.1/OutputBufferQueue.h b/media/codec2/hidl/1.2/utils/include/codec2/hidl/1.2/InputBufferManager.h
similarity index 65%
copy from media/codec2/hidl/1.1/utils/include/codec2/hidl/1.1/OutputBufferQueue.h
copy to media/codec2/hidl/1.2/utils/include/codec2/hidl/1.2/InputBufferManager.h
index f77852d..e4a5db4 100644
--- a/media/codec2/hidl/1.1/utils/include/codec2/hidl/1.1/OutputBufferQueue.h
+++ b/media/codec2/hidl/1.2/utils/include/codec2/hidl/1.2/InputBufferManager.h
@@ -1,5 +1,5 @@
 /*
- * Copyright 2019 The Android Open Source Project
+ * Copyright 2021 The Android Open Source Project
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
@@ -14,26 +14,26 @@
  * limitations under the License.
  */
 
-#ifndef CODEC2_HIDL_V1_1_UTILS_OUTPUT_BUFFER_QUEUE
-#define CODEC2_HIDL_V1_1_UTILS_OUTPUT_BUFFER_QUEUE
+#ifndef CODEC2_HIDL_V1_2_UTILS_INPUT_BUFFER_MANAGER_H
+#define CODEC2_HIDL_V1_2_UTILS_INPUT_BUFFER_MANAGER_H
 
-#include <codec2/hidl/1.0/OutputBufferQueue.h>
-#include <codec2/hidl/1.1/types.h>
+#include <codec2/hidl/1.0/InputBufferManager.h>
+#include <codec2/hidl/1.2/types.h>
 
 namespace android {
 namespace hardware {
 namespace media {
 namespace c2 {
-namespace V1_1 {
+namespace V1_2 {
 namespace utils {
 
-using ::android::hardware::media::c2::V1_0::utils::OutputBufferQueue;
+using ::android::hardware::media::c2::V1_0::utils::InputBufferManager;
 
 } // namespace utils
-} // namespace V1_1
+} // namespace V1_2
 } // namespace c2
 } // namespace media
 } // namespace hardware
 } // namespace android
 
-#endif // CODEC2_HIDL_V1_1_UTILS_OUTPUT_BUFFER_QUEUE
+#endif // CODEC2_HIDL_V1_2_UTILS_INPUT_BUFFER_MANAGER_H
diff --git a/media/codec2/hidl/1.1/utils/include/codec2/hidl/1.1/OutputBufferQueue.h b/media/codec2/hidl/1.2/utils/include/codec2/hidl/1.2/InputSurface.h
similarity index 65%
rename from media/codec2/hidl/1.1/utils/include/codec2/hidl/1.1/OutputBufferQueue.h
rename to media/codec2/hidl/1.2/utils/include/codec2/hidl/1.2/InputSurface.h
index f77852d..3fae86b 100644
--- a/media/codec2/hidl/1.1/utils/include/codec2/hidl/1.1/OutputBufferQueue.h
+++ b/media/codec2/hidl/1.2/utils/include/codec2/hidl/1.2/InputSurface.h
@@ -1,5 +1,5 @@
 /*
- * Copyright 2019 The Android Open Source Project
+ * Copyright 2021 The Android Open Source Project
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
@@ -14,26 +14,26 @@
  * limitations under the License.
  */
 
-#ifndef CODEC2_HIDL_V1_1_UTILS_OUTPUT_BUFFER_QUEUE
-#define CODEC2_HIDL_V1_1_UTILS_OUTPUT_BUFFER_QUEUE
+#ifndef CODEC2_HIDL_V1_2_UTILS_INPUT_SURFACE_H
+#define CODEC2_HIDL_V1_2_UTILS_INPUT_SURFACE_H
 
-#include <codec2/hidl/1.0/OutputBufferQueue.h>
-#include <codec2/hidl/1.1/types.h>
+#include <codec2/hidl/1.0/InputSurface.h>
+#include <codec2/hidl/1.2/types.h>
 
 namespace android {
 namespace hardware {
 namespace media {
 namespace c2 {
-namespace V1_1 {
+namespace V1_2 {
 namespace utils {
 
-using ::android::hardware::media::c2::V1_0::utils::OutputBufferQueue;
+using ::android::hardware::media::c2::V1_0::utils::InputSurface;
 
 } // namespace utils
-} // namespace V1_1
+} // namespace V1_2
 } // namespace c2
 } // namespace media
 } // namespace hardware
 } // namespace android
 
-#endif // CODEC2_HIDL_V1_1_UTILS_OUTPUT_BUFFER_QUEUE
+#endif // CODEC2_HIDL_V1_2_UTILS_INPUT_SURFACE_H
diff --git a/media/codec2/hidl/1.1/utils/include/codec2/hidl/1.1/OutputBufferQueue.h b/media/codec2/hidl/1.2/utils/include/codec2/hidl/1.2/InputSurfaceConnection.h
similarity index 63%
copy from media/codec2/hidl/1.1/utils/include/codec2/hidl/1.1/OutputBufferQueue.h
copy to media/codec2/hidl/1.2/utils/include/codec2/hidl/1.2/InputSurfaceConnection.h
index f77852d..13a8a61 100644
--- a/media/codec2/hidl/1.1/utils/include/codec2/hidl/1.1/OutputBufferQueue.h
+++ b/media/codec2/hidl/1.2/utils/include/codec2/hidl/1.2/InputSurfaceConnection.h
@@ -1,5 +1,5 @@
 /*
- * Copyright 2019 The Android Open Source Project
+ * Copyright 2021 The Android Open Source Project
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
@@ -14,26 +14,26 @@
  * limitations under the License.
  */
 
-#ifndef CODEC2_HIDL_V1_1_UTILS_OUTPUT_BUFFER_QUEUE
-#define CODEC2_HIDL_V1_1_UTILS_OUTPUT_BUFFER_QUEUE
+#ifndef CODEC2_HIDL_V1_2_UTILS_INPUT_SURFACE_CONNECTION_H
+#define CODEC2_HIDL_V1_2_UTILS_INPUT_SURFACE_CONNECTION_H
 
-#include <codec2/hidl/1.0/OutputBufferQueue.h>
-#include <codec2/hidl/1.1/types.h>
+#include <codec2/hidl/1.0/InputSurfaceConnection.h>
+#include <codec2/hidl/1.2/types.h>
 
 namespace android {
 namespace hardware {
 namespace media {
 namespace c2 {
-namespace V1_1 {
+namespace V1_2 {
 namespace utils {
 
-using ::android::hardware::media::c2::V1_0::utils::OutputBufferQueue;
+using ::android::hardware::media::c2::V1_0::utils::InputSurfaceConnection;
 
 } // namespace utils
-} // namespace V1_1
+} // namespace V1_2
 } // namespace c2
 } // namespace media
 } // namespace hardware
 } // namespace android
 
-#endif // CODEC2_HIDL_V1_1_UTILS_OUTPUT_BUFFER_QUEUE
+#endif // CODEC2_HIDL_V1_2_UTILS_INPUT_SURFACE_CONNECTION_H
diff --git a/media/codec2/hidl/1.2/utils/include/codec2/hidl/1.2/types.h b/media/codec2/hidl/1.2/utils/include/codec2/hidl/1.2/types.h
new file mode 100644
index 0000000..d3180b0
--- /dev/null
+++ b/media/codec2/hidl/1.2/utils/include/codec2/hidl/1.2/types.h
@@ -0,0 +1,105 @@
+/*
+ * Copyright 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef CODEC2_HIDL_V1_2_UTILS_TYPES_H
+#define CODEC2_HIDL_V1_2_UTILS_TYPES_H
+
+#include <android/hardware/media/c2/1.2/IComponent.h>
+#include <android/hardware/media/c2/1.0/IComponentInterface.h>
+#include <android/hardware/media/c2/1.0/IComponentListener.h>
+#include <android/hardware/media/c2/1.2/IComponentStore.h>
+#include <android/hardware/media/c2/1.0/IConfigurable.h>
+#include <android/hardware/media/c2/1.0/IInputSink.h>
+#include <android/hardware/media/c2/1.0/IInputSurface.h>
+#include <android/hardware/media/c2/1.0/IInputSurfaceConnection.h>
+
+#include <codec2/hidl/1.0/types.h>
+#include <android/hardware/media/c2/1.2/types.h>
+
+namespace android {
+namespace hardware {
+namespace media {
+namespace c2 {
+namespace V1_2 {
+
+using ::android::hardware::media::c2::V1_0::BaseBlock;
+using ::android::hardware::media::c2::V1_0::Block;
+using ::android::hardware::media::c2::V1_0::Buffer;
+using ::android::hardware::media::c2::V1_0::FieldDescriptor;
+using ::android::hardware::media::c2::V1_0::FieldId;
+using ::android::hardware::media::c2::V1_0::FieldSupportedValues;
+using ::android::hardware::media::c2::V1_0::FieldSupportedValuesQuery;
+using ::android::hardware::media::c2::V1_0::FieldSupportedValuesQueryResult;
+using ::android::hardware::media::c2::V1_0::FrameData;
+using ::android::hardware::media::c2::V1_0::InfoBuffer;
+using ::android::hardware::media::c2::V1_0::ParamDescriptor;
+using ::android::hardware::media::c2::V1_0::ParamField;
+using ::android::hardware::media::c2::V1_0::ParamFieldValues;
+using ::android::hardware::media::c2::V1_0::ParamIndex;
+using ::android::hardware::media::c2::V1_0::Params;
+using ::android::hardware::media::c2::V1_0::PrimitiveValue;
+using ::android::hardware::media::c2::V1_0::SettingResult;
+using ::android::hardware::media::c2::V1_0::Status;
+using ::android::hardware::media::c2::V1_0::StructDescriptor;
+using ::android::hardware::media::c2::V1_0::ValueRange;
+using ::android::hardware::media::c2::V1_0::Work;
+using ::android::hardware::media::c2::V1_0::WorkBundle;
+using ::android::hardware::media::c2::V1_0::WorkOrdinal;
+using ::android::hardware::media::c2::V1_0::Worklet;
+
+using ::android::hardware::media::c2::V1_2::SurfaceSyncObj;
+
+using ::android::hardware::media::c2::V1_0::IComponentInterface;
+using ::android::hardware::media::c2::V1_0::IComponentListener;
+using ::android::hardware::media::c2::V1_0::IConfigurable;
+using ::android::hardware::media::c2::V1_0::IInputSink;
+using ::android::hardware::media::c2::V1_0::IInputSurface;
+using ::android::hardware::media::c2::V1_0::IInputSurfaceConnection;
+
+namespace utils {
+
+using ::android::hardware::media::c2::V1_0::utils::toC2Status;
+
+using ::android::hardware::media::c2::V1_0::utils::C2Hidl_Range;
+using ::android::hardware::media::c2::V1_0::utils::C2Hidl_RangeInfo;
+using ::android::hardware::media::c2::V1_0::utils::C2Hidl_Rect;
+using ::android::hardware::media::c2::V1_0::utils::C2Hidl_RectInfo;
+
+using ::android::hardware::media::c2::V1_0::utils::objcpy;
+using ::android::hardware::media::c2::V1_0::utils::parseParamsBlob;
+using ::android::hardware::media::c2::V1_0::utils::createParamsBlob;
+using ::android::hardware::media::c2::V1_0::utils::copyParamsFromBlob;
+using ::android::hardware::media::c2::V1_0::utils::updateParamsFromBlob;
+
+using ::android::hardware::media::c2::V1_0::utils::BufferPoolSender;
+using ::android::hardware::media::c2::V1_0::utils::DefaultBufferPoolSender;
+
+using ::android::hardware::media::c2::V1_0::utils::beginTransferBufferQueueBlock;
+using ::android::hardware::media::c2::V1_0::utils::beginTransferBufferQueueBlocks;
+using ::android::hardware::media::c2::V1_0::utils::endTransferBufferQueueBlock;
+using ::android::hardware::media::c2::V1_0::utils::endTransferBufferQueueBlocks;
+using ::android::hardware::media::c2::V1_0::utils::displayBufferQueueBlock;
+
+using ::android::hardware::media::c2::V1_0::utils::operator<<;
+
+} // namespace utils
+} // namespace V1_2
+} // namespace c2
+} // namespace media
+} // namespace hardware
+} // namespace android
+
+#endif // CODEC2_HIDL_V1_2_UTILS_TYPES_H
diff --git a/media/codec2/hidl/1.1/utils/OutputBufferQueue.cpp b/media/codec2/hidl/1.2/utils/types.cpp
similarity index 84%
copy from media/codec2/hidl/1.1/utils/OutputBufferQueue.cpp
copy to media/codec2/hidl/1.2/utils/types.cpp
index 65756e8..9e0a08b 100644
--- a/media/codec2/hidl/1.1/utils/OutputBufferQueue.cpp
+++ b/media/codec2/hidl/1.2/utils/types.cpp
@@ -1,5 +1,5 @@
 /*
- * Copyright 2019 The Android Open Source Project
+ * Copyright 2021 The Android Open Source Project
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
@@ -14,4 +14,4 @@
  * limitations under the License.
  */
 
-#include <codec2/hidl/1.1/OutputBufferQueue.h>
+#include <codec2/hidl/1.2/types.h>
diff --git a/media/codec2/hidl/client/Android.bp b/media/codec2/hidl/client/Android.bp
index 5a34c30..0e52813 100644
--- a/media/codec2/hidl/client/Android.bp
+++ b/media/codec2/hidl/client/Android.bp
@@ -12,6 +12,11 @@
 
     srcs: [
         "client.cpp",
+        "output.cpp",
+    ],
+
+    header_libs: [
+        "libcodec2_internal", // private
     ],
 
     shared_libs: [
@@ -19,11 +24,13 @@
         "android.hardware.media.bufferpool@2.0",
         "android.hardware.media.c2@1.0",
         "android.hardware.media.c2@1.1",
+        "android.hardware.media.c2@1.2",
         "libbase",
         "libbinder",
         "libcodec2",
         "libcodec2_hidl_client@1.0",
         "libcodec2_hidl_client@1.1",
+        "libcodec2_hidl_client@1.2",
         "libcodec2_vndk",
         "libcutils",
         "libgui",
@@ -41,9 +48,11 @@
     export_shared_lib_headers: [
         "android.hardware.media.c2@1.0",
         "android.hardware.media.c2@1.1",
+        "android.hardware.media.c2@1.2",
         "libcodec2",
         "libcodec2_hidl_client@1.0",
         "libcodec2_hidl_client@1.1",
+        "libcodec2_hidl_client@1.2",
         "libcodec2_vndk",
     ],
 
diff --git a/media/codec2/hidl/client/client.cpp b/media/codec2/hidl/client/client.cpp
index 341a577..d49141c 100644
--- a/media/codec2/hidl/client/client.cpp
+++ b/media/codec2/hidl/client/client.cpp
@@ -33,16 +33,17 @@
 
 #include <android-base/properties.h>
 #include <bufferpool/ClientManager.h>
-#include <codec2/hidl/1.0/OutputBufferQueue.h>
 #include <codec2/hidl/1.0/types.h>
-#include <codec2/hidl/1.1/OutputBufferQueue.h>
 #include <codec2/hidl/1.1/types.h>
+#include <codec2/hidl/1.2/types.h>
+#include <codec2/hidl/output.h>
 
 #include <cutils/native_handle.h>
 #include <gui/bufferqueue/2.0/B2HGraphicBufferProducer.h>
 #include <gui/bufferqueue/2.0/H2BGraphicBufferProducer.h>
 #include <hidl/HidlSupport.h>
 
+
 #include <deque>
 #include <iterator>
 #include <limits>
@@ -73,6 +74,7 @@
         V2_0::utils::B2HGraphicBufferProducer;
 using H2BGraphicBufferProducer2 = ::android::hardware::graphics::bufferqueue::
         V2_0::utils::H2BGraphicBufferProducer;
+using ::android::hardware::media::c2::V1_2::SurfaceSyncObj;
 
 namespace /* unnamed */ {
 
@@ -592,9 +594,9 @@
 
 // Codec2Client::Component::OutputBufferQueue
 struct Codec2Client::Component::OutputBufferQueue :
-        hardware::media::c2::V1_1::utils::OutputBufferQueue {
+        hardware::media::c2::OutputBufferQueue {
     OutputBufferQueue()
-          : hardware::media::c2::V1_1::utils::OutputBufferQueue() {
+          : hardware::media::c2::OutputBufferQueue() {
     }
 };
 
@@ -612,6 +614,7 @@
         },
         mBase1_0{base},
         mBase1_1{Base1_1::castFrom(base)},
+        mBase1_2{Base1_2::castFrom(base)},
         mServiceIndex{serviceIndex} {
     Return<sp<IClientManager>> transResult = base->getPoolClientManager();
     if (!transResult.isOk()) {
@@ -633,6 +636,10 @@
     return mBase1_1;
 }
 
+sp<Codec2Client::Base1_2> const& Codec2Client::getBase1_2() const {
+    return mBase1_2;
+}
+
 std::string const& Codec2Client::getServiceName() const {
     return GetServiceNames()[mServiceIndex];
 }
@@ -645,8 +652,9 @@
     c2_status_t status;
     sp<Component::HidlListener> hidlListener = new Component::HidlListener{};
     hidlListener->base = listener;
-    Return<void> transStatus = mBase1_1 ?
-        mBase1_1->createComponent_1_1(
+    Return<void> transStatus;
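+    // Prefer the newest component store interface available: 1.2, then 1.1, then 1.0.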
+    if (mBase1_2) {
+        transStatus = mBase1_2->createComponent_1_2(
             name,
             hidlListener,
             ClientManager::getInstance(),
@@ -659,8 +667,25 @@
                 }
                 *component = std::make_shared<Codec2Client::Component>(c);
                 hidlListener->component = *component;
-            }) :
-        mBase1_0->createComponent(
+            });
+    }
+    else if (mBase1_1) {
+        transStatus = mBase1_1->createComponent_1_1(
+            name,
+            hidlListener,
+            ClientManager::getInstance(),
+            [&status, component, hidlListener](
+                    Status s,
+                    const sp<IComponent>& c) {
+                status = static_cast<c2_status_t>(s);
+                if (status != C2_OK) {
+                    return;
+                }
+                *component = std::make_shared<Codec2Client::Component>(c);
+                hidlListener->component = *component;
+            });
+    } else if (mBase1_0) { // ver1_0
+        transStatus = mBase1_0->createComponent(
             name,
             hidlListener,
             ClientManager::getInstance(),
@@ -674,6 +699,9 @@
                 *component = std::make_shared<Codec2Client::Component>(c);
                 hidlListener->component = *component;
             });
+    } else {
+        status = C2_CORRUPTED;
+    }
     if (!transStatus.isOk()) {
         LOG(ERROR) << "createComponent(" << name.c_str()
                    << ") -- transaction failed.";
@@ -1193,6 +1221,7 @@
         },
         mBase1_0{base},
         mBase1_1{Base1_1::castFrom(base)},
+        mBase1_2{Base1_2::castFrom(base)},
         mBufferPoolSender{std::make_unique<BufferPoolSender>()},
         mOutputBufferQueue{std::make_unique<OutputBufferQueue>()} {
 }
@@ -1215,6 +1244,30 @@
         },
         mBase1_0{base},
         mBase1_1{base},
+        mBase1_2{Base1_2::castFrom(base)},
+        mBufferPoolSender{std::make_unique<BufferPoolSender>()},
+        mOutputBufferQueue{std::make_unique<OutputBufferQueue>()} {
+}
+
+Codec2Client::Component::Component(const sp<Base1_2>& base)
+      : Configurable{
+            [base]() -> sp<IConfigurable> {
+                Return<sp<IComponentInterface>> transResult1 =
+                        base->getInterface();
+                if (!transResult1.isOk()) {
+                    return nullptr;
+                }
+                Return<sp<IConfigurable>> transResult2 =
+                        static_cast<sp<IComponentInterface>>(transResult1)->
+                        getConfigurable();
+                return transResult2.isOk() ?
+                        static_cast<sp<IConfigurable>>(transResult2) :
+                        nullptr;
+            }()
+        },
+        mBase1_0{base},
+        mBase1_1{base},
+        mBase1_2{base},
         mBufferPoolSender{std::make_unique<BufferPoolSender>()},
         mOutputBufferQueue{std::make_unique<OutputBufferQueue>()} {
 }
@@ -1440,21 +1493,28 @@
         igbp = new B2HGraphicBufferProducer2(surface);
     }
 
+    std::shared_ptr<SurfaceSyncObj> syncObj;
+
     if (!surface) {
-        mOutputBufferQueue->configure(nullIgbp, generation, 0);
+        mOutputBufferQueue->configure(nullIgbp, generation, 0, nullptr);
     } else if (surface->getUniqueId(&bqId) != OK) {
         LOG(ERROR) << "setOutputSurface -- "
                    "cannot obtain bufferqueue id.";
         bqId = 0;
-        mOutputBufferQueue->configure(nullIgbp, generation, 0);
+        mOutputBufferQueue->configure(nullIgbp, generation, 0, nullptr);
     } else {
-        mOutputBufferQueue->configure(surface, generation, bqId);
+        mOutputBufferQueue->configure(surface, generation, bqId, nullptr);
     }
-    ALOGD("generation remote change %u", generation);
+    ALOGD("surface generation remote change %u HAL ver: %s",
+          generation, syncObj ? "1.2" : "1.0");
 
-    Return<Status> transStatus = mBase1_0->setOutputSurface(
-            static_cast<uint64_t>(blockPoolId),
-            bqId == 0 ? nullHgbp : igbp);
+    Return<Status> transStatus = syncObj ?
+            mBase1_2->setOutputSurfaceWithSyncObj(
+                    static_cast<uint64_t>(blockPoolId),
+                    bqId == 0 ? nullHgbp : igbp, *syncObj) :
+            mBase1_0->setOutputSurface(
+                    static_cast<uint64_t>(blockPoolId),
+                    bqId == 0 ? nullHgbp : igbp);
     if (!transStatus.isOk()) {
         LOG(ERROR) << "setOutputSurface -- transaction failed.";
         return C2_TRANSACTION_FAILED;
@@ -1464,6 +1524,7 @@
     if (status != C2_OK) {
         LOG(DEBUG) << "setOutputSurface -- call failed: " << status << ".";
     }
+    ALOGD("Surface configure completed");
     return status;
 }
 
@@ -1474,6 +1535,11 @@
     return mOutputBufferQueue->outputBuffer(block, input, output);
 }
 
+void Codec2Client::Component::setOutputSurfaceMaxDequeueCount(
+        int maxDequeueCount) {
+    mOutputBufferQueue->updateMaxDequeueBufferCount(maxDequeueCount);
+}
+
 c2_status_t Codec2Client::Component::connectToInputSurface(
         const std::shared_ptr<InputSurface>& inputSurface,
         std::shared_ptr<InputSurfaceConnection>* connection) {
diff --git a/media/codec2/hidl/client/include/codec2/hidl/client.h b/media/codec2/hidl/client/include/codec2/hidl/client.h
index bbb2b96..eca268e 100644
--- a/media/codec2/hidl/client/include/codec2/hidl/client.h
+++ b/media/codec2/hidl/client/include/codec2/hidl/client.h
@@ -78,6 +78,11 @@
 struct IComponentStore;
 }  // namespace android::hardware::media::c2::V1_1
 
+namespace android::hardware::media::c2::V1_2 {
+struct IComponent;
+struct IComponentStore;
+}  // namespace android::hardware::media::c2::V1_2
+
 namespace android::hardware::media::bufferpool::V2_0 {
 struct IClientManager;
 }  // namespace android::hardware::media::bufferpool::V2_0
@@ -137,6 +142,7 @@
 
     typedef ::android::hardware::media::c2::V1_0::IComponentStore Base1_0;
     typedef ::android::hardware::media::c2::V1_1::IComponentStore Base1_1;
+    typedef ::android::hardware::media::c2::V1_2::IComponentStore Base1_2;
     typedef Base1_0 Base;
 
     struct Listener;
@@ -156,6 +162,7 @@
     sp<Base> const& getBase() const;
     sp<Base1_0> const& getBase1_0() const;
     sp<Base1_1> const& getBase1_1() const;
+    sp<Base1_2> const& getBase1_2() const;
 
     std::string const& getServiceName() const;
 
@@ -228,6 +235,7 @@
 protected:
     sp<Base1_0> mBase1_0;
     sp<Base1_1> mBase1_1;
+    sp<Base1_2> mBase1_2;
 
     // Finds the first store where the predicate returns C2_OK and returns the
     // last predicate result. The predicate will be tried on all stores. The
@@ -318,6 +326,7 @@
 
     typedef ::android::hardware::media::c2::V1_0::IComponent Base1_0;
     typedef ::android::hardware::media::c2::V1_1::IComponent Base1_1;
+    typedef ::android::hardware::media::c2::V1_2::IComponent Base1_2;
     typedef Base1_0 Base;
 
     c2_status_t createBlockPool(
@@ -398,6 +407,9 @@
             const QueueBufferInput& input,
             QueueBufferOutput* output);
 
+    // Set max dequeue count for output surface.
+    void setOutputSurfaceMaxDequeueCount(int maxDequeueCount);
+
     // Connect to a given InputSurface.
     c2_status_t connectToInputSurface(
             const std::shared_ptr<InputSurface>& inputSurface,
@@ -413,12 +425,14 @@
     // base cannot be null.
     Component(const sp<Base>& base);
     Component(const sp<Base1_1>& base);
+    Component(const sp<Base1_2>& base);
 
     ~Component();
 
 protected:
     sp<Base1_0> mBase1_0;
     sp<Base1_1> mBase1_1;
+    sp<Base1_2> mBase1_2;
 
     struct BufferPoolSender;
     std::unique_ptr<BufferPoolSender> mBufferPoolSender;
diff --git a/media/codec2/hidl/1.0/utils/include/codec2/hidl/1.0/OutputBufferQueue.h b/media/codec2/hidl/client/include/codec2/hidl/output.h
similarity index 83%
rename from media/codec2/hidl/1.0/utils/include/codec2/hidl/1.0/OutputBufferQueue.h
rename to media/codec2/hidl/client/include/codec2/hidl/output.h
index 80368f7..0f03b36 100644
--- a/media/codec2/hidl/1.0/utils/include/codec2/hidl/1.0/OutputBufferQueue.h
+++ b/media/codec2/hidl/client/include/codec2/hidl/output.h
@@ -19,16 +19,17 @@
 
 #include <gui/IGraphicBufferProducer.h>
 #include <codec2/hidl/1.0/types.h>
+#include <codec2/hidl/1.2/types.h>
 #include <C2Work.h>
 
 struct C2_HIDE _C2BlockPoolData;
+class C2SurfaceSyncMemory;
 
 namespace android {
 namespace hardware {
 namespace media {
 namespace c2 {
-namespace V1_0 {
-namespace utils {
+
 
 // BufferQueue-Based Block Operations
 // ==================================
@@ -45,7 +46,8 @@
     // Graphic blocks from older surface will be migrated to new surface.
     bool configure(const sp<IGraphicBufferProducer>& igbp,
                    uint32_t generation,
-                   uint64_t bqId);
+                   uint64_t bqId,
+                   std::shared_ptr<V1_2::SurfaceSyncObj> *syncObj);
 
     // Render a graphic block to current surface.
     status_t outputBuffer(
@@ -61,22 +63,27 @@
     void holdBufferQueueBlocks(
             const std::list<std::unique_ptr<C2Work>>& workList);
 
+    // Update the maximum number of dequeueable buffers from the BufferQueue. If
+    // this count is shared via shared memory between the HAL and the framework,
+    // update the shared value and synchronize.
+    void updateMaxDequeueBufferCount(int maxDequeueBufferCount);
+
 private:
 
     std::mutex mMutex;
     sp<IGraphicBufferProducer> mIgbp;
     uint32_t mGeneration;
     uint64_t mBqId;
+    int32_t mMaxDequeueBufferCount;
     std::shared_ptr<int> mOwner;
     // To migrate existing buffers
     sp<GraphicBuffer> mBuffers[BufferQueueDefs::NUM_BUFFER_SLOTS]; // find a better way
     std::weak_ptr<_C2BlockPoolData> mPoolDatas[BufferQueueDefs::NUM_BUFFER_SLOTS];
+    std::shared_ptr<C2SurfaceSyncMemory> mSyncMem;
 
     bool registerBuffer(const C2ConstGraphicBlock& block);
 };
 
-}  // namespace utils
-}  // namespace V1_0
 }  // namespace c2
 }  // namespace media
 }  // namespace hardware
diff --git a/media/codec2/hidl/1.0/utils/OutputBufferQueue.cpp b/media/codec2/hidl/client/output.cpp
similarity index 66%
rename from media/codec2/hidl/1.0/utils/OutputBufferQueue.cpp
rename to media/codec2/hidl/client/output.cpp
index c4a72ef..7df0da2 100644
--- a/media/codec2/hidl/1.0/utils/OutputBufferQueue.cpp
+++ b/media/codec2/hidl/client/output.cpp
@@ -19,13 +19,16 @@
 #include <android-base/logging.h>
 
 #include <android/hardware/graphics/bufferqueue/2.0/IGraphicBufferProducer.h>
-#include <codec2/hidl/1.0/OutputBufferQueue.h>
+#include <codec2/hidl/output.h>
+#include <cutils/ashmem.h>
 #include <gui/bufferqueue/2.0/B2HGraphicBufferProducer.h>
+#include <sys/mman.h>
 
 #include <C2AllocatorGralloc.h>
 #include <C2BlockInternal.h>
 #include <C2Buffer.h>
 #include <C2PlatformSupport.h>
+#include <C2SurfaceSyncObj.h>
 
 #include <iomanip>
 
@@ -33,8 +36,6 @@
 namespace hardware {
 namespace media {
 namespace c2 {
-namespace V1_0 {
-namespace utils {
 
 using HGraphicBufferProducer = ::android::hardware::graphics::bufferqueue::
         V2_0::IGraphicBufferProducer;
@@ -105,7 +106,8 @@
 status_t attachToBufferQueue(const C2ConstGraphicBlock& block,
                              const sp<IGraphicBufferProducer>& igbp,
                              uint32_t generation,
-                             int32_t* bqSlot) {
+                             int32_t* bqSlot,
+                             std::shared_ptr<C2SurfaceSyncMemory> syncMem) {
     if (!igbp) {
         LOG(WARNING) << "attachToBufferQueue -- null producer.";
         return NO_INIT;
@@ -126,7 +128,25 @@
             << ", stride " << graphicBuffer->getStride()
             << ", generation " << graphicBuffer->getGenerationNumber();
 
-    status_t result = igbp->attachBuffer(bqSlot, graphicBuffer);
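+    // When sync memory is present, only attach if the shared state allows another
+    // dequeue and is not switching surfaces; count the attach as a dequeue on success.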
+    C2SyncVariables *syncVar = syncMem ? syncMem->mem() : nullptr;
+    status_t result = OK;
+    if (syncVar) {
+        syncVar->lock();
+        if (!syncVar->isDequeueableLocked() ||
+            syncVar->getSyncStatusLocked() == C2SyncVariables::STATUS_SWITCHING) {
+            syncVar->unlock();
+            LOG(WARNING) << "attachToBufferQueue -- attachBuffer failed: "
+                            "status = " << INVALID_OPERATION << ".";
+            return INVALID_OPERATION;
+        }
+        result = igbp->attachBuffer(bqSlot, graphicBuffer);
+        if (result == OK) {
+            syncVar->notifyDequeuedLocked();
+        }
+        syncVar->unlock();
+    } else {
+        result = igbp->attachBuffer(bqSlot, graphicBuffer);
+    }
     if (result != OK) {
         LOG(WARNING) << "attachToBufferQueue -- attachBuffer failed: "
                         "status = " << result << ".";
@@ -157,7 +177,40 @@
 
 bool OutputBufferQueue::configure(const sp<IGraphicBufferProducer>& igbp,
                                   uint32_t generation,
-                                  uint64_t bqId) {
+                                  uint64_t bqId,
+                                  std::shared_ptr<V1_2::SurfaceSyncObj> *syncObj) {
+    uint64_t consumerUsage = 0;
+    if (igbp->getConsumerUsage(&consumerUsage) != OK) {
+        ALOGW("failed to get consumer usage");
+    }
+
+    // TODO: Abstract the creation process into the C2SurfaceSyncMemory class and
+    // use C2LinearBlock instead of ashmem.
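+    // Allocate an ashmem-backed C2SurfaceSyncMemory and publish its handle along
+    // with the bufferqueue id, generation and consumer usage via the SurfaceSyncObj.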
+    std::shared_ptr<C2SurfaceSyncMemory> syncMem;
+    if (syncObj && igbp) {
+        bool mapped = false;
+        int memFd = ashmem_create_region("C2SurfaceMem", sizeof(C2SyncVariables));
+        size_t memSize = memFd < 0 ? 0 : ashmem_get_size_region(memFd);
+        if (memSize > 0) {
+            syncMem = C2SurfaceSyncMemory::Create(memFd, memSize);
+            if (syncMem) {
+                mapped = true;
+                *syncObj = std::make_shared<V1_2::SurfaceSyncObj>();
+                (*syncObj)->syncMemory = syncMem->handle();
+                (*syncObj)->bqId = bqId;
+                (*syncObj)->generationId = generation;
+                (*syncObj)->consumerUsage = consumerUsage;
+                ALOGD("C2SurfaceSyncMemory created %zu(%zu)", sizeof(C2SyncVariables), memSize);
+            }
+        }
+        if (!mapped) {
+            if (memFd >= 0) {
+                ::close(memFd);
+            }
+            ALOGW("SurfaceSyncObj creation failure");
+        }
+    }
+
     size_t tryNum = 0;
     size_t success = 0;
     sp<GraphicBuffer> buffers[BufferQueueDefs::NUM_BUFFER_SLOTS];
@@ -168,6 +221,19 @@
         if (generation == mGeneration) {
             return false;
         }
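+        // Mark the previous sync memory as STATUS_SWITCHING so pending dequeues
+        // stop using it, then install the sync memory for the new surface.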
+        std::shared_ptr<C2SurfaceSyncMemory> oldMem = mSyncMem;
+        C2SyncVariables *oldSync = mSyncMem ? mSyncMem->mem() : nullptr;
+        if (oldSync) {
+            oldSync->lock();
+            oldSync->setSyncStatusLocked(C2SyncVariables::STATUS_SWITCHING);
+            oldSync->unlock();
+        }
+        mSyncMem.reset();
+        if (syncMem) {
+            mSyncMem = syncMem;
+        }
+        C2SyncVariables *newSync = mSyncMem ? mSyncMem->mem() : nullptr;
+
         mIgbp = igbp;
         mGeneration = generation;
         mBqId = bqId;
@@ -183,14 +249,31 @@
             }
             ++tryNum;
             int bqSlot;
+
+            // Update buffer's generation and usage.
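+            // If the cached buffer lacks the new consumer usage bits, re-wrap its
+            // handle with the combined usage before attaching it to the surface.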
+            if ((mBuffers[i]->getUsage() & consumerUsage) != consumerUsage) {
+                mBuffers[i] = new GraphicBuffer(
+                    mBuffers[i]->handle, GraphicBuffer::CLONE_HANDLE,
+                    mBuffers[i]->width, mBuffers[i]->height,
+                    mBuffers[i]->format, mBuffers[i]->layerCount,
+                    mBuffers[i]->getUsage() | consumerUsage,
+                    mBuffers[i]->stride);
+                if (mBuffers[i]->initCheck() != OK) {
+                    ALOGW("%s() failed to update usage, original usage=%" PRIx64
+                          ", consumer usage=%" PRIx64,
+                          __func__, mBuffers[i]->getUsage(), consumerUsage);
+                    continue;
+                }
+            }
             mBuffers[i]->setGenerationNumber(generation);
+
             status_t result = igbp->attachBuffer(&bqSlot, mBuffers[i]);
             if (result != OK) {
                 continue;
             }
             bool attach =
                     _C2BlockFactory::EndAttachBlockToBufferQueue(
-                            data, mOwner, getHgbp(mIgbp),
+                            data, mOwner, getHgbp(mIgbp), mSyncMem,
                             generation, bqId, bqSlot);
             if (!attach) {
                 igbp->cancelBuffer(bqSlot, Fence::NO_FENCE);
@@ -204,8 +287,12 @@
             mBuffers[i] = buffers[i];
             mPoolDatas[i] = poolDatas[i];
         }
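+        // Initialize the shared dequeue state with the current max dequeue count
+        // and the number of buffers migrated above.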
+        if (newSync) {
+            newSync->setInitialDequeueCount(mMaxDequeueBufferCount, success);
+        }
     }
-    ALOGD("remote graphic buffer migration %zu/%zu", success, tryNum);
+    ALOGD("remote graphic buffer migration %zu/%zu",
+          success, tryNum);
     return true;
 }
 
@@ -236,7 +323,7 @@
                      << ", bqSlot " << oldSlot
                      << ", generation " << mGeneration
                      << ".";
-        _C2BlockFactory::HoldBlockFromBufferQueue(data, mOwner, getHgbp(mIgbp));
+        _C2BlockFactory::HoldBlockFromBufferQueue(data, mOwner, getHgbp(mIgbp), mSyncMem);
         mPoolDatas[oldSlot] = data;
         mBuffers[oldSlot] = createGraphicBuffer(block);
         mBuffers[oldSlot]->setGenerationNumber(mGeneration);
@@ -256,25 +343,39 @@
     uint32_t generation;
     uint64_t bqId;
     int32_t bqSlot;
-    bool display = displayBufferQueueBlock(block);
+    bool display = V1_0::utils::displayBufferQueueBlock(block);
     if (!getBufferQueueAssignment(block, &generation, &bqId, &bqSlot) ||
         bqId == 0) {
         // Block not from bufferqueue -- it must be attached before queuing.
 
+        std::shared_ptr<C2SurfaceSyncMemory> syncMem;
         mMutex.lock();
         sp<IGraphicBufferProducer> outputIgbp = mIgbp;
         uint32_t outputGeneration = mGeneration;
+        syncMem = mSyncMem;
         mMutex.unlock();
 
         status_t status = attachToBufferQueue(
-                block, outputIgbp, outputGeneration, &bqSlot);
+                block, outputIgbp, outputGeneration, &bqSlot, syncMem);
+
         if (status != OK) {
             LOG(WARNING) << "outputBuffer -- attaching failed.";
             return INVALID_OPERATION;
         }
 
-        status = outputIgbp->queueBuffer(static_cast<int>(bqSlot),
-                                     input, output);
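+        // Hold the sync lock around queueBuffer() and notify the shared sync
+        // state on success.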
+        auto syncVar = syncMem ? syncMem->mem() : nullptr;
+        if (syncVar) {
+            syncVar->lock();
+            status = outputIgbp->queueBuffer(static_cast<int>(bqSlot),
+                                         input, output);
+            if (status == OK) {
+                syncVar->notifyQueuedLocked();
+            }
+            syncVar->unlock();
+        } else {
+            status = outputIgbp->queueBuffer(static_cast<int>(bqSlot),
+                                         input, output);
+        }
         if (status != OK) {
             LOG(ERROR) << "outputBuffer -- queueBuffer() failed "
                        "on non-bufferqueue-based block. "
@@ -284,10 +385,12 @@
         return OK;
     }
 
+    std::shared_ptr<C2SurfaceSyncMemory> syncMem;
     mMutex.lock();
     sp<IGraphicBufferProducer> outputIgbp = mIgbp;
     uint32_t outputGeneration = mGeneration;
     uint64_t outputBqId = mBqId;
+    syncMem = mSyncMem;
     mMutex.unlock();
 
     if (!outputIgbp) {
@@ -308,8 +411,21 @@
         return DEAD_OBJECT;
     }
 
-    status_t status = outputIgbp->queueBuffer(static_cast<int>(bqSlot),
-                                          input, output);
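+    // Same handling as above for bufferqueue-based blocks: queue under the sync
+    // lock and notify the shared state on success.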
+    auto syncVar = syncMem ? syncMem->mem() : nullptr;
+    status_t status = OK;
+    if (syncVar) {
+        syncVar->lock();
+        status = outputIgbp->queueBuffer(static_cast<int>(bqSlot),
+                                                  input, output);
+        if (status == OK) {
+            syncVar->notifyQueuedLocked();
+        }
+        syncVar->unlock();
+    } else {
+        status = outputIgbp->queueBuffer(static_cast<int>(bqSlot),
+                                                  input, output);
+    }
+
     if (status != OK) {
         LOG(ERROR) << "outputBuffer -- queueBuffer() failed "
                    "on bufferqueue-based block. "
@@ -326,8 +442,18 @@
                            this, std::placeholders::_1));
 }
 
-}  // namespace utils
-}  // namespace V1_0
+void OutputBufferQueue::updateMaxDequeueBufferCount(int maxDequeueBufferCount) {
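+    // Record the new limit and, if sync memory is active, propagate it to the
+    // HAL through the shared C2SyncVariables.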
+    mMutex.lock();
+    mMaxDequeueBufferCount = maxDequeueBufferCount;
+    auto syncVar = mSyncMem ? mSyncMem->mem() : nullptr;
+    if (syncVar) {
+        syncVar->lock();
+        syncVar->updateMaxDequeueCountLocked(maxDequeueBufferCount);
+        syncVar->unlock();
+    }
+    mMutex.unlock();
+}
+
 }  // namespace c2
 }  // namespace media
 }  // namespace hardware
diff --git a/media/codec2/hidl/plugin/Android.bp b/media/codec2/hidl/plugin/Android.bp
index 4708b12..873bb02 100644
--- a/media/codec2/hidl/plugin/Android.bp
+++ b/media/codec2/hidl/plugin/Android.bp
@@ -1,3 +1,12 @@
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
 cc_library_headers {
     name: "libcodec2_hidl_plugin_headers",
     vendor_available: true,
diff --git a/media/codec2/hidl/plugin/FilterWrapper.cpp b/media/codec2/hidl/plugin/FilterWrapper.cpp
index 0b38bc1..bed8aeb 100644
--- a/media/codec2/hidl/plugin/FilterWrapper.cpp
+++ b/media/codec2/hidl/plugin/FilterWrapper.cpp
@@ -19,7 +19,6 @@
 #include <android-base/logging.h>
 
 #include <set>
-#include <sstream>
 
 #include <dlfcn.h>
 
@@ -383,6 +382,9 @@
         // Configure the next interface with the params.
         std::vector<C2Param *> configParams;
         for (size_t i = 0; i < heapParams.size(); ++i) {
+            if (!heapParams[i]) {
+                continue;
+            }
             if (heapParams[i]->forStream()) {
                 heapParams[i] = C2Param::CopyAsStream(
                         *heapParams[i], false /* output */, heapParams[i]->stream());
@@ -782,10 +784,7 @@
         if (C2_OK != mStore->createComponent(filter.traits.name, &comp)) {
             return {};
         }
-        if (C2_OK != mStore->createInterface(filter.traits.name, &intf)) {
-            return {};
-        }
-        filters.push_back({comp, intf, filter.traits, filter.desc});
+        filters.push_back({comp, comp->intf(), filter.traits, filter.desc});
     }
     return filters;
 }
@@ -869,7 +868,7 @@
     }
     std::vector<Component> filters = createFilters();
     std::shared_ptr wrapped = std::make_shared<WrappedDecoder>(
-            comp, std::move(filters), weak_from_this());
+            comp, std::vector(filters), weak_from_this());
     {
         std::unique_lock lock(mWrappedComponentsMutex);
         std::vector<std::weak_ptr<const C2Component>> &components =
diff --git a/media/codec2/hidl/plugin/samples/Android.bp b/media/codec2/hidl/plugin/samples/Android.bp
index c823e31..32b760d 100644
--- a/media/codec2/hidl/plugin/samples/Android.bp
+++ b/media/codec2/hidl/plugin/samples/Android.bp
@@ -1,3 +1,12 @@
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
 cc_defaults {
     name: "sample-codec2-hidl-plugin-defaults",
 
diff --git a/media/codec2/hidl/services/Android.bp b/media/codec2/hidl/services/Android.bp
index 5a23217..bb9f51f 100644
--- a/media/codec2/hidl/services/Android.bp
+++ b/media/codec2/hidl/services/Android.bp
@@ -39,11 +39,11 @@
 }
 
 cc_binary {
-    name: "android.hardware.media.c2@1.1-default-service",
+    name: "android.hardware.media.c2@1.2-default-service",
     vendor: true,
     relative_install_path: "hw",
 
-    init_rc: ["android.hardware.media.c2@1.1-default-service.rc"],
+    init_rc: ["android.hardware.media.c2@1.2-default-service.rc"],
 
     defaults: ["libcodec2-hidl-defaults"],
     srcs: [
@@ -55,7 +55,7 @@
         "libavservices_minijail_vendor",
         "libbinder",
     ],
-    required: ["android.hardware.media.c2@1.1-default-seccomp_policy"],
+    required: ["android.hardware.media.c2@1.2-default-seccomp_policy"],
 
     // The content in manifest_media_c2_V1_1_default.xml can be included
     // directly in the main device manifest.xml file or via vintf_fragments.
@@ -73,23 +73,23 @@
 // Files in the "seccomp_policy" subdirectory are only provided as examples.
 // They may not work on some devices and/or architectures without modification.
 prebuilt_etc {
-    name: "android.hardware.media.c2@1.1-default-seccomp_policy",
+    name: "android.hardware.media.c2@1.2-default-seccomp_policy",
     vendor: true,
     sub_dir: "seccomp_policy",
 
     // If a specific architecture is targeted, multiple choices are not needed.
     arch: {
         arm: {
-            src: "seccomp_policy/android.hardware.media.c2@1.1-default-arm.policy",
+            src: "seccomp_policy/android.hardware.media.c2@1.2-default-arm.policy",
         },
         arm64: {
-            src: "seccomp_policy/android.hardware.media.c2@1.1-default-arm64.policy",
+            src: "seccomp_policy/android.hardware.media.c2@1.2-default-arm64.policy",
         },
         x86: {
-            src: "seccomp_policy/android.hardware.media.c2@1.1-default-x86.policy",
+            src: "seccomp_policy/android.hardware.media.c2@1.2-default-x86.policy",
         },
         x86_64: {
-            src: "seccomp_policy/android.hardware.media.c2@1.1-default-x86_64.policy",
+            src: "seccomp_policy/android.hardware.media.c2@1.2-default-x86_64.policy",
         },
     },
 
diff --git a/media/codec2/hidl/services/android.hardware.media.c2@1.1-default-service.rc b/media/codec2/hidl/services/android.hardware.media.c2@1.1-default-service.rc
deleted file mode 100644
index 44f2d8e..0000000
--- a/media/codec2/hidl/services/android.hardware.media.c2@1.1-default-service.rc
+++ /dev/null
@@ -1,7 +0,0 @@
-service android-hardware-media-c2-hal-1-1 /vendor/bin/hw/android.hardware.media.c2@1.1-default-service
-    class hal
-    user mediacodec
-    group camera mediadrm drmrpc
-    ioprio rt 4
-    writepid /dev/cpuset/foreground/tasks
-
diff --git a/media/codec2/hidl/services/android.hardware.media.c2@1.2-default-service.rc b/media/codec2/hidl/services/android.hardware.media.c2@1.2-default-service.rc
new file mode 100644
index 0000000..03f6e3d
--- /dev/null
+++ b/media/codec2/hidl/services/android.hardware.media.c2@1.2-default-service.rc
@@ -0,0 +1,7 @@
+service android-hardware-media-c2-hal-1-2 /vendor/bin/hw/android.hardware.media.c2@1.2-default-service
+    class hal
+    user mediacodec
+    group camera mediadrm drmrpc
+    ioprio rt 4
+    writepid /dev/cpuset/foreground/tasks
+
diff --git a/media/codec2/hidl/services/manifest_media_c2_V1_2_default.xml b/media/codec2/hidl/services/manifest_media_c2_V1_2_default.xml
new file mode 100644
index 0000000..a5e8d87
--- /dev/null
+++ b/media/codec2/hidl/services/manifest_media_c2_V1_2_default.xml
@@ -0,0 +1,11 @@
+<manifest version="1.0" type="device">
+    <hal>
+        <name>android.hardware.media.c2</name>
+        <transport>hwbinder</transport>
+        <version>1.2</version>
+        <interface>
+            <name>IComponentStore</name>
+            <instance>default</instance>
+        </interface>
+    </hal>
+</manifest>
diff --git a/media/codec2/hidl/services/seccomp_policy/android.hardware.media.c2@1.1-default-x86_64.policy b/media/codec2/hidl/services/seccomp_policy/android.hardware.media.c2@1.1-default-x86_64.policy
deleted file mode 100644
index d9c4045..0000000
--- a/media/codec2/hidl/services/seccomp_policy/android.hardware.media.c2@1.1-default-x86_64.policy
+++ /dev/null
@@ -1,71 +0,0 @@
-# Copyright (C) 2017 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-read: 1
-mprotect: 1
-prctl: 1
-openat: 1
-open: 1
-getuid32: 1
-getuid: 1
-getrlimit: 1
-writev: 1
-ioctl: 1
-close: 1
-mmap2: 1
-mmap: 1
-fstat64: 1
-fstat: 1
-stat64: 1
-statfs64: 1
-madvise: 1
-fstatat64: 1
-newfstatat: 1
-futex: 1
-munmap: 1
-faccessat: 1
-_llseek: 1
-lseek: 1
-clone: 1
-sigaltstack: 1
-setpriority: 1
-restart_syscall: 1
-exit: 1
-exit_group: 1
-rt_sigreturn: 1
-ugetrlimit: 1
-readlink: 1
-readlinkat: 1
-_llseek: 1
-fstatfs64: 1
-fstatfs: 1
-pread64: 1
-mremap: 1
-dup: 1
-set_tid_address: 1
-write: 1
-nanosleep: 1
-sched_setscheduler: 1
-uname: 1
-memfd_create: 1
-ftruncate: 1
-ftruncate64: 1
-
-# Required by AddressSanitizer
-gettid: 1
-sched_yield: 1
-getpid: 1
-gettid: 1
-
-@include /system/etc/seccomp_policy/crash_dump.x86.policy
diff --git a/media/codec2/hidl/services/seccomp_policy/android.hardware.media.c2@1.1-default-arm.policy b/media/codec2/hidl/services/seccomp_policy/android.hardware.media.c2@1.2-default-arm.policy
similarity index 100%
rename from media/codec2/hidl/services/seccomp_policy/android.hardware.media.c2@1.1-default-arm.policy
rename to media/codec2/hidl/services/seccomp_policy/android.hardware.media.c2@1.2-default-arm.policy
diff --git a/media/codec2/hidl/services/seccomp_policy/android.hardware.media.c2@1.1-default-arm64.policy b/media/codec2/hidl/services/seccomp_policy/android.hardware.media.c2@1.2-default-arm64.policy
similarity index 96%
rename from media/codec2/hidl/services/seccomp_policy/android.hardware.media.c2@1.1-default-arm64.policy
rename to media/codec2/hidl/services/seccomp_policy/android.hardware.media.c2@1.2-default-arm64.policy
index 4faf8b2..f701987 100644
--- a/media/codec2/hidl/services/seccomp_policy/android.hardware.media.c2@1.1-default-arm64.policy
+++ b/media/codec2/hidl/services/seccomp_policy/android.hardware.media.c2@1.2-default-arm64.policy
@@ -1,4 +1,4 @@
-# Copyright (C) 2019 The Android Open Source Project
+# Copyright (C) 2021 The Android Open Source Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/media/codec2/hidl/services/seccomp_policy/android.hardware.media.c2@1.1-default-x86.policy b/media/codec2/hidl/services/seccomp_policy/android.hardware.media.c2@1.2-default-x86.policy
similarity index 95%
rename from media/codec2/hidl/services/seccomp_policy/android.hardware.media.c2@1.1-default-x86.policy
rename to media/codec2/hidl/services/seccomp_policy/android.hardware.media.c2@1.2-default-x86.policy
index d9c4045..056c690 100644
--- a/media/codec2/hidl/services/seccomp_policy/android.hardware.media.c2@1.1-default-x86.policy
+++ b/media/codec2/hidl/services/seccomp_policy/android.hardware.media.c2@1.2-default-x86.policy
@@ -1,4 +1,4 @@
-# Copyright (C) 2017 The Android Open Source Project
+# Copyright (C) 2021 The Android Open Source Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/media/codec2/hidl/services/seccomp_policy/android.hardware.media.c2@1.1-default-x86.policy b/media/codec2/hidl/services/seccomp_policy/android.hardware.media.c2@1.2-default-x86_64.policy
similarity index 95%
copy from media/codec2/hidl/services/seccomp_policy/android.hardware.media.c2@1.1-default-x86.policy
copy to media/codec2/hidl/services/seccomp_policy/android.hardware.media.c2@1.2-default-x86_64.policy
index d9c4045..056c690 100644
--- a/media/codec2/hidl/services/seccomp_policy/android.hardware.media.c2@1.1-default-x86.policy
+++ b/media/codec2/hidl/services/seccomp_policy/android.hardware.media.c2@1.2-default-x86_64.policy
@@ -1,4 +1,4 @@
-# Copyright (C) 2017 The Android Open Source Project
+# Copyright (C) 2021 The Android Open Source Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/media/codec2/hidl/services/vendor.cpp b/media/codec2/hidl/services/vendor.cpp
index 3ddb039..0d0684d 100644
--- a/media/codec2/hidl/services/vendor.cpp
+++ b/media/codec2/hidl/services/vendor.cpp
@@ -15,11 +15,11 @@
  */
 
 //#define LOG_NDEBUG 0
-#define LOG_TAG "android.hardware.media.c2@1.1-service"
+#define LOG_TAG "android.hardware.media.c2@1.2-service"
 
 #include <android-base/logging.h>
 #include <binder/ProcessState.h>
-#include <codec2/hidl/1.1/ComponentStore.h>
+#include <codec2/hidl/1.2/ComponentStore.h>
 #include <hidl/HidlTransportSupport.h>
 #include <minijail.h>
 
@@ -31,13 +31,13 @@
 // "android.hardware.media.c2@1.1-default-seccomp_policy" in Android.bp.
 static constexpr char kBaseSeccompPolicyPath[] =
         "/vendor/etc/seccomp_policy/"
-        "android.hardware.media.c2@1.1-default-seccomp-policy";
+        "android.hardware.media.c2@1.2-default-seccomp-policy";
 
 // Additional seccomp permissions can be added in this file.
 // This file does not exist by default.
 static constexpr char kExtSeccompPolicyPath[] =
         "/vendor/etc/seccomp_policy/"
-        "android.hardware.media.c2@1.1-extended-seccomp-policy";
+        "android.hardware.media.c2@1.2-extended-seccomp-policy";
 
 class StoreImpl : public C2ComponentStore {
 public:
@@ -164,7 +164,7 @@
 
 int main(int /* argc */, char** /* argv */) {
     using namespace ::android;
-    LOG(DEBUG) << "android.hardware.media.c2@1.1-service starting...";
+    LOG(DEBUG) << "android.hardware.media.c2@1.2-service starting...";
 
     // Set up minijail to limit system calls.
     signal(SIGPIPE, SIG_IGN);
@@ -180,7 +180,7 @@
 
     // Create IComponentStore service.
     {
-        using namespace ::android::hardware::media::c2::V1_1;
+        using namespace ::android::hardware::media::c2::V1_2;
         sp<IComponentStore> store;
 
         // TODO: Replace this with
diff --git a/media/codec2/sfplugin/C2OMXNode.cpp b/media/codec2/sfplugin/C2OMXNode.cpp
index dd1f485..f66dc11 100644
--- a/media/codec2/sfplugin/C2OMXNode.cpp
+++ b/media/codec2/sfplugin/C2OMXNode.cpp
@@ -33,12 +33,14 @@
 #include <OMX_IndexExt.h>
 
 #include <android/fdsan.h>
+#include <media/stagefright/foundation/ColorUtils.h>
 #include <media/stagefright/omx/OMXUtils.h>
 #include <media/stagefright/MediaErrors.h>
 #include <ui/Fence.h>
 #include <ui/GraphicBuffer.h>
 #include <utils/Thread.h>
 
+#include "utils/Codec2Mapper.h"
 #include "C2OMXNode.h"
 
 namespace android {
@@ -71,6 +73,23 @@
         jobs->cond.broadcast();
     }
 
+    void setDataspace(android_dataspace dataspace) {
+        Mutexed<Jobs>::Locked jobs(mJobs);
+        ColorUtils::convertDataSpaceToV0(dataspace);
+        jobs->configUpdate.emplace_back(new C2StreamDataSpaceInfo::input(0u, dataspace));
+        int32_t standard;
+        int32_t transfer;
+        int32_t range;
+        ColorUtils::getColorConfigFromDataSpace(dataspace, &range, &standard, &transfer);
+        std::unique_ptr<C2StreamColorAspectsInfo::input> colorAspects =
+            std::make_unique<C2StreamColorAspectsInfo::input>(0u);
+        if (C2Mapper::map(standard, &colorAspects->primaries, &colorAspects->matrix)
+                && C2Mapper::map(transfer, &colorAspects->transfer)
+                && C2Mapper::map(range, &colorAspects->range)) {
+            jobs->configUpdate.push_back(std::move(colorAspects));
+        }
+    }
+
 protected:
     bool threadLoop() override {
         constexpr nsecs_t kIntervalNs = nsecs_t(10) * 1000 * 1000;  // 10ms
@@ -102,6 +121,9 @@
                     uniqueFds.push_back(std::move(queue.workList.front().fd1));
                     queue.workList.pop_front();
                 }
+                for (const std::unique_ptr<C2Param> &param : jobs->configUpdate) {
+                    items.front()->input.configUpdate.emplace_back(C2Param::Copy(*param));
+                }
 
                 jobs.unlock();
                 for (int fenceFd : fenceFds) {
@@ -119,6 +141,7 @@
                 queued = true;
             }
             if (queued) {
+                jobs->configUpdate.clear();
                 return true;
             }
             if (i == 0) {
@@ -161,6 +184,7 @@
         std::map<std::weak_ptr<Codec2Client::Component>,
                  Queue,
                  std::owner_less<std::weak_ptr<Codec2Client::Component>>> queues;
+        std::vector<std::unique_ptr<C2Param>> configUpdate;
         Condition cond;
     };
     Mutexed<Jobs> mJobs;
@@ -172,6 +196,9 @@
       mQueueThread(new QueueThread) {
     android_fdsan_set_error_level(ANDROID_FDSAN_ERROR_LEVEL_WARN_ALWAYS);
     mQueueThread->run("C2OMXNode", PRIORITY_AUDIO);
+
+    Mutexed<android_dataspace>::Locked ds(mDataspace);
+    *ds = HAL_DATASPACE_UNKNOWN;
 }
 
 status_t C2OMXNode::freeNode() {
@@ -209,15 +236,23 @@
 
             pDef->nBufferCountActual = 16;
 
-            std::shared_ptr<Codec2Client::Component> comp = mComp.lock();
-            C2PortActualDelayTuning::input inputDelay(0);
-            C2ActualPipelineDelayTuning pipelineDelay(0);
-            c2_status_t c2err = comp->query(
-                    {&inputDelay, &pipelineDelay}, {}, C2_DONT_BLOCK, nullptr);
-            if (c2err == C2_OK || c2err == C2_BAD_INDEX) {
-                pDef->nBufferCountActual = 4;
-                pDef->nBufferCountActual += (inputDelay ? inputDelay.value : 0u);
-                pDef->nBufferCountActual += (pipelineDelay ? pipelineDelay.value : 0u);
+            // WORKAROUND: having more slots improves performance while consuming
+            // more memory. This is a temporary workaround to reduce memory for
+            // the larger-than-4K scenario.
+            if (mWidth * mHeight > 4096 * 2340) {
+                std::shared_ptr<Codec2Client::Component> comp = mComp.lock();
+                C2PortActualDelayTuning::input inputDelay(0);
+                C2ActualPipelineDelayTuning pipelineDelay(0);
+                c2_status_t c2err = C2_NOT_FOUND;
+                if (comp) {
+                    c2err = comp->query(
+                            {&inputDelay, &pipelineDelay}, {}, C2_DONT_BLOCK, nullptr);
+                }
+                if (c2err == C2_OK || c2err == C2_BAD_INDEX) {
+                    pDef->nBufferCountActual = 4;
+                    pDef->nBufferCountActual += (inputDelay ? inputDelay.value : 0u);
+                    pDef->nBufferCountActual += (pipelineDelay ? pipelineDelay.value : 0u);
+                }
             }
 
             pDef->eDomain = OMX_PortDomainVideo;
@@ -384,6 +419,8 @@
         if (err != OK) {
             (void)fd0.release();
             (void)fd1.release();
+            native_handle_close(handle);
+            native_handle_delete(handle);
             return UNKNOWN_ERROR;
         }
         block = _C2BlockFactory::CreateGraphicBlock(alloc);
@@ -451,8 +488,11 @@
     android_dataspace dataSpace = (android_dataspace)msg.u.event_data.data1;
     uint32_t pixelFormat = msg.u.event_data.data3;
 
-    // TODO: set dataspace on component to see if it impacts color aspects
     ALOGD("dataspace changed to %#x pixel format: %#x", dataSpace, pixelFormat);
+    mQueueThread->setDataspace(dataSpace);
+
+    Mutexed<android_dataspace>::Locked ds(mDataspace);
+    *ds = dataSpace;
     return OK;
 }
 
@@ -485,4 +525,8 @@
     (void)mBufferSource->onInputBufferEmptied(bufferId, -1);
 }
 
+android_dataspace C2OMXNode::getDataspace() {
+    return *mDataspace.lock();
+}
+
 }  // namespace android
diff --git a/media/codec2/sfplugin/C2OMXNode.h b/media/codec2/sfplugin/C2OMXNode.h
index 1717c96..9c04969 100644
--- a/media/codec2/sfplugin/C2OMXNode.h
+++ b/media/codec2/sfplugin/C2OMXNode.h
@@ -93,6 +93,11 @@
      */
     void onInputBufferDone(c2_cntr64_t index);
 
+    /**
+     * Returns dataspace information from GraphicBufferSource.
+     */
+    android_dataspace getDataspace();
+
 private:
     std::weak_ptr<Codec2Client::Component> mComp;
     sp<IOMXBufferSource> mBufferSource;
@@ -101,6 +106,7 @@
     uint32_t mWidth;
     uint32_t mHeight;
     uint64_t mUsage;
+    Mutexed<android_dataspace> mDataspace;
 
     // WORKAROUND: timestamp adjustment
 
diff --git a/media/codec2/sfplugin/CCodec.cpp b/media/codec2/sfplugin/CCodec.cpp
index de0a129..63ae5cd 100644
--- a/media/codec2/sfplugin/CCodec.cpp
+++ b/media/codec2/sfplugin/CCodec.cpp
@@ -44,6 +44,7 @@
 #include <media/stagefright/BufferProducerWrapper.h>
 #include <media/stagefright/MediaCodecConstants.h>
 #include <media/stagefright/PersistentSurface.h>
+#include <utils/NativeHandle.h>
 
 #include "C2OMXNode.h"
 #include "CCodecBufferChannel.h"
@@ -210,8 +211,6 @@
                 (OMX_INDEXTYPE)OMX_IndexParamConsumerUsageBits,
                 &usage, sizeof(usage));
 
-        // NOTE: we do not use/pass through color aspects from GraphicBufferSource as we
-        // communicate that directly to the component.
         mSource->configure(
                 mOmxNode, static_cast<hardware::graphics::common::V1_0::Dataspace>(mDataSpace));
         return OK;
@@ -248,19 +247,14 @@
         }
 
         size_t numSlots = 16;
-        // WORKAROUND: having more slots improve performance while consuming
-        // more memory. This is a temporary workaround to reduce memory for
-        // larger-than-4K scenario.
-        if (mWidth * mHeight > 4096 * 2340) {
-            constexpr OMX_U32 kPortIndexInput = 0;
+        constexpr OMX_U32 kPortIndexInput = 0;
 
-            OMX_PARAM_PORTDEFINITIONTYPE param;
-            param.nPortIndex = kPortIndexInput;
-            status_t err = mNode->getParameter(OMX_IndexParamPortDefinition,
-                                               &param, sizeof(param));
-            if (err == OK) {
-                numSlots = param.nBufferCountActual;
-            }
+        OMX_PARAM_PORTDEFINITIONTYPE param;
+        param.nPortIndex = kPortIndexInput;
+        status_t err = mNode->getParameter(OMX_IndexParamPortDefinition,
+                                           &param, sizeof(param));
+        if (err == OK) {
+            numSlots = param.nBufferCountActual;
         }
 
         for (size_t i = 0; i < numSlots; ++i) {
@@ -415,6 +409,10 @@
         mNode->onInputBufferDone(index);
     }
 
+    android_dataspace getDataspace() override {
+        return mNode->getDataspace();
+    }
+
 private:
     sp<HGraphicBufferSource> mSource;
     sp<C2OMXNode> mNode;
@@ -492,7 +490,10 @@
     // We used to not report changes to these keys to the client.
     const static std::set<std::string> sIgnoredKeys({
             KEY_BIT_RATE,
+            KEY_FRAME_RATE,
             KEY_MAX_BIT_RATE,
+            KEY_MAX_WIDTH,
+            KEY_MAX_HEIGHT,
             "csd-0",
             "csd-1",
             "csd-2",
@@ -554,13 +555,15 @@
         }
 
         // Report to MediaCodec
-        // Note: for now we do not propagate the error code to MediaCodec as we would need
-        // to translate to a MediaCodec error.
+        // Note: for now we do not propagate the error code to MediaCodec
+        // except for C2_NO_MEMORY, as we would need to translate to a MediaCodec error.
         sp<CCodec> codec(mCodec.promote());
         if (!codec || !codec->mCallback) {
             return;
         }
-        codec->mCallback->onError(UNKNOWN_ERROR, ACTION_CODE_FATAL);
+        codec->mCallback->onError(
+                errorCode == C2_NO_MEMORY ? NO_MEMORY : UNKNOWN_ERROR,
+                ACTION_CODE_FATAL);
     }
 
     virtual void onDeath(
@@ -796,10 +799,30 @@
             mChannel->setMetaMode(CCodecBufferChannel::MODE_ANW);
         }
 
+        status_t err = OK;
         sp<RefBase> obj;
         sp<Surface> surface;
         if (msg->findObject("native-window", &obj)) {
             surface = static_cast<Surface *>(obj.get());
+            // set up tunneled playback
+            if (surface != nullptr) {
+                Mutexed<std::unique_ptr<Config>>::Locked configLocked(mConfig);
+                const std::unique_ptr<Config> &config = *configLocked;
+                if ((config->mDomain & Config::IS_DECODER)
+                        && (config->mDomain & Config::IS_VIDEO)) {
+                    int32_t tunneled;
+                    if (msg->findInt32("feature-tunneled-playback", &tunneled) && tunneled != 0) {
+                        ALOGI("Configuring TUNNELED video playback.");
+
+                        err = configureTunneledVideoPlayback(comp, &config->mSidebandHandle, msg);
+                        if (err != OK) {
+                            ALOGE("configureTunneledVideoPlayback failed!");
+                            return err;
+                        }
+                        config->mTunneled = true;
+                    }
+                }
+            }
             setSurface(surface);
         }
 
@@ -830,12 +853,14 @@
                 return BAD_VALUE;
             }
         }
+        int32_t width = 0;
+        int32_t height = 0;
         if (config->mDomain & (Config::IS_IMAGE | Config::IS_VIDEO)) {
-            if (!msg->findInt32(KEY_WIDTH, &i32)) {
+            if (!msg->findInt32(KEY_WIDTH, &width)) {
                 ALOGD("width is missing, which is required for image/video components.");
                 return BAD_VALUE;
             }
-            if (!msg->findInt32(KEY_HEIGHT, &i32)) {
+            if (!msg->findInt32(KEY_HEIGHT, &height)) {
                 ALOGD("height is missing, which is required for image/video components.");
                 return BAD_VALUE;
             }
@@ -972,7 +997,15 @@
                 // needed for decoders.
                 if (!(config->mDomain & Config::IS_ENCODER)) {
                     if (surface == nullptr) {
-                        format = flexPixelFormat.value_or(COLOR_FormatYUV420Flexible);
+                        const char *prefix = "";
+                        if (flexSemiPlanarPixelFormat) {
+                            format = COLOR_FormatYUV420SemiPlanar;
+                            prefix = "semi-";
+                        } else {
+                            format = COLOR_FormatYUV420Planar;
+                        }
+                        ALOGD("Client requested ByteBuffer mode decoder w/o color format set: "
+                                "using default %splanar color format", prefix);
                     } else {
                         format = COLOR_FormatSurface;
                     }
@@ -1006,6 +1039,26 @@
             }
         }
 
+        /*
+         * Handle dataspace
+         */
+        int32_t usingRecorder;
+        if (msg->findInt32("android._using-recorder", &usingRecorder) && usingRecorder) {
+            android_dataspace dataSpace = HAL_DATASPACE_BT709;
+            int32_t width, height;
+            if (msg->findInt32("width", &width)
+                    && msg->findInt32("height", &height)) {
+                ColorAspects aspects;
+                getColorAspectsFromFormat(msg, aspects);
+                setDefaultCodecColorAspectsIfNeeded(aspects, width, height);
+                // TODO: read dataspace / color aspect from the component
+                setColorAspectsIntoFormat(aspects, const_cast<sp<AMessage> &>(msg));
+                dataSpace = getDataSpaceForColorAspects(aspects, true /* mayexpand */);
+            }
+            msg->setInt32("android._dataspace", (int32_t)dataSpace);
+            ALOGD("setting dataspace to %x", dataSpace);
+        }
+
         int32_t subscribeToAllVendorParams;
         if (msg->findInt32("x-*", &subscribeToAllVendorParams) && subscribeToAllVendorParams) {
             if (config->subscribeToAllVendorParams(comp, C2_MAY_BLOCK) != OK) {
@@ -1022,7 +1075,7 @@
             sdkParams = msg->dup();
             sdkParams->removeEntryAt(sdkParams->findEntryByName(PARAMETER_KEY_VIDEO_BITRATE));
         }
-        status_t err = config->getConfigUpdateFromSdkParams(
+        err = config->getConfigUpdateFromSdkParams(
                 comp, sdkParams, Config::IS_CONFIG, C2_DONT_BLOCK, &configUpdate);
         if (err != OK) {
             ALOGW("failed to convert configuration to c2 params");
@@ -1043,6 +1096,45 @@
             configUpdate.push_back(std::move(gop));
         }
 
+        if ((config->mDomain & Config::IS_ENCODER)
+                && (config->mDomain & Config::IS_VIDEO)) {
+            // we may not use all 3 of these entries
+            std::unique_ptr<C2StreamPictureQuantizationTuning::output> qp =
+                C2StreamPictureQuantizationTuning::output::AllocUnique(3 /* flexCount */,
+                                                                       0u /* stream */);
+
+            int ix = 0;
+
+            int32_t iMax = INT32_MAX;
+            int32_t iMin = INT32_MIN;
+            (void) sdkParams->findInt32(KEY_VIDEO_QP_I_MAX, &iMax);
+            (void) sdkParams->findInt32(KEY_VIDEO_QP_I_MIN, &iMin);
+            if (iMax != INT32_MAX || iMin != INT32_MIN) {
+                qp->m.values[ix++] = {I_FRAME, iMin, iMax};
+            }
+
+            int32_t pMax = INT32_MAX;
+            int32_t pMin = INT32_MIN;
+            (void) sdkParams->findInt32(KEY_VIDEO_QP_P_MAX, &pMax);
+            (void) sdkParams->findInt32(KEY_VIDEO_QP_P_MIN, &pMin);
+            if (pMax != INT32_MAX || pMin != INT32_MIN) {
+                qp->m.values[ix++] = {P_FRAME, pMin, pMax};
+            }
+
+            int32_t bMax = INT32_MAX;
+            int32_t bMin = INT32_MIN;
+            (void) sdkParams->findInt32(KEY_VIDEO_QP_B_MAX, &bMax);
+            (void) sdkParams->findInt32(KEY_VIDEO_QP_B_MIN, &bMin);
+            if (bMax != INT32_MAX || bMin != INT32_MIN) {
+                qp->m.values[ix++] = {B_FRAME, bMin, bMax};
+            }
+
+            // adjust to reflect actual use.
+            qp->setFlexCount(ix);
+
+            configUpdate.push_back(std::move(qp));
+        }
+
         err = config->setParameters(comp, configUpdate, C2_DONT_BLOCK);
         if (err != OK) {
             ALOGW("failed to configure c2 params");
@@ -1133,13 +1225,14 @@
         int32_t clientPrepend;
         if ((config->mDomain & Config::IS_VIDEO)
                 && (config->mDomain & Config::IS_ENCODER)
-                && msg->findInt32(KEY_PREPEND_HEADERS_TO_SYNC_FRAMES, &clientPrepend)
+                && msg->findInt32(KEY_PREPEND_HEADER_TO_SYNC_FRAMES, &clientPrepend)
                 && clientPrepend
                 && (!prepend || prepend.value != PREPEND_HEADER_TO_ALL_SYNC)) {
-            ALOGE("Failed to set KEY_PREPEND_HEADERS_TO_SYNC_FRAMES");
+            ALOGE("Failed to set KEY_PREPEND_HEADER_TO_SYNC_FRAMES");
             return BAD_VALUE;
         }
 
+        int32_t componentColorFormat = 0;
         if ((config->mDomain & (Config::IS_VIDEO | Config::IS_IMAGE))) {
             // propagate HDR static info to output format for both encoders and decoders
             // if component supports this info, we will update from component, but only the raw port,
@@ -1157,8 +1250,8 @@
             }
             if (config->mDomain & Config::IS_ENCODER) {
                 config->mInputFormat->setInt32(KEY_COLOR_FORMAT, format);
-                if (msg->findInt32("android._color-format", &format)) {
-                    config->mInputFormat->setInt32("android._color-format", format);
+                if (msg->findInt32("android._color-format", &componentColorFormat)) {
+                    config->mInputFormat->setInt32("android._color-format", componentColorFormat);
                 }
             } else {
                 config->mOutputFormat->setInt32(KEY_COLOR_FORMAT, format);
@@ -1216,8 +1309,59 @@
             config->mInputFormat->setInt32("color-transfer-request", colorTransferRequest);
         }
 
-        ALOGD("setup formats input: %s and output: %s",
-                config->mInputFormat->debugString().c_str(),
+        if (componentColorFormat != 0 && componentColorFormat != COLOR_FormatSurface) {
+            // Need to get stride/vstride
+            uint32_t pixelFormat = PIXEL_FORMAT_UNKNOWN;
+            if (C2Mapper::mapPixelFormatFrameworkToCodec(componentColorFormat, &pixelFormat)) {
+                // TODO: retrieve these values without allocating a buffer.
+                //       Currently allocating a buffer is necessary to retrieve the layout.
+                int64_t blockUsage =
+                    usage.value | C2MemoryUsage::CPU_READ | C2MemoryUsage::CPU_WRITE;
+                std::shared_ptr<C2GraphicBlock> block = FetchGraphicBlock(
+                        width, height, pixelFormat, blockUsage, {comp->getName()});
+                sp<GraphicBlockBuffer> buffer;
+                if (block) {
+                    buffer = GraphicBlockBuffer::Allocate(
+                            config->mInputFormat,
+                            block,
+                            [](size_t size) -> sp<ABuffer> { return new ABuffer(size); });
+                } else {
+                    ALOGD("Failed to allocate a graphic block "
+                            "(width=%d height=%d pixelFormat=%u usage=%llx)",
+                            width, height, pixelFormat, (long long)blockUsage);
+                    // This means that byte buffer mode is not supported in this configuration
+                    // anyway. Skip setting stride/vstride to input format.
+                }
+                if (buffer) {
+                    sp<ABuffer> imageData = buffer->getImageData();
+                    MediaImage2 *img = nullptr;
+                    if (imageData && imageData->data()
+                            && imageData->size() >= sizeof(MediaImage2)) {
+                        img = (MediaImage2*)imageData->data();
+                    }
+                    if (img && img->mNumPlanes > 0 && img->mType != img->MEDIA_IMAGE_TYPE_UNKNOWN) {
+                        int32_t stride = img->mPlane[0].mRowInc;
+                        config->mInputFormat->setInt32(KEY_STRIDE, stride);
+                        if (img->mNumPlanes > 1 && stride > 0) {
+                            int64_t offsetDelta =
+                                (int64_t)img->mPlane[1].mOffset - (int64_t)img->mPlane[0].mOffset;
+                            if (offsetDelta % stride == 0) {
+                                int32_t vstride = int32_t(offsetDelta / stride);
+                                config->mInputFormat->setInt32(KEY_SLICE_HEIGHT, vstride);
+                            } else {
+                                ALOGD("Cannot report accurate slice height: "
+                                        "offsetDelta = %lld stride = %d",
+                                        (long long)offsetDelta, stride);
+                            }
+                        }
+                    }
+                }
+            }
+        }
+
+        ALOGD("setup formats input: %s",
+                config->mInputFormat->debugString().c_str());
+        ALOGD("setup formats output: %s",
                 config->mOutputFormat->debugString().c_str());
         return OK;
     };
@@ -1228,6 +1372,8 @@
     Mutexed<std::unique_ptr<Config>>::Locked configLocked(mConfig);
     const std::unique_ptr<Config> &config = *configLocked;
 
+    config->queryConfiguration(comp);
+
     mCallback->onComponentConfigured(config->mInputFormat, config->mOutputFormat);
 }
 
@@ -1476,6 +1622,7 @@
         outputFormat = config->mOutputFormat = config->mOutputFormat->dup();
         if (config->mInputSurface) {
             err2 = config->mInputSurface->start();
+            config->mInputSurfaceDataspace = config->mInputSurface->getDataspace();
         }
         buffersBoundToCodec = config->mBuffersBoundToCodec;
     }
@@ -1563,6 +1710,7 @@
         if (config->mInputSurface) {
             config->mInputSurface->disconnect();
             config->mInputSurface = nullptr;
+            config->mInputSurfaceDataspace = HAL_DATASPACE_UNKNOWN;
         }
     }
     {
@@ -1612,6 +1760,7 @@
         if (config->mInputSurface) {
             config->mInputSurface->disconnect();
             config->mInputSurface = nullptr;
+            config->mInputSurfaceDataspace = HAL_DATASPACE_UNKNOWN;
         }
     }
 
@@ -1649,6 +1798,19 @@
 }
 
 status_t CCodec::setSurface(const sp<Surface> &surface) {
+    Mutexed<std::unique_ptr<Config>>::Locked configLocked(mConfig);
+    const std::unique_ptr<Config> &config = *configLocked;
+    if (config->mTunneled && config->mSidebandHandle != nullptr) {
+        sp<ANativeWindow> nativeWindow = static_cast<ANativeWindow *>(surface.get());
+        status_t err = native_window_set_sideband_stream(
+                nativeWindow.get(),
+                const_cast<native_handle_t *>(config->mSidebandHandle->handle()));
+        if (err != OK) {
+            ALOGE("NativeWindow(%p) native_window_set_sideband_stream(%p) failed! (err %d).",
+                    nativeWindow.get(), config->mSidebandHandle->handle(), err);
+            return err;
+        }
+    }
     return mChannel->setSurface(surface);
 }
 
@@ -1733,7 +1895,9 @@
     {
         Mutexed<std::unique_ptr<Config>>::Locked configLocked(mConfig);
         const std::unique_ptr<Config> &config = *configLocked;
+        sp<AMessage> outputFormat = config->mOutputFormat;
         config->queryConfiguration(comp);
+        RevertOutputFormatIfNeeded(outputFormat, config->mOutputFormat);
     }
 
     (void)mChannel->start(nullptr, nullptr, [&]{
@@ -1779,6 +1943,12 @@
         params->removeEntryAt(params->findEntryByName(KEY_BIT_RATE));
     }
 
+    int32_t syncId = 0;
+    if (params->findInt32("audio-hw-sync", &syncId)
+            || params->findInt32("hw-av-sync-id", &syncId)) {
+        configureTunneledVideoPlayback(comp, nullptr, params);
+    }
+
     Mutexed<std::unique_ptr<Config>>::Locked configLocked(mConfig);
     const std::unique_ptr<Config> &config = *configLocked;
 
@@ -1850,6 +2020,39 @@
     config->setParameters(comp, params, C2_MAY_BLOCK);
 }
 
+status_t CCodec::querySupportedParameters(std::vector<std::string> *names) {
+    Mutexed<std::unique_ptr<Config>>::Locked configLocked(mConfig);
+    const std::unique_ptr<Config> &config = *configLocked;
+    return config->querySupportedParameters(names);
+}
+
+status_t CCodec::describeParameter(
+        const std::string &name, CodecParameterDescriptor *desc) {
+    Mutexed<std::unique_ptr<Config>>::Locked configLocked(mConfig);
+    const std::unique_ptr<Config> &config = *configLocked;
+    return config->describe(name, desc);
+}
+
+status_t CCodec::subscribeToParameters(const std::vector<std::string> &names) {
+    std::shared_ptr<Codec2Client::Component> comp = mState.lock()->comp;
+    if (!comp) {
+        return INVALID_OPERATION;
+    }
+    Mutexed<std::unique_ptr<Config>>::Locked configLocked(mConfig);
+    const std::unique_ptr<Config> &config = *configLocked;
+    return config->subscribeToVendorConfigUpdate(comp, names);
+}
+
+status_t CCodec::unsubscribeFromParameters(const std::vector<std::string> &names) {
+    std::shared_ptr<Codec2Client::Component> comp = mState.lock()->comp;
+    if (!comp) {
+        return INVALID_OPERATION;
+    }
+    Mutexed<std::unique_ptr<Config>>::Locked configLocked(mConfig);
+    const std::unique_ptr<Config> &config = *configLocked;
+    return config->unsubscribeFromVendorConfigUpdate(comp, names);
+}
+
 void CCodec::onWorkDone(std::list<std::unique_ptr<C2Work>> &workItems) {
     if (!workItems.empty()) {
         Mutexed<std::list<std::unique_ptr<C2Work>>>::Locked queue(mWorkDoneQueue);
@@ -2043,6 +2246,55 @@
     deadline->set(now + (timeout * mult), name);
 }
 
+status_t CCodec::configureTunneledVideoPlayback(
+        std::shared_ptr<Codec2Client::Component> comp,
+        sp<NativeHandle> *sidebandHandle,
+        const sp<AMessage> &msg) {
+    std::vector<std::unique_ptr<C2SettingResult>> failures;
+
+    std::unique_ptr<C2PortTunneledModeTuning::output> tunneledPlayback =
+        C2PortTunneledModeTuning::output::AllocUnique(
+            1,
+            C2PortTunneledModeTuning::Struct::SIDEBAND,
+            C2PortTunneledModeTuning::Struct::REALTIME,
+            0);
+    // TODO: use KEY_AUDIO_HW_SYNC, KEY_HARDWARE_AV_SYNC_ID when they are in MediaCodecConstants.h
+    if (msg->findInt32("audio-hw-sync", &tunneledPlayback->m.syncId[0])) {
+        tunneledPlayback->m.syncType = C2PortTunneledModeTuning::Struct::sync_type_t::AUDIO_HW_SYNC;
+    } else if (msg->findInt32("hw-av-sync-id", &tunneledPlayback->m.syncId[0])) {
+        tunneledPlayback->m.syncType = C2PortTunneledModeTuning::Struct::sync_type_t::HW_AV_SYNC;
+    } else {
+        tunneledPlayback->m.syncType = C2PortTunneledModeTuning::Struct::sync_type_t::REALTIME;
+        tunneledPlayback->setFlexCount(0);
+    }
+    c2_status_t c2err = comp->config({ tunneledPlayback.get() }, C2_MAY_BLOCK, &failures);
+    if (c2err != C2_OK) {
+        return UNKNOWN_ERROR;
+    }
+
+    if (sidebandHandle == nullptr) {
+        return OK;
+    }
+
+    std::vector<std::unique_ptr<C2Param>> params;
+    c2err = comp->query({}, {C2PortTunnelHandleTuning::output::PARAM_TYPE}, C2_DONT_BLOCK, &params);
+    if (c2err == C2_OK && params.size() == 1u) {
+        C2PortTunnelHandleTuning::output *videoTunnelSideband =
+            C2PortTunnelHandleTuning::output::From(params[0].get());
+        // Currently, Codec2 only supports the non-fd case for sideband native_handle.
+        native_handle_t *handle = native_handle_create(0, videoTunnelSideband->flexCount());
+        *sidebandHandle = NativeHandle::create(handle, true /* ownsHandle */);
+        if (handle != nullptr && videoTunnelSideband->flexCount()) {
+            memcpy(handle->data, videoTunnelSideband->m.values,
+                    sizeof(int32_t) * videoTunnelSideband->flexCount());
+            return OK;
+        } else {
+            return NO_MEMORY;
+        }
+    }
+    return UNKNOWN_ERROR;
+}
+
 void CCodec::initiateReleaseIfStuck() {
     std::string name;
     bool pendingDeadline = false;
@@ -2055,7 +2307,9 @@
             pendingDeadline = true;
         }
     }
-    if (name.empty()) {
+    Mutexed<std::unique_ptr<Config>>::Locked configLocked(mConfig);
+    const std::unique_ptr<Config> &config = *configLocked;
+    if (config->mTunneled == false && name.empty()) {
         constexpr std::chrono::steady_clock::duration kWorkDurationThreshold = 3s;
         std::chrono::steady_clock::duration elapsed = mChannel->elapsed();
         if (elapsed >= kWorkDurationThreshold) {
@@ -2075,7 +2329,13 @@
         return;
     }
 
-    ALOGW("previous call to %s exceeded timeout", name.c_str());
+    C2String compName;
+    {
+        Mutexed<State>::Locked state(mState);
+        compName = state->comp->getName();
+    }
+    ALOGW("[%s] previous call to %s exceeded timeout", compName.c_str(), name.c_str());
+
     initiateRelease(false);
     mCallback->onError(UNKNOWN_ERROR, ACTION_CODE_FATAL);
 }
@@ -2448,4 +2708,3 @@
 }
 
 }  // namespace android
-
diff --git a/media/codec2/sfplugin/CCodecBufferChannel.cpp b/media/codec2/sfplugin/CCodecBufferChannel.cpp
index 44ebf84..d0c1357 100644
--- a/media/codec2/sfplugin/CCodecBufferChannel.cpp
+++ b/media/codec2/sfplugin/CCodecBufferChannel.cpp
@@ -1177,9 +1177,10 @@
     if (outputFormat != nullptr) {
         sp<IGraphicBufferProducer> outputSurface;
         uint32_t outputGeneration;
+        int maxDequeueCount = 0;
         {
             Mutexed<OutputSurface>::Locked output(mOutputSurface);
-            output->maxDequeueBuffers = numOutputSlots +
+            maxDequeueCount = output->maxDequeueBuffers = numOutputSlots +
                     reorderDepth.value + kRenderingDepth;
             outputSurface = output->surface ?
                     output->surface->getIGraphicBufferProducer() : nullptr;
@@ -1188,6 +1189,9 @@
             }
             outputGeneration = output->generation;
         }
+        if (maxDequeueCount > 0) {
+            mComponent->setOutputSurfaceMaxDequeueCount(maxDequeueCount);
+        }
 
         bool graphic = (oStreamFormat.value == C2BufferData::GRAPHIC);
         C2BlockPool::local_id_t outputPoolId_;
@@ -1364,7 +1368,7 @@
     // about buffers from the previous generation do not interfere with the
     // newly initialized pipeline capacity.
 
-    {
+    if (inputFormat || outputFormat) {
         Mutexed<PipelineWatcher>::Locked watcher(mPipelineWatcher);
         watcher->inputDelay(inputDelayValue)
                 .pipelineDelay(pipelineDelayValue)
@@ -1464,14 +1468,14 @@
 void CCodecBufferChannel::stop() {
     mSync.stop();
     mFirstValidFrameIndex = mFrameIndex.load(std::memory_order_relaxed);
-    if (mInputSurface != nullptr) {
-        mInputSurface.reset();
-    }
-    mPipelineWatcher.lock()->flush();
 }
 
 void CCodecBufferChannel::reset() {
     stop();
+    if (mInputSurface != nullptr) {
+        mInputSurface.reset();
+    }
+    mPipelineWatcher.lock()->flush();
     {
         Mutexed<Input>::Locked input(mInput);
         input->buffers.reset(new DummyInputBuffers(""));
@@ -1499,8 +1503,10 @@
 
 void CCodecBufferChannel::flush(const std::list<std::unique_ptr<C2Work>> &flushedWork) {
     ALOGV("[%s] flush", mName);
+    std::vector<uint64_t> indices;
     std::list<std::unique_ptr<C2Work>> configs;
     for (const std::unique_ptr<C2Work> &work : flushedWork) {
+        indices.push_back(work->input.ordinal.frameIndex.peeku());
         if (!(work->input.flags & C2FrameData::FLAG_CODEC_CONFIG)) {
             continue;
         }
@@ -1513,6 +1519,7 @@
         std::unique_ptr<C2Work> copy(new C2Work);
         copy->input.flags = C2FrameData::flags_t(work->input.flags | C2FrameData::FLAG_DROP_FRAME);
         copy->input.ordinal = work->input.ordinal;
+        copy->input.ordinal.frameIndex = mFrameIndex++;
         copy->input.buffers.insert(
                 copy->input.buffers.begin(),
                 work->input.buffers.begin(),
@@ -1541,7 +1548,12 @@
             output->buffers->flushStash();
         }
     }
-    mPipelineWatcher.lock()->flush();
+    {
+        Mutexed<PipelineWatcher>::Locked watcher(mPipelineWatcher);
+        for (uint64_t index : indices) {
+            watcher->onWorkDone(index);
+        }
+    }
 }
 
 void CCodecBufferChannel::onWorkDone(
@@ -1728,6 +1740,17 @@
                 }
                 break;
             }
+            case C2PortTunnelSystemTime::CORE_INDEX: {
+                C2PortTunnelSystemTime::output frameRenderTime;
+                if (frameRenderTime.updateFrom(*param)) {
+                    ALOGV("[%s] onWorkDone: frame rendered (sys:%lld ns, media:%lld us)",
+                          mName, (long long)frameRenderTime.value,
+                          (long long)worklet->output.ordinal.timestamp.peekll());
+                    mCCodecCallback->onOutputFramesRendered(
+                            worklet->output.ordinal.timestamp.peek(), frameRenderTime.value);
+                }
+                break;
+            }
             default:
                 ALOGV("[%s] onWorkDone: unrecognized config update (%08X)",
                       mName, param->index());
@@ -1755,15 +1778,22 @@
     if (needMaxDequeueBufferCountUpdate) {
         size_t numOutputSlots = 0;
         uint32_t reorderDepth = 0;
+        int maxDequeueCount = 0;
         {
             Mutexed<Output>::Locked output(mOutput);
             numOutputSlots = output->numSlots;
             reorderDepth = output->buffers->getReorderDepth();
         }
-        Mutexed<OutputSurface>::Locked output(mOutputSurface);
-        output->maxDequeueBuffers = numOutputSlots + reorderDepth + kRenderingDepth;
-        if (output->surface) {
-            output->surface->setMaxDequeuedBufferCount(output->maxDequeueBuffers);
+        {
+            Mutexed<OutputSurface>::Locked output(mOutputSurface);
+            maxDequeueCount = output->maxDequeueBuffers =
+                    numOutputSlots + reorderDepth + kRenderingDepth;
+            if (output->surface) {
+                output->surface->setMaxDequeuedBufferCount(output->maxDequeueBuffers);
+            }
+        }
+        if (maxDequeueCount > 0) {
+            mComponent->setOutputSurfaceMaxDequeueCount(maxDequeueCount);
         }
     }
 
diff --git a/media/codec2/sfplugin/CCodecBuffers.cpp b/media/codec2/sfplugin/CCodecBuffers.cpp
index 2025da2..e7207a5 100644
--- a/media/codec2/sfplugin/CCodecBuffers.cpp
+++ b/media/codec2/sfplugin/CCodecBuffers.cpp
@@ -77,34 +77,39 @@
 void CCodecBuffers::handleImageData(const sp<Codec2Buffer> &buffer) {
     sp<ABuffer> imageDataCandidate = buffer->getImageData();
     if (imageDataCandidate == nullptr) {
+        if (mFormatWithImageData) {
+            // We previously sent the format with image data, so use the same format.
+            buffer->setFormat(mFormatWithImageData);
+        }
         return;
     }
-    sp<ABuffer> imageData;
-    if (!mFormat->findBuffer("image-data", &imageData)
-            || imageDataCandidate->size() != imageData->size()
-            || memcmp(imageDataCandidate->data(), imageData->data(), imageData->size()) != 0) {
+    if (!mLastImageData
+            || imageDataCandidate->size() != mLastImageData->size()
+            || memcmp(imageDataCandidate->data(),
+                      mLastImageData->data(),
+                      mLastImageData->size()) != 0) {
         ALOGD("[%s] updating image-data", mName);
-        sp<AMessage> newFormat = dupFormat();
-        newFormat->setBuffer("image-data", imageDataCandidate);
+        mFormatWithImageData = dupFormat();
+        mLastImageData = imageDataCandidate;
+        mFormatWithImageData->setBuffer("image-data", imageDataCandidate);
         MediaImage2 *img = (MediaImage2*)imageDataCandidate->data();
         if (img->mNumPlanes > 0 && img->mType != img->MEDIA_IMAGE_TYPE_UNKNOWN) {
             int32_t stride = img->mPlane[0].mRowInc;
-            newFormat->setInt32(KEY_STRIDE, stride);
+            mFormatWithImageData->setInt32(KEY_STRIDE, stride);
             ALOGD("[%s] updating stride = %d", mName, stride);
             if (img->mNumPlanes > 1 && stride > 0) {
                 int64_t offsetDelta =
                     (int64_t)img->mPlane[1].mOffset - (int64_t)img->mPlane[0].mOffset;
                 int32_t vstride = int32_t(offsetDelta / stride);
-                newFormat->setInt32(KEY_SLICE_HEIGHT, vstride);
+                mFormatWithImageData->setInt32(KEY_SLICE_HEIGHT, vstride);
                 ALOGD("[%s] updating vstride = %d", mName, vstride);
                 buffer->setRange(
                         img->mPlane[0].mOffset,
                         buffer->size() - img->mPlane[0].mOffset);
             }
         }
-        setFormat(newFormat);
-        buffer->setFormat(newFormat);
     }
+    buffer->setFormat(mFormatWithImageData);
 }
 
 // InputBuffers
@@ -273,22 +278,12 @@
 
     if (entry.notify && mFormat != outputFormat) {
         updateSkipCutBuffer(outputFormat);
-        sp<ABuffer> imageData;
-        if (mFormat->findBuffer("image-data", &imageData)) {
-            outputFormat->setBuffer("image-data", imageData);
-        }
-        int32_t stride;
-        if (mFormat->findInt32(KEY_STRIDE, &stride)) {
-            outputFormat->setInt32(KEY_STRIDE, stride);
-        }
-        int32_t sliceHeight;
-        if (mFormat->findInt32(KEY_SLICE_HEIGHT, &sliceHeight)) {
-            outputFormat->setInt32(KEY_SLICE_HEIGHT, sliceHeight);
-        }
+        // Trigger image data processing for the new format
+        mLastImageData.clear();
         ALOGV("[%s] popFromStashAndRegister: output format reference changed: %p -> %p",
                 mName, mFormat.get(), outputFormat.get());
-        ALOGD("[%s] popFromStashAndRegister: output format changed to %s",
-                mName, outputFormat->debugString().c_str());
+        ALOGD("[%s] popFromStashAndRegister: at %lldus, output format changed to %s",
+                mName, (long long)entry.timestamp, outputFormat->debugString().c_str());
         setFormat(outputFormat);
     }
 
diff --git a/media/codec2/sfplugin/CCodecBuffers.h b/media/codec2/sfplugin/CCodecBuffers.h
index 7c4e7b1..995d3a4 100644
--- a/media/codec2/sfplugin/CCodecBuffers.h
+++ b/media/codec2/sfplugin/CCodecBuffers.h
@@ -86,6 +86,9 @@
     // Format to be used for creating MediaCodec-facing buffers.
     sp<AMessage> mFormat;
 
+    sp<ABuffer> mLastImageData;
+    sp<AMessage> mFormatWithImageData;
+
 private:
     DISALLOW_EVIL_CONSTRUCTORS(CCodecBuffers);
 };
diff --git a/media/codec2/sfplugin/CCodecConfig.cpp b/media/codec2/sfplugin/CCodecConfig.cpp
index 7214bf7..27e87e6 100644
--- a/media/codec2/sfplugin/CCodecConfig.cpp
+++ b/media/codec2/sfplugin/CCodecConfig.cpp
@@ -18,11 +18,13 @@
 #define LOG_TAG "CCodecConfig"
 #include <cutils/properties.h>
 #include <log/log.h>
+#include <utils/NativeHandle.h>
 
 #include <C2Component.h>
 #include <C2Param.h>
 #include <util/C2InterfaceHelper.h>
 
+#include <media/stagefright/CodecBase.h>
 #include <media/stagefright/MediaCodecConstants.h>
 
 #include "CCodecConfig.h"
@@ -289,8 +291,8 @@
     std::vector<std::string> getPathsForDomain(
             Domain any, Domain all = Domain::ALL) const {
         std::vector<std::string> res;
-        for (const std::pair<std::string, std::vector<ConfigMapper>> &el : mConfigMappers) {
-            for (const ConfigMapper &cm : el.second) {
+        for (const auto &[key, mappers] : mConfigMappers) {
+            for (const ConfigMapper &cm : mappers) {
                 ALOGV("filtering %s %x %x %x %x", cm.path().c_str(), cm.domain(), any,
                         (cm.domain() & any), (cm.domain() & any & all));
                 if ((cm.domain() & any) && ((cm.domain() & any & all) == (any & all))) {
@@ -321,7 +323,8 @@
 CCodecConfig::CCodecConfig()
     : mInputFormat(new AMessage),
       mOutputFormat(new AMessage),
-      mUsingSurface(false) { }
+      mUsingSurface(false),
+      mTunneled(false) { }
 
 void CCodecConfig::initializeStandardParams() {
     typedef Domain D;
@@ -359,7 +362,10 @@
         .limitTo(D::OUTPUT & D::READ));
 
     add(ConfigMapper(KEY_BIT_RATE, C2_PARAMKEY_BITRATE, "value")
-        .limitTo(D::ENCODER & D::OUTPUT));
+        .limitTo(D::ENCODER & D::CODED));
+    // Some audio decoders require bitrate information to be set
+    add(ConfigMapper(KEY_BIT_RATE, C2_PARAMKEY_BITRATE, "value")
+        .limitTo(D::AUDIO & D::DECODER & D::CODED));
     // we also need to put the bitrate in the max bitrate field
     add(ConfigMapper(KEY_MAX_BIT_RATE, C2_PARAMKEY_BITRATE, "value")
         .limitTo(D::ENCODER & D::READ & D::OUTPUT));
@@ -417,16 +423,17 @@
 
     // read back default for decoders. This is needed in case the component does not support
     // color aspects. In that case, these values get copied to color-* keys.
+    // TRICKY: We read these values at the raw port, since that's where we want to read them.
     add(ConfigMapper("default-color-range",     C2_PARAMKEY_DEFAULT_COLOR_ASPECTS,   "range")
-        .limitTo((D::VIDEO | D::IMAGE) & D::DECODER  & D::CODED & D::READ)
+        .limitTo((D::VIDEO | D::IMAGE) & D::DECODER  & D::RAW & D::READ)
         .withC2Mappers<C2Color::range_t>());
     add(ConfigMapper("default-color-transfer",  C2_PARAMKEY_DEFAULT_COLOR_ASPECTS,   "transfer")
-        .limitTo((D::VIDEO | D::IMAGE) & D::DECODER  & D::CODED & D::READ)
+        .limitTo((D::VIDEO | D::IMAGE) & D::DECODER  & D::RAW & D::READ)
         .withC2Mappers<C2Color::transfer_t>());
     add(ConfigMapper("default-color-primaries", C2_PARAMKEY_DEFAULT_COLOR_ASPECTS,   "primaries")
-        .limitTo((D::VIDEO | D::IMAGE) & D::DECODER  & D::CODED & D::READ));
+        .limitTo((D::VIDEO | D::IMAGE) & D::DECODER  & D::RAW & D::READ));
     add(ConfigMapper("default-color-matrix",    C2_PARAMKEY_DEFAULT_COLOR_ASPECTS,   "matrix")
-        .limitTo((D::VIDEO | D::IMAGE) & D::DECODER  & D::CODED & D::READ));
+        .limitTo((D::VIDEO | D::IMAGE) & D::DECODER  & D::RAW & D::READ));
 
     // read back final for decoder output (also, configure final aspects as well. This should be
     // overwritten based on coded/default values if component supports color aspects, but is used
@@ -506,7 +513,7 @@
     add(ConfigMapper(std::string(KEY_FEATURE_) + FEATURE_SecurePlayback,
                      C2_PARAMKEY_SECURE_MODE, "value"));
 
-    add(ConfigMapper(KEY_PREPEND_HEADERS_TO_SYNC_FRAMES,
+    add(ConfigMapper(KEY_PREPEND_HEADER_TO_SYNC_FRAMES,
                      C2_PARAMKEY_PREPEND_HEADER_MODE, "value")
         .limitTo(D::ENCODER & D::VIDEO)
         .withMappers([](C2Value v) -> C2Value {
@@ -530,7 +537,7 @@
             return C2Value();
         }));
     // remove when codecs switch to PARAMKEY
-    deprecated(ConfigMapper(KEY_PREPEND_HEADERS_TO_SYNC_FRAMES,
+    deprecated(ConfigMapper(KEY_PREPEND_HEADER_TO_SYNC_FRAMES,
                             "coding.add-csd-to-sync-frames", "value")
                .limitTo(D::ENCODER & D::VIDEO));
     // convert to timestamp base
@@ -726,6 +733,17 @@
             return C2Value();
         }));
 
+    add(ConfigMapper(KEY_AAC_PROFILE, C2_PARAMKEY_PROFILE_LEVEL, "profile")
+        .limitTo(D::AUDIO & D::ENCODER & (D::CONFIG | D::PARAM))
+        .withMapper([mapper](C2Value v) -> C2Value {
+            C2Config::profile_t c2 = PROFILE_UNUSED;
+            int32_t sdk;
+            if (mapper && v.get(&sdk) && mapper->mapProfile(sdk, &c2)) {
+                return c2;
+            }
+            return PROFILE_UNUSED;
+        }));
+
     // convert to dBFS and add default
     add(ConfigMapper(KEY_AAC_DRC_TARGET_REFERENCE_LEVEL, C2_PARAMKEY_DRC_TARGET_REFERENCE_LEVEL, "value")
         .limitTo(D::AUDIO & D::DECODER & (D::CONFIG | D::PARAM | D::READ))
@@ -1058,7 +1076,7 @@
             std::vector<std::string> keys;
             mParamUpdater->getKeysForParamIndex(desc->index(), &keys);
             for (const std::string &key : keys) {
-                mVendorParamIndices.insert_or_assign(key, desc->index());
+                mVendorParams.insert_or_assign(key, desc);
             }
         }
     }
@@ -1125,6 +1143,12 @@
             insertion.first->second = std::move(p);
         }
     }
+    if (mInputSurface
+            && (domain & mOutputDomain)
+            && mInputSurfaceDataspace != mInputSurface->getDataspace()) {
+        changed = true;
+        mInputSurfaceDataspace = mInputSurface->getDataspace();
+    }
 
     ALOGV("updated configuration has %zu params (%s)", mCurrentConfig.size(),
             changed ? "CHANGED" : "no change");
@@ -1164,11 +1188,14 @@
 
     bool changed = false;
     if (domain & mInputDomain) {
-        sp<AMessage> oldFormat = mInputFormat->dup();
+        sp<AMessage> oldFormat = mInputFormat;
+        mInputFormat = mInputFormat->dup(); // trigger format changed
         mInputFormat->extend(getFormatForDomain(reflected, mInputDomain));
         if (mInputFormat->countEntries() != oldFormat->countEntries()
                 || mInputFormat->changesFrom(oldFormat)->countEntries() > 0) {
             changed = true;
+        } else {
+            mInputFormat = oldFormat; // no change
         }
     }
     if (domain & mOutputDomain) {
@@ -1190,8 +1217,8 @@
         const ReflectedParamUpdater::Dict &reflected,
         Domain portDomain) const {
     sp<AMessage> msg = new AMessage;
-    for (const std::pair<std::string, std::vector<ConfigMapper>> &el : mStandardParams->getKeys()) {
-        for (const ConfigMapper &cm : el.second) {
+    for (const auto &[key, mappers] : mStandardParams->getKeys()) {
+        for (const ConfigMapper &cm : mappers) {
             if ((cm.domain() & portDomain) == 0 // input-output-coded-raw
                 || (cm.domain() & mDomain) != mDomain // component domain + kind (these must match)
                 || (cm.domain() & IS_READ) == 0) {
@@ -1215,26 +1242,26 @@
                 ALOGD("unexpected untyped query value for key: %s", cm.path().c_str());
                 continue;
             }
-            msg->setItem(el.first.c_str(), item);
+            msg->setItem(key.c_str(), item);
         }
     }
 
     bool input = (portDomain & Domain::IS_INPUT);
     std::vector<std::string> vendorKeys;
-    for (const std::pair<std::string, ReflectedParamUpdater::Value> &entry : reflected) {
-        auto it = mVendorParamIndices.find(entry.first);
-        if (it == mVendorParamIndices.end()) {
+    for (const auto &[key, value] : reflected) {
+        auto it = mVendorParams.find(key);
+        if (it == mVendorParams.end()) {
             continue;
         }
-        if (mSubscribedIndices.count(it->second) == 0) {
+        C2Param::Index index = it->second->index();
+        if (mSubscribedIndices.count(index) == 0) {
             continue;
         }
         // For vendor parameters, we only care about direction
-        if ((input && !it->second.forInput())
-                || (!input && !it->second.forOutput())) {
+        if ((input && !index.forInput())
+                || (!input && !index.forOutput())) {
             continue;
         }
-        const ReflectedParamUpdater::Value &value = entry.second;
         C2Value c2Value;
         sp<ABuffer> bufValue;
         AString strValue;
@@ -1246,10 +1273,10 @@
         } else if (value.find(&strValue)) {
             item.set(strValue);
         } else {
-            ALOGD("unexpected untyped query value for key: %s", entry.first.c_str());
+            ALOGD("unexpected untyped query value for key: %s", key.c_str());
             continue;
         }
-        msg->setItem(entry.first.c_str(), item);
+        msg->setItem(key.c_str(), item);
     }
 
     { // convert from Codec 2.0 rect to MediaFormat rect and add crop rect if not present
@@ -1309,6 +1336,14 @@
         }
     }
 
+    // Remove KEY_AAC_SBR_MODE from the SDK message if it is outside the supported range,
+    // as the SDK doesn't have a way to signal the default SBR mode based on the profile and
+    // requires that the key be absent from the format to signal that.
+    int sbrMode;
+    if (msg->findInt32(KEY_AAC_SBR_MODE, &sbrMode) && (sbrMode < 0 || sbrMode > 2)) {
+        msg->removeEntryAt(msg->findEntryByName(KEY_AAC_SBR_MODE));
+    }
+
     { // convert color info
         // move default color to color aspect if not read from the component
         int32_t tmp;
@@ -1353,7 +1388,6 @@
             msg->removeEntryAt(msg->findEntryByName("color-matrix"));
         }
 
-
         // calculate dataspace for raw graphic buffers if not specified by component, or if
         // using surface with unspecified aspects (as those must be defaulted which may change
         // the dataspace)
@@ -1391,6 +1425,23 @@
             }
         }
 
+        if (mInputSurface) {
+            android_dataspace dataspace = mInputSurface->getDataspace();
+            ColorUtils::convertDataSpaceToV0(dataspace);
+            int32_t standard;
+            ColorUtils::getColorConfigFromDataSpace(dataspace, &range, &standard, &transfer);
+            if (range != 0) {
+                msg->setInt32(KEY_COLOR_RANGE, range);
+            }
+            if (standard != 0) {
+                msg->setInt32(KEY_COLOR_STANDARD, standard);
+            }
+            if (transfer != 0) {
+                msg->setInt32(KEY_COLOR_TRANSFER, transfer);
+            }
+            msg->setInt32("android._dataspace", dataspace);
+        }
+
         // HDR static info
 
         C2HdrStaticMetadataStruct hdr;
@@ -1808,8 +1859,81 @@
 status_t CCodecConfig::subscribeToAllVendorParams(
         const std::shared_ptr<Codec2Client::Configurable> &configurable,
         c2_blocking_t blocking) {
-    for (const std::pair<std::string, C2Param::Index> &entry : mVendorParamIndices) {
-        mSubscribedIndices.insert(entry.second);
+    for (const auto &[path, desc] : mVendorParams) {
+        mSubscribedIndices.insert(desc->index());
+    }
+    return subscribeToConfigUpdate(configurable, {}, blocking);
+}
+
+status_t CCodecConfig::querySupportedParameters(std::vector<std::string> *names) {
+    if (!names) {
+        return BAD_VALUE;
+    }
+    names->clear();
+    // TODO: expand to standard params
+    for (const auto &[key, desc] : mVendorParams) {
+        names->push_back(key);
+    }
+    return OK;
+}
+
+status_t CCodecConfig::describe(const std::string &name, CodecParameterDescriptor *desc) {
+    if (!desc) {
+        return BAD_VALUE;
+    }
+    // TODO: expand to standard params
+    desc->name = name;
+    switch (mParamUpdater->getTypeForKey(name)) {
+        case C2FieldDescriptor::INT32:
+        case C2FieldDescriptor::UINT32:
+        case C2FieldDescriptor::CNTR32:
+            desc->type = AMessage::kTypeInt32;
+            return OK;
+        case C2FieldDescriptor::INT64:
+        case C2FieldDescriptor::UINT64:
+        case C2FieldDescriptor::CNTR64:
+            desc->type = AMessage::kTypeInt64;
+            return OK;
+        case C2FieldDescriptor::FLOAT:
+            desc->type = AMessage::kTypeFloat;
+            return OK;
+        case C2FieldDescriptor::STRING:
+            desc->type = AMessage::kTypeString;
+            return OK;
+        case C2FieldDescriptor::BLOB:
+            desc->type = AMessage::kTypeBuffer;
+            return OK;
+        default:
+            return NAME_NOT_FOUND;
+    }
+}
+
+status_t CCodecConfig::subscribeToVendorConfigUpdate(
+        const std::shared_ptr<Codec2Client::Configurable> &configurable,
+        const std::vector<std::string> &names,
+        c2_blocking_t blocking) {
+    for (const std::string &name : names) {
+        auto it = mVendorParams.find(name);
+        if (it == mVendorParams.end()) {
+            ALOGD("%s is not a recognized vendor parameter; ignored.", name.c_str());
+            continue;
+        }
+        mSubscribedIndices.insert(it->second->index());
+    }
+    return subscribeToConfigUpdate(configurable, {}, blocking);
+}
+
+status_t CCodecConfig::unsubscribeFromVendorConfigUpdate(
+        const std::shared_ptr<Codec2Client::Configurable> &configurable,
+        const std::vector<std::string> &names,
+        c2_blocking_t blocking) {
+    for (const std::string &name : names) {
+        auto it = mVendorParams.find(name);
+        if (it == mVendorParams.end()) {
+            ALOGD("%s is not a recognized vendor parameter; ignored.", name.c_str());
+            continue;
+        }
+        mSubscribedIndices.erase(it->second->index());
     }
     return subscribeToConfigUpdate(configurable, {}, blocking);
 }
diff --git a/media/codec2/sfplugin/CCodecConfig.h b/media/codec2/sfplugin/CCodecConfig.h
index 2895746..417b773 100644
--- a/media/codec2/sfplugin/CCodecConfig.h
+++ b/media/codec2/sfplugin/CCodecConfig.h
@@ -35,6 +35,8 @@
 namespace android {
 
 struct AMessage;
+struct CodecParameterDescriptor;
+class NativeHandle;
 struct StandardParams;
 
 /**
@@ -123,6 +125,7 @@
 
     std::shared_ptr<InputSurfaceWrapper> mInputSurface;
     std::unique_ptr<InputSurfaceWrapper::Config> mISConfig;
+    android_dataspace mInputSurfaceDataspace;
 
     /// the current configuration. Updated after configure() and based on configUpdate in
     /// onWorkDone
@@ -136,11 +139,15 @@
     /// For now support a validation function.
     std::map<C2Param::Index, LocalParamValidator> mLocalParams;
 
-    /// Vendor field name -> index map.
-    std::map<std::string, C2Param::Index> mVendorParamIndices;
+    /// Vendor field name -> desc map.
+    std::map<std::string, std::shared_ptr<C2ParamDescriptor>> mVendorParams;
 
     std::set<std::string> mLastConfig;
 
+    /// Tunneled codecs
+    bool mTunneled;
+    sp<NativeHandle> mSidebandHandle;
+
     CCodecConfig();
 
     /// initializes the members required to manage the format: descriptors, reflector,
@@ -321,6 +328,41 @@
         return Watcher<T>(index, this);
     }
 
+    /**
+     * Queries supported parameters and puts the keys into |names|.
+     * TODO: currently this method queries vendor parameter keys only.
+     *
+     * \return OK if successful.
+     *         BAD_VALUE if |names| is nullptr.
+     */
+    status_t querySupportedParameters(std::vector<std::string> *names);
+
+    /**
+     * Describes the parameter with |name|, filling the information into |desc|.
+     * TODO: currently this method works only for vendor parameters.
+     *
+     * \return OK if successful.
+     *         BAD_VALUE if |desc| is nullptr.
+     *         NAME_NOT_FOUND if |name| is not a recognized parameter name.
+     */
+    status_t describe(const std::string &name, CodecParameterDescriptor *desc);
+
+    /**
+     * Finds the corresponding indices for |names| and subscribes to them.
+     */
+    status_t subscribeToVendorConfigUpdate(
+            const std::shared_ptr<Codec2Client::Configurable> &configurable,
+            const std::vector<std::string> &names,
+            c2_blocking_t blocking = C2_DONT_BLOCK);
+
+    /**
+     * Finds the corresponding indices for |names| and unsubscribes from them.
+     */
+    status_t unsubscribeFromVendorConfigUpdate(
+            const std::shared_ptr<Codec2Client::Configurable> &configurable,
+            const std::vector<std::string> &names,
+            c2_blocking_t blocking = C2_DONT_BLOCK);
+
 private:
 
     /// initializes the standard MediaCodec to Codec 2.0 params mapping
diff --git a/media/codec2/sfplugin/Codec2Buffer.cpp b/media/codec2/sfplugin/Codec2Buffer.cpp
index fc4ee51..34e6a88 100644
--- a/media/codec2/sfplugin/Codec2Buffer.cpp
+++ b/media/codec2/sfplugin/Codec2Buffer.cpp
@@ -713,6 +713,8 @@
     c2_status_t err = mAlloc->priorGraphicAllocation(handle, &alloc);
     if (err != C2_OK) {
         ALOGD("Failed to wrap VideoNativeMetadata into C2GraphicAllocation");
+        native_handle_close(handle);
+        native_handle_delete(handle);
         return nullptr;
     }
     std::shared_ptr<C2GraphicBlock> block = _C2BlockFactory::CreateGraphicBlock(alloc);
diff --git a/media/codec2/sfplugin/Codec2InfoBuilder.cpp b/media/codec2/sfplugin/Codec2InfoBuilder.cpp
index a26f89e..77a63a7 100644
--- a/media/codec2/sfplugin/Codec2InfoBuilder.cpp
+++ b/media/codec2/sfplugin/Codec2InfoBuilder.cpp
@@ -338,6 +338,59 @@
     // parse default XML files
     parser.parseXmlFilesInSearchDirs();
 
+    // The mainline modules for media may optionally include some codec shaping information,
+    // selected based on the vendor partition SDK and the brand/product/device information
+    // (expected to be empty in almost all cases).
+    //
+    {
+        // get build info so we know what file to search
+        // ro.vendor.build.fingerprint
+        std::string fingerprint = base::GetProperty("ro.vendor.build.fingerprint",
+                                               "brand/product/device:");
+        ALOGV("property_get for ro.vendor.build.fingerprint == '%s'", fingerprint.c_str());
+
+        // ro.vendor.build.version.sdk
+        std::string sdk = base::GetProperty("ro.vendor.build.version.sdk", "0");
+        ALOGV("property_get for ro.vendor.build.version.sdk == '%s'", sdk.c_str());
+
+        std::string brand;
+        std::string product;
+        std::string device;
+        size_t pos1;
+        pos1 = fingerprint.find('/');
+        if (pos1 != std::string::npos) {
+            brand = fingerprint.substr(0, pos1);
+            size_t pos2 = fingerprint.find('/', pos1+1);
+            if (pos2 != std::string::npos) {
+                product = fingerprint.substr(pos1+1, pos2 - pos1 - 1);
+                size_t pos3 = fingerprint.find('/', pos2+1);
+                if (pos3 != std::string::npos) {
+                    device = fingerprint.substr(pos2+1, pos3 - pos2 - 1);
+                    size_t pos4 = device.find(':');
+                    if (pos4 != std::string::npos) {
+                        device.resize(pos4);
+                    }
+                }
+            }
+        }
+
+        ALOGV("parsed: sdk '%s' brand '%s' product '%s' device '%s'",
+            sdk.c_str(), brand.c_str(), product.c_str(), device.c_str());
+
+        std::string base = "/apex/com.android.media/etc/formatshaper";
+
+        // looking in these directories within the apex
+        const std::vector<std::string> modulePathnames = {
+            base + "/" + sdk + "/" + brand + "/" + product + "/" + device,
+            base + "/" + sdk + "/" + brand + "/" + product,
+            base + "/" + sdk + "/" + brand,
+            base + "/" + sdk,
+            base
+        };
+
+        parser.parseXmlFilesInSearchDirs( { "media_codecs_shaping.xml" }, modulePathnames);
+    }
+
     if (parser.getParsingStatus() != OK) {
         ALOGD("XML parser no good");
         return OK;
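
For illustration, a self-contained sketch (not part of this change) of the fingerprint parsing above: the value has the form "brand/product/device:release/...", and the device field is truncated at the first ':'.

    #include <string>

    struct BuildIds { std::string brand, product, device; };

    // Splits "brand/product/device:release/..." into its first three fields.
    static BuildIds parseFingerprint(const std::string &fingerprint) {
        BuildIds ids;
        size_t pos1 = fingerprint.find('/');
        if (pos1 == std::string::npos) return ids;
        ids.brand = fingerprint.substr(0, pos1);
        size_t pos2 = fingerprint.find('/', pos1 + 1);
        if (pos2 == std::string::npos) return ids;
        ids.product = fingerprint.substr(pos1 + 1, pos2 - pos1 - 1);
        size_t pos3 = fingerprint.find('/', pos2 + 1);
        if (pos3 == std::string::npos) return ids;
        ids.device = fingerprint.substr(pos2 + 1, pos3 - pos2 - 1);
        size_t pos4 = ids.device.find(':');
        if (pos4 != std::string::npos) ids.device.resize(pos4);
        return ids;
    }
    // e.g. parseFingerprint("google/raven/raven:12/SP1A/123:user/release-keys")
    // yields brand "google", product "raven", device "raven".
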
diff --git a/media/codec2/sfplugin/FrameReassembler.cpp b/media/codec2/sfplugin/FrameReassembler.cpp
index 9cec23f..af054c7 100644
--- a/media/codec2/sfplugin/FrameReassembler.cpp
+++ b/media/codec2/sfplugin/FrameReassembler.cpp
@@ -143,6 +143,7 @@
 
     if (buffer->size() > 0) {
         mCurrentOrdinal.timestamp = timeUs;
+        mCurrentOrdinal.customOrdinal = timeUs;
     }
 
     size_t frameSizeBytes = mFrameSize.value() * mChannelCount * bytesPerSample();
@@ -219,6 +220,7 @@
 
     ++mCurrentOrdinal.frameIndex;
     mCurrentOrdinal.timestamp += mFrameSize.value() * 1000000 / mSampleRate;
+    mCurrentOrdinal.customOrdinal = mCurrentOrdinal.timestamp;
     mCurrentBlock.reset();
     mWriteView.reset();
 }
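
The customOrdinal now tracks the recomputed media timestamp so downstream ordering stays consistent. For reference, a minimal sketch (not part of this change) of the per-frame timestamp advance, assuming a 1 MHz media clock, a frame size in samples, and a sample rate in Hz:

    #include <cstdint>

    // Each reassembled frame advances the timestamp by frameSizeSamples samples.
    static int64_t nextTimestampUs(
            int64_t timestampUs, uint32_t frameSizeSamples, uint32_t sampleRate) {
        return timestampUs + static_cast<int64_t>(frameSizeSamples) * 1000000 / sampleRate;
    }
    // e.g. nextTimestampUs(0, 1024, 48000) == 21333 (microseconds).
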
diff --git a/media/codec2/sfplugin/InputSurfaceWrapper.h b/media/codec2/sfplugin/InputSurfaceWrapper.h
index 479acb1..50d600c 100644
--- a/media/codec2/sfplugin/InputSurfaceWrapper.h
+++ b/media/codec2/sfplugin/InputSurfaceWrapper.h
@@ -106,6 +106,11 @@
      */
     virtual void onInputBufferDone(c2_cntr64_t /* index */) {}
 
+    /**
+     * Returns dataspace information from GraphicBufferSource.
+     */
+    virtual android_dataspace getDataspace() { return mDataSpace; }
+
 protected:
     android_dataspace mDataSpace;
 };
diff --git a/media/codec2/sfplugin/PipelineWatcher.cpp b/media/codec2/sfplugin/PipelineWatcher.cpp
index 0ee9056..bc9197c 100644
--- a/media/codec2/sfplugin/PipelineWatcher.cpp
+++ b/media/codec2/sfplugin/PipelineWatcher.cpp
@@ -95,6 +95,7 @@
 }
 
 void PipelineWatcher::flush() {
+    ALOGV("flush");
     mFramesInPipeline.clear();
 }
 
diff --git a/media/codec2/sfplugin/ReflectedParamUpdater.cpp b/media/codec2/sfplugin/ReflectedParamUpdater.cpp
index f39051b..d14b9b0 100644
--- a/media/codec2/sfplugin/ReflectedParamUpdater.cpp
+++ b/media/codec2/sfplugin/ReflectedParamUpdater.cpp
@@ -288,6 +288,20 @@
     }
 }
 
+C2FieldDescriptor::type_t ReflectedParamUpdater::getTypeForKey(
+        const std::string &key) const {
+    auto it = mMap.find(key);
+    if (it == mMap.end()) {
+        return C2FieldDescriptor::type_t(~0);
+    }
+
+    if (it->second.fieldDesc) {
+        return it->second.fieldDesc->type();
+    }
+    // whole param is exposed as a blob
+    return C2FieldDescriptor::BLOB;
+}
+
 void ReflectedParamUpdater::updateParamsFromMessage(
         const Dict &params,
         std::vector<std::unique_ptr<C2Param>> *vec /* nonnull */) const {
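
For reference, a minimal sketch (not part of this change) of how getTypeForKey() can gate value handling; |updater| is an assumed, already-populated ReflectedParamUpdater and the key name is hypothetical.

    C2FieldDescriptor::type_t type = updater.getTypeForKey("vendor.example.value");
    if (type == C2FieldDescriptor::type_t(~0)) {
        // key is not recognized by the reflector
    } else if (type == C2FieldDescriptor::INT32) {
        // the value may safely be treated as int32_t
    } else if (type == C2FieldDescriptor::BLOB) {
        // the whole param is exposed as a blob
    }
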
diff --git a/media/codec2/sfplugin/ReflectedParamUpdater.h b/media/codec2/sfplugin/ReflectedParamUpdater.h
index 752c7e4..6dcf2a3 100644
--- a/media/codec2/sfplugin/ReflectedParamUpdater.h
+++ b/media/codec2/sfplugin/ReflectedParamUpdater.h
@@ -176,6 +176,14 @@
             std::vector<std::string> *keys /* nonnull */) const;
 
     /**
+     * Gets the field type for the given name.
+     *
+     * \param name[in]  field name
+     * \return type of the field, or type_t(~0) if not found.
+     */
+    C2FieldDescriptor::type_t getTypeForKey(const std::string &name) const;
+
+    /**
      * Update C2Param objects from field name and value in AMessage object.
      *
      * \param params[in]    Dict object with field name to value pairs.
diff --git a/media/codec2/sfplugin/include/media/stagefright/CCodec.h b/media/codec2/sfplugin/include/media/stagefright/CCodec.h
index dbbb5d5..ec18128 100644
--- a/media/codec2/sfplugin/include/media/stagefright/CCodec.h
+++ b/media/codec2/sfplugin/include/media/stagefright/CCodec.h
@@ -65,6 +65,12 @@
     virtual void signalEndOfInputStream() override;
     virtual void signalRequestIDRFrame() override;
 
+    virtual status_t querySupportedParameters(std::vector<std::string> *names) override;
+    virtual status_t describeParameter(
+            const std::string &name, CodecParameterDescriptor *desc) override;
+    virtual status_t subscribeToParameters(const std::vector<std::string> &names) override;
+    virtual status_t unsubscribeFromParameters(const std::vector<std::string> &names) override;
+
     void initiateReleaseIfStuck();
     void onWorkDone(std::list<std::unique_ptr<C2Work>> &workItems);
     void onInputBufferDone(uint64_t frameIndex, size_t arrayIndex);
@@ -126,6 +132,11 @@
             const std::chrono::milliseconds &timeout,
             const char *name);
 
+    status_t configureTunneledVideoPlayback(
+            const std::shared_ptr<Codec2Client::Component> comp,
+            sp<NativeHandle> *sidebandHandle,
+            const sp<AMessage> &msg);
+
     enum {
         kWhatAllocate,
         kWhatConfigure,
diff --git a/media/codec2/sfplugin/tests/CCodecConfig_test.cpp b/media/codec2/sfplugin/tests/CCodecConfig_test.cpp
index c9caa01..7c660dc 100644
--- a/media/codec2/sfplugin/tests/CCodecConfig_test.cpp
+++ b/media/codec2/sfplugin/tests/CCodecConfig_test.cpp
@@ -208,6 +208,24 @@
                         .withSetter(Setter<C2StreamPixelAspectRatioInfo::output>)
                         .build());
 
+                if (isEncoder) {
+                    addParameter(
+                            DefineParam(mInputBitrate, C2_PARAMKEY_BITRATE)
+                            .withDefault(new C2StreamBitrateInfo::input(0u))
+                            .withFields({C2F(mInputBitrate, value).any()})
+                            .withSetter(Setter<C2StreamBitrateInfo::input>)
+                            .build());
+
+                    addParameter(
+                            DefineParam(mOutputBitrate, C2_PARAMKEY_BITRATE)
+                            .withDefault(new C2StreamBitrateInfo::output(0u))
+                            .withFields({C2F(mOutputBitrate, value).any()})
+                            .calculatedAs(
+                                Copy<C2StreamBitrateInfo::output, C2StreamBitrateInfo::input>,
+                                mInputBitrate)
+                            .build());
+                }
+
                 // TODO: more SDK params
             }
         private:
@@ -221,11 +239,19 @@
             std::shared_ptr<C2StreamVendorInt64Info::output> mInt64Output;
             std::shared_ptr<C2PortVendorStringInfo::input> mStringInput;
             std::shared_ptr<C2StreamPixelAspectRatioInfo::output> mPixelAspectRatio;
+            std::shared_ptr<C2StreamBitrateInfo::input> mInputBitrate;
+            std::shared_ptr<C2StreamBitrateInfo::output> mOutputBitrate;
 
             template<typename T>
             static C2R Setter(bool, C2P<T> &) {
                 return C2R::Ok();
             }
+
+            template<typename ME, typename DEP>
+            static C2R Copy(bool, C2P<ME> &me, const C2P<DEP> &dep) {
+                me.set().value = dep.v.value;
+                return C2R::Ok();
+            }
         };
 
         Impl mImpl;
@@ -457,4 +483,97 @@
             << "mInputFormat = " << mConfig.mInputFormat->debugString().c_str();
 }
 
+TEST_F(CCodecConfigTest, DataspaceUpdate) {
+    init(C2Component::DOMAIN_VIDEO, C2Component::KIND_ENCODER, MIMETYPE_VIDEO_AVC);
+
+    ASSERT_EQ(OK, mConfig.initialize(mReflector, mConfigurable));
+    class InputSurfaceStub : public InputSurfaceWrapper {
+    public:
+        ~InputSurfaceStub() override = default;
+        status_t connect(const std::shared_ptr<Codec2Client::Component> &) override {
+            return OK;
+        }
+        void disconnect() override {}
+        status_t start() override { return OK; }
+        status_t signalEndOfInputStream() override { return OK; }
+        status_t configure(Config &) override { return OK; }
+    };
+    mConfig.mInputSurface = std::make_shared<InputSurfaceStub>();
+
+    sp<AMessage> format{new AMessage};
+    format->setInt32(KEY_COLOR_RANGE, COLOR_RANGE_LIMITED);
+    format->setInt32(KEY_COLOR_STANDARD, COLOR_STANDARD_BT709);
+    format->setInt32(KEY_COLOR_TRANSFER, COLOR_TRANSFER_SDR_VIDEO);
+    format->setInt32(KEY_BIT_RATE, 100);
+
+    std::vector<std::unique_ptr<C2Param>> configUpdate;
+    ASSERT_EQ(OK, mConfig.getConfigUpdateFromSdkParams(
+            mConfigurable, format, D::ALL, C2_MAY_BLOCK, &configUpdate));
+    ASSERT_TRUE(mConfig.updateConfiguration(configUpdate, D::ALL));
+
+    int32_t range{0};
+    ASSERT_TRUE(mConfig.mOutputFormat->findInt32(KEY_COLOR_RANGE, &range))
+            << "mOutputFormat = " << mConfig.mOutputFormat->debugString().c_str();
+    EXPECT_EQ(COLOR_RANGE_LIMITED, range)
+            << "mOutputFormat = " << mConfig.mOutputFormat->debugString().c_str();
+
+    int32_t standard{0};
+    ASSERT_TRUE(mConfig.mOutputFormat->findInt32(KEY_COLOR_STANDARD, &standard))
+            << "mOutputFormat = " << mConfig.mOutputFormat->debugString().c_str();
+    EXPECT_EQ(COLOR_STANDARD_BT709, standard)
+            << "mOutputFormat = " << mConfig.mOutputFormat->debugString().c_str();
+
+    int32_t transfer{0};
+    ASSERT_TRUE(mConfig.mOutputFormat->findInt32(KEY_COLOR_TRANSFER, &transfer))
+            << "mOutputFormat = " << mConfig.mOutputFormat->debugString().c_str();
+    EXPECT_EQ(COLOR_TRANSFER_SDR_VIDEO, transfer)
+            << "mOutputFormat = " << mConfig.mOutputFormat->debugString().c_str();
+
+    mConfig.mInputSurface->setDataSpace(HAL_DATASPACE_BT2020_PQ);
+
+    // Dataspace from input surface should override the configured setting
+    mConfig.updateFormats(D::ALL);
+
+    ASSERT_TRUE(mConfig.mOutputFormat->findInt32(KEY_COLOR_RANGE, &range))
+            << "mOutputFormat = " << mConfig.mOutputFormat->debugString().c_str();
+    EXPECT_EQ(COLOR_RANGE_FULL, range)
+            << "mOutputFormat = " << mConfig.mOutputFormat->debugString().c_str();
+
+    ASSERT_TRUE(mConfig.mOutputFormat->findInt32(KEY_COLOR_STANDARD, &standard))
+            << "mOutputFormat = " << mConfig.mOutputFormat->debugString().c_str();
+    EXPECT_EQ(COLOR_STANDARD_BT2020, standard)
+            << "mOutputFormat = " << mConfig.mOutputFormat->debugString().c_str();
+
+    ASSERT_TRUE(mConfig.mOutputFormat->findInt32(KEY_COLOR_TRANSFER, &transfer))
+            << "mOutputFormat = " << mConfig.mOutputFormat->debugString().c_str();
+    EXPECT_EQ(COLOR_TRANSFER_ST2084, transfer)
+            << "mOutputFormat = " << mConfig.mOutputFormat->debugString().c_str();
+
+    // Simulate bitrate update
+    format = new AMessage;
+    format->setInt32(KEY_BIT_RATE, 200);
+    configUpdate.clear();
+    ASSERT_EQ(OK, mConfig.getConfigUpdateFromSdkParams(
+            mConfigurable, format, D::ALL, C2_MAY_BLOCK, &configUpdate));
+    ASSERT_EQ(OK, mConfig.setParameters(mConfigurable, configUpdate, C2_MAY_BLOCK));
+
+    // Color information should remain the same
+    mConfig.updateFormats(D::ALL);
+
+    ASSERT_TRUE(mConfig.mOutputFormat->findInt32(KEY_COLOR_RANGE, &range))
+            << "mOutputFormat = " << mConfig.mOutputFormat->debugString().c_str();
+    EXPECT_EQ(COLOR_RANGE_FULL, range)
+            << "mOutputFormat = " << mConfig.mOutputFormat->debugString().c_str();
+
+    ASSERT_TRUE(mConfig.mOutputFormat->findInt32(KEY_COLOR_STANDARD, &standard))
+            << "mOutputFormat = " << mConfig.mOutputFormat->debugString().c_str();
+    EXPECT_EQ(COLOR_STANDARD_BT2020, standard)
+            << "mOutputFormat = " << mConfig.mOutputFormat->debugString().c_str();
+
+    ASSERT_TRUE(mConfig.mOutputFormat->findInt32(KEY_COLOR_TRANSFER, &transfer))
+            << "mOutputFormat = " << mConfig.mOutputFormat->debugString().c_str();
+    EXPECT_EQ(COLOR_TRANSFER_ST2084, transfer)
+            << "mOutputFormat = " << mConfig.mOutputFormat->debugString().c_str();
+}
+
 } // namespace android
diff --git a/media/codec2/sfplugin/utils/Android.bp b/media/codec2/sfplugin/utils/Android.bp
index 74e7ef1..2f4d6b1 100644
--- a/media/codec2/sfplugin/utils/Android.bp
+++ b/media/codec2/sfplugin/utils/Android.bp
@@ -33,11 +33,13 @@
         "libcodec2_vndk",
         "libcutils",
         "liblog",
+        "libnativewindow",
         "libstagefright_foundation",
         "libutils",
     ],
 
     static_libs: [
+        "libarect",
         "libyuv_static",
     ],
 
diff --git a/media/codec2/sfplugin/utils/Codec2BufferUtils.cpp b/media/codec2/sfplugin/utils/Codec2BufferUtils.cpp
index bf2a07e..a78d811 100644
--- a/media/codec2/sfplugin/utils/Codec2BufferUtils.cpp
+++ b/media/codec2/sfplugin/utils/Codec2BufferUtils.cpp
@@ -23,6 +23,7 @@
 #include <list>
 #include <mutex>
 
+#include <android/hardware_buffer.h>
 #include <media/hardware/HardwareAPI.h>
 #include <media/stagefright/foundation/AUtils.h>
 
@@ -121,32 +122,69 @@
     if (view.crop().width != img->mWidth || view.crop().height != img->mHeight) {
         return BAD_VALUE;
     }
-    if ((IsNV12(view) && IsI420(img)) || (IsI420(view) && IsNV12(img))) {
-        // Take shortcuts to use libyuv functions between NV12 and I420 conversion.
-        const uint8_t* src_y = view.data()[0];
-        const uint8_t* src_u = view.data()[1];
-        const uint8_t* src_v = view.data()[2];
-        int32_t src_stride_y = view.layout().planes[0].rowInc;
-        int32_t src_stride_u = view.layout().planes[1].rowInc;
-        int32_t src_stride_v = view.layout().planes[2].rowInc;
-        uint8_t* dst_y = imgBase + img->mPlane[0].mOffset;
-        uint8_t* dst_u = imgBase + img->mPlane[1].mOffset;
-        uint8_t* dst_v = imgBase + img->mPlane[2].mOffset;
-        int32_t dst_stride_y = img->mPlane[0].mRowInc;
-        int32_t dst_stride_u = img->mPlane[1].mRowInc;
-        int32_t dst_stride_v = img->mPlane[2].mRowInc;
-        if (IsNV12(view) && IsI420(img)) {
+    const uint8_t* src_y = view.data()[0];
+    const uint8_t* src_u = view.data()[1];
+    const uint8_t* src_v = view.data()[2];
+    int32_t src_stride_y = view.layout().planes[0].rowInc;
+    int32_t src_stride_u = view.layout().planes[1].rowInc;
+    int32_t src_stride_v = view.layout().planes[2].rowInc;
+    uint8_t* dst_y = imgBase + img->mPlane[0].mOffset;
+    uint8_t* dst_u = imgBase + img->mPlane[1].mOffset;
+    uint8_t* dst_v = imgBase + img->mPlane[2].mOffset;
+    int32_t dst_stride_y = img->mPlane[0].mRowInc;
+    int32_t dst_stride_u = img->mPlane[1].mRowInc;
+    int32_t dst_stride_v = img->mPlane[2].mRowInc;
+    int width = view.crop().width;
+    int height = view.crop().height;
+
+    if (IsNV12(view)) {
+        if (IsNV12(img)) {
+            libyuv::CopyPlane(src_y, src_stride_y, dst_y, dst_stride_y, width, height);
+            libyuv::CopyPlane(src_u, src_stride_u, dst_u, dst_stride_u, width, height / 2);
+            return OK;
+        } else if (IsNV21(img)) {
+            if (!libyuv::NV21ToNV12(src_y, src_stride_y, src_u, src_stride_u,
+                                    dst_y, dst_stride_y, dst_v, dst_stride_v, width, height)) {
+                return OK;
+            }
+        } else if (IsI420(img)) {
             if (!libyuv::NV12ToI420(src_y, src_stride_y, src_u, src_stride_u, dst_y, dst_stride_y,
-                                    dst_u, dst_stride_u, dst_v, dst_stride_v, view.crop().width,
-                                    view.crop().height)) {
+                                    dst_u, dst_stride_u, dst_v, dst_stride_v, width, height)) {
                 return OK;
             }
-        } else {
+        }
+    } else if (IsNV21(view)) {
+        if (IsNV12(img)) {
+            if (!libyuv::NV21ToNV12(src_y, src_stride_y, src_v, src_stride_v,
+                                    dst_y, dst_stride_y, dst_u, dst_stride_u, width, height)) {
+                return OK;
+            }
+        } else if (IsNV21(img)) {
+            libyuv::CopyPlane(src_y, src_stride_y, dst_y, dst_stride_y, width, height);
+            libyuv::CopyPlane(src_v, src_stride_v, dst_v, dst_stride_v, width, height / 2);
+            return OK;
+        } else if (IsI420(img)) {
+            if (!libyuv::NV21ToI420(src_y, src_stride_y, src_v, src_stride_v, dst_y, dst_stride_y,
+                                    dst_u, dst_stride_u, dst_v, dst_stride_v, width, height)) {
+                return OK;
+            }
+        }
+    } else if (IsI420(view)) {
+        if (IsNV12(img)) {
             if (!libyuv::I420ToNV12(src_y, src_stride_y, src_u, src_stride_u, src_v, src_stride_v,
-                                    dst_y, dst_stride_y, dst_u, dst_stride_u, view.crop().width,
-                                    view.crop().height)) {
+                                    dst_y, dst_stride_y, dst_u, dst_stride_u, width, height)) {
                 return OK;
             }
+        } else if (IsNV21(img)) {
+            if (!libyuv::I420ToNV21(src_y, src_stride_y, src_u, src_stride_u, src_v, src_stride_v,
+                                    dst_y, dst_stride_y, dst_v, dst_stride_v, width, height)) {
+                return OK;
+            }
+        } else if (IsI420(img)) {
+            libyuv::CopyPlane(src_y, src_stride_y, dst_y, dst_stride_y, width, height);
+            libyuv::CopyPlane(src_u, src_stride_u, dst_u, dst_stride_u, width / 2, height / 2);
+            libyuv::CopyPlane(src_v, src_stride_v, dst_v, dst_stride_v, width / 2, height / 2);
+            return OK;
         }
     }
     return _ImageCopy<true>(view, img, imgBase);
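
In summary, the rewritten dispatch above covers all nine source/destination combinations of NV12, NV21, and I420; only unrecognized layouts fall back to the generic path:

    NV12 -> NV12 : CopyPlane(Y) + CopyPlane(interleaved UV)
    NV12 -> NV21 : libyuv::NV21ToNV12 (swaps the interleaved chroma bytes)
    NV12 -> I420 : libyuv::NV12ToI420
    NV21 -> NV12 : libyuv::NV21ToNV12
    NV21 -> NV21 : CopyPlane(Y) + CopyPlane(interleaved VU)
    NV21 -> I420 : libyuv::NV21ToI420
    I420 -> NV12 : libyuv::I420ToNV12
    I420 -> NV21 : libyuv::I420ToNV21
    I420 -> I420 : CopyPlane(Y) + CopyPlane(U) + CopyPlane(V)
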
@@ -156,32 +194,68 @@
     if (view.crop().width != img->mWidth || view.crop().height != img->mHeight) {
         return BAD_VALUE;
     }
-    if ((IsNV12(img) && IsI420(view)) || (IsI420(img) && IsNV12(view))) {
-        // Take shortcuts to use libyuv functions between NV12 and I420 conversion.
-        const uint8_t* src_y = imgBase + img->mPlane[0].mOffset;
-        const uint8_t* src_u = imgBase + img->mPlane[1].mOffset;
-        const uint8_t* src_v = imgBase + img->mPlane[2].mOffset;
-        int32_t src_stride_y = img->mPlane[0].mRowInc;
-        int32_t src_stride_u = img->mPlane[1].mRowInc;
-        int32_t src_stride_v = img->mPlane[2].mRowInc;
-        uint8_t* dst_y = view.data()[0];
-        uint8_t* dst_u = view.data()[1];
-        uint8_t* dst_v = view.data()[2];
-        int32_t dst_stride_y = view.layout().planes[0].rowInc;
-        int32_t dst_stride_u = view.layout().planes[1].rowInc;
-        int32_t dst_stride_v = view.layout().planes[2].rowInc;
-        if (IsNV12(img) && IsI420(view)) {
+    const uint8_t* src_y = imgBase + img->mPlane[0].mOffset;
+    const uint8_t* src_u = imgBase + img->mPlane[1].mOffset;
+    const uint8_t* src_v = imgBase + img->mPlane[2].mOffset;
+    int32_t src_stride_y = img->mPlane[0].mRowInc;
+    int32_t src_stride_u = img->mPlane[1].mRowInc;
+    int32_t src_stride_v = img->mPlane[2].mRowInc;
+    uint8_t* dst_y = view.data()[0];
+    uint8_t* dst_u = view.data()[1];
+    uint8_t* dst_v = view.data()[2];
+    int32_t dst_stride_y = view.layout().planes[0].rowInc;
+    int32_t dst_stride_u = view.layout().planes[1].rowInc;
+    int32_t dst_stride_v = view.layout().planes[2].rowInc;
+    int width = view.crop().width;
+    int height = view.crop().height;
+    if (IsNV12(img)) {
+        if (IsNV12(view)) {
+            libyuv::CopyPlane(src_y, src_stride_y, dst_y, dst_stride_y, width, height);
+            libyuv::CopyPlane(src_u, src_stride_u, dst_u, dst_stride_u, width, height / 2);
+            return OK;
+        } else if (IsNV21(view)) {
+            if (!libyuv::NV21ToNV12(src_y, src_stride_y, src_u, src_stride_u,
+                                    dst_y, dst_stride_y, dst_v, dst_stride_v, width, height)) {
+                return OK;
+            }
+        } else if (IsI420(view)) {
             if (!libyuv::NV12ToI420(src_y, src_stride_y, src_u, src_stride_u, dst_y, dst_stride_y,
-                                    dst_u, dst_stride_u, dst_v, dst_stride_v, view.width(),
-                                    view.height())) {
+                                    dst_u, dst_stride_u, dst_v, dst_stride_v, width, height)) {
                 return OK;
             }
-        } else {
+        }
+    } else if (IsNV21(img)) {
+        if (IsNV12(view)) {
+            if (!libyuv::NV21ToNV12(src_y, src_stride_y, src_v, src_stride_v,
+                                    dst_y, dst_stride_y, dst_u, dst_stride_u, width, height)) {
+                return OK;
+            }
+        } else if (IsNV21(view)) {
+            libyuv::CopyPlane(src_y, src_stride_y, dst_y, dst_stride_y, width, height);
+            libyuv::CopyPlane(src_v, src_stride_v, dst_v, dst_stride_v, width, height / 2);
+            return OK;
+        } else if (IsI420(view)) {
+            if (!libyuv::NV21ToI420(src_y, src_stride_y, src_v, src_stride_v, dst_y, dst_stride_y,
+                                    dst_u, dst_stride_u, dst_v, dst_stride_v, width, height)) {
+                return OK;
+            }
+        }
+    } else if (IsI420(img)) {
+        if (IsNV12(view)) {
             if (!libyuv::I420ToNV12(src_y, src_stride_y, src_u, src_stride_u, src_v, src_stride_v,
-                                    dst_y, dst_stride_y, dst_u, dst_stride_u, view.width(),
-                                    view.height())) {
+                                    dst_y, dst_stride_y, dst_u, dst_stride_u, width, height)) {
                 return OK;
             }
+        } else if (IsNV21(view)) {
+            if (!libyuv::I420ToNV21(src_y, src_stride_y, src_u, src_stride_u, src_v, src_stride_v,
+                                    dst_y, dst_stride_y, dst_v, dst_stride_v, width, height)) {
+                return OK;
+            }
+        } else if (IsI420(view)) {
+            libyuv::CopyPlane(src_y, src_stride_y, dst_y, dst_stride_y, width, height);
+            libyuv::CopyPlane(src_u, src_stride_u, dst_u, dst_stride_u, width / 2, height / 2);
+            libyuv::CopyPlane(src_v, src_stride_v, dst_v, dst_stride_v, width / 2, height / 2);
+            return OK;
         }
     }
     return _ImageCopy<false>(view, img, imgBase);
@@ -225,6 +299,20 @@
             && layout.planes[layout.PLANE_V].offset == 1);
 }
 
+bool IsNV21(const C2GraphicView &view) {
+    if (!IsYUV420(view)) {
+        return false;
+    }
+    const C2PlanarLayout &layout = view.layout();
+    return (layout.rootPlanes == 2
+            && layout.planes[layout.PLANE_U].colInc == 2
+            && layout.planes[layout.PLANE_U].rootIx == layout.PLANE_V
+            && layout.planes[layout.PLANE_U].offset == 1
+            && layout.planes[layout.PLANE_V].colInc == 2
+            && layout.planes[layout.PLANE_V].rootIx == layout.PLANE_V
+            && layout.planes[layout.PLANE_V].offset == 0);
+}
+
 bool IsI420(const C2GraphicView &view) {
     if (!IsYUV420(view)) {
         return false;
@@ -261,6 +349,15 @@
             && (img->mPlane[2].mOffset - img->mPlane[1].mOffset == 1));
 }
 
+bool IsNV21(const MediaImage2 *img) {
+    if (!IsYUV420(img)) {
+        return false;
+    }
+    return (img->mPlane[1].mColInc == 2
+            && img->mPlane[2].mColInc == 2
+            && (img->mPlane[1].mOffset - img->mPlane[2].mOffset == 1));
+}
+
 bool IsI420(const MediaImage2 *img) {
     if (!IsYUV420(img)) {
         return false;
@@ -270,6 +367,76 @@
             && img->mPlane[2].mOffset > img->mPlane[1].mOffset);
 }
 
+FlexLayout GetYuv420FlexibleLayout() {
+    static FlexLayout sLayout = []{
+        AHardwareBuffer_Desc desc = {
+            16,  // width
+            16,  // height
+            1,   // layers
+            AHARDWAREBUFFER_FORMAT_Y8Cb8Cr8_420,
+            AHARDWAREBUFFER_USAGE_CPU_READ_OFTEN | AHARDWAREBUFFER_USAGE_CPU_WRITE_OFTEN,
+            0,   // stride
+            0,   // rfu0
+            0,   // rfu1
+        };
+        AHardwareBuffer *buffer = nullptr;
+        int ret = AHardwareBuffer_allocate(&desc, &buffer);
+        if (ret != 0) {
+            return FLEX_LAYOUT_UNKNOWN;
+        }
+        class AutoCloser {
+        public:
+            AutoCloser(AHardwareBuffer *buffer) : mBuffer(buffer), mLocked(false) {}
+            ~AutoCloser() {
+                if (mLocked) {
+                    AHardwareBuffer_unlock(mBuffer, nullptr);
+                }
+                AHardwareBuffer_release(mBuffer);
+            }
+
+            void setLocked() { mLocked = true; }
+
+        private:
+            AHardwareBuffer *mBuffer;
+            bool mLocked;
+        } autoCloser(buffer);
+        AHardwareBuffer_Planes planes;
+        ret = AHardwareBuffer_lockPlanes(
+                buffer,
+                AHARDWAREBUFFER_USAGE_CPU_READ_OFTEN | AHARDWAREBUFFER_USAGE_CPU_WRITE_OFTEN,
+                -1,       // fence
+                nullptr,  // rect
+                &planes);
+        if (ret != 0) {
+            AHardwareBuffer_release(buffer);
+            return FLEX_LAYOUT_UNKNOWN;
+        }
+        autoCloser.setLocked();
+        if (planes.planeCount != 3) {
+            return FLEX_LAYOUT_UNKNOWN;
+        }
+        if (planes.planes[0].pixelStride != 1) {
+            return FLEX_LAYOUT_UNKNOWN;
+        }
+        if (planes.planes[1].pixelStride == 1 && planes.planes[2].pixelStride == 1) {
+            return FLEX_LAYOUT_PLANAR;
+        }
+        if (planes.planes[1].pixelStride == 2 && planes.planes[2].pixelStride == 2) {
+            ssize_t uvDist =
+                static_cast<uint8_t *>(planes.planes[2].data) -
+                static_cast<uint8_t *>(planes.planes[1].data);
+            if (uvDist == 1) {
+                return FLEX_LAYOUT_SEMIPLANAR_UV;
+            } else if (uvDist == -1) {
+                return FLEX_LAYOUT_SEMIPLANAR_VU;
+            }
+            return FLEX_LAYOUT_UNKNOWN;
+        }
+        return FLEX_LAYOUT_UNKNOWN;
+    }();
+    return sLayout;
+}
+
 MediaImage2 CreateYUV420PlanarMediaImage2(
         uint32_t width, uint32_t height, uint32_t stride, uint32_t vstride) {
     return MediaImage2 {
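
For reference, a minimal sketch (not part of this change) of how the probed flexible-YUV layout can drive a copy-path decision, using the FlexLayout values declared in Codec2BufferUtils.h below:

    switch (GetYuv420FlexibleLayout()) {
        case FLEX_LAYOUT_PLANAR:        /* I420-style three-plane copy */       break;
        case FLEX_LAYOUT_SEMIPLANAR_UV: /* NV12-style interleaved UV copy */    break;
        case FLEX_LAYOUT_SEMIPLANAR_VU: /* NV21-style interleaved VU copy */    break;
        case FLEX_LAYOUT_UNKNOWN:
        default:                        /* fall back to a generic plane copy */ break;
    }
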
diff --git a/media/codec2/sfplugin/utils/Codec2BufferUtils.h b/media/codec2/sfplugin/utils/Codec2BufferUtils.h
index afadf00..af29e81 100644
--- a/media/codec2/sfplugin/utils/Codec2BufferUtils.h
+++ b/media/codec2/sfplugin/utils/Codec2BufferUtils.h
@@ -96,6 +96,11 @@
 bool IsNV12(const C2GraphicView &view);
 
 /**
+ * Returns true iff a view has an NV21 layout.
+ */
+bool IsNV21(const C2GraphicView &view);
+
+/**
  * Returns true iff a view has a I420 layout.
  */
 bool IsI420(const C2GraphicView &view);
@@ -111,10 +116,26 @@
 bool IsNV12(const MediaImage2 *img);
 
 /**
+ * Returns true iff a MediaImage2 has an NV21 layout.
+ */
+bool IsNV21(const MediaImage2 *img);
+
+/**
  * Returns true iff a MediaImage2 has a I420 layout.
  */
 bool IsI420(const MediaImage2 *img);
 
+enum FlexLayout {
+    FLEX_LAYOUT_UNKNOWN,
+    FLEX_LAYOUT_PLANAR,
+    FLEX_LAYOUT_SEMIPLANAR_UV,
+    FLEX_LAYOUT_SEMIPLANAR_VU,
+};
+/**
+ * Returns the layout of the YCBCR_420_888 pixel format.
+ */
+FlexLayout GetYuv420FlexibleLayout();
+
 /**
  * A raw memory block to use for internal buffers.
  *
diff --git a/media/codec2/tests/vndk/C2BufferTest.cpp b/media/codec2/tests/vndk/C2BufferTest.cpp
index a9f8e17..0cfb465 100644
--- a/media/codec2/tests/vndk/C2BufferTest.cpp
+++ b/media/codec2/tests/vndk/C2BufferTest.cpp
@@ -16,11 +16,12 @@
 
 #include <gtest/gtest.h>
 
-#include <C2AllocatorIon.h>
 #include <C2AllocatorGralloc.h>
 #include <C2Buffer.h>
 #include <C2BufferPriv.h>
+#include <C2Config.h>
 #include <C2ParamDef.h>
+#include <C2PlatformSupport.h>
 
 #include <system/graphics.h>
 
@@ -233,10 +234,10 @@
 public:
     C2BufferTest()
         : mBlockPoolId(C2BlockPool::PLATFORM_START),
-          mLinearAllocator(std::make_shared<C2AllocatorIon>('i')),
           mSize(0u),
           mAddr(nullptr),
           mGraphicAllocator(std::make_shared<C2AllocatorGralloc>('g')) {
+        getLinearAllocator(&mLinearAllocator);
     }
 
     ~C2BufferTest() = default;
@@ -329,6 +330,11 @@
     }
 
 private:
+    void getLinearAllocator(std::shared_ptr<C2Allocator>* mLinearAllocator) {
+        std::shared_ptr<C2AllocatorStore> store = android::GetCodec2PlatformAllocatorStore();
+        ASSERT_EQ(store->fetchAllocator(C2AllocatorStore::DEFAULT_LINEAR, mLinearAllocator), C2_OK);
+    }
+
     C2BlockPool::local_id_t mBlockPoolId;
     std::shared_ptr<C2Allocator> mLinearAllocator;
     std::shared_ptr<C2LinearAllocation> mLinearAllocation;
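
For reference, the lookup the test now relies on, as a minimal standalone sketch (not part of this change): the default linear allocator (ion or dmabuf, depending on the device) is fetched from the platform allocator store instead of constructing C2AllocatorIon directly.

    std::shared_ptr<C2Allocator> linearAllocator;
    std::shared_ptr<C2AllocatorStore> store = android::GetCodec2PlatformAllocatorStore();
    c2_status_t res = store->fetchAllocator(C2AllocatorStore::DEFAULT_LINEAR, &linearAllocator);
    // res == C2_OK on success; linearAllocator can then back a linear block pool.
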
diff --git a/media/codec2/vndk/Android.bp b/media/codec2/vndk/Android.bp
index 0401c1d..be81c84 100644
--- a/media/codec2/vndk/Android.bp
+++ b/media/codec2/vndk/Android.bp
@@ -36,9 +36,11 @@
         "C2Buffer.cpp",
         "C2Config.cpp",
         "C2DmaBufAllocator.cpp",
+        "C2Fence.cpp",
         "C2PlatformStorePluginLoader.cpp",
         "C2Store.cpp",
         "platform/C2BqBuffer.cpp",
+        "platform/C2SurfaceSyncObj.cpp",
         "types.cpp",
         "util/C2Debug.cpp",
         "util/C2InterfaceHelper.cpp",
diff --git a/media/codec2/vndk/C2AllocatorGralloc.cpp b/media/codec2/vndk/C2AllocatorGralloc.cpp
index 8e59df1..4ffa3f1 100644
--- a/media/codec2/vndk/C2AllocatorGralloc.cpp
+++ b/media/codec2/vndk/C2AllocatorGralloc.cpp
@@ -42,7 +42,9 @@
          * Usage mask that is passed through from gralloc to Codec 2.0 usage.
          */
         PASSTHROUGH_USAGE_MASK =
-            ~(GRALLOC_USAGE_SW_READ_MASK | GRALLOC_USAGE_SW_WRITE_MASK | GRALLOC_USAGE_PROTECTED)
+            ~static_cast<uint64_t>(GRALLOC_USAGE_SW_READ_MASK |
+                                   GRALLOC_USAGE_SW_WRITE_MASK |
+                                   GRALLOC_USAGE_PROTECTED)
     };
 
     // verify that passthrough mask is within the platform mask
@@ -546,7 +548,19 @@
             status_t err = GraphicBufferMapper::get().lockYCbCr(
                     const_cast<native_handle_t*>(mBuffer), grallocUsage, rect, &ycbcrLayout);
             if (err) {
-                ALOGE("failed transaction: lockYCbCr");
+                ALOGE("failed transaction: lockYCbCr (err=%d)", err);
+                return C2_CORRUPTED;
+            }
+            if (!ycbcrLayout.y || !ycbcrLayout.cb || !ycbcrLayout.cr
+                    || ycbcrLayout.ystride == 0
+                    || ycbcrLayout.cstride == 0
+                    || ycbcrLayout.chroma_step == 0) {
+                ALOGE("invalid layout: lockYCbCr (y=%s cb=%s cr=%s "
+                        "ystride=%zu cstride=%zu chroma_step=%zu)",
+                        ycbcrLayout.y ? "(non-null)" : "(null)",
+                        ycbcrLayout.cb ? "(non-null)" : "(null)",
+                        ycbcrLayout.cr ? "(non-null)" : "(null)",
+                        ycbcrLayout.ystride, ycbcrLayout.cstride, ycbcrLayout.chroma_step);
                 return C2_CORRUPTED;
             }
 
@@ -671,7 +685,10 @@
 
             status_t err = GraphicBufferMapper::get().lockYCbCr(
                     const_cast<native_handle_t*>(mBuffer), grallocUsage, rect, &ycbcrLayout);
-            if (err == OK) {
+            if (err == OK && ycbcrLayout.y && ycbcrLayout.cb && ycbcrLayout.cr
+                    && ycbcrLayout.ystride > 0
+                    && ycbcrLayout.cstride > 0
+                    && ycbcrLayout.chroma_step > 0) {
                 addr[C2PlanarLayout::PLANE_Y] = (uint8_t *)ycbcrLayout.y;
                 addr[C2PlanarLayout::PLANE_U] = (uint8_t *)ycbcrLayout.cb;
                 addr[C2PlanarLayout::PLANE_V] = (uint8_t *)ycbcrLayout.cr;
diff --git a/media/codec2/vndk/C2AllocatorIon.cpp b/media/codec2/vndk/C2AllocatorIon.cpp
index 85623b8..a8528df 100644
--- a/media/codec2/vndk/C2AllocatorIon.cpp
+++ b/media/codec2/vndk/C2AllocatorIon.cpp
@@ -30,10 +30,15 @@
 #include <C2ErrnoUtils.h>
 #include <C2HandleIonInternal.h>
 
+#include <android-base/properties.h>
+
 namespace android {
 
 namespace {
     constexpr size_t USAGE_LRU_CACHE_SIZE = 1024;
+
+    // max padding after ion/dmabuf allocations in bytes
+    constexpr uint32_t MAX_PADDING = 0x8000; // 32KB
 }
 
 /* size_t <=> int(lo), int(hi) conversions */
@@ -376,14 +381,34 @@
         unsigned heapMask, unsigned flags, C2Allocator::id_t id) {
     int bufferFd = -1;
     ion_user_handle_t buffer = -1;
-    size_t alignedSize = align == 0 ? size : (size + align - 1) & ~(align - 1);
+    // NOTE: read this value directly from the system property, as this code has to
+    // run on Android Q but the sysprop was only introduced in Android S.
+    static size_t sPadding =
+        base::GetUintProperty("media.c2.dmabuf.padding", (uint32_t)0, MAX_PADDING);
+    if (sPadding > SIZE_MAX - size) {
+        ALOGD("ion_alloc: size %#zx cannot accommodate padding %#zx", size, sPadding);
+        // use ImplV2 as there is no allocation anyways
+        return new ImplV2(ionFd, size, -1, id, -ENOMEM);
+    }
+
+    size_t allocSize = size + sPadding;
+    if (align) {
+        if (align - 1 > SIZE_MAX - allocSize) {
+            ALOGD("ion_alloc: size %#zx cannot accommodate padding %#zx and alignment %#zx",
+                  size, sPadding, align);
+            // use ImplV2 as there is no allocation anyways
+            return new ImplV2(ionFd, size, -1, id, -ENOMEM);
+        }
+        allocSize += align - 1;
+        allocSize &= ~(align - 1);
+    }
     int ret;
 
     if (ion_is_legacy(ionFd)) {
-        ret = ion_alloc(ionFd, alignedSize, align, heapMask, flags, &buffer);
+        ret = ion_alloc(ionFd, allocSize, align, heapMask, flags, &buffer);
         ALOGV("ion_alloc(ionFd = %d, size = %zu, align = %zu, prot = %d, flags = %d) "
               "returned (%d) ; buffer = %d",
-              ionFd, alignedSize, align, heapMask, flags, ret, buffer);
+              ionFd, allocSize, align, heapMask, flags, ret, buffer);
         if (ret == 0) {
             // get buffer fd for native handle constructor
             ret = ion_share(ionFd, buffer, &bufferFd);
@@ -392,15 +417,15 @@
                 buffer = -1;
             }
         }
-        return new Impl(ionFd, alignedSize, bufferFd, buffer, id, ret);
+        return new Impl(ionFd, allocSize, bufferFd, buffer, id, ret);
 
     } else {
-        ret = ion_alloc_fd(ionFd, alignedSize, align, heapMask, flags, &bufferFd);
+        ret = ion_alloc_fd(ionFd, allocSize, align, heapMask, flags, &bufferFd);
         ALOGV("ion_alloc_fd(ionFd = %d, size = %zu, align = %zu, prot = %d, flags = %d) "
               "returned (%d) ; bufferFd = %d",
-              ionFd, alignedSize, align, heapMask, flags, ret, bufferFd);
+              ionFd, allocSize, align, heapMask, flags, ret, bufferFd);
 
-        return new ImplV2(ionFd, alignedSize, bufferFd, id, ret);
+        return new ImplV2(ionFd, allocSize, bufferFd, id, ret);
     }
 }
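
For reference, a self-contained sketch (not part of this change) of the padded, aligned size computation introduced above, including its overflow checks; align is assumed to be zero or a power of two.

    #include <cstddef>
    #include <cstdint>

    // Returns false if size + padding (+ alignment slack) would overflow size_t.
    static bool computeAllocSize(size_t size, size_t padding, size_t align, size_t *out) {
        if (padding > SIZE_MAX - size) return false;
        size_t allocSize = size + padding;
        if (align) {
            if (align - 1 > SIZE_MAX - allocSize) return false;
            allocSize = (allocSize + align - 1) & ~(align - 1);
        }
        *out = allocSize;
        return true;
    }
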
 
diff --git a/media/codec2/vndk/C2DmaBufAllocator.cpp b/media/codec2/vndk/C2DmaBufAllocator.cpp
index 750aa31..6d8552a 100644
--- a/media/codec2/vndk/C2DmaBufAllocator.cpp
+++ b/media/codec2/vndk/C2DmaBufAllocator.cpp
@@ -16,11 +16,13 @@
 
 //#define LOG_NDEBUG 0
 #define LOG_TAG "C2DmaBufAllocator"
+
 #include <BufferAllocator/BufferAllocator.h>
 #include <C2Buffer.h>
 #include <C2Debug.h>
 #include <C2DmaBufAllocator.h>
 #include <C2ErrnoUtils.h>
+
 #include <linux/ion.h>
 #include <sys/mman.h>
 #include <unistd.h>  // getpagesize, size_t, close, dup
@@ -28,14 +30,15 @@
 
 #include <list>
 
-#ifdef __ANDROID_APEX__
 #include <android-base/properties.h>
-#endif
 
 namespace android {
 
 namespace {
-constexpr size_t USAGE_LRU_CACHE_SIZE = 1024;
+    constexpr size_t USAGE_LRU_CACHE_SIZE = 1024;
+
+    // max padding after ion/dmabuf allocations in bytes
+    constexpr uint32_t MAX_PADDING = 0x8000; // 32KB
 }
 
 /* =========================== BUFFER HANDLE =========================== */
@@ -250,8 +253,11 @@
     int ret = 0;
 
     bufferFd = alloc.Alloc(heap_name, size, flags);
-    if (bufferFd < 0) ret = bufferFd;
+    if (bufferFd < 0) {
+        ret = bufferFd;
+    }
 
+    // this may be a non-working handle if bufferFd is negative
     mHandle = C2HandleBuf(bufferFd, size);
     mId = id;
     mInit = c2_status_t(c2_map_errno<ENOMEM, EACCES, EINVAL>(ret));
@@ -360,8 +366,22 @@
         return ret;
     }
 
+    // TODO: should we pad before mapping usage?
+
+    // NOTE: read this value directly from the system property, as this code has to
+    // run on Android Q but the sysprop was only introduced in Android S.
+    static size_t sPadding =
+        base::GetUintProperty("media.c2.dmabuf.padding", (uint32_t)0, MAX_PADDING);
+    if (sPadding > SIZE_MAX - capacity) {
+        // size would overflow
+        ALOGD("dmabuf_alloc: size %#x cannot accommodate padding %#zx", capacity, sPadding);
+        return C2_NO_MEMORY;
+    }
+
+    size_t allocSize = (size_t)capacity + sPadding;
+    // TODO: should we align allocation size to mBlockSize to reflect the true allocation size?
     std::shared_ptr<C2DmaBufAllocation> alloc = std::make_shared<C2DmaBufAllocation>(
-            mBufferAllocator, capacity, heap_name, flags, getId());
+            mBufferAllocator, allocSize, heap_name, flags, getId());
     ret = alloc->status();
     if (ret == C2_OK) {
         *allocation = alloc;
diff --git a/media/codec2/vndk/C2Fence.cpp b/media/codec2/vndk/C2Fence.cpp
new file mode 100644
index 0000000..9c5183e
--- /dev/null
+++ b/media/codec2/vndk/C2Fence.cpp
@@ -0,0 +1,145 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "C2FenceFactory"
+#include <utils/Log.h>
+
+#include <C2FenceFactory.h>
+#include <C2SurfaceSyncObj.h>
+
+class C2Fence::Impl {
+public:
+    virtual c2_status_t wait(c2_nsecs_t timeoutNs) = 0;
+
+    virtual bool valid() const = 0;
+
+    virtual bool ready() const = 0;
+
+    virtual int fd() const = 0;
+
+    virtual bool isHW() const = 0;
+
+    virtual ~Impl() = default;
+
+    Impl() = default;
+};
+
+c2_status_t C2Fence::wait(c2_nsecs_t timeoutNs) {
+    if (mImpl) {
+        return mImpl->wait(timeoutNs);
+    }
+    // null fence is always signalled.
+    return C2_OK;
+}
+
+bool C2Fence::valid() const {
+    if (mImpl) {
+        return mImpl->valid();
+    }
+    // null fence is always valid.
+    return true;
+}
+
+bool C2Fence::ready() const {
+    if (mImpl) {
+        return mImpl->ready();
+    }
+    // null fence is always signalled.
+    return true;
+}
+
+int C2Fence::fd() const {
+    if (mImpl) {
+        return mImpl->fd();
+    }
+    // null fence does not have fd.
+    return -1;
+}
+
+bool C2Fence::isHW() const {
+    if (mImpl) {
+        return mImpl->isHW();
+    }
+    return false;
+}
+
+/**
+ * Fence implementation for C2BufferQueueBlockPool based block allocation.
+ * The implementation supports the whole C2Fence interface except fd().
+ */
+class _C2FenceFactory::SurfaceFenceImpl: public C2Fence::Impl {
+public:
+    virtual c2_status_t wait(c2_nsecs_t timeoutNs) {
+        if (mPtr) {
+            return mPtr->waitForChange(mWaitId, timeoutNs);
+        }
+        return C2_OK;
+    }
+
+    virtual bool valid() const {
+        return mPtr;
+    }
+
+    virtual bool ready() const {
+        uint32_t status;
+        if (mPtr) {
+            mPtr->lock();
+            status = mPtr->getWaitIdLocked();
+            mPtr->unlock();
+
+            return status != mWaitId;
+        }
+        return true;
+    }
+
+    virtual int fd() const {
+        // does not support fd, since this is shared mem and futex based
+        return -1;
+    }
+
+    virtual bool isHW() const {
+        return false;
+    }
+
+    virtual ~SurfaceFenceImpl() {};
+
+    SurfaceFenceImpl(std::shared_ptr<C2SurfaceSyncMemory> syncMem, uint32_t waitId) :
+            mSyncMem(syncMem),
+            mPtr(syncMem ? syncMem->mem() : nullptr),
+            mWaitId(syncMem ? waitId : 0) {}
+private:
+    const std::shared_ptr<const C2SurfaceSyncMemory> mSyncMem; // This is for life-cycle guarantee
+    C2SyncVariables *const mPtr;
+    const uint32_t mWaitId;
+};
+
+C2Fence::C2Fence(std::shared_ptr<Impl> impl) : mImpl(impl) {}
+
+C2Fence _C2FenceFactory::CreateSurfaceFence(
+        std::shared_ptr<C2SurfaceSyncMemory> syncMem,
+        uint32_t waitId) {
+    if (syncMem) {
+        C2Fence::Impl *p
+                = new _C2FenceFactory::SurfaceFenceImpl(syncMem, waitId);
+        if (p->valid()) {
+            return C2Fence(std::shared_ptr<C2Fence::Impl>(p));
+        } else {
+            delete p;
+        }
+    }
+    return C2Fence();
+}
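
For reference, a minimal usage sketch (not part of this change); |syncMem| and |waitId| are assumed to come from the buffer-queue block pool:

    C2Fence fence = _C2FenceFactory::CreateSurfaceFence(syncMem, waitId);
    if (!fence.ready()) {
        c2_status_t res = fence.wait(5000000);  // timeout in nanoseconds (5 ms)
        // res is C2_OK once the shared wait id changes, C2_TIMEDOUT otherwise.
    }
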
diff --git a/media/codec2/vndk/include/C2AllocatorGralloc.h b/media/codec2/vndk/include/C2AllocatorGralloc.h
index 578cf76..1da3e14 100644
--- a/media/codec2/vndk/include/C2AllocatorGralloc.h
+++ b/media/codec2/vndk/include/C2AllocatorGralloc.h
@@ -37,7 +37,8 @@
  * Wrap the gralloc handle and metadata into Codec2 handle recognized by
  * C2AllocatorGralloc.
  *
- * @return a new NON-OWNING C2Handle that must be deleted using native_handle_delete.
+ * @return a new NON-OWNING C2Handle that must be closed and deleted using native_handle_close and
+ * native_handle_delete.
  */
 C2Handle *WrapNativeCodec2GrallocHandle(
         const native_handle_t *const handle,
diff --git a/media/codec2/vndk/include/C2BqBufferPriv.h b/media/codec2/vndk/include/C2BqBufferPriv.h
index e1a8138..b2636e9 100644
--- a/media/codec2/vndk/include/C2BqBufferPriv.h
+++ b/media/codec2/vndk/include/C2BqBufferPriv.h
@@ -20,9 +20,14 @@
 #include <android/hardware/graphics/bufferqueue/2.0/IGraphicBufferProducer.h>
 
 #include <C2Buffer.h>
+#include <C2BlockInternal.h>
 
 #include <functional>
 
+namespace android {
+class GraphicBuffer;
+}  // namespace android
+
 class C2BufferQueueBlockPool : public C2BlockPool {
 public:
     C2BufferQueueBlockPool(const std::shared_ptr<C2Allocator> &allocator, const local_id_t localId);
@@ -44,6 +49,14 @@
             C2MemoryUsage usage,
             std::shared_ptr<C2GraphicBlock> *block /* nonnull */) override;
 
+    virtual c2_status_t fetchGraphicBlock(
+            uint32_t width,
+            uint32_t height,
+            uint32_t format,
+            C2MemoryUsage usage,
+            std::shared_ptr<C2GraphicBlock> *block /* nonnull */,
+            C2Fence *fence /* nonnull */) override;
+
     typedef std::function<void(uint64_t producer, int32_t slot, int64_t nsecs)> OnRenderCallback;
 
     /**
@@ -67,6 +80,27 @@
      */
     virtual void configureProducer(const android::sp<HGraphicBufferProducer> &producer);
 
+    /**
+     * Configures an IGBP in order to create blocks. A newly created block is
+     * dequeued from the configured IGBP. The unique id of the IGBP and the slot
+     * number of the block are passed via native_handle. Managing the IGBP is the
+     * caller's responsibility. When no IGBP is configured, blocks are created via
+     * the allocator. Since zero is never used as the unique id of an IGBP, the
+     * unique id bundled in the native_handle is zero when no IGBP is configured or
+     * when the producer is configured as nullptr.
+     *
+     * \param producer      the IGBP, which will be used to fetch blocks
+     * \param syncMemory    Shared memory for synchronization of allocation & deallocation.
+     * \param bqId          Id of IGBP
+     * \param generationId  Generation Id for rendering output
+     * \param consumerUsage consumerUsage flag of the IGBP
+     */
+    virtual void configureProducer(
+            const android::sp<HGraphicBufferProducer> &producer,
+            native_handle_t *syncMemory,
+            uint64_t bqId,
+            uint32_t generationId,
+            uint64_t consumerUsage);
+
 private:
     const std::shared_ptr<C2Allocator> mAllocator;
     const local_id_t mLocalId;
@@ -77,4 +111,72 @@
     friend struct C2BufferQueueBlockPoolData;
 };
 
+class C2SurfaceSyncMemory;
+
+struct C2BufferQueueBlockPoolData : public _C2BlockPoolData {
+public:
+    typedef ::android::hardware::graphics::bufferqueue::V2_0::
+            IGraphicBufferProducer HGraphicBufferProducer;
+
+    // Create a remote BlockPoolData.
+    C2BufferQueueBlockPoolData(
+            uint32_t generation, uint64_t bqId, int32_t bqSlot,
+            const std::shared_ptr<int> &owner,
+            const android::sp<HGraphicBufferProducer>& producer);
+
+    // Create a local BlockPoolData.
+    C2BufferQueueBlockPoolData(
+            uint32_t generation, uint64_t bqId, int32_t bqSlot,
+            const android::sp<HGraphicBufferProducer>& producer,
+            std::shared_ptr<C2SurfaceSyncMemory>, int noUse);
+
+    virtual ~C2BufferQueueBlockPoolData() override;
+
+    virtual type_t getType() const override;
+
+    int migrate(const android::sp<HGraphicBufferProducer>& producer,
+                uint32_t toGeneration, uint64_t toUsage, uint64_t toBqId,
+                android::sp<android::GraphicBuffer>& graphicBuffer, uint32_t oldGeneration,
+                std::shared_ptr<C2SurfaceSyncMemory> syncMem);
+
+private:
+    friend struct _C2BlockFactory;
+
+    // Methods delegated from _C2BlockFactory.
+    void getBufferQueueData(uint32_t* generation, uint64_t* bqId, int32_t* bqSlot) const;
+    bool holdBlockFromBufferQueue(const std::shared_ptr<int>& owner,
+                                  const android::sp<HGraphicBufferProducer>& igbp,
+                                  std::shared_ptr<C2SurfaceSyncMemory> syncMem);
+    bool beginTransferBlockToClient();
+    bool endTransferBlockToClient(bool transfer);
+    bool beginAttachBlockToBufferQueue();
+    bool endAttachBlockToBufferQueue(const std::shared_ptr<int>& owner,
+                                     const android::sp<HGraphicBufferProducer>& igbp,
+                                     std::shared_ptr<C2SurfaceSyncMemory> syncMem,
+                                     uint32_t generation, uint64_t bqId, int32_t bqSlot);
+    bool displayBlockToBufferQueue();
+
+    const bool mLocal;
+    bool mHeld;
+
+    // Data of the corresponding buffer.
+    uint32_t mGeneration;
+    uint64_t mBqId;
+    int32_t mBqSlot;
+
+    // Data of the current IGBP, updated at migrate(). If the values are
+    // mismatched, then the corresponding buffer will not be cancelled back to
+    // IGBP at the destructor.
+    uint32_t mCurrentGeneration;
+    uint64_t mCurrentBqId;
+
+    bool mTransfer; // local transfer to remote
+    bool mAttach; // attach on remote
+    bool mDisplay; // display on remote;
+    std::weak_ptr<int> mOwner;
+    android::sp<HGraphicBufferProducer> mIgbp;
+    std::shared_ptr<C2SurfaceSyncMemory> mSyncMem;
+    mutable std::mutex mLock;
+};
+
 #endif // STAGEFRIGHT_CODEC2_BUFFER_PRIV_H_
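
For reference, a minimal sketch (not part of this change) of the new fence-aware fetch; the pool, dimensions, and usage flags are assumptions, and the exact status returned while blocked is up to the implementation:

    std::shared_ptr<C2GraphicBlock> block;
    C2Fence fence;
    c2_status_t err = pool->fetchGraphicBlock(
            1920, 1080, HAL_PIXEL_FORMAT_YCBCR_420_888,
            C2MemoryUsage(C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE),
            &block, &fence);
    if (err == C2_BLOCKING) {
        fence.wait(5000000);  // then retry fetchGraphicBlock()
    }
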
diff --git a/media/codec2/vndk/include/C2FenceFactory.h b/media/codec2/vndk/include/C2FenceFactory.h
new file mode 100644
index 0000000..d4bed26
--- /dev/null
+++ b/media/codec2/vndk/include/C2FenceFactory.h
@@ -0,0 +1,44 @@
+/*
+ * Copyright (C) 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef STAGEFRIGHT_CODEC2_FENCE_FACTORY_H_
+#define STAGEFRIGHT_CODEC2_FENCE_FACTORY_H_
+
+
+#include <C2Buffer.h>
+
+class C2SurfaceSyncMemory;
+
+/**
+ * C2Fence implementation factory
+ */
+struct _C2FenceFactory {
+
+    class SurfaceFenceImpl;
+
+    /*
+     * Create a C2Fence for a BufferQueue-based blockpool.
+     *
+     * \param syncMem           Shared memory object for synchronization between processes.
+     * \param waitId            wait id for tracking status change for C2Fence.
+     */
+    static C2Fence CreateSurfaceFence(
+            std::shared_ptr<C2SurfaceSyncMemory> syncMem,
+            uint32_t waitId);
+};
+
+
+#endif // STAGEFRIGHT_CODEC2_FENCE_FACTORY_H_
diff --git a/media/codec2/vndk/include/C2SurfaceSyncObj.h b/media/codec2/vndk/include/C2SurfaceSyncObj.h
new file mode 100644
index 0000000..16e9a9d
--- /dev/null
+++ b/media/codec2/vndk/include/C2SurfaceSyncObj.h
@@ -0,0 +1,232 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef STAGEFRIGHT_CODEC2_SURFACE_SYNC_OBJ_H_
+#define STAGEFRIGHT_CODEC2_SURFACE_SYNC_OBJ_H_
+
+#include <cutils/native_handle.h>
+#include <memory>
+#include <atomic>
+
+#include <C2Buffer.h>
+
+/**
+ * Futex based lock / wait implementation for sharing output buffer allocation
+ * information between Framework and HAL.
+ */
+struct C2SyncVariables {
+    enum SyncStatus : uint32_t {
+           STATUS_INIT = 0,         // When surface configuration starts.
+           STATUS_ACTIVE = 1,       // When surface configuration finishes.
+                                    // STATUS_INIT -> STATUS_ACTIVE
+           STATUS_SWITCHING = 2,    // When the surface is replaced by a new surface
+                                    // during surface configuration.
+                                    // STATUS_ACTIVE -> STATUS_SWITCHING
+    };
+
+    /**
+     * Lock the memory region
+     */
+    int lock();
+
+    /**
+     * Unlock the memory region
+     */
+    int unlock();
+
+    /**
+     * Set initial dequeued buffer count.
+     *
+     * \param maxDequeueCount           Initial value of # of max dequeued buffer count
+     * \param curDequeueCount           Initial value of # of current dequeued buffer count
+     */
+    void setInitialDequeueCount(int32_t maxDequeueCount, int32_t curDequeueCount);
+
+    /**
+     * Get a waitId which will be used to implement fence.
+     */
+    uint32_t getWaitIdLocked();
+
+    /**
+     * Return whether the upcoming dequeue operation is not blocked.
+     * If it's blocked and waitId is non-null, waitId is returned to be used for waiting.
+     *
+     * \retval false    dequeue operation is blocked now.
+     * \retval true     dequeue operation is possible.
+     */
+    bool isDequeueableLocked(uint32_t *waitId = nullptr);
+
+    /**
+     * Notify that a buffer is queued. Return whether the upcoming dequeue operation
+     * is not blocked. If it's blocked and waitId is non-null, waitId is returned
+     * to be used for waiting.
+     *
+     * \retval false    dequeue operation is blocked now.
+     * \retval true     dequeue operation is possible.
+     */
+    bool notifyQueuedLocked(uint32_t *waitId = nullptr);
+
+    /**
+     * Notify a buffer is dequeued.
+     */
+    void notifyDequeuedLocked();
+
+    /**
+     * Set sync status.
+     */
+    void setSyncStatusLocked(SyncStatus status);
+
+    /**
+     * Get sync status.
+     */
+    C2SyncVariables::SyncStatus getSyncStatusLocked();
+
+    /**
+     * Update current max dequeue count.
+     */
+    void updateMaxDequeueCountLocked(int32_t maxDequeueCount);
+
+    /**
+     * Wait until status is no longer equal to waitId, or until timeout.
+     *
+     * \param waitId            internal status for waiting until it is changed.
+     * \param timeoutNs         nanoseconds until timeout.
+     *
+     * \retval C2_TIMEDOUT      the change did not happen before the timeout.
+     * \retval C2_BAD_VALUE     invalid event to wait for.
+     * \retval C2_OK            change was signalled.
+     */
+    c2_status_t waitForChange(uint32_t waitId, c2_nsecs_t timeoutNs);
+
+    C2SyncVariables() {}
+
+private:
+    /**
+     * signal one waiter to wake up.
+     */
+    int signal();
+
+    /**
+     * signal all waiters to wake up.
+     */
+    int broadcast();
+
+    /**
+     * wait for signal or broadcast.
+     */
+    int wait();
+
+    std::atomic<uint32_t> mLock;
+    std::atomic<uint32_t> mCond;
+    int32_t mMaxDequeueCount;
+    int32_t mCurDequeueCount;
+    SyncStatus mStatus;
+};
+
+/**
+ * Shared memory in order to synchronize information for Surface(IGBP)
+ * based output buffer allocation.
+ */
+class C2SurfaceSyncMemory {
+public:
+    /**
+     * Shared memory handle in order to synchronize information for
+     * Surface based output buffer allocation.
+     */
+    struct HandleSyncMem : public native_handle_t {
+        HandleSyncMem(int fd, size_t size) :
+            native_handle_t(cHeader),
+            mFds{fd},
+            mInts{int(size & 0xFFFFFFFF),
+                int((uint64_t(size) >> 32) & 0xFFFFFFFF), kMagic} {}
+
+        /** Returns a file descriptor of the shared memory
+         * \return a file descriptor representing the shared memory
+         */
+        int memFd() const {return mFds.mMem;}
+
+        /** Returns the size of the shared memory */
+        size_t size() const {
+            return size_t(unsigned(mInts.mSizeLo))
+                    | size_t(uint64_t(unsigned(mInts.mSizeHi)) << 32);
+        }
+
+        /** Check whether the native handle is in the form of HandleSyncMem
+         *
+         * \return whether the native handle is compatible
+         */
+        static bool isValid(const native_handle_t * const o);
+
+    protected:
+        struct {
+            int mMem;
+        } mFds;
+        struct {
+            int mSizeLo;
+            int mSizeHi;
+            int mMagic;
+        } mInts;
+    private:
+        enum {
+            kMagic = 'ssm\x00',
+            numFds = sizeof(mFds) / sizeof(int),
+            numInts = sizeof(mInts) / sizeof(int),
+            version = sizeof(native_handle_t)
+        };
+        const static native_handle_t cHeader;
+    };
+
+    /**
+     * Imports a shared memory object from a native handle (the shared memory already exists).
+     * This is usually used after native_handle_t is passed via RPC.
+     *
+     * \param handle        handle representing shared memory for output buffer allocation.
+     */
+    static std::shared_ptr<C2SurfaceSyncMemory> Import(native_handle_t *handle);
+
+    /**
+     * Creates a shared memory object for synchronization of output buffer allocation.
+     * Shared memory creation should be done explicitly.
+     *
+     * \param fd            file descriptor to shared memory
+     * \param size          size of the shared memory
+     */
+    static std::shared_ptr<C2SurfaceSyncMemory> Create(int fd, size_t size);
+
+    /**
+     * Returns a handle representing the shared memory for synchronization of
+     * output buffer allocation.
+     */
+    native_handle_t *handle();
+
+    /**
+     * Returns the synchronization object which provides the synchronization primitives.
+     *
+     * \return a pointer to the synchronization primitives class
+     */
+    C2SyncVariables *mem();
+
+    ~C2SurfaceSyncMemory();
+
+private:
+    bool mInit;
+    HandleSyncMem *mHandle;
+    C2SyncVariables *mMem;
+
+    C2SurfaceSyncMemory();
+};
+
+#endif // STAGEFRIGHT_CODEC2_SURFACE_SYNC_OBJ_H_
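
For orientation, a minimal usage sketch of the new C2SurfaceSyncMemory API added above (not part of this change): one side creates the shared region from a file descriptor, ships the handle across the process boundary, and the other side imports it; both then operate on the same C2SyncVariables through mem(). The use of ashmem_create_region() as the backing store and the function names below are illustrative assumptions only.

    #include <cutils/ashmem.h>
    #include <cutils/native_handle.h>
    #include <C2SurfaceSyncObj.h>

    // Framework side: create the shared region (ashmem is one possible backing store).
    std::shared_ptr<C2SurfaceSyncMemory> createLocalSyncMem() {
        int fd = ashmem_create_region("c2-surface-sync", sizeof(C2SyncVariables));
        if (fd < 0) {
            return nullptr;
        }
        // Create() mmaps the region and zero-initializes it.
        return C2SurfaceSyncMemory::Create(fd, sizeof(C2SyncVariables));
    }

    // HAL side: import the handle received over RPC. Import() takes ownership of
    // the handle it is given, so a clone is handed over here.
    std::shared_ptr<C2SurfaceSyncMemory> importRemoteSyncMem(const native_handle_t *received) {
        return C2SurfaceSyncMemory::Import(native_handle_clone(received));
    }
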
diff --git a/media/codec2/vndk/internal/C2BlockInternal.h b/media/codec2/vndk/internal/C2BlockInternal.h
index 4ae946a..c510fca 100644
--- a/media/codec2/vndk/internal/C2BlockInternal.h
+++ b/media/codec2/vndk/internal/C2BlockInternal.h
@@ -52,6 +52,8 @@
 
 struct C2BufferQueueBlockPoolData;
 
+class C2SurfaceSyncMemory;
+
 /**
  * Internal only interface for creating blocks by block pool/buffer passing implementations.
  *
@@ -279,6 +281,8 @@
      *                 anymore.
      * \param igbp     \c IGraphicBufferProducer instance to be assigned to the
      *                 block. This is not needed when the block is local.
+     * \param syncMem  Shared memory block supporting synchronization
+     *                 between the framework and the HAL.
      *
      * \return The previous held status.
      */
@@ -287,7 +291,8 @@
             const std::shared_ptr<_C2BlockPoolData>& poolData,
             const std::shared_ptr<int>& owner,
             const ::android::sp<::android::hardware::graphics::bufferqueue::
-                                V2_0::IGraphicBufferProducer>& igbp = nullptr);
+                                V2_0::IGraphicBufferProducer>& igbp = nullptr,
+            std::shared_ptr<C2SurfaceSyncMemory> syncMem = nullptr);
 
     /**
      * Prepare a block to be transferred to other process. This blocks
@@ -358,6 +363,7 @@
             const std::shared_ptr<int>& owner,
             const ::android::sp<::android::hardware::graphics::bufferqueue::
                                 V2_0::IGraphicBufferProducer>& igbp,
+            std::shared_ptr<C2SurfaceSyncMemory>,
             uint32_t generation,
             uint64_t bqId,
             int32_t bqSlot);
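
A hedged sketch of how the receiving process might use the extended HoldBlockFromBufferQueue() once the block, the owner token, the IGBP and the shared sync memory have been recovered from the transferred message; none of the variable names below are defined by this change.

    // Illustrative only; assumes the includes of C2BlockInternal.h and C2SurfaceSyncObj.h.
    using ::android::hardware::graphics::bufferqueue::V2_0::IGraphicBufferProducer;

    void holdReceivedBlock(const C2Block2D &block,
                           const std::shared_ptr<int> &owner,
                           const ::android::sp<IGraphicBufferProducer> &igbp,
                           const std::shared_ptr<C2SurfaceSyncMemory> &syncMem) {
        std::shared_ptr<_C2BlockPoolData> data =
                _C2BlockFactory::GetGraphicBlockPoolData(block);
        // With this change the sync memory travels with the block, keeping the
        // dequeue accounting consistent across the framework/HAL boundary.
        _C2BlockFactory::HoldBlockFromBufferQueue(data, owner, igbp, syncMem);
    }
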
diff --git a/media/codec2/vndk/platform/C2BqBuffer.cpp b/media/codec2/vndk/platform/C2BqBuffer.cpp
index fff12c4..2944925 100644
--- a/media/codec2/vndk/platform/C2BqBuffer.cpp
+++ b/media/codec2/vndk/platform/C2BqBuffer.cpp
@@ -29,6 +29,8 @@
 #include <C2AllocatorGralloc.h>
 #include <C2BqBufferPriv.h>
 #include <C2BlockInternal.h>
+#include <C2FenceFactory.h>
+#include <C2SurfaceSyncObj.h>
 
 #include <list>
 #include <map>
@@ -54,59 +56,13 @@
 using HGraphicBufferProducer = ::android::hardware::graphics::bufferqueue::V2_0
         ::IGraphicBufferProducer;
 
-struct C2BufferQueueBlockPoolData : public _C2BlockPoolData {
-
-    bool held;
-    bool local;
-    uint32_t generation;
-    uint64_t bqId;
-    int32_t bqSlot;
-    bool transfer; // local transfer to remote
-    bool attach; // attach on remote
-    bool display; // display on remote;
-    std::weak_ptr<int> owner;
-    sp<HGraphicBufferProducer> igbp;
-    std::shared_ptr<C2BufferQueueBlockPool::Impl> localPool;
-    mutable std::mutex lock;
-
-    virtual type_t getType() const override {
-        return TYPE_BUFFERQUEUE;
-    }
-
-    // Create a remote BlockPoolData.
-    C2BufferQueueBlockPoolData(
-            uint32_t generation, uint64_t bqId, int32_t bqSlot,
-            const std::shared_ptr<int> &owner,
-            const sp<HGraphicBufferProducer>& producer);
-
-    // Create a local BlockPoolData.
-    C2BufferQueueBlockPoolData(
-            uint32_t generation, uint64_t bqId, int32_t bqSlot,
-            const std::shared_ptr<C2BufferQueueBlockPool::Impl>& pool);
-
-    virtual ~C2BufferQueueBlockPoolData() override;
-
-    int migrate(const sp<HGraphicBufferProducer>& producer,
-                uint32_t toGeneration, uint64_t toBqId,
-                sp<GraphicBuffer> *buffers, uint32_t oldGeneration);
-};
-
 bool _C2BlockFactory::GetBufferQueueData(
         const std::shared_ptr<const _C2BlockPoolData>& data,
         uint32_t* generation, uint64_t* bqId, int32_t* bqSlot) {
     if (data && data->getType() == _C2BlockPoolData::TYPE_BUFFERQUEUE) {
-        if (generation) {
-            const std::shared_ptr<const C2BufferQueueBlockPoolData> poolData =
-                    std::static_pointer_cast<const C2BufferQueueBlockPoolData>(data);
-            std::scoped_lock<std::mutex> lock(poolData->lock);
-            *generation = poolData->generation;
-            if (bqId) {
-                *bqId = poolData->bqId;
-            }
-            if (bqSlot) {
-                *bqSlot = poolData->bqSlot;
-            }
-        }
+        const std::shared_ptr<const C2BufferQueueBlockPoolData> poolData =
+                std::static_pointer_cast<const C2BufferQueueBlockPoolData>(data);
+        poolData->getBufferQueueData(generation, bqId, bqSlot);
         return true;
     }
     return false;
@@ -115,29 +71,18 @@
 bool _C2BlockFactory::HoldBlockFromBufferQueue(
         const std::shared_ptr<_C2BlockPoolData>& data,
         const std::shared_ptr<int>& owner,
-        const sp<HGraphicBufferProducer>& igbp) {
+        const sp<HGraphicBufferProducer>& igbp,
+        std::shared_ptr<C2SurfaceSyncMemory> syncMem) {
     const std::shared_ptr<C2BufferQueueBlockPoolData> poolData =
             std::static_pointer_cast<C2BufferQueueBlockPoolData>(data);
-    std::scoped_lock<std::mutex> lock(poolData->lock);
-    if (!poolData->local) {
-        poolData->owner = owner;
-        poolData->igbp = igbp;
-    }
-    if (poolData->held) {
-        poolData->held = true;
-        return false;
-    }
-    poolData->held = true;
-    return true;
+    return poolData->holdBlockFromBufferQueue(owner, igbp, syncMem);
 }
 
 bool _C2BlockFactory::BeginTransferBlockToClient(
         const std::shared_ptr<_C2BlockPoolData>& data) {
     const std::shared_ptr<C2BufferQueueBlockPoolData> poolData =
             std::static_pointer_cast<C2BufferQueueBlockPoolData>(data);
-    std::scoped_lock<std::mutex> lock(poolData->lock);
-    poolData->transfer = true;
-    return true;
+    return poolData->beginTransferBlockToClient();
 }
 
 bool _C2BlockFactory::EndTransferBlockToClient(
@@ -145,28 +90,14 @@
         bool transfer) {
     const std::shared_ptr<C2BufferQueueBlockPoolData> poolData =
             std::static_pointer_cast<C2BufferQueueBlockPoolData>(data);
-    std::scoped_lock<std::mutex> lock(poolData->lock);
-    poolData->transfer = false;
-    if (transfer) {
-        poolData->held = false;
-    }
-    return true;
+    return poolData->endTransferBlockToClient(transfer);
 }
 
 bool _C2BlockFactory::BeginAttachBlockToBufferQueue(
         const std::shared_ptr<_C2BlockPoolData>& data) {
     const std::shared_ptr<C2BufferQueueBlockPoolData> poolData =
             std::static_pointer_cast<C2BufferQueueBlockPoolData>(data);
-    std::scoped_lock<std::mutex> lock(poolData->lock);
-    if (poolData->local || poolData->display ||
-        poolData->attach || !poolData->held) {
-        return false;
-    }
-    if (poolData->bqId == 0) {
-        return false;
-    }
-    poolData->attach = true;
-    return true;
+    return poolData->beginAttachBlockToBufferQueue();
 }
 
 // if display was tried during attach, buffer should be retired ASAP.
@@ -174,47 +105,20 @@
         const std::shared_ptr<_C2BlockPoolData>& data,
         const std::shared_ptr<int>& owner,
         const sp<HGraphicBufferProducer>& igbp,
+        std::shared_ptr<C2SurfaceSyncMemory> syncMem,
         uint32_t generation,
         uint64_t bqId,
         int32_t bqSlot) {
     const std::shared_ptr<C2BufferQueueBlockPoolData> poolData =
             std::static_pointer_cast<C2BufferQueueBlockPoolData>(data);
-    std::scoped_lock<std::mutex> lock(poolData->lock);
-    if (poolData->local || !poolData->attach ) {
-        return false;
-    }
-    if (poolData->display) {
-        poolData->attach = false;
-        poolData->held = false;
-        return false;
-    }
-    poolData->attach = false;
-    poolData->held = true;
-    poolData->owner = owner;
-    poolData->igbp = igbp;
-    poolData->generation = generation;
-    poolData->bqId = bqId;
-    poolData->bqSlot = bqSlot;
-    return true;
+    return poolData->endAttachBlockToBufferQueue(owner, igbp, syncMem, generation, bqId, bqSlot);
 }
 
 bool _C2BlockFactory::DisplayBlockToBufferQueue(
         const std::shared_ptr<_C2BlockPoolData>& data) {
     const std::shared_ptr<C2BufferQueueBlockPoolData> poolData =
             std::static_pointer_cast<C2BufferQueueBlockPoolData>(data);
-    std::scoped_lock<std::mutex> lock(poolData->lock);
-    if (poolData->local || poolData->display || !poolData->held) {
-        return false;
-    }
-    if (poolData->bqId == 0) {
-        return false;
-    }
-    poolData->display = true;
-    if (poolData->attach) {
-        return false;
-    }
-    poolData->held = false;
-    return true;
+    return poolData->displayBlockToBufferQueue();
 }
 
 std::shared_ptr<C2GraphicBlock> _C2BlockFactory::CreateGraphicBlock(
@@ -267,8 +171,8 @@
     return stamp;
 }
 
-bool getGenerationNumber(const sp<HGraphicBufferProducer> &producer,
-                         uint32_t *generation) {
+bool getGenerationNumberAndUsage(const sp<HGraphicBufferProducer> &producer,
+                                 uint32_t *generation, uint64_t *usage) {
     status_t status{};
     int slot{};
     bool bufferNeedsReallocation{};
@@ -302,7 +206,7 @@
     // instead of a new allocation.
     transResult = producer->requestBuffer(
             slot,
-            [&status, &slotBuffer, &generation](
+            [&status, &slotBuffer, &generation, &usage](
                     HStatus hStatus,
                     HBuffer const& hBuffer,
                     uint32_t generationNumber){
@@ -310,6 +214,7 @@
                         h2b(hBuffer, &slotBuffer) &&
                         slotBuffer) {
                     *generation = generationNumber;
+                    *usage = slotBuffer->getUsage();
                     slotBuffer->setGenerationNumber(generationNumber);
                 } else {
                     status = android::BAD_VALUE;
@@ -330,12 +235,58 @@
 class C2BufferQueueBlockPool::Impl
         : public std::enable_shared_from_this<C2BufferQueueBlockPool::Impl> {
 private:
+    c2_status_t dequeueBuffer(
+            uint32_t width,
+            uint32_t height,
+            uint32_t format,
+            C2AndroidMemoryUsage androidUsage,
+            int *slot, bool *needsRealloc, sp<Fence> *fence) {
+        status_t status{};
+        using Input = HGraphicBufferProducer::DequeueBufferInput;
+        using Output = HGraphicBufferProducer::DequeueBufferOutput;
+        Return<void> transResult = mProducer->dequeueBuffer(
+                Input{
+                    width,
+                    height,
+                    format,
+                    androidUsage.asGrallocUsage()},
+                [&status, slot, needsRealloc,
+                 fence](HStatus hStatus,
+                         int32_t hSlot,
+                         Output const& hOutput) {
+                    *slot = static_cast<int>(hSlot);
+                    if (!h2b(hStatus, &status) ||
+                            !h2b(hOutput.fence, fence)) {
+                        status = ::android::BAD_VALUE;
+                    } else {
+                        *needsRealloc =
+                                hOutput.bufferNeedsReallocation;
+                    }
+                });
+        if (!transResult.isOk() || status != android::OK) {
+            if (transResult.isOk()) {
+                ++mDqFailure;
+                if (status == android::INVALID_OPERATION ||
+                    status == android::TIMED_OUT ||
+                    status == android::WOULD_BLOCK) {
+                    // Dequeue buffer is blocked temporarily. Retrying is
+                    // required.
+                    return C2_BLOCKING;
+                }
+            }
+            ALOGD("cannot dequeue buffer %d", status);
+            return C2_BAD_VALUE;
+        }
+        return C2_OK;
+    }
+
     c2_status_t fetchFromIgbp_l(
             uint32_t width,
             uint32_t height,
             uint32_t format,
             C2MemoryUsage usage,
-            std::shared_ptr<C2GraphicBlock> *block /* nonnull */) {
+            std::shared_ptr<C2GraphicBlock> *block /* nonnull */,
+            C2Fence *c2Fence) {
         // We have an IGBP now.
         C2AndroidMemoryUsage androidUsage = usage;
         status_t status{};
@@ -344,41 +295,39 @@
         sp<Fence> fence = new Fence();
         ALOGV("tries to dequeue buffer");
 
+        C2SyncVariables *syncVar = mSyncMem ? mSyncMem->mem(): nullptr;
         { // Call dequeueBuffer().
-            using Input = HGraphicBufferProducer::DequeueBufferInput;
-            using Output = HGraphicBufferProducer::DequeueBufferOutput;
-            Return<void> transResult = mProducer->dequeueBuffer(
-                    Input{
-                        width,
-                        height,
-                        format,
-                        androidUsage.asGrallocUsage()},
-                    [&status, &slot, &bufferNeedsReallocation,
-                     &fence](HStatus hStatus,
-                             int32_t hSlot,
-                             Output const& hOutput) {
-                        slot = static_cast<int>(hSlot);
-                        if (!h2b(hStatus, &status) ||
-                                !h2b(hOutput.fence, &fence)) {
-                            status = ::android::BAD_VALUE;
-                        } else {
-                            bufferNeedsReallocation =
-                                    hOutput.bufferNeedsReallocation;
-                        }
-                    });
-            if (!transResult.isOk() || status != android::OK) {
-                if (transResult.isOk()) {
-                    ++mDqFailure;
-                    if (status == android::INVALID_OPERATION ||
-                        status == android::TIMED_OUT ||
-                        status == android::WOULD_BLOCK) {
-                        // Dequeue buffer is blocked temporarily. Retrying is
-                        // required.
-                        return C2_BLOCKING;
+            c2_status_t c2Status;
+            if (syncVar) {
+                uint32_t waitId;
+                syncVar->lock();
+                if (!syncVar->isDequeueableLocked(&waitId)) {
+                    syncVar->unlock();
+                    if (c2Fence) {
+                        *c2Fence = _C2FenceFactory::CreateSurfaceFence(mSyncMem, waitId);
                     }
+                    return C2_BLOCKING;
                 }
-                ALOGD("cannot dequeue buffer %d", status);
-                return C2_BAD_VALUE;
+                if (syncVar->getSyncStatusLocked() != C2SyncVariables::STATUS_ACTIVE) {
+                    waitId = syncVar->getWaitIdLocked();
+                    syncVar->unlock();
+                    if (c2Fence) {
+                        *c2Fence = _C2FenceFactory::CreateSurfaceFence(mSyncMem, waitId);
+                    }
+                    return C2_BLOCKING;
+                }
+                c2Status = dequeueBuffer(width, height, format, androidUsage,
+                              &slot, &bufferNeedsReallocation, &fence);
+                if (c2Status == C2_OK) {
+                    syncVar->notifyDequeuedLocked();
+                }
+                syncVar->unlock();
+            } else {
+                c2Status = dequeueBuffer(width, height, format, usage,
+                              &slot, &bufferNeedsReallocation, &fence);
+            }
+            if (c2Status != C2_OK) {
+                return c2Status;
             }
             mDqFailure = 0;
             mLastDqTs = getTimestampNow();
@@ -389,18 +338,41 @@
             return C2_BAD_VALUE;
         }
         ALOGV("dequeued a buffer successfully");
+        bool dequeueable = false;
+        uint32_t waitId;
         if (fence) {
             static constexpr int kFenceWaitTimeMs = 10;
 
             status_t status = fence->wait(kFenceWaitTimeMs);
             if (status == -ETIME) {
                 // fence is not signalled yet.
-                (void)mProducer->cancelBuffer(slot, hFenceWrapper.getHandle()).isOk();
+                if (syncVar) {
+                    syncVar->lock();
+                    (void)mProducer->cancelBuffer(slot, hFenceWrapper.getHandle()).isOk();
+                    dequeueable = syncVar->notifyQueuedLocked(&waitId);
+                    syncVar->unlock();
+                    if (c2Fence) {
+                        *c2Fence = dequeueable ? C2Fence() :
+                                _C2FenceFactory::CreateSurfaceFence(mSyncMem, waitId);
+                    }
+                } else {
+                    (void)mProducer->cancelBuffer(slot, hFenceWrapper.getHandle()).isOk();
+                }
                 return C2_BLOCKING;
             }
             if (status != android::NO_ERROR) {
                 ALOGD("buffer fence wait error %d", status);
-                (void)mProducer->cancelBuffer(slot, hFenceWrapper.getHandle()).isOk();
+                if (syncVar) {
+                    syncVar->lock();
+                    (void)mProducer->cancelBuffer(slot, hFenceWrapper.getHandle()).isOk();
+                    syncVar->notifyQueuedLocked();
+                    syncVar->unlock();
+                    if (c2Fence) {
+                        *c2Fence = C2Fence();
+                    }
+                } else {
+                    (void)mProducer->cancelBuffer(slot, hFenceWrapper.getHandle()).isOk();
+                }
                 return C2_BAD_VALUE;
             } else if (mRenderCallback) {
                 nsecs_t signalTime = fence->getSignalTime();
@@ -440,7 +412,17 @@
                 return C2_BAD_VALUE;
             } else if (status != android::NO_ERROR) {
                 slotBuffer.clear();
-                (void)mProducer->cancelBuffer(slot, hFenceWrapper.getHandle()).isOk();
+                if (syncVar) {
+                    syncVar->lock();
+                    (void)mProducer->cancelBuffer(slot, hFenceWrapper.getHandle()).isOk();
+                    syncVar->notifyQueuedLocked();
+                    syncVar->unlock();
+                    if (c2Fence) {
+                        *c2Fence = C2Fence();
+                    }
+                } else {
+                    (void)mProducer->cancelBuffer(slot, hFenceWrapper.getHandle()).isOk();
+                }
                 return C2_BAD_VALUE;
             }
             if (mGeneration == 0) {
@@ -463,20 +445,36 @@
                 std::shared_ptr<C2GraphicAllocation> alloc;
                 c2_status_t err = mAllocator->priorGraphicAllocation(c2Handle, &alloc);
                 if (err != C2_OK) {
+                    native_handle_close(c2Handle);
+                    native_handle_delete(c2Handle);
                     return err;
                 }
                 std::shared_ptr<C2BufferQueueBlockPoolData> poolData =
                         std::make_shared<C2BufferQueueBlockPoolData>(
                                 slotBuffer->getGenerationNumber(),
                                 mProducerId, slot,
-                                shared_from_this());
+                                mProducer, mSyncMem, 0);
                 mPoolDatas[slot] = poolData;
                 *block = _C2BlockFactory::CreateGraphicBlock(alloc, poolData);
                 return C2_OK;
             }
             // Block was not created. call requestBuffer# again next time.
             slotBuffer.clear();
-            (void)mProducer->cancelBuffer(slot, hFenceWrapper.getHandle()).isOk();
+            if (syncVar) {
+                syncVar->lock();
+                (void)mProducer->cancelBuffer(slot, hFenceWrapper.getHandle()).isOk();
+                syncVar->notifyQueuedLocked();
+                syncVar->unlock();
+                if (c2Fence) {
+                    *c2Fence = C2Fence();
+                }
+            } else {
+                (void)mProducer->cancelBuffer(slot, hFenceWrapper.getHandle()).isOk();
+            }
+            return C2_BAD_VALUE;
+        }
+        if (c2Fence) {
+            *c2Fence = C2Fence();
         }
         return C2_BAD_VALUE;
     }
@@ -506,7 +504,8 @@
             uint32_t height,
             uint32_t format,
             C2MemoryUsage usage,
-            std::shared_ptr<C2GraphicBlock> *block /* nonnull */) {
+            std::shared_ptr<C2GraphicBlock> *block /* nonnull */,
+            C2Fence *fence) {
         block->reset();
         if (mInit != C2_OK) {
             return mInit;
@@ -537,17 +536,19 @@
             }
             std::shared_ptr<C2BufferQueueBlockPoolData> poolData =
                     std::make_shared<C2BufferQueueBlockPoolData>(
-                            0, (uint64_t)0, ~0, shared_from_this());
+                            0, (uint64_t)0, ~0, nullptr, nullptr, 0);
             *block = _C2BlockFactory::CreateGraphicBlock(alloc, poolData);
             ALOGV("allocated a buffer successfully");
 
             return C2_OK;
         }
-        c2_status_t status = fetchFromIgbp_l(width, height, format, usage, block);
+        c2_status_t status = fetchFromIgbp_l(width, height, format, usage, block, fence);
         if (status == C2_BLOCKING) {
             lock.unlock();
-            // in order not to drain cpu from component's spinning
-            ::usleep(kMaxIgbpRetryDelayUs);
+            if (!fence) {
+                // in order not to drain cpu from component's spinning
+                ::usleep(kMaxIgbpRetryDelayUs);
+            }
         }
         return status;
     }
@@ -557,10 +558,12 @@
         mRenderCallback = renderCallback;
     }
 
+    /* This overload is kept for compatibility with older HAL requests. */
     void configureProducer(const sp<HGraphicBufferProducer> &producer) {
         uint64_t producerId = 0;
         uint32_t generation = 0;
-        bool haveGeneration = false;
+        uint64_t usage = 0;
+        bool bqInformation = false;
         if (producer) {
             Return<uint64_t> transResult = producer->getUniqueId();
             if (!transResult.isOk()) {
@@ -568,14 +571,32 @@
                 return;
             }
             producerId = static_cast<uint64_t>(transResult);
-            // TODO: provide gneration number from parameter.
-            haveGeneration = getGenerationNumber(producer, &generation);
-            if (!haveGeneration) {
+            bqInformation = getGenerationNumberAndUsage(producer, &generation, &usage);
+            if (!bqInformation) {
                 ALOGW("get generationNumber failed %llu",
                       (unsigned long long)producerId);
             }
         }
+        configureProducer(producer, nullptr, producerId, generation, usage, bqInformation);
+    }
+
+    void configureProducer(const sp<HGraphicBufferProducer> &producer,
+                           native_handle_t *syncHandle,
+                           uint64_t producerId,
+                           uint32_t generation,
+                           uint64_t usage,
+                           bool bqInformation) {
+        std::shared_ptr<C2SurfaceSyncMemory> c2SyncMem;
+        if (syncHandle) {
+            if (!producer) {
+                native_handle_close(syncHandle);
+                native_handle_delete(syncHandle);
+            } else {
+                c2SyncMem = C2SurfaceSyncMemory::Import(syncHandle);
+            }
+        }
         int migrated = 0;
+        std::shared_ptr<C2SurfaceSyncMemory> oldMem;
         // poolDatas dtor should not be called during lock is held.
         std::shared_ptr<C2BufferQueueBlockPoolData>
                 poolDatas[NUM_BUFFER_SLOTS];
@@ -595,22 +616,30 @@
             if (producer) {
                 mProducer = producer;
                 mProducerId = producerId;
-                mGeneration = haveGeneration ? generation : 0;
+                mGeneration = bqInformation ? generation : 0;
             } else {
                 mProducer = nullptr;
                 mProducerId = 0;
                 mGeneration = 0;
                 ALOGW("invalid producer producer(%d), generation(%d)",
-                      (bool)producer, haveGeneration);
+                      (bool)producer, bqInformation);
             }
-            if (mProducer && haveGeneration) { // migrate buffers
+            oldMem = mSyncMem; // prevent destruction while the lock is held.
+            mSyncMem = c2SyncMem;
+            C2SyncVariables *syncVar = mSyncMem ? mSyncMem->mem() : nullptr;
+            if (syncVar) {
+                syncVar->lock();
+                syncVar->setSyncStatusLocked(C2SyncVariables::STATUS_ACTIVE);
+                syncVar->unlock();
+            }
+            if (mProducer && bqInformation) { // migrate buffers
                 for (int i = 0; i < NUM_BUFFER_SLOTS; ++i) {
                     std::shared_ptr<C2BufferQueueBlockPoolData> data =
                             mPoolDatas[i].lock();
                     if (data) {
                         int slot = data->migrate(
-                                mProducer, generation,
-                                producerId, mBuffers, oldGeneration);
+                                mProducer, generation, usage,
+                                producerId, mBuffers[i], oldGeneration, mSyncMem);
                         if (slot >= 0) {
                             buffers[slot] = mBuffers[i];
                             poolDatas[slot] = data;
@@ -624,7 +653,7 @@
                 mPoolDatas[i] = poolDatas[i];
             }
         }
-        if (producer && haveGeneration) {
+        if (producer && bqInformation) {
             ALOGD("local generation change %u , "
                   "bqId: %llu migrated buffers # %d",
                   generation, (unsigned long long)producerId, migrated);
@@ -634,17 +663,6 @@
 private:
     friend struct C2BufferQueueBlockPoolData;
 
-    void cancel(uint32_t generation, uint64_t igbp_id, int32_t igbp_slot) {
-        bool cancelled = false;
-        {
-        std::scoped_lock<std::mutex> lock(mMutex);
-        if (generation == mGeneration && igbp_id == mProducerId && mProducer) {
-            (void)mProducer->cancelBuffer(igbp_slot, hidl_handle{}).isOk();
-            cancelled = true;
-        }
-        }
-    }
-
     c2_status_t mInit;
     uint64_t mProducerId;
     uint32_t mGeneration;
@@ -662,71 +680,123 @@
 
     sp<GraphicBuffer> mBuffers[NUM_BUFFER_SLOTS];
     std::weak_ptr<C2BufferQueueBlockPoolData> mPoolDatas[NUM_BUFFER_SLOTS];
+
+    std::shared_ptr<C2SurfaceSyncMemory> mSyncMem;
 };
 
 C2BufferQueueBlockPoolData::C2BufferQueueBlockPoolData(
         uint32_t generation, uint64_t bqId, int32_t bqSlot,
         const std::shared_ptr<int>& owner,
         const sp<HGraphicBufferProducer>& producer) :
-        held(producer && bqId != 0), local(false),
-        generation(generation), bqId(bqId), bqSlot(bqSlot),
-        transfer(false), attach(false), display(false),
-        owner(owner), igbp(producer),
-        localPool() {
+        mLocal(false), mHeld(producer && bqId != 0),
+        mGeneration(generation), mBqId(bqId), mBqSlot(bqSlot),
+        mCurrentGeneration(generation), mCurrentBqId(bqId),
+        mTransfer(false), mAttach(false), mDisplay(false),
+        mOwner(owner), mIgbp(producer) {
 }
 
 C2BufferQueueBlockPoolData::C2BufferQueueBlockPoolData(
         uint32_t generation, uint64_t bqId, int32_t bqSlot,
-        const std::shared_ptr<C2BufferQueueBlockPool::Impl>& pool) :
-        held(true), local(true),
-        generation(generation), bqId(bqId), bqSlot(bqSlot),
-        transfer(false), attach(false), display(false),
-        igbp(pool ? pool->mProducer : nullptr),
-        localPool(pool) {
+        const android::sp<HGraphicBufferProducer>& producer,
+        std::shared_ptr<C2SurfaceSyncMemory> syncMem, int noUse) :
+        mLocal(true), mHeld(true),
+        mGeneration(generation), mBqId(bqId), mBqSlot(bqSlot),
+        mCurrentGeneration(generation), mCurrentBqId(bqId),
+        mTransfer(false), mAttach(false), mDisplay(false),
+        mIgbp(producer), mSyncMem(syncMem) {
+            (void)noUse;
 }
 
 C2BufferQueueBlockPoolData::~C2BufferQueueBlockPoolData() {
-    if (!held || bqId == 0) {
+    if (!mHeld || mBqId == 0 || !mIgbp) {
         return;
     }
-    if (local) {
-        if (localPool) {
-            localPool->cancel(generation, bqId, bqSlot);
+
+    if (mLocal) {
+        if (mGeneration == mCurrentGeneration && mBqId == mCurrentBqId) {
+            C2SyncVariables *syncVar = mSyncMem ? mSyncMem->mem() : nullptr;
+            if (syncVar) {
+                syncVar->lock();
+                if (syncVar->getSyncStatusLocked() == C2SyncVariables::STATUS_ACTIVE) {
+                    mIgbp->cancelBuffer(mBqSlot, hidl_handle{}).isOk();
+                    syncVar->notifyQueuedLocked();
+                }
+                syncVar->unlock();
+            } else {
+                mIgbp->cancelBuffer(mBqSlot, hidl_handle{}).isOk();
+            }
         }
-    } else if (igbp && !owner.expired()) {
-        igbp->cancelBuffer(bqSlot, hidl_handle{}).isOk();
+    } else if (!mOwner.expired()) {
+        C2SyncVariables *syncVar = mSyncMem ? mSyncMem->mem() : nullptr;
+        if (syncVar) {
+            syncVar->lock();
+            if (syncVar->getSyncStatusLocked() != C2SyncVariables::STATUS_SWITCHING) {
+                mIgbp->cancelBuffer(mBqSlot, hidl_handle{}).isOk();
+                syncVar->notifyQueuedLocked();
+            }
+            syncVar->unlock();
+        } else {
+            mIgbp->cancelBuffer(mBqSlot, hidl_handle{}).isOk();
+        }
     }
 }
+
+C2BufferQueueBlockPoolData::type_t C2BufferQueueBlockPoolData::getType() const {
+    return TYPE_BUFFERQUEUE;
+}
+
 int C2BufferQueueBlockPoolData::migrate(
         const sp<HGraphicBufferProducer>& producer,
-        uint32_t toGeneration, uint64_t toBqId,
-        sp<GraphicBuffer> *buffers, uint32_t oldGeneration) {
-    std::scoped_lock<std::mutex> l(lock);
-    if (!held || bqId == 0) {
+        uint32_t toGeneration, uint64_t toUsage, uint64_t toBqId,
+        sp<GraphicBuffer>& graphicBuffer, uint32_t oldGeneration,
+        std::shared_ptr<C2SurfaceSyncMemory> syncMem) {
+    std::scoped_lock<std::mutex> l(mLock);
+
+    mCurrentBqId = toBqId;
+    mCurrentGeneration = toGeneration;
+
+    if (!mHeld || mBqId == 0) {
         ALOGV("buffer is not owned");
         return -1;
     }
-    if (!local || !localPool) {
+    if (!mLocal) {
         ALOGV("pool is not local");
         return -1;
     }
-    if (bqSlot < 0 || bqSlot >= NUM_BUFFER_SLOTS || !buffers[bqSlot]) {
+    if (mBqSlot < 0 || mBqSlot >= NUM_BUFFER_SLOTS) {
         ALOGV("slot is not in effect");
         return -1;
     }
-    if (toGeneration == generation && bqId == toBqId) {
+    if (!graphicBuffer) {
+        ALOGV("buffer is null");
+        return -1;
+    }
+    if (toGeneration == mGeneration && mBqId == toBqId) {
         ALOGV("cannot migrate to same bufferqueue");
         return -1;
     }
-    if (oldGeneration != generation) {
+    if (oldGeneration != mGeneration) {
         ALOGV("cannot migrate stale buffer");
+        return -1;
     }
-    if (transfer) {
+    if (mTransfer) {
         // either transferred or detached.
         ALOGV("buffer is in transfer");
         return -1;
     }
-    sp<GraphicBuffer> const& graphicBuffer = buffers[bqSlot];
+
+    if (toUsage != graphicBuffer->getUsage()) {
+        sp<GraphicBuffer> newBuffer = new GraphicBuffer(
+            graphicBuffer->handle, GraphicBuffer::CLONE_HANDLE,
+            graphicBuffer->width, graphicBuffer->height, graphicBuffer->format,
+            graphicBuffer->layerCount, toUsage, graphicBuffer->stride);
+        if (newBuffer->initCheck() == android::NO_ERROR) {
+            graphicBuffer = std::move(newBuffer);
+        } else {
+            ALOGW("%s() failed to update usage, original usage=%" PRIx64 ", toUsage=%" PRIx64,
+                  __func__, graphicBuffer->getUsage(), toUsage);
+        }
+    }
     graphicBuffer->setGenerationNumber(toGeneration);
 
     HBuffer hBuffer{};
@@ -755,13 +825,124 @@
         return -1;
     }
     ALOGV("local migration from gen %u : %u slot %d : %d",
-          generation, toGeneration, bqSlot, slot);
-    generation = toGeneration;
-    bqId = toBqId;
-    bqSlot = slot;
+          mGeneration, toGeneration, mBqSlot, slot);
+    mIgbp = producer;
+    mGeneration = toGeneration;
+    mBqId = toBqId;
+    mBqSlot = slot;
+    mSyncMem = syncMem;
+
+    C2SyncVariables *syncVar = syncMem ? syncMem->mem() : nullptr;
+    if (syncVar) {
+        syncVar->lock();
+        syncVar->notifyDequeuedLocked();
+        syncVar->unlock();
+    }
     return slot;
 }
 
+void C2BufferQueueBlockPoolData::getBufferQueueData(
+        uint32_t* generation, uint64_t* bqId, int32_t* bqSlot) const {
+    if (generation) {
+        std::scoped_lock<std::mutex> lock(mLock);
+        *generation = mGeneration;
+        if (bqId) {
+            *bqId = mBqId;
+        }
+        if (bqSlot) {
+            *bqSlot = mBqSlot;
+        }
+    }
+}
+
+bool C2BufferQueueBlockPoolData::holdBlockFromBufferQueue(
+        const std::shared_ptr<int>& owner,
+        const sp<HGraphicBufferProducer>& igbp,
+        std::shared_ptr<C2SurfaceSyncMemory> syncMem) {
+    std::scoped_lock<std::mutex> lock(mLock);
+    if (!mLocal) {
+        mOwner = owner;
+        mIgbp = igbp;
+        mSyncMem = syncMem;
+    }
+    if (mHeld) {
+        return false;
+    }
+    mHeld = true;
+    return true;
+}
+
+bool C2BufferQueueBlockPoolData::beginTransferBlockToClient() {
+    std::scoped_lock<std::mutex> lock(mLock);
+    mTransfer = true;
+    return true;
+}
+
+bool C2BufferQueueBlockPoolData::endTransferBlockToClient(bool transfer) {
+    std::scoped_lock<std::mutex> lock(mLock);
+    mTransfer = false;
+    if (transfer) {
+        mHeld = false;
+    }
+    return true;
+}
+
+bool C2BufferQueueBlockPoolData::beginAttachBlockToBufferQueue() {
+    std::scoped_lock<std::mutex> lock(mLock);
+    if (mLocal || mDisplay ||
+        mAttach || !mHeld) {
+        return false;
+    }
+    if (mBqId == 0) {
+        return false;
+    }
+    mAttach = true;
+    return true;
+}
+
+bool C2BufferQueueBlockPoolData::endAttachBlockToBufferQueue(
+        const std::shared_ptr<int>& owner,
+        const sp<HGraphicBufferProducer>& igbp,
+        std::shared_ptr<C2SurfaceSyncMemory> syncMem,
+        uint32_t generation,
+        uint64_t bqId,
+        int32_t bqSlot) {
+    std::scoped_lock<std::mutex> lock(mLock);
+    if (mLocal || !mAttach) {
+        return false;
+    }
+    if (mDisplay) {
+        mAttach = false;
+        mHeld = false;
+        return false;
+    }
+    mAttach = false;
+    mHeld = true;
+    mOwner = owner;
+    mIgbp = igbp;
+    mSyncMem = syncMem;
+    mGeneration = generation;
+    mBqId = bqId;
+    mBqSlot = bqSlot;
+    return true;
+}
+
+bool C2BufferQueueBlockPoolData::displayBlockToBufferQueue() {
+    std::scoped_lock<std::mutex> lock(mLock);
+    if (mLocal || mDisplay || !mHeld) {
+        return false;
+    }
+    if (mBqId == 0) {
+        return false;
+    }
+    mDisplay = true;
+    if (mAttach) {
+        return false;
+    }
+    mHeld = false;
+    return true;
+}
+
 C2BufferQueueBlockPool::C2BufferQueueBlockPool(
         const std::shared_ptr<C2Allocator> &allocator, const local_id_t localId)
         : mAllocator(allocator), mLocalId(localId), mImpl(new Impl(allocator)) {}
@@ -775,7 +956,20 @@
         C2MemoryUsage usage,
         std::shared_ptr<C2GraphicBlock> *block /* nonnull */) {
     if (mImpl) {
-        return mImpl->fetchGraphicBlock(width, height, format, usage, block);
+        return mImpl->fetchGraphicBlock(width, height, format, usage, block, nullptr);
+    }
+    return C2_CORRUPTED;
+}
+
+c2_status_t C2BufferQueueBlockPool::fetchGraphicBlock(
+        uint32_t width,
+        uint32_t height,
+        uint32_t format,
+        C2MemoryUsage usage,
+        std::shared_ptr<C2GraphicBlock> *block /* nonnull */,
+        C2Fence *fence /* nonnull */) {
+    if (mImpl) {
+        return mImpl->fetchGraphicBlock(width, height, format, usage, block, fence);
     }
     return C2_CORRUPTED;
 }
@@ -786,6 +980,18 @@
     }
 }
 
+void C2BufferQueueBlockPool::configureProducer(
+        const sp<HGraphicBufferProducer> &producer,
+        native_handle_t *syncMemory,
+        uint64_t bqId,
+        uint32_t generationId,
+        uint64_t consumerUsage) {
+    if (mImpl) {
+        mImpl->configureProducer(
+               producer, syncMemory, bqId, generationId, consumerUsage, true);
+    }
+}
+
 void C2BufferQueueBlockPool::setRenderCallback(const OnRenderCallback &renderCallback) {
     if (mImpl) {
         mImpl->setRenderCallback(renderCallback);
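
The key behavioral change in this file is that fetchGraphicBlock() can now hand back a C2Fence instead of sleeping and retrying when the surface has no free slot. A hedged caller-side sketch of the new overload (the function name and the timeout value are illustrative):

    c2_status_t fetchWithFence(const std::shared_ptr<C2BufferQueueBlockPool> &pool,
                               uint32_t width, uint32_t height, uint32_t format,
                               C2MemoryUsage usage,
                               std::shared_ptr<C2GraphicBlock> *block) {
        C2Fence fence;
        c2_status_t err = pool->fetchGraphicBlock(width, height, format, usage, block, &fence);
        if (err == C2_BLOCKING) {
            // Instead of the old usleep() retry loop, wait on the surface fence
            // until the consumer queues a buffer back (or until it times out).
            fence.wait(100000000 /* 100 ms in ns */);
            // ...then the caller can retry fetchGraphicBlock().
        }
        return err;
    }
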
diff --git a/media/codec2/vndk/platform/C2SurfaceSyncObj.cpp b/media/codec2/vndk/platform/C2SurfaceSyncObj.cpp
new file mode 100644
index 0000000..587992e
--- /dev/null
+++ b/media/codec2/vndk/platform/C2SurfaceSyncObj.cpp
@@ -0,0 +1,265 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "C2SurfaceSyncObj"
+#include <limits.h>
+#include <linux/futex.h>
+#include <sys/mman.h>
+#include <sys/syscall.h>
+#include <sys/time.h>
+#include <utils/Log.h>
+
+#include <chrono>
+#include <C2SurfaceSyncObj.h>
+
+const native_handle_t C2SurfaceSyncMemory::HandleSyncMem::cHeader = {
+    C2SurfaceSyncMemory::HandleSyncMem::version,
+    C2SurfaceSyncMemory::HandleSyncMem::numFds,
+    C2SurfaceSyncMemory::HandleSyncMem::numInts,
+    {}
+};
+
+bool C2SurfaceSyncMemory::HandleSyncMem::isValid(const native_handle_t * const o) {
+    if (!o || memcmp(o, &cHeader, sizeof(cHeader))) {
+        return false;
+    }
+
+    const HandleSyncMem *other = static_cast<const HandleSyncMem*>(o);
+    return other->mInts.mMagic == kMagic;
+}
+
+C2SurfaceSyncMemory::C2SurfaceSyncMemory()
+    : mInit(false), mHandle(nullptr), mMem(nullptr) {}
+
+C2SurfaceSyncMemory::~C2SurfaceSyncMemory() {
+    if (mInit) {
+        if (mMem) {
+            munmap(static_cast<void *>(mMem), mHandle->size());
+        }
+        if (mHandle) {
+            native_handle_close(mHandle);
+            native_handle_delete(mHandle);
+        }
+    }
+}
+
+std::shared_ptr<C2SurfaceSyncMemory> C2SurfaceSyncMemory::Import(
+        native_handle_t *handle) {
+    if (!HandleSyncMem::isValid(handle)) {
+        return nullptr;
+    }
+
+    HandleSyncMem *o = static_cast<HandleSyncMem*>(handle);
+    void *ptr = mmap(NULL, o->size(), PROT_READ | PROT_WRITE, MAP_SHARED, o->memFd(), 0);
+
+    if (ptr == MAP_FAILED) {
+        native_handle_close(handle);
+        native_handle_delete(handle);
+        return nullptr;
+    }
+
+    std::shared_ptr<C2SurfaceSyncMemory> syncMem(new C2SurfaceSyncMemory);
+    syncMem->mInit = true;
+    syncMem->mHandle = o;
+    syncMem->mMem = static_cast<C2SyncVariables*>(ptr);
+    return syncMem;
+}
+
+std::shared_ptr<C2SurfaceSyncMemory> C2SurfaceSyncMemory::Create(int fd, size_t size) {
+    if (fd < 0 || size == 0) {
+        return nullptr;
+    }
+    HandleSyncMem *handle = new HandleSyncMem(fd, size);
+
+    void *ptr = mmap(NULL, size, PROT_READ | PROT_WRITE, MAP_SHARED, fd, 0);
+    if (ptr == MAP_FAILED) {
+        native_handle_close(handle);
+        native_handle_delete(handle);
+        return nullptr;
+    }
+    memset(ptr, 0, size);
+
+    std::shared_ptr<C2SurfaceSyncMemory> syncMem(new C2SurfaceSyncMemory);
+    syncMem->mInit = true;
+    syncMem->mHandle = handle;
+    syncMem->mMem = static_cast<C2SyncVariables*>(ptr);
+    return syncMem;
+}
+
+native_handle_t *C2SurfaceSyncMemory::handle() {
+    return !mInit ? nullptr : mHandle;
+}
+
+C2SyncVariables *C2SurfaceSyncMemory::mem() {
+    return !mInit ? nullptr : mMem;
+}
+
+namespace {
+    constexpr int kSpinNumForLock = 100;
+    constexpr int kSpinNumForUnlock = 200;
+
+    enum : uint32_t {
+        FUTEX_UNLOCKED = 0,
+        FUTEX_LOCKED_UNCONTENDED = 1,  // user-space locking
+        FUTEX_LOCKED_CONTENDED = 2,    // futex locking
+    };
+}
+
+int C2SyncVariables::lock() {
+    uint32_t old;
+    for (int i = 0; i < kSpinNumForLock; i++) {
+        old = 0;
+        if (mLock.compare_exchange_strong(old, FUTEX_LOCKED_UNCONTENDED)) {
+            return 0;
+        }
+        sched_yield();
+    }
+
+    if (old == FUTEX_LOCKED_UNCONTENDED)
+        old = mLock.exchange(FUTEX_LOCKED_CONTENDED);
+
+    while (old) {
+        (void) syscall(__NR_futex, &mLock, FUTEX_WAIT, FUTEX_LOCKED_CONTENDED, NULL, NULL, 0);
+        old = mLock.exchange(FUTEX_LOCKED_CONTENDED);
+    }
+    return 0;
+}
+
+int C2SyncVariables::unlock() {
+    if (mLock.exchange(FUTEX_UNLOCKED) == FUTEX_LOCKED_UNCONTENDED) return 0;
+
+    for (int i = 0; i < kSpinNumForUnlock; i++) {
+        if (mLock.load()) {
+            uint32_t old = FUTEX_LOCKED_UNCONTENDED;
+            mLock.compare_exchange_strong(old, FUTEX_LOCKED_CONTENDED);
+            if (old) {
+                return 0;
+            }
+        }
+        sched_yield();
+    }
+
+    (void) syscall(__NR_futex, &mLock, FUTEX_WAKE, 1, NULL, NULL, 0);
+    return 0;
+}
+
+void C2SyncVariables::setInitialDequeueCount(
+        int32_t maxDequeueCount, int32_t curDequeueCount) {
+    lock();
+    mMaxDequeueCount = maxDequeueCount;
+    mCurDequeueCount = curDequeueCount;
+    unlock();
+}
+
+uint32_t C2SyncVariables::getWaitIdLocked() {
+    return mCond.load();
+}
+
+bool C2SyncVariables::isDequeueableLocked(uint32_t *waitId) {
+    if (mMaxDequeueCount <= mCurDequeueCount) {
+        if (waitId) {
+            *waitId = getWaitIdLocked();
+        }
+        return false;
+    }
+    return true;
+}
+
+bool C2SyncVariables::notifyQueuedLocked(uint32_t *waitId) {
+    // Note: thundering herds may occur due to edge-triggered signalling.
+    // One waiter is guaranteed to dequeue; the others may wait again.
+    // This minimizes futex syscalls (traps) for the main use case (a single waiter).
+    if (mMaxDequeueCount == mCurDequeueCount--) {
+        broadcast();
+        return true;
+    }
+
+    if (mCurDequeueCount >= mMaxDequeueCount) {
+        if (waitId) {
+            *waitId = getWaitIdLocked();
+        }
+        ALOGV("dequeue blocked %d/%d", mCurDequeueCount, mMaxDequeueCount);
+        return false;
+    }
+    return true;
+}
+
+void C2SyncVariables::notifyDequeuedLocked() {
+    mCurDequeueCount++;
+    ALOGV("dequeue successful %d/%d", mCurDequeueCount, mMaxDequeueCount);
+}
+
+void C2SyncVariables::setSyncStatusLocked(SyncStatus status) {
+    mStatus = status;
+    if (mStatus == STATUS_ACTIVE) {
+        broadcast();
+    }
+}
+
+C2SyncVariables::SyncStatus C2SyncVariables::getSyncStatusLocked() {
+    return mStatus;
+}
+
+void C2SyncVariables::updateMaxDequeueCountLocked(int32_t maxDequeueCount) {
+    mMaxDequeueCount = maxDequeueCount;
+    if (mStatus == STATUS_ACTIVE) {
+        broadcast();
+    }
+}
+
+c2_status_t C2SyncVariables::waitForChange(uint32_t waitId, c2_nsecs_t timeoutNs) {
+    if (timeoutNs < 0) {
+        timeoutNs = 0;
+    }
+    struct timespec tv;
+    tv.tv_sec = timeoutNs / 1000000000;
+    tv.tv_nsec = timeoutNs % 1000000000;
+
+    int ret =  syscall(__NR_futex, &mCond, FUTEX_WAIT, waitId, &tv, NULL, 0);
+    if (ret == 0 || ret == EAGAIN) {
+        return C2_OK;
+    }
+    if (ret == EINTR || ret == ETIMEDOUT) {
+        return C2_TIMED_OUT;
+    }
+    return C2_BAD_VALUE;
+}
+
+int C2SyncVariables::signal() {
+    mCond++;
+
+    (void) syscall(__NR_futex, &mCond, FUTEX_WAKE, 1, NULL, NULL, 0);
+    return 0;
+}
+
+int C2SyncVariables::broadcast() {
+    mCond++;
+
+    (void) syscall(__NR_futex, &mCond, FUTEX_REQUEUE, 1, (void *)INT_MAX, &mLock, 0);
+    return 0;
+}
+
+int C2SyncVariables::wait() {
+    uint32_t old = mCond.load();
+    unlock();
+
+    (void) syscall(__NR_futex, &mCond, FUTEX_WAIT, old, NULL, NULL, 0);
+    while (mLock.exchange(FUTEX_LOCKED_CONTENDED)) {
+        (void) syscall(__NR_futex, &mLock, FUTEX_WAIT, FUTEX_LOCKED_CONTENDED, NULL, NULL, 0);
+    }
+    return 0;
+}
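
Taken together, the futex-backed primitives above implement a small cross-process counter: after setInitialDequeueCount() seeds the limits, the producer side consumes a slot with notifyDequeuedLocked(), the consumer side returns one with notifyQueuedLocked(), and a blocked producer sleeps on the condition word via waitForChange(). A hedged sketch of the intended flow; syncMem is assumed to be an already-shared C2SurfaceSyncMemory and the 100 ms timeout is illustrative.

    bool tryAccountDequeue(const std::shared_ptr<C2SurfaceSyncMemory> &syncMem) {
        C2SyncVariables *sync = syncMem->mem();
        uint32_t waitId = 0;
        sync->lock();
        bool dequeueable = sync->isDequeueableLocked(&waitId);
        if (dequeueable) {
            sync->notifyDequeuedLocked();   // account for the buffer about to be dequeued
        }
        sync->unlock();
        if (!dequeueable) {
            // Sleep until the condition word moves past waitId, i.e. until the
            // consumer side calls notifyQueuedLocked(), or give up after 100 ms.
            sync->waitForChange(waitId, 100000000 /* ns */);
        }
        return dequeueable;
    }
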
diff --git a/media/codecs/m4v_h263/enc/src/mp4enc_api.cpp b/media/codecs/m4v_h263/enc/src/mp4enc_api.cpp
index 4ea3c69..234faef 100644
--- a/media/codecs/m4v_h263/enc/src/mp4enc_api.cpp
+++ b/media/codecs/m4v_h263/enc/src/mp4enc_api.cpp
@@ -527,11 +527,9 @@
     }
 
     /* check bit rate */
-    /* set max bit rate */
     for (i = 0; i < encParams->nLayers; i++)
     {
         encParams->LayerBitRate[i] = encOption->bitRate[i];
-        encParams->LayerMaxBitRate[i] = encOption->bitRate[i];
     }
     if (encParams->nLayers > 1)
     {
@@ -3305,6 +3303,3 @@
 }
 
 #endif /* #ifndef ORIGINAL_VERSION */
-
-
-
diff --git a/media/extractors/mp4/MPEG4Extractor.cpp b/media/extractors/mp4/MPEG4Extractor.cpp
index 314a822..b1d72e8 100644
--- a/media/extractors/mp4/MPEG4Extractor.cpp
+++ b/media/extractors/mp4/MPEG4Extractor.cpp
@@ -2345,7 +2345,7 @@
             if (mLastTrack == NULL)
                 return ERROR_MALFORMED;
 
-            AMediaFormat_setBuffer(mLastTrack->meta, 
+            AMediaFormat_setBuffer(mLastTrack->meta,
                     AMEDIAFORMAT_KEY_ESDS, &buffer[4], chunk_data_size - 4);
 
             if (mPath.size() >= 2
@@ -2427,7 +2427,7 @@
             if (mLastTrack == NULL)
                 return ERROR_MALFORMED;
 
-            AMediaFormat_setBuffer(mLastTrack->meta, 
+            AMediaFormat_setBuffer(mLastTrack->meta,
                     AMEDIAFORMAT_KEY_CSD_AVC, buffer.get(), chunk_data_size);
 
             break;
@@ -2449,7 +2449,7 @@
             if (mLastTrack == NULL)
                 return ERROR_MALFORMED;
 
-            AMediaFormat_setBuffer(mLastTrack->meta, 
+            AMediaFormat_setBuffer(mLastTrack->meta,
                     AMEDIAFORMAT_KEY_CSD_HEVC, buffer.get(), chunk_data_size);
 
             *offset += chunk_size;
@@ -4021,13 +4021,13 @@
                 // custom genre string
                 buffer[size] = '\0';
 
-                AMediaFormat_setString(mFileMetaData, 
+                AMediaFormat_setString(mFileMetaData,
                         metadataKey, (const char *)buffer + 8);
             }
         } else {
             buffer[size] = '\0';
 
-            AMediaFormat_setString(mFileMetaData, 
+            AMediaFormat_setString(mFileMetaData,
                     metadataKey, (const char *)buffer + 8);
         }
     }
@@ -4568,6 +4568,9 @@
 
     if (objectTypeIndication == 0x6B || objectTypeIndication == 0x69) {
         // mp3 audio
+        if (mLastTrack == NULL)
+            return ERROR_MALFORMED;
+
         AMediaFormat_setString(mLastTrack->meta,AMEDIAFORMAT_KEY_MIME, MEDIA_MIMETYPE_AUDIO_MPEG);
         return OK;
     }
@@ -4658,6 +4661,10 @@
         if (offset >= csd_size || csd[offset] != 0x01) {
             return ERROR_MALFORMED;
         }
+
+        if (mLastTrack == NULL) {
+            return ERROR_MALFORMED;
+        }
         // formerly kKeyVorbisInfo
         AMediaFormat_setBuffer(mLastTrack->meta,
                 AMEDIAFORMAT_KEY_CSD_0, &csd[offset], len1);
@@ -6187,9 +6194,13 @@
         if (newBuffer) {
             if (mIsPcm) {
                 // The twos' PCM block reader assumes that all samples has the same size.
-
-                uint32_t samplesToRead = mSampleTable->getLastSampleIndexInChunk()
-                                                      - mCurrentSampleIndex + 1;
+                uint32_t lastSampleIndexInChunk = mSampleTable->getLastSampleIndexInChunk();
+                if (lastSampleIndexInChunk < mCurrentSampleIndex) {
+                    mBuffer->release();
+                    mBuffer = nullptr;
+                    return AMEDIA_ERROR_UNKNOWN;
+                }
+                uint32_t samplesToRead = lastSampleIndexInChunk - mCurrentSampleIndex + 1;
                 if (samplesToRead > kMaxPcmFrameSize) {
                     samplesToRead = kMaxPcmFrameSize;
                 }
@@ -6198,13 +6209,17 @@
                       samplesToRead, size, mCurrentSampleIndex,
                       mSampleTable->getLastSampleIndexInChunk());
 
-               size_t totalSize = samplesToRead * size;
+                size_t totalSize = samplesToRead * size;
+                if (mBuffer->size() < totalSize) {
+                    mBuffer->release();
+                    mBuffer = nullptr;
+                    return AMEDIA_ERROR_UNKNOWN;
+                }
                 uint8_t* buf = (uint8_t *)mBuffer->data();
                 ssize_t bytesRead = mDataSource->readAt(offset, buf, totalSize);
                 if (bytesRead < (ssize_t)totalSize) {
                     mBuffer->release();
                     mBuffer = NULL;
-
                     return AMEDIA_ERROR_IO;
                 }
 
@@ -6258,7 +6273,19 @@
                 if (isSyncSample) {
                     AMediaFormat_setInt32(meta, AMEDIAFORMAT_KEY_IS_SYNC_FRAME, 1);
                 }
- 
+
+                AMediaFormat_setInt64(
+                        meta, "sample-file-offset" /*AMEDIAFORMAT_KEY_SAMPLE_FILE_OFFSET*/,
+                        offset);
+
+                if (mSampleTable != nullptr &&
+                        mCurrentSampleIndex == mSampleTable->getLastSampleIndexInChunk()) {
+                    AMediaFormat_setInt64(
+                    meta,
+                    "last-sample-index-in-chunk" /*AMEDIAFORMAT_KEY_LAST_SAMPLE_INDEX_IN_CHUNK*/,
+                    mSampleTable->getLastSampleIndexInChunk());
+                }
+
                 ++mCurrentSampleIndex;
             }
         }
@@ -6408,6 +6435,17 @@
             AMediaFormat_setInt32(meta, AMEDIAFORMAT_KEY_IS_SYNC_FRAME, 1);
         }
 
+        AMediaFormat_setInt64(
+                meta, "sample-file-offset" /*AMEDIAFORMAT_KEY_SAMPLE_FILE_OFFSET*/, offset);
+
+        if (mSampleTable != nullptr &&
+                mCurrentSampleIndex == mSampleTable->getLastSampleIndexInChunk()) {
+            AMediaFormat_setInt64(
+                    meta,
+                    "last-sample-index-in-chunk" /*AMEDIAFORMAT_KEY_LAST_SAMPLE_INDEX_IN_CHUNK*/,
+                    mSampleTable->getLastSampleIndexInChunk());
+        }
+
         ++mCurrentSampleIndex;
 
         *out = mBuffer;
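
The extractor now attaches two extra per-sample keys, "sample-file-offset" and "last-sample-index-in-chunk". A hedged sketch of how a consumer might read them back from a sample's AMediaFormat; how the format object is obtained is up to the caller and is not shown here.

    #include <media/NdkMediaFormat.h>

    void readChunkHints(AMediaFormat *meta) {
        int64_t sampleFileOffset = -1;
        int64_t lastSampleIndexInChunk = -1;
        if (AMediaFormat_getInt64(meta, "sample-file-offset", &sampleFileOffset)) {
            // sampleFileOffset is the absolute file offset of this sample's payload.
        }
        if (AMediaFormat_getInt64(meta, "last-sample-index-in-chunk", &lastSampleIndexInChunk)) {
            // Only present on the final sample of a chunk.
        }
    }
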
diff --git a/media/janitors/OWNERS-codecs b/media/janitors/codec_OWNERS
similarity index 100%
rename from media/janitors/OWNERS-codecs
rename to media/janitors/codec_OWNERS
diff --git a/media/libaaudio/include/aaudio/AAudio.h b/media/libaaudio/include/aaudio/AAudio.h
index 9072886..3333925 100644
--- a/media/libaaudio/include/aaudio/AAudio.h
+++ b/media/libaaudio/include/aaudio/AAudio.h
@@ -74,8 +74,9 @@
      * The nominal range of the data is [-1.0f, 1.0f).
      * Values outside that range may be clipped.
      *
-     * See also 'floatData' at
-     * https://developer.android.com/reference/android/media/AudioTrack#write(float[],%20int,%20int,%20int)
+     * See also the floatData parameter of
+     * <a href="/reference/android/media/AudioTrack#write(float[],%20int,%20int,%20int)">
+     *   write(float[], int, int, int)</a>.
      */
     AAUDIO_FORMAT_PCM_FLOAT,
 
@@ -196,21 +197,69 @@
 };
 typedef int32_t  aaudio_result_t;
 
+/**
+ * AAudio stream states. For details, refer to
+ * <a href="/ndk/guides/audio/aaudio/aaudio#using-streams">Using an Audio Stream</a>
+ */
 enum
 {
+
+    /**
+     * The stream is created but not initialized yet.
+     */
     AAUDIO_STREAM_STATE_UNINITIALIZED = 0,
+    /**
+     * The stream is in an unrecognized state.
+     */
     AAUDIO_STREAM_STATE_UNKNOWN,
+
+    /**
+     * The stream is open and ready to use.
+     */
     AAUDIO_STREAM_STATE_OPEN,
+    /**
+     * The stream is just starting up.
+     */
     AAUDIO_STREAM_STATE_STARTING,
+    /**
+     * The stream has started.
+     */
     AAUDIO_STREAM_STATE_STARTED,
+    /**
+     * The stream is pausing.
+     */
     AAUDIO_STREAM_STATE_PAUSING,
+    /**
+     * The stream has paused; it can be restarted or flushed.
+     */
     AAUDIO_STREAM_STATE_PAUSED,
+    /**
+     * The stream is being flushed.
+     */
     AAUDIO_STREAM_STATE_FLUSHING,
+    /**
+     * The stream is flushed, ready to be restarted.
+     */
     AAUDIO_STREAM_STATE_FLUSHED,
+    /**
+     * The stream is stopping.
+     */
     AAUDIO_STREAM_STATE_STOPPING,
+    /**
+     * The stream has been stopped.
+     */
     AAUDIO_STREAM_STATE_STOPPED,
+    /**
+     * The stream is closing.
+     */
     AAUDIO_STREAM_STATE_CLOSING,
+    /**
+     * The stream has been closed.
+     */
     AAUDIO_STREAM_STATE_CLOSED,
+    /**
+     * The stream is disconnected from the audio device.
+     */
     AAUDIO_STREAM_STATE_DISCONNECTED
 };
 typedef int32_t aaudio_stream_state_t;
@@ -260,7 +309,8 @@
  * This information is used by certain platforms or routing policies
  * to make more refined volume or routing decisions.
  *
- * Note that these match the equivalent values in {@link android.media.AudioAttributes}
+ * Note that these match the equivalent values in
+ * <a href="/reference/android/media/AudioAttributes">AudioAttributes</a>
  * in the Android Java API.
  *
  * Added in API level 28.
@@ -361,7 +411,8 @@
  * an audio book application) this information might be used by the audio framework to
  * enforce audio focus.
  *
- * Note that these match the equivalent values in {@link android.media.AudioAttributes}
+ * Note that these match the equivalent values in
+ * <a href="/reference/android/media/AudioAttributes">AudioAttributes</a>
  * in the Android Java API.
  *
  * Added in API level 28.
@@ -441,7 +492,8 @@
 /**
  * Specifying if audio may or may not be captured by other apps or the system.
  *
- * Note that these match the equivalent values in {@link android.media.AudioAttributes}
+ * Note that these match the equivalent values in
+ * <a href="/reference/android/media/AudioAttributes">AudioAttributes</a>
  * in the Android Java API.
  *
  * Added in API level 29.
@@ -453,10 +505,11 @@
      * For privacy, the following usages can not be recorded: AAUDIO_VOICE_COMMUNICATION*,
      * AAUDIO_USAGE_NOTIFICATION*, AAUDIO_USAGE_ASSISTANCE* and {@link #AAUDIO_USAGE_ASSISTANT}.
      *
-     * On {@link android.os.Build.VERSION_CODES#Q}, this means only {@link #AAUDIO_USAGE_MEDIA}
-     * and {@link #AAUDIO_USAGE_GAME} may be captured.
+     * On <a href="/reference/android/os/Build.VERSION_CODES#Q">Build.VERSION_CODES.Q</a>,
+     * this means only {@link #AAUDIO_USAGE_MEDIA} and {@link #AAUDIO_USAGE_GAME} may be captured.
      *
-     * See {@link android.media.AudioAttributes#ALLOW_CAPTURE_BY_ALL}.
+     * See <a href="/reference/android/media/AudioAttributes.html#ALLOW_CAPTURE_BY_ALL">
+     * ALLOW_CAPTURE_BY_ALL</a>.
      */
     AAUDIO_ALLOW_CAPTURE_BY_ALL = 1,
     /**
@@ -464,8 +517,9 @@
      *
      * System apps can capture for many purposes like accessibility, user guidance...
      * but have strong restriction. See
-     * {@link android.media.AudioAttributes#ALLOW_CAPTURE_BY_SYSTEM} for what the system apps
-     * can do with the capture audio.
+     * <a href="/reference/android/media/AudioAttributes.html#ALLOW_CAPTURE_BY_SYSTEM">
+     * ALLOW_CAPTURE_BY_SYSTEM</a>
+     * for what the system apps can do with the captured audio.
      */
     AAUDIO_ALLOW_CAPTURE_BY_SYSTEM = 2,
     /**
@@ -473,7 +527,8 @@
      *
      * It is encouraged to use {@link #AAUDIO_ALLOW_CAPTURE_BY_SYSTEM} instead of this value as system apps
      * provide significant and useful features for the user (eg. accessibility).
-     * See {@link android.media.AudioAttributes#ALLOW_CAPTURE_BY_NONE}.
+     * See <a href="/reference/android/media/AudioAttributes.html#ALLOW_CAPTURE_BY_NONE">
+     * ALLOW_CAPTURE_BY_NONE</a>.
      */
     AAUDIO_ALLOW_CAPTURE_BY_NONE = 3,
 };
@@ -580,6 +635,37 @@
 AAUDIO_API void AAudioStreamBuilder_setDeviceId(AAudioStreamBuilder* builder,
                                                 int32_t deviceId) __INTRODUCED_IN(26);
 
+// TODO b/182392769: reexamine if Identity can be used
+/**
+ * Declare the name of the package creating the stream.
+ *
+ * This is usually {@code Context#getPackageName()}.
+ *
+ * If this function is not called, an arbitrary package from the calling uid is used.
+ *
+ * Available since API level 31.
+ *
+ * @param builder reference provided by AAudio_createStreamBuilder()
+ * @param packageName package name of the calling app.
+ */
+AAUDIO_API void AAudioStreamBuilder_setPackageName(AAudioStreamBuilder* builder,
+                                                   const char * packageName) __INTRODUCED_IN(31);
+
+/**
+ * Declare the attribution tag of the context creating the stream.
+ *
+ * This is usually {@code Context#getAttributionTag()}.
+ *
+ * If this function is not called, the default attribution tag of the calling context is used.
+ *
+ * Available since API level 31.
+ *
+ * @param builder reference provided by AAudio_createStreamBuilder()
+ * @param attributionTag attribution tag of the calling context.
+ */
+AAUDIO_API void AAudioStreamBuilder_setAttributionTag(AAudioStreamBuilder* builder,
+        const char * attributionTag) __INTRODUCED_IN(31);
+
 /**
  * Request a sample rate in Hertz.
  *
@@ -772,7 +858,9 @@
  * The default is {@link #AAUDIO_ALLOW_CAPTURE_BY_ALL}.
  *
  * Note that an application can also set its global policy, in which case the most restrictive
- * policy is always applied. See {@link android.media.AudioAttributes#setAllowedCapturePolicy(int)}
+ * policy is always applied. See
+ * <a href="/reference/android/media/AudioManager#setAllowedCapturePolicy(int)">
+ * setAllowedCapturePolicy(int)</a>.
  *
  * Available since API level 29.
  *
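
For orientation, a minimal usage sketch of the two builder calls introduced above, together with the capture-policy setter the reworked docs refer to. The package name, attribution tag, and helper name are placeholders, not part of this patch:

    #include <aaudio/AAudio.h>

    // Illustrative helper only; error handling is reduced to early returns.
    static AAudioStream* openTaggedStream() {
        AAudioStreamBuilder* builder = nullptr;
        if (AAudio_createStreamBuilder(&builder) != AAUDIO_OK) return nullptr;

        // New in this change (API 31): identify the calling package and attribution context.
        AAudioStreamBuilder_setPackageName(builder, "com.example.app");
        AAudioStreamBuilder_setAttributionTag(builder, "exampleTag");

        // Existing API 29 call covered by the capture-policy documentation above.
        AAudioStreamBuilder_setAllowedCapturePolicy(builder, AAUDIO_ALLOW_CAPTURE_BY_NONE);

        AAudioStream* stream = nullptr;
        aaudio_result_t result = AAudioStreamBuilder_openStream(builder, &stream);
        AAudioStreamBuilder_delete(builder);
        return (result == AAUDIO_OK) ? stream : nullptr;
    }
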
diff --git a/media/libaaudio/src/Android.bp b/media/libaaudio/src/Android.bp
index 47cbbb1..fe2d98e 100644
--- a/media/libaaudio/src/Android.bp
+++ b/media/libaaudio/src/Android.bp
@@ -85,6 +85,10 @@
     ],
     export_header_lib_headers: ["libaaudio_headers"],
 
+    export_shared_lib_headers: [
+        "media_permission-aidl-cpp",
+    ],
+
     shared_libs: [
         "libaudioclient",
         "libaudioutils",
@@ -96,6 +100,12 @@
         "libutils",
         "libbinder",
         "aaudio-aidl-cpp",
+        "media_permission-aidl-cpp",
+        "libaudioclient_aidl_conversion",
+    ],
+
+    static_libs: [
+        "media_permission-aidl-cpp",
     ],
 
     cflags: [
@@ -167,6 +177,7 @@
     imports: [
         "audio_common-aidl",
         "shared-file-region-aidl",
+        "media_permission-aidl",
     ],
     backend:
     {
diff --git a/media/libaaudio/src/binding/AAudioStreamRequest.cpp b/media/libaaudio/src/binding/AAudioStreamRequest.cpp
index 536395a..5e0a4bb 100644
--- a/media/libaaudio/src/binding/AAudioStreamRequest.cpp
+++ b/media/libaaudio/src/binding/AAudioStreamRequest.cpp
@@ -31,19 +31,15 @@
 
 AAudioStreamRequest::AAudioStreamRequest(const StreamRequest& parcelable) :
         mConfiguration(std::move(parcelable.params)),
-        mUserId(parcelable.userId),
-        mProcessId(parcelable.processId),
+        mIdentity(parcelable.identity),
         mSharingModeMatchRequired(parcelable.sharingModeMatchRequired),
         mInService(parcelable.inService) {
-    static_assert(sizeof(mUserId) == sizeof(parcelable.userId));
-    static_assert(sizeof(mProcessId) == sizeof(parcelable.processId));
 }
 
 StreamRequest AAudioStreamRequest::parcelable() const {
     StreamRequest result;
     result.params = std::move(mConfiguration).parcelable();
-    result.userId = mUserId;
-    result.processId = mProcessId;
+    result.identity = mIdentity;
     result.sharingModeMatchRequired = mSharingModeMatchRequired;
     result.inService = mInService;
     return result;
@@ -54,8 +50,7 @@
 }
 
 void AAudioStreamRequest::dump() const {
-    ALOGD("mUserId    = %d", mUserId);
-    ALOGD("mProcessId = %d", mProcessId);
+    ALOGD("mIdentity  = %s", mIdentity.toString().c_str());
     ALOGD("mSharingModeMatchRequired = %d", mSharingModeMatchRequired);
     ALOGD("mInService = %d", mInService);
     mConfiguration.dump();
diff --git a/media/libaaudio/src/binding/AAudioStreamRequest.h b/media/libaaudio/src/binding/AAudioStreamRequest.h
index 31d3ea1..02341c8 100644
--- a/media/libaaudio/src/binding/AAudioStreamRequest.h
+++ b/media/libaaudio/src/binding/AAudioStreamRequest.h
@@ -23,6 +23,7 @@
 #include <aaudio/StreamRequest.h>
 
 #include "binding/AAudioStreamConfiguration.h"
+#include <android/media/permission/Identity.h>
 
 namespace aaudio {
 
@@ -33,20 +34,12 @@
     // Construct based on a parcelable representation.
     explicit AAudioStreamRequest(const StreamRequest& parcelable);
 
-    uid_t getUserId() const {
-        return mUserId;
+    const android::media::permission::Identity &getIdentity() const {
+        return mIdentity;
     }
 
-    void setUserId(uid_t userId) {
-        mUserId = userId;
-    }
-
-    pid_t getProcessId() const {
-        return mProcessId;
-    }
-
-    void setProcessId(pid_t processId) {
-        mProcessId = processId;
+    void setIdentity(const android::media::permission::Identity &identity) {
+        mIdentity = identity;
     }
 
     bool isSharingModeMatchRequired() const {
@@ -82,8 +75,7 @@
 
 private:
     AAudioStreamConfiguration  mConfiguration;
-    uid_t                      mUserId = (uid_t) -1;
-    pid_t                      mProcessId = (pid_t) -1;
+    android::media::permission::Identity mIdentity;
     bool                       mSharingModeMatchRequired = false;
     bool                       mInService = false; // Stream opened by AAudioservice
 };
diff --git a/media/libaaudio/src/binding/aidl/aaudio/StreamRequest.aidl b/media/libaaudio/src/binding/aidl/aaudio/StreamRequest.aidl
index 9bf4077..12802e6 100644
--- a/media/libaaudio/src/binding/aidl/aaudio/StreamRequest.aidl
+++ b/media/libaaudio/src/binding/aidl/aaudio/StreamRequest.aidl
@@ -17,11 +17,11 @@
 package aaudio;
 
 import aaudio.StreamParameters;
+import android.media.permission.Identity;
 
 parcelable StreamRequest {
     StreamParameters    params;
-    int                 userId; // = (uid_t) -1;
-    int                 processId; // = (pid_t) -1;
+    Identity            identity;
     boolean             sharingModeMatchRequired; // = false;
     boolean             inService; // = false; // Stream opened by AAudioservice
 }
\ No newline at end of file
diff --git a/media/libaaudio/src/client/AudioStreamInternal.cpp b/media/libaaudio/src/client/AudioStreamInternal.cpp
index 2815c6a..dc961ad 100644
--- a/media/libaaudio/src/client/AudioStreamInternal.cpp
+++ b/media/libaaudio/src/client/AudioStreamInternal.cpp
@@ -28,7 +28,6 @@
 #include <cutils/properties.h>
 
 #include <media/MediaMetricsItem.h>
-#include <utils/String16.h>
 #include <utils/Trace.h>
 
 #include "AudioEndpointParcelable.h"
@@ -39,6 +38,7 @@
 #include "core/AudioStreamBuilder.h"
 #include "fifo/FifoBuffer.h"
 #include "utility/AudioClock.h"
+#include <media/AidlConversion.h>
 
 #include "AudioStreamInternal.h"
 
@@ -49,9 +49,9 @@
 // This is needed to make sense of the logs more easily.
 #define LOG_TAG (mInService ? "AudioStreamInternal_Service" : "AudioStreamInternal_Client")
 
-using android::String16;
 using android::Mutex;
 using android::WrappingBuffer;
+using android::media::permission::Identity;
 
 using namespace aaudio;
 
@@ -107,9 +107,15 @@
     // Request FLOAT for the shared mixer or the device.
     request.getConfiguration().setFormat(AUDIO_FORMAT_PCM_FLOAT);
 
+    // TODO b/182392769: use identity util
+    Identity identity;
+    identity.uid = VALUE_OR_FATAL(android::legacy2aidl_uid_t_int32_t(getuid()));
+    identity.pid = VALUE_OR_FATAL(android::legacy2aidl_pid_t_int32_t(getpid()));
+    identity.packageName = builder.getOpPackageName();
+    identity.attributionTag = builder.getAttributionTag();
+
     // Build the request to send to the server.
-    request.setUserId(getuid());
-    request.setProcessId(getpid());
+    request.setIdentity(identity);
     request.setSharingModeMatchRequired(isSharingModeMatchRequired());
     request.setInService(isInService());
 
diff --git a/media/libaaudio/src/client/AudioStreamInternalCapture.cpp b/media/libaaudio/src/client/AudioStreamInternalCapture.cpp
index 5d311fc..1bbe443 100644
--- a/media/libaaudio/src/client/AudioStreamInternalCapture.cpp
+++ b/media/libaaudio/src/client/AudioStreamInternalCapture.cpp
@@ -268,7 +268,7 @@
 
         if (callbackResult == AAUDIO_CALLBACK_RESULT_STOP) {
             ALOGD("%s(): callback returned AAUDIO_CALLBACK_RESULT_STOP", __func__);
-            result = systemStopFromCallback();
+            result = systemStopInternal();
             break;
         }
     }
diff --git a/media/libaaudio/src/client/AudioStreamInternalPlay.cpp b/media/libaaudio/src/client/AudioStreamInternalPlay.cpp
index b81e5e4..3f17e6b 100644
--- a/media/libaaudio/src/client/AudioStreamInternalPlay.cpp
+++ b/media/libaaudio/src/client/AudioStreamInternalPlay.cpp
@@ -301,7 +301,7 @@
             }
         } else if (callbackResult == AAUDIO_CALLBACK_RESULT_STOP) {
             ALOGD("%s(): callback returned AAUDIO_CALLBACK_RESULT_STOP", __func__);
-            result = systemStopFromCallback();
+            result = systemStopInternal();
             break;
         }
     }
diff --git a/media/libaaudio/src/core/AAudioAudio.cpp b/media/libaaudio/src/core/AAudioAudio.cpp
index cfa7221..d103aca 100644
--- a/media/libaaudio/src/core/AAudioAudio.cpp
+++ b/media/libaaudio/src/core/AAudioAudio.cpp
@@ -87,6 +87,30 @@
     streamBuilder->setDeviceId(deviceId);
 }
 
+AAUDIO_API void AAudioStreamBuilder_setPackageName(AAudioStreamBuilder* builder,
+                                                   const char* packageName)
+{
+    AudioStreamBuilder *streamBuilder = convertAAudioBuilderToStreamBuilder(builder);
+    std::optional<std::string> optionalPackageName;
+    if (packageName != nullptr) {
+      optionalPackageName = std::string(packageName);
+    }
+    // Only system apps can read the op package name. For regular apps, the
+    // regular package name is a sufficient replacement.
+    streamBuilder->setOpPackageName(optionalPackageName);
+}
+
+AAUDIO_API void AAudioStreamBuilder_setAttributionTag(AAudioStreamBuilder* builder,
+                                                      const char* attributionTag)
+{
+    AudioStreamBuilder *streamBuilder = convertAAudioBuilderToStreamBuilder(builder);
+    std::optional<std::string> optionalAttrTag;
+    if (attributionTag != nullptr) {
+      optionalAttrTag = std::string(attributionTag);
+    }
+    streamBuilder->setAttributionTag(optionalAttrTag);
+}
+
 AAUDIO_API void AAudioStreamBuilder_setSampleRate(AAudioStreamBuilder* builder,
                                               int32_t sampleRate)
 {
@@ -209,7 +233,6 @@
     AudioStreamBuilder *streamBuilder = COMMON_GET_FROM_BUILDER_OR_RETURN(streamPtr);
     aaudio_result_t result = streamBuilder->build(&audioStream);
     if (result == AAUDIO_OK) {
-        audioStream->registerPlayerBase();
         *streamPtr = (AAudioStream*) audioStream;
         id = audioStream->getId();
     } else {
@@ -348,7 +371,8 @@
 
     // Don't allow writes when playing with a callback.
     if (audioStream->isDataCallbackActive()) {
-        ALOGD("Cannot write to a callback stream when running.");
+        // A developer requested this warning because it would have saved lots of debugging.
+        ALOGW("%s() - Cannot write to a callback stream when running.", __func__);
         return AAUDIO_ERROR_INVALID_STATE;
     }
 
diff --git a/media/libaaudio/src/core/AAudioStreamParameters.cpp b/media/libaaudio/src/core/AAudioStreamParameters.cpp
index 2c81c91..0d60120 100644
--- a/media/libaaudio/src/core/AAudioStreamParameters.cpp
+++ b/media/libaaudio/src/core/AAudioStreamParameters.cpp
@@ -48,6 +48,8 @@
     mInputPreset          = other.mInputPreset;
     mAllowedCapturePolicy = other.mAllowedCapturePolicy;
     mIsPrivacySensitive   = other.mIsPrivacySensitive;
+    mOpPackageName        = other.mOpPackageName;
+    mAttributionTag       = other.mAttributionTag;
 }
 
 static aaudio_result_t isFormatValid(audio_format_t format) {
@@ -203,4 +205,8 @@
     ALOGD("mInputPreset          = %6d", mInputPreset);
     ALOGD("mAllowedCapturePolicy = %6d", mAllowedCapturePolicy);
     ALOGD("mIsPrivacySensitive   = %s", mIsPrivacySensitive ? "true" : "false");
+    ALOGD("mOpPackageName        = %s", !mOpPackageName.has_value() ?
+        "(null)" : mOpPackageName.value().c_str());
+    ALOGD("mAttributionTag       = %s", !mAttributionTag.has_value() ?
+        "(null)" : mAttributionTag.value().c_str());
 }
diff --git a/media/libaaudio/src/core/AAudioStreamParameters.h b/media/libaaudio/src/core/AAudioStreamParameters.h
index 3e65b37..5737052 100644
--- a/media/libaaudio/src/core/AAudioStreamParameters.h
+++ b/media/libaaudio/src/core/AAudioStreamParameters.h
@@ -136,6 +136,23 @@
         mIsPrivacySensitive = privacySensitive;
     }
 
+    const std::optional<std::string> getOpPackageName() const {
+        return mOpPackageName;
+    }
+
+    // TODO b/182392769: reexamine if Identity can be used
+    void setOpPackageName(const std::optional<std::string> opPackageName) {
+        mOpPackageName = opPackageName;
+    }
+
+    const std::optional<std::string> getAttributionTag() const {
+        return mAttributionTag;
+    }
+
+    void setAttributionTag(const std::optional<std::string> attributionTag) {
+        mAttributionTag = attributionTag;
+    }
+
     /**
      * @return bytes per frame of getFormat()
      */
@@ -167,6 +184,8 @@
     aaudio_allowed_capture_policy_t mAllowedCapturePolicy = AAUDIO_UNSPECIFIED;
     aaudio_session_id_t             mSessionId            = AAUDIO_SESSION_ID_NONE;
     bool                            mIsPrivacySensitive   = false;
+    std::optional<std::string>      mOpPackageName        = {};
+    std::optional<std::string>      mAttributionTag       = {};
 };
 
 } /* namespace aaudio */
diff --git a/media/libaaudio/src/core/AudioStream.cpp b/media/libaaudio/src/core/AudioStream.cpp
index 5078d2c..e8f71be 100644
--- a/media/libaaudio/src/core/AudioStream.cpp
+++ b/media/libaaudio/src/core/AudioStream.cpp
@@ -119,12 +119,16 @@
 
 void AudioStream::logOpen() {
     if (mMetricsId.size() > 0) {
-        android::mediametrics::LogItem(mMetricsId)
-                .set(AMEDIAMETRICS_PROP_PERFORMANCEMODE,
-                     AudioGlobal_convertPerformanceModeToText(getPerformanceMode()))
-                .set(AMEDIAMETRICS_PROP_SHARINGMODE,
-                     AudioGlobal_convertSharingModeToText(getSharingMode()))
-                .record();
+        android::mediametrics::LogItem item(mMetricsId);
+        item.set(AMEDIAMETRICS_PROP_EVENT, AMEDIAMETRICS_PROP_EVENT_VALUE_OPEN)
+            .set(AMEDIAMETRICS_PROP_PERFORMANCEMODE,
+                AudioGlobal_convertPerformanceModeToText(getPerformanceMode()))
+            .set(AMEDIAMETRICS_PROP_SHARINGMODE,
+                AudioGlobal_convertSharingModeToText(getSharingMode()));
+        if (getDirection() == AAUDIO_DIRECTION_OUTPUT) {
+            item.set(AMEDIAMETRICS_PROP_PLAYERIID, mPlayerBase->getPlayerIId());
+        }
+        item.record();
     }
 }
 
@@ -139,13 +143,13 @@
 }
 
 aaudio_result_t AudioStream::systemStart() {
-    std::lock_guard<std::mutex> lock(mStreamLock);
-
     if (collidesWithCallback()) {
         ALOGE("%s cannot be called from a callback!", __func__);
         return AAUDIO_ERROR_INVALID_STATE;
     }
 
+    std::lock_guard<std::mutex> lock(mStreamLock);
+
     switch (getState()) {
         // Is this a good time to start?
         case AAUDIO_STREAM_STATE_OPEN:
@@ -183,7 +187,6 @@
 }
 
 aaudio_result_t AudioStream::systemPause() {
-    std::lock_guard<std::mutex> lock(mStreamLock);
 
     if (!isPauseSupported()) {
         return AAUDIO_ERROR_UNIMPLEMENTED;
@@ -194,6 +197,7 @@
         return AAUDIO_ERROR_INVALID_STATE;
     }
 
+    std::lock_guard<std::mutex> lock(mStreamLock);
     switch (getState()) {
         // Proceed with pausing.
         case AAUDIO_STREAM_STATE_STARTING:
@@ -238,12 +242,12 @@
         return AAUDIO_ERROR_UNIMPLEMENTED;
     }
 
-    std::lock_guard<std::mutex> lock(mStreamLock);
     if (collidesWithCallback()) {
         ALOGE("stream cannot be flushed from a callback!");
         return AAUDIO_ERROR_INVALID_STATE;
     }
 
+    std::lock_guard<std::mutex> lock(mStreamLock);
     aaudio_result_t result = AAudio_isFlushAllowed(getState());
     if (result != AAUDIO_OK) {
         return result;
@@ -252,7 +256,7 @@
     return requestFlush_l();
 }
 
-aaudio_result_t AudioStream::systemStopFromCallback() {
+aaudio_result_t AudioStream::systemStopInternal() {
     std::lock_guard<std::mutex> lock(mStreamLock);
     aaudio_result_t result = safeStop_l();
     if (result == AAUDIO_OK) {
@@ -263,17 +267,12 @@
 }
 
 aaudio_result_t AudioStream::systemStopFromApp() {
-    std::lock_guard<std::mutex> lock(mStreamLock);
+    // This check can and should be done outside the lock.
     if (collidesWithCallback()) {
         ALOGE("stream cannot be stopped by calling from a callback!");
         return AAUDIO_ERROR_INVALID_STATE;
     }
-    aaudio_result_t result = safeStop_l();
-    if (result == AAUDIO_OK) {
-        // We only call this for logging in "dumpsys audio". So ignore return code.
-        (void) mPlayerBase->stopWithStatus();
-    }
-    return result;
+    return systemStopInternal();
 }
 
 aaudio_result_t AudioStream::safeStop_l() {
@@ -312,12 +311,12 @@
 }
 
 aaudio_result_t AudioStream::safeRelease() {
-    // This may get temporarily unlocked in the MMAP release() when joining callback threads.
-    std::lock_guard<std::mutex> lock(mStreamLock);
     if (collidesWithCallback()) {
         ALOGE("%s cannot be called from a callback!", __func__);
         return AAUDIO_ERROR_INVALID_STATE;
     }
+    // This may get temporarily unlocked in the MMAP release() when joining callback threads.
+    std::lock_guard<std::mutex> lock(mStreamLock);
     if (getState() == AAUDIO_STREAM_STATE_CLOSING) { // already released?
         return AAUDIO_OK;
     }
@@ -325,17 +324,14 @@
 }
 
 aaudio_result_t AudioStream::safeReleaseClose() {
-    // This get temporarily unlocked in the MMAP release() when joining callback threads.
-    std::lock_guard<std::mutex> lock(mStreamLock);
     if (collidesWithCallback()) {
         ALOGE("%s cannot be called from a callback!", __func__);
         return AAUDIO_ERROR_INVALID_STATE;
     }
-    releaseCloseFinal_l();
-    return AAUDIO_OK;
+    return safeReleaseCloseInternal();
 }
 
-aaudio_result_t AudioStream::safeReleaseCloseFromCallback() {
+aaudio_result_t AudioStream::safeReleaseCloseInternal() {
     // This get temporarily unlocked in the MMAP release() when joining callback threads.
     std::lock_guard<std::mutex> lock(mStreamLock);
     releaseCloseFinal_l();
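
The AudioStream.cpp hunks above all make the same move: the collidesWithCallback() check now runs before mStreamLock is taken. A condensed sketch of the resulting pattern; the function name is illustrative and not part of the patch:

    aaudio_result_t AudioStream::systemSomething() {  // hypothetical name
        // Thread-identity check first; it needs no lock, and doing it before locking
        // means a callback thread is rejected instead of blocking on mStreamLock while
        // stop()/release() holds that lock and joins the callback.
        if (collidesWithCallback()) {
            ALOGE("%s cannot be called from a callback!", __func__);
            return AAUDIO_ERROR_INVALID_STATE;
        }
        std::lock_guard<std::mutex> lock(mStreamLock);
        // ... state checks and the actual request run under the lock, as before ...
        return AAUDIO_OK;
    }
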
diff --git a/media/libaaudio/src/core/AudioStream.h b/media/libaaudio/src/core/AudioStream.h
index 510ead8..abf62f3 100644
--- a/media/libaaudio/src/core/AudioStream.h
+++ b/media/libaaudio/src/core/AudioStream.h
@@ -408,7 +408,7 @@
     /**
      * This is called internally when an app callback returns AAUDIO_CALLBACK_RESULT_STOP.
      */
-    aaudio_result_t systemStopFromCallback();
+    aaudio_result_t systemStopInternal();
 
     /**
      * Safely RELEASE a stream after taking mStreamLock and checking
@@ -424,7 +424,7 @@
      */
     aaudio_result_t safeReleaseClose();
 
-    aaudio_result_t safeReleaseCloseFromCallback();
+    aaudio_result_t safeReleaseCloseInternal();
 
 protected:
 
@@ -475,6 +475,11 @@
             return mResult;
         }
 
+        // Returns the playerIId if registered, -1 otherwise.
+        int32_t getPlayerIId() const {
+            return mPIId;
+        }
+
     private:
         // Use a weak pointer so the AudioStream can be deleted.
         std::mutex               mParentLock;
diff --git a/media/libaaudio/src/core/AudioStreamBuilder.cpp b/media/libaaudio/src/core/AudioStreamBuilder.cpp
index 630b289..207a8e3 100644
--- a/media/libaaudio/src/core/AudioStreamBuilder.cpp
+++ b/media/libaaudio/src/core/AudioStreamBuilder.cpp
@@ -188,6 +188,7 @@
             }
         }
         if (result == AAUDIO_OK) {
+            audioStream->registerPlayerBase();
             audioStream->logOpen();
             *streamPtr = startUsingStream(audioStream);
         } // else audioStream will go out of scope and be deleted
@@ -281,4 +282,8 @@
     ALOGI("usage  = %6d, contentType = %d, inputPreset = %d, allowedCapturePolicy = %d",
           getUsage(), getContentType(), getInputPreset(), getAllowedCapturePolicy());
     ALOGI("privacy sensitive = %s", isPrivacySensitive() ? "true" : "false");
+    ALOGI("opPackageName = %s", !getOpPackageName().has_value() ?
+        "(null)" : getOpPackageName().value().c_str());
+    ALOGI("attributionTag = %s", !getAttributionTag().has_value() ?
+        "(null)" : getAttributionTag().value().c_str());
 }
diff --git a/media/libaaudio/src/legacy/AudioStreamLegacy.cpp b/media/libaaudio/src/legacy/AudioStreamLegacy.cpp
index fdaa2ab..60eb73a 100644
--- a/media/libaaudio/src/legacy/AudioStreamLegacy.cpp
+++ b/media/libaaudio/src/legacy/AudioStreamLegacy.cpp
@@ -124,7 +124,7 @@
                               __func__, callbackResult);
                     }
                     audioBuffer->size = 0;
-                    systemStopFromCallback();
+                    systemStopInternal();
                     // Disable the callback just in case the system keeps trying to call us.
                     mCallbackEnabled.store(false);
                 }
diff --git a/media/libaaudio/src/legacy/AudioStreamRecord.cpp b/media/libaaudio/src/legacy/AudioStreamRecord.cpp
index 45b2258..7733a04 100644
--- a/media/libaaudio/src/legacy/AudioStreamRecord.cpp
+++ b/media/libaaudio/src/legacy/AudioStreamRecord.cpp
@@ -22,6 +22,7 @@
 
 #include <aaudio/AAudio.h>
 #include <audio_utils/primitives.h>
+#include <media/AidlConversion.h>
 #include <media/AudioRecord.h>
 #include <utils/String16.h>
 
@@ -30,6 +31,8 @@
 #include "utility/AudioClock.h"
 #include "utility/FixedBlockWriter.h"
 
+using android::media::permission::Identity;
+
 using namespace android;
 using namespace aaudio;
 
@@ -152,13 +155,20 @@
             .tags = ""
     };
 
+    // TODO b/182392769: use identity util
+    Identity identity;
+    identity.uid = VALUE_OR_FATAL(legacy2aidl_uid_t_int32_t(getuid()));
+    identity.pid = VALUE_OR_FATAL(legacy2aidl_pid_t_int32_t(getpid()));
+    identity.packageName = builder.getOpPackageName();
+    identity.attributionTag = builder.getAttributionTag();
+
     // ----------- open the AudioRecord ---------------------
     // Might retry, but never more than once.
     for (int i = 0; i < 2; i ++) {
         const audio_format_t requestedInternalFormat = getDeviceFormat();
 
         mAudioRecord = new AudioRecord(
-                mOpPackageName // const String16& opPackageName TODO does not compile
+                identity
         );
         mAudioRecord->set(
                 AUDIO_SOURCE_DEFAULT, // ignored because we pass attributes below
diff --git a/media/libaaudio/src/legacy/AudioStreamRecord.h b/media/libaaudio/src/legacy/AudioStreamRecord.h
index b2f8ba5..7d0a197 100644
--- a/media/libaaudio/src/legacy/AudioStreamRecord.h
+++ b/media/libaaudio/src/legacy/AudioStreamRecord.h
@@ -25,6 +25,7 @@
 #include "AAudioLegacy.h"
 #include "legacy/AudioStreamLegacy.h"
 #include "utility/FixedBlockWriter.h"
+#include <android/media/permission/Identity.h>
 
 namespace aaudio {
 
@@ -86,7 +87,7 @@
     FixedBlockWriter                 mFixedBlockWriter;
 
     // TODO add 64-bit position reporting to AudioRecord and use it.
-    android::String16                mOpPackageName;
+    android::media::permission::Identity mIdentity;
 
     // Only one type of conversion buffer is used.
     std::unique_ptr<float[]>         mFormatConversionBufferFloat;
diff --git a/media/libaaudio/src/legacy/AudioStreamTrack.cpp b/media/libaaudio/src/legacy/AudioStreamTrack.cpp
index af8ff19..142a85c 100644
--- a/media/libaaudio/src/legacy/AudioStreamTrack.cpp
+++ b/media/libaaudio/src/legacy/AudioStreamTrack.cpp
@@ -31,6 +31,8 @@
 using namespace android;
 using namespace aaudio;
 
+using media::permission::Identity;
+
 // Arbitrary and somewhat generous number of bursts.
 #define DEFAULT_BURSTS_PER_BUFFER_CAPACITY     8
 
@@ -147,6 +149,7 @@
     };
 
     mAudioTrack = new AudioTrack();
+    // TODO b/182392769: use identity util
     mAudioTrack->set(
             AUDIO_STREAM_DEFAULT,  // ignored because we pass attributes below
             getSampleRate(),
@@ -162,8 +165,7 @@
             sessionId,
             streamTransferType,
             NULL,    // DEFAULT audio_offload_info_t
-            AUDIO_UID_INVALID, // DEFAULT uid
-            -1,      // DEFAULT pid
+            Identity(), // DEFAULT uid and pid
             &attributes,
             // WARNING - If doNotReconnect set true then audio stops after plugging and unplugging
             // headphones a few times.
diff --git a/media/libaaudio/src/libaaudio.map.txt b/media/libaaudio/src/libaaudio.map.txt
index 2e00aa5..1dd44d1 100644
--- a/media/libaaudio/src/libaaudio.map.txt
+++ b/media/libaaudio/src/libaaudio.map.txt
@@ -23,6 +23,8 @@
     AAudioStreamBuilder_setAllowedCapturePolicy; # introduced=29
     AAudioStreamBuilder_setSessionId;   # introduced=28
     AAudioStreamBuilder_setPrivacySensitive;   # introduced=30
+    AAudioStreamBuilder_setPackageName;   # introduced=31
+    AAudioStreamBuilder_setAttributionTag;   # introduced=31
     AAudioStreamBuilder_openStream;
     AAudioStreamBuilder_delete;
     AAudioStream_close;
diff --git a/media/libaaudio/tests/Android.bp b/media/libaaudio/tests/Android.bp
index 62c9b46..f9eebd7 100644
--- a/media/libaaudio/tests/Android.bp
+++ b/media/libaaudio/tests/Android.bp
@@ -250,3 +250,16 @@
         "libutils",
     ],
 }
+
+
+cc_test {
+    name: "test_disconnect_race",
+    defaults: ["libaaudio_tests_defaults"],
+    srcs: ["test_disconnect_race.cpp"],
+    shared_libs: [
+        "libaaudio",
+        "libbinder",
+        "libcutils",
+        "libutils",
+    ],
+}
diff --git a/media/libaaudio/tests/test_disconnect_race.cpp b/media/libaaudio/tests/test_disconnect_race.cpp
new file mode 100644
index 0000000..6dbe165
--- /dev/null
+++ b/media/libaaudio/tests/test_disconnect_race.cpp
@@ -0,0 +1,198 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * Test whether an error callback is joined before the close finishes.
+ *
+ * Start a stream with a callback.
+ * The callback just sleeps for a long time.
+ * While the callback is sleeping, close() the stream from the main thread.
+ * Then check to make sure the callback was joined before close() returns.
+ *
+ * This can hang if there is a deadlock, so make sure the test prints PASS.
+ */
+
+#include <atomic>
+#include <stdio.h>
+#include <unistd.h>
+
+#include <aaudio/AAudio.h>
+
+// Sleep long enough that the foreground has a chance to call close.
+static constexpr int kCallbackSleepMillis = 1000;
+static constexpr int kPollSleepMillis     =  100;
+
+static int sErrorCount = 0;
+
+#define MY_ASSERT_TRUE(statement) \
+    if (!(statement)) { \
+        printf("ERROR line:%d - " #statement "\n", __LINE__); \
+        sErrorCount++; \
+        return false; \
+    }
+
+#define MY_ASSERT_EQ(aa,bb) MY_ASSERT_TRUE(((aa) == (bb)))
+#define MY_ASSERT_NE(aa,bb) MY_ASSERT_TRUE(((aa) != (bb)))
+
+class AudioEngine {
+public:
+
+    // Check for a crash or late callback if we close without stopping.
+    bool checkCloseJoins(aaudio_direction_t direction,
+                         aaudio_performance_mode_t perfMode,
+                         bool callStopFromCallback) {
+        mCallStopFromCallback = callStopFromCallback;
+
+        if (!startStreamForStall(direction, perfMode)) return false;
+
+        printf("--------------------------------------------------------\n");
+        printf("%s() - direction = %d, perfMode = %d, callStop = %d\n",
+            __func__, direction, perfMode, callStopFromCallback);
+
+        // When the callback starts it will go to sleep.
+        if (!waitForCallbackToStart()) return false;
+
+        printf("call AAudioStream_close()\n");
+        MY_ASSERT_TRUE(!mCallbackFinished); // Still sleeping?
+        aaudio_result_t result = AAudioStream_close(mStream); // May hang here!
+        if (mCallbackStarted) {
+            MY_ASSERT_TRUE(mCallbackFinished);
+        }
+        MY_ASSERT_EQ(AAUDIO_OK, result);
+        printf("AAudioStream_close() returned %d\n", result);
+
+        MY_ASSERT_EQ(AAUDIO_ERROR_DISCONNECTED, mError.load());
+        if (mCallStopFromCallback) {
+            // Did calling stop() from callback fail? It should have.
+            MY_ASSERT_NE(AAUDIO_OK, mStopResult.load());
+        }
+
+        return true;
+    }
+
+private:
+    bool startStreamForStall(aaudio_direction_t direction,
+                             aaudio_performance_mode_t perfMode) {
+        AAudioStreamBuilder* builder = nullptr;
+        aaudio_result_t result = AAUDIO_OK;
+
+        // Use an AAudioStreamBuilder to contain requested parameters.
+        result = AAudio_createStreamBuilder(&builder);
+        MY_ASSERT_EQ(AAUDIO_OK, result);
+
+        // Request stream properties.
+        AAudioStreamBuilder_setDirection(builder, direction);
+        AAudioStreamBuilder_setPerformanceMode(builder, perfMode);
+        AAudioStreamBuilder_setDataCallback(builder, s_myDataCallbackProc, this);
+        AAudioStreamBuilder_setErrorCallback(builder, s_myErrorCallbackProc, this);
+
+        // Create an AAudioStream using the Builder.
+        result = AAudioStreamBuilder_openStream(builder, &mStream);
+        AAudioStreamBuilder_delete(builder);
+        MY_ASSERT_EQ(AAUDIO_OK, result);
+
+        // Check to see what kind of stream we actually got.
+        int32_t deviceId = AAudioStream_getDeviceId(mStream);
+        aaudio_performance_mode_t
+            actualPerfMode = AAudioStream_getPerformanceMode(mStream);
+        printf("-------- opened: deviceId = %3d, perfMode = %d\n",
+               deviceId,
+               actualPerfMode);
+
+        // Start stream.
+        result = AAudioStream_requestStart(mStream);
+        MY_ASSERT_EQ(AAUDIO_OK, result);
+
+        return true;
+    }
+
+    bool waitForCallbackToStart() {
+        // Wait for callback to say it has been called.
+        int countDown = 10 * 1000 / kPollSleepMillis;
+        while (!mCallbackStarted && countDown > 0) {
+            if ((countDown % 5) == 0) {
+                printf("===== Please PLUG or UNPLUG headphones! ======= %d\n", countDown);
+            }
+            usleep(kPollSleepMillis * 1000);
+            countDown--;
+        }
+        MY_ASSERT_TRUE(countDown > 0);
+        MY_ASSERT_TRUE(mCallbackStarted);
+        return true;
+    }
+
+    // Data callback that does no work; this test only exercises the error callback path.
+    static aaudio_data_callback_result_t s_myDataCallbackProc(
+            AAudioStream * /* stream */,
+            void * /* userData */,
+            void * /* audioData */,
+            int32_t /* numFrames */
+    ) {
+        return AAUDIO_CALLBACK_RESULT_CONTINUE;
+    }
+
+    static void s_myErrorCallbackProc(
+                AAudioStream * stream,
+                void *userData,
+                aaudio_result_t error) {
+        AudioEngine *engine = (AudioEngine *)userData;
+        engine->mError = error;
+        engine->mCallbackStarted = true;
+        usleep(kCallbackSleepMillis * 1000);
+        // It is illegal to call stop() from the callback. It should
+        // return an error and not hang.
+        if (engine->mCallStopFromCallback) {
+            engine->mStopResult = AAudioStream_requestStop(stream);
+        }
+        engine->mCallbackFinished = true;
+    }
+
+    AAudioStream* mStream = nullptr;
+
+    std::atomic<aaudio_result_t> mError{AAUDIO_OK}; // written by error callback
+    std::atomic<bool> mCallStopFromCallback{false};
+    std::atomic<bool> mCallbackStarted{false};   // written by error callback
+    std::atomic<bool> mCallbackFinished{false};  // written by error callback
+    std::atomic<aaudio_result_t> mStopResult{AAUDIO_OK};
+};
+
+int main(int, char **) {
+    // Parameters to test.
+    static aaudio_direction_t directions[] = {AAUDIO_DIRECTION_OUTPUT,
+                                              AAUDIO_DIRECTION_INPUT};
+    static aaudio_performance_mode_t perfModes[] =
+        {AAUDIO_PERFORMANCE_MODE_LOW_LATENCY, AAUDIO_PERFORMANCE_MODE_NONE};
+    static bool callStops[] = { false, true };
+
+    // Make printf print immediately so that debug info is not stuck
+    // in a buffer if we hang or crash.
+    setvbuf(stdout, nullptr, _IONBF, (size_t) 0);
+
+    printf("Test Disconnect Race V1.0\n");
+    printf("\n");
+
+    for (auto callStop : callStops) {
+        for (auto direction : directions) {
+            for (auto perfMode : perfModes) {
+                AudioEngine engine;
+                engine.checkCloseJoins(direction, perfMode, callStop);
+            }
+        }
+    }
+
+    printf("Error Count = %d, %s\n", sErrorCount,
+           ((sErrorCount == 0) ? "PASS" : "FAIL"));
+}
diff --git a/media/libaudioclient/AidlConversion.cpp b/media/libaudioclient/AidlConversion.cpp
index 4c0db3e..c77aeeb 100644
--- a/media/libaudioclient/AidlConversion.cpp
+++ b/media/libaudioclient/AidlConversion.cpp
@@ -190,6 +190,27 @@
     return std::string(String8(legacy).c_str());
 }
 
+// TODO b/182392769: create an optional -> optional util
+ConversionResult<std::optional<String16>>
+aidl2legacy_optional_string_view_optional_String16(std::optional<std::string_view> aidl) {
+    if (!aidl.has_value()) {
+        return std::nullopt;
+    }
+    ConversionResult<String16> conversion =
+        VALUE_OR_RETURN(aidl2legacy_string_view_String16(aidl.value()));
+    return conversion.value();
+}
+
+ConversionResult<std::optional<std::string_view>>
+legacy2aidl_optional_String16_optional_string(std::optional<String16> legacy) {
+    if (!legacy.has_value()) {
+        return std::nullopt;
+    }
+    ConversionResult<std::string> conversion =
+        VALUE_OR_RETURN(legacy2aidl_String16_string(legacy.value()));
+    return conversion.value();
+}
+
 ConversionResult<String8> aidl2legacy_string_view_String8(std::string_view aidl) {
     return String8(aidl.data(), aidl.size());
 }
@@ -1160,20 +1181,16 @@
 ConversionResult<AudioClient> aidl2legacy_AudioClient_AudioClient(
         const media::AudioClient& aidl) {
     AudioClient legacy;
-    legacy.clientUid = VALUE_OR_RETURN(aidl2legacy_int32_t_uid_t(aidl.clientUid));
-    legacy.clientPid = VALUE_OR_RETURN(aidl2legacy_int32_t_pid_t(aidl.clientPid));
     legacy.clientTid = VALUE_OR_RETURN(aidl2legacy_int32_t_pid_t(aidl.clientTid));
-    legacy.packageName = VALUE_OR_RETURN(aidl2legacy_string_view_String16(aidl.packageName));
+    legacy.identity = aidl.identity;
     return legacy;
 }
 
 ConversionResult<media::AudioClient> legacy2aidl_AudioClient_AudioClient(
         const AudioClient& legacy) {
     media::AudioClient aidl;
-    aidl.clientUid = VALUE_OR_RETURN(legacy2aidl_uid_t_int32_t(legacy.clientUid));
-    aidl.clientPid = VALUE_OR_RETURN(legacy2aidl_pid_t_int32_t(legacy.clientPid));
     aidl.clientTid = VALUE_OR_RETURN(legacy2aidl_pid_t_int32_t(legacy.clientTid));
-    aidl.packageName = VALUE_OR_RETURN(legacy2aidl_String16_string(legacy.packageName));
+    aidl.identity = legacy.identity;
     return aidl;
 }
 
@@ -1901,6 +1918,9 @@
             convertRange(aidl.channelMasks.begin(), aidl.channelMasks.end(), legacy.channel_masks,
                          aidl2legacy_int32_t_audio_channel_mask_t));
     legacy.num_channel_masks = aidl.channelMasks.size();
+
+    legacy.encapsulation_type = VALUE_OR_RETURN(
+            aidl2legacy_AudioEncapsulationType_audio_encapsulation_type_t(aidl.encapsulationType));
     return legacy;
 }
 
@@ -1924,6 +1944,10 @@
             convertRange(legacy.channel_masks, legacy.channel_masks + legacy.num_channel_masks,
                          std::back_inserter(aidl.channelMasks),
                          legacy2aidl_audio_channel_mask_t_int32_t));
+
+    aidl.encapsulationType = VALUE_OR_RETURN(
+            legacy2aidl_audio_encapsulation_type_t_AudioEncapsulationType(
+                    legacy.encapsulation_type));
     return aidl;
 }
 
@@ -1972,6 +1996,15 @@
                                  aidl2legacy_AudioProfile_audio_profile));
     legacy.num_audio_profiles = aidl.profiles.size();
 
+    if (aidl.extraAudioDescriptors.size() > std::size(legacy.extra_audio_descriptors)) {
+        return unexpected(BAD_VALUE);
+    }
+    RETURN_IF_ERROR(
+            convertRange(aidl.extraAudioDescriptors.begin(), aidl.extraAudioDescriptors.end(),
+                         legacy.extra_audio_descriptors,
+                         aidl2legacy_ExtraAudioDescriptor_audio_extra_audio_descriptor));
+    legacy.num_extra_audio_descriptors = aidl.extraAudioDescriptors.size();
+
     if (aidl.gains.size() > std::size(legacy.gains)) {
         return unexpected(BAD_VALUE);
     }
@@ -2001,6 +2034,15 @@
                          std::back_inserter(aidl.profiles),
                          legacy2aidl_audio_profile_AudioProfile));
 
+    if (legacy.num_extra_audio_descriptors > std::size(legacy.extra_audio_descriptors)) {
+        return unexpected(BAD_VALUE);
+    }
+    RETURN_IF_ERROR(
+            convertRange(legacy.extra_audio_descriptors,
+                    legacy.extra_audio_descriptors + legacy.num_extra_audio_descriptors,
+                    std::back_inserter(aidl.extraAudioDescriptors),
+                    legacy2aidl_audio_extra_audio_descriptor_ExtraAudioDescriptor));
+
     if (legacy.num_gains > std::size(legacy.gains)) {
         return unexpected(BAD_VALUE);
     }
@@ -2201,4 +2243,84 @@
     return aidl;
 }
 
+ConversionResult<audio_standard_t>
+aidl2legacy_AudioStandard_audio_standard_t(media::AudioStandard aidl) {
+    switch (aidl) {
+        case media::AudioStandard::NONE:
+            return AUDIO_STANDARD_NONE;
+        case media::AudioStandard::EDID:
+            return AUDIO_STANDARD_EDID;
+    }
+    return unexpected(BAD_VALUE);
+}
+
+ConversionResult<media::AudioStandard>
+legacy2aidl_audio_standard_t_AudioStandard(audio_standard_t legacy) {
+    switch (legacy) {
+        case AUDIO_STANDARD_NONE:
+            return media::AudioStandard::NONE;
+        case AUDIO_STANDARD_EDID:
+            return media::AudioStandard::EDID;
+    }
+    return unexpected(BAD_VALUE);
+}
+
+ConversionResult<audio_extra_audio_descriptor>
+aidl2legacy_ExtraAudioDescriptor_audio_extra_audio_descriptor(
+        const media::ExtraAudioDescriptor& aidl) {
+    audio_extra_audio_descriptor legacy;
+    legacy.standard = VALUE_OR_RETURN(aidl2legacy_AudioStandard_audio_standard_t(aidl.standard));
+    if (aidl.audioDescriptor.size() > EXTRA_AUDIO_DESCRIPTOR_SIZE) {
+        return unexpected(BAD_VALUE);
+    }
+    legacy.descriptor_length = aidl.audioDescriptor.size();
+    std::copy(aidl.audioDescriptor.begin(), aidl.audioDescriptor.end(),
+              std::begin(legacy.descriptor));
+    legacy.encapsulation_type =
+            VALUE_OR_RETURN(aidl2legacy_AudioEncapsulationType_audio_encapsulation_type_t(
+                    aidl.encapsulationType));
+    return legacy;
+}
+
+ConversionResult<media::ExtraAudioDescriptor>
+legacy2aidl_audio_extra_audio_descriptor_ExtraAudioDescriptor(
+        const audio_extra_audio_descriptor& legacy) {
+    media::ExtraAudioDescriptor aidl;
+    aidl.standard = VALUE_OR_RETURN(legacy2aidl_audio_standard_t_AudioStandard(legacy.standard));
+    if (legacy.descriptor_length > EXTRA_AUDIO_DESCRIPTOR_SIZE) {
+        return unexpected(BAD_VALUE);
+    }
+    aidl.audioDescriptor.resize(legacy.descriptor_length);
+    std::copy(legacy.descriptor, legacy.descriptor + legacy.descriptor_length,
+              aidl.audioDescriptor.begin());
+    aidl.encapsulationType =
+            VALUE_OR_RETURN(legacy2aidl_audio_encapsulation_type_t_AudioEncapsulationType(
+                    legacy.encapsulation_type));
+    return aidl;
+}
+
+ConversionResult<audio_encapsulation_type_t>
+aidl2legacy_AudioEncapsulationType_audio_encapsulation_type_t(
+        const media::AudioEncapsulationType& aidl) {
+    switch (aidl) {
+        case media::AudioEncapsulationType::NONE:
+            return AUDIO_ENCAPSULATION_TYPE_NONE;
+        case media::AudioEncapsulationType::IEC61937:
+            return AUDIO_ENCAPSULATION_TYPE_IEC61937;
+    }
+    return unexpected(BAD_VALUE);
+}
+
+ConversionResult<media::AudioEncapsulationType>
+legacy2aidl_audio_encapsulation_type_t_AudioEncapsulationType(
+        const audio_encapsulation_type_t & legacy) {
+    switch (legacy) {
+        case AUDIO_ENCAPSULATION_TYPE_NONE:
+            return media::AudioEncapsulationType::NONE;
+        case AUDIO_ENCAPSULATION_TYPE_IEC61937:
+            return media::AudioEncapsulationType::IEC61937;
+    }
+    return unexpected(BAD_VALUE);
+}
+
 }  // namespace android
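
On the TODO at the top of the new optional-string conversions (b/182392769), a minimal sketch of the generic optional-to-optional helper it asks for. The name convertOptional and the Convert parameter are invented for illustration; the sketch reuses the ConversionResult / VALUE_OR_RETURN machinery already used in this file:

    template <typename To, typename From, typename Convert>
    ConversionResult<std::optional<To>> convertOptional(const std::optional<From>& from,
                                                        Convert convert) {
        if (!from.has_value()) {
            return std::optional<To>();                     // absent stays absent
        }
        To value = VALUE_OR_RETURN(convert(from.value()));  // propagate conversion errors
        return std::optional<To>(std::move(value));
    }

With such a helper, aidl2legacy_optional_string_view_optional_String16 above would reduce to a one-line call along the lines of convertOptional<String16>(aidl, aidl2legacy_string_view_String16).
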
diff --git a/media/libaudioclient/Android.bp b/media/libaudioclient/Android.bp
index 79c155e..19d68a0 100644
--- a/media/libaudioclient/Android.bp
+++ b/media/libaudioclient/Android.bp
@@ -132,10 +132,12 @@
         "libshmemcompat",
         "libutils",
         "libvibrator",
+        "media_permission-aidl-cpp",
     ],
     export_shared_lib_headers: [
         "audioflinger-aidl-cpp",
         "audiopolicy-aidl-cpp",
+        "media_permission-aidl-cpp",
         "libbinder",
     ],
 
@@ -162,6 +164,7 @@
         // for memory heap analysis
         "libc_malloc_debug_backtrace",
         "shared-file-region-aidl-cpp",
+        "media_permission-aidl-cpp",
     ],
     cflags: [
         "-Wall",
@@ -229,6 +232,7 @@
         "libshmemcompat",
         "libutils",
         "shared-file-region-aidl-cpp",
+        "media_permission-aidl-cpp",
     ],
     export_shared_lib_headers: [
         "audioclient-types-aidl-cpp",
@@ -307,6 +311,7 @@
         "aidl/android/media/AudioDualMonoMode.aidl",
         "aidl/android/media/AudioEncapsulationMode.aidl",
         "aidl/android/media/AudioEncapsulationMetadataType.aidl",
+        "aidl/android/media/AudioEncapsulationType.aidl",
         "aidl/android/media/AudioFlag.aidl",
         "aidl/android/media/AudioGain.aidl",
         "aidl/android/media/AudioGainConfig.aidl",
@@ -337,15 +342,19 @@
         "aidl/android/media/AudioPortType.aidl",
         "aidl/android/media/AudioProfile.aidl",
         "aidl/android/media/AudioSourceType.aidl",
+        "aidl/android/media/AudioStandard.aidl",
         "aidl/android/media/AudioStreamType.aidl",
         "aidl/android/media/AudioTimestampInternal.aidl",
         "aidl/android/media/AudioUniqueIdUse.aidl",
         "aidl/android/media/AudioUsage.aidl",
         "aidl/android/media/AudioUuid.aidl",
+        "aidl/android/media/AudioVibratorInfo.aidl",
         "aidl/android/media/EffectDescriptor.aidl",
+        "aidl/android/media/ExtraAudioDescriptor.aidl",
     ],
     imports: [
         "audio_common-aidl",
+        "media_permission-aidl",
     ],
     backend: {
         cpp: {
@@ -427,6 +436,7 @@
         "av-types-aidl",
         "effect-aidl",
         "shared-file-region-aidl",
+        "media_permission-aidl",
     ],
     double_loadable: true,
     backend: {
@@ -460,6 +470,7 @@
         "audioclient-types-aidl",
         "audiopolicy-types-aidl",
         "capture_state_listener-aidl",
+        "media_permission-aidl",
     ],
     double_loadable: true,
     backend: {
diff --git a/media/libaudioclient/AudioEffect.cpp b/media/libaudioclient/AudioEffect.cpp
index ad8ad7b..d5047b1 100644
--- a/media/libaudioclient/AudioEffect.cpp
+++ b/media/libaudioclient/AudioEffect.cpp
@@ -42,6 +42,7 @@
 using aidl_utils::statusTFromBinderStatus;
 using binder::Status;
 using media::IAudioPolicyService;
+using media::permission::Identity;
 
 namespace {
 
@@ -57,8 +58,8 @@
 
 // ---------------------------------------------------------------------------
 
-AudioEffect::AudioEffect(const String16& opPackageName)
-    : mOpPackageName(opPackageName)
+AudioEffect::AudioEffect(const Identity& identity)
+    : mClientIdentity(identity)
 {
 }
 
@@ -107,9 +108,12 @@
     mDescriptor.type = *(type != NULL ? type : EFFECT_UUID_NULL);
     mDescriptor.uuid = *(uuid != NULL ? uuid : EFFECT_UUID_NULL);
 
+    // TODO b/182392769: use identity util
     mIEffectClient = new EffectClient(this);
-    mClientPid = IPCThreadState::self()->getCallingPid();
-    mClientUid = IPCThreadState::self()->getCallingUid();
+    pid_t pid = IPCThreadState::self()->getCallingPid();
+    mClientIdentity.pid = VALUE_OR_RETURN_STATUS(legacy2aidl_pid_t_int32_t(pid));
+    uid_t uid = IPCThreadState::self()->getCallingUid();
+    mClientIdentity.uid = VALUE_OR_RETURN_STATUS(legacy2aidl_uid_t_int32_t(uid));
 
     media::CreateEffectRequest request;
     request.desc = VALUE_OR_RETURN_STATUS(
@@ -119,8 +123,7 @@
     request.output = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_io_handle_t_int32_t(io));
     request.sessionId = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_session_t_int32_t(mSessionId));
     request.device = VALUE_OR_RETURN_STATUS(legacy2aidl_AudioDeviceTypeAddress(device));
-    request.opPackageName = VALUE_OR_RETURN_STATUS(legacy2aidl_String16_string(mOpPackageName));
-    request.pid = VALUE_OR_RETURN_STATUS(legacy2aidl_pid_t_int32_t(mClientPid));
+    request.identity = mClientIdentity;
     request.probe = probe;
 
     media::CreateEffectResponse response;
@@ -175,10 +178,10 @@
 
     IInterface::asBinder(iEffect)->linkToDeath(mIEffectClient);
     ALOGV("set() %p OK effect: %s id: %d status %d enabled %d pid %d", this, mDescriptor.name, mId,
-            mStatus, mEnabled, mClientPid);
+            mStatus, mEnabled, mClientIdentity.pid);
 
     if (!audio_is_global_session(mSessionId)) {
-        AudioSystem::acquireAudioSessionId(mSessionId, mClientPid, mClientUid);
+        AudioSystem::acquireAudioSessionId(mSessionId, pid, uid);
     }
 
     return mStatus;
@@ -219,7 +222,8 @@
 
     if (!mProbe && (mStatus == NO_ERROR || mStatus == ALREADY_EXISTS)) {
         if (!audio_is_global_session(mSessionId)) {
-            AudioSystem::releaseAudioSessionId(mSessionId, mClientPid);
+            AudioSystem::releaseAudioSessionId(mSessionId,
+                VALUE_OR_FATAL(aidl2legacy_int32_t_pid_t(mClientIdentity.pid)));
         }
         if (mIEffect != NULL) {
             mIEffect->disconnect();
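
A hedged client-side sketch of the AudioEffect constructor change above; the package name is a placeholder and the surrounding code is illustrative, not from the patch:

    using android::media::permission::Identity;

    Identity id;
    id.packageName = "com.example.app";   // placeholder; pid/uid are left unset here
    android::sp<android::AudioEffect> effect = new android::AudioEffect(id);
    // set() then stamps the real binder calling pid/uid into mClientIdentity,
    // as the hunk above shows, before sending the CreateEffectRequest.
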
diff --git a/media/libaudioclient/AudioRecord.cpp b/media/libaudioclient/AudioRecord.cpp
index 112cb67..e15ef3d 100644
--- a/media/libaudioclient/AudioRecord.cpp
+++ b/media/libaudioclient/AudioRecord.cpp
@@ -35,18 +35,11 @@
 #include <media/MediaMetricsItem.h>
 #include <media/TypeConverter.h>
 
-#define VALUE_OR_FATAL(result)                   \
-    ({                                           \
-       auto _tmp = (result);                     \
-       LOG_ALWAYS_FATAL_IF(!_tmp.ok(),           \
-                           "Failed result (%d)", \
-                           _tmp.error());        \
-       std::move(_tmp.value());                  \
-     })
-
 #define WAIT_PERIOD_MS          10
 
 namespace android {
+
+using android::media::permission::Identity;
 using aidl_utils::statusTFromBinderStatus;
 
 // ---------------------------------------------------------------------------
@@ -114,6 +107,7 @@
         mMetricsItem->setInt32(MM_PREFIX "lastError.code", (int32_t)mLastError);
         mMetricsItem->setCString(MM_PREFIX "lastError.at", mLastErrorFunc.c_str());
     }
+    mMetricsItem->setCString(MM_PREFIX "logSessionId", record->mLogSessionId.c_str());
 }
 
 static const char *stateToString(bool active) {
@@ -132,9 +126,8 @@
     return NO_ERROR;
 }
 
-AudioRecord::AudioRecord(const String16 &opPackageName)
-    : mActive(false), mStatus(NO_INIT), mOpPackageName(opPackageName),
-      mSessionId(AUDIO_SESSION_ALLOCATE),
+AudioRecord::AudioRecord(const Identity &client)
+    : mActive(false), mStatus(NO_INIT), mClientIdentity(client), mSessionId(AUDIO_SESSION_ALLOCATE),
       mPreviousPriority(ANDROID_PRIORITY_NORMAL), mPreviousSchedulingGroup(SP_DEFAULT),
       mSelectedDeviceId(AUDIO_PORT_HANDLE_NONE), mRoutedDeviceId(AUDIO_PORT_HANDLE_NONE),
       mSelectedMicDirection(MIC_DIRECTION_UNSPECIFIED),
@@ -147,7 +140,7 @@
         uint32_t sampleRate,
         audio_format_t format,
         audio_channel_mask_t channelMask,
-        const String16& opPackageName,
+        const Identity& client,
         size_t frameCount,
         callback_t cbf,
         void* user,
@@ -155,24 +148,24 @@
         audio_session_t sessionId,
         transfer_type transferType,
         audio_input_flags_t flags,
-        uid_t uid,
-        pid_t pid,
         const audio_attributes_t* pAttributes,
         audio_port_handle_t selectedDeviceId,
         audio_microphone_direction_t selectedMicDirection,
         float microphoneFieldDimension)
     : mActive(false),
       mStatus(NO_INIT),
-      mOpPackageName(opPackageName),
+      mClientIdentity(client),
       mSessionId(AUDIO_SESSION_ALLOCATE),
       mPreviousPriority(ANDROID_PRIORITY_NORMAL),
       mPreviousSchedulingGroup(SP_DEFAULT),
       mProxy(NULL)
 {
+    uid_t uid = VALUE_OR_FATAL(aidl2legacy_int32_t_uid_t(mClientIdentity.uid));
+    pid_t pid = VALUE_OR_FATAL(aidl2legacy_int32_t_pid_t(mClientIdentity.pid));
     (void)set(inputSource, sampleRate, format, channelMask, frameCount, cbf, user,
             notificationFrames, false /*threadCanCallJava*/, sessionId, transferType, flags,
-            uid, pid, pAttributes, selectedDeviceId,
-            selectedMicDirection, microphoneFieldDimension);
+            uid, pid, pAttributes, selectedDeviceId, selectedMicDirection,
+            microphoneFieldDimension);
 }
 
 AudioRecord::~AudioRecord()
@@ -210,7 +203,8 @@
         IPCThreadState::self()->flushCommands();
         ALOGV("%s(%d): releasing session id %d",
                 __func__, mPortId, mSessionId);
-        AudioSystem::releaseAudioSessionId(mSessionId, mClientPid);
+        pid_t pid = VALUE_OR_FATAL(aidl2legacy_int32_t_pid_t(mClientIdentity.pid));
+        AudioSystem::releaseAudioSessionId(mSessionId, pid);
     }
 }
 
@@ -236,16 +230,29 @@
 {
     status_t status = NO_ERROR;
     uint32_t channelCount;
-    pid_t callingPid;
-    pid_t myPid;
 
     // Note mPortId is not valid until the track is created, so omit mPortId in ALOG for set.
     ALOGV("%s(): inputSource %d, sampleRate %u, format %#x, channelMask %#x, frameCount %zu, "
-          "notificationFrames %u, sessionId %d, transferType %d, flags %#x, opPackageName %s "
+          "notificationFrames %u, sessionId %d, transferType %d, flags %#x, identity %s"
           "uid %d, pid %d",
           __func__,
           inputSource, sampleRate, format, channelMask, frameCount, notificationFrames,
-          sessionId, transferType, flags, String8(mOpPackageName).string(), uid, pid);
+          sessionId, transferType, flags, mClientIdentity.toString().c_str(), uid, pid);
+
+    // TODO b/182392553: refactor or remove
+    pid_t callingPid = IPCThreadState::self()->getCallingPid();
+    pid_t myPid = getpid();
+    pid_t adjPid = pid;
+    if (pid == -1 || (callingPid != myPid)) {
+        adjPid = callingPid;
+    }
+    mClientIdentity.pid = VALUE_OR_FATAL(legacy2aidl_pid_t_int32_t(adjPid));
+
+    uid_t adjUid = uid;
+    if (uid == -1 || (callingPid != myPid)) {
+        adjUid = IPCThreadState::self()->getCallingUid();
+    }
+    mClientIdentity.uid = VALUE_OR_FATAL(legacy2aidl_uid_t_int32_t(adjUid));
 
     mTracker.reset(new RecordingActivityTracker());
 
@@ -340,19 +347,6 @@
     mSessionId = sessionId;
     ALOGV("%s(): mSessionId %d", __func__, mSessionId);
 
-    callingPid = IPCThreadState::self()->getCallingPid();
-    myPid = getpid();
-    if (uid == AUDIO_UID_INVALID || (callingPid != myPid)) {
-        mClientUid = IPCThreadState::self()->getCallingUid();
-    } else {
-        mClientUid = uid;
-    }
-    if (pid == -1 || (callingPid != myPid)) {
-        mClientPid = callingPid;
-    } else {
-        mClientPid = pid;
-    }
-
     mOrigFlags = mFlags = flags;
     mCbf = cbf;
 
@@ -365,7 +359,7 @@
     // create the IAudioRecord
     {
         AutoMutex lock(mLock);
-        status = createRecord_l(0 /*epoch*/, mOpPackageName);
+        status = createRecord_l(0 /*epoch*/);
     }
 
     ALOGV("%s(%d): status %d", __func__, mPortId, status);
@@ -386,7 +380,7 @@
     mMarkerReached = false;
     mNewPosition = 0;
     mUpdatePeriod = 0;
-    AudioSystem::acquireAudioSessionId(mSessionId, mClientPid, mClientUid);
+    AudioSystem::acquireAudioSessionId(mSessionId, adjPid, adjUid);
     mSequence = 1;
     mObservedSequence = mSequence;
     mInOverrun = false;
@@ -743,7 +737,7 @@
 }
 
 // must be called with mLock held
-status_t AudioRecord::createRecord_l(const Modulo<uint32_t> &epoch, const String16& opPackageName)
+status_t AudioRecord::createRecord_l(const Modulo<uint32_t> &epoch)
 {
     const int64_t beginNs = systemTime();
     const sp<IAudioFlinger>& audioFlinger = AudioSystem::get_audio_flinger();
@@ -796,15 +790,13 @@
     input.config.sample_rate = mSampleRate;
     input.config.channel_mask = mChannelMask;
     input.config.format = mFormat;
-    input.clientInfo.clientUid = mClientUid;
-    input.clientInfo.clientPid = mClientPid;
+    input.clientInfo.identity = mClientIdentity;
     input.clientInfo.clientTid = -1;
     if (mFlags & AUDIO_INPUT_FLAG_FAST) {
         if (mAudioRecordThread != 0) {
             input.clientInfo.clientTid = mAudioRecordThread->getTid();
         }
     }
-    input.opPackageName = opPackageName;
     input.riid = mTracker->getRiid();
 
     input.flags = mFlags;
@@ -953,6 +945,7 @@
         .set(AMEDIAMETRICS_PROP_ORIGINALFLAGS, toString(mOrigFlags).c_str())
         .set(AMEDIAMETRICS_PROP_SESSIONID, (int32_t)mSessionId)
         .set(AMEDIAMETRICS_PROP_TRACKID, mPortId)
+        .set(AMEDIAMETRICS_PROP_LOGSESSIONID, mLogSessionId)
         .set(AMEDIAMETRICS_PROP_SOURCE, toString(mAttributes.source).c_str())
         .set(AMEDIAMETRICS_PROP_THREADID, (int32_t)output.inputId)
         .set(AMEDIAMETRICS_PROP_SELECTEDDEVICEID, (int32_t)mSelectedDeviceId)
@@ -1435,7 +1428,7 @@
     // It will also delete the strong references on previous IAudioRecord and IMemory
     Modulo<uint32_t> position(mProxy->getPosition());
     mNewPosition = position + mUpdatePeriod;
-    result = createRecord_l(position, mOpPackageName);
+    result = createRecord_l(position);
 
     if (result == NO_ERROR) {
         if (mActive) {
@@ -1574,6 +1567,19 @@
     }
 }
 
+void AudioRecord::setLogSessionId(const char *logSessionId)
+{
+    AutoMutex lock(mLock);
+    if (logSessionId == nullptr) logSessionId = "";  // an empty string is an unset session id.
+    if (mLogSessionId == logSessionId) return;
+
+    mLogSessionId = logSessionId;
+    mediametrics::LogItem(mMetricsId)
+        .set(AMEDIAMETRICS_PROP_EVENT, AMEDIAMETRICS_PROP_EVENT_VALUE_SETLOGSESSIONID)
+        .set(AMEDIAMETRICS_PROP_LOGSESSIONID, logSessionId)
+        .record();
+}
+
 // =========================================================================
 
 void AudioRecord::DeathNotifier::binderDied(const wp<IBinder>& who __unused)
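
Similarly for AudioRecord, a short sketch of the Identity-based constructor plus the new setLogSessionId() hook shown above; the package name and session-id string are placeholders:

    using android::media::permission::Identity;

    Identity client;
    client.packageName = "com.example.app";        // placeholder
    android::sp<android::AudioRecord> record = new android::AudioRecord(client);
    // ... record->set(...) and start() as before ...
    record->setLogSessionId("184A0F5C3E2D1B09");   // placeholder id, forwarded to mediametrics
    // Passing nullptr clears it; per the hunk above it is treated as the unset (empty) id.
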
diff --git a/media/libaudioclient/AudioSystem.cpp b/media/libaudioclient/AudioSystem.cpp
index 74258a1..0bc592d 100644
--- a/media/libaudioclient/AudioSystem.cpp
+++ b/media/libaudioclient/AudioSystem.cpp
@@ -52,6 +52,7 @@
 using aidl_utils::statusTFromBinderStatus;
 using binder::Status;
 using media::IAudioPolicyService;
+using media::permission::Identity;
 
 // client singleton for AudioFlinger binder interface
 Mutex AudioSystem::gLock;
@@ -940,8 +941,7 @@
                                        audio_io_handle_t* output,
                                        audio_session_t session,
                                        audio_stream_type_t* stream,
-                                       pid_t pid,
-                                       uid_t uid,
+                                       const Identity& identity,
                                        const audio_config_t* config,
                                        audio_output_flags_t flags,
                                        audio_port_handle_t* selectedDeviceId,
@@ -974,8 +974,6 @@
     media::AudioAttributesInternal attrAidl = VALUE_OR_RETURN_STATUS(
             legacy2aidl_audio_attributes_t_AudioAttributesInternal(*attr));
     int32_t sessionAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_session_t_int32_t(session));
-    int32_t pidAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_pid_t_int32_t(pid));
-    int32_t uidAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_uid_t_int32_t(uid));
     media::AudioConfig configAidl = VALUE_OR_RETURN_STATUS(
             legacy2aidl_audio_config_t_AudioConfig(*config));
     int32_t flagsAidl = VALUE_OR_RETURN_STATUS(
@@ -986,7 +984,7 @@
     media::GetOutputForAttrResponse responseAidl;
 
     RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(
-            aps->getOutputForAttr(attrAidl, sessionAidl, pidAidl, uidAidl, configAidl, flagsAidl,
+            aps->getOutputForAttr(attrAidl, sessionAidl, identity, configAidl, flagsAidl,
                                   selectedDeviceIdAidl, &responseAidl)));
 
     *output = VALUE_OR_RETURN_STATUS(
@@ -1040,9 +1038,7 @@
                                       audio_io_handle_t* input,
                                       audio_unique_id_t riid,
                                       audio_session_t session,
-                                      pid_t pid,
-                                      uid_t uid,
-                                      const String16& opPackageName,
+                                      const Identity &identity,
                                       const audio_config_base_t* config,
                                       audio_input_flags_t flags,
                                       audio_port_handle_t* selectedDeviceId,
@@ -1072,10 +1068,6 @@
     int32_t inputAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_io_handle_t_int32_t(*input));
     int32_t riidAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_unique_id_t_int32_t(riid));
     int32_t sessionAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_session_t_int32_t(session));
-    int32_t pidAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_pid_t_int32_t(pid));
-    int32_t uidAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_uid_t_int32_t(uid));
-    std::string opPackageNameAidl = VALUE_OR_RETURN_STATUS(
-            legacy2aidl_String16_string(opPackageName));
     media::AudioConfigBase configAidl = VALUE_OR_RETURN_STATUS(
             legacy2aidl_audio_config_base_t_AudioConfigBase(*config));
     int32_t flagsAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_input_flags_t_int32_t_mask(flags));
@@ -1085,9 +1077,8 @@
     media::GetInputForAttrResponse response;
 
     RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(
-            aps->getInputForAttr(attrAidl, inputAidl, riidAidl, sessionAidl, pidAidl, uidAidl,
-                                 opPackageNameAidl, configAidl, flagsAidl, selectedDeviceIdAidl,
-                                 &response)));
+            aps->getInputForAttr(attrAidl, inputAidl, riidAidl, sessionAidl, identity,
+                configAidl, flagsAidl, selectedDeviceIdAidl, &response)));
 
     *input = VALUE_OR_RETURN_STATUS(aidl2legacy_int32_t_audio_io_handle_t(response.input));
     *selectedDeviceId = VALUE_OR_RETURN_STATUS(
@@ -1839,8 +1830,7 @@
 
 status_t AudioSystem::getSurroundFormats(unsigned int* numSurroundFormats,
                                          audio_format_t* surroundFormats,
-                                         bool* surroundFormatsEnabled,
-                                         bool reported) {
+                                         bool* surroundFormatsEnabled) {
     if (numSurroundFormats == nullptr || (*numSurroundFormats != 0 &&
                                           (surroundFormats == nullptr ||
                                            surroundFormatsEnabled == nullptr))) {
@@ -1855,8 +1845,8 @@
     std::vector<media::audio::common::AudioFormat> surroundFormatsAidl;
     std::vector<bool> surroundFormatsEnabledAidl;
     RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(
-            aps->getSurroundFormats(reported, &numSurroundFormatsAidl,
-                                    &surroundFormatsAidl, &surroundFormatsEnabledAidl)));
+            aps->getSurroundFormats(&numSurroundFormatsAidl, &surroundFormatsAidl,
+                                    &surroundFormatsEnabledAidl)));
 
     *numSurroundFormats = VALUE_OR_RETURN_STATUS(
             convertIntegral<unsigned int>(numSurroundFormatsAidl.value));
@@ -1868,6 +1858,29 @@
     return OK;
 }
 
+status_t AudioSystem::getReportedSurroundFormats(unsigned int* numSurroundFormats,
+                                                 audio_format_t* surroundFormats) {
+    if (numSurroundFormats == nullptr || (*numSurroundFormats != 0 && surroundFormats == nullptr)) {
+        return BAD_VALUE;
+    }
+
+    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    if (aps == 0) return PERMISSION_DENIED;
+    media::Int numSurroundFormatsAidl;
+    numSurroundFormatsAidl.value =
+            VALUE_OR_RETURN_STATUS(convertIntegral<int32_t>(*numSurroundFormats));
+    std::vector<media::audio::common::AudioFormat> surroundFormatsAidl;
+    RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(
+            aps->getReportedSurroundFormats(&numSurroundFormatsAidl, &surroundFormatsAidl)));
+
+    *numSurroundFormats = VALUE_OR_RETURN_STATUS(
+            convertIntegral<unsigned int>(numSurroundFormatsAidl.value));
+    RETURN_STATUS_IF_ERROR(
+            convertRange(surroundFormatsAidl.begin(), surroundFormatsAidl.end(), surroundFormats,
+                         aidl2legacy_AudioFormat_audio_format_t));
+    return OK;
+}
+
 status_t AudioSystem::setSurroundFormatEnabled(audio_format_t audioFormat, bool enabled) {
     const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
     if (aps == 0) return PERMISSION_DENIED;
@@ -2245,6 +2258,15 @@
     return NO_ERROR;
 }
 
+status_t AudioSystem::setVibratorInfos(
+        const std::vector<media::AudioVibratorInfo>& vibratorInfos) {
+    const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
+    if (af == nullptr) {
+        return PERMISSION_DENIED;
+    }
+    return af->setVibratorInfos(vibratorInfos);
+}
+
 // ---------------------------------------------------------------------------
 
 int AudioSystem::AudioPolicyServiceClient::addAudioPortCallback(
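
A usage sketch, not code from this change, of the count-query convention behind the new AudioSystem::getReportedSurroundFormats(): call once with a zero count to learn how many formats exist, then call again with a sized buffer.

#include <vector>
#include <media/AudioSystem.h>

android::status_t listReportedSurroundFormats(std::vector<audio_format_t>* outFormats) {
    unsigned int count = 0;
    // With *numSurroundFormats == 0 the call only reports the total count.
    android::status_t status =
            android::AudioSystem::getReportedSurroundFormats(&count, nullptr);
    if (status != android::OK) return status;

    outFormats->resize(count);
    // Second call fills the buffer; 'count' is updated to the total available.
    return android::AudioSystem::getReportedSurroundFormats(&count, outFormats->data());
}
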
diff --git a/media/libaudioclient/AudioTrack.cpp b/media/libaudioclient/AudioTrack.cpp
index 57bd04f..6c9e85c 100644
--- a/media/libaudioclient/AudioTrack.cpp
+++ b/media/libaudioclient/AudioTrack.cpp
@@ -38,15 +38,6 @@
 #include <media/MediaMetricsItem.h>
 #include <media/TypeConverter.h>
 
-#define VALUE_OR_FATAL(result)                   \
-    ({                                           \
-       auto _tmp = (result);                     \
-       LOG_ALWAYS_FATAL_IF(!_tmp.ok(),           \
-                           "Failed result (%d)", \
-                           _tmp.error());        \
-       std::move(_tmp.value());                  \
-     })
-
 #define WAIT_PERIOD_MS                  10
 #define WAIT_STREAM_END_TIMEOUT_SEC     120
 static const int kMaxLoopCountNotifications = 32;
@@ -57,6 +48,7 @@
 // ---------------------------------------------------------------------------
 
 using media::VolumeShaper;
+using media::permission::Identity;
 
 // TODO: Move to a separate .h
 
@@ -218,6 +210,7 @@
     mMetricsItem->setCString(MM_PREFIX "encoding", toString(track->mFormat).c_str());
     mMetricsItem->setInt32(MM_PREFIX "frameCount", (int32_t)track->mFrameCount);
     mMetricsItem->setCString(MM_PREFIX "attributes", toString(track->mAttributes).c_str());
+    mMetricsItem->setCString(MM_PREFIX "logSessionId", track->mLogSessionId.c_str());
 }
 
 // hand the user a snapshot of the metrics.
@@ -232,11 +225,11 @@
     return NO_ERROR;
 }
 
-AudioTrack::AudioTrack() : AudioTrack("" /*opPackageName*/)
+AudioTrack::AudioTrack() : AudioTrack(Identity())
 {
 }
 
-AudioTrack::AudioTrack(const std::string& opPackageName)
+AudioTrack::AudioTrack(const Identity& identity)
     : mStatus(NO_INIT),
       mState(STATE_STOPPED),
       mPreviousPriority(ANDROID_PRIORITY_NORMAL),
@@ -244,7 +237,7 @@
       mPausedPosition(0),
       mSelectedDeviceId(AUDIO_PORT_HANDLE_NONE),
       mRoutedDeviceId(AUDIO_PORT_HANDLE_NONE),
-      mOpPackageName(opPackageName),
+      mClientIdentity(identity),
       mAudioTrackCallback(new AudioTrackCallback())
 {
     mAttributes.content_type = AUDIO_CONTENT_TYPE_UNKNOWN;
@@ -266,19 +259,16 @@
         audio_session_t sessionId,
         transfer_type transferType,
         const audio_offload_info_t *offloadInfo,
-        uid_t uid,
-        pid_t pid,
+        const Identity& identity,
         const audio_attributes_t* pAttributes,
         bool doNotReconnect,
         float maxRequiredSpeed,
-        audio_port_handle_t selectedDeviceId,
-        const std::string& opPackageName)
+        audio_port_handle_t selectedDeviceId)
     : mStatus(NO_INIT),
       mState(STATE_STOPPED),
       mPreviousPriority(ANDROID_PRIORITY_NORMAL),
       mPreviousSchedulingGroup(SP_DEFAULT),
       mPausedPosition(0),
-      mOpPackageName(opPackageName),
       mAudioTrackCallback(new AudioTrackCallback())
 {
     mAttributes = AUDIO_ATTRIBUTES_INITIALIZER;
@@ -286,7 +276,7 @@
     (void)set(streamType, sampleRate, format, channelMask,
             frameCount, flags, cbf, user, notificationFrames,
             0 /*sharedBuffer*/, false /*threadCanCallJava*/, sessionId, transferType,
-            offloadInfo, uid, pid, pAttributes, doNotReconnect, maxRequiredSpeed, selectedDeviceId);
+            offloadInfo, identity, pAttributes, doNotReconnect, maxRequiredSpeed, selectedDeviceId);
 }
 
 AudioTrack::AudioTrack(
@@ -302,19 +292,16 @@
         audio_session_t sessionId,
         transfer_type transferType,
         const audio_offload_info_t *offloadInfo,
-        uid_t uid,
-        pid_t pid,
+        const Identity& identity,
         const audio_attributes_t* pAttributes,
         bool doNotReconnect,
-        float maxRequiredSpeed,
-        const std::string& opPackageName)
+        float maxRequiredSpeed)
     : mStatus(NO_INIT),
       mState(STATE_STOPPED),
       mPreviousPriority(ANDROID_PRIORITY_NORMAL),
       mPreviousSchedulingGroup(SP_DEFAULT),
       mPausedPosition(0),
       mSelectedDeviceId(AUDIO_PORT_HANDLE_NONE),
-      mOpPackageName(opPackageName),
       mAudioTrackCallback(new AudioTrackCallback())
 {
     mAttributes = AUDIO_ATTRIBUTES_INITIALIZER;
@@ -322,7 +309,7 @@
     (void)set(streamType, sampleRate, format, channelMask,
             0 /*frameCount*/, flags, cbf, user, notificationFrames,
             sharedBuffer, false /*threadCanCallJava*/, sessionId, transferType, offloadInfo,
-            uid, pid, pAttributes, doNotReconnect, maxRequiredSpeed);
+            identity, pAttributes, doNotReconnect, maxRequiredSpeed);
 }
 
 AudioTrack::~AudioTrack()
@@ -360,10 +347,11 @@
         mCblkMemory.clear();
         mSharedBuffer.clear();
         IPCThreadState::self()->flushCommands();
+        pid_t clientPid = VALUE_OR_FATAL(aidl2legacy_int32_t_pid_t(mClientIdentity.pid));
         ALOGV("%s(%d), releasing session id %d from %d on behalf of %d",
                 __func__, mPortId,
-                mSessionId, IPCThreadState::self()->getCallingPid(), mClientPid);
-        AudioSystem::releaseAudioSessionId(mSessionId, mClientPid);
+                mSessionId, IPCThreadState::self()->getCallingPid(), clientPid);
+        AudioSystem::releaseAudioSessionId(mSessionId, clientPid);
     }
 }
 
@@ -382,8 +370,7 @@
         audio_session_t sessionId,
         transfer_type transferType,
         const audio_offload_info_t *offloadInfo,
-        uid_t uid,
-        pid_t pid,
+        const Identity& identity,
         const audio_attributes_t* pAttributes,
         bool doNotReconnect,
         float maxRequiredSpeed,
@@ -393,13 +380,15 @@
     uint32_t channelCount;
     pid_t callingPid;
     pid_t myPid;
+    uid_t uid = VALUE_OR_FATAL(aidl2legacy_int32_t_uid_t(identity.uid));
+    pid_t pid = VALUE_OR_FATAL(aidl2legacy_int32_t_pid_t(identity.pid));
 
     // Note mPortId is not valid until the track is created, so omit mPortId in ALOG for set.
     ALOGV("%s(): streamType %d, sampleRate %u, format %#x, channelMask %#x, frameCount %zu, "
           "flags #%x, notificationFrames %d, sessionId %d, transferType %d, uid %d, pid %d",
           __func__,
           streamType, sampleRate, format, channelMask, frameCount, flags, notificationFrames,
-          sessionId, transferType, uid, pid);
+          sessionId, transferType, identity.uid, identity.pid);
 
     mThreadCanCallJava = threadCanCallJava;
     mSelectedDeviceId = selectedDeviceId;
@@ -595,17 +584,19 @@
                 notificationFrames, minNotificationsPerBuffer, maxNotificationsPerBuffer);
     }
     mNotificationFramesAct = 0;
+    // TODO b/182392553: refactor or remove
     callingPid = IPCThreadState::self()->getCallingPid();
     myPid = getpid();
-    if (uid == AUDIO_UID_INVALID || (callingPid != myPid)) {
-        mClientUid = IPCThreadState::self()->getCallingUid();
+    if (uid == (uid_t)-1 || (callingPid != myPid)) {
+        mClientIdentity.uid = VALUE_OR_FATAL(legacy2aidl_uid_t_int32_t(
+            IPCThreadState::self()->getCallingUid()));
     } else {
-        mClientUid = uid;
+        mClientIdentity.uid = identity.uid;
     }
-    if (pid == -1 || (callingPid != myPid)) {
-        mClientPid = callingPid;
+    if (pid == (pid_t)-1 || (callingPid != myPid)) {
+        mClientIdentity.pid = VALUE_OR_FATAL(legacy2aidl_pid_t_int32_t(callingPid));
     } else {
-        mClientPid = pid;
+        mClientIdentity.pid = identity.pid;
     }
     mAuxEffectId = 0;
     mOrigFlags = mFlags = flags;
@@ -644,7 +635,7 @@
     mReleased = 0;
     mStartNs = 0;
     mStartFromZeroUs = 0;
-    AudioSystem::acquireAudioSessionId(mSessionId, mClientPid, mClientUid);
+    AudioSystem::acquireAudioSessionId(mSessionId, pid, uid);
     mSequence = 1;
     mObservedSequence = mSequence;
     mInUnderrun = false;
@@ -690,10 +681,13 @@
         float maxRequiredSpeed,
         audio_port_handle_t selectedDeviceId)
 {
+    Identity identity;
+    identity.uid = VALUE_OR_FATAL(legacy2aidl_uid_t_int32_t(uid));
+    identity.pid = VALUE_OR_FATAL(legacy2aidl_pid_t_int32_t(pid));
     return set(streamType, sampleRate, format,
             static_cast<audio_channel_mask_t>(channelMask),
             frameCount, flags, cbf, user, notificationFrames, sharedBuffer,
-            threadCanCallJava, sessionId, transferType, offloadInfo, uid, pid,
+            threadCanCallJava, sessionId, transferType, offloadInfo, identity,
             pAttributes, doNotReconnect, maxRequiredSpeed, selectedDeviceId);
 }
 
@@ -1289,6 +1283,46 @@
     return finalBufferSize;
 }
 
+ssize_t AudioTrack::getStartThresholdInFrames() const
+{
+    AutoMutex lock(mLock);
+    if (mOutput == AUDIO_IO_HANDLE_NONE || mProxy.get() == 0) {
+        return NO_INIT;
+    }
+    return (ssize_t) mProxy->getStartThresholdInFrames();
+}
+
+ssize_t AudioTrack::setStartThresholdInFrames(size_t startThresholdInFrames)
+{
+    if (startThresholdInFrames > INT32_MAX || startThresholdInFrames == 0) {
+        // contractually we could simply return the current threshold in frames
+        // to indicate the request was ignored, but we return an error here.
+        return BAD_VALUE;
+    }
+    AutoMutex lock(mLock);
+    // Calling setStartThresholdInFrames() between the AudioTrack default ctor
+    // AudioTrack() and set(...) is not permitted; such an attempt fails here.
+    // (Supporting it would require caching mOrigStartThresholdInFrames, and the
+    // requested value could not be properly validated at that point.)
+    if (mOutput == AUDIO_IO_HANDLE_NONE || mProxy.get() == 0) {
+        return NO_INIT;
+    }
+    const uint32_t original = mProxy->getStartThresholdInFrames();
+    const uint32_t final = mProxy->setStartThresholdInFrames(startThresholdInFrames);
+    if (original != final) {
+        android::mediametrics::LogItem(mMetricsId)
+                .set(AMEDIAMETRICS_PROP_EVENT, AMEDIAMETRICS_PROP_EVENT_VALUE_SETSTARTTHRESHOLD)
+                .set(AMEDIAMETRICS_PROP_STARTTHRESHOLDFRAMES, (int32_t)final)
+                .record();
+        if (original > final) {
+            // restart track if it was disabled by audioflinger due to previous underrun
+            // and we reduced the number of frames for the threshold.
+            restartIfDisabled();
+        }
+    }
+    return final;
+}
+
 status_t AudioTrack::setLoop(uint32_t loopStart, uint32_t loopEnd, int loopCount)
 {
     if (mSharedBuffer == 0 || isOffloadedOrDirect()) {
@@ -1655,8 +1689,7 @@
     input.config.channel_mask = mChannelMask;
     input.config.format = mFormat;
     input.config.offload_info = mOffloadInfoCopy;
-    input.clientInfo.clientUid = mClientUid;
-    input.clientInfo.clientPid = mClientPid;
+    input.clientInfo.identity = mClientIdentity;
     input.clientInfo.clientTid = -1;
     if (mFlags & AUDIO_OUTPUT_FLAG_FAST) {
         // It is currently meaningless to request SCHED_FIFO for a Java thread.  Even if the
@@ -1680,7 +1713,6 @@
     input.selectedDeviceId = mSelectedDeviceId;
     input.sessionId = mSessionId;
     input.audioTrackCallback = mAudioTrackCallback;
-    input.opPackageName = mOpPackageName;
 
     media::CreateTrackResponse response;
     status = audioFlinger->createTrack(VALUE_OR_FATAL(input.toAidl()), response);
@@ -1857,6 +1889,8 @@
         .set(AMEDIAMETRICS_PROP_FLAGS, toString(mFlags).c_str())
         .set(AMEDIAMETRICS_PROP_ORIGINALFLAGS, toString(mOrigFlags).c_str())
         .set(AMEDIAMETRICS_PROP_SESSIONID, (int32_t)mSessionId)
+        .set(AMEDIAMETRICS_PROP_LOGSESSIONID, mLogSessionId)
+        .set(AMEDIAMETRICS_PROP_PLAYERIID, mPlayerIId)
         .set(AMEDIAMETRICS_PROP_TRACKID, mPortId) // dup from key
         .set(AMEDIAMETRICS_PROP_CONTENTTYPE, toString(mAttributes.content_type).c_str())
         .set(AMEDIAMETRICS_PROP_USAGE, toString(mAttributes.usage).c_str())
@@ -2608,6 +2642,10 @@
         staticPosition = mStaticProxy->getPosition().unsignedValue();
     }
 
+    // save the old startThreshold and framecount
+    const uint32_t originalStartThresholdInFrames = mProxy->getStartThresholdInFrames();
+    const uint32_t originalFrameCount = mProxy->frameCount();
+
     // See b/74409267. Connecting to a BT A2DP device supporting multiple codecs
     // causes a lot of churn on the service side, and it can reject starting
     // playback of a previously created track. May also apply to other cases.
@@ -2668,6 +2706,18 @@
             return status;
         });
 
+        // restore the original start threshold if different than frameCount.
+        if (originalStartThresholdInFrames != originalFrameCount) {
+            // Note: mProxy->setStartThresholdInFrames() call is in the Proxy
+            // and does not trigger a restart.
+            // (Also CBLK_DISABLED is not set, buffers are empty after track recreation).
+            // Any start would be triggered on the mState == ACTIVE check below.
+            const uint32_t currentThreshold =
+                    mProxy->setStartThresholdInFrames(originalStartThresholdInFrames);
+            ALOGD_IF(originalStartThresholdInFrames != currentThreshold,
+                    "%s(%d) startThresholdInFrames changing from %u to %u",
+                    __func__, mPortId, originalStartThresholdInFrames, currentThreshold);
+        }
         if (mState == STATE_ACTIVE) {
             mAudioTrack->start(&result);
         }
@@ -3263,6 +3313,31 @@
     return mProxy->getUnderrunFrames();
 }
 
+void AudioTrack::setLogSessionId(const char *logSessionId)
+{
+    AutoMutex lock(mLock);
+    if (logSessionId == nullptr) logSessionId = "";  // an empty string is an unset session id.
+    if (mLogSessionId == logSessionId) return;
+
+    mLogSessionId = logSessionId;
+    mediametrics::LogItem(mMetricsId)
+        .set(AMEDIAMETRICS_PROP_EVENT, AMEDIAMETRICS_PROP_EVENT_VALUE_SETLOGSESSIONID)
+        .set(AMEDIAMETRICS_PROP_LOGSESSIONID, logSessionId)
+        .record();
+}
+
+void AudioTrack::setPlayerIId(int playerIId)
+{
+    AutoMutex lock(mLock);
+    if (mPlayerIId == playerIId) return;
+
+    mPlayerIId = playerIId;
+    mediametrics::LogItem(mMetricsId)
+        .set(AMEDIAMETRICS_PROP_EVENT, AMEDIAMETRICS_PROP_EVENT_VALUE_SETPLAYERIID)
+        .set(AMEDIAMETRICS_PROP_PLAYERIID, playerIId)
+        .record();
+}
+
 status_t AudioTrack::addAudioDeviceCallback(const sp<AudioSystem::AudioDeviceCallback>& callback)
 {
 
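
A hedged client-side sketch, not from this change, of the new start-threshold API on AudioTrack: requests are clamped to [1, frameCount] by the proxy, and lowering the threshold may restart a track that AudioFlinger disabled after an underrun.

#include <sys/types.h>
#include <media/AudioTrack.h>
#include <utils/StrongPointer.h>

// Sketch only: try to start playback after half as many buffered frames.
ssize_t halveStartThreshold(const android::sp<android::AudioTrack>& track) {
    const ssize_t current = track->getStartThresholdInFrames();
    if (current <= 1) {
        return current;  // NO_INIT before set(), or already at the minimum
    }
    // Returns the threshold actually applied (possibly clamped), or BAD_VALUE
    // for a zero or out-of-range request.
    return track->setStartThresholdInFrames(static_cast<size_t>(current) / 2);
}
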
diff --git a/media/libaudioclient/AudioTrackShared.cpp b/media/libaudioclient/AudioTrackShared.cpp
index e2c9698..35719be 100644
--- a/media/libaudioclient/AudioTrackShared.cpp
+++ b/media/libaudioclient/AudioTrackShared.cpp
@@ -17,6 +17,7 @@
 #define LOG_TAG "AudioTrackShared"
 //#define LOG_NDEBUG 0
 
+#include <atomic>
 #include <android-base/macros.h>
 #include <private/media/AudioTrackShared.h>
 #include <utils/Log.h>
@@ -33,6 +34,21 @@
     return sizeof(T) > sizeof(size_t) && x > (T) SIZE_MAX ? SIZE_MAX : x < 0 ? 0 : (size_t) x;
 }
 
+// Compile-time safe atomics. TODO: update all methods to use them.
+template <typename T>
+T android_atomic_load(const volatile T* addr) {
+    static_assert(sizeof(T) == sizeof(std::atomic<T>)); // no extra sync data required.
+    static_assert(std::atomic<T>::is_always_lock_free); // no lock-based fallback.
+    return atomic_load((std::atomic<T>*)addr);          // memory_order_seq_cst
+}
+
+template <typename T>
+void android_atomic_store(const volatile T* addr, T value) {
+    static_assert(sizeof(T) == sizeof(std::atomic<T>)); // no extra sync data required.
+    static_assert(std::atomic<T>::is_always_lock_free); // no lock-based fallback.
+    atomic_store((std::atomic<T>*)addr, value);         // memory_order_seq_cst
+}
+
 // incrementSequence is used to determine the next sequence value
 // for the loop and position sequence counters.  It should return
 // a value between "other" + 1 and "other" + INT32_MAX, the choice of
@@ -51,6 +67,7 @@
     : mServer(0), mFutex(0), mMinimum(0)
     , mVolumeLR(GAIN_MINIFLOAT_PACKED_UNITY), mSampleRate(0), mSendLevel(0)
     , mBufferSizeInFrames(0)
+    , mStartThresholdInFrames(0) // filled in by the server.
     , mFlags(0)
 {
     memset(&u, 0, sizeof(u));
@@ -66,6 +83,26 @@
 {
 }
 
+uint32_t Proxy::getStartThresholdInFrames() const
+{
+    const uint32_t startThresholdInFrames =
+           android_atomic_load(&mCblk->mStartThresholdInFrames);
+    if (startThresholdInFrames == 0 || startThresholdInFrames > mFrameCount) {
+        ALOGD("%s: startThresholdInFrames %u not between 1 and frameCount %zu, "
+                "setting to frameCount",
+                __func__, startThresholdInFrames, mFrameCount);
+        return mFrameCount;
+    }
+    return startThresholdInFrames;
+}
+
+uint32_t Proxy::setStartThresholdInFrames(uint32_t startThresholdInFrames)
+{
+    const uint32_t actual = std::min((size_t)startThresholdInFrames, frameCount());
+    android_atomic_store(&mCblk->mStartThresholdInFrames, actual);
+    return actual;
+}
+
 // ---------------------------------------------------------------------------
 
 ClientProxy::ClientProxy(audio_track_cblk_t* cblk, void *buffers, size_t frameCount,
@@ -663,6 +700,7 @@
     , mTimestampMutator(&cblk->mExtendedTimestampQueue)
 {
     cblk->mBufferSizeInFrames = frameCount;
+    cblk->mStartThresholdInFrames = frameCount;
 }
 
 __attribute__((no_sanitize("integer")))
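
A small illustration, under the assumption that the android_atomic_load/store helpers above are visible, of sequentially-consistent access to a field shared through a memory-mapped control block; SharedBlock is a made-up stand-in for audio_track_cblk_t.

#include <cstdint>

struct SharedBlock {
    volatile uint32_t mThresholdFrames;  // written by the client, read by the server
};

uint32_t readThreshold(const SharedBlock* blk) {
    // seq_cst load through the type-checked helper (no torn reads, no locks).
    return android_atomic_load(&blk->mThresholdFrames);
}

void writeThreshold(SharedBlock* blk, uint32_t frames) {
    // seq_cst store; a type whose std::atomic wrapper needs extra storage or
    // a lock would be rejected at compile time by the static_asserts.
    android_atomic_store(&blk->mThresholdFrames, frames);
}
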
diff --git a/media/libaudioclient/IAudioFlinger.cpp b/media/libaudioclient/IAudioFlinger.cpp
index 20124df..0feafc5 100644
--- a/media/libaudioclient/IAudioFlinger.cpp
+++ b/media/libaudioclient/IAudioFlinger.cpp
@@ -61,7 +61,6 @@
     aidl.notificationsPerBuffer = VALUE_OR_RETURN(convertIntegral<int32_t>(notificationsPerBuffer));
     aidl.speed = speed;
     aidl.audioTrackCallback = audioTrackCallback;
-    aidl.opPackageName = opPackageName;
     aidl.flags = VALUE_OR_RETURN(legacy2aidl_audio_output_flags_t_int32_t_mask(flags));
     aidl.frameCount = VALUE_OR_RETURN(convertIntegral<int64_t>(frameCount));
     aidl.notificationFrameCount = VALUE_OR_RETURN(convertIntegral<int64_t>(notificationFrameCount));
@@ -82,7 +81,6 @@
             convertIntegral<uint32_t>(aidl.notificationsPerBuffer));
     legacy.speed = aidl.speed;
     legacy.audioTrackCallback = aidl.audioTrackCallback;
-    legacy.opPackageName = aidl.opPackageName;
     legacy.flags = VALUE_OR_RETURN(aidl2legacy_int32_t_audio_output_flags_t_mask(aidl.flags));
     legacy.frameCount = VALUE_OR_RETURN(convertIntegral<size_t>(aidl.frameCount));
     legacy.notificationFrameCount = VALUE_OR_RETURN(
@@ -139,7 +137,6 @@
     aidl.attr = VALUE_OR_RETURN(legacy2aidl_audio_attributes_t_AudioAttributesInternal(attr));
     aidl.config = VALUE_OR_RETURN(legacy2aidl_audio_config_base_t_AudioConfigBase(config));
     aidl.clientInfo = VALUE_OR_RETURN(legacy2aidl_AudioClient_AudioClient(clientInfo));
-    aidl.opPackageName = VALUE_OR_RETURN(legacy2aidl_String16_string(opPackageName));
     aidl.riid = VALUE_OR_RETURN(legacy2aidl_audio_unique_id_t_int32_t(riid));
     aidl.flags = VALUE_OR_RETURN(legacy2aidl_audio_input_flags_t_int32_t_mask(flags));
     aidl.frameCount = VALUE_OR_RETURN(convertIntegral<int64_t>(frameCount));
@@ -157,7 +154,6 @@
     legacy.attr = VALUE_OR_RETURN(aidl2legacy_AudioAttributesInternal_audio_attributes_t(aidl.attr));
     legacy.config = VALUE_OR_RETURN(aidl2legacy_AudioConfigBase_audio_config_base_t(aidl.config));
     legacy.clientInfo = VALUE_OR_RETURN(aidl2legacy_AudioClient_AudioClient(aidl.clientInfo));
-    legacy.opPackageName = VALUE_OR_RETURN(aidl2legacy_string_view_String16(aidl.opPackageName));
     legacy.riid = VALUE_OR_RETURN(aidl2legacy_int32_t_audio_unique_id_t(aidl.riid));
     legacy.flags = VALUE_OR_RETURN(aidl2legacy_int32_t_audio_input_flags_t_mask(aidl.flags));
     legacy.frameCount = VALUE_OR_RETURN(convertIntegral<size_t>(aidl.frameCount));
@@ -737,6 +733,11 @@
     return statusTFromBinderStatus(mDelegate->setAudioHalPids(pidsAidl));
 }
 
+status_t AudioFlingerClientAdapter::setVibratorInfos(
+        const std::vector<media::AudioVibratorInfo>& vibratorInfos) {
+    return statusTFromBinderStatus(mDelegate->setVibratorInfos(vibratorInfos));
+}
+
 
 ////////////////////////////////////////////////////////////////////////////////////////////////////
 // AudioFlingerServerAdapter
@@ -1178,4 +1179,9 @@
     return Status::ok();
 }
 
+Status AudioFlingerServerAdapter::setVibratorInfos(
+        const std::vector<media::AudioVibratorInfo>& vibratorInfos) {
+    return Status::fromStatusT(mDelegate->setVibratorInfos(vibratorInfos));
+}
+
 } // namespace android
diff --git a/media/libaudioclient/ToneGenerator.cpp b/media/libaudioclient/ToneGenerator.cpp
index c9f3ab9..451c4b1 100644
--- a/media/libaudioclient/ToneGenerator.cpp
+++ b/media/libaudioclient/ToneGenerator.cpp
@@ -27,6 +27,7 @@
 
 namespace android {
 
+using media::permission::Identity;
 
 // Descriptors for all available tones (See ToneGenerator::ToneDescriptor class declaration for details)
 const ToneGenerator::ToneDescriptor ToneGenerator::sToneDescriptors[] = {
@@ -1259,7 +1260,10 @@
 ////////////////////////////////////////////////////////////////////////////////
 bool ToneGenerator::initAudioTrack() {
     // Open audio track in mono, PCM 16bit, default sampling rate.
-    mpAudioTrack = new AudioTrack(mOpPackageName);
+    // TODO b/182392769: use identity util
+    Identity identity = Identity();
+    identity.packageName = mOpPackageName;
+    mpAudioTrack = new AudioTrack(identity);
     ALOGV("AudioTrack(%p) created", mpAudioTrack.get());
 
     audio_attributes_t attr;
@@ -1285,8 +1289,7 @@
             AUDIO_SESSION_ALLOCATE,
             AudioTrack::TRANSFER_CALLBACK,
             nullptr,
-            AUDIO_UID_INVALID,
-            -1,
+            identity,
             &attr);
     // Set caller name so it can be logged in destructor.
     // MediaMetricsConstants.h: AMEDIAMETRICS_PROP_CALLERNAME_VALUE_TONEGENERATOR
diff --git a/media/libaudioclient/TrackPlayerBase.cpp b/media/libaudioclient/TrackPlayerBase.cpp
index 536e9fb..188f321 100644
--- a/media/libaudioclient/TrackPlayerBase.cpp
+++ b/media/libaudioclient/TrackPlayerBase.cpp
@@ -40,6 +40,7 @@
     if (mAudioTrack != 0) {
         mSelfAudioDeviceCallback = new SelfAudioDeviceCallback(*this);
         mAudioTrack->addAudioDeviceCallback(mSelfAudioDeviceCallback);
+        mAudioTrack->setPlayerIId(mPIId); // set in PlayerBase::init().
     }
 }
 
diff --git a/media/libaudioclient/aidl/android/media/AudioClient.aidl b/media/libaudioclient/aidl/android/media/AudioClient.aidl
index 7bff0d6..aa4d8f5 100644
--- a/media/libaudioclient/aidl/android/media/AudioClient.aidl
+++ b/media/libaudioclient/aidl/android/media/AudioClient.aidl
@@ -16,15 +16,13 @@
 
 package android.media;
 
+import android.media.permission.Identity;
+
 /**
  * {@hide}
  */
 parcelable AudioClient {
-    /** Interpreted as uid_t. */
-    int clientUid;
-    /** Interpreted as pid_t. */
-    int clientPid;
     /** Interpreted as pid_t. */
     int clientTid;
-    @utf8InCpp String packageName;
+    Identity identity;
 }
diff --git a/media/codec2/hidl/1.1/utils/OutputBufferQueue.cpp b/media/libaudioclient/aidl/android/media/AudioEncapsulationType.aidl
similarity index 63%
copy from media/codec2/hidl/1.1/utils/OutputBufferQueue.cpp
copy to media/libaudioclient/aidl/android/media/AudioEncapsulationType.aidl
index 65756e8..b08a604 100644
--- a/media/codec2/hidl/1.1/utils/OutputBufferQueue.cpp
+++ b/media/libaudioclient/aidl/android/media/AudioEncapsulationType.aidl
@@ -1,5 +1,5 @@
 /*
- * Copyright 2019 The Android Open Source Project
+ * Copyright (C) 2021 The Android Open Source Project
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
@@ -14,4 +14,16 @@
  * limitations under the License.
  */
 
-#include <codec2/hidl/1.1/OutputBufferQueue.h>
+package android.media;
+
+/**
+ * Audio encapsulation type indicates whether the audio data should be sent with a
+ * particular encapsulation type.
+ *
+ * {@hide}
+ */
+@Backing(type="int")
+enum AudioEncapsulationType {
+    NONE     = 0,
+    IEC61937 = 1,
+}
\ No newline at end of file
diff --git a/media/libaudioclient/aidl/android/media/AudioPort.aidl b/media/libaudioclient/aidl/android/media/AudioPort.aidl
index 123aeb0..bf0e5b7 100644
--- a/media/libaudioclient/aidl/android/media/AudioPort.aidl
+++ b/media/libaudioclient/aidl/android/media/AudioPort.aidl
@@ -22,6 +22,7 @@
 import android.media.AudioPortRole;
 import android.media.AudioPortType;
 import android.media.AudioProfile;
+import android.media.ExtraAudioDescriptor;
 
 /**
  * {@hide}
@@ -36,6 +37,11 @@
     @utf8InCpp String name;
     /** AudioProfiles supported by this port (format, Rates, Channels). */
     AudioProfile[] profiles;
+    /**
+     * ExtraAudioDescriptors supported by this port. Used when the format is not recognized
+     * by the platform; the audio capability is then described by a hardware descriptor.
+     */
+    ExtraAudioDescriptor[] extraAudioDescriptors;
     /** Gain controllers. */
     AudioGain[] gains;
     /** Current audio port configuration. */
diff --git a/media/libaudioclient/aidl/android/media/AudioProfile.aidl b/media/libaudioclient/aidl/android/media/AudioProfile.aidl
index e5e8812..afb288f 100644
--- a/media/libaudioclient/aidl/android/media/AudioProfile.aidl
+++ b/media/libaudioclient/aidl/android/media/AudioProfile.aidl
@@ -16,6 +16,7 @@
 
 package android.media;
 
+import android.media.AudioEncapsulationType;
 import android.media.audio.common.AudioFormat;
 
 /**
@@ -31,4 +32,5 @@
     boolean isDynamicFormat;
     boolean isDynamicChannels;
     boolean isDynamicRate;
+    AudioEncapsulationType encapsulationType;
 }
diff --git a/media/codec2/hidl/1.1/utils/OutputBufferQueue.cpp b/media/libaudioclient/aidl/android/media/AudioStandard.aidl
similarity index 69%
copy from media/codec2/hidl/1.1/utils/OutputBufferQueue.cpp
copy to media/libaudioclient/aidl/android/media/AudioStandard.aidl
index 65756e8..e131d0d 100644
--- a/media/codec2/hidl/1.1/utils/OutputBufferQueue.cpp
+++ b/media/libaudioclient/aidl/android/media/AudioStandard.aidl
@@ -1,5 +1,5 @@
 /*
- * Copyright 2019 The Android Open Source Project
+ * Copyright (C) 2021 The Android Open Source Project
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
@@ -13,5 +13,15 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
+package android.media;
 
-#include <codec2/hidl/1.1/OutputBufferQueue.h>
+/**
+ * The audio standard that describes audio playback/capture capabilities.
+ *
+ * {@hide}
+ */
+@Backing(type="int")
+enum AudioStandard {
+    NONE = 0,
+    EDID = 1,
+}
diff --git a/media/codec2/hidl/1.1/utils/OutputBufferQueue.cpp b/media/libaudioclient/aidl/android/media/AudioVibratorInfo.aidl
similarity index 66%
copy from media/codec2/hidl/1.1/utils/OutputBufferQueue.cpp
copy to media/libaudioclient/aidl/android/media/AudioVibratorInfo.aidl
index 65756e8..f88fc3c 100644
--- a/media/codec2/hidl/1.1/utils/OutputBufferQueue.cpp
+++ b/media/libaudioclient/aidl/android/media/AudioVibratorInfo.aidl
@@ -1,5 +1,5 @@
 /*
- * Copyright 2019 The Android Open Source Project
+ * Copyright (C) 2021 The Android Open Source Project
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
@@ -14,4 +14,14 @@
  * limitations under the License.
  */
 
-#include <codec2/hidl/1.1/OutputBufferQueue.h>
+package android.media;
+
+/**
+ * {@hide}
+ * A parcelable for vibrator information. The information will be used by the HapticGenerator effect.
+ */
+parcelable AudioVibratorInfo {
+    int id;
+    float resonantFrequency;
+    float qFactor;
+}
diff --git a/media/libaudioclient/aidl/android/media/CreateEffectRequest.aidl b/media/libaudioclient/aidl/android/media/CreateEffectRequest.aidl
index 8368854..5737fcd 100644
--- a/media/libaudioclient/aidl/android/media/CreateEffectRequest.aidl
+++ b/media/libaudioclient/aidl/android/media/CreateEffectRequest.aidl
@@ -19,6 +19,7 @@
 import android.media.AudioDevice;
 import android.media.EffectDescriptor;
 import android.media.IEffectClient;
+import android.media.permission.Identity;
 
 /**
  * Input arguments of the createEffect() method.
@@ -34,8 +35,6 @@
     /** Interpreted as audio_session_t. */
     int sessionId;
     AudioDevice device;
-    @utf8InCpp String opPackageName;
-    /** Interpreted as pid_t. */
-    int pid;
+    Identity identity;
     boolean probe;
 }
diff --git a/media/libaudioclient/aidl/android/media/CreateRecordRequest.aidl b/media/libaudioclient/aidl/android/media/CreateRecordRequest.aidl
index 6da743a..62007da 100644
--- a/media/libaudioclient/aidl/android/media/CreateRecordRequest.aidl
+++ b/media/libaudioclient/aidl/android/media/CreateRecordRequest.aidl
@@ -19,6 +19,7 @@
 import android.media.AudioAttributesInternal;
 import android.media.AudioClient;
 import android.media.AudioConfigBase;
+import android.media.permission.Identity;
 
 /**
  * CreateRecordRequest contains all input arguments sent by AudioRecord to AudioFlinger
@@ -31,7 +32,6 @@
     AudioAttributesInternal attr;
     AudioConfigBase config;
     AudioClient clientInfo;
-    @utf8InCpp String opPackageName;
     /** Interpreted as audio_unique_id_t. */
     int riid;
     /** Bitmask, indexed by AudioInputFlags. */
diff --git a/media/libaudioclient/aidl/android/media/ExtraAudioDescriptor.aidl b/media/libaudioclient/aidl/android/media/ExtraAudioDescriptor.aidl
new file mode 100644
index 0000000..ec5b67a
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/ExtraAudioDescriptor.aidl
@@ -0,0 +1,32 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.media.AudioEncapsulationType;
+import android.media.AudioStandard;
+
+/**
+ * The audio descriptor that describes playback/capture capabilities according to
+ * a particular standard.
+ *
+ * {@hide}
+ */
+parcelable ExtraAudioDescriptor {
+    AudioStandard standard;
+    byte[] audioDescriptor;
+    AudioEncapsulationType encapsulationType;
+}
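
An illustrative sketch, not code from this change, of wrapping a hardware-reported descriptor in the new parcelable; the byte payload and the chosen standard/encapsulation values are placeholders, and the generated C++ field types are assumed from the AIDL declarations above.

#include <cstdint>
#include <vector>
#include <android/media/AudioEncapsulationType.h>
#include <android/media/AudioStandard.h>
#include <android/media/ExtraAudioDescriptor.h>

// Sketch only: package an EDID short audio descriptor whose format the
// platform itself does not recognize.
android::media::ExtraAudioDescriptor wrapEdidDescriptor(
        const std::vector<uint8_t>& shortAudioDescriptor) {
    android::media::ExtraAudioDescriptor desc;
    desc.standard = android::media::AudioStandard::EDID;
    desc.audioDescriptor = shortAudioDescriptor;  // raw bytes from the device
    desc.encapsulationType = android::media::AudioEncapsulationType::IEC61937;
    return desc;
}
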
diff --git a/media/libaudioclient/aidl/android/media/IAudioFlingerService.aidl b/media/libaudioclient/aidl/android/media/IAudioFlingerService.aidl
index e63f391..abbced5 100644
--- a/media/libaudioclient/aidl/android/media/IAudioFlingerService.aidl
+++ b/media/libaudioclient/aidl/android/media/IAudioFlingerService.aidl
@@ -23,6 +23,7 @@
 import android.media.AudioStreamType;
 import android.media.AudioUniqueIdUse;
 import android.media.AudioUuid;
+import android.media.AudioVibratorInfo;
 import android.media.CreateEffectRequest;
 import android.media.CreateEffectResponse;
 import android.media.CreateRecordRequest;
@@ -202,4 +203,8 @@
     MicrophoneInfoData[] getMicrophones();
 
     void setAudioHalPids(in int[] /* pid_t[] */ pids);
+
+    // Set vibrators' information.
+    // The value will be used to initialize HapticGenerator.
+    void setVibratorInfos(in AudioVibratorInfo[] vibratorInfos);
 }
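
A hedged sketch of how a system-side caller might publish vibrator characteristics through the new setVibratorInfos() path; the id, resonant frequency, and Q values are invented placeholders.

#include <vector>
#include <android/media/AudioVibratorInfo.h>
#include <media/AudioSystem.h>

android::status_t publishVibratorInfo() {
    android::media::AudioVibratorInfo info;
    info.id = 0;                      // placeholder vibrator id
    info.resonantFrequency = 150.0f;  // Hz, placeholder
    info.qFactor = 8.0f;              // placeholder
    // Forwarded to AudioFlinger, which uses it to configure HapticGenerator.
    return android::AudioSystem::setVibratorInfos({info});
}
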
diff --git a/media/libaudioclient/aidl/android/media/IAudioPolicyService.aidl b/media/libaudioclient/aidl/android/media/IAudioPolicyService.aidl
index 89fad5a..f8924f3 100644
--- a/media/libaudioclient/aidl/android/media/IAudioPolicyService.aidl
+++ b/media/libaudioclient/aidl/android/media/IAudioPolicyService.aidl
@@ -48,6 +48,7 @@
 import android.media.IAudioPolicyServiceClient;
 import android.media.ICaptureStateListener;
 import android.media.Int;
+import android.media.permission.Identity;
 import android.media.SoundTriggerSession;
 
 /**
@@ -80,8 +81,7 @@
 
     GetOutputForAttrResponse getOutputForAttr(in AudioAttributesInternal attr,
                                               int /* audio_session_t */ session,
-                                              int /* pid_t */ pid,
-                                              int /* uid_t */ uid,
+                                              in Identity identity,
                                               in AudioConfig config,
                                               int /* Bitmask, indexed by AudioOutputFlags */ flags,
                                               int /* audio_port_handle_t */ selectedDeviceId);
@@ -96,9 +96,7 @@
                                             int /* audio_io_handle_t */ input,
                                             int /* audio_unique_id_t */ riid,
                                             int /* audio_session_t */ session,
-                                            int /* pid_t */ pid,
-                                            int /* uid_t */ uid,
-                                            @utf8InCpp String opPackageName,
+                                            in Identity identity,
                                             in AudioConfigBase config,
                                             int /* Bitmask, indexed by AudioInputFlags */ flags,
                                             int /* audio_port_handle_t */ selectedDeviceId);
@@ -279,11 +277,21 @@
      * Passing '0' on input and inspecting the value on output is a common way of determining the
      * number of elements without actually retrieving them.
      */
-    void getSurroundFormats(boolean reported,
-                            inout Int count,
+    void getSurroundFormats(inout Int count,
                             out AudioFormat[] formats,
                             out boolean[] formatsEnabled);
 
+    /**
+     * Populates 'formats' with the surround formats reported by connected HDMI devices.
+     *
+     * On input, count represents the maximum length of the returned array.
+     * On output, count is the total number of elements, which may be larger than the array size.
+     * Passing '0' on input and inspecting the value on output is a common way of determining the
+     * number of elements without actually retrieving them.
+     */
+    void getReportedSurroundFormats(inout Int count,
+                                    out AudioFormat[] formats);
+
     AudioFormat[] getHwOffloadEncodingFormatsSupportedForA2DP();
 
     void setSurroundFormatEnabled(AudioFormat audioFormat, boolean enabled);
diff --git a/media/libaudioclient/fuzzer/Android.bp b/media/libaudioclient/fuzzer/Android.bp
index a8eb385..21e25b9 100644
--- a/media/libaudioclient/fuzzer/Android.bp
+++ b/media/libaudioclient/fuzzer/Android.bp
@@ -14,6 +14,15 @@
  * limitations under the License.
  */
 
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
 cc_fuzz {
     name: "audioflinger_fuzzer",
     srcs: [
@@ -56,6 +65,7 @@
         "libutils",
         "libxml2",
         "mediametricsservice-aidl-cpp",
+        "media_permission-aidl-cpp",
     ],
     header_libs: [
         "libaudiofoundation_headers",
diff --git a/media/libaudioclient/fuzzer/audioflinger_fuzzer.cpp b/media/libaudioclient/fuzzer/audioflinger_fuzzer.cpp
index db2b0b8..1b75917 100644
--- a/media/libaudioclient/fuzzer/audioflinger_fuzzer.cpp
+++ b/media/libaudioclient/fuzzer/audioflinger_fuzzer.cpp
@@ -23,8 +23,10 @@
  */
 
 #include <android_audio_policy_configuration_V7_0-enums.h>
+#include <android/media/permission/Identity.h>
 #include <binder/IServiceManager.h>
 #include <binder/MemoryDealer.h>
+#include <media/AidlConversion.h>
 #include <media/AudioEffect.h>
 #include <media/AudioRecord.h>
 #include <media/AudioSystem.h>
@@ -35,6 +37,10 @@
 #define MAX_STRING_LENGTH 256
 #define MAX_ARRAY_LENGTH 256
 
+constexpr int32_t kMinSampleRateHz = 4000;
+constexpr int32_t kMaxSampleRateHz = 192000;
+constexpr int32_t kSampleRateUnspecified = 0;
+
 using namespace std;
 using namespace android;
 
@@ -42,6 +48,8 @@
 using namespace ::android::audio::policy::configuration::V7_0;
 }
 
+using media::permission::Identity;
+
 constexpr audio_unique_id_use_t kUniqueIds[] = {
     AUDIO_UNIQUE_ID_USE_UNSPECIFIED, AUDIO_UNIQUE_ID_USE_SESSION, AUDIO_UNIQUE_ID_USE_MODULE,
     AUDIO_UNIQUE_ID_USE_EFFECT,      AUDIO_UNIQUE_ID_USE_PATCH,   AUDIO_UNIQUE_ID_USE_OUTPUT,
@@ -130,29 +138,20 @@
         xsdc_enum_range<xsd::AudioInOutFlag>{}, audio_output_flag_from_string, "_OUTPUT_");
 
 template <typename T, size_t size>
-T getValueFromArray(FuzzedDataProvider *fdp, const T (&arr)[size]) {
-    return arr[fdp->ConsumeIntegralInRange<int32_t>(0, size - 1)];
-}
-
-template <typename T, size_t size>
 T getValue(FuzzedDataProvider *fdp, const T (&arr)[size]) {
-    if (fdp->ConsumeBool()) {
-        return static_cast<T>(fdp->ConsumeIntegral<int32_t>());
-    }
-    return getValueFromArray(fdp, arr);
-}
-
-template <typename T>
-T getValueFromVector(FuzzedDataProvider *fdp, std::vector<T> vec) {
-    return vec[fdp->ConsumeIntegralInRange<int32_t>(0, vec.size() - 1)];
+    return arr[fdp->ConsumeIntegralInRange<int32_t>(0, size - 1)];
 }
 
 template <typename T>
 T getValue(FuzzedDataProvider *fdp, std::vector<T> vec) {
+    return vec[fdp->ConsumeIntegralInRange<int32_t>(0, vec.size() - 1)];
+}
+
+int32_t getSampleRate(FuzzedDataProvider *fdp) {
     if (fdp->ConsumeBool()) {
-        return static_cast<T>(fdp->ConsumeIntegral<int32_t>());
+        return fdp->ConsumeIntegralInRange<int32_t>(kMinSampleRateHz, kMaxSampleRateHz);
     }
-    return getValueFromVector(fdp, vec);
+    return kSampleRateUnspecified;
 }
 
 class DeathNotifier : public IBinder::DeathRecipient {
@@ -189,7 +188,7 @@
 }
 
 void AudioFlingerFuzzer::invokeAudioTrack() {
-    uint32_t sampleRate = mFdp.ConsumeIntegral<uint32_t>();
+    uint32_t sampleRate = getSampleRate(&mFdp);
     audio_format_t format = getValue(&mFdp, kFormats);
     audio_channel_mask_t channelMask = getValue(&mFdp, kChannelMasks);
     size_t frameCount = static_cast<size_t>(mFdp.ConsumeIntegral<uint32_t>());
@@ -226,11 +225,15 @@
     attributes.usage = usage;
     sp<AudioTrack> track = new AudioTrack();
 
+    // TODO b/182392769: use identity util
+    Identity i;
+    i.uid = VALUE_OR_FATAL(legacy2aidl_uid_t_int32_t(getuid()));
+    i.pid = VALUE_OR_FATAL(legacy2aidl_pid_t_int32_t(getpid()));
     track->set(AUDIO_STREAM_DEFAULT, sampleRate, format, channelMask, frameCount, flags, nullptr,
                nullptr, notificationFrames, sharedBuffer, false, sessionId,
                ((fast && sharedBuffer == 0) || offload) ? AudioTrack::TRANSFER_CALLBACK
                                                         : AudioTrack::TRANSFER_DEFAULT,
-               offload ? &offloadInfo : nullptr, getuid(), getpid(), &attributes, false, 1.0f,
+               offload ? &offloadInfo : nullptr, i, &attributes, false, 1.0f,
                AUDIO_PORT_HANDLE_NONE);
 
     status_t status = track->initCheck();
@@ -259,7 +262,7 @@
 
     float auxEffectSendLevel;
     track->getAuxEffectSendLevel(&auxEffectSendLevel);
-    track->setSampleRate(mFdp.ConsumeIntegral<uint32_t>());
+    track->setSampleRate(getSampleRate(&mFdp));
     track->getSampleRate();
     track->getOriginalSampleRate();
 
@@ -292,7 +295,7 @@
 
 void AudioFlingerFuzzer::invokeAudioRecord() {
     int32_t notificationFrames = mFdp.ConsumeIntegral<int32_t>();
-    uint32_t sampleRate = mFdp.ConsumeIntegral<uint32_t>();
+    uint32_t sampleRate = getSampleRate(&mFdp);
     size_t frameCount = static_cast<size_t>(mFdp.ConsumeIntegral<uint32_t>());
     audio_format_t format = getValue(&mFdp, kFormats);
     audio_channel_mask_t channelMask = getValue(&mFdp, kChannelMasks);
@@ -305,7 +308,10 @@
 
     attributes.source = inputSource;
 
-    sp<AudioRecord> record = new AudioRecord(String16(mFdp.ConsumeRandomLengthString().c_str()));
+    // TODO b/182392769: use identity util
+    Identity i;
+    i.packageName = std::string(mFdp.ConsumeRandomLengthString().c_str());
+    sp<AudioRecord> record = new AudioRecord(i);
     record->set(AUDIO_SOURCE_DEFAULT, sampleRate, format, channelMask, frameCount, nullptr, nullptr,
                 notificationFrames, false, sessionId,
                 fast ? AudioRecord::TRANSFER_CALLBACK : AudioRecord::TRANSFER_DEFAULT, flags,
@@ -396,7 +402,7 @@
     const int32_t priority = mFdp.ConsumeIntegral<int32_t>();
     audio_session_t sessionId = static_cast<audio_session_t>(mFdp.ConsumeIntegral<int32_t>());
     const audio_io_handle_t io = mFdp.ConsumeIntegral<int32_t>();
-    String16 opPackageName = static_cast<String16>(mFdp.ConsumeRandomLengthString().c_str());
+    std::string opPackageName = static_cast<std::string>(mFdp.ConsumeRandomLengthString().c_str());
     AudioDeviceTypeAddr device;
 
     sp<IAudioFlinger> af = AudioSystem::get_audio_flinger();
@@ -412,8 +418,9 @@
     request.output = io;
     request.sessionId = sessionId;
     request.device = VALUE_OR_RETURN_STATUS(legacy2aidl_AudioDeviceTypeAddress(device));
-    request.opPackageName = VALUE_OR_RETURN_STATUS(legacy2aidl_String16_string(opPackageName));
-    request.pid = getpid();
+    // TODO b/182392769: use identity util
+    request.identity.packageName = opPackageName;
+    request.identity.pid = VALUE_OR_RETURN_STATUS(legacy2aidl_pid_t_int32_t(getpid()));
     request.probe = false;
 
     media::CreateEffectResponse response{};
@@ -518,7 +525,7 @@
     AudioSystem::getFrameCountHAL(mFdp.ConsumeIntegral<int32_t>(), &frameCount);
 
     size_t buffSize;
-    uint32_t sampleRate = mFdp.ConsumeIntegral<uint32_t>();
+    uint32_t sampleRate = getSampleRate(&mFdp);
     audio_format_t format = getValue(&mFdp, kFormats);
     audio_channel_mask_t channelMask = getValue(&mFdp, kChannelMasks);
     AudioSystem::getInputBufferSize(sampleRate, format, channelMask, &buffSize);
@@ -572,12 +579,12 @@
     config.offload_info.format = getValue(&mFdp, kFormats);
     config.offload_info.has_video = mFdp.ConsumeBool();
     config.offload_info.is_streaming = mFdp.ConsumeBool();
-    config.offload_info.sample_rate = (mFdp.ConsumeIntegral<uint32_t>());
+    config.offload_info.sample_rate = getSampleRate(&mFdp);
     config.offload_info.sync_id = mFdp.ConsumeIntegral<uint32_t>();
     config.offload_info.stream_type = getValue(&mFdp, kStreamtypes);
     config.offload_info.usage = getValue(&mFdp, kUsages);
 
-    config.sample_rate = mFdp.ConsumeIntegral<uint32_t>();
+    config.sample_rate = getSampleRate(&mFdp);
 
     audio_devices_t device = getValue(&mFdp, kDevices);
     audio_source_t source = getValue(&mFdp, kInputSources);
@@ -628,13 +635,13 @@
     config.offload_info.format = getValue(&mFdp, kFormats);
     config.offload_info.has_video = mFdp.ConsumeBool();
     config.offload_info.is_streaming = mFdp.ConsumeBool();
-    config.offload_info.sample_rate = mFdp.ConsumeIntegral<uint32_t>();
+    config.offload_info.sample_rate = getSampleRate(&mFdp);
     config.offload_info.stream_type = getValue(&mFdp, kStreamtypes);
     config.offload_info.sync_id = mFdp.ConsumeIntegral<uint32_t>();
     config.offload_info.usage = getValue(&mFdp, kUsages);
 
     config.format = getValue(&mFdp, kFormats);
-    config.sample_rate = mFdp.ConsumeIntegral<uint32_t>();
+    config.sample_rate = getSampleRate(&mFdp);
 
     sp<DeviceDescriptorBase> device = new DeviceDescriptorBase(getValue(&mFdp, kDevices));
     audio_output_flags_t flags = getValue(&mFdp, kOutputFlags);
@@ -683,7 +690,7 @@
         patch.sources[i].gain.ramp_duration_ms = mFdp.ConsumeIntegral<uint32_t>();
         patch.sources[i].id = static_cast<audio_format_t>(mFdp.ConsumeIntegral<int32_t>());
         patch.sources[i].role = getValue(&mFdp, kPortRoles);
-        patch.sources[i].sample_rate = mFdp.ConsumeIntegral<uint32_t>();
+        patch.sources[i].sample_rate = getSampleRate(&mFdp);
         patch.sources[i].type = getValue(&mFdp, kPortTypes);
 
         patch.sinks[i].config_mask = mFdp.ConsumeIntegral<uint32_t>();
@@ -695,7 +702,7 @@
         patch.sinks[i].gain.ramp_duration_ms = mFdp.ConsumeIntegral<uint32_t>();
         patch.sinks[i].id = static_cast<audio_format_t>(mFdp.ConsumeIntegral<int32_t>());
         patch.sinks[i].role = getValue(&mFdp, kPortRoles);
-        patch.sinks[i].sample_rate = mFdp.ConsumeIntegral<uint32_t>();
+        patch.sinks[i].sample_rate = getSampleRate(&mFdp);
         patch.sinks[i].type = getValue(&mFdp, kPortTypes);
     }
 
diff --git a/media/libaudioclient/include/media/AidlConversion.h b/media/libaudioclient/include/media/AidlConversion.h
index bde20cd..1dd9d60 100644
--- a/media/libaudioclient/include/media/AidlConversion.h
+++ b/media/libaudioclient/include/media/AidlConversion.h
@@ -28,6 +28,7 @@
 #include <android/media/AudioDualMonoMode.h>
 #include <android/media/AudioEncapsulationMode.h>
 #include <android/media/AudioEncapsulationMetadataType.h>
+#include <android/media/AudioEncapsulationType.h>
 #include <android/media/AudioFlag.h>
 #include <android/media/AudioGain.h>
 #include <android/media/AudioGainMode.h>
@@ -48,6 +49,7 @@
 #include <android/media/AudioTimestampInternal.h>
 #include <android/media/AudioUniqueIdUse.h>
 #include <android/media/EffectDescriptor.h>
+#include <android/media/ExtraAudioDescriptor.h>
 
 #include <android/media/SharedFileRegion.h>
 #include <binder/IMemory.h>
@@ -108,6 +110,11 @@
 ConversionResult<String16> aidl2legacy_string_view_String16(std::string_view aidl);
 ConversionResult<std::string> legacy2aidl_String16_string(const String16& legacy);
 
+ConversionResult<std::optional<String16>>
+aidl2legacy_optional_string_view_optional_String16(std::optional<std::string_view> aidl);
+ConversionResult<std::optional<std::string_view>>
+legacy2aidl_optional_String16_optional_string(std::optional<String16> legacy);
+
 ConversionResult<audio_io_config_event> aidl2legacy_AudioIoConfigEvent_audio_io_config_event(
         media::AudioIoConfigEvent aidl);
 ConversionResult<media::AudioIoConfigEvent> legacy2aidl_audio_io_config_event_AudioIoConfigEvent(
@@ -381,4 +388,25 @@
 ConversionResult<media::AudioPlaybackRate>
 legacy2aidl_audio_playback_rate_t_AudioPlaybackRate(const audio_playback_rate_t& legacy);
 
+ConversionResult<audio_standard_t>
+aidl2legacy_AudioStandard_audio_standard_t(media::AudioStandard aidl);
+ConversionResult<media::AudioStandard>
+legacy2aidl_audio_standard_t_AudioStandard(audio_standard_t legacy);
+
+ConversionResult<audio_extra_audio_descriptor>
+aidl2legacy_ExtraAudioDescriptor_audio_extra_audio_descriptor(
+        const media::ExtraAudioDescriptor& aidl);
+ConversionResult<media::ExtraAudioDescriptor>
+legacy2aidl_audio_extra_audio_descriptor_ExtraAudioDescriptor(
+        const audio_extra_audio_descriptor& legacy);
+
+ConversionResult<audio_encapsulation_type_t>
+aidl2legacy_AudioEncapsulationType_audio_encapsulation_type_t(
+        const media::AudioEncapsulationType& aidl);
+ConversionResult<media::AudioEncapsulationType>
+legacy2aidl_audio_encapsulation_type_t_AudioEncapsulationType(
+        const audio_encapsulation_type_t& legacy);
+
+
+
 }  // namespace android
diff --git a/media/libaudioclient/include/media/AidlConversionUtil.h b/media/libaudioclient/include/media/AidlConversionUtil.h
index bf2d800..c1a2be3 100644
--- a/media/libaudioclient/include/media/AidlConversionUtil.h
+++ b/media/libaudioclient/include/media/AidlConversionUtil.h
@@ -48,6 +48,15 @@
        std::move(_tmp.value());             \
      })
 
+#define VALUE_OR_FATAL(result)                                        \
+    ({                                                                \
+       auto _tmp = (result);                                          \
+       LOG_ALWAYS_FATAL_IF(!_tmp.ok(),                                \
+                           "Function: %s Line: %d Failed result (%d)",\
+                           __FUNCTION__, __LINE__, _tmp.error());     \
+       std::move(_tmp.value());                                       \
+     })
+
 /**
  * A generic template to safely cast between integral types, respecting limits of the destination
  * type.
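
A minimal usage sketch of the relocated VALUE_OR_FATAL macro, mirroring how the AudioTrack changes above use it: unwrap a ConversionResult or abort with the failing function and line.

#include <sys/types.h>
#include <media/AidlConversion.h>

// Sketch only: convert a legacy uid for an AIDL parcel, aborting on failure.
int32_t uidToAidlOrDie(uid_t uid) {
    // legacy2aidl_uid_t_int32_t() yields a ConversionResult<int32_t>;
    // VALUE_OR_FATAL unwraps it or calls LOG_ALWAYS_FATAL with context.
    return VALUE_OR_FATAL(android::legacy2aidl_uid_t_int32_t(uid));
}
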
diff --git a/media/libaudioclient/include/media/AudioClient.h b/media/libaudioclient/include/media/AudioClient.h
index 0b89d15..295fd4f 100644
--- a/media/libaudioclient/include/media/AudioClient.h
+++ b/media/libaudioclient/include/media/AudioClient.h
@@ -19,19 +19,17 @@
 #define ANDROID_AUDIO_CLIENT_H
 
 #include <sys/types.h>
-#include <utils/String16.h>
+#include <android/media/permission/Identity.h>
 
 namespace android {
 
 class AudioClient {
  public:
     AudioClient() :
-        clientUid(-1), clientPid(-1), clientTid(-1), packageName("") {}
+        clientTid(-1) {}
 
-    uid_t clientUid;
-    pid_t clientPid;
     pid_t clientTid;
-    String16 packageName;
+    android::media::permission::Identity identity;
 };
 
 }; // namespace android
diff --git a/media/libaudioclient/include/media/AudioEffect.h b/media/libaudioclient/include/media/AudioEffect.h
index 0d18fb1..974ce62 100644
--- a/media/libaudioclient/include/media/AudioEffect.h
+++ b/media/libaudioclient/include/media/AudioEffect.h
@@ -23,6 +23,7 @@
 #include <media/IAudioFlinger.h>
 #include <media/AudioSystem.h>
 #include <system/audio_effect.h>
+#include <android/media/permission/Identity.h>
 
 #include <utils/RefBase.h>
 #include <utils/Errors.h>
@@ -31,7 +32,6 @@
 #include "android/media/IEffect.h"
 #include "android/media/BnEffectClient.h"
 
-
 namespace android {
 
 // ----------------------------------------------------------------------------
@@ -337,9 +337,9 @@
      *
      * Parameters:
      *
-     * opPackageName:      The package name used for app op checks.
+     * client:      Identity used for app op checks.
      */
-    explicit AudioEffect(const String16& opPackageName);
+    explicit AudioEffect(const media::permission::Identity& client);
 
     /* Terminates the AudioEffect and unregisters it from AudioFlinger.
      * The effect engine is also destroyed if this AudioEffect was the last controlling
@@ -531,7 +531,7 @@
      static const uint32_t kMaxPreProcessing = 10;
 
 protected:
-     const String16          mOpPackageName;     // The package name used for app op checks.
+     media::permission::Identity mClientIdentity; // Identity used for app op checks.
      bool                    mEnabled = false;   // enable state
      audio_session_t         mSessionId = AUDIO_SESSION_OUTPUT_MIX; // audio session ID
      int32_t                 mPriority = 0;      // priority for effect control
@@ -606,8 +606,6 @@
     sp<EffectClient>        mIEffectClient;     // IEffectClient implementation
     sp<IMemory>             mCblkMemory;        // shared memory for deferred parameter setting
     effect_param_cblk_t*    mCblk = nullptr;    // control block for deferred parameter setting
-    pid_t                   mClientPid = (pid_t)-1;
-    uid_t                   mClientUid = (uid_t)-1;
 };
 
 
diff --git a/media/libaudioclient/include/media/AudioRecord.h b/media/libaudioclient/include/media/AudioRecord.h
index 2f66658..82a29d4 100644
--- a/media/libaudioclient/include/media/AudioRecord.h
+++ b/media/libaudioclient/include/media/AudioRecord.h
@@ -32,6 +32,7 @@
 #include <utils/threads.h>
 
 #include "android/media/IAudioRecord.h"
+#include <android/media/permission/Identity.h>
 
 namespace android {
 
@@ -148,9 +149,9 @@
      *
      * Parameters:
      *
-     * opPackageName:      The package name used for app ops.
+     * clientIdentity:          The identity of the owner of the record.
      */
-                        AudioRecord(const String16& opPackageName);
+                        AudioRecord(const media::permission::Identity& clientIdentity);
 
     /* Creates an AudioRecord object and registers it with AudioFlinger.
      * Once created, the track needs to be started before it can be used.
@@ -163,7 +164,7 @@
      * format:             Audio format (e.g AUDIO_FORMAT_PCM_16_BIT for signed
      *                     16 bits per sample).
      * channelMask:        Channel mask, such that audio_is_input_channel(channelMask) is true.
-     * opPackageName:      The package name used for app ops.
+     * client:             The identity of the owner of the record.
      * frameCount:         Minimum size of track PCM buffer in frames. This defines the
      *                     application's contribution to the
      *                     latency of the track.  The actual size selected by the AudioRecord could
@@ -186,7 +187,7 @@
                                     uint32_t sampleRate,
                                     audio_format_t format,
                                     audio_channel_mask_t channelMask,
-                                    const String16& opPackageName,
+                                    const media::permission::Identity& clientIdentity,
                                     size_t frameCount = 0,
                                     callback_t cbf = NULL,
                                     void* user = NULL,
@@ -194,8 +195,6 @@
                                     audio_session_t sessionId = AUDIO_SESSION_ALLOCATE,
                                     transfer_type transferType = TRANSFER_DEFAULT,
                                     audio_input_flags_t flags = AUDIO_INPUT_FLAG_NONE,
-                                    uid_t uid = AUDIO_UID_INVALID,
-                                    pid_t pid = -1,
                                     const audio_attributes_t* pAttributes = NULL,
                                     audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE,
                                     audio_microphone_direction_t
@@ -578,6 +577,12 @@
       */
             audio_port_handle_t getPortId() const { return mPortId; };
 
+    /* Sets the LogSessionId field which is used for metrics association of
+     * this object with other objects. A nullptr or empty string clears
+     * the logSessionId.
+     */
+            void setLogSessionId(const char *logSessionId);
+
      /*
       * Dumps the state of an audio record.
       */
@@ -632,7 +637,7 @@
 
             // caller must hold lock on mLock for all _l methods
 
-            status_t createRecord_l(const Modulo<uint32_t> &epoch, const String16& opPackageName);
+            status_t createRecord_l(const Modulo<uint32_t> &epoch);
 
             // FIXME enum is faster than strcmp() for parameter 'from'
             status_t restoreRecord_l(const char *from);
@@ -673,7 +678,7 @@
 
     status_t                mStatus;
 
-    String16                mOpPackageName;         // The package name used for app ops.
+    media::permission::Identity mClientIdentity;    // The identity of the owner of this record
 
     size_t                  mFrameCount;            // corresponds to current IAudioRecord, value is
                                                     // reported back by AudioFlinger to the client
@@ -701,6 +706,14 @@
 
     audio_session_t         mSessionId;
     audio_port_handle_t     mPortId;                    // Id from Audio Policy Manager
+
+    /**
+     * mLogSessionId is a string identifying this AudioRecord for the metrics service.
+     * It may be unique or shared with other objects.  An empty string means the
+     * logSessionId is not set.
+     */
+    std::string             mLogSessionId{};
+
     transfer_type           mTransfer;
 
     // Next 5 fields may be changed if IAudioRecord is re-created, but always != 0
@@ -740,8 +753,6 @@
 
     sp<DeathNotifier>       mDeathNotifier;
     uint32_t                mSequence;              // incremented for each new IAudioRecord attempt
-    uid_t                   mClientUid;
-    pid_t                   mClientPid;
     audio_attributes_t      mAttributes;
 
     // For Device Selection API
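As a usage sketch of the Identity-based constructor (mirroring the test change later in this patch), with placeholder audio parameters and package name:

    using android::media::permission::Identity;

    Identity identity;
    identity.packageName = "com.example.recorder";  // placeholder

    android::sp<android::AudioRecord> record = new android::AudioRecord(
            AUDIO_SOURCE_MIC,
            48000 /* sampleRate */,
            AUDIO_FORMAT_PCM_16_BIT,
            AUDIO_CHANNEL_IN_MONO,
            identity);
    // initCheck() should still be verified before starting the record.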
diff --git a/media/libaudioclient/include/media/AudioSystem.h b/media/libaudioclient/include/media/AudioSystem.h
index 694f2d0..4c99dbd 100644
--- a/media/libaudioclient/include/media/AudioSystem.h
+++ b/media/libaudioclient/include/media/AudioSystem.h
@@ -19,8 +19,10 @@
 
 #include <sys/types.h>
 
+#include <android/media/AudioVibratorInfo.h>
 #include <android/media/BnAudioFlingerClient.h>
 #include <android/media/BnAudioPolicyServiceClient.h>
+#include <android/media/permission/Identity.h>
 #include <media/AidlConversionUtil.h>
 #include <media/AudioDeviceTypeAddr.h>
 #include <media/AudioPolicy.h>
@@ -262,8 +264,7 @@
                                      audio_io_handle_t *output,
                                      audio_session_t session,
                                      audio_stream_type_t *stream,
-                                     pid_t pid,
-                                     uid_t uid,
+                                     const media::permission::Identity& identity,
                                      const audio_config_t *config,
                                      audio_output_flags_t flags,
                                      audio_port_handle_t *selectedDeviceId,
@@ -279,9 +280,7 @@
                                     audio_io_handle_t *input,
                                     audio_unique_id_t riid,
                                     audio_session_t session,
-                                    pid_t pid,
-                                    uid_t uid,
-                                    const String16& opPackageName,
+                                    const media::permission::Identity& identity,
                                     const audio_config_base_t *config,
                                     audio_input_flags_t flags,
                                     audio_port_handle_t *selectedDeviceId,
@@ -417,8 +416,9 @@
     // populated. The actual number of surround formats should be returned at numSurroundFormats.
     static status_t getSurroundFormats(unsigned int *numSurroundFormats,
                                        audio_format_t *surroundFormats,
-                                       bool *surroundFormatsEnabled,
-                                       bool reported);
+                                       bool *surroundFormatsEnabled);
+    static status_t getReportedSurroundFormats(unsigned int *numSurroundFormats,
+                                               audio_format_t *surroundFormats);
     static status_t setSurroundFormatEnabled(audio_format_t audioFormat, bool enabled);
 
     static status_t setAssistantUid(uid_t uid);
@@ -554,6 +554,8 @@
 
     static audio_port_handle_t getDeviceIdForIo(audio_io_handle_t audioIo);
 
+    static status_t setVibratorInfos(const std::vector<media::AudioVibratorInfo>& vibratorInfos);
+
 private:
 
     class AudioFlingerClient: public IBinder::DeathRecipient, public media::BnAudioFlingerClient
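A hedged sketch of the split surround-format query; the two-step sizing (count first, then a buffer of that size) follows the comment above, and passing nullptr on the sizing call is an assumption:

    unsigned int numFormats = 0;
    // First call: only query how many surround formats have been reported.
    if (android::AudioSystem::getReportedSurroundFormats(&numFormats, nullptr) ==
                android::NO_ERROR && numFormats > 0) {
        std::vector<audio_format_t> formats(numFormats);
        // Second call: fill the buffer with the reported formats.
        android::AudioSystem::getReportedSurroundFormats(&numFormats, formats.data());
    }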
diff --git a/media/libaudioclient/include/media/AudioTrack.h b/media/libaudioclient/include/media/AudioTrack.h
index e60ed55..d167c40 100644
--- a/media/libaudioclient/include/media/AudioTrack.h
+++ b/media/libaudioclient/include/media/AudioTrack.h
@@ -26,6 +26,7 @@
 #include <media/Modulo.h>
 #include <media/VolumeShaper.h>
 #include <utils/threads.h>
+#include <android/media/permission/Identity.h>
 
 #include <string>
 
@@ -181,7 +182,7 @@
      */
                         AudioTrack();
 
-                        AudioTrack(const std::string& opPackageName);
+                        AudioTrack(const media::permission::Identity& identity);
 
     /* Creates an AudioTrack object and registers it with AudioFlinger.
      * Once created, the track needs to be started before it can be used.
@@ -229,10 +230,9 @@
      * transferType:       How data is transferred to AudioTrack.
      * offloadInfo:        If not NULL, provides offload parameters for
      *                     AudioSystem::getOutputForAttr().
-     * uid:                User ID of the app which initially requested this AudioTrack
-     *                     for power management tracking, or -1 for current user ID.
-     * pid:                Process ID of the app which initially requested this AudioTrack
-     *                     for power management tracking, or -1 for current process ID.
+     * identity:           The identity of the app which initially requested this AudioTrack.
+     *                     Includes the UID and PID for power management tracking, or -1 for
+     *                     current user/process ID, plus the package name.
      * pAttributes:        If not NULL, supersedes streamType for use case selection.
      * doNotReconnect:     If set to true, AudioTrack won't automatically recreate the IAudioTrack
                            binder to AudioFlinger.
@@ -259,13 +259,12 @@
                                     audio_session_t sessionId  = AUDIO_SESSION_ALLOCATE,
                                     transfer_type transferType = TRANSFER_DEFAULT,
                                     const audio_offload_info_t *offloadInfo = NULL,
-                                    uid_t uid = AUDIO_UID_INVALID,
-                                    pid_t pid = -1,
+                                    const media::permission::Identity& identity =
+                                        media::permission::Identity(),
                                     const audio_attributes_t* pAttributes = NULL,
                                     bool doNotReconnect = false,
                                     float maxRequiredSpeed = 1.0f,
-                                    audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE,
-                                    const std::string& opPackageName = "");
+                                    audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE);
 
     /* Creates an audio track and registers it with AudioFlinger.
      * With this constructor, the track is configured for static buffer mode.
@@ -291,12 +290,11 @@
                                     audio_session_t sessionId   = AUDIO_SESSION_ALLOCATE,
                                     transfer_type transferType = TRANSFER_DEFAULT,
                                     const audio_offload_info_t *offloadInfo = NULL,
-                                    uid_t uid = AUDIO_UID_INVALID,
-                                    pid_t pid = -1,
+                                    const media::permission::Identity& identity =
+                                        media::permission::Identity(),
                                     const audio_attributes_t* pAttributes = NULL,
                                     bool doNotReconnect = false,
-                                    float maxRequiredSpeed = 1.0f,
-                                    const std::string& opPackageName = "");
+                                    float maxRequiredSpeed = 1.0f);
 
     /* Terminates the AudioTrack and unregisters it from AudioFlinger.
      * Also destroys all resources associated with the AudioTrack.
@@ -340,8 +338,8 @@
                             audio_session_t sessionId  = AUDIO_SESSION_ALLOCATE,
                             transfer_type transferType = TRANSFER_DEFAULT,
                             const audio_offload_info_t *offloadInfo = NULL,
-                            uid_t uid = AUDIO_UID_INVALID,
-                            pid_t pid = -1,
+                            const media::permission::Identity& identity =
+                                media::permission::Identity(),
                             const audio_attributes_t* pAttributes = NULL,
                             bool doNotReconnect = false,
                             float maxRequiredSpeed = 1.0f,
@@ -431,6 +429,19 @@
      */
             ssize_t     setBufferSizeInFrames(size_t size);
 
+    /* Returns the start threshold on the buffer for audio streaming
+     * or a negative value if the AudioTrack is not initialized.
+     */
+            ssize_t     getStartThresholdInFrames() const;
+
+    /* Sets the start threshold in frames on the buffer for audio streaming.
+     *
+     * May be clamped internally. Returns the actual value set, or a negative
+     * value if the AudioTrack is not initialized or if the input
+     * is zero or greater than INT_MAX.
+     */
+            ssize_t     setStartThresholdInFrames(size_t startThresholdInFrames);
+
     /* Return the static buffer specified in constructor or set(), or 0 for streaming mode */
             sp<IMemory> sharedBuffer() const { return mSharedBuffer; }
 
@@ -986,6 +997,23 @@
      */
             audio_port_handle_t getPortId() const { return mPortId; };
 
+    /* Sets the LogSessionId field which is used for metrics association of
+     * this object with other objects. A nullptr or empty string clears
+     * the logSessionId.
+     */
+            void setLogSessionId(const char *logSessionId);
+
+    /* Sets the playerIId field to associate the AudioTrack with an interface managed by
+     * AudioService.
+     *
+     * If this value is not set, then the playerIId is reported as -1
+     * (not associated with an AudioService player interface).
+     *
+     * For metrics purposes, we keep the playerIId association in the native
+     * client AudioTrack to improve the robustness under track restoration.
+     */
+            void setPlayerIId(int playerIId);
+
             void setAudioTrackCallback(const sp<media::IAudioTrackCallback>& callback) {
                 mAudioTrackCallback->setAudioTrackCallback(callback);
             }
@@ -1255,6 +1283,19 @@
     int                     mAuxEffectId;
     audio_port_handle_t     mPortId;                    // Id from Audio Policy Manager
 
+    /**
+     * mPlayerIId is the player id of the AudioTrack used by AudioManager.
+     * For an AudioTrack created by the Java interface, this is generally set once.
+     */
+    int                     mPlayerIId = -1;  // AudioManager.h PLAYER_PIID_INVALID
+
+    /**
+     * mLogSessionId is a string identifying this AudioTrack for the metrics service.
+     * It may be unique or shared with other objects.  An empty string means the
+     * logSessionId is not set.
+     */
+    std::string             mLogSessionId{};
+
     mutable Mutex           mLock;
 
     int                     mPreviousPriority;          // before start()
@@ -1281,8 +1322,6 @@
 
     sp<media::VolumeHandler>       mVolumeHandler;
 
-    const std::string      mOpPackageName;
-
 private:
     class DeathNotifier : public IBinder::DeathRecipient {
     public:
@@ -1295,8 +1334,7 @@
 
     sp<DeathNotifier>       mDeathNotifier;
     uint32_t                mSequence;              // incremented for each new IAudioTrack attempt
-    uid_t                   mClientUid;
-    pid_t                   mClientPid;
+    media::permission::Identity mClientIdentity;
 
     wp<AudioSystem::AudioDeviceCallback> mDeviceCallback;
 
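A brief sketch of the new AudioTrack metrics and threshold hooks on an already-created track; the track pointer, session id string and threshold value are placeholders:

    void tagTrack(const android::sp<android::AudioTrack>& track,
                  const std::string& logSessionId, int playerIId) {
        track->setLogSessionId(logSessionId.c_str());  // empty/null clears the id
        track->setPlayerIId(playerIId);                // -1 means not associated
        // Optional: raise the streaming start threshold; the value may be clamped.
        ssize_t applied = track->setStartThresholdInFrames(8192);
        (void)applied;
    }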
diff --git a/media/libaudioclient/include/media/IAudioFlinger.h b/media/libaudioclient/include/media/IAudioFlinger.h
index 9a8014d..7f7ca85 100644
--- a/media/libaudioclient/include/media/IAudioFlinger.h
+++ b/media/libaudioclient/include/media/IAudioFlinger.h
@@ -35,8 +35,10 @@
 #include <string>
 #include <vector>
 
+#include <android/media/AudioVibratorInfo.h>
 #include <android/media/BnAudioFlingerService.h>
 #include <android/media/BpAudioFlingerService.h>
+#include <android/media/permission/Identity.h>
 #include "android/media/CreateEffectRequest.h"
 #include "android/media/CreateEffectResponse.h"
 #include "android/media/CreateRecordRequest.h"
@@ -78,7 +80,6 @@
         uint32_t notificationsPerBuffer;
         float speed;
         sp<media::IAudioTrackCallback> audioTrackCallback;
-        std::string opPackageName;
 
         /* input/output */
         audio_output_flags_t flags;
@@ -127,7 +128,7 @@
         audio_attributes_t attr;
         audio_config_base_t config;
         AudioClient clientInfo;
-        String16 opPackageName;
+        media::permission::Identity identity;
         audio_unique_id_t riid;
 
         /* input/output */
@@ -331,6 +332,11 @@
     virtual status_t getMicrophones(std::vector<media::MicrophoneInfo> *microphones) = 0;
 
     virtual status_t setAudioHalPids(const std::vector<pid_t>& pids) = 0;
+
+    // Set vibrators' information.
+    // The values will be used to initialize HapticGenerator.
+    virtual status_t setVibratorInfos(
+            const std::vector<media::AudioVibratorInfo>& vibratorInfos) = 0;
 };
 
 /**
@@ -422,6 +428,7 @@
     size_t frameCountHAL(audio_io_handle_t ioHandle) const override;
     status_t getMicrophones(std::vector<media::MicrophoneInfo>* microphones) override;
     status_t setAudioHalPids(const std::vector<pid_t>& pids) override;
+    status_t setVibratorInfos(const std::vector<media::AudioVibratorInfo>& vibratorInfos) override;
 
 private:
     const sp<media::IAudioFlingerService> mDelegate;
@@ -504,6 +511,7 @@
             GET_MASTER_BALANCE = media::BnAudioFlingerService::TRANSACTION_getMasterBalance,
             SET_EFFECT_SUSPENDED = media::BnAudioFlingerService::TRANSACTION_setEffectSuspended,
             SET_AUDIO_HAL_PIDS = media::BnAudioFlingerService::TRANSACTION_setAudioHalPids,
+            SET_VIBRATOR_INFOS = media::BnAudioFlingerService::TRANSACTION_setVibratorInfos,
         };
 
         /**
@@ -605,6 +613,7 @@
     Status frameCountHAL(int32_t ioHandle, int64_t* _aidl_return) override;
     Status getMicrophones(std::vector<media::MicrophoneInfoData>* _aidl_return) override;
     Status setAudioHalPids(const std::vector<int32_t>& pids) override;
+    Status setVibratorInfos(const std::vector<media::AudioVibratorInfo>& vibratorInfos) override;
 
 private:
     const sp<AudioFlingerServerAdapter::Delegate> mDelegate;
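For context, a sketch of pushing vibrator information down to AudioFlinger through the matching AudioSystem entry point added earlier in this patch; the AudioVibratorInfo contents are left unset because its fields are not shown here:

    // One entry per vibrator; the fields (e.g. resonant frequency) come from the
    // AIDL definition of AudioVibratorInfo and are not filled in this sketch.
    std::vector<android::media::AudioVibratorInfo> infos(1);
    android::status_t status = android::AudioSystem::setVibratorInfos(infos);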
diff --git a/media/libaudioclient/include/media/PlayerBase.h b/media/libaudioclient/include/media/PlayerBase.h
index 62cc21d..23b6bfd 100644
--- a/media/libaudioclient/include/media/PlayerBase.h
+++ b/media/libaudioclient/include/media/PlayerBase.h
@@ -71,6 +71,10 @@
     float mPanMultiplierL, mPanMultiplierR;
     float mVolumeMultiplierL, mVolumeMultiplierR;
 
+    // player interface ID, uniquely identifies the player in the system
+    // effectively const after PlayerBase::init().
+    audio_unique_id_t mPIId;
+
 private:
             // report events to AudioService
             void servicePlayerEvent(player_state_t event, audio_port_handle_t deviceId);
@@ -79,9 +83,6 @@
     // native interface to AudioService
     android::sp<android::IAudioManager> mAudioManager;
 
-    // player interface ID, uniquely identifies the player in the system
-    audio_unique_id_t mPIId;
-
     // Mutex for state reporting
     Mutex mPlayerStateLock;
     player_state_t mLastReportedEvent;
diff --git a/media/libaudioclient/tests/test_create_audiorecord.cpp b/media/libaudioclient/tests/test_create_audiorecord.cpp
index cf6a734..57676c1 100644
--- a/media/libaudioclient/tests/test_create_audiorecord.cpp
+++ b/media/libaudioclient/tests/test_create_audiorecord.cpp
@@ -19,6 +19,7 @@
 #include <string.h>
 #include <unistd.h>
 
+#include <android/media/permission/Identity.h>
 #include <binder/MemoryBase.h>
 #include <binder/MemoryDealer.h>
 #include <binder/MemoryHeapBase.h>
@@ -32,19 +33,24 @@
 
 namespace android {
 
+using media::permission::Identity;
+
 int testRecord(FILE *inputFile, int outputFileFd)
 {
     char line[MAX_INPUT_FILE_LINE_LENGTH];
     uint32_t testCount = 0;
     Vector<String16> args;
     int ret = 0;
+    // TODO b/182392769: use identity util
+    Identity identity;
+    identity.packageName = PACKAGE_NAME;
 
     if (inputFile == nullptr) {
         sp<AudioRecord> record = new AudioRecord(AUDIO_SOURCE_DEFAULT,
                                               0 /* sampleRate */,
                                               AUDIO_FORMAT_DEFAULT,
                                               AUDIO_CHANNEL_IN_MONO,
-                                              String16(PACKAGE_NAME));
+                                              identity);
         if (record == 0 || record->initCheck() != NO_ERROR) {
             write(outputFileFd, "Error creating AudioRecord\n",
                   sizeof("Error creating AudioRecord\n"));
@@ -90,7 +96,7 @@
         memset(&attributes, 0, sizeof(attributes));
         attributes.source = inputSource;
 
-        sp<AudioRecord> record = new AudioRecord(String16(PACKAGE_NAME));
+        sp<AudioRecord> record = new AudioRecord(identity);
 
         record->set(AUDIO_SOURCE_DEFAULT,
                    sampleRate,
diff --git a/media/libaudiofoundation/AudioPort.cpp b/media/libaudiofoundation/AudioPort.cpp
index 20d8632..fafabd9 100644
--- a/media/libaudiofoundation/AudioPort.cpp
+++ b/media/libaudiofoundation/AudioPort.cpp
@@ -16,7 +16,9 @@
 #define LOG_TAG "AudioPort"
 
 #include <algorithm>
+#include <utility>
 
+#include <android/media/ExtraAudioDescriptor.h>
 #include <android-base/stringprintf.h>
 #include <media/AudioPort.h>
 #include <utils/Log.h>
@@ -46,11 +48,26 @@
                         port.audio_profiles->num_channel_masks),
                 SampleRateSet(port.audio_profiles[i].sample_rates,
                         port.audio_profiles[i].sample_rates +
-                        port.audio_profiles[i].num_sample_rates));
+                        port.audio_profiles[i].num_sample_rates),
+                port.audio_profiles[i].encapsulation_type);
         if (!mProfiles.contains(profile)) {
             addAudioProfile(profile);
         }
     }
+
+    for (size_t i = 0; i < port.num_extra_audio_descriptors; ++i) {
+        auto convertedResult = legacy2aidl_audio_extra_audio_descriptor_ExtraAudioDescriptor(
+                port.extra_audio_descriptors[i]);
+        if (!convertedResult.ok()) {
+            ALOGE("%s, failed to convert extra audio descriptor", __func__);
+            continue;
+        }
+        if (std::find(mExtraAudioDescriptors.begin(),
+                      mExtraAudioDescriptors.end(),
+                      convertedResult.value()) == mExtraAudioDescriptors.end()) {
+            mExtraAudioDescriptors.push_back(std::move(convertedResult.value()));
+        }
+    }
 }
 
 void AudioPort::toAudioPort(struct audio_port *port) const {
@@ -98,7 +115,7 @@
                     channelMasks.size() > AUDIO_PORT_MAX_CHANNEL_MASKS ||
                     port->num_audio_profiles >= AUDIO_PORT_MAX_AUDIO_PROFILES) {
                 ALOGE("%s: bailing out: cannot export profiles to port config", __func__);
-                return;
+                break;
             }
 
             auto& dstProfile = port->audio_profiles[port->num_audio_profiles++];
@@ -109,8 +126,25 @@
             dstProfile.num_channel_masks = channelMasks.size();
             std::copy(channelMasks.begin(), channelMasks.end(),
                     std::begin(dstProfile.channel_masks));
+            dstProfile.encapsulation_type = profile->getEncapsulationType();
         }
     }
+
+    port->num_extra_audio_descriptors = 0;
+    for (const auto& desc : mExtraAudioDescriptors) {
+        if (port->num_extra_audio_descriptors >= AUDIO_PORT_MAX_EXTRA_AUDIO_DESCRIPTORS) {
+            ALOGE("%s: bailing out: cannot export extra audio descriptor to port config", __func__);
+            return;
+        }
+
+        auto convertedResult = aidl2legacy_ExtraAudioDescriptor_audio_extra_audio_descriptor(desc);
+        if (!convertedResult.ok()) {
+            ALOGE("%s: failed to convert extra audio descriptor", __func__);
+            continue;
+        }
+        port->extra_audio_descriptors[port->num_extra_audio_descriptors++] =
+                std::move(convertedResult.value());
+    }
 }
 
 void AudioPort::dump(std::string *dst, int spaces, bool verbose) const {
@@ -121,6 +155,22 @@
         std::string profilesStr;
         mProfiles.dump(&profilesStr, spaces);
         dst->append(profilesStr);
+        if (!mExtraAudioDescriptors.empty()) {
+            dst->append(base::StringPrintf("%*s- extra audio descriptors: \n", spaces, ""));
+            const int eadSpaces = spaces + 4;
+            const int descSpaces = eadSpaces + 4;
+            for (size_t i = 0; i < mExtraAudioDescriptors.size(); i++) {
+                dst->append(
+                        base::StringPrintf("%*s extra audio descriptor %zu:\n", eadSpaces, "", i));
+                dst->append(base::StringPrintf(
+                    "%*s- standard: %u\n", descSpaces, "", mExtraAudioDescriptors[i].standard));
+                dst->append(base::StringPrintf("%*s- descriptor:", descSpaces, ""));
+                for (auto v : mExtraAudioDescriptors[i].audioDescriptor) {
+                    dst->append(base::StringPrintf(" %02x", v));
+                }
+                dst->append("\n");
+            }
+        }
 
         if (mGains.size() != 0) {
             dst->append(base::StringPrintf("%*s- gains:\n", spaces, ""));
@@ -145,7 +195,8 @@
            mName.compare(other->getName()) == 0 &&
            mType == other->getType() &&
            mRole == other->getRole() &&
-           mProfiles.equals(other->getAudioProfiles());
+           mProfiles.equals(other->getAudioProfiles()) &&
+           mExtraAudioDescriptors == other->getExtraAudioDescriptors();
 }
 
 status_t AudioPort::writeToParcel(Parcel *parcel) const
@@ -160,6 +211,7 @@
     parcelable->type = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_port_type_t_AudioPortType(mType));
     parcelable->role = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_port_role_t_AudioPortRole(mRole));
     parcelable->profiles = VALUE_OR_RETURN_STATUS(legacy2aidl_AudioProfileVector(mProfiles));
+    parcelable->extraAudioDescriptors = mExtraAudioDescriptors;
     parcelable->gains = VALUE_OR_RETURN_STATUS(legacy2aidl_AudioGains(mGains));
     return OK;
 }
@@ -175,6 +227,7 @@
     mType = VALUE_OR_RETURN_STATUS(aidl2legacy_AudioPortType_audio_port_type_t(parcelable.type));
     mRole = VALUE_OR_RETURN_STATUS(aidl2legacy_AudioPortRole_audio_port_role_t(parcelable.role));
     mProfiles = VALUE_OR_RETURN_STATUS(aidl2legacy_AudioProfileVector(parcelable.profiles));
+    mExtraAudioDescriptors = parcelable.extraAudioDescriptors;
     mGains = VALUE_OR_RETURN_STATUS(aidl2legacy_AudioGains(parcelable.gains));
     return OK;
 }
diff --git a/media/libaudiofoundation/AudioProfile.cpp b/media/libaudiofoundation/AudioProfile.cpp
index 65f7388..8ac3f73 100644
--- a/media/libaudiofoundation/AudioProfile.cpp
+++ b/media/libaudiofoundation/AudioProfile.cpp
@@ -58,10 +58,18 @@
 AudioProfile::AudioProfile(audio_format_t format,
                            const ChannelMaskSet &channelMasks,
                            const SampleRateSet &samplingRateCollection) :
+        AudioProfile(format, channelMasks, samplingRateCollection,
+                     AUDIO_ENCAPSULATION_TYPE_NONE) {}
+
+AudioProfile::AudioProfile(audio_format_t format,
+                           const ChannelMaskSet &channelMasks,
+                           const SampleRateSet &samplingRateCollection,
+                           audio_encapsulation_type_t encapsulationType) :
         mName(""),
         mFormat(format),
         mChannelMasks(channelMasks),
-        mSamplingRates(samplingRateCollection) {}
+        mSamplingRates(samplingRateCollection),
+        mEncapsulationType(encapsulationType) {}
 
 void AudioProfile::setChannels(const ChannelMaskSet &channelMasks)
 {
@@ -116,6 +124,9 @@
         }
         dst->append("\n");
     }
+
+    dst->append(base::StringPrintf(
+            "%*s- encapsulation type: %#x\n", spaces, "", mEncapsulationType));
 }
 
 bool AudioProfile::equals(const sp<AudioProfile>& other) const
@@ -127,7 +138,8 @@
            mSamplingRates == other->getSampleRates() &&
            mIsDynamicFormat == other->isDynamicFormat() &&
            mIsDynamicChannels == other->isDynamicChannels() &&
-           mIsDynamicRate == other->isDynamicRate();
+           mIsDynamicRate == other->isDynamicRate() &&
+           mEncapsulationType == other->getEncapsulationType();
 }
 
 AudioProfile& AudioProfile::operator=(const AudioProfile& other) {
@@ -135,6 +147,7 @@
     mFormat = other.mFormat;
     mChannelMasks = other.mChannelMasks;
     mSamplingRates = other.mSamplingRates;
+    mEncapsulationType = other.mEncapsulationType;
     mIsDynamicFormat = other.mIsDynamicFormat;
     mIsDynamicChannels = other.mIsDynamicChannels;
     mIsDynamicRate = other.mIsDynamicRate;
@@ -160,6 +173,8 @@
     parcelable.isDynamicFormat = mIsDynamicFormat;
     parcelable.isDynamicChannels = mIsDynamicChannels;
     parcelable.isDynamicRate = mIsDynamicRate;
+    parcelable.encapsulationType = VALUE_OR_RETURN(
+            legacy2aidl_audio_encapsulation_type_t_AudioEncapsulationType(mEncapsulationType));
     return parcelable;
 }
 
@@ -186,6 +201,9 @@
     legacy->mIsDynamicFormat = parcelable.isDynamicFormat;
     legacy->mIsDynamicChannels = parcelable.isDynamicChannels;
     legacy->mIsDynamicRate = parcelable.isDynamicRate;
+    legacy->mEncapsulationType = VALUE_OR_RETURN(
+            aidl2legacy_AudioEncapsulationType_audio_encapsulation_type_t(
+                    parcelable.encapsulationType));
     return legacy;
 }
 
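A small sketch of the new four-argument AudioProfile constructor; the format, rates and channel mask are illustrative, and AUDIO_ENCAPSULATION_TYPE_IEC61937 is assumed to be defined alongside AUDIO_ENCAPSULATION_TYPE_NONE:

    android::sp<android::AudioProfile> profile = new android::AudioProfile(
            AUDIO_FORMAT_IEC61937,
            android::ChannelMaskSet{AUDIO_CHANNEL_OUT_STEREO},
            android::SampleRateSet{48000},
            AUDIO_ENCAPSULATION_TYPE_IEC61937);  // assumed enum value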
diff --git a/media/libaudiofoundation/include/media/AudioPort.h b/media/libaudiofoundation/include/media/AudioPort.h
index 633e4e3..1cee1c9 100644
--- a/media/libaudiofoundation/include/media/AudioPort.h
+++ b/media/libaudiofoundation/include/media/AudioPort.h
@@ -21,6 +21,7 @@
 
 #include <android/media/AudioPort.h>
 #include <android/media/AudioPortConfig.h>
+#include <android/media/ExtraAudioDescriptor.h>
 #include <binder/Parcel.h>
 #include <binder/Parcelable.h>
 #include <media/AudioGain.h>
@@ -67,6 +68,14 @@
     void setAudioProfiles(const AudioProfileVector &profiles) { mProfiles = profiles; }
     AudioProfileVector &getAudioProfiles() { return mProfiles; }
 
+    void setExtraAudioDescriptors(
+            const std::vector<media::ExtraAudioDescriptor>& extraAudioDescriptors) {
+        mExtraAudioDescriptors = extraAudioDescriptors;
+    }
+    std::vector<media::ExtraAudioDescriptor> &getExtraAudioDescriptors() {
+        return mExtraAudioDescriptors;
+    }
+
     virtual void importAudioPort(const sp<AudioPort>& port, bool force = false);
 
     virtual void importAudioPort(const audio_port_v7& port);
@@ -102,6 +111,10 @@
     audio_port_type_t mType;
     audio_port_role_t mRole;
     AudioProfileVector mProfiles; // AudioProfiles supported by this port (format, Rates, Channels)
+
+    // Audio capabilities that are defined by hardware descriptors when the format is unrecognized
+    // by the platform, e.g. short audio descriptor in EDID for HDMI.
+    std::vector<media::ExtraAudioDescriptor> mExtraAudioDescriptors;
 private:
     template <typename T, std::enable_if_t<std::is_same<T, struct audio_port>::value
                                         || std::is_same<T, struct audio_port_v7>::value, int> = 0>
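As a usage sketch for the new extra-descriptor accessors, attaching one EDID-style short audio descriptor to a port; the standard enum value and descriptor bytes are placeholders:

    android::media::ExtraAudioDescriptor desc;
    desc.standard = android::media::AudioStandard::EDID;  // assumed enum value
    desc.audioDescriptor = {0x0f, 0x7f, 0x07};            // placeholder SAD bytes
    port->setExtraAudioDescriptors({desc});               // 'port' is an sp<AudioPort>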
diff --git a/media/libaudiofoundation/include/media/AudioProfile.h b/media/libaudiofoundation/include/media/AudioProfile.h
index 5051233..20c7ab9 100644
--- a/media/libaudiofoundation/include/media/AudioProfile.h
+++ b/media/libaudiofoundation/include/media/AudioProfile.h
@@ -38,6 +38,10 @@
     AudioProfile(audio_format_t format,
                  const ChannelMaskSet &channelMasks,
                  const SampleRateSet &samplingRateCollection);
+    AudioProfile(audio_format_t format,
+                 const ChannelMaskSet &channelMasks,
+                 const SampleRateSet &samplingRateCollection,
+                 audio_encapsulation_type_t encapsulationType);
 
     audio_format_t getFormat() const { return mFormat; }
     const ChannelMaskSet &getChannels() const { return mChannelMasks; }
@@ -68,6 +72,11 @@
 
     bool isDynamic() { return mIsDynamicFormat || mIsDynamicChannels || mIsDynamicRate; }
 
+    audio_encapsulation_type_t getEncapsulationType() const { return mEncapsulationType; }
+    void setEncapsulationType(audio_encapsulation_type_t encapsulationType) {
+        mEncapsulationType = encapsulationType;
+    }
+
     void dump(std::string *dst, int spaces) const;
 
     bool equals(const sp<AudioProfile>& other) const;
@@ -79,6 +88,7 @@
     static ConversionResult<sp<AudioProfile>> fromParcelable(const media::AudioProfile& parcelable);
 
 private:
+
     std::string  mName;
     audio_format_t mFormat; // The format for an audio profile should only be set when initialized.
     ChannelMaskSet mChannelMasks;
@@ -88,6 +98,8 @@
     bool mIsDynamicChannels = false;
     bool mIsDynamicRate = false;
 
+    audio_encapsulation_type_t mEncapsulationType;
+
     AudioProfile() = default;
     AudioProfile& operator=(const AudioProfile& other);
 };
diff --git a/media/libaudiohal/impl/DeviceHalHidl.cpp b/media/libaudiohal/impl/DeviceHalHidl.cpp
index b9a3e29..ca4f663 100644
--- a/media/libaudiohal/impl/DeviceHalHidl.cpp
+++ b/media/libaudiohal/impl/DeviceHalHidl.cpp
@@ -246,6 +246,10 @@
         return status;
     }
     CoreUtils::AudioInputFlags hidlFlags;
+#if MAJOR_VERSION <= 5
+    // Some flags were specific to framework and must not leak to the HAL.
+    flags = static_cast<audio_input_flags_t>(flags & ~AUDIO_INPUT_FLAG_DIRECT);
+#endif
     if (status_t status = CoreUtils::audioInputFlagsFromHal(flags, &hidlFlags); status != OK) {
         return status;
     }
@@ -278,10 +282,6 @@
         sinkMetadata.tracks[0].destination.device(std::move(hidlOutputDevice));
     }
 #endif
-#if MAJOR_VERSION <= 5
-    // Some flags were specific to framework and must not leak to the HAL.
-    flags = static_cast<audio_input_flags_t>(flags & ~AUDIO_INPUT_FLAG_DIRECT);
-#endif
     Return<void> ret = mDevice->openInputStream(
             handle, hidlDevice, hidlConfig, hidlFlags, sinkMetadata,
             [&](Result r, const sp<IStreamIn>& result, const AudioConfig& suggestedConfig) {
@@ -354,7 +354,8 @@
     return processReturn("releaseAudioPatch", mDevice->releaseAudioPatch(patch));
 }
 
-status_t DeviceHalHidl::getAudioPort(struct audio_port *port) {
+template <typename HalPort>
+status_t DeviceHalHidl::getAudioPortImpl(HalPort *port) {
     if (mDevice == 0) return NO_INIT;
     AudioPort hidlPort;
     HidlUtils::audioPortFromHal(*port, &hidlPort);
@@ -370,31 +371,28 @@
     return processReturn("getAudioPort", ret, retval);
 }
 
+status_t DeviceHalHidl::getAudioPort(struct audio_port *port) {
+    return getAudioPortImpl(port);
+}
+
 status_t DeviceHalHidl::getAudioPort(struct audio_port_v7 *port) {
-    if (mDevice == 0) return NO_INIT;
-    status_t status = NO_ERROR;
 #if MAJOR_VERSION >= 7
-    AudioPort hidlPort;
-    HidlUtils::audioPortFromHal(*port, &hidlPort);
-    Result retval;
-    Return<void> ret = mDevice->getAudioPort(
-            hidlPort,
-            [&](Result r, const AudioPort& p) {
-                retval = r;
-                if (retval == Result::OK) {
-                    HidlUtils::audioPortToHal(p, port);
-                }
-            });
-    status = processReturn("getAudioPort", ret, retval);
+    return getAudioPortImpl(port);
 #else
     struct audio_port audioPort = {};
-    audio_populate_audio_port(port, &audioPort);
-    status = getAudioPort(&audioPort);
+    status_t result = NO_ERROR;
+    if (!audio_populate_audio_port(port, &audioPort)) {
+        ALOGE("Failed to populate legacy audio port from audio_port_v7");
+        result = BAD_VALUE;
+    }
+    status_t status = getAudioPort(&audioPort);
     if (status == NO_ERROR) {
         audio_populate_audio_port_v7(&audioPort, port);
+    } else {
+        result = status;
     }
+    return result;
 #endif
-    return status;
 }
 
 status_t DeviceHalHidl::setAudioPortConfig(const struct audio_port_config *config) {
diff --git a/media/libaudiohal/impl/DeviceHalHidl.h b/media/libaudiohal/impl/DeviceHalHidl.h
index abd4ad5..2c847cf 100644
--- a/media/libaudiohal/impl/DeviceHalHidl.h
+++ b/media/libaudiohal/impl/DeviceHalHidl.h
@@ -131,6 +131,8 @@
 
     // The destructor automatically closes the device.
     virtual ~DeviceHalHidl();
+
+    template <typename HalPort> status_t getAudioPortImpl(HalPort *port);
 };
 
 } // namespace CPP_VERSION
diff --git a/media/libaudiohal/impl/DeviceHalLocal.cpp b/media/libaudiohal/impl/DeviceHalLocal.cpp
index aa9e477..af7dc1a 100644
--- a/media/libaudiohal/impl/DeviceHalLocal.cpp
+++ b/media/libaudiohal/impl/DeviceHalLocal.cpp
@@ -181,6 +181,12 @@
 }
 
 status_t DeviceHalLocal::getAudioPort(struct audio_port_v7 *port) {
+#if MAJOR_VERSION >= 7
+    if (version() >= AUDIO_DEVICE_API_VERSION_3_2) {
+        // get_audio_port_v7 is mandatory if the legacy HAL supports this API version.
+        return mDev->get_audio_port_v7(mDev, port);
+    }
+#endif
     struct audio_port audioPort = {};
     audio_populate_audio_port(port, &audioPort);
     status_t status = getAudioPort(&audioPort);
diff --git a/media/libaudiohal/impl/StreamHalHidl.cpp b/media/libaudiohal/impl/StreamHalHidl.cpp
index 2a3e2b6..539a149 100644
--- a/media/libaudiohal/impl/StreamHalHidl.cpp
+++ b/media/libaudiohal/impl/StreamHalHidl.cpp
@@ -57,8 +57,7 @@
     // Note: This assumes channel mask, format, and sample rate do not change after creation.
     audio_config_base_t config = AUDIO_CONFIG_BASE_INITIALIZER;
     if (/* mStreamPowerLog.isUserDebugOrEngBuild() && */
-        StreamHalHidl::getAudioProperties(
-                &config.sample_rate, &config.channel_mask, &config.format) == NO_ERROR) {
+        StreamHalHidl::getAudioProperties(&config) == NO_ERROR) {
         mStreamPowerLog.init(config.sample_rate, config.channel_mask, config.format);
     }
 }
@@ -69,14 +68,6 @@
     hardware::IPCThreadState::self()->flushCommands();
 }
 
-// Note: this method will be removed
-status_t StreamHalHidl::getSampleRate(uint32_t *rate) {
-    audio_config_base_t config = AUDIO_CONFIG_BASE_INITIALIZER;
-    status_t status = getAudioProperties(&config.sample_rate, &config.channel_mask, &config.format);
-    *rate = config.sample_rate;
-    return status;
-}
-
 status_t StreamHalHidl::getBufferSize(size_t *size) {
     if (!mStream) return NO_INIT;
     status_t status = processReturn("getBufferSize", mStream->getBufferSize(), size);
@@ -86,48 +77,28 @@
     return status;
 }
 
-// Note: this method will be removed
-status_t StreamHalHidl::getChannelMask(audio_channel_mask_t *mask) {
-    audio_config_base_t config = AUDIO_CONFIG_BASE_INITIALIZER;
-    status_t status = getAudioProperties(&config.sample_rate, &config.channel_mask, &config.format);
-    *mask = config.channel_mask;
-    return status;
-}
-
-// Note: this method will be removed
-status_t StreamHalHidl::getFormat(audio_format_t *format) {
-    audio_config_base_t config = AUDIO_CONFIG_BASE_INITIALIZER;
-    status_t status = getAudioProperties(&config.sample_rate, &config.channel_mask, &config.format);
-    *format = config.format;
-    return status;
-}
-
-status_t StreamHalHidl::getAudioProperties(
-        uint32_t *sampleRate, audio_channel_mask_t *mask, audio_format_t *format) {
+status_t StreamHalHidl::getAudioProperties(audio_config_base_t *configBase) {
+    *configBase = AUDIO_CONFIG_BASE_INITIALIZER;
     if (!mStream) return NO_INIT;
 #if MAJOR_VERSION <= 6
     Return<void> ret = mStream->getAudioProperties(
             [&](uint32_t sr, auto m, auto f) {
-                *sampleRate = sr;
-                *mask = static_cast<audio_channel_mask_t>(m);
-                *format = static_cast<audio_format_t>(f);
+                configBase->sample_rate = sr;
+                configBase->channel_mask = static_cast<audio_channel_mask_t>(m);
+                configBase->format = static_cast<audio_format_t>(f);
             });
     return processReturn("getAudioProperties", ret);
 #else
     Result retval;
     status_t conversionStatus = BAD_VALUE;
-    audio_config_base_t halConfig = AUDIO_CONFIG_BASE_INITIALIZER;
     Return<void> ret = mStream->getAudioProperties(
             [&](Result r, const AudioConfigBase& config) {
                 retval = r;
                 if (retval == Result::OK) {
-                    conversionStatus = HidlUtils::audioConfigBaseToHal(config, &halConfig);
+                    conversionStatus = HidlUtils::audioConfigBaseToHal(config, configBase);
                 }
             });
     if (status_t status = processReturn("getAudioProperties", ret, retval); status == NO_ERROR) {
-        *sampleRate = halConfig.sample_rate;
-        *mask = halConfig.channel_mask;
-        *format = halConfig.format;
         return conversionStatus;
     } else {
         return status;
diff --git a/media/libaudiohal/impl/StreamHalHidl.h b/media/libaudiohal/impl/StreamHalHidl.h
index c6db6d6..970903b 100644
--- a/media/libaudiohal/impl/StreamHalHidl.h
+++ b/media/libaudiohal/impl/StreamHalHidl.h
@@ -49,21 +49,14 @@
 class StreamHalHidl : public virtual StreamHalInterface, public ConversionHelperHidl
 {
   public:
-    // Return the sampling rate in Hz - eg. 44100.
-    virtual status_t getSampleRate(uint32_t *rate);
-
     // Return size of input/output buffer in bytes for this stream - eg. 4800.
     virtual status_t getBufferSize(size_t *size);
 
-    // Return the channel mask.
-    virtual status_t getChannelMask(audio_channel_mask_t *mask);
-
-    // Return the audio format - e.g. AUDIO_FORMAT_PCM_16_BIT.
-    virtual status_t getFormat(audio_format_t *format);
-
-    // Convenience method.
-    virtual status_t getAudioProperties(
-            uint32_t *sampleRate, audio_channel_mask_t *mask, audio_format_t *format);
+    // Return the base configuration of the stream:
+    //   - channel mask;
+    //   - format - e.g. AUDIO_FORMAT_PCM_16_BIT;
+    //   - sampling rate in Hz - eg. 44100.
+    virtual status_t getAudioProperties(audio_config_base_t *configBase);
 
     // Set audio stream parameters.
     virtual status_t setParameters(const String8& kvPairs);
diff --git a/media/libaudiohal/impl/StreamHalLocal.cpp b/media/libaudiohal/impl/StreamHalLocal.cpp
index e89b288..d0c375e 100644
--- a/media/libaudiohal/impl/StreamHalLocal.cpp
+++ b/media/libaudiohal/impl/StreamHalLocal.cpp
@@ -45,31 +45,15 @@
     mDevice.clear();
 }
 
-status_t StreamHalLocal::getSampleRate(uint32_t *rate) {
-    *rate = mStream->get_sample_rate(mStream);
-    return OK;
-}
-
 status_t StreamHalLocal::getBufferSize(size_t *size) {
     *size = mStream->get_buffer_size(mStream);
     return OK;
 }
 
-status_t StreamHalLocal::getChannelMask(audio_channel_mask_t *mask) {
-    *mask = mStream->get_channels(mStream);
-    return OK;
-}
-
-status_t StreamHalLocal::getFormat(audio_format_t *format) {
-    *format = mStream->get_format(mStream);
-    return OK;
-}
-
-status_t StreamHalLocal::getAudioProperties(
-        uint32_t *sampleRate, audio_channel_mask_t *mask, audio_format_t *format) {
-    *sampleRate = mStream->get_sample_rate(mStream);
-    *mask = mStream->get_channels(mStream);
-    *format = mStream->get_format(mStream);
+status_t StreamHalLocal::getAudioProperties(audio_config_base_t *configBase) {
+    configBase->sample_rate = mStream->get_sample_rate(mStream);
+    configBase->channel_mask = mStream->get_channels(mStream);
+    configBase->format = mStream->get_format(mStream);
     return OK;
 }
 
diff --git a/media/libaudiohal/impl/StreamHalLocal.h b/media/libaudiohal/impl/StreamHalLocal.h
index e228104..b260495 100644
--- a/media/libaudiohal/impl/StreamHalLocal.h
+++ b/media/libaudiohal/impl/StreamHalLocal.h
@@ -28,21 +28,14 @@
 class StreamHalLocal : public virtual StreamHalInterface
 {
   public:
-    // Return the sampling rate in Hz - eg. 44100.
-    virtual status_t getSampleRate(uint32_t *rate);
-
     // Return size of input/output buffer in bytes for this stream - eg. 4800.
     virtual status_t getBufferSize(size_t *size);
 
-    // Return the channel mask.
-    virtual status_t getChannelMask(audio_channel_mask_t *mask);
-
-    // Return the audio format - e.g. AUDIO_FORMAT_PCM_16_BIT.
-    virtual status_t getFormat(audio_format_t *format);
-
-    // Convenience method.
-    virtual status_t getAudioProperties(
-            uint32_t *sampleRate, audio_channel_mask_t *mask, audio_format_t *format);
+    // Return the base configuration of the stream:
+    //   - channel mask;
+    //   - format - e.g. AUDIO_FORMAT_PCM_16_BIT;
+    //   - sampling rate in Hz - eg. 44100.
+    virtual status_t getAudioProperties(audio_config_base_t *configBase);
 
     // Set audio stream parameters.
     virtual status_t setParameters(const String8& kvPairs);
diff --git a/media/libaudiohal/include/media/audiohal/StreamHalInterface.h b/media/libaudiohal/include/media/audiohal/StreamHalInterface.h
index b47f536..2be12fb 100644
--- a/media/libaudiohal/include/media/audiohal/StreamHalInterface.h
+++ b/media/libaudiohal/include/media/audiohal/StreamHalInterface.h
@@ -31,25 +31,27 @@
 class StreamHalInterface : public virtual RefBase
 {
   public:
-    // TODO(mnaganov): Remove
-    // Return the sampling rate in Hz - eg. 44100.
-    virtual status_t getSampleRate(uint32_t *rate) = 0;
-
     // Return size of input/output buffer in bytes for this stream - eg. 4800.
     virtual status_t getBufferSize(size_t *size) = 0;
 
-    // TODO(mnaganov): Remove
-    // Return the channel mask.
-    virtual status_t getChannelMask(audio_channel_mask_t *mask) = 0;
+    // Return the base configuration of the stream:
+    //   - channel mask;
+    //   - format - e.g. AUDIO_FORMAT_PCM_16_BIT;
+    //   - sampling rate in Hz - eg. 44100.
+    virtual status_t getAudioProperties(audio_config_base_t *configBase) = 0;
 
-    // TODO(mnaganov): Remove
-    // Return the audio format - e.g. AUDIO_FORMAT_PCM_16_BIT.
-    virtual status_t getFormat(audio_format_t *format) = 0;
-
-    // TODO(mnaganov): Change to use audio_config_base_t
     // Convenience method.
-    virtual status_t getAudioProperties(
-            uint32_t *sampleRate, audio_channel_mask_t *mask, audio_format_t *format) = 0;
+    inline status_t getAudioProperties(
+            uint32_t *sampleRate, audio_channel_mask_t *mask, audio_format_t *format) {
+        audio_config_base_t config = AUDIO_CONFIG_BASE_INITIALIZER;
+        const status_t result = getAudioProperties(&config);
+        if (result == NO_ERROR) {
+            if (sampleRate != nullptr) *sampleRate = config.sample_rate;
+            if (mask != nullptr) *mask = config.channel_mask;
+            if (format != nullptr) *format = config.format;
+        }
+        return result;
+    }
 
     // Set audio stream parameters.
     virtual status_t setParameters(const String8& kvPairs) = 0;
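Two equivalent call shapes, sketched for reference: the new config-based query and the inline compatibility wrapper above (the stream variable is a placeholder sp<StreamHalInterface>):

    audio_config_base_t config = AUDIO_CONFIG_BASE_INITIALIZER;
    if (stream->getAudioProperties(&config) == android::NO_ERROR) {
        // config.sample_rate, config.channel_mask and config.format are valid here.
    }

    // Legacy-style call through the wrapper; unused out-parameters may be nullptr.
    uint32_t sampleRate = 0;
    stream->getAudioProperties(&sampleRate, nullptr /* mask */, nullptr /* format */);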
diff --git a/media/libeffects/hapticgenerator/Android.bp b/media/libeffects/hapticgenerator/Android.bp
index f947339..a660957 100644
--- a/media/libeffects/hapticgenerator/Android.bp
+++ b/media/libeffects/hapticgenerator/Android.bp
@@ -13,6 +13,15 @@
 // limitations under the License.
 
 // HapticGenerator library
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
 cc_library_shared {
     name: "libhapticgenerator",
 
@@ -48,4 +57,3 @@
         "libaudioeffects",
     ],
 }
-
diff --git a/media/libeffects/hapticgenerator/EffectHapticGenerator.cpp b/media/libeffects/hapticgenerator/EffectHapticGenerator.cpp
index 9b93659..65a20a7 100644
--- a/media/libeffects/hapticgenerator/EffectHapticGenerator.cpp
+++ b/media/libeffects/hapticgenerator/EffectHapticGenerator.cpp
@@ -26,11 +26,16 @@
 
 #include <errno.h>
 #include <inttypes.h>
+#include <math.h>
 
 #include <audio_effects/effect_hapticgenerator.h>
 #include <audio_utils/format.h>
 #include <system/audio.h>
 
+static constexpr float DEFAULT_RESONANT_FREQUENCY = 150.0f;
+static constexpr float DEFAULT_BSF_ZERO_Q = 8.0f;
+static constexpr float DEFAULT_BSF_POLE_Q = 4.0f;
+
 // This is the only symbol that needs to be exported
 __attribute__ ((visibility ("default")))
 audio_effect_library_t AUDIO_EFFECT_LIBRARY_INFO_SYM = {
@@ -101,6 +106,16 @@
     context->param.audioChannelCount = 0;
     context->param.maxHapticIntensity = os::HapticScale::MUTE;
 
+    context->param.resonantFrequency = DEFAULT_RESONANT_FREQUENCY;
+    context->param.bpfQ = 1.0f;
+    context->param.slowEnvNormalizationPower = -0.8f;
+    context->param.bsfZeroQ = DEFAULT_BSF_ZERO_Q;
+    context->param.bsfPoleQ = DEFAULT_BSF_POLE_Q;
+    context->param.distortionCornerFrequency = 300.0f;
+    context->param.distortionInputGain = 0.3f;
+    context->param.distortionCubeThreshold = 0.1f;
+    context->param.distortionOutputGain = 1.5f;
+
     context->state = HAPTICGENERATOR_STATE_INITIALIZED;
     return 0;
 }
@@ -128,16 +143,17 @@
  */
 void HapticGenerator_buildProcessingChain(
         std::vector<std::function<void(float*, const float*, size_t)>>& processingChain,
-        struct HapticGeneratorProcessorsRecord& processorsRecord,
-        float sampleRate, size_t channelCount) {
-    float highPassCornerFrequency = 100.0f;
+        struct HapticGeneratorProcessorsRecord& processorsRecord, float sampleRate,
+        const struct HapticGeneratorParam* param) {
+    const size_t channelCount = param->hapticChannelCount;
+    float highPassCornerFrequency = 50.0f;
     auto hpf = createHPF2(highPassCornerFrequency, sampleRate, channelCount);
     addBiquadFilter(processingChain, processorsRecord, hpf);
-    float lowPassCornerFrequency = 3000.0f;
+    float lowPassCornerFrequency = 9000.0f;
     auto lpf = createLPF2(lowPassCornerFrequency, sampleRate, channelCount);
     addBiquadFilter(processingChain, processorsRecord, lpf);
 
-    auto ramp = std::make_shared<Ramp>(channelCount);
+    auto ramp = std::make_shared<Ramp>(channelCount);  // ramp = half-wave rectifier.
     // The process chain captures the shared pointer of the ramp in lambda. It will be the only
     // reference to the ramp.
     // The process record will keep a weak pointer to the ramp so that it is possible to access
@@ -154,19 +170,6 @@
     lpf = createLPF2(lowPassCornerFrequency, sampleRate, channelCount);
     addBiquadFilter(processingChain, processorsRecord, lpf);
 
-    lowPassCornerFrequency = 5.0f;
-    float normalizationPower = -0.3f;
-    // The process chain captures the shared pointer of the slow envelope in lambda. It will
-    // be the only reference to the slow envelope.
-    // The process record will keep a weak pointer to the slow envelope so that it is possible
-    // to access the slow envelope outside of the process chain.
-    auto slowEnv = std::make_shared<SlowEnvelope>(
-            lowPassCornerFrequency, sampleRate, normalizationPower, channelCount);
-    processorsRecord.slowEnvs.push_back(slowEnv);
-    processingChain.push_back([slowEnv](float *out, const float *in, size_t frameCount) {
-            slowEnv->process(out, in, frameCount);
-    });
-
     lowPassCornerFrequency = 400.0f;
     lpf = createLPF2(lowPassCornerFrequency, sampleRate, channelCount);
     addBiquadFilter(processingChain, processorsRecord, lpf);
@@ -174,23 +177,40 @@
     lpf = createLPF2(lowPassCornerFrequency, sampleRate, channelCount);
     addBiquadFilter(processingChain, processorsRecord, lpf);
 
-    auto apf = createAPF2(400.0f, 200.0f, sampleRate, channelCount);
-    addBiquadFilter(processingChain, processorsRecord, apf);
-    apf = createAPF2(100.0f, 50.0f, sampleRate, channelCount);
-    addBiquadFilter(processingChain, processorsRecord, apf);
-    float allPassCornerFrequency = 25.0f;
-    apf = createAPF(allPassCornerFrequency, sampleRate, channelCount);
-    addBiquadFilter(processingChain, processorsRecord, apf);
-
-    float resonantFrequency = 150.0f;
-    float bandpassQ = 1.0f;
-    auto bpf = createBPF(resonantFrequency, bandpassQ, sampleRate, channelCount);
+    auto bpf = createBPF(param->resonantFrequency, param->bpfQ, sampleRate, channelCount);
+    processorsRecord.bpf = bpf;
     addBiquadFilter(processingChain, processorsRecord, bpf);
 
-    float zeroQ = 8.0f;
-    float poleQ = 4.0f;
-    auto bsf = createBSF(resonantFrequency, zeroQ, poleQ, sampleRate, channelCount);
+    float normalizationPower = param->slowEnvNormalizationPower;
+    // The process chain captures the shared pointer of the slow envelope in lambda. It will
+    // be the only reference to the slow envelope.
+    // The process record will keep a weak pointer to the slow envelope so that it is possible
+    // to access the slow envelope outside of the process chain.
+    auto slowEnv = std::make_shared<SlowEnvelope>(  // SlowEnvelope = partial normalizer, or AGC.
+            5.0f /*envCornerFrequency*/, sampleRate, normalizationPower,
+            0.01f /*envOffset*/, channelCount);
+    processorsRecord.slowEnvs.push_back(slowEnv);
+    processingChain.push_back([slowEnv](float *out, const float *in, size_t frameCount) {
+            slowEnv->process(out, in, frameCount);
+    });
+
+
+    auto bsf = createBSF(
+            param->resonantFrequency, param->bsfZeroQ, param->bsfPoleQ, sampleRate, channelCount);
+    processorsRecord.bsf = bsf;
     addBiquadFilter(processingChain, processorsRecord, bsf);
+
+    // The process chain captures the shared pointer of the Distortion in lambda. It will
+    // be the only reference to the Distortion.
+    // The process record will keep a weak pointer to the Distortion so that it is possible
+    // to access the Distortion outside of the process chain.
+    auto distortion = std::make_shared<Distortion>(
+            param->distortionCornerFrequency, sampleRate, param->distortionInputGain,
+            param->distortionCubeThreshold, param->distortionOutputGain, channelCount);
+    processorsRecord.distortions.push_back(distortion);
+    processingChain.push_back([distortion](float *out, const float *in, size_t frameCount) {
+            distortion->process(out, in, frameCount);
+    });
 }
 
 int HapticGenerator_Configure(struct HapticGeneratorContext *context, effect_config_t *config) {
@@ -206,6 +226,7 @@
         context->processorsRecord.filters.clear();
         context->processorsRecord.ramps.clear();
         context->processorsRecord.slowEnvs.clear();
+        context->processorsRecord.distortions.clear();
         memcpy(&context->config, config, sizeof(effect_config_t));
         context->param.audioChannelCount = audio_channel_count_from_out_mask(
                 ((audio_channel_mask_t) config->inputCfg.channels) & ~AUDIO_CHANNEL_HAPTIC_ALL);
@@ -224,7 +245,7 @@
         HapticGenerator_buildProcessingChain(context->processingChain,
                                              context->processorsRecord,
                                              config->inputCfg.samplingRate,
-                                             context->param.hapticChannelCount);
+                                             &context->param);
     }
     return 0;
 }
@@ -236,6 +257,9 @@
     for (auto& slowEnv : context->processorsRecord.slowEnvs) {
         slowEnv->clear();
     }
+    for (auto& distortion : context->processorsRecord.distortions) {
+        distortion->clear();
+    }
     return 0;
 }
 
@@ -262,7 +286,32 @@
         }
         break;
     }
+    case HG_PARAM_VIBRATOR_INFO: {
+        if (value == nullptr || size != 2 * sizeof(float)) {
+            return -EINVAL;
+        }
+        const float resonantFrequency = *(float*) value;
+        const float qFactor = *((float *) value + 1);
+        context->param.resonantFrequency =
+                isnan(resonantFrequency) ? DEFAULT_RESONANT_FREQUENCY : resonantFrequency;
+        context->param.bsfZeroQ = isnan(qFactor) ? DEFAULT_BSF_POLE_Q : qFactor;
+        context->param.bsfPoleQ = context->param.bsfZeroQ / 2.0f;
 
+        if (context->processorsRecord.bpf != nullptr) {
+            context->processorsRecord.bpf->setCoefficients(
+                    bpfCoefs(context->param.resonantFrequency,
+                             context->param.bpfQ,
+                             context->config.inputCfg.samplingRate));
+        }
+        if (context->processorsRecord.bsf != nullptr) {
+            context->processorsRecord.bsf->setCoefficients(
+                    bsfCoefs(context->param.resonantFrequency,
+                             context->param.bsfZeroQ,
+                             context->param.bsfPoleQ,
+                             context->config.inputCfg.samplingRate));
+        }
+        HapticGenerator_Reset(context);
+    } break;
     default:
         ALOGW("Unknown param: %d", param);
         return -EINVAL;
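The new HG_PARAM_VIBRATOR_INFO case expects exactly two floats packed back to back; a minimal caller-side sketch with hypothetical values (the surrounding set-parameter plumbing is omitted):

    // Hypothetical example values, for illustration only.
    float vibratorInfo[2] = {
            150.0f,  // resonant frequency in Hz; NaN falls back to DEFAULT_RESONANT_FREQUENCY
            8.0f,    // Q factor; NaN falls back to a built-in default, and bsfPoleQ = bsfZeroQ / 2
    };
    // Delivered with param == HG_PARAM_VIBRATOR_INFO and size == 2 * sizeof(float); any other
    // size is rejected with -EINVAL. A successful update also rewrites the cached BPF/BSF
    // coefficients (if those filters exist yet) and resets the processors.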
diff --git a/media/libeffects/hapticgenerator/EffectHapticGenerator.h b/media/libeffects/hapticgenerator/EffectHapticGenerator.h
index 57b4338..96b744a 100644
--- a/media/libeffects/hapticgenerator/EffectHapticGenerator.h
+++ b/media/libeffects/hapticgenerator/EffectHapticGenerator.h
@@ -51,6 +51,16 @@
     // A map from track id to haptic intensity.
     std::map<int, os::HapticScale> id2Intensity;
     os::HapticScale maxHapticIntensity; // max intensity will be used to scale haptic data.
+
+    float resonantFrequency;
+    float bpfQ;
+    float slowEnvNormalizationPower;
+    float bsfZeroQ;
+    float bsfPoleQ;
+    float distortionCornerFrequency;
+    float distortionInputGain;
+    float distortionCubeThreshold;
+    float distortionOutputGain;
 };
 
 // A structure to keep all shared pointers for all processors in HapticGenerator.
@@ -58,6 +68,12 @@
     std::vector<std::shared_ptr<HapticBiquadFilter>> filters;
     std::vector<std::shared_ptr<Ramp>> ramps;
     std::vector<std::shared_ptr<SlowEnvelope>> slowEnvs;
+    std::vector<std::shared_ptr<Distortion>> distortions;
+
+    // Cache the band-pass and band-stop filters so that their coefficients can be updated
+    // when new vibrator info arrives.
+    std::shared_ptr<HapticBiquadFilter> bpf;
+    std::shared_ptr<HapticBiquadFilter> bsf;
 };
 
 // A structure to keep all the context for HapticGenerator.
diff --git a/media/libeffects/hapticgenerator/Processors.cpp b/media/libeffects/hapticgenerator/Processors.cpp
index 3157b35..4fe3a75 100644
--- a/media/libeffects/hapticgenerator/Processors.cpp
+++ b/media/libeffects/hapticgenerator/Processors.cpp
@@ -83,30 +83,92 @@
         float cornerFrequency,
         float sampleRate,
         float normalizationPower,
+        float envOffset,
         size_t channelCount)
         : mLpf(createLPF(cornerFrequency, sampleRate, channelCount)),
           mNormalizationPower(normalizationPower),
-          mChannelCount(channelCount),
-          mEnv(0.25 * (sampleRate / (2 * M_PI * cornerFrequency))) {}
+          mEnvOffset(envOffset),
+          mChannelCount(channelCount) {}
 
 void SlowEnvelope::process(float* out, const float* in, size_t frameCount) {
     size_t sampleCount = frameCount * mChannelCount;
-    if (sampleCount > mLpfInBuffer.size()) {
-        mLpfInBuffer.resize(sampleCount, mEnv);
+    if (sampleCount > mLpfOutBuffer.size()) {
         mLpfOutBuffer.resize(sampleCount);
+        mLpfInBuffer.resize(sampleCount);
+    }
+    for (size_t i = 0; i < sampleCount; ++i) {
+        mLpfInBuffer[i] = fabs(in[i]);
     }
     mLpf->process(mLpfOutBuffer.data(), mLpfInBuffer.data(), frameCount);
     for (size_t i = 0; i < sampleCount; ++i) {
-        *out = *in * pow(mLpfOutBuffer[i], mNormalizationPower);
-        out++;
-        in++;
+        out[i] = in[i] * pow(mLpfOutBuffer[i] + mEnvOffset, mNormalizationPower);
     }
 }
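In equation form, the rewritten process() rectifies the input, smooths it with the low-pass filter, and applies the result (plus the new offset) as a per-sample gain:

    // env[i] = LPF(|in[i]|)
    // out[i] = in[i] * pow(env[i] + mEnvOffset, mNormalizationPower)
    // Per the "partial normalizer, or AGC" note where this is constructed, a negative
    // normalization power makes louder input lower the applied gain; the offset keeps the
    // base of pow() away from zero when the input is silent.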
 
+void SlowEnvelope::setNormalizationPower(float normalizationPower) {
+    mNormalizationPower = normalizationPower;
+}
+
 void SlowEnvelope::clear() {
     mLpf->clear();
 }
 
+// Implementation of distortion
+
+Distortion::Distortion(
+        float cornerFrequency,
+        float sampleRate,
+        float inputGain,
+        float cubeThreshold,
+        float outputGain,
+        size_t channelCount)
+        : mLpf(createLPF2(cornerFrequency, sampleRate, channelCount)),
+          mSampleRate(sampleRate),
+          mCornerFrequency(cornerFrequency),
+          mInputGain(inputGain),
+          mCubeThreshold(cubeThreshold),
+          mOutputGain(outputGain),
+          mChannelCount(channelCount) {}
+
+void Distortion::process(float *out, const float *in, size_t frameCount) {
+    size_t sampleCount = frameCount * mChannelCount;
+    if (sampleCount > mLpfInBuffer.size()) {
+        mLpfInBuffer.resize(sampleCount);
+    }
+    for (size_t i = 0; i < sampleCount; ++i) {
+        const float x = mInputGain * in[i];
+        mLpfInBuffer[i] = x * x * x / (mCubeThreshold + x * x);  // "Coring" nonlinearity.
+    }
+    mLpf->process(out, mLpfInBuffer.data(), frameCount);  // Reduce 3*F components.
+    for (size_t i = 0; i < sampleCount; ++i) {
+        const float x = out[i];
+        out[i] = mOutputGain * x / (1.0f + fabs(x));  // Soft limiter.
+    }
+}
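Per sample, the three steps above amount to the following (x is the input sample, T is mCubeThreshold):

    // 1) coring:      y = (mInputGain * x)^3 / (T + (mInputGain * x)^2)
    //                 ~ cubic for small |x| (suppresses low-level content), ~ linear for large |x|
    // 2) low-pass:    mLpf smooths y to reduce the 3*F components the cubing introduces
    // 3) soft limit:  out = mOutputGain * y' / (1 + |y'|), bounded by +/- mOutputGain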
+
+void Distortion::setCornerFrequency(float cornerFrequency) {
+    mCornerFrequency = cornerFrequency;
+    BiquadFilterCoefficients coefficient = lpfCoefs(cornerFrequency, mSampleRate);
+    mLpf->setCoefficients(coefficient);
+}
+
+void Distortion::setInputGain(float inputGain) {
+    mInputGain = inputGain;
+}
+
+void Distortion::setCubeThreshold(float cubeThreshold) {
+    mCubeThreshold = cubeThreshold;
+}
+
+void Distortion::setOutputGain(float outputGain) {
+    mOutputGain = outputGain;
+}
+
+void Distortion::clear() {
+    mLpf->clear();
+}
+
+
 // Implementation of helper functions
 
 BiquadFilterCoefficients cascadeFirstOrderFilters(const BiquadFilterCoefficients &coefs1,
@@ -134,6 +196,40 @@
     return coefficient;
 }
 
+BiquadFilterCoefficients bpfCoefs(const float ringingFrequency,
+                                  const float q,
+                                  const float sampleRate) {
+    BiquadFilterCoefficients coefficient;
+    const auto [real, img] = getComplexPoleZ(ringingFrequency, q, sampleRate);
+    // Note: this is not a standard cookbook BPF, but a low-pass filter with a zero at DC
+    coefficient[0] = 1.0f;
+    coefficient[1] = -1.0f;
+    coefficient[2] = 0.0f;
+    coefficient[3] = -2 * real;
+    coefficient[4] = real * real + img * img;
+    return coefficient;
+}
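Assuming the usual {b0, b1, b2, a1, a2} ordering of BiquadFilterCoefficients, the coefficients above describe a resonant pole pair with a single zero at DC (p is the complex pole from getComplexPoleZ):

    // H(z) = (1 - z^-1) / (1 - 2*Re(p)*z^-1 + |p|^2 * z^-2)
    // i.e. the "not a standard cookbook BPF" shape noted above: band-pass-like because the
    // DC zero removes the pole pair's low-frequency gain.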
+
+BiquadFilterCoefficients bsfCoefs(const float ringingFrequency,
+                                  const float zq,
+                                  const float pq,
+                                  const float sampleRate) {
+    BiquadFilterCoefficients coefficient;
+    const auto [zeroReal, zeroImg] = getComplexPoleZ(ringingFrequency, zq, sampleRate);
+    float zeroCoeff1 = -2 * zeroReal;
+    float zeroCoeff2 = zeroReal * zeroReal + zeroImg * zeroImg;
+    const auto [poleReal, poleImg] = getComplexPoleZ(ringingFrequency, pq, sampleRate);
+    float poleCoeff1 = -2 * poleReal;
+    float poleCoeff2 = poleReal * poleReal + poleImg * poleImg;
+    const float norm = (1.0f + poleCoeff1 + poleCoeff2) / (1.0f + zeroCoeff1 + zeroCoeff2);
+    coefficient[0] = 1.0f * norm;
+    coefficient[1] = zeroCoeff1 * norm;
+    coefficient[2] = zeroCoeff2 * norm;
+    coefficient[3] = poleCoeff1;
+    coefficient[4] = poleCoeff2;
+    return coefficient;
+}
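Under the same coefficient-ordering assumption, bsfCoefs() places a zero pair (Q = zq) and a pole pair (Q = pq) at the same ringing frequency, with norm chosen so the DC gain is exactly one:

    // H(1) = norm * (1 + zeroCoeff1 + zeroCoeff2) / (1 + poleCoeff1 + poleCoeff2) = 1
    // With pq < zq (the parameter handler derives bsfPoleQ = bsfZeroQ / 2) the zeros dominate
    // near the ringing frequency, producing the band-stop dip while leaving DC untouched.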
+
 std::shared_ptr<HapticBiquadFilter> createLPF(const float cornerFrequency,
                                         const float sampleRate,
                                         const size_t channelCount) {
@@ -166,47 +262,11 @@
             channelCount, cascadeFirstOrderFilters(coefficient, coefficient));
 }
 
-BiquadFilterCoefficients apfCoefs(const float cornerFrequency, const float sampleRate) {
-    BiquadFilterCoefficients coefficient;
-    float realPoleZ = getRealPoleZ(cornerFrequency, sampleRate);
-    float zeroZ = 1.0f / realPoleZ;
-    coefficient[0] = (1.0f - realPoleZ) / (1.0f - zeroZ);
-    coefficient[1] = -coefficient[0] * zeroZ;
-    coefficient[2] = 0.0f;
-    coefficient[3] = -realPoleZ;
-    coefficient[4] = 0.0f;
-    return coefficient;
-}
-
-std::shared_ptr<HapticBiquadFilter> createAPF(const float cornerFrequency,
-                                        const float sampleRate,
-                                        const size_t channelCount) {
-    BiquadFilterCoefficients coefficient = apfCoefs(cornerFrequency, sampleRate);
-    return std::make_shared<HapticBiquadFilter>(channelCount, coefficient);
-}
-
-std::shared_ptr<HapticBiquadFilter> createAPF2(const float cornerFrequency1,
-                                         const float cornerFrequency2,
-                                         const float sampleRate,
-                                         const size_t channelCount) {
-    BiquadFilterCoefficients coefs1 = apfCoefs(cornerFrequency1, sampleRate);
-    BiquadFilterCoefficients coefs2 = apfCoefs(cornerFrequency2, sampleRate);
-    return std::make_shared<HapticBiquadFilter>(
-            channelCount, cascadeFirstOrderFilters(coefs1, coefs2));
-}
-
 std::shared_ptr<HapticBiquadFilter> createBPF(const float ringingFrequency,
                                         const float q,
                                         const float sampleRate,
                                         const size_t channelCount) {
-    BiquadFilterCoefficients coefficient;
-    const auto [real, img] = getComplexPoleZ(ringingFrequency, q, sampleRate);
-    // Note: this is not a standard cookbook BPF, but a low pass filter with zero at DC
-    coefficient[0] = 1.0f;
-    coefficient[1] = -1.0f;
-    coefficient[2] = 0.0f;
-    coefficient[3] = -2 * real;
-    coefficient[4] = real * real + img * img;
+    BiquadFilterCoefficients coefficient = bpfCoefs(ringingFrequency, q, sampleRate);
     return std::make_shared<HapticBiquadFilter>(channelCount, coefficient);
 }
 
@@ -215,19 +275,7 @@
                                         const float pq,
                                         const float sampleRate,
                                         const size_t channelCount) {
-    BiquadFilterCoefficients coefficient;
-    const auto [zeroReal, zeroImg] = getComplexPoleZ(ringingFrequency, zq, sampleRate);
-    float zeroCoeff1 = -2 * zeroReal;
-    float zeroCoeff2 = zeroReal* zeroReal + zeroImg * zeroImg;
-    const auto [poleReal, poleImg] = getComplexPoleZ(ringingFrequency, pq, sampleRate);
-    float poleCoeff1 = -2 * poleReal;
-    float poleCoeff2 = poleReal * poleReal + poleImg * poleImg;
-    const float norm = (1.0f + poleCoeff1 + poleCoeff2) / (1.0f + zeroCoeff1 + zeroCoeff2);
-    coefficient[0] = 1.0f * norm;
-    coefficient[1] = zeroCoeff1 * norm;
-    coefficient[2] = zeroCoeff2 * norm;
-    coefficient[3] = poleCoeff1;
-    coefficient[4] = poleCoeff2;
+    BiquadFilterCoefficients coefficient = bsfCoefs(ringingFrequency, zq, pq, sampleRate);
     return std::make_shared<HapticBiquadFilter>(channelCount, coefficient);
 }
 
diff --git a/media/libeffects/hapticgenerator/Processors.h b/media/libeffects/hapticgenerator/Processors.h
index 5cf0557..74ca77d 100644
--- a/media/libeffects/hapticgenerator/Processors.h
+++ b/media/libeffects/hapticgenerator/Processors.h
@@ -44,19 +44,50 @@
 class SlowEnvelope {
 public:
     SlowEnvelope(float cornerFrequency, float sampleRate,
-                 float normalizationPower, size_t channelCount);
+                 float normalizationPower, float envOffset,
+                 size_t channelCount);
 
     void process(float *out, const float *in, size_t frameCount);
 
+    void setNormalizationPower(float normalizationPower);
+
     void clear();
 
 private:
     const std::shared_ptr<HapticBiquadFilter> mLpf;
     std::vector<float> mLpfInBuffer;
     std::vector<float> mLpfOutBuffer;
-    const float mNormalizationPower;
+    float mNormalizationPower;
+    const float mEnvOffset;
     const float mChannelCount;
-    const float mEnv;
+};
+
+
+// A class providing a process function that compressively distorts a waveform
+class Distortion {
+public:
+    Distortion(float cornerFrequency, float sampleRate,
+               float inputGain, float cubeThreshold,
+               float outputGain, size_t channelCount);
+
+    void process(float *out, const float *in, size_t frameCount);
+
+    void setCornerFrequency(float cornerFrequency);
+    void setInputGain(float inputGain);
+    void setCubeThreshold(float cubeThreshold);
+    void setOutputGain(float outputGain);
+
+    void clear();
+
+private:
+    const std::shared_ptr<HapticBiquadFilter> mLpf;
+    std::vector<float> mLpfInBuffer;
+    float mSampleRate;
+    float mCornerFrequency;
+    float mInputGain;
+    float mCubeThreshold;
+    float mOutputGain;
+    const size_t mChannelCount;
 };
 
 // Helper functions
@@ -64,6 +95,17 @@
 BiquadFilterCoefficients cascadeFirstOrderFilters(const BiquadFilterCoefficients &coefs1,
                                                   const BiquadFilterCoefficients &coefs2);
 
+BiquadFilterCoefficients lpfCoefs(const float cornerFrequency, const float sampleRate);
+
+BiquadFilterCoefficients bpfCoefs(const float ringingFrequency,
+                                  const float q,
+                                  const float sampleRate);
+
+BiquadFilterCoefficients bsfCoefs(const float ringingFrequency,
+                                  const float zq,
+                                  const float pq,
+                                  const float sampleRate);
+
 std::shared_ptr<HapticBiquadFilter> createLPF(const float cornerFrequency,
                                         const float sampleRate,
                                         const size_t channelCount);
@@ -78,16 +120,6 @@
                                          const float sampleRate,
                                          const size_t channelCount);
 
-std::shared_ptr<HapticBiquadFilter> createAPF(const float cornerFrequency,
-                                        const float sampleRate,
-                                        const size_t channelCount);
-
-// Create two cascaded APF with two different corner frequency.
-std::shared_ptr<HapticBiquadFilter> createAPF2(const float cornerFrequency1,
-                                         const float cornerFrequency2,
-                                         const float sampleRate,
-                                         const size_t channelCount);
-
 std::shared_ptr<HapticBiquadFilter> createBPF(const float ringingFrequency,
                                         const float q,
                                         const float sampleRate,
diff --git a/media/libeffects/lvm/benchmarks/Android.bp b/media/libeffects/lvm/benchmarks/Android.bp
index 60a9772..8a25b85 100644
--- a/media/libeffects/lvm/benchmarks/Android.bp
+++ b/media/libeffects/lvm/benchmarks/Android.bp
@@ -28,6 +28,7 @@
 cc_benchmark {
     name: "reverb_benchmark",
     vendor: true,
+    host_supported: true,
     include_dirs: [
         "frameworks/av/media/libeffects/lvm/wrapper/Reverb",
     ],
diff --git a/media/libeffects/lvm/benchmarks/lvm_benchmark.cpp b/media/libeffects/lvm/benchmarks/lvm_benchmark.cpp
index e2e4a85..bdb66d8 100644
--- a/media/libeffects/lvm/benchmarks/lvm_benchmark.cpp
+++ b/media/libeffects/lvm/benchmarks/lvm_benchmark.cpp
@@ -53,8 +53,6 @@
 
 constexpr size_t kNumChMasks = std::size(kChMasks);
 constexpr int kSampleRate = 44100;
-// TODO(b/131240940) Remove once effects are updated to produce mono output
-constexpr size_t kMinOutputChannelCount = 2;
 
 /*******************************************************************
  * A test result running on Pixel 3 for comparison.
@@ -64,6 +62,10 @@
  * -----------------------------------------------------
  * Benchmark           Time             CPU   Iterations
  * -----------------------------------------------------
+ * BM_LVM/1/0       52123 ns        51971 ns        13437
+ * BM_LVM/1/1       75397 ns        75175 ns         9382
+ * BM_LVM/1/2       40253 ns        40140 ns        17418
+ * BM_LVM/1/3       19918 ns        19860 ns        35230
  * BM_LVM/2/0       62455 ns        62283 ns        11214
  * BM_LVM/2/1      110086 ns       109751 ns         6350
  * BM_LVM/2/2       44017 ns        43890 ns        15982
@@ -203,7 +205,7 @@
 
     // Run the test
     for (auto _ : state) {
-        std::vector<float> output(kFrameCount * std::max(channelCount, kMinOutputChannelCount));
+        std::vector<float> output(kFrameCount * channelCount);
 
         benchmark::DoNotOptimize(input.data());
         benchmark::DoNotOptimize(output.data());
@@ -224,8 +226,7 @@
 }
 
 static void LVMArgs(benchmark::internal::Benchmark* b) {
-    // TODO(b/131240940) Test single channel once effects are updated to process mono data
-    for (int i = 2; i <= kNumChMasks; i++) {
+    for (int i = FCC_1; i <= kNumChMasks; i++) {
         for (int j = 0; j < kNumEffectUuids; ++j) {
             b->Args({i, j});
         }
diff --git a/media/libeffects/lvm/lib/Android.bp b/media/libeffects/lvm/lib/Android.bp
index ed7ef7f..5d75055 100644
--- a/media/libeffects/lvm/lib/Android.bp
+++ b/media/libeffects/lvm/lib/Android.bp
@@ -59,7 +59,6 @@
         "Eq/src/LVEQNB_Init.cpp",
         "Eq/src/LVEQNB_Process.cpp",
         "Eq/src/LVEQNB_Tables.cpp",
-        "Common/src/InstAlloc.cpp",
         "Common/src/DC_2I_D16_TRC_WRA_01.cpp",
         "Common/src/DC_2I_D16_TRC_WRA_01_Init.cpp",
         "Common/src/Copy_16.cpp",
@@ -140,16 +139,15 @@
     },
 
     vendor: true,
+    host_supported: true,
     srcs: [
         "Reverb/src/LVREV_ApplyNewSettings.cpp",
         "Reverb/src/LVREV_ClearAudioBuffers.cpp",
         "Reverb/src/LVREV_GetControlParameters.cpp",
         "Reverb/src/LVREV_GetInstanceHandle.cpp",
-        "Reverb/src/LVREV_GetMemoryTable.cpp",
         "Reverb/src/LVREV_Process.cpp",
         "Reverb/src/LVREV_SetControlParameters.cpp",
         "Reverb/src/LVREV_Tables.cpp",
-        "Common/src/InstAlloc.cpp",
         "Common/src/LoadConst_32.cpp",
         "Common/src/From2iToMono_32.cpp",
         "Common/src/Mult3s_32x16.cpp",
diff --git a/media/libeffects/lvm/lib/Bass/src/LVDBE_Control.cpp b/media/libeffects/lvm/lib/Bass/src/LVDBE_Control.cpp
index d860ad0..9fe8116 100644
--- a/media/libeffects/lvm/lib/Bass/src/LVDBE_Control.cpp
+++ b/media/libeffects/lvm/lib/Bass/src/LVDBE_Control.cpp
@@ -277,13 +277,15 @@
     /*
      * Create biquad instance
      */
-    pInstance->pHPFBiquad.reset(new android::audio_utils::BiquadFilter<LVM_FLOAT>(
-            (FCC_1 == pParams->NrChannels) ? FCC_2 : pParams->NrChannels));
-
+    if (pInstance->Params.NrChannels != pParams->NrChannels) {
+        pInstance->pHPFBiquad.reset(
+                new android::audio_utils::BiquadFilter<LVM_FLOAT>(pParams->NrChannels));
+    }
     /*
      * Update the filters
      */
     if ((pInstance->Params.SampleRate != pParams->SampleRate) ||
+        (pInstance->Params.NrChannels != pParams->NrChannels) ||
         (pInstance->Params.CentreFrequency != pParams->CentreFrequency)) {
         LVDBE_SetFilters(pInstance, /* Instance pointer */
                          pParams);  /* New parameters */
diff --git a/media/libeffects/lvm/lib/Bass/src/LVDBE_Init.cpp b/media/libeffects/lvm/lib/Bass/src/LVDBE_Init.cpp
index 979644c..b113f48 100644
--- a/media/libeffects/lvm/lib/Bass/src/LVDBE_Init.cpp
+++ b/media/libeffects/lvm/lib/Bass/src/LVDBE_Init.cpp
@@ -57,10 +57,7 @@
      * Create the instance handle if not already initialised
      */
     if (*phInstance == LVM_NULL) {
-        *phInstance = calloc(1, sizeof(*pInstance));
-    }
-    if (*phInstance == LVM_NULL) {
-        return LVDBE_NULLADDRESS;
+        *phInstance = new LVDBE_Instance_t{};
     }
     pInstance = (LVDBE_Instance_t*)*phInstance;
 
@@ -82,6 +79,7 @@
     pInstance->Params.SampleRate = LVDBE_FS_8000;
     pInstance->Params.VolumeControl = LVDBE_VOLUME_OFF;
     pInstance->Params.VolumedB = 0;
+    pInstance->Params.NrChannels = FCC_2;
 
     /*
      * Create pointer to data and coef memory
@@ -94,7 +92,7 @@
      * Create biquad instance
      */
     pInstance->pHPFBiquad.reset(
-            new android::audio_utils::BiquadFilter<LVM_FLOAT>(LVM_MAX_CHANNELS));
+            new android::audio_utils::BiquadFilter<LVM_FLOAT>(pInstance->Params.NrChannels));
     pInstance->pBPFBiquad.reset(new android::audio_utils::BiquadFilter<LVM_FLOAT>(FCC_1));
 
     /*
@@ -185,6 +183,6 @@
         free(pInstance->pData);
         pInstance->pData = LVM_NULL;
     }
-    free(pInstance);
+    delete pInstance;
     *phInstance = LVM_NULL;
 }
diff --git a/media/libeffects/lvm/lib/Bass/src/LVDBE_Process.cpp b/media/libeffects/lvm/lib/Bass/src/LVDBE_Process.cpp
index 8c62e71..0969053 100644
--- a/media/libeffects/lvm/lib/Bass/src/LVDBE_Process.cpp
+++ b/media/libeffects/lvm/lib/Bass/src/LVDBE_Process.cpp
@@ -79,11 +79,7 @@
         const LVM_UINT16 NrFrames)  // updated to use samples = frames * channels.
 {
     LVDBE_Instance_t* pInstance = (LVDBE_Instance_t*)hInstance;
-
-    /*Extract number of Channels info*/
-    // Mono passed in as stereo
-    const LVM_INT32 NrChannels =
-            pInstance->Params.NrChannels == 1 ? 2 : pInstance->Params.NrChannels;
+    const LVM_INT32 NrChannels = pInstance->Params.NrChannels;
     const LVM_INT32 NrSamples = NrChannels * NrFrames;
 
     /* Space to store DBE path computation */
diff --git a/media/libeffects/lvm/lib/Bundle/src/LVM_Buffers.cpp b/media/libeffects/lvm/lib/Bundle/src/LVM_Buffers.cpp
index fbb0fe1..1d913d7 100644
--- a/media/libeffects/lvm/lib/Bundle/src/LVM_Buffers.cpp
+++ b/media/libeffects/lvm/lib/Bundle/src/LVM_Buffers.cpp
@@ -487,10 +487,6 @@
 void LVM_BufferUnmanagedOut(LVM_Handle_t hInstance, LVM_UINT16* pNumSamples) {
     LVM_Instance_t* pInstance = (LVM_Instance_t*)hInstance;
     LVM_INT16 NumChannels = pInstance->NrChannels;
-    if (NumChannels == 1) {
-        /* Mono input is processed as stereo by LVM module */
-        NumChannels = 2;
-    }
 #undef NrFrames
 #define NrFrames (*pNumSamples)  // alias for clarity
 
diff --git a/media/libeffects/lvm/lib/Bundle/src/LVM_Init.cpp b/media/libeffects/lvm/lib/Bundle/src/LVM_Init.cpp
index c1b375e..9f5f448 100644
--- a/media/libeffects/lvm/lib/Bundle/src/LVM_Init.cpp
+++ b/media/libeffects/lvm/lib/Bundle/src/LVM_Init.cpp
@@ -25,7 +25,6 @@
 #include "LVM_Private.h"
 #include "LVM_Tables.h"
 #include "VectorArithmetic.h"
-#include "InstAlloc.h"
 
 /****************************************************************************************/
 /*                                                                                      */
@@ -93,10 +92,7 @@
     /*
      * Create the instance handle
      */
-    *phInstance = (LVM_Handle_t)calloc(1, sizeof(*pInstance));
-    if (*phInstance == LVM_NULL) {
-        return LVM_NULLADDRESS;
-    }
+    *phInstance = new LVM_Instance_t{};
     pInstance = (LVM_Instance_t*)*phInstance;
 
     pInstance->InstParams = *pInstParams;
@@ -543,7 +539,7 @@
         pInstance->pPSAInput = LVM_NULL;
     }
 
-    free(*phInstance);
+    delete pInstance;
     return;
 }
 
diff --git a/media/libeffects/lvm/lib/Bundle/src/LVM_Process.cpp b/media/libeffects/lvm/lib/Bundle/src/LVM_Process.cpp
index 82c0e68..4eea04f 100644
--- a/media/libeffects/lvm/lib/Bundle/src/LVM_Process.cpp
+++ b/media/libeffects/lvm/lib/Bundle/src/LVM_Process.cpp
@@ -23,6 +23,7 @@
 #include <system/audio.h>
 
 #include "LVM_Private.h"
+#include "ScalarArithmetic.h"
 #include "VectorArithmetic.h"
 #include "LVM_Coeffs.h"
 
@@ -111,19 +112,6 @@
     }
 
     /*
-     * Convert from Mono if necessary
-     */
-    if (pInstance->Params.SourceFormat == LVM_MONO) {
-        MonoTo2I_Float(pInData,                /* Source */
-                       pOutData,               /* Destination */
-                       (LVM_INT16)NumSamples); /* Number of input samples */
-        pInput = pOutData;
-        pToProcess = pOutData;
-        NrChannels = 2;
-        ChMask = AUDIO_CHANNEL_OUT_STEREO;
-    }
-
-    /*
      * Process the data with managed buffers
      */
     while (SampleCount != 0) {
@@ -191,6 +179,9 @@
                  * Apply the filter
                  */
                 pInstance->pTEBiquad->process(pProcessed, pProcessed, NrFrames);
+                for (auto i = 0; i < NrChannels * NrFrames; i++) {
+                    pProcessed[i] = LVM_Clamp(pProcessed[i]);
+                }
             }
             /*
              * Volume balance
diff --git a/media/libeffects/lvm/lib/Common/lib/InstAlloc.h b/media/libeffects/lvm/lib/Common/lib/InstAlloc.h
deleted file mode 100644
index 17699ef..0000000
--- a/media/libeffects/lvm/lib/Common/lib/InstAlloc.h
+++ /dev/null
@@ -1,76 +0,0 @@
-/*
- * Copyright (C) 2004-2010 NXP Software
- * Copyright (C) 2010 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef __INSTALLOC_H__
-#define __INSTALLOC_H__
-
-#include "LVM_Types.h"
-/*######################################################################################*/
-/*  Type declarations                                                                   */
-/*######################################################################################*/
-typedef struct {
-    LVM_UINT32 TotalSize;  /*  Accumulative total memory size                      */
-    uintptr_t pNextMember; /*  Pointer to the next instance member to be allocated */
-} INST_ALLOC;
-
-/*######################################################################################*/
-/*  Function prototypes                                                          */
-/*######################################################################################*/
-
-/****************************************************************************************
- *  Name        : InstAlloc_Init()
- *  Input       : pms  - Pointer to the INST_ALLOC instance
-                  StartAddr - Base address of the instance memory
- *  Returns     : Error code
- *  Description : Initializes the instance distribution and memory size calculation function
- *  Remarks     :
- ****************************************************************************************/
-
-void InstAlloc_Init(INST_ALLOC* pms, void* StartAddr);
-
-/****************************************************************************************
- *  Name        : InstAlloc_AddMember()
- *  Input       : pms  - Pointer to the INST_ALLOC instance
-                  Size - The size in bytes of the new added member
- *  Returns     : A pointer to the new added member
- *  Description : Allocates space for a new member in the instance memory and returns
-                  a pointer to this new member.  The start address of all members will
-                  be 32 bit alligned.
- *  Remarks     :
- ****************************************************************************************/
-
-void* InstAlloc_AddMember(INST_ALLOC* pms, LVM_UINT32 Size);
-
-/****************************************************************************************
- *  Name        : InstAlloc_GetTotal()
- *  Input       : pms  - Pointer to the INST_ALLOC instance
- *  Returns     : The instance memory size
- *  Description : This functions returns the calculated instance memory size
- *  Remarks     :
- ****************************************************************************************/
-
-LVM_UINT32 InstAlloc_GetTotal(INST_ALLOC* pms);
-
-void* InstAlloc_AddMemberAllRet(INST_ALLOC* pms, LVM_UINT32 Size[], void** ptr);
-
-void* InstAlloc_AddMemberAll(INST_ALLOC* pms, LVM_UINT32 Size[], LVM_MemoryTable_st* pMemoryTable);
-
-void InstAlloc_InitAll(INST_ALLOC* pms, LVM_MemoryTable_st* pMemoryTable);
-
-void InstAlloc_InitAll_NULL(INST_ALLOC* pms);
-
-#endif /* __JBS_INSTALLOC_H__ */
diff --git a/media/libeffects/lvm/lib/Common/lib/LVM_Types.h b/media/libeffects/lvm/lib/Common/lib/LVM_Types.h
index b95d076..7cfaf27 100644
--- a/media/libeffects/lvm/lib/Common/lib/LVM_Types.h
+++ b/media/libeffects/lvm/lib/Common/lib/LVM_Types.h
@@ -121,26 +121,18 @@
     LVM_FS_DUMMY = LVM_MAXENUM
 } LVM_Fs_en;
 
-/* Memory Types */
-typedef enum {
-    LVM_PERSISTENT_SLOW_DATA = LVM_MEMREGION_PERSISTENT_SLOW_DATA,
-    LVM_PERSISTENT_FAST_DATA = LVM_MEMREGION_PERSISTENT_FAST_DATA,
-    LVM_PERSISTENT_FAST_COEF = LVM_MEMREGION_PERSISTENT_FAST_COEF,
-    LVM_TEMPORARY_FAST = LVM_MEMREGION_TEMPORARY_FAST,
-    LVM_MEMORYTYPE_DUMMY = LVM_MAXENUM
-} LVM_MemoryTypes_en;
-
-/* Memory region definition */
-typedef struct {
-    LVM_UINT32 Size;         /* Region size in bytes */
-    LVM_MemoryTypes_en Type; /* Region type */
-    void* pBaseAddress;      /* Pointer to the region base address */
-} LVM_MemoryRegion_st;
-
-/* Memory table containing the region definitions */
-typedef struct {
-    LVM_MemoryRegion_st Region[LVM_NR_MEMORY_REGIONS]; /* One definition for each region */
-} LVM_MemoryTable_st;
+static inline LVM_Fs_en lvmFsForSampleRate(int sampleRate) {
+    static const std::map<int, LVM_Fs_en> kLVMFsMap = {
+            {8000, LVM_FS_8000},    {11025, LVM_FS_11025}, {12000, LVM_FS_12000},
+            {16000, LVM_FS_16000},  {22050, LVM_FS_22050}, {24000, LVM_FS_24000},
+            {32000, LVM_FS_32000},  {44100, LVM_FS_44100}, {48000, LVM_FS_48000},
+            {88200, LVM_FS_88200},  {96000, LVM_FS_96000}, {176400, LVM_FS_176400},
+            {192000, LVM_FS_192000}};
+    if (kLVMFsMap.find(sampleRate) != kLVMFsMap.end()) {
+        return kLVMFsMap.at(sampleRate);
+    }
+    return LVM_FS_INVALID;
+}
 
 /****************************************************************************************/
 /*                                                                                      */
diff --git a/media/libeffects/lvm/lib/Common/lib/VectorArithmetic.h b/media/libeffects/lvm/lib/Common/lib/VectorArithmetic.h
index 281d941..18de85b 100644
--- a/media/libeffects/lvm/lib/Common/lib/VectorArithmetic.h
+++ b/media/libeffects/lvm/lib/Common/lib/VectorArithmetic.h
@@ -29,17 +29,18 @@
 void Copy_Float(const LVM_FLOAT* src, LVM_FLOAT* dst, LVM_INT16 n);
 void Copy_Float_Mc_Stereo(const LVM_FLOAT* src, LVM_FLOAT* dst, LVM_INT16 NrFrames,
                           LVM_INT32 NrChannels);
-void Copy_Float_Stereo_Mc(const LVM_FLOAT* src, LVM_FLOAT* StereoOut, LVM_FLOAT* dst,
+void Copy_Float_Stereo_Mc(const LVM_FLOAT* src, const LVM_FLOAT* StereoOut, LVM_FLOAT* dst,
                           LVM_INT16 NrFrames, LVM_INT32 NrChannels);
 
 void Mult3s_Float(const LVM_FLOAT* src, const LVM_FLOAT val, LVM_FLOAT* dst, LVM_INT16 n);
 
-void DelayMix_Float(const LVM_FLOAT* src, /* Source 1, to be delayed */
-                    LVM_FLOAT* delay,     /* Delay buffer */
-                    LVM_INT16 size,       /* Delay size */
-                    LVM_FLOAT* dst,       /* Source/destination */
-                    LVM_INT16* pOffset,   /* Delay offset */
-                    LVM_INT16 n);         /* Number of stereo samples */
+void DelayMix_Float(const LVM_FLOAT* src,  /* Source 1, to be delayed */
+                    LVM_FLOAT* delay,      /* Delay buffer */
+                    LVM_INT16 size,        /* Delay size */
+                    LVM_FLOAT* dst,        /* Source/destination */
+                    LVM_INT16* pOffset,    /* Delay offset */
+                    LVM_INT16 n,           /* Number of stereo samples */
+                    LVM_INT32 NrChannels); /* Number of channels */
 void Add2_Sat_Float(const LVM_FLOAT* src, LVM_FLOAT* dst, LVM_INT16 n);
 void Mac3s_Sat_Float(const LVM_FLOAT* src, const LVM_FLOAT val, LVM_FLOAT* dst, LVM_INT16 n);
 
diff --git a/media/libeffects/lvm/lib/Common/src/Copy_16.cpp b/media/libeffects/lvm/lib/Common/src/Copy_16.cpp
index 7046a94..1fe7470 100644
--- a/media/libeffects/lvm/lib/Common/src/Copy_16.cpp
+++ b/media/libeffects/lvm/lib/Common/src/Copy_16.cpp
@@ -51,25 +51,32 @@
 }
 
 // Merge a multichannel source with stereo contained in StereoOut, to dst.
-void Copy_Float_Stereo_Mc(const LVM_FLOAT* src, LVM_FLOAT* StereoOut, LVM_FLOAT* dst,
+void Copy_Float_Stereo_Mc(const LVM_FLOAT* src, const LVM_FLOAT* StereoOut, LVM_FLOAT* dst,
                           LVM_INT16 NrFrames, /* Number of frames*/
                           LVM_INT32 NrChannels) {
     LVM_INT16 ii, jj;
 
-    // pack dst with stereo information of StereoOut
-    // together with the upper channels of src.
-    StereoOut += 2 * (NrFrames - 1);
-    dst += NrChannels * (NrFrames - 1);
-    src += NrChannels * (NrFrames - 1);
-    for (ii = NrFrames; ii != 0; ii--) {
-        dst[1] = StereoOut[1];
-        dst[0] = StereoOut[0];  // copy 1 before 0 is required for NrChannels == 3.
-        for (jj = 2; jj < NrChannels; jj++) {
-            dst[jj] = src[jj];
+    if (NrChannels >= FCC_2) {
+        // pack dst with stereo information of StereoOut
+        // together with the upper channels of src.
+        StereoOut += 2 * (NrFrames - 1);
+        dst += NrChannels * (NrFrames - 1);
+        src += NrChannels * (NrFrames - 1);
+
+        for (ii = NrFrames; ii != 0; ii--) {
+            dst[1] = StereoOut[1];
+            dst[0] = StereoOut[0];  // copy 1 before 0 is required for NrChannels == 3.
+            for (jj = FCC_2; jj < NrChannels; jj++) {
+                dst[jj] = src[jj];
+            }
+            dst -= NrChannels;
+            src -= NrChannels;
+            StereoOut -= 2;
         }
-        dst -= NrChannels;
-        src -= NrChannels;
-        StereoOut -= 2;
+    } else {
+        Copy_Float((const LVM_FLOAT*)StereoOut, /* Source */
+                   (LVM_FLOAT*)dst,             /* Destination */
+                   (LVM_INT16)NrFrames);        /* Number of frames */
     }
 }
 /**********************************************************************************/
diff --git a/media/libeffects/lvm/lib/Common/src/DelayMix_16x16.cpp b/media/libeffects/lvm/lib/Common/src/DelayMix_16x16.cpp
index d2537eb..a346636 100644
--- a/media/libeffects/lvm/lib/Common/src/DelayMix_16x16.cpp
+++ b/media/libeffects/lvm/lib/Common/src/DelayMix_16x16.cpp
@@ -26,34 +26,50 @@
                     LVM_INT16 size,       /* Delay size */
                     LVM_FLOAT* dst,       /* Source/destination */
                     LVM_INT16* pOffset,   /* Delay offset */
-                    LVM_INT16 n)          /* Number of stereo samples */
+                    LVM_INT16 n,          /* Number of samples */
+                    LVM_INT32 NrChannels) /* Number of channels */
 {
     LVM_INT16 i;
     LVM_INT16 Offset = *pOffset;
     LVM_FLOAT temp;
 
     for (i = 0; i < n; i++) {
-        /* Left channel */
-        temp = (LVM_FLOAT)((LVM_FLOAT)(*dst + (LVM_FLOAT)delay[Offset]) / 2.0f);
-        *dst = temp;
-        dst++;
+        if (NrChannels == FCC_1) {
+            temp = (LVM_FLOAT)(*dst + (LVM_FLOAT)delay[Offset]) / 2.0f;
+            *dst = temp;
+            dst++;
 
-        delay[Offset] = *src;
-        Offset++;
-        src++;
+            delay[Offset] = *src;
+            Offset++;
+            src++;
 
-        /* Right channel */
-        temp = (LVM_FLOAT)((LVM_FLOAT)(*dst - (LVM_FLOAT)delay[Offset]) / 2.0f);
-        *dst = temp;
-        dst++;
+            /* Make the reverb delay buffer a circular buffer */
+            if (Offset >= size) {
+                Offset = 0;
+            }
+        } else {
+            /* Left channel */
+            temp = (LVM_FLOAT)(*dst + (LVM_FLOAT)delay[Offset]) / 2.0f;
+            *dst = temp;
+            dst++;
 
-        delay[Offset] = *src;
-        Offset++;
-        src++;
+            delay[Offset] = *src;
+            Offset++;
+            src++;
 
-        /* Make the reverb delay buffer a circular buffer */
-        if (Offset >= size) {
-            Offset = 0;
+            /* Right channel */
+            temp = (LVM_FLOAT)(*dst - (LVM_FLOAT)delay[Offset]) / 2.0f;
+            *dst = temp;
+            dst++;
+
+            delay[Offset] = *src;
+            Offset++;
+            src++;
+
+            /* Make the reverb delay buffer a circular buffer */
+            if (Offset >= size) {
+                Offset = 0;
+            }
         }
     }
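Per frame, the reworked loop reduces to the following (the delay buffer stores the channels interleaved, and Offset wraps at `size`, so it behaves as a circular buffer):

    // mono:   dst = (dst + delayed) / 2, then the current src sample overwrites that delay slot
    // stereo: left  = (left  + delayedL) / 2
    //         right = (right - delayedR) / 2   // the sign flip puts the delayed content in
    //                                          // opposite phase on the two channels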
 
diff --git a/media/libeffects/lvm/lib/Common/src/InstAlloc.cpp b/media/libeffects/lvm/lib/Common/src/InstAlloc.cpp
deleted file mode 100644
index 2cfe056..0000000
--- a/media/libeffects/lvm/lib/Common/src/InstAlloc.cpp
+++ /dev/null
@@ -1,164 +0,0 @@
-/*
- * Copyright (C) 2004-2010 NXP Software
- * Copyright (C) 2010 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include "InstAlloc.h"
-
-/****************************************************************************************
- *  Name        : InstAlloc_Init()
- *  Input       : pms  - Pointer to the INST_ALLOC instance
-                  StartAddr - Base address of the instance memory
- *  Returns     : Error code
- *  Description : Initializes the instance distribution and memory size calculation function
- *  Remarks     :
- ****************************************************************************************/
-
-void InstAlloc_Init(INST_ALLOC* pms, void* StartAddr) {
-    pms->TotalSize = 3;
-    pms->pNextMember = (((uintptr_t)StartAddr + 3) & (uintptr_t)~3);
-}
-
-/****************************************************************************************
- *  Name        : InstAlloc_AddMember()
- *  Input       : pms  - Pointer to the INST_ALLOC instance
-                  Size - The size in bytes of the new added member
- *  Returns     : A pointer to the new added member
- *  Description : Allocates space for a new member in the instance memory and returns
-                  a pointer to this new member.  The start address of all members will
-                  be 32 bit alligned.
- *  Remarks     :
- ****************************************************************************************/
-
-void* InstAlloc_AddMember(INST_ALLOC* pms, LVM_UINT32 Size) {
-    void* NewMemberAddress; /* Variable to temporarily store the return value */
-    NewMemberAddress = (void*)pms->pNextMember;
-
-    Size = ((Size + 3) & (LVM_UINT32)~3); /* Ceil the size to a multiple of four */
-
-    pms->TotalSize += Size;
-    pms->pNextMember += Size;
-
-    return (NewMemberAddress);
-}
-
-/****************************************************************************************
- *  Name        : InstAlloc_GetTotal()
- *  Input       : pms  - Pointer to the INST_ALLOC instance
- *  Returns     : The instance memory size
- *  Description : This functions returns the calculated instance memory size
- *  Remarks     :
- ****************************************************************************************/
-
-LVM_UINT32 InstAlloc_GetTotal(INST_ALLOC* pms) {
-    if (pms->TotalSize > 3) {
-        return (pms->TotalSize);
-    } else {
-        return 0; /* No memory added */
-    }
-}
-
-void InstAlloc_InitAll(INST_ALLOC* pms, LVM_MemoryTable_st* pMemoryTable) {
-    uintptr_t StartAddr;
-
-    StartAddr = (uintptr_t)pMemoryTable->Region[LVM_PERSISTENT_SLOW_DATA].pBaseAddress;
-
-    pms[0].TotalSize = 3;
-    pms[0].pNextMember = ((StartAddr + 3) & (uintptr_t)~3);
-
-    StartAddr = (uintptr_t)pMemoryTable->Region[LVM_PERSISTENT_FAST_DATA].pBaseAddress;
-
-    pms[1].TotalSize = 3;
-    pms[1].pNextMember = ((StartAddr + 3) & (uintptr_t)~3);
-
-    StartAddr = (uintptr_t)pMemoryTable->Region[LVM_PERSISTENT_FAST_COEF].pBaseAddress;
-
-    pms[2].TotalSize = 3;
-    pms[2].pNextMember = ((StartAddr + 3) & (uintptr_t)~3);
-
-    StartAddr = (uintptr_t)pMemoryTable->Region[LVM_TEMPORARY_FAST].pBaseAddress;
-
-    pms[3].TotalSize = 3;
-    pms[3].pNextMember = ((StartAddr + 3) & (uintptr_t)~3);
-}
-
-/****************************************************************************************
- *  Name        : InstAlloc_InitAll_NULL()
- *  Input       : pms  - Pointer to array of four INST_ALLOC instances
- *  Returns     : Nothing
- *  Description : This function reserves Size of 3 bytes for all memory regions and
- *                intializes pNextMember for all regions to 0
- *  Remarks     :
- ****************************************************************************************/
-
-void InstAlloc_InitAll_NULL(INST_ALLOC* pms) {
-    pms[0].TotalSize = 3;
-    pms[0].pNextMember = 0;
-
-    pms[1].TotalSize = 3;
-    pms[1].pNextMember = 0;
-
-    pms[2].TotalSize = 3;
-    pms[2].pNextMember = 0;
-
-    pms[3].TotalSize = 3;
-    pms[3].pNextMember = 0;
-}
-
-void* InstAlloc_AddMemberAll(INST_ALLOC* pms, LVM_UINT32 Size[], LVM_MemoryTable_st* pMemoryTable) {
-    void* NewMemberAddress; /* Variable to temporarily store the return value */
-
-    /* coverity[returned_pointer] Ignore coverity warning that ptr is not used */
-    NewMemberAddress =
-            InstAlloc_AddMember(&pms[LVM_PERSISTENT_SLOW_DATA], Size[LVM_PERSISTENT_SLOW_DATA]);
-
-    pMemoryTable->Region[LVM_PERSISTENT_SLOW_DATA].Size =
-            InstAlloc_GetTotal(&pms[LVM_PERSISTENT_SLOW_DATA]);
-    pMemoryTable->Region[LVM_PERSISTENT_SLOW_DATA].Type = LVM_PERSISTENT_SLOW_DATA;
-    pMemoryTable->Region[LVM_PERSISTENT_SLOW_DATA].pBaseAddress = LVM_NULL;
-
-    NewMemberAddress =
-            InstAlloc_AddMember(&pms[LVM_PERSISTENT_FAST_DATA], Size[LVM_PERSISTENT_FAST_DATA]);
-
-    pMemoryTable->Region[LVM_PERSISTENT_FAST_DATA].Size =
-            InstAlloc_GetTotal(&pms[LVM_PERSISTENT_FAST_DATA]);
-    pMemoryTable->Region[LVM_PERSISTENT_FAST_DATA].Type = LVM_PERSISTENT_FAST_DATA;
-    pMemoryTable->Region[LVM_PERSISTENT_FAST_DATA].pBaseAddress = LVM_NULL;
-
-    NewMemberAddress =
-            InstAlloc_AddMember(&pms[LVM_PERSISTENT_FAST_COEF], Size[LVM_PERSISTENT_FAST_COEF]);
-
-    pMemoryTable->Region[LVM_PERSISTENT_FAST_COEF].Size =
-            InstAlloc_GetTotal(&pms[LVM_PERSISTENT_FAST_COEF]);
-    pMemoryTable->Region[LVM_PERSISTENT_FAST_COEF].Type = LVM_PERSISTENT_FAST_COEF;
-    pMemoryTable->Region[LVM_PERSISTENT_FAST_COEF].pBaseAddress = LVM_NULL;
-
-    NewMemberAddress = InstAlloc_AddMember(&pms[LVM_TEMPORARY_FAST], Size[LVM_TEMPORARY_FAST]);
-
-    pMemoryTable->Region[LVM_TEMPORARY_FAST].Size = InstAlloc_GetTotal(&pms[LVM_TEMPORARY_FAST]);
-    pMemoryTable->Region[LVM_TEMPORARY_FAST].Type = LVM_TEMPORARY_FAST;
-    pMemoryTable->Region[LVM_TEMPORARY_FAST].pBaseAddress = LVM_NULL;
-
-    return (NewMemberAddress);
-}
-
-void* InstAlloc_AddMemberAllRet(INST_ALLOC* pms, LVM_UINT32 Size[], void** ptr) {
-    ptr[0] = InstAlloc_AddMember(&pms[LVM_PERSISTENT_SLOW_DATA], Size[LVM_PERSISTENT_SLOW_DATA]);
-    ptr[1] = InstAlloc_AddMember(&pms[LVM_PERSISTENT_FAST_DATA], Size[LVM_PERSISTENT_FAST_DATA]);
-    ptr[2] = InstAlloc_AddMember(&pms[LVM_PERSISTENT_FAST_COEF], Size[LVM_PERSISTENT_FAST_COEF]);
-    ptr[3] = InstAlloc_AddMember(&pms[LVM_TEMPORARY_FAST], Size[LVM_TEMPORARY_FAST]);
-
-    return (ptr[0]);
-}
diff --git a/media/libeffects/lvm/lib/Common/src/LVC_MixSoft_1St_2i_D16C31_SAT.cpp b/media/libeffects/lvm/lib/Common/src/LVC_MixSoft_1St_2i_D16C31_SAT.cpp
index 58bc06e..b0aa172 100644
--- a/media/libeffects/lvm/lib/Common/src/LVC_MixSoft_1St_2i_D16C31_SAT.cpp
+++ b/media/libeffects/lvm/lib/Common/src/LVC_MixSoft_1St_2i_D16C31_SAT.cpp
@@ -56,10 +56,11 @@
     Mix_Private_FLOAT_st* pInstance[NrChannels];
 
     if (audio_channel_mask_get_representation(ChMask) == AUDIO_CHANNEL_REPRESENTATION_INDEX) {
-        for (int i = 0; i < 2; i++) {
+        int loopLimit = (NrChannels == FCC_1) ? NrChannels : FCC_2;
+        for (int i = 0; i < loopLimit; i++) {
             pInstance[i] = pMixPrivInst[i];
         }
-        for (int i = 2; i < NrChannels; i++) {
+        for (int i = loopLimit; i < NrChannels; i++) {
             pInstance[i] = pMixPrivInst[2];
         }
     } else {
diff --git a/media/libeffects/lvm/lib/Eq/src/LVEQNB_Control.cpp b/media/libeffects/lvm/lib/Eq/src/LVEQNB_Control.cpp
index 3ab6afb..7e5caed 100644
--- a/media/libeffects/lvm/lib/Eq/src/LVEQNB_Control.cpp
+++ b/media/libeffects/lvm/lib/Eq/src/LVEQNB_Control.cpp
@@ -311,9 +311,8 @@
     /*
      * Create biquad instance
      */
-    pInstance->eqBiquad.resize(
-            pParams->NBands, android::audio_utils::BiquadFilter<LVM_FLOAT>(
-                                     (FCC_1 == pParams->NrChannels) ? FCC_2 : pParams->NrChannels));
+    pInstance->eqBiquad.resize(pParams->NBands,
+                               android::audio_utils::BiquadFilter<LVM_FLOAT>(pParams->NrChannels));
     LVEQNB_ClearFilterHistory(pInstance);
 
     if (bChange || modeChange) {
diff --git a/media/libeffects/lvm/lib/Eq/src/LVEQNB_Init.cpp b/media/libeffects/lvm/lib/Eq/src/LVEQNB_Init.cpp
index 833ee5d..3473262 100644
--- a/media/libeffects/lvm/lib/Eq/src/LVEQNB_Init.cpp
+++ b/media/libeffects/lvm/lib/Eq/src/LVEQNB_Init.cpp
@@ -24,7 +24,6 @@
 #include <stdlib.h>
 #include "LVEQNB.h"
 #include "LVEQNB_Private.h"
-#include "InstAlloc.h"
 #include <string.h> /* For memset */
 
 /****************************************************************************************/
@@ -52,10 +51,7 @@
                                    LVEQNB_Capabilities_t* pCapabilities, void* pScratch) {
     LVEQNB_Instance_t* pInstance;
 
-    *phInstance = calloc(1, sizeof(*pInstance));
-    if (phInstance == LVM_NULL) {
-        return LVEQNB_NULLADDRESS;
-    }
+    *phInstance = new LVEQNB_Instance_t{};
     pInstance = (LVEQNB_Instance_t*)*phInstance;
 
     pInstance->Capabilities = *pCapabilities;
@@ -146,6 +142,6 @@
         free(pInstance->pBiquadType);
         pInstance->pBiquadType = LVM_NULL;
     }
-    free(pInstance);
+    delete pInstance;
     *phInstance = LVM_NULL;
 }
diff --git a/media/libeffects/lvm/lib/Eq/src/LVEQNB_Process.cpp b/media/libeffects/lvm/lib/Eq/src/LVEQNB_Process.cpp
index 8992803..b177dd4 100644
--- a/media/libeffects/lvm/lib/Eq/src/LVEQNB_Process.cpp
+++ b/media/libeffects/lvm/lib/Eq/src/LVEQNB_Process.cpp
@@ -62,10 +62,7 @@
         LVEQNB_Handle_t hInstance, const LVM_FLOAT* pInData, LVM_FLOAT* pOutData,
         const LVM_UINT16 NrFrames) {  // updated to use samples = frames * channels.
     LVEQNB_Instance_t* pInstance = (LVEQNB_Instance_t*)hInstance;
-
-    // Mono passed in as stereo
-    const LVM_INT32 NrChannels =
-            pInstance->Params.NrChannels == 1 ? 2 : pInstance->Params.NrChannels;
+    const LVM_INT32 NrChannels = pInstance->Params.NrChannels;
     const LVM_INT32 NrSamples = NrChannels * NrFrames;
 
     /* Check for NULL pointers */
@@ -104,7 +101,6 @@
                  * Check if band is non-zero dB gain
                  */
                 if (pInstance->pBandDefinitions[i].Gain != 0) {
-
                     /*
                      * Select single or double precision as required
                      */
diff --git a/media/libeffects/lvm/lib/Reverb/lib/LVREV.h b/media/libeffects/lvm/lib/Reverb/lib/LVREV.h
index 484787a..82e94da 100644
--- a/media/libeffects/lvm/lib/Reverb/lib/LVREV.h
+++ b/media/libeffects/lvm/lib/Reverb/lib/LVREV.h
@@ -78,11 +78,6 @@
 /*                                                                                      */
 /****************************************************************************************/
 
-/* Memory table containing the region definitions */
-typedef struct {
-    LVM_MemoryRegion_st Region[LVREV_NR_MEMORY_REGIONS]; /* One definition for each region */
-} LVREV_MemoryTable_st;
-
 /* Control Parameter structure */
 typedef struct {
     /* General parameters */
@@ -121,46 +116,6 @@
 
 /****************************************************************************************/
 /*                                                                                      */
-/* FUNCTION:                LVREV_GetMemoryTable                                        */
-/*                                                                                      */
-/* DESCRIPTION:                                                                         */
-/*  This function is used to obtain the LVREV module memory requirements to support     */
-/*  memory allocation. It can also be used to return the memory base address provided   */
-/*  during memory allocation to support freeing of memory when the LVREV module is no   */
-/*  longer required. It is called in two ways:                                          */
-/*                                                                                      */
-/*  hInstance = NULL                Returns the memory requirements                     */
-/*  hInstance = Instance handle     Returns the memory requirements and allocated       */
-/*                                  base addresses.                                     */
-/*                                                                                      */
-/*  When this function is called with hInstance = NULL the memory base address pointers */
-/*  will be NULL on return.                                                             */
-/*                                                                                      */
-/*  When the function is called for freeing memory, hInstance = Instance Handle the     */
-/*  memory table returns the allocated memory and base addresses used during            */
-/*  initialisation.                                                                     */
-/*                                                                                      */
-/* PARAMETERS:                                                                          */
-/*  hInstance               Instance Handle                                             */
-/*  pMemoryTable            Pointer to an empty memory table                            */
-/*  pInstanceParams         Pointer to the instance parameters                          */
-/*                                                                                      */
-/* RETURNS:                                                                             */
-/*  LVREV_SUCCESS           Succeeded                                                   */
-/*  LVREV_NULLADDRESS       When pMemoryTable is NULL                                   */
-/*  LVREV_NULLADDRESS       When requesting memory requirements and pInstanceParams     */
-/*                          is NULL                                                     */
-/*                                                                                      */
-/* NOTES:                                                                               */
-/*  1.  This function may be interrupted by the LVREV_Process function                  */
-/*                                                                                      */
-/****************************************************************************************/
-LVREV_ReturnStatus_en LVREV_GetMemoryTable(LVREV_Handle_t hInstance,
-                                           LVREV_MemoryTable_st* pMemoryTable,
-                                           LVREV_InstanceParams_st* pInstanceParams);
-
-/****************************************************************************************/
-/*                                                                                      */
 /* FUNCTION:                LVREV_GetInstanceHandle                                     */
 /*                                                                                      */
 /* DESCRIPTION:                                                                         */
@@ -174,7 +129,6 @@
 /*                                                                                      */
 /* PARAMETERS:                                                                          */
 /*  phInstance              Pointer to the instance handle                              */
-/*  pMemoryTable            Pointer to the memory definition table                      */
 /*  pInstanceParams         Pointer to the instance parameters                          */
 /*                                                                                      */
 /* RETURNS:                                                                             */
@@ -186,11 +140,27 @@
 /*                                                                                      */
 /****************************************************************************************/
 LVREV_ReturnStatus_en LVREV_GetInstanceHandle(LVREV_Handle_t* phInstance,
-                                              LVREV_MemoryTable_st* pMemoryTable,
                                               LVREV_InstanceParams_st* pInstanceParams);
 
 /****************************************************************************************/
 /*                                                                                      */
+/* FUNCTION:                LVREV_FreeInstance                                          */
+/*                                                                                      */
+/* DESCRIPTION:                                                                         */
+/*  This function is used to free the internal allocations of the module.               */
+/*                                                                                      */
+/* PARAMETERS:                                                                          */
+/*  hInstance               Instance handle                                             */
+/*                                                                                      */
+/* RETURNS:                                                                             */
+/*  LVREV_SUCCESS          free instance succeeded                                      */
+/*  LVREV_NULLADDRESS      Instance is NULL                                             */
+/*                                                                                      */
+/****************************************************************************************/
+LVREV_ReturnStatus_en LVREV_FreeInstance(LVREV_Handle_t hInstance);
+
+/****************************************************************************************/
+/*                                                                                      */
 /* FUNCTION:                LVXX_GetControlParameters                                   */
 /*                                                                                      */
 /* DESCRIPTION:                                                                         */
diff --git a/media/libeffects/lvm/lib/Reverb/src/LVREV_GetInstanceHandle.cpp b/media/libeffects/lvm/lib/Reverb/src/LVREV_GetInstanceHandle.cpp
index 9a797bd..3a63698 100644
--- a/media/libeffects/lvm/lib/Reverb/src/LVREV_GetInstanceHandle.cpp
+++ b/media/libeffects/lvm/lib/Reverb/src/LVREV_GetInstanceHandle.cpp
@@ -21,7 +21,6 @@
 /*                                                                                      */
 /****************************************************************************************/
 #include "LVREV_Private.h"
-#include "InstAlloc.h"
 
 /****************************************************************************************/
 /*                                                                                      */
@@ -34,7 +33,6 @@
 /*                                                                                      */
 /* PARAMETERS:                                                                          */
 /*  phInstance              pointer to the instance handle                              */
-/*  pMemoryTable            Pointer to the memory definition table                      */
 /*  pInstanceParams         Pointer to the instance parameters                          */
 /*                                                                                      */
 /* RETURNS:                                                                             */
@@ -46,12 +44,7 @@
 /*                                                                                      */
 /****************************************************************************************/
 LVREV_ReturnStatus_en LVREV_GetInstanceHandle(LVREV_Handle_t* phInstance,
-                                              LVREV_MemoryTable_st* pMemoryTable,
                                               LVREV_InstanceParams_st* pInstanceParams) {
-    INST_ALLOC SlowData;
-    INST_ALLOC FastData;
-    INST_ALLOC FastCoef;
-    INST_ALLOC Temporary;
     LVREV_Instance_st* pLVREV_Private;
     LVM_INT16 i;
     LVM_UINT16 MaxBlockSize;
@@ -60,18 +53,9 @@
      * Check for error conditions
      */
     /* Check for NULL pointers */
-    if ((phInstance == LVM_NULL) || (pMemoryTable == LVM_NULL) || (pInstanceParams == LVM_NULL)) {
+    if ((phInstance == LVM_NULL) || (pInstanceParams == LVM_NULL)) {
         return LVREV_NULLADDRESS;
     }
-    /* Check the memory table for NULL pointers */
-    for (i = 0; i < LVREV_NR_MEMORY_REGIONS; i++) {
-        if (pMemoryTable->Region[i].Size != 0) {
-            if (pMemoryTable->Region[i].pBaseAddress == LVM_NULL) {
-                return (LVREV_NULLADDRESS);
-            }
-        }
-    }
-
     /*
      * Check all instance parameters are in range
      */
@@ -88,36 +72,12 @@
     }
 
     /*
-     * Initialise the InstAlloc instances
-     */
-    InstAlloc_Init(&SlowData, pMemoryTable->Region[LVM_PERSISTENT_SLOW_DATA].pBaseAddress);
-    InstAlloc_Init(&FastData, pMemoryTable->Region[LVM_PERSISTENT_FAST_DATA].pBaseAddress);
-    InstAlloc_Init(&FastCoef, pMemoryTable->Region[LVM_PERSISTENT_FAST_COEF].pBaseAddress);
-    InstAlloc_Init(&Temporary, pMemoryTable->Region[LVM_TEMPORARY_FAST].pBaseAddress);
-
-    /*
-     * Zero all memory regions
-     */
-    LoadConst_Float(
-            0, (LVM_FLOAT*)pMemoryTable->Region[LVM_PERSISTENT_SLOW_DATA].pBaseAddress,
-            (LVM_INT16)((pMemoryTable->Region[LVM_PERSISTENT_SLOW_DATA].Size) / sizeof(LVM_FLOAT)));
-    LoadConst_Float(
-            0, (LVM_FLOAT*)pMemoryTable->Region[LVM_PERSISTENT_FAST_DATA].pBaseAddress,
-            (LVM_INT16)((pMemoryTable->Region[LVM_PERSISTENT_FAST_DATA].Size) / sizeof(LVM_FLOAT)));
-    LoadConst_Float(
-            0, (LVM_FLOAT*)pMemoryTable->Region[LVM_PERSISTENT_FAST_COEF].pBaseAddress,
-            (LVM_INT16)((pMemoryTable->Region[LVM_PERSISTENT_FAST_COEF].Size) / sizeof(LVM_FLOAT)));
-    LoadConst_Float(
-            0, (LVM_FLOAT*)pMemoryTable->Region[LVM_TEMPORARY_FAST].pBaseAddress,
-            (LVM_INT16)((pMemoryTable->Region[LVM_TEMPORARY_FAST].Size) / sizeof(LVM_FLOAT)));
-    /*
      * Set the instance handle if not already initialised
      */
     if (*phInstance == LVM_NULL) {
-        *phInstance = InstAlloc_AddMember(&SlowData, sizeof(LVREV_Instance_st));
+        *phInstance = new LVREV_Instance_st{};
     }
     pLVREV_Private = (LVREV_Instance_st*)*phInstance;
-    pLVREV_Private->MemoryTable = *pMemoryTable;
 
     if (pInstanceParams->NumDelays == LVREV_DELAYLINES_4) {
         MaxBlockSize = LVREV_MAX_AP_DELAY[3];
@@ -135,12 +95,9 @@
      * Set the data, coefficient and temporary memory pointers
      */
     for (size_t i = 0; i < pInstanceParams->NumDelays; i++) {
-        pLVREV_Private->pDelay_T[i] = (LVM_FLOAT*)InstAlloc_AddMember(
-                &FastData, LVREV_MAX_T_DELAY[i] * sizeof(LVM_FLOAT));
+        pLVREV_Private->pDelay_T[i] = (LVM_FLOAT*)calloc(LVREV_MAX_T_DELAY[i], sizeof(LVM_FLOAT));
         /* Scratch for each delay line output */
-        pLVREV_Private->pScratchDelayLine[i] =
-                (LVM_FLOAT*)InstAlloc_AddMember(&Temporary, sizeof(LVM_FLOAT) * MaxBlockSize);
-        LoadConst_Float(0, pLVREV_Private->pDelay_T[i], LVREV_MAX_T_DELAY[i]);
+        pLVREV_Private->pScratchDelayLine[i] = (LVM_FLOAT*)calloc(MaxBlockSize, sizeof(LVM_FLOAT));
     }
     /* All-pass delay buffer addresses and sizes */
     for (size_t i = 0; i < LVREV_DELAYLINES_4; i++) {
@@ -149,12 +106,9 @@
     pLVREV_Private->AB_Selection = 1; /* Select smoothing A to B */
 
     /* General purpose scratch */
-    pLVREV_Private->pScratch =
-            (LVM_FLOAT*)InstAlloc_AddMember(&Temporary, sizeof(LVM_FLOAT) * MaxBlockSize);
+    pLVREV_Private->pScratch = (LVM_FLOAT*)calloc(MaxBlockSize, sizeof(LVM_FLOAT));
     /* Mono->stereo input save for end mix */
-    pLVREV_Private->pInputSave =
-            (LVM_FLOAT*)InstAlloc_AddMember(&Temporary, 2 * sizeof(LVM_FLOAT) * MaxBlockSize);
-    LoadConst_Float(0, pLVREV_Private->pInputSave, (LVM_INT16)(MaxBlockSize * 2));
+    pLVREV_Private->pInputSave = (LVM_FLOAT*)calloc(FCC_2 * MaxBlockSize, sizeof(LVM_FLOAT));
 
     /*
      * Save the instance parameters in the instance structure
@@ -269,4 +223,48 @@
     return LVREV_SUCCESS;
 }
 
+/****************************************************************************************/
+/*                                                                                      */
+/* FUNCTION:                LVREV_FreeInstance                                          */
+/*                                                                                      */
+/* DESCRIPTION:                                                                         */
+/*  This function is used to free the internal allocations of the module.               */
+/*                                                                                      */
+/* PARAMETERS:                                                                          */
+/*  hInstance               Instance handle                                             */
+/*                                                                                      */
+/* RETURNS:                                                                             */
+/*  LVREV_SUCCESS          free instance succeeded                                      */
+/*  LVREV_NULLADDRESS      Instance is NULL                                             */
+/*                                                                                      */
+/****************************************************************************************/
+LVREV_ReturnStatus_en LVREV_FreeInstance(LVREV_Handle_t hInstance) {
+    if (hInstance == LVM_NULL) {
+        return LVREV_NULLADDRESS;
+    }
+
+    LVREV_Instance_st* pLVREV_Private = (LVREV_Instance_st*)hInstance;
+
+    for (size_t i = 0; i < pLVREV_Private->InstanceParams.NumDelays; i++) {
+        if (pLVREV_Private->pDelay_T[i]) {
+            free(pLVREV_Private->pDelay_T[i]);
+            pLVREV_Private->pDelay_T[i] = LVM_NULL;
+        }
+        if (pLVREV_Private->pScratchDelayLine[i]) {
+            free(pLVREV_Private->pScratchDelayLine[i]);
+            pLVREV_Private->pScratchDelayLine[i] = LVM_NULL;
+        }
+    }
+    if (pLVREV_Private->pScratch) {
+        free(pLVREV_Private->pScratch);
+        pLVREV_Private->pScratch = LVM_NULL;
+    }
+    if (pLVREV_Private->pInputSave) {
+        free(pLVREV_Private->pInputSave);
+        pLVREV_Private->pInputSave = LVM_NULL;
+    }
+
+    delete pLVREV_Private;
+    return LVREV_SUCCESS;
+}
 /* End of file */
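With the memory table gone, the module owns all of its allocations: LVREV_GetInstanceHandle() allocates internally and the new LVREV_FreeInstance() releases everything. A minimal sketch of the resulting caller flow, reusing only names that appear in this patch (the field values are illustrative, not taken from the change):

    LVREV_InstanceParams_st params{};
    params.MaxBlockSize = 2048;              /* illustrative block size */
    params.NumDelays = LVREV_DELAYLINES_4;   /* one of the values validated above */

    LVREV_Handle_t hInstance = LVM_NULL;
    if (LVREV_GetInstanceHandle(&hInstance, &params) != LVREV_SUCCESS) {
        /* NULL pointer or out-of-range parameter */
        return;
    }
    /* ... LVREV processing ... */
    LVREV_FreeInstance(hInstance);           /* replaces freeing the caller-owned memory regions */
    hInstance = LVM_NULL;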
diff --git a/media/libeffects/lvm/lib/Reverb/src/LVREV_GetMemoryTable.cpp b/media/libeffects/lvm/lib/Reverb/src/LVREV_GetMemoryTable.cpp
deleted file mode 100644
index 02ceb16..0000000
--- a/media/libeffects/lvm/lib/Reverb/src/LVREV_GetMemoryTable.cpp
+++ /dev/null
@@ -1,184 +0,0 @@
-/*
- * Copyright (C) 2004-2010 NXP Software
- * Copyright (C) 2010 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-/****************************************************************************************/
-/*                                                                                      */
-/*  Includes                                                                            */
-/*                                                                                      */
-/****************************************************************************************/
-#include "LVREV_Private.h"
-#include "InstAlloc.h"
-
-/****************************************************************************************/
-/*                                                                                      */
-/* FUNCTION:                LVREV_GetMemoryTable                                        */
-/*                                                                                      */
-/* DESCRIPTION:                                                                         */
-/*  This function is used for memory allocation and free. It can be called in           */
-/*  two ways:                                                                           */
-/*                                                                                      */
-/*  hInstance = NULL                Returns the memory requirements                     */
-/*  hInstance = Instance handle     Returns the memory requirements and allocated       */
-/*                                  base addresses.                                     */
-/*                                                                                      */
-/*  When this function is called for memory allocation (hInstance=NULL) the memory      */
-/*  base address pointers are NULL on return.                                           */
-/*                                                                                      */
-/*  When the function is called for free (hInstance = Instance Handle) the memory       */
-/*  table returns the allocated memory and base addresses used during initialisation.   */
-/*                                                                                      */
-/* PARAMETERS:                                                                          */
-/*  hInstance               Instance Handle                                             */
-/*  pMemoryTable            Pointer to an empty memory table                            */
-/*  pInstanceParams         Pointer to the instance parameters                          */
-/*                                                                                      */
-/* RETURNS:                                                                             */
-/*  LVREV_Success           Succeeded                                                   */
-/*  LVREV_NULLADDRESS       When pMemoryTable is NULL                                   */
-/*  LVREV_NULLADDRESS       When requesting memory requirements and pInstanceParams     */
-/*                          is NULL                                                     */
-/*                                                                                      */
-/* NOTES:                                                                               */
-/*  1.  This function may be interrupted by the LVREV_Process function                  */
-/*                                                                                      */
-/****************************************************************************************/
-LVREV_ReturnStatus_en LVREV_GetMemoryTable(LVREV_Handle_t hInstance,
-                                           LVREV_MemoryTable_st* pMemoryTable,
-                                           LVREV_InstanceParams_st* pInstanceParams) {
-    INST_ALLOC SlowData;
-    INST_ALLOC FastData;
-    INST_ALLOC FastCoef;
-    INST_ALLOC Temporary;
-    LVM_UINT16 MaxBlockSize;
-
-    /*
-     * Check for error conditions
-     */
-    /* Check for NULL pointer */
-    if (pMemoryTable == LVM_NULL) {
-        return (LVREV_NULLADDRESS);
-    }
-
-    /*
-     * Check all instance parameters are in range
-     */
-    if (pInstanceParams != LVM_NULL) {
-        /*
-         * Call for memory allocation, so check the parameters
-         */
-        /* Check for a non-zero block size */
-        if (pInstanceParams->MaxBlockSize == 0) {
-            return LVREV_OUTOFRANGE;
-        }
-
-        /* Check for a valid number of delay lines */
-        if ((pInstanceParams->NumDelays != LVREV_DELAYLINES_1) &&
-            (pInstanceParams->NumDelays != LVREV_DELAYLINES_2) &&
-            (pInstanceParams->NumDelays != LVREV_DELAYLINES_4)) {
-            return LVREV_OUTOFRANGE;
-        }
-    }
-
-    /*
-     * Initialise the InstAlloc instances
-     */
-    InstAlloc_Init(&SlowData, (void*)LVM_NULL);
-    InstAlloc_Init(&FastData, (void*)LVM_NULL);
-    InstAlloc_Init(&FastCoef, (void*)LVM_NULL);
-    InstAlloc_Init(&Temporary, (void*)LVM_NULL);
-
-    /*
-     * Fill in the memory table
-     */
-    if (hInstance == LVM_NULL) {
-        /*
-         * Check for null pointers
-         */
-        if (pInstanceParams == LVM_NULL) {
-            return (LVREV_NULLADDRESS);
-        }
-
-        /*
-         * Select the maximum internal block size
-         */
-        if (pInstanceParams->NumDelays == LVREV_DELAYLINES_4) {
-            MaxBlockSize = LVREV_MAX_AP_DELAY[3];
-        } else if (pInstanceParams->NumDelays == LVREV_DELAYLINES_2) {
-            MaxBlockSize = LVREV_MAX_AP_DELAY[1];
-        } else {
-            MaxBlockSize = LVREV_MAX_AP_DELAY[0];
-        }
-
-        if (MaxBlockSize > pInstanceParams->MaxBlockSize) {
-            MaxBlockSize = pInstanceParams->MaxBlockSize;
-        }
-
-        /*
-         * Slow data memory
-         */
-        InstAlloc_AddMember(&SlowData, sizeof(LVREV_Instance_st));
-        pMemoryTable->Region[LVM_PERSISTENT_SLOW_DATA].Size = InstAlloc_GetTotal(&SlowData);
-        pMemoryTable->Region[LVM_PERSISTENT_SLOW_DATA].Type = LVM_PERSISTENT_SLOW_DATA;
-        pMemoryTable->Region[LVM_PERSISTENT_SLOW_DATA].pBaseAddress = LVM_NULL;
-
-        /*
-         * Persistent fast data memory
-         */
-        for (size_t i = 0; i < pInstanceParams->NumDelays; i++) {
-            InstAlloc_AddMember(&FastData, LVREV_MAX_T_DELAY[i] * sizeof(LVM_FLOAT));
-        }
-
-        pMemoryTable->Region[LVM_PERSISTENT_FAST_DATA].Size = InstAlloc_GetTotal(&FastData);
-        pMemoryTable->Region[LVM_PERSISTENT_FAST_DATA].Type = LVM_PERSISTENT_FAST_DATA;
-        pMemoryTable->Region[LVM_PERSISTENT_FAST_DATA].pBaseAddress = LVM_NULL;
-
-        /*
-         * Persistent fast coefficient memory
-         */
-        pMemoryTable->Region[LVM_PERSISTENT_FAST_COEF].Size = InstAlloc_GetTotal(&FastCoef);
-        pMemoryTable->Region[LVM_PERSISTENT_FAST_COEF].Type = LVM_PERSISTENT_FAST_COEF;
-        pMemoryTable->Region[LVM_PERSISTENT_FAST_COEF].pBaseAddress = LVM_NULL;
-
-        /*
-         * Temporary fast memory
-         */
-        /* General purpose scratch memory */
-        InstAlloc_AddMember(&Temporary, sizeof(LVM_FLOAT) * MaxBlockSize);
-        /* Mono->stereo input saved for end mix */
-        InstAlloc_AddMember(&Temporary, 2 * sizeof(LVM_FLOAT) * MaxBlockSize);
-        for (size_t i = 0; i < pInstanceParams->NumDelays; i++) {
-            /* A Scratch buffer for each delay line */
-            InstAlloc_AddMember(&Temporary, sizeof(LVM_FLOAT) * MaxBlockSize);
-        }
-
-        pMemoryTable->Region[LVM_TEMPORARY_FAST].Size = InstAlloc_GetTotal(&Temporary);
-        pMemoryTable->Region[LVM_TEMPORARY_FAST].Type = LVM_TEMPORARY_FAST;
-        pMemoryTable->Region[LVM_TEMPORARY_FAST].pBaseAddress = LVM_NULL;
-
-    } else {
-        LVREV_Instance_st* pLVREV_Private = (LVREV_Instance_st*)hInstance;
-
-        /*
-         * Read back memory allocation table
-         */
-        *pMemoryTable = pLVREV_Private->MemoryTable;
-    }
-
-    return (LVREV_SUCCESS);
-}
-
-/* End of file */
diff --git a/media/libeffects/lvm/lib/Reverb/src/LVREV_Private.h b/media/libeffects/lvm/lib/Reverb/src/LVREV_Private.h
index 33f8165..9a2f9ca 100644
--- a/media/libeffects/lvm/lib/Reverb/src/LVREV_Private.h
+++ b/media/libeffects/lvm/lib/Reverb/src/LVREV_Private.h
@@ -103,11 +103,9 @@
 /*                                                                                      */
 /****************************************************************************************/
 
-
 typedef struct {
     /* General */
     LVREV_InstanceParams_st InstanceParams; /* Initialisation time instance parameters */
-    LVREV_MemoryTable_st MemoryTable;       /* Memory table */
     LVREV_ControlParams_st CurrentParams;   /* Parameters being used */
     LVREV_ControlParams_st NewParams;       /* New parameters from the \
                                                calling application */
diff --git a/media/libeffects/lvm/lib/SpectrumAnalyzer/src/LVPSA_Init.cpp b/media/libeffects/lvm/lib/SpectrumAnalyzer/src/LVPSA_Init.cpp
index 9874dcc..a0f28b1 100644
--- a/media/libeffects/lvm/lib/SpectrumAnalyzer/src/LVPSA_Init.cpp
+++ b/media/libeffects/lvm/lib/SpectrumAnalyzer/src/LVPSA_Init.cpp
@@ -18,7 +18,6 @@
 #include <stdlib.h>
 #include "LVPSA.h"
 #include "LVPSA_Private.h"
-#include "InstAlloc.h"
 
 /************************************************************************************/
 /*                                                                                  */
@@ -49,10 +48,7 @@
     LVM_UINT32 BufferLength = 0;
 
     /* Set the instance handle if not already initialised */
-    *phInstance = calloc(1, sizeof(*pLVPSA_Inst));
-    if (*phInstance == LVM_NULL) {
-        return LVPSA_ERROR_NULLADDRESS;
-    }
+    *phInstance = new LVPSA_InstancePr_t{};
     pLVPSA_Inst = (LVPSA_InstancePr_t*)*phInstance;
 
     pLVPSA_Inst->pScratch = pScratch;
@@ -191,6 +187,6 @@
         free(pLVPSA_Inst->pQPD_Taps);
         pLVPSA_Inst->pQPD_Taps = LVM_NULL;
     }
-    free(pLVPSA_Inst);
+    delete pLVPSA_Inst;
     *phInstance = LVM_NULL;
 }
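The allocation change above follows the pattern used throughout this patch: an object created with a value-initialising new must be released with delete, never free(). Unlike calloc(), the default new throws on failure rather than returning LVM_NULL, which is why the explicit NULL check could be dropped. A short sketch of the pairing (only the type name is taken from the diff):

    /* new T{} value-initialises the members, matching the zero-fill that
     * calloc(1, sizeof(*pLVPSA_Inst)) used to provide. */
    LVPSA_InstancePr_t* pLVPSA_Inst = new LVPSA_InstancePr_t{};  /* was calloc() */
    /* ... use the instance ... */
    delete pLVPSA_Inst;                                          /* was free(); mixing the two is undefined behaviour */
    pLVPSA_Inst = LVM_NULL;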
diff --git a/media/libeffects/lvm/lib/StereoWidening/src/LVCS_BypassMix.cpp b/media/libeffects/lvm/lib/StereoWidening/src/LVCS_BypassMix.cpp
index efca27d..f805fca 100644
--- a/media/libeffects/lvm/lib/StereoWidening/src/LVCS_BypassMix.cpp
+++ b/media/libeffects/lvm/lib/StereoWidening/src/LVCS_BypassMix.cpp
@@ -200,6 +200,8 @@
                                       LVM_UINT16 NumSamples) {
     LVCS_Instance_t* pInstance = (LVCS_Instance_t*)hInstance;
     LVCS_BypassMix_t* pConfig = (LVCS_BypassMix_t*)&pInstance->BypassMix;
+    LVM_UINT16 destNumSamples =
+            (pInstance->Params.NrChannels == FCC_1) ? NumSamples : FCC_2 * NumSamples;
 
     /*
      * Check if the bypass mixer is enabled
@@ -209,12 +211,12 @@
          * Apply the bypass mix
          */
         LVC_MixSoft_2St_D16C31_SAT(&pConfig->Mixer_Instance, pProcessed, (LVM_FLOAT*)pUnprocessed,
-                                   pOutData, (LVM_INT16)(2 * NumSamples));
+                                   pOutData, (LVM_INT16)destNumSamples);
         /*
          * Apply output gain correction shift
          */
         Shift_Sat_Float((LVM_INT16)pConfig->Output_Shift, (LVM_FLOAT*)pOutData,
-                        (LVM_FLOAT*)pOutData, (LVM_INT16)(2 * NumSamples)); /* Left and right*/
+                        (LVM_FLOAT*)pOutData, (LVM_INT16)destNumSamples);
     }
 
     return (LVCS_SUCCESS);
diff --git a/media/libeffects/lvm/lib/StereoWidening/src/LVCS_Control.cpp b/media/libeffects/lvm/lib/StereoWidening/src/LVCS_Control.cpp
index 8f88986..89f2f3b 100644
--- a/media/libeffects/lvm/lib/StereoWidening/src/LVCS_Control.cpp
+++ b/media/libeffects/lvm/lib/StereoWidening/src/LVCS_Control.cpp
@@ -180,7 +180,9 @@
         if (pInstance->bInOperatingModeTransition != LVM_TRUE) {
             pInstance->bTimerDone = LVM_FALSE;
             pInstance->TimerParams.TimeInMs =
-                    (LVM_INT16)(((pInstance->Reverberation.DelaySize << 2) /
+                    (LVM_INT16)(((pInstance->Params.NrChannels == FCC_1
+                                          ? pInstance->Reverberation.DelaySize << 3
+                                          : pInstance->Reverberation.DelaySize << 2) /
                                  pInstance->TimerParams.SamplingRate) +
                                 1);
             LVM_Timer_Init(&pInstance->TimerInstance, &pInstance->TimerParams);
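The larger shift for mono keeps the transition time unchanged: LVCS_ReverbGenerator.cpp (later in this patch) sets DelaySize to Delay for mono but to 2 * Delay for stereo, so both branches reduce to the same value:

    mono:   TimeInMs = ((Delay       << 3) / SamplingRate) + 1 = (8 * Delay / SamplingRate) + 1
    stereo: TimeInMs = (((2 * Delay) << 2) / SamplingRate) + 1 = (8 * Delay / SamplingRate) + 1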
diff --git a/media/libeffects/lvm/lib/StereoWidening/src/LVCS_Equaliser.cpp b/media/libeffects/lvm/lib/StereoWidening/src/LVCS_Equaliser.cpp
index c8ad94e..2b628f1 100644
--- a/media/libeffects/lvm/lib/StereoWidening/src/LVCS_Equaliser.cpp
+++ b/media/libeffects/lvm/lib/StereoWidening/src/LVCS_Equaliser.cpp
@@ -72,9 +72,10 @@
 
         std::array<LVM_FLOAT, android::audio_utils::kBiquadNumCoefs> coefs = {
                 pEqualiserCoefTable[Offset].A0, pEqualiserCoefTable[Offset].A1,
-                pEqualiserCoefTable[Offset].A2, -(pEqualiserCoefTable[Offset].B1),
-                -(pEqualiserCoefTable[Offset].B2)};
-        pInstance->pEqBiquad.reset(new android::audio_utils::BiquadFilter<LVM_FLOAT>(FCC_2, coefs));
+                pEqualiserCoefTable[Offset].A2, pEqualiserCoefTable[Offset].B1,
+                pEqualiserCoefTable[Offset].B2};
+        pInstance->pEqBiquad.reset(new android::audio_utils::BiquadFilter<LVM_FLOAT>(
+                (pParams->NrChannels == FCC_1) ? FCC_1 : FCC_2, coefs));
     }
 
     return (LVCS_SUCCESS);
diff --git a/media/libeffects/lvm/lib/StereoWidening/src/LVCS_Init.cpp b/media/libeffects/lvm/lib/StereoWidening/src/LVCS_Init.cpp
index ba3202f..dd1baf3 100644
--- a/media/libeffects/lvm/lib/StereoWidening/src/LVCS_Init.cpp
+++ b/media/libeffects/lvm/lib/StereoWidening/src/LVCS_Init.cpp
@@ -55,10 +55,7 @@
      * Create the instance handle if not already initialised
      */
     if (*phInstance == LVM_NULL) {
-        *phInstance = calloc(1, sizeof(*pInstance));
-    }
-    if (*phInstance == LVM_NULL) {
-        return LVCS_NULLADDRESS;
+        *phInstance = new LVCS_Instance_t{};
     }
     pInstance = (LVCS_Instance_t*)*phInstance;
 
@@ -123,7 +120,7 @@
     if (pInstance == LVM_NULL) {
         return;
     }
-    free(pInstance);
+    delete pInstance;
     *phInstance = LVM_NULL;
     return;
 }
diff --git a/media/libeffects/lvm/lib/StereoWidening/src/LVCS_Process.cpp b/media/libeffects/lvm/lib/StereoWidening/src/LVCS_Process.cpp
index d18f2c3..6af0f75 100644
--- a/media/libeffects/lvm/lib/StereoWidening/src/LVCS_Process.cpp
+++ b/media/libeffects/lvm/lib/StereoWidening/src/LVCS_Process.cpp
@@ -75,16 +75,6 @@
     LVM_INT32 channels = pInstance->Params.NrChannels;
 #define NrFrames NumSamples  // alias for clarity
 
-    /*In case of mono processing, stereo input is created from mono
-     *and stored in pInData before applying any of the effects.
-     *However we do not update the value pInstance->Params.NrChannels
-     *at this point.
-     *So to treat the pInData as stereo we are setting channels to 2
-     */
-    if (channels == 1) {
-        channels = 2;
-    }
-
     pScratch = (LVM_FLOAT*)pInstance->pScratch;
 
     /*
@@ -97,11 +87,16 @@
      */
     pInput = pScratch + (2 * NrFrames);
     pStIn = pScratch + ((LVCS_SCRATCHBUFFERS - 2) * NrFrames);
-    /* The first two channel data is extracted from the input data and
-     * copied into pInput buffer
-     */
-    Copy_Float_Mc_Stereo((LVM_FLOAT*)pInData, (LVM_FLOAT*)pInput, NrFrames, channels);
-    Copy_Float((LVM_FLOAT*)pInput, (LVM_FLOAT*)pStIn, (LVM_INT16)(2 * NrFrames));
+    if (channels == FCC_1) {
+        Copy_Float((LVM_FLOAT*)pInData, (LVM_FLOAT*)pInput, (LVM_INT16)NrFrames);
+        Copy_Float((LVM_FLOAT*)pInput, (LVM_FLOAT*)pStIn, (LVM_INT16)NrFrames);
+    } else {
+        /* The first two channels of data are extracted from the input data and
+         * copied into the pInput buffer
+         */
+        Copy_Float_Mc_Stereo((LVM_FLOAT*)pInData, (LVM_FLOAT*)pInput, NrFrames, channels);
+        Copy_Float((LVM_FLOAT*)pInput, (LVM_FLOAT*)pStIn, (LVM_INT16)(FCC_2 * NrFrames));
+    }
     /*
      * Call the stereo enhancer
      */
@@ -172,10 +167,10 @@
     LVCS_ReturnStatus_en err;
     /*Extract number of Channels info*/
     LVM_INT32 channels = pInstance->Params.NrChannels;
+    LVM_UINT16 destNumSamples = (channels == FCC_1) ? NumSamples : FCC_2 * NumSamples;
+    LVM_INT32 compGainInterval =
+            (channels == FCC_1) ? LVCS_COMPGAINFRAME : FCC_2 * LVCS_COMPGAINFRAME;
 #define NrFrames NumSamples  // alias for clarity
-    if (channels == 1) {
-        channels = 2;
-    }
     /*
      * Check the number of samples is not too large
      */
@@ -227,7 +222,7 @@
 
             if (NumSamples < LVCS_COMPGAINFRAME) {
                 NonLinComp_Float(Gain, /* Compressor gain setting */
-                                 pStereoOut, pStereoOut, (LVM_INT32)(2 * NrFrames));
+                                 pStereoOut, pStereoOut, (LVM_INT32)destNumSamples);
             } else {
                 LVM_FLOAT GainStep;
                 LVM_FLOAT FinalGain;
@@ -266,12 +261,15 @@
 
                     if (SampleToProcess > LVCS_COMPGAINFRAME) {
                         NonLinComp_Float(Gain, /* Compressor gain setting */
-                                         pOutPtr, pOutPtr, (LVM_INT32)(2 * LVCS_COMPGAINFRAME));
-                        pOutPtr += (2 * LVCS_COMPGAINFRAME);
+                                         pOutPtr, pOutPtr, compGainInterval);
+                        pOutPtr += compGainInterval;
                         SampleToProcess = (LVM_INT16)(SampleToProcess - LVCS_COMPGAINFRAME);
                     } else {
                         NonLinComp_Float(Gain, /* Compressor gain setting */
-                                         pOutPtr, pOutPtr, (LVM_INT32)(2 * SampleToProcess));
+                                         pOutPtr, pOutPtr,
+                                         (channels == FCC_1)
+                                                 ? (LVM_INT32)(SampleToProcess)
+                                                 : (LVM_INT32)(FCC_2 * SampleToProcess));
                         SampleToProcess = 0;
                     }
                 }
@@ -297,7 +295,7 @@
                 LVM_Timer(&pInstance->TimerInstance, (LVM_INT16)NumSamples);
             }
         }
-        Copy_Float_Stereo_Mc(pInData, pStereoOut, pOutData, NrFrames, channels);
+        Copy_Float_Stereo_Mc(pInData, (const LVM_FLOAT*)pStereoOut, pOutData, NrFrames, channels);
     } else {
         if (pInData != pOutData) {
             /*
diff --git a/media/libeffects/lvm/lib/StereoWidening/src/LVCS_ReverbGenerator.cpp b/media/libeffects/lvm/lib/StereoWidening/src/LVCS_ReverbGenerator.cpp
index 15acda9..c5b6598 100644
--- a/media/libeffects/lvm/lib/StereoWidening/src/LVCS_ReverbGenerator.cpp
+++ b/media/libeffects/lvm/lib/StereoWidening/src/LVCS_ReverbGenerator.cpp
@@ -65,7 +65,6 @@
     LVCS_ReverbGenerator_t* pConfig = (LVCS_ReverbGenerator_t*)&pInstance->Reverberation;
     const BiquadA012B12CoefsSP_t* pReverbCoefTable;
 
-
     /*
      * Initialise the delay and filters if:
      *  - the sample rate has changed
@@ -79,7 +78,8 @@
          */
         Delay = (LVM_UINT16)LVCS_StereoDelayCS[(LVM_UINT16)pParams->SampleRate];
 
-        pConfig->DelaySize = (LVM_INT16)(2 * Delay);
+        pConfig->DelaySize =
+                (pParams->NrChannels == FCC_1) ? (LVM_INT16)Delay : (LVM_INT16)(FCC_2 * Delay);
         pConfig->DelayOffset = 0;
         LoadConst_Float(0,                                      /* Value */
                         (LVM_FLOAT*)&pConfig->StereoSamples[0], /* Destination */
@@ -93,10 +93,10 @@
 
         std::array<LVM_FLOAT, android::audio_utils::kBiquadNumCoefs> coefs = {
                 pReverbCoefTable[Offset].A0, pReverbCoefTable[Offset].A1,
-                pReverbCoefTable[Offset].A2, -(pReverbCoefTable[Offset].B1),
-                -(pReverbCoefTable[Offset].B2)};
-        pInstance->pRevBiquad.reset(
-                new android::audio_utils::BiquadFilter<LVM_FLOAT>(FCC_2, coefs));
+                pReverbCoefTable[Offset].A2, pReverbCoefTable[Offset].B1,
+                pReverbCoefTable[Offset].B2};
+        pInstance->pRevBiquad.reset(new android::audio_utils::BiquadFilter<LVM_FLOAT>(
+                (pParams->NrChannels == FCC_1) ? FCC_1 : FCC_2, coefs));
 
         /*
          * Setup the mixer
@@ -155,6 +155,9 @@
     LVCS_Instance_t* pInstance = (LVCS_Instance_t*)hInstance;
     LVCS_ReverbGenerator_t* pConfig = (LVCS_ReverbGenerator_t*)&pInstance->Reverberation;
     LVM_FLOAT* pScratch;
+    LVM_INT32 NumChannels = pInstance->Params.NrChannels;
+    LVM_UINT16 destNumSamples =
+            (pInstance->Params.NrChannels == FCC_1) ? NumSamples : FCC_2 * NumSamples;
 
     pScratch = (LVM_FLOAT*)pInstance->pScratch;
 
@@ -165,9 +168,9 @@
         /*
          * Reverb not required so just copy the data
          */
-        Copy_Float((LVM_FLOAT*)pInData,          /* Source */
-                   (LVM_FLOAT*)pOutData,         /* Destination */
-                   (LVM_INT16)(2 * NumSamples)); /* Left and right */
+        Copy_Float((LVM_FLOAT*)pInData,        /* Source */
+                   (LVM_FLOAT*)pOutData,       /* Destination */
+                   (LVM_INT16)destNumSamples); /* Number of frames */
     }
 
     /*
@@ -188,9 +191,9 @@
         /*
          * Copy the input data to the scratch memory
          */
-        Copy_Float((LVM_FLOAT*)pInData,          /* Source */
-                   (LVM_FLOAT*)pScratch,         /* Destination */
-                   (LVM_INT16)(2 * NumSamples)); /* Left and right */
+        Copy_Float((LVM_FLOAT*)pInData,        /* Source */
+                   (LVM_FLOAT*)pScratch,       /* Destination */
+                   (LVM_INT16)destNumSamples); /* Number of frames */
 
         /*
          * Filter the data
@@ -198,13 +201,13 @@
         pInstance->pRevBiquad->process(pScratch, pScratch, NumSamples);
 
         Mult3s_Float((LVM_FLOAT*)pScratch, pConfig->ReverbLevel, (LVM_FLOAT*)pScratch,
-                     (LVM_INT16)(2 * NumSamples));
+                     (LVM_INT16)destNumSamples); /* Number of frames */
 
         /*
          * Apply the delay mix
          */
         DelayMix_Float((LVM_FLOAT*)pScratch, &pConfig->StereoSamples[0], pConfig->DelaySize,
-                       pOutData, &pConfig->DelayOffset, (LVM_INT16)NumSamples);
+                       pOutData, &pConfig->DelayOffset, (LVM_INT16)NumSamples, NumChannels);
     }
 
     return (LVCS_SUCCESS);
diff --git a/media/libeffects/lvm/lib/StereoWidening/src/LVCS_StereoEnhancer.cpp b/media/libeffects/lvm/lib/StereoWidening/src/LVCS_StereoEnhancer.cpp
index 00bb26c..3ca25f9 100644
--- a/media/libeffects/lvm/lib/StereoWidening/src/LVCS_StereoEnhancer.cpp
+++ b/media/libeffects/lvm/lib/StereoWidening/src/LVCS_StereoEnhancer.cpp
@@ -55,7 +55,6 @@
     LVCS_Instance_t* pInstance = (LVCS_Instance_t*)hInstance;
     const BiquadA012B12CoefsSP_t* pSESideCoefs;
 
-
     /*
      * If the sample rate or speaker type has changed update the filters
      */
@@ -69,7 +68,7 @@
 
         std::array<LVM_FLOAT, android::audio_utils::kBiquadNumCoefs> coefs = {
                 LVCS_SEMidCoefTable[Offset].A0, LVCS_SEMidCoefTable[Offset].A1, 0.0,
-                -(LVCS_SEMidCoefTable[Offset].B1), 0.0};
+                LVCS_SEMidCoefTable[Offset].B1, 0.0};
         pInstance->pSEMidBiquad.reset(
                 new android::audio_utils::BiquadFilter<LVM_FLOAT>(FCC_1, coefs));
 
@@ -78,7 +77,7 @@
 
         /* Side filter */
         coefs = {pSESideCoefs[Offset].A0, pSESideCoefs[Offset].A1, pSESideCoefs[Offset].A2,
-                 -(pSESideCoefs[Offset].B1), -(pSESideCoefs[Offset].B2)};
+                 pSESideCoefs[Offset].B1, pSESideCoefs[Offset].B2};
         pInstance->pSESideBiquad.reset(
                 new android::audio_utils::BiquadFilter<LVM_FLOAT>(FCC_1, coefs));
     }
@@ -129,6 +128,8 @@
     LVCS_StereoEnhancer_t* pConfig = (LVCS_StereoEnhancer_t*)&pInstance->StereoEnhancer;
     LVM_FLOAT* pScratch;
     pScratch = (LVM_FLOAT*)pInstance->pScratch;
+    LVM_INT32 NumChannels = pInstance->Params.NrChannels;
+    LVM_UINT16 destNumSamples = (NumChannels == FCC_1) ? NumSamples : FCC_2 * NumSamples;
     /*
      * Check if the Stereo Enhancer is enabled
      */
@@ -136,7 +137,12 @@
         /*
          * Convert from stereo to middle and side
          */
-        From2iToMS_Float(pInData, pScratch, pScratch + NumSamples, (LVM_INT16)NumSamples);
+        if (NumChannels == 1) {
+            // Copy same input to scratch as Middle data
+            Copy_Float((LVM_FLOAT*)pInData, (LVM_FLOAT*)pScratch, (LVM_INT16)NumSamples);
+        } else {
+            From2iToMS_Float(pInData, pScratch, pScratch + NumSamples, (LVM_INT16)NumSamples);
+        }
 
         /*
          * Apply filter to the middle signal
@@ -159,18 +165,23 @@
                                               NumSamples);
         }
 
-        /*
-         * Convert from middle and side to stereo
-         */
-        MSTo2i_Sat_Float(pScratch, pScratch + NumSamples, pOutData, (LVM_INT16)NumSamples);
+        if (NumChannels == 1) {
+            // Copy processed Middle data from scratch to pOutData
+            Copy_Float((LVM_FLOAT*)pScratch, (LVM_FLOAT*)pOutData, (LVM_INT16)NumSamples);
+        } else {
+            /*
+             * Convert from middle and side to stereo
+             */
+            MSTo2i_Sat_Float(pScratch, pScratch + NumSamples, pOutData, (LVM_INT16)NumSamples);
+        }
 
     } else {
         /*
          * The stereo enhancer is disabled so just copy the data
          */
-        Copy_Float((LVM_FLOAT*)pInData,          /* Source */
-                   (LVM_FLOAT*)pOutData,         /* Destination */
-                   (LVM_INT16)(2 * NumSamples)); /* Left and right */
+        Copy_Float((LVM_FLOAT*)pInData,        /* Source */
+                   (LVM_FLOAT*)pOutData,       /* Destination */
+                   (LVM_INT16)destNumSamples); /* Number of frames */
     }
 
     return (LVCS_SUCCESS);
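The mono branches added above work because a single channel already is the middle signal of a mid/side decomposition: only the mid biquad needs to run and the side path can be skipped. A sketch of the stereo-path conversion performed by From2iToMS_Float / MSTo2i_Sat_Float, with the 0.5 scaling assumed rather than taken from this patch:

    /* Stereo -> mid/side on interleaved L/R data. With mono input the side term
     * is identically zero, which is why the new branch simply copies the buffer. */
    static void toMidSideSketch(const float* lr, float* mid, float* side, int frames) {
        for (int i = 0; i < frames; ++i) {
            mid[i]  = 0.5f * (lr[2 * i] + lr[2 * i + 1]);
            side[i] = 0.5f * (lr[2 * i] - lr[2 * i + 1]);
        }
    }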
diff --git a/media/libeffects/lvm/tests/Android.bp b/media/libeffects/lvm/tests/Android.bp
index 8627c13..9939ed1 100644
--- a/media/libeffects/lvm/tests/Android.bp
+++ b/media/libeffects/lvm/tests/Android.bp
@@ -10,6 +10,56 @@
 }
 
 cc_test {
+    name: "EffectReverbTest",
+    vendor: true,
+    gtest: true,
+    host_supported: true,
+    srcs: [
+        "EffectReverbTest.cpp",
+        "EffectTestHelper.cpp",
+    ],
+    include_dirs: [
+        "frameworks/av/media/libeffects/lvm/lib/Common/lib",
+        "frameworks/av/media/libeffects/lvm/wrapper/Reverb",
+    ],
+    static_libs: [
+        "libaudioutils",
+        "libreverb",
+        "libreverbwrapper",
+    ],
+    shared_libs: [
+        "liblog",
+    ],
+    header_libs: [
+        "libaudioeffects",
+        "libhardware_headers",
+    ],
+}
+
+cc_test {
+    name: "EffectBundleTest",
+    vendor: true,
+    gtest: true,
+    host_supported: true,
+    test_suites: ["device-tests"],
+    srcs: [
+        "EffectBundleTest.cpp",
+        "EffectTestHelper.cpp",
+    ],
+    static_libs: [
+        "libaudioutils",
+        "libbundlewrapper",
+        "libmusicbundle",
+    ],
+    shared_libs: [
+        "liblog",
+    ],
+    header_libs: [
+        "libhardware_headers",
+    ],
+}
+
+cc_test {
     name: "lvmtest",
     host_supported: false,
     proprietary: true,
@@ -54,8 +104,9 @@
 
 cc_test {
     name: "reverb_test",
-    host_supported: false,
+    host_supported: true,
     proprietary: true,
+    gtest: false,
 
     include_dirs: [
         "frameworks/av/media/libeffects/lvm/wrapper/Reverb",
diff --git a/media/libeffects/lvm/tests/EffectBundleTest.cpp b/media/libeffects/lvm/tests/EffectBundleTest.cpp
new file mode 100644
index 0000000..881ffb1
--- /dev/null
+++ b/media/libeffects/lvm/tests/EffectBundleTest.cpp
@@ -0,0 +1,199 @@
+/*
+ * Copyright 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "EffectTestHelper.h"
+using namespace android;
+
+// Update isBassBoost, if the order of effects is updated
+constexpr effect_uuid_t kEffectUuids[] = {
+        // NXP SW BassBoost
+        {0x8631f300, 0x72e2, 0x11df, 0xb57e, {0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b}},
+        // NXP SW Virtualizer
+        {0x1d4033c0, 0x8557, 0x11df, 0x9f2d, {0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b}},
+        // NXP SW Equalizer
+        {0xce772f20, 0x847d, 0x11df, 0xbb17, {0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b}},
+        // NXP SW Volume
+        {0x119341a0, 0x8469, 0x11df, 0x81f9, {0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b}},
+};
+
+static bool isBassBoost(const effect_uuid_t* uuid) {
+    // Update this, if the order of effects in kEffectUuids is updated
+    return uuid == &kEffectUuids[0];
+}
+
+constexpr size_t kNumEffectUuids = std::size(kEffectUuids);
+
+typedef std::tuple<int, int, int, int, int> SingleEffectTestParam;
+class SingleEffectTest : public ::testing::TestWithParam<SingleEffectTestParam> {
+  public:
+    SingleEffectTest()
+        : mChMask(EffectTestHelper::kChMasks[std::get<0>(GetParam())]),
+          mChannelCount(audio_channel_count_from_out_mask(mChMask)),
+          mSampleRate(EffectTestHelper::kSampleRates[std::get<1>(GetParam())]),
+          mFrameCount(EffectTestHelper::kFrameCounts[std::get<2>(GetParam())]),
+          mLoopCount(EffectTestHelper::kLoopCounts[std::get<3>(GetParam())]),
+          mTotalFrameCount(mFrameCount * mLoopCount),
+          mUuid(&kEffectUuids[std::get<4>(GetParam())]) {}
+
+    const size_t mChMask;
+    const size_t mChannelCount;
+    const size_t mSampleRate;
+    const size_t mFrameCount;
+    const size_t mLoopCount;
+    const size_t mTotalFrameCount;
+    const effect_uuid_t* mUuid;
+};
+
+// Tests applying a single effect
+TEST_P(SingleEffectTest, SimpleProcess) {
+    SCOPED_TRACE(testing::Message()
+                 << "chMask: " << mChMask << " sampleRate: " << mSampleRate
+                 << " frameCount: " << mFrameCount << " loopCount: " << mLoopCount);
+
+    EffectTestHelper effect(mUuid, mChMask, mChMask, mSampleRate, mFrameCount, mLoopCount);
+
+    ASSERT_NO_FATAL_FAILURE(effect.createEffect());
+    ASSERT_NO_FATAL_FAILURE(effect.setConfig());
+
+    // Initialize input buffer with deterministic pseudo-random values
+    std::vector<float> input(mTotalFrameCount * mChannelCount);
+    std::vector<float> output(mTotalFrameCount * mChannelCount);
+    std::minstd_rand gen(mChMask);
+    std::uniform_real_distribution<> dis(-1.0f, 1.0f);
+    for (auto& in : input) {
+        in = dis(gen);
+    }
+    ASSERT_NO_FATAL_FAILURE(effect.process(input.data(), output.data()));
+    ASSERT_NO_FATAL_FAILURE(effect.releaseEffect());
+}
+
+INSTANTIATE_TEST_SUITE_P(
+        EffectBundleTestAll, SingleEffectTest,
+        ::testing::Combine(::testing::Range(0, (int)EffectTestHelper::kNumChMasks),
+                           ::testing::Range(0, (int)EffectTestHelper::kNumSampleRates),
+                           ::testing::Range(0, (int)EffectTestHelper::kNumFrameCounts),
+                           ::testing::Range(0, (int)EffectTestHelper::kNumLoopCounts),
+                           ::testing::Range(0, (int)kNumEffectUuids)));
+
+typedef std::tuple<int, int, int, int> SingleEffectComparisonTestParam;
+class SingleEffectComparisonTest
+    : public ::testing::TestWithParam<SingleEffectComparisonTestParam> {
+  public:
+    SingleEffectComparisonTest()
+        : mSampleRate(EffectTestHelper::kSampleRates[std::get<0>(GetParam())]),
+          mFrameCount(EffectTestHelper::kFrameCounts[std::get<1>(GetParam())]),
+          mLoopCount(EffectTestHelper::kLoopCounts[std::get<2>(GetParam())]),
+          mTotalFrameCount(mFrameCount * mLoopCount),
+          mUuid(&kEffectUuids[std::get<3>(GetParam())]) {}
+
+    const size_t mSampleRate;
+    const size_t mFrameCount;
+    const size_t mLoopCount;
+    const size_t mTotalFrameCount;
+    const effect_uuid_t* mUuid;
+};
+
+// Compares first two channels in multi-channel output to stereo output when same effect is applied
+TEST_P(SingleEffectComparisonTest, SimpleProcess) {
+    SCOPED_TRACE(testing::Message() << " sampleRate: " << mSampleRate << " frameCount: "
+                                    << mFrameCount << " loopCount: " << mLoopCount);
+
+    // Initialize mono input buffer with deterministic pseudo-random values
+    std::vector<float> monoInput(mTotalFrameCount);
+
+    std::minstd_rand gen(mSampleRate);
+    std::uniform_real_distribution<> dis(-1.0f, 1.0f);
+    for (auto& in : monoInput) {
+        in = dis(gen);
+    }
+
+    // Generate stereo by repeating mono channel data
+    std::vector<float> stereoInput(mTotalFrameCount * FCC_2);
+    adjust_channels(monoInput.data(), FCC_1, stereoInput.data(), FCC_2, sizeof(float),
+                    mTotalFrameCount * sizeof(float) * FCC_1);
+
+    // Apply effect on stereo channels
+    EffectTestHelper stereoEffect(mUuid, AUDIO_CHANNEL_OUT_STEREO, AUDIO_CHANNEL_OUT_STEREO,
+                                  mSampleRate, mFrameCount, mLoopCount);
+
+    ASSERT_NO_FATAL_FAILURE(stereoEffect.createEffect());
+    ASSERT_NO_FATAL_FAILURE(stereoEffect.setConfig());
+
+    std::vector<float> stereoOutput(mTotalFrameCount * FCC_2);
+    ASSERT_NO_FATAL_FAILURE(stereoEffect.process(stereoInput.data(), stereoOutput.data()));
+    ASSERT_NO_FATAL_FAILURE(stereoEffect.releaseEffect());
+
+    // Convert stereo float data to stereo int16_t to be used as reference
+    std::vector<int16_t> stereoRefI16(mTotalFrameCount * FCC_2);
+    memcpy_to_i16_from_float(stereoRefI16.data(), stereoOutput.data(), mTotalFrameCount * FCC_2);
+
+    for (size_t chMask : EffectTestHelper::kChMasks) {
+        size_t channelCount = audio_channel_count_from_out_mask(chMask);
+        EffectTestHelper testEffect(mUuid, chMask, chMask, mSampleRate, mFrameCount, mLoopCount);
+
+        ASSERT_NO_FATAL_FAILURE(testEffect.createEffect());
+        ASSERT_NO_FATAL_FAILURE(testEffect.setConfig());
+
+        std::vector<float> testInput(mTotalFrameCount * channelCount);
+
+        // Repeat mono channel data to all the channels
+        // adjust_channels() zero fills channels > 2, hence can't be used here
+        for (size_t i = 0; i < mTotalFrameCount; ++i) {
+            auto* fp = &testInput[i * channelCount];
+            std::fill(fp, fp + channelCount, monoInput[i]);
+        }
+
+        std::vector<float> testOutput(mTotalFrameCount * channelCount);
+        ASSERT_NO_FATAL_FAILURE(testEffect.process(testInput.data(), testOutput.data()));
+        ASSERT_NO_FATAL_FAILURE(testEffect.releaseEffect());
+
+        // Extract first two channels
+        std::vector<float> stereoTestOutput(mTotalFrameCount * FCC_2);
+        adjust_channels(testOutput.data(), channelCount, stereoTestOutput.data(), FCC_2,
+                        sizeof(float), mTotalFrameCount * sizeof(float) * channelCount);
+
+        // Convert the test data to int16_t
+        std::vector<int16_t> stereoTestI16(mTotalFrameCount * FCC_2);
+        memcpy_to_i16_from_float(stereoTestI16.data(), stereoTestOutput.data(),
+                                 mTotalFrameCount * FCC_2);
+
+        if (isBassBoost(mUuid)) {
+            // SNR must be above the threshold
+            float snr = computeSnr<int16_t>(stereoRefI16.data(), stereoTestI16.data(),
+                                            mTotalFrameCount * FCC_2);
+            ASSERT_GT(snr, EffectTestHelper::kSNRThreshold)
+                    << "SNR " << snr << " is lower than " << EffectTestHelper::kSNRThreshold;
+        } else {
+            ASSERT_EQ(0,
+                      memcmp(stereoRefI16.data(), stereoTestI16.data(), mTotalFrameCount * FCC_2))
+                    << "First two channels do not match with stereo output \n";
+        }
+    }
+}
+
+INSTANTIATE_TEST_SUITE_P(
+        EffectBundleTestAll, SingleEffectComparisonTest,
+        ::testing::Combine(::testing::Range(0, (int)EffectTestHelper::kNumSampleRates),
+                           ::testing::Range(0, (int)EffectTestHelper::kNumFrameCounts),
+                           ::testing::Range(0, (int)EffectTestHelper::kNumLoopCounts),
+                           ::testing::Range(0, (int)kNumEffectUuids)));
+
+int main(int argc, char** argv) {
+    ::testing::InitGoogleTest(&argc, argv);
+    int status = RUN_ALL_TESTS();
+    ALOGV("Test result = %d\n", status);
+    return status;
+}
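computeSnr() and kSNRThreshold come from EffectTestHelper.h, which is not part of this hunk. As a rough guide to what the bass-boost comparison above measures, an energy-ratio SNR helper might look like the following hypothetical sketch (not the helper the test actually links against):

    #include <cmath>
    #include <cstddef>

    template <typename T>
    float computeSnrSketch(const T* ref, const T* tst, size_t count) {
        double signal = 0.0, noise = 0.0;
        for (size_t i = 0; i < count; ++i) {
            const double s = static_cast<double>(ref[i]);
            const double d = static_cast<double>(ref[i]) - static_cast<double>(tst[i]);
            signal += s * s;
            noise += d * d;
        }
        /* Identical buffers: report a large finite SNR instead of dividing by zero. */
        if (noise == 0.0) return 200.0f;
        return 10.0f * static_cast<float>(std::log10(signal / noise));
    }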
diff --git a/media/libeffects/lvm/tests/EffectReverbTest.cpp b/media/libeffects/lvm/tests/EffectReverbTest.cpp
new file mode 100644
index 0000000..59453eb
--- /dev/null
+++ b/media/libeffects/lvm/tests/EffectReverbTest.cpp
@@ -0,0 +1,239 @@
+/*
+ * Copyright 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <audio_effects/effect_presetreverb.h>
+#include <VectorArithmetic.h>
+
+#include "EffectTestHelper.h"
+using namespace android;
+
+constexpr effect_uuid_t kEffectUuids[] = {
+        // NXP SW insert environmental reverb
+        {0xc7a511a0, 0xa3bb, 0x11df, 0x860e, {0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b}},
+        // NXP SW insert preset reverb
+        {0x172cdf00, 0xa3bc, 0x11df, 0xa72f, {0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b}},
+        // NXP SW auxiliary environmental reverb
+        {0x4a387fc0, 0x8ab3, 0x11df, 0x8bad, {0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b}},
+        // NXP SW auxiliary preset reverb
+        {0xf29a1400, 0xa3bb, 0x11df, 0x8ddc, {0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b}},
+};
+
+constexpr size_t kNumEffectUuids = std::size(kEffectUuids);
+
+static bool isAuxMode(const effect_uuid_t* uuid) {
+    // Update this, if the order of effects in kEffectUuids is updated
+    return (uuid == &kEffectUuids[2] || uuid == &kEffectUuids[3]);
+}
+
+constexpr int kPresets[] = {
+        REVERB_PRESET_NONE,      REVERB_PRESET_SMALLROOM,  REVERB_PRESET_MEDIUMROOM,
+        REVERB_PRESET_LARGEROOM, REVERB_PRESET_MEDIUMHALL, REVERB_PRESET_LARGEHALL,
+        REVERB_PRESET_PLATE,
+};
+
+constexpr size_t kNumPresets = std::size(kPresets);
+
+typedef std::tuple<int, int, int, int, int, int> SingleEffectTestParam;
+class SingleEffectTest : public ::testing::TestWithParam<SingleEffectTestParam> {
+  public:
+    SingleEffectTest()
+        : mSampleRate(EffectTestHelper::kSampleRates[std::get<1>(GetParam())]),
+          mFrameCount(EffectTestHelper::kFrameCounts[std::get<2>(GetParam())]),
+          mLoopCount(EffectTestHelper::kLoopCounts[std::get<3>(GetParam())]),
+          mTotalFrameCount(mFrameCount * mLoopCount),
+          mUuid(&kEffectUuids[std::get<4>(GetParam())]),
+          mInChMask(isAuxMode(mUuid) ? AUDIO_CHANNEL_OUT_MONO
+                                     : EffectTestHelper::kChMasks[std::get<0>(GetParam())]),
+          mInChannelCount(audio_channel_count_from_out_mask(mInChMask)),
+          mOutChMask(EffectTestHelper::kChMasks[std::get<0>(GetParam())]),
+          mOutChannelCount(audio_channel_count_from_out_mask(mOutChMask)),
+          mPreset(kPresets[std::get<5>(GetParam())]) {}
+
+    const size_t mSampleRate;
+    const size_t mFrameCount;
+    const size_t mLoopCount;
+    const size_t mTotalFrameCount;
+    const effect_uuid_t* mUuid;
+    const size_t mInChMask;
+    const size_t mInChannelCount;
+    const size_t mOutChMask;
+    const size_t mOutChannelCount;
+    const size_t mPreset;
+};
+
+// Tests applying a single effect
+TEST_P(SingleEffectTest, SimpleProcess) {
+    SCOPED_TRACE(testing::Message() << "outChMask: " << mOutChMask << " sampleRate: " << mSampleRate
+                                    << " frameCount: " << mFrameCount
+                                    << " loopCount: " << mLoopCount << " preset: " << mPreset);
+
+    EffectTestHelper effect(mUuid, mInChMask, mOutChMask, mSampleRate, mFrameCount, mLoopCount);
+
+    ASSERT_NO_FATAL_FAILURE(effect.createEffect());
+    ASSERT_NO_FATAL_FAILURE(effect.setConfig());
+    ASSERT_NO_FATAL_FAILURE(effect.setParam(REVERB_PARAM_PRESET, mPreset));
+
+    // Initialize input buffer with deterministic pseudo-random values
+    std::vector<float> input(mTotalFrameCount * mInChannelCount);
+    std::vector<float> output(mTotalFrameCount * mOutChannelCount);
+    std::minstd_rand gen(mOutChMask);
+    std::uniform_real_distribution<> dis(-1.0f, 1.0f);
+    for (auto& in : input) {
+        in = dis(gen);
+    }
+    ASSERT_NO_FATAL_FAILURE(effect.process(input.data(), output.data()));
+    ASSERT_NO_FATAL_FAILURE(effect.releaseEffect());
+}
+
+INSTANTIATE_TEST_SUITE_P(
+        EffectReverbTestAll, SingleEffectTest,
+        ::testing::Combine(::testing::Range(0, (int)EffectTestHelper::kNumChMasks),
+                           ::testing::Range(0, (int)EffectTestHelper::kNumSampleRates),
+                           ::testing::Range(0, (int)EffectTestHelper::kNumFrameCounts),
+                           ::testing::Range(0, (int)EffectTestHelper::kNumLoopCounts),
+                           ::testing::Range(0, (int)kNumEffectUuids),
+                           ::testing::Range(0, (int)kNumPresets)));
+
+typedef std::tuple<int, int, int, int, int> SingleEffectComparisonTestParam;
+class SingleEffectComparisonTest
+    : public ::testing::TestWithParam<SingleEffectComparisonTestParam> {
+  public:
+    SingleEffectComparisonTest()
+        : mSampleRate(EffectTestHelper::kSampleRates[std::get<0>(GetParam())]),
+          mFrameCount(EffectTestHelper::kFrameCounts[std::get<1>(GetParam())]),
+          mLoopCount(EffectTestHelper::kLoopCounts[std::get<2>(GetParam())]),
+          mTotalFrameCount(mFrameCount * mLoopCount),
+          mUuid(&kEffectUuids[std::get<3>(GetParam())]),
+          mPreset(kPresets[std::get<4>(GetParam())]) {}
+
+    const size_t mSampleRate;
+    const size_t mFrameCount;
+    const size_t mLoopCount;
+    const size_t mTotalFrameCount;
+    const effect_uuid_t* mUuid;
+    const size_t mPreset;
+};
+
+// Compares first two channels in multi-channel output to stereo output when same effect is applied
+TEST_P(SingleEffectComparisonTest, SimpleProcess) {
+    SCOPED_TRACE(testing::Message()
+                 << " sampleRate: " << mSampleRate << " frameCount: " << mFrameCount
+                 << " loopCount: " << mLoopCount << " preset: " << mPreset);
+
+    // Initialize mono input buffer with deterministic pseudo-random values
+    std::vector<float> monoInput(mTotalFrameCount);
+
+    std::minstd_rand gen(mSampleRate);
+    std::uniform_real_distribution<> dis(-1.0f, 1.0f);
+    for (auto& in : monoInput) {
+        in = dis(gen);
+    }
+
+    // Generate stereo by repeating mono channel data
+    std::vector<float> stereoInput(mTotalFrameCount * FCC_2);
+    adjust_channels(monoInput.data(), FCC_1, stereoInput.data(), FCC_2, sizeof(float),
+                    mTotalFrameCount * sizeof(float) * FCC_1);
+
+    // Apply effect on stereo channels
+    EffectTestHelper stereoEffect(
+            mUuid, isAuxMode(mUuid) ? AUDIO_CHANNEL_OUT_MONO : AUDIO_CHANNEL_OUT_STEREO,
+            AUDIO_CHANNEL_OUT_STEREO, mSampleRate, mFrameCount, mLoopCount);
+
+    ASSERT_NO_FATAL_FAILURE(stereoEffect.createEffect());
+    ASSERT_NO_FATAL_FAILURE(stereoEffect.setConfig());
+    ASSERT_NO_FATAL_FAILURE(stereoEffect.setParam(REVERB_PARAM_PRESET, mPreset));
+
+    std::vector<float> stereoOutput(mTotalFrameCount * FCC_2);
+    ASSERT_NO_FATAL_FAILURE(stereoEffect.process(
+            (isAuxMode(mUuid) ? monoInput.data() : stereoInput.data()), stereoOutput.data()));
+    ASSERT_NO_FATAL_FAILURE(stereoEffect.releaseEffect());
+
+    // Average of both channels' data is stored for mono comparison
+    std::vector<float> monoOutput(mTotalFrameCount);
+    From2iToMono_Float((const float*)stereoOutput.data(), monoOutput.data(), mTotalFrameCount);
+
+    // Convert stereo float data to stereo int16_t to be used as reference
+    std::vector<int16_t> stereoRefI16(mTotalFrameCount * FCC_2);
+    memcpy_to_i16_from_float(stereoRefI16.data(), stereoOutput.data(), mTotalFrameCount * FCC_2);
+
+    // mono int16_t to be used as reference for mono comparison
+    std::vector<int16_t> monoRefI16(mTotalFrameCount);
+    memcpy_to_i16_from_float(monoRefI16.data(), monoOutput.data(), mTotalFrameCount);
+
+    for (size_t outChMask : EffectTestHelper::kChMasks) {
+        size_t outChannelCount = audio_channel_count_from_out_mask(outChMask);
+        size_t inChMask = isAuxMode(mUuid) ? AUDIO_CHANNEL_OUT_MONO : outChMask;
+
+        EffectTestHelper testEffect(mUuid, inChMask, outChMask, mSampleRate, mFrameCount,
+                                    mLoopCount);
+
+        ASSERT_NO_FATAL_FAILURE(testEffect.createEffect());
+        ASSERT_NO_FATAL_FAILURE(testEffect.setConfig());
+        ASSERT_NO_FATAL_FAILURE(testEffect.setParam(REVERB_PARAM_PRESET, mPreset));
+
+        std::vector<float> testInput(mTotalFrameCount * outChannelCount);
+
+        // Repeat mono channel data to all the channels
+        // adjust_channels() zero fills channels > 2, hence can't be used here
+        for (size_t i = 0; i < mTotalFrameCount; ++i) {
+            auto* fp = &testInput[i * outChannelCount];
+            std::fill(fp, fp + outChannelCount, monoInput[i]);
+        }
+
+        std::vector<float> testOutput(mTotalFrameCount * outChannelCount);
+        ASSERT_NO_FATAL_FAILURE(testEffect.process(
+                (isAuxMode(mUuid) ? monoInput.data() : testInput.data()), testOutput.data()));
+        ASSERT_NO_FATAL_FAILURE(testEffect.releaseEffect());
+
+        if (outChannelCount == FCC_1) {
+            // Convert the test data to int16_t
+            std::vector<int16_t> monoTestI16(mTotalFrameCount);
+            memcpy_to_i16_from_float(monoTestI16.data(), testOutput.data(), mTotalFrameCount);
+
+            ASSERT_EQ(0, memcmp(monoRefI16.data(), monoTestI16.data(), mTotalFrameCount * FCC_2))
+                    << "Mono channel does not match with reference output \n";
+        } else {
+            // Extract first two channels
+            std::vector<float> stereoTestOutput(mTotalFrameCount * FCC_2);
+            adjust_channels(testOutput.data(), outChannelCount, stereoTestOutput.data(), FCC_2,
+                            sizeof(float), mTotalFrameCount * sizeof(float) * outChannelCount);
+
+            // Convert the test data to int16_t
+            std::vector<int16_t> stereoTestI16(mTotalFrameCount * FCC_2);
+            memcpy_to_i16_from_float(stereoTestI16.data(), stereoTestOutput.data(),
+                                     mTotalFrameCount * FCC_2);
+
+            ASSERT_EQ(0, memcmp(stereoRefI16.data(), stereoTestI16.data(),
+                                mTotalFrameCount * FCC_2 * sizeof(int16_t)))
+                    << "First two channels do not match the stereo output \n";
+        }
+    }
+}
+
+INSTANTIATE_TEST_SUITE_P(
+        EffectReverbTestAll, SingleEffectComparisonTest,
+        ::testing::Combine(::testing::Range(0, (int)EffectTestHelper::kNumSampleRates),
+                           ::testing::Range(0, (int)EffectTestHelper::kNumFrameCounts),
+                           ::testing::Range(0, (int)EffectTestHelper::kNumLoopCounts),
+                           ::testing::Range(0, (int)kNumEffectUuids),
+                           ::testing::Range(0, (int)kNumPresets)));
+
+int main(int argc, char** argv) {
+    ::testing::InitGoogleTest(&argc, argv);
+    int status = RUN_ALL_TESTS();
+    ALOGV("Test result = %d\n", status);
+    return status;
+}
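The comparison test above builds its mono reference by averaging the two stereo channels and replicates the mono input across all channels by hand, because adjust_channels() zero-fills channels beyond two. A minimal standalone sketch of those two operations, assuming plain interleaved float buffers (helper names here are illustrative, not part of the patch):

    #include <cstddef>

    // Average each stereo frame into one mono sample (what the test expects
    // From2iToMono_Float to produce for its mono reference).
    static void stereoToMonoAverage(const float* stereo, float* mono, size_t frames) {
        for (size_t i = 0; i < frames; ++i) {
            mono[i] = 0.5f * (stereo[2 * i] + stereo[2 * i + 1]);
        }
    }

    // Replicate a mono signal into every channel of an interleaved buffer,
    // as the test does manually for channel counts greater than two.
    static void replicateMono(const float* mono, float* out, size_t frames, size_t channels) {
        for (size_t i = 0; i < frames; ++i) {
            for (size_t ch = 0; ch < channels; ++ch) {
                out[i * channels + ch] = mono[i];
            }
        }
    }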
diff --git a/media/libeffects/lvm/tests/EffectTestHelper.cpp b/media/libeffects/lvm/tests/EffectTestHelper.cpp
new file mode 100644
index 0000000..625c15a
--- /dev/null
+++ b/media/libeffects/lvm/tests/EffectTestHelper.cpp
@@ -0,0 +1,81 @@
+/*
+ * Copyright 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "EffectTestHelper.h"
+extern audio_effect_library_t AUDIO_EFFECT_LIBRARY_INFO_SYM;
+
+namespace android {
+
+void EffectTestHelper::createEffect() {
+    int status = AUDIO_EFFECT_LIBRARY_INFO_SYM.create_effect(mUuid, 1, 1, &mEffectHandle);
+    ASSERT_EQ(status, 0) << "create_effect returned an error " << status;
+}
+
+void EffectTestHelper::releaseEffect() {
+    int status = AUDIO_EFFECT_LIBRARY_INFO_SYM.release_effect(mEffectHandle);
+    ASSERT_EQ(status, 0) << "release_effect returned an error " << status;
+}
+
+void EffectTestHelper::setConfig() {
+    effect_config_t config{};
+    config.inputCfg.samplingRate = config.outputCfg.samplingRate = mSampleRate;
+    config.inputCfg.channels = mInChMask;
+    config.outputCfg.channels = mOutChMask;
+    config.inputCfg.format = config.outputCfg.format = AUDIO_FORMAT_PCM_FLOAT;
+
+    int reply = 0;
+    uint32_t replySize = sizeof(reply);
+    int status = (*mEffectHandle)
+                         ->command(mEffectHandle, EFFECT_CMD_SET_CONFIG, sizeof(effect_config_t),
+                                   &config, &replySize, &reply);
+    ASSERT_EQ(status, 0) << "set_config returned an error " << status;
+    ASSERT_EQ(reply, 0) << "set_config reply non zero " << reply;
+
+    status = (*mEffectHandle)
+                     ->command(mEffectHandle, EFFECT_CMD_ENABLE, 0, nullptr, &replySize, &reply);
+    ASSERT_EQ(status, 0) << "cmd_enable returned an error " << status;
+    ASSERT_EQ(reply, 0) << "cmd_enable reply non zero " << reply;
+}
+
+void EffectTestHelper::setParam(uint32_t type, uint32_t value) {
+    int reply = 0;
+    uint32_t replySize = sizeof(reply);
+    uint32_t paramData[2] = {type, value};
+    auto effectParam = new effect_param_t[sizeof(effect_param_t) + sizeof(paramData)];
+    memcpy(&effectParam->data[0], &paramData[0], sizeof(paramData));
+    effectParam->psize = sizeof(paramData[0]);
+    effectParam->vsize = sizeof(paramData[1]);
+    int status = (*mEffectHandle)
+                         ->command(mEffectHandle, EFFECT_CMD_SET_PARAM,
+                                   sizeof(effect_param_t) + sizeof(paramData), effectParam,
+                                   &replySize, &reply);
+    delete[] effectParam;
+    ASSERT_EQ(status, 0) << "set_param returned an error " << status;
+    ASSERT_EQ(reply, 0) << "set_param reply non zero " << reply;
+}
+
+void EffectTestHelper::process(float* input, float* output) {
+    audio_buffer_t inBuffer = {.frameCount = mFrameCount, .f32 = input};
+    audio_buffer_t outBuffer = {.frameCount = mFrameCount, .f32 = output};
+    for (size_t i = 0; i < mLoopCount; i++) {
+        int status = (*mEffectHandle)->process(mEffectHandle, &inBuffer, &outBuffer);
+        ASSERT_EQ(status, 0) << "process returned an error " << status;
+
+        inBuffer.f32 += mFrameCount * mInChannelCount;
+        outBuffer.f32 += mFrameCount * mOutChannelCount;
+    }
+}
+}  // namespace android
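setParam() above packs the parameter into an effect_param_t whose variable-length data area holds the parameter type followed by its value, with psize and vsize describing the two halves. A hedged sketch of building the same payload, assuming effect_param_t from hardware/audio_effect.h (the helper name and heap layout are illustrative):

    #include <cstdint>
    #include <cstring>
    #include <memory>
    #include <hardware/audio_effect.h>

    // Layout produced: [effect_param_t header | 4-byte type | 4-byte value],
    // matching what setParam() sends with EFFECT_CMD_SET_PARAM.
    static std::unique_ptr<uint8_t[]> makeParamBlob(uint32_t type, uint32_t value,
                                                    uint32_t* totalSize) {
        const uint32_t payload[2] = {type, value};
        *totalSize = sizeof(effect_param_t) + sizeof(payload);
        auto blob = std::make_unique<uint8_t[]>(*totalSize);
        auto* param = reinterpret_cast<effect_param_t*>(blob.get());
        param->psize = sizeof(payload[0]);  // size of the parameter id
        param->vsize = sizeof(payload[1]);  // size of the value
        memcpy(param->data, payload, sizeof(payload));
        return blob;
    }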
diff --git a/media/libeffects/lvm/tests/EffectTestHelper.h b/media/libeffects/lvm/tests/EffectTestHelper.h
new file mode 100644
index 0000000..3854d46
--- /dev/null
+++ b/media/libeffects/lvm/tests/EffectTestHelper.h
@@ -0,0 +1,123 @@
+/*
+ * Copyright 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <array>
+#include <audio_utils/channels.h>
+#include <audio_utils/primitives.h>
+#include <cfloat>
+#include <climits>
+#include <cmath>
+#include <cstdlib>
+#include <gtest/gtest.h>
+#include <hardware/audio_effect.h>
+#include <log/log.h>
+#include <random>
+#include <stdint.h>
+#include <system/audio.h>
+#include <vector>
+
+namespace android {
+template <typename T>
+static float computeSnr(const T* ref, const T* tst, size_t count) {
+    double signal{};
+    double noise{};
+
+    for (size_t i = 0; i < count; ++i) {
+        const double value(ref[i]);
+        const double diff(tst[i] - value);
+        signal += value * value;
+        noise += diff * diff;
+    }
+    // Initialized to a large value to handle cases where ref and tst match exactly
+    float snr = FLT_MAX;
+    if (signal > 0.0f && noise > 0.0f) {
+        snr = 10.f * log10(signal / noise);  // SNR in dB
+    }
+    return snr;
+}
+
+class EffectTestHelper {
+  public:
+    EffectTestHelper(const effect_uuid_t* uuid, size_t inChMask, size_t outChMask,
+                     size_t sampleRate, size_t frameCount, size_t loopCount)
+        : mUuid(uuid),
+          mInChMask(inChMask),
+          mInChannelCount(audio_channel_count_from_out_mask(mInChMask)),
+          mOutChMask(outChMask),
+          mOutChannelCount(audio_channel_count_from_out_mask(mOutChMask)),
+          mSampleRate(sampleRate),
+          mFrameCount(frameCount),
+          mLoopCount(loopCount) {}
+    void createEffect();
+    void releaseEffect();
+    void setConfig();
+    void setParam(uint32_t type, uint32_t val);
+    void process(float* input, float* output);
+
+    // Corresponds to SNR for 1 bit difference between two int16_t signals
+    static constexpr float kSNRThreshold = 90.308998;
+
+    static constexpr audio_channel_mask_t kChMasks[] = {
+            AUDIO_CHANNEL_OUT_MONO,          AUDIO_CHANNEL_OUT_STEREO,
+            AUDIO_CHANNEL_OUT_2POINT1,       AUDIO_CHANNEL_OUT_2POINT0POINT2,
+            AUDIO_CHANNEL_OUT_QUAD,          AUDIO_CHANNEL_OUT_QUAD_BACK,
+            AUDIO_CHANNEL_OUT_QUAD_SIDE,     AUDIO_CHANNEL_OUT_SURROUND,
+            AUDIO_CHANNEL_INDEX_MASK_4,      AUDIO_CHANNEL_OUT_2POINT1POINT2,
+            AUDIO_CHANNEL_OUT_3POINT0POINT2, AUDIO_CHANNEL_OUT_PENTA,
+            AUDIO_CHANNEL_INDEX_MASK_5,      AUDIO_CHANNEL_OUT_3POINT1POINT2,
+            AUDIO_CHANNEL_OUT_5POINT1,       AUDIO_CHANNEL_OUT_5POINT1_BACK,
+            AUDIO_CHANNEL_OUT_5POINT1_SIDE,  AUDIO_CHANNEL_INDEX_MASK_6,
+            AUDIO_CHANNEL_OUT_6POINT1,       AUDIO_CHANNEL_INDEX_MASK_7,
+            AUDIO_CHANNEL_OUT_5POINT1POINT2, AUDIO_CHANNEL_OUT_7POINT1,
+            AUDIO_CHANNEL_INDEX_MASK_8,      AUDIO_CHANNEL_INDEX_MASK_9,
+            AUDIO_CHANNEL_INDEX_MASK_10,     AUDIO_CHANNEL_INDEX_MASK_11,
+            AUDIO_CHANNEL_INDEX_MASK_12,     AUDIO_CHANNEL_INDEX_MASK_13,
+            AUDIO_CHANNEL_INDEX_MASK_14,     AUDIO_CHANNEL_INDEX_MASK_15,
+            AUDIO_CHANNEL_INDEX_MASK_16,     AUDIO_CHANNEL_INDEX_MASK_17,
+            AUDIO_CHANNEL_INDEX_MASK_18,     AUDIO_CHANNEL_INDEX_MASK_19,
+            AUDIO_CHANNEL_INDEX_MASK_20,     AUDIO_CHANNEL_INDEX_MASK_21,
+            AUDIO_CHANNEL_INDEX_MASK_22,     AUDIO_CHANNEL_INDEX_MASK_23,
+            AUDIO_CHANNEL_INDEX_MASK_24,
+    };
+
+    static constexpr size_t kNumChMasks = std::size(kChMasks);
+
+    static constexpr size_t kSampleRates[] = {8000,  11025, 12000, 16000, 22050,  24000, 32000,
+                                              44100, 48000, 88200, 96000, 176400, 192000};
+
+    static constexpr size_t kNumSampleRates = std::size(kSampleRates);
+
+    static constexpr size_t kFrameCounts[] = {4, 2048};
+
+    static constexpr size_t kNumFrameCounts = std::size(kFrameCounts);
+
+    static constexpr size_t kLoopCounts[] = {1, 4};
+
+    static constexpr size_t kNumLoopCounts = std::size(kLoopCounts);
+
+  private:
+    const effect_uuid_t* mUuid;
+    const size_t mInChMask;
+    const size_t mInChannelCount;
+    const size_t mOutChMask;
+    const size_t mOutChannelCount;
+    const size_t mSampleRate;
+    const size_t mFrameCount;
+    const size_t mLoopCount;
+    effect_handle_t mEffectHandle{};
+};
+}  // namespace android
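computeSnr() and kSNRThreshold above are meant to be used together: the threshold is the SNR of a full-scale int16_t signal against a one-LSB error, 20*log10(32767) ≈ 90.3 dB. A minimal usage sketch under that assumption (the helper name is illustrative):

    #include <cstdint>
    #include <vector>
    #include "EffectTestHelper.h"

    // True when tst differs from ref by roughly one LSB of int16_t or less.
    static bool matchesWithinOneLsb(const std::vector<int16_t>& ref,
                                    const std::vector<int16_t>& tst) {
        const float snr = android::computeSnr(ref.data(), tst.data(), ref.size());
        return snr >= android::EffectTestHelper::kSNRThreshold;
    }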
diff --git a/media/libeffects/lvm/tests/build_and_run_all_unit_tests.sh b/media/libeffects/lvm/tests/build_and_run_all_unit_tests.sh
index 7b0ff5e..7571a24 100755
--- a/media/libeffects/lvm/tests/build_and_run_all_unit_tests.sh
+++ b/media/libeffects/lvm/tests/build_and_run_all_unit_tests.sh
@@ -53,16 +53,16 @@
 flags_arr=(
     "-csE"
     "-eqE"
-    "-tE"
-    "-csE -tE -eqE"
+    "-tE -trebleLvl:15"
+    "-csE -tE -trebleLvl:15 -eqE"
     "-bE -M"
-    "-csE -tE"
-    "-csE -eqE" "-tE -eqE"
-    "-csE -tE -bE -M -eqE"
-    "-tE -eqE -vcBal:96 -M"
-    "-tE -eqE -vcBal:-96 -M"
-    "-tE -eqE -vcBal:0 -M"
-    "-tE -eqE -bE -vcBal:30 -M"
+    "-csE -tE -trebleLvl:15"
+    "-csE -eqE" "-tE -trebleLvl:15 -eqE"
+    "-csE -tE -trebleLvl:15 -bE -M -eqE"
+    "-tE -trebleLvl:15 -eqE -vcBal:96 -M"
+    "-tE -trebleLvl:15 -eqE -vcBal:-96 -M"
+    "-tE -trebleLvl:15 -eqE -vcBal:0 -M"
+    "-tE -trebleLvl:15 -eqE -bE -vcBal:30 -M"
 )
 
 fs_arr=(
@@ -102,6 +102,11 @@
                     ((++error_count))
                 fi
 
+                # Do not compare cases where -vcBal is in the flags and chMask is 0,
+                # because the balance computation is stereo-only
+                if [[ $flags == *"-vcBal:"* ]] && [[ $chMask -eq 0 ]]; then
+                    continue
+                fi
 
                 # two channel files should be identical to higher channel
                 # computation (first 2 channels).
diff --git a/media/libeffects/lvm/tests/lvmtest.cpp b/media/libeffects/lvm/tests/lvmtest.cpp
index f107b18..e65228c 100644
--- a/media/libeffects/lvm/tests/lvmtest.cpp
+++ b/media/libeffects/lvm/tests/lvmtest.cpp
@@ -79,6 +79,7 @@
     int bassEffectLevel = 0;
     int eqPresetLevel = 0;
     int frameLength = 256;
+    int trebleEffectLevel = 0;
     LVM_BE_Mode_en bassEnable = LVM_BE_OFF;
     LVM_TE_Mode_en trebleEnable = LVM_TE_OFF;
     LVM_EQNB_Mode_en eqEnable = LVM_EQNB_OFF;
@@ -303,10 +304,6 @@
     params->PSA_Enable = LVM_PSA_OFF;
     params->PSA_PeakDecayRate = LVM_PSA_SPEED_MEDIUM;
 
-    /* TE Control parameters */
-    params->TE_OperatingMode = LVM_TE_OFF;
-    params->TE_EffectLevel = 0;
-
     /* Activate the initial settings */
     LvmStatus = LVM_SetControlParameters(pContext->pBundledContext->hInstance, params);
 
@@ -415,52 +412,11 @@
     } else {
         return -EINVAL;
     }
-
-    LVM_Fs_en sampleRate;
-    switch (plvmConfigParams->samplingFreq) {
-        case 8000:
-            sampleRate = LVM_FS_8000;
-            break;
-        case 11025:
-            sampleRate = LVM_FS_11025;
-            break;
-        case 12000:
-            sampleRate = LVM_FS_12000;
-            break;
-        case 16000:
-            sampleRate = LVM_FS_16000;
-            break;
-        case 22050:
-            sampleRate = LVM_FS_22050;
-            break;
-        case 24000:
-            sampleRate = LVM_FS_24000;
-            break;
-        case 32000:
-            sampleRate = LVM_FS_32000;
-            break;
-        case 44100:
-            sampleRate = LVM_FS_44100;
-            break;
-        case 48000:
-            sampleRate = LVM_FS_48000;
-            break;
-        case 88200:
-            sampleRate = LVM_FS_88200;
-            break;
-        case 96000:
-            sampleRate = LVM_FS_96000;
-            break;
-        case 176400:
-            sampleRate = LVM_FS_176400;
-            break;
-        case 192000:
-            sampleRate = LVM_FS_192000;
-            break;
-        default:
-            return -EINVAL;
+    params->SampleRate = lvmFsForSampleRate(plvmConfigParams->samplingFreq);
+    if (params->SampleRate == LVM_FS_INVALID) {
+        ALOGE("lvmControl invalid sampling rate %d", plvmConfigParams->samplingFreq);
+        return -EINVAL;
     }
-    params->SampleRate = sampleRate;
 
     /* Concert Sound parameters */
     params->VirtualizerOperatingMode = plvmConfigParams->csEnable;
@@ -486,6 +442,7 @@
 
     /* Treble Enhancement parameters */
     params->TE_OperatingMode = plvmConfigParams->trebleEnable;
+    params->TE_EffectLevel = plvmConfigParams->trebleEffectLevel;
 
     /* PSA Control parameters */
     params->PSA_Enable = LVM_PSA_ON;
@@ -530,19 +487,11 @@
     const int ioChannelCount = plvmConfigParams->fChannels;
     const int ioFrameSize = ioChannelCount * sizeof(short);  // file load size
     const int maxChannelCount = std::max(channelCount, ioChannelCount);
-    /*
-     * Mono input will be converted to 2 channels internally in the process call
-     * by copying the same data into the second channel.
-     * Hence when channelCount is 1, output buffer should be allocated for
-     * 2 channels. The memAllocChCount takes care of allocation of sufficient
-     * memory for the output buffer.
-     */
-    const int memAllocChCount = (channelCount == 1 ? 2 : channelCount);
 
     std::vector<short> in(frameLength * maxChannelCount);
     std::vector<short> out(frameLength * maxChannelCount);
     std::vector<float> floatIn(frameLength * channelCount);
-    std::vector<float> floatOut(frameLength * memAllocChCount);
+    std::vector<float> floatOut(frameLength * channelCount);
 
     int frameCounter = 0;
     while (fread(in.data(), ioFrameSize, frameLength, finp) == (size_t)frameLength) {
@@ -653,6 +602,15 @@
                 return -1;
             }
             lvmConfigParams.eqPresetLevel = eqPresetLevel;
+        } else if (!strncmp(argv[i], "-trebleLvl:", 11)) {
+            const int trebleEffectLevel = atoi(argv[i] + 11);
+            if (trebleEffectLevel > LVM_TE_MAX_EFFECTLEVEL ||
+                trebleEffectLevel < LVM_TE_MIN_EFFECTLEVEL) {
+                printf("Error: Unsupported Treble Effect Level : %d\n", trebleEffectLevel);
+                printUsage();
+                return -1;
+            }
+            lvmConfigParams.trebleEffectLevel = trebleEffectLevel;
         } else if (!strcmp(argv[i], "-bE")) {
             lvmConfigParams.bassEnable = LVM_BE_ON;
         } else if (!strcmp(argv[i], "-eqE")) {
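The switch statements removed above (and the similar ones removed from EffectBundle.cpp and EffectReverb.cpp below) are replaced by a shared lvmFsForSampleRate() helper that returns LVM_FS_INVALID for unsupported rates. The helper itself is not shown in this hunk; judging from the switch it replaces, and assuming LVM_Fs_en and the LVM_FS_* constants from the LVM headers already included by these files, it presumably behaves like this hypothetical sketch:

    #include <map>

    // Hypothetical equivalent of the shared helper used in this patch.
    LVM_Fs_en lvmFsForSampleRate(int sampleRate) {
        static const std::map<int, LVM_Fs_en> kFsMap = {
                {8000, LVM_FS_8000},   {11025, LVM_FS_11025}, {12000, LVM_FS_12000},
                {16000, LVM_FS_16000}, {22050, LVM_FS_22050}, {24000, LVM_FS_24000},
                {32000, LVM_FS_32000}, {44100, LVM_FS_44100}, {48000, LVM_FS_48000},
                {88200, LVM_FS_88200}, {96000, LVM_FS_96000}, {176400, LVM_FS_176400},
                {192000, LVM_FS_192000}};
        const auto it = kFsMap.find(sampleRate);
        return it != kFsMap.end() ? it->second : LVM_FS_INVALID;
    }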
diff --git a/media/libeffects/lvm/tests/reverb_test.cpp b/media/libeffects/lvm/tests/reverb_test.cpp
index cecc975..dfb6970 100644
--- a/media/libeffects/lvm/tests/reverb_test.cpp
+++ b/media/libeffects/lvm/tests/reverb_test.cpp
@@ -212,7 +212,9 @@
         printUsage();
         return EXIT_FAILURE;
     }
-
+    for (int i = 1; i < argc; i++) {
+        printf("%s ", argv[i]);
+    }
     reverbConfigParams_t revConfigParams{};  // default initialize
     const char* inputFile = nullptr;
     const char* outputFile = nullptr;
@@ -312,9 +314,6 @@
     config.inputCfg.samplingRate = config.outputCfg.samplingRate = revConfigParams.sampleRate;
     config.inputCfg.channels = config.outputCfg.channels = revConfigParams.chMask;
     config.inputCfg.format = config.outputCfg.format = AUDIO_FORMAT_PCM_FLOAT;
-    if (AUDIO_CHANNEL_OUT_MONO == revConfigParams.chMask) {
-        config.outputCfg.channels = AUDIO_CHANNEL_OUT_STEREO;
-    }
     if (int status = reverbCreateEffect(&effectHandle, &config, sessionId, ioId,
                                         revConfigParams.auxiliary);
         status != 0) {
@@ -346,19 +345,11 @@
     const int ioChannelCount = revConfigParams.fChannels;
     const int ioFrameSize = ioChannelCount * sizeof(short);
     const int maxChannelCount = std::max(channelCount, ioChannelCount);
-    /*
-     * Mono input will be converted to 2 channels internally in the process call
-     * by copying the same data into the second channel.
-     * Hence when channelCount is 1, output buffer should be allocated for
-     * 2 channels. The outChannelCount takes care of allocation of sufficient
-     * memory for the output buffer.
-     */
-    const int outChannelCount = (channelCount == 1 ? 2 : channelCount);
 
     std::vector<short> in(frameLength * maxChannelCount);
-    std::vector<short> out(frameLength * outChannelCount);
+    std::vector<short> out(frameLength * maxChannelCount);
     std::vector<float> floatIn(frameLength * channelCount);
-    std::vector<float> floatOut(frameLength * outChannelCount);
+    std::vector<float> floatOut(frameLength * channelCount);
 
     int frameCounter = 0;
 
@@ -392,11 +383,11 @@
 #else
         memcpy(floatOut.data(), floatIn.data(), frameLength * frameSize);
 #endif
-        memcpy_to_i16_from_float(out.data(), floatOut.data(), frameLength * outChannelCount);
+        memcpy_to_i16_from_float(out.data(), floatOut.data(), frameLength * channelCount);
 
-        if (ioChannelCount != outChannelCount) {
-            adjust_channels(out.data(), outChannelCount, out.data(), ioChannelCount, sizeof(short),
-                            frameLength * outChannelCount * sizeof(short));
+        if (ioChannelCount != channelCount) {
+            adjust_channels(out.data(), channelCount, out.data(), ioChannelCount, sizeof(short),
+                            frameLength * channelCount * sizeof(short));
         }
         (void)fwrite(out.data(), ioFrameSize, frameLength, outputFp.get());
         frameCounter += frameLength;
diff --git a/media/libeffects/lvm/wrapper/Android.bp b/media/libeffects/lvm/wrapper/Android.bp
index 09c4aef..e169e3c 100644
--- a/media/libeffects/lvm/wrapper/Android.bp
+++ b/media/libeffects/lvm/wrapper/Android.bp
@@ -67,6 +67,7 @@
     },
 
     vendor: true,
+    host_supported: true,
     srcs: ["Reverb/EffectReverb.cpp"],
 
     cppflags: [
@@ -83,7 +84,6 @@
     shared_libs: [
         "libaudioutils",
         "libcutils",
-        "libdl",
         "liblog",
     ],
 
diff --git a/media/libeffects/lvm/wrapper/Bundle/EffectBundle.cpp b/media/libeffects/lvm/wrapper/Bundle/EffectBundle.cpp
index c9d41ba..df64676 100644
--- a/media/libeffects/lvm/wrapper/Bundle/EffectBundle.cpp
+++ b/media/libeffects/lvm/wrapper/Bundle/EffectBundle.cpp
@@ -957,51 +957,12 @@
     pContext->config = *pConfig;
     const LVM_INT16 NrChannels = audio_channel_count_from_out_mask(pConfig->inputCfg.channels);
 
-    switch (pConfig->inputCfg.samplingRate) {
-        case 8000:
-            SampleRate = LVM_FS_8000;
-            pContext->pBundledContext->SamplesPerSecond = 8000 * NrChannels;
-            break;
-        case 16000:
-            SampleRate = LVM_FS_16000;
-            pContext->pBundledContext->SamplesPerSecond = 16000 * NrChannels;
-            break;
-        case 22050:
-            SampleRate = LVM_FS_22050;
-            pContext->pBundledContext->SamplesPerSecond = 22050 * NrChannels;
-            break;
-        case 32000:
-            SampleRate = LVM_FS_32000;
-            pContext->pBundledContext->SamplesPerSecond = 32000 * NrChannels;
-            break;
-        case 44100:
-            SampleRate = LVM_FS_44100;
-            pContext->pBundledContext->SamplesPerSecond = 44100 * NrChannels;
-            break;
-        case 48000:
-            SampleRate = LVM_FS_48000;
-            pContext->pBundledContext->SamplesPerSecond = 48000 * NrChannels;
-            break;
-        case 88200:
-            SampleRate = LVM_FS_88200;
-            pContext->pBundledContext->SamplesPerSecond = 88200 * NrChannels;
-            break;
-        case 96000:
-            SampleRate = LVM_FS_96000;
-            pContext->pBundledContext->SamplesPerSecond = 96000 * NrChannels;
-            break;
-        case 176400:
-            SampleRate = LVM_FS_176400;
-            pContext->pBundledContext->SamplesPerSecond = 176400 * NrChannels;
-            break;
-        case 192000:
-            SampleRate = LVM_FS_192000;
-            pContext->pBundledContext->SamplesPerSecond = 192000 * NrChannels;
-            break;
-        default:
-            ALOGV("\tEffect_setConfig invalid sampling rate %d", pConfig->inputCfg.samplingRate);
-            return -EINVAL;
+    SampleRate = lvmFsForSampleRate(pConfig->inputCfg.samplingRate);
+    if (SampleRate == LVM_FS_INVALID) {
+        ALOGV("Effect_setConfig invalid sampling rate %d", pConfig->inputCfg.samplingRate);
+        return -EINVAL;
     }
+    pContext->pBundledContext->SamplesPerSecond = pConfig->inputCfg.samplingRate * NrChannels;
 
     if (pContext->pBundledContext->SampleRate != SampleRate ||
         pContext->pBundledContext->ChMask != pConfig->inputCfg.channels) {
diff --git a/media/libeffects/lvm/wrapper/Reverb/EffectReverb.cpp b/media/libeffects/lvm/wrapper/Reverb/EffectReverb.cpp
index 9ea70ce..290a7b1 100644
--- a/media/libeffects/lvm/wrapper/Reverb/EffectReverb.cpp
+++ b/media/libeffects/lvm/wrapper/Reverb/EffectReverb.cpp
@@ -33,6 +33,7 @@
 #include "EffectReverb.h"
 // from Reverb/lib
 #include "LVREV.h"
+#include "VectorArithmetic.h"
 
 // effect_handle_t interface implementation for reverb
 extern "C" const struct effect_interface_s gReverbInterface;
@@ -190,8 +191,8 @@
 
 /* Effect Library Interface Implementation */
 
-extern "C" int EffectCreate(const effect_uuid_t* uuid, int32_t sessionId __unused,
-                            int32_t ioId __unused, effect_handle_t* pHandle) {
+extern "C" int EffectCreate(const effect_uuid_t* uuid, int32_t /* sessionId __unused */,
+                            int32_t /* ioId __unused */, effect_handle_t* pHandle) {
     int ret;
     int i;
     int length = sizeof(gDescriptors) / sizeof(const effect_descriptor_t*);
@@ -332,6 +333,7 @@
 //----------------------------------------------------------------------------
 int process(effect_buffer_t* pIn, effect_buffer_t* pOut, int frameCount, ReverbContext* pContext) {
     int channels = audio_channel_count_from_out_mask(pContext->config.inputCfg.channels);
+    int outChannels = audio_channel_count_from_out_mask(pContext->config.outputCfg.channels);
     LVREV_ReturnStatus_en LvmStatus = LVREV_SUCCESS; /* Function call status */
 
     // Reverb only affects the stereo channels in a multichannel source.
@@ -454,33 +456,49 @@
         }
     }
 
-    if (channels > 2) {
+    if (outChannels > 2) {
         // Accumulate if required
         if (pContext->config.outputCfg.accessMode == EFFECT_BUFFER_ACCESS_ACCUMULATE) {
             for (int i = 0; i < frameCount; i++) {
-                pOut[channels * i] += pContext->OutFrames[FCC_2 * i];
-                pOut[channels * i + 1] += pContext->OutFrames[FCC_2 * i + 1];
+                pOut[outChannels * i] += pContext->OutFrames[FCC_2 * i];
+                pOut[outChannels * i + 1] += pContext->OutFrames[FCC_2 * i + 1];
             }
         } else {
             for (int i = 0; i < frameCount; i++) {
-                pOut[channels * i] = pContext->OutFrames[FCC_2 * i];
-                pOut[channels * i + 1] = pContext->OutFrames[FCC_2 * i + 1];
+                pOut[outChannels * i] = pContext->OutFrames[FCC_2 * i];
+                pOut[outChannels * i + 1] = pContext->OutFrames[FCC_2 * i + 1];
             }
         }
-        for (int i = 0; i < frameCount; i++) {
-            for (int j = FCC_2; j < channels; j++) {
-                pOut[channels * i + j] = pIn[channels * i + j];
+        if (!pContext->auxiliary) {
+            for (int i = 0; i < frameCount; i++) {
+                // channels and outChannels are expected to be the same.
+                for (int j = FCC_2; j < outChannels; j++) {
+                    pOut[outChannels * i + j] = pIn[outChannels * i + j];
+                }
             }
         }
     } else {
         if (pContext->config.outputCfg.accessMode == EFFECT_BUFFER_ACCESS_ACCUMULATE) {
-            for (int i = 0; i < frameCount * FCC_2; i++) {
-                pOut[i] += pContext->OutFrames[i];
+            if (outChannels == FCC_1) {
+                for (int i = 0; i < frameCount; i++) {
+                    pOut[i] +=
+                            ((pContext->OutFrames[i * FCC_2] + pContext->OutFrames[i * FCC_2 + 1]) *
+                             0.5f);
+                }
+            } else {
+                for (int i = 0; i < frameCount * FCC_2; i++) {
+                    pOut[i] += pContext->OutFrames[i];
+                }
             }
         } else {
-            memcpy(pOut, pContext->OutFrames, frameCount * sizeof(*pOut) * FCC_2);
+            if (outChannels == FCC_1) {
+                From2iToMono_Float((const process_buffer_t*)pContext->OutFrames, pOut, frameCount);
+            } else {
+                memcpy(pOut, pContext->OutFrames, frameCount * sizeof(*pOut) * FCC_2);
+            }
         }
     }
+
     return 0;
 } /* end process */
 
@@ -498,25 +516,9 @@
 
 void Reverb_free(ReverbContext* pContext) {
     LVREV_ReturnStatus_en LvmStatus = LVREV_SUCCESS; /* Function call status */
-    LVREV_MemoryTable_st MemTab;
 
-    /* Free the algorithm memory */
-    LvmStatus = LVREV_GetMemoryTable(pContext->hInstance, &MemTab, LVM_NULL);
-
-    LVM_ERROR_CHECK(LvmStatus, "LVM_GetMemoryTable", "Reverb_free")
-
-    for (int i = 0; i < LVM_NR_MEMORY_REGIONS; i++) {
-        if (MemTab.Region[i].Size != 0) {
-            if (MemTab.Region[i].pBaseAddress != NULL) {
-                free(MemTab.Region[i].pBaseAddress);
-            } else {
-                ALOGV("\tLVM_ERROR : free() - trying to free with NULL pointer %" PRIu32
-                      " bytes "
-                      "for region %u at %p ERROR\n",
-                      MemTab.Region[i].Size, i, MemTab.Region[i].pBaseAddress);
-            }
-        }
-    }
+    LvmStatus = LVREV_FreeInstance(pContext->hInstance);
+    LVM_ERROR_CHECK(LvmStatus, "LVREV_FreeInstance", "Reverb_free")
 } /* end Reverb_free */
 
 //----------------------------------------------------------------------------
@@ -546,47 +548,17 @@
     CHECK_ARG((pContext->auxiliary && pConfig->inputCfg.channels == AUDIO_CHANNEL_OUT_MONO) ||
               ((!pContext->auxiliary) && (inputChannels <= LVM_MAX_CHANNELS)));
     int outputChannels = audio_channel_count_from_out_mask(pConfig->outputCfg.channels);
-    CHECK_ARG(outputChannels >= FCC_2 && outputChannels <= LVM_MAX_CHANNELS);
+    CHECK_ARG(outputChannels <= LVM_MAX_CHANNELS);
     CHECK_ARG(pConfig->outputCfg.accessMode == EFFECT_BUFFER_ACCESS_WRITE ||
               pConfig->outputCfg.accessMode == EFFECT_BUFFER_ACCESS_ACCUMULATE);
     CHECK_ARG(pConfig->inputCfg.format == EFFECT_BUFFER_FORMAT);
     // ALOGV("\tReverb_setConfig calling memcpy");
     pContext->config = *pConfig;
 
-    switch (pConfig->inputCfg.samplingRate) {
-        case 8000:
-            SampleRate = LVM_FS_8000;
-            break;
-        case 16000:
-            SampleRate = LVM_FS_16000;
-            break;
-        case 22050:
-            SampleRate = LVM_FS_22050;
-            break;
-        case 32000:
-            SampleRate = LVM_FS_32000;
-            break;
-        case 44100:
-            SampleRate = LVM_FS_44100;
-            break;
-        case 48000:
-            SampleRate = LVM_FS_48000;
-            break;
-        case 88200:
-            SampleRate = LVM_FS_88200;
-            break;
-        case 96000:
-            SampleRate = LVM_FS_96000;
-            break;
-        case 176400:
-            SampleRate = LVM_FS_176400;
-            break;
-        case 192000:
-            SampleRate = LVM_FS_192000;
-            break;
-        default:
-            ALOGV("\rReverb_setConfig invalid sampling rate %d", pConfig->inputCfg.samplingRate);
-            return -EINVAL;
+    SampleRate = lvmFsForSampleRate(pConfig->inputCfg.samplingRate);
+    if (SampleRate == LVM_FS_INVALID) {
+        ALOGE("Reverb_setConfig invalid sampling rate %d", pConfig->inputCfg.samplingRate);
+        return -EINVAL;
     }
 
     if (pContext->SampleRate != SampleRate) {
@@ -686,65 +658,17 @@
     LVREV_ReturnStatus_en LvmStatus = LVREV_SUCCESS; /* Function call status */
     LVREV_ControlParams_st params;                   /* Control Parameters */
     LVREV_InstanceParams_st InstParams;              /* Instance parameters */
-    LVREV_MemoryTable_st MemTab;                     /* Memory allocation table */
-    bool bMallocFailure = LVM_FALSE;
 
     /* Set the capabilities */
     InstParams.MaxBlockSize = MAX_CALL_SIZE;
     InstParams.SourceFormat = LVM_STEREO;  // Max format, could be mono during process
     InstParams.NumDelays = LVREV_DELAYLINES_4;
 
-    /* Allocate memory, forcing alignment */
-    LvmStatus = LVREV_GetMemoryTable(LVM_NULL, &MemTab, &InstParams);
-
-    LVM_ERROR_CHECK(LvmStatus, "LVREV_GetMemoryTable", "Reverb_init")
-    if (LvmStatus != LVREV_SUCCESS) return -EINVAL;
-
-    ALOGV("\tCreateInstance Successfully called LVM_GetMemoryTable\n");
-
-    /* Allocate memory */
-    for (int i = 0; i < LVM_NR_MEMORY_REGIONS; i++) {
-        if (MemTab.Region[i].Size != 0) {
-            MemTab.Region[i].pBaseAddress = calloc(1, MemTab.Region[i].Size);
-
-            if (MemTab.Region[i].pBaseAddress == LVM_NULL) {
-                ALOGV("\tLVREV_ERROR :Reverb_init CreateInstance Failed to allocate %" PRIu32
-                      " bytes for region %u\n",
-                      MemTab.Region[i].Size, i);
-                bMallocFailure = LVM_TRUE;
-            } else {
-                ALOGV("\tReverb_init CreateInstance allocate %" PRIu32
-                      " bytes for region %u at %p\n",
-                      MemTab.Region[i].Size, i, MemTab.Region[i].pBaseAddress);
-            }
-        }
-    }
-
-    /* If one or more of the memory regions failed to allocate, free the regions that were
-     * succesfully allocated and return with an error
-     */
-    if (bMallocFailure == LVM_TRUE) {
-        for (int i = 0; i < LVM_NR_MEMORY_REGIONS; i++) {
-            if (MemTab.Region[i].pBaseAddress == LVM_NULL) {
-                ALOGV("\tLVM_ERROR :Reverb_init CreateInstance Failed to allocate %" PRIu32
-                      " bytes for region %u - Not freeing\n",
-                      MemTab.Region[i].Size, i);
-            } else {
-                ALOGV("\tLVM_ERROR :Reverb_init CreateInstance Failed: but allocated %" PRIu32
-                      " bytes for region %u at %p- free\n",
-                      MemTab.Region[i].Size, i, MemTab.Region[i].pBaseAddress);
-                free(MemTab.Region[i].pBaseAddress);
-            }
-        }
-        return -EINVAL;
-    }
-    ALOGV("\tReverb_init CreateInstance Successfully malloc'd memory\n");
-
     /* Initialise */
     pContext->hInstance = LVM_NULL;
 
     /* Init sets the instance handle */
-    LvmStatus = LVREV_GetInstanceHandle(&pContext->hInstance, &MemTab, &InstParams);
+    LvmStatus = LVREV_GetInstanceHandle(&pContext->hInstance, &InstParams);
 
     LVM_ERROR_CHECK(LvmStatus, "LVM_GetInstanceHandle", "Reverb_init")
     if (LvmStatus != LVREV_SUCCESS) return -EINVAL;
diff --git a/media/libeffects/preprocessing/Android.bp b/media/libeffects/preprocessing/Android.bp
index e46a136..c6e036a 100644
--- a/media/libeffects/preprocessing/Android.bp
+++ b/media/libeffects/preprocessing/Android.bp
@@ -18,14 +18,10 @@
     ],
 }
 
-cc_library_shared {
-    name: "libaudiopreprocessing",
+cc_defaults {
+    name: "libaudiopreprocessing-defaults",
     vendor: true,
-    relative_install_path: "soundfx",
-    srcs: ["PreProcessing.cpp"],
-    local_include_dirs: [
-        ".",
-    ],
+    host_supported: true,
     cflags: [
         "-Wall",
         "-Werror",
@@ -45,6 +41,20 @@
     header_libs: [
         "libaudioeffects",
         "libhardware_headers",
+    ],
+    target: {
+        darwin: {
+            enabled: false,
+        },
+    },
+}
+
+cc_library {
+    name: "libaudiopreprocessing",
+    defaults: ["libaudiopreprocessing-defaults"],
+    relative_install_path: "soundfx",
+    srcs: ["PreProcessing.cpp"],
+    header_libs: [
         "libwebrtc_absl_headers",
     ],
 }
diff --git a/media/libeffects/preprocessing/PreProcessing.cpp b/media/libeffects/preprocessing/PreProcessing.cpp
index 03ccc34..19a8b2f 100644
--- a/media/libeffects/preprocessing/PreProcessing.cpp
+++ b/media/libeffects/preprocessing/PreProcessing.cpp
@@ -105,9 +105,8 @@
     webrtc::AudioProcessing* apm;  // handle on webRTC audio processing module (APM)
     // Audio Processing module builder
     webrtc::AudioProcessingBuilder ap_builder;
-    size_t apmFrameCount;      // buffer size for webRTC process (10 ms)
-    uint32_t apmSamplingRate;  // webRTC APM sampling rate (8/16 or 32 kHz)
-    size_t frameCount;         // buffer size before input resampler ( <=> apmFrameCount)
+    // frameCount is the number of frames in the processing buffers and must correspond to 10ms.
+    size_t frameCount;
     uint32_t samplingRate;     // sampling rate at effect process interface
     uint32_t inChannelCount;   // input channel count
     uint32_t outChannelCount;  // output channel count
@@ -119,21 +118,12 @@
     webrtc::AudioProcessing::Config config;
     webrtc::StreamConfig inputConfig;   // input stream configuration
     webrtc::StreamConfig outputConfig;  // output stream configuration
-    int16_t* inBuf;    // input buffer used when resampling
-    size_t inBufSize;  // input buffer size in frames
-    size_t framesIn;   // number of frames in input buffer
-    int16_t* outBuf;    // output buffer used when resampling
-    size_t outBufSize;  // output buffer size in frames
-    size_t framesOut;   // number of frames in output buffer
     uint32_t revChannelCount;  // number of channels on reverse stream
     uint32_t revEnabledMsk;    // bit field containing IDs of enabled pre processors
                                // with reverse channel
     uint32_t revProcessedMsk;  // bit field containing IDs of pre processors with reverse
                                // channel already processed in current round
     webrtc::StreamConfig revConfig;     // reverse stream configuration.
-    int16_t* revBuf;    // reverse channel input buffer
-    size_t revBufSize;  // reverse channel input buffer size
-    size_t framesRev;   // number of frames in reverse channel input buffer
 };
 
 #ifdef DUAL_MIC_TEST
@@ -670,8 +660,8 @@
     return 0;
 }
 
-int NsGetParameter(preproc_effect_t* effect __unused, void* pParam __unused,
-                   uint32_t* pValueSize __unused, void* pValue __unused) {
+int NsGetParameter(preproc_effect_t* /*effect __unused*/, void* /*pParam __unused*/,
+                   uint32_t* /*pValueSize __unused*/, void* /*pValue __unused*/) {
     int status = 0;
     return status;
 }
@@ -862,9 +852,7 @@
             ALOGW("Session_CreateEffect could not get apm engine");
             goto error;
         }
-        session->apmSamplingRate = kPreprocDefaultSr;
-        session->apmFrameCount = (kPreprocDefaultSr) / 100;
-        session->frameCount = session->apmFrameCount;
+        session->frameCount = kPreprocDefaultSr / 100;
         session->samplingRate = kPreprocDefaultSr;
         session->inChannelCount = kPreProcDefaultCnl;
         session->outChannelCount = kPreProcDefaultCnl;
@@ -879,12 +867,6 @@
         session->processedMsk = 0;
         session->revEnabledMsk = 0;
         session->revProcessedMsk = 0;
-        session->inBuf = NULL;
-        session->inBufSize = 0;
-        session->outBuf = NULL;
-        session->outBufSize = 0;
-        session->revBuf = NULL;
-        session->revBufSize = 0;
     }
     status = Effect_Create(&session->effects[procId], session, interface);
     if (status < 0) {
@@ -908,13 +890,6 @@
     if (session->createdMsk == 0) {
         delete session->apm;
         session->apm = NULL;
-        delete session->inBuf;
-        session->inBuf = NULL;
-        delete session->outBuf;
-        session->outBuf = NULL;
-        delete session->revBuf;
-        session->revBuf = NULL;
-
         session->id = 0;
     }
 
@@ -934,24 +909,8 @@
     ALOGV("Session_SetConfig sr %d cnl %08x", config->inputCfg.samplingRate,
           config->inputCfg.channels);
 
-    // AEC implementation is limited to 16kHz
-    if (config->inputCfg.samplingRate >= 32000 && !(session->createdMsk & (1 << PREPROC_AEC))) {
-        session->apmSamplingRate = 32000;
-    } else if (config->inputCfg.samplingRate >= 16000) {
-        session->apmSamplingRate = 16000;
-    } else if (config->inputCfg.samplingRate >= 8000) {
-        session->apmSamplingRate = 8000;
-    }
-
-
     session->samplingRate = config->inputCfg.samplingRate;
-    session->apmFrameCount = session->apmSamplingRate / 100;
-    if (session->samplingRate == session->apmSamplingRate) {
-        session->frameCount = session->apmFrameCount;
-    } else {
-        session->frameCount =
-                (session->apmFrameCount * session->samplingRate) / session->apmSamplingRate;
-    }
+    session->frameCount = session->samplingRate / 100;
     session->inChannelCount = inCnl;
     session->outChannelCount = outCnl;
     session->inputConfig.set_sample_rate_hz(session->samplingRate);
@@ -963,13 +922,6 @@
     session->revConfig.set_sample_rate_hz(session->samplingRate);
     session->revConfig.set_num_channels(inCnl);
 
-    // force process buffer reallocation
-    session->inBufSize = 0;
-    session->outBufSize = 0;
-    session->framesIn = 0;
-    session->framesOut = 0;
-
-
     session->state = PREPROC_SESSION_STATE_CONFIG;
     return 0;
 }
@@ -1004,9 +956,6 @@
     }
     uint32_t inCnl = audio_channel_count_from_out_mask(config->inputCfg.channels);
     session->revChannelCount = inCnl;
-    // force process buffer reallocation
-    session->revBufSize = 0;
-    session->framesRev = 0;
 
     return 0;
 }
@@ -1023,12 +972,8 @@
 
 void Session_SetProcEnabled(preproc_session_t* session, uint32_t procId, bool enabled) {
     if (enabled) {
-        if (session->enabledMsk == 0) {
-            session->framesIn = 0;
-        }
         session->enabledMsk |= (1 << procId);
         if (HasReverseStream(procId)) {
-            session->framesRev = 0;
             session->revEnabledMsk |= (1 << procId);
         }
     } else {
@@ -1117,43 +1062,24 @@
         return -EINVAL;
     }
 
+    if (inBuffer->frameCount != outBuffer->frameCount) {
+        ALOGW("inBuffer->frameCount %zu is not equal to outBuffer->frameCount %zu",
+              inBuffer->frameCount, outBuffer->frameCount);
+        return -EINVAL;
+    }
+
+    if (inBuffer->frameCount != session->frameCount) {
+        ALOGW("inBuffer->frameCount %zu != %zu representing 10ms at sampling rate %d",
+              inBuffer->frameCount, session->frameCount, session->samplingRate);
+        return -EINVAL;
+    }
+
     session->processedMsk |= (1 << effect->procId);
 
     //    ALOGV("PreProcessingFx_Process In %d frames enabledMsk %08x processedMsk %08x",
     //         inBuffer->frameCount, session->enabledMsk, session->processedMsk);
-
     if ((session->processedMsk & session->enabledMsk) == session->enabledMsk) {
         effect->session->processedMsk = 0;
-        size_t framesRq = outBuffer->frameCount;
-        size_t framesWr = 0;
-        if (session->framesOut) {
-            size_t fr = session->framesOut;
-            if (outBuffer->frameCount < fr) {
-                fr = outBuffer->frameCount;
-            }
-            memcpy(outBuffer->s16, session->outBuf,
-                   fr * session->outChannelCount * sizeof(int16_t));
-            memmove(session->outBuf, session->outBuf + fr * session->outChannelCount,
-                    (session->framesOut - fr) * session->outChannelCount * sizeof(int16_t));
-            session->framesOut -= fr;
-            framesWr += fr;
-        }
-        outBuffer->frameCount = framesWr;
-        if (framesWr == framesRq) {
-            inBuffer->frameCount = 0;
-            return 0;
-        }
-
-        size_t fr = session->frameCount - session->framesIn;
-        if (inBuffer->frameCount < fr) {
-            fr = inBuffer->frameCount;
-        }
-        session->framesIn += fr;
-        inBuffer->frameCount = fr;
-        if (session->framesIn < session->frameCount) {
-            return 0;
-        }
-        session->framesIn = 0;
         if (int status = effect->session->apm->ProcessStream(
                     (const int16_t* const)inBuffer->s16,
                     (const webrtc::StreamConfig)effect->session->inputConfig,
@@ -1163,34 +1089,6 @@
             ALOGE("Process Stream failed with error %d\n", status);
             return status;
         }
-        outBuffer->frameCount = inBuffer->frameCount;
-
-        if (session->outBufSize < session->framesOut + session->frameCount) {
-            int16_t* buf;
-            session->outBufSize = session->framesOut + session->frameCount;
-            buf = (int16_t*)realloc(
-                    session->outBuf,
-                    session->outBufSize * session->outChannelCount * sizeof(int16_t));
-            if (buf == NULL) {
-                session->framesOut = 0;
-                free(session->outBuf);
-                session->outBuf = NULL;
-                return -ENOMEM;
-            }
-            session->outBuf = buf;
-        }
-
-        fr = session->framesOut;
-        if (framesRq - framesWr < fr) {
-            fr = framesRq - framesWr;
-        }
-        memcpy(outBuffer->s16 + framesWr * session->outChannelCount, session->outBuf,
-               fr * session->outChannelCount * sizeof(int16_t));
-        memmove(session->outBuf, session->outBuf + fr * session->outChannelCount,
-                (session->framesOut - fr) * session->outChannelCount * sizeof(int16_t));
-        session->framesOut -= fr;
-        outBuffer->frameCount += fr;
-
         return 0;
     } else {
         return -ENODATA;
@@ -1551,7 +1449,7 @@
 }
 
 int PreProcessingFx_ProcessReverse(effect_handle_t self, audio_buffer_t* inBuffer,
-                                   audio_buffer_t* outBuffer __unused) {
+                                   audio_buffer_t* outBuffer) {
     preproc_effect_t* effect = (preproc_effect_t*)self;
 
     if (effect == NULL) {
@@ -1565,6 +1463,18 @@
         return -EINVAL;
     }
 
+    if (inBuffer->frameCount != outBuffer->frameCount) {
+        ALOGW("inBuffer->frameCount %zu is not equal to outBuffer->frameCount %zu",
+              inBuffer->frameCount, outBuffer->frameCount);
+        return -EINVAL;
+    }
+
+    if (inBuffer->frameCount != session->frameCount) {
+        ALOGW("inBuffer->frameCount %zu != %zu representing 10ms at sampling rate %d",
+              inBuffer->frameCount, session->frameCount, session->samplingRate);
+        return -EINVAL;
+    }
+
     session->revProcessedMsk |= (1 << effect->procId);
 
     //    ALOGV("PreProcessingFx_ProcessReverse In %d frames revEnabledMsk %08x revProcessedMsk
@@ -1573,16 +1483,6 @@
 
     if ((session->revProcessedMsk & session->revEnabledMsk) == session->revEnabledMsk) {
         effect->session->revProcessedMsk = 0;
-        size_t fr = session->frameCount - session->framesRev;
-        if (inBuffer->frameCount < fr) {
-            fr = inBuffer->frameCount;
-        }
-        session->framesRev += fr;
-        inBuffer->frameCount = fr;
-        if (session->framesRev < session->frameCount) {
-            return 0;
-        }
-        session->framesRev = 0;
         if (int status = effect->session->apm->ProcessReverseStream(
                     (const int16_t* const)inBuffer->s16,
                     (const webrtc::StreamConfig)effect->session->revConfig,
diff --git a/media/libeffects/preprocessing/README.md b/media/libeffects/preprocessing/README.md
new file mode 100644
index 0000000..af46376
--- /dev/null
+++ b/media/libeffects/preprocessing/README.md
@@ -0,0 +1,7 @@
+# Preprocessing effects
+
+## Limitations
+- Preprocessing effects currently work on 10ms worth of data and do not support
+  arbitrary frame counts. This limitation comes from the underlying effects in
+  the webrtc modules.
+- There is currently no API to communicate this requirement.
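In practice the 10ms requirement means callers must size their process buffers to sampleRate / 100 frames; the frame-count checks added to PreProcessing.cpp above reject anything else. A minimal sketch of what a caller has to guarantee (the helper name is illustrative):

    #include <cstddef>

    // The only frame count the preprocessing effects accept at a given rate:
    // 10ms worth of frames, e.g. 160 frames at 16 kHz or 480 at 48 kHz.
    constexpr size_t requiredFrameCount(size_t sampleRateHz) {
        return sampleRateHz / 100;
    }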
diff --git a/media/libeffects/preprocessing/benchmarks/Android.bp b/media/libeffects/preprocessing/benchmarks/Android.bp
index 246d14e..fbbcab4 100644
--- a/media/libeffects/preprocessing/benchmarks/Android.bp
+++ b/media/libeffects/preprocessing/benchmarks/Android.bp
@@ -11,25 +11,10 @@
 
 cc_benchmark {
     name: "preprocessing_benchmark",
-    vendor: true,
-    relative_install_path: "soundfx",
+    defaults: ["libaudiopreprocessing-defaults"],
     srcs: ["preprocessing_benchmark.cpp"],
-    shared_libs: [
+    static_libs: [
         "libaudiopreprocessing",
         "libaudioutils",
-        "liblog",
-        "libutils",
-    ],
-    cflags: [
-        "-DWEBRTC_POSIX",
-        "-fvisibility=default",
-        "-Wall",
-        "-Werror",
-        "-Wextra",
-    ],
-    header_libs: [
-        "libaudioeffects",
-        "libhardware_headers",
-        "libwebrtc_absl_headers",
     ],
 }
diff --git a/media/libeffects/preprocessing/tests/Android.bp b/media/libeffects/preprocessing/tests/Android.bp
index cfa6559..d80b135 100644
--- a/media/libeffects/preprocessing/tests/Android.bp
+++ b/media/libeffects/preprocessing/tests/Android.bp
@@ -11,25 +11,29 @@
 }
 
 cc_test {
-    name: "AudioPreProcessingTest",
-
-    vendor: true,
-
-    relative_install_path: "soundfx",
-
-    srcs: ["PreProcessingTest.cpp"],
-
-    shared_libs: [
+    name: "EffectPreprocessingTest",
+    defaults: ["libaudiopreprocessing-defaults"],
+    gtest: true,
+    test_suites: ["device-tests"],
+    srcs: [
+        "EffectPreprocessingTest.cpp",
+        "EffectTestHelper.cpp",
+    ],
+    static_libs: [
         "libaudiopreprocessing",
         "libaudioutils",
-        "liblog",
-        "libutils",
     ],
-    header_libs: [
-        "libaudioeffects",
-        "libhardware_headers",
-    ],
+}
+
+cc_test {
+    name: "AudioPreProcessingTest",
+    defaults: ["libaudiopreprocessing-defaults"],
     gtest: false,
+    srcs: ["PreProcessingTest.cpp"],
+    static_libs: [
+        "libaudiopreprocessing",
+        "libaudioutils",
+    ],
 }
 
 cc_test {
diff --git a/media/libeffects/preprocessing/tests/EffectPreprocessingTest.cpp b/media/libeffects/preprocessing/tests/EffectPreprocessingTest.cpp
new file mode 100644
index 0000000..07006a1
--- /dev/null
+++ b/media/libeffects/preprocessing/tests/EffectPreprocessingTest.cpp
@@ -0,0 +1,332 @@
+/*
+ * Copyright 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "EffectTestHelper.h"
+
+#include <getopt.h>
+#include <stddef.h>
+#include <stdint.h>
+#include <tuple>
+#include <vector>
+
+#include <audio_effects/effect_aec.h>
+#include <audio_effects/effect_agc.h>
+#include <audio_effects/effect_agc2.h>
+#include <audio_effects/effect_ns.h>
+#include <log/log.h>
+
+constexpr effect_uuid_t kAGCUuid = {
+        0xaa8130e0, 0x66fc, 0x11e0, 0xbad0, {0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b}};
+constexpr effect_uuid_t kAGC2Uuid = {
+        0x89f38e65, 0xd4d2, 0x4d64, 0xad0e, {0x2b, 0x3e, 0x79, 0x9e, 0xa8, 0x86}};
+constexpr effect_uuid_t kAECUuid = {
+        0xbb392ec0, 0x8d4d, 0x11e0, 0xa896, {0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b}};
+constexpr effect_uuid_t kNSUuid = {
+        0xc06c8400, 0x8e06, 0x11e0, 0x9cb6, {0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b}};
+
+static bool isAGCEffect(const effect_uuid_t* uuid) {
+    return uuid == &kAGCUuid;
+}
+static bool isAGC2Effect(const effect_uuid_t* uuid) {
+    return uuid == &kAGC2Uuid;
+}
+static bool isAECEffect(const effect_uuid_t* uuid) {
+    return uuid == &kAECUuid;
+}
+static bool isNSEffect(const effect_uuid_t* uuid) {
+    return uuid == &kNSUuid;
+}
+
+constexpr int kAGCTargetLevels[] = {0, -300, -500, -1000, -3100};
+
+constexpr int kAGCCompLevels[] = {0, -300, -500, -1000, -9000};
+
+constexpr size_t kAGC2FixedDigitalGains[] = {0, 3, 10, 20, 49};
+
+constexpr size_t kAGC2AdaptDigitalLevelEstimators[] = {0, 1};
+
+constexpr size_t kAGC2ExtraSaturationMargins[] = {0, 3, 10, 20, 100};
+
+constexpr size_t kAECEchoDelays[] = {0, 250, 500};
+
+constexpr size_t kNSLevels[] = {0, 1, 2, 3};
+
+struct AGCParams {
+    int targetLevel;
+    int compLevel;
+};
+
+struct AGC2Params {
+    size_t fixedDigitalGain;
+    size_t adaptDigiLevelEstimator;
+    size_t extraSaturationMargin;
+};
+
+struct AECParams {
+    size_t echoDelay;
+};
+
+struct NSParams {
+    size_t level;
+};
+
+struct PreProcParams {
+    const effect_uuid_t* uuid;
+    union {
+        AGCParams agcParams;
+        AGC2Params agc2Params;
+        AECParams aecParams;
+        NSParams nsParams;
+    };
+};
+
+// Create a list of pre-processing parameters to be used for testing
+static const std::vector<PreProcParams> kPreProcParams = [] {
+    std::vector<PreProcParams> result;
+
+    for (const auto targetLevel : kAGCTargetLevels) {
+        for (const auto compLevel : kAGCCompLevels) {
+            AGCParams agcParams = {.targetLevel = targetLevel, .compLevel = compLevel};
+            PreProcParams params = {.uuid = &kAGCUuid, .agcParams = agcParams};
+            result.push_back(params);
+        }
+    }
+
+    for (const auto fixedDigitalGain : kAGC2FixedDigitalGains) {
+        for (const auto adaptDigiLevelEstimator : kAGC2AdaptDigitalLevelEstimators) {
+            for (const auto extraSaturationMargin : kAGC2ExtraSaturationMargins) {
+                AGC2Params agc2Params = {.fixedDigitalGain = fixedDigitalGain,
+                                         .adaptDigiLevelEstimator = adaptDigiLevelEstimator,
+                                         .extraSaturationMargin = extraSaturationMargin};
+                PreProcParams params = {.uuid = &kAGC2Uuid, .agc2Params = agc2Params};
+                result.push_back(params);
+            }
+        }
+    }
+
+    for (const auto echoDelay : kAECEchoDelays) {
+        AECParams aecParams = {.echoDelay = echoDelay};
+        PreProcParams params = {.uuid = &kAECUuid, .aecParams = aecParams};
+        result.push_back(params);
+    }
+
+    for (const auto level : kNSLevels) {
+        NSParams nsParams = {.level = level};
+        PreProcParams params = {.uuid = &kNSUuid, .nsParams = nsParams};
+        result.push_back(params);
+    }
+    return result;
+}();
+
+static const size_t kNumPreProcParams = std::size(kPreProcParams);
+
+void setPreProcParams(const effect_uuid_t* uuid, EffectTestHelper& effect, size_t paramIdx) {
+    const PreProcParams* params = &kPreProcParams[paramIdx];
+    if (isAGCEffect(uuid)) {
+        const AGCParams* agcParams = &params->agcParams;
+        ASSERT_NO_FATAL_FAILURE(effect.setParam(AGC_PARAM_TARGET_LEVEL, agcParams->targetLevel));
+        ASSERT_NO_FATAL_FAILURE(effect.setParam(AGC_PARAM_COMP_GAIN, agcParams->compLevel));
+    } else if (isAGC2Effect(uuid)) {
+        const AGC2Params* agc2Params = &params->agc2Params;
+        ASSERT_NO_FATAL_FAILURE(
+                effect.setParam(AGC2_PARAM_FIXED_DIGITAL_GAIN, agc2Params->fixedDigitalGain));
+        ASSERT_NO_FATAL_FAILURE(effect.setParam(AGC2_PARAM_ADAPT_DIGI_LEVEL_ESTIMATOR,
+                                                agc2Params->adaptDigiLevelEstimator));
+        ASSERT_NO_FATAL_FAILURE(effect.setParam(AGC2_PARAM_ADAPT_DIGI_EXTRA_SATURATION_MARGIN,
+                                                agc2Params->extraSaturationMargin));
+    } else if (isAECEffect(uuid)) {
+        const AECParams* aecParams = &params->aecParams;
+        ASSERT_NO_FATAL_FAILURE(effect.setParam(AEC_PARAM_ECHO_DELAY, aecParams->echoDelay));
+    } else if (isNSEffect(uuid)) {
+        const NSParams* nsParams = &params->nsParams;
+        ASSERT_NO_FATAL_FAILURE(effect.setParam(NS_PARAM_LEVEL, nsParams->level));
+    }
+}
+
+typedef std::tuple<int, int, int, int> SingleEffectTestParam;
+class SingleEffectTest : public ::testing::TestWithParam<SingleEffectTestParam> {
+  public:
+    SingleEffectTest()
+        : mSampleRate(EffectTestHelper::kSampleRates[std::get<1>(GetParam())]),
+          mFrameCount(mSampleRate * EffectTestHelper::kTenMilliSecVal),
+          mLoopCount(EffectTestHelper::kLoopCounts[std::get<2>(GetParam())]),
+          mTotalFrameCount(mFrameCount * mLoopCount),
+          mChMask(EffectTestHelper::kChMasks[std::get<0>(GetParam())]),
+          mChannelCount(audio_channel_count_from_in_mask(mChMask)),
+          mParamIdx(std::get<3>(GetParam())),
+          mUuid(kPreProcParams[mParamIdx].uuid){};
+
+    const size_t mSampleRate;
+    const size_t mFrameCount;
+    const size_t mLoopCount;
+    const size_t mTotalFrameCount;
+    const size_t mChMask;
+    const size_t mChannelCount;
+    const size_t mParamIdx;
+    const effect_uuid_t* mUuid;
+};
+
+// Tests applying a single effect
+TEST_P(SingleEffectTest, SimpleProcess) {
+    SCOPED_TRACE(testing::Message() << " chMask: " << mChMask << " sampleRate: " << mSampleRate
+                                    << " loopCount: " << mLoopCount << " paramIdx " << mParamIdx);
+
+    EffectTestHelper effect(mUuid, mChMask, mSampleRate, mLoopCount);
+
+    ASSERT_NO_FATAL_FAILURE(effect.createEffect());
+    ASSERT_NO_FATAL_FAILURE(effect.setConfig(isAECEffect(mUuid)));
+    ASSERT_NO_FATAL_FAILURE(setPreProcParams(mUuid, effect, mParamIdx));
+
+    // Initialize input buffer with deterministic pseudo-random values
+    std::vector<int16_t> input(mTotalFrameCount * mChannelCount);
+    std::vector<int16_t> output(mTotalFrameCount * mChannelCount);
+    std::vector<int16_t> farInput(mTotalFrameCount * mChannelCount);
+    std::minstd_rand gen(mChMask);
+    std::uniform_int_distribution<int16_t> dis(INT16_MIN, INT16_MAX);
+    for (auto& in : input) {
+        in = dis(gen);
+    }
+    if (isAECEffect(mUuid)) {
+        for (auto& farIn : farInput) {
+            farIn = dis(gen);
+        }
+    }
+    ASSERT_NO_FATAL_FAILURE(effect.process(input.data(), output.data(), isAECEffect(mUuid)));
+    if (isAECEffect(mUuid)) {
+        ASSERT_NO_FATAL_FAILURE(effect.process_reverse(farInput.data(), output.data()));
+    }
+    ASSERT_NO_FATAL_FAILURE(effect.releaseEffect());
+}
+
+INSTANTIATE_TEST_SUITE_P(
+        PreProcTestAll, SingleEffectTest,
+        ::testing::Combine(::testing::Range(0, (int)EffectTestHelper::kNumChMasks),
+                           ::testing::Range(0, (int)EffectTestHelper::kNumSampleRates),
+                           ::testing::Range(0, (int)EffectTestHelper::kNumLoopCounts),
+                           ::testing::Range(0, (int)kNumPreProcParams)));
+
+typedef std::tuple<int, int, int> SingleEffectComparisonTestParam;
+class SingleEffectComparisonTest
+    : public ::testing::TestWithParam<SingleEffectComparisonTestParam> {
+  public:
+    SingleEffectComparisonTest()
+        : mSampleRate(EffectTestHelper::kSampleRates[std::get<0>(GetParam())]),
+          mFrameCount(mSampleRate * EffectTestHelper::kTenMilliSecVal),
+          mLoopCount(EffectTestHelper::kLoopCounts[std::get<1>(GetParam())]),
+          mTotalFrameCount(mFrameCount * mLoopCount),
+          mParamIdx(std::get<2>(GetParam())),
+          mUuid(kPreProcParams[mParamIdx].uuid){};
+
+    const size_t mSampleRate;
+    const size_t mFrameCount;
+    const size_t mLoopCount;
+    const size_t mTotalFrameCount;
+    const size_t mParamIdx;
+    const effect_uuid_t* mUuid;
+};
+
+// Compares first channel in multi-channel output to mono output when same effect is applied
+TEST_P(SingleEffectComparisonTest, SimpleProcess) {
+    SCOPED_TRACE(testing::Message() << " sampleRate: " << mSampleRate
+                                    << " loopCount: " << mLoopCount << " paramIdx " << mParamIdx);
+
+    // Initialize mono input buffer with deterministic pseudo-random values
+    std::vector<int16_t> monoInput(mTotalFrameCount);
+    std::vector<int16_t> monoFarInput(mTotalFrameCount);
+
+    std::minstd_rand gen(mSampleRate);
+    std::uniform_int_distribution<int16_t> dis(INT16_MIN, INT16_MAX);
+    for (auto& in : monoInput) {
+        in = dis(gen);
+    }
+    if (isAECEffect(mUuid)) {
+        for (auto& farIn : monoFarInput) {
+            farIn = dis(gen);
+        }
+    }
+
+    // Apply effect on mono channel
+    EffectTestHelper monoEffect(mUuid, AUDIO_CHANNEL_INDEX_MASK_1, mSampleRate, mLoopCount);
+
+    ASSERT_NO_FATAL_FAILURE(monoEffect.createEffect());
+    ASSERT_NO_FATAL_FAILURE(monoEffect.setConfig(isAECEffect(mUuid)));
+    ASSERT_NO_FATAL_FAILURE(setPreProcParams(mUuid, monoEffect, mParamIdx));
+
+    std::vector<int16_t> monoOutput(mTotalFrameCount);
+    ASSERT_NO_FATAL_FAILURE(
+            monoEffect.process(monoInput.data(), monoOutput.data(), isAECEffect(mUuid)));
+    if (isAECEffect(mUuid)) {
+        ASSERT_NO_FATAL_FAILURE(monoEffect.process_reverse(monoFarInput.data(), monoOutput.data()));
+    }
+    ASSERT_NO_FATAL_FAILURE(monoEffect.releaseEffect());
+
+    for (size_t chMask : EffectTestHelper::kChMasks) {
+        size_t channelCount = audio_channel_count_from_in_mask(chMask);
+
+        EffectTestHelper testEffect(mUuid, chMask, mSampleRate, mLoopCount);
+
+        ASSERT_NO_FATAL_FAILURE(testEffect.createEffect());
+        ASSERT_NO_FATAL_FAILURE(testEffect.setConfig(isAECEffect(mUuid)));
+        ASSERT_NO_FATAL_FAILURE(setPreProcParams(mUuid, testEffect, mParamIdx));
+
+        std::vector<int16_t> testInput(mTotalFrameCount * channelCount);
+        std::vector<int16_t> testFarInput(mTotalFrameCount * channelCount);
+
+        // Repeat mono channel data to all the channels
+        // adjust_channels() zero fills channels > 2, hence can't be used here
+        for (size_t i = 0; i < mTotalFrameCount; ++i) {
+            auto* fpInput = &testInput[i * channelCount];
+            std::fill(fpInput, fpInput + channelCount, monoInput[i]);
+        }
+        if (isAECEffect(mUuid)) {
+            for (size_t i = 0; i < mTotalFrameCount; ++i) {
+                auto* fpFarInput = &testFarInput[i * channelCount];
+                std::fill(fpFarInput, fpFarInput + channelCount, monoFarInput[i]);
+            }
+        }
+
+        std::vector<int16_t> testOutput(mTotalFrameCount * channelCount);
+        ASSERT_NO_FATAL_FAILURE(
+                testEffect.process(testInput.data(), testOutput.data(), isAECEffect(mUuid)));
+        if (isAECEffect(mUuid)) {
+            ASSERT_NO_FATAL_FAILURE(
+                    testEffect.process_reverse(testFarInput.data(), testOutput.data()));
+        }
+        ASSERT_NO_FATAL_FAILURE(testEffect.releaseEffect());
+
+        // Adjust the test output to mono channel
+        std::vector<int16_t> monoTestOutput(mTotalFrameCount);
+        adjust_channels(testOutput.data(), channelCount, monoTestOutput.data(), FCC_1,
+                        sizeof(int16_t), mTotalFrameCount * sizeof(int16_t) * channelCount);
+
+        ASSERT_EQ(0, memcmp(monoOutput.data(), monoTestOutput.data(),
+                            mTotalFrameCount * sizeof(int16_t)))
+                << "Mono channel output does not match with reference output \n";
+    }
+}
+
+INSTANTIATE_TEST_SUITE_P(
+        PreProcTestAll, SingleEffectComparisonTest,
+        ::testing::Combine(::testing::Range(0, (int)EffectTestHelper::kNumSampleRates),
+                           ::testing::Range(0, (int)EffectTestHelper::kNumLoopCounts),
+                           ::testing::Range(0, (int)kNumPreProcParams)));
+
+int main(int argc, char** argv) {
+    ::testing::InitGoogleTest(&argc, argv);
+    int status = RUN_ALL_TESTS();
+    ALOGV("Test result = %d", status);
+    return status;
+}
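
For orientation, a small sketch of how one tuple generated by the ::testing::Combine() ranges above is consumed by the SingleEffectTest constructor (the index values here are illustrative, not taken from a real run):

    // Hypothetical parameter tuple {chMaskIdx, sampleRateIdx, loopCountIdx, paramIdx}:
    SingleEffectTestParam p{1, 3, 0, 2};
    // std::get<0>(p) == 1 -> mChMask     = kChMasks[1]     (AUDIO_CHANNEL_IN_STEREO)
    // std::get<1>(p) == 3 -> mSampleRate = kSampleRates[3] (16000)
    // std::get<2>(p) == 0 -> mLoopCount  = kLoopCounts[0]  (1)
    // std::get<3>(p) == 2 -> mParamIdx   = 2 (third entry of kPreProcParams)
    // mFrameCount = 16000 * 0.01 = 160 frames; mTotalFrameCount = 160 * 1 = 160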
diff --git a/media/libeffects/preprocessing/tests/EffectTestHelper.cpp b/media/libeffects/preprocessing/tests/EffectTestHelper.cpp
new file mode 100644
index 0000000..79200b6
--- /dev/null
+++ b/media/libeffects/preprocessing/tests/EffectTestHelper.cpp
@@ -0,0 +1,99 @@
+/*
+ * Copyright 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "EffectTestHelper.h"
+extern audio_effect_library_t AUDIO_EFFECT_LIBRARY_INFO_SYM;
+
+void EffectTestHelper::createEffect() {
+    int status = AUDIO_EFFECT_LIBRARY_INFO_SYM.create_effect(mUuid, 1, 1, &mEffectHandle);
+    ASSERT_EQ(status, 0) << "create_effect returned an error " << status;
+}
+
+void EffectTestHelper::releaseEffect() {
+    int status = AUDIO_EFFECT_LIBRARY_INFO_SYM.release_effect(mEffectHandle);
+    ASSERT_EQ(status, 0) << "release_effect returned an error " << status;
+}
+
+void EffectTestHelper::setConfig(bool configReverse) {
+    effect_config_t config{};
+    config.inputCfg.samplingRate = config.outputCfg.samplingRate = mSampleRate;
+    config.inputCfg.channels = config.outputCfg.channels = mChMask;
+    config.inputCfg.format = config.outputCfg.format = AUDIO_FORMAT_PCM_16_BIT;
+
+    int reply = 0;
+    uint32_t replySize = sizeof(reply);
+
+    int status = (*mEffectHandle)
+                         ->command(mEffectHandle, EFFECT_CMD_SET_CONFIG, sizeof(effect_config_t),
+                                   &config, &replySize, &reply);
+    ASSERT_EQ(status, 0) << "set_config returned an error " << status;
+    ASSERT_EQ(reply, 0) << "set_config reply non zero " << reply;
+
+    if (configReverse) {
+        int status = (*mEffectHandle)
+                             ->command(mEffectHandle, EFFECT_CMD_SET_CONFIG_REVERSE,
+                                       sizeof(effect_config_t), &config, &replySize, &reply);
+        ASSERT_EQ(status, 0) << "set_config_reverse returned an error " << status;
+        ASSERT_EQ(reply, 0) << "set_config_reverse reply non zero " << reply;
+    }
+
+    status = (*mEffectHandle)
+                     ->command(mEffectHandle, EFFECT_CMD_ENABLE, 0, nullptr, &replySize, &reply);
+    ASSERT_EQ(status, 0) << "cmd_enable returned an error " << status;
+    ASSERT_EQ(reply, 0) << "cmd_enable reply non zero " << reply;
+}
+
+void EffectTestHelper::setParam(uint32_t type, uint32_t value) {
+    int reply = 0;
+    uint32_t replySize = sizeof(reply);
+    uint32_t paramData[2] = {type, value};
+    auto effectParam = (effect_param_t*)malloc(sizeof(effect_param_t) + sizeof(paramData));
+    memcpy(&effectParam->data[0], &paramData[0], sizeof(paramData));
+    effectParam->psize = sizeof(paramData[0]);
+    effectParam->vsize = sizeof(paramData[1]);
+    int status = (*mEffectHandle)
+                         ->command(mEffectHandle, EFFECT_CMD_SET_PARAM,
+                                   sizeof(effect_param_t) + sizeof(paramData), effectParam,
+                                   &replySize, &reply);
+    free(effectParam);
+    ASSERT_EQ(status, 0) << "set_param returned an error " << status;
+    ASSERT_EQ(reply, 0) << "set_param reply non zero " << reply;
+}
+
+void EffectTestHelper::process(int16_t* input, int16_t* output, bool setAecEchoDelay) {
+    audio_buffer_t inBuffer = {.frameCount = mFrameCount, .s16 = input};
+    audio_buffer_t outBuffer = {.frameCount = mFrameCount, .s16 = output};
+    for (size_t i = 0; i < mLoopCount; i++) {
+        if (setAecEchoDelay) ASSERT_NO_FATAL_FAILURE(setParam(AEC_PARAM_ECHO_DELAY, kAECDelay));
+        int status = (*mEffectHandle)->process(mEffectHandle, &inBuffer, &outBuffer);
+        ASSERT_EQ(status, 0) << "process returned an error " << status;
+
+        inBuffer.s16 += mFrameCount * mChannelCount;
+        outBuffer.s16 += mFrameCount * mChannelCount;
+    }
+}
+
+void EffectTestHelper::process_reverse(int16_t* farInput, int16_t* output) {
+    audio_buffer_t farInBuffer = {.frameCount = mFrameCount, .s16 = farInput};
+    audio_buffer_t outBuffer = {.frameCount = mFrameCount, .s16 = output};
+    for (size_t i = 0; i < mLoopCount; i++) {
+        int status = (*mEffectHandle)->process_reverse(mEffectHandle, &farInBuffer, &outBuffer);
+        ASSERT_EQ(status, 0) << "process returned an error " << status;
+
+        farInBuffer.s16 += mFrameCount * mChannelCount;
+        outBuffer.s16 += mFrameCount * mChannelCount;
+    }
+}
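
A note on setParam() above: it packs the parameter type and its value into a single effect_param_t command blob. A comment-only sketch of the layout it builds, assuming the 32-bit type/value pair used by this helper:

    // effect_param_t header, followed in memory by psize bytes of parameter data
    // (the type) and vsize bytes of value data:
    //   [ status : int32 ][ psize = 4 ][ vsize = 4 ][ type : uint32 ][ value : uint32 ]
    // Total command size passed with EFFECT_CMD_SET_PARAM:
    //   sizeof(effect_param_t) + sizeof(paramData)  ==  sizeof(effect_param_t) + 8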
diff --git a/media/libeffects/preprocessing/tests/EffectTestHelper.h b/media/libeffects/preprocessing/tests/EffectTestHelper.h
new file mode 100644
index 0000000..117cf7b
--- /dev/null
+++ b/media/libeffects/preprocessing/tests/EffectTestHelper.h
@@ -0,0 +1,110 @@
+/*
+ * Copyright 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <array>
+#include <audio_effects/effect_aec.h>
+#include <audio_utils/channels.h>
+#include <audio_utils/primitives.h>
+#include <climits>
+#include <cstdlib>
+#include <gtest/gtest.h>
+#include <hardware/audio_effect.h>
+#include <log/log.h>
+#include <random>
+#include <stdint.h>
+#include <system/audio.h>
+#include <vector>
+
+template <typename T>
+static float computeSnr(const T* ref, const T* tst, size_t count) {
+    double signal{};
+    double noise{};
+
+    for (size_t i = 0; i < count; ++i) {
+        const double value(ref[i]);
+        const double diff(tst[i] - value);
+        signal += value * value;
+        noise += diff * diff;
+    }
+    // Initialized to large value to handle
+    // cases where ref and tst match exactly
+    float snr = FLT_MAX;
+    if (signal > 0.0f && noise > 0.0f) {
+        snr = 10.f * log10(signal / noise);
+    }
+    return snr;
+}
+
+class EffectTestHelper {
+  public:
+    EffectTestHelper(const effect_uuid_t* uuid, size_t chMask, size_t sampleRate, size_t loopCount)
+        : mUuid(uuid),
+          mChMask(chMask),
+          mChannelCount(audio_channel_count_from_in_mask(mChMask)),
+          mSampleRate(sampleRate),
+          mFrameCount(mSampleRate * kTenMilliSecVal),
+          mLoopCount(loopCount) {}
+    void createEffect();
+    void releaseEffect();
+    void setConfig(bool configReverse);
+    void setParam(uint32_t type, uint32_t val);
+    void process(int16_t* input, int16_t* output, bool setAecEchoDelay);
+    void process_reverse(int16_t* farInput, int16_t* output);
+
+    // Corresponds to SNR for 1 bit difference between two int16_t signals
+    static constexpr float kSNRThreshold = 90.308998;
+
+    static constexpr audio_channel_mask_t kChMasks[] = {
+            AUDIO_CHANNEL_IN_MONO,
+            AUDIO_CHANNEL_IN_STEREO,
+            AUDIO_CHANNEL_IN_FRONT_BACK,
+            AUDIO_CHANNEL_IN_6,
+            AUDIO_CHANNEL_IN_2POINT0POINT2,
+            AUDIO_CHANNEL_IN_2POINT1POINT2,
+            AUDIO_CHANNEL_IN_3POINT0POINT2,
+            AUDIO_CHANNEL_IN_3POINT1POINT2,
+            AUDIO_CHANNEL_IN_5POINT1,
+            AUDIO_CHANNEL_IN_VOICE_UPLINK_MONO,
+            AUDIO_CHANNEL_IN_VOICE_DNLINK_MONO,
+            AUDIO_CHANNEL_IN_VOICE_CALL_MONO,
+    };
+
+    static constexpr float kTenMilliSecVal = 0.01;
+
+    static constexpr size_t kNumChMasks = std::size(kChMasks);
+
+    static constexpr size_t kSampleRates[] = {8000,  11025, 12000, 16000, 22050,
+                                              24000, 32000, 44100, 48000};
+
+    static constexpr size_t kNumSampleRates = std::size(kSampleRates);
+
+    static constexpr size_t kLoopCounts[] = {1, 4};
+
+    static constexpr size_t kNumLoopCounts = std::size(kLoopCounts);
+
+    static constexpr size_t kAECDelay = 0;
+
+  private:
+    const effect_uuid_t* mUuid;
+    const size_t mChMask;
+    const size_t mChannelCount;
+    const size_t mSampleRate;
+    const size_t mFrameCount;
+    const size_t mLoopCount;
+    effect_handle_t mEffectHandle{};
+};
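
The kSNRThreshold constant above can be sanity-checked with a short standalone program (a sketch, not part of the helper): a full-scale int16_t signal against a 1-LSB error gives 20*log10(32768) ≈ 90.309 dB.

    #include <cmath>
    #include <cstdio>

    int main() {
        // Full-scale int16_t amplitude is 2^15 = 32768; a 1-bit error has amplitude 1.
        const double snr = 10.0 * std::log10((32768.0 * 32768.0) / (1.0 * 1.0));
        std::printf("%.6f\n", snr);  // prints ~90.308999, matching kSNRThreshold
        return 0;
    }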
diff --git a/media/libeffects/preprocessing/tests/PreProcessingTest.cpp b/media/libeffects/preprocessing/tests/PreProcessingTest.cpp
index 5f223c9..3bd93f8 100644
--- a/media/libeffects/preprocessing/tests/PreProcessingTest.cpp
+++ b/media/libeffects/preprocessing/tests/PreProcessingTest.cpp
@@ -24,6 +24,7 @@
 #include <audio_effects/effect_agc.h>
 #include <audio_effects/effect_agc2.h>
 #include <audio_effects/effect_ns.h>
+#include <audio_utils/channels.h>
 #include <log/log.h>
 
 // This is the only symbol that needs to be imported
@@ -55,7 +56,9 @@
     ARG_NS_LVL,
     ARG_AGC2_GAIN,
     ARG_AGC2_LVL,
-    ARG_AGC2_SAT_MGN
+    ARG_AGC2_SAT_MGN,
+    ARG_FILE_CHANNELS,
+    ARG_MONO_MODE
 };
 
 struct preProcConfigParams_t {
@@ -68,6 +71,8 @@
     float agc2SaturationMargin = 2.f;  // in dB
     int agc2Level = 0;                 // either kRms(0) or kPeak(1)
     int aecDelay = 0;  // in ms
+    int fileChannels = 1;
+    int monoMode = 0;
 };
 
 const effect_uuid_t kPreProcUuids[PREPROC_NUM_EFFECTS] = {
@@ -106,7 +111,7 @@
     printf("\n           Prints this usage information");
     printf("\n     --fs <sampling_freq>");
     printf("\n           Sampling frequency in Hz, default 16000.");
-    printf("\n     -ch_mask <channel_mask>\n");
+    printf("\n     --ch_mask <channel_mask>\n");
     printf("\n         0  - AUDIO_CHANNEL_IN_MONO");
     printf("\n         1  - AUDIO_CHANNEL_IN_STEREO");
     printf("\n         2  - AUDIO_CHANNEL_IN_FRONT_BACK");
@@ -144,6 +149,10 @@
     printf("\n           AGC Adaptive Digital Saturation Margin in dB, default value 2dB");
     printf("\n     --aec_delay <delay>");
     printf("\n           AEC delay value in ms, default value 0ms");
+    printf("\n     --fch <fileChannels>");
+    printf("\n           number of channels in the input file");
+    printf("\n     --mono <Mono Mode>");
+    printf("\n           Mode to make data of all channels the same as first channel");
     printf("\n");
 }
 
@@ -189,10 +198,17 @@
         printUsage();
         return EXIT_FAILURE;
     }
+
+    // Print the arguments passed
+    for (int i = 1; i < argc; i++) {
+        printf("%s ", argv[i]);
+    }
+
     const char* inputFile = nullptr;
     const char* outputFile = nullptr;
     const char* farFile = nullptr;
     int effectEn[PREPROC_NUM_EFFECTS] = {0};
+    struct preProcConfigParams_t preProcCfgParams {};
 
     const option long_opts[] = {
             {"help", no_argument, nullptr, ARG_HELP},
@@ -212,9 +228,10 @@
             {"agc", no_argument, &effectEn[PREPROC_AGC], 1},
             {"agc2", no_argument, &effectEn[PREPROC_AGC2], 1},
             {"ns", no_argument, &effectEn[PREPROC_NS], 1},
+            {"fch", required_argument, nullptr, ARG_FILE_CHANNELS},
+            {"mono", no_argument, &preProcCfgParams.monoMode, 1},
             {nullptr, 0, nullptr, 0},
     };
-    struct preProcConfigParams_t preProcCfgParams {};
 
     while (true) {
         const int opt = getopt_long(argc, (char* const*)argv, "i:o:", long_opts, nullptr);
@@ -279,6 +296,14 @@
                 preProcCfgParams.nsLevel = atoi(optarg);
                 break;
             }
+            case ARG_FILE_CHANNELS: {
+                preProcCfgParams.fileChannels = atoi(optarg);
+                break;
+            }
+            case ARG_MONO_MODE: {
+                preProcCfgParams.monoMode = 1;
+                break;
+            }
             default:
                 break;
         }
@@ -402,29 +427,52 @@
     // Process Call
     const int frameLength = (int)(preProcCfgParams.samplingFreq * kTenMilliSecVal);
     const int ioChannelCount = audio_channel_count_from_in_mask(preProcCfgParams.chMask);
+    const int fileChannelCount = preProcCfgParams.fileChannels;
     const int ioFrameSize = ioChannelCount * sizeof(short);
+    const int inFrameSize = fileChannelCount * sizeof(short);
     int frameCounter = 0;
     while (true) {
         std::vector<short> in(frameLength * ioChannelCount);
         std::vector<short> out(frameLength * ioChannelCount);
         std::vector<short> farIn(frameLength * ioChannelCount);
-        size_t samplesRead = fread(in.data(), ioFrameSize, frameLength, inputFp.get());
+        size_t samplesRead = fread(in.data(), inFrameSize, frameLength, inputFp.get());
         if (samplesRead == 0) {
             break;
         }
+        if (fileChannelCount != ioChannelCount) {
+            adjust_channels(in.data(), fileChannelCount, in.data(), ioChannelCount, sizeof(short),
+                            frameLength * inFrameSize);
+            if (preProcCfgParams.monoMode == 1) {
+                for (int i = 0; i < frameLength; ++i) {
+                    auto* fp = &in[i * ioChannelCount];
+                    std::fill(fp + 1, fp + ioChannelCount, *fp);  // replicate ch 0
+                }
+            }
+        }
         audio_buffer_t inputBuffer, outputBuffer;
         audio_buffer_t farInBuffer{};
-        inputBuffer.frameCount = samplesRead;
-        outputBuffer.frameCount = samplesRead;
+        inputBuffer.frameCount = frameLength;
+        outputBuffer.frameCount = frameLength;
         inputBuffer.s16 = in.data();
         outputBuffer.s16 = out.data();
 
         if (farFp != nullptr) {
-            samplesRead = fread(farIn.data(), ioFrameSize, frameLength, farFp.get());
+            samplesRead = fread(farIn.data(), inFrameSize, frameLength, farFp.get());
             if (samplesRead == 0) {
                 break;
             }
-            farInBuffer.frameCount = samplesRead;
+            if (fileChannelCount != ioChannelCount) {
+                adjust_channels(farIn.data(), fileChannelCount, farIn.data(), ioChannelCount,
+                                sizeof(short), frameLength * inFrameSize);
+                if (preProcCfgParams.monoMode == 1) {
+                    for (int i = 0; i < frameLength; ++i) {
+                        auto* fp = &farIn[i * ioChannelCount];
+                        std::fill(fp + 1, fp + ioChannelCount, *fp);  // replicate ch 0
+                    }
+                }
+            }
+
+            farInBuffer.frameCount = frameLength;
             farInBuffer.s16 = farIn.data();
         }
 
@@ -458,8 +506,12 @@
             }
         }
         if (outputFp != nullptr) {
+            if (fileChannelCount != ioChannelCount) {
+                adjust_channels(out.data(), ioChannelCount, out.data(), fileChannelCount,
+                                sizeof(short), frameLength * ioFrameSize);
+            }
             size_t samplesWritten =
-                    fwrite(out.data(), ioFrameSize, outputBuffer.frameCount, outputFp.get());
+                    fwrite(out.data(), inFrameSize, outputBuffer.frameCount, outputFp.get());
             if (samplesWritten != outputBuffer.frameCount) {
                 ALOGE("\nError: Output file writing failed");
                 break;
@@ -467,6 +519,7 @@
         }
         frameCounter += frameLength;
     }
+    printf("frameCounter: [%d]\n", frameCounter);
     // Release all the effect handles created
     for (int i = 0; i < PREPROC_NUM_EFFECTS; i++) {
         if (int status = AUDIO_EFFECT_LIBRARY_INFO_SYM.release_effect(effectHandle[i]);
diff --git a/media/libeffects/preprocessing/tests/build_and_run_all_unit_tests.sh b/media/libeffects/preprocessing/tests/build_and_run_all_unit_tests.sh
new file mode 100755
index 0000000..35da13e
--- /dev/null
+++ b/media/libeffects/preprocessing/tests/build_and_run_all_unit_tests.sh
@@ -0,0 +1,119 @@
+#!/bin/bash
+#
+# Run tests in this directory.
+#
+
+if [ -z "$ANDROID_BUILD_TOP" ]; then
+    echo "Android build environment not set"
+    exit -1
+fi
+
+# ensure we have mm
+. $ANDROID_BUILD_TOP/build/envsetup.sh
+
+mm -j
+
+echo "waiting for device"
+
+adb root && adb wait-for-device remount
+
+# location of test files
+testdir="/data/local/tmp/AudioPreProcessingTest"
+
+echo "========================================"
+echo "testing PreProcessing modules"
+adb shell mkdir -p $testdir
+adb push $ANDROID_BUILD_TOP/frameworks/av/media/libeffects/res/raw/sinesweepraw.raw $testdir
+adb push $OUT/testcases/snr/arm64/snr $testdir
+
+E_VAL=1
+if [ -z "$1" ]
+then
+    cmds=("adb push $OUT/testcases/AudioPreProcessingTest/arm64/AudioPreProcessingTest $testdir"
+          "adb push $OUT/testcases/AudioPreProcessingTest/arm/AudioPreProcessingTest $testdir"
+)
+elif [ "$1" == "32" ]
+then
+    cmds="adb push $OUT/testcases/AudioPreProcessingTest/arm/AudioPreProcessingTest $testdir"
+elif [ "$1" == "64" ]
+then
+    cmds="adb push $OUT/testcases/AudioPreProcessingTest/arm64/AudioPreProcessingTest $testdir"
+else
+    echo ""
+    echo "Invalid \"val\""
+    echo "Usage:"
+    echo "      "$0" [val]"
+    echo "      where, val can be either 32 or 64."
+    echo ""
+    echo "      If val is not specified then both 32 bit and 64 bit binaries"
+    echo "      are tested."
+    exit $E_VAL
+fi
+
+flags_arr=(
+    "--agc --mono"
+    "--ns --mono"
+    "--agc2 --mono"
+    "--aec --mono"
+)
+
+fs_arr=(
+    8000
+    11025
+    12000
+    16000
+    22050
+    24000
+    32000
+    44100
+    48000
+)
+
+# run multichannel effects at different configs, saving only the mono channel
+error_count=0
+test_count=0
+for cmd in "${cmds[@]}"
+do
+    $cmd
+    for flags in "${flags_arr[@]}"
+    do
+        for fs in ${fs_arr[*]}
+        do
+            for chMask in {0..7}
+            do
+                adb shell $testdir/AudioPreProcessingTest $flags \
+                    --i $testdir/sinesweepraw.raw --far $testdir/sinesweepraw.raw \
+                    --output $testdir/sinesweep_$((chMask))_$((fs)).raw --ch_mask $chMask \
+                    --fs $fs --fch 1
+
+                shell_ret=$?
+                if [ $shell_ret -ne 0 ]; then
+                    echo "error shell_ret here is zero: $shell_ret"
+                    ((++error_count))
+                fi
+
+
+                # single channel files should be identical to higher channel
+                # computation (first channel).
+                if  [[ "$chMask" -gt 1 ]]
+                then
+                    adb shell cmp $testdir/sinesweep_1_$((fs)).raw \
+                        $testdir/sinesweep_$((chMask))_$((fs)).raw
+                fi
+
+                # cmp returns EXIT_FAILURE on mismatch.
+                shell_ret=$?
+                if [ $shell_ret -ne 0 ]; then
+                    echo "error: $shell_ret"
+                    ((++error_count))
+                fi
+                ((++test_count))
+            done
+        done
+    done
+done
+
+adb shell rm -r $testdir
+echo "$test_count tests performed"
+echo "$error_count errors"
+exit $error_count
diff --git a/media/libeffects/proxy/EffectProxy.cpp b/media/libeffects/proxy/EffectProxy.cpp
index c010d68..be9f8c0 100644
--- a/media/libeffects/proxy/EffectProxy.cpp
+++ b/media/libeffects/proxy/EffectProxy.cpp
@@ -116,6 +116,16 @@
         pContext->sube[SUB_FX_OFFLOAD] = sube[1];
         pContext->desc[SUB_FX_OFFLOAD] = desc[1];
         pContext->aeli[SUB_FX_OFFLOAD] = aeli[1];
+    } else {
+        ALOGE("Both effects have (or don't have) EFFECT_FLAG_HW_ACC_TUNNEL flag");
+        delete[] sube;
+        delete[] desc;
+        delete[] aeli;
+        delete[] pContext->sube;
+        delete[] pContext->desc;
+        delete[] pContext->aeli;
+        delete pContext;
+        return -EINVAL;
     }
     delete[] desc;
     delete[] aeli;
diff --git a/media/libeffects/visualizer/Android.bp b/media/libeffects/visualizer/Android.bp
index f6c585e..8dd6789 100644
--- a/media/libeffects/visualizer/Android.bp
+++ b/media/libeffects/visualizer/Android.bp
@@ -1,4 +1,23 @@
 // Visualizer library
+package {
+    default_applicable_licenses: [
+        "frameworks_av_media_libeffects_visualizer_license",
+    ],
+}
+
+// Added automatically by a large-scale-change
+// See: http://go/android-license-faq
+license {
+    name: "frameworks_av_media_libeffects_visualizer_license",
+    visibility: [":__subpackages__"],
+    license_kinds: [
+        "SPDX-license-identifier-Apache-2.0",
+    ],
+    license_text: [
+        "NOTICE",
+    ],
+}
+
 cc_library_shared {
     name: "libvisualizer",
 
diff --git a/media/libmedia/Android.bp b/media/libmedia/Android.bp
index 0223cfd..b2056ad 100644
--- a/media/libmedia/Android.bp
+++ b/media/libmedia/Android.bp
@@ -359,6 +359,7 @@
         "libaudioclient",
         "libmedia_codeclist",
         "libmedia_omx",
+        "media_permission-aidl-cpp",
     ],
 
     export_shared_lib_headers: [
@@ -367,14 +368,17 @@
         "libandroidicu",
         //"libsonivox",
         "libmedia_omx",
+        "media_permission-aidl-cpp",
     ],
 
     static_libs: [
         "resourcemanager_aidl_interface-ndk_platform",
+        "media_permission-aidl-cpp",
     ],
 
     export_static_lib_headers: [
         "resourcemanager_aidl_interface-ndk_platform",
+        "media_permission-aidl-cpp",
     ],
 
     export_include_dirs: [
@@ -428,5 +432,8 @@
         },
     },
 
-    apex_available: ["com.android.media"],
+    apex_available: [
+        "//apex_available:platform",
+        "com.android.media"
+    ],
 }
diff --git a/media/libmedia/IMediaPlayer.cpp b/media/libmedia/IMediaPlayer.cpp
index 8a4b17c..c89c023 100644
--- a/media/libmedia/IMediaPlayer.cpp
+++ b/media/libmedia/IMediaPlayer.cpp
@@ -20,6 +20,7 @@
 #include <sys/types.h>
 
 #include <android/IDataSource.h>
+#include <binder/IPCThreadState.h>
 #include <binder/Parcel.h>
 #include <gui/IGraphicBufferProducer.h>
 #include <media/AudioResamplerPublic.h>
@@ -34,59 +35,37 @@
 
 using media::VolumeShaper;
 
-enum {
-    DISCONNECT = IBinder::FIRST_CALL_TRANSACTION,
-    SET_DATA_SOURCE_URL,
-    SET_DATA_SOURCE_FD,
-    SET_DATA_SOURCE_STREAM,
-    SET_DATA_SOURCE_CALLBACK,
-    SET_DATA_SOURCE_RTP,
-    SET_BUFFERING_SETTINGS,
-    GET_BUFFERING_SETTINGS,
-    PREPARE_ASYNC,
-    START,
-    STOP,
-    IS_PLAYING,
-    SET_PLAYBACK_SETTINGS,
-    GET_PLAYBACK_SETTINGS,
-    SET_SYNC_SETTINGS,
-    GET_SYNC_SETTINGS,
-    PAUSE,
-    SEEK_TO,
-    GET_CURRENT_POSITION,
-    GET_DURATION,
-    RESET,
-    NOTIFY_AT,
-    SET_AUDIO_STREAM_TYPE,
-    SET_LOOPING,
-    SET_VOLUME,
-    INVOKE,
-    SET_METADATA_FILTER,
-    GET_METADATA,
-    SET_AUX_EFFECT_SEND_LEVEL,
-    ATTACH_AUX_EFFECT,
-    SET_VIDEO_SURFACETEXTURE,
-    SET_PARAMETER,
-    GET_PARAMETER,
-    SET_RETRANSMIT_ENDPOINT,
-    GET_RETRANSMIT_ENDPOINT,
-    SET_NEXT_PLAYER,
-    APPLY_VOLUME_SHAPER,
-    GET_VOLUME_SHAPER_STATE,
-    // Modular DRM
-    PREPARE_DRM,
-    RELEASE_DRM,
-    // AudioRouting
-    SET_OUTPUT_DEVICE,
-    GET_ROUTED_DEVICE_ID,
-    ENABLE_AUDIO_DEVICE_CALLBACK,
-};
-
 // ModDrm helpers
-static void readVector(const Parcel& reply, Vector<uint8_t>& vector) {
-    uint32_t size = reply.readUint32();
-    vector.insertAt((size_t)0, size);
-    reply.read(vector.editArray(), size);
+static status_t readVector(const Parcel& reply, Vector<uint8_t>& vector) {
+    uint32_t size = 0;
+    status_t status = reply.readUint32(&size);
+    if (status == OK) {
+        status = size <= reply.dataAvail() ? OK : BAD_VALUE;
+    }
+    if (status == OK) {
+        status = vector.insertAt((size_t) 0, size) >= 0 ? OK : NO_MEMORY;
+    }
+    if (status == OK) {
+        status = reply.read(vector.editArray(), size);
+    }
+    if (status != OK) {
+        char errorMsg[100];
+        char buganizerId[] = "173720767";
+        snprintf(errorMsg,
+                sizeof(errorMsg),
+                "%s: failed to read array. Size: %d, status: %d.",
+                __func__,
+                size,
+                status);
+        android_errorWriteWithInfoLog(
+                /* safetyNet tag= */ 0x534e4554,
+                buganizerId,
+                IPCThreadState::self()->getCallingUid(),
+                errorMsg,
+                strlen(errorMsg));
+        ALOGE("%s (b/%s)", errorMsg, buganizerId);
+    }
+    return status;
 }
 
 static void writeVector(Parcel& data, Vector<uint8_t> const& vector) {
@@ -977,8 +956,10 @@
             uint8_t uuid[16] = {};
             data.read(uuid, sizeof(uuid));
             Vector<uint8_t> drmSessionId;
-            readVector(data, drmSessionId);
-
+            status_t status = readVector(data, drmSessionId);
+            if (status != OK) {
+                return status;
+            }
             uint32_t result = prepareDrm(uuid, drmSessionId);
             reply->writeInt32(result);
             return OK;
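
A brief note on the error path in readVector() above: 0x534e4554 is the ASCII encoding of "SNET", the tag used for SafetyNet security event logging, so a failed read is both logged via ALOGE and reported as a security event keyed to b/173720767.

    // Conceptually, the event written by android_errorWriteWithInfoLog() carries:
    //   tag    = 0x534e4554 ("SNET")
    //   subtag = "173720767" (the buganizer id)
    //   uid    = IPCThreadState::self()->getCallingUid()
    //   msg    = "readVector: failed to read array. Size: <size>, status: <status>."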
diff --git a/media/libmedia/IMediaPlayerService.cpp b/media/libmedia/IMediaPlayerService.cpp
index 11005c6..0f189ee 100644
--- a/media/libmedia/IMediaPlayerService.cpp
+++ b/media/libmedia/IMediaPlayerService.cpp
@@ -35,6 +35,8 @@
 
 namespace android {
 
+using media::permission::Identity;
+
 enum {
     CREATE = IBinder::FIRST_CALL_TRANSACTION,
     CREATE_MEDIA_RECORDER,
@@ -63,22 +65,22 @@
 
     virtual sp<IMediaPlayer> create(
             const sp<IMediaPlayerClient>& client, audio_session_t audioSessionId,
-            const std::string opPackageName) {
+            const Identity& identity) {
         Parcel data, reply;
         data.writeInterfaceToken(IMediaPlayerService::getInterfaceDescriptor());
         data.writeStrongBinder(IInterface::asBinder(client));
         data.writeInt32(audioSessionId);
-        data.writeCString(opPackageName.c_str());
+        data.writeParcelable(identity);
 
         remote()->transact(CREATE, data, &reply);
         return interface_cast<IMediaPlayer>(reply.readStrongBinder());
     }
 
-    virtual sp<IMediaRecorder> createMediaRecorder(const String16 &opPackageName)
+    virtual sp<IMediaRecorder> createMediaRecorder(const Identity& identity)
     {
         Parcel data, reply;
         data.writeInterfaceToken(IMediaPlayerService::getInterfaceDescriptor());
-        data.writeString16(opPackageName);
+        data.writeParcelable(identity);
         remote()->transact(CREATE_MEDIA_RECORDER, data, &reply);
         return interface_cast<IMediaRecorder>(reply.readStrongBinder());
     }
@@ -129,19 +131,23 @@
             sp<IMediaPlayerClient> client =
                 interface_cast<IMediaPlayerClient>(data.readStrongBinder());
             audio_session_t audioSessionId = (audio_session_t) data.readInt32();
-            const char* opPackageName = data.readCString();
-            if (opPackageName == nullptr) {
-                return FAILED_TRANSACTION;
+            Identity identity;
+            status_t status = data.readParcelable(&identity);
+            if (status != NO_ERROR) {
+                return status;
             }
-            std::string opPackageNameStr(opPackageName);
-            sp<IMediaPlayer> player = create(client, audioSessionId, opPackageNameStr);
+            sp<IMediaPlayer> player = create(client, audioSessionId, identity);
             reply->writeStrongBinder(IInterface::asBinder(player));
             return NO_ERROR;
         } break;
         case CREATE_MEDIA_RECORDER: {
             CHECK_INTERFACE(IMediaPlayerService, data, reply);
-            const String16 opPackageName = data.readString16();
-            sp<IMediaRecorder> recorder = createMediaRecorder(opPackageName);
+            Identity identity;
+            status_t status = data.readParcelable(&identity);
+            if (status != NO_ERROR) {
+                return status;
+            }
+            sp<IMediaRecorder> recorder = createMediaRecorder(identity);
             reply->writeStrongBinder(IInterface::asBinder(recorder));
             return NO_ERROR;
         } break;
diff --git a/media/libmedia/IMediaRecorder.cpp b/media/libmedia/IMediaRecorder.cpp
index ac86f72..154988d 100644
--- a/media/libmedia/IMediaRecorder.cpp
+++ b/media/libmedia/IMediaRecorder.cpp
@@ -66,6 +66,7 @@
     ENABLE_AUDIO_DEVICE_CALLBACK,
     GET_ACTIVE_MICROPHONES,
     GET_PORT_ID,
+    GET_RTP_DATA_USAGE,
     SET_PREFERRED_MICROPHONE_DIRECTION,
     SET_PREFERRED_MICROPHONE_FIELD_DIMENSION,
     SET_PRIVACY_SENSITIVE,
@@ -476,6 +477,23 @@
         *portId = (audio_port_handle_t)reply.readInt32();
         return NO_ERROR;
     }
+
+    status_t getRtpDataUsage(uint64_t *bytes)
+    {
+        ALOGV("getRtpDataUsage");
+        if (bytes == nullptr) {
+            return BAD_VALUE;
+        }
+        Parcel data, reply;
+        data.writeInterfaceToken(IMediaRecorder::getInterfaceDescriptor());
+        status_t status = remote()->transact(GET_RTP_DATA_USAGE, data, &reply);
+        if (status != OK
+                || (status = (status_t)reply.readInt32()) != NO_ERROR) {
+            *bytes = 0;
+            return status;
+        }
+        return reply.readUint64(bytes);
+    }
 };
 
 IMPLEMENT_META_INTERFACE(MediaRecorder, "android.media.IMediaRecorder");
@@ -759,6 +777,17 @@
             }
             return NO_ERROR;
         }
+        case GET_RTP_DATA_USAGE: {
+            ALOGV("GET_RTP_DATA_USAGE");
+            CHECK_INTERFACE(IMediaRecorder, data, reply);
+            uint64_t bytes;
+            status_t status = getRtpDataUsage(&bytes);
+            reply->writeInt32(status);
+            if (status == NO_ERROR) {
+                reply->writeUint64(bytes);
+            }
+            return NO_ERROR;
+        }
         case SET_PREFERRED_MICROPHONE_DIRECTION: {
             ALOGV("SET_PREFERRED_MICROPHONE_DIRECTION");
             CHECK_INTERFACE(IMediaRecorder, data, reply);
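
For reference, the wire format of the new GET_RTP_DATA_USAGE transaction as implemented above (a comment-only summary of the Bp/Bn code, not an addition to it):

    // request: [ IMediaRecorder interface token ]
    // reply:   [ status : int32 ][ bytes : uint64, written only when status == NO_ERROR ]
    // The proxy reads the status first; on NO_ERROR it reads the uint64 into *bytes,
    // otherwise it zeroes *bytes and returns the failed status to the caller.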
diff --git a/media/libmedia/include/media/IMediaPlayer.h b/media/libmedia/include/media/IMediaPlayer.h
index 3548a1e..28684d1 100644
--- a/media/libmedia/include/media/IMediaPlayer.h
+++ b/media/libmedia/include/media/IMediaPlayer.h
@@ -137,6 +137,56 @@
     virtual status_t        setOutputDevice(audio_port_handle_t deviceId) = 0;
     virtual status_t        getRoutedDeviceId(audio_port_handle_t *deviceId) = 0;
     virtual status_t        enableAudioDeviceCallback(bool enabled) = 0;
+protected:
+
+    friend class IMediaPlayerTest;
+    enum {
+        DISCONNECT = IBinder::FIRST_CALL_TRANSACTION,
+        SET_DATA_SOURCE_URL,
+        SET_DATA_SOURCE_FD,
+        SET_DATA_SOURCE_STREAM,
+        SET_DATA_SOURCE_CALLBACK,
+        SET_DATA_SOURCE_RTP,
+        SET_BUFFERING_SETTINGS,
+        GET_BUFFERING_SETTINGS,
+        PREPARE_ASYNC,
+        START,
+        STOP,
+        IS_PLAYING,
+        SET_PLAYBACK_SETTINGS,
+        GET_PLAYBACK_SETTINGS,
+        SET_SYNC_SETTINGS,
+        GET_SYNC_SETTINGS,
+        PAUSE,
+        SEEK_TO,
+        GET_CURRENT_POSITION,
+        GET_DURATION,
+        RESET,
+        NOTIFY_AT,
+        SET_AUDIO_STREAM_TYPE,
+        SET_LOOPING,
+        SET_VOLUME,
+        INVOKE,
+        SET_METADATA_FILTER,
+        GET_METADATA,
+        SET_AUX_EFFECT_SEND_LEVEL,
+        ATTACH_AUX_EFFECT,
+        SET_VIDEO_SURFACETEXTURE,
+        SET_PARAMETER,
+        GET_PARAMETER,
+        SET_RETRANSMIT_ENDPOINT,
+        GET_RETRANSMIT_ENDPOINT,
+        SET_NEXT_PLAYER,
+        APPLY_VOLUME_SHAPER,
+        GET_VOLUME_SHAPER_STATE,
+        // Modular DRM
+        PREPARE_DRM,
+        RELEASE_DRM,
+        // AudioRouting
+        SET_OUTPUT_DEVICE,
+        GET_ROUTED_DEVICE_ID,
+        ENABLE_AUDIO_DEVICE_CALLBACK,
+    };
 };
 
 // ----------------------------------------------------------------------------
diff --git a/media/libmedia/include/media/IMediaPlayerService.h b/media/libmedia/include/media/IMediaPlayerService.h
index a4207eb..243e9c7 100644
--- a/media/libmedia/include/media/IMediaPlayerService.h
+++ b/media/libmedia/include/media/IMediaPlayerService.h
@@ -27,6 +27,7 @@
 
 #include <media/IMediaPlayerClient.h>
 #include <media/IMediaMetadataRetriever.h>
+#include <android/media/permission/Identity.h>
 
 #include <string>
 
@@ -46,11 +47,13 @@
 public:
     DECLARE_META_INTERFACE(MediaPlayerService);
 
-    virtual sp<IMediaRecorder> createMediaRecorder(const String16 &opPackageName) = 0;
+    virtual sp<IMediaRecorder> createMediaRecorder(
+        const android::media::permission::Identity &identity) = 0;
     virtual sp<IMediaMetadataRetriever> createMetadataRetriever() = 0;
     virtual sp<IMediaPlayer> create(const sp<IMediaPlayerClient>& client,
             audio_session_t audioSessionId = AUDIO_SESSION_ALLOCATE,
-            const std::string opPackage = "") = 0;
+            const android::media::permission::Identity& identity =
+                android::media::permission::Identity()) = 0;
     virtual sp<IMediaCodecList> getCodecList() const = 0;
 
     // Connects to a remote display.
diff --git a/media/libmedia/include/media/IMediaRecorder.h b/media/libmedia/include/media/IMediaRecorder.h
index 651bd5e..6e69782 100644
--- a/media/libmedia/include/media/IMediaRecorder.h
+++ b/media/libmedia/include/media/IMediaRecorder.h
@@ -78,6 +78,7 @@
     virtual status_t setPreferredMicrophoneDirection(audio_microphone_direction_t direction) = 0;
     virtual status_t setPreferredMicrophoneFieldDimension(float zoom) = 0;
     virtual status_t getPortId(audio_port_handle_t *portId) = 0;
+    virtual status_t getRtpDataUsage(uint64_t *bytes) = 0;
 };
 
 // ----------------------------------------------------------------------------
diff --git a/media/libmedia/include/media/MediaRecorderBase.h b/media/libmedia/include/media/MediaRecorderBase.h
index 8493f64..b5325ce 100644
--- a/media/libmedia/include/media/MediaRecorderBase.h
+++ b/media/libmedia/include/media/MediaRecorderBase.h
@@ -21,6 +21,7 @@
 #include <media/AudioSystem.h>
 #include <media/MicrophoneInfo.h>
 #include <media/mediarecorder.h>
+#include <android/media/permission/Identity.h>
 
 #include <system/audio.h>
 
@@ -33,8 +34,8 @@
 struct PersistentSurface;
 
 struct MediaRecorderBase {
-    MediaRecorderBase(const String16 &opPackageName)
-        : mOpPackageName(opPackageName) {}
+    explicit MediaRecorderBase(const media::permission::Identity &client)
+        : mClient(client) {}
     virtual ~MediaRecorderBase() {}
 
     virtual status_t init() = 0;
@@ -77,12 +78,13 @@
     virtual status_t setPreferredMicrophoneDirection(audio_microphone_direction_t direction) = 0;
     virtual status_t setPreferredMicrophoneFieldDimension(float zoom) = 0;
     virtual status_t getPortId(audio_port_handle_t *portId) const = 0;
+    virtual status_t getRtpDataUsage(uint64_t *bytes) = 0;
 
 
 
 protected:
 
-    String16 mOpPackageName;
+    media::permission::Identity mClient;
 
 private:
     MediaRecorderBase(const MediaRecorderBase &);
diff --git a/media/libmedia/include/media/mediaplayer.h b/media/libmedia/include/media/mediaplayer.h
index 71c0bc5..fbba398 100644
--- a/media/libmedia/include/media/mediaplayer.h
+++ b/media/libmedia/include/media/mediaplayer.h
@@ -29,6 +29,7 @@
 #include <media/IMediaPlayer.h>
 #include <media/IMediaDeathNotifier.h>
 #include <media/IStreamSource.h>
+#include <android/media/permission/Identity.h>
 
 #include <utils/KeyedVector.h>
 #include <utils/String8.h>
@@ -211,7 +212,8 @@
                     public virtual IMediaDeathNotifier
 {
 public:
-    MediaPlayer(const std::string opPackageName = "");
+    explicit MediaPlayer(const android::media::permission::Identity& identity =
+        android::media::permission::Identity());
     ~MediaPlayer();
             void            died();
             void            disconnect();
@@ -315,7 +317,7 @@
     float                       mSendLevel;
     struct sockaddr_in          mRetransmitEndpoint;
     bool                        mRetransmitEndpointValid;
-    const std::string           mOpPackageName;
+    const android::media::permission::Identity mIdentity;
 };
 
 }; // namespace android
diff --git a/media/libmedia/include/media/mediarecorder.h b/media/libmedia/include/media/mediarecorder.h
index fbcdb28..96a3293 100644
--- a/media/libmedia/include/media/mediarecorder.h
+++ b/media/libmedia/include/media/mediarecorder.h
@@ -25,6 +25,7 @@
 #include <media/IMediaRecorderClient.h>
 #include <media/IMediaDeathNotifier.h>
 #include <media/MicrophoneInfo.h>
+#include <android/media/permission/Identity.h>
 
 namespace android {
 
@@ -226,7 +227,7 @@
                       public virtual IMediaDeathNotifier
 {
 public:
-    MediaRecorder(const String16& opPackageName);
+    explicit MediaRecorder(const media::permission::Identity& identity);
     ~MediaRecorder();
 
     void        died();
@@ -270,6 +271,7 @@
     status_t    setPreferredMicrophoneFieldDimension(float zoom);
 
     status_t    getPortId(audio_port_handle_t *portId) const;
+    status_t    getRtpDataUsage(uint64_t *bytes);
 
 private:
     void                    doCleanUp();
diff --git a/media/libmedia/mediaplayer.cpp b/media/libmedia/mediaplayer.cpp
index 30c5006..7504787 100644
--- a/media/libmedia/mediaplayer.cpp
+++ b/media/libmedia/mediaplayer.cpp
@@ -40,8 +40,9 @@
 namespace android {
 
 using media::VolumeShaper;
+using media::permission::Identity;
 
-MediaPlayer::MediaPlayer(const std::string opPackageName) : mOpPackageName(opPackageName)
+MediaPlayer::MediaPlayer(const Identity& identity) : mIdentity(identity)
 {
     ALOGV("constructor");
     mListener = NULL;
@@ -152,7 +153,7 @@
     if (url != NULL) {
         const sp<IMediaPlayerService> service(getMediaPlayerService());
         if (service != 0) {
-            sp<IMediaPlayer> player(service->create(this, mAudioSessionId, mOpPackageName));
+            sp<IMediaPlayer> player(service->create(this, mAudioSessionId, mIdentity));
             if ((NO_ERROR != doSetRetransmitEndpoint(player)) ||
                 (NO_ERROR != player->setDataSource(httpService, url, headers))) {
                 player.clear();
@@ -169,7 +170,7 @@
     status_t err = UNKNOWN_ERROR;
     const sp<IMediaPlayerService> service(getMediaPlayerService());
     if (service != 0) {
-        sp<IMediaPlayer> player(service->create(this, mAudioSessionId, mOpPackageName));
+        sp<IMediaPlayer> player(service->create(this, mAudioSessionId, mIdentity));
         if ((NO_ERROR != doSetRetransmitEndpoint(player)) ||
             (NO_ERROR != player->setDataSource(fd, offset, length))) {
             player.clear();
@@ -185,7 +186,7 @@
     status_t err = UNKNOWN_ERROR;
     const sp<IMediaPlayerService> service(getMediaPlayerService());
     if (service != 0) {
-        sp<IMediaPlayer> player(service->create(this, mAudioSessionId, mOpPackageName));
+        sp<IMediaPlayer> player(service->create(this, mAudioSessionId, mIdentity));
         if ((NO_ERROR != doSetRetransmitEndpoint(player)) ||
             (NO_ERROR != player->setDataSource(source))) {
             player.clear();
@@ -201,7 +202,7 @@
     status_t err = UNKNOWN_ERROR;
     const sp<IMediaPlayerService> service(getMediaPlayerService());
     if (service != 0) {
-        sp<IMediaPlayer> player(service->create(this, mAudioSessionId, mOpPackageName));
+        sp<IMediaPlayer> player(service->create(this, mAudioSessionId, mIdentity));
         if ((NO_ERROR != doSetRetransmitEndpoint(player)) ||
             (NO_ERROR != player->setDataSource(rtpParams))) {
             player.clear();
diff --git a/media/libmedia/mediarecorder.cpp b/media/libmedia/mediarecorder.cpp
index d9d1f25..da2b190 100644
--- a/media/libmedia/mediarecorder.cpp
+++ b/media/libmedia/mediarecorder.cpp
@@ -33,6 +33,8 @@
 
 namespace android {
 
+using media::permission::Identity;
+
 status_t MediaRecorder::setCamera(const sp<hardware::ICamera>& camera,
         const sp<ICameraRecordingProxy>& proxy)
 {
@@ -758,13 +760,13 @@
     return INVALID_OPERATION;
 }
 
-MediaRecorder::MediaRecorder(const String16& opPackageName) : mSurfaceMediaSource(NULL)
+MediaRecorder::MediaRecorder(const Identity &identity) : mSurfaceMediaSource(NULL)
 {
     ALOGV("constructor");
 
     const sp<IMediaPlayerService> service(getMediaPlayerService());
     if (service != NULL) {
-        mMediaRecorder = service->createMediaRecorder(opPackageName);
+        mMediaRecorder = service->createMediaRecorder(identity);
     }
     if (mMediaRecorder != NULL) {
         mCurrentState = MEDIA_RECORDER_IDLE;
@@ -913,4 +915,14 @@
     return mMediaRecorder->getPortId(portId);
 }
 
+status_t MediaRecorder::getRtpDataUsage(uint64_t *bytes)
+{
+    ALOGV("getRtpDataUsage");
+
+    if (mMediaRecorder == NULL) {
+        ALOGE("media recorder is not initialized yet");
+        return INVALID_OPERATION;
+    }
+    return mMediaRecorder->getRtpDataUsage(bytes);
+}
 } // namespace android
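
As a usage sketch for the new Identity-based constructor (the Identity field names below are assumptions about the AIDL-generated parcelable; they are not spelled out in this diff):

    #include <unistd.h>
    #include <android/media/permission/Identity.h>
    #include <media/mediarecorder.h>

    int main() {
        android::media::permission::Identity identity;
        identity.uid = static_cast<int32_t>(getuid());   // assumed field name
        identity.packageName = "com.example.recorder";   // assumed field name
        android::sp<android::MediaRecorder> recorder = new android::MediaRecorder(identity);
        return 0;
    }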
diff --git a/media/libmedia/tests/fuzzer/Android.bp b/media/libmedia/tests/fuzzer/Android.bp
new file mode 100644
index 0000000..c03b5b1
--- /dev/null
+++ b/media/libmedia/tests/fuzzer/Android.bp
@@ -0,0 +1,19 @@
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_media_libmedia_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_media_libmedia_license"],
+}
+
+cc_fuzz {
+  name: "libmedia_metadata_fuzzer",
+  srcs: [
+    "libmedia_metadata_fuzzer.cpp",
+  ],
+  shared_libs: [
+    "libmedia",
+    "libbinder",
+  ],
+}
diff --git a/media/libmedia/tests/fuzzer/libmedia_metadata_fuzzer.cpp b/media/libmedia/tests/fuzzer/libmedia_metadata_fuzzer.cpp
new file mode 100644
index 0000000..058e4e5
--- /dev/null
+++ b/media/libmedia/tests/fuzzer/libmedia_metadata_fuzzer.cpp
@@ -0,0 +1,52 @@
+// This program fuzzes Metadata.cpp
+
+#include <stddef.h>
+#include <stdint.h>
+#include <fuzzer/FuzzedDataProvider.h>
+#include <media/Metadata.h>
+#include <binder/Parcel.h>
+
+using namespace android;
+using namespace media;
+
+static const float want_prob = 0.5;
+
+bool bytesRemain(FuzzedDataProvider *fdp);
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t *data, size_t size) {
+    FuzzedDataProvider fdp(data, size);
+    Parcel p;
+    Metadata md = Metadata(&p);
+
+    md.appendHeader();
+    while (bytesRemain(&fdp)) {
+
+        float got_prob = fdp.ConsumeProbability<float>();
+        if (!bytesRemain(&fdp)) {
+            break;
+        }
+
+        if (got_prob < want_prob) {
+            int32_t key_bool = fdp.ConsumeIntegral<int32_t>();
+            if (!bytesRemain(&fdp)) {
+                break;
+            }
+            bool val_bool = fdp.ConsumeBool();
+            md.appendBool(key_bool, val_bool);
+        } else {
+            int32_t key_int32 = fdp.ConsumeIntegral<int32_t>();
+            if (!bytesRemain(&fdp)) {
+                break;
+            }
+            int32_t val_int32 = fdp.ConsumeIntegral<int32_t>();
+            md.appendInt32(key_int32, val_int32);
+        }
+        md.updateLength();
+    }
+    md.resetParcel();
+    return 0;
+}
+
+bool bytesRemain(FuzzedDataProvider *fdp) {
+    return fdp->remaining_bytes() > 0;
+}
\ No newline at end of file
diff --git a/media/libmedia/tests/mediaplayer/Android.bp b/media/libmedia/tests/mediaplayer/Android.bp
new file mode 100644
index 0000000..50f35ea
--- /dev/null
+++ b/media/libmedia/tests/mediaplayer/Android.bp
@@ -0,0 +1,49 @@
+/*
+ * Copyright 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_media_libmedia_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_media_libmedia_license"],
+}
+
+cc_test {
+    name: "IMediaPlayerTest",
+    test_suites: ["device-tests", "mts"],
+    gtest: true,
+
+    srcs: [
+        "IMediaPlayerTest.cpp",
+    ],
+
+    shared_libs: [
+        "libbinder",
+        "liblog",
+        "libmedia",
+        "libstagefright",
+        "libstagefright_foundation",
+        "libutils",
+        "media_permission-aidl-cpp",
+    ],
+    compile_multilib: "first",
+    cflags: [
+        "-Werror",
+        "-Wall",
+    ],
+}
diff --git a/media/libmedia/tests/mediaplayer/IMediaPlayerTest.cpp b/media/libmedia/tests/mediaplayer/IMediaPlayerTest.cpp
new file mode 100644
index 0000000..cc60933
--- /dev/null
+++ b/media/libmedia/tests/mediaplayer/IMediaPlayerTest.cpp
@@ -0,0 +1,73 @@
+/*
+ * Copyright 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <binder/IServiceManager.h>
+#include <binder/Parcel.h>
+#include <gtest/gtest.h>
+#include <media/stagefright/foundation/ABuffer.h>
+#include <media/IMediaPlayer.h>
+#include <media/IMediaPlayerService.h>
+#include <media/mediaplayer.h>
+
+namespace android {
+
+constexpr uint8_t kMockByteArray[16] = {};
+
+class IMediaPlayerTest : public testing::Test {
+  protected:
+    static constexpr uint32_t PREPARE_DRM = IMediaPlayer::PREPARE_DRM;
+
+    void SetUp() override {
+        mediaPlayer_ = sp<MediaPlayer>::make();
+        sp<IServiceManager> serviceManager = defaultServiceManager();
+        sp<IBinder> mediaPlayerService = serviceManager->getService(String16("media.player"));
+        sp<IMediaPlayerService> iMediaPlayerService =
+                IMediaPlayerService::asInterface(mediaPlayerService);
+        iMediaPlayer_ = iMediaPlayerService->create(mediaPlayer_);
+    }
+
+    sp<MediaPlayer> mediaPlayer_;
+    sp<IMediaPlayer> iMediaPlayer_;
+};
+
+TEST_F(IMediaPlayerTest, PrepareDrmInvalidTransaction) {
+    Parcel data, reply;
+    data.writeInterfaceToken(iMediaPlayer_->getInterfaceDescriptor());
+    data.write(kMockByteArray, 16);
+
+    // We write a length greater than the following session id array. Should be discarded.
+    data.writeUint32(2);
+    data.writeUnpadded(kMockByteArray, 1);
+
+    status_t result = IMediaPlayer::asBinder(iMediaPlayer_)
+            ->transact(PREPARE_DRM, data, &reply);
+    ASSERT_EQ(result, BAD_VALUE);
+}
+
+TEST_F(IMediaPlayerTest, PrepareDrmValidTransaction) {
+    Parcel data, reply;
+    data.writeInterfaceToken(iMediaPlayer_->getInterfaceDescriptor());
+    data.write(kMockByteArray, 16);
+
+    // We write a length equal to the length of the following data. The transaction should be valid.
+    data.writeUint32(1);
+    data.write(kMockByteArray, 1);
+
+    status_t result = IMediaPlayer::asBinder(iMediaPlayer_)
+            ->transact(PREPARE_DRM, data, &reply);
+    ASSERT_EQ(result, OK);
+}
+}  // namespace android
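
For reference, the Parcel layout the first test sends and how the hardened readVector() in IMediaPlayer.cpp treats it (field widths follow the writes in the test):

    // PrepareDrmInvalidTransaction writes:
    //   [ interface token ][ uuid : 16 bytes ][ size : uint32 = 2 ][ payload : 1 unpadded byte ]
    // On the service side, readVector() reads size = 2, sees only 1 byte left via dataAvail(),
    // fails the size <= dataAvail() check and returns BAD_VALUE, which the PREPARE_DRM case
    // now propagates as the transaction result the test asserts.
    // PrepareDrmValidTransaction writes size = 1 followed by 1 byte, so the same check passes.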
diff --git a/media/libmediaformatshaper/Android.bp b/media/libmediaformatshaper/Android.bp
new file mode 100644
index 0000000..3107e12
--- /dev/null
+++ b/media/libmediaformatshaper/Android.bp
@@ -0,0 +1,101 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+// these headers include the structure of needed function pointers
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
+cc_library_headers {
+    name: "libmediaformatshaper_headers",
+    export_include_dirs: ["include"],
+    apex_available: [
+        "//apex_available:platform",
+        "com.android.media",
+    ],
+    min_sdk_version: "29",
+    host_supported: true,
+    target: {
+        darwin: {
+            enabled: false,
+        },
+    },
+}
+
+cc_defaults {
+    name: "libmediaformatshaper_defaults",
+    srcs: [
+        "CodecProperties.cpp",
+        "CodecSeeding.cpp",
+        "FormatShaper.cpp",
+        "ManageShapingCodecs.cpp",
+        "VideoShaper.cpp",
+        "VQApply.cpp",
+    ],
+
+    local_include_dirs: [
+        "include",
+    ],
+
+    shared_libs: [
+        "liblog",
+        "libutils",
+    ],
+
+    cflags: [
+        "-Werror",
+        "-Wall",
+        "-fvisibility=hidden",
+        "-Wthread-safety",                      // enables GUARDED_BY()
+    ],
+
+    target: {
+        android: {
+            shared_libs: [
+                "libmediandk#29",
+            ],
+        },
+    },
+
+    sanitize: {
+        cfi: true,
+        misc_undefined: [
+            "unsigned-integer-overflow",
+            "signed-integer-overflow",
+        ],
+    },
+}
+
+cc_library {
+    name: "libmediaformatshaper",
+    defaults: ["libmediaformatshaper_defaults"],
+
+    min_sdk_version: "29",
+
+    apex_available: [
+        "//apex_available:platform",
+        "com.android.media",
+    ],
+
+    version_script: "exports.lds",
+
+}
diff --git a/media/libmediaformatshaper/CodecProperties.cpp b/media/libmediaformatshaper/CodecProperties.cpp
new file mode 100644
index 0000000..e6b3c46
--- /dev/null
+++ b/media/libmediaformatshaper/CodecProperties.cpp
@@ -0,0 +1,352 @@
+/*
+ * Copyright 2021, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "CodecProperties"
+#include <utils/Log.h>
+
+#include <string>
+#include <stdlib.h>
+
+#include <media/formatshaper/CodecProperties.h>
+
+
+// we aren't going to mess with shaping point dimensions beyond this
+static const int32_t DIMENSION_LIMIT = 16384;
+
+namespace android {
+namespace mediaformatshaper {
+
+CodecProperties::CodecProperties(std::string name, std::string mediaType) {
+    ALOGV("CodecProperties(%s, %s)", name.c_str(), mediaType.c_str());
+    mName = name;
+    mMediaType = mediaType;
+}
+
+std::string CodecProperties::getName(){
+    return mName;
+}
+
+std::string CodecProperties::getMediaType(){
+    return mMediaType;
+}
+
+int CodecProperties::supportedMinimumQuality() {
+    return mMinimumQuality;
+}
+void CodecProperties::setSupportedMinimumQuality(int vmaf) {
+    mMinimumQuality = vmaf;
+}
+
+int CodecProperties::targetQpMax() {
+    return mTargetQpMax;
+}
+void CodecProperties::setTargetQpMax(int qpMax) {
+    mTargetQpMax = qpMax;
+}
+
+// what API is this codec set up for (e.g. API of the associated partition)
+// vendor-side (OEM) codecs may be older, due to 'vendor freeze' and treble
+int CodecProperties::supportedApi() {
+    return mApi;
+}
+
+void CodecProperties::setFeatureValue(std::string key, int32_t value) {
+    ALOGD("setFeatureValue(%s,%d)", key.c_str(), value);
+    mFeatures.insert({key, value});
+
+    if (!strcmp(key.c_str(), "qp-bounds")) {               // official key
+        setSupportsQp(1);
+    } else if (!strcmp(key.c_str(), "vq-supports-qp")) {   // key from prototyping
+        setSupportsQp(1);
+    } else if (!strcmp(key.c_str(), "vq-minimum-quality")) {
+        setSupportedMinimumQuality(1);
+    }
+}
+
+bool CodecProperties::getFeatureValue(std::string key, int32_t *valuep) {
+    ALOGV("getFeatureValue(%s)", key.c_str());
+    if (valuep == nullptr) {
+        return false;
+    }
+    auto mapped = mFeatures.find(key);
+    if (mapped != mFeatures.end()) {
+        *valuep = mapped->second;
+        return true;
+    }
+    return false;
+}
+
+// Tuning values (which differ from Features)
+// this is where we set up things like target bitrates and QP ranges
+// NB the tuning values arrive as strings, allowing us to convert them into an appropriate
+// format (int, float, ranges, other combinations)
+//
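+// For example (values here are purely illustrative):
+//   setTuningValue("vq-target-qpmax", "40")        -> parsed with strtol() into a QP bound
+//   setTuningValue("vq-target-bpp", "2.45")        -> parsed with strtod() into a double
+//   setTuningValue("vq-target-bpp-1080p", "2.40")  -> recorded as a per-resolution bpp point
+//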
+void CodecProperties::setTuningValue(std::string key, std::string value) {
+    ALOGD("setTuningValue(%s,%s)", key.c_str(), value.c_str());
+    mTunings.insert({key, value});
+
+    bool legal = false;
+    // NB: old school strtol() because std::stoi() throws exceptions
+    if (!strcmp(key.c_str(), "vq-target-qpmax")) {
+        const char *p = value.c_str();
+        char *q;
+        int32_t iValue =  strtol(p, &q, 0);
+        if (q != p) {
+            setTargetQpMax(iValue);
+            legal = true;
+        }
+    } else if (!strcmp(key.c_str(), "vq-target-bpp")) {
+        const char *p = value.c_str();
+        char *q;
+        double bpp = strtod(p, &q);
+        if (q != p) {
+            setBpp(bpp);
+            legal = true;
+        }
+    } else if (!strncmp(key.c_str(), "vq-target-bpp-", strlen("vq-target-bpp-"))) {
+            std::string resolution = key.substr(strlen("vq-target-bpp-"));
+            if (bppPoint(resolution, value)) {
+                legal = true;
+            }
+    } else if (!strcmp(key.c_str(), "vq-target-bppx100")) {
+        // legacy, prototyping
+        const char *p = value.c_str();
+        char *q;
+        int32_t iValue =  strtol(p, &q, 0);
+        if (q != p) {
+            double bpp = iValue / 100.0;
+            setBpp(bpp);
+            legal = true;
+        }
+    } else {
+        legal = true;
+    }
+
+    if (!legal) {
+        ALOGW("setTuningValue() unable to apply tuning '%s' with value '%s'",
+              key.c_str(), value.c_str());
+    }
+    return;
+}
+
+bool CodecProperties::getTuningValue(std::string key, std::string &value) {
+    ALOGV("getTuningValue(%s)", key.c_str());
+    auto mapped = mTunings.find(key);   // tunings, not features: setTuningValue() stores into mTunings
+    if (mapped != mFeatures.end()) {
+        value = mapped->second;
+        return true;
+    }
+    return false;
+}
+
+bool CodecProperties::bppPoint(std::string resolution, std::string value) {
+
+    int32_t width = 0;
+    int32_t height = 0;
+    double bpp = -1;
+
+    // resolution is "WxH", "W*H" or a standard name like "720p"
+    if (resolution == "1080p") {
+        width = 1080; height = 1920;
+    } else if (resolution == "720p") {
+        width = 720; height = 1280;
+    } else if (resolution == "540p") {
+        width = 540; height = 960;
+    } else if (resolution == "480p") {
+        width = 480; height = 854;
+    } else {
+        size_t sep = resolution.find('x');
+        if (sep == std::string::npos) {
+            sep = resolution.find('*');
+        }
+        if (sep == std::string::npos) {
+            ALOGW("unable to parse resolution: '%s'", resolution.c_str());
+            return false;
+        }
+        std::string w = resolution.substr(0, sep);
+        std::string h = resolution.substr(sep+1);
+
+        char *q;
+        const char *p = w.c_str();
+        width = strtol(p, &q, 0);
+        if (q == p) {
+                width = -1;
+        }
+        p = h.c_str();
+        height = strtol(p, &q, 0);
+        if (q == p) {
+                height = -1;
+        }
+        if (width <= 0 || height <= 0 || width > DIMENSION_LIMIT || height > DIMENSION_LIMIT) {
+            ALOGW("unparseable: width, height '%s'", resolution.c_str());
+            return false;
+        }
+    }
+
+    const char *p = value.c_str();
+    char *q;
+    bpp = strtod(p, &q);
+    if (q == p) {
+        ALOGW("unparseable bpp '%s'", value.c_str());
+        return false;
+    }
+
+    struct bpp_point *point = (struct bpp_point*) malloc(sizeof(*point));
+    if (point == nullptr) {
+        ALOGW("unable to allocate memory for bpp point");
+        return false;
+    }
+
+    point->pixels = width * height;
+    point->width = width;
+    point->height = height;
+    point->bpp = bpp;
+
+    if (mBppPoints == nullptr) {
+        point->next = nullptr;
+        mBppPoints = point;
+    } else if (point->pixels < mBppPoints->pixels) {
+        // at the front
+        point->next = mBppPoints;
+        mBppPoints = point;
+    } else {
+        struct bpp_point *after = mBppPoints;
+        while (after->next) {
+            if (point->pixels > after->next->pixels) {
+                after = after->next;
+                continue;
+            }
+
+            // insert before after->next
+            point->next = after->next;
+            after->next = point;
+            break;
+        }
+        if (after->next == nullptr) {
+            // hasn't gone in yet
+            point->next = nullptr;
+            after->next = point;
+        }
+    }
+
+    return true;
+}
+
+double CodecProperties::getBpp(int32_t width, int32_t height) {
+    // look in the per-resolution list
+
+    int32_t pixels = width * height;
+
+    if (mBppPoints) {
+        struct bpp_point *point = mBppPoints;
+        while (point && point->pixels < pixels) {
+            point = point->next;
+        }
+        if (point) {
+            ALOGV("getBpp(w=%d,h=%d) returns %f from bpppoint w=%d h=%d",
+                width, height, point->bpp, point->width, point->height);
+            return point->bpp;
+        }
+    }
+
+    ALOGV("defaulting to %f bpp", mBpp);
+    return mBpp;
+}
+
+std::string CodecProperties::getMapping(std::string key, std::string kind) {
+    ALOGV("getMapping(key %s, kind %s )", key.c_str(), kind.c_str());
+    // look the (kind, key) pair up in mMappings
+    auto mapped = mMappings.find(kind + "-" + key);
+    if (mapped != mMappings.end()) {
+        std::string result = mapped->second;
+        ALOGV("getMapping(%s, %s) -> %s", key.c_str(), kind.c_str(), result.c_str());
+        return result;
+    }
+    ALOGV("nope, return unchanged key");
+    return key;
+}
+
+
+// really a bit of debugging code here.
+void CodecProperties::showMappings() {
+    ALOGD("Mappings:");
+    int count = 0;
+    for (const auto& [key, value] : mMappings) {
+         count++;
+         ALOGD("'%s' -> '%s'", key.c_str(), value.c_str());
+    }
+    ALOGD("total %d mappings", count);
+}
+
+void CodecProperties::setMapping(std::string kind, std::string key, std::string value) {
+    ALOGV("setMapping(%s,%s,%s)", kind.c_str(), key.c_str(), value.c_str());
+    std::string metaKey = kind + "-" + key;
+    mMappings.insert({metaKey, value});
+}
+
+const char **CodecProperties::getMappings(std::string kind, bool reverse) {
+    ALOGV("getMappings(kind %s, reverse %d", kind.c_str(), reverse);
+    // how many do we need?
+    int count = mMappings.size();
+    if (count == 0) {
+        ALOGV("empty mappings");
+        return nullptr;
+    }
+    size_t size = sizeof(char *) * (2 * count + 2);
+    const char **result = (const char **)malloc(size);
+    if (result == nullptr) {
+        ALOGW("no memory to return mappings");
+        return nullptr;
+    }
+    memset(result, '\0', size);
+
+    const char **pp = result;
+    for (const auto& [key, value] : mMappings) {
+        // split out the kind/key
+        size_t pos = key.find('-');
+        if (pos == std::string::npos) {
+            ALOGD("ignoring malformed key: %s", key.c_str());
+            continue;
+        }
+        std::string actualKind = key.substr(0,pos);
+        if (kind.length() != 0 && kind != actualKind) {
+            ALOGD("kinds don't match: want '%s' got '%s'", kind.c_str(), actualKind.c_str());
+            continue;
+        }
+        if (reverse) {
+            // codec specific -> std aka 'unmapping'
+            pp[0] = strdup( value.c_str());
+            pp[1] = strdup( key.substr(pos+1).c_str());
+        } else {
+            // std -> codec specific
+            pp[0] = strdup( key.substr(pos+1).c_str());
+            pp[1] = strdup( value.c_str());
+        }
+        ALOGV(" %s -> %s", pp[0], pp[1]);
+        pp += 2;
+    }
+
+    pp[0] = nullptr;
+    pp[1] = nullptr;
+
+    return result;
+}
+
+
+} // namespace mediaformatshaper
+} // namespace android
+
diff --git a/media/libmediaformatshaper/CodecSeeding.cpp b/media/libmediaformatshaper/CodecSeeding.cpp
new file mode 100644
index 0000000..a7fcc66
--- /dev/null
+++ b/media/libmediaformatshaper/CodecSeeding.cpp
@@ -0,0 +1,145 @@
+/*
+ * Copyright 2021, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "CodecSeeding"
+#include <utils/Log.h>
+
+#include <string>
+
+#include <media/formatshaper/CodecProperties.h>
+
+namespace android {
+namespace mediaformatshaper {
+
+/*
+ * a block of pre-loaded tunings for codecs.
+ *
+ * things the library seeds into the codecproperties based
+ * on the mediaType.
+ * XXX: parsing from a file is likely better than embedding in code.
+ */
+typedef struct {
+    bool overrideable;
+    const char *key;
+    const char *value;
+} preloadTuning_t;
+
+typedef struct {
+    const char *mediaType;
+    preloadTuning_t *features;
+} preloadTunings_t;
+
+/*
+ * 2.4 bits per pixel-per-second (240 in the legacy bpp-x-100 form) == ~5.0 mbps @ 1080p,
+ * ~2.2 mbps @ 720p, which is about where we want our initial floor for now.
+ */
+
+static preloadTuning_t featuresAvc[] = {
+      {true, "vq-target-bpp", "2.45"},
+      {true, "vq-target-bpp-1080p", "2.40"},
+      {true, "vq-target-bpp-540p", "2.60"},
+      {true, "vq-target-bpp-480p", "3.00"},
+      {true, "vq-target-qpmax", "40"},
+      {true, nullptr, 0}
+};
+
+static preloadTuning_t featuresHevc[] = {
+      {true, "vq-target-bpp", "2.30"},
+      {true, "vq-target-qpmax", "40"}, // nop, since hevc codecs don't declare qp support
+      {true, nullptr, 0}
+};
+
+static preloadTuning_t featuresGenericVideo[] = {
+      {true, "vq-target-bpp", "2.40"},
+      {true, nullptr, 0}
+};
+
+static preloadTunings_t preloadTunings[] = {
+    { "video/avc", featuresAvc},
+    { "video/hevc", &featuresHevc[0]},
+
+    // wildcard for any video format not already captured
+    { "video/*", &featuresGenericVideo[0]},
+
+    { nullptr, nullptr}
+};
+
+void CodecProperties::addMediaDefaults(bool overrideable) {
+    ALOGD("Seed: codec %s, mediatype %s, overrideable %d",
+          mName.c_str(), mMediaType.c_str(), overrideable);
+
+    // load me up with initial configuration data
+    int count = 0;
+    for (int i = 0; ; i++) {
+        preloadTunings_t *p = &preloadTunings[i];
+        if (p->mediaType == nullptr) {
+            break;
+        }
+        bool found = false;
+        if (strcmp(p->mediaType, mMediaType.c_str()) == 0) {
+            found = true;
+        }
+        const char *r;
+        if (!found && (r = strchr(p->mediaType, '*')) != NULL) {
+            // wildcard; check the prefix
+            size_t len = r - p->mediaType;
+            if (strncmp(p->mediaType, mMediaType.c_str(), len) == 0) {
+                found = true;
+            }
+        }
+
+        if (!found) {
+            continue;
+        }
+        ALOGV("seeding from mediaType '%s'", p->mediaType);
+
+        // walk through, filling things
+        if (p->features != nullptr) {
+            for (int j=0;; j++) {
+                preloadTuning_t *q = &p->features[j];
+                if (q->key == nullptr) {
+                    break;
+                }
+                if (q->overrideable != overrideable) {
+                    continue;
+                }
+                setTuningValue(q->key, q->value);
+                count++;
+            }
+            break;
+        }
+    }
+    ALOGV("loaded %d preset values", count);
+}
+
+// a chance, as we create the codec, to inject any default behaviors we want.
+// XXX: consider whether we need pre/post or just post. it affects what can be
+// overridden by way of the codec XML
+//
+void CodecProperties::Seed() {
+    ALOGV("Seed: for codec %s, mediatype %s", mName.c_str(), mMediaType.c_str());
+    addMediaDefaults(true);
+}
+
+void CodecProperties::Finish() {
+    ALOGV("Finish: for codec %s, mediatype %s", mName.c_str(), mMediaType.c_str());
+    addMediaDefaults(false);
+}
+
+} // namespace mediaformatshaper
+} // namespace android
+
diff --git a/media/libmediaformatshaper/FormatShaper.cpp b/media/libmediaformatshaper/FormatShaper.cpp
new file mode 100644
index 0000000..42502e0
--- /dev/null
+++ b/media/libmediaformatshaper/FormatShaper.cpp
@@ -0,0 +1,202 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "FormatShaper"
+#include <utils/Log.h>
+
+#include <string>
+#include <inttypes.h>
+
+#include <media/NdkMediaFormat.h>
+
+#include <media/formatshaper/VQops.h>
+#include <media/formatshaper/CodecProperties.h>
+#include <media/formatshaper/FormatShaper.h>
+#include <media/formatshaper/VideoShaper.h>
+
+namespace android {
+namespace mediaformatshaper {
+
+//
+// Caller retains ownership of and responsibility for inFormat
+//
+
+//
+// the interface to the outside
+//
+
+int shapeFormat(shaperHandle_t shaper, AMediaFormat* inFormat, int flags) {
+    CodecProperties *codec = (CodecProperties*) shaper;
+    if (codec == nullptr) {
+        return -1;
+    }
+    if (!codec->isRegistered()) {
+        return -1;
+    }
+
+    // run through the list of possible transformations
+    //
+
+    std::string mediaType = codec->getMediaType();
+    if (strncmp(mediaType.c_str(), "video/", 6) == 0) {
+        // video specific shaping
+        (void) videoShaper(codec, inFormat, flags);
+
+    } else if (strncmp(mediaType.c_str(), "audio/", 6) == 0) {
+        // audio specific shaping
+
+    } else {
+        ALOGV("unknown mediatype '%s', left untouched", mediaType.c_str());
+
+    }
+
+    return 0;
+}
+
+int setMap(shaperHandle_t shaper,  const char *kind, const char *key, const char *value) {
+    ALOGV("setMap: kind %s key %s -> value %s", kind, key, value);
+    CodecProperties *codec = (CodecProperties*) shaper;
+    if (codec == nullptr) {
+        return -1;
+    }
+    // must not yet be registered
+    if (codec->isRegistered()) {
+        return -1;
+    }
+
+    codec->setMapping(kind, key, value);
+    return 0;
+}
+
+int setFeature(shaperHandle_t shaper, const char *feature, int value) {
+    ALOGV("set_feature: feature %s value %d", feature, value);
+    CodecProperties *codec = (CodecProperties*) shaper;
+    if (codec == nullptr) {
+        return -1;
+    }
+    // must not yet be registered
+    if (codec->isRegistered()) {
+        return -1;
+    }
+
+    // save a map of all features
+    codec->setFeatureValue(feature, value);
+
+    return 0;
+}
+
+int setTuning(shaperHandle_t shaper, const char *tuning, const char *value) {
+    ALOGV("setTuning: tuning %s value %s", tuning, value);
+    CodecProperties *codec = (CodecProperties*) shaper;
+    if (codec == nullptr) {
+        return -1;
+    }
+    // must not yet be registered
+    if (codec->isRegistered()) {
+        return -1;
+    }
+
+    // save a map of all tunings
+    codec->setTuningValue(tuning, value);
+
+    return 0;
+}
+
+/*
+ * The routines that manage finding, creating, and registering the shapers.
+ */
+
+shaperHandle_t findShaper(const char *codecName, const char *mediaType) {
+    CodecProperties *codec = findCodec(codecName, mediaType);
+    return (shaperHandle_t) codec;
+}
+
+shaperHandle_t createShaper(const char *codecName, const char *mediaType) {
+    CodecProperties *codec = new CodecProperties(codecName, mediaType);
+    if (codec != nullptr) {
+        codec->Seed();
+    }
+    return (shaperHandle_t) codec;
+}
+
+shaperHandle_t registerShaper(shaperHandle_t shaper, const char *codecName, const char *mediaType) {
+    ALOGV("registerShaper(handle, codecName %s, mediaType %s", codecName, mediaType);
+    CodecProperties *codec = (CodecProperties*) shaper;
+    if (codec == nullptr) {
+        return nullptr;
+    }
+    // must not yet be registered
+    if (codec->isRegistered()) {
+        return nullptr;
+    }
+
+    // any final cleanup for the parameters. This allows us to override
+    // bad parameters from a device's XML file.
+    codec->Finish();
+
+    // may return a different codec, if we lost a race.
+    // if so, registerCodec() reclaims the one we tried to register for us.
+    codec = registerCodec(codec, codecName, mediaType);
+    return (shaperHandle_t) codec;
+}
+
+// mapping & unmapping
+// give me the mappings for 'kind'.
+// kind==null (or empty string), means *all* mappings
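+// the result is a from/to pair array terminated by a pair of nulls, e.g. (names illustrative):
+//   { "std-key-1", "codec-key-1", "std-key-2", "codec-key-2", nullptr, nullptr }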
+
+const char **getMappings(shaperHandle_t shaper, const char *kind) {
+    CodecProperties *codec = (CodecProperties*) shaper;
+    if (codec == nullptr)
+        return nullptr;
+    if (kind == nullptr)
+        kind = "";
+
+    return codec->getMappings(kind, /* reverse */ false);
+}
+
+const char **getReverseMappings(shaperHandle_t shaper, const char *kind) {
+    CodecProperties *codec = (CodecProperties*) shaper;
+    if (codec == nullptr)
+        return nullptr;
+    if (kind == nullptr)
+        kind = "";
+
+    return codec->getMappings(kind, /* reverse */ true);
+}
+
+
+// the system grabs this structure
+__attribute__ ((visibility ("default")))
+extern "C" FormatShaperOps_t shaper_ops = {
+    .version = SHAPER_VERSION_V1,
+
+    .findShaper = findShaper,
+    .createShaper = createShaper,
+    .setMap = setMap,
+    .setFeature = setFeature,
+    .registerShaper = registerShaper,
+
+    .shapeFormat = shapeFormat,
+    .getMappings = getMappings,
+    .getReverseMappings = getReverseMappings,
+
+    .setTuning = setTuning,
+};
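+
+// a minimal sketch (purely illustrative; the real loading code lives outside this library)
+// of how a caller might pick up this structure:
+//     void *lib = dlopen("libmediaformatshaper.so", RTLD_NOW);
+//     FormatShaperOps_t *ops = (FormatShaperOps_t *) dlsym(lib, "shaper_ops");
+//     if (ops != nullptr && ops->version == SHAPER_VERSION_V1) { /* use ops->shapeFormat, ... */ }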
+
+}  // namespace mediaformatshaper
+}  // namespace android
+
diff --git a/media/libmediaformatshaper/ManageShapingCodecs.cpp b/media/libmediaformatshaper/ManageShapingCodecs.cpp
new file mode 100644
index 0000000..bdc395f
--- /dev/null
+++ b/media/libmediaformatshaper/ManageShapingCodecs.cpp
@@ -0,0 +1,103 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "ManageShapingCodecs"
+#include <utils/Log.h>
+
+#include <mutex>
+#include <string>
+#include <inttypes.h>
+
+#include <media/NdkMediaFormat.h>
+#include <media/formatshaper/CodecProperties.h>
+
+namespace android {
+namespace mediaformatshaper {
+
+// manage the list of codec information.
+//
+// XXX: the mutex here is too heavy; rework that.
+//
+
+static std::mutex sCodecMutex;
+static std::map<std::string, CodecProperties*> sCodecTraits;
+
+CodecProperties *findCodec(const char *codecName, const char *mediaType) {
+    CodecProperties *codec = nullptr;
+
+    // synthesize a name from both codecName + mediaType
+    // some codecs support multiple media types and may have different capabilities
+    // for each media type
+    //
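+    // e.g. codecKey might be "c2.android.avc.encoder-video/avc"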
+    std::string codecKey = codecName;
+    codecKey += "-";
+    codecKey += mediaType;
+
+    std::lock_guard  _l(sCodecMutex);
+
+    auto it = sCodecTraits.find(codecKey);
+    if (it != sCodecTraits.end()) {
+        codec = it->second;
+    }
+
+    return codec;
+}
+
+CodecProperties *registerCodec(CodecProperties *codec, const char *codecName,
+                               const char *mediaType) {
+
+    CodecProperties *registeredCodec = nullptr;
+
+    if (codec->isRegistered()) {
+        return nullptr;
+    }
+
+    // synthesize a name from both codecName + mediaType
+    // some codecs support multiple media types and may have different capabilities
+    // for each media type
+    //
+    std::string codecKey = codecName;
+    codecKey += "-";
+    codecKey += mediaType;
+
+    std::lock_guard  _l(sCodecMutex);
+
+    auto it = sCodecTraits.find(codecKey);
+    if (it != sCodecTraits.end()) {
+        registeredCodec = it->second;
+    }
+
+    if (registeredCodec == nullptr) {
+        // register the one that was passed to us
+        ALOGV("Creating entry for codec %s, mediaType %s, key %s", codecName, mediaType,
+              codecKey.c_str());
+        sCodecTraits.insert({codecKey, codec});
+        registeredCodec = codec;
+        codec->setRegistered(true);
+    } else {
+        // one has already been registered, use that
+        // and discard the candidate
+        delete codec;
+        codec = nullptr;
+    }
+
+    return registeredCodec;
+}
+
+}  // namespace mediaformatshaper
+}  // namespace android
+
diff --git a/media/libmediaformatshaper/VQApply.cpp b/media/libmediaformatshaper/VQApply.cpp
new file mode 100644
index 0000000..08e23cc
--- /dev/null
+++ b/media/libmediaformatshaper/VQApply.cpp
@@ -0,0 +1,242 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "VQApply"
+#include <utils/Log.h>
+
+#include <string>
+#include <inttypes.h>
+
+#include <media/NdkMediaFormat.h>
+
+#include <media/formatshaper/VQops.h>
+#include <media/formatshaper/CodecProperties.h>
+#include <media/formatshaper/VideoShaper.h>
+
+namespace android {
+namespace mediaformatshaper {
+
+
+// these are all NDK#31 and we run as NDK#29 (to be within the module)
+// the __builtin_available(android 31, *) constructs didn't work for me.
+//
+#define	AMEDIAFORMAT_VIDEO_QP_MAX	"video-qp-max"
+#define	AMEDIAFORMAT_VIDEO_QP_MIN	"video-qp-min"
+
+#define	AMEDIAFORMAT_VIDEO_QP_B_MAX	"video-qp-b-max"
+#define	AMEDIAFORMAT_VIDEO_QP_B_MIN	"video-qp-b-min"
+#define	AMEDIAFORMAT_VIDEO_QP_I_MAX	"video-qp-i-max"
+#define	AMEDIAFORMAT_VIDEO_QP_I_MIN	"video-qp-i-min"
+#define	AMEDIAFORMAT_VIDEO_QP_P_MAX	"video-qp-p-max"
+#define	AMEDIAFORMAT_VIDEO_QP_P_MIN	"video-qp-p-min"
+
+// defined in the SDK, but not in the NDK
+//
+static const int BITRATE_MODE_VBR = 1;
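+// (matches MediaCodecInfo.EncoderCapabilities.BITRATE_MODE_VBR on the SDK side)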
+
+
+// constants we use within the calculations
+//
+constexpr double BITRATE_LEAVE_UNTOUCHED = 2.0;
+constexpr double BITRATE_QP_UNAVAILABLE = 1.20;
+// 10% didn't work so hot on bonito (with no QP support)
+// 15% is next.. still leaves a few short
+// 20% ? this is on the edge of what I want to do
+
+//
+// Caller retains ownership of and responsibility for inFormat
+//
+int VQApply(CodecProperties *codec, vqOps_t *info, AMediaFormat* inFormat, int flags) {
+    ALOGV("codecName %s inFormat %p flags x%x", codec->getName().c_str(), inFormat, flags);
+
+    int32_t bitRateMode = -1;
+    if (AMediaFormat_getInt32(inFormat, AMEDIAFORMAT_KEY_BITRATE_MODE, &bitRateMode)
+        && bitRateMode != BITRATE_MODE_VBR) {
+        ALOGD("minquality: applies only to VBR encoding");
+        return 0;
+    }
+
+    if (codec->supportedMinimumQuality() > 0) {
+        // allow the codec provided minimum quality behavior to work at it
+        ALOGD("minquality: codec claims to implement minquality=%d",
+              codec->supportedMinimumQuality());
+        return 0;
+    }
+
+    //
+    // consider any and all tools available
+    // -- qp
+    // -- minimum bits-per-pixel
+    //
+    int64_t bitrateChosen = 0;
+    int32_t qpChosen = INT32_MAX;
+
+    int64_t bitrateConfigured = 0;
+    int32_t bitrateConfiguredTmp = 0;
+    (void) AMediaFormat_getInt32(inFormat, AMEDIAFORMAT_KEY_BIT_RATE, &bitrateConfiguredTmp);
+    bitrateConfigured = bitrateConfiguredTmp;
+    bitrateChosen = bitrateConfigured;
+
+    int32_t width = 0;
+    (void) AMediaFormat_getInt32(inFormat, AMEDIAFORMAT_KEY_WIDTH, &width);
+    int32_t height = 0;
+    (void) AMediaFormat_getInt32(inFormat, AMEDIAFORMAT_KEY_HEIGHT, &height);
+    int64_t pixels = ((int64_t)width) * height;
+    double minimumBpp = codec->getBpp(width, height);
+
+    int64_t bitrateFloor = pixels * minimumBpp;
+    if (bitrateFloor > INT32_MAX) bitrateFloor = INT32_MAX;
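+    // e.g. (illustrative numbers) 1920*1080 pixels with a 2.40 bpp floor gives
+    // bitrateFloor ~= 4.98 Mbps; anything configured at >= 2x that floor is left untouched below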
+
+    // if we are far enough above the target bpp, leave it alone
+    //
+    ALOGV("bitrate: configured %" PRId64 " floor %" PRId64, bitrateConfigured, bitrateFloor);
+    if (bitrateConfigured >= BITRATE_LEAVE_UNTOUCHED * bitrateFloor) {
+        ALOGV("high enough bitrate: configured %" PRId64 " >= %f * floor %" PRId64,
+                bitrateConfigured, BITRATE_LEAVE_UNTOUCHED, bitrateFloor);
+        return 0;
+    }
+
+    // raise anything below the bitrate floor
+    if (bitrateConfigured < bitrateFloor) {
+        ALOGD("raise bitrate: configured %" PRId64 " to floor %" PRId64,
+                bitrateConfigured, bitrateFloor);
+        bitrateChosen = bitrateFloor;
+    }
+
+    bool qpPresent = hasQp(inFormat);
+
+    // add QP, if not already present
+    if (!qpPresent) {
+        int32_t qpmax = codec->targetQpMax();
+        if (qpmax != INT32_MAX) {
+            ALOGV("choosing qp=%d", qpmax);
+            qpChosen = qpmax;
+        }
+    }
+
+    // if QP is desired but not supported, compensate with additional bits
+    if (!codec->supportsQp()) {
+        if (qpPresent || qpChosen != INT32_MAX) {
+            ALOGD("minquality: desired QP, but unsupported, boost bitrate %" PRId64 " to %" PRId64,
+                bitrateChosen, (int64_t)(bitrateChosen * BITRATE_QP_UNAVAILABLE));
+            bitrateChosen =  bitrateChosen * BITRATE_QP_UNAVAILABLE;
+            qpChosen = INT32_MAX;
+        }
+    }
+
+    // apply our chosen values
+    //
+    if (qpChosen != INT32_MAX) {
+        ALOGD("minquality by QP: inject %s=%d", AMEDIAFORMAT_VIDEO_QP_MAX, qpChosen);
+        AMediaFormat_setInt32(inFormat, AMEDIAFORMAT_VIDEO_QP_MAX, qpChosen);
+
+        // force spreading the QP across frame types, since we are imposing a value
+        qpSpreadMaxPerFrameType(inFormat, info->qpDelta, info->qpMax, /* override */ true);
+    }
+
+    if (bitrateChosen != bitrateConfigured) {
+        ALOGD("minquality/target bitrate raised from %" PRId64 " to %" PRId64 " bps",
+              bitrateConfigured, bitrateChosen);
+        AMediaFormat_setInt32(inFormat, AMEDIAFORMAT_KEY_BIT_RATE, (int32_t)bitrateChosen);
+    }
+
+    return 0;
+}
+
+
+bool hasQpPerFrameType(AMediaFormat *format) {
+    int32_t value;
+
+    if (AMediaFormat_getInt32(format, AMEDIAFORMAT_VIDEO_QP_I_MAX, &value)
+        || AMediaFormat_getInt32(format, AMEDIAFORMAT_VIDEO_QP_I_MIN, &value)) {
+        return true;
+    }
+    if (AMediaFormat_getInt32(format, AMEDIAFORMAT_VIDEO_QP_P_MAX, &value)
+        || AMediaFormat_getInt32(format, AMEDIAFORMAT_VIDEO_QP_P_MIN, &value)) {
+        return true;
+    }
+    if (AMediaFormat_getInt32(format, AMEDIAFORMAT_VIDEO_QP_B_MAX, &value)
+        || AMediaFormat_getInt32(format, AMEDIAFORMAT_VIDEO_QP_B_MIN, &value)) {
+        return true;
+    }
+    return false;
+}
+
+bool hasQp(AMediaFormat *format) {
+    int32_t value;
+    if (AMediaFormat_getInt32(format, AMEDIAFORMAT_VIDEO_QP_MAX, &value)
+        || AMediaFormat_getInt32(format, AMEDIAFORMAT_VIDEO_QP_MIN, &value)) {
+        return true;
+    }
+    return hasQpPerFrameType(format);
+}
+
+void qpSpreadPerFrameType(AMediaFormat *format, int delta,
+                           int qplow, int qphigh, bool override) {
+     qpSpreadMaxPerFrameType(format, delta, qphigh, override);
+     qpSpreadMinPerFrameType(format, qplow, override);
+}
+
+void qpSpreadMaxPerFrameType(AMediaFormat *format, int delta, int qphigh, bool override) {
+    ALOGV("format %p delta %d  hi %d override %d", format, delta, qphigh, override);
+
+    int32_t qpOffered = 0;
+    if (AMediaFormat_getInt32(format, AMEDIAFORMAT_VIDEO_QP_MAX, &qpOffered)) {
+        // propagate to otherwise unspecified frame-specific keys
+        int32_t maxI;
+        if (override || !AMediaFormat_getInt32(format, AMEDIAFORMAT_VIDEO_QP_I_MAX, &maxI)) {
+            int32_t value = std::min(qphigh, qpOffered);
+            AMediaFormat_setInt32(format, AMEDIAFORMAT_VIDEO_QP_I_MAX, value);
+        }
+        int32_t maxP;
+        if (override || !AMediaFormat_getInt32(format, AMEDIAFORMAT_VIDEO_QP_P_MAX, &maxP)) {
+            int32_t value = std::min(qphigh, (std::min(qpOffered, INT32_MAX-delta) + delta));
+            AMediaFormat_setInt32(format, AMEDIAFORMAT_VIDEO_QP_P_MAX, value);
+        }
+        int32_t maxB;
+        if (override || !AMediaFormat_getInt32(format, AMEDIAFORMAT_VIDEO_QP_B_MAX, &maxB)) {
+            int32_t value = std::min(qphigh, (std::min(qpOffered, INT32_MAX-2*delta) + 2*delta));
+            AMediaFormat_setInt32(format, AMEDIAFORMAT_VIDEO_QP_B_MAX, value);
+        }
+    }
+}
+
+void qpSpreadMinPerFrameType(AMediaFormat *format, int qplow, bool override) {
+    ALOGV("format %p lo %d override %d", format, qplow, override);
+
+    int32_t qpOffered = 0;
+    if (AMediaFormat_getInt32(format, AMEDIAFORMAT_VIDEO_QP_MIN, &qpOffered)) {
+        int value = std::max(qplow, qpOffered);
+        // propagate to otherwise unspecified frame-specific keys
+        int32_t minI;
+        if (!AMediaFormat_getInt32(format, AMEDIAFORMAT_VIDEO_QP_I_MIN, &minI)) {
+            AMediaFormat_setInt32(format, AMEDIAFORMAT_VIDEO_QP_I_MIN, value);
+        }
+        int32_t minP;
+        if (!AMediaFormat_getInt32(format, AMEDIAFORMAT_VIDEO_QP_P_MIN, &minP)) {
+            AMediaFormat_setInt32(format, AMEDIAFORMAT_VIDEO_QP_P_MIN, value);
+        }
+        int32_t minB;
+        if (!AMediaFormat_getInt32(format, AMEDIAFORMAT_VIDEO_QP_B_MIN, &minB)) {
+            AMediaFormat_setInt32(format, AMEDIAFORMAT_VIDEO_QP_B_MIN, value);
+        }
+    }
+}
+
+}  // namespace mediaformatshaper
+}  // namespace android
+
diff --git a/media/libmediaformatshaper/VideoShaper.cpp b/media/libmediaformatshaper/VideoShaper.cpp
new file mode 100644
index 0000000..f772a66
--- /dev/null
+++ b/media/libmediaformatshaper/VideoShaper.cpp
@@ -0,0 +1,98 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "VideoShaper"
+#include <utils/Log.h>
+
+#include <string>
+#include <inttypes.h>
+
+#include <media/NdkMediaFormat.h>
+
+#include <media/formatshaper/VQops.h>
+#include <media/formatshaper/CodecProperties.h>
+#include <media/formatshaper/VideoShaper.h>
+
+namespace android {
+namespace mediaformatshaper {
+
+// mediatype-specific operations
+
+vqOps_t mediaInfo[] = {
+    {
+        .mediaType = "video/avc",
+        .qpMin = 0,
+        .qpMax = 51,
+        .qpDelta = 3,
+    },
+    {
+        .mediaType = "video/hevc",
+        .qpMin = 0,
+        .qpMax = 51,
+        .qpDelta = 3,
+    },
+    {
+        .mediaType = NULL,                // matches everything, it must come last
+        .qpMin = INT32_MIN,
+        .qpMax = INT32_MAX,
+        .qpDelta = 3,
+    }
+};
+int nMediaInfos = sizeof(mediaInfo) / sizeof(mediaInfo[0]);
+
+//
+// Caller retains ownership of and responsibility for inFormat
+//
+
+int videoShaper(CodecProperties *codec, AMediaFormat* inFormat, int flags) {
+    if (codec == nullptr) {
+        return -1;
+    }
+    ALOGV("codec %s inFormat %p flags x%x", codec->getName().c_str(), inFormat, flags);
+
+    int ix;
+
+    std::string mediaType = codec->getMediaType();
+    // we should always come out of this with a selection, because the final entry
+    // is deliberately a NULL -- so that it will act as a default
+    for(ix = 0; mediaInfo[ix].mediaType != NULL; ix++) {
+        if (strcmp(mediaType.c_str(), mediaInfo[ix].mediaType) == 0) {
+            break;
+        }
+    }
+    if (ix >= nMediaInfos) {
+        // can't actually happen: the final NULL entry always terminates the loop, so ix is
+        // at most nMediaInfos - 1 and we simply fall through to that default entry.
+    }
+
+    vqOps_t *info = &mediaInfo[ix];
+
+    // apply any quality transforms in here..
+    (void) VQApply(codec, info, inFormat, flags);
+
+    // We must always spread any QP parameters.
+    // Sometimes it's something we inserted here, sometimes it's a value that the user injected.
+    //
+    qpSpreadPerFrameType(inFormat, info->qpDelta, info->qpMin, info->qpMax, /* override */ false);
+
+    //
+    return 0;
+
+}
+
+}  // namespace mediaformatshaper
+}  // namespace android
+
diff --git a/media/libmediaformatshaper/exports.lds b/media/libmediaformatshaper/exports.lds
new file mode 100644
index 0000000..a29cadb
--- /dev/null
+++ b/media/libmediaformatshaper/exports.lds
@@ -0,0 +1,6 @@
+{
+    global:
+        shaper_ops;
+    local:
+        *;
+};
diff --git a/media/libmediaformatshaper/include/media/formatshaper/CodecProperties.h b/media/libmediaformatshaper/include/media/formatshaper/CodecProperties.h
new file mode 100644
index 0000000..ff7051f
--- /dev/null
+++ b/media/libmediaformatshaper/include/media/formatshaper/CodecProperties.h
@@ -0,0 +1,139 @@
+/*
+ * Copyright 2021, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef _LIBMEDIAFORMATSHAPER_CODECPROPERTIES_H_
+#define _LIBMEDIAFORMATSHAPER_CODECPROPERTIES_H_
+
+#include <map>
+#include <mutex>
+#include <string>
+
+#include <inttypes.h>
+
+#include <utils/RefBase.h>
+
+namespace android {
+namespace mediaformatshaper {
+
+class CodecProperties {
+
+  public:
+    CodecProperties(std::string name, std::string mediaType);
+
+    // seed the codec with some preconfigured values
+    // (e.g. mediaType-granularity defaults)
+    // runs from the constructor
+    void Seed();
+    void Finish();
+
+    std::string getName();
+    std::string getMediaType();
+
+    // establish a mapping from standard 'key' to non-standard 'value' in the namespace 'kind'
+    void setMapping(std::string kind, std::string key, std::string value);
+
+    // translate from standard key to non-standard key
+    // return original standard key if there is no mapping
+    std::string getMapping(std::string key, std::string kind);
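+    // e.g. after setMapping("somekind", "standard-key", "vendor-key"),
+    // getMapping("standard-key", "somekind") returns "vendor-key"  (names illustrative only)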
+
+    // returns an array of char *, which are paired "from" and "to" values
+    // for mapping (or unmapping). it's always expressed as from->to
+    // and 'reverse' describes which strings are to be on which side.
+    const char **getMappings(std::string kind, bool reverse);
+
+    // keep a map of all features and their parameters
+    void setFeatureValue(std::string key, int32_t value);
+    bool getFeatureValue(std::string key, int32_t *valuep);
+
+    // keep a map of all tunings and their parameters
+    void setTuningValue(std::string key, std::string value);
+    bool getTuningValue(std::string key, std::string &value);
+
+    // does the codec support the Android S minimum quality rules
+    void setSupportedMinimumQuality(int vmaf);
+    int supportedMinimumQuality();
+
+    // qp max bound used to compensate when SupportedMinimumQuality == 0
+    // 0 == let a system default handle it
+    void setTargetQpMax(int qpmax);
+    int targetQpMax();
+
+    // target bits-per-pixel (per second) for encoding operations.
+    // This is used to calculate a minimum bitrate for any particular resolution.
+    // 1080p content (1920*1080 = 2073600 pixels) encoded at 5 Mbps has a bpp of 2.41
+    void setBpp(double bpp) { mBpp = bpp;}
+    double getBpp(int32_t width, int32_t height);
+
+    // Does this codec support QP bounding
+    // The getMapping() methods provide any needed mapping to non-standard keys.
+    void setSupportsQp(bool supported) { mSupportsQp = supported;}
+    bool supportsQp() { return mSupportsQp;}
+
+    int  supportedApi();
+
+    // a codec is not usable until it has been registered with its
+    // name/mediaType.
+    bool isRegistered() { return mIsRegistered;}
+    void setRegistered(bool registered) { mIsRegistered = registered;}
+
+  private:
+    std::string mName;
+    std::string mMediaType;
+    int mApi = 0;
+    int mMinimumQuality = 0;
+    int mTargetQpMax = INT32_MAX;
+    bool mSupportsQp = false;
+    double mBpp = 0.0;
+
+    // allow different target bits-per-pixel based on resolution
+    // similar to codec 'performance points'
+    // uses 'next largest' (by pixel count) point as minimum bpp
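+    // e.g. with points seeded at 480p, 540p, and 1080p, a 720p encode uses the 1080p point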
+    struct bpp_point {
+        struct bpp_point *next;
+        int32_t pixels;
+        int32_t width, height;
+        double bpp;
+    };
+    struct bpp_point *mBppPoints = nullptr;
+    bool bppPoint(std::string resolution, std::string value);
+
+    std::mutex mMappingLock;
+    // XXX figure out why I'm having problems getting compiler to like GUARDED_BY
+    std::map<std::string, std::string> mMappings /*GUARDED_BY(mMappingLock)*/ ;
+
+    std::map<std::string, int32_t> mFeatures /*GUARDED_BY(mMappingLock)*/ ;
+    std::map<std::string, std::string> mTunings /*GUARDED_BY(mMappingLock)*/ ;
+
+    // Seed() and Finish() use this as the underlying implementation
+    void addMediaDefaults(bool overrideable);
+
+    bool mIsRegistered = false;
+
+    // debugging of what's in the mapping dictionary
+    void showMappings();
+
+    // DISALLOW_EVIL_CONSTRUCTORS(CodecProperties);
+};
+
+extern CodecProperties *findCodec(const char *codecName, const char *mediaType);
+extern CodecProperties *registerCodec(CodecProperties *codec, const char *codecName,
+                               const char *mediaType);
+
+
+} // namespace mediaformatshaper
+} // namespace android
+
+#endif  //  _LIBMEDIAFORMATSHAPER_CODECPROPERTIES_H_
diff --git a/media/libmediaformatshaper/include/media/formatshaper/FormatShaper.h b/media/libmediaformatshaper/include/media/formatshaper/FormatShaper.h
new file mode 100644
index 0000000..a1747cc
--- /dev/null
+++ b/media/libmediaformatshaper/include/media/formatshaper/FormatShaper.h
@@ -0,0 +1,140 @@
+/*
+ * Copyright 2021, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/*
+ * structure defining the function pointers that system-side folks
+ * use to invoke operations within the MediaFormat shaping library
+ *
+ * This is the include file the outside world uses.
+ */
+
+#ifndef LIBMEDIAFORMATSHAPER_FORMATSHAPER_H_
+#define LIBMEDIAFORMATSHAPER_FORMATSHAPER_H_
+
+namespace android {
+namespace mediaformatshaper {
+
+/*
+ * An opaque handle clients use to refer to codec+mediatype being shaped.
+ */
+typedef void (*shaperHandle_t);
+
+/*
+ * shapeFormat applies any re-shaping on the passed AMediaFormat.
+ * The updated format is returned in-place.
+ */
+typedef int (*shapeFormat_t)(shaperHandle_t shaperHandle,
+                             AMediaFormat* inFormat, int flags);
+
+/*
+ * getMapping returns any mappings from standard keys to codec-specific keys.
+ * The return is a vector of const char* which are set up in pairs
+ * of "from", and "to".
+ * This array is always finished with a pair of nulls (to indicate a null from
+ * and a null to)
+ */
+
+typedef const char **(*getMappings_t)(shaperHandle_t shaperHandle, const char *kind);
+
+/*
+ * Returns a handle to the shaperHandle for the specified codec and mediatype.
+ * If none exists, it returns null.
+ */
+typedef shaperHandle_t (*findShaper_t)(const char *codecName, const char *mediaType);
+
+/*
+ * Creates and returns an empty shaperHandle that the client can populate using the
+ * setFeature() and setMap() operations.
+ */
+typedef shaperHandle_t (*createShaper_t)(const char *codecName, const char *mediaType);
+
+/*
+ * Registers the indicated shaperHandle for the indicated codec and mediatype.
+ * This call returns the shaperHandle that is to be used for further shaper operations.
+ * The returned value may be different from the one passed as an argument if another
+ * shaper handle was registered while the passed one was being configured.
+ */
+typedef shaperHandle_t (*registerShaper_t)(shaperHandle_t shaper, const char *codecName,
+                                         const char *mediaType);
+
+/*
+ * establishes a mapping between the standard key "from" and the codec-specific key "to"
+ * in the "kind" namespace. This mapping is specific to the indicated codecName when
+ * encoding for the indicated mediaType.
+ */
+typedef int (*setMap_t)(shaperHandle_t shaper, const char *kind, const char *from, const char *to);
+
+/*
+ * establishes that codec "codecName" encoding for "mediaType" supports the indicated
+ * feature at the indicated value
+ */
+typedef int (*setFeature_t)(shaperHandle_t shaper, const char *feature, int value);
+
+/*
+ * establishes that codec "codecName" encoding for "mediaType" supports the indicated
+ * tuning at the indicated value
+ */
+typedef int (*setTuning_t)(shaperHandle_t shaper, const char *feature, const char * value);
+
+/*
+ * The expectation is that the client will implement a flow similar to the following when
+ * setting up an encoding.
+ *
+ * if ((shaper=formatShaperops->findShaper(codecName, mediaType)) == NULL) {
+ *     for (all codec features) {
+ *         get feature name, feature value
+ *         formatShaperops->setFeature(shaper, featurename, featurevalue)
+ *     }
+ *     for (all codec mappings) {
+ *         get mapping 'kind', mapping 'from', mapping 'to'
+ *         formatShaperops->setMap(shaper, kind, from, to)
+ *     }
+ * }
+ *
+ */
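+
+/*
+ * When findShaper() does return null, the population above is preceded by
+ * createShaper(codecName, mediaType) and followed by
+ * registerShaper(shaper, codecName, mediaType); the handle returned by registerShaper()
+ * (which may differ from the one passed in, if another registration won a race) is the one
+ * then passed to shapeFormat() for each format that is to be shaped.
+ */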
+
+typedef struct FormatShaperOps {
+    const uint32_t version;
+
+    /*
+     * find, create, setup, and register the shaper info
+     */
+    findShaper_t findShaper;
+    createShaper_t createShaper;
+    setMap_t setMap;
+    setFeature_t setFeature;
+    registerShaper_t registerShaper;
+
+    /*
+     * use the shaper info
+     */
+    shapeFormat_t shapeFormat;
+    getMappings_t getMappings;
+    getMappings_t getReverseMappings;
+
+    setTuning_t setTuning;
+
+    // additions happen at the end of the structure
+} FormatShaperOps_t;
+
+// versioning information
+const uint32_t SHAPER_VERSION_UNKNOWN = 0;
+const uint32_t SHAPER_VERSION_V1 = 1;
+
+}  // namespace mediaformatshaper
+}  // namespace android
+
+#endif  // LIBMEDIAFORMATSHAPER_FORMATSHAPER_H_
diff --git a/media/libmediaformatshaper/include/media/formatshaper/VQops.h b/media/libmediaformatshaper/include/media/formatshaper/VQops.h
new file mode 100644
index 0000000..807e8af
--- /dev/null
+++ b/media/libmediaformatshaper/include/media/formatshaper/VQops.h
@@ -0,0 +1,50 @@
+/*
+ * Copyright 2021, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef LIBMEDIAFORMATSHAPER_VQOPS_H_
+#define LIBMEDIAFORMATSHAPER_VQOPS_H_
+
+#include <media/formatshaper/CodecProperties.h>
+#include <media/NdkMediaFormat.h>
+
+namespace android {
+namespace mediaformatshaper {
+
+// parameterized info for the different mediaType types
+typedef struct {
+    const char *mediaType;
+
+    int32_t qpMin;      // codec type limit (e.g. h264, not c2.android.avc.encoder)
+    int32_t qpMax;
+    int32_t qpDelta;    // from I to P to B
+
+} vqOps_t;
+
+int VQApply(CodecProperties *codec, vqOps_t *info, AMediaFormat* inFormat, int flags);
+
+// spread the overall QP setting to any un-set per-frame-type settings
+void qpSpreadPerFrameType(AMediaFormat *format, int delta, int qplow, int qphigh, bool override);
+void qpSpreadMaxPerFrameType(AMediaFormat *format, int delta, int qphigh, bool override);
+void qpSpreadMinPerFrameType(AMediaFormat *format, int qplow, bool override);
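+// e.g. with qpDelta == 3 and qpMax == 51 (the avc/hevc table values), an overall
+// "video-qp-max" of 40 spreads to I <= 40, P <= 43, B <= 46, filling only keys not already set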
+
+// does the format have QP bounding entries
+bool hasQp(AMediaFormat *format);
+bool hasQpPerFrameType(AMediaFormat *format);
+
+}  // namespace mediaformatshaper
+}  // namespace android
+
+#endif  // LIBMEDIAFORMATSHAPER_VQOPS_H_
diff --git a/media/libmediaformatshaper/include/media/formatshaper/VideoShaper.h b/media/libmediaformatshaper/include/media/formatshaper/VideoShaper.h
new file mode 100644
index 0000000..53f1b13
--- /dev/null
+++ b/media/libmediaformatshaper/include/media/formatshaper/VideoShaper.h
@@ -0,0 +1,32 @@
+/*
+ * Copyright 2021, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef LIBMEDIAFORMATSHAPER_VIDEOSHAPER_H_
+#define LIBMEDIAFORMATSHAPER_VIDEOSHAPER_H_
+
+namespace android {
+namespace mediaformatshaper {
+
+/*
+ * runs through video-specific shaping operations for the codec/format combination.
+ * updates inFormat in place.
+ */
+int videoShaper(CodecProperties *codec,  AMediaFormat* inFormat, int flags);
+
+}  // namespace mediaformatshaper
+}  // namespace android
+
+#endif  // LIBMEDIAFORMATSHAPER_VIDEOSHAPER_H_
diff --git a/media/libmediametrics/include/MediaMetricsConstants.h b/media/libmediametrics/include/MediaMetricsConstants.h
index 2af7eee..de4f8d4 100644
--- a/media/libmediametrics/include/MediaMetricsConstants.h
+++ b/media/libmediametrics/include/MediaMetricsConstants.h
@@ -123,14 +123,17 @@
 
 #define AMEDIAMETRICS_PROP_FRAMECOUNT     "frameCount"     // int32
 #define AMEDIAMETRICS_PROP_INPUTDEVICES   "inputDevices"   // string value
+#define AMEDIAMETRICS_PROP_INTERNALTRACKID "internalTrackId" // int32
 #define AMEDIAMETRICS_PROP_INTERVALCOUNT  "intervalCount"  // int32
 #define AMEDIAMETRICS_PROP_LATENCYMS      "latencyMs"      // double value
+#define AMEDIAMETRICS_PROP_LOGSESSIONID   "logSessionId"   // hex string, "" none
 #define AMEDIAMETRICS_PROP_NAME           "name"           // string value
 #define AMEDIAMETRICS_PROP_ORIGINALFLAGS  "originalFlags"  // int32
 #define AMEDIAMETRICS_PROP_OUTPUTDEVICES  "outputDevices"  // string value
 #define AMEDIAMETRICS_PROP_PERFORMANCEMODE "performanceMode"    // string value, "none", lowLatency"
 #define AMEDIAMETRICS_PROP_PLAYBACK_PITCH "playback.pitch" // double value (AudioTrack)
 #define AMEDIAMETRICS_PROP_PLAYBACK_SPEED "playback.speed" // double value (AudioTrack)
+#define AMEDIAMETRICS_PROP_PLAYERIID      "playerIId"      // int32 (-1 invalid/unset IID)
 #define AMEDIAMETRICS_PROP_ROUTEDDEVICEID "routedDeviceId" // int32
 #define AMEDIAMETRICS_PROP_SAMPLERATE     "sampleRate"     // int32
 #define AMEDIAMETRICS_PROP_SELECTEDDEVICEID "selectedDeviceId" // int32
@@ -139,6 +142,7 @@
 #define AMEDIAMETRICS_PROP_SESSIONID      "sessionId"      // int32
 #define AMEDIAMETRICS_PROP_SHARINGMODE    "sharingMode"    // string value, "exclusive", shared"
 #define AMEDIAMETRICS_PROP_SOURCE         "source"         // string (AudioAttributes)
+#define AMEDIAMETRICS_PROP_STARTTHRESHOLDFRAMES "startThresholdFrames" // int32 (AudioTrack)
 #define AMEDIAMETRICS_PROP_STARTUPMS      "startupMs"      // double value
 // State is "ACTIVE" or "STOPPED" for AudioRecord
 #define AMEDIAMETRICS_PROP_STATE          "state"          // string
@@ -181,7 +185,10 @@
 #define AMEDIAMETRICS_PROP_EVENT_VALUE_RESTORE    "restore"
 #define AMEDIAMETRICS_PROP_EVENT_VALUE_SETMODE    "setMode" // AudioFlinger
 #define AMEDIAMETRICS_PROP_EVENT_VALUE_SETBUFFERSIZE    "setBufferSize" // AudioTrack
+#define AMEDIAMETRICS_PROP_EVENT_VALUE_SETLOGSESSIONID  "setLogSessionId" // AudioTrack, Record
 #define AMEDIAMETRICS_PROP_EVENT_VALUE_SETPLAYBACKPARAM "setPlaybackParam" // AudioTrack
+#define AMEDIAMETRICS_PROP_EVENT_VALUE_SETPLAYERIID "setPlayerIId" // AudioTrack
+#define AMEDIAMETRICS_PROP_EVENT_VALUE_SETSTARTTHRESHOLD "setStartThreshold" // AudioTrack
 #define AMEDIAMETRICS_PROP_EVENT_VALUE_SETVOICEVOLUME   "setVoiceVolume" // AudioFlinger
 #define AMEDIAMETRICS_PROP_EVENT_VALUE_SETVOLUME  "setVolume"  // AudioTrack
 #define AMEDIAMETRICS_PROP_EVENT_VALUE_START      "start"  // AudioTrack, AudioRecord
diff --git a/media/libmediaplayerservice/Android.bp b/media/libmediaplayerservice/Android.bp
index 56c8368..287317d 100644
--- a/media/libmediaplayerservice/Android.bp
+++ b/media/libmediaplayerservice/Android.bp
@@ -35,7 +35,10 @@
         "android.hardware.media.c2@1.0",
         "android.hardware.media.omx@1.0",
         "av-types-aidl-cpp",
+        "media_permission-aidl-cpp",
+        "libaudioclient_aidl_conversion",
         "libbase",
+        "libactivitymanager_aidl",
         "libandroid_net",
         "libaudioclient",
         "libbinder",
@@ -73,9 +76,13 @@
         "libstagefright_nuplayer",
         "libstagefright_rtsp",
         "libstagefright_timedtext",
+        "media_permission-aidl-cpp",
     ],
 
-    export_shared_lib_headers: ["libmedia"],
+    export_shared_lib_headers: [
+        "libmedia",
+        "media_permission-aidl-cpp",
+    ],
 
     include_dirs: [
         "frameworks/av/media/libstagefright/rtsp",
diff --git a/media/libmediaplayerservice/MediaPlayerService.cpp b/media/libmediaplayerservice/MediaPlayerService.cpp
index 1d5ccca..dc4aea5 100644
--- a/media/libmediaplayerservice/MediaPlayerService.cpp
+++ b/media/libmediaplayerservice/MediaPlayerService.cpp
@@ -49,6 +49,7 @@
 
 #include <codec2/hidl/client.h>
 #include <datasource/HTTPBase.h>
+#include <media/AidlConversion.h>
 #include <media/IMediaHTTPService.h>
 #include <media/IRemoteDisplay.h>
 #include <media/IRemoteDisplayClient.h>
@@ -94,6 +95,7 @@
 using android::NOT_ENOUGH_DATA;
 using android::Parcel;
 using android::media::VolumeShaper;
+using android::media::permission::Identity;
 
 // Max number of entries in the filter.
 const int kMaxFilterSize = 64;  // I pulled that out of thin air.
@@ -453,14 +455,21 @@
     ALOGV("MediaPlayerService destroyed");
 }
 
-sp<IMediaRecorder> MediaPlayerService::createMediaRecorder(const String16 &opPackageName)
+sp<IMediaRecorder> MediaPlayerService::createMediaRecorder(const Identity& identity)
 {
-    pid_t pid = IPCThreadState::self()->getCallingPid();
-    sp<MediaRecorderClient> recorder = new MediaRecorderClient(this, pid, opPackageName);
+    // TODO b/182392769: use identity util
+    Identity verifiedIdentity = identity;
+    verifiedIdentity.uid = VALUE_OR_FATAL(
+      legacy2aidl_uid_t_int32_t(IPCThreadState::self()->getCallingUid()));
+    verifiedIdentity.pid = VALUE_OR_FATAL(
+        legacy2aidl_pid_t_int32_t(IPCThreadState::self()->getCallingPid()));
+    sp<MediaRecorderClient> recorder =
+        new MediaRecorderClient(this, verifiedIdentity);
     wp<MediaRecorderClient> w = recorder;
     Mutex::Autolock lock(mLock);
     mMediaRecorderClients.add(w);
-    ALOGV("Create new media recorder client from pid %d", pid);
+    ALOGV("Create new media recorder client from pid %s",
+        verifiedIdentity.toString().c_str());
     return recorder;
 }
 
@@ -480,17 +489,21 @@
 }
 
 sp<IMediaPlayer> MediaPlayerService::create(const sp<IMediaPlayerClient>& client,
-        audio_session_t audioSessionId, std::string opPackageName)
+        audio_session_t audioSessionId, const Identity& identity)
 {
-    pid_t pid = IPCThreadState::self()->getCallingPid();
     int32_t connId = android_atomic_inc(&mNextConnId);
+    // TODO b/182392769: use identity util
+    Identity verifiedIdentity = identity;
+    verifiedIdentity.pid = VALUE_OR_FATAL(
+        legacy2aidl_pid_t_int32_t(IPCThreadState::self()->getCallingPid()));
+    verifiedIdentity.uid = VALUE_OR_FATAL(
+        legacy2aidl_uid_t_int32_t(IPCThreadState::self()->getCallingUid()));
 
     sp<Client> c = new Client(
-            this, pid, connId, client, audioSessionId,
-            IPCThreadState::self()->getCallingUid(), opPackageName);
+            this, verifiedIdentity, connId, client, audioSessionId);
 
-    ALOGV("Create new client(%d) from pid %d, uid %d, ", connId, pid,
-         IPCThreadState::self()->getCallingUid());
+    ALOGV("Create new client(%d) from %s, ", connId,
+        verifiedIdentity.toString().c_str());
 
     wp<Client> w = c;
     {
@@ -543,8 +556,8 @@
     char buffer[SIZE];
     String8 result;
     result.append(" Client\n");
-    snprintf(buffer, 255, "  pid(%d), connId(%d), status(%d), looping(%s)\n",
-            mPid, mConnId, mStatus, mLoop?"true": "false");
+    snprintf(buffer, 255, "  Identity(%s), connId(%d), status(%d), looping(%s)\n",
+        mIdentity.toString().c_str(), mConnId, mStatus, mLoop?"true": "false");
     result.append(buffer);
 
     sp<MediaPlayerBase> p;
@@ -608,7 +621,7 @@
             for (int i = 0, n = mMediaRecorderClients.size(); i < n; ++i) {
                 sp<MediaRecorderClient> c = mMediaRecorderClients[i].promote();
                 if (c != 0) {
-                    snprintf(buffer, 255, " MediaRecorderClient pid(%d)\n", c->mPid);
+                    snprintf(buffer, 255, " MediaRecorderClient pid(%d)\n", c->mIdentity.pid);
                     result.append(buffer);
                     write(fd, result.string(), result.size());
                     result = "\n";
@@ -731,20 +744,18 @@
 }
 
 MediaPlayerService::Client::Client(
-        const sp<MediaPlayerService>& service, pid_t pid,
+        const sp<MediaPlayerService>& service, const Identity& identity,
         int32_t connId, const sp<IMediaPlayerClient>& client,
-        audio_session_t audioSessionId, uid_t uid, const std::string& opPackageName)
-        : mOpPackageName(opPackageName)
+        audio_session_t audioSessionId)
+        : mIdentity(identity)
 {
     ALOGV("Client(%d) constructor", connId);
-    mPid = pid;
     mConnId = connId;
     mService = service;
     mClient = client;
     mLoop = false;
     mStatus = NO_INIT;
     mAudioSessionId = audioSessionId;
-    mUid = uid;
     mRetransmitEndpointValid = false;
     mAudioAttributes = NULL;
     mListener = new Listener(this);
@@ -757,7 +768,7 @@
 
 MediaPlayerService::Client::~Client()
 {
-    ALOGV("Client(%d) destructor pid = %d", mConnId, mPid);
+    ALOGV("Client(%d) destructor identity = %s", mConnId, mIdentity.toString().c_str());
     mAudioOutput.clear();
     wp<Client> client(this);
     disconnect();
@@ -770,7 +781,7 @@
 
 void MediaPlayerService::Client::disconnect()
 {
-    ALOGV("disconnect(%d) from pid %d", mConnId, mPid);
+    ALOGV("disconnect(%d) from identity %s", mConnId, mIdentity.toString().c_str());
     // grab local reference and clear main reference to prevent future
     // access to object
     sp<MediaPlayerBase> p;
@@ -810,11 +821,12 @@
         p.clear();
     }
     if (p == NULL) {
-        p = MediaPlayerFactory::createPlayer(playerType, mListener, mPid);
+        p = MediaPlayerFactory::createPlayer(playerType, mListener,
+            VALUE_OR_FATAL(aidl2legacy_int32_t_pid_t(mIdentity.pid)));
     }
 
     if (p != NULL) {
-        p->setUID(mUid);
+        p->setUID(VALUE_OR_FATAL(aidl2legacy_int32_t_uid_t(mIdentity.uid)));
     }
 
     return p;
@@ -922,8 +934,8 @@
     mAudioDeviceUpdatedListener = new AudioDeviceUpdatedNotifier(p);
 
     if (!p->hardwareOutput()) {
-        mAudioOutput = new AudioOutput(mAudioSessionId, IPCThreadState::self()->getCallingUid(),
-                mPid, mAudioAttributes, mAudioDeviceUpdatedListener, mOpPackageName);
+        mAudioOutput = new AudioOutput(mAudioSessionId, mIdentity,
+                mAudioAttributes, mAudioDeviceUpdatedListener);
         static_cast<MediaPlayerInterface*>(p.get())->setAudioSink(mAudioOutput);
     }
 
@@ -1772,9 +1784,8 @@
 
 #undef LOG_TAG
 #define LOG_TAG "AudioSink"
-MediaPlayerService::AudioOutput::AudioOutput(audio_session_t sessionId, uid_t uid, int pid,
-        const audio_attributes_t* attr, const sp<AudioSystem::AudioDeviceCallback>& deviceCallback,
-        const std::string& opPackageName)
+MediaPlayerService::AudioOutput::AudioOutput(audio_session_t sessionId, const Identity& identity,
+        const audio_attributes_t* attr, const sp<AudioSystem::AudioDeviceCallback>& deviceCallback)
     : mCallback(NULL),
       mCallbackCookie(NULL),
       mCallbackData(NULL),
@@ -1786,8 +1797,7 @@
       mMsecsPerFrame(0),
       mFrameSize(0),
       mSessionId(sessionId),
-      mUid(uid),
-      mPid(pid),
+      mIdentity(identity),
       mSendLevel(0.0),
       mAuxEffectId(0),
       mFlags(AUDIO_OUTPUT_FLAG_NONE),
@@ -1795,8 +1805,7 @@
       mSelectedDeviceId(AUDIO_PORT_HANDLE_NONE),
       mRoutedDeviceId(AUDIO_PORT_HANDLE_NONE),
       mDeviceCallbackEnabled(false),
-      mDeviceCallback(deviceCallback),
-      mOpPackageName(opPackageName)
+      mDeviceCallback(deviceCallback)
 {
     ALOGV("AudioOutput(%d)", sessionId);
     if (attr != NULL) {
@@ -1822,8 +1831,7 @@
 //static
 void MediaPlayerService::AudioOutput::setMinBufferCount()
 {
-    char value[PROPERTY_VALUE_MAX];
-    if (property_get("ro.kernel.qemu", value, 0)) {
+    if (property_get_bool("ro.boot.qemu", false)) {
         mIsOnEmulator = true;
         mMinBufferCount = 12;  // to prevent systematic buffer underrun for emulator
     }
@@ -2185,13 +2193,11 @@
                     mSessionId,
                     AudioTrack::TRANSFER_CALLBACK,
                     offloadInfo,
-                    mUid,
-                    mPid,
+                    mIdentity,
                     mAttributes,
                     doNotReconnect,
                     1.0f,  // default value for maxRequiredSpeed
-                    mSelectedDeviceId,
-                    mOpPackageName);
+                    mSelectedDeviceId);
         } else {
             // TODO: Due to buffer memory concerns, we use a max target playback speed
             // based on mPlaybackRate at the time of open (instead of kMaxRequiredSpeed),
@@ -2214,13 +2220,11 @@
                     mSessionId,
                     AudioTrack::TRANSFER_DEFAULT,
                     NULL, // offload info
-                    mUid,
-                    mPid,
+                    mIdentity,
                     mAttributes,
                     doNotReconnect,
                     targetSpeed,
-                    mSelectedDeviceId,
-                    mOpPackageName);
+                    mSelectedDeviceId);
         }
         // Set caller name so it can be logged in destructor.
         // MediaMetricsConstants.h: AMEDIAMETRICS_PROP_CALLERNAME_VALUE_MEDIA
diff --git a/media/libmediaplayerservice/MediaPlayerService.h b/media/libmediaplayerservice/MediaPlayerService.h
index aca4369..35a65d3 100644
--- a/media/libmediaplayerservice/MediaPlayerService.h
+++ b/media/libmediaplayerservice/MediaPlayerService.h
@@ -27,12 +27,13 @@
 #include <utils/String8.h>
 #include <utils/Vector.h>
 
+#include <media/AidlConversion.h>
 #include <media/AudioResamplerPublic.h>
 #include <media/AudioSystem.h>
 #include <media/MediaPlayerInterface.h>
 #include <media/Metadata.h>
 #include <media/stagefright/foundation/ABase.h>
-
+#include <android/media/permission/Identity.h>
 
 #include <system/audio.h>
 
@@ -79,11 +80,9 @@
      public:
                                 AudioOutput(
                                         audio_session_t sessionId,
-                                        uid_t uid,
-                                        int pid,
+                                        const media::permission::Identity& identity,
                                         const audio_attributes_t * attr,
-                                        const sp<AudioSystem::AudioDeviceCallback>& deviceCallback,
-                                        const std::string& opPackageName);
+                                        const sp<AudioSystem::AudioDeviceCallback>& deviceCallback);
         virtual                 ~AudioOutput();
 
         virtual bool            ready() const { return mTrack != 0; }
@@ -170,8 +169,7 @@
         float                   mMsecsPerFrame;
         size_t                  mFrameSize;
         audio_session_t         mSessionId;
-        uid_t                   mUid;
-        int                     mPid;
+        media::permission::Identity mIdentity;
         float                   mSendLevel;
         int                     mAuxEffectId;
         audio_output_flags_t    mFlags;
@@ -181,7 +179,6 @@
         bool                    mDeviceCallbackEnabled;
         wp<AudioSystem::AudioDeviceCallback>        mDeviceCallback;
         mutable Mutex           mLock;
-        const std::string       mOpPackageName;
 
         // static variables below not protected by mutex
         static bool             mIsOnEmulator;
@@ -234,13 +231,13 @@
     static  void                instantiate();
 
     // IMediaPlayerService interface
-    virtual sp<IMediaRecorder>  createMediaRecorder(const String16 &opPackageName);
+    virtual sp<IMediaRecorder>  createMediaRecorder(const media::permission::Identity &identity);
     void    removeMediaRecorderClient(const wp<MediaRecorderClient>& client);
     virtual sp<IMediaMetadataRetriever> createMetadataRetriever();
 
     virtual sp<IMediaPlayer>    create(const sp<IMediaPlayerClient>& client,
                                        audio_session_t audioSessionId,
-                                       const std::string opPackageName);
+                                       const media::permission::Identity& identity);
 
     virtual sp<IMediaCodecList> getCodecList() const;
 
@@ -382,7 +379,9 @@
 
                 void            notify(int msg, int ext1, int ext2, const Parcel *obj);
 
-                pid_t           pid() const { return mPid; }
+                pid_t           pid() const {
+                    return VALUE_OR_FATAL(aidl2legacy_int32_t_pid_t(mIdentity.pid));
+                }
         virtual status_t        dump(int fd, const Vector<String16>& args);
 
                 audio_session_t getAudioSessionId() { return mAudioSessionId; }
@@ -412,12 +411,10 @@
 
         friend class MediaPlayerService;
                                 Client( const sp<MediaPlayerService>& service,
-                                        pid_t pid,
+                                        const media::permission::Identity& identity,
                                         int32_t connId,
                                         const sp<IMediaPlayerClient>& client,
-                                        audio_session_t audioSessionId,
-                                        uid_t uid,
-                                        const std::string& opPackageName);
+                                        audio_session_t audioSessionId);
                                 Client();
         virtual                 ~Client();
 
@@ -461,20 +458,18 @@
                     sp<MediaPlayerService>        mService;
                     sp<IMediaPlayerClient>        mClient;
                     sp<AudioOutput>               mAudioOutput;
-                    pid_t                         mPid;
+                    const media::permission::Identity mIdentity;
                     status_t                      mStatus;
                     bool                          mLoop;
                     int32_t                       mConnId;
                     audio_session_t               mAudioSessionId;
                     audio_attributes_t *          mAudioAttributes;
-                    uid_t                         mUid;
                     sp<ANativeWindow>             mConnectedWindow;
                     sp<IBinder>                   mConnectedWindowBinder;
                     struct sockaddr_in            mRetransmitEndpoint;
                     bool                          mRetransmitEndpointValid;
                     sp<Client>                    mNextClient;
                     sp<MediaPlayerBase::Listener> mListener;
-                    const std::string             mOpPackageName;
 
         // Metadata filters.
         media::Metadata::Filter mMetadataAllow;  // protected by mLock
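
(Illustration, not part of the patch.) A minimal sketch, assuming only the Identity fields visible in this change, of how a caller might populate the new media::permission::Identity parcelable before calling createMediaRecorder(). Per the TODO above, the service re-derives uid/pid from the binder calling identity, so a client normally only supplies its package name; the helper name below is hypothetical.

    #include <android/media/permission/Identity.h>

    using android::media::permission::Identity;

    // Hypothetical helper: fill in only the package name; uid and pid are
    // overwritten server-side from IPCThreadState in createMediaRecorder().
    static Identity makeClientIdentity(const std::string& packageName) {
        Identity identity;
        identity.packageName = packageName;
        return identity;
    }
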
diff --git a/media/libmediaplayerservice/MediaRecorderClient.cpp b/media/libmediaplayerservice/MediaRecorderClient.cpp
index 89c7032..e2c8f8f 100644
--- a/media/libmediaplayerservice/MediaRecorderClient.cpp
+++ b/media/libmediaplayerservice/MediaRecorderClient.cpp
@@ -124,12 +124,10 @@
         ALOGE("Invalid audio source: %d", as);
         return BAD_VALUE;
     }
-    pid_t pid = IPCThreadState::self()->getCallingPid();
-    uid_t uid = IPCThreadState::self()->getCallingUid();
 
     if ((as == AUDIO_SOURCE_FM_TUNER
-            && !(captureAudioOutputAllowed(pid, uid) || captureTunerAudioInputAllowed(pid, uid)))
-            || !recordingAllowed(String16(""), pid, uid)) {
+            && !(captureAudioOutputAllowed(mIdentity) || captureTunerAudioInputAllowed(mIdentity)))
+            || !recordingAllowed(mIdentity)) {
         return PERMISSION_DENIED;
     }
     Mutex::Autolock lock(mLock);
@@ -378,12 +376,13 @@
     return NO_ERROR;
 }
 
-MediaRecorderClient::MediaRecorderClient(const sp<MediaPlayerService>& service, pid_t pid,
-        const String16& opPackageName)
+MediaRecorderClient::MediaRecorderClient(const sp<MediaPlayerService>& service,
+        const Identity& identity)
 {
     ALOGV("Client constructor");
-    mPid = pid;
-    mRecorder = new StagefrightRecorder(opPackageName);
+    // Identity is already validated in createMediaRecorder.
+    mIdentity = identity;
+    mRecorder = new StagefrightRecorder(identity);
     mMediaPlayerService = service;
 }
 
@@ -592,4 +591,13 @@
     }
     return NO_INIT;
 }
+
+status_t MediaRecorderClient::getRtpDataUsage(uint64_t *bytes) {
+    ALOGV("getRtpDataUsage");
+    Mutex::Autolock lock(mLock);
+    if (mRecorder != NULL) {
+        return mRecorder->getRtpDataUsage(bytes);
+    }
+    return NO_INIT;
+}
 }; // namespace android
diff --git a/media/libmediaplayerservice/MediaRecorderClient.h b/media/libmediaplayerservice/MediaRecorderClient.h
index 12257e5..24c6ee1 100644
--- a/media/libmediaplayerservice/MediaRecorderClient.h
+++ b/media/libmediaplayerservice/MediaRecorderClient.h
@@ -22,6 +22,7 @@
 
 #include <media/AudioSystem.h>
 #include <media/IMediaRecorder.h>
+#include <android/media/permission/Identity.h>
 
 #include <vector>
 
@@ -86,20 +87,20 @@
     virtual     status_t   setPreferredMicrophoneDirection(audio_microphone_direction_t direction);
     virtual     status_t   setPreferredMicrophoneFieldDimension(float zoom);
                 status_t   getPortId(audio_port_handle_t *portId) override;
+    virtual     status_t   getRtpDataUsage(uint64_t *bytes);
 
 private:
     friend class           MediaPlayerService;  // for accessing private constructor
 
                            MediaRecorderClient(
                                    const sp<MediaPlayerService>& service,
-                                                               pid_t pid,
-                                                               const String16& opPackageName);
+                                   const media::permission::Identity& identity);
     virtual                ~MediaRecorderClient();
 
     std::vector<DeathNotifier> mDeathNotifiers;
     sp<AudioDeviceUpdatedNotifier> mAudioDeviceUpdatedNotifier;
 
-    pid_t                  mPid;
+    media::permission::Identity mIdentity;
     mutable Mutex          mLock;
     MediaRecorderBase      *mRecorder;
     sp<MediaPlayerService> mMediaPlayerService;
diff --git a/media/libmediaplayerservice/StagefrightRecorder.cpp b/media/libmediaplayerservice/StagefrightRecorder.cpp
index b2f6407..b485b1e 100644
--- a/media/libmediaplayerservice/StagefrightRecorder.cpp
+++ b/media/libmediaplayerservice/StagefrightRecorder.cpp
@@ -33,6 +33,7 @@
 #include <binder/IPCThreadState.h>
 #include <binder/IServiceManager.h>
 
+#include <media/AidlConversion.h>
 #include <media/IMediaPlayerService.h>
 #include <media/MediaMetricsItem.h>
 #include <media/stagefright/foundation/ABuffer.h>
@@ -114,8 +115,8 @@
 }
 
 
-StagefrightRecorder::StagefrightRecorder(const String16 &opPackageName)
-    : MediaRecorderBase(opPackageName),
+StagefrightRecorder::StagefrightRecorder(const Identity& clientIdentity)
+    : MediaRecorderBase(clientIdentity),
       mWriter(NULL),
       mOutputFd(-1),
       mAudioSource((audio_source_t)AUDIO_SOURCE_CNT), // initialize with invalid value
@@ -157,7 +158,7 @@
 
     // we run as part of the media player service; what we really want to
     // know is the app which requested the recording.
-    mMetricsItem->setUid(mClientUid);
+    mMetricsItem->setUid(VALUE_OR_FATAL(aidl2legacy_int32_t_uid_t(mClient.uid)));
 
     // populate the values from the raw fields.
 
@@ -1129,7 +1130,8 @@
 }
 
 status_t StagefrightRecorder::setClientName(const String16& clientName) {
-    mClientName = clientName;
+
+    mClient.packageName = VALUE_OR_RETURN_STATUS(legacy2aidl_String16_string(clientName));
 
     return OK;
 }
@@ -1141,10 +1143,6 @@
         return INVALID_OPERATION;
     }
 
-    // Get UID and PID here for permission checking
-    mClientUid = IPCThreadState::self()->getCallingUid();
-    mClientPid = IPCThreadState::self()->getCallingPid();
-
     status_t status = OK;
 
     switch (mOutputFormat) {
@@ -1344,12 +1342,10 @@
     sp<AudioSource> audioSource =
         new AudioSource(
                 &attr,
-                mOpPackageName,
+                mClient,
                 sourceSampleRate,
                 mAudioChannels,
                 mSampleRate,
-                mClientUid,
-                mClientPid,
                 mSelectedDeviceId,
                 mSelectedMicDirection,
                 mSelectedMicFieldDimension);
@@ -1871,6 +1867,10 @@
     Size videoSize;
     videoSize.width = mVideoWidth;
     videoSize.height = mVideoHeight;
+    uid_t uid = VALUE_OR_RETURN_STATUS(aidl2legacy_int32_t_uid_t(mClient.uid));
+    pid_t pid = VALUE_OR_RETURN_STATUS(aidl2legacy_int32_t_pid_t(mClient.pid));
+    String16 clientName = VALUE_OR_RETURN_STATUS(
+        aidl2legacy_string_view_String16(mClient.packageName.value_or("")));
     if (mCaptureFpsEnable) {
         if (!(mCaptureFps > 0.)) {
             ALOGE("Invalid mCaptureFps value: %lf", mCaptureFps);
@@ -1878,13 +1878,13 @@
         }
 
         mCameraSourceTimeLapse = CameraSourceTimeLapse::CreateFromCamera(
-                mCamera, mCameraProxy, mCameraId, mClientName, mClientUid, mClientPid,
+                mCamera, mCameraProxy, mCameraId, clientName, uid, pid,
                 videoSize, mFrameRate, mPreviewSurface,
                 std::llround(1e6 / mCaptureFps));
         *cameraSource = mCameraSourceTimeLapse;
     } else {
         *cameraSource = CameraSource::CreateFromCamera(
-                mCamera, mCameraProxy, mCameraId, mClientName, mClientUid, mClientPid,
+                mCamera, mCameraProxy, mCameraId, clientName, uid, pid,
                 videoSize, mFrameRate,
                 mPreviewSurface);
     }
@@ -2568,6 +2568,14 @@
     return NO_INIT;
 }
 
+status_t StagefrightRecorder::getRtpDataUsage(uint64_t *bytes) {
+    if (mWriter != 0) {
+        *bytes = mWriter->getAccumulativeBytes();
+        return OK;
+    }
+    return NO_INIT;
+}
+
 status_t StagefrightRecorder::dump(
         int fd, const Vector<String16>& args) const {
     ALOGV("dump");
diff --git a/media/libmediaplayerservice/StagefrightRecorder.h b/media/libmediaplayerservice/StagefrightRecorder.h
index 0362edd..278f348 100644
--- a/media/libmediaplayerservice/StagefrightRecorder.h
+++ b/media/libmediaplayerservice/StagefrightRecorder.h
@@ -26,6 +26,9 @@
 #include <system/audio.h>
 
 #include <media/hardware/MetadataBufferType.h>
+#include <android/media/permission/Identity.h>
+
+using namespace android::media::permission;
 
 namespace android {
 
@@ -42,7 +45,7 @@
 struct ALooper;
 
 struct StagefrightRecorder : public MediaRecorderBase {
-    explicit StagefrightRecorder(const String16 &opPackageName);
+    explicit StagefrightRecorder(const Identity& clientIdentity);
     virtual ~StagefrightRecorder();
     virtual status_t init();
     virtual status_t setAudioSource(audio_source_t as);
@@ -82,6 +85,7 @@
     virtual status_t setPreferredMicrophoneDirection(audio_microphone_direction_t direction);
     virtual status_t setPreferredMicrophoneFieldDimension(float zoom);
             status_t getPortId(audio_port_handle_t *portId) const override;
+    virtual status_t getRtpDataUsage(uint64_t *bytes);
 
 private:
 
@@ -97,9 +101,6 @@
     sp<IGraphicBufferProducer> mPreviewSurface;
     sp<PersistentSurface> mPersistentSurface;
     sp<IMediaRecorderClient> mListener;
-    String16 mClientName;
-    uid_t mClientUid;
-    pid_t mClientPid;
     sp<MediaWriter> mWriter;
     int mOutputFd;
     sp<AudioSource> mAudioSourceNode;
diff --git a/media/libmediaplayerservice/nuplayer/AWakeLock.cpp b/media/libmediaplayerservice/nuplayer/AWakeLock.cpp
index 7bee002..af9cf45 100644
--- a/media/libmediaplayerservice/nuplayer/AWakeLock.cpp
+++ b/media/libmediaplayerservice/nuplayer/AWakeLock.cpp
@@ -62,7 +62,7 @@
             binder::Status status = mPowerManager->acquireWakeLock(
                     binder, POWERMANAGER_PARTIAL_WAKE_LOCK,
                     String16("AWakeLock"), String16("media"),
-                    {} /* workSource */, {} /* historyTag */);
+                    {} /* workSource */, {} /* historyTag */, -1 /* displayId */);
             IPCThreadState::self()->restoreCallingIdentity(token);
             if (status.isOk()) {
                 mWakeLockToken = binder;
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayer.cpp b/media/libmediaplayerservice/nuplayer/NuPlayer.cpp
index 344ee0b..d94cecf 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayer.cpp
+++ b/media/libmediaplayerservice/nuplayer/NuPlayer.cpp
@@ -2860,23 +2860,24 @@
     in.writeInt32(payloadType);
 
     switch (payloadType) {
-        case NuPlayer::RTPSource::RTCP_TSFB:   // RTCP TSFB
-        case NuPlayer::RTPSource::RTCP_PSFB:   // RTCP PSFB
-        case NuPlayer::RTPSource::RTP_AUTODOWN:
+        case ARTPSource::RTCP_TSFB:   // RTCP TSFB
+        case ARTPSource::RTCP_PSFB:   // RTCP PSFB
+        case ARTPSource::RTP_AUTODOWN:
         {
             int32_t feedbackType, id;
             CHECK(msg->findInt32("feedback-type", &feedbackType));
             CHECK(msg->findInt32("sender", &id));
             in.writeInt32(feedbackType);
             in.writeInt32(id);
-            if (payloadType == NuPlayer::RTPSource::RTCP_TSFB) {
+            if (payloadType == ARTPSource::RTCP_TSFB) {
                 int32_t bitrate;
                 CHECK(msg->findInt32("bit-rate", &bitrate));
                 in.writeInt32(bitrate);
             }
             break;
         }
-        case NuPlayer::RTPSource::RTP_QUALITY:
+        case ARTPSource::RTP_QUALITY:
+        case ARTPSource::RTP_QUALITY_EMC:
         {
             int32_t feedbackType, bitrate;
             int32_t highestSeqNum, baseSeqNum, prevExpected;
@@ -2897,7 +2898,7 @@
             in.writeInt32(prevNumBufRecv);
             break;
         }
-        case NuPlayer::RTPSource::RTP_CVO:
+        case ARTPSource::RTP_CVO:
         {
             int32_t cvo;
             CHECK(msg->findInt32("cvo", &cvo));
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp b/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp
index 6a8c708..4a65f71 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp
@@ -1962,7 +1962,7 @@
             ALOGV("Mime \"%s\" mapped to audio_format 0x%x",
                     mime.c_str(), audioFormat);
 
-            int avgBitRate = -1;
+            int avgBitRate = 0;
             format->findInt32("bitrate", &avgBitRate);
 
             int32_t aacProfile = -1;
diff --git a/media/libmediaplayerservice/nuplayer/RTPSource.cpp b/media/libmediaplayerservice/nuplayer/RTPSource.cpp
index b1901e8..b43df38 100644
--- a/media/libmediaplayerservice/nuplayer/RTPSource.cpp
+++ b/media/libmediaplayerservice/nuplayer/RTPSource.cpp
@@ -45,8 +45,18 @@
       mRTPConn(new ARTPConnection(ARTPConnection::kViLTEConnection)),
       mEOSTimeoutAudio(0),
       mEOSTimeoutVideo(0),
-      mLastCVOUpdated(-1) {
-      ALOGD("RTPSource initialized with rtpParams=%s", rtpParams.string());
+      mFirstAccessUnit(true),
+      mAllTracksHaveTime(false),
+      mNTPAnchorUs(-1),
+      mMediaAnchorUs(-1),
+      mLastMediaTimeUs(-1),
+      mNumAccessUnitsReceived(0),
+      mLastCVOUpdated(-1),
+      mReceivedFirstRTCPPacket(false),
+      mReceivedFirstRTPPacket(false),
+      mPausing(false),
+      mPauseGeneration(0) {
+    ALOGD("RTPSource initialized with rtpParams=%s", rtpParams.string());
 }
 
 NuPlayer::RTPSource::~RTPSource() {
@@ -289,7 +299,7 @@
     if ((*accessUnit) != NULL && (*accessUnit)->meta()->findInt32("cvo", &cvo) &&
             cvo != mLastCVOUpdated) {
         sp<AMessage> msg = new AMessage();
-        msg->setInt32("payload-type", NuPlayer::RTPSource::RTP_CVO);
+        msg->setInt32("payload-type", ARTPSource::RTP_CVO);
         msg->setInt32("cvo", cvo);
 
         sp<AMessage> notify = dupNotify();
diff --git a/media/libmediaplayerservice/nuplayer/RTPSource.h b/media/libmediaplayerservice/nuplayer/RTPSource.h
index fb2d3b9..3b4f9e9 100644
--- a/media/libmediaplayerservice/nuplayer/RTPSource.h
+++ b/media/libmediaplayerservice/nuplayer/RTPSource.h
@@ -33,6 +33,7 @@
 #include "AnotherPacketSource.h"
 #include "APacketSource.h"
 #include "ARTPConnection.h"
+#include "ARTPSource.h"
 #include "ASessionDescription.h"
 #include "NuPlayerSource.h"
 
@@ -51,16 +52,6 @@
             const sp<AMessage> &notify,
             const String8& rtpParams);
 
-    enum {
-        RTP_FIRST_PACKET = 100,
-        RTCP_FIRST_PACKET = 101,
-        RTP_QUALITY = 102,
-        RTCP_TSFB = 205,
-        RTCP_PSFB = 206,
-        RTP_CVO = 300,
-        RTP_AUTODOWN = 400,
-    };
-
     virtual status_t getBufferingSettings(
             BufferingSettings* buffering /* nonnull */) override;
     virtual status_t setBufferingSettings(const BufferingSettings& buffering) override;
diff --git a/media/libmediaplayerservice/tests/stagefrightRecorder/Android.bp b/media/libmediaplayerservice/tests/stagefrightRecorder/Android.bp
index b84d64b..5b16911 100644
--- a/media/libmediaplayerservice/tests/stagefrightRecorder/Android.bp
+++ b/media/libmediaplayerservice/tests/stagefrightRecorder/Android.bp
@@ -73,6 +73,8 @@
         "libstagefright",
         "libstagefright_foundation",
         "libutils",
+        "media_permission-aidl-cpp",
+        "libaudioclient_aidl_conversion",
     ],
 
     cflags: [
diff --git a/media/libmediaplayerservice/tests/stagefrightRecorder/StagefrightRecorderTest.cpp b/media/libmediaplayerservice/tests/stagefrightRecorder/StagefrightRecorderTest.cpp
index 5751631..6dea53d 100644
--- a/media/libmediaplayerservice/tests/stagefrightRecorder/StagefrightRecorderTest.cpp
+++ b/media/libmediaplayerservice/tests/stagefrightRecorder/StagefrightRecorderTest.cpp
@@ -59,7 +59,10 @@
     }
 
     void SetUp() override {
-        mStfRecorder = new StagefrightRecorder(String16(LOG_TAG));
+        // TODO b/182392769: use identity util
+        Identity identity;
+        identity.packageName = std::string(LOG_TAG);
+        mStfRecorder = new StagefrightRecorder(identity);
         ASSERT_NE(mStfRecorder, nullptr) << "Failed to create the instance of recorder";
 
         mOutputAudioFp = fopen(OUTPUT_FILE_NAME_AUDIO, "wb");
diff --git a/media/libmediatranscoding/Android.bp b/media/libmediatranscoding/Android.bp
index 534fa91..042850c 100644
--- a/media/libmediatranscoding/Android.bp
+++ b/media/libmediatranscoding/Android.bp
@@ -82,6 +82,7 @@
     srcs: [
         "TranscoderWrapper.cpp",
         "TranscodingClientManager.cpp",
+        "TranscodingLogger.cpp",
         "TranscodingResourcePolicy.cpp",
         "TranscodingSessionController.cpp",
         "TranscodingThermalPolicy.cpp",
@@ -96,6 +97,7 @@
         "libutils",
         "libmediatranscoder",
         "libmediandk",
+        "libstatssocket#30",
     ],
     export_shared_lib_headers: [
         "libmediandk",
@@ -106,6 +108,7 @@
     static_libs: [
         "mediatranscoding_aidl_interface-ndk_platform",
         "resourceobserver_aidl_interface-V1-ndk_platform",
+        "libstatslog_media",
     ],
 
     cflags: [
@@ -126,3 +129,43 @@
         cfi: true,
     },
 }
+
+cc_library_static {
+    name: "libstatslog_media",
+    generated_sources: ["statslog_media.cpp"],
+    generated_headers: ["statslog_media.h"],
+    min_sdk_version: "29",
+    cflags: [
+        "-Wall",
+        "-Werror",
+    ],
+    export_generated_headers: ["statslog_media.h"],
+    apex_available: [
+        "com.android.media",
+        "test_com.android.media",
+    ],
+    shared_libs: [
+        "libcutils",
+        "liblog",
+        "libstatssocket#30",
+        "libutils",
+    ],
+}
+
+genrule {
+    name: "statslog_media.h",
+    tools: ["stats-log-api-gen"],
+    cmd: "$(location stats-log-api-gen) --header $(genDir)/statslog_media.h --module media --namespace android,media,stats",
+    out: [
+        "statslog_media.h",
+    ],
+}
+
+genrule {
+    name: "statslog_media.cpp",
+    tools: ["stats-log-api-gen"],
+    cmd: "$(location stats-log-api-gen) --cpp $(genDir)/statslog_media.cpp --module media --namespace android,media,stats --importHeader statslog_media.h",
+    out: [
+        "statslog_media.cpp",
+    ],
+}
\ No newline at end of file
diff --git a/media/libmediatranscoding/TEST_MAPPING b/media/libmediatranscoding/TEST_MAPPING
index f8a9db9..40f7b21 100644
--- a/media/libmediatranscoding/TEST_MAPPING
+++ b/media/libmediatranscoding/TEST_MAPPING
@@ -26,6 +26,9 @@
         },
         {
             "name": "VideoTrackTranscoderTests"
+        },
+        {
+            "name": "CtsMediaTranscodingTestCases"
         }
     ]
 }
diff --git a/media/libmediatranscoding/TranscoderWrapper.cpp b/media/libmediatranscoding/TranscoderWrapper.cpp
index 8410850..b19e711 100644
--- a/media/libmediatranscoding/TranscoderWrapper.cpp
+++ b/media/libmediatranscoding/TranscoderWrapper.cpp
@@ -22,6 +22,7 @@
 #include <media/MediaTranscoder.h>
 #include <media/NdkCommon.h>
 #include <media/TranscoderWrapper.h>
+#include <media/TranscodingRequest.h>
 #include <utils/Log.h>
 
 #include <thread>
@@ -56,34 +57,34 @@
     }
 }
 
-static AMediaFormat* getVideoFormat(
+static std::shared_ptr<AMediaFormat> getVideoFormat(
         const char* originalMime,
         const std::optional<TranscodingVideoTrackFormat>& requestedFormat) {
     if (requestedFormat == std::nullopt) {
         return nullptr;
     }
 
-    AMediaFormat* format = AMediaFormat_new();
+    std::shared_ptr<AMediaFormat> format =
+            std::shared_ptr<AMediaFormat>(AMediaFormat_new(), &AMediaFormat_delete);
     bool changed = false;
     if (requestedFormat->codecType == TranscodingVideoCodecType::kHevc &&
         strcmp(originalMime, AMEDIA_MIMETYPE_VIDEO_HEVC)) {
-        AMediaFormat_setString(format, AMEDIAFORMAT_KEY_MIME, AMEDIA_MIMETYPE_VIDEO_HEVC);
+        AMediaFormat_setString(format.get(), AMEDIAFORMAT_KEY_MIME, AMEDIA_MIMETYPE_VIDEO_HEVC);
         changed = true;
     } else if (requestedFormat->codecType == TranscodingVideoCodecType::kAvc &&
                strcmp(originalMime, AMEDIA_MIMETYPE_VIDEO_AVC)) {
-        AMediaFormat_setString(format, AMEDIAFORMAT_KEY_MIME, AMEDIA_MIMETYPE_VIDEO_AVC);
+        AMediaFormat_setString(format.get(), AMEDIAFORMAT_KEY_MIME, AMEDIA_MIMETYPE_VIDEO_AVC);
         changed = true;
     }
     if (requestedFormat->bitrateBps > 0) {
-        AMediaFormat_setInt32(format, AMEDIAFORMAT_KEY_BIT_RATE, requestedFormat->bitrateBps);
+        AMediaFormat_setInt32(format.get(), AMEDIAFORMAT_KEY_BIT_RATE, requestedFormat->bitrateBps);
         changed = true;
     }
     // TODO: translate other fields from requestedFormat to the format for MediaTranscoder.
     // Also need to determine more settings to expose in TranscodingVideoTrackFormat.
     if (!changed) {
-        AMediaFormat_delete(format);
         // Use null format for passthru.
-        format = nullptr;
+        format.reset();
     }
     return format;
 }
@@ -113,6 +114,12 @@
     case Event::Progress:
         typeStr = "Progress";
         break;
+    case Event::HeartBeat:
+        typeStr = "HeartBeat";
+        break;
+    case Event::Abandon:
+        typeStr = "Abandon";
+        break;
     default:
         return "(unknown)";
     }
@@ -154,6 +161,13 @@
         }
     }
 
+    virtual void onHeartBeat(const MediaTranscoder* transcoder __unused) override {
+        auto owner = mOwner.lock();
+        if (owner != nullptr) {
+            owner->onHeartBeat(mClientId, mSessionId);
+        }
+    }
+
     virtual void onCodecResourceLost(const MediaTranscoder* transcoder __unused,
                                      const std::shared_ptr<ndk::ScopedAParcel>& pausedState
                                              __unused) override {
@@ -166,12 +180,20 @@
     SessionIdType mSessionId;
 };
 
-TranscoderWrapper::TranscoderWrapper() : mCurrentClientId(0), mCurrentSessionId(-1) {
-    std::thread(&TranscoderWrapper::threadLoop, this).detach();
+TranscoderWrapper::TranscoderWrapper(const std::shared_ptr<TranscoderCallbackInterface>& cb,
+                                     const std::shared_ptr<TranscodingLogger>& logger,
+                                     int64_t heartBeatIntervalUs)
+      : mCallback(cb),
+        mLogger(logger),
+        mHeartBeatIntervalUs(heartBeatIntervalUs),
+        mCurrentClientId(0),
+        mCurrentSessionId(-1),
+        mLooperReady(false) {
+    ALOGV("TranscoderWrapper CTOR: %p", this);
 }
 
-void TranscoderWrapper::setCallback(const std::shared_ptr<TranscoderCallbackInterface>& cb) {
-    mCallback = cb;
+TranscoderWrapper::~TranscoderWrapper() {
+    ALOGV("TranscoderWrapper DTOR: %p", this);
 }
 
 static bool isResourceError(media_status_t err) {
@@ -200,10 +222,11 @@
 }
 
 void TranscoderWrapper::start(ClientIdType clientId, SessionIdType sessionId,
-                              const TranscodingRequestParcel& request,
+                              const TranscodingRequestParcel& requestParcel, uid_t callingUid,
                               const std::shared_ptr<ITranscodingClientCallback>& clientCb) {
-    queueEvent(Event::Start, clientId, sessionId, [=, &request] {
-        media_status_t err = handleStart(clientId, sessionId, request, clientCb);
+    TranscodingRequest request{requestParcel};
+    queueEvent(Event::Start, clientId, sessionId, [=] {
+        media_status_t err = handleStart(clientId, sessionId, request, callingUid, clientCb);
         if (err != AMEDIA_OK) {
             cleanup();
             reportError(clientId, sessionId, err);
@@ -234,10 +257,11 @@
 }
 
 void TranscoderWrapper::resume(ClientIdType clientId, SessionIdType sessionId,
-                               const TranscodingRequestParcel& request,
+                               const TranscodingRequestParcel& requestParcel, uid_t callingUid,
                                const std::shared_ptr<ITranscodingClientCallback>& clientCb) {
-    queueEvent(Event::Resume, clientId, sessionId, [=, &request] {
-        media_status_t err = handleResume(clientId, sessionId, request, clientCb);
+    TranscodingRequest request{requestParcel};
+    queueEvent(Event::Resume, clientId, sessionId, [=] {
+        media_status_t err = handleResume(clientId, sessionId, request, callingUid, clientCb);
         if (err != AMEDIA_OK) {
             cleanup();
             reportError(clientId, sessionId, err);
@@ -250,7 +274,7 @@
     });
 }
 
-void TranscoderWrapper::stop(ClientIdType clientId, SessionIdType sessionId) {
+void TranscoderWrapper::stop(ClientIdType clientId, SessionIdType sessionId, bool abandon) {
     queueEvent(Event::Stop, clientId, sessionId, [=] {
         if (mTranscoder != nullptr && clientId == mCurrentClientId &&
             sessionId == mCurrentSessionId) {
@@ -261,6 +285,7 @@
             } else {
                 ALOGI("transcoder stopped");
             }
+            logSessionEnded(TranscodingLogger::SessionEndedReason::CANCELLED, err);
             cleanup();
         } else {
             // For sessions that's not currently running, release any pausedState for the session.
@@ -268,12 +293,17 @@
         }
         // No callback needed for stop.
     });
+
+    if (abandon) {
+        queueEvent(Event::Abandon, 0, 0, nullptr);
+    }
 }
 
 void TranscoderWrapper::onFinish(ClientIdType clientId, SessionIdType sessionId) {
     queueEvent(Event::Finish, clientId, sessionId, [=] {
         if (mTranscoder != nullptr && clientId == mCurrentClientId &&
             sessionId == mCurrentSessionId) {
+            logSessionEnded(TranscodingLogger::SessionEndedReason::FINISHED, AMEDIA_OK);
             cleanup();
         }
 
@@ -291,6 +321,7 @@
             [=] {
                 if (mTranscoder != nullptr && clientId == mCurrentClientId &&
                     sessionId == mCurrentSessionId) {
+                    logSessionEnded(TranscodingLogger::SessionEndedReason::ERROR, error);
                     cleanup();
                 }
                 reportError(clientId, sessionId, error);
@@ -311,9 +342,19 @@
             progress);
 }
 
+void TranscoderWrapper::onHeartBeat(ClientIdType clientId, SessionIdType sessionId) {
+    queueEvent(Event::HeartBeat, clientId, sessionId, [=] {
+        auto callback = mCallback.lock();
+        if (callback != nullptr) {
+            callback->onHeartBeat(clientId, sessionId);
+        }
+    });
+}
+
 media_status_t TranscoderWrapper::setupTranscoder(
         ClientIdType clientId, SessionIdType sessionId, const TranscodingRequestParcel& request,
-        const std::shared_ptr<ITranscodingClientCallback>& clientCb,
+        uid_t callingUid, const std::shared_ptr<ITranscodingClientCallback>& clientCb,
+        TranscodingLogger::SessionEndedReason* failureReason,
         const std::shared_ptr<ndk::ScopedAParcel>& pausedState) {
     if (clientCb == nullptr) {
         ALOGE("client callback is null");
@@ -325,6 +366,12 @@
         return AMEDIA_ERROR_INVALID_OPERATION;
     }
 
+    // Unwrap the callback and send heartbeats to the client after each operation during setup.
+    auto callback = mCallback.lock();
+    if (callback == nullptr) {
+        return AMEDIA_ERROR_INVALID_OPERATION;
+    }
+
     Status status;
     ::ndk::ScopedFileDescriptor srcFd, dstFd;
     int srcFdInt = request.sourceFd.get();
@@ -332,11 +379,14 @@
         status = clientCb->openFileDescriptor(request.sourceFilePath, "r", &srcFd);
         if (!status.isOk() || srcFd.get() < 0) {
             ALOGE("failed to open source");
+            *failureReason = TranscodingLogger::SessionEndedReason::OPEN_SRC_FD_FAILED;
             return AMEDIA_ERROR_IO;
         }
         srcFdInt = srcFd.get();
     }
 
+    callback->onHeartBeat(clientId, sessionId);
+
     int dstFdInt = request.destinationFd.get();
     if (dstFdInt < 0) {
         // Open dest file with "rw", as the transcoder could potentially reuse part of it
@@ -345,74 +395,99 @@
         status = clientCb->openFileDescriptor(request.destinationFilePath, "rw", &dstFd);
         if (!status.isOk() || dstFd.get() < 0) {
             ALOGE("failed to open destination");
+            *failureReason = TranscodingLogger::SessionEndedReason::OPEN_DST_FD_FAILED;
             return AMEDIA_ERROR_IO;
         }
         dstFdInt = dstFd.get();
     }
 
+    callback->onHeartBeat(clientId, sessionId);
+
     mCurrentClientId = clientId;
     mCurrentSessionId = sessionId;
+    mCurrentCallingUid = callingUid;
     mTranscoderCb = std::make_shared<CallbackImpl>(shared_from_this(), clientId, sessionId);
-    mTranscoder = MediaTranscoder::create(mTranscoderCb, request.clientPid, request.clientUid,
-                                          pausedState);
+    mTranscoder = MediaTranscoder::create(mTranscoderCb, mHeartBeatIntervalUs, request.clientPid,
+                                          request.clientUid, pausedState);
     if (mTranscoder == nullptr) {
         ALOGE("failed to create transcoder");
+        *failureReason = TranscodingLogger::SessionEndedReason::CREATE_FAILED;
         return AMEDIA_ERROR_UNKNOWN;
     }
 
+    callback->onHeartBeat(clientId, sessionId);
+
     media_status_t err = mTranscoder->configureSource(srcFdInt);
     if (err != AMEDIA_OK) {
         ALOGE("failed to configure source: %d", err);
+        *failureReason = TranscodingLogger::SessionEndedReason::CONFIG_SRC_FAILED;
         return err;
     }
 
+    callback->onHeartBeat(clientId, sessionId);
+
     std::vector<std::shared_ptr<AMediaFormat>> trackFormats = mTranscoder->getTrackFormats();
     if (trackFormats.size() == 0) {
         ALOGE("failed to get track formats!");
+        *failureReason = TranscodingLogger::SessionEndedReason::NO_TRACKS;
         return AMEDIA_ERROR_MALFORMED;
     }
 
+    callback->onHeartBeat(clientId, sessionId);
+
     for (int i = 0; i < trackFormats.size(); ++i) {
-        AMediaFormat* format = nullptr;
+        std::shared_ptr<AMediaFormat> format;
         const char* mime = nullptr;
         AMediaFormat_getString(trackFormats[i].get(), AMEDIAFORMAT_KEY_MIME, &mime);
 
         if (!strncmp(mime, "video/", 6)) {
             format = getVideoFormat(mime, request.requestedVideoTrackFormat);
+
+            mSrcFormat = trackFormats[i];
+            mDstFormat = format;
         }
 
-        err = mTranscoder->configureTrackFormat(i, format);
-        if (format != nullptr) {
-            AMediaFormat_delete(format);
-        }
+        err = mTranscoder->configureTrackFormat(i, format.get());
         if (err != AMEDIA_OK) {
             ALOGE("failed to configure track format for track %d: %d", i, err);
+            *failureReason = TranscodingLogger::SessionEndedReason::CONFIG_TRACK_FAILED;
             return err;
         }
+
+        callback->onHeartBeat(clientId, sessionId);
     }
 
     err = mTranscoder->configureDestination(dstFdInt);
     if (err != AMEDIA_OK) {
         ALOGE("failed to configure dest: %d", err);
+        *failureReason = TranscodingLogger::SessionEndedReason::CONFIG_DST_FAILED;
         return err;
     }
 
+    callback->onHeartBeat(clientId, sessionId);
+
     return AMEDIA_OK;
 }
 
 media_status_t TranscoderWrapper::handleStart(
         ClientIdType clientId, SessionIdType sessionId, const TranscodingRequestParcel& request,
-        const std::shared_ptr<ITranscodingClientCallback>& clientCb) {
+        uid_t callingUid, const std::shared_ptr<ITranscodingClientCallback>& clientCb) {
     ALOGI("%s: setting up transcoder for start", __FUNCTION__);
-    media_status_t err = setupTranscoder(clientId, sessionId, request, clientCb);
+    TranscodingLogger::SessionEndedReason reason = TranscodingLogger::SessionEndedReason::UNKNOWN;
+    media_status_t err =
+            setupTranscoder(clientId, sessionId, request, callingUid, clientCb, &reason);
     if (err != AMEDIA_OK) {
         ALOGI("%s: failed to setup transcoder", __FUNCTION__);
+        logSessionEnded(reason, err);
         return err;
     }
 
+    mTranscodeStartTime = std::chrono::steady_clock::now();
+
     err = mTranscoder->start();
     if (err != AMEDIA_OK) {
         ALOGE("%s: failed to start transcoder: %d", __FUNCTION__, err);
+        logSessionEnded(TranscodingLogger::SessionEndedReason::START_FAILED, err);
         return err;
     }
 
@@ -435,6 +510,7 @@
 
     std::shared_ptr<ndk::ScopedAParcel> pauseStates;
     media_status_t err = mTranscoder->pause(&pauseStates);
+    logSessionEnded(TranscodingLogger::SessionEndedReason::PAUSED, err);
     if (err != AMEDIA_OK) {
         ALOGE("%s: failed to pause transcoder: %d", __FUNCTION__, err);
         return err;
@@ -447,7 +523,7 @@
 
 media_status_t TranscoderWrapper::handleResume(
         ClientIdType clientId, SessionIdType sessionId, const TranscodingRequestParcel& request,
-        const std::shared_ptr<ITranscodingClientCallback>& clientCb) {
+        uid_t callingUid, const std::shared_ptr<ITranscodingClientCallback>& clientCb) {
     std::shared_ptr<ndk::ScopedAParcel> pausedState;
     auto it = mPausedStateMap.find(SessionKeyType(clientId, sessionId));
     if (it != mPausedStateMap.end()) {
@@ -459,15 +535,23 @@
     }
 
     ALOGI("%s: setting up transcoder for resume", __FUNCTION__);
-    media_status_t err = setupTranscoder(clientId, sessionId, request, clientCb, pausedState);
+    TranscodingLogger::SessionEndedReason reason = TranscodingLogger::SessionEndedReason::UNKNOWN;
+    media_status_t err = setupTranscoder(clientId, sessionId, request, callingUid, clientCb,
+                                         &reason, pausedState);
     if (err != AMEDIA_OK) {
         ALOGE("%s: failed to setup transcoder: %d", __FUNCTION__, err);
+        logSessionEnded(reason, err);
         return err;
     }
 
+    // Note: For now resume() will just restart transcoding from the beginning, so there is no need
+    // to distinguish between resume and start from a performance perspective.
+    mTranscodeStartTime = std::chrono::steady_clock::now();
+
     err = mTranscoder->resume();
     if (err != AMEDIA_OK) {
         ALOGE("%s: failed to resume transcoder: %d", __FUNCTION__, err);
+        logSessionEnded(TranscodingLogger::SessionEndedReason::RESUME_FAILED, err);
         return err;
     }
 
@@ -478,14 +562,38 @@
 void TranscoderWrapper::cleanup() {
     mCurrentClientId = 0;
     mCurrentSessionId = -1;
+    mCurrentCallingUid = -1;
     mTranscoderCb = nullptr;
     mTranscoder = nullptr;
+    mSrcFormat = nullptr;
+    mDstFormat = nullptr;
+}
+
+void TranscoderWrapper::logSessionEnded(const TranscodingLogger::SessionEndedReason& reason,
+                                        int error) {
+    std::chrono::microseconds transcodeDuration(-1);
+    if (reason == TranscodingLogger::SessionEndedReason::FINISHED && error == AMEDIA_OK) {
+        transcodeDuration = std::chrono::duration_cast<std::chrono::microseconds>(
+                std::chrono::steady_clock::now() - mTranscodeStartTime);
+    }
+
+    mLogger->logSessionEnded(reason, mCurrentCallingUid, error, transcodeDuration, mSrcFormat.get(),
+                             mDstFormat.get());
 }
 
 void TranscoderWrapper::queueEvent(Event::Type type, ClientIdType clientId, SessionIdType sessionId,
                                    const std::function<void()> runnable, int32_t arg) {
     std::scoped_lock lock{mLock};
 
+    if (!mLooperReady) {
+        // A shared_ptr to ourselves is given to the thread's stack, so that the TranscoderWrapper
+        // object doesn't go away until the thread exits. When a watchdog timeout happens, this
+        // allows the session controller to release its reference to the TranscoderWrapper object
+        // without blocking until the thread exits.
+        std::thread([owner = shared_from_this()]() { owner->threadLoop(); }).detach();
+        mLooperReady = true;
+    }
+
     mQueue.push_back({type, clientId, sessionId, runnable, arg});
     mCondition.notify_one();
 }
@@ -505,10 +613,13 @@
 
         ALOGD("%s: %s", __FUNCTION__, toString(event).c_str());
 
+        if (event.type == Event::Abandon) {
+            break;
+        }
+
         lock.unlock();
         event.runnable();
         lock.lock();
     }
 }
-
 }  // namespace android
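
(Illustration, not part of the patch.) A minimal wiring sketch for the new TranscoderWrapper constructor, under the assumption that the session controller owns both the logger and the wrapper; the interval value and the controllerCallback variable are illustrative only.

    // Hypothetical wiring, mirroring the constructor signature introduced above.
    auto logger = std::make_shared<TranscodingLogger>();
    auto transcoder = std::make_shared<TranscoderWrapper>(
            controllerCallback,           // std::shared_ptr<TranscoderCallbackInterface>
            logger,                       // std::shared_ptr<TranscodingLogger>
            1000000LL /* heartBeatIntervalUs, illustrative */);
    // The worker thread now starts lazily on the first queued event, and
    // stop(clientId, sessionId, true /* abandon */) queues an Abandon event
    // that makes the loop exit, releasing the wrapper's self-reference.
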
diff --git a/media/libmediatranscoding/TranscodingClientManager.cpp b/media/libmediatranscoding/TranscodingClientManager.cpp
index 76bb33e..6dbcaf9 100644
--- a/media/libmediatranscoding/TranscodingClientManager.cpp
+++ b/media/libmediatranscoding/TranscodingClientManager.cpp
@@ -94,6 +94,12 @@
     Status getSessionWithId(int32_t /*in_sessionId*/, TranscodingSessionParcel* /*out_session*/,
                             bool* /*_aidl_return*/) override;
 
+    Status addClientUid(int32_t /*in_sessionId*/, int32_t /*in_clientUid*/,
+                        bool* /*_aidl_return*/) override;
+
+    Status getClientUids(int32_t /*in_sessionId*/,
+                         std::optional<std::vector<int32_t>>* /*_aidl_return*/) override;
+
     Status unregister() override;
 };
 
@@ -162,8 +168,8 @@
 
     int32_t sessionId = mNextSessionId.fetch_add(1);
 
-    *_aidl_return = owner->mSessionController->submit(mClientId, sessionId, in_clientUid,
-                                                      in_request, mClientCallback);
+    *_aidl_return = owner->mSessionController->submit(mClientId, sessionId, callingUid,
+                                                      in_clientUid, in_request, mClientCallback);
 
     if (*_aidl_return) {
         out_session->sessionId = sessionId;
@@ -217,6 +223,63 @@
     return Status::ok();
 }
 
+Status TranscodingClientManager::ClientImpl::addClientUid(int32_t in_sessionId,
+                                                          int32_t in_clientUid,
+                                                          bool* _aidl_return) {
+    *_aidl_return = false;
+
+    std::shared_ptr<TranscodingClientManager> owner;
+    if (mAbandoned || (owner = mOwner.lock()) == nullptr) {
+        return Status::fromServiceSpecificError(IMediaTranscodingService::ERROR_DISCONNECTED);
+    }
+
+    if (in_sessionId < 0) {
+        return Status::ok();
+    }
+
+    int32_t callingPid = AIBinder_getCallingPid();
+    int32_t callingUid = AIBinder_getCallingUid();
+
+    // Check if we can trust clientUid. Only privileged callers may add uids to existing sessions.
+    if (in_clientUid == IMediaTranscodingService::USE_CALLING_UID) {
+        in_clientUid = callingUid;
+    } else if (in_clientUid < 0) {
+        return Status::ok();
+    } else if (in_clientUid != callingUid && !owner->isTrustedCaller(callingPid, callingUid)) {
+        ALOGE("addClientUid rejected (clientUid %d) "
+              "(don't trust callingUid %d)",
+              in_clientUid, callingUid);
+        return STATUS_ERROR_FMT(IMediaTranscodingService::ERROR_PERMISSION_DENIED,
+                                "addClientUid rejected (clientUid %d) "
+                                "(don't trust callingUid %d)",
+                                in_clientUid, callingUid);
+    }
+
+    *_aidl_return = owner->mSessionController->addClientUid(mClientId, in_sessionId, in_clientUid);
+    return Status::ok();
+}
+
+Status TranscodingClientManager::ClientImpl::getClientUids(
+        int32_t in_sessionId, std::optional<std::vector<int32_t>>* _aidl_return) {
+    *_aidl_return = std::nullopt;
+
+    std::shared_ptr<TranscodingClientManager> owner;
+    if (mAbandoned || (owner = mOwner.lock()) == nullptr) {
+        return Status::fromServiceSpecificError(IMediaTranscodingService::ERROR_DISCONNECTED);
+    }
+
+    if (in_sessionId < 0) {
+        return Status::ok();
+    }
+
+    std::vector<int32_t> result;
+
+    if (owner->mSessionController->getClientUids(mClientId, in_sessionId, &result)) {
+        *_aidl_return = result;
+    }
+    return Status::ok();
+}
+
 Status TranscodingClientManager::ClientImpl::unregister() {
     bool abandoned = mAbandoned.exchange(true);
 
diff --git a/media/libmediatranscoding/TranscodingLogger.cpp b/media/libmediatranscoding/TranscodingLogger.cpp
new file mode 100644
index 0000000..29a52b0
--- /dev/null
+++ b/media/libmediatranscoding/TranscodingLogger.cpp
@@ -0,0 +1,194 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// #define LOG_NDEBUG 0
+#define LOG_TAG "TranscodingLogger"
+
+#include <media/NdkCommon.h>
+#include <media/TranscodingLogger.h>
+#include <statslog_media.h>
+#include <utils/Log.h>
+
+#include <cmath>
+#include <string>
+
+namespace android {
+
+static_assert(TranscodingLogger::UNKNOWN ==
+                      android::media::stats::MEDIA_TRANSCODING_SESSION_ENDED__REASON__UNKNOWN,
+              "Session event mismatch");
+static_assert(TranscodingLogger::FINISHED ==
+                      android::media::stats::MEDIA_TRANSCODING_SESSION_ENDED__REASON__FINISHED,
+              "Session event mismatch");
+static_assert(TranscodingLogger::ERROR ==
+                      android::media::stats::MEDIA_TRANSCODING_SESSION_ENDED__REASON__ERROR,
+              "Session event mismatch");
+static_assert(TranscodingLogger::PAUSED ==
+                      android::media::stats::MEDIA_TRANSCODING_SESSION_ENDED__REASON__PAUSED,
+              "Session event mismatch");
+static_assert(TranscodingLogger::CANCELLED ==
+                      android::media::stats::MEDIA_TRANSCODING_SESSION_ENDED__REASON__CANCELLED,
+              "Session event mismatch");
+static_assert(TranscodingLogger::START_FAILED ==
+                      android::media::stats::MEDIA_TRANSCODING_SESSION_ENDED__REASON__START_FAILED,
+              "Session event mismatch");
+static_assert(TranscodingLogger::RESUME_FAILED ==
+                      android::media::stats::MEDIA_TRANSCODING_SESSION_ENDED__REASON__RESUME_FAILED,
+              "Session event mismatch");
+static_assert(TranscodingLogger::CREATE_FAILED ==
+                      android::media::stats::MEDIA_TRANSCODING_SESSION_ENDED__REASON__CREATE_FAILED,
+              "Session event mismatch");
+static_assert(
+        TranscodingLogger::CONFIG_SRC_FAILED ==
+                android::media::stats::MEDIA_TRANSCODING_SESSION_ENDED__REASON__CONFIG_SRC_FAILED,
+        "Session event mismatch");
+static_assert(
+        TranscodingLogger::CONFIG_DST_FAILED ==
+                android::media::stats::MEDIA_TRANSCODING_SESSION_ENDED__REASON__CONFIG_DST_FAILED,
+        "Session event mismatch");
+static_assert(
+        TranscodingLogger::CONFIG_TRACK_FAILED ==
+                android::media::stats::MEDIA_TRANSCODING_SESSION_ENDED__REASON__CONFIG_TRACK_FAILED,
+        "Session event mismatch");
+static_assert(
+        TranscodingLogger::OPEN_SRC_FD_FAILED ==
+                android::media::stats::MEDIA_TRANSCODING_SESSION_ENDED__REASON__OPEN_SRC_FD_FAILED,
+        "Session event mismatch");
+static_assert(
+        TranscodingLogger::OPEN_DST_FD_FAILED ==
+                android::media::stats::MEDIA_TRANSCODING_SESSION_ENDED__REASON__OPEN_DST_FD_FAILED,
+        "Session event mismatch");
+static_assert(TranscodingLogger::NO_TRACKS ==
+                      android::media::stats::MEDIA_TRANSCODING_SESSION_ENDED__REASON__NO_TRACKS,
+              "Session event mismatch");
+
+static inline int32_t getInt32(AMediaFormat* fmt, const char* key, int32_t defaultValue = -1) {
+    int32_t value;
+    if (fmt == nullptr || !AMediaFormat_getInt32(fmt, key, &value)) {
+        ALOGW("Unable to get %s", key);
+        value = defaultValue;
+    }
+    return value;
+}
+
+// Note: returned string is owned by format and only valid until the next getString.
+static inline const char* getString(AMediaFormat* fmt, const char* key,
+                                    const char* defaultValue = "(null)") {
+    const char* value;
+    if (fmt == nullptr || !AMediaFormat_getString(fmt, key, &value)) {
+        ALOGW("Unable to get %s", key);
+        value = defaultValue;
+    }
+    return value;
+}
+
+TranscodingLogger::TranscodingLogger()
+      : mSessionEndedAtomWriter(&android::media::stats::stats_write) {}
+
+void TranscodingLogger::logSessionEnded(enum SessionEndedReason reason, uid_t callingUid,
+                                        int status, std::chrono::microseconds duration,
+                                        AMediaFormat* srcFormat, AMediaFormat* dstFormat) {
+    logSessionEnded(std::chrono::steady_clock::now(), reason, callingUid, status, duration,
+                    srcFormat, dstFormat);
+}
+
+void TranscodingLogger::logSessionEnded(const std::chrono::steady_clock::time_point& now,
+                                        enum SessionEndedReason reason, uid_t callingUid,
+                                        int status, std::chrono::microseconds duration,
+                                        AMediaFormat* srcFormat, AMediaFormat* dstFormat) {
+    if (srcFormat == nullptr) {
+        ALOGE("Source format is null. Dropping event.");
+        return;
+    }
+
+    if (!shouldLogAtom(now, status)) {
+        ALOGD("Maximum logged event count reached. Dropping event.");
+        return;
+    }
+
+    // Extract the pieces of information to log.
+    const int32_t srcWidth = getInt32(srcFormat, AMEDIAFORMAT_KEY_WIDTH);
+    const int32_t srcHeight = getInt32(srcFormat, AMEDIAFORMAT_KEY_HEIGHT);
+    const char* srcMime = getString(srcFormat, AMEDIAFORMAT_KEY_MIME);
+    const int32_t srcProfile = getInt32(srcFormat, AMEDIAFORMAT_KEY_PROFILE);
+    const int32_t srcLevel = getInt32(srcFormat, AMEDIAFORMAT_KEY_LEVEL);
+    const int32_t srcFrameRate = getInt32(srcFormat, AMEDIAFORMAT_KEY_FRAME_RATE);
+    const int32_t srcFrameCount = getInt32(srcFormat, AMEDIAFORMAT_KEY_FRAME_COUNT);
+    const bool srcIsHdr = AMediaFormatUtils::VideoIsHdr(srcFormat);
+
+    int32_t dstWidth = getInt32(dstFormat, AMEDIAFORMAT_KEY_WIDTH, srcWidth);
+    int32_t dstHeight = getInt32(dstFormat, AMEDIAFORMAT_KEY_HEIGHT, srcHeight);
+    const char* dstMime = dstFormat == nullptr
+                                  ? "passthrough"
+                                  : getString(dstFormat, AMEDIAFORMAT_KEY_MIME, srcMime);
+    const bool dstIsHdr = false;  // Transcoder always requests SDR output.
+
+    int64_t tmpDurationUs;
+    const int32_t srcDurationMs =
+            AMediaFormat_getInt64(srcFormat, AMEDIAFORMAT_KEY_DURATION, &tmpDurationUs)
+                    ? static_cast<int32_t>(tmpDurationUs / 1000)
+                    : -1;
+
+    int32_t transcodeFrameRate = -1;
+    if (status == 0 && srcFrameCount > 0 && duration.count() > 0) {
+        std::chrono::duration<double> seconds{duration};
+        transcodeFrameRate = static_cast<int32_t>(
+                std::round(static_cast<double>(srcFrameCount) / seconds.count()));
+    }
+
+    // Write the atom.
+    mSessionEndedAtomWriter(android::media::stats::MEDIA_TRANSCODING_SESSION_ENDED,
+                            static_cast<int>(reason), callingUid, status, transcodeFrameRate,
+                            srcWidth, srcHeight, srcMime, srcProfile, srcLevel, srcFrameRate,
+                            srcDurationMs, srcIsHdr, dstWidth, dstHeight, dstMime, dstIsHdr);
+}
+
+bool TranscodingLogger::shouldLogAtom(const std::chrono::steady_clock::time_point& now,
+                                      int status) {
+    std::scoped_lock lock{mLock};
+    static const std::chrono::hours oneDay(24);
+
+    // Remove events older than one day.
+    while (mLastLoggedAtoms.size() > 0 && (now - mLastLoggedAtoms.front().first) >= oneDay) {
+        if (mLastLoggedAtoms.front().second == AMEDIA_OK) {
+            --mSuccessfulCount;
+        }
+        mLastLoggedAtoms.pop();
+    }
+
+    // Don't log if maximum number of events is reached.
+    if (mLastLoggedAtoms.size() >= kMaxAtomsPerDay) {
+        return false;
+    }
+
+    // Don't log if the event is successful and the maximum number of successful events is reached.
+    if (status == AMEDIA_OK && mSuccessfulCount >= kMaxSuccessfulAtomsPerDay) {
+        return false;
+    }
+
+    // Record the event.
+    if (status == AMEDIA_OK) {
+        ++mSuccessfulCount;
+    }
+    mLastLoggedAtoms.emplace(now, status);
+    return true;
+}
+
+void TranscodingLogger::setSessionEndedAtomWriter(const SessionEndedAtomWriter& writer) {
+    mSessionEndedAtomWriter = writer;
+}
+
+}  // namespace android
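
For reference (illustration only, not part of this change), a minimal call site for the new logger could look like the sketch below. The uid and duration values are made up, and only a few of the keys that logSessionEnded() reads are populated; missing keys simply fall back to the defaults handled by getInt32()/getString() above.

    #include <media/TranscodingLogger.h>
    #include <media/NdkMediaError.h>
    #include <media/NdkMediaFormat.h>

    #include <chrono>
    #include <memory>

    // Hypothetical call site; uid 10123 and the 20-second duration are made-up values.
    void logExampleSession(const std::shared_ptr<android::TranscodingLogger>& logger) {
        AMediaFormat* src = AMediaFormat_new();
        AMediaFormat_setString(src, AMEDIAFORMAT_KEY_MIME, "video/hevc");
        AMediaFormat_setInt32(src, AMEDIAFORMAT_KEY_WIDTH, 3840);
        AMediaFormat_setInt32(src, AMEDIAFORMAT_KEY_HEIGHT, 2160);

        AMediaFormat* dst = AMediaFormat_new();
        AMediaFormat_setString(dst, AMEDIAFORMAT_KEY_MIME, "video/avc");
        AMediaFormat_setInt32(dst, AMEDIAFORMAT_KEY_WIDTH, 1920);
        AMediaFormat_setInt32(dst, AMEDIAFORMAT_KEY_HEIGHT, 1080);

        // Keys not set here (profile, level, frame rate, ...) fall back to the
        // defaults in getInt32()/getString().
        logger->logSessionEnded(android::TranscodingLogger::FINISHED, 10123 /*callingUid*/,
                                AMEDIA_OK, std::chrono::microseconds(20'000'000), src, dst);

        AMediaFormat_delete(src);
        AMediaFormat_delete(dst);
    }
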
diff --git a/media/libmediatranscoding/TranscodingSessionController.cpp b/media/libmediatranscoding/TranscodingSessionController.cpp
index 09ad3cd..68e2875 100644
--- a/media/libmediatranscoding/TranscodingSessionController.cpp
+++ b/media/libmediatranscoding/TranscodingSessionController.cpp
@@ -24,6 +24,7 @@
 #include <media/TranscodingUidPolicy.h>
 #include <utils/Log.h>
 
+#include <thread>
 #include <utility>
 
 namespace android {
@@ -60,12 +61,234 @@
     return "(unknown)";
 }
 
+///////////////////////////////////////////////////////////////////////////////
+struct TranscodingSessionController::Watchdog {
+    Watchdog(TranscodingSessionController* owner, int64_t timeoutUs);
+    ~Watchdog();
+
+    // Starts monitoring the session.
+    void start(const SessionKeyType& key);
+    // Stops monitoring the session.
+    void stop();
+    // Signals that the session is still alive. Must be sent at least every mTimeoutUs.
+    // (Timeout will happen if no ping in mTimeoutUs since the last ping.)
+    void keepAlive();
+
+private:
+    void threadLoop();
+    void updateTimer_l();
+
+    TranscodingSessionController* mOwner;
+    const int64_t mTimeoutUs;
+    mutable std::mutex mLock;
+    std::condition_variable mCondition GUARDED_BY(mLock);
+    // Whether watchdog is monitoring a session for timeout.
+    bool mActive GUARDED_BY(mLock);
+    // Whether watchdog is aborted and the monitoring thread should exit.
+    bool mAbort GUARDED_BY(mLock);
+    // When watchdog is active, the next timeout time point.
+    std::chrono::steady_clock::time_point mNextTimeoutTime GUARDED_BY(mLock);
+    // When watchdog is active, the session being watched.
+    SessionKeyType mSessionToWatch GUARDED_BY(mLock);
+    std::thread mThread;
+};
+
+TranscodingSessionController::Watchdog::Watchdog(TranscodingSessionController* owner,
+                                                 int64_t timeoutUs)
+      : mOwner(owner),
+        mTimeoutUs(timeoutUs),
+        mActive(false),
+        mAbort(false),
+        mThread(&Watchdog::threadLoop, this) {
+    ALOGV("Watchdog CTOR: %p", this);
+}
+
+TranscodingSessionController::Watchdog::~Watchdog() {
+    ALOGV("Watchdog DTOR: %p", this);
+
+    {
+        // Exit the looper thread.
+        std::scoped_lock lock{mLock};
+
+        mAbort = true;
+        mCondition.notify_one();
+    }
+
+    mThread.join();
+    ALOGV("Watchdog DTOR: %p, done.", this);
+}
+
+void TranscodingSessionController::Watchdog::start(const SessionKeyType& key) {
+    std::scoped_lock lock{mLock};
+
+    if (!mActive) {
+        ALOGI("Watchdog start: %s", sessionToString(key).c_str());
+
+        mActive = true;
+        mSessionToWatch = key;
+        updateTimer_l();
+        mCondition.notify_one();
+    }
+}
+
+void TranscodingSessionController::Watchdog::stop() {
+    std::scoped_lock lock{mLock};
+
+    if (mActive) {
+        ALOGI("Watchdog stop: %s", sessionToString(mSessionToWatch).c_str());
+
+        mActive = false;
+        mCondition.notify_one();
+    }
+}
+
+void TranscodingSessionController::Watchdog::keepAlive() {
+    std::scoped_lock lock{mLock};
+
+    if (mActive) {
+        ALOGI("Watchdog keepAlive: %s", sessionToString(mSessionToWatch).c_str());
+
+        updateTimer_l();
+        mCondition.notify_one();
+    }
+}
+
+// updateTimer_l() is only called with lock held.
+void TranscodingSessionController::Watchdog::updateTimer_l() NO_THREAD_SAFETY_ANALYSIS {
+    std::chrono::microseconds timeout(mTimeoutUs);
+    mNextTimeoutTime = std::chrono::steady_clock::now() + timeout;
+}
+
+// Unfortunately std::unique_lock is incompatible with -Wthread-safety.
+void TranscodingSessionController::Watchdog::threadLoop() NO_THREAD_SAFETY_ANALYSIS {
+    std::unique_lock<std::mutex> lock{mLock};
+
+    while (!mAbort) {
+        if (!mActive) {
+            mCondition.wait(lock);
+            continue;
+        }
+        // Watchdog active, wait till next timeout time.
+        if (mCondition.wait_until(lock, mNextTimeoutTime) == std::cv_status::timeout) {
+            // If timeout happens, report timeout and deactivate watchdog.
+            mActive = false;
+            // Make a copy of the session key; once we unlock, the member could change under us.
+            SessionKeyType sessionKey = mSessionToWatch;
+
+            ALOGE("Watchdog timeout: %s", sessionToString(sessionKey).c_str());
+
+            lock.unlock();
+            mOwner->onError(sessionKey.first, sessionKey.second,
+                            TranscodingErrorCode::kWatchdogTimeout);
+            lock.lock();
+        }
+    }
+}
+///////////////////////////////////////////////////////////////////////////////
+struct TranscodingSessionController::Pacer {
+    Pacer(const ControllerConfig& config)
+          : mBurstThresholdMs(config.pacerBurstThresholdMs),
+            mBurstCountQuota(config.pacerBurstCountQuota),
+            mBurstTimeQuotaSec(config.pacerBurstTimeQuotaSeconds) {}
+
+    ~Pacer() = default;
+
+    bool onSessionStarted(uid_t uid);
+    void onSessionCompleted(uid_t uid, std::chrono::microseconds runningTime);
+    void onSessionCancelled(uid_t uid);
+
+private:
+    // Threshold of time between finish/start below which a back-to-back start is counted.
+    int32_t mBurstThresholdMs;
+    // Maximum allowed back-to-back start count.
+    int32_t mBurstCountQuota;
+    // Maximum allowed back-to-back running time.
+    int32_t mBurstTimeQuotaSec;
+
+    struct UidHistoryEntry {
+        bool sessionActive = false;
+        int32_t burstCount = 0;
+        std::chrono::steady_clock::duration burstDuration{0};
+        std::chrono::steady_clock::time_point lastCompletedTime;
+    };
+    std::map<uid_t, UidHistoryEntry> mUidHistoryMap;
+};
+
+bool TranscodingSessionController::Pacer::onSessionStarted(uid_t uid) {
+    // If uid doesn't exist, only insert the entry and mark session active. Skip quota checking.
+    if (mUidHistoryMap.find(uid) == mUidHistoryMap.end()) {
+        mUidHistoryMap.emplace(uid, UidHistoryEntry{});
+        mUidHistoryMap[uid].sessionActive = true;
+        ALOGV("Pacer::onSessionStarted: uid %d: new", uid);
+        return true;
+    }
+
+    // TODO: if thermal throttling or resource loss happened to occur between this start
+    // and the previous completion, we should deduct the paused time from the elapsed time.
+    // (An individual session's pause time, on the other hand, doesn't need to be deducted
+    // because it doesn't affect the gap between the last completion and this start.)
+    auto timeSinceLastComplete =
+            std::chrono::steady_clock::now() - mUidHistoryMap[uid].lastCompletedTime;
+    if (mUidHistoryMap[uid].burstCount >= mBurstCountQuota &&
+        mUidHistoryMap[uid].burstDuration >= std::chrono::seconds(mBurstTimeQuotaSec)) {
+        ALOGW("Pacer::onSessionStarted: uid %d: over quota, burst count %d, time %lldms", uid,
+              mUidHistoryMap[uid].burstCount,
+              (long long)mUidHistoryMap[uid].burstDuration.count() / 1000000);
+        return false;
+    }
+
+    // If not over quota, allow the session, and reset as long as this is not too close
+    // to previous completion.
+    if (timeSinceLastComplete > std::chrono::milliseconds(mBurstThresholdMs)) {
+        ALOGV("Pacer::onSessionStarted: uid %d: reset quota", uid);
+        mUidHistoryMap[uid].burstCount = 0;
+        mUidHistoryMap[uid].burstDuration = std::chrono::milliseconds(0);
+    } else {
+        ALOGV("Pacer::onSessionStarted: uid %d: burst count %d, time %lldms", uid,
+              mUidHistoryMap[uid].burstCount,
+              (long long)mUidHistoryMap[uid].burstDuration.count() / 1000000);
+    }
+
+    mUidHistoryMap[uid].sessionActive = true;
+    return true;
+}
+
+void TranscodingSessionController::Pacer::onSessionCompleted(
+        uid_t uid, std::chrono::microseconds runningTime) {
+    // Skip quota update if this uid missed the start. (Could happen if the uid is added via
+    // addClientUid() after the session start.)
+    if (mUidHistoryMap.find(uid) == mUidHistoryMap.end() || !mUidHistoryMap[uid].sessionActive) {
+        ALOGV("Pacer::onSessionCompleted: uid %d: not started", uid);
+        return;
+    }
+    ALOGV("Pacer::onSessionCompleted: uid %d: runningTime %lld", uid, runningTime.count() / 1000);
+    mUidHistoryMap[uid].sessionActive = false;
+    mUidHistoryMap[uid].burstCount++;
+    mUidHistoryMap[uid].burstDuration += runningTime;
+    mUidHistoryMap[uid].lastCompletedTime = std::chrono::steady_clock::now();
+}
+
+void TranscodingSessionController::Pacer::onSessionCancelled(uid_t uid) {
+    if (mUidHistoryMap.find(uid) == mUidHistoryMap.end()) {
+        ALOGV("Pacer::onSessionCancelled: uid %d: not present", uid);
+        return;
+    }
+    // This is only called if a uid is removed from a session (because it was killed,
+    // or because the original submitting client went away while the session was kept
+    // for offline use). Since the uid is going to miss the onSessionCompleted(), we
+    // can't track this session, and have to check back at the next onSessionStarted().
+    mUidHistoryMap[uid].sessionActive = false;
+}
+
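
As an aside (illustration only, not part of the patch), the pacer's admission rule can be summarized by the standalone sketch below. It mirrors the decision order in onSessionStarted(): the quota check happens first, and the burst accounting is reset only on an allowed start that is not back-to-back. The numeric quotas here are hypothetical; the real values come from ControllerConfig.

    #include <chrono>

    struct BurstState {  // hypothetical stand-in for UidHistoryEntry
        int burstCount = 0;
        std::chrono::seconds burstDuration{0};
        std::chrono::milliseconds sinceLastComplete{0};
    };

    static bool allowStart(BurstState& s) {
        constexpr int kBurstCountQuota = 10;                        // hypothetical
        constexpr std::chrono::seconds kBurstTimeQuota{120};        // hypothetical
        constexpr std::chrono::milliseconds kBurstThreshold{1000};  // hypothetical

        // Reject only when BOTH the back-to-back start count and the accumulated
        // back-to-back running time are over their quotas.
        if (s.burstCount >= kBurstCountQuota && s.burstDuration >= kBurstTimeQuota) {
            return false;
        }
        // A start far enough from the previous completion is not back-to-back:
        // reset the burst accounting before allowing it.
        if (s.sinceLastComplete > kBurstThreshold) {
            s.burstCount = 0;
            s.burstDuration = std::chrono::seconds(0);
        }
        return true;
    }
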
+///////////////////////////////////////////////////////////////////////////////
+
 TranscodingSessionController::TranscodingSessionController(
-        const std::shared_ptr<TranscoderInterface>& transcoder,
+        const TranscoderFactoryType& transcoderFactory,
         const std::shared_ptr<UidPolicyInterface>& uidPolicy,
         const std::shared_ptr<ResourcePolicyInterface>& resourcePolicy,
-        const std::shared_ptr<ThermalPolicyInterface>& thermalPolicy)
-      : mTranscoder(transcoder),
+        const std::shared_ptr<ThermalPolicyInterface>& thermalPolicy,
+        const ControllerConfig* config)
+      : mTranscoderFactory(transcoderFactory),
         mUidPolicy(uidPolicy),
         mResourcePolicy(resourcePolicy),
         mThermalPolicy(thermalPolicy),
@@ -77,6 +300,13 @@
     mSessionQueues.emplace(OFFLINE_UID, SessionQueueType());
     mUidPackageNames[OFFLINE_UID] = "(offline)";
     mThermalThrottling = thermalPolicy->getThrottlingStatus();
+    if (config != nullptr) {
+        mConfig = *config;
+    }
+    mPacer.reset(new Pacer(mConfig));
+    ALOGD("@@@ watchdog %lld, burst count %d, burst time %d, burst threshold %d",
+          (long long)mConfig.watchdogTimeoutUs, mConfig.pacerBurstCountQuota,
+          mConfig.pacerBurstTimeQuotaSeconds, mConfig.pacerBurstThresholdMs);
 }
 
 TranscodingSessionController::~TranscodingSessionController() {}
@@ -151,22 +381,62 @@
     write(fd, result.string(), result.size());
 }
 
+/*
+ * Returns nullptr if there is no session, or if we're paused globally (due to resource loss,
+ * thermal throttling, etc.). Otherwise, returns the session that should be run next.
+ */
 TranscodingSessionController::Session* TranscodingSessionController::getTopSession_l() {
     if (mSessionMap.empty()) {
         return nullptr;
     }
+
+    // Return nullptr if we're paused globally due to resource loss or thermal throttling.
+    if (((mResourcePolicy != nullptr && mResourceLost) ||
+         (mThermalPolicy != nullptr && mThermalThrottling))) {
+        return nullptr;
+    }
+
     uid_t topUid = *mUidSortedList.begin();
+    // If the current session is running, and it's in the topUid's queue, let it continue
+    // to run even if it's not the earliest in that uid's queue.
+    // For example, uid(B) is added to a session while it's pending in uid(A)'s queue, then
+    // B is brought to the front, which causes the session to run, and then the user switches
+    // back to A.
+    if (mCurrentSession != nullptr && mCurrentSession->getState() == Session::RUNNING &&
+        mCurrentSession->allClientUids.count(topUid) > 0) {
+        return mCurrentSession;
+    }
     SessionKeyType topSessionKey = *mSessionQueues[topUid].begin();
     return &mSessionMap[topSessionKey];
 }
 
+void TranscodingSessionController::setSessionState_l(Session* session, Session::State state) {
+    bool wasRunning = (session->getState() == Session::RUNNING);
+    session->setState(state);
+    bool isRunning = (session->getState() == Session::RUNNING);
+
+    if (wasRunning == isRunning) {
+        return;
+    }
+
+    // Currently we only have 1 running session, and we always put the previous
+    // session in non-running state before we run the new session, so it's okay
+    // to start/stop the watchdog here. If this assumption changes, we need to
+    // track the number of running sessions and start/stop watchdog based on that.
+    if (isRunning) {
+        mWatchdog->start(session->key);
+    } else {
+        mWatchdog->stop();
+    }
+}
+
 void TranscodingSessionController::Session::setState(Session::State newState) {
     if (state == newState) {
         return;
     }
-    auto nowTime = std::chrono::system_clock::now();
+    auto nowTime = std::chrono::steady_clock::now();
     if (state != INVALID) {
-        std::chrono::microseconds elapsedTime = (nowTime - stateEnterTime);
+        std::chrono::microseconds elapsedTime =
+                std::chrono::duration_cast<std::chrono::microseconds>(nowTime - stateEnterTime);
         switch (state) {
         case PAUSED:
             pausedTime = pausedTime + elapsedTime;
@@ -189,48 +459,102 @@
 }
 
 void TranscodingSessionController::updateCurrentSession_l() {
-    Session* topSession = getTopSession_l();
     Session* curSession = mCurrentSession;
-    ALOGV("updateCurrentSession: topSession is %s, curSession is %s",
-          topSession == nullptr ? "null" : sessionToString(topSession->key).c_str(),
-          curSession == nullptr ? "null" : sessionToString(curSession->key).c_str());
+    Session* topSession = nullptr;
 
-    if (topSession == nullptr) {
-        mCurrentSession = nullptr;
-        return;
+    // Delayed init of transcoder and watchdog.
+    if (mTranscoder == nullptr) {
+        mTranscoder = mTranscoderFactory(shared_from_this());
+        mWatchdog = std::make_shared<Watchdog>(this, mConfig.watchdogTimeoutUs);
     }
 
-    bool shouldBeRunning = !((mResourcePolicy != nullptr && mResourceLost) ||
-                             (mThermalPolicy != nullptr && mThermalThrottling));
-    // If we found a topSession that should be run, and it's not already running,
-    // take some actions to ensure it's running.
-    if (topSession != curSession ||
-        (shouldBeRunning ^ (topSession->getState() == Session::RUNNING))) {
-        // If current session is running, pause it first. Note this is true for either
-        // cases: 1) If top session is changing, or 2) if top session is not changing but
-        // the topSession's state is changing.
+    // If we found a different top session, or the top session's running state is not
+    // correct, take actions to make it correct.
+    while ((topSession = getTopSession_l()) != curSession ||
+           (topSession != nullptr && !topSession->isRunning())) {
+        ALOGV("updateCurrentSession_l: topSession is %s, curSession is %s",
+              topSession == nullptr ? "null" : sessionToString(topSession->key).c_str(),
+              curSession == nullptr ? "null" : sessionToString(curSession->key).c_str());
+
+        // If the current session is running, pause it first. Note this is needed in both
+        // cases: 1) the top session is changing to another session, or 2) the top session is
+        // changing to null (which means we should be globally paused).
         if (curSession != nullptr && curSession->getState() == Session::RUNNING) {
             mTranscoder->pause(curSession->key.first, curSession->key.second);
-            curSession->setState(Session::PAUSED);
+            setSessionState_l(curSession, Session::PAUSED);
         }
-        // If we are not experiencing resource loss nor thermal throttling, we can start
-        // or resume the topSession now.
-        if (shouldBeRunning) {
-            if (topSession->getState() == Session::NOT_STARTED) {
-                mTranscoder->start(topSession->key.first, topSession->key.second,
-                                   topSession->request, topSession->callback.lock());
-            } else if (topSession->getState() == Session::PAUSED) {
-                mTranscoder->resume(topSession->key.first, topSession->key.second,
-                                    topSession->request, topSession->callback.lock());
+
+        if (topSession == nullptr) {
+            // Nothing more to run (either no session or globally paused).
+            break;
+        }
+
+        // Otherwise, ensure topSession is running.
+        if (topSession->getState() == Session::NOT_STARTED) {
+            // Check if at least one client has quota to start the session.
+            bool keepForClient = false;
+            for (uid_t uid : topSession->allClientUids) {
+                if (mPacer->onSessionStarted(uid)) {
+                    keepForClient = true;
+                    // DO NOT break here, because book-keeping still needs to happen
+                    // for the other uids.
+                }
             }
-            topSession->setState(Session::RUNNING);
+            if (!keepForClient) {
+                // Unfortunately all uids requesting this session are out of quota.
+                // Drop this session and try the next one.
+                {
+                    auto clientCallback = mSessionMap[topSession->key].callback.lock();
+                    if (clientCallback != nullptr) {
+                        clientCallback->onTranscodingFailed(
+                                topSession->key.second, TranscodingErrorCode::kDroppedByService);
+                    }
+                }
+                removeSession_l(topSession->key, Session::DROPPED_BY_PACER);
+                continue;
+            }
+            mTranscoder->start(topSession->key.first, topSession->key.second, topSession->request,
+                               topSession->callingUid, topSession->callback.lock());
+            setSessionState_l(topSession, Session::RUNNING);
+        } else if (topSession->getState() == Session::PAUSED) {
+            mTranscoder->resume(topSession->key.first, topSession->key.second, topSession->request,
+                                topSession->callingUid, topSession->callback.lock());
+            setSessionState_l(topSession, Session::RUNNING);
         }
+        break;
     }
     mCurrentSession = topSession;
 }
 
-void TranscodingSessionController::removeSession_l(const SessionKeyType& sessionKey,
-                                                   Session::State finalState) {
+void TranscodingSessionController::addUidToSession_l(uid_t clientUid,
+                                                     const SessionKeyType& sessionKey) {
+    // If it's an offline session, the queue was already added in the constructor.
+    // If it's a real-time session, check if a queue is already present for the uid,
+    // and add a new queue if needed.
+    if (clientUid != OFFLINE_UID) {
+        if (mSessionQueues.count(clientUid) == 0) {
+            mUidPolicy->registerMonitorUid(clientUid);
+            if (mUidPolicy->isUidOnTop(clientUid)) {
+                mUidSortedList.push_front(clientUid);
+            } else {
+                // Shouldn't be submitting real-time requests from non-top app,
+                // put it in front of the offline queue.
+                mUidSortedList.insert(mOfflineUidIterator, clientUid);
+            }
+        } else if (clientUid != *mUidSortedList.begin()) {
+            if (mUidPolicy->isUidOnTop(clientUid)) {
+                mUidSortedList.remove(clientUid);
+                mUidSortedList.push_front(clientUid);
+            }
+        }
+    }
+    // Append this session to the uid's queue.
+    mSessionQueues[clientUid].push_back(sessionKey);
+}
+
+void TranscodingSessionController::removeSession_l(
+        const SessionKeyType& sessionKey, Session::State finalState,
+        const std::shared_ptr<std::function<bool(uid_t uid)>>& keepUid) {
     ALOGV("%s: session %s", __FUNCTION__, sessionToString(sessionKey).c_str());
 
     if (mSessionMap.count(sessionKey) == 0) {
@@ -239,32 +563,61 @@
     }
 
     // Remove session from uid's queue.
-    const uid_t uid = mSessionMap[sessionKey].uid;
-    SessionQueueType& sessionQueue = mSessionQueues[uid];
-    auto it = std::find(sessionQueue.begin(), sessionQueue.end(), sessionKey);
-    if (it == sessionQueue.end()) {
-        ALOGE("couldn't find session %s in queue for uid %d", sessionToString(sessionKey).c_str(),
-              uid);
-        return;
+    bool uidQueueRemoved = false;
+    std::unordered_set<uid_t> remainingUids;
+    for (uid_t uid : mSessionMap[sessionKey].allClientUids) {
+        if (keepUid != nullptr) {
+            if ((*keepUid)(uid)) {
+                remainingUids.insert(uid);
+                continue;
+            }
+            // If we have uids to keep, the session is not going to any final
+            // state, so we can't use onSessionCompleted() as the running time will
+            // not be valid. Only notify the pacer to stop tracking this session.
+            mPacer->onSessionCancelled(uid);
+        }
+        SessionQueueType& sessionQueue = mSessionQueues[uid];
+        auto it = std::find(sessionQueue.begin(), sessionQueue.end(), sessionKey);
+        if (it == sessionQueue.end()) {
+            ALOGW("couldn't find session %s in queue for uid %d",
+                  sessionToString(sessionKey).c_str(), uid);
+            continue;
+        }
+        sessionQueue.erase(it);
+
+        // If this is the last session in a real-time queue, remove this uid's queue.
+        if (uid != OFFLINE_UID && sessionQueue.empty()) {
+            mUidSortedList.remove(uid);
+            mSessionQueues.erase(uid);
+            mUidPolicy->unregisterMonitorUid(uid);
+
+            uidQueueRemoved = true;
+        }
     }
-    sessionQueue.erase(it);
 
-    // If this is the last session in a real-time queue, remove this uid's queue.
-    if (uid != OFFLINE_UID && sessionQueue.empty()) {
-        mUidSortedList.remove(uid);
-        mSessionQueues.erase(uid);
-        mUidPolicy->unregisterMonitorUid(uid);
-
+    if (uidQueueRemoved) {
         std::unordered_set<uid_t> topUids = mUidPolicy->getTopUids();
         moveUidsToTop_l(topUids, false /*preserveTopUid*/);
     }
 
+    if (keepUid != nullptr) {
+        mSessionMap[sessionKey].allClientUids = remainingUids;
+        return;
+    }
+
     // Clear current session.
     if (mCurrentSession == &mSessionMap[sessionKey]) {
         mCurrentSession = nullptr;
     }
 
-    mSessionMap[sessionKey].setState(finalState);
+    setSessionState_l(&mSessionMap[sessionKey], finalState);
+
+    // We can use onSessionCompleted() even for CANCELLED, because runningTime is
+    // now updated by setSessionState_l().
+    for (uid_t uid : mSessionMap[sessionKey].allClientUids) {
+        mPacer->onSessionCompleted(uid, mSessionMap[sessionKey].runningTime);
+    }
+
     mSessionHistory.push_back(mSessionMap[sessionKey]);
     if (mSessionHistory.size() > kSessionHistoryMax) {
         mSessionHistory.erase(mSessionHistory.begin());
@@ -328,13 +681,13 @@
 }
 
 bool TranscodingSessionController::submit(
-        ClientIdType clientId, SessionIdType sessionId, uid_t uid,
+        ClientIdType clientId, SessionIdType sessionId, uid_t callingUid, uid_t clientUid,
         const TranscodingRequestParcel& request,
         const std::weak_ptr<ITranscodingClientCallback>& callback) {
     SessionKeyType sessionKey = std::make_pair(clientId, sessionId);
 
     ALOGV("%s: session %s, uid %d, prioirty %d", __FUNCTION__, sessionToString(sessionKey).c_str(),
-          uid, (int32_t)request.priority);
+          clientUid, (int32_t)request.priority);
 
     std::scoped_lock lock{mLock};
 
@@ -344,47 +697,25 @@
     }
 
     // Add the uid package name to the store of package names we already know.
-    if (mUidPackageNames.count(uid) == 0) {
-        mUidPackageNames.emplace(uid, request.clientPackageName);
+    if (mUidPackageNames.count(clientUid) == 0) {
+        mUidPackageNames.emplace(clientUid, request.clientPackageName);
     }
 
     // TODO(chz): only support offline vs real-time for now. All kUnspecified sessions
     // go to offline queue.
     if (request.priority == TranscodingSessionPriority::kUnspecified) {
-        uid = OFFLINE_UID;
+        clientUid = OFFLINE_UID;
     }
 
     // Add session to session map.
     mSessionMap[sessionKey].key = sessionKey;
-    mSessionMap[sessionKey].uid = uid;
-    mSessionMap[sessionKey].lastProgress = 0;
-    mSessionMap[sessionKey].pauseCount = 0;
+    mSessionMap[sessionKey].callingUid = callingUid;
+    mSessionMap[sessionKey].allClientUids.insert(clientUid);
     mSessionMap[sessionKey].request = request;
     mSessionMap[sessionKey].callback = callback;
-    mSessionMap[sessionKey].setState(Session::NOT_STARTED);
+    setSessionState_l(&mSessionMap[sessionKey], Session::NOT_STARTED);
 
-    // If it's an offline session, the queue was already added in constructor.
-    // If it's a real-time sessions, check if a queue is already present for the uid,
-    // and add a new queue if needed.
-    if (uid != OFFLINE_UID) {
-        if (mSessionQueues.count(uid) == 0) {
-            mUidPolicy->registerMonitorUid(uid);
-            if (mUidPolicy->isUidOnTop(uid)) {
-                mUidSortedList.push_front(uid);
-            } else {
-                // Shouldn't be submitting real-time requests from non-top app,
-                // put it in front of the offline queue.
-                mUidSortedList.insert(mOfflineUidIterator, uid);
-            }
-        } else if (uid != *mUidSortedList.begin()) {
-            if (mUidPolicy->isUidOnTop(uid)) {
-                mUidSortedList.remove(uid);
-                mUidSortedList.push_front(uid);
-            }
-        }
-    }
-    // Append this session to the uid's queue.
-    mSessionQueues[uid].push_back(sessionKey);
+    addUidToSession_l(clientUid, sessionKey);
 
     updateCurrentSession_l();
 
@@ -397,14 +728,20 @@
 
     ALOGV("%s: session %s", __FUNCTION__, sessionToString(sessionKey).c_str());
 
-    std::list<SessionKeyType> sessionsToRemove;
+    std::list<SessionKeyType> sessionsToRemove, sessionsForOffline;
 
     std::scoped_lock lock{mLock};
 
     if (sessionId < 0) {
         for (auto it = mSessionMap.begin(); it != mSessionMap.end(); ++it) {
-            if (it->first.first == clientId && it->second.uid != OFFLINE_UID) {
-                sessionsToRemove.push_back(it->first);
+            if (it->first.first == clientId) {
+                // If there is offline request, only keep the offline client;
+                // otherwise remove the session.
+                if (it->second.allClientUids.count(OFFLINE_UID) > 0) {
+                    sessionsForOffline.push_back(it->first);
+                } else {
+                    sessionsToRemove.push_back(it->first);
+                }
             }
         }
     } else {
@@ -428,6 +765,12 @@
         removeSession_l(*it, Session::CANCELED);
     }
 
+    auto keepUid = std::make_shared<std::function<bool(uid_t)>>(
+            [](uid_t uid) { return uid == OFFLINE_UID; });
+    for (auto it = sessionsForOffline.begin(); it != sessionsForOffline.end(); ++it) {
+        removeSession_l(*it, Session::CANCELED, keepUid);
+    }
+
     // Start next session.
     updateCurrentSession_l();
 
@@ -435,6 +778,51 @@
     return true;
 }
 
+bool TranscodingSessionController::addClientUid(ClientIdType clientId, SessionIdType sessionId,
+                                                uid_t clientUid) {
+    SessionKeyType sessionKey = std::make_pair(clientId, sessionId);
+
+    std::scoped_lock lock{mLock};
+
+    if (mSessionMap.count(sessionKey) == 0) {
+        ALOGE("session %s doesn't exist", sessionToString(sessionKey).c_str());
+        return false;
+    }
+
+    if (mSessionMap[sessionKey].allClientUids.count(clientUid) > 0) {
+        ALOGE("session %s already has uid %d", sessionToString(sessionKey).c_str(), clientUid);
+        return false;
+    }
+
+    mSessionMap[sessionKey].allClientUids.insert(clientUid);
+    addUidToSession_l(clientUid, sessionKey);
+
+    updateCurrentSession_l();
+
+    validateState_l();
+    return true;
+}
+
+bool TranscodingSessionController::getClientUids(ClientIdType clientId, SessionIdType sessionId,
+                                                 std::vector<int32_t>* out_clientUids) {
+    SessionKeyType sessionKey = std::make_pair(clientId, sessionId);
+
+    std::scoped_lock lock{mLock};
+
+    if (mSessionMap.count(sessionKey) == 0) {
+        ALOGE("session %s doesn't exist", sessionToString(sessionKey).c_str());
+        return false;
+    }
+
+    out_clientUids->clear();
+    for (uid_t uid : mSessionMap[sessionKey].allClientUids) {
+        if (uid != OFFLINE_UID) {
+            out_clientUids->push_back(uid);
+        }
+    }
+    return true;
+}
+
 bool TranscodingSessionController::getSession(ClientIdType clientId, SessionIdType sessionId,
                                               TranscodingRequestParcel* request) {
     SessionKeyType sessionKey = std::make_pair(clientId, sessionId);
@@ -527,6 +915,15 @@
 void TranscodingSessionController::onError(ClientIdType clientId, SessionIdType sessionId,
                                            TranscodingErrorCode err) {
     notifyClient(clientId, sessionId, "error", [=](const SessionKeyType& sessionKey) {
+        if (err == TranscodingErrorCode::kWatchdogTimeout) {
+            // Abandon the transcoder, as its handler thread might be stuck in a call into
+            // MediaTranscoder and may not be able to handle any new tasks.
+            mTranscoder->stop(clientId, sessionId, true /*abandon*/);
+            // Drop the last reference before we create a new transcoder.
+            mTranscoder = nullptr;
+            mTranscoder = mTranscoderFactory(shared_from_this());
+        }
+
         {
             auto clientCallback = mSessionMap[sessionKey].callback.lock();
             if (clientCallback != nullptr) {
@@ -555,6 +952,11 @@
     });
 }
 
+void TranscodingSessionController::onHeartBeat(ClientIdType clientId, SessionIdType sessionId) {
+    notifyClient(clientId, sessionId, "heart-beat",
+                 [=](const SessionKeyType& /*sessionKey*/) { mWatchdog->keepAlive(); });
+}
+
 void TranscodingSessionController::onResourceLost(ClientIdType clientId, SessionIdType sessionId) {
     ALOGI("%s", __FUNCTION__);
 
@@ -572,7 +974,7 @@
         // If we receive a resource loss event, the transcoder already paused the transcoding,
         // so we don't need to call onPaused() to pause it. However, we still need to notify
         // the client and update the session state here.
-        resourceLostSession->setState(Session::PAUSED);
+        setSessionState_l(resourceLostSession, Session::PAUSED);
         // Notify the client as a paused event.
         auto clientCallback = resourceLostSession->callback.lock();
         if (clientCallback != nullptr) {
@@ -612,6 +1014,58 @@
     validateState_l();
 }
 
+void TranscodingSessionController::onUidGone(uid_t goneUid) {
+    ALOGD("%s: gone uid %u", __FUNCTION__, goneUid);
+
+    std::list<SessionKeyType> sessionsToRemove, sessionsForOtherUids;
+
+    std::scoped_lock lock{mLock};
+
+    for (auto it = mSessionMap.begin(); it != mSessionMap.end(); ++it) {
+        if (it->second.allClientUids.count(goneUid) > 0) {
+            // If goneUid is the only uid, remove the session; otherwise, only
+            // remove the uid from the session.
+            if (it->second.allClientUids.size() > 1) {
+                sessionsForOtherUids.push_back(it->first);
+            } else {
+                sessionsToRemove.push_back(it->first);
+            }
+        }
+    }
+
+    for (auto it = sessionsToRemove.begin(); it != sessionsToRemove.end(); ++it) {
+        // If the session has ever been started, stop it now.
+        // Note that stop() is needed even if the session is currently paused. This instructs
+        // the transcoder to discard any saved state for the session; otherwise that state may
+        // never be discarded.
+        if (mSessionMap[*it].getState() != Session::NOT_STARTED) {
+            mTranscoder->stop(it->first, it->second);
+        }
+
+        {
+            auto clientCallback = mSessionMap[*it].callback.lock();
+            if (clientCallback != nullptr) {
+                clientCallback->onTranscodingFailed(it->second,
+                                                    TranscodingErrorCode::kUidGoneCancelled);
+            }
+        }
+
+        // Remove the session.
+        removeSession_l(*it, Session::CANCELED);
+    }
+
+    auto keepUid = std::make_shared<std::function<bool(uid_t)>>(
+            [goneUid](uid_t uid) { return uid != goneUid; });
+    for (auto it = sessionsForOtherUids.begin(); it != sessionsForOtherUids.end(); ++it) {
+        removeSession_l(*it, Session::CANCELED, keepUid);
+    }
+
+    // Start next session.
+    updateCurrentSession_l();
+
+    validateState_l();
+}
+
 void TranscodingSessionController::onResourceAvailable() {
     std::scoped_lock lock{mLock};
 
@@ -664,7 +1118,8 @@
     LOG_ALWAYS_FATAL_IF(*mOfflineUidIterator != OFFLINE_UID,
                         "mOfflineUidIterator not pointing to offline uid");
     LOG_ALWAYS_FATAL_IF(mUidSortedList.size() != mSessionQueues.size(),
-                        "mUidList and mSessionQueues size mismatch");
+                        "mUidSortedList and mSessionQueues size mismatch, %zu vs %zu",
+                        mUidSortedList.size(), mSessionQueues.size());
 
     int32_t totalSessions = 0;
     for (auto uid : mUidSortedList) {
@@ -678,8 +1133,14 @@
 
         totalSessions += mSessionQueues[uid].size();
     }
-    LOG_ALWAYS_FATAL_IF(mSessionMap.size() != totalSessions,
-                        "mSessions size doesn't match total sessions counted from uid queues");
+    int32_t totalSessionsAlternative = 0;
+    for (auto const& s : mSessionMap) {
+        totalSessionsAlternative += s.second.allClientUids.size();
+    }
+    LOG_ALWAYS_FATAL_IF(totalSessions != totalSessionsAlternative,
+                        "session count (including dup) from mSessionQueues doesn't match that from "
+                        "mSessionMap, %d vs %d",
+                        totalSessions, totalSessionsAlternative);
 #endif  // VALIDATE_STATE
 }
 
diff --git a/media/libmediatranscoding/TranscodingUidPolicy.cpp b/media/libmediatranscoding/TranscodingUidPolicy.cpp
index b5eb028..0a1ffbc 100644
--- a/media/libmediatranscoding/TranscodingUidPolicy.cpp
+++ b/media/libmediatranscoding/TranscodingUidPolicy.cpp
@@ -141,38 +141,34 @@
 }
 
 void TranscodingUidPolicy::onUidStateChanged(uid_t uid, int32_t procState) {
-    ALOGV("onUidStateChanged: %u, procState %d", uid, procState);
+    ALOGV("onUidStateChanged: uid %u, procState %d", uid, procState);
 
     bool topUidSetChanged = false;
+    bool isUidGone = false;
     std::unordered_set<uid_t> topUids;
     {
         Mutex::Autolock _l(mUidLock);
         auto it = mUidStateMap.find(uid);
         if (it != mUidStateMap.end() && it->second != procState) {
-            // Top set changed if 1) the uid is in the current top uid set, or 2) the
-            // new procState is at least the same priority as the current top uid state.
-            bool isUidCurrentTop =
-                    mTopUidState != IMPORTANCE_UNKNOWN && mStateUidMap[mTopUidState].count(uid) > 0;
-            bool isNewStateHigherThanTop =
-                    procState != IMPORTANCE_UNKNOWN &&
-                    (procState <= mTopUidState || mTopUidState == IMPORTANCE_UNKNOWN);
-            topUidSetChanged = (isUidCurrentTop || isNewStateHigherThanTop);
+            isUidGone = (procState == AACTIVITYMANAGER_IMPORTANCE_GONE);
+
+            topUids = mStateUidMap[mTopUidState];
 
             // Move uid to the new procState.
             mStateUidMap[it->second].erase(uid);
             mStateUidMap[procState].insert(uid);
             it->second = procState;
 
-            if (topUidSetChanged) {
-                updateTopUid_l();
-
+            updateTopUid_l();
+            if (topUids != mStateUidMap[mTopUidState]) {
                 // Make a copy of the uid set for callback.
                 topUids = mStateUidMap[mTopUidState];
+                topUidSetChanged = true;
             }
         }
     }
 
-    ALOGV("topUidSetChanged: %d", topUidSetChanged);
+    ALOGV("topUidSetChanged: %d, isUidGone %d", topUidSetChanged, isUidGone);
 
     if (topUidSetChanged) {
         auto callback = mUidPolicyCallback.lock();
@@ -180,6 +176,12 @@
             callback->onTopUidsChanged(topUids);
         }
     }
+    if (isUidGone) {
+        auto callback = mUidPolicyCallback.lock();
+        if (callback != nullptr) {
+            callback->onUidGone(uid);
+        }
+    }
 }
 
 void TranscodingUidPolicy::updateTopUid_l() {
diff --git a/media/libmediatranscoding/aidl/android/media/ITranscodingClient.aidl b/media/libmediatranscoding/aidl/android/media/ITranscodingClient.aidl
index 151e3d0..9ef9052 100644
--- a/media/libmediatranscoding/aidl/android/media/ITranscodingClient.aidl
+++ b/media/libmediatranscoding/aidl/android/media/ITranscodingClient.aidl
@@ -55,6 +55,32 @@
     boolean getSessionWithId(in int sessionId, out TranscodingSessionParcel session);
 
     /**
+     * Add an additional client uid requesting a session.
+     *
+     * @sessionId the session id to which to add the additional client uid.
+     * @clientUid the additional client uid to be added.
+     * @return false if the session doesn't exist or the client is already requesting the
+     * session, true otherwise.
+     */
+    boolean addClientUid(in int sessionId, int clientUid);
+
+    /**
+     * Retrieves the (unsorted) list of all clients requesting a session.
+     *
+     * Note that if a session was submitted with offline priority (
+     * TranscodingSessionPriority::kUnspecified), it initially will not be considered requested
+     * by any particular client, because the client could go away any time after the submission.
+     * However, additional uids could be added via addClientUid() after the submission, which
+     * essentially makes the request a real-time request instead of an offline request.
+     *
+     * @sessionId the session id for which to retrieve the client uid list.
+     * @clientUids array to hold the retrieved client uid list.
+     * @return false if the session doesn't exist, true otherwise.
+     */
+    @nullable
+    int[] getClientUids(in int sessionId);
+
+    /**
     * Unregister the client with the MediaTranscodingService.
     *
     * Client will not be able to perform any more transcoding after unregister.
diff --git a/media/libmediatranscoding/aidl/android/media/TranscodingErrorCode.aidl b/media/libmediatranscoding/aidl/android/media/TranscodingErrorCode.aidl
index b044d41..fdd86c7 100644
--- a/media/libmediatranscoding/aidl/android/media/TranscodingErrorCode.aidl
+++ b/media/libmediatranscoding/aidl/android/media/TranscodingErrorCode.aidl
@@ -23,12 +23,20 @@
  */
 @Backing(type = "int")
 enum TranscodingErrorCode {
+    // Errors exposed to client side.
     kNoError = 0,
-    kUnknown = 1,
-    kMalformed = 2,
-    kUnsupported = 3,
-    kInvalidParameter = 4,
-    kInvalidOperation = 5,
-    kErrorIO = 6,
-    kInsufficientResources = 7,
+    kDroppedByService = 1,
+    kServiceUnavailable = 2,
+
+    // Other private errors.
+    kPrivateErrorFirst     = 1000,
+    kUnknown               = kPrivateErrorFirst + 0,
+    kMalformed             = kPrivateErrorFirst + 1,
+    kUnsupported           = kPrivateErrorFirst + 2,
+    kInvalidParameter      = kPrivateErrorFirst + 3,
+    kInvalidOperation      = kPrivateErrorFirst + 4,
+    kErrorIO               = kPrivateErrorFirst + 5,
+    kInsufficientResources = kPrivateErrorFirst + 6,
+    kWatchdogTimeout       = kPrivateErrorFirst + 7,
+    kUidGoneCancelled      = kPrivateErrorFirst + 8,
 }
\ No newline at end of file
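
Illustration only (not part of this change): with the new layout, codes at or above kPrivateErrorFirst are service-internal. A hypothetical helper on the native side could gate what gets reported back to clients, for example by collapsing private codes to kServiceUnavailable; the include path below assumes the NDK AIDL backend, and the mapping policy is purely an example.

    #include <aidl/android/media/TranscodingErrorCode.h>

    #include <cstdint>

    using ::aidl::android::media::TranscodingErrorCode;

    // True for codes meant to be visible to clients (kNoError, kDroppedByService, ...).
    static inline bool isClientVisibleError(TranscodingErrorCode err) {
        return static_cast<int32_t>(err) <
               static_cast<int32_t>(TranscodingErrorCode::kPrivateErrorFirst);
    }

    // Hypothetical policy: collapse any private code to a generic client-visible one.
    static inline TranscodingErrorCode toClientError(TranscodingErrorCode err) {
        return isClientVisibleError(err) ? err : TranscodingErrorCode::kServiceUnavailable;
    }
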
diff --git a/media/libmediatranscoding/aidl/android/media/TranscodingTestConfig.aidl b/media/libmediatranscoding/aidl/android/media/TranscodingTestConfig.aidl
index 12e0e94..6727974 100644
--- a/media/libmediatranscoding/aidl/android/media/TranscodingTestConfig.aidl
+++ b/media/libmediatranscoding/aidl/android/media/TranscodingTestConfig.aidl
@@ -23,12 +23,6 @@
   */
 parcelable TranscodingTestConfig {
     /**
-     * Whether to use SimulatedTranscoder for testing. Note that SimulatedTranscoder does not send
-     * transcoding sessions to real MediaTranscoder.
-     */
-    boolean useSimulatedTranscoder = false;
-
-    /**
      * Passthrough mode used for testing. The transcoding service will assume the destination
      * path already contains the transcoding of the source file and return it to client directly.
      */
diff --git a/media/libmediatranscoding/include/media/ControllerClientInterface.h b/media/libmediatranscoding/include/media/ControllerClientInterface.h
index 3fd4f0c..9311e2e 100644
--- a/media/libmediatranscoding/include/media/ControllerClientInterface.h
+++ b/media/libmediatranscoding/include/media/ControllerClientInterface.h
@@ -36,8 +36,8 @@
      * Returns true on success and false on failure. This call will fail is a session identified
      * by <clientId, sessionId> already exists.
      */
-    virtual bool submit(ClientIdType clientId, SessionIdType sessionId, uid_t uid,
-                        const TranscodingRequestParcel& request,
+    virtual bool submit(ClientIdType clientId, SessionIdType sessionId, uid_t callingUid,
+                        uid_t clientUid, const TranscodingRequestParcel& request,
                         const std::weak_ptr<ITranscodingClientCallback>& clientCallback) = 0;
 
     /**
@@ -60,6 +60,29 @@
     virtual bool getSession(ClientIdType clientId, SessionIdType sessionId,
                             TranscodingRequestParcel* request) = 0;
 
+    /**
+     * Add an additional client uid requesting the session identified by <clientId, sessionId>.
+     *
+     * Returns false if the session doesn't exist, or the client is already requesting the
+     * session. Returns true otherwise.
+     */
+    virtual bool addClientUid(ClientIdType clientId, SessionIdType sessionId, uid_t clientUid);
+
+    /**
+     * Retrieves the (unsorted) list of all clients requesting the session identified by
+     * <clientId, sessionId>.
+     *
+     * Note that if a session was submitted with offline priority (
+     * TranscodingSessionPriority::kUnspecified), it initially will not be considered requested
+     * by any particular client, because the client could go away any time after the submission.
+     * However, additional uids could be added via addClientUid() after the submission, which
+     * essentially makes the request a real-time request instead of an offline request.
+     *
+     * Returns false if the session doesn't exist. Returns true otherwise.
+     */
+    virtual bool getClientUids(ClientIdType clientId, SessionIdType sessionId,
+                               std::vector<int32_t>* out_clientUids);
+
 protected:
     virtual ~ControllerClientInterface() = default;
 };
diff --git a/media/libmediatranscoding/include/media/TranscoderInterface.h b/media/libmediatranscoding/include/media/TranscoderInterface.h
index 6268aa5..3b0bd3b 100644
--- a/media/libmediatranscoding/include/media/TranscoderInterface.h
+++ b/media/libmediatranscoding/include/media/TranscoderInterface.h
@@ -32,15 +32,16 @@
 // Interface for the controller to call the transcoder to take actions.
 class TranscoderInterface {
 public:
-    virtual void setCallback(const std::shared_ptr<TranscoderCallbackInterface>& cb) = 0;
     virtual void start(ClientIdType clientId, SessionIdType sessionId,
-                       const TranscodingRequestParcel& request,
+                       const TranscodingRequestParcel& request, uid_t callingUid,
                        const std::shared_ptr<ITranscodingClientCallback>& clientCallback) = 0;
     virtual void pause(ClientIdType clientId, SessionIdType sessionId) = 0;
     virtual void resume(ClientIdType clientId, SessionIdType sessionId,
-                        const TranscodingRequestParcel& request,
+                        const TranscodingRequestParcel& request, uid_t callingUid,
                         const std::shared_ptr<ITranscodingClientCallback>& clientCallback) = 0;
-    virtual void stop(ClientIdType clientId, SessionIdType sessionId) = 0;
+    // Stop the specified session. If abandon is true, the transcoder wrapper will be discarded
+    // after the session stops.
+    virtual void stop(ClientIdType clientId, SessionIdType sessionId, bool abandon = false) = 0;
 
 protected:
     virtual ~TranscoderInterface() = default;
@@ -59,6 +60,7 @@
                          TranscodingErrorCode err) = 0;
     virtual void onProgressUpdate(ClientIdType clientId, SessionIdType sessionId,
                                   int32_t progress) = 0;
+    virtual void onHeartBeat(ClientIdType clientId, SessionIdType sessionId) = 0;
 
     // Called when transcoding becomes temporarily inaccessible due to loss of resource.
     // If there is any session currently running, it will be paused. When resource contention
diff --git a/media/libmediatranscoding/include/media/TranscoderWrapper.h b/media/libmediatranscoding/include/media/TranscoderWrapper.h
index 02beede..d3d4c86 100644
--- a/media/libmediatranscoding/include/media/TranscoderWrapper.h
+++ b/media/libmediatranscoding/include/media/TranscoderWrapper.h
@@ -18,8 +18,11 @@
 #define ANDROID_TRANSCODER_WRAPPER_H
 
 #include <media/NdkMediaError.h>
+#include <media/NdkMediaFormat.h>
 #include <media/TranscoderInterface.h>
+#include <media/TranscodingLogger.h>
 
+#include <chrono>
 #include <list>
 #include <map>
 #include <mutex>
@@ -36,22 +39,37 @@
 class TranscoderWrapper : public TranscoderInterface,
                           public std::enable_shared_from_this<TranscoderWrapper> {
 public:
-    TranscoderWrapper();
+    TranscoderWrapper(const std::shared_ptr<TranscoderCallbackInterface>& cb,
+                      const std::shared_ptr<TranscodingLogger>& logger,
+                      int64_t heartBeatIntervalUs);
+    ~TranscoderWrapper();
 
-    virtual void setCallback(const std::shared_ptr<TranscoderCallbackInterface>& cb) override;
-    virtual void start(ClientIdType clientId, SessionIdType sessionId,
-                       const TranscodingRequestParcel& request,
-                       const std::shared_ptr<ITranscodingClientCallback>& clientCallback) override;
-    virtual void pause(ClientIdType clientId, SessionIdType sessionId) override;
-    virtual void resume(ClientIdType clientId, SessionIdType sessionId,
-                        const TranscodingRequestParcel& request,
-                        const std::shared_ptr<ITranscodingClientCallback>& clientCallback) override;
-    virtual void stop(ClientIdType clientId, SessionIdType sessionId) override;
+    // TranscoderInterface
+    void start(ClientIdType clientId, SessionIdType sessionId,
+               const TranscodingRequestParcel& request, uid_t callingUid,
+               const std::shared_ptr<ITranscodingClientCallback>& clientCallback) override;
+    void pause(ClientIdType clientId, SessionIdType sessionId) override;
+    void resume(ClientIdType clientId, SessionIdType sessionId,
+                const TranscodingRequestParcel& request, uid_t callingUid,
+                const std::shared_ptr<ITranscodingClientCallback>& clientCallback) override;
+    void stop(ClientIdType clientId, SessionIdType sessionId, bool abandon = false) override;
+    // ~TranscoderInterface
 
 private:
     class CallbackImpl;
     struct Event {
-        enum Type { NoEvent, Start, Pause, Resume, Stop, Finish, Error, Progress } type;
+        enum Type {
+            NoEvent,
+            Start,
+            Pause,
+            Resume,
+            Stop,
+            Finish,
+            Error,
+            Progress,
+            HeartBeat,
+            Abandon
+        } type;
         ClientIdType clientId;
         SessionIdType sessionId;
         std::function<void()> runnable;
@@ -62,31 +80,43 @@
     std::shared_ptr<CallbackImpl> mTranscoderCb;
     std::shared_ptr<MediaTranscoder> mTranscoder;
     std::weak_ptr<TranscoderCallbackInterface> mCallback;
+    std::shared_ptr<TranscodingLogger> mLogger;
+    std::shared_ptr<AMediaFormat> mSrcFormat;
+    std::shared_ptr<AMediaFormat> mDstFormat;
+    int64_t mHeartBeatIntervalUs;
     std::mutex mLock;
     std::condition_variable mCondition;
     std::list<Event> mQueue;  // GUARDED_BY(mLock);
     std::map<SessionKeyType, std::shared_ptr<ndk::ScopedAParcel>> mPausedStateMap;
     ClientIdType mCurrentClientId;
     SessionIdType mCurrentSessionId;
+    uid_t mCurrentCallingUid;
+    std::chrono::steady_clock::time_point mTranscodeStartTime;
+
+    // Whether the looper has been created.
+    bool mLooperReady;
 
     static std::string toString(const Event& event);
     void onFinish(ClientIdType clientId, SessionIdType sessionId);
     void onError(ClientIdType clientId, SessionIdType sessionId, media_status_t status);
     void onProgress(ClientIdType clientId, SessionIdType sessionId, int32_t progress);
+    void onHeartBeat(ClientIdType clientId, SessionIdType sessionId);
 
     media_status_t handleStart(ClientIdType clientId, SessionIdType sessionId,
-                               const TranscodingRequestParcel& request,
+                               const TranscodingRequestParcel& request, uid_t callingUid,
                                const std::shared_ptr<ITranscodingClientCallback>& callback);
     media_status_t handlePause(ClientIdType clientId, SessionIdType sessionId);
     media_status_t handleResume(ClientIdType clientId, SessionIdType sessionId,
-                                const TranscodingRequestParcel& request,
+                                const TranscodingRequestParcel& request, uid_t callingUid,
                                 const std::shared_ptr<ITranscodingClientCallback>& callback);
     media_status_t setupTranscoder(
             ClientIdType clientId, SessionIdType sessionId, const TranscodingRequestParcel& request,
-            const std::shared_ptr<ITranscodingClientCallback>& callback,
+            uid_t callingUid, const std::shared_ptr<ITranscodingClientCallback>& callback,
+            TranscodingLogger::SessionEndedReason* failureReason /* nonnull */,
             const std::shared_ptr<ndk::ScopedAParcel>& pausedState = nullptr);
 
     void cleanup();
+    void logSessionEnded(const TranscodingLogger::SessionEndedReason& reason, int error);
     void reportError(ClientIdType clientId, SessionIdType sessionId, media_status_t err);
     void queueEvent(Event::Type type, ClientIdType clientId, SessionIdType sessionId,
                     const std::function<void()> runnable, int32_t arg = 0);
diff --git a/media/libmediatranscoding/include/media/TranscodingLogger.h b/media/libmediatranscoding/include/media/TranscodingLogger.h
new file mode 100644
index 0000000..dc24551
--- /dev/null
+++ b/media/libmediatranscoding/include/media/TranscodingLogger.h
@@ -0,0 +1,97 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_MEDIA_TRANSCODING_LOGGER_H
+#define ANDROID_MEDIA_TRANSCODING_LOGGER_H
+
+#include <media/NdkMediaFormat.h>
+#include <utils/Condition.h>
+
+#include <chrono>
+#include <memory>
+#include <mutex>
+#include <queue>
+
+namespace android {
+
+/** Class for logging transcoding events. */
+class TranscodingLogger {
+public:
+    /** The maximum number of atoms pushed to statsd per day. */
+    static constexpr int kMaxAtomsPerDay = 50;
+
+    /** The maximum number of successful transcoding atoms pushed to statsd per day. */
+    static constexpr int kMaxSuccessfulAtomsPerDay = 35;
+
+    /** Reason transcoding session ended. Maps to MediaTranscodingSessionEnded atom's Reason. */
+    enum SessionEndedReason {
+        UNKNOWN = 0,
+        FINISHED,
+        ERROR,
+        PAUSED,
+        CANCELLED,
+        START_FAILED,
+        RESUME_FAILED,
+        CREATE_FAILED,
+        CONFIG_SRC_FAILED,
+        CONFIG_DST_FAILED,
+        CONFIG_TRACK_FAILED,
+        OPEN_SRC_FD_FAILED,
+        OPEN_DST_FD_FAILED,
+        NO_TRACKS,
+    };
+
+    TranscodingLogger();
+    ~TranscodingLogger() = default;
+
+    /**
+     * Logs a transcoding session ended event (MediaTranscodingSessionEnded atom).
+     * @param reason Reason for the transcoding session to end.
+     * @param callingUid UID of the caller connecting to the transcoding service.
+     * @param status Status (error code) of the transcoding session.
+     * @param duration Duration of the transcoding session.
+     * @param srcFormat The source video track format.
+     * @param dstFormat The destination video track format.
+     */
+    void logSessionEnded(enum SessionEndedReason reason, uid_t callingUid, int status,
+                         std::chrono::microseconds duration, AMediaFormat* srcFormat,
+                         AMediaFormat* dstFormat);
+
+private:
+    friend class TranscodingLoggerTest;
+
+    // Function prototype for writing out the session ended atom.
+    using SessionEndedAtomWriter = std::function<int(
+            int32_t, int32_t, int32_t, int32_t, int32_t, int32_t, int32_t, char const*, int32_t,
+            int32_t, int32_t, int32_t, int32_t, bool, int32_t, int32_t, char const*, bool)>;
+
+    std::mutex mLock;
+    std::queue<std::pair<std::chrono::steady_clock::time_point, int>> mLastLoggedAtoms
+            GUARDED_BY(mLock);
+    uint32_t mSuccessfulCount = 0;
+    SessionEndedAtomWriter mSessionEndedAtomWriter;
+
+    void logSessionEnded(const std::chrono::steady_clock::time_point& now,
+                         enum SessionEndedReason reason, uid_t callingUid, int status,
+                         std::chrono::microseconds duration, AMediaFormat* srcFormat,
+                         AMediaFormat* dstFormat);
+    bool shouldLogAtom(const std::chrono::steady_clock::time_point& now, int status);
+    // Used for testing to validate what gets sent to statsd.
+    void setSessionEndedAtomWriter(const SessionEndedAtomWriter& writer);
+};
+
+}  // namespace android
+#endif  // ANDROID_MEDIA_TRANSCODING_LOGGER_H
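The header above only declares shouldLogAtom() and the per-day quotas; the implementation is not part of this hunk. As a hedged, minimal sketch of the sliding-window behavior that the TranscodingLogger tests later in this patch exercise (the AtomQuota class and its deque-based pruning are assumptions for illustration, not the actual TranscodingLogger code):

#include <chrono>
#include <deque>
#include <utility>

namespace {

constexpr int kMaxAtomsPerDay = 50;
constexpr int kMaxSuccessfulAtomsPerDay = 35;

// Sliding 24h window: remembers when atoms were pushed and whether they were
// successful, and refuses to log once either quota is exhausted.
class AtomQuota {
public:
    bool shouldLog(std::chrono::steady_clock::time_point now, bool success) {
        // Drop entries older than 24 hours so the window slides forward.
        while (!mLogged.empty() && now - mLogged.front().first > std::chrono::hours(24)) {
            if (mLogged.front().second) --mSuccessCount;
            mLogged.pop_front();
        }
        if (static_cast<int>(mLogged.size()) >= kMaxAtomsPerDay) return false;
        if (success && mSuccessCount >= kMaxSuccessfulAtomsPerDay) return false;
        mLogged.emplace_back(now, success);
        if (success) ++mSuccessCount;
        return true;
    }

private:
    std::deque<std::pair<std::chrono::steady_clock::time_point, bool>> mLogged;
    int mSuccessCount = 0;
};

}  // namespace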
diff --git a/media/libmediatranscoding/include/media/TranscodingRequest.h b/media/libmediatranscoding/include/media/TranscodingRequest.h
index 16f4cc0..d38fc59 100644
--- a/media/libmediatranscoding/include/media/TranscodingRequest.h
+++ b/media/libmediatranscoding/include/media/TranscodingRequest.h
@@ -18,11 +18,15 @@
 #define ANDROID_MEDIA_TRANSCODING_REQUEST_H
 
 #include <aidl/android/media/TranscodingRequestParcel.h>
+#include <android/binder_parcel.h>
 
 namespace android {
 
 using ::aidl::android::media::TranscodingRequestParcel;
 
+// TODO: replace __ANDROID_API_FUTURE__ with 31 when it's official (b/178144708)
+#define __TRANSCODING_MIN_API__ __ANDROID_API_FUTURE__
+
 // Helper class for duplicating a TranscodingRequestParcel
 class TranscodingRequest : public TranscodingRequestParcel {
 public:
@@ -36,20 +40,28 @@
 
 private:
     void setTo(const TranscodingRequestParcel& parcel) {
-        sourceFilePath = parcel.sourceFilePath;
-        sourceFd = ndk::ScopedFileDescriptor(dup(parcel.sourceFd.get()));
-        destinationFilePath = parcel.destinationFilePath;
-        destinationFd = ndk::ScopedFileDescriptor(dup(parcel.destinationFd.get()));
-        clientUid = parcel.clientUid;
-        clientPid = parcel.clientPid;
-        clientPackageName = parcel.clientPackageName;
-        transcodingType = parcel.transcodingType;
-        requestedVideoTrackFormat = parcel.requestedVideoTrackFormat;
-        priority = parcel.priority;
-        requestProgressUpdate = parcel.requestProgressUpdate;
-        requestSessionEventUpdate = parcel.requestSessionEventUpdate;
-        isForTesting = parcel.isForTesting;
-        testConfig = parcel.testConfig;
+        if (__builtin_available(android __TRANSCODING_MIN_API__, *)) {
+            AParcel* p = AParcel_create();
+            parcel.writeToParcel(p);
+            AParcel_setDataPosition(p, 0);
+            readFromParcel(p);
+            AParcel_delete(p);
+        } else {
+            sourceFilePath = parcel.sourceFilePath;
+            sourceFd = ndk::ScopedFileDescriptor(dup(parcel.sourceFd.get()));
+            destinationFilePath = parcel.destinationFilePath;
+            destinationFd = ndk::ScopedFileDescriptor(dup(parcel.destinationFd.get()));
+            clientUid = parcel.clientUid;
+            clientPid = parcel.clientPid;
+            clientPackageName = parcel.clientPackageName;
+            transcodingType = parcel.transcodingType;
+            requestedVideoTrackFormat = parcel.requestedVideoTrackFormat;
+            priority = parcel.priority;
+            requestProgressUpdate = parcel.requestProgressUpdate;
+            requestSessionEventUpdate = parcel.requestSessionEventUpdate;
+            isForTesting = parcel.isForTesting;
+            testConfig = parcel.testConfig;
+        }
     }
 };
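The setTo() change above copies the request by round-tripping it through an AParcel when the API level allows, so newly added AIDL fields are picked up without listing them one by one. A minimal generic sketch of the same idea follows; the copyViaParcel helper and its template signature are illustrative assumptions, and in the patch this path is additionally guarded by __builtin_available.

#include <android/binder_parcel.h>

// Serialize the parcelable into a fresh AParcel and read it back; this copies
// every field, including ones added to the AIDL after this helper was written.
template <typename ParcelableT>
void copyViaParcel(const ParcelableT& src, ParcelableT* dst) {
    AParcel* p = AParcel_create();
    src.writeToParcel(p);
    AParcel_setDataPosition(p, 0);
    dst->readFromParcel(p);
    AParcel_delete(p);
}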
 
diff --git a/media/libmediatranscoding/include/media/TranscodingSessionController.h b/media/libmediatranscoding/include/media/TranscodingSessionController.h
index 4fcc423..2691201 100644
--- a/media/libmediatranscoding/include/media/TranscodingSessionController.h
+++ b/media/libmediatranscoding/include/media/TranscodingSessionController.h
@@ -28,6 +28,7 @@
 #include <utils/Vector.h>
 
 #include <chrono>
+#include <functional>
 #include <list>
 #include <map>
 #include <mutex>
@@ -36,21 +37,26 @@
 using ::aidl::android::media::TranscodingResultParcel;
 using ::aidl::android::media::TranscodingSessionPriority;
 
-class TranscodingSessionController : public UidPolicyCallbackInterface,
-                                     public ControllerClientInterface,
-                                     public TranscoderCallbackInterface,
-                                     public ResourcePolicyCallbackInterface,
-                                     public ThermalPolicyCallbackInterface {
+class TranscodingSessionController
+      : public UidPolicyCallbackInterface,
+        public ControllerClientInterface,
+        public TranscoderCallbackInterface,
+        public ResourcePolicyCallbackInterface,
+        public ThermalPolicyCallbackInterface,
+        public std::enable_shared_from_this<TranscodingSessionController> {
 public:
     virtual ~TranscodingSessionController();
 
     // ControllerClientInterface
-    bool submit(ClientIdType clientId, SessionIdType sessionId, uid_t uid,
+    bool submit(ClientIdType clientId, SessionIdType sessionId, uid_t callingUid, uid_t clientUid,
                 const TranscodingRequestParcel& request,
                 const std::weak_ptr<ITranscodingClientCallback>& clientCallback) override;
     bool cancel(ClientIdType clientId, SessionIdType sessionId) override;
     bool getSession(ClientIdType clientId, SessionIdType sessionId,
                     TranscodingRequestParcel* request) override;
+    bool addClientUid(ClientIdType clientId, SessionIdType sessionId, uid_t clientUid) override;
+    bool getClientUids(ClientIdType clientId, SessionIdType sessionId,
+                       std::vector<int32_t>* out_clientUids) override;
     // ~ControllerClientInterface
 
     // TranscoderCallbackInterface
@@ -61,11 +67,13 @@
     void onError(ClientIdType clientId, SessionIdType sessionId, TranscodingErrorCode err) override;
     void onProgressUpdate(ClientIdType clientId, SessionIdType sessionId,
                           int32_t progress) override;
+    void onHeartBeat(ClientIdType clientId, SessionIdType sessionId) override;
     void onResourceLost(ClientIdType clientId, SessionIdType sessionId) override;
     // ~TranscoderCallbackInterface
 
     // UidPolicyCallbackInterface
     void onTopUidsChanged(const std::unordered_set<uid_t>& uids) override;
+    void onUidGone(uid_t goneUid) override;
     // ~UidPolicyCallbackInterface
 
     // ResourcePolicyCallbackInterface
@@ -88,6 +96,19 @@
 
     using SessionKeyType = std::pair<ClientIdType, SessionIdType>;
     using SessionQueueType = std::list<SessionKeyType>;
+    using TranscoderFactoryType = std::function<std::shared_ptr<TranscoderInterface>(
+            const std::shared_ptr<TranscoderCallbackInterface>&)>;
+
+    struct ControllerConfig {
+        // Watchdog timeout.
+        int64_t watchdogTimeoutUs = 3000000LL;
+        // Threshold of time between finish/start below which a back-to-back start is counted.
+        int32_t pacerBurstThresholdMs = 1000;
+        // Maximum allowed back-to-back start count.
+        int32_t pacerBurstCountQuota = 10;
+        // Maximum allowed back-to-back running time.
+        int32_t pacerBurstTimeQuotaSeconds = 180;  // 3-min
+    };
 
     struct Session {
         enum State {
@@ -100,15 +121,17 @@
             FINISHED,
             CANCELED,
             ERROR,
+            DROPPED_BY_PACER,
         };
         SessionKeyType key;
-        uid_t uid;
-        int32_t lastProgress;
-        int32_t pauseCount;
-        std::chrono::time_point<std::chrono::system_clock> stateEnterTime;
-        std::chrono::microseconds waitingTime;
-        std::chrono::microseconds runningTime;
-        std::chrono::microseconds pausedTime;
+        uid_t callingUid;
+        std::unordered_set<uid_t> allClientUids;
+        int32_t lastProgress = 0;
+        int32_t pauseCount = 0;
+        std::chrono::time_point<std::chrono::steady_clock> stateEnterTime;
+        std::chrono::microseconds waitingTime{0};
+        std::chrono::microseconds runningTime{0};
+        std::chrono::microseconds pausedTime{0};
 
         TranscodingRequest request;
         std::weak_ptr<ITranscodingClientCallback> callback;
@@ -116,11 +139,17 @@
         // Must use setState to change state.
         void setState(Session::State state);
         State getState() const { return state; }
+        bool isRunning() { return state == RUNNING; }
 
     private:
         State state = INVALID;
     };
 
+    struct Watchdog;
+    struct Pacer;
+
+    ControllerConfig mConfig;
+
     // TODO(chz): call transcoder without global lock.
     // Use mLock for all entrypoints for now.
     mutable std::mutex mLock;
@@ -136,6 +165,7 @@
     std::list<uid_t>::iterator mOfflineUidIterator;
     std::map<uid_t, std::string> mUidPackageNames;
 
+    TranscoderFactoryType mTranscoderFactory;
     std::shared_ptr<TranscoderInterface> mTranscoder;
     std::shared_ptr<UidPolicyInterface> mUidPolicy;
     std::shared_ptr<ResourcePolicyInterface> mResourcePolicy;
@@ -145,18 +175,24 @@
     bool mResourceLost;
     bool mThermalThrottling;
     std::list<Session> mSessionHistory;
+    std::shared_ptr<Watchdog> mWatchdog;
+    std::shared_ptr<Pacer> mPacer;
 
     // Only allow MediaTranscodingService and unit tests to instantiate.
-    TranscodingSessionController(const std::shared_ptr<TranscoderInterface>& transcoder,
+    TranscodingSessionController(const TranscoderFactoryType& transcoderFactory,
                                  const std::shared_ptr<UidPolicyInterface>& uidPolicy,
                                  const std::shared_ptr<ResourcePolicyInterface>& resourcePolicy,
-                                 const std::shared_ptr<ThermalPolicyInterface>& thermalPolicy);
+                                 const std::shared_ptr<ThermalPolicyInterface>& thermalPolicy,
+                                 const ControllerConfig* config = nullptr);
 
     void dumpSession_l(const Session& session, String8& result, bool closedSession = false);
     Session* getTopSession_l();
     void updateCurrentSession_l();
-    void removeSession_l(const SessionKeyType& sessionKey, Session::State finalState);
+    void addUidToSession_l(uid_t uid, const SessionKeyType& sessionKey);
+    void removeSession_l(const SessionKeyType& sessionKey, Session::State finalState,
+                         const std::shared_ptr<std::function<bool(uid_t uid)>>& keepUid = nullptr);
     void moveUidsToTop_l(const std::unordered_set<uid_t>& uids, bool preserveTopUid);
+    void setSessionState_l(Session* session, Session::State state);
     void notifyClient(ClientIdType clientId, SessionIdType sessionId, const char* reason,
                       std::function<void(const SessionKeyType&)> func);
     // Internal state verifier (debug only)
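The Watchdog and Pacer structs referenced above are only forward-declared in this header; as a hedged sketch of the burst-pacing decision that the ControllerConfig fields parameterize (the PacerSketch bookkeeping below, including when the burst counters reset, is an assumption for illustration, not the real Pacer):

#include <chrono>
#include <cstdint>

namespace {

struct PacerSketch {
    // Mirrors the ControllerConfig defaults above.
    int32_t burstThresholdMs = 1000;
    int32_t burstCountQuota = 10;
    int32_t burstTimeQuotaSeconds = 180;

    int32_t burstCount = 0;
    std::chrono::milliseconds burstTime{0};
    std::chrono::steady_clock::time_point lastCompletedTime{};

    // Returns whether a new session may start at 'now'.
    bool onSessionStarted(std::chrono::steady_clock::time_point now) {
        if (burstCount > 0 &&
            now - lastCompletedTime > std::chrono::milliseconds(burstThresholdMs)) {
            // Long enough idle gap: no longer counted as a back-to-back burst.
            burstCount = 0;
            burstTime = std::chrono::milliseconds(0);
        }
        return burstCount < burstCountQuota &&
               burstTime < std::chrono::seconds(burstTimeQuotaSeconds);
    }

    void onSessionCompleted(std::chrono::steady_clock::time_point startTime,
                            std::chrono::steady_clock::time_point now) {
        burstCount++;
        burstTime += std::chrono::duration_cast<std::chrono::milliseconds>(now - startTime);
        lastCompletedTime = now;
    }
};

}  // namespace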
diff --git a/media/libmediatranscoding/include/media/UidPolicyInterface.h b/media/libmediatranscoding/include/media/UidPolicyInterface.h
index 05d8db0..445a2ff 100644
--- a/media/libmediatranscoding/include/media/UidPolicyInterface.h
+++ b/media/libmediatranscoding/include/media/UidPolicyInterface.h
@@ -48,6 +48,9 @@
     // has changed. The receiver of this callback should adjust accordingly.
     virtual void onTopUidsChanged(const std::unordered_set<uid_t>& uids) = 0;
 
+    // Called when a uid is gone.
+    virtual void onUidGone(uid_t goneUid) = 0;
+
 protected:
     virtual ~UidPolicyCallbackInterface() = default;
 };
diff --git a/media/libmediatranscoding/tests/Android.bp b/media/libmediatranscoding/tests/Android.bp
index ff5a9fa..603611a 100644
--- a/media/libmediatranscoding/tests/Android.bp
+++ b/media/libmediatranscoding/tests/Android.bp
@@ -1,5 +1,4 @@
 // Build the unit tests for libmediatranscoding.
-
 package {
     // See: http://go/android-license-faq
     // A large-scale-change added 'default_applicable_licenses' to import
@@ -73,3 +72,15 @@
 
     srcs: ["AdjustableMaxPriorityQueue_tests.cpp"],
 }
+
+//
+// TranscodingLogger unit test
+//
+cc_test {
+    name: "TranscodingLogger_tests",
+    defaults: ["libmediatranscoding_test_defaults"],
+    shared_libs: ["libmediandk", "libstatssocket#30"],
+    static_libs: ["libmediatranscoder", "libstatslog_media"],
+
+    srcs: ["TranscodingLogger_tests.cpp"],
+}
diff --git a/media/libmediatranscoding/tests/TranscodingClientManager_tests.cpp b/media/libmediatranscoding/tests/TranscodingClientManager_tests.cpp
index 1a50923..9233410 100644
--- a/media/libmediatranscoding/tests/TranscodingClientManager_tests.cpp
+++ b/media/libmediatranscoding/tests/TranscodingClientManager_tests.cpp
@@ -50,6 +50,7 @@
 
 constexpr const char* kClientName = "TestClientName";
 constexpr const char* kClientPackage = "TestClientPackage";
+constexpr uid_t OFFLINE_UID = -1;
 
 #define SESSION(n) (n)
 
@@ -135,8 +136,8 @@
 
     virtual ~TestController() { ALOGI("TestController Destroyed"); }
 
-    bool submit(ClientIdType clientId, SessionIdType sessionId, uid_t /*uid*/,
-                const TranscodingRequestParcel& request,
+    bool submit(ClientIdType clientId, SessionIdType sessionId, uid_t /*callingUid*/,
+                uid_t clientUid, const TranscodingRequestParcel& request,
                 const std::weak_ptr<ITranscodingClientCallback>& clientCallback) override {
         SessionKeyType sessionKey = std::make_pair(clientId, sessionId);
         if (mSessions.count(sessionKey) > 0) {
@@ -149,13 +150,47 @@
             return false;
         }
 
+        if (request.priority == TranscodingSessionPriority::kUnspecified) {
+            clientUid = OFFLINE_UID;
+        }
+
         mSessions[sessionKey].request = request;
         mSessions[sessionKey].callback = clientCallback;
+        mSessions[sessionKey].allClientUids.insert(clientUid);
 
         mLastSession = sessionKey;
         return true;
     }
 
+    bool addClientUid(ClientIdType clientId, SessionIdType sessionId, uid_t clientUid) override {
+        SessionKeyType sessionKey = std::make_pair(clientId, sessionId);
+
+        if (mSessions.count(sessionKey) == 0) {
+            return false;
+        }
+        if (mSessions[sessionKey].allClientUids.count(clientUid) > 0) {
+            return false;
+        }
+        mSessions[sessionKey].allClientUids.insert(clientUid);
+        return true;
+    }
+
+    bool getClientUids(ClientIdType clientId, SessionIdType sessionId,
+                       std::vector<int32_t>* out_clientUids) override {
+        SessionKeyType sessionKey = std::make_pair(clientId, sessionId);
+
+        if (mSessions.count(sessionKey) == 0) {
+            return false;
+        }
+        out_clientUids->clear();
+        for (uid_t uid : mSessions[sessionKey].allClientUids) {
+            if (uid != OFFLINE_UID) {
+                out_clientUids->push_back(uid);
+            }
+        }
+        return true;
+    }
+
     bool cancel(ClientIdType clientId, SessionIdType sessionId) override {
         SessionKeyType sessionKey = std::make_pair(clientId, sessionId);
 
@@ -211,6 +246,7 @@
     struct Session {
         TranscodingRequest request;
         std::weak_ptr<ITranscodingClientCallback> callback;
+        std::unordered_set<uid_t> allClientUids;
     };
 
     typedef std::pair<ClientIdType, SessionIdType> SessionKeyType;
@@ -537,4 +573,93 @@
     EXPECT_EQ(status.getServiceSpecificError(), IMediaTranscodingService::ERROR_DISCONNECTED);
 }
 
+TEST_F(TranscodingClientManagerTest, TestAddGetClientUidsInvalidArgs) {
+    addMultipleClients();
+
+    bool result;
+    std::optional<std::vector<int32_t>> clientUids;
+    TranscodingRequestParcel request;
+    TranscodingSessionParcel session;
+    uid_t ownUid = ::getuid();
+
+    // Add/Get clients with invalid session id fails.
+    EXPECT_TRUE(mClient1->addClientUid(-1, ownUid, &result).isOk());
+    EXPECT_FALSE(result);
+    EXPECT_TRUE(mClient1->addClientUid(SESSION(0), ownUid, &result).isOk());
+    EXPECT_FALSE(result);
+    EXPECT_TRUE(mClient1->getClientUids(-1, &clientUids).isOk());
+    EXPECT_EQ(clientUids, std::nullopt);
+    EXPECT_TRUE(mClient1->getClientUids(SESSION(0), &clientUids).isOk());
+    EXPECT_EQ(clientUids, std::nullopt);
+
+    unregisterMultipleClients();
+}
+
+TEST_F(TranscodingClientManagerTest, TestAddGetClientUids) {
+    addMultipleClients();
+
+    bool result;
+    std::optional<std::vector<int32_t>> clientUids;
+    TranscodingRequestParcel request;
+    TranscodingSessionParcel session;
+    uid_t ownUid = ::getuid();
+
+    // Submit one real-time session.
+    request.sourceFilePath = "test_source_file_0";
+    request.destinationFilePath = "test_destination_file_0";
+    request.priority = TranscodingSessionPriority::kNormal;
+    EXPECT_TRUE(mClient1->submitRequest(request, &session, &result).isOk());
+    EXPECT_TRUE(result);
+
+    // Should have own uid in client uid list.
+    EXPECT_TRUE(mClient1->getClientUids(SESSION(0), &clientUids).isOk());
+    EXPECT_NE(clientUids, std::nullopt);
+    EXPECT_EQ(clientUids->size(), 1);
+    EXPECT_EQ((*clientUids)[0], ownUid);
+
+    // Adding invalid client uid should fail.
+    EXPECT_TRUE(mClient1->addClientUid(SESSION(0), kInvalidClientUid, &result).isOk());
+    EXPECT_FALSE(result);
+
+    // Adding own uid again should fail.
+    EXPECT_TRUE(mClient1->addClientUid(SESSION(0), ownUid, &result).isOk());
+    EXPECT_FALSE(result);
+
+    // Submit one offline session.
+    request.sourceFilePath = "test_source_file_1";
+    request.destinationFilePath = "test_destination_file_1";
+    request.priority = TranscodingSessionPriority::kUnspecified;
+    EXPECT_TRUE(mClient1->submitRequest(request, &session, &result).isOk());
+    EXPECT_TRUE(result);
+
+    // Should not have own uid in client uid list.
+    EXPECT_TRUE(mClient1->getClientUids(SESSION(1), &clientUids).isOk());
+    EXPECT_NE(clientUids, std::nullopt);
+    EXPECT_EQ(clientUids->size(), 0);
+
+    // Add own uid (with IMediaTranscodingService::USE_CALLING_UID) again, should succeed.
+    EXPECT_TRUE(
+            mClient1->addClientUid(SESSION(1), IMediaTranscodingService::USE_CALLING_UID, &result)
+                    .isOk());
+    EXPECT_TRUE(result);
+    EXPECT_TRUE(mClient1->getClientUids(SESSION(1), &clientUids).isOk());
+    EXPECT_NE(clientUids, std::nullopt);
+    EXPECT_EQ(clientUids->size(), 1);
+    EXPECT_EQ((*clientUids)[0], ownUid);
+
+    // Add more uids, should succeed.
+    int32_t kFakeUid = ::getuid() ^ 0x1;
+    EXPECT_TRUE(mClient1->addClientUid(SESSION(1), kFakeUid, &result).isOk());
+    EXPECT_TRUE(result);
+    EXPECT_TRUE(mClient1->getClientUids(SESSION(1), &clientUids).isOk());
+    EXPECT_NE(clientUids, std::nullopt);
+    std::unordered_set<uid_t> uidSet;
+    uidSet.insert(clientUids->begin(), clientUids->end());
+    EXPECT_EQ(uidSet.size(), 2);
+    EXPECT_EQ(uidSet.count(ownUid), 1);
+    EXPECT_EQ(uidSet.count(kFakeUid), 1);
+
+    unregisterMultipleClients();
+}
+
 }  // namespace android
diff --git a/media/libmediatranscoding/tests/TranscodingLogger_tests.cpp b/media/libmediatranscoding/tests/TranscodingLogger_tests.cpp
new file mode 100644
index 0000000..39e5cd4
--- /dev/null
+++ b/media/libmediatranscoding/tests/TranscodingLogger_tests.cpp
@@ -0,0 +1,286 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// Unit Test for TranscodingLogger
+
+// #define LOG_NDEBUG 0
+#define LOG_TAG "TranscodingLoggerTest"
+
+#include <android-base/logging.h>
+#include <gtest/gtest.h>
+#include <media/NdkCommon.h>
+#include <media/TranscodingLogger.h>
+#include <statslog_media.h>
+#include <utils/Log.h>
+
+#include <chrono>
+
+namespace android {
+
+using Reason = TranscodingLogger::SessionEndedReason;
+
+// Data structure corresponding to MediaTranscodingEnded atom.
+struct SessionEndedAtom {
+    SessionEndedAtom(int32_t atomCode, int32_t reason, int32_t callingUid, int32_t status,
+                     int32_t transcoderFps, int32_t srcWidth, int32_t srcHeight,
+                     char const* srcMime, int32_t srcProfile, int32_t srcLevel, int32_t srcFps,
+                     int32_t srcDurationMs, bool srcIsHdr, int32_t dstWidth, int32_t dstHeight,
+                     char const* dstMime, bool dstIsHdr)
+          : atomCode(atomCode),
+            reason(reason),
+            callingUid(callingUid),
+            status(status),
+            transcoderFps(transcoderFps),
+            srcWidth(srcWidth),
+            srcHeight(srcHeight),
+            srcMime(srcMime),
+            srcProfile(srcProfile),
+            srcLevel(srcLevel),
+            srcFps(srcFps),
+            srcDurationMs(srcDurationMs),
+            srcIsHdr(srcIsHdr),
+            dstWidth(dstWidth),
+            dstHeight(dstHeight),
+            dstMime(dstMime),
+            dstIsHdr(dstIsHdr) {}
+
+    int32_t atomCode;
+    int32_t reason;
+    int32_t callingUid;
+    int32_t status;
+    int32_t transcoderFps;
+    int32_t srcWidth;
+    int32_t srcHeight;
+    std::string srcMime;
+    int32_t srcProfile;
+    int32_t srcLevel;
+    int32_t srcFps;
+    int32_t srcDurationMs;
+    bool srcIsHdr;
+    int32_t dstWidth;
+    int32_t dstHeight;
+    std::string dstMime;
+    bool dstIsHdr;
+};
+
+// Default configuration values.
+static constexpr int32_t kDefaultCallingUid = 1;
+static constexpr std::chrono::microseconds kDefaultTranscodeDuration = std::chrono::seconds{2};
+
+static constexpr int32_t kDefaultSrcWidth = 1920;
+static constexpr int32_t kDefaultSrcHeight = 1080;
+static const std::string kDefaultSrcMime{AMEDIA_MIMETYPE_VIDEO_HEVC};
+static constexpr int32_t kDefaultSrcProfile = 1;    // HEVC Main
+static constexpr int32_t kDefaultSrcLevel = 65536;  // HEVCMainTierLevel51
+static constexpr int32_t kDefaultSrcFps = 30;
+static constexpr int32_t kDefaultSrcFrameCount = 120;
+static constexpr int64_t kDefaultSrcDurationUs = 1000000 * kDefaultSrcFrameCount / kDefaultSrcFps;
+
+static constexpr int32_t kDefaultDstWidth = 1280;
+static constexpr int32_t kDefaultDstHeight = 720;
+static const std::string kDefaultDstMime{AMEDIA_MIMETYPE_VIDEO_AVC};
+
+// Util for creating a default source video format.
+static AMediaFormat* CreateSrcFormat() {
+    AMediaFormat* fmt = AMediaFormat_new();
+    AMediaFormat_setInt32(fmt, AMEDIAFORMAT_KEY_WIDTH, kDefaultSrcWidth);
+    AMediaFormat_setInt32(fmt, AMEDIAFORMAT_KEY_HEIGHT, kDefaultSrcHeight);
+    AMediaFormat_setString(fmt, AMEDIAFORMAT_KEY_MIME, kDefaultSrcMime.c_str());
+    AMediaFormat_setInt32(fmt, AMEDIAFORMAT_KEY_PROFILE, kDefaultSrcProfile);
+    AMediaFormat_setInt32(fmt, AMEDIAFORMAT_KEY_LEVEL, kDefaultSrcLevel);
+    AMediaFormat_setInt32(fmt, AMEDIAFORMAT_KEY_FRAME_RATE, kDefaultSrcFps);
+    AMediaFormat_setInt32(fmt, AMEDIAFORMAT_KEY_FRAME_COUNT, kDefaultSrcFrameCount);
+    AMediaFormat_setInt64(fmt, AMEDIAFORMAT_KEY_DURATION, kDefaultSrcDurationUs);
+    return fmt;
+}
+
+// Util for creating a default destination video format.
+static AMediaFormat* CreateDstFormat() {
+    AMediaFormat* fmt = AMediaFormat_new();
+    AMediaFormat_setInt32(fmt, AMEDIAFORMAT_KEY_WIDTH, kDefaultDstWidth);
+    AMediaFormat_setInt32(fmt, AMEDIAFORMAT_KEY_HEIGHT, kDefaultDstHeight);
+    AMediaFormat_setString(fmt, AMEDIAFORMAT_KEY_MIME, kDefaultDstMime.c_str());
+    return fmt;
+}
+
+class TranscodingLoggerTest : public ::testing::Test {
+public:
+    TranscodingLoggerTest() { ALOGI("TranscodingLoggerTest created"); }
+
+    void SetUp() override {
+        ALOGI("TranscodingLoggerTest set up");
+        mLogger.reset(new TranscodingLogger());
+        mLoggedAtoms.clear();
+        mSrcFormat.reset();
+        mDstFormat.reset();
+
+        // Set a custom atom writer that saves all data, so the test can validate it afterwards.
+        mLogger->setSessionEndedAtomWriter(
+                [=](int32_t atomCode, int32_t reason, int32_t callingUid, int32_t status,
+                    int32_t transcoderFps, int32_t srcWidth, int32_t srcHeight, char const* srcMime,
+                    int32_t srcProfile, int32_t srcLevel, int32_t srcFps, int32_t srcDurationMs,
+                    bool srcIsHdr, int32_t dstWidth, int32_t dstHeight, char const* dstMime,
+                    bool dstIsHdr) -> int {
+                    mLoggedAtoms.emplace_back(atomCode, reason, callingUid, status, transcoderFps,
+                                              srcWidth, srcHeight, srcMime, srcProfile, srcLevel,
+                                              srcFps, srcDurationMs, srcIsHdr, dstWidth, dstHeight,
+                                              dstMime, dstIsHdr);
+                    return 0;
+                });
+    }
+
+    void logSession(const std::chrono::steady_clock::time_point& time, Reason reason, int status,
+                    AMediaFormat* srcFormat, AMediaFormat* dstFormat) {
+        mLogger->logSessionEnded(time, reason, kDefaultCallingUid, status,
+                                 kDefaultTranscodeDuration, srcFormat, dstFormat);
+    }
+
+    void logSession(const std::chrono::steady_clock::time_point& time, Reason reason, int status) {
+        if (!mSrcFormat) {
+            mSrcFormat = std::shared_ptr<AMediaFormat>(CreateSrcFormat(), &AMediaFormat_delete);
+        }
+        if (!mDstFormat) {
+            mDstFormat = std::shared_ptr<AMediaFormat>(CreateDstFormat(), &AMediaFormat_delete);
+        }
+        logSession(time, reason, status, mSrcFormat.get(), mDstFormat.get());
+    }
+
+    void logSessionFinished(const std::chrono::steady_clock::time_point& time) {
+        logSession(time, Reason::FINISHED, 0);
+    }
+
+    void logSessionFailed(const std::chrono::steady_clock::time_point& time) {
+        logSession(time, Reason::ERROR, AMEDIA_ERROR_UNKNOWN);
+    }
+
+    int logCount() const { return mLoggedAtoms.size(); }
+
+    void validateLatestAtom(Reason reason, int status, bool passthrough = false) {
+        const SessionEndedAtom& atom = mLoggedAtoms.back();
+
+        EXPECT_EQ(atom.atomCode, android::media::stats::MEDIA_TRANSCODING_SESSION_ENDED);
+        EXPECT_EQ(atom.reason, static_cast<int>(reason));
+        EXPECT_EQ(atom.callingUid, kDefaultCallingUid);
+        EXPECT_EQ(atom.status, status);
+        EXPECT_EQ(atom.srcWidth, kDefaultSrcWidth);
+        EXPECT_EQ(atom.srcHeight, kDefaultSrcHeight);
+        EXPECT_EQ(atom.srcMime, kDefaultSrcMime);
+        EXPECT_EQ(atom.srcProfile, kDefaultSrcProfile);
+        EXPECT_EQ(atom.srcLevel, kDefaultSrcLevel);
+        EXPECT_EQ(atom.srcFps, kDefaultSrcFps);
+        EXPECT_EQ(atom.srcDurationMs, kDefaultSrcDurationUs / 1000);
+        EXPECT_FALSE(atom.srcIsHdr);
+        EXPECT_EQ(atom.dstWidth, passthrough ? kDefaultSrcWidth : kDefaultDstWidth);
+        EXPECT_EQ(atom.dstHeight, passthrough ? kDefaultSrcHeight : kDefaultDstHeight);
+        EXPECT_EQ(atom.dstMime, passthrough ? "passthrough" : kDefaultDstMime);
+        EXPECT_FALSE(atom.dstIsHdr);
+
+        // Transcoder frame rate is only present on successful sessions.
+        if (status == AMEDIA_OK) {
+            std::chrono::duration<double> seconds{kDefaultTranscodeDuration};
+            const int32_t transcoderFps =
+                    static_cast<int32_t>(kDefaultSrcFrameCount / seconds.count());
+            EXPECT_EQ(atom.transcoderFps, transcoderFps);
+        } else {
+            EXPECT_EQ(atom.transcoderFps, -1);
+        }
+    }
+
+    void TearDown() override { ALOGI("TranscodingLoggerTest tear down"); }
+    ~TranscodingLoggerTest() { ALOGD("TranscodingLoggerTest destroyed"); }
+
+    std::shared_ptr<TranscodingLogger> mLogger;
+    std::vector<SessionEndedAtom> mLoggedAtoms;
+
+    std::shared_ptr<AMediaFormat> mSrcFormat;
+    std::shared_ptr<AMediaFormat> mDstFormat;
+};
+
+TEST_F(TranscodingLoggerTest, TestDailyLogQuota) {
+    ALOGD("TestDailyLogQuota");
+    auto start = std::chrono::steady_clock::now();
+
+    EXPECT_LT(TranscodingLogger::kMaxSuccessfulAtomsPerDay, TranscodingLogger::kMaxAtomsPerDay);
+
+    // 1. Check that the first kMaxSuccessfulAtomsPerDay successful atoms are logged.
+    for (int i = 0; i < TranscodingLogger::kMaxSuccessfulAtomsPerDay; ++i) {
+        logSessionFinished(start + std::chrono::seconds{i});
+        EXPECT_EQ(logCount(), i + 1);
+    }
+
+    // 2. Check that subsequent successful atoms within the same 24h interval are not logged.
+    for (int i = 1; i < 24; ++i) {
+        logSessionFinished(start + std::chrono::hours{i});
+        EXPECT_EQ(logCount(), TranscodingLogger::kMaxSuccessfulAtomsPerDay);
+    }
+
+    // 3. Check that failed atoms are logged up to kMaxAtomsPerDay.
+    for (int i = TranscodingLogger::kMaxSuccessfulAtomsPerDay;
+         i < TranscodingLogger::kMaxAtomsPerDay; ++i) {
+        logSessionFailed(start + std::chrono::seconds{i});
+        EXPECT_EQ(logCount(), i + 1);
+    }
+
+    // 4. Check that subsequent failed atoms within the same 24h interval are not logged.
+    for (int i = 1; i < 24; ++i) {
+        logSessionFailed(start + std::chrono::hours{i});
+        EXPECT_EQ(logCount(), TranscodingLogger::kMaxAtomsPerDay);
+    }
+
+    // 5. Check that failed and successful atoms are logged again after 24h.
+    logSessionFinished(start + std::chrono::hours{24});
+    EXPECT_EQ(logCount(), TranscodingLogger::kMaxAtomsPerDay + 1);
+
+    logSessionFailed(start + std::chrono::hours{24} + std::chrono::seconds{1});
+    EXPECT_EQ(logCount(), TranscodingLogger::kMaxAtomsPerDay + 2);
+}
+
+TEST_F(TranscodingLoggerTest, TestNullFormats) {
+    ALOGD("TestNullFormats");
+    auto srcFormat = std::shared_ptr<AMediaFormat>(CreateSrcFormat(), &AMediaFormat_delete);
+    auto dstFormat = std::shared_ptr<AMediaFormat>(CreateDstFormat(), &AMediaFormat_delete);
+    auto now = std::chrono::steady_clock::now();
+
+    // Source format null, should not log.
+    logSession(now, Reason::FINISHED, AMEDIA_OK, nullptr /*srcFormat*/, dstFormat.get());
+    EXPECT_EQ(logCount(), 0);
+
+    // Both formats null, should not log.
+    logSession(now, Reason::FINISHED, AMEDIA_OK, nullptr /*srcFormat*/, nullptr /*dstFormat*/);
+    EXPECT_EQ(logCount(), 0);
+
+    // Destination format null (passthrough mode), should log.
+    logSession(now, Reason::FINISHED, AMEDIA_OK, srcFormat.get(), nullptr /*dstFormat*/);
+    EXPECT_EQ(logCount(), 1);
+    validateLatestAtom(Reason::FINISHED, AMEDIA_OK, true /*passthrough*/);
+}
+
+TEST_F(TranscodingLoggerTest, TestAtomContentCorrectness) {
+    ALOGD("TestAtomContentCorrectness");
+    auto now = std::chrono::steady_clock::now();
+
+    // Log and validate a failure.
+    logSession(now, Reason::ERROR, AMEDIA_ERROR_MALFORMED);
+    EXPECT_EQ(logCount(), 1);
+    validateLatestAtom(Reason::ERROR, AMEDIA_ERROR_MALFORMED);
+
+    // Log and validate a success.
+    logSession(now, Reason::FINISHED, AMEDIA_OK);
+    EXPECT_EQ(logCount(), 2);
+    validateLatestAtom(Reason::FINISHED, AMEDIA_OK);
+}
+
+}  // namespace android
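For concreteness, the frame-rate check in validateLatestAtom() above reduces to simple arithmetic with the defaults declared in this test file. A standalone sketch of that computation (constant values copied from the test, variable names illustrative):

#include <chrono>
#include <cstdint>

int main() {
    constexpr int32_t frameCount = 120;            // kDefaultSrcFrameCount
    constexpr std::chrono::seconds duration{2};    // kDefaultTranscodeDuration
    const std::chrono::duration<double> seconds{duration};
    // 120 frames over a 2-second session => 60 fps reported for successful sessions.
    const auto transcoderFps = static_cast<int32_t>(frameCount / seconds.count());
    return transcoderFps == 60 ? 0 : 1;
}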
diff --git a/media/libmediatranscoding/tests/TranscodingSessionController_tests.cpp b/media/libmediatranscoding/tests/TranscodingSessionController_tests.cpp
index 9a1c272..9e7fa95 100644
--- a/media/libmediatranscoding/tests/TranscodingSessionController_tests.cpp
+++ b/media/libmediatranscoding/tests/TranscodingSessionController_tests.cpp
@@ -118,46 +118,42 @@
 
 class TestTranscoder : public TranscoderInterface {
 public:
-    TestTranscoder() : mLastError(TranscodingErrorCode::kUnknown) {}
+    TestTranscoder() : mGeneration(0) {}
     virtual ~TestTranscoder() {}
 
     // TranscoderInterface
-    void setCallback(const std::shared_ptr<TranscoderCallbackInterface>& /*cb*/) override {}
-
     void start(ClientIdType clientId, SessionIdType sessionId,
-               const TranscodingRequestParcel& /*request*/,
+               const TranscodingRequestParcel& /*request*/, uid_t /*callingUid*/,
                const std::shared_ptr<ITranscodingClientCallback>& /*clientCallback*/) override {
-        mEventQueue.push_back(Start(clientId, sessionId));
+        append(Start(clientId, sessionId));
     }
     void pause(ClientIdType clientId, SessionIdType sessionId) override {
-        mEventQueue.push_back(Pause(clientId, sessionId));
+        append(Pause(clientId, sessionId));
     }
     void resume(ClientIdType clientId, SessionIdType sessionId,
-                const TranscodingRequestParcel& /*request*/,
+                const TranscodingRequestParcel& /*request*/, uid_t /*callingUid*/,
                 const std::shared_ptr<ITranscodingClientCallback>& /*clientCallback*/) override {
-        mEventQueue.push_back(Resume(clientId, sessionId));
+        append(Resume(clientId, sessionId));
     }
-    void stop(ClientIdType clientId, SessionIdType sessionId) override {
-        mEventQueue.push_back(Stop(clientId, sessionId));
+    void stop(ClientIdType clientId, SessionIdType sessionId, bool abandon) override {
+        append(abandon ? Abandon(clientId, sessionId) : Stop(clientId, sessionId));
     }
 
     void onFinished(ClientIdType clientId, SessionIdType sessionId) {
-        mEventQueue.push_back(Finished(clientId, sessionId));
+        append(Finished(clientId, sessionId));
     }
 
     void onFailed(ClientIdType clientId, SessionIdType sessionId, TranscodingErrorCode err) {
-        mLastError = err;
-        mEventQueue.push_back(Failed(clientId, sessionId));
+        append(Failed(clientId, sessionId), err);
     }
 
-    TranscodingErrorCode getLastError() {
-        TranscodingErrorCode result = mLastError;
-        mLastError = TranscodingErrorCode::kUnknown;
-        return result;
+    void onCreated() {
+        std::scoped_lock lock{mLock};
+        mGeneration++;
     }
 
     struct Event {
-        enum { NoEvent, Start, Pause, Resume, Stop, Finished, Failed } type;
+        enum { NoEvent, Start, Pause, Resume, Stop, Finished, Failed, Abandon } type;
         ClientIdType clientId;
         SessionIdType sessionId;
     };
@@ -175,21 +171,62 @@
     DECLARE_EVENT(Stop);
     DECLARE_EVENT(Finished);
     DECLARE_EVENT(Failed);
+    DECLARE_EVENT(Abandon);
 
-    const Event& popEvent() {
+    // Push 1 event to back.
+    void append(const Event& event,
+                const TranscodingErrorCode err = TranscodingErrorCode::kNoError) {
+        std::unique_lock lock(mLock);
+
+        mEventQueue.push_back(event);
+        // Errors are recorded in a separate queue; non-error events do not
+        // clear them, and each getLastError() call consumes exactly one.
+        if (err != TranscodingErrorCode::kNoError) {
+            mLastErrorQueue.push_back(err);
+        }
+        mCondition.notify_one();
+    }
+
+    // Pop 1 event from front, wait for up to timeoutUs if empty.
+    const Event& popEvent(int64_t timeoutUs = 0) {
+        std::unique_lock lock(mLock);
+
+        if (mEventQueue.empty() && timeoutUs > 0) {
+            mCondition.wait_for(lock, std::chrono::microseconds(timeoutUs));
+        }
+
         if (mEventQueue.empty()) {
             mPoppedEvent = NoEvent;
         } else {
             mPoppedEvent = *mEventQueue.begin();
             mEventQueue.pop_front();
         }
+
         return mPoppedEvent;
     }
 
+    TranscodingErrorCode getLastError() {
+        std::scoped_lock lock{mLock};
+        if (mLastErrorQueue.empty()) {
+            return TranscodingErrorCode::kNoError;
+        }
+        TranscodingErrorCode err = mLastErrorQueue.front();
+        mLastErrorQueue.pop_front();
+        return err;
+    }
+
+    int32_t getGeneration() {
+        std::scoped_lock lock{mLock};
+        return mGeneration;
+    }
+
 private:
+    std::mutex mLock;
+    std::condition_variable mCondition;
     Event mPoppedEvent;
     std::list<Event> mEventQueue;
-    TranscodingErrorCode mLastError;
+    std::list<TranscodingErrorCode> mLastErrorQueue;
+    int32_t mGeneration;
 };
 
 bool operator==(const TestTranscoder::Event& lhs, const TestTranscoder::Event& rhs) {
@@ -197,11 +234,14 @@
 }
 
 struct TestClientCallback : public BnTranscodingClientCallback {
-    TestClientCallback(TestTranscoder* owner, int64_t clientId)
-          : mOwner(owner), mClientId(clientId) {
+    TestClientCallback(TestTranscoder* owner, ClientIdType clientId, uid_t clientUid)
+          : mOwner(owner), mClientId(clientId), mClientUid(clientUid) {
         ALOGD("TestClient Created");
     }
 
+    ClientIdType clientId() const { return mClientId; }
+    uid_t clientUid() const { return mClientUid; }
+
     Status openFileDescriptor(const std::string& /*in_fileUri*/, const std::string& /*in_mode*/,
                               ::ndk::ScopedFileDescriptor* /*_aidl_return*/) override {
         return Status::ok();
@@ -240,7 +280,8 @@
 
 private:
     TestTranscoder* mOwner;
-    int64_t mClientId;
+    ClientIdType mClientId;
+    uid_t mClientUid;
     TestClientCallback(const TestClientCallback&) = delete;
     TestClientCallback& operator=(const TestClientCallback&) = delete;
 };
@@ -248,6 +289,7 @@
 class TranscodingSessionControllerTest : public ::testing::Test {
 public:
     TranscodingSessionControllerTest() { ALOGI("TranscodingSessionControllerTest created"); }
+    ~TranscodingSessionControllerTest() { ALOGD("TranscodingSessionControllerTest destroyed"); }
 
     void SetUp() override {
         ALOGI("TranscodingSessionControllerTest set up");
@@ -255,26 +297,98 @@
         mUidPolicy.reset(new TestUidPolicy());
         mResourcePolicy.reset(new TestResourcePolicy());
         mThermalPolicy.reset(new TestThermalPolicy());
-        mController.reset(new TranscodingSessionController(mTranscoder, mUidPolicy, mResourcePolicy,
-                                                           mThermalPolicy));
+        // Override the default burst params with shorter values for testing.
+        TranscodingSessionController::ControllerConfig config = {
+                .pacerBurstThresholdMs = 500,
+                .pacerBurstCountQuota = 10,
+                .pacerBurstTimeQuotaSeconds = 3,
+        };
+        mController.reset(new TranscodingSessionController(
+                [this](const std::shared_ptr<TranscoderCallbackInterface>& /*cb*/) {
+                    // Require that the SessionController has released all of its references
+                    // to the transcoder object by the time it asks the factory for a new one.
+                    EXPECT_EQ(mTranscoder.use_count(), 1);
+                    mTranscoder->onCreated();
+                    return mTranscoder;
+                },
+                mUidPolicy, mResourcePolicy, mThermalPolicy, &config));
         mUidPolicy->setCallback(mController);
 
         // Set priority only, ignore other fields for now.
         mOfflineRequest.priority = TranscodingSessionPriority::kUnspecified;
         mRealtimeRequest.priority = TranscodingSessionPriority::kHigh;
-        mClientCallback0 =
-                ::ndk::SharedRefBase::make<TestClientCallback>(mTranscoder.get(), CLIENT(0));
-        mClientCallback1 =
-                ::ndk::SharedRefBase::make<TestClientCallback>(mTranscoder.get(), CLIENT(1));
-        mClientCallback2 =
-                ::ndk::SharedRefBase::make<TestClientCallback>(mTranscoder.get(), CLIENT(2));
-        mClientCallback3 =
-                ::ndk::SharedRefBase::make<TestClientCallback>(mTranscoder.get(), CLIENT(3));
+        mClientCallback0 = ::ndk::SharedRefBase::make<TestClientCallback>(mTranscoder.get(),
+                                                                          CLIENT(0), UID(0));
+        mClientCallback1 = ::ndk::SharedRefBase::make<TestClientCallback>(mTranscoder.get(),
+                                                                          CLIENT(1), UID(1));
+        mClientCallback2 = ::ndk::SharedRefBase::make<TestClientCallback>(mTranscoder.get(),
+                                                                          CLIENT(2), UID(2));
+        mClientCallback3 = ::ndk::SharedRefBase::make<TestClientCallback>(mTranscoder.get(),
+                                                                          CLIENT(3), UID(3));
     }
 
     void TearDown() override { ALOGI("TranscodingSessionControllerTest tear down"); }
 
-    ~TranscodingSessionControllerTest() { ALOGD("TranscodingSessionControllerTest destroyed"); }
+    void expectTimeout(int64_t clientId, int32_t sessionId, int32_t generation) {
+        EXPECT_EQ(mTranscoder->popEvent(2900000), TestTranscoder::NoEvent);
+        EXPECT_EQ(mTranscoder->popEvent(200000), TestTranscoder::Abandon(clientId, sessionId));
+        EXPECT_EQ(mTranscoder->popEvent(100000), TestTranscoder::Failed(clientId, sessionId));
+        EXPECT_EQ(mTranscoder->getLastError(), TranscodingErrorCode::kWatchdogTimeout);
+        // Should have created new transcoder.
+        EXPECT_EQ(mTranscoder->getGeneration(), generation);
+        EXPECT_EQ(mTranscoder.use_count(), 2);
+    }
+
+    void testPacerHelper(int numSubmits, int sessionDurationMs, int expectedSuccess,
+                         bool pauseLastSuccessSession = false) {
+        testPacerHelper(numSubmits, sessionDurationMs, expectedSuccess, mClientCallback0, {},
+                        pauseLastSuccessSession);
+    }
+
+    void testPacerHelper(int numSubmits, int sessionDurationMs, int expectedSuccess,
+                         const std::shared_ptr<TestClientCallback>& client,
+                         const std::vector<int>& additionalClientUids,
+                         bool pauseLastSuccessSession) {
+        for (int i = 0; i < numSubmits; i++) {
+            mController->submit(client->clientId(), SESSION(i), client->clientUid(),
+                                client->clientUid(), mRealtimeRequest, client);
+            for (int additionalUid : additionalClientUids) {
+                mController->addClientUid(client->clientId(), SESSION(i), additionalUid);
+            }
+        }
+        for (int i = 0; i < expectedSuccess; i++) {
+            EXPECT_EQ(mTranscoder->popEvent(),
+                      TestTranscoder::Start(client->clientId(), SESSION(i)));
+            if ((i == expectedSuccess - 1) && pauseLastSuccessSession) {
+                // Insert a 3-second pause into the last successful running session.
+                mController->onThrottlingStarted();
+                EXPECT_EQ(mTranscoder->popEvent(),
+                          TestTranscoder::Pause(client->clientId(), SESSION(i)));
+                sleep(3);
+                mController->onThrottlingStopped();
+                EXPECT_EQ(mTranscoder->popEvent(),
+                          TestTranscoder::Resume(client->clientId(), SESSION(i)));
+            }
+            usleep(sessionDurationMs * 1000);
+            // Test half of Finish and half of Error, both should be counted as burst runs.
+            if (i & 1) {
+                mController->onFinish(client->clientId(), SESSION(i));
+                EXPECT_EQ(mTranscoder->popEvent(),
+                          TestTranscoder::Finished(client->clientId(), SESSION(i)));
+            } else {
+                mController->onError(client->clientId(), SESSION(i),
+                                     TranscodingErrorCode::kUnknown);
+                EXPECT_EQ(mTranscoder->popEvent(100000),
+                          TestTranscoder::Failed(client->clientId(), SESSION(i)));
+                EXPECT_EQ(mTranscoder->getLastError(), TranscodingErrorCode::kUnknown);
+            }
+        }
+        for (int i = expectedSuccess; i < numSubmits; i++) {
+            EXPECT_EQ(mTranscoder->popEvent(),
+                      TestTranscoder::Failed(client->clientId(), SESSION(i)));
+            EXPECT_EQ(mTranscoder->getLastError(), TranscodingErrorCode::kDroppedByService);
+        }
+    }
 
     std::shared_ptr<TestTranscoder> mTranscoder;
     std::shared_ptr<TestUidPolicy> mUidPolicy;
@@ -297,32 +411,32 @@
 
     // Submit offline session to CLIENT(0) in UID(0).
     // Should start immediately (because this is the only session).
-    mController->submit(CLIENT(0), SESSION(0), UID(0), mOfflineRequest, mClientCallback0);
+    mController->submit(CLIENT(0), SESSION(0), UID(0), UID(0), mOfflineRequest, mClientCallback0);
     EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Start(CLIENT(0), 0));
 
     // Submit real-time session to CLIENT(0).
     // Should pause offline session and start new session,  even if UID(0) is not on top.
-    mController->submit(CLIENT(0), SESSION(1), UID(0), mRealtimeRequest, mClientCallback0);
+    mController->submit(CLIENT(0), SESSION(1), UID(0), UID(0), mRealtimeRequest, mClientCallback0);
     EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Pause(CLIENT(0), SESSION(0)));
     EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Start(CLIENT(0), SESSION(1)));
 
     // Submit real-time session to CLIENT(0), should be queued after the previous session.
-    mController->submit(CLIENT(0), SESSION(2), UID(0), mRealtimeRequest, mClientCallback0);
+    mController->submit(CLIENT(0), SESSION(2), UID(0), UID(0), mRealtimeRequest, mClientCallback0);
     EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
 
     // Submit real-time session to CLIENT(1) in same uid, should be queued after the previous
     // session.
-    mController->submit(CLIENT(1), SESSION(0), UID(0), mRealtimeRequest, mClientCallback1);
+    mController->submit(CLIENT(1), SESSION(0), UID(1), UID(0), mRealtimeRequest, mClientCallback1);
     EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
 
     // Submit real-time session to CLIENT(2) in UID(1).
     // Should pause previous session and start new session, because UID(1) is (has been) top.
-    mController->submit(CLIENT(2), SESSION(0), UID(1), mRealtimeRequest, mClientCallback2);
+    mController->submit(CLIENT(2), SESSION(0), UID(2), UID(1), mRealtimeRequest, mClientCallback2);
     EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Pause(CLIENT(0), SESSION(1)));
     EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Start(CLIENT(2), SESSION(0)));
 
     // Submit offline session, shouldn't generate any event.
-    mController->submit(CLIENT(2), SESSION(1), UID(1), mOfflineRequest, mClientCallback2);
+    mController->submit(CLIENT(2), SESSION(1), UID(2), UID(1), mOfflineRequest, mClientCallback2);
     EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
 
     // Bring UID(0) to top.
@@ -336,15 +450,15 @@
     ALOGD("TestCancelSession");
 
     // Submit real-time session SESSION(0), should start immediately.
-    mController->submit(CLIENT(0), SESSION(0), UID(0), mRealtimeRequest, mClientCallback0);
+    mController->submit(CLIENT(0), SESSION(0), UID(0), UID(0), mRealtimeRequest, mClientCallback0);
     EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Start(CLIENT(0), SESSION(0)));
 
     // Submit real-time session SESSION(1), should not start.
-    mController->submit(CLIENT(0), SESSION(1), UID(0), mRealtimeRequest, mClientCallback0);
+    mController->submit(CLIENT(0), SESSION(1), UID(0), UID(0), mRealtimeRequest, mClientCallback0);
     EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
 
     // Submit offline session SESSION(2), should not start.
-    mController->submit(CLIENT(0), SESSION(2), UID(0), mOfflineRequest, mClientCallback0);
+    mController->submit(CLIENT(0), SESSION(2), UID(0), UID(0), mOfflineRequest, mClientCallback0);
     EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
 
     // Cancel queued real-time session.
@@ -356,7 +470,7 @@
     EXPECT_TRUE(mController->cancel(CLIENT(0), SESSION(2)));
 
     // Submit offline session SESSION(3), shouldn't cause any event.
-    mController->submit(CLIENT(0), SESSION(3), UID(0), mOfflineRequest, mClientCallback0);
+    mController->submit(CLIENT(0), SESSION(3), UID(0), UID(0), mOfflineRequest, mClientCallback0);
     EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
 
     // Cancel running real-time session SESSION(0).
@@ -368,7 +482,7 @@
 
     // Submit real-time session SESSION(4), offline SESSION(3) should pause and SESSION(4)
     // should start.
-    mController->submit(CLIENT(0), SESSION(4), UID(0), mRealtimeRequest, mClientCallback0);
+    mController->submit(CLIENT(0), SESSION(4), UID(0), UID(0), mRealtimeRequest, mClientCallback0);
     EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Pause(CLIENT(0), SESSION(3)));
     EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Start(CLIENT(0), SESSION(4)));
 
@@ -377,6 +491,83 @@
     EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Stop(CLIENT(0), SESSION(3)));
 }
 
+TEST_F(TranscodingSessionControllerTest, TestCancelSessionWithMultipleUids) {
+    ALOGD("TestCancelSessionWithMultipleUids");
+    std::vector<int32_t> clientUids;
+
+    // Submit real-time session SESSION(0), should start immediately.
+    mController->submit(CLIENT(0), SESSION(0), UID(0), UID(0), mRealtimeRequest, mClientCallback0);
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Start(CLIENT(0), SESSION(0)));
+
+    // Submit real-time session SESSION(1), should not start.
+    mController->submit(CLIENT(0), SESSION(1), UID(0), UID(0), mRealtimeRequest, mClientCallback0);
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
+
+    // Submit offline session SESSION(2), should not start.
+    mController->submit(CLIENT(0), SESSION(2), UID(0), UID(0), mOfflineRequest, mClientCallback0);
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
+
+    // UID(1) moves to top.
+    mUidPolicy->setTop(UID(1));
+
+    // Add UID(1) to the offline SESSION(2), SESSION(2) should start and SESSION(0) should pause.
+    EXPECT_TRUE(mController->addClientUid(CLIENT(0), SESSION(2), UID(1)));
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Pause(CLIENT(0), SESSION(0)));
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Start(CLIENT(0), SESSION(2)));
+
+    // Add UID(1) to SESSION(1) as well.
+    EXPECT_TRUE(mController->addClientUid(CLIENT(0), SESSION(1), UID(1)));
+
+    // Cancel SESSION(2), should be cancelled and SESSION(1) should start.
+    EXPECT_TRUE(mController->cancel(CLIENT(0), SESSION(2)));
+    EXPECT_FALSE(mController->getClientUids(CLIENT(0), SESSION(2), &clientUids));
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Stop(CLIENT(0), SESSION(2)));
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Start(CLIENT(0), SESSION(1)));
+
+    // Cancel SESSION(1), should be cancelled and SESSION(0) should resume.
+    EXPECT_TRUE(mController->cancel(CLIENT(0), SESSION(1)));
+    EXPECT_FALSE(mController->getClientUids(CLIENT(0), SESSION(1), &clientUids));
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Stop(CLIENT(0), SESSION(1)));
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Resume(CLIENT(0), SESSION(0)));
+}
+
+TEST_F(TranscodingSessionControllerTest, TestCancelAllSessionsForClient) {
+    // Submit real-time session SESSION(0), should start immediately.
+    mController->submit(CLIENT(0), SESSION(0), UID(0), UID(0), mRealtimeRequest, mClientCallback0);
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Start(CLIENT(0), SESSION(0)));
+
+    // Submit real-time session SESSION(1), should not start.
+    mController->submit(CLIENT(0), SESSION(1), UID(0), UID(0), mRealtimeRequest, mClientCallback0);
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
+
+    // Submit offline session SESSION(2), should not start.
+    mController->submit(CLIENT(0), SESSION(2), UID(0), UID(0), mOfflineRequest, mClientCallback0);
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
+
+    std::vector<int32_t> clientUids;
+    // Add more client uids waiting on the sessions.
+    EXPECT_TRUE(mController->addClientUid(CLIENT(0), SESSION(0), UID(1)));
+    EXPECT_TRUE(mController->addClientUid(CLIENT(0), SESSION(1), UID(1)));
+    EXPECT_TRUE(mController->addClientUid(CLIENT(0), SESSION(2), UID(1)));
+    EXPECT_TRUE(mController->getClientUids(CLIENT(0), SESSION(0), &clientUids));
+    EXPECT_EQ(clientUids.size(), 2);
+    EXPECT_TRUE(mController->getClientUids(CLIENT(0), SESSION(1), &clientUids));
+    EXPECT_EQ(clientUids.size(), 2);
+    EXPECT_TRUE(mController->getClientUids(CLIENT(0), SESSION(2), &clientUids));
+    EXPECT_EQ(clientUids.size(), 1);
+
+    // Cancel all sessions for CLIENT(0) with -1.
+    // Expect SESSION(0) and SESSION(1) to be gone.
+    // Expect SESSION(2) to remain with an empty client uid list (kept only for offline) and to start.
+    EXPECT_TRUE(mController->cancel(CLIENT(0), -1));
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Stop(CLIENT(0), SESSION(0)));
+    EXPECT_FALSE(mController->getClientUids(CLIENT(0), SESSION(0), &clientUids));
+    EXPECT_FALSE(mController->getClientUids(CLIENT(0), SESSION(1), &clientUids));
+    EXPECT_TRUE(mController->getClientUids(CLIENT(0), SESSION(2), &clientUids));
+    EXPECT_EQ(clientUids.size(), 0);
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Start(CLIENT(0), SESSION(2)));
+}
+
 TEST_F(TranscodingSessionControllerTest, TestFinishSession) {
     ALOGD("TestFinishSession");
 
@@ -386,16 +577,16 @@
     EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
 
     // Submit offline session SESSION(0), should start immediately.
-    mController->submit(CLIENT(0), SESSION(0), UID(0), mOfflineRequest, mClientCallback0);
+    mController->submit(CLIENT(0), SESSION(0), UID(0), UID(0), mOfflineRequest, mClientCallback0);
     EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Start(CLIENT(0), SESSION(0)));
 
     // Submit real-time session SESSION(1), should pause offline session and start immediately.
-    mController->submit(CLIENT(0), SESSION(1), UID(0), mRealtimeRequest, mClientCallback0);
+    mController->submit(CLIENT(0), SESSION(1), UID(0), UID(0), mRealtimeRequest, mClientCallback0);
     EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Pause(CLIENT(0), SESSION(0)));
     EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Start(CLIENT(0), SESSION(1)));
 
     // Submit real-time session SESSION(2), should not start.
-    mController->submit(CLIENT(0), SESSION(2), UID(0), mRealtimeRequest, mClientCallback0);
+    mController->submit(CLIENT(0), SESSION(2), UID(0), UID(0), mRealtimeRequest, mClientCallback0);
     EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
 
     // Finish when the session never started, should be ignored.
@@ -406,7 +597,7 @@
     mUidPolicy->setTop(UID(1));
     // Submit real-time session to CLIENT(1) in UID(1), should pause previous session and start
     // new session.
-    mController->submit(CLIENT(1), SESSION(0), UID(1), mRealtimeRequest, mClientCallback1);
+    mController->submit(CLIENT(1), SESSION(0), UID(1), UID(1), mRealtimeRequest, mClientCallback1);
     EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Pause(CLIENT(0), SESSION(1)));
     EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Start(CLIENT(1), SESSION(0)));
 
@@ -434,6 +625,45 @@
     EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
 }
 
+TEST_F(TranscodingSessionControllerTest, TestFinishSessionWithMultipleUids) {
+    ALOGD("TestFinishSessionWithMultipleUids");
+    std::vector<int32_t> clientUids;
+
+    // Start with unspecified top uid.
+    // Submit real-time session SESSION(0), should start immediately.
+    mController->submit(CLIENT(0), SESSION(0), UID(0), UID(0), mRealtimeRequest, mClientCallback0);
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Start(CLIENT(0), SESSION(0)));
+
+    // Submit real-time session SESSION(1), should not start.
+    mController->submit(CLIENT(0), SESSION(1), UID(0), UID(0), mRealtimeRequest, mClientCallback0);
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
+    EXPECT_TRUE(mController->addClientUid(CLIENT(0), SESSION(1), UID(1)));
+
+    // Submit real-time session SESSION(2), should not start.
+    mController->submit(CLIENT(0), SESSION(2), UID(0), UID(0), mRealtimeRequest, mClientCallback0);
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
+    EXPECT_TRUE(mController->addClientUid(CLIENT(0), SESSION(2), UID(1)));
+    EXPECT_TRUE(mController->addClientUid(CLIENT(0), SESSION(2), UID(2)));
+
+    // UID(1) moves to top.
+    mUidPolicy->setTop(UID(1));
+    // SESSION(0) should pause, SESSION(1) should start.
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Pause(CLIENT(0), SESSION(0)));
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Start(CLIENT(0), SESSION(1)));
+
+    // Finish SESSION(1), SESSION(2) (next in line for UID(1)) should start.
+    mController->onFinish(CLIENT(0), SESSION(1));
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Finished(CLIENT(0), SESSION(1)));
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Start(CLIENT(0), SESSION(2)));
+    EXPECT_FALSE(mController->getClientUids(CLIENT(0), SESSION(1), &clientUids));
+
+    // Finish SESSION(2), SESSION(0) should resume.
+    mController->onFinish(CLIENT(0), SESSION(2));
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Finished(CLIENT(0), SESSION(2)));
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Resume(CLIENT(0), SESSION(0)));
+    EXPECT_FALSE(mController->getClientUids(CLIENT(0), SESSION(2), &clientUids));
+}
+
 TEST_F(TranscodingSessionControllerTest, TestFailSession) {
     ALOGD("TestFailSession");
 
@@ -443,16 +673,16 @@
     EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
 
     // Submit offline session SESSION(0), should start immediately.
-    mController->submit(CLIENT(0), SESSION(0), UID(0), mOfflineRequest, mClientCallback0);
+    mController->submit(CLIENT(0), SESSION(0), UID(0), UID(0), mOfflineRequest, mClientCallback0);
     EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Start(CLIENT(0), SESSION(0)));
 
     // Submit real-time session SESSION(1), should pause offline session and start immediately.
-    mController->submit(CLIENT(0), SESSION(1), UID(0), mRealtimeRequest, mClientCallback0);
+    mController->submit(CLIENT(0), SESSION(1), UID(0), UID(0), mRealtimeRequest, mClientCallback0);
     EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Pause(CLIENT(0), SESSION(0)));
     EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Start(CLIENT(0), SESSION(1)));
 
     // Submit real-time session SESSION(2), should not start.
-    mController->submit(CLIENT(0), SESSION(2), UID(0), mRealtimeRequest, mClientCallback0);
+    mController->submit(CLIENT(0), SESSION(2), UID(0), UID(0), mRealtimeRequest, mClientCallback0);
     EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
 
     // Fail when the session never started, should be ignored.
@@ -463,7 +693,7 @@
     mUidPolicy->setTop(UID(1));
     // Submit real-time session to CLIENT(1) in UID(1), should pause previous session and start
     // new session.
-    mController->submit(CLIENT(1), SESSION(0), UID(1), mRealtimeRequest, mClientCallback1);
+    mController->submit(CLIENT(1), SESSION(0), UID(1), UID(1), mRealtimeRequest, mClientCallback1);
     EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Pause(CLIENT(0), SESSION(1)));
     EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Start(CLIENT(1), SESSION(0)));
 
@@ -471,15 +701,18 @@
     // Should still be propagated to client, but shouldn't trigger any new start.
     mController->onError(CLIENT(0), SESSION(1), TranscodingErrorCode::kUnknown);
     EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Failed(CLIENT(0), SESSION(1)));
+    EXPECT_EQ(mTranscoder->getLastError(), TranscodingErrorCode::kUnknown);
 
     // Fail running real-time session, should start next real-time session in queue.
     mController->onError(CLIENT(1), SESSION(0), TranscodingErrorCode::kUnknown);
     EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Failed(CLIENT(1), SESSION(0)));
+    EXPECT_EQ(mTranscoder->getLastError(), TranscodingErrorCode::kUnknown);
     EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Start(CLIENT(0), SESSION(2)));
 
     // Fail running real-time session, should resume next session (offline session) in queue.
     mController->onError(CLIENT(0), SESSION(2), TranscodingErrorCode::kUnknown);
     EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Failed(CLIENT(0), SESSION(2)));
+    EXPECT_EQ(mTranscoder->getLastError(), TranscodingErrorCode::kUnknown);
     EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Resume(CLIENT(0), SESSION(0)));
 
     // Fail running offline session, and test error code propagation.
@@ -492,16 +725,59 @@
     EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
 }
 
+TEST_F(TranscodingSessionControllerTest, TestFailSessionWithMultipleUids) {
+    ALOGD("TestFailSessionWithMultipleUids");
+    std::vector<int32_t> clientUids;
+
+    // Start with unspecified top uid.
+    // Submit real-time session SESSION(0), should start immediately.
+    mController->submit(CLIENT(0), SESSION(0), UID(0), UID(0), mRealtimeRequest, mClientCallback0);
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Start(CLIENT(0), SESSION(0)));
+
+    // Submit real-time session SESSION(1), should not start.
+    mController->submit(CLIENT(0), SESSION(1), UID(0), UID(0), mRealtimeRequest, mClientCallback0);
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
+    EXPECT_TRUE(mController->addClientUid(CLIENT(0), SESSION(1), UID(1)));
+
+    // Submit real-time session SESSION(2), should not start.
+    mController->submit(CLIENT(0), SESSION(2), UID(0), UID(0), mRealtimeRequest, mClientCallback0);
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
+
+    // UID(1) moves to top.
+    mUidPolicy->setTop(UID(1));
+    // SESSION(0) should pause, SESSION(1) should start.
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Pause(CLIENT(0), SESSION(0)));
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Start(CLIENT(0), SESSION(1)));
+
+    // Add UID(1) and UID(2) to SESSION(2).
+    EXPECT_TRUE(mController->addClientUid(CLIENT(0), SESSION(2), UID(1)));
+    EXPECT_TRUE(mController->addClientUid(CLIENT(0), SESSION(2), UID(2)));
+
+    // Fail SESSION(1), SESSION(2) (next in line for UID(1)) should start.
+    mController->onError(CLIENT(0), SESSION(1), TranscodingErrorCode::kUnknown);
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Failed(CLIENT(0), SESSION(1)));
+    EXPECT_EQ(mTranscoder->getLastError(), TranscodingErrorCode::kUnknown);
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Start(CLIENT(0), SESSION(2)));
+    EXPECT_FALSE(mController->getClientUids(CLIENT(0), SESSION(1), &clientUids));
+
+    // Fail SESSION(2), SESSION(0) should resume.
+    mController->onError(CLIENT(0), SESSION(2), TranscodingErrorCode::kInvalidOperation);
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Failed(CLIENT(0), SESSION(2)));
+    EXPECT_EQ(mTranscoder->getLastError(), TranscodingErrorCode::kInvalidOperation);
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Resume(CLIENT(0), SESSION(0)));
+    EXPECT_FALSE(mController->getClientUids(CLIENT(0), SESSION(2), &clientUids));
+}
+
 TEST_F(TranscodingSessionControllerTest, TestTopUidChanged) {
     ALOGD("TestTopUidChanged");
 
     // Start with unspecified top UID.
     // Submit real-time session to CLIENT(0), session should start immediately.
-    mController->submit(CLIENT(0), SESSION(0), UID(0), mRealtimeRequest, mClientCallback0);
+    mController->submit(CLIENT(0), SESSION(0), UID(0), UID(0), mRealtimeRequest, mClientCallback0);
     EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Start(CLIENT(0), SESSION(0)));
 
     // Submit offline session to CLIENT(0), should not start.
-    mController->submit(CLIENT(1), SESSION(0), UID(0), mOfflineRequest, mClientCallback1);
+    mController->submit(CLIENT(1), SESSION(0), UID(1), UID(0), mOfflineRequest, mClientCallback1);
     EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
 
     // Move UID(1) to top.
@@ -510,7 +786,7 @@
 
     // Submit real-time session to CLIENT(2) in different uid UID(1).
     // Should pause previous session and start new session.
-    mController->submit(CLIENT(2), SESSION(0), UID(1), mRealtimeRequest, mClientCallback2);
+    mController->submit(CLIENT(2), SESSION(0), UID(2), UID(1), mRealtimeRequest, mClientCallback2);
     EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Pause(CLIENT(0), SESSION(0)));
     EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Start(CLIENT(2), SESSION(0)));
 
@@ -534,16 +810,67 @@
     EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Start(CLIENT(1), SESSION(0)));
 }
 
-TEST_F(TranscodingSessionControllerTest, TestTopUidSetChanged) {
-    ALOGD("TestTopUidChanged_MultipleUids");
+TEST_F(TranscodingSessionControllerTest, TestTopUidChangedMultipleUids) {
+    ALOGD("TestTopUidChangedMultipleUids");
 
     // Start with unspecified top UID.
     // Submit real-time session to CLIENT(0), session should start immediately.
-    mController->submit(CLIENT(0), SESSION(0), UID(0), mRealtimeRequest, mClientCallback0);
+    mController->submit(CLIENT(0), SESSION(0), UID(0), UID(0), mRealtimeRequest, mClientCallback0);
     EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Start(CLIENT(0), SESSION(0)));
 
     // Submit offline session to CLIENT(0), should not start.
-    mController->submit(CLIENT(1), SESSION(0), UID(0), mOfflineRequest, mClientCallback1);
+    mController->submit(CLIENT(1), SESSION(0), UID(1), UID(0), mOfflineRequest, mClientCallback1);
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
+
+    // Bring UID(1) to top.
+    mUidPolicy->setTop(UID(1));
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
+
+    // Add UID(1) to SESSION(0), SESSION(0) should continue to run
+    // (no pause&resume of the same session).
+    EXPECT_TRUE(mController->addClientUid(CLIENT(0), SESSION(0), UID(1)));
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
+
+    // Bring UID(0) back to top, SESSION(0) should continue to run
+    // (no pause&resume of the same session).
+    mUidPolicy->setTop(UID(0));
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
+
+    // Bring UID(2) to top.
+    mUidPolicy->setTop(UID(2));
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
+    // Add UID(2) to the offline session; it should start.
+    EXPECT_TRUE(mController->addClientUid(CLIENT(1), SESSION(0), UID(2)));
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Pause(CLIENT(0), SESSION(0)));
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Start(CLIENT(1), SESSION(0)));
+
+    // Add UID(3) to SESSION(0).
+    EXPECT_TRUE(mController->addClientUid(CLIENT(0), SESSION(0), UID(3)));
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
+    // Bring UID(3) to top, CLIENT(0)'s SESSION(0) should resume.
+    mUidPolicy->setTop(UID(3));
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Pause(CLIENT(1), SESSION(0)));
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Resume(CLIENT(0), SESSION(0)));
+
+    // Now make UID(2) also blocked on CLIENT(0), SESSION(0).
+    EXPECT_TRUE(mController->addClientUid(CLIENT(0), SESSION(0), UID(2)));
+
+    // Bring UID(2) back to top, CLIENT(0)'s SESSION(0) should continue to run (even though it
+    // was added to UID(2)'s queue later than CLIENT(1)'s SESSION(0)).
+    mUidPolicy->setTop(UID(2));
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
+}
+
+TEST_F(TranscodingSessionControllerTest, TestTopUidSetChanged) {
+    ALOGD("TestTopUidSetChanged");
+
+    // Start with unspecified top UID.
+    // Submit real-time session to CLIENT(0), session should start immediately.
+    mController->submit(CLIENT(0), SESSION(0), UID(0), UID(0), mRealtimeRequest, mClientCallback0);
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Start(CLIENT(0), SESSION(0)));
+
+    // Submit offline session to CLIENT(0), should not start.
+    mController->submit(CLIENT(1), SESSION(0), UID(1), UID(0), mOfflineRequest, mClientCallback1);
     EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
 
     // Set UID(0), UID(1) to top set.
@@ -553,7 +880,7 @@
 
     // Submit real-time session to CLIENT(2) in different uid UID(1).
     // UID(0) should pause and UID(1) should start.
-    mController->submit(CLIENT(2), SESSION(0), UID(1), mRealtimeRequest, mClientCallback2);
+    mController->submit(CLIENT(2), SESSION(0), UID(2), UID(1), mRealtimeRequest, mClientCallback2);
     EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Pause(CLIENT(0), SESSION(0)));
     EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Start(CLIENT(2), SESSION(0)));
 
@@ -588,6 +915,100 @@
     EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Start(CLIENT(1), SESSION(0)));
 }
 
+TEST_F(TranscodingSessionControllerTest, TestUidGone) {
+    ALOGD("TestUidGone");
+
+    mUidPolicy->setTop(UID(0));
+    // UID(0) starts as the top UID.
+    // Submit real-time sessions to CLIENT(0), the first should start immediately.
+    mController->submit(CLIENT(0), SESSION(0), UID(0), UID(0), mRealtimeRequest, mClientCallback0);
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Start(CLIENT(0), SESSION(0)));
+
+    mController->submit(CLIENT(0), SESSION(1), UID(0), UID(0), mRealtimeRequest, mClientCallback0);
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
+    EXPECT_TRUE(mController->addClientUid(CLIENT(0), SESSION(1), UID(1)));
+
+    // Submit offline session to CLIENT(1), should not start.
+    mController->submit(CLIENT(1), SESSION(0), UID(1), UID(1), mOfflineRequest, mClientCallback1);
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
+    EXPECT_TRUE(mController->addClientUid(CLIENT(1), SESSION(0), UID(1)));
+
+    // Bring UID(1) to top.
+    mUidPolicy->setTop(UID(1));
+    // CLIENT(0)'s SESSION(1) should start, SESSION(0) should pause.
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Pause(CLIENT(0), SESSION(0)));
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Start(CLIENT(0), SESSION(1)));
+    mController->onUidGone(UID(0));  // UID(0) is gone; SESSION(0) should be cancelled.
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Stop(CLIENT(0), SESSION(0)));
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Failed(CLIENT(0), SESSION(0)));
+    EXPECT_EQ(mTranscoder->getLastError(), TranscodingErrorCode::kUidGoneCancelled);
+
+    std::vector<int32_t> clientUids;
+    EXPECT_FALSE(mController->getClientUids(CLIENT(0), SESSION(0), &clientUids));
+    EXPECT_TRUE(mController->getClientUids(CLIENT(0), SESSION(1), &clientUids));
+    EXPECT_EQ(clientUids.size(), 1);
+    EXPECT_EQ(clientUids[0], UID(1));
+
+    // Tell the controller that UID(1) is gone too.
+    mController->onUidGone(UID(1));
+    // CLIENT(1)'s SESSION(0) should start, CLIENT(0)'s SESSION(1) should be cancelled.
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Stop(CLIENT(0), SESSION(1)));
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Failed(CLIENT(0), SESSION(1)));
+    EXPECT_EQ(mTranscoder->getLastError(), TranscodingErrorCode::kUidGoneCancelled);
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Start(CLIENT(1), SESSION(0)));
+    // CLIENT(1) SESSION(0) should not have any client uids as it's only kept for offline.
+    EXPECT_TRUE(mController->getClientUids(CLIENT(1), SESSION(0), &clientUids));
+    EXPECT_EQ(clientUids.size(), 0);
+}
+
+TEST_F(TranscodingSessionControllerTest, TestAddGetClientUids) {
+    ALOGD("TestAddGetClientUids");
+
+    // Add/get client uids with non-existent session, should fail.
+    std::vector<int32_t> clientUids;
+    uid_t ownUid = ::getuid();
+    EXPECT_FALSE(mController->addClientUid(CLIENT(0), SESSION(0), ownUid));
+    EXPECT_FALSE(mController->getClientUids(CLIENT(0), SESSION(0), &clientUids));
+
+    // Submit a real-time request.
+    EXPECT_TRUE(mController->submit(CLIENT(0), SESSION(0), UID(0), UID(0), mRealtimeRequest,
+                                    mClientCallback0));
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Start(CLIENT(0), SESSION(0)));
+
+    // Should have the client uid (UID(0)) in client uids.
+    EXPECT_TRUE(mController->getClientUids(CLIENT(0), SESSION(0), &clientUids));
+    EXPECT_EQ(clientUids.size(), 1);
+    EXPECT_EQ(clientUids[0], UID(0));
+
+    // Add UID(0) again should fail.
+    EXPECT_FALSE(mController->addClientUid(CLIENT(0), SESSION(0), UID(0)));
+
+    // Add own uid should succeed.
+    EXPECT_TRUE(mController->addClientUid(CLIENT(0), SESSION(0), ownUid));
+    EXPECT_TRUE(mController->getClientUids(CLIENT(0), SESSION(0), &clientUids));
+    std::unordered_set<uid_t> uidSet;
+    uidSet.insert(clientUids.begin(), clientUids.end());
+    EXPECT_EQ(uidSet.size(), 2);
+    EXPECT_EQ(uidSet.count(UID(0)), 1);
+    EXPECT_EQ(uidSet.count(ownUid), 1);
+
+    // Submit an offline request.
+    EXPECT_TRUE(mController->submit(CLIENT(0), SESSION(1), UID(0), UID(0), mOfflineRequest,
+                                    mClientCallback0));
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
+
+    // Offline session should not have any client uids.
+    EXPECT_TRUE(mController->getClientUids(CLIENT(0), SESSION(1), &clientUids));
+    EXPECT_EQ(clientUids.size(), 0);
+
+    // Move UID(1) to top.
+    mUidPolicy->setTop(UID(1));
+    // Add UID(1) to offline session, offline session should start and SESSION(0) should pause.
+    EXPECT_TRUE(mController->addClientUid(CLIENT(0), SESSION(1), UID(1)));
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Pause(CLIENT(0), SESSION(0)));
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Start(CLIENT(0), SESSION(1)));
+}
+
 /* Test resource lost without thermal throttling */
 TEST_F(TranscodingSessionControllerTest, TestResourceLost) {
     ALOGD("TestResourceLost");
@@ -595,12 +1016,12 @@
     // Start with unspecified top UID.
     // Submit real-time session to CLIENT(0), session should start immediately.
     mRealtimeRequest.clientPid = PID(0);
-    mController->submit(CLIENT(0), SESSION(0), UID(0), mRealtimeRequest, mClientCallback0);
+    mController->submit(CLIENT(0), SESSION(0), UID(0), UID(0), mRealtimeRequest, mClientCallback0);
     EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Start(CLIENT(0), SESSION(0)));
 
     // Submit offline session to CLIENT(0), should not start.
     mOfflineRequest.clientPid = PID(0);
-    mController->submit(CLIENT(1), SESSION(0), UID(0), mOfflineRequest, mClientCallback1);
+    mController->submit(CLIENT(1), SESSION(0), UID(1), UID(0), mOfflineRequest, mClientCallback1);
     EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
 
     // Move UID(1) to top.
@@ -610,7 +1031,7 @@
     // Submit real-time session to CLIENT(2) in different uid UID(1).
     // Should pause previous session and start new session.
     mRealtimeRequest.clientPid = PID(1);
-    mController->submit(CLIENT(2), SESSION(0), UID(1), mRealtimeRequest, mClientCallback2);
+    mController->submit(CLIENT(2), SESSION(0), UID(2), UID(1), mRealtimeRequest, mClientCallback2);
     EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Pause(CLIENT(0), SESSION(0)));
     EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Start(CLIENT(2), SESSION(0)));
 
@@ -667,7 +1088,7 @@
 
     // Submit real-time session to CLIENT(3) in UID(2), session shouldn't start due to no resource.
     mRealtimeRequest.clientPid = PID(2);
-    mController->submit(CLIENT(3), SESSION(0), UID(2), mRealtimeRequest, mClientCallback3);
+    mController->submit(CLIENT(3), SESSION(0), UID(3), UID(2), mRealtimeRequest, mClientCallback3);
     EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
 
     // Signal resource available, CLIENT(3)'s session should start.
@@ -682,12 +1103,12 @@
     // Start with unspecified top UID.
     // Submit real-time session to CLIENT(0), session should start immediately.
     mRealtimeRequest.clientPid = PID(0);
-    mController->submit(CLIENT(0), SESSION(0), UID(0), mRealtimeRequest, mClientCallback0);
+    mController->submit(CLIENT(0), SESSION(0), UID(0), UID(0), mRealtimeRequest, mClientCallback0);
     EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Start(CLIENT(0), SESSION(0)));
 
     // Submit offline session to CLIENT(0), should not start.
     mOfflineRequest.clientPid = PID(0);
-    mController->submit(CLIENT(1), SESSION(0), UID(0), mOfflineRequest, mClientCallback1);
+    mController->submit(CLIENT(1), SESSION(0), UID(1), UID(0), mOfflineRequest, mClientCallback1);
     EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
 
     // Move UID(1) to top.
@@ -697,7 +1118,7 @@
     // Submit real-time session to CLIENT(2) in different uid UID(1).
     // Should pause previous session and start new session.
     mRealtimeRequest.clientPid = PID(1);
-    mController->submit(CLIENT(2), SESSION(0), UID(1), mRealtimeRequest, mClientCallback2);
+    mController->submit(CLIENT(2), SESSION(0), UID(2), UID(1), mRealtimeRequest, mClientCallback2);
     EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Pause(CLIENT(0), SESSION(0)));
     EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Start(CLIENT(2), SESSION(0)));
 
@@ -736,7 +1157,7 @@
     mUidPolicy->setTop(UID(2));
     // Submit real-time session to CLIENT(3) in UID(2), session shouldn't start during throttling.
     mRealtimeRequest.clientPid = PID(2);
-    mController->submit(CLIENT(3), SESSION(0), UID(2), mRealtimeRequest, mClientCallback3);
+    mController->submit(CLIENT(3), SESSION(0), UID(3), UID(2), mRealtimeRequest, mClientCallback3);
     EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
     // Throttling stops, CLIENT(3)'s session should start.
     mController->onThrottlingStopped();
@@ -750,12 +1171,12 @@
     // Start with unspecified top UID.
     // Submit real-time session to CLIENT(0), session should start immediately.
     mRealtimeRequest.clientPid = PID(0);
-    mController->submit(CLIENT(0), SESSION(0), UID(0), mRealtimeRequest, mClientCallback0);
+    mController->submit(CLIENT(0), SESSION(0), UID(0), UID(0), mRealtimeRequest, mClientCallback0);
     EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Start(CLIENT(0), SESSION(0)));
 
     // Submit offline session to CLIENT(0), should not start.
     mOfflineRequest.clientPid = PID(0);
-    mController->submit(CLIENT(1), SESSION(0), UID(0), mOfflineRequest, mClientCallback1);
+    mController->submit(CLIENT(1), SESSION(0), UID(1), UID(0), mOfflineRequest, mClientCallback1);
     EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
 
     // Move UID(1) to top.
@@ -765,7 +1186,7 @@
     // Submit real-time session to CLIENT(2) in different uid UID(1).
     // Should pause previous session and start new session.
     mRealtimeRequest.clientPid = PID(1);
-    mController->submit(CLIENT(2), SESSION(0), UID(1), mRealtimeRequest, mClientCallback2);
+    mController->submit(CLIENT(2), SESSION(0), UID(2), UID(1), mRealtimeRequest, mClientCallback2);
     EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Pause(CLIENT(0), SESSION(0)));
     EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Start(CLIENT(2), SESSION(0)));
 
@@ -802,4 +1223,99 @@
     EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Resume(CLIENT(2), SESSION(0)));
 }
 
+TEST_F(TranscodingSessionControllerTest, TestTranscoderWatchdogNoHeartbeat) {
+    ALOGD("TestTranscoderWatchdogTimeout");
+
+    // Submit session to CLIENT(0) in UID(0).
+    // Should start immediately (because this is the only session).
+    mController->submit(CLIENT(0), SESSION(0), UID(0), UID(0), mRealtimeRequest, mClientCallback0);
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Start(CLIENT(0), SESSION(0)));
+
+    // Test 1: If no keep-alive is sent at all, the session times out after 3 seconds.
+    expectTimeout(CLIENT(0), SESSION(0), 2);
+}
+
+TEST_F(TranscodingSessionControllerTest, TestTranscoderWatchdogHeartbeat) {
+    // Test 2: No timeout as long as keep-alives keep coming; times out after they stop.
+    mController->submit(CLIENT(0), SESSION(1), UID(0), UID(0), mRealtimeRequest, mClientCallback0);
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Start(CLIENT(0), SESSION(1)));
+
+    for (int i = 0; i < 5; i++) {
+        EXPECT_EQ(mTranscoder->popEvent(1000000), TestTranscoder::NoEvent);
+        mController->onHeartBeat(CLIENT(0), SESSION(1));
+    }
+    expectTimeout(CLIENT(0), SESSION(1), 2);
+}
+
+TEST_F(TranscodingSessionControllerTest, TestTranscoderWatchdogDuringPause) {
+    int expectedGen = 2;
+
+    // Test 3a: No timeout for paused session even if no keep-alive is sent.
+    mController->submit(CLIENT(0), SESSION(2), UID(0), UID(0), mOfflineRequest, mClientCallback0);
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Start(CLIENT(0), SESSION(2)));
+    // Trigger a pause by sending a resource lost.
+    mController->onResourceLost(CLIENT(0), SESSION(2));
+    EXPECT_EQ(mTranscoder->popEvent(3100000), TestTranscoder::NoEvent);
+    mController->onResourceAvailable();
+    EXPECT_EQ(mTranscoder->popEvent(100000), TestTranscoder::Resume(CLIENT(0), SESSION(2)));
+    expectTimeout(CLIENT(0), SESSION(2), expectedGen++);
+
+    // Test 3b: Timeout resets to the full value after the session is paused and resumed.
+    mController->submit(CLIENT(0), SESSION(3), UID(0), UID(0), mOfflineRequest, mClientCallback0);
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Start(CLIENT(0), SESSION(3)));
+    // Let the session run almost to timeout, to test timeout reset after pause.
+    EXPECT_EQ(mTranscoder->popEvent(2900000), TestTranscoder::NoEvent);
+    // Trigger a pause by submitting a higher-priority request.
+    mController->submit(CLIENT(0), SESSION(4), UID(0), UID(0), mRealtimeRequest, mClientCallback0);
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Pause(CLIENT(0), SESSION(3)));
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Start(CLIENT(0), SESSION(4)));
+    // Finish the higher-priority session, lower-priority session should resume,
+    // and the timeout should reset to full value.
+    mController->onFinish(CLIENT(0), SESSION(4));
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Finished(CLIENT(0), SESSION(4)));
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Resume(CLIENT(0), SESSION(3)));
+    expectTimeout(CLIENT(0), SESSION(3), expectedGen++);
+}
+
+TEST_F(TranscodingSessionControllerTest, TestTranscoderPacerOverCountOnly) {
+    ALOGD("TestTranscoderPacerOverCountOnly");
+    testPacerHelper(12 /*numSubmits*/, 100 /*sessionDurationMs*/, 12 /*expectedSuccess*/);
+}
+
+TEST_F(TranscodingSessionControllerTest, TestTranscoderPacerOverTimeOnly) {
+    ALOGD("TestTranscoderPacerOverTimeOnly");
+    testPacerHelper(5 /*numSubmits*/, 1000 /*sessionDurationMs*/, 5 /*expectedSuccess*/);
+}
+
+TEST_F(TranscodingSessionControllerTest, TestTranscoderPacerOverQuota) {
+    ALOGD("TestTranscoderPacerOverQuota");
+    testPacerHelper(12 /*numSubmits*/, 400 /*sessionDurationMs*/, 10 /*expectedSuccess*/);
+}
+
+TEST_F(TranscodingSessionControllerTest, TestTranscoderPacerWithPause) {
+    ALOGD("TestTranscoderPacerDuringPause");
+    testPacerHelper(12 /*numSubmits*/, 400 /*sessionDurationMs*/, 10 /*expectedSuccess*/,
+                    true /*pauseLastSuccessSession*/);
+}
+
+/*
+ * Test the case where multiple client uids request the same session. A session should only
+ * be dropped when all of its client uids are over quota.
+ */
+TEST_F(TranscodingSessionControllerTest, TestTranscoderPacerMultipleUids) {
+    ALOGD("TestTranscoderPacerMultipleUids");
+    // First, run mClientCallback0 to the point of no quota.
+    testPacerHelper(12 /*numSubmits*/, 400 /*sessionDurationMs*/, 10 /*expectedSuccess*/,
+                    mClientCallback0, {}, false /*pauseLastSuccessSession*/);
+    // Make UID(0) also block on Client1's sessions; Client1's quota should not be affected.
+    testPacerHelper(12 /*numSubmits*/, 400 /*sessionDurationMs*/, 10 /*expectedSuccess*/,
+                    mClientCallback1, {UID(0)}, false /*pauseLastSuccessSession*/);
+    // Make UID(10) block on Client2's sessions. We expect to see 11 successes (instead of 10),
+    // because addClientUid() is called after the submit, so the first session has already
+    // started by the time UID(10) is added. UID(10) allows one extra (the 11th) session to run;
+    // after that both UID(10) and UID(2) are out of quota.
+    testPacerHelper(12 /*numSubmits*/, 400 /*sessionDurationMs*/, 11 /*expectedSuccess*/,
+                    mClientCallback2, {UID(10)}, false /*pauseLastSuccessSession*/);
+}
+
 }  // namespace android
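For orientation, the multi-uid bookkeeping exercised by the tests above boils down to the call pattern below. This is only a sketch reusing the test fixture's mController, the CLIENT/SESSION/UID macros and its request/callback members; it is not meant as a public-API example.

    // Owner submits a session on behalf of client uid UID(0).
    mController->submit(CLIENT(0), SESSION(0), UID(0) /*callingUid*/, UID(0) /*clientUid*/,
                        mRealtimeRequest, mClientCallback0);

    // A second uid later requests the same work and is attached to the existing
    // session instead of submitting a duplicate one.
    EXPECT_TRUE(mController->addClientUid(CLIENT(0), SESSION(0), UID(1)));

    // Query which uids are currently waiting on the session. Per the pacer tests above,
    // the session is only dropped once every uid in this list is over quota.
    std::vector<int32_t> clientUids;
    EXPECT_TRUE(mController->getClientUids(CLIENT(0), SESSION(0), &clientUids));
    EXPECT_EQ(clientUids.size(), 2);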
diff --git a/media/libmediatranscoding/tests/assets/TranscodingTestAssets/video_1280x720_hevc_hdr10_static_3mbps.mp4 b/media/libmediatranscoding/tests/assets/TranscodingTestAssets/video_1280x720_hevc_hdr10_static_3mbps.mp4
new file mode 100644
index 0000000..17150d4
--- /dev/null
+++ b/media/libmediatranscoding/tests/assets/TranscodingTestAssets/video_1280x720_hevc_hdr10_static_3mbps.mp4
Binary files differ
diff --git a/media/libmediatranscoding/tests/build_and_run_all_unit_tests.sh b/media/libmediatranscoding/tests/build_and_run_all_unit_tests.sh
index 5db9258..3cbf1dd 100644
--- a/media/libmediatranscoding/tests/build_and_run_all_unit_tests.sh
+++ b/media/libmediatranscoding/tests/build_and_run_all_unit_tests.sh
@@ -32,3 +32,7 @@
 echo "testing TranscodingSessionController"
 #adb shell /data/nativetest64/TranscodingSessionController_tests/TranscodingSessionController_tests
 adb shell /data/nativetest/TranscodingSessionController_tests/TranscodingSessionController_tests
+
+echo "testing TranscodingLogger"
+#adb shell /data/nativetest64/TranscodingLogger_tests/TranscodingLogger_tests
+adb shell /data/nativetest/TranscodingLogger_tests/TranscodingLogger_tests
diff --git a/media/libmediatranscoding/transcoder/Android.bp b/media/libmediatranscoding/transcoder/Android.bp
index bebe6b1..ccb2ec2 100644
--- a/media/libmediatranscoding/transcoder/Android.bp
+++ b/media/libmediatranscoding/transcoder/Android.bp
@@ -14,6 +14,15 @@
  * limitations under the License.
  */
 
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
 cc_defaults {
     name: "mediatranscoder_defaults",
 
@@ -72,4 +81,3 @@
     name: "libmediatranscoder",
     defaults: ["mediatranscoder_defaults"],
 }
-
diff --git a/media/libmediatranscoding/transcoder/MediaSampleWriter.cpp b/media/libmediatranscoding/transcoder/MediaSampleWriter.cpp
index 389b941..10b2e80 100644
--- a/media/libmediatranscoding/transcoder/MediaSampleWriter.cpp
+++ b/media/libmediatranscoding/transcoder/MediaSampleWriter.cpp
@@ -83,12 +83,14 @@
     }
 }
 
-bool MediaSampleWriter::init(int fd, const std::weak_ptr<CallbackInterface>& callbacks) {
-    return init(DefaultMuxer::create(fd), callbacks);
+bool MediaSampleWriter::init(int fd, const std::weak_ptr<CallbackInterface>& callbacks,
+                             int64_t heartBeatIntervalUs) {
+    return init(DefaultMuxer::create(fd), callbacks, heartBeatIntervalUs);
 }
 
 bool MediaSampleWriter::init(const std::shared_ptr<MediaSampleWriterMuxerInterface>& muxer,
-                             const std::weak_ptr<CallbackInterface>& callbacks) {
+                             const std::weak_ptr<CallbackInterface>& callbacks,
+                             int64_t heartBeatIntervalUs) {
     if (callbacks.lock() == nullptr) {
         LOG(ERROR) << "Callback object cannot be null";
         return false;
@@ -106,6 +108,7 @@
     mState = INITIALIZED;
     mMuxer = muxer;
     mCallbacks = callbacks;
+    mHeartBeatIntervalUs = heartBeatIntervalUs;
     return true;
 }
 
@@ -219,6 +222,7 @@
 media_status_t MediaSampleWriter::runWriterLoop(bool* wasStopped) NO_THREAD_SAFETY_ANALYSIS {
     AMediaCodecBufferInfo bufferInfo;
     int32_t lastProgressUpdate = 0;
+    bool progressSinceLastReport = false;
     int trackEosCount = 0;
 
     // Set the "primary" track that will be used to determine progress to the track with longest
@@ -232,6 +236,10 @@
         }
     }
 
+    std::chrono::microseconds updateInterval(mHeartBeatIntervalUs);
+    std::chrono::steady_clock::time_point nextUpdateTime =
+            std::chrono::steady_clock::now() + updateInterval;
+
     while (true) {
         if (trackEosCount >= mTracks.size()) {
             break;
@@ -242,7 +250,21 @@
         {
             std::unique_lock lock(mMutex);
             while (mSampleQueue.empty() && mState == STARTED) {
-                mSampleSignal.wait(lock);
+                if (mHeartBeatIntervalUs <= 0) {
+                    mSampleSignal.wait(lock);
+                    continue;
+                }
+
+                if (mSampleSignal.wait_until(lock, nextUpdateTime) == std::cv_status::timeout) {
+                    // Send a heart-beat if there has been any progress since the last update.
+                    if (progressSinceLastReport) {
+                        if (auto callbacks = mCallbacks.lock()) {
+                            callbacks->onHeartBeat(this);
+                        }
+                        progressSinceLastReport = false;
+                    }
+                    nextUpdateTime += updateInterval;
+                }
             }
 
             if (mState == STOPPED) {
@@ -307,6 +329,7 @@
                 lastProgressUpdate = progress;
             }
         }
+        progressSinceLastReport = true;
     }
 
     return AMEDIA_OK;
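The writer-loop change above can be read in isolation as: block on the sample queue, but wake up once per heart-beat interval and report a heart-beat only when progress has happened since the previous wake-up. A simplified, self-contained sketch of that wait pattern follows (the names are illustrative, not the actual class members):

    #include <chrono>
    #include <condition_variable>
    #include <mutex>

    // Sketch: wait for work, emitting a heart-beat at most once per interval while
    // there has been progress since the last report.
    template <typename Predicate, typename HeartBeatFn>
    void waitWithHeartBeat(std::mutex& mutex, std::condition_variable& cond, Predicate hasWork,
                           bool& progressSinceLastReport, int64_t heartBeatIntervalUs,
                           HeartBeatFn sendHeartBeat) {
        const std::chrono::microseconds interval(heartBeatIntervalUs);
        auto nextUpdateTime = std::chrono::steady_clock::now() + interval;

        std::unique_lock lock(mutex);
        while (!hasWork()) {
            if (heartBeatIntervalUs <= 0) {
                cond.wait(lock);  // Heart-beats disabled; plain wait.
                continue;
            }
            if (cond.wait_until(lock, nextUpdateTime) == std::cv_status::timeout) {
                if (progressSinceLastReport) {
                    sendHeartBeat();
                    progressSinceLastReport = false;
                }
                nextUpdateTime += interval;
            }
        }
    }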
diff --git a/media/libmediatranscoding/transcoder/MediaTranscoder.cpp b/media/libmediatranscoding/transcoder/MediaTranscoder.cpp
index d58d88d..879241e 100644
--- a/media/libmediatranscoding/transcoder/MediaTranscoder.cpp
+++ b/media/libmediatranscoding/transcoder/MediaTranscoder.cpp
@@ -29,24 +29,37 @@
 
 namespace android {
 
-static AMediaFormat* mergeMediaFormats(AMediaFormat* base, AMediaFormat* overlay) {
-    if (base == nullptr || overlay == nullptr) {
+static std::shared_ptr<AMediaFormat> createVideoTrackFormat(AMediaFormat* srcFormat,
+                                                            AMediaFormat* options) {
+    if (srcFormat == nullptr || options == nullptr) {
         LOG(ERROR) << "Cannot merge null formats";
         return nullptr;
     }
 
-    AMediaFormat* format = AMediaFormat_new();
-    if (AMediaFormat_copy(format, base) != AMEDIA_OK) {
-        AMediaFormat_delete(format);
-        return nullptr;
+    // ------- Define parameters to copy from the source track format -------
+    std::vector<AMediaFormatUtils::EntryCopier> srcParamsToCopy{
+            ENTRY_COPIER(AMEDIAFORMAT_KEY_MIME, String),
+            ENTRY_COPIER(AMEDIAFORMAT_KEY_DURATION, Int64),
+            ENTRY_COPIER(AMEDIAFORMAT_KEY_WIDTH, Int32),
+            ENTRY_COPIER(AMEDIAFORMAT_KEY_HEIGHT, Int32),
+            ENTRY_COPIER(AMEDIAFORMAT_KEY_FRAME_RATE, Int32),
+            ENTRY_COPIER(AMEDIAFORMAT_KEY_COLOR_RANGE, Int32),
+            ENTRY_COPIER(AMEDIAFORMAT_KEY_COLOR_STANDARD, Int32),
+            ENTRY_COPIER(AMEDIAFORMAT_KEY_COLOR_TRANSFER, Int32),
+    };
+
+    // If the destination codec is the same as the source codec, we can preserve profile and level
+    // from the source track as default values. Otherwise leave them unspecified.
+    const char *srcMime, *dstMime;
+    AMediaFormat_getString(srcFormat, AMEDIAFORMAT_KEY_MIME, &srcMime);
+    if (!AMediaFormat_getString(options, AMEDIAFORMAT_KEY_MIME, &dstMime) ||
+        strcmp(srcMime, dstMime) == 0) {
+        srcParamsToCopy.push_back(ENTRY_COPIER(AMEDIAFORMAT_KEY_PROFILE, Int32));
+        srcParamsToCopy.push_back(ENTRY_COPIER(AMEDIAFORMAT_KEY_LEVEL, Int32));
     }
 
-    // Note: AMediaFormat does not expose a function for appending values from another format or for
-    // iterating over all values and keys in a format. Instead we define a static list of known keys
-    // along with their value types and copy the ones that are present. A better solution would be
-    // to either implement required functions in NDK or to parse the overlay format's string
-    // representation and copy all existing keys.
-    static const AMediaFormatUtils::EntryCopier kSupportedFormatEntries[] = {
+    // ------- Define parameters to copy from the caller's options -------
+    static const std::vector<AMediaFormatUtils::EntryCopier> kSupportedOptions{
             ENTRY_COPIER(AMEDIAFORMAT_KEY_MIME, String),
             ENTRY_COPIER(AMEDIAFORMAT_KEY_DURATION, Int64),
             ENTRY_COPIER(AMEDIAFORMAT_KEY_WIDTH, Int32),
@@ -54,7 +67,6 @@
             ENTRY_COPIER(AMEDIAFORMAT_KEY_BIT_RATE, Int32),
             ENTRY_COPIER(AMEDIAFORMAT_KEY_PROFILE, Int32),
             ENTRY_COPIER(AMEDIAFORMAT_KEY_LEVEL, Int32),
-            ENTRY_COPIER(AMEDIAFORMAT_KEY_COLOR_FORMAT, Int32),
             ENTRY_COPIER(AMEDIAFORMAT_KEY_COLOR_RANGE, Int32),
             ENTRY_COPIER(AMEDIAFORMAT_KEY_COLOR_STANDARD, Int32),
             ENTRY_COPIER(AMEDIAFORMAT_KEY_COLOR_TRANSFER, Int32),
@@ -63,10 +75,12 @@
             ENTRY_COPIER(AMEDIAFORMAT_KEY_PRIORITY, Int32),
             ENTRY_COPIER2(AMEDIAFORMAT_KEY_OPERATING_RATE, Float, Int32),
     };
-    const size_t entryCount = sizeof(kSupportedFormatEntries) / sizeof(kSupportedFormatEntries[0]);
 
-    AMediaFormatUtils::CopyFormatEntries(overlay, format, kSupportedFormatEntries, entryCount);
-    return format;
+    // ------- Copy parameters from source and options to the destination -------
+    auto trackFormat = std::shared_ptr<AMediaFormat>(AMediaFormat_new(), &AMediaFormat_delete);
+    AMediaFormatUtils::CopyFormatEntries(srcFormat, trackFormat.get(), srcParamsToCopy);
+    AMediaFormatUtils::CopyFormatEntries(options, trackFormat.get(), kSupportedOptions);
+    return trackFormat;
 }
 
 void MediaTranscoder::onThreadFinished(const void* thread, media_status_t threadStatus,
@@ -144,6 +158,11 @@
         return;
     }
 
+    // The sample writer has not started yet, so notify the caller that progress is still being made.
+    if (mHeartBeatIntervalUs > 0) {
+        mCallbacks->onHeartBeat(this);
+    }
+
     MediaTrackTranscoder* mutableTranscoder = const_cast<MediaTrackTranscoder*>(transcoder);
     mutableTranscoder->setSampleConsumer(consumer);
 
@@ -206,13 +225,18 @@
     mCallbacks->onProgressUpdate(this, progress);
 }
 
-MediaTranscoder::MediaTranscoder(const std::shared_ptr<CallbackInterface>& callbacks, pid_t pid,
-                                 uid_t uid)
-      : mCallbacks(callbacks), mPid(pid), mUid(uid) {}
+void MediaTranscoder::onHeartBeat(const MediaSampleWriter* writer __unused) {
+    // Signal heart-beat to the client.
+    mCallbacks->onHeartBeat(this);
+}
+
+MediaTranscoder::MediaTranscoder(const std::shared_ptr<CallbackInterface>& callbacks,
+                                 int64_t heartBeatIntervalUs, pid_t pid, uid_t uid)
+      : mCallbacks(callbacks), mHeartBeatIntervalUs(heartBeatIntervalUs), mPid(pid), mUid(uid) {}
 
 std::shared_ptr<MediaTranscoder> MediaTranscoder::create(
-        const std::shared_ptr<CallbackInterface>& callbacks, pid_t pid, uid_t uid,
-        const std::shared_ptr<ndk::ScopedAParcel>& pausedState) {
+        const std::shared_ptr<CallbackInterface>& callbacks, int64_t heartBeatIntervalUs, pid_t pid,
+        uid_t uid, const std::shared_ptr<ndk::ScopedAParcel>& pausedState) {
     if (pausedState != nullptr) {
         LOG(INFO) << "Initializing from paused state.";
     }
@@ -221,7 +245,8 @@
         return nullptr;
     }
 
-    return std::shared_ptr<MediaTranscoder>(new MediaTranscoder(callbacks, pid, uid));
+    return std::shared_ptr<MediaTranscoder>(
+            new MediaTranscoder(callbacks, heartBeatIntervalUs, pid, uid));
 }
 
 media_status_t MediaTranscoder::configureSource(int fd) {
@@ -264,7 +289,8 @@
     return trackFormats;
 }
 
-media_status_t MediaTranscoder::configureTrackFormat(size_t trackIndex, AMediaFormat* trackFormat) {
+media_status_t MediaTranscoder::configureTrackFormat(size_t trackIndex,
+                                                     AMediaFormat* destinationOptions) {
     if (mSampleReader == nullptr) {
         LOG(ERROR) << "Source must be configured before tracks";
         return AMEDIA_ERROR_INVALID_OPERATION;
@@ -275,14 +301,15 @@
     }
 
     std::shared_ptr<MediaTrackTranscoder> transcoder;
-    std::shared_ptr<AMediaFormat> format;
+    std::shared_ptr<AMediaFormat> trackFormat;
 
-    if (trackFormat == nullptr) {
+    if (destinationOptions == nullptr) {
         transcoder = std::make_shared<PassthroughTrackTranscoder>(shared_from_this());
     } else {
+        AMediaFormat* srcTrackFormat = mSourceTrackFormats[trackIndex].get();
+
         const char* srcMime = nullptr;
-        if (!AMediaFormat_getString(mSourceTrackFormats[trackIndex].get(), AMEDIAFORMAT_KEY_MIME,
-                                    &srcMime)) {
+        if (!AMediaFormat_getString(srcTrackFormat, AMEDIAFORMAT_KEY_MIME, &srcMime)) {
             LOG(ERROR) << "Source track #" << trackIndex << " has no mime type";
             return AMEDIA_ERROR_MALFORMED;
         }
@@ -295,7 +322,7 @@
         }
 
         const char* dstMime = nullptr;
-        if (AMediaFormat_getString(trackFormat, AMEDIAFORMAT_KEY_MIME, &dstMime)) {
+        if (AMediaFormat_getString(destinationOptions, AMEDIAFORMAT_KEY_MIME, &dstMime)) {
             if (strncmp(dstMime, "video/", 6) != 0) {
                 LOG(ERROR) << "Unable to convert media types for track #" << trackIndex << ", from "
                            << srcMime << " to " << dstMime;
@@ -305,14 +332,11 @@
 
         transcoder = VideoTrackTranscoder::create(shared_from_this(), mPid, mUid);
 
-        AMediaFormat* mergedFormat =
-                mergeMediaFormats(mSourceTrackFormats[trackIndex].get(), trackFormat);
-        if (mergedFormat == nullptr) {
-            LOG(ERROR) << "Unable to merge source and destination formats";
+        trackFormat = createVideoTrackFormat(srcTrackFormat, destinationOptions);
+        if (trackFormat == nullptr) {
+            LOG(ERROR) << "Unable to create video track format";
             return AMEDIA_ERROR_UNKNOWN;
         }
-
-        format = std::shared_ptr<AMediaFormat>(mergedFormat, &AMediaFormat_delete);
     }
 
     media_status_t status = mSampleReader->selectTrack(trackIndex);
@@ -321,7 +345,7 @@
         return status;
     }
 
-    status = transcoder->configure(mSampleReader, trackIndex, format);
+    status = transcoder->configure(mSampleReader, trackIndex, trackFormat);
     if (status != AMEDIA_OK) {
         LOG(ERROR) << "Configure track transcoder for track #" << trackIndex << " returned error "
                    << status;
@@ -348,7 +372,7 @@
     }
 
     mSampleWriter = MediaSampleWriter::Create();
-    const bool initOk = mSampleWriter->init(fd, shared_from_this());
+    const bool initOk = mSampleWriter->init(fd, shared_from_this(), mHeartBeatIntervalUs);
 
     if (!initOk) {
         LOG(ERROR) << "Unable to initialize sample writer with destination fd: " << fd;
diff --git a/media/libmediatranscoding/transcoder/NdkCommon.cpp b/media/libmediatranscoding/transcoder/NdkCommon.cpp
index f5c9594..2d85df7 100644
--- a/media/libmediatranscoding/transcoder/NdkCommon.cpp
+++ b/media/libmediatranscoding/transcoder/NdkCommon.cpp
@@ -60,19 +60,19 @@
 DEFINE_FORMAT_VALUE_COPY_FUNC(int32_t, Int32);
 DEFINE_FORMAT_VALUE_COPY_FUNC(float, Float);
 
-void CopyFormatEntries(AMediaFormat* from, AMediaFormat* to, const EntryCopier* entries,
-                       size_t entryCount) {
+void CopyFormatEntries(AMediaFormat* from, AMediaFormat* to,
+                       const std::vector<EntryCopier>& entries) {
     if (from == nullptr || to == nullptr) {
         LOG(ERROR) << "Cannot copy null formats";
         return;
-    } else if (entries == nullptr || entryCount < 1) {
+    } else if (entries.empty()) {
         LOG(WARNING) << "No entries to copy";
         return;
     }
 
-    for (size_t i = 0; i < entryCount; ++i) {
-        if (!entries[i].copy(entries[i].key, from, to) && entries[i].copy2 != nullptr) {
-            entries[i].copy2(entries[i].key, from, to);
+    for (auto& entry : entries) {
+        if (!entry.copy(entry.key, from, to) && entry.copy2 != nullptr) {
+            entry.copy2(entry.key, from, to);
         }
     }
 }
@@ -90,4 +90,29 @@
 DEFINE_SET_DEFAULT_FORMAT_VALUE_FUNC(float, Float);
 DEFINE_SET_DEFAULT_FORMAT_VALUE_FUNC(int32_t, Int32);
 
-}  // namespace AMediaFormatUtils
\ No newline at end of file
+// Determines whether a track format describes HDR video content or not. The
+// logic is based on isHdr() in libstagefright/Utils.cpp.
+bool VideoIsHdr(AMediaFormat* format) {
+    // If VUI signals HDR content, this internal flag is set by the extractor.
+    int32_t isHdr;
+    if (AMediaFormat_getInt32(format, "android._is-hdr", &isHdr)) {
+        return isHdr;
+    }
+
+    // If container supplied HDR static info without transfer set, assume HDR.
+    const char* hdrInfo;
+    int32_t transfer;
+    if ((AMediaFormat_getString(format, AMEDIAFORMAT_KEY_HDR_STATIC_INFO, &hdrInfo) ||
+         AMediaFormat_getString(format, "hdr10-plus-info", &hdrInfo)) &&
+        !AMediaFormat_getInt32(format, AMEDIAFORMAT_KEY_COLOR_TRANSFER, &transfer)) {
+        return true;
+    }
+
+    // Otherwise, check if an HDR transfer function is set.
+    if (AMediaFormat_getInt32(format, AMEDIAFORMAT_KEY_COLOR_TRANSFER, &transfer)) {
+        return transfer == COLOR_TRANSFER_ST2084 || transfer == COLOR_TRANSFER_HLG;
+    }
+
+    return false;
+}
+}  // namespace AMediaFormatUtils
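As a small usage sketch of the vector-based CopyFormatEntries() above: the helper below is hypothetical, but the keys, the Int32 value type and the ENTRY_COPIER macro are the same ones this patch uses when forwarding decoder color information to the encoder.

    // Copy color-related entries from one format to another, e.g. from a decoder
    // output format into an encoder parameter set.
    static void copyColorAspects(AMediaFormat* from, AMediaFormat* to) {
        using namespace AMediaFormatUtils;
        static const std::vector<EntryCopier> kColorEntries{
                ENTRY_COPIER(AMEDIAFORMAT_KEY_COLOR_RANGE, Int32),
                ENTRY_COPIER(AMEDIAFORMAT_KEY_COLOR_STANDARD, Int32),
                ENTRY_COPIER(AMEDIAFORMAT_KEY_COLOR_TRANSFER, Int32),
        };
        CopyFormatEntries(from, to, kColorEntries);
    }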
diff --git a/media/libmediatranscoding/transcoder/VideoTrackTranscoder.cpp b/media/libmediatranscoding/transcoder/VideoTrackTranscoder.cpp
index 04a513e..d56bec0 100644
--- a/media/libmediatranscoding/transcoder/VideoTrackTranscoder.cpp
+++ b/media/libmediatranscoding/transcoder/VideoTrackTranscoder.cpp
@@ -51,32 +51,6 @@
 // Default frame rate.
 static constexpr int32_t kDefaultFrameRate = 30;
 
-// Determines whether a track format describes HDR video content or not. The
-// logic is based on isHdr() in libstagefright/Utils.cpp.
-static bool isHdr(AMediaFormat* format) {
-    // If VUI signals HDR content, this internal flag is set by the extractor.
-    int32_t isHdr;
-    if (AMediaFormat_getInt32(format, "android._is-hdr", &isHdr)) {
-        return isHdr;
-    }
-
-    // If container supplied HDR static info without transfer set, assume HDR.
-    const char* hdrInfo;
-    int32_t transfer;
-    if ((AMediaFormat_getString(format, AMEDIAFORMAT_KEY_HDR_STATIC_INFO, &hdrInfo) ||
-         AMediaFormat_getString(format, "hdr10-plus-info", &hdrInfo)) &&
-        !AMediaFormat_getInt32(format, AMEDIAFORMAT_KEY_COLOR_TRANSFER, &transfer)) {
-        return true;
-    }
-
-    // Otherwise, check if an HDR transfer function is set.
-    if (AMediaFormat_getInt32(format, AMEDIAFORMAT_KEY_COLOR_TRANSFER, &transfer)) {
-        return transfer == COLOR_TRANSFER_ST2084 || transfer == COLOR_TRANSFER_HLG;
-    }
-
-    return false;
-}
-
 template <typename T>
 void VideoTrackTranscoder::BlockingQueue<T>::push(T const& value, bool front) {
     {
@@ -171,12 +145,12 @@
         VideoTrackTranscoder::CodecWrapper* wrapper =
                 static_cast<VideoTrackTranscoder::CodecWrapper*>(userdata);
         if (auto transcoder = wrapper->getTranscoder()) {
-            const char* kCodecName = (codec == transcoder->mDecoder ? "Decoder" : "Encoder");
-            LOG(DEBUG) << kCodecName << " format changed: " << AMediaFormat_toString(format);
-            if (codec == transcoder->mEncoder->getCodec()) {
-                transcoder->mCodecMessageQueue.push(
-                        [transcoder, format] { transcoder->updateTrackFormat(format); });
-            }
+            const bool isDecoder = codec == transcoder->mDecoder;
+            const char* kCodecName = (isDecoder ? "Decoder" : "Encoder");
+            LOG(INFO) << kCodecName << " format changed: " << AMediaFormat_toString(format);
+            transcoder->mCodecMessageQueue.push([transcoder, format, isDecoder] {
+                transcoder->updateTrackFormat(format, isDecoder);
+            });
         }
     }
 
@@ -246,16 +220,15 @@
         return AMEDIA_ERROR_INVALID_PARAMETER;
     }
 
-    int32_t bitrate;
-    if (!AMediaFormat_getInt32(encoderFormat, AMEDIAFORMAT_KEY_BIT_RATE, &bitrate)) {
-        status = mMediaSampleReader->getEstimatedBitrateForTrack(mTrackIndex, &bitrate);
+    if (!AMediaFormat_getInt32(encoderFormat, AMEDIAFORMAT_KEY_BIT_RATE, &mConfiguredBitrate)) {
+        status = mMediaSampleReader->getEstimatedBitrateForTrack(mTrackIndex, &mConfiguredBitrate);
         if (status != AMEDIA_OK) {
             LOG(ERROR) << "Unable to estimate bitrate. Using default " << kDefaultBitrateMbps;
-            bitrate = kDefaultBitrateMbps;
+            mConfiguredBitrate = kDefaultBitrateMbps;
         }
 
-        LOG(INFO) << "Configuring bitrate " << bitrate;
-        AMediaFormat_setInt32(encoderFormat, AMEDIAFORMAT_KEY_BIT_RATE, bitrate);
+        LOG(INFO) << "Configuring bitrate " << mConfiguredBitrate;
+        AMediaFormat_setInt32(encoderFormat, AMEDIAFORMAT_KEY_BIT_RATE, mConfiguredBitrate);
     }
 
     SetDefaultFormatValueFloat(AMEDIAFORMAT_KEY_I_FRAME_INTERVAL, encoderFormat,
@@ -306,7 +279,7 @@
     }
     mEncoder = std::make_shared<CodecWrapper>(encoder, shared_from_this());
 
-    LOG(DEBUG) << "Configuring encoder with: " << AMediaFormat_toString(mDestinationFormat.get());
+    LOG(INFO) << "Configuring encoder with: " << AMediaFormat_toString(mDestinationFormat.get());
     status = AMediaCodec_configure(mEncoder->getCodec(), mDestinationFormat.get(),
                                    NULL /* surface */, NULL /* crypto */,
                                    AMEDIACODEC_CONFIGURE_FLAG_ENCODE);
@@ -347,7 +320,7 @@
     }
 
     // Request decoder to convert HDR content to SDR.
-    const bool sourceIsHdr = isHdr(mSourceFormat.get());
+    const bool sourceIsHdr = VideoIsHdr(mSourceFormat.get());
     if (sourceIsHdr) {
         AMediaFormat_setInt32(decoderFormat.get(),
                               TBD_AMEDIACODEC_PARAMETER_KEY_COLOR_TRANSFER_REQUEST,
@@ -358,15 +331,13 @@
     AMediaFormat_setInt32(decoderFormat.get(), TBD_AMEDIACODEC_PARAMETER_KEY_ALLOW_FRAME_DROP, 0);
 
     // Copy over configurations that apply to both encoder and decoder.
-    static const EntryCopier kEncoderEntriesToCopy[] = {
+    static const std::vector<EntryCopier> kEncoderEntriesToCopy{
             ENTRY_COPIER2(AMEDIAFORMAT_KEY_OPERATING_RATE, Float, Int32),
             ENTRY_COPIER(AMEDIAFORMAT_KEY_PRIORITY, Int32),
     };
-    const size_t entryCount = sizeof(kEncoderEntriesToCopy) / sizeof(kEncoderEntriesToCopy[0]);
-    CopyFormatEntries(mDestinationFormat.get(), decoderFormat.get(), kEncoderEntriesToCopy,
-                      entryCount);
+    CopyFormatEntries(mDestinationFormat.get(), decoderFormat.get(), kEncoderEntriesToCopy);
 
-    LOG(DEBUG) << "Configuring decoder with: " << AMediaFormat_toString(decoderFormat.get());
+    LOG(INFO) << "Configuring decoder with: " << AMediaFormat_toString(decoderFormat.get());
     status = AMediaCodec_configure(mDecoder, decoderFormat.get(), mSurface, NULL /* crypto */,
                                    0 /* flags */);
     if (status != AMEDIA_OK) {
@@ -513,9 +484,6 @@
         onOutputSampleAvailable(sample);
 
         mLastSampleWasSync = sample->info.flags & SAMPLE_FLAG_SYNC_SAMPLE;
-    } else if (bufferIndex == AMEDIACODEC_INFO_OUTPUT_FORMAT_CHANGED) {
-        AMediaFormat* newFormat = AMediaCodec_getOutputFormat(mEncoder->getCodec());
-        LOG(DEBUG) << "Encoder output format changed: " << AMediaFormat_toString(newFormat);
     }
 
     if (bufferInfo.flags & AMEDIACODEC_BUFFER_FLAG_END_OF_STREAM) {
@@ -533,7 +501,24 @@
     }
 }
 
-void VideoTrackTranscoder::updateTrackFormat(AMediaFormat* outputFormat) {
+void VideoTrackTranscoder::updateTrackFormat(AMediaFormat* outputFormat, bool fromDecoder) {
+    if (fromDecoder) {
+        static const std::vector<AMediaFormatUtils::EntryCopier> kValuesToCopy{
+                ENTRY_COPIER(AMEDIAFORMAT_KEY_COLOR_RANGE, Int32),
+                ENTRY_COPIER(AMEDIAFORMAT_KEY_COLOR_STANDARD, Int32),
+                ENTRY_COPIER(AMEDIAFORMAT_KEY_COLOR_TRANSFER, Int32),
+        };
+        AMediaFormat* params = AMediaFormat_new();
+        if (params != nullptr) {
+            AMediaFormatUtils::CopyFormatEntries(outputFormat, params, kValuesToCopy);
+            if (AMediaCodec_setParameters(mEncoder->getCodec(), params) != AMEDIA_OK) {
+                LOG(WARNING) << "Unable to update encoder with color information";
+            }
+            AMediaFormat_delete(params);
+        }
+        return;
+    }
+
     if (mActualOutputFormat != nullptr) {
         LOG(WARNING) << "Ignoring duplicate format change.";
         return;
@@ -597,6 +582,7 @@
     // TODO: transfer other fields as required.
 
     mActualOutputFormat = std::shared_ptr<AMediaFormat>(formatCopy, &AMediaFormat_delete);
+    LOG(INFO) << "Actual output format: " << AMediaFormat_toString(formatCopy);
 
     notifyTrackFormatAvailable();
 }
diff --git a/media/libmediatranscoding/transcoder/benchmark/Android.bp b/media/libmediatranscoding/transcoder/benchmark/Android.bp
index 74f65b9..459f0ae 100644
--- a/media/libmediatranscoding/transcoder/benchmark/Android.bp
+++ b/media/libmediatranscoding/transcoder/benchmark/Android.bp
@@ -1,3 +1,12 @@
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
 cc_defaults {
     name: "benchmarkdefaults",
     shared_libs: [
diff --git a/media/libmediatranscoding/transcoder/benchmark/MediaTranscoderBenchmark.cpp b/media/libmediatranscoding/transcoder/benchmark/MediaTranscoderBenchmark.cpp
index 712f8fc..ac3b2c0 100644
--- a/media/libmediatranscoding/transcoder/benchmark/MediaTranscoderBenchmark.cpp
+++ b/media/libmediatranscoding/transcoder/benchmark/MediaTranscoderBenchmark.cpp
@@ -60,6 +60,8 @@
     virtual void onProgressUpdate(const MediaTranscoder* transcoder __unused,
                                   int32_t progress __unused) override {}
 
+    virtual void onHeartBeat(const MediaTranscoder* transcoder __unused) override {}
+
     virtual void onCodecResourceLost(const MediaTranscoder* transcoder __unused,
                                      const std::shared_ptr<ndk::ScopedAParcel>& pausedState
                                              __unused) override {}
diff --git a/media/libmediatranscoding/transcoder/include/media/MediaSampleWriter.h b/media/libmediatranscoding/transcoder/include/media/MediaSampleWriter.h
index 080f2b7..23a234b 100644
--- a/media/libmediatranscoding/transcoder/include/media/MediaSampleWriter.h
+++ b/media/libmediatranscoding/transcoder/include/media/MediaSampleWriter.h
@@ -90,6 +90,9 @@
         /** Sample writer progress update in percent. */
         virtual void onProgressUpdate(const MediaSampleWriter* writer, int32_t progress) = 0;
 
+        /** Sample writer heart-beat signal. */
+        virtual void onHeartBeat(const MediaSampleWriter* writer) = 0;
+
         virtual ~CallbackInterface() = default;
     };
 
@@ -101,18 +104,25 @@
      * @param fd An open file descriptor to write to. The caller is responsible for closing this
      *        file descriptor and it is safe to do so once this method returns.
      * @param callbacks Client callback object that gets called by the sample writer.
+     * @param heartBeatIntervalUs Interval (in microseconds) at which the sample writer should send
+     *        a heart-beat through onHeartBeat() to indicate that it is making progress. A value
+     *        <= 0 indicates that no heart-beat is required.
      * @return True if the writer was successfully initialized.
      */
-    bool init(int fd, const std::weak_ptr<CallbackInterface>& callbacks /* nonnull */);
+    bool init(int fd, const std::weak_ptr<CallbackInterface>& callbacks /* nonnull */,
+              int64_t heartBeatIntervalUs = -1);
 
     /**
      * Initializes the sample writer with a custom muxer interface implementation.
      * @param muxer The custom muxer interface implementation.
     * @param callbacks Client callback object that gets called by the sample writer.
+     * @param heartBeatIntervalUs Interval (in microseconds) at which the sample writer should send
+     *        a heart-beat through onHeartBeat() to indicate that it is making progress.
      * @return True if the writer was successfully initialized.
      */
     bool init(const std::shared_ptr<MediaSampleWriterMuxerInterface>& muxer /* nonnull */,
-              const std::weak_ptr<CallbackInterface>& callbacks /* nonnull */);
+              const std::weak_ptr<CallbackInterface>& callbacks /* nonnull */,
+              int64_t heartBeatIntervalUs = -1);
 
     /**
      * Adds a new track to the sample writer. Tracks must be added after the sample writer has been
@@ -185,6 +195,7 @@
 
     std::weak_ptr<CallbackInterface> mCallbacks;
     std::shared_ptr<MediaSampleWriterMuxerInterface> mMuxer;
+    int64_t mHeartBeatIntervalUs;
 
     std::mutex mMutex;  // Protects sample queue and state.
     std::condition_variable mSampleSignal;
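
A hedged sketch of how a MediaSampleWriter client can hook into the new heart-beat plumbing; the callback signatures are the ones declared in this header, while the writer and fd variables are placeholders:

    #include <atomic>
    #include <media/MediaSampleWriter.h>

    class CountingWriterCallbacks : public android::MediaSampleWriter::CallbackInterface {
    public:
        void onFinished(const android::MediaSampleWriter*, media_status_t) override {}
        void onStopped(const android::MediaSampleWriter*) override {}
        void onProgressUpdate(const android::MediaSampleWriter*, int32_t) override {}
        // Called roughly once per heartBeatIntervalUs while the writer is making progress.
        void onHeartBeat(const android::MediaSampleWriter*) override { ++mHeartBeatCount; }
        std::atomic<int32_t> mHeartBeatCount{0};
    };

    // auto callbacks = std::make_shared<CountingWriterCallbacks>();
    // writer->init(fd, callbacks, 1000000 /* heartBeatIntervalUs: one heart-beat per second */);
    // writer->init(fd, callbacks);  // the default of -1 disables heart-beats
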
diff --git a/media/libmediatranscoding/transcoder/include/media/MediaTranscoder.h b/media/libmediatranscoding/transcoder/include/media/MediaTranscoder.h
index 4e11ef5..8776dc9 100644
--- a/media/libmediatranscoding/transcoder/include/media/MediaTranscoder.h
+++ b/media/libmediatranscoding/transcoder/include/media/MediaTranscoder.h
@@ -50,6 +50,9 @@
         /** Transcoder progress update reported in percent from 0 to 100. */
         virtual void onProgressUpdate(const MediaTranscoder* transcoder, int32_t progress) = 0;
 
+        /** Transcoder heart-beat signal. */
+        virtual void onHeartBeat(const MediaTranscoder* transcoder) = 0;
+
         /**
          * Transcoder lost codec resources and paused operations. The client can resume transcoding
          * again when resources are available by either:
@@ -70,7 +73,7 @@
      * possible to change any configurations on a paused transcoder.
      */
     static std::shared_ptr<MediaTranscoder> create(
-            const std::shared_ptr<CallbackInterface>& callbacks,
+            const std::shared_ptr<CallbackInterface>& callbacks, int64_t heartBeatIntervalUs = -1,
             pid_t pid = AMEDIACODEC_CALLING_PID, uid_t uid = AMEDIACODEC_CALLING_UID,
             const std::shared_ptr<ndk::ScopedAParcel>& pausedState = nullptr);
 
@@ -120,7 +123,8 @@
     virtual ~MediaTranscoder() = default;
 
 private:
-    MediaTranscoder(const std::shared_ptr<CallbackInterface>& callbacks, pid_t pid, uid_t uid);
+    MediaTranscoder(const std::shared_ptr<CallbackInterface>& callbacks,
+                    int64_t heartBeatIntervalUs, pid_t pid, uid_t uid);
 
     // MediaTrackTranscoderCallback
     virtual void onTrackFormatAvailable(const MediaTrackTranscoder* transcoder) override;
@@ -134,6 +138,7 @@
     virtual void onFinished(const MediaSampleWriter* writer, media_status_t status) override;
     virtual void onStopped(const MediaSampleWriter* writer) override;
     virtual void onProgressUpdate(const MediaSampleWriter* writer, int32_t progress) override;
+    virtual void onHeartBeat(const MediaSampleWriter* writer) override;
     // ~MediaSampleWriter::CallbackInterface
 
     void onThreadFinished(const void* thread, media_status_t threadStatus, bool threadStopped);
@@ -147,6 +152,7 @@
     std::vector<std::shared_ptr<MediaTrackTranscoder>> mTrackTranscoders;
     std::mutex mTracksAddedMutex;
     std::unordered_set<const MediaTrackTranscoder*> mTracksAdded GUARDED_BY(mTracksAddedMutex);
+    int64_t mHeartBeatIntervalUs;
     pid_t mPid;
     uid_t mUid;
 
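
At the transcoder level the interval is simply threaded through create(); a sketch of the client side, mirroring how the tests later in this change exercise it (class and function names are illustrative):

    #include <atomic>
    #include <memory>
    #include <media/MediaTranscoder.h>

    class CountingCallbacks : public android::MediaTranscoder::CallbackInterface {
    public:
        void onFinished(const android::MediaTranscoder*) override {}
        void onError(const android::MediaTranscoder*, media_status_t) override {}
        void onProgressUpdate(const android::MediaTranscoder*, int32_t) override {}
        void onHeartBeat(const android::MediaTranscoder*) override { ++mHeartBeats; }
        void onCodecResourceLost(const android::MediaTranscoder*,
                                 const std::shared_ptr<ndk::ScopedAParcel>&) override {}
        std::atomic<int32_t> mHeartBeats{0};
    };

    std::shared_ptr<android::MediaTranscoder> createWithHeartBeat() {
        auto callbacks = std::make_shared<CountingCallbacks>();
        // Request a heart-beat every 500 ms; passing -1 (the default) disables them.
        return android::MediaTranscoder::create(callbacks, 500000 /* heartBeatIntervalUs */);
    }
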
diff --git a/media/libmediatranscoding/transcoder/include/media/NdkCommon.h b/media/libmediatranscoding/transcoder/include/media/NdkCommon.h
index cc3399a..c5547c6 100644
--- a/media/libmediatranscoding/transcoder/include/media/NdkCommon.h
+++ b/media/libmediatranscoding/transcoder/include/media/NdkCommon.h
@@ -19,6 +19,8 @@
 
 #include <media/NdkMediaFormat.h>
 
+#include <vector>
+
 extern const char* AMEDIA_MIMETYPE_VIDEO_VP8;
 extern const char* AMEDIA_MIMETYPE_VIDEO_VP9;
 extern const char* AMEDIA_MIMETYPE_VIDEO_AV1;
@@ -82,11 +84,13 @@
 bool CopyFormatEntryInt32(const char* key, AMediaFormat* from, AMediaFormat* to);
 bool CopyFormatEntryFloat(const char* key, AMediaFormat* from, AMediaFormat* to);
 
-void CopyFormatEntries(AMediaFormat* from, AMediaFormat* to, const EntryCopier* entries,
-                       size_t entryCount);
+void CopyFormatEntries(AMediaFormat* from, AMediaFormat* to,
+                       const std::vector<EntryCopier>& entries);
 
 bool SetDefaultFormatValueFloat(const char* key, AMediaFormat* format, float value);
 bool SetDefaultFormatValueInt32(const char* key, AMediaFormat* format, int32_t value);
 
+bool VideoIsHdr(AMediaFormat* format);
+
 }  // namespace AMediaFormatUtils
 #endif  // ANDROID_MEDIA_TRANSCODING_NDK_COMMON_H
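
With the vector-based helper the caller no longer passes an explicit entry count. A short sketch of the intended usage; the source and destination formats are placeholders:

    #include <media/NdkCommon.h>
    #include <vector>

    using namespace AMediaFormatUtils;

    static const std::vector<EntryCopier> kColorEntries{
            ENTRY_COPIER(AMEDIAFORMAT_KEY_COLOR_RANGE, Int32),
            ENTRY_COPIER(AMEDIAFORMAT_KEY_COLOR_STANDARD, Int32),
            ENTRY_COPIER(AMEDIAFORMAT_KEY_COLOR_TRANSFER, Int32),
    };

    void copyColorEntries(AMediaFormat* src, AMediaFormat* dst) {
        CopyFormatEntries(src, dst, kColorEntries);
        // VideoIsHdr(src) can then be consulted to decide whether an HDR-to-SDR
        // color-transfer request needs to be set on the decoder.
    }
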
diff --git a/media/libmediatranscoding/transcoder/include/media/VideoTrackTranscoder.h b/media/libmediatranscoding/transcoder/include/media/VideoTrackTranscoder.h
index 4413a6c..3e72882 100644
--- a/media/libmediatranscoding/transcoder/include/media/VideoTrackTranscoder.h
+++ b/media/libmediatranscoding/transcoder/include/media/VideoTrackTranscoder.h
@@ -45,6 +45,7 @@
 
 private:
     friend struct AsyncCodecCallbackDispatch;
+    friend class VideoTrackTranscoderTests;
 
     // Minimal blocking queue used as a message queue by VideoTrackTranscoder.
     template <typename T>
@@ -83,8 +84,8 @@
     // Dequeues an encoded buffer from the encoder and adds it to the output queue.
     void dequeueOutputSample(int32_t bufferIndex, AMediaCodecBufferInfo bufferInfo);
 
-    // Updates the video track's actual format based on encoder output format.
-    void updateTrackFormat(AMediaFormat* outputFormat);
+    // Updates the video track's actual format based on encoder and decoder output format.
+    void updateTrackFormat(AMediaFormat* outputFormat, bool fromDecoder);
 
     AMediaCodec* mDecoder = nullptr;
     std::shared_ptr<CodecWrapper> mEncoder;
@@ -101,6 +102,7 @@
     uid_t mUid;
     uint64_t mInputFrameCount = 0;
     uint64_t mOutputFrameCount = 0;
+    int32_t mConfiguredBitrate = 0;
 };
 
 }  // namespace android
diff --git a/media/libmediatranscoding/transcoder/setloglevel.sh b/media/libmediatranscoding/transcoder/setloglevel.sh
index 5eb7b67..b0f0a2e 100755
--- a/media/libmediatranscoding/transcoder/setloglevel.sh
+++ b/media/libmediatranscoding/transcoder/setloglevel.sh
@@ -2,19 +2,29 @@
 
 if [ $# -ne 1 ]
 then
-    echo Usage: $0 loglevel
+    echo "Usage 1: $0 <loglevel>"
+    echo "  Set all transcoder log tags to <loglevel>"
+    echo "Usage 2: $0 -l"
+    echo "  List all transcoder log tags and exit"
     exit 1
 fi
 
-level=$1
-echo Setting transcoder log level to $level
-
 # List all log tags
 declare -a tags=(
   MediaTranscoder MediaTrackTranscoder VideoTrackTranscoder PassthroughTrackTranscoder
   MediaSampleWriter MediaSampleReader MediaSampleQueue MediaTranscoderTests
   MediaTrackTranscoderTests VideoTrackTranscoderTests PassthroughTrackTranscoderTests
-  MediaSampleWriterTests MediaSampleReaderNDKTests MediaSampleQueueTests)
+  MediaSampleWriterTests MediaSampleReaderNDKTests MediaSampleQueueTests HdrTranscodeTests)
+
+if [ "$1" == "-l" ]; then
+  echo "Transcoder log tags:"
+  for tag in "${tags[@]}"; do echo -n "$tag "; done
+  echo
+  exit 0
+fi
+
+level=$1
+echo Setting transcoder log level to $level
 
 # Set log level for all tags
 for tag in "${tags[@]}"
diff --git a/media/libmediatranscoding/transcoder/tests/Android.bp b/media/libmediatranscoding/transcoder/tests/Android.bp
index d0ea802..11b19c9 100644
--- a/media/libmediatranscoding/transcoder/tests/Android.bp
+++ b/media/libmediatranscoding/transcoder/tests/Android.bp
@@ -1,4 +1,13 @@
 // Unit tests for libmediatranscoder.
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
 cc_defaults {
     name: "testdefaults",
 
@@ -80,6 +89,13 @@
     srcs: ["MediaSampleWriterTests.cpp"],
 }
 
+// HDR Transcode unit test
+cc_test {
+    name: "HdrTranscodeTests",
+    defaults: ["testdefaults"],
+    srcs: ["HdrTranscodeTests.cpp"],
+}
+
 // MediaTranscoder unit test
 cc_test {
     name: "MediaTranscoderTests",
diff --git a/media/libmediatranscoding/transcoder/tests/AndroidTestTemplate.xml b/media/libmediatranscoding/transcoder/tests/AndroidTestTemplate.xml
index e40a507..c3a0ced 100644
--- a/media/libmediatranscoding/transcoder/tests/AndroidTestTemplate.xml
+++ b/media/libmediatranscoding/transcoder/tests/AndroidTestTemplate.xml
@@ -24,7 +24,7 @@
     <test class="com.android.tradefed.testtype.GTest" >
         <option name="native-test-device-path" value="/data/local/tmp" />
         <option name="module-name" value="{MODULE}" />
-        <option name="native-test-timeout" value="10m" />
+        <option name="native-test-timeout" value="30m" />
     </test>
 </configuration>
 
diff --git a/media/libmediatranscoding/transcoder/tests/HdrTranscodeTests.cpp b/media/libmediatranscoding/transcoder/tests/HdrTranscodeTests.cpp
new file mode 100644
index 0000000..3a2882b
--- /dev/null
+++ b/media/libmediatranscoding/transcoder/tests/HdrTranscodeTests.cpp
@@ -0,0 +1,179 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// Unit Test for HDR to SDR transcoding.
+
+// #define LOG_NDEBUG 0
+#define LOG_TAG "HdrTranscodeTests"
+
+#include <android-base/logging.h>
+#include <android-base/properties.h>
+#include <android/binder_process.h>
+#include <fcntl.h>
+#include <gtest/gtest.h>
+#include <media/MediaSampleReaderNDK.h>
+#include <media/MediaTranscoder.h>
+#include <media/NdkCommon.h>
+
+#include "TranscoderTestUtils.h"
+
+namespace android {
+
+// Debug property to load the sample HDR plugin.
+static const std::string kLoadSamplePluginProperty{"debug.codec2.force-sample-plugin"};
+
+// SDR color standard, from MediaFormat.
+static constexpr int COLOR_STANDARD_BT709 = 1;
+
+class HdrTranscodeTests : public ::testing::Test {
+public:
+    HdrTranscodeTests() { LOG(DEBUG) << "HdrTranscodeTests created"; }
+    ~HdrTranscodeTests() { LOG(DEBUG) << "HdrTranscodeTests destroyed"; }
+
+    void SetUp() override {
+        LOG(DEBUG) << "HdrTranscodeTests set up";
+        mCallbacks = std::make_shared<TestTranscoderCallbacks>();
+        ABinderProcess_startThreadPool();
+    }
+
+    void TearDown() override {
+        LOG(DEBUG) << "HdrTranscodeTests tear down";
+        mCallbacks.reset();
+    }
+
+    media_status_t transcode(const char* srcFile, const char* dstFile, const char* dstMime) {
+        std::string srcPath = mSrcDir + srcFile;
+        std::string dstPath = mDstDir + dstFile;
+
+        auto transcoder = MediaTranscoder::create(mCallbacks, -1 /*heartBeatIntervalUs*/);
+        EXPECT_NE(transcoder, nullptr);
+
+        const int srcFd = open(srcPath.c_str(), O_RDONLY);
+        EXPECT_EQ(transcoder->configureSource(srcFd), AMEDIA_OK);
+        close(srcFd);
+
+        std::vector<std::shared_ptr<AMediaFormat>> trackFormats = transcoder->getTrackFormats();
+        EXPECT_GT(trackFormats.size(), 0);
+
+        for (int i = 0; i < trackFormats.size(); ++i) {
+            std::shared_ptr<AMediaFormat> format;
+            const char* mime = nullptr;
+
+            AMediaFormat_getString(trackFormats[i].get(), AMEDIAFORMAT_KEY_MIME, &mime);
+            if (strncmp(mime, "video/", 6) == 0) {
+                format = std::shared_ptr<AMediaFormat>(AMediaFormat_new(), &AMediaFormat_delete);
+                AMediaFormat_setString(format.get(), AMEDIAFORMAT_KEY_MIME, dstMime);
+            }
+
+            media_status_t status = transcoder->configureTrackFormat(i, format.get());
+            if (status != AMEDIA_OK) {
+                return status;
+            }
+        }
+
+        const int dstFd = open(dstPath.c_str(), O_WRONLY | O_CREAT, S_IRUSR | S_IWUSR);
+        EXPECT_EQ(transcoder->configureDestination(dstFd), AMEDIA_OK);
+        close(dstFd);
+
+        media_status_t startStatus = transcoder->start();
+        EXPECT_EQ(startStatus, AMEDIA_OK);
+        if (startStatus != AMEDIA_OK) {
+            return startStatus;
+        }
+
+        mCallbacks->waitForTranscodingFinished();
+        return mCallbacks->mStatus;
+    }
+
+    media_status_t validateOutput(const char* dstFile __unused) {
+        std::string path = mDstDir + dstFile;
+
+        auto format = TranscoderTestUtils::GetVideoFormat(path);
+        EXPECT_NE(format.get(), nullptr);
+
+        int32_t value;
+        EXPECT_TRUE(AMediaFormat_getInt32(format.get(), AMEDIAFORMAT_KEY_COLOR_STANDARD, &value));
+        EXPECT_EQ(value, COLOR_STANDARD_BT709);
+
+        EXPECT_TRUE(AMediaFormat_getInt32(format.get(), AMEDIAFORMAT_KEY_COLOR_TRANSFER, &value));
+        EXPECT_EQ(value, COLOR_TRANSFER_SDR_VIDEO);
+
+        // TODO(lnilsson): Validate decoded pixels as well, either by comparing similarity against a
+        //  corresponding known-good "golden master" SDR video, or by looking at the histogram.
+        return AMEDIA_OK;
+    }
+
+    bool hdrToSdrConversionSupported(const char* hdrFile) {
+        std::string srcPath = mSrcDir + hdrFile;
+
+        std::string mime;
+        auto format = TranscoderTestUtils::GetVideoFormat(srcPath, &mime);
+        EXPECT_NE(format.get(), nullptr);
+
+        AMediaCodec* decoder = AMediaCodec_createDecoderByType(mime.c_str());
+        EXPECT_NE(decoder, nullptr);
+
+        AMediaFormat_setInt32(format.get(), TBD_AMEDIACODEC_PARAMETER_KEY_COLOR_TRANSFER_REQUEST,
+                              COLOR_TRANSFER_SDR_VIDEO);
+
+        EXPECT_EQ(AMediaCodec_configure(decoder, format.get(), nullptr /*surface*/,
+                                        nullptr /*crypto*/, 0 /*flags*/),
+                  AMEDIA_OK);
+
+        AMediaFormat* inputFormat = AMediaCodec_getInputFormat(decoder);
+        EXPECT_NE(inputFormat, nullptr);
+
+        int32_t transferFunc;
+        bool conversionSupported =
+                AMediaFormat_getInt32(inputFormat,
+                                      TBD_AMEDIACODEC_PARAMETER_KEY_COLOR_TRANSFER_REQUEST,
+                                      &transferFunc) &&
+                transferFunc == COLOR_TRANSFER_SDR_VIDEO;
+
+        AMediaFormat_delete(inputFormat);
+        AMediaCodec_delete(decoder);
+
+        return conversionSupported;
+    }
+
+    std::shared_ptr<TestTranscoderCallbacks> mCallbacks;
+    const std::string mSrcDir{"/data/local/tmp/TranscodingTestAssets/"};
+    const std::string mDstDir{"/data/local/tmp/"};
+};
+
+TEST_F(HdrTranscodeTests, TestHdrSamplePluginTranscode) {
+    const char* hdrFile = "video_1280x720_hevc_hdr10_static_3mbps.mp4";
+    const char* dstFile = "video_1280x720_hevc_hdr10_static_3mbps_transcoded.mp4";
+
+    EXPECT_TRUE(android::base::SetProperty(kLoadSamplePluginProperty, "true"));
+
+    if (hdrToSdrConversionSupported(hdrFile)) {
+        LOG(INFO) << "HDR -> SDR supported, validating output..";
+        EXPECT_EQ(transcode(hdrFile, dstFile, AMEDIA_MIMETYPE_VIDEO_AVC), AMEDIA_OK);
+        EXPECT_EQ(validateOutput(dstFile), AMEDIA_OK);
+    } else {
+        LOG(INFO) << "HDR -> SDR *not* supported";
+        EXPECT_EQ(transcode(hdrFile, dstFile, AMEDIA_MIMETYPE_VIDEO_AVC), AMEDIA_ERROR_UNSUPPORTED);
+    }
+
+    EXPECT_TRUE(android::base::SetProperty(kLoadSamplePluginProperty, "false"));
+}
+}  // namespace android
+
+int main(int argc, char** argv) {
+    ::testing::InitGoogleTest(&argc, argv);
+    return RUN_ALL_TESTS();
+}
diff --git a/media/libmediatranscoding/transcoder/tests/MediaSampleWriterTests.cpp b/media/libmediatranscoding/transcoder/tests/MediaSampleWriterTests.cpp
index 0a41b00..8b3905c 100644
--- a/media/libmediatranscoding/transcoder/tests/MediaSampleWriterTests.cpp
+++ b/media/libmediatranscoding/transcoder/tests/MediaSampleWriterTests.cpp
@@ -210,6 +210,8 @@
         mLastProgress = progress;
         mProgressUpdateCount++;
     }
+
+    virtual void onHeartBeat(const MediaSampleWriter* writer __unused) override {}
     // ~MediaSampleWriter::CallbackInterface
 
     void waitForWritingFinished() {
diff --git a/media/libmediatranscoding/transcoder/tests/MediaTrackTranscoderTests.cpp b/media/libmediatranscoding/transcoder/tests/MediaTrackTranscoderTests.cpp
index 21f0b86..791e983 100644
--- a/media/libmediatranscoding/transcoder/tests/MediaTrackTranscoderTests.cpp
+++ b/media/libmediatranscoding/transcoder/tests/MediaTrackTranscoderTests.cpp
@@ -28,7 +28,7 @@
 #include <media/PassthroughTrackTranscoder.h>
 #include <media/VideoTrackTranscoder.h>
 
-#include "TrackTranscoderTestUtils.h"
+#include "TranscoderTestUtils.h"
 
 namespace android {
 
@@ -49,7 +49,7 @@
         // (b/155663561).
         ABinderProcess_startThreadPool();
 
-        mCallback = std::make_shared<TestCallback>();
+        mCallback = std::make_shared<TestTrackTranscoderCallback>();
 
         switch (GetParam()) {
         case VIDEO:
@@ -134,7 +134,7 @@
 
 protected:
     std::shared_ptr<MediaTrackTranscoder> mTranscoder;
-    std::shared_ptr<TestCallback> mCallback;
+    std::shared_ptr<TestTrackTranscoderCallback> mCallback;
 
     std::shared_ptr<MediaSampleReader> mMediaSampleReader;
     int mTrackIndex;
diff --git a/media/libmediatranscoding/transcoder/tests/MediaTranscoderTests.cpp b/media/libmediatranscoding/transcoder/tests/MediaTranscoderTests.cpp
index 54d8b89..b7c7bd8 100644
--- a/media/libmediatranscoding/transcoder/tests/MediaTranscoderTests.cpp
+++ b/media/libmediatranscoding/transcoder/tests/MediaTranscoderTests.cpp
@@ -27,6 +27,8 @@
 #include <media/MediaTranscoder.h>
 #include <media/NdkCommon.h>
 
+#include "TranscoderTestUtils.h"
+
 namespace android {
 
 #define DEFINE_FORMAT_VALUE_EQUAL_FUNC(_type, _typeName)                                  \
@@ -53,60 +55,6 @@
         {AMEDIAFORMAT_KEY_SAR_HEIGHT, equalInt32},     {AMEDIAFORMAT_KEY_ROTATION, equalInt32},
 };
 
-class TestCallbacks : public MediaTranscoder::CallbackInterface {
-public:
-    virtual void onFinished(const MediaTranscoder* transcoder __unused) override {
-        std::unique_lock<std::mutex> lock(mMutex);
-        EXPECT_FALSE(mFinished);
-        mFinished = true;
-        mCondition.notify_all();
-    }
-
-    virtual void onError(const MediaTranscoder* transcoder __unused,
-                         media_status_t error) override {
-        std::unique_lock<std::mutex> lock(mMutex);
-        EXPECT_NE(error, AMEDIA_OK);
-        EXPECT_FALSE(mFinished);
-        mFinished = true;
-        mStatus = error;
-        mCondition.notify_all();
-    }
-
-    virtual void onProgressUpdate(const MediaTranscoder* transcoder __unused,
-                                  int32_t progress) override {
-        std::unique_lock<std::mutex> lock(mMutex);
-        if (progress > 0 && !mProgressMade) {
-            mProgressMade = true;
-            mCondition.notify_all();
-        }
-    }
-
-    virtual void onCodecResourceLost(const MediaTranscoder* transcoder __unused,
-                                     const std::shared_ptr<ndk::ScopedAParcel>& pausedState
-                                             __unused) override {}
-
-    void waitForTranscodingFinished() {
-        std::unique_lock<std::mutex> lock(mMutex);
-        while (!mFinished) {
-            mCondition.wait(lock);
-        }
-    }
-
-    void waitForProgressMade() {
-        std::unique_lock<std::mutex> lock(mMutex);
-        while (!mProgressMade && !mFinished) {
-            mCondition.wait(lock);
-        }
-    }
-    media_status_t mStatus = AMEDIA_OK;
-    bool mFinished = false;
-
-private:
-    std::mutex mMutex;
-    std::condition_variable mCondition;
-    bool mProgressMade = false;
-};
-
 // Write-only, create file if non-existent, don't overwrite existing file.
 static constexpr int kOpenFlags = O_WRONLY | O_CREAT | O_EXCL;
 // User R+W permission.
@@ -119,7 +67,7 @@
 
     void SetUp() override {
         LOG(DEBUG) << "MediaTranscoderTests set up";
-        mCallbacks = std::make_shared<TestCallbacks>();
+        mCallbacks = std::make_shared<TestTranscoderCallbacks>();
         ABinderProcess_startThreadPool();
     }
 
@@ -143,6 +91,7 @@
 
     typedef enum {
         kRunToCompletion,
+        kCheckHeartBeat,
         kCancelAfterProgress,
         kCancelAfterStart,
         kPauseAfterProgress,
@@ -152,8 +101,9 @@
     using FormatConfigurationCallback = std::function<AMediaFormat*(AMediaFormat*)>;
     media_status_t transcodeHelper(const char* srcPath, const char* destPath,
                                    FormatConfigurationCallback formatCallback,
-                                   TranscodeExecutionControl executionControl = kRunToCompletion) {
-        auto transcoder = MediaTranscoder::create(mCallbacks);
+                                   TranscodeExecutionControl executionControl = kRunToCompletion,
+                                   int64_t heartBeatIntervalUs = -1) {
+        auto transcoder = MediaTranscoder::create(mCallbacks, heartBeatIntervalUs);
         EXPECT_NE(transcoder, nullptr);
 
         const int srcFd = open(srcPath, O_RDONLY);
@@ -200,6 +150,18 @@
             case kPauseAfterStart:
                 transcoder->pause(&pausedState);
                 break;
+            case kCheckHeartBeat: {
+                mCallbacks->waitForProgressMade();
+                auto startTime = std::chrono::system_clock::now();
+                mCallbacks->waitForTranscodingFinished();
+                auto finishTime = std::chrono::system_clock::now();
+                int32_t expectedCount =
+                        (finishTime - startTime) / std::chrono::microseconds(heartBeatIntervalUs);
+                // Relax the expected count by 1 in case the last heart-beat just missed the
+                // window; other than that the count should be exact.
+                EXPECT_GE(mCallbacks->mHeartBeatCount, expectedCount - 1);
+                break;
+            }
             case kRunToCompletion:
             default:
                 mCallbacks->waitForTranscodingFinished();
@@ -305,7 +267,7 @@
         close(dstFd);
     }
 
-    std::shared_ptr<TestCallbacks> mCallbacks;
+    std::shared_ptr<TestTranscoderCallbacks> mCallbacks;
     std::shared_ptr<AMediaFormat> mSourceVideoFormat;
 };
 
@@ -360,7 +322,7 @@
     const char* destPath1 = "/data/local/tmp/MediaTranscoder_CustomBitrate_2Mbps.MP4";
     const char* destPath2 = "/data/local/tmp/MediaTranscoder_CustomBitrate_8Mbps.MP4";
     testTranscodeVideo(srcPath, destPath1, AMEDIA_MIMETYPE_VIDEO_AVC, 2 * 1000 * 1000);
-    mCallbacks = std::make_shared<TestCallbacks>();
+    mCallbacks = std::make_shared<TestTranscoderCallbacks>();
     testTranscodeVideo(srcPath, destPath2, AMEDIA_MIMETYPE_VIDEO_AVC, 8 * 1000 * 1000);
 
     // The source asset is very short and heavily compressed from the beginning so don't expect the
@@ -390,7 +352,7 @@
         EXPECT_EQ(transcodeHelper(srcPath, destPath, getAVCVideoFormat, kCancelAfterProgress),
                   AMEDIA_OK);
         EXPECT_FALSE(mCallbacks->mFinished);
-        mCallbacks = std::make_shared<TestCallbacks>();
+        mCallbacks = std::make_shared<TestTranscoderCallbacks>();
     }
 }
 
@@ -402,7 +364,7 @@
         EXPECT_EQ(transcodeHelper(srcPath, destPath, getAVCVideoFormat, kCancelAfterStart),
                   AMEDIA_OK);
         EXPECT_FALSE(mCallbacks->mFinished);
-        mCallbacks = std::make_shared<TestCallbacks>();
+        mCallbacks = std::make_shared<TestTranscoderCallbacks>();
     }
 }
 
@@ -414,7 +376,7 @@
         EXPECT_EQ(transcodeHelper(srcPath, destPath, getAVCVideoFormat, kPauseAfterProgress),
                   AMEDIA_OK);
         EXPECT_FALSE(mCallbacks->mFinished);
-        mCallbacks = std::make_shared<TestCallbacks>();
+        mCallbacks = std::make_shared<TestTranscoderCallbacks>();
     }
 }
 
@@ -426,10 +388,22 @@
         EXPECT_EQ(transcodeHelper(srcPath, destPath, getAVCVideoFormat, kPauseAfterStart),
                   AMEDIA_OK);
         EXPECT_FALSE(mCallbacks->mFinished);
-        mCallbacks = std::make_shared<TestCallbacks>();
+        mCallbacks = std::make_shared<TestTranscoderCallbacks>();
     }
 }
 
+TEST_F(MediaTranscoderTests, TestHeartBeat) {
+    const char* srcPath = "/data/local/tmp/TranscodingTestAssets/longtest_15s.mp4";
+    const char* destPath = "/data/local/tmp/MediaTranscoder_HeartBeat.MP4";
+
+    // Use a shorter interval (500ms) than the default 1000ms to get more heart-beats for testing.
+    const int64_t heartBeatIntervalUs = 500000LL;
+    EXPECT_EQ(transcodeHelper(srcPath, destPath, getAVCVideoFormat, kCheckHeartBeat,
+                              heartBeatIntervalUs),
+              AMEDIA_OK);
+    EXPECT_TRUE(mCallbacks->mFinished);
+}
+
 }  // namespace android
 
 int main(int argc, char** argv) {
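
For a rough sense of the numbers behind kCheckHeartBeat: with the 500 ms interval used by TestHeartBeat and an assumed wall-clock transcode time of about 15 seconds (illustrative value only), the expectation works out as:

    int64_t elapsedUs = 15000000;           // assumed duration of the transcode
    int64_t heartBeatIntervalUs = 500000;   // interval requested by TestHeartBeat
    int32_t expectedCount = elapsedUs / heartBeatIntervalUs;  // 30
    // The test then accepts mHeartBeatCount >= expectedCount - 1 (29 here), tolerating one
    // heart-beat that lands just outside the measured window.
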
diff --git a/media/libmediatranscoding/transcoder/tests/PassthroughTrackTranscoderTests.cpp b/media/libmediatranscoding/transcoder/tests/PassthroughTrackTranscoderTests.cpp
index 5071efd..fdbf535 100644
--- a/media/libmediatranscoding/transcoder/tests/PassthroughTrackTranscoderTests.cpp
+++ b/media/libmediatranscoding/transcoder/tests/PassthroughTrackTranscoderTests.cpp
@@ -29,7 +29,7 @@
 
 #include <vector>
 
-#include "TrackTranscoderTestUtils.h"
+#include "TranscoderTestUtils.h"
 
 namespace android {
 
@@ -152,7 +152,7 @@
     }
 
     // Create and start the transcoder.
-    std::shared_ptr<TestCallback> callback = std::make_shared<TestCallback>();
+    auto callback = std::make_shared<TestTrackTranscoderCallback>();
     PassthroughTrackTranscoder transcoder{callback};
 
     std::shared_ptr<MediaSampleReader> mediaSampleReader =
diff --git a/media/libmediatranscoding/transcoder/tests/TrackTranscoderTestUtils.h b/media/libmediatranscoding/transcoder/tests/TrackTranscoderTestUtils.h
deleted file mode 100644
index a782f71..0000000
--- a/media/libmediatranscoding/transcoder/tests/TrackTranscoderTestUtils.h
+++ /dev/null
@@ -1,133 +0,0 @@
-/*
- * Copyright (C) 2020 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include <media/MediaTrackTranscoder.h>
-#include <media/MediaTrackTranscoderCallback.h>
-
-#include <condition_variable>
-#include <memory>
-#include <mutex>
-
-namespace android {
-
-//
-// This file contains test utilities used by more than one track transcoder test.
-//
-
-class TrackTranscoderTestUtils {
-public:
-    static std::shared_ptr<AMediaFormat> getDefaultVideoDestinationFormat(
-            AMediaFormat* sourceFormat, bool includeBitrate = true) {
-        // Default video destination format setup.
-        static constexpr float kFrameRate = 30.0f;
-        static constexpr int32_t kBitRate = 2 * 1000 * 1000;
-
-        AMediaFormat* destinationFormat = AMediaFormat_new();
-        AMediaFormat_copy(destinationFormat, sourceFormat);
-        AMediaFormat_setFloat(destinationFormat, AMEDIAFORMAT_KEY_FRAME_RATE, kFrameRate);
-        if (includeBitrate) {
-            AMediaFormat_setInt32(destinationFormat, AMEDIAFORMAT_KEY_BIT_RATE, kBitRate);
-        }
-
-        return std::shared_ptr<AMediaFormat>(destinationFormat, &AMediaFormat_delete);
-    }
-};
-
-class TestCallback : public MediaTrackTranscoderCallback {
-public:
-    TestCallback() = default;
-    ~TestCallback() = default;
-
-    // MediaTrackTranscoderCallback
-    void onTrackFormatAvailable(const MediaTrackTranscoder* transcoder __unused) {
-        std::unique_lock<std::mutex> lock(mMutex);
-        mTrackFormatAvailable = true;
-        mTrackFormatAvailableCondition.notify_all();
-    }
-
-    void onTrackFinished(const MediaTrackTranscoder* transcoder __unused) {
-        std::unique_lock<std::mutex> lock(mMutex);
-        mTranscodingFinished = true;
-        mTranscodingFinishedCondition.notify_all();
-    }
-
-    virtual void onTrackStopped(const MediaTrackTranscoder* transcoder __unused) override {
-        std::unique_lock<std::mutex> lock(mMutex);
-        mTranscodingFinished = true;
-        mTranscodingStopped = true;
-        mTranscodingFinishedCondition.notify_all();
-    }
-
-    void onTrackError(const MediaTrackTranscoder* transcoder __unused, media_status_t status) {
-        std::unique_lock<std::mutex> lock(mMutex);
-        mTranscodingFinished = true;
-        mStatus = status;
-        mTranscodingFinishedCondition.notify_all();
-    }
-    // ~MediaTrackTranscoderCallback
-
-    media_status_t waitUntilFinished() {
-        std::unique_lock<std::mutex> lock(mMutex);
-        while (!mTranscodingFinished) {
-            mTranscodingFinishedCondition.wait(lock);
-        }
-        return mStatus;
-    }
-
-    void waitUntilTrackFormatAvailable() {
-        std::unique_lock<std::mutex> lock(mMutex);
-        while (!mTrackFormatAvailable) {
-            mTrackFormatAvailableCondition.wait(lock);
-        }
-    }
-
-    bool transcodingWasStopped() const { return mTranscodingFinished && mTranscodingStopped; }
-    bool transcodingFinished() const {
-        return mTranscodingFinished && !mTranscodingStopped && mStatus == AMEDIA_OK;
-    }
-
-private:
-    media_status_t mStatus = AMEDIA_OK;
-    std::mutex mMutex;
-    std::condition_variable mTranscodingFinishedCondition;
-    std::condition_variable mTrackFormatAvailableCondition;
-    bool mTranscodingFinished = false;
-    bool mTranscodingStopped = false;
-    bool mTrackFormatAvailable = false;
-};
-
-class OneShotSemaphore {
-public:
-    void wait() {
-        std::unique_lock<std::mutex> lock(mMutex);
-        while (!mSignaled) {
-            mCondition.wait(lock);
-        }
-    }
-
-    void signal() {
-        std::unique_lock<std::mutex> lock(mMutex);
-        mSignaled = true;
-        mCondition.notify_all();
-    }
-
-private:
-    std::mutex mMutex;
-    std::condition_variable mCondition;
-    bool mSignaled = false;
-};
-
-};  // namespace android
diff --git a/media/libmediatranscoding/transcoder/tests/TranscoderTestUtils.h b/media/libmediatranscoding/transcoder/tests/TranscoderTestUtils.h
new file mode 100644
index 0000000..35fe25b
--- /dev/null
+++ b/media/libmediatranscoding/transcoder/tests/TranscoderTestUtils.h
@@ -0,0 +1,225 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <media/MediaTrackTranscoder.h>
+#include <media/MediaTrackTranscoderCallback.h>
+#include <media/MediaTranscoder.h>
+
+#include <condition_variable>
+#include <memory>
+#include <mutex>
+
+namespace android {
+
+//
+// This file contains transcoding test utilities.
+//
+
+namespace TranscoderTestUtils {
+
+std::shared_ptr<AMediaFormat> GetVideoFormat(const std::string& path,
+                                             std::string* mimeOut = nullptr) {
+    int fd = open(path.c_str(), O_RDONLY);
+    EXPECT_GT(fd, 0);
+    ssize_t fileSize = lseek(fd, 0, SEEK_END);
+    lseek(fd, 0, SEEK_SET);
+
+    auto sampleReader = MediaSampleReaderNDK::createFromFd(fd, 0, fileSize);
+    EXPECT_NE(sampleReader, nullptr);
+
+    for (size_t i = 0; i < sampleReader->getTrackCount(); ++i) {
+        AMediaFormat* format = sampleReader->getTrackFormat(i);
+
+        const char* mime = nullptr;
+        AMediaFormat_getString(format, AMEDIAFORMAT_KEY_MIME, &mime);
+        if (strncmp(mime, "video/", 6) == 0) {
+            if (mimeOut != nullptr) {
+                mimeOut->assign(mime);
+            }
+            return std::shared_ptr<AMediaFormat>(format, &AMediaFormat_delete);
+        }
+
+        AMediaFormat_delete(format);
+    }
+    return nullptr;
+}
+
+};  // namespace TranscoderTestUtils
+
+class TrackTranscoderTestUtils {
+public:
+    static std::shared_ptr<AMediaFormat> getDefaultVideoDestinationFormat(
+            AMediaFormat* sourceFormat, bool includeBitrate = true) {
+        // Default video destination format setup.
+        static constexpr float kFrameRate = 30.0f;
+        static constexpr int32_t kBitRate = 2 * 1000 * 1000;
+
+        AMediaFormat* destinationFormat = AMediaFormat_new();
+        AMediaFormat_copy(destinationFormat, sourceFormat);
+        AMediaFormat_setFloat(destinationFormat, AMEDIAFORMAT_KEY_FRAME_RATE, kFrameRate);
+        if (includeBitrate) {
+            AMediaFormat_setInt32(destinationFormat, AMEDIAFORMAT_KEY_BIT_RATE, kBitRate);
+        }
+
+        return std::shared_ptr<AMediaFormat>(destinationFormat, &AMediaFormat_delete);
+    }
+};
+
+class TestTrackTranscoderCallback : public MediaTrackTranscoderCallback {
+public:
+    TestTrackTranscoderCallback() = default;
+    ~TestTrackTranscoderCallback() = default;
+
+    // MediaTrackTranscoderCallback
+    void onTrackFormatAvailable(const MediaTrackTranscoder* transcoder __unused) {
+        std::unique_lock<std::mutex> lock(mMutex);
+        mTrackFormatAvailable = true;
+        mTrackFormatAvailableCondition.notify_all();
+    }
+
+    void onTrackFinished(const MediaTrackTranscoder* transcoder __unused) {
+        std::unique_lock<std::mutex> lock(mMutex);
+        mTranscodingFinished = true;
+        mTranscodingFinishedCondition.notify_all();
+    }
+
+    virtual void onTrackStopped(const MediaTrackTranscoder* transcoder __unused) override {
+        std::unique_lock<std::mutex> lock(mMutex);
+        mTranscodingFinished = true;
+        mTranscodingStopped = true;
+        mTranscodingFinishedCondition.notify_all();
+    }
+
+    void onTrackError(const MediaTrackTranscoder* transcoder __unused, media_status_t status) {
+        std::unique_lock<std::mutex> lock(mMutex);
+        mTranscodingFinished = true;
+        mStatus = status;
+        mTranscodingFinishedCondition.notify_all();
+    }
+    // ~MediaTrackTranscoderCallback
+
+    media_status_t waitUntilFinished() {
+        std::unique_lock<std::mutex> lock(mMutex);
+        while (!mTranscodingFinished) {
+            mTranscodingFinishedCondition.wait(lock);
+        }
+        return mStatus;
+    }
+
+    void waitUntilTrackFormatAvailable() {
+        std::unique_lock<std::mutex> lock(mMutex);
+        while (!mTrackFormatAvailable) {
+            mTrackFormatAvailableCondition.wait(lock);
+        }
+    }
+
+    bool transcodingWasStopped() const { return mTranscodingFinished && mTranscodingStopped; }
+    bool transcodingFinished() const {
+        return mTranscodingFinished && !mTranscodingStopped && mStatus == AMEDIA_OK;
+    }
+
+private:
+    media_status_t mStatus = AMEDIA_OK;
+    std::mutex mMutex;
+    std::condition_variable mTranscodingFinishedCondition;
+    std::condition_variable mTrackFormatAvailableCondition;
+    bool mTranscodingFinished = false;
+    bool mTranscodingStopped = false;
+    bool mTrackFormatAvailable = false;
+};
+
+class TestTranscoderCallbacks : public MediaTranscoder::CallbackInterface {
+public:
+    virtual void onFinished(const MediaTranscoder* transcoder __unused) override {
+        std::unique_lock<std::mutex> lock(mMutex);
+        EXPECT_FALSE(mFinished);
+        mFinished = true;
+        mCondition.notify_all();
+    }
+
+    virtual void onError(const MediaTranscoder* transcoder __unused,
+                         media_status_t error) override {
+        std::unique_lock<std::mutex> lock(mMutex);
+        EXPECT_NE(error, AMEDIA_OK);
+        EXPECT_FALSE(mFinished);
+        mFinished = true;
+        mStatus = error;
+        mCondition.notify_all();
+    }
+
+    virtual void onProgressUpdate(const MediaTranscoder* transcoder __unused,
+                                  int32_t progress) override {
+        std::unique_lock<std::mutex> lock(mMutex);
+        if (progress > 0 && !mProgressMade) {
+            mProgressMade = true;
+            mCondition.notify_all();
+        }
+    }
+
+    virtual void onHeartBeat(const MediaTranscoder* transcoder __unused) override {
+        std::unique_lock<std::mutex> lock(mMutex);
+        mHeartBeatCount++;
+    }
+
+    virtual void onCodecResourceLost(const MediaTranscoder* transcoder __unused,
+                                     const std::shared_ptr<ndk::ScopedAParcel>& pausedState
+                                             __unused) override {}
+
+    void waitForTranscodingFinished() {
+        std::unique_lock<std::mutex> lock(mMutex);
+        while (!mFinished) {
+            mCondition.wait(lock);
+        }
+    }
+
+    void waitForProgressMade() {
+        std::unique_lock<std::mutex> lock(mMutex);
+        while (!mProgressMade && !mFinished) {
+            mCondition.wait(lock);
+        }
+    }
+    media_status_t mStatus = AMEDIA_OK;
+    bool mFinished = false;
+    int32_t mHeartBeatCount = 0;
+
+private:
+    std::mutex mMutex;
+    std::condition_variable mCondition;
+    bool mProgressMade = false;
+};
+
+class OneShotSemaphore {
+public:
+    void wait() {
+        std::unique_lock<std::mutex> lock(mMutex);
+        while (!mSignaled) {
+            mCondition.wait(lock);
+        }
+    }
+
+    void signal() {
+        std::unique_lock<std::mutex> lock(mMutex);
+        mSignaled = true;
+        mCondition.notify_all();
+    }
+
+private:
+    std::mutex mMutex;
+    std::condition_variable mCondition;
+    bool mSignaled = false;
+};
+
+};  // namespace android
diff --git a/media/libmediatranscoding/transcoder/tests/VideoTrackTranscoderTests.cpp b/media/libmediatranscoding/transcoder/tests/VideoTrackTranscoderTests.cpp
index 4ede97f..88c3fd3 100644
--- a/media/libmediatranscoding/transcoder/tests/VideoTrackTranscoderTests.cpp
+++ b/media/libmediatranscoding/transcoder/tests/VideoTrackTranscoderTests.cpp
@@ -27,7 +27,7 @@
 #include <media/VideoTrackTranscoder.h>
 #include <utils/Timers.h>
 
-#include "TrackTranscoderTestUtils.h"
+#include "TranscoderTestUtils.h"
 
 namespace android {
 
@@ -86,6 +86,10 @@
 
     ~VideoTrackTranscoderTests() { LOG(DEBUG) << "VideoTrackTranscoderTests destroyed"; }
 
+    static int32_t getConfiguredBitrate(const std::shared_ptr<VideoTrackTranscoder>& transcoder) {
+        return transcoder->mConfiguredBitrate;
+    }
+
     std::shared_ptr<MediaSampleReader> mMediaSampleReader;
     int mTrackIndex;
     std::shared_ptr<AMediaFormat> mSourceFormat;
@@ -94,7 +98,7 @@
 
 TEST_F(VideoTrackTranscoderTests, SampleSoundness) {
     LOG(DEBUG) << "Testing SampleSoundness";
-    std::shared_ptr<TestCallback> callback = std::make_shared<TestCallback>();
+    auto callback = std::make_shared<TestTrackTranscoderCallback>();
     auto transcoder = VideoTrackTranscoder::create(callback);
 
     EXPECT_EQ(mMediaSampleReader->selectTrack(mTrackIndex), AMEDIA_OK);
@@ -139,8 +143,8 @@
 
 TEST_F(VideoTrackTranscoderTests, PreserveBitrate) {
     LOG(DEBUG) << "Testing PreserveBitrate";
-    std::shared_ptr<TestCallback> callback = std::make_shared<TestCallback>();
-    std::shared_ptr<MediaTrackTranscoder> transcoder = VideoTrackTranscoder::create(callback);
+    auto callback = std::make_shared<TestTrackTranscoderCallback>();
+    auto transcoder = VideoTrackTranscoder::create(callback);
 
     auto destFormat = TrackTranscoderTestUtils::getDefaultVideoDestinationFormat(
             mSourceFormat.get(), false /* includeBitrate*/);
@@ -155,15 +159,11 @@
     ASSERT_TRUE(transcoder->start());
 
     callback->waitUntilTrackFormatAvailable();
-
-    auto outputFormat = transcoder->getOutputFormat();
-    ASSERT_NE(outputFormat, nullptr);
-
     transcoder->stop();
     EXPECT_EQ(callback->waitUntilFinished(), AMEDIA_OK);
 
-    int32_t outBitrate;
-    EXPECT_TRUE(AMediaFormat_getInt32(outputFormat.get(), AMEDIAFORMAT_KEY_BIT_RATE, &outBitrate));
+    int32_t outBitrate = getConfiguredBitrate(transcoder);
+    ASSERT_GT(outBitrate, 0);
 
     EXPECT_EQ(srcBitrate, outBitrate);
 }
@@ -171,7 +171,7 @@
 // VideoTrackTranscoder needs a valid destination format.
 TEST_F(VideoTrackTranscoderTests, NullDestinationFormat) {
     LOG(DEBUG) << "Testing NullDestinationFormat";
-    std::shared_ptr<TestCallback> callback = std::make_shared<TestCallback>();
+    auto callback = std::make_shared<TestTrackTranscoderCallback>();
     std::shared_ptr<AMediaFormat> nullFormat;
 
     auto transcoder = VideoTrackTranscoder::create(callback);
@@ -181,7 +181,7 @@
 
 TEST_F(VideoTrackTranscoderTests, LingeringEncoder) {
     OneShotSemaphore semaphore;
-    auto callback = std::make_shared<TestCallback>();
+    auto callback = std::make_shared<TestTrackTranscoderCallback>();
     auto transcoder = VideoTrackTranscoder::create(callback);
 
     EXPECT_EQ(mMediaSampleReader->selectTrack(mTrackIndex), AMEDIA_OK);
diff --git a/media/libmediatranscoding/transcoder/tests/fuzzer/Android.bp b/media/libmediatranscoding/transcoder/tests/fuzzer/Android.bp
index 3ae349b..69b2827 100644
--- a/media/libmediatranscoding/transcoder/tests/fuzzer/Android.bp
+++ b/media/libmediatranscoding/transcoder/tests/fuzzer/Android.bp
@@ -18,6 +18,15 @@
  * Originally developed and contributed by Ittiam Systems Pvt. Ltd, Bangalore
  */
 
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
 cc_fuzz {
     name: "media_transcoder_fuzzer",
     srcs: [
diff --git a/media/libmediatranscoding/transcoder/tests/fuzzer/media_transcoder_fuzzer.cpp b/media/libmediatranscoding/transcoder/tests/fuzzer/media_transcoder_fuzzer.cpp
index 48d3406..ec36c0f 100644
--- a/media/libmediatranscoding/transcoder/tests/fuzzer/media_transcoder_fuzzer.cpp
+++ b/media/libmediatranscoding/transcoder/tests/fuzzer/media_transcoder_fuzzer.cpp
@@ -88,6 +88,8 @@
         }
     }
 
+    virtual void onHeartBeat(const MediaTranscoder* transcoder UNUSED_PARAM) override {}
+
     virtual void onCodecResourceLost(const MediaTranscoder* transcoder UNUSED_PARAM,
                                      const shared_ptr<ndk::ScopedAParcel>& pausedState
                                              UNUSED_PARAM) override {}
diff --git a/media/libmediatranscoding/transcoder/tools/Android.bp b/media/libmediatranscoding/transcoder/tools/Android.bp
new file mode 100644
index 0000000..ba30d34
--- /dev/null
+++ b/media/libmediatranscoding/transcoder/tools/Android.bp
@@ -0,0 +1,33 @@
+package {
+    // See: http://go/android-license-faq
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
+cc_binary {
+    name: "transcode",
+    srcs: ["Transcode.cpp"],
+
+    shared_libs: [
+        "libmediandk",
+        "libmediatranscoder",
+    ],
+
+    header_libs: [
+        "libbase_headers",
+    ],
+
+    compile_multilib: "32",
+
+    cflags: [
+        "-Werror",
+        "-Wall",
+    ],
+
+    sanitize: {
+        misc_undefined: [
+            "unsigned-integer-overflow",
+            "signed-integer-overflow",
+        ],
+        cfi: true,
+    },
+}
diff --git a/media/libmediatranscoding/transcoder/tools/Transcode.cpp b/media/libmediatranscoding/transcoder/tools/Transcode.cpp
new file mode 100644
index 0000000..1f5649e
--- /dev/null
+++ b/media/libmediatranscoding/transcoder/tools/Transcode.cpp
@@ -0,0 +1,210 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <android-base/macros.h>
+#include <fcntl.h>
+#include <getopt.h>
+#include <media/MediaTranscoder.h>
+#include <media/NdkCommon.h>
+
+using namespace android;
+
+#define ERR_MSG(fmt, ...) fprintf(stderr, "Error: " fmt "\n", ##__VA_ARGS__)
+
+class TranscoderCallbacks : public MediaTranscoder::CallbackInterface {
+public:
+    media_status_t waitForTranscodingFinished() {
+        std::unique_lock<std::mutex> lock(mMutex);
+        while (!mFinished) {
+            mCondition.wait(lock);
+        }
+        return mStatus;
+    }
+
+private:
+    virtual void onFinished(const MediaTranscoder* /*transcoder*/) override {
+        notifyTranscoderFinished(AMEDIA_OK);
+    }
+
+    virtual void onError(const MediaTranscoder* /*transcoder*/, media_status_t error) override {
+        ERR_MSG("Transcoder failed with error %d", error);
+        notifyTranscoderFinished(error);
+    }
+
+    virtual void onProgressUpdate(const MediaTranscoder* /*transcoder*/,
+                                  int32_t /*progress*/) override {}
+
+    virtual void onCodecResourceLost(
+            const MediaTranscoder* /*transcoder*/,
+            const std::shared_ptr<ndk::ScopedAParcel>& /*pausedState*/) override {
+        ERR_MSG("Transcoder lost codec resource while transcoding");
+        notifyTranscoderFinished(AMEDIACODEC_ERROR_INSUFFICIENT_RESOURCE);
+    }
+
+    virtual void onHeartBeat(const MediaTranscoder* /*transcoder*/) override {}
+
+    void notifyTranscoderFinished(media_status_t status) {
+        std::unique_lock<std::mutex> lock(mMutex);
+        mFinished = true;
+        mStatus = status;
+        mCondition.notify_all();
+    }
+
+    std::mutex mMutex;
+    std::condition_variable mCondition;
+    bool mFinished = false;
+    media_status_t mStatus = AMEDIA_OK;
+};
+
+struct TranscodeConfig {
+    std::string srcFile;
+    std::string dstFile;
+
+    std::string dstCodec{AMEDIA_MIMETYPE_VIDEO_AVC};
+    int32_t bitrate = -1;
+};
+
+static int transcode(const struct TranscodeConfig& config) {
+    auto callbacks = std::make_shared<TranscoderCallbacks>();
+    auto transcoder = MediaTranscoder::create(callbacks, -1 /*heartBeatIntervalUs*/);
+
+    const int srcFd = open(config.srcFile.c_str(), O_RDONLY);
+    if (srcFd <= 0) {
+        ERR_MSG("Unable to open source file %s", config.srcFile.c_str());
+        return AMEDIA_ERROR_INVALID_PARAMETER;
+    }
+
+    media_status_t status = transcoder->configureSource(srcFd);
+    close(srcFd);
+    if (status != AMEDIA_OK) {
+        ERR_MSG("configureSource returned error %d", status);
+        return status;
+    }
+
+    std::vector<std::shared_ptr<AMediaFormat>> trackFormats = transcoder->getTrackFormats();
+    if (trackFormats.size() <= 0) {
+        ERR_MSG("No tracks found in source file");
+        return AMEDIA_ERROR_MALFORMED;
+    }
+
+    for (int i = 0; i < trackFormats.size(); ++i) {
+        AMediaFormat* dstFormat = nullptr;
+
+        const char* mime = nullptr;
+        AMediaFormat_getString(trackFormats[i].get(), AMEDIAFORMAT_KEY_MIME, &mime);
+
+        if (strncmp(mime, "video/", 6) == 0) {
+            dstFormat = AMediaFormat_new();
+            AMediaFormat_setString(dstFormat, AMEDIAFORMAT_KEY_MIME, config.dstCodec.c_str());
+
+            if (config.bitrate > 0) {
+                AMediaFormat_setInt32(dstFormat, AMEDIAFORMAT_KEY_BIT_RATE, config.bitrate);
+            }
+        }
+
+        status = transcoder->configureTrackFormat(i, dstFormat);
+
+        if (dstFormat != nullptr) {
+            AMediaFormat_delete(dstFormat);
+        }
+
+        if (status != AMEDIA_OK) {
+            ERR_MSG("configureTrack returned error %d", status);
+            return status;
+        }
+    }
+
+    // Note: Overwrites existing file.
+    const int dstFd = open(config.dstFile.c_str(), O_WRONLY | O_CREAT, S_IRUSR | S_IWUSR);
+    if (dstFd <= 0) {
+        ERR_MSG("Unable to open destination file %s", config.dstFile.c_str());
+        return AMEDIA_ERROR_INVALID_PARAMETER;
+    }
+
+    status = transcoder->configureDestination(dstFd);
+    close(dstFd);
+    if (status != AMEDIA_OK) {
+        ERR_MSG("configureDestination returned error %d", status);
+        return status;
+    }
+
+    status = transcoder->start();
+    if (status != AMEDIA_OK) {
+        ERR_MSG("start returned error %d", status);
+        return status;
+    }
+
+    return callbacks->waitForTranscodingFinished();
+}
+
+// Options.
+static const struct option kLongOpts[] = {{"help", no_argument, nullptr, 'h'},
+                                          {"codec", required_argument, nullptr, 'c'},
+                                          {"bitrate", required_argument, nullptr, 'b'},
+                                          {0, 0, 0, 0}};
+static const char kShortOpts[] = "hc:b:";
+
+static void printUsageAndExit() {
+    const char* usage =
+            "  -h / --help    : Print this usage message and exit.\n"
+            "  -c / --codec   : Specify output video codec type using MediaFormat codec mime "
+            "type.\n"
+            "                     Defaults to \"video/avc\".\n"
+            "  -b / --bitrate : Specify output video bitrate in bits per second.\n"
+            "                     Defaults to estimating and preserving the original bitrate.\n"
+            "";
+
+    printf("Usage: %s [-h] [-c CODEC] <srcfile> <dstfile>\n%s", getprogname(), usage);
+    exit(-1);
+}
+
+int main(int argc, char** argv) {
+    int c;
+    TranscodeConfig config;
+
+    while ((c = getopt_long(argc, argv, kShortOpts, kLongOpts, nullptr)) >= 0) {
+        switch (c) {
+        case 'c':
+            config.dstCodec.assign(optarg);
+            break;
+
+        case 'b':
+            config.bitrate = atoi(optarg);
+            if (config.bitrate <= 0) {
+                ERR_MSG("Bitrate must an integer larger than zero.");
+                printUsageAndExit();
+            }
+            break;
+
+        case '?':
+            FALLTHROUGH_INTENDED;
+        case 'h':
+            FALLTHROUGH_INTENDED;
+        default:
+            printUsageAndExit();
+            break;
+        }
+    }
+
+    if (optind > (argc - 2)) {
+        ERR_MSG("Source and destination file not specified");
+        printUsageAndExit();
+    }
+    config.srcFile.assign(argv[optind++]);
+    config.dstFile.assign(argv[optind]);
+
+    return transcode(config);
+}
diff --git a/media/libnbaio/AudioStreamInSource.cpp b/media/libnbaio/AudioStreamInSource.cpp
index 1054b68..ca98b28 100644
--- a/media/libnbaio/AudioStreamInSource.cpp
+++ b/media/libnbaio/AudioStreamInSource.cpp
@@ -46,13 +46,11 @@
         status_t result;
         result = mStream->getBufferSize(&mStreamBufferSizeBytes);
         if (result != OK) return result;
-        audio_format_t streamFormat;
-        uint32_t sampleRate;
-        audio_channel_mask_t channelMask;
-        result = mStream->getAudioProperties(&sampleRate, &channelMask, &streamFormat);
+        audio_config_base_t config = AUDIO_CONFIG_BASE_INITIALIZER;
+        result = mStream->getAudioProperties(&config);
         if (result != OK) return result;
-        mFormat = Format_from_SR_C(sampleRate,
-                audio_channel_count_from_in_mask(channelMask), streamFormat);
+        mFormat = Format_from_SR_C(config.sample_rate,
+                audio_channel_count_from_in_mask(config.channel_mask), config.format);
         mFrameSize = Format_frameSize(mFormat);
     }
     return NBAIO_Source::negotiate(offers, numOffers, counterOffers, numCounterOffers);
diff --git a/media/libnbaio/AudioStreamOutSink.cpp b/media/libnbaio/AudioStreamOutSink.cpp
index 8564899..581867f 100644
--- a/media/libnbaio/AudioStreamOutSink.cpp
+++ b/media/libnbaio/AudioStreamOutSink.cpp
@@ -44,13 +44,11 @@
         status_t result;
         result = mStream->getBufferSize(&mStreamBufferSizeBytes);
         if (result != OK) return result;
-        audio_format_t streamFormat;
-        uint32_t sampleRate;
-        audio_channel_mask_t channelMask;
-        result = mStream->getAudioProperties(&sampleRate, &channelMask, &streamFormat);
+        audio_config_base_t config = AUDIO_CONFIG_BASE_INITIALIZER;
+        result = mStream->getAudioProperties(&config);
         if (result != OK) return result;
-        mFormat = Format_from_SR_C(sampleRate,
-                audio_channel_count_from_out_mask(channelMask), streamFormat);
+        mFormat = Format_from_SR_C(config.sample_rate,
+                audio_channel_count_from_out_mask(config.channel_mask), config.format);
         mFrameSize = Format_frameSize(mFormat);
     }
     return NBAIO_Sink::negotiate(offers, numOffers, counterOffers, numCounterOffers);
diff --git a/media/libnblog/ReportPerformance.cpp b/media/libnblog/ReportPerformance.cpp
index aa678ba..4c78b01 100644
--- a/media/libnblog/ReportPerformance.cpp
+++ b/media/libnblog/ReportPerformance.cpp
@@ -92,8 +92,8 @@
         (*dataJson)["threadNum"] = item.first;
         root.append(*dataJson);
     }
-    Json::StyledWriter writer;
-    std::string rootStr = writer.write(root);
+    Json::StreamWriterBuilder factory;
+    std::string rootStr = Json::writeString(factory, root);
     write(fd, rootStr.c_str(), rootStr.size());
 }
 
diff --git a/media/libshmem/Android.bp b/media/libshmem/Android.bp
index c715918..6e48078 100644
--- a/media/libshmem/Android.bp
+++ b/media/libshmem/Android.bp
@@ -1,3 +1,12 @@
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
 aidl_interface {
     name: "shared-file-region-aidl",
     unstable: true,
diff --git a/media/libstagefright/ACodec.cpp b/media/libstagefright/ACodec.cpp
index 8f1da0d..1aa1848 100644
--- a/media/libstagefright/ACodec.cpp
+++ b/media/libstagefright/ACodec.cpp
@@ -74,6 +74,12 @@
     kMaxIndicesToCheck = 32, // used when enumerating supported formats and profiles
 };
 
+namespace {
+
+constexpr char TUNNEL_PEEK_KEY[] = "android._trigger-tunnel-peek";
+
+}
+
 // OMX errors are directly mapped into status_t range if
 // there is no corresponding MediaError status code.
 // Use the statusFromOMXError(int32_t omxError) function.
@@ -1465,6 +1471,10 @@
     mCallback->onOutputFramesRendered(done);
 }
 
+void ACodec::onFirstTunnelFrameReady() {
+    mCallback->onFirstTunnelFrameReady();
+}
+
 ACodec::BufferInfo *ACodec::dequeueBufferFromNativeWindow() {
     ANativeWindowBuffer *buf;
     CHECK(mNativeWindow.get() != NULL);
@@ -2460,6 +2470,30 @@
     return err;
 }
 
+status_t ACodec::setTunnelPeek(int32_t tunnelPeek) {
+    if (mIsEncoder) {
+        ALOGE("encoder does not support %s", TUNNEL_PEEK_KEY);
+        return BAD_VALUE;
+    }
+    if (!mTunneled) {
+        ALOGE("%s is only supported in tunnel mode", TUNNEL_PEEK_KEY);
+        return BAD_VALUE;
+    }
+
+    OMX_CONFIG_BOOLEANTYPE config;
+    InitOMXParams(&config);
+    config.bEnabled = (OMX_BOOL)(tunnelPeek != 0);
+    status_t err = mOMXNode->setConfig(
+            (OMX_INDEXTYPE)OMX_IndexConfigAndroidTunnelPeek,
+            &config, sizeof(config));
+    if (err != OK) {
+        ALOGE("decoder cannot set %s to %d (err %d)",
+              TUNNEL_PEEK_KEY, tunnelPeek, err);
+    }
+
+    return err;
+}
+
 status_t ACodec::setAudioPresentation(int32_t presentationId, int32_t programId) {
     OMX_AUDIO_CONFIG_ANDROID_AUDIOPRESENTATION config;
     InitOMXParams(&config);
@@ -5683,15 +5717,18 @@
     int32_t range, standard, transfer;
     convertCodecColorAspectsToPlatformAspects(aspects, &range, &standard, &transfer);
 
+    int32_t dsRange, dsStandard, dsTransfer;
+    getColorConfigFromDataSpace(dataSpace, &dsRange, &dsStandard, &dsTransfer);
+
     // if some aspects are unspecified, use dataspace fields
     if (range == 0) {
-        range = (dataSpace & HAL_DATASPACE_RANGE_MASK) >> HAL_DATASPACE_RANGE_SHIFT;
+        range = dsRange;
     }
     if (standard == 0) {
-        standard = (dataSpace & HAL_DATASPACE_STANDARD_MASK) >> HAL_DATASPACE_STANDARD_SHIFT;
+        standard = dsStandard;
     }
     if (transfer == 0) {
-        transfer = (dataSpace & HAL_DATASPACE_TRANSFER_MASK) >> HAL_DATASPACE_TRANSFER_SHIFT;
+        transfer = dsTransfer;
     }
 
     mOutputFormat = mOutputFormat->dup(); // trigger an output format changed event
@@ -6860,6 +6897,7 @@
     ALOGV("onAllocateComponent");
 
     CHECK(mCodec->mOMXNode == NULL);
+    mCodec->mFatalError = false;
 
     sp<AMessage> notify = new AMessage(kWhatOMXMessageList, mCodec);
     notify->setInt32("generation", mCodec->mNodeGeneration + 1);
@@ -7889,6 +7927,15 @@
                 &presentation, sizeof(presentation));
         }
     }
+
+    int32_t tunnelPeek = 0;
+    if (params->findInt32(TUNNEL_PEEK_KEY, &tunnelPeek)) {
+        status_t err = setTunnelPeek(tunnelPeek);
+        if (err != OK) {
+            return err;
+        }
+    }
+
     return setVendorParameters(params);
 }
 
@@ -8354,6 +8401,12 @@
             return true;
         }
 
+        case OMX_EventOnFirstTunnelFrameReady:
+        {
+            mCodec->onFirstTunnelFrameReady();
+            return true;
+        }
+
         default:
             return BaseState::onOMXEvent(event, data1, data2);
     }
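
The ACodec changes above add a private parameter key that, when found in setParameters(), is translated into OMX_IndexConfigAndroidTunnelPeek on the OMX node. A hedged sketch of the message a caller higher in the stack (typically delivered through MediaCodec::setParameters()) would carry for the new branch to fire; per the checks in setTunnelPeek(), it only applies to a tunneled decoder:

    // Sketch only: request that the first tunneled frame be made ready for peeking.
    sp<AMessage> params = new AMessage;
    params->setInt32("android._trigger-tunnel-peek", 1);
    status_t err = codec->setParameters(params);  // codec: a configured tunneled video decoder
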
diff --git a/media/libstagefright/Android.bp b/media/libstagefright/Android.bp
index 489a4bd..d6e36b9 100644
--- a/media/libstagefright/Android.bp
+++ b/media/libstagefright/Android.bp
@@ -274,6 +274,7 @@
         "MPEG2TSWriter.cpp",
         "MPEG4Writer.cpp",
         "MediaAdapter.cpp",
+        "MediaAppender.cpp",
         "MediaClock.cpp",
         "MediaCodec.cpp",
         "MediaCodecList.cpp",
@@ -337,6 +338,8 @@
         "android.hardware.cas.native@1.0",
         "android.hardware.drm@1.0",
         "android.hardware.media.omx@1.0",
+        "media_permission-aidl-cpp",
+        "libaudioclient_aidl_conversion",
     ],
 
     static_libs: [
@@ -349,6 +352,9 @@
         "libogg",
         "libwebm",
         "libstagefright_id3",
+        "media_permission-aidl-cpp",
+        "libmediandk_format",
+        "libmedia_ndkformatpriv",
     ],
 
     header_libs:[
@@ -356,6 +362,7 @@
         "libnativeloader-headers",
         "libstagefright_xmlparser_headers",
         "media_ndk_headers",
+        "libmediaformatshaper_headers",
     ],
 
     export_shared_lib_headers: [
@@ -363,6 +370,7 @@
         "libhidlmemory",
         "libmedia",
         "android.hidl.allocator@1.0",
+        "media_permission-aidl-cpp",
     ],
 
     export_include_dirs: [
diff --git a/media/libstagefright/AudioSource.cpp b/media/libstagefright/AudioSource.cpp
index 4bc861e..89fe56f 100644
--- a/media/libstagefright/AudioSource.cpp
+++ b/media/libstagefright/AudioSource.cpp
@@ -21,6 +21,8 @@
 #define LOG_TAG "AudioSource"
 #include <utils/Log.h>
 
+#include <binder/IPCThreadState.h>
+#include <media/AidlConversion.h>
 #include <media/AudioRecord.h>
 #include <media/stagefright/AudioSource.h>
 #include <media/stagefright/MediaBuffer.h>
@@ -32,6 +34,8 @@
 
 namespace android {
 
+using android::media::permission::Identity;
+
 static void AudioRecordCallbackFunction(int event, void *user, void *info) {
     AudioSource *source = (AudioSource *) user;
     switch (event) {
@@ -50,74 +54,100 @@
 }
 
 AudioSource::AudioSource(
+    const audio_attributes_t *attr, const Identity& identity,
+    uint32_t sampleRate, uint32_t channelCount, uint32_t outSampleRate,
+    audio_port_handle_t selectedDeviceId,
+    audio_microphone_direction_t selectedMicDirection,
+    float selectedMicFieldDimension)
+{
+  set(attr, identity, sampleRate, channelCount, outSampleRate, selectedDeviceId,
+      selectedMicDirection, selectedMicFieldDimension);
+}
+
+AudioSource::AudioSource(
         const audio_attributes_t *attr, const String16 &opPackageName,
         uint32_t sampleRate, uint32_t channelCount, uint32_t outSampleRate,
         uid_t uid, pid_t pid, audio_port_handle_t selectedDeviceId,
         audio_microphone_direction_t selectedMicDirection,
         float selectedMicFieldDimension)
-    : mStarted(false),
-      mSampleRate(sampleRate),
-      mOutSampleRate(outSampleRate > 0 ? outSampleRate : sampleRate),
-      mTrackMaxAmplitude(false),
-      mStartTimeUs(0),
-      mStopSystemTimeUs(-1),
-      mLastFrameTimestampUs(0),
-      mMaxAmplitude(0),
-      mPrevSampleTimeUs(0),
-      mInitialReadTimeUs(0),
-      mNumFramesReceived(0),
-      mNumFramesSkipped(0),
-      mNumFramesLost(0),
-      mNumClientOwnedBuffers(0),
-      mNoMoreFramesToRead(false) {
-    ALOGV("sampleRate: %u, outSampleRate: %u, channelCount: %u",
-            sampleRate, outSampleRate, channelCount);
-    CHECK(channelCount == 1 || channelCount == 2);
-    CHECK(sampleRate > 0);
+{
+  // TODO b/182392769: use identity util
+  Identity identity;
+  identity.packageName = VALUE_OR_FATAL(legacy2aidl_String16_string(opPackageName));
+  identity.uid = VALUE_OR_FATAL(legacy2aidl_uid_t_int32_t(uid));
+  identity.pid = VALUE_OR_FATAL(legacy2aidl_pid_t_int32_t(pid));
+  set(attr, identity, sampleRate, channelCount, outSampleRate, selectedDeviceId,
+      selectedMicDirection, selectedMicFieldDimension);
+}
 
-    size_t minFrameCount;
-    status_t status = AudioRecord::getMinFrameCount(&minFrameCount,
-                                           sampleRate,
-                                           AUDIO_FORMAT_PCM_16_BIT,
-                                           audio_channel_in_mask_from_count(channelCount));
-    if (status == OK) {
-        // make sure that the AudioRecord callback never returns more than the maximum
-        // buffer size
-        uint32_t frameCount = kMaxBufferSize / sizeof(int16_t) / channelCount;
+void AudioSource::set(
+   const audio_attributes_t *attr, const Identity& identity,
+        uint32_t sampleRate, uint32_t channelCount, uint32_t outSampleRate,
+        audio_port_handle_t selectedDeviceId,
+        audio_microphone_direction_t selectedMicDirection,
+        float selectedMicFieldDimension)
+{
+   mStarted = false;
+   mSampleRate = sampleRate;
+   mOutSampleRate = outSampleRate > 0 ? outSampleRate : sampleRate;
+   mTrackMaxAmplitude = false;
+   mStartTimeUs = 0;
+   mStopSystemTimeUs = -1;
+   mLastFrameTimestampUs = 0;
+   mMaxAmplitude = 0;
+   mPrevSampleTimeUs = 0;
+   mInitialReadTimeUs = 0;
+   mNumFramesReceived = 0;
+   mNumFramesSkipped = 0;
+   mNumFramesLost = 0;
+   mNumClientOwnedBuffers = 0;
+   mNoMoreFramesToRead = false;
+  ALOGV("sampleRate: %u, outSampleRate: %u, channelCount: %u",
+        sampleRate, outSampleRate, channelCount);
+  CHECK(channelCount == 1 || channelCount == 2);
+  CHECK(sampleRate > 0);
 
-        // make sure that the AudioRecord total buffer size is large enough
-        size_t bufCount = 2;
-        while ((bufCount * frameCount) < minFrameCount) {
-            bufCount++;
-        }
+  size_t minFrameCount;
+  status_t status = AudioRecord::getMinFrameCount(&minFrameCount,
+                                                  sampleRate,
+                                                  AUDIO_FORMAT_PCM_16_BIT,
+                                                  audio_channel_in_mask_from_count(channelCount));
+  if (status == OK) {
+    // make sure that the AudioRecord callback never returns more than the maximum
+    // buffer size
+    uint32_t frameCount = kMaxBufferSize / sizeof(int16_t) / channelCount;
 
-        mRecord = new AudioRecord(
-                    AUDIO_SOURCE_DEFAULT, sampleRate, AUDIO_FORMAT_PCM_16_BIT,
-                    audio_channel_in_mask_from_count(channelCount),
-                    opPackageName,
-                    (size_t) (bufCount * frameCount),
-                    AudioRecordCallbackFunction,
-                    this,
-                    frameCount /*notificationFrames*/,
-                    AUDIO_SESSION_ALLOCATE,
-                    AudioRecord::TRANSFER_DEFAULT,
-                    AUDIO_INPUT_FLAG_NONE,
-                    uid,
-                    pid,
-                    attr,
-                    selectedDeviceId,
-                    selectedMicDirection,
-                    selectedMicFieldDimension);
-        // Set caller name so it can be logged in destructor.
-        // MediaMetricsConstants.h: AMEDIAMETRICS_PROP_CALLERNAME_VALUE_MEDIA
-        mRecord->setCallerName("media");
-        mInitCheck = mRecord->initCheck();
-        if (mInitCheck != OK) {
-            mRecord.clear();
-        }
-    } else {
-        mInitCheck = status;
+    // make sure that the AudioRecord total buffer size is large enough
+    size_t bufCount = 2;
+    while ((bufCount * frameCount) < minFrameCount) {
+      bufCount++;
     }
+
+    mRecord = new AudioRecord(
+        AUDIO_SOURCE_DEFAULT, sampleRate, AUDIO_FORMAT_PCM_16_BIT,
+        audio_channel_in_mask_from_count(channelCount),
+        identity,
+        (size_t) (bufCount * frameCount),
+        AudioRecordCallbackFunction,
+        this,
+        frameCount /*notificationFrames*/,
+        AUDIO_SESSION_ALLOCATE,
+        AudioRecord::TRANSFER_DEFAULT,
+        AUDIO_INPUT_FLAG_NONE,
+        attr,
+        selectedDeviceId,
+        selectedMicDirection,
+        selectedMicFieldDimension);
+    // Set caller name so it can be logged in destructor.
+    // MediaMetricsConstants.h: AMEDIAMETRICS_PROP_CALLERNAME_VALUE_MEDIA
+    mRecord->setCallerName("media");
+    mInitCheck = mRecord->initCheck();
+    if (mInitCheck != OK) {
+      mRecord.clear();
+    }
+  } else {
+    mInitCheck = status;
+  }
 }
 
 AudioSource::~AudioSource() {
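
With the new constructor above, callers hand AudioSource a media::permission::Identity instead of separate package/uid/pid arguments (the legacy constructor now builds one internally). A hedged sketch of populating the attribution before constructing the source; the package name is a placeholder and the uid/pid lookup is one possible choice:

    using android::media::permission::Identity;

    Identity identity;
    identity.packageName = std::string("com.example.recorder");   // placeholder package
    identity.uid = IPCThreadState::self()->getCallingUid();        // or the recording app's uid
    identity.pid = IPCThreadState::self()->getCallingPid();
    // identity is then passed to the new AudioSource(attr, identity, sampleRate, ...) constructor above.
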
diff --git a/media/libstagefright/CameraSource.cpp b/media/libstagefright/CameraSource.cpp
index bcf418a..b1aa7a9 100644
--- a/media/libstagefright/CameraSource.cpp
+++ b/media/libstagefright/CameraSource.cpp
@@ -658,7 +658,7 @@
     mStartTimeUs = 0;
     mNumInputBuffers = 0;
     mEncoderFormat = HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
-    mEncoderDataSpace = HAL_DATASPACE_V0_BT709;
+    mEncoderDataSpace = mBufferDataSpace = HAL_DATASPACE_V0_BT709;
 
     if (meta) {
         int64_t startTimeUs;
@@ -678,6 +678,7 @@
         }
         if (meta->findInt32(kKeyColorSpace, &mEncoderDataSpace)) {
             ALOGI("Using encoder data space: %#x", mEncoderDataSpace);
+            mBufferDataSpace = mEncoderDataSpace;
         }
     }
 
@@ -908,6 +909,11 @@
         (*buffer)->setObserver(this);
         (*buffer)->add_ref();
         (*buffer)->meta_data().setInt64(kKeyTime, frameTime);
+        if (mBufferDataSpace != mEncoderDataSpace) {
+            ALOGD("Data space updated to %x", mBufferDataSpace);
+            (*buffer)->meta_data().setInt32(kKeyColorSpace, mBufferDataSpace);
+            mEncoderDataSpace = mBufferDataSpace;
+        }
     }
     return OK;
 }
@@ -1039,6 +1045,7 @@
     // Find a available memory slot to store the buffer as VideoNativeMetadata.
     sp<IMemory> data = *mMemoryBases.begin();
     mMemoryBases.erase(mMemoryBases.begin());
+    mBufferDataSpace = buffer.mDataSpace;
 
     ssize_t offset;
     size_t size;
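
The CameraSource hunks track the dataspace reported with each incoming buffer and, when it differs from what the encoder was configured with, attach it to the outgoing MediaBuffer as kKeyColorSpace. A minimal sketch of how a downstream consumer could pick up that per-buffer update (buffer naming is illustrative):

    int32_t dataSpace;
    if (mediaBuffer->meta_data().findInt32(kKeyColorSpace, &dataSpace)) {
        // the capture dataspace changed mid-stream; adjust color handling accordingly
        ALOGV("buffer dataspace updated to %#x", dataSpace);
    }
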
diff --git a/media/libstagefright/FrameDecoder.cpp b/media/libstagefright/FrameDecoder.cpp
index d11408d..0fd4ef2 100644
--- a/media/libstagefright/FrameDecoder.cpp
+++ b/media/libstagefright/FrameDecoder.cpp
@@ -67,6 +67,12 @@
     if (trackMeta->findInt32(kKeySARWidth, &sarWidth)
             && trackMeta->findInt32(kKeySARHeight, &sarHeight)
             && sarHeight != 0) {
+        int32_t multVal;
+        if (width < 0 || sarWidth < 0 ||
+            __builtin_mul_overflow(width, sarWidth, &multVal)) {
+            ALOGE("displayWidth overflow %dx%d", width, sarWidth);
+            return NULL;
+        }
         displayWidth = (width * sarWidth) / sarHeight;
         displayHeight = height;
     } else if (trackMeta->findInt32(kKeyDisplayWidth, &displayWidth)
@@ -87,6 +93,16 @@
         rotationAngle = 0;
     }
 
+    if (!metaOnly) {
+        int32_t multVal;
+        if (width < 0 || height < 0 || dstBpp < 0 ||
+            __builtin_mul_overflow(dstBpp, width, &multVal) ||
+            __builtin_mul_overflow(multVal, height, &multVal)) {
+            ALOGE("Frame size overflow %dx%d bpp %d", width, height, dstBpp);
+            return NULL;
+        }
+    }
+
     VideoFrame frame(width, height, displayWidth, displayHeight,
             tileWidth, tileHeight, rotationAngle, dstBpp, !metaOnly, iccSize);
 
@@ -97,7 +113,7 @@
         return NULL;
     }
     sp<IMemory> frameMem = new MemoryBase(heap, 0, size);
-    if (frameMem == NULL) {
+    if (frameMem == NULL || frameMem->unsecurePointer() == NULL) {
         ALOGE("not enough memory for VideoFrame size=%zu", size);
         return NULL;
     }
@@ -628,6 +644,10 @@
                 0,
                 dstBpp(),
                 mCaptureLayer != nullptr /*allocRotated*/);
+        if (frameMem == nullptr) {
+            return NO_MEMORY;
+        }
+
         mFrame = static_cast<VideoFrame*>(frameMem->unsecurePointer());
 
         setFrame(frameMem);
@@ -870,6 +890,11 @@
     if (mFrame == NULL) {
         sp<IMemory> frameMem = allocVideoFrame(
                 trackMeta(), mWidth, mHeight, mTileWidth, mTileHeight, dstBpp());
+
+        if (frameMem == nullptr) {
+            return NO_MEMORY;
+        }
+
         mFrame = static_cast<VideoFrame*>(frameMem->unsecurePointer());
 
         setFrame(frameMem);
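
Both FrameDecoder hunks guard the display-width and frame-size arithmetic with the compiler's checked-multiply builtin before the values feed an allocation. The same pattern in isolation, as a small helper (hypothetical name, not part of the change):

    // Returns false if w * h * bpp does not fit in int32_t.
    static bool frameSizeFits(int32_t w, int32_t h, int32_t bpp, int32_t *outSize) {
        int32_t tmp;
        if (w < 0 || h < 0 || bpp < 0 ||
                __builtin_mul_overflow(bpp, w, &tmp) ||
                __builtin_mul_overflow(tmp, h, &tmp)) {
            return false;
        }
        *outSize = tmp;
        return true;
    }
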
diff --git a/media/libstagefright/MPEG4Writer.cpp b/media/libstagefright/MPEG4Writer.cpp
index 447d599..5c39239 100644
--- a/media/libstagefright/MPEG4Writer.cpp
+++ b/media/libstagefright/MPEG4Writer.cpp
@@ -519,12 +519,12 @@
     mSendNotify = false;
     mWriteSeekErr = false;
     mFallocateErr = false;
-
     // Reset following variables for all the sessions and they will be
     // initialized in start(MetaData *param).
     mIsRealTimeRecording = true;
     mUse4ByteNalLength = true;
     mOffset = 0;
+    mMaxOffsetAppend = 0;
     mPreAllocateFileEndOffset = 0;
     mMdatOffset = 0;
     mMdatEndOffset = 0;
@@ -992,6 +992,19 @@
         seekOrPostError(mFd, mFreeBoxOffset, SEEK_SET);
         writeInt32(mInMemoryCacheSize);
         write("free", 4);
+        if (mInMemoryCacheSize >= 8) {
+            off64_t bufSize = mInMemoryCacheSize - 8;
+            char* zeroBuffer = new (std::nothrow) char[bufSize];
+            if (zeroBuffer) {
+                std::fill_n(zeroBuffer, bufSize, '0');
+                writeOrPostError(mFd, zeroBuffer, bufSize);
+                delete [] zeroBuffer;
+            } else {
+                ALOGW("freebox in file isn't initialized to 0");
+            }
+        } else {
+            ALOGW("freebox size is less than 8:%" PRId64, mInMemoryCacheSize);
+        }
         mMdatOffset = mFreeBoxOffset + mInMemoryCacheSize;
     } else {
         mMdatOffset = mOffset;
@@ -1541,6 +1554,26 @@
         MediaBuffer *buffer, bool usePrefix,
         uint32_t tiffHdrOffset, size_t *bytesWritten) {
     off64_t old_offset = mOffset;
+    int64_t offset;
+    ALOGV("buffer->range_length:%lld", (long long)buffer->range_length());
+    if (buffer->meta_data().findInt64(kKeySampleFileOffset, &offset)) {
+        ALOGV("offset:%lld, old_offset:%lld", (long long)offset, (long long)old_offset);
+        if (old_offset == offset) {
+            mOffset += buffer->range_length();
+        } else {
+            ALOGV("offset and old_offset are not equal! diff:%lld", (long long)offset - old_offset);
+            mOffset = offset + buffer->range_length();
+            // mOffset += buffer->range_length() + offset - old_offset;
+        }
+        *bytesWritten = buffer->range_length();
+        ALOGV("mOffset:%lld, mMaxOffsetAppend:%lld, bytesWritten:%lld", (long long)mOffset,
+                  (long long)mMaxOffsetAppend, (long long)*bytesWritten);
+        mMaxOffsetAppend = std::max(mOffset, mMaxOffsetAppend);
+        seekOrPostError(mFd, mMaxOffsetAppend, SEEK_SET);
+        return offset;
+    }
+
+    ALOGV("mOffset:%lld, mMaxOffsetAppend:%lld", (long long)mOffset, (long long)mMaxOffsetAppend);
 
     if (usePrefix) {
         addMultipleLengthPrefixedSamples_l(buffer);
@@ -1557,6 +1590,10 @@
         mOffset += buffer->range_length();
     }
     *bytesWritten = mOffset - old_offset;
+
+    ALOGV("mOffset:%lld, old_offset:%lld, bytesWritten:%lld", (long long)mOffset,
+          (long long)old_offset, (long long)*bytesWritten);
+
     return old_offset;
 }
 
@@ -1569,6 +1606,7 @@
         (const uint8_t *)buffer->data() + buffer->range_offset();
 
     if (!memcmp(ptr, "\x00\x00\x00\x01", 4)) {
+        ALOGV("stripping start code");
         buffer->set_range(
                 buffer->range_offset() + 4, buffer->range_length() - 4);
     }
@@ -1599,8 +1637,10 @@
 }
 
 void MPEG4Writer::addLengthPrefixedSample_l(MediaBuffer *buffer) {
+    ALOGV("alp:buffer->range_length:%lld", (long long)buffer->range_length());
     size_t length = buffer->range_length();
     if (mUse4ByteNalLength) {
+        ALOGV("mUse4ByteNalLength");
         uint8_t x[4];
         x[0] = length >> 24;
         x[1] = (length >> 16) & 0xff;
@@ -1610,6 +1650,7 @@
         writeOrPostError(mFd, (const uint8_t*)buffer->data() + buffer->range_offset(), length);
         mOffset += length + 4;
     } else {
+        ALOGV("mUse2ByteNalLength");
         CHECK_LT(length, 65536u);
 
         uint8_t x[2];
@@ -2762,6 +2803,9 @@
     }
 
     writeAllChunks();
+    ALOGV("threadFunc mOffset:%lld, mMaxOffsetAppend:%lld", (long long)mOffset,
+          (long long)mMaxOffsetAppend);
+    mOffset = std::max(mOffset, mMaxOffsetAppend);
 }
 
 status_t MPEG4Writer::startWriterThread() {
@@ -3323,6 +3367,7 @@
     uint32_t lastSamplesPerChunk = 0;
     int64_t lastSampleDurationUs = -1;      // Duration calculated from EOS buffer and its timestamp
     int64_t lastSampleDurationTicks = -1;   // Timescale based ticks
+    int64_t sampleFileOffset = -1;
 
     if (mIsAudio) {
         prctl(PR_SET_NAME, (unsigned long)"MP4WtrAudTrkThread", 0, 0, 0);
@@ -3342,6 +3387,7 @@
     MediaBufferBase *buffer;
     const char *trackName = getTrackType();
     while (!mDone && (err = mSource->read(&buffer)) == OK) {
+        ALOGV("read:buffer->range_length:%lld", (long long)buffer->range_length());
         int32_t isEOS = false;
         if (buffer->range_length() == 0) {
             if (buffer->meta_data().findInt32(kKeyIsEndOfStream, &isEOS) && isEOS) {
@@ -3448,6 +3494,14 @@
                 continue;
             }
         }
+        if (!buffer->meta_data().findInt64(kKeySampleFileOffset, &sampleFileOffset)) {
+            sampleFileOffset = -1;
+        }
+        int64_t lastSample = -1;
+        if (!buffer->meta_data().findInt64(kKeyLastSampleIndexInChunk, &lastSample)) {
+            lastSample = -1;
+        }
+        ALOGV("sampleFileOffset:%lld", (long long)sampleFileOffset);
 
         /*
          * Reserve space in the file for the current sample + to be written MOOV box. If reservation
@@ -3455,7 +3509,7 @@
          * write MOOV box successfully as space for the same was reserved in the prior call.
          * Release the current buffer/sample here.
          */
-        if (!mOwner->preAllocate(buffer->range_length())) {
+        if (sampleFileOffset == -1 && !mOwner->preAllocate(buffer->range_length())) {
             buffer->release();
             buffer = nullptr;
             break;
@@ -3466,9 +3520,14 @@
         // Make a deep copy of the MediaBuffer and Metadata and release
         // the original as soon as we can
         MediaBuffer *copy = new MediaBuffer(buffer->range_length());
-        memcpy(copy->data(), (uint8_t *)buffer->data() + buffer->range_offset(),
-                buffer->range_length());
+        if (sampleFileOffset != -1) {
+            copy->meta_data().setInt64(kKeySampleFileOffset, sampleFileOffset);
+        } else {
+            memcpy(copy->data(), (uint8_t*)buffer->data() + buffer->range_offset(),
+                   buffer->range_length());
+        }
         copy->set_range(0, buffer->range_length());
+
         meta_data = new MetaData(buffer->meta_data());
         buffer->release();
         buffer = NULL;
@@ -3476,14 +3535,16 @@
             copy->meta_data().setInt32(kKeyExifTiffOffset, tiffHdrOffset);
         }
         bool usePrefix = this->usePrefix() && !isExif;
-
-        if (usePrefix) StripStartcode(copy);
-
+        if (sampleFileOffset == -1 && usePrefix) {
+            StripStartcode(copy);
+        }
         size_t sampleSize = copy->range_length();
-        if (usePrefix) {
+        if (sampleFileOffset == -1 && usePrefix) {
             if (mOwner->useNalLengthFour()) {
+                ALOGV("nallength4");
                 sampleSize += 4;
             } else {
+                ALOGV("nallength2");
                 sampleSize += 2;
             }
         }
@@ -3778,7 +3839,8 @@
                 chunkTimestampUs = timestampUs;
             } else {
                 int64_t chunkDurationUs = timestampUs - chunkTimestampUs;
-                if (chunkDurationUs > interleaveDurationUs) {
+                if (chunkDurationUs > interleaveDurationUs || lastSample > 1) {
+                    ALOGV("lastSample:%lld", (long long)lastSample);
                     if (chunkDurationUs > mMaxChunkDurationUs) {
                         mMaxChunkDurationUs = chunkDurationUs;
                     }
@@ -4260,13 +4322,20 @@
 void MPEG4Writer::Track::writeColrBox() {
     ColorAspects aspects;
     memset(&aspects, 0, sizeof(aspects));
+    // Color metadata may have changed.
+    sp<MetaData> meta = mSource->getFormat();
     // TRICKY: using | instead of || because we want to execute all findInt32-s
-    if (mMeta->findInt32(kKeyColorPrimaries, (int32_t*)&aspects.mPrimaries)
-            | mMeta->findInt32(kKeyTransferFunction, (int32_t*)&aspects.mTransfer)
-            | mMeta->findInt32(kKeyColorMatrix, (int32_t*)&aspects.mMatrixCoeffs)
-            | mMeta->findInt32(kKeyColorRange, (int32_t*)&aspects.mRange)) {
+    if (meta->findInt32(kKeyColorPrimaries, (int32_t*)&aspects.mPrimaries)
+            | meta->findInt32(kKeyTransferFunction, (int32_t*)&aspects.mTransfer)
+            | meta->findInt32(kKeyColorMatrix, (int32_t*)&aspects.mMatrixCoeffs)
+            | meta->findInt32(kKeyColorRange, (int32_t*)&aspects.mRange)) {
         int32_t primaries, transfer, coeffs;
         bool fullRange;
+        ALOGV("primaries=%s transfer=%s matrix=%s range=%s",
+                asString(aspects.mPrimaries),
+                asString(aspects.mTransfer),
+                asString(aspects.mMatrixCoeffs),
+                asString(aspects.mRange));
         ColorUtils::convertCodecColorAspectsToIsoAspects(
                 aspects, &primaries, &transfer, &coeffs, &fullRange);
         mOwner->beginBox("colr");
@@ -4276,6 +4345,8 @@
         mOwner->writeInt16(coeffs);
         mOwner->writeInt8(int8_t(fullRange ? 0x80 : 0x0));
         mOwner->endBox(); // colr
+    } else {
+        ALOGV("no color information");
     }
 }
 
@@ -5322,4 +5393,4 @@
     endBox();
 }
 
-}  // namespace android
+}  // namespace android
\ No newline at end of file
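
The MPEG4Writer changes let a sample whose payload already sits in the file (as produced by the appender path) be accounted for without rewriting its bytes: when kKeySampleFileOffset is present, addSample_l() only advances the bookkeeping offsets, and the track thread skips preallocation, start-code stripping and NAL length prefixing for that buffer. A hedged sketch of how a source would tag such a sample so the new early-return path is taken; offset, lastIndexInChunk and timeUs are placeholders:

    MediaBuffer *sample = new MediaBuffer(sampleSize);  // size bookkeeping only; payload is already on disk
    sample->meta_data().setInt64(kKeySampleFileOffset, offset);
    sample->meta_data().setInt64(kKeyLastSampleIndexInChunk, lastIndexInChunk);
    sample->meta_data().setInt64(kKeyTime, timeUs);
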
diff --git a/media/libstagefright/MediaAppender.cpp b/media/libstagefright/MediaAppender.cpp
new file mode 100644
index 0000000..5d80b30
--- /dev/null
+++ b/media/libstagefright/MediaAppender.cpp
@@ -0,0 +1,425 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "MediaAppender"
+
+#include <media/stagefright/MediaAppender.h>
+#include <media/stagefright/MediaCodec.h>
+#include <media/stagefright/foundation/ABuffer.h>
+#include <utils/Log.h>
+// TODO : check if this works for NDK apps without JVM
+// #include <media/ndk/NdkJavaVMHelperPriv.h>
+
+namespace android {
+
+struct MediaAppender::sampleDataInfo {
+    size_t size;
+    int64_t time;
+    size_t exTrackIndex;
+    sp<MetaData> meta;
+};
+
+sp<MediaAppender> MediaAppender::create(int fd, AppendMode mode) {
+    if (fd < 0) {
+        ALOGE("invalid file descriptor");
+        return nullptr;
+    }
+    if (!(mode >= APPEND_MODE_FIRST && mode <= APPEND_MODE_LAST)) {
+        ALOGE("invalid mode %d", mode);
+        return nullptr;
+    }
+    sp<MediaAppender> ma = new (std::nothrow) MediaAppender(fd, mode);
+    if (ma == nullptr || ma->init() != OK) {
+        return nullptr;
+    }
+    return ma;
+}
+
+// TODO: inject mediamuxer and mediaextractor objects.
+// TODO: @format is not required as an input if we can sniff the file and find the format of
+//       the existing content.
+// TODO: Code it to the interface(MediaAppender), and have a separate MediaAppender NDK
+MediaAppender::MediaAppender(int fd, AppendMode mode)
+    : mFd(fd),
+      mMode(mode),
+      // TODO : check if this works for NDK apps without JVM
+      // mExtractor(new NuMediaExtractor(NdkJavaVMHelper::getJNIEnv() != nullptr
+      //           ? NuMediaExtractor::EntryPoint::NDK_WITH_JVM
+      //           : NuMediaExtractor::EntryPoint::NDK_NO_JVM)),
+      mExtractor(new (std::nothrow) NuMediaExtractor(NuMediaExtractor::EntryPoint::NDK_WITH_JVM)),
+      mTrackCount(0),
+      mState(UNINITIALIZED) {
+          ALOGV("MediaAppender::MediaAppender mode:%d", mode);
+      }
+
+status_t MediaAppender::init() {
+    std::scoped_lock lock(mMutex);
+    ALOGV("MediaAppender::init");
+    status_t status = mExtractor->setDataSource(mFd, 0, lseek(mFd, 0, SEEK_END));
+    if (status != OK) {
+        ALOGE("extractor_setDataSource failed, status :%d", status);
+        return status;
+    }
+
+    if (strcmp("MPEG4Extractor", mExtractor->getName()) == 0) {
+        mFormat = MediaMuxer::OUTPUT_FORMAT_MPEG_4;
+    } else {
+        ALOGE("Unsupported format, extractor name:%s", mExtractor->getName());
+        return ERROR_UNSUPPORTED;
+    }
+
+    mTrackCount = mExtractor->countTracks();
+    ALOGV("mTrackCount:%zu", mTrackCount);
+    if (mTrackCount == 0) {
+        ALOGE("no tracks are present");
+        return ERROR_MALFORMED;
+    }
+    size_t exTrackIndex = 0;
+    ssize_t audioTrackIndex = -1, videoTrackIndex = -1;
+    bool audioSyncSampleTimeSet = false;
+
+    while (exTrackIndex < mTrackCount) {
+        sp<AMessage> fmt;
+        status = mExtractor->getTrackFormat(exTrackIndex, &fmt, 0);
+        if (status != OK) {
+            ALOGE("getTrackFormat failed for trackIndex:%zu, status:%d", exTrackIndex, status);
+            return status;
+        }
+        AString mime;
+        if (fmt->findString("mime", &mime)) {
+            if (!strncasecmp(mime.c_str(), "video/", 6)) {
+                ALOGV("VideoTrack");
+                if (videoTrackIndex != -1) {
+                    ALOGE("Not more than one video track is supported");
+                    return ERROR_UNSUPPORTED;
+                }
+                videoTrackIndex = exTrackIndex;
+            } else if (!strncasecmp(mime.c_str(), "audio/", 6)) {
+                ALOGV("AudioTrack");
+                if (audioTrackIndex != -1) {
+                    ALOGE("Not more than one audio track is supported");
+                }
+                audioTrackIndex = exTrackIndex;
+            } else {
+                ALOGV("Neither Video nor Audio track");
+            }
+        }
+        mFmtIndexMap.emplace(exTrackIndex, fmt);
+        mSampleCountVect.emplace_back(0);
+        mMaxTimestampVect.emplace_back(0);
+        mLastSyncSampleTimeVect.emplace_back(0);
+        status = mExtractor->selectTrack(exTrackIndex);
+        if (status != OK) {
+            ALOGE("selectTrack failed for trackIndex:%zu, status:%d", exTrackIndex, status);
+            return status;
+        }
+        ++exTrackIndex;
+    }
+
+    ALOGV("AudioTrackIndex:%zu, VideoTrackIndex:%zu", audioTrackIndex, videoTrackIndex);
+
+    do {
+        sampleDataInfo tmpSDI;
+        // TODO: read info into members of the struct sampleDataInfo directly
+        size_t sampleSize;
+        status = mExtractor->getSampleSize(&sampleSize);
+        if (status != OK) {
+            ALOGE("getSampleSize failed, status:%d", status);
+            return status;
+        }
+        mSampleSizeVect.emplace_back(sampleSize);
+        tmpSDI.size = sampleSize;
+        int64_t sampleTime = 0;
+        status = mExtractor->getSampleTime(&sampleTime);
+        if (status != OK) {
+            ALOGE("getSampleTime failed, status:%d", status);
+            return status;
+        }
+        mSampleTimeVect.emplace_back(sampleTime);
+        tmpSDI.time = sampleTime;
+        status = mExtractor->getSampleTrackIndex(&exTrackIndex);
+        if (status != OK) {
+            ALOGE("getSampleTrackIndex failed, status:%d", status);
+            return status;
+        }
+        mSampleIndexVect.emplace_back(exTrackIndex);
+        tmpSDI.exTrackIndex = exTrackIndex;
+        ++mSampleCountVect[exTrackIndex];
+        mMaxTimestampVect[exTrackIndex] = std::max(mMaxTimestampVect[exTrackIndex], sampleTime);
+        sp<MetaData> sampleMeta;
+        status = mExtractor->getSampleMeta(&sampleMeta);
+        if (status != OK) {
+            ALOGE("getSampleMeta failed, status:%d", status);
+            return status;
+        }
+        mSampleMetaVect.emplace_back(sampleMeta);
+        int32_t val = 0;
+        if (sampleMeta->findInt32(kKeyIsSyncFrame, &val) && val != 0) {
+            mLastSyncSampleTimeVect[exTrackIndex] = sampleTime;
+        }
+        tmpSDI.meta = sampleMeta;
+        mSDI.emplace_back(tmpSDI);
+    } while (mExtractor->advance() == OK);
+
+    mExtractor.clear();
+
+    std::sort(mSDI.begin(), mSDI.end(), [](sampleDataInfo& a, sampleDataInfo& b) {
+        int64_t aOffset, bOffset;
+        a.meta->findInt64(kKeySampleFileOffset, &aOffset);
+        b.meta->findInt64(kKeySampleFileOffset, &bOffset);
+        return aOffset < bOffset;
+    });
+    for (int64_t syncSampleTime : mLastSyncSampleTimeVect) {
+        ALOGV("before ignoring frames, mLastSyncSampleTimeVect:%lld", (long long)syncSampleTime);
+    }
+    ALOGV("mMode:%u", mMode);
+    if (mMode == APPEND_MODE_IGNORE_LAST_VIDEO_GOP && videoTrackIndex != -1 ) {
+        ALOGV("Video track is present");
+        bool lastVideoIframe = false;
+        size_t lastVideoIframeOffset = 0;
+        int64_t lastVideoSampleTime = -1;
+        for (auto rItr = mSDI.rbegin(); rItr != mSDI.rend(); ++rItr) {
+            if (rItr->exTrackIndex != videoTrackIndex) {
+                continue;
+            }
+            if (lastVideoSampleTime == -1) {
+                lastVideoSampleTime = rItr->time;
+            }
+            int64_t offset = 0;
+            if (!rItr->meta->findInt64(kKeySampleFileOffset, &offset) || offset == 0) {
+                ALOGE("Missing offset");
+                return ERROR_MALFORMED;
+            }
+            ALOGV("offset:%lld", (long long)offset);
+            int32_t val = 0;
+            if (rItr->meta->findInt32(kKeyIsSyncFrame, &val) && val != 0) {
+                ALOGV("sampleTime:%lld", (long long)rItr->time);
+                ALOGV("lastVideoSampleTime:%lld", (long long)lastVideoSampleTime);
+                if (lastVideoIframe == false && (lastVideoSampleTime - rItr->time) >
+                                1000000/* Track interleaving duration in MPEG4Writer*/) {
+                    ALOGV("lastVideoIframe got chosen");
+                    lastVideoIframe = true;
+                    mLastSyncSampleTimeVect[videoTrackIndex] = rItr->time;
+                    lastVideoIframeOffset = offset;
+                    ALOGV("lastVideoIframeOffset:%lld", (long long)offset);
+                    break;
+                }
+            }
+        }
+        if (lastVideoIframe == false) {
+            ALOGV("Need to rewrite all samples");
+            mLastSyncSampleTimeVect[videoTrackIndex] = 0;
+            lastVideoIframeOffset = 0;
+        }
+        unsigned int framesIgnoredCount = 0;
+        for (auto itr = mSDI.begin(); itr != mSDI.end();) {
+            int64_t offset = 0;
+            ALOGV("trackIndex:%zu, %" PRId64 "", itr->exTrackIndex, itr->time);
+            if (itr->meta->findInt64(kKeySampleFileOffset, &offset) &&
+                                        offset >= lastVideoIframeOffset) {
+                ALOGV("offset:%lld", (long long)offset);
+                if (!audioSyncSampleTimeSet && audioTrackIndex != -1 &&
+                                            audioTrackIndex == itr->exTrackIndex) {
+                    mLastSyncSampleTimeVect[audioTrackIndex] = itr->time;
+                    audioSyncSampleTimeSet = true;
+                }
+                itr = mSDI.erase(itr);
+                ++framesIgnoredCount;
+            } else {
+                ++itr;
+            }
+        }
+        ALOGV("framesIgnoredCount:%u", framesIgnoredCount);
+    }
+
+    if (mMode == APPEND_MODE_IGNORE_LAST_VIDEO_GOP && videoTrackIndex == -1 &&
+                            audioTrackIndex != -1) {
+        ALOGV("Only AudioTrack is present");
+        for (auto rItr = mSDI.rbegin(); rItr != mSDI.rend();  ++rItr) {
+            int32_t val = 0;
+            if (rItr->meta->findInt32(kKeyIsSyncFrame, &val) && val != 0) {
+                    mLastSyncSampleTimeVect[audioTrackIndex] = rItr->time;
+                    break;
+            }
+        }
+        unsigned int framesIgnoredCount = 0;
+        for (auto itr = mSDI.begin(); itr != mSDI.end();) {
+            if (itr->time >= mLastSyncSampleTimeVect[audioTrackIndex]) {
+                itr = mSDI.erase(itr);
+                ++framesIgnoredCount;
+            } else {
+                ++itr;
+            }
+        }
+        ALOGV("framesIgnoredCount :%u", framesIgnoredCount);
+    }
+
+    for (size_t i = 0; i < mLastSyncSampleTimeVect.size(); ++i) {
+        ALOGV("mLastSyncSampleTimeVect[%zu]:%lld", i, (long long)mLastSyncSampleTimeVect[i]);
+        mFmtIndexMap[i]->setInt64(
+                "sample-time-before-append" /*AMEDIAFORMAT_KEY_SAMPLE_TIME_BEFORE_APPEND*/,
+                mLastSyncSampleTimeVect[i]);
+    }
+    for (size_t i = 0; i < mMaxTimestampVect.size(); ++i) {
+        ALOGV("mMaxTimestamp[%zu]:%lld", i, (long long)mMaxTimestampVect[i]);
+    }
+    for (size_t i = 0; i < mSampleCountVect.size(); ++i) {
+        ALOGV("SampleCountVect[%zu]:%zu", i, mSampleCountVect[i]);
+    }
+    mState = INITIALIZED;
+    return OK;
+}
+
+MediaAppender::~MediaAppender() {
+    ALOGV("MediaAppender::~MediaAppender");
+    mMuxer.clear();
+    mExtractor.clear();
+}
+
+status_t MediaAppender::start() {
+    std::scoped_lock lock(mMutex);
+    ALOGV("MediaAppender::start");
+    if (mState != INITIALIZED) {
+        ALOGE("MediaAppender::start() is called in invalid state %d", mState);
+        return INVALID_OPERATION;
+    }
+    mMuxer = new (std::nothrow) MediaMuxer(mFd, mFormat);
+    for (const auto& n : mFmtIndexMap) {
+        ssize_t muxIndex = mMuxer->addTrack(n.second);
+        if (muxIndex < 0) {
+            ALOGE("addTrack failed");
+            return UNKNOWN_ERROR;
+        }
+        mTrackIndexMap.emplace(n.first, muxIndex);
+    }
+    ALOGV("trackIndexmap size:%zu", mTrackIndexMap.size());
+
+    status_t status = mMuxer->start();
+    if (status != OK) {
+        ALOGE("muxer start failed:%d", status);
+        return status;
+    }
+
+    ALOGV("Sorting samples based on their offsets");
+    for (int i = 0; i < mSDI.size(); ++i) {
+        ALOGV("i:%d", i + 1);
+        /* TODO: make a single allocation of the max sample size and reuse it across
+         * ABuffers by constructing them with ABuffer(void *, size_t).
+         */
+        sp<ABuffer> data = new (std::nothrow) ABuffer(mSDI[i].size);
+        if (data == nullptr) {
+            ALOGE("memory allocation failed");
+            return NO_MEMORY;
+        }
+        data->setRange(0, mSDI[i].size);
+        int32_t val = 0;
+        int sampleFlags = 0;
+        if (mSDI[i].meta->findInt32(kKeyIsSyncFrame, &val) && val != 0) {
+            sampleFlags |= MediaCodec::BUFFER_FLAG_SYNCFRAME;
+        }
+
+        int64_t val64;
+        if (mSDI[i].meta->findInt64(kKeySampleFileOffset, &val64)) {
+            ALOGV("SampleFileOffset Found :%zu:%lld:%lld", mSDI[i].exTrackIndex,
+                  (long long)mSampleCountVect[mSDI[i].exTrackIndex], (long long)val64);
+            sp<AMessage> bufMeta = data->meta();
+            bufMeta->setInt64("sample-file-offset" /*AMEDIAFORMAT_KEY_SAMPLE_TIME_BEFORE_APPEND*/,
+                              val64);
+        }
+        if (mSDI[i].meta->findInt64(kKeyLastSampleIndexInChunk, &val64)) {
+            ALOGV("kKeyLastSampleIndexInChunk Found %lld:%lld",
+                  (long long)mSampleCountVect[mSDI[i].exTrackIndex], (long long)val64);
+            sp<AMessage> bufMeta = data->meta();
+            bufMeta->setInt64(
+                    "last-sample-index-in-chunk" /*AMEDIAFORMAT_KEY_LAST_SAMPLE_INDEX_IN_CHUNK*/,
+                    val64);
+        }
+        status = mMuxer->writeSampleData(data, mTrackIndexMap[mSDI[i].exTrackIndex], mSDI[i].time,
+                                         sampleFlags);
+        if (status != OK) {
+            ALOGE("muxer writeSampleData failed:%d", status);
+            return status;
+        }
+    }
+    mState = STARTED;
+    return OK;
+}
+
+status_t MediaAppender::stop() {
+    std::scoped_lock lock(mMutex);
+    ALOGV("MediaAppender::stop");
+    if (mState == STARTED) {
+        status_t status = mMuxer->stop();
+        if (status != OK) {
+            mState = ERROR;
+        } else {
+            mState = STOPPED;
+        }
+        return status;
+    } else {
+        ALOGE("stop() is called in invalid state %d", mState);
+        return INVALID_OPERATION;
+    }
+}
+
+ssize_t MediaAppender::getTrackCount() {
+    std::scoped_lock lock(mMutex);
+    ALOGV("MediaAppender::getTrackCount");
+    if (mState != INITIALIZED && mState != STARTED) {
+        ALOGE("getTrackCount() is called in invalid state %d", mState);
+        return -1;
+    }
+    return mTrackCount;
+}
+
+sp<AMessage> MediaAppender::getTrackFormat(size_t idx) {
+    std::scoped_lock lock(mMutex);
+    ALOGV("MediaAppender::getTrackFormat");
+    if (mState != INITIALIZED && mState != STARTED) {
+        ALOGE("getTrackFormat() is called in invalid state %d", mState);
+        return nullptr;
+    }
+    if (idx < 0 || idx >= mTrackCount) {
+        ALOGE("getTrackFormat() idx is out of range");
+        return nullptr;
+    }
+    return mFmtIndexMap[idx];
+}
+
+status_t MediaAppender::writeSampleData(const sp<ABuffer>& buffer, size_t trackIndex,
+                                        int64_t timeUs, uint32_t flags) {
+    std::scoped_lock lock(mMutex);
+    ALOGV("writeSampleData:trackIndex:%zu, time:%" PRId64 "", trackIndex, timeUs);
+    return mMuxer->writeSampleData(buffer, trackIndex, timeUs, flags);
+}
+
+status_t MediaAppender::setOrientationHint([[maybe_unused]] int degrees) {
+    ALOGE("setOrientationHint not supported. Has to be called prior to start on initial muxer");
+    return ERROR_UNSUPPORTED;
+};
+
+status_t MediaAppender::setLocation([[maybe_unused]] int latit, [[maybe_unused]] int longit) {
+    ALOGE("setLocation not supported. Has to be called prior to start on initial muxer");
+    return ERROR_UNSUPPORTED;
+}
+
+ssize_t MediaAppender::addTrack([[maybe_unused]] const sp<AMessage> &format) {
+    ALOGE("addTrack not supported");
+    return ERROR_UNSUPPORTED;
+}
+
+}  // namespace android
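
Taken together, the new class is meant to be driven like a muxer that resumes an existing file. A hedged end-to-end sketch of the API declared here; fd is an MP4 already opened for read/write, buffer, trackIndex, timeUs and flags are placeholders, and the AppendMode enumerator is the one referenced in init() above:

    sp<MediaAppender> appender = MediaAppender::create(fd, APPEND_MODE_IGNORE_LAST_VIDEO_GOP);
    if (appender == nullptr || appender->start() != OK) {
        return;  // could not parse or resume the existing file
    }
    // formats of the tracks already in the file, e.g. to configure encoders consistently
    sp<AMessage> trackFormat = appender->getTrackFormat(0 /*idx*/);
    // new samples are then appended through the muxer-style call
    appender->writeSampleData(buffer, 0 /*trackIndex*/, timeUs, flags);
    appender->stop();
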
diff --git a/media/libstagefright/MediaCodec.cpp b/media/libstagefright/MediaCodec.cpp
index 358c5e3..50ebeef 100644
--- a/media/libstagefright/MediaCodec.cpp
+++ b/media/libstagefright/MediaCodec.cpp
@@ -20,9 +20,11 @@
 #include <utils/Log.h>
 
 #include <set>
+#include <stdlib.h>
 
 #include <inttypes.h>
 #include <stdlib.h>
+#include <dlfcn.h>
 
 #include <C2Buffer.h>
 
@@ -35,6 +37,7 @@
 #include <aidl/android/media/IResourceManagerService.h>
 #include <android/binder_ibinder.h>
 #include <android/binder_manager.h>
+#include <android/dlext.h>
 #include <binder/IMemory.h>
 #include <binder/MemoryDealer.h>
 #include <cutils/properties.h>
@@ -47,6 +50,10 @@
 #include <media/MediaCodecInfo.h>
 #include <media/MediaMetricsItem.h>
 #include <media/MediaResource.h>
+#include <media/NdkMediaErrorPriv.h>
+#include <media/NdkMediaFormat.h>
+#include <media/NdkMediaFormatPriv.h>
+#include <media/formatshaper/FormatShaper.h>
 #include <media/stagefright/foundation/ABuffer.h>
 #include <media/stagefright/foundation/ADebug.h>
 #include <media/stagefright/foundation/AMessage.h>
@@ -68,6 +75,7 @@
 #include <media/stagefright/OMXClient.h>
 #include <media/stagefright/PersistentSurface.h>
 #include <media/stagefright/SurfaceUtils.h>
+#include <nativeloader/dlext_namespaces.h>
 #include <private/android_filesystem_config.h>
 #include <utils/Singleton.h>
 
@@ -101,6 +109,7 @@
 static const char *kCodecLevel = "android.media.mediacodec.level";  /* 0..n */
 static const char *kCodecBitrateMode = "android.media.mediacodec.bitrate_mode";  /* CQ/VBR/CBR */
 static const char *kCodecBitrate = "android.media.mediacodec.bitrate";  /* 0..n */
+static const char *kCodecOriginalBitrate = "android.media.mediacodec.original.bitrate";  /* 0..n */
 static const char *kCodecMaxWidth = "android.media.mediacodec.maxwidth";  /* 0..n */
 static const char *kCodecMaxHeight = "android.media.mediacodec.maxheight";  /* 0..n */
 static const char *kCodecError = "android.media.mediacodec.errcode";
@@ -118,6 +127,11 @@
 static const char *kCodecNumLowLatencyModeOn = "android.media.mediacodec.low-latency.on";  /* 0..n */
 static const char *kCodecNumLowLatencyModeOff = "android.media.mediacodec.low-latency.off";  /* 0..n */
 static const char *kCodecFirstFrameIndexLowLatencyModeOn = "android.media.mediacodec.low-latency.first-frame";  /* 0..n */
+static const char *kCodecChannelCount = "android.media.mediacodec.channelCount";
+static const char *kCodecSampleRate = "android.media.mediacodec.sampleRate";
+static const char *kCodecVideoEncodedBytes = "android.media.mediacodec.vencode.bytes";
+static const char *kCodecVideoEncodedFrames = "android.media.mediacodec.vencode.frames";
+static const char *kCodecVideoEncodedDurationUs = "android.media.mediacodec.vencode.durationUs";
 
 // the kCodecRecent* fields appear only in getMetrics() results
 static const char *kCodecRecentLatencyMax = "android.media.mediacodec.recent.max";      /* in us */
@@ -126,6 +140,8 @@
 static const char *kCodecRecentLatencyCount = "android.media.mediacodec.recent.n";
 static const char *kCodecRecentLatencyHist = "android.media.mediacodec.recent.hist";    /* in us */
 
+static const char *kCodecShapingEnhanced = "android.media.mediacodec.shaped";    /* 0/1 */
+
 // XXX suppress until we get our representation right
 static bool kEmitHistogram = false;
 
@@ -358,11 +374,24 @@
         BufferQueue::createBufferQueue(&mProducer, &mConsumer);
         mSurface = new Surface(mProducer, false /* controlledByApp */);
         struct ConsumerListener : public BnConsumerListener {
-            void onFrameAvailable(const BufferItem&) override {}
+            ConsumerListener(const sp<IGraphicBufferConsumer> &consumer) {
+                mConsumer = consumer;
+            }
+            void onFrameAvailable(const BufferItem&) override {
+                BufferItem buffer;
+                // consume buffer
+                sp<IGraphicBufferConsumer> consumer = mConsumer.promote();
+                if (consumer != nullptr && consumer->acquireBuffer(&buffer, 0) == NO_ERROR) {
+                    consumer->releaseBuffer(buffer.mSlot, buffer.mFrameNumber,
+                                            EGL_NO_DISPLAY, EGL_NO_SYNC_KHR, buffer.mFence);
+                }
+            }
+
+            wp<IGraphicBufferConsumer> mConsumer;
             void onBuffersReleased() override {}
             void onSidebandStreamChanged() override {}
         };
-        sp<ConsumerListener> listener{new ConsumerListener};
+        sp<ConsumerListener> listener{new ConsumerListener(mConsumer)};
         mConsumer->consumerConnect(listener, false);
         mConsumer->setConsumerName(String8{"MediaCodec.release"});
         mConsumer->setConsumerUsageBits(usage);
@@ -398,6 +427,7 @@
     kWhatSignaledInputEOS    = 'seos',
     kWhatOutputFramesRendered = 'outR',
     kWhatOutputBuffersChanged = 'outC',
+    kWhatFirstTunnelFrameReady = 'ftfR',
 };
 
 class BufferCallback : public CodecBase::BufferCallback {
@@ -460,6 +490,7 @@
     virtual void onSignaledInputEOS(status_t err) override;
     virtual void onOutputFramesRendered(const std::list<FrameRenderTracker::Info> &done) override;
     virtual void onOutputBuffersChanged() override;
+    virtual void onFirstTunnelFrameReady() override;
 private:
     const sp<AMessage> mNotify;
 };
@@ -580,6 +611,12 @@
     notify->post();
 }
 
+void CodecCallback::onFirstTunnelFrameReady() {
+    sp<AMessage> notify(mNotify->dup());
+    notify->setInt32("what", kWhatFirstTunnelFrameReady);
+    notify->post();
+}
+
 }  // namespace
 
 ////////////////////////////////////////////////////////////////////////////////
@@ -678,10 +715,15 @@
       mTunneledInputWidth(0),
       mTunneledInputHeight(0),
       mTunneled(false),
+      mTunnelPeekState(TunnelPeekState::kEnabledNoBuffer),
       mHaveInputSurface(false),
       mHavePendingInputBuffers(false),
       mCpuBoostRequested(false),
       mLatencyUnknown(0),
+      mBytesEncoded(0),
+      mEarliestEncodedPtsUs(INT64_MAX),
+      mLatestEncodedPtsUs(INT64_MIN),
+      mFramesEncoded(0),
       mNumLowLatencyEnables(0),
       mNumLowLatencyDisables(0),
       mIsLowLatencyModeOn(false),
@@ -789,6 +831,18 @@
         mediametrics_setInt64(mMetricsHandle, kCodecLifetimeMs, lifetime);
     }
 
+    if (mBytesEncoded) {
+        Mutex::Autolock al(mOutputStatsLock);
+
+        mediametrics_setInt64(mMetricsHandle, kCodecVideoEncodedBytes, mBytesEncoded);
+        int64_t duration = 0;
+        if (mLatestEncodedPtsUs > mEarliestEncodedPtsUs) {
+            duration = mLatestEncodedPtsUs - mEarliestEncodedPtsUs;
+        }
+        mediametrics_setInt64(mMetricsHandle, kCodecVideoEncodedDurationUs, duration);
+        mediametrics_setInt64(mMetricsHandle, kCodecVideoEncodedFrames, mFramesEncoded);
+    }
+
     {
         Mutex::Autolock al(mLatencyLock);
         mediametrics_setInt64(mMetricsHandle, kCodecNumLowLatencyModeOn, mNumLowLatencyEnables);
@@ -867,6 +921,47 @@
     }
 }
 
+constexpr const char *MediaCodec::asString(TunnelPeekState state, const char *default_string) {
+    switch (state) {
+        case TunnelPeekState::kEnabledNoBuffer:
+            return "EnabledNoBuffer";
+        case TunnelPeekState::kDisabledNoBuffer:
+            return "DisabledNoBuffer";
+        case TunnelPeekState::kBufferDecoded:
+            return "BufferDecoded";
+        case TunnelPeekState::kBufferRendered:
+            return "BufferRendered";
+        default:
+            return default_string;
+    }
+}
+
+void MediaCodec::updateTunnelPeek(const sp<AMessage> &msg) {
+    int32_t tunnelPeek = 0;
+    if (!msg->findInt32("tunnel-peek", &tunnelPeek)){
+        return;
+    }
+    if(tunnelPeek == 0){
+        if (mTunnelPeekState == TunnelPeekState::kEnabledNoBuffer) {
+            mTunnelPeekState = TunnelPeekState::kDisabledNoBuffer;
+            ALOGV("TunnelPeekState: %s -> %s",
+                  asString(TunnelPeekState::kEnabledNoBuffer),
+                  asString(TunnelPeekState::kDisabledNoBuffer));
+            return;
+        }
+    } else {
+        if (mTunnelPeekState == TunnelPeekState::kDisabledNoBuffer) {
+            mTunnelPeekState = TunnelPeekState::kEnabledNoBuffer;
+            ALOGV("TunnelPeekState: %s -> %s",
+                  asString(TunnelPeekState::kDisabledNoBuffer),
+                  asString(TunnelPeekState::kEnabledNoBuffer));
+            return;
+        }
+    }
+
+    ALOGV("Ignoring tunnel-peek=%d for %s", tunnelPeek, asString(mTunnelPeekState));
+}
+
 bool MediaCodec::Histogram::setup(int nbuckets, int64_t width, int64_t floor)
 {
     if (nbuckets <= 0 || width <= 0) {
@@ -992,10 +1087,34 @@
 }
 
 // when we get a buffer back from the codec
-void MediaCodec::statsBufferReceived(int64_t presentationUs) {
+void MediaCodec::statsBufferReceived(int64_t presentationUs, const sp<MediaCodecBuffer> &buffer) {
 
     CHECK_NE(mState, UNINITIALIZED);
 
+    if (mIsVideo && (mFlags & kFlagIsEncoder)) {
+        int32_t flags = 0;
+        (void) buffer->meta()->findInt32("flags", &flags);
+
+        // Don't count codec-config buffers or standalone EOS buffers;
+        // a standalone EOS buffer carries an invalid timestamp.
+        if ((flags & (BUFFER_FLAG_CODECCONFIG|BUFFER_FLAG_EOS)) == 0) {
+            mBytesEncoded += buffer->size();
+            mFramesEncoded++;
+
+            Mutex::Autolock al(mOutputStatsLock);
+            int64_t timeUs = 0;
+            if (buffer->meta()->findInt64("timeUs", &timeUs)) {
+                if (timeUs > mLatestEncodedPtsUs) {
+                    mLatestEncodedPtsUs = timeUs;
+                }
+                // can't chain as an else-if or this never triggers
+                if (timeUs < mEarliestEncodedPtsUs) {
+                    mEarliestEncodedPtsUs = timeUs;
+                }
+            }
+        }
+    }
+
     // mutex access to mBuffersInFlight and other stats
     Mutex::Autolock al(mLatencyLock);
 
@@ -1051,7 +1170,7 @@
         return;
     }
 
-    // nowNs start our calculations
+    // now start our calculations
     const int64_t nowNs = systemTime(SYSTEM_TIME_MONOTONIC);
     int64_t latencyUs = (nowNs - startdata.startedNs + 500) / 1000;
 
@@ -1268,6 +1387,21 @@
     return msg->post();
 }
 
+status_t MediaCodec::setOnFirstTunnelFrameReadyNotification(const sp<AMessage> &notify) {
+    sp<AMessage> msg = new AMessage(kWhatSetNotification, this);
+    msg->setMessage("first-tunnel-frame-ready", notify);
+    return msg->post();
+}
+
+/*
+ * MediaFormat Shaping forward declarations
+ * including the property name we use for control.
+ */
+static int enableMediaFormatShapingDefault = 1;
+static const char enableMediaFormatShapingProperty[] = "debug.stagefright.enableshaping";
+static void mapFormat(AString componentName, const sp<AMessage> &format, const char *kind,
+                      bool reverse);
+
 status_t MediaCodec::configure(
         const sp<AMessage> &format,
         const sp<Surface> &nativeWindow,
@@ -1324,6 +1458,30 @@
             ALOGE("Invalid size(s), width=%d, height=%d", mVideoWidth, mVideoHeight);
             return BAD_VALUE;
         }
+
+    } else {
+        if (mMetricsHandle != 0) {
+            int32_t channelCount;
+            if (format->findInt32(KEY_CHANNEL_COUNT, &channelCount)) {
+                mediametrics_setInt32(mMetricsHandle, kCodecChannelCount, channelCount);
+            }
+            int32_t sampleRate;
+            if (format->findInt32(KEY_SAMPLE_RATE, &sampleRate)) {
+                mediametrics_setInt32(mMetricsHandle, kCodecSampleRate, sampleRate);
+            }
+        }
+    }
+
+    if (flags & CONFIGURE_FLAG_ENCODE) {
+        int8_t enableShaping = property_get_bool(enableMediaFormatShapingProperty,
+                                                 enableMediaFormatShapingDefault);
+        if (!enableShaping) {
+            ALOGI("format shaping disabled, property '%s'", enableMediaFormatShapingProperty);
+        } else {
+            (void) shapeMediaFormat(format, flags);
+            // XXX: do we want to do this regardless of shaping enablement?
+            mapFormat(mComponentName, format, nullptr, false);
+        }
     }
 
     updateLowLatency(format);
@@ -1348,6 +1506,8 @@
     // save msg for reset
     mConfigureMsg = msg;
 
+    sp<AMessage> callback = mCallback;
+
     status_t err;
     std::vector<MediaResourceParcel> resources;
     resources.push_back(MediaResource::CodecResource(mFlags & kFlagIsSecure, mIsVideo));
@@ -1372,7 +1532,18 @@
             // the configure failure is due to wrong state.
 
             ALOGE("configure failed with err 0x%08x, resetting...", err);
-            reset();
+            status_t err2 = reset();
+            if (err2 != OK) {
+                ALOGE("retrying configure: failed to reset codec (%08x)", err2);
+                break;
+            }
+            if (callback != nullptr) {
+                err2 = setCallback(callback);
+                if (err2 != OK) {
+                    ALOGE("retrying configure: failed to set callback (%08x)", err2);
+                    break;
+                }
+            }
         }
         if (!isResourceError(err)) {
             break;
@@ -1382,6 +1553,358 @@
     return err;
 }
 
+// Media Format Shaping support
+//
+
+static android::mediaformatshaper::FormatShaperOps_t *sShaperOps = NULL;
+
+static bool connectFormatShaper() {
+    static std::once_flag sCheckOnce;
+
+    ALOGV("connectFormatShaper...");
+
+    std::call_once(sCheckOnce, [&](){
+
+        void *libHandle = NULL;
+        nsecs_t loading_started = systemTime(SYSTEM_TIME_MONOTONIC);
+
+        // prefer any copy in the mainline module
+        //
+        android_namespace_t *mediaNs = android_get_exported_namespace("com_android_media");
+        AString libraryName = "libmediaformatshaper.so";
+
+        if (mediaNs != NULL) {
+            static const android_dlextinfo dlextinfo = {
+                .flags = ANDROID_DLEXT_USE_NAMESPACE,
+                .library_namespace = mediaNs,
+            };
+
+            AString libraryMainline = "/apex/com.android.media/";
+#if __LP64__
+            libraryMainline.append("lib64/");
+#else
+            libraryMainline.append("lib/");
+#endif
+            libraryMainline.append(libraryName);
+
+            libHandle = android_dlopen_ext(libraryMainline.c_str(), RTLD_NOW|RTLD_NODELETE,
+                                                 &dlextinfo);
+
+            if (libHandle != NULL) {
+                sShaperOps = (android::mediaformatshaper::FormatShaperOps_t*)
+                                dlsym(libHandle, "shaper_ops");
+            } else {
+                ALOGW("connectFormatShaper: unable to load mainline formatshaper %s",
+                      libraryMainline.c_str());
+            }
+        } else {
+            ALOGV("connectFormatShaper: couldn't find media namespace.");
+        }
+
+        // fall back to the system partition, if present.
+        //
+        if (sShaperOps == NULL) {
+
+            libHandle = dlopen(libraryName.c_str(), RTLD_NOW|RTLD_NODELETE);
+
+            if (libHandle != NULL) {
+                sShaperOps = (android::mediaformatshaper::FormatShaperOps_t*)
+                                dlsym(libHandle, "shaper_ops");
+            } else {
+                ALOGW("connectFormatShaper: unable to load formatshaper %s", libraryName.c_str());
+            }
+        }
+
+        if (sShaperOps != nullptr
+            && sShaperOps->version != android::mediaformatshaper::SHAPER_VERSION_V1) {
+            ALOGW("connectFormatShaper: unhandled version ShaperOps: %d, DISABLED",
+                  sShaperOps->version);
+            sShaperOps = nullptr;
+        }
+
+        if (sShaperOps != nullptr) {
+            ALOGV("connectFormatShaper: connected to library %s", libraryName.c_str());
+        }
+
+        nsecs_t loading_finished = systemTime(SYSTEM_TIME_MONOTONIC);
+        ALOGV("connectFormatShaper: loaded libraries: %" PRId64 " us",
+              (loading_finished - loading_started)/1000);
+
+    });
+
+    return true;
+}
+
+
+#if 0
+// a construct to force the above dlopen() to run very early.
+// goal: keep the dlopen() off the critical path of latency-sensitive apps
+// without it, cold start of those apps is slower by the time it takes to dlopen()
+// TODO(b/183454066): tradeoffs between memory of early loading vs latency of late loading
+//
+static bool forceEarlyLoadingShaper = connectFormatShaper();
+#endif
+
+// parse the codec's properties: mapping, whether it meets min quality, etc
+// and pass them into the video quality code
+//
+static void loadCodecProperties(mediaformatshaper::shaperHandle_t shaperHandle,
+                                  sp<MediaCodecInfo> codecInfo, AString mediaType) {
+
+    sp<MediaCodecInfo::Capabilities> capabilities =
+                    codecInfo->getCapabilitiesFor(mediaType.c_str());
+    if (capabilities == nullptr) {
+        ALOGI("no capabilities as part of the codec?");
+    } else {
+        const sp<AMessage> &details = capabilities->getDetails();
+        AString mapTarget;
+        int count = details->countEntries();
+        for(int ix = 0; ix < count; ix++) {
+            AMessage::Type entryType;
+            const char *mapSrc = details->getEntryNameAt(ix, &entryType);
+            // XXX: re-use ix from getEntryAt() to avoid additional findXXX() invocation
+            //
+            static const char *featurePrefix = "feature-";
+            static const int featurePrefixLen = strlen(featurePrefix);
+            static const char *tuningPrefix = "tuning-";
+            static const int tuningPrefixLen = strlen(tuningPrefix);
+            static const char *mappingPrefix = "mapping-";
+            static const int mappingPrefixLen = strlen(mappingPrefix);
+
+            if (mapSrc == NULL) {
+                continue;
+            } else if (!strncmp(mapSrc, featurePrefix, featurePrefixLen)) {
+                int32_t intValue;
+                if (details->findInt32(mapSrc, &intValue)) {
+                    ALOGV("-- feature '%s' -> %d", mapSrc, intValue);
+                    (void)(sShaperOps->setFeature)(shaperHandle, &mapSrc[featurePrefixLen],
+                                                   intValue);
+                }
+                continue;
+            } else if (!strncmp(mapSrc, tuningPrefix, tuningPrefixLen)) {
+                AString value;
+                if (details->findString(mapSrc, &value)) {
+                    ALOGV("-- tuning '%s' -> '%s'", mapSrc, value.c_str());
+                    (void)(sShaperOps->setTuning)(shaperHandle, &mapSrc[tuningPrefixLen],
+                                                   value.c_str());
+                }
+                continue;
+            } else if (!strncmp(mapSrc, mappingPrefix, mappingPrefixLen)) {
+                AString target;
+                if (details->findString(mapSrc, &target)) {
+                    ALOGV("-- mapping %s: map %s to %s", mapSrc, &mapSrc[mappingPrefixLen],
+                          target.c_str());
+                    // key is really "kind-key"
+                    // separate that, so setMap() sees the triple  kind, key, value
+                    const char *kind = &mapSrc[mappingPrefixLen];
+                    const char *sep = strchr(kind, '-');
+                    if (sep != NULL) {
+                        const char *key = sep + 1;
+                        std::string xkind = std::string(kind, sep - kind);
+                        (void)(sShaperOps->setMap)(shaperHandle, xkind.c_str(),
+                                                   key, target.c_str());
+                    }
+                }
+            }
+        }
+    }
+}
+
+status_t MediaCodec::setupFormatShaper(AString mediaType) {
+    ALOGV("setupFormatShaper: initializing shaper data for codec %s mediaType %s",
+          mComponentName.c_str(), mediaType.c_str());
+
+    nsecs_t mapping_started = systemTime(SYSTEM_TIME_MONOTONIC);
+
+    // someone might have beaten us to it.
+    mediaformatshaper::shaperHandle_t shaperHandle;
+    shaperHandle = sShaperOps->findShaper(mComponentName.c_str(), mediaType.c_str());
+    if (shaperHandle != nullptr) {
+        ALOGV("shaperhandle %p -- no initialization needed", shaperHandle);
+        return OK;
+    }
+
+    // we get to build & register one
+    shaperHandle = sShaperOps->createShaper(mComponentName.c_str(), mediaType.c_str());
+    if (shaperHandle == nullptr) {
+        ALOGW("unable to create a shaper for cocodec %s mediaType %s",
+              mComponentName.c_str(), mediaType.c_str());
+        return OK;
+    }
+
+    (void) loadCodecProperties(shaperHandle, mCodecInfo, mediaType);
+
+    shaperHandle = sShaperOps->registerShaper(shaperHandle,
+                                              mComponentName.c_str(), mediaType.c_str());
+
+    nsecs_t mapping_finished = systemTime(SYSTEM_TIME_MONOTONIC);
+    ALOGV("setupFormatShaper: populated shaper node for codec %s: %" PRId64 " us",
+          mComponentName.c_str(), (mapping_finished - mapping_started)/1000);
+
+    return OK;
+}
+
+
+// Format Shaping
+//      Mapping and Manipulation of encoding parameters
+//
+
+status_t MediaCodec::shapeMediaFormat(
+            const sp<AMessage> &format,
+            uint32_t flags) {
+    ALOGV("shapeMediaFormat entry");
+
+    if (!(flags & CONFIGURE_FLAG_ENCODE)) {
+        ALOGW("shapeMediaFormat: not encoder");
+        return OK;
+    }
+    if (mCodecInfo == NULL) {
+        ALOGW("shapeMediaFormat: no codecinfo");
+        return OK;
+    }
+
+    AString mediaType;
+    if (!format->findString("mime", &mediaType)) {
+        ALOGW("shapeMediaFormat: no mediaType information");
+        return OK;
+    }
+
+    // make sure we have the function entry points for the shaper library
+    //
+
+    connectFormatShaper();
+    if (sShaperOps == nullptr) {
+        ALOGW("shapeMediaFormat: no MediaFormatShaper hooks available");
+        return OK;
+    }
+
+    // find the shaper information for this codec+mediaType pair
+    //
+    mediaformatshaper::shaperHandle_t shaperHandle;
+    shaperHandle = sShaperOps->findShaper(mComponentName.c_str(), mediaType.c_str());
+    if (shaperHandle == nullptr)  {
+        setupFormatShaper(mediaType);
+        shaperHandle = sShaperOps->findShaper(mComponentName.c_str(), mediaType.c_str());
+    }
+    if (shaperHandle == nullptr) {
+        ALOGW("shapeMediaFormat: no handler for codec %s mediatype %s",
+              mComponentName.c_str(), mediaType.c_str());
+        return OK;
+    }
+
+    // run the shaper
+    //
+
+    ALOGV("Shaping input: %s", format->debugString(0).c_str());
+
+    sp<AMessage> updatedFormat = format->dup();
+    AMediaFormat *updatedNdkFormat = AMediaFormat_fromMsg(&updatedFormat);
+
+    int result = (*sShaperOps->shapeFormat)(shaperHandle, updatedNdkFormat, flags);
+    if (result == 0) {
+        AMediaFormat_getFormat(updatedNdkFormat, &updatedFormat);
+
+        sp<AMessage> deltas = updatedFormat->changesFrom(format, false /* deep */);
+        size_t changeCount = deltas->countEntries();
+        ALOGD("shapeMediaFormat: deltas(%zu): %s", changeCount, deltas->debugString(2).c_str());
+        if (changeCount > 0) {
+            if (mMetricsHandle != 0) {
+                mediametrics_setInt32(mMetricsHandle, kCodecShapingEnhanced, changeCount);
+                // save some old properties before we fold in the new ones
+                int32_t bitrate;
+                if (format->findInt32(KEY_BIT_RATE, &bitrate)) {
+                    mediametrics_setInt32(mMetricsHandle, kCodecOriginalBitrate, bitrate);
+                }
+            }
+            // NB: for any field in both format and deltas, the deltas copy wins
+            format->extend(deltas);
+        }
+    }
+
+    AMediaFormat_delete(updatedNdkFormat);
+    return OK;
+}
+
+static void mapFormat(AString componentName, const sp<AMessage> &format, const char *kind,
+                      bool reverse) {
+    AString mediaType;
+    if (!format->findString("mime", &mediaType)) {
+        ALOGW("mapFormat: no mediaType information");
+        return;
+    }
+    ALOGV("mapFormat: codec %s mediatype %s kind %s reverse %d", componentName.c_str(),
+          mediaType.c_str(), kind ? kind : "<all>", reverse);
+
+    // make sure we have the function entry points for the shaper library
+    //
+
+#if 0
+    // let's take the faster path: only do mapping if we've already loaded the library
+    connectFormatShaper();
+#endif
+    if (sShaperOps == nullptr) {
+        ALOGV("mapFormat: no MediaFormatShaper hooks available");
+        return;
+    }
+
+    // find the shaper information for this codec+mediaType pair
+    //
+    mediaformatshaper::shaperHandle_t shaperHandle;
+    shaperHandle = sShaperOps->findShaper(componentName.c_str(), mediaType.c_str());
+    if (shaperHandle == nullptr) {
+        ALOGV("mapFormat: no shaper handle");
+        return;
+    }
+
+    const char **mappings;
+    if (reverse)
+        mappings = sShaperOps->getReverseMappings(shaperHandle, kind);
+    else
+        mappings = sShaperOps->getMappings(shaperHandle, kind);
+
+    if (mappings == nullptr) {
+        ALOGV("no mappings returned");
+        return;
+    }
+
+    ALOGV("Pre-mapping: %s",  format->debugString(2).c_str());
+    // do the mapping
+    //
+    int entries = format->countEntries();
+    for (int i = 0; ; i += 2) {
+        if (mappings[i] == nullptr) {
+            break;
+        }
+
+        size_t ix = format->findEntryByName(mappings[i]);
+        if (ix < entries) {
+            ALOGV("map '%s' to '%s'", mappings[i], mappings[i+1]);
+            status_t status = format->setEntryNameAt(ix, mappings[i+1]);
+            if (status != OK) {
+                ALOGW("Unable to map from '%s' to '%s': status %d",
+                      mappings[i], mappings[i+1], status);
+            }
+        }
+    }
+    ALOGV("Post-mapping: %s",  format->debugString(2).c_str());
+
+
+    // reclaim the mapping memory
+    for (int i = 0; ; i += 2) {
+        if (mappings[i] == nullptr) {
+            break;
+        }
+        free((void*)mappings[i]);
+        free((void*)mappings[i + 1]);
+    }
+    free(mappings);
+    mappings = nullptr;
+}
+
+//
+// end of Format Shaping hooks within MediaCodec
+//
+
 status_t MediaCodec::releaseCrypto()
 {
     ALOGV("releaseCrypto");
@@ -1481,6 +2004,8 @@
 status_t MediaCodec::start() {
     sp<AMessage> msg = new AMessage(kWhatStart, this);
 
+    sp<AMessage> callback;
+
     status_t err;
     std::vector<MediaResourceParcel> resources;
     resources.push_back(MediaResource::CodecResource(mFlags & kFlagIsSecure, mIsVideo));
@@ -1505,6 +2030,20 @@
                 ALOGE("retrying start: failed to configure codec");
                 break;
             }
+            if (callback != nullptr) {
+                err = setCallback(callback);
+                if (err != OK) {
+                    ALOGE("retrying start: failed to set callback");
+                    break;
+                }
+                ALOGD("succeed to set callback for reclaim");
+            }
+        }
+
+        // Keep callback message after the first iteration if necessary.
+        if (i == 0 && mCallback != nullptr && mFlags & kFlagIsAsync) {
+            callback = mCallback;
+            ALOGD("keep callback message for reclaim");
         }
 
         sp<AMessage> response;
@@ -1976,6 +2515,22 @@
     return OK;
 }
 
+status_t MediaCodec::querySupportedVendorParameters(std::vector<std::string> *names) {
+    return mCodec->querySupportedParameters(names);
+}
+
+status_t MediaCodec::describeParameter(const std::string &name, CodecParameterDescriptor *desc) {
+    return mCodec->describeParameter(name, desc);
+}
+
+status_t MediaCodec::subscribeToVendorParameters(const std::vector<std::string> &names) {
+    return mCodec->subscribeToParameters(names);
+}
+
+status_t MediaCodec::unsubscribeFromVendorParameters(const std::vector<std::string> &names) {
+    return mCodec->unsubscribeFromParameters(names);
+}
+
 void MediaCodec::requestActivityNotification(const sp<AMessage> &notify) {
     sp<AMessage> msg = new AMessage(kWhatRequestActivityNotification, this);
     msg->setMessage("notify", notify);
@@ -2141,14 +2696,15 @@
         int64_t timeUs;
         CHECK(buffer->meta()->findInt64("timeUs", &timeUs));
 
-        statsBufferReceived(timeUs);
-
         response->setInt64("timeUs", timeUs);
 
         int32_t flags;
         CHECK(buffer->meta()->findInt32("flags", &flags));
 
         response->setInt32("flags", flags);
+
+        statsBufferReceived(timeUs, buffer);
+
         response->postReply(replyID);
     }
 
@@ -2239,6 +2795,8 @@
                                 }
                                 postPendingRepliesAndDeferredMessages(origin + ":dead");
                                 sendErrorResponse = false;
+                            } else if (!mReplyID) {
+                                sendErrorResponse = false;
                             }
                             break;
                         }
@@ -2557,9 +3115,17 @@
 
                 case kWhatOutputFramesRendered:
                 {
-                    // ignore these in all states except running, and check that we have a
-                    // notification set
-                    if (mState == STARTED && mOnFrameRenderedNotification != NULL) {
+                    // ignore these in all states except running
+                    if (mState != STARTED) {
+                        break;
+                    }
+                    TunnelPeekState previousState = mTunnelPeekState;
+                    mTunnelPeekState = TunnelPeekState::kBufferRendered;
+                    ALOGV("TunnelPeekState: %s -> %s",
+                          asString(previousState),
+                          asString(TunnelPeekState::kBufferRendered));
+                    // check that we have a notification set
+                    if (mOnFrameRenderedNotification != NULL) {
                         sp<AMessage> notify = mOnFrameRenderedNotification->dup();
                         notify->setMessage("data", msg);
                         notify->post();
@@ -2567,6 +3133,41 @@
                     break;
                 }
 
+                case kWhatFirstTunnelFrameReady:
+                {
+                    if (mState != STARTED) {
+                        break;
+                    }
+                    switch(mTunnelPeekState) {
+                        case TunnelPeekState::kDisabledNoBuffer:
+                            mTunnelPeekState = TunnelPeekState::kBufferDecoded;
+                            ALOGV("TunnelPeekState: %s -> %s",
+                                  asString(TunnelPeekState::kDisabledNoBuffer),
+                                  asString(TunnelPeekState::kBufferDecoded));
+                            break;
+                        case TunnelPeekState::kEnabledNoBuffer:
+                            mTunnelPeekState = TunnelPeekState::kBufferDecoded;
+                            ALOGV("TunnelPeekState: %s -> %s",
+                                  asString(TunnelPeekState::kEnabledNoBuffer),
+                                  asString(TunnelPeekState::kBufferDecoded));
+                            {
+                                sp<AMessage> parameters = new AMessage();
+                                parameters->setInt32("android._trigger-tunnel-peek", 1);
+                                mCodec->signalSetParameters(parameters);
+                            }
+                            break;
+                        default:
+                            break;
+                    }
+
+                    if (mOnFirstTunnelFrameReadyNotification != nullptr) {
+                        sp<AMessage> notify = mOnFirstTunnelFrameReadyNotification->dup();
+                        notify->setMessage("data", msg);
+                        notify->post();
+                    }
+                    break;
+                }
+
                 case kWhatFillThisBuffer:
                 {
                     /* size_t index = */updateBuffers(kPortIndexInput, msg);
@@ -2785,6 +3386,9 @@
             if (msg->findMessage("on-frame-rendered", &notify)) {
                 mOnFrameRenderedNotification = notify;
             }
+            if (msg->findMessage("first-tunnel-frame-ready", &notify)) {
+                mOnFirstTunnelFrameReadyNotification = notify;
+            }
             break;
         }
 
@@ -3026,6 +3630,11 @@
             }
             sp<AReplyToken> replyID;
             CHECK(msg->senderAwaitsResponse(&replyID));
+            TunnelPeekState previousState = mTunnelPeekState;
+            mTunnelPeekState = TunnelPeekState::kEnabledNoBuffer;
+            ALOGV("TunnelPeekState: %s -> %s",
+                  asString(previousState),
+                  asString(TunnelPeekState::kEnabledNoBuffer));
 
             mReplyID = replyID;
             setState(STARTING);
@@ -3129,16 +3738,21 @@
                 break;
             }
 
-            // If we're flushing, stopping, configuring or starting  but
+            // If we're flushing, configuring or starting  but
             // received a release request, post the reply for the pending call
             // first, and consider it done. The reply token will be replaced
             // after this, and we'll no longer be able to reply.
-            if (mState == FLUSHING || mState == STOPPING
-                    || mState == CONFIGURING || mState == STARTING) {
+            if (mState == FLUSHING || mState == CONFIGURING || mState == STARTING) {
                 // mReply is always set if in these states.
                 postPendingRepliesAndDeferredMessages(
                         std::string("kWhatRelease:") + stateString(mState));
             }
+            // If we're stopping but received a release request, post the reply
+            // for the pending call if necessary. Note that the reply may have been
+            // already posted due to an error.
+            if (mState == STOPPING && mReplyID) {
+                postPendingRepliesAndDeferredMessages("kWhatRelease:STOPPING");
+            }
 
             if (mFlags & kFlagSawMediaServerDie) {
                 // It's dead, Jim. Don't expect initiateShutdown to yield
@@ -3455,6 +4069,11 @@
 
             mCodec->signalFlush();
             returnBuffersToCodec();
+            TunnelPeekState previousState = mTunnelPeekState;
+            mTunnelPeekState = TunnelPeekState::kEnabledNoBuffer;
+            ALOGV("TunnelPeekState: %s -> %s",
+                  asString(previousState),
+                  asString(TunnelPeekState::kEnabledNoBuffer));
             break;
         }
 
@@ -3585,6 +4204,7 @@
         buffer->meta()->setObject("changedKeys", changedKeys);
     }
     mOutputFormat = format;
+    mapFormat(mComponentName, format, nullptr, true);
     ALOGV("[%s] output format changed to: %s",
             mComponentName.c_str(), mOutputFormat->debugString(4).c_str());
 
@@ -4290,13 +4910,13 @@
 
         msg->setInt64("timeUs", timeUs);
 
-        statsBufferReceived(timeUs);
-
         int32_t flags;
         CHECK(buffer->meta()->findInt32("flags", &flags));
 
         msg->setInt32("flags", flags);
 
+        statsBufferReceived(timeUs, buffer);
+
         msg->post();
     }
 }
@@ -4363,6 +4983,8 @@
 
 status_t MediaCodec::onSetParameters(const sp<AMessage> &params) {
     updateLowLatency(params);
+    mapFormat(mComponentName, params, nullptr, false);
+    updateTunnelPeek(params);
     mCodec->signalSetParameters(params);
 
     return OK;
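
For readers less familiar with AMessage, the shaping path above relies on the dup()/changesFrom()/extend() idiom: the shaped copy is diffed against the caller's format and only the differing entries are folded back, with the shaped values winning on conflict. A minimal sketch; the helper and the key names are purely illustrative, not part of this change:

    #include <media/stagefright/foundation/AMessage.h>

    using android::AMessage;
    using android::sp;

    // Illustrative only: mirrors the delta-merge done in MediaCodec::shapeMediaFormat().
    static void deltaMergeSketch() {
        sp<AMessage> original = new AMessage;
        original->setInt32("bitrate", 2000000);   // hypothetical caller-provided value
        original->setInt32("frame-rate", 30);

        sp<AMessage> shaped = original->dup();
        shaped->setInt32("bitrate", 5000000);     // pretend the shaper raised it
        shaped->setInt32("extra-key", 1);         // and added a new entry

        sp<AMessage> deltas = shaped->changesFrom(original, false /* deep */);
        // deltas now holds only "bitrate" and "extra-key"
        original->extend(deltas);                 // on overlap, the deltas copy wins
    }
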
diff --git a/media/libstagefright/MediaCodecSource.cpp b/media/libstagefright/MediaCodecSource.cpp
index bc656a2..0107c32 100644
--- a/media/libstagefright/MediaCodecSource.cpp
+++ b/media/libstagefright/MediaCodecSource.cpp
@@ -30,6 +30,7 @@
 #include <media/stagefright/foundation/ADebug.h>
 #include <media/stagefright/foundation/ALooper.h>
 #include <media/stagefright/foundation/AMessage.h>
+#include <media/stagefright/foundation/ColorUtils.h>
 #include <media/stagefright/MediaBuffer.h>
 #include <media/stagefright/MediaCodec.h>
 #include <media/stagefright/MediaCodecConstants.h>
@@ -168,9 +169,7 @@
 }
 
 status_t MediaCodecSource::Puller::setStopTimeUs(int64_t stopTimeUs) {
-    sp<AMessage> msg = new AMessage(kWhatSetStopTimeUs, this);
-    msg->setInt64("stop-time-us", stopTimeUs);
-    return postSynchronouslyAndReturnError(msg);
+    return mSource->setStopTimeUs(stopTimeUs);
 }
 
 status_t MediaCodecSource::Puller::start(const sp<MetaData> &meta, const sp<AMessage> &notify) {
@@ -188,19 +187,11 @@
 }
 
 void MediaCodecSource::Puller::stop() {
-    bool interrupt = false;
-    {
-        // mark stopping before actually reaching kWhatStop on the looper, so the pulling will
-        // stop.
-        Mutexed<Queue>::Locked queue(mQueue);
-        queue->mPulling = false;
-        interrupt = queue->mReadPendingSince && (queue->mReadPendingSince < ALooper::GetNowUs() - 1000000);
-        queue->flush(); // flush any unprocessed pulled buffers
-    }
-
-    if (interrupt) {
-        interruptSource();
-    }
+    // mark stopping before actually reaching kWhatStop on the looper, so the pulling will
+    // stop.
+    Mutexed<Queue>::Locked queue(mQueue);
+    queue->mPulling = false;
+    queue->flush(); // flush any unprocessed pulled buffers
 }
 
 void MediaCodecSource::Puller::interruptSource() {
@@ -684,9 +675,9 @@
     if (mStopping && reachedEOS) {
         ALOGI("encoder (%s) stopped", mIsVideo ? "video" : "audio");
         if (mPuller != NULL) {
-            mPuller->stopSource();
+            mPuller->interruptSource();
         }
-        ALOGV("source (%s) stopped", mIsVideo ? "video" : "audio");
+        ALOGI("source (%s) stopped", mIsVideo ? "video" : "audio");
         // posting reply to everyone that's waiting
         List<sp<AReplyToken>>::iterator it;
         for (it = mStopReplyIDQueue.begin();
@@ -714,6 +705,9 @@
 status_t MediaCodecSource::feedEncoderInputBuffers() {
     MediaBufferBase* mbuf = NULL;
     while (!mAvailEncoderInputIndices.empty() && mPuller->readBuffer(&mbuf)) {
+        if (!mEncoder) {
+            return BAD_VALUE;
+        }
         size_t bufferIndex = *mAvailEncoderInputIndices.begin();
         mAvailEncoderInputIndices.erase(mAvailEncoderInputIndices.begin());
 
@@ -768,6 +762,23 @@
             memcpy(inbuf->data(), mbuf->data(), size);
 
             if (mIsVideo) {
+                int32_t ds = 0;
+                if (mbuf->meta_data().findInt32(kKeyColorSpace, &ds)
+                        && ds != HAL_DATASPACE_UNKNOWN) {
+                    android_dataspace dataspace = static_cast<android_dataspace>(ds);
+                    ColorUtils::convertDataSpaceToV0(dataspace);
+                    ALOGD("Updating dataspace to %x", dataspace);
+                    int32_t standard, transfer, range;
+                    ColorUtils::getColorConfigFromDataSpace(
+                            dataspace, &range, &standard, &transfer);
+                    sp<AMessage> msg = new AMessage;
+                    msg->setInt32(KEY_COLOR_STANDARD, standard);
+                    msg->setInt32(KEY_COLOR_TRANSFER, transfer);
+                    msg->setInt32(KEY_COLOR_RANGE, range);
+                    msg->setInt32("android._dataspace", dataspace);
+                    mEncoder->setParameters(msg);
+                }
+
                 // video encoder will release MediaBuffer when done
                 // with underlying data.
                 inbuf->meta()->setObject("mediaBufferHolder", new MediaBufferHolder(mbuf));
@@ -875,7 +886,7 @@
     {
         int32_t eos = 0;
         if (msg->findInt32("eos", &eos) && eos) {
-            ALOGV("puller (%s) reached EOS", mIsVideo ? "video" : "audio");
+            ALOGI("puller (%s) reached EOS", mIsVideo ? "video" : "audio");
             signalEOS();
             break;
         }
@@ -1093,12 +1104,7 @@
         if (generation != mGeneration) {
              break;
         }
-
-        if (!(mFlags & FLAG_USE_SURFACE_INPUT)) {
-            ALOGV("source (%s) stopping", mIsVideo ? "video" : "audio");
-            mPuller->interruptSource();
-            ALOGV("source (%s) stopped", mIsVideo ? "video" : "audio");
-        }
+        ALOGD("source (%s) stopping stalled", mIsVideo ? "video" : "audio");
         signalEOS();
         break;
     }
@@ -1130,7 +1136,7 @@
         if (mFlags & FLAG_USE_SURFACE_INPUT) {
             sp<AMessage> params = new AMessage;
             params->setInt64(PARAMETER_KEY_OFFSET_TIME, mInputBufferTimeOffsetUs);
-            err = mEncoder->setParameters(params);
+            err = mEncoder ? mEncoder->setParameters(params) : BAD_VALUE;
         }
 
         sp<AMessage> response = new AMessage;
@@ -1150,7 +1156,7 @@
         if (mFlags & FLAG_USE_SURFACE_INPUT) {
             sp<AMessage> params = new AMessage;
             params->setInt64("stop-time-us", stopTimeUs);
-            err = mEncoder->setParameters(params);
+            err = mEncoder ? mEncoder->setParameters(params) : BAD_VALUE;
         } else {
             err = mPuller->setStopTimeUs(stopTimeUs);
         }
diff --git a/media/libstagefright/MediaExtractorFactory.cpp b/media/libstagefright/MediaExtractorFactory.cpp
index 7c981b3..2520e2a 100644
--- a/media/libstagefright/MediaExtractorFactory.cpp
+++ b/media/libstagefright/MediaExtractorFactory.cpp
@@ -188,11 +188,11 @@
     // sanity check check struct version, uuid, name
     if (plugin->def.def_version != EXTRACTORDEF_VERSION_NDK_V1 &&
             plugin->def.def_version != EXTRACTORDEF_VERSION_NDK_V2) {
-        ALOGE("don't understand extractor format %u, ignoring.", plugin->def.def_version);
+        ALOGW("don't understand extractor format %u, ignoring.", plugin->def.def_version);
         return;
     }
     if (memcmp(&plugin->def.extractor_uuid, "\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0", 16) == 0) {
-        ALOGE("invalid UUID, ignoring");
+        ALOGW("invalid UUID, ignoring");
         return;
     }
     if (plugin->def.extractor_name == NULL || strlen(plugin->def.extractor_name) == 0) {
@@ -244,13 +244,18 @@
             void *libHandle = android_dlopen_ext(
                     libPath.string(),
                     RTLD_NOW | RTLD_LOCAL, dlextinfo);
-            CHECK(libHandle != nullptr)
-                    << "couldn't dlopen(" << libPath.string() << ") " << strerror(errno);
+            if (libHandle == nullptr) {
+                ALOGI("dlopen(%s) reported error %s", libPath.string(), strerror(errno));
+                continue;
+            }
 
             GetExtractorDef getDef =
                 (GetExtractorDef) dlsym(libHandle, "GETEXTRACTORDEF");
-            CHECK(getDef != nullptr)
-                    << libPath.string() << " does not contain sniffer";
+            if (getDef == nullptr) {
+                ALOGI("no sniffer found in %s", libPath.string());
+                dlclose(libHandle);
+                continue;
+            }
 
             ALOGV("registering sniffer for %s", libPath.string());
             RegisterExtractor(
@@ -258,7 +263,7 @@
         }
         closedir(libDir);
     } else {
-        ALOGE("couldn't opendir(%s)", libDirPath);
+        ALOGI("plugin directory not present (%s)", libDirPath);
     }
 }
 
diff --git a/media/libstagefright/MediaMuxer.cpp b/media/libstagefright/MediaMuxer.cpp
index c91386d..a946f71 100644
--- a/media/libstagefright/MediaMuxer.cpp
+++ b/media/libstagefright/MediaMuxer.cpp
@@ -76,6 +76,7 @@
     mFileMeta.clear();
     mWriter.clear();
     mTrackList.clear();
+    mFormatList.clear();
 }
 
 ssize_t MediaMuxer::addTrack(const sp<AMessage> &format) {
@@ -109,6 +110,8 @@
             ALOGW("addTrack() setCaptureRate failed :%d", result);
         }
     }
+
+    mFormatList.add(format);
     return mTrackList.add(newTrack);
 }
 
@@ -224,9 +227,42 @@
         ALOGV("BUFFER_FLAG_EOS");
     }
 
+    sp<AMessage> bufMeta = buffer->meta();
+    int64_t val64;
+    if (bufMeta->findInt64("sample-file-offset", &val64)) {
+        sampleMetaData.setInt64(kKeySampleFileOffset, val64);
+    }
+    if (bufMeta->findInt64(
+                "last-sample-index-in-chunk" /*AMEDIAFORMAT_KEY_LAST_SAMPLE_INDEX_IN_CHUNK*/,
+                &val64)) {
+        sampleMetaData.setInt64(kKeyLastSampleIndexInChunk, val64);
+    }
+
     sp<MediaAdapter> currentTrack = mTrackList[trackIndex];
     // This pushBuffer will wait until the mediaBuffer is consumed.
     return currentTrack->pushBuffer(mediaBuffer);
 }
 
+ssize_t MediaMuxer::getTrackCount() {
+    Mutex::Autolock autoLock(mMuxerLock);
+    if (mState != INITIALIZED && mState != STARTED) {
+        ALOGE("getTrackCount() must be called either in INITIALIZED or STARTED state");
+        return -1;
+    }
+    return mTrackList.size();
+}
+
+sp<AMessage> MediaMuxer::getTrackFormat([[maybe_unused]] size_t idx) {
+    Mutex::Autolock autoLock(mMuxerLock);
+    if (mState != INITIALIZED && mState != STARTED) {
+        ALOGE("getTrackFormat() must be called either in INITIALIZED or STARTED state");
+        return nullptr;
+    }
+    if (idx < 0 || idx >= mFormatList.size()) {
+        ALOGE("getTrackFormat() idx is out of range");
+        return nullptr;
+    }
+    return mFormatList[idx];
+}
+
 }  // namespace android
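
The two new accessors let a caller (notably the MediaAppender introduced later in this change) read back the formats cached in mFormatList. A hedged sketch of that usage; the helper name and log tag are illustrative:

    #define LOG_TAG "MuxerFormatSketch"
    #include <utils/Log.h>
    #include <media/stagefright/MediaMuxer.h>
    #include <media/stagefright/foundation/AMessage.h>

    using namespace android;

    // Illustrative only: dump every track format known to the muxer.
    static void dumpTrackFormats(const sp<MediaMuxer> &muxer) {
        ssize_t count = muxer->getTrackCount();   // -1 unless INITIALIZED or STARTED
        for (ssize_t i = 0; i < count; ++i) {
            sp<AMessage> fmt = muxer->getTrackFormat(i);
            if (fmt != nullptr) {
                ALOGD("track %zd: %s", i, fmt->debugString(0).c_str());
            }
        }
    }
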
diff --git a/media/libstagefright/MediaTrack.cpp b/media/libstagefright/MediaTrack.cpp
index 24ba38a..2447f5e 100644
--- a/media/libstagefright/MediaTrack.cpp
+++ b/media/libstagefright/MediaTrack.cpp
@@ -133,6 +133,14 @@
         if (format->mFormat->findInt64("target-time", &val64)) {
             meta.setInt64(kKeyTargetTime, val64);
         }
+        if (format->mFormat->findInt64("sample-file-offset", &val64)) {
+            meta.setInt64(kKeySampleFileOffset, val64);
+        }
+        if (format->mFormat->findInt64(
+                    "last-sample-index-in-chunk" /*AMEDIAFORMAT_KEY_LAST_SAMPLE_INDEX_IN_CHUNK*/,
+                    &val64)) {
+            meta.setInt64(kKeyLastSampleIndexInChunk, val64);
+        }
         int32_t val32;
         if (format->mFormat->findInt32("is-sync-frame", &val32)) {
             meta.setInt32(kKeyIsSyncFrame, val32);
diff --git a/media/libstagefright/NuMediaExtractor.cpp b/media/libstagefright/NuMediaExtractor.cpp
index f2c7dd6..f0383b5 100644
--- a/media/libstagefright/NuMediaExtractor.cpp
+++ b/media/libstagefright/NuMediaExtractor.cpp
@@ -189,6 +189,11 @@
     return err;
 }
 
+const char* NuMediaExtractor::getName() const {
+    Mutex::Autolock autoLock(mLock);
+    return mImpl == nullptr ? nullptr : mImpl->name().string();
+}
+
 static String8 arrayToString(const std::vector<uint8_t> &array) {
     String8 result;
     for (size_t i = 0; i < array.size(); i++) {
diff --git a/media/libstagefright/OWNERS b/media/libstagefright/OWNERS
index 819389d..0cc2294 100644
--- a/media/libstagefright/OWNERS
+++ b/media/libstagefright/OWNERS
@@ -4,4 +4,8 @@
 lajos@google.com
 marcone@google.com
 taklee@google.com
-wonsik@google.com
\ No newline at end of file
+wonsik@google.com
+
+# LON
+olly@google.com
+andrewlewis@google.com
diff --git a/media/libstagefright/TEST_MAPPING b/media/libstagefright/TEST_MAPPING
index dff7b22..7ce2968 100644
--- a/media/libstagefright/TEST_MAPPING
+++ b/media/libstagefright/TEST_MAPPING
@@ -34,6 +34,9 @@
   "presubmit": [
     {
       "name": "mediacodecTest"
+    },
+    {
+      "name": "CtsMediaTranscodingTestCases"
     }
   ],
   "postsubmit": [
diff --git a/media/libstagefright/Utils.cpp b/media/libstagefright/Utils.cpp
index f63740e..04a9b17 100644
--- a/media/libstagefright/Utils.cpp
+++ b/media/libstagefright/Utils.cpp
@@ -725,16 +725,19 @@
     }
 };
 
-static std::vector<std::pair<const char *, uint32_t>> int64Mappings {
+static std::vector<std::pair<const char*, uint32_t>> int64Mappings {
     {
-        { "exif-offset", kKeyExifOffset },
-        { "exif-size", kKeyExifSize },
-        { "xmp-offset", kKeyXmpOffset },
-        { "xmp-size", kKeyXmpSize },
-        { "target-time", kKeyTargetTime },
-        { "thumbnail-time", kKeyThumbnailTime },
-        { "timeUs", kKeyTime },
-        { "durationUs", kKeyDuration },
+        { "exif-offset", kKeyExifOffset},
+        { "exif-size", kKeyExifSize},
+        { "xmp-offset", kKeyXmpOffset},
+        { "xmp-size", kKeyXmpSize},
+        { "target-time", kKeyTargetTime},
+        { "thumbnail-time", kKeyThumbnailTime},
+        { "timeUs", kKeyTime},
+        { "durationUs", kKeyDuration},
+        { "sample-file-offset", kKeySampleFileOffset},
+        { "last-sample-index-in-chunk", kKeyLastSampleIndexInChunk},
+        { "sample-time-before-append", kKeySampleTimeBeforeAppend},
     }
 };
 
@@ -1675,7 +1678,7 @@
     if (msg->findString("mime", &mime)) {
         meta->setCString(kKeyMIMEType, mime.c_str());
     } else {
-        ALOGE("did not find mime type");
+        ALOGI("did not find mime type");
         return BAD_VALUE;
     }
 
@@ -1725,7 +1728,7 @@
             meta->setInt32(kKeyWidth, width);
             meta->setInt32(kKeyHeight, height);
         } else {
-            ALOGE("did not find width and/or height");
+            ALOGI("did not find width and/or height");
             return BAD_VALUE;
         }
 
@@ -1814,7 +1817,7 @@
         int32_t numChannels, sampleRate;
         if (!msg->findInt32("channel-count", &numChannels) ||
                 !msg->findInt32("sample-rate", &sampleRate)) {
-            ALOGE("did not find channel-count and/or sample-rate");
+            ALOGI("did not find channel-count and/or sample-rate");
             return BAD_VALUE;
         }
         meta->setInt32(kKeyChannelCount, numChannels);
@@ -2169,7 +2172,7 @@
     }
     info->duration_us = duration;
 
-    int32_t brate = -1;
+    int32_t brate = 0;
     if (!meta->findInt32(kKeyBitRate, &brate)) {
         ALOGV("track of type '%s' does not publish bitrate", mime);
     }
diff --git a/media/libstagefright/data/media_codecs_sw.xml b/media/libstagefright/data/media_codecs_sw.xml
index dd2eed3..a15a988 100644
--- a/media/libstagefright/data/media_codecs_sw.xml
+++ b/media/libstagefright/data/media_codecs_sw.xml
@@ -274,6 +274,9 @@
                 <Limit name="bitrate" range="1-2000000" />
             </Variant>
             <Feature name="intra-refresh" />
+            <!-- Video Quality control -->
+                    <!-- supports QP bounding with standard keys -->
+            <Feature name="qp-bounds" />
         </MediaCodec>
         <MediaCodec name="c2.android.vp8.encoder" type="video/x-vnd.on2.vp8" variant="slow-cpu,!slow-cpu">
             <Alias name="OMX.google.vp8.encoder" />
diff --git a/media/libstagefright/flac/dec/Android.bp b/media/libstagefright/flac/dec/Android.bp
index 225c930..665aae1 100644
--- a/media/libstagefright/flac/dec/Android.bp
+++ b/media/libstagefright/flac/dec/Android.bp
@@ -40,12 +40,12 @@
     },
 
     shared_libs: [
-        "libaudioutils",
         "liblog",
     ],
 
     static_libs: [
         "libFLAC",
+        "libaudioutils", // needed for 'float_from_i32'
     ],
 
     export_static_lib_headers: [
diff --git a/media/libstagefright/foundation/AMessage.cpp b/media/libstagefright/foundation/AMessage.cpp
index f242b19..6bb7b37 100644
--- a/media/libstagefright/foundation/AMessage.cpp
+++ b/media/libstagefright/foundation/AMessage.cpp
@@ -1078,6 +1078,17 @@
     return OK;
 }
 
+status_t AMessage::removeEntryByName(const char *name) {
+    if (name == nullptr) {
+        return BAD_VALUE;
+    }
+    size_t index = findEntryByName(name);
+    if (index >= mNumItems) {
+        return BAD_INDEX;
+    }
+    return removeEntryAt(index);
+}
+
 void AMessage::setItem(const char *name, const ItemData &item) {
     if (item.used()) {
         Item *it = allocateItem(name);
diff --git a/media/libstagefright/foundation/ColorUtils.cpp b/media/libstagefright/foundation/ColorUtils.cpp
index 070e325..3812afe 100644
--- a/media/libstagefright/foundation/ColorUtils.cpp
+++ b/media/libstagefright/foundation/ColorUtils.cpp
@@ -613,6 +613,35 @@
 }
 
 // static
+void ColorUtils::getColorConfigFromDataSpace(
+        const android_dataspace &dataspace, int32_t *range, int32_t *standard, int32_t *transfer) {
+    uint32_t gfxRange =
+        (dataspace & HAL_DATASPACE_RANGE_MASK) >> HAL_DATASPACE_RANGE_SHIFT;
+    uint32_t gfxStandard =
+        (dataspace & HAL_DATASPACE_STANDARD_MASK) >> HAL_DATASPACE_STANDARD_SHIFT;
+    uint32_t gfxTransfer =
+        (dataspace & HAL_DATASPACE_TRANSFER_MASK) >> HAL_DATASPACE_TRANSFER_SHIFT;
+
+    // assume 1-to-1 mapping to HAL values (to deal with potential vendor extensions)
+    CU::ColorRange    cuRange    = CU::kColorRangeUnspecified;
+    CU::ColorStandard cuStandard = CU::kColorStandardUnspecified;
+    CU::ColorTransfer cuTransfer = CU::kColorTransferUnspecified;
+    // TRICKY: use & to ensure all three mappings are completed
+    if (!(sGfxRanges.map(gfxRange, &cuRange) & sGfxStandards.map(gfxStandard, &cuStandard)
+            & sGfxTransfers.map(gfxTransfer, &cuTransfer))) {
+        ALOGW("could not safely map graphics dataspace (R:%u S:%u T:%u) to "
+              "platform color aspects (R:%u(%s) S:%u(%s) T:%u(%s)",
+              gfxRange, gfxStandard, gfxTransfer,
+              cuRange,    asString(cuRange),
+              cuStandard, asString(cuStandard),
+              cuTransfer, asString(cuTransfer));
+    }
+    *range    = cuRange;
+    *standard = cuStandard;
+    *transfer = cuTransfer;
+}
+
+// static
 void ColorUtils::getColorConfigFromFormat(
         const sp<AMessage> &format, int32_t *range, int32_t *standard, int32_t *transfer) {
     if (!format->findInt32("color-range", range)) {
diff --git a/media/libstagefright/foundation/include/media/stagefright/foundation/AMessage.h b/media/libstagefright/foundation/include/media/stagefright/foundation/AMessage.h
index 31e58ba..98d6147 100644
--- a/media/libstagefright/foundation/include/media/stagefright/foundation/AMessage.h
+++ b/media/libstagefright/foundation/include/media/stagefright/foundation/AMessage.h
@@ -261,6 +261,17 @@
      */
     status_t removeEntryAt(size_t index);
 
+    /**
+     * Removes an entry based on name.
+     *
+     * \param name  name of the entry
+     *
+     * \retval OK the entry was removed successfully
+     * \retval BAD_VALUE name is invalid (null)
+     * \retval BAD_INDEX name not found
+     */
+    status_t removeEntryByName(const char *name);
+
 protected:
     virtual ~AMessage();
 
diff --git a/media/libstagefright/foundation/include/media/stagefright/foundation/ColorUtils.h b/media/libstagefright/foundation/include/media/stagefright/foundation/ColorUtils.h
index cd0af2b..9e3f718 100644
--- a/media/libstagefright/foundation/include/media/stagefright/foundation/ColorUtils.h
+++ b/media/libstagefright/foundation/include/media/stagefright/foundation/ColorUtils.h
@@ -156,6 +156,10 @@
     // suited to blending. This requires implicit color space conversion on part of the device.
     static android_dataspace getDataSpaceForColorAspects(ColorAspects &aspects, bool mayExpand);
 
+    // Returns the platform color config (range, standard, transfer) for the given |dataspace|.
+    static void getColorConfigFromDataSpace(
+            const android_dataspace &dataspace, int *range, int *standard, int *transfer);
+
     // converts |dataSpace| to a V0 enum, and returns true if dataSpace is an aspect-only value
     static bool convertDataSpaceToV0(android_dataspace &dataSpace);
 
diff --git a/media/libstagefright/foundation/tests/AMessage_test.cpp b/media/libstagefright/foundation/tests/AMessage_test.cpp
new file mode 100644
index 0000000..2b11326
--- /dev/null
+++ b/media/libstagefright/foundation/tests/AMessage_test.cpp
@@ -0,0 +1,125 @@
+/*
+ * Copyright 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "AData_test"
+
+#include <gtest/gtest.h>
+#include <utils/RefBase.h>
+
+#include <media/stagefright/foundation/AMessage.h>
+
+using namespace android;
+
+class AMessageTest : public ::testing::Test {
+};
+
+
+TEST(AMessage_tests, item_manipulation) {
+  sp<AMessage> m1 = new AMessage();
+
+  m1->setInt32("value", 2);
+  m1->setInt32("bar", 3);
+
+  int32_t i32;
+  EXPECT_TRUE(m1->findInt32("value", &i32));
+  EXPECT_EQ(2, i32);
+
+  EXPECT_TRUE(m1->findInt32("bar", &i32));
+  EXPECT_EQ(3, i32);
+
+
+  m1->setInt64("big", INT64_MAX);
+  m1->setInt64("smaller", INT64_MAX - 2);
+  m1->setInt64("smallest", 257);
+
+  int64_t i64;
+  EXPECT_TRUE(m1->findInt64("big", &i64));
+  EXPECT_EQ(INT64_MAX, i64);
+
+  EXPECT_TRUE(m1->findInt64("smaller", &i64));
+  EXPECT_EQ(INT64_MAX - 2, i64);
+
+  m1->setSize("size1", 257);
+  m1->setSize("size2", 1023);
+
+  size_t sizing;
+  EXPECT_TRUE(m1->findSize("size2", &sizing));
+  EXPECT_EQ(1023, sizing);
+  EXPECT_TRUE(m1->findSize("size1", &sizing));
+  EXPECT_EQ(257, sizing);
+
+  m1->setDouble("precise", 10.5);
+  m1->setDouble("small", 0.125);
+
+  double d;
+  EXPECT_TRUE(m1->findDouble("precise", &d));
+  EXPECT_EQ(10.5, d);
+
+  EXPECT_TRUE(m1->findDouble("small", &d));
+  EXPECT_EQ(0.125, d);
+
+  // should be unchanged from the top of the test
+  EXPECT_TRUE(m1->findInt32("bar", &i32));
+  EXPECT_EQ(3, i32);
+
+  EXPECT_FALSE(m1->findInt32("nonesuch", &i32));
+  EXPECT_FALSE(m1->findInt64("nonesuch2", &i64));
+  // types disagree, not found
+  EXPECT_FALSE(m1->findInt32("big", &i32));
+  EXPECT_FALSE(m1->findInt32("precise", &i32));
+
+  // integral types should come back true
+  EXPECT_TRUE(m1->findAsInt64("big", &i64));
+  EXPECT_EQ(INT64_MAX, i64);
+  EXPECT_TRUE(m1->findAsInt64("bar", &i64));
+  EXPECT_EQ(3, i64);
+  EXPECT_FALSE(m1->findAsInt64("precise", &i64));
+
+  // recovers ints, size, and floating point values
+  float value;
+  EXPECT_TRUE(m1->findAsFloat("value", &value));
+  EXPECT_EQ(2, value);
+  EXPECT_TRUE(m1->findAsFloat("smallest", &value));
+  EXPECT_EQ(257, value);
+  EXPECT_TRUE(m1->findAsFloat("size2", &value));
+  EXPECT_EQ(1023, value);
+  EXPECT_TRUE(m1->findAsFloat("precise", &value));
+  EXPECT_EQ(10.5, value);
+  EXPECT_TRUE(m1->findAsFloat("small", &value));
+  EXPECT_EQ(0.125, value);
+
+
+  // need to handle still:
+  // strings
+  // Object
+  // Buffer
+  // Message (nested)
+  //
+
+  // removal
+  m1->setInt32("shortlived", 2);
+  m1->setInt32("alittlelonger", 2);
+  EXPECT_EQ(OK, m1->removeEntryByName("shortlived"));
+  EXPECT_EQ(BAD_VALUE, m1->removeEntryByName(nullptr));
+  EXPECT_EQ(BAD_INDEX, m1->removeEntryByName("themythicalnonesuch"));
+  EXPECT_FALSE(m1->findInt32("shortlived", &i32));
+  EXPECT_TRUE(m1->findInt32("alittlelonger", &i32));
+
+  EXPECT_NE(OK, m1->removeEntryByName("notpresent"));
+
+}
+
diff --git a/media/libstagefright/foundation/tests/Android.bp b/media/libstagefright/foundation/tests/Android.bp
index 715b57a..e50742e 100644
--- a/media/libstagefright/foundation/tests/Android.bp
+++ b/media/libstagefright/foundation/tests/Android.bp
@@ -30,6 +30,7 @@
 
     srcs: [
         "AData_test.cpp",
+        "AMessage_test.cpp",
         "Base64_test.cpp",
         "Flagged_test.cpp",
         "TypeTraits_test.cpp",
diff --git a/media/libstagefright/include/media/stagefright/ACodec.h b/media/libstagefright/include/media/stagefright/ACodec.h
index 30bc44e..c84cc10 100644
--- a/media/libstagefright/include/media/stagefright/ACodec.h
+++ b/media/libstagefright/include/media/stagefright/ACodec.h
@@ -518,6 +518,7 @@
     status_t setLowLatency(int32_t lowLatency);
     status_t setLatency(uint32_t latency);
     status_t getLatency(uint32_t *latency);
+    status_t setTunnelPeek(int32_t tunnelPeek);
     status_t setAudioPresentation(int32_t presentationId, int32_t programId);
     status_t setOperatingRate(float rateFloat, bool isVideo);
     status_t getIntraRefreshPeriod(uint32_t *intraRefreshPeriod);
@@ -578,6 +579,8 @@
     void notifyOfRenderedFrames(
             bool dropIncomplete = false, FrameRenderTracker::Info *until = NULL);
 
+    void onFirstTunnelFrameReady();
+
     // Pass |expectedFormat| to print a warning if the format differs from it.
     // Using sp<> instead of const sp<>& because expectedFormat is likely the current mOutputFormat
     // which will get updated inside.
diff --git a/media/libstagefright/include/media/stagefright/AudioSource.h b/media/libstagefright/include/media/stagefright/AudioSource.h
index 451aa57..d1dcdb5 100644
--- a/media/libstagefright/include/media/stagefright/AudioSource.h
+++ b/media/libstagefright/include/media/stagefright/AudioSource.h
@@ -37,16 +37,27 @@
     // Note that the "channels" parameter _is_ the number of channels,
     // _not_ a bitmask of audio_channels_t constants.
     AudioSource(
-            const audio_attributes_t *attr,
-            const String16 &opPackageName,
-            uint32_t sampleRate,
-            uint32_t channels,
-            uint32_t outSampleRate = 0,
-            uid_t uid = -1,
-            pid_t pid = -1,
-            audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE,
-            audio_microphone_direction_t selectedMicDirection = MIC_DIRECTION_UNSPECIFIED,
-            float selectedMicFieldDimension = MIC_FIELD_DIMENSION_NORMAL);
+        const audio_attributes_t *attr,
+        const media::permission::Identity& identity,
+        uint32_t sampleRate,
+        uint32_t channels,
+        uint32_t outSampleRate = 0,
+        audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE,
+        audio_microphone_direction_t selectedMicDirection = MIC_DIRECTION_UNSPECIFIED,
+        float selectedMicFieldDimension = MIC_FIELD_DIMENSION_NORMAL);
+
+    // Legacy constructor kept for vendor dependencies
+    AudioSource(
+        const audio_attributes_t *attr,
+        const String16 &opPackageName,
+        uint32_t sampleRate,
+        uint32_t channels,
+        uint32_t outSampleRate = 0,
+        uid_t uid = -1,
+        pid_t pid = -1,
+        audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE,
+        audio_microphone_direction_t selectedMicDirection = MIC_DIRECTION_UNSPECIFIED,
+        float selectedMicFieldDimension = MIC_FIELD_DIMENSION_NORMAL);
 
     status_t initCheck() const;
 
@@ -131,6 +142,16 @@
 
     AudioSource(const AudioSource &);
     AudioSource &operator=(const AudioSource &);
+
+    void set(
+        const audio_attributes_t *attr,
+        const media::permission::Identity& identity,
+        uint32_t sampleRate,
+        uint32_t channels,
+        uint32_t outSampleRate = 0,
+        audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE,
+        audio_microphone_direction_t selectedMicDirection = MIC_DIRECTION_UNSPECIFIED,
+        float selectedMicFieldDimension = MIC_FIELD_DIMENSION_NORMAL);
 };
 
 }  // namespace android
diff --git a/media/libstagefright/include/media/stagefright/CameraSource.h b/media/libstagefright/include/media/stagefright/CameraSource.h
index 16e7d89..e8770ed 100644
--- a/media/libstagefright/include/media/stagefright/CameraSource.h
+++ b/media/libstagefright/include/media/stagefright/CameraSource.h
@@ -159,6 +159,7 @@
     int32_t  mColorFormat;
     int32_t  mEncoderFormat;
     int32_t  mEncoderDataSpace;
+    int32_t  mBufferDataSpace;
     status_t mInitCheck;
 
     sp<Camera>   mCamera;
diff --git a/media/libstagefright/include/media/stagefright/CodecBase.h b/media/libstagefright/include/media/stagefright/CodecBase.h
index dd6df90..efb2f86 100644
--- a/media/libstagefright/include/media/stagefright/CodecBase.h
+++ b/media/libstagefright/include/media/stagefright/CodecBase.h
@@ -61,6 +61,11 @@
 
 using hardware::cas::native::V1_0::IDescrambler;
 
+struct CodecParameterDescriptor {
+    std::string name;
+    AMessage::Type type;
+};
+
 struct CodecBase : public AHandler, /* static */ ColorUtils {
     /**
      * This interface defines events firing from CodecBase back to MediaCodec.
@@ -173,6 +178,10 @@
          * Notify MediaCodec that output buffers are changed.
          */
         virtual void onOutputBuffersChanged() = 0;
+        /**
+         * Notify MediaCodec that the first tunnel frame is ready.
+         */
+        virtual void onFirstTunnelFrameReady() = 0;
     };
 
     /**
@@ -233,6 +242,64 @@
     virtual void signalSetParameters(const sp<AMessage> &msg) = 0;
     virtual void signalEndOfInputStream() = 0;
 
+    /**
+     * Query supported parameters from this instance, and fill |names| with the
+     * names of the parameters.
+     *
+     * \param names string vector to fill with supported parameters.
+     * \return OK if successful;
+     *         BAD_VALUE if |names| is null;
+     *         INVALID_OPERATION if already released;
+     *         ERROR_UNSUPPORTED if not supported.
+     */
+    virtual status_t querySupportedParameters([[maybe_unused]] std::vector<std::string> *names) {
+        return ERROR_UNSUPPORTED;
+    }
+    /**
+     * Fill |desc| with description of the parameter with |name|.
+     *
+     * \param name name of the parameter to describe
+     * \param desc pointer to CodecParameterDescriptor to be filled
+     * \return OK if successful;
+     *         BAD_VALUE if |desc| is null;
+     *         NAME_NOT_FOUND if |name| is not recognized by the component;
+     *         INVALID_OPERATION if already released;
+     *         ERROR_UNSUPPORTED if not supported.
+     */
+    virtual status_t describeParameter(
+            [[maybe_unused]] const std::string &name,
+            [[maybe_unused]] CodecParameterDescriptor *desc) {
+        return ERROR_UNSUPPORTED;
+    }
+    /**
+     * Subscribe to parameters in |names| and get output format change event
+     * when they change.
+     * Unrecognized / already subscribed parameters are ignored.
+     *
+     * \param names names of parameters to subscribe
+     * \return OK if successful;
+     *         INVALID_OPERATION if already released;
+     *         ERROR_UNSUPPORTED if not supported.
+     */
+    virtual status_t subscribeToParameters(
+            [[maybe_unused]] const std::vector<std::string> &names) {
+        return ERROR_UNSUPPORTED;
+    }
+    /**
+     * Unsubscribe from parameters in |names| and no longer get
+     * output format change event when they change.
+     * Unrecognized / already unsubscribed parameters are ignored.
+     *
+     * \param names names of parameters to unsubscribe
+     * \return OK if successful;
+     *         INVALID_OPERATION if already released;
+     *         ERROR_UNSUPPORTED if not supported.
+     */
+    virtual status_t unsubscribeFromParameters(
+            [[maybe_unused]] const std::vector<std::string> &names) {
+        return ERROR_UNSUPPORTED;
+    }
+
     typedef CodecBase *(*CreateCodecFunc)(void);
     typedef PersistentSurface *(*CreateInputSurfaceFunc)(void);
 
diff --git a/media/libstagefright/include/media/stagefright/MPEG4Writer.h b/media/libstagefright/include/media/stagefright/MPEG4Writer.h
index 2582ed0..7f2728e 100644
--- a/media/libstagefright/include/media/stagefright/MPEG4Writer.h
+++ b/media/libstagefright/include/media/stagefright/MPEG4Writer.h
@@ -106,6 +106,7 @@
     off64_t mOffset;
     off64_t mPreAllocateFileEndOffset;  //End of file offset during preallocation.
     off64_t mMdatOffset;
+    off64_t mMaxOffsetAppend; // File offset written up to while appending.
     off64_t mMdatEndOffset;  // End offset of mdat atom.
     uint8_t *mInMemoryCache;
     off64_t mInMemoryCacheOffset;
diff --git a/media/libstagefright/include/media/stagefright/MediaAppender.h b/media/libstagefright/include/media/stagefright/MediaAppender.h
new file mode 100644
index 0000000..c2f6f10
--- /dev/null
+++ b/media/libstagefright/include/media/stagefright/MediaAppender.h
@@ -0,0 +1,100 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_MEDIA_APPENDER_H
+#define ANDROID_MEDIA_APPENDER_H
+
+#include <media/stagefright/MediaMuxer.h>
+#include <media/stagefright/NuMediaExtractor.h>
+#include <stack>
+
+namespace android {
+
+struct MediaAppender : public MediaMuxerBase {
+public:
+    enum AppendMode {
+        APPEND_MODE_FIRST = 0,
+        APPEND_MODE_IGNORE_LAST_VIDEO_GOP = APPEND_MODE_FIRST,
+        APPEND_MODE_ADD_TO_EXISTING_DATA = 1,
+        APPEND_MODE_LAST = APPEND_MODE_ADD_TO_EXISTING_DATA,
+    };
+
+    static sp<MediaAppender> create(int fd, AppendMode mode);
+
+    virtual ~MediaAppender();
+
+    status_t init();
+
+    status_t start();
+
+    status_t stop();
+
+    status_t writeSampleData(const sp<ABuffer>& buffer, size_t trackIndex, int64_t timeUs,
+                             uint32_t flags);
+
+    status_t setOrientationHint(int degrees);
+
+    status_t setLocation(int latitude, int longitude);
+
+    ssize_t addTrack(const sp<AMessage> &format);
+
+    ssize_t getTrackCount();
+
+    sp<AMessage> getTrackFormat(size_t idx);
+
+private:
+    MediaAppender(int fd, AppendMode mode);
+
+    int mFd;
+    MediaMuxer::OutputFormat mFormat;
+    AppendMode mMode;
+    sp<NuMediaExtractor> mExtractor;
+    sp<MediaMuxer> mMuxer;
+    size_t mTrackCount;
+    // Map track index given by extractor to the ones received from muxer.
+    std::map<size_t, ssize_t> mTrackIndexMap;
+    // Count of the samples in each track, indexed by extractor track ids.
+    std::vector<size_t> mSampleCountVect;
+    // Extractor track index of samples.
+    std::vector<size_t> mSampleIndexVect;
+    // Track format indexed by extractor track ids.
+    std::map<size_t, sp<AMessage>> mFmtIndexMap;
+    // Size of samples.
+    std::vector<size_t> mSampleSizeVect;
+    // Presentation time stamp of samples.
+    std::vector<int64_t> mSampleTimeVect;
+    // Timestamp of last sample of tracks.
+    std::vector<int64_t> mMaxTimestampVect;
+    // Metadata of samples.
+    std::vector<sp<MetaData>> mSampleMetaVect;
+    std::mutex mMutex;
+    // Timestamp of the last sync sample of tracks.
+    std::vector<int64_t> mLastSyncSampleTimeVect;
+
+    struct sampleDataInfo;
+    std::vector<sampleDataInfo> mSDI;
+
+    enum : int {
+        UNINITIALIZED,
+        INITIALIZED,
+        STARTED,
+        STOPPED,
+        ERROR,
+    } mState GUARDED_BY(mMutex);
+};
+
+}  // namespace android
+#endif  // ANDROID_MEDIA_APPENDER_H
\ No newline at end of file
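
A hedged sketch of how this header is meant to be driven; the file path, mode choice, and error handling are placeholders, not part of this change:

    #define LOG_TAG "AppenderSketch"
    #include <fcntl.h>
    #include <unistd.h>
    #include <utils/Errors.h>
    #include <utils/Log.h>
    #include <media/stagefright/MediaAppender.h>

    using namespace android;

    // Illustrative only: open an existing mp4, inspect its tracks, and append to it.
    static void appendSketch() {
        int fd = open("/data/local/tmp/clip.mp4", O_RDWR);
        if (fd < 0) return;
        sp<MediaAppender> appender =
                MediaAppender::create(fd, MediaAppender::APPEND_MODE_IGNORE_LAST_VIDEO_GOP);
        if (appender == nullptr || appender->init() != OK) {
            close(fd);
            return;
        }
        ALOGD("existing tracks: %zd", appender->getTrackCount());
        appender->start();
        // ... writeSampleData(buffer, trackIndex, timeUs, flags) for each new sample ...
        appender->stop();
        close(fd);
    }
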
diff --git a/media/libstagefright/include/media/stagefright/MediaCodec.h b/media/libstagefright/include/media/stagefright/MediaCodec.h
index a28d479..3f93e6d 100644
--- a/media/libstagefright/include/media/stagefright/MediaCodec.h
+++ b/media/libstagefright/include/media/stagefright/MediaCodec.h
@@ -50,6 +50,7 @@
 struct BatteryChecker;
 class BufferChannelBase;
 struct CodecBase;
+struct CodecParameterDescriptor;
 class IBatteryStats;
 struct ICrypto;
 class MediaCodecBuffer;
@@ -128,6 +129,8 @@
 
     status_t setOnFrameRenderedNotification(const sp<AMessage> &notify);
 
+    status_t setOnFirstTunnelFrameReadyNotification(const sp<AMessage> &notify);
+
     status_t createInputSurface(sp<IGraphicBufferProducer>* bufferProducer);
 
     status_t setInputSurface(const sp<PersistentSurface> &surface);
@@ -246,6 +249,11 @@
 
     status_t setParameters(const sp<AMessage> &params);
 
+    status_t querySupportedVendorParameters(std::vector<std::string> *names);
+    status_t describeParameter(const std::string &name, CodecParameterDescriptor *desc);
+    status_t subscribeToVendorParameters(const std::vector<std::string> &names);
+    status_t unsubscribeFromVendorParameters(const std::vector<std::string> &names);
+
     // Create a MediaCodec notification message from a list of rendered or dropped render infos
     // by adding rendered frame information to a base notification message. Returns the number
     // of frames that were rendered.
@@ -361,6 +369,22 @@
         bool mOwnedByClient;
     };
 
+    // This type is used to track the tunnel mode video peek state machine:
+    //
+    // DisabledNoBuffer -> EnabledNoBuffer  when tunnel-peek = true
+    // EnabledNoBuffer  -> DisabledNoBuffer when tunnel-peek = false
+    // DisabledNoBuffer -> BufferDecoded    when kWhatFirstTunnelFrameReady
+    // EnabledNoBuffer  -> BufferDecoded    when kWhatFirstTunnelFrameReady
+    // BufferDecoded    -> BufferRendered   when kWhatFrameRendered
+    // <all states>     -> EnabledNoBuffer  when flush
+    // <all states>     -> EnabledNoBuffer  when stop then configure then start
+    enum struct TunnelPeekState {
+        kDisabledNoBuffer,
+        kEnabledNoBuffer,
+        kBufferDecoded,
+        kBufferRendered,
+    };
+
     struct ResourceManagerServiceProxy;
 
     State mState;
@@ -387,12 +411,15 @@
     void flushMediametrics();
     void updateEphemeralMediametrics(mediametrics_handle_t item);
     void updateLowLatency(const sp<AMessage> &msg);
+    constexpr const char *asString(TunnelPeekState state, const char *default_string="?");
+    void updateTunnelPeek(const sp<AMessage> &msg);
 
     sp<AMessage> mOutputFormat;
     sp<AMessage> mInputFormat;
     sp<AMessage> mCallback;
     sp<AMessage> mOnFrameRenderedNotification;
     sp<AMessage> mAsyncReleaseCompleteNotification;
+    sp<AMessage> mOnFirstTunnelFrameReadyNotification;
 
     sp<ResourceManagerServiceProxy> mResourceManagerProxy;
 
@@ -408,6 +435,17 @@
     // configure parameter
     sp<AMessage> mConfigureMsg;
 
+    // Rewrites the format description during configure() for encoding,
+    // given the format and flags as they exist within configure().
+    // The (possibly) updated format is returned in place.
+    status_t shapeMediaFormat(
+            const sp<AMessage> &format,
+            uint32_t flags);
+
+    // Populate the format shaper library with information about this codec's
+    // encoding of the indicated media type.
+    status_t setupFormatShaper(AString mediaType);
+
     // Used only to synchronize asynchronous getBufferAndFormat
     // across all the other (synchronous) buffer state change
     // operations, such as de/queueIn/OutputBuffer, start and
@@ -428,6 +466,7 @@
     int32_t mTunneledInputWidth;
     int32_t mTunneledInputHeight;
     bool mTunneled;
+    TunnelPeekState mTunnelPeekState;
 
     sp<IDescrambler> mDescrambler;
 
@@ -528,6 +567,14 @@
     std::deque<BufferFlightTiming_t> mBuffersInFlight;
     Mutex mLatencyLock;
     int64_t mLatencyUnknown;    // buffers for which we couldn't calculate latency
+
+    Mutex mOutputStatsLock;
+    int64_t mBytesEncoded = 0;
+    int64_t mEarliestEncodedPtsUs = INT64_MAX;
+    int64_t mLatestEncodedPtsUs = INT64_MIN;
+    int32_t mFramesEncoded = 0;
+
+
     int64_t mNumLowLatencyEnables;  // how many times low latency mode is enabled
     int64_t mNumLowLatencyDisables;  // how many times low latency mode is disabled
     bool mIsLowLatencyModeOn;  // is low latency mode on currently
@@ -544,7 +591,7 @@
     sp<BatteryChecker> mBatteryChecker;
 
     void statsBufferSent(int64_t presentationUs);
-    void statsBufferReceived(int64_t presentationUs);
+    void statsBufferReceived(int64_t presentationUs, const sp<MediaCodecBuffer> &buffer);
 
     enum {
         // the default shape of our latency histogram buckets
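
The transition table in the new TunnelPeekState comment can be read as a small pure function; the sketch below restates it for clarity, with illustrative event names rather than the actual MediaCodec messages:

    enum class TunnelPeekState { kDisabledNoBuffer, kEnabledNoBuffer, kBufferDecoded, kBufferRendered };
    enum class Event { kPeekTrue, kPeekFalse, kFirstTunnelFrameReady, kFrameRendered, kFlush };

    TunnelPeekState next(TunnelPeekState s, Event e) {
        switch (e) {
        case Event::kPeekTrue:    // "tunnel-peek" parameter set to true
            return s == TunnelPeekState::kDisabledNoBuffer ? TunnelPeekState::kEnabledNoBuffer : s;
        case Event::kPeekFalse:   // "tunnel-peek" parameter set to false
            return s == TunnelPeekState::kEnabledNoBuffer ? TunnelPeekState::kDisabledNoBuffer : s;
        case Event::kFirstTunnelFrameReady:
            return (s == TunnelPeekState::kDisabledNoBuffer || s == TunnelPeekState::kEnabledNoBuffer)
                    ? TunnelPeekState::kBufferDecoded : s;
        case Event::kFrameRendered:
            return s == TunnelPeekState::kBufferDecoded ? TunnelPeekState::kBufferRendered : s;
        case Event::kFlush:       // also stop -> configure -> start
            return TunnelPeekState::kEnabledNoBuffer;
        }
        return s;
    }
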
diff --git a/media/libstagefright/include/media/stagefright/MediaCodecConstants.h b/media/libstagefright/include/media/stagefright/MediaCodecConstants.h
index d3ced29..1a5609a 100644
--- a/media/libstagefright/include/media/stagefright/MediaCodecConstants.h
+++ b/media/libstagefright/include/media/stagefright/MediaCodecConstants.h
@@ -753,7 +753,7 @@
 constexpr char KEY_CA_SYSTEM_ID[] = "ca-system-id";
 constexpr char KEY_CA_PRIVATE_DATA[] = "ca-private-data";
 constexpr char KEY_CAPTURE_RATE[] = "capture-rate";
-constexpr char KEY_CHANNEL_COUNT[] = "channel-count";
+constexpr char KEY_CHANNEL_COUNT[] = "channel-count";   // value N, equivalent to the range 1..N
 constexpr char KEY_CHANNEL_MASK[] = "channel-mask";
 constexpr char KEY_COLOR_FORMAT[] = "color-format";
 constexpr char KEY_COLOR_RANGE[] = "color-range";
@@ -794,7 +794,7 @@
 constexpr char KEY_PCM_ENCODING[] = "pcm-encoding";
 constexpr char KEY_PIXEL_ASPECT_RATIO_HEIGHT[] = "sar-height";
 constexpr char KEY_PIXEL_ASPECT_RATIO_WIDTH[] = "sar-width";
-constexpr char KEY_PREPEND_HEADERS_TO_SYNC_FRAMES[] = "prepend-sps-pps-to-idr-frames";
+constexpr char KEY_PREPEND_HEADER_TO_SYNC_FRAMES[] = "prepend-sps-pps-to-idr-frames";
 constexpr char KEY_PRIORITY[] = "priority";
 constexpr char KEY_PROFILE[] = "profile";
 constexpr char KEY_PUSH_BLANK_BUFFERS_ON_STOP[] = "push-blank-buffers-on-shutdown";
@@ -808,6 +808,14 @@
 constexpr char KEY_TILE_HEIGHT[] = "tile-height";
 constexpr char KEY_TILE_WIDTH[] = "tile-width";
 constexpr char KEY_TRACK_ID[] = "track-id";
+constexpr char KEY_VIDEO_QP_B_MAX[] = "video-qp-b-max";
+constexpr char KEY_VIDEO_QP_B_MIN[] = "video-qp-b-min";
+constexpr char KEY_VIDEO_QP_I_MAX[] = "video-qp-i-max";
+constexpr char KEY_VIDEO_QP_I_MIN[] = "video-qp-i-min";
+constexpr char KEY_VIDEO_QP_MAX[] = "video-qp-max";
+constexpr char KEY_VIDEO_QP_MIN[] = "video-qp-min";
+constexpr char KEY_VIDEO_QP_P_MAX[] = "video-qp-p-max";
+constexpr char KEY_VIDEO_QP_P_MIN[] = "video-qp-p-min";
 constexpr char KEY_WIDTH[] = "width";
 
 // from MediaCodec.java
@@ -849,9 +857,9 @@
 constexpr char PARAMETER_KEY_REQUEST_SYNC_FRAME[] = "request-sync";
 constexpr char PARAMETER_KEY_SUSPEND[] = "drop-input-frames";
 constexpr char PARAMETER_KEY_SUSPEND_TIME[] = "drop-start-time-us";
+constexpr char PARAMETER_KEY_TUNNEL_PEEK[] = "tunnel-peek";
 constexpr char PARAMETER_KEY_VIDEO_BITRATE[] = "video-bitrate";
 
 }
 
 #endif  // MEDIA_CODEC_CONSTANTS_H_
-
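
As a sketch only, an encoder format might carry the new QP bound keys as below; the numeric values are placeholders and the supported ranges are codec-specific:

    #include <media/stagefright/foundation/AMessage.h>
    #include <media/stagefright/MediaCodecConstants.h>

    using namespace android;

    static sp<AMessage> makeQpBoundedFormat() {
        sp<AMessage> format = new AMessage;
        format->setInt32(KEY_VIDEO_QP_MIN, 10);    // floor across frame types (placeholder)
        format->setInt32(KEY_VIDEO_QP_MAX, 40);    // ceiling across frame types (placeholder)
        format->setInt32(KEY_VIDEO_QP_I_MAX, 35);  // tighter I-frame ceiling (placeholder)
        return format;
    }
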
diff --git a/media/libstagefright/include/media/stagefright/MediaErrors.h b/media/libstagefright/include/media/stagefright/MediaErrors.h
index cfd5608..d1df2ca 100644
--- a/media/libstagefright/include/media/stagefright/MediaErrors.h
+++ b/media/libstagefright/include/media/stagefright/MediaErrors.h
@@ -94,12 +94,13 @@
     ERROR_DRM_PROVISIONING_CERTIFICATE       = DRM_ERROR_BASE - 31,
     ERROR_DRM_PROVISIONING_CONFIG            = DRM_ERROR_BASE - 32,
     ERROR_DRM_PROVISIONING_PARSE             = DRM_ERROR_BASE - 33,
-    ERROR_DRM_PROVISIONING_RETRY             = DRM_ERROR_BASE - 34,
-    ERROR_DRM_SECURE_STOP_RELEASE            = DRM_ERROR_BASE - 35,
-    ERROR_DRM_STORAGE_READ                   = DRM_ERROR_BASE - 36,
-    ERROR_DRM_STORAGE_WRITE                  = DRM_ERROR_BASE - 37,
-    ERROR_DRM_ZERO_SUBSAMPLES                = DRM_ERROR_BASE - 38,
-    ERROR_DRM_LAST_USED_ERRORCODE            = DRM_ERROR_BASE - 38,
+    ERROR_DRM_PROVISIONING_REQUEST_REJECTED  = DRM_ERROR_BASE - 34,
+    ERROR_DRM_PROVISIONING_RETRY             = DRM_ERROR_BASE - 35,
+    ERROR_DRM_SECURE_STOP_RELEASE            = DRM_ERROR_BASE - 36,
+    ERROR_DRM_STORAGE_READ                   = DRM_ERROR_BASE - 37,
+    ERROR_DRM_STORAGE_WRITE                  = DRM_ERROR_BASE - 38,
+    ERROR_DRM_ZERO_SUBSAMPLES                = DRM_ERROR_BASE - 39,
+    ERROR_DRM_LAST_USED_ERRORCODE            = ERROR_DRM_ZERO_SUBSAMPLES,
 
     ERROR_DRM_VENDOR_MAX                     = DRM_ERROR_BASE - 500,
     ERROR_DRM_VENDOR_MIN                     = DRM_ERROR_BASE - 999,
@@ -202,6 +203,7 @@
         STATUS_CASE(ERROR_DRM_PROVISIONING_CERTIFICATE);
         STATUS_CASE(ERROR_DRM_PROVISIONING_CONFIG);
         STATUS_CASE(ERROR_DRM_PROVISIONING_PARSE);
+        STATUS_CASE(ERROR_DRM_PROVISIONING_REQUEST_REJECTED);
         STATUS_CASE(ERROR_DRM_PROVISIONING_RETRY);
         STATUS_CASE(ERROR_DRM_SECURE_STOP_RELEASE);
         STATUS_CASE(ERROR_DRM_STORAGE_READ);
diff --git a/media/libstagefright/include/media/stagefright/MediaMuxer.h b/media/libstagefright/include/media/stagefright/MediaMuxer.h
index a1b9465..e97a65e 100644
--- a/media/libstagefright/include/media/stagefright/MediaMuxer.h
+++ b/media/libstagefright/include/media/stagefright/MediaMuxer.h
@@ -22,7 +22,12 @@
 #include <utils/Vector.h>
 #include <utils/threads.h>
 
+#include <map>
+#include <mutex>
+#include <vector>
+
 #include "media/stagefright/foundation/ABase.h"
+#include "MediaMuxerBase.h"
 
 namespace android {
 
@@ -33,6 +38,7 @@
 struct MediaSource;
 class MetaData;
 struct MediaWriter;
+struct NuMediaExtractor;
 
 // MediaMuxer is used to mux multiple tracks into a video. Currently, we only
 // support a mp4 file as the output.
@@ -40,19 +46,8 @@
 // Constructor -> addTrack+ -> start -> writeSampleData+ -> stop
 // If muxing operation need to be cancelled, the app is responsible for
 // deleting the output file after stop.
-struct MediaMuxer : public RefBase {
+struct MediaMuxer : public MediaMuxerBase {
 public:
-    // Please update media/java/android/media/MediaMuxer.java if the
-    // OutputFormat is updated.
-    enum OutputFormat {
-        OUTPUT_FORMAT_MPEG_4      = 0,
-        OUTPUT_FORMAT_WEBM        = 1,
-        OUTPUT_FORMAT_THREE_GPP   = 2,
-        OUTPUT_FORMAT_HEIF        = 3,
-        OUTPUT_FORMAT_OGG         = 4,
-        OUTPUT_FORMAT_LIST_END // must be last - used to validate format type
-    };
-
     // Construct the muxer with the file descriptor. Note that the MediaMuxer
     // will close this file at stop().
     MediaMuxer(int fd, OutputFormat format);
@@ -117,10 +112,25 @@
     status_t writeSampleData(const sp<ABuffer> &buffer, size_t trackIndex,
                              int64_t timeUs, uint32_t flags) ;
 
+    /**
+     * Gets the number of tracks added successfully. Should be called in the
+     * INITIALIZED (after constructor) or STARTED (after start()) state.
+     * @return the number of tracks, or -1 if called in the wrong state.
+     */
+    ssize_t getTrackCount();
+
+    /**
+     * Gets the format of the track by its index.
+     * @param idx index of the track whose format is wanted.
+     * @return smart pointer to an AMessage containing the format details.
+     */
+    sp<AMessage> getTrackFormat(size_t idx);
+
 private:
     const OutputFormat mFormat;
     sp<MediaWriter> mWriter;
     Vector< sp<MediaAdapter> > mTrackList;  // Each track has its MediaAdapter.
+    Vector< sp<AMessage> > mFormatList; // Format of each track.
     sp<MetaData> mFileMeta;  // Metadata for the whole file.
     Mutex mMuxerLock;
 
diff --git a/media/libstagefright/include/media/stagefright/MediaMuxerBase.h b/media/libstagefright/include/media/stagefright/MediaMuxerBase.h
new file mode 100644
index 0000000..f02d510
--- /dev/null
+++ b/media/libstagefright/include/media/stagefright/MediaMuxerBase.h
@@ -0,0 +1,133 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef MEDIA_MUXER_BASE_H_
+#define MEDIA_MUXER_BASE_H_
+
+#include <utils/RefBase.h>
+#include "media/stagefright/foundation/ABase.h"
+
+namespace android {
+
+struct ABuffer;
+struct AMessage;
+
+// MediaMuxer is used to mux multiple tracks into a video. Currently, we only
+// support an MP4 file as the output.
+// The expected calling order of the functions is:
+// Constructor -> addTrack+ -> start -> writeSampleData+ -> stop
+// If the muxing operation needs to be cancelled, the app is responsible for
+// deleting the output file after stop.
+struct MediaMuxerBase : public RefBase {
+public:
+    // Please update media/java/android/media/MediaMuxer.java if the
+    // OutputFormat is updated.
+    enum OutputFormat {
+        OUTPUT_FORMAT_MPEG_4      = 0,
+        OUTPUT_FORMAT_WEBM        = 1,
+        OUTPUT_FORMAT_THREE_GPP   = 2,
+        OUTPUT_FORMAT_HEIF        = 3,
+        OUTPUT_FORMAT_OGG         = 4,
+        OUTPUT_FORMAT_LIST_END // must be last - used to validate format type
+    };
+
+    // Default constructor; concrete muxers (e.g. MediaMuxer) are constructed
+    // with a file descriptor and close it at stop().
+    MediaMuxerBase() {};
+
+    virtual ~MediaMuxerBase() {};
+
+    /**
+     * Add a track with its format information. This should be
+     * called before start().
+     * @param format the track's format.
+     * @return the track's index or negative number if error.
+     */
+    virtual ssize_t addTrack(const sp<AMessage> &format) = 0;
+
+    /**
+     * Start muxing. Make sure all the tracks have been added before
+     * calling this.
+     */
+    virtual status_t start() = 0;
+
+    /**
+     * Set the orientation hint.
+     * @param degrees The rotation degrees. It has to be either 0,
+     *                90, 180 or 270.
+     * @return OK if no error.
+     */
+    virtual status_t setOrientationHint(int degrees) = 0;
+
+    /**
+     * Set the location.
+     * @param latitude The latitude in degrees x 10000. Its value must be in the range
+     * [-900000, 900000].
+     * @param longitude The longitude in degrees x 10000. Its value must be in the range
+     * [-1800000, 1800000].
+     * @return OK if no error.
+     */
+    virtual status_t setLocation(int latitude, int longitude) = 0;
+
+    /**
+     * Stop muxing.
+     * This method is a blocking call. Depending on how
+     * much data is buffered internally, stopping the muxer
+     * may take a long time; calling it from the UI thread is
+     * not recommended.
+     * @return OK if no error.
+     */
+    virtual status_t stop() = 0;
+
+    /**
+     * Send a sample buffer for muxing.
+     * The buffer can be reused once this method returns. Typically,
+     * this function won't block for very long, so there is
+     * no need to call it from a separate thread just to
+     * push a buffer.
+     * @param buffer the incoming sample buffer.
+     * @param trackIndex the buffer's track index number.
+     * @param timeUs the buffer's time stamp.
+     * @param flags the only supported flag for now is
+     *              MediaCodec::BUFFER_FLAG_SYNCFRAME.
+     * @return OK if no error.
+     */
+    virtual status_t writeSampleData(const sp<ABuffer> &buffer, size_t trackIndex,
+                             int64_t timeUs, uint32_t flags) = 0;
+
+    /**
+     * Gets the number of tracks added successfully. Should be called in the
+     * INITIALIZED (after constructor) or STARTED (after start()) state.
+     * @return the number of tracks, or -1 if called in the wrong state.
+     */
+    virtual ssize_t getTrackCount() = 0;
+
+    /**
+     * Gets the format of the track by its index.
+     * @param idx index of the track whose format is wanted.
+     * @return smart pointer to an AMessage containing the format details.
+     */
+    virtual sp<AMessage> getTrackFormat(size_t idx) = 0;
+
+private:
+
+    DISALLOW_EVIL_CONSTRUCTORS(MediaMuxerBase);
+};
+
+}  // namespace android
+
+#endif  // MEDIA_MUXER_BASE_H_
+
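
A minimal sketch of the calling order this interface documents (constructor -> addTrack+ -> start -> writeSampleData+ -> stop), using the concrete MediaMuxer; error handling is trimmed and the sample values are placeholders:

    #include <media/stagefright/foundation/ABuffer.h>
    #include <media/stagefright/MediaMuxer.h>

    using namespace android;

    void muxOneSample(int fd, const sp<AMessage> &videoFormat,
                      const sp<ABuffer> &sample, int64_t timeUs) {
        sp<MediaMuxerBase> muxer = new MediaMuxer(fd, MediaMuxerBase::OUTPUT_FORMAT_MPEG_4);
        ssize_t track = muxer->addTrack(videoFormat);   // must precede start()
        muxer->setOrientationHint(90);                  // optional; placeholder value
        muxer->start();
        muxer->writeSampleData(sample, static_cast<size_t>(track), timeUs, 0 /* flags */);
        muxer->stop();                                  // finalizes and closes fd
    }
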
diff --git a/media/libstagefright/include/media/stagefright/MediaWriter.h b/media/libstagefright/include/media/stagefright/MediaWriter.h
index 17b1abf..9f20185 100644
--- a/media/libstagefright/include/media/stagefright/MediaWriter.h
+++ b/media/libstagefright/include/media/stagefright/MediaWriter.h
@@ -58,6 +58,7 @@
     virtual void updatePayloadType(int32_t /*payloadType*/) {}
     virtual void updateSocketNetwork(int64_t /*socketNetwork*/) {}
     virtual uint32_t getSequenceNum() { return 0; }
+    virtual uint64_t getAccumulativeBytes() { return 0; }
 
 protected:
     virtual ~MediaWriter() {}
diff --git a/media/libstagefright/include/media/stagefright/MetaDataBase.h b/media/libstagefright/include/media/stagefright/MetaDataBase.h
index 940bd86..408872f 100644
--- a/media/libstagefright/include/media/stagefright/MetaDataBase.h
+++ b/media/libstagefright/include/media/stagefright/MetaDataBase.h
@@ -264,6 +264,11 @@
     // Slow-motion markers
     kKeySlowMotionMarkers = 'slmo', // raw data, byte array following spec for
                                     // MediaFormat#KEY_SLOW_MOTION_MARKERS
+
+    kKeySampleFileOffset = 'sfof', // int64_t, sample's offset in a media file.
+    kKeyLastSampleIndexInChunk = 'lsic',  // int64_t, index of last sample in a chunk.
+    kKeySampleTimeBeforeAppend = 'lsba', // int64_t, timestamp of last sample of a track.
+
 };
 
 enum {
diff --git a/media/libstagefright/include/media/stagefright/NuMediaExtractor.h b/media/libstagefright/include/media/stagefright/NuMediaExtractor.h
index d8f2b00..6aa7c0f 100644
--- a/media/libstagefright/include/media/stagefright/NuMediaExtractor.h
+++ b/media/libstagefright/include/media/stagefright/NuMediaExtractor.h
@@ -100,6 +100,10 @@
 
     status_t getAudioPresentations(size_t trackIdx, AudioPresentationCollection *presentations);
 
+    status_t setPlaybackId(const String8& playbackId);
+
+    const char* getName() const;
+
 protected:
     virtual ~NuMediaExtractor();
 
diff --git a/media/libstagefright/renderfright/Android.bp b/media/libstagefright/renderfright/Android.bp
index 7a71f52..9a7bad9 100644
--- a/media/libstagefright/renderfright/Android.bp
+++ b/media/libstagefright/renderfright/Android.bp
@@ -1,3 +1,12 @@
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_media_libstagefright_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_media_libstagefright_license"],
+}
+
 cc_defaults {
     name: "renderfright_defaults",
     cflags: [
diff --git a/media/libstagefright/renderfright/gl/GLESRenderEngine.cpp b/media/libstagefright/renderfright/gl/GLESRenderEngine.cpp
index b7f8cb4..04da9a5 100644
--- a/media/libstagefright/renderfright/gl/GLESRenderEngine.cpp
+++ b/media/libstagefright/renderfright/gl/GLESRenderEngine.cpp
@@ -404,7 +404,7 @@
     }
 
     mImageManager = std::make_unique<ImageManager>(this);
-    mImageManager->initThread();
+    mImageManager->initThread(args.realtime);
     mDrawingBuffer = createFramebuffer();
     sp<GraphicBuffer> buf =
             new GraphicBuffer(1, 1, PIXEL_FORMAT_RGBA_8888, 1,
diff --git a/media/libstagefright/renderfright/gl/ImageManager.cpp b/media/libstagefright/renderfright/gl/ImageManager.cpp
index 6256649..5b0cf52 100644
--- a/media/libstagefright/renderfright/gl/ImageManager.cpp
+++ b/media/libstagefright/renderfright/gl/ImageManager.cpp
@@ -32,14 +32,16 @@
 
 ImageManager::ImageManager(GLESRenderEngine* engine) : mEngine(engine) {}
 
-void ImageManager::initThread() {
+void ImageManager::initThread(bool realtime) {
     mThread = std::thread([this]() { threadMain(); });
     pthread_setname_np(mThread.native_handle(), "ImageManager");
-    // Use SCHED_FIFO to minimize jitter
-    struct sched_param param = {0};
-    param.sched_priority = 2;
-    if (pthread_setschedparam(mThread.native_handle(), SCHED_FIFO, &param) != 0) {
-        ALOGE("Couldn't set SCHED_FIFO for ImageManager");
+    if (realtime) {
+        // Use SCHED_FIFO to minimize jitter
+        struct sched_param param = {0};
+        param.sched_priority = 2;
+        if (pthread_setschedparam(mThread.native_handle(), SCHED_FIFO, &param) != 0) {
+            ALOGE("Couldn't set SCHED_FIFO for ImageManager");
+        }
     }
 }
 
diff --git a/media/libstagefright/renderfright/gl/ImageManager.h b/media/libstagefright/renderfright/gl/ImageManager.h
index be67de8..6be8e3c 100644
--- a/media/libstagefright/renderfright/gl/ImageManager.h
+++ b/media/libstagefright/renderfright/gl/ImageManager.h
@@ -42,7 +42,7 @@
     // Starts the background thread for the ImageManager
     // We need this to guarantee that the class is fully-constructed before the
     // thread begins running.
-    void initThread();
+    void initThread(bool realtime);
     void cacheAsync(const sp<GraphicBuffer>& buffer, const std::shared_ptr<Barrier>& barrier)
             EXCLUDES(mMutex);
     status_t cache(const sp<GraphicBuffer>& buffer);
diff --git a/media/libstagefright/renderfright/include/renderengine/RenderEngine.h b/media/libstagefright/renderfright/include/renderengine/RenderEngine.h
index af2870f..373d07b 100644
--- a/media/libstagefright/renderfright/include/renderengine/RenderEngine.h
+++ b/media/libstagefright/renderfright/include/renderengine/RenderEngine.h
@@ -206,6 +206,7 @@
     bool supportsBackgroundBlur;
     RenderEngine::ContextPriority contextPriority;
     RenderEngine::RenderEngineType renderEngineType;
+    bool realtime;
 
     struct Builder;
 
@@ -215,7 +216,8 @@
                              bool _enableProtectedContext, bool _precacheToneMapperShaderOnly,
                              bool _supportsBackgroundBlur,
                              RenderEngine::ContextPriority _contextPriority,
-                             RenderEngine::RenderEngineType _renderEngineType)
+                             RenderEngine::RenderEngineType _renderEngineType,
+                             bool _realtime)
           : pixelFormat(_pixelFormat),
             imageCacheSize(_imageCacheSize),
             useColorManagement(_useColorManagement),
@@ -223,7 +225,8 @@
             precacheToneMapperShaderOnly(_precacheToneMapperShaderOnly),
             supportsBackgroundBlur(_supportsBackgroundBlur),
             contextPriority(_contextPriority),
-            renderEngineType(_renderEngineType) {}
+            renderEngineType(_renderEngineType),
+            realtime(_realtime) {}
     RenderEngineCreationArgs() = delete;
 };
 
@@ -262,10 +265,15 @@
         this->renderEngineType = renderEngineType;
         return *this;
     }
+    Builder& setRealtime(bool realtime) {
+        this->realtime = realtime;
+        return *this;
+    }
     RenderEngineCreationArgs build() const {
         return RenderEngineCreationArgs(pixelFormat, imageCacheSize, useColorManagement,
                                         enableProtectedContext, precacheToneMapperShaderOnly,
-                                        supportsBackgroundBlur, contextPriority, renderEngineType);
+                                        supportsBackgroundBlur, contextPriority, renderEngineType,
+                                        realtime);
     }
 
 private:
@@ -278,6 +286,7 @@
     bool supportsBackgroundBlur = false;
     RenderEngine::ContextPriority contextPriority = RenderEngine::ContextPriority::MEDIUM;
     RenderEngine::RenderEngineType renderEngineType = RenderEngine::RenderEngineType::GLES;
+    bool realtime = true;
 };
 
 class BindNativeBufferAsFramebuffer {
diff --git a/media/libstagefright/renderfright/tests/Android.bp b/media/libstagefright/renderfright/tests/Android.bp
index 9fee646..e4b13fb 100644
--- a/media/libstagefright/renderfright/tests/Android.bp
+++ b/media/libstagefright/renderfright/tests/Android.bp
@@ -12,6 +12,15 @@
 // See the License for the specific language governing permissions and
 // limitations under the License.
 
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_media_libstagefright_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_media_libstagefright_license"],
+}
+
 cc_test {
     name: "librenderfright_test",
     defaults: ["surfaceflinger_defaults"],
diff --git a/media/libstagefright/rtsp/AAVCAssembler.cpp b/media/libstagefright/rtsp/AAVCAssembler.cpp
index 72a377d..2f93d5d 100644
--- a/media/libstagefright/rtsp/AAVCAssembler.cpp
+++ b/media/libstagefright/rtsp/AAVCAssembler.cpp
@@ -112,24 +112,25 @@
 ARTPAssembler::AssemblyStatus AAVCAssembler::addNALUnit(
         const sp<ARTPSource> &source) {
     List<sp<ABuffer> > *queue = source->queue();
+    const uint32_t firstRTPTime = source->mFirstRtpTime;
 
     if (queue->empty()) {
         return NOT_ENOUGH_DATA;
     }
 
     sp<ABuffer> buffer = *queue->begin();
-    uint32_t rtpTime;
-    CHECK(buffer->meta()->findInt32("rtp-time", (int32_t *)&rtpTime));
     buffer->meta()->setObject("source", source);
 
+    int64_t rtpTime = findRTPTime(firstRTPTime, buffer);
+
     int64_t startTime = source->mFirstSysTime / 1000;
     int64_t nowTime = ALooper::GetNowUs() / 1000;
     int64_t playedTime = nowTime - startTime;
-    int64_t playedTimeRtp =
-        source->mFirstRtpTime + (((uint32_t)playedTime) * (source->mClockRate / 1000));
-    const uint32_t jitterTime =
-        (uint32_t)(source->mClockRate / ((float)1000 / (source->mJbTimeMs)));
-    uint32_t expiredTimeInJb = rtpTime + jitterTime;
+
+    int64_t playedTimeRtp = source->mFirstRtpTime + playedTime * (int64_t)source->mClockRate / 1000;
+    const int64_t jitterTime = source->mJbTimeMs * (int64_t)source->mClockRate / 1000;
+
+    int64_t expiredTimeInJb = rtpTime + jitterTime;
     bool isExpired = expiredTimeInJb <= (playedTimeRtp);
     bool isTooLate200 = expiredTimeInJb < (playedTimeRtp - jitterTime);
     bool isTooLate300 = expiredTimeInJb < (playedTimeRtp - (jitterTime * 3 / 2));
@@ -154,11 +155,11 @@
 
     if (isTooLate300) {
         ALOGW("buffer arrived after 300ms ... \t Diff in Jb=%lld \t Seq# %d",
-              ((long long)playedTimeRtp) - expiredTimeInJb, buffer->int32Data());
+                (long long)(playedTimeRtp - expiredTimeInJb), buffer->int32Data());
         printNowTimeUs(startTime, nowTime, playedTime);
         printRTPTime(rtpTime, playedTimeRtp, expiredTimeInJb, isExpired);
 
-        mNextExpectedSeqNo = pickProperSeq(queue, jitterTime, playedTimeRtp);
+        mNextExpectedSeqNo = pickProperSeq(queue, firstRTPTime, playedTimeRtp, jitterTime);
     }
 
     if (mNextExpectedSeqNoValid) {
@@ -564,14 +565,25 @@
     msg->post();
 }
 
-int32_t AAVCAssembler::pickProperSeq(const Queue *queue, uint32_t jit, int64_t play) {
+inline int64_t AAVCAssembler::findRTPTime(
+        const uint32_t& firstRTPTime, const sp<ABuffer>& buffer) {
+    /* Use int64_t for rtpTime when doing +, -, or * on it:
+       rtpTime can be near UINT32_MAX, so 32-bit arithmetic may overflow. */
+    int64_t rtpTime = 0;
+    CHECK(buffer->meta()->findInt32("rtp-time", (int32_t *)&rtpTime));
+    // If firstRTPTime is above 2^31 and rtpTime is below 2^31, rtpTime has wrapped around.
+    int64_t overflowMask = (firstRTPTime & 0x80000000 & ~rtpTime) << 1;
+    return rtpTime | overflowMask;
+}
+
+int32_t AAVCAssembler::pickProperSeq(const Queue *queue,
+        uint32_t first, int64_t play, int64_t jit) {
     sp<ABuffer> buffer = *(queue->begin());
-    uint32_t rtpTime;
     int32_t nextSeqNo = buffer->int32Data();
 
     Queue::const_iterator it = queue->begin();
     while (it != queue->end()) {
-        CHECK((*it)->meta()->findInt32("rtp-time", (int32_t *)&rtpTime));
+        int64_t rtpTime = findRTPTime(first, *it);
         // if pkt in time exists, that should be the next pivot
         if (rtpTime + jit >= play) {
             nextSeqNo = (*it)->int32Data();
@@ -613,9 +625,9 @@
             (long long)start, (long long)now, (long long)play);
 }
 
-inline void AAVCAssembler::printRTPTime(uint32_t rtp, int64_t play, uint32_t exp, bool isExp) {
-    ALOGD("rtp-time(JB)=%u, played-rtp-time(JB)=%lld, expired-rtp-time(JB)=%u isExpired=%d",
-            rtp, (long long)play, exp, isExp);
+inline void AAVCAssembler::printRTPTime(int64_t rtp, int64_t play, int64_t exp, bool isExp) {
+    ALOGD("rtp-time(JB)=%lld, played-rtp-time(JB)=%lld, expired-rtp-time(JB)=%lld expired=%d",
+            (long long)rtp, (long long)play, (long long)exp, isExp);
 }
 
 ARTPAssembler::AssemblyStatus AAVCAssembler::assembleMore(
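
A worked example of the wrap handling in findRTPTime() above, with illustrative values:

    #include <cstdint>

    int64_t unwrapExample() {
        uint32_t firstRTPTime = 0xFFFFFF00u;   // stream started near UINT32_MAX
        int64_t rtpTime = 0x00000120;          // this packet's 32-bit timestamp has wrapped
        // Bit 31 of the first timestamp is set and bit 31 of this one is clear,
        // so add 2^32 to keep comparisons against played-out RTP time monotonic.
        int64_t overflowMask = (firstRTPTime & 0x80000000 & ~rtpTime) << 1;   // 0x100000000
        return rtpTime | overflowMask;         // 0x100000120, still above firstRTPTime
    }
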
diff --git a/media/libstagefright/rtsp/AAVCAssembler.h b/media/libstagefright/rtsp/AAVCAssembler.h
index 79fc7c2..9d71e2f 100644
--- a/media/libstagefright/rtsp/AAVCAssembler.h
+++ b/media/libstagefright/rtsp/AAVCAssembler.h
@@ -63,12 +63,13 @@
 
     void submitAccessUnit();
 
-    int32_t pickProperSeq(const Queue *q, uint32_t jit, int64_t play);
+    inline int64_t findRTPTime(const uint32_t& firstRTPTime, const sp<ABuffer>& buffer);
+    int32_t pickProperSeq(const Queue *q, uint32_t first, int64_t play, int64_t jit);
     bool recycleUnit(uint32_t start, uint32_t end, uint32_t connected,
             size_t avail, float goodRatio);
     int32_t deleteUnitUnderSeq(Queue *q, uint32_t seq);
     void printNowTimeUs(int64_t start, int64_t now, int64_t play);
-    void printRTPTime(uint32_t rtp, int64_t play, uint32_t exp, bool isExp);
+    void printRTPTime(int64_t rtp, int64_t play, int64_t exp, bool isExp);
 
     DISALLOW_EVIL_CONSTRUCTORS(AAVCAssembler);
 };
diff --git a/media/libstagefright/rtsp/AHEVCAssembler.cpp b/media/libstagefright/rtsp/AHEVCAssembler.cpp
index 148a0ba..553ea08 100644
--- a/media/libstagefright/rtsp/AHEVCAssembler.cpp
+++ b/media/libstagefright/rtsp/AHEVCAssembler.cpp
@@ -122,6 +122,7 @@
 ARTPAssembler::AssemblyStatus AHEVCAssembler::addNALUnit(
         const sp<ARTPSource> &source) {
     List<sp<ABuffer> > *queue = source->queue();
+    const uint32_t firstRTPTime = source->mFirstRtpTime;
 
     if (queue->empty()) {
         return NOT_ENOUGH_DATA;
@@ -129,15 +130,15 @@
 
     sp<ABuffer> buffer = *queue->begin();
     buffer->meta()->setObject("source", source);
-    uint32_t rtpTime;
-    CHECK(buffer->meta()->findInt32("rtp-time", (int32_t *)&rtpTime));
+    int64_t rtpTime = findRTPTime(firstRTPTime, buffer);
+
     int64_t startTime = source->mFirstSysTime / 1000;
     int64_t nowTime = ALooper::GetNowUs() / 1000;
     int64_t playedTime = nowTime - startTime;
-    int64_t playedTimeRtp = source->mFirstRtpTime +
-        (((uint32_t)playedTime) * (source->mClockRate / 1000));
-    const uint32_t jitterTime = (uint32_t)(source->mClockRate / ((float)1000 / (source->mJbTimeMs)));
-    uint32_t expiredTimeInJb = rtpTime + jitterTime;
+    int64_t playedTimeRtp = source->mFirstRtpTime + playedTime * (int64_t)source->mClockRate / 1000;
+    const int64_t jitterTime = source->mJbTimeMs * (int64_t)source->mClockRate / 1000;
+
+    int64_t expiredTimeInJb = rtpTime + jitterTime;
     bool isExpired = expiredTimeInJb <= (playedTimeRtp);
     bool isTooLate200 = expiredTimeInJb < (playedTimeRtp - jitterTime);
     bool isTooLate300 = expiredTimeInJb < (playedTimeRtp - (jitterTime * 3 / 2));
@@ -162,11 +163,11 @@
 
     if (isTooLate300) {
         ALOGW("buffer arrived after 300ms ... \t Diff in Jb=%lld \t Seq# %d",
-              ((long long)playedTimeRtp) - expiredTimeInJb, buffer->int32Data());
+                (long long)(playedTimeRtp - expiredTimeInJb), buffer->int32Data());
         printNowTimeUs(startTime, nowTime, playedTime);
         printRTPTime(rtpTime, playedTimeRtp, expiredTimeInJb, isExpired);
 
-        mNextExpectedSeqNo = pickProperSeq(queue, jitterTime, playedTimeRtp);
+        mNextExpectedSeqNo = pickProperSeq(queue, firstRTPTime, playedTimeRtp, jitterTime);
     }
 
     if (mNextExpectedSeqNoValid) {
@@ -577,14 +578,25 @@
     msg->post();
 }
 
-int32_t AHEVCAssembler::pickProperSeq(const Queue *queue, uint32_t jit, int64_t play) {
+inline int64_t AHEVCAssembler::findRTPTime(
+        const uint32_t& firstRTPTime, const sp<ABuffer>& buffer) {
+    /* Use int64_t for rtpTime when doing +, -, or * on it:
+       rtpTime can be near UINT32_MAX, so 32-bit arithmetic may overflow. */
+    int64_t rtpTime = 0;
+    CHECK(buffer->meta()->findInt32("rtp-time", (int32_t *)&rtpTime));
+    // If firstRTPTime is above 2^31 and rtpTime is below 2^31, rtpTime has wrapped around.
+    int64_t overflowMask = (firstRTPTime & 0x80000000 & ~rtpTime) << 1;
+    return rtpTime | overflowMask;
+}
+
+int32_t AHEVCAssembler::pickProperSeq(const Queue *queue,
+        uint32_t first, int64_t play, int64_t jit) {
     sp<ABuffer> buffer = *(queue->begin());
-    uint32_t rtpTime;
     int32_t nextSeqNo = buffer->int32Data();
 
     Queue::const_iterator it = queue->begin();
     while (it != queue->end()) {
-        CHECK((*it)->meta()->findInt32("rtp-time", (int32_t *)&rtpTime));
+        int64_t rtpTime = findRTPTime(first, *it);
         // if pkt in time exists, that should be the next pivot
         if (rtpTime + jit >= play) {
             nextSeqNo = (*it)->int32Data();
@@ -626,12 +638,11 @@
             (long long)start, (long long)now, (long long)play);
 }
 
-inline void AHEVCAssembler::printRTPTime(uint32_t rtp, int64_t play, uint32_t exp, bool isExp) {
-    ALOGD("rtp-time(JB)=%u, played-rtp-time(JB)=%lld, expired-rtp-time(JB)=%u isExpired=%d",
-            rtp, (long long)play, exp, isExp);
+inline void AHEVCAssembler::printRTPTime(int64_t rtp, int64_t play, int64_t exp, bool isExp) {
+    ALOGD("rtp-time(JB)=%lld, played-rtp-time(JB)=%lld, expired-rtp-time(JB)=%lld expired=%d",
+            (long long)rtp, (long long)play, (long long)exp, isExp);
 }
 
-
 ARTPAssembler::AssemblyStatus AHEVCAssembler::assembleMore(
         const sp<ARTPSource> &source) {
     AssemblyStatus status = addNALUnit(source);
diff --git a/media/libstagefright/rtsp/AHEVCAssembler.h b/media/libstagefright/rtsp/AHEVCAssembler.h
index 16fc1c8..bf1cded 100644
--- a/media/libstagefright/rtsp/AHEVCAssembler.h
+++ b/media/libstagefright/rtsp/AHEVCAssembler.h
@@ -64,12 +64,13 @@
 
     void submitAccessUnit();
 
-    int32_t pickProperSeq(const Queue *queue, uint32_t jit, int64_t play);
-    bool recycleUnit(uint32_t start, uint32_t end, uint32_t conneceted,
+    inline int64_t findRTPTime(const uint32_t& firstRTPTime, const sp<ABuffer>& buffer);
+    int32_t pickProperSeq(const Queue *q, uint32_t first, int64_t play, int64_t jit);
+    bool recycleUnit(uint32_t start, uint32_t end, uint32_t connected,
              size_t avail, float goodRatio);
     int32_t deleteUnitUnderSeq(Queue *queue, uint32_t seq);
     void printNowTimeUs(int64_t start, int64_t now, int64_t play);
-    void printRTPTime(uint32_t rtp, int64_t play, uint32_t exp, bool isExp);
+    void printRTPTime(int64_t rtp, int64_t play, int64_t exp, bool isExp);
 
     DISALLOW_EVIL_CONSTRUCTORS(AHEVCAssembler);
 };
diff --git a/media/libstagefright/rtsp/ARTPConnection.cpp b/media/libstagefright/rtsp/ARTPConnection.cpp
index 97a9bbb..61c06d1 100644
--- a/media/libstagefright/rtsp/ARTPConnection.cpp
+++ b/media/libstagefright/rtsp/ARTPConnection.cpp
@@ -464,6 +464,22 @@
                     ALOGD("Send FIR immediately for lost Packets");
                     send(&*it, buffer);
                 }
+
+                buffer->setRange(0, 0);
+                it->mSources.valueAt(i)->addTMMBR(buffer, mTargetBitrate);
+                mTargetBitrate = -1;
+                if (buffer->size() > 0) {
+                    ALOGV("Sending TMMBR...");
+                    ssize_t n = send(&*it, buffer);
+
+                    if (n != (ssize_t)buffer->size()) {
+                        ALOGW("failed to send RTCP TMMBR (%s).",
+                                n >= 0 ? "connection gone" : strerror(errno));
+
+                        it = mStreams.erase(it);
+                        continue;
+                    }
+                }
             }
 
             ++it;
@@ -509,16 +525,14 @@
 
                 ssize_t n = send(s, buffer);
 
-                if (n <= 0) {
+                if (n != (ssize_t)buffer->size()) {
                     ALOGW("failed to send RTCP receiver report (%s).",
-                         n == 0 ? "connection gone" : strerror(errno));
+                            n >= 0 ? "connection gone" : strerror(errno));
 
                     it = mStreams.erase(it);
                     continue;
                 }
 
-                CHECK_EQ(n, (ssize_t)buffer->size());
-
                 mLastReceiverReportTimeUs = nowUs;
             }
 
@@ -862,6 +876,12 @@
 
     sp<ARTPSource> source = findSource(s, id);
 
+    // Report final statistics to be used for RTP data usage.
+    int64_t nowUs = ALooper::GetNowUs();
+    int32_t timeDiff = (nowUs - mLastBitrateReportTimeUs) / 1000000ll;
+    int32_t bitrate = mCumulativeBytes * 8 / timeDiff;
+    source->notifyPktInfo(bitrate, true /* isRegular */);
+
     source->byeReceived();
 
     return OK;
@@ -1079,6 +1099,28 @@
         mCumulativeBytes = 0;
         mLastBitrateReportTimeUs = nowUs;
     }
+    else if (mLastEarlyNotifyTimeUs + 100000ll <= nowUs) {
+        int32_t timeDiff = (nowUs - mLastBitrateReportTimeUs) / 1000000ll;
+        int32_t bitrate = mCumulativeBytes * 8 / timeDiff;
+        mLastEarlyNotifyTimeUs = nowUs;
+
+        List<StreamInfo>::iterator it = mStreams.begin();
+        while (it != mStreams.end()) {
+            StreamInfo *s = &*it;
+            if (s->mIsInjected) {
+                ++it;
+                continue;
+            }
+            for (size_t i = 0; i < s->mSources.size(); ++i) {
+                sp<ARTPSource> source = s->mSources.valueAt(i);
+                if (source->isNeedToEarlyNotify()) {
+                    source->notifyPktInfo(bitrate, false /* isRegular */);
+                    mLastEarlyNotifyTimeUs = nowUs + (1000000ll * 3600 * 24); // after 1 day
+                }
+            }
+            ++it;
+        }
+    }
     else if (mLastBitrateReportTimeUs + 1000000ll <= nowUs) {
         int32_t timeDiff = (nowUs - mLastBitrateReportTimeUs) / 1000000ll;
         int32_t bitrate = mCumulativeBytes * 8 / timeDiff;
@@ -1101,31 +1143,15 @@
             }
 
             buffer->setRange(0, 0);
-
             for (size_t i = 0; i < s->mSources.size(); ++i) {
                 sp<ARTPSource> source = s->mSources.valueAt(i);
-                source->notifyPktInfo(bitrate, nowUs);
-                source->addTMMBR(buffer, mTargetBitrate);
-            }
-            if (buffer->size() > 0) {
-                ALOGV("Sending TMMBR...");
-
-                ssize_t n = send(s, buffer);
-
-                if (n <= 0) {
-                    ALOGW("failed to send RTCP TMMBR (%s).",
-                         n == 0 ? "connection gone" : strerror(errno));
-
-                    it = mStreams.erase(it);
-                    continue;
-                }
-
-                CHECK_EQ(n, (ssize_t)buffer->size());
+                source->notifyPktInfo(bitrate, true /* isRegular */);
             }
             ++it;
         }
         mCumulativeBytes = 0;
         mLastBitrateReportTimeUs = nowUs;
+        mLastEarlyNotifyTimeUs = nowUs;
     }
 }
 void ARTPConnection::onInjectPacket(const sp<AMessage> &msg) {
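
An illustrative restatement of the early-notify rule added above: roughly every 100 ms the connection asks each source whether more than 5 packets went missing in the current interval, fires a single RTP_QUALITY_EMC event if so, and then defers further early notifies until the next regular once-per-second report resets the interval counters:

    // Standalone sketch; parameter names are illustrative, the threshold matches
    // ARTPSource::isNeedToEarlyNotify().
    bool needEarlyNotify(uint32_t highestSeq, uint32_t baseSeq,
                         uint32_t prevExpected, int32_t received, int32_t prevReceived) {
        int32_t expectedNow = (highestSeq - baseSeq + 1) - prevExpected;
        int32_t receivedNow = received - prevReceived;
        return (expectedNow - receivedNow) > 5;
    }
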
diff --git a/media/libstagefright/rtsp/ARTPConnection.h b/media/libstagefright/rtsp/ARTPConnection.h
index 7c8218f..a37ac0e 100644
--- a/media/libstagefright/rtsp/ARTPConnection.h
+++ b/media/libstagefright/rtsp/ARTPConnection.h
@@ -84,6 +84,7 @@
     bool mPollEventPending;
     int64_t mLastReceiverReportTimeUs;
     int64_t mLastBitrateReportTimeUs;
+    int64_t mLastEarlyNotifyTimeUs;
 
     int32_t mSelfID;
     int32_t mTargetBitrate;
diff --git a/media/libstagefright/rtsp/ARTPSource.cpp b/media/libstagefright/rtsp/ARTPSource.cpp
index c611f6f..3fdf8e4 100644
--- a/media/libstagefright/rtsp/ARTPSource.cpp
+++ b/media/libstagefright/rtsp/ARTPSource.cpp
@@ -34,6 +34,8 @@
 #include <media/stagefright/foundation/ADebug.h>
 #include <media/stagefright/foundation/AMessage.h>
 
+#include <strings.h>
+
 namespace android {
 
 static uint32_t kSourceID = 0xdeadbeef;
@@ -380,21 +382,24 @@
     data[14] = (mID >> 8) & 0xff;
     data[15] = mID & 0xff;
 
-    int32_t exp, mantissa;
+    // Find the first '1' bit from the left and right ends of the value.
+    int32_t leftEnd = 31 - __builtin_clz(targetBitrate);
+    int32_t rightEnd = ffs(targetBitrate) - 1;
 
-    // Round off to the nearest 2^4th
-    ALOGI("UE -> Op Req Rx bitrate : %d ", targetBitrate & 0xfffffff0);
-    for (exp=4 ; exp < 32 ; exp++)
-        if (((targetBitrate >> exp) & 0x01) != 0)
-            break;
-    mantissa = targetBitrate >> exp;
+    // The mantissa has only 17 bits of space per the RTCP specification.
+    if ((leftEnd - rightEnd) > 16) {
+        rightEnd = leftEnd - 16;
+    }
+    int32_t mantissa = targetBitrate >> rightEnd;
 
-    data[16] = ((exp << 2) & 0xfc) | ((mantissa & 0x18000) >> 15);
-    data[17] =                        (mantissa & 0x07f80) >> 7;
-    data[18] =                        (mantissa & 0x0007f) << 1;
+    data[16] = ((rightEnd << 2) & 0xfc) | ((mantissa & 0x18000) >> 15);
+    data[17] =                             (mantissa & 0x07f80) >> 7;
+    data[18] =                             (mantissa & 0x0007f) << 1;
     data[19] = 40;              // 40 bytes overhead;
 
     buffer->setRange(buffer->offset(), buffer->size() + (data[3] + 1) * sizeof(int32_t));
+
+    ALOGI("UE -> Op Req Rx bitrate : %d ", mantissa << rightEnd);
 }
 
 int ARTPSource::addNACK(const sp<ABuffer> &buffer) {
@@ -512,10 +517,22 @@
     mIssueFIRRequests = enable;
 }
 
-void ARTPSource::notifyPktInfo(int32_t bitrate, int64_t /*time*/) {
+bool ARTPSource::isNeedToEarlyNotify() {
+    uint32_t expected = mHighestSeqNumber - mBaseSeqNumber + 1;
+    int32_t intervalExpectedInNow = expected - mPrevExpected;
+    int32_t intervalReceivedInNow = mNumBuffersReceived - mPrevNumBuffersReceived;
+
+    if (intervalExpectedInNow - intervalReceivedInNow > 5)
+        return true;
+    return false;
+}
+
+void ARTPSource::notifyPktInfo(int32_t bitrate, bool isRegular) {
+    int32_t payloadType = isRegular ? RTP_QUALITY : RTP_QUALITY_EMC;
+
     sp<AMessage> notify = mNotify->dup();
     notify->setInt32("rtcp-event", 1);
-    notify->setInt32("payload-type", 102);
+    notify->setInt32("payload-type", payloadType);
     notify->setInt32("feedback-type", 0);
     // sending target bitrate up to application to share rtp quality.
     notify->setInt32("bit-rate", bitrate);
@@ -526,9 +543,11 @@
     notify->setInt32("prev-num-buf-recv", mPrevNumBuffersReceived);
     notify->post();
 
-    uint32_t expected = mHighestSeqNumber - mBaseSeqNumber + 1;
-    mPrevExpected = expected;
-    mPrevNumBuffersReceived = mNumBuffersReceived;
+    if (isRegular) {
+        uint32_t expected = mHighestSeqNumber - mBaseSeqNumber + 1;
+        mPrevExpected = expected;
+        mPrevNumBuffersReceived = mNumBuffersReceived;
+    }
 }
 
 void ARTPSource::onIssueFIRByAssembler() {
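
A worked example of the exponent/mantissa packing now used by addTMMBR(), with an illustrative 1 Mbps target bitrate:

    #include <strings.h>

    int32_t packTmmbrExample() {
        int32_t targetBitrate = 1000000;                       // placeholder value
        int32_t leftEnd  = 31 - __builtin_clz(targetBitrate);  // 19: highest set bit
        int32_t rightEnd = ffs(targetBitrate) - 1;             // 6: lowest set bit
        if ((leftEnd - rightEnd) > 16) {                       // mantissa is limited to 17 bits,
            rightEnd = leftEnd - 16;                           // so drop low bits if needed
        }
        int32_t mantissa = targetBitrate >> rightEnd;          // 15625
        return mantissa << rightEnd;                           // 1000000: lossless for this value
    }
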
diff --git a/media/libstagefright/rtsp/ARTPSource.h b/media/libstagefright/rtsp/ARTPSource.h
index ea683a0..c51fd8a 100644
--- a/media/libstagefright/rtsp/ARTPSource.h
+++ b/media/libstagefright/rtsp/ARTPSource.h
@@ -40,6 +40,17 @@
             const sp<ASessionDescription> &sessionDesc, size_t index,
             const sp<AMessage> &notify);
 
+    enum {
+        RTP_FIRST_PACKET = 100,
+        RTCP_FIRST_PACKET = 101,
+        RTP_QUALITY = 102,
+        RTP_QUALITY_EMC = 103,
+        RTCP_TSFB = 205,
+        RTCP_PSFB = 206,
+        RTP_CVO = 300,
+        RTP_AUTODOWN = 400,
+    };
+
     void processRTPPacket(const sp<ABuffer> &buffer);
     void timeUpdate(uint32_t rtpTime, uint64_t ntpTime);
     void byeReceived();
@@ -55,7 +66,8 @@
     void setSelfID(const uint32_t selfID);
     void setJbTime(const uint32_t jbTimeMs);
     void setPeriodicFIR(bool enable);
-    void notifyPktInfo(int32_t bitrate, int64_t time);
+    bool isNeedToEarlyNotify();
+    void notifyPktInfo(int32_t bitrate, bool isRegular);
     // FIR needs to be sent by missing packet or broken video image.
     void onIssueFIRByAssembler();
 
diff --git a/media/libstagefright/rtsp/ARTPWriter.cpp b/media/libstagefright/rtsp/ARTPWriter.cpp
index 76afb04..ec70952 100644
--- a/media/libstagefright/rtsp/ARTPWriter.cpp
+++ b/media/libstagefright/rtsp/ARTPWriter.cpp
@@ -20,8 +20,6 @@
 
 #include "ARTPWriter.h"
 
-#include <fcntl.h>
-
 #include <media/stagefright/MediaSource.h>
 #include <media/stagefright/foundation/ABuffer.h>
 #include <media/stagefright/foundation/ADebug.h>
@@ -32,6 +30,9 @@
 #include <media/stagefright/MetaData.h>
 #include <utils/ByteOrder.h>
 
+#include <fcntl.h>
+#include <strings.h>
+
 #define PT      97
 #define PT_STR  "97"
 
@@ -46,10 +47,12 @@
 #define H265_NALU_SPS 0x21
 #define H265_NALU_PPS 0x22
 
-#define LINK_HEADER_SIZE 14
-#define IP_HEADER_SIZE 20
+#define IPV4_HEADER_SIZE 20
+#define IPV6_HEADER_SIZE 40
 #define UDP_HEADER_SIZE 8
-#define TCPIP_HEADER_SIZE (LINK_HEADER_SIZE + IP_HEADER_SIZE + UDP_HEADER_SIZE)
+#define TCPIPV4_HEADER_SIZE (IPV4_HEADER_SIZE + UDP_HEADER_SIZE)
+#define TCPIPV6_HEADER_SIZE (IPV6_HEADER_SIZE + UDP_HEADER_SIZE)
+#define TCPIP_HEADER_SIZE TCPIPV4_HEADER_SIZE
 #define RTP_HEADER_SIZE 12
 #define RTP_HEADER_EXT_SIZE 8
 #define RTP_FU_HEADER_SIZE 2
@@ -62,6 +65,9 @@
 static const size_t kMaxPacketSize = 1280;
 static char kCNAME[255] = "someone@somewhere";
 
+static const size_t kTrafficRecorderMaxEntries = 128;
+static const size_t kTrafficRecorderMaxTimeSpanMs = 2000;
+
 static int UniformRand(int limit) {
     return ((double)rand() * limit) / RAND_MAX;
 }
@@ -71,7 +77,8 @@
       mFd(dup(fd)),
       mLooper(new ALooper),
       mReflector(new AHandlerReflector<ARTPWriter>(this)),
-      mTrafficRec(new TrafficRecorder<uint32_t, size_t>(128)) {
+      mTrafficRec(new TrafficRecorder<uint32_t /* Time */, Bytes>(
+              kTrafficRecorderMaxEntries, kTrafficRecorderMaxTimeSpanMs)) {
     CHECK_GE(fd, 0);
     mIsIPv6 = false;
 
@@ -122,7 +129,8 @@
       mFd(dup(fd)),
       mLooper(new ALooper),
       mReflector(new AHandlerReflector<ARTPWriter>(this)),
-      mTrafficRec(new TrafficRecorder<uint32_t, size_t>(128)) {
+      mTrafficRec(new TrafficRecorder<uint32_t /* Time */, Bytes>(
+              kTrafficRecorderMaxEntries, kTrafficRecorderMaxTimeSpanMs)) {
     CHECK_GE(fd, 0);
     mIsIPv6 = false;
 
@@ -135,7 +143,8 @@
     mSPSBuf = NULL;
     mPPSBuf = NULL;
 
-    mSeqNo = seqNo;
+    initState();
+    mSeqNo = seqNo;     // Must use the explicit sequence number to keep RTP continuity
 
 #if LOG_TO_FILES
     mRTPFd = open(
@@ -186,6 +195,29 @@
     mFd = -1;
 }
 
+void ARTPWriter::initState() {
+    if (mSourceID == 0)
+        mSourceID = rand();
+    mPayloadType = 0;
+    if (mSeqNo == 0)
+        mSeqNo = UniformRand(65536);
+    mRTPTimeBase = 0;
+    mNumRTPSent = 0;
+    mNumRTPOctetsSent = 0;
+    mLastRTPTime = 0;
+    mLastNTPTime = 0;
+
+    mOpponentID = 0;
+    mBitrate = 192000;
+
+    mNumSRsSent = 0;
+    mRTPCVOExtMap = -1;
+    mRTPCVODegrees = 0;
+    mRTPSockNetwork = 0;
+
+    mMode = INVALID;
+}
+
 status_t ARTPWriter::addSource(const sp<MediaSource> &source) {
     mSource = source;
     return OK;
@@ -203,21 +235,7 @@
     }
 
     mFlags &= ~kFlagEOS;
-    if (mSourceID == 0)
-        mSourceID = rand();
-    if (mSeqNo == 0)
-        mSeqNo = UniformRand(65536);
-    mRTPTimeBase = 0;
-    mNumRTPSent = 0;
-    mNumRTPOctetsSent = 0;
-    mLastRTPTime = 0;
-    mLastNTPTime = 0;
-    mOpponentID = 0;
-    mBitrate = 192000;
-    mNumSRsSent = 0;
-    mRTPCVOExtMap = -1;
-    mRTPCVODegrees = 0;
-    mRTPSockNetwork = 0;
+    initState();
 
     const char *mime;
     CHECK(mSource->getFormat()->findCString(kKeyMIMEType, &mime));
@@ -246,7 +264,6 @@
     if (params->findInt64(kKeySocketNetwork, &sockNetwork))
         updateSocketNetwork(sockNetwork);
 
-    mMode = INVALID;
     if (!strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_AVC)) {
         mMode = H264;
     } else if (!strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_HEVC)) {
@@ -600,7 +617,8 @@
         ALOGW("packets can not be sent. ret=%d, buf=%d", (int)n, (int)buffer->size());
     } else {
         // Record current traffic & Print bits while last 1sec (1000ms)
-        mTrafficRec->writeBytes(buffer->size());
+        mTrafficRec->writeBytes(buffer->size() +
+                (mIsIPv6 ? TCPIPV6_HEADER_SIZE : TCPIPV4_HEADER_SIZE));
         mTrafficRec->printAccuBitsForLastPeriod(1000, 1000);
     }
 
@@ -729,21 +747,24 @@
     data[14] = (mOpponentID >> 8) & 0xff;
     data[15] = mOpponentID & 0xff;
 
-    int32_t exp, mantissa;
+    // Find the first '1' bit from the left and right ends of the value.
+    int32_t leftEnd = 31 - __builtin_clz(mBitrate);
+    int32_t rightEnd = ffs(mBitrate) - 1;
 
-    // Round off to the nearest 2^4th
-    ALOGI("UE -> Op Noti Tx bitrate : %d ", mBitrate & 0xfffffff0);
-    for (exp=4 ; exp < 32 ; exp++)
-        if (((mBitrate >> exp) & 0x01) != 0)
-            break;
-    mantissa = mBitrate >> exp;
+    // The mantissa has only 17 bits of space per the RTCP specification.
+    if ((leftEnd - rightEnd) > 16) {
+        rightEnd = leftEnd - 16;
+    }
+    int32_t mantissa = mBitrate >> rightEnd;
 
-    data[16] = ((exp << 2) & 0xfc) | ((mantissa & 0x18000) >> 15);
-    data[17] =                        (mantissa & 0x07f80) >> 7;
-    data[18] =                        (mantissa & 0x0007f) << 1;
+    data[16] = ((rightEnd << 2) & 0xfc) | ((mantissa & 0x18000) >> 15);
+    data[17] =                             (mantissa & 0x07f80) >> 7;
+    data[18] =                             (mantissa & 0x0007f) << 1;
     data[19] = 40;              // 40 bytes overhead;
 
     buffer->setRange(buffer->offset(), buffer->size() + 20);
+
+    ALOGI("UE -> Op Noti Tx bitrate : %d ", mantissa << rightEnd);
 }
 
 // static
@@ -1362,6 +1383,10 @@
     return mSeqNo;
 }
 
+uint64_t ARTPWriter::getAccumulativeBytes() {
+    return mTrafficRec->readBytesForTotal();
+}
+
 static size_t getFrameSize(bool isWide, unsigned FT) {
     static const size_t kFrameSizeNB[8] = {
         95, 103, 118, 134, 148, 159, 204, 244
diff --git a/media/libstagefright/rtsp/ARTPWriter.h b/media/libstagefright/rtsp/ARTPWriter.h
index 6f25a66..28d6ec5 100644
--- a/media/libstagefright/rtsp/ARTPWriter.h
+++ b/media/libstagefright/rtsp/ARTPWriter.h
@@ -53,6 +53,7 @@
     void updateSocketDscp(int32_t dscp);
     void updateSocketNetwork(int64_t socketNetwork);
     uint32_t getSequenceNum();
+    virtual uint64_t getAccumulativeBytes() override;
 
     virtual void onMessageReceived(const sp<AMessage> &msg);
     virtual void setTMMBNInfo(uint32_t opponentID, uint32_t bitrate);
@@ -118,7 +119,8 @@
 
     uint32_t mOpponentID;
     uint32_t mBitrate;
-    sp<TrafficRecorder<uint32_t, size_t> > mTrafficRec;
+    typedef uint64_t Bytes;
+    sp<TrafficRecorder<uint32_t /* Time */, Bytes> > mTrafficRec;
 
     int32_t mNumSRsSent;
     int32_t mRTPCVOExtMap;
@@ -135,6 +137,7 @@
 
     static uint64_t GetNowNTP();
 
+    void initState();
     void onRead(const sp<AMessage> &msg);
     void onSendSR(const sp<AMessage> &msg);
 
diff --git a/media/libstagefright/rtsp/TrafficRecorder.h b/media/libstagefright/rtsp/TrafficRecorder.h
index f8e7c03..8ba8f90 100644
--- a/media/libstagefright/rtsp/TrafficRecorder.h
+++ b/media/libstagefright/rtsp/TrafficRecorder.h
@@ -27,44 +27,49 @@
 template <class Time, class Bytes>
 class TrafficRecorder : public RefBase {
 private:
+    constexpr static size_t kMinNumEntries = 4;
+    constexpr static size_t kMaxNumEntries = 1024;
+
     size_t mSize;
     size_t mSizeMask;
     Time *mTimeArray = NULL;
     Bytes *mBytesArray = NULL;
-    size_t mHeadIdx = 0;
-    size_t mTailIdx = 0;
+    size_t mHeadIdx;
+    size_t mTailIdx;
 
-    Time mClock = 0;
-    Time mLastTimeOfPrint = 0;
-    Bytes mAccuBytesOfPrint = 0;
+    int mLastReadIdx;
+
+    const Time mRecordLimit;
+    Time mClock;
+    Time mLastTimeOfPrint;
+    Bytes mAccuBytes;
+
 public:
-    TrafficRecorder();
-    TrafficRecorder(size_t size);
+    TrafficRecorder(size_t size, Time accuTimeLimit);
     virtual ~TrafficRecorder();
 
     void init();
-
     void updateClock(Time now);
-
+    Bytes readBytesForTotal();
     Bytes readBytesForLastPeriod(Time period);
     void writeBytes(Bytes bytes);
-
     void printAccuBitsForLastPeriod(Time period, Time unit);
 };
 
 template <class Time, class Bytes>
-TrafficRecorder<Time, Bytes>::TrafficRecorder() {
-    TrafficRecorder(128);
-}
-
-template <class Time, class Bytes>
-TrafficRecorder<Time, Bytes>::TrafficRecorder(size_t size) {
-    size_t exp;
-    for (exp = 0; exp < 32; exp++) {
-        if (size <= (1ul << exp)) {
-            break;
-        }
+TrafficRecorder<Time, Bytes>::TrafficRecorder(size_t size, Time recordLimit)
+    : mRecordLimit(recordLimit) {
+    if (size > kMaxNumEntries) {
+        LOG(VERBOSE) << "Limiting TrafficRecorder size to " << kMaxNumEntries;
+        size = kMaxNumEntries;
+    } else if (size < kMinNumEntries) {
+        LOG(VERBOSE) << "Limiting TrafficRecorder size to " << kMinNumEntries;
+        size = kMinNumEntries;
     }
+
+    size_t exp = ((sizeof(size_t) == 8) ?
+                  64 - __builtin_clzl(size - 1) :
+                  32 - __builtin_clz(size - 1));
     mSize = (1ul << exp);         // size = 2^exp
     mSizeMask = mSize - 1;
 
@@ -84,9 +89,15 @@
 template <class Time, class Bytes>
 void TrafficRecorder<Time, Bytes>::init() {
     mHeadIdx = 0;
-    mTailIdx = 0;
-    mTimeArray[0] = 0;
-    mBytesArray[0] = 0;
+    mTailIdx = mSizeMask;
+    for (int i = 0 ; i < mSize ; i++) {
+        mTimeArray[i] = 0;
+        mBytesArray[i] = 0;
+    }
+    mClock = 0;
+    mLastReadIdx = 0;
+    mLastTimeOfPrint = 0;
+    mAccuBytes = 0;
 }
 
 template <class Time, class Bytes>
@@ -95,54 +106,71 @@
 }
 
 template <class Time, class Bytes>
-Bytes TrafficRecorder<Time, Bytes>::readBytesForLastPeriod(Time period) {
-    Bytes bytes = 0;
+Bytes TrafficRecorder<Time, Bytes>::readBytesForTotal() {
+    return mAccuBytes;
+}
 
-    size_t i = mTailIdx;
-    while (i != mHeadIdx) {
-        LOG(VERBOSE) << "READ " << i << " time " << mTimeArray[i] << " \t EndOfPeriod " << mClock - period;
+template <class Time, class Bytes>
+Bytes TrafficRecorder<Time, Bytes>::readBytesForLastPeriod(Time period) {
+    // Not enough data
+    if (period > mClock)
+        return 0;
+
+    Bytes bytes = 0;
+    int i = mHeadIdx;
+    while (i != mTailIdx) {
+        LOG(VERBOSE) << "READ " << i << " time " << mTimeArray[i]
+                << " \t EndOfPeriod " << mClock - period
+                << "\t\t Bytes:" << mBytesArray[i] << "\t\t Accu: " << bytes;
         if (mTimeArray[i] < mClock - period) {
             break;
         }
         bytes += mBytesArray[i];
-        i = (i + mSize - 1) & mSizeMask;
+        i = (i - 1) & mSizeMask;
     }
-    mHeadIdx = i;
+    mLastReadIdx = (i + 1) & mSizeMask;
+
     return bytes;
 }
 
 template <class Time, class Bytes>
 void TrafficRecorder<Time, Bytes>::writeBytes(Bytes bytes) {
-    size_t writeIdx;
-    if (mClock == mTimeArray[mTailIdx]) {
-        writeIdx = mTailIdx;
+    int writeIdx;
+    if (mClock == mTimeArray[mHeadIdx]) {
+        writeIdx = mHeadIdx;
         mBytesArray[writeIdx] += bytes;
     } else {
-        writeIdx = (mTailIdx + 1) % mSize;
+        writeIdx = (mHeadIdx + 1) & mSizeMask;
         mTimeArray[writeIdx] = mClock;
         mBytesArray[writeIdx] = bytes;
     }
 
     LOG(VERBOSE) << "WRITE " << writeIdx << " time " << mClock;
-    if (writeIdx == mHeadIdx) {
-        LOG(WARNING) << "Traffic recorder size exceeded at " << mHeadIdx;
-        mHeadIdx = (mHeadIdx + 1) & mSizeMask;
+    if (writeIdx == mTailIdx) {
+        mTailIdx = (mTailIdx + 1) & mSizeMask;
     }
 
-    mTailIdx = writeIdx;
-    mAccuBytesOfPrint += bytes;
+    mHeadIdx = writeIdx;
+    mAccuBytes += bytes;
 }
 
 template <class Time, class Bytes>
 void TrafficRecorder<Time, Bytes>::printAccuBitsForLastPeriod(Time period, Time unit) {
-    Time duration = mClock - mLastTimeOfPrint;
-    float numOfUnit = (float)duration / unit;
-    if (duration > period) {
-        ALOGD("Actual Tx period %.0f ms \t %.0f Bits/Unit",
-              numOfUnit * 1000.f, mAccuBytesOfPrint * 8.f / numOfUnit);
-        mLastTimeOfPrint = mClock;
-        mAccuBytesOfPrint = 0;
-        init();
+    Time timeSinceLastPrint = mClock - mLastTimeOfPrint;
+    if (timeSinceLastPrint < period)
+        return;
+
+    Bytes sum = readBytesForLastPeriod(period);
+    Time readPeriod = mClock - mTimeArray[mLastReadIdx];
+
+    float numOfUnit = (float)(readPeriod) / (unit + FLT_MIN);
+    ALOGD("Actual Tx period %.3f unit \t %.0f bytes (%.0f Kbits)/Unit",
+          numOfUnit, sum / numOfUnit, sum * 8.f / numOfUnit / 1000.f);
+    mLastTimeOfPrint = mClock;
+
+    if (mClock - mTimeArray[mTailIdx] < mRecordLimit) {
+        // The buffer is not large enough to cover the full mRecordLimit period
+        ALOGW("Traffic recorder size is not enough. mRecordLimit %d", mRecordLimit);
     }
 }
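
A minimal standalone sketch of the power-of-two rounding used in the constructor above (clamping and class constants omitted; this is an illustration, not the class code):

    #include <cstddef>

    // Round a requested capacity up to the next power of two so that
    // (index & (capacity - 1)) can replace a modulo on the ring buffer,
    // mirroring the __builtin_clz expression in the constructor above.
    static size_t roundUpToPowerOfTwo(size_t size) {
        if (size <= 1) return 1;
        size_t exp = (sizeof(size_t) == 8)
                ? 64 - __builtin_clzl(size - 1)
                : 32 - __builtin_clz(size - 1);
        return size_t{1} << exp;
    }

    // Example: roundUpToPowerOfTwo(128) == 128, roundUpToPowerOfTwo(130) == 256.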
 
diff --git a/media/libstagefright/tests/fuzzers/Android.bp b/media/libstagefright/tests/fuzzers/Android.bp
index 65e74e6..0097830 100644
--- a/media/libstagefright/tests/fuzzers/Android.bp
+++ b/media/libstagefright/tests/fuzzers/Android.bp
@@ -30,6 +30,7 @@
         "libgui",
         "libbinder",
         "liblog",
+        "media_permission-aidl-cpp",
     ],
     include_dirs: [
         "frameworks/av/media/libstagefright",
diff --git a/media/libstagefright/tests/fuzzers/WriterFuzzer.cpp b/media/libstagefright/tests/fuzzers/WriterFuzzer.cpp
index 03e9b43..969c6e1 100644
--- a/media/libstagefright/tests/fuzzers/WriterFuzzer.cpp
+++ b/media/libstagefright/tests/fuzzers/WriterFuzzer.cpp
@@ -17,6 +17,7 @@
 //          dylan.katz@leviathansecurity.com
 
 #include <android-base/file.h>
+#include <android/media/permission/Identity.h>
 #include <ctype.h>
 #include <media/mediarecorder.h>
 #include <media/stagefright/MPEG4Writer.h>
@@ -39,6 +40,8 @@
 
 namespace android {
 
+using media::permission::Identity;
+
 std::string getFourCC(FuzzedDataProvider *fdp) {
     std::string fourCC = fdp->ConsumeRandomLengthString(4);
     // Replace any existing nulls
@@ -163,9 +166,11 @@
     StandardWriters writerType = dataProvider.ConsumeEnum<StandardWriters>();
     sp<MediaWriter> writer = createWriter(tf.fd, writerType, fileMeta);
 
-    std::string packageName = dataProvider.ConsumeRandomLengthString(kMaxPackageNameLen);
-
-    sp<MediaRecorder> mr = new MediaRecorder(String16(packageName.c_str()));
+    Identity i;
+    i.packageName = dataProvider.ConsumeRandomLengthString(kMaxPackageNameLen);
+    i.uid = dataProvider.ConsumeIntegral<int32_t>();
+    i.pid = dataProvider.ConsumeIntegral<int32_t>();
+    sp<MediaRecorder> mr = new MediaRecorder(i);
     writer->setListener(mr);
 
     uint8_t baseOpLen = operations.size();
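
For context on the Identity change above: ConsumeRandomLengthString and ConsumeIntegral are standard FuzzedDataProvider helpers. A minimal sketch of the same pattern against a stand-in struct (FakeIdentity is hypothetical and only mirrors the fields used here; it is not the media::permission::Identity AIDL type):

    #include <fuzzer/FuzzedDataProvider.h>
    #include <cstdint>
    #include <string>

    struct FakeIdentity {            // stand-in for media::permission::Identity
        std::string packageName;
        int32_t uid = -1;
        int32_t pid = -1;
    };

    extern "C" int LLVMFuzzerTestOneInput(const uint8_t *data, size_t size) {
        FuzzedDataProvider fdp(data, size);
        FakeIdentity id;
        id.packageName = fdp.ConsumeRandomLengthString(32);  // bounded, like kMaxPackageNameLen
        id.uid = fdp.ConsumeIntegral<int32_t>();
        id.pid = fdp.ConsumeIntegral<int32_t>();
        // ... hand `id` to the component under test, as the fuzzer above does with MediaRecorder.
        return 0;
    }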
diff --git a/media/libstagefright/tests/mediacodec/MediaCodecTest.cpp b/media/libstagefright/tests/mediacodec/MediaCodecTest.cpp
index d00a50f..06e36ad 100644
--- a/media/libstagefright/tests/mediacodec/MediaCodecTest.cpp
+++ b/media/libstagefright/tests/mediacodec/MediaCodecTest.cpp
@@ -285,17 +285,20 @@
     // 1) Client thread calls stop(); MediaCodec looper thread calls
     //    initiateShutdown(); shutdown is being handled at the component thread.
     // 2) Error occurred, but the shutdown operation is still being done.
-    // 3) MediaCodec looper thread handles the error.
-    // 4) Component thread completes shutdown and posts onStopCompleted()
+    // 3) Another error occurred during the shutdown operation.
+    // 4) MediaCodec looper thread handles the error.
+    // 5) Client releases the codec upon the error; previous shutdown is still
+    //    going on.
+    // 6) Component thread completes shutdown and posts onStopCompleted();
+    //    Shutdown from release also completes.
 
     static const AString kCodecName{"test.codec"};
     static const AString kCodecOwner{"nobody"};
     static const AString kMediaType{"video/x-test"};
 
-    std::promise<void> errorOccurred;
     sp<MockCodec> mockCodec;
     std::function<sp<CodecBase>(const AString &name, const char *owner)> getCodecBase =
-        [&mockCodec, &errorOccurred](const AString &, const char *) {
+        [&mockCodec](const AString &, const char *) {
             mockCodec = new MockCodec([](const std::shared_ptr<MockBufferChannel> &) {
                 // No mock setup, as we don't expect any buffer operations
                 // in this scenario.
@@ -314,13 +317,17 @@
                     mockCodec->callback()->onStartCompleted();
                 });
             ON_CALL(*mockCodec, initiateShutdown(true))
-                .WillByDefault([mockCodec, &errorOccurred](bool) {
+                .WillByDefault([mockCodec](bool) {
+                    // 2)
                     mockCodec->callback()->onError(UNKNOWN_ERROR, ACTION_CODE_FATAL);
-                    // Mark that 1) and 2) are complete.
-                    errorOccurred.set_value();
+                    // 3)
+                    mockCodec->callback()->onError(UNKNOWN_ERROR, ACTION_CODE_FATAL);
                 });
             ON_CALL(*mockCodec, initiateShutdown(false))
                 .WillByDefault([mockCodec](bool) {
+                    // Previous stop finished now.
+                    mockCodec->callback()->onStopCompleted();
+                    // Release also finished.
                     mockCodec->callback()->onReleaseCompleted();
                 });
             return mockCodec;
@@ -332,19 +339,13 @@
     ASSERT_NE(nullptr, codec) << "Codec must not be null";
     ASSERT_NE(nullptr, mockCodec) << "MockCodec must not be null";
 
-    std::thread([mockCodec, &errorOccurred]{
-        // Simulate component thread that handles stop()
-        errorOccurred.get_future().wait();
-        // Error occurred but shutdown request still got processed.
-        mockCodec->callback()->onStopCompleted();
-    }).detach();
-
     codec->configure(new AMessage, nullptr, nullptr, 0);
     codec->start();
-    codec->stop();
-    // Sleep here to give time for the MediaCodec looper thread
-    // to process the messages.
+    // stop() will fail because of the error
+    EXPECT_NE(OK, codec->stop());
+    // sleep here so that the looper thread can handle all the errors.
     std::this_thread::sleep_for(std::chrono::milliseconds(100));
+    // upon receiving the error, client tries to release the codec.
     codec->release();
     looper->stop();
 }
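
The ON_CALL(...).WillByDefault(lambda) wiring used above is plain GoogleMock; a small self-contained illustration of installing a default action that fires a callback twice, mirroring steps 2) and 3) (the types here are illustrative, not the MediaCodec test fixtures):

    #include <gmock/gmock.h>

    struct Callback {
        virtual ~Callback() = default;
        virtual void onError(int code) = 0;
    };

    struct MockComponent {
        MOCK_METHOD(void, initiateShutdown, (bool keepComponentAllocated), ());
    };

    // Default action for initiateShutdown(true): report two errors while the
    // shutdown is still in flight, as in the scenario described above.
    static void wireDefaults(MockComponent &mock, Callback &cb) {
        ON_CALL(mock, initiateShutdown(true))
            .WillByDefault([&cb](bool) {
                cb.onError(-1);  // first error during shutdown
                cb.onError(-1);  // second error during shutdown
            });
    }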
diff --git a/media/libstagefright/webm/Android.bp b/media/libstagefright/webm/Android.bp
index 68752cd..3ceacfe 100644
--- a/media/libstagefright/webm/Android.bp
+++ b/media/libstagefright/webm/Android.bp
@@ -40,6 +40,7 @@
         "libstagefright_foundation",
         "libutils",
         "liblog",
+        "media_permission-aidl-cpp",
     ],
 
     header_libs: [
diff --git a/media/libstagefright/writer_fuzzers/README.md b/media/libstagefright/writer_fuzzers/README.md
index 0d21031..6f95ecc 100644
--- a/media/libstagefright/writer_fuzzers/README.md
+++ b/media/libstagefright/writer_fuzzers/README.md
@@ -29,7 +29,7 @@
 
 | Parameter| Valid Values| Configured Value|
 |------------- |-------------| ----- |
-| `mime` | 0. `audio/3gpp` 1. `audio/amr-wb` 2. `audio/vorbis` 3. `audio/opus` 4. `audio/mp4a-latm` 5. `video/avc` 6. `video/hevc` 7. `video/mp4v-es` 8. `video/3gpp` 9. `video/x-vnd.on2.vp8` 10. `video/x-vnd.on2.vp9` | All the bits of 2nd byte of data for first track and 11th byte of data for second track (if present) modulus 10 |
+| `mime` | 0. `audio/3gpp` 1. `audio/amr-wb` 2. `audio/vorbis` 3. `audio/opus` 4. `audio/mp4a-latm` 5. `audio/mpeg` 6. `audio/mpeg-L1` 7. `audio/mpeg-L2` 8. `audio/midi` 9. `audio/qcelp` 10. `audio/g711-alaw` 11. `audio/g711-mlaw` 12. `audio/flac` 13. `audio/aac-adts` 14. `audio/gsm` 15. `audio/ac3` 16. `audio/eac3` 17. `audio/eac3-joc` 18. `audio/ac4` 19. `audio/scrambled` 20. `audio/alac` 21. `audio/x-ms-wma` 22. `audio/x-adpcm-ms` 23. `audio/x-adpcm-dvi-ima` 24. `video/avc` 25. `video/hevc` 26. `video/mp4v-es` 27. `video/3gpp` 28. `video/x-vnd.on2.vp8` 29. `video/x-vnd.on2.vp9` 30. `video/av01` 31. `video/mpeg2` 32. `video/dolby-vision` 33. `video/scrambled` 34. `video/divx` 35. `video/divx3` 36. `video/xvid` 37. `video/x-motion-jpeg` 38. `text/3gpp-tt` 39. `application/x-subrip` 40. `text/vtt` 41. `text/cea-608` 42. `text/cea-708` 43. `application/x-id3v4` | All the bits of 2nd byte of data for first track and 11th byte of data for second track and 20th byte of data for third track(if present) modulus 44 |
 | `channel-count` | In the range `0 to INT32_MAX` | All the bits of 3rd byte to 6th bytes of data if first track is audio and 12th to 15th bytes of data if second track is audio |
 | `sample-rate` | In the range `1 to INT32_MAX` | All the bits of 7th byte to 10th bytes of data if first track is audio and 16th to 19th bytes of data if second track is audio |
 | `height` | In the range `0 to INT32_MAX` | All the bits of 3rd byte to 6th bytes of data if first track is video and 12th to 15th bytes of data if second track is video |
diff --git a/media/libstagefright/writer_fuzzers/WriterFuzzerBase.cpp b/media/libstagefright/writer_fuzzers/WriterFuzzerBase.cpp
index 844db39..ee7af70 100644
--- a/media/libstagefright/writer_fuzzers/WriterFuzzerBase.cpp
+++ b/media/libstagefright/writer_fuzzers/WriterFuzzerBase.cpp
@@ -53,7 +53,7 @@
     return mNumCsds[trackIndex];
 }
 
-vector<FrameData> WriterFuzzerBase::BufferSource::getFrameList(int32_t trackIndex) {
+vector<FrameData> &WriterFuzzerBase::BufferSource::getFrameList(int32_t trackIndex) {
     return mFrameList[trackIndex];
 }
 
@@ -92,9 +92,8 @@
                 } else {
                     break;
                 }
-                mFrameList[trackIndex].insert(
-                    mFrameList[trackIndex].begin(),
-                    FrameData{static_cast<int32_t>(bufferSize), flags, pts, framePtr});
+                mFrameList[trackIndex].insert(mFrameList[trackIndex].begin(),
+                                              FrameData{bufferSize, flags, pts, framePtr});
                 bytesRemaining -= (frameSize + kMarkerSize + kMarkerSuffixSize);
                 --mReadIndex;
             }
@@ -105,31 +104,36 @@
          * Scenario where input data does not contain the custom frame markers.
          * Hence feed the entire data as single frame.
          */
-        mFrameList[0].emplace_back(
-            FrameData{static_cast<int32_t>(mSize - readIndexStart), 0, 0, mData + readIndexStart});
+        mFrameList[0].emplace_back(FrameData{mSize - readIndexStart, 0, 0, mData + readIndexStart});
     }
 }
 bool WriterFuzzerBase::BufferSource::getTrackInfo(int32_t trackIndex) {
-    if (mSize <= mReadIndex + 2 * sizeof(int) + sizeof(uint8_t)) {
+    if (mSize <= mReadIndex + sizeof(uint8_t)) {
         return false;
     }
     size_t mimeTypeIdx = mData[mReadIndex] % kSupportedMimeTypes;
     char *mime = (char *)supportedMimeTypes[mimeTypeIdx].c_str();
     mParams[trackIndex].mime = mime;
-    ++mReadIndex;
+    mReadIndex += sizeof(uint8_t);
 
-    if (!strncmp(mime, "audio/", 6)) {
-        copy(mData + mReadIndex, mData + mReadIndex + sizeof(int),
-             reinterpret_cast<char *>(&mParams[trackIndex].channelCount));
-        copy(mData + mReadIndex + sizeof(int), mData + mReadIndex + 2 * sizeof(int),
-             reinterpret_cast<char *>(&mParams[trackIndex].sampleRate));
+    if (mSize > mReadIndex + 2 * sizeof(int32_t)) {
+        if (!strncmp(mime, "audio/", 6)) {
+            copy(mData + mReadIndex, mData + mReadIndex + sizeof(int32_t),
+                 reinterpret_cast<char *>(&mParams[trackIndex].channelCount));
+            copy(mData + mReadIndex + sizeof(int32_t), mData + mReadIndex + 2 * sizeof(int32_t),
+                 reinterpret_cast<char *>(&mParams[trackIndex].sampleRate));
+        } else if (!strncmp(mime, "video/", 6)) {
+            copy(mData + mReadIndex, mData + mReadIndex + sizeof(int32_t),
+                 reinterpret_cast<char *>(&mParams[trackIndex].height));
+            copy(mData + mReadIndex + sizeof(int32_t), mData + mReadIndex + 2 * sizeof(int32_t),
+                 reinterpret_cast<char *>(&mParams[trackIndex].width));
+        }
+        mReadIndex += 2 * sizeof(int32_t);
     } else {
-        copy(mData + mReadIndex, mData + mReadIndex + sizeof(int),
-             reinterpret_cast<char *>(&mParams[trackIndex].height));
-        copy(mData + mReadIndex + sizeof(int), mData + mReadIndex + 2 * sizeof(int),
-             reinterpret_cast<char *>(&mParams[trackIndex].width));
+        if (strncmp(mime, "text/", 5) && strncmp(mime, "application/", 12)) {
+            return false;
+        }
     }
-    mReadIndex += 2 * sizeof(int);
     return true;
 }
 
@@ -173,7 +177,7 @@
         }
         format->setInt32("channel-count", params.channelCount);
         format->setInt32("sample-rate", params.sampleRate);
-    } else {
+    } else if (!strncmp(params.mime, "video/", 6)) {
         format->setInt32("width", params.width);
         format->setInt32("height", params.height);
     }
@@ -193,11 +197,10 @@
     mWriter->start(mFileMeta.get());
 }
 
-void WriterFuzzerBase::sendBuffersToWriter(sp<MediaAdapter> &currentTrack, int32_t trackIndex) {
-    int32_t numCsds = mBufferSource->getNumCsds(trackIndex);
+void WriterFuzzerBase::sendBuffersToWriter(sp<MediaAdapter> &currentTrack, int32_t trackIndex,
+                                           int32_t startFrameIndex, int32_t endFrameIndex) {
     vector<FrameData> bufferInfo = mBufferSource->getFrameList(trackIndex);
-    int32_t range = bufferInfo.size();
-    for (int idx = numCsds; idx < range; ++idx) {
+    for (int idx = startFrameIndex; idx < endFrameIndex; ++idx) {
         sp<ABuffer> buffer = new ABuffer((void *)bufferInfo[idx].buf, bufferInfo[idx].size);
         MediaBuffer *mediaBuffer = new MediaBuffer(buffer);
 
@@ -209,7 +212,7 @@
 
         // Just set the kKeyDecodingTime as the presentation time for now.
         sampleMetaData.setInt64(kKeyDecodingTime, bufferInfo[idx].timeUs);
-        if (bufferInfo[idx].flags == 1) {
+        if (bufferInfo[idx].flags == SampleFlag::SYNC_FLAG) {
             sampleMetaData.setInt32(kKeyIsSyncFrame, true);
         }
 
@@ -218,6 +221,28 @@
     }
 }
 
+void WriterFuzzerBase::sendBuffersInterleave(int32_t numTracks, uint8_t numBuffersInterleave) {
+    int32_t currentFrameIndex[numTracks], remainingNumFrames[numTracks], numTrackFramesDone;
+    for (int32_t idx = 0; idx < numTracks; ++idx) {
+        currentFrameIndex[idx] = mBufferSource->getNumCsds(idx);
+        remainingNumFrames[idx] = mBufferSource->getFrameList(idx).size() - currentFrameIndex[idx];
+    }
+    do {
+        numTrackFramesDone = numTracks;
+        for (int32_t idx = 0; idx < numTracks; ++idx) {
+            if (remainingNumFrames[idx] > 0) {
+                int32_t numFramesInterleave =
+                    min(remainingNumFrames[idx], static_cast<int32_t>(numBuffersInterleave));
+                sendBuffersToWriter(mCurrentTrack[idx], idx, currentFrameIndex[idx],
+                                    currentFrameIndex[idx] + numFramesInterleave);
+                currentFrameIndex[idx] += numFramesInterleave;
+                remainingNumFrames[idx] -= numFramesInterleave;
+                --numTrackFramesDone;
+            }
+        }
+    } while (numTrackFramesDone < numTracks);
+}
+
 void WriterFuzzerBase::initFileWriterAndProcessData(const uint8_t *data, size_t size) {
     if (!createOutputFile()) {
         return;
@@ -225,6 +250,14 @@
     if (!createWriter()) {
         return;
     }
+
+    if (size < 1) {
+        return;
+    }
+    uint8_t numBuffersInterleave = (data[0] == 0 ? 1 : data[0]);
+    ++data;
+    --size;
+
     mBufferSource = new BufferSource(data, size);
     if (!mBufferSource) {
         return;
@@ -246,9 +279,7 @@
             addWriterSource(idx);
         }
         start();
-        for (int32_t idx = 0; idx < mNumTracks; ++idx) {
-            sendBuffersToWriter(mCurrentTrack[idx], idx);
-        }
+        sendBuffersInterleave(mNumTracks, numBuffersInterleave);
         for (int32_t idx = 0; idx < mNumTracks; ++idx) {
             if (mCurrentTrack[idx]) {
                 mCurrentTrack[idx]->stop();
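
To make the new interleaving concrete: with two tracks holding 5 and 2 non-CSD frames and numBuffersInterleave = 2, the loop above emits track0[0..1], track1[0..1], track0[2..3], then track0[4]. A standalone sketch of the same round-robin chunking over plain vectors (names are illustrative, not the fuzzer classes):

    #include <algorithm>
    #include <cstdio>
    #include <vector>

    // Emit at most `chunk` frames per track per pass, round-robin across tracks,
    // mirroring WriterFuzzerBase::sendBuffersInterleave above.
    static void interleave(const std::vector<std::vector<int>> &tracks, size_t chunk) {
        std::vector<size_t> next(tracks.size(), 0);
        bool done = false;
        while (!done) {
            done = true;
            for (size_t t = 0; t < tracks.size(); ++t) {
                if (next[t] >= tracks[t].size()) continue;
                done = false;
                size_t end = std::min(tracks[t].size(), next[t] + chunk);
                for (; next[t] < end; ++next[t]) {
                    std::printf("track %zu frame %d\n", t, tracks[t][next[t]]);
                }
            }
        }
    }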
diff --git a/media/libstagefright/writer_fuzzers/include/WriterFuzzerBase.h b/media/libstagefright/writer_fuzzers/include/WriterFuzzerBase.h
index da06463..4315322 100644
--- a/media/libstagefright/writer_fuzzers/include/WriterFuzzerBase.h
+++ b/media/libstagefright/writer_fuzzers/include/WriterFuzzerBase.h
@@ -34,7 +34,7 @@
 using namespace std;
 
 constexpr uint32_t kMimeSize = 128;
-constexpr uint8_t kMaxTrackCount = 2;
+constexpr uint8_t kMaxTrackCount = 3;
 constexpr uint32_t kMaxCSDStrlen = 16;
 constexpr uint32_t kCodecConfigFlag = 32;
 
@@ -49,25 +49,65 @@
 };
 
 struct FrameData {
-    int32_t size;
+    size_t size;
     uint8_t flags;
     int64_t timeUs;
     const uint8_t* buf;
 };
 
-static string supportedMimeTypes[] = {
-    "audio/3gpp",      "audio/amr-wb",        "audio/vorbis",        "audio/opus",
-    "audio/mp4a-latm", "video/avc",           "video/hevc",          "video/mp4v-es",
-    "video/3gpp",      "video/x-vnd.on2.vp8", "video/x-vnd.on2.vp9",
-};
+static string supportedMimeTypes[] = {"audio/3gpp",
+                                      "audio/amr-wb",
+                                      "audio/vorbis",
+                                      "audio/opus",
+                                      "audio/mp4a-latm",
+                                      "audio/mpeg",
+                                      "audio/mpeg-L1",
+                                      "audio/mpeg-L2",
+                                      "audio/midi",
+                                      "audio/qcelp",
+                                      "audio/g711-alaw",
+                                      "audio/g711-mlaw",
+                                      "audio/flac",
+                                      "audio/aac-adts",
+                                      "audio/gsm",
+                                      "audio/ac3",
+                                      "audio/eac3",
+                                      "audio/eac3-joc",
+                                      "audio/ac4",
+                                      "audio/scrambled",
+                                      "audio/alac",
+                                      "audio/x-ms-wma",
+                                      "audio/x-adpcm-ms",
+                                      "audio/x-adpcm-dvi-ima",
+                                      "video/avc",
+                                      "video/hevc",
+                                      "video/mp4v-es",
+                                      "video/3gpp",
+                                      "video/x-vnd.on2.vp8",
+                                      "video/x-vnd.on2.vp9",
+                                      "video/av01",
+                                      "video/mpeg2",
+                                      "video/dolby-vision",
+                                      "video/scrambled",
+                                      "video/divx",
+                                      "video/divx3",
+                                      "video/xvid",
+                                      "video/x-motion-jpeg",
+                                      "text/3gpp-tt",
+                                      "application/x-subrip",
+                                      "text/vtt",
+                                      "text/cea-608",
+                                      "text/cea-708",
+                                      "application/x-id3v4"};
 
-enum {
+enum SampleFlag {
     DEFAULT_FLAG = 0,
     SYNC_FLAG = 1,
     ENCRYPTED_FLAG = 2,
 };
 
-static uint8_t flagTypes[] = {DEFAULT_FLAG, SYNC_FLAG, ENCRYPTED_FLAG};
+static uint8_t flagTypes[] = {SampleFlag::DEFAULT_FLAG, SampleFlag::SYNC_FLAG,
+                              SampleFlag::ENCRYPTED_FLAG};
 
 class WriterFuzzerBase {
    public:
@@ -105,7 +145,10 @@
 
     void start();
 
-    void sendBuffersToWriter(sp<MediaAdapter>& currentTrack, int32_t trackIndex);
+    void sendBuffersToWriter(sp<MediaAdapter>& currentTrack, int32_t trackIndex,
+                             int32_t startFrameIndex, int32_t endFrameIndex);
+
+    void sendBuffersInterleave(int32_t numTracks, uint8_t numBuffersInterleave);
 
     void initFileWriterAndProcessData(const uint8_t* data, size_t size);
 
@@ -126,7 +169,7 @@
         void getFrameInfo();
         ConfigFormat getConfigFormat(int32_t trackIndex);
         int32_t getNumCsds(int32_t trackIndex);
-        vector<FrameData> getFrameList(int32_t trackIndex);
+        vector<FrameData>& getFrameList(int32_t trackIndex);
 
        private:
         bool isMarker() { return (memcmp(&mData[mReadIndex], kMarker, kMarkerSize) == 0); }
diff --git a/media/libstagefright/xmlparser/MediaCodecsXmlParser.cpp b/media/libstagefright/xmlparser/MediaCodecsXmlParser.cpp
index dbdb43c..67c6102 100644
--- a/media/libstagefright/xmlparser/MediaCodecsXmlParser.cpp
+++ b/media/libstagefright/xmlparser/MediaCodecsXmlParser.cpp
@@ -146,7 +146,10 @@
         };
     static std::vector<std::string> names = {
             prefixes[0] + variants[0] + ".xml",
-            prefixes[1] + variants[1] + ".xml"
+            prefixes[1] + variants[1] + ".xml",
+
+            // shaping information is not currently variant specific.
+            "media_codecs_shaping.xml"
         };
     return names;
 }
@@ -346,6 +349,8 @@
         status_t addAlias(const char **attrs);
         status_t addFeature(const char **attrs);
         status_t addLimit(const char **attrs);
+        status_t addMapping(const char **attrs);
+        status_t addTuning(const char **attrs);
         status_t addQuirk(const char **attrs, const char *prefix = nullptr);
         status_t addSetting(const char **attrs, const char *prefix = nullptr);
         status_t enterMediaCodec(const char **attrs, bool encoder);
@@ -428,7 +433,7 @@
         if (findFileInDirs(searchDirs, fileName, &path)) {
             err = parseXmlPath(path);
         } else {
-            ALOGD("Cannot find %s", path.c_str());
+            ALOGI("Did not find %s in search path", fileName.c_str());
         }
         res = combineStatus(res, err);
     }
@@ -438,7 +443,7 @@
 status_t MediaCodecsXmlParser::Impl::parseXmlPath(const std::string &path) {
     std::lock_guard<std::mutex> guard(mLock);
     if (!fileExists(path)) {
-        ALOGD("Cannot find %s", path.c_str());
+        ALOGV("Cannot find %s", path.c_str());
         mParsingStatus = combineStatus(mParsingStatus, NAME_NOT_FOUND);
         return NAME_NOT_FOUND;
     }
@@ -741,13 +746,19 @@
         {
             // ignore limits and features specified outside of type
             if (!mState->inType()
-                    && (strEq(name, "Limit") || strEq(name, "Feature") || strEq(name, "Variant"))) {
+                    && (strEq(name, "Limit") || strEq(name, "Feature")
+                        || strEq(name, "Variant") || strEq(name, "Mapping")
+                        || strEq(name, "Tuning"))) {
                 PLOGD("ignoring %s specified outside of a Type", name);
                 return;
             } else if (strEq(name, "Limit")) {
                 err = addLimit(attrs);
             } else if (strEq(name, "Feature")) {
                 err = addFeature(attrs);
+            } else if (strEq(name, "Mapping")) {
+                err = addMapping(attrs);
+            } else if (strEq(name, "Tuning")) {
+                err = addTuning(attrs);
             } else if (strEq(name, "Variant") && section != SECTION_VARIANT) {
                 err = limitVariants(attrs);
                 mState->enterSection(err == OK ? SECTION_VARIANT : SECTION_UNKNOWN);
@@ -981,7 +992,9 @@
     TypeMap::iterator typeIt;
     if (codecIt == mData->mCodecMap.end()) { // New codec name
         if (updating) {
-            return { NAME_NOT_FOUND, "MediaCodec: cannot update non-existing codec" };
+            std::string msg = "MediaCodec: cannot update non-existing codec: ";
+            msg = msg + name;
+            return { NAME_NOT_FOUND, msg };
         }
         // Create a new codec in mCodecMap
         codecIt = mData->mCodecMap.insert(Codec(name, CodecProperties())).first;
@@ -994,19 +1007,25 @@
         codecIt->second.order = mData->mCodecMap.size();
     } else { // Existing codec name
         if (!updating) {
-            return { ALREADY_EXISTS, "MediaCodec: cannot add existing codec" };
+            std::string msg = "MediaCodec: cannot add existing codec: ";
+            msg = msg + name;
+            return { ALREADY_EXISTS, msg };
         }
         if (type != nullptr) {
             typeIt = codecIt->second.typeMap.find(type);
             if (typeIt == codecIt->second.typeMap.end()) {
-                return { NAME_NOT_FOUND, "MediaCodec: cannot update non-existing type for codec" };
+                std::string msg = "MediaCodec: cannot update non-existing type for codec: ";
+                msg = msg + name;
+                return { NAME_NOT_FOUND, msg };
             }
         } else {
             // This should happen only when the codec has at most one type.
             typeIt = codecIt->second.typeMap.begin();
             if (typeIt == codecIt->second.typeMap.end()
                     || codecIt->second.typeMap.size() != 1) {
-                return { BAD_VALUE, "MediaCodec: cannot update codec without type specified" };
+                std::string msg = "MediaCodec: cannot update codec without type specified: ";
+                msg = msg + name;
+                return { BAD_VALUE, msg };
             }
         }
     }
@@ -1386,6 +1405,92 @@
     return OK;
 }
 
+status_t MediaCodecsXmlParser::Impl::Parser::addMapping(const char **attrs) {
+    CHECK(mState->inType());
+    size_t i = 0;
+    const char *a_name = nullptr;
+    const char *a_value = nullptr;
+    const char *a_kind = nullptr;
+
+    while (attrs[i] != nullptr) {
+        CHECK((i & 1) == 0);
+        if (attrs[i + 1] == nullptr) {
+            PLOGD("Mapping: attribute '%s' is null", attrs[i]);
+            return BAD_VALUE;
+        }
+
+        if (strEq(attrs[i], "name")) {
+            a_name = attrs[++i];
+        } else if (strEq(attrs[i], "kind")) {
+            a_kind = attrs[++i];
+        } else if (strEq(attrs[i], "value")) {
+            a_value = attrs[++i];
+        } else {
+            PLOGD("Mapping: ignoring unrecognized attribute '%s'", attrs[i]);
+            ++i;
+        }
+        ++i;
+    }
+
+    // Every mapping must have all 3 fields
+    if (a_name == nullptr) {
+        PLOGD("Mapping with no 'name' attribute");
+        return BAD_VALUE;
+    }
+
+    if (a_kind == nullptr) {
+        PLOGD("Mapping with no 'kind' attribute");
+        return BAD_VALUE;
+    }
+
+    if (a_value == nullptr) {
+        PLOGD("Mapping with no 'value' attribute");
+        return BAD_VALUE;
+    }
+
+    mState->addDetail(std::string("mapping-") + a_kind + "-" + a_name, a_value);
+    return OK;
+}
+
+status_t MediaCodecsXmlParser::Impl::Parser::addTuning(const char **attrs) {
+    CHECK(mState->inType());
+    size_t i = 0;
+    const char *a_name = nullptr;
+    const char *a_value = nullptr;
+
+    while (attrs[i] != nullptr) {
+        CHECK((i & 1) == 0);
+        if (attrs[i + 1] == nullptr) {
+            PLOGD("Mapping: attribute '%s' is null", attrs[i]);
+            return BAD_VALUE;
+        }
+
+        if (strEq(attrs[i], "name")) {
+            a_name = attrs[++i];
+        } else if (strEq(attrs[i], "value")) {
+            a_value = attrs[++i];
+        } else {
+            PLOGD("Tuning: ignoring unrecognized attribute '%s'", attrs[i]);
+            ++i;
+        }
+        ++i;
+    }
+
+    // Every tuning must have both fields
+    if (a_name == nullptr) {
+        PLOGD("Tuning with no 'name' attribute");
+        return BAD_VALUE;
+    }
+
+    if (a_value == nullptr) {
+        PLOGD("Tuning with no 'value' attribute");
+        return BAD_VALUE;
+    }
+
+    mState->addDetail(std::string("tuning-") + a_name, a_value);
+    return OK;
+}
+
 status_t MediaCodecsXmlParser::Impl::Parser::addAlias(const char **attrs) {
     CHECK(mState->inCodec());
     size_t i = 0;
diff --git a/media/libstagefright/xmlparser/api/current.txt b/media/libstagefright/xmlparser/api/current.txt
index 16c8af8..ecfd85e 100644
--- a/media/libstagefright/xmlparser/api/current.txt
+++ b/media/libstagefright/xmlparser/api/current.txt
@@ -65,6 +65,16 @@
     method public void set_default(String);
   }
 
+  public class Mapping {
+    ctor public Mapping();
+    method public String getKind();
+    method public String getName();
+    method public String getValue();
+    method public void setKind(String);
+    method public void setName(String);
+    method public void setValue(String);
+  }
+
   public class MediaCodec {
     ctor public MediaCodec();
     method public java.util.List<media.codecs.Alias> getAlias_optional();
@@ -73,9 +83,11 @@
     method public String getEnabled();
     method public java.util.List<media.codecs.Feature> getFeature_optional();
     method public java.util.List<media.codecs.Limit> getLimit_optional();
+    method public java.util.List<media.codecs.Mapping> getMapping_optional();
     method public String getName();
     method public java.util.List<media.codecs.Quirk> getQuirk_optional();
     method public String getRank();
+    method public java.util.List<media.codecs.Tuning> getTuning_optional();
     method public String getType();
     method public java.util.List<media.codecs.Type> getType_optional();
     method public String getUpdate();
@@ -125,6 +137,14 @@
     method public java.util.List<media.codecs.Setting> getVariant_optional();
   }
 
+  public class Tuning {
+    ctor public Tuning();
+    method public String getName();
+    method public String getValue();
+    method public void setName(String);
+    method public void setValue(String);
+  }
+
   public class Type {
     ctor public Type();
     method public java.util.List<media.codecs.Alias> getAlias();
diff --git a/media/libstagefright/xmlparser/media_codecs.xsd b/media/libstagefright/xmlparser/media_codecs.xsd
index 3b5681f..c9a7efc 100644
--- a/media/libstagefright/xmlparser/media_codecs.xsd
+++ b/media/libstagefright/xmlparser/media_codecs.xsd
@@ -63,6 +63,8 @@
             <xs:element name="Alias" type="Alias" minOccurs="0" maxOccurs="unbounded"/>
             <xs:element name="Limit" type="Limit" minOccurs="0" maxOccurs="unbounded"/>
             <xs:element name="Feature" type="Feature" minOccurs="0" maxOccurs="unbounded"/>
+            <xs:element name="Mapping" type="Mapping" minOccurs="0" maxOccurs="unbounded"/>
+            <xs:element name="Tuning" type="Tuning" minOccurs="0" maxOccurs="unbounded"/>
             <xs:element name="Variant" type="Variant" minOccurs="0" maxOccurs="unbounded"/>
         </xs:choice>
         <xs:attribute name="name" type="xs:string"/>
@@ -122,6 +124,15 @@
         <xs:attribute name="enabled" type="xs:string"/>
         <xs:attribute name="update" type="xs:string"/>
     </xs:complexType>
+    <xs:complexType name="Mapping">
+        <xs:attribute name="name" type="xs:string"/>
+        <xs:attribute name="kind" type="xs:string"/>
+        <xs:attribute name="value" type="xs:string"/>
+    </xs:complexType>
+    <xs:complexType name="Tuning">
+        <xs:attribute name="name" type="xs:string"/>
+        <xs:attribute name="value" type="xs:string"/>
+    </xs:complexType>
     <xs:complexType name="Include">
         <xs:attribute name="href" type="xs:string"/>
     </xs:complexType>
diff --git a/media/libstagefright/xmlparser/test/XMLParserTest.cpp b/media/libstagefright/xmlparser/test/XMLParserTest.cpp
index 9ddd374..7629d97 100644
--- a/media/libstagefright/xmlparser/test/XMLParserTest.cpp
+++ b/media/libstagefright/xmlparser/test/XMLParserTest.cpp
@@ -127,6 +127,24 @@
     setCodecProperties("test8.encoder", true, 8, {}, {}, {}, "audio/opus",
                        {pair<string, string>("max-limit1", "limit1Max")}, {}, "");
 
+    setCodecProperties("test9.encoder", true, 9, {}, {}, {}, "video/avc",
+           {
+                   pair<string, string>("mapping-sure-before", "after"),
+           },
+           {}, "");
+
+    setCodecProperties("test10.encoder", true, 10, {}, {}, {}, "video/hevc",
+           {
+                   pair<string, string>("mapping-fire-from", "to"),
+           },
+           {}, "");
+    setCodecProperties("test11.encoder", true, 11, {}, {}, {}, "video/av01",
+           {
+                   pair<string, string>("tuning-hungry", "yes"),
+                   pair<string, string>("tuning-pi", "3.1415"),
+           },
+           {}, "");
+
     setRoleProperties("audio_decoder.mp3", false, 1, "audio/mpeg", "test1.decoder",
                       {pair<string, string>("attribute::disabled", "present"),
                        pair<string, string>("rank", "4")});
@@ -162,6 +180,17 @@
     setRoleProperties("audio_encoder.opus", true, 8, "audio/opus", "test8.encoder",
                       {pair<string, string>("max-limit1", "limit1Max")});
 
+    setRoleProperties("video_encoder.avc", true, 9, "video/avc", "test9.encoder",
+                       {pair<string, string>("mapping-sure-before", "after")});
+
+    setRoleProperties("video_encoder.hevc", true, 10, "video/hevc", "test10.encoder",
+                       { pair<string, string>("mapping-fire-from", "to")});
+
+    setRoleProperties("video_encoder.av01", true, 11, "video/av01", "test11.encoder",
+                       {pair<string, string>("tuning-hungry", "yes"),
+                        pair<string, string>("tuning-pi", "3.1415")
+                       });
+
     setServiceAttribute(
             {pair<string, string>("domain-telephony", "0"), pair<string, string>("domain-tv", "0"),
              pair<string, string>("setting2", "0"), pair<string, string>("variant-variant1", "0")});
diff --git a/media/libstagefright/xmlparser/test/testdata/media_codecs_unit_test.xml b/media/libstagefright/xmlparser/test/testdata/media_codecs_unit_test.xml
index a7299d3..8cae423 100644
--- a/media/libstagefright/xmlparser/test/testdata/media_codecs_unit_test.xml
+++ b/media/libstagefright/xmlparser/test/testdata/media_codecs_unit_test.xml
@@ -76,5 +76,17 @@
         <MediaCodec name="test8.encoder" type="audio/opus">
             <Limit name="limit1" max="limit1Max" />
         </MediaCodec>
+        <!-- entry for testing Mapping -->
+        <MediaCodec name="test9.encoder" type="video/avc" >
+            <Mapping kind="sure" name="before" value="after"/>
+        </MediaCodec>
+        <MediaCodec name="test10.encoder" type="video/hevc" >
+            <Mapping kind="fire" name="from" value="to"/>
+        </MediaCodec>
+        <!-- entry for testing Tuning -->
+        <MediaCodec name="test11.encoder" type="video/av01" >
+            <Tuning name="hungry" value="yes"/>
+            <Tuning name="pi" value="3.1415"/>
+        </MediaCodec>
     </Encoders>
 </Included>
diff --git a/media/mediaserver/manifest_media_c2_software.xml b/media/mediaserver/manifest_media_c2_software.xml
index f23ed44..a5b4896 100644
--- a/media/mediaserver/manifest_media_c2_software.xml
+++ b/media/mediaserver/manifest_media_c2_software.xml
@@ -2,7 +2,7 @@
     <hal>
         <name>android.hardware.media.c2</name>
         <transport>hwbinder</transport>
-        <version>1.1</version>
+        <version>1.2</version>
         <interface>
             <name>IComponentStore</name>
             <instance>software</instance>
diff --git a/media/mtp/tests/MtpFfsHandleTest/Android.bp b/media/mtp/tests/MtpFfsHandleTest/Android.bp
index cd6e750..ec9c7a4 100644
--- a/media/mtp/tests/MtpFfsHandleTest/Android.bp
+++ b/media/mtp/tests/MtpFfsHandleTest/Android.bp
@@ -38,4 +38,3 @@
         "-Werror",
     ],
 }
-
diff --git a/media/mtp/tests/MtpFuzzer/Android.bp b/media/mtp/tests/MtpFuzzer/Android.bp
index 9cd4669..5365f4b 100644
--- a/media/mtp/tests/MtpFuzzer/Android.bp
+++ b/media/mtp/tests/MtpFuzzer/Android.bp
@@ -1,3 +1,12 @@
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_media_mtp_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_media_mtp_license"],
+}
+
 cc_fuzz {
     name: "mtp_fuzzer",
     srcs: [
diff --git a/media/mtp/tests/PosixAsyncIOTest/Android.bp b/media/mtp/tests/PosixAsyncIOTest/Android.bp
index 1d401b8..09cf6b7 100644
--- a/media/mtp/tests/PosixAsyncIOTest/Android.bp
+++ b/media/mtp/tests/PosixAsyncIOTest/Android.bp
@@ -14,6 +14,15 @@
 // limitations under the License.
 //
 
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_media_mtp_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_media_mtp_license"],
+}
+
 cc_test {
     name: "posix_async_io_test",
     test_suites: ["device-tests"],
diff --git a/media/ndk/Android.bp b/media/ndk/Android.bp
index 8172334..3007574 100644
--- a/media/ndk/Android.bp
+++ b/media/ndk/Android.bp
@@ -276,5 +276,9 @@
         },
     },
 
-    apex_available: ["com.android.media"],
+    apex_available: [
+        "//apex_available:platform",
+        "com.android.media",
+        "com.android.media.swcodec",
+    ],
 }
diff --git a/media/ndk/NdkImagePriv.h b/media/ndk/NdkImagePriv.h
index b019448..05115b9 100644
--- a/media/ndk/NdkImagePriv.h
+++ b/media/ndk/NdkImagePriv.h
@@ -40,6 +40,14 @@
      */
 
     AIMAGE_FORMAT_RAW_DEPTH = 0x1002,
+
+    /**
+     * Device-specific 10-bit depth RAW image format.
+     *
+     * <p>Unprocessed, implementation-dependent raw depth measurements, opaque, with 10-bit samples
+     * and a device-specific bit layout.</p>
+     */
+    AIMAGE_FORMAT_RAW_DEPTH10 = 0x1003,
 };
 
 // TODO: this only supports ImageReader
diff --git a/media/ndk/NdkImageReader.cpp b/media/ndk/NdkImageReader.cpp
index b75901a..1067e24 100644
--- a/media/ndk/NdkImageReader.cpp
+++ b/media/ndk/NdkImageReader.cpp
@@ -72,6 +72,7 @@
         case AIMAGE_FORMAT_Y8:
         case AIMAGE_FORMAT_HEIC:
         case AIMAGE_FORMAT_DEPTH_JPEG:
+        case AIMAGE_FORMAT_RAW_DEPTH10:
             return true;
         case AIMAGE_FORMAT_PRIVATE:
             // For private format, cpu usage is prohibited.
@@ -102,6 +103,7 @@
         case AIMAGE_FORMAT_Y8:
         case AIMAGE_FORMAT_HEIC:
         case AIMAGE_FORMAT_DEPTH_JPEG:
+        case AIMAGE_FORMAT_RAW_DEPTH10:
             return 1;
         case AIMAGE_FORMAT_PRIVATE:
             return 0;
diff --git a/media/ndk/NdkMediaDrm.cpp b/media/ndk/NdkMediaDrm.cpp
index 3af9771..6e9945d 100644
--- a/media/ndk/NdkMediaDrm.cpp
+++ b/media/ndk/NdkMediaDrm.cpp
@@ -380,12 +380,15 @@
     }
     Vector<uint8_t> session;
     status_t status = mObj->mDrm->openSession(DrmPlugin::kSecurityLevelMax, session);
-    if (status == OK) {
-        mObj->mIds.push_front(session);
-        List<idvec_t>::iterator iter = mObj->mIds.begin();
-        sessionId->ptr = iter->array();
-        sessionId->length = iter->size();
+    if (status != OK) {
+        sessionId->ptr = NULL;
+        sessionId->length = 0;
+        return translateStatus(status);
     }
+    mObj->mIds.push_front(session);
+    List<idvec_t>::iterator iter = mObj->mIds.begin();
+    sessionId->ptr = iter->array();
+    sessionId->length = iter->size();
     return AMEDIA_OK;
 }
 
@@ -489,6 +492,7 @@
     } else {
         keySetId->ptr = NULL;
         keySetId->length = 0;
+        return translateStatus(status);
     }
     return AMEDIA_OK;
 }
diff --git a/media/ndk/NdkMediaExtractor.cpp b/media/ndk/NdkMediaExtractor.cpp
index 0c65e9e..07fc5de 100644
--- a/media/ndk/NdkMediaExtractor.cpp
+++ b/media/ndk/NdkMediaExtractor.cpp
@@ -419,6 +419,7 @@
 
 EXPORT
 media_status_t AMediaExtractor_getSampleFormat(AMediaExtractor *ex, AMediaFormat *fmt) {
+    ALOGV("AMediaExtractor_getSampleFormat");
     if (fmt == NULL) {
         return AMEDIA_ERROR_INVALID_PARAMETER;
     }
@@ -428,6 +429,9 @@
     if (err != OK) {
         return translate_error(err);
     }
+#ifdef LOG_NDEBUG
+    sampleMeta->dumpToLog();
+#endif
 
     sp<AMessage> meta;
     AMediaFormat_getFormat(fmt, &meta);
@@ -483,6 +487,19 @@
         meta->setBuffer(AMEDIAFORMAT_KEY_AUDIO_PRESENTATION_INFO, audioPresentationsData);
     }
 
+    int64_t val64;
+    if (sampleMeta->findInt64(kKeySampleFileOffset, &val64)) {
+        meta->setInt64("sample-file-offset", val64);
+        ALOGV("SampleFileOffset Found");
+    }
+    if (sampleMeta->findInt64(kKeyLastSampleIndexInChunk, &val64)) {
+        meta->setInt64("last-sample-index-in-chunk" /*AMEDIAFORMAT_KEY_LAST_SAMPLE_INDEX_IN_CHUNK*/,
+                       val64);
+        ALOGV("kKeyLastSampleIndexInChunk Found");
+    }
+
+    ALOGV("AMediaFormat_toString:%s", AMediaFormat_toString(fmt));
+
     return AMEDIA_OK;
 }
 
diff --git a/media/ndk/NdkMediaFormat.cpp b/media/ndk/NdkMediaFormat.cpp
index 8e673ca..c1793ce 100644
--- a/media/ndk/NdkMediaFormat.cpp
+++ b/media/ndk/NdkMediaFormat.cpp
@@ -334,6 +334,7 @@
 EXPORT const char* AMEDIAFORMAT_KEY_IS_SYNC_FRAME = "is-sync-frame";
 EXPORT const char* AMEDIAFORMAT_KEY_I_FRAME_INTERVAL = "i-frame-interval";
 EXPORT const char* AMEDIAFORMAT_KEY_LANGUAGE = "language";
+EXPORT const char* AMEDIAFORMAT_KEY_LAST_SAMPLE_INDEX_IN_CHUNK = "last-sample-index-in-chunk";
 EXPORT const char* AMEDIAFORMAT_KEY_LATENCY = "latency";
 EXPORT const char* AMEDIAFORMAT_KEY_LEVEL = "level";
 EXPORT const char* AMEDIAFORMAT_KEY_LOCATION = "location";
@@ -359,7 +360,9 @@
 EXPORT const char* AMEDIAFORMAT_KEY_PUSH_BLANK_BUFFERS_ON_STOP = "push-blank-buffers-on-shutdown";
 EXPORT const char* AMEDIAFORMAT_KEY_REPEAT_PREVIOUS_FRAME_AFTER = "repeat-previous-frame-after";
 EXPORT const char* AMEDIAFORMAT_KEY_ROTATION = "rotation-degrees";
+EXPORT const char* AMEDIAFORMAT_KEY_SAMPLE_FILE_OFFSET = "sample-file-offset";
 EXPORT const char* AMEDIAFORMAT_KEY_SAMPLE_RATE = "sample-rate";
+EXPORT const char* AMEDIAFORMAT_KEY_SAMPLE_TIME_BEFORE_APPEND = "sample-time-before-append";
 EXPORT const char* AMEDIAFORMAT_KEY_SAR_HEIGHT = "sar-height";
 EXPORT const char* AMEDIAFORMAT_KEY_SAR_WIDTH = "sar-width";
 EXPORT const char* AMEDIAFORMAT_KEY_SEI = "sei";
@@ -383,6 +386,14 @@
 EXPORT const char* AMEDIAFORMAT_KEY_TRACK_ID = "track-id";
 EXPORT const char* AMEDIAFORMAT_KEY_TRACK_INDEX = "track-index";
 EXPORT const char* AMEDIAFORMAT_KEY_VALID_SAMPLES = "valid-samples";
+EXPORT const char* AMEDIAFORMAT_VIDEO_QP_B_MAX = "video-qp-b-max";
+EXPORT const char* AMEDIAFORMAT_VIDEO_QP_B_MIN = "video-qp-b-min";
+EXPORT const char* AMEDIAFORMAT_VIDEO_QP_I_MAX = "video-qp-i-max";
+EXPORT const char* AMEDIAFORMAT_VIDEO_QP_I_MIN = "video-qp-i-min";
+EXPORT const char* AMEDIAFORMAT_VIDEO_QP_MAX = "video-qp-max";
+EXPORT const char* AMEDIAFORMAT_VIDEO_QP_MIN = "video-qp-min";
+EXPORT const char* AMEDIAFORMAT_VIDEO_QP_P_MAX = "video-qp-p-max";
+EXPORT const char* AMEDIAFORMAT_VIDEO_QP_P_MIN = "video-qp-p-min";
 EXPORT const char* AMEDIAFORMAT_KEY_WIDTH = "width";
 EXPORT const char* AMEDIAFORMAT_KEY_XMP_OFFSET = "xmp-offset";
 EXPORT const char* AMEDIAFORMAT_KEY_XMP_SIZE = "xmp-size";
diff --git a/media/ndk/NdkMediaMuxer.cpp b/media/ndk/NdkMediaMuxer.cpp
index d1992bf..1965e62 100644
--- a/media/ndk/NdkMediaMuxer.cpp
+++ b/media/ndk/NdkMediaMuxer.cpp
@@ -17,28 +17,24 @@
 //#define LOG_NDEBUG 0
 #define LOG_TAG "NdkMediaMuxer"
 
-
-#include <media/NdkMediaMuxer.h>
+#include <android_util_Binder.h>
+#include <jni.h>
+#include <media/IMediaHTTPService.h>
 #include <media/NdkMediaCodec.h>
 #include <media/NdkMediaErrorPriv.h>
 #include <media/NdkMediaFormatPriv.h>
-
-
-#include <utils/Log.h>
-#include <utils/StrongPointer.h>
+#include <media/NdkMediaMuxer.h>
+#include <media/stagefright/MediaAppender.h>
+#include <media/stagefright/MediaMuxer.h>
 #include <media/stagefright/foundation/ABuffer.h>
 #include <media/stagefright/foundation/AMessage.h>
-#include <media/stagefright/MediaMuxer.h>
-#include <media/IMediaHTTPService.h>
-#include <android_util_Binder.h>
-
-#include <jni.h>
+#include <utils/Log.h>
+#include <utils/StrongPointer.h>
 
 using namespace android;
 
 struct AMediaMuxer {
-    sp<MediaMuxer> mImpl;
-
+    sp<MediaMuxerBase> mImpl;
 };
 
 extern "C" {
@@ -46,8 +42,15 @@
 EXPORT
 AMediaMuxer* AMediaMuxer_new(int fd, OutputFormat format) {
     ALOGV("ctor");
-    AMediaMuxer *mData = new AMediaMuxer();
-    mData->mImpl = new MediaMuxer(fd, (android::MediaMuxer::OutputFormat)format);
+    AMediaMuxer *mData = new (std::nothrow) AMediaMuxer();
+    if (mData == nullptr) {
+        return nullptr;
+    }
+    mData->mImpl = new (std::nothrow) MediaMuxer(fd, (android::MediaMuxer::OutputFormat)format);
+    if (mData->mImpl == nullptr) {
+        delete mData;
+        return nullptr;
+    }
     return mData;
 }
 
@@ -94,6 +97,34 @@
             muxer->mImpl->writeSampleData(buf, trackIdx, info->presentationTimeUs, info->flags));
 }
 
+EXPORT
+AMediaMuxer* AMediaMuxer_append(int fd, AppendMode mode) {
+    ALOGV("append");
+    AMediaMuxer* mData = new (std::nothrow) AMediaMuxer();
+    if (mData == nullptr) {
+        return nullptr;
+    }
+    mData->mImpl = MediaAppender::create(fd, (android::MediaAppender::AppendMode)mode);
+    if (mData->mImpl == nullptr) {
+        delete mData;
+        return nullptr;
+    }
+    return mData;
+}
+
+EXPORT
+ssize_t AMediaMuxer_getTrackCount(AMediaMuxer* muxer) {
+    return muxer->mImpl->getTrackCount();
+}
+
+EXPORT
+AMediaFormat* AMediaMuxer_getTrackFormat(AMediaMuxer* muxer, size_t idx) {
+    sp<AMessage> format = muxer->mImpl->getTrackFormat(idx);
+    if (format != nullptr) {
+        return AMediaFormat_fromMsg(&format);
+    }
+    return nullptr;
+}
 
 } // extern "C"
 
diff --git a/media/ndk/TEST_MAPPING b/media/ndk/TEST_MAPPING
index 1a81538..e420812 100644
--- a/media/ndk/TEST_MAPPING
+++ b/media/ndk/TEST_MAPPING
@@ -1,6 +1,7 @@
 // mappings for frameworks/av/media/ndk
 {
   "presubmit": [
-    { "name": "AImageReaderWindowHandleTest" }
+    { "name": "AImageReaderWindowHandleTest" },
+    { "name": "libmediandk_test" }
   ]
 }
diff --git a/media/ndk/include/media/NdkImage.h b/media/ndk/include/media/NdkImage.h
index e19dd3a..71bc6d9 100644
--- a/media/ndk/include/media/NdkImage.h
+++ b/media/ndk/include/media/NdkImage.h
@@ -50,7 +50,10 @@
  */
 typedef struct AImage AImage;
 
-// Formats not listed here will not be supported by AImageReader
+/**
+ * AImage supported formats: AImageReader only guarantees support for the formats
+ * listed here.
+ */
 enum AIMAGE_FORMATS {
     /**
      * 32 bits RGBA format, 8 bits for each of the four channels.
@@ -813,7 +816,7 @@
  * Available since API level 26.
  *
  * @param image the {@link AImage} of interest.
- * @param outBuffer The memory area pointed to by buffer will contain the acquired AHardwareBuffer
+ * @param buffer The memory area pointed to by buffer will contain the acquired AHardwareBuffer
  *         handle.
  * @return <ul>
  *         <li>{@link AMEDIA_OK} if the method call succeeds.</li>
diff --git a/media/ndk/include/media/NdkImageReader.h b/media/ndk/include/media/NdkImageReader.h
index d86f3c7..4bd7f2a 100644
--- a/media/ndk/include/media/NdkImageReader.h
+++ b/media/ndk/include/media/NdkImageReader.h
@@ -328,10 +328,10 @@
  * still acquire images from this {@link AImageReader} and access {@link AHardwareBuffer} via
  * {@link AImage_getHardwareBuffer()}. The {@link AHardwareBuffer} gained this way can then
  * be passed back to hardware (such as GPU or hardware encoder if supported) for future processing.
- * For example, you can obtain an {@link EGLClientBuffer} from the {@link AHardwareBuffer} by using
- * {@link eglGetNativeClientBufferANDROID} extension and pass that {@link EGLClientBuffer} to {@link
- * eglCreateImageKHR} to create an {@link EGLImage} resource type, which may then be bound to a
- * texture via {@link glEGLImageTargetTexture2DOES} on supported devices. This can be useful for
+ * For example, you can obtain an EGLClientBuffer from the {@link AHardwareBuffer} by using
+ * eglGetNativeClientBufferANDROID extension and pass that EGLClientBuffer to
+ * eglCreateImageKHR to create an EGLImage resource type, which may then be bound to a
+ * texture via glEGLImageTargetTexture2DOES on supported devices. This can be useful for
  * transporting textures that may be shared cross-process.</p>
  * <p>In general, when software access to image data is not necessary, an {@link AImageReader}
  * created with {@link AIMAGE_FORMAT_PRIVATE} format is more efficient, compared with {@link
@@ -339,7 +339,7 @@
  *
  * <p>Note that not all format and usage flag combination is supported by the {@link AImageReader},
  * especially if \c format is {@link AIMAGE_FORMAT_PRIVATE}, \c usage must not include either
- * {@link AHARDWAREBUFFER_USAGE_READ_RARELY} or {@link AHARDWAREBUFFER_USAGE_READ_OFTEN}</p>
+ * {@link AHARDWAREBUFFER_USAGE_CPU_READ_RARELY} or {@link AHARDWAREBUFFER_USAGE_CPU_READ_OFTEN}</p>
  *
  * @param width The default width in pixels of the Images that this reader will produce.
  * @param height The default height in pixels of the Images that this reader will produce.
@@ -358,7 +358,7 @@
  *   <th>Compatible usage flags</th>
  * </tr>
  * <tr>
- *   <td>non-{@link AIMAGE_FORMAT_PRIVATE PRIVATE} formats defined in {@link AImage.h}
+ *   <td>non-{@link AIMAGE_FORMAT_PRIVATE} formats defined in {@link NdkImage.h}
  * </td>
  *   <td>{@link AHARDWAREBUFFER_USAGE_CPU_READ_RARELY} or
  *   {@link AHARDWAREBUFFER_USAGE_CPU_READ_OFTEN}</td>
@@ -441,6 +441,10 @@
         AImageReader* reader,
         AHardwareBuffer* buffer);
 
+/**
+ * A listener for AHardwareBuffer removal events; use
+ * {@link AImageReader_setBufferRemovedListener} to register the listener object with an AImageReader.
+ */
 typedef struct AImageReader_BufferRemovedListener {
     /// Optional application context passed as the first parameter of the callback.
     void*                      context;
diff --git a/media/ndk/include/media/NdkMediaError.h b/media/ndk/include/media/NdkMediaError.h
index 2be1d6e..02fdc79 100644
--- a/media/ndk/include/media/NdkMediaError.h
+++ b/media/ndk/include/media/NdkMediaError.h
@@ -40,7 +40,11 @@
 
 __BEGIN_DECLS
 
+/**
+ * Media error message types returned from NDK media functions.
+ */
 typedef enum {
+    /** The requested media operation completed successfully. */
     AMEDIA_OK = 0,
 
     /**
@@ -55,14 +59,34 @@
     AMEDIACODEC_ERROR_RECLAIMED             = 1101,
 
     AMEDIA_ERROR_BASE                  = -10000,
+
+    /** The called media function failed with an unknown error. */
     AMEDIA_ERROR_UNKNOWN               = AMEDIA_ERROR_BASE,
+
+    /** The input media data is corrupt or incomplete. */
     AMEDIA_ERROR_MALFORMED             = AMEDIA_ERROR_BASE - 1,
+
+    /** The required operation or media formats are not supported. */
     AMEDIA_ERROR_UNSUPPORTED           = AMEDIA_ERROR_BASE - 2,
+
+    /** An invalid (or already closed) object is used in the function call. */
     AMEDIA_ERROR_INVALID_OBJECT        = AMEDIA_ERROR_BASE - 3,
+
+    /** At least one of the passed parameters is invalid. */
     AMEDIA_ERROR_INVALID_PARAMETER     = AMEDIA_ERROR_BASE - 4,
+
+    /** The media object is not in the right state for the required operation. */
     AMEDIA_ERROR_INVALID_OPERATION     = AMEDIA_ERROR_BASE - 5,
+
+    /** Media stream ends while processing the requested operation. */
     AMEDIA_ERROR_END_OF_STREAM         = AMEDIA_ERROR_BASE - 6,
+
+    /** An error occurred while the media object was carrying out an I/O operation. */
     AMEDIA_ERROR_IO                    = AMEDIA_ERROR_BASE - 7,
+
+    /** The required operation would have to block (on I/O or otherwise),
+     *   but blocking is not enabled.
+     */
     AMEDIA_ERROR_WOULD_BLOCK           = AMEDIA_ERROR_BASE - 8,
 
     AMEDIA_DRM_ERROR_BASE              = -20000,
@@ -77,10 +101,20 @@
     AMEDIA_DRM_LICENSE_EXPIRED         = AMEDIA_DRM_ERROR_BASE - 9,
 
     AMEDIA_IMGREADER_ERROR_BASE          = -30000,
+
+    /** There are no more image buffers to read/write image data. */
     AMEDIA_IMGREADER_NO_BUFFER_AVAILABLE = AMEDIA_IMGREADER_ERROR_BASE - 1,
+
+    /** The AImage object has used up the allowed maximum image buffers. */
     AMEDIA_IMGREADER_MAX_IMAGES_ACQUIRED = AMEDIA_IMGREADER_ERROR_BASE - 2,
+
+    /** The required image buffer could not be locked to read. */
     AMEDIA_IMGREADER_CANNOT_LOCK_IMAGE   = AMEDIA_IMGREADER_ERROR_BASE - 3,
+
+    /** The media data or buffer could not be unlocked. */
     AMEDIA_IMGREADER_CANNOT_UNLOCK_IMAGE = AMEDIA_IMGREADER_ERROR_BASE - 4,
+
+    /** The media/buffer needs to be locked to perform the required operation. */
     AMEDIA_IMGREADER_IMAGE_NOT_LOCKED    = AMEDIA_IMGREADER_ERROR_BASE - 5,
 
 } media_status_t;
diff --git a/media/ndk/include/media/NdkMediaFormat.h b/media/ndk/include/media/NdkMediaFormat.h
index eb6d510..fbd855d 100644
--- a/media/ndk/include/media/NdkMediaFormat.h
+++ b/media/ndk/include/media/NdkMediaFormat.h
@@ -307,6 +307,18 @@
 extern const char* AMEDIAFORMAT_KEY_THUMBNAIL_CSD_AV1C __INTRODUCED_IN(31);
 extern const char* AMEDIAFORMAT_KEY_XMP_OFFSET __INTRODUCED_IN(31);
 extern const char* AMEDIAFORMAT_KEY_XMP_SIZE __INTRODUCED_IN(31);
+extern const char* AMEDIAFORMAT_KEY_SAMPLE_FILE_OFFSET __INTRODUCED_IN(31);
+extern const char* AMEDIAFORMAT_KEY_LAST_SAMPLE_INDEX_IN_CHUNK __INTRODUCED_IN(31);
+extern const char* AMEDIAFORMAT_KEY_SAMPLE_TIME_BEFORE_APPEND __INTRODUCED_IN(31);
+
+extern const char* AMEDIAFORMAT_VIDEO_QP_B_MAX __INTRODUCED_IN(31);
+extern const char* AMEDIAFORMAT_VIDEO_QP_B_MIN __INTRODUCED_IN(31);
+extern const char* AMEDIAFORMAT_VIDEO_QP_I_MAX __INTRODUCED_IN(31);
+extern const char* AMEDIAFORMAT_VIDEO_QP_I_MIN __INTRODUCED_IN(31);
+extern const char* AMEDIAFORMAT_VIDEO_QP_MAX __INTRODUCED_IN(31);
+extern const char* AMEDIAFORMAT_VIDEO_QP_MIN __INTRODUCED_IN(31);
+extern const char* AMEDIAFORMAT_VIDEO_QP_P_MAX __INTRODUCED_IN(31);
+extern const char* AMEDIAFORMAT_VIDEO_QP_P_MIN __INTRODUCED_IN(31);
 
 __END_DECLS
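
A hedged sketch of how the new QP bound keys declared above might be applied to an encoder input format (whether a given encoder honors them is codec- and device-dependent; the numeric values are arbitrary):

    #include <media/NdkMediaFormat.h>

    // Set session-wide and I-frame QP bounds using the new keys.
    static void setQpBounds(AMediaFormat *format) {
        AMediaFormat_setInt32(format, AMEDIAFORMAT_VIDEO_QP_MIN, 10);
        AMediaFormat_setInt32(format, AMEDIAFORMAT_VIDEO_QP_MAX, 40);
        AMediaFormat_setInt32(format, AMEDIAFORMAT_VIDEO_QP_I_MIN, 12);
        AMediaFormat_setInt32(format, AMEDIAFORMAT_VIDEO_QP_I_MAX, 38);
    }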
 
diff --git a/media/ndk/include/media/NdkMediaMuxer.h b/media/ndk/include/media/NdkMediaMuxer.h
index 519e249..d7eccb8 100644
--- a/media/ndk/include/media/NdkMediaMuxer.h
+++ b/media/ndk/include/media/NdkMediaMuxer.h
@@ -54,6 +54,17 @@
     AMEDIAMUXER_OUTPUT_FORMAT_THREE_GPP   = 2,
 } OutputFormat;
 
+typedef enum {
+    /* The last group of pictures (GOP) of the video track can be incomplete, so it is safe to
+     * discard it and rewrite.  If both audio and video tracks are present in a file, then
+     * audio samples that follow the last video GOP are discarded as well.
+     * If only an audio track is present, no samples are discarded.
+     */
+    AMEDIAMUXER_APPEND_IGNORE_LAST_VIDEO_GOP = 0,
+    // Keep all existing samples as they are and only append new samples after them.
+    AMEDIAMUXER_APPEND_TO_EXISTING_DATA = 1,
+} AppendMode;
+
 /**
  * Create new media muxer.
  *
@@ -138,6 +149,46 @@
         size_t trackIdx, const uint8_t *data,
         const AMediaCodecBufferInfo *info) __INTRODUCED_IN(21);
 
+/**
+ * Creates a new media muxer for appending data to an existing MPEG4 file.
+ * This is a synchronous API call and could take a while to return if the existing file is large.
+ * Only works for MPEG4 files matching one of the following characteristics:
+ * <ul>
+ *    <li>a single audio track.</li>
+ *    <li>a single video track.</li>
+ *    <li>a single audio and a single video track.</li>
+ * </ul>
+ * @param fd Must be opened with read and write permission. Does not take ownership of
+ * this fd, i.e., the caller is responsible for closing it.
+ * @param mode Specifies how data will be appended; the AppendMode enum describes
+ *             the possible methods for appending.
+ * @return Pointer to AMediaMuxer if the file (fd) already has tracks, otherwise nullptr.
+ * {@link AMediaMuxer_delete} should be used to free the returned pointer.
+ *
+ * Available since API level 31.
+ */
+AMediaMuxer* AMediaMuxer_append(int fd, AppendMode mode) __INTRODUCED_IN(31);
+
+/**
+ * Returns the number of tracks added in the file passed to {@link AMediaMuxer_new} or
+ * the number of existing tracks in the file passed to {@link AMediaMuxer_append}.
+ * Should be called in INITIALIZED or STARTED state, otherwise returns -1.
+ *
+ * Available since API level 31.
+ */
+ssize_t AMediaMuxer_getTrackCount(AMediaMuxer*) __INTRODUCED_IN(31);
+
+/**
+ * Returns AMediaFormat of the added track with index idx in the file passed to
+ * {@link AMediaMuxer_new} or the AMediaFormat of the existing track with index idx
+ * in the file passed to {@link AMediaMuxer_append}.
+ * Should be called in INITIALIZED or STARTED state, otherwise returns nullptr.
+ * {@link AMediaFormat_delete} should be used to free the returned pointer.
+ *
+ * Available since API level 31.
+ */
+AMediaFormat* AMediaMuxer_getTrackFormat(AMediaMuxer* muxer, size_t idx) __INTRODUCED_IN(31);
+
 __END_DECLS
 
 #endif // _NDK_MEDIA_MUXER_H
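A minimal sketch of how the new append entry points fit together, using only the muxer calls declared above plus the pre-existing start/stop/write/delete APIs; the helper name appendToMp4, the chosen append mode, and the elided encoding loop are assumptions for illustration.

```
#include <fcntl.h>
#include <unistd.h>

#include <media/NdkMediaFormat.h>
#include <media/NdkMediaMuxer.h>

// Sketch only: append newly encoded samples to an existing MP4 file.
bool appendToMp4(const char* path) {
    int fd = open(path, O_RDWR);  // AMediaMuxer_append does not take ownership of fd
    if (fd < 0) return false;

    AMediaMuxer* muxer = AMediaMuxer_append(fd, AMEDIAMUXER_APPEND_IGNORE_LAST_VIDEO_GOP);
    if (muxer == nullptr) {       // nullptr when the file has no usable tracks
        close(fd);
        return false;
    }

    // Inspect the tracks already present in the file.
    ssize_t trackCount = AMediaMuxer_getTrackCount(muxer);
    for (ssize_t i = 0; i < trackCount; ++i) {
        AMediaFormat* fmt = AMediaMuxer_getTrackFormat(muxer, i);
        // ... match each existing track against the encoder output format ...
        AMediaFormat_delete(fmt);
    }

    AMediaMuxer_start(muxer);
    // Feed new samples on the discovered track indices, e.g.
    // AMediaMuxer_writeSampleData(muxer, trackIdx, encodedData, &bufferInfo);
    AMediaMuxer_stop(muxer);
    AMediaMuxer_delete(muxer);
    close(fd);
    return true;
}
```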
diff --git a/media/ndk/libmediandk.map.txt b/media/ndk/libmediandk.map.txt
index eead681..7e9e57e 100644
--- a/media/ndk/libmediandk.map.txt
+++ b/media/ndk/libmediandk.map.txt
@@ -109,6 +109,7 @@
     AMEDIAFORMAT_KEY_IS_SYNC_FRAME; # var introduced=29
     AMEDIAFORMAT_KEY_I_FRAME_INTERVAL; # var introduced=21
     AMEDIAFORMAT_KEY_LANGUAGE; # var introduced=21
+    AMEDIAFORMAT_KEY_LAST_SAMPLE_INDEX_IN_CHUNK; # var introduced=31
     AMEDIAFORMAT_KEY_LATENCY; # var introduced=28
     AMEDIAFORMAT_KEY_LEVEL; # var introduced=28
     AMEDIAFORMAT_KEY_LOCATION; # var introduced=29
@@ -134,6 +135,8 @@
     AMEDIAFORMAT_KEY_PUSH_BLANK_BUFFERS_ON_STOP; # var introduced=21
     AMEDIAFORMAT_KEY_REPEAT_PREVIOUS_FRAME_AFTER; # var introduced=21
     AMEDIAFORMAT_KEY_ROTATION; # var introduced=28
+    AMEDIAFORMAT_KEY_SAMPLE_FILE_OFFSET; # var introduced=31
+    AMEDIAFORMAT_KEY_SAMPLE_TIME_BEFORE_APPEND; # var introduced=31
     AMEDIAFORMAT_KEY_SAMPLE_RATE; # var introduced=21
     AMEDIAFORMAT_KEY_SAR_HEIGHT; # var introduced=29
     AMEDIAFORMAT_KEY_SAR_WIDTH; # var introduced=29
@@ -286,7 +289,10 @@
     AMediaFormat_setString;
     AMediaFormat_toString;
     AMediaMuxer_addTrack;
+    AMediaMuxer_append; # introduced=31
     AMediaMuxer_delete;
+    AMediaMuxer_getTrackCount; # introduced=31
+    AMediaMuxer_getTrackFormat; # introduced=31
     AMediaMuxer_new;
     AMediaMuxer_setLocation;
     AMediaMuxer_setOrientationHint;
diff --git a/media/ndk/tests/Android.bp b/media/ndk/tests/Android.bp
new file mode 100644
index 0000000..984b3ee
--- /dev/null
+++ b/media/ndk/tests/Android.bp
@@ -0,0 +1,41 @@
+// Copyright (C) 2016 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// Headers module is in frameworks/av/Android.bp because modules are not allowed
+// to refer to headers in parent directories and the headers live in
+// frameworks/av/include.
+
+package {
+    default_applicable_licenses: ["frameworks_av_media_ndk_license"],
+}
+
+cc_test {
+    name: "libmediandk_test",
+    test_suites: ["device-tests"],
+
+    cflags: [
+        "-Werror",
+        "-Wall",
+    ],
+
+    shared_libs: [
+        "liblog",
+        "libmediandk",
+        "libutils",
+    ],
+
+    srcs: [
+        "NdkMediaFormat_test.cpp",
+    ],
+}
diff --git a/media/ndk/tests/NdkMediaFormat_test.cpp b/media/ndk/tests/NdkMediaFormat_test.cpp
new file mode 100644
index 0000000..668d0a4
--- /dev/null
+++ b/media/ndk/tests/NdkMediaFormat_test.cpp
@@ -0,0 +1,196 @@
+/*
+ * Copyright 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "NdkMediaFormat_test"
+
+#include <gtest/gtest.h>
+#include <utils/RefBase.h>
+
+#include <media/NdkMediaFormat.h>
+
+namespace android {
+
+class NdkMediaFormatTest : public ::testing::Test {
+};
+
+
+TEST(NdkMediaFormat_tests, test_create) {
+
+   AMediaFormat *fmt1 = AMediaFormat_new();
+   AMediaFormat *fmt2 = AMediaFormat_new();
+
+   EXPECT_NE(fmt1, fmt2);
+   EXPECT_NE(fmt1, nullptr);
+   EXPECT_NE(fmt2, nullptr);
+
+   AMediaFormat_delete(fmt1);
+   AMediaFormat_delete(fmt2);
+}
+
+TEST(NdkMediaFormat_tests, test_int32) {
+   AMediaFormat *fmt1 = AMediaFormat_new();
+   int32_t i32;
+   int64_t i64;
+   AMediaFormat_setInt32(fmt1, "five", 5);
+
+   EXPECT_TRUE(AMediaFormat_getInt32(fmt1, "five", &i32));
+   EXPECT_FALSE(AMediaFormat_getInt64(fmt1, "five", &i64));
+   EXPECT_EQ(i32, 5);
+
+   AMediaFormat_delete(fmt1);
+}
+
+TEST(NdkMediaFormat_tests, test_int64) {
+   AMediaFormat *fmt1 = AMediaFormat_new();
+   int64_t i64;
+   AMediaFormat_setInt64(fmt1, "verylarge", INT64_MAX);
+
+   EXPECT_TRUE(AMediaFormat_getInt64(fmt1, "verylarge", &i64));
+   EXPECT_EQ(i64, INT64_MAX);
+
+   // return unchanged if not found
+   i64 = -1;
+   EXPECT_FALSE(AMediaFormat_getInt64(fmt1, "five", &i64));
+   EXPECT_EQ(i64, -1);
+
+   AMediaFormat_delete(fmt1);
+}
+
+TEST(NdkMediaFormat_tests, test_size) {
+   AMediaFormat *fmt1 = AMediaFormat_new();
+
+   size_t size = -15;
+   AMediaFormat_setSize(fmt1, "small", 1);
+   AMediaFormat_setSize(fmt1, "medium", 10);
+   AMediaFormat_setSize(fmt1, "large", 100);
+   EXPECT_TRUE(AMediaFormat_getSize(fmt1, "medium", &size));
+   EXPECT_EQ(size, 10);
+
+   AMediaFormat_delete(fmt1);
+}
+
+TEST(NdkMediaFormat_tests, test_float) {
+   AMediaFormat *fmt1 = AMediaFormat_new();
+   float f;
+   AMediaFormat_setFloat(fmt1, "boat", 1.5);
+   AMediaFormat_setFloat(fmt1, "ship", 0.5);
+   EXPECT_TRUE(AMediaFormat_getFloat(fmt1, "boat", &f));
+   EXPECT_EQ(f, 1.5);
+   AMediaFormat_delete(fmt1);
+}
+
+TEST(NdkMediaFormat_tests, test_double) {
+   AMediaFormat *fmt1 = AMediaFormat_new();
+   double d;
+   AMediaFormat_setDouble(fmt1, "trouble", 100.5);
+   AMediaFormat_setDouble(fmt1, "dip", 0.5);
+   EXPECT_TRUE(AMediaFormat_getDouble(fmt1, "trouble", &d));
+   EXPECT_EQ(d, 100.5);
+   AMediaFormat_delete(fmt1);
+}
+
+TEST(NdkMediaFormat_tests, test_string) {
+   AMediaFormat *fmt1 = AMediaFormat_new();
+
+   const char *content = "This is my test string";
+   const char *out = nullptr;
+   AMediaFormat_setString(fmt1, "stringtheory", content);
+   EXPECT_TRUE(AMediaFormat_getString(fmt1, "stringtheory", &out));
+   EXPECT_NE(out, nullptr);
+   EXPECT_EQ(strcmp(out,content), 0);
+
+   AMediaFormat_delete(fmt1);
+}
+
+
+TEST(NdkMediaFormat_tests, test_clear) {
+   AMediaFormat *fmt1 = AMediaFormat_new();
+
+   int32_t i32;
+   AMediaFormat_setInt32(fmt1, "five", 5);
+   size_t size = -15;
+   AMediaFormat_setSize(fmt1, "medium", 10);
+   float f;
+   AMediaFormat_setFloat(fmt1, "boat", 1.5);
+
+   AMediaFormat_clear(fmt1);
+   EXPECT_FALSE(AMediaFormat_getInt32(fmt1, "five", &i32));
+   EXPECT_FALSE(AMediaFormat_getSize(fmt1, "medium", &size));
+   EXPECT_FALSE(AMediaFormat_getFloat(fmt1, "boat", &f));
+
+   AMediaFormat_delete(fmt1);
+}
+
+TEST(NdkMediaFormat_tests, test_copy) {
+   AMediaFormat *fmt1 = AMediaFormat_new();
+   AMediaFormat *fmt2 = AMediaFormat_new();
+
+   double d;
+   int32_t i32;
+
+   // test copy functionality (fmt1 and fmt2 start out empty)
+   AMediaFormat_setDouble(fmt1, "trouble", 100.5);
+   EXPECT_TRUE(AMediaFormat_getDouble(fmt1, "trouble", &d));
+   EXPECT_FALSE(AMediaFormat_getDouble(fmt2, "trouble", &d));
+
+   EXPECT_EQ(AMEDIA_OK, AMediaFormat_copy(fmt2, fmt1));
+
+   EXPECT_TRUE(AMediaFormat_getDouble(fmt2, "trouble", &d));
+   EXPECT_EQ(d, 100.5);
+
+   AMediaFormat *fmt3 = nullptr;
+   EXPECT_NE(AMEDIA_OK, AMediaFormat_copy(fmt3, fmt1));
+   EXPECT_NE(AMEDIA_OK, AMediaFormat_copy(fmt1, fmt3));
+
+   // we should lose an entry when we copy over it
+   AMediaFormat_setInt32(fmt2, "vanishing", 50);
+   EXPECT_FALSE(AMediaFormat_getInt32(fmt1, "vanishing", &i32));
+   EXPECT_TRUE(AMediaFormat_getInt32(fmt2, "vanishing", &i32));
+   EXPECT_EQ(AMEDIA_OK, AMediaFormat_copy(fmt2, fmt1));
+   EXPECT_FALSE(AMediaFormat_getInt32(fmt2, "vanishing", &i32));
+
+   AMediaFormat_delete(fmt1);
+   AMediaFormat_delete(fmt2);
+}
+
+TEST(NdkMediaFormat_tests, test_buffer) {
+   AMediaFormat *fmt1 = AMediaFormat_new();
+
+   typedef struct blockomem {
+        int leading;
+        int filled[100];
+        int trailing;
+   } block_t;
+   block_t buf = {};
+   buf.leading = 1;
+   buf.trailing = 2;
+   void *data;
+   size_t bsize;
+
+   AMediaFormat_setBuffer(fmt1, "mybuffer", &buf, sizeof(buf));
+   EXPECT_TRUE(AMediaFormat_getBuffer(fmt1, "mybuffer", &data, &bsize));
+   EXPECT_NE(&buf, data);
+   EXPECT_EQ(sizeof(buf), bsize);
+   block_t *bufp = (block_t*) data;
+   EXPECT_EQ(bufp->leading, buf.leading);
+   EXPECT_EQ(bufp->trailing, buf.trailing);
+   EXPECT_EQ(0, memcmp(&buf, data, bsize));
+
+   AMediaFormat_delete(fmt1);
+}
+
+} // namespace android
diff --git a/media/tests/SampleVideoEncoder/README.md b/media/tests/SampleVideoEncoder/README.md
new file mode 100644
index 0000000..2e275c5
--- /dev/null
+++ b/media/tests/SampleVideoEncoder/README.md
@@ -0,0 +1,56 @@
+# B-Frames Encoding App
+
+This is a sample Android application for encoding AVC/HEVC streams with B-frames enabled. It uses the MediaRecorder APIs to record B-frame-enabled video from camera2 input and the MediaCodec APIs to encode a reference test vector using an input surface.
+
+This page describes how to get started with the Encoder App and how to run the tests for it.
+
+
+# Getting Started
+
+This app can be built with either the Gradle build system or the Soong build system.
+
+To build this project with Gradle, run the "gradlew build" command or use "Import Project" in Android Studio.
+
+To build the app with the Soong build system, run the following command:
+```
+mmm frameworks/av/media/tests/SampleVideoEncoder/
+```
+
+The apk is generated at the following location:
+```
+out/target/product/sargo/testcases/SampleVideoEncoder/arm64/SampleVideoEncoder.apk
+```
+
+Command to install the apk:
+```
+adb install SampleVideoEncoder.apk
+```
+
+Command to launch the app:
+```
+adb shell am start -n "com.android.media.samplevideoencoder/com.android.media.samplevideoencoder.MainActivity"
+```
+
+After installing the app, a TextureView showing the camera preview is displayed on one third of the screen. The UI also provides options to select either AVC or HEVC and a hardware or software codec, an option to choose between the MediaRecorder and MediaCodec APIs, and a 'Start' button to start/stop recording.
+
+# Running Tests
+
+The app also contains a test that exercises the MediaCodec APIs for encoding AVC/HEVC streams with B-frames enabled. This does not require the application UI.
+
+## Running the tests using atest
+Note that the atest command will install the SampleVideoEncoder app on the device.
+
+Command to run the tests:
+```
+atest SampleVideoEncoder
+```
+
+# Output
+
+The muxed output video is saved in the app's data directory at:
+```
+/storage/emulated/0/Android/data/com.android.media.samplevideoencoder/files/
+```
+
+The total number of I-frames, P-frames, and B-frames in the output encoded using the MediaCodec APIs is displayed on the screen.
+The results of the tests can be obtained from the logcat output of the test.
diff --git a/media/tests/SampleVideoEncoder/app/Android.bp b/media/tests/SampleVideoEncoder/app/Android.bp
new file mode 100644
index 0000000..58b219b
--- /dev/null
+++ b/media/tests/SampleVideoEncoder/app/Android.bp
@@ -0,0 +1,54 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
+android_test {
+    name: "SampleVideoEncoder",
+
+    manifest: "src/main/AndroidManifest.xml",
+
+    srcs: ["src/**/*.java"],
+
+    sdk_version: "current",
+    min_sdk_version: "24", // N
+
+    resource_dirs: [
+        "src/main/res",
+    ],
+
+    static_libs: [
+        "androidx.annotation_annotation",
+        "androidx.appcompat_appcompat",
+        "androidx-constraintlayout_constraintlayout",
+        "junit",
+        "androidx.test.core",
+        "androidx.test.runner",
+        "hamcrest-library",
+    ],
+
+    javacflags: [
+        "-Xlint:deprecation",
+        "-Xlint:unchecked",
+    ],
+}
diff --git a/media/tests/SampleVideoEncoder/app/AndroidTest.xml b/media/tests/SampleVideoEncoder/app/AndroidTest.xml
new file mode 100644
index 0000000..91f4304
--- /dev/null
+++ b/media/tests/SampleVideoEncoder/app/AndroidTest.xml
@@ -0,0 +1,28 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!-- Copyright (C) 2021 The Android Open Source Project
+
+     Licensed under the Apache License, Version 2.0 (the "License");
+     you may not use this file except in compliance with the License.
+     You may obtain a copy of the License at
+
+          http://www.apache.org/licenses/LICENSE-2.0
+
+     Unless required by applicable law or agreed to in writing, software
+     distributed under the License is distributed on an "AS IS" BASIS,
+     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     See the License for the specific language governing permissions and
+     limitations under the License.
+-->
+<configuration description="Runs SampleVideoEncoder Tests">
+    <target_preparer class="com.android.tradefed.targetprep.TestAppInstallSetup">
+        <option name="cleanup-apks" value="false" />
+        <option name="test-file-name" value="SampleVideoEncoder.apk" />
+    </target_preparer>
+
+    <option name="test-tag" value="SampleVideoEncoder" />
+    <test class="com.android.tradefed.testtype.AndroidJUnitTest" >
+        <option name="package" value="com.android.media.samplevideoencoder" />
+        <option name="runner" value="androidx.test.runner.AndroidJUnitRunner" />
+        <option name="hidden-api-checks" value="false"/>
+    </test>
+</configuration>
diff --git a/media/tests/SampleVideoEncoder/app/build.gradle b/media/tests/SampleVideoEncoder/app/build.gradle
new file mode 100644
index 0000000..cc54981
--- /dev/null
+++ b/media/tests/SampleVideoEncoder/app/build.gradle
@@ -0,0 +1,48 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+apply plugin: 'com.android.application'
+
+android {
+    compileSdkVersion 30
+    buildToolsVersion "30.0.2"
+
+    defaultConfig {
+        applicationId "com.android.media.samplevideoencoder"
+        minSdkVersion 24
+        targetSdkVersion 30
+        versionCode 1
+        versionName "1.0"
+        testInstrumentationRunner "androidx.test.runner.AndroidJUnitRunner"
+    }
+
+    buildTypes {
+        release {
+            minifyEnabled false
+            proguardFiles getDefaultProguardFile('proguard-android-optimize.txt'), 'proguard-rules.pro'
+        }
+    }
+}
+
+dependencies {
+    implementation fileTree(dir: "libs", include: ["*.jar"])
+    implementation 'androidx.appcompat:appcompat:1.2.0'
+    implementation 'androidx.constraintlayout:constraintlayout:2.0.4'
+    testImplementation 'junit:junit:4.13.1'
+    androidTestImplementation 'androidx.test:runner:1.3.0'
+    androidTestImplementation 'androidx.test.ext:junit:1.1.2'
+    androidTestImplementation 'androidx.test.espresso:espresso-core:3.3.0'
+}
\ No newline at end of file
diff --git a/media/tests/SampleVideoEncoder/app/src/androidTest/java/com/android/media/samplevideoencoder/tests/SampleVideoEncoderTest.java b/media/tests/SampleVideoEncoder/app/src/androidTest/java/com/android/media/samplevideoencoder/tests/SampleVideoEncoderTest.java
new file mode 100644
index 0000000..1ef332e
--- /dev/null
+++ b/media/tests/SampleVideoEncoder/app/src/androidTest/java/com/android/media/samplevideoencoder/tests/SampleVideoEncoderTest.java
@@ -0,0 +1,93 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.android.media.samplevideoencoder.tests;
+
+import androidx.test.platform.app.InstrumentationRegistry;
+
+import android.content.Context;
+import android.media.MediaFormat;
+import android.util.Log;
+
+import com.android.media.samplevideoencoder.MediaCodecSurfaceEncoder;
+import com.android.media.samplevideoencoder.R;
+
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.junit.runners.Parameterized;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.Arrays;
+import java.util.Collection;
+
+import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.is;
+import static org.junit.Assert.assertNotEquals;
+import static org.junit.Assert.assertThat;
+
+@RunWith(Parameterized.class)
+public class SampleVideoEncoderTest {
+    private static final String TAG = SampleVideoEncoderTest.class.getSimpleName();
+    private final Context mContext;
+    private int mMaxBFrames;
+    private int mInputResId;
+    private String mMime;
+    private boolean mIsSoftwareEncoder;
+
+    @Parameterized.Parameters
+    public static Collection<Object[]> inputFiles() {
+        return Arrays.asList(new Object[][]{
+                // Parameters: MimeType, isSoftwareEncoder, maxBFrames
+                {MediaFormat.MIMETYPE_VIDEO_AVC, false, 1},
+                {MediaFormat.MIMETYPE_VIDEO_AVC, true, 1},
+                {MediaFormat.MIMETYPE_VIDEO_HEVC, false, 1},
+                {MediaFormat.MIMETYPE_VIDEO_HEVC, true, 1}});
+    }
+
+    public SampleVideoEncoderTest(String mimeType, boolean isSoftwareEncoder, int maxBFrames) {
+        this.mContext = InstrumentationRegistry.getInstrumentation().getTargetContext();
+        this.mInputResId = R.raw.crowd_1920x1080_25fps_4000kbps_h265;
+        this.mMime = mimeType;
+        this.mIsSoftwareEncoder = isSoftwareEncoder;
+        this.mMaxBFrames = maxBFrames;
+    }
+
+    private String getOutputPath() {
+        File dir = mContext.getExternalFilesDir(null);
+        if (dir == null) {
+            Log.e(TAG, "Cannot get external directory path to save output video");
+            return null;
+        }
+        String videoPath = dir.getAbsolutePath() + "/Video-" + System.currentTimeMillis() + ".mp4";
+        Log.i(TAG, "Output video is saved at: " + videoPath);
+        return videoPath;
+    }
+
+    @Test
+    public void testMediaSurfaceEncoder() throws IOException, InterruptedException {
+        String outputFilePath = getOutputPath();
+        MediaCodecSurfaceEncoder surfaceEncoder =
+                new MediaCodecSurfaceEncoder(mContext, mInputResId, mMime, mIsSoftwareEncoder,
+                        outputFilePath, mMaxBFrames);
+        int encodingStatus = surfaceEncoder.startEncodingSurface();
+        assertThat(encodingStatus, is(equalTo(0)));
+        int[] frameNumArray = surfaceEncoder.getFrameTypes();
+        Log.i(TAG, "Results: I-Frames: " + frameNumArray[0] + "; P-Frames: " + frameNumArray[1]
+                + "; B-Frames: " + frameNumArray[2]);
+        assertNotEquals("Encoder mime: " + mMime + " isSoftware: " + mIsSoftwareEncoder +
+                " failed to generate B Frames", frameNumArray[2], 0);
+    }
+}
diff --git a/media/tests/SampleVideoEncoder/app/src/main/AndroidManifest.xml b/media/tests/SampleVideoEncoder/app/src/main/AndroidManifest.xml
new file mode 100644
index 0000000..b17541d
--- /dev/null
+++ b/media/tests/SampleVideoEncoder/app/src/main/AndroidManifest.xml
@@ -0,0 +1,45 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!--
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ -->
+
+<manifest xmlns:android="http://schemas.android.com/apk/res/android"
+    package="com.android.media.samplevideoencoder">
+
+    <uses-permission android:name="android.permission.CAMERA"/>
+    <uses-permission android:name="android.permission.RECORD_AUDIO"/>
+
+    <application
+        android:configChanges="orientation"
+        android:screenOrientation="portrait"
+        android:allowBackup="true"
+        android:icon="@mipmap/ic_launcher"
+        android:label="@string/app_name"
+        android:roundIcon="@mipmap/ic_launcher_round"
+        android:supportsRtl="true"
+        android:theme="@style/AppTheme">
+        <activity android:name="com.android.media.samplevideoencoder.MainActivity">
+            <intent-filter>
+                <action android:name="android.intent.action.MAIN" />
+                <category android:name="android.intent.category.LAUNCHER" />
+            </intent-filter>
+        </activity>
+    </application>
+
+    <instrumentation android:name="androidx.test.runner.AndroidJUnitRunner"
+        android:targetPackage="com.android.media.samplevideoencoder"
+        android:label="SampleVideoEncoder Test"/>
+
+</manifest>
\ No newline at end of file
diff --git a/media/tests/SampleVideoEncoder/app/src/main/java/com/android/media/samplevideoencoder/AutoFitTextureView.java b/media/tests/SampleVideoEncoder/app/src/main/java/com/android/media/samplevideoencoder/AutoFitTextureView.java
new file mode 100644
index 0000000..a3ea4c7
--- /dev/null
+++ b/media/tests/SampleVideoEncoder/app/src/main/java/com/android/media/samplevideoencoder/AutoFitTextureView.java
@@ -0,0 +1,42 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.android.media.samplevideoencoder;
+
+import android.content.Context;
+import android.util.AttributeSet;
+import android.view.TextureView;
+
+public class AutoFitTextureView extends TextureView {
+
+    public AutoFitTextureView(Context context) {
+        this(context, null);
+    }
+
+    public AutoFitTextureView(Context context, AttributeSet attrs) {
+        this(context, attrs, 0);
+    }
+
+    public AutoFitTextureView(Context context, AttributeSet attrs, int defStyle) {
+        super(context, attrs, defStyle);
+    }
+
+    public void setAspectRatio(int width, int height) {
+        if (width < 0 || height < 0) {
+            throw new IllegalArgumentException("Size cannot be negative.");
+        }
+        requestLayout();
+    }
+}
diff --git a/media/tests/SampleVideoEncoder/app/src/main/java/com/android/media/samplevideoencoder/MainActivity.java b/media/tests/SampleVideoEncoder/app/src/main/java/com/android/media/samplevideoencoder/MainActivity.java
new file mode 100644
index 0000000..a7a353c
--- /dev/null
+++ b/media/tests/SampleVideoEncoder/app/src/main/java/com/android/media/samplevideoencoder/MainActivity.java
@@ -0,0 +1,671 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.android.media.samplevideoencoder;
+
+import androidx.appcompat.app.AppCompatActivity;
+import androidx.core.app.ActivityCompat;
+
+import android.Manifest;
+import android.app.Activity;
+
+import android.content.Context;
+import android.content.pm.PackageManager;
+
+import android.graphics.Matrix;
+import android.graphics.RectF;
+import android.hardware.camera2.CameraAccessException;
+import android.hardware.camera2.CameraCaptureSession;
+import android.hardware.camera2.CameraCharacteristics;
+import android.hardware.camera2.CameraDevice;
+import android.hardware.camera2.CameraManager;
+import android.hardware.camera2.CameraMetadata;
+import android.hardware.camera2.CaptureRequest;
+import android.hardware.camera2.params.StreamConfigurationMap;
+import android.graphics.SurfaceTexture;
+import android.media.MediaCodecInfo;
+import android.media.MediaFormat;
+import android.media.MediaRecorder;
+
+import android.os.AsyncTask;
+import android.os.Build;
+import android.os.Bundle;
+import android.os.Handler;
+import android.os.HandlerThread;
+import android.view.Surface;
+import android.view.View;
+import android.view.TextureView;
+import android.widget.Button;
+import android.widget.CheckBox;
+
+import java.io.File;
+import java.io.IOException;
+
+import android.util.Log;
+import android.util.Size;
+import android.widget.RadioGroup;
+import android.widget.TextView;
+import android.widget.Toast;
+
+import java.lang.ref.WeakReference;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Collections;
+import java.util.Comparator;
+import java.util.concurrent.Semaphore;
+import java.util.concurrent.TimeUnit;
+
+import static java.lang.Boolean.FALSE;
+import static java.lang.Boolean.TRUE;
+
+public class MainActivity extends AppCompatActivity
+        implements View.OnClickListener, ActivityCompat.OnRequestPermissionsResultCallback {
+
+    private static final String TAG = "SampleVideoEncoder";
+    private static final String[] RECORD_PERMISSIONS =
+            {Manifest.permission.CAMERA, Manifest.permission.RECORD_AUDIO};
+    private static final int REQUEST_RECORD_PERMISSIONS = 1;
+    private final Semaphore mCameraOpenCloseLock = new Semaphore(1);
+    private static final int VIDEO_BITRATE = 8000000 /* 8 Mbps */;
+    private static final int VIDEO_FRAMERATE = 30;
+
+    /**
+     * The constant values assigned to frame types here are internal to this app.
+     * They do not correspond to the actual values defined in the AVC/HEVC specifications.
+     */
+    public static final int FRAME_TYPE_I = 0;
+    public static final int FRAME_TYPE_P = 1;
+    public static final int FRAME_TYPE_B = 2;
+
+    private String mMime = MediaFormat.MIMETYPE_VIDEO_AVC;
+    private String mOutputVideoPath = null;
+
+    private final boolean mIsFrontCamera = true;
+    private boolean mIsCodecSoftware = false;
+    private boolean mIsMediaRecorder = true;
+    private boolean mIsRecording;
+
+    private AutoFitTextureView mTextureView;
+    private TextView mTextView;
+    private CameraDevice mCameraDevice;
+    private CameraCaptureSession mPreviewSession;
+    private CaptureRequest.Builder mPreviewBuilder;
+    private MediaRecorder mMediaRecorder;
+    private Size mVideoSize;
+    private Size mPreviewSize;
+
+    private Handler mBackgroundHandler;
+    private HandlerThread mBackgroundThread;
+
+    private Button mStartButton;
+
+    private int[] mFrameTypeOccurrences;
+
+    @Override
+    protected void onCreate(Bundle savedInstanceState) {
+        super.onCreate(savedInstanceState);
+        setContentView(R.layout.activity_main);
+
+        final RadioGroup radioGroup_mime = findViewById(R.id.radio_group_mime);
+        radioGroup_mime.setOnCheckedChangeListener(new RadioGroup.OnCheckedChangeListener() {
+            @Override
+            public void onCheckedChanged(RadioGroup group, int checkedId) {
+                if (checkedId == R.id.avc) {
+                    mMime = MediaFormat.MIMETYPE_VIDEO_AVC;
+                } else {
+                    mMime = MediaFormat.MIMETYPE_VIDEO_HEVC;
+                }
+            }
+        });
+
+        final RadioGroup radioGroup_codec = findViewById(R.id.radio_group_codec);
+        radioGroup_codec.setOnCheckedChangeListener(new RadioGroup.OnCheckedChangeListener() {
+            @Override
+            public void onCheckedChanged(RadioGroup group, int checkedId) {
+                mIsCodecSoftware = checkedId == R.id.sw;
+            }
+        });
+
+        final CheckBox checkBox_mr = findViewById(R.id.checkBox_media_recorder);
+        final CheckBox checkBox_mc = findViewById(R.id.checkBox_media_codec);
+        mTextureView = findViewById(R.id.texture);
+        mTextView = findViewById(R.id.textViewResults);
+
+        checkBox_mr.setOnClickListener(new View.OnClickListener() {
+            @Override
+            public void onClick(View v) {
+                boolean checked = ((CheckBox) v).isChecked();
+                if (checked) {
+                    checkBox_mc.setChecked(false);
+                    mIsMediaRecorder = TRUE;
+                    for (int i = 0; i < radioGroup_codec.getChildCount(); i++) {
+                        radioGroup_codec.getChildAt(i).setEnabled(false);
+                    }
+                }
+            }
+        });
+        checkBox_mc.setOnClickListener(new View.OnClickListener() {
+            @Override
+            public void onClick(View v) {
+                boolean checked = ((CheckBox) v).isChecked();
+                if (checked) {
+                    checkBox_mr.setChecked(false);
+                    mIsMediaRecorder = FALSE;
+                    for (int i = 0; i < radioGroup_codec.getChildCount(); i++) {
+                        radioGroup_codec.getChildAt(i).setEnabled(true);
+                    }
+                }
+            }
+        });
+        mStartButton = findViewById(R.id.start_button);
+        mStartButton.setOnClickListener(this);
+    }
+
+    @Override
+    public void onClick(View v) {
+        if (v.getId() == R.id.start_button) {
+            mTextView.setText(null);
+            if (mIsMediaRecorder) {
+                if (mIsRecording) {
+                    stopRecordingVideo();
+                } else {
+                    mStartButton.setEnabled(false);
+                    startRecordingVideo();
+                }
+            } else {
+                mStartButton.setEnabled(false);
+                mOutputVideoPath = getVideoPath(MainActivity.this);
+                MediaCodecSurfaceAsync codecAsyncTask = new MediaCodecSurfaceAsync(this);
+                codecAsyncTask.execute(
+                        "Encoding reference test vector with MediaCodec APIs using surface");
+            }
+        }
+    }
+
+    private static class MediaCodecSurfaceAsync extends AsyncTask<String, String, Integer> {
+
+        private final WeakReference<MainActivity> activityReference;
+
+        MediaCodecSurfaceAsync(MainActivity context) {
+            activityReference = new WeakReference<>(context);
+        }
+
+        @Override
+        protected Integer doInBackground(String... strings) {
+            MainActivity mainActivity = activityReference.get();
+            int resId = R.raw.crowd_1920x1080_25fps_4000kbps_h265;
+            int encodingStatus = 1;
+            MediaCodecSurfaceEncoder codecSurfaceEncoder =
+                    new MediaCodecSurfaceEncoder(mainActivity.getApplicationContext(), resId,
+                            mainActivity.mMime, mainActivity.mIsCodecSoftware,
+                            mainActivity.mOutputVideoPath);
+            try {
+                encodingStatus = codecSurfaceEncoder.startEncodingSurface();
+                mainActivity.mFrameTypeOccurrences = codecSurfaceEncoder.getFrameTypes();
+            } catch (IOException | InterruptedException e) {
+                e.printStackTrace();
+            }
+            return encodingStatus;
+        }
+
+        @Override
+        protected void onPostExecute(Integer encodingStatus) {
+            MainActivity mainActivity = activityReference.get();
+            mainActivity.mStartButton.setEnabled(true);
+            if (encodingStatus == 0) {
+                Toast.makeText(mainActivity.getApplicationContext(), "Encoding Completed",
+                        Toast.LENGTH_SHORT).show();
+                mainActivity.mTextView.append("\n Encoded stream contains: ");
+                mainActivity.mTextView.append("\n Number of I-Frames: " +
+                        mainActivity.mFrameTypeOccurrences[FRAME_TYPE_I]);
+                mainActivity.mTextView.append("\n Number of P-Frames: " +
+                        mainActivity.mFrameTypeOccurrences[FRAME_TYPE_P]);
+                mainActivity.mTextView.append("\n Number of B-Frames: " +
+                        mainActivity.mFrameTypeOccurrences[FRAME_TYPE_B]);
+            } else {
+                Toast.makeText(mainActivity.getApplicationContext(),
+                        "Error occurred while " + "encoding", Toast.LENGTH_SHORT).show();
+            }
+            mainActivity.mOutputVideoPath = null;
+            super.onPostExecute(encodingStatus);
+        }
+    }
+
+    private final TextureView.SurfaceTextureListener mSurfaceTextureListener =
+            new TextureView.SurfaceTextureListener() {
+
+                @Override
+                public void onSurfaceTextureAvailable(SurfaceTexture surface, int width,
+                                                      int height) {
+                    openCamera(width, height);
+                }
+
+                @Override
+                public void onSurfaceTextureSizeChanged(SurfaceTexture surface, int width,
+                                                        int height) {
+                    configureTransform(width, height);
+                    Log.v(TAG, "Keeping camera preview size fixed");
+                }
+
+                @Override
+                public boolean onSurfaceTextureDestroyed(SurfaceTexture surface) {
+                    return true;
+                }
+
+                @Override
+                public void onSurfaceTextureUpdated(SurfaceTexture surface) {
+                }
+            };
+
+
+    private final CameraDevice.StateCallback mStateCallback = new CameraDevice.StateCallback() {
+
+        @Override
+        public void onOpened(CameraDevice cameraDevice) {
+            mCameraDevice = cameraDevice;
+            startPreview();
+            mCameraOpenCloseLock.release();
+        }
+
+        @Override
+        public void onDisconnected(CameraDevice cameraDevice) {
+            mCameraOpenCloseLock.release();
+            cameraDevice.close();
+            mCameraDevice = null;
+        }
+
+        @Override
+        public void onError(CameraDevice cameraDevice, int error) {
+            mCameraOpenCloseLock.release();
+            cameraDevice.close();
+            mCameraDevice = null;
+            Activity activity = MainActivity.this;
+            activity.finish();
+        }
+    };
+
+    private boolean shouldShowRequestPermissionRationale(String[] recordPermissions) {
+        for (String permission : recordPermissions) {
+            if (ActivityCompat.shouldShowRequestPermissionRationale(this, permission)) {
+                return true;
+            }
+        }
+        return false;
+    }
+
+    private void requestRecordPermissions() {
+        if (!shouldShowRequestPermissionRationale(RECORD_PERMISSIONS)) {
+            ActivityCompat.requestPermissions(this, RECORD_PERMISSIONS, REQUEST_RECORD_PERMISSIONS);
+        }
+    }
+
+    @Override
+    public void onRequestPermissionsResult(int requestCode, String[] permissions,
+                                           int[] grantResults) {
+        if (requestCode == REQUEST_RECORD_PERMISSIONS) {
+            if (grantResults.length == RECORD_PERMISSIONS.length) {
+                for (int result : grantResults) {
+                    if (result != PackageManager.PERMISSION_GRANTED) {
+                        Log.e(TAG, "Permission is not granted");
+                        break;
+                    }
+                }
+            }
+        } else {
+            super.onRequestPermissionsResult(requestCode, permissions, grantResults);
+        }
+    }
+
+    @SuppressWarnings("MissingPermission")
+    private void openCamera(int width, int height) {
+        if (!hasPermissionGranted(RECORD_PERMISSIONS)) {
+            Log.e(TAG, "Camera does not have permission to record video");
+            requestRecordPermissions();
+            return;
+        }
+        final Activity activity = MainActivity.this;
+        if (activity == null || activity.isFinishing()) {
+            Log.e(TAG, "Activity not found");
+            return;
+        }
+        CameraManager manager = (CameraManager) activity.getSystemService(Context.CAMERA_SERVICE);
+        try {
+            Log.v(TAG, "Acquire Camera");
+            if (!mCameraOpenCloseLock.tryAcquire(2500, TimeUnit.MILLISECONDS)) {
+                throw new RuntimeException("Timed out waiting to lock camera opening");
+            }
+            Log.d(TAG, "Camera Acquired");
+
+            String cameraId = manager.getCameraIdList()[0];
+            if (mIsFrontCamera) {
+                cameraId = manager.getCameraIdList()[1];
+            }
+
+            CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraId);
+            StreamConfigurationMap map =
+                    characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
+            mVideoSize = chooseVideoSize(map.getOutputSizes(MediaRecorder.class));
+            mPreviewSize =
+                    chooseOptimalSize(map.getOutputSizes(SurfaceTexture.class), width, height,
+                            mVideoSize);
+            mTextureView.setAspectRatio(mPreviewSize.getHeight(), mPreviewSize.getWidth());
+            configureTransform(width, height);
+            mMediaRecorder = new MediaRecorder();
+            manager.openCamera(cameraId, mStateCallback, null);
+        } catch (InterruptedException | CameraAccessException e) {
+            e.printStackTrace();
+        }
+    }
+
+    private void closeCamera() {
+        try {
+            mCameraOpenCloseLock.acquire();
+            closePreviewSession();
+            if (null != mCameraDevice) {
+                mCameraDevice.close();
+                mCameraDevice = null;
+            }
+            if (null != mMediaRecorder) {
+                mMediaRecorder.release();
+                mMediaRecorder = null;
+            }
+        } catch (InterruptedException e) {
+            throw new RuntimeException("Interrupted while trying to lock camera closing.");
+        } finally {
+            mCameraOpenCloseLock.release();
+        }
+    }
+
+    private static Size chooseVideoSize(Size[] choices) {
+        for (Size size : choices) {
+            if (size.getWidth() == size.getHeight() * 16 / 9 && size.getWidth() <= 1920) {
+                return size;
+            }
+        }
+        Log.e(TAG, "Couldn't find any suitable video size");
+        return choices[choices.length - 1];
+    }
+
+    private static Size chooseOptimalSize(Size[] choices, int width, int height, Size aspectRatio) {
+        List<Size> bigEnough = new ArrayList<>();
+        int w = aspectRatio.getWidth();
+        int h = aspectRatio.getHeight();
+        for (Size option : choices) {
+            if (option.getHeight() == option.getWidth() * h / w && option.getWidth() >= width &&
+                    option.getHeight() >= height) {
+                bigEnough.add(option);
+            }
+        }
+
+        // Pick the smallest of those, assuming we found any
+        if (bigEnough.size() > 0) {
+            return Collections.min(bigEnough, new CompareSizesByArea());
+        } else {
+            Log.e(TAG, "Couldn't find any suitable preview size");
+            return choices[0];
+        }
+    }
+
+    private boolean hasPermissionGranted(String[] recordPermissions) {
+        for (String permission : recordPermissions) {
+            if (ActivityCompat.checkSelfPermission(MainActivity.this, permission) !=
+                    PackageManager.PERMISSION_GRANTED) {
+                return false;
+            }
+        }
+        return true;
+    }
+
+    @Override
+    public void onResume() {
+        super.onResume();
+        startBackgroundThread();
+        if (mTextureView.isAvailable()) {
+            openCamera(mTextureView.getWidth(), mTextureView.getHeight());
+        } else {
+            mTextureView.setSurfaceTextureListener(mSurfaceTextureListener);
+        }
+    }
+
+    @Override
+    public void onPause() {
+        closeCamera();
+        stopBackgroundThread();
+        super.onPause();
+    }
+
+    private void startBackgroundThread() {
+        mBackgroundThread = new HandlerThread("CameraBackground");
+        mBackgroundThread.start();
+        mBackgroundHandler = new Handler(mBackgroundThread.getLooper());
+    }
+
+    private void stopBackgroundThread() {
+        mBackgroundThread.quitSafely();
+        try {
+            mBackgroundThread.join();
+            mBackgroundThread = null;
+            mBackgroundHandler = null;
+        } catch (InterruptedException e) {
+            e.printStackTrace();
+        }
+    }
+
+    private void startRecordingVideo() {
+        if (null == mCameraDevice || !mTextureView.isAvailable() || null == mPreviewSize) {
+            Toast.makeText(MainActivity.this, "Cannot start recording.", Toast.LENGTH_SHORT).show();
+            Log.e(TAG, "Cannot start recording.");
+            return;
+        }
+        try {
+            closePreviewSession();
+            setUpMediaRecorder();
+            SurfaceTexture texture = mTextureView.getSurfaceTexture();
+            assert texture != null;
+            texture.setDefaultBufferSize(mPreviewSize.getWidth(), mPreviewSize.getHeight());
+            mPreviewBuilder = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_RECORD);
+            List<Surface> surfaces = new ArrayList<>();
+
+            // Set up Surface for the camera preview
+            Surface previewSurface = new Surface(texture);
+            surfaces.add(previewSurface);
+            mPreviewBuilder.addTarget(previewSurface);
+
+            // Set up Surface for the MediaRecorder
+            Surface recorderSurface = mMediaRecorder.getSurface();
+            surfaces.add(recorderSurface);
+            mPreviewBuilder.addTarget(recorderSurface);
+
+            //Start a capture session
+            mCameraDevice.createCaptureSession(surfaces, new CameraCaptureSession.StateCallback() {
+
+                @Override
+                public void onConfigured(CameraCaptureSession session) {
+                    mPreviewSession = session;
+                    updatePreview();
+                    MainActivity.this.runOnUiThread(new Runnable() {
+                        @Override
+                        public void run() {
+                            mIsRecording = true;
+                            mMediaRecorder.start();
+                            mStartButton.setText(R.string.stop);
+                            mStartButton.setEnabled(true);
+                        }
+                    });
+                }
+
+                @Override
+                public void onConfigureFailed(CameraCaptureSession session) {
+                    Log.e(TAG, "Failed to configure. Cannot start Recording");
+                }
+            }, mBackgroundHandler);
+        } catch (CameraAccessException e) {
+            e.printStackTrace();
+        }
+    }
+
+    private void setUpMediaRecorder() {
+        final Activity activity = MainActivity.this;
+        if (activity == null) {
+            Toast.makeText(MainActivity.this, "Error occurred while setting up the MediaRecorder",
+                    Toast.LENGTH_SHORT).show();
+            Log.e(TAG, "Error occurred while setting up the MediaRecorder");
+            return;
+        }
+        try {
+            mMediaRecorder.setAudioSource(MediaRecorder.AudioSource.MIC);
+            mMediaRecorder.setVideoSource(MediaRecorder.VideoSource.SURFACE);
+            mMediaRecorder.setOutputFormat(MediaRecorder.OutputFormat.MPEG_4);
+        } catch (IllegalStateException e) {
+            e.printStackTrace();
+        }
+        if (mOutputVideoPath == null) {
+            mOutputVideoPath = getVideoPath(MainActivity.this);
+        }
+        mMediaRecorder.setOutputFile(mOutputVideoPath);
+        mMediaRecorder.setVideoEncodingBitRate(VIDEO_BITRATE);
+        mMediaRecorder.setVideoFrameRate(VIDEO_FRAMERATE);
+        mMediaRecorder.setVideoSize(mVideoSize.getWidth(), mVideoSize.getHeight());
+        mMediaRecorder.setOrientationHint(270);
+        if (mMime.equals(MediaFormat.MIMETYPE_VIDEO_HEVC)) {
+            mMediaRecorder.setVideoEncoder(MediaRecorder.VideoEncoder.HEVC);
+            if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.O) {
+                mMediaRecorder.setVideoEncodingProfileLevel(
+                        MediaCodecInfo.CodecProfileLevel.HEVCProfileMain,
+                        MediaCodecInfo.CodecProfileLevel.HEVCMainTierLevel4);
+            }
+        } else {
+            mMediaRecorder.setVideoEncoder(MediaRecorder.VideoEncoder.H264);
+            if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.O) {
+                mMediaRecorder.setVideoEncodingProfileLevel(
+                        MediaCodecInfo.CodecProfileLevel.AVCProfileMain,
+                        MediaCodecInfo.CodecProfileLevel.AVCLevel4);
+            }
+        }
+        mMediaRecorder.setAudioEncoder(MediaRecorder.AudioEncoder.AAC);
+        try {
+            mMediaRecorder.prepare();
+        } catch (IOException e) {
+            e.printStackTrace();
+        }
+    }
+
+    private String getVideoPath(Activity activity) {
+        File dir = activity.getApplicationContext().getExternalFilesDir(null);
+        if (dir == null) {
+            Log.e(TAG, "Cannot get external directory path to save output video");
+            return null;
+        }
+        String videoPath = dir.getAbsolutePath() + "/Video-" + System.currentTimeMillis() + ".mp4";
+        Log.d(TAG, "Output video is saved at: " + videoPath);
+        return videoPath;
+    }
+
+    private void closePreviewSession() {
+        if (mPreviewSession != null) {
+            mPreviewSession.close();
+            mPreviewSession = null;
+        }
+    }
+
+    private void stopRecordingVideo() {
+        mIsRecording = false;
+        mStartButton.setText(R.string.start);
+        mMediaRecorder.stop();
+        mMediaRecorder.reset();
+        Toast.makeText(MainActivity.this, "Recording Finished", Toast.LENGTH_SHORT).show();
+        mOutputVideoPath = null;
+        startPreview();
+    }
+
+    private void startPreview() {
+        if (null == mCameraDevice || !mTextureView.isAvailable() || null == mPreviewSize) {
+            return;
+        }
+        try {
+            closePreviewSession();
+            SurfaceTexture texture = mTextureView.getSurfaceTexture();
+            assert texture != null;
+            texture.setDefaultBufferSize(mPreviewSize.getWidth(), mPreviewSize.getHeight());
+            mPreviewBuilder = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
+
+            Surface previewSurface = new Surface(texture);
+            mPreviewBuilder.addTarget(previewSurface);
+
+            mCameraDevice.createCaptureSession(Collections.singletonList(previewSurface),
+                    new CameraCaptureSession.StateCallback() {
+
+                        @Override
+                        public void onConfigured(CameraCaptureSession session) {
+                            mPreviewSession = session;
+                            updatePreview();
+                        }
+
+                        @Override
+                        public void onConfigureFailed(CameraCaptureSession session) {
+                            Toast.makeText(MainActivity.this,
+                                    "Configure Failed; Cannot start " + "preview",
+                                    Toast.LENGTH_SHORT).show();
+                            Log.e(TAG, "Configure failed; Cannot start preview");
+                        }
+                    }, mBackgroundHandler);
+        } catch (CameraAccessException e) {
+            e.printStackTrace();
+        }
+    }
+
+    private void updatePreview() {
+        if (mCameraDevice == null) {
+            Toast.makeText(MainActivity.this, "Camera not found; Cannot update " + "preview",
+                    Toast.LENGTH_SHORT).show();
+            Log.e(TAG, "Camera not found; Cannot update preview");
+            return;
+        }
+        try {
+            mPreviewBuilder.set(CaptureRequest.CONTROL_MODE, CameraMetadata.CONTROL_MODE_AUTO);
+            HandlerThread thread = new HandlerThread("Camera preview");
+            thread.start();
+            mPreviewSession.setRepeatingRequest(mPreviewBuilder.build(), null, mBackgroundHandler);
+        } catch (CameraAccessException e) {
+            e.printStackTrace();
+        }
+    }
+
+    private void configureTransform(int viewWidth, int viewHeight) {
+        Activity activity = MainActivity.this;
+        if (null == mTextureView || null == mPreviewSize || null == activity) {
+            return;
+        }
+        Matrix matrix = new Matrix();
+        RectF viewRect = new RectF(0, 0, viewWidth, viewHeight);
+        RectF bufferRect = new RectF(0, 0, mPreviewSize.getHeight(), mPreviewSize.getWidth());
+        float centerX = viewRect.centerX();
+        float centerY = viewRect.centerY();
+        bufferRect.offset(centerX - bufferRect.centerX(), centerY - bufferRect.centerY());
+        matrix.setRectToRect(viewRect, bufferRect, Matrix.ScaleToFit.FILL);
+        float scale = Math.max((float) viewHeight / mPreviewSize.getHeight(),
+                (float) viewWidth / mPreviewSize.getWidth());
+        matrix.postScale(scale, scale, centerX, centerY);
+        mTextureView.setTransform(matrix);
+    }
+
+    static class CompareSizesByArea implements Comparator<Size> {
+        @Override
+        public int compare(Size lhs, Size rhs) {
+            return Long.signum((long) lhs.getWidth() * lhs.getHeight() -
+                    (long) rhs.getWidth() * rhs.getHeight());
+        }
+    }
+}
diff --git a/media/tests/SampleVideoEncoder/app/src/main/java/com/android/media/samplevideoencoder/MediaCodecBase.java b/media/tests/SampleVideoEncoder/app/src/main/java/com/android/media/samplevideoencoder/MediaCodecBase.java
new file mode 100644
index 0000000..88ce73b
--- /dev/null
+++ b/media/tests/SampleVideoEncoder/app/src/main/java/com/android/media/samplevideoencoder/MediaCodecBase.java
@@ -0,0 +1,193 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.android.media.samplevideoencoder;
+
+import android.media.MediaCodec;
+import android.media.MediaCodecInfo;
+import android.media.MediaCodecList;
+import android.media.MediaFormat;
+import android.os.Build;
+import android.util.Log;
+import android.util.Pair;
+
+import java.util.ArrayList;
+import java.util.LinkedList;
+import java.util.concurrent.locks.Condition;
+import java.util.concurrent.locks.Lock;
+import java.util.concurrent.locks.ReentrantLock;
+
+class CodecAsyncHandler extends MediaCodec.Callback {
+    private static final String TAG = CodecAsyncHandler.class.getSimpleName();
+    private final Lock mLock = new ReentrantLock();
+    private final Condition mCondition = mLock.newCondition();
+    private final LinkedList<Pair<Integer, MediaCodec.BufferInfo>> mCbInputQueue;
+    private final LinkedList<Pair<Integer, MediaCodec.BufferInfo>> mCbOutputQueue;
+    private volatile boolean mSignalledError;
+
+    CodecAsyncHandler() {
+        mCbInputQueue = new LinkedList<>();
+        mCbOutputQueue = new LinkedList<>();
+        mSignalledError = false;
+    }
+
+    void clearQueues() {
+        mLock.lock();
+        mCbInputQueue.clear();
+        mCbOutputQueue.clear();
+        mLock.unlock();
+    }
+
+    void resetContext() {
+        clearQueues();
+        mSignalledError = false;
+    }
+
+    @Override
+    public void onInputBufferAvailable(MediaCodec codec, int bufferIndex) {
+        mLock.lock();
+        mCbInputQueue.add(new Pair<>(bufferIndex, (MediaCodec.BufferInfo) null));
+        mCondition.signalAll();
+        mLock.unlock();
+    }
+
+    @Override
+    public void onOutputBufferAvailable(MediaCodec codec, int bufferIndex,
+                                        MediaCodec.BufferInfo info) {
+        mLock.lock();
+        mCbOutputQueue.add(new Pair<>(bufferIndex, info));
+        mCondition.signalAll();
+        mLock.unlock();
+    }
+
+    @Override
+    public void onError(MediaCodec codec, MediaCodec.CodecException e) {
+        mLock.lock();
+        mSignalledError = true;
+        mCondition.signalAll();
+        mLock.unlock();
+        Log.e(TAG, "Received media codec error : " + e.getMessage());
+    }
+
+    @Override
+    public void onOutputFormatChanged(MediaCodec codec, MediaFormat format) {
+        Log.i(TAG, "Output format changed: " + format.toString());
+    }
+
+    void setCallBack(MediaCodec codec, boolean isCodecInAsyncMode) {
+        if (isCodecInAsyncMode) {
+            codec.setCallback(this);
+        }
+    }
+
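+    // Blocks until an output buffer callback arrives; returns null if an error was signalled.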
+    Pair<Integer, MediaCodec.BufferInfo> getOutput() throws InterruptedException {
+        Pair<Integer, MediaCodec.BufferInfo> element = null;
+        mLock.lock();
+        while (!mSignalledError) {
+            if (mCbOutputQueue.isEmpty()) {
+                mCondition.await();
+            } else {
+                element = mCbOutputQueue.remove(0);
+                break;
+            }
+        }
+        mLock.unlock();
+        return element;
+    }
+
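+    // Blocks until any callback arrives; pending output buffers are returned before input
+    // buffers. Returns null if an error was signalled.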
+    Pair<Integer, MediaCodec.BufferInfo> getWork() throws InterruptedException {
+        Pair<Integer, MediaCodec.BufferInfo> element = null;
+        mLock.lock();
+        while (!mSignalledError) {
+            if (mCbInputQueue.isEmpty() && mCbOutputQueue.isEmpty()) {
+                mCondition.await();
+            } else {
+                if (!mCbOutputQueue.isEmpty()) {
+                    element = mCbOutputQueue.remove(0);
+                    break;
+                }
+                if (!mCbInputQueue.isEmpty()) {
+                    element = mCbInputQueue.remove(0);
+                    break;
+                }
+            }
+        }
+        mLock.unlock();
+        return element;
+    }
+
+    boolean hasSeenError() {
+        return mSignalledError;
+    }
+}
+
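+/**
+ * Helper that walks MediaCodecList and returns the names of codecs matching the given
+ * mime type, formats and features, filtered by encoder/decoder role and by whether a
+ * software or hardware implementation was requested.
+ */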
+public abstract class MediaCodecBase {
+    static ArrayList<String> selectCodecs(String mime, ArrayList<MediaFormat> formats,
+                                          String[] features, boolean isEncoder,
+                                          boolean isSoftware) {
+
+        MediaCodecList codecList = new MediaCodecList(MediaCodecList.REGULAR_CODECS);
+        MediaCodecInfo[] codecInfos = codecList.getCodecInfos();
+        ArrayList<String> listOfCodecs = new ArrayList<>();
+        for (MediaCodecInfo codecInfo : codecInfos) {
+            if (isEncoder) {
+                if (!codecInfo.isEncoder()) continue;
+            } else {
+                if (codecInfo.isEncoder()) continue;
+            }
+            if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.Q && codecInfo.isAlias()) continue;
+            String[] types = codecInfo.getSupportedTypes();
+            for (String type : types) {
+                if (type.equalsIgnoreCase(mime)) {
+                    boolean isOk = true;
+                    MediaCodecInfo.CodecCapabilities codecCapabilities =
+                            codecInfo.getCapabilitiesForType(type);
+                    if (formats != null) {
+                        for (MediaFormat format : formats) {
+                            if (!codecCapabilities.isFormatSupported(format)) {
+                                isOk = false;
+                                break;
+                            }
+                        }
+                    }
+                    if (features != null) {
+                        for (String feature : features) {
+                            if (!codecCapabilities.isFeatureSupported(feature)) {
+                                isOk = false;
+                                break;
+                            }
+                        }
+                    }
+                    String codecName = codecInfo.getName();
+                    boolean isSoftwareCodec = codecName.contains("software") ||
+                            codecName.contains("android") || codecName.contains("google");
+                    // Add the codec only when it matches the requested software/hardware type
+                    // and supports all of the requested formats and features.
+                    if (isOk && isSoftwareCodec == isSoftware) {
+                        listOfCodecs.add(codecName);
+                    }
+                }
+            }
+        }
+        return listOfCodecs;
+    }
+}
diff --git a/media/tests/SampleVideoEncoder/app/src/main/java/com/android/media/samplevideoencoder/MediaCodecSurfaceEncoder.java b/media/tests/SampleVideoEncoder/app/src/main/java/com/android/media/samplevideoencoder/MediaCodecSurfaceEncoder.java
new file mode 100644
index 0000000..011c38c
--- /dev/null
+++ b/media/tests/SampleVideoEncoder/app/src/main/java/com/android/media/samplevideoencoder/MediaCodecSurfaceEncoder.java
@@ -0,0 +1,385 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.media.samplevideoencoder;
+
+import android.content.Context;
+import android.content.res.AssetFileDescriptor;
+import android.media.MediaCodec;
+import android.media.MediaCodecInfo;
+import android.media.MediaExtractor;
+import android.media.MediaFormat;
+import android.media.MediaMuxer;
+import android.os.Build;
+import android.util.Log;
+import android.util.Pair;
+import android.view.Surface;
+
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.util.ArrayList;
+import java.util.Arrays;
+
+import static com.android.media.samplevideoencoder.MainActivity.FRAME_TYPE_B;
+import static com.android.media.samplevideoencoder.MainActivity.FRAME_TYPE_I;
+import static com.android.media.samplevideoencoder.MainActivity.FRAME_TYPE_P;
+
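+/**
+ * Decodes the bundled clip, renders the decoded frames onto the encoder's input surface,
+ * re-encodes them with the requested mime type, optionally muxes the result to a file and
+ * counts how many I-, P- and B-frames were produced.
+ */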
+public class MediaCodecSurfaceEncoder {
+    private static final String TAG = MediaCodecSurfaceEncoder.class.getSimpleName();
+    private static final boolean DEBUG = false;
+    private static final int VIDEO_BITRATE = 8000000  /*8 Mbps*/;
+    private static final int VIDEO_FRAMERATE = 30;
+    private final Context mActivityContext;
+    private final int mResID;
+    private final int mMaxBFrames;
+    private final String mMime;
+    private final String mOutputPath;
+    private int mTrackID = -1;
+    private int mFrameNum = 0;
+    private int[] mFrameTypeOccurrences = {0, 0, 0};
+
+    private Surface mSurface;
+    private MediaExtractor mExtractor;
+    private MediaCodec mDecoder;
+    private MediaCodec mEncoder;
+    private MediaMuxer mMuxer;
+
+    private final boolean mIsCodecSoftware;
+    private boolean mSawDecInputEOS;
+    private boolean mSawDecOutputEOS;
+    private boolean mSawEncOutputEOS;
+    private int mDecOutputCount;
+    private int mEncOutputCount;
+
+    private final CodecAsyncHandler mAsyncHandleEncoder = new CodecAsyncHandler();
+    private final CodecAsyncHandler mAsyncHandleDecoder = new CodecAsyncHandler();
+
+    public MediaCodecSurfaceEncoder(Context context, int resId, String mime, boolean isSoftware,
+                                    String outputPath, int maxBFrames) {
+        mActivityContext = context;
+        mResID = resId;
+        mMime = mime;
+        mIsCodecSoftware = isSoftware;
+        mOutputPath = outputPath;
+        mMaxBFrames = maxBFrames;
+    }
+
+    public MediaCodecSurfaceEncoder(Context context, int resId, String mime, boolean isSoftware,
+                                    String outputPath) {
+        // If maxBFrames is not passed, MediaFormat.KEY_MAX_B_FRAMES defaults to 1.
+        this(context, resId, mime, isSoftware, outputPath, 1);
+    }
+
+    public int startEncodingSurface() throws IOException, InterruptedException {
+        MediaFormat decoderFormat = setUpSource();
+        if (decoderFormat == null) {
+            return -1;
+        }
+
+        String decoderMime = decoderFormat.getString(MediaFormat.KEY_MIME);
+        ArrayList<String> listOfDecoders =
+                MediaCodecBase.selectCodecs(decoderMime, null, null, false, mIsCodecSoftware);
+        if (listOfDecoders.isEmpty()) {
+            Log.e(TAG, "No suitable decoder found for mime: " + decoderMime);
+            return -1;
+        }
+        mDecoder = MediaCodec.createByCodecName(listOfDecoders.get(0));
+
+        MediaFormat encoderFormat = setUpEncoderFormat(decoderFormat);
+        ArrayList<String> listOfEncoders =
+                MediaCodecBase.selectCodecs(mMime, null, null, true, mIsCodecSoftware);
+        if (listOfEncoders.isEmpty()) {
+            Log.e(TAG, "No suitable encoder found for mime: " + mMime);
+            return -1;
+        }
+
+        boolean muxOutput = true;
+        for (String encoder : listOfEncoders) {
+            mEncoder = MediaCodec.createByCodecName(encoder);
+            mExtractor.seekTo(0, MediaExtractor.SEEK_TO_CLOSEST_SYNC);
+            if (muxOutput) {
+                int muxerFormat = MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4;
+                mMuxer = new MediaMuxer(mOutputPath, muxerFormat);
+            }
+            configureCodec(decoderFormat, encoderFormat);
+            mEncoder.start();
+            mDecoder.start();
+            doWork(Integer.MAX_VALUE);
+            queueEOS();
+            waitForAllEncoderOutputs();
+            if (muxOutput) {
+                if (mTrackID != -1) {
+                    mMuxer.stop();
+                    mTrackID = -1;
+                }
+                if (mMuxer != null) {
+                    mMuxer.release();
+                    mMuxer = null;
+                }
+            }
+            mDecoder.reset();
+            mEncoder.reset();
+            mSurface.release();
+            mSurface = null;
+            Log.i(TAG, "Number of I-frames = " + mFrameTypeOccurrences[FRAME_TYPE_I]);
+            Log.i(TAG, "Number of P-frames = " + mFrameTypeOccurrences[FRAME_TYPE_P]);
+            Log.i(TAG, "Number of B-frames = " + mFrameTypeOccurrences[FRAME_TYPE_B]);
+        }
+        mEncoder.release();
+        mDecoder.release();
+        mExtractor.release();
+        return 0;
+    }
+
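+    // Selects the first video track of the bundled resource and returns its format,
+    // or null if the clip contains no video track.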
+    private MediaFormat setUpSource() throws IOException {
+        mExtractor = new MediaExtractor();
+        AssetFileDescriptor fd = mActivityContext.getResources().openRawResourceFd(mResID);
+        mExtractor.setDataSource(fd.getFileDescriptor(), fd.getStartOffset(), fd.getLength());
+        for (int trackID = 0; trackID < mExtractor.getTrackCount(); trackID++) {
+            MediaFormat format = mExtractor.getTrackFormat(trackID);
+            String mime = format.getString(MediaFormat.KEY_MIME);
+            if (mime.startsWith("video/")) {
+                mExtractor.selectTrack(trackID);
+                format.setInteger(MediaFormat.KEY_COLOR_FORMAT,
+                        MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Flexible);
+                return format;
+            }
+        }
+        mExtractor.release();
+        return null;
+    }
+
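+    // Builds the encoder format from the clip's resolution with a fixed bitrate, frame rate
+    // and a mime-specific profile/level; KEY_MAX_B_FRAMES is only set on Android Q and above.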
+    private MediaFormat setUpEncoderFormat(MediaFormat decoderFormat) {
+        MediaFormat encoderFormat = new MediaFormat();
+        encoderFormat.setString(MediaFormat.KEY_MIME, mMime);
+        encoderFormat
+                .setInteger(MediaFormat.KEY_WIDTH, decoderFormat.getInteger(MediaFormat.KEY_WIDTH));
+        encoderFormat.setInteger(MediaFormat.KEY_HEIGHT,
+                decoderFormat.getInteger(MediaFormat.KEY_HEIGHT));
+        encoderFormat.setInteger(MediaFormat.KEY_FRAME_RATE, VIDEO_FRAMERATE);
+        encoderFormat.setInteger(MediaFormat.KEY_BIT_RATE, VIDEO_BITRATE);
+        encoderFormat.setFloat(MediaFormat.KEY_I_FRAME_INTERVAL, 1.0f);
+        encoderFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT,
+                MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
+        if (mMime.equals(MediaFormat.MIMETYPE_VIDEO_HEVC)) {
+            encoderFormat.setInteger(MediaFormat.KEY_PROFILE,
+                    MediaCodecInfo.CodecProfileLevel.HEVCProfileMain);
+            encoderFormat.setInteger(MediaFormat.KEY_LEVEL,
+                    MediaCodecInfo.CodecProfileLevel.HEVCMainTierLevel4);
+        } else {
+            encoderFormat.setInteger(MediaFormat.KEY_PROFILE,
+                    MediaCodecInfo.CodecProfileLevel.AVCProfileMain);
+            encoderFormat
+                    .setInteger(MediaFormat.KEY_LEVEL, MediaCodecInfo.CodecProfileLevel.AVCLevel4);
+        }
+        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.Q) {
+            encoderFormat.setInteger(MediaFormat.KEY_MAX_B_FRAMES, mMaxBFrames);
+        } else if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.O) {
+            encoderFormat.setInteger(MediaFormat.KEY_LATENCY, 1);
+        }
+        return encoderFormat;
+    }
+
+    private void resetContext() {
+        mAsyncHandleDecoder.resetContext();
+        mAsyncHandleEncoder.resetContext();
+        mSawDecInputEOS = false;
+        mSawDecOutputEOS = false;
+        mSawEncOutputEOS = false;
+        mDecOutputCount = 0;
+        mEncOutputCount = 0;
+        mFrameNum = 0;
+        Arrays.fill(mFrameTypeOccurrences, 0);
+    }
+
+    private void configureCodec(MediaFormat decFormat, MediaFormat encFormat) {
+        resetContext();
+        mAsyncHandleEncoder.setCallBack(mEncoder, true);
+        mEncoder.configure(encFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
+        mSurface = mEncoder.createInputSurface();
+        if (!mSurface.isValid()) {
+            Log.e(TAG, "Surface is not valid");
+            return;
+        }
+        mAsyncHandleDecoder.setCallBack(mDecoder, true);
+        mDecoder.configure(decFormat, mSurface, null, 0);
+        Log.d(TAG, "Codec configured");
+        if (DEBUG) {
+            Log.d(TAG, "Encoder Output format: " + mEncoder.getOutputFormat());
+        }
+    }
+
+    private void dequeueDecoderOutput(int bufferIndex, MediaCodec.BufferInfo info) {
+        if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
+            mSawDecOutputEOS = true;
+        }
+        if (DEBUG) {
+            Log.d(TAG,
+                    "output: id: " + bufferIndex + " flags: " + info.flags + " size: " + info.size +
+                            " timestamp: " + info.presentationTimeUs);
+        }
+        if (info.size > 0 && (info.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) == 0) {
+            mDecOutputCount++;
+        }
+        mDecoder.releaseOutputBuffer(bufferIndex, mSurface != null);
+    }
+
+    private void enqueueDecoderInput(int bufferIndex) {
+        ByteBuffer inputBuffer = mDecoder.getInputBuffer(bufferIndex);
+        int size = mExtractor.readSampleData(inputBuffer, 0);
+        if (size < 0) {
+            enqueueDecoderEOS(bufferIndex);
+        } else {
+            long pts = mExtractor.getSampleTime();
+            int extractorFlags = mExtractor.getSampleFlags();
+            int codecFlags = 0;
+            if ((extractorFlags & MediaExtractor.SAMPLE_FLAG_SYNC) != 0) {
+                codecFlags |= MediaCodec.BUFFER_FLAG_KEY_FRAME;
+            }
+            if ((extractorFlags & MediaExtractor.SAMPLE_FLAG_PARTIAL_FRAME) != 0) {
+                codecFlags |= MediaCodec.BUFFER_FLAG_PARTIAL_FRAME;
+            }
+            if (!mExtractor.advance()) {
+                codecFlags |= MediaCodec.BUFFER_FLAG_END_OF_STREAM;
+                mSawDecInputEOS = true;
+            }
+            if (DEBUG) {
+                Log.d(TAG, "input: id: " + bufferIndex + " size: " + size + " pts: " + pts +
+                        " flags: " + codecFlags);
+            }
+            mDecoder.queueInputBuffer(bufferIndex, 0, size, pts, codecFlags);
+        }
+    }
+
+    private void doWork(int frameLimit) throws InterruptedException {
+        int frameCount = 0;
+        while (!hasSeenError() && !mSawDecInputEOS && frameCount < frameLimit) {
+            Pair<Integer, MediaCodec.BufferInfo> element = mAsyncHandleDecoder.getWork();
+            if (element != null) {
+                int bufferID = element.first;
+                MediaCodec.BufferInfo info = element.second;
+                if (info != null) {
+                    // <id, info> corresponds to output callback.
+                    dequeueDecoderOutput(bufferID, info);
+                } else {
+                    // <id, null> corresponds to input callback.
+                    enqueueDecoderInput(bufferID);
+                    frameCount++;
+                }
+            }
+            // Signal encoder EOS once the decoder has delivered its last output frame.
+            if (mSawDecOutputEOS) mEncoder.signalEndOfInputStream();
+            // Drain encoder output once enough decoded frames are queued to cover B-frame reordering.
+            if (mDecOutputCount - mEncOutputCount > mMaxBFrames) {
+                tryEncoderOutput();
+            }
+        }
+    }
+
+    private void queueEOS() throws InterruptedException {
+        while (!mAsyncHandleDecoder.hasSeenError() && !mSawDecInputEOS) {
+            Pair<Integer, MediaCodec.BufferInfo> element = mAsyncHandleDecoder.getWork();
+            if (element != null) {
+                int bufferID = element.first;
+                MediaCodec.BufferInfo info = element.second;
+                if (info != null) {
+                    dequeueDecoderOutput(bufferID, info);
+                } else {
+                    enqueueDecoderEOS(element.first);
+                }
+            }
+        }
+
+        while (!hasSeenError() && !mSawDecOutputEOS) {
+            Pair<Integer, MediaCodec.BufferInfo> decOp = mAsyncHandleDecoder.getOutput();
+            if (decOp != null) dequeueDecoderOutput(decOp.first, decOp.second);
+            if (mSawDecOutputEOS) mEncoder.signalEndOfInputStream();
+            if (mDecOutputCount - mEncOutputCount > mMaxBFrames) {
+                tryEncoderOutput();
+            }
+        }
+    }
+
+    private void tryEncoderOutput() throws InterruptedException {
+        if (!hasSeenError() && !mSawEncOutputEOS) {
+            Pair<Integer, MediaCodec.BufferInfo> element = mAsyncHandleEncoder.getOutput();
+            if (element != null) {
+                dequeueEncoderOutput(element.first, element.second);
+            }
+        }
+    }
+
+    private void waitForAllEncoderOutputs() throws InterruptedException {
+        while (!hasSeenError() && !mSawEncOutputEOS) {
+            tryEncoderOutput();
+        }
+    }
+
+    private void enqueueDecoderEOS(int bufferIndex) {
+        if (!mSawDecInputEOS) {
+            mDecoder.queueInputBuffer(bufferIndex, 0, 0, 0, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
+            mSawDecInputEOS = true;
+            Log.d(TAG, "Queued End of Stream");
+        }
+    }
+
+    private void dequeueEncoderOutput(int bufferIndex, MediaCodec.BufferInfo info) {
+        if (DEBUG) {
+            Log.d(TAG, "encoder output: id: " + bufferIndex + " flags: " + info.flags + " size: " +
+                    info.size + " timestamp: " + info.presentationTimeUs);
+        }
+        if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
+            mSawEncOutputEOS = true;
+        }
+        if (info.size > 0) {
+            ByteBuffer buf = mEncoder.getOutputBuffer(bufferIndex);
+            // Parse the buffer to get the frame type
+            if (DEBUG) Log.d(TAG, "[ Frame : " + (mFrameNum++) + " ]");
+            int frameTypeResult = -1;
+            if (MediaFormat.MIMETYPE_VIDEO_AVC.equals(mMime)) {
+                frameTypeResult = NalUnitUtil.getStandardizedFrameTypesFromAVC(buf);
+            } else if (MediaFormat.MIMETYPE_VIDEO_HEVC.equals(mMime)) {
+                frameTypeResult = NalUnitUtil.getStandardizedFrameTypesFromHEVC(buf);
+            } else {
+                Log.e(TAG, "Mime type " + mMime + " is not supported.");
+                return;
+            }
+            if (frameTypeResult != -1) {
+                mFrameTypeOccurrences[frameTypeResult]++;
+            }
+
+            if (mMuxer != null) {
+                if (mTrackID == -1) {
+                    mTrackID = mMuxer.addTrack(mEncoder.getOutputFormat());
+                    mMuxer.start();
+                }
+                mMuxer.writeSampleData(mTrackID, buf, info);
+            }
+            if ((info.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) == 0) {
+                mEncOutputCount++;
+            }
+        }
+        mEncoder.releaseOutputBuffer(bufferIndex, false);
+    }
+
+    private boolean hasSeenError() {
+        return mAsyncHandleDecoder.hasSeenError() || mAsyncHandleEncoder.hasSeenError();
+    }
+
+    public int[] getFrameTypes() {
+        return mFrameTypeOccurrences;
+    }
+}
diff --git a/media/tests/SampleVideoEncoder/app/src/main/java/com/android/media/samplevideoencoder/NalUnitUtil.java b/media/tests/SampleVideoEncoder/app/src/main/java/com/android/media/samplevideoencoder/NalUnitUtil.java
new file mode 100644
index 0000000..efff4fd
--- /dev/null
+++ b/media/tests/SampleVideoEncoder/app/src/main/java/com/android/media/samplevideoencoder/NalUnitUtil.java
@@ -0,0 +1,168 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.media.samplevideoencoder;
+
+import android.util.Log;
+
+import java.nio.ByteBuffer;
+
+import static com.android.media.samplevideoencoder.MainActivity.FRAME_TYPE_B;
+import static com.android.media.samplevideoencoder.MainActivity.FRAME_TYPE_I;
+import static com.android.media.samplevideoencoder.MainActivity.FRAME_TYPE_P;
+
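+/**
+ * Minimal Annex-B NAL unit parser that locates start codes in AVC/HEVC bitstreams and reads
+ * the slice_type from slice headers to classify encoded frames as I, P or B.
+ */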
+public class NalUnitUtil {
+    private static final String TAG = MediaCodecSurfaceEncoder.class.getSimpleName();
+    private static final boolean DEBUG = false;
+
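+    // Returns the length of the start code (3 or 4) if one begins at 'pos', or 0 otherwise.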
+    public static int findNalUnit(byte[] dataArray, int pos, int limit) {
+        int startOffset = 0;
+        if (limit - pos < 4) {
+            return startOffset;
+        }
+        if (dataArray[pos] == 0 && dataArray[pos + 1] == 0 && dataArray[pos + 2] == 1) {
+            startOffset = 3;
+        } else {
+            if (dataArray[pos] == 0 && dataArray[pos + 1] == 0 && dataArray[pos + 2] == 0 &&
+                    dataArray[pos + 3] == 1) {
+                startOffset = 4;
+            }
+        }
+        return startOffset;
+    }
+
+    private static int getAVCNalUnitType(byte[] dataArray, int nalUnitOffset) {
+        return dataArray[nalUnitOffset] & 0x1F;
+    }
+
+    private static int parseAVCNALUnitData(byte[] dataArray, int offset, int limit) {
+        ParsableBitArray bitArray = new ParsableBitArray(dataArray);
+        bitArray.reset(dataArray, offset, limit);
+
+        bitArray.skipBit(); // forbidden_zero_bit
+        bitArray.readBits(2); // nal_ref_idc
+        bitArray.skipBits(5); // nal_unit_type
+
+        bitArray.readUEV(); // first_mb_in_slice
+        if (!bitArray.canReadUEV()) {
+            return -1;
+        }
+        int sliceType = bitArray.readUEV();
+        if (DEBUG) Log.d(TAG, "slice_type = " + sliceType);
+        if (sliceType == 0) {
+            return FRAME_TYPE_P;
+        } else if (sliceType == 1) {
+            return FRAME_TYPE_B;
+        } else if (sliceType == 2) {
+            return FRAME_TYPE_I;
+        } else {
+            return -1;
+        }
+    }
+
+    private static int getHEVCNalUnitType(byte[] dataArray, int nalUnitOffset) {
+        return (dataArray[nalUnitOffset] & 0x7E) >> 1;
+    }
+
+    private static int parseHEVCNALUnitData(byte[] dataArray, int offset, int limit,
+                                            int nalUnitType) {
+        // nal_unit_type values from H.265/HEVC Table 7-1.
+        final int BLA_W_LP = 16;
+        final int RSV_IRAP_VCL23 = 23;
+
+        ParsableBitArray bitArray = new ParsableBitArray(dataArray);
+        bitArray.reset(dataArray, offset, limit);
+
+        bitArray.skipBit(); // forbidden zero bit
+        bitArray.readBits(6); // nal_unit_header
+        bitArray.readBits(6); // nuh_layer_id
+        bitArray.readBits(3); // nuh_temporal_id_plus1
+
+        // Parsing slice_segment_header values from H.265/HEVC Section 7.3.6.1
+        boolean first_slice_segment = bitArray.readBit(); // first_slice_segment_in_pic_flag
+        if (!first_slice_segment) return -1;
+        if (nalUnitType >= BLA_W_LP && nalUnitType <= RSV_IRAP_VCL23) {
+            bitArray.readBit();  // no_output_of_prior_pics_flag
+        }
+        bitArray.readUEV(); // slice_pic_parameter_set_id
+        // Assume num_extra_slice_header_bits element of PPS data to be 0
+        int sliceType = bitArray.readUEV();
+        if (DEBUG) Log.d(TAG, "slice_type = " + sliceType);
+        if (sliceType == 0) {
+            return FRAME_TYPE_B;
+        } else if (sliceType == 1) {
+            return FRAME_TYPE_P;
+        } else if (sliceType == 2) {
+            return FRAME_TYPE_I;
+        } else {
+            return -1;
+        }
+    }
+
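+    // Scans the encoded buffer for start codes and returns the frame type of the first
+    // AVC slice NAL unit found, or -1 if no frame type could be determined.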
+    public static int getStandardizedFrameTypesFromAVC(ByteBuffer buf) {
+        int limit = buf.limit();
+        byte[] dataArray = new byte[buf.remaining()];
+        buf.get(dataArray);
+        int frameType = -1;
+        for (int pos = 0; pos + 3 < limit; ) {
+            int startOffset = NalUnitUtil.findNalUnit(dataArray, pos, limit);
+            if (startOffset != 0) {
+                int nalUnitType = getAVCNalUnitType(dataArray, (pos + startOffset));
+                if (DEBUG) {
+                    Log.d(TAG, "NalUnitOffset = " + (pos + startOffset));
+                    Log.d(TAG, "NalUnitType = " + nalUnitType);
+                }
+                // SLICE_NAL = 1; IDR_SLICE_NAL = 5
+                if (nalUnitType == 1 || nalUnitType == 5) {
+                    frameType = parseAVCNALUnitData(dataArray, (pos + startOffset),
+                            (limit - pos - startOffset));
+                    break;
+                }
+                pos += 3;
+            } else {
+                pos++;
+            }
+        }
+        return frameType;
+    }
+
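+    // Same as above, but for HEVC: returns the frame type of the first slice NAL unit
+    // (types 0 to 21), or -1 if none was found.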
+    public static int getStandardizedFrameTypesFromHEVC(ByteBuffer buf) {
+        int limit = buf.limit();
+        byte[] dataArray = new byte[buf.remaining()];
+        buf.get(dataArray);
+        int frameType = -1;
+        for (int pos = 0; pos + 3 < limit; ) {
+            int startOffset = NalUnitUtil.findNalUnit(dataArray, pos, limit);
+            if (startOffset != 0) {
+                int nalUnitType = NalUnitUtil.getHEVCNalUnitType(dataArray, (pos + startOffset));
+                if (DEBUG) {
+                    Log.d(TAG, "NalUnitOffset = " + (pos + startOffset));
+                    Log.d(TAG, "NalUnitType = " + nalUnitType);
+                }
+                // Parse NAL units containing slice headers, whose types lie in the range 0 to 21
+                if (nalUnitType >= 0 && nalUnitType <= 21) {
+                    frameType = parseHEVCNALUnitData(dataArray, (pos + startOffset),
+                            (limit - pos - startOffset), nalUnitType);
+                    break;
+                }
+                pos += 3;
+            } else {
+                pos++;
+            }
+        }
+        return frameType;
+    }
+}
diff --git a/media/tests/SampleVideoEncoder/app/src/main/java/com/android/media/samplevideoencoder/ParsableBitArray.java b/media/tests/SampleVideoEncoder/app/src/main/java/com/android/media/samplevideoencoder/ParsableBitArray.java
new file mode 100644
index 0000000..e4bfaa3
--- /dev/null
+++ b/media/tests/SampleVideoEncoder/app/src/main/java/com/android/media/samplevideoencoder/ParsableBitArray.java
@@ -0,0 +1,128 @@
+/*
+ * Copyright (C) 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.media.samplevideoencoder;
+
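+/**
+ * Bit-level reader over a byte array with support for Exp-Golomb (ue(v)) coded values,
+ * used by NalUnitUtil when parsing AVC/HEVC slice headers.
+ */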
+public class ParsableBitArray {
+    public byte[] data;
+    private int byteOffset;
+    private int bitOffset;
+    private int byteLimit;
+
+    public ParsableBitArray(byte[] dataArray) {
+        this(dataArray, dataArray.length);
+    }
+
+    public ParsableBitArray(byte[] dataArray, int limit) {
+        this.data = dataArray;
+        byteLimit = limit;
+    }
+
+    public void reset(byte[] data, int offset, int limit) {
+        this.data = data;
+        byteOffset = offset;
+        bitOffset = 0;
+        byteLimit = limit;
+    }
+
+    public void skipBit() {
+        if (++bitOffset == 8) {
+            bitOffset = 0;
+            byteOffset++;
+        }
+    }
+
+    public void skipBits(int numBits) {
+        int numBytes = numBits / 8;
+        byteOffset += numBytes;
+        bitOffset += numBits - (numBytes * 8);
+        if (bitOffset > 7) {
+            byteOffset++;
+            bitOffset -= 8;
+        }
+    }
+
+    public boolean readBit() {
+        boolean returnValue = (data[byteOffset] & (0x80 >> bitOffset)) != 0;
+        skipBit();
+        return returnValue;
+    }
+
+    public int readBits(int numBits) {
+        if (numBits == 0) {
+            return 0;
+        }
+        int returnValue = 0;
+        bitOffset += numBits;
+        while (bitOffset > 8) {
+            bitOffset -= 8;
+            returnValue |= (data[byteOffset++] & 0xFF) << bitOffset;
+        }
+        returnValue |= (data[byteOffset] & 0xFF) >> (8 - bitOffset);
+        returnValue &= 0xFFFFFFFF >>> (32 - numBits);
+        if (bitOffset == 8) {
+            bitOffset = 0;
+            byteOffset++;
+        }
+        return returnValue;
+    }
+
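+    // Returns whether a complete Exp-Golomb coded value can be read from the current
+    // position without running past the limit; the read position is left unchanged.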
+    public boolean canReadUEV() {
+        int initialByteOffset = byteOffset;
+        int initialBitOffset = bitOffset;
+        int leadingZeros = 0;
+        while (byteOffset < byteLimit && !readBit()) {
+            leadingZeros++;
+        }
+        boolean hitLimit = byteOffset == byteLimit;
+        byteOffset = initialByteOffset;
+        bitOffset = initialBitOffset;
+        return !hitLimit && canReadBits(leadingZeros * 2 + 1);
+    }
+
+    public int readUEV() {
+        int leadingZeros = 0;
+        while (!readBit()) {
+            leadingZeros++;
+        }
+        return (1 << leadingZeros) - 1 + (leadingZeros > 0 ? readBits(leadingZeros) : 0);
+    }
+
+    public boolean canReadBits(int numBits) {
+        int oldByteOffset = byteOffset;
+        int numBytes = numBits / 8;
+        int newByteOffset = byteOffset + numBytes;
+        int newBitOffset = bitOffset + numBits - (numBytes * 8);
+        if (newBitOffset > 7) {
+            newByteOffset++;
+            newBitOffset -= 8;
+        }
+        for (int i = oldByteOffset + 1; i <= newByteOffset && newByteOffset < byteLimit; i++) {
+            if (shouldSkipByte(i)) {
+                // Skip the byte and check three bytes ahead.
+                newByteOffset++;
+                i += 2;
+            }
+        }
+        return newByteOffset < byteLimit || (newByteOffset == byteLimit && newBitOffset == 0);
+    }
+
+    private boolean shouldSkipByte(int offset) {
+        return (2 <= offset && offset < byteLimit && data[offset] == (byte) 0x03 &&
+                data[offset - 2] == (byte) 0x00 && data[offset - 1] == (byte) 0x00);
+    }
+
+}
diff --git a/media/tests/SampleVideoEncoder/app/src/main/res/drawable-v24/ic_launcher_foreground.xml b/media/tests/SampleVideoEncoder/app/src/main/res/drawable-v24/ic_launcher_foreground.xml
new file mode 100644
index 0000000..2b068d1
--- /dev/null
+++ b/media/tests/SampleVideoEncoder/app/src/main/res/drawable-v24/ic_launcher_foreground.xml
@@ -0,0 +1,30 @@
+<vector xmlns:android="http://schemas.android.com/apk/res/android"
+    xmlns:aapt="http://schemas.android.com/aapt"
+    android:width="108dp"
+    android:height="108dp"
+    android:viewportWidth="108"
+    android:viewportHeight="108">
+    <path android:pathData="M31,63.928c0,0 6.4,-11 12.1,-13.1c7.2,-2.6 26,-1.4 26,-1.4l38.1,38.1L107,108.928l-32,-1L31,63.928z">
+        <aapt:attr name="android:fillColor">
+            <gradient
+                android:endX="85.84757"
+                android:endY="92.4963"
+                android:startX="42.9492"
+                android:startY="49.59793"
+                android:type="linear">
+                <item
+                    android:color="#44000000"
+                    android:offset="0.0" />
+                <item
+                    android:color="#00000000"
+                    android:offset="1.0" />
+            </gradient>
+        </aapt:attr>
+    </path>
+    <path
+        android:fillColor="#FFFFFF"
+        android:fillType="nonZero"
+        android:pathData="M65.3,45.828l3.8,-6.6c0.2,-0.4 0.1,-0.9 -0.3,-1.1c-0.4,-0.2 -0.9,-0.1 -1.1,0.3l-3.9,6.7c-6.3,-2.8 -13.4,-2.8 -19.7,0l-3.9,-6.7c-0.2,-0.4 -0.7,-0.5 -1.1,-0.3C38.8,38.328 38.7,38.828 38.9,39.228l3.8,6.6C36.2,49.428 31.7,56.028 31,63.928h46C76.3,56.028 71.8,49.428 65.3,45.828zM43.4,57.328c-0.8,0 -1.5,-0.5 -1.8,-1.2c-0.3,-0.7 -0.1,-1.5 0.4,-2.1c0.5,-0.5 1.4,-0.7 2.1,-0.4c0.7,0.3 1.2,1 1.2,1.8C45.3,56.528 44.5,57.328 43.4,57.328L43.4,57.328zM64.6,57.328c-0.8,0 -1.5,-0.5 -1.8,-1.2s-0.1,-1.5 0.4,-2.1c0.5,-0.5 1.4,-0.7 2.1,-0.4c0.7,0.3 1.2,1 1.2,1.8C66.5,56.528 65.6,57.328 64.6,57.328L64.6,57.328z"
+        android:strokeWidth="1"
+        android:strokeColor="#00000000" />
+</vector>
\ No newline at end of file
diff --git a/media/tests/SampleVideoEncoder/app/src/main/res/drawable/ic_launcher_background.xml b/media/tests/SampleVideoEncoder/app/src/main/res/drawable/ic_launcher_background.xml
new file mode 100644
index 0000000..07d5da9
--- /dev/null
+++ b/media/tests/SampleVideoEncoder/app/src/main/res/drawable/ic_launcher_background.xml
@@ -0,0 +1,170 @@
+<?xml version="1.0" encoding="utf-8"?>
+<vector xmlns:android="http://schemas.android.com/apk/res/android"
+    android:width="108dp"
+    android:height="108dp"
+    android:viewportWidth="108"
+    android:viewportHeight="108">
+    <path
+        android:fillColor="#3DDC84"
+        android:pathData="M0,0h108v108h-108z" />
+    <path
+        android:fillColor="#00000000"
+        android:pathData="M9,0L9,108"
+        android:strokeWidth="0.8"
+        android:strokeColor="#33FFFFFF" />
+    <path
+        android:fillColor="#00000000"
+        android:pathData="M19,0L19,108"
+        android:strokeWidth="0.8"
+        android:strokeColor="#33FFFFFF" />
+    <path
+        android:fillColor="#00000000"
+        android:pathData="M29,0L29,108"
+        android:strokeWidth="0.8"
+        android:strokeColor="#33FFFFFF" />
+    <path
+        android:fillColor="#00000000"
+        android:pathData="M39,0L39,108"
+        android:strokeWidth="0.8"
+        android:strokeColor="#33FFFFFF" />
+    <path
+        android:fillColor="#00000000"
+        android:pathData="M49,0L49,108"
+        android:strokeWidth="0.8"
+        android:strokeColor="#33FFFFFF" />
+    <path
+        android:fillColor="#00000000"
+        android:pathData="M59,0L59,108"
+        android:strokeWidth="0.8"
+        android:strokeColor="#33FFFFFF" />
+    <path
+        android:fillColor="#00000000"
+        android:pathData="M69,0L69,108"
+        android:strokeWidth="0.8"
+        android:strokeColor="#33FFFFFF" />
+    <path
+        android:fillColor="#00000000"
+        android:pathData="M79,0L79,108"
+        android:strokeWidth="0.8"
+        android:strokeColor="#33FFFFFF" />
+    <path
+        android:fillColor="#00000000"
+        android:pathData="M89,0L89,108"
+        android:strokeWidth="0.8"
+        android:strokeColor="#33FFFFFF" />
+    <path
+        android:fillColor="#00000000"
+        android:pathData="M99,0L99,108"
+        android:strokeWidth="0.8"
+        android:strokeColor="#33FFFFFF" />
+    <path
+        android:fillColor="#00000000"
+        android:pathData="M0,9L108,9"
+        android:strokeWidth="0.8"
+        android:strokeColor="#33FFFFFF" />
+    <path
+        android:fillColor="#00000000"
+        android:pathData="M0,19L108,19"
+        android:strokeWidth="0.8"
+        android:strokeColor="#33FFFFFF" />
+    <path
+        android:fillColor="#00000000"
+        android:pathData="M0,29L108,29"
+        android:strokeWidth="0.8"
+        android:strokeColor="#33FFFFFF" />
+    <path
+        android:fillColor="#00000000"
+        android:pathData="M0,39L108,39"
+        android:strokeWidth="0.8"
+        android:strokeColor="#33FFFFFF" />
+    <path
+        android:fillColor="#00000000"
+        android:pathData="M0,49L108,49"
+        android:strokeWidth="0.8"
+        android:strokeColor="#33FFFFFF" />
+    <path
+        android:fillColor="#00000000"
+        android:pathData="M0,59L108,59"
+        android:strokeWidth="0.8"
+        android:strokeColor="#33FFFFFF" />
+    <path
+        android:fillColor="#00000000"
+        android:pathData="M0,69L108,69"
+        android:strokeWidth="0.8"
+        android:strokeColor="#33FFFFFF" />
+    <path
+        android:fillColor="#00000000"
+        android:pathData="M0,79L108,79"
+        android:strokeWidth="0.8"
+        android:strokeColor="#33FFFFFF" />
+    <path
+        android:fillColor="#00000000"
+        android:pathData="M0,89L108,89"
+        android:strokeWidth="0.8"
+        android:strokeColor="#33FFFFFF" />
+    <path
+        android:fillColor="#00000000"
+        android:pathData="M0,99L108,99"
+        android:strokeWidth="0.8"
+        android:strokeColor="#33FFFFFF" />
+    <path
+        android:fillColor="#00000000"
+        android:pathData="M19,29L89,29"
+        android:strokeWidth="0.8"
+        android:strokeColor="#33FFFFFF" />
+    <path
+        android:fillColor="#00000000"
+        android:pathData="M19,39L89,39"
+        android:strokeWidth="0.8"
+        android:strokeColor="#33FFFFFF" />
+    <path
+        android:fillColor="#00000000"
+        android:pathData="M19,49L89,49"
+        android:strokeWidth="0.8"
+        android:strokeColor="#33FFFFFF" />
+    <path
+        android:fillColor="#00000000"
+        android:pathData="M19,59L89,59"
+        android:strokeWidth="0.8"
+        android:strokeColor="#33FFFFFF" />
+    <path
+        android:fillColor="#00000000"
+        android:pathData="M19,69L89,69"
+        android:strokeWidth="0.8"
+        android:strokeColor="#33FFFFFF" />
+    <path
+        android:fillColor="#00000000"
+        android:pathData="M19,79L89,79"
+        android:strokeWidth="0.8"
+        android:strokeColor="#33FFFFFF" />
+    <path
+        android:fillColor="#00000000"
+        android:pathData="M29,19L29,89"
+        android:strokeWidth="0.8"
+        android:strokeColor="#33FFFFFF" />
+    <path
+        android:fillColor="#00000000"
+        android:pathData="M39,19L39,89"
+        android:strokeWidth="0.8"
+        android:strokeColor="#33FFFFFF" />
+    <path
+        android:fillColor="#00000000"
+        android:pathData="M49,19L49,89"
+        android:strokeWidth="0.8"
+        android:strokeColor="#33FFFFFF" />
+    <path
+        android:fillColor="#00000000"
+        android:pathData="M59,19L59,89"
+        android:strokeWidth="0.8"
+        android:strokeColor="#33FFFFFF" />
+    <path
+        android:fillColor="#00000000"
+        android:pathData="M69,19L69,89"
+        android:strokeWidth="0.8"
+        android:strokeColor="#33FFFFFF" />
+    <path
+        android:fillColor="#00000000"
+        android:pathData="M79,19L79,89"
+        android:strokeWidth="0.8"
+        android:strokeColor="#33FFFFFF" />
+</vector>
diff --git a/media/tests/SampleVideoEncoder/app/src/main/res/layout/activity_main.xml b/media/tests/SampleVideoEncoder/app/src/main/res/layout/activity_main.xml
new file mode 100644
index 0000000..017012d
--- /dev/null
+++ b/media/tests/SampleVideoEncoder/app/src/main/res/layout/activity_main.xml
@@ -0,0 +1,138 @@
+<?xml version="1.0" encoding="utf-8"?>
+<androidx.constraintlayout.widget.ConstraintLayout xmlns:android="http://schemas.android.com/apk/res/android"
+    xmlns:app="http://schemas.android.com/apk/res-auto"
+    xmlns:tools="http://schemas.android.com/tools"
+    android:id="@+id/container"
+    android:layout_width="match_parent"
+    android:layout_height="match_parent"
+    android:layout_gravity="center"
+    tools:context="com.android.media.samplevideoencoder.MainActivity">
+
+    <com.android.media.samplevideoencoder.AutoFitTextureView
+        android:id="@+id/texture"
+        android:layout_width="wrap_content"
+        android:layout_height="300dp"
+        android:layout_alignParentStart="true"
+        android:layout_alignParentTop="true"
+        android:layout_marginBottom="16dp"
+        android:gravity="center"
+        app:layout_constraintBottom_toTopOf="@+id/checkBox_media_recorder"
+        app:layout_constraintStart_toStartOf="parent"
+        app:layout_constraintTop_toTopOf="parent" />
+
+    <CheckBox
+        android:id="@+id/checkBox_media_recorder"
+        android:layout_width="wrap_content"
+        android:layout_height="wrap_content"
+        android:layout_marginStart="90dp"
+        android:layout_marginTop="10dp"
+        android:fontFamily="sans-serif-medium"
+        android:text="@string/media_recorder"
+        android:textAppearance="@style/TextAppearance.AppCompat.Large"
+        android:textStyle="normal"
+        android:checked="true"
+        app:layout_constraintStart_toStartOf="parent"
+        app:layout_constraintTop_toBottomOf="@+id/texture"/>
+
+    <CheckBox
+        android:id="@+id/checkBox_media_codec"
+        android:layout_width="wrap_content"
+        android:layout_height="wrap_content"
+        android:layout_marginStart="90dp"
+        android:fontFamily="sans-serif-medium"
+        android:text="@string/media_codec"
+        android:textAppearance="@style/TextAppearance.AppCompat.Large"
+        android:textStyle="normal"
+        app:layout_constraintStart_toStartOf="parent"
+        app:layout_constraintTop_toBottomOf="@+id/checkBox_media_recorder" />
+
+    <RadioGroup
+        android:id="@+id/radio_group_mime"
+        android:layout_width="wrap_content"
+        android:layout_height="wrap_content"
+        android:layout_marginStart="40dp"
+        android:layout_marginTop="10dp"
+        android:orientation="vertical"
+        app:layout_constraintStart_toStartOf="parent"
+        app:layout_constraintBottom_toTopOf="@+id/frameLayout2"
+        app:layout_constraintTop_toBottomOf="@+id/checkBox_media_codec">
+
+        <RadioButton
+            android:id="@+id/avc"
+            android:layout_width="wrap_content"
+            android:layout_height="wrap_content"
+            android:layout_weight="1"
+            android:checked="true"
+            android:text="@string/avc" />
+
+        <RadioButton
+            android:id="@+id/hevc"
+            android:layout_width="wrap_content"
+            android:layout_height="wrap_content"
+            android:text="@string/hevc" />
+    </RadioGroup>
+
+    <RadioGroup
+        android:id="@+id/radio_group_codec"
+        android:layout_width="wrap_content"
+        android:layout_height="wrap_content"
+        android:layout_marginTop="10dp"
+        android:layout_marginEnd="40dp"
+        android:orientation="vertical"
+        app:layout_constraintEnd_toEndOf="parent"
+        app:layout_constraintTop_toBottomOf="@+id/checkBox_media_codec">
+
+        <RadioButton
+            android:id="@+id/hw"
+            android:layout_width="wrap_content"
+            android:layout_height="wrap_content"
+            android:layout_weight="1"
+            android:checked="true"
+            android:enabled="false"
+            android:text="@string/hardware" />
+
+        <RadioButton
+            android:id="@+id/sw"
+            android:layout_width="wrap_content"
+            android:layout_height="wrap_content"
+            android:layout_weight="1"
+            android:enabled="false"
+            android:text="@string/software" />
+    </RadioGroup>
+
+    <FrameLayout
+        android:id="@+id/frameLayout2"
+        android:layout_width="match_parent"
+        android:layout_height="wrap_content"
+        android:layout_below="@id/radio_group_mime"
+        android:layout_alignParentStart="true"
+        android:layout_alignParentBottom="true"
+        android:layout_marginTop="10dp"
+        android:background="@color/colorPrimary"
+        app:layout_constraintTop_toBottomOf="@+id/radio_group_mime"
+        tools:layout_editor_absoluteX="80dp">
+
+        <Button
+            android:id="@+id/start_button"
+            android:layout_width="108dp"
+            android:layout_height="wrap_content"
+            android:layout_gravity="center"
+            android:gravity="center"
+            android:text="@string/start_button"
+            tools:layout_editor_absoluteX="155dp"
+            tools:layout_editor_absoluteY="455dp" />
+
+    </FrameLayout>
+
+    <TextView
+        android:id="@+id/textViewResults"
+        android:layout_width="wrap_content"
+        android:layout_height="wrap_content"
+        android:layout_marginTop="10dp"
+        android:fontFamily="sans-serif-medium"
+        android:textSize="18sp"
+        android:textStyle="normal"
+        app:layout_constraintStart_toStartOf="parent"
+        app:layout_constraintTop_toBottomOf = "@+id/frameLayout2" />
+
+</androidx.constraintlayout.widget.ConstraintLayout>
diff --git a/media/tests/SampleVideoEncoder/app/src/main/res/mipmap-anydpi-v26/ic_launcher.xml b/media/tests/SampleVideoEncoder/app/src/main/res/mipmap-anydpi-v26/ic_launcher.xml
new file mode 100644
index 0000000..eca70cf
--- /dev/null
+++ b/media/tests/SampleVideoEncoder/app/src/main/res/mipmap-anydpi-v26/ic_launcher.xml
@@ -0,0 +1,5 @@
+<?xml version="1.0" encoding="utf-8"?>
+<adaptive-icon xmlns:android="http://schemas.android.com/apk/res/android">
+    <background android:drawable="@drawable/ic_launcher_background" />
+    <foreground android:drawable="@drawable/ic_launcher_foreground" />
+</adaptive-icon>
\ No newline at end of file
diff --git a/media/tests/SampleVideoEncoder/app/src/main/res/mipmap-anydpi-v26/ic_launcher_round.xml b/media/tests/SampleVideoEncoder/app/src/main/res/mipmap-anydpi-v26/ic_launcher_round.xml
new file mode 100644
index 0000000..eca70cf
--- /dev/null
+++ b/media/tests/SampleVideoEncoder/app/src/main/res/mipmap-anydpi-v26/ic_launcher_round.xml
@@ -0,0 +1,5 @@
+<?xml version="1.0" encoding="utf-8"?>
+<adaptive-icon xmlns:android="http://schemas.android.com/apk/res/android">
+    <background android:drawable="@drawable/ic_launcher_background" />
+    <foreground android:drawable="@drawable/ic_launcher_foreground" />
+</adaptive-icon>
\ No newline at end of file
diff --git a/media/tests/SampleVideoEncoder/app/src/main/res/raw/crowd_1920x1080_25fps_4000kbps_h265.mp4 b/media/tests/SampleVideoEncoder/app/src/main/res/raw/crowd_1920x1080_25fps_4000kbps_h265.mp4
new file mode 100644
index 0000000..6204008
--- /dev/null
+++ b/media/tests/SampleVideoEncoder/app/src/main/res/raw/crowd_1920x1080_25fps_4000kbps_h265.mp4
Binary files differ
diff --git a/media/tests/SampleVideoEncoder/app/src/main/res/values/colors.xml b/media/tests/SampleVideoEncoder/app/src/main/res/values/colors.xml
new file mode 100644
index 0000000..4faecfa
--- /dev/null
+++ b/media/tests/SampleVideoEncoder/app/src/main/res/values/colors.xml
@@ -0,0 +1,6 @@
+<?xml version="1.0" encoding="utf-8"?>
+<resources>
+    <color name="colorPrimary">#6200EE</color>
+    <color name="colorPrimaryDark">#3700B3</color>
+    <color name="colorAccent">#03DAC5</color>
+</resources>
\ No newline at end of file
diff --git a/media/tests/SampleVideoEncoder/app/src/main/res/values/strings.xml b/media/tests/SampleVideoEncoder/app/src/main/res/values/strings.xml
new file mode 100644
index 0000000..f825a7f
--- /dev/null
+++ b/media/tests/SampleVideoEncoder/app/src/main/res/values/strings.xml
@@ -0,0 +1,13 @@
+<resources>
+    <string name="app_name">SampleVideoEncoder</string>
+    <string name="media_recorder">MediaRecorder</string>
+    <string name="media_codec">MediaCodec</string>
+    <string name="start_button">Start</string>
+    <string name="stop">Stop</string>
+    <string name="start">Start</string>
+    <string name="avc">AVC</string>
+    <string name="hevc">HEVC</string>
+    <string name="hardware">H/W</string>
+    <string name="software">S/W</string>
+
+</resources>
\ No newline at end of file
diff --git a/media/tests/SampleVideoEncoder/app/src/main/res/values/styles.xml b/media/tests/SampleVideoEncoder/app/src/main/res/values/styles.xml
new file mode 100644
index 0000000..fac9291
--- /dev/null
+++ b/media/tests/SampleVideoEncoder/app/src/main/res/values/styles.xml
@@ -0,0 +1,10 @@
+<resources>
+    <!-- Base application theme. -->
+    <style name="AppTheme" parent="Theme.AppCompat.Light.DarkActionBar">
+        <!-- Customize your theme here. -->
+        <item name="colorPrimary">@color/colorPrimary</item>
+        <item name="colorPrimaryDark">@color/colorPrimaryDark</item>
+        <item name="colorAccent">@color/colorAccent</item>
+    </style>
+
+</resources>
\ No newline at end of file
diff --git a/media/codec2/hidl/1.1/utils/OutputBufferQueue.cpp b/media/tests/SampleVideoEncoder/build.gradle
similarity index 65%
copy from media/codec2/hidl/1.1/utils/OutputBufferQueue.cpp
copy to media/tests/SampleVideoEncoder/build.gradle
index 65756e8..4ca0c7e 100644
--- a/media/codec2/hidl/1.1/utils/OutputBufferQueue.cpp
+++ b/media/tests/SampleVideoEncoder/build.gradle
@@ -1,5 +1,5 @@
 /*
- * Copyright 2019 The Android Open Source Project
+ * Copyright (C) 2020 The Android Open Source Project
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
@@ -14,4 +14,19 @@
  * limitations under the License.
  */
 
-#include <codec2/hidl/1.1/OutputBufferQueue.h>
+buildscript {
+    repositories {
+        google()
+        jcenter()
+    }
+    dependencies {
+        classpath 'com.android.tools.build:gradle:4.1.1'
+    }
+}
+
+allprojects {
+    repositories {
+        google()
+        jcenter()
+    }
+}
\ No newline at end of file
diff --git a/media/tests/SampleVideoEncoder/gradle.properties b/media/tests/SampleVideoEncoder/gradle.properties
new file mode 100644
index 0000000..5ae443b
--- /dev/null
+++ b/media/tests/SampleVideoEncoder/gradle.properties
@@ -0,0 +1,4 @@
+# Project-wide Gradle settings.
+org.gradle.jvmargs=-Xmx2048m -Dfile.encoding=UTF-8
+android.useAndroidX=true
+android.enableJetifier=true
diff --git a/media/tests/SampleVideoEncoder/gradle/wrapper/gradle-wrapper.jar b/media/tests/SampleVideoEncoder/gradle/wrapper/gradle-wrapper.jar
new file mode 100644
index 0000000..f6b961f
--- /dev/null
+++ b/media/tests/SampleVideoEncoder/gradle/wrapper/gradle-wrapper.jar
Binary files differ
diff --git a/media/tests/SampleVideoEncoder/gradle/wrapper/gradle-wrapper.properties b/media/tests/SampleVideoEncoder/gradle/wrapper/gradle-wrapper.properties
new file mode 100644
index 0000000..a9a12eb
--- /dev/null
+++ b/media/tests/SampleVideoEncoder/gradle/wrapper/gradle-wrapper.properties
@@ -0,0 +1,6 @@
+#Wed Dec 16 10:06:45 IST 2020
+distributionBase=GRADLE_USER_HOME
+distributionPath=wrapper/dists
+zipStoreBase=GRADLE_USER_HOME
+zipStorePath=wrapper/dists
+distributionUrl=https\://services.gradle.org/distributions/gradle-6.5-bin.zip
diff --git a/media/tests/SampleVideoEncoder/gradlew b/media/tests/SampleVideoEncoder/gradlew
new file mode 100644
index 0000000..cccdd3d
--- /dev/null
+++ b/media/tests/SampleVideoEncoder/gradlew
@@ -0,0 +1,172 @@
+#!/usr/bin/env sh
+
+##############################################################################
+##
+##  Gradle start up script for UN*X
+##
+##############################################################################
+
+# Attempt to set APP_HOME
+# Resolve links: $0 may be a link
+PRG="$0"
+# Need this for relative symlinks.
+while [ -h "$PRG" ] ; do
+    ls=`ls -ld "$PRG"`
+    link=`expr "$ls" : '.*-> \(.*\)$'`
+    if expr "$link" : '/.*' > /dev/null; then
+        PRG="$link"
+    else
+        PRG=`dirname "$PRG"`"/$link"
+    fi
+done
+SAVED="`pwd`"
+cd "`dirname \"$PRG\"`/" >/dev/null
+APP_HOME="`pwd -P`"
+cd "$SAVED" >/dev/null
+
+APP_NAME="Gradle"
+APP_BASE_NAME=`basename "$0"`
+
+# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
+DEFAULT_JVM_OPTS=""
+
+# Use the maximum available, or set MAX_FD != -1 to use that value.
+MAX_FD="maximum"
+
+warn () {
+    echo "$*"
+}
+
+die () {
+    echo
+    echo "$*"
+    echo
+    exit 1
+}
+
+# OS specific support (must be 'true' or 'false').
+cygwin=false
+msys=false
+darwin=false
+nonstop=false
+case "`uname`" in
+  CYGWIN* )
+    cygwin=true
+    ;;
+  Darwin* )
+    darwin=true
+    ;;
+  MINGW* )
+    msys=true
+    ;;
+  NONSTOP* )
+    nonstop=true
+    ;;
+esac
+
+CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar
+
+# Determine the Java command to use to start the JVM.
+if [ -n "$JAVA_HOME" ] ; then
+    if [ -x "$JAVA_HOME/jre/sh/java" ] ; then
+        # IBM's JDK on AIX uses strange locations for the executables
+        JAVACMD="$JAVA_HOME/jre/sh/java"
+    else
+        JAVACMD="$JAVA_HOME/bin/java"
+    fi
+    if [ ! -x "$JAVACMD" ] ; then
+        die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME
+
+Please set the JAVA_HOME variable in your environment to match the
+location of your Java installation."
+    fi
+else
+    JAVACMD="java"
+    which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
+
+Please set the JAVA_HOME variable in your environment to match the
+location of your Java installation."
+fi
+
+# Increase the maximum file descriptors if we can.
+if [ "$cygwin" = "false" -a "$darwin" = "false" -a "$nonstop" = "false" ] ; then
+    MAX_FD_LIMIT=`ulimit -H -n`
+    if [ $? -eq 0 ] ; then
+        if [ "$MAX_FD" = "maximum" -o "$MAX_FD" = "max" ] ; then
+            MAX_FD="$MAX_FD_LIMIT"
+        fi
+        ulimit -n $MAX_FD
+        if [ $? -ne 0 ] ; then
+            warn "Could not set maximum file descriptor limit: $MAX_FD"
+        fi
+    else
+        warn "Could not query maximum file descriptor limit: $MAX_FD_LIMIT"
+    fi
+fi
+
+# For Darwin, add options to specify how the application appears in the dock
+if $darwin; then
+    GRADLE_OPTS="$GRADLE_OPTS \"-Xdock:name=$APP_NAME\" \"-Xdock:icon=$APP_HOME/media/gradle.icns\""
+fi
+
+# For Cygwin, switch paths to Windows format before running java
+if $cygwin ; then
+    APP_HOME=`cygpath --path --mixed "$APP_HOME"`
+    CLASSPATH=`cygpath --path --mixed "$CLASSPATH"`
+    JAVACMD=`cygpath --unix "$JAVACMD"`
+
+    # We build the pattern for arguments to be converted via cygpath
+    ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null`
+    SEP=""
+    for dir in $ROOTDIRSRAW ; do
+        ROOTDIRS="$ROOTDIRS$SEP$dir"
+        SEP="|"
+    done
+    OURCYGPATTERN="(^($ROOTDIRS))"
+    # Add a user-defined pattern to the cygpath arguments
+    if [ "$GRADLE_CYGPATTERN" != "" ] ; then
+        OURCYGPATTERN="$OURCYGPATTERN|($GRADLE_CYGPATTERN)"
+    fi
+    # Now convert the arguments - kludge to limit ourselves to /bin/sh
+    i=0
+    for arg in "$@" ; do
+        CHECK=`echo "$arg"|egrep -c "$OURCYGPATTERN" -`
+        CHECK2=`echo "$arg"|egrep -c "^-"`                                 ### Determine if an option
+
+        if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then                    ### Added a condition
+            eval `echo args$i`=`cygpath --path --ignore --mixed "$arg"`
+        else
+            eval `echo args$i`="\"$arg\""
+        fi
+        i=$((i+1))
+    done
+    case $i in
+        (0) set -- ;;
+        (1) set -- "$args0" ;;
+        (2) set -- "$args0" "$args1" ;;
+        (3) set -- "$args0" "$args1" "$args2" ;;
+        (4) set -- "$args0" "$args1" "$args2" "$args3" ;;
+        (5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;;
+        (6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;;
+        (7) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" ;;
+        (8) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" ;;
+        (9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;;
+    esac
+fi
+
+# Escape application args
+save () {
+    for i do printf %s\\n "$i" | sed "s/'/'\\\\''/g;1s/^/'/;\$s/\$/' \\\\/" ; done
+    echo " "
+}
+APP_ARGS=$(save "$@")
+
+# Collect all arguments for the java command, following the shell quoting and substitution rules
+eval set -- $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS "\"-Dorg.gradle.appname=$APP_BASE_NAME\"" -classpath "\"$CLASSPATH\"" org.gradle.wrapper.GradleWrapperMain "$APP_ARGS"
+
+# by default we should be in the correct project dir, but when run from Finder on Mac, the cwd is wrong
+if [ "$(uname)" = "Darwin" ] && [ "$HOME" = "$PWD" ]; then
+  cd "$(dirname "$0")"
+fi
+
+exec "$JAVACMD" "$@"
diff --git a/media/tests/SampleVideoEncoder/gradlew.bat b/media/tests/SampleVideoEncoder/gradlew.bat
new file mode 100644
index 0000000..f955316
--- /dev/null
+++ b/media/tests/SampleVideoEncoder/gradlew.bat
@@ -0,0 +1,84 @@
+@if "%DEBUG%" == "" @echo off
+@rem ##########################################################################
+@rem
+@rem  Gradle startup script for Windows
+@rem
+@rem ##########################################################################
+
+@rem Set local scope for the variables with windows NT shell
+if "%OS%"=="Windows_NT" setlocal
+
+set DIRNAME=%~dp0
+if "%DIRNAME%" == "" set DIRNAME=.
+set APP_BASE_NAME=%~n0
+set APP_HOME=%DIRNAME%
+
+@rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
+set DEFAULT_JVM_OPTS=
+
+@rem Find java.exe
+if defined JAVA_HOME goto findJavaFromJavaHome
+
+set JAVA_EXE=java.exe
+%JAVA_EXE% -version >NUL 2>&1
+if "%ERRORLEVEL%" == "0" goto init
+
+echo.
+echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
+echo.
+echo Please set the JAVA_HOME variable in your environment to match the
+echo location of your Java installation.
+
+goto fail
+
+:findJavaFromJavaHome
+set JAVA_HOME=%JAVA_HOME:"=%
+set JAVA_EXE=%JAVA_HOME%/bin/java.exe
+
+if exist "%JAVA_EXE%" goto init
+
+echo.
+echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME%
+echo.
+echo Please set the JAVA_HOME variable in your environment to match the
+echo location of your Java installation.
+
+goto fail
+
+:init
+@rem Get command-line arguments, handling Windows variants
+
+if not "%OS%" == "Windows_NT" goto win9xME_args
+
+:win9xME_args
+@rem Slurp the command line arguments.
+set CMD_LINE_ARGS=
+set _SKIP=2
+
+:win9xME_args_slurp
+if "x%~1" == "x" goto execute
+
+set CMD_LINE_ARGS=%*
+
+:execute
+@rem Setup the command line
+
+set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar
+
+@rem Execute Gradle
+"%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %CMD_LINE_ARGS%
+
+:end
+@rem End local scope for the variables with windows NT shell
+if "%ERRORLEVEL%"=="0" goto mainEnd
+
+:fail
+rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of
+rem the _cmd.exe /c_ return code!
+if  not "" == "%GRADLE_EXIT_CONSOLE%" exit 1
+exit /b 1
+
+:mainEnd
+if "%OS%"=="Windows_NT" endlocal
+
+:omega
diff --git a/media/tests/SampleVideoEncoder/settings.gradle b/media/tests/SampleVideoEncoder/settings.gradle
new file mode 100644
index 0000000..4d3c3a5
--- /dev/null
+++ b/media/tests/SampleVideoEncoder/settings.gradle
@@ -0,0 +1,2 @@
+include ':app'
+rootProject.name = "SampleVideoEncoder"
\ No newline at end of file
diff --git a/media/utils/Android.bp b/media/utils/Android.bp
index ec77bd0..52dc0cf 100644
--- a/media/utils/Android.bp
+++ b/media/utils/Android.bp
@@ -37,8 +37,11 @@
     ],
     static_libs: [
         "libc_malloc_debug_backtrace",
+        "libbatterystats_aidl",
+        "libprocessinfoservice_aidl",
     ],
     shared_libs: [
+        "libaudioclient_aidl_conversion",
         "libaudioutils", // for clock.h
         "libbinder",
         "libcutils",
@@ -47,6 +50,10 @@
         "libhidlbase",
         "android.hardware.graphics.bufferqueue@1.0",
         "android.hidl.token@1.0-utils",
+        "media_permission-aidl-cpp",
+    ],
+    export_static_lib_headers: [
+        "libbatterystats_aidl",
     ],
 
     logtags: ["EventLogTags.logtags"],
@@ -62,6 +69,10 @@
         "libmedia_headers",
     ],
 
+    export_shared_lib_headers: [
+        "media_permission-aidl-cpp"
+    ],
+
     include_dirs: [
         // For DEBUGGER_SIGNAL
         "system/core/debuggerd/include",
diff --git a/media/utils/ProcessInfo.cpp b/media/utils/ProcessInfo.cpp
index 19225d3..e212794 100644
--- a/media/utils/ProcessInfo.cpp
+++ b/media/utils/ProcessInfo.cpp
@@ -21,9 +21,9 @@
 #include <media/stagefright/ProcessInfo.h>
 
 #include <binder/IPCThreadState.h>
-#include <binder/IProcessInfoService.h>
 #include <binder/IServiceManager.h>
 #include <private/android_filesystem_config.h>
+#include <processinfo/IProcessInfoService.h>
 
 namespace android {
 
diff --git a/media/utils/ServiceUtilities.cpp b/media/utils/ServiceUtilities.cpp
index 2c8a452..e2e1043 100644
--- a/media/utils/ServiceUtilities.cpp
+++ b/media/utils/ServiceUtilities.cpp
@@ -23,6 +23,9 @@
 #include <binder/PermissionCache.h>
 #include "mediautils/ServiceUtilities.h"
 #include <system/audio-hal-enums.h>
+#include <media/AidlConversion.h>
+#include <media/AidlConversionUtil.h>
+#include <android/media/permission/Identity.h>
 
 #include <iterator>
 #include <algorithm>
@@ -37,14 +40,16 @@
 
 namespace android {
 
+using media::permission::Identity;
+
 static const String16 sAndroidPermissionRecordAudio("android.permission.RECORD_AUDIO");
 static const String16 sModifyPhoneState("android.permission.MODIFY_PHONE_STATE");
 static const String16 sModifyAudioRouting("android.permission.MODIFY_AUDIO_ROUTING");
 
 static String16 resolveCallingPackage(PermissionController& permissionController,
-        const String16& opPackageName, uid_t uid) {
-    if (opPackageName.size() > 0) {
-        return opPackageName;
+        const std::optional<String16> opPackageName, uid_t uid) {
+    if (opPackageName.has_value() && opPackageName.value().size() > 0) {
+        return opPackageName.value();
     }
     // In some cases the calling code has no access to the package it runs under.
     // For example, code using the wilhelm framework's OpenSL-ES APIs. In this
@@ -57,7 +62,7 @@
     permissionController.getPackagesForUid(uid, packages);
     if (packages.isEmpty()) {
         ALOGE("No packages for uid %d", uid);
-        return opPackageName; // empty string
+        return String16();
     }
     return packages[0];
 }
@@ -74,47 +79,52 @@
   }
 }
 
-static bool checkRecordingInternal(const String16& opPackageName, pid_t pid,
-        uid_t uid, bool start, audio_source_t source) {
+static bool checkRecordingInternal(const Identity& identity, const String16& msg,
+        bool start, audio_source_t source) {
     // Okay to not track in app ops as audio server or media server is us and if
     // device is rooted security model is considered compromised.
     // system_server loses its RECORD_AUDIO permission when a secondary
     // user is active, but it is a core system service so let it through.
     // TODO(b/141210120): UserManager.DISALLOW_RECORD_AUDIO should not affect system user 0
+    uid_t uid = VALUE_OR_FATAL(aidl2legacy_int32_t_uid_t(identity.uid));
     if (isAudioServerOrMediaServerOrSystemServerOrRootUid(uid)) return true;
 
     // We specify a pid and uid here as mediaserver (aka MediaRecorder or StageFrightRecorder)
     // may open a record track on behalf of a client.  Note that pid may be a tid.
     // IMPORTANT: DON'T USE PermissionCache - RUNTIME PERMISSIONS CHANGE.
     PermissionController permissionController;
-    const bool ok = permissionController.checkPermission(sAndroidPermissionRecordAudio, pid, uid);
+    const bool ok = permissionController.checkPermission(sAndroidPermissionRecordAudio,
+            identity.pid, identity.uid);
     if (!ok) {
         ALOGE("Request requires %s", String8(sAndroidPermissionRecordAudio).c_str());
         return false;
     }
 
     String16 resolvedOpPackageName = resolveCallingPackage(
-            permissionController, opPackageName, uid);
+            permissionController, VALUE_OR_FATAL(aidl2legacy_string_view_String16(
+                identity.packageName.value_or(""))), uid);
     if (resolvedOpPackageName.size() == 0) {
         return false;
     }
 
+
     AppOpsManager appOps;
     const int32_t op = getOpForSource(source);
     if (start) {
-        if (int32_t mode = appOps.startOpNoThrow(
-                        op, uid, resolvedOpPackageName, /*startIfModeDefault*/ false);
-                mode != AppOpsManager::MODE_ALLOWED) {
+        if (int32_t mode = appOps.startOpNoThrow(op, identity.uid,
+            resolvedOpPackageName, /*startIfModeDefault*/ false,
+            VALUE_OR_FATAL(aidl2legacy_optional_string_view_optional_String16(
+                identity.attributionTag)), msg); mode != AppOpsManager::MODE_ALLOWED) {
             ALOGE("Request start for \"%s\" (uid %d) denied by app op: %d, mode: %d",
-                    String8(resolvedOpPackageName).c_str(), uid, op, mode);
+                String8(resolvedOpPackageName).c_str(), identity.uid, op, mode);
             return false;
         }
     } else {
         // Always use OP_RECORD_AUDIO for checks at creation time.
-        if (int32_t mode = appOps.checkOp(op, uid, resolvedOpPackageName);
-                mode != AppOpsManager::MODE_ALLOWED) {
+        if (int32_t mode = appOps.checkOp(op, uid,
+            resolvedOpPackageName); mode != AppOpsManager::MODE_ALLOWED) {
             ALOGE("Request check for \"%s\" (uid %d) denied by app op: %d, mode: %d",
-                    String8(resolvedOpPackageName).c_str(), uid, op, mode);
+                String8(resolvedOpPackageName).c_str(), identity.uid, op, mode);
             return false;
         }
     }
@@ -122,22 +132,25 @@
     return true;
 }
 
-bool recordingAllowed(const String16& opPackageName, pid_t pid, uid_t uid) {
-    return checkRecordingInternal(opPackageName, pid, uid, /*start*/ false, AUDIO_SOURCE_DEFAULT);
+bool recordingAllowed(const Identity& identity) {
+    return checkRecordingInternal(identity, String16(), /*start*/ false, AUDIO_SOURCE_DEFAULT);
 }
 
-bool startRecording(const String16& opPackageName, pid_t pid, uid_t uid, audio_source_t source) {
-     return checkRecordingInternal(opPackageName, pid, uid, /*start*/ true, source);
+bool startRecording(const Identity& identity, const String16& msg, audio_source_t source) {
+     return checkRecordingInternal(identity, msg, /*start*/ true, source);
 }
 
-void finishRecording(const String16& opPackageName, uid_t uid, audio_source_t source) {
+void finishRecording(const Identity& identity, audio_source_t source) {
     // Okay to not track in app ops as audio server is us and if
     // device is rooted security model is considered compromised.
+    uid_t uid = VALUE_OR_FATAL(aidl2legacy_int32_t_uid_t(identity.uid));
     if (isAudioServerOrRootUid(uid)) return;
 
     PermissionController permissionController;
     String16 resolvedOpPackageName = resolveCallingPackage(
-            permissionController, opPackageName, uid);
+            permissionController,
+            VALUE_OR_FATAL(aidl2legacy_string_view_String16(identity.packageName.value_or(""))),
+            VALUE_OR_FATAL(aidl2legacy_int32_t_uid_t(identity.uid)));
     if (resolvedOpPackageName.size() == 0) {
         return;
     }
@@ -145,10 +158,14 @@
     AppOpsManager appOps;
 
     const int32_t op = getOpForSource(source);
-    appOps.finishOp(op, uid, resolvedOpPackageName);
+    appOps.finishOp(op, identity.uid, resolvedOpPackageName,
+        VALUE_OR_FATAL(aidl2legacy_optional_string_view_optional_String16(
+            identity.attributionTag)));
 }
 
-bool captureAudioOutputAllowed(pid_t pid, uid_t uid) {
+bool captureAudioOutputAllowed(const Identity& identity) {
+    uid_t uid = VALUE_OR_FATAL(aidl2legacy_int32_t_uid_t(identity.uid));
+    pid_t pid = VALUE_OR_FATAL(aidl2legacy_int32_t_pid_t(identity.pid));
     if (isAudioServerOrRootUid(uid)) return true;
     static const String16 sCaptureAudioOutput("android.permission.CAPTURE_AUDIO_OUTPUT");
     bool ok = PermissionCache::checkPermission(sCaptureAudioOutput, pid, uid);
@@ -156,7 +173,9 @@
     return ok;
 }
 
-bool captureMediaOutputAllowed(pid_t pid, uid_t uid) {
+bool captureMediaOutputAllowed(const Identity& identity) {
+    uid_t uid = VALUE_OR_FATAL(aidl2legacy_int32_t_uid_t(identity.uid));
+    pid_t pid = VALUE_OR_FATAL(aidl2legacy_int32_t_pid_t(identity.pid));
     if (isAudioServerOrRootUid(uid)) return true;
     static const String16 sCaptureMediaOutput("android.permission.CAPTURE_MEDIA_OUTPUT");
     bool ok = PermissionCache::checkPermission(sCaptureMediaOutput, pid, uid);
@@ -164,7 +183,9 @@
     return ok;
 }
 
-bool captureTunerAudioInputAllowed(pid_t pid, uid_t uid) {
+bool captureTunerAudioInputAllowed(const Identity& identity) {
+    uid_t uid = VALUE_OR_FATAL(aidl2legacy_int32_t_uid_t(identity.uid));
+    pid_t pid = VALUE_OR_FATAL(aidl2legacy_int32_t_pid_t(identity.pid));
     if (isAudioServerOrRootUid(uid)) return true;
     static const String16 sCaptureTunerAudioInput("android.permission.CAPTURE_TUNER_AUDIO_INPUT");
     bool ok = PermissionCache::checkPermission(sCaptureTunerAudioInput, pid, uid);
@@ -172,7 +193,9 @@
     return ok;
 }
 
-bool captureVoiceCommunicationOutputAllowed(pid_t pid, uid_t uid) {
+bool captureVoiceCommunicationOutputAllowed(const Identity& identity) {
+    uid_t uid = VALUE_OR_FATAL(aidl2legacy_int32_t_uid_t(identity.uid));
+    pid_t pid = VALUE_OR_FATAL(aidl2legacy_int32_t_pid_t(identity.pid));
     if (isAudioServerOrRootUid(uid)) return true;
     static const String16 sCaptureVoiceCommOutput(
         "android.permission.CAPTURE_VOICE_COMMUNICATION_OUTPUT");
@@ -181,9 +204,11 @@
     return ok;
 }
 
-bool captureHotwordAllowed(const String16& opPackageName, pid_t pid, uid_t uid) {
+bool captureHotwordAllowed(const Identity& identity) {
+    uid_t uid = VALUE_OR_FATAL(aidl2legacy_int32_t_uid_t(identity.uid));
+    pid_t pid = VALUE_OR_FATAL(aidl2legacy_int32_t_pid_t(identity.pid));
     // CAPTURE_AUDIO_HOTWORD permission implies RECORD_AUDIO permission
-    bool ok = recordingAllowed(opPackageName, pid, uid);
+    bool ok = recordingAllowed(identity);
 
     if (ok) {
         static const String16 sCaptureHotwordAllowed("android.permission.CAPTURE_AUDIO_HOTWORD");
@@ -205,11 +230,12 @@
 }
 
 bool modifyAudioRoutingAllowed() {
-    return modifyAudioRoutingAllowed(
-        IPCThreadState::self()->getCallingPid(), IPCThreadState::self()->getCallingUid());
+    return modifyAudioRoutingAllowed(getCallingIdentity());
 }
 
-bool modifyAudioRoutingAllowed(pid_t pid, uid_t uid) {
+bool modifyAudioRoutingAllowed(const Identity& identity) {
+    uid_t uid = VALUE_OR_FATAL(aidl2legacy_int32_t_uid_t(identity.uid));
+    pid_t pid = VALUE_OR_FATAL(aidl2legacy_int32_t_pid_t(identity.pid));
     if (isAudioServerUid(IPCThreadState::self()->getCallingUid())) return true;
     // IMPORTANT: Use PermissionCache - not a runtime permission and may not change.
     bool ok = PermissionCache::checkPermission(sModifyAudioRouting, pid, uid);
@@ -219,11 +245,12 @@
 }
 
 bool modifyDefaultAudioEffectsAllowed() {
-    return modifyDefaultAudioEffectsAllowed(
-        IPCThreadState::self()->getCallingPid(), IPCThreadState::self()->getCallingUid());
+    return modifyDefaultAudioEffectsAllowed(getCallingIdentity());
 }
 
-bool modifyDefaultAudioEffectsAllowed(pid_t pid, uid_t uid) {
+bool modifyDefaultAudioEffectsAllowed(const Identity& identity) {
+    uid_t uid = VALUE_OR_FATAL(aidl2legacy_int32_t_uid_t(identity.uid));
+    pid_t pid = VALUE_OR_FATAL(aidl2legacy_int32_t_pid_t(identity.pid));
     if (isAudioServerUid(IPCThreadState::self()->getCallingUid())) return true;
 
     static const String16 sModifyDefaultAudioEffectsAllowed(
@@ -244,14 +271,18 @@
     return ok;
 }
 
-bool modifyPhoneStateAllowed(pid_t pid, uid_t uid) {
+bool modifyPhoneStateAllowed(const Identity& identity) {
+    uid_t uid = VALUE_OR_FATAL(aidl2legacy_int32_t_uid_t(identity.uid));
+    pid_t pid = VALUE_OR_FATAL(aidl2legacy_int32_t_pid_t(identity.pid));
     bool ok = PermissionCache::checkPermission(sModifyPhoneState, pid, uid);
     ALOGE_IF(!ok, "Request requires %s", String8(sModifyPhoneState).c_str());
     return ok;
 }
 
 // privileged behavior needed by Dialer, Settings, SetupWizard and CellBroadcastReceiver
-bool bypassInterruptionPolicyAllowed(pid_t pid, uid_t uid) {
+bool bypassInterruptionPolicyAllowed(const Identity& identity) {
+    uid_t uid = VALUE_OR_FATAL(aidl2legacy_int32_t_uid_t(identity.uid));
+    pid_t pid = VALUE_OR_FATAL(aidl2legacy_int32_t_pid_t(identity.pid));
     static const String16 sWriteSecureSettings("android.permission.WRITE_SECURE_SETTINGS");
     bool ok = PermissionCache::checkPermission(sModifyPhoneState, pid, uid)
         || PermissionCache::checkPermission(sWriteSecureSettings, pid, uid)
@@ -261,6 +292,13 @@
     return ok;
 }
 
+Identity getCallingIdentity() {
+  Identity identity = Identity();
+  identity.pid = VALUE_OR_FATAL(legacy2aidl_pid_t_int32_t(IPCThreadState::self()->getCallingPid()));
+  identity.uid = VALUE_OR_FATAL(legacy2aidl_uid_t_int32_t(IPCThreadState::self()->getCallingUid()));
+  return identity;
+}
+
 status_t checkIMemory(const sp<IMemory>& iMemory)
 {
     if (iMemory == 0) {
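A minimal usage sketch of the Identity-based helpers introduced above (not part of the patch; it assumes it runs inside namespace android with mediautils/ServiceUtilities.h included, and the package name is hypothetical):

    using media::permission::Identity;

    Identity identity = getCallingIdentity();               // pid/uid of the binder caller
    identity.packageName = std::string("com.example.app");  // hypothetical package name
    if (!recordingAllowed(identity)) {
        return PERMISSION_DENIED;
    }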
diff --git a/media/utils/TimeCheck.cpp b/media/utils/TimeCheck.cpp
index 59d74de..819e146 100644
--- a/media/utils/TimeCheck.cpp
+++ b/media/utils/TimeCheck.cpp
@@ -39,10 +39,9 @@
     static std::atomic<int> curAudioHalPids = 0;
 
     if (update) {
-        audioHalPids[(curAudioHalPids + 1) % kNumAudioHalPidsVectors] = *pids;
-        curAudioHalPids++;
+        audioHalPids[(curAudioHalPids++ + 1) % kNumAudioHalPidsVectors] = *pids;
     } else {
-        *pids = audioHalPids[curAudioHalPids];
+        *pids = audioHalPids[curAudioHalPids % kNumAudioHalPidsVectors];
     }
 }
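The updated accessor above fills slot (index + 1) and only then bumps the index, so a reader indexing with curAudioHalPids % kNumAudioHalPidsVectors keeps returning the previous snapshot until the write is published. A reduced sketch of the same scheme (names, slot count and includes are illustrative, not from the patch; like the original, the vector copy itself is not atomic):

    #include <atomic>
    #include <vector>
    #include <sys/types.h>

    static constexpr int kSlots = 3;
    static std::vector<pid_t> sSlots[kSlots];
    static std::atomic<int> sIndex{0};

    static void publish(const std::vector<pid_t>& pids) {
        sSlots[(sIndex + 1) % kSlots] = pids;  // fill the next slot first
        ++sIndex;                              // then make it the slot readers pick up
    }

    static std::vector<pid_t> snapshot() {
        return sSlots[sIndex % kSlots];
    }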
 
diff --git a/media/utils/fuzzers/Android.bp b/media/utils/fuzzers/Android.bp
index 80882b2..b245834 100644
--- a/media/utils/fuzzers/Android.bp
+++ b/media/utils/fuzzers/Android.bp
@@ -10,11 +10,13 @@
 cc_defaults {
     name: "libmediautils_fuzzer_defaults",
     shared_libs: [
+        "libbatterystats_aidl",
         "libbinder",
         "libcutils",
         "liblog",
         "libmediautils",
         "libutils",
+        "media_permission-aidl-cpp",
     ],
 
     cflags: [
diff --git a/media/utils/fuzzers/SchedulingPolicyServiceFuzz.cpp b/media/utils/fuzzers/SchedulingPolicyServiceFuzz.cpp
index 4521853..130feee 100644
--- a/media/utils/fuzzers/SchedulingPolicyServiceFuzz.cpp
+++ b/media/utils/fuzzers/SchedulingPolicyServiceFuzz.cpp
@@ -14,7 +14,7 @@
  * limitations under the License.
  */
 #define LOG_TAG "BatteryNotifierFuzzer"
-#include <binder/IBatteryStats.h>
+#include <batterystats/IBatteryStats.h>
 #include <binder/IServiceManager.h>
 #include <utils/String16.h>
 #include <android/log.h>
diff --git a/media/utils/fuzzers/ServiceUtilitiesFuzz.cpp b/media/utils/fuzzers/ServiceUtilitiesFuzz.cpp
index f4c815c..2f9e780 100644
--- a/media/utils/fuzzers/ServiceUtilitiesFuzz.cpp
+++ b/media/utils/fuzzers/ServiceUtilitiesFuzz.cpp
@@ -19,12 +19,15 @@
 #include <functional>
 #include  <type_traits>
 
+#include <android/media/permission/Identity.h>
 #include "fuzzer/FuzzedDataProvider.h"
 #include "mediautils/ServiceUtilities.h"
 
 static constexpr int kMaxOperations = 50;
 static constexpr int kMaxStringLen = 256;
 
+using android::media::permission::Identity;
+
 const std::vector<std::function<void(FuzzedDataProvider*, android::MediaPackageManager)>>
     operations = {
         [](FuzzedDataProvider* data_provider, android::MediaPackageManager pm) -> void {
@@ -43,27 +46,33 @@
 
 extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
     FuzzedDataProvider data_provider(data, size);
-    uid_t uid = data_provider.ConsumeIntegral<uid_t>();
-    pid_t pid = data_provider.ConsumeIntegral<pid_t>();
+    int32_t uid = data_provider.ConsumeIntegral<int32_t>();
+    int32_t pid = data_provider.ConsumeIntegral<int32_t>();
     audio_source_t source = static_cast<audio_source_t>(data_provider
         .ConsumeIntegral<std::underlying_type_t<audio_source_t>>());
 
+    std::string packageNameStr = data_provider.ConsumeRandomLengthString(kMaxStringLen);
+    std::string msgStr = data_provider.ConsumeRandomLengthString(kMaxStringLen);
+    android::String16 msgStr16(msgStr.c_str());
+    Identity identity;
+    identity.packageName = packageNameStr;
+    identity.uid = uid;
+    identity.pid = pid;
+
     // There is not state here, and order is not significant,
     // so we can simply call all of the target functions
     android::isServiceUid(uid);
     android::isAudioServerUid(uid);
     android::isAudioServerOrSystemServerUid(uid);
     android::isAudioServerOrMediaServerUid(uid);
-    std::string packageNameStr = data_provider.ConsumeRandomLengthString(kMaxStringLen);
-    android::String16 opPackageName(packageNameStr.c_str());
-    android::recordingAllowed(opPackageName, pid, uid);
-    android::startRecording(opPackageName, pid, uid, source);
-    android::finishRecording(opPackageName, uid, source);
-    android::captureAudioOutputAllowed(pid, uid);
-    android::captureMediaOutputAllowed(pid, uid);
-    android::captureHotwordAllowed(opPackageName, pid, uid);
-    android::modifyPhoneStateAllowed(uid, pid);
-    android::bypassInterruptionPolicyAllowed(uid, pid);
+    android::recordingAllowed(identity);
+    android::startRecording(identity, msgStr16, source);
+    android::finishRecording(identity, source);
+    android::captureAudioOutputAllowed(identity);
+    android::captureMediaOutputAllowed(identity);
+    android::captureHotwordAllowed(identity);
+    android::modifyPhoneStateAllowed(identity);
+    android::bypassInterruptionPolicyAllowed(identity);
     android::settingsAllowed();
     android::modifyAudioRoutingAllowed();
     android::modifyDefaultAudioEffectsAllowed();
diff --git a/media/utils/include/mediautils/BatteryNotifier.h b/media/utils/include/mediautils/BatteryNotifier.h
index a4e42ad..3812d7a 100644
--- a/media/utils/include/mediautils/BatteryNotifier.h
+++ b/media/utils/include/mediautils/BatteryNotifier.h
@@ -17,7 +17,7 @@
 #ifndef MEDIA_BATTERY_NOTIFIER_H
 #define MEDIA_BATTERY_NOTIFIER_H
 
-#include <binder/IBatteryStats.h>
+#include <batterystats/IBatteryStats.h>
 #include <utils/Singleton.h>
 #include <utils/String8.h>
 
diff --git a/media/utils/include/mediautils/ServiceUtilities.h b/media/utils/include/mediautils/ServiceUtilities.h
index 03965db..9a3c6fb 100644
--- a/media/utils/include/mediautils/ServiceUtilities.h
+++ b/media/utils/include/mediautils/ServiceUtilities.h
@@ -25,6 +25,7 @@
 #include <cutils/multiuser.h>
 #include <private/android_filesystem_config.h>
 #include <system/audio-hal-enums.h>
+#include <android/media/permission/Identity.h>
 
 #include <map>
 #include <optional>
@@ -79,22 +80,25 @@
     }
 }
 
-bool recordingAllowed(const String16& opPackageName, pid_t pid, uid_t uid);
-bool startRecording(const String16& opPackageName, pid_t pid, uid_t uid, audio_source_t source);
-void finishRecording(const String16& opPackageName, uid_t uid, audio_source_t source);
-bool captureAudioOutputAllowed(pid_t pid, uid_t uid);
-bool captureMediaOutputAllowed(pid_t pid, uid_t uid);
-bool captureTunerAudioInputAllowed(pid_t pid, uid_t uid);
-bool captureVoiceCommunicationOutputAllowed(pid_t pid, uid_t uid);
-bool captureHotwordAllowed(const String16& opPackageName, pid_t pid, uid_t uid);
+bool recordingAllowed(const media::permission::Identity& identity);
+bool startRecording(const media::permission::Identity& identity,
+    const String16& msg, audio_source_t source);
+void finishRecording(const media::permission::Identity& identity, audio_source_t source);
+bool captureAudioOutputAllowed(const media::permission::Identity& identity);
+bool captureMediaOutputAllowed(const media::permission::Identity& identity);
+bool captureTunerAudioInputAllowed(const media::permission::Identity& identity);
+bool captureVoiceCommunicationOutputAllowed(const media::permission::Identity& identity);
+bool captureHotwordAllowed(const media::permission::Identity& identity);
 bool settingsAllowed();
 bool modifyAudioRoutingAllowed();
-bool modifyAudioRoutingAllowed(pid_t pid, uid_t uid);
+bool modifyAudioRoutingAllowed(const media::permission::Identity& identity);
 bool modifyDefaultAudioEffectsAllowed();
-bool modifyDefaultAudioEffectsAllowed(pid_t pid, uid_t uid);
+bool modifyDefaultAudioEffectsAllowed(const media::permission::Identity& identity);
 bool dumpAllowed();
-bool modifyPhoneStateAllowed(pid_t pid, uid_t uid);
-bool bypassInterruptionPolicyAllowed(pid_t pid, uid_t uid);
+bool modifyPhoneStateAllowed(const media::permission::Identity& identity);
+bool bypassInterruptionPolicyAllowed(const media::permission::Identity& identity);
+
+media::permission::Identity getCallingIdentity();
 
 status_t checkIMemory(const sp<IMemory>& iMemory);
 
diff --git a/media/utils/tests/Android.bp b/media/utils/tests/Android.bp
index bb413c3..6593d56 100644
--- a/media/utils/tests/Android.bp
+++ b/media/utils/tests/Android.bp
@@ -1,3 +1,12 @@
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
 cc_test {
     name: "media_synchronization_tests",
 
diff --git a/services/audioflinger/Android.bp b/services/audioflinger/Android.bp
index 518ef9a..a7d47fb 100644
--- a/services/audioflinger/Android.bp
+++ b/services/audioflinger/Android.bp
@@ -74,12 +74,14 @@
         "libmediautils",
         "libnbaio",
         "libnblog",
+        "libpermission",
         "libpowermanager",
         "libmediautils",
         "libmemunreachable",
         "libmedia_helper",
         "libshmemcompat",
         "libvibrator",
+        "media_permission-aidl-cpp",
     ],
 
     static_libs: [
@@ -93,6 +95,11 @@
         "libmedia_headers",
     ],
 
+    export_shared_lib_headers: [
+        "libpermission",
+        "media_permission-aidl-cpp",
+    ],
+
     cflags: [
         "-DSTATE_QUEUE_INSTANTIATIONS=\"StateQueueInstantiations.cpp\"",
         "-fvisibility=hidden",
diff --git a/services/audioflinger/AudioFlinger.cpp b/services/audioflinger/AudioFlinger.cpp
index dc2c171..7cdac30 100644
--- a/services/audioflinger/AudioFlinger.cpp
+++ b/services/audioflinger/AudioFlinger.cpp
@@ -85,15 +85,6 @@
 
 #include "TypedLogger.h"
 
-#define VALUE_OR_FATAL(result)                   \
-    ({                                           \
-       auto _tmp = (result);                     \
-       LOG_ALWAYS_FATAL_IF(!_tmp.ok(),           \
-                           "Failed result (%d)", \
-                           _tmp.error());        \
-       std::move(_tmp.value());                  \
-     })
-
 // ----------------------------------------------------------------------------
 
 // Note: the following macro is used for extremely verbose logging message.  In
@@ -112,6 +103,7 @@
 namespace android {
 
 using media::IEffectClient;
+using media::permission::Identity;
 
 static const char kDeadlockedString[] = "AudioFlinger may be deadlocked\n";
 static const char kHardwareLockedString[] = "Hardware lock is taken\n";
@@ -207,10 +199,21 @@
       mDeviceEffectManager(this),
       mSystemReady(false)
 {
+    // Move the audio session unique ID generator start base as time passes to limit risk of
+    // generating the same ID again after an audioserver restart.
+    // This is important because clients will reuse previously allocated audio session IDs
+    // when reconnecting after an audioserver restart and newly allocated IDs may conflict with
+    // active clients.
+    // Moving the base by 1 for each elapsed second is a good compromise between avoiding overlap
+    // between allocation ranges and not reaching wrap around too soon.
+    timespec ts{};
+    clock_gettime(CLOCK_MONOTONIC, &ts);
+    // zero ID has a special meaning, so start allocation at least at AUDIO_UNIQUE_ID_USE_MAX
+    uint32_t sessionBase = (uint32_t)std::max((long)1, ts.tv_sec);
     // unsigned instead of audio_unique_id_use_t, because ++ operator is unavailable for enum
     for (unsigned use = AUDIO_UNIQUE_ID_USE_UNSPECIFIED; use < AUDIO_UNIQUE_ID_USE_MAX; use++) {
-        // zero ID has a special meaning, so unavailable
-        mNextUniqueIds[use] = AUDIO_UNIQUE_ID_USE_MAX;
+        mNextUniqueIds[use] =
+                ((use == AUDIO_UNIQUE_ID_USE_SESSION) ? sessionBase : 1) * AUDIO_UNIQUE_ID_USE_MAX;
     }
 
 #if 1
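A worked example of the initialization above, assuming AUDIO_UNIQUE_ID_USE_MAX is 10 (the actual value is not shown in this diff): if audioserver starts 5000 seconds after boot, sessionBase is 5000, so the session counter starts at 50000 while every other use starts at 10; after a restart one minute later the session counter would start at 50600, which is how the base keeps moving ahead of session IDs allocated before the restart.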
@@ -275,6 +278,21 @@
   return NO_ERROR;
 }
 
+status_t AudioFlinger::setVibratorInfos(
+        const std::vector<media::AudioVibratorInfo>& vibratorInfos) {
+    Mutex::Autolock _l(mLock);
+    mAudioVibratorInfos = vibratorInfos;
+    return NO_ERROR;
+}
+
+// getDefaultVibratorInfo_l must be called with AudioFlinger lock held.
+const media::AudioVibratorInfo* AudioFlinger::getDefaultVibratorInfo_l() {
+    if (mAudioVibratorInfos.empty()) {
+        return nullptr;
+    }
+    return &mAudioVibratorInfos.front();
+}
+
 AudioFlinger::~AudioFlinger()
 {
     while (!mRecordThreads.isEmpty()) {
@@ -364,7 +382,7 @@
 
         ret = AudioSystem::getOutputForAttr(&localAttr, &io,
                                             actualSessionId,
-                                            &streamType, client.clientPid, client.clientUid,
+                                            &streamType, client.identity,
                                             &fullConfig,
                                             (audio_output_flags_t)(AUDIO_OUTPUT_FLAG_MMAP_NOIRQ |
                                                     AUDIO_OUTPUT_FLAG_DIRECT),
@@ -375,9 +393,7 @@
         ret = AudioSystem::getInputForAttr(&localAttr, &io,
                                               RECORD_RIID_INVALID,
                                               actualSessionId,
-                                              client.clientPid,
-                                              client.clientUid,
-                                              client.packageName,
+                                              client.identity,
                                               config,
                                               AUDIO_INPUT_FLAG_MMAP_NOIRQ, deviceId, &portId);
     }
@@ -413,13 +429,18 @@
 /* static */
 int AudioFlinger::onExternalVibrationStart(const sp<os::ExternalVibration>& externalVibration) {
     sp<os::IExternalVibratorService> evs = getExternalVibratorService();
-    if (evs != 0) {
+    if (evs != nullptr) {
         int32_t ret;
         binder::Status status = evs->onExternalVibrationStart(*externalVibration, &ret);
         if (status.isOk()) {
+            ALOGD("%s, start external vibration with intensity as %d", __func__, ret);
             return ret;
         }
     }
+    ALOGD("%s, start external vibration with intensity as MUTE due to %s",
+            __func__,
+            evs == nullptr ? "external vibration service not found"
+                           : "error when querying intensity");
     return static_cast<int>(os::HapticScale::MUTE);
 }
 
@@ -776,27 +797,33 @@
     audio_port_handle_t portId = AUDIO_PORT_HANDLE_NONE;
     std::vector<audio_io_handle_t> secondaryOutputs;
 
-    bool updatePid = (input.clientInfo.clientPid == -1);
+    // TODO b/182392553: refactor or make clearer
+    pid_t clientPid =
+        VALUE_OR_RETURN_STATUS(aidl2legacy_int32_t_pid_t(input.clientInfo.identity.pid));
+    bool updatePid = (clientPid == (pid_t)-1);
     const uid_t callingUid = IPCThreadState::self()->getCallingUid();
-    uid_t clientUid = input.clientInfo.clientUid;
+    uid_t clientUid =
+        VALUE_OR_RETURN_STATUS(aidl2legacy_int32_t_uid_t(input.clientInfo.identity.uid));
     audio_io_handle_t effectThreadId = AUDIO_IO_HANDLE_NONE;
     std::vector<int> effectIds;
     audio_attributes_t localAttr = input.attr;
 
+    Identity adjIdentity = input.clientInfo.identity;
     if (!isAudioServerOrMediaServerUid(callingUid)) {
         ALOGW_IF(clientUid != callingUid,
                 "%s uid %d tried to pass itself off as %d",
                 __FUNCTION__, callingUid, clientUid);
+        adjIdentity.uid = VALUE_OR_RETURN_STATUS(legacy2aidl_uid_t_int32_t(callingUid));
         clientUid = callingUid;
         updatePid = true;
     }
-    pid_t clientPid = input.clientInfo.clientPid;
     const pid_t callingPid = IPCThreadState::self()->getCallingPid();
     if (updatePid) {
-        ALOGW_IF(clientPid != -1 && clientPid != callingPid,
+        ALOGW_IF(clientPid != (pid_t)-1 && clientPid != callingPid,
                  "%s uid %d pid %d tried to pass itself off as pid %d",
                  __func__, callingUid, callingPid, clientPid);
         clientPid = callingPid;
+        adjIdentity.pid = VALUE_OR_RETURN_STATUS(legacy2aidl_pid_t_int32_t(callingPid));
     }
 
     audio_session_t sessionId = input.sessionId;
@@ -811,7 +838,7 @@
     output.outputId = AUDIO_IO_HANDLE_NONE;
     output.selectedDeviceId = input.selectedDeviceId;
     lStatus = AudioSystem::getOutputForAttr(&localAttr, &output.outputId, sessionId, &streamType,
-                                            clientPid, clientUid, &input.config, input.flags,
+                                            adjIdentity, &input.config, input.flags,
                                             &output.selectedDeviceId, &portId, &secondaryOutputs);
 
     if (lStatus != NO_ERROR || output.outputId == AUDIO_IO_HANDLE_NONE) {
@@ -876,9 +903,8 @@
                                       &output.frameCount, &output.notificationFrameCount,
                                       input.notificationsPerBuffer, input.speed,
                                       input.sharedBuffer, sessionId, &output.flags,
-                                      callingPid, input.clientInfo.clientTid, clientUid,
-                                      &lStatus, portId, input.audioTrackCallback,
-                                      input.opPackageName);
+                                      callingPid, adjIdentity, input.clientInfo.clientTid,
+                                      &lStatus, portId, input.audioTrackCallback);
         LOG_ALWAYS_FATAL_IF((lStatus == NO_ERROR) && (track == 0));
         // we don't abort yet if lStatus != NO_ERROR; there is still work to be done regardless
 
@@ -2036,23 +2062,25 @@
     output.buffers.clear();
     output.inputId = AUDIO_IO_HANDLE_NONE;
 
-    bool updatePid = (input.clientInfo.clientPid == -1);
+    // TODO b/182392553: refactor or clean up
+    Identity adjIdentity = input.clientInfo.identity;
+    bool updatePid = (adjIdentity.pid == -1);
     const uid_t callingUid = IPCThreadState::self()->getCallingUid();
-    uid_t clientUid = input.clientInfo.clientUid;
+    const uid_t currentUid = VALUE_OR_RETURN_STATUS(aidl2legacy_int32_t_uid_t(adjIdentity.uid));
     if (!isAudioServerOrMediaServerUid(callingUid)) {
-        ALOGW_IF(clientUid != callingUid,
+        ALOGW_IF(currentUid != callingUid,
                 "%s uid %d tried to pass itself off as %d",
-                __FUNCTION__, callingUid, clientUid);
-        clientUid = callingUid;
+                __FUNCTION__, callingUid, currentUid);
+        adjIdentity.uid = VALUE_OR_RETURN_STATUS(legacy2aidl_uid_t_int32_t(callingUid));
         updatePid = true;
     }
-    pid_t clientPid = input.clientInfo.clientPid;
     const pid_t callingPid = IPCThreadState::self()->getCallingPid();
+    const pid_t currentPid = VALUE_OR_RETURN_STATUS(aidl2legacy_int32_t_pid_t(adjIdentity.pid));
     if (updatePid) {
-        ALOGW_IF(clientPid != -1 && clientPid != callingPid,
+        ALOGW_IF(currentPid != (pid_t)-1 && currentPid != callingPid,
                  "%s uid %d pid %d tried to pass itself off as pid %d",
-                 __func__, callingUid, callingPid, clientPid);
-        clientPid = callingPid;
+                 __func__, callingUid, callingPid, currentPid);
+        adjIdentity.pid = VALUE_OR_RETURN_STATUS(legacy2aidl_pid_t_int32_t(callingPid));
     }
 
     // we don't yet support anything other than linear PCM
@@ -2080,7 +2108,7 @@
     output.selectedDeviceId = input.selectedDeviceId;
     output.flags = input.flags;
 
-    client = registerPid(clientPid);
+    client = registerPid(VALUE_OR_FATAL(aidl2legacy_int32_t_pid_t(adjIdentity.pid)));
 
     // Not a conventional loop, but a retry loop for at most two iterations total.
     // Try first maybe with FAST flag then try again without FAST flag if that fails.
@@ -2100,9 +2128,7 @@
                                       input.riid,
                                       sessionId,
                                     // FIXME compare to AudioTrack
-                                      clientPid,
-                                      clientUid,
-                                      input.opPackageName,
+                                      adjIdentity,
                                       &input.config,
                                       output.flags, &output.selectedDeviceId, &portId);
     if (lStatus != NO_ERROR) {
@@ -2129,10 +2155,9 @@
                                                   input.config.format, input.config.channel_mask,
                                                   &output.frameCount, sessionId,
                                                   &output.notificationFrameCount,
-                                                  callingPid, clientUid, &output.flags,
+                                                  callingPid, adjIdentity, &output.flags,
                                                   input.clientInfo.clientTid,
-                                                  &lStatus, portId,
-                                                  input.opPackageName);
+                                                  &lStatus, portId);
         LOG_ALWAYS_FATAL_IF((lStatus == NO_ERROR) && (recordTrack == 0));
 
         // lStatus == BAD_TYPE means FAST flag was rejected: request a new input from
@@ -3525,9 +3550,7 @@
     const int32_t priority = request.priority;
     const AudioDeviceTypeAddr device = VALUE_OR_RETURN_STATUS(
             aidl2legacy_AudioDeviceTypeAddress(request.device));
-    const String16 opPackageName = VALUE_OR_RETURN_STATUS(
-            aidl2legacy_string_view_String16(request.opPackageName));
-    pid_t pid = VALUE_OR_RETURN_STATUS(aidl2legacy_int32_t_pid_t(request.pid));
+    Identity adjIdentity = request.identity;
     const audio_session_t sessionId = VALUE_OR_RETURN_STATUS(
             aidl2legacy_int32_t_audio_session_t(request.sessionId));
     audio_io_handle_t io = VALUE_OR_RETURN_STATUS(
@@ -3543,17 +3566,21 @@
 
     status_t lStatus = NO_ERROR;
 
+    // TODO b/182392553: refactor or make clearer
     const uid_t callingUid = IPCThreadState::self()->getCallingUid();
-    if (pid == -1 || !isAudioServerOrMediaServerUid(callingUid)) {
+    adjIdentity.uid = VALUE_OR_RETURN_STATUS(legacy2aidl_uid_t_int32_t(callingUid));
+    pid_t currentPid = VALUE_OR_RETURN_STATUS(aidl2legacy_int32_t_pid_t(adjIdentity.pid));
+    if (currentPid == -1 || !isAudioServerOrMediaServerUid(callingUid)) {
         const pid_t callingPid = IPCThreadState::self()->getCallingPid();
-        ALOGW_IF(pid != -1 && pid != callingPid,
+        ALOGW_IF(currentPid != -1 && currentPid != callingPid,
                  "%s uid %d pid %d tried to pass itself off as pid %d",
-                 __func__, callingUid, callingPid, pid);
-        pid = callingPid;
+                 __func__, callingUid, callingPid, currentPid);
+        adjIdentity.pid = VALUE_OR_RETURN_STATUS(legacy2aidl_pid_t_int32_t(callingPid));
+        currentPid = callingPid;
     }
 
     ALOGV("createEffect pid %d, effectClient %p, priority %d, sessionId %d, io %d, factory %p",
-          pid, effectClient.get(), priority, sessionId, io, mEffectsFactoryHal.get());
+          adjIdentity.pid, effectClient.get(), priority, sessionId, io, mEffectsFactoryHal.get());
 
     if (mEffectsFactoryHal == 0) {
         ALOGE("%s: no effects factory hal", __func__);
@@ -3581,7 +3608,7 @@
             goto Exit;
         }
     } else if (sessionId == AUDIO_SESSION_DEVICE) {
-        if (!modifyDefaultAudioEffectsAllowed(pid, callingUid)) {
+        if (!modifyDefaultAudioEffectsAllowed(adjIdentity)) {
             ALOGE("%s: device effect permission denied for uid %d", __func__, callingUid);
             lStatus = PERMISSION_DENIED;
             goto Exit;
@@ -3626,7 +3653,7 @@
         // check recording permission for visualizer
         if ((memcmp(&descOut.type, SL_IID_VISUALIZATION, sizeof(effect_uuid_t)) == 0) &&
             // TODO: Do we need to start/stop op - i.e. is there recording being performed?
-            !recordingAllowed(opPackageName, pid, callingUid)) {
+            !recordingAllowed(adjIdentity)) {
             lStatus = PERMISSION_DENIED;
             goto Exit;
         }
@@ -3652,7 +3679,7 @@
         Mutex::Autolock _l(mLock);
 
         if (sessionId == AUDIO_SESSION_DEVICE) {
-            sp<Client> client = registerPid(pid);
+            sp<Client> client = registerPid(currentPid);
             ALOGV("%s device type %#x address %s", __func__, device.mType, device.getAddress());
             handle = mDeviceEffectManager.createEffect_l(
                     &descOut, device, client, effectClient, mPatchPanel.patches_l(),
@@ -3756,7 +3783,7 @@
             }
         }
 
-        sp<Client> client = registerPid(pid);
+        sp<Client> client = registerPid(currentPid);
 
         // create effect on selected output thread
         bool pinned = !audio_is_global_session(sessionId) && isSessionAcquired_l(sessionId);
@@ -4110,7 +4137,8 @@
         case TransactionCode::SET_MIC_MUTE:
         case TransactionCode::SET_LOW_RAM_DEVICE:
         case TransactionCode::SYSTEM_READY:
-        case TransactionCode::SET_AUDIO_HAL_PIDS: {
+        case TransactionCode::SET_AUDIO_HAL_PIDS:
+        case TransactionCode::SET_VIBRATOR_INFOS: {
             if (!isServiceUid(IPCThreadState::self()->getCallingUid())) {
                 ALOGW("%s: transaction %d received from PID %d unauthorized UID %d",
                       __func__, code, IPCThreadState::self()->getCallingPid(),
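createTrack, createRecord and createEffect above all repeat the same identity adjustment before trusting the caller-supplied Identity. Reduced to a sketch (the helper name and exact structure are illustrative, not from the patch; the real code also logs a warning when a caller tries to pass itself off as another uid or pid):

    Identity adjustIdentity(const Identity& requested) {
        Identity adjusted = requested;
        const uid_t callingUid = IPCThreadState::self()->getCallingUid();
        const pid_t callingPid = IPCThreadState::self()->getCallingPid();
        if (!isAudioServerOrMediaServerUid(callingUid)) {
            // untrusted callers cannot act on behalf of another uid/pid
            adjusted.uid = VALUE_OR_FATAL(legacy2aidl_uid_t_int32_t(callingUid));
            adjusted.pid = VALUE_OR_FATAL(legacy2aidl_pid_t_int32_t(callingPid));
        } else if (adjusted.pid == -1) {
            // trusted callers that did not supply a pid default to the calling pid
            adjusted.pid = VALUE_OR_FATAL(legacy2aidl_pid_t_int32_t(callingPid));
        }
        return adjusted;
    }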
diff --git a/services/audioflinger/AudioFlinger.h b/services/audioflinger/AudioFlinger.h
index 1cfdffc..a980752 100644
--- a/services/audioflinger/AudioFlinger.h
+++ b/services/audioflinger/AudioFlinger.h
@@ -267,6 +267,8 @@
 
     virtual status_t setAudioHalPids(const std::vector<pid_t>& pids);
 
+    virtual status_t setVibratorInfos(const std::vector<media::AudioVibratorInfo>& vibratorInfos);
+
     status_t onPreTransact(TransactionCode code, const Parcel& data, uint32_t flags) override;
 
     // end of IAudioFlinger interface
@@ -296,6 +298,8 @@
     void updateDownStreamPatches_l(const struct audio_patch *patch,
                                    const std::set<audio_io_handle_t> streams);
 
+    const media::AudioVibratorInfo* getDefaultVibratorInfo_l();
+
 private:
     // FIXME The 400 is temporarily too high until a leak of writers in media.log is fixed.
     static const size_t kLogMemorySize = 400 * 1024;
@@ -971,6 +975,8 @@
     SimpleLog  mAppSetParameterLog;
     SimpleLog  mSystemSetParameterLog;
 
+    std::vector<media::AudioVibratorInfo> mAudioVibratorInfos;
+
     static inline constexpr const char *mMetricsId = AMEDIAMETRICS_KEY_AUDIO_FLINGER;
 };
 
diff --git a/services/audioflinger/AudioStreamOut.cpp b/services/audioflinger/AudioStreamOut.cpp
index 7e06096..d8565bd 100644
--- a/services/audioflinger/AudioStreamOut.cpp
+++ b/services/audioflinger/AudioStreamOut.cpp
@@ -173,22 +173,15 @@
     return status;
 }
 
-audio_format_t AudioStreamOut::getFormat() const
+audio_config_base_t AudioStreamOut::getAudioProperties() const
 {
-    audio_format_t result;
-    return stream->getFormat(&result) == OK ? result : AUDIO_FORMAT_INVALID;
-}
-
-uint32_t AudioStreamOut::getSampleRate() const
-{
-    uint32_t result;
-    return stream->getSampleRate(&result) == OK ? result : 0;
-}
-
-audio_channel_mask_t AudioStreamOut::getChannelMask() const
-{
-    audio_channel_mask_t result;
-    return stream->getChannelMask(&result) == OK ? result : AUDIO_CHANNEL_INVALID;
+    audio_config_base_t result = AUDIO_CONFIG_BASE_INITIALIZER;
+    if (stream->getAudioProperties(&result) != OK) {
+        result.sample_rate = 0;
+        result.channel_mask = AUDIO_CHANNEL_INVALID;
+        result.format = AUDIO_FORMAT_INVALID;
+    }
+    return result;
 }
 
 int AudioStreamOut::flush()
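A hypothetical call-site update matching the consolidated getter above (mOutput and the member names are illustrative, not from the patch):

    const audio_config_base_t config = mOutput->getAudioProperties();
    mSampleRate  = config.sample_rate;   // was mOutput->getSampleRate()
    mFormat      = config.format;        // was mOutput->getFormat()
    mChannelMask = config.channel_mask;  // was mOutput->getChannelMask()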
diff --git a/services/audioflinger/AudioStreamOut.h b/services/audioflinger/AudioStreamOut.h
index 16fbcf2..565f43a 100644
--- a/services/audioflinger/AudioStreamOut.h
+++ b/services/audioflinger/AudioStreamOut.h
@@ -81,22 +81,14 @@
     virtual size_t getFrameSize() const { return mHalFrameSize; }
 
     /**
-     * @return format from the perspective of the application and the AudioFlinger.
+     * @return audio stream configuration: channel mask, format, sample rate:
+     *   - channel mask from the perspective of the application and the AudioFlinger,
+     *     The HAL is in stereo mode when playing multi-channel compressed audio over HDMI;
+     *   - format from the perspective of the application and the AudioFlinger;
+     *   - sample rate from the perspective of the application and the AudioFlinger,
+     *     The HAL may be running at a higher sample rate if, for example, playing wrapped EAC3.
      */
-    virtual audio_format_t getFormat() const;
-
-    /**
-     * The HAL may be running at a higher sample rate if, for example, playing wrapped EAC3.
-     * @return sample rate from the perspective of the application and the AudioFlinger.
-     */
-    virtual uint32_t getSampleRate() const;
-
-    /**
-     * The HAL is in stereo mode when playing multi-channel compressed audio over HDMI.
-     * @return channel mask from the perspective of the application and the AudioFlinger.
-     */
-    virtual audio_channel_mask_t getChannelMask() const;
-
+    virtual audio_config_base_t getAudioProperties() const;
 
     virtual status_t flush();
     virtual status_t standby();
diff --git a/services/audioflinger/Effects.cpp b/services/audioflinger/Effects.cpp
index 392d339..d75b13b 100644
--- a/services/audioflinger/Effects.cpp
+++ b/services/audioflinger/Effects.cpp
@@ -261,6 +261,12 @@
         }
         registered = mPolicyRegistered;
         enabled = mPolicyEnabled;
+        // The simultaneous release of two EffectHandles with the same EffectModule
+        // may cause us to call this method at the same time.
+        // This may deadlock under some circumstances (b/180941720).  Avoid this.
+        if (!doRegister && !(registered && doEnable)) {
+            return NO_ERROR;
+        }
         mPolicyLock.lock();
     }
     ALOGV("%s name %s id %d session %d doRegister %d registered %d doEnable %d enabled %d",
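In the guard added above, the early return fires when there is nothing to register (!doRegister) and nothing to enable on an already registered effect (!(registered && doEnable)); in that case the method no longer needs mPolicyLock at all, so a second handle releasing the same EffectModule concurrently no longer has to wait on it, which appears to be the situation referenced by b/180941720.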
@@ -1579,6 +1585,34 @@
     return status;
 }
 
+status_t AudioFlinger::EffectModule::setVibratorInfo(const media::AudioVibratorInfo* vibratorInfo)
+{
+    if (mStatus != NO_ERROR) {
+        return mStatus;
+    }
+    if (!isHapticGenerator()) {
+        ALOGW("Should not set vibrator info for effects that are not HapticGenerator");
+        return INVALID_OPERATION;
+    }
+
+    std::vector<uint8_t> request(
+            sizeof(effect_param_t) + sizeof(int32_t) + 2 * sizeof(float));
+    effect_param_t *param = (effect_param_t*) request.data();
+    param->psize = sizeof(int32_t);
+    param->vsize = 2 * sizeof(float);
+    *(int32_t*)param->data = HG_PARAM_VIBRATOR_INFO;
+    float* vibratorInfoPtr = reinterpret_cast<float*>(param->data + sizeof(int32_t));
+    vibratorInfoPtr[0] = vibratorInfo->resonantFrequency;
+    vibratorInfoPtr[1] = vibratorInfo->qFactor;
+    std::vector<uint8_t> response;
+    status_t status = command(EFFECT_CMD_SET_PARAM, request, sizeof(int32_t), &response);
+    if (status == NO_ERROR) {
+        LOG_ALWAYS_FATAL_IF(response.size() != sizeof(status_t));
+        status = *reinterpret_cast<const status_t*>(response.data());
+    }
+    return status;
+}
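The EFFECT_CMD_SET_PARAM request assembled in setVibratorInfo() above has the layout sketched below (offsets assume the parameter id is packed immediately after the effect_param_t header with no extra padding, which is what the pointer arithmetic in the function implies):

    // request buffer layout
    // [0, sizeof(effect_param_t))      effect_param_t header, psize = 4, vsize = 8
    // param->data + 0 .. + 3           int32_t  HG_PARAM_VIBRATOR_INFO
    // param->data + 4 .. + 7           float    vibratorInfo->resonantFrequency
    // param->data + 8 .. + 11          float    vibratorInfo->qFactor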
+
 static std::string dumpInOutBuffer(bool isInput, const sp<EffectBufferHalInterface> &buffer) {
     std::stringstream ss;
 
diff --git a/services/audioflinger/Effects.h b/services/audioflinger/Effects.h
index 8e82d53..9da95bc 100644
--- a/services/audioflinger/Effects.h
+++ b/services/audioflinger/Effects.h
@@ -258,6 +258,7 @@
     bool             isHapticGenerator() const;
 
     status_t         setHapticIntensity(int id, int intensity);
+    status_t         setVibratorInfo(const media::AudioVibratorInfo* vibratorInfo);
 
     void             dump(int fd, const Vector<String16>& args);
 
diff --git a/services/audioflinger/FastMixer.cpp b/services/audioflinger/FastMixer.cpp
index cd3c743..13e2ced 100644
--- a/services/audioflinger/FastMixer.cpp
+++ b/services/audioflinger/FastMixer.cpp
@@ -353,7 +353,8 @@
 #endif
         //ALOGD("Eric FastMixer::onWork() mIsWarm");
     } else {
-        dumpState->mTimestampVerifier.discontinuity();
+        dumpState->mTimestampVerifier.discontinuity(
+            dumpState->mTimestampVerifier.DISCONTINUITY_MODE_CONTINUOUS);
         // See comment in if block.
 #ifdef FASTMIXER_LOG_HIST_TS
         LOG_AUDIO_STATE();
diff --git a/services/audioflinger/MmapTracks.h b/services/audioflinger/MmapTracks.h
index b83f6b5..ba868d7 100644
--- a/services/audioflinger/MmapTracks.h
+++ b/services/audioflinger/MmapTracks.h
@@ -29,8 +29,7 @@
                             audio_channel_mask_t channelMask,
                             audio_session_t sessionId,
                             bool isOut,
-                            uid_t uid,
-                            pid_t pid,
+                            const media::permission::Identity& identity,
                             pid_t creatorPid,
                             audio_port_handle_t portId = AUDIO_PORT_HANDLE_NONE);
     virtual             ~MmapTrack();
diff --git a/services/audioflinger/PlaybackTracks.h b/services/audioflinger/PlaybackTracks.h
index c70d6f9..2436248 100644
--- a/services/audioflinger/PlaybackTracks.h
+++ b/services/audioflinger/PlaybackTracks.h
@@ -15,6 +15,8 @@
 ** limitations under the License.
 */
 
+#include <android/media/permission/Identity.h>
+
 #ifndef INCLUDING_FROM_AUDIOFLINGER_H
     #error This header file should only be included from AudioFlinger.h
 #endif
@@ -26,11 +28,13 @@
     bool hasOpPlayAudio() const;
 
     static sp<OpPlayAudioMonitor> createIfNeeded(
-            uid_t uid, const audio_attributes_t& attr, int id, audio_stream_type_t streamType,
-            const std::string& opPackageName);
+            const android::media::permission::Identity& identity,
+            const audio_attributes_t& attr, int id,
+            audio_stream_type_t streamType);
 
 private:
-    OpPlayAudioMonitor(uid_t uid, audio_usage_t usage, int id, const String16& opPackageName);
+    OpPlayAudioMonitor(const android::media::permission::Identity& identity,
+        audio_usage_t usage, int id);
     void onFirstRef() override;
     static void getPackagesForUid(uid_t uid, Vector<String16>& packages);
 
@@ -50,10 +54,9 @@
     void checkPlayAudioForUsage();
 
     std::atomic_bool mHasOpPlayAudio;
-    const uid_t mUid;
+    const android::media::permission::Identity mIdentity;
     const int32_t mUsage; // on purpose not audio_usage_t because always checked in appOps as int32_t
     const int mId; // for logging purposes only
-    const String16 mOpPackageName;
 };
 
 // playback track
@@ -72,14 +75,13 @@
                                 const sp<IMemory>& sharedBuffer,
                                 audio_session_t sessionId,
                                 pid_t creatorPid,
-                                uid_t uid,
+                                const media::permission::Identity& identity,
                                 audio_output_flags_t flags,
                                 track_type type,
                                 audio_port_handle_t portId = AUDIO_PORT_HANDLE_NONE,
                                 /** default behaviour is to start when there are as many frames
                                   * ready as possible (aka. Buffer is full). */
-                                size_t frameCountToBeReady = SIZE_MAX,
-                                const std::string opPackageName = "");
+                                size_t frameCountToBeReady = SIZE_MAX);
     virtual             ~Track();
     virtual status_t    initCheck() const;
 
@@ -217,6 +219,10 @@
     void flushAck();
     bool isResumePending();
     void resumeAck();
+    // For direct or offloaded tracks ensure that the pause state is acknowledged
+    // by the playback thread in case of an immediate flush.
+    bool isPausePending() const { return mPauseHwPending; }
+    void pauseAck();
     void updateTrackFrameInfo(int64_t trackFramesReleased, int64_t sinkFramesWritten,
             uint32_t halSampleRate, const ExtendedTimestamp &timeStamp);
 
@@ -284,8 +290,6 @@
     };
     sp<AudioVibrationController> mAudioVibrationController;
     sp<os::ExternalVibration>    mExternalVibration;
-    /** How many frames should be in the buffer before the track is considered ready */
-    const size_t        mFrameCountToBeReady;
 
     audio_dual_mono_mode_t mDualMonoMode = AUDIO_DUAL_MONO_MODE_OFF;
     float               mAudioDescriptionMixLevel = -std::numeric_limits<float>::infinity();
@@ -314,6 +318,7 @@
     sp<AudioTrackServerProxy>  mAudioTrackServerProxy;
     bool                mResumeToStopping; // track was paused in stopping state.
     bool                mFlushHwPending; // track requests for thread flush
+    bool                mPauseHwPending = false; // direct/offload track request for thread pause
     audio_output_flags_t mFlags;
     // If the last track change was notified to the client with readAndClearHasChanged
     std::atomic_flag     mChangeNotified = ATOMIC_FLAG_INIT;
@@ -336,7 +341,7 @@
                                 audio_format_t format,
                                 audio_channel_mask_t channelMask,
                                 size_t frameCount,
-                                uid_t uid);
+                                const android::media::permission::Identity& identity);
     virtual             ~OutputTrack();
 
     virtual status_t    start(AudioSystem::sync_event_t event =
diff --git a/services/audioflinger/RecordTracks.h b/services/audioflinger/RecordTracks.h
index d87239d..5f248e1 100644
--- a/services/audioflinger/RecordTracks.h
+++ b/services/audioflinger/RecordTracks.h
@@ -26,10 +26,10 @@
     bool hasOpRecordAudio() const;
 
     static sp<OpRecordAudioMonitor> createIfNeeded
-        (uid_t uid, const audio_attributes_t& attr, const String16& opPackageName);
+        (const media::permission::Identity& identity, const audio_attributes_t& attr);
 
 private:
-    OpRecordAudioMonitor(uid_t uid, const String16& opPackageName);
+    explicit OpRecordAudioMonitor(const media::permission::Identity& identity);
     void onFirstRef() override;
 
     AppOpsManager mAppOpsManager;
@@ -49,8 +49,7 @@
     void checkRecordAudio();
 
     std::atomic_bool mHasOpRecordAudio;
-    const uid_t mUid;
-    const String16 mPackage;
+    const media::permission::Identity mIdentity;
 };
 
 // record track
@@ -67,10 +66,9 @@
                                 size_t bufferSize,
                                 audio_session_t sessionId,
                                 pid_t creatorPid,
-                                uid_t uid,
+                                const media::permission::Identity& identity,
                                 audio_input_flags_t flags,
                                 track_type type,
-                                const String16& opPackageName,
                                 audio_port_handle_t portId = AUDIO_PORT_HANDLE_NONE);
     virtual             ~RecordTrack();
     virtual status_t    initCheck() const;
@@ -149,7 +147,6 @@
 
             // used to enforce OP_RECORD_AUDIO
             uid_t                              mUid;
-            String16                           mOpPackageName;
             sp<OpRecordAudioMonitor>           mOpRecordAudioMonitor;
 };
 
diff --git a/services/audioflinger/Threads.cpp b/services/audioflinger/Threads.cpp
index eaf0d10..997f24a 100644
--- a/services/audioflinger/Threads.cpp
+++ b/services/audioflinger/Threads.cpp
@@ -108,6 +108,7 @@
 
 // TODO: Move these macro/inlines to a header file.
 #define max(a, b) ((a) > (b) ? (a) : (b))
+
 template <typename T>
 static inline T min(const T& a, const T& b)
 {
@@ -117,6 +118,7 @@
 namespace android {
 
 using media::IEffectClient;
+using media::permission::Identity;
 
 // retry counts for buffer fill timeout
 // 50 * ~20msecs = 1 second
@@ -624,7 +626,7 @@
     mIoJitterMs.reset();
     mLatencyMs.reset();
     mProcessTimeMs.reset();
-    mTimestampVerifier.discontinuity();
+    mTimestampVerifier.discontinuity(mTimestampVerifier.DISCONTINUITY_MODE_CONTINUOUS);
 
     sp<ConfigEvent> configEvent = (ConfigEvent *)new IoConfigEvent(event, pid, portId);
     sendConfigEvent_l(configEvent);
@@ -1437,6 +1439,16 @@
             effect->setMode(mAudioFlinger->getMode());
             effect->setAudioSource(mAudioSource);
         }
+        if (effect->isHapticGenerator()) {
+            // TODO(b/184194057): Use the vibrator information from the vibrator that will be used
+            // for the HapticGenerator.
+            const media::AudioVibratorInfo* defaultVibratorInfo =
+                    mAudioFlinger->getDefaultVibratorInfo_l();
+            if (defaultVibratorInfo != nullptr) {
+                // Only set the vibrator info when it is valid.
+                effect->setVibratorInfo(defaultVibratorInfo);
+            }
+        }
         // create effect handle and connect it to effect module
         handle = new EffectHandle(effect, client, effectClient, priority);
         lStatus = handle->initCheck();
@@ -2076,12 +2088,11 @@
         audio_session_t sessionId,
         audio_output_flags_t *flags,
         pid_t creatorPid,
+        const Identity& identity,
         pid_t tid,
-        uid_t uid,
         status_t *status,
         audio_port_handle_t portId,
-        const sp<media::IAudioTrackCallback>& callback,
-        const std::string& opPackageName)
+        const sp<media::IAudioTrackCallback>& callback)
 {
     size_t frameCount = *pFrameCount;
     size_t notificationFrameCount = *pNotificationFrameCount;
@@ -2172,8 +2183,8 @@
                 "sampleRate=%u mSampleRate=%u "
                 "hasFastMixer=%d tid=%d fastTrackAvailMask=%#x",
                 sharedBuffer.get(), frameCount, mFrameCount, format, mFormat,
-                audio_is_linear_pcm(format),
-                channelMask, sampleRate, mSampleRate, hasFastMixer(), tid, mFastTrackAvailMask);
+                audio_is_linear_pcm(format), channelMask, sampleRate,
+                mSampleRate, hasFastMixer(), tid, mFastTrackAvailMask);
         *flags = (audio_output_flags_t)(*flags & ~AUDIO_OUTPUT_FLAG_FAST);
       }
     }
@@ -2372,8 +2383,8 @@
         track = new Track(this, client, streamType, attr, sampleRate, format,
                           channelMask, frameCount,
                           nullptr /* buffer */, (size_t)0 /* bufferSize */, sharedBuffer,
-                          sessionId, creatorPid, uid, trackFlags, TrackBase::TYPE_DEFAULT, portId,
-                          SIZE_MAX /*frameCountToBeReady*/, opPackageName);
+                          sessionId, creatorPid, identity, trackFlags, TrackBase::TYPE_DEFAULT,
+                          portId, SIZE_MAX /*frameCountToBeReady*/);
 
         lStatus = track != 0 ? track->initCheck() : (status_t) NO_MEMORY;
         if (lStatus != NO_ERROR) {
@@ -2747,7 +2758,7 @@
         // the timestamp frame position to reset to 0 for direct and offload threads.
         // (Out of sequence requests are ignored, since the discontinuity would be handled
         // elsewhere, e.g. in flush).
-        mTimestampVerifier.discontinuity();
+        mTimestampVerifier.discontinuity(mTimestampVerifier.DISCONTINUITY_MODE_ZERO);
         mDrainSequence &= ~1;
         mWaitWorkCV.signal();
     }
@@ -2756,8 +2767,9 @@
 void AudioFlinger::PlaybackThread::readOutputParameters_l()
 {
     // unfortunately we have no way of recovering from errors here, hence the LOG_ALWAYS_FATAL
-    mSampleRate = mOutput->getSampleRate();
-    mChannelMask = mOutput->getChannelMask();
+    const audio_config_base_t audioConfig = mOutput->getAudioProperties();
+    mSampleRate = audioConfig.sample_rate;
+    mChannelMask = audioConfig.channel_mask;
     if (!audio_is_output_channel(mChannelMask)) {
         LOG_ALWAYS_FATAL("HAL channel mask %#x not valid for output", mChannelMask);
     }
@@ -2770,11 +2782,11 @@
     mBalance.setChannelMask(mChannelMask);
 
     // Get actual HAL format.
-    status_t result = mOutput->stream->getFormat(&mHALFormat);
+    status_t result = mOutput->stream->getAudioProperties(nullptr, nullptr, &mHALFormat);
     LOG_ALWAYS_FATAL_IF(result != OK, "Error when retrieving output stream format: %d", result);
     // Get format from the shim, which will be different than the HAL format
     // if playing compressed audio over HDMI passthrough.
-    mFormat = mOutput->getFormat();
+    mFormat = audioConfig.format;
     if (!audio_is_valid_format(mFormat)) {
         LOG_ALWAYS_FATAL("HAL format %#x not valid for output", mFormat);
     }
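The readOutputParameters_l() change above replaces separate getSampleRate()/getChannelMask()/getFormat() calls with a single getAudioProperties() query that fills an audio_config_base_t. A minimal sketch of unpacking such a config, assuming only the standard system/audio.h definitions (the helper name is illustrative, not part of this patch):

#include <system/audio.h>

// Sketch: pull the three output parameters out of one audio_config_base_t,
// mirroring how readOutputParameters_l() now consumes getAudioProperties().
static void applyOutputConfig(const audio_config_base_t& config,
                              uint32_t* sampleRate,
                              audio_channel_mask_t* channelMask,
                              audio_format_t* format) {
    *sampleRate = config.sample_rate;
    *channelMask = config.channel_mask;
    *format = config.format;
}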
@@ -3439,7 +3451,6 @@
 
     mStandbyTimeNs = systemTime();
     int64_t lastLoopCountWritten = -2; // never matches "previous" loop, when loopCount = 0.
-    int64_t lastFramesWritten = -1;    // track changes in timestamp server frames written
 
     // MIXER
     nsecs_t lastWarning = 0;
@@ -3475,14 +3486,6 @@
 
     checkSilentMode_l();
 
-    // DIRECT and OFFLOAD threads should reset frame count to zero on stop/flush
-    // TODO: add confirmation checks:
-    // 1) DIRECT threads and linear PCM format really resets to 0?
-    // 2) Is frame count really valid if not linear pcm?
-    // 3) Are all 64 bits of position returned, not just lowest 32 bits?
-    if (mType == OFFLOAD || mType == DIRECT) {
-        mTimestampVerifier.setDiscontinuityMode(mTimestampVerifier.DISCONTINUITY_MODE_ZERO);
-    }
     audio_patch_handle_t lastDownstreamPatchHandle = AUDIO_PATCH_HANDLE_NONE;
 
     // loopCount is used for statistics and diagnostics.
@@ -3554,135 +3557,8 @@
                 logString = NULL;
             }
 
-            // Collect timestamp statistics for the Playback Thread types that support it.
-            if (mType == MIXER
-                    || mType == DUPLICATING
-                    || mType == DIRECT
-                    || mType == OFFLOAD) { // no indentation
-            // Gather the framesReleased counters for all active tracks,
-            // and associate with the sink frames written out.  We need
-            // this to convert the sink timestamp to the track timestamp.
-            bool kernelLocationUpdate = false;
-            ExtendedTimestamp timestamp; // use private copy to fetch
-            if (mStandby) {
-                mTimestampVerifier.discontinuity();
-            } else if (threadloop_getHalTimestamp_l(&timestamp) == OK) {
-                mTimestampVerifier.add(timestamp.mPosition[ExtendedTimestamp::LOCATION_KERNEL],
-                        timestamp.mTimeNs[ExtendedTimestamp::LOCATION_KERNEL],
-                        mSampleRate);
+            collectTimestamps_l();
 
-                if (isTimestampCorrectionEnabled()) {
-                    ALOGVV("TS_BEFORE: %d %lld %lld", id(),
-                            (long long)timestamp.mTimeNs[ExtendedTimestamp::LOCATION_KERNEL],
-                            (long long)timestamp.mPosition[ExtendedTimestamp::LOCATION_KERNEL]);
-                    auto correctedTimestamp = mTimestampVerifier.getLastCorrectedTimestamp();
-                    timestamp.mPosition[ExtendedTimestamp::LOCATION_KERNEL]
-                            = correctedTimestamp.mFrames;
-                    timestamp.mTimeNs[ExtendedTimestamp::LOCATION_KERNEL]
-                            = correctedTimestamp.mTimeNs;
-                    ALOGVV("TS_AFTER: %d %lld %lld", id(),
-                            (long long)timestamp.mTimeNs[ExtendedTimestamp::LOCATION_KERNEL],
-                            (long long)timestamp.mPosition[ExtendedTimestamp::LOCATION_KERNEL]);
-
-                    // Note: Downstream latency only added if timestamp correction enabled.
-                    if (mDownstreamLatencyStatMs.getN() > 0) { // we have latency info.
-                        const int64_t newPosition =
-                                timestamp.mPosition[ExtendedTimestamp::LOCATION_KERNEL]
-                                - int64_t(mDownstreamLatencyStatMs.getMean() * mSampleRate * 1e-3);
-                        // prevent retrograde
-                        timestamp.mPosition[ExtendedTimestamp::LOCATION_KERNEL] = max(
-                                newPosition,
-                                (mTimestamp.mPosition[ExtendedTimestamp::LOCATION_KERNEL]
-                                        - mSuspendedFrames));
-                    }
-                }
-
-                // We always fetch the timestamp here because often the downstream
-                // sink will block while writing.
-
-                // We keep track of the last valid kernel position in case we are in underrun
-                // and the normal mixer period is the same as the fast mixer period, or there
-                // is some error from the HAL.
-                if (mTimestamp.mTimeNs[ExtendedTimestamp::LOCATION_KERNEL] >= 0) {
-                    mTimestamp.mPosition[ExtendedTimestamp::LOCATION_KERNEL_LASTKERNELOK] =
-                            mTimestamp.mPosition[ExtendedTimestamp::LOCATION_KERNEL];
-                    mTimestamp.mTimeNs[ExtendedTimestamp::LOCATION_KERNEL_LASTKERNELOK] =
-                            mTimestamp.mTimeNs[ExtendedTimestamp::LOCATION_KERNEL];
-
-                    mTimestamp.mPosition[ExtendedTimestamp::LOCATION_SERVER_LASTKERNELOK] =
-                            mTimestamp.mPosition[ExtendedTimestamp::LOCATION_SERVER];
-                    mTimestamp.mTimeNs[ExtendedTimestamp::LOCATION_SERVER_LASTKERNELOK] =
-                            mTimestamp.mTimeNs[ExtendedTimestamp::LOCATION_SERVER];
-                }
-
-                if (timestamp.mTimeNs[ExtendedTimestamp::LOCATION_KERNEL] >= 0) {
-                    kernelLocationUpdate = true;
-                } else {
-                    ALOGVV("getTimestamp error - no valid kernel position");
-                }
-
-                // copy over kernel info
-                mTimestamp.mPosition[ExtendedTimestamp::LOCATION_KERNEL] =
-                        timestamp.mPosition[ExtendedTimestamp::LOCATION_KERNEL]
-                        + mSuspendedFrames; // add frames discarded when suspended
-                mTimestamp.mTimeNs[ExtendedTimestamp::LOCATION_KERNEL] =
-                        timestamp.mTimeNs[ExtendedTimestamp::LOCATION_KERNEL];
-            } else {
-                mTimestampVerifier.error();
-            }
-
-            // mFramesWritten for non-offloaded tracks are contiguous
-            // even after standby() is called. This is useful for the track frame
-            // to sink frame mapping.
-            bool serverLocationUpdate = false;
-            if (mFramesWritten != lastFramesWritten) {
-                serverLocationUpdate = true;
-                lastFramesWritten = mFramesWritten;
-            }
-            // Only update timestamps if there is a meaningful change.
-            // Either the kernel timestamp must be valid or we have written something.
-            if (kernelLocationUpdate || serverLocationUpdate) {
-                if (serverLocationUpdate) {
-                    // use the time before we called the HAL write - it is a bit more accurate
-                    // to when the server last read data than the current time here.
-                    //
-                    // If we haven't written anything, mLastIoBeginNs will be -1
-                    // and we use systemTime().
-                    mTimestamp.mPosition[ExtendedTimestamp::LOCATION_SERVER] = mFramesWritten;
-                    mTimestamp.mTimeNs[ExtendedTimestamp::LOCATION_SERVER] = mLastIoBeginNs == -1
-                            ? systemTime() : mLastIoBeginNs;
-                }
-
-                for (const sp<Track> &t : mActiveTracks) {
-                    if (!t->isFastTrack()) {
-                        t->updateTrackFrameInfo(
-                                t->mAudioTrackServerProxy->framesReleased(),
-                                mFramesWritten,
-                                mSampleRate,
-                                mTimestamp);
-                    }
-                }
-            }
-
-            if (audio_has_proportional_frames(mFormat)) {
-                const double latencyMs = mTimestamp.getOutputServerLatencyMs(mSampleRate);
-                if (latencyMs != 0.) { // note 0. means timestamp is empty.
-                    mLatencyMs.add(latencyMs);
-                }
-            }
-
-            } // if (mType ... ) { // no indentation
-#if 0
-            // logFormat example
-            if (z % 100 == 0) {
-                timespec ts;
-                clock_gettime(CLOCK_MONOTONIC, &ts);
-                LOGT("This is an integer %d, this is a float %f, this is my "
-                    "pid %p %% %s %t", 42, 3.14, "and this is a timestamp", ts);
-                LOGT("A deceptive null-terminated string %\0");
-            }
-            ++z;
-#endif
             saveOutputTracks();
             if (mSignalPending) {
                 // A signal was raised while we were unlocked
@@ -4128,6 +4004,148 @@
     return false;
 }
 
+void AudioFlinger::PlaybackThread::collectTimestamps_l()
+{
+    // Collect timestamp statistics for the Playback Thread types that support it.
+    if (mType != MIXER
+            && mType != DUPLICATING
+            && mType != DIRECT
+            && mType != OFFLOAD) {
+        return;
+    }
+    if (mStandby) {
+        mTimestampVerifier.discontinuity(discontinuityForStandbyOrFlush());
+        return;
+    } else if (mHwPaused) {
+        mTimestampVerifier.discontinuity(mTimestampVerifier.DISCONTINUITY_MODE_CONTINUOUS);
+        return;
+    }
+
+    // Gather the framesReleased counters for all active tracks,
+    // and associate with the sink frames written out.  We need
+    // this to convert the sink timestamp to the track timestamp.
+    bool kernelLocationUpdate = false;
+    ExtendedTimestamp timestamp; // use private copy to fetch
+
+    // Always query the HAL timestamp and update the timestamp verifier. In standby or pause,
+    // the HAL may still be draining a small amount of buffered data for fade out.
+    if (threadloop_getHalTimestamp_l(&timestamp) == OK) {
+        mTimestampVerifier.add(timestamp.mPosition[ExtendedTimestamp::LOCATION_KERNEL],
+                timestamp.mTimeNs[ExtendedTimestamp::LOCATION_KERNEL],
+                mSampleRate);
+
+        if (isTimestampCorrectionEnabled()) {
+            ALOGVV("TS_BEFORE: %d %lld %lld", id(),
+                    (long long)timestamp.mTimeNs[ExtendedTimestamp::LOCATION_KERNEL],
+                    (long long)timestamp.mPosition[ExtendedTimestamp::LOCATION_KERNEL]);
+            auto correctedTimestamp = mTimestampVerifier.getLastCorrectedTimestamp();
+            timestamp.mPosition[ExtendedTimestamp::LOCATION_KERNEL]
+                    = correctedTimestamp.mFrames;
+            timestamp.mTimeNs[ExtendedTimestamp::LOCATION_KERNEL]
+                    = correctedTimestamp.mTimeNs;
+            ALOGVV("TS_AFTER: %d %lld %lld", id(),
+                    (long long)timestamp.mTimeNs[ExtendedTimestamp::LOCATION_KERNEL],
+                    (long long)timestamp.mPosition[ExtendedTimestamp::LOCATION_KERNEL]);
+
+            // Note: Downstream latency only added if timestamp correction enabled.
+            if (mDownstreamLatencyStatMs.getN() > 0) { // we have latency info.
+                const int64_t newPosition =
+                        timestamp.mPosition[ExtendedTimestamp::LOCATION_KERNEL]
+                        - int64_t(mDownstreamLatencyStatMs.getMean() * mSampleRate * 1e-3);
+                // prevent retrograde
+                timestamp.mPosition[ExtendedTimestamp::LOCATION_KERNEL] = max(
+                        newPosition,
+                        (mTimestamp.mPosition[ExtendedTimestamp::LOCATION_KERNEL]
+                                - mSuspendedFrames));
+            }
+        }
+
+        // We always fetch the timestamp here because often the downstream
+        // sink will block while writing.
+
+        // We keep track of the last valid kernel position in case we are in underrun
+        // and the normal mixer period is the same as the fast mixer period, or there
+        // is some error from the HAL.
+        if (mTimestamp.mTimeNs[ExtendedTimestamp::LOCATION_KERNEL] >= 0) {
+            mTimestamp.mPosition[ExtendedTimestamp::LOCATION_KERNEL_LASTKERNELOK] =
+                    mTimestamp.mPosition[ExtendedTimestamp::LOCATION_KERNEL];
+            mTimestamp.mTimeNs[ExtendedTimestamp::LOCATION_KERNEL_LASTKERNELOK] =
+                    mTimestamp.mTimeNs[ExtendedTimestamp::LOCATION_KERNEL];
+
+            mTimestamp.mPosition[ExtendedTimestamp::LOCATION_SERVER_LASTKERNELOK] =
+                    mTimestamp.mPosition[ExtendedTimestamp::LOCATION_SERVER];
+            mTimestamp.mTimeNs[ExtendedTimestamp::LOCATION_SERVER_LASTKERNELOK] =
+                    mTimestamp.mTimeNs[ExtendedTimestamp::LOCATION_SERVER];
+        }
+
+        if (timestamp.mTimeNs[ExtendedTimestamp::LOCATION_KERNEL] >= 0) {
+            kernelLocationUpdate = true;
+        } else {
+            ALOGVV("getTimestamp error - no valid kernel position");
+        }
+
+        // copy over kernel info
+        mTimestamp.mPosition[ExtendedTimestamp::LOCATION_KERNEL] =
+                timestamp.mPosition[ExtendedTimestamp::LOCATION_KERNEL]
+                + mSuspendedFrames; // add frames discarded when suspended
+        mTimestamp.mTimeNs[ExtendedTimestamp::LOCATION_KERNEL] =
+                timestamp.mTimeNs[ExtendedTimestamp::LOCATION_KERNEL];
+    } else {
+        mTimestampVerifier.error();
+    }
+
+    // mFramesWritten for non-offloaded tracks is contiguous
+    // even after standby() is called. This is useful for the track-frame
+    // to sink-frame mapping.
+    bool serverLocationUpdate = false;
+    if (mFramesWritten != mLastFramesWritten) {
+        serverLocationUpdate = true;
+        mLastFramesWritten = mFramesWritten;
+    }
+    // Only update timestamps if there is a meaningful change.
+    // Either the kernel timestamp must be valid or we have written something.
+    if (kernelLocationUpdate || serverLocationUpdate) {
+        if (serverLocationUpdate) {
+            // use the time before we called the HAL write - it is a bit more accurate
+            // to when the server last read data than the current time here.
+            //
+            // If we haven't written anything, mLastIoBeginNs will be -1
+            // and we use systemTime().
+            mTimestamp.mPosition[ExtendedTimestamp::LOCATION_SERVER] = mFramesWritten;
+            mTimestamp.mTimeNs[ExtendedTimestamp::LOCATION_SERVER] = mLastIoBeginNs == -1
+                    ? systemTime() : mLastIoBeginNs;
+        }
+
+        for (const sp<Track> &t : mActiveTracks) {
+            if (!t->isFastTrack()) {
+                t->updateTrackFrameInfo(
+                        t->mAudioTrackServerProxy->framesReleased(),
+                        mFramesWritten,
+                        mSampleRate,
+                        mTimestamp);
+            }
+        }
+    }
+
+    if (audio_has_proportional_frames(mFormat)) {
+        const double latencyMs = mTimestamp.getOutputServerLatencyMs(mSampleRate);
+        if (latencyMs != 0.) { // note 0. means timestamp is empty.
+            mLatencyMs.add(latencyMs);
+        }
+    }
+#if 0
+    // logFormat example
+    if (z % 100 == 0) {
+        timespec ts;
+        clock_gettime(CLOCK_MONOTONIC, &ts);
+        LOGT("This is an integer %d, this is a float %f, this is my "
+            "pid %p %% %s %t", 42, 3.14, "and this is a timestamp", ts);
+        LOGT("A deceptive null-terminated string %\0");
+    }
+    ++z;
+#endif
+}
+
 // removeTracks_l() must be called with ThreadBase::mLock held
 void AudioFlinger::PlaybackThread::removeTracks_l(const Vector< sp<Track> >& tracksToRemove)
 {
@@ -4181,20 +4199,15 @@
         return status;
     }
     if ((mType == OFFLOAD || mType == DIRECT) && mOutput != NULL) {
-        uint64_t position64;
-        if (mOutput->getPresentationPosition(&position64, &timestamp.mTime) == OK) {
-            timestamp.mPosition = (uint32_t)position64;
-            if (mDownstreamLatencyStatMs.getN() > 0) {
-                const uint32_t positionOffset =
-                    (uint32_t)(mDownstreamLatencyStatMs.getMean() * mSampleRate * 1e-3);
-                if (positionOffset > timestamp.mPosition) {
-                    timestamp.mPosition = 0;
-                } else {
-                    timestamp.mPosition -= positionOffset;
-                }
-            }
-            return NO_ERROR;
+        collectTimestamps_l();
+        if (mTimestamp.mTimeNs[ExtendedTimestamp::LOCATION_KERNEL] <= 0) {
+            return INVALID_OPERATION;
         }
+        timestamp.mPosition = mTimestamp.mPosition[ExtendedTimestamp::LOCATION_KERNEL];
+        const int64_t timeNs = mTimestamp.mTimeNs[ExtendedTimestamp::LOCATION_KERNEL];
+        timestamp.mTime.tv_sec = timeNs / NANOS_PER_SECOND;
+        timestamp.mTime.tv_nsec = timeNs - (timestamp.mTime.tv_sec * NANOS_PER_SECOND);
+        return NO_ERROR;
     }
     return INVALID_OPERATION;
 }
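getTimestamp_l() above now delegates to collectTimestamps_l() and converts the kernel entry of the ExtendedTimestamp into the legacy frame-position-plus-timespec form. A minimal sketch of that nanosecond split, using stand-in types (SketchTimestamp, kNanosPerSecond) in place of AudioTimestamp and NANOS_PER_SECOND:

#include <cstdint>
#include <time.h>

namespace {
constexpr int64_t kNanosPerSecond = 1000000000LL;  // stand-in for NANOS_PER_SECOND

struct SketchTimestamp {          // stand-in for the legacy AudioTimestamp
    uint32_t mPosition = 0;
    struct timespec mTime = {};
};

// Convert a (frames, timeNs) pair, as stored at LOCATION_KERNEL, into the
// position + timespec form returned to getTimestamp() callers.
SketchTimestamp toLegacyTimestamp(int64_t frames, int64_t timeNs) {
    SketchTimestamp ts;
    ts.mPosition = static_cast<uint32_t>(frames);
    ts.mTime.tv_sec = timeNs / kNanosPerSecond;
    ts.mTime.tv_nsec = timeNs - ts.mTime.tv_sec * kNanosPerSecond;
    return ts;
}
}  // namespace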
@@ -5595,8 +5608,6 @@
                                                        status_t& status)
 {
     bool reconfig = false;
-    bool a2dpDeviceChanged = false;
-
     status = NO_ERROR;
 
     AutoPark<FastMixer> park(mFastMixer);
@@ -5668,7 +5679,7 @@
         }
     }
 
-    return reconfig || a2dpDeviceChanged;
+    return reconfig;
 }
 
 
@@ -5869,8 +5880,15 @@
         sp<Track> l = mActiveTracks.getLatest();
         bool last = l.get() == track;
 
-        if (track->isPausing()) {
-            track->setPaused();
+        if (track->isPausePending()) {
+            track->pauseAck();
+            // It is possible that the track has already been flushed or stopped.
+            // Other operations, such as a pending flush, may occur on the next prepare.
+            if (track->isPausing()) {
+                track->setPaused();
+            }
+            // Always perform the hardware pause, since an immediate flush may have
+            // cleared the PAUSING state so that isPausing() is no longer true.
             if (mHwSupportsPause && last && !mHwPaused) {
                 doHwPause = true;
                 mHwPaused = true;
@@ -6129,8 +6147,6 @@
                                                               status_t& status)
 {
     bool reconfig = false;
-    bool a2dpDeviceChanged = false;
-
     status = NO_ERROR;
 
     AudioParameter param = AudioParameter(keyValuePair);
@@ -6165,7 +6181,7 @@
         }
     }
 
-    return reconfig || a2dpDeviceChanged;
+    return reconfig;
 }
 
 uint32_t AudioFlinger::DirectOutputThread::activeSleepTimeUs() const
@@ -6222,7 +6238,7 @@
     mOutput->flush();
     mHwPaused = false;
     mFlushPending = false;
-    mTimestampVerifier.discontinuity(); // DIRECT and OFFLOADED flush resets frame count.
+    mTimestampVerifier.discontinuity(discontinuityForStandbyOrFlush());
     mTimestamp.clear();
 }
 
@@ -6414,8 +6430,15 @@
             continue;
         }
 
-        if (track->isPausing()) {
-            track->setPaused();
+        if (track->isPausePending()) {
+            track->pauseAck();
+            // It is possible that the track has already been flushed or stopped.
+            // Other operations, such as a pending flush, may occur on the next prepare.
+            if (track->isPausing()) {
+                track->setPaused();
+            }
+            // Always perform the hardware pause if last, since an immediate flush may
+            // have cleared the PAUSING state so that isPausing() is no longer true.
             if (last) {
                 if (mHwSupportsPause && !mHwPaused) {
                     doHwPause = true;
@@ -6558,13 +6581,14 @@
                     track->presentationComplete(framesWritten, audioHALFrames);
                     track->reset();
                     tracksToRemove->add(track);
-                    // DIRECT and OFFLOADED stop resets frame counts.
+                    // OFFLOADED stop resets frame counts.
                     if (!mUseAsyncWrite) {
                         // If we don't get explicit drain notification we must
                         // register discontinuity regardless of whether this is
                         // the previous (!last) or the upcoming (last) track
                         // to avoid skipping the discontinuity.
-                        mTimestampVerifier.discontinuity();
+                        mTimestampVerifier.discontinuity(
+                                mTimestampVerifier.DISCONTINUITY_MODE_ZERO);
                     }
                 }
             } else {
@@ -6812,13 +6836,19 @@
     // from different OutputTracks and their associated MixerThreads (e.g. one may
     // nearly empty and the other may be dropping data).
 
+    // TODO b/182392769: use identity util, move to server edge
+    Identity identity = Identity();
+    identity.uid = VALUE_OR_FATAL(legacy2aidl_uid_t_int32_t(
+        IPCThreadState::self()->getCallingUid()));
+    identity.pid = VALUE_OR_FATAL(legacy2aidl_pid_t_int32_t(
+        IPCThreadState::self()->getCallingPid()));
     sp<OutputTrack> outputTrack = new OutputTrack(thread,
                                             this,
                                             mSampleRate,
                                             mFormat,
                                             mChannelMask,
                                             frameCount,
-                                            IPCThreadState::self()->getCallingUid());
+                                            identity);
     status_t status = outputTrack != 0 ? outputTrack->initCheck() : (status_t) NO_MEMORY;
     if (status != NO_ERROR) {
         ALOGE("addOutputTrack() initCheck failed %d", status);
@@ -7422,7 +7452,9 @@
         if (mPipeSource.get() == nullptr /* don't obtain for FastCapture, could block */) {
             int64_t position, time;
             if (mStandby) {
-                mTimestampVerifier.discontinuity();
+                mTimestampVerifier.discontinuity(audio_is_linear_pcm(mFormat) ?
+                    mTimestampVerifier.DISCONTINUITY_MODE_CONTINUOUS :
+                    mTimestampVerifier.DISCONTINUITY_MODE_ZERO);
             } else if (mSource->getCapturePosition(&position, &time) == NO_ERROR
                     && time > mTimestamp.mTimeNs[ExtendedTimestamp::LOCATION_KERNEL]) {
 
@@ -7730,12 +7762,11 @@
         audio_session_t sessionId,
         size_t *pNotificationFrameCount,
         pid_t creatorPid,
-        uid_t uid,
+        const Identity& identity,
         audio_input_flags_t *flags,
         pid_t tid,
         status_t *status,
-        audio_port_handle_t portId,
-        const String16& opPackageName)
+        audio_port_handle_t portId)
 {
     size_t frameCount = *pFrameCount;
     size_t notificationFrameCount = *pNotificationFrameCount;
@@ -7868,8 +7899,8 @@
 
         track = new RecordTrack(this, client, attr, sampleRate,
                       format, channelMask, frameCount,
-                      nullptr /* buffer */, (size_t)0 /* bufferSize */, sessionId, creatorPid, uid,
-                      *flags, TrackBase::TYPE_DEFAULT, opPackageName, portId);
+                      nullptr /* buffer */, (size_t)0 /* bufferSize */, sessionId, creatorPid,
+                      identity, *flags, TrackBase::TYPE_DEFAULT, portId);
 
         lStatus = track->initCheck();
         if (lStatus != NO_ERROR) {
@@ -8077,6 +8108,9 @@
 {
     ALOGV("RecordThread::getActiveMicrophones");
     AutoMutex _l(mLock);
+    if (mInput == nullptr || mInput->stream == nullptr) {
+        return NO_INIT;
+    }
     status_t status = mInput->stream->getActiveMicrophones(activeMicrophones);
     return status;
 }
@@ -8086,6 +8120,9 @@
 {
     ALOGV("setPreferredMicrophoneDirection(%d)", direction);
     AutoMutex _l(mLock);
+    if (mInput == nullptr || mInput->stream == nullptr) {
+        return NO_INIT;
+    }
     return mInput->stream->setPreferredMicrophoneDirection(direction);
 }
 
@@ -8093,6 +8130,9 @@
 {
     ALOGV("setPreferredMicrophoneFieldDimension(%f)", zoom);
     AutoMutex _l(mLock);
+    if (mInput == nullptr || mInput->stream == nullptr) {
+        return NO_INIT;
+    }
     return mInput->stream->setPreferredMicrophoneFieldDimension(zoom);
 }
 
@@ -8418,13 +8458,11 @@
         }
         if (reconfig) {
             if (status == BAD_VALUE) {
-                uint32_t sRate;
-                audio_channel_mask_t channelMask;
-                audio_format_t format;
-                if (mInput->stream->getAudioProperties(&sRate, &channelMask, &format) == OK &&
-                        audio_is_linear_pcm(format) && audio_is_linear_pcm(reqFormat) &&
-                        sRate <= (AUDIO_RESAMPLER_DOWN_RATIO_MAX * samplingRate) &&
-                        audio_channel_count_from_in_mask(channelMask) <= FCC_8) {
+                audio_config_base_t config = AUDIO_CONFIG_BASE_INITIALIZER;
+                if (mInput->stream->getAudioProperties(&config) == OK &&
+                        audio_is_linear_pcm(config.format) && audio_is_linear_pcm(reqFormat) &&
+                        config.sample_rate <= (AUDIO_RESAMPLER_DOWN_RATIO_MAX * samplingRate) &&
+                        audio_channel_count_from_in_mask(config.channel_mask) <= FCC_8) {
                     status = NO_ERROR;
                 }
             }
@@ -8884,7 +8922,7 @@
                                          audio_port_handle_t *handle)
 {
     ALOGV("%s clientUid %d mStandby %d mPortId %d *handle %d", __FUNCTION__,
-          client.clientUid, mStandby, mPortId, *handle);
+          client.identity.uid, mStandby, mPortId, *handle);
     if (mHalStream == 0) {
         return NO_INIT;
     }
@@ -8916,8 +8954,7 @@
         ret = AudioSystem::getOutputForAttr(&mAttr, &io,
                                             mSessionId,
                                             &stream,
-                                            client.clientPid,
-                                            client.clientUid,
+                                            client.identity,
                                             &config,
                                             flags,
                                             &deviceId,
@@ -8934,9 +8971,7 @@
         ret = AudioSystem::getInputForAttr(&mAttr, &io,
                                               RECORD_RIID_INVALID,
                                               mSessionId,
-                                              client.clientPid,
-                                              client.clientUid,
-                                              client.packageName,
+                                              client.identity,
                                               &config,
                                               AUDIO_INPUT_FLAG_MMAP_NOIRQ,
                                               &deviceId,
@@ -8976,16 +9011,15 @@
 
     // Given that MmapThread::mAttr is mutable, should a MmapTrack have attributes ?
     sp<MmapTrack> track = new MmapTrack(this, attr == nullptr ? mAttr : *attr, mSampleRate, mFormat,
-                                        mChannelMask, mSessionId, isOutput(), client.clientUid,
-                                        client.clientPid, IPCThreadState::self()->getCallingPid(),
-                                        portId);
+                                        mChannelMask, mSessionId, isOutput(), client.identity,
+                                        IPCThreadState::self()->getCallingPid(), portId);
 
     if (isOutput()) {
         // force volume update when a new track is added
         mHalVolFloat = -1.0f;
     } else if (!track->isSilenced_l()) {
         for (const sp<MmapTrack> &t : mActiveTracks) {
-            if (t->isSilenced_l() && t->uid() != client.clientUid)
+            if (t->isSilenced_l() && t->uid() != client.identity.uid)
                 t->invalidate();
         }
     }
diff --git a/services/audioflinger/Threads.h b/services/audioflinger/Threads.h
index c974252..e63642b 100644
--- a/services/audioflinger/Threads.h
+++ b/services/audioflinger/Threads.h
@@ -612,6 +612,11 @@
                 ExtendedTimestamp       mTimestamp;
                 TimestampVerifier< // For timestamp statistics.
                         int64_t /* frame count */, int64_t /* time ns */> mTimestampVerifier;
+                // DIRECT and OFFLOAD threads should reset frame count to zero on stop/flush
+                // TODO: add confirmation checks:
+                // 1) Do DIRECT threads with linear PCM format really reset to 0?
+                // 2) Is frame count really valid if not linear pcm?
+                // 3) Are all 64 bits of position returned, not just lowest 32 bits?
                 // Timestamp corrected device should be a single device.
                 audio_devices_t         mTimestampCorrectedDevice = AUDIO_DEVICE_NONE;
 
@@ -882,12 +887,11 @@
                                 audio_session_t sessionId,
                                 audio_output_flags_t *flags,
                                 pid_t creatorPid,
+                                const media::permission::Identity& identity,
                                 pid_t tid,
-                                uid_t uid,
                                 status_t *status /*non-NULL*/,
                                 audio_port_handle_t portId,
-                                const sp<media::IAudioTrackCallback>& callback,
-                                const std::string& opPackageName);
+                                const sp<media::IAudioTrackCallback>& callback);
 
                 AudioStreamOut* getOutput() const;
                 AudioStreamOut* clearOutput();
@@ -1052,6 +1056,8 @@
 
     int64_t                         mBytesWritten;
     int64_t                         mFramesWritten; // not reset on standby
+    int64_t                         mLastFramesWritten = -1; // track changes in timestamp
+                                                             // server frames written.
     int64_t                         mSuspendedFrames; // not reset on standby
 
     // mHapticChannelMask and mHapticChannelCount will only be valid when the thread support
@@ -1064,6 +1070,14 @@
     // copy rather than the one in AudioFlinger.  This optimization saves a lock.
     bool                            mMasterMute;
                 void        setMasterMute_l(bool muted) { mMasterMute = muted; }
+
+                auto discontinuityForStandbyOrFlush() const { // call on threadLoop thread or with lock held.
+                    return ((mType == DIRECT && !audio_is_linear_pcm(mFormat))
+                                    || mType == OFFLOAD)
+                            ? mTimestampVerifier.DISCONTINUITY_MODE_ZERO
+                            : mTimestampVerifier.DISCONTINUITY_MODE_CONTINUOUS;
+                }
+
 protected:
     ActiveTracks<Track>     mActiveTracks;
 
@@ -1115,6 +1129,8 @@
     void        updateMetadata_l() final;
     virtual void sendMetadataToBackend_l(const StreamOutHalInterface::SourceMetadata& metadata);
 
+    void        collectTimestamps_l();
+
     // The Tracks class manages tracks added and removed from the Thread.
     template <typename T>
     class Tracks {
@@ -1642,12 +1658,11 @@
                     audio_session_t sessionId,
                     size_t *pNotificationFrameCount,
                     pid_t creatorPid,
-                    uid_t uid,
+                    const media::permission::Identity& identity,
                     audio_input_flags_t *flags,
                     pid_t tid,
                     status_t *status /*non-NULL*/,
-                    audio_port_handle_t portId,
-                    const String16& opPackageName);
+                    audio_port_handle_t portId);
 
             status_t    start(RecordTrack* recordTrack,
                               AudioSystem::sync_event_t event,
diff --git a/services/audioflinger/TrackMetrics.h b/services/audioflinger/TrackMetrics.h
index af16448..7fb69be 100644
--- a/services/audioflinger/TrackMetrics.h
+++ b/services/audioflinger/TrackMetrics.h
@@ -67,7 +67,7 @@
         mIntervalStartTimeNs = systemTime();
     }
 
-    void logConstructor(pid_t creatorPid, uid_t creatorUid,
+    void logConstructor(pid_t creatorPid, uid_t creatorUid, int32_t internalTrackId,
             const std::string& traits = {},
             audio_stream_type_t streamType = AUDIO_STREAM_DEFAULT) const {
         // Once this item is logged by the server, the client can add properties.
@@ -78,6 +78,7 @@
             .set(AMEDIAMETRICS_PROP_ALLOWUID, (int32_t)creatorUid)
             .set(AMEDIAMETRICS_PROP_EVENT,
                     AMEDIAMETRICS_PROP_PREFIX_SERVER AMEDIAMETRICS_PROP_EVENT_VALUE_CTOR)
+            .set(AMEDIAMETRICS_PROP_INTERNALTRACKID, internalTrackId)
             .set(AMEDIAMETRICS_PROP_TRAITS, traits);
         // log streamType from the service, since client doesn't know chosen streamType.
         if (streamType != AUDIO_STREAM_DEFAULT) {
diff --git a/services/audioflinger/Tracks.cpp b/services/audioflinger/Tracks.cpp
index 86c92ea..21651af 100644
--- a/services/audioflinger/Tracks.cpp
+++ b/services/audioflinger/Tracks.cpp
@@ -65,6 +65,7 @@
 
 using ::android::aidl_utils::binderStatusFromStatusT;
 using binder::Status;
+using media::permission::Identity;
 using media::VolumeShaper;
 // ----------------------------------------------------------------------------
 //      TrackBase
@@ -237,6 +238,14 @@
     }
 }
 
+// TODO b/182392769: use identity util
+static Identity audioServerIdentity(pid_t pid) {
+    Identity i{};
+    i.uid = AID_AUDIOSERVER;
+    i.pid = pid;
+    return i;
+}
+
 status_t AudioFlinger::ThreadBase::TrackBase::initCheck() const
 {
     status_t status;
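audioServerIdentity() above fills only the uid and pid fields of the Identity parcelable; the same pattern recurs wherever this change builds an Identity by hand. A minimal sketch of constructing one for an ordinary app client, using only the fields this patch touches (uid, pid, packageName) and the header path it introduces; real call sites convert uid_t/pid_t with the legacy2aidl_* helpers shown above:

#include <string>
#include <android/media/permission/Identity.h>

using android::media::permission::Identity;

// Sketch: an Identity for an app client (values are illustrative).
static Identity appClientIdentity(int32_t uid, int32_t pid,
                                  const std::string& packageName) {
    Identity i;
    i.uid = uid;
    i.pid = pid;
    i.packageName = packageName;   // std::optional<std::string>; may be left unset
    return i;
}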
@@ -489,10 +498,11 @@
 // static
 sp<AudioFlinger::PlaybackThread::OpPlayAudioMonitor>
 AudioFlinger::PlaybackThread::OpPlayAudioMonitor::createIfNeeded(
-            uid_t uid, const audio_attributes_t& attr, int id, audio_stream_type_t streamType,
-            const std::string& opPackageName)
+            const Identity& identity, const audio_attributes_t& attr, int id,
+            audio_stream_type_t streamType)
 {
     Vector <String16> packages;
+    uid_t uid = VALUE_OR_FATAL(aidl2legacy_int32_t_uid_t(identity.uid));
     getPackagesForUid(uid, packages);
     if (isServiceUid(uid)) {
         if (packages.isEmpty()) {
@@ -515,31 +525,36 @@
         return nullptr;
     }
 
-    String16 opPackageNameStr(opPackageName.c_str());
-    if (opPackageName.empty()) {
+    // TODO b/182392769: use identity util
+    std::optional<std::string> opPackageNameStr = identity.packageName;
+    if (!identity.packageName.has_value()) {
         // If no package name is provided by the client, use the first associated with the uid
         if (!packages.isEmpty()) {
-            opPackageNameStr = packages[0];
+            opPackageNameStr =
+                VALUE_OR_FATAL(legacy2aidl_String16_string(packages[0]));
         }
     } else {
         // If the provided package name is invalid, we force app ops denial by clearing the package
         // name passed to OpPlayAudioMonitor
+        String16 opPackageLegacy = VALUE_OR_FATAL(
+            aidl2legacy_string_view_String16(opPackageNameStr.value_or("")));
         if (std::find_if(packages.begin(), packages.end(),
-                [&opPackageNameStr](const auto& package) {
-                return opPackageNameStr == package; }) == packages.end()) {
+                [&opPackageLegacy](const auto& package) {
+                return opPackageLegacy == package; }) == packages.end()) {
             ALOGW("The package name(%s) provided does not correspond to the uid %d, "
-                  "force muting the track", opPackageName.c_str(), uid);
-            // Set package name as an empty string so that hasOpPlayAudio will always return false.
-            opPackageNameStr = String16("");
+                  "force muting the track", opPackageNameStr.value().c_str(), uid);
+            // Set null package name so hasOpPlayAudio will always return false.
+            opPackageNameStr = std::optional<std::string>();
         }
     }
-    return new OpPlayAudioMonitor(uid, attr.usage, id, opPackageNameStr);
+    Identity adjIdentity = identity;
+    adjIdentity.packageName = opPackageNameStr;
+    return new OpPlayAudioMonitor(adjIdentity, attr.usage, id);
 }
 
 AudioFlinger::PlaybackThread::OpPlayAudioMonitor::OpPlayAudioMonitor(
-        uid_t uid, audio_usage_t usage, int id, const String16& opPackageName)
-        : mHasOpPlayAudio(true), mUid(uid), mUsage((int32_t) usage), mId(id),
-          mOpPackageName(opPackageName)
+        const Identity& identity, audio_usage_t usage, int id)
+        : mHasOpPlayAudio(true), mIdentity(identity), mUsage((int32_t) usage), mId(id)
 {
 }
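createIfNeeded() above resolves the package name used for OP_PLAY_AUDIO watching: a missing name is filled from the uid's first installed package, and a name that does not belong to the uid is cleared so the op check always denies. A standalone sketch of that decision, not part of the patch, with the package lookup abstracted into a plain vector standing in for getPackagesForUid():

#include <algorithm>
#include <optional>
#include <string>
#include <vector>

// Sketch: decide which package name the app-op monitor should watch.
// Returning nullopt makes the op check deny, i.e. the track is force-muted.
static std::optional<std::string> resolveOpPackage(
        const std::optional<std::string>& requested,
        const std::vector<std::string>& packagesForUid) {
    if (!requested.has_value()) {
        if (!packagesForUid.empty()) {
            return packagesForUid.front();   // fall back to the first package for the uid
        }
        return std::nullopt;
    }
    if (std::find(packagesForUid.begin(), packagesForUid.end(), *requested)
            == packagesForUid.end()) {
        return std::nullopt;                 // name does not match the uid: force denial
    }
    return requested;
}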
 
@@ -554,9 +569,11 @@
 void AudioFlinger::PlaybackThread::OpPlayAudioMonitor::onFirstRef()
 {
     checkPlayAudioForUsage();
-    if (mOpPackageName.size() != 0) {
+    if (mIdentity.packageName.has_value()) {
         mOpCallback = new PlayAudioOpCallback(this);
-        mAppOpsManager.startWatchingMode(AppOpsManager::OP_PLAY_AUDIO, mOpPackageName, mOpCallback);
+        mAppOpsManager.startWatchingMode(AppOpsManager::OP_PLAY_AUDIO,
+            VALUE_OR_FATAL(aidl2legacy_string_view_String16(mIdentity.packageName.value_or(""))),
+            mOpCallback);
     }
 }
 
@@ -569,11 +586,14 @@
 // - not called from PlayAudioOpCallback because the callback is not installed in this case
 void AudioFlinger::PlaybackThread::OpPlayAudioMonitor::checkPlayAudioForUsage()
 {
-    if (mOpPackageName.size() == 0) {
+    if (!mIdentity.packageName.has_value()) {
         mHasOpPlayAudio.store(false);
     } else {
+        uid_t uid = VALUE_OR_FATAL(aidl2legacy_int32_t_uid_t(mIdentity.uid));
+        String16 packageName = VALUE_OR_FATAL(
+            aidl2legacy_string_view_String16(mIdentity.packageName.value_or("")));
         bool hasIt = mAppOpsManager.checkAudioOpNoThrow(AppOpsManager::OP_PLAY_AUDIO,
-                    mUsage, mUid, mOpPackageName) == AppOpsManager::MODE_ALLOWED;
+                    mUsage, uid, packageName) == AppOpsManager::MODE_ALLOWED;
         ALOGD("OpPlayAudio: track:%d usage:%d %smuted", mId, mUsage, hasIt ? "not " : "");
         mHasOpPlayAudio.store(hasIt);
     }
@@ -623,12 +643,11 @@
             const sp<IMemory>& sharedBuffer,
             audio_session_t sessionId,
             pid_t creatorPid,
-            uid_t uid,
+            const Identity& identity,
             audio_output_flags_t flags,
             track_type type,
             audio_port_handle_t portId,
-            size_t frameCountToBeReady,
-            const std::string opPackageName)
+            size_t frameCountToBeReady)
     :   TrackBase(thread, client, attr, sampleRate, format, channelMask, frameCount,
                   // TODO: Using unsecurePointer() has some associated security pitfalls
                   //       (see declaration for details).
@@ -636,7 +655,8 @@
                   //       issue (e.g. by copying).
                   (sharedBuffer != 0) ? sharedBuffer->unsecurePointer() : buffer,
                   (sharedBuffer != 0) ? sharedBuffer->size() : bufferSize,
-                  sessionId, creatorPid, uid, true /*isOut*/,
+                  sessionId, creatorPid,
+                  VALUE_OR_FATAL(aidl2legacy_int32_t_uid_t(identity.uid)), true /*isOut*/,
                   (type == TYPE_PATCH) ? ( buffer == NULL ? ALLOC_LOCAL : ALLOC_NONE) : ALLOC_CBLK,
                   type,
                   portId,
@@ -651,10 +671,9 @@
     mPresentationCompleteFrames(0),
     mFrameMap(16 /* sink-frame-to-track-frame map memory */),
     mVolumeHandler(new media::VolumeHandler(sampleRate)),
-    mOpPlayAudioMonitor(OpPlayAudioMonitor::createIfNeeded(
-            uid, attr, id(), streamType, opPackageName)),
+    mOpPlayAudioMonitor(OpPlayAudioMonitor::createIfNeeded(identity, attr, id(),
+        streamType)),
     // mSinkTimestamp
-    mFrameCountToBeReady(frameCountToBeReady),
     mFastIndex(-1),
     mCachedVolume(1.0),
     /* The track might not play immediately after being active, similarly as if its volume was 0.
@@ -674,6 +693,7 @@
         return;
     }
 
+    uid_t uid = VALUE_OR_FATAL(aidl2legacy_int32_t_uid_t(identity.uid));
     if (!thread->isTrackAllowed_l(channelMask, format, sessionId, uid)) {
         ALOGE("%s(%d): no more tracks available", __func__, mId);
         releaseCblk(); // this makes the track invalid.
@@ -688,6 +708,7 @@
                 mFrameSize, sampleRate);
     }
     mServerProxy = mAudioTrackServerProxy;
+    mServerProxy->setStartThresholdInFrames(frameCountToBeReady); // update the Cblk value
 
     // only allocate a fast track index if we were able to allocate a normal track name
     if (flags & AUDIO_OUTPUT_FLAG_FAST) {
@@ -718,13 +739,15 @@
         // HapticGenerator effect, which will generate haptic data, on the track. In that case,
         // external vibration is always created for all tracks attached to haptic playback thread.
         mAudioVibrationController = new AudioVibrationController(this);
+        std::string packageName = identity.packageName.has_value() ?
+            identity.packageName.value() : "";
         mExternalVibration = new os::ExternalVibration(
-                mUid, opPackageName, mAttr, mAudioVibrationController);
+                mUid, packageName, mAttr, mAudioVibrationController);
     }
 
     // Once this item is logged by the server, the client can add properties.
     const char * const traits = sharedBuffer == 0 ? "" : "static";
-    mTrackMetrics.logConstructor(creatorPid, uid, traits, streamType);
+    mTrackMetrics.logConstructor(creatorPid, uid, id(), traits, streamType);
 }
 
 AudioFlinger::PlaybackThread::Track::~Track()
@@ -1018,7 +1041,10 @@
     }
 
     size_t bufferSizeInFrames = mServerProxy->getBufferSizeInFrames();
-    size_t framesToBeReady = std::min(mFrameCountToBeReady, bufferSizeInFrames);
+    // Note: mServerProxy->getStartThresholdInFrames() is clamped.
+    const size_t startThresholdInFrames = mServerProxy->getStartThresholdInFrames();
+    const size_t framesToBeReady = std::clamp(  // clamp again to validate client values.
+            std::min(startThresholdInFrames, bufferSizeInFrames), size_t(1), mFrameCount);
 
     if (framesReady() >= framesToBeReady || (mCblk->mFlags & CBLK_FORCEREADY)) {
         ALOGV("%s(%d): consider track ready with %zu/%zu, target was %zu)",
@@ -1193,6 +1219,9 @@
             mState = PAUSING;
             ALOGV("%s(%d): ACTIVE/RESUMING => PAUSING on thread %d",
                     __func__, mId, (int)mThreadIoHandle);
+            if (isOffloadedOrDirect()) {
+                mPauseHwPending = true;
+            }
             playbackThread->broadcast_l();
             break;
 
@@ -1280,6 +1309,11 @@
     mFlushHwPending = false;
 }
 
+void AudioFlinger::PlaybackThread::Track::pauseAck()
+{
+    mPauseHwPending = false;
+}
+
 void AudioFlinger::PlaybackThread::Track::reset()
 {
     // Do not reset twice to avoid discarding data written just after a flush and before
@@ -1821,12 +1855,12 @@
             audio_format_t format,
             audio_channel_mask_t channelMask,
             size_t frameCount,
-            uid_t uid)
+            const Identity& identity)
     :   Track(playbackThread, NULL, AUDIO_STREAM_PATCH,
               audio_attributes_t{} /* currently unused for output track */,
               sampleRate, format, channelMask, frameCount,
               nullptr /* buffer */, (size_t)0 /* bufferSize */, nullptr /* sharedBuffer */,
-              AUDIO_SESSION_NONE, getpid(), uid, AUDIO_OUTPUT_FLAG_NONE,
+              AUDIO_SESSION_NONE, getpid(), identity, AUDIO_OUTPUT_FLAG_NONE,
               TYPE_OUTPUT),
     mActive(false), mSourceThread(sourceThread)
 {
@@ -2056,8 +2090,8 @@
               audio_attributes_t{} /* currently unused for patch track */,
               sampleRate, format, channelMask, frameCount,
               buffer, bufferSize, nullptr /* sharedBuffer */,
-              AUDIO_SESSION_NONE, getpid(), AID_AUDIOSERVER, flags, TYPE_PATCH,
-              AUDIO_PORT_HANDLE_NONE, frameCountToBeReady),
+              AUDIO_SESSION_NONE, getpid(), audioServerIdentity(getpid()), flags,
+              TYPE_PATCH, AUDIO_PORT_HANDLE_NONE, frameCountToBeReady),
         PatchTrackBase(new ClientProxy(mCblk, mBuffer, frameCount, mFrameSize, true, true),
                        *playbackThread, timeout)
 {
@@ -2194,41 +2228,44 @@
 // static
 sp<AudioFlinger::RecordThread::OpRecordAudioMonitor>
 AudioFlinger::RecordThread::OpRecordAudioMonitor::createIfNeeded(
-            uid_t uid, const audio_attributes_t& attr, const String16& opPackageName)
+            const Identity& identity, const audio_attributes_t& attr)
 {
-    if (isServiceUid(uid)) {
-        ALOGV("not silencing record for service uid:%d pack:%s",
-                uid, String8(opPackageName).string());
+    if (isServiceUid(identity.uid)) {
+        ALOGV("not silencing record for service %s",
+                identity.toString().c_str());
         return nullptr;
     }
 
     // Capturing from FM TUNER output is not controlled by OP_RECORD_AUDIO
     // because it does not affect users privacy as does capturing from an actual microphone.
     if (attr.source == AUDIO_SOURCE_FM_TUNER) {
-        ALOGV("not muting FM TUNER capture for uid %d", uid);
+        ALOGV("not muting FM TUNER capture for uid %d", identity.uid);
         return nullptr;
     }
 
-    if (opPackageName.size() == 0) {
+    if (!identity.packageName.has_value() || identity.packageName.value().size() == 0) {
         Vector<String16> packages;
         // no package name, happens with SL ES clients
         // query package manager to find one
         PermissionController permissionController;
-        permissionController.getPackagesForUid(uid, packages);
+        permissionController.getPackagesForUid(identity.uid, packages);
         if (packages.isEmpty()) {
             return nullptr;
         } else {
-            ALOGV("using pack:%s for uid:%d", String8(packages[0]).string(), uid);
-            return new OpRecordAudioMonitor(uid, packages[0]);
+            Identity adjIdentity = identity;
+            adjIdentity.packageName =
+                VALUE_OR_FATAL(legacy2aidl_String16_string(packages[0]));
+            ALOGV("using identity:%s", adjIdentity.toString().c_str());
+            return new OpRecordAudioMonitor(adjIdentity);
         }
     }
 
-    return new OpRecordAudioMonitor(uid, opPackageName);
+    return new OpRecordAudioMonitor(identity);
 }
 
 AudioFlinger::RecordThread::OpRecordAudioMonitor::OpRecordAudioMonitor(
-        uid_t uid, const String16& opPackageName)
-        : mHasOpRecordAudio(true), mUid(uid), mPackage(opPackageName)
+        const Identity& identity)
+        : mHasOpRecordAudio(true), mIdentity(identity)
 {
 }
 
@@ -2244,8 +2281,10 @@
 {
     checkRecordAudio();
     mOpCallback = new RecordAudioOpCallback(this);
-    ALOGV("start watching OP_RECORD_AUDIO for pack:%s", String8(mPackage).string());
-    mAppOpsManager.startWatchingMode(AppOpsManager::OP_RECORD_AUDIO, mPackage, mOpCallback);
+    ALOGV("start watching OP_RECORD_AUDIO for %s", mIdentity.toString().c_str());
+    mAppOpsManager.startWatchingMode(AppOpsManager::OP_RECORD_AUDIO,
+        VALUE_OR_FATAL(aidl2legacy_string_view_String16(mIdentity.packageName.value_or(""))),
+        mOpCallback);
 }
 
 bool AudioFlinger::RecordThread::OpRecordAudioMonitor::hasOpRecordAudio() const {
@@ -2260,14 +2299,17 @@
 // - not called from RecordAudioOpCallback because the callback is not installed in this case
 void AudioFlinger::RecordThread::OpRecordAudioMonitor::checkRecordAudio()
 {
+
     const int32_t mode = mAppOpsManager.checkOp(AppOpsManager::OP_RECORD_AUDIO,
-            mUid, mPackage);
+            mIdentity.uid, VALUE_OR_FATAL(aidl2legacy_string_view_String16(
+                mIdentity.packageName.value_or(""))));
     const bool hasIt =  (mode == AppOpsManager::MODE_ALLOWED);
     // verbose logging only log when appOp changed
     ALOGI_IF(hasIt != mHasOpRecordAudio.load(),
-            "OP_RECORD_AUDIO missing, %ssilencing record uid%d pack:%s",
-            hasIt ? "un" : "", mUid, String8(mPackage).string());
+            "OP_RECORD_AUDIO missing, %ssilencing record %s",
+            hasIt ? "un" : "", mIdentity.toString().c_str());
     mHasOpRecordAudio.store(hasIt);
+
 }
 
 AudioFlinger::RecordThread::OpRecordAudioMonitor::RecordAudioOpCallback::RecordAudioOpCallback(
@@ -2361,14 +2403,15 @@
             size_t bufferSize,
             audio_session_t sessionId,
             pid_t creatorPid,
-            uid_t uid,
+            const Identity& identity,
             audio_input_flags_t flags,
             track_type type,
-            const String16& opPackageName,
             audio_port_handle_t portId)
     :   TrackBase(thread, client, attr, sampleRate, format,
                   channelMask, frameCount, buffer, bufferSize, sessionId,
-                  creatorPid, uid, false /*isOut*/,
+                  creatorPid,
+                  VALUE_OR_FATAL(aidl2legacy_int32_t_uid_t(identity.uid)),
+                  false /*isOut*/,
                   (type == TYPE_DEFAULT) ?
                           ((flags & AUDIO_INPUT_FLAG_FAST) ? ALLOC_PIPE : ALLOC_CBLK) :
                           ((buffer == NULL) ? ALLOC_LOCAL : ALLOC_NONE),
@@ -2380,7 +2423,7 @@
         mRecordBufferConverter(NULL),
         mFlags(flags),
         mSilenced(false),
-        mOpRecordAudioMonitor(OpRecordAudioMonitor::createIfNeeded(uid, attr, opPackageName))
+        mOpRecordAudioMonitor(OpRecordAudioMonitor::createIfNeeded(identity, attr))
 {
     if (mCblk == NULL) {
         return;
@@ -2421,7 +2464,7 @@
 #endif
 
     // Once this item is logged by the server, the client can add properties.
-    mTrackMetrics.logConstructor(creatorPid, uid);
+    mTrackMetrics.logConstructor(creatorPid, uid(), id());
 }
 
 AudioFlinger::RecordThread::RecordTrack::~RecordTrack()
@@ -2692,8 +2735,8 @@
     :   RecordTrack(recordThread, NULL,
                 audio_attributes_t{} /* currently unused for patch track */,
                 sampleRate, format, channelMask, frameCount,
-                buffer, bufferSize, AUDIO_SESSION_NONE, getpid(), AID_AUDIOSERVER,
-                flags, TYPE_PATCH, String16()),
+                buffer, bufferSize, AUDIO_SESSION_NONE, getpid(),
+                audioServerIdentity(getpid()), flags, TYPE_PATCH),
         PatchTrackBase(new ClientProxy(mCblk, mBuffer, frameCount, mFrameSize, false, true),
                        *recordThread, timeout)
 {
@@ -2970,21 +3013,23 @@
         audio_channel_mask_t channelMask,
         audio_session_t sessionId,
         bool isOut,
-        uid_t uid,
-        pid_t pid,
+        const Identity& identity,
         pid_t creatorPid,
         audio_port_handle_t portId)
     :   TrackBase(thread, NULL, attr, sampleRate, format,
                   channelMask, (size_t)0 /* frameCount */,
                   nullptr /* buffer */, (size_t)0 /* bufferSize */,
-                  sessionId, creatorPid, uid, isOut,
+                  sessionId, creatorPid,
+                  VALUE_OR_FATAL(aidl2legacy_int32_t_uid_t(identity.uid)),
+                  isOut,
                   ALLOC_NONE,
                   TYPE_DEFAULT, portId,
                   std::string(AMEDIAMETRICS_KEY_PREFIX_AUDIO_MMAP) + std::to_string(portId)),
-        mPid(pid), mSilenced(false), mSilencedNotified(false)
+        mPid(VALUE_OR_FATAL(aidl2legacy_int32_t_uid_t(identity.pid))),
+            mSilenced(false), mSilencedNotified(false)
 {
     // Once this item is logged by the server, the client can add properties.
-    mTrackMetrics.logConstructor(creatorPid, uid);
+    mTrackMetrics.logConstructor(creatorPid, uid(), id());
 }
 
 AudioFlinger::MmapThread::MmapTrack::~MmapTrack()
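
    // The hunks above replace the separate uid/pid/opPackageName arguments with a single
    // media::permission::Identity parcelable. A minimal sketch of populating one, assuming only
    // the field names visible in the hunks; the package name is illustrative and plain casts
    // stand in for the tree's aidl2legacy_*/legacy2aidl_* conversion helpers.
    #include <unistd.h>
    #include <android/media/permission/Identity.h>

    using android::media::permission::Identity;

    Identity makeCallerIdentity() {
        Identity identity;
        identity.uid = static_cast<int32_t>(getuid());   // AIDL carries uid/pid as int32_t
        identity.pid = static_cast<int32_t>(getpid());
        identity.packageName = "com.example.app";        // hypothetical package name
        return identity;
    }
    // The Identity is then passed where uid/pid/opPackageName used to be, e.g.
    // OpRecordAudioMonitor::createIfNeeded(identity, attr).
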
diff --git a/services/audiopolicy/AudioPolicyInterface.h b/services/audiopolicy/AudioPolicyInterface.h
index 9132086..5f052a5 100644
--- a/services/audiopolicy/AudioPolicyInterface.h
+++ b/services/audiopolicy/AudioPolicyInterface.h
@@ -21,6 +21,7 @@
 #include <media/AudioSystem.h>
 #include <media/AudioPolicy.h>
 #include <media/DeviceDescriptorBase.h>
+#include <android/media/permission/Identity.h>
 #include <utils/String8.h>
 
 namespace android {
@@ -122,7 +123,7 @@
                                         audio_io_handle_t *output,
                                         audio_session_t session,
                                         audio_stream_type_t *stream,
-                                        uid_t uid,
+                                        const media::permission::Identity& identity,
                                         const audio_config_t *config,
                                         audio_output_flags_t *flags,
                                         audio_port_handle_t *selectedDeviceId,
@@ -141,7 +142,7 @@
                                      audio_io_handle_t *input,
                                      audio_unique_id_t riid,
                                      audio_session_t session,
-                                     uid_t uid,
+                                     const media::permission::Identity& identity,
                                      const audio_config_base_t *config,
                                      audio_input_flags_t flags,
                                      audio_port_handle_t *selectedDeviceId,
@@ -272,8 +273,11 @@
 
     virtual status_t getSurroundFormats(unsigned int *numSurroundFormats,
                                         audio_format_t *surroundFormats,
-                                        bool *surroundFormatsEnabled,
-                                        bool reported) = 0;
+                                        bool *surroundFormatsEnabled) = 0;
+
+    virtual status_t getReportedSurroundFormats(unsigned int *numSurroundFormats,
+                                                audio_format_t *surroundFormats) = 0;
+
     virtual status_t setSurroundFormatEnabled(audio_format_t audioFormat, bool enabled) = 0;
 
     virtual bool     isHapticPlaybackSupported() = 0;
diff --git a/services/audiopolicy/common/include/policy.h b/services/audiopolicy/common/include/policy.h
index 0537365..552919d 100644
--- a/services/audiopolicy/common/include/policy.h
+++ b/services/audiopolicy/common/include/policy.h
@@ -226,6 +226,8 @@
             return AUDIO_DEVICE_OUT_SPEAKER_SAFE;
         } else if (deviceTypes.count(AUDIO_DEVICE_OUT_HDMI_ARC) != 0) {
             return AUDIO_DEVICE_OUT_HDMI_ARC;
+        } else if (deviceTypes.count(AUDIO_DEVICE_OUT_HDMI_EARC) != 0) {
+            return AUDIO_DEVICE_OUT_HDMI_EARC;
         } else if (deviceTypes.count(AUDIO_DEVICE_OUT_AUX_LINE) != 0) {
             return AUDIO_DEVICE_OUT_AUX_LINE;
         } else if (deviceTypes.count(AUDIO_DEVICE_OUT_SPDIF) != 0) {
@@ -240,4 +242,4 @@
             return a2dpDevices.empty() ? AUDIO_DEVICE_NONE : a2dpDevices[0];
         }
     }
-}
\ No newline at end of file
+}
diff --git a/services/audiopolicy/common/managerdefinitions/include/AudioPolicyMix.h b/services/audiopolicy/common/managerdefinitions/include/AudioPolicyMix.h
index c6bdb04..c2a20c6 100644
--- a/services/audiopolicy/common/managerdefinitions/include/AudioPolicyMix.h
+++ b/services/audiopolicy/common/managerdefinitions/include/AudioPolicyMix.h
@@ -77,6 +77,7 @@
 
     sp<DeviceDescriptor> getDeviceAndMixForInputSource(audio_source_t inputSource,
                                                        const DeviceVector &availableDeviceTypes,
+                                                       uid_t uid,
                                                        sp<AudioPolicyMix> *policyMix) const;
 
     /**
diff --git a/services/audiopolicy/common/managerdefinitions/include/DeviceDescriptor.h b/services/audiopolicy/common/managerdefinitions/include/DeviceDescriptor.h
index 05ec69e..20b4044 100644
--- a/services/audiopolicy/common/managerdefinitions/include/DeviceDescriptor.h
+++ b/services/audiopolicy/common/managerdefinitions/include/DeviceDescriptor.h
@@ -226,7 +226,9 @@
             add(devices);
             return size();
         }
-        return SortedVector::merge(devices);
+        ssize_t ret = SortedVector::merge(devices);
+        refreshTypes();
+        return ret;
     }
 
     /**
diff --git a/services/audiopolicy/common/managerdefinitions/include/PolicyAudioPort.h b/services/audiopolicy/common/managerdefinitions/include/PolicyAudioPort.h
index e6eef24..ab33b38 100644
--- a/services/audiopolicy/common/managerdefinitions/include/PolicyAudioPort.h
+++ b/services/audiopolicy/common/managerdefinitions/include/PolicyAudioPort.h
@@ -44,7 +44,7 @@
 
     bool equals(const sp<PolicyAudioPort> &right) const
     {
-        return getTagName() == right->getTagName();
+        return right != 0 && getTagName() == right->getTagName();
     }
 
     virtual sp<AudioPort> asAudioPort() const = 0;
diff --git a/services/audiopolicy/common/managerdefinitions/include/Serializer.h b/services/audiopolicy/common/managerdefinitions/include/Serializer.h
index 48c4147..b70c595 100644
--- a/services/audiopolicy/common/managerdefinitions/include/Serializer.h
+++ b/services/audiopolicy/common/managerdefinitions/include/Serializer.h
@@ -21,5 +21,9 @@
 namespace android {
 
 status_t deserializeAudioPolicyFile(const char *fileName, AudioPolicyConfig *config);
+// In VTS mode all vendor extensions are ignored. This is done because
+// VTS tests are built using AOSP code and thus cannot use vendor overlays
+// of system libraries.
+status_t deserializeAudioPolicyFileForVts(const char *fileName, AudioPolicyConfig *config);
 
 } // namespace android
diff --git a/services/audiopolicy/common/managerdefinitions/src/AudioPolicyMix.cpp b/services/audiopolicy/common/managerdefinitions/src/AudioPolicyMix.cpp
index c024a85..b209a88 100644
--- a/services/audiopolicy/common/managerdefinitions/src/AudioPolicyMix.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/AudioPolicyMix.cpp
@@ -391,6 +391,7 @@
 sp<DeviceDescriptor> AudioPolicyMixCollection::getDeviceAndMixForInputSource(
         audio_source_t inputSource,
         const DeviceVector &availDevices,
+        uid_t uid,
         sp<AudioPolicyMix> *policyMix) const
 {
     for (size_t i = 0; i < size(); i++) {
@@ -402,7 +403,11 @@
             if ((RULE_MATCH_ATTRIBUTE_CAPTURE_PRESET == mix->mCriteria[j].mRule &&
                     mix->mCriteria[j].mValue.mSource == inputSource) ||
                (RULE_EXCLUDE_ATTRIBUTE_CAPTURE_PRESET == mix->mCriteria[j].mRule &&
-                    mix->mCriteria[j].mValue.mSource != inputSource)) {
+                    mix->mCriteria[j].mValue.mSource != inputSource) ||
+               (RULE_MATCH_UID == mix->mCriteria[j].mRule &&
+                    mix->mCriteria[j].mValue.mUid == uid) ||
+               (RULE_EXCLUDE_UID == mix->mCriteria[j].mRule &&
+                    mix->mCriteria[j].mValue.mUid != uid)) {
                 // assuming PolicyMix only for remote submix for input
                 // so mix->mDeviceType can only be AUDIO_DEVICE_OUT_REMOTE_SUBMIX
                 audio_devices_t device = AUDIO_DEVICE_IN_REMOTE_SUBMIX;
diff --git a/services/audiopolicy/common/managerdefinitions/src/AudioRoute.cpp b/services/audiopolicy/common/managerdefinitions/src/AudioRoute.cpp
index c8e4e76..866417e 100644
--- a/services/audiopolicy/common/managerdefinitions/src/AudioRoute.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/AudioRoute.cpp
@@ -39,7 +39,7 @@
 bool AudioRoute::supportsPatch(const sp<PolicyAudioPort> &srcPort,
                                const sp<PolicyAudioPort> &dstPort) const
 {
-    if (mSink == 0 || dstPort == 0 || !dstPort->equals(mSink)) {
+    if (mSink == 0 || srcPort == 0 || dstPort == 0 || !dstPort->equals(mSink)) {
         return false;
     }
     ALOGV("%s: sinks %s matching", __FUNCTION__, mSink->getTagName().c_str());
diff --git a/services/audiopolicy/common/managerdefinitions/src/Serializer.cpp b/services/audiopolicy/common/managerdefinitions/src/Serializer.cpp
index 129f6f6..562c213 100644
--- a/services/audiopolicy/common/managerdefinitions/src/Serializer.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/Serializer.cpp
@@ -20,8 +20,8 @@
 #include <memory>
 #include <string>
 #include <utility>
+#include <variant>
 
-#include <hidl/Status.h>
 #include <libxml/parser.h>
 #include <libxml/xinclude.h>
 #include <media/convert.h>
@@ -36,12 +36,14 @@
 
 namespace {
 
-// TODO(mnaganov): Consider finding an alternative for using HIDL code.
-using hardware::Return;
-using hardware::Status;
-using hardware::Void;
 using utilities::convertTo;
 
+static inline bool maybeVendorExtension(const std::string& s) {
+    // Only checks whether the string starts with the "vendor prefix".
+    static const std::string vendorPrefix = "VX_";
+    return s.size() > vendorPrefix.size() && s.substr(0, vendorPrefix.size()) == vendorPrefix;
+}
+
 template<typename E, typename C>
 struct AndroidCollectionTraits {
     typedef sp<E> Element;
@@ -187,7 +189,7 @@
 
 struct GlobalConfigTraits
 {
-    typedef void Element;
+    typedef std::monostate Element;
 
     static constexpr const char *tag = "globalConfiguration";
 
@@ -203,7 +205,7 @@
 
 struct SurroundSoundTraits
 {
-    typedef void Element;
+    typedef std::monostate Element;
 
     static constexpr const char *tag = "surroundSound";
 
@@ -226,14 +228,15 @@
 class PolicySerializer
 {
 public:
-    status_t deserialize(const char *configFile, AudioPolicyConfig *config);
+    status_t deserialize(const char *configFile, AudioPolicyConfig *config,
+            bool ignoreVendorExtensions = false);
 
     template <class Trait>
     status_t deserializeCollection(const xmlNode *cur,
             typename Trait::Collection *collection,
             typename Trait::PtrSerializingCtx serializingContext);
     template <class Trait>
-    Return<typename Trait::Element> deserialize(const xmlNode *cur,
+    std::variant<status_t, typename Trait::Element> deserialize(const xmlNode *cur,
             typename Trait::PtrSerializingCtx serializingContext);
 
 private:
@@ -242,6 +245,7 @@
 
     typedef AudioPolicyConfig Element;
 
+    bool mIgnoreVendorExtensions = false;
     std::string mChannelMasksSeparator = ",";
     std::string mSamplingRatesSeparator = ",";
     std::string mFlagsSeparator = "|";
@@ -307,14 +311,17 @@
         }
         for (; child != NULL; child = child->next) {
             if (!xmlStrcmp(child->name, reinterpret_cast<const xmlChar*>(Trait::tag))) {
-                auto element = deserialize<Trait>(child, serializingContext);
-                if (element.isOk()) {
-                    status_t status = Trait::addElementToCollection(element, collection);
+                auto maybeElement = deserialize<Trait>(child, serializingContext);
+                if (maybeElement.index() == 1) {
+                    status_t status = Trait::addElementToCollection(
+                            std::get<1>(maybeElement), collection);
                     if (status != NO_ERROR) {
                         ALOGE("%s: could not add element to %s collection", __func__,
                             Trait::collectionTag);
                         return status;
                     }
+                } else if (mIgnoreVendorExtensions && std::get<status_t>(maybeElement) == NO_INIT) {
+                    // Skip a vendor extension element.
                 } else {
                     return BAD_VALUE;
                 }
@@ -328,8 +335,8 @@
 }
 
 template<>
-Return<AudioGainTraits::Element> PolicySerializer::deserialize<AudioGainTraits>(const xmlNode *cur,
-        AudioGainTraits::PtrSerializingCtx /*serializingContext*/)
+std::variant<status_t, AudioGainTraits::Element> PolicySerializer::deserialize<AudioGainTraits>(
+        const xmlNode *cur, AudioGainTraits::PtrSerializingCtx /*serializingContext*/)
 {
     using Attributes = AudioGainTraits::Attributes;
 
@@ -393,12 +400,13 @@
     if (gain->getMode() != 0) {
         return gain;
     } else {
-        return Status::fromStatusT(BAD_VALUE);
+        return BAD_VALUE;
     }
 }
 
 template<>
-Return<AudioProfileTraits::Element> PolicySerializer::deserialize<AudioProfileTraits>(
+std::variant<status_t, AudioProfileTraits::Element>
+PolicySerializer::deserialize<AudioProfileTraits>(
         const xmlNode *cur, AudioProfileTraits::PtrSerializingCtx /*serializingContext*/)
 {
     using Attributes = AudioProfileTraits::Attributes;
@@ -407,6 +415,10 @@
     std::string format = getXmlAttribute(cur, Attributes::format);
     std::string channels = getXmlAttribute(cur, Attributes::channelMasks);
 
+    if (mIgnoreVendorExtensions && maybeVendorExtension(format)) {
+        ALOGI("%s: vendor extension format \"%s\" skipped", __func__, format.c_str());
+        return NO_INIT;
+    }
     AudioProfileTraits::Element profile = new AudioProfile(formatFromString(format, gDynamicFormat),
             channelMasksFromString(channels, mChannelMasksSeparator.c_str()),
             samplingRatesFromString(samplingRates, mSamplingRatesSeparator.c_str()));
@@ -419,21 +431,21 @@
 }
 
 template<>
-Return<MixPortTraits::Element> PolicySerializer::deserialize<MixPortTraits>(const xmlNode *child,
-        MixPortTraits::PtrSerializingCtx /*serializingContext*/)
+std::variant<status_t, MixPortTraits::Element> PolicySerializer::deserialize<MixPortTraits>(
+        const xmlNode *child, MixPortTraits::PtrSerializingCtx /*serializingContext*/)
 {
     using Attributes = MixPortTraits::Attributes;
 
     std::string name = getXmlAttribute(child, Attributes::name);
     if (name.empty()) {
         ALOGE("%s: No %s found", __func__, Attributes::name);
-        return Status::fromStatusT(BAD_VALUE);
+        return BAD_VALUE;
     }
     ALOGV("%s: %s %s=%s", __func__, MixPortTraits::tag, Attributes::name, name.c_str());
     std::string role = getXmlAttribute(child, Attributes::role);
     if (role.empty()) {
         ALOGE("%s: No %s found", __func__, Attributes::role);
-        return Status::fromStatusT(BAD_VALUE);
+        return BAD_VALUE;
     }
     ALOGV("%s: Role=%s", __func__, role.c_str());
     audio_port_role_t portRole = (role == Attributes::roleSource) ?
@@ -444,7 +456,7 @@
     AudioProfileTraits::Collection profiles;
     status_t status = deserializeCollection<AudioProfileTraits>(child, &profiles, NULL);
     if (status != NO_ERROR) {
-        return Status::fromStatusT(status);
+        return status;
     }
     if (profiles.empty()) {
         profiles.add(AudioProfile::createFullDynamic(gDynamicFormat));
@@ -476,7 +488,7 @@
     AudioGainTraits::Collection gains;
     status = deserializeCollection<AudioGainTraits>(child, &gains, NULL);
     if (status != NO_ERROR) {
-        return Status::fromStatusT(status);
+        return status;
     }
     mixPort->setGains(gains);
 
@@ -484,7 +496,7 @@
 }
 
 template<>
-Return<DevicePortTraits::Element> PolicySerializer::deserialize<DevicePortTraits>(
+std::variant<status_t, DevicePortTraits::Element> PolicySerializer::deserialize<DevicePortTraits>(
         const xmlNode *cur, DevicePortTraits::PtrSerializingCtx /*serializingContext*/)
 {
     using Attributes = DevicePortTraits::Attributes;
@@ -493,30 +505,34 @@
     std::string name = getXmlAttribute(cur, Attributes::tagName);
     if (name.empty()) {
         ALOGE("%s: No %s found", __func__, Attributes::tagName);
-        return Status::fromStatusT(BAD_VALUE);
+        return BAD_VALUE;
     }
     ALOGV("%s: %s %s=%s", __func__, tag, Attributes::tagName, name.c_str());
     std::string typeName = getXmlAttribute(cur, Attributes::type);
     if (typeName.empty()) {
         ALOGE("%s: no type for %s", __func__, name.c_str());
-        return Status::fromStatusT(BAD_VALUE);
+        return BAD_VALUE;
     }
     ALOGV("%s: %s %s=%s", __func__, tag, Attributes::type, typeName.c_str());
     std::string role = getXmlAttribute(cur, Attributes::role);
     if (role.empty()) {
         ALOGE("%s: No %s found", __func__, Attributes::role);
-        return Status::fromStatusT(BAD_VALUE);
+        return BAD_VALUE;
     }
     ALOGV("%s: %s %s=%s", __func__, tag, Attributes::role, role.c_str());
     audio_port_role_t portRole = (role == Attributes::roleSource) ?
                 AUDIO_PORT_ROLE_SOURCE : AUDIO_PORT_ROLE_SINK;
 
+    if (mIgnoreVendorExtensions && maybeVendorExtension(typeName)) {
+        ALOGI("%s: vendor extension device type \"%s\" skipped", __func__, typeName.c_str());
+        return NO_INIT;
+    }
     audio_devices_t type = AUDIO_DEVICE_NONE;
     if (!DeviceConverter::fromString(typeName, type) ||
             (!audio_is_input_device(type) && portRole == AUDIO_PORT_ROLE_SOURCE) ||
             (!audio_is_output_devices(type) && portRole == AUDIO_PORT_ROLE_SINK)) {
         ALOGW("%s: bad type %08x", __func__, type);
-        return Status::fromStatusT(BAD_VALUE);
+        return BAD_VALUE;
     }
     std::string encodedFormatsLiteral = getXmlAttribute(cur, Attributes::encodedFormats);
     ALOGV("%s: %s %s=%s", __func__, tag, Attributes::encodedFormats, encodedFormatsLiteral.c_str());
@@ -531,7 +547,7 @@
     AudioProfileTraits::Collection profiles;
     status_t status = deserializeCollection<AudioProfileTraits>(cur, &profiles, NULL);
     if (status != NO_ERROR) {
-        return Status::fromStatusT(status);
+        return status;
     }
     if (profiles.empty()) {
         profiles.add(AudioProfile::createFullDynamic(gDynamicFormat));
@@ -544,7 +560,7 @@
     // Deserialize AudioGain children
     status = deserializeCollection<AudioGainTraits>(cur, &deviceDesc->mGains, NULL);
     if (status != NO_ERROR) {
-        return Status::fromStatusT(status);
+        return status;
     }
     ALOGV("%s: adding device tag %s type %08x address %s", __func__,
           deviceDesc->getName().c_str(), type, deviceDesc->address().c_str());
@@ -552,7 +568,7 @@
 }
 
 template<>
-Return<RouteTraits::Element> PolicySerializer::deserialize<RouteTraits>(
+std::variant<status_t, RouteTraits::Element> PolicySerializer::deserialize<RouteTraits>(
         const xmlNode *cur, RouteTraits::PtrSerializingCtx ctx)
 {
     using Attributes = RouteTraits::Attributes;
@@ -560,7 +576,7 @@
     std::string type = getXmlAttribute(cur, Attributes::type);
     if (type.empty()) {
         ALOGE("%s: No %s found", __func__, Attributes::type);
-        return Status::fromStatusT(BAD_VALUE);
+        return BAD_VALUE;
     }
     audio_route_type_t routeType = (type == Attributes::typeMix) ?
                 AUDIO_ROUTE_MIX : AUDIO_ROUTE_MUX;
@@ -571,20 +587,24 @@
     std::string sinkAttr = getXmlAttribute(cur, Attributes::sink);
     if (sinkAttr.empty()) {
         ALOGE("%s: No %s found", __func__, Attributes::sink);
-        return Status::fromStatusT(BAD_VALUE);
+        return BAD_VALUE;
     }
     // Convert Sink name to port pointer
     sp<PolicyAudioPort> sink = ctx->findPortByTagName(sinkAttr);
-    if (sink == NULL) {
+    if (sink == NULL && !mIgnoreVendorExtensions) {
         ALOGE("%s: no sink found with name=%s", __func__, sinkAttr.c_str());
-        return Status::fromStatusT(BAD_VALUE);
+        return BAD_VALUE;
+    } else if (sink == NULL) {
+                ALOGW("Skipping route to sink \"%s\" as it likely has a vendor extension type",
+                sinkAttr.c_str());
+        return NO_INIT;
     }
     route->setSink(sink);
 
     std::string sourcesAttr = getXmlAttribute(cur, Attributes::sources);
     if (sourcesAttr.empty()) {
         ALOGE("%s: No %s found", __func__, Attributes::sources);
-        return Status::fromStatusT(BAD_VALUE);
+        return BAD_VALUE;
     }
     // Tokenize and Convert Sources name to port pointer
     PolicyAudioPortVector sources;
@@ -594,11 +614,15 @@
     while (devTag != NULL) {
         if (strlen(devTag) != 0) {
             sp<PolicyAudioPort> source = ctx->findPortByTagName(devTag);
-            if (source == NULL) {
+            if (source == NULL && !mIgnoreVendorExtensions) {
                 ALOGE("%s: no source found with name=%s", __func__, devTag);
-                return Status::fromStatusT(BAD_VALUE);
+                return BAD_VALUE;
+            } else if (source == NULL) {
+                ALOGW("Skipping route source \"%s\" as it likely has a vendor extension type",
+                        devTag);
+            } else {
+                sources.add(source);
             }
-            sources.add(source);
         }
         devTag = strtok(NULL, ",");
     }
@@ -613,7 +637,7 @@
 }
 
 template<>
-Return<ModuleTraits::Element> PolicySerializer::deserialize<ModuleTraits>(
+std::variant<status_t, ModuleTraits::Element> PolicySerializer::deserialize<ModuleTraits>(
         const xmlNode *cur, ModuleTraits::PtrSerializingCtx ctx)
 {
     using Attributes = ModuleTraits::Attributes;
@@ -625,7 +649,7 @@
     std::string name = getXmlAttribute(cur, Attributes::name);
     if (name.empty()) {
         ALOGE("%s: No %s found", __func__, Attributes::name);
-        return Status::fromStatusT(BAD_VALUE);
+        return BAD_VALUE;
     }
     uint32_t versionMajor = 0, versionMinor = 0;
     std::string versionLiteral = getXmlAttribute(cur, Attributes::version);
@@ -643,21 +667,21 @@
     MixPortTraits::Collection mixPorts;
     status_t status = deserializeCollection<MixPortTraits>(cur, &mixPorts, NULL);
     if (status != NO_ERROR) {
-        return Status::fromStatusT(status);
+        return status;
     }
     module->setProfiles(mixPorts);
 
     DevicePortTraits::Collection devicePorts;
     status = deserializeCollection<DevicePortTraits>(cur, &devicePorts, NULL);
     if (status != NO_ERROR) {
-        return Status::fromStatusT(status);
+        return status;
     }
     module->setDeclaredDevices(devicePorts);
 
     RouteTraits::Collection routes;
     status = deserializeCollection<RouteTraits>(cur, &routes, module.get());
     if (status != NO_ERROR) {
-        return Status::fromStatusT(status);
+        return status;
     }
     module->setRoutes(routes);
 
@@ -677,6 +701,12 @@
                         sp<DeviceDescriptor> device = module->getDeclaredDevices().
                                 getDeviceFromTagName(std::string(reinterpret_cast<const char*>(
                                                         attachedDevice.get())));
+                        if (device == nullptr && mIgnoreVendorExtensions) {
+                            ALOGW("Skipped attached device \"%s\" because it likely uses a vendor "
+                                    "extension type",
+                                    reinterpret_cast<const char*>(attachedDevice.get()));
+                            continue;
+                        }
                         ctx->addDevice(device);
                     }
                 }
@@ -703,7 +733,8 @@
 }
 
 template<>
-Return<GlobalConfigTraits::Element> PolicySerializer::deserialize<GlobalConfigTraits>(
+std::variant<status_t, GlobalConfigTraits::Element>
+PolicySerializer::deserialize<GlobalConfigTraits>(
         const xmlNode *root, GlobalConfigTraits::PtrSerializingCtx config)
 {
     using Attributes = GlobalConfigTraits::Attributes;
@@ -725,14 +756,15 @@
             if (!engineLibrarySuffix.empty()) {
                 config->setEngineLibraryNameSuffix(engineLibrarySuffix);
             }
-            return Void();
+            return NO_ERROR;
         }
     }
-    return Void();
+    return NO_ERROR;
 }
 
 template<>
-Return<SurroundSoundTraits::Element> PolicySerializer::deserialize<SurroundSoundTraits>(
+std::variant<status_t, SurroundSoundTraits::Element>
+PolicySerializer::deserialize<SurroundSoundTraits>(
         const xmlNode *root, SurroundSoundTraits::PtrSerializingCtx config)
 {
     config->setDefaultSurroundFormats();
@@ -745,14 +777,15 @@
             if (status == NO_ERROR) {
                 config->setSurroundFormats(formats);
             }
-            return Void();
+            return NO_ERROR;
         }
     }
-    return Void();
+    return NO_ERROR;
 }
 
 template<>
-Return<SurroundSoundFormatTraits::Element> PolicySerializer::deserialize<SurroundSoundFormatTraits>(
+std::variant<status_t, SurroundSoundFormatTraits::Element>
+PolicySerializer::deserialize<SurroundSoundFormatTraits>(
         const xmlNode *cur, SurroundSoundFormatTraits::PtrSerializingCtx /*serializingContext*/)
 {
     using Attributes = SurroundSoundFormatTraits::Attributes;
@@ -760,12 +793,16 @@
     std::string formatLiteral = getXmlAttribute(cur, Attributes::name);
     if (formatLiteral.empty()) {
         ALOGE("%s: No %s found for a surround format", __func__, Attributes::name);
-        return Status::fromStatusT(BAD_VALUE);
+        return BAD_VALUE;
+    }
+    if (mIgnoreVendorExtensions && maybeVendorExtension(formatLiteral)) {
+        ALOGI("%s: vendor extension format \"%s\" skipped", __func__, formatLiteral.c_str());
+        return NO_INIT;
     }
     audio_format_t format = formatFromString(formatLiteral);
     if (format == AUDIO_FORMAT_DEFAULT) {
         ALOGE("%s: Unrecognized format %s", __func__, formatLiteral.c_str());
-        return Status::fromStatusT(BAD_VALUE);
+        return BAD_VALUE;
     }
     SurroundSoundFormatTraits::Element pair = std::make_pair(
             format, SurroundSoundFormatTraits::Collection::mapped_type{});
@@ -777,14 +814,16 @@
         auto result = pair.second.insert(subformat);
         if (!result.second) {
             ALOGE("%s: could not add subformat %x to collection", __func__, subformat);
-            return Status::fromStatusT(BAD_VALUE);
+            return BAD_VALUE;
         }
     }
     return pair;
 }
 
-status_t PolicySerializer::deserialize(const char *configFile, AudioPolicyConfig *config)
+status_t PolicySerializer::deserialize(const char *configFile, AudioPolicyConfig *config,
+                                       bool ignoreVendorExtensions)
 {
+    mIgnoreVendorExtensions = ignoreVendorExtensions;
     auto doc = make_xmlUnique(xmlParseFile(configFile));
     if (doc == nullptr) {
         ALOGE("%s: Could not parse %s document.", __func__, configFile);
@@ -845,4 +884,12 @@
     return status;
 }
 
+status_t deserializeAudioPolicyFileForVts(const char *fileName, AudioPolicyConfig *config)
+{
+    PolicySerializer serializer;
+    status_t status = serializer.deserialize(fileName, config, true /*ignoreVendorExtensions*/);
+    if (status != OK) config->clear();
+    return status;
+}
+
 } // namespace android
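
    // In the reworked serializer each deserialize<Trait> specialization returns
    // std::variant<status_t, Element> instead of hardware::Return: index 1 carries the parsed
    // element, BAD_VALUE reports a hard parse error, and NO_INIT marks a vendor-extension
    // ("VX_"-prefixed) element that the VTS path skips. A minimal sketch of a VTS-style caller,
    // assuming only the entry point declared above; header names and the config path are
    // illustrative.
    #include <utils/Errors.h>
    #include <AudioPolicyConfig.h>
    #include <Serializer.h>   // declares deserializeAudioPolicyFileForVts

    android::status_t loadPolicyForVts(android::AudioPolicyConfig* config) {
        // Vendor-prefixed formats, device types, and the routes that reference them are
        // skipped (NO_INIT internally) instead of failing the whole parse (BAD_VALUE).
        return android::deserializeAudioPolicyFileForVts(
                "/vendor/etc/audio_policy_configuration.xml", config);
    }
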
diff --git a/services/audiopolicy/engine/common/include/EngineBase.h b/services/audiopolicy/engine/common/include/EngineBase.h
index 9bef97c..0f8b0a5 100644
--- a/services/audiopolicy/engine/common/include/EngineBase.h
+++ b/services/audiopolicy/engine/common/include/EngineBase.h
@@ -170,11 +170,13 @@
     status_t getMediaDevicesForRole(device_role_t role, const DeviceVector& availableDevices,
             DeviceVector& devices) const;
 
+    void dumpCapturePresetDevicesRoleMap(String8 *dst, int spaces) const;
+
     AudioPolicyManagerObserver *mApmObserver = nullptr;
 
     ProductStrategyMap mProductStrategies;
-    ProductStrategyPreferredRoutingMap mProductStrategyPreferredDevices;
-    CapturePresetDevicesRoleMap mCapturePresetDevicesRole;
+    ProductStrategyDevicesRoleMap mProductStrategyDeviceRoleMap;
+    CapturePresetDevicesRoleMap mCapturePresetDevicesRoleMap;
     VolumeGroupMap mVolumeGroups;
     LastRemovableMediaDevices mLastRemovableMediaDevices;
     audio_mode_t mPhoneState = AUDIO_MODE_NORMAL;  /**< current phone state. */
diff --git a/services/audiopolicy/engine/common/include/ProductStrategy.h b/services/audiopolicy/engine/common/include/ProductStrategy.h
index 54625ea..2aa2f9a 100644
--- a/services/audiopolicy/engine/common/include/ProductStrategy.h
+++ b/services/audiopolicy/engine/common/include/ProductStrategy.h
@@ -18,20 +18,20 @@
 
 #include "VolumeGroup.h"
 
-#include <system/audio.h>
-#include <utils/RefBase.h>
-#include <HandleGenerator.h>
-#include <string>
-#include <vector>
 #include <map>
-#include <utils/Errors.h>
-#include <utils/String8.h>
+#include <string>
+#include <utility>
+#include <vector>
+
+#include <HandleGenerator.h>
 #include <media/AudioAttributes.h>
 #include <media/AudioContainers.h>
 #include <media/AudioDeviceTypeAddr.h>
 #include <media/AudioPolicy.h>
-
-#include <vector>
+#include <system/audio.h>
+#include <utils/Errors.h>
+#include <utils/RefBase.h>
+#include <utils/String8.h>
 
 namespace android {
 
@@ -170,11 +170,12 @@
     product_strategy_t mDefaultStrategy = PRODUCT_STRATEGY_NONE;
 };
 
-class ProductStrategyPreferredRoutingMap : public std::map<product_strategy_t,
-                                                           AudioDeviceTypeAddrVector>
-{
-public:
-    void dump(String8 *dst, int spaces = 0) const;
-};
+using ProductStrategyDevicesRoleMap =
+        std::map<std::pair<product_strategy_t, device_role_t>, AudioDeviceTypeAddrVector>;
+
+void dumpProductStrategyDevicesRoleMap(
+        const ProductStrategyDevicesRoleMap& productStrategyDeviceRoleMap,
+        String8 *dst,
+        int spaces);
 
 } // namespace android
diff --git a/services/audiopolicy/engine/common/src/EngineBase.cpp b/services/audiopolicy/engine/common/src/EngineBase.cpp
index 37e4caa..150a9a8 100644
--- a/services/audiopolicy/engine/common/src/EngineBase.cpp
+++ b/services/audiopolicy/engine/common/src/EngineBase.cpp
@@ -17,6 +17,10 @@
 #define LOG_TAG "APM::AudioPolicyEngine/Base"
 //#define LOG_NDEBUG 0
 
+#include <functional>
+#include <string>
+#include <sys/stat.h>
+
 #include "EngineBase.h"
 #include "EngineDefaultConfig.h"
 #include <TypeConverter.h>
@@ -148,10 +152,15 @@
         });
         return iter != end(volumeGroups);
     };
+    auto fileExists = [](const char* path) {
+        struct stat fileStat;
+        return stat(path, &fileStat) == 0 && S_ISREG(fileStat.st_mode);
+    };
 
-    auto result = engineConfig::parse();
+    auto result = fileExists(engineConfig::DEFAULT_PATH) ?
+            engineConfig::parse(engineConfig::DEFAULT_PATH) : engineConfig::ParsingResult{};
     if (result.parsedConfig == nullptr) {
-        ALOGW("%s: No configuration found, using default matching phone experience.", __FUNCTION__);
+        ALOGD("%s: No configuration found, using default matching phone experience.", __FUNCTION__);
         engineConfig::Config config = gDefaultEngineConfig;
         android::status_t ret = engineConfig::parseLegacyVolumes(config.volumeGroups);
         result = {std::make_unique<engineConfig::Config>(config),
@@ -342,23 +351,33 @@
     return NO_ERROR;
 }
 
-status_t EngineBase::setDevicesRoleForStrategy(product_strategy_t strategy, device_role_t role,
-            const AudioDeviceTypeAddrVector &devices)
-{
-    // verify strategy exists
-    if (mProductStrategies.find(strategy) == mProductStrategies.end()) {
-        ALOGE("%s invalid strategy %u", __func__, strategy);
+namespace {
+template <typename T>
+status_t setDevicesRoleForT(
+        std::map<std::pair<T, device_role_t>, AudioDeviceTypeAddrVector>& tDevicesRoleMap,
+        T t, device_role_t role, const AudioDeviceTypeAddrVector &devices,
+        const std::string& logStr, std::function<bool(T)> p) {
+    if (!p(t)) {
+        ALOGE("%s invalid %s %u", __func__, logStr.c_str(), t);
         return BAD_VALUE;
     }
 
     switch (role) {
     case DEVICE_ROLE_PREFERRED:
-        mProductStrategyPreferredDevices[strategy] = devices;
-        break;
-    case DEVICE_ROLE_DISABLED:
-        // TODO: support set devices role as disabled for strategy.
-        ALOGI("%s no implemented for role as %d", __func__, role);
-        break;
+    case DEVICE_ROLE_DISABLED: {
+        tDevicesRoleMap[std::make_pair(t, role)] = devices;
+        // The preferred devices and disabled devices are mutually exclusive. Once a device is
+        // added to one list, it must be removed from the other one.
+        const device_role_t roleToRemove = role == DEVICE_ROLE_PREFERRED ? DEVICE_ROLE_DISABLED
+                                                                         : DEVICE_ROLE_PREFERRED;
+        auto it = tDevicesRoleMap.find(std::make_pair(t, roleToRemove));
+        if (it != tDevicesRoleMap.end()) {
+            it->second = excludeDeviceTypeAddrsFrom(it->second, devices);
+            if (it->second.empty()) {
+                tDevicesRoleMap.erase(it);
+            }
+        }
+    } break;
     case DEVICE_ROLE_NONE:
         // Intentionally fall-through as it is no need to set device role as none for a strategy.
     default:
@@ -368,28 +387,26 @@
     return NO_ERROR;
 }
 
-status_t EngineBase::removeDevicesRoleForStrategy(product_strategy_t strategy, device_role_t role)
-{
-    // verify strategy exists
-    if (mProductStrategies.find(strategy) == mProductStrategies.end()) {
-        ALOGE("%s invalid strategy %u", __func__, strategy);
+template <typename T>
+status_t removeAllDevicesRoleForT(
+        std::map<std::pair<T, device_role_t>, AudioDeviceTypeAddrVector>& tDevicesRoleMap,
+        T t, device_role_t role, const std::string& logStr, std::function<bool(T)> p) {
+    if (!p(t)) {
+        ALOGE("%s invalid %s %u", __func__, logStr.c_str(), t);
         return BAD_VALUE;
     }
 
     switch (role) {
     case DEVICE_ROLE_PREFERRED:
-        if (mProductStrategyPreferredDevices.erase(strategy) == 0) {
-            // no preferred device was set
+    case DEVICE_ROLE_DISABLED:
+        if (tDevicesRoleMap.erase(std::make_pair(t, role)) == 0) {
+            // no preferred/disabled device was set
             return NAME_NOT_FOUND;
         }
         break;
-    case DEVICE_ROLE_DISABLED:
-        // TODO: support remove devices role as disabled for strategy.
-        ALOGI("%s no implemented for role as %d", __func__, role);
-        break;
     case DEVICE_ROLE_NONE:
         // Intentionally fall-through as it makes no sense to remove devices with
-        // role as DEVICE_ROLE_NONE for a strategy
+        // role as DEVICE_ROLE_NONE
     default:
         ALOGE("%s invalid role %d", __func__, role);
         return BAD_VALUE;
@@ -397,25 +414,26 @@
     return NO_ERROR;
 }
 
-status_t EngineBase::getDevicesForRoleAndStrategy(product_strategy_t strategy, device_role_t role,
-            AudioDeviceTypeAddrVector &devices) const
-{
-    // verify strategy exists
-    if (mProductStrategies.find(strategy) == mProductStrategies.end()) {
-        ALOGE("%s unknown strategy %u", __func__, strategy);
+template <typename T>
+status_t getDevicesRoleForT(
+        const std::map<std::pair<T, device_role_t>, AudioDeviceTypeAddrVector>& tDevicesRoleMap,
+        T t, device_role_t role, AudioDeviceTypeAddrVector &devices, const std::string& logStr,
+        std::function<bool(T)> p) {
+    if (!p(t)) {
+        ALOGE("%s invalid %s %u", __func__, logStr.c_str(), t);
         return BAD_VALUE;
     }
 
     switch (role) {
-    case DEVICE_ROLE_PREFERRED: {
-        // preferred device for this strategy?
-        auto devIt = mProductStrategyPreferredDevices.find(strategy);
-        if (devIt == mProductStrategyPreferredDevices.end()) {
-            ALOGV("%s no preferred device for strategy %u", __func__, strategy);
+    case DEVICE_ROLE_PREFERRED:
+    case DEVICE_ROLE_DISABLED: {
+        auto it = tDevicesRoleMap.find(std::make_pair(t, role));
+        if (it == tDevicesRoleMap.end()) {
+            ALOGV("%s no device as role %u for %s %u", __func__, role, logStr.c_str(), t);
             return NAME_NOT_FOUND;
         }
 
-        devices = devIt->second;
+        devices = it->second;
     } break;
     case DEVICE_ROLE_NONE:
         // Intentionally fall-through as the DEVICE_ROLE_NONE is never set
@@ -426,32 +444,45 @@
     return NO_ERROR;
 }
 
+} // namespace
+
+status_t EngineBase::setDevicesRoleForStrategy(product_strategy_t strategy, device_role_t role,
+            const AudioDeviceTypeAddrVector &devices)
+{
+    std::function<bool(product_strategy_t)> p = [this](product_strategy_t strategy) {
+        return mProductStrategies.find(strategy) != mProductStrategies.end();
+    };
+    return setDevicesRoleForT(
+            mProductStrategyDeviceRoleMap, strategy, role, devices, "strategy" /*logStr*/, p);
+}
+
+status_t EngineBase::removeDevicesRoleForStrategy(product_strategy_t strategy, device_role_t role)
+{
+    std::function<bool(product_strategy_t)> p = [this](product_strategy_t strategy) {
+        return mProductStrategies.find(strategy) != mProductStrategies.end();
+    };
+    return removeAllDevicesRoleForT(
+            mProductStrategyDeviceRoleMap, strategy, role, "strategy" /*logStr*/, p);
+}
+
+status_t EngineBase::getDevicesForRoleAndStrategy(product_strategy_t strategy, device_role_t role,
+            AudioDeviceTypeAddrVector &devices) const
+{
+    std::function<bool(product_strategy_t)> p = [this](product_strategy_t strategy) {
+        return mProductStrategies.find(strategy) != mProductStrategies.end();
+    };
+    return getDevicesRoleForT(
+            mProductStrategyDeviceRoleMap, strategy, role, devices, "strategy" /*logStr*/, p);
+}
+
 status_t EngineBase::setDevicesRoleForCapturePreset(audio_source_t audioSource, device_role_t role,
         const AudioDeviceTypeAddrVector &devices)
 {
-    // verify if the audio source is valid
-    if (!audio_is_valid_audio_source(audioSource)) {
-        ALOGE("%s unknown audio source %u", __func__, audioSource);
-    }
-
-    switch (role) {
-    case DEVICE_ROLE_PREFERRED:
-        mCapturePresetDevicesRole[audioSource][role] = devices;
-        // When the devices are set as preferred devices, remove them from the disabled devices.
-        doRemoveDevicesRoleForCapturePreset(
-                audioSource, DEVICE_ROLE_DISABLED, devices, false /*forceMatched*/);
-        break;
-    case DEVICE_ROLE_DISABLED:
-        // TODO: support setting devices role as disabled for capture preset.
-        ALOGI("%s no implemented for role as %d", __func__, role);
-        break;
-    case DEVICE_ROLE_NONE:
-        // Intentionally fall-through as it is no need to set device role as none
-    default:
-        ALOGE("%s invalid role %d", __func__, role);
-        return BAD_VALUE;
-    }
-    return NO_ERROR;
+    std::function<bool(audio_source_t)> p = [](audio_source_t audioSource) {
+        return audio_is_valid_audio_source(audioSource);
+    };
+    return setDevicesRoleForT(
+            mCapturePresetDevicesRoleMap, audioSource, role, devices, "audio source" /*logStr*/, p);
 }
 
 status_t EngineBase::addDevicesRoleForCapturePreset(audio_source_t audioSource, device_role_t role,
@@ -464,19 +495,20 @@
 
     switch (role) {
     case DEVICE_ROLE_PREFERRED:
-        mCapturePresetDevicesRole[audioSource][role] = excludeDeviceTypeAddrsFrom(
-                mCapturePresetDevicesRole[audioSource][role], devices);
-        for (const auto& device : devices) {
-            mCapturePresetDevicesRole[audioSource][role].push_back(device);
+    case DEVICE_ROLE_DISABLED: {
+        const auto audioSourceRole = std::make_pair(audioSource, role);
+        mCapturePresetDevicesRoleMap[audioSourceRole] = excludeDeviceTypeAddrsFrom(
+                mCapturePresetDevicesRoleMap[audioSourceRole], devices);
+        for (const auto &device : devices) {
+            mCapturePresetDevicesRoleMap[audioSourceRole].push_back(device);
         }
         // When the devices are set as preferred devices, remove them from the disabled devices.
         doRemoveDevicesRoleForCapturePreset(
-                audioSource, DEVICE_ROLE_DISABLED, devices, false /*forceMatched*/);
-        break;
-    case DEVICE_ROLE_DISABLED:
-        // TODO: support setting devices role as disabled for capture preset.
-        ALOGI("%s no implemented for role as %d", __func__, role);
-        break;
+                audioSource,
+                role == DEVICE_ROLE_PREFERRED ? DEVICE_ROLE_DISABLED : DEVICE_ROLE_PREFERRED,
+                devices,
+                false /*forceMatched*/);
+    } break;
     case DEVICE_ROLE_NONE:
         // Intentionally fall-through as it is no need to set device role as none
     default:
@@ -502,21 +534,22 @@
     switch (role) {
     case DEVICE_ROLE_PREFERRED:
     case DEVICE_ROLE_DISABLED: {
-        if (mCapturePresetDevicesRole.count(audioSource) == 0 ||
-                mCapturePresetDevicesRole[audioSource].count(role) == 0) {
+        const auto audioSourceRole = std::make_pair(audioSource, role);
+        if (mCapturePresetDevicesRoleMap.find(audioSourceRole) ==
+                mCapturePresetDevicesRoleMap.end()) {
             return NAME_NOT_FOUND;
         }
         AudioDeviceTypeAddrVector remainingDevices = excludeDeviceTypeAddrsFrom(
-                mCapturePresetDevicesRole[audioSource][role], devices);
+                mCapturePresetDevicesRoleMap[audioSourceRole], devices);
         if (forceMatched && remainingDevices.size() !=
-                mCapturePresetDevicesRole[audioSource][role].size() - devices.size()) {
+                mCapturePresetDevicesRoleMap[audioSourceRole].size() - devices.size()) {
             // There are some devices from `devicesToRemove` that are not shown in the cached record
             return BAD_VALUE;
         }
-        mCapturePresetDevicesRole[audioSource][role] = remainingDevices;
-        if (mCapturePresetDevicesRole[audioSource][role].empty()) {
+        mCapturePresetDevicesRoleMap[audioSourceRole] = remainingDevices;
+        if (mCapturePresetDevicesRoleMap[audioSourceRole].empty()) {
             // Remove the role when device list is empty
-            mCapturePresetDevicesRole[audioSource].erase(role);
+            mCapturePresetDevicesRoleMap.erase(audioSourceRole);
         }
     } break;
     case DEVICE_ROLE_NONE:
@@ -532,63 +565,21 @@
 status_t EngineBase::clearDevicesRoleForCapturePreset(audio_source_t audioSource,
                                                       device_role_t role)
 {
-    // verify if the audio source is valid
-    if (!audio_is_valid_audio_source(audioSource)) {
-        ALOGE("%s unknown audio source %u", __func__, audioSource);
-    }
-
-    switch (role) {
-    case DEVICE_ROLE_PREFERRED:
-        if (mCapturePresetDevicesRole.count(audioSource) == 0 ||
-                mCapturePresetDevicesRole[audioSource].erase(role) == 0) {
-            // no preferred device for the given audio source
-            return NAME_NOT_FOUND;
-        }
-        break;
-    case DEVICE_ROLE_DISABLED:
-        // TODO: support remove devices role as disabled for strategy.
-        ALOGI("%s no implemented for role as %d", __func__, role);
-        break;
-    case DEVICE_ROLE_NONE:
-        // Intentionally fall-through as it makes no sense to remove devices with
-        // role as DEVICE_ROLE_NONE for a strategy
-    default:
-        ALOGE("%s invalid role %d", __func__, role);
-        return BAD_VALUE;
-    }
-    return NO_ERROR;
+    std::function<bool(audio_source_t)> p = [](audio_source_t audioSource) {
+        return audio_is_valid_audio_source(audioSource);
+    };
+    return removeAllDevicesRoleForT(
+            mCapturePresetDevicesRoleMap, audioSource, role, "audio source" /*logStr*/, p);
 }
 
 status_t EngineBase::getDevicesForRoleAndCapturePreset(audio_source_t audioSource,
         device_role_t role, AudioDeviceTypeAddrVector &devices) const
 {
-    // verify if the audio source is valid
-    if (!audio_is_valid_audio_source(audioSource)) {
-        ALOGE("%s unknown audio source %u", __func__, audioSource);
-        return BAD_VALUE;
-    }
-
-    switch (role) {
-    case DEVICE_ROLE_PREFERRED:
-    case DEVICE_ROLE_DISABLED: {
-        if (mCapturePresetDevicesRole.count(audioSource) == 0) {
-            return NAME_NOT_FOUND;
-        }
-        auto devIt = mCapturePresetDevicesRole.at(audioSource).find(role);
-        if (devIt == mCapturePresetDevicesRole.at(audioSource).end()) {
-            ALOGV("%s no devices role(%d) for capture preset %u", __func__, role, audioSource);
-            return NAME_NOT_FOUND;
-        }
-
-        devices = devIt->second;
-    } break;
-    case DEVICE_ROLE_NONE:
-        // Intentionally fall-through as the DEVICE_ROLE_NONE is never set
-    default:
-        ALOGE("%s invalid role %d", __func__, role);
-        return BAD_VALUE;
-    }
-    return NO_ERROR;
+    std::function<bool(audio_source_t)> p = [](audio_source_t audioSource) {
+        return audio_is_valid_audio_source(audioSource);
+    };
+    return getDevicesRoleForT(
+            mCapturePresetDevicesRoleMap, audioSource, role, devices, "audio source" /*logStr*/, p);
 }
 
 status_t EngineBase::getMediaDevicesForRole(device_role_t role,
@@ -630,10 +621,22 @@
     return activeDevices;
 }
 
+void EngineBase::dumpCapturePresetDevicesRoleMap(String8 *dst, int spaces) const
+{
+    dst->appendFormat("\n%*sDevice role per capture preset dump:", spaces, "");
+    for (const auto& [capturePresetRolePair, devices] : mCapturePresetDevicesRoleMap) {
+        dst->appendFormat("\n%*sCapture preset(%u) Device Role(%u) Devices(%s)", spaces + 2, "",
+                capturePresetRolePair.first, capturePresetRolePair.second,
+                dumpAudioDeviceTypeAddrVector(devices, true /*includeSensitiveInfo*/).c_str());
+    }
+    dst->appendFormat("\n");
+}
+
 void EngineBase::dump(String8 *dst) const
 {
     mProductStrategies.dump(dst, 2);
-    mProductStrategyPreferredDevices.dump(dst, 2);
+    dumpProductStrategyDevicesRoleMap(mProductStrategyDeviceRoleMap, dst, 2);
+    dumpCapturePresetDevicesRoleMap(dst, 2);
     mVolumeGroups.dump(dst, 2);
 }
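
    // The EngineBase refactor above folds the per-strategy and per-capture-preset bookkeeping
    // into flat maps keyed by a (key, role) pair, and the shared setDevicesRoleForT helper keeps
    // DEVICE_ROLE_PREFERRED and DEVICE_ROLE_DISABLED mutually exclusive for a given key. A
    // minimal sketch of the resulting shape, with an illustrative strategy id and device; only
    // the types shown in the hunks above are assumed.
    #include <media/AudioDeviceTypeAddr.h>
    #include <ProductStrategy.h>   // ProductStrategyDevicesRoleMap, declared in the hunk above

    void sketchRoleMap() {
        using namespace android;
        const product_strategy_t strategy = static_cast<product_strategy_t>(1); // illustrative id
        const AudioDeviceTypeAddr speaker(AUDIO_DEVICE_OUT_SPEAKER, /*address*/ "");

        ProductStrategyDevicesRoleMap roleMap;
        roleMap[{strategy, DEVICE_ROLE_PREFERRED}] = {speaker};
        // setDevicesRoleForT would now drop `speaker` from the (strategy, DEVICE_ROLE_DISABLED)
        // entry, erasing that entry once it becomes empty, so a device never holds both roles.
    }
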
 
diff --git a/services/audiopolicy/engine/common/src/ProductStrategy.cpp b/services/audiopolicy/engine/common/src/ProductStrategy.cpp
index d4cea5a..b3d144f 100644
--- a/services/audiopolicy/engine/common/src/ProductStrategy.cpp
+++ b/services/audiopolicy/engine/common/src/ProductStrategy.cpp
@@ -320,14 +320,15 @@
     }
 }
 
-void ProductStrategyPreferredRoutingMap::dump(android::String8* dst, int spaces) const {
-    dst->appendFormat("\n%*sPreferred devices per product strategy dump:", spaces, "");
-    for (const auto& iter : *this) {
-        dst->appendFormat("\n%*sStrategy %u %s",
-                          spaces + 2, "",
-                          (uint32_t) iter.first,
-                          dumpAudioDeviceTypeAddrVector(iter.second, true /*includeSensitiveInfo*/)
-                                  .c_str());
+void dumpProductStrategyDevicesRoleMap(
+        const ProductStrategyDevicesRoleMap& productStrategyDeviceRoleMap,
+        String8 *dst,
+        int spaces) {
+    dst->appendFormat("\n%*sDevice role per product strategy dump:", spaces, "");
+    for (const auto& [strategyRolePair, devices] : productStrategyDeviceRoleMap) {
+        dst->appendFormat("\n%*sStrategy(%u) Device Role(%u) Devices(%s)", spaces + 2, "",
+                strategyRolePair.first, strategyRolePair.second,
+                dumpAudioDeviceTypeAddrVector(devices, true /*includeSensitiveInfo*/).c_str());
     }
     dst->appendFormat("\n");
 }
diff --git a/services/audiopolicy/engine/config/src/EngineConfig.cpp b/services/audiopolicy/engine/config/src/EngineConfig.cpp
index 7cfef5b..1c86051 100644
--- a/services/audiopolicy/engine/config/src/EngineConfig.cpp
+++ b/services/audiopolicy/engine/config/src/EngineConfig.cpp
@@ -643,7 +643,11 @@
     xmlDocPtr doc;
     doc = xmlParseFile(path);
     if (doc == NULL) {
-        ALOGE("%s: Could not parse document %s", __FUNCTION__, path);
+        // It is OK not to find an engine config file at the default location,
+        // as the caller will fall back to the hardcoded default config.
+        if (strncmp(path, DEFAULT_PATH, strlen(DEFAULT_PATH))) {
+            ALOGW("%s: Could not parse document %s", __FUNCTION__, path);
+        }
         return {nullptr, 0};
     }
     xmlNodePtr cur = xmlDocGetRootElement(doc);
diff --git a/services/audiopolicy/engine/config/tests/Android.bp b/services/audiopolicy/engine/config/tests/Android.bp
index 6b0774f..5791f17 100644
--- a/services/audiopolicy/engine/config/tests/Android.bp
+++ b/services/audiopolicy/engine/config/tests/Android.bp
@@ -1,3 +1,12 @@
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
 cc_test {
     name: "audiopolicy_engineconfig_tests",
 
diff --git a/services/audiopolicy/engine/config/tests/resources/Android.bp b/services/audiopolicy/engine/config/tests/resources/Android.bp
index 0aee0e9..9cee978 100644
--- a/services/audiopolicy/engine/config/tests/resources/Android.bp
+++ b/services/audiopolicy/engine/config/tests/resources/Android.bp
@@ -1,3 +1,12 @@
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
 filegroup {
     name: "audiopolicy_engineconfig_files",
     srcs: [
diff --git a/services/audiopolicy/engine/interface/EngineInterface.h b/services/audiopolicy/engine/interface/EngineInterface.h
index f0a01d3..518f86e 100644
--- a/services/audiopolicy/engine/interface/EngineInterface.h
+++ b/services/audiopolicy/engine/interface/EngineInterface.h
@@ -16,6 +16,8 @@
 
 #pragma once
 
+#include <utility>
+
 #include <AudioPolicyManagerObserver.h>
 #include <media/AudioProductStrategy.h>
 #include <media/AudioVolumeGroup.h>
@@ -35,7 +37,7 @@
 using StrategyVector = std::vector<product_strategy_t>;
 using VolumeGroupVector = std::vector<volume_group_t>;
 using CapturePresetDevicesRoleMap =
-        std::map<audio_source_t, std::map<device_role_t, AudioDeviceTypeAddrVector>>;
+        std::map<std::pair<audio_source_t, device_role_t>, AudioDeviceTypeAddrVector>;
 
 /**
  * This interface is dedicated to the policy manager that a Policy Engine shall implement.
@@ -171,8 +173,10 @@
      * @param[out] mix to be used if a mix has been installed for the given audio attributes.
      * @return selected input device for the audio attributes, may be null if error.
      */
-    virtual sp<DeviceDescriptor> getInputDeviceForAttributes(
-            const audio_attributes_t &attr, sp<AudioPolicyMix> *mix = nullptr) const = 0;
+    virtual sp<DeviceDescriptor> getInputDeviceForAttributes(const audio_attributes_t &attr,
+                                                             uid_t uid = 0,
+                                                             sp<AudioPolicyMix> *mix = nullptr)
+                                                             const = 0;
 
     /**
      * Get the legacy stream type for a given audio attributes.
diff --git a/services/audiopolicy/engineconfigurable/parameter-framework/examples/Car/Android.bp b/services/audiopolicy/engineconfigurable/parameter-framework/examples/Car/Android.bp
index 2d57d4f..bc72484 100644
--- a/services/audiopolicy/engineconfigurable/parameter-framework/examples/Car/Android.bp
+++ b/services/audiopolicy/engineconfigurable/parameter-framework/examples/Car/Android.bp
@@ -32,8 +32,6 @@
     // all of the 'license_kinds' from "frameworks_av_license"
     // to get the below license kinds:
     //   SPDX-license-identifier-Apache-2.0
-    //   SPDX-license-identifier-MIT
-    //   SPDX-license-identifier-Unicode-DFS
     default_applicable_licenses: ["frameworks_av_license"],
 }
 
diff --git a/services/audiopolicy/engineconfigurable/parameter-framework/examples/CarEmu/Android.bp b/services/audiopolicy/engineconfigurable/parameter-framework/examples/CarEmu/Android.bp
index df31bb9..11da8c7 100644
--- a/services/audiopolicy/engineconfigurable/parameter-framework/examples/CarEmu/Android.bp
+++ b/services/audiopolicy/engineconfigurable/parameter-framework/examples/CarEmu/Android.bp
@@ -33,8 +33,6 @@
     // all of the 'license_kinds' from "frameworks_av_license"
     // to get the below license kinds:
     //   SPDX-license-identifier-Apache-2.0
-    //   SPDX-license-identifier-MIT
-    //   SPDX-license-identifier-Unicode-DFS
     default_applicable_licenses: ["frameworks_av_license"],
 }
 
diff --git a/services/audiopolicy/engineconfigurable/parameter-framework/examples/Phone/Android.bp b/services/audiopolicy/engineconfigurable/parameter-framework/examples/Phone/Android.bp
index 65ab8b6..91ffeb5 100644
--- a/services/audiopolicy/engineconfigurable/parameter-framework/examples/Phone/Android.bp
+++ b/services/audiopolicy/engineconfigurable/parameter-framework/examples/Phone/Android.bp
@@ -32,8 +32,6 @@
     // all of the 'license_kinds' from "frameworks_av_license"
     // to get the below license kinds:
     //   SPDX-license-identifier-Apache-2.0
-    //   SPDX-license-identifier-MIT
-    //   SPDX-license-identifier-Unicode-DFS
     default_applicable_licenses: ["frameworks_av_license"],
 }
 
diff --git a/services/audiopolicy/engineconfigurable/parameter-framework/examples/SettingsNoInput/Android.bp b/services/audiopolicy/engineconfigurable/parameter-framework/examples/SettingsNoInput/Android.bp
index 90154ee..cac63fc 100644
--- a/services/audiopolicy/engineconfigurable/parameter-framework/examples/SettingsNoInput/Android.bp
+++ b/services/audiopolicy/engineconfigurable/parameter-framework/examples/SettingsNoInput/Android.bp
@@ -29,8 +29,6 @@
     // all of the 'license_kinds' from "frameworks_av_license"
     // to get the below license kinds:
     //   SPDX-license-identifier-Apache-2.0
-    //   SPDX-license-identifier-MIT
-    //   SPDX-license-identifier-Unicode-DFS
     default_applicable_licenses: ["frameworks_av_license"],
 }
 
diff --git a/services/audiopolicy/engineconfigurable/parameter-framework/examples/SettingsNoOutput/Android.bp b/services/audiopolicy/engineconfigurable/parameter-framework/examples/SettingsNoOutput/Android.bp
index 5b06852..337f358 100644
--- a/services/audiopolicy/engineconfigurable/parameter-framework/examples/SettingsNoOutput/Android.bp
+++ b/services/audiopolicy/engineconfigurable/parameter-framework/examples/SettingsNoOutput/Android.bp
@@ -29,8 +29,6 @@
     // all of the 'license_kinds' from "frameworks_av_license"
     // to get the below license kinds:
     //   SPDX-license-identifier-Apache-2.0
-    //   SPDX-license-identifier-MIT
-    //   SPDX-license-identifier-Unicode-DFS
     default_applicable_licenses: ["frameworks_av_license"],
 }
 
diff --git a/services/audiopolicy/engineconfigurable/src/Engine.cpp b/services/audiopolicy/engineconfigurable/src/Engine.cpp
index 6d42fcf..b0c376a 100644
--- a/services/audiopolicy/engineconfigurable/src/Engine.cpp
+++ b/services/audiopolicy/engineconfigurable/src/Engine.cpp
@@ -310,6 +310,7 @@
 }
 
 sp<DeviceDescriptor> Engine::getInputDeviceForAttributes(const audio_attributes_t &attr,
+                                                         uid_t uid,
                                                          sp<AudioPolicyMix> *mix) const
 {
     const auto &policyMixes = getApmObserver()->getAudioPolicyMixCollection();
@@ -328,7 +329,10 @@
         return device;
     }
 
-    device = policyMixes.getDeviceAndMixForInputSource(attr.source, availableInputDevices, mix);
+    device = policyMixes.getDeviceAndMixForInputSource(attr.source,
+                                                       availableInputDevices,
+                                                       uid,
+                                                       mix);
     if (device != nullptr) {
         return device;
     }
diff --git a/services/audiopolicy/engineconfigurable/src/Engine.h b/services/audiopolicy/engineconfigurable/src/Engine.h
index 3b371d8..d8e2742 100644
--- a/services/audiopolicy/engineconfigurable/src/Engine.h
+++ b/services/audiopolicy/engineconfigurable/src/Engine.h
@@ -61,8 +61,10 @@
     DeviceVector getOutputDevicesForStream(audio_stream_type_t stream,
                                            bool fromCache = false) const override;
 
-    sp<DeviceDescriptor> getInputDeviceForAttributes(
-            const audio_attributes_t &attr, sp<AudioPolicyMix> *mix = nullptr) const override;
+    sp<DeviceDescriptor> getInputDeviceForAttributes(const audio_attributes_t &attr,
+                                                     uid_t uid = 0,
+                                                     sp<AudioPolicyMix> *mix = nullptr)
+                                                     const override;
 
     void updateDeviceSelectionCache() override;
 
diff --git a/services/audiopolicy/engineconfigurable/tools/buildCommonTypesStructureFile.py b/services/audiopolicy/engineconfigurable/tools/buildCommonTypesStructureFile.py
index 5083b14..43b3dd2 100755
--- a/services/audiopolicy/engineconfigurable/tools/buildCommonTypesStructureFile.py
+++ b/services/audiopolicy/engineconfigurable/tools/buildCommonTypesStructureFile.py
@@ -172,12 +172,6 @@
         logging.info("added stub input device mask")
 
     # Transform input source in inclusive criterion
-    shift = len(all_component_types['OutputDevicesMask'])
-    if shift > 32:
-        logging.critical("OutputDevicesMask incompatible with criterion representation on 32 bits")
-        logging.info("EXIT ON FAILURE")
-        exit(1)
-
     for component_types in all_component_types:
         values = ','.join('{}:{}'.format(value, key) for key, value in all_component_types[component_types].items())
         logging.info("{}: <{}>".format(component_types, values))
diff --git a/services/audiopolicy/enginedefault/src/Engine.cpp b/services/audiopolicy/enginedefault/src/Engine.cpp
index d038ce5..edcdf5a 100644
--- a/services/audiopolicy/enginedefault/src/Engine.cpp
+++ b/services/audiopolicy/enginedefault/src/Engine.cpp
@@ -142,47 +142,20 @@
     return EngineBase::setForceUse(usage, config);
 }
 
-DeviceVector Engine::getDevicesForStrategyInt(legacy_strategy strategy,
-                                              DeviceVector availableOutputDevices,
-                                              DeviceVector availableInputDevices,
-                                              const SwAudioOutputCollection &outputs) const
+void Engine::filterOutputDevicesForStrategy(legacy_strategy strategy,
+                                            DeviceVector& availableOutputDevices,
+                                            const SwAudioOutputCollection &outputs) const
 {
-    DeviceVector devices;
+    DeviceVector availableInputDevices = getApmObserver()->getAvailableInputDevices();
 
     switch (strategy) {
-
-    case STRATEGY_TRANSMITTED_THROUGH_SPEAKER:
-        devices = availableOutputDevices.getDevicesFromType(AUDIO_DEVICE_OUT_SPEAKER);
-        break;
-
-    case STRATEGY_SONIFICATION_RESPECTFUL:
-        if (isInCall() || outputs.isActiveLocally(toVolumeSource(AUDIO_STREAM_VOICE_CALL))) {
-            devices = getDevicesForStrategyInt(
-                    STRATEGY_SONIFICATION, availableOutputDevices, availableInputDevices, outputs);
-        } else {
-            bool media_active_locally =
-                    outputs.isActiveLocally(toVolumeSource(AUDIO_STREAM_MUSIC),
-                                            SONIFICATION_RESPECTFUL_AFTER_MUSIC_DELAY)
-                    || outputs.isActiveLocally(
-                        toVolumeSource(AUDIO_STREAM_ACCESSIBILITY),
-                        SONIFICATION_RESPECTFUL_AFTER_MUSIC_DELAY);
+    case STRATEGY_SONIFICATION_RESPECTFUL: {
+        if (!(isInCall() || outputs.isActiveLocally(toVolumeSource(AUDIO_STREAM_VOICE_CALL)))) {
             // routing is same as media without the "remote" device
             availableOutputDevices.remove(availableOutputDevices.getDevicesFromType(
                     AUDIO_DEVICE_OUT_REMOTE_SUBMIX));
-            devices = getDevicesForStrategyInt(STRATEGY_MEDIA,
-                    availableOutputDevices,
-                    availableInputDevices, outputs);
-            // if no media is playing on the device, check for mandatory use of "safe" speaker
-            // when media would have played on speaker, and the safe speaker path is available
-            if (!media_active_locally) {
-                devices.replaceDevicesByType(
-                        AUDIO_DEVICE_OUT_SPEAKER,
-                        availableOutputDevices.getDevicesFromType(
-                                AUDIO_DEVICE_OUT_SPEAKER_SAFE));
-            }
         }
-        break;
-
+        } break;
     case STRATEGY_DTMF:
     case STRATEGY_PHONE: {
         // Force use of only devices on primary output if:
@@ -214,6 +187,78 @@
                 availableOutputDevices = availPrimaryOutputDevices;
             }
         }
+        } break;
+    case STRATEGY_ACCESSIBILITY: {
+        // do not route accessibility prompts to a digital output currently configured with a
+        // compressed format, as they would likely not be mixed but dropped instead.
+        for (size_t i = 0; i < outputs.size(); i++) {
+            sp<AudioOutputDescriptor> desc = outputs.valueAt(i);
+            if (desc->isActive() && !audio_is_linear_pcm(desc->getFormat())) {
+                availableOutputDevices.remove(desc->devices().getDevicesFromTypes({
+                        AUDIO_DEVICE_OUT_HDMI, AUDIO_DEVICE_OUT_SPDIF,
+                        AUDIO_DEVICE_OUT_HDMI_ARC, AUDIO_DEVICE_OUT_HDMI_EARC}));
+            }
+        }
+        } break;
+    default:
+        break;
+    }
+}
+
+product_strategy_t Engine::remapStrategyFromContext(product_strategy_t strategy,
+                                                 const SwAudioOutputCollection &outputs) const {
+    auto legacyStrategy = mLegacyStrategyMap.find(strategy) != end(mLegacyStrategyMap) ?
+                          mLegacyStrategyMap.at(strategy) : STRATEGY_NONE;
+
+    if (isInCall()) {
+        switch (legacyStrategy) {
+        case STRATEGY_ACCESSIBILITY:
+        case STRATEGY_DTMF:
+        case STRATEGY_MEDIA:
+        case STRATEGY_SONIFICATION:
+        case STRATEGY_SONIFICATION_RESPECTFUL:
+            legacyStrategy = STRATEGY_PHONE;
+            break;
+
+        default:
+            return strategy;
+        }
+    } else {
+        switch (legacyStrategy) {
+        case STRATEGY_SONIFICATION_RESPECTFUL:
+        case STRATEGY_SONIFICATION:
+            if (outputs.isActiveLocally(toVolumeSource(AUDIO_STREAM_VOICE_CALL))) {
+                legacyStrategy = STRATEGY_PHONE;
+            }
+            break;
+
+        case STRATEGY_ACCESSIBILITY:
+            if (outputs.isActive(toVolumeSource(AUDIO_STREAM_RING)) ||
+                    outputs.isActive(toVolumeSource(AUDIO_STREAM_ALARM))) {
+                legacyStrategy = STRATEGY_SONIFICATION;
+            }
+            break;
+
+        default:
+            return strategy;
+        }
+    }
+    return getProductStrategyFromLegacy(legacyStrategy);
+}
+
+DeviceVector Engine::getDevicesForStrategyInt(legacy_strategy strategy,
+                                              DeviceVector availableOutputDevices,
+                                              const SwAudioOutputCollection &outputs) const
+{
+    DeviceVector devices;
+
+    switch (strategy) {
+
+    case STRATEGY_TRANSMITTED_THROUGH_SPEAKER:
+        devices = availableOutputDevices.getDevicesFromType(AUDIO_DEVICE_OUT_SPEAKER);
+        break;
+
+    case STRATEGY_PHONE: {
         devices = availableOutputDevices.getDevicesFromType(AUDIO_DEVICE_OUT_HEARING_AID);
         if (!devices.isEmpty()) break;
         devices = availableOutputDevices.getFirstDevicesFromTypes({
@@ -227,16 +272,6 @@
     } break;
 
     case STRATEGY_SONIFICATION:
-
-        // If incall, just select the STRATEGY_PHONE device
-        if (isInCall() ||
-                outputs.isActiveLocally(toVolumeSource(AUDIO_STREAM_VOICE_CALL))) {
-            devices = getDevicesForStrategyInt(
-                    STRATEGY_PHONE, availableOutputDevices, availableInputDevices, outputs);
-            break;
-        }
-        FALLTHROUGH_INTENDED;
-
     case STRATEGY_ENFORCED_AUDIBLE:
         // strategy STRATEGY_ENFORCED_AUDIBLE uses same routing policy as STRATEGY_SONIFICATION
         // except:
@@ -284,32 +319,9 @@
         // The second device used for sonification is the same as the device used by media strategy
         FALLTHROUGH_INTENDED;
 
+    case STRATEGY_DTMF:
     case STRATEGY_ACCESSIBILITY:
-        if (strategy == STRATEGY_ACCESSIBILITY) {
-            // do not route accessibility prompts to a digital output currently configured with a
-            // compressed format as they would likely not be mixed and dropped.
-            for (size_t i = 0; i < outputs.size(); i++) {
-                sp<AudioOutputDescriptor> desc = outputs.valueAt(i);
-                if (desc->isActive() && !audio_is_linear_pcm(desc->getFormat())) {
-                    availableOutputDevices.remove(desc->devices().getDevicesFromTypes({
-                            AUDIO_DEVICE_OUT_HDMI, AUDIO_DEVICE_OUT_SPDIF,
-                            AUDIO_DEVICE_OUT_HDMI_ARC}));
-                }
-            }
-            if (outputs.isActive(toVolumeSource(AUDIO_STREAM_RING)) ||
-                    outputs.isActive(toVolumeSource(AUDIO_STREAM_ALARM))) {
-                return getDevicesForStrategyInt(
-                    STRATEGY_SONIFICATION, availableOutputDevices, availableInputDevices, outputs);
-            }
-            if (isInCall()) {
-                return getDevicesForStrategyInt(
-                        STRATEGY_PHONE, availableOutputDevices, availableInputDevices, outputs);
-            }
-        }
-        // For other cases, STRATEGY_ACCESSIBILITY behaves like STRATEGY_MEDIA
-        FALLTHROUGH_INTENDED;
-
-    // FIXME: STRATEGY_REROUTING follow STRATEGY_MEDIA for now
+    case STRATEGY_SONIFICATION_RESPECTFUL:
     case STRATEGY_REROUTING:
     case STRATEGY_MEDIA: {
         DeviceVector devices2;
@@ -322,11 +334,6 @@
                 devices2.add(remoteSubmix);
             }
         }
-        if (isInCall() && (strategy == STRATEGY_MEDIA)) {
-            devices = getDevicesForStrategyInt(
-                    STRATEGY_PHONE, availableOutputDevices, availableInputDevices, outputs);
-            break;
-        }
 
         if ((devices2.isEmpty()) &&
             (getForceUse(AUDIO_POLICY_FORCE_FOR_MEDIA) == AUDIO_POLICY_FORCE_SPEAKER)) {
@@ -359,7 +366,9 @@
         if (strategy == STRATEGY_MEDIA) {
             // ARC, SPDIF and AUX_LINE can co-exist with others.
             devices3 = availableOutputDevices.getDevicesFromTypes({
-                    AUDIO_DEVICE_OUT_HDMI_ARC, AUDIO_DEVICE_OUT_SPDIF, AUDIO_DEVICE_OUT_AUX_LINE});
+                    AUDIO_DEVICE_OUT_HDMI_ARC, AUDIO_DEVICE_OUT_HDMI_EARC,
+                    AUDIO_DEVICE_OUT_SPDIF, AUDIO_DEVICE_OUT_AUX_LINE,
+                    });
         }
 
         devices2.add(devices3);
@@ -374,9 +383,19 @@
             devices.remove(devices.getDevicesFromType(AUDIO_DEVICE_OUT_SPEAKER));
         }
 
-        // for STRATEGY_SONIFICATION:
+        bool mediaActiveLocally =
+                outputs.isActiveLocally(toVolumeSource(AUDIO_STREAM_MUSIC),
+                                        SONIFICATION_RESPECTFUL_AFTER_MUSIC_DELAY)
+                || outputs.isActiveLocally(
+                    toVolumeSource(AUDIO_STREAM_ACCESSIBILITY),
+                    SONIFICATION_RESPECTFUL_AFTER_MUSIC_DELAY);
+        // - for STRATEGY_SONIFICATION:
         // if SPEAKER was selected, and SPEAKER_SAFE is available, use SPEAKER_SAFE instead
-        if (strategy == STRATEGY_SONIFICATION) {
+        // - for STRATEGY_SONIFICATION_RESPECTFUL:
+        // if no media is playing on the device, check for mandatory use of "safe" speaker
+        // when media would have played on speaker, and the safe speaker path is available
+        if (strategy == STRATEGY_SONIFICATION
+            || (strategy == STRATEGY_SONIFICATION_RESPECTFUL && !mediaActiveLocally)) {
             devices.replaceDevicesByType(
                     AUDIO_DEVICE_OUT_SPEAKER,
                     availableOutputDevices.getDevicesFromType(
@@ -629,16 +648,21 @@
     return preferredAvailableDevVec;
 }
 
+
 DeviceVector Engine::getDevicesForProductStrategy(product_strategy_t strategy) const {
-    DeviceVector availableOutputDevices = getApmObserver()->getAvailableOutputDevices();
+    const SwAudioOutputCollection& outputs = getApmObserver()->getOutputs();
+
+    // Take context into account to remap product strategy before
+    // checking preferred device for strategy and applying default routing rules
+    strategy = remapStrategyFromContext(strategy, outputs);
+
     auto legacyStrategy = mLegacyStrategyMap.find(strategy) != end(mLegacyStrategyMap) ?
                           mLegacyStrategyMap.at(strategy) : STRATEGY_NONE;
 
-    // When not in call, STRATEGY_PHONE and STRATEGY_DTMF follow STRATEGY_MEDIA
-    if (!isInCall() && (legacyStrategy == STRATEGY_PHONE || legacyStrategy == STRATEGY_DTMF)) {
-        legacyStrategy = STRATEGY_MEDIA;
-        strategy = getProductStrategyFromLegacy(STRATEGY_MEDIA);
-    }
+    DeviceVector availableOutputDevices = getApmObserver()->getAvailableOutputDevices();
+
+    filterOutputDevicesForStrategy(legacyStrategy, availableOutputDevices, outputs);
+
     // check if this strategy has a preferred device that is available,
     // if yes, give priority to it.
     DeviceVector preferredAvailableDevVec =
@@ -647,12 +671,9 @@
         return preferredAvailableDevVec;
     }
 
-    DeviceVector availableInputDevices = getApmObserver()->getAvailableInputDevices();
-    const SwAudioOutputCollection& outputs = getApmObserver()->getOutputs();
-
     return getDevicesForStrategyInt(legacyStrategy,
                                     availableOutputDevices,
-                                    availableInputDevices, outputs);
+                                    outputs);
 }
 
 DeviceVector Engine::getOutputDevicesForAttributes(const audio_attributes_t &attributes,
@@ -688,6 +709,7 @@
 }
 
 sp<DeviceDescriptor> Engine::getInputDeviceForAttributes(const audio_attributes_t &attr,
+                                                         uid_t uid,
                                                          sp<AudioPolicyMix> *mix) const
 {
     const auto &policyMixes = getApmObserver()->getAudioPolicyMixCollection();
@@ -707,7 +729,10 @@
         return device;
     }
 
-    device = policyMixes.getDeviceAndMixForInputSource(attr.source, availableInputDevices, mix);
+    device = policyMixes.getDeviceAndMixForInputSource(attr.source,
+                                                       availableInputDevices,
+                                                       uid,
+                                                       mix);
     if (device != nullptr) {
         return device;
     }
diff --git a/services/audiopolicy/enginedefault/src/Engine.h b/services/audiopolicy/enginedefault/src/Engine.h
index 6214fe7..595e289 100644
--- a/services/audiopolicy/enginedefault/src/Engine.h
+++ b/services/audiopolicy/enginedefault/src/Engine.h
@@ -62,8 +62,10 @@
     DeviceVector getOutputDevicesForStream(audio_stream_type_t stream,
                                            bool fromCache = false) const override;
 
-    sp<DeviceDescriptor> getInputDeviceForAttributes(
-            const audio_attributes_t &attr, sp<AudioPolicyMix> *mix = nullptr) const override;
+    sp<DeviceDescriptor> getInputDeviceForAttributes(const audio_attributes_t &attr,
+                                                     uid_t uid = 0,
+                                                     sp<AudioPolicyMix> *mix = nullptr)
+                                                     const override;
 
     void updateDeviceSelectionCache() override;
 
@@ -74,9 +76,15 @@
 
     status_t setDefaultDevice(audio_devices_t device);
 
+    void filterOutputDevicesForStrategy(legacy_strategy strategy,
+                                            DeviceVector& availableOutputDevices,
+                                            const SwAudioOutputCollection &outputs) const;
+
+    product_strategy_t remapStrategyFromContext(product_strategy_t strategy,
+                                            const SwAudioOutputCollection &outputs) const;
+
     DeviceVector getDevicesForStrategyInt(legacy_strategy strategy,
                                           DeviceVector availableOutputDevices,
-                                          DeviceVector availableInputDevices,
                                           const SwAudioOutputCollection &outputs) const;
 
     DeviceVector getDevicesForProductStrategy(product_strategy_t strategy) const;
diff --git a/services/audiopolicy/fuzzer/Android.bp b/services/audiopolicy/fuzzer/Android.bp
index 21f6515..38bdedc 100644
--- a/services/audiopolicy/fuzzer/Android.bp
+++ b/services/audiopolicy/fuzzer/Android.bp
@@ -16,6 +16,15 @@
  *
  ******************************************************************************/
 
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
 cc_fuzz {
     name: "audiopolicy_fuzzer",
     srcs: [
@@ -41,6 +50,7 @@
         "libbinder",
         "libaudiopolicy",
         "libaudiopolicymanagerdefault",
+        "media_permission-aidl-cpp",
     ],
     static_libs: [
         "android.hardware.audio.common@7.0-enums",
diff --git a/services/audiopolicy/fuzzer/audiopolicy_fuzzer.cpp b/services/audiopolicy/fuzzer/audiopolicy_fuzzer.cpp
index c1f2aa8..1177b95 100644
--- a/services/audiopolicy/fuzzer/audiopolicy_fuzzer.cpp
+++ b/services/audiopolicy/fuzzer/audiopolicy_fuzzer.cpp
@@ -26,6 +26,7 @@
 
 #include <Serializer.h>
 #include <android-base/file.h>
+#include <android/media/permission/Identity.h>
 #include <libxml/parser.h>
 #include <libxml/xinclude.h>
 #include <media/AudioPolicy.h>
@@ -46,6 +47,8 @@
 using namespace ::android::audio::policy::configuration::V7_0;
 }
 
+using media::permission::Identity;
+
 static const std::vector<audio_format_t> kAudioFormats = [] {
     std::vector<audio_format_t> result;
     for (const auto enumVal : xsdc_enum_range<xsd::AudioFormat>{}) {
@@ -246,7 +249,10 @@
     *portId = AUDIO_PORT_HANDLE_NONE;
     AudioPolicyInterface::output_type_t outputType;
 
-    if (mManager->getOutputForAttr(&attr, output, AUDIO_SESSION_NONE, &stream, 0 /*uid*/, &config,
+    // TODO b/182392769: use identity util
+    Identity i;
+    i.uid = 0;
+    if (mManager->getOutputForAttr(&attr, output, AUDIO_SESSION_NONE, &stream, i, &config,
                                    &flags, selectedDeviceId, portId, {}, &outputType) != OK) {
         return false;
     }
@@ -270,7 +276,9 @@
     *portId = AUDIO_PORT_HANDLE_NONE;
     AudioPolicyInterface::input_type_t inputType;
 
-    if (mManager->getInputForAttr(&attr, &input, riid, AUDIO_SESSION_NONE, 0 /*uid*/, &config,
+    Identity i;
+    i.uid = 0;
+    if (mManager->getInputForAttr(&attr, &input, riid, AUDIO_SESSION_NONE, i, &config,
                                   flags, selectedDeviceId, &inputType, portId) != OK) {
         return false;
     }
@@ -633,7 +641,9 @@
 }
 
 bool AudioPolicyManagerFuzzerDPPlaybackReRouting::initialize() {
-    AudioPolicyManagerFuzzerDynamicPolicy::initialize();
+    if (!AudioPolicyManagerFuzzerDynamicPolicy::initialize()) {
+        return false;
+    }
     mTracker.reset(new RecordingActivityTracker());
 
     mAudioConfig = AUDIO_CONFIG_INITIALIZER;
@@ -743,7 +753,9 @@
 }
 
 bool AudioPolicyManagerFuzzerDPMixRecordInjection::initialize() {
-    AudioPolicyManagerFuzzerDynamicPolicy::initialize();
+    if (!AudioPolicyManagerFuzzerDynamicPolicy::initialize()) {
+        return false;
+    }
 
     mTracker.reset(new RecordingActivityTracker());
 
diff --git a/services/audiopolicy/fuzzer/resources/Android.bp b/services/audiopolicy/fuzzer/resources/Android.bp
index f1e3a51..22ee256 100644
--- a/services/audiopolicy/fuzzer/resources/Android.bp
+++ b/services/audiopolicy/fuzzer/resources/Android.bp
@@ -16,6 +16,15 @@
  *
  ******************************************************************************/
 
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
 filegroup {
     name: "audiopolicyfuzzer_configuration_files",
     srcs: [
diff --git a/services/audiopolicy/managerdefault/Android.bp b/services/audiopolicy/managerdefault/Android.bp
index 5572beb..b111db4 100644
--- a/services/audiopolicy/managerdefault/Android.bp
+++ b/services/audiopolicy/managerdefault/Android.bp
@@ -34,6 +34,8 @@
         // a dependency on it in the device makefile. There will be no build time
         // conflict with libaudiopolicyenginedefault.
         "libaudiopolicyenginedefault",
+        "media_permission-aidl-cpp",
+        "libaudioclient_aidl_conversion",
     ],
 
     header_libs: [
diff --git a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
index 04bc5f1..dd44c54 100644
--- a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
+++ b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
@@ -52,6 +52,8 @@
 
 namespace android {
 
+using media::permission::Identity;
+
 //FIXME: workaround for truncated touch sounds
 // to be removed when the problem is handled by system UI
 #define TOUCH_SOUND_FIXED_DELAY_MS 100
@@ -61,11 +63,11 @@
 constexpr float IN_CALL_EARPIECE_HEADROOM_DB = 3.f;
 
 // Compressed formats for MSD module, ordered from most preferred to least preferred.
-static const std::vector<audio_format_t> compressedFormatsOrder = {{
-        AUDIO_FORMAT_MAT_2_1, AUDIO_FORMAT_MAT_2_0, AUDIO_FORMAT_E_AC3,
+static const std::vector<audio_format_t> msdCompressedFormatsOrder = {{
+        AUDIO_FORMAT_IEC60958, AUDIO_FORMAT_MAT_2_1, AUDIO_FORMAT_MAT_2_0, AUDIO_FORMAT_E_AC3,
         AUDIO_FORMAT_AC3, AUDIO_FORMAT_PCM_16_BIT }};
 // Channel masks for MSD module, 3D > 2D > 1D ordering (most preferred to least preferred).
-static const std::vector<audio_channel_mask_t> surroundChannelMasksOrder = {{
+static const std::vector<audio_channel_mask_t> msdSurroundChannelMasksOrder = {{
         AUDIO_CHANNEL_OUT_3POINT1POINT2, AUDIO_CHANNEL_OUT_3POINT0POINT2,
         AUDIO_CHANNEL_OUT_2POINT1POINT2, AUDIO_CHANNEL_OUT_2POINT0POINT2,
         AUDIO_CHANNEL_OUT_5POINT1, AUDIO_CHANNEL_OUT_STEREO }};
@@ -261,11 +263,7 @@
         } else {
             checkCloseOutputs();
         }
-
-        if (mEngine->getPhoneState() == AUDIO_MODE_IN_CALL && hasPrimaryOutput()) {
-            DeviceVector newDevices = getNewOutputDevices(mPrimaryOutput, false /*fromCache*/);
-            updateCallRouting(newDevices);
-        }
+        (void)updateCallRouting(false /*fromCache*/);
         std::vector<audio_io_handle_t> outputsToReopen;
         const DeviceVector msdOutDevices = getMsdAudioOutDevices();
         const DeviceVector activeMediaDevices =
@@ -286,7 +284,7 @@
                 setOutputDevices(desc, newDevices, force, 0);
             }
             if (!desc->isDuplicated() && desc->mProfile->hasDynamicAudioProfile() &&
-                    desc->devices() != activeMediaDevices &&
+                    !activeMediaDevices.empty() && desc->devices() != activeMediaDevices &&
                     desc->supportsDevicesForPlayback(activeMediaDevices)) {
                 // Reopen the output to query the dynamic profiles when there is not active
                 // clients or all active clients will be rerouted. Otherwise, set the flag
@@ -382,10 +380,7 @@
         // getDeviceForStrategy() cache
         updateDevicesAndOutputs();
 
-        if (mEngine->getPhoneState() == AUDIO_MODE_IN_CALL && hasPrimaryOutput()) {
-            DeviceVector newDevices = getNewOutputDevices(mPrimaryOutput, false /*fromCache*/);
-            updateCallRouting(newDevices);
-        }
+        (void)updateCallRouting(false /*fromCache*/);
         // Reconnect Audio Source
         for (const auto &strategy : mEngine->getOrderedProductStrategies()) {
             auto attributes = mEngine->getAllAttributesForProductStrategy(strategy).front();
@@ -550,23 +545,58 @@
     return status;
 }
 
-uint32_t AudioPolicyManager::updateCallRouting(const DeviceVector &rxDevices, uint32_t delayMs)
+DeviceVector AudioPolicyManager::selectBestRxSinkDevicesForCall(bool fromCache)
+{
+    DeviceVector rxSinkdevices{};
+    rxSinkdevices = mEngine->getOutputDevicesForAttributes(
+                attributes_initializer(AUDIO_USAGE_VOICE_COMMUNICATION), nullptr, fromCache);
+    if (!rxSinkdevices.isEmpty() && mAvailableOutputDevices.contains(rxSinkdevices.itemAt(0))) {
+        auto rxSinkDevice = rxSinkdevices.itemAt(0);
+        auto telephonyRxModule = mHwModules.getModuleForDeviceType(
+                    AUDIO_DEVICE_IN_TELEPHONY_RX, AUDIO_FORMAT_DEFAULT);
+        // retrieve Rx Source device descriptor
+        sp<DeviceDescriptor> rxSourceDevice = mAvailableInputDevices.getDevice(
+                    AUDIO_DEVICE_IN_TELEPHONY_RX, String8(), AUDIO_FORMAT_DEFAULT);
+
+        // RX Telephony and Rx sink devices are declared by Primary Audio HAL
+        if (isPrimaryModule(telephonyRxModule) && (telephonyRxModule->getHalVersionMajor() >= 3) &&
+                telephonyRxModule->supportsPatch(rxSourceDevice, rxSinkDevice)) {
+            ALOGW("%s() device %s using HW Bridge", __func__, rxSinkDevice->toString().c_str());
+            return DeviceVector(rxSinkDevice);
+        }
+    }
+    // Note that although getNewOutputDevices() is called on the primary output,
+    // the device returned is not necessarily reachable via this output
+    // (it is filtered later by setOutputDevices()).
+    return getNewOutputDevices(mPrimaryOutput, fromCache);
+}
+
+status_t AudioPolicyManager::updateCallRouting(bool fromCache, uint32_t delayMs, uint32_t *waitMs)
+{
+    if (mEngine->getPhoneState() == AUDIO_MODE_IN_CALL && hasPrimaryOutput()) {
+        DeviceVector rxDevices = selectBestRxSinkDevicesForCall(fromCache);
+        return updateCallRoutingInternal(rxDevices, delayMs, waitMs);
+    }
+    return INVALID_OPERATION;
+}
+
+status_t AudioPolicyManager::updateCallRoutingInternal(
+        const DeviceVector &rxDevices, uint32_t delayMs, uint32_t *waitMs)
 {
     bool createTxPatch = false;
     bool createRxPatch = false;
     uint32_t muteWaitMs = 0;
-
     if(!hasPrimaryOutput() ||
             mPrimaryOutput->devices().onlyContainsDevicesWithType(AUDIO_DEVICE_OUT_STUB)) {
-        return muteWaitMs;
+        return INVALID_OPERATION;
     }
-    ALOG_ASSERT(!rxDevices.isEmpty(), "updateCallRouting() no selected output device");
+    ALOG_ASSERT(!rxDevices.isEmpty(), "%s() no selected output device", __func__);
 
     audio_attributes_t attr = { .source = AUDIO_SOURCE_VOICE_COMMUNICATION };
     auto txSourceDevice = mEngine->getInputDeviceForAttributes(attr);
-    ALOG_ASSERT(txSourceDevice != 0, "updateCallRouting() input selected device not available");
+    ALOG_ASSERT(txSourceDevice != 0, "%s() input selected device not available", __func__);
 
-    ALOGV("updateCallRouting device rxDevice %s txDevice %s",
+    ALOGV("%s device rxDevice %s txDevice %s", __func__,
           rxDevices.itemAt(0)->toString().c_str(), txSourceDevice->toString().c_str());
 
     disconnectTelephonyRxAudioSource();
@@ -595,8 +625,8 @@
             (telephonyRxModule->getHalVersionMajor() >= 3)) {
         if (rxSourceDevice == 0 || txSinkDevice == 0) {
             // RX / TX Telephony device(s) is(are) not currently available
-            ALOGE("updateCallRouting() no telephony Tx and/or RX device");
-            return muteWaitMs;
+            ALOGE("%s() no telephony Tx and/or RX device", __func__);
+            return INVALID_OPERATION;
         }
         // createAudioPatchInternal now supports both HW / SW bridging
         createRxPatch = true;
@@ -634,8 +664,10 @@
         }
         mCallTxPatch = createTelephonyPatch(false /*isRx*/, txSourceDevice, delayMs);
     }
-
-    return muteWaitMs;
+    if (waitMs != nullptr) {
+        *waitMs = muteWaitMs;
+    }
+    return NO_ERROR;
 }
 
 sp<AudioPatch> AudioPolicyManager::createTelephonyPatch(
@@ -753,25 +785,22 @@
     }
 
     if (hasPrimaryOutput()) {
-        // Note that despite the fact that getNewOutputDevices() is called on the primary output,
-        // the device returned is not necessarily reachable via this output
-        DeviceVector rxDevices = getNewOutputDevices(mPrimaryOutput, false /*fromCache*/);
-        // force routing command to audio hardware when ending call
-        // even if no device change is needed
-        if (isStateInCall(oldState) && rxDevices.isEmpty()) {
-            rxDevices = mPrimaryOutput->devices();
-        }
-
         if (state == AUDIO_MODE_IN_CALL) {
-            updateCallRouting(rxDevices, delayMs);
-        } else if (oldState == AUDIO_MODE_IN_CALL) {
-            disconnectTelephonyRxAudioSource();
-            if (mCallTxPatch != 0) {
-                releaseAudioPatchInternal(mCallTxPatch->getHandle());
-                mCallTxPatch.clear();
-            }
-            setOutputDevices(mPrimaryOutput, rxDevices, force, 0);
+            (void)updateCallRouting(false /*fromCache*/, delayMs);
         } else {
+            DeviceVector rxDevices = getNewOutputDevices(mPrimaryOutput, false /*fromCache*/);
+            // force routing command to audio hardware when ending call
+            // even if no device change is needed
+            if (isStateInCall(oldState) && rxDevices.isEmpty()) {
+                rxDevices = mPrimaryOutput->devices();
+            }
+            if (oldState == AUDIO_MODE_IN_CALL) {
+                disconnectTelephonyRxAudioSource();
+                if (mCallTxPatch != 0) {
+                    releaseAudioPatchInternal(mCallTxPatch->getHandle());
+                    mCallTxPatch.clear();
+                }
+            }
             setOutputDevices(mPrimaryOutput, rxDevices, force, 0);
         }
     }
@@ -1065,7 +1094,7 @@
     *output = AUDIO_IO_HANDLE_NONE;
     if (!msdDevices.isEmpty()) {
         *output = getOutputForDevices(msdDevices, session, *stream, config, flags);
-        if (*output != AUDIO_IO_HANDLE_NONE && setMsdPatches(&outputDevices) == NO_ERROR) {
+        if (*output != AUDIO_IO_HANDLE_NONE && setMsdOutputPatches(&outputDevices) == NO_ERROR) {
             ALOGV("%s() Using MSD devices %s instead of devices %s",
                   __func__, msdDevices.toString().c_str(), outputDevices.toString().c_str());
         } else {
@@ -1103,7 +1132,7 @@
                                               audio_io_handle_t *output,
                                               audio_session_t session,
                                               audio_stream_type_t *stream,
-                                              uid_t uid,
+                                              const Identity& identity,
                                               const audio_config_t *config,
                                               audio_output_flags_t *flags,
                                               audio_port_handle_t *selectedDeviceId,
@@ -1115,6 +1144,8 @@
     if (*portId != AUDIO_PORT_HANDLE_NONE) {
         return INVALID_OPERATION;
     }
+    const uid_t uid = VALUE_OR_RETURN_STATUS(
+        aidl2legacy_int32_t_uid_t(identity.uid));
     const audio_port_handle_t requestedPortId = *selectedDeviceId;
     audio_attributes_t resultAttr;
     bool isRequestedDeviceForExclusiveUse = false;
@@ -1231,7 +1262,7 @@
 
     // An MSD patch may be using the only output stream that can service this request. Release
     // all MSD patches to prioritize this request over any active output on MSD.
-    releaseMsdPatches(devices);
+    releaseMsdOutputPatches(devices);
 
     status_t status = outputDesc->open(config, devices, stream, flags, output);
 
@@ -1355,7 +1386,7 @@
                                                         mAvailableOutputDevices);
 }
 
-const AudioPatchCollection AudioPolicyManager::getMsdPatches() const {
+const AudioPatchCollection AudioPolicyManager::getMsdOutputPatches() const {
     AudioPatchCollection msdPatches;
     sp<HwModule> msdModule = mHwModules.getModuleFromName(AUDIO_HARDWARE_MODULE_ID_MSD);
     if (msdModule != 0) {
@@ -1373,50 +1404,47 @@
     return msdPatches;
 }
 
-status_t AudioPolicyManager::getBestMsdAudioProfileFor(const sp<DeviceDescriptor> &outputDevice,
-        bool hwAvSync, audio_port_config *sourceConfig, audio_port_config *sinkConfig) const
-{
-    sp<HwModule> msdModule = mHwModules.getModuleFromName(AUDIO_HARDWARE_MODULE_ID_MSD);
-    if (msdModule == nullptr) {
-        ALOGE("%s() unable to get MSD module", __func__);
-        return NO_INIT;
-    }
-    sp<HwModule> deviceModule = mHwModules.getModuleForDevice(outputDevice, AUDIO_FORMAT_DEFAULT);
-    if (deviceModule == nullptr) {
-        ALOGE("%s() unable to get module for %s", __func__, outputDevice->toString().c_str());
-        return NO_INIT;
-    }
-    const InputProfileCollection &inputProfiles = msdModule->getInputProfiles();
+status_t AudioPolicyManager::getMsdProfiles(bool hwAvSync,
+                                            const InputProfileCollection &inputProfiles,
+                                            const OutputProfileCollection &outputProfiles,
+                                            const sp<DeviceDescriptor> &sourceDevice,
+                                            const sp<DeviceDescriptor> &sinkDevice,
+                                            AudioProfileVector& sourceProfiles,
+                                            AudioProfileVector& sinkProfiles) const {
     if (inputProfiles.isEmpty()) {
-        ALOGE("%s() no input profiles for MSD module", __func__);
+        ALOGE("%s() no input profiles for source module", __func__);
         return NO_INIT;
     }
-    const OutputProfileCollection &outputProfiles = deviceModule->getOutputProfiles();
     if (outputProfiles.isEmpty()) {
-        ALOGE("%s() no output profiles for device %s", __func__, outputDevice->toString().c_str());
+        ALOGE("%s() no output profiles for sink module", __func__);
         return NO_INIT;
     }
-    AudioProfileVector msdProfiles;
-    // Each IOProfile represents a MixPort from audio_policy_configuration.xml
     for (const auto &inProfile : inputProfiles) {
-        if (hwAvSync == ((inProfile->getFlags() & AUDIO_INPUT_FLAG_HW_AV_SYNC) != 0)) {
-            appendAudioProfiles(msdProfiles, inProfile->getAudioProfiles());
+        if (hwAvSync == ((inProfile->getFlags() & AUDIO_INPUT_FLAG_HW_AV_SYNC) != 0) &&
+                inProfile->supportsDevice(sourceDevice)) {
+            appendAudioProfiles(sourceProfiles, inProfile->getAudioProfiles());
         }
     }
-    AudioProfileVector deviceProfiles;
     for (const auto &outProfile : outputProfiles) {
         if (hwAvSync == ((outProfile->getFlags() & AUDIO_OUTPUT_FLAG_HW_AV_SYNC) != 0) &&
-                outProfile->supportsDevice(outputDevice)) {
-            appendAudioProfiles(deviceProfiles, outProfile->getAudioProfiles());
+                outProfile->supportsDevice(sinkDevice)) {
+            appendAudioProfiles(sinkProfiles, outProfile->getAudioProfiles());
         }
     }
+    return NO_ERROR;
+}
+
+status_t AudioPolicyManager::getBestMsdConfig(bool hwAvSync,
+        const AudioProfileVector &sourceProfiles, const AudioProfileVector &sinkProfiles,
+        audio_port_config *sourceConfig, audio_port_config *sinkConfig) const
+{
     struct audio_config_base bestSinkConfig;
-    status_t result = findBestMatchingOutputConfig(msdProfiles, deviceProfiles,
-            compressedFormatsOrder, surroundChannelMasksOrder, true /*preferHigherSamplingRates*/,
-            bestSinkConfig);
+    status_t result = findBestMatchingOutputConfig(sourceProfiles, sinkProfiles,
+            msdCompressedFormatsOrder, msdSurroundChannelMasksOrder,
+            true /*preferHigherSamplingRates*/, bestSinkConfig);
     if (result != NO_ERROR) {
-        ALOGD("%s() no matching profiles found for device: %s, hwAvSync: %d",
-                __func__, outputDevice->toString().c_str(), hwAvSync);
+        ALOGD("%s() no matching config found for sink, hwAvSync: %d",
+                __func__, hwAvSync);
         return result;
     }
     sinkConfig->sample_rate = bestSinkConfig.sample_rate;
@@ -1427,7 +1455,7 @@
             sinkConfig->flags.output | AUDIO_OUTPUT_FLAG_DIRECT);
     if (audio_is_iec61937_compatible(sinkConfig->format)) {
         // For formats compatible with IEC61937 encapsulation, assume that
-        // the record thread input from MSD is IEC61937 framed (for proportional buffer sizing).
+        // the input is IEC61937 framed (for proportional buffer sizing).
         // Add the AUDIO_OUTPUT_FLAG_IEC958_NONAUDIO flag so downstream HAL can distinguish between
         // raw and IEC61937 framed streams.
         sinkConfig->flags.output = static_cast<audio_output_flags_t>(
@@ -1453,28 +1481,50 @@
     return NO_ERROR;
 }
 
-PatchBuilder AudioPolicyManager::buildMsdPatch(const sp<DeviceDescriptor> &outputDevice) const
+PatchBuilder AudioPolicyManager::buildMsdPatch(bool msdIsSource,
+                                               const sp<DeviceDescriptor> &device) const
 {
     PatchBuilder patchBuilder;
-    patchBuilder.addSource(getMsdAudioInDevice()).addSink(outputDevice);
+    sp<HwModule> msdModule = mHwModules.getModuleFromName(AUDIO_HARDWARE_MODULE_ID_MSD);
+    ALOG_ASSERT(msdModule != nullptr, "MSD module not available");
+    sp<HwModule> deviceModule = mHwModules.getModuleForDevice(device, AUDIO_FORMAT_DEFAULT);
+    if (deviceModule == nullptr) {
+        ALOGE("%s() unable to get module for %s", __func__, device->toString().c_str());
+        return patchBuilder;
+    }
+    const InputProfileCollection inputProfiles = msdIsSource ?
+            msdModule->getInputProfiles() : deviceModule->getInputProfiles();
+    const OutputProfileCollection outputProfiles = msdIsSource ?
+            deviceModule->getOutputProfiles() : msdModule->getOutputProfiles();
+
+    const sp<DeviceDescriptor> sourceDevice = msdIsSource ? getMsdAudioInDevice() : device;
+    const sp<DeviceDescriptor> sinkDevice = msdIsSource ?
+            device : getMsdAudioOutDevices().itemAt(0);
+    patchBuilder.addSource(sourceDevice).addSink(sinkDevice);
+
     audio_port_config sourceConfig = patchBuilder.patch()->sources[0];
     audio_port_config sinkConfig = patchBuilder.patch()->sinks[0];
+    AudioProfileVector sourceProfiles;
+    AudioProfileVector sinkProfiles;
     // TODO: Figure out whether MSD module has HW_AV_SYNC flag set in the AP config file.
     // For now, we just forcefully try with HwAvSync first.
-    status_t res = getBestMsdAudioProfileFor(outputDevice, true /*hwAvSync*/,
-            &sourceConfig, &sinkConfig) == NO_ERROR ? NO_ERROR :
-            getBestMsdAudioProfileFor(
-                    outputDevice, false /*hwAvSync*/, &sourceConfig, &sinkConfig);
-    if (res == NO_ERROR) {
-        // Found a matching profile for encoded audio. Re-create PatchBuilder with this config.
-        return (PatchBuilder()).addSource(sourceConfig).addSink(sinkConfig);
+    for (auto hwAvSync : { true, false }) {
+        if (getMsdProfiles(hwAvSync, inputProfiles, outputProfiles, sourceDevice, sinkDevice,
+                sourceProfiles, sinkProfiles) != NO_ERROR) {
+            continue;
+        }
+        if (getBestMsdConfig(hwAvSync, sourceProfiles, sinkProfiles, &sourceConfig,
+                &sinkConfig) == NO_ERROR) {
+            // Found a matching config. Re-create PatchBuilder with this config.
+            return (PatchBuilder()).addSource(sourceConfig).addSink(sinkConfig);
+        }
     }
-    ALOGV("%s() no matching profile found. Fall through to default PCM patch"
+    ALOGV("%s() no matching config found. Fall through to default PCM patch"
             " supporting PCM format conversion.", __func__);
     return patchBuilder;
 }
 
-status_t AudioPolicyManager::setMsdPatches(const DeviceVector *outputDevices) {
+status_t AudioPolicyManager::setMsdOutputPatches(const DeviceVector *outputDevices) {
     DeviceVector devices;
     if (outputDevices != nullptr && outputDevices->size() > 0) {
         devices.add(*outputDevices);
@@ -1489,11 +1539,11 @@
     std::vector<PatchBuilder> patchesToCreate;
     for (auto i = 0u; i < devices.size(); ++i) {
         ALOGV("%s() for device %s", __func__, devices[i]->toString().c_str());
-        patchesToCreate.push_back(buildMsdPatch(devices[i]));
+        patchesToCreate.push_back(buildMsdPatch(true /*msdIsSource*/, devices[i]));
     }
     // Retain only the MSD patches associated with outputDevices request.
     // Tear down the others, and create new ones as needed.
-    AudioPatchCollection patchesToRemove = getMsdPatches();
+    AudioPatchCollection patchesToRemove = getMsdOutputPatches();
     for (auto it = patchesToCreate.begin(); it != patchesToCreate.end(); ) {
         auto retainedPatch = false;
         for (auto i = 0u; i < patchesToRemove.size(); ++i) {
@@ -1538,8 +1588,8 @@
     return status;
 }
 
-void AudioPolicyManager::releaseMsdPatches(const DeviceVector& devices) {
-    AudioPatchCollection msdPatches = getMsdPatches();
+void AudioPolicyManager::releaseMsdOutputPatches(const DeviceVector& devices) {
+    AudioPatchCollection msdPatches = getMsdOutputPatches();
     for (size_t i = 0; i < msdPatches.size(); i++) {
         const auto& patch = msdPatches[i];
         for (size_t j = 0; j < patch->mPatch.num_sinks; ++j) {
@@ -2064,7 +2114,7 @@
                                              audio_io_handle_t *input,
                                              audio_unique_id_t riid,
                                              audio_session_t session,
-                                             uid_t uid,
+                                             const Identity& identity,
                                              const audio_config_base_t *config,
                                              audio_input_flags_t flags,
                                              audio_port_handle_t *selectedDeviceId,
@@ -2083,6 +2133,7 @@
     sp<AudioInputDescriptor> inputDesc;
     sp<RecordClientDescriptor> clientDesc;
     audio_port_handle_t requestedDeviceId = *selectedDeviceId;
+    uid_t uid = VALUE_OR_RETURN_STATUS(aidl2legacy_int32_t_uid_t(identity.uid));
     bool isSoundTrigger;
 
     // The supplied portId must be AUDIO_PORT_HANDLE_NONE
@@ -2177,14 +2228,17 @@
         } else {
             // Prevent from storing invalid requested device id in clients
             requestedDeviceId = AUDIO_PORT_HANDLE_NONE;
-            device = mEngine->getInputDeviceForAttributes(attributes, &policyMix);
+            device = mEngine->getInputDeviceForAttributes(attributes, uid, &policyMix);
+            ALOGV("%s found device type 0x%X", __FUNCTION__, device->type());
         }
         if (device == nullptr) {
             ALOGW("getInputForAttr() could not find device for source %d", attributes.source);
             status = BAD_VALUE;
             goto error;
         }
-        if (policyMix) {
+        if (device->type() == AUDIO_DEVICE_IN_ECHO_REFERENCE) {
+            *inputType = API_INPUT_MIX_CAPTURE;
+        } else if (policyMix) {
             ALOG_ASSERT(policyMix->mMixType == MIX_TYPE_RECORDERS, "Invalid Mix Type");
             // there is an external policy, but this input is attached to a mix of recorders,
             // meaning it receives audio injected into the framework, so the recorder doesn't
@@ -2561,7 +2615,7 @@
             bool close = false;
             for (const auto& client : input->clientsList()) {
                 sp<DeviceDescriptor> device =
-                    mEngine->getInputDeviceForAttributes(client->attributes());
+                    mEngine->getInputDeviceForAttributes(client->attributes(), client->uid());
                 if (!input->supportedDevices().contains(device)) {
                     close = true;
                     break;
@@ -3279,9 +3333,7 @@
 void AudioPolicyManager::updateCallAndOutputRouting(bool forceVolumeReeval, uint32_t delayMs)
 {
     uint32_t waitMs = 0;
-    if (mEngine->getPhoneState() == AUDIO_MODE_IN_CALL && hasPrimaryOutput()) {
-        DeviceVector newDevices = getNewOutputDevices(mPrimaryOutput, true /*fromCache*/);
-        waitMs = updateCallRouting(newDevices, delayMs);
+    if (updateCallRouting(true /*fromCache*/, delayMs, &waitMs) == NO_ERROR) {
         // Only apply special touch sound delay once
         delayMs = 0;
     }
@@ -3900,6 +3952,15 @@
             // be incomplete.
             PatchBuilder patchBuilder;
             audio_port_config sourcePortConfig = {};
+
+            // if first sink is to MSD, establish single MSD patch
+            if (getMsdAudioOutDevices().contains(
+                        mAvailableOutputDevices.getDeviceFromId(patch->sinks[0].id))) {
+                ALOGV("%s patching to MSD", __FUNCTION__);
+                patchBuilder = buildMsdPatch(false /*msdIsSource*/, srcDevice);
+                goto installPatch;
+            }
+
             srcDevice->toAudioPortConfig(&sourcePortConfig, &patch->sources[0]);
             patchBuilder.addSource(sourcePortConfig);
 
@@ -3995,6 +4056,7 @@
             }
             // TODO: check from routing capabilities in config file and other conflicting patches
 
+installPatch:
             status_t status = installPatch(
                         __func__, index, handle, patchBuilder.patch(), delayMs, uid, &patchDesc);
             if (status != NO_ERROR) {
@@ -4468,68 +4530,28 @@
 
 status_t AudioPolicyManager::getSurroundFormats(unsigned int *numSurroundFormats,
                                                 audio_format_t *surroundFormats,
-                                                bool *surroundFormatsEnabled,
-                                                bool reported)
+                                                bool *surroundFormatsEnabled)
 {
-    if (numSurroundFormats == NULL || (*numSurroundFormats != 0 &&
-            (surroundFormats == NULL || surroundFormatsEnabled == NULL))) {
+    if (numSurroundFormats == nullptr || (*numSurroundFormats != 0 &&
+            (surroundFormats == nullptr || surroundFormatsEnabled == nullptr))) {
         return BAD_VALUE;
     }
-    ALOGV("%s() numSurroundFormats %d surroundFormats %p surroundFormatsEnabled %p reported %d",
-            __func__, *numSurroundFormats, surroundFormats, surroundFormatsEnabled, reported);
+    ALOGV("%s() numSurroundFormats %d surroundFormats %p surroundFormatsEnabled %p",
+            __func__, *numSurroundFormats, surroundFormats, surroundFormatsEnabled);
 
     size_t formatsWritten = 0;
     size_t formatsMax = *numSurroundFormats;
-    std::unordered_set<audio_format_t> formats; // Uses primary surround formats only
-    if (reported) {
-        // Return formats from all device profiles that have already been resolved by
-        // checkOutputsForDevice().
-        for (size_t i = 0; i < mAvailableOutputDevices.size(); i++) {
-            sp<DeviceDescriptor> device = mAvailableOutputDevices[i];
-            audio_devices_t deviceType = device->type();
-            // Enabling/disabling formats are applied to only HDMI devices. So, this function
-            // returns formats reported by HDMI devices.
-            if (deviceType != AUDIO_DEVICE_OUT_HDMI) {
-                continue;
-            }
-            // Formats reported by sink devices
-            std::unordered_set<audio_format_t> formatset;
-            if (auto it = mReportedFormatsMap.find(device); it != mReportedFormatsMap.end()) {
-                formatset.insert(it->second.begin(), it->second.end());
-            }
 
-            // Formats hard-coded in the in policy configuration file (if any).
-            FormatVector encodedFormats = device->encodedFormats();
-            formatset.insert(encodedFormats.begin(), encodedFormats.end());
-            // Filter the formats which are supported by the vendor hardware.
-            for (auto it = formatset.begin(); it != formatset.end(); ++it) {
-                if (mConfig.getSurroundFormats().count(*it) != 0) {
-                    formats.insert(*it);
-                } else {
-                    for (const auto& pair : mConfig.getSurroundFormats()) {
-                        if (pair.second.count(*it) != 0) {
-                            formats.insert(pair.first);
-                            break;
-                        }
-                    }
-                }
-            }
-        }
-    } else {
-        for (const auto& pair : mConfig.getSurroundFormats()) {
-            formats.insert(pair.first);
-        }
-    }
-    *numSurroundFormats = formats.size();
+    *numSurroundFormats = mConfig.getSurroundFormats().size();
     audio_policy_forced_cfg_t forceUse = mEngine->getForceUse(
             AUDIO_POLICY_FORCE_FOR_ENCODED_SURROUND);
-    for (const auto& format: formats) {
+    for (const auto& format: mConfig.getSurroundFormats()) {
         if (formatsWritten < formatsMax) {
-            surroundFormats[formatsWritten] = format;
+            surroundFormats[formatsWritten] = format.first;
             bool formatEnabled = true;
             switch (forceUse) {
                 case AUDIO_POLICY_FORCE_ENCODED_SURROUND_MANUAL:
-                    formatEnabled = mManualSurroundFormats.count(format) != 0;
+                    formatEnabled = mManualSurroundFormats.count(format.first) != 0;
                     break;
                 case AUDIO_POLICY_FORCE_ENCODED_SURROUND_NEVER:
                     formatEnabled = false;
@@ -4543,6 +4565,60 @@
     return NO_ERROR;
 }
 
+status_t AudioPolicyManager::getReportedSurroundFormats(unsigned int *numSurroundFormats,
+                                                        audio_format_t *surroundFormats) {
+    if (numSurroundFormats == nullptr || (*numSurroundFormats != 0 && surroundFormats == nullptr)) {
+        return BAD_VALUE;
+    }
+    ALOGV("%s() numSurroundFormats %d surroundFormats %p",
+            __func__, *numSurroundFormats, surroundFormats);
+
+    size_t formatsWritten = 0;
+    size_t formatsMax = *numSurroundFormats;
+    std::unordered_set<audio_format_t> formats; // Uses primary surround formats only
+
+    // Return formats from all device profiles that have already been resolved by
+    // checkOutputsForDevice().
+    for (size_t i = 0; i < mAvailableOutputDevices.size(); i++) {
+        sp<DeviceDescriptor> device = mAvailableOutputDevices[i];
+        audio_devices_t deviceType = device->type();
+        // Enabling/disabling formats applies only to HDMI devices, so this function
+        // returns the formats reported by HDMI devices.
+        if (deviceType != AUDIO_DEVICE_OUT_HDMI) {
+            continue;
+        }
+        // Formats reported by sink devices
+        std::unordered_set<audio_format_t> formatset;
+        if (auto it = mReportedFormatsMap.find(device); it != mReportedFormatsMap.end()) {
+            formatset.insert(it->second.begin(), it->second.end());
+        }
+
+        // Formats hard-coded in the policy configuration file (if any).
+        FormatVector encodedFormats = device->encodedFormats();
+        formatset.insert(encodedFormats.begin(), encodedFormats.end());
+        // Filter the formats which are supported by the vendor hardware.
+        for (auto it = formatset.begin(); it != formatset.end(); ++it) {
+            if (mConfig.getSurroundFormats().count(*it) != 0) {
+                formats.insert(*it);
+            } else {
+                for (const auto& pair : mConfig.getSurroundFormats()) {
+                    if (pair.second.count(*it) != 0) {
+                        formats.insert(pair.first);
+                        break;
+                    }
+                }
+            }
+        }
+    }
+    *numSurroundFormats = formats.size();
+    for (const auto& format: formats) {
+        if (formatsWritten < formatsMax) {
+            surroundFormats[formatsWritten++] = format;
+        }
+    }
+    return NO_ERROR;
+}
+
 status_t AudioPolicyManager::setSurroundFormatEnabled(audio_format_t audioFormat, bool enabled)
 {
     ALOGV("%s() format 0x%X enabled %d", __func__, audioFormat, enabled);
@@ -5368,7 +5444,7 @@
             // arguments to mEngine->getOutputDevicesForAttributes() when resolving which output
             // devices to patch to. This may be complicated by the fact that devices may become
             // unavailable.
-            setMsdPatches();
+            setMsdOutputPatches();
         }
     }
 }
@@ -5441,7 +5517,7 @@
         // unnecessary rerouting by caching and reusing the arguments to
         // mEngine->getOutputDevicesForAttributes() when resolving which output devices to patch to.
         // This may be complicated by the fact that devices may become unavailable.
-        setMsdPatches();
+        setMsdOutputPatches();
     }
     // an event that changed routing likely occurred, inform upper layers
     mpClientInterface->onRoutingUpdated();
@@ -5783,12 +5859,22 @@
 
     // If we are not in call and no client is active on this input, this methods returns
     // a null sp<>, causing the patch on the input stream to be released.
-    audio_attributes_t attributes = inputDesc->getHighestPriorityAttributes();
+    audio_attributes_t attributes;
+    uid_t uid;
+    sp<RecordClientDescriptor> topClient = inputDesc->getHighestPriorityClient();
+    if (topClient != nullptr) {
+        attributes = topClient->attributes();
+        uid = topClient->uid();
+    } else {
+        attributes = { .source = AUDIO_SOURCE_DEFAULT };
+        uid = 0;
+    }
+
     if (attributes.source == AUDIO_SOURCE_DEFAULT && isInCall()) {
         attributes.source = AUDIO_SOURCE_VOICE_COMMUNICATION;
     }
     if (attributes.source != AUDIO_SOURCE_DEFAULT) {
-        device = mEngine->getInputDeviceForAttributes(attributes);
+        device = mEngine->getInputDeviceForAttributes(attributes, uid);
     }
 
     return device;
diff --git a/services/audiopolicy/managerdefault/AudioPolicyManager.h b/services/audiopolicy/managerdefault/AudioPolicyManager.h
index d0c8673..f5dd20c 100644
--- a/services/audiopolicy/managerdefault/AudioPolicyManager.h
+++ b/services/audiopolicy/managerdefault/AudioPolicyManager.h
@@ -116,7 +116,7 @@
                                   audio_io_handle_t *output,
                                   audio_session_t session,
                                   audio_stream_type_t *stream,
-                                  uid_t uid,
+                                  const media::permission::Identity& identity,
                                   const audio_config_t *config,
                                   audio_output_flags_t *flags,
                                   audio_port_handle_t *selectedDeviceId,
@@ -130,7 +130,7 @@
                                          audio_io_handle_t *input,
                                          audio_unique_id_t riid,
                                          audio_session_t session,
-                                         uid_t uid,
+                                         const media::permission::Identity& identity,
                                          const audio_config_base_t *config,
                                          audio_input_flags_t flags,
                                          audio_port_handle_t *selectedDeviceId,
@@ -313,8 +313,9 @@
 
         virtual status_t getSurroundFormats(unsigned int *numSurroundFormats,
                                             audio_format_t *surroundFormats,
-                                            bool *surroundFormatsEnabled,
-                                            bool reported);
+                                            bool *surroundFormatsEnabled);
+        virtual status_t getReportedSurroundFormats(unsigned int *numSurroundFormats,
+                                                    audio_format_t *surroundFormats);
         virtual status_t setSurroundFormatEnabled(audio_format_t audioFormat, bool enabled);
 
         virtual status_t getHwOffloadEncodingFormatsSupportedForA2DP(
@@ -735,9 +736,22 @@
                     String8(devices.itemAt(0)->address().c_str()) : String8("");
         }
 
-        uint32_t updateCallRouting(const DeviceVector &rxDevices, uint32_t delayMs = 0);
+        status_t updateCallRouting(
+                bool fromCache, uint32_t delayMs = 0, uint32_t *waitMs = nullptr);
+        status_t updateCallRoutingInternal(
+                const DeviceVector &rxDevices, uint32_t delayMs, uint32_t *waitMs);
         sp<AudioPatch> createTelephonyPatch(bool isRx, const sp<DeviceDescriptor> &device,
                                             uint32_t delayMs);
+        /**
+         * @brief selectBestRxSinkDevicesForCall: if the primary module hosts both Telephony Rx/Tx
+         * devices and also declares support for a HW bridge between the Telephony Rx device and
+         * the given sink device for the Voice Call audio attributes, select this device in
+         * priority. Otherwise, getNewOutputDevices() is called on the primary output to select
+         * the sink device.
+         * @param fromCache true to prevent the engine from reconsidering all product strategies
+         * and to retrieve the devices from the engine cache instead.
+         * @return vector of devices, empty if none is found.
+         */
+        DeviceVector selectBestRxSinkDevicesForCall(bool fromCache);
         bool isDeviceOfModule(const sp<DeviceDescriptor>& devDesc, const char *moduleId) const;
 
         status_t startSource(const sp<SwAudioOutputDescriptor>& outputDesc,
@@ -856,14 +870,22 @@
         // Support for Multi-Stream Decoder (MSD) module
         sp<DeviceDescriptor> getMsdAudioInDevice() const;
         DeviceVector getMsdAudioOutDevices() const;
-        const AudioPatchCollection getMsdPatches() const;
-        status_t getBestMsdAudioProfileFor(const sp<DeviceDescriptor> &outputDevice,
-                                           bool hwAvSync,
-                                           audio_port_config *sourceConfig,
-                                           audio_port_config *sinkConfig) const;
-        PatchBuilder buildMsdPatch(const sp<DeviceDescriptor> &outputDevice) const;
-        status_t setMsdPatches(const DeviceVector *outputDevices = nullptr);
-        void releaseMsdPatches(const DeviceVector& devices);
+        const AudioPatchCollection getMsdOutputPatches() const;
+        status_t getMsdProfiles(bool hwAvSync,
+                const InputProfileCollection &inputProfiles,
+                const OutputProfileCollection &outputProfiles,
+                const sp<DeviceDescriptor> &sourceDevice,
+                const sp<DeviceDescriptor> &sinkDevice,
+                AudioProfileVector &sourceProfiles,
+                AudioProfileVector &sinkProfiles) const;
+        status_t getBestMsdConfig(bool hwAvSync,
+                const AudioProfileVector &sourceProfiles,
+                const AudioProfileVector &sinkProfiles,
+                audio_port_config *sourceConfig,
+                audio_port_config *sinkConfig) const;
+        PatchBuilder buildMsdPatch(bool msdIsSource, const sp<DeviceDescriptor> &device) const;
+        status_t setMsdOutputPatches(const DeviceVector *outputDevices = nullptr);
+        void releaseMsdOutputPatches(const DeviceVector& devices);
 private:
         void onNewAudioModulesAvailableInt(DeviceVector *newDevices);
 
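
For orientation, the renamed MSD helpers above split patch construction by direction. The sketch below spells out how the msdIsSource flag is presumably interpreted; this reading is inferred from setMsdOutputPatches() and the HDMI-in test added later in this change, and PatchSketch plus the device strings are illustrative stand-ins rather than the real PatchBuilder API:

    #include <string>

    // Illustrative stand-in for the source/sink pair a PatchBuilder would produce.
    struct PatchSketch { std::string source; std::string sink; };

    // Assumed reading of buildMsdPatch(bool msdIsSource, device), from the parameter name
    // and the call sites in this change:
    //  - msdIsSource == true:  the MSD side is the patch source and `device` the sink
    //    (the setMsdOutputPatches() case)
    //  - msdIsSource == false: `device` is the patch source and the MSD side the sink
    //    (the HDMI-in case exercised by the test added below)
    PatchSketch buildMsdPatchSketch(bool msdIsSource, const std::string& device,
                                    const std::string& msdDevice) {
        return msdIsSource ? PatchSketch{msdDevice, device}
                           : PatchSketch{device, msdDevice};
    }
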
diff --git a/services/audiopolicy/service/Android.bp b/services/audiopolicy/service/Android.bp
index d4f5088..14be671 100644
--- a/services/audiopolicy/service/Android.bp
+++ b/services/audiopolicy/service/Android.bp
@@ -1,3 +1,12 @@
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
 cc_library_shared {
     name: "libaudiopolicyservice",
 
@@ -14,6 +23,7 @@
     ],
 
     shared_libs: [
+        "libactivitymanager_aidl",
         "libaudioclient",
         "libaudioclient_aidl_conversion",
         "libaudiofoundation",
@@ -35,10 +45,12 @@
         "audiopolicy-aidl-cpp",
         "audiopolicy-types-aidl-cpp",
         "capture_state_listener-aidl-cpp",
+        "media_permission-aidl-cpp",
     ],
 
     static_libs: [
         "libaudiopolicycomponents",
+        "media_permission-aidl-cpp",
     ],
 
     header_libs: [
@@ -56,6 +68,8 @@
     ],
 
     export_shared_lib_headers: [
+        "libactivitymanager_aidl",
         "libsensorprivacy",
+        "media_permission-aidl-cpp",
     ],
 }
diff --git a/services/audiopolicy/service/AudioPolicyEffects.cpp b/services/audiopolicy/service/AudioPolicyEffects.cpp
index b738633..8426a77 100644
--- a/services/audiopolicy/service/AudioPolicyEffects.cpp
+++ b/services/audiopolicy/service/AudioPolicyEffects.cpp
@@ -35,6 +35,8 @@
 
 namespace android {
 
+using media::permission::Identity;
+
 // ----------------------------------------------------------------------------
 // AudioPolicyEffects Implementation
 // ----------------------------------------------------------------------------
@@ -121,7 +123,9 @@
         Vector <EffectDesc *> effects = mInputSources.valueAt(index)->mEffects;
         for (size_t i = 0; i < effects.size(); i++) {
             EffectDesc *effect = effects[i];
-            sp<AudioEffect> fx = new AudioEffect(String16("android"));
+            Identity identity;
+            identity.packageName = "android";
+            sp<AudioEffect> fx = new AudioEffect(identity);
             fx->set(NULL, &effect->mUuid, -1, 0, 0, audioSession, input);
             status_t status = fx->initCheck();
             if (status != NO_ERROR && status != ALREADY_EXISTS) {
@@ -270,7 +274,9 @@
         Vector <EffectDesc *> effects = mOutputStreams.valueAt(index)->mEffects;
         for (size_t i = 0; i < effects.size(); i++) {
             EffectDesc *effect = effects[i];
-            sp<AudioEffect> fx = new AudioEffect(String16("android"));
+            Identity identity;
+            identity.packageName = "android";
+            sp<AudioEffect> fx = new AudioEffect(identity);
             fx->set(NULL, &effect->mUuid, 0, 0, 0, audioSession, output);
             status_t status = fx->initCheck();
             if (status != NO_ERROR && status != ALREADY_EXISTS) {
@@ -970,7 +976,9 @@
     for (const auto& deviceEffectsIter : mDeviceEffects) {
         const auto& deviceEffects =  deviceEffectsIter.second;
         for (const auto& effectDesc : deviceEffects->mEffectDescriptors->mEffects) {
-            auto fx = std::make_unique<AudioEffect>(String16("android"));
+            Identity identity;
+            identity.packageName = "android";
+            sp<AudioEffect> fx = new AudioEffect(identity);
             fx->set(EFFECT_UUID_NULL, &effectDesc->mUuid, 0, nullptr,
                     nullptr, AUDIO_SESSION_DEVICE, AUDIO_IO_HANDLE_NONE,
                     AudioDeviceTypeAddr{deviceEffects->getDeviceType(),
@@ -987,7 +995,7 @@
             ALOGV("%s(): create Fx %s added on port type=%d address=%s", __func__,
                   effectDesc->mName, deviceEffects->getDeviceType(),
                   deviceEffects->getDeviceAddress().c_str());
-            deviceEffects->mEffects.push_back(std::move(fx));
+            deviceEffects->mEffects.push_back(fx);
         }
     }
 }
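
The three hunks above replace the bare package-name string with a media::permission::Identity when the policy creates its own effects. Below is a stand-alone approximation of the fields the rest of this change relies on (the -1 sentinel for uid/pid and the optional packageName); IdentitySketch is an illustrative stand-in, not the AIDL-generated type:

    #include <cstdint>
    #include <optional>
    #include <string>

    // Illustrative stand-in for android::media::permission::Identity.
    struct IdentitySketch {
        int32_t uid = -1;                        // -1 is treated as "unspecified" in later hunks
        int32_t pid = -1;
        std::optional<std::string> packageName;  // optional, hence the has_value() checks later on
    };

    // Mirrors the pattern above: effects created by the policy itself are attributed
    // to the "android" package rather than to a client app.
    IdentitySketch makeSystemEffectIdentity() {
        IdentitySketch id;
        id.packageName = "android";
        return id;
    }
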
diff --git a/services/audiopolicy/service/AudioPolicyEffects.h b/services/audiopolicy/service/AudioPolicyEffects.h
index 81c728d..13d5d0c 100644
--- a/services/audiopolicy/service/AudioPolicyEffects.h
+++ b/services/audiopolicy/service/AudioPolicyEffects.h
@@ -207,7 +207,7 @@
             mDeviceType(device), mDeviceAddress(address) {}
         /*virtual*/ ~DeviceEffects() = default;
 
-        std::vector<std::unique_ptr<AudioEffect>> mEffects;
+        std::vector< sp<AudioEffect> > mEffects;
         audio_devices_t getDeviceType() const { return mDeviceType; }
         std::string getDeviceAddress() const { return mDeviceAddress; }
         const std::unique_ptr<EffectDescVector> mEffectDescriptors;
diff --git a/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp b/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp
index 07122cc..551013f 100644
--- a/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp
+++ b/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp
@@ -25,6 +25,7 @@
 #include <media/MediaMetricsItem.h>
 #include <media/PolicyAidlConversion.h>
 #include <utils/Log.h>
+#include <android/media/permission/Identity.h>
 
 #define VALUE_OR_RETURN_BINDER_STATUS(x) \
     ({ auto _tmp = (x); \
@@ -42,6 +43,7 @@
 namespace android {
 using binder::Status;
 using aidl_utils::binderStatusFromStatusT;
+using media::permission::Identity;
 
 const std::vector<audio_usage_t>& SYSTEM_USAGES = {
     AUDIO_USAGE_CALL_ASSISTANT,
@@ -62,15 +64,15 @@
 }
 
 status_t AudioPolicyService::validateUsage(audio_usage_t usage) {
-     return validateUsage(usage, IPCThreadState::self()->getCallingPid(),
-        IPCThreadState::self()->getCallingUid());
+     return validateUsage(usage, getCallingIdentity());
 }
 
-status_t AudioPolicyService::validateUsage(audio_usage_t usage, pid_t pid, uid_t uid) {
+status_t AudioPolicyService::validateUsage(audio_usage_t usage, const Identity& identity) {
     if (isSystemUsage(usage)) {
         if (isSupportedSystemUsage(usage)) {
-            if (!modifyAudioRoutingAllowed(pid, uid)) {
-                ALOGE("permission denied: modify audio routing not allowed for uid %d", uid);
+            if (!modifyAudioRoutingAllowed(identity)) {
+                ALOGE("permission denied: modify audio routing not allowed "
+                      "for identity %s", identity.toString().c_str());
                 return PERMISSION_DENIED;
             }
         } else {
@@ -195,6 +197,7 @@
     mAudioPolicyManager->setPhoneState(state);
     mPhoneState = state;
     mPhoneStateOwnerUid = uid;
+    updateUidStates_l();
     return Status::ok();
 }
 
@@ -276,8 +279,7 @@
 
 Status AudioPolicyService::getOutputForAttr(const media::AudioAttributesInternal& attrAidl,
                                             int32_t sessionAidl,
-                                            int32_t pidAidl,
-                                            int32_t uidAidl,
+                                            const Identity& identity,
                                             const media::AudioConfig& configAidl,
                                             int32_t flagsAidl,
                                             int32_t selectedDeviceIdAidl,
@@ -288,8 +290,6 @@
     audio_session_t session = VALUE_OR_RETURN_BINDER_STATUS(
             aidl2legacy_int32_t_audio_session_t(sessionAidl));
     audio_stream_type_t stream = AUDIO_STREAM_DEFAULT;
-    pid_t pid = VALUE_OR_RETURN_BINDER_STATUS(aidl2legacy_int32_t_pid_t(pidAidl));
-    uid_t uid = VALUE_OR_RETURN_BINDER_STATUS(aidl2legacy_int32_t_uid_t(uidAidl));
     audio_config_t config = VALUE_OR_RETURN_BINDER_STATUS(
             aidl2legacy_AudioConfig_audio_config_t(configAidl));
     audio_output_flags_t flags = VALUE_OR_RETURN_BINDER_STATUS(
@@ -307,22 +307,28 @@
 
     RETURN_IF_BINDER_ERROR(
             binderStatusFromStatusT(AudioValidator::validateAudioAttributes(attr, "68953950")));
-    RETURN_IF_BINDER_ERROR(binderStatusFromStatusT(validateUsage(attr.usage, pid, uid)));
+    RETURN_IF_BINDER_ERROR(binderStatusFromStatusT(validateUsage(attr.usage, identity)));
 
     ALOGV("%s()", __func__);
     Mutex::Autolock _l(mLock);
 
+    // TODO b/182392553: refactor or remove
+    Identity adjIdentity = identity;
     const uid_t callingUid = IPCThreadState::self()->getCallingUid();
-    if (!isAudioServerOrMediaServerUid(callingUid) || uid == (uid_t)-1) {
-        ALOGW_IF(uid != (uid_t)-1 && uid != callingUid,
-                "%s uid %d tried to pass itself off as %d", __func__, callingUid, uid);
-        uid = callingUid;
+    if (!isAudioServerOrMediaServerUid(callingUid) || identity.uid == -1) {
+        int32_t callingUidAidl = VALUE_OR_RETURN_BINDER_STATUS(
+            legacy2aidl_uid_t_int32_t(callingUid));
+        ALOGW_IF(identity.uid != -1 && identity.uid != callingUidAidl,
+                "%s uid %d tried to pass itself off as %d", __func__,
+                callingUidAidl, identity.uid);
+        adjIdentity.uid = callingUidAidl;
     }
-    if (!mPackageManager.allowPlaybackCapture(uid)) {
+    if (!mPackageManager.allowPlaybackCapture(VALUE_OR_RETURN_BINDER_STATUS(
+        aidl2legacy_int32_t_uid_t(adjIdentity.uid)))) {
         attr.flags = static_cast<audio_flags_mask_t>(attr.flags | AUDIO_FLAG_NO_MEDIA_PROJECTION);
     }
     if (((attr.flags & (AUDIO_FLAG_BYPASS_INTERRUPTION_POLICY|AUDIO_FLAG_BYPASS_MUTE)) != 0)
-            && !bypassInterruptionPolicyAllowed(pid, uid)) {
+            && !bypassInterruptionPolicyAllowed(identity)) {
         attr.flags = static_cast<audio_flags_mask_t>(
                 attr.flags & ~(AUDIO_FLAG_BYPASS_INTERRUPTION_POLICY|AUDIO_FLAG_BYPASS_MUTE));
     }
@@ -330,7 +336,7 @@
     AudioPolicyInterface::output_type_t outputType;
     status_t result = mAudioPolicyManager->getOutputForAttr(&attr, &output, session,
                                                             &stream,
-                                                            uid,
+                                                            adjIdentity,
                                                             &config,
                                                             &flags, &selectedDeviceId, &portId,
                                                             &secondaryOutputs,
@@ -343,16 +349,16 @@
         case AudioPolicyInterface::API_OUTPUT_LEGACY:
             break;
         case AudioPolicyInterface::API_OUTPUT_TELEPHONY_TX:
-            if (!modifyPhoneStateAllowed(pid, uid)) {
+            if (!modifyPhoneStateAllowed(adjIdentity)) {
                 ALOGE("%s() permission denied: modify phone state not allowed for uid %d",
-                    __func__, uid);
+                    __func__, adjIdentity.uid);
                 result = PERMISSION_DENIED;
             }
             break;
         case AudioPolicyInterface::API_OUT_MIX_PLAYBACK:
-            if (!modifyAudioRoutingAllowed(pid, uid)) {
+            if (!modifyAudioRoutingAllowed(adjIdentity)) {
                 ALOGE("%s() permission denied: modify audio routing not allowed for uid %d",
-                    __func__, uid);
+                    __func__, adjIdentity.uid);
                 result = PERMISSION_DENIED;
             }
             break;
@@ -365,8 +371,8 @@
 
     if (result == NO_ERROR) {
         sp<AudioPlaybackClient> client =
-                new AudioPlaybackClient(attr, output, uid, pid, session, portId, selectedDeviceId,
-                                        stream);
+                new AudioPlaybackClient(attr, output, adjIdentity, session,
+                    portId, selectedDeviceId, stream);
         mAudioPlaybackClients.add(portId, client);
 
         _aidl_return->output = VALUE_OR_RETURN_BINDER_STATUS(
@@ -502,9 +508,7 @@
                                            int32_t inputAidl,
                                            int32_t riidAidl,
                                            int32_t sessionAidl,
-                                           int32_t pidAidl,
-                                           int32_t uidAidl,
-                                           const std::string& opPackageNameAidl,
+                                           const Identity& identity,
                                            const media::AudioConfigBase& configAidl,
                                            int32_t flagsAidl,
                                            int32_t selectedDeviceIdAidl,
@@ -517,10 +521,6 @@
             aidl2legacy_int32_t_audio_unique_id_t(riidAidl));
     audio_session_t session = VALUE_OR_RETURN_BINDER_STATUS(
             aidl2legacy_int32_t_audio_session_t(sessionAidl));
-    pid_t pid = VALUE_OR_RETURN_BINDER_STATUS(aidl2legacy_int32_t_pid_t(pidAidl));
-    uid_t uid = VALUE_OR_RETURN_BINDER_STATUS(aidl2legacy_int32_t_uid_t(uidAidl));
-    String16 opPackageName = VALUE_OR_RETURN_BINDER_STATUS(
-            aidl2legacy_string_view_String16(opPackageNameAidl));
     audio_config_base_t config = VALUE_OR_RETURN_BINDER_STATUS(
             aidl2legacy_AudioConfigBase_audio_config_base_t(configAidl));
     audio_input_flags_t flags = VALUE_OR_RETURN_BINDER_STATUS(
@@ -536,7 +536,6 @@
 
     RETURN_IF_BINDER_ERROR(
             binderStatusFromStatusT(AudioValidator::validateAudioAttributes(attr, "68953950")));
-    RETURN_IF_BINDER_ERROR(binderStatusFromStatusT(validateUsage(attr.usage, pid, uid)));
 
     audio_source_t inputSource = attr.source;
     if (inputSource == AUDIO_SOURCE_DEFAULT) {
@@ -552,34 +551,42 @@
         return binderStatusFromStatusT(BAD_VALUE);
     }
 
-    bool updatePid = (pid == -1);
-    const uid_t callingUid = IPCThreadState::self()->getCallingUid();
+    // Make sure identity represents the current caller
+    Identity adjIdentity = identity;
+    // TODO b/182392553: refactor or remove
+    bool updatePid = (identity.pid == -1);
+    const uid_t callingUid = IPCThreadState::self()->getCallingUid();
+    const uid_t currentUid = VALUE_OR_RETURN_BINDER_STATUS(aidl2legacy_int32_t_uid_t(identity.uid));
     if (!isAudioServerOrMediaServerUid(callingUid)) {
-        ALOGW_IF(uid != (uid_t)-1 && uid != callingUid,
-                "%s uid %d tried to pass itself off as %d", __FUNCTION__, callingUid, uid);
-        uid = callingUid;
+        ALOGW_IF(currentUid != (uid_t)-1 && currentUid != callingUid,
+                "%s uid %d tried to pass itself off as %d", __FUNCTION__, callingUid,
+                currentUid);
+        adjIdentity.uid = VALUE_OR_RETURN_BINDER_STATUS(legacy2aidl_uid_t_int32_t(callingUid));
         updatePid = true;
     }
 
     if (updatePid) {
-        const pid_t callingPid = IPCThreadState::self()->getCallingPid();
-        ALOGW_IF(pid != (pid_t)-1 && pid != callingPid,
+        const int32_t callingPid = VALUE_OR_RETURN_BINDER_STATUS(legacy2aidl_pid_t_int32_t(
+            IPCThreadState::self()->getCallingPid()));
+        ALOGW_IF(identity.pid != -1 && identity.pid != callingPid,
                  "%s uid %d pid %d tried to pass itself off as pid %d",
-                 __func__, callingUid, callingPid, pid);
-        pid = callingPid;
+                 __func__, adjIdentity.uid, callingPid, identity.pid);
+        adjIdentity.pid = callingPid;
     }
 
+    RETURN_IF_BINDER_ERROR(binderStatusFromStatusT(validateUsage(attr.usage, adjIdentity)));
+
     // check calling permissions.
     // Capturing from FM_TUNER source is controlled by captureTunerAudioInputAllowed() and
     // captureAudioOutputAllowed() (deprecated) as this does not affect users' privacy
     // as does capturing from an actual microphone.
-    if (!(recordingAllowed(opPackageName, pid, uid) || attr.source == AUDIO_SOURCE_FM_TUNER)) {
-        ALOGE("%s permission denied: recording not allowed for uid %d pid %d",
-                __func__, uid, pid);
+    if (!(recordingAllowed(adjIdentity) || attr.source == AUDIO_SOURCE_FM_TUNER)) {
+        ALOGE("%s permission denied: recording not allowed for %s",
+                __func__, adjIdentity.toString().c_str());
         return binderStatusFromStatusT(PERMISSION_DENIED);
     }
 
-    bool canCaptureOutput = captureAudioOutputAllowed(pid, uid);
+    bool canCaptureOutput = captureAudioOutputAllowed(adjIdentity);
     if ((inputSource == AUDIO_SOURCE_VOICE_UPLINK ||
         inputSource == AUDIO_SOURCE_VOICE_DOWNLINK ||
         inputSource == AUDIO_SOURCE_VOICE_CALL ||
@@ -589,12 +596,12 @@
     }
 
     if (inputSource == AUDIO_SOURCE_FM_TUNER
-        && !captureTunerAudioInputAllowed(pid, uid)
+        && !captureTunerAudioInputAllowed(adjIdentity)
         && !canCaptureOutput) {
         return binderStatusFromStatusT(PERMISSION_DENIED);
     }
 
-    bool canCaptureHotword = captureHotwordAllowed(opPackageName, pid, uid);
+    bool canCaptureHotword = captureHotwordAllowed(adjIdentity);
     if ((inputSource == AUDIO_SOURCE_HOTWORD) && !canCaptureHotword) {
         return binderStatusFromStatusT(PERMISSION_DENIED);
     }
@@ -602,7 +609,7 @@
     if (((flags & AUDIO_INPUT_FLAG_HW_HOTWORD) != 0)
             && !canCaptureHotword) {
         ALOGE("%s: permission denied: hotword mode not allowed"
-              " for uid %d pid %d", __func__, uid, pid);
+              " for uid %d pid %d", __func__, adjIdentity.uid, adjIdentity.pid);
         return binderStatusFromStatusT(PERMISSION_DENIED);
     }
 
@@ -615,10 +622,11 @@
         {
             AutoCallerClear acc;
             // the audio_in_acoustics_t parameter is ignored by get_input()
-            status = mAudioPolicyManager->getInputForAttr(&attr, &input, riid, session, uid,
-                                                          &config,
+            status = mAudioPolicyManager->getInputForAttr(&attr, &input, riid, session,
+                                                          adjIdentity, &config,
                                                           flags, &selectedDeviceId,
                                                           &inputType, &portId);
+
         }
         audioPolicyEffects = mAudioPolicyEffects;
 
@@ -639,7 +647,7 @@
                 }
                 break;
             case AudioPolicyInterface::API_INPUT_MIX_EXT_POLICY_REROUTE:
-                if (!modifyAudioRoutingAllowed(pid, uid)) {
+                if (!modifyAudioRoutingAllowed(adjIdentity)) {
                     ALOGE("getInputForAttr() permission denied: modify audio routing not allowed");
                     status = PERMISSION_DENIED;
                 }
@@ -659,8 +667,8 @@
             return binderStatusFromStatusT(status);
         }
 
-        sp<AudioRecordClient> client = new AudioRecordClient(attr, input, uid, pid, session, portId,
-                                                             selectedDeviceId, opPackageName,
+        sp<AudioRecordClient> client = new AudioRecordClient(attr, input, session, portId,
+                                                             selectedDeviceId, adjIdentity,
                                                              canCaptureOutput, canCaptureHotword);
         mAudioRecordClients.add(portId, client);
     }
@@ -711,12 +719,15 @@
         client = mAudioRecordClients.valueAt(index);
     }
 
+    std::stringstream msg;
+    msg << "Audio recording on session " << client->session;
+
     // check calling permissions
-    if (!(startRecording(client->opPackageName, client->pid, client->uid,
-            client->attributes.source)
+    if (!(startRecording(client->identity, String16(msg.str().c_str()),
+        client->attributes.source)
             || client->attributes.source == AUDIO_SOURCE_FM_TUNER)) {
-        ALOGE("%s permission denied: recording not allowed for uid %d pid %d",
-                __func__, client->uid, client->pid);
+        ALOGE("%s permission denied: recording not allowed for identity %s",
+                __func__, client->identity.toString().c_str());
         return binderStatusFromStatusT(PERMISSION_DENIED);
     }
 
@@ -760,11 +771,13 @@
             item->setCString(kAudioPolicyRqstSrc,
                              toString(client->attributes.source).c_str());
             item->setInt32(kAudioPolicyRqstSession, client->session);
-            if (client->opPackageName.size() != 0) {
+            if (client->identity.packageName.has_value() &&
+                client->identity.packageName.value().size() != 0) {
                 item->setCString(kAudioPolicyRqstPkg,
-                                 std::string(String8(client->opPackageName).string()).c_str());
+                    client->identity.packageName.value().c_str());
             } else {
-                item->setCString(kAudioPolicyRqstPkg, std::to_string(client->uid).c_str());
+                item->setCString(kAudioPolicyRqstPkg,
+                    std::to_string(client->identity.uid).c_str());
             }
             item->setCString(
                     kAudioPolicyRqstDevice, getDeviceTypeStrForPortId(client->deviceId).c_str());
@@ -780,12 +793,13 @@
                     item->setCString(kAudioPolicyActiveSrc,
                                      toString(other->attributes.source).c_str());
                     item->setInt32(kAudioPolicyActiveSession, other->session);
-                    if (other->opPackageName.size() != 0) {
+                    if (other->identity.packageName.has_value() &&
+                        other->identity.packageName.value().size() != 0) {
                         item->setCString(kAudioPolicyActivePkg,
-                             std::string(String8(other->opPackageName).string()).c_str());
+                            other->identity.packageName.value().c_str());
                     } else {
-                        item->setCString(kAudioPolicyRqstPkg,
-                                         std::to_string(other->uid).c_str());
+                        item->setCString(kAudioPolicyRqstPkg, std::to_string(
+                            other->identity.uid).c_str());
                     }
                     item->setCString(kAudioPolicyActiveDevice,
                                      getDeviceTypeStrForPortId(other->deviceId).c_str());
@@ -801,8 +815,7 @@
         client->active = false;
         client->startTimeNs = 0;
         updateUidStates_l();
-        finishRecording(client->opPackageName, client->uid,
-                        client->attributes.source);
+        finishRecording(client->identity, client->attributes.source);
     }
 
     return binderStatusFromStatusT(status);
@@ -831,8 +844,7 @@
     updateUidStates_l();
 
     // finish the recording app op
-    finishRecording(client->opPackageName, client->uid,
-                    client->attributes.source);
+    finishRecording(client->identity, client->attributes.source);
     AutoCallerClear acc;
     return binderStatusFromStatusT(mAudioPolicyManager->stopInput(portId));
 }
@@ -1629,15 +1641,15 @@
     bool needCaptureMediaOutput = std::any_of(mixes.begin(), mixes.end(), [](auto& mix) {
             return mix.mAllowPrivilegedMediaPlaybackCapture; });
 
-    const uid_t callingUid = IPCThreadState::self()->getCallingUid();
-    const pid_t callingPid = IPCThreadState::self()->getCallingPid();
+    const Identity identity = getCallingIdentity();
 
-    if (needCaptureMediaOutput && !captureMediaOutputAllowed(callingPid, callingUid)) {
+    if (needCaptureMediaOutput && !captureMediaOutputAllowed(identity)) {
         return binderStatusFromStatusT(PERMISSION_DENIED);
     }
 
     if (needCaptureVoiceCommunicationOutput &&
-        !captureVoiceCommunicationOutputAllowed(callingPid, callingUid)) {
+        !captureVoiceCommunicationOutputAllowed(identity)) {
         return binderStatusFromStatusT(PERMISSION_DENIED);
     }
 
@@ -1801,8 +1813,7 @@
     return Status::ok();
 }
 
-Status AudioPolicyService::getSurroundFormats(
-        bool reported, media::Int* count,
+Status AudioPolicyService::getSurroundFormats(media::Int* count,
         std::vector<media::audio::common::AudioFormat>* formats,
         std::vector<bool>* formatsEnabled) {
     unsigned int numSurroundFormats = VALUE_OR_RETURN_BINDER_STATUS(
@@ -1821,7 +1832,7 @@
     AutoCallerClear acc;
     RETURN_IF_BINDER_ERROR(binderStatusFromStatusT(
             mAudioPolicyManager->getSurroundFormats(&numSurroundFormats, surroundFormats.get(),
-                                                    surroundFormatsEnabled.get(), reported)));
+                                                    surroundFormatsEnabled.get())));
     numSurroundFormatsReq = std::min(numSurroundFormats, numSurroundFormatsReq);
     RETURN_IF_BINDER_ERROR(binderStatusFromStatusT(
             convertRange(surroundFormats.get(), surroundFormats.get() + numSurroundFormatsReq,
@@ -1834,6 +1845,32 @@
     return Status::ok();
 }
 
+Status AudioPolicyService::getReportedSurroundFormats(
+        media::Int* count, std::vector<media::audio::common::AudioFormat>* formats) {
+    unsigned int numSurroundFormats = VALUE_OR_RETURN_BINDER_STATUS(
+            convertIntegral<unsigned int>(count->value));
+    if (numSurroundFormats > MAX_ITEMS_PER_LIST) {
+        numSurroundFormats = MAX_ITEMS_PER_LIST;
+    }
+    unsigned int numSurroundFormatsReq = numSurroundFormats;
+    std::unique_ptr<audio_format_t[]> surroundFormats(new audio_format_t[numSurroundFormats]);
+
+    if (mAudioPolicyManager == NULL) {
+        return binderStatusFromStatusT(NO_INIT);
+    }
+    Mutex::Autolock _l(mLock);
+    AutoCallerClear acc;
+    RETURN_IF_BINDER_ERROR(binderStatusFromStatusT(
+            mAudioPolicyManager->getReportedSurroundFormats(
+                    &numSurroundFormats, surroundFormats.get())));
+    numSurroundFormatsReq = std::min(numSurroundFormats, numSurroundFormatsReq);
+    RETURN_IF_BINDER_ERROR(binderStatusFromStatusT(
+            convertRange(surroundFormats.get(), surroundFormats.get() + numSurroundFormatsReq,
+                         std::back_inserter(*formats), legacy2aidl_audio_format_t_AudioFormat)));
+    count->value = VALUE_OR_RETURN_BINDER_STATUS(convertIntegral<uint32_t>(numSurroundFormats));
+    return Status::ok();
+}
+
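
The new getReportedSurroundFormats() keeps the same two-call convention as getSurroundFormats() above: call once with a null array to learn the count, then again with a buffer of at least that size. Here is a self-contained caller sketch under that assumption; queryReportedSurroundFormats() is a hypothetical stand-in for the manager call, with a fake in-memory list so the sketch runs on its own:

    #include <algorithm>
    #include <cstdint>
    #include <vector>

    // Hypothetical stand-in for mAudioPolicyManager->getReportedSurroundFormats():
    // when formats == nullptr only the total count is reported back through *num.
    static int queryReportedSurroundFormats(unsigned int* num, uint32_t* formats) {
        static const uint32_t kReported[] = {0x9u, 0xAu, 0xBu};  // fake format codes
        const unsigned int total = 3;
        if (formats != nullptr) {
            for (unsigned int i = 0; i < *num && i < total; ++i) formats[i] = kReported[i];
        }
        *num = total;
        return 0;
    }

    // The two-call convention: query the count, then fill a buffer of that size.
    std::vector<uint32_t> fetchReportedSurroundFormats() {
        unsigned int num = 0;
        if (queryReportedSurroundFormats(&num, nullptr) != 0 || num == 0) return {};
        std::vector<uint32_t> formats(num);
        if (queryReportedSurroundFormats(&num, formats.data()) != 0) return {};
        formats.resize(std::min<size_t>(num, formats.size()));  // service may report fewer
        return formats;
    }
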
 Status AudioPolicyService::getHwOffloadEncodingFormatsSupportedForA2DP(
         std::vector<media::audio::common::AudioFormat>* _aidl_return) {
     std::vector<audio_format_t> formats;
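
Both getOutputForAttr() and getInputForAttr() above now normalize the caller-supplied Identity before it is used for permission checks: only audioserver/mediaserver may act on behalf of another uid, every other caller is forced back to the binder calling uid/pid. A condensed, self-contained sketch combining the checks from the two call sites (getOutputForAttr() only adjusts the uid); the types and the callerIsTrusted flag are simplified stand-ins for the IPCThreadState and Identity plumbing:

    #include <cstdint>
    #include <cstdio>

    struct IdentitySketch {   // stand-in for media::permission::Identity
        int32_t uid = -1;
        int32_t pid = -1;
    };

    // callerIsTrusted corresponds to isAudioServerOrMediaServerUid(callingUid) above.
    IdentitySketch adjustToCaller(IdentitySketch requested, int32_t callingUid,
                                  int32_t callingPid, bool callerIsTrusted) {
        IdentitySketch adjusted = requested;
        bool updatePid = (requested.pid == -1);
        if (!callerIsTrusted || requested.uid == -1) {
            if (requested.uid != -1 && requested.uid != callingUid) {
                std::fprintf(stderr, "uid %d tried to pass itself off as %d\n",
                             callingUid, requested.uid);
            }
            adjusted.uid = callingUid;   // untrusted callers always record as themselves
            updatePid = true;
        }
        if (updatePid) {
            adjusted.pid = callingPid;   // same spoofing rule applied to the pid
        }
        return adjusted;
    }
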
diff --git a/services/audiopolicy/service/AudioPolicyService.cpp b/services/audiopolicy/service/AudioPolicyService.cpp
index 90ad81e..b5eb98f 100644
--- a/services/audiopolicy/service/AudioPolicyService.cpp
+++ b/services/audiopolicy/service/AudioPolicyService.cpp
@@ -537,35 +537,34 @@
 {
 //    Go over all active clients and allow capture (does not force silence) in the
 //    following cases:
-//    The client source is virtual (remote submix, call audio TX or RX...)
-//    OR The user the client is running in has microphone sensor privacy disabled
-//        AND The client is the assistant
-//                AND an accessibility service is on TOP or a RTT call is active
-//                        AND the source is VOICE_RECOGNITION or HOTWORD
-//                    OR uses VOICE_RECOGNITION AND is on TOP
-//                        OR uses HOTWORD
-//                    AND there is no active privacy sensitive capture or call
-//                        OR client has CAPTURE_AUDIO_OUTPUT privileged permission
-//            OR The client is an accessibility service
-//                AND Is on TOP
-//                        AND the source is VOICE_RECOGNITION or HOTWORD
-//                    OR The assistant is not on TOP
-//                        AND there is no active privacy sensitive capture or call
-//                            OR client has CAPTURE_AUDIO_OUTPUT privileged permission
-//                AND is on TOP
+//    The client is the assistant
+//        AND an accessibility service is on TOP or a RTT call is active
 //                AND the source is VOICE_RECOGNITION or HOTWORD
-//            OR the client source is HOTWORD
-//                AND is on TOP
-//                    OR all active clients are using HOTWORD source
-//                AND no call is active
-//                    OR client has CAPTURE_AUDIO_OUTPUT privileged permission
-//            OR the client is the current InputMethodService
-//                AND a RTT call is active AND the source is VOICE_RECOGNITION
-//            OR Any client
-//                AND The assistant is not on TOP
-//                AND is on TOP or latest started
+//            OR uses VOICE_RECOGNITION AND is on TOP
+//                OR uses HOTWORD
+//            AND there is no active privacy sensitive capture or call
+//                OR client has CAPTURE_AUDIO_OUTPUT privileged permission
+//    OR The client is an accessibility service
+//        AND Is on TOP
+//                AND the source is VOICE_RECOGNITION or HOTWORD
+//            OR The assistant is not on TOP
 //                AND there is no active privacy sensitive capture or call
 //                    OR client has CAPTURE_AUDIO_OUTPUT privileged permission
+//        AND is on TOP
+//        AND the source is VOICE_RECOGNITION or HOTWORD
+//    OR the client source is virtual (remote submix, call audio TX or RX...)
+//    OR the client source is HOTWORD
+//        AND is on TOP
+//            OR all active clients are using HOTWORD source
+//        AND no call is active
+//            OR client has CAPTURE_AUDIO_OUTPUT privileged permission
+//    OR the client is the current InputMethodService
+//        AND a RTT call is active AND the source is VOICE_RECOGNITION
+//    OR Any client
+//        AND The assistant is not on TOP
+//        AND is on TOP or latest started
+//        AND there is no active privacy sensitive capture or call
+//            OR client has CAPTURE_AUDIO_OUTPUT privileged permission
 
 
     sp<AudioRecordClient> topActive;
@@ -595,23 +594,23 @@
 
     for (size_t i =0; i < mAudioRecordClients.size(); i++) {
         sp<AudioRecordClient> current = mAudioRecordClients[i];
-        if (!current->active || (!isVirtualSource(current->attributes.source)
-                && isUserSensorPrivacyEnabledForUid(current->uid))) {
+        uid_t currentUid = VALUE_OR_FATAL(aidl2legacy_int32_t_uid_t(current->identity.uid));
+        if (!current->active) {
             continue;
         }
 
-        app_state_t appState = apmStatFromAmState(mUidPolicy->getUidState(current->uid));
+        app_state_t appState = apmStatFromAmState(mUidPolicy->getUidState(currentUid));
         // clients which app is in IDLE state are not eligible for top active or
         // latest active
         if (appState == APP_STATE_IDLE) {
             continue;
         }
 
-        bool isAccessibility = mUidPolicy->isA11yUid(current->uid);
+        bool isAccessibility = mUidPolicy->isA11yUid(currentUid);
         // Clients capturing for Accessibility services or virtual sources are not considered
         // for top or latest active to avoid masking regular clients started before
         if (!isAccessibility && !isVirtualSource(current->attributes.source)) {
-            bool isAssistant = mUidPolicy->isAssistantUid(current->uid);
+            bool isAssistant = mUidPolicy->isAssistantUid(currentUid);
             bool isPrivacySensitive =
                     (current->attributes.flags & AUDIO_FLAG_CAPTURE_PRIVATE) != 0;
 
@@ -639,9 +638,11 @@
                     // if audio mode is IN_COMMUNICATION, make sure the audio mode owner
                     // is marked latest sensitive active even if another app qualifies.
                     if (current->startTimeNs > latestSensitiveStartNs
-                            || (isInCommunication && current->uid == mPhoneStateOwnerUid)) {
+                            || (isInCommunication && currentUid == mPhoneStateOwnerUid)) {
                         if (!isInCommunication || latestSensitiveActiveOrComm == nullptr
-                                || latestSensitiveActiveOrComm->uid != mPhoneStateOwnerUid) {
+                                || VALUE_OR_FATAL(aidl2legacy_int32_t_uid_t(
+                                    latestSensitiveActiveOrComm->identity.uid))
+                                        != mPhoneStateOwnerUid) {
                             latestSensitiveActiveOrComm = current;
                             latestSensitiveStartNs = current->startTimeNs;
                         }
@@ -658,7 +659,7 @@
         if (current->attributes.source != AUDIO_SOURCE_HOTWORD) {
             onlyHotwordActive = false;
         }
-        if (current->uid == mPhoneStateOwnerUid) {
+        if (currentUid == mPhoneStateOwnerUid) {
             isPhoneStateOwnerActive = true;
         }
     }
@@ -674,7 +675,9 @@
     } else if (latestSensitiveActiveOrComm != nullptr) {
         // if audio mode is IN_COMMUNICATION, favor audio mode owner over an app with
         // foreground UI in case both are capturing with privacy sensitive flag.
-        if (isInCommunication && latestSensitiveActiveOrComm->uid == mPhoneStateOwnerUid) {
+        uid_t latestActiveUid = VALUE_OR_FATAL(
+            aidl2legacy_int32_t_uid_t(latestSensitiveActiveOrComm->identity.uid));
+        if (isInCommunication && latestActiveUid == mPhoneStateOwnerUid) {
             topSensitiveActive = latestSensitiveActiveOrComm;
             topSensitiveStartNs = latestSensitiveStartNs;
         }
@@ -692,20 +695,25 @@
 
     for (size_t i =0; i < mAudioRecordClients.size(); i++) {
         sp<AudioRecordClient> current = mAudioRecordClients[i];
+        uid_t currentUid = VALUE_OR_FATAL(aidl2legacy_int32_t_uid_t(
+            current->identity.uid));
         if (!current->active) {
             continue;
         }
 
         audio_source_t source = current->attributes.source;
-        bool isTopOrLatestActive = topActive == nullptr ? false : current->uid == topActive->uid;
-        bool isTopOrLatestSensitive = topSensitiveActive == nullptr ?
-                                 false : current->uid == topSensitiveActive->uid;
+        bool isTopOrLatestActive = topActive == nullptr ? false :
+            current->identity.uid == topActive->identity.uid;
+        bool isTopOrLatestSensitive = topSensitiveActive == nullptr ? false :
+            current->identity.uid == topSensitiveActive->identity.uid;
 
         auto canCaptureIfInCallOrCommunication = [&](const auto &recordClient) REQUIRES(mLock) {
+            uid_t recordUid = VALUE_OR_FATAL(aidl2legacy_int32_t_uid_t(
+                recordClient->identity.uid));
             bool canCaptureCall = recordClient->canCaptureOutput;
             bool canCaptureCommunication = recordClient->canCaptureOutput
                 || !isPhoneStateOwnerActive
-                || recordClient->uid == mPhoneStateOwnerUid;
+                || recordUid == mPhoneStateOwnerUid;
             return !(isInCall && !canCaptureCall)
                 && !(isInCommunication && !canCaptureCommunication);
         };
@@ -724,10 +732,7 @@
         if (isVirtualSource(source)) {
             // Allow capture for virtual (remote submix, call audio TX or RX...) sources
             allowCapture = true;
-        } else if (isUserSensorPrivacyEnabledForUid(current->uid)) {
-            // If sensor privacy is enabled, don't allow capture
-            allowCapture = false;
-        } else if (mUidPolicy->isAssistantUid(current->uid)) {
+        } else if (mUidPolicy->isAssistantUid(currentUid)) {
             // For assistant allow capture if:
             //     An accessibility service is on TOP or a RTT call is active
             //            AND the source is VOICE_RECOGNITION or HOTWORD
@@ -747,7 +752,7 @@
                     allowCapture = true;
                 }
             }
-        } else if (mUidPolicy->isA11yUid(current->uid)) {
+        } else if (mUidPolicy->isA11yUid(currentUid)) {
             // For accessibility service allow capture if:
             //     The assistant is not on TOP
             //         AND there is no active privacy sensitive capture or call
@@ -773,7 +778,7 @@
                     && canCaptureIfInCallOrCommunication(current)) {
                 allowCapture = true;
             }
-        } else if (mUidPolicy->isCurrentImeUid(current->uid)) {
+        } else if (mUidPolicy->isCurrentImeUid(currentUid)) {
             // For current InputMethodService allow capture if:
             //     A RTT call is active AND the source is VOICE_RECOGNITION
             if (rttCallActive && source == AUDIO_SOURCE_VOICE_RECOGNITION) {
@@ -781,7 +786,7 @@
             }
         }
         setAppState_l(current->portId,
-                      allowCapture ? apmStatFromAmState(mUidPolicy->getUidState(current->uid)) :
+                      allowCapture ? apmStatFromAmState(mUidPolicy->getUidState(currentUid)) :
                                 APP_STATE_IDLE);
     }
 }
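
The reordered comment block above is the specification the loop below it implements. As a readability aid, here is a much-reduced sketch of two of the simpler branches (virtual sources and the HOTWORD-only case), with the assistant, accessibility-service and IME branches left out; ClientSketch and allowCaptureSubset are illustrative stand-ins, not the real AudioRecordClient logic:

    // Reduced model of an active record client for this sketch.
    struct ClientSketch {
        bool isVirtualSource;    // remote submix, call audio TX/RX...
        bool isHotword;          // AUDIO_SOURCE_HOTWORD
        bool isOnTop;
        bool canCaptureOutput;   // CAPTURE_AUDIO_OUTPUT privileged permission
    };

    // Two of the branches from the policy comment above:
    //  - virtual sources may always capture;
    //  - HOTWORD capture is allowed when the client is on TOP (or every active client
    //    uses HOTWORD) and either no call is active or the client may capture call audio.
    bool allowCaptureSubset(const ClientSketch& c, bool allClientsHotword, bool callActive) {
        if (c.isVirtualSource) {
            return true;
        }
        if (c.isHotword) {
            return (c.isOnTop || allClientsHotword) && (!callActive || c.canCaptureOutput);
        }
        return false;  // assistant / accessibility / IME / foreground-app branches omitted
    }
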
@@ -925,6 +930,7 @@
         case TRANSACTION_registerPolicyMixes:
         case TRANSACTION_setMasterMono:
         case TRANSACTION_getSurroundFormats:
+        case TRANSACTION_getReportedSurroundFormats:
         case TRANSACTION_setSurroundFormatEnabled:
         case TRANSACTION_setAssistantUid:
         case TRANSACTION_setA11yServicesUids:
@@ -1134,16 +1140,6 @@
     return NO_INIT;
 }
 
-bool AudioPolicyService::isUserSensorPrivacyEnabledForUid(uid_t uid) {
-    userid_t userId = multiuser_get_user_id(uid);
-    if (mMicrophoneSensorPrivacyPolicies.find(userId) == mMicrophoneSensorPrivacyPolicies.end()) {
-        sp<SensorPrivacyPolicy> userPolicy = new SensorPrivacyPolicy(this);
-        userPolicy->registerSelfForMicrophoneOnly(userId);
-        mMicrophoneSensorPrivacyPolicies[userId] = userPolicy;
-    }
-    return mMicrophoneSensorPrivacyPolicies[userId]->isSensorPrivacyEnabled();
-}
-
 status_t AudioPolicyService::printHelp(int out) {
     return dprintf(out, "Audio policy service commands:\n"
         "  get-uid-state <PACKAGE> [--user USER_ID] gets the uid state\n"
diff --git a/services/audiopolicy/service/AudioPolicyService.h b/services/audiopolicy/service/AudioPolicyService.h
index a11b2cc..00d9670 100644
--- a/services/audiopolicy/service/AudioPolicyService.h
+++ b/services/audiopolicy/service/AudioPolicyService.h
@@ -38,6 +38,8 @@
 #include "CaptureStateNotifier.h"
 #include <AudioPolicyInterface.h>
 #include <android/hardware/BnSensorPrivacyListener.h>
+#include <android/media/permission/Identity.h>
+
 #include <unordered_map>
 
 namespace android {
@@ -79,15 +81,16 @@
                                media::AudioPolicyForcedConfig* _aidl_return) override;
     binder::Status getOutput(media::AudioStreamType stream, int32_t* _aidl_return) override;
     binder::Status getOutputForAttr(const media::AudioAttributesInternal& attr, int32_t session,
-                                    int32_t pid, int32_t uid, const media::AudioConfig& config,
+                                    const media::permission::Identity &identity,
+                                    const media::AudioConfig& config,
                                     int32_t flags, int32_t selectedDeviceId,
                                     media::GetOutputForAttrResponse* _aidl_return) override;
     binder::Status startOutput(int32_t portId) override;
     binder::Status stopOutput(int32_t portId) override;
     binder::Status releaseOutput(int32_t portId) override;
     binder::Status getInputForAttr(const media::AudioAttributesInternal& attr, int32_t input,
-                                   int32_t riid, int32_t session, int32_t pid, int32_t uid,
-                                   const std::string& opPackageName,
+                                   int32_t riid, int32_t session,
+                                   const media::permission::Identity &identity,
                                    const media::AudioConfigBase& config, int32_t flags,
                                    int32_t selectedDeviceId,
                                    media::GetInputForAttrResponse* _aidl_return) override;
@@ -182,9 +185,11 @@
     binder::Status getMasterMono(bool* _aidl_return) override;
     binder::Status getStreamVolumeDB(media::AudioStreamType stream, int32_t index, int32_t device,
                                      float* _aidl_return) override;
-    binder::Status getSurroundFormats(bool reported, media::Int* count,
+    binder::Status getSurroundFormats(media::Int* count,
                                       std::vector<media::audio::common::AudioFormat>* formats,
                                       std::vector<bool>* formatsEnabled) override;
+    binder::Status getReportedSurroundFormats(
+            media::Int* count, std::vector<media::audio::common::AudioFormat>* formats) override;
     binder::Status getHwOffloadEncodingFormatsSupportedForA2DP(
             std::vector<media::audio::common::AudioFormat>* _aidl_return) override;
     binder::Status setSurroundFormatEnabled(media::audio::common::AudioFormat audioFormat,
@@ -339,9 +344,7 @@
 
     bool isSupportedSystemUsage(audio_usage_t usage);
     status_t validateUsage(audio_usage_t usage);
-    status_t validateUsage(audio_usage_t usage, pid_t pid, uid_t uid);
-
-    bool isUserSensorPrivacyEnabledForUid(uid_t uid);
+    status_t validateUsage(audio_usage_t usage, const media::permission::Identity& identity);
 
     void updateUidStates();
     void updateUidStates_l() REQUIRES(mLock);
@@ -784,18 +787,18 @@
     class AudioClient : public virtual RefBase {
     public:
                 AudioClient(const audio_attributes_t attributes,
-                            const audio_io_handle_t io, uid_t uid, pid_t pid,
+                            const audio_io_handle_t io,
+                            const media::permission::Identity& identity,
                             const audio_session_t session,  audio_port_handle_t portId,
                             const audio_port_handle_t deviceId) :
-                                attributes(attributes), io(io), uid(uid), pid(pid),
+                                attributes(attributes), io(io), identity(identity),
                                 session(session), portId(portId), deviceId(deviceId), active(false) {}
                 ~AudioClient() override = default;
 
 
         const audio_attributes_t attributes; // source, flags ...
         const audio_io_handle_t io;          // audio HAL stream IO handle
-        const uid_t uid;                     // client UID
-        const pid_t pid;                     // client PID
+        const media::permission::Identity& identity; // client identity
         const audio_session_t session;       // audio session ID
         const audio_port_handle_t portId;
         const audio_port_handle_t deviceId;  // selected input device port ID
@@ -808,16 +811,17 @@
     class AudioRecordClient : public AudioClient {
     public:
                 AudioRecordClient(const audio_attributes_t attributes,
-                          const audio_io_handle_t io, uid_t uid, pid_t pid,
+                          const audio_io_handle_t io,
                           const audio_session_t session, audio_port_handle_t portId,
-                          const audio_port_handle_t deviceId, const String16& opPackageName,
+                          const audio_port_handle_t deviceId,
+                          const media::permission::Identity& identity,
                           bool canCaptureOutput, bool canCaptureHotword) :
-                    AudioClient(attributes, io, uid, pid, session, portId, deviceId),
-                    opPackageName(opPackageName), startTimeNs(0),
+                    AudioClient(attributes, io, identity,
+                        session, portId, deviceId), identity(identity), startTimeNs(0),
                     canCaptureOutput(canCaptureOutput), canCaptureHotword(canCaptureHotword) {}
                 ~AudioRecordClient() override = default;
 
-        const String16 opPackageName;        // client package name
+        const media::permission::Identity identity;        // identity of client
         nsecs_t startTimeNs;
         const bool canCaptureOutput;
         const bool canCaptureHotword;
@@ -829,10 +833,11 @@
     class AudioPlaybackClient : public AudioClient {
     public:
                 AudioPlaybackClient(const audio_attributes_t attributes,
-                      const audio_io_handle_t io, uid_t uid, pid_t pid,
+                      const audio_io_handle_t io, media::permission::Identity identity,
                             const audio_session_t session, audio_port_handle_t portId,
                             audio_port_handle_t deviceId, audio_stream_type_t stream) :
-                    AudioClient(attributes, io, uid, pid, session, portId, deviceId), stream(stream) {}
+                    AudioClient(attributes, io, identity, session, portId,
+                        deviceId), stream(stream) {}
                 ~AudioPlaybackClient() override = default;
 
         const audio_stream_type_t stream;
@@ -901,8 +906,6 @@
     void *mLibraryHandle = nullptr;
     CreateAudioPolicyManagerInstance mCreateAudioPolicyManager;
     DestroyAudioPolicyManagerInstance mDestroyAudioPolicyManager;
-
-    std::map<userid_t, sp<SensorPrivacyPolicy>> mMicrophoneSensorPrivacyPolicies;
 };
 
 } // namespace android
diff --git a/services/audiopolicy/tests/Android.bp b/services/audiopolicy/tests/Android.bp
index d10fcb9..f480210 100644
--- a/services/audiopolicy/tests/Android.bp
+++ b/services/audiopolicy/tests/Android.bp
@@ -25,6 +25,7 @@
         "libmedia_helper",
         "libutils",
         "libxml2",
+        "media_permission-aidl-cpp",
     ],
 
     static_libs: [
diff --git a/services/audiopolicy/tests/AudioPolicyTestManager.h b/services/audiopolicy/tests/AudioPolicyTestManager.h
index ea95364..7f67940 100644
--- a/services/audiopolicy/tests/AudioPolicyTestManager.h
+++ b/services/audiopolicy/tests/AudioPolicyTestManager.h
@@ -29,9 +29,10 @@
     using AudioPolicyManager::getOutputs;
     using AudioPolicyManager::getAvailableOutputDevices;
     using AudioPolicyManager::getAvailableInputDevices;
-    using AudioPolicyManager::releaseMsdPatches;
-    using AudioPolicyManager::setMsdPatches;
     using AudioPolicyManager::setSurroundFormatEnabled;
+    using AudioPolicyManager::releaseMsdOutputPatches;
+    using AudioPolicyManager::setMsdOutputPatches;
+    using AudioPolicyManager::getAudioPatches;
     uint32_t getAudioPortGeneration() const { return mAudioPortGeneration; }
 };
 
diff --git a/services/audiopolicy/tests/audiopolicymanager_tests.cpp b/services/audiopolicy/tests/audiopolicymanager_tests.cpp
index 684358f..8f12ecf 100644
--- a/services/audiopolicy/tests/audiopolicymanager_tests.cpp
+++ b/services/audiopolicy/tests/audiopolicymanager_tests.cpp
@@ -25,6 +25,7 @@
 #define LOG_TAG "APM_Test"
 #include <Serializer.h>
 #include <android-base/file.h>
+#include <android/media/permission/Identity.h>
 #include <media/AudioPolicy.h>
 #include <media/PatchBuilder.h>
 #include <media/RecordingActivityTracker.h>
@@ -39,6 +40,7 @@
 
 using namespace android;
 using testing::UnorderedElementsAre;
+using media::permission::Identity;
 
 TEST(AudioPolicyManagerTestInit, EngineFailure) {
     AudioPolicyTestClient client;
@@ -214,8 +216,11 @@
     if (!portId) portId = &localPortId;
     *portId = AUDIO_PORT_HANDLE_NONE;
     AudioPolicyInterface::output_type_t outputType;
+    // TODO b/182392769: use identity util
+    Identity i = Identity();
+    i.uid = 0;
     ASSERT_EQ(OK, mManager->getOutputForAttr(
-                    &attr, output, AUDIO_SESSION_NONE, &stream, 0 /*uid*/, &config, &flags,
+                    &attr, output, AUDIO_SESSION_NONE, &stream, i, &config, &flags,
                     selectedDeviceId, portId, {}, &outputType));
     ASSERT_NE(AUDIO_PORT_HANDLE_NONE, *portId);
     ASSERT_NE(AUDIO_IO_HANDLE_NONE, *output);
@@ -239,8 +244,11 @@
     if (!portId) portId = &localPortId;
     *portId = AUDIO_PORT_HANDLE_NONE;
     AudioPolicyInterface::input_type_t inputType;
+    // TODO b/182392769: use identity util
+    Identity i = Identity();
+    i.uid = 0;
     ASSERT_EQ(OK, mManager->getInputForAttr(
-            &attr, &input, riid, AUDIO_SESSION_NONE, 0 /*uid*/, &config, flags,
+            &attr, &input, riid, AUDIO_SESSION_NONE, i, &config, flags,
             selectedDeviceId, &inputType, portId));
     ASSERT_NE(AUDIO_PORT_HANDLE_NONE, *portId);
 }
@@ -370,6 +378,8 @@
 
     const size_t mExpectedAudioPatchCount;
     sp<DeviceDescriptor> mSpdifDevice;
+
+    sp<DeviceDescriptor> mHdmiInputDevice;
 };
 
 AudioPolicyManagerTestMsd::AudioPolicyManagerTestMsd()
@@ -396,8 +406,11 @@
             AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO, 48000);
     sp<AudioProfile> ac3OutputProfile = new AudioProfile(
             AUDIO_FORMAT_AC3, AUDIO_CHANNEL_OUT_5POINT1, 48000);
+    sp<AudioProfile> iec958OutputProfile = new AudioProfile(
+            AUDIO_FORMAT_IEC60958, AUDIO_CHANNEL_OUT_STEREO, 48000);
     mMsdOutputDevice->addAudioProfile(pcmOutputProfile);
     mMsdOutputDevice->addAudioProfile(ac3OutputProfile);
+    mMsdOutputDevice->addAudioProfile(iec958OutputProfile);
     mMsdInputDevice = new DeviceDescriptor(AUDIO_DEVICE_IN_BUS);
     // Match output profile from AudioPolicyConfig::setDefault.
     sp<AudioProfile> pcmInputProfile = new AudioProfile(
@@ -435,6 +448,11 @@
             AUDIO_OUTPUT_FLAG_NON_BLOCKING);
     msdCompressedOutputProfile->addSupportedDevice(mMsdOutputDevice);
     msdModule->addOutputProfile(msdCompressedOutputProfile);
+    sp<OutputProfile> msdIec958OutputProfile = new OutputProfile("msd iec958 input");
+    msdIec958OutputProfile->addAudioProfile(iec958OutputProfile);
+    msdIec958OutputProfile->setFlags(AUDIO_OUTPUT_FLAG_DIRECT);
+    msdIec958OutputProfile->addSupportedDevice(mMsdOutputDevice);
+    msdModule->addOutputProfile(msdIec958OutputProfile);
 
     sp<InputProfile> msdInputProfile = new InputProfile("msd output");
     msdInputProfile->addAudioProfile(pcmInputProfile);
@@ -458,6 +476,19 @@
         mSpdifDevice->addAudioProfile(dtsOutputProfile);
         primaryEncodedOutputProfile->addSupportedDevice(mSpdifDevice);
     }
+
+    // Add HDMI input device with IEC60958 profile for HDMI in -> MSD patching.
+    mHdmiInputDevice = new DeviceDescriptor(AUDIO_DEVICE_IN_HDMI);
+    sp<AudioProfile> iec958InputProfile = new AudioProfile(
+            AUDIO_FORMAT_IEC60958, AUDIO_CHANNEL_IN_STEREO, 48000);
+    mHdmiInputDevice->addAudioProfile(iec958InputProfile);
+    config.addDevice(mHdmiInputDevice);
+    sp<InputProfile> hdmiInputProfile = new InputProfile("hdmi input");
+    hdmiInputProfile->addAudioProfile(iec958InputProfile);
+    hdmiInputProfile->setFlags(AUDIO_INPUT_FLAG_DIRECT);
+    hdmiInputProfile->addSupportedDevice(mHdmiInputDevice);
+    config.getHwModules().getModuleFromName(AUDIO_HARDWARE_MODULE_ID_PRIMARY)->
+            addInputProfile(hdmiInputProfile);
 }
 
 void AudioPolicyManagerTestMsd::TearDown() {
@@ -465,6 +496,7 @@
     mMsdInputDevice.clear();
     mDefaultOutputDevice.clear();
     mSpdifDevice.clear();
+    mHdmiInputDevice.clear();
     AudioPolicyManagerTest::TearDown();
 }
 
@@ -485,21 +517,21 @@
     ASSERT_EQ(mExpectedAudioPatchCount, patchCount.deltaFromSnapshot());
 }
 
-TEST_P(AudioPolicyManagerTestMsd, PatchCreationSetReleaseMsdPatches) {
+TEST_P(AudioPolicyManagerTestMsd, PatchCreationSetReleaseMsdOutputPatches) {
     const PatchCountCheck patchCount = snapshotPatchCount();
     DeviceVector devices = mManager->getAvailableOutputDevices();
     // Remove MSD output device to avoid patching to itself
     devices.remove(mMsdOutputDevice);
     ASSERT_EQ(mExpectedAudioPatchCount, devices.size());
-    mManager->setMsdPatches(&devices);
+    mManager->setMsdOutputPatches(&devices);
     ASSERT_EQ(mExpectedAudioPatchCount, patchCount.deltaFromSnapshot());
     // Dual patch: exercise creating one new audio patch and reusing another existing audio patch.
     DeviceVector singleDevice(devices[0]);
-    mManager->releaseMsdPatches(singleDevice);
+    mManager->releaseMsdOutputPatches(singleDevice);
     ASSERT_EQ(mExpectedAudioPatchCount - 1, patchCount.deltaFromSnapshot());
-    mManager->setMsdPatches(&devices);
+    mManager->setMsdOutputPatches(&devices);
     ASSERT_EQ(mExpectedAudioPatchCount, patchCount.deltaFromSnapshot());
-    mManager->releaseMsdPatches(devices);
+    mManager->releaseMsdOutputPatches(devices);
     ASSERT_EQ(0, patchCount.deltaFromSnapshot());
 }
 
@@ -580,6 +612,34 @@
     }
 }
 
+TEST_P(AudioPolicyManagerTestMsd, PatchCreationFromHdmiInToMsd) {
+    audio_patch_handle_t handle = AUDIO_PATCH_HANDLE_NONE;
+    uid_t uid = 42;
+    const PatchCountCheck patchCount = snapshotPatchCount();
+    ASSERT_FALSE(mManager->getAvailableInputDevices().isEmpty());
+    PatchBuilder patchBuilder;
+    patchBuilder.
+            addSource(mManager->getAvailableInputDevices().
+                    getDevice(AUDIO_DEVICE_IN_HDMI, String8(""), AUDIO_FORMAT_DEFAULT)).
+            addSink(mManager->getAvailableOutputDevices().
+                    getDevice(AUDIO_DEVICE_OUT_BUS, String8(""), AUDIO_FORMAT_DEFAULT));
+    ASSERT_EQ(NO_ERROR, mManager->createAudioPatch(patchBuilder.patch(), &handle, uid));
+    ASSERT_NE(AUDIO_PATCH_HANDLE_NONE, handle);
+    AudioPatchCollection patches = mManager->getAudioPatches();
+    sp<AudioPatch> patch = patches.valueFor(handle);
+    ASSERT_EQ(1, patch->mPatch.num_sources);
+    ASSERT_EQ(1, patch->mPatch.num_sinks);
+    ASSERT_EQ(AUDIO_PORT_ROLE_SOURCE, patch->mPatch.sources[0].role);
+    ASSERT_EQ(AUDIO_PORT_ROLE_SINK, patch->mPatch.sinks[0].role);
+    ASSERT_EQ(AUDIO_FORMAT_IEC60958, patch->mPatch.sources[0].format);
+    ASSERT_EQ(AUDIO_FORMAT_IEC60958, patch->mPatch.sinks[0].format);
+    ASSERT_EQ(AUDIO_CHANNEL_IN_STEREO, patch->mPatch.sources[0].channel_mask);
+    ASSERT_EQ(AUDIO_CHANNEL_OUT_STEREO, patch->mPatch.sinks[0].channel_mask);
+    ASSERT_EQ(48000, patch->mPatch.sources[0].sample_rate);
+    ASSERT_EQ(48000, patch->mPatch.sinks[0].sample_rate);
+    ASSERT_EQ(1, patchCount.deltaFromSnapshot());
+}
+
 class AudioPolicyManagerTestWithConfigurationFile : public AudioPolicyManagerTest {
 protected:
     void SetUpManagerConfig() override;
@@ -745,7 +805,8 @@
 protected:
     void SetUp() override;
     std::string getConfigFile() override { return sTvConfig; }
-    std::map<audio_format_t, bool> getSurroundFormatsHelper(bool reported);
+    std::map<audio_format_t, bool> getSurroundFormatsHelper();
+    std::vector<audio_format_t> getReportedSurroundFormatsHelper();
     std::unordered_set<audio_format_t> getFormatsFromPorts();
     AudioPolicyManagerTestClient* getClient() override {
         return new AudioPolicyManagerTestClientForHdmi;
@@ -776,12 +837,12 @@
 }
 
 std::map<audio_format_t, bool>
-        AudioPolicyManagerTestForHdmi::getSurroundFormatsHelper(bool reported) {
+        AudioPolicyManagerTestForHdmi::getSurroundFormatsHelper() {
     unsigned int numSurroundFormats = 0;
     std::map<audio_format_t, bool> surroundFormatsMap;
     status_t ret = mManager->getSurroundFormats(
             &numSurroundFormats, nullptr /* surroundFormats */,
-            nullptr /* surroundFormatsEnabled */, reported);
+            nullptr /* surroundFormatsEnabled */);
     EXPECT_EQ(NO_ERROR, ret);
     if (ret != NO_ERROR) {
         return surroundFormatsMap;
@@ -791,7 +852,7 @@
     bool surroundFormatsEnabled[numSurroundFormats];
     memset(surroundFormatsEnabled, 0, sizeof(bool) * numSurroundFormats);
     ret = mManager->getSurroundFormats(
-            &numSurroundFormats, surroundFormats, surroundFormatsEnabled, reported);
+            &numSurroundFormats, surroundFormats, surroundFormatsEnabled);
     EXPECT_EQ(NO_ERROR, ret);
     if (ret != NO_ERROR) {
         return surroundFormatsMap;
@@ -802,6 +863,28 @@
     return surroundFormatsMap;
 }
 
+std::vector<audio_format_t> AudioPolicyManagerTestForHdmi::getReportedSurroundFormatsHelper() {
+    unsigned int numSurroundFormats = 0;
+    std::vector<audio_format_t> surroundFormatsVector;
+    status_t ret = mManager->getReportedSurroundFormats(
+            &numSurroundFormats, nullptr /* surroundFormats */);
+    EXPECT_EQ(NO_ERROR, ret);
+    if (ret != NO_ERROR) {
+        return surroundFormatsVector;
+    }
+    audio_format_t surroundFormats[numSurroundFormats];
+    memset(surroundFormats, 0, sizeof(audio_format_t) * numSurroundFormats);
+    ret = mManager->getReportedSurroundFormats(&numSurroundFormats, surroundFormats);
+    EXPECT_EQ(NO_ERROR, ret);
+    if (ret != NO_ERROR) {
+        return surroundFormatsVector;
+    }
+    for (const auto &surroundFormat : surroundFormats) {
+        surroundFormatsVector.push_back(surroundFormat);
+    }
+    return surroundFormatsVector;
+}
+
 std::unordered_set<audio_format_t>
         AudioPolicyManagerTestForHdmi::getFormatsFromPorts() {
     uint32_t numPorts = 0;
@@ -832,7 +915,7 @@
 TEST_F(AudioPolicyManagerTestForHdmi, GetSurroundFormatsReturnsSupportedFormats) {
     mManager->setForceUse(
             AUDIO_POLICY_FORCE_FOR_ENCODED_SURROUND, AUDIO_POLICY_FORCE_ENCODED_SURROUND_ALWAYS);
-    auto surroundFormats = getSurroundFormatsHelper(false /*reported*/);
+    auto surroundFormats = getSurroundFormatsHelper();
     ASSERT_EQ(1, surroundFormats.count(AUDIO_FORMAT_E_AC3));
 }
 
@@ -844,19 +927,19 @@
     status_t ret =
             mManager->setSurroundFormatEnabled(AUDIO_FORMAT_E_AC3, false /*enabled*/);
     ASSERT_EQ(NO_ERROR, ret);
-    auto surroundFormats = getSurroundFormatsHelper(false /*reported*/);
+    auto surroundFormats = getSurroundFormatsHelper();
     ASSERT_EQ(1, surroundFormats.count(AUDIO_FORMAT_E_AC3));
     ASSERT_FALSE(surroundFormats[AUDIO_FORMAT_E_AC3]);
 
     ret = mManager->setSurroundFormatEnabled(AUDIO_FORMAT_E_AC3, true /*enabled*/);
     ASSERT_EQ(NO_ERROR, ret);
-    surroundFormats = getSurroundFormatsHelper(false /*reported*/);
+    surroundFormats = getSurroundFormatsHelper();
     ASSERT_EQ(1, surroundFormats.count(AUDIO_FORMAT_E_AC3));
     ASSERT_TRUE(surroundFormats[AUDIO_FORMAT_E_AC3]);
 
     ret = mManager->setSurroundFormatEnabled(AUDIO_FORMAT_E_AC3, false /*enabled*/);
     ASSERT_EQ(NO_ERROR, ret);
-    surroundFormats = getSurroundFormatsHelper(false /*reported*/);
+    surroundFormats = getSurroundFormatsHelper();
     ASSERT_EQ(1, surroundFormats.count(AUDIO_FORMAT_E_AC3));
     ASSERT_FALSE(surroundFormats[AUDIO_FORMAT_E_AC3]);
 }
@@ -879,8 +962,8 @@
         GetReportedSurroundFormatsReturnsHdmiReportedFormats) {
     mManager->setForceUse(
             AUDIO_POLICY_FORCE_FOR_ENCODED_SURROUND, AUDIO_POLICY_FORCE_ENCODED_SURROUND_ALWAYS);
-    auto surroundFormats = getSurroundFormatsHelper(true /*reported*/);
-    ASSERT_EQ(1, surroundFormats.count(AUDIO_FORMAT_E_AC3));
+    auto surroundFormats = getReportedSurroundFormatsHelper();
+    ASSERT_EQ(1, std::count(surroundFormats.begin(), surroundFormats.end(), AUDIO_FORMAT_E_AC3));
 }
 
 TEST_F(AudioPolicyManagerTestForHdmi,
@@ -890,13 +973,13 @@
 
     status_t ret = mManager->setSurroundFormatEnabled(AUDIO_FORMAT_E_AC3, false /*enabled*/);
     ASSERT_EQ(NO_ERROR, ret);
-    auto surroundFormats = getSurroundFormatsHelper(true /*reported*/);
-    ASSERT_EQ(1, surroundFormats.count(AUDIO_FORMAT_E_AC3));
+    auto surroundFormats = getReportedSurroundFormatsHelper();
+    ASSERT_EQ(1, std::count(surroundFormats.begin(), surroundFormats.end(), AUDIO_FORMAT_E_AC3));
 
     ret = mManager->setSurroundFormatEnabled(AUDIO_FORMAT_E_AC3, true /*enabled*/);
     ASSERT_EQ(NO_ERROR, ret);
-    surroundFormats = getSurroundFormatsHelper(true /*reported*/);
-    ASSERT_EQ(1, surroundFormats.count(AUDIO_FORMAT_E_AC3));
+    surroundFormats = getReportedSurroundFormatsHelper();
+    ASSERT_EQ(1, std::count(surroundFormats.begin(), surroundFormats.end(), AUDIO_FORMAT_E_AC3));
 }
 
 class AudioPolicyManagerTestDPNoRemoteSubmixModule : public AudioPolicyManagerTestDynamicPolicy {
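
The helper methods above size a C-style buffer from a runtime count (audio_format_t surroundFormats[numSurroundFormats];), which depends on the variable-length-array compiler extension. A minimal sketch of the same count-then-fill query written with std::vector instead, assuming only the two-argument getReportedSurroundFormats(count, buffer) call visible in this hunk:

    // Sketch only; assumes <vector>, <system/audio.h> and utils/Errors.h as in the test above.
    template <typename Manager>
    std::vector<audio_format_t> queryReportedSurroundFormats(Manager* manager) {
        // First call with a null buffer only asks how many formats are reported.
        unsigned int numSurroundFormats = 0;
        if (manager->getReportedSurroundFormats(&numSurroundFormats, nullptr) != NO_ERROR) {
            return {};
        }
        // Second call fills a heap-allocated buffer of that size; no VLA needed.
        std::vector<audio_format_t> formats(numSurroundFormats, AUDIO_FORMAT_DEFAULT);
        if (manager->getReportedSurroundFormats(&numSurroundFormats, formats.data()) != NO_ERROR) {
            return {};
        }
        return formats;
    }
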
diff --git a/services/audiopolicy/tests/resources/Android.bp b/services/audiopolicy/tests/resources/Android.bp
index eb63636..ff4d568 100644
--- a/services/audiopolicy/tests/resources/Android.bp
+++ b/services/audiopolicy/tests/resources/Android.bp
@@ -4,8 +4,6 @@
     // all of the 'license_kinds' from "frameworks_av_license"
     // to get the below license kinds:
     //   SPDX-license-identifier-Apache-2.0
-    //   SPDX-license-identifier-MIT
-    //   SPDX-license-identifier-Unicode-DFS
     default_applicable_licenses: ["frameworks_av_license"],
 }
 
diff --git a/services/camera/libcameraservice/Android.bp b/services/camera/libcameraservice/Android.bp
index e9f95cb..32c0267 100644
--- a/services/camera/libcameraservice/Android.bp
+++ b/services/camera/libcameraservice/Android.bp
@@ -113,6 +113,7 @@
         "libutilscallstack",
         "libutils",
         "libbinder",
+        "libactivitymanager_aidl",
         "libcutils",
         "libmedia",
         "libmediautils",
@@ -143,19 +144,25 @@
         "android.hardware.camera.provider@2.4",
         "android.hardware.camera.provider@2.5",
         "android.hardware.camera.provider@2.6",
+        "android.hardware.camera.provider@2.7",
         "android.hardware.camera.device@3.2",
         "android.hardware.camera.device@3.3",
         "android.hardware.camera.device@3.4",
         "android.hardware.camera.device@3.5",
-        "android.hardware.camera.device@3.6"
+        "android.hardware.camera.device@3.6",
+        "android.hardware.camera.device@3.7",
+        "media_permission-aidl-cpp",
     ],
 
     static_libs: [
+        "libprocessinfoservice_aidl",
         "libbinderthreadstateutils",
+        "media_permission-aidl-cpp",
     ],
 
     export_shared_lib_headers: [
         "libbinder",
+        "libactivitymanager_aidl",
         "libcamera_client",
         "libfmq",
         "libsensorprivacy",
diff --git a/services/camera/libcameraservice/CameraService.cpp b/services/camera/libcameraservice/CameraService.cpp
index b5e2267..6efb90b 100644
--- a/services/camera/libcameraservice/CameraService.cpp
+++ b/services/camera/libcameraservice/CameraService.cpp
@@ -42,7 +42,6 @@
 #include <binder/MemoryBase.h>
 #include <binder/MemoryHeapBase.h>
 #include <binder/PermissionController.h>
-#include <binder/ProcessInfoService.h>
 #include <binder/IResultReceiver.h>
 #include <binderthreadstate/CallerUtils.h>
 #include <cutils/atomic.h>
@@ -58,6 +57,7 @@
 #include <media/IMediaHTTPService.h>
 #include <media/mediaplayer.h>
 #include <mediautils/BatteryNotifier.h>
+#include <processinfo/ProcessInfoService.h>
 #include <utils/Errors.h>
 #include <utils/Log.h>
 #include <utils/String16.h>
@@ -129,10 +129,9 @@
 static const String16 sCameraOpenCloseListenerPermission(
         "android.permission.CAMERA_OPEN_CLOSE_LISTENER");
 
-// Matches with PERCEPTIBLE_APP_ADJ in ProcessList.java
-static constexpr int32_t kVendorClientScore = 200;
-// Matches with PROCESS_STATE_PERSISTENT_UI in ActivityManager.java
-static constexpr int32_t kVendorClientState = 1;
+static constexpr int32_t kVendorClientScore = resource_policy::PERCEPTIBLE_APP_ADJ;
+static constexpr int32_t kVendorClientState = ActivityManager::PROCESS_STATE_PERSISTENT_UI;
+
 const String8 CameraService::kOfflineDevice("offline-");
 
 CameraService::CameraService() :
@@ -226,10 +225,16 @@
     return OK;
 }
 
-void CameraService::broadcastTorchModeStatus(const String8& cameraId, TorchModeStatus status) {
+void CameraService::broadcastTorchModeStatus(const String8& cameraId, TorchModeStatus status,
+        SystemCameraKind systemCameraKind) {
     Mutex::Autolock lock(mStatusListenerLock);
-
     for (auto& i : mListenerList) {
+        if (shouldSkipStatusUpdates(systemCameraKind, i->isVendorListener(), i->getListenerPid(),
+                i->getListenerUid())) {
+            ALOGV("Skipping torch callback for system-only camera device %s",
+                    cameraId.c_str());
+            continue;
+        }
         i->getListener()->onTorchStatusChanged(mapToInterface(status), String16{cameraId});
     }
 }
@@ -315,7 +320,7 @@
         Mutex::Autolock al(mTorchStatusMutex);
         mTorchStatusMap.add(id, TorchModeStatus::AVAILABLE_OFF);
 
-        broadcastTorchModeStatus(id, TorchModeStatus::AVAILABLE_OFF);
+        broadcastTorchModeStatus(id, TorchModeStatus::AVAILABLE_OFF, deviceKind);
     }
 
     updateCameraNumAndIds();
@@ -476,12 +481,19 @@
 
 void CameraService::onTorchStatusChanged(const String8& cameraId,
         TorchModeStatus newStatus) {
+    SystemCameraKind systemCameraKind = SystemCameraKind::PUBLIC;
+    status_t res = getSystemCameraKind(cameraId, &systemCameraKind);
+    if (res != OK) {
+        ALOGE("%s: Could not get system camera kind for camera id %s", __FUNCTION__,
+                cameraId.string());
+        return;
+    }
     Mutex::Autolock al(mTorchStatusMutex);
-    onTorchStatusChangedLocked(cameraId, newStatus);
+    onTorchStatusChangedLocked(cameraId, newStatus, systemCameraKind);
 }
 
 void CameraService::onTorchStatusChangedLocked(const String8& cameraId,
-        TorchModeStatus newStatus) {
+        TorchModeStatus newStatus, SystemCameraKind systemCameraKind) {
     ALOGI("%s: Torch status changed for cameraId=%s, newStatus=%d",
             __FUNCTION__, cameraId.string(), newStatus);
 
@@ -530,8 +542,7 @@
             }
         }
     }
-
-    broadcastTorchModeStatus(cameraId, newStatus);
+    broadcastTorchModeStatus(cameraId, newStatus, systemCameraKind);
 }
 
 static bool hasPermissionsForSystemCamera(int callingPid, int callingUid) {
@@ -741,7 +752,7 @@
     return Status::ok();
 }
 
-int CameraService::getDeviceVersion(const String8& cameraId, int* facing) {
+int CameraService::getDeviceVersion(const String8& cameraId, int* facing, int* orientation) {
     ATRACE_CALL();
 
     int deviceVersion = 0;
@@ -758,6 +769,9 @@
         res = mCameraProviderManager->getCameraInfo(cameraId.string(), &info);
         if (res != OK) return -1;
         *facing = info.facing;
+        if (orientation) {
+            *orientation = info.orientation;
+        }
     }
 
     return deviceVersion;
@@ -783,8 +797,8 @@
 Status CameraService::makeClient(const sp<CameraService>& cameraService,
         const sp<IInterface>& cameraCb, const String16& packageName,
         const std::optional<String16>& featureId,  const String8& cameraId,
-        int api1CameraId, int facing, int clientPid, uid_t clientUid, int servicePid,
-        int deviceVersion, apiLevel effectiveApiLevel,
+        int api1CameraId, int facing, int sensorOrientation, int clientPid, uid_t clientUid,
+        int servicePid, int deviceVersion, apiLevel effectiveApiLevel,
         /*out*/sp<BasicClient>* client) {
 
     // Create CameraClient based on device version reported by the HAL.
@@ -802,17 +816,18 @@
         case CAMERA_DEVICE_API_VERSION_3_4:
         case CAMERA_DEVICE_API_VERSION_3_5:
         case CAMERA_DEVICE_API_VERSION_3_6:
+        case CAMERA_DEVICE_API_VERSION_3_7:
             if (effectiveApiLevel == API_1) { // Camera1 API route
                 sp<ICameraClient> tmp = static_cast<ICameraClient*>(cameraCb.get());
                 *client = new Camera2Client(cameraService, tmp, packageName, featureId,
                         cameraId, api1CameraId,
-                        facing, clientPid, clientUid,
+                        facing, sensorOrientation, clientPid, clientUid,
                         servicePid);
             } else { // Camera2 API route
                 sp<hardware::camera2::ICameraDeviceCallbacks> tmp =
                         static_cast<hardware::camera2::ICameraDeviceCallbacks*>(cameraCb.get());
                 *client = new CameraDeviceClient(cameraService, tmp, packageName, featureId,
-                        cameraId, facing, clientPid, clientUid, servicePid);
+                        cameraId, facing, sensorOrientation, clientPid, clientUid, servicePid);
             }
             break;
         default:
@@ -1552,6 +1567,7 @@
 
     sp<CLIENT> client = nullptr;
     int facing = -1;
+    int orientation = 0;
     bool isNdk = (clientPackageName.size() == 0);
     {
         // Acquire mServiceLock and prevent other clients from connecting
@@ -1617,7 +1633,7 @@
         // give flashlight a chance to close devices if necessary.
         mFlashlight->prepareDeviceOpen(cameraId);
 
-        int deviceVersion = getDeviceVersion(cameraId, /*out*/&facing);
+        int deviceVersion = getDeviceVersion(cameraId, /*out*/&facing, /*out*/&orientation);
         if (facing == -1) {
             ALOGE("%s: Unable to get camera device \"%s\"  facing", __FUNCTION__, cameraId.string());
             return STATUS_ERROR_FMT(ERROR_INVALID_OPERATION,
@@ -1626,7 +1642,7 @@
 
         sp<BasicClient> tmp = nullptr;
         if(!(ret = makeClient(this, cameraCb, clientPackageName, clientFeatureId,
-                cameraId, api1CameraId, facing,
+                cameraId, api1CameraId, facing, orientation,
                 clientPid, clientUid, getpid(),
                 deviceVersion, effectiveApiLevel,
                 /*out*/&tmp)).isOk()) {
@@ -1685,6 +1701,9 @@
         // Set rotate-and-crop override behavior
         if (mOverrideRotateAndCropMode != ANDROID_SCALER_ROTATE_AND_CROP_AUTO) {
             client->setRotateAndCropOverride(mOverrideRotateAndCropMode);
+        } else if (CameraServiceProxyWrapper::isRotateAndCropOverrideNeeded(clientPackageName,
+                    orientation, facing)) {
+            client->setRotateAndCropOverride(ANDROID_SCALER_ROTATE_AND_CROP_90);
         }
 
         // Set camera muting behavior
@@ -1803,6 +1822,10 @@
     String8 id = String8(cameraId.string());
     int uid = CameraThreadState::getCallingUid();
 
+    if (shouldRejectSystemCameraConnection(id)) {
+        return STATUS_ERROR_FMT(ERROR_ILLEGAL_ARGUMENT, "Unable to set torch mode"
+                " for system only device %s: ", id.string());
+    }
     // verify id is valid.
     auto state = getCameraState(id);
     if (state == nullptr) {
@@ -2005,7 +2028,50 @@
     return Status::ok();
 }
 
- Status CameraService::getConcurrentCameraIds(
+Status CameraService::notifyDisplayConfigurationChange() {
+    ATRACE_CALL();
+    const int callingPid = CameraThreadState::getCallingPid();
+    const int selfPid = getpid();
+
+    // Permission checks
+    if (callingPid != selfPid) {
+        // Ensure we're being called by system_server, or similar process with
+        // permissions to notify the camera service about system events
+        if (!checkCallingPermission(sCameraSendSystemEventsPermission)) {
+            const int uid = CameraThreadState::getCallingUid();
+            ALOGE("Permission Denial: cannot send updates to camera service about orientation"
+                    " changes from pid=%d, uid=%d", callingPid, uid);
+            return STATUS_ERROR_FMT(ERROR_PERMISSION_DENIED,
+                    "No permission to send updates to camera service about orientation"
+                    " changes from pid=%d, uid=%d", callingPid, uid);
+        }
+    }
+
+    Mutex::Autolock lock(mServiceLock);
+
+    // Don't do anything if rotate-and-crop override via cmd is active
+    if (mOverrideRotateAndCropMode != ANDROID_SCALER_ROTATE_AND_CROP_AUTO) return Status::ok();
+
+    const auto clients = mActiveClientManager.getAll();
+    for (auto& current : clients) {
+        if (current != nullptr) {
+            const auto basicClient = current->getValue();
+            if (basicClient.get() != nullptr) {
+                if (CameraServiceProxyWrapper::isRotateAndCropOverrideNeeded(
+                            basicClient->getPackageName(), basicClient->getCameraOrientation(),
+                            basicClient->getCameraFacing())) {
+                    basicClient->setRotateAndCropOverride(ANDROID_SCALER_ROTATE_AND_CROP_90);
+                } else {
+                    basicClient->setRotateAndCropOverride(ANDROID_SCALER_ROTATE_AND_CROP_NONE);
+                }
+            }
+        }
+    }
+
+    return Status::ok();
+}
+
+Status CameraService::getConcurrentCameraIds(
         std::vector<ConcurrentCameraIdCombination>* concurrentCameraIds) {
     ATRACE_CALL();
     if (!concurrentCameraIds) {
@@ -2159,6 +2225,11 @@
                     return shouldSkipStatusUpdates(deviceKind, isVendorListener, clientPid,
                             clientUid);}), cameraStatuses->end());
 
+    // cameraStatuses will have non-eligible camera ids removed.
+    std::set<String16> idsChosenForCallback;
+    for (const auto &s : *cameraStatuses) {
+        idsChosenForCallback.insert(String16(s.cameraId));
+    }
 
     /*
      * Immediately signal current torch status to this listener only
@@ -2168,7 +2239,11 @@
         Mutex::Autolock al(mTorchStatusMutex);
         for (size_t i = 0; i < mTorchStatusMap.size(); i++ ) {
             String16 id = String16(mTorchStatusMap.keyAt(i).string());
-            listener->onTorchStatusChanged(mapToInterface(mTorchStatusMap.valueAt(i)), id);
+            // The camera id is visible to the client. Fine to send torch
+            // callback.
+            if (idsChosenForCallback.find(id) != idsChosenForCallback.end()) {
+                listener->onTorchStatusChanged(mapToInterface(mTorchStatusMap.valueAt(i)), id);
+            }
         }
     }
 
@@ -2269,6 +2344,7 @@
         case CAMERA_DEVICE_API_VERSION_3_4:
         case CAMERA_DEVICE_API_VERSION_3_5:
         case CAMERA_DEVICE_API_VERSION_3_6:
+        case CAMERA_DEVICE_API_VERSION_3_7:
             ALOGV("%s: Camera id %s uses HAL3.2 or newer, supports api1/api2 directly",
                     __FUNCTION__, id.string());
             *isSupported = true;
@@ -2655,13 +2731,13 @@
         const String16& clientPackageName,
         const std::optional<String16>& clientFeatureId,
         const String8& cameraIdStr,
-        int api1CameraId, int cameraFacing,
+        int api1CameraId, int cameraFacing, int sensorOrientation,
         int clientPid, uid_t clientUid,
         int servicePid) :
         CameraService::BasicClient(cameraService,
                 IInterface::asBinder(cameraClient),
                 clientPackageName, clientFeatureId,
-                cameraIdStr, cameraFacing,
+                cameraIdStr, cameraFacing, sensorOrientation,
                 clientPid, clientUid,
                 servicePid),
         mCameraId(api1CameraId)
@@ -2691,10 +2767,10 @@
 CameraService::BasicClient::BasicClient(const sp<CameraService>& cameraService,
         const sp<IBinder>& remoteCallback,
         const String16& clientPackageName, const std::optional<String16>& clientFeatureId,
-        const String8& cameraIdStr, int cameraFacing,
+        const String8& cameraIdStr, int cameraFacing, int sensorOrientation,
         int clientPid, uid_t clientUid,
         int servicePid):
-        mCameraIdStr(cameraIdStr), mCameraFacing(cameraFacing),
+        mCameraIdStr(cameraIdStr), mCameraFacing(cameraFacing), mOrientation(sensorOrientation),
         mClientPackageName(clientPackageName), mClientFeatureId(clientFeatureId),
         mClientPid(clientPid), mClientUid(clientUid),
         mServicePid(servicePid),
@@ -2791,6 +2867,13 @@
     return mClientPackageName;
 }
 
+int CameraService::BasicClient::getCameraFacing() const {
+    return mCameraFacing;
+}
+
+int CameraService::BasicClient::getCameraOrientation() const {
+    return mOrientation;
+}
 
 int CameraService::BasicClient::getClientPid() const {
     return mClientPid;
@@ -3748,7 +3831,7 @@
                             TorchModeStatus::AVAILABLE_OFF :
                             TorchModeStatus::NOT_AVAILABLE;
                     if (torchStatus != newTorchStatus) {
-                        onTorchStatusChangedLocked(cameraId, newTorchStatus);
+                        onTorchStatusChangedLocked(cameraId, newTorchStatus, deviceKind);
                     }
                 }
             }
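
Taken together, shouldSkipStatusUpdates() now gates three paths for a given listener: the camera status list it receives when it registers, the torch callbacks replayed to it right after registration (via idsChosenForCallback), and every subsequent broadcastTorchModeStatus(). A simplified standalone sketch of that shared gating idea, with stand-in types rather than the real CameraStatus and listener objects:

    #include <set>
    #include <string>
    #include <vector>

    // Stand-in type; the real code carries SystemCameraKind, listener pid/uid, etc.
    struct CameraEntry { std::string id; bool hiddenFromThisListener; };

    // Build the set of ids the listener may see with one predicate, then reuse
    // that set to decide whether a per-camera torch callback is delivered.
    std::set<std::string> visibleCameraIds(const std::vector<CameraEntry>& cameras) {
        std::set<std::string> visible;
        for (const auto& cam : cameras) {
            if (cam.hiddenFromThisListener) {
                continue;  // same decision shouldSkipStatusUpdates() makes
            }
            visible.insert(cam.id);
        }
        return visible;
    }

    bool mayDeliverTorchCallback(const std::set<std::string>& visible, const std::string& id) {
        return visible.count(id) != 0;
    }
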
diff --git a/services/camera/libcameraservice/CameraService.h b/services/camera/libcameraservice/CameraService.h
index 4cdee2c..6317c7a 100644
--- a/services/camera/libcameraservice/CameraService.h
+++ b/services/camera/libcameraservice/CameraService.h
@@ -167,6 +167,8 @@
 
     virtual binder::Status    notifyDeviceStateChange(int64_t newState);
 
+    virtual binder::Status    notifyDisplayConfigurationChange();
+
     // OK = supports api of that version, -EOPNOTSUPP = does not support
     virtual binder::Status    supportsCameraApi(
             const String16& cameraId, int32_t apiVersion,
@@ -213,7 +215,8 @@
 
     /////////////////////////////////////////////////////////////////////
     // CameraDeviceFactory functionality
-    int                 getDeviceVersion(const String8& cameraId, int* facing = NULL);
+    int                 getDeviceVersion(const String8& cameraId, int* facing = nullptr,
+            int* orientation = nullptr);
 
     /////////////////////////////////////////////////////////////////////
     // Shared utilities
@@ -245,6 +248,12 @@
         // Return the package name for this client
         virtual String16 getPackageName() const;
 
+        // Return the camera facing for this client
+        virtual int getCameraFacing() const;
+
+        // Return the camera orientation for this client
+        virtual int getCameraOrientation() const;
+
         // Notify client about a fatal error
         virtual void notifyError(int32_t errorCode,
                 const CaptureResultExtras& resultExtras) = 0;
@@ -291,6 +300,7 @@
                 const std::optional<String16>& clientFeatureId,
                 const String8& cameraIdStr,
                 int cameraFacing,
+                int sensorOrientation,
                 int clientPid,
                 uid_t clientUid,
                 int servicePid);
@@ -307,6 +317,7 @@
         static sp<CameraService>        sCameraService;
         const String8                   mCameraIdStr;
         const int                       mCameraFacing;
+        const int                       mOrientation;
         String16                        mClientPackageName;
         std::optional<String16>         mClientFeatureId;
         pid_t                           mClientPid;
@@ -384,6 +395,7 @@
                 const String8& cameraIdStr,
                 int api1CameraId,
                 int cameraFacing,
+                int sensorOrientation,
                 int clientPid,
                 uid_t clientUid,
                 int servicePid);
@@ -992,7 +1004,8 @@
     // handle torch mode status change and invoke callbacks. mTorchStatusMutex
     // should be locked.
     void onTorchStatusChangedLocked(const String8& cameraId,
-            hardware::camera::common::V1_0::TorchModeStatus newStatus);
+            hardware::camera::common::V1_0::TorchModeStatus newStatus,
+            SystemCameraKind systemCameraKind);
 
     // get a camera's torch status. mTorchStatusMutex should be locked.
     status_t getTorchStatusLocked(const String8 &cameraId,
@@ -1070,7 +1083,7 @@
     static binder::Status makeClient(const sp<CameraService>& cameraService,
             const sp<IInterface>& cameraCb, const String16& packageName,
             const std::optional<String16>& featureId, const String8& cameraId, int api1CameraId,
-            int facing, int clientPid, uid_t clientUid, int servicePid,
+            int facing, int sensorOrientation, int clientPid, uid_t clientUid, int servicePid,
             int deviceVersion, apiLevel effectiveApiLevel,
             /*out*/sp<BasicClient>* client);
 
@@ -1083,7 +1096,8 @@
 
 
     void broadcastTorchModeStatus(const String8& cameraId,
-            hardware::camera::common::V1_0::TorchModeStatus status);
+            hardware::camera::common::V1_0::TorchModeStatus status,
+            SystemCameraKind systemCameraKind);
 
     void disconnectClient(const String8& id, sp<BasicClient> clientToDisconnect);
 
@@ -1098,7 +1112,7 @@
     // Aggreated audio restriction mode for all camera clients
     int32_t mAudioRestriction;
 
-    // Current override rotate-and-crop mode
+    // Current rotate-and-crop override set via the cmd interface; AUTO means no override
     uint8_t mOverrideRotateAndCropMode = ANDROID_SCALER_ROTATE_AND_CROP_AUTO;
 
     // Current image dump mask
diff --git a/services/camera/libcameraservice/api1/Camera2Client.cpp b/services/camera/libcameraservice/api1/Camera2Client.cpp
index 31cfed6..72b3c40 100644
--- a/services/camera/libcameraservice/api1/Camera2Client.cpp
+++ b/services/camera/libcameraservice/api1/Camera2Client.cpp
@@ -55,11 +55,12 @@
         const String8& cameraDeviceId,
         int api1CameraId,
         int cameraFacing,
+        int sensorOrientation,
         int clientPid,
         uid_t clientUid,
         int servicePid):
         Camera2ClientBase(cameraService, cameraClient, clientPackageName, clientFeatureId,
-                cameraDeviceId, api1CameraId, cameraFacing,
+                cameraDeviceId, api1CameraId, cameraFacing, sensorOrientation,
                 clientPid, clientUid, servicePid),
         mParameters(api1CameraId, cameraFacing)
 {
diff --git a/services/camera/libcameraservice/api1/Camera2Client.h b/services/camera/libcameraservice/api1/Camera2Client.h
index 4d667e3..d16b242 100644
--- a/services/camera/libcameraservice/api1/Camera2Client.h
+++ b/services/camera/libcameraservice/api1/Camera2Client.h
@@ -101,6 +101,7 @@
             const String8& cameraDeviceId,
             int api1CameraId,
             int cameraFacing,
+            int sensorOrientation,
             int clientPid,
             uid_t clientUid,
             int servicePid);
diff --git a/services/camera/libcameraservice/api1/client2/CallbackProcessor.cpp b/services/camera/libcameraservice/api1/client2/CallbackProcessor.cpp
index 4c3ded6..ee764ec 100644
--- a/services/camera/libcameraservice/api1/client2/CallbackProcessor.cpp
+++ b/services/camera/libcameraservice/api1/client2/CallbackProcessor.cpp
@@ -158,7 +158,7 @@
         res = device->createStream(mCallbackWindow,
                 params.previewWidth, params.previewHeight, callbackFormat,
                 HAL_DATASPACE_V0_JFIF, CAMERA_STREAM_ROTATION_0, &mCallbackStreamId,
-                String8());
+                String8(), std::unordered_set<int32_t>{ANDROID_SENSOR_PIXEL_MODE_DEFAULT});
         if (res != OK) {
             ALOGE("%s: Camera %d: Can't create output stream for callbacks: "
                     "%s (%d)", __FUNCTION__, mId,
diff --git a/services/camera/libcameraservice/api1/client2/JpegProcessor.cpp b/services/camera/libcameraservice/api1/client2/JpegProcessor.cpp
index ff2e398..eed2654 100755
--- a/services/camera/libcameraservice/api1/client2/JpegProcessor.cpp
+++ b/services/camera/libcameraservice/api1/client2/JpegProcessor.cpp
@@ -151,7 +151,7 @@
                 params.pictureWidth, params.pictureHeight,
                 HAL_PIXEL_FORMAT_BLOB, HAL_DATASPACE_V0_JFIF,
                 CAMERA_STREAM_ROTATION_0, &mCaptureStreamId,
-                String8());
+                String8(), std::unordered_set<int32_t>{ANDROID_SENSOR_PIXEL_MODE_DEFAULT});
         if (res != OK) {
             ALOGE("%s: Camera %d: Can't create output stream for capture: "
                     "%s (%d)", __FUNCTION__, mId,
diff --git a/services/camera/libcameraservice/api1/client2/Parameters.cpp b/services/camera/libcameraservice/api1/client2/Parameters.cpp
index e062c14..8164df0 100644
--- a/services/camera/libcameraservice/api1/client2/Parameters.cpp
+++ b/services/camera/libcameraservice/api1/client2/Parameters.cpp
@@ -29,7 +29,6 @@
 
 #include "Parameters.h"
 #include "system/camera.h"
-#include "hardware/camera_common.h"
 #include <android/hardware/ICamera.h>
 #include <media/MediaProfiles.h>
 #include <media/mediarecorder.h>
diff --git a/services/camera/libcameraservice/api1/client2/Parameters.h b/services/camera/libcameraservice/api1/client2/Parameters.h
index 3a709c9..02ac638 100644
--- a/services/camera/libcameraservice/api1/client2/Parameters.h
+++ b/services/camera/libcameraservice/api1/client2/Parameters.h
@@ -56,7 +56,7 @@
     int previewTransform; // set by CAMERA_CMD_SET_DISPLAY_ORIENTATION
 
     int pictureWidth, pictureHeight;
-    // Store the picture size before they are overriden by video snapshot
+    // Store the picture size before they are overridden by video snapshot
     int pictureWidthLastSet, pictureHeightLastSet;
     bool pictureSizeOverriden;
 
diff --git a/services/camera/libcameraservice/api1/client2/StreamingProcessor.cpp b/services/camera/libcameraservice/api1/client2/StreamingProcessor.cpp
index 8b1eb28..2d3597c 100644
--- a/services/camera/libcameraservice/api1/client2/StreamingProcessor.cpp
+++ b/services/camera/libcameraservice/api1/client2/StreamingProcessor.cpp
@@ -198,7 +198,8 @@
         res = device->createStream(mPreviewWindow,
                 params.previewWidth, params.previewHeight,
                 CAMERA2_HAL_PIXEL_FORMAT_OPAQUE, HAL_DATASPACE_UNKNOWN,
-                CAMERA_STREAM_ROTATION_0, &mPreviewStreamId, String8());
+                CAMERA_STREAM_ROTATION_0, &mPreviewStreamId, String8(),
+                std::unordered_set<int32_t>{ANDROID_SENSOR_PIXEL_MODE_DEFAULT});
         if (res != OK) {
             ALOGE("%s: Camera %d: Unable to create preview stream: %s (%d)",
                     __FUNCTION__, mId, strerror(-res), res);
@@ -384,7 +385,7 @@
                 params.videoWidth, params.videoHeight,
                 params.videoFormat, params.videoDataSpace,
                 CAMERA_STREAM_ROTATION_0, &mRecordingStreamId,
-                String8());
+                String8(), std::unordered_set<int32_t>{ANDROID_SENSOR_PIXEL_MODE_DEFAULT});
         if (res != OK) {
             ALOGE("%s: Camera %d: Can't create output stream for recording: "
                     "%s (%d)", __FUNCTION__, mId,
diff --git a/services/camera/libcameraservice/api1/client2/ZslProcessor.cpp b/services/camera/libcameraservice/api1/client2/ZslProcessor.cpp
index 0701b6f..8e598f1 100644
--- a/services/camera/libcameraservice/api1/client2/ZslProcessor.cpp
+++ b/services/camera/libcameraservice/api1/client2/ZslProcessor.cpp
@@ -237,7 +237,7 @@
     if (mInputStreamId == NO_STREAM) {
         res = device->createInputStream(params.fastInfo.maxZslSize.width,
             params.fastInfo.maxZslSize.height, HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
-            &mInputStreamId);
+            /*isMultiResolution*/false, &mInputStreamId);
         if (res != OK) {
             ALOGE("%s: Camera %d: Can't create input stream: "
                     "%s (%d)", __FUNCTION__, client->getCameraId(),
@@ -261,7 +261,7 @@
         res = device->createStream(outSurface, params.fastInfo.maxZslSize.width,
             params.fastInfo.maxZslSize.height, HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
             HAL_DATASPACE_UNKNOWN, CAMERA_STREAM_ROTATION_0, &mZslStreamId,
-            String8());
+            String8(), std::unordered_set<int32_t>{ANDROID_SENSOR_PIXEL_MODE_DEFAULT});
         if (res != OK) {
             ALOGE("%s: Camera %d: Can't create ZSL stream: "
                     "%s (%d)", __FUNCTION__, client->getCameraId(),
diff --git a/services/camera/libcameraservice/api2/CameraDeviceClient.cpp b/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
index d47014e..343f4a7 100644
--- a/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
+++ b/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
@@ -61,6 +61,7 @@
         const String8& cameraId,
         int api1CameraId,
         int cameraFacing,
+        int sensorOrientation,
         int clientPid,
         uid_t clientUid,
         int servicePid) :
@@ -70,6 +71,7 @@
             clientFeatureId,
             cameraId,
             cameraFacing,
+            sensorOrientation,
             clientPid,
             clientUid,
             servicePid),
@@ -86,12 +88,13 @@
         const std::optional<String16>& clientFeatureId,
         const String8& cameraId,
         int cameraFacing,
+        int sensorOrientation,
         int clientPid,
         uid_t clientUid,
         int servicePid) :
     Camera2ClientBase(cameraService, remoteCallback, clientPackageName, clientFeatureId,
                 cameraId, /*API1 camera ID*/ -1,
-                cameraFacing, clientPid, clientUid, servicePid),
+                cameraFacing, sensorOrientation, clientPid, clientUid, servicePid),
     mInputStream(),
     mStreamingRequestId(REQUEST_ID_NONE),
     mRequestIdCounter(0) {
@@ -125,8 +128,8 @@
                                       /*listener*/this,
                                       /*sendPartials*/true);
 
-    auto deviceInfo = mDevice->info();
-    camera_metadata_entry_t physicalKeysEntry = deviceInfo.find(
+    const CameraMetadata &deviceInfo = mDevice->info();
+    camera_metadata_ro_entry_t physicalKeysEntry = deviceInfo.find(
             ANDROID_REQUEST_AVAILABLE_PHYSICAL_CAMERA_REQUEST_KEYS);
     if (physicalKeysEntry.count > 0) {
         mSupportedPhysicalRequestKeys.insert(mSupportedPhysicalRequestKeys.begin(),
@@ -135,6 +138,17 @@
     }
 
     mProviderManager = providerPtr;
+    // Cache the physical camera ids backing this logical device, and record which
+    // of them (including the logical id itself) are ultra high resolution sensors.
+    mProviderManager->isLogicalCamera(mCameraIdStr.string(), &mPhysicalCameraIds);
+    if (isUltraHighResolutionSensor(mCameraIdStr)) {
+        mHighResolutionSensors.insert(mCameraIdStr.string());
+    }
+    for (auto &physicalId : mPhysicalCameraIds) {
+        if (isUltraHighResolutionSensor(String8(physicalId.c_str()))) {
+            mHighResolutionSensors.insert(physicalId.c_str());
+        }
+    }
     return OK;
 }
 
@@ -186,6 +200,17 @@
     return binder::Status::ok();
 }
 
+static std::list<int> getIntersection(const std::unordered_set<int> &streamIdsForThisCamera,
+        const Vector<int> &streamIdsForThisRequest) {
+    std::list<int> intersection;
+    for (auto &streamId : streamIdsForThisRequest) {
+        if (streamIdsForThisCamera.find(streamId) != streamIdsForThisCamera.end()) {
+            intersection.emplace_back(streamId);
+        }
+    }
+    return intersection;
+}
+
 binder::Status CameraDeviceClient::submitRequestList(
         const std::vector<hardware::camera2::CaptureRequest>& requests,
         bool streaming,
@@ -332,6 +357,24 @@
                         "Request settings are empty");
             }
 
+            // Check whether the physical / logical stream has settings
+            // consistent with the sensor pixel mode(s) it was configured with.
+            // mHighResolutionCameraIdToStreamIdSet only contains high resolution camera ids
+            const auto streamIdSetIt = mHighResolutionCameraIdToStreamIdSet.find(it.id);
+            if (streamIdSetIt != mHighResolutionCameraIdToStreamIdSet.end()) {
+                std::list<int> streamIdsUsedInRequest = getIntersection(streamIdSetIt->second,
+                        outputStreamIds);
+                if (!request.mIsReprocess &&
+                        !isSensorPixelModeConsistent(streamIdsUsedInRequest, it.settings)) {
+                    ALOGE("%s: Camera %s: Request settings CONTROL_SENSOR_PIXEL_MODE not "
+                            "consistent with configured streams. Rejecting request.",
+                            __FUNCTION__, it.id.c_str());
+                    return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT,
+                        "Request settings CONTROL_SENSOR_PIXEL_MODE are not consistent with "
+                        "streams configured");
+                }
+            }
+
             String8 physicalId(it.id.c_str());
             if (physicalId != mDevice->getId()) {
                 auto found = std::find(requestedPhysicalIds.begin(), requestedPhysicalIds.end(),
@@ -494,7 +537,7 @@
         return STATUS_ERROR(CameraService::ERROR_DISCONNECTED, "Camera device no longer alive");
     }
 
-    res = SessionConfigurationUtils::checkOperatingMode(operatingMode, mDevice->info(),
+    res = camera3::SessionConfigurationUtils::checkOperatingMode(operatingMode, mDevice->info(),
             mCameraIdStr);
     if (!res.isOk()) {
         return res;
@@ -560,8 +603,8 @@
 
 binder::Status CameraDeviceClient::isSessionConfigurationSupported(
         const SessionConfiguration& sessionConfiguration, bool *status /*out*/) {
-    ATRACE_CALL();
 
+    ATRACE_CALL();
     binder::Status res;
     status_t ret = OK;
     if (!(res = checkPidStatus(__FUNCTION__)).isOk()) return res;
@@ -573,7 +616,7 @@
     }
 
     auto operatingMode = sessionConfiguration.getOperatingMode();
-    res = SessionConfigurationUtils::checkOperatingMode(operatingMode, mDevice->info(),
+    res = camera3::SessionConfigurationUtils::checkOperatingMode(operatingMode, mDevice->info(),
             mCameraIdStr);
     if (!res.isOk()) {
         return res;
@@ -584,12 +627,12 @@
         ALOGE("%s: %s", __FUNCTION__, msg.string());
         return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
     }
-    hardware::camera::device::V3_4::StreamConfiguration streamConfiguration;
+    hardware::camera::device::V3_7::StreamConfiguration streamConfiguration;
     bool earlyExit = false;
     metadataGetter getMetadata = [this](const String8 &id) {return mDevice->infoPhysical(id);};
     std::vector<std::string> physicalCameraIds;
     mProviderManager->isLogicalCamera(mCameraIdStr.string(), &physicalCameraIds);
-    res = SessionConfigurationUtils::convertToHALStreamCombination(sessionConfiguration,
+    res = camera3::SessionConfigurationUtils::convertToHALStreamCombination(sessionConfiguration,
             mCameraIdStr, mDevice->info(), getMetadata, physicalCameraIds, streamConfiguration,
             &earlyExit);
     if (!res.isOk()) {
@@ -714,6 +757,13 @@
                 }
                 mCompositeStreamMap.removeItemsAt(compositeIndex);
             }
+            for (auto &mapIt: mHighResolutionCameraIdToStreamIdSet) {
+                auto &streamSet = mapIt.second;
+                if (streamSet.find(streamId) != streamSet.end()) {
+                    streamSet.erase(streamId);
+                    break;
+                }
+            }
         }
     }
 
@@ -738,8 +788,9 @@
     bool isShared = outputConfiguration.isShared();
     String8 physicalCameraId = String8(outputConfiguration.getPhysicalCameraId());
     bool deferredConsumerOnly = deferredConsumer && numBufferProducers == 0;
+    bool isMultiResolution = outputConfiguration.isMultiResolution();
 
-    res = SessionConfigurationUtils::checkSurfaceType(numBufferProducers, deferredConsumer,
+    res = camera3::SessionConfigurationUtils::checkSurfaceType(numBufferProducers, deferredConsumer,
             outputConfiguration.getSurfaceType());
     if (!res.isOk()) {
         return res;
@@ -748,10 +799,8 @@
     if (!mDevice.get()) {
         return STATUS_ERROR(CameraService::ERROR_DISCONNECTED, "Camera device no longer alive");
     }
-    std::vector<std::string> physicalCameraIds;
-    mProviderManager->isLogicalCamera(mCameraIdStr.string(), &physicalCameraIds);
-    res = SessionConfigurationUtils::checkPhysicalCameraId(physicalCameraIds, physicalCameraId,
-            mCameraIdStr);
+    res = camera3::SessionConfigurationUtils::checkPhysicalCameraId(mPhysicalCameraIds,
+            physicalCameraId, mCameraIdStr);
     if (!res.isOk()) {
         return res;
     }
@@ -767,6 +816,8 @@
 
     OutputStreamInfo streamInfo;
     bool isStreamInfoValid = false;
+    const std::vector<int32_t> &sensorPixelModesUsed =
+            outputConfiguration.getSensorPixelModesUsed();
     for (auto& bufferProducer : bufferProducers) {
         // Don't create multiple streams for the same target surface
         sp<IBinder> binder = IInterface::asBinder(bufferProducer);
@@ -779,8 +830,9 @@
         }
 
         sp<Surface> surface;
-        res = SessionConfigurationUtils::createSurfaceFromGbp(streamInfo, isStreamInfoValid,
-                surface, bufferProducer, mCameraIdStr, mDevice->infoPhysical(physicalCameraId));
+        res = camera3::SessionConfigurationUtils::createSurfaceFromGbp(streamInfo,
+                isStreamInfoValid, surface, bufferProducer, mCameraIdStr,
+                mDevice->infoPhysical(physicalCameraId), sensorPixelModesUsed);
 
         if (!res.isOk())
             return res;
@@ -792,10 +844,10 @@
         binders.push_back(IInterface::asBinder(bufferProducer));
         surfaces.push_back(surface);
     }
-
     int streamId = camera3::CAMERA3_STREAM_ID_INVALID;
     std::vector<int> surfaceIds;
-    bool isDepthCompositeStream = camera3::DepthCompositeStream::isDepthCompositeStream(surfaces[0]);
+    bool isDepthCompositeStream =
+            camera3::DepthCompositeStream::isDepthCompositeStream(surfaces[0]);
     bool isHeicCompisiteStream = camera3::HeicCompositeStream::isHeicCompositeStream(surfaces[0]);
     if (isDepthCompositeStream || isHeicCompisiteStream) {
         sp<CompositeStream> compositeStream;
@@ -808,8 +860,8 @@
         err = compositeStream->createStream(surfaces, deferredConsumer, streamInfo.width,
                 streamInfo.height, streamInfo.format,
                 static_cast<camera_stream_rotation_t>(outputConfiguration.getRotation()),
-                &streamId, physicalCameraId, &surfaceIds, outputConfiguration.getSurfaceSetID(),
-                isShared);
+                &streamId, physicalCameraId, streamInfo.sensorPixelModesUsed, &surfaceIds,
+                outputConfiguration.getSurfaceSetID(), isShared, isMultiResolution);
         if (err == OK) {
             mCompositeStreamMap.add(IInterface::asBinder(surfaces[0]->getIGraphicBufferProducer()),
                     compositeStream);
@@ -818,8 +870,8 @@
         err = mDevice->createStream(surfaces, deferredConsumer, streamInfo.width,
                 streamInfo.height, streamInfo.format, streamInfo.dataSpace,
                 static_cast<camera_stream_rotation_t>(outputConfiguration.getRotation()),
-                &streamId, physicalCameraId, &surfaceIds, outputConfiguration.getSurfaceSetID(),
-                isShared);
+                &streamId, physicalCameraId, streamInfo.sensorPixelModesUsed, &surfaceIds,
+                outputConfiguration.getSurfaceSetID(), isShared, isMultiResolution);
     }
 
     if (err != OK) {
@@ -847,6 +899,16 @@
         // Set transform flags to ensure preview to be rotated correctly.
         res = setStreamTransformLocked(streamId);
 
+        // Fill in mHighResolutionCameraIdToStreamIdSet map
+        const String8 &cameraIdUsed =
+                physicalCameraId.size() != 0 ? physicalCameraId : mCameraIdStr;
+        const char *cameraIdUsedCStr = cameraIdUsed.string();
+        // Only needed for high resolution sensors
+        if (mHighResolutionSensors.find(cameraIdUsedCStr) !=
+                mHighResolutionSensors.end()) {
+            mHighResolutionCameraIdToStreamIdSet[cameraIdUsedCStr].insert(streamId);
+        }
+
         *newStreamId = streamId;
     }
 
@@ -883,12 +945,27 @@
     std::vector<sp<Surface>> noSurface;
     std::vector<int> surfaceIds;
     String8 physicalCameraId(outputConfiguration.getPhysicalCameraId());
+    const String8 &cameraIdUsed =
+            physicalCameraId.size() != 0 ? physicalCameraId : mCameraIdStr;
+    // Here, we override sensor pixel modes
+    std::unordered_set<int32_t> overriddenSensorPixelModesUsed;
+    const std::vector<int32_t> &sensorPixelModesUsed =
+            outputConfiguration.getSensorPixelModesUsed();
+    if (camera3::SessionConfigurationUtils::checkAndOverrideSensorPixelModesUsed(
+            sensorPixelModesUsed, format, width, height, getStaticInfo(cameraIdUsed),
+            /*allowRounding*/ false, &overriddenSensorPixelModesUsed) != OK) {
+        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT,
+                "sensor pixel modes used not valid for deferred stream");
+    }
+
     err = mDevice->createStream(noSurface, /*hasDeferredConsumer*/true, width,
             height, format, dataSpace,
             static_cast<camera_stream_rotation_t>(outputConfiguration.getRotation()),
-            &streamId, physicalCameraId, &surfaceIds,
+            &streamId, physicalCameraId,
+            overriddenSensorPixelModesUsed,
+            &surfaceIds,
             outputConfiguration.getSurfaceSetID(), isShared,
-            consumerUsage);
+            outputConfiguration.isMultiResolution(), consumerUsage);
 
     if (err != OK) {
         res = STATUS_ERROR_FMT(CameraService::ERROR_INVALID_OPERATION,
@@ -899,9 +976,9 @@
         // a separate list to track. Once the deferred surface is set, this id will be
         // relocated to mStreamMap.
         mDeferredStreams.push_back(streamId);
-
         mStreamInfoMap.emplace(std::piecewise_construct, std::forward_as_tuple(streamId),
-                std::forward_as_tuple(width, height, format, dataSpace, consumerUsage));
+                std::forward_as_tuple(width, height, format, dataSpace, consumerUsage,
+                        overriddenSensorPixelModesUsed));
 
         ALOGV("%s: Camera %s: Successfully created a new stream ID %d for a deferred surface"
                 " (%d x %d) stream with format 0x%x.",
@@ -911,6 +988,13 @@
         res = setStreamTransformLocked(streamId);
 
         *newStreamId = streamId;
+        // Fill in mHighResolutionCameraIdToStreamIdSet
+        const char *cameraIdUsedCStr = cameraIdUsed.string();
+        // Only needed for high resolution sensors
+        if (mHighResolutionSensors.find(cameraIdUsedCStr) !=
+                mHighResolutionSensors.end()) {
+            mHighResolutionCameraIdToStreamIdSet[cameraIdUsed.string()].insert(streamId);
+        }
     }
     return res;
 }
@@ -943,12 +1027,13 @@
 }
 
 binder::Status CameraDeviceClient::createInputStream(
-        int width, int height, int format,
+        int width, int height, int format, bool isMultiResolution,
         /*out*/
         int32_t* newStreamId) {
 
     ATRACE_CALL();
-    ALOGV("%s (w = %d, h = %d, f = 0x%x)", __FUNCTION__, width, height, format);
+    ALOGV("%s (w = %d, h = %d, f = 0x%x, isMultiResolution %d)", __FUNCTION__,
+            width, height, format, isMultiResolution);
 
     binder::Status res;
     if (!(res = checkPidStatus(__FUNCTION__)).isOk()) return res;
@@ -967,7 +1052,7 @@
     }
 
     int streamId = -1;
-    status_t err = mDevice->createInputStream(width, height, format, &streamId);
+    status_t err = mDevice->createInputStream(width, height, format, isMultiResolution, &streamId);
     if (err == OK) {
         mInputStream.configured = true;
         mInputStream.width = width;
@@ -1079,13 +1164,15 @@
             newOutputsMap.removeItemsAt(idx);
         }
     }
+    const std::vector<int32_t> &sensorPixelModesUsed =
+            outputConfiguration.getSensorPixelModesUsed();
 
     for (size_t i = 0; i < newOutputsMap.size(); i++) {
         OutputStreamInfo outInfo;
         sp<Surface> surface;
-        res = SessionConfigurationUtils::createSurfaceFromGbp(outInfo, /*isStreamInfoValid*/ false,
-                surface, newOutputsMap.valueAt(i), mCameraIdStr,
-                mDevice->infoPhysical(physicalCameraId));
+        res = camera3::SessionConfigurationUtils::createSurfaceFromGbp(outInfo,
+                /*isStreamInfoValid*/ false, surface, newOutputsMap.valueAt(i), mCameraIdStr,
+                mDevice->infoPhysical(physicalCameraId), sensorPixelModesUsed);
         if (!res.isOk())
             return res;
 
@@ -1440,6 +1527,8 @@
     }
 
     std::vector<sp<Surface>> consumerSurfaces;
+    const std::vector<int32_t> &sensorPixelModesUsed =
+            outputConfiguration.getSensorPixelModesUsed();
     for (auto& bufferProducer : bufferProducers) {
         // Don't create multiple streams for the same target surface
         ssize_t index = mStreamMap.indexOfKey(IInterface::asBinder(bufferProducer));
@@ -1450,9 +1539,9 @@
         }
 
         sp<Surface> surface;
-        res = SessionConfigurationUtils::createSurfaceFromGbp(mStreamInfoMap[streamId],
+        res = camera3::SessionConfigurationUtils::createSurfaceFromGbp(mStreamInfoMap[streamId],
                 true /*isStreamInfoValid*/, surface, bufferProducer, mCameraIdStr,
-                mDevice->infoPhysical(physicalId));
+                mDevice->infoPhysical(physicalId), sensorPixelModesUsed);
 
         if (!res.isOk())
             return res;
@@ -1625,7 +1714,8 @@
     if (offlineSession.get() != nullptr) {
         offlineClient = new CameraOfflineSessionClient(sCameraService,
                 offlineSession, offlineCompositeStreamMap, cameraCb, mClientPackageName,
-                mClientFeatureId, mCameraIdStr, mCameraFacing, mClientPid, mClientUid, mServicePid);
+                mClientFeatureId, mCameraIdStr, mCameraFacing, mOrientation, mClientPid, mClientUid,
+                mServicePid);
         ret = sCameraService->addOfflineClient(mCameraIdStr, offlineClient);
     }
 
@@ -1934,4 +2024,54 @@
 
     return ret;
 }
+
+const CameraMetadata &CameraDeviceClient::getStaticInfo(const String8 &cameraId) {
+    if (mDevice->getId() == cameraId) {
+        return mDevice->info();
+    }
+    return mDevice->infoPhysical(cameraId);
+}
+
+bool CameraDeviceClient::isUltraHighResolutionSensor(const String8 &cameraId) {
+    const CameraMetadata &deviceInfo = getStaticInfo(cameraId);
+    return camera3::SessionConfigurationUtils::isUltraHighResolutionSensor(deviceInfo);
+}
+
+bool CameraDeviceClient::isSensorPixelModeConsistent(
+        const std::list<int> &streamIdList, const CameraMetadata &settings) {
+    // First we get the sensorPixelMode from the settings metadata.
+    int32_t sensorPixelMode = ANDROID_SENSOR_PIXEL_MODE_DEFAULT;
+    camera_metadata_ro_entry sensorPixelModeEntry = settings.find(ANDROID_SENSOR_PIXEL_MODE);
+    if (sensorPixelModeEntry.count != 0) {
+        sensorPixelMode = sensorPixelModeEntry.data.u8[0];
+        if (sensorPixelMode != ANDROID_SENSOR_PIXEL_MODE_DEFAULT &&
+            sensorPixelMode != ANDROID_SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION) {
+            ALOGE("%s: Request sensor pixel mode not is not one of the valid values %d",
+                      __FUNCTION__, sensorPixelMode);
+            return false;
+        }
+    }
+    // Check whether each stream has max resolution allowed.
+    bool consistent = true;
+    for (auto it : streamIdList) {
+        auto const streamInfoIt = mStreamInfoMap.find(it);
+        if (streamInfoIt == mStreamInfoMap.end()) {
+            ALOGE("%s: stream id %d not created, skipping", __FUNCTION__, it);
+            return false;
+        }
+        consistent =
+                streamInfoIt->second.sensorPixelModesUsed.find(sensorPixelMode) !=
+                        streamInfoIt->second.sensorPixelModesUsed.end();
+        if (!consistent) {
+            ALOGE("sensorPixelMode used %i not consistent with configured modes", sensorPixelMode);
+            for (auto m : streamInfoIt->second.sensorPixelModesUsed) {
+                ALOGE("sensor pixel mode used list: %i", m);
+            }
+            break;
+        }
+    }
+
+    return consistent;
+}
+
 } // namespace android
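
The request-time validation added above is split across three pieces: mHighResolutionCameraIdToStreamIdSet, filled in as streams are created; getIntersection(), which narrows that set to the streams a request actually targets; and isSensorPixelModeConsistent(), which compares the request's ANDROID_SENSOR_PIXEL_MODE against the modes each targeted stream was configured with. A condensed sketch of the same check using plain containers in place of CameraMetadata and OutputStreamInfo:

    #include <cstdint>
    #include <string>
    #include <unordered_map>
    #include <unordered_set>
    #include <vector>

    // Sketch only: streamModes maps stream id -> configured sensor pixel modes,
    // cameraStreams maps ultra-high-resolution camera id -> its stream ids.
    bool requestModeConsistent(
            const std::unordered_map<int, std::unordered_set<int32_t>>& streamModes,
            const std::unordered_map<std::string, std::unordered_set<int>>& cameraStreams,
            const std::string& cameraId,
            const std::vector<int>& requestStreamIds,
            int32_t requestedMode) {
        auto camIt = cameraStreams.find(cameraId);
        if (camIt == cameraStreams.end()) {
            return true;  // not an ultra high resolution camera, nothing to check
        }
        for (int streamId : requestStreamIds) {
            if (camIt->second.count(streamId) == 0) {
                continue;  // stream belongs to another camera (cf. getIntersection)
            }
            auto modeIt = streamModes.find(streamId);
            if (modeIt == streamModes.end() || modeIt->second.count(requestedMode) == 0) {
                return false;  // stream was not configured for the requested pixel mode
            }
        }
        return true;
    }
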
diff --git a/services/camera/libcameraservice/api2/CameraDeviceClient.h b/services/camera/libcameraservice/api2/CameraDeviceClient.h
index 5588285..44ffeef 100644
--- a/services/camera/libcameraservice/api2/CameraDeviceClient.h
+++ b/services/camera/libcameraservice/api2/CameraDeviceClient.h
@@ -28,6 +28,7 @@
 #include "common/FrameProcessorBase.h"
 #include "common/Camera2ClientBase.h"
 #include "CompositeStream.h"
+#include "utils/SessionConfigurationUtils.h"
 
 using android::camera3::OutputStreamInfo;
 using android::camera3::CompositeStream;
@@ -54,6 +55,7 @@
             const String8& cameraId,
             int api1CameraId,
             int cameraFacing,
+            int sensorOrientation,
             int clientPid,
             uid_t clientUid,
             int servicePid);
@@ -115,6 +117,7 @@
 
     // Create an input stream of width, height, and format.
     virtual binder::Status createInputStream(int width, int height, int format,
+            bool isMultiResolution,
             /*out*/
             int32_t* newStreamId = NULL) override;
 
@@ -179,6 +182,7 @@
             const std::optional<String16>& clientFeatureId,
             const String8& cameraId,
             int cameraFacing,
+            int sensorOrientation,
             int clientPid,
             uid_t clientUid,
             int servicePid);
@@ -221,6 +225,13 @@
     // Calculate the ANativeWindow transform from android.sensor.orientation
     status_t              getRotationTransformLocked(/*out*/int32_t* transform);
 
+    bool isUltraHighResolutionSensor(const String8 &cameraId);
+
+    bool isSensorPixelModeConsistent(const std::list<int> &streamIdList,
+            const CameraMetadata &settings);
+
+    const CameraMetadata &getStaticInfo(const String8 &cameraId);
+
 private:
     // StreamSurfaceId encapsulates streamId + surfaceId for a particular surface.
     // streamId specifies the index of the stream the surface belongs to, and the
@@ -304,6 +315,8 @@
 
     int32_t mRequestIdCounter;
 
+    std::vector<std::string> mPhysicalCameraIds;
+
     // The list of output streams whose surfaces are deferred. We have to track them separately
     // as there are no surfaces available and can not be put into mStreamMap. Once the deferred
     // Surface is configured, the stream id will be moved to mStreamMap.
@@ -312,6 +325,12 @@
     // stream ID -> outputStreamInfo mapping
     std::unordered_map<int32_t, OutputStreamInfo> mStreamInfoMap;
 
+    // map high resolution camera id (logical / physical) -> set of stream ids configured
+    std::unordered_map<std::string, std::unordered_set<int>> mHighResolutionCameraIdToStreamIdSet;
+
+    // set of high resolution camera ids (logical / physical)
+    std::unordered_set<std::string> mHighResolutionSensors;
+
     KeyedVector<sp<IBinder>, sp<CompositeStream>> mCompositeStreamMap;
 
     sp<CameraProviderManager> mProviderManager;
diff --git a/services/camera/libcameraservice/api2/CameraOfflineSessionClient.h b/services/camera/libcameraservice/api2/CameraOfflineSessionClient.h
index 5c5fcda..ba49325 100644
--- a/services/camera/libcameraservice/api2/CameraOfflineSessionClient.h
+++ b/services/camera/libcameraservice/api2/CameraOfflineSessionClient.h
@@ -49,13 +49,13 @@
             const sp<ICameraDeviceCallbacks>& remoteCallback,
             const String16& clientPackageName,
             const std::optional<String16>& clientFeatureId,
-            const String8& cameraIdStr, int cameraFacing,
+            const String8& cameraIdStr, int cameraFacing, int sensorOrientation,
             int clientPid, uid_t clientUid, int servicePid) :
             CameraService::BasicClient(
                     cameraService,
                     IInterface::asBinder(remoteCallback),
                     clientPackageName, clientFeatureId,
-                    cameraIdStr, cameraFacing, clientPid, clientUid, servicePid),
+                    cameraIdStr, cameraFacing, sensorOrientation, clientPid, clientUid, servicePid),
             mRemoteCallback(remoteCallback), mOfflineSession(session),
             mCompositeStreamMap(offlineCompositeStreamMap) {}
 
diff --git a/services/camera/libcameraservice/api2/CompositeStream.cpp b/services/camera/libcameraservice/api2/CompositeStream.cpp
index 2f8ca6b..4b840fc 100644
--- a/services/camera/libcameraservice/api2/CompositeStream.cpp
+++ b/services/camera/libcameraservice/api2/CompositeStream.cpp
@@ -47,7 +47,9 @@
 status_t CompositeStream::createStream(const std::vector<sp<Surface>>& consumers,
         bool hasDeferredConsumer, uint32_t width, uint32_t height, int format,
         camera_stream_rotation_t rotation, int * id, const String8& physicalCameraId,
-        std::vector<int> * surfaceIds, int streamSetId, bool isShared) {
+        const std::unordered_set<int32_t> &sensorPixelModesUsed,
+        std::vector<int> * surfaceIds,
+        int streamSetId, bool isShared, bool isMultiResolution) {
     if (hasDeferredConsumer) {
         ALOGE("%s: Deferred consumers not supported in case of composite streams!",
                 __FUNCTION__);
@@ -66,8 +68,14 @@
         return BAD_VALUE;
     }
 
-    return createInternalStreams(consumers, hasDeferredConsumer, width, height, format, rotation, id,
-            physicalCameraId, surfaceIds, streamSetId, isShared);
+    if (isMultiResolution) {
+        ALOGE("%s: Multi-resolution output not supported in case of composite streams!",
+                __FUNCTION__);
+        return BAD_VALUE;
+    }
+
+    return createInternalStreams(consumers, hasDeferredConsumer, width, height, format, rotation,
+            id, physicalCameraId, sensorPixelModesUsed, surfaceIds, streamSetId, isShared);
 }
 
 status_t CompositeStream::deleteStream() {
diff --git a/services/camera/libcameraservice/api2/CompositeStream.h b/services/camera/libcameraservice/api2/CompositeStream.h
index 2a934df..600bd28 100644
--- a/services/camera/libcameraservice/api2/CompositeStream.h
+++ b/services/camera/libcameraservice/api2/CompositeStream.h
@@ -44,7 +44,9 @@
     status_t createStream(const std::vector<sp<Surface>>& consumers,
             bool hasDeferredConsumer, uint32_t width, uint32_t height, int format,
             camera_stream_rotation_t rotation, int *id, const String8& physicalCameraId,
-            std::vector<int> *surfaceIds, int streamSetId, bool isShared);
+            const std::unordered_set<int32_t> &sensorPixelModesUsed,
+            std::vector<int> *surfaceIds,
+            int streamSetId, bool isShared, bool isMultiResolution);
 
     status_t deleteStream();
 
@@ -55,7 +57,9 @@
     virtual status_t createInternalStreams(const std::vector<sp<Surface>>& consumers,
             bool hasDeferredConsumer, uint32_t width, uint32_t height, int format,
             camera_stream_rotation_t rotation, int *id, const String8& physicalCameraId,
-            std::vector<int> *surfaceIds, int streamSetId, bool isShared) = 0;
+            const std::unordered_set<int32_t> &sensorPixelModesUsed,
+            std::vector<int> *surfaceIds,
+            int streamSetId, bool isShared) = 0;
 
     // Release all internal streams and corresponding resources.
     virtual status_t deleteInternalStreams() = 0;
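
CompositeStream::createStream now acts as a validating wrapper: it rejects deferred consumers and multi-resolution output up front and only then calls the pure-virtual createInternalStreams hook that DepthCompositeStream and HeicCompositeStream implement. A small standalone sketch of that wrapper pattern (class and method names here are illustrative only):

#include <cstdio>

class CompositeStreamSketch {
public:
    int createStream(bool hasDeferredConsumer, bool isMultiResolution) {
        if (hasDeferredConsumer) {
            std::fprintf(stderr, "deferred consumers not supported for composite streams\n");
            return -1;
        }
        if (isMultiResolution) {
            std::fprintf(stderr, "multi-resolution output not supported for composite streams\n");
            return -1;
        }
        return createInternalStreams();  // subclass-specific work (depth, HEIC, ...)
    }
    virtual ~CompositeStreamSketch() = default;

protected:
    virtual int createInternalStreams() = 0;
};

class DepthLikeStream : public CompositeStreamSketch {
protected:
    int createInternalStreams() override { return 0; }
};

int main() {
    DepthLikeStream stream;
    return stream.createStream(/*hasDeferredConsumer*/false, /*isMultiResolution*/false);
}
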
diff --git a/services/camera/libcameraservice/api2/DepthCompositeStream.cpp b/services/camera/libcameraservice/api2/DepthCompositeStream.cpp
index 2c553f3..19b54e0 100644
--- a/services/camera/libcameraservice/api2/DepthCompositeStream.cpp
+++ b/services/camera/libcameraservice/api2/DepthCompositeStream.cpp
@@ -20,6 +20,7 @@
 
 #include "api1/client2/JpegProcessor.h"
 #include "common/CameraProviderManager.h"
+#include "utils/SessionConfigurationUtils.h"
 #include <gui/Surface.h>
 #include <utils/Log.h>
 #include <utils/Trace.h>
@@ -78,7 +79,10 @@
             }
         }
 
-        getSupportedDepthSizes(staticInfo, &mSupportedDepthSizes);
+        getSupportedDepthSizes(staticInfo, /*maxResolution*/false, &mSupportedDepthSizes);
+        if (SessionConfigurationUtils::isUltraHighResolutionSensor(staticInfo)) {
+            getSupportedDepthSizes(staticInfo, true, &mSupportedDepthSizesMaximumResolution);
+        }
     }
 }
 
@@ -484,17 +488,82 @@
     return false;
 }
 
+static bool setContains(const std::unordered_set<int32_t> &containerSet, int32_t value) {
+    return containerSet.find(value) != containerSet.end();
+}
+
+status_t DepthCompositeStream::checkAndGetMatchingDepthSize(size_t width, size_t height,
+        const std::vector<std::tuple<size_t, size_t>> &depthSizes,
+        const std::vector<std::tuple<size_t, size_t>> &depthSizesMaximumResolution,
+        const std::unordered_set<int32_t> &sensorPixelModesUsed,
+        size_t *depthWidth, size_t *depthHeight) {
+    if (depthWidth == nullptr || depthHeight == nullptr) {
+        return BAD_VALUE;
+    }
+    size_t chosenDepthWidth = 0, chosenDepthHeight = 0;
+    bool hasDefaultSensorPixelMode =
+            setContains(sensorPixelModesUsed, ANDROID_SENSOR_PIXEL_MODE_DEFAULT);
+
+    bool hasMaximumResolutionSensorPixelMode =
+            setContains(sensorPixelModesUsed, ANDROID_SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION);
+
+    if (!hasDefaultSensorPixelMode && !hasMaximumResolutionSensorPixelMode) {
+        ALOGE("%s: sensor pixel modes don't contain either maximum resolution or default modes",
+                __FUNCTION__);
+        return BAD_VALUE;
+    }
+
+    if (hasDefaultSensorPixelMode) {
+        auto ret = getMatchingDepthSize(width, height, depthSizes, &chosenDepthWidth,
+                &chosenDepthHeight);
+        if (ret != OK) {
+            ALOGE("%s: No matching depth stream size found", __FUNCTION__);
+            return ret;
+        }
+    }
+
+    if (hasMaximumResolutionSensorPixelMode) {
+        size_t maxResDepthWidth = 0, maxResDepthHeight = 0;
+        auto ret = getMatchingDepthSize(width, height,
+                depthSizesMaximumResolution, &maxResDepthWidth, &maxResDepthHeight);
+        if (ret != OK) {
+            ALOGE("%s: No matching max resolution depth stream size found", __FUNCTION__);
+            return ret;
+        }
+        // Both matching depth sizes should be the same.
+        if (chosenDepthWidth != 0 && (chosenDepthWidth != maxResDepthWidth ||
+                chosenDepthHeight != maxResDepthHeight)) {
+            ALOGE("%s: Maximum resolution sensor pixel mode and default sensor pixel mode don't"
+                    " have matching depth sizes", __FUNCTION__);
+            return BAD_VALUE;
+        }
+        if (chosenDepthWidth == 0) {
+            chosenDepthWidth = maxResDepthWidth;
+            chosenDepthHeight = maxResDepthHeight;
+        }
+    }
+    *depthWidth = chosenDepthWidth;
+    *depthHeight = chosenDepthHeight;
+    return OK;
+}
+
+
 status_t DepthCompositeStream::createInternalStreams(const std::vector<sp<Surface>>& consumers,
         bool /*hasDeferredConsumer*/, uint32_t width, uint32_t height, int format,
         camera_stream_rotation_t rotation, int *id, const String8& physicalCameraId,
-        std::vector<int> *surfaceIds, int /*streamSetId*/, bool /*isShared*/) {
+        const std::unordered_set<int32_t> &sensorPixelModesUsed,
+        std::vector<int> *surfaceIds,
+        int /*streamSetId*/, bool /*isShared*/) {
     if (mSupportedDepthSizes.empty()) {
         ALOGE("%s: This camera device doesn't support any depth map streams!", __FUNCTION__);
         return INVALID_OPERATION;
     }
 
     size_t depthWidth, depthHeight;
-    auto ret = getMatchingDepthSize(width, height, mSupportedDepthSizes, &depthWidth, &depthHeight);
+    auto ret =
+            checkAndGetMatchingDepthSize(width, height, mSupportedDepthSizes,
+                    mSupportedDepthSizesMaximumResolution, sensorPixelModesUsed, &depthWidth,
+                    &depthHeight);
     if (ret != OK) {
         ALOGE("%s: Failed to find an appropriate depth stream size!", __FUNCTION__);
         return ret;
@@ -515,7 +584,7 @@
     mBlobSurface = new Surface(producer);
 
     ret = device->createStream(mBlobSurface, width, height, format, kJpegDataSpace, rotation,
-            id, physicalCameraId, surfaceIds);
+            id, physicalCameraId, sensorPixelModesUsed, surfaceIds);
     if (ret == OK) {
         mBlobStreamId = *id;
         mBlobSurfaceId = (*surfaceIds)[0];
@@ -531,7 +600,8 @@
     mDepthSurface = new Surface(producer);
     std::vector<int> depthSurfaceId;
     ret = device->createStream(mDepthSurface, depthWidth, depthHeight, kDepthMapPixelFormat,
-            kDepthMapDataSpace, rotation, &mDepthStreamId, physicalCameraId, &depthSurfaceId);
+            kDepthMapDataSpace, rotation, &mDepthStreamId, physicalCameraId, sensorPixelModesUsed,
+            &depthSurfaceId);
     if (ret == OK) {
         mDepthSurfaceId = depthSurfaceId[0];
     } else {
@@ -749,13 +819,15 @@
     return ((*depthWidth > 0) && (*depthHeight > 0)) ? OK : BAD_VALUE;
 }
 
-void DepthCompositeStream::getSupportedDepthSizes(const CameraMetadata& ch,
+void DepthCompositeStream::getSupportedDepthSizes(const CameraMetadata& ch, bool maxResolution,
         std::vector<std::tuple<size_t, size_t>>* depthSizes /*out*/) {
     if (depthSizes == nullptr) {
         return;
     }
 
-    auto entry = ch.find(ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS);
+    auto entry = ch.find(
+            camera3::SessionConfigurationUtils::getAppropriateModeTag(
+                    ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS, maxResolution));
     if (entry.count > 0) {
         // Depth stream dimensions have four int32_t components
         // (pixelformat, width, height, type)
@@ -779,30 +851,43 @@
     }
 
     std::vector<std::tuple<size_t, size_t>> depthSizes;
-    getSupportedDepthSizes(ch, &depthSizes);
+    std::vector<std::tuple<size_t, size_t>> depthSizesMaximumResolution;
+    getSupportedDepthSizes(ch, /*maxResolution*/false, &depthSizes);
     if (depthSizes.empty()) {
         ALOGE("%s: No depth stream configurations present", __FUNCTION__);
         return BAD_VALUE;
     }
 
-    size_t depthWidth, depthHeight;
-    auto ret = getMatchingDepthSize(streamInfo.width, streamInfo.height, depthSizes, &depthWidth,
-            &depthHeight);
+    if (SessionConfigurationUtils::isUltraHighResolutionSensor(ch)) {
+        getSupportedDepthSizes(ch, /*maxResolution*/true, &depthSizesMaximumResolution);
+        if (depthSizesMaximumResolution.empty()) {
+            ALOGE("%s: No depth stream configurations for maximum resolution present",
+                    __FUNCTION__);
+            return BAD_VALUE;
+        }
+    }
+
+    size_t chosenDepthWidth = 0, chosenDepthHeight = 0;
+    auto ret = checkAndGetMatchingDepthSize(streamInfo.width, streamInfo.height, depthSizes,
+            depthSizesMaximumResolution, streamInfo.sensorPixelModesUsed, &chosenDepthWidth,
+            &chosenDepthHeight);
+
     if (ret != OK) {
-        ALOGE("%s: No matching depth stream size found", __FUNCTION__);
+        ALOGE("%s: Couldn't get matching depth sizes", __FUNCTION__);
         return ret;
     }
 
     compositeOutput->clear();
     compositeOutput->insert(compositeOutput->end(), 2, streamInfo);
 
+    // Sensor pixel modes should stay the same here. They're already overridden.
     // Jpeg/Blob stream info
     (*compositeOutput)[0].dataSpace = kJpegDataSpace;
     (*compositeOutput)[0].consumerUsage = GRALLOC_USAGE_SW_READ_OFTEN;
 
     // Depth stream info
-    (*compositeOutput)[1].width = depthWidth;
-    (*compositeOutput)[1].height = depthHeight;
+    (*compositeOutput)[1].width = chosenDepthWidth;
+    (*compositeOutput)[1].height = chosenDepthHeight;
     (*compositeOutput)[1].format = kDepthMapPixelFormat;
     (*compositeOutput)[1].dataSpace = kDepthMapDataSpace;
     (*compositeOutput)[1].consumerUsage = GRALLOC_USAGE_SW_READ_OFTEN;
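
checkAndGetMatchingDepthSize above resolves one depth size per requested sensor pixel mode and, when both the default and maximum-resolution modes are requested, requires the two lookups to agree on a single size. A hedged standalone sketch of that rule; pickClosest is a simplified stand-in for the aspect-ratio matching done by getMatchingDepthSize:

#include <cstddef>
#include <optional>
#include <tuple>
#include <vector>

using Size = std::tuple<size_t, size_t>;

// Pick the supported size whose aspect ratio is closest to the requested one.
static std::optional<Size> pickClosest(size_t w, size_t h, const std::vector<Size>& sizes) {
    std::optional<Size> best;
    float target = static_cast<float>(w) / h;
    float bestDiff = 1e9f;
    for (const auto& s : sizes) {
        float ratio = static_cast<float>(std::get<0>(s)) / std::get<1>(s);
        float diff = ratio > target ? ratio - target : target - ratio;
        if (diff < bestDiff) { bestDiff = diff; best = s; }
    }
    return best;
}

static std::optional<Size> resolveDepthSize(size_t w, size_t h, bool wantDefault, bool wantMaxRes,
        const std::vector<Size>& defaultSizes, const std::vector<Size>& maxResSizes) {
    std::optional<Size> chosen;
    if (wantDefault) {
        chosen = pickClosest(w, h, defaultSizes);
        if (!chosen) return std::nullopt;
    }
    if (wantMaxRes) {
        auto maxRes = pickClosest(w, h, maxResSizes);
        if (!maxRes) return std::nullopt;
        // When both modes are requested they must agree on one depth size.
        if (chosen && *chosen != *maxRes) return std::nullopt;
        chosen = maxRes;
    }
    return chosen;
}

int main() {
    std::vector<Size> defaults = {{640, 480}, {320, 240}};
    std::vector<Size> maxRes = {{640, 480}};
    auto size = resolveDepthSize(4000, 3000, /*wantDefault*/true, /*wantMaxRes*/true,
            defaults, maxRes);
    return size ? 0 : 1;
}
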
diff --git a/services/camera/libcameraservice/api2/DepthCompositeStream.h b/services/camera/libcameraservice/api2/DepthCompositeStream.h
index 05bc504..a520bbf 100644
--- a/services/camera/libcameraservice/api2/DepthCompositeStream.h
+++ b/services/camera/libcameraservice/api2/DepthCompositeStream.h
@@ -51,7 +51,9 @@
     status_t createInternalStreams(const std::vector<sp<Surface>>& consumers,
             bool hasDeferredConsumer, uint32_t width, uint32_t height, int format,
             camera_stream_rotation_t rotation, int *id, const String8& physicalCameraId,
-            std::vector<int> *surfaceIds, int streamSetId, bool isShared) override;
+            const std::unordered_set<int32_t> &sensorPixelModesUsed,
+            std::vector<int> *surfaceIds,
+            int streamSetId, bool isShared) override;
     status_t deleteInternalStreams() override;
     status_t configureStream() override;
     status_t insertGbp(SurfaceMap* /*out*/outSurfaceMap, Vector<int32_t>* /*out*/outputStreamIds,
@@ -86,11 +88,17 @@
     };
 
     // Helper methods
-    static void getSupportedDepthSizes(const CameraMetadata& ch,
+    static void getSupportedDepthSizes(const CameraMetadata& ch, bool maxResolution,
             std::vector<std::tuple<size_t, size_t>>* depthSizes /*out*/);
     static status_t getMatchingDepthSize(size_t width, size_t height,
             const std::vector<std::tuple<size_t, size_t>>& supporedDepthSizes,
             size_t *depthWidth /*out*/, size_t *depthHeight /*out*/);
+    static status_t checkAndGetMatchingDepthSize(size_t width, size_t height,
+            const std::vector<std::tuple<size_t, size_t>> &depthSizes,
+            const std::vector<std::tuple<size_t, size_t>> &depthSizesMaximumResolution,
+            const std::unordered_set<int32_t> &sensorPixelModesUsed,
+            size_t *depthWidth /*out*/, size_t *depthHeight /*out*/);
 
     // Dynamic depth processing
     status_t encodeGrayscaleJpeg(size_t width, size_t height, uint8_t *in, void *out,
@@ -126,6 +134,7 @@
 
     ssize_t              mMaxJpegSize;
     std::vector<std::tuple<size_t, size_t>> mSupportedDepthSizes;
+    std::vector<std::tuple<size_t, size_t>> mSupportedDepthSizesMaximumResolution;
     std::vector<float>   mIntrinsicCalibration, mLensDistortion;
     bool                 mIsLogicalCamera;
 
diff --git a/services/camera/libcameraservice/api2/HeicCompositeStream.cpp b/services/camera/libcameraservice/api2/HeicCompositeStream.cpp
index 7d68485..582001d 100644
--- a/services/camera/libcameraservice/api2/HeicCompositeStream.cpp
+++ b/services/camera/libcameraservice/api2/HeicCompositeStream.cpp
@@ -36,6 +36,7 @@
 
 #include "common/CameraDeviceBase.h"
 #include "utils/ExifUtils.h"
+#include "utils/SessionConfigurationUtils.h"
 #include "HeicEncoderInfoManager.h"
 #include "HeicCompositeStream.h"
 
@@ -115,7 +116,9 @@
 status_t HeicCompositeStream::createInternalStreams(const std::vector<sp<Surface>>& consumers,
         bool /*hasDeferredConsumer*/, uint32_t width, uint32_t height, int format,
         camera_stream_rotation_t rotation, int *id, const String8& physicalCameraId,
-        std::vector<int> *surfaceIds, int /*streamSetId*/, bool /*isShared*/) {
+        const std::unordered_set<int32_t> &sensorPixelModesUsed,
+        std::vector<int> *surfaceIds,
+        int /*streamSetId*/, bool /*isShared*/) {
 
     sp<CameraDeviceBase> device = mDevice.promote();
     if (!device.get()) {
@@ -141,7 +144,8 @@
     mStaticInfo = device->info();
 
     res = device->createStream(mAppSegmentSurface, mAppSegmentMaxSize, 1, format,
-            kAppSegmentDataSpace, rotation, &mAppSegmentStreamId, physicalCameraId, surfaceIds);
+            kAppSegmentDataSpace, rotation, &mAppSegmentStreamId, physicalCameraId,
+            sensorPixelModesUsed, surfaceIds);
     if (res == OK) {
         mAppSegmentSurfaceId = (*surfaceIds)[0];
     } else {
@@ -177,7 +181,7 @@
     int srcStreamFmt = mUseGrid ? HAL_PIXEL_FORMAT_YCbCr_420_888 :
             HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
     res = device->createStream(mMainImageSurface, width, height, srcStreamFmt, kHeifDataSpace,
-            rotation, id, physicalCameraId, &sourceSurfaceId);
+            rotation, id, physicalCameraId, sensorPixelModesUsed, &sourceSurfaceId);
     if (res == OK) {
         mMainImageSurfaceId = sourceSurfaceId[0];
         mMainImageStreamId = *id;
diff --git a/services/camera/libcameraservice/api2/HeicCompositeStream.h b/services/camera/libcameraservice/api2/HeicCompositeStream.h
index cbd9d21..1077a1f 100644
--- a/services/camera/libcameraservice/api2/HeicCompositeStream.h
+++ b/services/camera/libcameraservice/api2/HeicCompositeStream.h
@@ -46,7 +46,9 @@
     status_t createInternalStreams(const std::vector<sp<Surface>>& consumers,
             bool hasDeferredConsumer, uint32_t width, uint32_t height, int format,
             camera_stream_rotation_t rotation, int *id, const String8& physicalCameraId,
-            std::vector<int> *surfaceIds, int streamSetId, bool isShared) override;
+            const std::unordered_set<int32_t> &sensorPixelModesUsed,
+            std::vector<int> *surfaceIds,
+            int streamSetId, bool isShared) override;
 
     status_t deleteInternalStreams() override;
 
diff --git a/services/camera/libcameraservice/common/Camera2ClientBase.cpp b/services/camera/libcameraservice/common/Camera2ClientBase.cpp
index 6fd8d45..1f79354 100644
--- a/services/camera/libcameraservice/common/Camera2ClientBase.cpp
+++ b/services/camera/libcameraservice/common/Camera2ClientBase.cpp
@@ -51,11 +51,13 @@
         const String8& cameraId,
         int api1CameraId,
         int cameraFacing,
+        int sensorOrientation,
         int clientPid,
         uid_t clientUid,
         int servicePid):
         TClientBase(cameraService, remoteCallback, clientPackageName, clientFeatureId,
-                cameraId, api1CameraId, cameraFacing, clientPid, clientUid, servicePid),
+                cameraId, api1CameraId, cameraFacing, sensorOrientation, clientPid, clientUid,
+                servicePid),
         mSharedCameraCallbacks(remoteCallback),
         mDeviceVersion(cameraService->getDeviceVersion(TClientBase::mCameraIdStr)),
         mDevice(new Camera3Device(cameraId)),
diff --git a/services/camera/libcameraservice/common/Camera2ClientBase.h b/services/camera/libcameraservice/common/Camera2ClientBase.h
index 1ce4393..dab0050 100644
--- a/services/camera/libcameraservice/common/Camera2ClientBase.h
+++ b/services/camera/libcameraservice/common/Camera2ClientBase.h
@@ -52,6 +52,7 @@
                       const String8& cameraId,
                       int api1CameraId,
                       int cameraFacing,
+                      int sensorOrientation,
                       int clientPid,
                       uid_t clientUid,
                       int servicePid);
diff --git a/services/camera/libcameraservice/common/CameraDeviceBase.h b/services/camera/libcameraservice/common/CameraDeviceBase.h
index 1be46d6..85b0cc2 100644
--- a/services/camera/libcameraservice/common/CameraDeviceBase.h
+++ b/services/camera/libcameraservice/common/CameraDeviceBase.h
@@ -164,9 +164,11 @@
             uint32_t width, uint32_t height, int format,
             android_dataspace dataSpace, camera_stream_rotation_t rotation, int *id,
             const String8& physicalCameraId,
+            const std::unordered_set<int32_t>  &sensorPixelModesUsed,
             std::vector<int> *surfaceIds = nullptr,
             int streamSetId = camera3::CAMERA3_STREAM_SET_ID_INVALID,
-            bool isShared = false, uint64_t consumerUsage = 0) = 0;
+            bool isShared = false, bool isMultiResolution = false,
+            uint64_t consumerUsage = 0) = 0;
 
     /**
      * Create an output stream of the requested size, format, rotation and
@@ -179,9 +181,11 @@
             bool hasDeferredConsumer, uint32_t width, uint32_t height, int format,
             android_dataspace dataSpace, camera_stream_rotation_t rotation, int *id,
             const String8& physicalCameraId,
+            const std::unordered_set<int32_t> &sensorPixelModesUsed,
             std::vector<int> *surfaceIds = nullptr,
             int streamSetId = camera3::CAMERA3_STREAM_SET_ID_INVALID,
-            bool isShared = false, uint64_t consumerUsage = 0) = 0;
+            bool isShared = false, bool isMultiResolution = false,
+            uint64_t consumerUsage = 0) = 0;
 
     /**
      * Create an input stream of width, height, and format.
@@ -189,7 +193,7 @@
      * Return value is the stream ID if non-negative and an error if negative.
      */
     virtual status_t createInputStream(uint32_t width, uint32_t height,
-            int32_t format, /*out*/ int32_t *id) = 0;
+            int32_t format, bool multiResolution, /*out*/ int32_t *id) = 0;
 
     struct StreamInfo {
         uint32_t width;
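
In the CameraDeviceBase changes above, sensorPixelModesUsed is added as a required parameter ahead of the defaulted tail, while isMultiResolution is appended with a default of false. A sketch with a hypothetical interface (FakeDevice, not CameraDeviceBase) showing the practical effect: every caller must now pass the mode set explicitly, but existing call shapes that never cared about multi-resolution keep compiling:

#include <cstdint>
#include <unordered_set>
#include <vector>

struct FakeDevice {
    int createStream(uint32_t width, uint32_t height, int format,
            const std::unordered_set<int32_t>& sensorPixelModesUsed,  // required: callers opt in explicitly
            std::vector<int>* surfaceIds = nullptr,
            bool isShared = false,
            bool isMultiResolution = false) {                         // defaulted: old call shapes still compile
        (void)width; (void)height; (void)format;
        (void)sensorPixelModesUsed; (void)surfaceIds; (void)isShared; (void)isMultiResolution;
        return 0;
    }
};

int main() {
    FakeDevice dev;
    std::unordered_set<int32_t> modes = {0};  // e.g. the default sensor pixel mode
    // Minimal call: only the new required set is added, defaulted flags untouched.
    dev.createStream(1920, 1080, 0x21 /* BLOB */, modes);
    // Opting a stream into multi-resolution output.
    dev.createStream(1920, 1080, 0x21, modes, nullptr, /*isShared*/false, /*isMultiResolution*/true);
    return 0;
}
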
diff --git a/services/camera/libcameraservice/common/CameraProviderManager.cpp b/services/camera/libcameraservice/common/CameraProviderManager.cpp
index e9dcb01..6dffc5d 100644
--- a/services/camera/libcameraservice/common/CameraProviderManager.cpp
+++ b/services/camera/libcameraservice/common/CameraProviderManager.cpp
@@ -20,7 +20,7 @@
 
 #include "CameraProviderManager.h"
 
-#include <android/hardware/camera/device/3.5/ICameraDevice.h>
+#include <android/hardware/camera/device/3.7/ICameraDevice.h>
 
 #include <algorithm>
 #include <chrono>
@@ -28,7 +28,6 @@
 #include <dlfcn.h>
 #include <future>
 #include <inttypes.h>
-#include <hardware/camera_common.h>
 #include <android/hidl/manager/1.2/IServiceManager.h>
 #include <hidl/ServiceManagement.h>
 #include <functional>
@@ -49,7 +48,7 @@
 using namespace ::android::hardware::camera::common::V1_0;
 using std::literals::chrono_literals::operator""s;
 using hardware::camera2::utils::CameraIdAndSessionConfiguration;
-using hardware::camera::provider::V2_6::CameraIdAndStreamCombination;
+using hardware::camera::provider::V2_7::CameraIdAndStreamCombination;
 
 namespace {
 const bool kEnableLazyHal(property_get_bool("ro.camera.enableLazyHal", false));
@@ -267,7 +266,7 @@
 }
 
 status_t CameraProviderManager::isSessionConfigurationSupported(const std::string& id,
-        const hardware::camera::device::V3_4::StreamConfiguration &configuration,
+        const hardware::camera::device::V3_7::StreamConfiguration &configuration,
         bool *status /*out*/) const {
     std::lock_guard<std::mutex> lock(mInterfaceMutex);
     auto deviceInfo = findDeviceInfoLocked(id);
@@ -475,12 +474,12 @@
 hardware::Return<void> CameraProviderManager::onRegistration(
         const hardware::hidl_string& /*fqName*/,
         const hardware::hidl_string& name,
-        bool /*preexisting*/) {
+        bool preexisting) {
     std::lock_guard<std::mutex> providerLock(mProviderLifecycleLock);
     {
         std::lock_guard<std::mutex> lock(mInterfaceMutex);
 
-        addProviderLocked(name);
+        addProviderLocked(name, preexisting);
     }
 
     sp<StatusListener> listener = getStatusListener();
@@ -687,9 +686,39 @@
     }
 }
 
-status_t CameraProviderManager::ProviderInfo::DeviceInfo3::addDynamicDepthTags() {
-    uint32_t depthExclTag = ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE;
-    uint32_t depthSizesTag = ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS;
+status_t CameraProviderManager::ProviderInfo::DeviceInfo3::addDynamicDepthTags(
+        bool maxResolution) {
+    const int32_t depthExclTag = ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE;
+
+    const int32_t scalerSizesTag =
+            camera3::SessionConfigurationUtils::getAppropriateModeTag(
+                    ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS, maxResolution);
+    const int32_t scalerMinFrameDurationsTag =
+            camera3::SessionConfigurationUtils::getAppropriateModeTag(
+                    ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS, maxResolution);
+    const int32_t scalerStallDurationsTag =
+            camera3::SessionConfigurationUtils::getAppropriateModeTag(
+                    ANDROID_SCALER_AVAILABLE_STALL_DURATIONS, maxResolution);
+
+    const int32_t depthSizesTag =
+            camera3::SessionConfigurationUtils::getAppropriateModeTag(
+                    ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS, maxResolution);
+    const int32_t depthStallDurationsTag =
+            camera3::SessionConfigurationUtils::getAppropriateModeTag(
+                    ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS, maxResolution);
+    const int32_t depthMinFrameDurationsTag =
+            camera3::SessionConfigurationUtils::getAppropriateModeTag(
+                    ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS, maxResolution);
+
+    const int32_t dynamicDepthSizesTag =
+            camera3::SessionConfigurationUtils::getAppropriateModeTag(
+                    ANDROID_DEPTH_AVAILABLE_DYNAMIC_DEPTH_STREAM_CONFIGURATIONS, maxResolution);
+    const int32_t dynamicDepthStallDurationsTag =
+            camera3::SessionConfigurationUtils::getAppropriateModeTag(
+                    ANDROID_DEPTH_AVAILABLE_DYNAMIC_DEPTH_STALL_DURATIONS, maxResolution);
+    const int32_t dynamicDepthMinFrameDurationsTag =
+            camera3::SessionConfigurationUtils::getAppropriateModeTag(
+                 ANDROID_DEPTH_AVAILABLE_DYNAMIC_DEPTH_MIN_FRAME_DURATIONS, maxResolution);
+
     auto& c = mCameraCharacteristics;
     std::vector<std::tuple<size_t, size_t>> supportedBlobSizes, supportedDepthSizes,
             supportedDynamicDepthSizes, internalDepthSizes;
@@ -719,7 +748,7 @@
         return BAD_VALUE;
     }
 
-    getSupportedSizes(c, ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS, HAL_PIXEL_FORMAT_BLOB,
+    getSupportedSizes(c, scalerSizesTag, HAL_PIXEL_FORMAT_BLOB,
             &supportedBlobSizes);
     getSupportedSizes(c, depthSizesTag, HAL_PIXEL_FORMAT_Y16, &supportedDepthSizes);
     if (supportedBlobSizes.empty() || supportedDepthSizes.empty()) {
@@ -746,10 +775,10 @@
     std::vector<int64_t> blobMinDurations, blobStallDurations;
     std::vector<int64_t> dynamicDepthMinDurations, dynamicDepthStallDurations;
 
-    getSupportedDurations(c, ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS,
-            HAL_PIXEL_FORMAT_Y16, internalDepthSizes, &depthMinDurations);
-    getSupportedDurations(c, ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS,
-            HAL_PIXEL_FORMAT_BLOB, supportedDynamicDepthSizes, &blobMinDurations);
+    getSupportedDurations(c, depthMinFrameDurationsTag, HAL_PIXEL_FORMAT_Y16, internalDepthSizes,
+                          &depthMinDurations);
+    getSupportedDurations(c, scalerMinFrameDurationsTag, HAL_PIXEL_FORMAT_BLOB,
+                          supportedDynamicDepthSizes, &blobMinDurations);
     if (blobMinDurations.empty() || depthMinDurations.empty() ||
             (depthMinDurations.size() != blobMinDurations.size())) {
         ALOGE("%s: Unexpected number of available depth min durations! %zu vs. %zu",
@@ -757,10 +786,10 @@
         return BAD_VALUE;
     }
 
-    getSupportedDurations(c, ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS,
-            HAL_PIXEL_FORMAT_Y16, internalDepthSizes, &depthStallDurations);
-    getSupportedDurations(c, ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,
-            HAL_PIXEL_FORMAT_BLOB, supportedDynamicDepthSizes, &blobStallDurations);
+    getSupportedDurations(c, depthStallDurationsTag, HAL_PIXEL_FORMAT_Y16, internalDepthSizes,
+            &depthStallDurations);
+    getSupportedDurations(c, scalerStallDurationsTag, HAL_PIXEL_FORMAT_BLOB,
+            supportedDynamicDepthSizes, &blobStallDurations);
     if (blobStallDurations.empty() || depthStallDurations.empty() ||
             (depthStallDurations.size() != blobStallDurations.size())) {
         ALOGE("%s: Unexpected number of available depth stall durations! %zu vs. %zu",
@@ -805,15 +834,14 @@
     supportedChTags.reserve(chTags.count + 3);
     supportedChTags.insert(supportedChTags.end(), chTags.data.i32,
             chTags.data.i32 + chTags.count);
-    supportedChTags.push_back(ANDROID_DEPTH_AVAILABLE_DYNAMIC_DEPTH_STREAM_CONFIGURATIONS);
-    supportedChTags.push_back(ANDROID_DEPTH_AVAILABLE_DYNAMIC_DEPTH_MIN_FRAME_DURATIONS);
-    supportedChTags.push_back(ANDROID_DEPTH_AVAILABLE_DYNAMIC_DEPTH_STALL_DURATIONS);
-    c.update(ANDROID_DEPTH_AVAILABLE_DYNAMIC_DEPTH_STREAM_CONFIGURATIONS,
-            dynamicDepthEntries.data(), dynamicDepthEntries.size());
-    c.update(ANDROID_DEPTH_AVAILABLE_DYNAMIC_DEPTH_MIN_FRAME_DURATIONS,
-            dynamicDepthMinDurationEntries.data(), dynamicDepthMinDurationEntries.size());
-    c.update(ANDROID_DEPTH_AVAILABLE_DYNAMIC_DEPTH_STALL_DURATIONS,
-            dynamicDepthStallDurationEntries.data(), dynamicDepthStallDurationEntries.size());
+    supportedChTags.push_back(dynamicDepthSizesTag);
+    supportedChTags.push_back(dynamicDepthMinFrameDurationsTag);
+    supportedChTags.push_back(dynamicDepthStallDurationsTag);
+    c.update(dynamicDepthSizesTag, dynamicDepthEntries.data(), dynamicDepthEntries.size());
+    c.update(dynamicDepthMinFrameDurationsTag, dynamicDepthMinDurationEntries.data(),
+            dynamicDepthMinDurationEntries.size());
+    c.update(dynamicDepthStallDurationsTag, dynamicDepthStallDurationEntries.data(),
+             dynamicDepthStallDurationEntries.size());
     c.update(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS, supportedChTags.data(),
             supportedChTags.size());
 
@@ -1047,7 +1075,24 @@
     return OK;
 }
 
-status_t CameraProviderManager::ProviderInfo::DeviceInfo3::deriveHeicTags() {
+status_t CameraProviderManager::ProviderInfo::DeviceInfo3::deriveHeicTags(bool maxResolution) {
+    int32_t scalerStreamSizesTag =
+            camera3::SessionConfigurationUtils::getAppropriateModeTag(
+                    ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS, maxResolution);
+    int32_t scalerMinFrameDurationsTag =
+            camera3::SessionConfigurationUtils::getAppropriateModeTag(
+                    ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS, maxResolution);
+
+    int32_t heicStreamSizesTag =
+            camera3::SessionConfigurationUtils::getAppropriateModeTag(
+                    ANDROID_HEIC_AVAILABLE_HEIC_STREAM_CONFIGURATIONS, maxResolution);
+    int32_t heicMinFrameDurationsTag =
+            camera3::SessionConfigurationUtils::getAppropriateModeTag(
+                    ANDROID_HEIC_AVAILABLE_HEIC_MIN_FRAME_DURATIONS, maxResolution);
+    int32_t heicStallDurationsTag =
+            camera3::SessionConfigurationUtils::getAppropriateModeTag(
+                    ANDROID_HEIC_AVAILABLE_HEIC_STALL_DURATIONS, maxResolution);
+
     auto& c = mCameraCharacteristics;
 
     camera_metadata_entry halHeicSupport = c.find(ANDROID_HEIC_INFO_SUPPORTED);
@@ -1076,10 +1121,8 @@
     std::vector<int64_t> heicDurations;
     std::vector<int64_t> heicStallDurations;
 
-    camera_metadata_entry halStreamConfigs =
-            c.find(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS);
-    camera_metadata_entry minFrameDurations =
-            c.find(ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS);
+    camera_metadata_entry halStreamConfigs = c.find(scalerStreamSizesTag);
+    camera_metadata_entry minFrameDurations = c.find(scalerMinFrameDurationsTag);
 
     status_t res = fillHeicStreamCombinations(&heicOutputs, &heicDurations, &heicStallDurations,
             halStreamConfigs, minFrameDurations);
@@ -1089,12 +1132,9 @@
         return res;
     }
 
-    c.update(ANDROID_HEIC_AVAILABLE_HEIC_STREAM_CONFIGURATIONS,
-           heicOutputs.data(), heicOutputs.size());
-    c.update(ANDROID_HEIC_AVAILABLE_HEIC_MIN_FRAME_DURATIONS,
-            heicDurations.data(), heicDurations.size());
-    c.update(ANDROID_HEIC_AVAILABLE_HEIC_STALL_DURATIONS,
-            heicStallDurations.data(), heicStallDurations.size());
+    c.update(heicStreamSizesTag, heicOutputs.data(), heicOutputs.size());
+    c.update(heicMinFrameDurationsTag, heicDurations.data(), heicDurations.size());
+    c.update(heicStallDurationsTag, heicStallDurations.data(), heicStallDurations.size());
 
     return OK;
 }
@@ -1190,31 +1230,53 @@
     return falseRet;
 }
 
-status_t CameraProviderManager::addProviderLocked(const std::string& newProvider) {
-    for (const auto& providerInfo : mProviders) {
-        if (providerInfo->mProviderName == newProvider) {
-            ALOGW("%s: Camera provider HAL with name '%s' already registered", __FUNCTION__,
-                    newProvider.c_str());
-            return ALREADY_EXISTS;
-        }
-    }
-
+status_t CameraProviderManager::tryToInitializeProviderLocked(
+        const std::string& providerName, const sp<ProviderInfo>& providerInfo) {
     sp<provider::V2_4::ICameraProvider> interface;
-    interface = mServiceProxy->tryGetService(newProvider);
+    interface = mServiceProxy->tryGetService(providerName);
 
     if (interface == nullptr) {
-        ALOGE("%s: Camera provider HAL '%s' is not actually available", __FUNCTION__,
-                newProvider.c_str());
+        // The interface may not be started yet. In that case, this is not a
+        // fatal error.
+        ALOGW("%s: Camera provider HAL '%s' is not actually available", __FUNCTION__,
+                providerName.c_str());
         return BAD_VALUE;
     }
 
-    sp<ProviderInfo> providerInfo = new ProviderInfo(newProvider, this);
-    status_t res = providerInfo->initialize(interface, mDeviceState);
-    if (res != OK) {
-        return res;
+    return providerInfo->initialize(interface, mDeviceState);
+}
+
+status_t CameraProviderManager::addProviderLocked(const std::string& newProvider,
+        bool preexisting) {
+    // Several camera provider instances can be temporarily present.
+    // Defer initialization of a new instance until the older instance is properly removed.
+    auto providerInstance = newProvider + "-" + std::to_string(mProviderInstanceId);
+    bool providerPresent = false;
+    for (const auto& providerInfo : mProviders) {
+        if (providerInfo->mProviderName == newProvider) {
+            ALOGW("%s: Camera provider HAL with name '%s' already registered",
+                    __FUNCTION__, newProvider.c_str());
+            if (preexisting) {
+                return ALREADY_EXISTS;
+            } else {
+                ALOGW("%s: The new provider instance will get initialized immediately after the"
+                        " currently present instance is removed!", __FUNCTION__);
+                providerPresent = true;
+                break;
+            }
+        }
+    }
+
+    sp<ProviderInfo> providerInfo = new ProviderInfo(newProvider, providerInstance, this);
+    if (!providerPresent) {
+        status_t res = tryToInitializeProviderLocked(newProvider, providerInfo);
+        if (res != OK) {
+            return res;
+        }
     }
 
     mProviders.push_back(providerInfo);
+    mProviderInstanceId++;
 
     return OK;
 }
@@ -1224,12 +1286,14 @@
     std::unique_lock<std::mutex> lock(mInterfaceMutex);
     std::vector<String8> removedDeviceIds;
     status_t res = NAME_NOT_FOUND;
+    std::string removedProviderName;
     for (auto it = mProviders.begin(); it != mProviders.end(); it++) {
-        if ((*it)->mProviderName == provider) {
+        if ((*it)->mProviderInstance == provider) {
             removedDeviceIds.reserve((*it)->mDevices.size());
             for (auto& deviceInfo : (*it)->mDevices) {
                 removedDeviceIds.push_back(String8(deviceInfo->mId.c_str()));
             }
+            removedProviderName = (*it)->mProviderName;
             mProviders.erase(it);
             res = OK;
             break;
@@ -1239,6 +1303,14 @@
         ALOGW("%s: Camera provider HAL with name '%s' is not registered", __FUNCTION__,
                 provider.c_str());
     } else {
+        // Check if there are any newer camera instances from the same provider and try to
+        // initialize.
+        for (const auto& providerInfo : mProviders) {
+            if (providerInfo->mProviderName == removedProviderName) {
+                return tryToInitializeProviderLocked(removedProviderName, providerInfo);
+            }
+        }
+
         // Inform camera service of loss of presence for all the devices from this provider,
         // without lock held for reentrancy
         sp<StatusListener> listener = getStatusListener();
@@ -1247,7 +1319,9 @@
             for (auto& id : removedDeviceIds) {
                 listener->onDeviceStatusChanged(id, CameraDeviceStatus::NOT_PRESENT);
             }
+            lock.lock();
         }
+
     }
     return res;
 }
@@ -1261,8 +1335,10 @@
 
 CameraProviderManager::ProviderInfo::ProviderInfo(
         const std::string &providerName,
+        const std::string &providerInstance,
         CameraProviderManager *manager) :
         mProviderName(providerName),
+        mProviderInstance(providerInstance),
         mProviderTagid(generateVendorTagId(providerName)),
         mUniqueDeviceCount(0),
         mManager(manager) {
@@ -1302,6 +1378,14 @@
                 mMinorVersion = 5;
             }
         }
+    } else {
+        auto cast2_7 = provider::V2_7::ICameraProvider::castFrom(interface);
+        if (cast2_7.isOk()) {
+            sp<provider::V2_7::ICameraProvider> interface2_7 = cast2_7;
+            if (interface2_7 != nullptr) {
+                mMinorVersion = 7;
+            }
+        }
     }
 
     // cameraDeviceStatusChange callbacks may be called (and causing new devices added)
@@ -1578,7 +1662,7 @@
 
 status_t CameraProviderManager::ProviderInfo::dump(int fd, const Vector<String16>&) const {
     dprintf(fd, "== Camera Provider HAL %s (v2.%d, %s) static info: %zu devices: ==\n",
-            mProviderName.c_str(),
+            mProviderInstance.c_str(),
             mMinorVersion,
             mIsRemote ? "remote" : "passthrough",
             mDevices.size());
@@ -1894,12 +1978,12 @@
 void CameraProviderManager::ProviderInfo::serviceDied(uint64_t cookie,
         const wp<hidl::base::V1_0::IBase>& who) {
     (void) who;
-    ALOGI("Camera provider '%s' has died; removing it", mProviderName.c_str());
+    ALOGI("Camera provider '%s' has died; removing it", mProviderInstance.c_str());
     if (cookie != mId) {
         ALOGW("%s: Unexpected serviceDied cookie %" PRIu64 ", expected %" PRIu32,
                 __FUNCTION__, cookie, mId);
     }
-    mManager->removeProvider(mProviderName);
+    mManager->removeProvider(mProviderInstance);
 }
 
 status_t CameraProviderManager::ProviderInfo::setUpVendorTags() {
@@ -1973,38 +2057,71 @@
             // TODO: This might be some other problem
             return INVALID_OPERATION;
         }
-        auto castResult = provider::V2_6::ICameraProvider::castFrom(interface);
-        if (castResult.isOk()) {
-            sp<provider::V2_6::ICameraProvider> interface_2_6 = castResult;
-            if (interface_2_6 != nullptr) {
-                Status callStatus;
-                auto cb =
-                        [&isSupported, &callStatus](Status s, bool supported) {
-                              callStatus = s;
-                              *isSupported = supported; };
+        auto castResult2_6 = provider::V2_6::ICameraProvider::castFrom(interface);
+        auto castResult2_7 = provider::V2_7::ICameraProvider::castFrom(interface);
+        Status callStatus;
+        auto cb =
+                [&isSupported, &callStatus](Status s, bool supported) {
+                      callStatus = s;
+                      *isSupported = supported; };
 
-                auto ret =  interface_2_6->isConcurrentStreamCombinationSupported(
-                            halCameraIdsAndStreamCombinations, cb);
-                if (ret.isOk()) {
-                    switch (callStatus) {
-                        case Status::OK:
-                            // Expected case, do nothing.
-                            res = OK;
-                            break;
-                        case Status::METHOD_NOT_SUPPORTED:
-                            res = INVALID_OPERATION;
-                            break;
-                        default:
-                            ALOGE("%s: Session configuration query failed: %d", __FUNCTION__,
-                                      callStatus);
-                            res = UNKNOWN_ERROR;
-                    }
-                } else {
-                    ALOGE("%s: Unexpected binder error: %s", __FUNCTION__, ret.description().c_str());
-                    res = UNKNOWN_ERROR;
-                }
-                return res;
+        ::android::hardware::Return<void> ret;
+        sp<provider::V2_7::ICameraProvider> interface_2_7;
+        sp<provider::V2_6::ICameraProvider> interface_2_6;
+        if (mMinorVersion >= 7 && castResult2_7.isOk()) {
+            interface_2_7 = castResult2_7;
+            if (interface_2_7 != nullptr) {
+                ret = interface_2_7->isConcurrentStreamCombinationSupported_2_7(
+                        halCameraIdsAndStreamCombinations, cb);
             }
+        } else if (mMinorVersion == 6 && castResult2_6.isOk()) {
+            interface_2_6 = castResult2_6;
+            if (interface_2_6 != nullptr) {
+                hardware::hidl_vec<provider::V2_6::CameraIdAndStreamCombination>
+                        halCameraIdsAndStreamCombinations_2_6;
+                size_t numStreams = halCameraIdsAndStreamCombinations.size();
+                halCameraIdsAndStreamCombinations_2_6.resize(numStreams);
+                for (size_t i = 0; i < numStreams; i++) {
+                    using namespace camera3;
+                    auto const& combination = halCameraIdsAndStreamCombinations[i];
+                    halCameraIdsAndStreamCombinations_2_6[i].cameraId = combination.cameraId;
+                    bool success =
+                            SessionConfigurationUtils::convertHALStreamCombinationFromV37ToV34(
+                                    halCameraIdsAndStreamCombinations_2_6[i].streamConfiguration,
+                                    combination.streamConfiguration);
+                    if (!success) {
+                        *isSupported = false;
+                        return OK;
+                    }
+                }
+                ret = interface_2_6->isConcurrentStreamCombinationSupported(
+                        halCameraIdsAndStreamCombinations_2_6, cb);
+            }
+        }
+
+        if (interface_2_7 != nullptr || interface_2_6 != nullptr) {
+            if (ret.isOk()) {
+                switch (callStatus) {
+                    case Status::OK:
+                        // Expected case, do nothing.
+                        res = OK;
+                        break;
+                    case Status::METHOD_NOT_SUPPORTED:
+                        res = INVALID_OPERATION;
+                        break;
+                    default:
+                        ALOGE("%s: Session configuration query failed: %d", __FUNCTION__,
+                                  callStatus);
+                        res = UNKNOWN_ERROR;
+                }
+            } else {
+                ALOGE("%s: Unexpected binder error: %s", __FUNCTION__, ret.description().c_str());
+                res = UNKNOWN_ERROR;
+            }
+            return res;
         }
     }
     // unsupported operation
@@ -2184,6 +2301,21 @@
         ALOGE("%s: Unable to derive HEIC tags based on camera and media capabilities: %s (%d)",
                 __FUNCTION__, strerror(-res), res);
     }
+
+    if (camera3::SessionConfigurationUtils::isUltraHighResolutionSensor(mCameraCharacteristics)) {
+        status_t status = addDynamicDepthTags(/*maxResolution*/true);
+        if (OK != status) {
+            ALOGE("%s: Failed appending dynamic depth tags for maximum resolution mode: %s (%d)",
+                    __FUNCTION__, strerror(-status), status);
+        }
+
+        status = deriveHeicTags(/*maxResolution*/true);
+        if (OK != status) {
+            ALOGE("%s: Unable to derive HEIC tags based on camera and media capabilities for"
+                    "maximum resolution mode: %s (%d)", __FUNCTION__, strerror(-status), status);
+        }
+    }
+
     res = addRotateCropTags();
     if (OK != res) {
         ALOGE("%s: Unable to add default SCALER_ROTATE_AND_CROP tags: %s (%d)", __FUNCTION__,
@@ -2374,7 +2506,7 @@
 }
 
 status_t CameraProviderManager::ProviderInfo::DeviceInfo3::isSessionConfigurationSupported(
-        const hardware::camera::device::V3_4::StreamConfiguration &configuration,
+        const hardware::camera::device::V3_7::StreamConfiguration &configuration,
         bool *status /*out*/) {
 
     const sp<CameraProviderManager::ProviderInfo::DeviceInfo3::InterfaceT> interface =
@@ -2382,19 +2514,33 @@
     if (interface == nullptr) {
         return DEAD_OBJECT;
     }
-    auto castResult = device::V3_5::ICameraDevice::castFrom(interface);
-    sp<hardware::camera::device::V3_5::ICameraDevice> interface_3_5 = castResult;
-    if (interface_3_5 == nullptr) {
-        return INVALID_OPERATION;
-    }
+    auto castResult_3_5 = device::V3_5::ICameraDevice::castFrom(interface);
+    sp<hardware::camera::device::V3_5::ICameraDevice> interface_3_5 = castResult_3_5;
+    auto castResult_3_7 = device::V3_7::ICameraDevice::castFrom(interface);
+    sp<hardware::camera::device::V3_7::ICameraDevice> interface_3_7 = castResult_3_7;
 
     status_t res;
     Status callStatus;
-    auto ret =  interface_3_5->isStreamCombinationSupported(configuration,
+    ::android::hardware::Return<void> ret;
+    auto halCb =
             [&callStatus, &status] (Status s, bool combStatus) {
                 callStatus = s;
                 *status = combStatus;
-            });
+            };
+    if (interface_3_7 != nullptr) {
+        ret = interface_3_7->isStreamCombinationSupported_3_7(configuration, halCb);
+    } else if (interface_3_5 != nullptr) {
+        hardware::camera::device::V3_4::StreamConfiguration configuration_3_4;
+        bool success = camera3::SessionConfigurationUtils::convertHALStreamCombinationFromV37ToV34(
+                configuration_3_4, configuration);
+        if (!success) {
+            *status = false;
+            return OK;
+        }
+        ret = interface_3_5->isStreamCombinationSupported(configuration_3_4, halCb);
+    } else {
+        return INVALID_OPERATION;
+    }
     if (ret.isOk()) {
         switch (callStatus) {
             case Status::OK:
@@ -2769,13 +2915,13 @@
     bool shouldExit = false;
     status_t res = OK;
     for (auto &cameraIdAndSessionConfig : cameraIdsAndSessionConfigs) {
-        hardware::camera::device::V3_4::StreamConfiguration streamConfiguration;
+        hardware::camera::device::V3_7::StreamConfiguration streamConfiguration;
         CameraMetadata deviceInfo;
         res = getCameraCharacteristicsLocked(cameraIdAndSessionConfig.mCameraId, &deviceInfo);
         if (res != OK) {
             return res;
         }
-        metadataGetter getMetadata =
+        camera3::metadataGetter getMetadata =
                 [this](const String8 &id) {
                     CameraMetadata physicalDeviceInfo;
                     getCameraCharacteristicsLocked(id.string(), &physicalDeviceInfo);
@@ -2784,7 +2930,7 @@
         std::vector<std::string> physicalCameraIds;
         isLogicalCameraLocked(cameraIdAndSessionConfig.mCameraId, &physicalCameraIds);
         bStatus =
-            SessionConfigurationUtils::convertToHALStreamCombination(
+            camera3::SessionConfigurationUtils::convertToHALStreamCombination(
                     cameraIdAndSessionConfig.mSessionConfiguration,
                     String8(cameraIdAndSessionConfig.mCameraId.c_str()), deviceInfo, getMetadata,
                     physicalCameraIds, streamConfiguration, &shouldExit);
diff --git a/services/camera/libcameraservice/common/CameraProviderManager.h b/services/camera/libcameraservice/common/CameraProviderManager.h
index 8727e7f..5531dd7 100644
--- a/services/camera/libcameraservice/common/CameraProviderManager.h
+++ b/services/camera/libcameraservice/common/CameraProviderManager.h
@@ -33,7 +33,8 @@
 #include <android/hardware/camera/provider/2.5/ICameraProvider.h>
 #include <android/hardware/camera/provider/2.6/ICameraProviderCallback.h>
 #include <android/hardware/camera/provider/2.6/ICameraProvider.h>
-#include <android/hardware/camera/device/3.4/ICameraDeviceSession.h>
+#include <android/hardware/camera/provider/2.7/ICameraProvider.h>
+#include <android/hardware/camera/device/3.7/types.h>
 #include <android/hidl/manager/1.0/IServiceNotification.h>
 #include <camera/VendorTagDescriptor.h>
 
@@ -78,6 +79,16 @@
    HIDDEN_SECURE_CAMERA
 };
 
+#define CAMERA_DEVICE_API_VERSION_1_0 HARDWARE_DEVICE_API_VERSION(1, 0)
+#define CAMERA_DEVICE_API_VERSION_3_0 HARDWARE_DEVICE_API_VERSION(3, 0)
+#define CAMERA_DEVICE_API_VERSION_3_1 HARDWARE_DEVICE_API_VERSION(3, 1)
+#define CAMERA_DEVICE_API_VERSION_3_2 HARDWARE_DEVICE_API_VERSION(3, 2)
+#define CAMERA_DEVICE_API_VERSION_3_3 HARDWARE_DEVICE_API_VERSION(3, 3)
+#define CAMERA_DEVICE_API_VERSION_3_4 HARDWARE_DEVICE_API_VERSION(3, 4)
+#define CAMERA_DEVICE_API_VERSION_3_5 HARDWARE_DEVICE_API_VERSION(3, 5)
+#define CAMERA_DEVICE_API_VERSION_3_6 HARDWARE_DEVICE_API_VERSION(3, 6)
+#define CAMERA_DEVICE_API_VERSION_3_7 HARDWARE_DEVICE_API_VERSION(3, 7)
+
 /**
  * A manager for all camera providers available on an Android device.
  *
@@ -227,7 +238,7 @@
      * Check for device support of specific stream combination.
      */
     status_t isSessionConfigurationSupported(const std::string& id,
-            const hardware::camera::device::V3_4::StreamConfiguration &configuration,
+            const hardware::camera::device::V3_7::StreamConfiguration &configuration,
             bool *status /*out*/) const;
 
     /**
@@ -354,6 +365,7 @@
             virtual public hardware::hidl_death_recipient
     {
         const std::string mProviderName;
+        const std::string mProviderInstance;
         const metadata_vendor_id_t mProviderTagid;
         int mMinorVersion;
         sp<VendorTagDescriptor> mVendorTagDescriptor;
@@ -368,7 +380,7 @@
 
         sp<hardware::camera::provider::V2_4::ICameraProvider> mSavedInterface;
 
-        ProviderInfo(const std::string &providerName,
+        ProviderInfo(const std::string &providerName, const std::string &providerInstance,
                 CameraProviderManager *manager);
         ~ProviderInfo();
 
@@ -430,7 +442,7 @@
          */
         status_t isConcurrentSessionConfigurationSupported(
                 const hardware::hidl_vec<
-                        hardware::camera::provider::V2_6::CameraIdAndStreamCombination>
+                        hardware::camera::provider::V2_7::CameraIdAndStreamCombination>
                                 &halCameraIdsAndStreamCombinations,
                 bool *isSupported);
 
@@ -470,7 +482,7 @@
             }
 
             virtual status_t isSessionConfigurationSupported(
-                    const hardware::camera::device::V3_4::StreamConfiguration &/*configuration*/,
+                    const hardware::camera::device::V3_7::StreamConfiguration &/*configuration*/,
                     bool * /*status*/) {
                 return INVALID_OPERATION;
             }
@@ -529,7 +541,7 @@
             virtual status_t getPhysicalCameraCharacteristics(const std::string& physicalCameraId,
                     CameraMetadata *characteristics) const override;
             virtual status_t isSessionConfigurationSupported(
-                    const hardware::camera::device::V3_4::StreamConfiguration &configuration,
+                    const hardware::camera::device::V3_7::StreamConfiguration &configuration,
                     bool *status /*out*/)
                     override;
 
@@ -545,8 +557,8 @@
             void queryPhysicalCameraIds();
             SystemCameraKind getSystemCameraKind();
             status_t fixupMonochromeTags();
-            status_t addDynamicDepthTags();
-            status_t deriveHeicTags();
+            status_t addDynamicDepthTags(bool maxResolution = false);
+            status_t deriveHeicTags(bool maxResolution = false);
             status_t addRotateCropTags();
             status_t addPreCorrectionActiveArraySize();
 
@@ -646,7 +658,10 @@
             hardware::hidl_version minVersion = hardware::hidl_version{0,0},
             hardware::hidl_version maxVersion = hardware::hidl_version{1000,0}) const;
 
-    status_t addProviderLocked(const std::string& newProvider);
+    status_t addProviderLocked(const std::string& newProvider, bool preexisting = false);
+
+    status_t tryToInitializeProviderLocked(const std::string& providerName,
+            const sp<ProviderInfo>& providerInfo);
 
     bool isLogicalCameraLocked(const std::string& id, std::vector<std::string>* physicalCameraIds);
 
@@ -655,6 +670,7 @@
 
     bool isValidDeviceLocked(const std::string &id, uint16_t majorVersion) const;
 
+    size_t mProviderInstanceId = 0;
     std::vector<sp<ProviderInfo>> mProviders;
 
     void addProviderToMap(
@@ -684,7 +700,7 @@
     status_t convertToHALStreamCombinationAndCameraIdsLocked(
               const std::vector<hardware::camera2::utils::CameraIdAndSessionConfiguration>
                       &cameraIdsAndSessionConfigs,
-              hardware::hidl_vec<hardware::camera::provider::V2_6::CameraIdAndStreamCombination>
+              hardware::hidl_vec<hardware::camera::provider::V2_7::CameraIdAndStreamCombination>
                       *halCameraIdsAndStreamCombinations,
               bool *earlyExit);
 };
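
Many of the characteristics lookups in this file now go through SessionConfigurationUtils::getAppropriateModeTag(tag, maxResolution), which substitutes the maximum-resolution variant of a tag when querying an ultra-high-resolution sensor's max-resolution mode. A sketch of that dispatch using made-up enum values rather than the real camera metadata tag constants:

#include <cstdint>

enum IllustrativeTag : int32_t {
    SCALER_AVAILABLE_STREAM_CONFIGURATIONS = 1,
    SCALER_AVAILABLE_STREAM_CONFIGURATIONS_MAX_RES = 2,
    DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS = 3,
    DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS_MAX_RES = 4,
};

static int32_t getAppropriateModeTagSketch(int32_t defaultTag, bool maxResolution) {
    if (!maxResolution) {
        return defaultTag;  // default sensor pixel mode: use the tag as-is
    }
    switch (defaultTag) {
        case SCALER_AVAILABLE_STREAM_CONFIGURATIONS:
            return SCALER_AVAILABLE_STREAM_CONFIGURATIONS_MAX_RES;
        case DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS:
            return DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS_MAX_RES;
        default:
            return defaultTag;  // no max-resolution counterpart defined
    }
}

int main() {
    return getAppropriateModeTagSketch(SCALER_AVAILABLE_STREAM_CONFIGURATIONS, true) ==
            SCALER_AVAILABLE_STREAM_CONFIGURATIONS_MAX_RES ? 0 : 1;
}
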
diff --git a/services/camera/libcameraservice/device3/Camera3BufferManager.cpp b/services/camera/libcameraservice/device3/Camera3BufferManager.cpp
index d6bf83e..a556200 100644
--- a/services/camera/libcameraservice/device3/Camera3BufferManager.cpp
+++ b/services/camera/libcameraservice/device3/Camera3BufferManager.cpp
@@ -40,16 +40,17 @@
     ATRACE_CALL();
 
     int streamId = streamInfo.streamId;
-    int streamSetId = streamInfo.streamSetId;
+    StreamSetKey streamSetKey = {streamInfo.streamSetId, streamInfo.isMultiRes};
 
-    if (streamId == CAMERA3_STREAM_ID_INVALID || streamSetId == CAMERA3_STREAM_SET_ID_INVALID) {
+    if (streamId == CAMERA3_STREAM_ID_INVALID ||
+            streamSetKey.id == CAMERA3_STREAM_SET_ID_INVALID) {
         ALOGE("%s: Stream id (%d) or stream set id (%d) is invalid",
-                __FUNCTION__, streamId, streamSetId);
+                __FUNCTION__, streamId, streamSetKey.id);
         return BAD_VALUE;
     }
     if (streamInfo.totalBufferCount > kMaxBufferCount || streamInfo.totalBufferCount == 0) {
         ALOGE("%s: Stream id (%d) with stream set id (%d) total buffer count %zu is invalid",
-                __FUNCTION__, streamId, streamSetId, streamInfo.totalBufferCount);
+                __FUNCTION__, streamId, streamSetKey.id, streamInfo.totalBufferCount);
         return BAD_VALUE;
     }
     if (!streamInfo.isConfigured) {
@@ -75,7 +76,8 @@
     for (size_t i = 0; i < mStreamSetMap.size(); i++) {
         ssize_t streamIdx = mStreamSetMap[i].streamInfoMap.indexOfKey(streamId);
         if (streamIdx != NAME_NOT_FOUND &&
-            mStreamSetMap[i].streamInfoMap[streamIdx].streamSetId != streamInfo.streamSetId) {
+            (mStreamSetMap[i].streamInfoMap[streamIdx].streamSetId != streamInfo.streamSetId ||
+             mStreamSetMap[i].streamInfoMap[streamIdx].isMultiRes != streamInfo.isMultiRes)) {
             ALOGE("%s: It is illegal to register the same stream id with different stream set",
                     __FUNCTION__);
             return BAD_VALUE;
@@ -83,20 +85,20 @@
     }
     // Check if there is an existing stream set registered; if not, create one; otherwise, add this
     // stream info to the existing stream set entry.
-    ssize_t setIdx = mStreamSetMap.indexOfKey(streamSetId);
+    ssize_t setIdx = mStreamSetMap.indexOfKey(streamSetKey);
     if (setIdx == NAME_NOT_FOUND) {
-        ALOGV("%s: stream set %d is not registered to stream set map yet, create it.",
-                __FUNCTION__, streamSetId);
+        ALOGV("%s: stream set %d(%d) is not registered to stream set map yet, create it.",
+                __FUNCTION__, streamSetKey.id, streamSetKey.isMultiRes);
         // Create stream info map, then add to mStreamSetMap.
         StreamSet newStreamSet;
-        setIdx = mStreamSetMap.add(streamSetId, newStreamSet);
+        setIdx = mStreamSetMap.add(streamSetKey, newStreamSet);
     }
     // Update stream set map and water mark.
     StreamSet& currentStreamSet = mStreamSetMap.editValueAt(setIdx);
     ssize_t streamIdx = currentStreamSet.streamInfoMap.indexOfKey(streamId);
     if (streamIdx != NAME_NOT_FOUND) {
-        ALOGW("%s: stream %d was already registered with stream set %d",
-                __FUNCTION__, streamId, streamSetId);
+        ALOGW("%s: stream %d was already registered with stream set %d(%d)",
+                __FUNCTION__, streamId, streamSetKey.id, streamSetKey.isMultiRes);
         return OK;
     }
     currentStreamSet.streamInfoMap.add(streamId, streamInfo);
@@ -113,21 +115,22 @@
     return OK;
 }
 
-status_t Camera3BufferManager::unregisterStream(int streamId, int streamSetId) {
+status_t Camera3BufferManager::unregisterStream(int streamId, int streamSetId, bool isMultiRes) {
     ATRACE_CALL();
 
     Mutex::Autolock l(mLock);
-    ALOGV("%s: unregister stream %d with stream set %d", __FUNCTION__,
-            streamId, streamSetId);
+    ALOGV("%s: unregister stream %d with stream set %d(%d)", __FUNCTION__,
+            streamId, streamSetId, isMultiRes);
 
-    if (!checkIfStreamRegisteredLocked(streamId, streamSetId)){
-        ALOGE("%s: stream %d with set id %d wasn't properly registered to this buffer manager!",
-                __FUNCTION__, streamId, streamSetId);
+    StreamSetKey streamSetKey = {streamSetId, isMultiRes};
+    if (!checkIfStreamRegisteredLocked(streamId, streamSetKey)){
+        ALOGE("%s: stream %d with set %d(%d) wasn't properly registered to this"
+                " buffer manager!", __FUNCTION__, streamId, streamSetId, isMultiRes);
         return BAD_VALUE;
     }
 
     // De-list all the buffers associated with this stream first.
-    StreamSet& currentSet = mStreamSetMap.editValueFor(streamSetId);
+    StreamSet& currentSet = mStreamSetMap.editValueFor(streamSetKey);
     BufferCountMap& handOutBufferCounts = currentSet.handoutBufferCountMap;
     BufferCountMap& attachedBufferCounts = currentSet.attachedBufferCountMap;
     InfoMap& infoMap = currentSet.streamInfoMap;
@@ -150,26 +153,28 @@
 
     // Remove this stream set if all its streams have been removed.
     if (handOutBufferCounts.size() == 0 && infoMap.size() == 0) {
-        mStreamSetMap.removeItem(streamSetId);
+        mStreamSetMap.removeItem(streamSetKey);
     }
 
     return OK;
 }
 
-void Camera3BufferManager::notifyBufferRemoved(int streamId, int streamSetId) {
+void Camera3BufferManager::notifyBufferRemoved(int streamId, int streamSetId, bool isMultiRes) {
     Mutex::Autolock l(mLock);
-    StreamSet &streamSet = mStreamSetMap.editValueFor(streamSetId);
+    StreamSetKey streamSetKey = {streamSetId, isMultiRes};
+    StreamSet &streamSet = mStreamSetMap.editValueFor(streamSetKey);
     size_t& attachedBufferCount =
             streamSet.attachedBufferCountMap.editValueFor(streamId);
     attachedBufferCount--;
 }
 
 status_t Camera3BufferManager::checkAndFreeBufferOnOtherStreamsLocked(
-        int streamId, int streamSetId) {
+        int streamId, StreamSetKey streamSetKey) {
     StreamId firstOtherStreamId = CAMERA3_STREAM_ID_INVALID;
-    StreamSet &streamSet = mStreamSetMap.editValueFor(streamSetId);
+    StreamSet &streamSet = mStreamSetMap.editValueFor(streamSetKey);
     if (streamSet.streamInfoMap.size() == 1) {
-        ALOGV("StreamSet %d has no other stream available to free", streamSetId);
+        ALOGV("StreamSet %d(%d) has no other stream available to free",
+                streamSetKey.id, streamSetKey.isMultiRes);
         return OK;
     }
 
@@ -190,7 +195,8 @@
         firstOtherStreamId = CAMERA3_STREAM_ID_INVALID;
     }
     if (firstOtherStreamId == CAMERA3_STREAM_ID_INVALID || !freeBufferIsAttached) {
-        ALOGV("StreamSet %d has no buffer available to free", streamSetId);
+        ALOGV("StreamSet %d(%d) has no buffer available to free",
+                streamSetKey.id, streamSetKey.isMultiRes);
         return OK;
     }
 
@@ -237,20 +243,21 @@
 }
 
 status_t Camera3BufferManager::getBufferForStream(int streamId, int streamSetId,
-        sp<GraphicBuffer>* gb, int* fenceFd, bool noFreeBufferAtConsumer) {
+        bool isMultiRes, sp<GraphicBuffer>* gb, int* fenceFd, bool noFreeBufferAtConsumer) {
     ATRACE_CALL();
 
     Mutex::Autolock l(mLock);
-    ALOGV("%s: get buffer for stream %d with stream set %d", __FUNCTION__,
-            streamId, streamSetId);
+    ALOGV("%s: get buffer for stream %d with stream set %d(%d)", __FUNCTION__,
+            streamId, streamSetId, isMultiRes);
 
-    if (!checkIfStreamRegisteredLocked(streamId, streamSetId)) {
-        ALOGE("%s: stream %d is not registered with stream set %d yet!!!",
-                __FUNCTION__, streamId, streamSetId);
+    StreamSetKey streamSetKey = {streamSetId, isMultiRes};
+    if (!checkIfStreamRegisteredLocked(streamId, streamSetKey)) {
+        ALOGE("%s: stream %d is not registered with stream set %d(%d) yet!!!",
+                __FUNCTION__, streamId, streamSetId, isMultiRes);
         return BAD_VALUE;
     }
 
-    StreamSet &streamSet = mStreamSetMap.editValueFor(streamSetId);
+    StreamSet &streamSet = mStreamSetMap.editValueFor(streamSetKey);
     BufferCountMap& handOutBufferCounts = streamSet.handoutBufferCountMap;
     size_t& bufferCount = handOutBufferCounts.editValueFor(streamId);
     BufferCountMap& attachedBufferCounts = streamSet.attachedBufferCountMap;
@@ -272,7 +279,8 @@
         bufferCount++;
         return ALREADY_EXISTS;
     }
-    ALOGV("Stream %d set %d: Get buffer for stream: Allocate new", streamId, streamSetId);
+    ALOGV("Stream %d set %d(%d): Get buffer for stream: Allocate new",
+            streamId, streamSetId, isMultiRes);
 
     if (mGrallocVersion < HARDWARE_DEVICE_API_VERSION(1,0)) {
         const StreamInfo& info = streamSet.streamInfoMap.valueFor(streamId);
@@ -313,13 +321,13 @@
         // in returnBufferForStream() if we want to free buffer more quickly.
         // TODO: probably should find out all the inactive stream IDs, and free the firstly found
         // buffers for them.
-        res = checkAndFreeBufferOnOtherStreamsLocked(streamId, streamSetId);
+        res = checkAndFreeBufferOnOtherStreamsLocked(streamId, streamSetKey);
         if (res != OK) {
             return res;
         }
         // Since we just allocated one new buffer above, try free one more buffer from other streams
         // to prevent total buffer count from growing
-        res = checkAndFreeBufferOnOtherStreamsLocked(streamId, streamSetId);
+        res = checkAndFreeBufferOnOtherStreamsLocked(streamId, streamSetKey);
         if (res != OK) {
             return res;
         }
@@ -332,7 +340,7 @@
 }
 
 status_t Camera3BufferManager::onBufferReleased(
-        int streamId, int streamSetId, bool* shouldFreeBuffer) {
+        int streamId, int streamSetId, bool isMultiRes, bool* shouldFreeBuffer) {
     ATRACE_CALL();
 
     if (shouldFreeBuffer == nullptr) {
@@ -341,22 +349,24 @@
     }
 
     Mutex::Autolock l(mLock);
-    ALOGV("Stream %d set %d: Buffer released", streamId, streamSetId);
+    ALOGV("Stream %d set %d(%d): Buffer released", streamId, streamSetId, isMultiRes);
     *shouldFreeBuffer = false;
 
-    if (!checkIfStreamRegisteredLocked(streamId, streamSetId)){
+    StreamSetKey streamSetKey = {streamSetId, isMultiRes};
+    if (!checkIfStreamRegisteredLocked(streamId, streamSetKey)){
         ALOGV("%s: signaling buffer release for an already unregistered stream "
-                "(stream %d with set id %d)", __FUNCTION__, streamId, streamSetId);
+                "(stream %d with set id %d(%d))", __FUNCTION__, streamId, streamSetId,
+                isMultiRes);
         return OK;
     }
 
     if (mGrallocVersion < HARDWARE_DEVICE_API_VERSION(1,0)) {
-        StreamSet& streamSet = mStreamSetMap.editValueFor(streamSetId);
+        StreamSet& streamSet = mStreamSetMap.editValueFor(streamSetKey);
         BufferCountMap& handOutBufferCounts = streamSet.handoutBufferCountMap;
         size_t& bufferCount = handOutBufferCounts.editValueFor(streamId);
         bufferCount--;
-        ALOGV("%s: Stream %d set %d: Buffer count now %zu", __FUNCTION__, streamId, streamSetId,
-                bufferCount);
+        ALOGV("%s: Stream %d set %d(%d): Buffer count now %zu", __FUNCTION__, streamId,
+                streamSetId, isMultiRes, bufferCount);
 
         size_t totalAllocatedBufferCount = 0;
         size_t totalHandOutBufferCount = 0;
@@ -371,8 +381,9 @@
             // BufferManager got more than enough buffers, so decrease watermark
             // to trigger more buffers free operation.
             streamSet.allocatedBufferWaterMark = newWaterMark;
-            ALOGV("%s: Stream %d set %d: watermark--; now %zu",
-                    __FUNCTION__, streamId, streamSetId, streamSet.allocatedBufferWaterMark);
+            ALOGV("%s: Stream %d set %d(%d): watermark--; now %zu",
+                    __FUNCTION__, streamId, streamSetId, isMultiRes,
+                    streamSet.allocatedBufferWaterMark);
         }
 
         size_t attachedBufferCount = streamSet.attachedBufferCountMap.valueFor(streamId);
@@ -395,20 +406,22 @@
     return OK;
 }
 
-status_t Camera3BufferManager::onBuffersRemoved(int streamId, int streamSetId, size_t count) {
+status_t Camera3BufferManager::onBuffersRemoved(int streamId, int streamSetId,
+        bool isMultiRes, size_t count) {
     ATRACE_CALL();
     Mutex::Autolock l(mLock);
 
-    ALOGV("Stream %d set %d: Buffer removed", streamId, streamSetId);
+    ALOGV("Stream %d set %d(%d): Buffer removed", streamId, streamSetId, isMultiRes);
 
-    if (!checkIfStreamRegisteredLocked(streamId, streamSetId)){
+    StreamSetKey streamSetKey = {streamSetId, isMultiRes};
+    if (!checkIfStreamRegisteredLocked(streamId, streamSetKey)){
         ALOGV("%s: signaling buffer removal for an already unregistered stream "
-                "(stream %d with set id %d)", __FUNCTION__, streamId, streamSetId);
+                "(stream %d with set id %d(%d))", __FUNCTION__, streamId, streamSetId, isMultiRes);
         return OK;
     }
 
     if (mGrallocVersion < HARDWARE_DEVICE_API_VERSION(1,0)) {
-        StreamSet& streamSet = mStreamSetMap.editValueFor(streamSetId);
+        StreamSet& streamSet = mStreamSetMap.editValueFor(streamSetKey);
         BufferCountMap& handOutBufferCounts = streamSet.handoutBufferCountMap;
         size_t& totalHandoutCount = handOutBufferCounts.editValueFor(streamId);
         BufferCountMap& attachedBufferCounts = streamSet.attachedBufferCountMap;
@@ -427,8 +440,9 @@
 
         totalHandoutCount -= count;
         totalAttachedCount -= count;
-        ALOGV("%s: Stream %d set %d: Buffer count now %zu, attached buffer count now %zu",
-                __FUNCTION__, streamId, streamSetId, totalHandoutCount, totalAttachedCount);
+        ALOGV("%s: Stream %d set %d(%d): Buffer count now %zu, attached buffer count now %zu",
+                __FUNCTION__, streamId, streamSetId, isMultiRes, totalHandoutCount,
+                totalAttachedCount);
     } else {
         // TODO: implement gralloc V1 support
         return BAD_VALUE;
@@ -444,7 +458,8 @@
     String8 lines;
     lines.appendFormat("      Total stream sets: %zu\n", mStreamSetMap.size());
     for (size_t i = 0; i < mStreamSetMap.size(); i++) {
-        lines.appendFormat("        Stream set %d has below streams:\n", mStreamSetMap.keyAt(i));
+        lines.appendFormat("        Stream set %d(%d) has below streams:\n",
+                mStreamSetMap.keyAt(i).id, mStreamSetMap.keyAt(i).isMultiRes);
         for (size_t j = 0; j < mStreamSetMap[i].streamInfoMap.size(); j++) {
             lines.appendFormat("          Stream %d\n", mStreamSetMap[i].streamInfoMap[j].streamId);
         }
@@ -470,11 +485,12 @@
     write(fd, lines.string(), lines.size());
 }
 
-bool Camera3BufferManager::checkIfStreamRegisteredLocked(int streamId, int streamSetId) const {
-    ssize_t setIdx = mStreamSetMap.indexOfKey(streamSetId);
+bool Camera3BufferManager::checkIfStreamRegisteredLocked(int streamId,
+        StreamSetKey streamSetKey) const {
+    ssize_t setIdx = mStreamSetMap.indexOfKey(streamSetKey);
     if (setIdx == NAME_NOT_FOUND) {
-        ALOGV("%s: stream set %d is not registered to stream set map yet!",
-                __FUNCTION__, streamSetId);
+        ALOGV("%s: stream set %d(%d) is not registered to stream set map yet!",
+                __FUNCTION__, streamSetKey.id, streamSetKey.isMultiRes);
         return false;
     }
 
@@ -486,9 +502,10 @@
 
     size_t bufferWaterMark = mStreamSetMap[setIdx].maxAllowedBufferCount;
     if (bufferWaterMark == 0 || bufferWaterMark > kMaxBufferCount) {
-        ALOGW("%s: stream %d with stream set %d is not registered correctly to stream set map,"
+        ALOGW("%s: stream %d with stream set %d(%d) is not registered correctly to stream set map,"
                 " as the water mark (%zu) is wrong!",
-                __FUNCTION__, streamId, streamSetId, bufferWaterMark);
+                __FUNCTION__, streamId, streamSetKey.id, streamSetKey.isMultiRes,
+                bufferWaterMark);
         return false;
     }
 
diff --git a/services/camera/libcameraservice/device3/Camera3BufferManager.h b/services/camera/libcameraservice/device3/Camera3BufferManager.h
index f0de1c1..64aaa230 100644
--- a/services/camera/libcameraservice/device3/Camera3BufferManager.h
+++ b/services/camera/libcameraservice/device3/Camera3BufferManager.h
@@ -99,7 +99,7 @@
      *             combination doesn't match what was registered, or this stream wasn't registered
      *             to this buffer manager before.
      */
-    status_t unregisterStream(int streamId, int streamSetId);
+    status_t unregisterStream(int streamId, int streamSetId, bool isMultiRes);
 
     /**
      * This method obtains a buffer for a stream from this buffer manager.
@@ -127,8 +127,8 @@
      *  NO_MEMORY: Unable to allocate a buffer for this stream at this time.
      */
     status_t getBufferForStream(
-            int streamId, int streamSetId, sp<GraphicBuffer>* gb, int* fenceFd,
-            bool noFreeBufferAtConsumer = false);
+            int streamId, int streamSetId, bool isMultiRes, sp<GraphicBuffer>* gb,
+            int* fenceFd, bool noFreeBufferAtConsumer = false);
 
     /**
      * This method notifies the manager that a buffer has been released by the consumer.
@@ -153,7 +153,8 @@
      *             combination doesn't match what was registered, or this stream wasn't registered
      *             to this buffer manager before, or shouldFreeBuffer is null.
      */
-    status_t onBufferReleased(int streamId, int streamSetId, /*out*/bool* shouldFreeBuffer);
+    status_t onBufferReleased(int streamId, int streamSetId, bool isMultiRes,
+                              /*out*/bool* shouldFreeBuffer);
 
     /**
      * This method notifies the manager that certain buffers have been removed from the
@@ -171,13 +172,13 @@
      *             to this buffer manager before, or the removed buffer count is larger than
      *             current total handoutCount or attachedCount.
      */
-    status_t onBuffersRemoved(int streamId, int streamSetId, size_t count);
+    status_t onBuffersRemoved(int streamId, int streamSetId, bool isMultiRes, size_t count);
 
     /**
      * This method notifies the manager that a buffer is freed from the buffer queue, usually
      * because onBufferReleased signals the caller to free a buffer via the shouldFreeBuffer flag.
      */
-    void notifyBufferRemoved(int streamId, int streamSetId);
+    void notifyBufferRemoved(int streamId, int streamSetId, bool isMultiRes);
 
     /**
      * Dump the buffer manager statistics.
@@ -292,8 +293,20 @@
     /**
      * Stream set map managed by this buffer manager.
      */
-    typedef int StreamSetId;
-    KeyedVector<StreamSetId, StreamSet> mStreamSetMap;
+    struct StreamSetKey {
+        // The stream set ID
+        int id;
+        // Whether this stream set is for multi-resolution output streams. It's
+        // valid for two stream sets to have the same stream set ID if one is for
+        // multi-resolution output streams and the other is not.
+        bool isMultiRes;
+
+        inline bool operator<(const StreamSetKey& other) const {
+            return (isMultiRes < other.isMultiRes) ||
+                    ((isMultiRes == other.isMultiRes) && (id < other.id));
+        }
+    };
+    KeyedVector<StreamSetKey, StreamSet> mStreamSetMap;
     KeyedVector<StreamId, wp<Camera3OutputStream>> mStreamMap;
 
     // TODO: There is no easy way to query the Gralloc version in this code yet, we have different
@@ -304,13 +317,13 @@
      * Check if this stream was successfully registered already. This method needs to be called with
      * mLock held.
      */
-    bool checkIfStreamRegisteredLocked(int streamId, int streamSetId) const;
+    bool checkIfStreamRegisteredLocked(int streamId, StreamSetKey streamSetKey) const;
 
     /**
      * Check if other streams in the stream set have an extra buffer available to be freed, and
      * free one if so.
      */
-    status_t checkAndFreeBufferOnOtherStreamsLocked(int streamId, int streamSetId);
+    status_t checkAndFreeBufferOnOtherStreamsLocked(int streamId, StreamSetKey streamSetKey);
 };
 
 } // namespace camera3
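
The StreamSetKey introduced above keys mStreamSetMap so that a multi-resolution stream set and a regular one may share the same numeric ID. Below is a minimal standalone sketch of that ordering; it uses std::map and std::string purely for illustration, in place of the framework's KeyedVector and StreamSet types.

#include <iostream>
#include <map>
#include <string>

struct StreamSetKey {
    int id;
    bool isMultiRes;
    bool operator<(const StreamSetKey& other) const {
        // Order by isMultiRes first, then by id, mirroring the header above.
        return (isMultiRes < other.isMultiRes) ||
               ((isMultiRes == other.isMultiRes) && (id < other.id));
    }
};

int main() {
    std::map<StreamSetKey, std::string> streamSets;
    streamSets[{5, false}] = "regular stream set 5";
    streamSets[{5, true}]  = "multi-resolution stream set 5";

    std::cout << "entries: " << streamSets.size() << "\n";   // prints 2
    std::cout << streamSets.at({5, true}) << "\n";
    return 0;
}

The comparator orders keys first by isMultiRes and then by id, which gives the strict weak ordering an associative container needs, so {5, false} and {5, true} occupy separate entries.
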
diff --git a/services/camera/libcameraservice/device3/Camera3Device.cpp b/services/camera/libcameraservice/device3/Camera3Device.cpp
index 3204217..bf7e597 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.cpp
+++ b/services/camera/libcameraservice/device3/Camera3Device.cpp
@@ -60,6 +60,7 @@
 #include "device3/Camera3SharedOutputStream.h"
 #include "CameraService.h"
 #include "utils/CameraThreadState.h"
+#include "utils/SessionConfigurationUtils.h"
 #include "utils/TraceHFR.h"
 #include "utils/CameraServiceProxyWrapper.h"
 
@@ -69,6 +70,7 @@
 using namespace android::camera3;
 using namespace android::hardware::camera;
 using namespace android::hardware::camera::device::V3_2;
+using android::hardware::camera::metadata::V3_6::CameraMetadataEnumAndroidSensorPixelMode;
 
 namespace android {
 
@@ -313,6 +315,7 @@
     mFakeStreamId = NO_STREAM;
     mNeedConfig = true;
     mPauseStateNotify = false;
+    mIsInputStreamMultiResolution = false;
 
     // Measure the clock domain offset between camera and video/hw_composer
     camera_metadata_entry timestampSource =
@@ -330,17 +333,6 @@
         mUsePartialResult = (mNumPartialResults > 1);
     }
 
-    camera_metadata_entry configs =
-            mDeviceInfo.find(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS);
-    for (uint32_t i = 0; i < configs.count; i += 4) {
-        if (configs.data.i32[i] == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED &&
-                configs.data.i32[i + 3] ==
-                ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_INPUT) {
-            mSupportedOpaqueInputSizes.add(Size(configs.data.i32[i + 1],
-                    configs.data.i32[i + 2]));
-        }
-    }
-
     bool usePrecorrectArray = DistortionMapper::isDistortionSupported(mDeviceInfo);
     if (usePrecorrectArray) {
         res = mDistortionMappers[mId.c_str()].setupStaticInfo(mDeviceInfo);
@@ -492,18 +484,23 @@
     return gotLock;
 }
 
-Camera3Device::Size Camera3Device::getMaxJpegResolution() const {
+camera3::Size Camera3Device::getMaxJpegResolution() const {
     int32_t maxJpegWidth = 0, maxJpegHeight = 0;
     const int STREAM_CONFIGURATION_SIZE = 4;
     const int STREAM_FORMAT_OFFSET = 0;
     const int STREAM_WIDTH_OFFSET = 1;
     const int STREAM_HEIGHT_OFFSET = 2;
     const int STREAM_IS_INPUT_OFFSET = 3;
+    bool isHighResolutionSensor =
+            camera3::SessionConfigurationUtils::isUltraHighResolutionSensor(mDeviceInfo);
+    int32_t scalerSizesTag = isHighResolutionSensor ?
+            ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION :
+                    ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS;
     camera_metadata_ro_entry_t availableStreamConfigs =
-            mDeviceInfo.find(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS);
+            mDeviceInfo.find(scalerSizesTag);
     if (availableStreamConfigs.count == 0 ||
             availableStreamConfigs.count % STREAM_CONFIGURATION_SIZE != 0) {
-        return Size(0, 0);
+        return camera3::Size(0, 0);
     }
 
     // Get max jpeg size (area-wise).
@@ -520,7 +517,7 @@
         }
     }
 
-    return Size(maxJpegWidth, maxJpegHeight);
+    return camera3::Size(maxJpegWidth, maxJpegHeight);
 }
 
 nsecs_t Camera3Device::getMonoToBoottimeOffset() {
@@ -614,7 +611,7 @@
 
 ssize_t Camera3Device::getJpegBufferSize(uint32_t width, uint32_t height) const {
     // Get max jpeg size (area-wise).
-    Size maxJpegResolution = getMaxJpegResolution();
+    camera3::Size maxJpegResolution = getMaxJpegResolution();
     if (maxJpegResolution.width == 0) {
         ALOGE("%s: Camera %s: Can't find valid available jpeg sizes in static metadata!",
                 __FUNCTION__, mId.string());
@@ -638,6 +635,8 @@
     ssize_t jpegBufferSize = scaleFactor * (maxJpegBufferSize - kMinJpegBufferSize) +
             kMinJpegBufferSize;
     if (jpegBufferSize > maxJpegBufferSize) {
+        ALOGI("%s: calculated jpeg buffer size is > maxJpegBufferSize (%zd), clamping",
+                  __FUNCTION__, maxJpegBufferSize);
         jpegBufferSize = maxJpegBufferSize;
     }
 
@@ -657,13 +656,17 @@
     return maxBytesForPointCloud;
 }
 
-ssize_t Camera3Device::getRawOpaqueBufferSize(int32_t width, int32_t height) const {
+ssize_t Camera3Device::getRawOpaqueBufferSize(int32_t width, int32_t height,
+        bool maxResolution) const {
     const int PER_CONFIGURATION_SIZE = 3;
     const int WIDTH_OFFSET = 0;
     const int HEIGHT_OFFSET = 1;
     const int SIZE_OFFSET = 2;
     camera_metadata_ro_entry rawOpaqueSizes =
-        mDeviceInfo.find(ANDROID_SENSOR_OPAQUE_RAW_SIZE);
+        mDeviceInfo.find(
+            camera3::SessionConfigurationUtils::getAppropriateModeTag(
+                    ANDROID_SENSOR_OPAQUE_RAW_SIZE,
+                    maxResolution));
     size_t count = rawOpaqueSizes.count;
     if (count == 0 || (count % PER_CONFIGURATION_SIZE)) {
         ALOGE("%s: Camera %s: bad opaque RAW size static metadata length(%zu)!",
@@ -1263,7 +1266,7 @@
 }
 
 status_t Camera3Device::createInputStream(
-        uint32_t width, uint32_t height, int format, int *id) {
+        uint32_t width, uint32_t height, int format, bool isMultiResolution, int *id) {
     ATRACE_CALL();
     Mutex::Autolock il(mInterfaceLock);
     nsecs_t maxExpectedDuration = getExpectedInFlightDuration();
@@ -1310,6 +1313,7 @@
     newStream->setStatusTracker(mStatusTracker);
 
     mInputStream = newStream;
+    mIsInputStreamMultiResolution = isMultiResolution;
 
     *id = mNextStreamId++;
 
@@ -1334,7 +1338,9 @@
             uint32_t width, uint32_t height, int format,
             android_dataspace dataSpace, camera_stream_rotation_t rotation, int *id,
             const String8& physicalCameraId,
-            std::vector<int> *surfaceIds, int streamSetId, bool isShared, uint64_t consumerUsage) {
+            const std::unordered_set<int32_t> &sensorPixelModesUsed,
+            std::vector<int> *surfaceIds, int streamSetId, bool isShared, bool isMultiResolution,
+            uint64_t consumerUsage) {
     ATRACE_CALL();
 
     if (consumer == nullptr) {
@@ -1346,24 +1352,37 @@
     consumers.push_back(consumer);
 
     return createStream(consumers, /*hasDeferredConsumer*/ false, width, height,
-            format, dataSpace, rotation, id, physicalCameraId, surfaceIds, streamSetId,
-            isShared, consumerUsage);
+            format, dataSpace, rotation, id, physicalCameraId, sensorPixelModesUsed, surfaceIds,
+            streamSetId, isShared, isMultiResolution, consumerUsage);
+}
+
+static bool isRawFormat(int format) {
+    switch (format) {
+        case HAL_PIXEL_FORMAT_RAW16:
+        case HAL_PIXEL_FORMAT_RAW12:
+        case HAL_PIXEL_FORMAT_RAW10:
+        case HAL_PIXEL_FORMAT_RAW_OPAQUE:
+            return true;
+        default:
+            return false;
+    }
 }
 
 status_t Camera3Device::createStream(const std::vector<sp<Surface>>& consumers,
         bool hasDeferredConsumer, uint32_t width, uint32_t height, int format,
         android_dataspace dataSpace, camera_stream_rotation_t rotation, int *id,
-        const String8& physicalCameraId,
-        std::vector<int> *surfaceIds, int streamSetId, bool isShared, uint64_t consumerUsage) {
+        const String8& physicalCameraId, const std::unordered_set<int32_t> &sensorPixelModesUsed,
+        std::vector<int> *surfaceIds, int streamSetId, bool isShared, bool isMultiResolution,
+        uint64_t consumerUsage) {
     ATRACE_CALL();
 
     Mutex::Autolock il(mInterfaceLock);
     nsecs_t maxExpectedDuration = getExpectedInFlightDuration();
     Mutex::Autolock l(mLock);
     ALOGV("Camera %s: Creating new stream %d: %d x %d, format %d, dataspace %d rotation %d"
-            " consumer usage %" PRIu64 ", isShared %d, physicalCameraId %s", mId.string(),
-            mNextStreamId, width, height, format, dataSpace, rotation, consumerUsage, isShared,
-            physicalCameraId.string());
+            " consumer usage %" PRIu64 ", isShared %d, physicalCameraId %s, isMultiResolution %d",
+            mId.string(), mNextStreamId, width, height, format, dataSpace, rotation,
+            consumerUsage, isShared, physicalCameraId.string(), isMultiResolution);
 
     status_t res;
     bool wasActive = false;
@@ -1406,6 +1425,12 @@
         return BAD_VALUE;
     }
 
+    if (isRawFormat(format) && sensorPixelModesUsed.size() > 1) {
+        // We can't use one stream with a raw format in both sensor pixel modes since it's going to
+        // be found in only one sensor pixel mode.
+        ALOGE("%s: RAW opaque stream cannot be used with > 1 sensor pixel modes", __FUNCTION__);
+        return BAD_VALUE;
+    }
     if (format == HAL_PIXEL_FORMAT_BLOB) {
         ssize_t blobBufferSize;
         if (dataSpace == HAL_DATASPACE_DEPTH) {
@@ -1425,29 +1450,36 @@
         }
         newStream = new Camera3OutputStream(mNextStreamId, consumers[0],
                 width, height, blobBufferSize, format, dataSpace, rotation,
-                mTimestampOffset, physicalCameraId, streamSetId);
+                mTimestampOffset, physicalCameraId, sensorPixelModesUsed, streamSetId,
+                isMultiResolution);
     } else if (format == HAL_PIXEL_FORMAT_RAW_OPAQUE) {
-        ssize_t rawOpaqueBufferSize = getRawOpaqueBufferSize(width, height);
+        bool maxResolution =
+                sensorPixelModesUsed.find(ANDROID_SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION) !=
+                        sensorPixelModesUsed.end();
+        ssize_t rawOpaqueBufferSize = getRawOpaqueBufferSize(width, height, maxResolution);
         if (rawOpaqueBufferSize <= 0) {
             SET_ERR_L("Invalid RAW opaque buffer size %zd", rawOpaqueBufferSize);
             return BAD_VALUE;
         }
         newStream = new Camera3OutputStream(mNextStreamId, consumers[0],
                 width, height, rawOpaqueBufferSize, format, dataSpace, rotation,
-                mTimestampOffset, physicalCameraId, streamSetId);
+                mTimestampOffset, physicalCameraId, sensorPixelModesUsed, streamSetId,
+                isMultiResolution);
     } else if (isShared) {
         newStream = new Camera3SharedOutputStream(mNextStreamId, consumers,
                 width, height, format, consumerUsage, dataSpace, rotation,
-                mTimestampOffset, physicalCameraId, streamSetId,
+                mTimestampOffset, physicalCameraId, sensorPixelModesUsed, streamSetId,
                 mUseHalBufManager);
     } else if (consumers.size() == 0 && hasDeferredConsumer) {
         newStream = new Camera3OutputStream(mNextStreamId,
                 width, height, format, consumerUsage, dataSpace, rotation,
-                mTimestampOffset, physicalCameraId, streamSetId);
+                mTimestampOffset, physicalCameraId, sensorPixelModesUsed, streamSetId,
+                isMultiResolution);
     } else {
         newStream = new Camera3OutputStream(mNextStreamId, consumers[0],
                 width, height, format, dataSpace, rotation,
-                mTimestampOffset, physicalCameraId, streamSetId);
+                mTimestampOffset, physicalCameraId, sensorPixelModesUsed, streamSetId,
+                isMultiResolution);
     }
 
     size_t consumerCount = consumers.size();
@@ -2560,8 +2592,9 @@
     if (mInputStream != NULL && notifyRequestThread) {
         while (true) {
             camera_stream_buffer_t inputBuffer;
+            camera3::Size inputBufferSize;
             status_t res = mInputStream->getInputBuffer(&inputBuffer,
-                    /*respectHalLimit*/ false);
+                    &inputBufferSize, /*respectHalLimit*/ false);
             if (res != OK) {
                 // Exhausted acquiring all input buffers.
                 break;
@@ -2598,6 +2631,7 @@
     camera_stream_configuration config;
     config.operation_mode = mOperatingMode;
     config.num_streams = (mInputStream != NULL) + mOutputStreams.size();
+    config.input_is_multi_resolution = false;
 
     Vector<camera3::camera_stream_t*> streams;
     streams.setCapacity(config.num_streams);
@@ -2613,8 +2647,11 @@
             return INVALID_OPERATION;
         }
         streams.add(inputStream);
+
+        config.input_is_multi_resolution = mIsInputStreamMultiResolution;
     }
 
+    mGroupIdPhysicalCameraMap.clear();
     for (size_t i = 0; i < mOutputStreams.size(); i++) {
 
         // Don't configure bidi streams twice, nor add them twice to the list
@@ -2648,6 +2685,12 @@
                         __FUNCTION__, outputStream->data_space);
             }
         }
+
+        if (mOutputStreams[i]->isMultiResolution()) {
+            int32_t streamGroupId = mOutputStreams[i]->getHalStreamGroupId();
+            const String8& physicalCameraId = mOutputStreams[i]->getPhysicalCameraId();
+            mGroupIdPhysicalCameraMap[streamGroupId].insert(physicalCameraId);
+        }
     }
 
     config.streams = streams.editArray();
@@ -2718,7 +2761,8 @@
     // Request thread needs to know to avoid using repeat-last-settings protocol
     // across configure_streams() calls
     if (notifyRequestThread) {
-        mRequestThread->configurationComplete(mIsConstrainedHighSpeedConfiguration, sessionParams);
+        mRequestThread->configurationComplete(mIsConstrainedHighSpeedConfiguration,
+                sessionParams, mGroupIdPhysicalCameraMap);
     }
 
     char value[PROPERTY_VALUE_MAX];
@@ -2891,8 +2935,9 @@
 status_t Camera3Device::registerInFlight(uint32_t frameNumber,
         int32_t numBuffers, CaptureResultExtras resultExtras, bool hasInput,
         bool hasAppCallback, nsecs_t maxExpectedDuration,
-        std::set<String8>& physicalCameraIds, bool isStillCapture,
-        bool isZslCapture, bool rotateAndCropAuto, const std::set<std::string>& cameraIdsWithZoom,
+        const std::set<std::set<String8>>& physicalCameraIds,
+        bool isStillCapture, bool isZslCapture, bool rotateAndCropAuto,
+        const std::set<std::string>& cameraIdsWithZoom,
         const SurfaceMap& outputSurfaces, nsecs_t requestTimeNs) {
     ATRACE_CALL();
     std::lock_guard<std::mutex> l(mInFlightLock);
@@ -3010,6 +3055,10 @@
         mSupportOfflineProcessing(supportOfflineProcessing) {
     // Check with hardware service manager if we can downcast these interfaces
     // Somewhat expensive, so cache the results at startup
+    auto castResult_3_7 = device::V3_7::ICameraDeviceSession::castFrom(mHidlSession);
+    if (castResult_3_7.isOk()) {
+        mHidlSession_3_7 = castResult_3_7;
+    }
     auto castResult_3_6 = device::V3_6::ICameraDeviceSession::castFrom(mHidlSession);
     if (castResult_3_6.isOk()) {
         mHidlSession_3_6 = castResult_3_6;
@@ -3043,6 +3092,7 @@
 }
 
 void Camera3Device::HalInterface::clear() {
+    mHidlSession_3_7.clear();
     mHidlSession_3_6.clear();
     mHidlSession_3_5.clear();
     mHidlSession_3_4.clear();
@@ -3169,15 +3219,23 @@
     if (!valid()) return INVALID_OPERATION;
     status_t res = OK;
 
+    if (config->input_is_multi_resolution && mHidlSession_3_7 == nullptr) {
+        ALOGE("%s: Camera device doesn't support multi-resolution input stream", __FUNCTION__);
+        return BAD_VALUE;
+    }
+
     // Convert stream config to HIDL
     std::set<int> activeStreams;
     device::V3_2::StreamConfiguration requestedConfiguration3_2;
     device::V3_4::StreamConfiguration requestedConfiguration3_4;
+    device::V3_7::StreamConfiguration requestedConfiguration3_7;
     requestedConfiguration3_2.streams.resize(config->num_streams);
     requestedConfiguration3_4.streams.resize(config->num_streams);
+    requestedConfiguration3_7.streams.resize(config->num_streams);
     for (size_t i = 0; i < config->num_streams; i++) {
         device::V3_2::Stream &dst3_2 = requestedConfiguration3_2.streams[i];
         device::V3_4::Stream &dst3_4 = requestedConfiguration3_4.streams[i];
+        device::V3_7::Stream &dst3_7 = requestedConfiguration3_7.streams[i];
         camera3::camera_stream_t *src = config->streams[i];
 
         Camera3Stream* cam3stream = Camera3Stream::cast(src);
@@ -3203,7 +3261,7 @@
         dst3_2.usage = mapToConsumerUsage(cam3stream->getUsage());
         dst3_2.rotation = mapToStreamRotation((camera_stream_rotation_t) src->rotation);
         // For HidlSession version 3.5 or newer, the format and dataSpace sent
-        // to HAL are original, not the overriden ones.
+        // to HAL are original, not the overridden ones.
         if (mHidlSession_3_5 != nullptr) {
             dst3_2.format = mapToPixelFormat(cam3stream->isFormatOverridden() ?
                     cam3stream->getOriginalFormat() : src->format);
@@ -3218,7 +3276,14 @@
         if (src->physical_camera_id != nullptr) {
             dst3_4.physicalCameraId = src->physical_camera_id;
         }
-
+        dst3_7.v3_4 = dst3_4;
+        dst3_7.groupId = cam3stream->getHalStreamGroupId();
+        dst3_7.sensorPixelModesUsed.resize(src->sensor_pixel_modes_used.size());
+        size_t j = 0;
+        for (int mode : src->sensor_pixel_modes_used) {
+            dst3_7.sensorPixelModesUsed[j++] =
+                    static_cast<CameraMetadataEnumAndroidSensorPixelMode>(mode);
+        }
         activeStreams.insert(streamId);
         // Create Buffer ID map if necessary
         mBufferRecords.tryCreateBufferCache(streamId);
@@ -3235,9 +3300,15 @@
     }
     requestedConfiguration3_2.operationMode = operationMode;
     requestedConfiguration3_4.operationMode = operationMode;
+    requestedConfiguration3_7.operationMode = operationMode;
+    size_t sessionParamSize = get_camera_metadata_size(sessionParams);
     requestedConfiguration3_4.sessionParams.setToExternal(
             reinterpret_cast<uint8_t*>(const_cast<camera_metadata_t*>(sessionParams)),
-            get_camera_metadata_size(sessionParams));
+            sessionParamSize);
+    requestedConfiguration3_7.operationMode = operationMode;
+    requestedConfiguration3_7.sessionParams.setToExternal(
+            reinterpret_cast<uint8_t*>(const_cast<camera_metadata_t*>(sessionParams)),
+            sessionParamSize);
 
     // Invoke configureStreams
     device::V3_3::HalStreamConfiguration finalConfiguration;
@@ -3284,7 +3355,17 @@
             };
 
     // See which version of HAL we have
-    if (mHidlSession_3_6 != nullptr) {
+    if (mHidlSession_3_7 != nullptr) {
+        ALOGV("%s: v3.7 device found", __FUNCTION__);
+        requestedConfiguration3_7.streamConfigCounter = mNextStreamConfigCounter++;
+        requestedConfiguration3_7.multiResolutionInputImage = config->input_is_multi_resolution;
+        auto err = mHidlSession_3_7->configureStreams_3_7(
+                requestedConfiguration3_7, configStream36Cb);
+        res = postprocConfigStream36(err);
+        if (res != OK) {
+            return res;
+        }
+    } else if (mHidlSession_3_6 != nullptr) {
         ALOGV("%s: v3.6 device found", __FUNCTION__);
         device::V3_5::StreamConfiguration requestedConfiguration3_5;
         requestedConfiguration3_5.v3_4 = requestedConfiguration3_4;
@@ -3542,6 +3623,11 @@
     if (!valid()) return INVALID_OPERATION;
 
     sp<device::V3_4::ICameraDeviceSession> hidlSession_3_4;
+    sp<device::V3_7::ICameraDeviceSession> hidlSession_3_7;
+    auto castResult_3_7 = device::V3_7::ICameraDeviceSession::castFrom(mHidlSession);
+    if (castResult_3_7.isOk()) {
+        hidlSession_3_7 = castResult_3_7;
+    }
     auto castResult_3_4 = device::V3_4::ICameraDeviceSession::castFrom(mHidlSession);
     if (castResult_3_4.isOk()) {
         hidlSession_3_4 = castResult_3_4;
@@ -3549,8 +3635,11 @@
 
     hardware::hidl_vec<device::V3_2::CaptureRequest> captureRequests;
     hardware::hidl_vec<device::V3_4::CaptureRequest> captureRequests_3_4;
+    hardware::hidl_vec<device::V3_7::CaptureRequest> captureRequests_3_7;
     size_t batchSize = requests.size();
-    if (hidlSession_3_4 != nullptr) {
+    if (hidlSession_3_7 != nullptr) {
+        captureRequests_3_7.resize(batchSize);
+    } else if (hidlSession_3_4 != nullptr) {
         captureRequests_3_4.resize(batchSize);
     } else {
         captureRequests.resize(batchSize);
@@ -3560,7 +3649,10 @@
 
     status_t res = OK;
     for (size_t i = 0; i < batchSize; i++) {
-        if (hidlSession_3_4 != nullptr) {
+        if (hidlSession_3_7 != nullptr) {
+            res = wrapAsHidlRequest(requests[i], /*out*/&captureRequests_3_7[i].v3_4.v3_2,
+                    /*out*/&handlesCreated, /*out*/&inflightBuffers);
+        } else if (hidlSession_3_4 != nullptr) {
             res = wrapAsHidlRequest(requests[i], /*out*/&captureRequests_3_4[i].v3_2,
                     /*out*/&handlesCreated, /*out*/&inflightBuffers);
         } else {
@@ -3593,7 +3685,9 @@
     for (size_t i = 0; i < batchSize; i++) {
         camera_capture_request_t* request = requests[i];
         device::V3_2::CaptureRequest* captureRequest;
-        if (hidlSession_3_4 != nullptr) {
+        if (hidlSession_3_7 != nullptr) {
+            captureRequest = &captureRequests_3_7[i].v3_4.v3_2;
+        } else if (hidlSession_3_4 != nullptr) {
             captureRequest = &captureRequests_3_4[i].v3_2;
         } else {
             captureRequest = &captureRequests[i];
@@ -3620,33 +3714,42 @@
             captureRequest->fmqSettingsSize = 0u;
         }
 
-        if (hidlSession_3_4 != nullptr) {
-            captureRequests_3_4[i].physicalCameraSettings.resize(request->num_physcam_settings);
+        // hidl session 3.7 specific handling.
+        if (hidlSession_3_7 != nullptr) {
+            captureRequests_3_7[i].inputWidth = request->input_width;
+            captureRequests_3_7[i].inputHeight = request->input_height;
+        }
+
+        // hidl session 3.7 and 3.4 specific handling.
+        if (hidlSession_3_7 != nullptr || hidlSession_3_4 != nullptr) {
+            hardware::hidl_vec<device::V3_4::PhysicalCameraSetting>& physicalCameraSettings =
+                    (hidlSession_3_7 != nullptr) ?
+                    captureRequests_3_7[i].v3_4.physicalCameraSettings :
+                    captureRequests_3_4[i].physicalCameraSettings;
+            physicalCameraSettings.resize(request->num_physcam_settings);
             for (size_t j = 0; j < request->num_physcam_settings; j++) {
                 if (request->physcam_settings != nullptr) {
                     size_t settingsSize = get_camera_metadata_size(request->physcam_settings[j]);
                     if (mRequestMetadataQueue != nullptr && mRequestMetadataQueue->write(
                                 reinterpret_cast<const uint8_t*>(request->physcam_settings[j]),
                                 settingsSize)) {
-                        captureRequests_3_4[i].physicalCameraSettings[j].settings.resize(0);
-                        captureRequests_3_4[i].physicalCameraSettings[j].fmqSettingsSize =
-                            settingsSize;
+                        physicalCameraSettings[j].settings.resize(0);
+                        physicalCameraSettings[j].fmqSettingsSize = settingsSize;
                     } else {
                         if (mRequestMetadataQueue != nullptr) {
                             ALOGW("%s: couldn't utilize fmq, fallback to hwbinder", __FUNCTION__);
                         }
-                        captureRequests_3_4[i].physicalCameraSettings[j].settings.setToExternal(
+                        physicalCameraSettings[j].settings.setToExternal(
                                 reinterpret_cast<uint8_t*>(const_cast<camera_metadata_t*>(
                                         request->physcam_settings[j])),
                                 get_camera_metadata_size(request->physcam_settings[j]));
-                        captureRequests_3_4[i].physicalCameraSettings[j].fmqSettingsSize = 0u;
+                        physicalCameraSettings[j].fmqSettingsSize = 0u;
                     }
                 } else {
-                    captureRequests_3_4[i].physicalCameraSettings[j].fmqSettingsSize = 0u;
-                    captureRequests_3_4[i].physicalCameraSettings[j].settings.resize(0);
+                    physicalCameraSettings[j].fmqSettingsSize = 0u;
+                    physicalCameraSettings[j].settings.resize(0);
                 }
-                captureRequests_3_4[i].physicalCameraSettings[j].physicalCameraId =
-                    request->physcam_id[j];
+                physicalCameraSettings[j].physicalCameraId = request->physcam_id[j];
             }
         }
     }
@@ -3657,7 +3760,10 @@
                 status = s;
                 *numRequestProcessed = n;
         };
-    if (hidlSession_3_4 != nullptr) {
+    if (hidlSession_3_7 != nullptr) {
+        err = hidlSession_3_7->processCaptureRequest_3_7(captureRequests_3_7, cachesToRemove,
+                                                         resultCallback);
+    } else if (hidlSession_3_4 != nullptr) {
         err = hidlSession_3_4->processCaptureRequest_3_4(captureRequests_3_4, cachesToRemove,
                                                          resultCallback);
     } else {
@@ -3912,11 +4018,13 @@
 }
 
 void Camera3Device::RequestThread::configurationComplete(bool isConstrainedHighSpeed,
-        const CameraMetadata& sessionParams) {
+        const CameraMetadata& sessionParams,
+        const std::map<int32_t, std::set<String8>>& groupIdPhysicalCameraMap) {
     ATRACE_CALL();
     Mutex::Autolock l(mRequestLock);
     mReconfigured = true;
     mLatestSessionParams = sessionParams;
+    mGroupIdPhysicalCameraMap = groupIdPhysicalCameraMap;
     // Prepare video stream for high speed recording.
     mPrepareVideoStream = isConstrainedHighSpeed;
     mConstrainedMode = isConstrainedHighSpeed;
@@ -4066,8 +4174,9 @@
             // Abort the input buffers for reprocess requests.
             if ((*it)->mInputStream != NULL) {
                 camera_stream_buffer_t inputBuffer;
+                camera3::Size inputBufferSize;
                 status_t res = (*it)->mInputStream->getInputBuffer(&inputBuffer,
-                        /*respectHalLimit*/ false);
+                        &inputBufferSize, /*respectHalLimit*/ false);
                 if (res != OK) {
                     ALOGW("%s: %d: couldn't get input buffer while clearing the request "
                             "list: %s (%d)", __FUNCTION__, __LINE__, strerror(-res), res);
@@ -4273,33 +4382,34 @@
 
 void Camera3Device::RequestThread::updateNextRequest(NextRequest& nextRequest) {
     // Update the latest request sent to HAL
-    if (nextRequest.halRequest.settings != NULL) { // Don't update if they were unchanged
+    camera_capture_request_t& halRequest = nextRequest.halRequest;
+    if (halRequest.settings != NULL) { // Don't update if they were unchanged
         Mutex::Autolock al(mLatestRequestMutex);
 
-        camera_metadata_t* cloned = clone_camera_metadata(nextRequest.halRequest.settings);
+        camera_metadata_t* cloned = clone_camera_metadata(halRequest.settings);
         mLatestRequest.acquire(cloned);
 
         mLatestPhysicalRequest.clear();
-        for (uint32_t i = 0; i < nextRequest.halRequest.num_physcam_settings; i++) {
-            cloned = clone_camera_metadata(nextRequest.halRequest.physcam_settings[i]);
-            mLatestPhysicalRequest.emplace(nextRequest.halRequest.physcam_id[i],
+        for (uint32_t i = 0; i < halRequest.num_physcam_settings; i++) {
+            cloned = clone_camera_metadata(halRequest.physcam_settings[i]);
+            mLatestPhysicalRequest.emplace(halRequest.physcam_id[i],
                     CameraMetadata(cloned));
         }
 
         sp<Camera3Device> parent = mParent.promote();
         if (parent != NULL) {
             parent->monitorMetadata(TagMonitor::REQUEST,
-                    nextRequest.halRequest.frame_number,
+                    halRequest.frame_number,
                     0, mLatestRequest, mLatestPhysicalRequest);
         }
     }
 
-    if (nextRequest.halRequest.settings != NULL) {
+    if (halRequest.settings != NULL) {
         nextRequest.captureRequest->mSettingsList.begin()->metadata.unlock(
-                nextRequest.halRequest.settings);
+                halRequest.settings);
     }
 
-    cleanupPhysicalSettings(nextRequest.captureRequest, &nextRequest.halRequest);
+    cleanupPhysicalSettings(nextRequest.captureRequest, &halRequest);
 }
 
 bool Camera3Device::RequestThread::updateSessionParameters(const CameraMetadata& settings) {
@@ -4662,6 +4772,9 @@
         // Fill in buffers
         if (captureRequest->mInputStream != NULL) {
             halRequest->input_buffer = &captureRequest->mInputBuffer;
+
+            halRequest->input_width = captureRequest->mInputBufferSize.width;
+            halRequest->input_height = captureRequest->mInputBufferSize.height;
             totalNumBuffers += 1;
         } else {
             halRequest->input_buffer = NULL;
@@ -4670,7 +4783,7 @@
         outputBuffers->insertAt(camera_stream_buffer_t(), 0,
                 captureRequest->mOutputStreams.size());
         halRequest->output_buffers = outputBuffers->array();
-        std::set<String8> requestedPhysicalCameras;
+        std::set<std::set<String8>> requestedPhysicalCameras;
 
         sp<Camera3Device> parent = mParent.promote();
         if (parent == NULL) {
@@ -4765,14 +4878,11 @@
             }
 
             String8 physicalCameraId = outputStream->getPhysicalCameraId();
-
-            if (!physicalCameraId.isEmpty()) {
-                // Physical stream isn't supported for input request.
-                if (halRequest->input_buffer) {
-                    CLOGE("Physical stream is not supported for input request");
-                    return INVALID_OPERATION;
-                }
-                requestedPhysicalCameras.insert(physicalCameraId);
+            int32_t streamGroupId = outputStream->getHalStreamGroupId();
+            if (streamGroupId != -1 && mGroupIdPhysicalCameraMap.count(streamGroupId) == 1) {
+                requestedPhysicalCameras.insert(mGroupIdPhysicalCameraMap[streamGroupId]);
+            } else if (!physicalCameraId.isEmpty()) {
+                requestedPhysicalCameras.insert(std::set<String8>({physicalCameraId}));
             }
             halRequest->num_output_buffers++;
         }
@@ -5248,7 +5358,8 @@
         // Since RequestThread::clear() removes buffers from the input stream,
         // get the right buffer here before unlocking mRequestLock
         if (nextRequest->mInputStream != NULL) {
-            res = nextRequest->mInputStream->getInputBuffer(&nextRequest->mInputBuffer);
+            res = nextRequest->mInputStream->getInputBuffer(&nextRequest->mInputBuffer,
+                    &nextRequest->mInputBufferSize);
             if (res != OK) {
                 // Can't get input buffer from gralloc queue - this could be due to
                 // disconnected queue or other producer misbehavior, so not a fatal
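
The configureStreams and processCaptureRequest changes above follow the existing castFrom() cascade: probe for the newest ICameraDeviceSession the HAL implements (@3.7 here), cache it, and fall back to older entry points otherwise. The following is a plain-C++ analogue of that dispatch pattern, using dynamic_pointer_cast on hypothetical Session types instead of HIDL castFrom(); it only illustrates the fallback order, not the actual HIDL plumbing.

#include <iostream>
#include <memory>

struct SessionV3_2 {
    virtual ~SessionV3_2() = default;
    virtual void configureStreams() { std::cout << "configureStreams (3.2)\n"; }
};
struct SessionV3_6 : SessionV3_2 {
    void configureStreams_3_6() { std::cout << "configureStreams_3_6\n"; }
};
struct SessionV3_7 : SessionV3_6 {
    void configureStreams_3_7() { std::cout << "configureStreams_3_7\n"; }
};

void configure(const std::shared_ptr<SessionV3_2>& session) {
    // Prefer the newest interface the session supports, as the patch does for @3.7.
    if (auto s37 = std::dynamic_pointer_cast<SessionV3_7>(session)) {
        s37->configureStreams_3_7();
    } else if (auto s36 = std::dynamic_pointer_cast<SessionV3_6>(session)) {
        s36->configureStreams_3_6();
    } else {
        session->configureStreams();
    }
}

int main() {
    configure(std::make_shared<SessionV3_7>());  // dispatches to the 3.7 path
    configure(std::make_shared<SessionV3_2>());  // falls back to the base path
    return 0;
}
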
diff --git a/services/camera/libcameraservice/device3/Camera3Device.h b/services/camera/libcameraservice/device3/Camera3Device.h
index 567b3ad..d9e89fd 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.h
+++ b/services/camera/libcameraservice/device3/Camera3Device.h
@@ -35,6 +35,7 @@
 #include <android/hardware/camera/device/3.4/ICameraDeviceSession.h>
 #include <android/hardware/camera/device/3.5/ICameraDeviceSession.h>
 #include <android/hardware/camera/device/3.6/ICameraDeviceSession.h>
+#include <android/hardware/camera/device/3.7/ICameraDeviceSession.h>
 #include <android/hardware/camera/device/3.2/ICameraDeviceCallback.h>
 #include <android/hardware/camera/device/3.4/ICameraDeviceCallback.h>
 #include <android/hardware/camera/device/3.5/ICameraDeviceCallback.h>
@@ -52,6 +53,7 @@
 #include "device3/InFlightRequest.h"
 #include "device3/Camera3OutputInterface.h"
 #include "device3/Camera3OfflineSession.h"
+#include "device3/Camera3StreamInterface.h"
 #include "utils/TagMonitor.h"
 #include "utils/LatencyHistogram.h"
 #include <camera_metadata_hidden.h>
@@ -71,7 +73,6 @@
 
 class Camera3Stream;
 class Camera3ZslStream;
-class Camera3OutputStreamInterface;
 class Camera3StreamInterface;
 
 } // namespace camera3
@@ -131,19 +132,24 @@
             uint32_t width, uint32_t height, int format,
             android_dataspace dataSpace, camera_stream_rotation_t rotation, int *id,
             const String8& physicalCameraId,
+            const std::unordered_set<int32_t> &sensorPixelModesUsed,
             std::vector<int> *surfaceIds = nullptr,
             int streamSetId = camera3::CAMERA3_STREAM_SET_ID_INVALID,
-            bool isShared = false, uint64_t consumerUsage = 0) override;
+            bool isShared = false, bool isMultiResolution = false,
+            uint64_t consumerUsage = 0) override;
+
     status_t createStream(const std::vector<sp<Surface>>& consumers,
             bool hasDeferredConsumer, uint32_t width, uint32_t height, int format,
             android_dataspace dataSpace, camera_stream_rotation_t rotation, int *id,
             const String8& physicalCameraId,
+            const std::unordered_set<int32_t> &sensorPixelModesUsed,
             std::vector<int> *surfaceIds = nullptr,
             int streamSetId = camera3::CAMERA3_STREAM_SET_ID_INVALID,
-            bool isShared = false, uint64_t consumerUsage = 0) override;
+            bool isShared = false, bool isMultiResolution = false,
+            uint64_t consumerUsage = 0) override;
 
     status_t createInputStream(
-            uint32_t width, uint32_t height, int format,
+            uint32_t width, uint32_t height, int format, bool isMultiResolution,
             int *id) override;
 
     status_t getStreamInfo(int id, StreamInfo *streamInfo) override;
@@ -187,7 +193,7 @@
 
     ssize_t getJpegBufferSize(uint32_t width, uint32_t height) const override;
     ssize_t getPointCloudBufferSize() const;
-    ssize_t getRawOpaqueBufferSize(int32_t width, int32_t height) const;
+    ssize_t getRawOpaqueBufferSize(int32_t width, int32_t height, bool maxResolution) const;
 
     // Methods called by subclasses
     void             notifyStatus(bool idle); // updates from StatusTracker
@@ -418,6 +424,8 @@
         sp<hardware::camera::device::V3_5::ICameraDeviceSession> mHidlSession_3_5;
         // Valid if ICameraDeviceSession is @3.6 or newer
         sp<hardware::camera::device::V3_6::ICameraDeviceSession> mHidlSession_3_6;
+        // Valid if ICameraDeviceSession is @3.7 or newer
+        sp<hardware::camera::device::V3_7::ICameraDeviceSession> mHidlSession_3_7;
 
         std::shared_ptr<RequestMetadataQueue> mRequestMetadataQueue;
 
@@ -470,8 +478,6 @@
         uint32_t height;
         explicit Size(uint32_t w = 0, uint32_t h = 0) : width(w), height(h){}
     };
-    // Map from format to size.
-    Vector<Size>               mSupportedOpaqueInputSizes;
 
     enum Status {
         STATUS_ERROR,
@@ -492,7 +498,10 @@
 
     camera3::StreamSet         mOutputStreams;
     sp<camera3::Camera3Stream> mInputStream;
+    bool                       mIsInputStreamMultiResolution;
     SessionStatsBuilder        mSessionStatsBuilder;
+    // Map from stream group ID to physical cameras backing the stream group
+    std::map<int32_t, std::set<String8>> mGroupIdPhysicalCameraMap;
 
     int                        mNextStreamId;
     bool                       mNeedConfig;
@@ -525,6 +534,7 @@
         PhysicalCameraSettingsList          mSettingsList;
         sp<camera3::Camera3Stream>          mInputStream;
         camera_stream_buffer_t              mInputBuffer;
+        camera3::Size                       mInputBufferSize;
         Vector<sp<camera3::Camera3OutputStreamInterface> >
                                             mOutputStreams;
         SurfaceMap                          mOutputSurfaces;
@@ -750,7 +760,7 @@
      * Helper function to get the largest Jpeg resolution (in area)
      * Return Size(0, 0) if static metadata is invalid
      */
-    Size getMaxJpegResolution() const;
+    camera3::Size getMaxJpegResolution() const;
 
     /**
      * Helper function to get the offset between MONOTONIC and BOOTTIME
@@ -795,7 +805,8 @@
          * Call after stream (re)-configuration is completed.
          */
         void     configurationComplete(bool isConstrainedHighSpeed,
-                const CameraMetadata& sessionParams);
+                const CameraMetadata& sessionParams,
+                const std::map<int32_t, std::set<String8>>& groupIdPhysicalCameraMap);
 
         /**
          * Set or clear the list of repeating requests. Does not block
@@ -1052,6 +1063,8 @@
         Vector<int32_t>    mSessionParamKeys;
         CameraMetadata     mLatestSessionParams;
 
+        std::map<int32_t, std::set<String8>> mGroupIdPhysicalCameraMap;
+
         const bool         mUseHalBufManager;
     };
     sp<RequestThread> mRequestThread;
@@ -1071,7 +1084,8 @@
 
     status_t registerInFlight(uint32_t frameNumber,
             int32_t numBuffers, CaptureResultExtras resultExtras, bool hasInput,
-            bool callback, nsecs_t maxExpectedDuration, std::set<String8>& physicalCameraIds,
+            bool callback, nsecs_t maxExpectedDuration,
+            const std::set<std::set<String8>>& physicalCameraIds,
             bool isStillCapture, bool isZslCapture, bool rotateAndCropAuto,
             const std::set<std::string>& cameraIdsWithZoom, const SurfaceMap& outputSurfaces,
             nsecs_t requestTimeNs);
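
registerInFlight now tracks a set of physical-camera sets per request: each multi-resolution stream group contributes the whole set of physical cameras that back it (via mGroupIdPhysicalCameraMap), while an ordinary physical output stream contributes a singleton. A small sketch of that bookkeeping follows, with std::string standing in for android::String8 and made-up camera IDs.

#include <iostream>
#include <map>
#include <set>
#include <string>

int main() {
    // Filled at stream-configuration time: stream group ID -> backing physical cameras.
    std::map<int, std::set<std::string>> groupIdPhysicalCameraMap = {
        {0, {"2", "3"}},   // hypothetical multi-resolution group backed by cameras "2" and "3"
    };

    std::set<std::set<std::string>> requestedPhysicalCameras;

    // A multi-resolution output stream contributes its whole group...
    int streamGroupId = 0;
    if (streamGroupId != -1 && groupIdPhysicalCameraMap.count(streamGroupId) == 1) {
        requestedPhysicalCameras.insert(groupIdPhysicalCameraMap[streamGroupId]);
    }

    // ...while a regular physical output stream contributes a singleton set.
    std::string physicalCameraId = "4";
    if (!physicalCameraId.empty()) {
        requestedPhysicalCameras.insert(std::set<std::string>{physicalCameraId});
    }

    std::cout << "alternatives tracked for this request: "
              << requestedPhysicalCameras.size() << "\n";  // prints 2
    return 0;
}
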
diff --git a/services/camera/libcameraservice/device3/Camera3FakeStream.cpp b/services/camera/libcameraservice/device3/Camera3FakeStream.cpp
index 2196c7d..8cc6833 100644
--- a/services/camera/libcameraservice/device3/Camera3FakeStream.cpp
+++ b/services/camera/libcameraservice/device3/Camera3FakeStream.cpp
@@ -31,7 +31,7 @@
 Camera3FakeStream::Camera3FakeStream(int id) :
         Camera3IOStreamBase(id, CAMERA_STREAM_OUTPUT, FAKE_WIDTH, FAKE_HEIGHT,
                 /*maxSize*/0, FAKE_FORMAT, FAKE_DATASPACE, FAKE_ROTATION,
-                FAKE_ID) {
+                FAKE_ID, std::unordered_set<int32_t>{ANDROID_SENSOR_PIXEL_MODE_DEFAULT}) {
 
 }
 
diff --git a/services/camera/libcameraservice/device3/Camera3IOStreamBase.cpp b/services/camera/libcameraservice/device3/Camera3IOStreamBase.cpp
index f6acda8..0204d49 100644
--- a/services/camera/libcameraservice/device3/Camera3IOStreamBase.cpp
+++ b/services/camera/libcameraservice/device3/Camera3IOStreamBase.cpp
@@ -32,10 +32,12 @@
 Camera3IOStreamBase::Camera3IOStreamBase(int id, camera_stream_type_t type,
         uint32_t width, uint32_t height, size_t maxSize, int format,
         android_dataspace dataSpace, camera_stream_rotation_t rotation,
-        const String8& physicalCameraId, int setId) :
+        const String8& physicalCameraId,
+        const std::unordered_set<int32_t> &sensorPixelModesUsed,
+        int setId, bool isMultiResolution) :
         Camera3Stream(id, type,
                 width, height, maxSize, format, dataSpace, rotation,
-                physicalCameraId, setId),
+                physicalCameraId, sensorPixelModesUsed, setId, isMultiResolution),
         mTotalBufferCount(0),
         mHandoutTotalBufferCount(0),
         mHandoutOutputBufferCount(0),
diff --git a/services/camera/libcameraservice/device3/Camera3IOStreamBase.h b/services/camera/libcameraservice/device3/Camera3IOStreamBase.h
index 719fa14..90c8a7b 100644
--- a/services/camera/libcameraservice/device3/Camera3IOStreamBase.h
+++ b/services/camera/libcameraservice/device3/Camera3IOStreamBase.h
@@ -36,7 +36,8 @@
             uint32_t width, uint32_t height, size_t maxSize, int format,
             android_dataspace dataSpace, camera_stream_rotation_t rotation,
             const String8& physicalCameraId,
-            int setId = CAMERA3_STREAM_SET_ID_INVALID);
+            const std::unordered_set<int32_t> &sensorPixelModesUsed,
+            int setId = CAMERA3_STREAM_SET_ID_INVALID, bool isMultiResolution = false);
 
   public:
 
diff --git a/services/camera/libcameraservice/device3/Camera3InputStream.cpp b/services/camera/libcameraservice/device3/Camera3InputStream.cpp
index ad70a3a..6d8317b 100644
--- a/services/camera/libcameraservice/device3/Camera3InputStream.cpp
+++ b/services/camera/libcameraservice/device3/Camera3InputStream.cpp
@@ -33,7 +33,8 @@
         uint32_t width, uint32_t height, int format) :
         Camera3IOStreamBase(id, CAMERA_STREAM_INPUT, width, height, /*maxSize*/0,
                             format, HAL_DATASPACE_UNKNOWN, CAMERA_STREAM_ROTATION_0,
-                            FAKE_ID) {
+                            FAKE_ID,
+                            std::unordered_set<int32_t>{ANDROID_SENSOR_PIXEL_MODE_DEFAULT}) {
 
     if (format == HAL_PIXEL_FORMAT_BLOB) {
         ALOGE("%s: Bad format, BLOB not supported", __FUNCTION__);
@@ -46,10 +47,14 @@
 }
 
 status_t Camera3InputStream::getInputBufferLocked(
-        camera_stream_buffer *buffer) {
+        camera_stream_buffer *buffer, Size *size) {
     ATRACE_CALL();
     status_t res;
 
+    if (size == nullptr) {
+        ALOGE("%s: size must not be null", __FUNCTION__);
+        return BAD_VALUE;
+    }
     // FIXME: will not work in (re-)registration
     if (mState == STATE_IN_CONFIG || mState == STATE_IN_RECONFIG) {
         ALOGE("%s: Stream %d: Buffer registration for input streams"
@@ -77,10 +82,12 @@
         return res;
     }
 
+    size->width  = bufferItem.mGraphicBuffer->getWidth();
+    size->height = bufferItem.mGraphicBuffer->getHeight();
+
     anb = bufferItem.mGraphicBuffer->getNativeBuffer();
     assert(anb != NULL);
     fenceFd = bufferItem.mFence->dup();
-
     /**
      * FenceFD now owned by HAL except in case of error,
      * in which case we reassign it to acquire_fence
diff --git a/services/camera/libcameraservice/device3/Camera3InputStream.h b/services/camera/libcameraservice/device3/Camera3InputStream.h
index 03afa17..46221d1 100644
--- a/services/camera/libcameraservice/device3/Camera3InputStream.h
+++ b/services/camera/libcameraservice/device3/Camera3InputStream.h
@@ -70,7 +70,7 @@
      * Camera3Stream interface
      */
 
-    virtual status_t getInputBufferLocked(camera_stream_buffer *buffer);
+    virtual status_t getInputBufferLocked(camera_stream_buffer *buffer, Size *size);
     virtual status_t returnInputBufferLocked(
             const camera_stream_buffer &buffer);
     virtual status_t getInputBufferProducerLocked(
diff --git a/services/camera/libcameraservice/device3/Camera3OutputStream.cpp b/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
index c835f51..221bebb 100644
--- a/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
+++ b/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
@@ -44,10 +44,11 @@
         uint32_t width, uint32_t height, int format,
         android_dataspace dataSpace, camera_stream_rotation_t rotation,
         nsecs_t timestampOffset, const String8& physicalCameraId,
-        int setId) :
+        const std::unordered_set<int32_t> &sensorPixelModesUsed,
+        int setId, bool isMultiResolution) :
         Camera3IOStreamBase(id, CAMERA_STREAM_OUTPUT, width, height,
                             /*maxSize*/0, format, dataSpace, rotation,
-                            physicalCameraId, setId),
+                            physicalCameraId, sensorPixelModesUsed, setId, isMultiResolution),
         mConsumer(consumer),
         mTransform(0),
         mTraceFirstBuffer(true),
@@ -70,9 +71,12 @@
         sp<Surface> consumer,
         uint32_t width, uint32_t height, size_t maxSize, int format,
         android_dataspace dataSpace, camera_stream_rotation_t rotation,
-        nsecs_t timestampOffset, const String8& physicalCameraId, int setId) :
+        nsecs_t timestampOffset, const String8& physicalCameraId,
+        const std::unordered_set<int32_t> &sensorPixelModesUsed,
+        int setId, bool isMultiResolution) :
         Camera3IOStreamBase(id, CAMERA_STREAM_OUTPUT, width, height, maxSize,
-                            format, dataSpace, rotation, physicalCameraId, setId),
+                            format, dataSpace, rotation, physicalCameraId, sensorPixelModesUsed,
+                            setId, isMultiResolution),
         mConsumer(consumer),
         mTransform(0),
         mTraceFirstBuffer(true),
@@ -102,10 +106,12 @@
         uint32_t width, uint32_t height, int format,
         uint64_t consumerUsage, android_dataspace dataSpace,
         camera_stream_rotation_t rotation, nsecs_t timestampOffset,
-        const String8& physicalCameraId, int setId) :
+        const String8& physicalCameraId,
+        const std::unordered_set<int32_t> &sensorPixelModesUsed,
+        int setId, bool isMultiResolution) :
         Camera3IOStreamBase(id, CAMERA_STREAM_OUTPUT, width, height,
                             /*maxSize*/0, format, dataSpace, rotation,
-                            physicalCameraId, setId),
+                            physicalCameraId, sensorPixelModesUsed, setId, isMultiResolution),
         mConsumer(nullptr),
         mTransform(0),
         mTraceFirstBuffer(true),
@@ -140,12 +146,13 @@
                                          android_dataspace dataSpace,
                                          camera_stream_rotation_t rotation,
                                          const String8& physicalCameraId,
+                                        const std::unordered_set<int32_t> &sensorPixelModesUsed,
                                          uint64_t consumerUsage, nsecs_t timestampOffset,
-                                         int setId) :
+                                         int setId, bool isMultiResolution) :
         Camera3IOStreamBase(id, type, width, height,
                             /*maxSize*/0,
                             format, dataSpace, rotation,
-                            physicalCameraId, setId),
+                            physicalCameraId, sensorPixelModesUsed, setId, isMultiResolution),
         mTransform(0),
         mTraceFirstBuffer(true),
         mUseMonoTimestamp(false),
@@ -570,10 +577,12 @@
             !(isConsumedByHWComposer() || isConsumedByHWTexture())) {
         uint64_t consumerUsage = 0;
         getEndpointUsage(&consumerUsage);
+        uint32_t width = (mMaxSize == 0) ? getWidth() : mMaxSize;
+        uint32_t height = (mMaxSize == 0) ? getHeight() : 1;
         StreamInfo streamInfo(
-                getId(), getStreamSetId(), getWidth(), getHeight(), getFormat(), getDataSpace(),
+                getId(), getStreamSetId(), width, height, getFormat(), getDataSpace(),
                 mUsage | consumerUsage, mTotalBufferCount,
-                /*isConfigured*/true);
+                /*isConfigured*/true, isMultiResolution());
         wp<Camera3OutputStream> weakThis(this);
         res = mBufferManager->registerStream(weakThis,
                 streamInfo);
@@ -604,7 +613,8 @@
 
     if (mUseBufferManager) {
         sp<GraphicBuffer> gb;
-        res = mBufferManager->getBufferForStream(getId(), getStreamSetId(), &gb, fenceFd);
+        res = mBufferManager->getBufferForStream(getId(), getStreamSetId(),
+                isMultiResolution(), &gb, fenceFd);
         if (res == OK) {
             // Attach this buffer to the bufferQueue: the buffer will be in dequeue state after a
             // successful return.
@@ -693,7 +703,8 @@
 
             sp<GraphicBuffer> gb;
             res = mBufferManager->getBufferForStream(
-                    getId(), getStreamSetId(), &gb, fenceFd, /*noFreeBuffer*/true);
+                    getId(), getStreamSetId(), isMultiResolution(),
+                    &gb, fenceFd, /*noFreeBuffer*/true);
 
             if (res == OK) {
                 // Attach this buffer to the bufferQueue: the buffer will be in dequeue state after
@@ -740,7 +751,8 @@
         onBuffersRemovedLocked(removedBuffers);
 
         if (notifyBufferManager && mUseBufferManager && removedBuffers.size() > 0) {
-            mBufferManager->onBuffersRemoved(getId(), getStreamSetId(), removedBuffers.size());
+            mBufferManager->onBuffersRemoved(getId(), getStreamSetId(), isMultiResolution(),
+                    removedBuffers.size());
         }
     }
 }
@@ -802,7 +814,7 @@
     // Since device is already idle, there is no getBuffer call to buffer manager, unregister the
     // stream at this point should be safe.
     if (mUseBufferManager) {
-        res = mBufferManager->unregisterStream(getId(), getStreamSetId());
+        res = mBufferManager->unregisterStream(getId(), getStreamSetId(), isMultiResolution());
         if (res != OK) {
             ALOGE("%s: Unable to unregister stream %d from buffer manager "
                     "(error %d %s)", __FUNCTION__, mId, res, strerror(-res));
@@ -914,7 +926,8 @@
     ALOGV("Stream %d: Buffer released", stream->getId());
     bool shouldFreeBuffer = false;
     status_t res = stream->mBufferManager->onBufferReleased(
-        stream->getId(), stream->getStreamSetId(), &shouldFreeBuffer);
+        stream->getId(), stream->getStreamSetId(), stream->isMultiResolution(),
+        &shouldFreeBuffer);
     if (res != OK) {
         ALOGE("%s: signaling buffer release to buffer manager failed: %s (%d).", __FUNCTION__,
                 strerror(-res), res);
@@ -927,7 +940,7 @@
         stream->detachBufferLocked(&buffer, /*fenceFd*/ nullptr);
         if (buffer.get() != nullptr) {
             stream->mBufferManager->notifyBufferRemoved(
-                    stream->getId(), stream->getStreamSetId());
+                    stream->getId(), stream->getStreamSetId(), stream->isMultiResolution());
         }
     }
 }
@@ -945,7 +958,7 @@
         stream->onBuffersRemovedLocked(buffers);
         if (stream->mUseBufferManager) {
             stream->mBufferManager->onBuffersRemoved(stream->getId(),
-                    stream->getStreamSetId(), buffers.size());
+                    stream->getStreamSetId(), stream->isMultiResolution(), buffers.size());
         }
         ALOGV("Stream %d: %zu Buffers discarded.", stream->getId(), buffers.size());
     }
diff --git a/services/camera/libcameraservice/device3/Camera3OutputStream.h b/services/camera/libcameraservice/device3/Camera3OutputStream.h
index 366d22a..00e4854 100644
--- a/services/camera/libcameraservice/device3/Camera3OutputStream.h
+++ b/services/camera/libcameraservice/device3/Camera3OutputStream.h
@@ -48,6 +48,7 @@
     uint64_t combinedUsage;
     size_t totalBufferCount;
     bool isConfigured;
+    bool isMultiRes;
     explicit StreamInfo(int id = CAMERA3_STREAM_ID_INVALID,
             int setId = CAMERA3_STREAM_SET_ID_INVALID,
             uint32_t w = 0,
@@ -56,7 +57,8 @@
             android_dataspace ds = HAL_DATASPACE_UNKNOWN,
             uint64_t usage = 0,
             size_t bufferCount = 0,
-            bool configured = false) :
+            bool configured = false,
+            bool multiRes = false) :
                 streamId(id),
                 streamSetId(setId),
                 width(w),
@@ -65,7 +67,8 @@
                 dataSpace(ds),
                 combinedUsage(usage),
                 totalBufferCount(bufferCount),
-                isConfigured(configured){}
+                isConfigured(configured),
+                isMultiRes(multiRes) {}
 };
 
 /**
@@ -84,8 +87,8 @@
             uint32_t width, uint32_t height, int format,
             android_dataspace dataSpace, camera_stream_rotation_t rotation,
             nsecs_t timestampOffset, const String8& physicalCameraId,
-            int setId = CAMERA3_STREAM_SET_ID_INVALID);
-
+            const std::unordered_set<int32_t> &sensorPixelModesUsed,
+            int setId = CAMERA3_STREAM_SET_ID_INVALID, bool isMultiResolution = false);
     /**
      * Set up a stream for formats that have a variable buffer size for the same
      * dimensions, such as compressed JPEG.
@@ -96,8 +99,8 @@
             uint32_t width, uint32_t height, size_t maxSize, int format,
             android_dataspace dataSpace, camera_stream_rotation_t rotation,
             nsecs_t timestampOffset, const String8& physicalCameraId,
-            int setId = CAMERA3_STREAM_SET_ID_INVALID);
-
+            const std::unordered_set<int32_t> &sensorPixelModesUsed,
+            int setId = CAMERA3_STREAM_SET_ID_INVALID, bool isMultiResolution = false);
     /**
      * Set up a stream with deferred consumer for formats that have 2 dimensions, such as
      * RAW and YUV. The consumer must be set before using this stream for output. A valid
@@ -107,7 +110,8 @@
             uint64_t consumerUsage, android_dataspace dataSpace,
             camera_stream_rotation_t rotation, nsecs_t timestampOffset,
             const String8& physicalCameraId,
-            int setId = CAMERA3_STREAM_SET_ID_INVALID);
+            const std::unordered_set<int32_t> &sensorPixelModesUsed,
+            int setId = CAMERA3_STREAM_SET_ID_INVALID, bool isMultiResolution = false);
 
     virtual ~Camera3OutputStream();
 
@@ -231,8 +235,9 @@
             uint32_t width, uint32_t height, int format,
             android_dataspace dataSpace, camera_stream_rotation_t rotation,
             const String8& physicalCameraId,
+            const std::unordered_set<int32_t> &sensorPixelModesUsed,
             uint64_t consumerUsage = 0, nsecs_t timestampOffset = 0,
-            int setId = CAMERA3_STREAM_SET_ID_INVALID);
+            int setId = CAMERA3_STREAM_SET_ID_INVALID, bool isMultiResolution = false);
 
     /**
      * Note that we release the lock briefly in this function
diff --git a/services/camera/libcameraservice/device3/Camera3OutputUtils.cpp b/services/camera/libcameraservice/device3/Camera3OutputUtils.cpp
index 384c2c6..9f225d0 100644
--- a/services/camera/libcameraservice/device3/Camera3OutputUtils.cpp
+++ b/services/camera/libcameraservice/device3/Camera3OutputUtils.cpp
@@ -484,6 +484,20 @@
     states.inflightIntf.checkInflightMapLengthLocked();
 }
 
+// Erase the subset of physicalCameraIds that contains the given id; returns
+// whether such a subset was found.
+bool erasePhysicalCameraIdSet(
+        std::set<std::set<String8>>& physicalCameraIds, const String8& id) {
+    bool found = false;
+    for (auto iter = physicalCameraIds.begin(); iter != physicalCameraIds.end(); iter++) {
+        if (iter->count(id) == 1) {
+            physicalCameraIds.erase(iter);
+            found = true;
+            break;
+        }
+    }
+    return found;
+}
+
 void processCaptureResult(CaptureOutputStates& states, const camera_capture_result *result) {
     ATRACE_CALL();
 
@@ -583,12 +597,10 @@
             }
             for (uint32_t i = 0; i < result->num_physcam_metadata; i++) {
                 String8 physicalId(result->physcam_ids[i]);
-                std::set<String8>::iterator cameraIdIter =
-                        request.physicalCameraIds.find(physicalId);
-                if (cameraIdIter != request.physicalCameraIds.end()) {
-                    request.physicalCameraIds.erase(cameraIdIter);
-                } else {
-                    SET_ERR("Total result for frame %d has already returned for camera %s",
+                bool validPhysicalCameraMetadata =
+                        erasePhysicalCameraIdSet(request.physicalCameraIds, physicalId);
+                if (!validPhysicalCameraMetadata) {
+                    SET_ERR("Unexpected total result for frame %d camera %s",
                             frameNumber, physicalId.c_str());
                     return;
                 }
@@ -1083,14 +1095,14 @@
                             errorCode) {
                         if (physicalCameraId.size() > 0) {
                             String8 cameraId(physicalCameraId);
-                            auto iter = r.physicalCameraIds.find(cameraId);
-                            if (iter == r.physicalCameraIds.end()) {
+                            bool validPhysicalCameraId =
+                                    erasePhysicalCameraIdSet(r.physicalCameraIds, cameraId);
+                            if (!validPhysicalCameraId) {
                                 ALOGE("%s: Reported result failure for physical camera device: %s "
                                         " which is not part of the respective request!",
                                         __FUNCTION__, cameraId.string());
                                 break;
                             }
-                            r.physicalCameraIds.erase(iter);
                             resultExtras.errorPhysicalCameraId = physicalCameraId;
                             physicalDeviceResultError = true;
                         }
diff --git a/services/camera/libcameraservice/device3/Camera3OutputUtils.h b/services/camera/libcameraservice/device3/Camera3OutputUtils.h
index 772fe6e..142889a 100644
--- a/services/camera/libcameraservice/device3/Camera3OutputUtils.h
+++ b/services/camera/libcameraservice/device3/Camera3OutputUtils.h
@@ -46,6 +46,7 @@
         uint32_t num_streams;
         camera_stream_t **streams;
         uint32_t operation_mode;
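+        // Whether the input stream is part of a multi-resolution stream group.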
+        bool input_is_multi_resolution;
     } camera_stream_configuration_t;
 
     typedef struct camera_capture_request {
@@ -57,6 +58,8 @@
         uint32_t num_physcam_settings;
         const char **physcam_id;
         const camera_metadata_t **physcam_settings;
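+        // Dimensions of the input buffer for this request, if any.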
+        int32_t input_width;
+        int32_t input_height;
     } camera_capture_request_t;
 
     typedef struct camera_capture_result {
diff --git a/services/camera/libcameraservice/device3/Camera3SharedOutputStream.cpp b/services/camera/libcameraservice/device3/Camera3SharedOutputStream.cpp
index 8aa5f1a..15cf7f4 100644
--- a/services/camera/libcameraservice/device3/Camera3SharedOutputStream.cpp
+++ b/services/camera/libcameraservice/device3/Camera3SharedOutputStream.cpp
@@ -32,9 +32,10 @@
         uint64_t consumerUsage, android_dataspace dataSpace,
         camera_stream_rotation_t rotation,
         nsecs_t timestampOffset, const String8& physicalCameraId,
+        const std::unordered_set<int32_t> &sensorPixelModesUsed,
         int setId, bool useHalBufManager) :
         Camera3OutputStream(id, CAMERA_STREAM_OUTPUT, width, height,
-                            format, dataSpace, rotation, physicalCameraId,
+                            format, dataSpace, rotation, physicalCameraId, sensorPixelModesUsed,
                             consumerUsage, timestampOffset, setId),
         mUseHalBufManager(useHalBufManager) {
     size_t consumerCount = std::min(surfaces.size(), kMaxOutputs);
diff --git a/services/camera/libcameraservice/device3/Camera3SharedOutputStream.h b/services/camera/libcameraservice/device3/Camera3SharedOutputStream.h
index a61316c..4b6341b 100644
--- a/services/camera/libcameraservice/device3/Camera3SharedOutputStream.h
+++ b/services/camera/libcameraservice/device3/Camera3SharedOutputStream.h
@@ -38,6 +38,7 @@
             uint64_t consumerUsage, android_dataspace dataSpace,
             camera_stream_rotation_t rotation, nsecs_t timestampOffset,
             const String8& physicalCameraId,
+            const std::unordered_set<int32_t> &sensorPixelModesUsed,
             int setId = CAMERA3_STREAM_SET_ID_INVALID,
             bool useHalBufManager = false);
 
diff --git a/services/camera/libcameraservice/device3/Camera3Stream.cpp b/services/camera/libcameraservice/device3/Camera3Stream.cpp
index 4cb954e..02b6585 100644
--- a/services/camera/libcameraservice/device3/Camera3Stream.cpp
+++ b/services/camera/libcameraservice/device3/Camera3Stream.cpp
@@ -49,7 +49,9 @@
         camera_stream_type type,
         uint32_t width, uint32_t height, size_t maxSize, int format,
         android_dataspace dataSpace, camera_stream_rotation_t rotation,
-        const String8& physicalCameraId, int setId) :
+        const String8& physicalCameraId,
+        const std::unordered_set<int32_t> &sensorPixelModesUsed,
+        int setId, bool isMultiResolution) :
     camera_stream(),
     mId(id),
     mSetId(setId),
@@ -73,7 +75,8 @@
     mDataSpaceOverridden(false),
     mOriginalDataSpace(dataSpace),
     mPhysicalCameraId(physicalCameraId),
-    mLastTimestamp(0) {
+    mLastTimestamp(0),
+    mIsMultiResolution(isMultiResolution) {
 
     camera_stream::stream_type = type;
     camera_stream::width = width;
@@ -83,6 +86,7 @@
     camera_stream::rotation = rotation;
     camera_stream::max_buffers = 0;
     camera_stream::physical_camera_id = mPhysicalCameraId.string();
+    camera_stream::sensor_pixel_modes_used = sensorPixelModesUsed;
 
     if ((format == HAL_PIXEL_FORMAT_BLOB || format == HAL_PIXEL_FORMAT_RAW_OPAQUE) &&
             maxSize == 0) {
@@ -99,6 +103,14 @@
     return mSetId;
 }
 
+int Camera3Stream::getHalStreamGroupId() const {
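+    // A multi-resolution stream reuses its stream set id as the HAL stream
+    // group id; -1 means the stream is not part of any HAL stream group.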
+    return mIsMultiResolution ? mSetId : -1;
+}
+
+bool Camera3Stream::isMultiResolution() const {
+    return mIsMultiResolution;
+}
+
 uint32_t Camera3Stream::getWidth() const {
     return camera_stream::width;
 }
@@ -743,11 +755,16 @@
     return res;
 }
 
-status_t Camera3Stream::getInputBuffer(camera_stream_buffer *buffer, bool respectHalLimit) {
+status_t Camera3Stream::getInputBuffer(camera_stream_buffer *buffer,
+        Size* size, bool respectHalLimit) {
     ATRACE_CALL();
     Mutex::Autolock l(mLock);
     status_t res = OK;
 
+    if (size == nullptr) {
+        ALOGE("%s: size must not be null", __FUNCTION__);
+        return BAD_VALUE;
+    }
     // This function should be only called when the stream is configured already.
     if (mState != STATE_CONFIGURED) {
         ALOGE("%s: Stream %d: Can't get input buffers if stream is not in CONFIGURED state %d",
@@ -769,7 +786,7 @@
         }
     }
 
-    res = getInputBufferLocked(buffer);
+    res = getInputBufferLocked(buffer, size);
     if (res == OK) {
         fireBufferListenersLocked(*buffer, /*acquired*/true, /*output*/false);
         if (buffer->buffer) {
@@ -918,7 +935,7 @@
     ALOGE("%s: This type of stream does not support output", __FUNCTION__);
     return INVALID_OPERATION;
 }
-status_t Camera3Stream::getInputBufferLocked(camera_stream_buffer *) {
+status_t Camera3Stream::getInputBufferLocked(camera_stream_buffer *, Size *) {
     ALOGE("%s: This type of stream does not support input", __FUNCTION__);
     return INVALID_OPERATION;
 }
diff --git a/services/camera/libcameraservice/device3/Camera3Stream.h b/services/camera/libcameraservice/device3/Camera3Stream.h
index 55ed2f2..5a364ab 100644
--- a/services/camera/libcameraservice/device3/Camera3Stream.h
+++ b/services/camera/libcameraservice/device3/Camera3Stream.h
@@ -147,6 +147,14 @@
      * Get the output stream set id.
      */
     int              getStreamSetId() const;
+    /**
+     * Is this stream part of a multi-resolution stream set?
+     */
+    bool             isMultiResolution() const;
+    /**
+     * Get the HAL stream group id for a multi-resolution stream set.
+     * Returns -1 if this stream is not part of a multi-resolution stream set.
+     */
+    int              getHalStreamGroupId() const;
 
     /**
      * Get the stream's dimensions and format
@@ -356,10 +364,13 @@
      * For bidirectional streams, this method applies to the input-side
      * buffers.
      *
+     * This method also reports the dimensions of the acquired input buffer via 'size'.
+     *
      * Normally this call will block until the handed out buffer count is less than the stream
      * max buffer count; if respectHalLimit is set to false, this is ignored.
      */
-    status_t         getInputBuffer(camera_stream_buffer *buffer, bool respectHalLimit = true);
+    status_t         getInputBuffer(camera_stream_buffer *buffer,
+                             Size* size, bool respectHalLimit = true);
 
     /**
      * Return a buffer to the stream after use by the HAL.
@@ -487,7 +498,9 @@
     Camera3Stream(int id, camera_stream_type type,
             uint32_t width, uint32_t height, size_t maxSize, int format,
             android_dataspace dataSpace, camera_stream_rotation_t rotation,
-            const String8& physicalCameraId, int setId);
+            const String8& physicalCameraId,
+            const std::unordered_set<int32_t> &sensorPixelModesUsed,
+            int setId, bool isMultiResolution);
 
     wp<Camera3StreamBufferFreedListener> mBufferFreedListener;
 
@@ -509,7 +522,7 @@
 
     virtual status_t getBuffersLocked(std::vector<OutstandingBuffer>*);
 
-    virtual status_t getInputBufferLocked(camera_stream_buffer *buffer);
+    virtual status_t getInputBufferLocked(camera_stream_buffer *buffer, Size* size);
 
     virtual status_t returnInputBufferLocked(
             const camera_stream_buffer &buffer);
@@ -608,6 +621,7 @@
     String8 mPhysicalCameraId;
     nsecs_t mLastTimestamp;
 
+    bool mIsMultiResolution = false;
     bool mSupportOfflineProcessing = false;
 }; // class Camera3Stream
 
diff --git a/services/camera/libcameraservice/device3/Camera3StreamInterface.h b/services/camera/libcameraservice/device3/Camera3StreamInterface.h
index c558b07..2d3397c 100644
--- a/services/camera/libcameraservice/device3/Camera3StreamInterface.h
+++ b/services/camera/libcameraservice/device3/Camera3StreamInterface.h
@@ -62,6 +62,8 @@
     android_dataspace_t data_space;
     camera_stream_rotation_t rotation;
     const char* physical_camera_id;
+
+    std::unordered_set<int32_t> sensor_pixel_modes_used;
 } camera_stream_t;
 
 typedef struct camera_stream_buffer {
@@ -72,6 +74,12 @@
     int release_fence;
 } camera_stream_buffer_t;
 
+struct Size {
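+    // Simple width/height pair used for buffer and stream dimensions.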
+    uint32_t width;
+    uint32_t height;
+    explicit Size(uint32_t w = 0, uint32_t h = 0) : width(w), height(h){}
+};
+
 enum {
     /**
      * This stream set ID indicates that the set ID is invalid, and this stream doesn't intend to
@@ -98,13 +106,15 @@
         uint64_t consumerUsage;
         bool finalized = false;
         bool supportsOffline = false;
+        std::unordered_set<int32_t> sensorPixelModesUsed;
         OutputStreamInfo() :
             width(-1), height(-1), format(-1), dataSpace(HAL_DATASPACE_UNKNOWN),
             consumerUsage(0) {}
         OutputStreamInfo(int _width, int _height, int _format, android_dataspace _dataSpace,
-                uint64_t _consumerUsage) :
+                uint64_t _consumerUsage, const std::unordered_set<int32_t>& _sensorPixelModesUsed) :
             width(_width), height(_height), format(_format),
-            dataSpace(_dataSpace), consumerUsage(_consumerUsage) {}
+            dataSpace(_dataSpace), consumerUsage(_consumerUsage),
+            sensorPixelModesUsed(_sensorPixelModesUsed) {}
 };
 
 /**
@@ -129,6 +139,16 @@
     virtual int      getStreamSetId() const = 0;
 
     /**
+     * Is this stream part of a multi-resolution stream set?
+     */
+    virtual bool     isMultiResolution() const = 0;
+
+    /**
+     * Get the HAL stream group id for a multi-resolution stream set.
+     */
+    virtual int      getHalStreamGroupId() const = 0;
+
+    /**
      * Get the stream's dimensions and format
      */
     virtual uint32_t getWidth() const = 0;
@@ -352,7 +372,8 @@
      * Normally this call will block until the handed out buffer count is less than the stream
      * max buffer count; if respectHalLimit is set to false, this is ignored.
      */
-    virtual status_t getInputBuffer(camera_stream_buffer *buffer, bool respectHalLimit = true) = 0;
+    virtual status_t getInputBuffer(camera_stream_buffer *buffer,
+            Size *size, bool respectHalLimit = true) = 0;
 
     /**
      * Return a buffer to the stream after use by the HAL.
diff --git a/services/camera/libcameraservice/device3/DistortionMapper.cpp b/services/camera/libcameraservice/device3/DistortionMapper.cpp
index 316303e..89dd115 100644
--- a/services/camera/libcameraservice/device3/DistortionMapper.cpp
+++ b/services/camera/libcameraservice/device3/DistortionMapper.cpp
@@ -22,13 +22,14 @@
 #include <cmath>
 
 #include "device3/DistortionMapper.h"
+#include "utils/SessionConfigurationUtils.h"
 
 namespace android {
 
 namespace camera3 {
 
 
-DistortionMapper::DistortionMapper() : mValidMapping(false), mValidGrids(false) {
+DistortionMapper::DistortionMapper() {
     initRemappedKeys();
 }
 
@@ -61,41 +62,81 @@
 
 status_t DistortionMapper::setupStaticInfo(const CameraMetadata &deviceInfo) {
     std::lock_guard<std::mutex> lock(mMutex);
+    status_t res = setupStaticInfoLocked(deviceInfo, /*maxResolution*/false);
+    if (res != OK) {
+        return res;
+    }
+
+    mMaxResolution = SessionConfigurationUtils::isUltraHighResolutionSensor(deviceInfo);
+    if (mMaxResolution) {
+        res = setupStaticInfoLocked(deviceInfo, /*maxResolution*/true);
+    }
+    return res;
+}
+
+status_t DistortionMapper::setupStaticInfoLocked(const CameraMetadata &deviceInfo,
+        bool maxResolution) {
+    DistortionMapperInfo *mapperInfo = maxResolution ? &mDistortionMapperInfoMaximumResolution :
+            &mDistortionMapperInfo;
+
     camera_metadata_ro_entry_t array;
 
-    array = deviceInfo.find(ANDROID_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE);
+    array = deviceInfo.find(
+        SessionConfigurationUtils::getAppropriateModeTag(
+                ANDROID_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE, maxResolution));
     if (array.count != 4) return BAD_VALUE;
 
     float arrayX = static_cast<float>(array.data.i32[0]);
     float arrayY = static_cast<float>(array.data.i32[1]);
-    mArrayWidth = static_cast<float>(array.data.i32[2]);
-    mArrayHeight = static_cast<float>(array.data.i32[3]);
+    mapperInfo->mArrayWidth = static_cast<float>(array.data.i32[2]);
+    mapperInfo->mArrayHeight = static_cast<float>(array.data.i32[3]);
 
-    array = deviceInfo.find(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE);
+    array = deviceInfo.find(
+            SessionConfigurationUtils::getAppropriateModeTag(
+                    ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE, maxResolution));
     if (array.count != 4) return BAD_VALUE;
 
     float activeX = static_cast<float>(array.data.i32[0]);
     float activeY = static_cast<float>(array.data.i32[1]);
-    mActiveWidth = static_cast<float>(array.data.i32[2]);
-    mActiveHeight = static_cast<float>(array.data.i32[3]);
+    mapperInfo->mActiveWidth = static_cast<float>(array.data.i32[2]);
+    mapperInfo->mActiveHeight = static_cast<float>(array.data.i32[3]);
 
-    mArrayDiffX = activeX - arrayX;
-    mArrayDiffY = activeY - arrayY;
+    mapperInfo->mArrayDiffX = activeX - arrayX;
+    mapperInfo->mArrayDiffY = activeY - arrayY;
 
-    return updateCalibration(deviceInfo);
+    return updateCalibration(deviceInfo, /*isStatic*/ true, maxResolution);
+}
+
+static bool doesSettingsHaveMaxResolution(const CameraMetadata *settings) {
+    if (settings == nullptr) {
+        return false;
+    }
+    // First we get the sensorPixelMode from the settings metadata.
+    camera_metadata_ro_entry sensorPixelModeEntry = settings->find(ANDROID_SENSOR_PIXEL_MODE);
+    if (sensorPixelModeEntry.count != 0) {
+        return (sensorPixelModeEntry.data.u8[0] == ANDROID_SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION);
+    }
+    return false;
 }
 
 bool DistortionMapper::calibrationValid() const {
     std::lock_guard<std::mutex> lock(mMutex);
-
-    return mValidMapping;
+    bool isValid =  mDistortionMapperInfo.mValidMapping;
+    if (mMaxResolution) {
+        isValid = isValid && mDistortionMapperInfoMaximumResolution.mValidMapping;
+    }
+    return isValid;
 }
 
 status_t DistortionMapper::correctCaptureRequest(CameraMetadata *request) {
     std::lock_guard<std::mutex> lock(mMutex);
     status_t res;
 
-    if (!mValidMapping) return OK;
+    bool maxResolution = doesSettingsHaveMaxResolution(request);
+    DistortionMapperInfo *mapperInfo = maxResolution ? &mDistortionMapperInfoMaximumResolution :
+            &mDistortionMapperInfo;
+
+    if (!mapperInfo->mValidMapping) return OK;
 
     camera_metadata_entry_t e;
     e = request->find(ANDROID_DISTORTION_CORRECTION_MODE);
@@ -107,27 +148,30 @@
                 if (weight == 0) {
                     continue;
                 }
-                res = mapCorrectedToRaw(e.data.i32 + j, 2, /*clamp*/true);
+                res = mapCorrectedToRaw(e.data.i32 + j, 2, mapperInfo, /*clamp*/true);
                 if (res != OK) return res;
             }
         }
         for (auto rect : kRectsToCorrect) {
             e = request->find(rect);
-            res = mapCorrectedRectToRaw(e.data.i32, e.count / 4, /*clamp*/true);
+            res = mapCorrectedRectToRaw(e.data.i32, e.count / 4, mapperInfo, /*clamp*/true);
             if (res != OK) return res;
         }
     }
-
     return OK;
 }
 
 status_t DistortionMapper::correctCaptureResult(CameraMetadata *result) {
     std::lock_guard<std::mutex> lock(mMutex);
+
+    bool maxResolution = doesSettingsHaveMaxResolution(result);
+    DistortionMapperInfo *mapperInfo = maxResolution ? &mDistortionMapperInfoMaximumResolution :
+            &mDistortionMapperInfo;
     status_t res;
 
-    if (!mValidMapping) return OK;
+    if (!mapperInfo->mValidMapping) return OK;
 
-    res = updateCalibration(*result);
+    res = updateCalibration(*result, /*isStatic*/ false, maxResolution);
     if (res != OK) {
         ALOGE("Failure to update lens calibration information");
         return INVALID_OPERATION;
@@ -143,18 +187,18 @@
                 if (weight == 0) {
                     continue;
                 }
-                res = mapRawToCorrected(e.data.i32 + j, 2, /*clamp*/true);
+                res = mapRawToCorrected(e.data.i32 + j, 2, mapperInfo, /*clamp*/true);
                 if (res != OK) return res;
             }
         }
         for (auto rect : kRectsToCorrect) {
             e = result->find(rect);
-            res = mapRawRectToCorrected(e.data.i32, e.count / 4, /*clamp*/true);
+            res = mapRawRectToCorrected(e.data.i32, e.count / 4, mapperInfo, /*clamp*/true);
             if (res != OK) return res;
         }
         for (auto pts : kResultPointsToCorrectNoClamp) {
             e = result->find(pts);
-            res = mapRawToCorrected(e.data.i32, e.count / 2, /*clamp*/false);
+            res = mapRawToCorrected(e.data.i32, e.count / 2, mapperInfo, /*clamp*/false);
             if (res != OK) return res;
         }
     }
@@ -164,25 +208,37 @@
 
 // Utility methods; not guarded by mutex
 
-status_t DistortionMapper::updateCalibration(const CameraMetadata &result) {
+status_t DistortionMapper::updateCalibration(const CameraMetadata &result, bool isStatic,
+        bool maxResolution) {
     camera_metadata_ro_entry_t calib, distortion;
+    DistortionMapperInfo *mapperInfo =
+            maxResolution ? &mDistortionMapperInfoMaximumResolution : &mDistortionMapperInfo;
+    // Use the maximum-resolution variants of LENS_INTRINSIC_CALIBRATION and
+    // LENS_DISTORTION only when reading static metadata; capture results use
+    // the same base keys regardless of sensor pixel mode.
+    int calibrationKey =
+        SessionConfigurationUtils::getAppropriateModeTag(ANDROID_LENS_INTRINSIC_CALIBRATION,
+                maxResolution && isStatic);
+    int distortionKey =
+        SessionConfigurationUtils::getAppropriateModeTag(ANDROID_LENS_DISTORTION,
+                maxResolution && isStatic);
 
-    calib = result.find(ANDROID_LENS_INTRINSIC_CALIBRATION);
-    distortion = result.find(ANDROID_LENS_DISTORTION);
+    calib = result.find(calibrationKey);
+    distortion = result.find(distortionKey);
 
     if (calib.count != 5) return BAD_VALUE;
     if (distortion.count != 5) return BAD_VALUE;
 
     // Skip redoing work if no change to calibration fields
-    if (mValidMapping &&
-            mFx == calib.data.f[0] &&
-            mFy == calib.data.f[1] &&
-            mCx == calib.data.f[2] &&
-            mCy == calib.data.f[3] &&
-            mS == calib.data.f[4]) {
+    if (mapperInfo->mValidMapping &&
+            mapperInfo->mFx == calib.data.f[0] &&
+            mapperInfo->mFy == calib.data.f[1] &&
+            mapperInfo->mCx == calib.data.f[2] &&
+            mapperInfo->mCy == calib.data.f[3] &&
+            mapperInfo->mS == calib.data.f[4]) {
         bool noChange = true;
         for (size_t i = 0; i < distortion.count; i++) {
-            if (mK[i] != distortion.data.f[i]) {
+            if (mapperInfo->mK[i] != distortion.data.f[i]) {
                 noChange = false;
                 break;
             }
@@ -190,39 +246,39 @@
         if (noChange) return OK;
     }
 
-    mFx = calib.data.f[0];
-    mFy = calib.data.f[1];
-    mCx = calib.data.f[2];
-    mCy = calib.data.f[3];
-    mS = calib.data.f[4];
+    mapperInfo->mFx = calib.data.f[0];
+    mapperInfo->mFy = calib.data.f[1];
+    mapperInfo->mCx = calib.data.f[2];
+    mapperInfo->mCy = calib.data.f[3];
+    mapperInfo->mS = calib.data.f[4];
 
-    mInvFx = 1 / mFx;
-    mInvFy = 1 / mFy;
+    mapperInfo->mInvFx = 1 / mapperInfo->mFx;
+    mapperInfo->mInvFy = 1 / mapperInfo->mFy;
 
     for (size_t i = 0; i < distortion.count; i++) {
-        mK[i] = distortion.data.f[i];
+        mapperInfo->mK[i] = distortion.data.f[i];
     }
 
-    mValidMapping = true;
+    mapperInfo->mValidMapping = true;
     // Need to recalculate grid
-    mValidGrids = false;
+    mapperInfo->mValidGrids = false;
 
     return OK;
 }
 
 status_t DistortionMapper::mapRawToCorrected(int32_t *coordPairs, int coordCount,
-        bool clamp, bool simple) {
-    if (!mValidMapping) return INVALID_OPERATION;
+        DistortionMapperInfo *mapperInfo, bool clamp, bool simple) {
+    if (!mapperInfo->mValidMapping) return INVALID_OPERATION;
 
-    if (simple) return mapRawToCorrectedSimple(coordPairs, coordCount, clamp);
+    if (simple) return mapRawToCorrectedSimple(coordPairs, coordCount, mapperInfo, clamp);
 
-    if (!mValidGrids) {
-        status_t res = buildGrids();
+    if (!mapperInfo->mValidGrids) {
+        status_t res = buildGrids(mapperInfo);
         if (res != OK) return res;
     }
 
     for (int i = 0; i < coordCount * 2; i += 2) {
-        const GridQuad *quad = findEnclosingQuad(coordPairs + i, mDistortedGrid);
+        const GridQuad *quad = findEnclosingQuad(coordPairs + i, mapperInfo->mDistortedGrid);
         if (quad == nullptr) {
             ALOGE("Raw to corrected mapping failure: No quad found for (%d, %d)",
                     *(coordPairs + i), *(coordPairs + i + 1));
@@ -258,8 +314,8 @@
 
         // Clamp to within active array
         if (clamp) {
-            corrX = std::min(mActiveWidth - 1, std::max(0.f, corrX));
-            corrY = std::min(mActiveHeight - 1, std::max(0.f, corrY));
+            corrX = std::min(mapperInfo->mActiveWidth - 1, std::max(0.f, corrX));
+            corrY = std::min(mapperInfo->mActiveHeight - 1, std::max(0.f, corrY));
         }
 
         coordPairs[i] = static_cast<int32_t>(std::round(corrX));
@@ -270,19 +326,19 @@
 }
 
 status_t DistortionMapper::mapRawToCorrectedSimple(int32_t *coordPairs, int coordCount,
-        bool clamp) const {
-    if (!mValidMapping) return INVALID_OPERATION;
+       const DistortionMapperInfo *mapperInfo, bool clamp) const {
+    if (!mapperInfo->mValidMapping) return INVALID_OPERATION;
 
-    float scaleX = mActiveWidth / mArrayWidth;
-    float scaleY = mActiveHeight / mArrayHeight;
+    float scaleX = mapperInfo->mActiveWidth / mapperInfo->mArrayWidth;
+    float scaleY = mapperInfo->mActiveHeight / mapperInfo->mArrayHeight;
     for (int i = 0; i < coordCount * 2; i += 2) {
         float x = coordPairs[i];
         float y = coordPairs[i + 1];
         float corrX = x * scaleX;
         float corrY = y * scaleY;
         if (clamp) {
-            corrX = std::min(mActiveWidth - 1, std::max(0.f, corrX));
-            corrY = std::min(mActiveHeight - 1, std::max(0.f, corrY));
+            corrX = std::min(mapperInfo->mActiveWidth - 1, std::max(0.f, corrX));
+            corrY = std::min(mapperInfo->mActiveHeight - 1, std::max(0.f, corrY));
         }
         coordPairs[i] = static_cast<int32_t>(std::round(corrX));
         coordPairs[i + 1] = static_cast<int32_t>(std::round(corrY));
@@ -291,9 +347,9 @@
     return OK;
 }
 
-status_t DistortionMapper::mapRawRectToCorrected(int32_t *rects, int rectCount, bool clamp,
-        bool simple) {
-    if (!mValidMapping) return INVALID_OPERATION;
+status_t DistortionMapper::mapRawRectToCorrected(int32_t *rects, int rectCount,
+       DistortionMapperInfo *mapperInfo, bool clamp, bool simple) {
+    if (!mapperInfo->mValidMapping) return INVALID_OPERATION;
     for (int i = 0; i < rectCount * 4; i += 4) {
         // Map from (l, t, width, height) to (l, t, r, b)
         int32_t coords[4] = {
@@ -303,7 +359,7 @@
             rects[i + 1] + rects[i + 3] - 1
         };
 
-        mapRawToCorrected(coords, 2, clamp, simple);
+        mapRawToCorrected(coords, 2, mapperInfo, clamp, simple);
 
         // Map back to (l, t, width, height)
         rects[i] = coords[0];
@@ -315,60 +371,60 @@
     return OK;
 }
 
-status_t DistortionMapper::mapCorrectedToRaw(int32_t *coordPairs, int coordCount, bool clamp,
-        bool simple) const {
-    return mapCorrectedToRawImpl(coordPairs, coordCount, clamp, simple);
+status_t DistortionMapper::mapCorrectedToRaw(int32_t *coordPairs, int coordCount,
+       const DistortionMapperInfo *mapperInfo, bool clamp, bool simple) const {
+    return mapCorrectedToRawImpl(coordPairs, coordCount, mapperInfo, clamp, simple);
 }
 
 template<typename T>
-status_t DistortionMapper::mapCorrectedToRawImpl(T *coordPairs, int coordCount, bool clamp,
-        bool simple) const {
-    if (!mValidMapping) return INVALID_OPERATION;
+status_t DistortionMapper::mapCorrectedToRawImpl(T *coordPairs, int coordCount,
+       const DistortionMapperInfo *mapperInfo, bool clamp, bool simple) const {
+    if (!mapperInfo->mValidMapping) return INVALID_OPERATION;
 
-    if (simple) return mapCorrectedToRawImplSimple(coordPairs, coordCount, clamp);
+    if (simple) return mapCorrectedToRawImplSimple(coordPairs, coordCount, mapperInfo, clamp);
 
-    float activeCx = mCx - mArrayDiffX;
-    float activeCy = mCy - mArrayDiffY;
+    float activeCx = mapperInfo->mCx - mapperInfo->mArrayDiffX;
+    float activeCy = mapperInfo->mCy - mapperInfo->mArrayDiffY;
     for (int i = 0; i < coordCount * 2; i += 2) {
         // Move to normalized space from active array space
-        float ywi = (coordPairs[i + 1] - activeCy) * mInvFy;
-        float xwi = (coordPairs[i] - activeCx - mS * ywi) * mInvFx;
+        float ywi = (coordPairs[i + 1] - activeCy) * mapperInfo->mInvFy;
+        float xwi = (coordPairs[i] - activeCx - mapperInfo->mS * ywi) * mapperInfo->mInvFx;
         // Apply distortion model to calculate raw image coordinates
+        const std::array<float, 5> &kK = mapperInfo->mK;
         float rSq = xwi * xwi + ywi * ywi;
-        float Fr = 1.f + (mK[0] * rSq) + (mK[1] * rSq * rSq) + (mK[2] * rSq * rSq * rSq);
-        float xc = xwi * Fr + (mK[3] * 2 * xwi * ywi) + mK[4] * (rSq + 2 * xwi * xwi);
-        float yc = ywi * Fr + (mK[4] * 2 * xwi * ywi) + mK[3] * (rSq + 2 * ywi * ywi);
+        float Fr = 1.f + (kK[0] * rSq) + (kK[1] * rSq * rSq) + (kK[2] * rSq * rSq * rSq);
+        float xc = xwi * Fr + (kK[3] * 2 * xwi * ywi) + kK[4] * (rSq + 2 * xwi * xwi);
+        float yc = ywi * Fr + (kK[4] * 2 * xwi * ywi) + kK[3] * (rSq + 2 * ywi * ywi);
         // Move back to image space
-        float xr = mFx * xc + mS * yc + mCx;
-        float yr = mFy * yc + mCy;
+        float xr = mapperInfo->mFx * xc + mapperInfo->mS * yc + mapperInfo->mCx;
+        float yr = mapperInfo->mFy * yc + mapperInfo->mCy;
         // Clamp to within pre-correction active array
         if (clamp) {
-            xr = std::min(mArrayWidth - 1, std::max(0.f, xr));
-            yr = std::min(mArrayHeight - 1, std::max(0.f, yr));
+            xr = std::min(mapperInfo->mArrayWidth - 1, std::max(0.f, xr));
+            yr = std::min(mapperInfo->mArrayHeight - 1, std::max(0.f, yr));
         }
 
         coordPairs[i] = static_cast<T>(std::round(xr));
         coordPairs[i + 1] = static_cast<T>(std::round(yr));
     }
-
     return OK;
 }
 
 template<typename T>
 status_t DistortionMapper::mapCorrectedToRawImplSimple(T *coordPairs, int coordCount,
-        bool clamp) const {
-    if (!mValidMapping) return INVALID_OPERATION;
+       const DistortionMapperInfo *mapperInfo, bool clamp) const {
+    if (!mapperInfo->mValidMapping) return INVALID_OPERATION;
 
-    float scaleX = mArrayWidth / mActiveWidth;
-    float scaleY = mArrayHeight / mActiveHeight;
+    float scaleX = mapperInfo->mArrayWidth / mapperInfo->mActiveWidth;
+    float scaleY = mapperInfo->mArrayHeight / mapperInfo->mActiveHeight;
     for (int i = 0; i < coordCount * 2; i += 2) {
         float x = coordPairs[i];
         float y = coordPairs[i + 1];
         float rawX = x * scaleX;
         float rawY = y * scaleY;
         if (clamp) {
-            rawX = std::min(mArrayWidth - 1, std::max(0.f, rawX));
-            rawY = std::min(mArrayHeight - 1, std::max(0.f, rawY));
+            rawX = std::min(mapperInfo->mArrayWidth - 1, std::max(0.f, rawX));
+            rawY = std::min(mapperInfo->mArrayHeight - 1, std::max(0.f, rawY));
         }
         coordPairs[i] = static_cast<T>(std::round(rawX));
         coordPairs[i + 1] = static_cast<T>(std::round(rawY));
@@ -377,9 +433,9 @@
     return OK;
 }
 
-status_t DistortionMapper::mapCorrectedRectToRaw(int32_t *rects, int rectCount, bool clamp,
-        bool simple) const {
-    if (!mValidMapping) return INVALID_OPERATION;
+status_t DistortionMapper::mapCorrectedRectToRaw(int32_t *rects, int rectCount,
+       const DistortionMapperInfo *mapperInfo, bool clamp, bool simple) const {
+    if (!mapperInfo->mValidMapping) return INVALID_OPERATION;
 
     for (int i = 0; i < rectCount * 4; i += 4) {
         // Map from (l, t, width, height) to (l, t, r, b)
@@ -390,7 +446,7 @@
             rects[i + 1] + rects[i + 3] - 1
         };
 
-        mapCorrectedToRaw(coords, 2, clamp, simple);
+        mapCorrectedToRaw(coords, 2, mapperInfo, clamp, simple);
 
         // Map back to (l, t, width, height)
         rects[i] = coords[0];
@@ -402,37 +458,37 @@
     return OK;
 }
 
-status_t DistortionMapper::buildGrids() {
-    if (mCorrectedGrid.size() != kGridSize * kGridSize) {
-        mCorrectedGrid.resize(kGridSize * kGridSize);
-        mDistortedGrid.resize(kGridSize * kGridSize);
+status_t DistortionMapper::buildGrids(DistortionMapperInfo *mapperInfo) {
+    if (mapperInfo->mCorrectedGrid.size() != kGridSize * kGridSize) {
+        mapperInfo->mCorrectedGrid.resize(kGridSize * kGridSize);
+        mapperInfo->mDistortedGrid.resize(kGridSize * kGridSize);
     }
 
-    float gridMargin = mArrayWidth * kGridMargin;
-    float gridSpacingX = (mArrayWidth + 2 * gridMargin) / kGridSize;
-    float gridSpacingY = (mArrayHeight + 2 * gridMargin) / kGridSize;
+    float gridMargin = mapperInfo->mArrayWidth * kGridMargin;
+    float gridSpacingX = (mapperInfo->mArrayWidth + 2 * gridMargin) / kGridSize;
+    float gridSpacingY = (mapperInfo->mArrayHeight + 2 * gridMargin) / kGridSize;
 
     size_t index = 0;
     float x = -gridMargin;
     for (size_t i = 0; i < kGridSize; i++, x += gridSpacingX) {
         float y = -gridMargin;
         for (size_t j = 0; j < kGridSize; j++, y += gridSpacingY, index++) {
-            mCorrectedGrid[index].src = nullptr;
-            mCorrectedGrid[index].coords = {
+            mapperInfo->mCorrectedGrid[index].src = nullptr;
+            mapperInfo->mCorrectedGrid[index].coords = {
                 x, y,
                 x + gridSpacingX, y,
                 x + gridSpacingX, y + gridSpacingY,
                 x, y + gridSpacingY
             };
-            mDistortedGrid[index].src = &mCorrectedGrid[index];
-            mDistortedGrid[index].coords = mCorrectedGrid[index].coords;
-            status_t res = mapCorrectedToRawImpl(mDistortedGrid[index].coords.data(), 4,
-                    /*clamp*/false, /*simple*/false);
+            mapperInfo->mDistortedGrid[index].src = &(mapperInfo->mCorrectedGrid[index]);
+            mapperInfo->mDistortedGrid[index].coords = mapperInfo->mCorrectedGrid[index].coords;
+            status_t res = mapCorrectedToRawImpl(mapperInfo->mDistortedGrid[index].coords.data(), 4,
+                    mapperInfo, /*clamp*/false, /*simple*/false);
             if (res != OK) return res;
         }
     }
 
-    mValidGrids = true;
+    mapperInfo->mValidGrids = true;
     return OK;
 }
 
diff --git a/services/camera/libcameraservice/device3/DistortionMapper.h b/services/camera/libcameraservice/device3/DistortionMapper.h
index 5027bd0..96f4fda 100644
--- a/services/camera/libcameraservice/device3/DistortionMapper.h
+++ b/services/camera/libcameraservice/device3/DistortionMapper.h
@@ -37,13 +37,8 @@
     DistortionMapper();
 
     DistortionMapper(const DistortionMapper& other) :
-            mValidMapping(other.mValidMapping), mValidGrids(other.mValidGrids),
-            mFx(other.mFx), mFy(other.mFy), mCx(other.mCx), mCy(other.mCy), mS(other.mS),
-            mInvFx(other.mInvFx), mInvFy(other.mInvFy), mK(other.mK),
-            mArrayWidth(other.mArrayWidth), mArrayHeight(other.mArrayHeight),
-            mActiveWidth(other.mActiveWidth), mActiveHeight(other.mActiveHeight),
-            mArrayDiffX(other.mArrayDiffX), mArrayDiffY(other.mArrayDiffY),
-            mCorrectedGrid(other.mCorrectedGrid), mDistortedGrid(other.mDistortedGrid) {
+            mDistortionMapperInfo(other.mDistortionMapperInfo),
+            mDistortionMapperInfoMaximumResolution(other.mDistortionMapperInfoMaximumResolution) {
             initRemappedKeys(); }
 
     void initRemappedKeys() override;
@@ -75,10 +70,14 @@
 
 
   public: // Visible for testing. Not guarded by mutex; do not use concurrently
+
+    struct DistortionMapperInfo;
+
     /**
      * Update lens calibration from capture results or equivalent
      */
-    status_t updateCalibration(const CameraMetadata &result);
+    status_t updateCalibration(const CameraMetadata &result, bool isStatic = false,
+            bool maxResolution = false);
 
     /**
      * Transform from distorted (original) to corrected (warped) coordinates.
@@ -89,8 +88,8 @@
      *   clamp: Whether to clamp the result to the bounds of the active array
      *   simple: Whether to do complex correction or just a simple linear map
      */
-    status_t mapRawToCorrected(int32_t *coordPairs, int coordCount, bool clamp,
-            bool simple = true);
+    status_t mapRawToCorrected(int32_t *coordPairs, int coordCount,
+            DistortionMapperInfo *mapperInfo, bool clamp, bool simple = true);
 
     /**
      * Transform from distorted (original) to corrected (warped) coordinates.
@@ -101,8 +100,8 @@
      *   clamp: Whether to clamp the result to the bounds of the active array
      *   simple: Whether to do complex correction or just a simple linear map
      */
-    status_t mapRawRectToCorrected(int32_t *rects, int rectCount, bool clamp,
-            bool simple = true);
+    status_t mapRawRectToCorrected(int32_t *rects, int rectCount,
+          DistortionMapperInfo *mapperInfo, bool clamp, bool simple = true);
 
     /**
      * Transform from corrected (warped) to distorted (original) coordinates.
@@ -113,8 +112,8 @@
      *   clamp: Whether to clamp the result to the bounds of the precorrection active array
      *   simple: Whether to do complex correction or just a simple linear map
      */
-    status_t mapCorrectedToRaw(int32_t* coordPairs, int coordCount, bool clamp,
-            bool simple = true) const;
+    status_t mapCorrectedToRaw(int32_t* coordPairs, int coordCount,
+            const DistortionMapperInfo *mapperInfo, bool clamp, bool simple = true) const;
 
     /**
      * Transform from corrected (warped) to distorted (original) coordinates.
@@ -125,8 +124,8 @@
      *   clamp: Whether to clamp the result to the bounds of the precorrection active array
      *   simple: Whether to do complex correction or just a simple linear map
      */
-    status_t mapCorrectedRectToRaw(int32_t *rects, int rectCount, bool clamp,
-            bool simple = true) const;
+    status_t mapCorrectedRectToRaw(int32_t *rects, int rectCount,
+           const DistortionMapperInfo *mapperInfo, bool clamp, bool simple = true) const;
 
     struct GridQuad {
         // Source grid quad, or null
@@ -136,6 +135,28 @@
         std::array<float, 8> coords;
     };
 
+    struct DistortionMapperInfo {
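+        // Calibration and grid state for a single sensor pixel mode; separate
+        // instances are kept for the default and maximum-resolution modes.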
+        bool mValidMapping = false;
+        bool mValidGrids = false;
+
+        // intrinsic parameters, in pixels
+        float mFx, mFy, mCx, mCy, mS;
+        // pre-calculated inverses for speed
+        float mInvFx, mInvFy;
+        // radial/tangential distortion parameters
+        std::array<float, 5> mK;
+
+        // pre-correction active array dimensions
+        float mArrayWidth, mArrayHeight;
+        // active array dimensions
+        float mActiveWidth, mActiveHeight;
+        // corner offsets between pre-correction and active arrays
+        float mArrayDiffX, mArrayDiffY;
+
+        std::vector<GridQuad> mCorrectedGrid;
+        std::vector<GridQuad> mDistortedGrid;
+    };
+
     // Find which grid quad encloses the point; returns null if none do
     static const GridQuad* findEnclosingQuad(
             const int32_t pt[2], const std::vector<GridQuad>& grid);
@@ -153,6 +174,11 @@
     // if it is false, then an interpolation coordinate for edges E14 and E23 is found.
     static float calculateUorV(const int32_t pt[2], const GridQuad& quad, bool calculateU);
 
+    DistortionMapperInfo *getMapperInfo(bool maxResolution = false) {
+          return maxResolution ? &mDistortionMapperInfoMaximumResolution :
+                  &mDistortionMapperInfo;
+    };
+
   private:
     mutable std::mutex mMutex;
 
@@ -163,39 +189,28 @@
     // Fuzziness for float inequality tests
     constexpr static float kFloatFuzz = 1e-4;
 
+    bool mMaxResolution = false;
+
+    status_t setupStaticInfoLocked(const CameraMetadata &deviceInfo, bool maxResolution);
+
     // Single implementation for various mapCorrectedToRaw methods
     template<typename T>
-    status_t mapCorrectedToRawImpl(T* coordPairs, int coordCount, bool clamp, bool simple) const;
+    status_t mapCorrectedToRawImpl(T* coordPairs, int coordCount,
+            const DistortionMapperInfo *mapperInfo, bool clamp, bool simple) const;
 
     // Simple linear interpolation option
     template<typename T>
-    status_t mapCorrectedToRawImplSimple(T* coordPairs, int coordCount, bool clamp) const;
+    status_t mapCorrectedToRawImplSimple(T* coordPairs, int coordCount,
+            const DistortionMapperInfo *mapperInfo, bool clamp) const;
 
-    status_t mapRawToCorrectedSimple(int32_t *coordPairs, int coordCount, bool clamp) const;
+    status_t mapRawToCorrectedSimple(int32_t *coordPairs, int coordCount,
+            const DistortionMapperInfo *mapperInfo, bool clamp) const;
 
     // Utility to create reverse mapping grids
-    status_t buildGrids();
+    status_t buildGrids(DistortionMapperInfo *mapperInfo);
 
-
-    bool mValidMapping;
-    bool mValidGrids;
-
-    // intrisic parameters, in pixels
-    float mFx, mFy, mCx, mCy, mS;
-    // pre-calculated inverses for speed
-    float mInvFx, mInvFy;
-    // radial/tangential distortion parameters
-    std::array<float, 5> mK;
-
-    // pre-correction active array dimensions
-    float mArrayWidth, mArrayHeight;
-    // active array dimensions
-    float mActiveWidth, mActiveHeight;
-    // corner offsets between pre-correction and active arrays
-    float mArrayDiffX, mArrayDiffY;
-
-    std::vector<GridQuad> mCorrectedGrid;
-    std::vector<GridQuad> mDistortedGrid;
+    DistortionMapperInfo mDistortionMapperInfo;
+    DistortionMapperInfo mDistortionMapperInfoMaximumResolution;
 
 }; // class DistortionMapper
 
diff --git a/services/camera/libcameraservice/device3/InFlightRequest.h b/services/camera/libcameraservice/device3/InFlightRequest.h
index e3aaf44..523a2c7 100644
--- a/services/camera/libcameraservice/device3/InFlightRequest.h
+++ b/services/camera/libcameraservice/device3/InFlightRequest.h
@@ -96,7 +96,10 @@
     ERROR_BUF_STRATEGY errorBufStrategy;
 
     // The physical camera ids being requested.
-    std::set<String8> physicalCameraIds;
+    // For a request on a physical camera stream, the inner set contains a single Id.
+    // For a request on a stream group containing physical camera streams, the
+    // inner set contains all stream Ids in the group.
+    std::set<std::set<String8>> physicalCameraIds;
 
     // Map of physicalCameraId <-> Metadata
     std::vector<PhysicalCaptureResultInfo> physicalMetadatas;
@@ -142,7 +145,7 @@
 
     InFlightRequest(int numBuffers, CaptureResultExtras extras, bool hasInput,
             bool hasAppCallback, nsecs_t maxDuration,
-            const std::set<String8>& physicalCameraIdSet, bool isStillCapture,
+            const std::set<std::set<String8>>& physicalCameraIdSet, bool isStillCapture,
             bool isZslCapture, bool rotateAndCropAuto, const std::set<std::string>& idsWithZoom,
             nsecs_t requestNs, const SurfaceMap& outSurfaces = SurfaceMap{}) :
             shutterTimestamp(0),
diff --git a/services/camera/libcameraservice/device3/ZoomRatioMapper.cpp b/services/camera/libcameraservice/device3/ZoomRatioMapper.cpp
index 1bc2081..1a39510 100644
--- a/services/camera/libcameraservice/device3/ZoomRatioMapper.cpp
+++ b/services/camera/libcameraservice/device3/ZoomRatioMapper.cpp
@@ -20,6 +20,7 @@
 #include <algorithm>
 
 #include "device3/ZoomRatioMapper.h"
+#include "utils/SessionConfigurationUtils.h"
 
 namespace android {
 
@@ -128,43 +129,120 @@
     return OK;
 }
 
+static bool getArrayWidthAndHeight(const CameraMetadata *deviceInfo,
+        int32_t arrayTag, int32_t *width, int32_t *height) {
+    if (width == nullptr || height == nullptr) {
+        ALOGE("%s: width / height nullptr", __FUNCTION__);
+        return false;
+    }
+    camera_metadata_ro_entry_t entry;
+    entry = deviceInfo->find(arrayTag);
+    if (entry.count != 4) return false;
+    *width = entry.data.i32[2];
+    *height = entry.data.i32[3];
+    return true;
+}
+
 ZoomRatioMapper::ZoomRatioMapper(const CameraMetadata* deviceInfo,
         bool supportNativeZoomRatio, bool usePrecorrectArray) {
     initRemappedKeys();
 
-    camera_metadata_ro_entry_t entry;
+    int32_t arrayW = 0;
+    int32_t arrayH = 0;
+    int32_t arrayMaximumResolutionW = 0;
+    int32_t arrayMaximumResolutionH = 0;
+    int32_t activeW = 0;
+    int32_t activeH = 0;
+    int32_t activeMaximumResolutionW = 0;
+    int32_t activeMaximumResolutionH = 0;
 
-    entry = deviceInfo->find(ANDROID_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE);
-    if (entry.count != 4) return;
-    int32_t arrayW = entry.data.i32[2];
-    int32_t arrayH = entry.data.i32[3];
+    if (!getArrayWidthAndHeight(deviceInfo, ANDROID_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE,
+            &arrayW, &arrayH)) {
+        ALOGE("%s: Couldn't get pre correction active array size", __FUNCTION__);
+        return;
+    }
+    if (!getArrayWidthAndHeight(deviceInfo, ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
+            &activeW, &activeH)) {
+        ALOGE("%s: Couldn't get active array size", __FUNCTION__);
+        return;
+    }
 
-    entry = deviceInfo->find(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE);
-    if (entry.count != 4) return;
-    int32_t activeW = entry.data.i32[2];
-    int32_t activeH = entry.data.i32[3];
+    bool isUltraHighResolutionSensor =
+            camera3::SessionConfigurationUtils::isUltraHighResolutionSensor(*deviceInfo);
+    if (isUltraHighResolutionSensor) {
+        if (!getArrayWidthAndHeight(deviceInfo,
+                ANDROID_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE_MAXIMUM_RESOLUTION,
+                &arrayMaximumResolutionW, &arrayMaximumResolutionH)) {
+            ALOGE("%s: Couldn't get maximum resolution pre correction active array size",
+                    __FUNCTION__);
+            return;
+        }
+        if (!getArrayWidthAndHeight(deviceInfo,
+                ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE_MAXIMUM_RESOLUTION,
+                &activeMaximumResolutionW, &activeMaximumResolutionH)) {
+            ALOGE("%s: Couldn't get maximum resolution active array size",
+                    __FUNCTION__);
+            return;
+        }
+    }
 
     if (usePrecorrectArray) {
         mArrayWidth = arrayW;
         mArrayHeight = arrayH;
+        mArrayWidthMaximumResolution = arrayMaximumResolutionW;
+        mArrayHeightMaximumResolution = arrayMaximumResolutionH;
     } else {
         mArrayWidth = activeW;
         mArrayHeight = activeH;
+        mArrayWidthMaximumResolution = activeMaximumResolutionW;
+        mArrayHeightMaximumResolution = activeMaximumResolutionH;
     }
     mHalSupportsZoomRatio = supportNativeZoomRatio;
 
-    ALOGV("%s: array size: %d x %d, mHalSupportsZoomRatio %d",
-            __FUNCTION__, mArrayWidth, mArrayHeight, mHalSupportsZoomRatio);
+    ALOGV("%s: array size: %d x %d, full res array size: %d x %d,  mHalSupportsZoomRatio %d",
+            __FUNCTION__, mArrayWidth, mArrayHeight, mArrayWidthMaximumResolution,
+            mArrayHeightMaximumResolution, mHalSupportsZoomRatio);
     mIsValid = true;
 }
 
+status_t ZoomRatioMapper::getArrayDimensionsToBeUsed(const CameraMetadata *settings,
+        int32_t *arrayWidth, int32_t *arrayHeight) {
+    if (settings == nullptr || arrayWidth == nullptr || arrayHeight == nullptr) {
+        return BAD_VALUE;
+    }
+    // First we get the sensorPixelMode from the settings metadata.
+    int32_t sensorPixelMode = ANDROID_SENSOR_PIXEL_MODE_DEFAULT;
+    camera_metadata_ro_entry sensorPixelModeEntry = settings->find(ANDROID_SENSOR_PIXEL_MODE);
+    if (sensorPixelModeEntry.count != 0) {
+        sensorPixelMode = sensorPixelModeEntry.data.u8[0];
+        if (sensorPixelMode != ANDROID_SENSOR_PIXEL_MODE_DEFAULT &&
+            sensorPixelMode != ANDROID_SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION) {
+            ALOGE("%s: Request sensor pixel mode is not one of the valid values %d",
+                      __FUNCTION__, sensorPixelMode);
+            return BAD_VALUE;
+        }
+    }
+    if (sensorPixelMode == ANDROID_SENSOR_PIXEL_MODE_DEFAULT) {
+        *arrayWidth = mArrayWidth;
+        *arrayHeight = mArrayHeight;
+    } else {
+        *arrayWidth = mArrayWidthMaximumResolution;
+        *arrayHeight = mArrayHeightMaximumResolution;
+    }
+    return OK;
+}
+
 status_t ZoomRatioMapper::updateCaptureRequest(CameraMetadata* request) {
     if (!mIsValid) return INVALID_OPERATION;
 
     status_t res = OK;
     bool zoomRatioIs1 = true;
     camera_metadata_entry_t entry;
-
+    int arrayWidth = 0, arrayHeight = 0;
+    res = getArrayDimensionsToBeUsed(request, &arrayWidth, &arrayHeight);
+    if (res != OK) {
+        return res;
+    }
     entry = request->find(ANDROID_CONTROL_ZOOM_RATIO);
     if (entry.count == 1 && entry.data.f[0] != 1.0f) {
         zoomRatioIs1 = false;
@@ -174,19 +252,19 @@
         if (cropRegionEntry.count == 4) {
             int cropWidth = cropRegionEntry.data.i32[2];
             int cropHeight = cropRegionEntry.data.i32[3];
-            if (cropWidth < mArrayWidth && cropHeight < mArrayHeight) {
+            if (cropWidth < arrayWidth && cropHeight < arrayHeight) {
                 cropRegionEntry.data.i32[0] = 0;
                 cropRegionEntry.data.i32[1] = 0;
-                cropRegionEntry.data.i32[2] = mArrayWidth;
-                cropRegionEntry.data.i32[3] = mArrayHeight;
+                cropRegionEntry.data.i32[2] = arrayWidth;
+                cropRegionEntry.data.i32[3] = arrayHeight;
             }
         }
     }
 
     if (mHalSupportsZoomRatio && zoomRatioIs1) {
-        res = separateZoomFromCropLocked(request, false/*isResult*/);
+        res = separateZoomFromCropLocked(request, false/*isResult*/, arrayWidth, arrayHeight);
     } else if (!mHalSupportsZoomRatio && !zoomRatioIs1) {
-        res = combineZoomAndCropLocked(request, false/*isResult*/);
+        res = combineZoomAndCropLocked(request, false/*isResult*/, arrayWidth, arrayHeight);
     }
 
     // If CONTROL_ZOOM_RATIO is in request, but HAL doesn't support
@@ -203,10 +281,15 @@
 
     status_t res = OK;
 
+    int arrayWidth = 0, arrayHeight = 0;
+    res = getArrayDimensionsToBeUsed(result, &arrayWidth, &arrayHeight);
+    if (res != OK) {
+        return res;
+    }
     if (mHalSupportsZoomRatio && requestedZoomRatioIs1) {
-        res = combineZoomAndCropLocked(result, true/*isResult*/);
+        res = combineZoomAndCropLocked(result, true/*isResult*/, arrayWidth, arrayHeight);
     } else if (!mHalSupportsZoomRatio && !requestedZoomRatioIs1) {
-        res = separateZoomFromCropLocked(result, true/*isResult*/);
+        res = separateZoomFromCropLocked(result, true/*isResult*/, arrayWidth, arrayHeight);
     } else {
         camera_metadata_entry_t entry = result->find(ANDROID_CONTROL_ZOOM_RATIO);
         if (entry.count == 0) {
@@ -218,16 +301,22 @@
     return res;
 }
 
-float ZoomRatioMapper::deriveZoomRatio(const CameraMetadata* metadata) {
+status_t ZoomRatioMapper::deriveZoomRatio(const CameraMetadata* metadata, float *zoomRatioRet,
+        int arrayWidth, int arrayHeight) {
+    if (metadata == nullptr || zoomRatioRet == nullptr) {
+        return BAD_VALUE;
+    }
     float zoomRatio = 1.0;
 
     camera_metadata_ro_entry_t entry;
     entry = metadata->find(ANDROID_SCALER_CROP_REGION);
-    if (entry.count != 4) return zoomRatio;
-
+    if (entry.count != 4) {
+        *zoomRatioRet = 1;
+        return OK;
+    }
     // Center of the preCorrection/active size
-    float arrayCenterX = mArrayWidth / 2.0;
-    float arrayCenterY = mArrayHeight / 2.0;
+    float arrayCenterX = arrayWidth / 2.0;
+    float arrayCenterY = arrayHeight / 2.0;
 
     // Re-map crop region to coordinate system centered to (arrayCenterX,
     // arrayCenterY).
@@ -237,22 +326,30 @@
     float cropRegionBottom = entry.data.i32[1] + entry.data.i32[3] - arrayCenterY;
 
     // Calculate the scaling factor for left, top, bottom, right
-    float zoomRatioLeft = std::max(mArrayWidth / (2 * cropRegionLeft), 1.0f);
-    float zoomRatioTop = std::max(mArrayHeight / (2 * cropRegionTop), 1.0f);
-    float zoomRatioRight = std::max(mArrayWidth / (2 * cropRegionRight), 1.0f);
-    float zoomRatioBottom = std::max(mArrayHeight / (2 * cropRegionBottom), 1.0f);
+    float zoomRatioLeft = std::max(arrayWidth / (2 * cropRegionLeft), 1.0f);
+    float zoomRatioTop = std::max(arrayHeight / (2 * cropRegionTop), 1.0f);
+    float zoomRatioRight = std::max(arrayWidth / (2 * cropRegionRight), 1.0f);
+    float zoomRatioBottom = std::max(arrayHeight / (2 * cropRegionBottom), 1.0f);
 
     // Use minimum scaling factor to handle letterboxing or pillarboxing
     zoomRatio = std::min(std::min(zoomRatioLeft, zoomRatioRight),
             std::min(zoomRatioTop, zoomRatioBottom));
 
     ALOGV("%s: derived zoomRatio is %f", __FUNCTION__, zoomRatio);
-    return zoomRatio;
+    *zoomRatioRet = zoomRatio;
+    return OK;
 }
 
-status_t ZoomRatioMapper::separateZoomFromCropLocked(CameraMetadata* metadata, bool isResult) {
-    status_t res;
-    float zoomRatio = deriveZoomRatio(metadata);
+status_t ZoomRatioMapper::separateZoomFromCropLocked(CameraMetadata* metadata, bool isResult,
+        int arrayWidth, int arrayHeight) {
+    float zoomRatio = 1.0;
+    status_t res = deriveZoomRatio(metadata, &zoomRatio, arrayWidth, arrayHeight);
+
+    if (res != OK) {
+        ALOGE("%s: Failed to derive zoom ratio: %s(%d)",
+                __FUNCTION__, strerror(-res), res);
+        return res;
+    }
 
     // Update zoomRatio metadata tag
     res = metadata->update(ANDROID_CONTROL_ZOOM_RATIO, &zoomRatio, 1);
@@ -272,12 +369,14 @@
                 continue;
             }
             // Top left (inclusive)
-            scaleCoordinates(entry.data.i32 + j, 1, zoomRatio, true /*clamp*/);
+            scaleCoordinates(entry.data.i32 + j, 1, zoomRatio, true /*clamp*/, arrayWidth,
+                    arrayHeight);
             // Bottom right (exclusive): Use adjacent inclusive pixel to
             // calculate.
             entry.data.i32[j+2] -= 1;
             entry.data.i32[j+3] -= 1;
-            scaleCoordinates(entry.data.i32 + j + 2, 1, zoomRatio, true /*clamp*/);
+            scaleCoordinates(entry.data.i32 + j + 2, 1, zoomRatio, true /*clamp*/, arrayWidth,
+                    arrayHeight);
             entry.data.i32[j+2] += 1;
             entry.data.i32[j+3] += 1;
         }
@@ -285,20 +384,22 @@
 
     for (auto rect : kRectsToCorrect) {
         entry = metadata->find(rect);
-        scaleRects(entry.data.i32, entry.count / 4, zoomRatio);
+        scaleRects(entry.data.i32, entry.count / 4, zoomRatio, arrayWidth, arrayHeight);
     }
 
     if (isResult) {
         for (auto pts : kResultPointsToCorrectNoClamp) {
             entry = metadata->find(pts);
-            scaleCoordinates(entry.data.i32, entry.count / 2, zoomRatio, false /*clamp*/);
+            scaleCoordinates(entry.data.i32, entry.count / 2, zoomRatio, false /*clamp*/,
+                    arrayWidth, arrayHeight);
         }
     }
 
     return OK;
 }
 
-status_t ZoomRatioMapper::combineZoomAndCropLocked(CameraMetadata* metadata, bool isResult) {
+status_t ZoomRatioMapper::combineZoomAndCropLocked(CameraMetadata* metadata, bool isResult,
+        int arrayWidth, int arrayHeight) {
     float zoomRatio = 1.0f;
     camera_metadata_entry_t entry;
     entry = metadata->find(ANDROID_CONTROL_ZOOM_RATIO);
@@ -307,7 +408,6 @@
     }
 
     // Unscale regions with zoomRatio
-    status_t res;
     for (auto region : kMeteringRegionsToCorrect) {
         entry = metadata->find(region);
         for (size_t j = 0; j < entry.count; j += 5) {
@@ -316,29 +416,32 @@
                 continue;
             }
             // Top-left (inclusive)
-            scaleCoordinates(entry.data.i32 + j, 1, 1.0 / zoomRatio, true /*clamp*/);
+            scaleCoordinates(entry.data.i32 + j, 1, 1.0 / zoomRatio, true /*clamp*/, arrayWidth,
+                    arrayHeight);
             // Bottom-right (exclusive): Use adjacent inclusive pixel to
             // calculate.
             entry.data.i32[j+2] -= 1;
             entry.data.i32[j+3] -= 1;
-            scaleCoordinates(entry.data.i32 + j + 2, 1, 1.0 / zoomRatio, true /*clamp*/);
+            scaleCoordinates(entry.data.i32 + j + 2, 1, 1.0 / zoomRatio, true /*clamp*/, arrayWidth,
+                    arrayHeight);
             entry.data.i32[j+2] += 1;
             entry.data.i32[j+3] += 1;
         }
     }
     for (auto rect : kRectsToCorrect) {
         entry = metadata->find(rect);
-        scaleRects(entry.data.i32, entry.count / 4, 1.0 / zoomRatio);
+        scaleRects(entry.data.i32, entry.count / 4, 1.0 / zoomRatio, arrayWidth, arrayHeight);
     }
     if (isResult) {
         for (auto pts : kResultPointsToCorrectNoClamp) {
             entry = metadata->find(pts);
-            scaleCoordinates(entry.data.i32, entry.count / 2, 1.0 / zoomRatio, false /*clamp*/);
+            scaleCoordinates(entry.data.i32, entry.count / 2, 1.0 / zoomRatio, false /*clamp*/,
+                    arrayWidth, arrayHeight);
         }
     }
 
     zoomRatio = 1.0;
-    res = metadata->update(ANDROID_CONTROL_ZOOM_RATIO, &zoomRatio, 1);
+    status_t res = metadata->update(ANDROID_CONTROL_ZOOM_RATIO, &zoomRatio, 1);
     if (res != OK) {
         return res;
     }
@@ -347,7 +450,7 @@
 }
 
 void ZoomRatioMapper::scaleCoordinates(int32_t* coordPairs, int coordCount,
-        float scaleRatio, bool clamp) {
+        float scaleRatio, bool clamp, int32_t arrayWidth, int32_t arrayHeight) {
     // A pixel's coordinate is represented by the position of its top-left corner.
     // To avoid the rounding error, we use the coordinate for the center of the
     // pixel instead:
@@ -360,18 +463,18 @@
     for (int i = 0; i < coordCount * 2; i += 2) {
         float x = coordPairs[i];
         float y = coordPairs[i + 1];
-        float xCentered = x - (mArrayWidth - 2) / 2;
-        float yCentered = y - (mArrayHeight - 2) / 2;
+        float xCentered = x - (arrayWidth - 2) / 2;
+        float yCentered = y - (arrayHeight - 2) / 2;
         float scaledX = xCentered * scaleRatio;
         float scaledY = yCentered * scaleRatio;
-        scaledX += (mArrayWidth - 2) / 2;
-        scaledY += (mArrayHeight - 2) / 2;
+        scaledX += (arrayWidth - 2) / 2;
+        scaledY += (arrayHeight - 2) / 2;
         coordPairs[i] = static_cast<int32_t>(std::round(scaledX));
         coordPairs[i+1] = static_cast<int32_t>(std::round(scaledY));
         // Clamp to within activeArray/preCorrectionActiveArray
         if (clamp) {
-            int32_t right = mArrayWidth - 1;
-            int32_t bottom = mArrayHeight - 1;
+            int32_t right = arrayWidth - 1;
+            int32_t bottom = arrayHeight - 1;
             coordPairs[i] =
                     std::min(right, std::max(0, coordPairs[i]));
             coordPairs[i+1] =
@@ -382,7 +485,7 @@
 }
 
 void ZoomRatioMapper::scaleRects(int32_t* rects, int rectCount,
-        float scaleRatio) {
+        float scaleRatio, int32_t arrayWidth, int32_t arrayHeight) {
     for (int i = 0; i < rectCount * 4; i += 4) {
         // Map from (l, t, width, height) to (l, t, l+width-1, t+height-1),
         // where both top-left and bottom-right are inclusive.
@@ -394,9 +497,9 @@
         };
 
         // top-left
-        scaleCoordinates(coords, 1, scaleRatio, true /*clamp*/);
+        scaleCoordinates(coords, 1, scaleRatio, true /*clamp*/, arrayWidth, arrayHeight);
         // bottom-right
-        scaleCoordinates(coords+2, 1, scaleRatio, true /*clamp*/);
+        scaleCoordinates(coords+2, 1, scaleRatio, true /*clamp*/, arrayWidth, arrayHeight);
 
         // Map back to (l, t, width, height)
         rects[i] = coords[0];
diff --git a/services/camera/libcameraservice/device3/ZoomRatioMapper.h b/services/camera/libcameraservice/device3/ZoomRatioMapper.h
index 3769299..b7a9e41 100644
--- a/services/camera/libcameraservice/device3/ZoomRatioMapper.h
+++ b/services/camera/libcameraservice/device3/ZoomRatioMapper.h
@@ -68,22 +68,31 @@
 
   public: // Visible for testing. Do not use concurently.
     void scaleCoordinates(int32_t* coordPairs, int coordCount,
-            float scaleRatio, bool clamp);
+            float scaleRatio, bool clamp, int32_t arrayWidth, int32_t arrayHeight);
 
     bool isValid() { return mIsValid; }
   private:
     // const after construction
     bool mHalSupportsZoomRatio;
-    // active array / pre-correction array dimension
+
+    // active array / pre-correction array dimension for default and maximum
+    // resolution modes.
     int32_t mArrayWidth, mArrayHeight;
+    int32_t mArrayWidthMaximumResolution, mArrayHeightMaximumResolution;
 
     bool mIsValid = false;
 
-    float deriveZoomRatio(const CameraMetadata* metadata);
-    void scaleRects(int32_t* rects, int rectCount, float scaleRatio);
+    status_t deriveZoomRatio(const CameraMetadata* metadata, float *zoomRatio, int arrayWidth,
+            int arrayHeight);
+    void scaleRects(int32_t* rects, int rectCount, float scaleRatio, int32_t arrayWidth,
+            int32_t arrayHeight);
 
-    status_t separateZoomFromCropLocked(CameraMetadata* metadata, bool isResult);
-    status_t combineZoomAndCropLocked(CameraMetadata* metadata, bool isResult);
+    status_t separateZoomFromCropLocked(CameraMetadata* metadata, bool isResult, int arrayWidth,
+            int arrayHeight);
+    status_t combineZoomAndCropLocked(CameraMetadata* metadata, bool isResult, int arrayWidth,
+            int arrayHeight);
+    status_t getArrayDimensionsToBeUsed(const CameraMetadata *settings, int32_t *arrayWidth,
+            int32_t *arrayHeight);
 };
 
 } // namespace camera3
diff --git a/services/camera/libcameraservice/fuzzer/DistortionMapperFuzzer.cpp b/services/camera/libcameraservice/fuzzer/DistortionMapperFuzzer.cpp
index 96bab4e..88ec85c 100644
--- a/services/camera/libcameraservice/fuzzer/DistortionMapperFuzzer.cpp
+++ b/services/camera/libcameraservice/fuzzer/DistortionMapperFuzzer.cpp
@@ -23,6 +23,7 @@
 
 using namespace android;
 using namespace android::camera3;
+using DistortionMapperInfo = android::camera3::DistortionMapper::DistortionMapperInfo;
 
 int32_t testActiveArray[] = {100, 100, 1000, 750};
 float testICal[] = { 1000.f, 1000.f, 500.f, 500.f, 0.f };
@@ -62,10 +63,10 @@
     for (int index = 0; fdp.remaining_bytes() > 0; index++) {
         input.push_back(fdp.ConsumeIntegral<int32_t>());
     }
-
+    DistortionMapperInfo *mapperInfo = m.getMapperInfo();
     // The size argument counts how many coordinate pairs there are, so
     // it is expected to be 1/2 the size of the input.
-    m.mapCorrectedToRaw(input.data(), input.size()/2,  clamp, simple);
+    m.mapCorrectedToRaw(input.data(), input.size()/2,  mapperInfo, clamp, simple);
 
     return 0;
 }
diff --git a/services/camera/libcameraservice/libcameraservice_fuzzer/Android.bp b/services/camera/libcameraservice/libcameraservice_fuzzer/Android.bp
new file mode 100644
index 0000000..c7d7c4b
--- /dev/null
+++ b/services/camera/libcameraservice/libcameraservice_fuzzer/Android.bp
@@ -0,0 +1,68 @@
+/******************************************************************************
+ *
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at:
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ *****************************************************************************
+ * Originally developed and contributed by Ittiam Systems Pvt. Ltd, Bangalore
+ */
+
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_services_camera_libcameraservice_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: [
+        "frameworks_av_services_camera_libcameraservice_license",
+    ],
+}
+
+cc_fuzz {
+    name: "camera_service_fuzzer",
+    srcs: [
+        "camera_service_fuzzer.cpp",
+    ],
+    header_libs: [
+        "libmedia_headers",
+    ],
+    shared_libs: [
+        "libbinder",
+        "libbase",
+        "libutils",
+        "libcutils",
+        "libcameraservice",
+        "libcamera_client",
+        "libui",
+        "libgui",
+        "android.hardware.camera.common@1.0",
+        "android.hardware.camera.provider@2.4",
+        "android.hardware.camera.provider@2.5",
+        "android.hardware.camera.provider@2.6",
+        "android.hardware.camera.provider@2.7",
+        "android.hardware.camera.device@1.0",
+        "android.hardware.camera.device@3.2",
+        "android.hardware.camera.device@3.3",
+        "android.hardware.camera.device@3.4",
+        "android.hardware.camera.device@3.5",
+        "android.hardware.camera.device@3.6",
+        "android.hardware.camera.device@3.7",
+    ],
+    fuzz_config: {
+        cc: [
+            "android-media-fuzzing-reports@google.com",
+        ],
+        componentid: 155276,
+    },
+}
diff --git a/services/camera/libcameraservice/libcameraservice_fuzzer/README.md b/services/camera/libcameraservice/libcameraservice_fuzzer/README.md
new file mode 100644
index 0000000..c703845
--- /dev/null
+++ b/services/camera/libcameraservice/libcameraservice_fuzzer/README.md
@@ -0,0 +1,59 @@
+# Fuzzer for libcameraservice
+
+## Plugin Design Considerations
+The fuzzer plugin is designed based on the understanding of the
+library and tries to achieve the following:
+
+##### Maximize code coverage
+The configuration parameters are not hardcoded, but instead selected based on
+incoming data. This ensures more code paths are reached by the fuzzer.
+
+libcameraservice supports the following parameters:
+1. Camera Type (parameter name: `cameraType`)
+2. Camera API Version (parameter name: `cameraAPIVersion`)
+3. Event ID (parameter name: `eventId`)
+4. Camera Sound Kind (parameter name: `soundKind`)
+5. Shell Command (parameter name: `shellCommand`)
+
+| Parameter| Valid Values| Configured Value|
+|------------- |-------------| ----- |
+| `cameraType` | 0. `CAMERA_TYPE_BACKWARD_COMPATIBLE` 1. `CAMERA_TYPE_ALL` | Value obtained from FuzzedDataProvider |
+| `cameraAPIVersion` |  0. `API_VERSION_1` 1. `API_VERSION_2` | Value obtained from FuzzedDataProvider |
+| `eventId` |  0. `EVENT_USER_SWITCHED` 1. `EVENT_NONE` | Value obtained from FuzzedDataProvider |
+| `soundKind` |  0. `SOUND_SHUTTER` 1. `SOUND_RECORDING_START` 2. `SOUND_RECORDING_STOP`| Value obtained from FuzzedDataProvider |
+| `shellCommand` |  0. `set-uid-state` 1. `reset-uid-state` 2. `get-uid-state` 3. `set-rotate-and-crop` 4. `get-rotate-and-crop` 5. `help`| Value obtained from FuzzedDataProvider |
+
+This also ensures that the plugin is always deterministic for any given input.
+
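+As a minimal sketch (not part of the build), the selection pattern used
+throughout the plugin looks roughly like the following; `pickCameraType` and
+the constant values are illustrative only, the actual logic lives in
+`camera_service_fuzzer.cpp`:
+
+```
+#include "fuzzer/FuzzedDataProvider.h"
+
+static int32_t pickCameraType(FuzzedDataProvider &fdp) {
+    // Placeholder values for illustration; the fuzzer uses the
+    // hardware::ICameraService::CAMERA_TYPE_* constants.
+    const int32_t kCamType[] = {0 /*BACKWARD_COMPATIBLE*/, 1 /*ALL*/};
+    // Occasionally pass an arbitrary (possibly invalid) value to reach error paths.
+    if (fdp.ConsumeBool()) {
+        return fdp.ConsumeIntegral<int32_t>();
+    }
+    return kCamType[fdp.ConsumeBool()];
+}
+```
+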
+##### Maximize utilization of input data
+The plugin tolerates any kind of input (empty, huge,
+malformed, etc.) and doesn't `exit()` on any input, thereby increasing the
+chance of identifying vulnerabilities.
+
+## Build
+
+This describes the steps to build the camera_service_fuzzer binary.
+
+### Android
+
+#### Steps to build
+Build the fuzzer
+```
+  $ mm -j$(nproc) camera_service_fuzzer
+```
+
+#### Steps to run
+Create a corpus directory CORPUS_DIR on the device
+```
+  $ adb shell mkdir CORPUS_DIR
+```
+
+To run on device
+```
+  $ adb sync data
+  $ adb shell /data/fuzz/arm64/camera_service_fuzzer/camera_service_fuzzer CORPUS_DIR
+```
+
+## References:
+ * http://llvm.org/docs/LibFuzzer.html
+ * https://github.com/google/oss-fuzz
diff --git a/services/camera/libcameraservice/libcameraservice_fuzzer/camera_service_fuzzer.cpp b/services/camera/libcameraservice/libcameraservice_fuzzer/camera_service_fuzzer.cpp
new file mode 100644
index 0000000..54550a5
--- /dev/null
+++ b/services/camera/libcameraservice/libcameraservice_fuzzer/camera_service_fuzzer.cpp
@@ -0,0 +1,433 @@
+/******************************************************************************
+ *
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at:
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ *****************************************************************************
+ * Originally developed and contributed by Ittiam Systems Pvt. Ltd, Bangalore
+ */
+
+#include <CameraService.h>
+#include <android/hardware/ICameraServiceListener.h>
+#include <gui/Surface.h>
+#include <gui/SurfaceComposerClient.h>
+#include <private/android_filesystem_config.h>
+#include "fuzzer/FuzzedDataProvider.h"
+
+using namespace android;
+using namespace hardware;
+using namespace std;
+
+const int32_t kPreviewThreshold = 8;
+const nsecs_t kPreviewTimeout = 5000000000;  // 5 [s.]
+const nsecs_t kEventTimeout = 10000000000;   // 10 [s.]
+const size_t kMaxNumLines = USHRT_MAX;
+const size_t kMinArgs = 1;
+const size_t kMaxArgs = 5;
+const int32_t kCamType[] = {hardware::ICameraService::CAMERA_TYPE_BACKWARD_COMPATIBLE,
+                            hardware::ICameraService::CAMERA_TYPE_ALL};
+const int kCameraApiVersion[] = {android::CameraService::API_VERSION_1,
+                                 android::CameraService::API_VERSION_2};
+const int kLayerMetadata[] = {
+    0x00100000 /*GRALLOC_USAGE_RENDERSCRIPT*/, 0x00000003 /*GRALLOC_USAGE_SW_READ_OFTEN*/,
+    0x00000100 /*GRALLOC_USAGE_HW_TEXTURE*/,   0x00000800 /*GRALLOC_USAGE_HW_COMPOSER*/,
+    0x00000200 /*GRALLOC_USAGE_HW_RENDER*/,    0x00010000 /*GRALLOC_USAGE_HW_VIDEO_ENCODER*/};
+const int kCameraMsg[] = {0x001 /*CAMERA_MSG_ERROR*/,
+                          0x002 /*CAMERA_MSG_SHUTTER*/,
+                          0x004 /*CAMERA_MSG_FOCUS*/,
+                          0x008 /*CAMERA_MSG_ZOOM*/,
+                          0x010 /*CAMERA_MSG_PREVIEW_FRAME*/,
+                          0x020 /*CAMERA_MSG_VIDEO_FRAME */,
+                          0x040 /*CAMERA_MSG_POSTVIEW_FRAME*/,
+                          0x080 /*CAMERA_MSG_RAW_IMAGE */,
+                          0x100 /*CAMERA_MSG_COMPRESSED_IMAGE*/,
+                          0x200 /*CAMERA_MSG_RAW_IMAGE_NOTIFY*/,
+                          0x400 /*CAMERA_MSG_PREVIEW_METADATA*/,
+                          0x800 /*CAMERA_MSG_FOCUS_MOVE*/};
+const int32_t kEventId[] = {ICameraService::EVENT_USER_SWITCHED, ICameraService::EVENT_NONE};
+const android::CameraService::sound_kind kSoundKind[] = {
+    android::CameraService::SOUND_SHUTTER, android::CameraService::SOUND_RECORDING_START,
+    android::CameraService::SOUND_RECORDING_STOP};
+const String16 kShellCmd[] = {String16("set-uid-state"),       String16("reset-uid-state"),
+                              String16("get-uid-state"),       String16("set-rotate-and-crop"),
+                              String16("get-rotate-and-crop"), String16("help")};
+const size_t kNumLayerMetaData = size(kLayerMetadata);
+const size_t kNumCameraMsg = size(kCameraMsg);
+const size_t kNumSoundKind = size(kSoundKind);
+const size_t kNumShellCmd = size(kShellCmd);
+
+class CameraFuzzer : public ::android::hardware::BnCameraClient {
+   public:
+    CameraFuzzer() = default;
+    ~CameraFuzzer() { deInit(); }
+    bool init();
+    void process(const uint8_t *data, size_t size);
+    void deInit();
+
+   private:
+    FuzzedDataProvider *mFuzzedDataProvider = nullptr;
+    sp<CameraService> mCameraService = nullptr;
+    sp<SurfaceComposerClient> mComposerClient = nullptr;
+    int32_t mNumCameras = 0;
+    size_t mPreviewBufferCount = 0;
+    bool mAutoFocusMessage = false;
+    bool mSnapshotNotification = false;
+    mutable Mutex mPreviewLock;
+    mutable Condition mPreviewCondition;
+    mutable Mutex mAutoFocusLock;
+    mutable Condition mAutoFocusCondition;
+    mutable Mutex mSnapshotLock;
+    mutable Condition mSnapshotCondition;
+
+    void getNumCameras();
+    void getCameraInformation(int32_t cameraId);
+    void invokeCameraAPIs();
+    void invokeCameraSound();
+    void invokeDump();
+    void invokeShellCommand();
+    void invokeNotifyCalls();
+
+    // CameraClient interface
+    void notifyCallback(int32_t msgType, int32_t, int32_t) override;
+    void dataCallback(int32_t msgType, const sp<IMemory> &, camera_frame_metadata_t *) override;
+    void dataCallbackTimestamp(nsecs_t, int32_t, const sp<IMemory> &) override{};
+    void recordingFrameHandleCallbackTimestamp(nsecs_t, native_handle_t *) override{};
+    void recordingFrameHandleCallbackTimestampBatch(
+        const std::vector<nsecs_t> &, const std::vector<native_handle_t *> &) override{};
+    status_t waitForPreviewStart();
+    status_t waitForEvent(Mutex &mutex, Condition &condition, bool &flag);
+};
+
+void CameraFuzzer::notifyCallback(int32_t msgType, int32_t, int32_t) {
+    if (CAMERA_MSG_FOCUS == msgType) {
+        Mutex::Autolock l(mAutoFocusLock);
+        mAutoFocusMessage = true;
+        mAutoFocusCondition.broadcast();
+    }
+};
+
+void CameraFuzzer::dataCallback(int32_t msgType, const sp<IMemory> & /*data*/,
+                                camera_frame_metadata_t *) {
+    switch (msgType) {
+        case CAMERA_MSG_PREVIEW_FRAME: {
+            Mutex::Autolock l(mPreviewLock);
+            ++mPreviewBufferCount;
+            mPreviewCondition.broadcast();
+            break;
+        }
+        case CAMERA_MSG_COMPRESSED_IMAGE: {
+            Mutex::Autolock l(mSnapshotLock);
+            mSnapshotNotification = true;
+            mSnapshotCondition.broadcast();
+            break;
+        }
+        default:
+            break;
+    }
+};
+
+status_t CameraFuzzer::waitForPreviewStart() {
+    status_t rc = NO_ERROR;
+    Mutex::Autolock l(mPreviewLock);
+    mPreviewBufferCount = 0;
+
+    while (mPreviewBufferCount < kPreviewThreshold) {
+        rc = mPreviewCondition.waitRelative(mPreviewLock, kPreviewTimeout);
+        if (NO_ERROR != rc) {
+            break;
+        }
+    }
+
+    return rc;
+}
+
+status_t CameraFuzzer::waitForEvent(Mutex &mutex, Condition &condition, bool &flag) {
+    status_t rc = NO_ERROR;
+    Mutex::Autolock l(mutex);
+    flag = false;
+
+    while (!flag) {
+        rc = condition.waitRelative(mutex, kEventTimeout);
+        if (NO_ERROR != rc) {
+            break;
+        }
+    }
+
+    return rc;
+}
+
+bool CameraFuzzer::init() {
+    setuid(AID_MEDIA);
+    mCameraService = new CameraService();
+    if (mCameraService) {
+        return true;
+    }
+    return false;
+}
+
+void CameraFuzzer::deInit() {
+    if (mCameraService) {
+        mCameraService = nullptr;
+    }
+    if (mComposerClient) {
+        mComposerClient->dispose();
+    }
+}
+
+void CameraFuzzer::getNumCameras() {
+    bool shouldPassInvalidCamType = mFuzzedDataProvider->ConsumeBool();
+    int32_t camType;
+    if (shouldPassInvalidCamType) {
+        camType = mFuzzedDataProvider->ConsumeIntegral<int32_t>();
+    } else {
+        camType = kCamType[mFuzzedDataProvider->ConsumeBool()];
+    }
+    mCameraService->getNumberOfCameras(camType, &mNumCameras);
+}
+
+void CameraFuzzer::getCameraInformation(int32_t cameraId) {
+    String16 cameraIdStr = String16(String8::format("%d", cameraId));
+    bool isSupported = false;
+    mCameraService->supportsCameraApi(
+        cameraIdStr, kCameraApiVersion[mFuzzedDataProvider->ConsumeBool()], &isSupported);
+    mCameraService->isHiddenPhysicalCamera(cameraIdStr, &isSupported);
+
+    String16 parameters;
+    mCameraService->getLegacyParameters(cameraId, &parameters);
+
+    std::vector<hardware::camera2::utils::ConcurrentCameraIdCombination> concurrentCameraIds;
+    mCameraService->getConcurrentCameraIds(&concurrentCameraIds);
+
+    hardware::camera2::params::VendorTagDescriptorCache cache;
+    mCameraService->getCameraVendorTagCache(&cache);
+
+    CameraInfo cameraInfo;
+    mCameraService->getCameraInfo(cameraId, &cameraInfo);
+
+    CameraMetadata metadata;
+    mCameraService->getCameraCharacteristics(cameraIdStr, &metadata);
+}
+
+void CameraFuzzer::invokeCameraSound() {
+    mCameraService->increaseSoundRef();
+    mCameraService->decreaseSoundRef();
+    bool shouldPassInvalidPlaySound = mFuzzedDataProvider->ConsumeBool();
+    bool shouldPassInvalidLockSound = mFuzzedDataProvider->ConsumeBool();
+    android::CameraService::sound_kind playSound, lockSound;
+    if (shouldPassInvalidPlaySound) {
+        playSound = static_cast<android::CameraService::sound_kind>(
+            mFuzzedDataProvider->ConsumeIntegral<size_t>());
+    } else {
+        playSound =
+            kSoundKind[mFuzzedDataProvider->ConsumeIntegralInRange<size_t>(0, kNumSoundKind - 1)];
+    }
+
+    if (shouldPassInvalidLockSound) {
+        lockSound = static_cast<android::CameraService::sound_kind>(
+            mFuzzedDataProvider->ConsumeIntegral<size_t>());
+    } else {
+        lockSound =
+            kSoundKind[mFuzzedDataProvider->ConsumeIntegralInRange<size_t>(0, kNumSoundKind - 1)];
+    }
+    mCameraService->playSound(playSound);
+    mCameraService->loadSoundLocked(lockSound);
+}
+
+void CameraFuzzer::invokeDump() {
+    Vector<String16> args;
+    size_t numberOfLines = mFuzzedDataProvider->ConsumeIntegralInRange<size_t>(0, kMaxNumLines);
+    for (size_t lineIdx = 0; lineIdx < numberOfLines; ++lineIdx) {
+        args.add(static_cast<String16>(mFuzzedDataProvider->ConsumeRandomLengthString().c_str()));
+    }
+    const char *fileName = "logDumpFile";
+    int fd = memfd_create(fileName, MFD_ALLOW_SEALING);
+    mCameraService->dump(fd, args);
+    close(fd);
+}
+
+void CameraFuzzer::invokeShellCommand() {
+    int in = mFuzzedDataProvider->ConsumeIntegral<int>();
+    int out = mFuzzedDataProvider->ConsumeIntegral<int>();
+    int err = mFuzzedDataProvider->ConsumeIntegral<int>();
+    Vector<String16> args;
+    size_t numArgs = mFuzzedDataProvider->ConsumeIntegralInRange<size_t>(kMinArgs, kMaxArgs);
+    for (size_t argsIdx = 0; argsIdx < numArgs; ++argsIdx) {
+        bool shouldPassInvalidCommand = mFuzzedDataProvider->ConsumeBool();
+        if (shouldPassInvalidCommand) {
+            args.add(
+                static_cast<String16>(mFuzzedDataProvider->ConsumeRandomLengthString().c_str()));
+        } else {
+            args.add(kShellCmd[mFuzzedDataProvider->ConsumeIntegralInRange<size_t>(
+                0, kNumShellCmd - 1)]);
+        }
+    }
+    mCameraService->shellCommand(in, out, err, args);
+}
+
+void CameraFuzzer::invokeNotifyCalls() {
+    mCameraService->notifyMonitoredUids();
+    int64_t newState = mFuzzedDataProvider->ConsumeIntegral<int64_t>();
+    mCameraService->notifyDeviceStateChange(newState);
+    std::vector<int32_t> args;
+    size_t numArgs = mFuzzedDataProvider->ConsumeIntegralInRange<size_t>(kMinArgs, kMaxArgs);
+    for (size_t argsIdx = 0; argsIdx < numArgs; ++argsIdx) {
+        args.push_back(mFuzzedDataProvider->ConsumeIntegral<int32_t>());
+    }
+    bool shouldPassInvalidEvent = mFuzzedDataProvider->ConsumeBool();
+    int32_t eventId;
+    if (shouldPassInvalidEvent) {
+        eventId = mFuzzedDataProvider->ConsumeIntegral<int32_t>();
+    } else {
+        eventId = kEventId[mFuzzedDataProvider->ConsumeBool()];
+    }
+    mCameraService->notifySystemEvent(eventId, args);
+}
+
+void CameraFuzzer::invokeCameraAPIs() {
+    for (int32_t cameraId = 0; cameraId < mNumCameras; ++cameraId) {
+        getCameraInformation(cameraId);
+
+        const String16 opPackageName("com.fuzzer.poc");
+        ::android::binder::Status rc;
+        sp<ICamera> cameraDevice;
+
+        rc = mCameraService->connect(this, cameraId, opPackageName, AID_MEDIA, AID_ROOT,
+                                     &cameraDevice);
+        if (!rc.isOk()) {
+            // camera not connected
+            return;
+        }
+        if (cameraDevice) {
+            sp<Surface> previewSurface;
+            sp<SurfaceControl> surfaceControl;
+            CameraParameters params(cameraDevice->getParameters());
+            String8 focusModes(params.get(CameraParameters::KEY_SUPPORTED_FOCUS_MODES));
+            bool isAFSupported = false;
+            const char *focusMode = nullptr;
+
+            if (focusModes.contains(CameraParameters::FOCUS_MODE_AUTO)) {
+                isAFSupported = true;
+            } else if (focusModes.contains(CameraParameters::FOCUS_MODE_CONTINUOUS_PICTURE)) {
+                isAFSupported = true;
+                focusMode = CameraParameters::FOCUS_MODE_CONTINUOUS_PICTURE;
+            } else if (focusModes.contains(CameraParameters::FOCUS_MODE_CONTINUOUS_VIDEO)) {
+                isAFSupported = true;
+                focusMode = CameraParameters::FOCUS_MODE_CONTINUOUS_VIDEO;
+            } else if (focusModes.contains(CameraParameters::FOCUS_MODE_MACRO)) {
+                isAFSupported = true;
+                focusMode = CameraParameters::FOCUS_MODE_MACRO;
+            }
+            if (nullptr != focusMode) {
+                params.set(CameraParameters::KEY_FOCUS_MODE, focusMode);
+                cameraDevice->setParameters(params.flatten());
+            }
+            int previewWidth, previewHeight;
+            params.getPreviewSize(&previewWidth, &previewHeight);
+
+            mComposerClient = new SurfaceComposerClient;
+            mComposerClient->initCheck();
+
+            bool shouldPassInvalidLayerMetaData = mFuzzedDataProvider->ConsumeBool();
+            int layerMetaData;
+            if (shouldPassInvalidLayerMetaData) {
+                layerMetaData = mFuzzedDataProvider->ConsumeIntegral<int>();
+            } else {
+                layerMetaData = kLayerMetadata[mFuzzedDataProvider->ConsumeIntegralInRange<size_t>(
+                    0, kNumLayerMetaData - 1)];
+            }
+            surfaceControl = mComposerClient->createSurface(
+                String8("Test Surface"), previewWidth, previewHeight,
+                CameraParameters::previewFormatToEnum(params.getPreviewFormat()), layerMetaData);
+
+            if (surfaceControl.get() != nullptr) {
+                SurfaceComposerClient::Transaction{}
+                    .setLayer(surfaceControl, 0x7fffffff)
+                    .show(surfaceControl)
+                    .apply();
+
+                previewSurface = surfaceControl->getSurface();
+                cameraDevice->setPreviewTarget(previewSurface->getIGraphicBufferProducer());
+            }
+            cameraDevice->setPreviewCallbackFlag(CAMERA_FRAME_CALLBACK_FLAG_CAMCORDER);
+
+            Vector<Size> pictureSizes;
+            params.getSupportedPictureSizes(pictureSizes);
+
+            for (size_t i = 0; i < pictureSizes.size(); ++i) {
+                params.setPictureSize(pictureSizes[i].width, pictureSizes[i].height);
+                cameraDevice->setParameters(params.flatten());
+                cameraDevice->startPreview();
+                waitForPreviewStart();
+                cameraDevice->autoFocus();
+                waitForEvent(mAutoFocusLock, mAutoFocusCondition, mAutoFocusMessage);
+                bool shouldPassInvalidCameraMsg = mFuzzedDataProvider->ConsumeBool();
+                int msgType;
+                if (shouldPassInvalidCameraMsg) {
+                    msgType = mFuzzedDataProvider->ConsumeIntegral<int>();
+                } else {
+                    msgType = kCameraMsg[mFuzzedDataProvider->ConsumeIntegralInRange<size_t>(
+                        0, kNumCameraMsg - 1)];
+                }
+                cameraDevice->takePicture(msgType);
+
+                waitForEvent(mSnapshotLock, mSnapshotCondition, mSnapshotNotification);
+            }
+
+            Vector<Size> videoSizes;
+            params.getSupportedVideoSizes(videoSizes);
+
+            for (size_t i = 0; i < videoSizes.size(); ++i) {
+                params.setVideoSize(videoSizes[i].width, videoSizes[i].height);
+
+                cameraDevice->setParameters(params.flatten());
+                cameraDevice->startPreview();
+                waitForPreviewStart();
+                cameraDevice->setVideoBufferMode(
+                    android::hardware::BnCamera::VIDEO_BUFFER_MODE_BUFFER_QUEUE);
+                cameraDevice->setVideoTarget(previewSurface->getIGraphicBufferProducer());
+                cameraDevice->startRecording();
+                cameraDevice->stopRecording();
+            }
+            cameraDevice->stopPreview();
+            cameraDevice->disconnect();
+        }
+    }
+}
+
+void CameraFuzzer::process(const uint8_t *data, size_t size) {
+    mFuzzedDataProvider = new FuzzedDataProvider(data, size);
+    getNumCameras();
+    invokeCameraSound();
+    if (mNumCameras > 0) {
+        invokeCameraAPIs();
+    }
+    invokeDump();
+    invokeShellCommand();
+    invokeNotifyCalls();
+    delete mFuzzedDataProvider;
+}
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t *data, size_t size) {
+    if (size < 1) {
+        return 0;
+    }
+    sp<CameraFuzzer> camerafuzzer = new CameraFuzzer();
+    if (!camerafuzzer) {
+        return 0;
+    }
+    if (camerafuzzer->init()) {
+        camerafuzzer->process(data, size);
+    }
+    return 0;
+}
diff --git a/services/camera/libcameraservice/tests/Android.mk b/services/camera/libcameraservice/tests/Android.mk
index b530342..0b5ad79 100644
--- a/services/camera/libcameraservice/tests/Android.mk
+++ b/services/camera/libcameraservice/tests/Android.mk
@@ -33,9 +33,11 @@
     android.hardware.camera.provider@2.4 \
     android.hardware.camera.provider@2.5 \
     android.hardware.camera.provider@2.6 \
+    android.hardware.camera.provider@2.7 \
     android.hardware.camera.device@1.0 \
     android.hardware.camera.device@3.2 \
     android.hardware.camera.device@3.4 \
+    android.hardware.camera.device@3.7 \
     android.hidl.token@1.0-utils
 
 LOCAL_STATIC_LIBRARIES := \
diff --git a/services/camera/libcameraservice/tests/CameraProviderManagerTest.cpp b/services/camera/libcameraservice/tests/CameraProviderManagerTest.cpp
index 855b5ab..a74fd9d 100644
--- a/services/camera/libcameraservice/tests/CameraProviderManagerTest.cpp
+++ b/services/camera/libcameraservice/tests/CameraProviderManagerTest.cpp
@@ -23,7 +23,9 @@
 #include <android/hardware/camera/device/3.2/ICameraDeviceCallback.h>
 #include <android/hardware/camera/device/3.2/ICameraDeviceSession.h>
 #include <camera_metadata_hidden.h>
+#include <hidl/HidlBinderSupport.h>
 #include <gtest/gtest.h>
+#include <utility>
 
 using namespace android;
 using namespace android::hardware::camera;
@@ -173,6 +175,25 @@
         return hardware::Void();
     }
 
+    virtual ::android::hardware::Return<bool> linkToDeath(
+            const ::android::sp<::android::hardware::hidl_death_recipient>& recipient,
+            uint64_t cookie) {
+        if (mInitialDeathRecipient.get() == nullptr) {
+            mInitialDeathRecipient =
+                std::make_unique<::android::hardware::hidl_binder_death_recipient>(recipient,
+                        cookie, this);
+        }
+        return true;
+    }
+
+    void signalInitialBinderDeathRecipient() {
+        if (mInitialDeathRecipient.get() != nullptr) {
+            mInitialDeathRecipient->binderDied(nullptr /*who*/);
+        }
+    }
+
+    std::unique_ptr<::android::hardware::hidl_binder_death_recipient> mInitialDeathRecipient;
+
     enum MethodNames {
         SET_CALLBACK,
         GET_VENDOR_TAGS,
@@ -567,3 +588,47 @@
     ASSERT_EQ(serviceProxy.mLastRequestedServiceNames.back(), testProviderInstanceName) <<
             "Incorrect instance requested from service manager";
 }
+
+// Test that CameraProviderManager can handle races between provider death notifications and
+// provider registration callbacks
+TEST(CameraProviderManagerTest, BinderDeathRegistrationRaceTest) {
+
+    std::vector<hardware::hidl_string> deviceNames;
+    deviceNames.push_back("device@3.2/test/0");
+    deviceNames.push_back("device@3.2/test/1");
+    hardware::hidl_vec<common::V1_0::VendorTagSection> vendorSection;
+    status_t res;
+
+    sp<CameraProviderManager> providerManager = new CameraProviderManager();
+    sp<TestStatusListener> statusListener = new TestStatusListener();
+    TestInteractionProxy serviceProxy;
+    sp<TestICameraProvider> provider =  new TestICameraProvider(deviceNames,
+            vendorSection);
+
+    // Not setting up provider in the service proxy yet, to test cases where a
+    // HAL isn't starting right
+    res = providerManager->initialize(statusListener, &serviceProxy);
+    ASSERT_EQ(res, OK) << "Unable to initialize provider manager";
+
+    // Now set up provider and trigger a registration
+    serviceProxy.setProvider(provider);
+
+    hardware::hidl_string testProviderFqInterfaceName =
+            "android.hardware.camera.provider@2.4::ICameraProvider";
+    hardware::hidl_string testProviderInstanceName = "test/0";
+    serviceProxy.mManagerNotificationInterface->onRegistration(
+            testProviderFqInterfaceName,
+            testProviderInstanceName, false);
+
+    // Simulate an artificially delayed registration callback that arrives before the
+    // death notification
+    serviceProxy.mManagerNotificationInterface->onRegistration(
+            testProviderFqInterfaceName,
+            testProviderInstanceName, false);
+
+    provider->signalInitialBinderDeathRecipient();
+
+    auto deviceCount = static_cast<unsigned> (providerManager->getCameraCount().second);
+    ASSERT_EQ(deviceCount, deviceNames.size()) <<
+            "Unexpected amount of camera devices";
+}
diff --git a/services/camera/libcameraservice/tests/ClientManagerTest.cpp b/services/camera/libcameraservice/tests/ClientManagerTest.cpp
index 6a38427..037c5c2 100644
--- a/services/camera/libcameraservice/tests/ClientManagerTest.cpp
+++ b/services/camera/libcameraservice/tests/ClientManagerTest.cpp
@@ -17,10 +17,13 @@
 #define LOG_NDEBUG 0
 #define LOG_TAG "ClientManagerTest"
 
+#include <binder/ActivityManager.h>
+
 #include "../utils/ClientManager.h"
 #include <gtest/gtest.h>
 
 using namespace android::resource_policy;
+using namespace android;
 
 struct TestClient {
     TestClient(int id, int32_t cost, const std::set<int>& conflictingKeys, int32_t ownerId,
@@ -59,13 +62,15 @@
 
     TestClientManager cm;
     TestClient cam0Client(/*ID*/0, /*cost*/100, /*conflicts*/{1},
-            /*ownerId*/ 1000, /*score*/50, /*state*/ 1, /*isVendorClient*/ false);
+            /*ownerId*/ 1000, PERCEPTIBLE_RECENT_FOREGROUND_APP_ADJ,
+            ActivityManager::PROCESS_STATE_PERSISTENT_UI, /*isVendorClient*/ false);
     auto cam0Desc = makeDescFromTestClient(cam0Client);
     auto evicted = cm.addAndEvict(cam0Desc);
     ASSERT_EQ(evicted.size(), 0u) << "Evicted list must be empty";
 
     TestClient cam1Client(/*ID*/1, /*cost*/100, /*conflicts*/{0},
-            /*ownerId*/ 1000, /*score*/50, /*state*/ 1, /*isVendorClient*/ false);
+            /*ownerId*/ 1000, PERCEPTIBLE_RECENT_FOREGROUND_APP_ADJ,
+            ActivityManager::PROCESS_STATE_PERSISTENT_UI, /*isVendorClient*/ false);
     auto cam1Desc = makeDescFromTestClient(cam1Client);
 
     // 1. Check with conflicting devices, new client would be evicted
@@ -76,13 +81,15 @@
     cm.removeAll();
 
     TestClient cam2Client(/*ID*/2, /*cost*/100, /*conflicts*/{},
-            /*ownerId*/ 1000, /*score*/50, /*state*/ 1, /*isVendorClient*/ false);
+            /*ownerId*/ 1000, PERCEPTIBLE_RECENT_FOREGROUND_APP_ADJ,
+            ActivityManager::PROCESS_STATE_PERSISTENT_UI, /*isVendorClient*/ false);
     auto cam2Desc = makeDescFromTestClient(cam2Client);
     evicted = cm.addAndEvict(cam2Desc);
     ASSERT_EQ(evicted.size(), 0u) << "Evicted list must be empty";
 
     TestClient cam3Client(/*ID*/3, /*cost*/100, /*conflicts*/{},
-            /*ownerId*/ 1000, /*score*/50, /*state*/ 1, /*isVendorClient*/ false);
+            /*ownerId*/ 1000, PERCEPTIBLE_RECENT_FOREGROUND_APP_ADJ,
+            ActivityManager::PROCESS_STATE_PERSISTENT_UI, /*isVendorClient*/ false);
     auto cam3Desc = makeDescFromTestClient(cam3Client);
 
     // 2. Check without conflicting devices, the pre-existing client won't be evicted
@@ -97,12 +104,42 @@
     ASSERT_EQ(evicted.size(), 0u) << "Evicted list must be empty";
 
     TestClient cam0ClientNew(/*ID*/0, /*cost*/100, /*conflicts*/{1},
-            /*ownerId*/ 1000, /*score*/50, /*state*/ 1, /*isVendorClient*/ false);
+            /*ownerId*/ 1000, PERCEPTIBLE_RECENT_FOREGROUND_APP_ADJ,
+            ActivityManager::PROCESS_STATE_PERSISTENT_UI, /*isVendorClient*/ false);
     auto cam0DescNew = makeDescFromTestClient(cam0ClientNew);
     wouldBeEvicted = cm.wouldEvict(cam0DescNew);
 
     // 3. Check opening the same camera twice will evict the older client
     ASSERT_EQ(wouldBeEvicted.size(), 1u) << "Evicted list length must be 1";
     ASSERT_EQ(wouldBeEvicted[0], cam0Desc) << "cam0 (old) must be evicted";
-}
 
+    // 4. Check that an invalid client (dead process) will be evicted
+
+    cm.removeAll();
+
+    TestClient camDeadClient(/*ID*/ 0, /*cost*/100, /*conflicts*/{},
+            /*ownerId*/ 1000, INVALID_ADJ,
+            ActivityManager::PROCESS_STATE_NONEXISTENT, /*isVendorClient*/ false);
+    auto camDeadDesc = makeDescFromTestClient(camDeadClient);
+    evicted = cm.addAndEvict(camDeadDesc);
+    wouldBeEvicted = cm.wouldEvict(cam0Desc);
+
+    ASSERT_EQ(evicted.size(), 0u) << "Evicted list must be empty";
+    ASSERT_EQ(wouldBeEvicted.size(), 1u) << "Evicted list length must be 1";
+    ASSERT_EQ(wouldBeEvicted[0], camDeadDesc) << "dead cam must be evicted";
+
+    // 5. Check that a more important client will win
+
+    TestClient cam0ForegroundClient(/*ID*/0, /*cost*/100, /*conflicts*/{1},
+            /*ownerId*/ 1000, FOREGROUND_APP_ADJ,
+            ActivityManager::PROCESS_STATE_PERSISTENT_UI, /*isVendorClient*/ false);
+    auto cam0FgDesc = makeDescFromTestClient(cam0ForegroundClient);
+
+    cm.removeAll();
+    evicted = cm.addAndEvict(cam0Desc);
+    wouldBeEvicted = cm.wouldEvict(cam0FgDesc);
+
+    ASSERT_EQ(evicted.size(), 0u);
+    ASSERT_EQ(wouldBeEvicted.size(), 1u);
+    ASSERT_EQ(wouldBeEvicted[0], cam0Desc) << "less important cam0 must be evicted";
+}
diff --git a/services/camera/libcameraservice/tests/DistortionMapperTest.cpp b/services/camera/libcameraservice/tests/DistortionMapperTest.cpp
index 54935c9..8331136 100644
--- a/services/camera/libcameraservice/tests/DistortionMapperTest.cpp
+++ b/services/camera/libcameraservice/tests/DistortionMapperTest.cpp
@@ -27,7 +27,7 @@
 
 using namespace android;
 using namespace android::camera3;
-
+using DistortionMapperInfo = android::camera3::DistortionMapper::DistortionMapperInfo;
 
 int32_t testActiveArray[] = {100, 100, 1000, 750};
 int32_t testPreCorrActiveArray[] = {90, 90, 1020, 770};
@@ -132,14 +132,15 @@
             /*preCorrectionActiveArray*/ testActiveArray);
 
     auto coords = basicCoords;
-    res = m.mapCorrectedToRaw(coords.data(), 5,  /*clamp*/true);
+    DistortionMapperInfo *mapperInfo = m.getMapperInfo();
+    res = m.mapCorrectedToRaw(coords.data(), 5, mapperInfo, /*clamp*/true);
     ASSERT_EQ(res, OK);
 
     for (size_t i = 0; i < coords.size(); i++) {
         EXPECT_EQ(coords[i], basicCoords[i]);
     }
 
-    res = m.mapRawToCorrected(coords.data(), 5, /*clamp*/true);
+    res = m.mapRawToCorrected(coords.data(), 5, mapperInfo, /*clamp*/true);
     ASSERT_EQ(res, OK);
 
     for (size_t i = 0; i < coords.size(); i++) {
@@ -152,14 +153,14 @@
     };
 
     auto rectsOrig = rects;
-    res = m.mapCorrectedRectToRaw(rects.data(), 2, /*clamp*/true);
+    res = m.mapCorrectedRectToRaw(rects.data(), 2, mapperInfo, /*clamp*/true);
     ASSERT_EQ(res, OK);
 
     for (size_t i = 0; i < rects.size(); i++) {
         EXPECT_EQ(rects[i], rectsOrig[i]);
     }
 
-    res = m.mapRawRectToCorrected(rects.data(), 2, /*clamp*/true);
+    res = m.mapRawRectToCorrected(rects.data(), 2, mapperInfo, /*clamp*/true);
     ASSERT_EQ(res, OK);
 
     for (size_t i = 0; i < rects.size(); i++) {
@@ -176,14 +177,17 @@
             /*preCorrectionActiveArray*/ activeArray.data());
 
     auto rectsOrig = activeArray;
-    res = m.mapCorrectedRectToRaw(activeArray.data(), 1, /*clamp*/true, /*simple*/ true);
+    DistortionMapperInfo *mapperInfo = m.getMapperInfo();
+    res = m.mapCorrectedRectToRaw(activeArray.data(), 1, mapperInfo, /*clamp*/true,
+            /*simple*/ true);
     ASSERT_EQ(res, OK);
 
     for (size_t i = 0; i < activeArray.size(); i++) {
         EXPECT_EQ(activeArray[i], rectsOrig[i]);
     }
 
-    res = m.mapRawRectToCorrected(activeArray.data(), 1, /*clamp*/true, /*simple*/ true);
+    res = m.mapRawRectToCorrected(activeArray.data(), 1, mapperInfo, /*clamp*/true,
+            /*simple*/ true);
     ASSERT_EQ(res, OK);
 
     for (size_t i = 0; i < activeArray.size(); i++) {
@@ -200,7 +204,8 @@
             /*preCorrectionActiveArray*/ testPreCorrActiveArray);
 
     auto coords = basicCoords;
-    res = m.mapCorrectedToRaw(coords.data(), 5,  /*clamp*/true, /*simple*/true);
+    DistortionMapperInfo *mapperInfo = m.getMapperInfo();
+    res = m.mapCorrectedToRaw(coords.data(), 5, mapperInfo, /*clamp*/true, /*simple*/true);
     ASSERT_EQ(res, OK);
 
     ASSERT_EQ(coords[0], 0); ASSERT_EQ(coords[1], 0);
@@ -237,12 +242,13 @@
     auto origCoords = randCoords;
 
     base::Timer correctedToRawTimer;
-    res = m.mapCorrectedToRaw(randCoords.data(), randCoords.size() / 2, clamp, simple);
+    DistortionMapperInfo *mapperInfo = m.getMapperInfo();
+    res = m.mapCorrectedToRaw(randCoords.data(), randCoords.size() / 2, mapperInfo, clamp, simple);
     auto correctedToRawDurationMs = correctedToRawTimer.duration();
     EXPECT_EQ(res, OK);
 
     base::Timer rawToCorrectedTimer;
-    res = m.mapRawToCorrected(randCoords.data(), randCoords.size() / 2, clamp, simple);
+    res = m.mapRawToCorrected(randCoords.data(), randCoords.size() / 2, mapperInfo, clamp, simple);
     auto rawToCorrectedDurationMs = rawToCorrectedTimer.duration();
     EXPECT_EQ(res, OK);
 
@@ -363,7 +369,8 @@
 
     using namespace openCvData;
 
-    res = m.mapRawToCorrected(rawCoords.data(), rawCoords.size() / 2, /*clamp*/false,
+    DistortionMapperInfo *mapperInfo = m.getMapperInfo();
+    res = m.mapRawToCorrected(rawCoords.data(), rawCoords.size() / 2, mapperInfo, /*clamp*/false,
             /*simple*/false);
 
     for (size_t i = 0; i < rawCoords.size(); i+=2) {
diff --git a/services/camera/libcameraservice/tests/ZoomRatioTest.cpp b/services/camera/libcameraservice/tests/ZoomRatioTest.cpp
index 4e94991..ff7aafd 100644
--- a/services/camera/libcameraservice/tests/ZoomRatioTest.cpp
+++ b/services/camera/libcameraservice/tests/ZoomRatioTest.cpp
@@ -182,7 +182,7 @@
 
     // Verify 1.0x zoom doesn't change the coordinates
     auto coords = originalCoords;
-    mapper.scaleCoordinates(coords.data(), coords.size()/2, 1.0f, false /*clamp*/);
+    mapper.scaleCoordinates(coords.data(), coords.size()/2, 1.0f, false /*clamp*/, width, height);
     for (size_t i = 0; i < coords.size(); i++) {
         EXPECT_EQ(coords[i], originalCoords[i]);
     }
@@ -199,7 +199,7 @@
             (width - 1) * 5.0f / 4.0f, (height - 1) / 2.0f, // middle-right after 1.33x zoom
     };
     coords = originalCoords;
-    mapper.scaleCoordinates(coords.data(), coords.size()/2, 2.0f, false /*clamp*/);
+    mapper.scaleCoordinates(coords.data(), coords.size()/2, 2.0f, false /*clamp*/, width, height);
     for (size_t i = 0; i < coords.size(); i++) {
         EXPECT_LE(std::abs(coords[i] - expected2xCoords[i]), kMaxAllowedPixelError);
     }
@@ -216,7 +216,7 @@
             width - 1.0f,  (height - 1) / 2.0f, // middle-right after 1.33x zoom
     };
     coords = originalCoords;
-    mapper.scaleCoordinates(coords.data(), coords.size()/2, 2.0f, true /*clamp*/);
+    mapper.scaleCoordinates(coords.data(), coords.size()/2, 2.0f, true /*clamp*/, width, height);
     for (size_t i = 0; i < coords.size(); i++) {
         EXPECT_LE(std::abs(coords[i] - expected2xCoordsClampedInc[i]), kMaxAllowedPixelError);
     }
@@ -233,7 +233,7 @@
             width - 1.0f,  height / 2.0f, // middle-right after 1.33x zoom
     };
     coords = originalCoords;
-    mapper.scaleCoordinates(coords.data(), coords.size()/2, 2.0f, true /*clamp*/);
+    mapper.scaleCoordinates(coords.data(), coords.size()/2, 2.0f, true /*clamp*/, width, height);
     for (size_t i = 0; i < coords.size(); i++) {
         EXPECT_LE(std::abs(coords[i] - expected2xCoordsClampedExc[i]), kMaxAllowedPixelError);
     }
@@ -250,7 +250,7 @@
             (width - 1) * 5 / 8.0f, (height - 1) / 2.0f, // middle-right after 1.33x zoom-in
     };
     coords = originalCoords;
-    mapper.scaleCoordinates(coords.data(), coords.size()/2, 1.0f/3, false /*clamp*/);
+    mapper.scaleCoordinates(coords.data(), coords.size()/2, 1.0f/3, false /*clamp*/, width, height);
     for (size_t i = 0; i < coords.size(); i++) {
         EXPECT_LE(std::abs(coords[i] - expectedZoomOutCoords[i]), kMaxAllowedPixelError);
     }
diff --git a/services/camera/libcameraservice/utils/CameraServiceProxyWrapper.cpp b/services/camera/libcameraservice/utils/CameraServiceProxyWrapper.cpp
index 0557fcc..76927c0 100644
--- a/services/camera/libcameraservice/utils/CameraServiceProxyWrapper.cpp
+++ b/services/camera/libcameraservice/utils/CameraServiceProxyWrapper.cpp
@@ -120,6 +120,21 @@
     proxyBinder->pingForUserUpdate();
 }
 
+bool CameraServiceProxyWrapper::isRotateAndCropOverrideNeeded(
+        String16 packageName, int sensorOrientation, int lensFacing) {
+    sp<ICameraServiceProxy> proxyBinder = getCameraServiceProxy();
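+    // Default to requiring the override if the proxy is unavailable or the query fails.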
+    if (proxyBinder == nullptr) return true;
+    bool ret = true;
+    auto status = proxyBinder->isRotateAndCropOverrideNeeded(packageName, sensorOrientation,
+            lensFacing, &ret);
+    if (!status.isOk()) {
+        ALOGE("%s: Failed during top activity orientation query: %s", __FUNCTION__,
+                status.exceptionMessage().c_str());
+    }
+
+    return ret;
+}
+
 void CameraServiceProxyWrapper::updateProxyDeviceState(const CameraSessionStats& sessionStats) {
     sp<ICameraServiceProxy> proxyBinder = getCameraServiceProxy();
     if (proxyBinder == nullptr) return;
diff --git a/services/camera/libcameraservice/utils/CameraServiceProxyWrapper.h b/services/camera/libcameraservice/utils/CameraServiceProxyWrapper.h
index 9525935..ad9db68 100644
--- a/services/camera/libcameraservice/utils/CameraServiceProxyWrapper.h
+++ b/services/camera/libcameraservice/utils/CameraServiceProxyWrapper.h
@@ -90,6 +90,10 @@
 
     // Ping camera service proxy for user update
     static void pingCameraServiceProxy();
+
+    // Check whether the current top activity needs a rotate and crop override.
+    static bool isRotateAndCropOverrideNeeded(String16 packageName, int sensorOrientation,
+            int lensFacing);
 };
 
 } // android
diff --git a/services/camera/libcameraservice/utils/ClientManager.h b/services/camera/libcameraservice/utils/ClientManager.h
index 64be6c5..09258ef 100644
--- a/services/camera/libcameraservice/utils/ClientManager.h
+++ b/services/camera/libcameraservice/utils/ClientManager.h
@@ -31,6 +31,31 @@
 namespace android {
 namespace resource_policy {
 
+// Values from frameworks/base/services/core/java/com/android/server/am/ProcessList.java
+const int32_t INVALID_ADJ = -10000;
+const int32_t UNKNOWN_ADJ = 1001;
+const int32_t CACHED_APP_MAX_ADJ = 999;
+const int32_t CACHED_APP_MIN_ADJ = 900;
+const int32_t CACHED_APP_LMK_FIRST_ADJ = 950;
+const int32_t CACHED_APP_IMPORTANCE_LEVELS = 5;
+const int32_t SERVICE_B_ADJ = 800;
+const int32_t PREVIOUS_APP_ADJ = 700;
+const int32_t HOME_APP_ADJ = 600;
+const int32_t SERVICE_ADJ = 500;
+const int32_t HEAVY_WEIGHT_APP_ADJ = 400;
+const int32_t BACKUP_APP_ADJ = 300;
+const int32_t PERCEPTIBLE_LOW_APP_ADJ = 250;
+const int32_t PERCEPTIBLE_MEDIUM_APP_ADJ = 225;
+const int32_t PERCEPTIBLE_APP_ADJ = 200;
+const int32_t VISIBLE_APP_ADJ = 100;
+const int32_t VISIBLE_APP_LAYER_MAX = PERCEPTIBLE_APP_ADJ - VISIBLE_APP_ADJ - 1;
+const int32_t PERCEPTIBLE_RECENT_FOREGROUND_APP_ADJ = 50;
+const int32_t FOREGROUND_APP_ADJ = 0;
+const int32_t PERSISTENT_SERVICE_ADJ = -700;
+const int32_t PERSISTENT_PROC_ADJ = -800;
+const int32_t SYSTEM_ADJ = -900;
+const int32_t NATIVE_ADJ = -1000;
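+// Note: lower adj values denote more important processes; FOREGROUND_APP_ADJ (0) outranks the
+// cached and service levels, and negative values are reserved for persistent/system processes.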
+
 class ClientPriority {
 public:
     /**
@@ -40,7 +65,9 @@
      * hwbinder thread.
      */
     ClientPriority(int32_t score, int32_t state, bool isVendorClient) :
-            mScore(score), mState(state), mIsVendorClient(isVendorClient) { }
+            mScore((score == INVALID_ADJ) ? UNKNOWN_ADJ : score),
+            mState(state),
+            mIsVendorClient(isVendorClient) { }
 
     int32_t getScore() const { return mScore; }
     int32_t getState() const { return mState; }
@@ -50,7 +77,7 @@
         // construction. Otherwise, it can get reset each time cameraserver
         // queries ActivityManagerService for oom_adj scores / states .
         if (!mIsVendorClient) {
-            mScore = score;
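+            // Normalize INVALID_ADJ to UNKNOWN_ADJ so a client with no known oom score is
+            // treated as lowest priority rather than as most important.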
+            mScore = (score == INVALID_ADJ) ? UNKNOWN_ADJ : score;
         }
     }
 
diff --git a/services/camera/libcameraservice/utils/ExifUtils.cpp b/services/camera/libcameraservice/utils/ExifUtils.cpp
index 8a0303a..485705c 100644
--- a/services/camera/libcameraservice/utils/ExifUtils.cpp
+++ b/services/camera/libcameraservice/utils/ExifUtils.cpp
@@ -916,11 +916,25 @@
         ALOGV("%s: Cannot find focal length in metadata.", __FUNCTION__);
     }
 
+    int32_t sensorPixelMode = ANDROID_SENSOR_PIXEL_MODE_DEFAULT;
+    camera_metadata_ro_entry sensorPixelModeEntry = metadata.find(ANDROID_SENSOR_PIXEL_MODE);
+    if (sensorPixelModeEntry.count != 0) {
+        sensorPixelMode = sensorPixelModeEntry.data.u8[0];
+        if (sensorPixelMode != ANDROID_SENSOR_PIXEL_MODE_DEFAULT &&
+            sensorPixelMode != ANDROID_SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION) {
+            ALOGE("%s: Requested sensor pixel mode %d is not one of the valid values",
+                      __FUNCTION__, sensorPixelMode);
+            return false;
+        }
+    }
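+    // Use the maximum resolution active array size when the capture used the maximum
+    // resolution sensor pixel mode.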
+    int32_t activeArrayTag = sensorPixelMode == ANDROID_SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION ?
+            ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE_MAXIMUM_RESOLUTION :
+            ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE;
     if (metadata.exists(ANDROID_SCALER_CROP_REGION) &&
-            staticInfo.exists(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE)) {
+            staticInfo.exists(activeArrayTag)) {
         entry = metadata.find(ANDROID_SCALER_CROP_REGION);
         camera_metadata_ro_entry activeArrayEntry =
-                staticInfo.find(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE);
+                staticInfo.find(activeArrayTag);
 
         if (!setDigitalZoomRatio(entry.data.i32[2], entry.data.i32[3],
                 activeArrayEntry.data.i32[2], activeArrayEntry.data.i32[3])) {
diff --git a/services/camera/libcameraservice/utils/SessionConfigurationUtils.cpp b/services/camera/libcameraservice/utils/SessionConfigurationUtils.cpp
index c28f427..6dcf440 100644
--- a/services/camera/libcameraservice/utils/SessionConfigurationUtils.cpp
+++ b/services/camera/libcameraservice/utils/SessionConfigurationUtils.cpp
@@ -21,22 +21,115 @@
 #include "device3/Camera3Device.h"
 #include "device3/Camera3OutputStream.h"
 
-// Convenience methods for constructing binder::Status objects for error returns
-
-#define STATUS_ERROR(errorCode, errorString) \
-    binder::Status::fromServiceSpecificError(errorCode, \
-            String8::format("%s:%d: %s", __FUNCTION__, __LINE__, errorString))
-
-#define STATUS_ERROR_FMT(errorCode, errorString, ...) \
-    binder::Status::fromServiceSpecificError(errorCode, \
-            String8::format("%s:%d: " errorString, __FUNCTION__, __LINE__, \
-                    __VA_ARGS__))
-
 using android::camera3::OutputStreamInfo;
 using android::camera3::OutputStreamInfo;
 using android::hardware::camera2::ICameraDeviceUser;
+using android::hardware::camera::metadata::V3_6::CameraMetadataEnumAndroidSensorPixelMode;
 
 namespace android {
+namespace camera3 {
+
+void StreamConfiguration::getStreamConfigurations(
+        const CameraMetadata &staticInfo, int configuration,
+        std::unordered_map<int, std::vector<StreamConfiguration>> *scm) {
+    if (scm == nullptr) {
+        ALOGE("%s: StreamConfigurationMap nullptr", __FUNCTION__);
+        return;
+    }
+    const int STREAM_FORMAT_OFFSET = 0;
+    const int STREAM_WIDTH_OFFSET = 1;
+    const int STREAM_HEIGHT_OFFSET = 2;
+    const int STREAM_IS_INPUT_OFFSET = 3;
+
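+    // Each entry in the stream configuration tag is a group of four int32 values:
+    // (format, width, height, isInput).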
+    camera_metadata_ro_entry availableStreamConfigs = staticInfo.find(configuration);
+    for (size_t i = 0; i < availableStreamConfigs.count; i += 4) {
+        int32_t format = availableStreamConfigs.data.i32[i + STREAM_FORMAT_OFFSET];
+        int32_t width = availableStreamConfigs.data.i32[i + STREAM_WIDTH_OFFSET];
+        int32_t height = availableStreamConfigs.data.i32[i + STREAM_HEIGHT_OFFSET];
+        int32_t isInput = availableStreamConfigs.data.i32[i + STREAM_IS_INPUT_OFFSET];
+        StreamConfiguration sc = {format, width, height, isInput};
+        (*scm)[format].push_back(sc);
+    }
+}
+
+void StreamConfiguration::getStreamConfigurations(
+        const CameraMetadata &staticInfo, bool maxRes,
+        std::unordered_map<int, std::vector<StreamConfiguration>> *scm) {
+    int32_t scalerKey =
+            SessionConfigurationUtils::getAppropriateModeTag(
+                    ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS, maxRes);
+
+    int32_t depthKey =
+            SessionConfigurationUtils::getAppropriateModeTag(
+                    ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS, maxRes);
+
+    int32_t dynamicDepthKey =
+            SessionConfigurationUtils::getAppropriateModeTag(
+                    ANDROID_DEPTH_AVAILABLE_DYNAMIC_DEPTH_STREAM_CONFIGURATIONS, maxRes);
+
+    int32_t heicKey =
+            SessionConfigurationUtils::getAppropriateModeTag(
+                    ANDROID_HEIC_AVAILABLE_HEIC_STREAM_CONFIGURATIONS, maxRes);
+
+    getStreamConfigurations(staticInfo, scalerKey, scm);
+    getStreamConfigurations(staticInfo, depthKey, scm);
+    getStreamConfigurations(staticInfo, dynamicDepthKey, scm);
+    getStreamConfigurations(staticInfo, heicKey, scm);
+}
+
+int32_t SessionConfigurationUtils::getAppropriateModeTag(int32_t defaultTag, bool maxResolution) {
+    if (!maxResolution) {
+        return defaultTag;
+    }
+    switch (defaultTag) {
+        case ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS:
+            return ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION;
+        case ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS:
+            return ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS_MAXIMUM_RESOLUTION;
+        case ANDROID_SCALER_AVAILABLE_STALL_DURATIONS:
+            return ANDROID_SCALER_AVAILABLE_STALL_DURATIONS_MAXIMUM_RESOLUTION;
+        case ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS:
+            return ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION;
+        case ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS:
+            return ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS_MAXIMUM_RESOLUTION;
+        case ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS:
+            return ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS_MAXIMUM_RESOLUTION;
+        case ANDROID_DEPTH_AVAILABLE_DYNAMIC_DEPTH_STREAM_CONFIGURATIONS:
+            return ANDROID_DEPTH_AVAILABLE_DYNAMIC_DEPTH_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION;
+        case ANDROID_DEPTH_AVAILABLE_DYNAMIC_DEPTH_MIN_FRAME_DURATIONS:
+            return ANDROID_DEPTH_AVAILABLE_DYNAMIC_DEPTH_MIN_FRAME_DURATIONS_MAXIMUM_RESOLUTION;
+        case ANDROID_DEPTH_AVAILABLE_DYNAMIC_DEPTH_STALL_DURATIONS:
+            return ANDROID_DEPTH_AVAILABLE_DYNAMIC_DEPTH_STALL_DURATIONS_MAXIMUM_RESOLUTION;
+        case ANDROID_HEIC_AVAILABLE_HEIC_STREAM_CONFIGURATIONS:
+            return ANDROID_HEIC_AVAILABLE_HEIC_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION;
+        case ANDROID_HEIC_AVAILABLE_HEIC_MIN_FRAME_DURATIONS:
+            return ANDROID_HEIC_AVAILABLE_HEIC_MIN_FRAME_DURATIONS_MAXIMUM_RESOLUTION;
+        case ANDROID_HEIC_AVAILABLE_HEIC_STALL_DURATIONS:
+            return ANDROID_HEIC_AVAILABLE_HEIC_STALL_DURATIONS_MAXIMUM_RESOLUTION;
+        case ANDROID_SENSOR_OPAQUE_RAW_SIZE:
+            return ANDROID_SENSOR_OPAQUE_RAW_SIZE_MAXIMUM_RESOLUTION;
+        case ANDROID_LENS_INTRINSIC_CALIBRATION:
+            return ANDROID_LENS_INTRINSIC_CALIBRATION_MAXIMUM_RESOLUTION;
+        case ANDROID_LENS_DISTORTION:
+            return ANDROID_LENS_DISTORTION_MAXIMUM_RESOLUTION;
+        default:
+            ALOGE("%s: Tag %d doesn't have a maximum resolution counterpart", __FUNCTION__,
+                    defaultTag);
+            return -1;
+    }
+    return -1;
+}
+
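+// Builds both the default and the maximum resolution stream configuration maps for a camera
+// device from its static metadata.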
+StreamConfigurationPair
+SessionConfigurationUtils::getStreamConfigurationPair(const CameraMetadata &staticInfo) {
+    camera3::StreamConfigurationPair streamConfigurationPair;
+    camera3::StreamConfiguration::getStreamConfigurations(staticInfo, false,
+            &streamConfigurationPair.mDefaultStreamConfigurationMap);
+    camera3::StreamConfiguration::getStreamConfigurations(staticInfo, true,
+            &streamConfigurationPair.mMaximumResolutionStreamConfigurationMap);
+    return streamConfigurationPair;
+}
 
 int64_t SessionConfigurationUtils::euclidDistSquare(int32_t x0, int32_t y0, int32_t x1, int32_t y1) {
     int64_t d0 = x0 - x1;
@@ -45,15 +138,22 @@
 }
 
 bool SessionConfigurationUtils::roundBufferDimensionNearest(int32_t width, int32_t height,
-        int32_t format, android_dataspace dataSpace, const CameraMetadata& info,
-        /*out*/int32_t* outWidth, /*out*/int32_t* outHeight) {
+        int32_t format, android_dataspace dataSpace,
+        const CameraMetadata& info, bool maxResolution, /*out*/int32_t* outWidth,
+        /*out*/int32_t* outHeight) {
+    const int32_t depthSizesTag =
+            getAppropriateModeTag(ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS,
+                    maxResolution);
+    const int32_t scalerSizesTag =
+            getAppropriateModeTag(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS, maxResolution);
+    const int32_t heicSizesTag =
+            getAppropriateModeTag(ANDROID_HEIC_AVAILABLE_HEIC_STREAM_CONFIGURATIONS, maxResolution);
 
     camera_metadata_ro_entry streamConfigs =
-            (dataSpace == HAL_DATASPACE_DEPTH) ?
-            info.find(ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS) :
+            (dataSpace == HAL_DATASPACE_DEPTH) ? info.find(depthSizesTag) :
             (dataSpace == static_cast<android_dataspace>(HAL_DATASPACE_HEIF)) ?
-            info.find(ANDROID_HEIC_AVAILABLE_HEIC_STREAM_CONFIGURATIONS) :
-            info.find(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS);
+            info.find(heicSizesTag) :
+            info.find(scalerSizesTag);
 
     int32_t bestWidth = -1;
     int32_t bestHeight = -1;
@@ -128,11 +228,11 @@
 binder::Status SessionConfigurationUtils::createSurfaceFromGbp(
         OutputStreamInfo& streamInfo, bool isStreamInfoValid,
         sp<Surface>& surface, const sp<IGraphicBufferProducer>& gbp,
-        const String8 &cameraId, const CameraMetadata &physicalCameraMetadata) {
-
+        const String8 &logicalCameraId, const CameraMetadata &physicalCameraMetadata,
+        const std::vector<int32_t> &sensorPixelModesUsed) {
     // bufferProducer must be non-null
     if (gbp == nullptr) {
-        String8 msg = String8::format("Camera %s: Surface is NULL", cameraId.string());
+        String8 msg = String8::format("Camera %s: Surface is NULL", logicalCameraId.string());
         ALOGW("%s: %s", __FUNCTION__, msg.string());
         return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
     }
@@ -144,13 +244,13 @@
     status_t err;
     if ((err = gbp->getConsumerUsage(&consumerUsage)) != OK) {
         String8 msg = String8::format("Camera %s: Failed to query Surface consumer usage: %s (%d)",
-                cameraId.string(), strerror(-err), err);
+                logicalCameraId.string(), strerror(-err), err);
         ALOGE("%s: %s", __FUNCTION__, msg.string());
         return STATUS_ERROR(CameraService::ERROR_INVALID_OPERATION, msg.string());
     }
     if (consumerUsage & GraphicBuffer::USAGE_HW_TEXTURE) {
         ALOGW("%s: Camera %s with consumer usage flag: %" PRIu64 ": Forcing asynchronous mode for"
-                "stream", __FUNCTION__, cameraId.string(), consumerUsage);
+                "stream", __FUNCTION__, logicalCameraId.string(), consumerUsage);
         useAsync = true;
     }
 
@@ -169,26 +269,26 @@
     android_dataspace dataSpace;
     if ((err = anw->query(anw, NATIVE_WINDOW_WIDTH, &width)) != OK) {
         String8 msg = String8::format("Camera %s: Failed to query Surface width: %s (%d)",
-                 cameraId.string(), strerror(-err), err);
+                 logicalCameraId.string(), strerror(-err), err);
         ALOGE("%s: %s", __FUNCTION__, msg.string());
         return STATUS_ERROR(CameraService::ERROR_INVALID_OPERATION, msg.string());
     }
     if ((err = anw->query(anw, NATIVE_WINDOW_HEIGHT, &height)) != OK) {
         String8 msg = String8::format("Camera %s: Failed to query Surface height: %s (%d)",
-                cameraId.string(), strerror(-err), err);
+                logicalCameraId.string(), strerror(-err), err);
         ALOGE("%s: %s", __FUNCTION__, msg.string());
         return STATUS_ERROR(CameraService::ERROR_INVALID_OPERATION, msg.string());
     }
     if ((err = anw->query(anw, NATIVE_WINDOW_FORMAT, &format)) != OK) {
         String8 msg = String8::format("Camera %s: Failed to query Surface format: %s (%d)",
-                cameraId.string(), strerror(-err), err);
+                logicalCameraId.string(), strerror(-err), err);
         ALOGE("%s: %s", __FUNCTION__, msg.string());
         return STATUS_ERROR(CameraService::ERROR_INVALID_OPERATION, msg.string());
     }
     if ((err = anw->query(anw, NATIVE_WINDOW_DEFAULT_DATASPACE,
             reinterpret_cast<int*>(&dataSpace))) != OK) {
         String8 msg = String8::format("Camera %s: Failed to query Surface dataspace: %s (%d)",
-                cameraId.string(), strerror(-err), err);
+                logicalCameraId.string(), strerror(-err), err);
         ALOGE("%s: %s", __FUNCTION__, msg.string());
         return STATUS_ERROR(CameraService::ERROR_INVALID_OPERATION, msg.string());
     }
@@ -199,16 +299,31 @@
             ((consumerUsage & GRALLOC_USAGE_HW_MASK) &&
              ((consumerUsage & GRALLOC_USAGE_SW_READ_MASK) == 0))) {
         ALOGW("%s: Camera %s: Overriding format %#x to IMPLEMENTATION_DEFINED",
-                __FUNCTION__, cameraId.string(), format);
+                __FUNCTION__, logicalCameraId.string(), format);
         format = HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
     }
+    std::unordered_set<int32_t> overriddenSensorPixelModes;
+    if (checkAndOverrideSensorPixelModesUsed(sensorPixelModesUsed, format, width, height,
+            physicalCameraMetadata, flexibleConsumer, &overriddenSensorPixelModes) != OK) {
+        String8 msg = String8::format("Camera %s: sensor pixel modes for stream with "
+                "format %#x are not valid",logicalCameraId.string(), format);
+        ALOGE("%s: %s", __FUNCTION__, msg.string());
+        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
+    }
+    bool foundInMaxRes = false;
+    if (overriddenSensorPixelModes.find(ANDROID_SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION) !=
+            overriddenSensorPixelModes.end()) {
+        // the stream size/format was found in the maximum resolution stream configuration map
+        foundInMaxRes = true;
+    }
     // Round dimensions to the nearest dimensions available for this format
     if (flexibleConsumer && isPublicFormat(format) &&
             !SessionConfigurationUtils::roundBufferDimensionNearest(width, height,
-            format, dataSpace, physicalCameraMetadata, /*out*/&width, /*out*/&height)) {
+            format, dataSpace, physicalCameraMetadata, foundInMaxRes, /*out*/&width,
+            /*out*/&height)) {
         String8 msg = String8::format("Camera %s: No supported stream configurations with "
                 "format %#x defined, failed to create output stream",
-                cameraId.string(), format);
+                logicalCameraId.string(), format);
         ALOGE("%s: %s", __FUNCTION__, msg.string());
         return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
     }
@@ -219,30 +334,31 @@
         streamInfo.format = format;
         streamInfo.dataSpace = dataSpace;
         streamInfo.consumerUsage = consumerUsage;
+        streamInfo.sensorPixelModesUsed = overriddenSensorPixelModes;
         return binder::Status::ok();
     }
     if (width != streamInfo.width) {
         String8 msg = String8::format("Camera %s:Surface width doesn't match: %d vs %d",
-                cameraId.string(), width, streamInfo.width);
+                logicalCameraId.string(), width, streamInfo.width);
         ALOGE("%s: %s", __FUNCTION__, msg.string());
         return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
     }
     if (height != streamInfo.height) {
         String8 msg = String8::format("Camera %s:Surface height doesn't match: %d vs %d",
-                 cameraId.string(), height, streamInfo.height);
+                 logicalCameraId.string(), height, streamInfo.height);
         ALOGE("%s: %s", __FUNCTION__, msg.string());
         return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
     }
     if (format != streamInfo.format) {
         String8 msg = String8::format("Camera %s:Surface format doesn't match: %d vs %d",
-                 cameraId.string(), format, streamInfo.format);
+                 logicalCameraId.string(), format, streamInfo.format);
         ALOGE("%s: %s", __FUNCTION__, msg.string());
         return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
     }
     if (format != HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED) {
         if (dataSpace != streamInfo.dataSpace) {
             String8 msg = String8::format("Camera %s:Surface dataSpace doesn't match: %d vs %d",
-                    cameraId.string(), dataSpace, streamInfo.dataSpace);
+                    logicalCameraId.string(), dataSpace, streamInfo.dataSpace);
             ALOGE("%s: %s", __FUNCTION__, msg.string());
             return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
         }
@@ -251,7 +367,7 @@
         if (consumerUsage != streamInfo.consumerUsage) {
             String8 msg = String8::format(
                     "Camera %s:Surface usage flag doesn't match %" PRIu64 " vs %" PRIu64 "",
-                    cameraId.string(), consumerUsage, streamInfo.consumerUsage);
+                    logicalCameraId.string(), consumerUsage, streamInfo.consumerUsage);
             ALOGE("%s: %s", __FUNCTION__, msg.string());
             return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
         }
@@ -259,26 +375,32 @@
     return binder::Status::ok();
 }
 
-
 void SessionConfigurationUtils::mapStreamInfo(const OutputStreamInfo &streamInfo,
             camera3::camera_stream_rotation_t rotation, String8 physicalId,
-            hardware::camera::device::V3_4::Stream *stream /*out*/) {
+            int32_t groupId, hardware::camera::device::V3_7::Stream *stream /*out*/) {
     if (stream == nullptr) {
         return;
     }
 
-    stream->v3_2.streamType = hardware::camera::device::V3_2::StreamType::OUTPUT;
-    stream->v3_2.width = streamInfo.width;
-    stream->v3_2.height = streamInfo.height;
-    stream->v3_2.format = Camera3Device::mapToPixelFormat(streamInfo.format);
+    stream->v3_4.v3_2.streamType = hardware::camera::device::V3_2::StreamType::OUTPUT;
+    stream->v3_4.v3_2.width = streamInfo.width;
+    stream->v3_4.v3_2.height = streamInfo.height;
+    stream->v3_4.v3_2.format = Camera3Device::mapToPixelFormat(streamInfo.format);
     auto u = streamInfo.consumerUsage;
     camera3::Camera3OutputStream::applyZSLUsageQuirk(streamInfo.format, &u);
-    stream->v3_2.usage = Camera3Device::mapToConsumerUsage(u);
-    stream->v3_2.dataSpace = Camera3Device::mapToHidlDataspace(streamInfo.dataSpace);
-    stream->v3_2.rotation = Camera3Device::mapToStreamRotation(rotation);
-    stream->v3_2.id = -1; // Invalid stream id
-    stream->physicalCameraId = std::string(physicalId.string());
-    stream->bufferSize = 0;
+    stream->v3_4.v3_2.usage = Camera3Device::mapToConsumerUsage(u);
+    stream->v3_4.v3_2.dataSpace = Camera3Device::mapToHidlDataspace(streamInfo.dataSpace);
+    stream->v3_4.v3_2.rotation = Camera3Device::mapToStreamRotation(rotation);
+    stream->v3_4.v3_2.id = -1; // Invalid stream id
+    stream->v3_4.physicalCameraId = std::string(physicalId.string());
+    stream->v3_4.bufferSize = 0;
+    stream->groupId = groupId;
+    stream->sensorPixelModesUsed.resize(streamInfo.sensorPixelModesUsed.size());
+    size_t idx = 0;
+    for (auto mode : streamInfo.sensorPixelModesUsed) {
+        stream->sensorPixelModesUsed[idx++] =
+                static_cast<CameraMetadataEnumAndroidSensorPixelMode>(mode);
+    }
 }
 
 binder::Status SessionConfigurationUtils::checkPhysicalCameraId(
@@ -358,7 +480,7 @@
         const SessionConfiguration& sessionConfiguration,
         const String8 &logicalCameraId, const CameraMetadata &deviceInfo,
         metadataGetter getMetadata, const std::vector<std::string> &physicalCameraIds,
-        hardware::camera::device::V3_4::StreamConfiguration &streamConfiguration, bool *earlyExit) {
+        hardware::camera::device::V3_7::StreamConfiguration &streamConfiguration, bool *earlyExit) {
 
     auto operatingMode = sessionConfiguration.getOperatingMode();
     binder::Status res = checkOperatingMode(operatingMode, deviceInfo, logicalCameraId);
@@ -393,14 +515,21 @@
     streamConfiguration.streams.resize(streamCount);
     size_t streamIdx = 0;
     if (isInputValid) {
-        streamConfiguration.streams[streamIdx++] = {{/*streamId*/0,
+        hardware::hidl_vec<CameraMetadataEnumAndroidSensorPixelMode> defaultSensorPixelModes;
+        defaultSensorPixelModes.resize(1);
+        defaultSensorPixelModes[0] =
+                static_cast<CameraMetadataEnumAndroidSensorPixelMode>(
+                        ANDROID_SENSOR_PIXEL_MODE_DEFAULT);
+        streamConfiguration.streams[streamIdx++] = {{{/*streamId*/0,
                 hardware::camera::device::V3_2::StreamType::INPUT,
                 static_cast<uint32_t> (sessionConfiguration.getInputWidth()),
                 static_cast<uint32_t> (sessionConfiguration.getInputHeight()),
                 Camera3Device::mapToPixelFormat(sessionConfiguration.getInputFormat()),
                 /*usage*/ 0, HAL_DATASPACE_UNKNOWN,
                 hardware::camera::device::V3_2::StreamRotation::ROTATION_0},
-                /*physicalId*/ nullptr, /*bufferSize*/0};
+                /*physicalId*/ nullptr, /*bufferSize*/0}, /*groupId*/-1, defaultSensorPixelModes};
+        streamConfiguration.multiResolutionInputImage =
+                sessionConfiguration.inputIsMultiResolution();
     }
 
     for (const auto &it : outputConfigs) {
@@ -408,8 +537,15 @@
             it.getGraphicBufferProducers();
         bool deferredConsumer = it.isDeferred();
         String8 physicalCameraId = String8(it.getPhysicalCameraId());
+
+        std::vector<int32_t> sensorPixelModesUsed = it.getSensorPixelModesUsed();
+        const CameraMetadata &physicalDeviceInfo = getMetadata(physicalCameraId);
+        const CameraMetadata &metadataChosen =
+                physicalCameraId.size() > 0 ? physicalDeviceInfo : deviceInfo;
+
         size_t numBufferProducers = bufferProducers.size();
         bool isStreamInfoValid = false;
+        int32_t groupId = it.isMultiResolution() ? it.getSurfaceSetID() : -1;
         OutputStreamInfo streamInfo;
 
         res = checkSurfaceType(numBufferProducers, deferredConsumer, it.getSurfaceType());
@@ -432,7 +568,16 @@
             if (surfaceType == OutputConfiguration::SURFACE_TYPE_SURFACE_VIEW) {
                 streamInfo.consumerUsage |= GraphicBuffer::USAGE_HW_COMPOSER;
             }
-            mapStreamInfo(streamInfo, camera3::CAMERA_STREAM_ROTATION_0, physicalCameraId,
+            if (checkAndOverrideSensorPixelModesUsed(sensorPixelModesUsed,
+                    streamInfo.format, streamInfo.width,
+                    streamInfo.height, metadataChosen, false /*flexibleConsumer*/,
+                    &streamInfo.sensorPixelModesUsed) != OK) {
+                ALOGE("%s: Deferred surface sensor pixel modes not valid",
+                        __FUNCTION__);
+                return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT,
+                        "Deferred surface sensor pixel modes not valid");
+            }
+            mapStreamInfo(streamInfo, camera3::CAMERA_STREAM_ROTATION_0, physicalCameraId, groupId,
                     &streamConfiguration.streams[streamIdx++]);
             isStreamInfoValid = true;
 
@@ -443,10 +588,8 @@
 
         for (auto& bufferProducer : bufferProducers) {
             sp<Surface> surface;
-            const CameraMetadata &physicalDeviceInfo = getMetadata(physicalCameraId);
             res = createSurfaceFromGbp(streamInfo, isStreamInfoValid, surface, bufferProducer,
-                    logicalCameraId,
-                    physicalCameraId.size() > 0 ? physicalDeviceInfo : deviceInfo );
+                    logicalCameraId, metadataChosen, sensorPixelModesUsed);
 
             if (!res.isOk())
                 return res;
@@ -461,6 +604,7 @@
                     // additional internal camera streams.
                     std::vector<OutputStreamInfo> compositeStreams;
                     if (isDepthCompositeStream) {
+                      // TODO: Take care of composite streams.
                         ret = camera3::DepthCompositeStream::getCompositeStreamInfo(streamInfo,
                                 deviceInfo, &compositeStreams);
                     } else {
@@ -488,19 +632,134 @@
                     for (const auto& compositeStream : compositeStreams) {
                         mapStreamInfo(compositeStream,
                                 static_cast<camera_stream_rotation_t> (it.getRotation()),
-                                physicalCameraId, &streamConfiguration.streams[streamIdx++]);
+                                physicalCameraId, groupId,
+                                &streamConfiguration.streams[streamIdx++]);
                     }
                 } else {
                     mapStreamInfo(streamInfo,
                             static_cast<camera_stream_rotation_t> (it.getRotation()),
-                            physicalCameraId, &streamConfiguration.streams[streamIdx++]);
+                            physicalCameraId, groupId, &streamConfiguration.streams[streamIdx++]);
                 }
                 isStreamInfoValid = true;
             }
         }
     }
     return binder::Status::ok();
-
 }
 
-}// namespace android
+static bool inStreamConfigurationMap(int format, int width, int height,
+        const std::unordered_map<int, std::vector<camera3::StreamConfiguration>> &sm) {
+    auto scs = sm.find(format);
+    if (scs == sm.end()) {
+        return false;
+    }
+    for (auto &sc : scs->second) {
+        if (sc.width == width && sc.height == height && sc.isInput == 0) {
+            return true;
+        }
+    }
+    return false;
+}
+
+static std::unordered_set<int32_t> convertToSet(const std::vector<int32_t> &sensorPixelModesUsed) {
+    return std::unordered_set<int32_t>(sensorPixelModesUsed.begin(), sensorPixelModesUsed.end());
+}
+
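+// Validates the sensorPixelModesUsed requested for a stream of the given format/size against
+// the device's default and maximum resolution stream configuration maps, and writes the
+// resulting set into overriddenSensorPixelModesUsed.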
+status_t SessionConfigurationUtils::checkAndOverrideSensorPixelModesUsed(
+        const std::vector<int32_t> &sensorPixelModesUsed, int format, int width, int height,
+        const CameraMetadata &staticInfo, bool flexibleConsumer,
+        std::unordered_set<int32_t> *overriddenSensorPixelModesUsed) {
+    if (!isUltraHighResolutionSensor(staticInfo)) {
+        overriddenSensorPixelModesUsed->clear();
+        overriddenSensorPixelModesUsed->insert(ANDROID_SENSOR_PIXEL_MODE_DEFAULT);
+        return OK;
+    }
+
+    StreamConfigurationPair streamConfigurationPair = getStreamConfigurationPair(staticInfo);
+    const std::unordered_set<int32_t> &sensorPixelModesUsedSet =
+            convertToSet(sensorPixelModesUsed);
+    bool isInDefaultStreamConfigurationMap =
+            inStreamConfigurationMap(format, width, height,
+                    streamConfigurationPair.mDefaultStreamConfigurationMap);
+
+    bool isInMaximumResolutionStreamConfigurationMap =
+            inStreamConfigurationMap(format, width, height,
+                    streamConfigurationPair.mMaximumResolutionStreamConfigurationMap);
+
+    // Case 1: The client has not changed the sensor mode defaults. In this case, we check if the
+    // size + format of the OutputConfiguration is found exclusively in one of the two stream
+    // configuration maps.
+    // If yes, add that map's sensorPixelMode to overriddenSensorPixelModes.
+    // If no, add 'DEFAULT' to sensorPixelMode. This maintains backwards
+    // compatibility.
+    if (sensorPixelModesUsedSet.size() == 0) {
+        // Ambiguous case, default to only 'DEFAULT' mode.
+        if (isInDefaultStreamConfigurationMap && isInMaximumResolutionStreamConfigurationMap) {
+            overriddenSensorPixelModesUsed->insert(ANDROID_SENSOR_PIXEL_MODE_DEFAULT);
+            return OK;
+        }
+        // We don't allow flexible consumer for max resolution mode.
+        if (isInMaximumResolutionStreamConfigurationMap) {
+            overriddenSensorPixelModesUsed->insert(ANDROID_SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION);
+            return OK;
+        }
+        if (isInDefaultStreamConfigurationMap || (flexibleConsumer && width < ROUNDING_WIDTH_CAP)) {
+            overriddenSensorPixelModesUsed->insert(ANDROID_SENSOR_PIXEL_MODE_DEFAULT);
+            return OK;
+        }
+        return BAD_VALUE;
+    }
+
+    // Case 2: The app has set sensorPixelModesUsed explicitly; verify that each requested
+    // mode is present in the corresponding stream configuration map, or err out.
+    if (sensorPixelModesUsedSet.find(ANDROID_SENSOR_PIXEL_MODE_DEFAULT) !=
+            sensorPixelModesUsedSet.end() && !isInDefaultStreamConfigurationMap) {
+        return BAD_VALUE;
+    }
+
+    if (sensorPixelModesUsedSet.find(ANDROID_SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION) !=
+            sensorPixelModesUsedSet.end() && !isInMaximumResolutionStreamConfigurationMap) {
+        return BAD_VALUE;
+    }
+    *overriddenSensorPixelModesUsed = sensorPixelModesUsedSet;
+    return OK;
+}
+
+bool SessionConfigurationUtils::isUltraHighResolutionSensor(const CameraMetadata &deviceInfo) {
+    camera_metadata_ro_entry_t entryCap;
+    entryCap = deviceInfo.find(ANDROID_REQUEST_AVAILABLE_CAPABILITIES);
+    // Go through the capabilities and check if it has
+    // ANDROID_REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR
+    for (size_t i = 0; i < entryCap.count; ++i) {
+        uint8_t capability = entryCap.data.u8[i];
+        if (capability == ANDROID_REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR) {
+            return true;
+        }
+    }
+    return false;
+}
+
+bool SessionConfigurationUtils::convertHALStreamCombinationFromV37ToV34(
+        hardware::camera::device::V3_4::StreamConfiguration &streamConfigV34,
+        const hardware::camera::device::V3_7::StreamConfiguration &streamConfigV37) {
+    if (streamConfigV37.multiResolutionInputImage) {
+        // ICameraDevice older than 3.7 doesn't support multi-resolution input image.
+        return false;
+    }
+
+    streamConfigV34.streams.resize(streamConfigV37.streams.size());
+    for (size_t i = 0; i < streamConfigV37.streams.size(); i++) {
+        if (streamConfigV37.streams[i].groupId != -1) {
+            // ICameraDevice older than 3.7 doesn't support multi-resolution output
+            // image
+            return false;
+        }
+        streamConfigV34.streams[i] = streamConfigV37.streams[i].v3_4;
+    }
+    streamConfigV34.operationMode = streamConfigV37.operationMode;
+    streamConfigV34.sessionParams = streamConfigV37.sessionParams;
+
+    return true;
+}
+
+} // namespace camera3
+} // namespace android
diff --git a/services/camera/libcameraservice/utils/SessionConfigurationUtils.h b/services/camera/libcameraservice/utils/SessionConfigurationUtils.h
index 6ac7ab4..863a0cd 100644
--- a/services/camera/libcameraservice/utils/SessionConfigurationUtils.h
+++ b/services/camera/libcameraservice/utils/SessionConfigurationUtils.h
@@ -21,25 +21,61 @@
 #include <camera/camera2/OutputConfiguration.h>
 #include <camera/camera2/SessionConfiguration.h>
 #include <camera/camera2/SubmitInfo.h>
+#include <android/hardware/camera/device/3.7/types.h>
 #include <android/hardware/camera/device/3.4/ICameraDeviceSession.h>
+#include <android/hardware/camera/device/3.7/ICameraDeviceSession.h>
 
 #include <device3/Camera3StreamInterface.h>
 
 #include <stdint.h>
 
+// Convenience methods for constructing binder::Status objects for error returns
+
+#define STATUS_ERROR(errorCode, errorString) \
+    binder::Status::fromServiceSpecificError(errorCode, \
+            String8::format("%s:%d: %s", __FUNCTION__, __LINE__, errorString))
+
+#define STATUS_ERROR_FMT(errorCode, errorString, ...) \
+    binder::Status::fromServiceSpecificError(errorCode, \
+            String8::format("%s:%d: " errorString, __FUNCTION__, __LINE__, \
+                    __VA_ARGS__))
+
 namespace android {
+namespace camera3 {
 
 typedef std::function<CameraMetadata (const String8 &)> metadataGetter;
 
+class StreamConfiguration {
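+    // A single entry parsed from the available stream configurations static metadata:
+    // (format, width, height, isInput).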
+public:
+    int32_t format;
+    int32_t width;
+    int32_t height;
+    int32_t isInput;
+    static void getStreamConfigurations(
+            const CameraMetadata &static_info, bool maxRes,
+            std::unordered_map<int, std::vector<StreamConfiguration>> *scm);
+    static void getStreamConfigurations(
+            const CameraMetadata &static_info, int configuration,
+            std::unordered_map<int, std::vector<StreamConfiguration>> *scm);
+};
+
+// Holds the default StreamConfigurationMap and Maximum resolution
+// StreamConfigurationMap for a camera device.
+struct StreamConfigurationPair {
+    std::unordered_map<int, std::vector<camera3::StreamConfiguration>>
+            mDefaultStreamConfigurationMap;
+    std::unordered_map<int, std::vector<camera3::StreamConfiguration>>
+            mMaximumResolutionStreamConfigurationMap;
+};
+
 class SessionConfigurationUtils {
 public:
-
     static int64_t euclidDistSquare(int32_t x0, int32_t y0, int32_t x1, int32_t y1);
 
     // Find the closest dimensions for a given format in available stream configurations with
     // a width <= ROUNDING_WIDTH_CAP
     static bool roundBufferDimensionNearest(int32_t width, int32_t height, int32_t format,
-            android_dataspace dataSpace, const CameraMetadata& info,
+            android_dataspace dataSpace, const CameraMetadata& info, bool maxResolution,
             /*out*/int32_t* outWidth, /*out*/int32_t* outHeight);
 
     //check if format is not custom format
@@ -50,11 +86,12 @@
     static binder::Status createSurfaceFromGbp(
         camera3::OutputStreamInfo& streamInfo, bool isStreamInfoValid,
         sp<Surface>& surface, const sp<IGraphicBufferProducer>& gbp,
-        const String8 &cameraId, const CameraMetadata &physicalCameraMetadata);
+        const String8 &logicalCameraId, const CameraMetadata &physicalCameraMetadata,
+        const std::vector<int32_t> &sensorPixelModesUsed);
 
     static void mapStreamInfo(const camera3::OutputStreamInfo &streamInfo,
-            camera3::camera_stream_rotation_t rotation, String8 physicalId,
-            hardware::camera::device::V3_4::Stream *stream /*out*/);
+            camera3::camera_stream_rotation_t rotation, String8 physicalId, int32_t groupId,
+            hardware::camera::device::V3_7::Stream *stream /*out*/);
 
     // Check that the physicalCameraId passed in is supported by the camera
     // device.
@@ -76,13 +113,33 @@
     convertToHALStreamCombination(const SessionConfiguration& sessionConfiguration,
             const String8 &cameraId, const CameraMetadata &deviceInfo,
             metadataGetter getMetadata, const std::vector<std::string> &physicalCameraIds,
-            hardware::camera::device::V3_4::StreamConfiguration &streamConfiguration,
+            hardware::camera::device::V3_7::StreamConfiguration &streamConfiguration,
             bool *earlyExit);
 
+    // Utility function to convert a V3_7::StreamConfiguration to
+    // V3_4::StreamConfiguration. Return false if the original V3_7 configuration cannot
+    // be used by older version HAL.
+    static bool convertHALStreamCombinationFromV37ToV34(
+            hardware::camera::device::V3_4::StreamConfiguration &streamConfigV34,
+            const hardware::camera::device::V3_7::StreamConfiguration &streamConfigV37);
+
+    static StreamConfigurationPair getStreamConfigurationPair(const CameraMetadata &metadata);
+
+    static status_t checkAndOverrideSensorPixelModesUsed(
+            const std::vector<int32_t> &sensorPixelModesUsed, int format, int width, int height,
+            const CameraMetadata &staticInfo, bool flexibleConsumer,
+            std::unordered_set<int32_t> *overriddenSensorPixelModesUsed);
+
+    static bool isUltraHighResolutionSensor(const CameraMetadata &deviceInfo);
+
+    static int32_t getAppropriateModeTag(int32_t defaultTag, bool maxResolution = false);
+
     static const int32_t MAX_SURFACES_PER_STREAM = 4;
 
     static const int32_t ROUNDING_WIDTH_CAP = 1920;
+
 };
 
+} // camera3
 } // android
 #endif
diff --git a/services/mediacodec/registrant/CodecServiceRegistrant.cpp b/services/mediacodec/registrant/CodecServiceRegistrant.cpp
index 184251a..b479433 100644
--- a/services/mediacodec/registrant/CodecServiceRegistrant.cpp
+++ b/services/mediacodec/registrant/CodecServiceRegistrant.cpp
@@ -25,8 +25,9 @@
 #include <C2PlatformSupport.h>
 #include <codec2/hidl/1.0/ComponentStore.h>
 #include <codec2/hidl/1.1/ComponentStore.h>
-#include <codec2/hidl/1.1/Configurable.h>
-#include <codec2/hidl/1.1/types.h>
+#include <codec2/hidl/1.2/ComponentStore.h>
+#include <codec2/hidl/1.2/Configurable.h>
+#include <codec2/hidl/1.2/types.h>
 #include <hidl/HidlSupport.h>
 #include <media/CodecServiceRegistrant.h>
 
@@ -37,8 +38,8 @@
 using ::android::hardware::Return;
 using ::android::hardware::Void;
 using ::android::sp;
-using namespace ::android::hardware::media::c2::V1_1;
-using namespace ::android::hardware::media::c2::V1_1::utils;
+using namespace ::android::hardware::media::c2::V1_2;
+using namespace ::android::hardware::media::c2::V1_2::utils;
 
 constexpr c2_status_t C2_TRANSACTION_FAILED = C2_CORRUPTED;
 
@@ -420,11 +421,20 @@
     // STOPSHIP: Remove code name checking once platform version bumps up to 30.
     std::string codeName =
         android::base::GetProperty("ro.build.version.codename", "");
-    if (codeName == "R") {
-        platformVersion = 30;
+    if (codeName == "S") {
+        platformVersion = 31;
     }
 
     switch (platformVersion) {
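+        // Android S (API level 31): register the Codec2 v1.2 software component store.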
+        case 31: {
+            android::sp<V1_2::IComponentStore> storeV1_2 =
+                new V1_2::utils::ComponentStore(store);
+            if (storeV1_2->registerAsService("software") != android::OK) {
+                LOG(ERROR) << "Cannot register software Codec2 v1.2 service.";
+                return;
+            }
+            break;
+        }
         case 30: {
             android::sp<V1_1::IComponentStore> storeV1_1 =
                 new V1_1::utils::ComponentStore(store);
diff --git a/services/mediametrics/Android.bp b/services/mediametrics/Android.bp
index e40754e..5989181 100644
--- a/services/mediametrics/Android.bp
+++ b/services/mediametrics/Android.bp
@@ -30,7 +30,7 @@
     "modernize-loop-convert",
     "modernize-make-shared",
     "modernize-make-unique",
-    "modernize-pass-by-value",
+    // "modernize-pass-by-value", // found in TimeMachine.h
     "modernize-raw-string-literal",
     "modernize-redundant-void-arg",
     "modernize-replace-auto-ptr",
@@ -38,13 +38,13 @@
     "modernize-return-braced-init-list",
     "modernize-shrink-to-fit",
     "modernize-unary-static-assert",
-    "modernize-use-auto",  // debatable - auto can obscure type
+    // "modernize-use-auto",  // found in MediaMetricsService.h, debatable - auto can obscure type
     "modernize-use-bool-literals",
     "modernize-use-default-member-init",
     "modernize-use-emplace",
     "modernize-use-equals-default",
     "modernize-use-equals-delete",
-    "modernize-use-nodiscard",
+    // "modernize-use-nodiscard", // found in TimeMachine.h
     "modernize-use-noexcept",
     "modernize-use-nullptr",
     "modernize-use-override",
@@ -57,6 +57,11 @@
     // Remove some pedantic stylistic requirements.
     "-google-readability-casting", // C++ casts not always necessary and may be verbose
     "-google-readability-todo",    // do not require TODO(info)
+
+    "-bugprone-unhandled-self-assignment", // found in TimeMachine.h
+    "-bugprone-suspicious-string-compare", // found in TimeMachine.h
+    "-cert-oop54-cpp", // found in TransactionLog.h
+    "-bugprone-narrowing-conversions", // b/182410845
 ]
 
 cc_defaults {
@@ -88,8 +93,7 @@
     tidy_checks: tidy_errors,
     tidy_checks_as_errors: tidy_errors,
     tidy_flags: [
-      "-format-style='file'",
-      "--header-filter='frameworks/av/services/mediametrics/'",
+      "-format-style=file",
     ],
 }
 
@@ -162,10 +166,18 @@
         "libmediautils",
         "libmemunreachable",
         "libprotobuf-cpp-lite",
+        "libstagefright_foundation",
         "libstatslog",
+        "libstatspull",
+        "libstatssocket",
         "libutils",
     ],
 
+    export_shared_lib_headers: [
+        "libstatspull",
+        "libstatssocket",
+    ],
+
     static_libs: [
         "libplatformprotos",
     ],
diff --git a/services/mediametrics/AudioAnalytics.cpp b/services/mediametrics/AudioAnalytics.cpp
index 3b2de76..2b797b8 100644
--- a/services/mediametrics/AudioAnalytics.cpp
+++ b/services/mediametrics/AudioAnalytics.cpp
@@ -87,6 +87,7 @@
     "selected_device_id",
     "caller",
     "source",
+    "log_session_id",
 };
 
 static constexpr const char * const AudioThreadDeviceUsageFields[] = {
@@ -124,6 +125,7 @@
     "content_type",
     "caller",
     "traits",
+    "log_session_id",
 };
 
 static constexpr const char * const AudioDeviceConnectionFields[] = {
@@ -136,24 +138,54 @@
     "connection_count",
 };
 
-// static constexpr const char * const AAudioStreamFields[] {
-//     "mediametrics_aaudiostream_reported",
-//     "caller_name",
-//     "path",
-//     "direction",
-//     "frames_per_burst",
-//     "buffer_size",
-//     "buffer_capacity",
-//     "channel_count",
-//     "total_frames_transferred",
-//     "perf_mode_requested",
-//     "perf_mode_actual",
-//     "sharing",
-//     "xrun_count",
-//     "device_type",
-//     "format_app",
-//     "format_device",
-// };
+static constexpr const char * const AAudioStreamFields[] {
+    "mediametrics_aaudiostream_reported",
+    "path",
+    "direction",
+    "frames_per_burst",
+    "buffer_size",
+    "buffer_capacity",
+    "channel_count",
+    "total_frames_transferred",
+    "perf_mode_requested",
+    "perf_mode_actual",
+    "sharing",
+    "xrun_count",
+    "device_type",
+    "format_app",
+    "format_device",
+    "log_session_id",
+    "sample_rate",
+    "content_type",
+};
+
+/**
+ * printFields is a helper method that prints the fields and corresponding values
+ * in a human readable style.
+ */
+template <size_t N, typename ...Types>
+std::string printFields(const char * const (& fields)[N], Types ... args)
+{
+    std::stringstream ss;
+    ss << " { ";
+    stringutils::fieldPrint(ss, fields, args...);
+    ss << "}";
+    return ss.str();
+}
+
+/**
+ * sendToStatsd is a helper method that sends the arguments to statsd
+ */
+template <typename ...Types>
+int sendToStatsd(Types ... args)
+{
+    int result = 0;
+
+#ifdef STATSD_ENABLE
+    result = android::util::stats_write(args...);
+#endif
+    return result;
+}
 
 /**
  * sendToStatsd is a helper method that sends the arguments to statsd
@@ -175,8 +207,10 @@
     return { result, ss.str() };
 }
 
-AudioAnalytics::AudioAnalytics()
+AudioAnalytics::AudioAnalytics(const std::shared_ptr<StatsdLog>& statsdLog)
     : mDeliverStatistics(property_get_bool(PROP_AUDIO_ANALYTICS_CLOUD_ENABLED, true))
+    , mStatsdLog(statsdLog)
+    , mAudioPowerUsage(this, statsdLog)
 {
     SetMinimumLogSeverity(android::base::DEBUG); // for LOG().
     ALOGD("%s", __func__);
@@ -375,20 +409,6 @@
         ll -= l;
     }
 
-    if (ll > 0) {
-        // Print the statsd atoms we sent out.
-        const std::string statsd = mStatsdLog.dumpToString("  " /* prefix */, ll - 1);
-        const size_t n = std::count(statsd.begin(), statsd.end(), '\n') + 1; // we control this.
-        if ((size_t)ll >= n) {
-            if (n == 1) {
-                ss << "Statsd atoms: empty or truncated\n";
-            } else {
-                ss << "Statsd atoms:\n" << statsd;
-            }
-            ll -= n;
-        }
-    }
-
     if (ll > 0 && prefix == nullptr) {
         auto [s, l] = mAudioPowerUsage.dump(ll);
         ss << s;
@@ -521,12 +541,18 @@
         std::string source;
         mAudioAnalytics.mAnalyticsState->timeMachine().get(
                 key, AMEDIAMETRICS_PROP_SOURCE, &source);
+        // Android S
+        std::string logSessionId;
+        mAudioAnalytics.mAnalyticsState->timeMachine().get(
+                key, AMEDIAMETRICS_PROP_LOGSESSIONID, &logSessionId);
 
         const auto callerNameForStats =
                 types::lookup<types::CALLER_NAME, short_enum_type_t>(callerName);
         const auto encodingForStats = types::lookup<types::ENCODING, short_enum_type_t>(encoding);
         const auto flagsForStats = types::lookup<types::INPUT_FLAG, short_enum_type_t>(flags);
         const auto sourceForStats = types::lookup<types::SOURCE_TYPE, short_enum_type_t>(source);
+        // Android S
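+        // log_session_id is sanitized before it is reported to statsd.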
+        const auto logSessionIdForStats = stringutils::sanitizeLogSessionId(logSessionId);
 
         LOG(LOG_LEVEL) << "key:" << key
               << " id:" << id
@@ -541,7 +567,9 @@
               << ") packageName:" << packageName
               << " selectedDeviceId:" << selectedDeviceId
               << " callerName:" << callerName << "(" << callerNameForStats
-              << ") source:" << source << "(" << sourceForStats << ")";
+              << ") source:" << source << "(" << sourceForStats
+              << ") logSessionId:" << logSessionId << "(" << logSessionIdForStats
+              << ")";
         if (clientCalled  // only log if client app called AudioRecord.
                 && mAudioAnalytics.mDeliverStatistics) {
             const auto [ result, str ] = sendToStatsd(AudioRecordDeviceUsageFields,
@@ -559,9 +587,11 @@
                     , selectedDeviceId
                     , ENUM_EXTRACT(callerNameForStats)
                     , ENUM_EXTRACT(sourceForStats)
+                    , logSessionIdForStats.c_str()
                     );
             ALOGV("%s: statsd %s", __func__, str.c_str());
-            mAudioAnalytics.mStatsdLog.log("%s", str.c_str());
+            mAudioAnalytics.mStatsdLog->log(
+                    android::util::MEDIAMETRICS_AUDIORECORDDEVICEUSAGE_REPORTED, str);
         }
     } break;
     case THREAD: {
@@ -609,7 +639,8 @@
                 , ENUM_EXTRACT(typeForStats)
             );
             ALOGV("%s: statsd %s", __func__, str.c_str());
-            mAudioAnalytics.mStatsdLog.log("%s", str.c_str());
+            mAudioAnalytics.mStatsdLog->log(
+                    android::util::MEDIAMETRICS_AUDIOTHREADDEVICEUSAGE_REPORTED, str);
         }
     } break;
     case TRACK: {
@@ -659,6 +690,10 @@
         std::string usage;
         mAudioAnalytics.mAnalyticsState->timeMachine().get(
                 key, AMEDIAMETRICS_PROP_USAGE, &usage);
+        // Android S
+        std::string logSessionId;
+        mAudioAnalytics.mAnalyticsState->timeMachine().get(
+                key, AMEDIAMETRICS_PROP_LOGSESSIONID, &logSessionId);
 
         const auto callerNameForStats =
                 types::lookup<types::CALLER_NAME, short_enum_type_t>(callerName);
@@ -671,6 +706,8 @@
         const auto traitsForStats =
                  types::lookup<types::TRACK_TRAITS, short_enum_type_t>(traits);
         const auto usageForStats = types::lookup<types::USAGE, short_enum_type_t>(usage);
+        // Android S
+        const auto logSessionIdForStats = stringutils::sanitizeLogSessionId(logSessionId);
 
         LOG(LOG_LEVEL) << "key:" << key
               << " id:" << id
@@ -695,6 +732,7 @@
               << " streamType:" << streamType << "(" << streamTypeForStats
               << ") traits:" << traits << "(" << traitsForStats
               << ") usage:" << usage << "(" << usageForStats
+              << ") logSessionId:" << logSessionId << "(" << logSessionIdForStats
               << ")";
         if (clientCalled // only log if client app called AudioTracks
                 && mAudioAnalytics.mDeliverStatistics) {
@@ -719,9 +757,11 @@
                     , ENUM_EXTRACT(contentTypeForStats)
                     , ENUM_EXTRACT(callerNameForStats)
                     , ENUM_EXTRACT(traitsForStats)
+                    , logSessionIdForStats.c_str()
                     );
             ALOGV("%s: statsd %s", __func__, str.c_str());
-            mAudioAnalytics.mStatsdLog.log("%s", str.c_str());
+            mAudioAnalytics.mStatsdLog->log(
+                    android::util::MEDIAMETRICS_AUDIOTRACKDEVICEUSAGE_REPORTED, str);
         }
         } break;
     }
@@ -774,7 +814,7 @@
         mA2dpConnectionServiceNs = 0;
         ++mA2dpConnectionSuccesses;
 
-        const auto connectionTimeMs = float(timeDiffNs * 1e-6);
+        const auto connectionTimeMs = float((double)timeDiffNs * 1e-6);
 
         const auto outputDeviceBits = types::lookup<types::OUTPUT_DEVICE, long_enum_type_t>(
                 "AUDIO_DEVICE_OUT_BLUETOOTH_A2DP");
@@ -797,7 +837,8 @@
                     , /* connection_count */ 1
                     );
             ALOGV("%s: statsd %s", __func__, str.c_str());
-            mAudioAnalytics.mStatsdLog.log("%s", str.c_str());
+            mAudioAnalytics.mStatsdLog->log(
+                    android::util::MEDIAMETRICS_AUDIODEVICECONNECTION_REPORTED, str);
         }
     }
 }
@@ -850,7 +891,8 @@
                     , /* connection_count */ 1
                     );
             ALOGV("%s: statsd %s", __func__, str.c_str());
-            mAudioAnalytics.mStatsdLog.log("%s", str.c_str());
+            mAudioAnalytics.mStatsdLog->log(
+                    android::util::MEDIAMETRICS_AUDIODEVICECONNECTION_REPORTED, str);
         }
         return;
     }
@@ -876,7 +918,8 @@
                 , /* connection_count */ 1
                 );
         ALOGV("%s: statsd %s", __func__, str.c_str());
-        mAudioAnalytics.mStatsdLog.log("%s", str.c_str());
+        mAudioAnalytics.mStatsdLog->log(
+                android::util::MEDIAMETRICS_AUDIODEVICECONNECTION_REPORTED, str);
     }
 }
 
@@ -884,12 +927,6 @@
         const std::shared_ptr<const android::mediametrics::Item> &item, CallerPath path) const {
     const std::string& key = item->getKey();
 
-    std::string callerNameStr;
-    mAudioAnalytics.mAnalyticsState->timeMachine().get(
-            key, AMEDIAMETRICS_PROP_CALLERNAME, &callerNameStr);
-
-    const auto callerName = types::lookup<types::CALLER_NAME, int32_t>(callerNameStr);
-
     std::string directionStr;
     mAudioAnalytics.mAnalyticsState->timeMachine().get(
             key, AMEDIAMETRICS_PROP_DIRECTION, &directionStr);
@@ -932,7 +969,7 @@
     mAudioAnalytics.mAnalyticsState->timeMachine().get(
             key, AMEDIAMETRICS_PROP_UNDERRUN, &xrunCount);
 
-    std::string deviceType;
+    std::string serializedDeviceTypes;
     // TODO: only routed device id is logged, but no device type
 
     int32_t formatApp = 0;
@@ -943,8 +980,19 @@
             key, AMEDIAMETRICS_PROP_ENCODING, &formatDeviceStr);
     const auto formatDevice = types::lookup<types::ENCODING, int32_t>(formatDeviceStr);
 
+    std::string logSessionId;
+    // TODO: log logSessionId
+
+    int32_t sampleRate = 0;
+    mAudioAnalytics.mAnalyticsState->timeMachine().get(
+            key, AMEDIAMETRICS_PROP_SAMPLERATE, &sampleRate);
+
+    std::string contentTypeStr;
+    mAudioAnalytics.mAnalyticsState->timeMachine().get(
+            key, AMEDIAMETRICS_PROP_CONTENTTYPE, &contentTypeStr);
+    const auto contentType = types::lookup<types::CONTENT_TYPE, int32_t>(contentTypeStr);
+
     LOG(LOG_LEVEL) << "key:" << key
-            << " caller_name:" << callerName << "(" << callerNameStr << ")"
             << " path:" << path
             << " direction:" << direction << "(" << directionStr << ")"
             << " frames_per_burst:" << framesPerBurst
@@ -956,33 +1004,63 @@
             << " perf_mode_actual:" << perfModeActual
             << " sharing:" << sharingMode << "(" << sharingModeStr << ")"
             << " xrun_count:" << xrunCount
-            << " device_type:" << deviceType
+            << " device_type:" << serializedDeviceTypes
             << " format_app:" << formatApp
-            << " format_device: " << formatDevice << "(" << formatDeviceStr << ")";
+            << " format_device: " << formatDevice << "(" << formatDeviceStr << ")"
+            << " log_session_id: " << logSessionId
+            << " sample_rate: " << sampleRate
+            << " content_type: " << contentType << "(" << contentTypeStr << ")";
 
-    // TODO: send the metric to statsd when the proto is ready
-    // if (mAudioAnalytics.mDeliverStatistics) {
-    //     const auto [ result, str ] = sendToStatsd(AAudioStreamFields,
-    //             CONDITION(android::util::MEDIAMETRICS_AAUDIOSTREAM_REPORTED)
-    //             , callerName
-    //             , path
-    //             , direction
-    //             , framesPerBurst
-    //             , bufferSizeInFrames
-    //             , bufferCapacityInFrames
-    //             , channelCount
-    //             , totalFramesTransferred
-    //             , perfModeRequested
-    //             , perfModeActual
-    //             , sharingMode
-    //             , xrunCount
-    //             , deviceType.c_str()
-    //             , formatApp
-    //             , formatDevice
-    //             );
-    //     ALOGV("%s: statsd %s", __func__, str.c_str());
-    //     mAudioAnalytics.mStatsdLog.log("%s", str.c_str());
-    // }
+    if (mAudioAnalytics.mDeliverStatistics) {
+        android::util::BytesField bf_serialized(
+            serializedDeviceTypes.c_str(), serializedDeviceTypes.size());
+        const auto result = sendToStatsd(
+                CONDITION(android::util::MEDIAMETRICS_AAUDIOSTREAM_REPORTED)
+                , path
+                , direction
+                , framesPerBurst
+                , bufferSizeInFrames
+                , bufferCapacityInFrames
+                , channelCount
+                , totalFramesTransferred
+                , perfModeRequested
+                , perfModeActual
+                , sharingMode
+                , xrunCount
+                , bf_serialized
+                , formatApp
+                , formatDevice
+                , logSessionId.c_str()
+                , sampleRate
+                , contentType
+                );
+        std::stringstream ss;
+        ss << "result:" << result;
+        const auto fieldsStr = printFields(AAudioStreamFields,
+                CONDITION(android::util::MEDIAMETRICS_AAUDIOSTREAM_REPORTED)
+                , path
+                , direction
+                , framesPerBurst
+                , bufferSizeInFrames
+                , bufferCapacityInFrames
+                , channelCount
+                , totalFramesTransferred
+                , perfModeRequested
+                , perfModeActual
+                , sharingMode
+                , xrunCount
+                , serializedDeviceTypes.c_str()
+                , formatApp
+                , formatDevice
+                , logSessionId.c_str()
+                , sampleRate
+                , contentType
+                );
+        ss << " " << fieldsStr;
+        std::string str = ss.str();
+        ALOGV("%s: statsd %s", __func__, str.c_str());
+        mAudioAnalytics.mStatsdLog->log(android::util::MEDIAMETRICS_AAUDIOSTREAM_REPORTED, str);
+    }
 }
 
 } // namespace android::mediametrics
diff --git a/services/mediametrics/AudioAnalytics.h b/services/mediametrics/AudioAnalytics.h
index 07872ef..2b41a95 100644
--- a/services/mediametrics/AudioAnalytics.h
+++ b/services/mediametrics/AudioAnalytics.h
@@ -17,10 +17,10 @@
 #pragma once
 
 #include <android-base/thread_annotations.h>
-#include <audio_utils/SimpleLog.h>
 #include "AnalyticsActions.h"
 #include "AnalyticsState.h"
 #include "AudioPowerUsage.h"
+#include "StatsdLog.h"
 #include "TimedAction.h"
 #include "Wrap.h"
 
@@ -32,7 +32,7 @@
     friend AudioPowerUsage;
 
 public:
-    AudioAnalytics();
+    explicit AudioAnalytics(const std::shared_ptr<StatsdLog>& statsdLog);
     ~AudioAnalytics();
 
     /**
@@ -122,8 +122,7 @@
     SharedPtrWrap<AnalyticsState> mPreviousAnalyticsState;
 
     TimedAction mTimedAction; // locked internally
-
-    SimpleLog mStatsdLog{16 /* log lines */}; // locked internally
+    const std::shared_ptr<StatsdLog> mStatsdLog; // locked internally, ok for multiple threads.
 
     // DeviceUse is a nested class which handles audio device usage accounting.
     // We define this class at the end to ensure prior variables all properly constructed.
@@ -212,7 +211,7 @@
         AudioAnalytics &mAudioAnalytics;
     } mAAudioStreamInfo{*this};
 
-    AudioPowerUsage mAudioPowerUsage{this};
+    AudioPowerUsage mAudioPowerUsage;
 };
 
 } // namespace android::mediametrics
diff --git a/services/mediametrics/AudioPowerUsage.cpp b/services/mediametrics/AudioPowerUsage.cpp
index 34be0b9..ab74c8e 100644
--- a/services/mediametrics/AudioPowerUsage.cpp
+++ b/services/mediametrics/AudioPowerUsage.cpp
@@ -127,14 +127,13 @@
     return deviceMask;
 }
 
-/* static */
-void AudioPowerUsage::sendItem(const std::shared_ptr<const mediametrics::Item>& item)
+void AudioPowerUsage::sendItem(const std::shared_ptr<const mediametrics::Item>& item) const
 {
     int32_t type;
     if (!item->getInt32(AUDIO_POWER_USAGE_PROP_TYPE, &type)) return;
 
-    int32_t device;
-    if (!item->getInt32(AUDIO_POWER_USAGE_PROP_DEVICE, &device)) return;
+    int32_t audio_device;
+    if (!item->getInt32(AUDIO_POWER_USAGE_PROP_DEVICE, &audio_device)) return;
 
     int64_t duration_ns;
     if (!item->getInt64(AUDIO_POWER_USAGE_PROP_DURATION_NS, &duration_ns)) return;
@@ -142,11 +141,24 @@
     double volume;
     if (!item->getDouble(AUDIO_POWER_USAGE_PROP_VOLUME, &volume)) return;
 
-    (void)android::util::stats_write(android::util::AUDIO_POWER_USAGE_DATA_REPORTED,
-                                         device,
-                                         (int32_t)(duration_ns / NANOS_PER_SECOND),
-                                         (float)volume,
+    const int32_t duration_secs = (int32_t)(duration_ns / NANOS_PER_SECOND);
+    const float average_volume = (float)volume;
+    const int result = android::util::stats_write(android::util::AUDIO_POWER_USAGE_DATA_REPORTED,
+                                         audio_device,
+                                         duration_secs,
+                                         average_volume,
                                          type);
+
+    std::stringstream log;
+    log << "result:" << result << " {"
+            << " mediametrics_audio_power_usage_data_reported:"
+            << android::util::AUDIO_POWER_USAGE_DATA_REPORTED
+            << " audio_device:" << audio_device
+            << " duration_secs:" << duration_secs
+            << " average_volume:" << average_volume
+            << " type:" << type
+            << " }";
+    mStatsdLog->log(android::util::AUDIO_POWER_USAGE_DATA_REPORTED, log.str());
 }
 
 bool AudioPowerUsage::saveAsItem_l(
@@ -174,8 +186,8 @@
         if (item_device == device && item_type == type) {
             int64_t final_duration_ns = item_duration_ns + duration_ns;
             double final_volume = (device & INPUT_DEVICE_BIT) ? 1.0:
-                            ((item_volume * item_duration_ns +
-                            average_vol * duration_ns) / final_duration_ns);
+                            ((item_volume * (double)item_duration_ns +
+                            average_vol * (double)duration_ns) / (double)final_duration_ns);
 
             item->setInt64(AUDIO_POWER_USAGE_PROP_DURATION_NS, final_duration_ns);
             item->setDouble(AUDIO_POWER_USAGE_PROP_VOLUME, final_volume);
@@ -289,7 +301,7 @@
         const int64_t durationNs = endCallNs - mDeviceTimeNs;
         if (durationNs > 0) {
             mDeviceVolume = (mDeviceVolume * double(mVolumeTimeNs - mDeviceTimeNs) +
-                    mVoiceVolume * double(endCallNs - mVolumeTimeNs)) / durationNs;
+                    mVoiceVolume * double(endCallNs - mVolumeTimeNs)) / (double)durationNs;
             saveAsItems_l(mPrimaryDevice, durationNs, VOICE_CALL_TYPE, mDeviceVolume);
         }
     } else if (mode == "AUDIO_MODE_IN_CALL") { // entering call mode
@@ -317,7 +329,7 @@
         const int64_t durationNs = timeNs - mDeviceTimeNs;
         if (durationNs > 0) {
             mDeviceVolume = (mDeviceVolume * double(mVolumeTimeNs - mDeviceTimeNs) +
-                    mVoiceVolume * double(timeNs - mVolumeTimeNs)) / durationNs;
+                    mVoiceVolume * double(timeNs - mVolumeTimeNs)) / (double)durationNs;
             mVolumeTimeNs = timeNs;
         }
     }
@@ -348,7 +360,7 @@
         const int64_t durationNs = endDeviceNs - mDeviceTimeNs;
         if (durationNs > 0) {
             mDeviceVolume = (mDeviceVolume * double(mVolumeTimeNs - mDeviceTimeNs) +
-                    mVoiceVolume * double(endDeviceNs - mVolumeTimeNs)) / durationNs;
+                    mVoiceVolume * double(endDeviceNs - mVolumeTimeNs)) / (double)durationNs;
             saveAsItems_l(mPrimaryDevice, durationNs, VOICE_CALL_TYPE, mDeviceVolume);
         }
         // reset statistics
@@ -360,8 +372,10 @@
     mPrimaryDevice = device;
 }
 
-AudioPowerUsage::AudioPowerUsage(AudioAnalytics *audioAnalytics)
+AudioPowerUsage::AudioPowerUsage(
+        AudioAnalytics *audioAnalytics, const std::shared_ptr<StatsdLog>& statsdLog)
     : mAudioAnalytics(audioAnalytics)
+    , mStatsdLog(statsdLog)
     , mDisabled(property_get_bool(PROP_AUDIO_METRICS_DISABLED, AUDIO_METRICS_DISABLED_DEFAULT))
     , mIntervalHours(property_get_int32(PROP_AUDIO_METRICS_INTERVAL_HR, INTERVAL_HR_DEFAULT))
 {
diff --git a/services/mediametrics/AudioPowerUsage.h b/services/mediametrics/AudioPowerUsage.h
index b705a6a..7021902 100644
--- a/services/mediametrics/AudioPowerUsage.h
+++ b/services/mediametrics/AudioPowerUsage.h
@@ -22,13 +22,15 @@
 #include <mutex>
 #include <thread>
 
+#include "StatsdLog.h"
+
 namespace android::mediametrics {
 
 class AudioAnalytics;
 
 class AudioPowerUsage {
 public:
-    explicit AudioPowerUsage(AudioAnalytics *audioAnalytics);
+    AudioPowerUsage(AudioAnalytics *audioAnalytics, const std::shared_ptr<StatsdLog>& statsdLog);
     ~AudioPowerUsage();
 
     void checkTrackRecord(const std::shared_ptr<const mediametrics::Item>& item, bool isTrack);
@@ -83,12 +85,13 @@
 private:
     bool saveAsItem_l(int32_t device, int64_t duration, int32_t type, double average_vol)
          REQUIRES(mLock);
-    static void sendItem(const std::shared_ptr<const mediametrics::Item>& item);
+    void sendItem(const std::shared_ptr<const mediametrics::Item>& item) const;
     void collect();
     bool saveAsItems_l(int32_t device, int64_t duration, int32_t type, double average_vol)
          REQUIRES(mLock);
 
     AudioAnalytics * const mAudioAnalytics;
+    const std::shared_ptr<StatsdLog> mStatsdLog;  // mStatsdLog is internally locked
     const bool mDisabled;
     const int32_t mIntervalHours;
 
diff --git a/services/mediametrics/AudioTypes.cpp b/services/mediametrics/AudioTypes.cpp
index 44e96ec..838cdd5 100644
--- a/services/mediametrics/AudioTypes.cpp
+++ b/services/mediametrics/AudioTypes.cpp
@@ -77,6 +77,7 @@
         {"AUDIO_DEVICE_IN_DEFAULT",                1LL << 28},
         // R values above.
         {"AUDIO_DEVICE_IN_BLE_HEADSET",            1LL << 29},
+        {"AUDIO_DEVICE_IN_HDMI_EARC",              1LL << 30},
     };
     return map;
 }
@@ -123,7 +124,8 @@
         {"AUDIO_DEVICE_OUT_DEFAULT",                   1LL << 30},
         // R values above.
         {"AUDIO_DEVICE_OUT_BLE_HEADSET",               1LL << 31},
-        {"AUDIO_DEVICE_OUT_BLE_SPAEKER",               1LL << 32},
+        {"AUDIO_DEVICE_OUT_BLE_SPEAKER",               1LL << 32},
+        {"AUDIO_DEVICE_OUT_HDMI_EARC",                 1LL << 33},
     };
     return map;
 }
@@ -158,9 +160,9 @@
     // DO NOT MODIFY VALUES(OK to add new ones).
     // This may be found in frameworks/av/media/libaaudio/include/aaudio/AAudio.h
     static std::unordered_map<std::string, int32_t> map {
-        // UNKNOWN is -1
-        {"AAUDIO_DIRECTION_OUTPUT",    0},
-        {"AAUDIO_DIRECTION_INPUT",     1},
+        // UNKNOWN is 0
+        {"AAUDIO_DIRECTION_OUTPUT",    1 /* AAUDIO_DIRECTION_OUTPUT + 1 */},
+        {"AAUDIO_DIRECTION_INPUT",     2 /* AAUDIO_DIRECTION_INPUT + 1*/},
     };
     return map;
 }
@@ -169,7 +171,7 @@
     // DO NOT MODIFY VALUES(OK to add new ones).
     // This may be found in frameworks/av/media/libaaudio/include/aaudio/AAudio.h
     static std::unordered_map<std::string, int32_t> map {
-        // UNKNOWN is -1
+        // UNKNOWN is 0
         {"AAUDIO_PERFORMANCE_MODE_NONE",            10},
         {"AAUDIO_PERFORMANCE_MODE_POWER_SAVING",    11},
         {"AAUDIO_PERFORMANCE_MODE_LOW_LATENCY",     12},
@@ -181,9 +183,9 @@
     // DO NOT MODIFY VALUES(OK to add new ones).
     // This may be found in frameworks/av/media/libaaudio/include/aaudio/AAudio.h
     static std::unordered_map<std::string, int32_t> map {
-        // UNKNOWN is -1
-        {"AAUDIO_SHARING_MODE_EXCLUSIVE",    0},
-        {"AAUDIO_SHARING_MODE_SHARED",       1},
+        // UNKNOWN is 0
+        {"AAUDIO_SHARING_MODE_EXCLUSIVE",    1 /* AAUDIO_SHARING_MODE_EXCLUSIVE + 1 */},
+        {"AAUDIO_SHARING_MODE_SHARED",       2 /* AAUDIO_SHARING_MODE_SHARED + 1 */},
     };
     return map;
 }
@@ -484,7 +486,7 @@
     auto& map = getAAudioDirection();
     auto it = map.find(direction);
     if (it == map.end()) {
-        return -1; // return unknown
+        return 0; // return unknown
     }
     return it->second;
 }
@@ -506,7 +508,7 @@
     auto& map = getAAudioPerformanceMode();
     auto it = map.find(performanceMode);
     if (it == map.end()) {
-        return -1; // return unknown
+        return 0; // return unknown
     }
     return it->second;
 }
@@ -528,7 +530,7 @@
     auto& map = getAAudioSharingMode();
     auto it = map.find(sharingMode);
     if (it == map.end()) {
-        return -1; // return unknown
+        return 0; // return unknown
     }
     return it->second;
 }
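
For reference, a minimal sketch (illustrative only; the helper name below is hypothetical, not part of this change) of the lookup convention the hunks above move to: the metrics-side enums now reserve 0 for "unknown", so the AAudio constants are reported shifted by +1 rather than verbatim.

    #include <cstdint>
    #include <string>
    #include <unordered_map>

    // Hypothetical helper mirroring the pattern above: map lookup that returns
    // 0 for "unknown" and reports the AAudio enum values offset by +1.
    int32_t lookupAAudioDirectionStatsd(const std::string& direction) {
        static const std::unordered_map<std::string, int32_t> map{
            {"AAUDIO_DIRECTION_OUTPUT", 1},  // AAUDIO_DIRECTION_OUTPUT(0) + 1
            {"AAUDIO_DIRECTION_INPUT",  2},  // AAUDIO_DIRECTION_INPUT(1) + 1
        };
        const auto it = map.find(direction);
        return it == map.end() ? 0 /* unknown */ : it->second;
    }
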
diff --git a/services/mediametrics/MediaMetricsService.cpp b/services/mediametrics/MediaMetricsService.cpp
index 9d380ec..5e672ee 100644
--- a/services/mediametrics/MediaMetricsService.cpp
+++ b/services/mediametrics/MediaMetricsService.cpp
@@ -19,9 +19,11 @@
 #include <utils/Log.h>
 
 #include "MediaMetricsService.h"
+#include "iface_statsd.h"
 
 #include <pwd.h> //getpwuid
 
+#include <android-base/stringprintf.h>
 #include <android/content/pm/IPackageManagerNative.h>  // package info
 #include <audio_utils/clock.h>                 // clock conversions
 #include <binder/IPCThreadState.h>             // get calling uid
@@ -30,9 +32,13 @@
 #include <mediautils/MemoryLeakTrackUtil.h>
 #include <memunreachable/memunreachable.h>
 #include <private/android_filesystem_config.h> // UID
+#include <statslog.h>
+
+#include <set>
 
 namespace android {
 
+using base::StringPrintf;
 using mediametrics::Item;
 using mediametrics::startsWith;
 
@@ -108,7 +114,7 @@
 {
     ALOGD("%s", __func__);
     // the class destructor clears anyhow, but we enforce clearing items first.
-    mItemsDiscarded += mItems.size();
+    mItemsDiscarded += (int64_t)mItems.size();
     mItems.clear();
 }
 
@@ -200,22 +206,19 @@
 
     (void)mAudioAnalytics.submit(sitem, isTrusted);
 
-    extern bool dump2Statsd(const std::shared_ptr<const mediametrics::Item>& item);
-    (void)dump2Statsd(sitem);  // failure should be logged in function.
+    (void)dump2Statsd(sitem, mStatsdLog);  // failure should be logged in function.
     saveItem(sitem);
     return NO_ERROR;
 }
 
 status_t MediaMetricsService::dump(int fd, const Vector<String16>& args)
 {
-    String8 result;
-
     if (checkCallingPermission(String16("android.permission.DUMP")) == false) {
-        result.appendFormat("Permission Denial: "
+        const std::string result = StringPrintf("Permission Denial: "
                 "can't dump MediaMetricsService from pid=%d, uid=%d\n",
                 IPCThreadState::self()->getCallingPid(),
                 IPCThreadState::self()->getCallingUid());
-        write(fd, result.string(), result.size());
+        write(fd, result.c_str(), result.size());
         return NO_ERROR;
     }
 
@@ -247,17 +250,18 @@
             // dumpsys media.metrics audiotrack,codec
             // or dumpsys media.metrics audiotrack codec
 
-            result.append("Recognized parameters:\n");
-            result.append("--all         show all records\n");
-            result.append("--clear       clear out saved records\n");
-            result.append("--heap        show heap usage (top 100)\n");
-            result.append("--help        display help\n");
-            result.append("--prefix X    process records for component X\n");
-            result.append("--since X     X < 0: records from -X seconds in the past\n");
-            result.append("              X = 0: ignore\n");
-            result.append("              X > 0: records from X seconds since Unix epoch\n");
-            result.append("--unreachable show unreachable memory (leaks)\n");
-            write(fd, result.string(), result.size());
+            static constexpr char result[] =
+                    "Recognized parameters:\n"
+                    "--all         show all records\n"
+                    "--clear       clear out saved records\n"
+                    "--heap        show heap usage (top 100)\n"
+                    "--help        display help\n"
+                    "--prefix X    process records for component X\n"
+                    "--since X     X < 0: records from -X seconds in the past\n"
+                    "              X = 0: ignore\n"
+                    "              X > 0: records from X seconds since Unix epoch\n"
+                    "--unreachable show unreachable memory (leaks)\n";
+            write(fd, result, std::size(result));
             return NO_ERROR;
         } else if (args[i] == prefixOption) {
             ++i;
@@ -283,30 +287,36 @@
             unreachable = true;
         }
     }
-
+    std::stringstream result;
     {
         std::lock_guard _l(mLock);
 
         if (clear) {
-            mItemsDiscarded += mItems.size();
+            mItemsDiscarded += (int64_t)mItems.size();
             mItems.clear();
             mAudioAnalytics.clear();
         } else {
-            result.appendFormat("Dump of the %s process:\n", kServiceName);
+            result << StringPrintf("Dump of the %s process:\n", kServiceName);
             const char *prefixptr = prefix.size() > 0 ? prefix.c_str() : nullptr;
-            dumpHeaders(result, sinceNs, prefixptr);
-            dumpQueue(result, sinceNs, prefixptr);
+            result << dumpHeaders(sinceNs, prefixptr);
+            result << dumpQueue(sinceNs, prefixptr);
 
             // TODO: maybe consider a better way of dumping audio analytics info.
             const int32_t linesToDump = all ? INT32_MAX : 1000;
             auto [ dumpString, lines ] = mAudioAnalytics.dump(linesToDump, sinceNs, prefixptr);
-            result.append(dumpString.c_str());
+            result << dumpString;
             if (lines == linesToDump) {
-                result.append("-- some lines may be truncated --\n");
+                result << "-- some lines may be truncated --\n";
             }
+
+            // Dump the statsd atoms we sent out.
+            result << "Statsd atoms:\n"
+                   << mStatsdLog->dumpToString("  " /* prefix */,
+                           all ? STATSD_LOG_LINES_MAX : STATSD_LOG_LINES_DUMP);
         }
     }
-    write(fd, result.string(), result.size());
+    const std::string str = result.str();
+    write(fd, str.c_str(), str.size());
 
     // Check heap and unreachable memory outside of lock.
     if (heap) {
@@ -324,38 +334,37 @@
 }
 
 // dump headers
-void MediaMetricsService::dumpHeaders(String8 &result, int64_t sinceNs, const char* prefix)
+std::string MediaMetricsService::dumpHeaders(int64_t sinceNs, const char* prefix)
 {
+    std::stringstream result;
     if (mediametrics::Item::isEnabled()) {
-        result.append("Metrics gathering: enabled\n");
+        result << "Metrics gathering: enabled\n";
     } else {
-        result.append("Metrics gathering: DISABLED via property\n");
+        result << "Metrics gathering: DISABLED via property\n";
     }
-    result.appendFormat(
+    result << StringPrintf(
             "Since Boot: Submissions: %lld Accepted: %lld\n",
             (long long)mItemsSubmitted.load(), (long long)mItemsFinalized);
-    result.appendFormat(
+    result << StringPrintf(
             "Records Discarded: %lld (by Count: %lld by Expiration: %lld)\n",
             (long long)mItemsDiscarded, (long long)mItemsDiscardedCount,
             (long long)mItemsDiscardedExpire);
     if (prefix != nullptr) {
-        result.appendFormat("Restricting to prefix %s", prefix);
+        result << "Restricting to prefix " << prefix << "\n";
     }
     if (sinceNs != 0) {
-        result.appendFormat(
-            "Emitting Queue entries more recent than: %lld\n",
-            (long long)sinceNs);
+        result << "Emitting Queue entries more recent than: " << sinceNs << "\n";
     }
+    return result.str();
 }
 
 // TODO: should prefix be a set<string>?
-void MediaMetricsService::dumpQueue(String8 &result, int64_t sinceNs, const char* prefix)
+std::string MediaMetricsService::dumpQueue(int64_t sinceNs, const char* prefix)
 {
     if (mItems.empty()) {
-        result.append("empty\n");
-        return;
+        return "empty\n";
     }
-
+    std::stringstream result;
     int slot = 0;
     for (const auto &item : mItems) {         // TODO: consider std::lower_bound() on mItems
         if (item->getTimestamp() < sinceNs) { // sinceNs == 0 means all items shown
@@ -366,9 +375,10 @@
                     __func__, item->getKey().c_str(), prefix);
             continue;
         }
-        result.appendFormat("%5d: %s\n", slot, item->toString().c_str());
+        result << StringPrintf("%5d: %s\n", slot, item->toString().c_str());
         slot++;
     }
+    return result.str();
 }
 
 //
@@ -417,10 +427,10 @@
 
     if (const size_t toErase = overlimit + expired;
             toErase > 0) {
-        mItemsDiscardedCount += overlimit;
-        mItemsDiscardedExpire += expired;
-        mItemsDiscarded += toErase;
-        mItems.erase(mItems.begin(), mItems.begin() + toErase); // erase from front
+        mItemsDiscardedCount += (int64_t)overlimit;
+        mItemsDiscardedExpire += (int64_t)expired;
+        mItemsDiscarded += (int64_t)toErase;
+        mItems.erase(mItems.begin(), mItems.begin() + (ptrdiff_t)toErase); // erase from front
     }
     return more;
 }
@@ -440,6 +450,10 @@
     std::lock_guard _l(mLock);
     // we assume the items are roughly in time order.
     mItems.emplace_back(item);
+    if (isPullable(item->getKey())) {
+        registerStatsdCallbacksIfNeeded();
+        mPullableItems[item->getKey()].emplace_back(item);
+    }
     ++mItemsFinalized;
     if (expirations(item)
             && (!mExpireFuture.valid()
@@ -486,4 +500,57 @@
     return false;
 }
 
+void MediaMetricsService::registerStatsdCallbacksIfNeeded()
+{
+    if (mStatsdRegistered.test_and_set()) {
+        return;
+    }
+    auto tag = android::util::MEDIA_DRM_ACTIVITY_INFO;
+    auto cb = MediaMetricsService::pullAtomCallback;
+    AStatsManager_setPullAtomCallback(tag, /* metadata */ nullptr, cb, this);
+}
+
+/* static */
+bool MediaMetricsService::isPullable(const std::string &key)
+{
+    static const std::set<std::string> pullableKeys{
+        "mediadrm",
+    };
+    return pullableKeys.count(key);
+}
+
+/* static */
+std::string MediaMetricsService::atomTagToKey(int32_t atomTag)
+{
+    switch (atomTag) {
+    case android::util::MEDIA_DRM_ACTIVITY_INFO:
+        return "mediadrm";
+    }
+    return {};
+}
+
+/* static */
+AStatsManager_PullAtomCallbackReturn MediaMetricsService::pullAtomCallback(
+        int32_t atomTag, AStatsEventList* data, void* cookie)
+{
+    MediaMetricsService* svc = reinterpret_cast<MediaMetricsService*>(cookie);
+    return svc->pullItems(atomTag, data);
+}
+
+AStatsManager_PullAtomCallbackReturn MediaMetricsService::pullItems(
+        int32_t atomTag, AStatsEventList* data)
+{
+    const std::string key(atomTagToKey(atomTag));
+    if (key.empty()) {
+        return AStatsManager_PULL_SKIP;
+    }
+    std::lock_guard _l(mLock);
+    for (auto &item : mPullableItems[key]) {
+        if (const auto sitem = item.lock()) {
+            dump2Statsd(sitem, data, mStatsdLog);
+        }
+    }
+    mPullableItems[key].clear();
+    return AStatsManager_PULL_SUCCESS;
+}
 } // namespace android
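
To illustrate the pull path added above, here is a standalone sketch under assumed names (Item, queueIfPullable, and drain are illustrative, not the service's API): pullable items are queued per key as weak pointers so the main metrics queue still controls their lifetime, and a statsd pull keeps whatever is still alive and then clears the queue.

    #include <deque>
    #include <memory>
    #include <string>
    #include <unordered_map>
    #include <vector>

    struct Item { std::string key; };  // stand-in for mediametrics::Item

    // Stand-in for mPullableItems: per-key queues of weak references.
    std::unordered_map<std::string, std::deque<std::weak_ptr<const Item>>> gPullable;

    void queueIfPullable(const std::shared_ptr<const Item>& item) {
        if (item->key == "mediadrm") gPullable[item->key].emplace_back(item);
    }

    // On a pull, keep only the items that have not expired, then clear the queue.
    std::vector<std::shared_ptr<const Item>> drain(const std::string& key) {
        std::vector<std::shared_ptr<const Item>> alive;
        for (const auto& weak : gPullable[key]) {
            if (auto item = weak.lock()) alive.push_back(item);
        }
        gPullable[key].clear();
        return alive;
    }
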
diff --git a/services/mediametrics/MediaMetricsService.h b/services/mediametrics/MediaMetricsService.h
index bcae397..8d0b1cf 100644
--- a/services/mediametrics/MediaMetricsService.h
+++ b/services/mediametrics/MediaMetricsService.h
@@ -26,6 +26,7 @@
 #include <android-base/thread_annotations.h>
 #include <android/media/BnMediaMetricsService.h>
 #include <mediautils/ServiceUtilities.h>
+#include <stats_pull_atom_callback.h>
 #include <utils/String8.h>
 
 #include "AudioAnalytics.h"
@@ -99,8 +100,17 @@
     bool expirations(const std::shared_ptr<const mediametrics::Item>& item) REQUIRES(mLock);
 
     // support for generating output
-    void dumpQueue(String8 &result, int64_t sinceNs, const char* prefix) REQUIRES(mLock);
-    void dumpHeaders(String8 &result, int64_t sinceNs, const char* prefix) REQUIRES(mLock);
+    std::string dumpQueue(int64_t sinceNs, const char* prefix) REQUIRES(mLock);
+    std::string dumpHeaders(int64_t sinceNs, const char* prefix) REQUIRES(mLock);
+
+    // support statsd pulled atoms
+    static bool isPullable(const std::string &key);
+    static std::string atomTagToKey(int32_t atomTag);
+    static AStatsManager_PullAtomCallbackReturn pullAtomCallback(
+            int32_t atomTag, AStatsEventList* data, void* cookie);
+    AStatsManager_PullAtomCallbackReturn pullItems(int32_t atomTag, AStatsEventList* data);
+    void registerStatsdCallbacksIfNeeded();
+    std::atomic_flag mStatsdRegistered = ATOMIC_FLAG_INIT;
 
     // The following variables accessed without mLock
 
@@ -114,7 +124,14 @@
 
     std::atomic<int64_t> mItemsSubmitted{}; // accessed outside of lock.
 
-    mediametrics::AudioAnalytics mAudioAnalytics; // mAudioAnalytics is locked internally.
+    // mStatsdLog is locked internally (thread-safe) and shows the last atoms logged
+    static constexpr size_t STATSD_LOG_LINES_MAX = 30; // recent log lines to keep
+    static constexpr size_t STATSD_LOG_LINES_DUMP = 4; // normal amount of lines to dump
+    const std::shared_ptr<mediametrics::StatsdLog> mStatsdLog{
+            std::make_shared<mediametrics::StatsdLog>(STATSD_LOG_LINES_MAX)};
+
+    // mAudioAnalytics is locked internally.
+    mediametrics::AudioAnalytics mAudioAnalytics{mStatsdLog};
 
     std::mutex mLock;
     // statistics about our analytics
@@ -130,6 +147,12 @@
     // TODO: Make separate class, use segmented queue, write lock only end.
     // Note: Another analytics module might have ownership of an item longer than the log.
     std::deque<std::shared_ptr<const mediametrics::Item>> mItems GUARDED_BY(mLock);
+
+    // Queues per item key, pending to be pulled by statsd.
+    // Use weak_ptr such that a pullable item can still expire.
+    using ItemKey = std::string;
+    using WeakItemQueue = std::deque<std::weak_ptr<const mediametrics::Item>>;
+    std::unordered_map<ItemKey, WeakItemQueue> mPullableItems GUARDED_BY(mLock);
 };
 
 } // namespace android
diff --git a/services/mediametrics/StatsdLog.h b/services/mediametrics/StatsdLog.h
new file mode 100644
index 0000000..e207bac
--- /dev/null
+++ b/services/mediametrics/StatsdLog.h
@@ -0,0 +1,69 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <audio_utils/SimpleLog.h>
+#include <map>
+#include <mutex>
+#include <sstream>
+
+namespace android::mediametrics {
+
+class StatsdLog {
+public:
+    explicit StatsdLog(size_t lines) : mSimpleLog(lines) {}
+
+    void log(int atom, const std::string& string) {
+        {
+            std::lock_guard lock(mLock);
+            ++mCountMap[atom];
+        }
+        mSimpleLog.log("%s", string.c_str());
+    }
+
+    std::string dumpToString(const char *prefix = "", size_t logLines = 0) const {
+        std::stringstream ss;
+
+        {   // first print out the atom counts
+            std::lock_guard lock(mLock);
+
+            size_t col = 0;
+            for (const auto& count : mCountMap) {
+                if (col == 8) {
+                    col = 0;
+                    ss << "\n" << prefix;
+                } else {
+                    ss << " ";
+                }
+                ss << "[ " << count.first << " : " << count.second << " ]";
+                ++col;
+            }
+            ss << "\n";
+        }
+
+        // then print out the log lines
+        ss << mSimpleLog.dumpToString(prefix, logLines);
+        return ss.str();
+    }
+
+private:
+    SimpleLog mSimpleLog; // internally locked
+    std::map<int /* atom */, size_t /* count */> mCountMap GUARDED_BY(mLock); // sorted
+    mutable std::mutex mLock;
+};
+
+} // namespace android::mediametrics
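
A minimal usage sketch of the new StatsdLog (illustrative only; the atom id and log strings are made up): one internally locked instance is shared by the writers, each pushed atom is mirrored into it, and dumpsys prints the per-atom counts followed by the most recent lines.

    #include <cstdio>
    #include <memory>
    #include "StatsdLog.h"

    int main() {
        // One internally locked log shared by all metrics writers.
        auto statsdLog = std::make_shared<android::mediametrics::StatsdLog>(30 /* lines */);

        // Mirror each stats_write() result with its atom id (12345 is made up).
        statsdLog->log(12345 /* atom */, "result:0 { example_field:1 }");

        // Dump: atom counts first, then up to 4 recent log lines, indented.
        printf("%s", statsdLog->dumpToString("  " /* prefix */, 4 /* logLines */).c_str());
        return 0;
    }
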
diff --git a/services/mediametrics/StringUtils.h b/services/mediametrics/StringUtils.h
index 7a8bbee..01034d9 100644
--- a/services/mediametrics/StringUtils.h
+++ b/services/mediametrics/StringUtils.h
@@ -16,6 +16,8 @@
 
 #pragma once
 
+#include <iomanip>
+#include <sstream>
 #include <string>
 #include <vector>
 
@@ -68,4 +70,101 @@
  */
 size_t replace(std::string &str, const char *targetChars, const char replaceChar);
 
+// RFC 1421, 2045, 2152, 4648(4), 4880
+inline constexpr char Base64Table[] =
+    // 0000000000111111111122222222223333333333444444444455555555556666
+    // 0123456789012345678901234567890123456789012345678901234567890123
+    "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/";
+
+// RFC 4648(5) URL-safe Base64 encoding
+inline constexpr char Base64UrlTable[] =
+    // 0000000000111111111122222222223333333333444444444455555555556666
+    // 0123456789012345678901234567890123456789012345678901234567890123
+    "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789-_";
+
+// A constexpr struct that transposes/inverts a string conversion table.
+struct Transpose {
+    // constexpr bug, returning char still means -1 == 0xff, so we use unsigned char.
+    using base_char_t = unsigned char;
+    static inline constexpr base_char_t INVALID_CHAR = 0xff;
+
+    template <size_t N>
+    explicit constexpr Transpose(const char(&string)[N]) {
+        for (auto& e : mMap) {
+            e = INVALID_CHAR;
+        }
+        for (size_t i = 0; string[i] != 0; ++i) {
+            mMap[static_cast<size_t>(string[i]) & 0xff] = i;
+        }
+    }
+
+    constexpr base_char_t operator[] (size_t n) const {
+        return n < sizeof(mMap) ? mMap[n] : INVALID_CHAR;
+    }
+
+    constexpr const auto& get() const {
+        return mMap;
+    }
+
+private:
+    base_char_t mMap[256];  // construct an inverse character mapping.
+};
+
+// This table is used to convert an input char to a 6 bit (0 - 63) value.
+// If the input char is not in the Base64Url charset, Transpose::INVALID_CHAR is returned.
+inline constexpr Transpose InverseBase64UrlTable(Base64UrlTable);
+
+// Returns true if s consists of only valid Base64Url characters (no padding chars allowed).
+inline constexpr bool isBase64Url(const char *s) {
+    for (; *s != 0; ++s) {
+        if (InverseBase64UrlTable[(unsigned char)*s] == Transpose::INVALID_CHAR) return false;
+    }
+    return true;
+}
+
+// Returns true if s is a valid log session id: exactly 16 Base64Url characters.
+//
+// A logSessionId is a web-safe Base64Url RFC 4648(5) encoded string of 16 characters
+// (representing 96 unique bits: 16 * 6).
+//
+// The string version is considered the reference representation.  However, for ease of
+// manipulation and comparison, it may be converted to an int128.
+//
+// For int128 conversion, some common interpretations exist - for example
+// (1) the 16 Base64 chars can be converted 6 bits per char to a 96 bit value
+// (with the most significant 32 bits as zero) as there are only 12 unique bytes worth of data
+// or (2) the 16 Base64 chars can be used to directly fill the 128 bits of int128 assuming
+// the 16 chars are 16 bytes, filling the layout of the int128 variable.
+// Endianness of the data may follow whatever is convenient in the interpretation as long
+// as it is applied to each such conversion of string to int128 identically.
+//
+inline constexpr bool isLogSessionId(const char *s) {
+    return std::char_traits<std::decay_t<decltype(*s)>>::length(s) == 16 && isBase64Url(s);
+}
+
+// Returns either the original string or an empty string if isLogSessionId check fails.
+inline std::string sanitizeLogSessionId(const std::string& string) {
+    if (isLogSessionId(string.c_str())) return string;
+    return {}; // if not a logSessionId, return an empty string.
+}
+
+inline std::string bytesToString(const std::vector<uint8_t>& bytes, size_t maxSize = SIZE_MAX) {
+    if (bytes.size() == 0) {
+        return "{}";
+    }
+    std::stringstream ss;
+    ss << "{";
+    ss << std::hex << std::setfill('0');
+    maxSize = std::min(maxSize, bytes.size());
+    for (size_t i = 0; i < maxSize; ++i) {
+        ss << " " << std::setw(2) << (int)bytes[i];
+    }
+    if (maxSize != bytes.size()) {
+        ss << " ... }";
+    } else {
+        ss << " }";
+    }
+    return ss.str();
+}
+
 } // namespace android::mediametrics::stringutils
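
A short sketch of the sanitizer added above (the example values are illustrative): a log session id must be exactly 16 Base64Url characters, and anything else collapses to an empty string before it reaches the statsd atoms.

    #include <cassert>
    #include <string>
    #include "StringUtils.h"

    int main() {
        using android::mediametrics::stringutils::sanitizeLogSessionId;

        // 16 characters from [A-Za-z0-9-_] pass through unchanged.
        assert(sanitizeLogSessionId("0123456789abcdef") == "0123456789abcdef");

        // Wrong length or characters outside the Base64Url set are rejected.
        assert(sanitizeLogSessionId("tooshort").empty());
        assert(sanitizeLogSessionId("not/base64url!!!").empty());
        return 0;
    }
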
diff --git a/services/mediametrics/fuzzer/Android.bp b/services/mediametrics/fuzzer/Android.bp
index d75ded2..b03e518 100644
--- a/services/mediametrics/fuzzer/Android.bp
+++ b/services/mediametrics/fuzzer/Android.bp
@@ -50,7 +50,10 @@
         "libmemunreachable",
         "libprotobuf-cpp-lite",
         "libstagefright",
+        "libstagefright_foundation",
         "libstatslog",
+        "libstatspull",
+        "libstatssocket",
         "libutils",
         "mediametricsservice-aidl-cpp",
     ],
diff --git a/services/mediametrics/fuzzer/mediametrics_service_fuzzer.cpp b/services/mediametrics/fuzzer/mediametrics_service_fuzzer.cpp
index 0cb2594..8b0b479 100644
--- a/services/mediametrics/fuzzer/mediametrics_service_fuzzer.cpp
+++ b/services/mediametrics/fuzzer/mediametrics_service_fuzzer.cpp
@@ -320,7 +320,9 @@
 
 void MediaMetricsServiceFuzzer::invokeAudioAnalytics(const uint8_t *data, size_t size) {
     FuzzedDataProvider fdp = FuzzedDataProvider(data, size);
-    android::mediametrics::AudioAnalytics audioAnalytics;
+    std::shared_ptr<android::mediametrics::StatsdLog> statsdLog =
+            std::make_shared<android::mediametrics::StatsdLog>(10);
+    android::mediametrics::AudioAnalytics audioAnalytics{statsdLog};
 
     while (fdp.remaining_bytes()) {
         auto item = std::make_shared<mediametrics::Item>(fdp.ConsumeRandomLengthString().c_str());
diff --git a/services/mediametrics/iface_statsd.cpp b/services/mediametrics/iface_statsd.cpp
index 16204de..776f878 100644
--- a/services/mediametrics/iface_statsd.cpp
+++ b/services/mediametrics/iface_statsd.cpp
@@ -27,7 +27,10 @@
 #include <pthread.h>
 #include <unistd.h>
 
+#include <map>
 #include <memory>
+#include <string>
+#include <vector>
 #include <string.h>
 #include <pwd.h>
 
@@ -45,33 +48,12 @@
 // has its own routine to handle this.
 //
 
-bool enabled_statsd = true;
+static bool enabled_statsd = true;
 
-struct statsd_hooks {
-    const char *key;
-    bool (*handler)(const mediametrics::Item *);
-};
-
-// keep this sorted, so we can do binary searches
-static constexpr struct statsd_hooks statsd_handlers[] =
-{
-    { "audiopolicy", statsd_audiopolicy },
-    { "audiorecord", statsd_audiorecord },
-    { "audiothread", statsd_audiothread },
-    { "audiotrack", statsd_audiotrack },
-    { "codec", statsd_codec},
-    { "drm.vendor.Google.WidevineCDM", statsd_widevineCDM },
-    { "drmmanager", statsd_drmmanager },
-    { "extractor", statsd_extractor },
-    { "mediadrm", statsd_mediadrm },
-    { "mediaparser", statsd_mediaparser },
-    { "nuplayer", statsd_nuplayer },
-    { "nuplayer2", statsd_nuplayer },
-    { "recorder", statsd_recorder },
-};
-
-// give me a record, i'll look at the type and upload appropriately
-bool dump2Statsd(const std::shared_ptr<const mediametrics::Item>& item) {
+namespace {
+template<typename Handler, typename... Args>
+bool dump2StatsdInternal(const std::map<std::string, Handler>& handlers,
+        const std::shared_ptr<const mediametrics::Item>& item, Args... args) {
     if (item == nullptr) return false;
 
     // get the key
@@ -82,12 +64,42 @@
         return false;
     }
 
-    for (const auto &statsd_handler : statsd_handlers) {
-        if (key == statsd_handler.key) {
-            return statsd_handler.handler(item.get());
-        }
+    if (handlers.count(key)) {
+        return (handlers.at(key))(item, args...);
     }
     return false;
 }
+} // namespace
+
+// give me a record, I'll look at the type and upload appropriately
+bool dump2Statsd(
+        const std::shared_ptr<const mediametrics::Item>& item,
+        const std::shared_ptr<mediametrics::StatsdLog>& statsdLog) {
+    static const std::map<std::string, statsd_pusher*> statsd_pushers =
+    {
+        { "audiopolicy", statsd_audiopolicy },
+        { "audiorecord", statsd_audiorecord },
+        { "audiothread", statsd_audiothread },
+        { "audiotrack", statsd_audiotrack },
+        { "codec", statsd_codec},
+        { "drmmanager", statsd_drmmanager },
+        { "extractor", statsd_extractor },
+        { "mediadrm", statsd_mediadrm },
+        { "mediaparser", statsd_mediaparser },
+        { "nuplayer", statsd_nuplayer },
+        { "nuplayer2", statsd_nuplayer },
+        { "recorder", statsd_recorder },
+    };
+    return dump2StatsdInternal(statsd_pushers, item, statsdLog);
+}
+
+bool dump2Statsd(const std::shared_ptr<const mediametrics::Item>& item, AStatsEventList* out,
+        const std::shared_ptr<mediametrics::StatsdLog>& statsdLog) {
+    static const std::map<std::string, statsd_puller*> statsd_pullers =
+    {
+        { "mediadrm", statsd_mediadrm_puller },
+    };
+    return dump2StatsdInternal(statsd_pullers, item, out, statsdLog);
+}
 
 } // namespace android
diff --git a/services/mediametrics/iface_statsd.h b/services/mediametrics/iface_statsd.h
index 9b49556..c2a8b3c 100644
--- a/services/mediametrics/iface_statsd.h
+++ b/services/mediametrics/iface_statsd.h
@@ -14,23 +14,37 @@
  * limitations under the License.
  */
 
+#include <memory>
+#include <stats_event.h>
+
 namespace android {
+namespace mediametrics {
+class Item;
+}
 
-extern bool enabled_statsd;
-
+using statsd_pusher = bool (const std::shared_ptr<const mediametrics::Item>& item,
+         const std::shared_ptr<mediametrics::StatsdLog>& statsdLog);
 // component specific dumpers
-extern bool statsd_audiopolicy(const mediametrics::Item *);
-extern bool statsd_audiorecord(const mediametrics::Item *);
-extern bool statsd_audiothread(const mediametrics::Item *);
-extern bool statsd_audiotrack(const mediametrics::Item *);
-extern bool statsd_codec(const mediametrics::Item *);
-extern bool statsd_extractor(const mediametrics::Item *);
-extern bool statsd_mediaparser(const mediametrics::Item *);
-extern bool statsd_nuplayer(const mediametrics::Item *);
-extern bool statsd_recorder(const mediametrics::Item *);
+extern statsd_pusher statsd_audiopolicy;
+extern statsd_pusher statsd_audiorecord;
+extern statsd_pusher statsd_audiothread;
+extern statsd_pusher statsd_audiotrack;
+extern statsd_pusher statsd_codec;
+extern statsd_pusher statsd_extractor;
+extern statsd_pusher statsd_mediaparser;
 
-extern bool statsd_mediadrm(const mediametrics::Item *);
-extern bool statsd_widevineCDM(const mediametrics::Item *);
-extern bool statsd_drmmanager(const mediametrics::Item *);
+extern statsd_pusher statsd_nuplayer;
+extern statsd_pusher statsd_recorder;
+extern statsd_pusher statsd_mediadrm;
+extern statsd_pusher statsd_drmmanager;
 
+using statsd_puller = bool (const std::shared_ptr<const mediametrics::Item>& item,
+        AStatsEventList *, const std::shared_ptr<mediametrics::StatsdLog>& statsdLog);
+// component specific pullers
+extern statsd_puller statsd_mediadrm_puller;
+
+bool dump2Statsd(const std::shared_ptr<const mediametrics::Item>& item,
+        const std::shared_ptr<mediametrics::StatsdLog>& statsdLog);
+bool dump2Statsd(const std::shared_ptr<const mediametrics::Item>& item, AStatsEventList* out,
+        const std::shared_ptr<mediametrics::StatsdLog>& statsdLog);
 } // namespace android
diff --git a/services/mediametrics/statsd_audiopolicy.cpp b/services/mediametrics/statsd_audiopolicy.cpp
index 6ef2f2c..f44b7c4 100644
--- a/services/mediametrics/statsd_audiopolicy.cpp
+++ b/services/mediametrics/statsd_audiopolicy.cpp
@@ -37,16 +37,16 @@
 
 namespace android {
 
-bool statsd_audiopolicy(const mediametrics::Item *item)
+bool statsd_audiopolicy(const std::shared_ptr<const mediametrics::Item>& item,
+       const std::shared_ptr<mediametrics::StatsdLog>& statsdLog)
 {
     if (item == nullptr) return false;
 
     // these go into the statsd wrapper
-    const nsecs_t timestamp = MediaMetricsService::roundTime(item->getTimestamp());
-    std::string pkgName = item->getPkgName();
-    int64_t pkgVersionCode = item->getPkgVersionCode();
-    int64_t mediaApexVersion = 0;
-
+    const nsecs_t timestamp_nanos = MediaMetricsService::roundTime(item->getTimestamp());
+    const std::string package_name = item->getPkgName();
+    const int64_t package_version_code = item->getPkgVersionCode();
+    const int64_t media_apex_version = 0;
 
     // the rest into our own proto
     //
@@ -60,35 +60,35 @@
         metrics_proto.set_status(status);
     }
     //string char kAudioPolicyRqstSrc[] = "android.media.audiopolicy.rqst.src";
-    std::string rqst_src;
-    if (item->getString("android.media.audiopolicy.rqst.src", &rqst_src)) {
-        metrics_proto.set_request_source(std::move(rqst_src));
+    std::string request_source;
+    if (item->getString("android.media.audiopolicy.rqst.src", &request_source)) {
+        metrics_proto.set_request_source(request_source);
     }
     //string char kAudioPolicyRqstPkg[] = "android.media.audiopolicy.rqst.pkg";
-    std::string rqst_pkg;
-    if (item->getString("android.media.audiopolicy.rqst.pkg", &rqst_pkg)) {
-        metrics_proto.set_request_package(std::move(rqst_pkg));
+    std::string request_package;
+    if (item->getString("android.media.audiopolicy.rqst.pkg", &request_package)) {
+        metrics_proto.set_request_package(request_package);
     }
     //int32 char kAudioPolicyRqstSession[] = "android.media.audiopolicy.rqst.session";
-    int32_t rqst_session = -1;
-    if (item->getInt32("android.media.audiopolicy.rqst.session", &rqst_session)) {
-        metrics_proto.set_request_session(rqst_session);
+    int32_t request_session = -1;
+    if (item->getInt32("android.media.audiopolicy.rqst.session", &request_session)) {
+        metrics_proto.set_request_session(request_session);
     }
     //string char kAudioPolicyRqstDevice[] = "android.media.audiopolicy.rqst.device";
-    std::string rqst_device;
-    if (item->getString("android.media.audiopolicy.rqst.device", &rqst_device)) {
-        metrics_proto.set_request_device(std::move(rqst_device));
+    std::string request_device;
+    if (item->getString("android.media.audiopolicy.rqst.device", &request_device)) {
+        metrics_proto.set_request_device(request_device);
     }
 
     //string char kAudioPolicyActiveSrc[] = "android.media.audiopolicy.active.src";
-    std::string active_src;
-    if (item->getString("android.media.audiopolicy.active.src", &active_src)) {
-        metrics_proto.set_active_source(std::move(active_src));
+    std::string active_source;
+    if (item->getString("android.media.audiopolicy.active.src", &active_source)) {
+        metrics_proto.set_active_source(active_source);
     }
     //string char kAudioPolicyActivePkg[] = "android.media.audiopolicy.active.pkg";
-    std::string active_pkg;
-    if (item->getString("android.media.audiopolicy.active.pkg", &active_pkg)) {
-        metrics_proto.set_active_package(std::move(active_pkg));
+    std::string active_package;
+    if (item->getString("android.media.audiopolicy.active.pkg", &active_package)) {
+        metrics_proto.set_active_package(active_package);
     }
     //int32 char kAudioPolicyActiveSession[] = "android.media.audiopolicy.active.session";
     int32_t active_session = -1;
@@ -98,27 +98,40 @@
     //string char kAudioPolicyActiveDevice[] = "android.media.audiopolicy.active.device";
     std::string active_device;
     if (item->getString("android.media.audiopolicy.active.device", &active_device)) {
-        metrics_proto.set_active_device(std::move(active_device));
+        metrics_proto.set_active_device(active_device);
     }
 
-
     std::string serialized;
     if (!metrics_proto.SerializeToString(&serialized)) {
         ALOGE("Failed to serialize audipolicy metrics");
         return false;
     }
 
-    if (enabled_statsd) {
-        android::util::BytesField bf_serialized( serialized.c_str(), serialized.size());
-        (void)android::util::stats_write(android::util::MEDIAMETRICS_AUDIOPOLICY_REPORTED,
-                                   timestamp, pkgName.c_str(), pkgVersionCode,
-                                   mediaApexVersion,
-                                   bf_serialized);
+    android::util::BytesField bf_serialized( serialized.c_str(), serialized.size());
+    int result = android::util::stats_write(android::util::MEDIAMETRICS_AUDIOPOLICY_REPORTED,
+        timestamp_nanos, package_name.c_str(), package_version_code,
+        media_apex_version,
+        bf_serialized);
+    std::stringstream log;
+    log << "result:" << result << " {"
+            << " mediametrics_audiopolicy_reported:"
+            << android::util::MEDIAMETRICS_AUDIOPOLICY_REPORTED
+            << " timestamp_nanos:" << timestamp_nanos
+            << " package_name:" << package_name
+            << " package_version_code:" << package_version_code
+            << " media_apex_version:" << media_apex_version
 
-    } else {
-        ALOGV("NOT sending: private data (len=%zu)", strlen(serialized.c_str()));
-    }
-
+            << " status:" << status
+            << " request_source:" << request_source
+            << " request_package:" << request_package
+            << " request_session:" << request_session
+            << " request_device:" << request_device
+            << " active_source:" << active_source
+            << " active_package:" << active_package
+            << " active_session:" << active_session
+            << " active_device:" << active_device
+            << " }";
+    statsdLog->log(android::util::MEDIAMETRICS_AUDIOPOLICY_REPORTED, log.str());
     return true;
 }
 
diff --git a/services/mediametrics/statsd_audiorecord.cpp b/services/mediametrics/statsd_audiorecord.cpp
index 76f4b59..70a67ae 100644
--- a/services/mediametrics/statsd_audiorecord.cpp
+++ b/services/mediametrics/statsd_audiorecord.cpp
@@ -32,21 +32,21 @@
 #include <statslog.h>
 
 #include "MediaMetricsService.h"
+#include "StringUtils.h"
 #include "frameworks/proto_logging/stats/enums/stats/mediametrics/mediametrics.pb.h"
 #include "iface_statsd.h"
 
 namespace android {
 
-bool statsd_audiorecord(const mediametrics::Item *item)
-{
+bool statsd_audiorecord(const std::shared_ptr<const mediametrics::Item>& item,
+        const std::shared_ptr<mediametrics::StatsdLog>& statsdLog) {
     if (item == nullptr) return false;
 
     // these go into the statsd wrapper
-    const nsecs_t timestamp = MediaMetricsService::roundTime(item->getTimestamp());
-    std::string pkgName = item->getPkgName();
-    int64_t pkgVersionCode = item->getPkgVersionCode();
-    int64_t mediaApexVersion = 0;
-
+    const nsecs_t timestamp_nanos = MediaMetricsService::roundTime(item->getTimestamp());
+    const std::string package_name = item->getPkgName();
+    const int64_t package_version_code = item->getPkgVersionCode();
+    const int64_t media_apex_version = 0;
 
     // the rest into our own proto
     //
@@ -56,12 +56,12 @@
     //
     std::string encoding;
     if (item->getString("android.media.audiorecord.encoding", &encoding)) {
-        metrics_proto.set_encoding(std::move(encoding));
+        metrics_proto.set_encoding(encoding);
     }
 
     std::string source;
     if (item->getString("android.media.audiorecord.source", &source)) {
-        metrics_proto.set_source(std::move(source));
+        metrics_proto.set_source(source);
     }
 
     int32_t latency = -1;
@@ -79,14 +79,14 @@
         metrics_proto.set_channels(channels);
     }
 
-    int64_t createdMs = -1;
-    if (item->getInt64("android.media.audiorecord.createdMs", &createdMs)) {
-        metrics_proto.set_created_millis(createdMs);
+    int64_t created_millis = -1;
+    if (item->getInt64("android.media.audiorecord.createdMs", &created_millis)) {
+        metrics_proto.set_created_millis(created_millis);
     }
 
-    int64_t durationMs = -1;
-    if (item->getInt64("android.media.audiorecord.durationMs", &durationMs)) {
-        metrics_proto.set_duration_millis(durationMs);
+    int64_t duration_millis = -1;
+    if (item->getInt64("android.media.audiorecord.durationMs", &duration_millis)) {
+        metrics_proto.set_duration_millis(duration_millis);
     }
 
     int32_t count = -1;
@@ -94,46 +94,44 @@
         metrics_proto.set_count(count);
     }
 
-    int32_t errcode = -1;
-    if (item->getInt32("android.media.audiorecord.errcode", &errcode)) {
-        metrics_proto.set_error_code(errcode);
-    } else if (item->getInt32("android.media.audiorecord.lastError.code", &errcode)) {
-        metrics_proto.set_error_code(errcode);
+    int32_t error_code = -1;
+    if (item->getInt32("android.media.audiorecord.errcode", &error_code)) {
+        metrics_proto.set_error_code(error_code);
+    } else if (item->getInt32("android.media.audiorecord.lastError.code", &error_code)) {
+        metrics_proto.set_error_code(error_code);
     }
 
-    std::string errfunc;
-    if (item->getString("android.media.audiorecord.errfunc", &errfunc)) {
-        metrics_proto.set_error_function(std::move(errfunc));
-    } else if (item->getString("android.media.audiorecord.lastError.at", &errfunc)) {
-        metrics_proto.set_error_function(std::move(errfunc));
+    std::string error_function;
+    if (item->getString("android.media.audiorecord.errfunc", &error_function)) {
+        metrics_proto.set_error_function(error_function);
+    } else if (item->getString("android.media.audiorecord.lastError.at", &error_function)) {
+        metrics_proto.set_error_function(error_function);
     }
 
-    // portId (int32)
     int32_t port_id = -1;
     if (item->getInt32("android.media.audiorecord.portId", &port_id)) {
         metrics_proto.set_port_id(count);
     }
-    // frameCount (int32)
-    int32_t frameCount = -1;
-    if (item->getInt32("android.media.audiorecord.frameCount", &frameCount)) {
-        metrics_proto.set_frame_count(frameCount);
-    }
-    // attributes (string)
-    std::string attributes;
-    if (item->getString("android.media.audiorecord.attributes", &attributes)) {
-        metrics_proto.set_attributes(std::move(attributes));
-    }
-    // channelMask (int64)
-    int64_t channelMask = -1;
-    if (item->getInt64("android.media.audiorecord.channelMask", &channelMask)) {
-        metrics_proto.set_channel_mask(channelMask);
-    }
-    // startcount (int64)
-    int64_t startcount = -1;
-    if (item->getInt64("android.media.audiorecord.startcount", &startcount)) {
-        metrics_proto.set_start_count(startcount);
+
+    int32_t frame_count = -1;
+    if (item->getInt32("android.media.audiorecord.frameCount", &frame_count)) {
+        metrics_proto.set_frame_count(frame_count);
     }
 
+    std::string attributes;
+    if (item->getString("android.media.audiorecord.attributes", &attributes)) {
+        metrics_proto.set_attributes(attributes);
+    }
+
+    int64_t channel_mask = -1;
+    if (item->getInt64("android.media.audiorecord.channelMask", &channel_mask)) {
+        metrics_proto.set_channel_mask(channel_mask);
+    }
+
+    int64_t start_count = -1;
+    if (item->getInt64("android.media.audiorecord.startcount", &start_count)) {
+        metrics_proto.set_start_count(start_count);
+    }
 
     std::string serialized;
     if (!metrics_proto.SerializeToString(&serialized)) {
@@ -141,17 +139,48 @@
         return false;
     }
 
-    if (enabled_statsd) {
-        android::util::BytesField bf_serialized( serialized.c_str(), serialized.size());
-        (void)android::util::stats_write(android::util::MEDIAMETRICS_AUDIORECORD_REPORTED,
-                                   timestamp, pkgName.c_str(), pkgVersionCode,
-                                   mediaApexVersion,
-                                   bf_serialized);
+    // Android S
+    // log_session_id (string)
+    std::string logSessionId;
+    (void)item->getString("android.media.audiorecord.logSessionId", &logSessionId);
+    const auto log_session_id =
+            mediametrics::stringutils::sanitizeLogSessionId(logSessionId);
 
-    } else {
-        ALOGV("NOT sending: private data (len=%zu)", strlen(serialized.c_str()));
-    }
+    android::util::BytesField bf_serialized( serialized.c_str(), serialized.size());
+    int result = android::util::stats_write(android::util::MEDIAMETRICS_AUDIORECORD_REPORTED,
+        timestamp_nanos, package_name.c_str(), package_version_code,
+        media_apex_version,
+        bf_serialized,
+        log_session_id.c_str());
+    std::stringstream log;
+    log << "result:" << result << " {"
+            << " mediametrics_audiorecord_reported:"
+            << android::util::MEDIAMETRICS_AUDIORECORD_REPORTED
+            << " timestamp_nanos:" << timestamp_nanos
+            << " package_name:" << package_name
+            << " package_version_code:" << package_version_code
+            << " media_apex_version:" << media_apex_version
 
+            << " encoding:" << encoding
+            << " source:" << source
+            << " latency:" << latency
+            << " samplerate:" << samplerate
+            << " channels:" << channels
+            << " created_millis:" << created_millis
+            << " duration_millis:" << duration_millis
+            << " count:" << count
+            << " error_code:" << error_code
+            << " error_function:" << error_function
+
+            << " port_id:" << port_id
+            << " frame_count:" << frame_count
+            << " attributes:" << attributes
+            << " channel_mask:" << channel_mask
+            << " start_count:" << start_count
+
+            << " log_session_id:" << log_session_id
+            << " }";
+    statsdLog->log(android::util::MEDIAMETRICS_AUDIORECORD_REPORTED, log.str());
     return true;
 }
 
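The handlers in this change share one shape: the enabled_statsd gate is dropped (statsd_mediaparser keeps it behind a TODO), stats_write() is called directly, and a human-readable key:value dump of what was written is recorded through the shared mediametrics::StatsdLog. A minimal, self-contained sketch of that dump pattern follows; the sink type and atom id are stand-ins for illustration, not the service's real interfaces.

    // Sketch of the dump pattern used above. FakeStatsdLog and the atom id
    // are placeholders, not the real mediametrics::StatsdLog or statsd constants.
    #include <iostream>
    #include <memory>
    #include <sstream>
    #include <string>

    struct FakeStatsdLog {
        void log(int atom, const std::string& entry) {
            std::cout << "atom " << atom << ": " << entry << "\n";
        }
    };

    int main() {
        const auto statsdLog = std::make_shared<FakeStatsdLog>();
        const int result = 0;  // would hold the stats_write() return value
        std::stringstream log;
        log << "result:" << result << " {"
                << " timestamp_nanos:" << 1234567890LL
                << " package_name:" << "com.example.app"
                << " sample_rate:" << 48000
                << " }";
        statsdLog->log(/* placeholder atom id */ 12345, log.str());
        return 0;
    }
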
diff --git a/services/mediametrics/statsd_audiothread.cpp b/services/mediametrics/statsd_audiothread.cpp
index 2ad2562..34cc923 100644
--- a/services/mediametrics/statsd_audiothread.cpp
+++ b/services/mediametrics/statsd_audiothread.cpp
@@ -37,16 +37,16 @@
 
 namespace android {
 
-bool statsd_audiothread(const mediametrics::Item *item)
+bool statsd_audiothread(const std::shared_ptr<const mediametrics::Item>& item,
+        const std::shared_ptr<mediametrics::StatsdLog>& statsdLog)
 {
     if (item == nullptr) return false;
 
     // these go into the statsd wrapper
-    const nsecs_t timestamp = MediaMetricsService::roundTime(item->getTimestamp());
-    std::string pkgName = item->getPkgName();
-    int64_t pkgVersionCode = item->getPkgVersionCode();
-    int64_t mediaApexVersion = 0;
-
+    const nsecs_t timestamp_nanos = MediaMetricsService::roundTime(item->getTimestamp());
+    const std::string package_name = item->getPkgName();
+    const int64_t package_version_code = item->getPkgVersionCode();
+    const int64_t media_apex_version = 0;
 
     // the rest into our own proto
     //
@@ -68,17 +68,17 @@
     if (item->getInt32(MM_PREFIX "samplerate", &samplerate)) {
         metrics_proto.set_samplerate(samplerate);
     }
-    std::string workhist;
-    if (item->getString(MM_PREFIX "workMs.hist", &workhist)) {
-        metrics_proto.set_work_millis_hist(std::move(workhist));
+    std::string work_millis_hist;
+    if (item->getString(MM_PREFIX "workMs.hist", &work_millis_hist)) {
+        metrics_proto.set_work_millis_hist(work_millis_hist);
     }
-    std::string latencyhist;
-    if (item->getString(MM_PREFIX "latencyMs.hist", &latencyhist)) {
-        metrics_proto.set_latency_millis_hist(std::move(latencyhist));
+    std::string latency_millis_hist;
+    if (item->getString(MM_PREFIX "latencyMs.hist", &latency_millis_hist)) {
+        metrics_proto.set_latency_millis_hist(latency_millis_hist);
     }
-    std::string warmuphist;
-    if (item->getString(MM_PREFIX "warmupMs.hist", &warmuphist)) {
-        metrics_proto.set_warmup_millis_hist(std::move(warmuphist));
+    std::string warmup_millis_hist;
+    if (item->getString(MM_PREFIX "warmupMs.hist", &warmup_millis_hist)) {
+        metrics_proto.set_warmup_millis_hist(warmup_millis_hist);
     }
     int64_t underruns = -1;
     if (item->getInt64(MM_PREFIX "underruns", &underruns)) {
@@ -88,101 +88,99 @@
     if (item->getInt64(MM_PREFIX "overruns", &overruns)) {
         metrics_proto.set_overruns(overruns);
     }
-    int64_t activeMs = -1;
-    if (item->getInt64(MM_PREFIX "activeMs", &activeMs)) {
-        metrics_proto.set_active_millis(activeMs);
+    int64_t active_millis = -1;
+    if (item->getInt64(MM_PREFIX "activeMs", &active_millis)) {
+        metrics_proto.set_active_millis(active_millis);
     }
-    int64_t durationMs = -1;
-    if (item->getInt64(MM_PREFIX "durationMs", &durationMs)) {
-        metrics_proto.set_duration_millis(durationMs);
+    int64_t duration_millis = -1;
+    if (item->getInt64(MM_PREFIX "durationMs", &duration_millis)) {
+        metrics_proto.set_duration_millis(duration_millis);
     }
 
-    // item->setInt32(MM_PREFIX "id", (int32_t)mId); // IO handle
     int32_t id = -1;
     if (item->getInt32(MM_PREFIX "id", &id)) {
         metrics_proto.set_id(id);
     }
-    // item->setInt32(MM_PREFIX "portId", (int32_t)mPortId);
+
     int32_t port_id = -1;
-    if (item->getInt32(MM_PREFIX "portId", &id)) {
+    if (item->getInt32(MM_PREFIX "portId", &port_id)) {
         metrics_proto.set_port_id(port_id);
     }
     // item->setCString(MM_PREFIX "type", threadTypeToString(mType));
     std::string type;
     if (item->getString(MM_PREFIX "type", &type)) {
-        metrics_proto.set_type(std::move(type));
+        metrics_proto.set_type(type);
     }
-    // item->setInt32(MM_PREFIX "sampleRate", (int32_t)mSampleRate);
+
     int32_t sample_rate = -1;
     if (item->getInt32(MM_PREFIX "sampleRate", &sample_rate)) {
         metrics_proto.set_sample_rate(sample_rate);
     }
-    // item->setInt64(MM_PREFIX "channelMask", (int64_t)mChannelMask);
+
     int32_t channel_mask = -1;
     if (item->getInt32(MM_PREFIX "channelMask", &channel_mask)) {
         metrics_proto.set_channel_mask(channel_mask);
     }
-    // item->setCString(MM_PREFIX "encoding", toString(mFormat).c_str());
+
     std::string encoding;
     if (item->getString(MM_PREFIX "encoding", &encoding)) {
-        metrics_proto.set_encoding(std::move(encoding));
+        metrics_proto.set_encoding(encoding);
     }
-    // item->setInt32(MM_PREFIX "frameCount", (int32_t)mFrameCount);
+
     int32_t frame_count = -1;
     if (item->getInt32(MM_PREFIX "frameCount", &frame_count)) {
         metrics_proto.set_frame_count(frame_count);
     }
-    // item->setCString(MM_PREFIX "outDevice", toString(mOutDevice).c_str());
-    std::string outDevice;
-    if (item->getString(MM_PREFIX "outDevice", &outDevice)) {
-        metrics_proto.set_output_device(std::move(outDevice));
-    }
-    // item->setCString(MM_PREFIX "inDevice", toString(mInDevice).c_str());
-    std::string inDevice;
-    if (item->getString(MM_PREFIX "inDevice", &inDevice)) {
-        metrics_proto.set_input_device(std::move(inDevice));
-    }
-    // item->setDouble(MM_PREFIX "ioJitterMs.mean", mIoJitterMs.getMean());
-    double iojitters_ms_mean = -1;
-    if (item->getDouble(MM_PREFIX "ioJitterMs.mean", &iojitters_ms_mean)) {
-        metrics_proto.set_io_jitter_mean_millis(iojitters_ms_mean);
-    }
-    // item->setDouble(MM_PREFIX "ioJitterMs.std", mIoJitterMs.getStdDev());
-    double iojitters_ms_std = -1;
-    if (item->getDouble(MM_PREFIX "ioJitterMs.std", &iojitters_ms_std)) {
-        metrics_proto.set_io_jitter_stddev_millis(iojitters_ms_std);
-    }
-    // item->setDouble(MM_PREFIX "processTimeMs.mean", mProcessTimeMs.getMean());
-    double process_time_ms_mean = -1;
-    if (item->getDouble(MM_PREFIX "processTimeMs.mean", &process_time_ms_mean)) {
-        metrics_proto.set_process_time_mean_millis(process_time_ms_mean);
-    }
-    // item->setDouble(MM_PREFIX "processTimeMs.std", mProcessTimeMs.getStdDev());
-    double process_time_ms_std = -1;
-    if (item->getDouble(MM_PREFIX "processTimeMs.std", &process_time_ms_std)) {
-        metrics_proto.set_process_time_stddev_millis(process_time_ms_std);
-    }
-    // item->setDouble(MM_PREFIX "timestampJitterMs.mean", tsjitter.getMean());
-    double timestamp_jitter_ms_mean = -1;
-    if (item->getDouble(MM_PREFIX "timestampJitterMs.mean", &timestamp_jitter_ms_mean)) {
-        metrics_proto.set_timestamp_jitter_mean_millis(timestamp_jitter_ms_mean);
-    }
-    // item->setDouble(MM_PREFIX "timestampJitterMs.std", tsjitter.getStdDev());
-    double timestamp_jitter_ms_stddev = -1;
-    if (item->getDouble(MM_PREFIX "timestampJitterMs.std", &timestamp_jitter_ms_stddev)) {
-        metrics_proto.set_timestamp_jitter_stddev_millis(timestamp_jitter_ms_stddev);
-    }
-    // item->setDouble(MM_PREFIX "latencyMs.mean", mLatencyMs.getMean());
-    double latency_ms_mean = -1;
-    if (item->getDouble(MM_PREFIX "latencyMs.mean", &latency_ms_mean)) {
-        metrics_proto.set_latency_mean_millis(latency_ms_mean);
-    }
-    // item->setDouble(MM_PREFIX "latencyMs.std", mLatencyMs.getStdDev());
-    double latency_ms_stddev = -1;
-    if (item->getDouble(MM_PREFIX "latencyMs.std", &latency_ms_stddev)) {
-        metrics_proto.set_latency_stddev_millis(latency_ms_stddev);
+
+    std::string output_device;
+    if (item->getString(MM_PREFIX "outDevice", &output_device)) {
+        metrics_proto.set_output_device(output_device);
     }
 
+    std::string input_device;
+    if (item->getString(MM_PREFIX "inDevice", &input_device)) {
+        metrics_proto.set_input_device(input_device);
+    }
+
+    double io_jitter_mean_millis = -1;
+    if (item->getDouble(MM_PREFIX "ioJitterMs.mean", &io_jitter_mean_millis)) {
+        metrics_proto.set_io_jitter_mean_millis(io_jitter_mean_millis);
+    }
+
+    double io_jitter_stddev_millis = -1;
+    if (item->getDouble(MM_PREFIX "ioJitterMs.std", &io_jitter_stddev_millis)) {
+        metrics_proto.set_io_jitter_stddev_millis(io_jitter_stddev_millis);
+    }
+
+    double process_time_mean_millis = -1;
+    if (item->getDouble(MM_PREFIX "processTimeMs.mean", &process_time_mean_millis)) {
+        metrics_proto.set_process_time_mean_millis(process_time_mean_millis);
+    }
+
+    double process_time_stddev_millis = -1;
+    if (item->getDouble(MM_PREFIX "processTimeMs.std", &process_time_stddev_millis)) {
+        metrics_proto.set_process_time_stddev_millis(process_time_stddev_millis);
+    }
+
+    double timestamp_jitter_mean_millis = -1;
+    if (item->getDouble(MM_PREFIX "timestampJitterMs.mean", &timestamp_jitter_mean_millis)) {
+        metrics_proto.set_timestamp_jitter_mean_millis(timestamp_jitter_mean_millis);
+    }
+
+    double timestamp_jitter_stddev_millis = -1;
+    if (item->getDouble(MM_PREFIX "timestampJitterMs.std", &timestamp_jitter_stddev_millis)) {
+        metrics_proto.set_timestamp_jitter_stddev_millis(timestamp_jitter_stddev_millis);
+    }
+
+    double latency_mean_millis = -1;
+    if (item->getDouble(MM_PREFIX "latencyMs.mean", &latency_mean_millis)) {
+        metrics_proto.set_latency_mean_millis(latency_mean_millis);
+    }
+
+    double latency_stddev_millis = -1;
+    if (item->getDouble(MM_PREFIX "latencyMs.std", &latency_stddev_millis)) {
+        metrics_proto.set_latency_stddev_millis(latency_stddev_millis);
+    }
 
     std::string serialized;
     if (!metrics_proto.SerializeToString(&serialized)) {
@@ -190,17 +188,50 @@
         return false;
     }
 
-    if (enabled_statsd) {
-        android::util::BytesField bf_serialized( serialized.c_str(), serialized.size());
-        (void)android::util::stats_write(android::util::MEDIAMETRICS_AUDIOTHREAD_REPORTED,
-                                   timestamp, pkgName.c_str(), pkgVersionCode,
-                                   mediaApexVersion,
-                                   bf_serialized);
+    android::util::BytesField bf_serialized( serialized.c_str(), serialized.size());
+    int result = android::util::stats_write(android::util::MEDIAMETRICS_AUDIOTHREAD_REPORTED,
+        timestamp_nanos, package_name.c_str(), package_version_code,
+        media_apex_version,
+        bf_serialized);
+    std::stringstream log;
+    log << "result:" << result << " {"
+            << " mediametrics_audiothread_reported:"
+            << android::util::MEDIAMETRICS_AUDIOTHREAD_REPORTED
+            << " timestamp_nanos:" << timestamp_nanos
+            << " package_name:" << package_name
+            << " package_version_code:" << package_version_code
+            << " media_apex_version:" << media_apex_version
 
-    } else {
-        ALOGV("NOT sending: private data (len=%zu)", strlen(serialized.c_str()));
-    }
+            << " type:" << type
+            << " framecount:" << framecount
+            << " samplerate:" << samplerate
+            << " work_millis_hist:" << work_millis_hist
+            << " latency_millis_hist:" << latency_millis_hist
+            << " warmup_millis_hist:" << warmup_millis_hist
+            << " underruns:" << underruns
+            << " overruns:" << overruns
+            << " active_millis:" << active_millis
+            << " duration_millis:" << duration_millis
 
+            << " id:" << id
+            << " port_id:" << port_id
+            << " sample_rate:" << sample_rate
+            << " channel_mask:" << channel_mask
+            << " encoding:" << encoding
+            << " frame_count:" << frame_count
+            << " output_device:" << output_device
+            << " input_device:" << input_device
+            << " io_jitter_mean_millis:" << io_jitter_mean_millis
+            << " io_jitter_stddev_millis:" << io_jitter_stddev_millis
+
+            << " process_time_mean_millis:" << process_time_mean_millis
+            << " process_time_stddev_millis:" << process_time_stddev_millis
+            << " timestamp_jitter_mean_millis:" << timestamp_jitter_mean_millis
+            << " timestamp_jitter_stddev_millis:" << timestamp_jitter_stddev_millis
+            << " latency_mean_millis:" << latency_mean_millis
+            << " latency_stddev_millis:" << latency_stddev_millis
+            << " }";
+    statsdLog->log(android::util::MEDIAMETRICS_AUDIOTHREAD_REPORTED, log.str());
     return true;
 }
 
diff --git a/services/mediametrics/statsd_audiotrack.cpp b/services/mediametrics/statsd_audiotrack.cpp
index 6b08a78..fe269a1 100644
--- a/services/mediametrics/statsd_audiotrack.cpp
+++ b/services/mediametrics/statsd_audiotrack.cpp
@@ -32,21 +32,22 @@
 #include <statslog.h>
 
 #include "MediaMetricsService.h"
+#include "StringUtils.h"
 #include "frameworks/proto_logging/stats/enums/stats/mediametrics/mediametrics.pb.h"
 #include "iface_statsd.h"
 
 namespace android {
 
-bool statsd_audiotrack(const mediametrics::Item *item)
+bool statsd_audiotrack(const std::shared_ptr<const mediametrics::Item>& item,
+        const std::shared_ptr<mediametrics::StatsdLog>& statsdLog)
 {
     if (item == nullptr) return false;
 
     // these go into the statsd wrapper
-    const nsecs_t timestamp = MediaMetricsService::roundTime(item->getTimestamp());
-    std::string pkgName = item->getPkgName();
-    int64_t pkgVersionCode = item->getPkgVersionCode();
-    int64_t mediaApexVersion = 0;
-
+    const nsecs_t timestamp_nanos = MediaMetricsService::roundTime(item->getTimestamp());
+    const std::string package_name = item->getPkgName();
+    const int64_t package_version_code = item->getPkgVersionCode();
+    const int64_t media_apex_version = 0;
 
     // the rest into our own proto
     //
@@ -57,52 +58,52 @@
 
     // static constexpr char kAudioTrackStreamType[] = "android.media.audiotrack.streamtype";
     // optional string streamType;
-    std::string streamtype;
-    if (item->getString("android.media.audiotrack.streamtype", &streamtype)) {
-        metrics_proto.set_stream_type(std::move(streamtype));
+    std::string stream_type;
+    if (item->getString("android.media.audiotrack.streamtype", &stream_type)) {
+        metrics_proto.set_stream_type(stream_type);
     }
 
     // static constexpr char kAudioTrackContentType[] = "android.media.audiotrack.type";
     // optional string contentType;
-    std::string contenttype;
-    if (item->getString("android.media.audiotrack.type", &contenttype)) {
-        metrics_proto.set_content_type(std::move(contenttype));
+    std::string content_type;
+    if (item->getString("android.media.audiotrack.type", &content_type)) {
+        metrics_proto.set_content_type(content_type);
     }
 
     // static constexpr char kAudioTrackUsage[] = "android.media.audiotrack.usage";
     // optional string trackUsage;
-    std::string trackusage;
-    if (item->getString("android.media.audiotrack.usage", &trackusage)) {
-        metrics_proto.set_track_usage(std::move(trackusage));
+    std::string track_usage;
+    if (item->getString("android.media.audiotrack.usage", &track_usage)) {
+        metrics_proto.set_track_usage(track_usage);
     }
 
     // static constexpr char kAudioTrackSampleRate[] = "android.media.audiotrack.samplerate";
     // optional int32 samplerate;
-    int32_t samplerate = -1;
-    if (item->getInt32("android.media.audiotrack.samplerate", &samplerate)) {
-        metrics_proto.set_sample_rate(samplerate);
+    int32_t sample_rate = -1;
+    if (item->getInt32("android.media.audiotrack.samplerate", &sample_rate)) {
+        metrics_proto.set_sample_rate(sample_rate);
     }
 
     // static constexpr char kAudioTrackChannelMask[] = "android.media.audiotrack.channelmask";
     // optional int64 channelMask;
-    int64_t channelMask = -1;
-    if (item->getInt64("android.media.audiotrack.channelmask", &channelMask)) {
-        metrics_proto.set_channel_mask(channelMask);
+    int64_t channel_mask = -1;
+    if (item->getInt64("android.media.audiotrack.channelmask", &channel_mask)) {
+        metrics_proto.set_channel_mask(channel_mask);
     }
 
     // NB: These are not yet exposed as public Java API constants.
     // static constexpr char kAudioTrackUnderrunFrames[] = "android.media.audiotrack.underrunframes";
     // optional int32 underrunframes;
-    int32_t underrunframes = -1;
-    if (item->getInt32("android.media.audiotrack.underrunframes", &underrunframes)) {
-        metrics_proto.set_underrun_frames(underrunframes);
+    int32_t underrun_frames = -1;
+    if (item->getInt32("android.media.audiotrack.underrunframes", &underrun_frames)) {
+        metrics_proto.set_underrun_frames(underrun_frames);
     }
 
     // static constexpr char kAudioTrackStartupGlitch[] = "android.media.audiotrack.glitch.startup";
     // optional int32 startupglitch;
-    int32_t startupglitch = -1;
-    if (item->getInt32("android.media.audiotrack.glitch.startup", &startupglitch)) {
-        metrics_proto.set_startup_glitch(startupglitch);
+    int32_t startup_glitch = -1;
+    if (item->getInt32("android.media.audiotrack.glitch.startup", &startup_glitch)) {
+        metrics_proto.set_startup_glitch(startup_glitch);
     }
 
     // portId (int32)
@@ -113,7 +114,7 @@
     // encoding (string)
     std::string encoding;
     if (item->getString("android.media.audiotrack.encoding", &encoding)) {
-        metrics_proto.set_encoding(std::move(encoding));
+        metrics_proto.set_encoding(encoding);
     }
     // frameCount (int32)
     int32_t frame_count = -1;
@@ -123,7 +124,7 @@
     // attributes (string)
     std::string attributes;
     if (item->getString("android.media.audiotrack.attributes", &attributes)) {
-        metrics_proto.set_attributes(std::move(attributes));
+        metrics_proto.set_attributes(attributes);
     }
 
     std::string serialized;
@@ -132,17 +133,44 @@
         return false;
     }
 
-    if (enabled_statsd) {
-        android::util::BytesField bf_serialized( serialized.c_str(), serialized.size());
-        (void)android::util::stats_write(android::util::MEDIAMETRICS_AUDIOTRACK_REPORTED,
-                                   timestamp, pkgName.c_str(), pkgVersionCode,
-                                   mediaApexVersion,
-                                   bf_serialized);
+    // Android S
+    // log_session_id (string)
+    std::string logSessionId;
+    (void)item->getString("android.media.audiotrack.logSessionId", &logSessionId);
+    const auto log_session_id =
+            mediametrics::stringutils::sanitizeLogSessionId(logSessionId);
 
-    } else {
-        ALOGV("NOT sending: private data (len=%zu)", strlen(serialized.c_str()));
-    }
+    android::util::BytesField bf_serialized( serialized.c_str(), serialized.size());
+    int result = android::util::stats_write(android::util::MEDIAMETRICS_AUDIOTRACK_REPORTED,
+                               timestamp_nanos, package_name.c_str(), package_version_code,
+                               media_apex_version,
+                               bf_serialized,
+                               log_session_id.c_str());
+    std::stringstream log;
+    log << "result:" << result << " {"
+            << " mediametrics_audiotrack_reported:"
+            << android::util::MEDIAMETRICS_AUDIOTRACK_REPORTED
+            << " timestamp_nanos:" << timestamp_nanos
+            << " package_name:" << package_name
+            << " package_version_code:" << package_version_code
+            << " media_apex_version:" << media_apex_version
 
+            << " stream_type:" << stream_type
+            << " content_type:" << content_type
+            << " track_usage:" << track_usage
+            << " sample_rate:" << sample_rate
+            << " channel_mask:" << channel_mask
+            << " underrun_frames:" << underrun_frames
+            << " startup_glitch:" << startup_glitch
+            << " port_id:" << port_id
+            << " encoding:" << encoding
+            << " frame_count:" << frame_count
+
+            << " attributes:" << attributes
+
+            << " log_session_id:" << log_session_id
+            << " }";
+    statsdLog->log(android::util::MEDIAMETRICS_AUDIOTRACK_REPORTED, log.str());
     return true;
 }
 
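The audiorecord and audiotrack handlers above now attach a log_session_id, passed through mediametrics::stringutils::sanitizeLogSessionId() before it reaches statsd. That sanitizer lives in StringUtils and is not part of this diff; the sketch below is only one plausible form, assuming it strips anything outside a small identifier alphabet and clamps the length.

    // Hypothetical sanitizer: keep only alphanumeric characters and clamp to
    // 16 bytes. The real sanitizeLogSessionId() may differ.
    #include <cctype>
    #include <string>

    std::string sanitizeIdSketch(const std::string& in) {
        std::string out;
        for (const char c : in) {
            if (std::isalnum(static_cast<unsigned char>(c))) {
                out.push_back(c);
            }
        }
        if (out.size() > 16) {
            out.resize(16);
        }
        return out;
    }
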
diff --git a/services/mediametrics/statsd_codec.cpp b/services/mediametrics/statsd_codec.cpp
index d502b30..8a2158f 100644
--- a/services/mediametrics/statsd_codec.cpp
+++ b/services/mediametrics/statsd_codec.cpp
@@ -38,16 +38,16 @@
 
 namespace android {
 
-bool statsd_codec(const mediametrics::Item *item)
+bool statsd_codec(const std::shared_ptr<const mediametrics::Item>& item,
+        const std::shared_ptr<mediametrics::StatsdLog>& statsdLog)
 {
     if (item == nullptr) return false;
 
     // these go into the statsd wrapper
-    const nsecs_t timestamp = MediaMetricsService::roundTime(item->getTimestamp());
-    std::string pkgName = item->getPkgName();
-    int64_t pkgVersionCode = item->getPkgVersionCode();
-    int64_t mediaApexVersion = 0;
-
+    const nsecs_t timestamp_nanos = MediaMetricsService::roundTime(item->getTimestamp());
+    const std::string package_name = item->getPkgName();
+    const int64_t package_version_code = item->getPkgVersionCode();
+    const int64_t media_apex_version = 0;
 
     // the rest into our own proto
     //
@@ -58,39 +58,39 @@
     // android.media.mediacodec.codec   string
     std::string codec;
     if (item->getString("android.media.mediacodec.codec", &codec)) {
-        metrics_proto.set_codec(std::move(codec));
+        metrics_proto.set_codec(codec);
     }
-    // android.media.mediacodec.mime    string
+
     std::string mime;
     if (item->getString("android.media.mediacodec.mime", &mime)) {
-        metrics_proto.set_mime(std::move(mime));
+        metrics_proto.set_mime(mime);
     }
-    // android.media.mediacodec.mode    string
+
     std::string mode;
     if ( item->getString("android.media.mediacodec.mode", &mode)) {
-        metrics_proto.set_mode(std::move(mode));
+        metrics_proto.set_mode(mode);
     }
-    // android.media.mediacodec.encoder int32
+
     int32_t encoder = -1;
     if ( item->getInt32("android.media.mediacodec.encoder", &encoder)) {
         metrics_proto.set_encoder(encoder);
     }
-    // android.media.mediacodec.secure  int32
+
     int32_t secure = -1;
     if ( item->getInt32("android.media.mediacodec.secure", &secure)) {
         metrics_proto.set_secure(secure);
     }
-    // android.media.mediacodec.width   int32
+
     int32_t width = -1;
     if ( item->getInt32("android.media.mediacodec.width", &width)) {
         metrics_proto.set_width(width);
     }
-    // android.media.mediacodec.height  int32
+
     int32_t height = -1;
     if ( item->getInt32("android.media.mediacodec.height", &height)) {
         metrics_proto.set_height(height);
     }
-    // android.media.mediacodec.rotation-degrees        int32
+
     int32_t rotation = -1;
     if ( item->getInt32("android.media.mediacodec.rotation-degrees", &rotation)) {
         metrics_proto.set_rotation(rotation);
@@ -100,109 +100,151 @@
     if ( item->getInt32("android.media.mediacodec.crypto", &crypto)) {
         metrics_proto.set_crypto(crypto);
     }
-    // android.media.mediacodec.profile int32
+
     int32_t profile = -1;
     if ( item->getInt32("android.media.mediacodec.profile", &profile)) {
         metrics_proto.set_profile(profile);
     }
-    // android.media.mediacodec.level   int32
+
     int32_t level = -1;
     if ( item->getInt32("android.media.mediacodec.level", &level)) {
         metrics_proto.set_level(level);
     }
-    // android.media.mediacodec.maxwidth        int32
-    int32_t maxwidth = -1;
-    if ( item->getInt32("android.media.mediacodec.maxwidth", &maxwidth)) {
-        metrics_proto.set_max_width(maxwidth);
+
+    int32_t max_width = -1;
+    if ( item->getInt32("android.media.mediacodec.maxwidth", &max_width)) {
+        metrics_proto.set_max_width(max_width);
     }
-    // android.media.mediacodec.maxheight       int32
-    int32_t maxheight = -1;
-    if ( item->getInt32("android.media.mediacodec.maxheight", &maxheight)) {
-        metrics_proto.set_max_height(maxheight);
+
+    int32_t max_height = -1;
+    if ( item->getInt32("android.media.mediacodec.maxheight", &max_height)) {
+        metrics_proto.set_max_height(max_height);
     }
-    // android.media.mediacodec.errcode         int32
-    int32_t errcode = -1;
-    if ( item->getInt32("android.media.mediacodec.errcode", &errcode)) {
-        metrics_proto.set_error_code(errcode);
+
+    int32_t error_code = -1;
+    if ( item->getInt32("android.media.mediacodec.errcode", &error_code)) {
+        metrics_proto.set_error_code(error_code);
     }
-    // android.media.mediacodec.errstate        string
-    std::string errstate;
-    if ( item->getString("android.media.mediacodec.errstate", &errstate)) {
-        metrics_proto.set_error_state(std::move(errstate));
+
+    std::string error_state;
+    if ( item->getString("android.media.mediacodec.errstate", &error_state)) {
+        metrics_proto.set_error_state(error_state);
     }
-    // android.media.mediacodec.latency.max  int64
+
     int64_t latency_max = -1;
     if ( item->getInt64("android.media.mediacodec.latency.max", &latency_max)) {
         metrics_proto.set_latency_max(latency_max);
     }
-    // android.media.mediacodec.latency.min  int64
+
     int64_t latency_min = -1;
     if ( item->getInt64("android.media.mediacodec.latency.min", &latency_min)) {
         metrics_proto.set_latency_min(latency_min);
     }
-    // android.media.mediacodec.latency.avg  int64
+
     int64_t latency_avg = -1;
     if ( item->getInt64("android.media.mediacodec.latency.avg", &latency_avg)) {
         metrics_proto.set_latency_avg(latency_avg);
     }
-    // android.media.mediacodec.latency.n    int64
+
     int64_t latency_count = -1;
     if ( item->getInt64("android.media.mediacodec.latency.n", &latency_count)) {
         metrics_proto.set_latency_count(latency_count);
     }
-    // android.media.mediacodec.latency.unknown    int64
+
     int64_t latency_unknown = -1;
     if ( item->getInt64("android.media.mediacodec.latency.unknown", &latency_unknown)) {
         metrics_proto.set_latency_unknown(latency_unknown);
     }
-    // android.media.mediacodec.queueSecureInputBufferError  int32
-    if (int32_t queueSecureInputBufferError = -1;
-        item->getInt32("android.media.mediacodec.queueSecureInputBufferError",
-                &queueSecureInputBufferError)) {
-        metrics_proto.set_queue_secure_input_buffer_error(queueSecureInputBufferError);
+
+    int32_t queue_secure_input_buffer_error = -1;
+    if (item->getInt32("android.media.mediacodec.queueSecureInputBufferError",
+                &queue_secure_input_buffer_error)) {
+        metrics_proto.set_queue_secure_input_buffer_error(queue_secure_input_buffer_error);
     }
-    // android.media.mediacodec.queueInputBufferError  int32
-    if (int32_t queueInputBufferError = -1;
-        item->getInt32("android.media.mediacodec.queueInputBufferError",
-                &queueInputBufferError)) {
-        metrics_proto.set_queue_input_buffer_error(queueInputBufferError);
+
+    int32_t queue_input_buffer_error = -1;
+    if (item->getInt32("android.media.mediacodec.queueInputBufferError",
+                &queue_input_buffer_error)) {
+        metrics_proto.set_queue_input_buffer_error(queue_input_buffer_error);
     }
     // android.media.mediacodec.latency.hist    NOT EMITTED
 
-    // android.media.mediacodec.bitrate_mode string
     std::string bitrate_mode;
     if (item->getString("android.media.mediacodec.bitrate_mode", &bitrate_mode)) {
-        metrics_proto.set_bitrate_mode(std::move(bitrate_mode));
+        metrics_proto.set_bitrate_mode(bitrate_mode);
     }
-    // android.media.mediacodec.bitrate int32
+
     int32_t bitrate = -1;
     if (item->getInt32("android.media.mediacodec.bitrate", &bitrate)) {
         metrics_proto.set_bitrate(bitrate);
     }
-    // android.media.mediacodec.lifetimeMs int64
-    int64_t lifetimeMs = -1;
-    if ( item->getInt64("android.media.mediacodec.lifetimeMs", &lifetimeMs)) {
-        lifetimeMs = mediametrics::bucket_time_minutes(lifetimeMs);
-        metrics_proto.set_lifetime_millis(lifetimeMs);
+
+    int64_t lifetime_millis = -1;
+    if (item->getInt64("android.media.mediacodec.lifetimeMs", &lifetime_millis)) {
+        lifetime_millis = mediametrics::bucket_time_minutes(lifetime_millis);
+        metrics_proto.set_lifetime_millis(lifetime_millis);
     }
 
+    // new for S; need to plumb through to westworld
+    // android.media.mediacodec.channelCount int32
+    // android.media.mediacodec.sampleRate int32
+
+    // new for S; need to plumb through to westworld
+    // TODO PWG may want these fuzzed up a bit to obscure some precision
+    // android.media.mediacodec.vencode.bytes int64
+    // android.media.mediacodec.vencode.frames int64
+    // android.media.mediacodec.vencode.durationUs int64
+
     std::string serialized;
     if (!metrics_proto.SerializeToString(&serialized)) {
         ALOGE("Failed to serialize codec metrics");
         return false;
     }
+    android::util::BytesField bf_serialized( serialized.c_str(), serialized.size());
+    int result = android::util::stats_write(android::util::MEDIAMETRICS_CODEC_REPORTED,
+                               timestamp_nanos, package_name.c_str(), package_version_code,
+                               media_apex_version,
+                               bf_serialized);
+    std::stringstream log;
+    log << "result:" << result << " {"
+            << " mediametrics_codec_reported:"
+            << android::util::MEDIAMETRICS_CODEC_REPORTED
+            << " timestamp_nanos:" << timestamp_nanos
+            << " package_name:" << package_name
+            << " package_version_code:" << package_version_code
+            << " media_apex_version:" << media_apex_version
 
-    if (enabled_statsd) {
-        android::util::BytesField bf_serialized( serialized.c_str(), serialized.size());
-        (void)android::util::stats_write(android::util::MEDIAMETRICS_CODEC_REPORTED,
-                                   timestamp, pkgName.c_str(), pkgVersionCode,
-                                   mediaApexVersion,
-                                   bf_serialized);
+            << " codec:" << codec
+            << " mime:" << mime
+            << " mode:" << mode
+            << " encoder:" << encoder
+            << " secure:" << secure
+            << " width:" << width
+            << " height:" << height
+            << " rotation:" << rotation
+            << " crypto:" << crypto
+            << " profile:" << profile
 
-    } else {
-        ALOGV("NOT sending: private data (len=%zu)", strlen(serialized.c_str()));
-    }
+            << " level:" << level
+            << " max_width:" << max_width
+            << " max_height:" << max_height
+            << " error_code:" << error_code
+            << " error_state:" << error_state
+            << " latency_max:" << latency_max
+            << " latency_min:" << latency_min
+            << " latency_avg:" << latency_avg
+            << " latency_count:" << latency_count
+            << " latency_unknown:" << latency_unknown
 
+            << " queue_input_buffer_error:" << queue_input_buffer_error
+            << " queue_secure_input_buffer_error:" << queue_secure_input_buffer_error
+            << " bitrate_mode:" << bitrate_mode
+            << " bitrate:" << bitrate
+            << " lifetime_millis:" << lifetime_millis
+            // TODO: add when log_session_id is merged.
+            // << " log_session_id:" << log_session_id
+            << " }";
+    statsdLog->log(android::util::MEDIAMETRICS_CODEC_REPORTED, log.str());
     return true;
 }
 
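The codec handler coarsens android.media.mediacodec.lifetimeMs through mediametrics::bucket_time_minutes() before reporting, which matches the nearby TODO about fuzzing values to obscure precision. A sketch of what such minute-bucketing could look like, assuming a round-down-and-cap scheme; the real helper is defined elsewhere in the mediametrics service and may use a different granularity or cap.

    // Hypothetical minute-bucketing: round a millisecond duration down to
    // whole minutes and cap it at one day. Illustration only.
    #include <cstdint>

    int64_t bucketTimeMinutesSketch(int64_t millis) {
        constexpr int64_t kMsPerMinute = 60 * 1000;
        constexpr int64_t kMaxMinutes = 24 * 60;
        int64_t minutes = millis / kMsPerMinute;
        if (minutes > kMaxMinutes) minutes = kMaxMinutes;
        return minutes * kMsPerMinute;  // back to milliseconds
    }
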
diff --git a/services/mediametrics/statsd_drm.cpp b/services/mediametrics/statsd_drm.cpp
index ac58929..27fd089 100644
--- a/services/mediametrics/statsd_drm.cpp
+++ b/services/mediametrics/statsd_drm.cpp
@@ -17,6 +17,7 @@
 //#define LOG_NDEBUG 0
 #define LOG_TAG "statsd_drm"
 #include <utils/Log.h>
+#include <media/stagefright/foundation/base64.h>
 
 #include <stdint.h>
 #include <inttypes.h>
@@ -31,90 +32,72 @@
 #include <pwd.h>
 
 #include "MediaMetricsService.h"
+#include "StringUtils.h"
 #include "iface_statsd.h"
 
 #include <statslog.h>
 
 #include <array>
 #include <string>
+#include <vector>
 
 namespace android {
 
 // mediadrm
-bool statsd_mediadrm(const mediametrics::Item *item)
+bool statsd_mediadrm(const std::shared_ptr<const mediametrics::Item>& item,
+        const std::shared_ptr<mediametrics::StatsdLog>& statsdLog)
 {
     if (item == nullptr) return false;
 
-    const nsecs_t timestamp = MediaMetricsService::roundTime(item->getTimestamp());
-    std::string pkgName = item->getPkgName();
-    int64_t pkgVersionCode = item->getPkgVersionCode();
-    int64_t mediaApexVersion = 0;
+    const nsecs_t timestamp_nanos = MediaMetricsService::roundTime(item->getTimestamp());
+    const std::string package_name = item->getPkgName();
+    const int64_t package_version_code = item->getPkgVersionCode();
+    const int64_t media_apex_version = 0;
 
     std::string vendor;
     (void) item->getString("vendor", &vendor);
     std::string description;
     (void) item->getString("description", &description);
-    std::string serialized_metrics;
-    (void) item->getString("serialized_metrics", &serialized_metrics);
 
-    if (enabled_statsd) {
-        android::util::BytesField bf_serialized(serialized_metrics.c_str(),
-                                                serialized_metrics.size());
-        android::util::stats_write(android::util::MEDIAMETRICS_MEDIADRM_REPORTED,
-                                   timestamp, pkgName.c_str(), pkgVersionCode,
-                                   mediaApexVersion,
-                                   vendor.c_str(),
-                                   description.c_str(),
-                                   bf_serialized);
-    } else {
-        ALOGV("NOT sending: mediadrm private data (len=%zu)", serialized_metrics.size());
-    }
+    // This field is left here for backward compatibility.
+    // This field is not used anymore.
+    const std::string  kUnusedField("unused");
+    android::util::BytesField bf_serialized(kUnusedField.c_str(), kUnusedField.size());
+    int result = android::util::stats_write(android::util::MEDIAMETRICS_MEDIADRM_REPORTED,
+        timestamp_nanos, package_name.c_str(), package_version_code,
+        media_apex_version,
+        vendor.c_str(),
+        description.c_str(),
+        bf_serialized);
 
-    return true;
-}
+    std::stringstream log;
+    log << "result:" << result << " {"
+            << " mediametrics_mediadrm_reported:"
+            << android::util::MEDIAMETRICS_MEDIADRM_REPORTED
+            << " timestamp_nanos:" << timestamp_nanos
+            << " package_name:" << package_name
+            << " package_version_code:" << package_version_code
+            << " media_apex_version:" << media_apex_version
 
-// widevineCDM
-bool statsd_widevineCDM(const mediametrics::Item *item)
-{
-    if (item == nullptr) return false;
-
-    const nsecs_t timestamp = MediaMetricsService::roundTime(item->getTimestamp());
-    std::string pkgName = item->getPkgName();
-    int64_t pkgVersionCode = item->getPkgVersionCode();
-    int64_t mediaApexVersion = 0;
-
-    std::string serialized_metrics;
-    (void) item->getString("serialized_metrics", &serialized_metrics);
-
-    if (enabled_statsd) {
-        android::util::BytesField bf_serialized(serialized_metrics.c_str(),
-                                                serialized_metrics.size());
-        android::util::stats_write(android::util::MEDIAMETRICS_DRM_WIDEVINE_REPORTED,
-                                   timestamp, pkgName.c_str(), pkgVersionCode,
-                                   mediaApexVersion,
-                                   bf_serialized);
-    } else {
-        ALOGV("NOT sending: widevine private data (len=%zu)", serialized_metrics.size());
-    }
-
+            << " vendor:" << vendor
+            << " description:" << description
+            // omitting serialized
+            << " }";
+    statsdLog->log(android::util::MEDIAMETRICS_MEDIADRM_REPORTED, log.str());
     return true;
 }
 
 // drmmanager
-bool statsd_drmmanager(const mediametrics::Item *item)
+bool statsd_drmmanager(const std::shared_ptr<const mediametrics::Item>& item,
+        const std::shared_ptr<mediametrics::StatsdLog>& statsdLog)
 {
     using namespace std::string_literals;
     if (item == nullptr) return false;
 
-    if (!enabled_statsd) {
-        ALOGV("NOT sending: drmmanager data");
-        return true;
-    }
-
-    const nsecs_t timestamp = MediaMetricsService::roundTime(item->getTimestamp());
-    std::string pkgName = item->getPkgName();
-    int64_t pkgVersionCode = item->getPkgVersionCode();
-    int64_t mediaApexVersion = 0;
+    const nsecs_t timestamp_nanos = MediaMetricsService::roundTime(item->getTimestamp());
+    const std::string package_name = item->getPkgName();
+    const int64_t package_version_code = item->getPkgVersionCode();
+    const int64_t media_apex_version = 0;
 
     std::string plugin_id;
     (void) item->getString("plugin_id", &plugin_id);
@@ -132,8 +115,9 @@
         item->getInt64(("method"s + std::to_string(i)).c_str(), &methodCounts[i]);
     }
 
-    android::util::stats_write(android::util::MEDIAMETRICS_DRMMANAGER_REPORTED,
-                               timestamp, pkgName.c_str(), pkgVersionCode, mediaApexVersion,
+    const int result = android::util::stats_write(android::util::MEDIAMETRICS_DRMMANAGER_REPORTED,
+                               timestamp_nanos, package_name.c_str(), package_version_code,
+                               media_apex_version,
                                plugin_id.c_str(), description.c_str(),
                                method_id, mime_types.c_str(),
                                methodCounts[0], methodCounts[1], methodCounts[2],
@@ -142,6 +126,96 @@
                                methodCounts[9], methodCounts[10], methodCounts[11],
                                methodCounts[12]);
 
+    std::stringstream log;
+    log << "result:" << result << " {"
+            << " mediametrics_drmmanager_reported:"
+            << android::util::MEDIAMETRICS_DRMMANAGER_REPORTED
+            << " timestamp_nanos:" << timestamp_nanos
+            << " package_name:" << package_name
+            << " package_version_code:" << package_version_code
+            << " media_apex_version:" << media_apex_version
+
+            << " plugin_id:" << plugin_id
+            << " description:" << description
+            << " method_id:" << method_id
+            << " mime_types:" << mime_types;
+
+    for (size_t i = 0; i < methodCounts.size(); ++i) {
+        log << " method_" << i << ":" << methodCounts[i];
+    }
+    log << " }";
+    statsdLog->log(android::util::MEDIAMETRICS_DRMMANAGER_REPORTED, log.str());
+    return true;
+}
+
+namespace {
+std::vector<uint8_t> base64DecodeNoPad(std::string& str) {
+    if (str.empty()) {
+        return {};
+    }
+
+    switch (str.length() % 4) {
+    case 3: str += "="; break;
+    case 2: str += "=="; break;
+    case 1: str += "==="; break;
+    case 0: /* unchanged */ break;
+    }
+
+    std::vector<uint8_t> buf(str.length() / 4 * 3, 0);
+    size_t size = buf.size();
+    if (decodeBase64(buf.data(), &size, str.c_str()) && size <= buf.size()) {
+        buf.erase(buf.begin() + size, buf.end());
+        return buf;
+    }
+    return {};
+}
+} // namespace
+
+// |out| and its contents are memory-managed by statsd.
+bool statsd_mediadrm_puller(
+        const std::shared_ptr<const mediametrics::Item>& item, AStatsEventList* out,
+        const std::shared_ptr<mediametrics::StatsdLog>& statsdLog)
+{
+    if (item == nullptr) {
+        return false;
+    }
+
+    std::string serialized_metrics;
+    (void) item->getString("serialized_metrics", &serialized_metrics);
+    const auto framework_raw(base64DecodeNoPad(serialized_metrics));
+
+    std::string plugin_metrics;
+    (void) item->getString("plugin_metrics", &plugin_metrics);
+    const auto plugin_raw(base64DecodeNoPad(plugin_metrics));
+
+    std::string vendor;
+    (void) item->getString("vendor", &vendor);
+    std::string description;
+    (void) item->getString("description", &description);
+
+    // Memory for |event| is internally managed by statsd.
+    AStatsEvent* event = AStatsEventList_addStatsEvent(out);
+    AStatsEvent_setAtomId(event, android::util::MEDIA_DRM_ACTIVITY_INFO);
+    AStatsEvent_writeString(event, item->getPkgName().c_str());
+    AStatsEvent_writeInt64(event, item->getPkgVersionCode());
+    AStatsEvent_writeString(event, vendor.c_str());
+    AStatsEvent_writeString(event, description.c_str());
+    AStatsEvent_writeByteArray(event, framework_raw.data(), framework_raw.size());
+    AStatsEvent_writeByteArray(event, plugin_raw.data(), plugin_raw.size());
+    AStatsEvent_build(event);
+
+    std::stringstream log;
+    log << "pulled:" << " {"
+            << " media_drm_activity_info:"
+            << android::util::MEDIA_DRM_ACTIVITY_INFO
+            << " package_name:" << item->getPkgName()
+            << " package_version_code:" << item->getPkgVersionCode()
+            << " vendor:" << vendor
+            << " description:" << description
+            << " framework_metrics:" << mediametrics::stringutils::bytesToString(framework_raw, 8)
+            << " vendor_metrics:" <<  mediametrics::stringutils::bytesToString(plugin_raw, 8)
+            << " }";
+    statsdLog->log(android::util::MEDIA_DRM_ACTIVITY_INFO, log.str());
     return true;
 }
 
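statsd_mediadrm_puller() reports the pulled MEDIA_DRM_ACTIVITY_INFO atom with the framework and plugin metrics as raw byte arrays, so base64DecodeNoPad() first restores the '=' padding that a strict decoder expects. Below is a standalone illustration of that padding arithmetic using only the standard library (stagefright's decodeBase64() is not reproduced); note that a length remainder of 1 cannot be produced by a valid base64 encoder, so that branch of the helper only ever sees malformed input.

    // Padding restoration: a base64 encoder emits groups of 4 characters, so
    // an unpadded string needs (4 - len % 4) % 4 '=' characters appended.
    #include <cassert>
    #include <string>

    std::string restorePadding(std::string s) {
        switch (s.length() % 4) {
        case 2: s += "=="; break;  // final group encodes 1 byte
        case 3: s += "=";  break;  // final group encodes 2 bytes
        default: break;            // 0: already aligned; 1: invalid input
        }
        return s;
    }

    int main() {
        assert(restorePadding("TWFu") == "TWFu");  // "Man"
        assert(restorePadding("TWE") == "TWE=");   // "Ma"
        assert(restorePadding("TQ") == "TQ==");    // "M"
        return 0;
    }
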
diff --git a/services/mediametrics/statsd_extractor.cpp b/services/mediametrics/statsd_extractor.cpp
index 4180e0c..e228f07 100644
--- a/services/mediametrics/statsd_extractor.cpp
+++ b/services/mediametrics/statsd_extractor.cpp
@@ -37,16 +37,16 @@
 
 namespace android {
 
-bool statsd_extractor(const mediametrics::Item *item)
+bool statsd_extractor(const std::shared_ptr<const mediametrics::Item>& item,
+        const std::shared_ptr<mediametrics::StatsdLog>& statsdLog)
 {
     if (item == nullptr) return false;
 
     // these go into the statsd wrapper
-    const nsecs_t timestamp = MediaMetricsService::roundTime(item->getTimestamp());
-    std::string pkgName = item->getPkgName();
-    int64_t pkgVersionCode = item->getPkgVersionCode();
-    int64_t mediaApexVersion = 0;
-
+    const nsecs_t timestamp_nanos = MediaMetricsService::roundTime(item->getTimestamp());
+    const std::string package_name = item->getPkgName();
+    const int64_t package_version_code = item->getPkgVersionCode();
+    const int64_t media_apex_version = 0;
 
     // the rest into our own proto
     //
@@ -55,26 +55,25 @@
     // flesh out the protobuf we'll hand off with our data
     //
 
-    // android.media.mediaextractor.fmt         string
-    std::string fmt;
-    if (item->getString("android.media.mediaextractor.fmt", &fmt)) {
-        metrics_proto.set_format(std::move(fmt));
-    }
-    // android.media.mediaextractor.mime        string
-    std::string mime;
-    if (item->getString("android.media.mediaextractor.mime", &mime)) {
-        metrics_proto.set_mime(std::move(mime));
-    }
-    // android.media.mediaextractor.ntrk        int32
-    int32_t ntrk = -1;
-    if (item->getInt32("android.media.mediaextractor.ntrk", &ntrk)) {
-        metrics_proto.set_tracks(ntrk);
+    std::string format;
+    if (item->getString("android.media.mediaextractor.fmt", &format)) {
+        metrics_proto.set_format(format);
     }
 
-    // android.media.mediaextractor.entry       string
+    std::string mime;
+    if (item->getString("android.media.mediaextractor.mime", &mime)) {
+        metrics_proto.set_mime(mime);
+    }
+
+    int32_t tracks = -1;
+    if (item->getInt32("android.media.mediaextractor.ntrk", &tracks)) {
+        metrics_proto.set_tracks(tracks);
+    }
+
     std::string entry_point_string;
+    stats::mediametrics::ExtractorData::EntryPoint entry_point =
+            stats::mediametrics::ExtractorData_EntryPoint_OTHER;
     if (item->getString("android.media.mediaextractor.entry", &entry_point_string)) {
-      stats::mediametrics::ExtractorData::EntryPoint entry_point;
       if (entry_point_string == "sdk") {
         entry_point = stats::mediametrics::ExtractorData_EntryPoint_SDK;
       } else if (entry_point_string == "ndk-with-jvm") {
@@ -93,17 +92,30 @@
         return false;
     }
 
-    if (enabled_statsd) {
-        android::util::BytesField bf_serialized( serialized.c_str(), serialized.size());
-        (void)android::util::stats_write(android::util::MEDIAMETRICS_EXTRACTOR_REPORTED,
-                                   timestamp, pkgName.c_str(), pkgVersionCode,
-                                   mediaApexVersion,
-                                   bf_serialized);
+    android::util::BytesField bf_serialized( serialized.c_str(), serialized.size());
+    int result = android::util::stats_write(android::util::MEDIAMETRICS_EXTRACTOR_REPORTED,
+        timestamp_nanos, package_name.c_str(), package_version_code,
+        media_apex_version,
+        bf_serialized);
+    std::stringstream log;
+    log << "result:" << result << " {"
+            << " mediametrics_extractor_reported:"
+            << android::util::MEDIAMETRICS_EXTRACTOR_REPORTED
+            << " timestamp_nanos:" << timestamp_nanos
+            << " package_name:" << package_name
+            << " package_version_code:" << package_version_code
+            << " media_apex_version:" << media_apex_version
 
-    } else {
-        ALOGV("NOT sending: private data (len=%zu)", strlen(serialized.c_str()));
-    }
+            << " format:" << format
+            << " mime:" << mime
+            << " tracks:" << tracks
+            << " entry_point:" << entry_point_string << "(" << entry_point << ")"
 
+            // TODO: Add MediaExtractor log_session_id
+            // << " log_session_id:" << log_session_id
+
+            << " }";
+    statsdLog->log(android::util::MEDIAMETRICS_EXTRACTOR_REPORTED, log.str());
     return true;
 }
 
diff --git a/services/mediametrics/statsd_mediaparser.cpp b/services/mediametrics/statsd_mediaparser.cpp
index 262b2ae..f543425 100644
--- a/services/mediametrics/statsd_mediaparser.cpp
+++ b/services/mediametrics/statsd_mediaparser.cpp
@@ -36,16 +36,15 @@
 
 namespace android {
 
-bool statsd_mediaparser(const mediametrics::Item *item)
+bool statsd_mediaparser(const std::shared_ptr<const mediametrics::Item>& item,
+        const std::shared_ptr<mediametrics::StatsdLog>& statsdLog)
 {
-    if (item == nullptr) {
-        return false;
-    }
+    static constexpr bool enabled_statsd = true; // TODO: Remove, dup with dump2StatsdInternal().
+    if (item == nullptr) return false;
 
-    // statsd wrapper data.
-    const nsecs_t timestamp = MediaMetricsService::roundTime(item->getTimestamp());
-    std::string pkgName = item->getPkgName();
-    int64_t pkgVersionCode = item->getPkgVersionCode();
+    const nsecs_t timestamp_nanos = MediaMetricsService::roundTime(item->getTimestamp());
+    const std::string package_name = item->getPkgName();
+    const int64_t package_version_code = item->getPkgVersionCode();
 
     std::string parserName;
     item->getString("android.media.mediaparser.parserName", &parserName);
@@ -82,9 +81,9 @@
 
     if (enabled_statsd) {
         (void) android::util::stats_write(android::util::MEDIAMETRICS_MEDIAPARSER_REPORTED,
-                                   timestamp,
-                                   pkgName.c_str(),
-                                   pkgVersionCode,
+                                   timestamp_nanos,
+                                   package_name.c_str(),
+                                   package_version_code,
                                    parserName.c_str(),
                                    createdByName,
                                    parserPool.c_str(),
@@ -99,7 +98,29 @@
     } else {
         ALOGV("NOT sending MediaParser media metrics.");
     }
-
+    // TODO: Cleanup after playback_id is merged.
+    std::stringstream log;
+    log << "result:" << "(result)" << " {"
+            << " mediametrics_mediaparser_reported:"
+            << android::util::MEDIAMETRICS_MEDIAPARSER_REPORTED
+            << " timestamp_nanos:" << timestamp_nanos
+            << " package_name:" << package_name
+            << " package_version_code:" << package_version_code
+            << " parser_name:" << parserName
+            << " created_by_name:" << createdByName
+            << " parser_pool:" << parserPool
+            << " last_exception:" << lastException
+            << " resource_byte_count:" << resourceByteCount
+            << " duration_millis:" << durationMillis
+            << " track_mime_types:" << trackMimeTypes
+            << " track_codecs:" << trackCodecs
+            << " altered_parameters:" << alteredParameters
+            << " video_width:" << videoWidth
+            << " video_height:" << videoHeight
+            // TODO: Add MediaParser playback_id
+            // << " playback_id:" << playbackId
+            << " }";
+    statsdLog->log(android::util::MEDIAMETRICS_MEDIAPARSER_REPORTED, log.str());
     return true;
 }
 
diff --git a/services/mediametrics/statsd_nuplayer.cpp b/services/mediametrics/statsd_nuplayer.cpp
index a8d0f55..33da81e 100644
--- a/services/mediametrics/statsd_nuplayer.cpp
+++ b/services/mediametrics/statsd_nuplayer.cpp
@@ -41,16 +41,16 @@
  *  handles nuplayer AND nuplayer2
  *  checks for the union of what the two players generate
  */
-bool statsd_nuplayer(const mediametrics::Item *item)
+bool statsd_nuplayer(const std::shared_ptr<const mediametrics::Item>& item,
+        const std::shared_ptr<mediametrics::StatsdLog>& statsdLog)
 {
     if (item == nullptr) return false;
 
     // these go into the statsd wrapper
-    const nsecs_t timestamp = MediaMetricsService::roundTime(item->getTimestamp());
-    std::string pkgName = item->getPkgName();
-    int64_t pkgVersionCode = item->getPkgVersionCode();
-    int64_t mediaApexVersion = 0;
-
+    const nsecs_t timestamp_nanos = MediaMetricsService::roundTime(item->getTimestamp());
+    const std::string package_name = item->getPkgName();
+    const int64_t package_version_code = item->getPkgVersionCode();
+    const int64_t media_apex_version = 0;
 
     // the rest into our own proto
     //
@@ -60,15 +60,16 @@
     //
 
     // differentiate between nuplayer and nuplayer2
-    metrics_proto.set_whichplayer(item->getKey().c_str());
+    std::string whichPlayer = item->getKey();
+    metrics_proto.set_whichplayer(whichPlayer.c_str());
 
     std::string video_mime;
     if (item->getString("android.media.mediaplayer.video.mime", &video_mime)) {
-        metrics_proto.set_video_mime(std::move(video_mime));
+        metrics_proto.set_video_mime(video_mime);
     }
     std::string video_codec;
     if (item->getString("android.media.mediaplayer.video.codec", &video_codec)) {
-        metrics_proto.set_video_codec(std::move(video_codec));
+        metrics_proto.set_video_codec(video_codec);
     }
 
     int32_t width = -1;
@@ -92,32 +93,32 @@
     if (item->getInt64("android.media.mediaplayer.startupdropped", &frames_dropped_startup)) {
         metrics_proto.set_frames_dropped_startup(frames_dropped_startup);
     }
-    double fps = -1.0;
-    if (item->getDouble("android.media.mediaplayer.fps", &fps)) {
-        metrics_proto.set_framerate(fps);
+    double framerate = -1.0;
+    if (item->getDouble("android.media.mediaplayer.fps", &framerate)) {
+        metrics_proto.set_framerate(framerate);
     }
 
     std::string audio_mime;
     if (item->getString("android.media.mediaplayer.audio.mime", &audio_mime)) {
-        metrics_proto.set_audio_mime(std::move(audio_mime));
+        metrics_proto.set_audio_mime(audio_mime);
     }
     std::string audio_codec;
     if (item->getString("android.media.mediaplayer.audio.codec", &audio_codec)) {
-        metrics_proto.set_audio_codec(std::move(audio_codec));
+        metrics_proto.set_audio_codec(audio_codec);
     }
 
-    int64_t duration_ms = -1;
-    if (item->getInt64("android.media.mediaplayer.durationMs", &duration_ms)) {
-        metrics_proto.set_duration_millis(duration_ms);
+    int64_t duration_millis = -1;
+    if (item->getInt64("android.media.mediaplayer.durationMs", &duration_millis)) {
+        metrics_proto.set_duration_millis(duration_millis);
     }
-    int64_t playing_ms = -1;
-    if (item->getInt64("android.media.mediaplayer.playingMs", &playing_ms)) {
-        metrics_proto.set_playing_millis(playing_ms);
+    int64_t playing_millis = -1;
+    if (item->getInt64("android.media.mediaplayer.playingMs", &playing_millis)) {
+        metrics_proto.set_playing_millis(playing_millis);
     }
 
-    int32_t err = -1;
-    if (item->getInt32("android.media.mediaplayer.err", &err)) {
-        metrics_proto.set_error(err);
+    int32_t error = -1;
+    if (item->getInt32("android.media.mediaplayer.err", &error)) {
+        metrics_proto.set_error(error);
     }
     int32_t error_code = -1;
     if (item->getInt32("android.media.mediaplayer.errcode", &error_code)) {
@@ -125,45 +126,74 @@
     }
     std::string error_state;
     if (item->getString("android.media.mediaplayer.errstate", &error_state)) {
-        metrics_proto.set_error_state(std::move(error_state));
+        metrics_proto.set_error_state(error_state);
     }
 
     std::string data_source_type;
     if (item->getString("android.media.mediaplayer.dataSource", &data_source_type)) {
-        metrics_proto.set_data_source_type(std::move(data_source_type));
+        metrics_proto.set_data_source_type(data_source_type);
     }
 
-    int64_t rebufferingMs = -1;
-    if (item->getInt64("android.media.mediaplayer.rebufferingMs", &rebufferingMs)) {
-        metrics_proto.set_rebuffering_millis(rebufferingMs);
+    int64_t rebuffering_millis = -1;
+    if (item->getInt64("android.media.mediaplayer.rebufferingMs", &rebuffering_millis)) {
+        metrics_proto.set_rebuffering_millis(rebuffering_millis);
     }
     int32_t rebuffers = -1;
     if (item->getInt32("android.media.mediaplayer.rebuffers", &rebuffers)) {
         metrics_proto.set_rebuffers(rebuffers);
     }
-    int32_t rebufferExit = -1;
-    if (item->getInt32("android.media.mediaplayer.rebufferExit", &rebufferExit)) {
-        metrics_proto.set_rebuffer_at_exit(rebufferExit);
+    int32_t rebuffer_at_exit = -1;
+    if (item->getInt32("android.media.mediaplayer.rebufferExit", &rebuffer_at_exit)) {
+        metrics_proto.set_rebuffer_at_exit(rebuffer_at_exit);
     }
 
-
     std::string serialized;
     if (!metrics_proto.SerializeToString(&serialized)) {
         ALOGE("Failed to serialize nuplayer metrics");
         return false;
     }
 
-    if (enabled_statsd) {
-        android::util::BytesField bf_serialized( serialized.c_str(), serialized.size());
-        (void)android::util::stats_write(android::util::MEDIAMETRICS_NUPLAYER_REPORTED,
-                                   timestamp, pkgName.c_str(), pkgVersionCode,
-                                   mediaApexVersion,
-                                   bf_serialized);
+    android::util::BytesField bf_serialized(serialized.c_str(), serialized.size());
+    int result = android::util::stats_write(android::util::MEDIAMETRICS_NUPLAYER_REPORTED,
+        timestamp_nanos, package_name.c_str(), package_version_code,
+        media_apex_version,
+        bf_serialized);
 
-    } else {
-        ALOGV("NOT sending: private data (len=%zu)", strlen(serialized.c_str()));
-    }
+    std::stringstream log;
+    log << "result:" << result << " {"
+            << " mediametrics_nuplayer_reported:"
+            << android::util::MEDIAMETRICS_NUPLAYER_REPORTED
+            << " timestamp_nanos:" << timestamp_nanos
+            << " package_name:" << package_name
+            << " package_version_code:" << package_version_code
+            << " media_apex_version:" << media_apex_version
 
+            << " whichPlayer:" << whichPlayer
+            << " video_mime:" << video_mime
+            << " video_codec:" << video_codec
+            << " width:" << width
+            << " height:" << height
+            << " frames:" << frames
+            << " frames_dropped:" << frames_dropped
+            << " framerate:" << framerate
+            << " audio_mime:" << audio_mime
+            << " audio_codec:" << media_apex_version
+
+            << " duration_millis:" << duration_millis
+            << " playing_millis:" << playing_millis
+            << " error:" << error
+            << " error_code:" << error_code
+            << " error_state:" << error_state
+            << " data_source_type:" << data_source_type
+            << " rebuffering_millis:" << rebuffering_millis
+            << " rebuffers:" << rebuffers
+            << " rebuffer_at_exit:" << rebuffer_at_exit
+            << " frames_dropped_startup:" << frames_dropped_startup
+
+            // TODO NuPlayer - add log_session_id
+            // << " log_session_id:" << log_session_id
+            << " }";
+    statsdLog->log(android::util::MEDIAMETRICS_NUPLAYER_REPORTED, log.str());
     return true;
 }
 
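Note on the pattern above: the removed `enabled_statsd` gate is replaced by an unconditional atom write plus a human-readable dump line pushed into `statsdLog`. Below is a small standalone sketch of that idea (a bounded per-atom dump log assembled with `std::stringstream`); the class and field names are assumptions modeled on how `statsdLog->log()` is used in this patch, not the real StatsdLog implementation.

// Illustrative sketch only: a bounded per-atom dump log assembled with
// std::stringstream, mirroring how the new statsd_* writers build their
// "result:{...}" strings. Names are assumptions, not AOSP code.
#include <deque>
#include <map>
#include <mutex>
#include <sstream>
#include <string>

class SimpleStatsdLog {
public:
    explicit SimpleStatsdLog(size_t maxEntriesPerAtom) : mMax(maxEntriesPerAtom) {}

    void log(int atomId, const std::string& message) {
        std::lock_guard<std::mutex> guard(mLock);
        auto& entries = mEntries[atomId];
        entries.push_back(message);
        if (entries.size() > mMax) entries.pop_front();  // keep only the newest entries
    }

private:
    const size_t mMax;
    std::mutex mLock;
    std::map<int, std::deque<std::string>> mEntries;
};

// Usage mirroring the nuplayer path above (field values are placeholders):
// std::stringstream log;
// log << "result:" << result << " { timestamp_nanos:" << timestampNanos
//     << " package_name:" << packageName << " }";
// statsdLog.log(/*MEDIAMETRICS_NUPLAYER_REPORTED*/ 0, log.str());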
diff --git a/services/mediametrics/statsd_recorder.cpp b/services/mediametrics/statsd_recorder.cpp
index 2e5ada4..23b884f 100644
--- a/services/mediametrics/statsd_recorder.cpp
+++ b/services/mediametrics/statsd_recorder.cpp
@@ -37,16 +37,16 @@
 
 namespace android {
 
-bool statsd_recorder(const mediametrics::Item *item)
+bool statsd_recorder(const std::shared_ptr<const mediametrics::Item>& item,
+        const std::shared_ptr<mediametrics::StatsdLog>& statsdLog)
 {
     if (item == nullptr) return false;
 
     // these go into the statsd wrapper
-    const nsecs_t timestamp = MediaMetricsService::roundTime(item->getTimestamp());
-    std::string pkgName = item->getPkgName();
-    int64_t pkgVersionCode = item->getPkgVersionCode();
-    int64_t mediaApexVersion = 0;
-
+    const nsecs_t timestamp_nanos = MediaMetricsService::roundTime(item->getTimestamp());
+    const std::string package_name = item->getPkgName();
+    const int64_t package_version_code = item->getPkgVersionCode();
+    const int64_t media_apex_version = 0;
 
     // the rest into our own proto
     //
@@ -58,22 +58,22 @@
     // string kRecorderAudioMime = "android.media.mediarecorder.audio.mime";
     std::string audio_mime;
     if (item->getString("android.media.mediarecorder.audio.mime", &audio_mime)) {
-        metrics_proto.set_audio_mime(std::move(audio_mime));
+        metrics_proto.set_audio_mime(audio_mime);
     }
     // string kRecorderVideoMime = "android.media.mediarecorder.video.mime";
     std::string video_mime;
     if (item->getString("android.media.mediarecorder.video.mime", &video_mime)) {
-        metrics_proto.set_video_mime(std::move(video_mime));
+        metrics_proto.set_video_mime(video_mime);
     }
     // int32 kRecorderVideoProfile = "android.media.mediarecorder.video-encoder-profile";
-    int32_t videoProfile = -1;
-    if (item->getInt32("android.media.mediarecorder.video-encoder-profile", &videoProfile)) {
-        metrics_proto.set_video_profile(videoProfile);
+    int32_t video_profile = -1;
+    if (item->getInt32("android.media.mediarecorder.video-encoder-profile", &video_profile)) {
+        metrics_proto.set_video_profile(video_profile);
     }
     // int32 kRecorderVideoLevel = "android.media.mediarecorder.video-encoder-level";
-    int32_t videoLevel = -1;
-    if (item->getInt32("android.media.mediarecorder.video-encoder-level", &videoLevel)) {
-        metrics_proto.set_video_level(videoLevel);
+    int32_t video_level = -1;
+    if (item->getInt32("android.media.mediarecorder.video-encoder-level", &video_level)) {
+        metrics_proto.set_video_level(video_level);
     }
     // int32 kRecorderWidth = "android.media.mediarecorder.width";
     int32_t width = -1;
@@ -97,73 +97,73 @@
     }
 
     // int32 kRecorderCaptureFps = "android.media.mediarecorder.capture-fps";
-    int32_t captureFps = -1;
-    if (item->getInt32("android.media.mediarecorder.capture-fps", &captureFps)) {
-        metrics_proto.set_capture_fps(captureFps);
+    int32_t capture_fps = -1;
+    if (item->getInt32("android.media.mediarecorder.capture-fps", &capture_fps)) {
+        metrics_proto.set_capture_fps(capture_fps);
     }
     // double kRecorderCaptureFpsEnable = "android.media.mediarecorder.capture-fpsenable";
-    double captureFpsEnable = -1;
-    if (item->getDouble("android.media.mediarecorder.capture-fpsenable", &captureFpsEnable)) {
-        metrics_proto.set_capture_fps_enable(captureFpsEnable);
+    double capture_fps_enable = -1;
+    if (item->getDouble("android.media.mediarecorder.capture-fpsenable", &capture_fps_enable)) {
+        metrics_proto.set_capture_fps_enable(capture_fps_enable);
     }
 
     // int64 kRecorderDurationMs = "android.media.mediarecorder.durationMs";
-    int64_t durationMs = -1;
-    if (item->getInt64("android.media.mediarecorder.durationMs", &durationMs)) {
-        metrics_proto.set_duration_millis(durationMs);
+    int64_t duration_millis = -1;
+    if (item->getInt64("android.media.mediarecorder.durationMs", &duration_millis)) {
+        metrics_proto.set_duration_millis(duration_millis);
     }
     // int64 kRecorderPaused = "android.media.mediarecorder.pausedMs";
-    int64_t pausedMs = -1;
-    if (item->getInt64("android.media.mediarecorder.pausedMs", &pausedMs)) {
-        metrics_proto.set_paused_millis(pausedMs);
+    int64_t paused_millis = -1;
+    if (item->getInt64("android.media.mediarecorder.pausedMs", &paused_millis)) {
+        metrics_proto.set_paused_millis(paused_millis);
     }
     // int32 kRecorderNumPauses = "android.media.mediarecorder.NPauses";
-    int32_t pausedCount = -1;
-    if (item->getInt32("android.media.mediarecorder.NPauses", &pausedCount)) {
-        metrics_proto.set_paused_count(pausedCount);
+    int32_t paused_count = -1;
+    if (item->getInt32("android.media.mediarecorder.NPauses", &paused_count)) {
+        metrics_proto.set_paused_count(paused_count);
     }
 
     // int32 kRecorderAudioBitrate = "android.media.mediarecorder.audio-bitrate";
-    int32_t audioBitrate = -1;
-    if (item->getInt32("android.media.mediarecorder.audio-bitrate", &audioBitrate)) {
-        metrics_proto.set_audio_bitrate(audioBitrate);
+    int32_t audio_bitrate = -1;
+    if (item->getInt32("android.media.mediarecorder.audio-bitrate", &audio_bitrate)) {
+        metrics_proto.set_audio_bitrate(audio_bitrate);
     }
     // int32 kRecorderAudioChannels = "android.media.mediarecorder.audio-channels";
-    int32_t audioChannels = -1;
-    if (item->getInt32("android.media.mediarecorder.audio-channels", &audioChannels)) {
-        metrics_proto.set_audio_channels(audioChannels);
+    int32_t audio_channels = -1;
+    if (item->getInt32("android.media.mediarecorder.audio-channels", &audio_channels)) {
+        metrics_proto.set_audio_channels(audio_channels);
     }
     // int32 kRecorderAudioSampleRate = "android.media.mediarecorder.audio-samplerate";
-    int32_t audioSampleRate = -1;
-    if (item->getInt32("android.media.mediarecorder.audio-samplerate", &audioSampleRate)) {
-        metrics_proto.set_audio_samplerate(audioSampleRate);
+    int32_t audio_samplerate = -1;
+    if (item->getInt32("android.media.mediarecorder.audio-samplerate", &audio_samplerate)) {
+        metrics_proto.set_audio_samplerate(audio_samplerate);
     }
 
     // int32 kRecorderMovieTimescale = "android.media.mediarecorder.movie-timescale";
-    int32_t movieTimescale = -1;
-    if (item->getInt32("android.media.mediarecorder.movie-timescale", &movieTimescale)) {
-        metrics_proto.set_movie_timescale(movieTimescale);
+    int32_t movie_timescale = -1;
+    if (item->getInt32("android.media.mediarecorder.movie-timescale", &movie_timescale)) {
+        metrics_proto.set_movie_timescale(movie_timescale);
     }
     // int32 kRecorderAudioTimescale = "android.media.mediarecorder.audio-timescale";
-    int32_t audioTimescale = -1;
-    if (item->getInt32("android.media.mediarecorder.audio-timescale", &audioTimescale)) {
-        metrics_proto.set_audio_timescale(audioTimescale);
+    int32_t audio_timescale = -1;
+    if (item->getInt32("android.media.mediarecorder.audio-timescale", &audio_timescale)) {
+        metrics_proto.set_audio_timescale(audio_timescale);
     }
     // int32 kRecorderVideoTimescale = "android.media.mediarecorder.video-timescale";
-    int32_t videoTimescale = -1;
-    if (item->getInt32("android.media.mediarecorder.video-timescale", &videoTimescale)) {
-        metrics_proto.set_video_timescale(videoTimescale);
+    int32_t video_timescale = -1;
+    if (item->getInt32("android.media.mediarecorder.video-timescale", &video_timescale)) {
+        metrics_proto.set_video_timescale(video_timescale);
     }
 
     // int32 kRecorderVideoBitrate = "android.media.mediarecorder.video-bitrate";
-    int32_t videoBitRate = -1;
-    if (item->getInt32("android.media.mediarecorder.video-bitrate", &videoBitRate)) {
-        metrics_proto.set_video_bitrate(videoBitRate);
+    int32_t video_bitrate = -1;
+    if (item->getInt32("android.media.mediarecorder.video-bitrate", &video_bitrate)) {
+        metrics_proto.set_video_bitrate(video_bitrate);
     }
     // int32 kRecorderVideoIframeInterval = "android.media.mediarecorder.video-iframe-interval";
-    int32_t iFrameInterval = -1;
-    if (item->getInt32("android.media.mediarecorder.video-iframe-interval", &iFrameInterval)) {
-        metrics_proto.set_iframe_interval(iFrameInterval);
+    int32_t iframe_interval = -1;
+    if (item->getInt32("android.media.mediarecorder.video-iframe-interval", &iframe_interval)) {
+        metrics_proto.set_iframe_interval(iframe_interval);
     }
 
     std::string serialized;
@@ -172,17 +172,47 @@
         return false;
     }
 
-    if (enabled_statsd) {
-        android::util::BytesField bf_serialized( serialized.c_str(), serialized.size());
-        (void)android::util::stats_write(android::util::MEDIAMETRICS_RECORDER_REPORTED,
-                                   timestamp, pkgName.c_str(), pkgVersionCode,
-                                   mediaApexVersion,
-                                   bf_serialized);
+    android::util::BytesField bf_serialized(serialized.c_str(), serialized.size());
+    int result = android::util::stats_write(android::util::MEDIAMETRICS_RECORDER_REPORTED,
+        timestamp_nanos, package_name.c_str(), package_version_code,
+        media_apex_version,
+        bf_serialized);
+    std::stringstream log;
+    log << "result:" << result << " {"
+            << " mediametrics_recorder_reported:"
+            << android::util::MEDIAMETRICS_RECORDER_REPORTED
+            << " timestamp_nanos:" << timestamp_nanos
+            << " package_name:" << package_name
+            << " package_version_code:" << package_version_code
+            << " media_apex_version:" << media_apex_version
 
-    } else {
-        ALOGV("NOT sending: private data (len=%zu)", strlen(serialized.c_str()));
-    }
+            << " audio_mime:" << audio_mime
+            << " video_mime:" << video_mime
+            << " video_profile:" << video_profile
+            << " video_level:" << video_level
+            << " width:" << width
+            << " height:" << height
+            << " rotation:" << rotation
+            << " framerate:" << framerate
+            << " capture_fps:" << capture_fps
+            << " capture_fps_enable:" << capture_fps_enable
 
+            << " duration_millis:" << duration_millis
+            << " paused_millis:" << paused_millis
+            << " paused_count:" << paused_count
+            << " audio_bitrate:" << audio_bitrate
+            << " audio_channels:" << audio_channels
+            << " audio_samplerate:" << audio_samplerate
+            << " movie_timescale:" << movie_timescale
+            << " audio_timescale:" << audio_timescale
+            << " video_timescale:" << video_timescale
+            << " video_bitrate:" << video_bitrate
+
+            << " iframe_interval:" << iframe_interval
+            // TODO Recorder - add log_session_id
+            // << " log_session_id:" << log_session_id
+            << " }";
+    statsdLog->log(android::util::MEDIAMETRICS_RECORDER_REPORTED, log.str());
     return true;
 }
 
diff --git a/services/mediametrics/tests/Android.bp b/services/mediametrics/tests/Android.bp
index 71193a2..3baf739 100644
--- a/services/mediametrics/tests/Android.bp
+++ b/services/mediametrics/tests/Android.bp
@@ -11,6 +11,10 @@
     name: "mediametrics_tests",
     test_suites: ["device-tests"],
 
+    // Not all shared libraries are populated in the 2nd architecture; in
+    // particular, libmediametricsservice, which we use to have a tame copy of the service.
+    compile_multilib: "first",
+
     cflags: [
         "-Wall",
         "-Werror",
diff --git a/services/mediametrics/tests/mediametrics_tests.cpp b/services/mediametrics/tests/mediametrics_tests.cpp
index 478355b..2336d6f 100644
--- a/services/mediametrics/tests/mediametrics_tests.cpp
+++ b/services/mediametrics/tests/mediametrics_tests.cpp
@@ -809,7 +809,9 @@
   (*item3).set("four", (int32_t)4)
           .setTimestamp(12);
 
-  android::mediametrics::AudioAnalytics audioAnalytics;
+  std::shared_ptr<mediametrics::StatsdLog> statsdLog =
+          std::make_shared<mediametrics::StatsdLog>(10);
+  android::mediametrics::AudioAnalytics audioAnalytics{statsdLog};
 
   // untrusted entities cannot create a new key.
   ASSERT_EQ(PERMISSION_DENIED, audioAnalytics.submit(item, false /* isTrusted */));
@@ -817,7 +819,7 @@
 
   // TODO: Verify contents of AudioAnalytics.
   // Currently there is no getter API in AudioAnalytics besides dump.
-  ASSERT_EQ(11, audioAnalytics.dump(1000).second /* lines */);
+  ASSERT_EQ(10, audioAnalytics.dump(1000).second /* lines */);
 
   ASSERT_EQ(NO_ERROR, audioAnalytics.submit(item, true /* isTrusted */));
   // untrusted entities can add to an existing key
@@ -845,7 +847,9 @@
   (*item3).set("four", (int32_t)4)
           .setTimestamp(12);
 
-  android::mediametrics::AudioAnalytics audioAnalytics;
+  std::shared_ptr<mediametrics::StatsdLog> statsdLog =
+          std::make_shared<mediametrics::StatsdLog>(10);
+  android::mediametrics::AudioAnalytics audioAnalytics{statsdLog};
 
   // untrusted entities cannot create a new key.
   ASSERT_EQ(PERMISSION_DENIED, audioAnalytics.submit(item, false /* isTrusted */));
@@ -853,7 +857,7 @@
 
   // TODO: Verify contents of AudioAnalytics.
   // Currently there is no getter API in AudioAnalytics besides dump.
-  ASSERT_EQ(11, audioAnalytics.dump(1000).second /* lines */);
+  ASSERT_EQ(10, audioAnalytics.dump(1000).second /* lines */);
 
   ASSERT_EQ(NO_ERROR, audioAnalytics.submit(item, true /* isTrusted */));
   // untrusted entities can add to an existing key
@@ -877,7 +881,9 @@
   (*item3).set("four", (int32_t)4)
           .setTimestamp(12);
 
-  android::mediametrics::AudioAnalytics audioAnalytics;
+  std::shared_ptr<mediametrics::StatsdLog> statsdLog =
+          std::make_shared<mediametrics::StatsdLog>(10);
+  android::mediametrics::AudioAnalytics audioAnalytics{statsdLog};
 
   ASSERT_EQ(NO_ERROR, audioAnalytics.submit(item, true /* isTrusted */));
   // untrusted entities can add to an existing key
@@ -1082,3 +1088,42 @@
   //mediaMetrics->dump(fileno(stdout), {} /* args */);
 }
 #endif
+
+// Base64Url and isLogSessionId string utilities can be tested by static asserts.
+static_assert(mediametrics::stringutils::isBase64Url("abc"));
+static_assert(mediametrics::stringutils::InverseBase64UrlTable['A'] == 0);
+static_assert(mediametrics::stringutils::InverseBase64UrlTable['a'] == 26);
+static_assert(mediametrics::stringutils::InverseBase64UrlTable['!'] ==
+        mediametrics::stringutils::Transpose::INVALID_CHAR);
+static_assert(mediametrics::stringutils::InverseBase64UrlTable['@'] ==
+        mediametrics::stringutils::Transpose::INVALID_CHAR);
+static_assert(mediametrics::stringutils::InverseBase64UrlTable['#'] ==
+        mediametrics::stringutils::Transpose::INVALID_CHAR);
+static_assert(!mediametrics::stringutils::isBase64Url("!@#"));
+
+static_assert(mediametrics::stringutils::isLogSessionId("0123456789abcdef"));
+static_assert(!mediametrics::stringutils::isLogSessionId("abc"));
+static_assert(!mediametrics::stringutils::isLogSessionId("!@#"));
+static_assert(!mediametrics::stringutils::isLogSessionId("0123456789abcde!"));
+
+TEST(mediametrics_tests, sanitizeLogSessionId) {
+   // invalid id returns empty string.
+   ASSERT_EQ("", mediametrics::stringutils::sanitizeLogSessionId("abc"));
+
+   // valid id passes through.
+   std::string validId = "fedcba9876543210";
+   ASSERT_EQ(validId, mediametrics::stringutils::sanitizeLogSessionId(validId));
+
+   // one more char makes the id invalid
+   ASSERT_EQ("", mediametrics::stringutils::sanitizeLogSessionId(validId + "A"));
+
+   std::string validId2 = "ZYXWVUT123456789";
+   ASSERT_EQ(validId2, mediametrics::stringutils::sanitizeLogSessionId(validId2));
+
+   // one fewer char makes the id invalid
+   ASSERT_EQ("", mediametrics::stringutils::sanitizeLogSessionId(validId.c_str() + 1));
+
+   // replacing one character with an invalid character makes an invalid id.
+   validId2[3] = '!';
+   ASSERT_EQ("", mediametrics::stringutils::sanitizeLogSessionId(validId2));
+}
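The static asserts and the sanitizeLogSessionId test above imply a validator that accepts exactly 16 base64url characters and returns an empty string otherwise. A rough standalone approximation of that contract follows; it is a sketch of the behavior the test exercises, not the actual mediametrics::stringutils implementation.

// Sketch only; not the real mediametrics::stringutils code.
#include <string>

namespace sketch {

constexpr bool isBase64UrlChar(char c) {
    return (c >= 'A' && c <= 'Z') || (c >= 'a' && c <= 'z') ||
           (c >= '0' && c <= '9') || c == '-' || c == '_';
}

// A log session id is assumed here to be exactly 16 base64url characters.
inline std::string sanitizeLogSessionId(const std::string& id) {
    if (id.size() != 16) return {};
    for (char c : id) {
        if (!isBase64UrlChar(c)) return {};
    }
    return id;
}

}  // namespace sketch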
diff --git a/services/mediaresourcemanager/Android.bp b/services/mediaresourcemanager/Android.bp
index 926de3e..db61061 100644
--- a/services/mediaresourcemanager/Android.bp
+++ b/services/mediaresourcemanager/Android.bp
@@ -74,6 +74,9 @@
         "ResourceManagerService.cpp",
         "ResourceObserverService.cpp",
         "ServiceLog.cpp",
+
+        // TODO: convert to AIDL?
+        "IMediaResourceMonitor.cpp",
     ],
 
     shared_libs: [
diff --git a/services/mediaresourcemanager/IMediaResourceMonitor.cpp b/services/mediaresourcemanager/IMediaResourceMonitor.cpp
new file mode 100644
index 0000000..42d7feb
--- /dev/null
+++ b/services/mediaresourcemanager/IMediaResourceMonitor.cpp
@@ -0,0 +1,63 @@
+/*
+ * Copyright 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "IMediaResourceMonitor.h"
+#include <binder/Parcel.h>
+#include <utils/Errors.h>
+#include <sys/types.h>
+
+namespace android {
+
+// ----------------------------------------------------------------------
+
+class BpMediaResourceMonitor : public BpInterface<IMediaResourceMonitor> {
+public:
+    explicit BpMediaResourceMonitor(const sp<IBinder>& impl)
+        : BpInterface<IMediaResourceMonitor>(impl) {}
+
+    virtual void notifyResourceGranted(/*in*/ int32_t pid, /*in*/ const int32_t type)
+    {
+        Parcel data, reply;
+        data.writeInterfaceToken(IMediaResourceMonitor::getInterfaceDescriptor());
+        data.writeInt32(pid);
+        data.writeInt32(type);
+        remote()->transact(NOTIFY_RESOURCE_GRANTED, data, &reply, IBinder::FLAG_ONEWAY);
+    }
+};
+
+IMPLEMENT_META_INTERFACE(MediaResourceMonitor, "android.media.IMediaResourceMonitor")
+
+// ----------------------------------------------------------------------
+
+// NOLINTNEXTLINE(google-default-arguments)
+status_t BnMediaResourceMonitor::onTransact( uint32_t code, const Parcel& data, Parcel* reply,
+        uint32_t flags) {
+    switch(code) {
+        case NOTIFY_RESOURCE_GRANTED: {
+            CHECK_INTERFACE(IMediaResourceMonitor, data, reply);
+            int32_t pid = data.readInt32();
+            const int32_t type = data.readInt32();
+            notifyResourceGranted(/*in*/ pid, /*in*/ type);
+            return NO_ERROR;
+        } break;
+        default:
+            return BBinder::onTransact(code, data, reply, flags);
+    }
+}
+
+// ----------------------------------------------------------------------
+
+} // namespace android
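For context, a caller of this copied-in interface typically looks up the resource-monitor service and fires the one-way notification. The usage sketch below is hedged: the "media.resource_monitor" service name and the lookup pattern are the conventional ones for this interface, and are not taken from this patch.

// Hedged usage sketch for IMediaResourceMonitor; not part of this change.
#include <binder/IServiceManager.h>
#include <utils/String16.h>
#include "IMediaResourceMonitor.h"

namespace android {

static void notifyVideoCodecGranted(int32_t pid) {
    // "media.resource_monitor" is assumed to be the registered service name.
    sp<IBinder> binder = defaultServiceManager()->checkService(
            String16("media.resource_monitor"));
    if (binder == nullptr) return;  // monitor not registered on this device
    sp<IMediaResourceMonitor> monitor = interface_cast<IMediaResourceMonitor>(binder);
    monitor->notifyResourceGranted(pid, IMediaResourceMonitor::TYPE_VIDEO_CODEC);
}

}  // namespace android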
diff --git a/services/mediaresourcemanager/IMediaResourceMonitor.h b/services/mediaresourcemanager/IMediaResourceMonitor.h
new file mode 100644
index 0000000..f92d557
--- /dev/null
+++ b/services/mediaresourcemanager/IMediaResourceMonitor.h
@@ -0,0 +1,59 @@
+/*
+ * Copyright 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#ifndef __ANDROID_VNDK__
+
+#include <binder/IInterface.h>
+
+namespace android {
+
+// ----------------------------------------------------------------------
+
+class IMediaResourceMonitor : public IInterface {
+public:
+    DECLARE_META_INTERFACE(MediaResourceMonitor)
+
+    // Values should be in sync with Intent.EXTRA_MEDIA_RESOURCE_TYPE_XXX.
+    enum {
+        TYPE_VIDEO_CODEC = 0,
+        TYPE_AUDIO_CODEC = 1,
+    };
+
+    virtual void notifyResourceGranted(/*in*/ int32_t pid, /*in*/ const int32_t type) = 0;
+
+    enum {
+        NOTIFY_RESOURCE_GRANTED = IBinder::FIRST_CALL_TRANSACTION,
+    };
+};
+
+// ----------------------------------------------------------------------
+
+class BnMediaResourceMonitor : public BnInterface<IMediaResourceMonitor> {
+public:
+    // NOLINTNEXTLINE(google-default-arguments)
+    virtual status_t onTransact(uint32_t code, const Parcel& data, Parcel* reply,
+            uint32_t flags = 0);
+};
+
+// ----------------------------------------------------------------------
+
+} // namespace android
+
+#else // __ANDROID_VNDK__
+#error "This header is not visible to vendors"
+#endif // __ANDROID_VNDK__
diff --git a/services/mediaresourcemanager/ResourceManagerService.cpp b/services/mediaresourcemanager/ResourceManagerService.cpp
index 289cffd..953686b 100644
--- a/services/mediaresourcemanager/ResourceManagerService.cpp
+++ b/services/mediaresourcemanager/ResourceManagerService.cpp
@@ -21,7 +21,6 @@
 
 #include <android/binder_manager.h>
 #include <android/binder_process.h>
-#include <binder/IMediaResourceMonitor.h>
 #include <binder/IPCThreadState.h>
 #include <binder/IServiceManager.h>
 #include <cutils/sched_policy.h>
@@ -36,6 +35,7 @@
 #include <sys/time.h>
 #include <unistd.h>
 
+#include "IMediaResourceMonitor.h"
 #include "ResourceManagerService.h"
 #include "ResourceObserverService.h"
 #include "ServiceLog.h"
diff --git a/services/mediaresourcemanager/ResourceObserverService.cpp b/services/mediaresourcemanager/ResourceObserverService.cpp
index 9cc6fe4..4e97406 100644
--- a/services/mediaresourcemanager/ResourceObserverService.cpp
+++ b/services/mediaresourcemanager/ResourceObserverService.cpp
@@ -165,6 +165,10 @@
         return Status::fromServiceSpecificError(PERMISSION_DENIED);
     }
 
+    if (in_observer == nullptr) {
+        return Status::fromServiceSpecificError(BAD_VALUE);
+    }
+
     ::ndk::SpAIBinder binder = in_observer->asBinder();
 
     {
@@ -220,6 +224,10 @@
         return Status::fromServiceSpecificError(PERMISSION_DENIED);
     }
 
+    if (in_observer == nullptr) {
+        return Status::fromServiceSpecificError(BAD_VALUE);
+    }
+
     ::ndk::SpAIBinder binder = in_observer->asBinder();
 
     {
diff --git a/services/mediaresourcemanager/test/ResourceObserverService_test.cpp b/services/mediaresourcemanager/test/ResourceObserverService_test.cpp
index e3d3e78..acd9df1 100644
--- a/services/mediaresourcemanager/test/ResourceObserverService_test.cpp
+++ b/services/mediaresourcemanager/test/ResourceObserverService_test.cpp
@@ -182,6 +182,11 @@
     std::vector<MediaObservableFilter> filters1;
     Status status;
 
+    // Register with null observer should fail.
+    status = mObserverService->registerObserver(nullptr, filters1);
+    EXPECT_FALSE(status.isOk());
+    EXPECT_EQ(status.getServiceSpecificError(), BAD_VALUE);
+
     // Register with empty observables should fail.
     status = mObserverService->registerObserver(mTestObserver1, filters1);
     EXPECT_FALSE(status.isOk());
diff --git a/services/mediatranscoding/Android.bp b/services/mediatranscoding/Android.bp
index 1825424..a9fd14f 100644
--- a/services/mediatranscoding/Android.bp
+++ b/services/mediatranscoding/Android.bp
@@ -1,3 +1,4 @@
+// service library
 package {
     default_applicable_licenses: [
         "frameworks_av_services_mediatranscoding_license",
@@ -17,7 +18,6 @@
     ],
 }
 
-// service library
 cc_library {
     name: "libmediatranscodingservice",
 
diff --git a/services/mediatranscoding/MediaTranscodingService.cpp b/services/mediatranscoding/MediaTranscodingService.cpp
index 5c8cc1a..8b64134 100644
--- a/services/mediatranscoding/MediaTranscodingService.cpp
+++ b/services/mediatranscoding/MediaTranscodingService.cpp
@@ -24,6 +24,7 @@
 #include <cutils/properties.h>
 #include <media/TranscoderWrapper.h>
 #include <media/TranscodingClientManager.h>
+#include <media/TranscodingLogger.h>
 #include <media/TranscodingResourcePolicy.h>
 #include <media/TranscodingSessionController.h>
 #include <media/TranscodingThermalPolicy.h>
@@ -41,16 +42,50 @@
             errorCode,                                \
             String8::format("%s:%d: " errorString, __FUNCTION__, __LINE__, ##__VA_ARGS__))
 
-MediaTranscodingService::MediaTranscodingService(
-        const std::shared_ptr<TranscoderInterface>& transcoder)
+static constexpr int64_t kTranscoderHeartBeatIntervalUs = 1000000LL;
+
+MediaTranscodingService::MediaTranscodingService()
       : mUidPolicy(new TranscodingUidPolicy()),
         mResourcePolicy(new TranscodingResourcePolicy()),
         mThermalPolicy(new TranscodingThermalPolicy()),
-        mSessionController(new TranscodingSessionController(transcoder, mUidPolicy, mResourcePolicy,
-                                                            mThermalPolicy)),
-        mClientManager(new TranscodingClientManager(mSessionController)) {
+        mLogger(new TranscodingLogger()) {
     ALOGV("MediaTranscodingService is created");
-    transcoder->setCallback(mSessionController);
+    bool simulated = property_get_bool("debug.transcoding.simulated_transcoder", false);
+    if (simulated) {
+        // Override default config params with shorter values for testing.
+        TranscodingSessionController::ControllerConfig config = {
+                .pacerBurstThresholdMs = 500,
+                .pacerBurstCountQuota = 10,
+                .pacerBurstTimeQuotaSeconds = 3,
+        };
+        mSessionController.reset(new TranscodingSessionController(
+                [](const std::shared_ptr<TranscoderCallbackInterface>& cb)
+                        -> std::shared_ptr<TranscoderInterface> {
+                    return std::make_shared<SimulatedTranscoder>(cb);
+                },
+                mUidPolicy, mResourcePolicy, mThermalPolicy, &config));
+    } else {
+        int32_t overrideBurstCountQuota =
+                property_get_int32("persist.transcoding.burst_count_quota", -1);
+        int32_t pacerBurstTimeQuotaSeconds =
+                property_get_int32("persist.transcoding.burst_time_quota_seconds", -1);
+        // Override default config params with properties if present.
+        TranscodingSessionController::ControllerConfig config;
+        if (overrideBurstCountQuota > 0) {
+            config.pacerBurstCountQuota = overrideBurstCountQuota;
+        }
+        if (pacerBurstTimeQuotaSeconds > 0) {
+            config.pacerBurstTimeQuotaSeconds = pacerBurstTimeQuotaSeconds;
+        }
+        mSessionController.reset(new TranscodingSessionController(
+                [logger = mLogger](const std::shared_ptr<TranscoderCallbackInterface>& cb)
+                        -> std::shared_ptr<TranscoderInterface> {
+                    return std::make_shared<TranscoderWrapper>(cb, logger,
+                                                               kTranscoderHeartBeatIntervalUs);
+                },
+                mUidPolicy, mResourcePolicy, mThermalPolicy, &config));
+    }
+    mClientManager.reset(new TranscodingClientManager(mSessionController));
     mUidPolicy->setCallback(mSessionController);
     mResourcePolicy->setCallback(mSessionController);
     mThermalPolicy->setCallback(mSessionController);
@@ -94,15 +129,8 @@
 
 //static
 void MediaTranscodingService::instantiate() {
-    std::shared_ptr<TranscoderInterface> transcoder;
-    if (property_get_bool("debug.transcoding.simulated_transcoder", false)) {
-        transcoder = std::make_shared<SimulatedTranscoder>();
-    } else {
-        transcoder = std::make_shared<TranscoderWrapper>();
-    }
-
     std::shared_ptr<MediaTranscodingService> service =
-            ::ndk::SharedRefBase::make<MediaTranscodingService>(transcoder);
+            ::ndk::SharedRefBase::make<MediaTranscodingService>();
     binder_status_t status =
             AServiceManager_addService(service->asBinder().get(), getServiceName());
     if (status != STATUS_OK) {
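The constructor change above replaces a pre-built transcoder instance with a factory callback, so the session controller can create (or re-create) a transcoder on demand, for example after a watchdog abandons the old one. A minimal standalone sketch of that factory-injection pattern follows; all names are illustrative and are not the TranscodingSessionController API.

// Standalone sketch of injecting a transcoder factory instead of an instance.
#include <functional>
#include <memory>

struct Transcoder {
    virtual ~Transcoder() = default;
    virtual void start() = 0;
};

struct RealTranscoder : Transcoder {
    void start() override { /* drive the real media transcoder */ }
};

struct FakeTranscoder : Transcoder {
    void start() override { /* simulate work for tests */ }
};

class Controller {
public:
    using TranscoderFactory = std::function<std::shared_ptr<Transcoder>()>;
    explicit Controller(TranscoderFactory factory) : mFactory(std::move(factory)) {}

    void startSession() {
        // A fresh transcoder can be created whenever the previous one was abandoned.
        if (mTranscoder == nullptr) mTranscoder = mFactory();
        mTranscoder->start();
    }

private:
    TranscoderFactory mFactory;
    std::shared_ptr<Transcoder> mTranscoder;
};

// Controller real{[] { return std::make_shared<RealTranscoder>(); }};
// Controller test{[] { return std::make_shared<FakeTranscoder>(); }};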
diff --git a/services/mediatranscoding/MediaTranscodingService.h b/services/mediatranscoding/MediaTranscodingService.h
index a22acf2..12be131 100644
--- a/services/mediatranscoding/MediaTranscodingService.h
+++ b/services/mediatranscoding/MediaTranscodingService.h
@@ -29,8 +29,8 @@
 using ::aidl::android::media::TranscodingRequestParcel;
 using ::aidl::android::media::TranscodingSessionParcel;
 class TranscodingClientManager;
+class TranscodingLogger;
 class TranscodingSessionController;
-class TranscoderInterface;
 class UidPolicyInterface;
 class ResourcePolicyInterface;
 class ThermalPolicyInterface;
@@ -40,7 +40,7 @@
     static constexpr int32_t kInvalidSessionId = -1;
     static constexpr int32_t kInvalidClientId = -1;
 
-    MediaTranscodingService(const std::shared_ptr<TranscoderInterface>& transcoder);
+    MediaTranscodingService();
     virtual ~MediaTranscodingService();
 
     static void instantiate();
@@ -63,6 +63,7 @@
     std::shared_ptr<UidPolicyInterface> mUidPolicy;
     std::shared_ptr<ResourcePolicyInterface> mResourcePolicy;
     std::shared_ptr<ThermalPolicyInterface> mThermalPolicy;
+    std::shared_ptr<TranscodingLogger> mLogger;
     std::shared_ptr<TranscodingSessionController> mSessionController;
     std::shared_ptr<TranscodingClientManager> mClientManager;
 };
diff --git a/services/mediatranscoding/SimulatedTranscoder.cpp b/services/mediatranscoding/SimulatedTranscoder.cpp
index 03ee886..e80dbc5 100644
--- a/services/mediatranscoding/SimulatedTranscoder.cpp
+++ b/services/mediatranscoding/SimulatedTranscoder.cpp
@@ -33,28 +33,45 @@
         return "Pause";
     case Event::Resume:
         return "Resume";
+    case Event::Stop:
+        return "Stop";
+    case Event::Finished:
+        return "Finished";
+    case Event::Failed:
+        return "Failed";
+    case Event::Abandon:
+        return "Abandon";
     default:
         break;
     }
     return "(unknown)";
 }
 
-SimulatedTranscoder::SimulatedTranscoder() {
-    std::thread(&SimulatedTranscoder::threadLoop, this).detach();
+SimulatedTranscoder::SimulatedTranscoder(const std::shared_ptr<TranscoderCallbackInterface>& cb)
+      : mCallback(cb), mLooperReady(false) {
+    ALOGV("SimulatedTranscoder CTOR: %p", this);
 }
 
-void SimulatedTranscoder::setCallback(const std::shared_ptr<TranscoderCallbackInterface>& cb) {
-    mCallback = cb;
+SimulatedTranscoder::~SimulatedTranscoder() {
+    ALOGV("SimulatedTranscoder DTOR: %p", this);
 }
 
 void SimulatedTranscoder::start(
         ClientIdType clientId, SessionIdType sessionId, const TranscodingRequestParcel& request,
+        uid_t /*callingUid*/,
         const std::shared_ptr<ITranscodingClientCallback>& /*clientCallback*/) {
-    if (request.testConfig.has_value() && request.testConfig->processingTotalTimeMs > 0) {
-        mSessionProcessingTimeMs = request.testConfig->processingTotalTimeMs;
+    {
+        auto lock = std::scoped_lock(mLock);
+        int64_t processingTimeUs = kSessionDurationUs;
+        if (request.testConfig.has_value() && request.testConfig->processingTotalTimeMs > 0) {
+            processingTimeUs = request.testConfig->processingTotalTimeMs * 1000;
+        }
+        ALOGI("%s: session {%lld, %d}: processingTimeUs: %lld", __FUNCTION__, (long long)clientId,
+              sessionId, (long long)processingTimeUs);
+        SessionKeyType key = std::make_pair(clientId, sessionId);
+        mRemainingTimeMap.emplace(key, processingTimeUs);
     }
-    ALOGV("%s: session {%d}: processingTime: %lld", __FUNCTION__, sessionId,
-          (long long)mSessionProcessingTimeMs);
+
     queueEvent(Event::Start, clientId, sessionId, [=] {
         auto callback = mCallback.lock();
         if (callback != nullptr) {
@@ -74,6 +91,7 @@
 
 void SimulatedTranscoder::resume(
         ClientIdType clientId, SessionIdType sessionId, const TranscodingRequestParcel& /*request*/,
+        uid_t /*callingUid*/,
         const std::shared_ptr<ITranscodingClientCallback>& /*clientCallback*/) {
     queueEvent(Event::Resume, clientId, sessionId, [=] {
         auto callback = mCallback.lock();
@@ -83,8 +101,12 @@
     });
 }
 
-void SimulatedTranscoder::stop(ClientIdType clientId, SessionIdType sessionId) {
+void SimulatedTranscoder::stop(ClientIdType clientId, SessionIdType sessionId, bool abandon) {
     queueEvent(Event::Stop, clientId, sessionId, nullptr);
+
+    if (abandon) {
+        queueEvent(Event::Abandon, 0, 0, nullptr);
+    }
 }
 
 void SimulatedTranscoder::queueEvent(Event::Type type, ClientIdType clientId,
@@ -94,14 +116,22 @@
 
     auto lock = std::scoped_lock(mLock);
 
+    if (!mLooperReady) {
+        // A shared_ptr to ourselves is given to the thread's stack, so that the SimulatedTranscoder
+        // object doesn't go away until the thread exits. When a watchdog timeout happens, this
+        // allows the session controller to release its reference to this transcoder object
+        // without blocking until the thread exits.
+        std::thread([owner = shared_from_this()]() { owner->threadLoop(); }).detach();
+        mLooperReady = true;
+    }
+
     mQueue.push_back({type, clientId, sessionId, runnable});
     mCondition.notify_one();
 }
 
 void SimulatedTranscoder::threadLoop() {
     bool running = false;
-    std::chrono::microseconds remainingUs(kSessionDurationUs);
-    std::chrono::system_clock::time_point lastRunningTime;
+    std::chrono::steady_clock::time_point lastRunningTime;
     Event lastRunningEvent;
 
     std::unique_lock<std::mutex> lock(mLock);
@@ -115,12 +145,16 @@
                 continue;
             }
             // If running, wait for the remaining life of this session. Report finish if timed out.
-            std::cv_status status = mCondition.wait_for(lock, remainingUs);
+            SessionKeyType key =
+                    std::make_pair(lastRunningEvent.clientId, lastRunningEvent.sessionId);
+            std::cv_status status = mCondition.wait_for(lock, mRemainingTimeMap[key]);
             if (status == std::cv_status::timeout) {
                 running = false;
 
                 auto callback = mCallback.lock();
                 if (callback != nullptr) {
+                    mRemainingTimeMap.erase(key);
+
                     lock.unlock();
                     callback->onFinish(lastRunningEvent.clientId, lastRunningEvent.sessionId);
                     lock.lock();
@@ -129,41 +163,51 @@
                 // Advance last running time and remaining time. This is needed to guard
                 // against bad events (which will be ignored) or spurious wakeups, in that
                 // case we don't want to wait for the same time again.
-                auto now = std::chrono::system_clock::now();
-                remainingUs -= (now - lastRunningTime);
+                auto now = std::chrono::steady_clock::now();
+                mRemainingTimeMap[key] -= std::chrono::duration_cast<std::chrono::microseconds>(
+                        now - lastRunningTime);
                 lastRunningTime = now;
             }
         }
 
         // Handle the events, adjust state and send updates to client accordingly.
-        while (!mQueue.empty()) {
-            Event event = *mQueue.begin();
-            mQueue.pop_front();
+        Event event = *mQueue.begin();
+        mQueue.pop_front();
 
-            ALOGV("%s: session {%lld, %d}: %s", __FUNCTION__, (long long)event.clientId,
-                  event.sessionId, toString(event.type));
+        ALOGD("%s: session {%lld, %d}: %s", __FUNCTION__, (long long)event.clientId,
+              event.sessionId, toString(event.type));
 
-            if (!running && (event.type == Event::Start || event.type == Event::Resume)) {
-                running = true;
-                lastRunningTime = std::chrono::system_clock::now();
-                lastRunningEvent = event;
-                if (event.type == Event::Start) {
-                    remainingUs = std::chrono::milliseconds(mSessionProcessingTimeMs);
-                }
-            } else if (running && (event.type == Event::Pause || event.type == Event::Stop)) {
-                running = false;
-                remainingUs -= (std::chrono::system_clock::now() - lastRunningTime);
+        if (event.type == Event::Abandon) {
+            break;
+        }
+
+        SessionKeyType key = std::make_pair(event.clientId, event.sessionId);
+        if (!running && (event.type == Event::Start || event.type == Event::Resume)) {
+            running = true;
+            lastRunningTime = std::chrono::steady_clock::now();
+            lastRunningEvent = event;
+            ALOGV("%s: session {%lld, %d}: remaining time: %lld", __FUNCTION__,
+                  (long long)event.clientId, event.sessionId,
+                  (long long)mRemainingTimeMap[key].count());
+
+        } else if (running && (event.type == Event::Pause || event.type == Event::Stop)) {
+            running = false;
+            if (event.type == Event::Stop) {
+                mRemainingTimeMap.erase(key);
             } else {
-                ALOGW("%s: discarding bad event: session {%lld, %d}: %s", __FUNCTION__,
-                      (long long)event.clientId, event.sessionId, toString(event.type));
-                continue;
+                mRemainingTimeMap[key] -= std::chrono::duration_cast<std::chrono::microseconds>(
+                        std::chrono::steady_clock::now() - lastRunningTime);
             }
+        } else {
+            ALOGW("%s: discarding bad event: session {%lld, %d}: %s", __FUNCTION__,
+                  (long long)event.clientId, event.sessionId, toString(event.type));
+            continue;
+        }
 
-            if (event.runnable != nullptr) {
-                lock.unlock();
-                event.runnable();
-                lock.lock();
-            }
+        if (event.runnable != nullptr) {
+            lock.unlock();
+            event.runnable();
+            lock.lock();
         }
     }
 }
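The lazily started looper above keeps the object alive by handing a shared_ptr to the detached thread. Below is a compact standalone sketch of that enable_shared_from_this ownership pattern, for illustration only; names do not correspond to the classes in this patch.

// Illustrative sketch of the detached-looper ownership pattern used above.
#include <atomic>
#include <chrono>
#include <memory>
#include <thread>

class Worker : public std::enable_shared_from_this<Worker> {
public:
    void startOnce() {
        if (mStarted.exchange(true)) return;  // start the looper at most once
        // The lambda captures a shared_ptr copy, so *this outlives all external
        // references until the thread returns; detaching is then safe.
        std::thread([owner = shared_from_this()] { owner->loop(); }).detach();
    }

private:
    void loop() {
        // placeholder work loop
        std::this_thread::sleep_for(std::chrono::milliseconds(10));
    }
    std::atomic<bool> mStarted{false};
};

// auto worker = std::make_shared<Worker>();  // must be owned by a shared_ptr
// worker->startOnce();  // caller may drop `worker`; the thread keeps it alive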
diff --git a/services/mediatranscoding/SimulatedTranscoder.h b/services/mediatranscoding/SimulatedTranscoder.h
index ba2bba0..58e2e30 100644
--- a/services/mediatranscoding/SimulatedTranscoder.h
+++ b/services/mediatranscoding/SimulatedTranscoder.h
@@ -21,6 +21,7 @@
 #include <media/TranscoderInterface.h>
 
 #include <list>
+#include <map>
 #include <mutex>
 
 namespace android {
@@ -36,10 +37,11 @@
  * Session lifecycle events are reported via progress updates with special progress
  * numbers (equal to the Event's type).
  */
-class SimulatedTranscoder : public TranscoderInterface {
+class SimulatedTranscoder : public TranscoderInterface,
+                            public std::enable_shared_from_this<SimulatedTranscoder> {
 public:
     struct Event {
-        enum Type { NoEvent, Start, Pause, Resume, Stop, Finished, Failed } type;
+        enum Type { NoEvent, Start, Pause, Resume, Stop, Finished, Failed, Abandon } type;
         ClientIdType clientId;
         SessionIdType sessionId;
         std::function<void()> runnable;
@@ -47,18 +49,18 @@
 
     static constexpr int64_t kSessionDurationUs = 1000000;
 
-    SimulatedTranscoder();
+    SimulatedTranscoder(const std::shared_ptr<TranscoderCallbackInterface>& cb);
+    ~SimulatedTranscoder();
 
     // TranscoderInterface
-    void setCallback(const std::shared_ptr<TranscoderCallbackInterface>& cb) override;
     void start(ClientIdType clientId, SessionIdType sessionId,
-               const TranscodingRequestParcel& request,
+               const TranscodingRequestParcel& request, uid_t callingUid,
                const std::shared_ptr<ITranscodingClientCallback>& clientCallback) override;
     void pause(ClientIdType clientId, SessionIdType sessionId) override;
     void resume(ClientIdType clientId, SessionIdType sessionId,
-                const TranscodingRequestParcel& request,
+                const TranscodingRequestParcel& request, uid_t callingUid,
                 const std::shared_ptr<ITranscodingClientCallback>& clientCallback) override;
-    void stop(ClientIdType clientId, SessionIdType sessionId) override;
+    void stop(ClientIdType clientId, SessionIdType sessionId, bool abandon = false) override;
     // ~TranscoderInterface
 
 private:
@@ -66,9 +68,11 @@
     std::mutex mLock;
     std::condition_variable mCondition;
     std::list<Event> mQueue GUARDED_BY(mLock);
+    bool mLooperReady;
 
-    // Minimum time spent on transcode the video. This is used just for testing.
-    int64_t mSessionProcessingTimeMs = kSessionDurationUs / 1000;
+    using SessionKeyType = std::pair<ClientIdType, SessionIdType>;
+    // Map of each session's remaining time, in microseconds.
+    std::map<SessionKeyType, std::chrono::microseconds> mRemainingTimeMap;
 
     static const char* toString(Event::Type type);
     void queueEvent(Event::Type type, ClientIdType clientId, SessionIdType sessionId,
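The header change replaces the single remaining-time counter with a per-session map keyed by (client, session), so pause/resume bookkeeping no longer bleeds across sessions. A tiny standalone sketch of that bookkeeping, including the pause adjustment done in the loop above, is shown here with illustrative names only.

// Sketch of per-session remaining-time bookkeeping keyed by (client, session).
#include <chrono>
#include <cstdint>
#include <map>
#include <utility>

using SessionKey = std::pair<int64_t /*clientId*/, int32_t /*sessionId*/>;
using Clock = std::chrono::steady_clock;

class RemainingTime {
public:
    void start(SessionKey key, std::chrono::microseconds budget) {
        mRemaining[key] = budget;
    }

    // On pause, subtract the time the session has been running so far.
    void pause(SessionKey key, Clock::time_point runningSince) {
        mRemaining[key] -= std::chrono::duration_cast<std::chrono::microseconds>(
                Clock::now() - runningSince);
    }

    void finish(SessionKey key) { mRemaining.erase(key); }

    std::chrono::microseconds remaining(SessionKey key) { return mRemaining[key]; }

private:
    std::map<SessionKey, std::chrono::microseconds> mRemaining;
};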
diff --git a/services/mediatranscoding/tests/Android.bp b/services/mediatranscoding/tests/Android.bp
index 4df5a9f..cb180ec 100644
--- a/services/mediatranscoding/tests/Android.bp
+++ b/services/mediatranscoding/tests/Android.bp
@@ -25,6 +25,7 @@
     ],
 
     shared_libs: [
+        "libactivitymanager_aidl",
         "libbinder",
         "libbinder_ndk",
         "liblog",
diff --git a/services/mediatranscoding/tests/MediaTranscodingServiceTestHelper.h b/services/mediatranscoding/tests/MediaTranscodingServiceTestHelper.h
index 66cced5..20e4bfb 100644
--- a/services/mediatranscoding/tests/MediaTranscodingServiceTestHelper.h
+++ b/services/mediatranscoding/tests/MediaTranscodingServiceTestHelper.h
@@ -51,6 +51,7 @@
 using aidl::android::media::TranscodingRequestParcel;
 using aidl::android::media::TranscodingSessionParcel;
 using aidl::android::media::TranscodingSessionPriority;
+using aidl::android::media::TranscodingTestConfig;
 using aidl::android::media::TranscodingVideoTrackFormat;
 
 constexpr int32_t kClientUseCallingPid = IMediaTranscodingService::USE_CALLING_PID;
@@ -207,7 +208,9 @@
         std::unique_lock lock(mLock);
 
         mEventQueue.push_back(event);
-        mLastErr = err;
+        if (err != TranscodingErrorCode::kNoError) {
+            mLastErrQueue.push_back(err);
+        }
         mCondition.notify_one();
     }
 
@@ -225,7 +228,12 @@
 
     TranscodingErrorCode getLastError() {
         std::unique_lock lock(mLock);
-        return mLastErr;
+        if (mLastErrQueue.empty()) {
+            return TranscodingErrorCode::kNoError;
+        }
+        TranscodingErrorCode err = mLastErrQueue.front();
+        mLastErrQueue.pop_front();
+        return err;
     }
 
 private:
@@ -233,7 +241,7 @@
     std::condition_variable mCondition;
     Event mPoppedEvent;
     std::list<Event> mEventQueue;
-    TranscodingErrorCode mLastErr;
+    std::list<TranscodingErrorCode> mLastErrQueue;
     int mUpdateCount = 0;
     int mLastProgress = -1;
 };
@@ -359,7 +367,8 @@
     template <bool expectation = success>
     bool submit(int32_t sessionId, const char* sourceFilePath, const char* destinationFilePath,
                 TranscodingSessionPriority priority = TranscodingSessionPriority::kNormal,
-                int bitrateBps = -1, int overridePid = -1, int overrideUid = -1) {
+                int bitrateBps = -1, int overridePid = -1, int overrideUid = -1,
+                int sessionDurationMs = -1) {
         constexpr bool shouldSucceed = (expectation == success);
         bool result;
         TranscodingRequestParcel request;
@@ -375,6 +384,11 @@
             request.requestedVideoTrackFormat.emplace(TranscodingVideoTrackFormat());
             request.requestedVideoTrackFormat->bitrateBps = bitrateBps;
         }
+        if (sessionDurationMs > 0) {
+            request.isForTesting = true;
+            request.testConfig.emplace(TranscodingTestConfig());
+            request.testConfig->processingTotalTimeMs = sessionDurationMs;
+        }
         Status status = mClient->submitRequest(request, &session, &result);
 
         EXPECT_TRUE(status.isOk());
@@ -420,6 +434,34 @@
                                    session.request.destinationFilePath == destinationFilePath));
     }
 
+    template <bool expectation = success>
+    bool addClientUid(int32_t sessionId, uid_t clientUid) {
+        constexpr bool shouldSucceed = (expectation == success);
+        bool result;
+        Status status = mClient->addClientUid(sessionId, clientUid, &result);
+
+        EXPECT_TRUE(status.isOk());
+        EXPECT_EQ(result, shouldSucceed);
+
+        return status.isOk() && (result == shouldSucceed);
+    }
+
+    template <bool expectation = success>
+    bool getClientUids(int32_t sessionId, std::vector<int32_t>* clientUids) {
+        constexpr bool shouldSucceed = (expectation == success);
+        std::optional<std::vector<int32_t>> aidl_return;
+        Status status = mClient->getClientUids(sessionId, &aidl_return);
+
+        EXPECT_TRUE(status.isOk());
+        bool success = (aidl_return != std::nullopt);
+        if (success) {
+            *clientUids = *aidl_return;
+        }
+        EXPECT_EQ(success, shouldSucceed);
+
+        return status.isOk() && (success == shouldSucceed);
+    }
+
     int32_t mClientId;
     pid_t mClientPid;
     uid_t mClientUid;
@@ -486,8 +528,24 @@
         EXPECT_TRUE(mClient3->unregisterClient().isOk());
     }
 
+    const char* prepareOutputFile(const char* path) {
+        deleteFile(path);
+        return path;
+    }
+
     void deleteFile(const char* path) { unlink(path); }
 
+    void dismissKeyguard() {
+        EXPECT_TRUE(ShellHelper::RunCmd("input keyevent KEYCODE_WAKEUP"));
+        EXPECT_TRUE(ShellHelper::RunCmd("wm dismiss-keyguard"));
+    }
+
+    void stopAppPackages() {
+        EXPECT_TRUE(ShellHelper::Stop(kClientPackageA));
+        EXPECT_TRUE(ShellHelper::Stop(kClientPackageB));
+        EXPECT_TRUE(ShellHelper::Stop(kClientPackageC));
+    }
+
     std::shared_ptr<IMediaTranscodingService> mService;
     std::shared_ptr<TestClientCallback> mClient1;
     std::shared_ptr<TestClientCallback> mClient2;
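Replacing the single mLastErr with a queue lets the test observe one error per failed session even when two failures arrive back to back. A standalone sketch of that pattern follows; the names are placeholders and do not match the helper's real API.

// Standalone sketch of the error-queue pattern adopted by the test helper.
#include <list>
#include <mutex>

enum class Err { kNoError, kUidGoneCancelled, kUnknown };

class ErrorTracker {
public:
    void onError(Err err) {
        std::lock_guard<std::mutex> guard(mLock);
        if (err != Err::kNoError) mErrQueue.push_back(err);  // ignore "no error" reports
    }

    // Pops the oldest pending error, so consecutive failures are observed in order.
    Err getLastError() {
        std::lock_guard<std::mutex> guard(mLock);
        if (mErrQueue.empty()) return Err::kNoError;
        Err err = mErrQueue.front();
        mErrQueue.pop_front();
        return err;
    }

private:
    std::mutex mLock;
    std::list<Err> mErrQueue;
};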
diff --git a/services/mediatranscoding/tests/TranscodingUidPolicyTestApp/Android.bp b/services/mediatranscoding/tests/TranscodingUidPolicyTestApp/Android.bp
index 95a94fc..df00aa1 100644
--- a/services/mediatranscoding/tests/TranscodingUidPolicyTestApp/Android.bp
+++ b/services/mediatranscoding/tests/TranscodingUidPolicyTestApp/Android.bp
@@ -1,3 +1,14 @@
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_services_mediatranscoding_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: [
+        "frameworks_av_services_mediatranscoding_license",
+    ],
+}
+
 android_test_helper_app {
     name: "TranscodingUidPolicy_TestAppA",
     manifest: "TestAppA.xml",
@@ -20,4 +31,4 @@
     static_libs: ["androidx.test.rules"],
     sdk_version: "test_current",
     srcs: ["src/**/*.java"],
-}
\ No newline at end of file
+}
diff --git a/services/mediatranscoding/tests/mediatranscodingservice_real_tests.cpp b/services/mediatranscoding/tests/mediatranscodingservice_real_tests.cpp
index 0550d77..e9eebe2 100644
--- a/services/mediatranscoding/tests/mediatranscodingservice_real_tests.cpp
+++ b/services/mediatranscoding/tests/mediatranscodingservice_real_tests.cpp
@@ -33,7 +33,7 @@
 
 namespace media {
 
-constexpr int64_t kPaddingUs = 400000;
+constexpr int64_t kPaddingUs = 1000000;
 constexpr int64_t kSessionWithPaddingUs = 10000000 + kPaddingUs;
 constexpr int32_t kBitRate = 8 * 1000 * 1000;  // 8Mbs
 
@@ -56,8 +56,7 @@
     registerMultipleClients();
 
     const char* srcPath = "bad_file_uri";
-    const char* dstPath = OUTPATH(TestInvalidSource);
-    deleteFile(dstPath);
+    const char* dstPath = prepareOutputFile(OUTPATH(TestInvalidSource));
 
     // Submit one session.
     EXPECT_TRUE(
@@ -73,8 +72,7 @@
 TEST_F(MediaTranscodingServiceRealTest, TestPassthru) {
     registerMultipleClients();
 
-    const char* dstPath = OUTPATH(TestPassthru);
-    deleteFile(dstPath);
+    const char* dstPath = prepareOutputFile(OUTPATH(TestPassthru));
 
     // Submit one session.
     EXPECT_TRUE(mClient1->submit(0, kShortSrcPath, dstPath));
@@ -89,8 +87,7 @@
 TEST_F(MediaTranscodingServiceRealTest, TestTranscodeVideo) {
     registerMultipleClients();
 
-    const char* dstPath = OUTPATH(TestTranscodeVideo);
-    deleteFile(dstPath);
+    const char* dstPath = prepareOutputFile(OUTPATH(TestTranscodeVideo));
 
     // Submit one session.
     EXPECT_TRUE(mClient1->submit(0, kShortSrcPath, dstPath, TranscodingSessionPriority::kNormal,
@@ -106,8 +103,7 @@
 TEST_F(MediaTranscodingServiceRealTest, TestTranscodeVideoProgress) {
     registerMultipleClients();
 
-    const char* dstPath = OUTPATH(TestTranscodeVideoProgress);
-    deleteFile(dstPath);
+    const char* dstPath = prepareOutputFile(OUTPATH(TestTranscodeVideoProgress));
 
     // Submit one session.
     EXPECT_TRUE(mClient1->submit(0, kLongSrcPath, dstPath, TranscodingSessionPriority::kNormal,
@@ -134,11 +130,9 @@
 
     const char* srcPath0 = kLongSrcPath;
     const char* srcPath1 = kShortSrcPath;
-    const char* dstPath0 = OUTPATH(TestCancelImmediately_Session0);
-    const char* dstPath1 = OUTPATH(TestCancelImmediately_Session1);
+    const char* dstPath0 = prepareOutputFile(OUTPATH(TestCancelImmediately_Session0));
+    const char* dstPath1 = prepareOutputFile(OUTPATH(TestCancelImmediately_Session1));
 
-    deleteFile(dstPath0);
-    deleteFile(dstPath1);
     // Submit one session, should start immediately.
     EXPECT_TRUE(
             mClient1->submit(0, srcPath0, dstPath0, TranscodingSessionPriority::kNormal, kBitRate));
@@ -166,11 +160,9 @@
 
     const char* srcPath0 = kLongSrcPath;
     const char* srcPath1 = kShortSrcPath;
-    const char* dstPath0 = OUTPATH(TestCancelWhileRunning_Session0);
-    const char* dstPath1 = OUTPATH(TestCancelWhileRunning_Session1);
+    const char* dstPath0 = prepareOutputFile(OUTPATH(TestCancelWhileRunning_Session0));
+    const char* dstPath1 = prepareOutputFile(OUTPATH(TestCancelWhileRunning_Session1));
 
-    deleteFile(dstPath0);
-    deleteFile(dstPath1);
     // Submit two sessions, session 0 should start immediately, session 1 should be queued.
     EXPECT_TRUE(
             mClient1->submit(0, srcPath0, dstPath0, TranscodingSessionPriority::kNormal, kBitRate));
@@ -197,10 +189,8 @@
 
     const char* srcPath0 = kLongSrcPath;
     const char* srcPath1 = kShortSrcPath;
-    const char* dstPath0 = OUTPATH(TestPauseResumeSingleClient_Session0);
-    const char* dstPath1 = OUTPATH(TestPauseResumeSingleClient_Session1);
-    deleteFile(dstPath0);
-    deleteFile(dstPath1);
+    const char* dstPath0 = prepareOutputFile(OUTPATH(TestPauseResumeSingleClient_Session0));
+    const char* dstPath1 = prepareOutputFile(OUTPATH(TestPauseResumeSingleClient_Session1));
 
     // Submit one offline session, should start immediately.
     EXPECT_TRUE(mClient1->submit(0, srcPath0, dstPath0, TranscodingSessionPriority::kUnspecified,
@@ -244,20 +234,15 @@
 TEST_F(MediaTranscodingServiceRealTest, TestPauseResumeMultiClients) {
     ALOGD("TestPauseResumeMultiClients starting...");
 
-    EXPECT_TRUE(ShellHelper::RunCmd("input keyevent KEYCODE_WAKEUP"));
-    EXPECT_TRUE(ShellHelper::RunCmd("wm dismiss-keyguard"));
-    EXPECT_TRUE(ShellHelper::Stop(kClientPackageA));
-    EXPECT_TRUE(ShellHelper::Stop(kClientPackageB));
-    EXPECT_TRUE(ShellHelper::Stop(kClientPackageC));
+    dismissKeyguard();
+    stopAppPackages();
 
     registerMultipleClients();
 
     const char* srcPath0 = kLongSrcPath;
     const char* srcPath1 = kShortSrcPath;
-    const char* dstPath0 = OUTPATH(TestPauseResumeMultiClients_Client0);
-    const char* dstPath1 = OUTPATH(TestPauseResumeMultiClients_Client1);
-    deleteFile(dstPath0);
-    deleteFile(dstPath1);
+    const char* dstPath0 = prepareOutputFile(OUTPATH(TestPauseResumeMultiClients_Client0));
+    const char* dstPath1 = prepareOutputFile(OUTPATH(TestPauseResumeMultiClients_Client1));
 
     ALOGD("Moving app A to top...");
     EXPECT_TRUE(ShellHelper::Start(kClientPackageA, kTestActivityName));
@@ -294,12 +279,177 @@
 
     unregisterMultipleClients();
 
-    EXPECT_TRUE(ShellHelper::Stop(kClientPackageA));
-    EXPECT_TRUE(ShellHelper::Stop(kClientPackageB));
-    EXPECT_TRUE(ShellHelper::Stop(kClientPackageC));
+    stopAppPackages();
 
     ALOGD("TestPauseResumeMultiClients finished.");
 }
 
+TEST_F(MediaTranscodingServiceRealTest, TestUidGoneForeground) {
+    ALOGD("TestUidGoneForeground starting...");
+
+    dismissKeyguard();
+    stopAppPackages();
+
+    registerMultipleClients();
+
+    const char* dstPath0 = prepareOutputFile(OUTPATH(TestUidGoneForegroundSession0));
+    const char* dstPath1 = prepareOutputFile(OUTPATH(TestUidGoneForegroundSession1));
+
+    // Test kill foreground app, using only 1 uid.
+    ALOGD("Moving app A to top...");
+    EXPECT_TRUE(ShellHelper::Start(kClientPackageA, kTestActivityName));
+
+    // Submit sessions to Client1 (app A).
+    ALOGD("Submitting sessions to client1 (app A) ...");
+    EXPECT_TRUE(mClient1->submit(0, kLongSrcPath, dstPath0, TranscodingSessionPriority::kNormal,
+                                 kBitRate));
+    EXPECT_EQ(mClient1->pop(kPaddingUs), EventTracker::Start(CLIENT(1), 0));
+    EXPECT_TRUE(mClient1->submit(1, kLongSrcPath, dstPath1, TranscodingSessionPriority::kNormal,
+                                 kBitRate));
+    EXPECT_EQ(mClient1->pop(kPaddingUs), EventTracker::NoEvent);
+
+    // Kill app A, expect both of A's sessions to be cancelled with error
+    // code kUidGoneCancelled.
+    EXPECT_TRUE(ShellHelper::Stop(kClientPackageA));
+    EXPECT_EQ(mClient1->pop(kPaddingUs), EventTracker::Failed(CLIENT(1), 0));
+    EXPECT_EQ(mClient1->getLastError(), TranscodingErrorCode::kUidGoneCancelled);
+    EXPECT_EQ(mClient1->pop(kPaddingUs), EventTracker::Failed(CLIENT(1), 1));
+    EXPECT_EQ(mClient1->getLastError(), TranscodingErrorCode::kUidGoneCancelled);
+
+    unregisterMultipleClients();
+
+    stopAppPackages();
+
+    ALOGD("TestUidGoneForeground finished.");
+}
+
+TEST_F(MediaTranscodingServiceRealTest, TestUidGoneForegroundMultiUids) {
+    ALOGD("TestUidGoneForegroundMultiUids starting...");
+
+    dismissKeyguard();
+    stopAppPackages();
+
+    registerMultipleClients();
+
+    const char* dstPath0 = prepareOutputFile(OUTPATH(TestUidGoneForegroundSession0));
+    const char* dstPath1 = prepareOutputFile(OUTPATH(TestUidGoneForegroundSession1));
+
+    // Test kill foreground app, using two uids.
+    ALOGD("Moving app B to top...");
+    EXPECT_TRUE(ShellHelper::Start(kClientPackageB, kTestActivityName));
+    EXPECT_TRUE(mClient2->submit(0, kLongSrcPath, dstPath0, TranscodingSessionPriority::kNormal,
+                                 kBitRate));
+    EXPECT_EQ(mClient2->pop(kPaddingUs), EventTracker::Start(CLIENT(2), 0));
+    EXPECT_TRUE(mClient2->submit(1, kLongSrcPath, dstPath1, TranscodingSessionPriority::kNormal,
+                                 kBitRate));
+    EXPECT_EQ(mClient2->pop(kPaddingUs), EventTracker::NoEvent);
+    // Make app A also request session 1.
+    EXPECT_TRUE(mClient2->addClientUid(1, mClient1->mClientUid));
+
+    ALOGD("Moving app A to top...");
+    EXPECT_TRUE(ShellHelper::Start(kClientPackageA, kTestActivityName));
+    EXPECT_EQ(mClient2->pop(kPaddingUs), EventTracker::Pause(CLIENT(2), 0));
+    EXPECT_EQ(mClient2->pop(kPaddingUs), EventTracker::Start(CLIENT(2), 1));
+
+    // Kill app A, CLIENT(2)'s session 1 should continue because it's also requested by app B.
+    EXPECT_TRUE(ShellHelper::Stop(kClientPackageA));
+    EXPECT_EQ(mClient2->pop(kPaddingUs), EventTracker::NoEvent);
+
+    // Kill app B, sessions should be cancelled.
+    EXPECT_TRUE(ShellHelper::Stop(kClientPackageB));
+    EXPECT_EQ(mClient2->pop(kPaddingUs), EventTracker::Failed(CLIENT(2), 0));
+    EXPECT_EQ(mClient2->getLastError(), TranscodingErrorCode::kUidGoneCancelled);
+    EXPECT_EQ(mClient2->pop(kPaddingUs), EventTracker::Failed(CLIENT(2), 1));
+    EXPECT_EQ(mClient2->getLastError(), TranscodingErrorCode::kUidGoneCancelled);
+
+    unregisterMultipleClients();
+
+    stopAppPackages();
+
+    ALOGD("TestUidGoneForegroundMultiUids finished.");
+}
+
+TEST_F(MediaTranscodingServiceRealTest, TestUidGoneBackground) {
+    ALOGD("TestUidGoneBackground starting...");
+
+    dismissKeyguard();
+    stopAppPackages();
+
+    registerMultipleClients();
+
+    const char* dstPath0 = prepareOutputFile(OUTPATH(TestUidGoneForegroundSession0));
+    const char* dstPath1 = prepareOutputFile(OUTPATH(TestUidGoneForegroundSession1));
+
+    // Test kill background app, using two uids.
+    ALOGD("Moving app B to top...");
+    EXPECT_TRUE(ShellHelper::Start(kClientPackageB, kTestActivityName));
+    EXPECT_TRUE(mClient2->submit(0, kLongSrcPath, dstPath0, TranscodingSessionPriority::kNormal,
+                                 kBitRate));
+    EXPECT_EQ(mClient2->pop(kPaddingUs), EventTracker::Start(CLIENT(2), 0));
+    EXPECT_TRUE(mClient2->submit(1, kLongSrcPath, dstPath1, TranscodingSessionPriority::kNormal,
+                                 kBitRate));
+    EXPECT_EQ(mClient2->pop(kPaddingUs), EventTracker::NoEvent);
+
+    ALOGD("Moving app A to top...");
+    EXPECT_TRUE(ShellHelper::Start(kClientPackageA, kTestActivityName));
+    EXPECT_TRUE(mClient1->submit(0, kLongSrcPath, dstPath0, TranscodingSessionPriority::kNormal,
+                                 kBitRate));
+    EXPECT_EQ(mClient2->pop(kPaddingUs), EventTracker::Pause(CLIENT(2), 0));
+    EXPECT_EQ(mClient1->pop(kPaddingUs), EventTracker::Start(CLIENT(1), 0));
+
+    // Kill app B, all its sessions should be cancelled.
+    EXPECT_TRUE(ShellHelper::Stop(kClientPackageB));
+    EXPECT_EQ(mClient2->pop(kPaddingUs), EventTracker::Failed(CLIENT(2), 0));
+    EXPECT_EQ(mClient2->getLastError(), TranscodingErrorCode::kUidGoneCancelled);
+    EXPECT_EQ(mClient2->pop(kPaddingUs), EventTracker::Failed(CLIENT(2), 1));
+    EXPECT_EQ(mClient2->getLastError(), TranscodingErrorCode::kUidGoneCancelled);
+
+    unregisterMultipleClients();
+
+    stopAppPackages();
+
+    ALOGD("TestUidGoneBackground finished.");
+}
+
+TEST_F(MediaTranscodingServiceRealTest, TestUidGoneBackgroundMultiUids) {
+    ALOGD("TestUidGoneBackgroundMultiUids starting...");
+
+    dismissKeyguard();
+    stopAppPackages();
+
+    registerMultipleClients();
+
+    const char* dstPath0 = prepareOutputFile(OUTPATH(TestUidGoneForegroundSession0));
+    const char* dstPath1 = prepareOutputFile(OUTPATH(TestUidGoneForegroundSession1));
+
+    // Test kill background app, using two uids.
+    ALOGD("Moving app B to top...");
+    EXPECT_TRUE(ShellHelper::Start(kClientPackageB, kTestActivityName));
+    EXPECT_TRUE(mClient2->submit(0, kLongSrcPath, dstPath0, TranscodingSessionPriority::kNormal,
+                                 kBitRate));
+    EXPECT_EQ(mClient2->pop(kPaddingUs), EventTracker::Start(CLIENT(2), 0));
+    EXPECT_TRUE(mClient2->submit(1, kLongSrcPath, dstPath1, TranscodingSessionPriority::kNormal,
+                                 kBitRate));
+    EXPECT_EQ(mClient2->pop(kPaddingUs), EventTracker::NoEvent);
+    // Make app A also request session 1.
+    EXPECT_TRUE(mClient2->addClientUid(1, mClient1->mClientUid));
+
+    ALOGD("Moving app A to top...");
+    EXPECT_TRUE(ShellHelper::Start(kClientPackageA, kTestActivityName));
+    EXPECT_EQ(mClient2->pop(kPaddingUs), EventTracker::Pause(CLIENT(2), 0));
+    EXPECT_EQ(mClient2->pop(kPaddingUs), EventTracker::Start(CLIENT(2), 1));
+
+    // Kill app B, CLIENT(2)'s session 1 should continue to run, session 0 on
+    // the other hand should be cancelled.
+    EXPECT_TRUE(ShellHelper::Stop(kClientPackageB));
+    EXPECT_EQ(mClient2->pop(kPaddingUs), EventTracker::Failed(CLIENT(2), 0));
+    EXPECT_EQ(mClient2->getLastError(), TranscodingErrorCode::kUidGoneCancelled);
+
+    unregisterMultipleClients();
+
+    stopAppPackages();
+
+    ALOGD("TestUidGoneBackgroundMultiUids finished.");
+}
+
 }  // namespace media
 }  // namespace android
diff --git a/services/mediatranscoding/tests/mediatranscodingservice_simulated_tests.cpp b/services/mediatranscoding/tests/mediatranscodingservice_simulated_tests.cpp
index 601bb1b..cb354f4 100644
--- a/services/mediatranscoding/tests/mediatranscodingservice_simulated_tests.cpp
+++ b/services/mediatranscoding/tests/mediatranscodingservice_simulated_tests.cpp
@@ -36,6 +36,7 @@
 
 #include <iostream>
 #include <list>
+#include <unordered_set>
 
 #include "MediaTranscodingServiceTestHelper.h"
 #include "SimulatedTranscoder.h"
@@ -53,6 +54,11 @@
 
 constexpr int64_t kPaddingUs = 1000000;
 constexpr int64_t kSessionWithPaddingUs = SimulatedTranscoder::kSessionDurationUs + kPaddingUs;
+constexpr int64_t kWatchdogTimeoutUs = 3000000;
+// Pacer settings used for simulated tests. Listed here for reference.
+constexpr int32_t kSimulatedPacerBurstThresholdMs = 500;
+//constexpr int32_t kSimulatedPacerBurstCountQuota = 10;
+//constexpr int32_t kSimulatedPacerBurstTimeQuotaSec = 3;
 
 constexpr const char* kClientOpPackageName = "TestClientPackage";
 
@@ -63,6 +69,25 @@
     virtual ~MediaTranscodingServiceSimulatedTest() {
         ALOGI("MediaTranscodingServiceResourceTest destroyed");
     }
+
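+    // Submits numSubmits sessions of sessionDurationMs each; the first expectedSuccess
+    // sessions are expected to finish, while the remaining ones should be dropped by
+    // the pacer with error kDroppedByService.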
+    void testPacerHelper(int numSubmits, int sessionDurationMs, int expectedSuccess) {
+        // Idle to clear out burst history.
+        usleep(kSimulatedPacerBurstThresholdMs * 2 * 1000);
+        for (int i = 0; i < numSubmits; i++) {
+            EXPECT_TRUE(mClient3->submit(i, "test_source_file_0", "test_destination_file_0",
+                                         TranscodingSessionPriority::kNormal, -1 /*bitrateBps*/,
+                                         -1 /*overridePid*/, -1 /*overrideUid*/,
+                                         sessionDurationMs));
+        }
+        for (int i = 0; i < expectedSuccess; i++) {
+            EXPECT_EQ(mClient3->pop(kPaddingUs), EventTracker::Start(CLIENT(3), i));
+            EXPECT_EQ(mClient3->pop(kSessionWithPaddingUs), EventTracker::Finished(CLIENT(3), i));
+        }
+        for (int i = expectedSuccess; i < numSubmits; i++) {
+            EXPECT_EQ(mClient3->pop(kPaddingUs), EventTracker::Failed(CLIENT(3), i));
+            EXPECT_EQ(mClient3->getLastError(), TranscodingErrorCode::kDroppedByService);
+        }
+    }
 };
 
 TEST_F(MediaTranscodingServiceSimulatedTest, TestRegisterNullClient) {
@@ -231,6 +256,54 @@
     unregisterMultipleClients();
 }
 
+TEST_F(MediaTranscodingServiceSimulatedTest, TestAddGetClientUids) {
+    registerMultipleClients();
+
+    std::vector<int32_t> clientUids;
+    TranscodingRequestParcel request;
+    TranscodingSessionParcel session;
+    uid_t ownUid = ::getuid();
+
+    // Submit one real-time session.
+    EXPECT_TRUE(mClient1->submit(0, "test_source_file_0", "test_destination_file"));
+
+    // Should have mClientUid in client uid list.
+    EXPECT_TRUE(mClient1->getClientUids(0, &clientUids));
+    EXPECT_EQ(clientUids.size(), 1u);
+    EXPECT_EQ(clientUids[0], (int32_t)mClient1->mClientUid);
+
+    // Adding invalid client uid should fail.
+    EXPECT_TRUE(mClient1->addClientUid<fail>(0, kInvalidClientUid));
+
+    // Adding mClientUid again should fail.
+    EXPECT_TRUE(mClient1->addClientUid<fail>(0, mClient1->mClientUid));
+
+    // Submit one offline session.
+    EXPECT_TRUE(mClient1->submit(1, "test_source_file_1", "test_destination_file_1",
+                                 TranscodingSessionPriority::kUnspecified));
+
+    // Should not have any uids in client uid list.
+    EXPECT_TRUE(mClient1->getClientUids(1, &clientUids));
+    EXPECT_EQ(clientUids.size(), 0u);
+
+    // Add own uid (with IMediaTranscodingService::USE_CALLING_UID), should succeed.
+    EXPECT_TRUE(mClient1->addClientUid(1, IMediaTranscodingService::USE_CALLING_UID));
+    EXPECT_TRUE(mClient1->getClientUids(1, &clientUids));
+    EXPECT_EQ(clientUids.size(), 1u);
+    EXPECT_EQ(clientUids[0], (int32_t)ownUid);
+
+    // Adding mClientUid should succeed.
+    EXPECT_TRUE(mClient1->addClientUid(1, mClient1->mClientUid));
+    EXPECT_TRUE(mClient1->getClientUids(1, &clientUids));
+    std::unordered_set<uid_t> uidSet;
+    uidSet.insert(clientUids.begin(), clientUids.end());
+    EXPECT_EQ(uidSet.size(), 2u);
+    EXPECT_EQ(uidSet.count(ownUid), 1u);
+    EXPECT_EQ(uidSet.count(mClient1->mClientUid), 1u);
+
+    unregisterMultipleClients();
+}
+
 TEST_F(MediaTranscodingServiceSimulatedTest, TestSubmitCancelWithOfflineSessions) {
     registerMultipleClients();
 
@@ -354,6 +427,53 @@
     ALOGD("TestTranscodingUidPolicy finished.");
 }
 
+TEST_F(MediaTranscodingServiceSimulatedTest, TestTranscodingUidPolicyWithMultipleClientUids) {
+    ALOGD("TestTranscodingUidPolicyWithMultipleClientUids starting...");
+
+    EXPECT_TRUE(ShellHelper::RunCmd("input keyevent KEYCODE_WAKEUP"));
+    EXPECT_TRUE(ShellHelper::RunCmd("wm dismiss-keyguard"));
+    EXPECT_TRUE(ShellHelper::Stop(kClientPackageA));
+    EXPECT_TRUE(ShellHelper::Stop(kClientPackageB));
+    EXPECT_TRUE(ShellHelper::Stop(kClientPackageC));
+
+    registerMultipleClients();
+
+    ALOGD("Moving app A to top...");
+    EXPECT_TRUE(ShellHelper::Start(kClientPackageA, kTestActivityName));
+
+    // Submit 3 requests.
+    ALOGD("Submitting session to client1 (app A)...");
+    EXPECT_TRUE(mClient1->submit(0, "test_source_file_0", "test_destination_file_0"));
+    EXPECT_TRUE(mClient1->submit(1, "test_source_file_1", "test_destination_file_1"));
+    EXPECT_TRUE(mClient1->submit(2, "test_source_file_2", "test_destination_file_2"));
+
+    // mClient1's Session 0 should start immediately.
+    EXPECT_EQ(mClient1->pop(kPaddingUs), EventTracker::Start(CLIENT(1), 0));
+
+    // Add client2 (app B)'s uid to mClient1's session 1.
+    EXPECT_TRUE(mClient1->addClientUid(1, mClient2->mClientUid));
+
+    ALOGD("Moving app B to top...");
+    EXPECT_TRUE(ShellHelper::Start(kClientPackageB, kTestActivityName));
+
+    // mClient1's session 0 should pause, session 1 should start.
+    EXPECT_EQ(mClient1->pop(kPaddingUs), EventTracker::Pause(CLIENT(1), 0));
+    EXPECT_EQ(mClient1->pop(kPaddingUs), EventTracker::Start(CLIENT(1), 1));
+
+    ALOGD("Moving app A back to top...");
+    EXPECT_TRUE(ShellHelper::Start(kClientPackageA, kTestActivityName));
+    EXPECT_EQ(mClient1->pop(kSessionWithPaddingUs), EventTracker::Finished(CLIENT(1), 1));
+    EXPECT_EQ(mClient1->pop(kPaddingUs), EventTracker::Resume(CLIENT(1), 0));
+
+    unregisterMultipleClients();
+
+    EXPECT_TRUE(ShellHelper::Stop(kClientPackageA));
+    EXPECT_TRUE(ShellHelper::Stop(kClientPackageB));
+    EXPECT_TRUE(ShellHelper::Stop(kClientPackageC));
+
+    ALOGD("TestTranscodingUidPolicyWithMultipleClientUids finished.");
+}
+
 TEST_F(MediaTranscodingServiceSimulatedTest, TestTranscodingThermalPolicy) {
     ALOGD("TestTranscodingThermalPolicy starting...");
 
@@ -385,5 +505,64 @@
 
     ALOGD("TestTranscodingThermalPolicy finished.");
 }
+
+TEST_F(MediaTranscodingServiceSimulatedTest, TestTranscodingWatchdog) {
+    ALOGD("TestTranscodingWatchdog starting...");
+
+    registerMultipleClients();
+
+    // SimulatedTranscoder itself does not send heartbeats. Its sessions last 1 second
+    // by default, so the timeout will not happen normally.
+    // Here we run a 4000ms session via TranscodingTestConfig. This triggers a watchdog
+    // timeout on the server side, which we use to check that the error code is correct.
+    EXPECT_TRUE(mClient1->submit(
+            0, "test_source_file_0", "test_destination_file_0", TranscodingSessionPriority::kNormal,
+            -1 /*bitrateBps*/, -1 /*overridePid*/, -1 /*overrideUid*/, 4000 /*sessionDurationMs*/));
+    EXPECT_EQ(mClient1->pop(100000), EventTracker::Start(CLIENT(1), 0));
+    EXPECT_EQ(mClient1->pop(kWatchdogTimeoutUs - 100000), EventTracker::NoEvent);
+    EXPECT_EQ(mClient1->pop(200000), EventTracker::Failed(CLIENT(1), 0));
+    EXPECT_EQ(mClient1->getLastError(), TranscodingErrorCode::kWatchdogTimeout);
+
+    // After the timeout, submit another request and check it's finished.
+    EXPECT_TRUE(mClient1->submit(1, "test_source_file_1", "test_destination_file_1"));
+    EXPECT_EQ(mClient1->pop(kPaddingUs), EventTracker::Start(CLIENT(1), 1));
+    EXPECT_EQ(mClient1->pop(kSessionWithPaddingUs), EventTracker::Finished(CLIENT(1), 1));
+
+    unregisterMultipleClients();
+
+    ALOGD("TestTranscodingWatchdog finished.");
+}
+
+TEST_F(MediaTranscodingServiceSimulatedTest, TestTranscodingPacerOverCountQuotaOnly) {
+    ALOGD("TestTranscodingPacerOverCountQuotaOnly starting...");
+
+    registerMultipleClients();
+    testPacerHelper(12 /*numSubmits*/, 100 /*sessionDurationMs*/, 12 /*expectedSuccess*/);
+    unregisterMultipleClients();
+
+    ALOGD("TestTranscodingPacerOverCountQuotaOnly finished.");
+}
+
+TEST_F(MediaTranscodingServiceSimulatedTest, TestTranscodingPacerOverTimeQuotaOnly) {
+    ALOGD("TestTranscodingPacerOverTimeQuotaOnly starting...");
+
+    registerMultipleClients();
+    testPacerHelper(5 /*numSubmits*/, 1000 /*sessionDurationMs*/, 5 /*expectedSuccess*/);
+    unregisterMultipleClients();
+
+    ALOGD("TestTranscodingPacerOverTimeQuotaOnly finished.");
+}
+
+TEST_F(MediaTranscodingServiceSimulatedTest, TestTranscodingPacerOverQuota) {
+    ALOGD("TestTranscodingPacerOverQuota starting...");
+
+    registerMultipleClients();
+    testPacerHelper(12 /*numSubmits*/, 400 /*sessionDurationMs*/, 10 /*expectedSuccess*/);
+    unregisterMultipleClients();
+
+    // Idle to clear out burst history. Since we expect it to actually fail, wait for cooldown.
+    ALOGD("TestTranscodingPacerOverQuota finished.");
+}
+
 }  // namespace media
 }  // namespace android
diff --git a/services/oboeservice/AAudioEndpointManager.cpp b/services/oboeservice/AAudioEndpointManager.cpp
index 407f6d5..3224cfc 100644
--- a/services/oboeservice/AAudioEndpointManager.cpp
+++ b/services/oboeservice/AAudioEndpointManager.cpp
@@ -24,6 +24,7 @@
 #include <mutex>
 #include <sstream>
 #include <utility/AAudioUtilities.h>
+#include <media/AidlConversion.h>
 
 #include "AAudioClientTracker.h"
 #include "AAudioEndpointManager.h"
@@ -182,7 +183,9 @@
             // and START calls. This will help preserve app compatibility.
             // An app can avoid having this happen by closing their streams when
             // the app is paused.
-            AAudioClientTracker::getInstance().setExclusiveEnabled(request.getProcessId(), false);
+            pid_t pid = VALUE_OR_FATAL(
+                aidl2legacy_int32_t_pid_t(request.getIdentity().pid));
+            AAudioClientTracker::getInstance().setExclusiveEnabled(pid, false);
             endpointToSteal = endpoint; // return it to caller
         }
         return nullptr;
diff --git a/services/oboeservice/AAudioService.cpp b/services/oboeservice/AAudioService.cpp
index 69e58f6..0b69bf6 100644
--- a/services/oboeservice/AAudioService.cpp
+++ b/services/oboeservice/AAudioService.cpp
@@ -22,7 +22,9 @@
 #include <iostream>
 #include <sstream>
 
+#include <android/media/permission/Identity.h>
 #include <aaudio/AAudio.h>
+#include <media/AidlConversion.h>
 #include <mediautils/ServiceUtilities.h>
 #include <utils/String16.h>
 
@@ -37,18 +39,26 @@
 using namespace aaudio;
 
 #define MAX_STREAMS_PER_PROCESS   8
-#define AIDL_RETURN(x) *_aidl_return = (x); return Status::ok();
+#define AIDL_RETURN(x) { *_aidl_return = (x); return Status::ok(); }
 
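+// Evaluates an expression that yields a ConversionResult; if the conversion failed,
+// replies to the AIDL call with AAUDIO_ERROR_ILLEGAL_ARGUMENT, otherwise unwraps the value.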
+#define VALUE_OR_RETURN_ILLEGAL_ARG_STATUS(x) \
+    ({ auto _tmp = (x); \
+       if (!_tmp.ok()) AIDL_RETURN(AAUDIO_ERROR_ILLEGAL_ARGUMENT); \
+       std::move(_tmp.value()); })
 
 using android::AAudioService;
+using android::media::permission::Identity;
 using binder::Status;
 
 android::AAudioService::AAudioService()
     : BnAAudioService(),
       mAdapter(this) {
-    mAudioClient.clientUid = getuid();   // TODO consider using geteuid()
-    mAudioClient.clientPid = getpid();
-    mAudioClient.packageName = String16("");
+    // TODO consider using geteuid()
+    // TODO b/182392769: use identity util
+    mAudioClient.identity.uid = VALUE_OR_FATAL(legacy2aidl_uid_t_int32_t(getuid()));
+    mAudioClient.identity.pid = VALUE_OR_FATAL(legacy2aidl_pid_t_int32_t(getpid()));
+    mAudioClient.identity.packageName = std::nullopt;
+    mAudioClient.identity.attributionTag = std::nullopt;
     AAudioClientTracker::getInstance().setAAudioService(this);
 }
 
@@ -105,8 +115,13 @@
     aaudio_sharing_mode_t sharingMode = configurationInput.getSharingMode();
 
     // Enforce limit on client processes.
-    pid_t pid = request.getProcessId();
-    if (pid != mAudioClient.clientPid) {
+    Identity callingIdentity = request.getIdentity();
+    pid_t pid = IPCThreadState::self()->getCallingPid();
+    callingIdentity.pid = VALUE_OR_RETURN_ILLEGAL_ARG_STATUS(
+        legacy2aidl_pid_t_int32_t(pid));
+    callingIdentity.uid = VALUE_OR_RETURN_ILLEGAL_ARG_STATUS(
+        legacy2aidl_uid_t_int32_t(IPCThreadState::self()->getCallingUid()));
+    if (callingIdentity.pid != mAudioClient.identity.pid) {
         int32_t count = AAudioClientTracker::getInstance().getStreamCount(pid);
         if (count >= MAX_STREAMS_PER_PROCESS) {
             ALOGE("openStream(): exceeded max streams per process %d >= %d",
@@ -121,7 +136,7 @@
     }
 
     if (sharingMode == AAUDIO_SHARING_MODE_EXCLUSIVE
-        && AAudioClientTracker::getInstance().isExclusiveEnabled(request.getProcessId())) {
+        && AAudioClientTracker::getInstance().isExclusiveEnabled(pid)) {
         // only trust audioserver for in service indication
         bool inService = false;
         if (isCallerInService()) {
@@ -154,7 +169,6 @@
     } else {
         aaudio_handle_t handle = mStreamTracker.addStreamForHandle(serviceStream.get());
         serviceStream->setHandle(handle);
-        pid_t pid = request.getProcessId();
         AAudioClientTracker::getInstance().registerClientStream(pid, serviceStream);
         paramsOut.copyFrom(*serviceStream);
         *_paramsOut = std::move(paramsOut).parcelable();
@@ -266,8 +280,10 @@
 }
 
 bool AAudioService::isCallerInService() {
-    return mAudioClient.clientPid == IPCThreadState::self()->getCallingPid() &&
-        mAudioClient.clientUid == IPCThreadState::self()->getCallingUid();
+    pid_t clientPid = VALUE_OR_FATAL(aidl2legacy_int32_t_pid_t(mAudioClient.identity.pid));
+    uid_t clientUid = VALUE_OR_FATAL(aidl2legacy_int32_t_uid_t(mAudioClient.identity.uid));
+    return clientPid == IPCThreadState::self()->getCallingPid() &&
+        clientUid == IPCThreadState::self()->getCallingUid();
 }
 
 aaudio_result_t AAudioService::closeStream(sp<AAudioServiceStreamBase> serviceStream) {
@@ -290,9 +306,11 @@
         // Only allow owner or the aaudio service to access the stream.
         const uid_t callingUserId = IPCThreadState::self()->getCallingUid();
         const uid_t ownerUserId = serviceStream->getOwnerUserId();
+        const uid_t clientUid = VALUE_OR_FATAL(
+            aidl2legacy_int32_t_uid_t(mAudioClient.identity.uid));
         bool callerOwnsIt = callingUserId == ownerUserId;
-        bool serverCalling = callingUserId == mAudioClient.clientUid;
-        bool serverOwnsIt = ownerUserId == mAudioClient.clientUid;
+        bool serverCalling = callingUserId == clientUid;
+        bool serverOwnsIt = ownerUserId == clientUid;
         bool allowed = callerOwnsIt || serverCalling || serverOwnsIt;
         if (!allowed) {
             ALOGE("AAudioService: calling uid %d cannot access stream 0x%08X owned by %d",
diff --git a/services/oboeservice/AAudioServiceEndpointMMAP.cpp b/services/oboeservice/AAudioServiceEndpointMMAP.cpp
index 85b2057..556710d 100644
--- a/services/oboeservice/AAudioServiceEndpointMMAP.cpp
+++ b/services/oboeservice/AAudioServiceEndpointMMAP.cpp
@@ -73,9 +73,12 @@
 aaudio_result_t AAudioServiceEndpointMMAP::open(const aaudio::AAudioStreamRequest &request) {
     aaudio_result_t result = AAUDIO_OK;
     copyFrom(request.getConstantConfiguration());
-    mMmapClient.clientUid = request.getUserId();
-    mMmapClient.clientPid = request.getProcessId();
-    mMmapClient.packageName.setTo(String16(""));
+    mMmapClient.identity = request.getIdentity();
+    // TODO b/182392769: use identity util
+    mMmapClient.identity.uid = VALUE_OR_FATAL(
+        legacy2aidl_uid_t_int32_t(IPCThreadState::self()->getCallingUid()));
+    mMmapClient.identity.pid = VALUE_OR_FATAL(
+        legacy2aidl_pid_t_int32_t(IPCThreadState::self()->getCallingPid()));
 
     audio_format_t audioFormat = getFormat();
 
@@ -159,8 +162,8 @@
                                                           this, // callback
                                                           mMmapStream,
                                                           &mPortHandle);
-    ALOGD("%s() mMapClient.uid = %d, pid = %d => portHandle = %d\n",
-          __func__, mMmapClient.clientUid,  mMmapClient.clientPid, mPortHandle);
+    ALOGD("%s() mMmapClient.identity = %s => portHandle = %d\n",
+          __func__, mMmapClient.identity.toString().c_str(), mPortHandle);
     if (status != OK) {
         // This can happen if the resource is busy or the config does
         // not match the hardware.
@@ -208,8 +211,9 @@
     setBufferCapacity(mMmapBufferinfo.buffer_size_frames);
     if (!isBufferShareable) {
         // Exclusive mode can only be used by the service because the FD cannot be shared.
-        uid_t audioServiceUid = getuid();
-        if ((mMmapClient.clientUid != audioServiceUid) &&
+        int32_t audioServiceUid =
+            VALUE_OR_FATAL(legacy2aidl_uid_t_int32_t(getuid()));
+        if ((mMmapClient.identity.uid != audioServiceUid) &&
             getSharingMode() == AAUDIO_SHARING_MODE_EXCLUSIVE) {
             ALOGW("%s() - exclusive FD cannot be used by client", __func__);
             result = AAUDIO_ERROR_UNAVAILABLE;
diff --git a/services/oboeservice/AAudioServiceEndpointShared.cpp b/services/oboeservice/AAudioServiceEndpointShared.cpp
index 501e8c0..0d453cf 100644
--- a/services/oboeservice/AAudioServiceEndpointShared.cpp
+++ b/services/oboeservice/AAudioServiceEndpointShared.cpp
@@ -111,7 +111,7 @@
     if (!endpoint->isConnected()) {
         ALOGD("%s() call safeReleaseCloseFromCallback()", __func__);
         // Release and close under a lock with no check for callback collisions.
-        endpoint->getStreamInternal()->safeReleaseCloseFromCallback();
+        endpoint->getStreamInternal()->safeReleaseCloseInternal();
     }
 
     return result;
diff --git a/services/oboeservice/AAudioServiceStreamBase.cpp b/services/oboeservice/AAudioServiceStreamBase.cpp
index 7edc25c..8638f36 100644
--- a/services/oboeservice/AAudioServiceStreamBase.cpp
+++ b/services/oboeservice/AAudioServiceStreamBase.cpp
@@ -39,6 +39,8 @@
 using namespace android;  // TODO just import names needed
 using namespace aaudio;   // TODO just import names needed
 
+using media::permission::Identity;
+
 /**
  * Base class for streams in the service.
  * @return
@@ -48,9 +50,7 @@
         : mTimestampThread("AATime")
         , mAtomicStreamTimestamp()
         , mAudioService(audioService) {
-    mMmapClient.clientUid = -1;
-    mMmapClient.clientPid = -1;
-    mMmapClient.packageName = String16("");
+    mMmapClient.identity = Identity();
 }
 
 AAudioServiceStreamBase::~AAudioServiceStreamBase() {
@@ -82,7 +82,7 @@
 
     result << "    0x" << std::setfill('0') << std::setw(8) << std::hex << mHandle
            << std::dec << std::setfill(' ') ;
-    result << std::setw(6) << mMmapClient.clientUid;
+    result << std::setw(6) << mMmapClient.identity.uid;
     result << std::setw(7) << mClientHandle;
     result << std::setw(4) << (isRunning() ? "yes" : " no");
     result << std::setw(6) << getState();
@@ -128,9 +128,12 @@
     AAudioEndpointManager &mEndpointManager = AAudioEndpointManager::getInstance();
     aaudio_result_t result = AAUDIO_OK;
 
-    mMmapClient.clientUid = request.getUserId();
-    mMmapClient.clientPid = request.getProcessId();
-    mMmapClient.packageName.setTo(String16("")); // TODO What should we do here?
+    mMmapClient.identity = request.getIdentity();
+    // TODO b/182392769: use identity util
+    mMmapClient.identity.uid = VALUE_OR_FATAL(
+        legacy2aidl_uid_t_int32_t(IPCThreadState::self()->getCallingUid()));
+    mMmapClient.identity.pid = VALUE_OR_FATAL(
+        legacy2aidl_pid_t_int32_t(IPCThreadState::self()->getCallingPid()));
 
     // Limit scope of lock to avoid recursive lock in close().
     {
@@ -288,10 +291,6 @@
             .set(AMEDIAMETRICS_PROP_STATUS, (int32_t)result)
             .record(); });
 
-    // Send it now because the timestamp gets rounded up when stopStream() is called below.
-    // Also we don't need the timestamps while we are shutting down.
-    sendCurrentTimestamp();
-
     result = stopTimestampThread();
     if (result != AAUDIO_OK) {
         disconnect_l();
@@ -337,9 +336,6 @@
 
     setState(AAUDIO_STREAM_STATE_STOPPING);
 
-    // Send it now because the timestamp gets rounded up when stopStream() is called below.
-    // Also we don't need the timestamps while we are shutting down.
-    sendCurrentTimestamp(); // warning - this calls a virtual function
     result = stopTimestampThread();
     if (result != AAUDIO_OK) {
         disconnect_l();
@@ -405,10 +401,11 @@
     timestampScheduler.start(AudioClock::getNanoseconds());
     int64_t nextTime = timestampScheduler.nextAbsoluteTime();
     int32_t loopCount = 0;
+    aaudio_result_t result = AAUDIO_OK;
     while(mThreadEnabled.load()) {
         loopCount++;
         if (AudioClock::getNanoseconds() >= nextTime) {
-            aaudio_result_t result = sendCurrentTimestamp();
+            result = sendCurrentTimestamp();
             if (result != AAUDIO_OK) {
                 ALOGE("%s() timestamp thread got result = %d", __func__, result);
                 break;
@@ -420,6 +417,11 @@
             AudioClock::sleepUntilNanoTime(nextTime);
         }
     }
+    // This call was moved here from stop_l() and pause_l(), where it could cause a
+    // deadlock if it resulted in a call to disconnect.
+    if (result == AAUDIO_OK) {
+        (void) sendCurrentTimestamp();
+    }
     ALOGD("%s() %s exiting after %d loops <<<<<<<<<<<<<< TIMESTAMPS",
           __func__, getTypeText(), loopCount);
 }
diff --git a/services/oboeservice/AAudioServiceStreamBase.h b/services/oboeservice/AAudioServiceStreamBase.h
index 0f752b7..8e5c8ef 100644
--- a/services/oboeservice/AAudioServiceStreamBase.h
+++ b/services/oboeservice/AAudioServiceStreamBase.h
@@ -21,6 +21,7 @@
 #include <mutex>
 
 #include <android-base/thread_annotations.h>
+#include <media/AidlConversion.h>
 #include <media/AudioClient.h>
 #include <utils/RefBase.h>
 
@@ -79,7 +80,7 @@
     // because we had to wait until we generated the handle.
     void logOpen(aaudio_handle_t streamHandle);
 
-    aaudio_result_t close();
+    aaudio_result_t close() EXCLUDES(mLock);
 
     /**
      * Start the flow of audio data.
@@ -87,7 +88,7 @@
      * This is not guaranteed to be synchronous but it currently is.
      * An AAUDIO_SERVICE_EVENT_STARTED will be sent to the client when complete.
      */
-    aaudio_result_t start();
+    aaudio_result_t start() EXCLUDES(mLock);
 
     /**
      * Stop the flow of data so that start() can resume without loss of data.
@@ -95,7 +96,7 @@
      * This is not guaranteed to be synchronous but it currently is.
      * An AAUDIO_SERVICE_EVENT_PAUSED will be sent to the client when complete.
     */
-    aaudio_result_t pause();
+    aaudio_result_t pause() EXCLUDES(mLock);
 
     /**
      * Stop the flow of data after the currently queued data has finished playing.
@@ -104,14 +105,14 @@
      * An AAUDIO_SERVICE_EVENT_STOPPED will be sent to the client when complete.
      *
      */
-    aaudio_result_t stop();
+    aaudio_result_t stop() EXCLUDES(mLock);
 
     /**
      * Discard any data held by the underlying HAL or Service.
      *
      * An AAUDIO_SERVICE_EVENT_FLUSHED will be sent to the client when complete.
      */
-    aaudio_result_t flush();
+    aaudio_result_t flush() EXCLUDES(mLock);
 
     virtual aaudio_result_t startClient(const android::AudioClient& client,
                                         const audio_attributes_t *attr __unused,
@@ -125,9 +126,9 @@
         return AAUDIO_ERROR_UNAVAILABLE;
     }
 
-    aaudio_result_t registerAudioThread(pid_t clientThreadId, int priority);
+    aaudio_result_t registerAudioThread(pid_t clientThreadId, int priority) EXCLUDES(mLock);
 
-    aaudio_result_t unregisterAudioThread(pid_t clientThreadId);
+    aaudio_result_t unregisterAudioThread(pid_t clientThreadId) EXCLUDES(mLock);
 
     bool isRunning() const {
         return mState == AAUDIO_STREAM_STATE_STARTED;
@@ -136,7 +137,7 @@
     /**
      * Fill in a parcelable description of stream.
      */
-    aaudio_result_t getDescription(AudioEndpointParcelable &parcelable);
+    aaudio_result_t getDescription(AudioEndpointParcelable &parcelable) EXCLUDES(mLock);
 
     void setRegisteredThread(pid_t pid) {
         mRegisteredClientThread = pid;
@@ -152,18 +153,18 @@
 
     void run() override; // to implement Runnable
 
-    void disconnect();
+    void disconnect() EXCLUDES(mLock);
 
     const android::AudioClient &getAudioClient() {
         return mMmapClient;
     }
 
     uid_t getOwnerUserId() const {
-        return mMmapClient.clientUid;
+        return VALUE_OR_FATAL(android::aidl2legacy_int32_t_uid_t(mMmapClient.identity.uid));
     }
 
     pid_t getOwnerProcessId() const {
-        return mMmapClient.clientPid;
+        return VALUE_OR_FATAL(android::aidl2legacy_int32_t_pid_t(mMmapClient.identity.pid));
     }
 
     aaudio_handle_t getHandle() const {
@@ -247,7 +248,7 @@
 
     aaudio_result_t writeUpMessageQueue(AAudioServiceMessage *command);
 
-    aaudio_result_t sendCurrentTimestamp();
+    aaudio_result_t sendCurrentTimestamp() EXCLUDES(mLock);
 
     aaudio_result_t sendXRunCount(int32_t xRunCount);
 
diff --git a/services/oboeservice/AAudioServiceStreamMMAP.h b/services/oboeservice/AAudioServiceStreamMMAP.h
index 6ba1725..667465a 100644
--- a/services/oboeservice/AAudioServiceStreamMMAP.h
+++ b/services/oboeservice/AAudioServiceStreamMMAP.h
@@ -73,7 +73,8 @@
 
     aaudio_result_t getAudioDataDescription(AudioEndpointParcelable &parcelable) override;
 
-    aaudio_result_t getFreeRunningPosition(int64_t *positionFrames, int64_t *timeNanos) override;
+    aaudio_result_t getFreeRunningPosition(int64_t *positionFrames,
+            int64_t *timeNanos) EXCLUDES(mLock) override;
 
     aaudio_result_t getHardwareTimestamp(int64_t *positionFrames, int64_t *timeNanos) override;
 
diff --git a/services/oboeservice/Android.bp b/services/oboeservice/Android.bp
index 21f3247..a419dd5 100644
--- a/services/oboeservice/Android.bp
+++ b/services/oboeservice/Android.bp
@@ -66,10 +66,13 @@
         "liblog",
         "libutils",
         "aaudio-aidl-cpp",
+        "media_permission-aidl-cpp",
+        "libaudioclient_aidl_conversion",
     ],
 
     export_shared_lib_headers: [
         "libaaudio_internal",
+        "media_permission-aidl-cpp",
     ],
 
     header_libs: [
diff --git a/services/oboeservice/fuzzer/Android.bp b/services/oboeservice/fuzzer/Android.bp
new file mode 100644
index 0000000..f4e8a81
--- /dev/null
+++ b/services/oboeservice/fuzzer/Android.bp
@@ -0,0 +1,72 @@
+/******************************************************************************
+ *
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at:
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ *****************************************************************************
+ * Originally developed and contributed by Ittiam Systems Pvt. Ltd, Bangalore
+ */
+
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
+cc_fuzz {
+    name: "oboeservice_fuzzer",
+    srcs: [
+        "oboeservice_fuzzer.cpp",
+    ],
+    shared_libs: [
+        "libaaudio_internal",
+        "libaudioclient",
+        "libaudioflinger",
+        "libaudioutils",
+        "libmedia_helper",
+        "libmediametrics",
+        "libmediautils",
+        "libbase",
+        "libbinder",
+        "libcutils",
+        "liblog",
+        "libutils",
+        "aaudio-aidl-cpp",
+        "media_permission-aidl-cpp",
+        "libaudioclient_aidl_conversion",
+    ],
+    static_libs: [
+        "libaaudioservice",
+    ],
+    include_dirs: [
+        "frameworks/av/services/oboeservice",
+    ],
+    header_libs: [
+        "libaudiohal_headers",
+    ],
+    cflags: [
+        "-Wall",
+        "-Werror",
+        "-Wno-unused-parameter",
+    ],
+    fuzz_config: {
+        cc: [
+            "android-media-fuzzing-reports@google.com",
+        ],
+        componentid: 155276,
+    },
+}
diff --git a/services/oboeservice/fuzzer/README.md b/services/oboeservice/fuzzer/README.md
new file mode 100644
index 0000000..00b85df
--- /dev/null
+++ b/services/oboeservice/fuzzer/README.md
@@ -0,0 +1,65 @@
+# Fuzzer for libaaudioservice
+
+## Plugin Design Considerations
+The fuzzer plugin for libaaudioservice is designed based on an understanding of the
+service and tries to achieve the following:
+
+##### Maximize code coverage
+The configuration parameters are not hardcoded, but instead selected based on
+incoming data. This ensures more code paths are reached by the fuzzer.
+
+An AAudio service request contains the following parameters:
+1. AAudioFormat
+2. UserId
+3. ProcessId
+4. InService
+5. DeviceId
+6. SampleRate
+7. SamplesPerFrame
+8. Direction
+9. SharingMode
+10. Usage
+11. ContentType
+12. InputPreset
+13. BufferCapacity
+
+| Parameter | Valid Input Values | Configured Value |
+|-----------|--------------------|------------------|
+| `AAudioFormat` | `AAUDIO_FORMAT_UNSPECIFIED`, `AAUDIO_FORMAT_PCM_I16`, `AAUDIO_FORMAT_PCM_FLOAT` | Value chosen from valid values by obtaining index from FuzzedDataProvider |
+| `UserId`   | `INT32_MIN` to `INT32_MAX` | Value obtained from getuid() |
+| `ProcessId`   | `INT32_MIN` to `INT32_MAX` | Value obtained from getpid() |
+| `InService`   | `bool` | Value obtained from FuzzedDataProvider |
+| `DeviceId`   | `INT32_MIN` to `INT32_MAX` | Value obtained from FuzzedDataProvider |
+| `SampleRate`   | `INT32_MIN` to `INT32_MAX` | Value obtained from FuzzedDataProvider |
+| `SamplesPerFrame` | `INT32_MIN` to `INT32_MAX` | Value obtained from FuzzedDataProvider |
+| `Direction` | `AAUDIO_DIRECTION_OUTPUT`, `AAUDIO_DIRECTION_INPUT` | Value chosen from valid values by obtaining index from FuzzedDataProvider |
+| `SharingMode` | `AAUDIO_SHARING_MODE_EXCLUSIVE`, `AAUDIO_SHARING_MODE_SHARED` | Value chosen from valid values by obtaining index from FuzzedDataProvider |
+| `Usage` | `AAUDIO_USAGE_MEDIA`, `AAUDIO_USAGE_VOICE_COMMUNICATION`, `AAUDIO_USAGE_VOICE_COMMUNICATION_SIGNALLING`, `AAUDIO_USAGE_ALARM`, `AAUDIO_USAGE_NOTIFICATION`, `AAUDIO_USAGE_NOTIFICATION_RINGTONE`, `AAUDIO_USAGE_NOTIFICATION_EVENT`, `AAUDIO_USAGE_ASSISTANCE_ACCESSIBILITY`, `AAUDIO_USAGE_ASSISTANCE_NAVIGATION_GUIDANCE`, `AAUDIO_USAGE_ASSISTANCE_SONIFICATION`, `AAUDIO_USAGE_GAME`, `AAUDIO_USAGE_ASSISTANT`, `AAUDIO_SYSTEM_USAGE_EMERGENCY`, `AAUDIO_SYSTEM_USAGE_SAFETY`, `AAUDIO_SYSTEM_USAGE_VEHICLE_STATUS`, `AAUDIO_SYSTEM_USAGE_ANNOUNCEMENT` | Value chosen from valid values by obtaining index from FuzzedDataProvider |
+| `ContentType` | `AAUDIO_CONTENT_TYPE_SPEECH`, `AAUDIO_CONTENT_TYPE_MUSIC`, `AAUDIO_CONTENT_TYPE_MOVIE`, `AAUDIO_CONTENT_TYPE_SONIFICATION` | Value chosen from valid values by obtaining index from FuzzedDataProvider |
+| `InputPreset` | `AAUDIO_INPUT_PRESET_GENERIC`, `AAUDIO_INPUT_PRESET_CAMCORDER`, `AAUDIO_INPUT_PRESET_VOICE_RECOGNITION`, `AAUDIO_INPUT_PRESET_VOICE_COMMUNICATION`, `AAUDIO_INPUT_PRESET_UNPROCESSED`, `AAUDIO_INPUT_PRESET_VOICE_PERFORMANCE` | Value chosen from valid values by obtaining index from FuzzedDataProvider |
+| `BufferCapacity` | `INT32_MIN` to `INT32_MAX` | Value obtained from FuzzedDataProvider |
+
+This also ensures that the plugin is always deterministic for any given input.
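+
+For illustration only, the selection pattern described above boils down to something
+like the following sketch (the `pickValue` helper is hypothetical and not part of the
+plugin): either take an arbitrary fuzzed integer, or pick one entry from a table of
+valid values, so both valid and invalid configurations get exercised.
+
+```cpp
+#include <fuzzer/FuzzedDataProvider.h>
+
+// Sketch only: choose either a completely arbitrary int32_t or one of the
+// known-valid values, driven deterministically by the fuzzed input.
+template <typename T, size_t N>
+int32_t pickValue(FuzzedDataProvider &fdp, const T (&validValues)[N]) {
+    return fdp.ConsumeBool()
+            ? fdp.ConsumeIntegral<int32_t>()
+            : static_cast<int32_t>(
+                      validValues[fdp.ConsumeIntegralInRange<size_t>(0, N - 1)]);
+}
+```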
+
+## Build
+
+This section describes the steps to build the oboeservice_fuzzer binary.
+
+### Android
+
+#### Steps to build
+Build the fuzzer
+```
+  $ mm -j$(nproc) oboeservice_fuzzer
+```
+
+#### Steps to run
+To run on device
+```
+  $ adb sync data
+  $ adb shell /data/fuzz/arm64/oboeservice_fuzzer/oboeservice_fuzzer
+```
+
+## References
+ * http://llvm.org/docs/LibFuzzer.html
+ * https://github.com/google/oss-fuzz
diff --git a/services/oboeservice/fuzzer/oboeservice_fuzzer.cpp b/services/oboeservice/fuzzer/oboeservice_fuzzer.cpp
new file mode 100644
index 0000000..8e508d3
--- /dev/null
+++ b/services/oboeservice/fuzzer/oboeservice_fuzzer.cpp
@@ -0,0 +1,369 @@
+/******************************************************************************
+ *
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at:
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ *****************************************************************************
+ * Originally developed and contributed by Ittiam Systems Pvt. Ltd, Bangalore
+ */
+#include <fuzzer/FuzzedDataProvider.h>
+#include <stdio.h>
+
+#include <AAudioService.h>
+#include <aaudio/AAudio.h>
+#include "aaudio/BnAAudioClient.h"
+#include <android/media/permission/Identity.h>
+
+#define UNUSED_PARAM __attribute__((unused))
+
+using namespace android;
+using namespace aaudio;
+
+aaudio_format_t kAAudioFormats[] = {
+    AAUDIO_FORMAT_UNSPECIFIED,
+    AAUDIO_FORMAT_PCM_I16,
+    AAUDIO_FORMAT_PCM_FLOAT,
+};
+
+aaudio_usage_t kAAudioUsages[] = {
+    AAUDIO_USAGE_MEDIA,
+    AAUDIO_USAGE_VOICE_COMMUNICATION,
+    AAUDIO_USAGE_VOICE_COMMUNICATION_SIGNALLING,
+    AAUDIO_USAGE_ALARM,
+    AAUDIO_USAGE_NOTIFICATION,
+    AAUDIO_USAGE_NOTIFICATION_RINGTONE,
+    AAUDIO_USAGE_NOTIFICATION_EVENT,
+    AAUDIO_USAGE_ASSISTANCE_ACCESSIBILITY,
+    AAUDIO_USAGE_ASSISTANCE_NAVIGATION_GUIDANCE,
+    AAUDIO_USAGE_ASSISTANCE_SONIFICATION,
+    AAUDIO_USAGE_GAME,
+    AAUDIO_USAGE_ASSISTANT,
+    AAUDIO_SYSTEM_USAGE_EMERGENCY,
+    AAUDIO_SYSTEM_USAGE_SAFETY,
+    AAUDIO_SYSTEM_USAGE_VEHICLE_STATUS,
+    AAUDIO_SYSTEM_USAGE_ANNOUNCEMENT,
+};
+
+aaudio_content_type_t kAAudioContentTypes[] = {
+    AAUDIO_CONTENT_TYPE_SPEECH,
+    AAUDIO_CONTENT_TYPE_MUSIC,
+    AAUDIO_CONTENT_TYPE_MOVIE,
+    AAUDIO_CONTENT_TYPE_SONIFICATION,
+};
+
+aaudio_input_preset_t kAAudioInputPresets[] = {
+    AAUDIO_INPUT_PRESET_GENERIC,           AAUDIO_INPUT_PRESET_CAMCORDER,
+    AAUDIO_INPUT_PRESET_VOICE_RECOGNITION, AAUDIO_INPUT_PRESET_VOICE_COMMUNICATION,
+    AAUDIO_INPUT_PRESET_UNPROCESSED,       AAUDIO_INPUT_PRESET_VOICE_PERFORMANCE,
+};
+
+const size_t kNumAAudioFormats = std::size(kAAudioFormats);
+const size_t kNumAAudioUsages = std::size(kAAudioUsages);
+const size_t kNumAAudioContentTypes = std::size(kAAudioContentTypes);
+const size_t kNumAAudioInputPresets = std::size(kAAudioInputPresets);
+
+class FuzzAAudioClient : public virtual RefBase, public AAudioServiceInterface {
+   public:
+    FuzzAAudioClient(sp<AAudioService> service);
+
+    virtual ~FuzzAAudioClient();
+
+    AAudioServiceInterface *getAAudioService();
+
+    void dropAAudioService();
+
+    void registerClient(const sp<IAAudioClient> &client UNUSED_PARAM) override {}
+
+    aaudio_handle_t openStream(const AAudioStreamRequest &request,
+                               AAudioStreamConfiguration &configurationOutput) override;
+
+    aaudio_result_t closeStream(aaudio_handle_t streamHandle) override;
+
+    aaudio_result_t getStreamDescription(aaudio_handle_t streamHandle,
+                                         AudioEndpointParcelable &parcelable) override;
+
+    aaudio_result_t startStream(aaudio_handle_t streamHandle) override;
+
+    aaudio_result_t pauseStream(aaudio_handle_t streamHandle) override;
+
+    aaudio_result_t stopStream(aaudio_handle_t streamHandle) override;
+
+    aaudio_result_t flushStream(aaudio_handle_t streamHandle) override;
+
+    aaudio_result_t registerAudioThread(aaudio_handle_t streamHandle, pid_t clientThreadId,
+                                        int64_t periodNanoseconds) override;
+
+    aaudio_result_t unregisterAudioThread(aaudio_handle_t streamHandle,
+                                          pid_t clientThreadId) override;
+
+    aaudio_result_t startClient(aaudio_handle_t streamHandle UNUSED_PARAM,
+                                const AudioClient &client UNUSED_PARAM,
+                                const audio_attributes_t *attr UNUSED_PARAM,
+                                audio_port_handle_t *clientHandle UNUSED_PARAM) override {
+        return AAUDIO_ERROR_UNAVAILABLE;
+    }
+
+    aaudio_result_t stopClient(aaudio_handle_t streamHandle UNUSED_PARAM,
+                               audio_port_handle_t clientHandle UNUSED_PARAM) override {
+        return AAUDIO_ERROR_UNAVAILABLE;
+    }
+
+    void onStreamChange(aaudio_handle_t handle, int32_t opcode, int32_t value) {}
+
+    int getDeathCount() { return mDeathCount; }
+
+    void incDeathCount() { ++mDeathCount; }
+
+    class AAudioClient : public IBinder::DeathRecipient, public BnAAudioClient {
+       public:
+        AAudioClient(wp<FuzzAAudioClient> fuzzAAudioClient) : mBinderClient(fuzzAAudioClient) {}
+
+        virtual void binderDied(const wp<IBinder> &who UNUSED_PARAM) {
+            sp<FuzzAAudioClient> client = mBinderClient.promote();
+            if (client.get()) {
+                client->dropAAudioService();
+                client->incDeathCount();
+            }
+        }
+
+        android::binder::Status onStreamChange(int32_t handle, int32_t opcode, int32_t value) {
+            static_assert(std::is_same_v<aaudio_handle_t, int32_t>);
+            android::sp<FuzzAAudioClient> client = mBinderClient.promote();
+            if (client.get() != nullptr) {
+                client->onStreamChange(handle, opcode, value);
+            }
+            return android::binder::Status::ok();
+        }
+
+       private:
+        wp<FuzzAAudioClient> mBinderClient;
+    };
+
+   private:
+    sp<AAudioService> mAAudioService;
+    sp<AAudioClient> mAAudioClient;
+    AAudioServiceInterface *mAAudioServiceInterface;
+    int mDeathCount;
+};
+
+FuzzAAudioClient::FuzzAAudioClient(sp<AAudioService> service) : AAudioServiceInterface() {
+    mAAudioService = service;
+    mAAudioServiceInterface = &service->asAAudioServiceInterface();
+    mAAudioClient = new AAudioClient(this);
+    mDeathCount = 0;
+    if (mAAudioClient.get() && mAAudioService.get()) {
+        mAAudioService->linkToDeath(mAAudioClient);
+        mAAudioService->registerClient(mAAudioClient);
+    }
+}
+
+FuzzAAudioClient::~FuzzAAudioClient() { dropAAudioService(); }
+
+AAudioServiceInterface *FuzzAAudioClient::getAAudioService() {
+    if (!mAAudioServiceInterface && mAAudioService.get()) {
+        mAAudioServiceInterface = &mAAudioService->asAAudioServiceInterface();
+    }
+    return mAAudioServiceInterface;
+}
+
+void FuzzAAudioClient::dropAAudioService() {
+    mAAudioService.clear();
+}
+
+aaudio_handle_t FuzzAAudioClient::openStream(const AAudioStreamRequest &request,
+                                             AAudioStreamConfiguration &configurationOutput) {
+    aaudio_handle_t stream;
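+    // Try the call at most twice: if openStream() reports AAUDIO_ERROR_NO_SERVICE,
+    // drop the cached service reference and retry once before giving up.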
+    for (int i = 0; i < 2; ++i) {
+        AAudioServiceInterface *service = getAAudioService();
+        if (!service) {
+            return AAUDIO_ERROR_NO_SERVICE;
+        }
+
+        stream = service->openStream(request, configurationOutput);
+
+        if (stream == AAUDIO_ERROR_NO_SERVICE) {
+            dropAAudioService();
+        } else {
+            break;
+        }
+    }
+    return stream;
+}
+
+aaudio_result_t FuzzAAudioClient::closeStream(aaudio_handle_t streamHandle) {
+    AAudioServiceInterface *service = getAAudioService();
+    if (!service) {
+        return AAUDIO_ERROR_NO_SERVICE;
+    }
+    return service->closeStream(streamHandle);
+}
+
+aaudio_result_t FuzzAAudioClient::getStreamDescription(aaudio_handle_t streamHandle,
+                                                       AudioEndpointParcelable &parcelable) {
+    AAudioServiceInterface *service = getAAudioService();
+    if (!service) {
+        return AAUDIO_ERROR_NO_SERVICE;
+    }
+    return service->getStreamDescription(streamHandle, parcelable);
+}
+
+aaudio_result_t FuzzAAudioClient::startStream(aaudio_handle_t streamHandle) {
+    AAudioServiceInterface *service = getAAudioService();
+    if (!service) {
+        return AAUDIO_ERROR_NO_SERVICE;
+    }
+    return service->startStream(streamHandle);
+}
+
+aaudio_result_t FuzzAAudioClient::pauseStream(aaudio_handle_t streamHandle) {
+    AAudioServiceInterface *service = getAAudioService();
+    if (!service) {
+        return AAUDIO_ERROR_NO_SERVICE;
+    }
+    return service->pauseStream(streamHandle);
+}
+
+aaudio_result_t FuzzAAudioClient::stopStream(aaudio_handle_t streamHandle) {
+    AAudioServiceInterface *service = getAAudioService();
+    if (!service) {
+        return AAUDIO_ERROR_NO_SERVICE;
+    }
+    return service->stopStream(streamHandle);
+}
+
+aaudio_result_t FuzzAAudioClient::flushStream(aaudio_handle_t streamHandle) {
+    AAudioServiceInterface *service = getAAudioService();
+    if (!service) {
+        return AAUDIO_ERROR_NO_SERVICE;
+    }
+    return service->flushStream(streamHandle);
+}
+
+aaudio_result_t FuzzAAudioClient::registerAudioThread(aaudio_handle_t streamHandle,
+                                                      pid_t clientThreadId,
+                                                      int64_t periodNanoseconds) {
+    AAudioServiceInterface *service = getAAudioService();
+    if (!service) {
+        return AAUDIO_ERROR_NO_SERVICE;
+    }
+    return service->registerAudioThread(streamHandle, clientThreadId, periodNanoseconds);
+}
+
+aaudio_result_t FuzzAAudioClient::unregisterAudioThread(aaudio_handle_t streamHandle,
+                                                        pid_t clientThreadId) {
+    AAudioServiceInterface *service = getAAudioService();
+    if (!service) {
+        return AAUDIO_ERROR_NO_SERVICE;
+    }
+    return service->unregisterAudioThread(streamHandle, clientThreadId);
+}
+
+class OboeserviceFuzzer {
+   public:
+    OboeserviceFuzzer();
+    ~OboeserviceFuzzer() = default;
+    void process(const uint8_t *data, size_t size);
+
+   private:
+    sp<FuzzAAudioClient> mClient;
+};
+
+OboeserviceFuzzer::OboeserviceFuzzer() {
+    sp<AAudioService> service = new AAudioService();
+    mClient = new FuzzAAudioClient(service);
+}
+
+void OboeserviceFuzzer::process(const uint8_t *data, size_t size) {
+    FuzzedDataProvider fdp = FuzzedDataProvider(data, size);
+    AAudioStreamRequest request;
+    AAudioStreamConfiguration configurationOutput;
+
+    // Initialize stream request
+    request.getConfiguration().setFormat((audio_format_t)(
+        fdp.ConsumeBool()
+            ? fdp.ConsumeIntegral<int32_t>()
+            : kAAudioFormats[fdp.ConsumeIntegralInRange<int32_t>(0, kNumAAudioFormats - 1)]));
+
+    // TODO b/182392769: use identity util
+    media::permission::Identity identity;
+    identity.uid = getuid();
+    identity.pid = getpid();
+    request.setIdentity(identity);
+    request.setInService(fdp.ConsumeBool());
+
+    request.getConfiguration().setDeviceId(fdp.ConsumeIntegral<int32_t>());
+    request.getConfiguration().setSampleRate(fdp.ConsumeIntegral<int32_t>());
+    request.getConfiguration().setSamplesPerFrame(fdp.ConsumeIntegral<int32_t>());
+    request.getConfiguration().setDirection(
+        fdp.ConsumeBool() ? fdp.ConsumeIntegral<int32_t>()
+                          : (fdp.ConsumeBool() ? AAUDIO_DIRECTION_OUTPUT : AAUDIO_DIRECTION_INPUT));
+    request.getConfiguration().setSharingMode(
+        fdp.ConsumeBool()
+            ? fdp.ConsumeIntegral<int32_t>()
+            : (fdp.ConsumeBool() ? AAUDIO_SHARING_MODE_EXCLUSIVE : AAUDIO_SHARING_MODE_SHARED));
+
+    request.getConfiguration().setUsage(
+        fdp.ConsumeBool()
+            ? fdp.ConsumeIntegral<int32_t>()
+            : kAAudioUsages[fdp.ConsumeIntegralInRange<int32_t>(0, kNumAAudioUsages - 1)]);
+    request.getConfiguration().setContentType(
+        fdp.ConsumeBool() ? fdp.ConsumeIntegral<int32_t>()
+                          : kAAudioContentTypes[fdp.ConsumeIntegralInRange<int32_t>(
+                                0, kNumAAudioContentTypes - 1)]);
+    request.getConfiguration().setInputPreset(
+        fdp.ConsumeBool() ? fdp.ConsumeIntegral<int32_t>()
+                          : kAAudioInputPresets[fdp.ConsumeIntegralInRange<int32_t>(
+                                0, kNumAAudioInputPresets - 1)]);
+    request.getConfiguration().setPrivacySensitive(fdp.ConsumeBool());
+
+    request.getConfiguration().setBufferCapacity(fdp.ConsumeIntegral<int32_t>());
+
+    aaudio_handle_t stream = mClient->openStream(request, configurationOutput);
+    if (stream < 0) {
+        // invalid request, stream not opened.
+        return;
+    }
+    while (fdp.remaining_bytes()) {
+        AudioEndpointParcelable audioEndpointParcelable;
+        int action = fdp.ConsumeIntegralInRange<int32_t>(0, 4);
+        switch (action) {
+            case 0:
+                mClient->getStreamDescription(stream, audioEndpointParcelable);
+                break;
+            case 1:
+                mClient->startStream(stream);
+                break;
+            case 2:
+                mClient->pauseStream(stream);
+                break;
+            case 3:
+                mClient->stopStream(stream);
+                break;
+            case 4:
+                mClient->flushStream(stream);
+                break;
+        }
+    }
+    mClient->closeStream(stream);
+    assert(mClient->getDeathCount() == 0);
+}
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t *data, size_t size) {
+    if (size < 1) {
+        return 0;
+    }
+    OboeserviceFuzzer oboeserviceFuzzer;
+    oboeserviceFuzzer.process(data, size);
+    return 0;
+}
diff --git a/services/tuner/Android.bp b/services/tuner/Android.bp
index caa3474..df2b4a3 100644
--- a/services/tuner/Android.bp
+++ b/services/tuner/Android.bp
@@ -1,3 +1,12 @@
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
 filegroup {
     name: "tv_tuner_aidl",
     srcs: [
@@ -72,6 +81,7 @@
         "android.hardware.tv.tuner@1.0",
         "android.hardware.tv.tuner@1.1",
         "libbase",
+        "libbinder",
         "libbinder_ndk",
         "libcutils",
         "libfmq",
diff --git a/services/tuner/TunerDemux.cpp b/services/tuner/TunerDemux.cpp
index ba8d6a7..1122368 100644
--- a/services/tuner/TunerDemux.cpp
+++ b/services/tuner/TunerDemux.cpp
@@ -98,7 +98,7 @@
         return Status::fromServiceSpecificError(static_cast<int32_t>(status));
     }
 
-    *_aidl_return = ::ndk::SharedRefBase::make<TunerFilter>(filterSp, cbSp, type, subType);
+    *_aidl_return = ::ndk::SharedRefBase::make<TunerFilter>(filterSp, type, subType);
     return Status::ok();
 }
 
diff --git a/services/tuner/TunerDescrambler.cpp b/services/tuner/TunerDescrambler.cpp
index 16338db..bdf826c 100644
--- a/services/tuner/TunerDescrambler.cpp
+++ b/services/tuner/TunerDescrambler.cpp
@@ -67,8 +67,9 @@
         return Status::fromServiceSpecificError(static_cast<int32_t>(Result::UNAVAILABLE));
     }
 
-    Result res = mDescrambler->addPid(getHidlDemuxPid(pid),
-            static_cast<TunerFilter*>(optionalSourceFilter.get())->getHalFilter());
+    sp<IFilter> halFilter = (optionalSourceFilter == NULL)
+            ? NULL : static_cast<TunerFilter*>(optionalSourceFilter.get())->getHalFilter();
+    Result res = mDescrambler->addPid(getHidlDemuxPid(pid), halFilter);
     if (res != Result::SUCCESS) {
         return ::ndk::ScopedAStatus::fromServiceSpecificError(static_cast<int32_t>(res));
     }
@@ -82,8 +83,9 @@
         return Status::fromServiceSpecificError(static_cast<int32_t>(Result::UNAVAILABLE));
     }
 
-    Result res = mDescrambler->removePid(getHidlDemuxPid(pid),
-            static_cast<TunerFilter*>(optionalSourceFilter.get())->getHalFilter());
+    sp<IFilter> halFilter = (optionalSourceFilter == NULL)
+            ? NULL : static_cast<TunerFilter*>(optionalSourceFilter.get())->getHalFilter();
+    Result res = mDescrambler->removePid(getHidlDemuxPid(pid), halFilter);
     if (res != Result::SUCCESS) {
         return ::ndk::ScopedAStatus::fromServiceSpecificError(static_cast<int32_t>(res));
     }
diff --git a/services/tuner/TunerFilter.cpp b/services/tuner/TunerFilter.cpp
index dc9d246..d3d138d 100644
--- a/services/tuner/TunerFilter.cpp
+++ b/services/tuner/TunerFilter.cpp
@@ -39,10 +39,9 @@
 using namespace std;
 
 TunerFilter::TunerFilter(
-        sp<IFilter> filter, sp<IFilterCallback> callback, int mainType, int subType) {
+        sp<IFilter> filter, int mainType, int subType) {
     mFilter = filter;
     mFilter_1_1 = ::android::hardware::tv::tuner::V1_1::IFilter::castFrom(filter);
-    mFilterCallback = callback;
     mMainType = mainType;
     mSubType = subType;
 }
@@ -50,7 +49,6 @@
 TunerFilter::~TunerFilter() {
     mFilter = nullptr;
     mFilter_1_1 = nullptr;
-    mFilterCallback = nullptr;
 }
 
 Status TunerFilter::getQueueDesc(AidlMQDesc* _aidl_return) {
@@ -473,7 +471,7 @@
         res = r;
         if (res == Result::SUCCESS) {
             TunerFilterSharedHandleInfo info{
-                .handle = dupToAidl(hidl_handle(avMemory.getNativeHandle())),
+                .handle = dupToAidl(avMemory),
                 .size = static_cast<int64_t>(avMemSize),
             };
             *_aidl_return = move(info);
@@ -482,7 +480,10 @@
         }
     });
 
-    return Status::fromServiceSpecificError(static_cast<int32_t>(res));
+    if (res != Result::SUCCESS) {
+        return Status::fromServiceSpecificError(static_cast<int32_t>(res));
+    }
+    return Status::ok();
 }
 
 Status TunerFilter::releaseAvHandle(
@@ -499,7 +500,6 @@
     return Status::ok();
 }
 
-
 Status TunerFilter::start() {
     if (mFilter == nullptr) {
         ALOGE("IFilter is not initialized");
diff --git a/services/tuner/TunerFilter.h b/services/tuner/TunerFilter.h
index d12b7ac..ff4728c 100644
--- a/services/tuner/TunerFilter.h
+++ b/services/tuner/TunerFilter.h
@@ -92,7 +92,7 @@
 class TunerFilter : public BnTunerFilter {
 
 public:
-    TunerFilter(sp<IFilter> filter, sp<IFilterCallback> callback, int mainType, int subTyp);
+    TunerFilter(sp<IFilter> filter, int mainType, int subType);
     virtual ~TunerFilter();
     Status getId(int32_t* _aidl_return) override;
     Status getId64Bit(int64_t* _aidl_return) override;
@@ -181,7 +181,6 @@
 
     sp<IFilter> mFilter;
     sp<::android::hardware::tv::tuner::V1_1::IFilter> mFilter_1_1;
-    sp<IFilterCallback> mFilterCallback;
     int32_t mId;
     int64_t mId64Bit;
     int mMainType;
diff --git a/services/tuner/TunerLnb.cpp b/services/tuner/TunerLnb.cpp
index 4a5acf5..77248d4 100644
--- a/services/tuner/TunerLnb.cpp
+++ b/services/tuner/TunerLnb.cpp
@@ -48,7 +48,10 @@
 
     sp<ILnbCallback> lnbCallback = new LnbCallback(tunerLnbCallback);
     Result status = mLnb->setCallback(lnbCallback);
-    return Status::fromServiceSpecificError(static_cast<int32_t>(status));
+    if (status != Result::SUCCESS) {
+        return Status::fromServiceSpecificError(static_cast<int32_t>(status));
+    }
+    return Status::ok();
 }
 
 Status TunerLnb::setVoltage(int voltage) {
@@ -58,7 +61,10 @@
     }
 
     Result status = mLnb->setVoltage(static_cast<LnbVoltage>(voltage));
-    return Status::fromServiceSpecificError(static_cast<int32_t>(status));
+    if (status != Result::SUCCESS) {
+        return Status::fromServiceSpecificError(static_cast<int32_t>(status));
+    }
+    return Status::ok();
 }
 
 Status TunerLnb::setTone(int tone) {
@@ -68,7 +74,10 @@
     }
 
     Result status = mLnb->setTone(static_cast<LnbTone>(tone));
-    return Status::fromServiceSpecificError(static_cast<int32_t>(status));
+    if (status != Result::SUCCESS) {
+        return Status::fromServiceSpecificError(static_cast<int32_t>(status));
+    }
+    return Status::ok();
 }
 
 Status TunerLnb::setSatellitePosition(int position) {
@@ -78,7 +87,10 @@
     }
 
     Result status = mLnb->setSatellitePosition(static_cast<LnbPosition>(position));
-    return Status::fromServiceSpecificError(static_cast<int32_t>(status));
+    if (status != Result::SUCCESS) {
+        return Status::fromServiceSpecificError(static_cast<int32_t>(status));
+    }
+    return Status::ok();
 }
 
 Status TunerLnb::sendDiseqcMessage(const vector<uint8_t>& diseqcMessage) {
@@ -88,7 +100,10 @@
     }
 
     Result status = mLnb->sendDiseqcMessage(diseqcMessage);
-    return Status::fromServiceSpecificError(static_cast<int32_t>(status));
+    if (status != Result::SUCCESS) {
+        return Status::fromServiceSpecificError(static_cast<int32_t>(status));
+    }
+    return Status::ok();
 }
 
 Status TunerLnb::close() {
diff --git a/services/tuner/TunerService.cpp b/services/tuner/TunerService.cpp
index b80fd85..77e1c40 100644
--- a/services/tuner/TunerService.cpp
+++ b/services/tuner/TunerService.cpp
@@ -17,6 +17,8 @@
 #define LOG_TAG "TunerService"
 
 #include <android/binder_manager.h>
+#include <android/content/pm/IPackageManagerNative.h>
+#include <binder/IServiceManager.h>
 #include <utils/Log.h>
 #include "TunerService.h"
 #include "TunerFrontend.h"
@@ -49,7 +51,39 @@
 
 namespace android {
 
-TunerService::TunerService() {}
+TunerService::TunerService() {
+    sp<IServiceManager> serviceMgr = defaultServiceManager();
+    sp<content::pm::IPackageManagerNative> packageMgr;
+    if (serviceMgr.get() == nullptr) {
+        ALOGE("%s: Cannot find service manager", __func__);
+        return;
+    } else {
+        sp<IBinder> binder = serviceMgr->waitForService(String16("package_native"));
+        packageMgr = interface_cast<content::pm::IPackageManagerNative>(binder);
+    }
+
+    bool hasFeature = false;
+    if (packageMgr != nullptr) {
+        binder::Status status = packageMgr->hasSystemFeature(FEATURE_TUNER, 0, &hasFeature);
+        if (!status.isOk()) {
+            ALOGE("%s: hasSystemFeature failed: %s",
+                    __func__, status.exceptionMessage().c_str());
+            return;
+        }
+        if (!hasFeature) {
+            ALOGD("Current device does not support tuner feaure.");
+            return;
+        }
+    } else {
+        ALOGD("%s: Cannot find package manager.", __func__);
+        return;
+    }
+
+    ::ndk::SpAIBinder binder(AServiceManager_waitForService("tv_tuner_resource_mgr"));
+    mTunerResourceManager = ITunerResourceManager::fromBinder(binder);
+    updateTunerResources();
+}
+
 TunerService::~TunerService() {}
 
 binder_status_t TunerService::instantiate() {
@@ -282,19 +316,15 @@
     return Status::ok();
 }
 
-Status TunerService::updateTunerResources() {
-    if (!hasITuner()) {
-        return Status::fromServiceSpecificError(static_cast<int32_t>(Result::UNAVAILABLE));
+void TunerService::updateTunerResources() {
+    if (!hasITuner() || mTunerResourceManager == nullptr) {
+        ALOGE("Failed to updateTunerResources");
+        return;
     }
 
-    // Connect with Tuner Resource Manager.
-    ::ndk::SpAIBinder binder(AServiceManager_getService("tv_tuner_resource_mgr"));
-    mTunerResourceManager = ITunerResourceManager::fromBinder(binder);
-
     updateFrontendResources();
     updateLnbResources();
     // TODO: update Demux, Descrambler.
-    return Status::ok();
 }
 
 Status TunerService::getTunerHalVersion(int* _aidl_return) {
diff --git a/services/tuner/TunerService.h b/services/tuner/TunerService.h
index cc65b39..f8e2ee6 100644
--- a/services/tuner/TunerService.h
+++ b/services/tuner/TunerService.h
@@ -72,6 +72,8 @@
 const static int TUNER_HAL_VERSION_UNKNOWN = 0;
 const static int TUNER_HAL_VERSION_1_0 = 1 << 16;
 const static int TUNER_HAL_VERSION_1_1 = (1 << 16) | 1;
+// System Feature defined in PackageManager
+static const ::android::String16 FEATURE_TUNER(::android::String16("android.hardware.tv.tuner"));
 
 typedef enum {
     FRONTEND,
@@ -113,7 +115,6 @@
     Status getDemuxCaps(TunerDemuxCapabilities* _aidl_return) override;
     Status openDescrambler(int32_t descramblerHandle,
             std::shared_ptr<ITunerDescrambler>* _aidl_return) override;
-    Status updateTunerResources() override;
     Status getTunerHalVersion(int* _aidl_return) override;
 
     // TODO: create a map between resource id and handles.
@@ -131,6 +132,7 @@
 private:
     bool hasITuner();
     bool hasITuner_1_1();
+    void updateTunerResources();
 
     void updateFrontendResources();
     void updateLnbResources();
diff --git a/services/tuner/TunerTimeFilter.cpp b/services/tuner/TunerTimeFilter.cpp
index 25e1ad9..ea9da30 100644
--- a/services/tuner/TunerTimeFilter.cpp
+++ b/services/tuner/TunerTimeFilter.cpp
@@ -38,7 +38,10 @@
     }
 
     Result status = mTimeFilter->setTimeStamp(timeStamp);
-    return Status::fromServiceSpecificError(static_cast<int32_t>(status));
+    if (status != Result::SUCCESS) {
+        return Status::fromServiceSpecificError(static_cast<int32_t>(status));
+    }
+    return Status::ok();
 }
 
 Status TunerTimeFilter::clearTimeStamp() {
@@ -48,7 +51,10 @@
     }
 
     Result status = mTimeFilter->clearTimeStamp();
-    return Status::fromServiceSpecificError(static_cast<int32_t>(status));
+    if (status != Result::SUCCESS) {
+        return Status::fromServiceSpecificError(static_cast<int32_t>(status));
+    }
+    return Status::ok();
 }
 
 Status TunerTimeFilter::getSourceTime(int64_t* _aidl_return) {
@@ -66,8 +72,9 @@
             });
     if (status != Result::SUCCESS) {
         *_aidl_return = (int64_t)Constant64Bit::INVALID_PRESENTATION_TIME_STAMP;
+        return Status::fromServiceSpecificError(static_cast<int32_t>(status));
     }
-    return Status::fromServiceSpecificError(static_cast<int32_t>(status));
+    return Status::ok();
 }
 
 Status TunerTimeFilter::getTimeStamp(int64_t* _aidl_return) {
@@ -85,8 +92,9 @@
             });
     if (status != Result::SUCCESS) {
         *_aidl_return = (int64_t)Constant64Bit::INVALID_PRESENTATION_TIME_STAMP;
+        return Status::fromServiceSpecificError(static_cast<int32_t>(status));
     }
-    return Status::fromServiceSpecificError(static_cast<int32_t>(status));
+    return Status::ok();
 }
 
 Status TunerTimeFilter::close() {
diff --git a/services/tuner/aidl/android/media/tv/tuner/ITunerService.aidl b/services/tuner/aidl/android/media/tv/tuner/ITunerService.aidl
index f1651b9..755b152 100644
--- a/services/tuner/aidl/android/media/tv/tuner/ITunerService.aidl
+++ b/services/tuner/aidl/android/media/tv/tuner/ITunerService.aidl
@@ -97,12 +97,6 @@
     ITunerDescrambler openDescrambler(in int descramblerHandle);
 
     /**
-     * Update Tuner Resources in TunerResourceManager.
-     */
-    // TODO: b/178124017 update TRM in TunerService independently.
-    void updateTunerResources();
-
-    /**
      * Get an integer that carries the Tuner HIDL version. The high 16 bits are the
      * major version number while the low 16 bits are the minor version. Default
      * value is unknown version 0.