Merge "Log metrics for aaudio stream." into sc-dev
diff --git a/camera/aidl/android/hardware/ICameraService.aidl b/camera/aidl/android/hardware/ICameraService.aidl
index 8af704d..459ad15 100644
--- a/camera/aidl/android/hardware/ICameraService.aidl
+++ b/camera/aidl/android/hardware/ICameraService.aidl
@@ -174,6 +174,13 @@
     oneway void notifySystemEvent(int eventId, in int[] args);
 
     /**
+     * Notify the camera service of a display configuration change.
+     *
+     * Callers require the android.permission.CAMERA_SEND_SYSTEM_EVENTS permission.
+     */
+    oneway void notifyDisplayConfigurationChange();
+
+    /**
      * Notify the camera service of a device physical status change. May only be called from
      * a privileged process.
      *
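
For context on the notifyDisplayConfigurationChange() method added above, a minimal sketch of how a privileged native client might invoke it. The generated C++ binder interface path and the "media.camera" service name are assumptions here, not part of this change, and error handling is omitted:

#include <android/hardware/ICameraService.h>  // assumed header generated from the AIDL above
#include <binder/IInterface.h>
#include <binder/IServiceManager.h>
#include <utils/String16.h>

using android::defaultServiceManager;
using android::IBinder;
using android::interface_cast;
using android::sp;
using android::String16;

// Fire-and-forget notification; the caller must hold
// android.permission.CAMERA_SEND_SYSTEM_EVENTS or the call is rejected.
static void notifyDisplayConfigChanged() {
    sp<IBinder> binder = defaultServiceManager()->getService(String16("media.camera"));
    if (binder == nullptr) return;  // camera service not running yet
    sp<android::hardware::ICameraService> cameraService =
            interface_cast<android::hardware::ICameraService>(binder);
    cameraService->notifyDisplayConfigurationChange();
}
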
diff --git a/camera/ndk/impl/ACameraMetadata.cpp b/camera/ndk/impl/ACameraMetadata.cpp
index 7387442..dab2fef 100644
--- a/camera/ndk/impl/ACameraMetadata.cpp
+++ b/camera/ndk/impl/ACameraMetadata.cpp
@@ -24,6 +24,28 @@
 
 using namespace android;
 
+// Formats not listed in the public API, but still available to AImageReader.
+// Enum values must match the corresponding enums in ui/PublicFormat.h (which is
+// not available to VNDK).
+enum AIMAGE_PRIVATE_FORMATS {
+    /**
+     * Unprocessed implementation-dependent raw
+     * depth measurements, opaque with 16 bit
+     * samples.
+     */
+    AIMAGE_FORMAT_RAW_DEPTH = 0x1002,
+
+    /**
+     * Device-specific 10-bit depth RAW image format.
+     *
+     * <p>Unprocessed implementation-dependent raw depth measurements, opaque with 10-bit samples
+     * and a device-specific bit layout.</p>
+     */
+    AIMAGE_FORMAT_RAW_DEPTH10 = 0x1003,
+};
+
 /**
  * ACameraMetadata Implementation
  */
@@ -290,6 +312,10 @@
             format = AIMAGE_FORMAT_DEPTH_POINT_CLOUD;
         } else if (format == HAL_PIXEL_FORMAT_Y16) {
             format = AIMAGE_FORMAT_DEPTH16;
+        } else if (format == HAL_PIXEL_FORMAT_RAW16) {
+            format = static_cast<int32_t>(AIMAGE_FORMAT_RAW_DEPTH);
+        } else if (format == HAL_PIXEL_FORMAT_RAW10) {
+            format = static_cast<int32_t>(AIMAGE_FORMAT_RAW_DEPTH10);
         }
 
         filteredDepthStreamConfigs.push_back(format);
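
The hunk above extends the depth stream configuration filter so RAW16 and RAW10 HAL formats surface as the private RAW_DEPTH formats. A condensed, self-contained restatement of that mapping; the helper name and the literal HAL constant values are illustrative assumptions, only the 0x1002/0x1003 values come from the enum added above:

#include <cstdint>

// Assumed stand-ins for the HAL pixel format constants referenced above.
constexpr int32_t kHalPixelFormatRaw16 = 0x20;
constexpr int32_t kHalPixelFormatRaw10 = 0x25;

// Values match the AIMAGE_PRIVATE_FORMATS enum introduced in this change.
constexpr int32_t kAImageFormatRawDepth   = 0x1002;
constexpr int32_t kAImageFormatRawDepth10 = 0x1003;

// Translate a HAL depth-stream format into the format reported to AImageReader;
// anything else passes through unchanged.
static int32_t mapDepthStreamFormat(int32_t halFormat) {
    switch (halFormat) {
        case kHalPixelFormatRaw16: return kAImageFormatRawDepth;
        case kHalPixelFormatRaw10: return kAImageFormatRawDepth10;
        default:                   return halFormat;
    }
}
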
diff --git a/camera/ndk/include/camera/NdkCameraCaptureSession.h b/camera/ndk/include/camera/NdkCameraCaptureSession.h
index 6c1cf33..2b7f040 100644
--- a/camera/ndk/include/camera/NdkCameraCaptureSession.h
+++ b/camera/ndk/include/camera/NdkCameraCaptureSession.h
@@ -61,6 +61,10 @@
  */
 typedef void (*ACameraCaptureSession_stateCallback)(void* context, ACameraCaptureSession *session);
 
+/**
+ * Capture session state callbacks used in {@link ACameraDevice_createCaptureSession} and
+ * {@link ACameraDevice_createCaptureSessionWithSessionParameters}.
+ */
 typedef struct ACameraCaptureSession_stateCallbacks {
     /// optional application context.
     void*                               context;
@@ -246,6 +250,10 @@
         void* context, ACameraCaptureSession* session,
         ACaptureRequest* request, ACameraWindowType* window, int64_t frameNumber);
 
+/**
+ * ACameraCaptureSession_captureCallbacks structure used in
+ * {@link ACameraCaptureSession_capture} and {@link ACameraCaptureSession_setRepeatingRequest}.
+ */
 typedef struct ACameraCaptureSession_captureCallbacks {
     /// optional application context.
     void*                                               context;
@@ -413,7 +421,10 @@
  */
 void ACameraCaptureSession_close(ACameraCaptureSession* session);
 
-struct ACameraDevice;
+/**
+ * ACameraDevice is an opaque type that provides access to a camera device.
+ * A pointer can be obtained using the {@link ACameraManager_openCamera} method.
+ */
 typedef struct ACameraDevice ACameraDevice;
 
 /**
@@ -591,6 +602,10 @@
 camera_status_t ACameraCaptureSession_abortCaptures(ACameraCaptureSession* session)
         __INTRODUCED_IN(24);
 
+/**
+ * Opaque object for a capture session output; use {@link ACaptureSessionOutput_create} or
+ * {@link ACaptureSessionSharedOutput_create} to create an instance.
+ */
 typedef struct ACaptureSessionOutput ACaptureSessionOutput;
 
 /**
@@ -604,9 +619,9 @@
  *
  * <p>Native windows that get removed must not be part of any active repeating or single/burst
  * request or have any pending results. Consider updating repeating requests via
- * {@link ACaptureSessionOutput_setRepeatingRequest} and then wait for the last frame number
+ * {@link ACameraCaptureSession_setRepeatingRequest} and then wait for the last frame number
  * when the sequence completes
- * {@link ACameraCaptureSession_captureCallback#onCaptureSequenceCompleted}.</p>
+ * {@link ACameraCaptureSession_captureCallbacks#onCaptureSequenceCompleted}.</p>
  *
  * <p>Native windows that get added must not be part of any other registered ACaptureSessionOutput
  * and must be compatible. Compatible windows must have matching format, rotation and
@@ -713,7 +728,15 @@
      * Same as ACameraCaptureSession_captureCallbacks
      */
     void*                                               context;
+
+    /**
+     * Same as {@link ACameraCaptureSession_captureCallbacks#onCaptureStarted}.
+     */
     ACameraCaptureSession_captureCallback_start         onCaptureStarted;
+
+    /**
+     * Same as {@link ACameraCaptureSession_captureCallbacks#onCaptureProgressed}.
+     */
     ACameraCaptureSession_captureCallback_result        onCaptureProgressed;
 
     /**
@@ -751,10 +774,18 @@
     ACameraCaptureSession_logicalCamera_captureCallback_failed onLogicalCameraCaptureFailed;
 
     /**
-     * Same as ACameraCaptureSession_captureCallbacks
+     * Same as {@link ACameraCaptureSession_captureCallbacks#onCaptureSequenceCompleted}.
      */
     ACameraCaptureSession_captureCallback_sequenceEnd   onCaptureSequenceCompleted;
+
+    /**
+     * Same as {@link ACameraCaptureSession_captureCallbacks#onCaptureSequenceAborted}.
+     */
     ACameraCaptureSession_captureCallback_sequenceAbort onCaptureSequenceAborted;
+
+    /**
+     * Same as {@link ACameraCaptureSession_captureCallbacks#onCaptureBufferLost}.
+     */
     ACameraCaptureSession_captureCallback_bufferLost    onCaptureBufferLost;
 } ACameraCaptureSession_logicalCamera_captureCallbacks;
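
Since the callback structs documented above are typically filled field-by-field, here is a minimal usage sketch. It assumes the usual NDK signatures for the start callback and for ACameraCaptureSession_setRepeatingRequest, and omits error handling and request construction:

#include <camera/NdkCameraCaptureSession.h>
#include <camera/NdkCaptureRequest.h>

// Invoked when the capture pipeline starts exposing a frame for this request.
static void onCaptureStarted(void* /*context*/, ACameraCaptureSession* /*session*/,
                             const ACaptureRequest* /*request*/, int64_t timestamp) {
    (void)timestamp;  // start-of-exposure timestamp, in nanoseconds
}

static camera_status_t startRepeating(ACameraCaptureSession* session,
                                      ACaptureRequest* request) {
    // Zero-initialize so every callback not explicitly set stays null (optional).
    ACameraCaptureSession_captureCallbacks callbacks = {};
    callbacks.context = nullptr;
    callbacks.onCaptureStarted = onCaptureStarted;

    int sequenceId = 0;
    return ACameraCaptureSession_setRepeatingRequest(session, &callbacks,
                                                     /*numRequests=*/1, &request,
                                                     &sequenceId);
}
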
 
diff --git a/camera/ndk/include/camera/NdkCameraDevice.h b/camera/ndk/include/camera/NdkCameraDevice.h
index f72fe8d..7be4bd3 100644
--- a/camera/ndk/include/camera/NdkCameraDevice.h
+++ b/camera/ndk/include/camera/NdkCameraDevice.h
@@ -124,6 +124,10 @@
  */
 typedef void (*ACameraDevice_ErrorStateCallback)(void* context, ACameraDevice* device, int error);
 
+/**
+ * Application callbacks for camera device state changes; register them with
+ * {@link ACameraManager_openCamera}.
+ */
 typedef struct ACameraDevice_StateCallbacks {
     /// optional application context.
     void*                             context;
@@ -198,6 +202,10 @@
  */
 const char* ACameraDevice_getId(const ACameraDevice* device) __INTRODUCED_IN(24);
 
+/**
+ * Capture request pre-defined template types, used in {@link ACameraDevice_createCaptureRequest}
+ * and {@link ACameraDevice_createCaptureRequest_withPhysicalIds}.
+ */
 typedef enum {
     /**
      * Create a request suitable for a camera preview window. Specifically, this
@@ -301,10 +309,12 @@
         const ACameraDevice* device, ACameraDevice_request_template templateId,
         /*out*/ACaptureRequest** request) __INTRODUCED_IN(24);
 
-
+/**
+ * Opaque object for an ACaptureSessionOutput container; use
+ * {@link ACaptureSessionOutputContainer_create} to create an instance.
+ */
 typedef struct ACaptureSessionOutputContainer ACaptureSessionOutputContainer;
 
-typedef struct ACaptureSessionOutput ACaptureSessionOutput;
 
 /**
  * Create a capture session output container.
@@ -844,7 +854,7 @@
         /*out*/ACaptureRequest** request) __INTRODUCED_IN(29);
 
 /**
- * Check whether a particular {@ACaptureSessionOutputContainer} is supported by
+ * Check whether a particular {@link ACaptureSessionOutputContainer} is supported by
  * the camera device.
  *
  * <p>This method performs a runtime check of a given {@link
@@ -875,6 +885,7 @@
  *                                                         device.</li>
  *        <li>{@link ACAMERA_ERROR_UNSUPPORTED_OPERATION} if the query operation is not
  *                                                        supported by the camera device.</li>
+ *        </ul>
  */
 camera_status_t ACameraDevice_isSessionConfigurationSupported(
         const ACameraDevice* device,
diff --git a/camera/ndk/include/camera/NdkCameraError.h b/camera/ndk/include/camera/NdkCameraError.h
index 9d77eb4..26db7f2 100644
--- a/camera/ndk/include/camera/NdkCameraError.h
+++ b/camera/ndk/include/camera/NdkCameraError.h
@@ -40,7 +40,13 @@
 
 __BEGIN_DECLS
 
+/**
+ * Camera status enum types.
+ */
 typedef enum {
+    /**
+     * Camera operation has succeeded.
+     */
     ACAMERA_OK = 0,
 
     ACAMERA_ERROR_BASE                  = -10000,
diff --git a/camera/ndk/include/camera/NdkCameraManager.h b/camera/ndk/include/camera/NdkCameraManager.h
index be32b11..729182e 100644
--- a/camera/ndk/include/camera/NdkCameraManager.h
+++ b/camera/ndk/include/camera/NdkCameraManager.h
@@ -326,7 +326,7 @@
  * @see ACameraManager_registerExtendedAvailabilityCallback
  */
 typedef struct ACameraManager_ExtendedAvailabilityListener {
-    ///
+    /// Called when a camera becomes available or unavailable
     ACameraManager_AvailabilityCallbacks availabilityCallbacks;
 
     /// Called when there is camera access permission change
diff --git a/camera/ndk/include/camera/NdkCameraMetadata.h b/camera/ndk/include/camera/NdkCameraMetadata.h
index 0d5e6c4..b331d50 100644
--- a/camera/ndk/include/camera/NdkCameraMetadata.h
+++ b/camera/ndk/include/camera/NdkCameraMetadata.h
@@ -256,10 +256,12 @@
 
 /**
  * Return a {@link ACameraMetadata} that references the same data as
- * {@link cameraMetadata}, which is an instance of
- * {@link android.hardware.camera2.CameraMetadata} (e.g., a
- * {@link android.hardware.camera2.CameraCharacteristics} or
- * {@link android.hardware.camera2.CaptureResult}).
+ * <a href="/reference/android/hardware/camera2/CameraMetadata">
+ *     android.hardware.camera2.CameraMetadata</a> from Java API. (e.g., a
+ * <a href="/reference/android/hardware/camera2/CameraCharacteristics">
+ *     android.hardware.camera2.CameraCharacteristics</a>
+ * or <a href="/reference/android/hardware/camera2/CaptureResult">
+ *     android.hardware.camera2.CaptureResult</a>).
  *
  * <p>The returned ACameraMetadata must be freed by the application by {@link ACameraMetadata_free}
  * after application is done using it.</p>
@@ -269,11 +271,13 @@
  * the Java metadata is garbage collected.
  *
  * @param env the JNI environment.
- * @param cameraMetadata the source {@link android.hardware.camera2.CameraMetadata} from which the
+ * @param cameraMetadata the source <a href="/reference/android/hardware/camera2/CameraMetadata">
+ *                       android.hardware.camera2.CameraMetadata</a> from which the
  *                       returned {@link ACameraMetadata} is a view.
  *
- * @return a valid ACameraMetadata pointer or NULL if {@link cameraMetadata} is null or not a valid
- *         instance of {@link android.hardware.camera2.CameraMetadata}.
+ * @return a valid ACameraMetadata pointer or NULL if cameraMetadata is null or not a valid
+ *         instance of <a href="/reference/android/hardware/camera2/CameraMetadata">
+ *         android.hardware.camera2.CameraMetadata</a>.
  *
  */
 ACameraMetadata* ACameraMetadata_fromCameraMetadata(JNIEnv* env, jobject cameraMetadata)
diff --git a/camera/ndk/include/camera/NdkCameraMetadataTags.h b/camera/ndk/include/camera/NdkCameraMetadataTags.h
index 70ce864..20ffd48 100644
--- a/camera/ndk/include/camera/NdkCameraMetadataTags.h
+++ b/camera/ndk/include/camera/NdkCameraMetadataTags.h
@@ -1868,7 +1868,7 @@
      * <li>If the camera device has BURST_CAPTURE capability, the frame rate requirement of
      * BURST_CAPTURE must still be met.</li>
      * <li>All streams not larger than the maximum streaming dimension for BOKEH_STILL_CAPTURE mode
-     * (queried via {@link ACAMERA_CONTROL_AVAILABLE_EXTENDED_SCENE_MODE_CAPABILITIES })
+     * (queried via {@link ACAMERA_CONTROL_AVAILABLE_EXTENDED_SCENE_MODE_MAX_SIZES })
      * will have preview bokeh effect applied.</li>
      * </ul>
      * <p>When set to BOKEH_CONTINUOUS mode, configured streams dimension should not exceed this mode's
@@ -3502,7 +3502,7 @@
      * preCorrectionActiveArraySize covers the camera device's field of view "after" zoom.  See
      * ACAMERA_CONTROL_ZOOM_RATIO for details.</p>
      * <p>For camera devices with the
-     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#REQUEST_AVAILABLE_CAPABILTIES_ULTRA_HIGH_RESOLUTION_SENSOR">CameraMetadata#REQUEST_AVAILABLE_CAPABILTIES_ULTRA_HIGH_RESOLUTION_SENSOR</a>
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR">CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR</a>
      * capability, ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE_MAXIMUM_RESOLUTION /
      * ACAMERA_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE_MAXIMUM_RESOLUTION must be used as the
      * coordinate system for requests where ACAMERA_SENSOR_PIXEL_MODE is set to
@@ -3964,7 +3964,7 @@
      * configurations which belong to this physical camera, and it will advertise and will only
      * advertise the maximum supported resolutions for a particular format.</p>
      * <p>If this camera device isn't a physical camera device constituting a logical camera,
-     * but a standalone <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#REQUEST_AVAILABLE_CAPABILTIES_ULTRA_HIGH_RESOLUTION_SENSOR">CameraMetadata#REQUEST_AVAILABLE_CAPABILTIES_ULTRA_HIGH_RESOLUTION_SENSOR</a>
+     * but a standalone <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR">CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR</a>
      * camera, this field represents the multi-resolution input/output stream configurations of
      * default mode and max resolution modes. The sizes will be the maximum resolution of a
      * particular format for default mode and max resolution mode.</p>
@@ -4867,12 +4867,12 @@
      * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_DEFAULT">CameraMetadata#SENSOR_PIXEL_MODE_DEFAULT</a> mode.
      * When operating in
      * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_DEFAULT">CameraMetadata#SENSOR_PIXEL_MODE_DEFAULT</a> mode, sensors
-     * with <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#REQUEST_AVAILABLE_CAPABILTIES_ULTRA_HIGH_RESOLUTION_SENSOR">CameraMetadata#REQUEST_AVAILABLE_CAPABILTIES_ULTRA_HIGH_RESOLUTION_SENSOR</a>
+     * with <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR">CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR</a>
      * capability would typically perform pixel binning in order to improve low light
      * performance, noise reduction etc. However, in
      * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION">CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION</a>
      * mode (supported only
-     * by <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#REQUEST_AVAILABLE_CAPABILTIES_ULTRA_HIGH_RESOLUTION_SENSOR">CameraMetadata#REQUEST_AVAILABLE_CAPABILTIES_ULTRA_HIGH_RESOLUTION_SENSOR</a>
+     * by <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR">CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR</a>
      * sensors), sensors typically operate in unbinned mode allowing for a larger image size.
      * The stream configurations supported in
      * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION">CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION</a>
@@ -4905,7 +4905,7 @@
      * </ul></p>
      *
      * <p>This key will only be present in devices advertising the
-     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#REQUEST_AVAILABLE_CAPABILTIES_ULTRA_HIGH_RESOLUTION_SENSOR">CameraMetadata#REQUEST_AVAILABLE_CAPABILTIES_ULTRA_HIGH_RESOLUTION_SENSOR</a>
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR">CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR</a>
      * capability which also advertise <code>REMOSAIC_REPROCESSING</code> capability. On all other devices
      * RAW targets will have a regular bayer pattern.</p>
      */
@@ -5231,7 +5231,7 @@
      * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION">CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION</a>
      * counterparts.
      * This key will only be present for devices which advertise the
-     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#REQUEST_AVAILABLE_CAPABILTIES_ULTRA_HIGH_RESOLUTION_SENSOR">CameraMetadata#REQUEST_AVAILABLE_CAPABILTIES_ULTRA_HIGH_RESOLUTION_SENSOR</a>
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR">CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR</a>
      * capability.</p>
      * <p>The data representation is <code>int[4]</code>, which maps to <code>(left, top, width, height)</code>.</p>
      *
@@ -5263,7 +5263,7 @@
      * is, when ACAMERA_SENSOR_PIXEL_MODE is set to
      * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION">CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION</a>.
      * This key will only be present for devices which advertise the
-     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#REQUEST_AVAILABLE_CAPABILTIES_ULTRA_HIGH_RESOLUTION_SENSOR">CameraMetadata#REQUEST_AVAILABLE_CAPABILTIES_ULTRA_HIGH_RESOLUTION_SENSOR</a>
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR">CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR</a>
      * capability.</p>
      *
      * @see ACAMERA_SENSOR_INFO_PHYSICAL_SIZE
@@ -5291,7 +5291,7 @@
      * when ACAMERA_SENSOR_PIXEL_MODE is set to
      * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION">CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION</a>.
      * This key will only be present for devices which advertise the
-     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#REQUEST_AVAILABLE_CAPABILTIES_ULTRA_HIGH_RESOLUTION_SENSOR">CameraMetadata#REQUEST_AVAILABLE_CAPABILTIES_ULTRA_HIGH_RESOLUTION_SENSOR</a>
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR">CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR</a>
      * capability.</p>
      * <p>The data representation is <code>int[4]</code>, which maps to <code>(left, top, width, height)</code>.</p>
      *
@@ -5321,7 +5321,7 @@
      * <p>This key will not be present if REMOSAIC_REPROCESSING is not supported, since RAW images
      * will have a regular bayer pattern.</p>
      * <p>This key will not be present for sensors which don't have the
-     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#REQUEST_AVAILABLE_CAPABILTIES_ULTRA_HIGH_RESOLUTION_SENSOR">CameraMetadata#REQUEST_AVAILABLE_CAPABILTIES_ULTRA_HIGH_RESOLUTION_SENSOR</a>
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR">CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR</a>
      * capability.</p>
      */
     ACAMERA_SENSOR_INFO_BINNING_FACTOR =                        // int32[2]
@@ -9264,13 +9264,13 @@
     /**
      * <p>This is the default sensor pixel mode. This is the only sensor pixel mode
      * supported unless a camera device advertises
-     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#REQUEST_AVAILABLE_CAPABILTIES_ULTRA_HIGH_RESOLUTION_SENSOR">CameraMetadata#REQUEST_AVAILABLE_CAPABILTIES_ULTRA_HIGH_RESOLUTION_SENSOR</a>.</p>
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR">CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR</a>.</p>
      */
     ACAMERA_SENSOR_PIXEL_MODE_DEFAULT                                = 0,
 
     /**
      * <p>This sensor pixel mode is offered by devices with capability
-     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#REQUEST_AVAILABLE_CAPABILTIES_ULTRA_HIGH_RESOLUTION_SENSOR">CameraMetadata#REQUEST_AVAILABLE_CAPABILTIES_ULTRA_HIGH_RESOLUTION_SENSOR</a>.
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR">CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR</a>.
      * In this mode, sensors typically do not bin pixels, as a result can offer larger
      * image sizes.</p>
      */
diff --git a/camera/ndk/include/camera/NdkCaptureRequest.h b/camera/ndk/include/camera/NdkCaptureRequest.h
index a4dc374..d83c5b3 100644
--- a/camera/ndk/include/camera/NdkCaptureRequest.h
+++ b/camera/ndk/include/camera/NdkCaptureRequest.h
@@ -44,10 +44,10 @@
 
 __BEGIN_DECLS
 
-// Container for output targets
+/** Container for output targets */
 typedef struct ACameraOutputTargets ACameraOutputTargets;
 
-// Container for a single output target
+/** Container for a single output target */
 typedef struct ACameraOutputTarget ACameraOutputTarget;
 
 /**
@@ -383,10 +383,10 @@
  * Set/change a camera capture control entry with unsigned 8 bits data type for
  * a physical camera backing a logical multi-camera device.
  *
- * <p>Same as ACaptureRequest_setEntry_u8, except that if {@link tag} is contained
+ * <p>Same as ACaptureRequest_setEntry_u8, except that if tag is contained
  * in {@link ACAMERA_REQUEST_AVAILABLE_PHYSICAL_CAMERA_REQUEST_KEYS}, this function
  * sets the entry for a particular physical sub-camera backing the logical multi-camera.
- * If {@link tag} is not contained in
+ * If tag is not contained in
  * {@link ACAMERA_REQUEST_AVAILABLE_PHYSICAL_CAMERA_REQUEST_KEYS}, the key will be ignored
  * by the camera device.</p>
  *
@@ -413,10 +413,10 @@
  * Set/change a camera capture control entry with signed 32 bits data type for
  * a physical camera of a logical multi-camera device.
  *
- * <p>Same as ACaptureRequest_setEntry_i32, except that if {@link tag} is contained
+ * <p>Same as ACaptureRequest_setEntry_i32, except that if tag is contained
  * in {@link ACAMERA_REQUEST_AVAILABLE_PHYSICAL_CAMERA_REQUEST_KEYS}, this function
  * sets the entry for a particular physical sub-camera backing the logical multi-camera.
- * If {@link tag} is not contained in
+ * If tag is not contained in
  * {@link ACAMERA_REQUEST_AVAILABLE_PHYSICAL_CAMERA_REQUEST_KEYS}, the key will be ignored
  * by the camera device.</p>
  *
@@ -443,10 +443,10 @@
  * Set/change a camera capture control entry with float data type for
  * a physical camera of a logical multi-camera device.
  *
- * <p>Same as ACaptureRequest_setEntry_float, except that if {@link tag} is contained
+ * <p>Same as ACaptureRequest_setEntry_float, except that if tag is contained
  * in {@link ACAMERA_REQUEST_AVAILABLE_PHYSICAL_CAMERA_REQUEST_KEYS}, this function
  * sets the entry for a particular physical sub-camera backing the logical multi-camera.
- * If {@link tag} is not contained in
+ * If tag is not contained in
  * {@link ACAMERA_REQUEST_AVAILABLE_PHYSICAL_CAMERA_REQUEST_KEYS}, the key will be ignored
  * by the camera device.</p>
  *
@@ -473,10 +473,10 @@
  * Set/change a camera capture control entry with signed 64 bits data type for
  * a physical camera of a logical multi-camera device.
  *
- * <p>Same as ACaptureRequest_setEntry_i64, except that if {@link tag} is contained
+ * <p>Same as ACaptureRequest_setEntry_i64, except that if tag is contained
  * in {@link ACAMERA_REQUEST_AVAILABLE_PHYSICAL_CAMERA_REQUEST_KEYS}, this function
  * sets the entry for a particular physical sub-camera backing the logical multi-camera.
- * If {@link tag} is not contained in
+ * If tag is not contained in
  * {@link ACAMERA_REQUEST_AVAILABLE_PHYSICAL_CAMERA_REQUEST_KEYS}, the key will be ignored
  * by the camera device.</p>
  *
@@ -503,10 +503,10 @@
  * Set/change a camera capture control entry with double data type for
  * a physical camera of a logical multi-camera device.
  *
- * <p>Same as ACaptureRequest_setEntry_double, except that if {@link tag} is contained
+ * <p>Same as ACaptureRequest_setEntry_double, except that if tag is contained
  * in {@link ACAMERA_REQUEST_AVAILABLE_PHYSICAL_CAMERA_REQUEST_KEYS}, this function
  * sets the entry for a particular physical sub-camera backing the logical multi-camera.
- * If {@link tag} is not contained in
+ * If tag is not contained in
  * {@link ACAMERA_REQUEST_AVAILABLE_PHYSICAL_CAMERA_REQUEST_KEYS}, the key will be ignored
  * by the camera device.</p>
  *
@@ -533,10 +533,10 @@
  * Set/change a camera capture control entry with rational data type for
  * a physical camera of a logical multi-camera device.
  *
- * <p>Same as ACaptureRequest_setEntry_rational, except that if {@link tag} is contained
+ * <p>Same as ACaptureRequest_setEntry_rational, except that if tag is contained
  * in {@link ACAMERA_REQUEST_AVAILABLE_PHYSICAL_CAMERA_REQUEST_KEYS}, this function
  * sets the entry for a particular physical sub-camera backing the logical multi-camera.
- * If {@link tag} is not contained in
+ * If tag is not contained in
  * {@link ACAMERA_REQUEST_AVAILABLE_PHYSICAL_CAMERA_REQUEST_KEYS}, the key will be ignored
  * by the camera device.</p>
  *
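
To make the per-physical-camera behavior described above concrete, a small sketch; it assumes the usual NDK signature for ACaptureRequest_setEntry_physicalCamera_i32 and a hypothetical physical camera id "2":

#include <camera/NdkCameraMetadataTags.h>
#include <camera/NdkCaptureRequest.h>

static camera_status_t setPhysicalSensitivity(ACaptureRequest* request) {
    const int32_t sensitivity = 400;
    // Honored only if ACAMERA_SENSOR_SENSITIVITY is listed in
    // ACAMERA_REQUEST_AVAILABLE_PHYSICAL_CAMERA_REQUEST_KEYS for this device;
    // otherwise the camera device silently ignores the entry, as noted above.
    return ACaptureRequest_setEntry_physicalCamera_i32(
            request, /*physicalId=*/"2", ACAMERA_SENSOR_SENSITIVITY,
            /*count=*/1, &sensitivity);
}
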
diff --git a/drm/libmediadrm/interface/mediadrm/DrmUtils.h b/drm/libmediadrm/interface/mediadrm/DrmUtils.h
index 988cda9..ec0b878 100644
--- a/drm/libmediadrm/interface/mediadrm/DrmUtils.h
+++ b/drm/libmediadrm/interface/mediadrm/DrmUtils.h
@@ -211,7 +211,7 @@
     }
 
     auto allLogs(gLogBuf.getLogs());
-    LOG2BI("framework logs size %zu; plugin logs size %zu",
+    LOG2BD("framework logs size %zu; plugin logs size %zu",
            allLogs.size(), pluginLogs.size());
     std::copy(pluginLogs.begin(), pluginLogs.end(), std::back_inserter(allLogs));
     std::sort(allLogs.begin(), allLogs.end(),
diff --git a/media/codec2/components/aac/C2SoftAacEnc.cpp b/media/codec2/components/aac/C2SoftAacEnc.cpp
index ea76cbb..d865ab2 100644
--- a/media/codec2/components/aac/C2SoftAacEnc.cpp
+++ b/media/codec2/components/aac/C2SoftAacEnc.cpp
@@ -272,8 +272,9 @@
         return UNKNOWN_ERROR;
     }
 
-    if (sbrMode != -1 && aacProfile == C2Config::PROFILE_AAC_ELD) {
-        if (AACENC_OK != aacEncoder_SetParam(mAACEncoder, AACENC_SBR_MODE, sbrMode)) {
+    if (sbrMode != C2Config::AAC_SBR_AUTO && aacProfile == C2Config::PROFILE_AAC_ELD) {
+        int aacSbrMode = sbrMode != C2Config::AAC_SBR_OFF;
+        if (AACENC_OK != aacEncoder_SetParam(mAACEncoder, AACENC_SBR_MODE, aacSbrMode)) {
             ALOGE("Failed to set AAC encoder parameters");
             return UNKNOWN_ERROR;
         }
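
The hunk above stops passing the raw C2Config SBR enum straight into the FDK parameter and instead collapses it to the encoder's on/off value. A tiny restatement of that mapping; the C2Config enumerator values shown are assumptions for illustration:

#include <cstdint>

// Assumed C2Config::aac_sbr_mode_t values, for illustration only.
enum AacSbrMode : int32_t {
    AAC_SBR_OFF = 0,
    AAC_SBR_SINGLE_RATE = 1,
    AAC_SBR_DUAL_RATE = 2,
    AAC_SBR_AUTO = 3,
};

// Returns the value to hand to AACENC_SBR_MODE, or -1 to leave the encoder default
// (AUTO, or any profile other than AAC-ELD, does not touch the parameter).
static int32_t toAacencSbrMode(AacSbrMode sbrMode, bool isEldProfile) {
    if (sbrMode == AAC_SBR_AUTO || !isEldProfile) return -1;
    return sbrMode != AAC_SBR_OFF ? 1 : 0;
}
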
diff --git a/media/codec2/components/avc/C2SoftAvcEnc.cpp b/media/codec2/components/avc/C2SoftAvcEnc.cpp
index bab651f..fc5b75d 100644
--- a/media/codec2/components/avc/C2SoftAvcEnc.cpp
+++ b/media/codec2/components/avc/C2SoftAvcEnc.cpp
@@ -454,11 +454,19 @@
 
 }  // namespace
 
+static IV_COLOR_FORMAT_T GetIvColorFormat() {
+    static IV_COLOR_FORMAT_T sColorFormat =
+        (GetYuv420FlexibleLayout() == FLEX_LAYOUT_SEMIPLANAR_UV) ? IV_YUV_420SP_UV :
+        (GetYuv420FlexibleLayout() == FLEX_LAYOUT_SEMIPLANAR_VU) ? IV_YUV_420SP_VU :
+        IV_YUV_420P;
+    return sColorFormat;
+}
+
 C2SoftAvcEnc::C2SoftAvcEnc(
         const char *name, c2_node_id_t id, const std::shared_ptr<IntfImpl> &intfImpl)
     : SimpleC2Component(std::make_shared<SimpleInterface<IntfImpl>>(name, id, intfImpl)),
       mIntf(intfImpl),
-      mIvVideoColorFormat(IV_YUV_420P),
+      mIvVideoColorFormat(GetIvColorFormat()),
       mAVCEncProfile(IV_PROFILE_BASE),
       mAVCEncLevel(41),
       mStarted(false),
@@ -1026,8 +1034,7 @@
     // Assume worst case output buffer size to be equal to number of bytes in input
     mOutBufferSize = std::max(width * height * 3 / 2, kMinOutBufferSize);
 
-    // TODO
-    mIvVideoColorFormat = IV_YUV_420P;
+    mIvVideoColorFormat = GetIvColorFormat();
 
     ALOGD("Params width %d height %d level %d colorFormat %d bframes %d", width,
             height, mAVCEncLevel, mIvVideoColorFormat, mBframes);
@@ -1325,7 +1332,6 @@
               mSize->width, input->height(), mSize->height);
         return C2_BAD_VALUE;
     }
-    ALOGV("width = %d, height = %d", input->width(), input->height());
     const C2PlanarLayout &layout = input->layout();
     uint8_t *yPlane = const_cast<uint8_t *>(input->data()[C2PlanarLayout::PLANE_Y]);
     uint8_t *uPlane = const_cast<uint8_t *>(input->data()[C2PlanarLayout::PLANE_U]);
@@ -1362,7 +1368,8 @@
                 return C2_BAD_VALUE;
             }
 
-            if (layout.planes[layout.PLANE_Y].colInc == 1
+            if (mIvVideoColorFormat == IV_YUV_420P
+                    && layout.planes[layout.PLANE_Y].colInc == 1
                     && layout.planes[layout.PLANE_U].colInc == 1
                     && layout.planes[layout.PLANE_V].colInc == 1
                     && uStride == vStride
@@ -1370,21 +1377,61 @@
                 // I420 compatible - already set up above
                 break;
             }
+            if (mIvVideoColorFormat == IV_YUV_420SP_UV
+                    && layout.planes[layout.PLANE_Y].colInc == 1
+                    && layout.planes[layout.PLANE_U].colInc == 2
+                    && layout.planes[layout.PLANE_V].colInc == 2
+                    && uStride == vStride
+                    && yStride == vStride
+                    && uPlane + 1 == vPlane) {
+                // NV12 compatible - already set up above
+                break;
+            }
+            if (mIvVideoColorFormat == IV_YUV_420SP_VU
+                    && layout.planes[layout.PLANE_Y].colInc == 1
+                    && layout.planes[layout.PLANE_U].colInc == 2
+                    && layout.planes[layout.PLANE_V].colInc == 2
+                    && uStride == vStride
+                    && yStride == vStride
+                    && uPlane == vPlane + 1) {
+                // NV21 compatible - already set up above
+                break;
+            }
 
             // copy to I420
             yStride = width;
             uStride = vStride = yStride / 2;
             MemoryBlock conversionBuffer = mConversionBuffers.fetch(yPlaneSize * 3 / 2);
             mConversionBuffersInUse.emplace(conversionBuffer.data(), conversionBuffer);
-            MediaImage2 img = CreateYUV420PlanarMediaImage2(width, height, yStride, height);
+            MediaImage2 img;
+            switch (mIvVideoColorFormat) {
+                case IV_YUV_420P:
+                    img = CreateYUV420PlanarMediaImage2(width, height, yStride, height);
+                    yPlane = conversionBuffer.data();
+                    uPlane = yPlane + yPlaneSize;
+                    vPlane = uPlane + yPlaneSize / 4;
+                    break;
+                case IV_YUV_420SP_VU:
+                    img = CreateYUV420SemiPlanarMediaImage2(width, height, yStride, height);
+                    img.mPlane[MediaImage2::U].mOffset++;
+                    img.mPlane[MediaImage2::V].mOffset--;
+                    yPlane = conversionBuffer.data();
+                    vPlane = yPlane + yPlaneSize;
+                    uPlane = vPlane + 1;
+                    break;
+                case IV_YUV_420SP_UV:
+                default:
+                    img = CreateYUV420SemiPlanarMediaImage2(width, height, yStride, height);
+                    yPlane = conversionBuffer.data();
+                    uPlane = yPlane + yPlaneSize;
+                    vPlane = uPlane + 1;
+                    break;
+            }
             status_t err = ImageCopy(conversionBuffer.data(), &img, *input);
             if (err != OK) {
                 ALOGE("Buffer conversion failed: %d", err);
                 return C2_BAD_VALUE;
             }
-            yPlane = conversionBuffer.data();
-            uPlane = yPlane + yPlaneSize;
-            vPlane = uPlane + yPlaneSize / 4;
             break;
 
         }
@@ -1430,15 +1477,17 @@
             break;
         }
 
-        case IV_YUV_420SP_UV:
         case IV_YUV_420SP_VU:
+            uPlane = vPlane;
+            [[fallthrough]];
+        case IV_YUV_420SP_UV:
         default:
         {
             ps_inp_raw_buf->apv_bufs[0] = yPlane;
             ps_inp_raw_buf->apv_bufs[1] = uPlane;
 
             ps_inp_raw_buf->au4_wd[0] = mSize->width;
-            ps_inp_raw_buf->au4_wd[1] = mSize->width;
+            ps_inp_raw_buf->au4_wd[1] = mSize->width / 2;
 
             ps_inp_raw_buf->au4_ht[0] = mSize->height;
             ps_inp_raw_buf->au4_ht[1] = mSize->height / 2;
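
A condensed restatement of the new layout checks above, using a simplified plane description rather than the real C2PlanarLayout (the names here are illustrative): the encoder can consume the graphic buffer directly only when the flexible YUV layout already matches its configured I420/NV12/NV21 format, and everything else goes through the ImageCopy() conversion path.

#include <cstdint>

struct PlaneDesc {
    const uint8_t* data;   // first sample of the plane
    int32_t colInc;        // bytes between horizontally adjacent samples
    int32_t rowStride;     // bytes between vertically adjacent samples
};

enum class Yuv420Kind { I420, NV12, NV21, NeedsConversion };

static Yuv420Kind classifyLayout(const PlaneDesc& y, const PlaneDesc& u, const PlaneDesc& v) {
    if (y.colInc == 1 && u.colInc == 1 && v.colInc == 1 && u.rowStride == v.rowStride) {
        return Yuv420Kind::I420;                            // three contiguous planes
    }
    if (y.colInc == 1 && u.colInc == 2 && v.colInc == 2 &&
        u.rowStride == v.rowStride && y.rowStride == v.rowStride) {
        if (u.data + 1 == v.data) return Yuv420Kind::NV12;  // interleaved UVUV...
        if (v.data + 1 == u.data) return Yuv420Kind::NV21;  // interleaved VUVU...
    }
    return Yuv420Kind::NeedsConversion;                     // copy into a matching buffer first
}
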
diff --git a/media/codec2/hidl/1.0/vts/.clang-format b/media/codec2/hidl/1.0/vts/.clang-format
new file mode 120000
index 0000000..136279c
--- /dev/null
+++ b/media/codec2/hidl/1.0/vts/.clang-format
@@ -0,0 +1 @@
+../../../../../../../build/soong/scripts/system-clang-format
\ No newline at end of file
diff --git a/media/codec2/hidl/1.0/vts/functional/audio/VtsHalMediaC2V1_0TargetAudioDecTest.cpp b/media/codec2/hidl/1.0/vts/functional/audio/VtsHalMediaC2V1_0TargetAudioDecTest.cpp
index 1f95eaf..efc5813 100644
--- a/media/codec2/hidl/1.0/vts/functional/audio/VtsHalMediaC2V1_0TargetAudioDecTest.cpp
+++ b/media/codec2/hidl/1.0/vts/functional/audio/VtsHalMediaC2V1_0TargetAudioDecTest.cpp
@@ -33,11 +33,11 @@
 using android::C2AllocatorIon;
 
 #include "media_c2_hidl_test_common.h"
+using DecodeTestParameters = std::tuple<std::string, std::string, uint32_t, bool>;
+static std::vector<DecodeTestParameters> kDecodeTestParameters;
 
-static std::vector<std::tuple<std::string, std::string, std::string, std::string>>
-        kDecodeTestParameters;
-
-static std::vector<std::tuple<std::string, std::string, std::string>> kCsdFlushTestParameters;
+using CsdFlushTestParameters = std::tuple<std::string, std::string, bool>;
+static std::vector<CsdFlushTestParameters> kCsdFlushTestParameters;
 
 struct CompToURL {
     std::string mime;
@@ -46,36 +46,26 @@
 };
 
 std::vector<CompToURL> kCompToURL = {
-    {"mp4a-latm",
-     "bbb_aac_stereo_128kbps_48000hz.aac", "bbb_aac_stereo_128kbps_48000hz.info"},
-    {"mp4a-latm",
-     "bbb_aac_stereo_128kbps_48000hz.aac", "bbb_aac_stereo_128kbps_48000hz_multi_frame.info"},
-    {"audio/mpeg",
-     "bbb_mp3_stereo_192kbps_48000hz.mp3", "bbb_mp3_stereo_192kbps_48000hz.info"},
-    {"audio/mpeg",
-     "bbb_mp3_stereo_192kbps_48000hz.mp3", "bbb_mp3_stereo_192kbps_48000hz_multi_frame.info"},
-    {"3gpp",
-     "sine_amrnb_1ch_12kbps_8000hz.amrnb", "sine_amrnb_1ch_12kbps_8000hz.info"},
-    {"3gpp",
-     "sine_amrnb_1ch_12kbps_8000hz.amrnb", "sine_amrnb_1ch_12kbps_8000hz_multi_frame.info"},
-    {"amr-wb",
-     "bbb_amrwb_1ch_14kbps_16000hz.amrwb", "bbb_amrwb_1ch_14kbps_16000hz.info"},
-    {"amr-wb",
-     "bbb_amrwb_1ch_14kbps_16000hz.amrwb", "bbb_amrwb_1ch_14kbps_16000hz_multi_frame.info"},
-    {"vorbis",
-     "bbb_vorbis_stereo_128kbps_48000hz.vorbis", "bbb_vorbis_stereo_128kbps_48000hz.info"},
-    {"opus",
-     "bbb_opus_stereo_128kbps_48000hz.opus", "bbb_opus_stereo_128kbps_48000hz.info"},
-    {"g711-alaw",
-     "bbb_g711alaw_1ch_8khz.raw", "bbb_g711alaw_1ch_8khz.info"},
-    {"g711-mlaw",
-     "bbb_g711mulaw_1ch_8khz.raw", "bbb_g711mulaw_1ch_8khz.info"},
-    {"gsm",
-     "bbb_gsm_1ch_8khz_13kbps.raw", "bbb_gsm_1ch_8khz_13kbps.info"},
-    {"raw",
-     "bbb_raw_1ch_8khz_s32le.raw", "bbb_raw_1ch_8khz_s32le.info"},
-    {"flac",
-     "bbb_flac_stereo_680kbps_48000hz.flac", "bbb_flac_stereo_680kbps_48000hz.info"},
+        {"mp4a-latm", "bbb_aac_stereo_128kbps_48000hz.aac", "bbb_aac_stereo_128kbps_48000hz.info"},
+        {"mp4a-latm", "bbb_aac_stereo_128kbps_48000hz.aac",
+         "bbb_aac_stereo_128kbps_48000hz_multi_frame.info"},
+        {"audio/mpeg", "bbb_mp3_stereo_192kbps_48000hz.mp3", "bbb_mp3_stereo_192kbps_48000hz.info"},
+        {"audio/mpeg", "bbb_mp3_stereo_192kbps_48000hz.mp3",
+         "bbb_mp3_stereo_192kbps_48000hz_multi_frame.info"},
+        {"3gpp", "sine_amrnb_1ch_12kbps_8000hz.amrnb", "sine_amrnb_1ch_12kbps_8000hz.info"},
+        {"3gpp", "sine_amrnb_1ch_12kbps_8000hz.amrnb",
+         "sine_amrnb_1ch_12kbps_8000hz_multi_frame.info"},
+        {"amr-wb", "bbb_amrwb_1ch_14kbps_16000hz.amrwb", "bbb_amrwb_1ch_14kbps_16000hz.info"},
+        {"amr-wb", "bbb_amrwb_1ch_14kbps_16000hz.amrwb",
+         "bbb_amrwb_1ch_14kbps_16000hz_multi_frame.info"},
+        {"vorbis", "bbb_vorbis_stereo_128kbps_48000hz.vorbis",
+         "bbb_vorbis_stereo_128kbps_48000hz.info"},
+        {"opus", "bbb_opus_stereo_128kbps_48000hz.opus", "bbb_opus_stereo_128kbps_48000hz.info"},
+        {"g711-alaw", "bbb_g711alaw_1ch_8khz.raw", "bbb_g711alaw_1ch_8khz.info"},
+        {"g711-mlaw", "bbb_g711mulaw_1ch_8khz.raw", "bbb_g711mulaw_1ch_8khz.info"},
+        {"gsm", "bbb_gsm_1ch_8khz_13kbps.raw", "bbb_gsm_1ch_8khz_13kbps.info"},
+        {"raw", "bbb_raw_1ch_8khz_s32le.raw", "bbb_raw_1ch_8khz_s32le.info"},
+        {"flac", "bbb_flac_stereo_680kbps_48000hz.flac", "bbb_flac_stereo_680kbps_48000hz.info"},
 };
 
 class LinearBuffer : public C2Buffer {
@@ -212,9 +202,8 @@
     }
 };
 
-class Codec2AudioDecHidlTest
-    : public Codec2AudioDecHidlTestBase,
-      public ::testing::WithParamInterface<std::tuple<std::string, std::string>> {
+class Codec2AudioDecHidlTest : public Codec2AudioDecHidlTestBase,
+                               public ::testing::WithParamInterface<TestParameters> {
     void getParams() {
         mInstanceName = std::get<0>(GetParam());
         mComponentName = std::get<1>(GetParam());
@@ -438,10 +427,8 @@
     ASSERT_EQ(mComponent->stop(), C2_OK);
 }
 
-class Codec2AudioDecDecodeTest
-    : public Codec2AudioDecHidlTestBase,
-      public ::testing::WithParamInterface<
-              std::tuple<std::string, std::string, std::string, std::string>> {
+class Codec2AudioDecDecodeTest : public Codec2AudioDecHidlTestBase,
+                                 public ::testing::WithParamInterface<DecodeTestParameters> {
     void getParams() {
         mInstanceName = std::get<0>(GetParam());
         mComponentName = std::get<1>(GetParam());
@@ -452,9 +439,8 @@
     description("Decodes input file");
     if (mDisableTest) GTEST_SKIP() << "Test is disabled";
 
-    uint32_t streamIndex = std::stoi(std::get<2>(GetParam()));
-    ;
-    bool signalEOS = !std::get<3>(GetParam()).compare("true");
+    uint32_t streamIndex = std::get<2>(GetParam());
+    bool signalEOS = std::get<3>(GetParam());
     mTimestampDevTest = true;
     char mURL[512], info[512];
     android::Vector<FrameInfo> Info;
@@ -771,9 +757,8 @@
     ASSERT_EQ(mComponent->stop(), C2_OK);
 }
 
-class Codec2AudioDecCsdInputTests
-    : public Codec2AudioDecHidlTestBase,
-      public ::testing::WithParamInterface<std::tuple<std::string, std::string, std::string>> {
+class Codec2AudioDecCsdInputTests : public Codec2AudioDecHidlTestBase,
+                                    public ::testing::WithParamInterface<CsdFlushTestParameters> {
     void getParams() {
         mInstanceName = std::get<0>(GetParam());
         mComponentName = std::get<1>(GetParam());
@@ -819,7 +804,7 @@
     ASSERT_EQ(eleStream.is_open(), true);
 
     bool signalEOS = false;
-    bool flushCsd = !std::get<2>(GetParam()).compare("true");
+    bool flushCsd = std::get<2>(GetParam());
     ALOGV("sending %d csd data ", numCsds);
     int framesToDecode = numCsds;
     ASSERT_NO_FATAL_FAILURE(decodeNFrames(mComponent, mQueueLock, mQueueCondition, mWorkQueue,
@@ -875,16 +860,16 @@
 }
 
 INSTANTIATE_TEST_SUITE_P(PerInstance, Codec2AudioDecHidlTest, testing::ValuesIn(kTestParameters),
-                         android::hardware::PrintInstanceTupleNameToString<>);
+                         PrintInstanceTupleNameToString<>);
 
 // DecodeTest with StreamIndex and EOS / No EOS
 INSTANTIATE_TEST_SUITE_P(StreamIndexAndEOS, Codec2AudioDecDecodeTest,
                          testing::ValuesIn(kDecodeTestParameters),
-                         android::hardware::PrintInstanceTupleNameToString<>);
+                         PrintInstanceTupleNameToString<>);
 
 INSTANTIATE_TEST_SUITE_P(CsdInputs, Codec2AudioDecCsdInputTests,
                          testing::ValuesIn(kCsdFlushTestParameters),
-                         android::hardware::PrintInstanceTupleNameToString<>);
+                         PrintInstanceTupleNameToString<>);
 
 }  // anonymous namespace
 
@@ -893,18 +878,18 @@
     kTestParameters = getTestParameters(C2Component::DOMAIN_AUDIO, C2Component::KIND_DECODER);
     for (auto params : kTestParameters) {
         kDecodeTestParameters.push_back(
-                std::make_tuple(std::get<0>(params), std::get<1>(params), "0", "false"));
+                std::make_tuple(std::get<0>(params), std::get<1>(params), 0, false));
         kDecodeTestParameters.push_back(
-                std::make_tuple(std::get<0>(params), std::get<1>(params), "0", "true"));
+                std::make_tuple(std::get<0>(params), std::get<1>(params), 0, true));
         kDecodeTestParameters.push_back(
-                std::make_tuple(std::get<0>(params), std::get<1>(params), "1", "false"));
+                std::make_tuple(std::get<0>(params), std::get<1>(params), 1, false));
         kDecodeTestParameters.push_back(
-                std::make_tuple(std::get<0>(params), std::get<1>(params), "1", "true"));
+                std::make_tuple(std::get<0>(params), std::get<1>(params), 1, true));
 
         kCsdFlushTestParameters.push_back(
-                std::make_tuple(std::get<0>(params), std::get<1>(params), "true"));
+                std::make_tuple(std::get<0>(params), std::get<1>(params), true));
         kCsdFlushTestParameters.push_back(
-                std::make_tuple(std::get<0>(params), std::get<1>(params), "false"));
+                std::make_tuple(std::get<0>(params), std::get<1>(params), false));
     }
 
     ::testing::InitGoogleTest(&argc, argv);
diff --git a/media/codec2/hidl/1.0/vts/functional/audio/VtsHalMediaC2V1_0TargetAudioEncTest.cpp b/media/codec2/hidl/1.0/vts/functional/audio/VtsHalMediaC2V1_0TargetAudioEncTest.cpp
index 5e8809e..562c77f 100644
--- a/media/codec2/hidl/1.0/vts/functional/audio/VtsHalMediaC2V1_0TargetAudioEncTest.cpp
+++ b/media/codec2/hidl/1.0/vts/functional/audio/VtsHalMediaC2V1_0TargetAudioEncTest.cpp
@@ -35,8 +35,9 @@
 
 #include "media_c2_hidl_test_common.h"
 
-static std::vector<std::tuple<std::string, std::string, std::string, std::string>>
-        kEncodeTestParameters;
+using EncodeTestParameters = std::tuple<std::string, std::string, bool, int32_t>;
+
+static std::vector<EncodeTestParameters> kEncodeTestParameters;
 
 class LinearBuffer : public C2Buffer {
   public:
@@ -170,9 +171,8 @@
     }
 };
 
-class Codec2AudioEncHidlTest
-    : public Codec2AudioEncHidlTestBase,
-      public ::testing::WithParamInterface<std::tuple<std::string, std::string>> {
+class Codec2AudioEncHidlTest : public Codec2AudioEncHidlTestBase,
+                               public ::testing::WithParamInterface<TestParameters> {
     void getParams() {
         mInstanceName = std::get<0>(GetParam());
         mComponentName = std::get<1>(GetParam());
@@ -246,7 +246,8 @@
         *nChannels = 1;
         *nSampleRate = 16000;
         *samplesPerFrame = 160;
-    } else return false;
+    } else
+        return false;
 
     return true;
 }
@@ -258,10 +259,8 @@
         const char* mURL;
     };
     static const CompToURL kCompToURL[] = {
-            {"mp4a-latm", "bbb_raw_2ch_48khz_s16le.raw"},
-            {"3gpp", "bbb_raw_1ch_8khz_s16le.raw"},
-            {"amr-wb", "bbb_raw_1ch_16khz_s16le.raw"},
-            {"flac", "bbb_raw_2ch_48khz_s16le.raw"},
+            {"mp4a-latm", "bbb_raw_2ch_48khz_s16le.raw"}, {"3gpp", "bbb_raw_1ch_8khz_s16le.raw"},
+            {"amr-wb", "bbb_raw_1ch_16khz_s16le.raw"},    {"flac", "bbb_raw_2ch_48khz_s16le.raw"},
             {"opus", "bbb_raw_2ch_48khz_s16le.raw"},
     };
 
@@ -363,10 +362,8 @@
     ASSERT_EQ(mDisableTest, false);
 }
 
-class Codec2AudioEncEncodeTest
-    : public Codec2AudioEncHidlTestBase,
-      public ::testing::WithParamInterface<
-              std::tuple<std::string, std::string, std::string, std::string>> {
+class Codec2AudioEncEncodeTest : public Codec2AudioEncHidlTestBase,
+                                 public ::testing::WithParamInterface<EncodeTestParameters> {
     void getParams() {
         mInstanceName = std::get<0>(GetParam());
         mComponentName = std::get<1>(GetParam());
@@ -379,9 +376,9 @@
     char mURL[512];
     strcpy(mURL, sResourceDir.c_str());
     GetURLForComponent(mURL);
-    bool signalEOS = !std::get<2>(GetParam()).compare("true");
+    bool signalEOS = std::get<2>(GetParam());
     // Ratio w.r.t to mInputMaxBufSize
-    int32_t inputMaxBufRatio = std::stoi(std::get<3>(GetParam()));
+    int32_t inputMaxBufRatio = std::get<3>(GetParam());
 
     int32_t nChannels;
     int32_t nSampleRate;
@@ -428,8 +425,7 @@
         ALOGE("framesReceived : %d inputFrames : %u", mFramesReceived, numFrames);
         ASSERT_TRUE(false);
     }
-    if ((mMime.find("flac") != std::string::npos) ||
-        (mMime.find("opus") != std::string::npos) ||
+    if ((mMime.find("flac") != std::string::npos) || (mMime.find("opus") != std::string::npos) ||
         (mMime.find("mp4a-latm") != std::string::npos)) {
         ASSERT_TRUE(mCsd) << "CSD buffer missing";
     }
@@ -753,13 +749,13 @@
 }
 
 INSTANTIATE_TEST_SUITE_P(PerInstance, Codec2AudioEncHidlTest, testing::ValuesIn(kTestParameters),
-                         android::hardware::PrintInstanceTupleNameToString<>);
+                         PrintInstanceTupleNameToString<>);
 
 // EncodeTest with EOS / No EOS and inputMaxBufRatio
 // inputMaxBufRatio is ratio w.r.t. to mInputMaxBufSize
 INSTANTIATE_TEST_SUITE_P(EncodeTest, Codec2AudioEncEncodeTest,
                          testing::ValuesIn(kEncodeTestParameters),
-                         android::hardware::PrintInstanceTupleNameToString<>);
+                         PrintInstanceTupleNameToString<>);
 
 }  // anonymous namespace
 
@@ -768,13 +764,13 @@
     kTestParameters = getTestParameters(C2Component::DOMAIN_AUDIO, C2Component::KIND_ENCODER);
     for (auto params : kTestParameters) {
         kEncodeTestParameters.push_back(
-                std::make_tuple(std::get<0>(params), std::get<1>(params), "false", "1"));
+                std::make_tuple(std::get<0>(params), std::get<1>(params), false, 1));
         kEncodeTestParameters.push_back(
-                std::make_tuple(std::get<0>(params), std::get<1>(params), "false", "2"));
+                std::make_tuple(std::get<0>(params), std::get<1>(params), false, 2));
         kEncodeTestParameters.push_back(
-                std::make_tuple(std::get<0>(params), std::get<1>(params), "true", "1"));
+                std::make_tuple(std::get<0>(params), std::get<1>(params), true, 1));
         kEncodeTestParameters.push_back(
-                std::make_tuple(std::get<0>(params), std::get<1>(params), "true", "2"));
+                std::make_tuple(std::get<0>(params), std::get<1>(params), true, 2));
     }
 
     ::testing::InitGoogleTest(&argc, argv);
diff --git a/media/codec2/hidl/1.0/vts/functional/common/media_c2_hidl_test_common.cpp b/media/codec2/hidl/1.0/vts/functional/common/media_c2_hidl_test_common.cpp
index de34705..1f1681d 100644
--- a/media/codec2/hidl/1.0/vts/functional/common/media_c2_hidl_test_common.cpp
+++ b/media/codec2/hidl/1.0/vts/functional/common/media_c2_hidl_test_common.cpp
@@ -27,13 +27,13 @@
 std::string sComponentNamePrefix = "";
 
 static constexpr struct option kArgOptions[] = {
-    {"res", required_argument, 0, 'P'},
-    {"prefix", required_argument, 0, 'p'},
-    {"help", required_argument, 0, 'h'},
-    {nullptr, 0, nullptr, 0},
+        {"res", required_argument, 0, 'P'},
+        {"prefix", required_argument, 0, 'p'},
+        {"help", required_argument, 0, 'h'},
+        {nullptr, 0, nullptr, 0},
 };
 
-void printUsage(char *me) {
+void printUsage(char* me) {
     std::cerr << "VTS tests to test codec2 components \n";
     std::cerr << "Usage: " << me << " [options] \n";
     std::cerr << "\t -P,  --res:    Mandatory path to a folder that contains test resources \n";
@@ -49,17 +49,17 @@
     int option_index;
     while ((arg = getopt_long(argc, argv, ":P:p:h", kArgOptions, &option_index)) != -1) {
         switch (arg) {
-        case 'P':
-            sResourceDir = optarg;
-            break;
-        case 'p':
-            sComponentNamePrefix = optarg;
-            break;
-        case 'h':
-            printUsage(argv[0]);
-            break;
-        default:
-            break;
+            case 'P':
+                sResourceDir = optarg;
+                break;
+            case 'p':
+                sComponentNamePrefix = optarg;
+                break;
+            case 'h':
+                printUsage(argv[0]);
+                break;
+            default:
+                break;
         }
     }
 }
@@ -134,8 +134,7 @@
         for (size_t i = 0; i < updates.size(); ++i) {
             C2Param* param = updates[i].get();
             if (param->index() == C2StreamInitDataInfo::output::PARAM_TYPE) {
-                C2StreamInitDataInfo::output* csdBuffer =
-                        (C2StreamInitDataInfo::output*)(param);
+                C2StreamInitDataInfo::output* csdBuffer = (C2StreamInitDataInfo::output*)(param);
                 size_t csdSize = csdBuffer->flexCount();
                 if (csdSize > 0) csd = true;
             } else if ((param->index() == C2StreamSampleRateInfo::output::PARAM_TYPE) ||
@@ -160,8 +159,7 @@
             typedef std::unique_lock<std::mutex> ULock;
             ULock l(queueLock);
             workQueue.push_back(std::move(work));
-            if (!flushedIndices.empty() &&
-                (frameIndexIt != flushedIndices.end())) {
+            if (!flushedIndices.empty() && (frameIndexIt != flushedIndices.end())) {
                 flushedIndices.erase(frameIndexIt);
             }
             queueCondition.notify_all();
@@ -178,15 +176,15 @@
 }
 
 // Return all test parameters, a list of tuple of <instance, component>
-const std::vector<std::tuple<std::string, std::string>>& getTestParameters() {
+const std::vector<TestParameters>& getTestParameters() {
     return getTestParameters(C2Component::DOMAIN_OTHER, C2Component::KIND_OTHER);
 }
 
 // Return all test parameters, a list of tuple of <instance, component> with matching domain and
 // kind.
-const std::vector<std::tuple<std::string, std::string>>& getTestParameters(
-        C2Component::domain_t domain, C2Component::kind_t kind) {
-    static std::vector<std::tuple<std::string, std::string>> parameters;
+const std::vector<TestParameters>& getTestParameters(C2Component::domain_t domain,
+                                                     C2Component::kind_t kind) {
+    static std::vector<TestParameters> parameters;
 
     auto instances = android::Codec2Client::GetServiceNames();
     for (std::string instance : instances) {
diff --git a/media/codec2/hidl/1.0/vts/functional/common/media_c2_hidl_test_common.h b/media/codec2/hidl/1.0/vts/functional/common/media_c2_hidl_test_common.h
index a2f1561..e74f247 100644
--- a/media/codec2/hidl/1.0/vts/functional/common/media_c2_hidl_test_common.h
+++ b/media/codec2/hidl/1.0/vts/functional/common/media_c2_hidl_test_common.h
@@ -40,7 +40,8 @@
 
 using namespace ::std::chrono;
 
-static std::vector<std::tuple<std::string, std::string>> kTestParameters;
+using TestParameters = std::tuple<std::string, std::string>;
+static std::vector<TestParameters> kTestParameters;
 
 // Resource directory
 extern std::string sResourceDir;
@@ -54,6 +55,18 @@
     int64_t timestamp;
 };
 
+template <typename... T>
+static inline std::string PrintInstanceTupleNameToString(
+        const testing::TestParamInfo<std::tuple<T...>>& info) {
+    std::stringstream ss;
+    std::apply([&ss](auto&&... elems) { ((ss << elems << '_'), ...); }, info.param);
+    ss << info.index;
+    std::string param_string = ss.str();
+    auto isNotAlphaNum = [](char c) { return !std::isalnum(c); };
+    std::replace_if(param_string.begin(), param_string.end(), isNotAlphaNum, '_');
+    return param_string;
+}
+
 /*
  * Handle Callback functions onWorkDone(), onTripped(),
  * onError(), onDeath(), onFramesRendered()
@@ -114,12 +127,12 @@
 void parseArgs(int argc, char** argv);
 
 // Return all test parameters, a list of tuple of <instance, component>.
-const std::vector<std::tuple<std::string, std::string>>& getTestParameters();
+const std::vector<TestParameters>& getTestParameters();
 
 // Return all test parameters, a list of tuple of <instance, component> with matching domain and
 // kind.
-const std::vector<std::tuple<std::string, std::string>>& getTestParameters(
-        C2Component::domain_t domain, C2Component::kind_t kind);
+const std::vector<TestParameters>& getTestParameters(C2Component::domain_t domain,
+                                                     C2Component::kind_t kind);
 
 /*
  * common functions declarations
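
As a usage note for the PrintInstanceTupleNameToString helper introduced above, a standalone sketch (with an arbitrary example tuple and index) showing the test-name string it produces: every tuple element is streamed followed by '_', the parameter index is appended, and non-alphanumeric characters are squashed to '_'.

#include <algorithm>
#include <cctype>
#include <iostream>
#include <sstream>
#include <string>
#include <tuple>

int main() {
    // Example DecodeTestParameters-style tuple; the component name is arbitrary.
    auto param = std::make_tuple(std::string("default"),
                                 std::string("c2.android.aac.decoder"), 0u, false);
    std::stringstream ss;
    std::apply([&ss](auto&&... elems) { ((ss << elems << '_'), ...); }, param);
    ss << 7;  // stands in for info.index
    std::string name = ss.str();
    std::replace_if(name.begin(), name.end(),
                    [](char c) { return !std::isalnum(static_cast<unsigned char>(c)); }, '_');
    std::cout << name << '\n';  // prints: default_c2_android_aac_decoder_0_0_7
    return 0;
}
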
diff --git a/media/codec2/hidl/1.0/vts/functional/component/VtsHalMediaC2V1_0TargetComponentTest.cpp b/media/codec2/hidl/1.0/vts/functional/component/VtsHalMediaC2V1_0TargetComponentTest.cpp
index 0648dd9..29acd33 100644
--- a/media/codec2/hidl/1.0/vts/functional/component/VtsHalMediaC2V1_0TargetComponentTest.cpp
+++ b/media/codec2/hidl/1.0/vts/functional/component/VtsHalMediaC2V1_0TargetComponentTest.cpp
@@ -53,9 +53,8 @@
     }
 
 namespace {
-
-static std::vector<std::tuple<std::string, std::string, std::string, std::string>>
-        kInputTestParameters;
+using InputTestParameters = std::tuple<std::string, std::string, uint32_t, bool>;
+static std::vector<InputTestParameters> kInputTestParameters;
 
 // google.codec2 Component test setup
 class Codec2ComponentHidlTestBase : public ::testing::Test {
@@ -120,9 +119,8 @@
     }
 };
 
-class Codec2ComponentHidlTest
-    : public Codec2ComponentHidlTestBase,
-      public ::testing::WithParamInterface<std::tuple<std::string, std::string>> {
+class Codec2ComponentHidlTest : public Codec2ComponentHidlTestBase,
+                                public ::testing::WithParamInterface<TestParameters> {
     void getParams() {
         mInstanceName = std::get<0>(GetParam());
         mComponentName = std::get<1>(GetParam());
@@ -317,10 +315,8 @@
     ASSERT_EQ(err, C2_OK);
 }
 
-class Codec2ComponentInputTests
-    : public Codec2ComponentHidlTestBase,
-      public ::testing::WithParamInterface<
-              std::tuple<std::string, std::string, std::string, std::string>> {
+class Codec2ComponentInputTests : public Codec2ComponentHidlTestBase,
+                                  public ::testing::WithParamInterface<InputTestParameters> {
     void getParams() {
         mInstanceName = std::get<0>(GetParam());
         mComponentName = std::get<1>(GetParam());
@@ -330,8 +326,8 @@
 TEST_P(Codec2ComponentInputTests, InputBufferTest) {
     description("Tests for different inputs");
 
-    uint32_t flags = std::stoul(std::get<2>(GetParam()));
-    bool isNullBuffer = !std::get<3>(GetParam()).compare("true");
+    uint32_t flags = std::get<2>(GetParam());
+    bool isNullBuffer = std::get<3>(GetParam());
     if (isNullBuffer)
         ALOGD("Testing for null input buffer with flag : %u", flags);
     else
@@ -350,11 +346,10 @@
 }
 
 INSTANTIATE_TEST_SUITE_P(PerInstance, Codec2ComponentHidlTest, testing::ValuesIn(kTestParameters),
-                         android::hardware::PrintInstanceTupleNameToString<>);
+                         PrintInstanceTupleNameToString<>);
 
 INSTANTIATE_TEST_CASE_P(NonStdInputs, Codec2ComponentInputTests,
-                        testing::ValuesIn(kInputTestParameters),
-                        android::hardware::PrintInstanceTupleNameToString<>);
+                        testing::ValuesIn(kInputTestParameters), PrintInstanceTupleNameToString<>);
 }  // anonymous namespace
 
 // TODO: Add test for Invalid work,
@@ -364,18 +359,15 @@
     kTestParameters = getTestParameters();
     for (auto params : kTestParameters) {
         kInputTestParameters.push_back(
-                std::make_tuple(std::get<0>(params), std::get<1>(params), "0", "true"));
+                std::make_tuple(std::get<0>(params), std::get<1>(params), 0, true));
+        kInputTestParameters.push_back(std::make_tuple(std::get<0>(params), std::get<1>(params),
+                                                       C2FrameData::FLAG_END_OF_STREAM, true));
         kInputTestParameters.push_back(
-                std::make_tuple(std::get<0>(params), std::get<1>(params),
-                                std::to_string(C2FrameData::FLAG_END_OF_STREAM), "true"));
-        kInputTestParameters.push_back(
-                std::make_tuple(std::get<0>(params), std::get<1>(params), "0", "false"));
-        kInputTestParameters.push_back(
-                std::make_tuple(std::get<0>(params), std::get<1>(params),
-                                std::to_string(C2FrameData::FLAG_CODEC_CONFIG), "false"));
-        kInputTestParameters.push_back(
-                std::make_tuple(std::get<0>(params), std::get<1>(params),
-                                std::to_string(C2FrameData::FLAG_END_OF_STREAM), "false"));
+                std::make_tuple(std::get<0>(params), std::get<1>(params), 0, false));
+        kInputTestParameters.push_back(std::make_tuple(std::get<0>(params), std::get<1>(params),
+                                                       C2FrameData::FLAG_CODEC_CONFIG, false));
+        kInputTestParameters.push_back(std::make_tuple(std::get<0>(params), std::get<1>(params),
+                                                       C2FrameData::FLAG_END_OF_STREAM, false));
     }
 
     ::testing::InitGoogleTest(&argc, argv);
diff --git a/media/codec2/hidl/1.0/vts/functional/video/VtsHalMediaC2V1_0TargetVideoDecTest.cpp b/media/codec2/hidl/1.0/vts/functional/video/VtsHalMediaC2V1_0TargetVideoDecTest.cpp
index f29da0e..d0a1c31 100644
--- a/media/codec2/hidl/1.0/vts/functional/video/VtsHalMediaC2V1_0TargetVideoDecTest.cpp
+++ b/media/codec2/hidl/1.0/vts/functional/video/VtsHalMediaC2V1_0TargetVideoDecTest.cpp
@@ -40,10 +40,11 @@
 #include "media_c2_hidl_test_common.h"
 #include "media_c2_video_hidl_test_common.h"
 
-static std::vector<std::tuple<std::string, std::string, std::string, std::string>>
-        kDecodeTestParameters;
+using DecodeTestParameters = std::tuple<std::string, std::string, uint32_t, bool>;
+static std::vector<DecodeTestParameters> kDecodeTestParameters;
 
-static std::vector<std::tuple<std::string, std::string, std::string>> kCsdFlushTestParameters;
+using CsdFlushTestParameters = std::tuple<std::string, std::string, bool>;
+static std::vector<CsdFlushTestParameters> kCsdFlushTestParameters;
 
 struct CompToURL {
     std::string mime;
@@ -52,43 +53,30 @@
     std::string chksum;
 };
 std::vector<CompToURL> kCompToURL = {
-    {"avc",
-     "bbb_avc_176x144_300kbps_60fps.h264", "bbb_avc_176x144_300kbps_60fps.info",
-     "bbb_avc_176x144_300kbps_60fps_chksum.md5"},
-    {"avc",
-     "bbb_avc_640x360_768kbps_30fps.h264", "bbb_avc_640x360_768kbps_30fps.info",
-     "bbb_avc_640x360_768kbps_30fps_chksum.md5"},
-    {"hevc",
-     "bbb_hevc_176x144_176kbps_60fps.hevc", "bbb_hevc_176x144_176kbps_60fps.info",
-     "bbb_hevc_176x144_176kbps_60fps_chksum.md5"},
-    {"hevc",
-     "bbb_hevc_640x360_1600kbps_30fps.hevc", "bbb_hevc_640x360_1600kbps_30fps.info",
-     "bbb_hevc_640x360_1600kbps_30fps_chksum.md5"},
-    {"mpeg2",
-     "bbb_mpeg2_176x144_105kbps_25fps.m2v", "bbb_mpeg2_176x144_105kbps_25fps.info", ""},
-    {"mpeg2",
-     "bbb_mpeg2_352x288_1mbps_60fps.m2v","bbb_mpeg2_352x288_1mbps_60fps.info", ""},
-    {"3gpp",
-     "bbb_h263_352x288_300kbps_12fps.h263", "bbb_h263_352x288_300kbps_12fps.info", ""},
-    {"mp4v-es",
-     "bbb_mpeg4_352x288_512kbps_30fps.m4v", "bbb_mpeg4_352x288_512kbps_30fps.info", ""},
-    {"vp8",
-     "bbb_vp8_176x144_240kbps_60fps.vp8", "bbb_vp8_176x144_240kbps_60fps.info", ""},
-    {"vp8",
-     "bbb_vp8_640x360_2mbps_30fps.vp8", "bbb_vp8_640x360_2mbps_30fps.info",
-     "bbb_vp8_640x360_2mbps_30fps_chksm.md5"},
-    {"vp9",
-     "bbb_vp9_176x144_285kbps_60fps.vp9", "bbb_vp9_176x144_285kbps_60fps.info", ""},
-    {"vp9",
-     "bbb_vp9_640x360_1600kbps_30fps.vp9", "bbb_vp9_640x360_1600kbps_30fps.info",
-     "bbb_vp9_640x360_1600kbps_30fps_chksm.md5"},
-    {"vp9",
-     "bbb_vp9_704x480_280kbps_24fps_altref_2.vp9",
-     "bbb_vp9_704x480_280kbps_24fps_altref_2.info", ""},
-    {"av01",
-     "bbb_av1_640_360.av1", "bbb_av1_640_360.info", "bbb_av1_640_360_chksum.md5"},
-    {"av01",
-     "bbb_av1_176_144.av1", "bbb_av1_176_144.info", "bbb_av1_176_144_chksm.md5"},
+        {"avc", "bbb_avc_176x144_300kbps_60fps.h264", "bbb_avc_176x144_300kbps_60fps.info",
+         "bbb_avc_176x144_300kbps_60fps_chksum.md5"},
+        {"avc", "bbb_avc_640x360_768kbps_30fps.h264", "bbb_avc_640x360_768kbps_30fps.info",
+         "bbb_avc_640x360_768kbps_30fps_chksum.md5"},
+        {"hevc", "bbb_hevc_176x144_176kbps_60fps.hevc", "bbb_hevc_176x144_176kbps_60fps.info",
+         "bbb_hevc_176x144_176kbps_60fps_chksum.md5"},
+        {"hevc", "bbb_hevc_640x360_1600kbps_30fps.hevc", "bbb_hevc_640x360_1600kbps_30fps.info",
+         "bbb_hevc_640x360_1600kbps_30fps_chksum.md5"},
+        {"mpeg2", "bbb_mpeg2_176x144_105kbps_25fps.m2v", "bbb_mpeg2_176x144_105kbps_25fps.info",
+         ""},
+        {"mpeg2", "bbb_mpeg2_352x288_1mbps_60fps.m2v", "bbb_mpeg2_352x288_1mbps_60fps.info", ""},
+        {"3gpp", "bbb_h263_352x288_300kbps_12fps.h263", "bbb_h263_352x288_300kbps_12fps.info", ""},
+        {"mp4v-es", "bbb_mpeg4_352x288_512kbps_30fps.m4v", "bbb_mpeg4_352x288_512kbps_30fps.info",
+         ""},
+        {"vp8", "bbb_vp8_176x144_240kbps_60fps.vp8", "bbb_vp8_176x144_240kbps_60fps.info", ""},
+        {"vp8", "bbb_vp8_640x360_2mbps_30fps.vp8", "bbb_vp8_640x360_2mbps_30fps.info",
+         "bbb_vp8_640x360_2mbps_30fps_chksm.md5"},
+        {"vp9", "bbb_vp9_176x144_285kbps_60fps.vp9", "bbb_vp9_176x144_285kbps_60fps.info", ""},
+        {"vp9", "bbb_vp9_640x360_1600kbps_30fps.vp9", "bbb_vp9_640x360_1600kbps_30fps.info",
+         "bbb_vp9_640x360_1600kbps_30fps_chksm.md5"},
+        {"vp9", "bbb_vp9_704x480_280kbps_24fps_altref_2.vp9",
+         "bbb_vp9_704x480_280kbps_24fps_altref_2.info", ""},
+        {"av01", "bbb_av1_640_360.av1", "bbb_av1_640_360.info", "bbb_av1_640_360_chksum.md5"},
+        {"av01", "bbb_av1_176_144.av1", "bbb_av1_176_144.info", "bbb_av1_176_144_chksm.md5"},
 };
 
 class LinearBuffer : public C2Buffer {
@@ -251,8 +239,7 @@
                 if (!codecConfig && !work->worklets.front()->output.buffers.empty()) {
                     if (mReorderDepth < 0) {
                         C2PortReorderBufferDepthTuning::output reorderBufferDepth;
-                        mComponent->query({&reorderBufferDepth}, {}, C2_MAY_BLOCK,
-                                          nullptr);
+                        mComponent->query({&reorderBufferDepth}, {}, C2_MAY_BLOCK, nullptr);
                         mReorderDepth = reorderBufferDepth.value;
                         if (mReorderDepth > 0) {
                             // TODO: Add validation for reordered output
@@ -333,9 +320,8 @@
     }
 };
 
-class Codec2VideoDecHidlTest
-    : public Codec2VideoDecHidlTestBase,
-      public ::testing::WithParamInterface<std::tuple<std::string, std::string>> {
+class Codec2VideoDecHidlTest : public Codec2VideoDecHidlTestBase,
+                               public ::testing::WithParamInterface<TestParameters> {
     void getParams() {
         mInstanceName = std::get<0>(GetParam());
         mComponentName = std::get<1>(GetParam());
@@ -541,10 +527,8 @@
     return false;
 }
 
-class Codec2VideoDecDecodeTest
-    : public Codec2VideoDecHidlTestBase,
-      public ::testing::WithParamInterface<
-              std::tuple<std::string, std::string, std::string, std::string>> {
+class Codec2VideoDecDecodeTest : public Codec2VideoDecHidlTestBase,
+                                 public ::testing::WithParamInterface<DecodeTestParameters> {
     void getParams() {
         mInstanceName = std::get<0>(GetParam());
         mComponentName = std::get<1>(GetParam());
@@ -556,8 +540,8 @@
     description("Decodes input file");
     if (mDisableTest) GTEST_SKIP() << "Test is disabled";
 
-    uint32_t streamIndex = std::stoi(std::get<2>(GetParam()));
-    bool signalEOS = !std::get<2>(GetParam()).compare("true");
+    uint32_t streamIndex = std::get<2>(GetParam());
+    bool signalEOS = std::get<3>(GetParam());
     mTimestampDevTest = true;
 
     char mURL[512], info[512], chksum[512];
@@ -657,8 +641,8 @@
     description("Adaptive Decode Test");
     if (mDisableTest) GTEST_SKIP() << "Test is disabled";
     if (!(strcasestr(mMime.c_str(), "avc") || strcasestr(mMime.c_str(), "hevc") ||
-        strcasestr(mMime.c_str(), "vp8") || strcasestr(mMime.c_str(), "vp9") ||
-        strcasestr(mMime.c_str(), "mpeg2"))) {
+          strcasestr(mMime.c_str(), "vp8") || strcasestr(mMime.c_str(), "vp9") ||
+          strcasestr(mMime.c_str(), "mpeg2"))) {
         return;
     }
 
@@ -987,9 +971,8 @@
     }
 }
 
-class Codec2VideoDecCsdInputTests
-    : public Codec2VideoDecHidlTestBase,
-      public ::testing::WithParamInterface<std::tuple<std::string, std::string, std::string>> {
+class Codec2VideoDecCsdInputTests : public Codec2VideoDecHidlTestBase,
+                                    public ::testing::WithParamInterface<CsdFlushTestParameters> {
     void getParams() {
         mInstanceName = std::get<0>(GetParam());
         mComponentName = std::get<1>(GetParam());
@@ -1022,7 +1005,7 @@
     bool flushedDecoder = false;
     bool signalEOS = false;
     bool keyFrame = false;
-    bool flushCsd = !std::get<2>(GetParam()).compare("true");
+    bool flushCsd = std::get<2>(GetParam());
 
     ALOGV("sending %d csd data ", numCsds);
     int framesToDecode = numCsds;
@@ -1092,16 +1075,16 @@
 }
 
 INSTANTIATE_TEST_SUITE_P(PerInstance, Codec2VideoDecHidlTest, testing::ValuesIn(kTestParameters),
-                         android::hardware::PrintInstanceTupleNameToString<>);
+                         PrintInstanceTupleNameToString<>);
 
 // DecodeTest with StreamIndex and EOS / No EOS
 INSTANTIATE_TEST_SUITE_P(StreamIndexAndEOS, Codec2VideoDecDecodeTest,
                          testing::ValuesIn(kDecodeTestParameters),
-                         android::hardware::PrintInstanceTupleNameToString<>);
+                         PrintInstanceTupleNameToString<>);
 
 INSTANTIATE_TEST_SUITE_P(CsdInputs, Codec2VideoDecCsdInputTests,
                          testing::ValuesIn(kCsdFlushTestParameters),
-                         android::hardware::PrintInstanceTupleNameToString<>);
+                         PrintInstanceTupleNameToString<>);
 
 }  // anonymous namespace
 
@@ -1111,22 +1094,22 @@
     kTestParameters = getTestParameters(C2Component::DOMAIN_VIDEO, C2Component::KIND_DECODER);
     for (auto params : kTestParameters) {
         kDecodeTestParameters.push_back(
-                std::make_tuple(std::get<0>(params), std::get<1>(params), "0", "false"));
+                std::make_tuple(std::get<0>(params), std::get<1>(params), 0, false));
         kDecodeTestParameters.push_back(
-                std::make_tuple(std::get<0>(params), std::get<1>(params), "0", "true"));
+                std::make_tuple(std::get<0>(params), std::get<1>(params), 0, true));
         kDecodeTestParameters.push_back(
-                std::make_tuple(std::get<0>(params), std::get<1>(params), "1", "false"));
+                std::make_tuple(std::get<0>(params), std::get<1>(params), 1, false));
         kDecodeTestParameters.push_back(
-                std::make_tuple(std::get<0>(params), std::get<1>(params), "1", "true"));
+                std::make_tuple(std::get<0>(params), std::get<1>(params), 1, true));
         kDecodeTestParameters.push_back(
-                std::make_tuple(std::get<0>(params), std::get<1>(params), "2", "false"));
+                std::make_tuple(std::get<0>(params), std::get<1>(params), 2, false));
         kDecodeTestParameters.push_back(
-                std::make_tuple(std::get<0>(params), std::get<1>(params), "2", "true"));
+                std::make_tuple(std::get<0>(params), std::get<1>(params), 2, true));
 
         kCsdFlushTestParameters.push_back(
-                std::make_tuple(std::get<0>(params), std::get<1>(params), "true"));
+                std::make_tuple(std::get<0>(params), std::get<1>(params), true));
         kCsdFlushTestParameters.push_back(
-                std::make_tuple(std::get<0>(params), std::get<1>(params), "false"));
+                std::make_tuple(std::get<0>(params), std::get<1>(params), false));
     }
 
     ::testing::InitGoogleTest(&argc, argv);
diff --git a/media/codec2/hidl/1.0/vts/functional/video/VtsHalMediaC2V1_0TargetVideoEncTest.cpp b/media/codec2/hidl/1.0/vts/functional/video/VtsHalMediaC2V1_0TargetVideoEncTest.cpp
index e116fe1..23ceff4 100644
--- a/media/codec2/hidl/1.0/vts/functional/video/VtsHalMediaC2V1_0TargetVideoEncTest.cpp
+++ b/media/codec2/hidl/1.0/vts/functional/video/VtsHalMediaC2V1_0TargetVideoEncTest.cpp
@@ -41,10 +41,11 @@
         : C2Buffer({block->share(C2Rect(block->width(), block->height()), ::C2Fence())}) {}
 };
 
-static std::vector<std::tuple<std::string, std::string, std::string, std::string, std::string>>
-        kEncodeTestParameters;
-static std::vector<std::tuple<std::string, std::string, std::string, std::string>>
-        kEncodeResolutionTestParameters;
+using EncodeTestParameters = std::tuple<std::string, std::string, bool, bool, bool>;
+static std::vector<EncodeTestParameters> kEncodeTestParameters;
+
+using EncodeResolutionTestParameters = std::tuple<std::string, std::string, int32_t, int32_t>;
+static std::vector<EncodeResolutionTestParameters> kEncodeResolutionTestParameters;
 
 namespace {
 
@@ -205,9 +206,8 @@
     }
 };
 
-class Codec2VideoEncHidlTest
-    : public Codec2VideoEncHidlTestBase,
-      public ::testing::WithParamInterface<std::tuple<std::string, std::string>> {
+class Codec2VideoEncHidlTest : public Codec2VideoEncHidlTestBase,
+                               public ::testing::WithParamInterface<TestParameters> {
     void getParams() {
         mInstanceName = std::get<0>(GetParam());
         mComponentName = std::get<1>(GetParam());
@@ -376,10 +376,8 @@
     ASSERT_EQ(mDisableTest, false);
 }
 
-class Codec2VideoEncEncodeTest
-    : public Codec2VideoEncHidlTestBase,
-      public ::testing::WithParamInterface<
-              std::tuple<std::string, std::string, std::string, std::string, std::string>> {
+class Codec2VideoEncEncodeTest : public Codec2VideoEncHidlTestBase,
+                                 public ::testing::WithParamInterface<EncodeTestParameters> {
     void getParams() {
         mInstanceName = std::get<0>(GetParam());
         mComponentName = std::get<1>(GetParam());
@@ -393,10 +391,10 @@
     char mURL[512];
     int32_t nWidth = ENC_DEFAULT_FRAME_WIDTH;
     int32_t nHeight = ENC_DEFAULT_FRAME_HEIGHT;
-    bool signalEOS = !std::get<2>(GetParam()).compare("true");
+    bool signalEOS = std::get<2>(GetParam());
     // Send an empty frame to receive CSD data from encoder.
-    bool sendEmptyFirstFrame = !std::get<3>(GetParam()).compare("true");
-    mConfigBPictures = !std::get<4>(GetParam()).compare("true");
+    bool sendEmptyFirstFrame = std::get<3>(GetParam());
+    mConfigBPictures = std::get<4>(GetParam());
 
     strcpy(mURL, sResourceDir.c_str());
     GetURLForComponent(mURL);
@@ -484,8 +482,7 @@
         ASSERT_TRUE(false);
     }
 
-    if ((mMime.find("vp8") != std::string::npos) ||
-        (mMime.find("3gpp") != std::string::npos)) {
+    if ((mMime.find("vp8") != std::string::npos) || (mMime.find("3gpp") != std::string::npos)) {
         ASSERT_FALSE(mCsd) << "CSD Buffer not expected";
     } else if (mMime.find("vp9") == std::string::npos) {
         ASSERT_TRUE(mCsd) << "CSD Buffer not received";
@@ -665,8 +662,7 @@
 
 class Codec2VideoEncResolutionTest
     : public Codec2VideoEncHidlTestBase,
-      public ::testing::WithParamInterface<
-              std::tuple<std::string, std::string, std::string, std::string>> {
+      public ::testing::WithParamInterface<EncodeResolutionTestParameters> {
     void getParams() {
         mInstanceName = std::get<0>(GetParam());
         mComponentName = std::get<1>(GetParam());
@@ -678,8 +674,8 @@
     if (mDisableTest) GTEST_SKIP() << "Test is disabled";
 
     std::ifstream eleStream;
-    int32_t nWidth = std::stoi(std::get<2>(GetParam()));
-    int32_t nHeight = std::stoi(std::get<3>(GetParam()));
+    int32_t nWidth = std::get<2>(GetParam());
+    int32_t nHeight = std::get<3>(GetParam());
     ALOGD("Trying encode for width %d height %d", nWidth, nHeight);
     mEos = false;
 
@@ -711,14 +707,16 @@
 }
 
 INSTANTIATE_TEST_SUITE_P(PerInstance, Codec2VideoEncHidlTest, testing::ValuesIn(kTestParameters),
-                         android::hardware::PrintInstanceTupleNameToString<>);
+                         PrintInstanceTupleNameToString<>);
 
 INSTANTIATE_TEST_SUITE_P(NonStdSizes, Codec2VideoEncResolutionTest,
-                         ::testing::ValuesIn(kEncodeResolutionTestParameters));
+                         ::testing::ValuesIn(kEncodeResolutionTestParameters),
+                         PrintInstanceTupleNameToString<>);
 
 // EncodeTest with EOS / No EOS
 INSTANTIATE_TEST_SUITE_P(EncodeTestwithEOS, Codec2VideoEncEncodeTest,
-                         ::testing::ValuesIn(kEncodeTestParameters));
+                         ::testing::ValuesIn(kEncodeTestParameters),
+                         PrintInstanceTupleNameToString<>);
 
 TEST_P(Codec2VideoEncHidlTest, AdaptiveBitrateTest) {
     description("Encodes input file for different bitrates");
@@ -812,27 +810,23 @@
     parseArgs(argc, argv);
     kTestParameters = getTestParameters(C2Component::DOMAIN_VIDEO, C2Component::KIND_ENCODER);
     for (auto params : kTestParameters) {
-        constexpr char const* kBoolString[] = { "false", "true" };
         for (size_t i = 0; i < 1 << 3; ++i) {
             kEncodeTestParameters.push_back(std::make_tuple(
-                    std::get<0>(params), std::get<1>(params),
-                    kBoolString[i & 1],
-                    kBoolString[(i >> 1) & 1],
-                    kBoolString[(i >> 2) & 1]));
+                    std::get<0>(params), std::get<1>(params), i & 1, (i >> 1) & 1, (i >> 2) & 1));
         }
 
         kEncodeResolutionTestParameters.push_back(
-                std::make_tuple(std::get<0>(params), std::get<1>(params), "52", "18"));
+                std::make_tuple(std::get<0>(params), std::get<1>(params), 52, 18));
         kEncodeResolutionTestParameters.push_back(
-                std::make_tuple(std::get<0>(params), std::get<1>(params), "365", "365"));
+                std::make_tuple(std::get<0>(params), std::get<1>(params), 365, 365));
         kEncodeResolutionTestParameters.push_back(
-                std::make_tuple(std::get<0>(params), std::get<1>(params), "484", "362"));
+                std::make_tuple(std::get<0>(params), std::get<1>(params), 484, 362));
         kEncodeResolutionTestParameters.push_back(
-                std::make_tuple(std::get<0>(params), std::get<1>(params), "244", "488"));
+                std::make_tuple(std::get<0>(params), std::get<1>(params), 244, 488));
         kEncodeResolutionTestParameters.push_back(
-                std::make_tuple(std::get<0>(params), std::get<1>(params), "852", "608"));
+                std::make_tuple(std::get<0>(params), std::get<1>(params), 852, 608));
         kEncodeResolutionTestParameters.push_back(
-                std::make_tuple(std::get<0>(params), std::get<1>(params), "1400", "442"));
+                std::make_tuple(std::get<0>(params), std::get<1>(params), 1400, 442));
     }
 
     ::testing::InitGoogleTest(&argc, argv);
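Editor's note: the encoder main() above enumerates all eight combinations of the three boolean knobs with a 3-bit counter instead of nested loops. The same idea in isolation (plain C++, no gtest), with the knob names matching the tuple layout used by the test:

    #include <cstdio>

    int main() {
        for (unsigned i = 0; i < (1u << 3); ++i) {
            // Bit 0 -> tuple index 2, bit 1 -> index 3, bit 2 -> index 4.
            bool signalEOS           = i & 1;
            bool sendEmptyFirstFrame = (i >> 1) & 1;
            bool configBPictures     = (i >> 2) & 1;
            std::printf("%u: eos=%d empty=%d bframes=%d\n",
                        i, signalEOS, sendEmptyFirstFrame, configBPictures);
        }
    }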
diff --git a/media/codec2/hidl/client/client.cpp b/media/codec2/hidl/client/client.cpp
index 0296004..d49141c 100644
--- a/media/codec2/hidl/client/client.cpp
+++ b/media/codec2/hidl/client/client.cpp
@@ -1503,8 +1503,7 @@
         bqId = 0;
         mOutputBufferQueue->configure(nullIgbp, generation, 0, nullptr);
     } else {
-        mOutputBufferQueue->configure(surface, generation, bqId,
-                                      mBase1_2 ? &syncObj : nullptr);
+        mOutputBufferQueue->configure(surface, generation, bqId, nullptr);
     }
     ALOGD("surface generation remote change %u HAL ver: %s",
           generation, syncObj ? "1.2" : "1.0");
diff --git a/media/codec2/hidl/plugin/FilterWrapper.cpp b/media/codec2/hidl/plugin/FilterWrapper.cpp
index 0b38bc1..bed8aeb 100644
--- a/media/codec2/hidl/plugin/FilterWrapper.cpp
+++ b/media/codec2/hidl/plugin/FilterWrapper.cpp
@@ -19,7 +19,6 @@
 #include <android-base/logging.h>
 
 #include <set>
-#include <sstream>
 
 #include <dlfcn.h>
 
@@ -383,6 +382,9 @@
         // Configure the next interface with the params.
         std::vector<C2Param *> configParams;
         for (size_t i = 0; i < heapParams.size(); ++i) {
+            if (!heapParams[i]) {
+                continue;
+            }
             if (heapParams[i]->forStream()) {
                 heapParams[i] = C2Param::CopyAsStream(
                         *heapParams[i], false /* output */, heapParams[i]->stream());
@@ -782,10 +784,7 @@
         if (C2_OK != mStore->createComponent(filter.traits.name, &comp)) {
             return {};
         }
-        if (C2_OK != mStore->createInterface(filter.traits.name, &intf)) {
-            return {};
-        }
-        filters.push_back({comp, intf, filter.traits, filter.desc});
+        filters.push_back({comp, comp->intf(), filter.traits, filter.desc});
     }
     return filters;
 }
@@ -869,7 +868,7 @@
     }
     std::vector<Component> filters = createFilters();
     std::shared_ptr wrapped = std::make_shared<WrappedDecoder>(
-            comp, std::move(filters), weak_from_this());
+            comp, std::vector(filters), weak_from_this());
     {
         std::unique_lock lock(mWrappedComponentsMutex);
         std::vector<std::weak_ptr<const C2Component>> &components =
diff --git a/media/codec2/sfplugin/CCodec.cpp b/media/codec2/sfplugin/CCodec.cpp
index 63ae5cd..bae82f6 100644
--- a/media/codec2/sfplugin/CCodec.cpp
+++ b/media/codec2/sfplugin/CCodec.cpp
@@ -1798,17 +1798,19 @@
 }
 
 status_t CCodec::setSurface(const sp<Surface> &surface) {
-    Mutexed<std::unique_ptr<Config>>::Locked configLocked(mConfig);
-    const std::unique_ptr<Config> &config = *configLocked;
-    if (config->mTunneled && config->mSidebandHandle != nullptr) {
-        sp<ANativeWindow> nativeWindow = static_cast<ANativeWindow *>(surface.get());
-        status_t err = native_window_set_sideband_stream(
-                nativeWindow.get(),
-                const_cast<native_handle_t *>(config->mSidebandHandle->handle()));
-        if (err != OK) {
-            ALOGE("NativeWindow(%p) native_window_set_sideband_stream(%p) failed! (err %d).",
-                    nativeWindow.get(), config->mSidebandHandle->handle(), err);
-            return err;
+    {
+        Mutexed<std::unique_ptr<Config>>::Locked configLocked(mConfig);
+        const std::unique_ptr<Config> &config = *configLocked;
+        if (config->mTunneled && config->mSidebandHandle != nullptr) {
+            sp<ANativeWindow> nativeWindow = static_cast<ANativeWindow *>(surface.get());
+            status_t err = native_window_set_sideband_stream(
+                    nativeWindow.get(),
+                    const_cast<native_handle_t *>(config->mSidebandHandle->handle()));
+            if (err != OK) {
+                ALOGE("NativeWindow(%p) native_window_set_sideband_stream(%p) failed! (err %d).",
+                        nativeWindow.get(), config->mSidebandHandle->handle(), err);
+                return err;
+            }
         }
     }
     return mChannel->setSurface(surface);
@@ -2149,80 +2151,88 @@
             }
 
             // handle configuration changes in work done
-            Mutexed<std::unique_ptr<Config>>::Locked configLocked(mConfig);
-            const std::unique_ptr<Config> &config = *configLocked;
-            Config::Watcher<C2StreamInitDataInfo::output> initData =
-                config->watch<C2StreamInitDataInfo::output>();
-            if (!work->worklets.empty()
-                    && (work->worklets.front()->output.flags
-                            & C2FrameData::FLAG_DISCARD_FRAME) == 0) {
+            const C2StreamInitDataInfo::output *initData = nullptr;
+            sp<AMessage> outputFormat = nullptr;
+            {
+                Mutexed<std::unique_ptr<Config>>::Locked configLocked(mConfig);
+                const std::unique_ptr<Config> &config = *configLocked;
+                Config::Watcher<C2StreamInitDataInfo::output> initDataWatcher =
+                    config->watch<C2StreamInitDataInfo::output>();
+                if (!work->worklets.empty()
+                        && (work->worklets.front()->output.flags
+                                & C2FrameData::FLAG_DISCARD_FRAME) == 0) {
 
-                // copy buffer info to config
-                std::vector<std::unique_ptr<C2Param>> updates;
-                for (const std::unique_ptr<C2Param> &param
-                        : work->worklets.front()->output.configUpdate) {
-                    updates.push_back(C2Param::Copy(*param));
-                }
-                unsigned stream = 0;
-                for (const std::shared_ptr<C2Buffer> &buf : work->worklets.front()->output.buffers) {
-                    for (const std::shared_ptr<const C2Info> &info : buf->info()) {
-                        // move all info into output-stream #0 domain
-                        updates.emplace_back(C2Param::CopyAsStream(*info, true /* output */, stream));
+                    // copy buffer info to config
+                    std::vector<std::unique_ptr<C2Param>> updates;
+                    for (const std::unique_ptr<C2Param> &param
+                            : work->worklets.front()->output.configUpdate) {
+                        updates.push_back(C2Param::Copy(*param));
+                    }
+                    unsigned stream = 0;
+                    std::vector<std::shared_ptr<C2Buffer>> &outputBuffers =
+                        work->worklets.front()->output.buffers;
+                    for (const std::shared_ptr<C2Buffer> &buf : outputBuffers) {
+                        for (const std::shared_ptr<const C2Info> &info : buf->info()) {
+                            // move all info into output-stream #0 domain
+                            updates.emplace_back(
+                                    C2Param::CopyAsStream(*info, true /* output */, stream));
+                        }
+
+                        const std::vector<C2ConstGraphicBlock> blocks = buf->data().graphicBlocks();
+                        // for now only do the first block
+                        if (!blocks.empty()) {
+                            // ALOGV("got output buffer with crop %u,%u+%u,%u and size %u,%u",
+                            //      block.crop().left, block.crop().top,
+                            //      block.crop().width, block.crop().height,
+                            //      block.width(), block.height());
+                            const C2ConstGraphicBlock &block = blocks[0];
+                            updates.emplace_back(new C2StreamCropRectInfo::output(
+                                    stream, block.crop()));
+                            updates.emplace_back(new C2StreamPictureSizeInfo::output(
+                                    stream, block.crop().width, block.crop().height));
+                        }
+                        ++stream;
                     }
 
-                    const std::vector<C2ConstGraphicBlock> blocks = buf->data().graphicBlocks();
-                    // for now only do the first block
-                    if (!blocks.empty()) {
-                        // ALOGV("got output buffer with crop %u,%u+%u,%u and size %u,%u",
-                        //      block.crop().left, block.crop().top,
-                        //      block.crop().width, block.crop().height,
-                        //      block.width(), block.height());
-                        const C2ConstGraphicBlock &block = blocks[0];
-                        updates.emplace_back(new C2StreamCropRectInfo::output(stream, block.crop()));
-                        updates.emplace_back(new C2StreamPictureSizeInfo::output(
-                                stream, block.crop().width, block.crop().height));
-                    }
-                    ++stream;
-                }
+                    sp<AMessage> oldFormat = config->mOutputFormat;
+                    config->updateConfiguration(updates, config->mOutputDomain);
+                    RevertOutputFormatIfNeeded(oldFormat, config->mOutputFormat);
 
-                sp<AMessage> outputFormat = config->mOutputFormat;
-                config->updateConfiguration(updates, config->mOutputDomain);
-                RevertOutputFormatIfNeeded(outputFormat, config->mOutputFormat);
-
-                // copy standard infos to graphic buffers if not already present (otherwise, we
-                // may overwrite the actual intermediate value with a final value)
-                stream = 0;
-                const static C2Param::Index stdGfxInfos[] = {
-                    C2StreamRotationInfo::output::PARAM_TYPE,
-                    C2StreamColorAspectsInfo::output::PARAM_TYPE,
-                    C2StreamDataSpaceInfo::output::PARAM_TYPE,
-                    C2StreamHdrStaticInfo::output::PARAM_TYPE,
-                    C2StreamHdr10PlusInfo::output::PARAM_TYPE,
-                    C2StreamPixelAspectRatioInfo::output::PARAM_TYPE,
-                    C2StreamSurfaceScalingInfo::output::PARAM_TYPE
-                };
-                for (const std::shared_ptr<C2Buffer> &buf : work->worklets.front()->output.buffers) {
-                    if (buf->data().graphicBlocks().size()) {
-                        for (C2Param::Index ix : stdGfxInfos) {
-                            if (!buf->hasInfo(ix)) {
-                                const C2Param *param =
-                                    config->getConfigParameterValue(ix.withStream(stream));
-                                if (param) {
-                                    std::shared_ptr<C2Param> info(C2Param::Copy(*param));
-                                    buf->setInfo(std::static_pointer_cast<C2Info>(info));
+                    // copy standard infos to graphic buffers if not already present (otherwise, we
+                    // may overwrite the actual intermediate value with a final value)
+                    stream = 0;
+                    const static C2Param::Index stdGfxInfos[] = {
+                        C2StreamRotationInfo::output::PARAM_TYPE,
+                        C2StreamColorAspectsInfo::output::PARAM_TYPE,
+                        C2StreamDataSpaceInfo::output::PARAM_TYPE,
+                        C2StreamHdrStaticInfo::output::PARAM_TYPE,
+                        C2StreamHdr10PlusInfo::output::PARAM_TYPE,
+                        C2StreamPixelAspectRatioInfo::output::PARAM_TYPE,
+                        C2StreamSurfaceScalingInfo::output::PARAM_TYPE
+                    };
+                    for (const std::shared_ptr<C2Buffer> &buf : outputBuffers) {
+                        if (buf->data().graphicBlocks().size()) {
+                            for (C2Param::Index ix : stdGfxInfos) {
+                                if (!buf->hasInfo(ix)) {
+                                    const C2Param *param =
+                                        config->getConfigParameterValue(ix.withStream(stream));
+                                    if (param) {
+                                        std::shared_ptr<C2Param> info(C2Param::Copy(*param));
+                                        buf->setInfo(std::static_pointer_cast<C2Info>(info));
+                                    }
                                 }
                             }
                         }
+                        ++stream;
                     }
-                    ++stream;
                 }
+                if (config->mInputSurface) {
+                    config->mInputSurface->onInputBufferDone(work->input.ordinal.frameIndex);
+                }
+                initData = initDataWatcher.hasChanged() ? initDataWatcher.update().get() : nullptr;
+                outputFormat = config->mOutputFormat;
             }
-            if (config->mInputSurface) {
-                config->mInputSurface->onInputBufferDone(work->input.ordinal.frameIndex);
-            }
-            mChannel->onWorkDone(
-                    std::move(work), config->mOutputFormat,
-                    initData.hasChanged() ? initData.update().get() : nullptr);
+            mChannel->onWorkDone(std::move(work), outputFormat, initData);
             break;
         }
         case kWhatWatch: {
@@ -2307,9 +2317,13 @@
             pendingDeadline = true;
         }
     }
-    Mutexed<std::unique_ptr<Config>>::Locked configLocked(mConfig);
-    const std::unique_ptr<Config> &config = *configLocked;
-    if (config->mTunneled == false && name.empty()) {
+    bool tunneled = false;
+    {
+        Mutexed<std::unique_ptr<Config>>::Locked configLocked(mConfig);
+        const std::unique_ptr<Config> &config = *configLocked;
+        tunneled = config->mTunneled;
+    }
+    if (!tunneled && name.empty()) {
         constexpr std::chrono::steady_clock::duration kWorkDurationThreshold = 3s;
         std::chrono::steady_clock::duration elapsed = mChannel->elapsed();
         if (elapsed >= kWorkDurationThreshold) {
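Editor's note: the CCodec.cpp hunks above all follow the same shape: take the Mutexed<Config> lock in a tight scope, copy out whatever is needed (tunneled flag, output format, init data), and only then call into mChannel with no lock held. A generic sketch of that pattern, with made-up names and a plain std::mutex rather than the actual Mutexed<> wrapper:

    #include <memory>
    #include <mutex>
    #include <string>

    struct Config { bool tunneled = false; std::string outputFormat; };

    class Codec {
    public:
        void onWorkDone() {
            std::string outputFormat;
            bool tunneled = false;
            {   // Hold the config lock only while reading the guarded state.
                std::lock_guard<std::mutex> lock(mConfigMutex);
                outputFormat = mConfig->outputFormat;
                tunneled = mConfig->tunneled;
            }   // Lock released here, before calling into the channel.
            notifyChannel(outputFormat, tunneled);
        }

    private:
        void notifyChannel(const std::string &, bool) { /* may take its own locks */ }

        std::mutex mConfigMutex;
        std::unique_ptr<Config> mConfig = std::make_unique<Config>();
    };

    int main() { Codec().onWorkDone(); }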
diff --git a/media/codec2/sfplugin/CCodecBufferChannel.cpp b/media/codec2/sfplugin/CCodecBufferChannel.cpp
index c4f9d84..d0c1357 100644
--- a/media/codec2/sfplugin/CCodecBufferChannel.cpp
+++ b/media/codec2/sfplugin/CCodecBufferChannel.cpp
@@ -1368,7 +1368,7 @@
     // about buffers from the previous generation do not interfere with the
     // newly initialized pipeline capacity.
 
-    {
+    if (inputFormat || outputFormat) {
         Mutexed<PipelineWatcher>::Locked watcher(mPipelineWatcher);
         watcher->inputDelay(inputDelayValue)
                 .pipelineDelay(pipelineDelayValue)
@@ -1468,14 +1468,14 @@
 void CCodecBufferChannel::stop() {
     mSync.stop();
     mFirstValidFrameIndex = mFrameIndex.load(std::memory_order_relaxed);
-    if (mInputSurface != nullptr) {
-        mInputSurface.reset();
-    }
-    mPipelineWatcher.lock()->flush();
 }
 
 void CCodecBufferChannel::reset() {
     stop();
+    if (mInputSurface != nullptr) {
+        mInputSurface.reset();
+    }
+    mPipelineWatcher.lock()->flush();
     {
         Mutexed<Input>::Locked input(mInput);
         input->buffers.reset(new DummyInputBuffers(""));
@@ -1503,8 +1503,10 @@
 
 void CCodecBufferChannel::flush(const std::list<std::unique_ptr<C2Work>> &flushedWork) {
     ALOGV("[%s] flush", mName);
+    std::vector<uint64_t> indices;
     std::list<std::unique_ptr<C2Work>> configs;
     for (const std::unique_ptr<C2Work> &work : flushedWork) {
+        indices.push_back(work->input.ordinal.frameIndex.peeku());
         if (!(work->input.flags & C2FrameData::FLAG_CODEC_CONFIG)) {
             continue;
         }
@@ -1517,6 +1519,7 @@
         std::unique_ptr<C2Work> copy(new C2Work);
         copy->input.flags = C2FrameData::flags_t(work->input.flags | C2FrameData::FLAG_DROP_FRAME);
         copy->input.ordinal = work->input.ordinal;
+        copy->input.ordinal.frameIndex = mFrameIndex++;
         copy->input.buffers.insert(
                 copy->input.buffers.begin(),
                 work->input.buffers.begin(),
@@ -1545,7 +1548,12 @@
             output->buffers->flushStash();
         }
     }
-    mPipelineWatcher.lock()->flush();
+    {
+        Mutexed<PipelineWatcher>::Locked watcher(mPipelineWatcher);
+        for (uint64_t index : indices) {
+            watcher->onWorkDone(index);
+        }
+    }
 }
 
 void CCodecBufferChannel::onWorkDone(
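Editor's note: the flush() change above stops wiping the PipelineWatcher wholesale and instead retires only the frame indices reported back in flushedWork, so work still held by the component keeps counting toward pipeline depth. A toy model of that bookkeeping, with hypothetical names:

    #include <cstdint>
    #include <cstdio>
    #include <set>
    #include <vector>

    class PipelineTracker {
    public:
        void onInputQueued(uint64_t frameIndex) { mInFlight.insert(frameIndex); }
        void onWorkDone(uint64_t frameIndex) { mInFlight.erase(frameIndex); }
        size_t depth() const { return mInFlight.size(); }

    private:
        std::set<uint64_t> mInFlight;
    };

    int main() {
        PipelineTracker tracker;
        for (uint64_t i = 0; i < 5; ++i) tracker.onInputQueued(i);

        // The component reports frames 0..2 as flushed; 3 and 4 are still in flight.
        std::vector<uint64_t> flushedIndices = {0, 1, 2};
        for (uint64_t index : flushedIndices) tracker.onWorkDone(index);

        std::printf("pipeline depth after flush: %zu\n", tracker.depth());  // 2, not 0
    }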
diff --git a/media/codec2/sfplugin/CCodecConfig.cpp b/media/codec2/sfplugin/CCodecConfig.cpp
index 7969a6f..27e87e6 100644
--- a/media/codec2/sfplugin/CCodecConfig.cpp
+++ b/media/codec2/sfplugin/CCodecConfig.cpp
@@ -362,7 +362,10 @@
         .limitTo(D::OUTPUT & D::READ));
 
     add(ConfigMapper(KEY_BIT_RATE, C2_PARAMKEY_BITRATE, "value")
-        .limitTo(D::ENCODER & D::OUTPUT));
+        .limitTo(D::ENCODER & D::CODED));
+    // Some audio decoders require bitrate information to be set
+    add(ConfigMapper(KEY_BIT_RATE, C2_PARAMKEY_BITRATE, "value")
+        .limitTo(D::AUDIO & D::DECODER & D::CODED));
     // we also need to put the bitrate in the max bitrate field
     add(ConfigMapper(KEY_MAX_BIT_RATE, C2_PARAMKEY_BITRATE, "value")
         .limitTo(D::ENCODER & D::READ & D::OUTPUT));
@@ -730,6 +733,17 @@
             return C2Value();
         }));
 
+    add(ConfigMapper(KEY_AAC_PROFILE, C2_PARAMKEY_PROFILE_LEVEL, "profile")
+        .limitTo(D::AUDIO & D::ENCODER & (D::CONFIG | D::PARAM))
+        .withMapper([mapper](C2Value v) -> C2Value {
+            C2Config::profile_t c2 = PROFILE_UNUSED;
+            int32_t sdk;
+            if (mapper && v.get(&sdk) && mapper->mapProfile(sdk, &c2)) {
+                return c2;
+            }
+            return PROFILE_UNUSED;
+        }));
+
     // convert to dBFS and add default
     add(ConfigMapper(KEY_AAC_DRC_TARGET_REFERENCE_LEVEL, C2_PARAMKEY_DRC_TARGET_REFERENCE_LEVEL, "value")
         .limitTo(D::AUDIO & D::DECODER & (D::CONFIG | D::PARAM | D::READ))
@@ -1322,6 +1336,14 @@
         }
     }
 
+    // Remove KEY_AAC_SBR_MODE from the SDK message if it is outside the supported range:
+    // the SDK has no way to signal "use the default SBR mode for this profile" other than
+    // leaving the key out of the format entirely.
+    int sbrMode;
+    if (msg->findInt32(KEY_AAC_SBR_MODE, &sbrMode) && (sbrMode < 0 || sbrMode > 2)) {
+        msg->removeEntryAt(msg->findEntryByName(KEY_AAC_SBR_MODE));
+    }
+
     { // convert color info
         // move default color to color aspect if not read from the component
         int32_t tmp;
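Editor's note: the KEY_AAC_SBR_MODE handling above can be read in isolation as a small helper; a minimal sketch assuming libstagefright_foundation and the SDK key string "aac-sbr-mode":

    #include <media/stagefright/foundation/AMessage.h>

    using ::android::AMessage;
    using ::android::sp;

    static void dropOutOfRangeSbrMode(const sp<AMessage> &format) {
        int32_t sbrMode;
        // Values outside [0, 2] mean "use the profile default"; the SDK expresses that
        // by omitting the key, so remove it instead of clamping.
        if (format->findInt32("aac-sbr-mode", &sbrMode) && (sbrMode < 0 || sbrMode > 2)) {
            format->removeEntryAt(format->findEntryByName("aac-sbr-mode"));
        }
    }

    // usage: sp<AMessage> format = new AMessage; format->setInt32("aac-sbr-mode", 7);
    //        dropOutOfRangeSbrMode(format);  // key removed, decoder default applies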
diff --git a/media/codec2/sfplugin/FrameReassembler.cpp b/media/codec2/sfplugin/FrameReassembler.cpp
index 9cec23f..af054c7 100644
--- a/media/codec2/sfplugin/FrameReassembler.cpp
+++ b/media/codec2/sfplugin/FrameReassembler.cpp
@@ -143,6 +143,7 @@
 
     if (buffer->size() > 0) {
         mCurrentOrdinal.timestamp = timeUs;
+        mCurrentOrdinal.customOrdinal = timeUs;
     }
 
     size_t frameSizeBytes = mFrameSize.value() * mChannelCount * bytesPerSample();
@@ -219,6 +220,7 @@
 
     ++mCurrentOrdinal.frameIndex;
     mCurrentOrdinal.timestamp += mFrameSize.value() * 1000000 / mSampleRate;
+    mCurrentOrdinal.customOrdinal = mCurrentOrdinal.timestamp;
     mCurrentBlock.reset();
     mWriteView.reset();
 }
diff --git a/media/codec2/sfplugin/PipelineWatcher.cpp b/media/codec2/sfplugin/PipelineWatcher.cpp
index 0ee9056..bc9197c 100644
--- a/media/codec2/sfplugin/PipelineWatcher.cpp
+++ b/media/codec2/sfplugin/PipelineWatcher.cpp
@@ -95,6 +95,7 @@
 }
 
 void PipelineWatcher::flush() {
+    ALOGV("flush");
     mFramesInPipeline.clear();
 }
 
diff --git a/media/codec2/sfplugin/utils/Android.bp b/media/codec2/sfplugin/utils/Android.bp
index 74e7ef1..2f4d6b1 100644
--- a/media/codec2/sfplugin/utils/Android.bp
+++ b/media/codec2/sfplugin/utils/Android.bp
@@ -33,11 +33,13 @@
         "libcodec2_vndk",
         "libcutils",
         "liblog",
+        "libnativewindow",
         "libstagefright_foundation",
         "libutils",
     ],
 
     static_libs: [
+        "libarect",
         "libyuv_static",
     ],
 
diff --git a/media/codec2/sfplugin/utils/Codec2BufferUtils.cpp b/media/codec2/sfplugin/utils/Codec2BufferUtils.cpp
index a54af83..a78d811 100644
--- a/media/codec2/sfplugin/utils/Codec2BufferUtils.cpp
+++ b/media/codec2/sfplugin/utils/Codec2BufferUtils.cpp
@@ -23,6 +23,7 @@
 #include <list>
 #include <mutex>
 
+#include <android/hardware_buffer.h>
 #include <media/hardware/HardwareAPI.h>
 #include <media/stagefright/foundation/AUtils.h>
 
@@ -136,31 +137,56 @@
     int width = view.crop().width;
     int height = view.crop().height;
 
-    if ((IsNV12(view) && IsI420(img)) || (IsI420(view) && IsNV12(img))) {
-        // Take shortcuts to use libyuv functions between NV12 and I420 conversion.
-        if (IsNV12(view) && IsI420(img)) {
+    if (IsNV12(view)) {
+        if (IsNV12(img)) {
+            libyuv::CopyPlane(src_y, src_stride_y, dst_y, dst_stride_y, width, height);
+            libyuv::CopyPlane(src_u, src_stride_u, dst_u, dst_stride_u, width, height / 2);
+            return OK;
+        } else if (IsNV21(img)) {
+            if (!libyuv::NV21ToNV12(src_y, src_stride_y, src_u, src_stride_u,
+                                    dst_y, dst_stride_y, dst_v, dst_stride_v, width, height)) {
+                return OK;
+            }
+        } else if (IsI420(img)) {
             if (!libyuv::NV12ToI420(src_y, src_stride_y, src_u, src_stride_u, dst_y, dst_stride_y,
                                     dst_u, dst_stride_u, dst_v, dst_stride_v, width, height)) {
                 return OK;
             }
-        } else {
+        }
+    } else if (IsNV21(view)) {
+        if (IsNV12(img)) {
+            if (!libyuv::NV21ToNV12(src_y, src_stride_y, src_v, src_stride_v,
+                                    dst_y, dst_stride_y, dst_u, dst_stride_u, width, height)) {
+                return OK;
+            }
+        } else if (IsNV21(img)) {
+            libyuv::CopyPlane(src_y, src_stride_y, dst_y, dst_stride_y, width, height);
+            libyuv::CopyPlane(src_v, src_stride_v, dst_v, dst_stride_v, width, height / 2);
+            return OK;
+        } else if (IsI420(img)) {
+            if (!libyuv::NV21ToI420(src_y, src_stride_y, src_v, src_stride_v, dst_y, dst_stride_y,
+                                    dst_u, dst_stride_u, dst_v, dst_stride_v, width, height)) {
+                return OK;
+            }
+        }
+    } else if (IsI420(view)) {
+        if (IsNV12(img)) {
             if (!libyuv::I420ToNV12(src_y, src_stride_y, src_u, src_stride_u, src_v, src_stride_v,
                                     dst_y, dst_stride_y, dst_u, dst_stride_u, width, height)) {
                 return OK;
             }
+        } else if (IsNV21(img)) {
+            if (!libyuv::I420ToNV21(src_y, src_stride_y, src_u, src_stride_u, src_v, src_stride_v,
+                                    dst_y, dst_stride_y, dst_v, dst_stride_v, width, height)) {
+                return OK;
+            }
+        } else if (IsI420(img)) {
+            libyuv::CopyPlane(src_y, src_stride_y, dst_y, dst_stride_y, width, height);
+            libyuv::CopyPlane(src_u, src_stride_u, dst_u, dst_stride_u, width / 2, height / 2);
+            libyuv::CopyPlane(src_v, src_stride_v, dst_v, dst_stride_v, width / 2, height / 2);
+            return OK;
         }
     }
-    if (IsNV12(view) && IsNV12(img)) {
-        libyuv::CopyPlane(src_y, src_stride_y, dst_y, dst_stride_y, width, height);
-        libyuv::CopyPlane(src_u, src_stride_u, dst_u, dst_stride_u, width, height / 2);
-        return OK;
-    }
-    if (IsI420(view) && IsI420(img)) {
-        libyuv::CopyPlane(src_y, src_stride_y, dst_y, dst_stride_y, width, height);
-        libyuv::CopyPlane(src_u, src_stride_u, dst_u, dst_stride_u, width / 2, height / 2);
-        libyuv::CopyPlane(src_v, src_stride_v, dst_v, dst_stride_v, width / 2, height / 2);
-        return OK;
-    }
     return _ImageCopy<true>(view, img, imgBase);
 }
 
@@ -182,33 +208,56 @@
     int32_t dst_stride_v = view.layout().planes[2].rowInc;
     int width = view.crop().width;
     int height = view.crop().height;
-    if ((IsNV12(img) && IsI420(view)) || (IsI420(img) && IsNV12(view))) {
-        // Take shortcuts to use libyuv functions between NV12 and I420 conversion.
-        if (IsNV12(img) && IsI420(view)) {
+    if (IsNV12(img)) {
+        if (IsNV12(view)) {
+            libyuv::CopyPlane(src_y, src_stride_y, dst_y, dst_stride_y, width, height);
+            libyuv::CopyPlane(src_u, src_stride_u, dst_u, dst_stride_u, width, height / 2);
+            return OK;
+        } else if (IsNV21(view)) {
+            if (!libyuv::NV21ToNV12(src_y, src_stride_y, src_u, src_stride_u,
+                                    dst_y, dst_stride_y, dst_v, dst_stride_v, width, height)) {
+                return OK;
+            }
+        } else if (IsI420(view)) {
             if (!libyuv::NV12ToI420(src_y, src_stride_y, src_u, src_stride_u, dst_y, dst_stride_y,
                                     dst_u, dst_stride_u, dst_v, dst_stride_v, width, height)) {
                 return OK;
             }
-        } else {
+        }
+    } else if (IsNV21(img)) {
+        if (IsNV12(view)) {
+            if (!libyuv::NV21ToNV12(src_y, src_stride_y, src_v, src_stride_v,
+                                    dst_y, dst_stride_y, dst_u, dst_stride_u, width, height)) {
+                return OK;
+            }
+        } else if (IsNV21(view)) {
+            libyuv::CopyPlane(src_y, src_stride_y, dst_y, dst_stride_y, width, height);
+            libyuv::CopyPlane(src_v, src_stride_v, dst_v, dst_stride_v, width, height / 2);
+            return OK;
+        } else if (IsI420(view)) {
+            if (!libyuv::NV21ToI420(src_y, src_stride_y, src_v, src_stride_v, dst_y, dst_stride_y,
+                                    dst_u, dst_stride_u, dst_v, dst_stride_v, width, height)) {
+                return OK;
+            }
+        }
+    } else if (IsI420(img)) {
+        if (IsNV12(view)) {
             if (!libyuv::I420ToNV12(src_y, src_stride_y, src_u, src_stride_u, src_v, src_stride_v,
                                     dst_y, dst_stride_y, dst_u, dst_stride_u, width, height)) {
                 return OK;
             }
+        } else if (IsNV21(view)) {
+            if (!libyuv::I420ToNV21(src_y, src_stride_y, src_u, src_stride_u, src_v, src_stride_v,
+                                    dst_y, dst_stride_y, dst_v, dst_stride_v, width, height)) {
+                return OK;
+            }
+        } else if (IsI420(view)) {
+            libyuv::CopyPlane(src_y, src_stride_y, dst_y, dst_stride_y, width, height);
+            libyuv::CopyPlane(src_u, src_stride_u, dst_u, dst_stride_u, width / 2, height / 2);
+            libyuv::CopyPlane(src_v, src_stride_v, dst_v, dst_stride_v, width / 2, height / 2);
+            return OK;
         }
     }
-    if (IsNV12(img) && IsNV12(view)) {
-        // For NV12, copy Y and UV plane
-        libyuv::CopyPlane(src_y, src_stride_y, dst_y, dst_stride_y, width, height);
-        libyuv::CopyPlane(src_u, src_stride_u, dst_u, dst_stride_u, width, height / 2);
-        return OK;
-    }
-    if (IsI420(img) && IsI420(view)) {
-        // For I420, copy Y, U and V plane.
-        libyuv::CopyPlane(src_y, src_stride_y, dst_y, dst_stride_y, width, height);
-        libyuv::CopyPlane(src_u, src_stride_u, dst_u, dst_stride_u, width / 2, height / 2);
-        libyuv::CopyPlane(src_v, src_stride_v, dst_v, dst_stride_v, width / 2, height / 2);
-        return OK;
-    }
     return _ImageCopy<false>(view, img, imgBase);
 }
 
@@ -250,6 +299,20 @@
             && layout.planes[layout.PLANE_V].offset == 1);
 }
 
+bool IsNV21(const C2GraphicView &view) {
+    if (!IsYUV420(view)) {
+        return false;
+    }
+    const C2PlanarLayout &layout = view.layout();
+    return (layout.rootPlanes == 2
+            && layout.planes[layout.PLANE_U].colInc == 2
+            && layout.planes[layout.PLANE_U].rootIx == layout.PLANE_V
+            && layout.planes[layout.PLANE_U].offset == 1
+            && layout.planes[layout.PLANE_V].colInc == 2
+            && layout.planes[layout.PLANE_V].rootIx == layout.PLANE_V
+            && layout.planes[layout.PLANE_V].offset == 0);
+}
+
 bool IsI420(const C2GraphicView &view) {
     if (!IsYUV420(view)) {
         return false;
@@ -286,6 +349,15 @@
             && (img->mPlane[2].mOffset - img->mPlane[1].mOffset == 1));
 }
 
+bool IsNV21(const MediaImage2 *img) {
+    if (!IsYUV420(img)) {
+        return false;
+    }
+    return (img->mPlane[1].mColInc == 2
+            && img->mPlane[2].mColInc == 2
+            && (img->mPlane[1].mOffset - img->mPlane[2].mOffset == 1));
+}
+
 bool IsI420(const MediaImage2 *img) {
     if (!IsYUV420(img)) {
         return false;
@@ -295,6 +367,76 @@
             && img->mPlane[2].mOffset > img->mPlane[1].mOffset);
 }
 
+FlexLayout GetYuv420FlexibleLayout() {
+    static FlexLayout sLayout = []{
+        AHardwareBuffer_Desc desc = {
+            16,  // width
+            16,  // height
+            1,   // layers
+            AHARDWAREBUFFER_FORMAT_Y8Cb8Cr8_420,
+            AHARDWAREBUFFER_USAGE_CPU_READ_OFTEN | AHARDWAREBUFFER_USAGE_CPU_WRITE_OFTEN,
+            0,   // stride
+            0,   // rfu0
+            0,   // rfu1
+        };
+        AHardwareBuffer *buffer = nullptr;
+        int ret = AHardwareBuffer_allocate(&desc, &buffer);
+        if (ret != 0) {
+            return FLEX_LAYOUT_UNKNOWN;
+        }
+        class AutoCloser {
+        public:
+            AutoCloser(AHardwareBuffer *buffer) : mBuffer(buffer), mLocked(false) {}
+            ~AutoCloser() {
+                if (mLocked) {
+                    AHardwareBuffer_unlock(mBuffer, nullptr);
+                }
+                AHardwareBuffer_release(mBuffer);
+            }
+
+            void setLocked() { mLocked = true; }
+
+        private:
+            AHardwareBuffer *mBuffer;
+            bool mLocked;
+        } autoCloser(buffer);
+        AHardwareBuffer_Planes planes;
+        ret = AHardwareBuffer_lockPlanes(
+                buffer,
+                AHARDWAREBUFFER_USAGE_CPU_READ_OFTEN | AHARDWAREBUFFER_USAGE_CPU_WRITE_OFTEN,
+                -1,       // fence
+                nullptr,  // rect
+                &planes);
+        if (ret != 0) {
+            return FLEX_LAYOUT_UNKNOWN;
+        }
+        autoCloser.setLocked();
+        if (planes.planeCount != 3) {
+            return FLEX_LAYOUT_UNKNOWN;
+        }
+        if (planes.planes[0].pixelStride != 1) {
+            return FLEX_LAYOUT_UNKNOWN;
+        }
+        if (planes.planes[1].pixelStride == 1 && planes.planes[2].pixelStride == 1) {
+            return FLEX_LAYOUT_PLANAR;
+        }
+        if (planes.planes[1].pixelStride == 2 && planes.planes[2].pixelStride == 2) {
+            ssize_t uvDist =
+                static_cast<uint8_t *>(planes.planes[2].data) -
+                static_cast<uint8_t *>(planes.planes[1].data);
+            if (uvDist == 1) {
+                return FLEX_LAYOUT_SEMIPLANAR_UV;
+            } else if (uvDist == -1) {
+                return FLEX_LAYOUT_SEMIPLANAR_VU;
+            }
+            return FLEX_LAYOUT_UNKNOWN;
+        }
+        return FLEX_LAYOUT_UNKNOWN;
+    }();
+    return sLayout;
+}
+
 MediaImage2 CreateYUV420PlanarMediaImage2(
         uint32_t width, uint32_t height, uint32_t stride, uint32_t vstride) {
     return MediaImage2 {
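Editor's note: the new dispatch above leans on the fact that libyuv::NV21ToNV12 merely swaps the byte order of the interleaved chroma plane, so the same call also converts NV12 to NV21 when the source chroma is passed as "VU". A small self-contained check of that symmetry, assuming the bundled libyuv exposes NV21ToNV12 (as the patch itself does):

    #include <libyuv.h>

    #include <cstdint>
    #include <cstdio>
    #include <vector>

    int main() {
        const int width = 4, height = 2;
        // NV12 source: Y plane followed by an interleaved chroma plane (U first).
        std::vector<uint8_t> srcY(width * height, 0x50);
        std::vector<uint8_t> srcUV = {0x10, 0x20, 0x30, 0x40};  // U,V,U,V for one chroma row
        std::vector<uint8_t> dstY(width * height);
        std::vector<uint8_t> dstVU(srcUV.size());

        // NV21ToNV12 copies Y and swaps each chroma byte pair, so feeding it NV12's
        // "UV" plane as if it were "VU" yields the NV21 ordering on the output side.
        if (libyuv::NV21ToNV12(srcY.data(), width, srcUV.data(), width,
                               dstY.data(), width, dstVU.data(), width,
                               width, height) != 0) {
            std::printf("conversion failed\n");
            return 1;
        }
        std::printf("dst chroma: %02x %02x %02x %02x\n",
                    dstVU[0], dstVU[1], dstVU[2], dstVU[3]);  // expected: 20 10 40 30
    }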
diff --git a/media/codec2/sfplugin/utils/Codec2BufferUtils.h b/media/codec2/sfplugin/utils/Codec2BufferUtils.h
index afadf00..af29e81 100644
--- a/media/codec2/sfplugin/utils/Codec2BufferUtils.h
+++ b/media/codec2/sfplugin/utils/Codec2BufferUtils.h
@@ -96,6 +96,11 @@
 bool IsNV12(const C2GraphicView &view);
 
 /**
+ * Returns true iff a view has a NV21 layout.
+ */
+bool IsNV21(const C2GraphicView &view);
+
+/**
  * Returns true iff a view has a I420 layout.
  */
 bool IsI420(const C2GraphicView &view);
@@ -111,10 +116,26 @@
 bool IsNV12(const MediaImage2 *img);
 
 /**
+ * Returns true iff a MediaImage2 has a NV21 layout.
+ */
+bool IsNV21(const MediaImage2 *img);
+
+/**
  * Returns true iff a MediaImage2 has a I420 layout.
  */
 bool IsI420(const MediaImage2 *img);
 
+enum FlexLayout {
+    FLEX_LAYOUT_UNKNOWN,
+    FLEX_LAYOUT_PLANAR,
+    FLEX_LAYOUT_SEMIPLANAR_UV,
+    FLEX_LAYOUT_SEMIPLANAR_VU,
+};
+/**
+ * Returns the layout of the YCBCR_420_888 pixel format.
+ */
+FlexLayout GetYuv420FlexibleLayout();
+
 /**
  * A raw memory block to use for internal buffers.
  *
diff --git a/media/extractors/mp4/MPEG4Extractor.cpp b/media/extractors/mp4/MPEG4Extractor.cpp
index 6385bac..b1d72e8 100644
--- a/media/extractors/mp4/MPEG4Extractor.cpp
+++ b/media/extractors/mp4/MPEG4Extractor.cpp
@@ -2345,7 +2345,7 @@
             if (mLastTrack == NULL)
                 return ERROR_MALFORMED;
 
-            AMediaFormat_setBuffer(mLastTrack->meta, 
+            AMediaFormat_setBuffer(mLastTrack->meta,
                     AMEDIAFORMAT_KEY_ESDS, &buffer[4], chunk_data_size - 4);
 
             if (mPath.size() >= 2
@@ -2427,7 +2427,7 @@
             if (mLastTrack == NULL)
                 return ERROR_MALFORMED;
 
-            AMediaFormat_setBuffer(mLastTrack->meta, 
+            AMediaFormat_setBuffer(mLastTrack->meta,
                     AMEDIAFORMAT_KEY_CSD_AVC, buffer.get(), chunk_data_size);
 
             break;
@@ -2449,7 +2449,7 @@
             if (mLastTrack == NULL)
                 return ERROR_MALFORMED;
 
-            AMediaFormat_setBuffer(mLastTrack->meta, 
+            AMediaFormat_setBuffer(mLastTrack->meta,
                     AMEDIAFORMAT_KEY_CSD_HEVC, buffer.get(), chunk_data_size);
 
             *offset += chunk_size;
@@ -4021,13 +4021,13 @@
                 // custom genre string
                 buffer[size] = '\0';
 
-                AMediaFormat_setString(mFileMetaData, 
+                AMediaFormat_setString(mFileMetaData,
                         metadataKey, (const char *)buffer + 8);
             }
         } else {
             buffer[size] = '\0';
 
-            AMediaFormat_setString(mFileMetaData, 
+            AMediaFormat_setString(mFileMetaData,
                     metadataKey, (const char *)buffer + 8);
         }
     }
@@ -6194,9 +6194,13 @@
         if (newBuffer) {
             if (mIsPcm) {
                 // The twos' PCM block reader assumes that all samples have the same size.
-
-                uint32_t samplesToRead = mSampleTable->getLastSampleIndexInChunk()
-                                                      - mCurrentSampleIndex + 1;
+                uint32_t lastSampleIndexInChunk = mSampleTable->getLastSampleIndexInChunk();
+                if (lastSampleIndexInChunk < mCurrentSampleIndex) {
+                    mBuffer->release();
+                    mBuffer = nullptr;
+                    return AMEDIA_ERROR_UNKNOWN;
+                }
+                uint32_t samplesToRead = lastSampleIndexInChunk - mCurrentSampleIndex + 1;
                 if (samplesToRead > kMaxPcmFrameSize) {
                     samplesToRead = kMaxPcmFrameSize;
                 }
@@ -6205,13 +6209,17 @@
                       samplesToRead, size, mCurrentSampleIndex,
                       mSampleTable->getLastSampleIndexInChunk());
 
-               size_t totalSize = samplesToRead * size;
+                size_t totalSize = samplesToRead * size;
+                if (mBuffer->size() < totalSize) {
+                    mBuffer->release();
+                    mBuffer = nullptr;
+                    return AMEDIA_ERROR_UNKNOWN;
+                }
                 uint8_t* buf = (uint8_t *)mBuffer->data();
                 ssize_t bytesRead = mDataSource->readAt(offset, buf, totalSize);
                 if (bytesRead < (ssize_t)totalSize) {
                     mBuffer->release();
                     mBuffer = NULL;
-
                     return AMEDIA_ERROR_IO;
                 }
 
diff --git a/media/libaaudio/include/aaudio/AAudio.h b/media/libaaudio/include/aaudio/AAudio.h
index 22cf254..3333925 100644
--- a/media/libaaudio/include/aaudio/AAudio.h
+++ b/media/libaaudio/include/aaudio/AAudio.h
@@ -74,8 +74,9 @@
      * The nominal range of the data is [-1.0f, 1.0f).
      * Values outside that range may be clipped.
      *
-     * See also 'floatData' at
-     * https://developer.android.com/reference/android/media/AudioTrack#write(float[],%20int,%20int,%20int)
+     * See also the float data in
+     * <a href="/reference/android/media/AudioTrack#write(float[],%20int,%20int,%20int)">
+     *   write(float[], int, int, int)</a>.
      */
     AAUDIO_FORMAT_PCM_FLOAT,
 
@@ -196,21 +197,69 @@
 };
 typedef int32_t  aaudio_result_t;
 
+/**
+ * AAudio stream states. For details, refer to
+ * <a href="/ndk/guides/audio/aaudio/aaudio#using-streams">Using an Audio Stream</a>
+ */
 enum
 {
+
+    /**
+     * The stream is created but not initialized yet.
+     */
     AAUDIO_STREAM_STATE_UNINITIALIZED = 0,
+    /**
+     * The stream is in an unrecognized state.
+     */
     AAUDIO_STREAM_STATE_UNKNOWN,
+
+    /**
+     * The stream is open and ready to use.
+     */
     AAUDIO_STREAM_STATE_OPEN,
+    /**
+     * The stream is just starting up.
+     */
     AAUDIO_STREAM_STATE_STARTING,
+    /**
+     * The stream has started.
+     */
     AAUDIO_STREAM_STATE_STARTED,
+    /**
+     * The stream is pausing.
+     */
     AAUDIO_STREAM_STATE_PAUSING,
+    /**
+     * The stream has been paused; it can be restarted or flushed.
+     */
     AAUDIO_STREAM_STATE_PAUSED,
+    /**
+     * The stream is being flushed.
+     */
     AAUDIO_STREAM_STATE_FLUSHING,
+    /**
+     * The stream has been flushed and is ready to be restarted.
+     */
     AAUDIO_STREAM_STATE_FLUSHED,
+    /**
+     * The stream is stopping.
+     */
     AAUDIO_STREAM_STATE_STOPPING,
+    /**
+     * The stream has been stopped.
+     */
     AAUDIO_STREAM_STATE_STOPPED,
+    /**
+     * The stream is closing.
+     */
     AAUDIO_STREAM_STATE_CLOSING,
+    /**
+     * The stream has been closed.
+     */
     AAUDIO_STREAM_STATE_CLOSED,
+    /**
+     * The stream is disconnected from the audio device.
+     */
     AAUDIO_STREAM_STATE_DISCONNECTED
 };
 typedef int32_t aaudio_stream_state_t;
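
The state documentation above implies a small state machine. As a sketch only, assuming an already-open output stream, the snippet pauses the stream and waits for the PAUSING to PAUSED transition before flushing, using only public AAudio calls.

    #include <aaudio/AAudio.h>

    // Pause an output stream and flush it once the device acknowledges the pause.
    aaudio_result_t pauseAndFlush(AAudioStream* stream) {
        aaudio_result_t result = AAudioStream_requestPause(stream);
        if (result != AAUDIO_OK) return result;
        aaudio_stream_state_t next = AAUDIO_STREAM_STATE_UNINITIALIZED;
        result = AAudioStream_waitForStateChange(stream, AAUDIO_STREAM_STATE_PAUSING,
                                                 &next, 1000LL * 1000 * 1000 /* 1 second */);
        if (result == AAUDIO_OK && next == AAUDIO_STREAM_STATE_PAUSED) {
            result = AAudioStream_requestFlush(stream);
        }
        return result;
    }
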
@@ -260,7 +309,8 @@
  * This information is used by certain platforms or routing policies
  * to make more refined volume or routing decisions.
  *
- * Note that these match the equivalent values in {@link android.media.AudioAttributes}
+ * Note that these match the equivalent values in
+ * <a href="/reference/android/media/AudioAttributes">AudioAttributes</a>
  * in the Android Java API.
  *
  * Added in API level 28.
@@ -361,7 +411,8 @@
  * an audio book application) this information might be used by the audio framework to
  * enforce audio focus.
  *
- * Note that these match the equivalent values in {@link android.media.AudioAttributes}
+ * Note that these match the equivalent values in
+ * <a href="/reference/android/media/AudioAttributes">AudioAttributes</a>
  * in the Android Java API.
  *
  * Added in API level 28.
@@ -441,7 +492,8 @@
 /**
  * Specifying if audio may or may not be captured by other apps or the system.
  *
- * Note that these match the equivalent values in {@link android.media.AudioAttributes}
+ * Note that these match the equivalent values in
+ * <a href="/reference/android/media/AudioAttributes">AudioAttributes</a>
  * in the Android Java API.
  *
  * Added in API level 29.
@@ -453,10 +505,11 @@
      * For privacy, the following usages can not be recorded: AAUDIO_VOICE_COMMUNICATION*,
      * AAUDIO_USAGE_NOTIFICATION*, AAUDIO_USAGE_ASSISTANCE* and {@link #AAUDIO_USAGE_ASSISTANT}.
      *
-     * On {@link android.os.Build.VERSION_CODES#Q}, this means only {@link #AAUDIO_USAGE_MEDIA}
-     * and {@link #AAUDIO_USAGE_GAME} may be captured.
+     * On <a href="/reference/android/os/Build.VERSION_CODES#Q">Build.VERSION_CODES.Q</a>,
+     * this means only {@link #AAUDIO_USAGE_MEDIA} and {@link #AAUDIO_USAGE_GAME} may be captured.
      *
-     * See {@link android.media.AudioAttributes#ALLOW_CAPTURE_BY_ALL}.
+     * See <a href="/reference/android/media/AudioAttributes.html#ALLOW_CAPTURE_BY_ALL">
+     * ALLOW_CAPTURE_BY_ALL</a>.
      */
     AAUDIO_ALLOW_CAPTURE_BY_ALL = 1,
     /**
@@ -464,8 +517,9 @@
      *
      * System apps can capture for many purposes like accessibility, user guidance...
      * but have strong restriction. See
-     * {@link android.media.AudioAttributes#ALLOW_CAPTURE_BY_SYSTEM} for what the system apps
-     * can do with the capture audio.
+     * <a href="/reference/android/media/AudioAttributes.html#ALLOW_CAPTURE_BY_SYSTEM">
+     * ALLOW_CAPTURE_BY_SYSTEM</a>
+     * for what the system apps can do with the captured audio.
      */
     AAUDIO_ALLOW_CAPTURE_BY_SYSTEM = 2,
     /**
@@ -473,7 +527,8 @@
      *
      * It is encouraged to use {@link #AAUDIO_ALLOW_CAPTURE_BY_SYSTEM} instead of this value as system apps
      * provide significant and useful features for the user (eg. accessibility).
-     * See {@link android.media.AudioAttributes#ALLOW_CAPTURE_BY_NONE}.
+     * See <a href="/reference/android/media/AudioAttributes.html#ALLOW_CAPTURE_BY_NONE">
+     * ALLOW_CAPTURE_BY_NONE</a>.
      */
     AAUDIO_ALLOW_CAPTURE_BY_NONE = 3,
 };
@@ -803,7 +858,9 @@
  * The default is {@link #AAUDIO_ALLOW_CAPTURE_BY_ALL}.
  *
  * Note that an application can also set its global policy, in which case the most restrictive
- * policy is always applied. See {@link android.media.AudioAttributes#setAllowedCapturePolicy(int)}
+ * policy is always applied. See
+ * <a href="/reference/android/media/AudioManager#setAllowedCapturePolicy(int)">
+ * setAllowedCapturePolicy(int)</a>.
  *
  * Available since API level 29.
  *
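
A short usage sketch for the capture-policy values documented above; error handling is trimmed and the stream parameters are placeholders, so treat this as a sketch rather than a complete client.

    #include <aaudio/AAudio.h>

    AAudioStream* openMediaStreamCapturableBySystemOnly() {
        AAudioStreamBuilder* builder = nullptr;
        AAudioStream* stream = nullptr;
        if (AAudio_createStreamBuilder(&builder) != AAUDIO_OK) return nullptr;
        AAudioStreamBuilder_setUsage(builder, AAUDIO_USAGE_MEDIA);
        // Opt out of capture by other apps; system capture (e.g. accessibility) stays allowed.
        AAudioStreamBuilder_setAllowedCapturePolicy(builder, AAUDIO_ALLOW_CAPTURE_BY_SYSTEM);
        AAudioStreamBuilder_openStream(builder, &stream);  // stream stays nullptr on failure
        AAudioStreamBuilder_delete(builder);
        return stream;
    }
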
diff --git a/media/libeffects/lvm/lib/Android.bp b/media/libeffects/lvm/lib/Android.bp
index 5d75055..7998879 100644
--- a/media/libeffects/lvm/lib/Android.bp
+++ b/media/libeffects/lvm/lib/Android.bp
@@ -63,7 +63,6 @@
         "Common/src/DC_2I_D16_TRC_WRA_01_Init.cpp",
         "Common/src/Copy_16.cpp",
         "Common/src/MonoTo2I_32.cpp",
-        "Common/src/LoadConst_32.cpp",
         "Common/src/dB_to_Lin32.cpp",
         "Common/src/Shift_Sat_v16xv16.cpp",
         "Common/src/Shift_Sat_v32xv32.cpp",
@@ -148,7 +147,6 @@
         "Reverb/src/LVREV_Process.cpp",
         "Reverb/src/LVREV_SetControlParameters.cpp",
         "Reverb/src/LVREV_Tables.cpp",
-        "Common/src/LoadConst_32.cpp",
         "Common/src/From2iToMono_32.cpp",
         "Common/src/Mult3s_32x16.cpp",
         "Common/src/Copy_16.cpp",
diff --git a/media/libeffects/lvm/lib/Bundle/src/LVM_Init.cpp b/media/libeffects/lvm/lib/Bundle/src/LVM_Init.cpp
index 9f5f448..12b86f3 100644
--- a/media/libeffects/lvm/lib/Bundle/src/LVM_Init.cpp
+++ b/media/libeffects/lvm/lib/Bundle/src/LVM_Init.cpp
@@ -137,9 +137,9 @@
 
         pInstance->pBufferManagement->pScratch = (LVM_FLOAT*)pInstance->pScratch;
 
-        LoadConst_Float(0, /* Clear the input delay buffer */
-                        (LVM_FLOAT*)&pInstance->pBufferManagement->InDelayBuffer,
-                        (LVM_INT16)(LVM_MAX_CHANNELS * MIN_INTERNAL_BLOCKSIZE));
+        memset(pInstance->pBufferManagement->InDelayBuffer, 0,
+                LVM_MAX_CHANNELS * MIN_INTERNAL_BLOCKSIZE *
+                sizeof(pInstance->pBufferManagement->InDelayBuffer[0]));
         pInstance->pBufferManagement->InDelaySamples =
                 MIN_INTERNAL_BLOCKSIZE;                    /* Set the number of delay samples */
         pInstance->pBufferManagement->OutDelaySamples = 0; /* No samples in the output buffer */
diff --git a/media/libeffects/lvm/lib/Common/lib/VectorArithmetic.h b/media/libeffects/lvm/lib/Common/lib/VectorArithmetic.h
index 18de85b..10f351e 100644
--- a/media/libeffects/lvm/lib/Common/lib/VectorArithmetic.h
+++ b/media/libeffects/lvm/lib/Common/lib/VectorArithmetic.h
@@ -24,8 +24,6 @@
     VARIOUS FUNCTIONS
 ***********************************************************************************/
 
-void LoadConst_Float(const LVM_FLOAT val, LVM_FLOAT* dst, LVM_INT16 n);
-
 void Copy_Float(const LVM_FLOAT* src, LVM_FLOAT* dst, LVM_INT16 n);
 void Copy_Float_Mc_Stereo(const LVM_FLOAT* src, LVM_FLOAT* dst, LVM_INT16 NrFrames,
                           LVM_INT32 NrChannels);
diff --git a/media/libeffects/lvm/lib/Common/src/LVC_MixSoft_1St_D16C31_SAT.cpp b/media/libeffects/lvm/lib/Common/src/LVC_MixSoft_1St_D16C31_SAT.cpp
index be19fa0..5a67bda 100644
--- a/media/libeffects/lvm/lib/Common/src/LVC_MixSoft_1St_D16C31_SAT.cpp
+++ b/media/libeffects/lvm/lib/Common/src/LVC_MixSoft_1St_D16C31_SAT.cpp
@@ -19,6 +19,7 @@
    INCLUDE FILES
 ***********************************************************************************/
 
+#include <string.h>
 #include "LVC_Mixer_Private.h"
 #include "VectorArithmetic.h"
 #include "ScalarArithmetic.h"
@@ -68,7 +69,7 @@
 
     if (HardMixing) {
         if (pInstance->Target == 0)
-            LoadConst_Float(0.0, dst, n);
+            memset(dst, 0, n * sizeof(*dst));
         else {
             if ((pInstance->Target) != 1.0f)
                 Mult3s_Float(src, (pInstance->Target), dst, n);
@@ -150,7 +151,7 @@
 
     if (HardMixing) {
         if (pInstance->Target == 0)
-            LoadConst_Float(0.0, dst, NrFrames * NrChannels);
+            memset(dst, 0, NrFrames * NrChannels * sizeof(*dst));
         else {
             if ((pInstance->Target) != 1.0f)
                 Mult3s_Float(src, (pInstance->Target), dst, NrFrames * NrChannels);
diff --git a/media/libeffects/lvm/lib/Common/src/LoadConst_32.cpp b/media/libeffects/lvm/lib/Common/src/LoadConst_32.cpp
deleted file mode 100644
index df7a558..0000000
--- a/media/libeffects/lvm/lib/Common/src/LoadConst_32.cpp
+++ /dev/null
@@ -1,38 +0,0 @@
-/*
- * Copyright (C) 2004-2010 NXP Software
- * Copyright (C) 2010 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-/**********************************************************************************
-   INCLUDE FILES
-***********************************************************************************/
-
-#include "VectorArithmetic.h"
-
-/**********************************************************************************
-   FUNCTION LoadConst_32
-***********************************************************************************/
-void LoadConst_Float(const LVM_FLOAT val, LVM_FLOAT* dst, LVM_INT16 n) {
-    LVM_INT16 ii;
-
-    for (ii = n; ii != 0; ii--) {
-        *dst = val;
-        dst++;
-    }
-
-    return;
-}
-
-/**********************************************************************************/
diff --git a/media/libeffects/lvm/lib/Common/src/MixSoft_1St_D32C31_WRA.cpp b/media/libeffects/lvm/lib/Common/src/MixSoft_1St_D32C31_WRA.cpp
index 8408962..58a9102 100644
--- a/media/libeffects/lvm/lib/Common/src/MixSoft_1St_D32C31_WRA.cpp
+++ b/media/libeffects/lvm/lib/Common/src/MixSoft_1St_D32C31_WRA.cpp
@@ -19,6 +19,7 @@
    INCLUDE FILES
 ***********************************************************************************/
 
+#include <string.h>
 #include "Mixer_private.h"
 #include "VectorArithmetic.h"
 
@@ -61,7 +62,7 @@
 
     if (HardMixing) {
         if (pInstance->Target == 0)
-            LoadConst_Float(0, dst, n);
+            memset(dst, 0, n * sizeof(*dst));
         else if ((pInstance->Target) == 1.0f) {
             if (src != dst) Copy_Float((LVM_FLOAT*)src, (LVM_FLOAT*)dst, (LVM_INT16)(n));
         } else
diff --git a/media/libeffects/lvm/lib/Reverb/src/LVREV_ClearAudioBuffers.cpp b/media/libeffects/lvm/lib/Reverb/src/LVREV_ClearAudioBuffers.cpp
index d4b321f..be3505f 100644
--- a/media/libeffects/lvm/lib/Reverb/src/LVREV_ClearAudioBuffers.cpp
+++ b/media/libeffects/lvm/lib/Reverb/src/LVREV_ClearAudioBuffers.cpp
@@ -60,7 +60,8 @@
     pLVREV_Private->pRevLPFBiquad->clear();
     for (size_t i = 0; i < pLVREV_Private->InstanceParams.NumDelays; i++) {
         pLVREV_Private->revLPFBiquad[i]->clear();
-        LoadConst_Float(0, pLVREV_Private->pDelay_T[i], LVREV_MAX_T_DELAY[i]);
+        memset(pLVREV_Private->pDelay_T[i], 0, LVREV_MAX_T_DELAY[i] *
+                sizeof(pLVREV_Private->pDelay_T[i][0]));
     }
     return LVREV_SUCCESS;
 }
diff --git a/media/libeffects/lvm/lib/StereoWidening/src/LVCS_ReverbGenerator.cpp b/media/libeffects/lvm/lib/StereoWidening/src/LVCS_ReverbGenerator.cpp
index c5b6598..de23d07 100644
--- a/media/libeffects/lvm/lib/StereoWidening/src/LVCS_ReverbGenerator.cpp
+++ b/media/libeffects/lvm/lib/StereoWidening/src/LVCS_ReverbGenerator.cpp
@@ -81,10 +81,7 @@
         pConfig->DelaySize =
                 (pParams->NrChannels == FCC_1) ? (LVM_INT16)Delay : (LVM_INT16)(FCC_2 * Delay);
         pConfig->DelayOffset = 0;
-        LoadConst_Float(0,                                      /* Value */
-                        (LVM_FLOAT*)&pConfig->StereoSamples[0], /* Destination */
-                        /* Number of words */
-                        (LVM_UINT16)(sizeof(pConfig->StereoSamples) / sizeof(LVM_FLOAT)));
+        memset(pConfig->StereoSamples, 0, sizeof(pConfig->StereoSamples));
         /*
          * Setup the filters
          */
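
The LoadConst_Float removals above all follow the same pattern, sketched here under standard-C++ assumptions: a byte-wise memset is a valid way to zero an IEEE-754 float buffer because the all-zero bit pattern decodes to +0.0f, but the length must be given in bytes, and a non-zero fill still needs an element-wise operation.

    #include <algorithm>
    #include <cstddef>
    #include <cstring>

    void clearBuffer(float* buf, size_t n) {
        // All-zero bytes decode to +0.0f, so memset matches the old float fill of 0;
        // the byte count comes from n * sizeof(*buf).
        std::memset(buf, 0, n * sizeof(*buf));
    }

    void fillBuffer(float* buf, size_t n, float value) {
        // Any non-zero constant still needs an element-wise fill.
        std::fill_n(buf, n, value);
    }
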
diff --git a/media/libmediaformatshaper/CodecProperties.cpp b/media/libmediaformatshaper/CodecProperties.cpp
index 961f676..e6b3c46 100644
--- a/media/libmediaformatshaper/CodecProperties.cpp
+++ b/media/libmediaformatshaper/CodecProperties.cpp
@@ -23,6 +23,10 @@
 
 #include <media/formatshaper/CodecProperties.h>
 
+
+// We won't accept shaping point dimensions beyond this limit.
+static const int32_t DIMENSION_LIMIT = 16384;
+
 namespace android {
 namespace mediaformatshaper {
 
@@ -113,7 +117,13 @@
             setBpp(bpp);
             legal = true;
         }
+    } else if (!strncmp(key.c_str(), "vq-target-bpp-", strlen("vq-target-bpp-"))) {
+        std::string resolution = key.substr(strlen("vq-target-bpp-"));
+        if (bppPoint(resolution, value)) {
+            legal = true;
+        }
     } else if (!strcmp(key.c_str(), "vq-target-bppx100")) {
+        // legacy, prototyping
         const char *p = value.c_str();
         char *q;
         int32_t iValue =  strtol(p, &q, 0);
@@ -143,6 +153,119 @@
     return false;
 }
 
+bool CodecProperties::bppPoint(std::string resolution, std::string value) {
+
+    int32_t width = 0;
+    int32_t height = 0;
+    double bpp = -1;
+
+    // resolution is "WxH", "W*H" or a standard name like "720p"
+    if (resolution == "1080p") {
+        width = 1080; height = 1920;
+    } else if (resolution == "720p") {
+        width = 720; height = 1280;
+    } else if (resolution == "540p") {
+        width = 540; height = 960;
+    } else if (resolution == "480p") {
+        width = 480; height = 854;
+    } else {
+        size_t sep = resolution.find('x');
+        if (sep == std::string::npos) {
+            sep = resolution.find('*');
+        }
+        if (sep == std::string::npos) {
+            ALOGW("unable to parse resolution: '%s'", resolution.c_str());
+            return false;
+        }
+        std::string w = resolution.substr(0, sep);
+        std::string h = resolution.substr(sep+1);
+
+        char *q;
+        const char *p = w.c_str();
+        width = strtol(p, &q, 0);
+        if (q == p) {
+            width = -1;
+        }
+        p = h.c_str();
+        height = strtol(p, &q, 0);
+        if (q == p) {
+            height = -1;
+        }
+        if (width <= 0 || height <= 0 || width > DIMENSION_LIMIT || height > DIMENSION_LIMIT) {
+            ALOGW("unparseable: width, height '%s'", resolution.c_str());
+            return false;
+        }
+    }
+
+    const char *p = value.c_str();
+    char *q;
+    bpp = strtod(p, &q);
+    if (q == p) {
+        ALOGW("unparseable bpp '%s'", value.c_str());
+        return false;
+    }
+
+    struct bpp_point *point = (struct bpp_point*) malloc(sizeof(*point));
+    if (point == nullptr) {
+        ALOGW("unable to allocate memory for bpp point");
+        return false;
+    }
+
+    point->pixels = width * height;
+    point->width = width;
+    point->height = height;
+    point->bpp = bpp;
+
+    if (mBppPoints == nullptr) {
+        point->next = nullptr;
+        mBppPoints = point;
+    } else if (point->pixels < mBppPoints->pixels) {
+        // at the front
+        point->next = mBppPoints;
+        mBppPoints = point;
+    } else {
+        struct bpp_point *after = mBppPoints;
+        while (after->next) {
+            if (point->pixels > after->next->pixels) {
+                after = after->next;
+                continue;
+            }
+
+            // insert before after->next
+            point->next = after->next;
+            after->next = point;
+            break;
+        }
+        if (after->next == nullptr) {
+            // hasn't gone in yet
+            point->next = nullptr;
+            after->next = point;
+        }
+    }
+
+    return true;
+}
+
+double CodecProperties::getBpp(int32_t width, int32_t height) {
+    // look in the per-resolution list
+
+    int32_t pixels = width * height;
+
+    if (mBppPoints) {
+        struct bpp_point *point = mBppPoints;
+        while (point && point->pixels < pixels) {
+            point = point->next;
+        }
+        if (point) {
+            ALOGV("getBpp(w=%d,h=%d) returns %f from bpppoint w=%d h=%d",
+                width, height, point->bpp, point->width, point->height);
+            return point->bpp;
+        }
+    }
+
+    ALOGV("defaulting to %f bpp", mBpp);
+    return mBpp;
+}
 
 std::string CodecProperties::getMapping(std::string key, std::string kind) {
     ALOGV("getMapping(key %s, kind %s )", key.c_str(), kind.c_str());
diff --git a/media/libmediaformatshaper/CodecSeeding.cpp b/media/libmediaformatshaper/CodecSeeding.cpp
index fde7833..a7fcc66 100644
--- a/media/libmediaformatshaper/CodecSeeding.cpp
+++ b/media/libmediaformatshaper/CodecSeeding.cpp
@@ -50,13 +50,16 @@
 
 static preloadTuning_t featuresAvc[] = {
       {true, "vq-target-bpp", "2.45"},
-      {true, "vq-target-qpmax", "41"},
+      {true, "vq-target-bpp-1080p", "2.40"},
+      {true, "vq-target-bpp-540p", "2.60"},
+      {true, "vq-target-bpp-480p", "3.00"},
+      {true, "vq-target-qpmax", "40"},
       {true, nullptr, 0}
 };
 
 static preloadTuning_t featuresHevc[] = {
       {true, "vq-target-bpp", "2.30"},
-      {true, "vq-target-qpmax", "42"}, // nop, since hevc codecs don't declare qp support
+      {true, "vq-target-qpmax", "40"}, // nop, since hevc codecs don't declare qp support
       {true, nullptr, 0}
 };
 
diff --git a/media/libmediaformatshaper/VQApply.cpp b/media/libmediaformatshaper/VQApply.cpp
index 39a5e19..08e23cc 100644
--- a/media/libmediaformatshaper/VQApply.cpp
+++ b/media/libmediaformatshaper/VQApply.cpp
@@ -48,6 +48,15 @@
 //
 static const int BITRATE_MODE_VBR = 1;
 
+
+// constants we use within the calculations
+//
+constexpr double BITRATE_LEAVE_UNTOUCHED = 2.0;
+constexpr double BITRATE_QP_UNAVAILABLE = 1.20;
+// 10% didn't work well on bonito (with no QP support)
+// 15% is next... still leaves a few short
+// 20%? this is on the edge of what I want to do
+
 //
 // Caller retains ownership of and responsibility for inFormat
 //
@@ -69,69 +78,82 @@
     }
 
     //
-    // apply any and all tools that we have.
+    // consider any and all tools available
     // -- qp
     // -- minimum bits-per-pixel
     //
-    if (!codec->supportsQp()) {
-        ALOGD("minquality: no qp bounding in codec %s", codec->getName().c_str());
-    } else {
-        // use a (configurable) QP value to force better quality
-        //
+    int64_t bitrateChosen = 0;
+    int32_t qpChosen = INT32_MAX;
+
+    int64_t bitrateConfigured = 0;
+    int32_t bitrateConfiguredTmp = 0;
+    (void) AMediaFormat_getInt32(inFormat, AMEDIAFORMAT_KEY_BIT_RATE, &bitrateConfiguredTmp);
+    bitrateConfigured = bitrateConfiguredTmp;
+    bitrateChosen = bitrateConfigured;
+
+    int32_t width = 0;
+    (void) AMediaFormat_getInt32(inFormat, AMEDIAFORMAT_KEY_WIDTH, &width);
+    int32_t height = 0;
+    (void) AMediaFormat_getInt32(inFormat, AMEDIAFORMAT_KEY_HEIGHT, &height);
+    int64_t pixels = ((int64_t)width) * height;
+    double minimumBpp = codec->getBpp(width, height);
+
+    int64_t bitrateFloor = pixels * minimumBpp;
+    if (bitrateFloor > INT32_MAX) bitrateFloor = INT32_MAX;
+
+    // if we are far enough above the target bpp, leave it alone
+    //
+    ALOGV("bitrate: configured %" PRId64 " floor %" PRId64, bitrateConfigured, bitrateFloor);
+    if (bitrateConfigured >= BITRATE_LEAVE_UNTOUCHED * bitrateFloor) {
+        ALOGV("high enough bitrate: configured %" PRId64 " >= %f * floor %" PRId64,
+                bitrateConfigured, BITRATE_LEAVE_UNTOUCHED, bitrateFloor);
+        return 0;
+    }
+
+    // raise anything below the bitrate floor
+    if (bitrateConfigured < bitrateFloor) {
+        ALOGD("raise bitrate: configured %" PRId64 " to floor %" PRId64,
+                bitrateConfigured, bitrateFloor);
+        bitrateChosen = bitrateFloor;
+    }
+
+    bool qpPresent = hasQp(inFormat);
+
+    // add QP, if not already present
+    if (!qpPresent) {
         int32_t qpmax = codec->targetQpMax();
-        int32_t qpmaxUser = INT32_MAX;
-        if (hasQp(inFormat)) {
-            (void) AMediaFormat_getInt32(inFormat, AMEDIAFORMAT_VIDEO_QP_MAX, &qpmaxUser);
-            ALOGD("minquality by QP: format already sets QP");
-        }
-
-        // if the system didn't do one, use what the user provided
-        if (qpmax == 0 && qpmaxUser != INT32_MAX) {
-                qpmax = qpmaxUser;
-        }
-        // XXX: if both said something, how do we want to reconcile that
-
-        if (qpmax > 0) {
-            ALOGD("minquality by QP: inject %s=%d", AMEDIAFORMAT_VIDEO_QP_MAX, qpmax);
-            AMediaFormat_setInt32(inFormat, AMEDIAFORMAT_VIDEO_QP_MAX, qpmax);
-
-            // force spreading the QP across frame types, since we imposing a value
-            qpSpreadMaxPerFrameType(inFormat, info->qpDelta, info->qpMax, /* override */ true);
+        if (qpmax != INT32_MAX) {
+            ALOGV("choosing qp=%d", qpmax);
+            qpChosen = qpmax;
         }
     }
 
-    double bpp = codec->getBpp();
-    if (bpp > 0.0) {
-        // if we've decided to use bits-per-pixel (per second) to drive the quality
-        //
-        // (properly phrased as 'bits per second per pixel' so that it's resolution
-        // and framerate agnostic
-        //
-        // all of these is structured so that a missing value cleanly gets us to a
-        // non-faulting value of '0' for the minimum bits-per-pixel.
-        //
-        int32_t width = 0;
-        (void) AMediaFormat_getInt32(inFormat, AMEDIAFORMAT_KEY_WIDTH, &width);
-        int32_t height = 0;
-        (void) AMediaFormat_getInt32(inFormat, AMEDIAFORMAT_KEY_HEIGHT, &height);
-        int32_t bitrateConfigured = 0;
-        (void) AMediaFormat_getInt32(inFormat, AMEDIAFORMAT_KEY_BIT_RATE, &bitrateConfigured);
-
-        int64_t pixels = ((int64_t)width) * height;
-        int64_t bitrateFloor = pixels * bpp;
-
-        if (bitrateFloor > INT32_MAX) bitrateFloor = INT32_MAX;
-
-        ALOGD("minquality/bitrate: target %d floor %" PRId64 "(%.3f bpp * (%d w * %d h)",
-              bitrateConfigured, bitrateFloor, codec->getBpp(), height, width);
-
-        if (bitrateConfigured < bitrateFloor) {
-            ALOGD("minquality/target bitrate raised from %d to %" PRId64 " bps",
-                  bitrateConfigured, bitrateFloor);
-            AMediaFormat_setInt32(inFormat, AMEDIAFORMAT_KEY_BIT_RATE, (int32_t)bitrateFloor);
+    // if QP is desired but not supported, compensate with additional bits
+    if (!codec->supportsQp()) {
+        if (qpPresent || qpChosen != INT32_MAX) {
+            ALOGD("minquality: desired QP, but unsupported, boost bitrate %" PRId64 " to %" PRId64,
+                bitrateChosen, (int64_t)(bitrateChosen * BITRATE_QP_UNAVAILABLE));
+            bitrateChosen = bitrateChosen * BITRATE_QP_UNAVAILABLE;
+            qpChosen = INT32_MAX;
         }
     }
 
+    // apply our chosen values
+    //
+    if (qpChosen != INT32_MAX) {
+        ALOGD("minquality by QP: inject %s=%d", AMEDIAFORMAT_VIDEO_QP_MAX, qpChosen);
+        AMediaFormat_setInt32(inFormat, AMEDIAFORMAT_VIDEO_QP_MAX, qpChosen);
+
+        // force spreading the QP across frame types, since we are imposing a value
+        qpSpreadMaxPerFrameType(inFormat, info->qpDelta, info->qpMax, /* override */ true);
+    }
+
+    if (bitrateChosen != bitrateConfigured) {
+        ALOGD("minquality/target bitrate raised from %" PRId64 " to %" PRId64 " bps",
+              bitrateConfigured, bitrateChosen);
+        AMediaFormat_setInt32(inFormat, AMEDIAFORMAT_KEY_BIT_RATE, (int32_t)bitrateChosen);
+    }
+
     return 0;
 }
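
To make the new decision flow concrete: for a 1080p AVC session with the 2.40 bpp seed, the floor is 1920 * 1080 * 2.40, roughly 4.98 Mbps, and the leave-untouched threshold is twice that, roughly 9.95 Mbps. The sketch below mirrors that logic with those example numbers hard-coded; it is an illustration, not the shipping code path.

    #include <cstdint>

    int64_t chooseBitrateFor1080pAvc(int64_t configuredBps) {
        const int64_t pixels = 1920LL * 1080;                       // 2,073,600
        const double minimumBpp = 2.40;                             // per-resolution seed
        const int64_t floorBps = (int64_t)(pixels * minimumBpp);    // ~4,976,640 bps
        if (configuredBps >= 2.0 * floorBps) return configuredBps;  // far enough above: untouched
        return configuredBps < floorBps ? floorBps : configuredBps; // raise to the floor if below
    }
    // chooseBitrateFor1080pAvc(12'000'000) -> 12,000,000 (left alone)
    // chooseBitrateFor1080pAvc( 3'000'000) ->  4,976,640 (raised to the floor)
    // If QP shaping is wanted but the codec lacks QP support, VQApply additionally multiplies
    // the chosen bitrate by BITRATE_QP_UNAVAILABLE (1.20).
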
 
diff --git a/media/libmediaformatshaper/include/media/formatshaper/CodecProperties.h b/media/libmediaformatshaper/include/media/formatshaper/CodecProperties.h
index 84268b9..ff7051f 100644
--- a/media/libmediaformatshaper/include/media/formatshaper/CodecProperties.h
+++ b/media/libmediaformatshaper/include/media/formatshaper/CodecProperties.h
@@ -21,6 +21,8 @@
 #include <mutex>
 #include <string>
 
+#include <inttypes.h>
+
 #include <utils/RefBase.h>
 
 namespace android {
@@ -73,7 +75,7 @@
     // This is used to calculate a minimum bitrate for any particular resolution.
     // A 1080p (1920*1080 = 2073600 pixels) to be encoded at 5Mbps has a bpp == 2.41
     void setBpp(double bpp) { mBpp = bpp;}
-    double getBpp() {return mBpp;}
+    double getBpp(int32_t width, int32_t height);
 
     // Does this codec support QP bounding
     // The getMapping() methods provide any needed mapping to non-standard keys.
@@ -92,10 +94,22 @@
     std::string mMediaType;
     int mApi = 0;
     int mMinimumQuality = 0;
-    int mTargetQpMax = 0;
+    int mTargetQpMax = INT32_MAX;
     bool mSupportsQp = false;
     double mBpp = 0.0;
 
+    // allow different target bits-per-pixel based on resolution
+    // similar to codec 'performance points'
+    // uses 'next largest' (by pixel count) point as minimum bpp
+    struct bpp_point {
+        struct bpp_point *next;
+        int32_t pixels;
+        int32_t width, height;
+        double bpp;
+    };
+    struct bpp_point *mBppPoints = nullptr;
+    bool bppPoint(std::string resolution, std::string value);
+
     std::mutex mMappingLock;
     // XXX figure out why I'm having problems getting compiler to like GUARDED_BY
     std::map<std::string, std::string> mMappings /*GUARDED_BY(mMappingLock)*/ ;
diff --git a/media/libmediaplayerservice/Android.bp b/media/libmediaplayerservice/Android.bp
index d250976..287317d 100644
--- a/media/libmediaplayerservice/Android.bp
+++ b/media/libmediaplayerservice/Android.bp
@@ -38,6 +38,7 @@
         "media_permission-aidl-cpp",
         "libaudioclient_aidl_conversion",
         "libbase",
+        "libactivitymanager_aidl",
         "libandroid_net",
         "libaudioclient",
         "libbinder",
diff --git a/media/libmediatranscoding/TEST_MAPPING b/media/libmediatranscoding/TEST_MAPPING
index f8a9db9..40f7b21 100644
--- a/media/libmediatranscoding/TEST_MAPPING
+++ b/media/libmediatranscoding/TEST_MAPPING
@@ -26,6 +26,9 @@
         },
         {
             "name": "VideoTrackTranscoderTests"
+        },
+        {
+            "name": "CtsMediaTranscodingTestCases"
         }
     ]
 }
diff --git a/media/libmediatranscoding/TranscodingClientManager.cpp b/media/libmediatranscoding/TranscodingClientManager.cpp
index 086c658..6dbcaf9 100644
--- a/media/libmediatranscoding/TranscodingClientManager.cpp
+++ b/media/libmediatranscoding/TranscodingClientManager.cpp
@@ -97,8 +97,8 @@
     Status addClientUid(int32_t /*in_sessionId*/, int32_t /*in_clientUid*/,
                         bool* /*_aidl_return*/) override;
 
-    Status getClientUids(int32_t /*in_sessionId*/, std::vector<int32_t>* /*out_clientUids*/,
-                         bool* /*_aidl_return*/) override;
+    Status getClientUids(int32_t /*in_sessionId*/,
+                         std::optional<std::vector<int32_t>>* /*_aidl_return*/) override;
 
     Status unregister() override;
 };
@@ -259,10 +259,9 @@
     return Status::ok();
 }
 
-Status TranscodingClientManager::ClientImpl::getClientUids(int32_t in_sessionId,
-                                                           std::vector<int32_t>* out_clientUids,
-                                                           bool* _aidl_return) {
-    *_aidl_return = false;
+Status TranscodingClientManager::ClientImpl::getClientUids(
+        int32_t in_sessionId, std::optional<std::vector<int32_t>>* _aidl_return) {
+    *_aidl_return = std::nullopt;
 
     std::shared_ptr<TranscodingClientManager> owner;
     if (mAbandoned || (owner = mOwner.lock()) == nullptr) {
@@ -273,8 +272,11 @@
         return Status::ok();
     }
 
-    *_aidl_return =
-            owner->mSessionController->getClientUids(mClientId, in_sessionId, out_clientUids);
+    std::vector<int32_t> result;
+
+    if (owner->mSessionController->getClientUids(mClientId, in_sessionId, &result)) {
+        *_aidl_return = result;
+    }
     return Status::ok();
 }
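
Caller-side handling of the new nullable return, as a minimal sketch. The logClientUids helper, the proxy object, and the session id are hypothetical stand-ins; the proxy type is templated to avoid committing to a particular AIDL backend.

    #include <cstdint>
    #include <optional>
    #include <vector>

    template <typename ClientProxy>
    void logClientUids(const ClientProxy& client, int32_t sessionId) {
        std::optional<std::vector<int32_t>> clientUids;
        auto status = client->getClientUids(sessionId, &clientUids);
        if (!status.isOk() || !clientUids.has_value()) {
            return;  // binder failure, or the session does not exist
        }
        for (int32_t uid : *clientUids) {
            (void)uid;  // each uid currently subscribed to this session
        }
    }
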
 
diff --git a/media/libmediatranscoding/aidl/android/media/ITranscodingClient.aidl b/media/libmediatranscoding/aidl/android/media/ITranscodingClient.aidl
index c6fa57f..9ef9052 100644
--- a/media/libmediatranscoding/aidl/android/media/ITranscodingClient.aidl
+++ b/media/libmediatranscoding/aidl/android/media/ITranscodingClient.aidl
@@ -77,7 +77,8 @@
      * @clientUids array to hold the retrieved client uid list.
      * @return false if the session doesn't exist, true otherwise.
      */
-    boolean getClientUids(in int sessionId, out int[] clientUids);
+    @nullable
+    int[] getClientUids(in int sessionId);
 
     /**
     * Unregister the client with the MediaTranscodingService.
diff --git a/media/libmediatranscoding/tests/TranscodingClientManager_tests.cpp b/media/libmediatranscoding/tests/TranscodingClientManager_tests.cpp
index b7b1279..9233410 100644
--- a/media/libmediatranscoding/tests/TranscodingClientManager_tests.cpp
+++ b/media/libmediatranscoding/tests/TranscodingClientManager_tests.cpp
@@ -577,7 +577,7 @@
     addMultipleClients();
 
     bool result;
-    std::vector<int32_t> clientUids;
+    std::optional<std::vector<int32_t>> clientUids;
     TranscodingRequestParcel request;
     TranscodingSessionParcel session;
     uid_t ownUid = ::getuid();
@@ -587,10 +587,10 @@
     EXPECT_FALSE(result);
     EXPECT_TRUE(mClient1->addClientUid(SESSION(0), ownUid, &result).isOk());
     EXPECT_FALSE(result);
-    EXPECT_TRUE(mClient1->getClientUids(-1, &clientUids, &result).isOk());
-    EXPECT_FALSE(result);
-    EXPECT_TRUE(mClient1->getClientUids(SESSION(0), &clientUids, &result).isOk());
-    EXPECT_FALSE(result);
+    EXPECT_TRUE(mClient1->getClientUids(-1, &clientUids).isOk());
+    EXPECT_EQ(clientUids, std::nullopt);
+    EXPECT_TRUE(mClient1->getClientUids(SESSION(0), &clientUids).isOk());
+    EXPECT_EQ(clientUids, std::nullopt);
 
     unregisterMultipleClients();
 }
@@ -599,7 +599,7 @@
     addMultipleClients();
 
     bool result;
-    std::vector<int32_t> clientUids;
+    std::optional<std::vector<int32_t>> clientUids;
     TranscodingRequestParcel request;
     TranscodingSessionParcel session;
     uid_t ownUid = ::getuid();
@@ -612,10 +612,10 @@
     EXPECT_TRUE(result);
 
     // Should have own uid in client uid list.
-    EXPECT_TRUE(mClient1->getClientUids(SESSION(0), &clientUids, &result).isOk());
-    EXPECT_TRUE(result);
-    EXPECT_EQ(clientUids.size(), 1);
-    EXPECT_EQ(clientUids[0], ownUid);
+    EXPECT_TRUE(mClient1->getClientUids(SESSION(0), &clientUids).isOk());
+    EXPECT_NE(clientUids, std::nullopt);
+    EXPECT_EQ(clientUids->size(), 1);
+    EXPECT_EQ((*clientUids)[0], ownUid);
 
     // Adding invalid client uid should fail.
     EXPECT_TRUE(mClient1->addClientUid(SESSION(0), kInvalidClientUid, &result).isOk());
@@ -633,28 +633,28 @@
     EXPECT_TRUE(result);
 
     // Should not have own uid in client uid list.
-    EXPECT_TRUE(mClient1->getClientUids(SESSION(1), &clientUids, &result).isOk());
-    EXPECT_TRUE(result);
-    EXPECT_EQ(clientUids.size(), 0);
+    EXPECT_TRUE(mClient1->getClientUids(SESSION(1), &clientUids).isOk());
+    EXPECT_NE(clientUids, std::nullopt);
+    EXPECT_EQ(clientUids->size(), 0);
 
     // Add own uid (with IMediaTranscodingService::USE_CALLING_UID) again, should succeed.
     EXPECT_TRUE(
             mClient1->addClientUid(SESSION(1), IMediaTranscodingService::USE_CALLING_UID, &result)
                     .isOk());
     EXPECT_TRUE(result);
-    EXPECT_TRUE(mClient1->getClientUids(SESSION(1), &clientUids, &result).isOk());
-    EXPECT_TRUE(result);
-    EXPECT_EQ(clientUids.size(), 1);
-    EXPECT_EQ(clientUids[0], ownUid);
+    EXPECT_TRUE(mClient1->getClientUids(SESSION(1), &clientUids).isOk());
+    EXPECT_NE(clientUids, std::nullopt);
+    EXPECT_EQ(clientUids->size(), 1);
+    EXPECT_EQ((*clientUids)[0], ownUid);
 
     // Add more uids, should succeed.
     int32_t kFakeUid = ::getuid() ^ 0x1;
     EXPECT_TRUE(mClient1->addClientUid(SESSION(1), kFakeUid, &result).isOk());
     EXPECT_TRUE(result);
-    EXPECT_TRUE(mClient1->getClientUids(SESSION(1), &clientUids, &result).isOk());
-    EXPECT_TRUE(result);
+    EXPECT_TRUE(mClient1->getClientUids(SESSION(1), &clientUids).isOk());
+    EXPECT_NE(clientUids, std::nullopt);
     std::unordered_set<uid_t> uidSet;
-    uidSet.insert(clientUids.begin(), clientUids.end());
+    uidSet.insert(clientUids->begin(), clientUids->end());
     EXPECT_EQ(uidSet.size(), 2);
     EXPECT_EQ(uidSet.count(ownUid), 1);
     EXPECT_EQ(uidSet.count(kFakeUid), 1);
diff --git a/media/libmediatranscoding/transcoder/VideoTrackTranscoder.cpp b/media/libmediatranscoding/transcoder/VideoTrackTranscoder.cpp
index 4405180..d56bec0 100644
--- a/media/libmediatranscoding/transcoder/VideoTrackTranscoder.cpp
+++ b/media/libmediatranscoding/transcoder/VideoTrackTranscoder.cpp
@@ -220,16 +220,15 @@
         return AMEDIA_ERROR_INVALID_PARAMETER;
     }
 
-    int32_t bitrate;
-    if (!AMediaFormat_getInt32(encoderFormat, AMEDIAFORMAT_KEY_BIT_RATE, &bitrate)) {
-        status = mMediaSampleReader->getEstimatedBitrateForTrack(mTrackIndex, &bitrate);
+    if (!AMediaFormat_getInt32(encoderFormat, AMEDIAFORMAT_KEY_BIT_RATE, &mConfiguredBitrate)) {
+        status = mMediaSampleReader->getEstimatedBitrateForTrack(mTrackIndex, &mConfiguredBitrate);
         if (status != AMEDIA_OK) {
             LOG(ERROR) << "Unable to estimate bitrate. Using default " << kDefaultBitrateMbps;
-            bitrate = kDefaultBitrateMbps;
+            mConfiguredBitrate = kDefaultBitrateMbps;
         }
 
-        LOG(INFO) << "Configuring bitrate " << bitrate;
-        AMediaFormat_setInt32(encoderFormat, AMEDIAFORMAT_KEY_BIT_RATE, bitrate);
+        LOG(INFO) << "Configuring bitrate " << mConfiguredBitrate;
+        AMediaFormat_setInt32(encoderFormat, AMEDIAFORMAT_KEY_BIT_RATE, mConfiguredBitrate);
     }
 
     SetDefaultFormatValueFloat(AMEDIAFORMAT_KEY_I_FRAME_INTERVAL, encoderFormat,
diff --git a/media/libmediatranscoding/transcoder/include/media/VideoTrackTranscoder.h b/media/libmediatranscoding/transcoder/include/media/VideoTrackTranscoder.h
index 8a506a0..3e72882 100644
--- a/media/libmediatranscoding/transcoder/include/media/VideoTrackTranscoder.h
+++ b/media/libmediatranscoding/transcoder/include/media/VideoTrackTranscoder.h
@@ -45,6 +45,7 @@
 
 private:
     friend struct AsyncCodecCallbackDispatch;
+    friend class VideoTrackTranscoderTests;
 
     // Minimal blocking queue used as a message queue by VideoTrackTranscoder.
     template <typename T>
@@ -101,6 +102,7 @@
     uid_t mUid;
     uint64_t mInputFrameCount = 0;
     uint64_t mOutputFrameCount = 0;
+    int32_t mConfiguredBitrate = 0;
 };
 
 }  // namespace android
diff --git a/media/libmediatranscoding/transcoder/tests/AndroidTestTemplate.xml b/media/libmediatranscoding/transcoder/tests/AndroidTestTemplate.xml
index e40a507..c3a0ced 100644
--- a/media/libmediatranscoding/transcoder/tests/AndroidTestTemplate.xml
+++ b/media/libmediatranscoding/transcoder/tests/AndroidTestTemplate.xml
@@ -24,7 +24,7 @@
     <test class="com.android.tradefed.testtype.GTest" >
         <option name="native-test-device-path" value="/data/local/tmp" />
         <option name="module-name" value="{MODULE}" />
-        <option name="native-test-timeout" value="10m" />
+        <option name="native-test-timeout" value="30m" />
     </test>
 </configuration>
 
diff --git a/media/libmediatranscoding/transcoder/tests/VideoTrackTranscoderTests.cpp b/media/libmediatranscoding/transcoder/tests/VideoTrackTranscoderTests.cpp
index 1f9ec77..88c3fd3 100644
--- a/media/libmediatranscoding/transcoder/tests/VideoTrackTranscoderTests.cpp
+++ b/media/libmediatranscoding/transcoder/tests/VideoTrackTranscoderTests.cpp
@@ -86,6 +86,10 @@
 
     ~VideoTrackTranscoderTests() { LOG(DEBUG) << "VideoTrackTranscoderTests destroyed"; }
 
+    static int32_t getConfiguredBitrate(const std::shared_ptr<VideoTrackTranscoder>& transcoder) {
+        return transcoder->mConfiguredBitrate;
+    }
+
     std::shared_ptr<MediaSampleReader> mMediaSampleReader;
     int mTrackIndex;
     std::shared_ptr<AMediaFormat> mSourceFormat;
@@ -140,7 +144,7 @@
 TEST_F(VideoTrackTranscoderTests, PreserveBitrate) {
     LOG(DEBUG) << "Testing PreserveBitrate";
     auto callback = std::make_shared<TestTrackTranscoderCallback>();
-    std::shared_ptr<MediaTrackTranscoder> transcoder = VideoTrackTranscoder::create(callback);
+    auto transcoder = VideoTrackTranscoder::create(callback);
 
     auto destFormat = TrackTranscoderTestUtils::getDefaultVideoDestinationFormat(
             mSourceFormat.get(), false /* includeBitrate*/);
@@ -155,15 +159,11 @@
     ASSERT_TRUE(transcoder->start());
 
     callback->waitUntilTrackFormatAvailable();
-
-    auto outputFormat = transcoder->getOutputFormat();
-    ASSERT_NE(outputFormat, nullptr);
-
     transcoder->stop();
     EXPECT_EQ(callback->waitUntilFinished(), AMEDIA_OK);
 
-    int32_t outBitrate;
-    EXPECT_TRUE(AMediaFormat_getInt32(outputFormat.get(), AMEDIAFORMAT_KEY_BIT_RATE, &outBitrate));
+    int32_t outBitrate = getConfiguredBitrate(transcoder);
+    ASSERT_GT(outBitrate, 0);
 
     EXPECT_EQ(srcBitrate, outBitrate);
 }
diff --git a/media/libstagefright/FrameDecoder.cpp b/media/libstagefright/FrameDecoder.cpp
index 01190b5..0fd4ef2 100644
--- a/media/libstagefright/FrameDecoder.cpp
+++ b/media/libstagefright/FrameDecoder.cpp
@@ -113,7 +113,7 @@
         return NULL;
     }
     sp<IMemory> frameMem = new MemoryBase(heap, 0, size);
-    if (frameMem == NULL) {
+    if (frameMem == NULL || frameMem->unsecurePointer() == NULL) {
         ALOGE("not enough memory for VideoFrame size=%zu", size);
         return NULL;
     }
diff --git a/media/libstagefright/MediaCodec.cpp b/media/libstagefright/MediaCodec.cpp
index ad67379..50ebeef 100644
--- a/media/libstagefright/MediaCodec.cpp
+++ b/media/libstagefright/MediaCodec.cpp
@@ -29,7 +29,6 @@
 #include <C2Buffer.h>
 
 #include "include/SoftwareRenderer.h"
-#include "PlaybackDurationAccumulator.h"
 
 #include <android/hardware/cas/native/1.0/IDescrambler.h>
 #include <android/hardware/media/omx/1.0/IGraphicBufferSource.h>
@@ -140,8 +139,6 @@
 static const char *kCodecRecentLatencyAvg = "android.media.mediacodec.recent.avg";      /* in us */
 static const char *kCodecRecentLatencyCount = "android.media.mediacodec.recent.n";
 static const char *kCodecRecentLatencyHist = "android.media.mediacodec.recent.hist";    /* in us */
-static const char *kCodecPlaybackDuration =
-        "android.media.mediacodec.playback-duration"; /* in sec */
 
 static const char *kCodecShapingEnhanced = "android.media.mediacodec.shaped";    /* 0/1 */
 
@@ -722,8 +719,6 @@
       mHaveInputSurface(false),
       mHavePendingInputBuffers(false),
       mCpuBoostRequested(false),
-      mPlaybackDurationAccumulator(new PlaybackDurationAccumulator()),
-      mIsSurfaceToScreen(false),
       mLatencyUnknown(0),
       mBytesEncoded(0),
       mEarliestEncodedPtsUs(INT64_MAX),
@@ -830,10 +825,6 @@
     if (mLatencyUnknown > 0) {
         mediametrics_setInt64(mMetricsHandle, kCodecLatencyUnknown, mLatencyUnknown);
     }
-    int64_t playbackDuration = mPlaybackDurationAccumulator->getDurationInSeconds();
-    if (playbackDuration > 0) {
-        mediametrics_setInt64(mMetricsHandle, kCodecPlaybackDuration, playbackDuration);
-    }
     if (mLifetimeStartNs > 0) {
         nsecs_t lifetime = systemTime(SYSTEM_TIME_MONOTONIC) - mLifetimeStartNs;
         lifetime = lifetime / (1000 * 1000);    // emitted in ms, truncated not rounded
@@ -971,22 +962,6 @@
     ALOGV("Ignoring tunnel-peek=%d for %s", tunnelPeek, asString(mTunnelPeekState));
 }
 
-void MediaCodec::updatePlaybackDuration(const sp<AMessage> &msg) {
-    if (msg->what() != kWhatOutputFramesRendered) {
-        ALOGE("updatePlaybackDuration: expected kWhatOuputFramesRendered (%d)", msg->what());
-        return;
-    }
-    // Playback duration only counts if the buffers are going to the screen.
-    if (!mIsSurfaceToScreen) {
-        return;
-    }
-    int64_t renderTimeNs;
-    size_t index = 0;
-    while (msg->findInt64(AStringPrintf("%zu-system-nano", index++).c_str(), &renderTimeNs)) {
-        mPlaybackDurationAccumulator->processRenderTime(renderTimeNs);
-    }
-}
-
 bool MediaCodec::Histogram::setup(int nbuckets, int64_t width, int64_t floor)
 {
     if (nbuckets <= 0 || width <= 0) {
@@ -1422,6 +1397,7 @@
  * MediaFormat Shaping forward declarations
  * including the property name we use for control.
  */
+static int enableMediaFormatShapingDefault = 1;
 static const char enableMediaFormatShapingProperty[] = "debug.stagefright.enableshaping";
 static void mapFormat(AString componentName, const sp<AMessage> &format, const char *kind,
                       bool reverse);
@@ -1497,7 +1473,8 @@
     }
 
     if (flags & CONFIGURE_FLAG_ENCODE) {
-        int8_t enableShaping = property_get_bool(enableMediaFormatShapingProperty, 0);
+        int8_t enableShaping = property_get_bool(enableMediaFormatShapingProperty,
+                                                 enableMediaFormatShapingDefault);
         if (!enableShaping) {
             ALOGI("format shaping disabled, property '%s'", enableMediaFormatShapingProperty);
         } else {
@@ -3147,7 +3124,6 @@
                     ALOGV("TunnelPeekState: %s -> %s",
                           asString(previousState),
                           asString(TunnelPeekState::kBufferRendered));
-                    updatePlaybackDuration(msg);
                     // check that we have a notification set
                     if (mOnFrameRenderedNotification != NULL) {
                         sp<AMessage> notify = mOnFrameRenderedNotification->dup();
@@ -4848,10 +4824,6 @@
             return ALREADY_EXISTS;
         }
 
-        // in case we don't connect, ensure that we don't signal the surface is
-        // connected to the screen
-        mIsSurfaceToScreen = false;
-
         err = nativeWindowConnect(surface.get(), "connectToSurface");
         if (err == OK) {
             // Require a fresh set of buffers after each connect by using a unique generation
@@ -4877,10 +4849,6 @@
             if (!mAllowFrameDroppingBySurface) {
                 disableLegacyBufferDropPostQ(surface);
             }
-            // keep track whether or not the buffers of the connected surface go to the screen
-            int result = 0;
-            surface->query(NATIVE_WINDOW_QUEUES_TO_WINDOW_COMPOSER, &result);
-            mIsSurfaceToScreen = result != 0;
         }
     }
     // do not return ALREADY_EXISTS unless surfaces are the same
@@ -4898,7 +4866,6 @@
         }
         // assume disconnected even on error
         mSurface.clear();
-        mIsSurfaceToScreen = false;
     }
     return err;
 }
diff --git a/media/libstagefright/PlaybackDurationAccumulator.h b/media/libstagefright/PlaybackDurationAccumulator.h
deleted file mode 100644
index cb5f0c4..0000000
--- a/media/libstagefright/PlaybackDurationAccumulator.h
+++ /dev/null
@@ -1,65 +0,0 @@
-/*
- * Copyright 2021 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef PLAYBACK_DURATION_ACCUMULATOR_H_
-
-namespace android {
-
-// Accumulates playback duration by processing render times of individual frames and by ignoring
-// frames rendered during inactive playbacks such as seeking, pausing, or re-buffering.
-class PlaybackDurationAccumulator {
-private:
-    // Controls the maximum delta between render times before considering the playback is not
-    // active and has stalled.
-    static const int64_t MAX_PRESENTATION_DURATION_NS = 500 * 1000 * 1000;
-
-public:
-    PlaybackDurationAccumulator() {
-        mPlaybackDurationNs = 0;
-        mPreviousRenderTimeNs = 0;
-    }
-
-    // Process a render time expressed in nanoseconds.
-    void processRenderTime(int64_t newRenderTimeNs) {
-        // If we detect wrap-around or out of order frames, just ignore the duration for this
-        // and the next frame.
-        if (newRenderTimeNs < mPreviousRenderTimeNs) {
-            mPreviousRenderTimeNs = 0;
-        }
-        if (mPreviousRenderTimeNs > 0) {
-            int64_t presentationDurationNs = newRenderTimeNs - mPreviousRenderTimeNs;
-            if (presentationDurationNs < MAX_PRESENTATION_DURATION_NS) {
-                mPlaybackDurationNs += presentationDurationNs;
-            }
-        }
-        mPreviousRenderTimeNs = newRenderTimeNs;
-    }
-
-    int64_t getDurationInSeconds() {
-        return mPlaybackDurationNs / 1000 / 1000 / 1000; // Nanoseconds to seconds.
-    }
-
-private:
-    // The playback duration accumulated so far.
-    int64_t mPlaybackDurationNs;
-    // The previous render time used to compute the next presentation duration.
-    int64_t mPreviousRenderTimeNs;
-};
-
-}
-
-#endif
-
diff --git a/media/libstagefright/TEST_MAPPING b/media/libstagefright/TEST_MAPPING
index dff7b22..7ce2968 100644
--- a/media/libstagefright/TEST_MAPPING
+++ b/media/libstagefright/TEST_MAPPING
@@ -34,6 +34,9 @@
   "presubmit": [
     {
       "name": "mediacodecTest"
+    },
+    {
+      "name": "CtsMediaTranscodingTestCases"
     }
   ],
   "postsubmit": [
diff --git a/media/libstagefright/include/media/stagefright/MediaCodec.h b/media/libstagefright/include/media/stagefright/MediaCodec.h
index d7b1794..3f93e6d 100644
--- a/media/libstagefright/include/media/stagefright/MediaCodec.h
+++ b/media/libstagefright/include/media/stagefright/MediaCodec.h
@@ -58,7 +58,6 @@
 struct PersistentSurface;
 class SoftwareRenderer;
 class Surface;
-class PlaybackDurationAccumulator;
 namespace hardware {
 namespace cas {
 namespace native {
@@ -414,7 +413,6 @@
     void updateLowLatency(const sp<AMessage> &msg);
     constexpr const char *asString(TunnelPeekState state, const char *default_string="?");
     void updateTunnelPeek(const sp<AMessage> &msg);
-    void updatePlaybackDuration(const sp<AMessage> &msg);
 
     sp<AMessage> mOutputFormat;
     sp<AMessage> mInputFormat;
@@ -482,9 +480,6 @@
 
     std::shared_ptr<BufferChannelBase> mBufferChannel;
 
-    PlaybackDurationAccumulator * mPlaybackDurationAccumulator;
-    bool mIsSurfaceToScreen;
-
     MediaCodec(
             const sp<ALooper> &looper, pid_t pid, uid_t uid,
             std::function<sp<CodecBase>(const AString &, const char *)> getCodecBase = nullptr,
diff --git a/media/ndk/NdkImagePriv.h b/media/ndk/NdkImagePriv.h
index b019448..05115b9 100644
--- a/media/ndk/NdkImagePriv.h
+++ b/media/ndk/NdkImagePriv.h
@@ -40,6 +40,14 @@
      */
 
     AIMAGE_FORMAT_RAW_DEPTH = 0x1002,
+
+    /**
+     * Device specific 10 bits depth RAW image format.
+     *
+     * <p>Unprocessed implementation-dependent raw depth measurements, opaque with 10 bit samples
+     * and device specific bit layout.</p>
+     */
+    AIMAGE_FORMAT_RAW_DEPTH10 = 0x1003,
 };
 
 // TODO: this only supports ImageReader
diff --git a/media/ndk/NdkImageReader.cpp b/media/ndk/NdkImageReader.cpp
index b75901a..1067e24 100644
--- a/media/ndk/NdkImageReader.cpp
+++ b/media/ndk/NdkImageReader.cpp
@@ -72,6 +72,7 @@
         case AIMAGE_FORMAT_Y8:
         case AIMAGE_FORMAT_HEIC:
         case AIMAGE_FORMAT_DEPTH_JPEG:
+        case AIMAGE_FORMAT_RAW_DEPTH10:
             return true;
         case AIMAGE_FORMAT_PRIVATE:
             // For private format, cpu usage is prohibited.
@@ -102,6 +103,7 @@
         case AIMAGE_FORMAT_Y8:
         case AIMAGE_FORMAT_HEIC:
         case AIMAGE_FORMAT_DEPTH_JPEG:
+        case AIMAGE_FORMAT_RAW_DEPTH10:
             return 1;
         case AIMAGE_FORMAT_PRIVATE:
             return 0;
diff --git a/media/ndk/include/media/NdkImage.h b/media/ndk/include/media/NdkImage.h
index e19dd3a..71bc6d9 100644
--- a/media/ndk/include/media/NdkImage.h
+++ b/media/ndk/include/media/NdkImage.h
@@ -50,7 +50,10 @@
  */
 typedef struct AImage AImage;
 
-// Formats not listed here will not be supported by AImageReader
+/**
+ * AImage supported formats: AImageReader only guarantees support for the formats
+ * listed here.
+ */
 enum AIMAGE_FORMATS {
     /**
      * 32 bits RGBA format, 8 bits for each of the four channels.
@@ -813,7 +816,7 @@
  * Available since API level 26.
  *
  * @param image the {@link AImage} of interest.
- * @param outBuffer The memory area pointed to by buffer will contain the acquired AHardwareBuffer
+ * @param buffer The memory area pointed to by buffer will contain the acquired AHardwareBuffer
  *         handle.
  * @return <ul>
  *         <li>{@link AMEDIA_OK} if the method call succeeds.</li>
diff --git a/media/ndk/include/media/NdkImageReader.h b/media/ndk/include/media/NdkImageReader.h
index d86f3c7..4bd7f2a 100644
--- a/media/ndk/include/media/NdkImageReader.h
+++ b/media/ndk/include/media/NdkImageReader.h
@@ -328,10 +328,10 @@
  * still acquire images from this {@link AImageReader} and access {@link AHardwareBuffer} via
  * {@link AImage_getHardwareBuffer()}. The {@link AHardwareBuffer} gained this way can then
  * be passed back to hardware (such as GPU or hardware encoder if supported) for future processing.
- * For example, you can obtain an {@link EGLClientBuffer} from the {@link AHardwareBuffer} by using
- * {@link eglGetNativeClientBufferANDROID} extension and pass that {@link EGLClientBuffer} to {@link
- * eglCreateImageKHR} to create an {@link EGLImage} resource type, which may then be bound to a
- * texture via {@link glEGLImageTargetTexture2DOES} on supported devices. This can be useful for
+ * For example, you can obtain an EGLClientBuffer from the {@link AHardwareBuffer} by using
+ * eglGetNativeClientBufferANDROID extension and pass that EGLClientBuffer to
+ * eglCreateImageKHR to create an EGLImage resource type, which may then be bound to a
+ * texture via glEGLImageTargetTexture2DOES on supported devices. This can be useful for
  * transporting textures that may be shared cross-process.</p>
  * <p>In general, when software access to image data is not necessary, an {@link AImageReader}
  * created with {@link AIMAGE_FORMAT_PRIVATE} format is more efficient, compared with {@link
@@ -339,7 +339,7 @@
  *
  * <p>Note that not all format and usage flag combination is supported by the {@link AImageReader},
  * especially if \c format is {@link AIMAGE_FORMAT_PRIVATE}, \c usage must not include either
- * {@link AHARDWAREBUFFER_USAGE_READ_RARELY} or {@link AHARDWAREBUFFER_USAGE_READ_OFTEN}</p>
+ * {@link AHARDWAREBUFFER_USAGE_CPU_READ_RARELY} or {@link AHARDWAREBUFFER_USAGE_CPU_READ_OFTEN}</p>
  *
  * @param width The default width in pixels of the Images that this reader will produce.
  * @param height The default height in pixels of the Images that this reader will produce.
@@ -358,7 +358,7 @@
  *   <th>Compatible usage flags</th>
  * </tr>
  * <tr>
- *   <td>non-{@link AIMAGE_FORMAT_PRIVATE PRIVATE} formats defined in {@link AImage.h}
+ *   <td>non-{@link AIMAGE_FORMAT_PRIVATE} formats defined in {@link NdkImage.h}
  * </td>
  *   <td>{@link AHARDWAREBUFFER_USAGE_CPU_READ_RARELY} or
  *   {@link AHARDWAREBUFFER_USAGE_CPU_READ_OFTEN}</td>
@@ -441,6 +441,10 @@
         AImageReader* reader,
         AHardwareBuffer* buffer);
 
+/**
+ * A listener for AHardwareBuffer removal events. Use
+ * {@link AImageReader_setBufferRemovedListener} to register the listener with an AImageReader.
+ */
 typedef struct AImageReader_BufferRemovedListener {
     /// Optional application context passed as the first parameter of the callback.
     void*                      context;
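
A sketch tying the pieces of this header together: create a GPU-only reader and register a buffer-removed listener. Error handling is trimmed, the dimensions are placeholders, and the callback body is illustrative.

    #include <android/hardware_buffer.h>
    #include <media/NdkImageReader.h>
    #include <media/NdkMediaError.h>

    static void onBufferRemoved(void* /*context*/, AImageReader* /*reader*/,
                                AHardwareBuffer* buffer) {
        // The reader no longer owns this buffer; release any GPU resources derived from it.
        (void)buffer;
    }

    AImageReader* createGpuReader() {
        AImageReader* reader = nullptr;
        if (AImageReader_newWithUsage(1920, 1080, AIMAGE_FORMAT_PRIVATE,
                                      AHARDWAREBUFFER_USAGE_GPU_SAMPLED_IMAGE,
                                      /*maxImages=*/3, &reader) != AMEDIA_OK) {
            return nullptr;
        }
        static AImageReader_BufferRemovedListener listener = {
                /*context=*/nullptr, /*onBufferRemoved=*/onBufferRemoved};
        AImageReader_setBufferRemovedListener(reader, &listener);
        return reader;
    }
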
diff --git a/media/ndk/include/media/NdkMediaError.h b/media/ndk/include/media/NdkMediaError.h
index 2be1d6e..02fdc79 100644
--- a/media/ndk/include/media/NdkMediaError.h
+++ b/media/ndk/include/media/NdkMediaError.h
@@ -40,7 +40,11 @@
 
 __BEGIN_DECLS
 
+/**
+ * Media status codes returned by NDK media functions.
+ */
 typedef enum {
+    /** The requested media operation completed successfully. */
     AMEDIA_OK = 0,
 
     /**
@@ -55,14 +59,34 @@
     AMEDIACODEC_ERROR_RECLAIMED             = 1101,
 
     AMEDIA_ERROR_BASE                  = -10000,
+
+    /** The called media function failed with an unknown error. */
     AMEDIA_ERROR_UNKNOWN               = AMEDIA_ERROR_BASE,
+
+    /** The input media data is corrupt or incomplete. */
     AMEDIA_ERROR_MALFORMED             = AMEDIA_ERROR_BASE - 1,
+
+    /** The required operation or media formats are not supported. */
     AMEDIA_ERROR_UNSUPPORTED           = AMEDIA_ERROR_BASE - 2,
+
+    /** An invalid (or already closed) object is used in the function call. */
     AMEDIA_ERROR_INVALID_OBJECT        = AMEDIA_ERROR_BASE - 3,
+
+    /** At least one of the parameters passed in is invalid. */
     AMEDIA_ERROR_INVALID_PARAMETER     = AMEDIA_ERROR_BASE - 4,
+
+    /** The media object is not in the right state for the required operation. */
     AMEDIA_ERROR_INVALID_OPERATION     = AMEDIA_ERROR_BASE - 5,
+
+    /** The media stream ended while processing the requested operation. */
     AMEDIA_ERROR_END_OF_STREAM         = AMEDIA_ERROR_BASE - 6,
+
+    /** An error occurred while the media object was carrying out an I/O operation. */
     AMEDIA_ERROR_IO                    = AMEDIA_ERROR_BASE - 7,
+
+    /** The requested operation would have to block (on I/O or otherwise),
+     *  but blocking is not enabled.
+     */
     AMEDIA_ERROR_WOULD_BLOCK           = AMEDIA_ERROR_BASE - 8,
 
     AMEDIA_DRM_ERROR_BASE              = -20000,
@@ -77,10 +101,20 @@
     AMEDIA_DRM_LICENSE_EXPIRED         = AMEDIA_DRM_ERROR_BASE - 9,
 
     AMEDIA_IMGREADER_ERROR_BASE          = -30000,
+
+    /** No more image buffers are available to read or write image data. */
     AMEDIA_IMGREADER_NO_BUFFER_AVAILABLE = AMEDIA_IMGREADER_ERROR_BASE - 1,
+
+    /** The AImage object has used up the maximum number of allowed image buffers. */
     AMEDIA_IMGREADER_MAX_IMAGES_ACQUIRED = AMEDIA_IMGREADER_ERROR_BASE - 2,
+
+    /** The required image buffer could not be locked for reading. */
     AMEDIA_IMGREADER_CANNOT_LOCK_IMAGE   = AMEDIA_IMGREADER_ERROR_BASE - 3,
+
+    /** The media data or buffer could not be unlocked. */
     AMEDIA_IMGREADER_CANNOT_UNLOCK_IMAGE = AMEDIA_IMGREADER_ERROR_BASE - 4,
+
+    /** The media/buffer needs to be locked to perform the required operation. */
     AMEDIA_IMGREADER_IMAGE_NOT_LOCKED    = AMEDIA_IMGREADER_ERROR_BASE - 5,
 
 } media_status_t;
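
A minimal sketch of how a caller might branch on the codes documented above; only constants declared in this enum are used, and the helper itself is illustrative rather than part of the API.

    #include <media/NdkMediaError.h>

    // Map a media_status_t returned by an NDK media call to a short description.
    static const char* mediaStatusToString(media_status_t status) {
        switch (status) {
            case AMEDIA_OK:                            return "success";
            case AMEDIA_ERROR_WOULD_BLOCK:             return "would block; retry later";
            case AMEDIA_ERROR_END_OF_STREAM:           return "end of stream";
            case AMEDIA_IMGREADER_NO_BUFFER_AVAILABLE: return "no image buffer available";
            case AMEDIA_IMGREADER_MAX_IMAGES_ACQUIRED: return "maximum images already acquired";
            default:                                   return "other media error";
        }
    }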
diff --git a/media/ndk/include/media/NdkMediaMuxer.h b/media/ndk/include/media/NdkMediaMuxer.h
index 866ebfd..d7eccb8 100644
--- a/media/ndk/include/media/NdkMediaMuxer.h
+++ b/media/ndk/include/media/NdkMediaMuxer.h
@@ -152,12 +152,17 @@
 /**
  * Creates a new media muxer for appending data to an existing MPEG4 file.
  * This is a synchronous API call and could take a while to return if the existing file is large.
- * Works for only MPEG4 files that contain a) a single audio track, b) a single video track,
- * c) a single audio and a single video track.
- * @param(fd): needs to be opened with read and write permission.  Does not take ownership of
+ * Only works for MPEG4 files that contain one of the following track configurations:
+ * <ul>
+ *    <li>a single audio track.</li>
+ *    <li>a single video track.</li>
+ *    <li>a single audio and a single video track.</li>
+ * </ul>
+ * @param fd Must be opened with read and write permission. Does not take ownership of
  * this fd i.e., caller is responsible for closing fd.
- * @param(mode): AppendMode is an enum that specifies one of the modes of appending data.
- * @return : Pointer to AMediaMuxer if the file(fd) has tracks already, otherwise, nullptr.
+ * @param mode Specifies how data will be appended; the AppendMode enum describes
+ *             the possible append modes.
+ * @return Pointer to AMediaMuxer if the file (fd) already contains tracks, otherwise nullptr.
  * {@link AMediaMuxer_delete} should be used to free the returned pointer.
  *
  * Available since API level 31.
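
An illustrative usage sketch for the factory documented above. The declaration itself is outside this hunk, so the name AMediaMuxer_append(int fd, AppendMode mode) is an assumption based on NdkMediaMuxer.h; the helper is not part of this change.

    #include <fcntl.h>
    #include <unistd.h>
    #include <media/NdkMediaMuxer.h>

    static AMediaMuxer* openMuxerForAppend(const char* path, AppendMode mode) {
        int fd = open(path, O_RDWR);   // must be opened with read and write permission
        if (fd < 0) {
            return nullptr;
        }
        AMediaMuxer* muxer = AMediaMuxer_append(fd, mode);
        if (muxer == nullptr) {
            close(fd);                 // no tracks in the file; the caller still owns the fd
        }
        // On success the caller keeps ownership of fd and closes it after AMediaMuxer_delete().
        return muxer;
    }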
diff --git a/services/audioflinger/Android.bp b/services/audioflinger/Android.bp
index 2294c49..a7d47fb 100644
--- a/services/audioflinger/Android.bp
+++ b/services/audioflinger/Android.bp
@@ -74,6 +74,7 @@
         "libmediautils",
         "libnbaio",
         "libnblog",
+        "libpermission",
         "libpowermanager",
         "libmediautils",
         "libmemunreachable",
@@ -95,6 +96,7 @@
     ],
 
     export_shared_lib_headers: [
+        "libpermission",
         "media_permission-aidl-cpp",
     ],
 
diff --git a/services/audioflinger/PlaybackTracks.h b/services/audioflinger/PlaybackTracks.h
index 2e59baa..2436248 100644
--- a/services/audioflinger/PlaybackTracks.h
+++ b/services/audioflinger/PlaybackTracks.h
@@ -219,6 +219,10 @@
     void flushAck();
     bool isResumePending();
     void resumeAck();
+    // For direct or offloaded tracks ensure that the pause state is acknowledged
+    // by the playback thread in case of an immediate flush.
+    bool isPausePending() const { return mPauseHwPending; }
+    void pauseAck();
     void updateTrackFrameInfo(int64_t trackFramesReleased, int64_t sinkFramesWritten,
             uint32_t halSampleRate, const ExtendedTimestamp &timeStamp);
 
@@ -314,6 +318,7 @@
     sp<AudioTrackServerProxy>  mAudioTrackServerProxy;
     bool                mResumeToStopping; // track was paused in stopping state.
     bool                mFlushHwPending; // track requests for thread flush
+    bool                mPauseHwPending = false; // direct/offload track request for thread pause
     audio_output_flags_t mFlags;
     // If the last track change was notified to the client with readAndClearHasChanged
     std::atomic_flag     mChangeNotified = ATOMIC_FLAG_INIT;
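
The new mPauseHwPending flag follows the same request/acknowledge pattern as mFlushHwPending: the client-facing path raises the flag and the playback thread consumes it. A generic sketch of that pattern, not the AudioFlinger code itself:

    // Request/ack flag: one thread requests an action, another acknowledges it.
    class PendingAction {
    public:
        void request() { mPending = true; }   // e.g. set when pause() is called on a direct track
        bool consume() {                      // e.g. isPausePending() followed by pauseAck()
            const bool wasPending = mPending;
            mPending = false;
            return wasPending;
        }
    private:
        bool mPending = false;                // the real code relies on the thread lock for safety
    };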
diff --git a/services/audioflinger/Threads.cpp b/services/audioflinger/Threads.cpp
index c83fc80..997f24a 100644
--- a/services/audioflinger/Threads.cpp
+++ b/services/audioflinger/Threads.cpp
@@ -5880,8 +5880,15 @@
         sp<Track> l = mActiveTracks.getLatest();
         bool last = l.get() == track;
 
-        if (track->isPausing()) {
-            track->setPaused();
+        if (track->isPausePending()) {
+            track->pauseAck();
+            // It is possible a track might have been flushed or stopped.
+            // Other operations such as flush pending might occur on the next prepare.
+            if (track->isPausing()) {
+                track->setPaused();
+            }
+            // Always perform the pause, as an immediate flush will leave
+            // the track in a state where isPausing() no longer holds.
             if (mHwSupportsPause && last && !mHwPaused) {
                 doHwPause = true;
                 mHwPaused = true;
@@ -6423,8 +6430,15 @@
             continue;
         }
 
-        if (track->isPausing()) {
-            track->setPaused();
+        if (track->isPausePending()) {
+            track->pauseAck();
+            // It is possible a track might have been flushed or stopped.
+            // Other operations such as flush pending might occur on the next prepare.
+            if (track->isPausing()) {
+                track->setPaused();
+            }
+            // Always perform the pause if last, as an immediate flush will leave
+            // the track in a state where isPausing() no longer holds.
             if (last) {
                 if (mHwSupportsPause && !mHwPaused) {
                     doHwPause = true;
@@ -8094,6 +8108,9 @@
 {
     ALOGV("RecordThread::getActiveMicrophones");
     AutoMutex _l(mLock);
+    if (mInput == nullptr || mInput->stream == nullptr) {
+        return NO_INIT;
+    }
     status_t status = mInput->stream->getActiveMicrophones(activeMicrophones);
     return status;
 }
@@ -8103,6 +8120,9 @@
 {
     ALOGV("setPreferredMicrophoneDirection(%d)", direction);
     AutoMutex _l(mLock);
+    if (mInput == nullptr || mInput->stream == nullptr) {
+        return NO_INIT;
+    }
     return mInput->stream->setPreferredMicrophoneDirection(direction);
 }
 
@@ -8110,6 +8130,9 @@
 {
     ALOGV("setPreferredMicrophoneFieldDimension(%f)", zoom);
     AutoMutex _l(mLock);
+    if (mInput == nullptr || mInput->stream == nullptr) {
+        return NO_INIT;
+    }
     return mInput->stream->setPreferredMicrophoneFieldDimension(zoom);
 }
 
diff --git a/services/audioflinger/Tracks.cpp b/services/audioflinger/Tracks.cpp
index db7528d..21651af 100644
--- a/services/audioflinger/Tracks.cpp
+++ b/services/audioflinger/Tracks.cpp
@@ -1219,6 +1219,9 @@
             mState = PAUSING;
             ALOGV("%s(%d): ACTIVE/RESUMING => PAUSING on thread %d",
                     __func__, mId, (int)mThreadIoHandle);
+            if (isOffloadedOrDirect()) {
+                mPauseHwPending = true;
+            }
             playbackThread->broadcast_l();
             break;
 
@@ -1306,6 +1309,11 @@
     mFlushHwPending = false;
 }
 
+void AudioFlinger::PlaybackThread::Track::pauseAck()
+{
+    mPauseHwPending = false;
+}
+
 void AudioFlinger::PlaybackThread::Track::reset()
 {
     // Do not reset twice to avoid discarding data written just after a flush and before
diff --git a/services/audiopolicy/common/include/policy.h b/services/audiopolicy/common/include/policy.h
index 0537365..552919d 100644
--- a/services/audiopolicy/common/include/policy.h
+++ b/services/audiopolicy/common/include/policy.h
@@ -226,6 +226,8 @@
             return AUDIO_DEVICE_OUT_SPEAKER_SAFE;
         } else if (deviceTypes.count(AUDIO_DEVICE_OUT_HDMI_ARC) != 0) {
             return AUDIO_DEVICE_OUT_HDMI_ARC;
+        } else if (deviceTypes.count(AUDIO_DEVICE_OUT_HDMI_EARC) != 0) {
+            return AUDIO_DEVICE_OUT_HDMI_EARC;
         } else if (deviceTypes.count(AUDIO_DEVICE_OUT_AUX_LINE) != 0) {
             return AUDIO_DEVICE_OUT_AUX_LINE;
         } else if (deviceTypes.count(AUDIO_DEVICE_OUT_SPDIF) != 0) {
@@ -240,4 +242,4 @@
             return a2dpDevices.empty() ? AUDIO_DEVICE_NONE : a2dpDevices[0];
         }
     }
-}
\ No newline at end of file
+}
diff --git a/services/audiopolicy/common/managerdefinitions/include/AudioPolicyMix.h b/services/audiopolicy/common/managerdefinitions/include/AudioPolicyMix.h
index c6bdb04..c2a20c6 100644
--- a/services/audiopolicy/common/managerdefinitions/include/AudioPolicyMix.h
+++ b/services/audiopolicy/common/managerdefinitions/include/AudioPolicyMix.h
@@ -77,6 +77,7 @@
 
     sp<DeviceDescriptor> getDeviceAndMixForInputSource(audio_source_t inputSource,
                                                        const DeviceVector &availableDeviceTypes,
+                                                       uid_t uid,
                                                        sp<AudioPolicyMix> *policyMix) const;
 
     /**
diff --git a/services/audiopolicy/common/managerdefinitions/include/DeviceDescriptor.h b/services/audiopolicy/common/managerdefinitions/include/DeviceDescriptor.h
index 05ec69e..20b4044 100644
--- a/services/audiopolicy/common/managerdefinitions/include/DeviceDescriptor.h
+++ b/services/audiopolicy/common/managerdefinitions/include/DeviceDescriptor.h
@@ -226,7 +226,9 @@
             add(devices);
             return size();
         }
-        return SortedVector::merge(devices);
+        ssize_t ret = SortedVector::merge(devices);
+        refreshTypes();
+        return ret;
     }
 
     /**
diff --git a/services/audiopolicy/common/managerdefinitions/src/AudioPolicyMix.cpp b/services/audiopolicy/common/managerdefinitions/src/AudioPolicyMix.cpp
index c024a85..b209a88 100644
--- a/services/audiopolicy/common/managerdefinitions/src/AudioPolicyMix.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/AudioPolicyMix.cpp
@@ -391,6 +391,7 @@
 sp<DeviceDescriptor> AudioPolicyMixCollection::getDeviceAndMixForInputSource(
         audio_source_t inputSource,
         const DeviceVector &availDevices,
+        uid_t uid,
         sp<AudioPolicyMix> *policyMix) const
 {
     for (size_t i = 0; i < size(); i++) {
@@ -402,7 +403,11 @@
             if ((RULE_MATCH_ATTRIBUTE_CAPTURE_PRESET == mix->mCriteria[j].mRule &&
                     mix->mCriteria[j].mValue.mSource == inputSource) ||
                (RULE_EXCLUDE_ATTRIBUTE_CAPTURE_PRESET == mix->mCriteria[j].mRule &&
-                    mix->mCriteria[j].mValue.mSource != inputSource)) {
+                    mix->mCriteria[j].mValue.mSource != inputSource) ||
+               (RULE_MATCH_UID == mix->mCriteria[j].mRule &&
+                    mix->mCriteria[j].mValue.mUid == uid) ||
+               (RULE_EXCLUDE_UID == mix->mCriteria[j].mRule &&
+                    mix->mCriteria[j].mValue.mUid != uid)) {
                 // assuming PolicyMix only for remote submix for input
                 // so mix->mDeviceType can only be AUDIO_DEVICE_OUT_REMOTE_SUBMIX
                 audio_devices_t device = AUDIO_DEVICE_IN_REMOTE_SUBMIX;
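
The loop above ORs together match/exclude rules, now including UID-based rules. A standalone sketch of the same evaluation logic, using hypothetical simplified types rather than the real mix criterion structures:

    #include <sys/types.h>

    // Hypothetical, simplified criterion model mirroring the rule check above.
    enum Rule { MATCH_CAPTURE_PRESET, EXCLUDE_CAPTURE_PRESET, MATCH_UID, EXCLUDE_UID };
    struct Criterion {
        Rule rule;
        int source;   // capture preset, compared for the CAPTURE_PRESET rules
        uid_t uid;    // client uid, compared for the UID rules
    };

    static bool criterionApplies(const Criterion& c, int inputSource, uid_t uid) {
        switch (c.rule) {
            case MATCH_CAPTURE_PRESET:   return c.source == inputSource;
            case EXCLUDE_CAPTURE_PRESET: return c.source != inputSource;
            case MATCH_UID:              return c.uid == uid;
            case EXCLUDE_UID:            return c.uid != uid;
        }
        return false;
    }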
diff --git a/services/audiopolicy/engine/common/include/EngineBase.h b/services/audiopolicy/engine/common/include/EngineBase.h
index 9bef97c..0f8b0a5 100644
--- a/services/audiopolicy/engine/common/include/EngineBase.h
+++ b/services/audiopolicy/engine/common/include/EngineBase.h
@@ -170,11 +170,13 @@
     status_t getMediaDevicesForRole(device_role_t role, const DeviceVector& availableDevices,
             DeviceVector& devices) const;
 
+    void dumpCapturePresetDevicesRoleMap(String8 *dst, int spaces) const;
+
     AudioPolicyManagerObserver *mApmObserver = nullptr;
 
     ProductStrategyMap mProductStrategies;
-    ProductStrategyPreferredRoutingMap mProductStrategyPreferredDevices;
-    CapturePresetDevicesRoleMap mCapturePresetDevicesRole;
+    ProductStrategyDevicesRoleMap mProductStrategyDeviceRoleMap;
+    CapturePresetDevicesRoleMap mCapturePresetDevicesRoleMap;
     VolumeGroupMap mVolumeGroups;
     LastRemovableMediaDevices mLastRemovableMediaDevices;
     audio_mode_t mPhoneState = AUDIO_MODE_NORMAL;  /**< current phone state. */
diff --git a/services/audiopolicy/engine/common/include/ProductStrategy.h b/services/audiopolicy/engine/common/include/ProductStrategy.h
index 54625ea..2aa2f9a 100644
--- a/services/audiopolicy/engine/common/include/ProductStrategy.h
+++ b/services/audiopolicy/engine/common/include/ProductStrategy.h
@@ -18,20 +18,20 @@
 
 #include "VolumeGroup.h"
 
-#include <system/audio.h>
-#include <utils/RefBase.h>
-#include <HandleGenerator.h>
-#include <string>
-#include <vector>
 #include <map>
-#include <utils/Errors.h>
-#include <utils/String8.h>
+#include <string>
+#include <utility>
+#include <vector>
+
+#include <HandleGenerator.h>
 #include <media/AudioAttributes.h>
 #include <media/AudioContainers.h>
 #include <media/AudioDeviceTypeAddr.h>
 #include <media/AudioPolicy.h>
-
-#include <vector>
+#include <system/audio.h>
+#include <utils/Errors.h>
+#include <utils/RefBase.h>
+#include <utils/String8.h>
 
 namespace android {
 
@@ -170,11 +170,12 @@
     product_strategy_t mDefaultStrategy = PRODUCT_STRATEGY_NONE;
 };
 
-class ProductStrategyPreferredRoutingMap : public std::map<product_strategy_t,
-                                                           AudioDeviceTypeAddrVector>
-{
-public:
-    void dump(String8 *dst, int spaces = 0) const;
-};
+using ProductStrategyDevicesRoleMap =
+        std::map<std::pair<product_strategy_t, device_role_t>, AudioDeviceTypeAddrVector>;
+
+void dumpProductStrategyDevicesRoleMap(
+        const ProductStrategyDevicesRoleMap& productStrategyDeviceRoleMap,
+        String8 *dst,
+        int spaces);
 
 } // namespace android
diff --git a/services/audiopolicy/engine/common/src/EngineBase.cpp b/services/audiopolicy/engine/common/src/EngineBase.cpp
index aa43691..150a9a8 100644
--- a/services/audiopolicy/engine/common/src/EngineBase.cpp
+++ b/services/audiopolicy/engine/common/src/EngineBase.cpp
@@ -17,6 +17,8 @@
 #define LOG_TAG "APM::AudioPolicyEngine/Base"
 //#define LOG_NDEBUG 0
 
+#include <functional>
+#include <string>
 #include <sys/stat.h>
 
 #include "EngineBase.h"
@@ -349,23 +351,33 @@
     return NO_ERROR;
 }
 
-status_t EngineBase::setDevicesRoleForStrategy(product_strategy_t strategy, device_role_t role,
-            const AudioDeviceTypeAddrVector &devices)
-{
-    // verify strategy exists
-    if (mProductStrategies.find(strategy) == mProductStrategies.end()) {
-        ALOGE("%s invalid strategy %u", __func__, strategy);
+namespace {
+template <typename T>
+status_t setDevicesRoleForT(
+        std::map<std::pair<T, device_role_t>, AudioDeviceTypeAddrVector>& tDevicesRoleMap,
+        T t, device_role_t role, const AudioDeviceTypeAddrVector &devices,
+        const std::string& logStr, std::function<bool(T)> p) {
+    if (!p(t)) {
+        ALOGE("%s invalid %s %u", __func__, logStr.c_str(), t);
         return BAD_VALUE;
     }
 
     switch (role) {
     case DEVICE_ROLE_PREFERRED:
-        mProductStrategyPreferredDevices[strategy] = devices;
-        break;
-    case DEVICE_ROLE_DISABLED:
-        // TODO (b/184065221): support set devices role as disabled for strategy.
-        ALOGI("%s no implemented for role as %d", __func__, role);
-        break;
+    case DEVICE_ROLE_DISABLED: {
+        tDevicesRoleMap[std::make_pair(t, role)] = devices;
+        // The preferred devices and disabled devices are mutually exclusive. Once a device is added
+        // to one list, it must be removed from the other one.
+        const device_role_t roleToRemove = role == DEVICE_ROLE_PREFERRED ? DEVICE_ROLE_DISABLED
+                                                                         : DEVICE_ROLE_PREFERRED;
+        auto it = tDevicesRoleMap.find(std::make_pair(t, roleToRemove));
+        if (it != tDevicesRoleMap.end()) {
+            it->second = excludeDeviceTypeAddrsFrom(it->second, devices);
+            if (it->second.empty()) {
+                tDevicesRoleMap.erase(it);
+            }
+        }
+    } break;
     case DEVICE_ROLE_NONE:
         // Intentionally fall-through as it is no need to set device role as none for a strategy.
     default:
@@ -375,28 +387,26 @@
     return NO_ERROR;
 }
 
-status_t EngineBase::removeDevicesRoleForStrategy(product_strategy_t strategy, device_role_t role)
-{
-    // verify strategy exists
-    if (mProductStrategies.find(strategy) == mProductStrategies.end()) {
-        ALOGE("%s invalid strategy %u", __func__, strategy);
+template <typename T>
+status_t removeAllDevicesRoleForT(
+        std::map<std::pair<T, device_role_t>, AudioDeviceTypeAddrVector>& tDevicesRoleMap,
+        T t, device_role_t role, const std::string& logStr, std::function<bool(T)> p) {
+    if (!p(t)) {
+        ALOGE("%s invalid %s %u", __func__, logStr.c_str(), t);
         return BAD_VALUE;
     }
 
     switch (role) {
     case DEVICE_ROLE_PREFERRED:
-        if (mProductStrategyPreferredDevices.erase(strategy) == 0) {
-            // no preferred device was set
+    case DEVICE_ROLE_DISABLED:
+        if (tDevicesRoleMap.erase(std::make_pair(t, role)) == 0) {
+            // no preferred/disabled device was set
             return NAME_NOT_FOUND;
         }
         break;
-    case DEVICE_ROLE_DISABLED:
-        // TODO (b/184065221): support remove devices role as disabled for strategy.
-        ALOGI("%s no implemented for role as %d", __func__, role);
-        break;
     case DEVICE_ROLE_NONE:
         // Intentionally fall-through as it makes no sense to remove devices with
-        // role as DEVICE_ROLE_NONE for a strategy
+        // role as DEVICE_ROLE_NONE
     default:
         ALOGE("%s invalid role %d", __func__, role);
         return BAD_VALUE;
@@ -404,30 +414,27 @@
     return NO_ERROR;
 }
 
-status_t EngineBase::getDevicesForRoleAndStrategy(product_strategy_t strategy, device_role_t role,
-            AudioDeviceTypeAddrVector &devices) const
-{
-    // verify strategy exists
-    if (mProductStrategies.find(strategy) == mProductStrategies.end()) {
-        ALOGE("%s unknown strategy %u", __func__, strategy);
+template <typename T>
+status_t getDevicesRoleForT(
+        const std::map<std::pair<T, device_role_t>, AudioDeviceTypeAddrVector>& tDevicesRoleMap,
+        T t, device_role_t role, AudioDeviceTypeAddrVector &devices, const std::string& logStr,
+        std::function<bool(T)> p) {
+    if (!p(t)) {
+        ALOGE("%s invalid %s %u", __func__, logStr.c_str(), t);
         return BAD_VALUE;
     }
 
     switch (role) {
-    case DEVICE_ROLE_PREFERRED: {
-        // preferred device for this strategy?
-        auto devIt = mProductStrategyPreferredDevices.find(strategy);
-        if (devIt == mProductStrategyPreferredDevices.end()) {
-            ALOGV("%s no preferred device for strategy %u", __func__, strategy);
+    case DEVICE_ROLE_PREFERRED:
+    case DEVICE_ROLE_DISABLED: {
+        auto it = tDevicesRoleMap.find(std::make_pair(t, role));
+        if (it == tDevicesRoleMap.end()) {
+            ALOGV("%s no device as role %u for %s %u", __func__, role, logStr.c_str(), t);
             return NAME_NOT_FOUND;
         }
 
-        devices = devIt->second;
+        devices = it->second;
     } break;
-    case DEVICE_ROLE_DISABLED:
-        // TODO (b/184065221): support devices role as disabled for strategy.
-        ALOGV("%s no implemented for role as %d", __func__, role);
-        break;
     case DEVICE_ROLE_NONE:
         // Intentionally fall-through as the DEVICE_ROLE_NONE is never set
     default:
@@ -437,32 +444,45 @@
     return NO_ERROR;
 }
 
+} // namespace
+
+status_t EngineBase::setDevicesRoleForStrategy(product_strategy_t strategy, device_role_t role,
+            const AudioDeviceTypeAddrVector &devices)
+{
+    std::function<bool(product_strategy_t)> p = [this](product_strategy_t strategy) {
+        return mProductStrategies.find(strategy) != mProductStrategies.end();
+    };
+    return setDevicesRoleForT(
+            mProductStrategyDeviceRoleMap, strategy, role, devices, "strategy" /*logStr*/, p);
+}
+
+status_t EngineBase::removeDevicesRoleForStrategy(product_strategy_t strategy, device_role_t role)
+{
+    std::function<bool(product_strategy_t)> p = [this](product_strategy_t strategy) {
+        return mProductStrategies.find(strategy) != mProductStrategies.end();
+    };
+    return removeAllDevicesRoleForT(
+            mProductStrategyDeviceRoleMap, strategy, role, "strategy" /*logStr*/, p);
+}
+
+status_t EngineBase::getDevicesForRoleAndStrategy(product_strategy_t strategy, device_role_t role,
+            AudioDeviceTypeAddrVector &devices) const
+{
+    std::function<bool(product_strategy_t)> p = [this](product_strategy_t strategy) {
+        return mProductStrategies.find(strategy) != mProductStrategies.end();
+    };
+    return getDevicesRoleForT(
+            mProductStrategyDeviceRoleMap, strategy, role, devices, "strategy" /*logStr*/, p);
+}
+
 status_t EngineBase::setDevicesRoleForCapturePreset(audio_source_t audioSource, device_role_t role,
         const AudioDeviceTypeAddrVector &devices)
 {
-    // verify if the audio source is valid
-    if (!audio_is_valid_audio_source(audioSource)) {
-        ALOGE("%s unknown audio source %u", __func__, audioSource);
-    }
-
-    switch (role) {
-    case DEVICE_ROLE_PREFERRED:
-        mCapturePresetDevicesRole[audioSource][role] = devices;
-        // When the devices are set as preferred devices, remove them from the disabled devices.
-        doRemoveDevicesRoleForCapturePreset(
-                audioSource, DEVICE_ROLE_DISABLED, devices, false /*forceMatched*/);
-        break;
-    case DEVICE_ROLE_DISABLED:
-        // TODO: support setting devices role as disabled for capture preset.
-        ALOGI("%s no implemented for role as %d", __func__, role);
-        break;
-    case DEVICE_ROLE_NONE:
-        // Intentionally fall-through as it is no need to set device role as none
-    default:
-        ALOGE("%s invalid role %d", __func__, role);
-        return BAD_VALUE;
-    }
-    return NO_ERROR;
+    std::function<bool(audio_source_t)> p = [](audio_source_t audioSource) {
+        return audio_is_valid_audio_source(audioSource);
+    };
+    return setDevicesRoleForT(
+            mCapturePresetDevicesRoleMap, audioSource, role, devices, "audio source" /*logStr*/, p);
 }
 
 status_t EngineBase::addDevicesRoleForCapturePreset(audio_source_t audioSource, device_role_t role,
@@ -475,19 +495,20 @@
 
     switch (role) {
     case DEVICE_ROLE_PREFERRED:
-        mCapturePresetDevicesRole[audioSource][role] = excludeDeviceTypeAddrsFrom(
-                mCapturePresetDevicesRole[audioSource][role], devices);
-        for (const auto& device : devices) {
-            mCapturePresetDevicesRole[audioSource][role].push_back(device);
+    case DEVICE_ROLE_DISABLED: {
+        const auto audioSourceRole = std::make_pair(audioSource, role);
+        mCapturePresetDevicesRoleMap[audioSourceRole] = excludeDeviceTypeAddrsFrom(
+                mCapturePresetDevicesRoleMap[audioSourceRole], devices);
+        for (const auto &device : devices) {
+            mCapturePresetDevicesRoleMap[audioSourceRole].push_back(device);
         }
         // When the devices are set as preferred devices, remove them from the disabled devices.
         doRemoveDevicesRoleForCapturePreset(
-                audioSource, DEVICE_ROLE_DISABLED, devices, false /*forceMatched*/);
-        break;
-    case DEVICE_ROLE_DISABLED:
-        // TODO: support setting devices role as disabled for capture preset.
-        ALOGI("%s no implemented for role as %d", __func__, role);
-        break;
+                audioSource,
+                role == DEVICE_ROLE_PREFERRED ? DEVICE_ROLE_DISABLED : DEVICE_ROLE_PREFERRED,
+                devices,
+                false /*forceMatched*/);
+    } break;
     case DEVICE_ROLE_NONE:
         // Intentionally fall-through as it is no need to set device role as none
     default:
@@ -513,21 +534,22 @@
     switch (role) {
     case DEVICE_ROLE_PREFERRED:
     case DEVICE_ROLE_DISABLED: {
-        if (mCapturePresetDevicesRole.count(audioSource) == 0 ||
-                mCapturePresetDevicesRole[audioSource].count(role) == 0) {
+        const auto audioSourceRole = std::make_pair(audioSource, role);
+        if (mCapturePresetDevicesRoleMap.find(audioSourceRole) ==
+                mCapturePresetDevicesRoleMap.end()) {
             return NAME_NOT_FOUND;
         }
         AudioDeviceTypeAddrVector remainingDevices = excludeDeviceTypeAddrsFrom(
-                mCapturePresetDevicesRole[audioSource][role], devices);
+                mCapturePresetDevicesRoleMap[audioSourceRole], devices);
         if (forceMatched && remainingDevices.size() !=
-                mCapturePresetDevicesRole[audioSource][role].size() - devices.size()) {
+                mCapturePresetDevicesRoleMap[audioSourceRole].size() - devices.size()) {
             // There are some devices from `devicesToRemove` that are not shown in the cached record
             return BAD_VALUE;
         }
-        mCapturePresetDevicesRole[audioSource][role] = remainingDevices;
-        if (mCapturePresetDevicesRole[audioSource][role].empty()) {
+        mCapturePresetDevicesRoleMap[audioSourceRole] = remainingDevices;
+        if (mCapturePresetDevicesRoleMap[audioSourceRole].empty()) {
             // Remove the role when device list is empty
-            mCapturePresetDevicesRole[audioSource].erase(role);
+            mCapturePresetDevicesRoleMap.erase(audioSourceRole);
         }
     } break;
     case DEVICE_ROLE_NONE:
@@ -543,63 +565,21 @@
 status_t EngineBase::clearDevicesRoleForCapturePreset(audio_source_t audioSource,
                                                       device_role_t role)
 {
-    // verify if the audio source is valid
-    if (!audio_is_valid_audio_source(audioSource)) {
-        ALOGE("%s unknown audio source %u", __func__, audioSource);
-    }
-
-    switch (role) {
-    case DEVICE_ROLE_PREFERRED:
-        if (mCapturePresetDevicesRole.count(audioSource) == 0 ||
-                mCapturePresetDevicesRole[audioSource].erase(role) == 0) {
-            // no preferred device for the given audio source
-            return NAME_NOT_FOUND;
-        }
-        break;
-    case DEVICE_ROLE_DISABLED:
-        // TODO: support remove devices role as disabled for strategy.
-        ALOGI("%s no implemented for role as %d", __func__, role);
-        break;
-    case DEVICE_ROLE_NONE:
-        // Intentionally fall-through as it makes no sense to remove devices with
-        // role as DEVICE_ROLE_NONE for a strategy
-    default:
-        ALOGE("%s invalid role %d", __func__, role);
-        return BAD_VALUE;
-    }
-    return NO_ERROR;
+    std::function<bool(audio_source_t)> p = [](audio_source_t audioSource) {
+        return audio_is_valid_audio_source(audioSource);
+    };
+    return removeAllDevicesRoleForT(
+            mCapturePresetDevicesRoleMap, audioSource, role, "audio source" /*logStr*/, p);
 }
 
 status_t EngineBase::getDevicesForRoleAndCapturePreset(audio_source_t audioSource,
         device_role_t role, AudioDeviceTypeAddrVector &devices) const
 {
-    // verify if the audio source is valid
-    if (!audio_is_valid_audio_source(audioSource)) {
-        ALOGE("%s unknown audio source %u", __func__, audioSource);
-        return BAD_VALUE;
-    }
-
-    switch (role) {
-    case DEVICE_ROLE_PREFERRED:
-    case DEVICE_ROLE_DISABLED: {
-        if (mCapturePresetDevicesRole.count(audioSource) == 0) {
-            return NAME_NOT_FOUND;
-        }
-        auto devIt = mCapturePresetDevicesRole.at(audioSource).find(role);
-        if (devIt == mCapturePresetDevicesRole.at(audioSource).end()) {
-            ALOGV("%s no devices role(%d) for capture preset %u", __func__, role, audioSource);
-            return NAME_NOT_FOUND;
-        }
-
-        devices = devIt->second;
-    } break;
-    case DEVICE_ROLE_NONE:
-        // Intentionally fall-through as the DEVICE_ROLE_NONE is never set
-    default:
-        ALOGE("%s invalid role %d", __func__, role);
-        return BAD_VALUE;
-    }
-    return NO_ERROR;
+    std::function<bool(audio_source_t)> p = [](audio_source_t audioSource) {
+        return audio_is_valid_audio_source(audioSource);
+    };
+    return getDevicesRoleForT(
+            mCapturePresetDevicesRoleMap, audioSource, role, devices, "audio source" /*logStr*/, p);
 }
 
 status_t EngineBase::getMediaDevicesForRole(device_role_t role,
@@ -641,10 +621,22 @@
     return activeDevices;
 }
 
+void EngineBase::dumpCapturePresetDevicesRoleMap(String8 *dst, int spaces) const
+{
+    dst->appendFormat("\n%*sDevice role per capture preset dump:", spaces, "");
+    for (const auto& [capturePresetRolePair, devices] : mCapturePresetDevicesRoleMap) {
+        dst->appendFormat("\n%*sCapture preset(%u) Device Role(%u) Devices(%s)", spaces + 2, "",
+                capturePresetRolePair.first, capturePresetRolePair.second,
+                dumpAudioDeviceTypeAddrVector(devices, true /*includeSensitiveInfo*/).c_str());
+    }
+    dst->appendFormat("\n");
+}
+
 void EngineBase::dump(String8 *dst) const
 {
     mProductStrategies.dump(dst, 2);
-    mProductStrategyPreferredDevices.dump(dst, 2);
+    dumpProductStrategyDevicesRoleMap(mProductStrategyDeviceRoleMap, dst, 2);
+    dumpCapturePresetDevicesRoleMap(dst, 2);
     mVolumeGroups.dump(dst, 2);
 }
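
The refactor replaces the per-strategy preferred-device map and the nested capture-preset map with one shape: a map keyed by a (key, role) pair, where setting devices for one role removes them from the opposite role. A standalone sketch of that behaviour with simplified stand-in types (std::vector<std::string> in place of AudioDeviceTypeAddrVector); it is not the EngineBase code itself:

    #include <algorithm>
    #include <map>
    #include <string>
    #include <utility>
    #include <vector>

    enum Role { PREFERRED, DISABLED };
    using Devices = std::vector<std::string>;
    using RoleMap = std::map<std::pair<int, Role>, Devices>;

    // Mirrors setDevicesRoleForT(): preferred and disabled lists stay mutually exclusive.
    static void setDevicesForRole(RoleMap& roleMap, int key, Role role, const Devices& devices) {
        roleMap[{key, role}] = devices;
        const Role other = (role == PREFERRED) ? DISABLED : PREFERRED;
        auto it = roleMap.find({key, other});
        if (it == roleMap.end()) {
            return;
        }
        Devices remaining;
        for (const auto& d : it->second) {
            if (std::find(devices.begin(), devices.end(), d) == devices.end()) {
                remaining.push_back(d);   // keep only devices not present in the new list
            }
        }
        if (remaining.empty()) {
            roleMap.erase(it);            // drop the entry once its device list is empty
        } else {
            it->second = remaining;
        }
    }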
 
diff --git a/services/audiopolicy/engine/common/src/ProductStrategy.cpp b/services/audiopolicy/engine/common/src/ProductStrategy.cpp
index d4cea5a..b3d144f 100644
--- a/services/audiopolicy/engine/common/src/ProductStrategy.cpp
+++ b/services/audiopolicy/engine/common/src/ProductStrategy.cpp
@@ -320,14 +320,15 @@
     }
 }
 
-void ProductStrategyPreferredRoutingMap::dump(android::String8* dst, int spaces) const {
-    dst->appendFormat("\n%*sPreferred devices per product strategy dump:", spaces, "");
-    for (const auto& iter : *this) {
-        dst->appendFormat("\n%*sStrategy %u %s",
-                          spaces + 2, "",
-                          (uint32_t) iter.first,
-                          dumpAudioDeviceTypeAddrVector(iter.second, true /*includeSensitiveInfo*/)
-                                  .c_str());
+void dumpProductStrategyDevicesRoleMap(
+        const ProductStrategyDevicesRoleMap& productStrategyDeviceRoleMap,
+        String8 *dst,
+        int spaces) {
+    dst->appendFormat("\n%*sDevice role per product strategy dump:", spaces, "");
+    for (const auto& [strategyRolePair, devices] : productStrategyDeviceRoleMap) {
+        dst->appendFormat("\n%*sStrategy(%u) Device Role(%u) Devices(%s)", spaces + 2, "",
+                strategyRolePair.first, strategyRolePair.second,
+                dumpAudioDeviceTypeAddrVector(devices, true /*includeSensitiveInfo*/).c_str());
     }
     dst->appendFormat("\n");
 }
diff --git a/services/audiopolicy/engine/interface/EngineInterface.h b/services/audiopolicy/engine/interface/EngineInterface.h
index f0a01d3..518f86e 100644
--- a/services/audiopolicy/engine/interface/EngineInterface.h
+++ b/services/audiopolicy/engine/interface/EngineInterface.h
@@ -16,6 +16,8 @@
 
 #pragma once
 
+#include <utility>
+
 #include <AudioPolicyManagerObserver.h>
 #include <media/AudioProductStrategy.h>
 #include <media/AudioVolumeGroup.h>
@@ -35,7 +37,7 @@
 using StrategyVector = std::vector<product_strategy_t>;
 using VolumeGroupVector = std::vector<volume_group_t>;
 using CapturePresetDevicesRoleMap =
-        std::map<audio_source_t, std::map<device_role_t, AudioDeviceTypeAddrVector>>;
+        std::map<std::pair<audio_source_t, device_role_t>, AudioDeviceTypeAddrVector>;
 
 /**
  * This interface is dedicated to the policy manager that a Policy Engine shall implement.
@@ -171,8 +173,10 @@
      * @param[out] mix to be used if a mix has been installed for the given audio attributes.
      * @return selected input device for the audio attributes, may be null if error.
      */
-    virtual sp<DeviceDescriptor> getInputDeviceForAttributes(
-            const audio_attributes_t &attr, sp<AudioPolicyMix> *mix = nullptr) const = 0;
+    virtual sp<DeviceDescriptor> getInputDeviceForAttributes(const audio_attributes_t &attr,
+                                                             uid_t uid = 0,
+                                                             sp<AudioPolicyMix> *mix = nullptr)
+                                                             const = 0;
 
     /**
      * Get the legacy stream type for a given audio attributes.
diff --git a/services/audiopolicy/engineconfigurable/src/Engine.cpp b/services/audiopolicy/engineconfigurable/src/Engine.cpp
index 6d42fcf..b0c376a 100644
--- a/services/audiopolicy/engineconfigurable/src/Engine.cpp
+++ b/services/audiopolicy/engineconfigurable/src/Engine.cpp
@@ -310,6 +310,7 @@
 }
 
 sp<DeviceDescriptor> Engine::getInputDeviceForAttributes(const audio_attributes_t &attr,
+                                                         uid_t uid,
                                                          sp<AudioPolicyMix> *mix) const
 {
     const auto &policyMixes = getApmObserver()->getAudioPolicyMixCollection();
@@ -328,7 +329,10 @@
         return device;
     }
 
-    device = policyMixes.getDeviceAndMixForInputSource(attr.source, availableInputDevices, mix);
+    device = policyMixes.getDeviceAndMixForInputSource(attr.source,
+                                                       availableInputDevices,
+                                                       uid,
+                                                       mix);
     if (device != nullptr) {
         return device;
     }
diff --git a/services/audiopolicy/engineconfigurable/src/Engine.h b/services/audiopolicy/engineconfigurable/src/Engine.h
index 3b371d8..d8e2742 100644
--- a/services/audiopolicy/engineconfigurable/src/Engine.h
+++ b/services/audiopolicy/engineconfigurable/src/Engine.h
@@ -61,8 +61,10 @@
     DeviceVector getOutputDevicesForStream(audio_stream_type_t stream,
                                            bool fromCache = false) const override;
 
-    sp<DeviceDescriptor> getInputDeviceForAttributes(
-            const audio_attributes_t &attr, sp<AudioPolicyMix> *mix = nullptr) const override;
+    sp<DeviceDescriptor> getInputDeviceForAttributes(const audio_attributes_t &attr,
+                                                     uid_t uid = 0,
+                                                     sp<AudioPolicyMix> *mix = nullptr)
+                                                     const override;
 
     void updateDeviceSelectionCache() override;
 
diff --git a/services/audiopolicy/engineconfigurable/tools/buildCommonTypesStructureFile.py b/services/audiopolicy/engineconfigurable/tools/buildCommonTypesStructureFile.py
index 5083b14..43b3dd2 100755
--- a/services/audiopolicy/engineconfigurable/tools/buildCommonTypesStructureFile.py
+++ b/services/audiopolicy/engineconfigurable/tools/buildCommonTypesStructureFile.py
@@ -172,12 +172,6 @@
         logging.info("added stub input device mask")
 
     # Transform input source in inclusive criterion
-    shift = len(all_component_types['OutputDevicesMask'])
-    if shift > 32:
-        logging.critical("OutputDevicesMask incompatible with criterion representation on 32 bits")
-        logging.info("EXIT ON FAILURE")
-        exit(1)
-
     for component_types in all_component_types:
         values = ','.join('{}:{}'.format(value, key) for key, value in all_component_types[component_types].items())
         logging.info("{}: <{}>".format(component_types, values))
diff --git a/services/audiopolicy/enginedefault/src/Engine.cpp b/services/audiopolicy/enginedefault/src/Engine.cpp
index 1a903a6..edcdf5a 100644
--- a/services/audiopolicy/enginedefault/src/Engine.cpp
+++ b/services/audiopolicy/enginedefault/src/Engine.cpp
@@ -196,7 +196,7 @@
             if (desc->isActive() && !audio_is_linear_pcm(desc->getFormat())) {
                 availableOutputDevices.remove(desc->devices().getDevicesFromTypes({
                         AUDIO_DEVICE_OUT_HDMI, AUDIO_DEVICE_OUT_SPDIF,
-                        AUDIO_DEVICE_OUT_HDMI_ARC}));
+                        AUDIO_DEVICE_OUT_HDMI_ARC, AUDIO_DEVICE_OUT_HDMI_EARC}));
             }
         }
         } break;
@@ -366,7 +366,9 @@
         if (strategy == STRATEGY_MEDIA) {
             // ARC, SPDIF and AUX_LINE can co-exist with others.
             devices3 = availableOutputDevices.getDevicesFromTypes({
-                    AUDIO_DEVICE_OUT_HDMI_ARC, AUDIO_DEVICE_OUT_SPDIF, AUDIO_DEVICE_OUT_AUX_LINE});
+                    AUDIO_DEVICE_OUT_HDMI_ARC, AUDIO_DEVICE_OUT_HDMI_EARC,
+                    AUDIO_DEVICE_OUT_SPDIF, AUDIO_DEVICE_OUT_AUX_LINE,
+                    });
         }
 
         devices2.add(devices3);
@@ -707,6 +709,7 @@
 }
 
 sp<DeviceDescriptor> Engine::getInputDeviceForAttributes(const audio_attributes_t &attr,
+                                                         uid_t uid,
                                                          sp<AudioPolicyMix> *mix) const
 {
     const auto &policyMixes = getApmObserver()->getAudioPolicyMixCollection();
@@ -726,7 +729,10 @@
         return device;
     }
 
-    device = policyMixes.getDeviceAndMixForInputSource(attr.source, availableInputDevices, mix);
+    device = policyMixes.getDeviceAndMixForInputSource(attr.source,
+                                                       availableInputDevices,
+                                                       uid,
+                                                       mix);
     if (device != nullptr) {
         return device;
     }
diff --git a/services/audiopolicy/enginedefault/src/Engine.h b/services/audiopolicy/enginedefault/src/Engine.h
index 98f59d3..595e289 100644
--- a/services/audiopolicy/enginedefault/src/Engine.h
+++ b/services/audiopolicy/enginedefault/src/Engine.h
@@ -62,8 +62,10 @@
     DeviceVector getOutputDevicesForStream(audio_stream_type_t stream,
                                            bool fromCache = false) const override;
 
-    sp<DeviceDescriptor> getInputDeviceForAttributes(
-            const audio_attributes_t &attr, sp<AudioPolicyMix> *mix = nullptr) const override;
+    sp<DeviceDescriptor> getInputDeviceForAttributes(const audio_attributes_t &attr,
+                                                     uid_t uid = 0,
+                                                     sp<AudioPolicyMix> *mix = nullptr)
+                                                     const override;
 
     void updateDeviceSelectionCache() override;
 
diff --git a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
index 94e8d30..dd44c54 100644
--- a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
+++ b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
@@ -2228,7 +2228,8 @@
         } else {
             // Prevent from storing invalid requested device id in clients
             requestedDeviceId = AUDIO_PORT_HANDLE_NONE;
-            device = mEngine->getInputDeviceForAttributes(attributes, &policyMix);
+            device = mEngine->getInputDeviceForAttributes(attributes, uid, &policyMix);
+            ALOGV("%s found device type 0x%X", __FUNCTION__,
+                    device == nullptr ? AUDIO_DEVICE_NONE : device->type());
         }
         if (device == nullptr) {
             ALOGW("getInputForAttr() could not find device for source %d", attributes.source);
@@ -2614,7 +2615,7 @@
             bool close = false;
             for (const auto& client : input->clientsList()) {
                 sp<DeviceDescriptor> device =
-                    mEngine->getInputDeviceForAttributes(client->attributes());
+                    mEngine->getInputDeviceForAttributes(client->attributes(), client->uid());
                 if (!input->supportedDevices().contains(device)) {
                     close = true;
                     break;
@@ -5858,12 +5859,22 @@
 
     // If we are not in call and no client is active on this input, this methods returns
     // a null sp<>, causing the patch on the input stream to be released.
-    audio_attributes_t attributes = inputDesc->getHighestPriorityAttributes();
+    audio_attributes_t attributes;
+    uid_t uid;
+    sp<RecordClientDescriptor> topClient = inputDesc->getHighestPriorityClient();
+    if (topClient != nullptr) {
+      attributes = topClient->attributes();
+      uid = topClient->uid();
+    } else {
+      attributes = { .source = AUDIO_SOURCE_DEFAULT };
+      uid = 0;
+    }
+
     if (attributes.source == AUDIO_SOURCE_DEFAULT && isInCall()) {
         attributes.source = AUDIO_SOURCE_VOICE_COMMUNICATION;
     }
     if (attributes.source != AUDIO_SOURCE_DEFAULT) {
-        device = mEngine->getInputDeviceForAttributes(attributes);
+        device = mEngine->getInputDeviceForAttributes(attributes, uid);
     }
 
     return device;
diff --git a/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp b/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp
index 639fa58..551013f 100644
--- a/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp
+++ b/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp
@@ -197,6 +197,7 @@
     mAudioPolicyManager->setPhoneState(state);
     mPhoneState = state;
     mPhoneStateOwnerUid = uid;
+    updateUidStates_l();
     return Status::ok();
 }
 
diff --git a/services/audiopolicy/service/AudioPolicyService.cpp b/services/audiopolicy/service/AudioPolicyService.cpp
index 4ffa9cc..b5eb98f 100644
--- a/services/audiopolicy/service/AudioPolicyService.cpp
+++ b/services/audiopolicy/service/AudioPolicyService.cpp
@@ -537,35 +537,34 @@
 {
 //    Go over all active clients and allow capture (does not force silence) in the
 //    following cases:
-//    The client source is virtual (remote submix, call audio TX or RX...)
-//    OR The user the client is running in has microphone sensor privacy disabled
-//        AND The client is the assistant
-//                AND an accessibility service is on TOP or a RTT call is active
-//                        AND the source is VOICE_RECOGNITION or HOTWORD
-//                    OR uses VOICE_RECOGNITION AND is on TOP
-//                        OR uses HOTWORD
-//                    AND there is no active privacy sensitive capture or call
-//                        OR client has CAPTURE_AUDIO_OUTPUT privileged permission
-//            OR The client is an accessibility service
-//                AND Is on TOP
-//                        AND the source is VOICE_RECOGNITION or HOTWORD
-//                    OR The assistant is not on TOP
-//                        AND there is no active privacy sensitive capture or call
-//                            OR client has CAPTURE_AUDIO_OUTPUT privileged permission
-//                AND is on TOP
+//    The client is the assistant
+//        AND an accessibility service is on TOP or a RTT call is active
 //                AND the source is VOICE_RECOGNITION or HOTWORD
-//            OR the client source is HOTWORD
-//                AND is on TOP
-//                    OR all active clients are using HOTWORD source
-//                AND no call is active
-//                    OR client has CAPTURE_AUDIO_OUTPUT privileged permission
-//            OR the client is the current InputMethodService
-//                AND a RTT call is active AND the source is VOICE_RECOGNITION
-//            OR Any client
-//                AND The assistant is not on TOP
-//                AND is on TOP or latest started
+//            OR uses VOICE_RECOGNITION AND is on TOP
+//                OR uses HOTWORD
+//            AND there is no active privacy sensitive capture or call
+//                OR client has CAPTURE_AUDIO_OUTPUT privileged permission
+//    OR The client is an accessibility service
+//        AND Is on TOP
+//                AND the source is VOICE_RECOGNITION or HOTWORD
+//            OR The assistant is not on TOP
 //                AND there is no active privacy sensitive capture or call
 //                    OR client has CAPTURE_AUDIO_OUTPUT privileged permission
+//        AND is on TOP
+//        AND the source is VOICE_RECOGNITION or HOTWORD
+//    OR the client source is virtual (remote submix, call audio TX or RX...)
+//    OR the client source is HOTWORD
+//        AND is on TOP
+//            OR all active clients are using HOTWORD source
+//        AND no call is active
+//            OR client has CAPTURE_AUDIO_OUTPUT privileged permission
+//    OR the client is the current InputMethodService
+//        AND a RTT call is active AND the source is VOICE_RECOGNITION
+//    OR Any client
+//        AND The assistant is not on TOP
+//        AND is on TOP or latest started
+//        AND there is no active privacy sensitive capture or call
+//            OR client has CAPTURE_AUDIO_OUTPUT privileged permission
 
 
     sp<AudioRecordClient> topActive;
@@ -596,8 +595,7 @@
     for (size_t i =0; i < mAudioRecordClients.size(); i++) {
         sp<AudioRecordClient> current = mAudioRecordClients[i];
         uid_t currentUid = VALUE_OR_FATAL(aidl2legacy_int32_t_uid_t(current->identity.uid));
-        if (!current->active || (!isVirtualSource(current->attributes.source)
-                && isUserSensorPrivacyEnabledForUid(currentUid))) {
+        if (!current->active) {
             continue;
         }
 
@@ -734,9 +732,6 @@
         if (isVirtualSource(source)) {
             // Allow capture for virtual (remote submix, call audio TX or RX...) sources
             allowCapture = true;
-        } else if (isUserSensorPrivacyEnabledForUid(currentUid)) {
-            // If sensor privacy is enabled, don't allow capture
-            allowCapture = false;
         } else if (mUidPolicy->isAssistantUid(currentUid)) {
             // For assistant allow capture if:
             //     An accessibility service is on TOP or a RTT call is active
@@ -1145,16 +1140,6 @@
     return NO_INIT;
 }
 
-bool AudioPolicyService::isUserSensorPrivacyEnabledForUid(uid_t uid) {
-    userid_t userId = multiuser_get_user_id(uid);
-    if (mMicrophoneSensorPrivacyPolicies.find(userId) == mMicrophoneSensorPrivacyPolicies.end()) {
-        sp<SensorPrivacyPolicy> userPolicy = new SensorPrivacyPolicy(this);
-        userPolicy->registerSelfForMicrophoneOnly(userId);
-        mMicrophoneSensorPrivacyPolicies[userId] = userPolicy;
-    }
-    return mMicrophoneSensorPrivacyPolicies[userId]->isSensorPrivacyEnabled();
-}
-
 status_t AudioPolicyService::printHelp(int out) {
     return dprintf(out, "Audio policy service commands:\n"
         "  get-uid-state <PACKAGE> [--user USER_ID] gets the uid state\n"
diff --git a/services/audiopolicy/service/AudioPolicyService.h b/services/audiopolicy/service/AudioPolicyService.h
index 145ba06..00d9670 100644
--- a/services/audiopolicy/service/AudioPolicyService.h
+++ b/services/audiopolicy/service/AudioPolicyService.h
@@ -346,8 +346,6 @@
     status_t validateUsage(audio_usage_t usage);
     status_t validateUsage(audio_usage_t usage, const media::permission::Identity& identity);
 
-    bool isUserSensorPrivacyEnabledForUid(uid_t uid);
-
     void updateUidStates();
     void updateUidStates_l() REQUIRES(mLock);
 
@@ -908,8 +906,6 @@
     void *mLibraryHandle = nullptr;
     CreateAudioPolicyManagerInstance mCreateAudioPolicyManager;
     DestroyAudioPolicyManagerInstance mDestroyAudioPolicyManager;
-
-    std::map<userid_t, sp<SensorPrivacyPolicy>> mMicrophoneSensorPrivacyPolicies;
 };
 
 } // namespace android
diff --git a/services/camera/libcameraservice/Android.bp b/services/camera/libcameraservice/Android.bp
index 05422aa..32c0267 100644
--- a/services/camera/libcameraservice/Android.bp
+++ b/services/camera/libcameraservice/Android.bp
@@ -113,6 +113,7 @@
         "libutilscallstack",
         "libutils",
         "libbinder",
+        "libactivitymanager_aidl",
         "libcutils",
         "libmedia",
         "libmediautils",
@@ -161,6 +162,7 @@
 
     export_shared_lib_headers: [
         "libbinder",
+        "libactivitymanager_aidl",
         "libcamera_client",
         "libfmq",
         "libsensorprivacy",
diff --git a/services/camera/libcameraservice/CameraService.cpp b/services/camera/libcameraservice/CameraService.cpp
index eb24a93..6efb90b 100644
--- a/services/camera/libcameraservice/CameraService.cpp
+++ b/services/camera/libcameraservice/CameraService.cpp
@@ -29,6 +29,7 @@
 #include <inttypes.h>
 #include <pthread.h>
 
+#include <android/content/pm/IPackageManagerNative.h>
 #include <android/hardware/ICamera.h>
 #include <android/hardware/ICameraClient.h>
 
@@ -242,10 +243,6 @@
     VendorTagDescriptor::clearGlobalVendorTagDescriptor();
     mUidPolicy->unregisterSelf();
     mSensorPrivacyPolicy->unregisterSelf();
-
-    for (auto const& [_, policy] : mCameraSensorPrivacyPolicies) {
-        policy->unregisterSelf();
-    }
 }
 
 void CameraService::onNewProviderRegistered() {
@@ -800,8 +797,8 @@
 Status CameraService::makeClient(const sp<CameraService>& cameraService,
         const sp<IInterface>& cameraCb, const String16& packageName,
         const std::optional<String16>& featureId,  const String8& cameraId,
-        int api1CameraId, int facing, int clientPid, uid_t clientUid, int servicePid,
-        int deviceVersion, apiLevel effectiveApiLevel,
+        int api1CameraId, int facing, int sensorOrientation, int clientPid, uid_t clientUid,
+        int servicePid, int deviceVersion, apiLevel effectiveApiLevel,
         /*out*/sp<BasicClient>* client) {
 
     // Create CameraClient based on device version reported by the HAL.
@@ -824,13 +821,13 @@
                 sp<ICameraClient> tmp = static_cast<ICameraClient*>(cameraCb.get());
                 *client = new Camera2Client(cameraService, tmp, packageName, featureId,
                         cameraId, api1CameraId,
-                        facing, clientPid, clientUid,
+                        facing, sensorOrientation, clientPid, clientUid,
                         servicePid);
             } else { // Camera2 API route
                 sp<hardware::camera2::ICameraDeviceCallbacks> tmp =
                         static_cast<hardware::camera2::ICameraDeviceCallbacks*>(cameraCb.get());
                 *client = new CameraDeviceClient(cameraService, tmp, packageName, featureId,
-                        cameraId, facing, clientPid, clientUid, servicePid);
+                        cameraId, facing, sensorOrientation, clientPid, clientUid, servicePid);
             }
             break;
         default:
@@ -1645,7 +1642,7 @@
 
         sp<BasicClient> tmp = nullptr;
         if(!(ret = makeClient(this, cameraCb, clientPackageName, clientFeatureId,
-                cameraId, api1CameraId, facing,
+                cameraId, api1CameraId, facing, orientation,
                 clientPid, clientUid, getpid(),
                 deviceVersion, effectiveApiLevel,
                 /*out*/&tmp)).isOk()) {
@@ -1711,8 +1708,9 @@
 
         // Set camera muting behavior
         if (client->supportsCameraMute()) {
-            client->setCameraMute(mOverrideCameraMuteMode ||
-                    isUserSensorPrivacyEnabledForUid(clientUid));
+            bool isCameraPrivacyEnabled =
+                    mSensorPrivacyPolicy->isCameraPrivacyEnabled(multiuser_get_user_id(clientUid));
+            client->setCameraMute(mOverrideCameraMuteMode || isCameraPrivacyEnabled);
         }
 
         if (shimUpdateOnly) {
@@ -2030,7 +2028,50 @@
     return Status::ok();
 }
 
- Status CameraService::getConcurrentCameraIds(
+Status CameraService::notifyDisplayConfigurationChange() {
+    ATRACE_CALL();
+    const int callingPid = CameraThreadState::getCallingPid();
+    const int selfPid = getpid();
+
+    // Permission checks
+    if (callingPid != selfPid) {
+        // Ensure we're being called by system_server, or similar process with
+        // permissions to notify the camera service about system events
+        if (!checkCallingPermission(sCameraSendSystemEventsPermission)) {
+            const int uid = CameraThreadState::getCallingUid();
+            ALOGE("Permission Denial: cannot send updates to camera service about orientation"
+                    " changes from pid=%d, uid=%d", callingPid, uid);
+            return STATUS_ERROR_FMT(ERROR_PERMISSION_DENIED,
+                    "No permission to send updates to camera service about orientation"
+                    " changes from pid=%d, uid=%d", callingPid, uid);
+        }
+    }
+
+    Mutex::Autolock lock(mServiceLock);
+
+    // Don't do anything if rotate-and-crop override via cmd is active
+    if (mOverrideRotateAndCropMode != ANDROID_SCALER_ROTATE_AND_CROP_AUTO) return Status::ok();
+
+    const auto clients = mActiveClientManager.getAll();
+    for (auto& current : clients) {
+        if (current != nullptr) {
+            const auto basicClient = current->getValue();
+            if (basicClient.get() != nullptr) {
+                if (CameraServiceProxyWrapper::isRotateAndCropOverrideNeeded(
+                            basicClient->getPackageName(), basicClient->getCameraOrientation(),
+                            basicClient->getCameraFacing())) {
+                    basicClient->setRotateAndCropOverride(ANDROID_SCALER_ROTATE_AND_CROP_90);
+                } else {
+                    basicClient->setRotateAndCropOverride(ANDROID_SCALER_ROTATE_AND_CROP_NONE);
+                }
+            }
+        }
+    }
+
+    return Status::ok();
+}
+
+Status CameraService::getConcurrentCameraIds(
         std::vector<ConcurrentCameraIdCombination>* concurrentCameraIds) {
     ATRACE_CALL();
     if (!concurrentCameraIds) {
@@ -2690,13 +2731,13 @@
         const String16& clientPackageName,
         const std::optional<String16>& clientFeatureId,
         const String8& cameraIdStr,
-        int api1CameraId, int cameraFacing,
+        int api1CameraId, int cameraFacing, int sensorOrientation,
         int clientPid, uid_t clientUid,
         int servicePid) :
         CameraService::BasicClient(cameraService,
                 IInterface::asBinder(cameraClient),
                 clientPackageName, clientFeatureId,
-                cameraIdStr, cameraFacing,
+                cameraIdStr, cameraFacing, sensorOrientation,
                 clientPid, clientUid,
                 servicePid),
         mCameraId(api1CameraId)
@@ -2726,10 +2767,10 @@
 CameraService::BasicClient::BasicClient(const sp<CameraService>& cameraService,
         const sp<IBinder>& remoteCallback,
         const String16& clientPackageName, const std::optional<String16>& clientFeatureId,
-        const String8& cameraIdStr, int cameraFacing,
+        const String8& cameraIdStr, int cameraFacing, int sensorOrientation,
         int clientPid, uid_t clientUid,
         int servicePid):
-        mCameraIdStr(cameraIdStr), mCameraFacing(cameraFacing),
+        mCameraIdStr(cameraIdStr), mCameraFacing(cameraFacing), mOrientation(sensorOrientation),
         mClientPackageName(clientPackageName), mClientFeatureId(clientFeatureId),
         mClientPid(clientPid), mClientUid(clientUid),
         mServicePid(servicePid),
@@ -2826,6 +2867,13 @@
     return mClientPackageName;
 }
 
+int CameraService::BasicClient::getCameraFacing() const {
+    return mCameraFacing;
+}
+
+int CameraService::BasicClient::getCameraOrientation() const {
+    return mOrientation;
+}
 
 int CameraService::BasicClient::getClientPid() const {
     return mClientPid;
@@ -2895,10 +2943,17 @@
         // If the calling Uid is trusted (a native service), the AppOpsManager could
         // return MODE_IGNORED. Do not treat such case as error.
         if (!mUidIsTrusted && res == AppOpsManager::MODE_IGNORED) {
-            ALOGI("Camera %s: Access for \"%s\" has been restricted",
-                    mCameraIdStr.string(), String8(mClientPackageName).string());
-            // Return the same error as for device policy manager rejection
-            return -EACCES;
+            bool isUidActive = sCameraService->mUidPolicy->isUidActive(mClientUid,
+                    mClientPackageName);
+            bool isCameraPrivacyEnabled =
+                    sCameraService->mSensorPrivacyPolicy->isCameraPrivacyEnabled(
+                            multiuser_get_user_id(mClientUid));
+            if (!isUidActive || !isCameraPrivacyEnabled) {
+                ALOGI("Camera %s: Access for \"%s\" has been restricted",
+                        mCameraIdStr.string(), String8(mClientPackageName).string());
+                // Return the same error as for device policy manager rejection
+                return -EACCES;
+            }
         }
     }
 
@@ -2976,15 +3031,22 @@
         block();
     } else if (res == AppOpsManager::MODE_IGNORED) {
         bool isUidActive = sCameraService->mUidPolicy->isUidActive(mClientUid, mClientPackageName);
+        bool isCameraPrivacyEnabled =
+                sCameraService->mSensorPrivacyPolicy->isCameraPrivacyEnabled(
+                        multiuser_get_user_id(mClientUid));
         ALOGI("Camera %s: Access for \"%s\" has been restricted, isUidTrusted %d, isUidActive %d",
                 mCameraIdStr.string(), String8(mClientPackageName).string(),
                 mUidIsTrusted, isUidActive);
         // If the calling Uid is trusted (a native service), or the client Uid is active (WAR for
         // b/175320666), the AppOpsManager could return MODE_IGNORED. Do not treat such cases as
         // error.
-        if (!mUidIsTrusted && !isUidActive) {
+        if (!mUidIsTrusted && isUidActive && isCameraPrivacyEnabled) {
+            setCameraMute(true);
+        } else if (!mUidIsTrusted && !isUidActive) {
             block();
         }
+    } else if (res == AppOpsManager::MODE_ALLOWED) {
+        setCameraMute(sCameraService->mOverrideCameraMuteMode);
     }
 }
 
@@ -3257,6 +3319,7 @@
     if (mRegistered) {
         return;
     }
+    hasCameraPrivacyFeature(); // Called so the result is cached
     mSpm.addSensorPrivacyListener(this);
     mSensorPrivacyEnabled = mSpm.isSensorPrivacyEnabled();
     status_t res = mSpm.linkToDeath(this);
@@ -3266,39 +3329,6 @@
     }
 }
 
-status_t CameraService::SensorPrivacyPolicy::registerSelfForIndividual(int userId) {
-    Mutex::Autolock _l(mSensorPrivacyLock);
-    if (mRegistered) {
-        return OK;
-    }
-
-    status_t res = mSpm.addIndividualSensorPrivacyListener(userId,
-            SensorPrivacyManager::INDIVIDUAL_SENSOR_CAMERA, this);
-    if (res != OK) {
-        ALOGE("Unable to register camera privacy listener: %s (%d)", strerror(-res), res);
-        return res;
-    }
-
-    res = mSpm.isIndividualSensorPrivacyEnabled(userId,
-        SensorPrivacyManager::INDIVIDUAL_SENSOR_CAMERA, mSensorPrivacyEnabled);
-    if (res != OK) {
-        ALOGE("Unable to check camera privacy: %s (%d)", strerror(-res), res);
-        return res;
-    }
-
-    res = mSpm.linkToDeath(this);
-    if (res != OK) {
-        ALOGE("Register link to death failed for sensor privacy: %s (%d)", strerror(-res), res);
-        return res;
-    }
-
-    mRegistered = true;
-    mIsIndividual = true;
-    mUserId = userId;
-    ALOGV("SensorPrivacyPolicy: Registered with SensorPrivacyManager");
-    return OK;
-}
-
 void CameraService::SensorPrivacyPolicy::unregisterSelf() {
     Mutex::Autolock _l(mSensorPrivacyLock);
     mSpm.removeSensorPrivacyListener(this);
@@ -3312,20 +3342,24 @@
     return mSensorPrivacyEnabled;
 }
 
+bool CameraService::SensorPrivacyPolicy::isCameraPrivacyEnabled(userid_t userId) {
+    if (!hasCameraPrivacyFeature()) {
+        return false;
+    }
+    return mSpm.isIndividualSensorPrivacyEnabled(userId,
+        SensorPrivacyManager::INDIVIDUAL_SENSOR_CAMERA);
+}
+
 binder::Status CameraService::SensorPrivacyPolicy::onSensorPrivacyChanged(bool enabled) {
     {
         Mutex::Autolock _l(mSensorPrivacyLock);
         mSensorPrivacyEnabled = enabled;
     }
     // if sensor privacy is enabled then block all clients from accessing the camera
-    sp<CameraService> service = mService.promote();
-    if (service != nullptr) {
-        if (mIsIndividual) {
-            service->setMuteForAllClients(mUserId, enabled);
-        } else {
-            if (enabled) {
-                service->blockAllClients();
-            }
+    if (enabled) {
+        sp<CameraService> service = mService.promote();
+        if (service != nullptr) {
+            service->blockAllClients();
         }
     }
     return binder::Status::ok();
@@ -3337,6 +3371,31 @@
     mRegistered = false;
 }
 
+bool CameraService::SensorPrivacyPolicy::hasCameraPrivacyFeature() {
+    if (!mNeedToCheckCameraPrivacyFeature) {
+        return mHasCameraPrivacyFeature;
+    }
+    bool hasCameraPrivacyFeature = false;
+    sp<IBinder> binder = defaultServiceManager()->getService(String16("package_native"));
+    if (binder != nullptr) {
+        sp<content::pm::IPackageManagerNative> packageManager =
+                interface_cast<content::pm::IPackageManagerNative>(binder);
+        if (packageManager != nullptr) {
+            binder::Status status = packageManager->hasSystemFeature(
+                    String16("android.hardware.camera.toggle"), 0, &hasCameraPrivacyFeature);
+
+            if (status.isOk()) {
+                mNeedToCheckCameraPrivacyFeature = false;
+                mHasCameraPrivacyFeature = hasCameraPrivacyFeature;
+            } else {
+                ALOGE("Unable to check if camera privacy feature is supported");
+            }
+        }
+    }
+
+    return hasCameraPrivacyFeature;
+}
+
 // ----------------------------------------------------------------------------
 //                  CameraState
 // ----------------------------------------------------------------------------
@@ -3956,19 +4015,6 @@
     }
 }
 
-void CameraService::setMuteForAllClients(userid_t userId, bool enabled) {
-    const auto clients = mActiveClientManager.getAll();
-    for (auto& current : clients) {
-        if (current != nullptr) {
-            const auto basicClient = current->getValue();
-            if (basicClient.get() != nullptr
-                    && multiuser_get_user_id(basicClient->getClientUid()) == userId) {
-                basicClient->setCameraMute(enabled);
-            }
-        }
-    }
-}
-
 // NOTE: This is a remote API - make sure all args are validated
 status_t CameraService::shellCommand(int in, int out, int err, const Vector<String16>& args) {
     if (!checkCallingPermission(sManageCameraPermission, nullptr, nullptr)) {
@@ -4175,16 +4221,4 @@
     return mode;
 }
 
-bool CameraService::isUserSensorPrivacyEnabledForUid(uid_t uid) {
-    userid_t userId = multiuser_get_user_id(uid);
-    if (mCameraSensorPrivacyPolicies.find(userId) == mCameraSensorPrivacyPolicies.end()) {
-        sp<SensorPrivacyPolicy> userPolicy = new SensorPrivacyPolicy(this);
-        if (userPolicy->registerSelfForIndividual(userId) != OK) {
-            return false;
-        }
-        mCameraSensorPrivacyPolicies[userId] = userPolicy;
-    }
-    return mCameraSensorPrivacyPolicies[userId]->isSensorPrivacyEnabled();
-}
-
 }; // namespace android
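
The MODE_IGNORED handling above now distinguishes three outcomes instead of two: trusted (native) UIDs are left untouched, active UIDs with the camera privacy toggle enabled are muted rather than blocked, and inactive untrusted UIDs are blocked as before (MODE_ALLOWED simply restores the override mute mode). A minimal standalone sketch of that decision, with plain booleans standing in for the AppOpsManager/UidPolicy/SensorPrivacyPolicy plumbing; the Action enum and function name are illustrative only:

#include <cstdio>

enum class Action { None, Mute, Block };

// Stand-in for the decision made in BasicClient::opChanged() when the app op
// result is MODE_IGNORED; the real code calls setCameraMute() or block().
Action onCameraOpIgnored(bool uidIsTrusted, bool uidIsActive, bool cameraPrivacyEnabled) {
    if (uidIsTrusted) return Action::None;                         // native services: no restriction
    if (uidIsActive && cameraPrivacyEnabled) return Action::Mute;  // privacy toggle: mute the stream
    if (!uidIsActive) return Action::Block;                        // inactive client: block as before
    return Action::None;
}

int main() {
    std::printf("%d\n", static_cast<int>(onCameraOpIgnored(false, true, true)));   // 1 (Mute)
    std::printf("%d\n", static_cast<int>(onCameraOpIgnored(false, false, false))); // 2 (Block)
    return 0;
}
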
diff --git a/services/camera/libcameraservice/CameraService.h b/services/camera/libcameraservice/CameraService.h
index 98d4500..6317c7a 100644
--- a/services/camera/libcameraservice/CameraService.h
+++ b/services/camera/libcameraservice/CameraService.h
@@ -167,6 +167,8 @@
 
     virtual binder::Status    notifyDeviceStateChange(int64_t newState);
 
+    virtual binder::Status    notifyDisplayConfigurationChange();
+
     // OK = supports api of that version, -EOPNOTSUPP = does not support
     virtual binder::Status    supportsCameraApi(
             const String16& cameraId, int32_t apiVersion,
@@ -246,6 +248,12 @@
         // Return the package name for this client
         virtual String16 getPackageName() const;
 
+        // Return the camera facing for this client
+        virtual int getCameraFacing() const;
+
+        // Return the camera orientation for this client
+        virtual int getCameraOrientation() const;
+
         // Notify client about a fatal error
         virtual void notifyError(int32_t errorCode,
                 const CaptureResultExtras& resultExtras) = 0;
@@ -292,6 +300,7 @@
                 const std::optional<String16>& clientFeatureId,
                 const String8& cameraIdStr,
                 int cameraFacing,
+                int sensorOrientation,
                 int clientPid,
                 uid_t clientUid,
                 int servicePid);
@@ -308,6 +317,7 @@
         static sp<CameraService>        sCameraService;
         const String8                   mCameraIdStr;
         const int                       mCameraFacing;
+        const int                       mOrientation;
         String16                        mClientPackageName;
         std::optional<String16>         mClientFeatureId;
         pid_t                           mClientPid;
@@ -385,6 +395,7 @@
                 const String8& cameraIdStr,
                 int api1CameraId,
                 int cameraFacing,
+                int sensorOrientation,
                 int clientPid,
                 uid_t clientUid,
                 int servicePid);
@@ -639,13 +650,13 @@
         public:
             explicit SensorPrivacyPolicy(wp<CameraService> service)
                     : mService(service), mSensorPrivacyEnabled(false), mRegistered(false),
-                      mIsIndividual(false), mUserId(0) {}
+                    mHasCameraPrivacyFeature(false), mNeedToCheckCameraPrivacyFeature(true) {}
 
             void registerSelf();
-            status_t registerSelfForIndividual(int userId);
             void unregisterSelf();
 
             bool isSensorPrivacyEnabled();
+            bool isCameraPrivacyEnabled(userid_t userId);
 
             binder::Status onSensorPrivacyChanged(bool enabled);
 
@@ -658,8 +669,10 @@
             Mutex mSensorPrivacyLock;
             bool mSensorPrivacyEnabled;
             bool mRegistered;
-            bool mIsIndividual;
-            userid_t mUserId;
+            bool mHasCameraPrivacyFeature;
+            bool mNeedToCheckCameraPrivacyFeature;
+
+            bool hasCameraPrivacyFeature();
     };
 
     sp<UidPolicy> mUidPolicy;
@@ -1035,9 +1048,6 @@
     // Blocks all active clients.
     void blockAllClients();
 
-    // Mutes all active clients for a user.
-    void setMuteForAllClients(userid_t userId, bool enabled);
-
     // Overrides the UID state as if it is idle
     status_t handleSetUidState(const Vector<String16>& args, int err);
 
@@ -1073,7 +1083,7 @@
     static binder::Status makeClient(const sp<CameraService>& cameraService,
             const sp<IInterface>& cameraCb, const String16& packageName,
             const std::optional<String16>& featureId, const String8& cameraId, int api1CameraId,
-            int facing, int clientPid, uid_t clientUid, int servicePid,
+            int facing, int sensorOrientation, int clientPid, uid_t clientUid, int servicePid,
             int deviceVersion, apiLevel effectiveApiLevel,
             /*out*/sp<BasicClient>* client);
 
@@ -1102,7 +1112,7 @@
     // Aggreated audio restriction mode for all camera clients
     int32_t mAudioRestriction;
 
-    // Current override rotate-and-crop mode
+    // Current rotate-and-crop override set via the shell command; AUTO means no override
     uint8_t mOverrideRotateAndCropMode = ANDROID_SCALER_ROTATE_AND_CROP_AUTO;
 
     // Current image dump mask
@@ -1110,12 +1120,6 @@
 
     // Current camera mute mode
     bool mOverrideCameraMuteMode = false;
-
-    // Map from user to sensor privacy policy
-    std::map<userid_t, sp<SensorPrivacyPolicy>> mCameraSensorPrivacyPolicies;
-
-    // Checks if the sensor privacy is enabled for the uid
-    bool isUserSensorPrivacyEnabledForUid(uid_t uid);
 };
 
 } // namespace android
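
hasCameraPrivacyFeature(), declared above, caches the result of a package-manager feature query so the binder round trip happens at most once. A compact sketch of the same caching pattern, with a std::function standing in for the IPackageManagerNative::hasSystemFeature("android.hardware.camera.toggle") call; the class and parameter names here are illustrative, not from the tree:

#include <cstdio>
#include <functional>

class CameraPrivacyFeatureCache {
public:
    // queryPackageManager returns true on success and writes the feature flag to *out.
    explicit CameraPrivacyFeatureCache(std::function<bool(bool*)> queryPackageManager)
        : mQuery(std::move(queryPackageManager)) {}

    bool hasCameraPrivacyFeature() {
        if (!mNeedToCheck) return mHasFeature;  // cached after the first successful query
        bool hasFeature = false;
        if (mQuery(&hasFeature)) {              // only cache when the query succeeded
            mHasFeature = hasFeature;
            mNeedToCheck = false;
        }
        return hasFeature;
    }

private:
    const std::function<bool(bool*)> mQuery;
    bool mHasFeature = false;
    bool mNeedToCheck = true;
};

int main() {
    CameraPrivacyFeatureCache cache([](bool* out) { *out = true; return true; });  // fake query
    std::printf("%d %d\n", cache.hasCameraPrivacyFeature(), cache.hasCameraPrivacyFeature());
    return 0;
}
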
diff --git a/services/camera/libcameraservice/api1/Camera2Client.cpp b/services/camera/libcameraservice/api1/Camera2Client.cpp
index 31cfed6..72b3c40 100644
--- a/services/camera/libcameraservice/api1/Camera2Client.cpp
+++ b/services/camera/libcameraservice/api1/Camera2Client.cpp
@@ -55,11 +55,12 @@
         const String8& cameraDeviceId,
         int api1CameraId,
         int cameraFacing,
+        int sensorOrientation,
         int clientPid,
         uid_t clientUid,
         int servicePid):
         Camera2ClientBase(cameraService, cameraClient, clientPackageName, clientFeatureId,
-                cameraDeviceId, api1CameraId, cameraFacing,
+                cameraDeviceId, api1CameraId, cameraFacing, sensorOrientation,
                 clientPid, clientUid, servicePid),
         mParameters(api1CameraId, cameraFacing)
 {
diff --git a/services/camera/libcameraservice/api1/Camera2Client.h b/services/camera/libcameraservice/api1/Camera2Client.h
index 4d667e3..d16b242 100644
--- a/services/camera/libcameraservice/api1/Camera2Client.h
+++ b/services/camera/libcameraservice/api1/Camera2Client.h
@@ -101,6 +101,7 @@
             const String8& cameraDeviceId,
             int api1CameraId,
             int cameraFacing,
+            int sensorOrientation,
             int clientPid,
             uid_t clientUid,
             int servicePid);
diff --git a/services/camera/libcameraservice/api2/CameraDeviceClient.cpp b/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
index 1b65d1a..343f4a7 100644
--- a/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
+++ b/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
@@ -61,6 +61,7 @@
         const String8& cameraId,
         int api1CameraId,
         int cameraFacing,
+        int sensorOrientation,
         int clientPid,
         uid_t clientUid,
         int servicePid) :
@@ -70,6 +71,7 @@
             clientFeatureId,
             cameraId,
             cameraFacing,
+            sensorOrientation,
             clientPid,
             clientUid,
             servicePid),
@@ -86,12 +88,13 @@
         const std::optional<String16>& clientFeatureId,
         const String8& cameraId,
         int cameraFacing,
+        int sensorOrientation,
         int clientPid,
         uid_t clientUid,
         int servicePid) :
     Camera2ClientBase(cameraService, remoteCallback, clientPackageName, clientFeatureId,
                 cameraId, /*API1 camera ID*/ -1,
-                cameraFacing, clientPid, clientUid, servicePid),
+                cameraFacing, sensorOrientation, clientPid, clientUid, servicePid),
     mInputStream(),
     mStreamingRequestId(REQUEST_ID_NONE),
     mRequestIdCounter(0) {
@@ -1711,7 +1714,8 @@
     if (offlineSession.get() != nullptr) {
         offlineClient = new CameraOfflineSessionClient(sCameraService,
                 offlineSession, offlineCompositeStreamMap, cameraCb, mClientPackageName,
-                mClientFeatureId, mCameraIdStr, mCameraFacing, mClientPid, mClientUid, mServicePid);
+                mClientFeatureId, mCameraIdStr, mCameraFacing, mOrientation, mClientPid, mClientUid,
+                mServicePid);
         ret = sCameraService->addOfflineClient(mCameraIdStr, offlineClient);
     }
 
diff --git a/services/camera/libcameraservice/api2/CameraDeviceClient.h b/services/camera/libcameraservice/api2/CameraDeviceClient.h
index adedf92..44ffeef 100644
--- a/services/camera/libcameraservice/api2/CameraDeviceClient.h
+++ b/services/camera/libcameraservice/api2/CameraDeviceClient.h
@@ -55,6 +55,7 @@
             const String8& cameraId,
             int api1CameraId,
             int cameraFacing,
+            int sensorOrientation,
             int clientPid,
             uid_t clientUid,
             int servicePid);
@@ -181,6 +182,7 @@
             const std::optional<String16>& clientFeatureId,
             const String8& cameraId,
             int cameraFacing,
+            int sensorOrientation,
             int clientPid,
             uid_t clientUid,
             int servicePid);
diff --git a/services/camera/libcameraservice/api2/CameraOfflineSessionClient.h b/services/camera/libcameraservice/api2/CameraOfflineSessionClient.h
index 5c5fcda..ba49325 100644
--- a/services/camera/libcameraservice/api2/CameraOfflineSessionClient.h
+++ b/services/camera/libcameraservice/api2/CameraOfflineSessionClient.h
@@ -49,13 +49,13 @@
             const sp<ICameraDeviceCallbacks>& remoteCallback,
             const String16& clientPackageName,
             const std::optional<String16>& clientFeatureId,
-            const String8& cameraIdStr, int cameraFacing,
+            const String8& cameraIdStr, int cameraFacing, int sensorOrientation,
             int clientPid, uid_t clientUid, int servicePid) :
             CameraService::BasicClient(
                     cameraService,
                     IInterface::asBinder(remoteCallback),
                     clientPackageName, clientFeatureId,
-                    cameraIdStr, cameraFacing, clientPid, clientUid, servicePid),
+                    cameraIdStr, cameraFacing, sensorOrientation, clientPid, clientUid, servicePid),
             mRemoteCallback(remoteCallback), mOfflineSession(session),
             mCompositeStreamMap(offlineCompositeStreamMap) {}
 
diff --git a/services/camera/libcameraservice/common/Camera2ClientBase.cpp b/services/camera/libcameraservice/common/Camera2ClientBase.cpp
index 6fd8d45..1f79354 100644
--- a/services/camera/libcameraservice/common/Camera2ClientBase.cpp
+++ b/services/camera/libcameraservice/common/Camera2ClientBase.cpp
@@ -51,11 +51,13 @@
         const String8& cameraId,
         int api1CameraId,
         int cameraFacing,
+        int sensorOrientation,
         int clientPid,
         uid_t clientUid,
         int servicePid):
         TClientBase(cameraService, remoteCallback, clientPackageName, clientFeatureId,
-                cameraId, api1CameraId, cameraFacing, clientPid, clientUid, servicePid),
+                cameraId, api1CameraId, cameraFacing, sensorOrientation, clientPid, clientUid,
+                servicePid),
         mSharedCameraCallbacks(remoteCallback),
         mDeviceVersion(cameraService->getDeviceVersion(TClientBase::mCameraIdStr)),
         mDevice(new Camera3Device(cameraId)),
diff --git a/services/camera/libcameraservice/common/Camera2ClientBase.h b/services/camera/libcameraservice/common/Camera2ClientBase.h
index 1ce4393..dab0050 100644
--- a/services/camera/libcameraservice/common/Camera2ClientBase.h
+++ b/services/camera/libcameraservice/common/Camera2ClientBase.h
@@ -52,6 +52,7 @@
                       const String8& cameraId,
                       int api1CameraId,
                       int cameraFacing,
+                      int sensorOrientation,
                       int clientPid,
                       uid_t clientUid,
                       int servicePid);
diff --git a/services/camera/libcameraservice/common/CameraProviderManager.cpp b/services/camera/libcameraservice/common/CameraProviderManager.cpp
index 8942d05..6dffc5d 100644
--- a/services/camera/libcameraservice/common/CameraProviderManager.cpp
+++ b/services/camera/libcameraservice/common/CameraProviderManager.cpp
@@ -474,12 +474,12 @@
 hardware::Return<void> CameraProviderManager::onRegistration(
         const hardware::hidl_string& /*fqName*/,
         const hardware::hidl_string& name,
-        bool /*preexisting*/) {
+        bool preexisting) {
     std::lock_guard<std::mutex> providerLock(mProviderLifecycleLock);
     {
         std::lock_guard<std::mutex> lock(mInterfaceMutex);
 
-        addProviderLocked(name);
+        addProviderLocked(name, preexisting);
     }
 
     sp<StatusListener> listener = getStatusListener();
@@ -1230,33 +1230,53 @@
     return falseRet;
 }
 
-status_t CameraProviderManager::addProviderLocked(const std::string& newProvider) {
-    for (const auto& providerInfo : mProviders) {
-        if (providerInfo->mProviderName == newProvider) {
-            ALOGW("%s: Camera provider HAL with name '%s' already registered", __FUNCTION__,
-                    newProvider.c_str());
-            return ALREADY_EXISTS;
-        }
-    }
-
+status_t CameraProviderManager::tryToInitializeProviderLocked(
+        const std::string& providerName, const sp<ProviderInfo>& providerInfo) {
     sp<provider::V2_4::ICameraProvider> interface;
-    interface = mServiceProxy->tryGetService(newProvider);
+    interface = mServiceProxy->tryGetService(providerName);
 
     if (interface == nullptr) {
         // The interface may not be started yet. In that case, this is not a
         // fatal error.
         ALOGW("%s: Camera provider HAL '%s' is not actually available", __FUNCTION__,
-                newProvider.c_str());
+                providerName.c_str());
         return BAD_VALUE;
     }
 
-    sp<ProviderInfo> providerInfo = new ProviderInfo(newProvider, this);
-    status_t res = providerInfo->initialize(interface, mDeviceState);
-    if (res != OK) {
-        return res;
+    return providerInfo->initialize(interface, mDeviceState);
+}
+
+status_t CameraProviderManager::addProviderLocked(const std::string& newProvider,
+        bool preexisting) {
+    // Multiple instances of the same camera provider can be temporarily present.
+    // Defer initialization of a new instance until the older instance is properly removed.
+    auto providerInstance = newProvider + "-" + std::to_string(mProviderInstanceId);
+    bool providerPresent = false;
+    for (const auto& providerInfo : mProviders) {
+        if (providerInfo->mProviderName == newProvider) {
+            ALOGW("%s: Camera provider HAL with name '%s' already registered",
+                    __FUNCTION__, newProvider.c_str());
+            if (preexisting) {
+                return ALREADY_EXISTS;
+            } else {
+                ALOGW("%s: The new provider instance will get initialized immediately after the"
+                        " currently present instance is removed!", __FUNCTION__);
+                providerPresent = true;
+                break;
+            }
+        }
+    }
+
+    sp<ProviderInfo> providerInfo = new ProviderInfo(newProvider, providerInstance, this);
+    if (!providerPresent) {
+        status_t res = tryToInitializeProviderLocked(newProvider, providerInfo);
+        if (res != OK) {
+            return res;
+        }
     }
 
     mProviders.push_back(providerInfo);
+    mProviderInstanceId++;
 
     return OK;
 }
@@ -1266,12 +1286,14 @@
     std::unique_lock<std::mutex> lock(mInterfaceMutex);
     std::vector<String8> removedDeviceIds;
     status_t res = NAME_NOT_FOUND;
+    std::string removedProviderName;
     for (auto it = mProviders.begin(); it != mProviders.end(); it++) {
-        if ((*it)->mProviderName == provider) {
+        if ((*it)->mProviderInstance == provider) {
             removedDeviceIds.reserve((*it)->mDevices.size());
             for (auto& deviceInfo : (*it)->mDevices) {
                 removedDeviceIds.push_back(String8(deviceInfo->mId.c_str()));
             }
+            removedProviderName = (*it)->mProviderName;
             mProviders.erase(it);
             res = OK;
             break;
@@ -1281,6 +1303,14 @@
         ALOGW("%s: Camera provider HAL with name '%s' is not registered", __FUNCTION__,
                 provider.c_str());
     } else {
+        // Check if there are any newer camera instances from the same provider and try to
+        // initialize.
+        for (const auto& providerInfo : mProviders) {
+            if (providerInfo->mProviderName == removedProviderName) {
+                return tryToInitializeProviderLocked(removedProviderName, providerInfo);
+            }
+        }
+
         // Inform camera service of loss of presence for all the devices from this provider,
         // without lock held for reentrancy
         sp<StatusListener> listener = getStatusListener();
@@ -1289,7 +1319,9 @@
             for (auto& id : removedDeviceIds) {
                 listener->onDeviceStatusChanged(id, CameraDeviceStatus::NOT_PRESENT);
             }
+            lock.lock();
         }
+
     }
     return res;
 }
@@ -1303,8 +1335,10 @@
 
 CameraProviderManager::ProviderInfo::ProviderInfo(
         const std::string &providerName,
+        const std::string &providerInstance,
         CameraProviderManager *manager) :
         mProviderName(providerName),
+        mProviderInstance(providerInstance),
         mProviderTagid(generateVendorTagId(providerName)),
         mUniqueDeviceCount(0),
         mManager(manager) {
@@ -1628,7 +1662,7 @@
 
 status_t CameraProviderManager::ProviderInfo::dump(int fd, const Vector<String16>&) const {
     dprintf(fd, "== Camera Provider HAL %s (v2.%d, %s) static info: %zu devices: ==\n",
-            mProviderName.c_str(),
+            mProviderInstance.c_str(),
             mMinorVersion,
             mIsRemote ? "remote" : "passthrough",
             mDevices.size());
@@ -1944,12 +1978,12 @@
 void CameraProviderManager::ProviderInfo::serviceDied(uint64_t cookie,
         const wp<hidl::base::V1_0::IBase>& who) {
     (void) who;
-    ALOGI("Camera provider '%s' has died; removing it", mProviderName.c_str());
+    ALOGI("Camera provider '%s' has died; removing it", mProviderInstance.c_str());
     if (cookie != mId) {
         ALOGW("%s: Unexpected serviceDied cookie %" PRIu64 ", expected %" PRIu32,
                 __FUNCTION__, cookie, mId);
     }
-    mManager->removeProvider(mProviderName);
+    mManager->removeProvider(mProviderInstance);
 }
 
 status_t CameraProviderManager::ProviderInfo::setUpVendorTags() {
diff --git a/services/camera/libcameraservice/common/CameraProviderManager.h b/services/camera/libcameraservice/common/CameraProviderManager.h
index 12bda9b..5531dd7 100644
--- a/services/camera/libcameraservice/common/CameraProviderManager.h
+++ b/services/camera/libcameraservice/common/CameraProviderManager.h
@@ -365,6 +365,7 @@
             virtual public hardware::hidl_death_recipient
     {
         const std::string mProviderName;
+        const std::string mProviderInstance;
         const metadata_vendor_id_t mProviderTagid;
         int mMinorVersion;
         sp<VendorTagDescriptor> mVendorTagDescriptor;
@@ -379,7 +380,7 @@
 
         sp<hardware::camera::provider::V2_4::ICameraProvider> mSavedInterface;
 
-        ProviderInfo(const std::string &providerName,
+        ProviderInfo(const std::string &providerName, const std::string &providerInstance,
                 CameraProviderManager *manager);
         ~ProviderInfo();
 
@@ -657,7 +658,10 @@
             hardware::hidl_version minVersion = hardware::hidl_version{0,0},
             hardware::hidl_version maxVersion = hardware::hidl_version{1000,0}) const;
 
-    status_t addProviderLocked(const std::string& newProvider);
+    status_t addProviderLocked(const std::string& newProvider, bool preexisting = false);
+
+    status_t tryToInitializeProviderLocked(const std::string& providerName,
+            const sp<ProviderInfo>& providerInfo);
 
     bool isLogicalCameraLocked(const std::string& id, std::vector<std::string>* physicalCameraIds);
 
@@ -666,6 +670,7 @@
 
     bool isValidDeviceLocked(const std::string &id, uint16_t majorVersion) const;
 
+    size_t mProviderInstanceId = 0;
     std::vector<sp<ProviderInfo>> mProviders;
 
     void addProviderToMap(
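
The provider bookkeeping above separates the HAL name (mProviderName) from a per-registration instance name (mProviderInstance = "<name>-<id>"), so a re-registration that races with the old instance's death notification is recorded but not initialized until removeProvider() drops the stale entry. A simplified, self-contained model of that deferral logic, using plain structs instead of ProviderInfo/HIDL; all names here are illustrative:

#include <cstdio>
#include <string>
#include <vector>

struct ProviderEntry {
    std::string name;      // e.g. "test"    (mProviderName)
    std::string instance;  // e.g. "test-0"  (mProviderInstance)
    bool initialized = false;
};

struct ProviderTable {
    std::vector<ProviderEntry> providers;
    size_t nextInstanceId = 0;

    void addProvider(const std::string& name) {
        bool alreadyPresent = false;
        for (const auto& p : providers) {
            if (p.name == name) { alreadyPresent = true; break; }
        }
        ProviderEntry entry{name, name + "-" + std::to_string(nextInstanceId++)};
        entry.initialized = !alreadyPresent;   // defer init while the old instance lingers
        providers.push_back(entry);
    }

    void removeProvider(const std::string& instance) {
        std::string removedName;
        for (auto it = providers.begin(); it != providers.end(); ++it) {
            if (it->instance == instance) { removedName = it->name; providers.erase(it); break; }
        }
        for (auto& p : providers) {
            if (p.name == removedName) { p.initialized = true; break; }  // initialize the deferred one
        }
    }
};

int main() {
    ProviderTable t;
    t.addProvider("test");        // "test-0", initialized immediately
    t.addProvider("test");        // "test-1", deferred (old instance still present)
    t.removeProvider("test-0");   // "test-1" gets initialized now
    std::printf("%s initialized=%d\n", t.providers[0].instance.c_str(), t.providers[0].initialized);
    return 0;
}
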
diff --git a/services/camera/libcameraservice/tests/CameraProviderManagerTest.cpp b/services/camera/libcameraservice/tests/CameraProviderManagerTest.cpp
index 855b5ab..a74fd9d 100644
--- a/services/camera/libcameraservice/tests/CameraProviderManagerTest.cpp
+++ b/services/camera/libcameraservice/tests/CameraProviderManagerTest.cpp
@@ -23,7 +23,9 @@
 #include <android/hardware/camera/device/3.2/ICameraDeviceCallback.h>
 #include <android/hardware/camera/device/3.2/ICameraDeviceSession.h>
 #include <camera_metadata_hidden.h>
+#include <hidl/HidlBinderSupport.h>
 #include <gtest/gtest.h>
+#include <utility>
 
 using namespace android;
 using namespace android::hardware::camera;
@@ -173,6 +175,25 @@
         return hardware::Void();
     }
 
+    virtual ::android::hardware::Return<bool> linkToDeath(
+            const ::android::sp<::android::hardware::hidl_death_recipient>& recipient,
+            uint64_t cookie) {
+        if (mInitialDeathRecipient.get() == nullptr) {
+            mInitialDeathRecipient =
+                std::make_unique<::android::hardware::hidl_binder_death_recipient>(recipient,
+                        cookie, this);
+        }
+        return true;
+    }
+
+    void signalInitialBinderDeathRecipient() {
+        if (mInitialDeathRecipient.get() != nullptr) {
+            mInitialDeathRecipient->binderDied(nullptr /*who*/);
+        }
+    }
+
+    std::unique_ptr<::android::hardware::hidl_binder_death_recipient> mInitialDeathRecipient;
+
     enum MethodNames {
         SET_CALLBACK,
         GET_VENDOR_TAGS,
@@ -567,3 +588,47 @@
     ASSERT_EQ(serviceProxy.mLastRequestedServiceNames.back(), testProviderInstanceName) <<
             "Incorrect instance requested from service manager";
 }
+
+// Test that CameraProviderManager can handle races between provider death notifications and
+// provider registration callbacks
+TEST(CameraProviderManagerTest, BinderDeathRegistrationRaceTest) {
+
+    std::vector<hardware::hidl_string> deviceNames;
+    deviceNames.push_back("device@3.2/test/0");
+    deviceNames.push_back("device@3.2/test/1");
+    hardware::hidl_vec<common::V1_0::VendorTagSection> vendorSection;
+    status_t res;
+
+    sp<CameraProviderManager> providerManager = new CameraProviderManager();
+    sp<TestStatusListener> statusListener = new TestStatusListener();
+    TestInteractionProxy serviceProxy;
+    sp<TestICameraProvider> provider = new TestICameraProvider(deviceNames,
+            vendorSection);
+
+    // Do not set up the provider in the service proxy yet, to test cases where a
+    // HAL does not start correctly
+    res = providerManager->initialize(statusListener, &serviceProxy);
+    ASSERT_EQ(res, OK) << "Unable to initialize provider manager";
+
+    // Now set up provider and trigger a registration
+    serviceProxy.setProvider(provider);
+
+    hardware::hidl_string testProviderFqInterfaceName =
+            "android.hardware.camera.provider@2.4::ICameraProvider";
+    hardware::hidl_string testProviderInstanceName = "test/0";
+    serviceProxy.mManagerNotificationInterface->onRegistration(
+            testProviderFqInterfaceName,
+            testProviderInstanceName, false);
+
+    // Simulate an artificially delayed registration callback that arrives before the
+    // corresponding death notification
+    serviceProxy.mManagerNotificationInterface->onRegistration(
+            testProviderFqInterfaceName,
+            testProviderInstanceName, false);
+
+    provider->signalInitialBinderDeathRecipient();
+
+    auto deviceCount = static_cast<unsigned>(providerManager->getCameraCount().second);
+    ASSERT_EQ(deviceCount, deviceNames.size()) <<
+            "Unexpected number of camera devices";
+}
diff --git a/services/mediametrics/AudioAnalytics.cpp b/services/mediametrics/AudioAnalytics.cpp
index 8e1af20..dbc68b2 100644
--- a/services/mediametrics/AudioAnalytics.cpp
+++ b/services/mediametrics/AudioAnalytics.cpp
@@ -207,8 +207,10 @@
     return { result, ss.str() };
 }
 
-AudioAnalytics::AudioAnalytics()
+AudioAnalytics::AudioAnalytics(const std::shared_ptr<StatsdLog>& statsdLog)
     : mDeliverStatistics(property_get_bool(PROP_AUDIO_ANALYTICS_CLOUD_ENABLED, true))
+    , mStatsdLog(statsdLog)
+    , mAudioPowerUsage(this, statsdLog)
 {
     SetMinimumLogSeverity(android::base::DEBUG); // for LOG().
     ALOGD("%s", __func__);
@@ -416,20 +418,6 @@
         ll -= l;
     }
 
-    if (ll > 0) {
-        // Print the statsd atoms we sent out.
-        const std::string statsd = mStatsdLog.dumpToString("  " /* prefix */, ll - 1);
-        const size_t n = std::count(statsd.begin(), statsd.end(), '\n') + 1; // we control this.
-        if ((size_t)ll >= n) {
-            if (n == 1) {
-                ss << "Statsd atoms: empty or truncated\n";
-            } else {
-                ss << "Statsd atoms:\n" << statsd;
-            }
-            ll -= (int32_t)n;
-        }
-    }
-
     if (ll > 0 && prefix == nullptr) {
         auto [s, l] = mAudioPowerUsage.dump(ll);
         ss << s;
@@ -611,7 +599,8 @@
                     , logSessionIdForStats.c_str()
                     );
             ALOGV("%s: statsd %s", __func__, str.c_str());
-            mAudioAnalytics.mStatsdLog.log("%s", str.c_str());
+            mAudioAnalytics.mStatsdLog->log(
+                    android::util::MEDIAMETRICS_AUDIORECORDDEVICEUSAGE_REPORTED, str);
         }
     } break;
     case THREAD: {
@@ -659,7 +648,8 @@
                 , ENUM_EXTRACT(typeForStats)
             );
             ALOGV("%s: statsd %s", __func__, str.c_str());
-            mAudioAnalytics.mStatsdLog.log("%s", str.c_str());
+            mAudioAnalytics.mStatsdLog->log(
+                    android::util::MEDIAMETRICS_AUDIOTHREADDEVICEUSAGE_REPORTED, str);
         }
     } break;
     case TRACK: {
@@ -779,7 +769,8 @@
                     , logSessionIdForStats.c_str()
                     );
             ALOGV("%s: statsd %s", __func__, str.c_str());
-            mAudioAnalytics.mStatsdLog.log("%s", str.c_str());
+            mAudioAnalytics.mStatsdLog->log(
+                    android::util::MEDIAMETRICS_AUDIOTRACKDEVICEUSAGE_REPORTED, str);
         }
         } break;
     }
@@ -855,7 +846,8 @@
                     , /* connection_count */ 1
                     );
             ALOGV("%s: statsd %s", __func__, str.c_str());
-            mAudioAnalytics.mStatsdLog.log("%s", str.c_str());
+            mAudioAnalytics.mStatsdLog->log(
+                    android::util::MEDIAMETRICS_AUDIODEVICECONNECTION_REPORTED, str);
         }
     }
 }
@@ -908,7 +900,8 @@
                     , /* connection_count */ 1
                     );
             ALOGV("%s: statsd %s", __func__, str.c_str());
-            mAudioAnalytics.mStatsdLog.log("%s", str.c_str());
+            mAudioAnalytics.mStatsdLog->log(
+                    android::util::MEDIAMETRICS_AUDIODEVICECONNECTION_REPORTED, str);
         }
         return;
     }
@@ -934,7 +927,8 @@
                 , /* connection_count */ 1
                 );
         ALOGV("%s: statsd %s", __func__, str.c_str());
-        mAudioAnalytics.mStatsdLog.log("%s", str.c_str());
+        mAudioAnalytics.mStatsdLog->log(
+                android::util::MEDIAMETRICS_AUDIODEVICECONNECTION_REPORTED, str);
     }
 }
 
@@ -1081,7 +1075,7 @@
         ss << " " << fieldsStr;
         std::string str = ss.str();
         ALOGV("%s: statsd %s", __func__, str.c_str());
-        mAudioAnalytics.mStatsdLog.log("%s", str.c_str());
+        mAudioAnalytics.mStatsdLog->log(android::util::MEDIAMETRICS_AAUDIOSTREAM_REPORTED, str);
     }
 }
 
diff --git a/services/mediametrics/AudioAnalytics.h b/services/mediametrics/AudioAnalytics.h
index 07872ef..2b41a95 100644
--- a/services/mediametrics/AudioAnalytics.h
+++ b/services/mediametrics/AudioAnalytics.h
@@ -17,10 +17,10 @@
 #pragma once
 
 #include <android-base/thread_annotations.h>
-#include <audio_utils/SimpleLog.h>
 #include "AnalyticsActions.h"
 #include "AnalyticsState.h"
 #include "AudioPowerUsage.h"
+#include "StatsdLog.h"
 #include "TimedAction.h"
 #include "Wrap.h"
 
@@ -32,7 +32,7 @@
     friend AudioPowerUsage;
 
 public:
-    AudioAnalytics();
+    explicit AudioAnalytics(const std::shared_ptr<StatsdLog>& statsdLog);
     ~AudioAnalytics();
 
     /**
@@ -122,8 +122,7 @@
     SharedPtrWrap<AnalyticsState> mPreviousAnalyticsState;
 
     TimedAction mTimedAction; // locked internally
-
-    SimpleLog mStatsdLog{16 /* log lines */}; // locked internally
+    const std::shared_ptr<StatsdLog> mStatsdLog; // locked internally, ok for multiple threads.
 
     // DeviceUse is a nested class which handles audio device usage accounting.
     // We define this class at the end to ensure prior variables all properly constructed.
@@ -212,7 +211,7 @@
         AudioAnalytics &mAudioAnalytics;
     } mAAudioStreamInfo{*this};
 
-    AudioPowerUsage mAudioPowerUsage{this};
+    AudioPowerUsage mAudioPowerUsage;
 };
 
 } // namespace android::mediametrics
diff --git a/services/mediametrics/AudioPowerUsage.cpp b/services/mediametrics/AudioPowerUsage.cpp
index e584f12..ab74c8e 100644
--- a/services/mediametrics/AudioPowerUsage.cpp
+++ b/services/mediametrics/AudioPowerUsage.cpp
@@ -127,14 +127,13 @@
     return deviceMask;
 }
 
-/* static */
-void AudioPowerUsage::sendItem(const std::shared_ptr<const mediametrics::Item>& item)
+void AudioPowerUsage::sendItem(const std::shared_ptr<const mediametrics::Item>& item) const
 {
     int32_t type;
     if (!item->getInt32(AUDIO_POWER_USAGE_PROP_TYPE, &type)) return;
 
-    int32_t device;
-    if (!item->getInt32(AUDIO_POWER_USAGE_PROP_DEVICE, &device)) return;
+    int32_t audio_device;
+    if (!item->getInt32(AUDIO_POWER_USAGE_PROP_DEVICE, &audio_device)) return;
 
     int64_t duration_ns;
     if (!item->getInt64(AUDIO_POWER_USAGE_PROP_DURATION_NS, &duration_ns)) return;
@@ -142,11 +141,24 @@
     double volume;
     if (!item->getDouble(AUDIO_POWER_USAGE_PROP_VOLUME, &volume)) return;
 
-    (void)android::util::stats_write(android::util::AUDIO_POWER_USAGE_DATA_REPORTED,
-                                         device,
-                                         (int32_t)(duration_ns / NANOS_PER_SECOND),
-                                         (float)volume,
+    const int32_t duration_secs = (int32_t)(duration_ns / NANOS_PER_SECOND);
+    const float average_volume = (float)volume;
+    const int result = android::util::stats_write(android::util::AUDIO_POWER_USAGE_DATA_REPORTED,
+                                         audio_device,
+                                         duration_secs,
+                                         average_volume,
                                          type);
+
+    std::stringstream log;
+    log << "result:" << result << " {"
+            << " mediametrics_audio_power_usage_data_reported:"
+            << android::util::AUDIO_POWER_USAGE_DATA_REPORTED
+            << " audio_device:" << audio_device
+            << " duration_secs:" << duration_secs
+            << " average_volume:" << average_volume
+            << " type:" << type
+            << " }";
+    mStatsdLog->log(android::util::AUDIO_POWER_USAGE_DATA_REPORTED, log.str());
 }
 
 bool AudioPowerUsage::saveAsItem_l(
@@ -360,8 +372,10 @@
     mPrimaryDevice = device;
 }
 
-AudioPowerUsage::AudioPowerUsage(AudioAnalytics *audioAnalytics)
+AudioPowerUsage::AudioPowerUsage(
+        AudioAnalytics *audioAnalytics, const std::shared_ptr<StatsdLog>& statsdLog)
     : mAudioAnalytics(audioAnalytics)
+    , mStatsdLog(statsdLog)
     , mDisabled(property_get_bool(PROP_AUDIO_METRICS_DISABLED, AUDIO_METRICS_DISABLED_DEFAULT))
     , mIntervalHours(property_get_int32(PROP_AUDIO_METRICS_INTERVAL_HR, INTERVAL_HR_DEFAULT))
 {
diff --git a/services/mediametrics/AudioPowerUsage.h b/services/mediametrics/AudioPowerUsage.h
index b705a6a..7021902 100644
--- a/services/mediametrics/AudioPowerUsage.h
+++ b/services/mediametrics/AudioPowerUsage.h
@@ -22,13 +22,15 @@
 #include <mutex>
 #include <thread>
 
+#include "StatsdLog.h"
+
 namespace android::mediametrics {
 
 class AudioAnalytics;
 
 class AudioPowerUsage {
 public:
-    explicit AudioPowerUsage(AudioAnalytics *audioAnalytics);
+    AudioPowerUsage(AudioAnalytics *audioAnalytics, const std::shared_ptr<StatsdLog>& statsdLog);
     ~AudioPowerUsage();
 
     void checkTrackRecord(const std::shared_ptr<const mediametrics::Item>& item, bool isTrack);
@@ -83,12 +85,13 @@
 private:
     bool saveAsItem_l(int32_t device, int64_t duration, int32_t type, double average_vol)
          REQUIRES(mLock);
-    static void sendItem(const std::shared_ptr<const mediametrics::Item>& item);
+    void sendItem(const std::shared_ptr<const mediametrics::Item>& item) const;
     void collect();
     bool saveAsItems_l(int32_t device, int64_t duration, int32_t type, double average_vol)
          REQUIRES(mLock);
 
     AudioAnalytics * const mAudioAnalytics;
+    const std::shared_ptr<StatsdLog> mStatsdLog;  // mStatsdLog is internally locked
     const bool mDisabled;
     const int32_t mIntervalHours;
 
diff --git a/services/mediametrics/AudioTypes.cpp b/services/mediametrics/AudioTypes.cpp
index 1756c98..838cdd5 100644
--- a/services/mediametrics/AudioTypes.cpp
+++ b/services/mediametrics/AudioTypes.cpp
@@ -77,6 +77,7 @@
         {"AUDIO_DEVICE_IN_DEFAULT",                1LL << 28},
         // R values above.
         {"AUDIO_DEVICE_IN_BLE_HEADSET",            1LL << 29},
+        {"AUDIO_DEVICE_IN_HDMI_EARC",              1LL << 30},
     };
     return map;
 }
@@ -123,7 +124,8 @@
         {"AUDIO_DEVICE_OUT_DEFAULT",                   1LL << 30},
         // R values above.
         {"AUDIO_DEVICE_OUT_BLE_HEADSET",               1LL << 31},
-        {"AUDIO_DEVICE_OUT_BLE_SPAEKER",               1LL << 32},
+        {"AUDIO_DEVICE_OUT_BLE_SPEAKER",               1LL << 32},
+        {"AUDIO_DEVICE_OUT_HDMI_EARC",                 1LL << 33},
     };
     return map;
 }
diff --git a/services/mediametrics/MediaMetricsService.cpp b/services/mediametrics/MediaMetricsService.cpp
index 7ee731e..5e672ee 100644
--- a/services/mediametrics/MediaMetricsService.cpp
+++ b/services/mediametrics/MediaMetricsService.cpp
@@ -206,7 +206,7 @@
 
     (void)mAudioAnalytics.submit(sitem, isTrusted);
 
-    (void)dump2Statsd(sitem);  // failure should be logged in function.
+    (void)dump2Statsd(sitem, mStatsdLog);  // failure should be logged in function.
     saveItem(sitem);
     return NO_ERROR;
 }
@@ -308,6 +308,11 @@
             if (lines == linesToDump) {
                 result << "-- some lines may be truncated --\n";
             }
+
+            // Dump the statsd atoms we sent out.
+            result << "Statsd atoms:\n"
+                   << mStatsdLog->dumpToString("  " /* prefix */,
+                           all ? STATSD_LOG_LINES_MAX : STATSD_LOG_LINES_DUMP);
         }
     }
     const std::string str = result.str();
@@ -542,7 +547,7 @@
     std::lock_guard _l(mLock);
     for (auto &item : mPullableItems[key]) {
         if (const auto sitem = item.lock()) {
-            dump2Statsd(sitem, data);
+            dump2Statsd(sitem, data, mStatsdLog);
         }
     }
     mPullableItems[key].clear();
diff --git a/services/mediametrics/MediaMetricsService.h b/services/mediametrics/MediaMetricsService.h
index 6234656..8d0b1cf 100644
--- a/services/mediametrics/MediaMetricsService.h
+++ b/services/mediametrics/MediaMetricsService.h
@@ -124,7 +124,14 @@
 
     std::atomic<int64_t> mItemsSubmitted{}; // accessed outside of lock.
 
-    mediametrics::AudioAnalytics mAudioAnalytics; // mAudioAnalytics is locked internally.
+    // mStatsdLog is locked internally (thread-safe) and shows the last atoms logged
+    static constexpr size_t STATSD_LOG_LINES_MAX = 30; // recent log lines to keep
+    static constexpr size_t STATSD_LOG_LINES_DUMP = 4; // normal number of lines to dump
+    const std::shared_ptr<mediametrics::StatsdLog> mStatsdLog{
+            std::make_shared<mediametrics::StatsdLog>(STATSD_LOG_LINES_MAX)};
+
+    // mAudioAnalytics is locked internally.
+    mediametrics::AudioAnalytics mAudioAnalytics{mStatsdLog};
 
     std::mutex mLock;
     // statistics about our analytics
diff --git a/services/mediametrics/StatsdLog.h b/services/mediametrics/StatsdLog.h
new file mode 100644
index 0000000..e207bac
--- /dev/null
+++ b/services/mediametrics/StatsdLog.h
@@ -0,0 +1,69 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <audio_utils/SimpleLog.h>
+#include <map>
+#include <mutex>
+#include <sstream>
+
+namespace android::mediametrics {
+
+class StatsdLog {
+public:
+    explicit StatsdLog(size_t lines) : mSimpleLog(lines) {}
+
+    void log(int atom, const std::string& string) {
+        {
+            std::lock_guard lock(mLock);
+            ++mCountMap[atom];
+        }
+        mSimpleLog.log("%s", string.c_str());
+    }
+
+    std::string dumpToString(const char *prefix = "", size_t logLines = 0) const {
+        std::stringstream ss;
+
+        {   // first print out the atom counts
+            std::lock_guard lock(mLock);
+
+            size_t col = 0;
+            for (const auto& count : mCountMap) {
+                if (col == 8) {
+                    col = 0;
+                    ss << "\n" << prefix;
+                } else {
+                    ss << " ";
+                }
+                ss << "[ " << count.first << " : " << count.second << " ]";
+                ++col;
+            }
+            ss << "\n";
+        }
+
+        // then print out the log lines
+        ss << mSimpleLog.dumpToString(prefix, logLines);
+        return ss.str();
+    }
+
+private:
+    SimpleLog mSimpleLog; // internally locked
+    std::map<int /* atom */, size_t /* count */> mCountMap GUARDED_BY(mLock); // sorted
+    mutable std::mutex mLock;
+};
+
+} // namespace android::mediametrics
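
A small usage sketch for the new StatsdLog (assuming the in-tree include paths for StatsdLog.h and audio_utils; the atom id 12345 is an arbitrary illustrative value, not a real android::util constant):

#include <cstdio>
#include <memory>

#include "StatsdLog.h"

int main() {
    auto statsdLog = std::make_shared<android::mediametrics::StatsdLog>(16 /* log lines */);
    statsdLog->log(/* atom */ 12345, "result:0 { example_atom:12345 value:1 }");
    statsdLog->log(/* atom */ 12345, "result:0 { example_atom:12345 value:2 }");
    // Dump prints the per-atom counts first ("[ 12345 : 2 ]") and then the recent lines.
    std::printf("%s", statsdLog->dumpToString("  " /* prefix */, 4 /* logLines */).c_str());
    return 0;
}
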
diff --git a/services/mediametrics/StringUtils.h b/services/mediametrics/StringUtils.h
index 37ed173..01034d9 100644
--- a/services/mediametrics/StringUtils.h
+++ b/services/mediametrics/StringUtils.h
@@ -16,6 +16,9 @@
 
 #pragma once
 
+#include <algorithm>
+#include <iomanip>
+#include <sstream>
 #include <string>
 #include <vector>
 
@@ -146,4 +149,23 @@
     return {}; // if not a logSessionId, return an empty string.
 }
 
+inline std::string bytesToString(const std::vector<uint8_t>& bytes, size_t maxSize = SIZE_MAX) {
+    if (bytes.size() == 0) {
+        return "{}";
+    }
+    std::stringstream ss;
+    ss << "{";
+    ss << std::hex << std::setfill('0');
+    maxSize = std::min(maxSize, bytes.size());
+    for (size_t i = 0; i < maxSize; ++i) {
+        ss << " " << std::setw(2) << (int)bytes[i];
+    }
+    if (maxSize != bytes.size()) {
+        ss << " ... }";
+    } else {
+        ss << " }";
+    }
+    return ss.str();
+}
+
 } // namespace android::mediametrics::stringutils
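
Example use of the new bytesToString() helper (assuming StringUtils.h is on the include path; the byte values are arbitrary samples):

#include <cstdint>
#include <cstdio>
#include <vector>

#include "StringUtils.h"

int main() {
    using android::mediametrics::stringutils::bytesToString;
    const std::vector<uint8_t> bytes{0xde, 0xad, 0xbe, 0xef};
    std::printf("%s\n", bytesToString(bytes).c_str());     // "{ de ad be ef }"
    std::printf("%s\n", bytesToString(bytes, 2).c_str());  // "{ de ad ... }"
    return 0;
}
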
diff --git a/services/mediametrics/fuzzer/mediametrics_service_fuzzer.cpp b/services/mediametrics/fuzzer/mediametrics_service_fuzzer.cpp
index 0cb2594..8b0b479 100644
--- a/services/mediametrics/fuzzer/mediametrics_service_fuzzer.cpp
+++ b/services/mediametrics/fuzzer/mediametrics_service_fuzzer.cpp
@@ -320,7 +320,9 @@
 
 void MediaMetricsServiceFuzzer::invokeAudioAnalytics(const uint8_t *data, size_t size) {
     FuzzedDataProvider fdp = FuzzedDataProvider(data, size);
-    android::mediametrics::AudioAnalytics audioAnalytics;
+    std::shared_ptr<android::mediametrics::StatsdLog> statsdLog =
+            std::make_shared<android::mediametrics::StatsdLog>(10);
+    android::mediametrics::AudioAnalytics audioAnalytics{statsdLog};
 
     while (fdp.remaining_bytes()) {
         auto item = std::make_shared<mediametrics::Item>(fdp.ConsumeRandomLengthString().c_str());
diff --git a/services/mediametrics/iface_statsd.cpp b/services/mediametrics/iface_statsd.cpp
index b7c5296..776f878 100644
--- a/services/mediametrics/iface_statsd.cpp
+++ b/services/mediametrics/iface_statsd.cpp
@@ -48,10 +48,7 @@
 // has its own routine to handle this.
 //
 
-bool enabled_statsd = true;
-
-using statsd_pusher = bool (*)(const mediametrics::Item *);
-using statsd_puller = bool (*)(const mediametrics::Item *, AStatsEventList *);
+static bool enabled_statsd = true;
 
 namespace {
 template<typename Handler, typename... Args>
@@ -68,15 +65,17 @@
     }
 
     if (handlers.count(key)) {
-        return (handlers.at(key))(item.get(), args...);
+        return (handlers.at(key))(item, args...);
     }
     return false;
 }
 } // namespace
 
 // give me a record, I'll look at the type and upload appropriately
-bool dump2Statsd(const std::shared_ptr<const mediametrics::Item>& item) {
-    static const std::map<std::string, statsd_pusher> statsd_pushers =
+bool dump2Statsd(
+        const std::shared_ptr<const mediametrics::Item>& item,
+        const std::shared_ptr<mediametrics::StatsdLog>& statsdLog) {
+    static const std::map<std::string, statsd_pusher*> statsd_pushers =
     {
         { "audiopolicy", statsd_audiopolicy },
         { "audiorecord", statsd_audiorecord },
@@ -91,15 +90,16 @@
         { "nuplayer2", statsd_nuplayer },
         { "recorder", statsd_recorder },
     };
-    return dump2StatsdInternal(statsd_pushers, item);
+    return dump2StatsdInternal(statsd_pushers, item, statsdLog);
 }
 
-bool dump2Statsd(const std::shared_ptr<const mediametrics::Item>& item, AStatsEventList* out) {
-    static const std::map<std::string, statsd_puller> statsd_pullers =
+bool dump2Statsd(const std::shared_ptr<const mediametrics::Item>& item, AStatsEventList* out,
+        const std::shared_ptr<mediametrics::StatsdLog>& statsdLog) {
+    static const std::map<std::string, statsd_puller*> statsd_pullers =
     {
         { "mediadrm", statsd_mediadrm_puller },
     };
-    return dump2StatsdInternal(statsd_pullers, item, out);
+    return dump2StatsdInternal(statsd_pullers, item, out, statsdLog);
 }
 
 } // namespace android
diff --git a/services/mediametrics/iface_statsd.h b/services/mediametrics/iface_statsd.h
index 1b6c79a..c2a8b3c 100644
--- a/services/mediametrics/iface_statsd.h
+++ b/services/mediametrics/iface_statsd.h
@@ -22,26 +22,29 @@
 class Item;
 }
 
-extern bool enabled_statsd;
-
+using statsd_pusher = bool (const std::shared_ptr<const mediametrics::Item>& item,
+         const std::shared_ptr<mediametrics::StatsdLog>& statsdLog);
 // component specific dumpers
-extern bool statsd_audiopolicy(const mediametrics::Item *);
-extern bool statsd_audiorecord(const mediametrics::Item *);
-extern bool statsd_audiothread(const mediametrics::Item *);
-extern bool statsd_audiotrack(const mediametrics::Item *);
-extern bool statsd_codec(const mediametrics::Item *);
-extern bool statsd_extractor(const mediametrics::Item *);
-extern bool statsd_mediaparser(const mediametrics::Item *);
-extern bool statsd_nuplayer(const mediametrics::Item *);
-extern bool statsd_recorder(const mediametrics::Item *);
+extern statsd_pusher statsd_audiopolicy;
+extern statsd_pusher statsd_audiorecord;
+extern statsd_pusher statsd_audiothread;
+extern statsd_pusher statsd_audiotrack;
+extern statsd_pusher statsd_codec;
+extern statsd_pusher statsd_extractor;
+extern statsd_pusher statsd_mediaparser;
 
-extern bool statsd_mediadrm(const mediametrics::Item *);
-extern bool statsd_drmmanager(const mediametrics::Item *);
+extern statsd_pusher statsd_nuplayer;
+extern statsd_pusher statsd_recorder;
+extern statsd_pusher statsd_mediadrm;
+extern statsd_pusher statsd_drmmanager;
 
+using statsd_puller = bool (const std::shared_ptr<const mediametrics::Item>& item,
+        AStatsEventList *, const std::shared_ptr<mediametrics::StatsdLog>& statsdLog);
 // component specific pullers
-extern bool statsd_mediadrm_puller(const mediametrics::Item *, AStatsEventList *);
+extern statsd_puller statsd_mediadrm_puller;
 
-bool dump2Statsd(const std::shared_ptr<const mediametrics::Item>& item);
-bool dump2Statsd(const std::shared_ptr<const mediametrics::Item>& item, AStatsEventList* out);
-
+bool dump2Statsd(const std::shared_ptr<const mediametrics::Item>& item,
+        const std::shared_ptr<mediametrics::StatsdLog>& statsdLog);
+bool dump2Statsd(const std::shared_ptr<const mediametrics::Item>& item, AStatsEventList* out,
+        const std::shared_ptr<mediametrics::StatsdLog>& statsdLog);
 } // namespace android
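
With statsd_pusher declared as a function type, each component dumper is simply a function with that exact signature. Below is a hypothetical pusher, statsd_example, sketched under the assumption that iface_statsd.h and StatsdLog.h are available in-tree; it is not part of this change. A real pusher would also be declared in the header as "extern statsd_pusher statsd_example;" and added to the statsd_pushers map in iface_statsd.cpp.

#include <memory>

#include "StatsdLog.h"
#include "iface_statsd.h"

namespace android {

// Hypothetical component dumper matching the statsd_pusher type; a real pusher
// would also extract fields from the item and call android::util::stats_write().
bool statsd_example(const std::shared_ptr<const mediametrics::Item>& item,
        const std::shared_ptr<mediametrics::StatsdLog>& statsdLog)
{
    if (item == nullptr) return false;
    statsdLog->log(/* atom */ 0 /* illustrative id */, "result:0 { example }");
    return true;
}

} // namespace android
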
diff --git a/services/mediametrics/statsd_audiopolicy.cpp b/services/mediametrics/statsd_audiopolicy.cpp
index 6ef2f2c..f44b7c4 100644
--- a/services/mediametrics/statsd_audiopolicy.cpp
+++ b/services/mediametrics/statsd_audiopolicy.cpp
@@ -37,16 +37,16 @@
 
 namespace android {
 
-bool statsd_audiopolicy(const mediametrics::Item *item)
+bool statsd_audiopolicy(const std::shared_ptr<const mediametrics::Item>& item,
+       const std::shared_ptr<mediametrics::StatsdLog>& statsdLog)
 {
     if (item == nullptr) return false;
 
     // these go into the statsd wrapper
-    const nsecs_t timestamp = MediaMetricsService::roundTime(item->getTimestamp());
-    std::string pkgName = item->getPkgName();
-    int64_t pkgVersionCode = item->getPkgVersionCode();
-    int64_t mediaApexVersion = 0;
-
+    const nsecs_t timestamp_nanos = MediaMetricsService::roundTime(item->getTimestamp());
+    const std::string package_name = item->getPkgName();
+    const int64_t package_version_code = item->getPkgVersionCode();
+    const int64_t media_apex_version = 0;
 
     // the rest into our own proto
     //
@@ -60,35 +60,35 @@
         metrics_proto.set_status(status);
     }
     //string char kAudioPolicyRqstSrc[] = "android.media.audiopolicy.rqst.src";
-    std::string rqst_src;
-    if (item->getString("android.media.audiopolicy.rqst.src", &rqst_src)) {
-        metrics_proto.set_request_source(std::move(rqst_src));
+    std::string request_source;
+    if (item->getString("android.media.audiopolicy.rqst.src", &request_source)) {
+        metrics_proto.set_request_source(request_source);
     }
     //string char kAudioPolicyRqstPkg[] = "android.media.audiopolicy.rqst.pkg";
-    std::string rqst_pkg;
-    if (item->getString("android.media.audiopolicy.rqst.pkg", &rqst_pkg)) {
-        metrics_proto.set_request_package(std::move(rqst_pkg));
+    std::string request_package;
+    if (item->getString("android.media.audiopolicy.rqst.pkg", &request_package)) {
+        metrics_proto.set_request_package(request_package);
     }
     //int32 char kAudioPolicyRqstSession[] = "android.media.audiopolicy.rqst.session";
-    int32_t rqst_session = -1;
-    if (item->getInt32("android.media.audiopolicy.rqst.session", &rqst_session)) {
-        metrics_proto.set_request_session(rqst_session);
+    int32_t request_session = -1;
+    if (item->getInt32("android.media.audiopolicy.rqst.session", &request_session)) {
+        metrics_proto.set_request_session(request_session);
     }
     //string char kAudioPolicyRqstDevice[] = "android.media.audiopolicy.rqst.device";
-    std::string rqst_device;
-    if (item->getString("android.media.audiopolicy.rqst.device", &rqst_device)) {
-        metrics_proto.set_request_device(std::move(rqst_device));
+    std::string request_device;
+    if (item->getString("android.media.audiopolicy.rqst.device", &request_device)) {
+        metrics_proto.set_request_device(request_device);
     }
 
     //string char kAudioPolicyActiveSrc[] = "android.media.audiopolicy.active.src";
-    std::string active_src;
-    if (item->getString("android.media.audiopolicy.active.src", &active_src)) {
-        metrics_proto.set_active_source(std::move(active_src));
+    std::string active_source;
+    if (item->getString("android.media.audiopolicy.active.src", &active_source)) {
+        metrics_proto.set_active_source(active_source);
     }
     //string char kAudioPolicyActivePkg[] = "android.media.audiopolicy.active.pkg";
-    std::string active_pkg;
-    if (item->getString("android.media.audiopolicy.active.pkg", &active_pkg)) {
-        metrics_proto.set_active_package(std::move(active_pkg));
+    std::string active_package;
+    if (item->getString("android.media.audiopolicy.active.pkg", &active_package)) {
+        metrics_proto.set_active_package(active_package);
     }
     //int32 char kAudioPolicyActiveSession[] = "android.media.audiopolicy.active.session";
     int32_t active_session = -1;
@@ -98,27 +98,40 @@
     //string char kAudioPolicyActiveDevice[] = "android.media.audiopolicy.active.device";
     std::string active_device;
     if (item->getString("android.media.audiopolicy.active.device", &active_device)) {
-        metrics_proto.set_active_device(std::move(active_device));
+        metrics_proto.set_active_device(active_device);
     }
 
-
     std::string serialized;
     if (!metrics_proto.SerializeToString(&serialized)) {
         ALOGE("Failed to serialize audipolicy metrics");
         return false;
     }
 
-    if (enabled_statsd) {
-        android::util::BytesField bf_serialized( serialized.c_str(), serialized.size());
-        (void)android::util::stats_write(android::util::MEDIAMETRICS_AUDIOPOLICY_REPORTED,
-                                   timestamp, pkgName.c_str(), pkgVersionCode,
-                                   mediaApexVersion,
-                                   bf_serialized);
+    android::util::BytesField bf_serialized(serialized.c_str(), serialized.size());
+    int result = android::util::stats_write(android::util::MEDIAMETRICS_AUDIOPOLICY_REPORTED,
+        timestamp_nanos, package_name.c_str(), package_version_code,
+        media_apex_version,
+        bf_serialized);
+    std::stringstream log;
+    log << "result:" << result << " {"
+            << " mediametrics_audiopolicy_reported:"
+            << android::util::MEDIAMETRICS_AUDIOPOLICY_REPORTED
+            << " timestamp_nanos:" << timestamp_nanos
+            << " package_name:" << package_name
+            << " package_version_code:" << package_version_code
+            << " media_apex_version:" << media_apex_version
 
-    } else {
-        ALOGV("NOT sending: private data (len=%zu)", strlen(serialized.c_str()));
-    }
-
+            << " status:" << status
+            << " request_source:" << request_source
+            << " request_package:" << request_package
+            << " request_session:" << request_session
+            << " request_device:" << request_device
+            << " active_source:" << active_source
+            << " active_package:" << active_package
+            << " active_session:" << active_session
+            << " active_device:" << active_device
+            << " }";
+    statsdLog->log(android::util::MEDIAMETRICS_AUDIOPOLICY_REPORTED, log.str());
     return true;
 }
 
diff --git a/services/mediametrics/statsd_audiorecord.cpp b/services/mediametrics/statsd_audiorecord.cpp
index db809dc..70a67ae 100644
--- a/services/mediametrics/statsd_audiorecord.cpp
+++ b/services/mediametrics/statsd_audiorecord.cpp
@@ -38,16 +38,15 @@
 
 namespace android {
 
-bool statsd_audiorecord(const mediametrics::Item *item)
-{
+bool statsd_audiorecord(const std::shared_ptr<const mediametrics::Item>& item,
+        const std::shared_ptr<mediametrics::StatsdLog>& statsdLog) {
     if (item == nullptr) return false;
 
     // these go into the statsd wrapper
-    const nsecs_t timestamp = MediaMetricsService::roundTime(item->getTimestamp());
-    std::string pkgName = item->getPkgName();
-    int64_t pkgVersionCode = item->getPkgVersionCode();
-    int64_t mediaApexVersion = 0;
-
+    const nsecs_t timestamp_nanos = MediaMetricsService::roundTime(item->getTimestamp());
+    const std::string package_name = item->getPkgName();
+    const int64_t package_version_code = item->getPkgVersionCode();
+    const int64_t media_apex_version = 0;
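+    // media_apex_version is currently hard-coded to 0.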
 
     // the rest into our own proto
     //
@@ -57,12 +56,12 @@
     //
     std::string encoding;
     if (item->getString("android.media.audiorecord.encoding", &encoding)) {
-        metrics_proto.set_encoding(std::move(encoding));
+        metrics_proto.set_encoding(encoding);
     }
 
     std::string source;
     if (item->getString("android.media.audiorecord.source", &source)) {
-        metrics_proto.set_source(std::move(source));
+        metrics_proto.set_source(source);
     }
 
     int32_t latency = -1;
@@ -80,14 +79,14 @@
         metrics_proto.set_channels(channels);
     }
 
-    int64_t createdMs = -1;
-    if (item->getInt64("android.media.audiorecord.createdMs", &createdMs)) {
-        metrics_proto.set_created_millis(createdMs);
+    int64_t created_millis = -1;
+    if (item->getInt64("android.media.audiorecord.createdMs", &created_millis)) {
+        metrics_proto.set_created_millis(created_millis);
     }
 
-    int64_t durationMs = -1;
-    if (item->getInt64("android.media.audiorecord.durationMs", &durationMs)) {
-        metrics_proto.set_duration_millis(durationMs);
+    int64_t duration_millis = -1;
+    if (item->getInt64("android.media.audiorecord.durationMs", &duration_millis)) {
+        metrics_proto.set_duration_millis(duration_millis);
     }
 
     int32_t count = -1;
@@ -95,44 +94,43 @@
         metrics_proto.set_count(count);
     }
 
-    int32_t errcode = -1;
-    if (item->getInt32("android.media.audiorecord.errcode", &errcode)) {
-        metrics_proto.set_error_code(errcode);
-    } else if (item->getInt32("android.media.audiorecord.lastError.code", &errcode)) {
-        metrics_proto.set_error_code(errcode);
+    int32_t error_code = -1;
+    if (item->getInt32("android.media.audiorecord.errcode", &error_code)) {
+        metrics_proto.set_error_code(error_code);
+    } else if (item->getInt32("android.media.audiorecord.lastError.code", &error_code)) {
+        metrics_proto.set_error_code(error_code);
     }
 
-    std::string errfunc;
-    if (item->getString("android.media.audiorecord.errfunc", &errfunc)) {
-        metrics_proto.set_error_function(std::move(errfunc));
-    } else if (item->getString("android.media.audiorecord.lastError.at", &errfunc)) {
-        metrics_proto.set_error_function(std::move(errfunc));
+    std::string error_function;
+    if (item->getString("android.media.audiorecord.errfunc", &error_function)) {
+        metrics_proto.set_error_function(error_function);
+    } else if (item->getString("android.media.audiorecord.lastError.at", &error_function)) {
+        metrics_proto.set_error_function(error_function);
     }
 
-    // portId (int32)
     int32_t port_id = -1;
     if (item->getInt32("android.media.audiorecord.portId", &port_id)) {
-        metrics_proto.set_port_id(count);
+        metrics_proto.set_port_id(port_id);
     }
-    // frameCount (int32)
-    int32_t frameCount = -1;
-    if (item->getInt32("android.media.audiorecord.frameCount", &frameCount)) {
-        metrics_proto.set_frame_count(frameCount);
+
+    int32_t frame_count = -1;
+    if (item->getInt32("android.media.audiorecord.frameCount", &frame_count)) {
+        metrics_proto.set_frame_count(frame_count);
     }
-    // attributes (string)
+
     std::string attributes;
     if (item->getString("android.media.audiorecord.attributes", &attributes)) {
-        metrics_proto.set_attributes(std::move(attributes));
+        metrics_proto.set_attributes(attributes);
     }
-    // channelMask (int64)
-    int64_t channelMask = -1;
-    if (item->getInt64("android.media.audiorecord.channelMask", &channelMask)) {
-        metrics_proto.set_channel_mask(channelMask);
+
+    int64_t channel_mask = -1;
+    if (item->getInt64("android.media.audiorecord.channelMask", &channel_mask)) {
+        metrics_proto.set_channel_mask(channel_mask);
     }
-    // startcount (int64)
-    int64_t startcount = -1;
-    if (item->getInt64("android.media.audiorecord.startcount", &startcount)) {
-        metrics_proto.set_start_count(startcount);
+
+    int64_t start_count = -1;
+    if (item->getInt64("android.media.audiorecord.startcount", &start_count)) {
+        metrics_proto.set_start_count(start_count);
     }
 
     std::string serialized;
@@ -145,21 +143,44 @@
     // log_session_id (string)
     std::string logSessionId;
     (void)item->getString("android.media.audiorecord.logSessionId", &logSessionId);
-    const auto logSessionIdForStats =
+    const auto log_session_id =
             mediametrics::stringutils::sanitizeLogSessionId(logSessionId);
 
-    if (enabled_statsd) {
-        android::util::BytesField bf_serialized( serialized.c_str(), serialized.size());
-        (void)android::util::stats_write(android::util::MEDIAMETRICS_AUDIORECORD_REPORTED,
-                                   timestamp, pkgName.c_str(), pkgVersionCode,
-                                   mediaApexVersion,
-                                   bf_serialized,
-                                   logSessionIdForStats.c_str());
+    android::util::BytesField bf_serialized( serialized.c_str(), serialized.size());
+    int result = android::util::stats_write(android::util::MEDIAMETRICS_AUDIORECORD_REPORTED,
+        timestamp_nanos, package_name.c_str(), package_version_code,
+        media_apex_version,
+        bf_serialized,
+        log_session_id.c_str());
+    std::stringstream log;
+    log << "result:" << result << " {"
+            << " mediametrics_audiorecord_reported:"
+            << android::util::MEDIAMETRICS_AUDIORECORD_REPORTED
+            << " timestamp_nanos:" << timestamp_nanos
+            << " package_name:" << package_name
+            << " package_version_code:" << package_version_code
+            << " media_apex_version:" << media_apex_version
 
-    } else {
-        ALOGV("NOT sending: private data (len=%zu)", strlen(serialized.c_str()));
-    }
+            << " encoding:" << encoding
+            << " source:" << source
+            << " latency:" << latency
+            << " samplerate:" << samplerate
+            << " channels:" << channels
+            << " created_millis:" << created_millis
+            << " duration_millis:" << duration_millis
+            << " count:" << count
+            << " error_code:" << error_code
+            << " error_function:" << error_function
 
+            << " port_id:" << port_id
+            << " frame_count:" << frame_count
+            << " attributes:" << attributes
+            << " channel_mask:" << channel_mask
+            << " start_count:" << start_count
+
+            << " log_session_id:" << log_session_id
+            << " }";
+    statsdLog->log(android::util::MEDIAMETRICS_AUDIORECORD_REPORTED, log.str());
     return true;
 }
 
diff --git a/services/mediametrics/statsd_audiothread.cpp b/services/mediametrics/statsd_audiothread.cpp
index 2ad2562..34cc923 100644
--- a/services/mediametrics/statsd_audiothread.cpp
+++ b/services/mediametrics/statsd_audiothread.cpp
@@ -37,16 +37,16 @@
 
 namespace android {
 
-bool statsd_audiothread(const mediametrics::Item *item)
+bool statsd_audiothread(const std::shared_ptr<const mediametrics::Item>& item,
+        const std::shared_ptr<mediametrics::StatsdLog>& statsdLog)
 {
     if (item == nullptr) return false;
 
     // these go into the statsd wrapper
-    const nsecs_t timestamp = MediaMetricsService::roundTime(item->getTimestamp());
-    std::string pkgName = item->getPkgName();
-    int64_t pkgVersionCode = item->getPkgVersionCode();
-    int64_t mediaApexVersion = 0;
-
+    const nsecs_t timestamp_nanos = MediaMetricsService::roundTime(item->getTimestamp());
+    const std::string package_name = item->getPkgName();
+    const int64_t package_version_code = item->getPkgVersionCode();
+    const int64_t media_apex_version = 0;
 
     // the rest into our own proto
     //
@@ -68,17 +68,17 @@
     if (item->getInt32(MM_PREFIX "samplerate", &samplerate)) {
         metrics_proto.set_samplerate(samplerate);
     }
-    std::string workhist;
-    if (item->getString(MM_PREFIX "workMs.hist", &workhist)) {
-        metrics_proto.set_work_millis_hist(std::move(workhist));
+    std::string work_millis_hist;
+    if (item->getString(MM_PREFIX "workMs.hist", &work_millis_hist)) {
+        metrics_proto.set_work_millis_hist(work_millis_hist);
     }
-    std::string latencyhist;
-    if (item->getString(MM_PREFIX "latencyMs.hist", &latencyhist)) {
-        metrics_proto.set_latency_millis_hist(std::move(latencyhist));
+    std::string latency_millis_hist;
+    if (item->getString(MM_PREFIX "latencyMs.hist", &latency_millis_hist)) {
+        metrics_proto.set_latency_millis_hist(latency_millis_hist);
     }
-    std::string warmuphist;
-    if (item->getString(MM_PREFIX "warmupMs.hist", &warmuphist)) {
-        metrics_proto.set_warmup_millis_hist(std::move(warmuphist));
+    std::string warmup_millis_hist;
+    if (item->getString(MM_PREFIX "warmupMs.hist", &warmup_millis_hist)) {
+        metrics_proto.set_warmup_millis_hist(warmup_millis_hist);
     }
     int64_t underruns = -1;
     if (item->getInt64(MM_PREFIX "underruns", &underruns)) {
@@ -88,101 +88,99 @@
     if (item->getInt64(MM_PREFIX "overruns", &overruns)) {
         metrics_proto.set_overruns(overruns);
     }
-    int64_t activeMs = -1;
-    if (item->getInt64(MM_PREFIX "activeMs", &activeMs)) {
-        metrics_proto.set_active_millis(activeMs);
+    int64_t active_millis = -1;
+    if (item->getInt64(MM_PREFIX "activeMs", &active_millis)) {
+        metrics_proto.set_active_millis(active_millis);
     }
-    int64_t durationMs = -1;
-    if (item->getInt64(MM_PREFIX "durationMs", &durationMs)) {
-        metrics_proto.set_duration_millis(durationMs);
+    int64_t duration_millis = -1;
+    if (item->getInt64(MM_PREFIX "durationMs", &duration_millis)) {
+        metrics_proto.set_duration_millis(duration_millis);
     }
 
-    // item->setInt32(MM_PREFIX "id", (int32_t)mId); // IO handle
     int32_t id = -1;
     if (item->getInt32(MM_PREFIX "id", &id)) {
         metrics_proto.set_id(id);
     }
-    // item->setInt32(MM_PREFIX "portId", (int32_t)mPortId);
+
     int32_t port_id = -1;
-    if (item->getInt32(MM_PREFIX "portId", &id)) {
+    if (item->getInt32(MM_PREFIX "portId", &port_id)) {
         metrics_proto.set_port_id(port_id);
     }
     // item->setCString(MM_PREFIX "type", threadTypeToString(mType));
     std::string type;
     if (item->getString(MM_PREFIX "type", &type)) {
-        metrics_proto.set_type(std::move(type));
+        metrics_proto.set_type(type);
     }
-    // item->setInt32(MM_PREFIX "sampleRate", (int32_t)mSampleRate);
+
     int32_t sample_rate = -1;
     if (item->getInt32(MM_PREFIX "sampleRate", &sample_rate)) {
         metrics_proto.set_sample_rate(sample_rate);
     }
-    // item->setInt64(MM_PREFIX "channelMask", (int64_t)mChannelMask);
+
     int32_t channel_mask = -1;
     if (item->getInt32(MM_PREFIX "channelMask", &channel_mask)) {
         metrics_proto.set_channel_mask(channel_mask);
     }
-    // item->setCString(MM_PREFIX "encoding", toString(mFormat).c_str());
+
     std::string encoding;
     if (item->getString(MM_PREFIX "encoding", &encoding)) {
-        metrics_proto.set_encoding(std::move(encoding));
+        metrics_proto.set_encoding(encoding);
     }
-    // item->setInt32(MM_PREFIX "frameCount", (int32_t)mFrameCount);
+
     int32_t frame_count = -1;
     if (item->getInt32(MM_PREFIX "frameCount", &frame_count)) {
         metrics_proto.set_frame_count(frame_count);
     }
-    // item->setCString(MM_PREFIX "outDevice", toString(mOutDevice).c_str());
-    std::string outDevice;
-    if (item->getString(MM_PREFIX "outDevice", &outDevice)) {
-        metrics_proto.set_output_device(std::move(outDevice));
-    }
-    // item->setCString(MM_PREFIX "inDevice", toString(mInDevice).c_str());
-    std::string inDevice;
-    if (item->getString(MM_PREFIX "inDevice", &inDevice)) {
-        metrics_proto.set_input_device(std::move(inDevice));
-    }
-    // item->setDouble(MM_PREFIX "ioJitterMs.mean", mIoJitterMs.getMean());
-    double iojitters_ms_mean = -1;
-    if (item->getDouble(MM_PREFIX "ioJitterMs.mean", &iojitters_ms_mean)) {
-        metrics_proto.set_io_jitter_mean_millis(iojitters_ms_mean);
-    }
-    // item->setDouble(MM_PREFIX "ioJitterMs.std", mIoJitterMs.getStdDev());
-    double iojitters_ms_std = -1;
-    if (item->getDouble(MM_PREFIX "ioJitterMs.std", &iojitters_ms_std)) {
-        metrics_proto.set_io_jitter_stddev_millis(iojitters_ms_std);
-    }
-    // item->setDouble(MM_PREFIX "processTimeMs.mean", mProcessTimeMs.getMean());
-    double process_time_ms_mean = -1;
-    if (item->getDouble(MM_PREFIX "processTimeMs.mean", &process_time_ms_mean)) {
-        metrics_proto.set_process_time_mean_millis(process_time_ms_mean);
-    }
-    // item->setDouble(MM_PREFIX "processTimeMs.std", mProcessTimeMs.getStdDev());
-    double process_time_ms_std = -1;
-    if (item->getDouble(MM_PREFIX "processTimeMs.std", &process_time_ms_std)) {
-        metrics_proto.set_process_time_stddev_millis(process_time_ms_std);
-    }
-    // item->setDouble(MM_PREFIX "timestampJitterMs.mean", tsjitter.getMean());
-    double timestamp_jitter_ms_mean = -1;
-    if (item->getDouble(MM_PREFIX "timestampJitterMs.mean", &timestamp_jitter_ms_mean)) {
-        metrics_proto.set_timestamp_jitter_mean_millis(timestamp_jitter_ms_mean);
-    }
-    // item->setDouble(MM_PREFIX "timestampJitterMs.std", tsjitter.getStdDev());
-    double timestamp_jitter_ms_stddev = -1;
-    if (item->getDouble(MM_PREFIX "timestampJitterMs.std", &timestamp_jitter_ms_stddev)) {
-        metrics_proto.set_timestamp_jitter_stddev_millis(timestamp_jitter_ms_stddev);
-    }
-    // item->setDouble(MM_PREFIX "latencyMs.mean", mLatencyMs.getMean());
-    double latency_ms_mean = -1;
-    if (item->getDouble(MM_PREFIX "latencyMs.mean", &latency_ms_mean)) {
-        metrics_proto.set_latency_mean_millis(latency_ms_mean);
-    }
-    // item->setDouble(MM_PREFIX "latencyMs.std", mLatencyMs.getStdDev());
-    double latency_ms_stddev = -1;
-    if (item->getDouble(MM_PREFIX "latencyMs.std", &latency_ms_stddev)) {
-        metrics_proto.set_latency_stddev_millis(latency_ms_stddev);
+
+    std::string output_device;
+    if (item->getString(MM_PREFIX "outDevice", &output_device)) {
+        metrics_proto.set_output_device(output_device);
     }
 
+    std::string input_device;
+    if (item->getString(MM_PREFIX "inDevice", &input_device)) {
+        metrics_proto.set_input_device(input_device);
+    }
+
+    double io_jitter_mean_millis = -1;
+    if (item->getDouble(MM_PREFIX "ioJitterMs.mean", &io_jitter_mean_millis)) {
+        metrics_proto.set_io_jitter_mean_millis(io_jitter_mean_millis);
+    }
+
+    double io_jitter_stddev_millis = -1;
+    if (item->getDouble(MM_PREFIX "ioJitterMs.std", &io_jitter_stddev_millis)) {
+        metrics_proto.set_io_jitter_stddev_millis(io_jitter_stddev_millis);
+    }
+
+    double process_time_mean_millis = -1;
+    if (item->getDouble(MM_PREFIX "processTimeMs.mean", &process_time_mean_millis)) {
+        metrics_proto.set_process_time_mean_millis(process_time_mean_millis);
+    }
+
+    double process_time_stddev_millis = -1;
+    if (item->getDouble(MM_PREFIX "processTimeMs.std", &process_time_stddev_millis)) {
+        metrics_proto.set_process_time_stddev_millis(process_time_stddev_millis);
+    }
+
+    double timestamp_jitter_mean_millis = -1;
+    if (item->getDouble(MM_PREFIX "timestampJitterMs.mean", &timestamp_jitter_mean_millis)) {
+        metrics_proto.set_timestamp_jitter_mean_millis(timestamp_jitter_mean_millis);
+    }
+
+    double timestamp_jitter_stddev_millis = -1;
+    if (item->getDouble(MM_PREFIX "timestampJitterMs.std", &timestamp_jitter_stddev_millis)) {
+        metrics_proto.set_timestamp_jitter_stddev_millis(timestamp_jitter_stddev_millis);
+    }
+
+    double latency_mean_millis = -1;
+    if (item->getDouble(MM_PREFIX "latencyMs.mean", &latency_mean_millis)) {
+        metrics_proto.set_latency_mean_millis(latency_mean_millis);
+    }
+
+    double latency_stddev_millis = -1;
+    if (item->getDouble(MM_PREFIX "latencyMs.std", &latency_stddev_millis)) {
+        metrics_proto.set_latency_stddev_millis(latency_stddev_millis);
+    }
 
     std::string serialized;
     if (!metrics_proto.SerializeToString(&serialized)) {
@@ -190,17 +188,50 @@
         return false;
     }
 
-    if (enabled_statsd) {
-        android::util::BytesField bf_serialized( serialized.c_str(), serialized.size());
-        (void)android::util::stats_write(android::util::MEDIAMETRICS_AUDIOTHREAD_REPORTED,
-                                   timestamp, pkgName.c_str(), pkgVersionCode,
-                                   mediaApexVersion,
-                                   bf_serialized);
+    android::util::BytesField bf_serialized( serialized.c_str(), serialized.size());
+    int result = android::util::stats_write(android::util::MEDIAMETRICS_AUDIOTHREAD_REPORTED,
+        timestamp_nanos, package_name.c_str(), package_version_code,
+        media_apex_version,
+        bf_serialized);
+    std::stringstream log;
+    log << "result:" << result << " {"
+            << " mediametrics_audiothread_reported:"
+            << android::util::MEDIAMETRICS_AUDIOTHREAD_REPORTED
+            << " timestamp_nanos:" << timestamp_nanos
+            << " package_name:" << package_name
+            << " package_version_code:" << package_version_code
+            << " media_apex_version:" << media_apex_version
 
-    } else {
-        ALOGV("NOT sending: private data (len=%zu)", strlen(serialized.c_str()));
-    }
+            << " type:" << type
+            << " framecount:" << framecount
+            << " samplerate:" << samplerate
+            << " work_millis_hist:" << work_millis_hist
+            << " latency_millis_hist:" << latency_millis_hist
+            << " warmup_millis_hist:" << warmup_millis_hist
+            << " underruns:" << underruns
+            << " overruns:" << overruns
+            << " active_millis:" << active_millis
+            << " duration_millis:" << duration_millis
 
+            << " id:" << id
+            << " port_id:" << port_id
+            << " sample_rate:" << sample_rate
+            << " channel_mask:" << channel_mask
+            << " encoding:" << encoding
+            << " frame_count:" << frame_count
+            << " output_device:" << output_device
+            << " input_device:" << input_device
+            << " io_jitter_mean_millis:" << io_jitter_mean_millis
+            << " io_jitter_stddev_millis:" << io_jitter_stddev_millis
+
+            << " process_time_mean_millis:" << process_time_mean_millis
+            << " process_time_stddev_millis:" << process_time_stddev_millis
+            << " timestamp_jitter_mean_millis:" << timestamp_jitter_mean_millis
+            << " timestamp_jitter_stddev_millis:" << timestamp_jitter_stddev_millis
+            << " latency_mean_millis:" << latency_mean_millis
+            << " latency_stddev_millis:" << latency_stddev_millis
+            << " }";
+    statsdLog->log(android::util::MEDIAMETRICS_AUDIOTHREAD_REPORTED, log.str());
     return true;
 }
 
diff --git a/services/mediametrics/statsd_audiotrack.cpp b/services/mediametrics/statsd_audiotrack.cpp
index fd809c8..fe269a1 100644
--- a/services/mediametrics/statsd_audiotrack.cpp
+++ b/services/mediametrics/statsd_audiotrack.cpp
@@ -38,16 +38,16 @@
 
 namespace android {
 
-bool statsd_audiotrack(const mediametrics::Item *item)
+bool statsd_audiotrack(const std::shared_ptr<const mediametrics::Item>& item,
+        const std::shared_ptr<mediametrics::StatsdLog>& statsdLog)
 {
     if (item == nullptr) return false;
 
     // these go into the statsd wrapper
-    const nsecs_t timestamp = MediaMetricsService::roundTime(item->getTimestamp());
-    std::string pkgName = item->getPkgName();
-    int64_t pkgVersionCode = item->getPkgVersionCode();
-    int64_t mediaApexVersion = 0;
-
+    const nsecs_t timestamp_nanos = MediaMetricsService::roundTime(item->getTimestamp());
+    const std::string package_name = item->getPkgName();
+    const int64_t package_version_code = item->getPkgVersionCode();
+    const int64_t media_apex_version = 0;
 
     // the rest into our own proto
     //
@@ -58,52 +58,52 @@
 
     // static constexpr char kAudioTrackStreamType[] = "android.media.audiotrack.streamtype";
     // optional string streamType;
-    std::string streamtype;
-    if (item->getString("android.media.audiotrack.streamtype", &streamtype)) {
-        metrics_proto.set_stream_type(std::move(streamtype));
+    std::string stream_type;
+    if (item->getString("android.media.audiotrack.streamtype", &stream_type)) {
+        metrics_proto.set_stream_type(stream_type);
     }
 
     // static constexpr char kAudioTrackContentType[] = "android.media.audiotrack.type";
     // optional string contentType;
-    std::string contenttype;
-    if (item->getString("android.media.audiotrack.type", &contenttype)) {
-        metrics_proto.set_content_type(std::move(contenttype));
+    std::string content_type;
+    if (item->getString("android.media.audiotrack.type", &content_type)) {
+        metrics_proto.set_content_type(content_type);
     }
 
     // static constexpr char kAudioTrackUsage[] = "android.media.audiotrack.usage";
     // optional string trackUsage;
-    std::string trackusage;
-    if (item->getString("android.media.audiotrack.usage", &trackusage)) {
-        metrics_proto.set_track_usage(std::move(trackusage));
+    std::string track_usage;
+    if (item->getString("android.media.audiotrack.usage", &track_usage)) {
+        metrics_proto.set_track_usage(track_usage);
     }
 
     // static constexpr char kAudioTrackSampleRate[] = "android.media.audiotrack.samplerate";
     // optional int32 samplerate;
-    int32_t samplerate = -1;
-    if (item->getInt32("android.media.audiotrack.samplerate", &samplerate)) {
-        metrics_proto.set_sample_rate(samplerate);
+    int32_t sample_rate = -1;
+    if (item->getInt32("android.media.audiotrack.samplerate", &sample_rate)) {
+        metrics_proto.set_sample_rate(sample_rate);
     }
 
     // static constexpr char kAudioTrackChannelMask[] = "android.media.audiotrack.channelmask";
     // optional int64 channelMask;
-    int64_t channelMask = -1;
-    if (item->getInt64("android.media.audiotrack.channelmask", &channelMask)) {
-        metrics_proto.set_channel_mask(channelMask);
+    int64_t channel_mask = -1;
+    if (item->getInt64("android.media.audiotrack.channelmask", &channel_mask)) {
+        metrics_proto.set_channel_mask(channel_mask);
     }
 
     // NB: These are not yet exposed as public Java API constants.
     // static constexpr char kAudioTrackUnderrunFrames[] = "android.media.audiotrack.underrunframes";
     // optional int32 underrunframes;
-    int32_t underrunframes = -1;
-    if (item->getInt32("android.media.audiotrack.underrunframes", &underrunframes)) {
-        metrics_proto.set_underrun_frames(underrunframes);
+    int32_t underrun_frames = -1;
+    if (item->getInt32("android.media.audiotrack.underrunframes", &underrun_frames)) {
+        metrics_proto.set_underrun_frames(underrun_frames);
     }
 
     // static constexpr char kAudioTrackStartupGlitch[] = "android.media.audiotrack.glitch.startup";
     // optional int32 startupglitch;
-    int32_t startupglitch = -1;
-    if (item->getInt32("android.media.audiotrack.glitch.startup", &startupglitch)) {
-        metrics_proto.set_startup_glitch(startupglitch);
+    int32_t startup_glitch = -1;
+    if (item->getInt32("android.media.audiotrack.glitch.startup", &startup_glitch)) {
+        metrics_proto.set_startup_glitch(startup_glitch);
     }
 
     // portId (int32)
@@ -114,7 +114,7 @@
     // encoding (string)
     std::string encoding;
     if (item->getString("android.media.audiotrack.encoding", &encoding)) {
-        metrics_proto.set_encoding(std::move(encoding));
+        metrics_proto.set_encoding(encoding);
     }
     // frameCount (int32)
     int32_t frame_count = -1;
@@ -124,7 +124,7 @@
     // attributes (string)
     std::string attributes;
     if (item->getString("android.media.audiotrack.attributes", &attributes)) {
-        metrics_proto.set_attributes(std::move(attributes));
+        metrics_proto.set_attributes(attributes);
     }
 
     std::string serialized;
@@ -137,21 +137,40 @@
     // log_session_id (string)
     std::string logSessionId;
     (void)item->getString("android.media.audiotrack.logSessionId", &logSessionId);
-    const auto logSessionIdForStats =
+    const auto log_session_id =
             mediametrics::stringutils::sanitizeLogSessionId(logSessionId);
 
-    if (enabled_statsd) {
-        android::util::BytesField bf_serialized( serialized.c_str(), serialized.size());
-        (void)android::util::stats_write(android::util::MEDIAMETRICS_AUDIOTRACK_REPORTED,
-                                   timestamp, pkgName.c_str(), pkgVersionCode,
-                                   mediaApexVersion,
-                                   bf_serialized,
-                                   logSessionIdForStats.c_str());
+    android::util::BytesField bf_serialized( serialized.c_str(), serialized.size());
+    int result = android::util::stats_write(android::util::MEDIAMETRICS_AUDIOTRACK_REPORTED,
+                               timestamp_nanos, package_name.c_str(), package_version_code,
+                               media_apex_version,
+                               bf_serialized,
+                               log_session_id.c_str());
+    std::stringstream log;
+    log << "result:" << result << " {"
+            << " mediametrics_audiotrack_reported:"
+            << android::util::MEDIAMETRICS_AUDIOTRACK_REPORTED
+            << " timestamp_nanos:" << timestamp_nanos
+            << " package_name:" << package_name
+            << " package_version_code:" << package_version_code
+            << " media_apex_version:" << media_apex_version
 
-    } else {
-        ALOGV("NOT sending: private data (len=%zu)", strlen(serialized.c_str()));
-    }
+            << " stream_type:" << stream_type
+            << " content_type:" << content_type
+            << " track_usage:" << track_usage
+            << " sample_rate:" << sample_rate
+            << " channel_mask:" << channel_mask
+            << " underrun_frames:" << underrun_frames
+            << " startup_glitch:" << startup_glitch
+            << " port_id:" << port_id
+            << " encoding:" << encoding
+            << " frame_count:" << frame_count
 
+            << " attributes:" << attributes
+
+            << " log_session_id:" << log_session_id
+            << " }";
+    statsdLog->log(android::util::MEDIAMETRICS_AUDIOTRACK_REPORTED, log.str());
     return true;
 }
 
diff --git a/services/mediametrics/statsd_codec.cpp b/services/mediametrics/statsd_codec.cpp
index 1c5ab77..8a2158f 100644
--- a/services/mediametrics/statsd_codec.cpp
+++ b/services/mediametrics/statsd_codec.cpp
@@ -38,16 +38,16 @@
 
 namespace android {
 
-bool statsd_codec(const mediametrics::Item *item)
+bool statsd_codec(const std::shared_ptr<const mediametrics::Item>& item,
+        const std::shared_ptr<mediametrics::StatsdLog>& statsdLog)
 {
     if (item == nullptr) return false;
 
     // these go into the statsd wrapper
-    const nsecs_t timestamp = MediaMetricsService::roundTime(item->getTimestamp());
-    std::string pkgName = item->getPkgName();
-    int64_t pkgVersionCode = item->getPkgVersionCode();
-    int64_t mediaApexVersion = 0;
-
+    const nsecs_t timestamp_nanos = MediaMetricsService::roundTime(item->getTimestamp());
+    const std::string package_name = item->getPkgName();
+    const int64_t package_version_code = item->getPkgVersionCode();
+    const int64_t media_apex_version = 0;
 
     // the rest into our own proto
     //
@@ -58,39 +58,39 @@
     // android.media.mediacodec.codec   string
     std::string codec;
     if (item->getString("android.media.mediacodec.codec", &codec)) {
-        metrics_proto.set_codec(std::move(codec));
+        metrics_proto.set_codec(codec);
     }
-    // android.media.mediacodec.mime    string
+
     std::string mime;
     if (item->getString("android.media.mediacodec.mime", &mime)) {
-        metrics_proto.set_mime(std::move(mime));
+        metrics_proto.set_mime(mime);
     }
-    // android.media.mediacodec.mode    string
+
     std::string mode;
     if ( item->getString("android.media.mediacodec.mode", &mode)) {
-        metrics_proto.set_mode(std::move(mode));
+        metrics_proto.set_mode(mode);
     }
-    // android.media.mediacodec.encoder int32
+
     int32_t encoder = -1;
     if ( item->getInt32("android.media.mediacodec.encoder", &encoder)) {
         metrics_proto.set_encoder(encoder);
     }
-    // android.media.mediacodec.secure  int32
+
     int32_t secure = -1;
     if ( item->getInt32("android.media.mediacodec.secure", &secure)) {
         metrics_proto.set_secure(secure);
     }
-    // android.media.mediacodec.width   int32
+
     int32_t width = -1;
     if ( item->getInt32("android.media.mediacodec.width", &width)) {
         metrics_proto.set_width(width);
     }
-    // android.media.mediacodec.height  int32
+
     int32_t height = -1;
     if ( item->getInt32("android.media.mediacodec.height", &height)) {
         metrics_proto.set_height(height);
     }
-    // android.media.mediacodec.rotation-degrees        int32
+
     int32_t rotation = -1;
     if ( item->getInt32("android.media.mediacodec.rotation-degrees", &rotation)) {
         metrics_proto.set_rotation(rotation);
@@ -100,90 +100,89 @@
     if ( item->getInt32("android.media.mediacodec.crypto", &crypto)) {
         metrics_proto.set_crypto(crypto);
     }
-    // android.media.mediacodec.profile int32
+
     int32_t profile = -1;
     if ( item->getInt32("android.media.mediacodec.profile", &profile)) {
         metrics_proto.set_profile(profile);
     }
-    // android.media.mediacodec.level   int32
+
     int32_t level = -1;
     if ( item->getInt32("android.media.mediacodec.level", &level)) {
         metrics_proto.set_level(level);
     }
-    // android.media.mediacodec.maxwidth        int32
-    int32_t maxwidth = -1;
-    if ( item->getInt32("android.media.mediacodec.maxwidth", &maxwidth)) {
-        metrics_proto.set_max_width(maxwidth);
+
+    int32_t max_width = -1;
+    if ( item->getInt32("android.media.mediacodec.maxwidth", &max_width)) {
+        metrics_proto.set_max_width(max_width);
     }
-    // android.media.mediacodec.maxheight       int32
-    int32_t maxheight = -1;
-    if ( item->getInt32("android.media.mediacodec.maxheight", &maxheight)) {
-        metrics_proto.set_max_height(maxheight);
+
+    int32_t max_height = -1;
+    if ( item->getInt32("android.media.mediacodec.maxheight", &max_height)) {
+        metrics_proto.set_max_height(max_height);
     }
-    // android.media.mediacodec.errcode         int32
-    int32_t errcode = -1;
-    if ( item->getInt32("android.media.mediacodec.errcode", &errcode)) {
-        metrics_proto.set_error_code(errcode);
+
+    int32_t error_code = -1;
+    if ( item->getInt32("android.media.mediacodec.errcode", &error_code)) {
+        metrics_proto.set_error_code(error_code);
     }
-    // android.media.mediacodec.errstate        string
-    std::string errstate;
-    if ( item->getString("android.media.mediacodec.errstate", &errstate)) {
-        metrics_proto.set_error_state(std::move(errstate));
+
+    std::string error_state;
+    if ( item->getString("android.media.mediacodec.errstate", &error_state)) {
+        metrics_proto.set_error_state(error_state);
     }
-    // android.media.mediacodec.latency.max  int64
+
     int64_t latency_max = -1;
     if ( item->getInt64("android.media.mediacodec.latency.max", &latency_max)) {
         metrics_proto.set_latency_max(latency_max);
     }
-    // android.media.mediacodec.latency.min  int64
+
     int64_t latency_min = -1;
     if ( item->getInt64("android.media.mediacodec.latency.min", &latency_min)) {
         metrics_proto.set_latency_min(latency_min);
     }
-    // android.media.mediacodec.latency.avg  int64
+
     int64_t latency_avg = -1;
     if ( item->getInt64("android.media.mediacodec.latency.avg", &latency_avg)) {
         metrics_proto.set_latency_avg(latency_avg);
     }
-    // android.media.mediacodec.latency.n    int64
+
     int64_t latency_count = -1;
     if ( item->getInt64("android.media.mediacodec.latency.n", &latency_count)) {
         metrics_proto.set_latency_count(latency_count);
     }
-    // android.media.mediacodec.latency.unknown    int64
+
     int64_t latency_unknown = -1;
     if ( item->getInt64("android.media.mediacodec.latency.unknown", &latency_unknown)) {
         metrics_proto.set_latency_unknown(latency_unknown);
     }
-    // android.media.mediacodec.queueSecureInputBufferError  int32
-    if (int32_t queueSecureInputBufferError = -1;
-        item->getInt32("android.media.mediacodec.queueSecureInputBufferError",
-                &queueSecureInputBufferError)) {
-        metrics_proto.set_queue_secure_input_buffer_error(queueSecureInputBufferError);
+
+    int32_t queue_secure_input_buffer_error = -1;
+    if (item->getInt32("android.media.mediacodec.queueSecureInputBufferError",
+                &queue_secure_input_buffer_error)) {
+        metrics_proto.set_queue_secure_input_buffer_error(queue_secure_input_buffer_error);
     }
-    // android.media.mediacodec.queueInputBufferError  int32
-    if (int32_t queueInputBufferError = -1;
-        item->getInt32("android.media.mediacodec.queueInputBufferError",
-                &queueInputBufferError)) {
-        metrics_proto.set_queue_input_buffer_error(queueInputBufferError);
+
+    int32_t queue_input_buffer_error = -1;
+    if (item->getInt32("android.media.mediacodec.queueInputBufferError",
+                &queue_input_buffer_error)) {
+        metrics_proto.set_queue_input_buffer_error(queue_input_buffer_error);
     }
     // android.media.mediacodec.latency.hist    NOT EMITTED
 
-    // android.media.mediacodec.bitrate_mode string
     std::string bitrate_mode;
     if (item->getString("android.media.mediacodec.bitrate_mode", &bitrate_mode)) {
-        metrics_proto.set_bitrate_mode(std::move(bitrate_mode));
+        metrics_proto.set_bitrate_mode(bitrate_mode);
     }
-    // android.media.mediacodec.bitrate int32
+
     int32_t bitrate = -1;
     if (item->getInt32("android.media.mediacodec.bitrate", &bitrate)) {
         metrics_proto.set_bitrate(bitrate);
     }
-    // android.media.mediacodec.lifetimeMs int64
-    int64_t lifetimeMs = -1;
-    if ( item->getInt64("android.media.mediacodec.lifetimeMs", &lifetimeMs)) {
-        lifetimeMs = mediametrics::bucket_time_minutes(lifetimeMs);
-        metrics_proto.set_lifetime_millis(lifetimeMs);
+
+    int64_t lifetime_millis = -1;
+    if (item->getInt64("android.media.mediacodec.lifetimeMs", &lifetime_millis)) {
+        lifetime_millis = mediametrics::bucket_time_minutes(lifetime_millis);
+        metrics_proto.set_lifetime_millis(lifetime_millis);
     }
 
     // new for S; need to plumb through to westworld
@@ -201,18 +200,51 @@
         ALOGE("Failed to serialize codec metrics");
         return false;
     }
+    android::util::BytesField bf_serialized( serialized.c_str(), serialized.size());
+    int result = android::util::stats_write(android::util::MEDIAMETRICS_CODEC_REPORTED,
+                               timestamp_nanos, package_name.c_str(), package_version_code,
+                               media_apex_version,
+                               bf_serialized);
+    std::stringstream log;
+    log << "result:" << result << " {"
+            << " mediametrics_codec_reported:"
+            << android::util::MEDIAMETRICS_CODEC_REPORTED
+            << " timestamp_nanos:" << timestamp_nanos
+            << " package_name:" << package_name
+            << " package_version_code:" << package_version_code
+            << " media_apex_version:" << media_apex_version
 
-    if (enabled_statsd) {
-        android::util::BytesField bf_serialized( serialized.c_str(), serialized.size());
-        (void)android::util::stats_write(android::util::MEDIAMETRICS_CODEC_REPORTED,
-                                   timestamp, pkgName.c_str(), pkgVersionCode,
-                                   mediaApexVersion,
-                                   bf_serialized);
+            << " codec:" << codec
+            << " mime:" << mime
+            << " mode:" << mode
+            << " encoder:" << encoder
+            << " secure:" << secure
+            << " width:" << width
+            << " height:" << height
+            << " rotation:" << rotation
+            << " crypto:" << crypto
+            << " profile:" << profile
 
-    } else {
-        ALOGV("NOT sending: private data (len=%zu)", strlen(serialized.c_str()));
-    }
+            << " level:" << level
+            << " max_width:" << max_width
+            << " max_height:" << max_height
+            << " error_code:" << error_code
+            << " error_state:" << error_state
+            << " latency_max:" << latency_max
+            << " latency_min:" << latency_min
+            << " latency_avg:" << latency_avg
+            << " latency_count:" << latency_count
+            << " latency_unknown:" << latency_unknown
 
+            << " queue_input_buffer_error:" << queue_input_buffer_error
+            << " queue_secure_input_buffer_error:" << queue_secure_input_buffer_error
+            << " bitrate_mode:" << bitrate_mode
+            << " bitrate:" << bitrate
+            << " lifetime_millis:" << lifetime_millis
+            // TODO: add when log_session_id is merged.
+            // << " log_session_id:" << log_session_id
+            << " }";
+    statsdLog->log(android::util::MEDIAMETRICS_CODEC_REPORTED, log.str());
     return true;
 }
 
diff --git a/services/mediametrics/statsd_drm.cpp b/services/mediametrics/statsd_drm.cpp
index 071c549..27fd089 100644
--- a/services/mediametrics/statsd_drm.cpp
+++ b/services/mediametrics/statsd_drm.cpp
@@ -32,6 +32,7 @@
 #include <pwd.h>
 
 #include "MediaMetricsService.h"
+#include "StringUtils.h"
 #include "iface_statsd.h"
 
 #include <statslog.h>
@@ -43,53 +44,60 @@
 namespace android {
 
 // mediadrm
-bool statsd_mediadrm(const mediametrics::Item *item)
+bool statsd_mediadrm(const std::shared_ptr<const mediametrics::Item>& item,
+        const std::shared_ptr<mediametrics::StatsdLog>& statsdLog)
 {
     if (item == nullptr) return false;
 
-    const nsecs_t timestamp = MediaMetricsService::roundTime(item->getTimestamp());
-    std::string pkgName = item->getPkgName();
-    int64_t pkgVersionCode = item->getPkgVersionCode();
-    int64_t mediaApexVersion = 0;
+    const nsecs_t timestamp_nanos = MediaMetricsService::roundTime(item->getTimestamp());
+    const std::string package_name = item->getPkgName();
+    const int64_t package_version_code = item->getPkgVersionCode();
+    const int64_t media_apex_version = 0;
 
     std::string vendor;
     (void) item->getString("vendor", &vendor);
     std::string description;
     (void) item->getString("description", &description);
 
-    if (enabled_statsd) {
-        // This field is left here for backward compatibility.
-        // This field is not used anymore.
-        const std::string  kUnusedField("unused");
-        android::util::BytesField bf_serialized(kUnusedField.c_str(), kUnusedField.size());
-        android::util::stats_write(android::util::MEDIAMETRICS_MEDIADRM_REPORTED,
-                                   timestamp, pkgName.c_str(), pkgVersionCode,
-                                   mediaApexVersion,
-                                   vendor.c_str(),
-                                   description.c_str(),
-                                   bf_serialized);
-    } else {
-        ALOGV("NOT sending: mediadrm data(%s, %s)", vendor.c_str(), description.c_str());
-    }
+    // This field is left here for backward compatibility.
+    // This field is not used anymore.
+    const std::string  kUnusedField("unused");
+    android::util::BytesField bf_serialized(kUnusedField.c_str(), kUnusedField.size());
+    int result = android::util::stats_write(android::util::MEDIAMETRICS_MEDIADRM_REPORTED,
+        timestamp_nanos, package_name.c_str(), package_version_code,
+        media_apex_version,
+        vendor.c_str(),
+        description.c_str(),
+        bf_serialized);
 
+    std::stringstream log;
+    log << "result:" << result << " {"
+            << " mediametrics_mediadrm_reported:"
+            << android::util::MEDIAMETRICS_MEDIADRM_REPORTED
+            << " timestamp_nanos:" << timestamp_nanos
+            << " package_name:" << package_name
+            << " package_version_code:" << package_version_code
+            << " media_apex_version:" << media_apex_version
+
+            << " vendor:" << vendor
+            << " description:" << description
+            // omitting serialized
+            << " }";
+    statsdLog->log(android::util::MEDIAMETRICS_MEDIADRM_REPORTED, log.str());
     return true;
 }
 
 // drmmanager
-bool statsd_drmmanager(const mediametrics::Item *item)
+bool statsd_drmmanager(const std::shared_ptr<const mediametrics::Item>& item,
+        const std::shared_ptr<mediametrics::StatsdLog>& statsdLog)
 {
     using namespace std::string_literals;
     if (item == nullptr) return false;
 
-    if (!enabled_statsd) {
-        ALOGV("NOT sending: drmmanager data");
-        return true;
-    }
-
-    const nsecs_t timestamp = MediaMetricsService::roundTime(item->getTimestamp());
-    std::string pkgName = item->getPkgName();
-    int64_t pkgVersionCode = item->getPkgVersionCode();
-    int64_t mediaApexVersion = 0;
+    const nsecs_t timestamp_nanos = MediaMetricsService::roundTime(item->getTimestamp());
+    const std::string package_name = item->getPkgName();
+    const int64_t package_version_code = item->getPkgVersionCode();
+    const int64_t media_apex_version = 0;
 
     std::string plugin_id;
     (void) item->getString("plugin_id", &plugin_id);
@@ -107,8 +115,9 @@
         item->getInt64(("method"s + std::to_string(i)).c_str(), &methodCounts[i]);
     }
 
-    android::util::stats_write(android::util::MEDIAMETRICS_DRMMANAGER_REPORTED,
-                               timestamp, pkgName.c_str(), pkgVersionCode, mediaApexVersion,
+    const int result = android::util::stats_write(android::util::MEDIAMETRICS_DRMMANAGER_REPORTED,
+                               timestamp_nanos, package_name.c_str(), package_version_code,
+                               media_apex_version,
                                plugin_id.c_str(), description.c_str(),
                                method_id, mime_types.c_str(),
                                methodCounts[0], methodCounts[1], methodCounts[2],
@@ -117,6 +126,25 @@
                                methodCounts[9], methodCounts[10], methodCounts[11],
                                methodCounts[12]);
 
+    std::stringstream log;
+    log << "result:" << result << " {"
+            << " mediametrics_drmmanager_reported:"
+            << android::util::MEDIAMETRICS_DRMMANAGER_REPORTED
+            << " timestamp_nanos:" << timestamp_nanos
+            << " package_name:" << package_name
+            << " package_version_code:" << package_version_code
+            << " media_apex_version:" << media_apex_version
+
+            << " plugin_id:" << plugin_id
+            << " description:" << description
+            << " method_id:" << method_id
+            << " mime_types:" << mime_types;
+
+    for (size_t i = 0; i < methodCounts.size(); ++i) {
+        log << " method_" << i << ":" << methodCounts[i];
+    }
+    log << " }";
+    statsdLog->log(android::util::MEDIAMETRICS_DRMMANAGER_REPORTED, log.str());
     return true;
 }
 
@@ -144,17 +172,14 @@
 } // namespace
 
 // |out| and its contents are memory-managed by statsd.
-bool statsd_mediadrm_puller(const mediametrics::Item* item, AStatsEventList* out)
+bool statsd_mediadrm_puller(
+        const std::shared_ptr<const mediametrics::Item>& item, AStatsEventList* out,
+        const std::shared_ptr<mediametrics::StatsdLog>& statsdLog)
 {
     if (item == nullptr) {
         return false;
     }
 
-    if (!enabled_statsd) {
-        ALOGV("NOT pulling: mediadrm activity");
-        return true;
-    }
-
     std::string serialized_metrics;
     (void) item->getString("serialized_metrics", &serialized_metrics);
     const auto framework_raw(base64DecodeNoPad(serialized_metrics));
@@ -178,6 +203,19 @@
     AStatsEvent_writeByteArray(event, framework_raw.data(), framework_raw.size());
     AStatsEvent_writeByteArray(event, plugin_raw.data(), plugin_raw.size());
     AStatsEvent_build(event);
+
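+    // The serialized framework/vendor metrics are summarized via bytesToString rather than logged in full.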
+    std::stringstream log;
+    log << "pulled:" << " {"
+            << " media_drm_activity_info:"
+            << android::util::MEDIA_DRM_ACTIVITY_INFO
+            << " package_name:" << item->getPkgName()
+            << " package_version_code:" << item->getPkgVersionCode()
+            << " vendor:" << vendor
+            << " description:" << description
+            << " framework_metrics:" << mediametrics::stringutils::bytesToString(framework_raw, 8)
+            << " vendor_metrics:" <<  mediametrics::stringutils::bytesToString(plugin_raw, 8)
+            << " }";
+    statsdLog->log(android::util::MEDIA_DRM_ACTIVITY_INFO, log.str());
     return true;
 }
 
diff --git a/services/mediametrics/statsd_extractor.cpp b/services/mediametrics/statsd_extractor.cpp
index 4180e0c..e228f07 100644
--- a/services/mediametrics/statsd_extractor.cpp
+++ b/services/mediametrics/statsd_extractor.cpp
@@ -37,16 +37,16 @@
 
 namespace android {
 
-bool statsd_extractor(const mediametrics::Item *item)
+bool statsd_extractor(const std::shared_ptr<const mediametrics::Item>& item,
+        const std::shared_ptr<mediametrics::StatsdLog>& statsdLog)
 {
     if (item == nullptr) return false;
 
     // these go into the statsd wrapper
-    const nsecs_t timestamp = MediaMetricsService::roundTime(item->getTimestamp());
-    std::string pkgName = item->getPkgName();
-    int64_t pkgVersionCode = item->getPkgVersionCode();
-    int64_t mediaApexVersion = 0;
-
+    const nsecs_t timestamp_nanos = MediaMetricsService::roundTime(item->getTimestamp());
+    const std::string package_name = item->getPkgName();
+    const int64_t package_version_code = item->getPkgVersionCode();
+    const int64_t media_apex_version = 0;
 
     // the rest into our own proto
     //
@@ -55,26 +55,25 @@
     // flesh out the protobuf we'll hand off with our data
     //
 
-    // android.media.mediaextractor.fmt         string
-    std::string fmt;
-    if (item->getString("android.media.mediaextractor.fmt", &fmt)) {
-        metrics_proto.set_format(std::move(fmt));
-    }
-    // android.media.mediaextractor.mime        string
-    std::string mime;
-    if (item->getString("android.media.mediaextractor.mime", &mime)) {
-        metrics_proto.set_mime(std::move(mime));
-    }
-    // android.media.mediaextractor.ntrk        int32
-    int32_t ntrk = -1;
-    if (item->getInt32("android.media.mediaextractor.ntrk", &ntrk)) {
-        metrics_proto.set_tracks(ntrk);
+    std::string format;
+    if (item->getString("android.media.mediaextractor.fmt", &format)) {
+        metrics_proto.set_format(format);
     }
 
-    // android.media.mediaextractor.entry       string
+    std::string mime;
+    if (item->getString("android.media.mediaextractor.mime", &mime)) {
+        metrics_proto.set_mime(mime);
+    }
+
+    int32_t tracks = -1;
+    if (item->getInt32("android.media.mediaextractor.ntrk", &tracks)) {
+        metrics_proto.set_tracks(tracks);
+    }
+
     std::string entry_point_string;
+    stats::mediametrics::ExtractorData::EntryPoint entry_point =
+            stats::mediametrics::ExtractorData_EntryPoint_OTHER;
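+    // Default to OTHER so entry_point is well-defined even when the item carries no entry key.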
     if (item->getString("android.media.mediaextractor.entry", &entry_point_string)) {
-      stats::mediametrics::ExtractorData::EntryPoint entry_point;
       if (entry_point_string == "sdk") {
         entry_point = stats::mediametrics::ExtractorData_EntryPoint_SDK;
       } else if (entry_point_string == "ndk-with-jvm") {
@@ -93,17 +92,30 @@
         return false;
     }
 
-    if (enabled_statsd) {
-        android::util::BytesField bf_serialized( serialized.c_str(), serialized.size());
-        (void)android::util::stats_write(android::util::MEDIAMETRICS_EXTRACTOR_REPORTED,
-                                   timestamp, pkgName.c_str(), pkgVersionCode,
-                                   mediaApexVersion,
-                                   bf_serialized);
+    android::util::BytesField bf_serialized( serialized.c_str(), serialized.size());
+    int result = android::util::stats_write(android::util::MEDIAMETRICS_EXTRACTOR_REPORTED,
+        timestamp_nanos, package_name.c_str(), package_version_code,
+        media_apex_version,
+        bf_serialized);
+    std::stringstream log;
+    log << "result:" << result << " {"
+            << " mediametrics_extractor_reported:"
+            << android::util::MEDIAMETRICS_EXTRACTOR_REPORTED
+            << " timestamp_nanos:" << timestamp_nanos
+            << " package_name:" << package_name
+            << " package_version_code:" << package_version_code
+            << " media_apex_version:" << media_apex_version
 
-    } else {
-        ALOGV("NOT sending: private data (len=%zu)", strlen(serialized.c_str()));
-    }
+            << " format:" << format
+            << " mime:" << mime
+            << " tracks:" << tracks
+            << " entry_point:" << entry_point_string << "(" << entry_point << ")"
 
+            // TODO: Add MediaExtractor log_session_id
+            // << " log_session_id:" << log_session_id
+
+            << " }";
+    statsdLog->log(android::util::MEDIAMETRICS_EXTRACTOR_REPORTED, log.str());
     return true;
 }
 
diff --git a/services/mediametrics/statsd_mediaparser.cpp b/services/mediametrics/statsd_mediaparser.cpp
index 262b2ae..f543425 100644
--- a/services/mediametrics/statsd_mediaparser.cpp
+++ b/services/mediametrics/statsd_mediaparser.cpp
@@ -36,16 +36,15 @@
 
 namespace android {
 
-bool statsd_mediaparser(const mediametrics::Item *item)
+bool statsd_mediaparser(const std::shared_ptr<const mediametrics::Item>& item,
+        const std::shared_ptr<mediametrics::StatsdLog>& statsdLog)
 {
-    if (item == nullptr) {
-        return false;
-    }
+    static constexpr bool enabled_statsd = true; // TODO: Remove, dup with dump2StatsdInternal().
+    if (item == nullptr) return false;
 
-    // statsd wrapper data.
-    const nsecs_t timestamp = MediaMetricsService::roundTime(item->getTimestamp());
-    std::string pkgName = item->getPkgName();
-    int64_t pkgVersionCode = item->getPkgVersionCode();
+    const nsecs_t timestamp_nanos = MediaMetricsService::roundTime(item->getTimestamp());
+    const std::string package_name = item->getPkgName();
+    const int64_t package_version_code = item->getPkgVersionCode();
 
     std::string parserName;
     item->getString("android.media.mediaparser.parserName", &parserName);
@@ -82,9 +81,9 @@
 
     if (enabled_statsd) {
         (void) android::util::stats_write(android::util::MEDIAMETRICS_MEDIAPARSER_REPORTED,
-                                   timestamp,
-                                   pkgName.c_str(),
-                                   pkgVersionCode,
+                                   timestamp_nanos,
+                                   package_name.c_str(),
+                                   package_version_code,
                                    parserName.c_str(),
                                    createdByName,
                                    parserPool.c_str(),
@@ -99,7 +98,29 @@
     } else {
         ALOGV("NOT sending MediaParser media metrics.");
     }
-
+    // TODO: Cleanup after playback_id is merged.
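+    // stats_write's return value is not captured above, so "(result)" is logged as a placeholder.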
+    std::stringstream log;
+    log << "result:" << "(result)" << " {"
+            << " mediametrics_mediaparser_reported:"
+            << android::util::MEDIAMETRICS_MEDIAPARSER_REPORTED
+            << " timestamp_nanos:" << timestamp_nanos
+            << " package_name:" << package_name
+            << " package_version_code:" << package_version_code
+            << " parser_name:" << parserName
+            << " created_by_name:" << createdByName
+            << " parser_pool:" << parserPool
+            << " last_exception:" << lastException
+            << " resource_byte_count:" << resourceByteCount
+            << " duration_millis:" << durationMillis
+            << " track_mime_types:" << trackMimeTypes
+            << " track_codecs:" << trackCodecs
+            << " altered_parameters:" << alteredParameters
+            << " video_width:" << videoWidth
+            << " video_height:" << videoHeight
+            // TODO: Add MediaParser playback_id
+            // << " playback_id:" << playbackId
+            << " }";
+    statsdLog->log(android::util::MEDIAMETRICS_MEDIAPARSER_REPORTED, log.str());
     return true;
 }
 
diff --git a/services/mediametrics/statsd_nuplayer.cpp b/services/mediametrics/statsd_nuplayer.cpp
index a8d0f55..33da81e 100644
--- a/services/mediametrics/statsd_nuplayer.cpp
+++ b/services/mediametrics/statsd_nuplayer.cpp
@@ -41,16 +41,16 @@
  *  handles nuplayer AND nuplayer2
  *  checks for the union of what the two players generate
  */
-bool statsd_nuplayer(const mediametrics::Item *item)
+bool statsd_nuplayer(const std::shared_ptr<const mediametrics::Item>& item,
+        const std::shared_ptr<mediametrics::StatsdLog>& statsdLog)
 {
     if (item == nullptr) return false;
 
     // these go into the statsd wrapper
-    const nsecs_t timestamp = MediaMetricsService::roundTime(item->getTimestamp());
-    std::string pkgName = item->getPkgName();
-    int64_t pkgVersionCode = item->getPkgVersionCode();
-    int64_t mediaApexVersion = 0;
-
+    const nsecs_t timestamp_nanos = MediaMetricsService::roundTime(item->getTimestamp());
+    const std::string package_name = item->getPkgName();
+    const int64_t package_version_code = item->getPkgVersionCode();
+    const int64_t media_apex_version = 0;
 
     // the rest into our own proto
     //
@@ -60,15 +60,16 @@
     //
 
     // differentiate between nuplayer and nuplayer2
-    metrics_proto.set_whichplayer(item->getKey().c_str());
+    std::string whichPlayer = item->getKey();
+    metrics_proto.set_whichplayer(whichPlayer.c_str());
 
     std::string video_mime;
     if (item->getString("android.media.mediaplayer.video.mime", &video_mime)) {
-        metrics_proto.set_video_mime(std::move(video_mime));
+        metrics_proto.set_video_mime(video_mime);
     }
     std::string video_codec;
     if (item->getString("android.media.mediaplayer.video.codec", &video_codec)) {
-        metrics_proto.set_video_codec(std::move(video_codec));
+        metrics_proto.set_video_codec(video_codec);
     }
 
     int32_t width = -1;
@@ -92,32 +93,32 @@
     if (item->getInt64("android.media.mediaplayer.startupdropped", &frames_dropped_startup)) {
         metrics_proto.set_frames_dropped_startup(frames_dropped_startup);
     }
-    double fps = -1.0;
-    if (item->getDouble("android.media.mediaplayer.fps", &fps)) {
-        metrics_proto.set_framerate(fps);
+    double framerate = -1.0;
+    if (item->getDouble("android.media.mediaplayer.fps", &framerate)) {
+        metrics_proto.set_framerate(framerate);
     }
 
     std::string audio_mime;
     if (item->getString("android.media.mediaplayer.audio.mime", &audio_mime)) {
-        metrics_proto.set_audio_mime(std::move(audio_mime));
+        metrics_proto.set_audio_mime(audio_mime);
     }
     std::string audio_codec;
     if (item->getString("android.media.mediaplayer.audio.codec", &audio_codec)) {
-        metrics_proto.set_audio_codec(std::move(audio_codec));
+        metrics_proto.set_audio_codec(audio_codec);
     }
 
-    int64_t duration_ms = -1;
-    if (item->getInt64("android.media.mediaplayer.durationMs", &duration_ms)) {
-        metrics_proto.set_duration_millis(duration_ms);
+    int64_t duration_millis = -1;
+    if (item->getInt64("android.media.mediaplayer.durationMs", &duration_millis)) {
+        metrics_proto.set_duration_millis(duration_millis);
     }
-    int64_t playing_ms = -1;
-    if (item->getInt64("android.media.mediaplayer.playingMs", &playing_ms)) {
-        metrics_proto.set_playing_millis(playing_ms);
+    int64_t playing_millis = -1;
+    if (item->getInt64("android.media.mediaplayer.playingMs", &playing_millis)) {
+        metrics_proto.set_playing_millis(playing_millis);
     }
 
-    int32_t err = -1;
-    if (item->getInt32("android.media.mediaplayer.err", &err)) {
-        metrics_proto.set_error(err);
+    int32_t error = -1;
+    if (item->getInt32("android.media.mediaplayer.err", &error)) {
+        metrics_proto.set_error(error);
     }
     int32_t error_code = -1;
     if (item->getInt32("android.media.mediaplayer.errcode", &error_code)) {
@@ -125,45 +126,74 @@
     }
     std::string error_state;
     if (item->getString("android.media.mediaplayer.errstate", &error_state)) {
-        metrics_proto.set_error_state(std::move(error_state));
+        metrics_proto.set_error_state(error_state);
     }
 
     std::string data_source_type;
     if (item->getString("android.media.mediaplayer.dataSource", &data_source_type)) {
-        metrics_proto.set_data_source_type(std::move(data_source_type));
+        metrics_proto.set_data_source_type(data_source_type);
     }
 
-    int64_t rebufferingMs = -1;
-    if (item->getInt64("android.media.mediaplayer.rebufferingMs", &rebufferingMs)) {
-        metrics_proto.set_rebuffering_millis(rebufferingMs);
+    int64_t rebuffering_millis = -1;
+    if (item->getInt64("android.media.mediaplayer.rebufferingMs", &rebuffering_millis)) {
+        metrics_proto.set_rebuffering_millis(rebuffering_millis);
     }
     int32_t rebuffers = -1;
     if (item->getInt32("android.media.mediaplayer.rebuffers", &rebuffers)) {
         metrics_proto.set_rebuffers(rebuffers);
     }
-    int32_t rebufferExit = -1;
-    if (item->getInt32("android.media.mediaplayer.rebufferExit", &rebufferExit)) {
-        metrics_proto.set_rebuffer_at_exit(rebufferExit);
+    int32_t rebuffer_at_exit = -1;
+    if (item->getInt32("android.media.mediaplayer.rebufferExit", &rebuffer_at_exit)) {
+        metrics_proto.set_rebuffer_at_exit(rebuffer_at_exit);
     }
 
-
     std::string serialized;
     if (!metrics_proto.SerializeToString(&serialized)) {
         ALOGE("Failed to serialize nuplayer metrics");
         return false;
     }
 
-    if (enabled_statsd) {
-        android::util::BytesField bf_serialized( serialized.c_str(), serialized.size());
-        (void)android::util::stats_write(android::util::MEDIAMETRICS_NUPLAYER_REPORTED,
-                                   timestamp, pkgName.c_str(), pkgVersionCode,
-                                   mediaApexVersion,
-                                   bf_serialized);
+    android::util::BytesField bf_serialized(serialized.c_str(), serialized.size());
+    int result = android::util::stats_write(android::util::MEDIAMETRICS_NUPLAYER_REPORTED,
+        timestamp_nanos, package_name.c_str(), package_version_code,
+        media_apex_version,
+        bf_serialized);
 
-    } else {
-        ALOGV("NOT sending: private data (len=%zu)", strlen(serialized.c_str()));
-    }
+    std::stringstream log;
+    log << "result:" << result << " {"
+            << " mediametrics_nuplayer_reported:"
+            << android::util::MEDIAMETRICS_NUPLAYER_REPORTED
+            << " timestamp_nanos:" << timestamp_nanos
+            << " package_name:" << package_name
+            << " package_version_code:" << package_version_code
+            << " media_apex_version:" << media_apex_version
 
+            << " whichPlayer:" << whichPlayer
+            << " video_mime:" << video_mime
+            << " video_codec:" << video_codec
+            << " width:" << width
+            << " height:" << height
+            << " frames:" << frames
+            << " frames_dropped:" << frames_dropped
+            << " framerate:" << framerate
+            << " audio_mime:" << audio_mime
+            << " audio_codec:" << media_apex_version
+
+            << " duration_millis:" << duration_millis
+            << " playing_millis:" << playing_millis
+            << " error:" << error
+            << " error_code:" << error_code
+            << " error_state:" << error_state
+            << " data_source_type:" << data_source_type
+            << " rebuffering_millis:" << rebuffering_millis
+            << " rebuffers:" << rebuffers
+            << " rebuffer_at_exit:" << rebuffer_at_exit
+            << " frames_dropped_startup:" << frames_dropped_startup
+
+            // TODO NuPlayer - add log_session_id
+            // << " log_session_id:" << log_session_id
+            << " }";
+    statsdLog->log(android::util::MEDIAMETRICS_NUPLAYER_REPORTED, log.str());
     return true;
 }
 
diff --git a/services/mediametrics/statsd_recorder.cpp b/services/mediametrics/statsd_recorder.cpp
index 2e5ada4..23b884f 100644
--- a/services/mediametrics/statsd_recorder.cpp
+++ b/services/mediametrics/statsd_recorder.cpp
@@ -37,16 +37,16 @@
 
 namespace android {
 
-bool statsd_recorder(const mediametrics::Item *item)
+bool statsd_recorder(const std::shared_ptr<const mediametrics::Item>& item,
+        const std::shared_ptr<mediametrics::StatsdLog>& statsdLog)
 {
     if (item == nullptr) return false;
 
     // these go into the statsd wrapper
-    const nsecs_t timestamp = MediaMetricsService::roundTime(item->getTimestamp());
-    std::string pkgName = item->getPkgName();
-    int64_t pkgVersionCode = item->getPkgVersionCode();
-    int64_t mediaApexVersion = 0;
-
+    const nsecs_t timestamp_nanos = MediaMetricsService::roundTime(item->getTimestamp());
+    const std::string package_name = item->getPkgName();
+    const int64_t package_version_code = item->getPkgVersionCode();
+    const int64_t media_apex_version = 0;
 
     // the rest into our own proto
     //
@@ -58,22 +58,22 @@
     // string kRecorderAudioMime = "android.media.mediarecorder.audio.mime";
     std::string audio_mime;
     if (item->getString("android.media.mediarecorder.audio.mime", &audio_mime)) {
-        metrics_proto.set_audio_mime(std::move(audio_mime));
+        metrics_proto.set_audio_mime(audio_mime);
     }
     // string kRecorderVideoMime = "android.media.mediarecorder.video.mime";
     std::string video_mime;
     if (item->getString("android.media.mediarecorder.video.mime", &video_mime)) {
-        metrics_proto.set_video_mime(std::move(video_mime));
+        metrics_proto.set_video_mime(video_mime);
     }
     // int32 kRecorderVideoProfile = "android.media.mediarecorder.video-encoder-profile";
-    int32_t videoProfile = -1;
-    if (item->getInt32("android.media.mediarecorder.video-encoder-profile", &videoProfile)) {
-        metrics_proto.set_video_profile(videoProfile);
+    int32_t video_profile = -1;
+    if (item->getInt32("android.media.mediarecorder.video-encoder-profile", &video_profile)) {
+        metrics_proto.set_video_profile(video_profile);
     }
     // int32 kRecorderVideoLevel = "android.media.mediarecorder.video-encoder-level";
-    int32_t videoLevel = -1;
-    if (item->getInt32("android.media.mediarecorder.video-encoder-level", &videoLevel)) {
-        metrics_proto.set_video_level(videoLevel);
+    int32_t video_level = -1;
+    if (item->getInt32("android.media.mediarecorder.video-encoder-level", &video_level)) {
+        metrics_proto.set_video_level(video_level);
     }
     // int32 kRecorderWidth = "android.media.mediarecorder.width";
     int32_t width = -1;
@@ -97,73 +97,73 @@
     }
 
     // int32 kRecorderCaptureFps = "android.media.mediarecorder.capture-fps";
-    int32_t captureFps = -1;
-    if (item->getInt32("android.media.mediarecorder.capture-fps", &captureFps)) {
-        metrics_proto.set_capture_fps(captureFps);
+    int32_t capture_fps = -1;
+    if (item->getInt32("android.media.mediarecorder.capture-fps", &capture_fps)) {
+        metrics_proto.set_capture_fps(capture_fps);
     }
     // double kRecorderCaptureFpsEnable = "android.media.mediarecorder.capture-fpsenable";
-    double captureFpsEnable = -1;
-    if (item->getDouble("android.media.mediarecorder.capture-fpsenable", &captureFpsEnable)) {
-        metrics_proto.set_capture_fps_enable(captureFpsEnable);
+    double capture_fps_enable = -1;
+    if (item->getDouble("android.media.mediarecorder.capture-fpsenable", &capture_fps_enable)) {
+        metrics_proto.set_capture_fps_enable(capture_fps_enable);
     }
 
     // int64 kRecorderDurationMs = "android.media.mediarecorder.durationMs";
-    int64_t durationMs = -1;
-    if (item->getInt64("android.media.mediarecorder.durationMs", &durationMs)) {
-        metrics_proto.set_duration_millis(durationMs);
+    int64_t duration_millis = -1;
+    if (item->getInt64("android.media.mediarecorder.durationMs", &duration_millis)) {
+        metrics_proto.set_duration_millis(duration_millis);
     }
     // int64 kRecorderPaused = "android.media.mediarecorder.pausedMs";
-    int64_t pausedMs = -1;
-    if (item->getInt64("android.media.mediarecorder.pausedMs", &pausedMs)) {
-        metrics_proto.set_paused_millis(pausedMs);
+    int64_t paused_millis = -1;
+    if (item->getInt64("android.media.mediarecorder.pausedMs", &paused_millis)) {
+        metrics_proto.set_paused_millis(paused_millis);
     }
     // int32 kRecorderNumPauses = "android.media.mediarecorder.NPauses";
-    int32_t pausedCount = -1;
-    if (item->getInt32("android.media.mediarecorder.NPauses", &pausedCount)) {
-        metrics_proto.set_paused_count(pausedCount);
+    int32_t paused_count = -1;
+    if (item->getInt32("android.media.mediarecorder.NPauses", &paused_count)) {
+        metrics_proto.set_paused_count(paused_count);
     }
 
     // int32 kRecorderAudioBitrate = "android.media.mediarecorder.audio-bitrate";
-    int32_t audioBitrate = -1;
-    if (item->getInt32("android.media.mediarecorder.audio-bitrate", &audioBitrate)) {
-        metrics_proto.set_audio_bitrate(audioBitrate);
+    int32_t audio_bitrate = -1;
+    if (item->getInt32("android.media.mediarecorder.audio-bitrate", &audio_bitrate)) {
+        metrics_proto.set_audio_bitrate(audio_bitrate);
     }
     // int32 kRecorderAudioChannels = "android.media.mediarecorder.audio-channels";
-    int32_t audioChannels = -1;
-    if (item->getInt32("android.media.mediarecorder.audio-channels", &audioChannels)) {
-        metrics_proto.set_audio_channels(audioChannels);
+    int32_t audio_channels = -1;
+    if (item->getInt32("android.media.mediarecorder.audio-channels", &audio_channels)) {
+        metrics_proto.set_audio_channels(audio_channels);
     }
     // int32 kRecorderAudioSampleRate = "android.media.mediarecorder.audio-samplerate";
-    int32_t audioSampleRate = -1;
-    if (item->getInt32("android.media.mediarecorder.audio-samplerate", &audioSampleRate)) {
-        metrics_proto.set_audio_samplerate(audioSampleRate);
+    int32_t audio_samplerate = -1;
+    if (item->getInt32("android.media.mediarecorder.audio-samplerate", &audio_samplerate)) {
+        metrics_proto.set_audio_samplerate(audio_samplerate);
     }
 
     // int32 kRecorderMovieTimescale = "android.media.mediarecorder.movie-timescale";
-    int32_t movieTimescale = -1;
-    if (item->getInt32("android.media.mediarecorder.movie-timescale", &movieTimescale)) {
-        metrics_proto.set_movie_timescale(movieTimescale);
+    int32_t movie_timescale = -1;
+    if (item->getInt32("android.media.mediarecorder.movie-timescale", &movie_timescale)) {
+        metrics_proto.set_movie_timescale(movie_timescale);
     }
     // int32 kRecorderAudioTimescale = "android.media.mediarecorder.audio-timescale";
-    int32_t audioTimescale = -1;
-    if (item->getInt32("android.media.mediarecorder.audio-timescale", &audioTimescale)) {
-        metrics_proto.set_audio_timescale(audioTimescale);
+    int32_t audio_timescale = -1;
+    if (item->getInt32("android.media.mediarecorder.audio-timescale", &audio_timescale)) {
+        metrics_proto.set_audio_timescale(audio_timescale);
     }
     // int32 kRecorderVideoTimescale = "android.media.mediarecorder.video-timescale";
-    int32_t videoTimescale = -1;
-    if (item->getInt32("android.media.mediarecorder.video-timescale", &videoTimescale)) {
-        metrics_proto.set_video_timescale(videoTimescale);
+    int32_t video_timescale = -1;
+    if (item->getInt32("android.media.mediarecorder.video-timescale", &video_timescale)) {
+        metrics_proto.set_video_timescale(video_timescale);
     }
 
     // int32 kRecorderVideoBitrate = "android.media.mediarecorder.video-bitrate";
-    int32_t videoBitRate = -1;
-    if (item->getInt32("android.media.mediarecorder.video-bitrate", &videoBitRate)) {
-        metrics_proto.set_video_bitrate(videoBitRate);
+    int32_t video_bitrate = -1;
+    if (item->getInt32("android.media.mediarecorder.video-bitrate", &video_bitrate)) {
+        metrics_proto.set_video_bitrate(video_bitrate);
     }
     // int32 kRecorderVideoIframeInterval = "android.media.mediarecorder.video-iframe-interval";
-    int32_t iFrameInterval = -1;
-    if (item->getInt32("android.media.mediarecorder.video-iframe-interval", &iFrameInterval)) {
-        metrics_proto.set_iframe_interval(iFrameInterval);
+    int32_t iframe_interval = -1;
+    if (item->getInt32("android.media.mediarecorder.video-iframe-interval", &iframe_interval)) {
+        metrics_proto.set_iframe_interval(iframe_interval);
     }
 
     std::string serialized;
@@ -172,17 +172,47 @@
         return false;
     }
 
-    if (enabled_statsd) {
-        android::util::BytesField bf_serialized( serialized.c_str(), serialized.size());
-        (void)android::util::stats_write(android::util::MEDIAMETRICS_RECORDER_REPORTED,
-                                   timestamp, pkgName.c_str(), pkgVersionCode,
-                                   mediaApexVersion,
-                                   bf_serialized);
+    android::util::BytesField bf_serialized(serialized.c_str(), serialized.size());
+    int result = android::util::stats_write(android::util::MEDIAMETRICS_RECORDER_REPORTED,
+        timestamp_nanos, package_name.c_str(), package_version_code,
+        media_apex_version,
+        bf_serialized);
+    std::stringstream log;
+    log << "result:" << result << " {"
+            << " mediametrics_recorder_reported:"
+            << android::util::MEDIAMETRICS_RECORDER_REPORTED
+            << " timestamp_nanos:" << timestamp_nanos
+            << " package_name:" << package_name
+            << " package_version_code:" << package_version_code
+            << " media_apex_version:" << media_apex_version
 
-    } else {
-        ALOGV("NOT sending: private data (len=%zu)", strlen(serialized.c_str()));
-    }
+            << " audio_mime:" << audio_mime
+            << " video_mime:" << video_mime
+            << " video_profile:" << video_profile
+            << " video_level:" << video_level
+            << " width:" << width
+            << " height:" << height
+            << " rotation:" << rotation
+            << " framerate:" << framerate
+            << " capture_fps:" << capture_fps
+            << " capture_fps_enable:" << capture_fps_enable
 
+            << " duration_millis:" << duration_millis
+            << " paused_millis:" << paused_millis
+            << " paused_count:" << paused_count
+            << " audio_bitrate:" << audio_bitrate
+            << " audio_channels:" << audio_channels
+            << " audio_samplerate:" << audio_samplerate
+            << " movie_timescale:" << movie_timescale
+            << " audio_timescale:" << audio_timescale
+            << " video_timescale:" << video_timescale
+            << " video_bitrate:" << video_bitrate
+
+            << " iframe_interval:" << iframe_interval
+            // TODO Recorder - add log_session_id
+            // << " log_session_id:" << log_session_id
+            << " }";
+    statsdLog->log(android::util::MEDIAMETRICS_RECORDER_REPORTED, log.str());
     return true;
 }
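Each of the statsd_* handlers touched by this patch now shares the same tail: serialize the per-case proto, push the atom with stats_write(), then mirror what was pushed into the in-memory StatsdLog as a brace-delimited key:value string. A condensed sketch of that shared tail, using the recorder atom from the hunk above (proto filling elided, field list shortened):

    std::string serialized;
    if (!metrics_proto.SerializeToString(&serialized)) return false;  // nothing to report

    android::util::BytesField bf_serialized(serialized.c_str(), serialized.size());
    const int result = android::util::stats_write(android::util::MEDIAMETRICS_RECORDER_REPORTED,
            timestamp_nanos, package_name.c_str(), package_version_code,
            media_apex_version, bf_serialized);

    std::stringstream log;
    log << "result:" << result << " {"
            << " timestamp_nanos:" << timestamp_nanos
            << " package_name:" << package_name
            // ... one key:value pair per proto field ...
            << " }";
    statsdLog->log(android::util::MEDIAMETRICS_RECORDER_REPORTED, log.str());
    return true;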
 
diff --git a/services/mediametrics/tests/mediametrics_tests.cpp b/services/mediametrics/tests/mediametrics_tests.cpp
index ac9c7fa..2336d6f 100644
--- a/services/mediametrics/tests/mediametrics_tests.cpp
+++ b/services/mediametrics/tests/mediametrics_tests.cpp
@@ -809,7 +809,9 @@
   (*item3).set("four", (int32_t)4)
           .setTimestamp(12);
 
-  android::mediametrics::AudioAnalytics audioAnalytics;
+  std::shared_ptr<mediametrics::StatsdLog> statsdLog =
+          std::make_shared<mediametrics::StatsdLog>(10);
+  android::mediametrics::AudioAnalytics audioAnalytics{statsdLog};
 
   // untrusted entities cannot create a new key.
   ASSERT_EQ(PERMISSION_DENIED, audioAnalytics.submit(item, false /* isTrusted */));
@@ -817,7 +819,7 @@
 
   // TODO: Verify contents of AudioAnalytics.
   // Currently there is no getter API in AudioAnalytics besides dump.
-  ASSERT_EQ(11, audioAnalytics.dump(1000).second /* lines */);
+  ASSERT_EQ(10, audioAnalytics.dump(1000).second /* lines */);
 
   ASSERT_EQ(NO_ERROR, audioAnalytics.submit(item, true /* isTrusted */));
   // untrusted entities can add to an existing key
@@ -845,7 +847,9 @@
   (*item3).set("four", (int32_t)4)
           .setTimestamp(12);
 
-  android::mediametrics::AudioAnalytics audioAnalytics;
+  std::shared_ptr<mediametrics::StatsdLog> statsdLog =
+          std::make_shared<mediametrics::StatsdLog>(10);
+  android::mediametrics::AudioAnalytics audioAnalytics{statsdLog};
 
   // untrusted entities cannot create a new key.
   ASSERT_EQ(PERMISSION_DENIED, audioAnalytics.submit(item, false /* isTrusted */));
@@ -853,7 +857,7 @@
 
   // TODO: Verify contents of AudioAnalytics.
   // Currently there is no getter API in AudioAnalytics besides dump.
-  ASSERT_EQ(11, audioAnalytics.dump(1000).second /* lines */);
+  ASSERT_EQ(10, audioAnalytics.dump(1000).second /* lines */);
 
   ASSERT_EQ(NO_ERROR, audioAnalytics.submit(item, true /* isTrusted */));
   // untrusted entities can add to an existing key
@@ -877,7 +881,9 @@
   (*item3).set("four", (int32_t)4)
           .setTimestamp(12);
 
-  android::mediametrics::AudioAnalytics audioAnalytics;
+  std::shared_ptr<mediametrics::StatsdLog> statsdLog =
+          std::make_shared<mediametrics::StatsdLog>(10);
+  android::mediametrics::AudioAnalytics audioAnalytics{statsdLog};
 
   ASSERT_EQ(NO_ERROR, audioAnalytics.submit(item, true /* isTrusted */));
   // untrusted entities can add to an existing key
diff --git a/services/mediatranscoding/tests/Android.bp b/services/mediatranscoding/tests/Android.bp
index 4df5a9f..cb180ec 100644
--- a/services/mediatranscoding/tests/Android.bp
+++ b/services/mediatranscoding/tests/Android.bp
@@ -25,6 +25,7 @@
     ],
 
     shared_libs: [
+        "libactivitymanager_aidl",
         "libbinder",
         "libbinder_ndk",
         "liblog",
diff --git a/services/mediatranscoding/tests/MediaTranscodingServiceTestHelper.h b/services/mediatranscoding/tests/MediaTranscodingServiceTestHelper.h
index 4727e48..20e4bfb 100644
--- a/services/mediatranscoding/tests/MediaTranscodingServiceTestHelper.h
+++ b/services/mediatranscoding/tests/MediaTranscodingServiceTestHelper.h
@@ -449,13 +449,17 @@
     template <bool expectation = success>
     bool getClientUids(int32_t sessionId, std::vector<int32_t>* clientUids) {
         constexpr bool shouldSucceed = (expectation == success);
-        bool result;
-        Status status = mClient->getClientUids(sessionId, clientUids, &result);
+        std::optional<std::vector<int32_t>> aidl_return;
+        Status status = mClient->getClientUids(sessionId, &aidl_return);
 
         EXPECT_TRUE(status.isOk());
-        EXPECT_EQ(result, shouldSucceed);
+        bool success = (aidl_return != std::nullopt);
+        if (success) {
+            *clientUids = *aidl_return;
+        }
+        EXPECT_EQ(success, shouldSucceed);
 
-        return status.isOk() && (result == shouldSucceed);
+        return status.isOk() && (success == shouldSucceed);
     }
 
     int32_t mClientId;
diff --git a/services/oboeservice/AAudioServiceEndpoint.cpp b/services/oboeservice/AAudioServiceEndpoint.cpp
index faea58f..13dd3d3 100644
--- a/services/oboeservice/AAudioServiceEndpoint.cpp
+++ b/services/oboeservice/AAudioServiceEndpoint.cpp
@@ -38,6 +38,10 @@
 using namespace android;  // TODO just import names needed
 using namespace aaudio;   // TODO just import names needed
 
+AAudioServiceEndpoint::~AAudioServiceEndpoint() {
+    ALOGD("%s() called", __func__);
+}
+
 std::string AAudioServiceEndpoint::dump() const NO_THREAD_SAFETY_ANALYSIS {
     std::stringstream result;
 
diff --git a/services/oboeservice/AAudioServiceEndpoint.h b/services/oboeservice/AAudioServiceEndpoint.h
index 72090c2..a7f63d3 100644
--- a/services/oboeservice/AAudioServiceEndpoint.h
+++ b/services/oboeservice/AAudioServiceEndpoint.h
@@ -43,7 +43,7 @@
         , public AAudioStreamParameters {
 public:
 
-    virtual ~AAudioServiceEndpoint() = default;
+    virtual ~AAudioServiceEndpoint();
 
     virtual std::string dump() const;
 
diff --git a/services/oboeservice/AAudioServiceEndpointMMAP.cpp b/services/oboeservice/AAudioServiceEndpointMMAP.cpp
index 556710d..7294a58 100644
--- a/services/oboeservice/AAudioServiceEndpointMMAP.cpp
+++ b/services/oboeservice/AAudioServiceEndpointMMAP.cpp
@@ -51,8 +51,6 @@
         : mMmapStream(nullptr)
         , mAAudioService(audioService) {}
 
-AAudioServiceEndpointMMAP::~AAudioServiceEndpointMMAP() {}
-
 std::string AAudioServiceEndpointMMAP::dump() const {
     std::stringstream result;
 
@@ -357,7 +355,10 @@
 // This is called by AudioFlinger when it wants to destroy a stream.
 void AAudioServiceEndpointMMAP::onTearDown(audio_port_handle_t portHandle) {
     ALOGD("%s(portHandle = %d) called", __func__, portHandle);
-    std::thread asyncTask(&AAudioServiceEndpointMMAP::handleTearDownAsync, this, portHandle);
+    android::sp<AAudioServiceEndpointMMAP> holdEndpoint(this);
+    std::thread asyncTask([holdEndpoint, portHandle]() {
+        holdEndpoint->handleTearDownAsync(portHandle);
+    });
     asyncTask.detach();
 }
 
@@ -378,9 +379,11 @@
     ALOGD("%s() called with dev %d, old = %d", __func__, deviceId, getDeviceId());
     if (getDeviceId() != deviceId) {
         if (getDeviceId() != AUDIO_PORT_HANDLE_NONE) {
-            std::thread asyncTask([this, deviceId]() {
-                disconnectRegisteredStreams();
-                setDeviceId(deviceId);
+            android::sp<AAudioServiceEndpointMMAP> holdEndpoint(this);
+            std::thread asyncTask([holdEndpoint, deviceId]() {
+                ALOGD("onRoutingChanged() asyncTask launched");
+                holdEndpoint->disconnectRegisteredStreams();
+                holdEndpoint->setDeviceId(deviceId);
             });
             asyncTask.detach();
         } else {
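The onTearDown() and onRoutingChanged() changes above both capture a strong pointer in the lambda instead of a raw this, so the detached worker keeps the MMAP endpoint alive for as long as it runs. A minimal standalone sketch of that keep-alive pattern, assuming any RefBase-derived class that is already managed by sp elsewhere (ExampleEndpoint and handleAsync are placeholders):

    #include <thread>
    #include <utils/RefBase.h>

    class ExampleEndpoint : public android::RefBase {
    public:
        void onEvent(int arg) {
            // Take a strong reference on behalf of the worker; copying it into the
            // lambda keeps the object alive until the worker returns.
            android::sp<ExampleEndpoint> holdEndpoint(this);
            std::thread asyncTask([holdEndpoint, arg]() {
                holdEndpoint->handleAsync(arg);
            });
            asyncTask.detach();  // safe to detach: the lambda owns a strong reference
        }
    private:
        void handleAsync(int arg) { (void)arg; /* slow work */ }
    };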
diff --git a/services/oboeservice/AAudioServiceEndpointMMAP.h b/services/oboeservice/AAudioServiceEndpointMMAP.h
index 24b161d..5a53885 100644
--- a/services/oboeservice/AAudioServiceEndpointMMAP.h
+++ b/services/oboeservice/AAudioServiceEndpointMMAP.h
@@ -44,7 +44,7 @@
 public:
     explicit AAudioServiceEndpointMMAP(android::AAudioService &audioService);
 
-    virtual ~AAudioServiceEndpointMMAP();
+    virtual ~AAudioServiceEndpointMMAP() = default;
 
     std::string dump() const override;
 
diff --git a/services/oboeservice/AAudioServiceStreamBase.cpp b/services/oboeservice/AAudioServiceStreamBase.cpp
index 694094c..dbacd75 100644
--- a/services/oboeservice/AAudioServiceStreamBase.cpp
+++ b/services/oboeservice/AAudioServiceStreamBase.cpp
@@ -67,8 +67,7 @@
     // If the stream is deleted when OPEN or in use then audio resources will leak.
     // This would indicate an internal error. So we want to find this ASAP.
     LOG_ALWAYS_FATAL_IF(!(getState() == AAUDIO_STREAM_STATE_CLOSED
-                        || getState() == AAUDIO_STREAM_STATE_UNINITIALIZED
-                        || getState() == AAUDIO_STREAM_STATE_DISCONNECTED),
+                        || getState() == AAUDIO_STREAM_STATE_UNINITIALIZED),
                         "service stream %p still open, state = %d",
                         this, getState());
 }
@@ -229,7 +228,7 @@
     aaudio_result_t result = AAUDIO_OK;
 
     if (auto state = getState();
-        state == AAUDIO_STREAM_STATE_CLOSED || state == AAUDIO_STREAM_STATE_DISCONNECTED) {
+        state == AAUDIO_STREAM_STATE_CLOSED || isDisconnected_l()) {
         ALOGW("%s() already CLOSED, returns INVALID_STATE, handle = %d",
                 __func__, getHandle());
         return AAUDIO_ERROR_INVALID_STATE;
@@ -261,8 +260,14 @@
     sendServiceEvent(AAUDIO_SERVICE_EVENT_STARTED);
     setState(AAUDIO_STREAM_STATE_STARTED);
     mThreadEnabled.store(true);
+    // Make sure this object does not get deleted before the run() method
+    // can protect it by making a strong pointer.
+    incStrong(nullptr); // See run() method.
     result = mTimestampThread.start(this);
-    if (result != AAUDIO_OK) goto error;
+    if (result != AAUDIO_OK) {
+        decStrong(nullptr); // run() can't do it so we have to do it here.
+        goto error;
+    }
 
     return result;
 
@@ -291,10 +296,6 @@
             .set(AMEDIAMETRICS_PROP_STATUS, (int32_t)result)
             .record(); });
 
-    // Send it now because the timestamp gets rounded up when stopStream() is called below.
-    // Also we don't need the timestamps while we are shutting down.
-    sendCurrentTimestamp();
-
     result = stopTimestampThread();
     if (result != AAUDIO_OK) {
         disconnect_l();
@@ -340,10 +341,12 @@
 
     setState(AAUDIO_STREAM_STATE_STOPPING);
 
-    // Send it now because the timestamp gets rounded up when stopStream() is called below.
-    // Also we don't need the timestamps while we are shutting down.
-    sendCurrentTimestamp(); // warning - this calls a virtual function
+    // Temporarily unlock because we are joining the timestamp thread and it may try
+    // to acquire mLock.
+    mLock.unlock();
     result = stopTimestampThread();
+    mLock.lock();
+
     if (result != AAUDIO_OK) {
         disconnect_l();
         return result;
@@ -403,15 +406,21 @@
 __attribute__((no_sanitize("integer")))
 void AAudioServiceStreamBase::run() {
     ALOGD("%s() %s entering >>>>>>>>>>>>>> TIMESTAMPS", __func__, getTypeText());
+    // Hold onto the ref counted stream until the end.
+    android::sp<AAudioServiceStreamBase> holdStream(this);
     TimestampScheduler timestampScheduler;
+    // Balance the incStrong from when the thread was launched.
+    holdStream->decStrong(nullptr);
+
     timestampScheduler.setBurstPeriod(mFramesPerBurst, getSampleRate());
     timestampScheduler.start(AudioClock::getNanoseconds());
     int64_t nextTime = timestampScheduler.nextAbsoluteTime();
     int32_t loopCount = 0;
+    aaudio_result_t result = AAUDIO_OK;
     while(mThreadEnabled.load()) {
         loopCount++;
         if (AudioClock::getNanoseconds() >= nextTime) {
-            aaudio_result_t result = sendCurrentTimestamp();
+            result = sendCurrentTimestamp();
             if (result != AAUDIO_OK) {
                 ALOGE("%s() timestamp thread got result = %d", __func__, result);
                 break;
@@ -423,6 +432,11 @@
             AudioClock::sleepUntilNanoTime(nextTime);
         }
     }
+    // This call was moved here from stop_l() and pause_l(), where sending the timestamp
+    // could deadlock if it resulted in a call to disconnect.
+    if (result == AAUDIO_OK) {
+        (void) sendCurrentTimestamp();
+    }
     ALOGD("%s() %s exiting after %d loops <<<<<<<<<<<<<< TIMESTAMPS",
           __func__, getTypeText(), loopCount);
 }
@@ -433,8 +447,7 @@
 }
 
 void AAudioServiceStreamBase::disconnect_l() {
-    if (getState() != AAUDIO_STREAM_STATE_DISCONNECTED
-        && getState() != AAUDIO_STREAM_STATE_CLOSED) {
+    if (!isDisconnected_l() && getState() != AAUDIO_STREAM_STATE_CLOSED) {
 
         mediametrics::LogItem(mMetricsId)
             .set(AMEDIAMETRICS_PROP_EVENT, AMEDIAMETRICS_PROP_EVENT_VALUE_DISCONNECT)
@@ -442,7 +455,7 @@
             .record();
 
         sendServiceEvent(AAUDIO_SERVICE_EVENT_DISCONNECTED);
-        setState(AAUDIO_STREAM_STATE_DISCONNECTED);
+        setDisconnected_l(true);
     }
 }
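Two lifecycle points in this file are worth calling out. First, start() now does incStrong() before launching the timestamp thread and run() balances it with decStrong() once a local sp protects the stream, so the object cannot be destroyed in the window between thread creation and the first line of run(). Second, stop_l() drops mLock around stopTimestampThread() because the thread being joined may itself need mLock before it exits. A minimal standalone sketch of the second pattern, using only standard <mutex>/<thread> (ExampleStream is a placeholder):

    #include <atomic>
    #include <mutex>
    #include <thread>

    class ExampleStream {
    public:
        void start() {
            std::lock_guard<std::mutex> guard(mLock);
            mEnabled = true;
            mWorker = std::thread([this] {
                while (mEnabled) { /* periodic work */ }
                std::lock_guard<std::mutex> guard(mLock);  // worker needs mLock on its way out
                mStoppedCleanly = true;
            });
        }
        void stop() {
            std::lock_guard<std::mutex> guard(mLock);
            stop_l();
        }
    private:
        void stop_l() {                // caller holds mLock
            mEnabled = false;
            mLock.unlock();            // joining while holding mLock would deadlock with the worker
            if (mWorker.joinable()) mWorker.join();
            mLock.lock();              // reacquire before returning to the locked caller
        }
        std::mutex mLock;
        std::thread mWorker;
        std::atomic<bool> mEnabled{false};
        bool mStoppedCleanly = false;  // written under mLock
    };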
 
diff --git a/services/oboeservice/AAudioServiceStreamBase.h b/services/oboeservice/AAudioServiceStreamBase.h
index 06c9f21..c42df0f 100644
--- a/services/oboeservice/AAudioServiceStreamBase.h
+++ b/services/oboeservice/AAudioServiceStreamBase.h
@@ -80,7 +80,7 @@
     // because we had to wait until we generated the handle.
     void logOpen(aaudio_handle_t streamHandle);
 
-    aaudio_result_t close();
+    aaudio_result_t close() EXCLUDES(mLock);
 
     /**
      * Start the flow of audio data.
@@ -88,7 +88,7 @@
      * This is not guaranteed to be synchronous but it currently is.
      * An AAUDIO_SERVICE_EVENT_STARTED will be sent to the client when complete.
      */
-    aaudio_result_t start();
+    aaudio_result_t start() EXCLUDES(mLock);
 
     /**
      * Stop the flow of data so that start() can resume without loss of data.
@@ -96,7 +96,7 @@
      * This is not guaranteed to be synchronous but it currently is.
      * An AAUDIO_SERVICE_EVENT_PAUSED will be sent to the client when complete.
     */
-    aaudio_result_t pause();
+    aaudio_result_t pause() EXCLUDES(mLock);
 
     /**
      * Stop the flow of data after the currently queued data has finished playing.
@@ -105,14 +105,14 @@
      * An AAUDIO_SERVICE_EVENT_STOPPED will be sent to the client when complete.
      *
      */
-    aaudio_result_t stop();
+    aaudio_result_t stop() EXCLUDES(mLock);
 
     /**
      * Discard any data held by the underlying HAL or Service.
      *
      * An AAUDIO_SERVICE_EVENT_FLUSHED will be sent to the client when complete.
      */
-    aaudio_result_t flush();
+    aaudio_result_t flush() EXCLUDES(mLock);
 
     virtual aaudio_result_t startClient(const android::AudioClient& client,
                                         const audio_attributes_t *attr __unused,
@@ -126,9 +126,9 @@
         return AAUDIO_ERROR_UNAVAILABLE;
     }
 
-    aaudio_result_t registerAudioThread(pid_t clientThreadId, int priority);
+    aaudio_result_t registerAudioThread(pid_t clientThreadId, int priority) EXCLUDES(mLock);
 
-    aaudio_result_t unregisterAudioThread(pid_t clientThreadId);
+    aaudio_result_t unregisterAudioThread(pid_t clientThreadId) EXCLUDES(mLock);
 
     bool isRunning() const {
         return mState == AAUDIO_STREAM_STATE_STARTED;
@@ -137,7 +137,7 @@
     /**
      * Fill in a parcelable description of stream.
      */
-    aaudio_result_t getDescription(AudioEndpointParcelable &parcelable);
+    aaudio_result_t getDescription(AudioEndpointParcelable &parcelable) EXCLUDES(mLock);
 
     void setRegisteredThread(pid_t pid) {
         mRegisteredClientThread = pid;
@@ -153,7 +153,7 @@
 
     void run() override; // to implement Runnable
 
-    void disconnect();
+    void disconnect() EXCLUDES(mLock);
 
     const android::AudioClient &getAudioClient() {
         return mMmapClient;
@@ -248,7 +248,7 @@
 
     aaudio_result_t writeUpMessageQueue(AAudioServiceMessage *command);
 
-    aaudio_result_t sendCurrentTimestamp();
+    aaudio_result_t sendCurrentTimestamp() EXCLUDES(mLock);
 
     aaudio_result_t sendXRunCount(int32_t xRunCount);
 
@@ -265,6 +265,13 @@
 
     aaudio_stream_state_t   mState = AAUDIO_STREAM_STATE_UNINITIALIZED;
 
+    bool isDisconnected_l() const REQUIRES(mLock) {
+        return mDisconnected;
+    }
+    void setDisconnected_l(bool flag) REQUIRES(mLock) {
+        mDisconnected = flag;
+    }
+
     pid_t                   mRegisteredClientThread = ILLEGAL_THREAD_ID;
 
     std::mutex              mUpMessageQueueLock;
@@ -322,6 +329,8 @@
     // for example a full message queue. Note that this atomic is unrelated to mCloseNeeded.
     std::atomic<bool>       mSuspended{false};
 
+    bool                    mDisconnected GUARDED_BY(mLock) {false};
+
 protected:
     // Locking order is important.
     // Acquire mLock before acquiring AAudioServiceEndpoint::mLockStreams
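The EXCLUDES/REQUIRES/GUARDED_BY annotations added in this header split the API along that locking order: public entry points acquire mLock themselves, the *_l helpers assume it is already held, and the new mDisconnected flag may only be touched under it, which lets -Wthread-safety flag violations at compile time. A minimal sketch of the convention, assuming the usual thread_annotations.h macros used elsewhere in the tree (names are placeholders):

    #include <mutex>

    class ExampleStream {
    public:
        bool stop() EXCLUDES(mLock) {             // takes the lock itself
            std::lock_guard<std::mutex> guard(mLock);
            return stop_l();
        }
    private:
        bool stop_l() REQUIRES(mLock) {           // documents that the caller holds mLock
            mDisconnected = true;                 // guarded write, lock is held
            return true;
        }
        mutable std::mutex      mLock;
        bool                    mDisconnected GUARDED_BY(mLock) {false};
    };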
diff --git a/services/oboeservice/AAudioServiceStreamMMAP.h b/services/oboeservice/AAudioServiceStreamMMAP.h
index 6ba1725..667465a 100644
--- a/services/oboeservice/AAudioServiceStreamMMAP.h
+++ b/services/oboeservice/AAudioServiceStreamMMAP.h
@@ -73,7 +73,8 @@
 
     aaudio_result_t getAudioDataDescription(AudioEndpointParcelable &parcelable) override;
 
-    aaudio_result_t getFreeRunningPosition(int64_t *positionFrames, int64_t *timeNanos) override;
+    aaudio_result_t getFreeRunningPosition(int64_t *positionFrames,
+            int64_t *timeNanos) EXCLUDES(mLock) override;
 
     aaudio_result_t getHardwareTimestamp(int64_t *positionFrames, int64_t *timeNanos) override;
 
diff --git a/services/tuner/TunerDescrambler.cpp b/services/tuner/TunerDescrambler.cpp
index 16338db..bdf826c 100644
--- a/services/tuner/TunerDescrambler.cpp
+++ b/services/tuner/TunerDescrambler.cpp
@@ -67,8 +67,9 @@
         return Status::fromServiceSpecificError(static_cast<int32_t>(Result::UNAVAILABLE));
     }
 
-    Result res = mDescrambler->addPid(getHidlDemuxPid(pid),
-            static_cast<TunerFilter*>(optionalSourceFilter.get())->getHalFilter());
+    sp<IFilter> halFilter = (optionalSourceFilter == nullptr)
+            ? nullptr : static_cast<TunerFilter*>(optionalSourceFilter.get())->getHalFilter();
+    Result res = mDescrambler->addPid(getHidlDemuxPid(pid), halFilter);
     if (res != Result::SUCCESS) {
         return ::ndk::ScopedAStatus::fromServiceSpecificError(static_cast<int32_t>(res));
     }
@@ -82,8 +83,9 @@
         return Status::fromServiceSpecificError(static_cast<int32_t>(Result::UNAVAILABLE));
     }
 
-    Result res = mDescrambler->removePid(getHidlDemuxPid(pid),
-            static_cast<TunerFilter*>(optionalSourceFilter.get())->getHalFilter());
+    sp<IFilter> halFilter = (optionalSourceFilter == nullptr)
+            ? nullptr : static_cast<TunerFilter*>(optionalSourceFilter.get())->getHalFilter();
+    Result res = mDescrambler->removePid(getHidlDemuxPid(pid), halFilter);
     if (res != Result::SUCCESS) {
         return ::ndk::ScopedAStatus::fromServiceSpecificError(static_cast<int32_t>(res));
     }
diff --git a/services/tuner/TunerFilter.cpp b/services/tuner/TunerFilter.cpp
index 39a6723..e957b83 100644
--- a/services/tuner/TunerFilter.cpp
+++ b/services/tuner/TunerFilter.cpp
@@ -57,10 +57,10 @@
         return Status::fromServiceSpecificError(static_cast<int32_t>(Result::UNAVAILABLE));
     }
 
-    MQDesc dvrMQDesc;
+    MQDesc filterMQDesc;
     Result res;
     mFilter->getQueueDesc([&](Result r, const MQDesc& desc) {
-        dvrMQDesc = desc;
+        filterMQDesc = desc;
         res = r;
     });
     if (res != Result::SUCCESS) {
@@ -69,7 +69,7 @@
 
     AidlMQDesc aidlMQDesc;
     unsafeHidlToAidlMQDescriptor<uint8_t, int8_t, SynchronizedReadWrite>(
-                dvrMQDesc,  &aidlMQDesc);
+                filterMQDesc,  &aidlMQDesc);
     *_aidl_return = move(aidlMQDesc);
     return Status::ok();
 }
@@ -471,7 +471,7 @@
         res = r;
         if (res == Result::SUCCESS) {
             TunerFilterSharedHandleInfo info{
-                .handle = dupToAidl(hidl_handle(avMemory.getNativeHandle())),
+                .handle = dupToAidl(avMemory),
                 .size = static_cast<int64_t>(avMemSize),
             };
             *_aidl_return = move(info);
@@ -480,7 +480,10 @@
         }
     });
 
-    return Status::fromServiceSpecificError(static_cast<int32_t>(res));
+    if (res != Result::SUCCESS) {
+        return Status::fromServiceSpecificError(static_cast<int32_t>(res));
+    }
+    return Status::ok();
 }
 
 Status TunerFilter::releaseAvHandle(
@@ -497,7 +500,6 @@
     return Status::ok();
 }
 
-
 Status TunerFilter::start() {
     if (mFilter == nullptr) {
         ALOGE("IFilter is not initialized");
diff --git a/services/tuner/TunerLnb.cpp b/services/tuner/TunerLnb.cpp
index 4a5acf5..77248d4 100644
--- a/services/tuner/TunerLnb.cpp
+++ b/services/tuner/TunerLnb.cpp
@@ -48,7 +48,10 @@
 
     sp<ILnbCallback> lnbCallback = new LnbCallback(tunerLnbCallback);
     Result status = mLnb->setCallback(lnbCallback);
-    return Status::fromServiceSpecificError(static_cast<int32_t>(status));
+    if (status != Result::SUCCESS) {
+        return Status::fromServiceSpecificError(static_cast<int32_t>(status));
+    }
+    return Status::ok();
 }
 
 Status TunerLnb::setVoltage(int voltage) {
@@ -58,7 +61,10 @@
     }
 
     Result status = mLnb->setVoltage(static_cast<LnbVoltage>(voltage));
-    return Status::fromServiceSpecificError(static_cast<int32_t>(status));
+    if (status != Result::SUCCESS) {
+        return Status::fromServiceSpecificError(static_cast<int32_t>(status));
+    }
+    return Status::ok();
 }
 
 Status TunerLnb::setTone(int tone) {
@@ -68,7 +74,10 @@
     }
 
     Result status = mLnb->setTone(static_cast<LnbTone>(tone));
-    return Status::fromServiceSpecificError(static_cast<int32_t>(status));
+    if (status != Result::SUCCESS) {
+        return Status::fromServiceSpecificError(static_cast<int32_t>(status));
+    }
+    return Status::ok();
 }
 
 Status TunerLnb::setSatellitePosition(int position) {
@@ -78,7 +87,10 @@
     }
 
     Result status = mLnb->setSatellitePosition(static_cast<LnbPosition>(position));
-    return Status::fromServiceSpecificError(static_cast<int32_t>(status));
+    if (status != Result::SUCCESS) {
+        return Status::fromServiceSpecificError(static_cast<int32_t>(status));
+    }
+    return Status::ok();
 }
 
 Status TunerLnb::sendDiseqcMessage(const vector<uint8_t>& diseqcMessage) {
@@ -88,7 +100,10 @@
     }
 
     Result status = mLnb->sendDiseqcMessage(diseqcMessage);
-    return Status::fromServiceSpecificError(static_cast<int32_t>(status));
+    if (status != Result::SUCCESS) {
+        return Status::fromServiceSpecificError(static_cast<int32_t>(status));
+    }
+    return Status::ok();
 }
 
 Status TunerLnb::close() {
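The Tuner changes above and in TunerTimeFilter below all make the same correction: a HAL Result of SUCCESS must map to Status::ok() rather than being wrapped in fromServiceSpecificError(), otherwise every successful call reads as a failure on the AIDL side. If the pattern keeps recurring it could be folded into a small helper; a hypothetical sketch (toAidlStatus is not part of this patch, and Status/Result are the aliases these files already use):

    // Hypothetical helper: map a Tuner HAL Result to an AIDL Status.
    static inline Status toAidlStatus(Result res) {
        if (res != Result::SUCCESS) {
            return Status::fromServiceSpecificError(static_cast<int32_t>(res));
        }
        return Status::ok();
    }

    // e.g. TunerLnb::setTone() could then end with:
    //     return toAidlStatus(mLnb->setTone(static_cast<LnbTone>(tone)));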
diff --git a/services/tuner/TunerTimeFilter.cpp b/services/tuner/TunerTimeFilter.cpp
index 25e1ad9..ea9da30 100644
--- a/services/tuner/TunerTimeFilter.cpp
+++ b/services/tuner/TunerTimeFilter.cpp
@@ -38,7 +38,10 @@
     }
 
     Result status = mTimeFilter->setTimeStamp(timeStamp);
-    return Status::fromServiceSpecificError(static_cast<int32_t>(status));
+    if (status != Result::SUCCESS) {
+        return Status::fromServiceSpecificError(static_cast<int32_t>(status));
+    }
+    return Status::ok();
 }
 
 Status TunerTimeFilter::clearTimeStamp() {
@@ -48,7 +51,10 @@
     }
 
     Result status = mTimeFilter->clearTimeStamp();
-    return Status::fromServiceSpecificError(static_cast<int32_t>(status));
+    if (status != Result::SUCCESS) {
+        return Status::fromServiceSpecificError(static_cast<int32_t>(status));
+    }
+    return Status::ok();
 }
 
 Status TunerTimeFilter::getSourceTime(int64_t* _aidl_return) {
@@ -66,8 +72,9 @@
             });
     if (status != Result::SUCCESS) {
         *_aidl_return = (int64_t)Constant64Bit::INVALID_PRESENTATION_TIME_STAMP;
+        return Status::fromServiceSpecificError(static_cast<int32_t>(status));
     }
-    return Status::fromServiceSpecificError(static_cast<int32_t>(status));
+    return Status::ok();
 }
 
 Status TunerTimeFilter::getTimeStamp(int64_t* _aidl_return) {
@@ -85,8 +92,9 @@
             });
     if (status != Result::SUCCESS) {
         *_aidl_return = (int64_t)Constant64Bit::INVALID_PRESENTATION_TIME_STAMP;
+        return Status::fromServiceSpecificError(static_cast<int32_t>(status));
     }
-    return Status::fromServiceSpecificError(static_cast<int32_t>(status));
+    return Status::ok();
 }
 
 Status TunerTimeFilter::close() {