Merge "NuPlayerDecoder: check mCodec before dereferencing" into oc-mr1-dev
diff --git a/camera/ndk/include/camera/NdkCameraMetadataTags.h b/camera/ndk/include/camera/NdkCameraMetadataTags.h
index 8b76cdf..629d75a 100644
--- a/camera/ndk/include/camera/NdkCameraMetadataTags.h
+++ b/camera/ndk/include/camera/NdkCameraMetadataTags.h
@@ -115,11 +115,13 @@
      * <p>The mode control selects how the image data is converted from the
      * sensor's native color into linear sRGB color.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: byte (acamera_metadata_enum_android_color_correction_mode_t)</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
      *   <li>ACaptureRequest</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>When auto-white balance (AWB) is enabled with ACAMERA_CONTROL_AWB_MODE, this
      * control is overridden by the AWB routine. When AWB is disabled, the
@@ -164,17 +166,19 @@
      * @see ACAMERA_COLOR_CORRECTION_TRANSFORM
      * @see ACAMERA_CONTROL_AWB_MODE
      */
-    ACAMERA_COLOR_CORRECTION_MODE =                             // byte (enum)
+    ACAMERA_COLOR_CORRECTION_MODE =                             // byte (acamera_metadata_enum_android_color_correction_mode_t)
             ACAMERA_COLOR_CORRECTION_START,
     /**
      * <p>A color transform matrix to use to transform
      * from sensor RGB color space to output linear sRGB color space.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: rational[3*3]</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
      *   <li>ACaptureRequest</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>This matrix is either set by the camera device when the request
      * ACAMERA_COLOR_CORRECTION_MODE is not TRANSFORM_MATRIX, or
@@ -196,11 +200,13 @@
      * <p>Gains applying to Bayer raw color channels for
      * white-balance.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: float[4]</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
      *   <li>ACaptureRequest</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>These per-channel gains are either set by the camera device
      * when the request ACAMERA_COLOR_CORRECTION_MODE is not
@@ -221,11 +227,13 @@
     /**
      * <p>Mode of operation for the chromatic aberration correction algorithm.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: byte (acamera_metadata_enum_android_color_correction_aberration_mode_t)</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
      *   <li>ACaptureRequest</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>Chromatic (color) aberration is caused by the fact that different wavelengths of light
     * cannot focus on the same point after exiting from the lens. This metadata defines
@@ -239,7 +247,7 @@
      * applying aberration correction.</p>
      * <p>LEGACY devices will always be in FAST mode.</p>
      */
-    ACAMERA_COLOR_CORRECTION_ABERRATION_MODE =                  // byte (enum)
+    ACAMERA_COLOR_CORRECTION_ABERRATION_MODE =                  // byte (acamera_metadata_enum_android_color_correction_aberration_mode_t)
             ACAMERA_COLOR_CORRECTION_START + 3,
     /**
      * <p>List of aberration correction modes for ACAMERA_COLOR_CORRECTION_ABERRATION_MODE that are
@@ -247,10 +255,12 @@
      *
      * @see ACAMERA_COLOR_CORRECTION_ABERRATION_MODE
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: byte[n]</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>This key lists the valid modes for ACAMERA_COLOR_CORRECTION_ABERRATION_MODE.  If no
      * aberration correction modes are available for a device, this list will solely include
@@ -269,11 +279,13 @@
      * <p>The desired setting for the camera device's auto-exposure
      * algorithm's antibanding compensation.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: byte (acamera_metadata_enum_android_control_ae_antibanding_mode_t)</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
      *   <li>ACaptureRequest</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>Some kinds of lighting fixtures, such as some fluorescent
      * lights, flicker at the rate of the power supply frequency
@@ -310,17 +322,19 @@
      * @see ACAMERA_CONTROL_MODE
      * @see ACAMERA_STATISTICS_SCENE_FLICKER
      */
-    ACAMERA_CONTROL_AE_ANTIBANDING_MODE =                       // byte (enum)
+    ACAMERA_CONTROL_AE_ANTIBANDING_MODE =                       // byte (acamera_metadata_enum_android_control_ae_antibanding_mode_t)
             ACAMERA_CONTROL_START,
     /**
      * <p>Adjustment to auto-exposure (AE) target image
      * brightness.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: int32</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
      *   <li>ACaptureRequest</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>The adjustment is measured as a count of steps, with the
      * step size defined by ACAMERA_CONTROL_AE_COMPENSATION_STEP and the
@@ -350,11 +364,13 @@
      * <p>Whether auto-exposure (AE) is currently locked to its latest
      * calculated values.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: byte (acamera_metadata_enum_android_control_ae_lock_t)</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
      *   <li>ACaptureRequest</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>When set to <code>true</code> (ON), the AE algorithm is locked to its latest parameters,
      * and will not change exposure settings until the lock is set to <code>false</code> (OFF).</p>
@@ -398,17 +414,19 @@
      * @see ACAMERA_SENSOR_EXPOSURE_TIME
      * @see ACAMERA_SENSOR_SENSITIVITY
      */
-    ACAMERA_CONTROL_AE_LOCK =                                   // byte (enum)
+    ACAMERA_CONTROL_AE_LOCK =                                   // byte (acamera_metadata_enum_android_control_ae_lock_t)
             ACAMERA_CONTROL_START + 2,
     /**
      * <p>The desired mode for the camera device's
      * auto-exposure routine.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: byte (acamera_metadata_enum_android_control_ae_mode_t)</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
      *   <li>ACaptureRequest</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>This control is only effective if ACAMERA_CONTROL_MODE is
      * AUTO.</p>
@@ -436,16 +454,18 @@
      * @see ACAMERA_SENSOR_FRAME_DURATION
      * @see ACAMERA_SENSOR_SENSITIVITY
      */
-    ACAMERA_CONTROL_AE_MODE =                                   // byte (enum)
+    ACAMERA_CONTROL_AE_MODE =                                   // byte (acamera_metadata_enum_android_control_ae_mode_t)
             ACAMERA_CONTROL_START + 3,
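
    A minimal usage sketch (illustration only, not part of this header): byte
    tags such as ACAMERA_CONTROL_AE_MODE are written with
    ACaptureRequest_setEntry_u8, declared in <camera/NdkCaptureRequest.h>. The
    `request` handle is assumed to come from ACameraDevice_createCaptureRequest.

        uint8_t aeMode = ACAMERA_CONTROL_AE_MODE_ON;
        if (ACaptureRequest_setEntry_u8(request, ACAMERA_CONTROL_AE_MODE,
                                        /*count*/ 1, &aeMode) != ACAMERA_OK) {
            // The mode was rejected; consult ACAMERA_CONTROL_AE_AVAILABLE_MODES
            // before choosing a value.
        }
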
     /**
      * <p>List of metering areas to use for auto-exposure adjustment.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: int32[5*area_count]</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
      *   <li>ACaptureRequest</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>Not available if android.control.maxRegionsAe is 0.
      * Otherwise will always be present.</p>
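
    A sketch of setting one metering area (illustration only): each area is
    five int32 values {xmin, ymin, xmax, ymax, weight} in active pixel array
    coordinates. The rectangle below is a placeholder and `request` is assumed
    to be an existing ACaptureRequest.

        // One placeholder metering rectangle with a moderate weight.
        int32_t aeRegion[5] = {0, 0, 640, 480, 1000};
        ACaptureRequest_setEntry_i32(request, ACAMERA_CONTROL_AE_REGIONS,
                                     /*count*/ 5, aeRegion);
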
@@ -486,11 +506,13 @@
      * adjust the capture frame rate to maintain good
      * exposure.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: int32[2]</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
      *   <li>ACaptureRequest</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>Only constrains auto-exposure (AE) algorithm, not
      * manual control of ACAMERA_SENSOR_EXPOSURE_TIME and
@@ -505,11 +527,13 @@
      * <p>Whether the camera device will trigger a precapture
      * metering sequence when it processes this request.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: byte (acamera_metadata_enum_android_control_ae_precapture_trigger_t)</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
      *   <li>ACaptureRequest</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>This entry is normally set to IDLE, or is not
      * included at all in the request settings. When included and
@@ -563,17 +587,19 @@
      * @see ACAMERA_CONTROL_AF_TRIGGER
      * @see ACAMERA_CONTROL_CAPTURE_INTENT
      */
-    ACAMERA_CONTROL_AE_PRECAPTURE_TRIGGER =                     // byte (enum)
+    ACAMERA_CONTROL_AE_PRECAPTURE_TRIGGER =                     // byte (acamera_metadata_enum_android_control_ae_precapture_trigger_t)
             ACAMERA_CONTROL_START + 6,
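
    A sketch of firing the precapture sequence (illustration only; `request`
    is assumed to exist):

        uint8_t trigger = ACAMERA_CONTROL_AE_PRECAPTURE_TRIGGER_START;
        ACaptureRequest_setEntry_u8(request,
                ACAMERA_CONTROL_AE_PRECAPTURE_TRIGGER, 1, &trigger);
        // Submit this request exactly once; later repeating requests should
        // leave the trigger at IDLE (or omit it) so the sequence is not
        // restarted, then watch ACAMERA_CONTROL_AE_STATE in capture results.
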
     /**
      * <p>Whether auto-focus (AF) is currently enabled, and what
      * mode it is set to.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: byte (acamera_metadata_enum_android_control_af_mode_t)</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
      *   <li>ACaptureRequest</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>Only effective if ACAMERA_CONTROL_MODE = AUTO and the lens is not fixed focus
      * (i.e. <code>ACAMERA_LENS_INFO_MINIMUM_FOCUS_DISTANCE &gt; 0</code>). Also note that
@@ -590,16 +616,18 @@
      * @see ACAMERA_CONTROL_MODE
      * @see ACAMERA_LENS_INFO_MINIMUM_FOCUS_DISTANCE
      */
-    ACAMERA_CONTROL_AF_MODE =                                   // byte (enum)
+    ACAMERA_CONTROL_AF_MODE =                                   // byte (acamera_metadata_enum_android_control_af_mode_t)
             ACAMERA_CONTROL_START + 7,
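
    A sketch for the tag above (illustration only; `request` assumed):

        uint8_t afMode = ACAMERA_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
        ACaptureRequest_setEntry_u8(request, ACAMERA_CONTROL_AF_MODE,
                                    1, &afMode);
        // Only meaningful when ACAMERA_CONTROL_MODE is AUTO and the lens is
        // not fixed focus; valid values are listed in
        // ACAMERA_CONTROL_AF_AVAILABLE_MODES.
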
     /**
      * <p>List of metering areas to use for auto-focus.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: int32[5*area_count]</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
      *   <li>ACaptureRequest</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>Not available if android.control.maxRegionsAf is 0.
      * Otherwise will always be present.</p>
@@ -638,11 +666,13 @@
     /**
      * <p>Whether the camera device will trigger autofocus for this request.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: byte (acamera_metadata_enum_android_control_af_trigger_t)</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
      *   <li>ACaptureRequest</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>This entry is normally set to IDLE, or is not
      * included at all in the request settings.</p>
@@ -665,17 +695,19 @@
      * @see ACAMERA_CONTROL_AE_PRECAPTURE_TRIGGER
      * @see ACAMERA_CONTROL_AF_STATE
      */
-    ACAMERA_CONTROL_AF_TRIGGER =                                // byte (enum)
+    ACAMERA_CONTROL_AF_TRIGGER =                                // byte (acamera_metadata_enum_android_control_af_trigger_t)
             ACAMERA_CONTROL_START + 9,
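
    A sketch of starting an autofocus scan (illustration only; `request`
    assumed):

        uint8_t afTrigger = ACAMERA_CONTROL_AF_TRIGGER_START;
        ACaptureRequest_setEntry_u8(request, ACAMERA_CONTROL_AF_TRIGGER,
                                    1, &afTrigger);
        // As with the AE precapture trigger, set START in a single request,
        // then track progress through ACAMERA_CONTROL_AF_STATE.
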
     /**
      * <p>Whether auto-white balance (AWB) is currently locked to its
      * latest calculated values.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: byte (acamera_metadata_enum_android_control_awb_lock_t)</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
      *   <li>ACaptureRequest</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>When set to <code>true</code> (ON), the AWB algorithm is locked to its latest parameters,
      * and will not change color balance settings until the lock is set to <code>false</code> (OFF).</p>
@@ -699,18 +731,20 @@
      *
      * @see ACAMERA_CONTROL_AWB_MODE
      */
-    ACAMERA_CONTROL_AWB_LOCK =                                  // byte (enum)
+    ACAMERA_CONTROL_AWB_LOCK =                                  // byte (acamera_metadata_enum_android_control_awb_lock_t)
             ACAMERA_CONTROL_START + 10,
     /**
      * <p>Whether auto-white balance (AWB) is currently setting the color
      * transform fields, and what its illumination target
      * is.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: byte (acamera_metadata_enum_android_control_awb_mode_t)</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
      *   <li>ACaptureRequest</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>This control is only effective if ACAMERA_CONTROL_MODE is AUTO.</p>
      * <p>When set to the ON mode, the camera device's auto-white balance
@@ -739,17 +773,19 @@
      * @see ACAMERA_CONTROL_AWB_LOCK
      * @see ACAMERA_CONTROL_MODE
      */
-    ACAMERA_CONTROL_AWB_MODE =                                  // byte (enum)
+    ACAMERA_CONTROL_AWB_MODE =                                  // byte (acamera_metadata_enum_android_control_awb_mode_t)
             ACAMERA_CONTROL_START + 11,
     /**
      * <p>List of metering areas to use for auto-white-balance illuminant
      * estimation.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: int32[5*area_count]</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
      *   <li>ACaptureRequest</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>Not available if android.control.maxRegionsAwb is 0.
      * Otherwise will always be present.</p>
@@ -791,11 +827,13 @@
      * of this capture, to help the camera device to decide optimal 3A
      * strategy.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: byte (acamera_metadata_enum_android_control_capture_intent_t)</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
      *   <li>ACaptureRequest</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>This control (except for MANUAL) is only effective if
      * <code>ACAMERA_CONTROL_MODE != OFF</code> and any 3A routine is active.</p>
@@ -807,16 +845,18 @@
      * @see ACAMERA_CONTROL_MODE
      * @see ACAMERA_REQUEST_AVAILABLE_CAPABILITIES
      */
-    ACAMERA_CONTROL_CAPTURE_INTENT =                            // byte (enum)
+    ACAMERA_CONTROL_CAPTURE_INTENT =                            // byte (acamera_metadata_enum_android_control_capture_intent_t)
             ACAMERA_CONTROL_START + 13,
     /**
      * <p>A special color effect to apply.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: byte (acamera_metadata_enum_android_control_effect_mode_t)</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
      *   <li>ACaptureRequest</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>When this mode is set, a color effect will be applied
      * to images produced by the camera device. The interpretation
@@ -825,17 +865,19 @@
      * depended on to be consistent (or present) across all
      * devices.</p>
      */
-    ACAMERA_CONTROL_EFFECT_MODE =                               // byte (enum)
+    ACAMERA_CONTROL_EFFECT_MODE =                               // byte (acamera_metadata_enum_android_control_effect_mode_t)
             ACAMERA_CONTROL_START + 14,
     /**
      * <p>Overall mode of 3A (auto-exposure, auto-white-balance, auto-focus) control
      * routines.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: byte (acamera_metadata_enum_android_control_mode_t)</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
      *   <li>ACaptureRequest</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>This is a top-level 3A control switch. When set to OFF, all 3A control
      * by the camera device is disabled. The application must set the fields for
@@ -856,16 +898,18 @@
      *
      * @see ACAMERA_CONTROL_AF_MODE
      */
-    ACAMERA_CONTROL_MODE =                                      // byte (enum)
+    ACAMERA_CONTROL_MODE =                                      // byte (acamera_metadata_enum_android_control_mode_t)
             ACAMERA_CONTROL_START + 15,
     /**
      * <p>Control for which scene mode is currently active.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: byte (acamera_metadata_enum_android_control_scene_mode_t)</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
      *   <li>ACaptureRequest</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>Scene modes are custom camera modes optimized for a certain set of conditions and
      * capture settings.</p>
@@ -883,17 +927,19 @@
      * @see ACAMERA_CONTROL_AWB_MODE
      * @see ACAMERA_CONTROL_MODE
      */
-    ACAMERA_CONTROL_SCENE_MODE =                                // byte (enum)
+    ACAMERA_CONTROL_SCENE_MODE =                                // byte (acamera_metadata_enum_android_control_scene_mode_t)
             ACAMERA_CONTROL_START + 16,
     /**
      * <p>Whether video stabilization is
      * active.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: byte (acamera_metadata_enum_android_control_video_stabilization_mode_t)</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
      *   <li>ACaptureRequest</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>Video stabilization automatically warps images from
      * the camera in order to stabilize motion between consecutive frames.</p>
@@ -923,7 +969,7 @@
      * @see ACAMERA_LENS_OPTICAL_STABILIZATION_MODE
      * @see ACAMERA_SCALER_CROP_REGION
      */
-    ACAMERA_CONTROL_VIDEO_STABILIZATION_MODE =                  // byte (enum)
+    ACAMERA_CONTROL_VIDEO_STABILIZATION_MODE =                  // byte (acamera_metadata_enum_android_control_video_stabilization_mode_t)
             ACAMERA_CONTROL_START + 17,
     /**
      * <p>List of auto-exposure antibanding modes for ACAMERA_CONTROL_AE_ANTIBANDING_MODE that are
@@ -931,10 +977,12 @@
      *
      * @see ACAMERA_CONTROL_AE_ANTIBANDING_MODE
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: byte[n]</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>Not all of the auto-exposure anti-banding modes may be
      * supported by a given camera device. This field lists the
@@ -952,10 +1000,12 @@
      *
      * @see ACAMERA_CONTROL_AE_MODE
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: byte[n]</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>Not all the auto-exposure modes may be supported by a
      * given camera device, especially if no flash unit is
@@ -980,10 +1030,12 @@
      *
      * @see ACAMERA_CONTROL_AE_TARGET_FPS_RANGE
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: int32[2*n]</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>For devices at the LEGACY level or above:</p>
      * <ul>
@@ -1025,12 +1077,13 @@
      * @see ACAMERA_CONTROL_AE_COMPENSATION_STEP
      * @see ACAMERA_CONTROL_AE_EXPOSURE_COMPENSATION
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: int32[2]</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
-     * </ul>
+     * </ul></p>
      *
-     * <p>None</p>
      */
     ACAMERA_CONTROL_AE_COMPENSATION_RANGE =                     // int32[2]
             ACAMERA_CONTROL_START + 21,
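
    A sketch of clamping a desired exposure compensation to this range
    (illustration only; `chars` is assumed to come from
    ACameraManager_getCameraCharacteristics and `request` to exist):

        ACameraMetadata_const_entry range;
        int32_t comp = -2;  // desired value, in units of the compensation step
        if (ACameraMetadata_getConstEntry(chars,
                ACAMERA_CONTROL_AE_COMPENSATION_RANGE, &range) == ACAMERA_OK) {
            if (comp < range.data.i32[0]) comp = range.data.i32[0];
            if (comp > range.data.i32[1]) comp = range.data.i32[1];
            ACaptureRequest_setEntry_i32(request,
                    ACAMERA_CONTROL_AE_EXPOSURE_COMPENSATION, 1, &comp);
        }
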
@@ -1038,10 +1091,12 @@
      * <p>Smallest step by which the exposure compensation
      * can be changed.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: rational</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>This is the unit for ACAMERA_CONTROL_AE_EXPOSURE_COMPENSATION. For example, if this key has
      * a value of <code>1/2</code>, then a setting of <code>-2</code> for ACAMERA_CONTROL_AE_EXPOSURE_COMPENSATION means
@@ -1059,10 +1114,12 @@
      *
      * @see ACAMERA_CONTROL_AF_MODE
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: byte[n]</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>Not all the auto-focus modes may be supported by a
      * given camera device. This entry lists the valid modes for
@@ -1086,10 +1143,12 @@
      *
      * @see ACAMERA_CONTROL_EFFECT_MODE
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: byte[n]</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>This list contains the color effect modes that can be applied to
      * images produced by the camera device.
@@ -1111,10 +1170,12 @@
      *
      * @see ACAMERA_CONTROL_SCENE_MODE
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: byte[n]</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>This list contains scene modes that can be set for the camera device.
      * Only scene modes that have been fully implemented for the
@@ -1136,10 +1197,12 @@
      *
      * @see ACAMERA_CONTROL_VIDEO_STABILIZATION_MODE
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: byte[n]</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>OFF will always be listed.</p>
      */
@@ -1151,10 +1214,12 @@
      *
      * @see ACAMERA_CONTROL_AWB_MODE
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: byte[n]</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>Not all the auto-white-balance modes may be supported by a
      * given camera device. This entry lists the valid modes for
@@ -1183,22 +1248,25 @@
      * @see ACAMERA_CONTROL_AF_REGIONS
      * @see ACAMERA_CONTROL_AWB_REGIONS
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: int32[3]</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
-     * </ul>
+     * </ul></p>
      *
-     * <p>None</p>
      */
     ACAMERA_CONTROL_MAX_REGIONS =                               // int32[3]
             ACAMERA_CONTROL_START + 28,
     /**
      * <p>Current state of the auto-exposure (AE) algorithm.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: byte (acamera_metadata_enum_android_control_ae_state_t)</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>Switching between or enabling AE modes (ACAMERA_CONTROL_AE_MODE) always
      * resets the AE state to INACTIVE. Similarly, switching between ACAMERA_CONTROL_MODE,
@@ -1257,15 +1325,17 @@
      * @see ACAMERA_CONTROL_MODE
      * @see ACAMERA_CONTROL_SCENE_MODE
      */
-    ACAMERA_CONTROL_AE_STATE =                                  // byte (enum)
+    ACAMERA_CONTROL_AE_STATE =                                  // byte (acamera_metadata_enum_android_control_ae_state_t)
             ACAMERA_CONTROL_START + 31,
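
    A sketch of reading this state from a capture result (illustration only):

        // Inside an ACameraCaptureSession_captureCallback_result callback,
        // `result` is the const ACameraMetadata* handed to the callback.
        ACameraMetadata_const_entry entry;
        if (ACameraMetadata_getConstEntry(result, ACAMERA_CONTROL_AE_STATE,
                                          &entry) == ACAMERA_OK &&
            entry.data.u8[0] == ACAMERA_CONTROL_AE_STATE_CONVERGED) {
            // Exposure has settled; a still captured now should be well exposed.
        }
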
     /**
      * <p>Current state of auto-focus (AF) algorithm.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: byte (acamera_metadata_enum_android_control_af_state_t)</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>Switching between or enabling AF modes (ACAMERA_CONTROL_AF_MODE) always
      * resets the AF state to INACTIVE. Similarly, switching between ACAMERA_CONTROL_MODE,
@@ -1357,15 +1427,17 @@
      * @see ACAMERA_CONTROL_MODE
      * @see ACAMERA_CONTROL_SCENE_MODE
      */
-    ACAMERA_CONTROL_AF_STATE =                                  // byte (enum)
+    ACAMERA_CONTROL_AF_STATE =                                  // byte (acamera_metadata_enum_android_control_af_state_t)
             ACAMERA_CONTROL_START + 32,
     /**
      * <p>Current state of auto-white balance (AWB) algorithm.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: byte (acamera_metadata_enum_android_control_awb_state_t)</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>Switching between or enabling AWB modes (ACAMERA_CONTROL_AWB_MODE) always
      * resets the AWB state to INACTIVE. Similarly, switching between ACAMERA_CONTROL_MODE,
@@ -1408,37 +1480,41 @@
      * @see ACAMERA_CONTROL_MODE
      * @see ACAMERA_CONTROL_SCENE_MODE
      */
-    ACAMERA_CONTROL_AWB_STATE =                                 // byte (enum)
+    ACAMERA_CONTROL_AWB_STATE =                                 // byte (acamera_metadata_enum_android_control_awb_state_t)
             ACAMERA_CONTROL_START + 34,
     /**
      * <p>Whether the camera device supports ACAMERA_CONTROL_AE_LOCK</p>
      *
      * @see ACAMERA_CONTROL_AE_LOCK
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: byte (acamera_metadata_enum_android_control_ae_lock_available_t)</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>Devices with MANUAL_SENSOR capability or BURST_CAPTURE capability will always
      * list <code>true</code>. This includes FULL devices.</p>
      */
-    ACAMERA_CONTROL_AE_LOCK_AVAILABLE =                         // byte (enum)
+    ACAMERA_CONTROL_AE_LOCK_AVAILABLE =                         // byte (acamera_metadata_enum_android_control_ae_lock_available_t)
             ACAMERA_CONTROL_START + 36,
     /**
      * <p>Whether the camera device supports ACAMERA_CONTROL_AWB_LOCK</p>
      *
      * @see ACAMERA_CONTROL_AWB_LOCK
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: byte (acamera_metadata_enum_android_control_awb_lock_available_t)</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>Devices with MANUAL_POST_PROCESSING capability or BURST_CAPTURE capability will
      * always list <code>true</code>. This includes FULL devices.</p>
      */
-    ACAMERA_CONTROL_AWB_LOCK_AVAILABLE =                        // byte (enum)
+    ACAMERA_CONTROL_AWB_LOCK_AVAILABLE =                        // byte (acamera_metadata_enum_android_control_awb_lock_available_t)
             ACAMERA_CONTROL_START + 37,
     /**
      * <p>List of control modes for ACAMERA_CONTROL_MODE that are supported by this camera
@@ -1446,10 +1522,12 @@
      *
      * @see ACAMERA_CONTROL_MODE
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: byte[n]</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>This list contains control modes that can be set for the camera device.
      * LEGACY mode devices will always support AUTO mode. LIMITED and FULL
@@ -1463,10 +1541,12 @@
      *
      * @see ACAMERA_CONTROL_POST_RAW_SENSITIVITY_BOOST
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: int32[2]</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
-     * </ul>
+     * </ul></p>
      *
     * <p>Devices that support post RAW sensitivity boost will advertise the
     * ACAMERA_CONTROL_POST_RAW_SENSITIVITY_BOOST key for controlling
@@ -1484,11 +1564,13 @@
      * <p>The amount of additional sensitivity boost applied to output images
      * after RAW sensor data is captured.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: int32</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
      *   <li>ACaptureRequest</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>Some camera devices support additional digital sensitivity boosting in the
      * camera processing pipeline after sensor RAW image is captured.
@@ -1521,11 +1603,13 @@
      *
      * @see ACAMERA_CONTROL_CAPTURE_INTENT
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: byte (acamera_metadata_enum_android_control_enable_zsl_t)</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
      *   <li>ACaptureRequest</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>If enableZsl is <code>true</code>, the camera device may enable zero-shutter-lag mode for requests with
      * STILL_CAPTURE capture intent. The camera device may use images captured in the past to
@@ -1552,7 +1636,7 @@
      * @see ACAMERA_CONTROL_CAPTURE_INTENT
      * @see ACAMERA_SENSOR_TIMESTAMP
      */
-    ACAMERA_CONTROL_ENABLE_ZSL =                                // byte (enum)
+    ACAMERA_CONTROL_ENABLE_ZSL =                                // byte (acamera_metadata_enum_android_control_enable_zsl_t)
             ACAMERA_CONTROL_START + 41,
     ACAMERA_CONTROL_END,
 
@@ -1560,11 +1644,13 @@
      * <p>Operation mode for edge
      * enhancement.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: byte (acamera_metadata_enum_android_edge_mode_t)</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
      *   <li>ACaptureRequest</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>Edge enhancement improves sharpness and details in the captured image. OFF means
      * no enhancement will be applied by the camera device.</p>
@@ -1586,7 +1672,7 @@
      * The camera device may adjust its internal edge enhancement parameters for best
      * image quality based on the android.reprocess.effectiveExposureFactor, if it is set.</p>
      */
-    ACAMERA_EDGE_MODE =                                         // byte (enum)
+    ACAMERA_EDGE_MODE =                                         // byte (acamera_metadata_enum_android_edge_mode_t)
             ACAMERA_EDGE_START,
     /**
      * <p>List of edge enhancement modes for ACAMERA_EDGE_MODE that are supported by this camera
@@ -1594,10 +1680,12 @@
      *
      * @see ACAMERA_EDGE_MODE
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: byte[n]</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>Full-capability camera devices must always support OFF; camera devices that support
      * YUV_REPROCESSING or PRIVATE_REPROCESSING will list ZERO_SHUTTER_LAG; all devices will
@@ -1610,11 +1698,13 @@
     /**
     * <p>The desired mode for the camera device's flash control.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: byte (acamera_metadata_enum_android_flash_mode_t)</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
      *   <li>ACaptureRequest</li>
-     * </ul>
+     * </ul></p>
      *
     * <p>This control is only effective when a flash unit is available
      * (<code>ACAMERA_FLASH_INFO_AVAILABLE == true</code>).</p>
@@ -1635,16 +1725,18 @@
      * @see ACAMERA_FLASH_INFO_AVAILABLE
      * @see ACAMERA_FLASH_STATE
      */
-    ACAMERA_FLASH_MODE =                                        // byte (enum)
+    ACAMERA_FLASH_MODE =                                        // byte (acamera_metadata_enum_android_flash_mode_t)
             ACAMERA_FLASH_START + 2,
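
    A sketch of firing the flash for one capture (illustration only; `request`
    assumed):

        uint8_t flashMode = ACAMERA_FLASH_MODE_SINGLE;
        ACaptureRequest_setEntry_u8(request, ACAMERA_FLASH_MODE,
                                    1, &flashMode);
        // Effective only when ACAMERA_FLASH_INFO_AVAILABLE is true and
        // ACAMERA_CONTROL_AE_MODE is ON or OFF, per the text above.
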
     /**
      * <p>Current state of the flash
      * unit.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: byte (acamera_metadata_enum_android_flash_state_t)</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
-     * </ul>
+     * </ul></p>
      *
     * <p>When the camera device doesn't have a flash unit
      * (i.e. <code>ACAMERA_FLASH_INFO_AVAILABLE == false</code>), this state will always be UNAVAILABLE.
@@ -1664,7 +1756,7 @@
      * @see ACAMERA_FLASH_INFO_AVAILABLE
      * @see ACAMERA_FLASH_MODE
      */
-    ACAMERA_FLASH_STATE =                                       // byte (enum)
+    ACAMERA_FLASH_STATE =                                       // byte (acamera_metadata_enum_android_flash_state_t)
             ACAMERA_FLASH_START + 5,
     ACAMERA_FLASH_END,
 
@@ -1672,33 +1764,37 @@
      * <p>Whether this camera device has a
      * flash unit.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: byte (acamera_metadata_enum_android_flash_info_available_t)</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>Will be <code>false</code> if no flash is available.</p>
      * <p>If there is no flash unit, none of the flash controls do
      * anything.</p>
      */
-    ACAMERA_FLASH_INFO_AVAILABLE =                              // byte (enum)
+    ACAMERA_FLASH_INFO_AVAILABLE =                              // byte (acamera_metadata_enum_android_flash_info_available_t)
             ACAMERA_FLASH_INFO_START,
     ACAMERA_FLASH_INFO_END,
 
     /**
      * <p>Operational mode for hot pixel correction.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: byte (acamera_metadata_enum_android_hot_pixel_mode_t)</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
      *   <li>ACaptureRequest</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>Hotpixel correction interpolates out, or otherwise removes, pixels
      * that do not accurately measure the incoming light (i.e. pixels that
      * are stuck at an arbitrary value or are oversensitive).</p>
      */
-    ACAMERA_HOT_PIXEL_MODE =                                    // byte (enum)
+    ACAMERA_HOT_PIXEL_MODE =                                    // byte (acamera_metadata_enum_android_hot_pixel_mode_t)
             ACAMERA_HOT_PIXEL_START,
     /**
      * <p>List of hot pixel correction modes for ACAMERA_HOT_PIXEL_MODE that are supported by this
@@ -1706,10 +1802,12 @@
      *
      * @see ACAMERA_HOT_PIXEL_MODE
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: byte[n]</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>FULL mode camera devices will always support FAST.</p>
      */
@@ -1721,13 +1819,14 @@
      * <p>GPS coordinates to include in output JPEG
      * EXIF.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: double[3]</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
      *   <li>ACaptureRequest</li>
-     * </ul>
+     * </ul></p>
      *
-     * <p>None</p>
      */
     ACAMERA_JPEG_GPS_COORDINATES =                              // double[3]
             ACAMERA_JPEG_START,
@@ -1735,13 +1834,14 @@
      * <p>32 characters describing GPS algorithm to
      * include in EXIF.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: byte</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
      *   <li>ACaptureRequest</li>
-     * </ul>
+     * </ul></p>
      *
-     * <p>None</p>
      */
     ACAMERA_JPEG_GPS_PROCESSING_METHOD =                        // byte
             ACAMERA_JPEG_START + 1,
@@ -1749,24 +1849,27 @@
      * <p>Time GPS fix was made to include in
      * EXIF.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: int64</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
      *   <li>ACaptureRequest</li>
-     * </ul>
+     * </ul></p>
      *
-     * <p>None</p>
      */
     ACAMERA_JPEG_GPS_TIMESTAMP =                                // int64
             ACAMERA_JPEG_START + 2,
     /**
      * <p>The orientation for a JPEG image.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: int32</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
      *   <li>ACaptureRequest</li>
-     * </ul>
+     * </ul></p>
      *
     * <p>The clockwise rotation angle in degrees, relative to the orientation
     * of the camera, that the JPEG picture needs to be rotated by, to be viewed
@@ -1805,11 +1908,13 @@
      * <p>Compression quality of the final JPEG
      * image.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: byte</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
      *   <li>ACaptureRequest</li>
-     * </ul>
+     * </ul></p>
      *
     * <p>85-95 is the typical usage range.</p>
      */
@@ -1819,24 +1924,27 @@
      * <p>Compression quality of JPEG
      * thumbnail.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: byte</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
      *   <li>ACaptureRequest</li>
-     * </ul>
+     * </ul></p>
      *
-     * <p>None</p>
      */
     ACAMERA_JPEG_THUMBNAIL_QUALITY =                            // byte
             ACAMERA_JPEG_START + 5,
     /**
      * <p>Resolution of embedded JPEG thumbnail.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: int32[2]</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
      *   <li>ACaptureRequest</li>
-     * </ul>
+     * </ul></p>
      *
     * <p>When set to (0, 0), the JPEG EXIF will not contain a thumbnail,
     * but the captured JPEG will still be a valid image.</p>
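
    A sketch for the tag above (illustration only; `request` assumed):

        int32_t thumb[2] = {0, 0};  // (0, 0) omits the EXIF thumbnail entirely
        ACaptureRequest_setEntry_i32(request, ACAMERA_JPEG_THUMBNAIL_SIZE,
                                     2, thumb);
        // Any non-zero size should be taken from
        // ACAMERA_JPEG_AVAILABLE_THUMBNAIL_SIZES.
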
@@ -1871,10 +1979,12 @@
      *
      * @see ACAMERA_JPEG_THUMBNAIL_SIZE
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: int32[2*n]</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>This list will include at least one non-zero resolution, plus <code>(0,0)</code> for indicating no
      * thumbnail should be generated.</p>
@@ -1902,11 +2012,13 @@
      * <p>The desired lens aperture size, as a ratio of lens focal length to the
      * effective aperture diameter.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: float</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
      *   <li>ACaptureRequest</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>Setting this value is only supported on the camera devices that have a variable
      * aperture lens.</p>
@@ -1934,11 +2046,13 @@
     /**
      * <p>The desired setting for the lens neutral density filter(s).</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: float</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
      *   <li>ACaptureRequest</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>This control will not be supported on most camera devices.</p>
      * <p>Lens filters are typically used to lower the amount of light the
@@ -1960,11 +2074,13 @@
     /**
      * <p>The desired lens focal length; used for optical zoom.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: float</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
      *   <li>ACaptureRequest</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>This setting controls the physical focal length of the camera
      * device's lens. Changing the focal length changes the field of
@@ -1986,11 +2102,13 @@
      * <p>Desired distance to plane of sharpest focus,
      * measured from frontmost surface of the lens.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: float</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
      *   <li>ACaptureRequest</li>
-     * </ul>
+     * </ul></p>
      *
     * <p>Should be zero for fixed-focus cameras.</p>
      */
@@ -2000,11 +2118,13 @@
      * <p>Sets whether the camera device uses optical image stabilization (OIS)
      * when capturing images.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: byte (acamera_metadata_enum_android_lens_optical_stabilization_mode_t)</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
      *   <li>ACaptureRequest</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>OIS is used to compensate for motion blur due to small
      * movements of the camera during capture. Unlike digital image
@@ -2027,30 +2147,33 @@
      * @see ACAMERA_CONTROL_VIDEO_STABILIZATION_MODE
      * @see ACAMERA_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION
      */
-    ACAMERA_LENS_OPTICAL_STABILIZATION_MODE =                   // byte (enum)
+    ACAMERA_LENS_OPTICAL_STABILIZATION_MODE =                   // byte (acamera_metadata_enum_android_lens_optical_stabilization_mode_t)
             ACAMERA_LENS_START + 4,
     /**
      * <p>Direction the camera faces relative to
      * device screen.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: byte (acamera_metadata_enum_android_lens_facing_t)</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
-     * </ul>
+     * </ul></p>
      *
-     * <p>None</p>
      */
-    ACAMERA_LENS_FACING =                                       // byte (enum)
+    ACAMERA_LENS_FACING =                                       // byte (acamera_metadata_enum_android_lens_facing_t)
             ACAMERA_LENS_START + 5,
     /**
      * <p>The orientation of the camera relative to the sensor
      * coordinate system.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: float[4]</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
      *   <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>The four coefficients that describe the quaternion
      * rotation from the Android sensor coordinate system to a
@@ -2084,11 +2207,13 @@
     /**
      * <p>Position of the camera optical center.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: float[3]</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
      *   <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>The position of the camera device's lens optical center,
      * as a three-dimensional vector <code>(x,y,z)</code>, relative to the
@@ -2129,10 +2254,12 @@
      * <p>The range of scene distances that are in
      * sharp focus (depth of field).</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: float[2]</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
-     * </ul>
+     * </ul></p>
      *
     * <p>If variable focus is not supported, the camera device can still report a
     * fixed depth of field range.</p>
@@ -2142,10 +2269,12 @@
     /**
      * <p>Current lens status.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: byte (acamera_metadata_enum_android_lens_state_t)</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>For lens parameters ACAMERA_LENS_FOCAL_LENGTH, ACAMERA_LENS_FOCUS_DISTANCE,
      * ACAMERA_LENS_FILTER_DENSITY and ACAMERA_LENS_APERTURE, when changes are requested,
@@ -2176,17 +2305,19 @@
      * @see ACAMERA_LENS_INFO_AVAILABLE_FOCAL_LENGTHS
      * @see ACAMERA_LENS_INFO_MINIMUM_FOCUS_DISTANCE
      */
-    ACAMERA_LENS_STATE =                                        // byte (enum)
+    ACAMERA_LENS_STATE =                                        // byte (acamera_metadata_enum_android_lens_state_t)
             ACAMERA_LENS_START + 9,
     /**
      * <p>The parameters for this camera device's intrinsic
      * calibration.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: float[5]</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
      *   <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>The five calibration parameters that describe the
      * transform from camera-centric 3D coordinates to sensor
@@ -2245,11 +2376,13 @@
      * <p>The correction coefficients to correct for this camera device's
      * radial and tangential lens distortion.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: float[6]</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
      *   <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>Four radial distortion coefficients <code>[kappa_0, kappa_1, kappa_2,
      * kappa_3]</code> and two tangential distortion coefficients
@@ -2290,10 +2423,12 @@
      *
      * @see ACAMERA_LENS_APERTURE
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: float[n]</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>If the camera device doesn't support a variable lens aperture,
      * this list will contain only one value, which is the fixed aperture size.</p>
@@ -2308,10 +2443,12 @@
      *
      * @see ACAMERA_LENS_FILTER_DENSITY
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: float[n]</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>If a neutral density filter is not supported by this camera device,
      * this list will contain only 0. Otherwise, this list will include every
@@ -2325,10 +2462,12 @@
      *
      * @see ACAMERA_LENS_FOCAL_LENGTH
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: float[n]</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>If optical zoom is not supported, this list will only contain
      * a single value corresponding to the fixed focal length of the
@@ -2343,10 +2482,12 @@
      *
      * @see ACAMERA_LENS_OPTICAL_STABILIZATION_MODE
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: byte[n]</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>If OIS is not supported by a given camera device, this list will
      * contain only OFF.</p>
@@ -2356,10 +2497,12 @@
     /**
      * <p>Hyperfocal distance for this lens.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: float</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>If the lens is not fixed focus, the camera device will report this
      * field when ACAMERA_LENS_INFO_FOCUS_DISTANCE_CALIBRATION is APPROXIMATE or CALIBRATED.</p>
@@ -2372,10 +2515,12 @@
      * <p>Shortest distance from frontmost surface
      * of the lens that can be brought into sharp focus.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: float</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>If the lens is fixed-focus, this will be
      * 0.</p>
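
    A sketch of using this key to detect a fixed-focus lens (illustration
    only; `chars` is assumed to come from
    ACameraManager_getCameraCharacteristics):

        ACameraMetadata_const_entry mfd;
        if (ACameraMetadata_getConstEntry(chars,
                ACAMERA_LENS_INFO_MINIMUM_FOCUS_DISTANCE, &mfd) == ACAMERA_OK &&
            mfd.data.f[0] > 0.0f) {
            // The lens is not fixed focus, so AF modes other than OFF apply.
        }
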
@@ -2385,10 +2530,12 @@
     /**
      * <p>Dimensions of lens shading map.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: int32[2]</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>The map should be on the order of 30-40 rows and columns, and
      * must be smaller than 64x64.</p>
@@ -2398,10 +2545,12 @@
     /**
      * <p>The lens focus distance calibration quality.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: byte (acamera_metadata_enum_android_lens_info_focus_distance_calibration_t)</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>The lens focus distance calibration quality determines the reliability of
      * focus related metadata entries, i.e. ACAMERA_LENS_FOCUS_DISTANCE,
@@ -2422,18 +2571,20 @@
      * @see ACAMERA_LENS_INFO_HYPERFOCAL_DISTANCE
      * @see ACAMERA_LENS_INFO_MINIMUM_FOCUS_DISTANCE
      */
-    ACAMERA_LENS_INFO_FOCUS_DISTANCE_CALIBRATION =              // byte (enum)
+    ACAMERA_LENS_INFO_FOCUS_DISTANCE_CALIBRATION =              // byte (acamera_metadata_enum_android_lens_info_focus_distance_calibration_t)
             ACAMERA_LENS_INFO_START + 7,
     ACAMERA_LENS_INFO_END,
 
     /**
      * <p>Mode of operation for the noise reduction algorithm.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: byte (acamera_metadata_enum_android_noise_reduction_mode_t)</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
      *   <li>ACaptureRequest</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>The noise reduction algorithm attempts to improve image quality by removing
      * excessive noise added by the capture process, especially in dark conditions.</p>
@@ -2463,7 +2614,7 @@
      *
      * @see ACAMERA_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES
      */
-    ACAMERA_NOISE_REDUCTION_MODE =                              // byte (enum)
+    ACAMERA_NOISE_REDUCTION_MODE =                              // byte (acamera_metadata_enum_android_noise_reduction_mode_t)
             ACAMERA_NOISE_REDUCTION_START,
     /**
      * <p>List of noise reduction modes for ACAMERA_NOISE_REDUCTION_MODE that are supported
@@ -2471,10 +2622,12 @@
      *
      * @see ACAMERA_NOISE_REDUCTION_MODE
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: byte[n]</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>Full-capability camera devices will always support OFF and FAST.</p>
      * <p>Camera devices that support YUV_REPROCESSING or PRIVATE_REPROCESSING will support
@@ -2489,10 +2642,12 @@
      * <p>The maximum numbers of different types of output streams
      * that can be configured and used simultaneously by a camera device.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: int32[3]</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
-     * </ul>
+     * </ul></p>
      *
     * <p>This is a 3 element tuple that contains the max number of simultaneous output
     * streams for raw sensor, processed (but not stalling), and processed (and stalling)
@@ -2523,10 +2678,12 @@
      * through from when it was exposed to when the final completed result
      * was available to the framework.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: byte</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>Depending on what settings are used in the request, and
      * what streams are configured, the data may undergo less processing,
@@ -2542,10 +2699,12 @@
      * has to go through from when it's exposed to when it's available
      * to the framework.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: byte</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>A typical minimum value for this is 2 (one stage to expose,
      * one stage to readout) from the sensor. The ISP then usually adds
@@ -2568,10 +2727,12 @@
      * <p>Defines how many sub-components
      * a result will be composed of.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: int32</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>In order to combat the pipeline latency, partial results
      * may be delivered to the application layer from the camera device as
@@ -2592,10 +2753,12 @@
      * <p>List of capabilities that this camera device
      * advertises as fully supporting.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: byte[n] (acamera_metadata_enum_android_request_available_capabilities_t)</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>A capability is a contract that the camera device makes in order
      * to be able to satisfy one or more use cases.</p>
@@ -2620,16 +2783,18 @@
      * @see ACAMERA_REQUEST_AVAILABLE_REQUEST_KEYS
      * @see ACAMERA_REQUEST_AVAILABLE_RESULT_KEYS
      */
-    ACAMERA_REQUEST_AVAILABLE_CAPABILITIES =                    // byte[n] (enum)
+    ACAMERA_REQUEST_AVAILABLE_CAPABILITIES =                    // byte[n] (acamera_metadata_enum_android_request_available_capabilities_t)
             ACAMERA_REQUEST_START + 12,
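Illustrative sketch (not part of this change): applications consume this list through
ACameraMetadata_getConstEntry. Assuming `chars` comes from
ACameraManager_getCameraCharacteristics, a probe for the MANUAL_SENSOR capability
might look like:

    // Returns true if the device advertises MANUAL_SENSOR in
    // ACAMERA_REQUEST_AVAILABLE_CAPABILITIES.
    bool hasManualSensor(const ACameraMetadata* chars) {
        ACameraMetadata_const_entry entry;
        if (ACameraMetadata_getConstEntry(chars,
                ACAMERA_REQUEST_AVAILABLE_CAPABILITIES, &entry) != ACAMERA_OK) {
            return false;
        }
        for (uint32_t i = 0; i < entry.count; i++) {
            if (entry.data.u8[i] ==
                    ACAMERA_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR) {
                return true;
            }
        }
        return false;
    }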
     /**
      * <p>A list of all keys that the camera device has available
      * to use with {@link ACaptureRequest}.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: int32[n]</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>Attempting to set a key into a CaptureRequest that is not
      * listed here will result in an invalid request and will be rejected
@@ -2648,10 +2813,12 @@
      * to query with {@link ACameraMetadata} from
      * {@link ACameraCaptureSession_captureCallback_result}.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: int32[n]</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>Attempting to get a key from a CaptureResult that is not
      * listed here will always return a <code>null</code> value. Getting a key from
@@ -2679,10 +2846,12 @@
      * to query with {@link ACameraMetadata} from
      * {@link ACameraManager_getCameraCharacteristics}.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: int32[n]</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>This entry follows the same rules as
      * ACAMERA_REQUEST_AVAILABLE_RESULT_KEYS (except that it applies for
@@ -2698,11 +2867,13 @@
     /**
      * <p>The desired region of the sensor to read out for this capture.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: int32[4]</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
      *   <li>ACaptureRequest</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>This control can be used to implement digital zoom.</p>
      * <p>The data representation is int[4], which maps to (left, top, width, height).</p>
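A minimal sketch of the digital-zoom use described above (illustrative only:
setDigitalZoom is a hypothetical helper, `activeArray` is assumed to hold
ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE, and `zoom` should not exceed
ACAMERA_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM):

    // Center a crop of the active array shrunk by `zoom` and apply it.
    void setDigitalZoom(ACaptureRequest* request,
                        const int32_t activeArray[4], float zoom) {
        int32_t cropW = (int32_t)(activeArray[2] / zoom);
        int32_t cropH = (int32_t)(activeArray[3] / zoom);
        int32_t crop[4] = {
            activeArray[0] + (activeArray[2] - cropW) / 2,  // left
            activeArray[1] + (activeArray[3] - cropH) / 2,  // top
            cropW,
            cropH
        };
        ACaptureRequest_setEntry_i32(request, ACAMERA_SCALER_CROP_REGION, 4, crop);
    }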
@@ -2748,10 +2919,12 @@
      *
      * @see ACAMERA_SCALER_CROP_REGION
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: float</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>This represents the maximum amount of zooming possible by
      * the camera device, or equivalently, the minimum cropping
@@ -2767,10 +2940,12 @@
      * camera device supports
      * (i.e. format, width, height, output/input stream).</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: int32[n*4] (acamera_metadata_enum_android_scaler_available_stream_configurations_t)</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>The configurations are listed as <code>(format, width, height, input?)</code>
      * tuples.</p>
@@ -2805,16 +2980,18 @@
      * @see ACAMERA_REQUEST_AVAILABLE_CAPABILITIES
      * @see ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE
      */
-    ACAMERA_SCALER_AVAILABLE_STREAM_CONFIGURATIONS =            // int32[n*4] (enum)
+    ACAMERA_SCALER_AVAILABLE_STREAM_CONFIGURATIONS =            // int32[n*4] (acamera_metadata_enum_android_scaler_available_stream_configurations_t)
             ACAMERA_SCALER_START + 10,
     /**
      * <p>This lists the minimum frame duration for each
      * format/size combination.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: int64[4*n]</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>This should correspond to the frame duration when only that
      * stream is active, with all processing (typically in android.*.mode)
@@ -2836,10 +3013,12 @@
      * <p>This lists the maximum stall duration for each
      * output format/size combination.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: int64[4*n]</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>A stall duration is how much extra time would get added
      * to the normal minimum frame duration for a repeating request
@@ -2904,10 +3083,12 @@
     /**
      * <p>The crop type that this camera device supports.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: byte (acamera_metadata_enum_android_scaler_cropping_type_t)</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>When passing a non-centered crop region (ACAMERA_SCALER_CROP_REGION) to a camera
      * device that only supports CENTER_ONLY cropping, the camera device will move the
@@ -2922,7 +3103,7 @@
      * @see ACAMERA_SCALER_CROP_REGION
      * @see ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE
      */
-    ACAMERA_SCALER_CROPPING_TYPE =                              // byte (enum)
+    ACAMERA_SCALER_CROPPING_TYPE =                              // byte (acamera_metadata_enum_android_scaler_cropping_type_t)
             ACAMERA_SCALER_START + 13,
     ACAMERA_SCALER_END,
 
@@ -2930,11 +3111,13 @@
      * <p>Duration each pixel is exposed to
      * light.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: int64</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
      *   <li>ACaptureRequest</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>If the sensor can't expose this exact duration, it will shorten the
      * duration exposed to the nearest possible value (rather than expose longer).
@@ -2951,11 +3134,13 @@
      * <p>Duration from start of frame exposure to
      * start of next frame exposure.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: int64</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
      *   <li>ACaptureRequest</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>The maximum frame rate that can be supported by a camera subsystem is
      * a function of many factors:</p>
@@ -3037,11 +3222,13 @@
      * <p>The amount of gain applied to sensor data
      * before processing.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: int32</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
      *   <li>ACaptureRequest</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>The sensitivity is the standard ISO sensitivity value,
      * as defined in ISO 12232:2006.</p>
@@ -3072,10 +3259,12 @@
      * @see ACAMERA_SENSOR_COLOR_TRANSFORM1
      * @see ACAMERA_SENSOR_FORWARD_MATRIX1
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: byte (acamera_metadata_enum_android_sensor_reference_illuminant1_t)</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>The values in this key correspond to the values defined for the
      * EXIF LightSource tag. These illuminants are standard light sources
@@ -3092,7 +3281,7 @@
      * @see ACAMERA_SENSOR_FORWARD_MATRIX1
      * @see ACAMERA_SENSOR_REFERENCE_ILLUMINANT2
      */
-    ACAMERA_SENSOR_REFERENCE_ILLUMINANT1 =                      // byte (enum)
+    ACAMERA_SENSOR_REFERENCE_ILLUMINANT1 =                      // byte (acamera_metadata_enum_android_sensor_reference_illuminant1_t)
             ACAMERA_SENSOR_START + 3,
     /**
      * <p>The standard reference illuminant used as the scene light source when
@@ -3104,10 +3293,12 @@
      * @see ACAMERA_SENSOR_COLOR_TRANSFORM2
      * @see ACAMERA_SENSOR_FORWARD_MATRIX2
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: byte</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>See ACAMERA_SENSOR_REFERENCE_ILLUMINANT1 for more details.</p>
      * <p>If this key is present, then ACAMERA_SENSOR_COLOR_TRANSFORM2,
@@ -3125,10 +3316,12 @@
      * <p>A per-device calibration transform matrix that maps from the
      * reference sensor colorspace to the actual device sensor colorspace.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: rational[3*3]</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>This matrix is used to correct for per-device variations in the
      * sensor colorspace, and is used for processing raw buffer data.</p>
@@ -3148,10 +3341,12 @@
      * reference sensor colorspace to the actual device sensor colorspace
      * (this is the colorspace of the raw buffer data).</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: rational[3*3]</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>This matrix is used to correct for per-device variations in the
      * sensor colorspace, and is used for processing raw buffer data.</p>
@@ -3172,10 +3367,12 @@
      * <p>A matrix that transforms color values from CIE XYZ color space to
      * reference sensor color space.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: rational[3*3]</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>This matrix is used to convert from the standard CIE XYZ color
      * space to the reference sensor colorspace, and is used when processing
@@ -3198,10 +3395,12 @@
      * <p>A matrix that transforms color values from CIE XYZ color space to
      * reference sensor color space.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: rational[3*3]</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>This matrix is used to convert from the standard CIE XYZ color
      * space to the reference sensor colorspace, and is used when processing
@@ -3226,10 +3425,12 @@
      * <p>A matrix that transforms white balanced camera colors from the reference
      * sensor colorspace to the CIE XYZ colorspace with a D50 whitepoint.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: rational[3*3]</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>This matrix is used to convert to the standard CIE XYZ colorspace, and
      * is used when processing raw buffer data.</p>
@@ -3250,10 +3451,12 @@
      * <p>A matrix that transforms white balanced camera colors from the reference
      * sensor colorspace to the CIE XYZ colorspace with a D50 whitepoint.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: rational[3*3]</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>This matrix is used to convert to the standard CIE XYZ colorspace, and
      * is used when processing raw buffer data.</p>
@@ -3276,10 +3479,12 @@
      * <p>A fixed black level offset for each of the color filter arrangement
      * (CFA) mosaic channels.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: int32[4]</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>This key specifies the zero light value for each of the CFA mosaic
      * channels in the camera sensor.  The maximal value output by the
@@ -3310,10 +3515,12 @@
      * <p>Maximum sensitivity that is implemented
      * purely through analog gain.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: int32</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>For ACAMERA_SENSOR_SENSITIVITY values less than or
      * equal to this, all applied gain must be analog. For
@@ -3328,10 +3535,12 @@
      * <p>Clockwise angle through which the output image needs to be rotated to be
      * upright on the device screen in its native orientation.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: int32</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>Also defines the direction of rolling shutter readout, which is from top to bottom in
      * the sensor's coordinate system.</p>
@@ -3342,10 +3551,12 @@
      * <p>Time at start of exposure of first
      * row of the image sensor active array, in nanoseconds.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: int64</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>The timestamps are also included in all image
      * buffers produced for the same capture, and will be identical
@@ -3374,10 +3585,12 @@
      * <p>The estimated camera neutral color in the native sensor colorspace at
      * the time of capture.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: rational[3]</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>This value gives the neutral color point encoded as an RGB value in the
      * native sensor color space.  The neutral color point indicates the
@@ -3391,10 +3604,12 @@
     /**
      * <p>Noise model coefficients for each CFA mosaic channel.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: double[2*CFA Channels]</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>This key contains two noise model coefficients for each CFA channel
      * corresponding to the sensor amplification (S) and sensor readout
@@ -3421,10 +3636,12 @@
     /**
      * <p>The worst-case divergence between Bayer green channels.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: float</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>This value is an estimate of the worst case split between the
      * Bayer green channels in the red and blue rows in the sensor color
@@ -3465,11 +3682,13 @@
      *
      * @see ACAMERA_SENSOR_TEST_PATTERN_MODE
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: int32[4]</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
      *   <li>ACaptureRequest</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>Each color channel is treated as an unsigned 32-bit integer.
      * The camera device then uses the most significant X bits
@@ -3484,11 +3703,13 @@
      * <p>When enabled, the sensor sends a test pattern instead of
      * doing a real exposure from the camera.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: int32 (acamera_metadata_enum_android_sensor_test_pattern_mode_t)</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
      *   <li>ACaptureRequest</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>When a test pattern is enabled, all manual sensor controls specified
      * by ACAMERA_SENSOR_* will be ignored. All other controls should
@@ -3498,7 +3719,7 @@
      * would not actually affect it).</p>
      * <p>Defaults to OFF.</p>
      */
-    ACAMERA_SENSOR_TEST_PATTERN_MODE =                          // int32 (enum)
+    ACAMERA_SENSOR_TEST_PATTERN_MODE =                          // int32 (acamera_metadata_enum_android_sensor_test_pattern_mode_t)
             ACAMERA_SENSOR_START + 24,
     /**
      * <p>List of sensor test pattern modes for ACAMERA_SENSOR_TEST_PATTERN_MODE
@@ -3506,10 +3727,12 @@
      *
      * @see ACAMERA_SENSOR_TEST_PATTERN_MODE
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: int32[n]</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>Defaults to OFF, and always includes OFF if defined.</p>
      */
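The two test-pattern tags are used together; a hedged sketch, assuming `request`
is a valid ACaptureRequest and SOLID_COLOR appears in the list above (channel
order [R, Geven, Godd, B] per the ACAMERA_SENSOR_TEST_PATTERN_DATA documentation):

    // All-green solid test pattern: 0xFFFFFFFF (-1 as int32) is full scale.
    int32_t mode = ACAMERA_SENSOR_TEST_PATTERN_MODE_SOLID_COLOR;
    int32_t data[4] = { 0, -1, -1, 0 };  // R, Geven, Godd, B
    ACaptureRequest_setEntry_i32(request, ACAMERA_SENSOR_TEST_PATTERN_MODE, 1, &mode);
    ACaptureRequest_setEntry_i32(request, ACAMERA_SENSOR_TEST_PATTERN_DATA, 4, data);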
@@ -3519,10 +3742,12 @@
      * <p>Duration between the start of first row exposure
      * and the start of last row exposure.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: int64</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>This is the exposure time skew between the first and last
      * row exposure start times. The first row and the last row are
@@ -3539,10 +3764,12 @@
      * <p>List of disjoint rectangles indicating the sensor
      * optically shielded black pixel regions.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: int32[4*num_regions]</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>In most camera sensors, the active array is surrounded by some
      * optically shielded pixel areas. By blocking light, these pixels
@@ -3569,10 +3796,12 @@
      * <p>A per-frame dynamic black level offset for each of the color filter
      * arrangement (CFA) mosaic channels.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: float[4]</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>Camera sensor black levels may vary dramatically for different
      * capture settings (e.g. ACAMERA_SENSOR_SENSITIVITY). The fixed black
@@ -3610,10 +3839,12 @@
     /**
      * <p>Maximum raw value output by sensor for this frame.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: int32</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>Since the ACAMERA_SENSOR_BLACK_LEVEL_PATTERN may change for different
      * capture settings (e.g., ACAMERA_SENSOR_SENSITIVITY), the white
@@ -3637,10 +3868,12 @@
      * <p>The area of the image sensor which corresponds to active pixels after any geometric
      * distortion correction has been applied.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: int32[4]</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>This is the rectangle representing the size of the active region of the sensor (i.e.
      * the region that actually receives light from the scene) after any geometric correction
@@ -3668,10 +3901,12 @@
      *
      * @see ACAMERA_SENSOR_SENSITIVITY
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: int32[2]</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>The values are the standard ISO sensitivity values,
      * as defined in ISO 12232:2006.</p>
@@ -3683,14 +3918,15 @@
      * represents the colors in the top-left 2x2 section of
      * the sensor, in reading order.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: byte (acamera_metadata_enum_android_sensor_info_color_filter_arrangement_t)</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
-     * </ul>
+     * </ul></p>
      *
-     * <p>None</p>
      */
-    ACAMERA_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT =              // byte (enum)
+    ACAMERA_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT =              // byte (acamera_metadata_enum_android_sensor_info_color_filter_arrangement_t)
             ACAMERA_SENSOR_INFO_START + 2,
     /**
      * <p>The range of image exposure times for ACAMERA_SENSOR_EXPOSURE_TIME supported
@@ -3698,12 +3934,13 @@
      *
      * @see ACAMERA_SENSOR_EXPOSURE_TIME
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: int64[2]</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
-     * </ul>
+     * </ul></p>
      *
-     * <p>None</p>
      */
     ACAMERA_SENSOR_INFO_EXPOSURE_TIME_RANGE =                   // int64[2]
             ACAMERA_SENSOR_INFO_START + 3,
@@ -3713,10 +3950,12 @@
      *
      * @see ACAMERA_SENSOR_FRAME_DURATION
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: int64</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>Attempting to use frame durations beyond the maximum will result in the frame
      * duration being clipped to the maximum. See that control for a full definition of frame
@@ -3731,10 +3970,12 @@
      * <p>The physical dimensions of the full pixel
      * array.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: float[2]</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>This is the physical size of the sensor pixel
      * array defined by ACAMERA_SENSOR_INFO_PIXEL_ARRAY_SIZE.</p>
@@ -3747,10 +3988,12 @@
      * <p>Dimensions of the full pixel array, possibly
      * including black calibration pixels.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: int32[2]</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>The pixel count of the full pixel array of the image sensor, which covers
      * ACAMERA_SENSOR_INFO_PHYSICAL_SIZE area.  This represents the full pixel dimensions of
@@ -3773,10 +4016,12 @@
     /**
      * <p>Maximum raw value output by sensor.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: int32</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>This specifies the fully-saturated encoding level for the raw
      * sample values from the sensor.  This is typically caused by the
@@ -3802,26 +4047,30 @@
     /**
      * <p>The time base source for sensor capture start timestamps.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: byte (acamera_metadata_enum_android_sensor_info_timestamp_source_t)</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>The timestamps provided for captures are always in nanoseconds and monotonic, but
     * may not be based on a time source that can be compared to other system time sources.</p>
      * <p>This characteristic defines the source for the timestamps, and therefore whether they
      * can be compared against other system time sources/timestamps.</p>
      */
-    ACAMERA_SENSOR_INFO_TIMESTAMP_SOURCE =                      // byte (enum)
+    ACAMERA_SENSOR_INFO_TIMESTAMP_SOURCE =                      // byte (acamera_metadata_enum_android_sensor_info_timestamp_source_t)
             ACAMERA_SENSOR_INFO_START + 8,
     /**
      * <p>Whether the RAW images output from this camera device are subject to
      * lens shading correction.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: byte (acamera_metadata_enum_android_sensor_info_lens_shading_applied_t)</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>If TRUE, all images produced by the camera device in the RAW image formats will
     * have lens shading correction already applied to them. If FALSE, the images will
@@ -3830,16 +4079,18 @@
     * <p>This key will be <code>null</code> for all devices that do not report this information.
      * Devices with RAW capability will always report this information in this key.</p>
      */
-    ACAMERA_SENSOR_INFO_LENS_SHADING_APPLIED =                  // byte (enum)
+    ACAMERA_SENSOR_INFO_LENS_SHADING_APPLIED =                  // byte (acamera_metadata_enum_android_sensor_info_lens_shading_applied_t)
             ACAMERA_SENSOR_INFO_START + 9,
     /**
      * <p>The area of the image sensor which corresponds to active pixels prior to the
      * application of any geometric distortion correction.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: int32[4]</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>The data representation is int[4], which maps to (left, top, width, height).</p>
      * <p>This is the rectangle representing the size of the active region of the sensor (i.e.
@@ -3906,11 +4157,13 @@
      * <p>Quality of lens shading correction applied
      * to the image data.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: byte (acamera_metadata_enum_android_shading_mode_t)</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
      *   <li>ACaptureRequest</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>When set to OFF mode, no lens shading correction will be applied by the
      * camera device, and an identity lens shading map data will be provided
@@ -3940,17 +4193,19 @@
      * @see ACAMERA_CONTROL_AWB_MODE
      * @see ACAMERA_STATISTICS_LENS_SHADING_MAP_MODE
      */
-    ACAMERA_SHADING_MODE =                                      // byte (enum)
+    ACAMERA_SHADING_MODE =                                      // byte (acamera_metadata_enum_android_shading_mode_t)
             ACAMERA_SHADING_START,
     /**
      * <p>List of lens shading modes for ACAMERA_SHADING_MODE that are supported by this camera device.</p>
      *
      * @see ACAMERA_SHADING_MODE
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: byte[n]</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>This list contains lens shading modes that can be set for the camera device.
      * Camera devices that support the MANUAL_POST_PROCESSING capability will always
@@ -3965,41 +4220,47 @@
      * <p>Operating mode for the face detector
      * unit.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: byte (acamera_metadata_enum_android_statistics_face_detect_mode_t)</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
      *   <li>ACaptureRequest</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>Whether face detection is enabled, and whether it
      * should output just the basic fields or the full set of
      * fields.</p>
      */
-    ACAMERA_STATISTICS_FACE_DETECT_MODE =                       // byte (enum)
+    ACAMERA_STATISTICS_FACE_DETECT_MODE =                       // byte (acamera_metadata_enum_android_statistics_face_detect_mode_t)
             ACAMERA_STATISTICS_START,
     /**
      * <p>Operating mode for hot pixel map generation.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: byte (acamera_metadata_enum_android_statistics_hot_pixel_map_mode_t)</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
      *   <li>ACaptureRequest</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>If set to <code>true</code>, a hot pixel map is returned in ACAMERA_STATISTICS_HOT_PIXEL_MAP.
      * If set to <code>false</code>, no hot pixel map will be returned.</p>
      *
      * @see ACAMERA_STATISTICS_HOT_PIXEL_MAP
      */
-    ACAMERA_STATISTICS_HOT_PIXEL_MAP_MODE =                     // byte (enum)
+    ACAMERA_STATISTICS_HOT_PIXEL_MAP_MODE =                     // byte (acamera_metadata_enum_android_statistics_hot_pixel_map_mode_t)
             ACAMERA_STATISTICS_START + 3,
     /**
      * <p>List of unique IDs for detected faces.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: int32[n]</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>Each detected face is given a unique ID that is valid for as long as the face is visible
      * to the camera device.  A face that leaves the field of view and later returns may be
@@ -4014,10 +4275,12 @@
      * <p>List of landmarks for detected
      * faces.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: int32[n*6]</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>The coordinate system is that of ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE, with
      * <code>(0, 0)</code> being the top-left pixel of the active array.</p>
@@ -4032,10 +4295,12 @@
      * <p>List of the bounding rectangles for detected
      * faces.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: int32[n*4]</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>The data representation is int[4], which maps to (left, top, width, height).</p>
      * <p>The coordinate system is that of ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE, with
@@ -4051,10 +4316,12 @@
      * <p>List of the face confidence scores for
      * detected faces</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: byte[n]</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>Only available if ACAMERA_STATISTICS_FACE_DETECT_MODE != OFF.</p>
      *
@@ -4067,10 +4334,12 @@
      * that lists the coefficients used to correct for vignetting and color shading,
      * for each Bayer color channel of RAW image data.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: float[4*n*m]</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>The map provided here is the same map that is used by the camera device to
      * correct both color shading and vignetting for output non-RAW images.</p>
@@ -4144,10 +4413,12 @@
      * <p>The camera device estimated scene illumination lighting
      * frequency.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: byte (acamera_metadata_enum_android_statistics_scene_flicker_t)</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>Many light sources, such as most fluorescent lights, flicker at a rate
      * that depends on the local utility power standards. This flicker must be
@@ -4167,15 +4438,17 @@
      * @see ACAMERA_CONTROL_AE_MODE
      * @see ACAMERA_CONTROL_MODE
      */
-    ACAMERA_STATISTICS_SCENE_FLICKER =                          // byte (enum)
+    ACAMERA_STATISTICS_SCENE_FLICKER =                          // byte (acamera_metadata_enum_android_statistics_scene_flicker_t)
             ACAMERA_STATISTICS_START + 14,
     /**
      * <p>List of <code>(x, y)</code> coordinates of hot/defective pixels on the sensor.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: int32[2*n]</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
-     * </ul>
+     * </ul></p>
      *
     * <p>A coordinate <code>(x, y)</code> must lie between <code>(0, 0)</code> and
      * <code>(width - 1, height - 1)</code> (inclusive), which are the top-left and
@@ -4193,11 +4466,13 @@
      * <p>Whether the camera device will output the lens
      * shading map in output result metadata.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: byte (acamera_metadata_enum_android_statistics_lens_shading_map_mode_t)</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
      *   <li>ACaptureRequest</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>When set to ON,
      * ACAMERA_STATISTICS_LENS_SHADING_MAP will be provided in
@@ -4206,7 +4481,7 @@
      *
      * @see ACAMERA_STATISTICS_LENS_SHADING_MAP
      */
-    ACAMERA_STATISTICS_LENS_SHADING_MAP_MODE =                  // byte (enum)
+    ACAMERA_STATISTICS_LENS_SHADING_MAP_MODE =                  // byte (acamera_metadata_enum_android_statistics_lens_shading_map_mode_t)
             ACAMERA_STATISTICS_START + 16,
     ACAMERA_STATISTICS_END,
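A sketch of the round trip these statistics keys describe, assuming a valid
`request` and the `result` metadata delivered to
ACameraCaptureSession_captureCallback_result:

    // Ask the device to attach the shading map to results...
    uint8_t mapMode = ACAMERA_STATISTICS_LENS_SHADING_MAP_MODE_ON;
    ACaptureRequest_setEntry_u8(request,
            ACAMERA_STATISTICS_LENS_SHADING_MAP_MODE, 1, &mapMode);

    // ...then read it back in the result callback.
    ACameraMetadata_const_entry map;
    if (ACameraMetadata_getConstEntry(result,
            ACAMERA_STATISTICS_LENS_SHADING_MAP, &map) == ACAMERA_OK) {
        // map.data.f holds the float[4*n*m] per-channel gain grid.
    }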
 
@@ -4216,10 +4491,12 @@
      *
      * @see ACAMERA_STATISTICS_FACE_DETECT_MODE
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: byte[n]</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>OFF is always supported.</p>
      */
@@ -4229,12 +4506,13 @@
      * <p>The maximum number of simultaneously detectable
      * faces.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: int32</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
-     * </ul>
+     * </ul></p>
      *
-     * <p>None</p>
      */
     ACAMERA_STATISTICS_INFO_MAX_FACE_COUNT =                    // int32
             ACAMERA_STATISTICS_INFO_START + 2,
@@ -4244,10 +4522,12 @@
      *
      * @see ACAMERA_STATISTICS_HOT_PIXEL_MAP_MODE
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: byte[n]</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>If no hotpixel map output is available for this camera device, this will contain only
      * <code>false</code>.</p>
@@ -4261,10 +4541,12 @@
      *
      * @see ACAMERA_STATISTICS_LENS_SHADING_MAP_MODE
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: byte[n]</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>If no lens shading map output is available for this camera device, this key will
      * contain only OFF.</p>
@@ -4282,11 +4564,13 @@
      *
      * @see ACAMERA_TONEMAP_MODE
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: float[n*2]</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
      *   <li>ACaptureRequest</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>See ACAMERA_TONEMAP_CURVE_RED for more details.</p>
      *
@@ -4301,11 +4585,13 @@
      *
      * @see ACAMERA_TONEMAP_MODE
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: float[n*2]</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
      *   <li>ACaptureRequest</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>See ACAMERA_TONEMAP_CURVE_RED for more details.</p>
      *
@@ -4320,11 +4606,13 @@
      *
      * @see ACAMERA_TONEMAP_MODE
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: float[n*2]</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
      *   <li>ACaptureRequest</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>Each channel's curve is defined by an array of control points:</p>
      * <pre><code>ACAMERA_TONEMAP_CURVE_RED =
@@ -4375,11 +4663,13 @@
     /**
      * <p>High-level global contrast/gamma/tonemapping control.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: byte (acamera_metadata_enum_android_tonemap_mode_t)</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
      *   <li>ACaptureRequest</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>When switching to an application-defined contrast curve by setting
      * ACAMERA_TONEMAP_MODE to CONTRAST_CURVE, the curve is defined
@@ -4402,16 +4692,18 @@
      *
      * @see ACAMERA_TONEMAP_MODE
      */
-    ACAMERA_TONEMAP_MODE =                                      // byte (enum)
+    ACAMERA_TONEMAP_MODE =                                      // byte (acamera_metadata_enum_android_tonemap_mode_t)
             ACAMERA_TONEMAP_START + 3,
     /**
      * <p>Maximum number of supported points in the
      * tonemap curve that can be used for android.tonemap.curve.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: int32</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>If the actual number of points provided by the application (in ACAMERA_TONEMAPCURVE_*) is
      * less than this maximum, the camera device will resample the curve to its internal
@@ -4428,10 +4720,12 @@
      *
      * @see ACAMERA_TONEMAP_MODE
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: byte[n]</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>Camera devices that support the MANUAL_POST_PROCESSING capability will always contain
      * at least one of below mode combinations:</p>
@@ -4449,11 +4743,13 @@
      *
      * @see ACAMERA_TONEMAP_MODE
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: float</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
      *   <li>ACaptureRequest</li>
-     * </ul>
+     * </ul></p>
      *
     * <p>The tonemap curve will be defined by the following formula:
      * * OUT = pow(IN, 1.0 / gamma)
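For example (a sketch, assuming GAMMA_VALUE is supported by the device and
`request` is a valid ACaptureRequest), a display-style gamma of 2.2 would be
requested as:

    // Yields OUT = pow(IN, 1.0 / 2.2) on each channel.
    uint8_t mode = ACAMERA_TONEMAP_MODE_GAMMA_VALUE;
    float gamma = 2.2f;
    ACaptureRequest_setEntry_u8(request, ACAMERA_TONEMAP_MODE, 1, &mode);
    ACaptureRequest_setEntry_float(request, ACAMERA_TONEMAP_GAMMA, 1, &gamma);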
@@ -4474,11 +4770,13 @@
      *
      * @see ACAMERA_TONEMAP_MODE
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: byte (acamera_metadata_enum_android_tonemap_preset_curve_t)</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
      *   <li>ACaptureRequest</li>
-     * </ul>
+     * </ul></p>
      *
     * <p>The tonemap curve will be defined by the specified standard.</p>
      * <p>sRGB (approximated by 16 control points):</p>
@@ -4488,17 +4786,19 @@
      * <p>Note that above figures show a 16 control points approximation of preset
      * curves. Camera devices may apply a different approximation to the curve.</p>
      */
-    ACAMERA_TONEMAP_PRESET_CURVE =                              // byte (enum)
+    ACAMERA_TONEMAP_PRESET_CURVE =                              // byte (acamera_metadata_enum_android_tonemap_preset_curve_t)
             ACAMERA_TONEMAP_START + 7,
     ACAMERA_TONEMAP_END,
 
     /**
      * <p>Generally classifies the overall set of the camera device functionality.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: byte (acamera_metadata_enum_android_info_supported_hardware_level_t)</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>The supported hardware level is a high-level description of the camera device's
      * capabilities, summarizing several capabilities into one field.  Each level adds additional
@@ -4551,7 +4851,7 @@
      * @see ACAMERA_SENSOR_INFO_TIMESTAMP_SOURCE
      * @see ACAMERA_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES
      */
-    ACAMERA_INFO_SUPPORTED_HARDWARE_LEVEL =                     // byte (enum)
+    ACAMERA_INFO_SUPPORTED_HARDWARE_LEVEL =                     // byte (acamera_metadata_enum_android_info_supported_hardware_level_t)
             ACAMERA_INFO_START,
     ACAMERA_INFO_END,
 
@@ -4559,11 +4859,13 @@
      * <p>Whether black-level compensation is locked
      * to its current values, or is free to vary.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: byte (acamera_metadata_enum_android_black_level_lock_t)</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
      *   <li>ACaptureRequest</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>Whether the black level offset was locked for this frame.  Should be
      * ON if ACAMERA_BLACK_LEVEL_LOCK was ON in the capture request, unless
@@ -4572,7 +4874,7 @@
      *
      * @see ACAMERA_BLACK_LEVEL_LOCK
      */
-    ACAMERA_BLACK_LEVEL_LOCK =                                  // byte (enum)
+    ACAMERA_BLACK_LEVEL_LOCK =                                  // byte (acamera_metadata_enum_android_black_level_lock_t)
             ACAMERA_BLACK_LEVEL_START,
     ACAMERA_BLACK_LEVEL_END,
 
@@ -4581,10 +4883,12 @@
      * with which the output result (metadata + buffers) has been fully
      * synchronized.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: int64 (acamera_metadata_enum_android_sync_frame_number_t)</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>When a request is submitted to the camera device, there is usually a
      * delay of several frames before the controls get applied. A camera
@@ -4638,17 +4942,19 @@
      * @see ACAMERA_REQUEST_PIPELINE_MAX_DEPTH
      * @see ACAMERA_SYNC_FRAME_NUMBER
      */
-    ACAMERA_SYNC_FRAME_NUMBER =                                 // int64 (enum)
+    ACAMERA_SYNC_FRAME_NUMBER =                                 // int64 (acamera_metadata_enum_android_sync_frame_number_t)
             ACAMERA_SYNC_START,
     /**
      * <p>The maximum number of frames that can occur after a request
      * (different than the previous) has been submitted, and before the
      * result's state becomes synchronized.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: int32 (acamera_metadata_enum_android_sync_max_latency_t)</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
-     * </ul>
+     * </ul></p>
      *
     * <p>This defines the maximum distance (in number of metadata results)
      * between the frame number of the request that has new controls to apply
@@ -4657,7 +4963,7 @@
      * must occur before the camera device knows for a fact that the new
      * submitted camera settings have been applied in outgoing frames.</p>
      */
-    ACAMERA_SYNC_MAX_LATENCY =                                  // int32 (enum)
+    ACAMERA_SYNC_MAX_LATENCY =                                  // int32 (acamera_metadata_enum_android_sync_max_latency_t)
             ACAMERA_SYNC_START + 1,
     ACAMERA_SYNC_END,
 
@@ -4666,10 +4972,12 @@
      * configurations that this camera device supports
      * (i.e. format, width, height, output/input stream).</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: int32[n*4] (acamera_metadata_enum_android_depth_available_depth_stream_configurations_t)</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>These are output stream configurations for use with
      * dataSpace HAL_DATASPACE_DEPTH. The configurations are
@@ -4683,16 +4991,18 @@
      * android.depth.maxDepthSamples, 1, OUTPUT)</code> in addition to
      * the entries for HAL_PIXEL_FORMAT_Y16.</p>
      */
-    ACAMERA_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS =       // int32[n*4] (enum)
+    ACAMERA_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS =       // int32[n*4] (acamera_metadata_enum_android_depth_available_depth_stream_configurations_t)
             ACAMERA_DEPTH_START + 1,
     /**
      * <p>This lists the minimum frame duration for each
      * format/size combination for depth output formats.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: int64[4*n]</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>This should correspond to the frame duration when only that
      * stream is active, with all processing (typically in android.*.mode)
@@ -4714,10 +5024,12 @@
      * <p>This lists the maximum stall duration for each
      * output format/size combination for depth streams.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: int64[4*n]</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>A stall duration is how much extra time would get added
      * to the normal minimum frame duration for a repeating request
@@ -4737,10 +5049,12 @@
      * DEPTH16 / DEPTH_POINT_CLOUD output, and normal color outputs (such as
      * YUV_420_888, JPEG, or RAW) simultaneously.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: byte (acamera_metadata_enum_android_depth_depth_is_exclusive_t)</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>If TRUE, including both depth and color outputs in a single
      * capture request is not supported. An application must interleave color
@@ -4751,7 +5065,7 @@
      * measure depth values, which causes the color image to be
      * corrupted during depth measurement.</p>
      */
-    ACAMERA_DEPTH_DEPTH_IS_EXCLUSIVE =                          // byte (enum)
+    ACAMERA_DEPTH_DEPTH_IS_EXCLUSIVE =                          // byte (acamera_metadata_enum_android_depth_depth_is_exclusive_t)
             ACAMERA_DEPTH_START + 4,
     ACAMERA_DEPTH_END,
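A sketch of honoring this flag before configuring a session, assuming `chars`
holds the device characteristics:

    ACameraMetadata_const_entry exclusive;
    if (ACameraMetadata_getConstEntry(chars, ACAMERA_DEPTH_DEPTH_IS_EXCLUSIVE,
            &exclusive) == ACAMERA_OK &&
        exclusive.data.u8[0] == ACAMERA_DEPTH_DEPTH_IS_EXCLUSIVE_TRUE) {
        // Depth and color cannot share a request: submit depth-only and
        // color-only captures alternately instead of one combined request.
    }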
 
@@ -6966,6 +7280,7 @@
 
 } acamera_metadata_enum_android_depth_depth_is_exclusive_t;
 
+
 #endif /* __ANDROID_API__ >= 24 */
 
 __END_DECLS
diff --git a/cmds/stagefright/stagefright.cpp b/cmds/stagefright/stagefright.cpp
index d7c2e87..d70282b 100644
--- a/cmds/stagefright/stagefright.cpp
+++ b/cmds/stagefright/stagefright.cpp
@@ -870,7 +870,9 @@
 
             sp<IMemory> mem =
                     retriever->getFrameAtTime(-1,
-                                    MediaSource::ReadOptions::SEEK_PREVIOUS_SYNC);
+                            MediaSource::ReadOptions::SEEK_PREVIOUS_SYNC,
+                            HAL_PIXEL_FORMAT_RGB_565,
+                            false /*metaOnly*/);
 
             if (mem != NULL) {
                 failed = false;
diff --git a/drm/libmediadrm/DrmHal.cpp b/drm/libmediadrm/DrmHal.cpp
index 3150e3c..bc37557 100644
--- a/drm/libmediadrm/DrmHal.cpp
+++ b/drm/libmediadrm/DrmHal.cpp
@@ -257,6 +257,11 @@
                 plugin = hPlugin;
             }
         );
+
+    if (!hResult.isOk()) {
+        ALOGE("createPlugin remote call failed");
+    }
+
     return plugin;
 }
 
@@ -408,8 +413,11 @@
     if (mPlugin == NULL) {
         mInitCheck = ERROR_UNSUPPORTED;
     } else {
-        mInitCheck = OK;
-        mPlugin->setListener(this);
+        if (!mPlugin->setListener(this).isOk()) {
+            mInitCheck = DEAD_OBJECT;
+        } else {
+            mInitCheck = OK;
+        }
     }
 
     return mInitCheck;
@@ -424,12 +432,14 @@
     closeOpenSessions();
     reportMetrics();
     setListener(NULL);
-    if (mPlugin != NULL) {
-        mPlugin->setListener(NULL);
-    }
-    mPlugin.clear();
     mInitCheck = NO_INIT;
 
+    if (mPlugin != NULL) {
+        if (!mPlugin->setListener(NULL).isOk()) {
+            mInitCheck = DEAD_OBJECT;
+        }
+    }
+    mPlugin.clear();
     return OK;
 }
 
@@ -486,18 +496,21 @@
         return mInitCheck;
     }
 
-    Status status = mPlugin->closeSession(toHidlVec(sessionId));
-    if (status == Status::OK) {
-        DrmSessionManager::Instance()->removeSession(sessionId);
-        for (size_t i = 0; i < mOpenSessions.size(); i++) {
-            if (mOpenSessions[i] == sessionId) {
-                mOpenSessions.removeAt(i);
-                break;
+    Return<Status> status = mPlugin->closeSession(toHidlVec(sessionId));
+    if (status.isOk()) {
+        if (status == Status::OK) {
+            DrmSessionManager::Instance()->removeSession(sessionId);
+            for (size_t i = 0; i < mOpenSessions.size(); i++) {
+                if (mOpenSessions[i] == sessionId) {
+                    mOpenSessions.removeAt(i);
+                    break;
+                }
             }
         }
+        reportMetrics();
+        return toStatusT(status);
     }
-    reportMetrics();
-    return toStatusT(status);
+    return DEAD_OBJECT;
 }
 
 status_t DrmHal::getKeyRequest(Vector<uint8_t> const &sessionId,
@@ -997,11 +1010,14 @@
     Mutex::Autolock autoLock(mLock);
     closeOpenSessions();
     setListener(NULL);
+    mInitCheck = NO_INIT;
+
     if (mPlugin != NULL) {
-        mPlugin->setListener(NULL);
+        if (!mPlugin->setListener(NULL).isOk()) {
+            mInitCheck = DEAD_OBJECT;
+        }
     }
     mPlugin.clear();
-    mInitCheck = NO_INIT;
 }
 
 void DrmHal::writeByteArray(Parcel &obj, hidl_vec<uint8_t> const &vec)
diff --git a/media/libaaudio/examples/input_monitor/src/input_monitor.cpp b/media/libaaudio/examples/input_monitor/src/input_monitor.cpp
index edf644a..2dfd0a7 100644
--- a/media/libaaudio/examples/input_monitor/src/input_monitor.cpp
+++ b/media/libaaudio/examples/input_monitor/src/input_monitor.cpp
@@ -27,9 +27,11 @@
 #include "AAudioSimpleRecorder.h"
 
 // TODO support FLOAT
-#define REQUIRED_FORMAT  AAUDIO_FORMAT_PCM_I16
+#define REQUIRED_FORMAT    AAUDIO_FORMAT_PCM_I16
 #define MIN_FRAMES_TO_READ 48  /* arbitrary, 1 msec at 48000 Hz */
 
+static const int FRAMES_PER_LINE = 20000;
+
 int main(int argc, const char **argv)
 {
     AAudioArgsParser   argParser;
@@ -46,7 +48,10 @@
     int32_t framesPerRead = 0;
     int32_t framesToRecord = 0;
     int32_t framesLeft = 0;
+    int32_t nextFrameCount = 0;
+    int32_t frameCount = 0;
     int32_t xRunCount = 0;
+    int64_t previousFramePosition = -1;
     int16_t *data = nullptr;
     float peakLevel = 0.0;
     int loopCounter = 0;
@@ -56,7 +61,7 @@
     // in a buffer if we hang or crash.
     setvbuf(stdout, nullptr, _IONBF, (size_t) 0);
 
-    printf("%s - Monitor input level using AAudio\n", argv[0]);
+    printf("%s - Monitor input level using AAudio V0.1.1\n", argv[0]);
 
     argParser.setFormat(REQUIRED_FORMAT);
     if (argParser.parseArgs(argc, argv)) {
@@ -133,6 +138,7 @@
             goto finish;
         }
         framesLeft -= actual;
+        frameCount += actual;
 
         // Peak finder.
         for (int frameIndex = 0; frameIndex < actual; frameIndex++) {
@@ -143,9 +149,36 @@
         }
 
         // Display level as stars, eg. "******".
-        if ((loopCounter++ % 10) == 0) {
+        if (frameCount > nextFrameCount) {
             displayPeakLevel(peakLevel);
             peakLevel = 0.0;
+            nextFrameCount += FRAMES_PER_LINE;
+        }
+
+        // Print timestamps.
+        int64_t framePosition = 0;
+        int64_t frameTime = 0;
+        aaudio_result_t timeResult;
+        timeResult = AAudioStream_getTimestamp(aaudioStream, CLOCK_MONOTONIC,
+                                               &framePosition, &frameTime);
+
+        if (timeResult == AAUDIO_OK) {
+            if (framePosition > (previousFramePosition + FRAMES_PER_LINE)) {
+                int64_t realTime = getNanoseconds();
+                int64_t framesRead = AAudioStream_getFramesRead(aaudioStream);
+
+                double latencyMillis = calculateLatencyMillis(framesRead, realTime,
+                                                              framePosition, frameTime,
+                                                              actualSampleRate);
+
+                printf("--- timestamp: result = %4d, position = %lld, at %lld nanos"
+                               ", latency = %7.2f msec\n",
+                       timeResult,
+                       (long long) framePosition,
+                       (long long) frameTime,
+                       latencyMillis);
+                previousFramePosition = framePosition;
+            }
         }
     }
 
diff --git a/media/libaaudio/examples/loopback/jni/Android.mk b/media/libaaudio/examples/loopback/jni/Android.mk
index dc933e3..d78f286 100644
--- a/media/libaaudio/examples/loopback/jni/Android.mk
+++ b/media/libaaudio/examples/loopback/jni/Android.mk
@@ -4,7 +4,8 @@
 LOCAL_MODULE_TAGS := tests
 LOCAL_C_INCLUDES := \
     $(call include-path-for, audio-utils) \
-    frameworks/av/media/libaaudio/include
+    frameworks/av/media/libaaudio/include \
+    frameworks/av/media/libaaudio/examples/utils
 
 # NDK recommends using this kind of relative path instead of an absolute path.
 LOCAL_SRC_FILES:= ../src/loopback.cpp
diff --git a/media/libaaudio/examples/loopback/src/LoopbackAnalyzer.h b/media/libaaudio/examples/loopback/src/LoopbackAnalyzer.h
new file mode 100644
index 0000000..21cf341
--- /dev/null
+++ b/media/libaaudio/examples/loopback/src/LoopbackAnalyzer.h
@@ -0,0 +1,794 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * Tools for measuring latency and for detecting glitches.
+ * These classes are pure math and can be used with any audio system.
+ */
+
+#ifndef AAUDIO_EXAMPLES_LOOPBACK_ANALYSER_H
+#define AAUDIO_EXAMPLES_LOOPBACK_ANALYSER_H
+
+#include <algorithm>
+#include <assert.h>
+#include <cctype>
+#include <math.h>
+#include <stdio.h>
+#include <stdlib.h>
+#include <unistd.h>
+
+// Tag for machine readable results as property = value pairs
+#define LOOPBACK_RESULT_TAG      "RESULT: "
+#define LOOPBACK_SAMPLE_RATE     48000
+
+#define MILLIS_PER_SECOND        1000
+
+#define MAX_ZEROTH_PARTIAL_BINS  40
+
+static const float s_Impulse[] = {
+        0.0f, 0.0f, 0.0f, 0.0f, 0.2f, // silence on each side of the impulse
+        0.5f, 0.9999f, 0.0f, -0.9999f, -0.5f, // bipolar
+        -0.2f, 0.0f, 0.0f, 0.0f, 0.0f
+};
+
+class PseudoRandom {
+public:
+    PseudoRandom() {}
+    PseudoRandom(int64_t seed)
+            :    mSeed(seed)
+    {}
+
+    /**
+     * Returns the next random double from -1.0 to 1.0
+     *
+     * @return value from -1.0 to 1.0
+     */
+    double nextRandomDouble() {
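+        // Scale the full 32-bit integer range into approximately -1.0 .. +1.0.
+        // 0.5 / (1 << 30) equals 1.0 / (1 << 31), written this way to avoid overflow.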
+        return nextRandomInteger() * (0.5 / (((int32_t)1) << 30));
+    }
+
+    /** Calculate random 32 bit number using linear-congruential method. */
+    int32_t nextRandomInteger() {
+        // Use values for 64-bit sequence from MMIX by Donald Knuth.
+        mSeed = (mSeed * (int64_t)6364136223846793005) + (int64_t)1442695040888963407;
+        return (int32_t) (mSeed >> 32); // The higher bits have a longer sequence.
+    }
+
+private:
+    int64_t mSeed = 99887766;
+};
+
+static double calculateCorrelation(const float *a,
+                                   const float *b,
+                                   int windowSize)
+{
+    double correlation = 0.0;
+    double sumProducts = 0.0;
+    double sumSquares = 0.0;
+
+    // Correlate a against b.
+    for (int i = 0; i < windowSize; i++) {
+        float s1 = a[i];
+        float s2 = b[i];
+        // Use a normalized cross-correlation.
+        sumProducts += s1 * s2;
+        sumSquares += ((s1 * s1) + (s2 * s2));
+    }
+
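+    // The normalized value is 1.0 when the two signals are identical and
+    // falls toward 0.0 as they decorrelate.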
+    if (sumSquares >= 0.00000001) {
+        correlation = (float) (2.0 * sumProducts / sumSquares);
+    }
+    return correlation;
+}
+
+static int calculateCorrelations(const float *haystack, int haystackSize,
+                                 const float *needle, int needleSize,
+                                 float *results, int resultSize)
+{
+    int maxCorrelations = haystackSize - needleSize;
+    int numCorrelations = std::min(maxCorrelations, resultSize);
+
+    for (int ic = 0; ic < numCorrelations; ic++) {
+        double correlation = calculateCorrelation(&haystack[ic], needle, needleSize);
+        results[ic] = correlation;
+    }
+
+    return numCorrelations;
+}
+
+/*==========================================================================================*/
+/**
+ * Scan until we get a correlation of a single scan that goes over the tolerance level,
+ * peaks then drops back down.
+ */
+static double findFirstMatch(const float *haystack, int haystackSize,
+                             const float *needle, int needleSize, double threshold)
+{
+    int ic;
+    // How many correlations can we calculate?
+    int numCorrelations = haystackSize - needleSize;
+    double maxCorrelation = 0.0;
+    int peakIndex = -1;
+    double location = -1.0;
+    const double backThresholdScaler = 0.5;
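+    // A peak is accepted once the correlation falls to half of the maximum
+    // seen so far, i.e. after the match has clearly passed its peak.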
+
+    for (ic = 0; ic < numCorrelations; ic++) {
+        double correlation = calculateCorrelation(&haystack[ic], needle, needleSize);
+
+        if (correlation > maxCorrelation) {
+            maxCorrelation = correlation;
+            peakIndex = ic;
+        }
+
+        //printf("PaQa_FindFirstMatch: ic = %4d, correlation = %8f, maxSum = %8f\n",
+        //    ic, correlation, maxSum );
+        // Are we past what we were looking for?
+        if ((maxCorrelation > threshold) && (correlation < backThresholdScaler * maxCorrelation)) {
+            location = peakIndex;
+            break;
+        }
+    }
+
+    return location;
+}
+
+typedef struct LatencyReport_s {
+    double latencyInFrames;
+    double confidence;
+} LatencyReport;
+
+// Apply a technique similar to Harmonic Product Spectrum Analysis to find the echo fundamental.
+// Use the first echo instead of the original impulse because it gives a better match.
+static int measureLatencyFromEchos(const float *haystack, int haystackSize,
+                            const float *needle, int needleSize,
+                            LatencyReport *report) {
+    const double threshold = 0.1;
+
+    // Find first peak
+    int first = (int) (findFirstMatch(haystack,
+                                      haystackSize,
+                                      needle,
+                                      needleSize,
+                                      threshold) + 0.5);
+
+    // Use the first echo as the needle for the other echoes because
+    // it will be more similar.
+    needle = &haystack[first];
+    int again = (int) (findFirstMatch(haystack,
+                                      haystackSize,
+                                      needle,
+                                      needleSize,
+                                      threshold) + 0.5);
+
+    printf("first = %d, again at %d\n", first, again);
+    first = again;
+
+    // Allocate results array
+    int remaining = haystackSize - first;
+    const int maxReasonableLatencyFrames = 48000 * 2; // arbitrary but generous value
+    int numCorrelations = std::min(remaining, maxReasonableLatencyFrames);
+    float *correlations = new float[numCorrelations];
+    float *harmonicSums = new float[numCorrelations](); // set to zero
+
+    // Generate correlation for every position.
+    numCorrelations = calculateCorrelations(&haystack[first], remaining,
+                                            needle, needleSize,
+                                            correlations, numCorrelations);
+
+    // Add higher harmonics mapped onto lower harmonics.
+    // This reinforces the "fundamental" echo.
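+    // For example, an echo with a period of N frames also produces correlation
+    // peaks near 2N, 3N, ... which all fold back into bin N and reinforce it.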
+    const int numEchoes = 10;
+    for (int partial = 1; partial < numEchoes; partial++) {
+        for (int i = 0; i < numCorrelations; i++) {
+            harmonicSums[i / partial] += correlations[i] / partial;
+        }
+    }
+
+    // Find highest peak in correlation array.
+    float maxCorrelation = 0.0;
+    float sumOfPeaks = 0.0;
+    int peakIndex = 0;
+    const int skip = MAX_ZEROTH_PARTIAL_BINS; // skip low bins
+    for (int i = skip; i < numCorrelations; i++) {
+        if (harmonicSums[i] > maxCorrelation) {
+            maxCorrelation = harmonicSums[i];
+            sumOfPeaks += maxCorrelation;
+            peakIndex = i;
+            printf("maxCorrelation = %f at %d\n", maxCorrelation, peakIndex);
+        }
+    }
+
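+    // Confidence approaches 1.0 when a single peak dominates the harmonic sums.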
+    report->latencyInFrames = peakIndex;
+    if (sumOfPeaks < 0.0001) {
+        report->confidence = 0.0;
+    } else {
+        report->confidence = maxCorrelation / sumOfPeaks;
+    }
+
+    delete[] correlations;
+    delete[] harmonicSums;
+    return 0;
+}
+
+class AudioRecording
+{
+public:
+    AudioRecording() {
+    }
+    ~AudioRecording() {
+        delete[] mData;
+    }
+
+    void allocate(int maxFrames) {
+        delete[] mData;
+        mData = new float[maxFrames];
+        mMaxFrames = maxFrames;
+    }
+
+    // Write SHORT data from the first channel.
+    int write(int16_t *inputData, int inputChannelCount, int numFrames) {
+        // stop at end of buffer
+        if ((mFrameCounter + numFrames) > mMaxFrames) {
+            numFrames = mMaxFrames - mFrameCounter;
+        }
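+        // Keep only the first channel and scale int16 samples to the range -1.0 .. +1.0.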
+        for (int i = 0; i < numFrames; i++) {
+            mData[mFrameCounter++] = inputData[i * inputChannelCount] * (1.0f / 32768);
+        }
+        return numFrames;
+    }
+
+    // Write FLOAT data from the first channel.
+    int write(float *inputData, int inputChannelCount, int numFrames) {
+        // stop at end of buffer
+        if ((mFrameCounter + numFrames) > mMaxFrames) {
+            numFrames = mMaxFrames - mFrameCounter;
+        }
+        for (int i = 0; i < numFrames; i++) {
+            mData[mFrameCounter++] = inputData[i * inputChannelCount];
+        }
+        return numFrames;
+    }
+
+    int size() {
+        return mFrameCounter;
+    }
+
+    float *getData() {
+        return mData;
+    }
+
+    int save(const char *fileName, bool writeShorts = true) {
+        int written = 0;
+        const int chunkSize = 64;
+        FILE *fid = fopen(fileName, "wb");
+        if (fid == NULL) {
+            return -errno;
+        }
+
+        if (writeShorts) {
+            int16_t buffer[chunkSize];
+            int32_t framesLeft = mFrameCounter;
+            int32_t cursor = 0;
+            while (framesLeft) {
+                int32_t framesToWrite = framesLeft < chunkSize ? framesLeft : chunkSize;
+                for (int i = 0; i < framesToWrite; i++) {
+                    buffer[i] = (int16_t) (mData[cursor++] * 32767);
+                }
+                written += fwrite(buffer, sizeof(int16_t), framesToWrite, fid);
+                framesLeft -= framesToWrite;
+            }
+        } else {
+            written = (int) fwrite(mData, sizeof(float), mFrameCounter, fid);
+        }
+        fclose(fid);
+        return written;
+    }
+
+private:
+    float  *mData = nullptr;
+    int32_t mFrameCounter = 0;
+    int32_t mMaxFrames = 0;
+};
+
+// ====================================================================================
+class LoopbackProcessor {
+public:
+    virtual ~LoopbackProcessor() = default;
+
+
+    virtual void reset() {}
+
+    virtual void process(float *inputData, int inputChannelCount,
+                 float *outputData, int outputChannelCount,
+                 int numFrames) = 0;
+
+
+    virtual void report() = 0;
+
+    virtual void printStatus() {}
+
+    virtual bool isDone() {
+        return false;
+    }
+
+    void setSampleRate(int32_t sampleRate) {
+        mSampleRate = sampleRate;
+    }
+
+    int32_t getSampleRate() {
+        return mSampleRate;
+    }
+
+    // Measure peak amplitude of buffer.
+    static float measurePeakAmplitude(float *inputData, int inputChannelCount, int numFrames) {
+        float peak = 0.0f;
+        for (int i = 0; i < numFrames; i++) {
+            float pos = fabs(*inputData);
+            if (pos > peak) {
+                peak = pos;
+            }
+            inputData += inputChannelCount;
+        }
+        return peak;
+    }
+
+
+private:
+    int32_t mSampleRate = LOOPBACK_SAMPLE_RATE;
+};
+
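+/**
+ * Simple peak follower: the output decays exponentially and snaps up to any
+ * input sample that exceeds the current envelope.
+ */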
+class PeakDetector {
+public:
+    float process(float input) {
+        float output = mPrevious * mDecay;
+        if (input > output) {
+            output = input;
+        }
+        mPrevious = output;
+        return output;
+    }
+
+private:
+    float  mDecay = 0.99f;
+    float  mPrevious = 0.0f;
+};
+
+
+static void printAudioScope(float sample) {
+    const int maxStars = 80; // arbitrary, fits on one line
+    char c = '*';
+    if (sample < -1.0) {
+        sample = -1.0;
+        c = '$';
+    } else if (sample > 1.0) {
+        sample = 1.0;
+        c = '$';
+    }
+    int numSpaces = (int) (((sample + 1.0) * 0.5) * maxStars);
+    for (int i = 0; i < numSpaces; i++) {
+        putchar(' ');
+    }
+    printf("%c\n", c);
+}
+
+// ====================================================================================
+/**
+ * Measure latency given loopback stream data.
+ * Uses a state machine to cycle through stages: initial silence,
+ * gain measurement, waiting for silence, sending a pulse,
+ * gathering echoes, and done.
+ */
+class EchoAnalyzer : public LoopbackProcessor {
+public:
+
+    EchoAnalyzer() : LoopbackProcessor() {
+        audioRecorder.allocate(2 * LOOPBACK_SAMPLE_RATE);
+    }
+
+    void reset() override {
+        mDownCounter = 200;
+        mLoopCounter = 0;
+        mMeasuredLoopGain = 0.0f;
+        mEchoGain = 1.0f;
+        mState = STATE_INITIAL_SILENCE;
+    }
+
+    bool isDone() override {
+        return mState == STATE_DONE;
+    }
+
+    void setGain(float gain) {
+        mEchoGain = gain;
+    }
+
+    float getGain() {
+        return mEchoGain;
+    }
+
+    void report() override {
+
+        printf("EchoAnalyzer ---------------\n");
+        printf(LOOPBACK_RESULT_TAG "measured.gain          = %f\n", mMeasuredLoopGain);
+        printf(LOOPBACK_RESULT_TAG "echo.gain              = %f\n", mEchoGain);
+        printf(LOOPBACK_RESULT_TAG "frame.count            = %d\n", mFrameCounter);
+        printf(LOOPBACK_RESULT_TAG "test.state             = %d\n", mState);
+        if (mMeasuredLoopGain >= 0.9999) {
+            printf("   ERROR - clipping, turn down volume slightly\n");
+        } else {
+            const float *needle = s_Impulse;
+            int needleSize = (int) (sizeof(s_Impulse) / sizeof(float));
+            float *haystack = audioRecorder.getData();
+            int haystackSize = audioRecorder.size();
+            measureLatencyFromEchos(haystack, haystackSize,
+                                    needle, needleSize,
+                                    &latencyReport);
+            if (latencyReport.confidence < 0.01) {
+                printf("   ERROR - confidence too low = %f\n", latencyReport.confidence);
+            } else {
+                double latencyMillis = 1000.0 * latencyReport.latencyInFrames / getSampleRate();
+                printf(LOOPBACK_RESULT_TAG "latency.frames        = %8.2f\n", latencyReport.latencyInFrames);
+                printf(LOOPBACK_RESULT_TAG "latency.msec          = %8.2f\n", latencyMillis);
+                printf(LOOPBACK_RESULT_TAG "latency.confidence    = %8.6f\n", latencyReport.confidence);
+            }
+        }
+
+        {
+#define ECHO_FILENAME "/data/oboe_echo.raw"
+            int written = audioRecorder.save(ECHO_FILENAME);
+            printf("Echo wrote %d mono samples to %s on Android device\n", written, ECHO_FILENAME);
+        }
+    }
+
+    void printStatus() override {
+        printf("state = %d, echo gain = %f ", mState, mEchoGain);
+    }
+
+    static void sendImpulse(float *outputData, int outputChannelCount) {
+        for (float sample : s_Impulse) {
+            *outputData = sample;
+            outputData += outputChannelCount;
+        }
+    }
+
+    void process(float *inputData, int inputChannelCount,
+                 float *outputData, int outputChannelCount,
+                 int numFrames) override {
+        int channelsValid = std::min(inputChannelCount, outputChannelCount);
+        float peak = 0.0f;
+        int numWritten;
+        int numSamples;
+
+        echo_state_t nextState = mState;
+
+        switch (mState) {
+            case STATE_INITIAL_SILENCE:
+                // Output silence at the beginning.
+                numSamples = numFrames * outputChannelCount;
+                for (int i = 0; i < numSamples; i++) {
+                    outputData[i] = 0;
+                }
+                if (mDownCounter-- <= 0) {
+                    nextState = STATE_MEASURING_GAIN;
+                    //printf("%5d: switch to STATE_MEASURING_GAIN\n", mLoopCounter);
+                    mDownCounter = 8;
+                }
+                break;
+
+            case STATE_MEASURING_GAIN:
+                sendImpulse(outputData, outputChannelCount);
+                peak = measurePeakAmplitude(inputData, inputChannelCount, numFrames);
+                // If we get several in a row then go to next state.
+                if (peak > mPulseThreshold) {
+                    if (mDownCounter-- <= 0) {
+                        nextState = STATE_WAITING_FOR_SILENCE;
+                        //printf("%5d: switch to STATE_WAITING_FOR_SILENCE, measured peak = %f\n",
+                        //       mLoopCounter, peak);
+                        mDownCounter = 8;
+                        mMeasuredLoopGain = peak;  // assumes original pulse amplitude is one
+                        // Calculate gain that will give us a nice decaying echo.
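+                        // For example, a measured loop gain of 0.5 gives an echo
+                        // gain of 0.95 / 0.5 = 1.9, so each round trip decays by 0.95.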
+                        mEchoGain = mDesiredEchoGain / mMeasuredLoopGain;
+                    }
+                } else {
+                    mDownCounter = 8;
+                }
+                break;
+
+            case STATE_WAITING_FOR_SILENCE:
+                // Output silence.
+                numSamples = numFrames * outputChannelCount;
+                for (int i = 0; i < numSamples; i++) {
+                    outputData[i] = 0;
+                }
+                peak = measurePeakAmplitude(inputData, inputChannelCount, numFrames);
+                // If we get several in a row then go to next state.
+                if (peak < mSilenceThreshold) {
+                    if (mDownCounter-- <= 0) {
+                        nextState = STATE_SENDING_PULSE;
+                        //printf("%5d: switch to STATE_SENDING_PULSE\n", mLoopCounter);
+                        mDownCounter = 8;
+                    }
+                } else {
+                    mDownCounter = 8;
+                }
+                break;
+
+            case STATE_SENDING_PULSE:
+                audioRecorder.write(inputData, inputChannelCount, numFrames);
+                sendImpulse(outputData, outputChannelCount);
+                nextState = STATE_GATHERING_ECHOS;
+                //printf("%5d: switch to STATE_GATHERING_ECHOS\n", mLoopCounter);
+                break;
+
+            case STATE_GATHERING_ECHOS:
+                numWritten = audioRecorder.write(inputData, inputChannelCount, numFrames);
+                peak = measurePeakAmplitude(inputData, inputChannelCount, numFrames);
+                if (peak > mMeasuredLoopGain) {
+                    mMeasuredLoopGain = peak;  // AGC might be raising gain so adjust it on the fly.
+                    // Recalculate gain that will give us a nice decaying echo.
+                    mEchoGain = mDesiredEchoGain / mMeasuredLoopGain;
+                }
+                // Echo input to output.
+                for (int i = 0; i < numFrames; i++) {
+                    int ic;
+                    for (ic = 0; ic < channelsValid; ic++) {
+                        outputData[ic] = inputData[ic] * mEchoGain;
+                    }
+                    for (; ic < outputChannelCount; ic++) {
+                        outputData[ic] = 0;
+                    }
+                    inputData += inputChannelCount;
+                    outputData += outputChannelCount;
+                }
+                if (numWritten < numFrames) {
+                    nextState = STATE_DONE;
+                    //printf("%5d: switch to STATE_DONE\n", mLoopCounter);
+                }
+                break;
+
+            case STATE_DONE:
+            default:
+                break;
+        }
+
+        mState = nextState;
+        mLoopCounter++;
+    }
+
+private:
+
+    enum echo_state_t {
+        STATE_INITIAL_SILENCE,
+        STATE_MEASURING_GAIN,
+        STATE_WAITING_FOR_SILENCE,
+        STATE_SENDING_PULSE,
+        STATE_GATHERING_ECHOS,
+        STATE_DONE
+    };
+
+    int           mDownCounter = 500;
+    int           mLoopCounter = 0;
+    int           mLoopStart = 1000;
+    float         mPulseThreshold = 0.02f;
+    float         mSilenceThreshold = 0.002f;
+    float         mMeasuredLoopGain = 0.0f;
+    float         mDesiredEchoGain = 0.95f;
+    float         mEchoGain = 1.0f;
+    echo_state_t  mState = STATE_INITIAL_SILENCE;
+    int32_t       mFrameCounter = 0;
+
+    AudioRecording     audioRecorder;
+    LatencyReport      latencyReport;
+    PeakDetector       mPeakDetector;
+};
+
+
+// ====================================================================================
+/**
+ * Output a steady sinewave and analyze the return signal.
+ *
+ * Use a cosine transform to measure the predicted magnitude and relative phase of the
+ * looped back sine wave. Then generate a predicted signal and compare with the actual signal.
+ */
+class SineAnalyzer : public LoopbackProcessor {
+public:
+
+    void report() override {
+        printf("SineAnalyzer ------------------\n");
+        printf(LOOPBACK_RESULT_TAG "peak.amplitude     = %7.5f\n", mPeakAmplitude);
+        printf(LOOPBACK_RESULT_TAG "sine.magnitude     = %7.5f\n", mMagnitude);
+        printf(LOOPBACK_RESULT_TAG "phase.offset       = %7.5f\n", mPhaseOffset);
+        printf(LOOPBACK_RESULT_TAG "ref.phase          = %7.5f\n", mPhase);
+        printf(LOOPBACK_RESULT_TAG "frames.accumulated = %6d\n", mFramesAccumulated);
+        printf(LOOPBACK_RESULT_TAG "sine.period        = %6d\n", mPeriod);
+        printf(LOOPBACK_RESULT_TAG "test.state         = %6d\n", mState);
+        printf(LOOPBACK_RESULT_TAG "frame.count        = %6d\n", mFrameCounter);
+        // Did we ever get a lock?
+        bool gotLock = (mState == STATE_LOCKED) || (mGlitchCount > 0);
+        if (!gotLock) {
+            printf("ERROR - failed to lock on reference sine tone\n");
+        } else {
+            // Only print if meaningful.
+            printf(LOOPBACK_RESULT_TAG "glitch.count       = %6d\n", mGlitchCount);
+        }
+    }
+
+    void printStatus() override {
+        printf("  state = %d, glitches = %d,", mState, mGlitchCount);
+    }
+
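+    // Effectively a single-bin Fourier transform at the reference frequency:
+    // magnitude = 2 * sqrt(sinMean^2 + cosMean^2) recovers the sine amplitude.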
+    double calculateMagnitude(double *phasePtr = NULL) {
+        if (mFramesAccumulated == 0) {
+            return 0.0;
+        }
+        double sinMean = mSinAccumulator / mFramesAccumulated;
+        double cosMean = mCosAccumulator / mFramesAccumulated;
+        double magnitude = 2.0 * sqrt((sinMean * sinMean) + (cosMean * cosMean));
+        if (phasePtr != NULL) {
+            double phase = M_PI_2 - atan2(sinMean, cosMean);
+            *phasePtr = phase;
+        }
+        return magnitude;
+    }
+
+    /**
+     * @param inputData contains microphone data with sine signal feedback
+     * @param outputData contains the reference sine wave
+     */
+    void process(float *inputData, int inputChannelCount,
+                 float *outputData, int outputChannelCount,
+                 int numFrames) override {
+        float peak = measurePeakAmplitude(inputData, inputChannelCount, numFrames);
+        if (peak > mPeakAmplitude) {
+            mPeakAmplitude = peak;
+        }
+
+        for (int i = 0; i < numFrames; i++) {
+            float sample = inputData[i * inputChannelCount];
+
+            float sinOut = sinf(mPhase);
+
+            switch (mState) {
+                case STATE_IMMUNE:
+                case STATE_WAITING_FOR_SIGNAL:
+                    break;
+                case STATE_WAITING_FOR_LOCK:
+                    mSinAccumulator += sample * sinOut;
+                    mCosAccumulator += sample * cosf(mPhase);
+                    mFramesAccumulated++;
+                    // Must be a multiple of the period or the calculation will not be accurate.
+                    if (mFramesAccumulated == mPeriod * 4) {
+                        mPhaseOffset = 0.0;
+                        mMagnitude = calculateMagnitude(&mPhaseOffset);
+                        if (mMagnitude > mThreshold) {
+                            if (fabs(mPreviousPhaseOffset - mPhaseOffset) < 0.001) {
+                                mState = STATE_LOCKED;
+                                //printf("%5d: switch to STATE_LOCKED\n", mFrameCounter);
+                            }
+                            mPreviousPhaseOffset = mPhaseOffset;
+                        }
+                        resetAccumulator();
+                    }
+                    break;
+
+                case STATE_LOCKED: {
+                    // Predict next sine value
+                    float predicted = sinf(mPhase + mPhaseOffset) * mMagnitude;
+                    // printf("    predicted = %f, actual = %f\n", predicted, sample);
+
+                    float diff = predicted - sample;
+                    if (fabs(diff) > mTolerance) {
+                        mGlitchCount++;
+                        //printf("%5d: Got a glitch # %d, predicted = %f, actual = %f\n",
+                        //       mFrameCounter, mGlitchCount, predicted, sample);
+                        mState = STATE_IMMUNE;
+                        //printf("%5d: switch to STATE_IMMUNE\n", mFrameCounter);
+                        mDownCounter = mPeriod;  // Set duration of IMMUNE state.
+                    }
+                } break;
+            }
+
+            // Output sine wave so we can measure it.
+            outputData[i * outputChannelCount] = (sinOut * mOutputAmplitude)
+                    + (mWhiteNoise.nextRandomDouble() * mNoiseAmplitude);
+            // printf("%5d: sin(%f) = %f, %f\n", i, mPhase, sinOut,  mPhaseIncrement);
+
+            // advance and wrap phase
+            mPhase += mPhaseIncrement;
+            if (mPhase > M_PI) {
+                mPhase -= (2.0 * M_PI);
+            }
+
+            mFrameCounter++;
+        }
+
+        // Do these once per buffer.
+        switch (mState) {
+            case STATE_IMMUNE:
+                mDownCounter -= numFrames;
+                if (mDownCounter <= 0) {
+                    mState = STATE_WAITING_FOR_SIGNAL;
+                    //printf("%5d: switch to STATE_WAITING_FOR_SIGNAL\n", mFrameCounter);
+                }
+                break;
+            case STATE_WAITING_FOR_SIGNAL:
+                if (peak > mThreshold) {
+                    mState = STATE_WAITING_FOR_LOCK;
+                    //printf("%5d: switch to STATE_WAITING_FOR_LOCK\n", mFrameCounter);
+                    resetAccumulator();
+                }
+                break;
+            case STATE_WAITING_FOR_LOCK:
+            case STATE_LOCKED:
+                break;
+        }
+
+    }
+
+    void resetAccumulator() {
+        mFramesAccumulated = 0;
+        mSinAccumulator = 0.0;
+        mCosAccumulator = 0.0;
+    }
+
+    void reset() override {
+        mGlitchCount = 0;
+        mState = STATE_IMMUNE;
+        mPhaseIncrement = 2.0 * M_PI / mPeriod;
+        printf("phaseInc = %f for period %d\n", mPhaseIncrement, mPeriod);
+        resetAccumulator();
+    }
+
+private:
+
+    enum sine_state_t {
+        STATE_IMMUNE,
+        STATE_WAITING_FOR_SIGNAL,
+        STATE_WAITING_FOR_LOCK,
+        STATE_LOCKED
+    };
+
+    int     mPeriod = 79;
+    double  mPhaseIncrement = 0.0;
+    double  mPhase = 0.0;
+    double  mPhaseOffset = 0.0;
+    double  mPreviousPhaseOffset = 0.0;
+    double  mMagnitude = 0.0;
+    double  mThreshold = 0.005;
+    double  mTolerance = 0.01;
+    int32_t mFramesAccumulated = 0;
+    double  mSinAccumulator = 0.0;
+    double  mCosAccumulator = 0.0;
+    int32_t mGlitchCount = 0;
+    double  mPeakAmplitude = 0.0;
+    int     mDownCounter = 4000;
+    int32_t mFrameCounter = 0;
+    float   mOutputAmplitude = 0.75;
+
+    int32_t mZeroCrossings = 0;
+
+    PseudoRandom  mWhiteNoise;
+    float   mNoiseAmplitude = 0.00; // Used to experiment with warbling caused by DRC.
+
+    sine_state_t  mState = STATE_IMMUNE;
+};
+
+
+#undef LOOPBACK_SAMPLE_RATE
+#undef LOOPBACK_RESULT_TAG
+
+#endif /* AAUDIO_EXAMPLES_LOOPBACK_ANALYSER_H */
diff --git a/media/libaaudio/examples/loopback/src/loopback.cpp b/media/libaaudio/examples/loopback/src/loopback.cpp
index 57d45cd..df0df04 100644
--- a/media/libaaudio/examples/loopback/src/loopback.cpp
+++ b/media/libaaudio/examples/loopback/src/loopback.cpp
@@ -14,8 +14,7 @@
  * limitations under the License.
  */
 
-// Play an impulse and then record it.
-// Measure the round trip latency.
+// Audio loopback tests that measure round-trip latency and detect glitches.
 
 #include <algorithm>
 #include <assert.h>
@@ -28,460 +27,38 @@
 #include <aaudio/AAudio.h>
 #include <aaudio/AAudioTesting.h>
 
+#include "AAudioSimplePlayer.h"
+#include "AAudioSimpleRecorder.h"
+#include "AAudioExampleUtils.h"
+#include "LoopbackAnalyzer.h"
+
 // Tag for machine readable results as property = value pairs
 #define RESULT_TAG              "RESULT: "
 #define SAMPLE_RATE             48000
 #define NUM_SECONDS             5
 #define NUM_INPUT_CHANNELS      1
 #define FILENAME                "/data/oboe_input.raw"
-
-#define NANOS_PER_MICROSECOND ((int64_t)1000)
-#define NANOS_PER_MILLISECOND (NANOS_PER_MICROSECOND * 1000)
-#define MILLIS_PER_SECOND     1000
-#define NANOS_PER_SECOND      (NANOS_PER_MILLISECOND * MILLIS_PER_SECOND)
-
-#define MAX_ZEROTH_PARTIAL_BINS   40
-
-static const float s_Impulse[] = {
-        0.0f, 0.0f, 0.0f, 0.0f, 0.0f, // silence on each side of the impulse
-        0.5f, 0.9f, 0.0f, -0.9f, -0.5f, // bipolar
-        0.0f, 0.0f, 0.0f, 0.0f, 0.0f};
-
-
-static double calculateCorrelation(const float *a,
-                                   const float *b,
-                                   int windowSize)
-{
-    double correlation = 0.0;
-    double sumProducts = 0.0;
-    double sumSquares = 0.0;
-
-    // Correlate a against b.
-    for (int i = 0; i < windowSize; i++) {
-        float s1 = a[i];
-        float s2 = b[i];
-        // Use a normalized cross-correlation.
-        sumProducts += s1 * s2;
-        sumSquares += ((s1 * s1) + (s2 * s2));
-    }
-
-    if (sumSquares >= 0.00000001) {
-        correlation = (float) (2.0 * sumProducts / sumSquares);
-    }
-    return correlation;
-}
-
-static int calculateCorrelations(const float *haystack, int haystackSize,
-                                 const float *needle, int needleSize,
-                                 float *results, int resultSize)
-{
-    int ic;
-    int maxCorrelations = haystackSize - needleSize;
-    int numCorrelations = std::min(maxCorrelations, resultSize);
-
-    for (ic = 0; ic < numCorrelations; ic++) {
-        double correlation = calculateCorrelation(&haystack[ic], needle, needleSize);
-        results[ic] = correlation;
-    }
-
-    return numCorrelations;
-}
-
-/*==========================================================================================*/
-/**
- * Scan until we get a correlation of a single scan that goes over the tolerance level,
- * peaks then drops back down.
- */
-static double findFirstMatch(const float *haystack, int haystackSize,
-                             const float *needle, int needleSize, double threshold  )
-{
-    int ic;
-    // How many correlations can we calculate?
-    int numCorrelations = haystackSize - needleSize;
-    double maxCorrelation = 0.0;
-    int peakIndex = -1;
-    double location = -1.0;
-
-    for (ic = 0; ic < numCorrelations; ic++) {
-        double correlation = calculateCorrelation(&haystack[ic], needle, needleSize);
-
-        if( (correlation > maxCorrelation) ) {
-            maxCorrelation = correlation;
-            peakIndex = ic;
-        }
-
-        //printf("PaQa_FindFirstMatch: ic = %4d, correlation = %8f, maxSum = %8f\n",
-        //    ic, correlation, maxSum );
-        // Are we past what we were looking for?
-        if((maxCorrelation > threshold) && (correlation < 0.5 * maxCorrelation)) {
-            location = peakIndex;
-            break;
-        }
-    }
-
-    return location;
-}
-
-typedef struct LatencyReport_s {
-    double latencyInFrames;
-    double confidence;
-} LatencyReport;
-
-// Apply a technique similar to Harmonic Product Spectrum Analysis to find echo fundamental.
-// Using first echo instead of the original impulse for a better match.
-int measureLatencyFromEchos(const float *haystack, int haystackSize,
-                          const float *needle, int needleSize,
-                          LatencyReport *report) {
-    double threshold = 0.1;
-
-    // Find first peak
-    int first = (int) (findFirstMatch(haystack,
-                                      haystackSize,
-                                      needle,
-                                      needleSize,
-                                      threshold) + 0.5);
-
-    // Use first echo as the needle for the other echos because
-    // it will be more similar.
-    needle = &haystack[first];
-    int again = (int) (findFirstMatch(haystack,
-                                      haystackSize,
-                                      needle,
-                                      needleSize,
-                                      threshold) + 0.5);
-
-    printf("first = %d, again at %d\n", first, again);
-    first = again;
-
-    // Allocate results array
-    int remaining = haystackSize - first;
-    int generous = 48000 * 2;
-    int numCorrelations = std::min(remaining, generous);
-    float *correlations = new float[numCorrelations];
-    float *harmonicSums = new float[numCorrelations](); // cleared to zero
-
-    // Generate correlation for every position.
-    numCorrelations = calculateCorrelations(&haystack[first], remaining,
-                                            needle, needleSize,
-                                            correlations, numCorrelations);
-
-    // Add higher harmonics mapped onto lower harmonics.
-    // This reinforces the "fundamental" echo.
-    const int numEchoes = 10;
-    for (int partial = 1; partial < numEchoes; partial++) {
-        for (int i = 0; i < numCorrelations; i++) {
-            harmonicSums[i / partial] += correlations[i] / partial;
-        }
-    }
-
-    // Find highest peak in correlation array.
-    float maxCorrelation = 0.0;
-    float sumOfPeaks = 0.0;
-    int peakIndex = 0;
-    const int skip = MAX_ZEROTH_PARTIAL_BINS; // skip low bins
-    for (int i = skip; i < numCorrelations; i++) {
-        if (harmonicSums[i] > maxCorrelation) {
-            maxCorrelation = harmonicSums[i];
-            sumOfPeaks += maxCorrelation;
-            peakIndex = i;
-            printf("maxCorrelation = %f at %d\n", maxCorrelation, peakIndex);
-        }
-    }
-
-    report->latencyInFrames = peakIndex;
-    if (sumOfPeaks < 0.0001) {
-        report->confidence = 0.0;
-    } else {
-        report->confidence = maxCorrelation / sumOfPeaks;
-    }
-
-    delete[] correlations;
-    delete[] harmonicSums;
-    return 0;
-}
-
-class AudioRecording
-{
-public:
-    AudioRecording() {
-    }
-    ~AudioRecording() {
-        delete[] mData;
-    }
-
-    void allocate(int maxFrames) {
-        delete[] mData;
-        mData = new float[maxFrames];
-        mMaxFrames = maxFrames;
-    }
-
-    // Write SHORT data from the first channel.
-    int write(int16_t *inputData, int inputChannelCount, int numFrames) {
-        // stop at end of buffer
-        if ((mFrameCounter + numFrames) > mMaxFrames) {
-            numFrames = mMaxFrames - mFrameCounter;
-        }
-        for (int i = 0; i < numFrames; i++) {
-            mData[mFrameCounter++] = inputData[i * inputChannelCount] * (1.0f / 32768);
-        }
-        return numFrames;
-    }
-
-    // Write FLOAT data from the first channel.
-    int write(float *inputData, int inputChannelCount, int numFrames) {
-        // stop at end of buffer
-        if ((mFrameCounter + numFrames) > mMaxFrames) {
-            numFrames = mMaxFrames - mFrameCounter;
-        }
-        for (int i = 0; i < numFrames; i++) {
-            mData[mFrameCounter++] = inputData[i * inputChannelCount];
-        }
-        return numFrames;
-    }
-
-    int size() {
-        return mFrameCounter;
-    }
-
-    float *getData() {
-        return mData;
-    }
-
-    int save(const char *fileName, bool writeShorts = true) {
-        int written = 0;
-        const int chunkSize = 64;
-        FILE *fid = fopen(fileName, "wb");
-        if (fid == NULL) {
-            return -errno;
-        }
-
-        if (writeShorts) {
-            int16_t buffer[chunkSize];
-            int32_t framesLeft = mFrameCounter;
-            int32_t cursor = 0;
-            while (framesLeft) {
-                int32_t framesToWrite = framesLeft < chunkSize ? framesLeft : chunkSize;
-                for (int i = 0; i < framesToWrite; i++) {
-                    buffer[i] = (int16_t) (mData[cursor++] * 32767);
-                }
-                written += fwrite(buffer, sizeof(int16_t), framesToWrite, fid);
-                framesLeft -= framesToWrite;
-            }
-        } else {
-            written = fwrite(mData, sizeof(float), mFrameCounter, fid);
-        }
-        fclose(fid);
-        return written;
-    }
-
-private:
-    float  *mData = nullptr;
-    int32_t mFrameCounter = 0;
-    int32_t mMaxFrames = 0;
-};
-
-// ====================================================================================
-class LoopbackProcessor {
-public:
-    virtual ~LoopbackProcessor() = default;
-
-    virtual void process(float *inputData, int inputChannelCount,
-                 float *outputData, int outputChannelCount,
-                 int numFrames) = 0;
-
-
-    virtual void report() = 0;
-
-    void setSampleRate(int32_t sampleRate) {
-        mSampleRate = sampleRate;
-    }
-
-    int32_t getSampleRate() {
-        return mSampleRate;
-    }
-
-private:
-    int32_t mSampleRate = SAMPLE_RATE;
-};
-
-
-// ====================================================================================
-class EchoAnalyzer : public LoopbackProcessor {
-public:
-
-    EchoAnalyzer() : LoopbackProcessor() {
-        audioRecorder.allocate(NUM_SECONDS * SAMPLE_RATE);
-    }
-
-    void setGain(float gain) {
-        mGain = gain;
-    }
-
-    float getGain() {
-        return mGain;
-    }
-
-    void report() override {
-
-        const float *needle = s_Impulse;
-        int needleSize = (int)(sizeof(s_Impulse) / sizeof(float));
-        float *haystack = audioRecorder.getData();
-        int haystackSize = audioRecorder.size();
-        int result = measureLatencyFromEchos(haystack, haystackSize,
-                                              needle, needleSize,
-                                              &latencyReport);
-        if (latencyReport.confidence < 0.01) {
-            printf(" ERROR - confidence too low = %f\n", latencyReport.confidence);
-        } else {
-            double latencyMillis = 1000.0 * latencyReport.latencyInFrames / getSampleRate();
-            printf(RESULT_TAG "latency.frames     = %8.2f\n", latencyReport.latencyInFrames);
-            printf(RESULT_TAG "latency.msec       = %8.2f\n", latencyMillis);
-            printf(RESULT_TAG "latency.confidence = %8.6f\n", latencyReport.confidence);
-        }
-    }
-
-    void process(float *inputData, int inputChannelCount,
-                 float *outputData, int outputChannelCount,
-                 int numFrames) override {
-        int channelsValid = std::min(inputChannelCount, outputChannelCount);
-
-        audioRecorder.write(inputData, inputChannelCount, numFrames);
-
-        if (mLoopCounter < mLoopStart) {
-            // Output silence at the beginning.
-            for (int i = 0; i < numFrames; i++) {
-                int ic;
-                for (ic = 0; ic < outputChannelCount; ic++) {
-                    outputData[ic] = 0;
-                }
-                inputData += inputChannelCount;
-                outputData += outputChannelCount;
-            }
-        } else if (mLoopCounter == mLoopStart) {
-            // Send a bipolar impulse that we can easily detect.
-            for (float sample : s_Impulse) {
-                *outputData = sample;
-                outputData += outputChannelCount;
-            }
-        } else {
-            // Echo input to output.
-            for (int i = 0; i < numFrames; i++) {
-                int ic;
-                for (ic = 0; ic < channelsValid; ic++) {
-                    outputData[ic] = inputData[ic] * mGain;
-                }
-                for (; ic < outputChannelCount; ic++) {
-                    outputData[ic] = 0;
-                }
-                inputData += inputChannelCount;
-                outputData += outputChannelCount;
-            }
-        }
-
-        mLoopCounter++;
-    }
-
-private:
-    int   mLoopCounter = 0;
-    int   mLoopStart = 1000;
-    float mGain = 1.0f;
-
-    AudioRecording     audioRecorder;
-    LatencyReport      latencyReport;
-};
-
-
-// ====================================================================================
-class SineAnalyzer : public LoopbackProcessor {
-public:
-
-    void report() override {
-        double magnitude = calculateMagnitude();
-        printf("sine magnitude = %7.5f\n", magnitude);
-        printf("sine frames    = %7d\n", mFrameCounter);
-        printf("sine frequency = %7.1f Hz\n", mFrequency);
-    }
-
-    double calculateMagnitude(double *phasePtr = NULL) {
-        if (mFrameCounter == 0) {
-            return 0.0;
-        }
-        double sinMean = mSinAccumulator / mFrameCounter;
-        double cosMean = mCosAccumulator / mFrameCounter;
-        double magnitude = 2.0 * sqrt( (sinMean * sinMean) + (cosMean * cosMean ));
-        if( phasePtr != NULL )
-        {
-            double phase = atan2( sinMean, cosMean );
-            *phasePtr = phase;
-        }
-        return magnitude;
-    }
-
-    void process(float *inputData, int inputChannelCount,
-                 float *outputData, int outputChannelCount,
-                 int numFrames) override {
-        double phaseIncrement = 2.0 * M_PI * mFrequency / getSampleRate();
-
-        for (int i = 0; i < numFrames; i++) {
-            // Multiply input by sine/cosine
-            float sample = inputData[i * inputChannelCount];
-            float sinOut = sinf(mPhase);
-            mSinAccumulator += sample * sinOut;
-            mCosAccumulator += sample * cosf(mPhase);
-            // Advance and wrap phase
-            mPhase += phaseIncrement;
-            if (mPhase > (2.0 * M_PI)) {
-                mPhase -= (2.0 * M_PI);
-            }
-
-            // Output sine wave so we can measure it.
-            outputData[i * outputChannelCount] = sinOut;
-        }
-        mFrameCounter += numFrames;
-
-        double magnitude = calculateMagnitude();
-        if (mWaiting) {
-            if (magnitude < 0.001) {
-                // discard silence
-                mFrameCounter = 0;
-                mSinAccumulator = 0.0;
-                mCosAccumulator = 0.0;
-            } else {
-                mWaiting = false;
-            }
-        }
-    };
-
-    void setFrequency(int32_t frequency) {
-        mFrequency = frequency;
-    }
-
-    int32_t getFrequency() {
-        return mFrequency;
-    }
-
-private:
-    double  mFrequency = 300.0;
-    double  mPhase = 0.0;
-    int32_t mFrameCounter = 0;
-    double  mSinAccumulator = 0.0;
-    double  mCosAccumulator = 0.0;
-    bool    mWaiting = true;
-};
+#define APP_VERSION             "0.1.22"
 
-// TODO make this a class that manages its own buffer allocation
 struct LoopbackData {
     AAudioStream      *inputStream = nullptr;
     int32_t            inputFramesMaximum = 0;
     int16_t           *inputData = nullptr;
+    int16_t            peakShort = 0;
     float             *conversionBuffer = nullptr;
     int32_t            actualInputChannelCount = 0;
     int32_t            actualOutputChannelCount = 0;
     int32_t            inputBuffersToDiscard = 10;
+    int32_t            minNumFrames = INT32_MAX;
+    int32_t            maxNumFrames = 0;
+    bool               isDone = false;
 
-    aaudio_result_t    inputError;
+    aaudio_result_t    inputError = AAUDIO_OK;
+    aaudio_result_t    outputError = AAUDIO_OK;
+
     SineAnalyzer       sineAnalyzer;
     EchoAnalyzer       echoAnalyzer;
+    AudioRecording     audioRecorder;
     LoopbackProcessor *loopbackProcessor;
 };
 
@@ -517,6 +94,13 @@
         return AAUDIO_CALLBACK_RESULT_STOP;
     }
 
+    if (numFrames > myData->maxNumFrames) {
+        myData->maxNumFrames = numFrames;
+    }
+    if (numFrames < myData->minNumFrames) {
+        myData->minNumFrames = numFrames;
+    }
+
     if (myData->inputBuffersToDiscard > 0) {
         // Drain the input.
         do {
@@ -524,6 +108,7 @@
                                        numFrames, 0);
             if (framesRead < 0) {
                 myData->inputError = framesRead;
+                printf("ERROR in read = %d", framesRead);
                 result = AAUDIO_CALLBACK_RESULT_STOP;
             } else if (framesRead > 0) {
                 myData->inputBuffersToDiscard--;
@@ -534,9 +119,14 @@
                                        numFrames, 0);
         if (framesRead < 0) {
             myData->inputError = framesRead;
+            printf("ERROR in read = %d", framesRead);
             result = AAUDIO_CALLBACK_RESULT_STOP;
         } else if (framesRead > 0) {
 
+            myData->audioRecorder.write(myData->inputData,
+                                        myData->actualInputChannelCount,
+                                        numFrames);
+
             int32_t numSamples = framesRead * myData->actualInputChannelCount;
             convertPcm16ToFloat(myData->inputData, myData->conversionBuffer, numSamples);
 
@@ -545,12 +135,25 @@
                                               outputData,
                                               myData->actualOutputChannelCount,
                                               framesRead);
+            myData->isDone = myData->loopbackProcessor->isDone();
+            if (myData->isDone) {
+                result = AAUDIO_CALLBACK_RESULT_STOP;
+            }
         }
     }
 
     return result;
 }
 
+static void MyErrorCallbackProc(
+        AAudioStream *stream __unused,
+        void *userData,
+        aaudio_result_t error)
+{
+    printf("Error Callback, error: %d\n",(int)error);
+    LoopbackData *myData = (LoopbackData *) userData;
+    myData->outputError = error;
+}
 
 static void usage() {
     printf("loopback: -n{numBursts} -p{outPerf} -P{inPerf} -t{test} -g{gain} -f{freq}\n");
@@ -571,7 +174,7 @@
 }
 
 static aaudio_performance_mode_t parsePerformanceMode(char c) {
-    aaudio_performance_mode_t mode = AAUDIO_PERFORMANCE_MODE_NONE;
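+    // Deliberately initialize to a negative AAudio error code so the caller can
+    // detect an unrecognized option by checking for mode < 0.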
+    aaudio_performance_mode_t mode = AAUDIO_ERROR_ILLEGAL_ARGUMENT;
     c = tolower(c);
     switch (c) {
         case 'n':
@@ -612,86 +215,117 @@
     return testMode;
 }
 
+void printAudioGraph(AudioRecording &recording, int numSamples) {
+    int32_t start = recording.size() / 2;
+    int32_t end = start + numSamples;
+    if (end >= recording.size()) {
+        end = recording.size() - 1;
+    }
+    float *data = recording.getData();
+    // Normalize data so we can see it better.
+    float maxSample = 0.01;
+    for (int32_t i = start; i < end; i++) {
+        float samplePos = fabs(data[i]);
+        if (samplePos > maxSample) {
+            maxSample = samplePos;
+        }
+    }
+    float gain = 0.98f / maxSample;
+    for (int32_t i = start; i < end; i++) {
+        float sample = data[i];
+        printf("%5.3f ", sample); // actual value
+        sample *= gain;
+        printAudioScope(sample);
+    }
+}
+
+
 // ====================================================================================
 // TODO break up this large main() function into smaller functions
 int main(int argc, const char **argv)
 {
-    aaudio_result_t result = AAUDIO_OK;
-    LoopbackData loopbackData;
-    AAudioStream *outputStream = nullptr;
 
-    int requestedInputChannelCount = NUM_INPUT_CHANNELS;
-    const int requestedOutputChannelCount = AAUDIO_UNSPECIFIED;
-    const int requestedSampleRate = SAMPLE_RATE;
-    int actualSampleRate = 0;
+    AAudioArgsParser     argParser;
+    AAudioSimplePlayer   player;
+    AAudioSimpleRecorder recorder;
+    LoopbackData         loopbackData;
+    AAudioStream        *outputStream = nullptr;
+
+    aaudio_result_t      result = AAUDIO_OK;
+    aaudio_sharing_mode_t requestedInputSharingMode     = AAUDIO_SHARING_MODE_SHARED;
+    int                   requestedInputChannelCount = NUM_INPUT_CHANNELS;
+    const int             requestedOutputChannelCount = AAUDIO_UNSPECIFIED;
+    int                   actualSampleRate = 0;
     const aaudio_format_t requestedInputFormat = AAUDIO_FORMAT_PCM_I16;
     const aaudio_format_t requestedOutputFormat = AAUDIO_FORMAT_PCM_FLOAT;
-    aaudio_format_t actualInputFormat;
-    aaudio_format_t actualOutputFormat;
+    aaudio_format_t       actualInputFormat;
+    aaudio_format_t       actualOutputFormat;
+    aaudio_performance_mode_t outputPerformanceLevel = AAUDIO_PERFORMANCE_MODE_LOW_LATENCY;
+    aaudio_performance_mode_t inputPerformanceLevel = AAUDIO_PERFORMANCE_MODE_LOW_LATENCY;
+
     int testMode = TEST_ECHO_LATENCY;
-    double frequency = 1000.0;
     double gain = 1.0;
 
-    const aaudio_sharing_mode_t requestedSharingMode = AAUDIO_SHARING_MODE_EXCLUSIVE;
-    //const aaudio_sharing_mode_t requestedSharingMode = AAUDIO_SHARING_MODE_SHARED;
-    aaudio_sharing_mode_t       actualSharingMode;
-
-    AAudioStreamBuilder  *builder = nullptr;
     aaudio_stream_state_t state = AAUDIO_STREAM_STATE_UNINITIALIZED;
     int32_t framesPerBurst = 0;
     float *outputData = NULL;
     double deviation;
     double latency;
-    aaudio_performance_mode_t outputPerformanceLevel = AAUDIO_PERFORMANCE_MODE_LOW_LATENCY;
-    aaudio_performance_mode_t inputPerformanceLevel = AAUDIO_PERFORMANCE_MODE_LOW_LATENCY;
-
     int32_t burstsPerBuffer = 1; // single buffered
 
+    // Make printf print immediately so that debug info is not stuck
+    // in a buffer if we hang or crash.
+    setvbuf(stdout, NULL, _IONBF, (size_t) 0);
+
+    printf("%s - Audio loopback using AAudio V" APP_VERSION "\n", argv[0]);
+
     for (int i = 1; i < argc; i++) {
         const char *arg = argv[i];
-        if (arg[0] == '-') {
-            char option = arg[1];
-            switch (option) {
-                case 'c':
-                    requestedInputChannelCount = atoi(&arg[2]);
-                    break;
-                case 'f':
-                    frequency = atof(&arg[2]);
-                    break;
-                case 'g':
-                    gain = atof(&arg[2]);
-                    break;
-                case 'm':
-                    AAudio_setMMapPolicy(AAUDIO_POLICY_AUTO);
-                    break;
-                case 'n':
-                    burstsPerBuffer = atoi(&arg[2]);
-                    break;
-                case 'p':
-                    outputPerformanceLevel = parsePerformanceMode(arg[2]);
-                    break;
-                case 'P':
-                    inputPerformanceLevel = parsePerformanceMode(arg[2]);
-                    break;
-                case 't':
-                    testMode = parseTestMode(arg[2]);
-                    break;
-                default:
-                    usage();
-                    exit(0);
-                    break;
+        if (argParser.parseArg(arg)) {
+            // Handle options that are not handled by the ArgParser
+            if (arg[0] == '-') {
+                char option = arg[1];
+                switch (option) {
+                    case 'C':
+                        requestedInputChannelCount = atoi(&arg[2]);
+                        break;
+                    case 'g':
+                        gain = atof(&arg[2]);
+                        break;
+                    case 'P':
+                        inputPerformanceLevel = parsePerformanceMode(arg[2]);
+                        break;
+                    case 'X':
+                        requestedInputSharingMode = AAUDIO_SHARING_MODE_EXCLUSIVE;
+                        break;
+                    case 't':
+                        testMode = parseTestMode(arg[2]);
+                        break;
+                    default:
+                        usage();
+                        exit(EXIT_FAILURE);
+                        break;
+                }
+            } else {
+                usage();
+                exit(EXIT_FAILURE);
+                break;
             }
-        } else {
-            usage();
-            exit(0);
-            break;
         }
+
     }
 
+    if (inputPerformanceLevel < 0) {
+        printf("illegal inputPerformanceLevel = %d\n", inputPerformanceLevel);
+        exit(EXIT_FAILURE);
+    }
+
+    int32_t requestedDuration = argParser.getDurationSeconds();
+    int32_t recordingDuration = std::min(60, requestedDuration);
+    loopbackData.audioRecorder.allocate(recordingDuration * SAMPLE_RATE);
 
     switch(testMode) {
         case TEST_SINE_MAGNITUDE:
-            loopbackData.sineAnalyzer.setFrequency(frequency);
             loopbackData.loopbackProcessor = &loopbackData.sineAnalyzer;
             break;
         case TEST_ECHO_LATENCY:
@@ -703,106 +337,44 @@
             break;
     }
 
-    // Make printf print immediately so that debug info is not stuck
-    // in a buffer if we hang or crash.
-    setvbuf(stdout, NULL, _IONBF, (size_t) 0);
-
-    printf("%s - Audio loopback using AAudio\n", argv[0]);
-
-    // Use an AAudioStreamBuilder to contain requested parameters.
-    result = AAudio_createStreamBuilder(&builder);
-    if (result < 0) {
-        goto finish;
-    }
-
-    // Request common stream properties.
-    AAudioStreamBuilder_setSampleRate(builder, requestedSampleRate);
-    AAudioStreamBuilder_setFormat(builder, requestedInputFormat);
-    AAudioStreamBuilder_setSharingMode(builder, requestedSharingMode);
-
-    // Open the input stream.
-    AAudioStreamBuilder_setDirection(builder, AAUDIO_DIRECTION_INPUT);
-    AAudioStreamBuilder_setPerformanceMode(builder, inputPerformanceLevel);
-    AAudioStreamBuilder_setChannelCount(builder, requestedInputChannelCount);
-
-    result = AAudioStreamBuilder_openStream(builder, &loopbackData.inputStream);
-    printf("AAudioStreamBuilder_openStream(input) returned %d = %s\n",
-           result, AAudio_convertResultToText(result));
-    if (result < 0) {
-        goto finish;
-    }
-
-    // Create an output stream using the Builder.
-    AAudioStreamBuilder_setDirection(builder, AAUDIO_DIRECTION_OUTPUT);
-    AAudioStreamBuilder_setFormat(builder, requestedOutputFormat);
-    AAudioStreamBuilder_setPerformanceMode(builder, outputPerformanceLevel);
-    AAudioStreamBuilder_setChannelCount(builder, requestedOutputChannelCount);
-    AAudioStreamBuilder_setDataCallback(builder, MyDataCallbackProc, &loopbackData);
-
-    result = AAudioStreamBuilder_openStream(builder, &outputStream);
-    printf("AAudioStreamBuilder_openStream(output) returned %d = %s\n",
-           result, AAudio_convertResultToText(result));
+    printf("OUTPUT stream ----------------------------------------\n");
+    argParser.setFormat(requestedOutputFormat);
+    result = player.open(argParser, MyDataCallbackProc, MyErrorCallbackProc, &loopbackData);
     if (result != AAUDIO_OK) {
+        fprintf(stderr, "ERROR -  player.open() returned %d\n", result);
         goto finish;
     }
+    outputStream = player.getStream();
+    argParser.compareWithStream(outputStream);
 
-    printf("Stream INPUT ---------------------\n");
-    loopbackData.actualInputChannelCount = AAudioStream_getChannelCount(loopbackData.inputStream);
-    printf("    channelCount: requested = %d, actual = %d\n", requestedInputChannelCount,
-           loopbackData.actualInputChannelCount);
-    printf("    framesPerBurst = %d\n", AAudioStream_getFramesPerBurst(loopbackData.inputStream));
-    printf("    bufferSize     = %d\n",
-           AAudioStream_getBufferSizeInFrames(loopbackData.inputStream));
-    printf("    bufferCapacity = %d\n",
-           AAudioStream_getBufferCapacityInFrames(loopbackData.inputStream));
+    actualOutputFormat = AAudioStream_getFormat(outputStream);
+    assert(actualOutputFormat == AAUDIO_FORMAT_PCM_FLOAT);
 
-    actualSharingMode = AAudioStream_getSharingMode(loopbackData.inputStream);
-    printf("    sharingMode: requested = %d, actual = %d\n",
-           requestedSharingMode, actualSharingMode);
-
-    actualInputFormat = AAudioStream_getFormat(loopbackData.inputStream);
-    printf("    dataFormat: requested = %d, actual = %d\n",
-           requestedInputFormat, actualInputFormat);
-    assert(actualInputFormat == AAUDIO_FORMAT_PCM_I16);
-
-    printf("    is MMAP used?         = %s\n", AAudioStream_isMMapUsed(loopbackData.inputStream)
-                                               ? "yes" : "no");
-
-
-    printf("Stream OUTPUT ---------------------\n");
-    // Check to see what kind of stream we actually got.
-    actualSampleRate = AAudioStream_getSampleRate(outputStream);
-    printf("    sampleRate: requested = %d, actual = %d\n", requestedSampleRate, actualSampleRate);
-    loopbackData.echoAnalyzer.setSampleRate(actualSampleRate);
-
-    loopbackData.actualOutputChannelCount = AAudioStream_getChannelCount(outputStream);
-    printf("    channelCount: requested = %d, actual = %d\n", requestedOutputChannelCount,
-           loopbackData.actualOutputChannelCount);
-
-    actualSharingMode = AAudioStream_getSharingMode(outputStream);
-    printf("    sharingMode: requested = %d, actual = %d\n",
-           requestedSharingMode, actualSharingMode);
+    printf("INPUT stream ----------------------------------------\n");
+    // Use different parameters for the input.
+    argParser.setNumberOfBursts(AAUDIO_UNSPECIFIED);
+    argParser.setFormat(requestedInputFormat);
+    argParser.setPerformanceMode(inputPerformanceLevel);
+    argParser.setChannelCount(requestedInputChannelCount);
+    argParser.setSharingMode(requestedInputSharingMode);
+    result = recorder.open(argParser);
+    if (result != AAUDIO_OK) {
+        fprintf(stderr, "ERROR -  recorder.open() returned %d\n", result);
+        goto finish;
+    }
+    loopbackData.inputStream = recorder.getStream();
+    argParser.compareWithStream(loopbackData.inputStream);
 
     // This is the number of frames that are read in one chunk by a DMA controller
     // or a DSP or a mixer.
     framesPerBurst = AAudioStream_getFramesPerBurst(outputStream);
-    printf("    framesPerBurst = %d\n", framesPerBurst);
 
-    result = AAudioStream_setBufferSizeInFrames(outputStream, burstsPerBuffer * framesPerBurst);
-    if (result < 0) { // may be positive buffer size
-        fprintf(stderr, "ERROR - AAudioStream_setBufferSize() returned %d\n", result);
-        goto finish;
-    }
-    printf("    bufferSize     = %d\n", AAudioStream_getBufferSizeInFrames(outputStream));
-    printf("    bufferCapacity = %d\n", AAudioStream_getBufferCapacityInFrames(outputStream));
+    actualInputFormat = AAudioStream_getFormat(loopbackData.inputStream);
+    assert(actualInputFormat == AAUDIO_FORMAT_PCM_I16);
 
-    actualOutputFormat = AAudioStream_getFormat(outputStream);
-    printf("    dataFormat: requested = %d, actual = %d\n",
-           requestedOutputFormat, actualOutputFormat);
-    assert(actualOutputFormat == AAUDIO_FORMAT_PCM_FLOAT);
 
-    printf("    is MMAP used?         = %s\n", AAudioStream_isMMapUsed(outputStream)
-                                               ? "yes" : "no");
+    loopbackData.actualInputChannelCount = recorder.getChannelCount();
+    loopbackData.actualOutputChannelCount = player.getChannelCount();
 
     // Allocate a buffer for the audio data.
     loopbackData.inputFramesMaximum = 32 * framesPerBurst;
@@ -813,49 +385,82 @@
     loopbackData.conversionBuffer = new float[loopbackData.inputFramesMaximum *
                                               loopbackData.actualInputChannelCount];
 
+    loopbackData.loopbackProcessor->reset();
 
-    // Start output first so input stream runs low.
-    result = AAudioStream_requestStart(outputStream);
+    result = recorder.start();
     if (result != AAUDIO_OK) {
-        fprintf(stderr, "ERROR - AAudioStream_requestStart(output) returned %d = %s\n",
-                result, AAudio_convertResultToText(result));
+        printf("ERROR - AAudioStream_requestStart(input) returned %d = %s\n",
+               result, AAudio_convertResultToText(result));
         goto finish;
     }
 
-    result = AAudioStream_requestStart(loopbackData.inputStream);
+    result = player.start();
     if (result != AAUDIO_OK) {
-        fprintf(stderr, "ERROR - AAudioStream_requestStart(input) returned %d = %s\n",
-                result, AAudio_convertResultToText(result));
+        printf("ERROR - AAudioStream_requestStart(output) returned %d = %s\n",
+               result, AAudio_convertResultToText(result));
         goto finish;
     }
 
     printf("------- sleep while the callback runs --------------\n");
     fflush(stdout);
-    sleep(NUM_SECONDS);
+    for (int i = requestedDuration; i > 0 ; i--) {
+        if (loopbackData.inputError != AAUDIO_OK) {
+            printf("  ERROR on input stream\n");
+            break;
+        } else if (loopbackData.outputError != AAUDIO_OK) {
+            printf("  ERROR on output stream\n");
+            break;
+        } else if (loopbackData.isDone) {
+            printf("  test says it is done!\n");
+            break;
+        } else {
+            sleep(1);
+            printf("%4d: ", i);
+            loopbackData.loopbackProcessor->printStatus();
 
+            int64_t inputFramesWritten = AAudioStream_getFramesWritten(loopbackData.inputStream);
+            int64_t inputFramesRead = AAudioStream_getFramesRead(loopbackData.inputStream);
+            int64_t outputFramesWritten = AAudioStream_getFramesWritten(outputStream);
+            int64_t outputFramesRead = AAudioStream_getFramesRead(outputStream);
+            printf(" INPUT: wr %lld rd %lld state %s, OUTPUT: wr %lld rd %lld state %s, xruns %d\n",
+                   (long long) inputFramesWritten,
+                   (long long) inputFramesRead,
+                   AAudio_convertStreamStateToText(AAudioStream_getState(loopbackData.inputStream)),
+                   (long long) outputFramesWritten,
+                   (long long) outputFramesRead,
+                   AAudio_convertStreamStateToText(AAudioStream_getState(outputStream)),
+                   AAudioStream_getXRunCount(outputStream)
+            );
+        }
+    }
 
     printf("input error = %d = %s\n",
                 loopbackData.inputError, AAudio_convertResultToText(loopbackData.inputError));
 
     printf("AAudioStream_getXRunCount %d\n", AAudioStream_getXRunCount(outputStream));
-    printf("framesRead    = %d\n", (int) AAudioStream_getFramesRead(outputStream));
-    printf("framesWritten = %d\n", (int) AAudioStream_getFramesWritten(outputStream));
+    printf("framesRead    = %8d\n", (int) AAudioStream_getFramesRead(outputStream));
+    printf("framesWritten = %8d\n", (int) AAudioStream_getFramesWritten(outputStream));
+    printf("min numFrames = %8d\n", (int) loopbackData.minNumFrames);
+    printf("max numFrames = %8d\n", (int) loopbackData.maxNumFrames);
 
-    loopbackData.loopbackProcessor->report();
+    if (loopbackData.inputError == AAUDIO_OK) {
+        if (testMode == TEST_SINE_MAGNITUDE) {
+            printAudioGraph(loopbackData.audioRecorder, 200);
+        }
+        loopbackData.loopbackProcessor->report();
+    }
 
-//    {
-//        int written = loopbackData.audioRecorder.save(FILENAME);
-//        printf("wrote %d mono samples to %s on Android device\n", written, FILENAME);
-//    }
-
+    {
+        int written = loopbackData.audioRecorder.save(FILENAME);
+        printf("main() wrote %d mono samples to %s on Android device\n", written, FILENAME);
+    }
 
 finish:
-    AAudioStream_close(outputStream);
-    AAudioStream_close(loopbackData.inputStream);
+    player.close();
+    recorder.close();
     delete[] loopbackData.conversionBuffer;
     delete[] loopbackData.inputData;
     delete[] outputData;
-    AAudioStreamBuilder_delete(builder);
 
     printf(RESULT_TAG "error = %d = %s\n", result, AAudio_convertResultToText(result));
     if ((result != AAUDIO_OK)) {
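For reference, the per-second monitoring loop added above reduces to the following pattern (a standalone sketch, not part of the patch; "stream" and "durationSeconds" stand in for any open, started AAudioStream and a test length, and the one-second cadence is arbitrary):

    // Poll stream progress once per second until the test duration expires.
    for (int i = durationSeconds; i > 0; i--) {
        sleep(1);
        printf("%4d: wr %lld rd %lld state %s, xruns %d\n", i,
               (long long) AAudioStream_getFramesWritten(stream),
               (long long) AAudioStream_getFramesRead(stream),
               AAudio_convertStreamStateToText(AAudioStream_getState(stream)),
               AAudioStream_getXRunCount(stream));
    }
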
diff --git a/media/libaaudio/examples/loopback/src/loopback.sh b/media/libaaudio/examples/loopback/src/loopback.sh
new file mode 100644
index 0000000..bc63125
--- /dev/null
+++ b/media/libaaudio/examples/loopback/src/loopback.sh
@@ -0,0 +1,14 @@
+#!/system/bin/sh
+# Run a loopback test in the background after a delay.
+# To run the script enter:
+#    adb shell "nohup sh /data/loopback.sh &"
+
+SLEEP_TIME=10
+TEST_COMMAND="aaudio_loopback -pl -Pl -C1 -n2 -m2 -tm -d5"
+
+echo "Plug in USB Mir and Fun Plug."
+echo "Test will start in ${SLEEP_TIME} seconds: ${TEST_COMMAND}"
+sleep ${SLEEP_TIME}
+date > /data/loopreport.txt
+${TEST_COMMAND} >> /data/loopreport.txt
+date >> /data/loopreport.txt
diff --git a/media/libaaudio/examples/utils/AAudioArgsParser.h b/media/libaaudio/examples/utils/AAudioArgsParser.h
index 54217a5..30c3ccd 100644
--- a/media/libaaudio/examples/utils/AAudioArgsParser.h
+++ b/media/libaaudio/examples/utils/AAudioArgsParser.h
@@ -24,7 +24,8 @@
 
 #include <aaudio/AAudio.h>
 #include <aaudio/AAudioTesting.h>
-#include <AAudioExampleUtils.h>
+
+#include "AAudioExampleUtils.h"
 
 // TODO use this as a base class within AAudio
 class AAudioParameters {
@@ -121,7 +122,7 @@
     aaudio_sharing_mode_t      mSharingMode     = AAUDIO_SHARING_MODE_SHARED;
     aaudio_performance_mode_t  mPerformanceMode = AAUDIO_PERFORMANCE_MODE_NONE;
 
-    int32_t                    mNumberOfBursts = AAUDIO_UNSPECIFIED;
+    int32_t                    mNumberOfBursts  = AAUDIO_UNSPECIFIED;
 };
 
 class AAudioArgsParser : public AAudioParameters {
@@ -151,9 +152,13 @@
                 case 'd':
                     mDurationSeconds = atoi(&arg[2]);
                     break;
-                case 'm':
-                    AAudio_setMMapPolicy(AAUDIO_POLICY_AUTO);
-                    break;
+                case 'm': {
+                    aaudio_policy_t policy = AAUDIO_POLICY_AUTO;
+                    if (strlen(arg) > 2) {
+                        policy = atoi(&arg[2]);
+                    }
+                    AAudio_setMMapPolicy(policy);
+                } break;
                 case 'n':
                     setNumberOfBursts(atoi(&arg[2]));
                     break;
@@ -198,7 +203,11 @@
         printf("      -b{bufferCapacity} frames\n");
         printf("      -c{channels} for example 2 for stereo\n");
         printf("      -d{duration} in seconds, default is %d\n", DEFAULT_DURATION_SECONDS);
-        printf("      -m enable MMAP\n");
+        printf("      -m{0|1|2|3} set MMAP policy\n");
+        printf("          0 = _UNSPECIFIED, default\n");
+        printf("          1 = _NEVER\n");
+        printf("          2 = _AUTO, also if -m is used with no number\n");
+        printf("          3 = _ALWAYS\n");
         printf("      -n{numberOfBursts} for setBufferSize\n");
         printf("      -p{performanceMode} set output AAUDIO_PERFORMANCE_MODE*, default NONE\n");
         printf("          n for _NONE\n");
@@ -231,7 +240,7 @@
      * Print stream parameters in comparison with requested values.
      * @param stream
      */
-    void compareWithStream(AAudioStream *stream) {
+    void compareWithStream(AAudioStream *stream) const {
 
         printf("  DeviceId:     requested = %d, actual = %d\n",
                getDeviceId(), AAudioStream_getDeviceId(stream));
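The optional-value flag handling added for "-m" follows this pattern (a standalone sketch; the value meanings match the usage text above, where 2 is AAUDIO_POLICY_AUTO):

    // "-m" alone enables AUTO; "-m2" parses the trailing digit as the policy.
    const char *arg = "-m2";
    aaudio_policy_t policy = AAUDIO_POLICY_AUTO;   // default when no digit follows
    if (strlen(arg) > 2) {
        policy = atoi(&arg[2]);                    // here 2, i.e. AAUDIO_POLICY_AUTO
    }
    AAudio_setMMapPolicy(policy);
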
diff --git a/media/libaaudio/examples/utils/AAudioExampleUtils.h b/media/libaaudio/examples/utils/AAudioExampleUtils.h
index 66de25f..6cbcc58 100644
--- a/media/libaaudio/examples/utils/AAudioExampleUtils.h
+++ b/media/libaaudio/examples/utils/AAudioExampleUtils.h
@@ -25,7 +25,7 @@
 #define NANOS_PER_MILLISECOND (NANOS_PER_MICROSECOND * 1000)
 #define NANOS_PER_SECOND      (NANOS_PER_MILLISECOND * 1000)
 
-static const char *getSharingModeText(aaudio_sharing_mode_t mode) {
+const char *getSharingModeText(aaudio_sharing_mode_t mode) {
     const char *modeText = "unknown";
     switch (mode) {
     case AAUDIO_SHARING_MODE_EXCLUSIVE:
@@ -49,7 +49,7 @@
     return (time.tv_sec * NANOS_PER_SECOND) + time.tv_nsec;
 }
 
-void displayPeakLevel(float peakLevel) {
+static void displayPeakLevel(float peakLevel) {
     printf("%5.3f ", peakLevel);
     const int maxStars = 50; // arbitrary, fits on one line
     int numStars = (int) (peakLevel * maxStars);
@@ -59,4 +59,24 @@
     printf("\n");
 }
 
+/**
+ * @param position1 position of hardware frame
+ * @param nanoseconds1
+ * @param position2 position of client read/write
+ * @param nanoseconds2
+ * @param sampleRate
+ * @return latency in milliseconds
+ */
+static double calculateLatencyMillis(int64_t position1, int64_t nanoseconds1,
+                                     int64_t position2, int64_t nanoseconds2,
+                                     int64_t sampleRate) {
+    int64_t deltaFrames = position2 - position1;
+    int64_t deltaTime = (NANOS_PER_SECOND * deltaFrames) / sampleRate;
+    int64_t timeCurrentFramePlayed = nanoseconds1 + deltaTime;
+    int64_t latencyNanos = timeCurrentFramePlayed - nanoseconds2;
+    double latencyMillis = latencyNanos / 1000000.0;
+    return latencyMillis;
+}
+
 #endif // AAUDIO_EXAMPLE_UTILS_H
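As a worked example of calculateLatencyMillis() above (the numbers are illustrative):

    // Hardware reports frame 48000 was at the DAC at t1 = 1.000 s;
    // the client wrote frame 52800 at t2 = 1.050 s; sample rate 48000 Hz.
    //   deltaFrames = 52800 - 48000 = 4800
    //   deltaTime   = 1e9 * 4800 / 48000 = 100,000,000 ns
    //   frame 52800 reaches the DAC at 1.000 s + 0.100 s = 1.100 s
    //   latency     = 1.100 s - 1.050 s = 50 ms
    double latencyMillis = calculateLatencyMillis(48000, 1000000000LL,
                                                  52800, 1050000000LL,
                                                  48000);  // == 50.0
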
diff --git a/media/libaaudio/examples/utils/AAudioSimplePlayer.h b/media/libaaudio/examples/utils/AAudioSimplePlayer.h
index 19f8aff..cc0cb34 100644
--- a/media/libaaudio/examples/utils/AAudioSimplePlayer.h
+++ b/media/libaaudio/examples/utils/AAudioSimplePlayer.h
@@ -30,6 +30,11 @@
 #define SHARING_MODE  AAUDIO_SHARING_MODE_SHARED
 #define PERFORMANCE_MODE AAUDIO_PERFORMANCE_MODE_NONE
 
+// Arbitrary period for glitches, once per second at 48000 Hz.
+#define FORCED_UNDERRUN_PERIOD_FRAMES    48000
+// How long to sleep in a callback to cause an intentional glitch. For testing.
+#define FORCED_UNDERRUN_SLEEP_MICROS     (10 * 1000)
+
 /**
  * Simple wrapper for AAudio that opens an output stream either in callback or blocking write mode.
  */
@@ -219,8 +224,13 @@
 typedef struct SineThreadedData_s {
     SineGenerator  sineOsc1;
     SineGenerator  sineOsc2;
+    int64_t        framesTotal = 0;
+    int64_t        nextFrameToGlitch = FORCED_UNDERRUN_PERIOD_FRAMES;
+    int32_t        minNumFrames = INT32_MAX;
+    int32_t        maxNumFrames = 0;
     int            scheduler;
-    bool           schedulerChecked;
+    bool           schedulerChecked = false;
+    bool           forceUnderruns = false;
 } SineThreadedData_t;
 
 // Callback function that fills the audio output buffer.
@@ -233,16 +243,33 @@
 
     // should not happen but just in case...
     if (userData == nullptr) {
-        fprintf(stderr, "ERROR - SimplePlayerDataCallbackProc needs userData\n");
+        printf("ERROR - SimplePlayerDataCallbackProc needs userData\n");
         return AAUDIO_CALLBACK_RESULT_STOP;
     }
     SineThreadedData_t *sineData = (SineThreadedData_t *) userData;
 
+    sineData->framesTotal += numFrames;
+
+    if (sineData->forceUnderruns) {
+        if (sineData->framesTotal > sineData->nextFrameToGlitch) {
+            usleep(FORCED_UNDERRUN_SLEEP_MICROS);
+            printf("Simulate glitch at %lld\n", (long long) sineData->framesTotal);
+            sineData->nextFrameToGlitch += FORCED_UNDERRUN_PERIOD_FRAMES;
+        }
+    }
+
     if (!sineData->schedulerChecked) {
         sineData->scheduler = sched_getscheduler(gettid());
         sineData->schedulerChecked = true;
     }
 
+    if (numFrames > sineData->maxNumFrames) {
+        sineData->maxNumFrames = numFrames;
+    }
+    if (numFrames < sineData->minNumFrames) {
+        sineData->minNumFrames = numFrames;
+    }
+
     int32_t samplesPerFrame = AAudioStream_getChannelCount(stream);
     // This code only plays on the first one or two channels.
     // TODO Support arbitrary number of channels.
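To exercise the forced-underrun path added above, a caller only needs to flip the new flag (a sketch; the surrounding open/start code is elided):

    SineThreadedData_t myData;
    myData.forceUnderruns = true;  // sleep 10 ms in the callback about once per second
    // ... open a stream with SimplePlayerDataCallbackProc and start it ...
    // Each simulated glitch should then show up in AAudioStream_getXRunCount().
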
diff --git a/media/libaaudio/examples/utils/AAudioSimpleRecorder.h b/media/libaaudio/examples/utils/AAudioSimpleRecorder.h
index 6be9112..1344273 100644
--- a/media/libaaudio/examples/utils/AAudioSimpleRecorder.h
+++ b/media/libaaudio/examples/utils/AAudioSimpleRecorder.h
@@ -73,12 +73,20 @@
     /**
      * Only call this after open() has been called.
      */
-    int32_t getSamplesPerFrame() {
+    int32_t getChannelCount() {
         if (mStream == nullptr) {
             return AAUDIO_ERROR_INVALID_STATE;
         }
         return AAudioStream_getChannelCount(mStream);
     }
+
+    /**
+     * @deprecated use getChannelCount()
+     */
+    int32_t getSamplesPerFrame() {
+        return getChannelCount();
+    }
+
     /**
      * Only call this after open() has been called.
      */
diff --git a/media/libaaudio/examples/write_sine/src/write_sine_callback.cpp b/media/libaaudio/examples/write_sine/src/write_sine_callback.cpp
index 2211b72..b5602e9 100644
--- a/media/libaaudio/examples/write_sine/src/write_sine_callback.cpp
+++ b/media/libaaudio/examples/write_sine/src/write_sine_callback.cpp
@@ -26,9 +26,7 @@
 #include <aaudio/AAudio.h>
 #include "AAudioExampleUtils.h"
 #include "AAudioSimplePlayer.h"
-
-// Application data that gets passed to the callback.
-#define MAX_FRAME_COUNT_RECORDS    256
+#include "../../utils/AAudioSimplePlayer.h"
 
 int main(int argc, const char **argv)
 {
@@ -42,9 +40,10 @@
     // in a buffer if we hang or crash.
     setvbuf(stdout, nullptr, _IONBF, (size_t) 0);
 
-    printf("%s - Play a sine sweep using an AAudio callback V0.1.1\n", argv[0]);
+    printf("%s - Play a sine sweep using an AAudio callback V0.1.2\n", argv[0]);
 
     myData.schedulerChecked = false;
+    myData.forceUnderruns = false; // set true to test AAudioStream_getXRunCount()
 
     if (argParser.parseArgs(argc, argv)) {
         return EXIT_FAILURE;
@@ -99,7 +98,10 @@
             printf("Stream state is %d %s!\n", state, AAudio_convertStreamStateToText(state));
             break;
         }
-        printf("framesWritten = %d\n", (int) AAudioStream_getFramesWritten(player.getStream()));
+        printf("framesWritten = %d, underruns = %d\n",
+               (int) AAudioStream_getFramesWritten(player.getStream()),
+               (int) AAudioStream_getXRunCount(player.getStream())
+        );
     }
     printf("Woke up now.\n");
 
@@ -120,6 +122,9 @@
                SCHED_FIFO);
     }
 
+    printf("min numFrames = %8d\n", (int) myData.minNumFrames);
+    printf("max numFrames = %8d\n", (int) myData.maxNumFrames);
+
     printf("SUCCESS\n");
     return EXIT_SUCCESS;
 error:
diff --git a/media/libaaudio/include/aaudio/AAudio.h b/media/libaaudio/include/aaudio/AAudio.h
index e5f0deb..3c23736 100644
--- a/media/libaaudio/include/aaudio/AAudio.h
+++ b/media/libaaudio/include/aaudio/AAudio.h
@@ -234,6 +234,15 @@
                                                    int32_t channelCount);
 
 /**
+ * Identical to AAudioStreamBuilder_setChannelCount().
+ *
+ * @param builder reference provided by AAudio_createStreamBuilder()
+ * @param samplesPerFrame Number of samples in a frame.
+ */
+AAUDIO_API void AAudioStreamBuilder_setSamplesPerFrame(AAudioStreamBuilder* builder,
+                                                       int32_t samplesPerFrame);
+
+/**
  * Request a sample data format, for example AAUDIO_FORMAT_PCM_I16.
  *
  * The default, if you do not call this function, is AAUDIO_UNSPECIFIED.
@@ -721,6 +730,14 @@
 AAUDIO_API int32_t AAudioStream_getChannelCount(AAudioStream* stream);
 
 /**
+ * Identical to AAudioStream_getChannelCount().
+ *
+ * @param stream reference provided by AAudioStreamBuilder_openStream()
+ * @return actual number of samples per frame
+ */
+AAUDIO_API int32_t AAudioStream_getSamplesPerFrame(AAudioStream* stream);
+
+/**
  * @param stream reference provided by AAudioStreamBuilder_openStream()
  * @return actual device ID
  */
@@ -793,7 +810,7 @@
  * The position and time passed back are monotonically increasing.
  *
  * @param stream reference provided by AAudioStreamBuilder_openStream()
- * @param clockid AAUDIO_CLOCK_MONOTONIC or AAUDIO_CLOCK_BOOTTIME
+ * @param clockid CLOCK_MONOTONIC or CLOCK_BOOTTIME
  * @param framePosition pointer to a variable to receive the position
  * @param timeNanoseconds pointer to a variable to receive the time
  * @return AAUDIO_OK or a negative error
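With the corrected doc above, a typical call looks like this (a sketch; assumes an open, started output stream):

    int64_t framePosition = 0;
    int64_t timeNanoseconds = 0;
    aaudio_result_t result = AAudioStream_getTimestamp(stream, CLOCK_MONOTONIC,
                                                       &framePosition, &timeNanoseconds);
    if (result == AAUDIO_OK) {
        // framePosition was at the DAC at timeNanoseconds; comparing it with
        // AAudioStream_getFramesWritten(stream) gives a latency estimate.
    }
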
diff --git a/media/libaaudio/libaaudio.map.txt b/media/libaaudio/libaaudio.map.txt
index b9012e5..2ba5250 100644
--- a/media/libaaudio/libaaudio.map.txt
+++ b/media/libaaudio/libaaudio.map.txt
@@ -11,6 +11,7 @@
     AAudioStreamBuilder_setErrorCallback;
     AAudioStreamBuilder_setFramesPerDataCallback;
     AAudioStreamBuilder_setSampleRate;
+    AAudioStreamBuilder_setSamplesPerFrame;
     AAudioStreamBuilder_setChannelCount;
     AAudioStreamBuilder_setFormat;
     AAudioStreamBuilder_setSharingMode;
@@ -34,6 +35,7 @@
     AAudioStream_getBufferCapacityInFrames;
     AAudioStream_getXRunCount;
     AAudioStream_getSampleRate;
+    AAudioStream_getSamplesPerFrame;
     AAudioStream_getChannelCount;
     AAudioStream_getPerformanceMode;
     AAudioStream_getDeviceId;
diff --git a/media/libaaudio/src/binding/AAudioServiceMessage.h b/media/libaaudio/src/binding/AAudioServiceMessage.h
index b4377fb..54e8001 100644
--- a/media/libaaudio/src/binding/AAudioServiceMessage.h
+++ b/media/libaaudio/src/binding/AAudioServiceMessage.h
@@ -28,7 +28,6 @@
 // Used to send information about the HAL to the client.
 struct AAudioMessageTimestamp {
     int64_t position;     // number of frames transferred so far
-    int64_t deviceOffset; // add to client position to get device position
     int64_t timestamp;    // time when that position was reached
 };
 
@@ -51,7 +50,8 @@
 typedef struct AAudioServiceMessage_s {
     enum class code : uint32_t {
         NOTHING,
-        TIMESTAMP,
+        TIMESTAMP_SERVICE,  // when a frame is read or written between the service and the client
+        TIMESTAMP_HARDWARE, // when a frame is at the DAC or ADC
         EVENT,
     };
 
diff --git a/media/libaaudio/src/binding/SharedMemoryParcelable.cpp b/media/libaaudio/src/binding/SharedMemoryParcelable.cpp
index 899eb04..b582b99 100644
--- a/media/libaaudio/src/binding/SharedMemoryParcelable.cpp
+++ b/media/libaaudio/src/binding/SharedMemoryParcelable.cpp
@@ -64,9 +64,9 @@
     if (mSizeInBytes > 0) {
         // Keep the original FD until you are done with the mFd.
         // If you close it in here then it will prevent mFd from working.
-        mOriginalFd = parcel->readFileDescriptor();
-        ALOGV("SharedMemoryParcelable::readFromParcel() LEAK? mOriginalFd = %d\n", mOriginalFd);
-        mFd = fcntl(mOriginalFd, F_DUPFD_CLOEXEC, 0);
+        int originalFd = parcel->readFileDescriptor();
+        ALOGV("SharedMemoryParcelable::readFromParcel() LEAK? originalFd = %d\n", originalFd);
+        mFd = fcntl(originalFd, F_DUPFD_CLOEXEC, 0);
         ALOGV("SharedMemoryParcelable::readFromParcel() LEAK? mFd = %d\n", mFd);
         if (mFd == -1) {
             status = -errno;
@@ -87,14 +87,13 @@
     }
     if (mFd != -1) {
         ALOGV("SharedMemoryParcelable::close() LEAK? mFd = %d\n", mFd);
-        ::close(mFd);
+        if(::close(mFd) < 0) {
+            int err = errno;
+            ALOGE("SharedMemoryParcelable close failed for fd = %d, errno = %d (%s)",
+                  mFd, err, strerror(err));
+        }
         mFd = -1;
     }
-    if (mOriginalFd != -1) {
-        ALOGV("SharedMemoryParcelable::close() LEAK? mOriginalFd = %d\n", mOriginalFd);
-        ::close(mOriginalFd);
-        mOriginalFd = -1;
-    }
     return AAUDIO_OK;
 }
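The duplication pattern above generalizes to any fd received over Binder (a sketch; dupWithCloexec is a hypothetical helper, and the parcel retains ownership of the original fd):

    static int dupWithCloexec(int originalFd) {
        // Duplicate with close-on-exec; leave originalFd for its owner to close.
        int fd = fcntl(originalFd, F_DUPFD_CLOEXEC, 0);
        if (fd == -1) {
            int err = errno;  // capture before another call can clobber it
            ALOGE("dup failed for fd = %d, errno = %d (%s)", originalFd, err, strerror(err));
            return -err;
        }
        return fd;
    }
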
 
diff --git a/media/libaaudio/src/binding/SharedMemoryParcelable.h b/media/libaaudio/src/binding/SharedMemoryParcelable.h
index 4b94b46..c5107d3 100644
--- a/media/libaaudio/src/binding/SharedMemoryParcelable.h
+++ b/media/libaaudio/src/binding/SharedMemoryParcelable.h
@@ -68,7 +68,6 @@
 #define MMAP_UNRESOLVED_ADDRESS    reinterpret_cast<uint8_t*>(MAP_FAILED)
 
     int      mFd = -1;
-    int      mOriginalFd = -1;
     int32_t  mSizeInBytes = 0;
     uint8_t *mResolvedAddress = MMAP_UNRESOLVED_ADDRESS;
 };
diff --git a/media/libaaudio/src/client/AudioEndpoint.cpp b/media/libaaudio/src/client/AudioEndpoint.cpp
index 0684ed6..6ae5379 100644
--- a/media/libaaudio/src/client/AudioEndpoint.cpp
+++ b/media/libaaudio/src/client/AudioEndpoint.cpp
@@ -113,7 +113,8 @@
     return result;
 }
 
-aaudio_result_t AudioEndpoint::configure(const EndpointDescriptor *pEndpointDescriptor)
+aaudio_result_t AudioEndpoint::configure(const EndpointDescriptor *pEndpointDescriptor,
+                                         aaudio_direction_t   direction)
 {
     aaudio_result_t result = AudioEndpoint_validateDescriptor(pEndpointDescriptor);
     if (result != AAUDIO_OK) {
@@ -143,12 +144,20 @@
             descriptor->dataAddress
     );
 
-    // ============================ down data queue =============================
+    // ============================ data queue =============================
     descriptor = &pEndpointDescriptor->dataQueueDescriptor;
     ALOGV("AudioEndpoint::configure() data framesPerBurst = %d", descriptor->framesPerBurst);
-    ALOGV("AudioEndpoint::configure() data readCounterAddress = %p", descriptor->readCounterAddress);
-    mFreeRunning = descriptor->readCounterAddress == nullptr;
+    ALOGV("AudioEndpoint::configure() data readCounterAddress = %p",
+          descriptor->readCounterAddress);
+
+    // An example of free running is when the other side is read or written by hardware DMA
+    // or by a DSP. Since that side does not update its counter, we have to update it ourselves.
+    int64_t *remoteCounter = (direction == AAUDIO_DIRECTION_OUTPUT)
+                             ? descriptor->readCounterAddress // read by other side
+                             : descriptor->writeCounterAddress; // written by other side
+    mFreeRunning = (remoteCounter == nullptr);
     ALOGV("AudioEndpoint::configure() mFreeRunning = %d", mFreeRunning ? 1 : 0);
+
     int64_t *readCounterAddress = (descriptor->readCounterAddress == nullptr)
                                   ? &mDataReadCounter
                                   : descriptor->readCounterAddress;
@@ -173,13 +182,8 @@
     return mUpCommandQueue->read(commandPtr, 1);
 }
 
-aaudio_result_t AudioEndpoint::writeDataNow(const void *buffer, int32_t numFrames)
-{
-    return mDataQueue->write(buffer, numFrames);
-}
-
-void AudioEndpoint::getEmptyFramesAvailable(WrappingBuffer *wrappingBuffer) {
-    mDataQueue->getEmptyRoomAvailable(wrappingBuffer);
+int32_t AudioEndpoint::getEmptyFramesAvailable(WrappingBuffer *wrappingBuffer) {
+    return mDataQueue->getEmptyRoomAvailable(wrappingBuffer);
 }
 
 int32_t AudioEndpoint::getEmptyFramesAvailable()
@@ -187,7 +191,7 @@
     return mDataQueue->getFifoControllerBase()->getEmptyFramesAvailable();
 }
 
-void AudioEndpoint::getFullFramesAvailable(WrappingBuffer *wrappingBuffer)
+int32_t AudioEndpoint::getFullFramesAvailable(WrappingBuffer *wrappingBuffer)
 {
     return mDataQueue->getFullDataAvailable(wrappingBuffer);
 }
@@ -250,3 +254,7 @@
     ALOGD("AudioEndpoint: data readCounter  = %lld", (long long) mDataQueue->getReadCounter());
     ALOGD("AudioEndpoint: data writeCounter = %lld", (long long) mDataQueue->getWriteCounter());
 }
+
+void AudioEndpoint::eraseDataMemory() {
+    mDataQueue->eraseMemory();
+}
diff --git a/media/libaaudio/src/client/AudioEndpoint.h b/media/libaaudio/src/client/AudioEndpoint.h
index e7c6916..f5b67e8 100644
--- a/media/libaaudio/src/client/AudioEndpoint.h
+++ b/media/libaaudio/src/client/AudioEndpoint.h
@@ -40,7 +40,8 @@
     /**
      * Configure based on the EndPointDescriptor_t.
      */
-    aaudio_result_t configure(const EndpointDescriptor *pEndpointDescriptor);
+    aaudio_result_t configure(const EndpointDescriptor *pEndpointDescriptor,
+                              aaudio_direction_t direction);
 
     /**
      * Read from a command passed up from the Server.
@@ -48,17 +49,11 @@
      */
     aaudio_result_t readUpCommand(AAudioServiceMessage *commandPtr);
 
-    /**
-     * Non-blocking write.
-     * @return framesWritten or a negative error code.
-     */
-    aaudio_result_t writeDataNow(const void *buffer, int32_t numFrames);
-
-    void getEmptyFramesAvailable(android::WrappingBuffer *wrappingBuffer);
+    int32_t getEmptyFramesAvailable(android::WrappingBuffer *wrappingBuffer);
 
     int32_t getEmptyFramesAvailable();
 
-    void getFullFramesAvailable(android::WrappingBuffer *wrappingBuffer);
+    int32_t getFullFramesAvailable(android::WrappingBuffer *wrappingBuffer);
 
     int32_t getFullFramesAvailable();
 
@@ -91,6 +86,11 @@
 
     int32_t getBufferCapacityInFrames() const;
 
+    /**
+     * Write zeros to the data queue memory.
+     */
+    void eraseDataMemory();
+
     void dump() const;
 
 private:
diff --git a/media/libaaudio/src/client/AudioStreamInternal.cpp b/media/libaaudio/src/client/AudioStreamInternal.cpp
index 8b14922..41d4909 100644
--- a/media/libaaudio/src/client/AudioStreamInternal.cpp
+++ b/media/libaaudio/src/client/AudioStreamInternal.cpp
@@ -28,10 +28,10 @@
 #include <binder/IServiceManager.h>
 
 #include <aaudio/AAudio.h>
+#include <cutils/properties.h>
 #include <utils/String16.h>
 #include <utils/Trace.h>
 
-#include "AudioClock.h"
 #include "AudioEndpointParcelable.h"
 #include "binding/AAudioStreamRequest.h"
 #include "binding/AAudioStreamConfiguration.h"
@@ -39,6 +39,7 @@
 #include "binding/AAudioServiceMessage.h"
 #include "core/AudioStreamBuilder.h"
 #include "fifo/FifoBuffer.h"
+#include "utility/AudioClock.h"
 #include "utility/LinearRamp.h"
 
 #include "AudioStreamInternal.h"
@@ -64,7 +65,13 @@
         , mFramesPerBurst(16)
         , mStreamVolume(1.0f)
         , mInService(inService)
-        , mServiceInterface(serviceInterface) {
+        , mServiceInterface(serviceInterface)
+        , mWakeupDelayNanos(AAudioProperty_getWakeupDelayMicros() * AAUDIO_NANOS_PER_MICROSECOND)
+        , mMinimumSleepNanos(AAudioProperty_getMinimumSleepMicros() * AAUDIO_NANOS_PER_MICROSECOND)
+        , mAtomicTimestamp()
+        {
+    ALOGD("AudioStreamInternal(): mWakeupDelayNanos = %d, mMinimumSleepNanos = %d",
+          mWakeupDelayNanos, mMinimumSleepNanos);
 }
 
 AudioStreamInternal::~AudioStreamInternal() {
@@ -135,7 +142,7 @@
         }
 
         // Configure endpoint based on descriptor.
-        mAudioEndpoint.configure(&mEndpointDescriptor);
+        mAudioEndpoint.configure(&mEndpointDescriptor, getDirection());
 
         mFramesPerBurst = mEndpointDescriptor.dataQueueDescriptor.framesPerBurst;
         int32_t capacity = mEndpointDescriptor.dataQueueDescriptor.capacityInFrames;
@@ -234,9 +241,11 @@
     int64_t startTime;
     ALOGD("AudioStreamInternal()::requestStart()");
     if (mServiceStreamHandle == AAUDIO_HANDLE_INVALID) {
+        ALOGE("AudioStreamInternal::requestStart() mServiceStreamHandle invalid");
         return AAUDIO_ERROR_INVALID_STATE;
     }
     if (isActive()) {
+        ALOGE("AudioStreamInternal::requestStart() already active");
         return AAUDIO_ERROR_INVALID_STATE;
     }
     aaudio_stream_state_t originalState = getState();
@@ -313,6 +322,7 @@
 
 aaudio_result_t AudioStreamInternal::registerThread() {
     if (mServiceStreamHandle == AAUDIO_HANDLE_INVALID) {
+        ALOGE("AudioStreamInternal::registerThread() mServiceStreamHandle invalid");
         return AAUDIO_ERROR_INVALID_STATE;
     }
     return mServiceInterface.registerAudioThread(mServiceStreamHandle,
@@ -322,6 +332,7 @@
 
 aaudio_result_t AudioStreamInternal::unregisterThread() {
     if (mServiceStreamHandle == AAUDIO_HANDLE_INVALID) {
+        ALOGE("AudioStreamInternal::unregisterThread() mServiceStreamHandle invalid");
         return AAUDIO_ERROR_INVALID_STATE;
     }
     return mServiceInterface.unregisterAudioThread(mServiceStreamHandle, gettid());
@@ -345,15 +356,18 @@
 aaudio_result_t AudioStreamInternal::getTimestamp(clockid_t clockId,
                            int64_t *framePosition,
                            int64_t *timeNanoseconds) {
-    // TODO Generate in server and pass to client. Return latest.
-    int64_t time = AudioClock::getNanoseconds();
-    *framePosition = mClockModel.convertTimeToPosition(time) + mFramesOffsetFromService;
-    // TODO Get a more accurate timestamp from the service. This code just adds a fudge factor.
-    *timeNanoseconds = time + (6 * AAUDIO_NANOS_PER_MILLISECOND);
-    return AAUDIO_OK;
+    // Generated in server and passed to client. Return latest.
+    if (mAtomicTimestamp.isValid()) {
+        Timestamp timestamp = mAtomicTimestamp.read();
+        *framePosition = timestamp.getPosition();
+        *timeNanoseconds = timestamp.getNanoseconds();
+        return AAUDIO_OK;
+    } else {
+        return AAUDIO_ERROR_UNAVAILABLE;
+    }
 }
 
-aaudio_result_t AudioStreamInternal::updateStateWhileWaiting() {
+aaudio_result_t AudioStreamInternal::updateStateMachine() {
     if (isDataCallbackActive()) {
         return AAUDIO_OK; // state is getting updated by the callback thread read/write call
     }
@@ -379,7 +393,7 @@
     oldTime = nanoTime;
 }
 
-aaudio_result_t AudioStreamInternal::onTimestampFromServer(AAudioServiceMessage *message) {
+aaudio_result_t AudioStreamInternal::onTimestampService(AAudioServiceMessage *message) {
 #if LOG_TIMESTAMPS
     logTimestamp(*message);
 #endif
@@ -387,23 +401,29 @@
     return AAUDIO_OK;
 }
 
+aaudio_result_t AudioStreamInternal::onTimestampHardware(AAudioServiceMessage *message) {
+    Timestamp timestamp(message->timestamp.position, message->timestamp.timestamp);
+    mAtomicTimestamp.write(timestamp);
+    return AAUDIO_OK;
+}
+
 aaudio_result_t AudioStreamInternal::onEventFromServer(AAudioServiceMessage *message) {
     aaudio_result_t result = AAUDIO_OK;
     switch (message->event.event) {
         case AAUDIO_SERVICE_EVENT_STARTED:
-            ALOGD("AudioStreamInternal::onEventFromServergot() AAUDIO_SERVICE_EVENT_STARTED");
+            ALOGD("AudioStreamInternal::onEventFromServer() got AAUDIO_SERVICE_EVENT_STARTED");
             if (getState() == AAUDIO_STREAM_STATE_STARTING) {
                 setState(AAUDIO_STREAM_STATE_STARTED);
             }
             break;
         case AAUDIO_SERVICE_EVENT_PAUSED:
-            ALOGD("AudioStreamInternal::onEventFromServergot() AAUDIO_SERVICE_EVENT_PAUSED");
+            ALOGD("AudioStreamInternal::onEventFromServer() got AAUDIO_SERVICE_EVENT_PAUSED");
             if (getState() == AAUDIO_STREAM_STATE_PAUSING) {
                 setState(AAUDIO_STREAM_STATE_PAUSED);
             }
             break;
         case AAUDIO_SERVICE_EVENT_STOPPED:
-            ALOGD("AudioStreamInternal::onEventFromServergot() AAUDIO_SERVICE_EVENT_STOPPED");
+            ALOGD("AudioStreamInternal::onEventFromServer() got AAUDIO_SERVICE_EVENT_STOPPED");
             if (getState() == AAUDIO_STREAM_STATE_STOPPING) {
                 setState(AAUDIO_STREAM_STATE_STOPPED);
             }
@@ -420,10 +440,14 @@
             setState(AAUDIO_STREAM_STATE_CLOSED);
             break;
         case AAUDIO_SERVICE_EVENT_DISCONNECTED:
+            // Prevent hardware from looping on old data and making buzzing sounds.
+            if (getDirection() == AAUDIO_DIRECTION_OUTPUT) {
+                mAudioEndpoint.eraseDataMemory();
+            }
             result = AAUDIO_ERROR_DISCONNECTED;
             setState(AAUDIO_STREAM_STATE_DISCONNECTED);
             ALOGW("WARNING - AudioStreamInternal::onEventFromServer()"
-                          " AAUDIO_SERVICE_EVENT_DISCONNECTED");
+                          " AAUDIO_SERVICE_EVENT_DISCONNECTED - FIFO cleared");
             break;
         case AAUDIO_SERVICE_EVENT_VOLUME:
             mStreamVolume = (float)message->event.dataDouble;
@@ -450,8 +474,12 @@
             break; // no command this time, no problem
         }
         switch (message.what) {
-        case AAudioServiceMessage::code::TIMESTAMP:
-            result = onTimestampFromServer(&message);
+        case AAudioServiceMessage::code::TIMESTAMP_SERVICE:
+            result = onTimestampService(&message);
+            break;
+
+        case AAudioServiceMessage::code::TIMESTAMP_HARDWARE:
+            result = onTimestampHardware(&message);
             break;
 
         case AAudioServiceMessage::code::EVENT:
@@ -472,12 +500,12 @@
 aaudio_result_t AudioStreamInternal::processData(void *buffer, int32_t numFrames,
                                                  int64_t timeoutNanoseconds)
 {
-    const char * traceName = (mInService) ? "aaWrtS" : "aaWrtC";
+    const char * traceName = "aaProc";
+    const char * fifoName = "aaRdy";
     ATRACE_BEGIN(traceName);
-    int32_t fullFrames = mAudioEndpoint.getFullFramesAvailable();
     if (ATRACE_ENABLED()) {
-        const char * traceName = (mInService) ? "aaFullS" : "aaFullC";
-        ATRACE_INT(traceName, fullFrames);
+        int32_t fullFrames = mAudioEndpoint.getFullFramesAvailable();
+        ATRACE_INT(fifoName, fullFrames);
     }
 
     aaudio_result_t result = AAUDIO_OK;
@@ -505,10 +533,12 @@
         if (timeoutNanoseconds == 0) {
             break; // don't block
         } else if (framesLeft > 0) {
-            // clip the wake time to something reasonable
-            if (wakeTimeNanos < currentTimeNanos) {
-                wakeTimeNanos = currentTimeNanos;
+            if (!mAudioEndpoint.isFreeRunning()) {
+                // If there is software on the other end of the FIFO then it may get delayed.
+                // So wake up just a little after we expect it to be ready.
+                wakeTimeNanos += mWakeupDelayNanos;
             }
+
             if (wakeTimeNanos > deadlineNanos) {
                 // If we time out, just return the framesWritten so far.
                 // TODO remove after we fix the deadline bug
@@ -525,12 +555,30 @@
                 break;
             }
 
-            int64_t sleepForNanos = wakeTimeNanos - currentTimeNanos;
-            AudioClock::sleepForNanos(sleepForNanos);
+            currentTimeNanos = AudioClock::getNanoseconds();
+            int64_t earliestWakeTime = currentTimeNanos + mMinimumSleepNanos;
+            // Guarantee a minimum sleep time.
+            if (wakeTimeNanos < earliestWakeTime) {
+                wakeTimeNanos = earliestWakeTime;
+            }
+
+            if (ATRACE_ENABLED()) {
+                int32_t fullFrames = mAudioEndpoint.getFullFramesAvailable();
+                ATRACE_INT(fifoName, fullFrames);
+                int64_t sleepForNanos = wakeTimeNanos - currentTimeNanos;
+                ATRACE_INT("aaSlpNs", (int32_t)sleepForNanos);
+            }
+
+            AudioClock::sleepUntilNanoTime(wakeTimeNanos);
             currentTimeNanos = AudioClock::getNanoseconds();
         }
     }
 
+    if (ATRACE_ENABLED()) {
+        int32_t fullFrames = mAudioEndpoint.getFullFramesAvailable();
+        ATRACE_INT(fifoName, fullFrames);
+    }
+
     // return error or framesProcessed
     (void) loopCount;
     ATRACE_END();
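Condensed, the sleep policy introduced in this hunk is (a sketch; member names are the ones added by this patch):

    // wakeTimeNanos estimates when the next burst should be ready.
    if (!mAudioEndpoint.isFreeRunning()) {
        wakeTimeNanos += mWakeupDelayNanos;   // absorb the far side's wakeup jitter
    }
    int64_t now = AudioClock::getNanoseconds();
    if (wakeTimeNanos < now + mMinimumSleepNanos) {
        wakeTimeNanos = now + mMinimumSleepNanos;  // never busy-spin on tiny sleeps
    }
    AudioClock::sleepUntilNanoTime(wakeTimeNanos);
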
diff --git a/media/libaaudio/src/client/AudioStreamInternal.h b/media/libaaudio/src/client/AudioStreamInternal.h
index 109e425..13cf16c 100644
--- a/media/libaaudio/src/client/AudioStreamInternal.h
+++ b/media/libaaudio/src/client/AudioStreamInternal.h
@@ -27,6 +27,7 @@
 #include "client/IsochronousClockModel.h"
 #include "client/AudioEndpoint.h"
 #include "core/AudioStream.h"
+#include "utility/AudioClock.h"
 #include "utility/LinearRamp.h"
 
 using android::sp;
@@ -49,7 +50,7 @@
                                        int64_t *framePosition,
                                        int64_t *timeNanoseconds) override;
 
-    virtual aaudio_result_t updateStateWhileWaiting() override;
+    virtual aaudio_result_t updateStateMachine() override;
 
     aaudio_result_t open(const AudioStreamBuilder &builder) override;
 
@@ -121,7 +122,9 @@
 
     aaudio_result_t onEventFromServer(AAudioServiceMessage *message);
 
-    aaudio_result_t onTimestampFromServer(AAudioServiceMessage *message);
+    aaudio_result_t onTimestampService(AAudioServiceMessage *message);
+
+    aaudio_result_t onTimestampHardware(AAudioServiceMessage *message);
 
     void logTimestamp(AAudioServiceMessage &message);
 
@@ -173,8 +176,18 @@
     // Adjust timing model based on timestamp from service.
     void processTimestamp(uint64_t position, int64_t time);
 
+    // Thread on other side of FIFO will have wakeup jitter.
+    // By delaying slightly we can avoid waking up before other side is ready.
+    const int32_t            mWakeupDelayNanos; // delay past typical wakeup jitter
+    const int32_t            mMinimumSleepNanos; // minimum sleep while polling
+
     AudioEndpointParcelable  mEndPointParcelable; // description of the buffers filled by service
     EndpointDescriptor       mEndpointDescriptor; // buffer description with resolved addresses
+
+    SimpleDoubleBuffer<Timestamp>  mAtomicTimestamp;
+
+    int64_t                  mServiceLatencyNanos = 0;
+
 };
 
 } /* namespace aaudio */
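SimpleDoubleBuffer<Timestamp> is not defined in this patch; the name suggests a wait-free, single-writer double buffer along these lines (an assumption, not the actual implementation):

    #include <atomic>

    // Sketch: the writer fills the slot selected by the counter's parity and
    // then bumps the counter; the reader retries if the counter moved mid-copy.
    template <typename T>
    class DoubleBufferSketch {
    public:
        void write(T value) {
            int counter = mCounter.load(std::memory_order_relaxed);
            mValues[counter & 1] = value;
            mCounter.store(counter + 1, std::memory_order_release);
        }
        T read() const {  // call only after isValid() returns true
            T result;
            int before, after;
            do {
                before = mCounter.load(std::memory_order_acquire);
                result = mValues[(before - 1) & 1];  // last completed write
                after = mCounter.load(std::memory_order_acquire);
            } while (after != before);
            return result;
        }
        bool isValid() const { return mCounter.load(std::memory_order_acquire) > 0; }
    private:
        T mValues[2] {};
        std::atomic<int> mCounter{0};
    };
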
diff --git a/media/libaaudio/src/client/AudioStreamInternalCapture.cpp b/media/libaaudio/src/client/AudioStreamInternalCapture.cpp
index 22f8bd1..7b1e53e 100644
--- a/media/libaaudio/src/client/AudioStreamInternalCapture.cpp
+++ b/media/libaaudio/src/client/AudioStreamInternalCapture.cpp
@@ -24,6 +24,9 @@
 #include "client/AudioStreamInternalCapture.h"
 #include "utility/AudioClock.h"
 
+#define ATRACE_TAG ATRACE_TAG_AUDIO
+#include <utils/Trace.h>
+
 using android::WrappingBuffer;
 
 using namespace aaudio;
@@ -36,7 +39,6 @@
 
 AudioStreamInternalCapture::~AudioStreamInternalCapture() {}
 
-
 // Write the data, block if needed and timeoutMillis > 0
 aaudio_result_t AudioStreamInternalCapture::read(void *buffer, int32_t numFrames,
                                                int64_t timeoutNanoseconds)
@@ -52,6 +54,9 @@
         return result;
     }
 
+    const char *traceName = "aaRdNow";
+    ATRACE_BEGIN(traceName);
+
     if (mAudioEndpoint.isFreeRunning()) {
         //ALOGD("AudioStreamInternalCapture::processDataNow() - update remote counter");
         // Update data queue based on the timing model.
@@ -63,6 +68,9 @@
     // If the write index passed the read index then consider it an overrun.
     if (mAudioEndpoint.getEmptyFramesAvailable() < 0) {
         mXRunCount++;
+        if (ATRACE_ENABLED()) {
+            ATRACE_INT("aaOverRuns", mXRunCount);
+        }
     }
 
     // Read some data from the buffer.
@@ -70,6 +78,9 @@
     int32_t framesProcessed = readNowWithConversion(buffer, numFrames);
     //ALOGD("AudioStreamInternalCapture::processDataNow() - tried to read %d frames, read %d",
     //    numFrames, framesProcessed);
+    if (ATRACE_ENABLED()) {
+        ATRACE_INT("aaRead", framesProcessed);
+    }
 
     // Calculate an ideal time to wake up.
     if (wakeTimePtr != nullptr && framesProcessed >= 0) {
@@ -82,14 +93,14 @@
             case AAUDIO_STREAM_STATE_OPEN:
             case AAUDIO_STREAM_STATE_STARTING:
                 break;
-            case AAUDIO_STREAM_STATE_STARTED:   // When do we expect the next read burst to occur?
+            case AAUDIO_STREAM_STATE_STARTED:
             {
-                uint32_t burstSize = mFramesPerBurst;
-                if (burstSize < 32) {
-                    burstSize = 32; // TODO review
-                }
+                // When do we expect the next write burst to occur?
 
-                uint64_t nextReadPosition = mAudioEndpoint.getDataWriteCounter() + burstSize;
+                // Calculate frame position based off of the readCounter because
+                // the writeCounter might have just advanced in the background,
+                // causing us to sleep until a later burst.
+                int64_t nextReadPosition = mAudioEndpoint.getDataReadCounter() + mFramesPerBurst;
                 wakeTime = mClockModel.convertPositionToTime(nextReadPosition);
             }
                 break;
@@ -99,10 +110,8 @@
         *wakeTimePtr = wakeTime;
 
     }
-//    ALOGD("AudioStreamInternalCapture::readNow finished: now = %llu, read# = %llu, wrote# = %llu",
-//         (unsigned long long)currentNanoTime,
-//         (unsigned long long)mAudioEndpoint.getDataReadCounter(),
-//         (unsigned long long)mAudioEndpoint.getDownDataWriteCounter());
+
+    ATRACE_END();
     return framesProcessed;
 }
 
diff --git a/media/libaaudio/src/client/AudioStreamInternalPlay.cpp b/media/libaaudio/src/client/AudioStreamInternalPlay.cpp
index 1b18577..31e0a40 100644
--- a/media/libaaudio/src/client/AudioStreamInternalPlay.cpp
+++ b/media/libaaudio/src/client/AudioStreamInternalPlay.cpp
@@ -18,6 +18,10 @@
 //#define LOG_NDEBUG 0
 #include <utils/Log.h>
 
+#define ATRACE_TAG ATRACE_TAG_AUDIO
+
+#include <utils/Trace.h>
+
 #include "client/AudioStreamInternalPlay.h"
 #include "utility/AudioClock.h"
 
@@ -99,6 +103,10 @@
         return result;
     }
 
+    const char *traceName = "aaWrNow";
+    ATRACE_BEGIN(traceName);
+
+    // If a DMA channel or DSP is reading the other end then we have to update the readCounter.
     if (mAudioEndpoint.isFreeRunning()) {
         // Update data queue based on the timing model.
         int64_t estimatedReadCounter = mClockModel.convertTimeToPosition(currentNanoTime);
@@ -108,10 +116,10 @@
 
     // If the read index passed the write index then consider it an underrun.
     if (mAudioEndpoint.getFullFramesAvailable() < 0) {
-        ALOGV("AudioStreamInternal::processDataNow() - XRun! write = %d, read = %d",
-              (int)mAudioEndpoint.getDataWriteCounter(),
-              (int)mAudioEndpoint.getDataReadCounter());
         mXRunCount++;
+        if (ATRACE_ENABLED()) {
+            ATRACE_INT("aaUnderRuns", mXRunCount);
+        }
     }
 
     // Write some data to the buffer.
@@ -119,6 +127,9 @@
     int32_t framesWritten = writeNowWithConversion(buffer, numFrames);
     //ALOGD("AudioStreamInternal::processDataNow() - tried to write %d frames, wrote %d",
     //    numFrames, framesWritten);
+    if (ATRACE_ENABLED()) {
+        ATRACE_INT("aaWrote", framesWritten);
+    }
 
     // Calculate an ideal time to wake up.
     if (wakeTimePtr != nullptr && framesWritten >= 0) {
@@ -135,14 +146,15 @@
                     wakeTime = currentNanoTime;
                 }
                 break;
-            case AAUDIO_STREAM_STATE_STARTED:   // When do we expect the next read burst to occur?
+            case AAUDIO_STREAM_STATE_STARTED:
             {
-                uint32_t burstSize = mFramesPerBurst;
-                if (burstSize < 32) {
-                    burstSize = 32; // TODO review
-                }
+                // When do we expect the next read burst to occur?
 
-                uint64_t nextReadPosition = mAudioEndpoint.getDataReadCounter() + burstSize;
+                // Calculate frame position based off of the writeCounter because
+                // the readCounter might have just advanced in the background,
+                // causing us to sleep until a later burst.
+                int64_t nextReadPosition = mAudioEndpoint.getDataWriteCounter() + mFramesPerBurst
+                        - mAudioEndpoint.getBufferSizeInFrames();
                 wakeTime = mClockModel.convertPositionToTime(nextReadPosition);
             }
                 break;
@@ -152,10 +164,8 @@
         *wakeTimePtr = wakeTime;
 
     }
-//    ALOGD("AudioStreamInternal::processDataNow finished: now = %llu, read# = %llu, wrote# = %llu",
-//         (unsigned long long)currentNanoTime,
-//         (unsigned long long)mAudioEndpoint.getDataReadCounter(),
-//         (unsigned long long)mAudioEndpoint.getDownDataWriteCounter());
+
+    ATRACE_END();
     return framesWritten;
 }
 
@@ -170,7 +180,7 @@
 
     mAudioEndpoint.getEmptyFramesAvailable(&wrappingBuffer);
 
-    // Read data in one or two parts.
+    // Write data in one or two parts.
     int partIndex = 0;
     while (framesLeft > 0 && partIndex < WrappingBuffer::SIZE) {
         int32_t framesToWrite = framesLeft;
@@ -291,6 +301,7 @@
     aaudio_data_callback_result_t callbackResult = AAUDIO_CALLBACK_RESULT_CONTINUE;
     AAudioStream_dataCallback appCallback = getDataCallbackProc();
     if (appCallback == nullptr) return NULL;
+    int64_t timeoutNanos = calculateReasonableTimeout(mCallbackFrames);
 
     // result might be a frame count
     while (mCallbackEnabled.load() && isActive() && (result >= 0)) {
@@ -302,10 +313,7 @@
                 mCallbackFrames);
 
         if (callbackResult == AAUDIO_CALLBACK_RESULT_CONTINUE) {
-            // Write audio data to stream.
-            int64_t timeoutNanos = calculateReasonableTimeout(mCallbackFrames);
-
-            // This is a BLOCKING WRITE!
+            // Write audio data to stream. This is a BLOCKING WRITE!
             result = write(mCallbackBuffer, mCallbackFrames, timeoutNanos);
             if ((result != mCallbackFrames)) {
                 ALOGE("AudioStreamInternalPlay(): callbackLoop: write() returned %d", result);
diff --git a/media/libaaudio/src/client/IsochronousClockModel.cpp b/media/libaaudio/src/client/IsochronousClockModel.cpp
index 6ea1172..c06c8a9 100644
--- a/media/libaaudio/src/client/IsochronousClockModel.cpp
+++ b/media/libaaudio/src/client/IsochronousClockModel.cpp
@@ -149,7 +149,7 @@
     int64_t nextBurstPosition = mFramesPerBurst * nextBurstIndex;
     int64_t framesDelta = nextBurstPosition - mMarkerFramePosition;
     int64_t nanosDelta = convertDeltaPositionToTime(framesDelta);
-    int64_t time = (int64_t) (mMarkerNanoTime + nanosDelta);
+    int64_t time = mMarkerNanoTime + nanosDelta;
 //    ALOGD("IsochronousClockModel::convertPositionToTime: pos = %llu --> time = %llu",
 //         (unsigned long long)framePosition,
 //         (unsigned long long)time);
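The conversion above is linear extrapolation from the marker, apparently rounded up to a burst boundary (nextBurstIndex is computed outside this hunk); a worked example with illustrative numbers:

    // Marker: frame 96000 observed at t = 2.000 s; 48000 Hz; 192-frame bursts.
    // For framePosition = 96100:
    //   nextBurstIndex    = ceil(96100 / 192)   = 501
    //   nextBurstPosition = 501 * 192           = 96192
    //   framesDelta       = 96192 - 96000       = 192
    //   nanosDelta        = 192 * 1e9 / 48000   = 4,000,000 ns
    //   time              = 2.000 s + 4 ms      = 2.004 s
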
diff --git a/media/libaaudio/src/core/AAudioAudio.cpp b/media/libaaudio/src/core/AAudioAudio.cpp
index ca42444..5089b00 100644
--- a/media/libaaudio/src/core/AAudioAudio.cpp
+++ b/media/libaaudio/src/core/AAudioAudio.cpp
@@ -101,7 +101,6 @@
  */
 static aaudio_policy_t s_MMapPolicy = AAUDIO_UNSPECIFIED;
 
-
 static AudioStream *convertAAudioStreamToAudioStream(AAudioStream* stream)
 {
     return (AudioStream*) stream;
@@ -144,12 +143,18 @@
 }
 
 AAUDIO_API void AAudioStreamBuilder_setChannelCount(AAudioStreamBuilder* builder,
-                                                       int32_t channelCount)
+                                                    int32_t channelCount)
 {
     AudioStreamBuilder *streamBuilder = convertAAudioBuilderToStreamBuilder(builder);
     streamBuilder->setSamplesPerFrame(channelCount);
 }
 
+AAUDIO_API void AAudioStreamBuilder_setSamplesPerFrame(AAudioStreamBuilder* builder,
+                                                       int32_t samplesPerFrame)
+{
+    AAudioStreamBuilder_setChannelCount(builder, samplesPerFrame);
+}
+
 AAUDIO_API void AAudioStreamBuilder_setDirection(AAudioStreamBuilder* builder,
                                              aaudio_direction_t direction)
 {
@@ -248,7 +253,7 @@
     AudioStream *audioStream = convertAAudioStreamToAudioStream(stream);
     ALOGD("AAudioStream_requestStart(%p) called --------------", stream);
     aaudio_result_t result = audioStream->requestStart();
-    ALOGD("AAudioStream_requestStart(%p) returned ------------", stream);
+    ALOGD("AAudioStream_requestStart(%p) returned %d ---------", stream, result);
     return result;
 }
 
@@ -350,6 +355,11 @@
     return audioStream->getSamplesPerFrame();
 }
 
+AAUDIO_API int32_t AAudioStream_getSamplesPerFrame(AAudioStream* stream)
+{
+    return AAudioStream_getChannelCount(stream);
+}
+
 AAUDIO_API aaudio_stream_state_t AAudioStream_getState(AAudioStream* stream)
 {
     AudioStream *audioStream = convertAAudioStreamToAudioStream(stream);
diff --git a/media/libaaudio/src/core/AudioStream.cpp b/media/libaaudio/src/core/AudioStream.cpp
index 19b08c4..4f1cc37 100644
--- a/media/libaaudio/src/core/AudioStream.cpp
+++ b/media/libaaudio/src/core/AudioStream.cpp
@@ -82,9 +82,10 @@
           mSampleRate, mSamplesPerFrame, mFormat,
           AudioStream_convertSharingModeToShortText(mSharingMode),
           (getDirection() == AAUDIO_DIRECTION_OUTPUT) ? "OUTPUT" : "INPUT");
-    ALOGI("AudioStream::open() device = %d, perfMode = %d, callbackFrames = %d",
-          mDeviceId, mPerformanceMode, mFramesPerDataCallback);
-
+    ALOGI("AudioStream::open() device = %d, perfMode = %d, callback: %s with frames = %d",
+          mDeviceId, mPerformanceMode,
+          (mDataCallbackProc == nullptr ? "OFF" : "ON"),
+          mFramesPerDataCallback);
 
     return AAUDIO_OK;
 }
@@ -97,7 +98,7 @@
                                                 aaudio_stream_state_t *nextState,
                                                 int64_t timeoutNanoseconds)
 {
-    aaudio_result_t result = updateStateWhileWaiting();
+    aaudio_result_t result = updateStateMachine();
     if (result != AAUDIO_OK) {
         return result;
     }
@@ -111,7 +112,7 @@
         AudioClock::sleepForNanos(durationNanos);
         timeoutNanoseconds -= durationNanos;
 
-        aaudio_result_t result = updateStateWhileWaiting();
+        aaudio_result_t result = updateStateMachine();
         if (result != AAUDIO_OK) {
             return result;
         }
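This polling loop is what backs AAudioStream_waitForStateChange(); typical client use looks like this (a sketch, borrowing NANOS_PER_MILLISECOND from AAudioExampleUtils.h above):

    aaudio_stream_state_t nextState = AAUDIO_STREAM_STATE_UNINITIALIZED;
    AAudioStream_requestPause(stream);
    aaudio_result_t result = AAudioStream_waitForStateChange(
            stream, AAUDIO_STREAM_STATE_PAUSING, &nextState,
            100 * NANOS_PER_MILLISECOND);
    if (result == AAUDIO_OK && nextState == AAUDIO_STREAM_STATE_PAUSED) {
        // the pause completed
    }
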
@@ -152,6 +153,7 @@
                                      void* threadArg)
 {
     if (mHasThread) {
+        ALOGE("AudioStream::createThread() - mHasThread already true");
         return AAUDIO_ERROR_INVALID_STATE;
     }
     if (threadProc == nullptr) {
@@ -173,6 +175,7 @@
 aaudio_result_t AudioStream::joinThread(void** returnArg, int64_t timeoutNanoseconds)
 {
     if (!mHasThread) {
+        ALOGE("AudioStream::joinThread() - but has no thread");
         return AAUDIO_ERROR_INVALID_STATE;
     }
 #if 0
diff --git a/media/libaaudio/src/core/AudioStream.h b/media/libaaudio/src/core/AudioStream.h
index e5fdcc6..ad18751 100644
--- a/media/libaaudio/src/core/AudioStream.h
+++ b/media/libaaudio/src/core/AudioStream.h
@@ -68,10 +68,10 @@
 
 
     /**
-     * Update state while in the middle of waitForStateChange()
+     * Update the state machine.
      * @return
      */
-    virtual aaudio_result_t updateStateWhileWaiting() = 0;
+    virtual aaudio_result_t updateStateMachine() = 0;
 
 
     // =========== End ABSTRACT methods ===========================
diff --git a/media/libaaudio/src/fifo/FifoBuffer.cpp b/media/libaaudio/src/fifo/FifoBuffer.cpp
index 6b4a772..a869886 100644
--- a/media/libaaudio/src/fifo/FifoBuffer.cpp
+++ b/media/libaaudio/src/fifo/FifoBuffer.cpp
@@ -105,16 +105,18 @@
 
 }
 
-void FifoBuffer::getFullDataAvailable(WrappingBuffer *wrappingBuffer) {
+fifo_frames_t FifoBuffer::getFullDataAvailable(WrappingBuffer *wrappingBuffer) {
     fifo_frames_t framesAvailable = mFifo->getFullFramesAvailable();
     fifo_frames_t startIndex = mFifo->getReadIndex();
     fillWrappingBuffer(wrappingBuffer, framesAvailable, startIndex);
+    return framesAvailable;
 }
 
-void FifoBuffer::getEmptyRoomAvailable(WrappingBuffer *wrappingBuffer) {
+fifo_frames_t FifoBuffer::getEmptyRoomAvailable(WrappingBuffer *wrappingBuffer) {
     fifo_frames_t framesAvailable = mFifo->getEmptyFramesAvailable();
     fifo_frames_t startIndex = mFifo->getWriteIndex();
     fillWrappingBuffer(wrappingBuffer, framesAvailable, startIndex);
+    return framesAvailable;
 }
 
 fifo_frames_t FifoBuffer::read(void *buffer, fifo_frames_t numFrames) {
@@ -208,3 +210,9 @@
     return mFifo->getCapacity();
 }
 
+void FifoBuffer::eraseMemory() {
+    int32_t numBytes = convertFramesToBytes(getBufferCapacityInFrames());
+    if (numBytes > 0) {
+        memset(mStorage, 0, (size_t) numBytes);
+    }
+}
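Callers consume the two-part view like this (a sketch; the data[]/numFrames[] field names of android::WrappingBuffer are assumed from its use elsewhere in this patch):

    android::WrappingBuffer wrappingBuffer;
    fifo_frames_t fullFrames = fifoBuffer->getFullDataAvailable(&wrappingBuffer);
    for (int part = 0; part < android::WrappingBuffer::SIZE; part++) {
        if (wrappingBuffer.numFrames[part] > 0) {
            // copy numFrames[part] frames out of wrappingBuffer.data[part],
            // then advance the FIFO's read counter by the frames consumed
        }
    }
    (void) fullFrames;  // total across both parts, per the new return value
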
diff --git a/media/libaaudio/src/fifo/FifoBuffer.h b/media/libaaudio/src/fifo/FifoBuffer.h
index 2b262a1..f5a9e27 100644
--- a/media/libaaudio/src/fifo/FifoBuffer.h
+++ b/media/libaaudio/src/fifo/FifoBuffer.h
@@ -64,16 +64,18 @@
      * If the data is split across the end of the FIFO then set data2 and numFrames2.
      * Otherwise set them to null.
      * @param wrappingBuffer
+     * @return total full frames available
      */
-    void getFullDataAvailable(WrappingBuffer *wrappingBuffer);
+    fifo_frames_t getFullDataAvailable(WrappingBuffer *wrappingBuffer);
 
     /**
      * Return pointer to available empty frames in data1 and set size in numFrames1.
      * If the room is split across the end of the FIFO then set data2 and numFrames2,
      * otherwise set them to null.
      * @param wrappingBuffer
+     * @return total empty frames available
      */
-    void getEmptyRoomAvailable(WrappingBuffer *wrappingBuffer);
+    fifo_frames_t getEmptyRoomAvailable(WrappingBuffer *wrappingBuffer);
 
     /**
      * Copy data from the FIFO into the buffer.
@@ -109,6 +111,11 @@
         mFifo->setWriteCounter(n);
     }
 
+    /*
+     * Zero out the buffer memory. This is generally only called before or after
+     * the buffer is used.
+     */
+    void eraseMemory();
+
 private:
 
     void fillWrappingBuffer(WrappingBuffer *wrappingBuffer,
diff --git a/media/libaaudio/src/legacy/AudioStreamLegacy.cpp b/media/libaaudio/src/legacy/AudioStreamLegacy.cpp
index dd5e3c0..2816bac 100644
--- a/media/libaaudio/src/legacy/AudioStreamLegacy.cpp
+++ b/media/libaaudio/src/legacy/AudioStreamLegacy.cpp
@@ -94,12 +94,15 @@
                 } else {
                     audioBuffer->size = 0;
                 }
-                break;
+
+                if (updateStateMachine() == AAUDIO_OK) {
+                    break; // don't fall through
+                }
             }
         }
         /// FALL THROUGH
 
-            // Stream got rerouted so we disconnect.
+        // Stream got rerouted so we disconnect.
         case AAUDIO_CALLBACK_OPERATION_DISCONNECTED: {
             setState(AAUDIO_STREAM_STATE_DISCONNECTED);
             ALOGD("processCallbackCommon() stream disconnected");
diff --git a/media/libaaudio/src/legacy/AudioStreamRecord.cpp b/media/libaaudio/src/legacy/AudioStreamRecord.cpp
index 8e8070c..041280d 100644
--- a/media/libaaudio/src/legacy/AudioStreamRecord.cpp
+++ b/media/libaaudio/src/legacy/AudioStreamRecord.cpp
@@ -159,6 +159,9 @@
         actualPerformanceMode = AAUDIO_PERFORMANCE_MODE_LOW_LATENCY;
     }
     setPerformanceMode(actualPerformanceMode);
+
+    setSharingMode(AAUDIO_SHARING_MODE_SHARED); // EXCLUSIVE mode not supported in legacy
+
     // Log warning if we did not get what we asked for.
     ALOGW_IF(actualFlags != flags,
              "AudioStreamRecord::open() flags changed from 0x%08X to 0x%08X",
@@ -207,7 +210,7 @@
     if (mAudioRecord.get() == nullptr) {
         return AAUDIO_ERROR_INVALID_STATE;
     }
-    // Get current position so we can detect when the track is playing.
+    // Get current position so we can detect when the track is recording.
     status_t err = mAudioRecord->getPosition(&mPositionWhenStarting);
     if (err != OK) {
         return AAudioConvert_androidToAAudioResult(err);
@@ -235,7 +238,7 @@
     return AAUDIO_OK;
 }
 
-aaudio_result_t AudioStreamRecord::updateStateWhileWaiting()
+aaudio_result_t AudioStreamRecord::updateStateMachine()
 {
     aaudio_result_t result = AAUDIO_OK;
     aaudio_wrapping_frames_t position;
@@ -292,6 +295,12 @@
     }
     int32_t framesRead = (int32_t)(bytesRead / bytesPerFrame);
     incrementFramesRead(framesRead);
+
+    result = updateStateMachine();
+    if (result != AAUDIO_OK) {
+        return result;
+    }
+
     return (aaudio_result_t) framesRead;
 }
 
@@ -330,3 +339,21 @@
     }
     return getBestTimestamp(clockId, framePosition, timeNanoseconds, &extendedTimestamp);
 }
+
+int64_t AudioStreamRecord::getFramesWritten() {
+    aaudio_wrapping_frames_t position;
+    status_t result;
+    switch (getState()) {
+        case AAUDIO_STREAM_STATE_STARTING:
+        case AAUDIO_STREAM_STATE_STARTED:
+        case AAUDIO_STREAM_STATE_STOPPING:
+            result = mAudioRecord->getPosition(&position);
+            if (result == OK) {
+                mFramesWritten.update32(position);
+            }
+            break;
+        default:
+            break;
+    }
+    return AudioStreamLegacy::getFramesWritten();
+}
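
The getFramesWritten() override above folds the wrapping 32-bit AudioRecord position into mFramesWritten via update32(), yielding a monotonic 64-bit frame count. A minimal sketch of how such a counter extension can work, assuming the counter advances by less than 2^31 between calls (the real MonotonicCounter class in libaaudio may differ in naming and details):

    #include <cstdint>

    // Sketch: extend a wrapping 32-bit counter to a monotonic 64-bit value.
    class MonotonicCounterSketch {
    public:
        int64_t update32(int32_t counter32) {
            // Unsigned subtraction is well-defined on wraparound; the delta is
            // correct as long as the real advance fits in 31 bits.
            int32_t delta = (int32_t)((uint32_t)counter32 - (uint32_t)mLast32);
            mValue64 += delta;
            mLast32 = counter32;
            return mValue64;
        }
        int64_t get() const { return mValue64; }
    private:
        int32_t mLast32 = 0;
        int64_t mValue64 = 0;
    };
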
diff --git a/media/libaaudio/src/legacy/AudioStreamRecord.h b/media/libaaudio/src/legacy/AudioStreamRecord.h
index 2c6a7eb..c1723ba 100644
--- a/media/libaaudio/src/legacy/AudioStreamRecord.h
+++ b/media/libaaudio/src/legacy/AudioStreamRecord.h
@@ -59,9 +59,11 @@
 
     int32_t getXRunCount() const override;
 
+    int64_t getFramesWritten() override;
+
     int32_t getFramesPerBurst() const override;
 
-    aaudio_result_t updateStateWhileWaiting() override;
+    aaudio_result_t updateStateMachine() override;
 
     aaudio_direction_t getDirection() const override {
         return AAUDIO_DIRECTION_INPUT;
diff --git a/media/libaaudio/src/legacy/AudioStreamTrack.cpp b/media/libaaudio/src/legacy/AudioStreamTrack.cpp
index 77f31e2..51440d6 100644
--- a/media/libaaudio/src/legacy/AudioStreamTrack.cpp
+++ b/media/libaaudio/src/legacy/AudioStreamTrack.cpp
@@ -183,6 +183,9 @@
         actualPerformanceMode = AAUDIO_PERFORMANCE_MODE_POWER_SAVING;
     }
     setPerformanceMode(actualPerformanceMode);
+
+    setSharingMode(AAUDIO_SHARING_MODE_SHARED); // EXCLUSIVE mode not supported in legacy
+
     // Log warning if we did not get what we asked for.
     ALOGW_IF(actualFlags != flags,
              "AudioStreamTrack::open() flags changed from 0x%08X to 0x%08X",
@@ -227,6 +230,7 @@
     std::lock_guard<std::mutex> lock(mStreamMutex);
 
     if (mAudioTrack.get() == nullptr) {
+        ALOGE("AudioStreamTrack::requestStart() no AudioTrack");
         return AAUDIO_ERROR_INVALID_STATE;
     }
     // Get current position so we can detect when the track is playing.
@@ -250,6 +254,7 @@
     std::lock_guard<std::mutex> lock(mStreamMutex);
 
     if (mAudioTrack.get() == nullptr) {
+        ALOGE("AudioStreamTrack::requestPause() no AudioTrack");
         return AAUDIO_ERROR_INVALID_STATE;
     } else if (getState() != AAUDIO_STREAM_STATE_STARTING
             && getState() != AAUDIO_STREAM_STATE_STARTED) {
@@ -271,8 +276,10 @@
     std::lock_guard<std::mutex> lock(mStreamMutex);
 
     if (mAudioTrack.get() == nullptr) {
+        ALOGE("AudioStreamTrack::requestFlush() no AudioTrack");
         return AAUDIO_ERROR_INVALID_STATE;
     } else if (getState() != AAUDIO_STREAM_STATE_PAUSED) {
+        ALOGE("AudioStreamTrack::requestFlush() not paused");
         return AAUDIO_ERROR_INVALID_STATE;
     }
     setState(AAUDIO_STREAM_STATE_FLUSHING);
@@ -286,6 +293,7 @@
     std::lock_guard<std::mutex> lock(mStreamMutex);
 
     if (mAudioTrack.get() == nullptr) {
+        ALOGE("AudioStreamTrack::requestStop() no AudioTrack");
         return AAUDIO_ERROR_INVALID_STATE;
     }
     onStop();
@@ -296,7 +304,7 @@
     return AAUDIO_OK;
 }
 
-aaudio_result_t AudioStreamTrack::updateStateWhileWaiting()
+aaudio_result_t AudioStreamTrack::updateStateMachine()
 {
     status_t err;
     aaudio_wrapping_frames_t position;
@@ -373,6 +381,12 @@
     }
     int32_t framesWritten = (int32_t)(bytesWritten / bytesPerFrame);
     incrementFramesWritten(framesWritten);
+
+    result = updateStateMachine();
+    if (result != AAUDIO_OK) {
+        return result;
+    }
+
     return framesWritten;
 }
 
diff --git a/media/libaaudio/src/legacy/AudioStreamTrack.h b/media/libaaudio/src/legacy/AudioStreamTrack.h
index ff429ea..3230ac8 100644
--- a/media/libaaudio/src/legacy/AudioStreamTrack.h
+++ b/media/libaaudio/src/legacy/AudioStreamTrack.h
@@ -67,7 +67,7 @@
         return AAUDIO_DIRECTION_OUTPUT;
     }
 
-    aaudio_result_t updateStateWhileWaiting() override;
+    aaudio_result_t updateStateMachine() override;
 
     // This is public so it can be called from the C callback function.
     void processCallback(int event, void *info) override;
@@ -81,8 +81,7 @@
     // adapts between variable sized blocks and fixed size blocks
     FixedBlockReader                 mFixedBlockReader;
 
-    // TODO add 64-bit position reporting to AudioRecord and use it.
-    aaudio_wrapping_frames_t         mPositionWhenStarting = 0;
+    // TODO add 64-bit position reporting to AudioTrack and use it.
     aaudio_wrapping_frames_t         mPositionWhenPausing = 0;
 };
 
diff --git a/media/libaaudio/src/utility/AAudioUtilities.cpp b/media/libaaudio/src/utility/AAudioUtilities.cpp
index a3198d7..2450920 100644
--- a/media/libaaudio/src/utility/AAudioUtilities.cpp
+++ b/media/libaaudio/src/utility/AAudioUtilities.cpp
@@ -369,12 +369,43 @@
     return prop;
 }
 
+int32_t AAudioProperty_getWakeupDelayMicros() {
+    const int32_t minMicros = 0; // arbitrary
+    const int32_t defaultMicros = 200; // arbitrary, based on some observed jitter
+    const int32_t maxMicros = 5000; // arbitrary, probably don't want more than this
+    int32_t prop = property_get_int32(AAUDIO_PROP_WAKEUP_DELAY_USEC, defaultMicros);
+    if (prop < minMicros) {
+        ALOGW("AAudioProperty_getWakeupDelayMicros: clipped %d to %d", prop, minMicros);
+        prop = minMicros;
+    } else if (prop > maxMicros) {
+        ALOGW("AAudioProperty_getWakeupDelayMicros: clipped %d to %d", prop, maxMicros);
+        prop = maxMicros;
+    }
+    return prop;
+}
+
+int32_t AAudioProperty_getMinimumSleepMicros() {
+    const int32_t minMicros = 20; // arbitrary
+    const int32_t defaultMicros = 200; // arbitrary
+    const int32_t maxMicros = 2000; // arbitrary
+    int32_t prop = property_get_int32(AAUDIO_PROP_MINIMUM_SLEEP_USEC, defaultMicros);
+    if (prop < minMicros) {
+        ALOGW("AAudioProperty_getMinimumSleepMicros: clipped %d to %d", prop, minMicros);
+        prop = minMicros;
+    } else if (prop > maxMicros) {
+        ALOGW("AAudioProperty_getMinimumSleepMicros: clipped %d to %d", prop, maxMicros);
+        prop = maxMicros;
+    }
+    return prop;
+}
+
 int32_t AAudioProperty_getHardwareBurstMinMicros() {
     const int32_t defaultMicros = 1000; // arbitrary
     const int32_t maxMicros = 1000 * 1000; // arbitrary
     int32_t prop = property_get_int32(AAUDIO_PROP_HW_BURST_MIN_USEC, defaultMicros);
     if (prop < 1 || prop > maxMicros) {
-        ALOGE("AAudioProperty_getHardwareBurstMinMicros: invalid = %d", prop);
+        ALOGE("AAudioProperty_getHardwareBurstMinMicros: invalid = %d, use %d",
+              prop, defaultMicros);
         prop = defaultMicros;
     }
     return prop;
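
Both property readers added above follow the same read-then-clamp shape. A sketch of a shared helper, assuming only the property_get_int32() call already used in this file (the helper itself is hypothetical):

    #include <cutils/properties.h>
    #include <utils/Log.h>

    // Sketch: read an integer system property and clamp it to [minValue, maxValue],
    // warning when the configured value had to be clipped.
    static int32_t getClampedProperty(const char *name, int32_t defaultValue,
                                      int32_t minValue, int32_t maxValue) {
        int32_t prop = property_get_int32(name, defaultValue);
        if (prop < minValue) {
            ALOGW("%s: clipped %d to %d", name, prop, minValue);
            prop = minValue;
        } else if (prop > maxValue) {
            ALOGW("%s: clipped %d to %d", name, prop, maxValue);
            prop = maxValue;
        }
        return prop;
    }
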
diff --git a/media/libaaudio/src/utility/AAudioUtilities.h b/media/libaaudio/src/utility/AAudioUtilities.h
index efd663d..b0c6c94 100644
--- a/media/libaaudio/src/utility/AAudioUtilities.h
+++ b/media/libaaudio/src/utility/AAudioUtilities.h
@@ -195,13 +195,35 @@
 
 /**
  * Read system property.
- * @return number of bursts per mixer cycle
+ * @return number of bursts per AAudio service mixer cycle
  */
 int32_t AAudioProperty_getMixerBursts();
 
 #define AAUDIO_PROP_HW_BURST_MIN_USEC      "aaudio.hw_burst_min_usec"
 
 /**
+ * Read a system property that specifies the number of extra microseconds that a thread
+ * should sleep when waiting for another thread to service a FIFO. This is used
+ * to keep the waking thread from being overly optimistic about the other thread's
+ * wakeup timing. This value should be set high enough to cover typical scheduling jitter
+ * for a real-time thread.
+ *
+ * @return number of microseconds to delay the wakeup.
+ */
+int32_t AAudioProperty_getWakeupDelayMicros();
+
+#define AAUDIO_PROP_WAKEUP_DELAY_USEC      "aaudio.wakeup_delay_usec"
+
+/**
+ * Read a system property that specifies the minimum sleep time when polling the FIFO.
+ *
+ * @return minimum number of microseconds to sleep.
+ */
+int32_t AAudioProperty_getMinimumSleepMicros();
+
+#define AAUDIO_PROP_MINIMUM_SLEEP_USEC      "aaudio.minimum_sleep_usec"
+
+/**
  * Read system property.
  * This is handy in case the DMA is bursting too quickly for the CPU to keep up.
  * For example, there may be a DMA burst every 100 usec but you only
@@ -236,4 +258,74 @@
     }
 }
 
+
+/**
+ * Simple double buffer for a structure that can be written occasionally and read occasionally.
+ * This allows a SINGLE writer with multiple readers.
+ *
+ * It is OK if a new value overwrites an old one before it has been read.
+ * It is also OK if we read an old value.
+ * read() may return a non-atomic (torn) result if the other thread is rapidly
+ * writing new values on another core.
+ */
+template <class T>
+class SimpleDoubleBuffer {
+public:
+    SimpleDoubleBuffer()
+            : mValues()
+            , mCounter(0) {}
+
+    __attribute__((no_sanitize("integer")))
+    void write(T value) {
+        int index = mCounter.load() & 1;
+        mValues[index] = value;
+        mCounter++; // Increment AFTER updating storage, OK if it wraps.
+    }
+
+    T read() const {
+        T result;
+        int before;
+        int after;
+        int timeout = 3;
+        do {
+            // Check to see if a write occurred while we were reading.
+            before = mCounter.load();
+            int index = (before & 1) ^ 1;
+            result = mValues[index];
+            after = mCounter.load();
+        } while ((after != before) && --timeout > 0);
+        return result;
+    }
+
+    /**
+     * @return true if at least one value has been written
+     */
+    bool isValid() const {
+        return mCounter.load() > 0;
+    }
+
+private:
+    T                    mValues[2];
+    std::atomic<int>     mCounter;
+};
+
+class Timestamp {
+public:
+    Timestamp()
+            : mPosition(0)
+            , mNanoseconds(0) {}
+    Timestamp(int64_t position, int64_t nanoseconds)
+            : mPosition(position)
+            , mNanoseconds(nanoseconds) {}
+
+    int64_t getPosition() const { return mPosition; }
+
+    int64_t getNanoseconds() const { return mNanoseconds; }
+
+private:
+    // These cannot be const because we need to implement the copy assignment operator.
+    int64_t mPosition;
+    int64_t mNanoseconds;
+};
+
 #endif //UTILITY_AAUDIO_UTILITIES_H
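
SimpleDoubleBuffer above is a seqlock-style structure: write() fills the slot the readers are not looking at and then bumps the counter, while read() picks the slot opposite the counter's parity and retries if the counter moved mid-read. A hypothetical usage sketch with the Timestamp class defined above:

    // Sketch: one writer publishes timestamps; readers poll without locks.
    SimpleDoubleBuffer<Timestamp> gLatestTimestamp;

    void publishTimestamp(int64_t position, int64_t nanos) {
        gLatestTimestamp.write(Timestamp(position, nanos)); // never blocks
    }

    bool pollTimestamp(int64_t *positionOut) {
        if (!gLatestTimestamp.isValid()) {
            return false;                        // nothing written yet
        }
        Timestamp ts = gLatestTimestamp.read();  // may be stale, never blocks
        *positionOut = ts.getPosition();
        return true;
    }
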
diff --git a/media/libaaudio/tests/Android.mk b/media/libaaudio/tests/Android.mk
index e4eef06..4402919 100644
--- a/media/libaaudio/tests/Android.mk
+++ b/media/libaaudio/tests/Android.mk
@@ -34,6 +34,17 @@
 LOCAL_C_INCLUDES := \
     $(call include-path-for, audio-utils) \
     frameworks/av/media/libaaudio/include \
+    frameworks/av/media/libaaudio/src \
+    frameworks/av/media/libaaudio/examples
+LOCAL_SRC_FILES:= test_timestamps.cpp
+LOCAL_SHARED_LIBRARIES := libaaudio
+LOCAL_MODULE := test_timestamps
+include $(BUILD_NATIVE_TEST)
+
+include $(CLEAR_VARS)
+LOCAL_C_INCLUDES := \
+    $(call include-path-for, audio-utils) \
+    frameworks/av/media/libaaudio/include \
     frameworks/av/media/libaaudio/src
 LOCAL_SRC_FILES:= test_linear_ramp.cpp
 LOCAL_SHARED_LIBRARIES := libaaudio
diff --git a/media/libaaudio/tests/test_timestamps.cpp b/media/libaaudio/tests/test_timestamps.cpp
new file mode 100644
index 0000000..d9ca391
--- /dev/null
+++ b/media/libaaudio/tests/test_timestamps.cpp
@@ -0,0 +1,188 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// Play silence while monitoring timestamps and measuring output latency.
+
+#include <stdio.h>
+#include <unistd.h>
+
+#include <aaudio/AAudio.h>
+#include <aaudio/AAudioTesting.h>
+
+#include "utils/AAudioExampleUtils.h"
+
+#define DEFAULT_TIMEOUT_NANOS  ((int64_t)1000000000)
+
+int main(int argc, char **argv) {
+    (void) argc;
+    (void) argv;
+
+    aaudio_result_t result = AAUDIO_OK;
+
+    int32_t triesLeft = 3;
+    int32_t bufferCapacity;
+    int32_t framesPerBurst = 0;
+    float *buffer = nullptr;
+
+    int32_t actualChannelCount = 0;
+    int32_t actualSampleRate = 0;
+    int32_t originalBufferSize = 0;
+    int32_t requestedBufferSize = 0;
+    int32_t finalBufferSize = 0;
+    aaudio_format_t actualDataFormat = AAUDIO_FORMAT_PCM_FLOAT;
+    aaudio_sharing_mode_t actualSharingMode = AAUDIO_SHARING_MODE_SHARED;
+    int32_t framesMax;
+    int64_t framesTotal;
+    int64_t printAt;
+    int samplesPerBurst;
+    int64_t previousFramePosition = -1;
+
+    AAudioStreamBuilder *aaudioBuilder = nullptr;
+    AAudioStream *aaudioStream = nullptr;
+
+    // Make printf print immediately so that debug info is not stuck
+    // in a buffer if we hang or crash.
+    setvbuf(stdout, nullptr, _IONBF, (size_t) 0);
+
+    printf("Test Timestamps V0.1.1\n");
+
+    AAudio_setMMapPolicy(AAUDIO_POLICY_AUTO);
+
+    // Use an AAudioStreamBuilder to contain requested parameters.
+    result = AAudio_createStreamBuilder(&aaudioBuilder);
+    if (result != AAUDIO_OK) {
+        printf("AAudio_createStreamBuilder returned %s",
+               AAudio_convertResultToText(result));
+        goto finish;
+    }
+
+    // Request stream properties.
+    AAudioStreamBuilder_setFormat(aaudioBuilder, AAUDIO_FORMAT_PCM_FLOAT);
+    //AAudioStreamBuilder_setPerformanceMode(aaudioBuilder, AAUDIO_PERFORMANCE_MODE_NONE);
+    AAudioStreamBuilder_setPerformanceMode(aaudioBuilder, AAUDIO_PERFORMANCE_MODE_LOW_LATENCY);
+
+    // Create an AAudioStream using the Builder.
+    result = AAudioStreamBuilder_openStream(aaudioBuilder, &aaudioStream);
+    if (result != AAUDIO_OK) {
+        printf("AAudioStreamBuilder_openStream returned %s",
+               AAudio_convertResultToText(result));
+        goto finish;
+    }
+
+    // Check to see what kind of stream we actually got.
+    actualSampleRate = AAudioStream_getSampleRate(aaudioStream);
+    actualChannelCount = AAudioStream_getChannelCount(aaudioStream);
+    actualDataFormat = AAudioStream_getFormat(aaudioStream);
+
+    printf("-------- chans = %3d, rate = %6d format = %d\n",
+            actualChannelCount, actualSampleRate, actualDataFormat);
+    printf("    Is MMAP used? %s\n", AAudioStream_isMMapUsed(aaudioStream)
+                                   ? "yes" : "no");
+
+    // This is the number of frames that are read in one chunk by a DMA controller
+    // or a DSP or a mixer.
+    framesPerBurst = AAudioStream_getFramesPerBurst(aaudioStream);
+    printf("    framesPerBurst = %3d\n", framesPerBurst);
+
+    originalBufferSize = AAudioStream_getBufferSizeInFrames(aaudioStream);
+    requestedBufferSize = 2 * framesPerBurst;
+    finalBufferSize = AAudioStream_setBufferSizeInFrames(aaudioStream, requestedBufferSize);
+
+    printf("    BufferSize: original = %4d, requested = %4d, final = %4d\n",
+           originalBufferSize, requestedBufferSize, finalBufferSize);
+
+    samplesPerBurst = framesPerBurst * actualChannelCount;
+    buffer = new float[samplesPerBurst]{}; // value-initialize so the "silence" is actually zeros
+
+    result = AAudioStream_requestStart(aaudioStream);
+    if (result != AAUDIO_OK) {
+        printf("AAudioStream_requestStart returned %s",
+               AAudio_convertResultToText(result));
+        goto finish;
+    }
+
+    // Play silence for about four seconds.
+    framesMax = actualSampleRate * 4;
+    framesTotal = 0;
+    printAt = actualSampleRate;
+    while (result == AAUDIO_OK && framesTotal < framesMax) {
+        int32_t framesWritten = AAudioStream_write(aaudioStream,
+                                                   buffer, framesPerBurst,
+                                                   DEFAULT_TIMEOUT_NANOS);
+        if (framesWritten < 0) {
+            result = framesWritten;
+            printf("write() returned %s, frames = %d\n",
+                   AAudio_convertResultToText(result), (int)framesTotal);
+            printf("  frames = %d\n", (int)framesTotal);
+        } else if (framesWritten != framesPerBurst) {
+            printf("write() returned %d, frames = %d\n", framesWritten, (int)framesTotal);
+            result = AAUDIO_ERROR_TIMEOUT;
+        } else {
+            framesTotal += framesWritten;
+            if (framesTotal >= printAt) {
+                printf("frames = %d\n", (int)framesTotal);
+                printAt += actualSampleRate;
+            }
+        }
+
+        // Print timestamps.
+        int64_t framePosition = 0;
+        int64_t frameTime = 0;
+        aaudio_result_t timeResult;
+        timeResult = AAudioStream_getTimestamp(aaudioStream, CLOCK_MONOTONIC,
+                                           &framePosition, &frameTime);
+
+        if (timeResult == AAUDIO_OK) {
+            if (framePosition > (previousFramePosition + 5000)) {
+                int64_t realTime = getNanoseconds();
+                int64_t framesWritten = AAudioStream_getFramesWritten(aaudioStream);
+
+                double latencyMillis = calculateLatencyMillis(framePosition, frameTime,
+                                                              framesWritten, realTime,
+                                                              actualSampleRate);
+
+                printf("--- timestamp: result = %4d, position = %lld, at %lld nanos"
+                               ", latency = %7.2f msec\n",
+                       timeResult,
+                       (long long) framePosition,
+                       (long long) frameTime,
+                       latencyMillis);
+                previousFramePosition = framePosition;
+            }
+        }
+    }
+
+    result = AAudioStream_requestStop(aaudioStream);
+    if (result != AAUDIO_OK) {
+        printf("AAudioStream_requestStop returned %s\n",
+               AAudio_convertResultToText(result));
+    }
+    result = AAudioStream_close(aaudioStream);
+    if (result != AAUDIO_OK) {
+        printf("AAudioStream_close returned %s\n",
+               AAudio_convertResultToText(result));
+    }
+    aaudioStream = nullptr;
+
+
+finish:
+    if (aaudioStream != nullptr) {
+        AAudioStream_close(aaudioStream);
+    }
+    AAudioStreamBuilder_delete(aaudioBuilder);
+    delete[] buffer;
+    printf("result = %d = %s\n", result, AAudio_convertResultToText(result));
+}
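
The latency figure printed above comes from calculateLatencyMillis() in the examples' AAudioExampleUtils.h. One plausible implementation of that math, shown only as an assumed sketch: extrapolate the DAC position from the timestamp to "now", then convert the frames still queued between the write pointer and the DAC into milliseconds.

    // Assumed sketch, not the actual helper: estimate output latency from an
    // AAudio timestamp pair plus the application's frames-written counter.
    static double calculateLatencyMillisSketch(int64_t framePosition, int64_t frameTimeNanos,
                                               int64_t framesWritten, int64_t nowNanos,
                                               int32_t sampleRate) {
        // Frames the DAC has consumed since the timestamp was captured.
        double elapsedFrames = (nowNanos - frameTimeNanos) * sampleRate / 1.0e9;
        double currentPosition = framePosition + elapsedFrames;
        // Frames written by the app but not yet played are the latency.
        double bufferedFrames = framesWritten - currentPosition;
        return 1000.0 * bufferedFrames / sampleRate;
    }
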
diff --git a/media/libaudioclient/AudioTrackShared.cpp b/media/libaudioclient/AudioTrackShared.cpp
index e49945b..7bf4f99 100644
--- a/media/libaudioclient/AudioTrackShared.cpp
+++ b/media/libaudioclient/AudioTrackShared.cpp
@@ -111,7 +111,8 @@
 status_t ClientProxy::obtainBuffer(Buffer* buffer, const struct timespec *requested,
         struct timespec *elapsed)
 {
-    LOG_ALWAYS_FATAL_IF(buffer == NULL || buffer->mFrameCount == 0);
+    LOG_ALWAYS_FATAL_IF(buffer == NULL || buffer->mFrameCount == 0,
+            "%s: null or zero frame buffer, buffer:%p", __func__, buffer);
     struct timespec total;          // total elapsed time spent waiting
     total.tv_sec = 0;
     total.tv_nsec = 0;
@@ -345,7 +346,10 @@
         buffer->mNonContig = 0;
         return;
     }
-    LOG_ALWAYS_FATAL_IF(!(stepCount <= mUnreleased && mUnreleased <= mFrameCount));
+    LOG_ALWAYS_FATAL_IF(!(stepCount <= mUnreleased && mUnreleased <= mFrameCount),
+            "%s: mUnreleased out of range, "
+            "!(stepCount:%zu <= mUnreleased:%zu <= mFrameCount:%zu), BufferSizeInFrames:%u",
+            __func__, stepCount, mUnreleased, mFrameCount, getBufferSizeInFrames());
     mUnreleased -= stepCount;
     audio_track_cblk_t* cblk = mCblk;
     // Both of these barriers are required
@@ -675,7 +679,8 @@
 __attribute__((no_sanitize("integer")))
 status_t ServerProxy::obtainBuffer(Buffer* buffer, bool ackFlush)
 {
-    LOG_ALWAYS_FATAL_IF(buffer == NULL || buffer->mFrameCount == 0);
+    LOG_ALWAYS_FATAL_IF(buffer == NULL || buffer->mFrameCount == 0,
+            "%s: null or zero frame buffer, buffer:%p", __func__, buffer);
     if (mIsShutdown) {
         goto no_init;
     }
@@ -761,7 +766,10 @@
         buffer->mNonContig = 0;
         return;
     }
-    LOG_ALWAYS_FATAL_IF(!(stepCount <= mUnreleased && mUnreleased <= mFrameCount));
+    LOG_ALWAYS_FATAL_IF(!(stepCount <= mUnreleased && mUnreleased <= mFrameCount),
+            "%s: mUnreleased out of range, "
+            "!(stepCount:%zu <= mUnreleased:%zu <= mFrameCount:%zu)",
+            __func__, stepCount, mUnreleased, mFrameCount);
     mUnreleased -= stepCount;
     audio_track_cblk_t* cblk = mCblk;
     if (mIsOut) {
@@ -1056,7 +1064,9 @@
     }
     // As mFramesReady is the total remaining frames in the static audio track,
     // it is always larger or equal to avail.
-    LOG_ALWAYS_FATAL_IF(mFramesReady < (int64_t) avail);
+    LOG_ALWAYS_FATAL_IF(mFramesReady < (int64_t) avail,
+            "%s: mFramesReady out of range, mFramesReady:%lld < avail:%zu",
+            __func__, (long long)mFramesReady, avail);
     buffer->mNonContig = mFramesReady == INT64_MAX ? SIZE_MAX : clampToSize(mFramesReady - avail);
     if (!ackFlush) {
         mUnreleased = avail;
@@ -1068,8 +1078,14 @@
 void StaticAudioTrackServerProxy::releaseBuffer(Buffer* buffer)
 {
     size_t stepCount = buffer->mFrameCount;
-    LOG_ALWAYS_FATAL_IF(!((int64_t) stepCount <= mFramesReady));
-    LOG_ALWAYS_FATAL_IF(!(stepCount <= mUnreleased));
+    LOG_ALWAYS_FATAL_IF(!((int64_t) stepCount <= mFramesReady),
+            "%s: stepCount out of range, "
+            "!(stepCount:%zu <= mFramesReady:%lld)",
+            __func__, stepCount, (long long)mFramesReady);
+    LOG_ALWAYS_FATAL_IF(!(stepCount <= mUnreleased),
+            "%s: stepCount out of range, "
+            "!(stepCount:%zu <= mUnreleased:%zu)",
+            __func__, stepCount, mUnreleased);
     if (stepCount == 0) {
         // prevent accidental re-use of buffer
         buffer->mRaw = NULL;
diff --git a/media/libaudiohal/StreamHalHidl.cpp b/media/libaudiohal/StreamHalHidl.cpp
index 176abee..0cafa36 100644
--- a/media/libaudiohal/StreamHalHidl.cpp
+++ b/media/libaudiohal/StreamHalHidl.cpp
@@ -47,7 +47,8 @@
 StreamHalHidl::StreamHalHidl(IStream *stream)
         : ConversionHelperHidl("Stream"),
           mStream(stream),
-          mHalThreadPriority(HAL_THREAD_PRIORITY_DEFAULT) {
+          mHalThreadPriority(HAL_THREAD_PRIORITY_DEFAULT),
+          mCachedBufferSize(0) {
 
     // Instrument audio signal power logging.
     // Note: This assumes channel mask, format, and sample rate do not change after creation.
@@ -73,7 +74,11 @@
 
 status_t StreamHalHidl::getBufferSize(size_t *size) {
     if (!mStream) return NO_INIT;
-    return processReturn("getBufferSize", mStream->getBufferSize(), size);
+    status_t status = processReturn("getBufferSize", mStream->getBufferSize(), size);
+    if (status == OK) {
+        mCachedBufferSize = *size;
+    }
+    return status;
 }
 
 status_t StreamHalHidl::getChannelMask(audio_channel_mask_t *mask) {
@@ -202,6 +207,14 @@
     return OK;
 }
 
+status_t StreamHalHidl::getCachedBufferSize(size_t *size) {
+    if (mCachedBufferSize != 0) {
+        *size = mCachedBufferSize;
+        return OK;
+    }
+    return getBufferSize(size);
+}
+
 bool StreamHalHidl::requestHalThreadPriority(pid_t threadPid, pid_t threadId) {
     if (mHalThreadPriority == HAL_THREAD_PRIORITY_DEFAULT) {
         return true;
@@ -320,8 +333,19 @@
     }
 
     status_t status;
-    if (!mDataMQ && (status = prepareForWriting(bytes)) != OK) {
-        return status;
+    if (!mDataMQ) {
+        // If playback starts close to the end of a compressed track, the number of
+        // bytes that needs to be written is less than the actual buffer size. Use the
+        // full buffer size for the MQ anyway, since otherwise data will be truncated
+        // after seeking back to the middle of the track.
+        size_t bufferSize;
+        if ((status = getCachedBufferSize(&bufferSize)) != OK) {
+            return status;
+        }
+        if (bytes > bufferSize) bufferSize = bytes;
+        if ((status = prepareForWriting(bufferSize)) != OK) {
+            return status;
+        }
     }
 
     status = callWriterThread(
diff --git a/media/libaudiohal/StreamHalHidl.h b/media/libaudiohal/StreamHalHidl.h
index a69cce8..d4ab943 100644
--- a/media/libaudiohal/StreamHalHidl.h
+++ b/media/libaudiohal/StreamHalHidl.h
@@ -102,6 +102,8 @@
     // The destructor automatically closes the stream.
     virtual ~StreamHalHidl();
 
+    status_t getCachedBufferSize(size_t *size);
+
     bool requestHalThreadPriority(pid_t threadPid, pid_t threadId);
 
     // mStreamPowerLog is used for audio signal power logging.
@@ -111,6 +113,7 @@
     const int HAL_THREAD_PRIORITY_DEFAULT = -1;
     IStream *mStream;
     int mHalThreadPriority;
+    size_t mCachedBufferSize;
 };
 
 class StreamOutHalHidl : public StreamOutHalInterface, public StreamHalHidl {
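
getCachedBufferSize() and the write() change in StreamHalHidl.cpp above size the data MQ once: the HAL buffer size is memoized on first query, and the queue is created no smaller than either that size or the current write. A reduced sketch of the sizing decision (names hypothetical):

    // Sketch: size the data MQ from the cached HAL buffer size, but never
    // smaller than the current write, so a short first write near the end of
    // a compressed track does not permanently truncate the queue.
    static size_t chooseDataMqSize(size_t cachedBufferSize, size_t bytesThisWrite) {
        return bytesThisWrite > cachedBufferSize ? bytesThisWrite : cachedBufferSize;
    }
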
diff --git a/media/libeffects/downmix/Android.mk b/media/libeffects/downmix/Android.mk
index 28b96d5..a5fbf14 100644
--- a/media/libeffects/downmix/Android.mk
+++ b/media/libeffects/downmix/Android.mk
@@ -20,7 +20,8 @@
 	$(call include-path-for, audio-effects) \
 	$(call include-path-for, audio-utils)
 
-LOCAL_CFLAGS += -fvisibility=hidden -DBUILD_FLOAT
+#-DBUILD_FLOAT
+LOCAL_CFLAGS += -fvisibility=hidden
 LOCAL_CFLAGS += -Wall -Werror
 
 LOCAL_HEADER_LIBRARIES += libhardware_headers
diff --git a/media/libmedia/Android.bp b/media/libmedia/Android.bp
index 521dc5f..7abf09f 100644
--- a/media/libmedia/Android.bp
+++ b/media/libmedia/Android.bp
@@ -1,12 +1,23 @@
 cc_library_headers {
     name: "libmedia_headers",
     vendor_available: true,
-    export_include_dirs: ["include", "omx/1.0/include"],
+    export_include_dirs: ["include"],
+    header_libs: [
+        "libstagefright_headers",
+        "media_plugin_headers",
+    ],
+    export_header_lib_headers: [
+        "libstagefright_headers",
+        "media_plugin_headers",
+    ],
 }
 
 cc_library {
     name: "libmedia_helper",
     vendor_available: true,
+    vndk: {
+        enabled: true,
+    },
     srcs: ["AudioParameter.cpp", "TypeConverter.cpp"],
     cflags: [
         "-Werror",
@@ -70,13 +81,6 @@
         "libutils",
     ],
 
-    include_dirs: [
-        "frameworks/av/include/media",
-        "frameworks/native/include", // for media/hardware/MetadataBufferType.h
-        "frameworks/native/include/media/openmax",
-        "frameworks/av/media/libstagefright",
-    ],
-
     export_shared_lib_headers: [
         "android.hidl.memory@1.0",
         "android.hidl.token@1.0-utils",
@@ -202,6 +206,7 @@
         "libicui18n",
         "libsonivox",
         "libmediadrm",
+        "libmedia_helper",
         "android.hidl.memory@1.0",
     ],
 
@@ -210,15 +215,10 @@
         "libc_malloc_debug_backtrace",
     ],
 
-    include_dirs: [
-        "frameworks/native/include/media/openmax",
-        "frameworks/av/include/media/",
-        "frameworks/av/media/libstagefright",
-    ],
-
     export_include_dirs: [
         "include",
     ],
+
     cflags: [
         "-Werror",
         "-Wno-error=deprecated-declarations",
diff --git a/media/libmedia/CharacterEncodingDetector.cpp b/media/libmedia/CharacterEncodingDetector.cpp
index 808c2b5..990d260 100644
--- a/media/libmedia/CharacterEncodingDetector.cpp
+++ b/media/libmedia/CharacterEncodingDetector.cpp
@@ -18,15 +18,15 @@
 #define LOG_TAG "CharacterEncodingDector"
 #include <utils/Log.h>
 
-#include <CharacterEncodingDetector.h>
+#include <media/CharacterEncodingDetector.h>
 #include "CharacterEncodingDetectorTables.h"
 
-#include "utils/Vector.h"
-#include "StringArray.h"
+#include <utils/Vector.h>
+#include <media/StringArray.h>
 
-#include "unicode/ucnv.h"
-#include "unicode/ucsdet.h"
-#include "unicode/ustring.h"
+#include <unicode/ucnv.h>
+#include <unicode/ucsdet.h>
+#include <unicode/ustring.h>
 
 namespace android {
 
diff --git a/media/libmedia/IMediaMetadataRetriever.cpp b/media/libmedia/IMediaMetadataRetriever.cpp
index 7058ee8..5ea2e8b 100644
--- a/media/libmedia/IMediaMetadataRetriever.cpp
+++ b/media/libmedia/IMediaMetadataRetriever.cpp
@@ -127,22 +127,32 @@
         return reply.readInt32();
     }
 
-    status_t setDataSource(const sp<IDataSource>& source)
+    status_t setDataSource(const sp<IDataSource>& source, const char *mime)
     {
         Parcel data, reply;
         data.writeInterfaceToken(IMediaMetadataRetriever::getInterfaceDescriptor());
         data.writeStrongBinder(IInterface::asBinder(source));
+
+        if (mime != NULL) {
+            data.writeInt32(1);
+            data.writeCString(mime);
+        } else {
+            data.writeInt32(0);
+        }
         remote()->transact(SET_DATA_SOURCE_CALLBACK, data, &reply);
         return reply.readInt32();
     }
 
-    sp<IMemory> getFrameAtTime(int64_t timeUs, int option)
+    sp<IMemory> getFrameAtTime(int64_t timeUs, int option, int colorFormat, bool metaOnly)
     {
-        ALOGV("getTimeAtTime: time(%" PRId64 " us) and option(%d)", timeUs, option);
+        ALOGV("getTimeAtTime: time(%" PRId64 " us), option(%d), colorFormat(%d) metaOnly(%d)",
+                timeUs, option, colorFormat, metaOnly);
         Parcel data, reply;
         data.writeInterfaceToken(IMediaMetadataRetriever::getInterfaceDescriptor());
         data.writeInt64(timeUs);
         data.writeInt32(option);
+        data.writeInt32(colorFormat);
+        data.writeInt32(metaOnly);
 #ifndef DISABLE_GROUP_SCHEDULE_HACK
         sendSchedPolicy(data);
 #endif
@@ -258,7 +268,12 @@
             if (source == NULL) {
                 reply->writeInt32(BAD_VALUE);
             } else {
-                reply->writeInt32(setDataSource(source));
+                int32_t hasMime = data.readInt32();
+                const char *mime = NULL;
+                if (hasMime) {
+                    mime = data.readCString();
+                }
+                reply->writeInt32(setDataSource(source, mime));
             }
             return NO_ERROR;
         } break;
@@ -266,11 +281,14 @@
             CHECK_INTERFACE(IMediaMetadataRetriever, data, reply);
             int64_t timeUs = data.readInt64();
             int option = data.readInt32();
-            ALOGV("getTimeAtTime: time(%" PRId64 " us) and option(%d)", timeUs, option);
+            int colorFormat = data.readInt32();
+            bool metaOnly = (data.readInt32() != 0);
+            ALOGV("getTimeAtTime: time(%" PRId64 " us), option(%d), colorFormat(%d), metaOnly(%d)",
+                    timeUs, option, colorFormat, metaOnly);
 #ifndef DISABLE_GROUP_SCHEDULE_HACK
             setSchedPolicy(data);
 #endif
-            sp<IMemory> bitmap = getFrameAtTime(timeUs, option);
+            sp<IMemory> bitmap = getFrameAtTime(timeUs, option, colorFormat, metaOnly);
             if (bitmap != 0) {  // Don't send NULL across the binder interface
                 reply->writeInt32(NO_ERROR);
                 reply->writeStrongBinder(IInterface::asBinder(bitmap));
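
The mime argument is marshalled with the usual Binder optional-value convention: a presence flag followed by the payload, consumed in the same order on the server side. A minimal sketch of that convention (helper names hypothetical):

    #include <binder/Parcel.h>

    using android::Parcel;

    // Sketch: write an optional C string as a presence flag plus payload.
    static void writeOptionalCString(Parcel *data, const char *s) {
        if (s != NULL) {
            data->writeInt32(1);   // present
            data->writeCString(s);
        } else {
            data->writeInt32(0);   // absent
        }
    }

    // Sketch: the reader consumes the flag, then the payload if present.
    static const char *readOptionalCString(const Parcel &data) {
        return data.readInt32() ? data.readCString() : NULL;
    }
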
diff --git a/media/libmedia/IResourceManagerService.cpp b/media/libmedia/IResourceManagerService.cpp
index 95f7d2e..9724fc1 100644
--- a/media/libmedia/IResourceManagerService.cpp
+++ b/media/libmedia/IResourceManagerService.cpp
@@ -19,7 +19,7 @@
 #define LOG_TAG "IResourceManagerService"
 #include <utils/Log.h>
 
-#include "media/IResourceManagerService.h"
+#include <media/IResourceManagerService.h>
 
 #include <binder/Parcel.h>
 
diff --git a/media/libmedia/MediaScannerClient.cpp b/media/libmedia/MediaScannerClient.cpp
index 9f803cb..028616b 100644
--- a/media/libmedia/MediaScannerClient.cpp
+++ b/media/libmedia/MediaScannerClient.cpp
@@ -20,8 +20,8 @@
 
 #include <media/mediascanner.h>
 
-#include "CharacterEncodingDetector.h"
-#include "StringArray.h"
+#include <media/CharacterEncodingDetector.h>
+#include <media/StringArray.h>
 
 namespace android {
 
diff --git a/media/libmedia/MidiDeviceInfo.cpp b/media/libmedia/MidiDeviceInfo.cpp
index 02efc5f..7588e00 100644
--- a/media/libmedia/MidiDeviceInfo.cpp
+++ b/media/libmedia/MidiDeviceInfo.cpp
@@ -16,7 +16,7 @@
 
 #define LOG_TAG "MidiDeviceInfo"
 
-#include "MidiDeviceInfo.h"
+#include <media/MidiDeviceInfo.h>
 
 #include <binder/Parcel.h>
 #include <log/log.h>
diff --git a/media/libmedia/MidiIoWrapper.cpp b/media/libmedia/MidiIoWrapper.cpp
index faae954..4e5d67f 100644
--- a/media/libmedia/MidiIoWrapper.cpp
+++ b/media/libmedia/MidiIoWrapper.cpp
@@ -22,7 +22,7 @@
 #include <sys/stat.h>
 #include <fcntl.h>
 
-#include "media/MidiIoWrapper.h"
+#include <media/MidiIoWrapper.h>
 
 static int readAt(void *handle, void *buffer, int pos, int size) {
     return ((android::MidiIoWrapper*)handle)->readAt(buffer, pos, size);
diff --git a/media/libmedia/StringArray.cpp b/media/libmedia/StringArray.cpp
index b2e5907..7868b85 100644
--- a/media/libmedia/StringArray.cpp
+++ b/media/libmedia/StringArray.cpp
@@ -21,7 +21,7 @@
 #include <stdlib.h>
 #include <string.h>
 
-#include "StringArray.h"
+#include <media/StringArray.h>
 
 namespace android {
 
diff --git a/media/libmedia/TypeConverter.cpp b/media/libmedia/TypeConverter.cpp
index a6eba86..e6c8f9c 100644
--- a/media/libmedia/TypeConverter.cpp
+++ b/media/libmedia/TypeConverter.cpp
@@ -375,7 +375,7 @@
 audio_channel_mask_t channelMaskFromString(const std::string &literalChannels)
 {
     audio_channel_mask_t channels;
-    if (!OutputChannelConverter::fromString(literalChannels, channels) ||
+    if (!OutputChannelConverter::fromString(literalChannels, channels) &&
             !InputChannelConverter::fromString(literalChannels, channels)) {
         return AUDIO_CHANNEL_INVALID;
     }
diff --git a/media/libmedia/include/media/IDataSource.h b/media/libmedia/include/media/IDataSource.h
index 655f337..3858f78 100644
--- a/media/libmedia/include/media/IDataSource.h
+++ b/media/libmedia/include/media/IDataSource.h
@@ -35,7 +35,9 @@
     // Get the memory that readAt writes into.
     virtual sp<IMemory> getIMemory() = 0;
     // Read up to |size| bytes into the memory returned by getIMemory(). Returns
-    // the number of bytes read, or -1 on error. |size| must not be larger than
+    // the number of bytes read, or a negative value on error (e.g.
+    // ERROR_END_OF_STREAM to indicate EOS; CallbackDataSource relies on this
+    // to properly handle reading the last chunk). |size| must not be larger than
     // the buffer.
     virtual ssize_t readAt(off64_t offset, size_t size) = 0;
     // Get the size, or -1 if the size is unknown.
diff --git a/media/libmedia/include/media/IMediaMetadataRetriever.h b/media/libmedia/include/media/IMediaMetadataRetriever.h
index c90f254..ea95161 100644
--- a/media/libmedia/include/media/IMediaMetadataRetriever.h
+++ b/media/libmedia/include/media/IMediaMetadataRetriever.h
@@ -19,13 +19,12 @@
 #define ANDROID_IMEDIAMETADATARETRIEVER_H
 
 #include <binder/IInterface.h>
-#include <binder/Parcel.h>
 #include <binder/IMemory.h>
 #include <utils/KeyedVector.h>
 #include <utils/RefBase.h>
 
 namespace android {
-
+class Parcel;
 class IDataSource;
 struct IMediaHTTPService;
 
@@ -41,8 +40,10 @@
             const KeyedVector<String8, String8> *headers = NULL) = 0;
 
     virtual status_t        setDataSource(int fd, int64_t offset, int64_t length) = 0;
-    virtual status_t        setDataSource(const sp<IDataSource>& dataSource) = 0;
-    virtual sp<IMemory>     getFrameAtTime(int64_t timeUs, int option) = 0;
+    virtual status_t        setDataSource(
+            const sp<IDataSource>& dataSource, const char *mime) = 0;
+    virtual sp<IMemory>     getFrameAtTime(
+            int64_t timeUs, int option, int colorFormat, bool metaOnly) = 0;
     virtual sp<IMemory>     extractAlbumArt() = 0;
     virtual const char*     extractMetadata(int keyCode) = 0;
 };
diff --git a/media/libmedia/include/media/IOMX.h b/media/libmedia/include/media/IOMX.h
index 9a0ada1..d868860 100644
--- a/media/libmedia/include/media/IOMX.h
+++ b/media/libmedia/include/media/IOMX.h
@@ -29,8 +29,8 @@
 #include <media/hardware/MetadataBufferType.h>
 #include <android/hardware/media/omx/1.0/IOmxNode.h>
 
-#include <OMX_Core.h>
-#include <OMX_Video.h>
+#include <media/openmax/OMX_Core.h>
+#include <media/openmax/OMX_Video.h>
 
 namespace android {
 
diff --git a/media/libmedia/include/media/MediaMetadataRetrieverInterface.h b/media/libmedia/include/media/MediaMetadataRetrieverInterface.h
index a5e1350..257002d 100644
--- a/media/libmedia/include/media/MediaMetadataRetrieverInterface.h
+++ b/media/libmedia/include/media/MediaMetadataRetrieverInterface.h
@@ -41,8 +41,9 @@
             const KeyedVector<String8, String8> *headers = NULL) = 0;
 
     virtual status_t    setDataSource(int fd, int64_t offset, int64_t length) = 0;
-    virtual status_t setDataSource(const sp<DataSource>& source) = 0;
-    virtual VideoFrame* getFrameAtTime(int64_t timeUs, int option) = 0;
+    virtual status_t setDataSource(const sp<DataSource>& source, const char *mime) = 0;
+    virtual VideoFrame* getFrameAtTime(
+            int64_t timeUs, int option, int colorFormat, bool metaOnly) = 0;
     virtual MediaAlbumArt* extractAlbumArt() = 0;
     virtual const char* extractMetadata(int keyCode) = 0;
 };
@@ -54,7 +55,9 @@
     MediaMetadataRetrieverInterface() {}
 
     virtual             ~MediaMetadataRetrieverInterface() {}
-    virtual VideoFrame* getFrameAtTime(int64_t /*timeUs*/, int /*option*/) { return NULL; }
+    virtual VideoFrame* getFrameAtTime(
+            int64_t /*timeUs*/, int /*option*/, int /*colorFormat*/, bool /*metaOnly*/)
+    { return NULL; }
     virtual MediaAlbumArt* extractAlbumArt() { return NULL; }
     virtual const char* extractMetadata(int /*keyCode*/) { return NULL; }
 };
diff --git a/media/libmedia/include/media/mediametadataretriever.h b/media/libmedia/include/media/mediametadataretriever.h
index 8ed07ee..aa2c868 100644
--- a/media/libmedia/include/media/mediametadataretriever.h
+++ b/media/libmedia/include/media/mediametadataretriever.h
@@ -76,8 +76,10 @@
             const KeyedVector<String8, String8> *headers = NULL);
 
     status_t setDataSource(int fd, int64_t offset, int64_t length);
-    status_t setDataSource(const sp<IDataSource>& dataSource);
-    sp<IMemory> getFrameAtTime(int64_t timeUs, int option);
+    status_t setDataSource(
+            const sp<IDataSource>& dataSource, const char *mime = NULL);
+    sp<IMemory> getFrameAtTime(int64_t timeUs, int option,
+            int colorFormat = 0, bool metaOnly = false);
     sp<IMemory> extractAlbumArt();
     const char* extractMetadata(int keyCode);
 
diff --git a/media/libmedia/omx/1.0/include/media/omx/1.0/Conversion.h b/media/libmedia/include/media/omx/1.0/Conversion.h
similarity index 100%
rename from media/libmedia/omx/1.0/include/media/omx/1.0/Conversion.h
rename to media/libmedia/include/media/omx/1.0/Conversion.h
diff --git a/media/libmedia/omx/1.0/include/media/omx/1.0/WGraphicBufferSource.h b/media/libmedia/include/media/omx/1.0/WGraphicBufferSource.h
similarity index 100%
rename from media/libmedia/omx/1.0/include/media/omx/1.0/WGraphicBufferSource.h
rename to media/libmedia/include/media/omx/1.0/WGraphicBufferSource.h
diff --git a/media/libmedia/omx/1.0/include/media/omx/1.0/WOmx.h b/media/libmedia/include/media/omx/1.0/WOmx.h
similarity index 100%
rename from media/libmedia/omx/1.0/include/media/omx/1.0/WOmx.h
rename to media/libmedia/include/media/omx/1.0/WOmx.h
diff --git a/media/libmedia/omx/1.0/include/media/omx/1.0/WOmxBufferSource.h b/media/libmedia/include/media/omx/1.0/WOmxBufferSource.h
similarity index 98%
rename from media/libmedia/omx/1.0/include/media/omx/1.0/WOmxBufferSource.h
rename to media/libmedia/include/media/omx/1.0/WOmxBufferSource.h
index 86322da..086f648 100644
--- a/media/libmedia/omx/1.0/include/media/omx/1.0/WOmxBufferSource.h
+++ b/media/libmedia/include/media/omx/1.0/WOmxBufferSource.h
@@ -21,7 +21,7 @@
 #include <hidl/Status.h>
 
 #include <binder/Binder.h>
-#include <OMXFenceParcelable.h>
+#include <media/OMXFenceParcelable.h>
 
 #include <android/hardware/media/omx/1.0/IOmxBufferSource.h>
 #include <android/BnOMXBufferSource.h>
diff --git a/media/libmedia/omx/1.0/include/media/omx/1.0/WOmxNode.h b/media/libmedia/include/media/omx/1.0/WOmxNode.h
similarity index 100%
rename from media/libmedia/omx/1.0/include/media/omx/1.0/WOmxNode.h
rename to media/libmedia/include/media/omx/1.0/WOmxNode.h
diff --git a/media/libmedia/omx/1.0/include/media/omx/1.0/WOmxObserver.h b/media/libmedia/include/media/omx/1.0/WOmxObserver.h
similarity index 100%
rename from media/libmedia/omx/1.0/include/media/omx/1.0/WOmxObserver.h
rename to media/libmedia/include/media/omx/1.0/WOmxObserver.h
diff --git a/media/libmedia/mediametadataretriever.cpp b/media/libmedia/mediametadataretriever.cpp
index 08a9e6a..7d27d57 100644
--- a/media/libmedia/mediametadataretriever.cpp
+++ b/media/libmedia/mediametadataretriever.cpp
@@ -130,7 +130,7 @@
 }
 
 status_t MediaMetadataRetriever::setDataSource(
-    const sp<IDataSource>& dataSource)
+    const sp<IDataSource>& dataSource, const char *mime)
 {
     ALOGV("setDataSource(IDataSource)");
     Mutex::Autolock _l(mLock);
@@ -138,18 +138,20 @@
         ALOGE("retriever is not initialized");
         return INVALID_OPERATION;
     }
-    return mRetriever->setDataSource(dataSource);
+    return mRetriever->setDataSource(dataSource, mime);
 }
 
-sp<IMemory> MediaMetadataRetriever::getFrameAtTime(int64_t timeUs, int option)
+sp<IMemory> MediaMetadataRetriever::getFrameAtTime(
+        int64_t timeUs, int option, int colorFormat, bool metaOnly)
 {
-    ALOGV("getFrameAtTime: time(%" PRId64 " us) option(%d)", timeUs, option);
+    ALOGV("getFrameAtTime: time(%" PRId64 " us) option(%d) colorFormat(%d) metaOnly(%d)",
+            timeUs, option, colorFormat, metaOnly);
     Mutex::Autolock _l(mLock);
     if (mRetriever == 0) {
         ALOGE("retriever is not initialized");
         return NULL;
     }
-    return mRetriever->getFrameAtTime(timeUs, option);
+    return mRetriever->getFrameAtTime(timeUs, option, colorFormat, metaOnly);
 }
 
 const char* MediaMetadataRetriever::extractMetadata(int keyCode)
diff --git a/media/libmediaplayerservice/MediaPlayerService.cpp b/media/libmediaplayerservice/MediaPlayerService.cpp
index 77f0cab..496db0d 100644
--- a/media/libmediaplayerservice/MediaPlayerService.cpp
+++ b/media/libmediaplayerservice/MediaPlayerService.cpp
@@ -77,7 +77,7 @@
 #include "TestPlayerStub.h"
 #include "nuplayer/NuPlayerDriver.h"
 
-#include <OMX.h>
+#include <media/stagefright/omx/OMX.h>
 
 #include "HDCP.h"
 #include "HTTPBase.h"
@@ -2318,6 +2318,33 @@
             }
         }
     } else {
+        // VolumeShapers are not affected when a track moves between players for
+        // gapless playback (setNextMediaPlayer).
+        // We forward VolumeShaper operations that do not change configuration
+        // to the new player so that unducking may occur as expected.
+        // Unducking is idempotent, so applying it twice back-to-back is harmless.
+        if (configuration->getType() == VolumeShaper::Configuration::TYPE_ID
+                && mNextOutput != nullptr) {
+            ALOGV("applyVolumeShaper: Attempting to forward missed operation: %s %s",
+                    configuration->toString().c_str(), operation->toString().c_str());
+            Mutex::Autolock nextLock(mNextOutput->mLock);
+
+            // recycled track should be forwarded from this AudioSink by switchToNextOutput
+            sp<AudioTrack> track = mNextOutput->mRecycledTrack;
+            if (track != nullptr) {
+                ALOGD("Forward VolumeShaper operation to recycled track %p", track.get());
+                (void)track->applyVolumeShaper(configuration, operation);
+            } else {
+                // There is a small chance that the unduck occurs after the next
+                // player has already started, but before it is registered to receive
+                // the unduck command.
+                track = mNextOutput->mTrack;
+                if (track != nullptr) {
+                    ALOGD("Forward VolumeShaper operation to track %p", track.get());
+                    (void)track->applyVolumeShaper(configuration, operation);
+                }
+            }
+        }
         status = mVolumeHandler->applyVolumeShaper(configuration, operation);
     }
     return status;
diff --git a/media/libmediaplayerservice/MetadataRetrieverClient.cpp b/media/libmediaplayerservice/MetadataRetrieverClient.cpp
index 793f476..47c5aaf 100644
--- a/media/libmediaplayerservice/MetadataRetrieverClient.cpp
+++ b/media/libmediaplayerservice/MetadataRetrieverClient.cpp
@@ -175,7 +175,7 @@
 }
 
 status_t MetadataRetrieverClient::setDataSource(
-        const sp<IDataSource>& source)
+        const sp<IDataSource>& source, const char *mime)
 {
     ALOGV("setDataSource(IDataSource)");
     Mutex::Autolock lock(mLock);
@@ -186,16 +186,18 @@
     ALOGV("player type = %d", playerType);
     sp<MediaMetadataRetrieverBase> p = createRetriever(playerType);
     if (p == NULL) return NO_INIT;
-    status_t ret = p->setDataSource(dataSource);
+    status_t ret = p->setDataSource(dataSource, mime);
     if (ret == NO_ERROR) mRetriever = p;
     return ret;
 }
 
 Mutex MetadataRetrieverClient::sLock;
 
-sp<IMemory> MetadataRetrieverClient::getFrameAtTime(int64_t timeUs, int option)
+sp<IMemory> MetadataRetrieverClient::getFrameAtTime(
+        int64_t timeUs, int option, int colorFormat, bool metaOnly)
 {
-    ALOGV("getFrameAtTime: time(%lld us) option(%d)", (long long)timeUs, option);
+    ALOGV("getFrameAtTime: time(%lld us) option(%d) colorFormat(%d), metaOnly(%d)",
+            (long long)timeUs, option, colorFormat, metaOnly);
     Mutex::Autolock lock(mLock);
     Mutex::Autolock glock(sLock);
     mThumbnail.clear();
@@ -203,7 +205,8 @@
         ALOGE("retriever is not initialized");
         return NULL;
     }
-    VideoFrame *frame = mRetriever->getFrameAtTime(timeUs, option);
+    VideoFrame *frame = mRetriever->getFrameAtTime(
+            timeUs, option, colorFormat, metaOnly);
     if (frame == NULL) {
         ALOGE("failed to capture a video frame");
         return NULL;
diff --git a/media/libmediaplayerservice/MetadataRetrieverClient.h b/media/libmediaplayerservice/MetadataRetrieverClient.h
index fe7547c..c78cd4b 100644
--- a/media/libmediaplayerservice/MetadataRetrieverClient.h
+++ b/media/libmediaplayerservice/MetadataRetrieverClient.h
@@ -49,8 +49,9 @@
             const KeyedVector<String8, String8> *headers);
 
     virtual status_t                setDataSource(int fd, int64_t offset, int64_t length);
-    virtual status_t                setDataSource(const sp<IDataSource>& source);
-    virtual sp<IMemory>             getFrameAtTime(int64_t timeUs, int option);
+    virtual status_t                setDataSource(const sp<IDataSource>& source, const char *mime);
+    virtual sp<IMemory>             getFrameAtTime(
+            int64_t timeUs, int option, int colorFormat, bool metaOnly);
     virtual sp<IMemory>             extractAlbumArt();
     virtual const char*             extractMetadata(int keyCode);
 
diff --git a/media/libmediaplayerservice/nuplayer/GenericSource.cpp b/media/libmediaplayerservice/nuplayer/GenericSource.cpp
index d83c406..aa21fff 100644
--- a/media/libmediaplayerservice/nuplayer/GenericSource.cpp
+++ b/media/libmediaplayerservice/nuplayer/GenericSource.cpp
@@ -1242,6 +1242,16 @@
         mAudioLastDequeueTimeUs = seekTimeUs;
     }
 
+    if (mSubtitleTrack.mSource != NULL) {
+        mSubtitleTrack.mPackets->clear();
+        mFetchSubtitleDataGeneration++;
+    }
+
+    if (mTimedTextTrack.mSource != NULL) {
+        mTimedTextTrack.mPackets->clear();
+        mFetchTimedTextDataGeneration++;
+    }
+
     // If currently buffering, post kWhatBufferingEnd first, so that
     // NuPlayer resumes. Otherwise, if cache hits high watermark
     // before new polling happens, no one will resume the playback.
diff --git a/media/libstagefright/ACodec.cpp b/media/libstagefright/ACodec.cpp
index 6491ceb..d4ec30d 100644
--- a/media/libstagefright/ACodec.cpp
+++ b/media/libstagefright/ACodec.cpp
@@ -49,18 +49,18 @@
 
 #include <hidlmemory/mapping.h>
 
-#include <OMX_AudioExt.h>
-#include <OMX_VideoExt.h>
-#include <OMX_Component.h>
-#include <OMX_IndexExt.h>
-#include <OMX_AsString.h>
+#include <media/openmax/OMX_AudioExt.h>
+#include <media/openmax/OMX_VideoExt.h>
+#include <media/openmax/OMX_Component.h>
+#include <media/openmax/OMX_IndexExt.h>
+#include <media/openmax/OMX_AsString.h>
 
 #include "include/avc_utils.h"
 #include "include/ACodecBufferChannel.h"
 #include "include/DataConverter.h"
 #include "include/SecureBuffer.h"
 #include "include/SharedMemoryBuffer.h"
-#include "omx/OMXUtils.h"
+#include <media/stagefright/omx/OMXUtils.h>
 
 #include <android/hidl/allocator/1.0/IAllocator.h>
 #include <android/hidl/memory/1.0/IMemory.h>
diff --git a/media/libstagefright/Android.bp b/media/libstagefright/Android.bp
index 99e6d45..19973bd 100644
--- a/media/libstagefright/Android.bp
+++ b/media/libstagefright/Android.bp
@@ -68,11 +68,6 @@
         "avc_utils.cpp",
     ],
 
-    include_dirs: [
-        "frameworks/native/include/media/openmax",
-        "frameworks/native/include/media/hardware",
-    ],
-
     shared_libs: [
         "libaudioutils",
         "libbinder",
diff --git a/media/libstagefright/CallbackDataSource.cpp b/media/libstagefright/CallbackDataSource.cpp
index 4309372..6dfe2de 100644
--- a/media/libstagefright/CallbackDataSource.cpp
+++ b/media/libstagefright/CallbackDataSource.cpp
@@ -127,10 +127,6 @@
 }
 
 ssize_t TinyCacheSource::readAt(off64_t offset, void* data, size_t size) {
-    if (size >= kCacheSize) {
-        return mSource->readAt(offset, data, size);
-    }
-
     // Check if the cache satisfies the read.
     if (mCachedOffset <= offset
             && offset < (off64_t) (mCachedOffset + mCachedSize)) {
@@ -154,6 +150,9 @@
         }
     }
 
+    if (size >= kCacheSize) {
+        return mSource->readAt(offset, data, size);
+    }
 
     // Fill the cache and copy to the caller.
     const ssize_t numRead = mSource->readAt(offset, mCache, kCacheSize);
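
The hunk above moves the large-read bypass after the cache lookup, so bytes already cached at the requested offset are served before a still-large remainder goes straight to the source. A condensed, assumed sketch of the resulting control flow, simplified from the real method:

    #include <cstring>
    #include <functional>
    #include <sys/types.h>

    // Sketch (simplified): read through a tiny one-entry cache.
    struct TinyCacheSketch {
        static constexpr size_t kCacheSize = 2048;
        std::function<ssize_t(off64_t, void*, size_t)> mSourceRead; // backing source
        off64_t mCachedOffset = 0;
        size_t mCachedSize = 0;
        char mCache[kCacheSize];

        ssize_t readAt(off64_t offset, void *data, size_t size) {
            // 1. Serve from the cache when the offset falls inside it.
            if (mCachedOffset <= offset
                    && offset < (off64_t)(mCachedOffset + mCachedSize)) {
                size_t avail = (size_t)(mCachedOffset + mCachedSize - offset);
                size_t copied = size < avail ? size : avail;
                memcpy(data, mCache + (offset - mCachedOffset), copied);
                return (ssize_t)copied; // the real code also reads any remainder
            }
            // 2. Only a read that misses the cache bypasses it when large.
            if (size >= kCacheSize) {
                return mSourceRead(offset, data, size);
            }
            // 3. Refill the cache, then copy out to the caller.
            ssize_t numRead = mSourceRead(offset, mCache, kCacheSize);
            if (numRead <= 0) return numRead;
            mCachedOffset = offset;
            mCachedSize = (size_t)numRead;
            size_t copied = size < mCachedSize ? size : mCachedSize;
            memcpy(data, mCache, copied);
            return (ssize_t)copied;
        }
    };
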
diff --git a/media/libstagefright/MPEG4Extractor.cpp b/media/libstagefright/MPEG4Extractor.cpp
index 941c759..a0d9db0 100644
--- a/media/libstagefright/MPEG4Extractor.cpp
+++ b/media/libstagefright/MPEG4Extractor.cpp
@@ -36,6 +36,7 @@
 #include <media/stagefright/foundation/AMessage.h>
 #include <media/stagefright/foundation/AUtils.h>
 #include <media/stagefright/foundation/ColorUtils.h>
+#include <media/stagefright/foundation/hexdump.h>
 #include <media/stagefright/MediaBuffer.h>
 #include <media/stagefright/MediaBufferGroup.h>
 #include <media/stagefright/MediaDefs.h>
@@ -285,45 +286,6 @@
 
 static const bool kUseHexDump = false;
 
-static void hexdump(const void *_data, size_t size) {
-    const uint8_t *data = (const uint8_t *)_data;
-    size_t offset = 0;
-    while (offset < size) {
-        printf("0x%04zx  ", offset);
-
-        size_t n = size - offset;
-        if (n > 16) {
-            n = 16;
-        }
-
-        for (size_t i = 0; i < 16; ++i) {
-            if (i == 8) {
-                printf(" ");
-            }
-
-            if (offset + i < size) {
-                printf("%02x ", data[offset + i]);
-            } else {
-                printf("   ");
-            }
-        }
-
-        printf(" ");
-
-        for (size_t i = 0; i < n; ++i) {
-            if (isprint(data[offset + i])) {
-                printf("%c", data[offset + i]);
-            } else {
-                printf(".");
-            }
-        }
-
-        printf("\n");
-
-        offset += 16;
-    }
-}
-
 static const char *FourCC2MIME(uint32_t fourcc) {
     switch (fourcc) {
         case FOURCC('m', 'p', '4', 'a'):
@@ -512,14 +474,6 @@
     return track->meta;
 }
 
-static void MakeFourCCString(uint32_t x, char *s) {
-    s[0] = x >> 24;
-    s[1] = (x >> 16) & 0xff;
-    s[2] = (x >> 8) & 0xff;
-    s[3] = x & 0xff;
-    s[4] = '\0';
-}
-
 status_t MPEG4Extractor::readMetaData() {
     if (mInitCheck != NO_INIT) {
         return mInitCheck;
@@ -941,6 +895,12 @@
                 ALOGE("moov: depth %d", depth);
                 return ERROR_MALFORMED;
             }
+
+            if (chunk_type == FOURCC('m', 'o', 'o', 'v') && mInitCheck == OK) {
+                ALOGE("duplicate moov");
+                return ERROR_MALFORMED;
+            }
+
             if (chunk_type == FOURCC('m', 'o', 'o', 'f') && !mMoofFound) {
                 // store the offset of the first segment
                 mMoofFound = true;
@@ -961,8 +921,9 @@
                     }
                 }
 
-                if (mLastTrack == NULL)
+                if (mLastTrack == NULL) {
                     return ERROR_MALFORMED;
+                }
 
                 mLastTrack->sampleTable = new SampleTable(mDataSource);
             }
@@ -1014,6 +975,12 @@
                 if (!mLastTrack->meta->findInt32(kKeyTrackID, &trackId)) {
                     mLastTrack->skipTrack = true;
                 }
+
+                status_t err = verifyTrack(mLastTrack);
+                if (err != OK) {
+                    mLastTrack->skipTrack = true;
+                }
+
                 if (mLastTrack->skipTrack) {
                     Track *cur = mFirstTrack;
 
@@ -1033,12 +1000,6 @@
 
                     return OK;
                 }
-
-                status_t err = verifyTrack(mLastTrack);
-
-                if (err != OK) {
-                    return err;
-                }
             } else if (chunk_type == FOURCC('m', 'o', 'o', 'v')) {
                 mInitCheck = OK;
 
@@ -1134,8 +1095,9 @@
             original_fourcc = ntohl(original_fourcc);
             ALOGV("read original format: %d", original_fourcc);
 
-            if (mLastTrack == NULL)
+            if (mLastTrack == NULL) {
                 return ERROR_MALFORMED;
+            }
 
             mLastTrack->meta->setCString(kKeyMIMEType, FourCC2MIME(original_fourcc));
             uint32_t num_channels = 0;
@@ -1575,8 +1537,9 @@
         case FOURCC('s', 't', 'c', 'o'):
         case FOURCC('c', 'o', '6', '4'):
         {
-            if ((mLastTrack == NULL) || (mLastTrack->sampleTable == NULL))
+            if ((mLastTrack == NULL) || (mLastTrack->sampleTable == NULL)) {
                 return ERROR_MALFORMED;
+            }
 
             status_t err =
                 mLastTrack->sampleTable->setChunkOffsetParams(
@@ -1612,8 +1575,9 @@
         case FOURCC('s', 't', 's', 'z'):
         case FOURCC('s', 't', 'z', '2'):
         {
-            if ((mLastTrack == NULL) || (mLastTrack->sampleTable == NULL))
+            if ((mLastTrack == NULL) || (mLastTrack->sampleTable == NULL)) {
                 return ERROR_MALFORMED;
+            }
 
             status_t err =
                 mLastTrack->sampleTable->setSampleSizeParams(
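
Two hardening patterns recur in the MPEG4Extractor hunks above: a second top-level 'moov' is rejected outright, and a track that fails verification is marked skipTrack and unlinked rather than aborting the whole parse. A compact sketch under assumed types; Track, Parser, and the trivial verifyTrack body are stand-ins, not the real declarations.

```cpp
#include <cstdint>

typedef int32_t status_t;
enum { OK = 0, NO_INIT = -1, ERROR_MALFORMED = -2 };

constexpr uint32_t FOURCC(char a, char b, char c, char d) {
    return ((uint32_t)a << 24) | ((uint32_t)b << 16) | ((uint32_t)c << 8) | (uint32_t)d;
}

struct Track { bool skipTrack = false; };

struct Parser {
    status_t mInitCheck = NO_INIT;

    // Stand-in for the real sanity checks run on a finished track.
    status_t verifyTrack(Track *) { return OK; }

    status_t onChunkDone(uint32_t chunk_type, Track *mLastTrack) {
        // A second top-level 'moov' means one already initialized us: reject.
        if (chunk_type == FOURCC('m', 'o', 'o', 'v') && mInitCheck == OK) {
            return ERROR_MALFORMED;
        }
        if (chunk_type == FOURCC('t', 'r', 'a', 'k')) {
            // A broken track is marked and later unlinked; parsing continues
            // so the remaining tracks stay usable.
            if (verifyTrack(mLastTrack) != OK) {
                mLastTrack->skipTrack = true;
            }
        }
        return OK;
    }
};
```
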
diff --git a/media/libstagefright/MPEG4Writer.cpp b/media/libstagefright/MPEG4Writer.cpp
index 92399f1..93d4f57 100644
--- a/media/libstagefright/MPEG4Writer.cpp
+++ b/media/libstagefright/MPEG4Writer.cpp
@@ -1860,10 +1860,12 @@
             || !strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AAC)) {
         if (mMeta->findData(kKeyESDS, &type, &data, &size)) {
             ESDS esds(data, size);
-            if (esds.getCodecSpecificInfo(&data, &size) != OK) {
-                data = NULL;
-                size = 0;
+            if (esds.getCodecSpecificInfo(&data, &size) == OK &&
+                    data != NULL &&
+                    copyCodecSpecificData((uint8_t*)data, size) == OK) {
+                mGotAllCodecSpecificData = true;
             }
+            return;
         }
     }
     if (data != NULL && copyCodecSpecificData((uint8_t *)data, size) == OK) {
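
The MPEG4Writer hunk narrows the ESDS path: codec-specific data is copied only when extraction succeeds and yields a non-null pointer, and the function returns from the ESDS branch either way instead of falling through with a possibly stale data/size pair. A sketch with stand-in helpers; the ESDS and copyCodecSpecificData bodies here are placeholders.

```cpp
#include <cstdint>
#include <cstddef>

typedef int32_t status_t;
enum { OK = 0, ERROR_MALFORMED = -2 };

// Stand-in for the real ESDS parser.
struct ESDS {
    ESDS(const void *, size_t) {}
    status_t getCodecSpecificInfo(const void **data, size_t *size) {
        *data = nullptr;
        *size = 0;
        return ERROR_MALFORMED;
    }
};

struct Writer {
    bool mGotAllCodecSpecificData = false;

    status_t copyCodecSpecificData(const uint8_t *, size_t) { return OK; }

    void parseCodecSpecificData(const void *data, size_t size) {
        ESDS esds(data, size);
        // Success only when extraction, the null check, and the copy all pass.
        if (esds.getCodecSpecificInfo(&data, &size) == OK
                && data != nullptr
                && copyCodecSpecificData((const uint8_t *)data, size) == OK) {
            mGotAllCodecSpecificData = true;
        }
        // Return here either way; never fall through to the generic copy
        // with a data/size pair the failed extraction may have clobbered.
    }
};
```
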
diff --git a/media/libstagefright/OggExtractor.cpp b/media/libstagefright/OggExtractor.cpp
index e31c37c..810b0d6 100644
--- a/media/libstagefright/OggExtractor.cpp
+++ b/media/libstagefright/OggExtractor.cpp
@@ -578,6 +578,10 @@
             }
             // First two pages are header pages.
             if (err == ERROR_END_OF_STREAM || mCurrentPage.mPageNo > 2) {
+                if (mBuf != NULL) {
+                    mBuf->release();
+                    mBuf = NULL;
+                }
                 break;
             }
             curGranulePosition = mCurrentPage.mGranulePosition;
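
The OggExtractor fix releases the scratch buffer on the early 'break' path too, where it previously leaked. The shape of the fix, with a hypothetical Buffer type standing in for the real MediaBuffer:

```cpp
// Hypothetical stand-in for the buffer being scanned into.
struct Buffer {
    void release() { delete this; }
};

void drainHeaderPages(bool atEndOfStream, long pageNo) {
    Buffer *mBuf = new Buffer;
    while (true) {
        // ... read the next page into mBuf ...
        if (atEndOfStream || pageNo > 2) {
            if (mBuf != nullptr) {
                mBuf->release();  // previously leaked on this early exit
                mBuf = nullptr;
            }
            break;
        }
        ++pageNo;
    }
}
```
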
diff --git a/media/libstagefright/StagefrightMediaScanner.cpp b/media/libstagefright/StagefrightMediaScanner.cpp
index f0c27ac..4ff2bfe 100644
--- a/media/libstagefright/StagefrightMediaScanner.cpp
+++ b/media/libstagefright/StagefrightMediaScanner.cpp
@@ -85,7 +85,8 @@
     status_t status;
     if (fd < 0) {
         // couldn't open it locally, maybe the media server can?
-        status = mRetriever->setDataSource(NULL /* httpService */, path);
+        sp<IMediaHTTPService> nullService;
+        status = mRetriever->setDataSource(nullService, path);
     } else {
         status = mRetriever->setDataSource(fd, 0, 0x7ffffffffffffffL);
         close(fd);
diff --git a/media/libstagefright/StagefrightMetadataRetriever.cpp b/media/libstagefright/StagefrightMetadataRetriever.cpp
index 03dc9df..0281563 100644
--- a/media/libstagefright/StagefrightMetadataRetriever.cpp
+++ b/media/libstagefright/StagefrightMetadataRetriever.cpp
@@ -121,12 +121,12 @@
 }
 
 status_t StagefrightMetadataRetriever::setDataSource(
-        const sp<DataSource>& source) {
+        const sp<DataSource>& source, const char *mime) {
     ALOGV("setDataSource(DataSource)");
 
     clearMetadata();
     mSource = source;
-    mExtractor = MediaExtractor::Create(mSource);
+    mExtractor = MediaExtractor::Create(mSource, mime);
 
     if (mExtractor == NULL) {
         ALOGE("Failed to instantiate a MediaExtractor.");
@@ -142,7 +142,9 @@
         const sp<MetaData> &trackMeta,
         const sp<IMediaSource> &source,
         int64_t frameTimeUs,
-        int seekMode) {
+        int seekMode,
+        int /*colorFormat*/,
+        bool /*metaOnly*/) {
 
     sp<MetaData> format = source->getFormat();
 
@@ -422,15 +424,8 @@
                 && trackMeta->findInt32(kKeyDisplayHeight, &height)
                 && frame->mDisplayWidth > 0 && frame->mDisplayHeight > 0
                 && width > 0 && height > 0) {
-            if (frame->mDisplayHeight * (int64_t)width / height > (int64_t)frame->mDisplayWidth) {
-                frame->mDisplayHeight =
-                        (int32_t)(height * (int64_t)frame->mDisplayWidth / width);
-            } else {
-                frame->mDisplayWidth =
-                        (int32_t)(frame->mDisplayHeight * (int64_t)width / height);
-            }
-            ALOGV("thumbNail width and height are overridden to %d x %d",
-                    frame->mDisplayWidth, frame->mDisplayHeight);
+            frame->mDisplayWidth = width;
+            frame->mDisplayHeight = height;
         }
     }
 
@@ -470,9 +465,10 @@
 }
 
 VideoFrame *StagefrightMetadataRetriever::getFrameAtTime(
-        int64_t timeUs, int option) {
+        int64_t timeUs, int option, int colorFormat, bool metaOnly) {
 
-    ALOGV("getFrameAtTime: %" PRId64 " us option: %d", timeUs, option);
+    ALOGV("getFrameAtTime: %" PRId64 " us option: %d colorFormat: %d, metaOnly: %d",
+            timeUs, option, colorFormat, metaOnly);
 
     if (mExtractor.get() == NULL) {
         ALOGV("no extractor.");
@@ -540,8 +536,8 @@
 
     for (size_t i = 0; i < matchingCodecs.size(); ++i) {
         const AString &componentName = matchingCodecs[i];
-        VideoFrame *frame =
-            extractVideoFrame(componentName, trackMeta, source, timeUs, option);
+        VideoFrame *frame = extractVideoFrame(
+                componentName, trackMeta, source, timeUs, option, colorFormat, metaOnly);
 
         if (frame != NULL) {
             return frame;
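
The thumbnail hunk replaces aspect-ratio-preserving clamping with a direct override from the track's display keys. Side by side, using plain structs; the old math is copied from the removed lines, and the sample numbers are illustrative.

```cpp
#include <cstdint>
#include <cstdio>

struct Frame { int32_t mDisplayWidth, mDisplayHeight; };

// Old behavior (the removed lines): shrink one side to keep aspect ratio.
void overrideOld(Frame &f, int32_t width, int32_t height) {
    if (f.mDisplayHeight * (int64_t)width / height > (int64_t)f.mDisplayWidth) {
        f.mDisplayHeight = (int32_t)(height * (int64_t)f.mDisplayWidth / width);
    } else {
        f.mDisplayWidth = (int32_t)(f.mDisplayHeight * (int64_t)width / height);
    }
}

// New behavior: the track's display keys win outright.
void overrideNew(Frame &f, int32_t width, int32_t height) {
    f.mDisplayWidth = width;
    f.mDisplayHeight = height;
}

int main() {
    Frame a{640, 480}, b{640, 480};
    overrideOld(a, 1280, 720);  // a: 640 x 360 (clamped to 16:9)
    overrideNew(b, 1280, 720);  // b: 1280 x 720 (taken as-is)
    printf("%d x %d vs %d x %d\n",
           a.mDisplayWidth, a.mDisplayHeight, b.mDisplayWidth, b.mDisplayHeight);
    return 0;
}
```
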
diff --git a/media/libstagefright/Utils.cpp b/media/libstagefright/Utils.cpp
index 0aea8e1..a3bda5d 100644
--- a/media/libstagefright/Utils.cpp
+++ b/media/libstagefright/Utils.cpp
@@ -1874,5 +1874,13 @@
     return result;
 }
 
+void MakeFourCCString(uint32_t x, char *s) {
+    s[0] = x >> 24;
+    s[1] = (x >> 16) & 0xff;
+    s[2] = (x >> 8) & 0xff;
+    s[3] = x & 0xff;
+    s[4] = '\0';
+}
+
 }  // namespace android
 
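
For reference, the relocated MakeFourCCString helper simply unpacks a big-endian fourcc into a printable, NUL-terminated string; a small usage sketch:

```cpp
#include <cstdint>
#include <cstdio>

void MakeFourCCString(uint32_t x, char *s) {
    s[0] = x >> 24;
    s[1] = (x >> 16) & 0xff;
    s[2] = (x >> 8) & 0xff;
    s[3] = x & 0xff;
    s[4] = '\0';
}

int main() {
    char name[5];
    MakeFourCCString(0x6d6f6f76, name);  // 'm' 'o' 'o' 'v'
    printf("%s\n", name);                // prints "moov"
    return 0;
}
```
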
diff --git a/media/libstagefright/codecs/aacdec/SoftAAC2.h b/media/libstagefright/codecs/aacdec/SoftAAC2.h
index a1cf285..73a3965 100644
--- a/media/libstagefright/codecs/aacdec/SoftAAC2.h
+++ b/media/libstagefright/codecs/aacdec/SoftAAC2.h
@@ -17,7 +17,7 @@
 #ifndef SOFT_AAC_2_H_
 #define SOFT_AAC_2_H_
 
-#include "SimpleSoftOMXComponent.h"
+#include <media/stagefright/omx/SimpleSoftOMXComponent.h>
 
 #include "aacdecoder_lib.h"
 #include "DrcPresModeWrap.h"
diff --git a/media/libstagefright/codecs/aacenc/SoftAACEncoder.h b/media/libstagefright/codecs/aacenc/SoftAACEncoder.h
index 981cbbb..e64c1b7 100644
--- a/media/libstagefright/codecs/aacenc/SoftAACEncoder.h
+++ b/media/libstagefright/codecs/aacenc/SoftAACEncoder.h
@@ -18,7 +18,7 @@
 
 #define SOFT_AAC_ENCODER_H_
 
-#include "SimpleSoftOMXComponent.h"
+#include <media/stagefright/omx/SimpleSoftOMXComponent.h>
 
 struct VO_AUDIO_CODECAPI;
 struct VO_MEM_OPERATOR;
diff --git a/media/libstagefright/codecs/aacenc/SoftAACEncoder2.h b/media/libstagefright/codecs/aacenc/SoftAACEncoder2.h
index 123fd25..681dcf2 100644
--- a/media/libstagefright/codecs/aacenc/SoftAACEncoder2.h
+++ b/media/libstagefright/codecs/aacenc/SoftAACEncoder2.h
@@ -18,7 +18,7 @@
 
 #define SOFT_AAC_ENCODER_2_H_
 
-#include "SimpleSoftOMXComponent.h"
+#include <media/stagefright/omx/SimpleSoftOMXComponent.h>
 
 #include "aacenc_lib.h"
 
diff --git a/media/libstagefright/codecs/amrnb/dec/SoftAMR.h b/media/libstagefright/codecs/amrnb/dec/SoftAMR.h
index 758d6ac..869b81d 100644
--- a/media/libstagefright/codecs/amrnb/dec/SoftAMR.h
+++ b/media/libstagefright/codecs/amrnb/dec/SoftAMR.h
@@ -18,7 +18,7 @@
 
 #define SOFT_AMR_H_
 
-#include "SimpleSoftOMXComponent.h"
+#include <media/stagefright/omx/SimpleSoftOMXComponent.h>
 
 namespace android {
 
diff --git a/media/libstagefright/codecs/amrnb/enc/SoftAMRNBEncoder.h b/media/libstagefright/codecs/amrnb/enc/SoftAMRNBEncoder.h
index 50178c4..c73e4dd 100644
--- a/media/libstagefright/codecs/amrnb/enc/SoftAMRNBEncoder.h
+++ b/media/libstagefright/codecs/amrnb/enc/SoftAMRNBEncoder.h
@@ -18,7 +18,7 @@
 
 #define SOFT_AMRNB_ENCODER_H_
 
-#include "SimpleSoftOMXComponent.h"
+#include <media/stagefright/omx/SimpleSoftOMXComponent.h>
 
 namespace android {
 
diff --git a/media/libstagefright/codecs/amrwbenc/SoftAMRWBEncoder.h b/media/libstagefright/codecs/amrwbenc/SoftAMRWBEncoder.h
index d0c1dab..8950a8c 100644
--- a/media/libstagefright/codecs/amrwbenc/SoftAMRWBEncoder.h
+++ b/media/libstagefright/codecs/amrwbenc/SoftAMRWBEncoder.h
@@ -18,7 +18,7 @@
 
 #define SOFT_AMRWB_ENCODER_H_
 
-#include "SimpleSoftOMXComponent.h"
+#include <media/stagefright/omx/SimpleSoftOMXComponent.h>
 
 #include "voAMRWB.h"
 
diff --git a/media/libstagefright/codecs/avcdec/SoftAVCDec.h b/media/libstagefright/codecs/avcdec/SoftAVCDec.h
index 18b7556..679ed3e 100644
--- a/media/libstagefright/codecs/avcdec/SoftAVCDec.h
+++ b/media/libstagefright/codecs/avcdec/SoftAVCDec.h
@@ -18,7 +18,7 @@
 
 #define SOFT_H264_DEC_H_
 
-#include "SoftVideoDecoderOMXComponent.h"
+#include <media/stagefright/omx/SoftVideoDecoderOMXComponent.h>
 #include <sys/time.h>
 
 namespace android {
diff --git a/media/libstagefright/codecs/avcenc/SoftAVCEnc.h b/media/libstagefright/codecs/avcenc/SoftAVCEnc.h
index 818e4a1..a43cdf1 100644
--- a/media/libstagefright/codecs/avcenc/SoftAVCEnc.h
+++ b/media/libstagefright/codecs/avcenc/SoftAVCEnc.h
@@ -21,7 +21,7 @@
 #include <media/stagefright/foundation/ABase.h>
 #include <utils/Vector.h>
 
-#include "SoftVideoEncoderOMXComponent.h"
+#include <media/stagefright/omx/SoftVideoEncoderOMXComponent.h>
 
 namespace android {
 
diff --git a/media/libstagefright/codecs/flac/dec/SoftFlacDecoder.h b/media/libstagefright/codecs/flac/dec/SoftFlacDecoder.h
index c09081d..4a21c34 100644
--- a/media/libstagefright/codecs/flac/dec/SoftFlacDecoder.h
+++ b/media/libstagefright/codecs/flac/dec/SoftFlacDecoder.h
@@ -18,7 +18,7 @@
 #define SOFT_FLAC_DECODER_H
 
 #include "FLACDecoder.h"
-#include "SimpleSoftOMXComponent.h"
+#include <media/stagefright/omx/SimpleSoftOMXComponent.h>
 
 namespace android {
 
diff --git a/media/libstagefright/codecs/flac/enc/SoftFlacEncoder.h b/media/libstagefright/codecs/flac/enc/SoftFlacEncoder.h
index 6027f76..f4f0655 100644
--- a/media/libstagefright/codecs/flac/enc/SoftFlacEncoder.h
+++ b/media/libstagefright/codecs/flac/enc/SoftFlacEncoder.h
@@ -18,7 +18,7 @@
 
 #define SOFT_FLAC_ENC_H_
 
-#include "SimpleSoftOMXComponent.h"
+#include <media/stagefright/omx/SimpleSoftOMXComponent.h>
 
 #include "FLAC/stream_encoder.h"
 
diff --git a/media/libstagefright/codecs/g711/dec/SoftG711.h b/media/libstagefright/codecs/g711/dec/SoftG711.h
index 16b6340..3ece246 100644
--- a/media/libstagefright/codecs/g711/dec/SoftG711.h
+++ b/media/libstagefright/codecs/g711/dec/SoftG711.h
@@ -18,7 +18,7 @@
 
 #define SOFT_G711_H_
 
-#include "SimpleSoftOMXComponent.h"
+#include <media/stagefright/omx/SimpleSoftOMXComponent.h>
 
 namespace android {
 
diff --git a/media/libstagefright/codecs/gsm/dec/SoftGSM.h b/media/libstagefright/codecs/gsm/dec/SoftGSM.h
index 0303dea..ef86915 100644
--- a/media/libstagefright/codecs/gsm/dec/SoftGSM.h
+++ b/media/libstagefright/codecs/gsm/dec/SoftGSM.h
@@ -18,7 +18,7 @@
 
 #define SOFT_GSM_H_
 
-#include "SimpleSoftOMXComponent.h"
+#include <media/stagefright/omx/SimpleSoftOMXComponent.h>
 
 extern "C" {
 #include "gsm.h"
diff --git a/media/libstagefright/codecs/hevcdec/SoftHEVC.h b/media/libstagefright/codecs/hevcdec/SoftHEVC.h
index e7c2127..5800490 100644
--- a/media/libstagefright/codecs/hevcdec/SoftHEVC.h
+++ b/media/libstagefright/codecs/hevcdec/SoftHEVC.h
@@ -18,7 +18,7 @@
 
 #define SOFT_HEVC_H_
 
-#include "SoftVideoDecoderOMXComponent.h"
+#include <media/stagefright/omx/SoftVideoDecoderOMXComponent.h>
 #include <sys/time.h>
 
 namespace android {
diff --git a/media/libstagefright/codecs/m4v_h263/dec/SoftMPEG4.h b/media/libstagefright/codecs/m4v_h263/dec/SoftMPEG4.h
index 4114e7d..e399ac9 100644
--- a/media/libstagefright/codecs/m4v_h263/dec/SoftMPEG4.h
+++ b/media/libstagefright/codecs/m4v_h263/dec/SoftMPEG4.h
@@ -18,7 +18,7 @@
 
 #define SOFT_MPEG4_H_
 
-#include "SoftVideoDecoderOMXComponent.h"
+#include <media/stagefright/omx/SoftVideoDecoderOMXComponent.h>
 
 struct tagvideoDecControls;
 
diff --git a/media/libstagefright/codecs/m4v_h263/enc/SoftMPEG4Encoder.h b/media/libstagefright/codecs/m4v_h263/enc/SoftMPEG4Encoder.h
index ae8cb6f..00f2dd3 100644
--- a/media/libstagefright/codecs/m4v_h263/enc/SoftMPEG4Encoder.h
+++ b/media/libstagefright/codecs/m4v_h263/enc/SoftMPEG4Encoder.h
@@ -19,7 +19,7 @@
 
 #include <media/stagefright/MediaBuffer.h>
 #include <media/stagefright/foundation/ABase.h>
-#include "SoftVideoEncoderOMXComponent.h"
+#include <media/stagefright/omx/SoftVideoEncoderOMXComponent.h>
 #include "mp4enc_api.h"
 
 
diff --git a/media/libstagefright/codecs/mp3dec/SoftMP3.h b/media/libstagefright/codecs/mp3dec/SoftMP3.h
index 3bfa6c7..976fd00 100644
--- a/media/libstagefright/codecs/mp3dec/SoftMP3.h
+++ b/media/libstagefright/codecs/mp3dec/SoftMP3.h
@@ -18,7 +18,7 @@
 
 #define SOFT_MP3_H_
 
-#include "SimpleSoftOMXComponent.h"
+#include <media/stagefright/omx/SimpleSoftOMXComponent.h>
 
 struct tPVMP3DecoderExternal;
 
diff --git a/media/libstagefright/codecs/mpeg2dec/SoftMPEG2.cpp b/media/libstagefright/codecs/mpeg2dec/SoftMPEG2.cpp
index 9a69226..9d5f342 100644
--- a/media/libstagefright/codecs/mpeg2dec/SoftMPEG2.cpp
+++ b/media/libstagefright/codecs/mpeg2dec/SoftMPEG2.cpp
@@ -789,7 +789,7 @@
 
             if (s_dec_op.u4_output_present) {
                 ssize_t timeStampIdx;
-                outHeader->nFilledLen = (mWidth * mHeight * 3) / 2;
+                outHeader->nFilledLen = (outputBufferWidth() * outputBufferHeight() * 3) / 2;
 
                 timeStampIdx = getMinTimestampIdx(mTimeStamps, mTimeStampsValid);
                 if (timeStampIdx < 0) {
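
The SoftMPEG2 one-liner in numbers: a YUV420 frame occupies width * height * 3 / 2 bytes, and the allocated output buffer is often height-aligned, so nFilledLen must follow the buffer geometry rather than the stream's. The 1088-row alignment below is an illustrative assumption.

```cpp
#include <cstdio>

size_t yuv420Size(size_t w, size_t h) { return (w * h * 3) / 2; }

int main() {
    printf("%zu\n", yuv420Size(1920, 1080));  // 3110400: stream dimensions
    printf("%zu\n", yuv420Size(1920, 1088));  // 3133440: aligned buffer dimensions
    return 0;
}
```
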
diff --git a/media/libstagefright/codecs/mpeg2dec/SoftMPEG2.h b/media/libstagefright/codecs/mpeg2dec/SoftMPEG2.h
index 6729a54..338fc30 100644
--- a/media/libstagefright/codecs/mpeg2dec/SoftMPEG2.h
+++ b/media/libstagefright/codecs/mpeg2dec/SoftMPEG2.h
@@ -18,7 +18,7 @@
 
 #define SOFT_MPEG2_H_
 
-#include "SoftVideoDecoderOMXComponent.h"
+#include <media/stagefright/omx/SoftVideoDecoderOMXComponent.h>
 #include <sys/time.h>
 
 namespace android {
diff --git a/media/libstagefright/codecs/on2/dec/SoftVPX.h b/media/libstagefright/codecs/on2/dec/SoftVPX.h
index 84cf79c..d6bb902 100644
--- a/media/libstagefright/codecs/on2/dec/SoftVPX.h
+++ b/media/libstagefright/codecs/on2/dec/SoftVPX.h
@@ -18,7 +18,7 @@
 
 #define SOFT_VPX_H_
 
-#include "SoftVideoDecoderOMXComponent.h"
+#include <media/stagefright/omx/SoftVideoDecoderOMXComponent.h>
 
 #include "vpx/vpx_decoder.h"
 #include "vpx/vpx_codec.h"
diff --git a/media/libstagefright/codecs/on2/enc/SoftVPXEncoder.h b/media/libstagefright/codecs/on2/enc/SoftVPXEncoder.h
index 86dfad7..dd86d36 100644
--- a/media/libstagefright/codecs/on2/enc/SoftVPXEncoder.h
+++ b/media/libstagefright/codecs/on2/enc/SoftVPXEncoder.h
@@ -18,7 +18,7 @@
 
 #define SOFT_VPX_ENCODER_H_
 
-#include "SoftVideoEncoderOMXComponent.h"
+#include <media/stagefright/omx/SoftVideoEncoderOMXComponent.h>
 
 #include <OMX_VideoExt.h>
 #include <OMX_IndexExt.h>
diff --git a/media/libstagefright/codecs/on2/h264dec/SoftAVC.h b/media/libstagefright/codecs/on2/h264dec/SoftAVC.h
index b8c1807..fad988b 100644
--- a/media/libstagefright/codecs/on2/h264dec/SoftAVC.h
+++ b/media/libstagefright/codecs/on2/h264dec/SoftAVC.h
@@ -18,7 +18,7 @@
 
 #define SOFT_AVC_H_
 
-#include "SoftVideoDecoderOMXComponent.h"
+#include <media/stagefright/omx/SoftVideoDecoderOMXComponent.h>
 #include <utils/KeyedVector.h>
 
 #include "H264SwDecApi.h"
diff --git a/media/libstagefright/codecs/opus/dec/SoftOpus.h b/media/libstagefright/codecs/opus/dec/SoftOpus.h
index 97f6561..fab925d 100644
--- a/media/libstagefright/codecs/opus/dec/SoftOpus.h
+++ b/media/libstagefright/codecs/opus/dec/SoftOpus.h
@@ -23,7 +23,7 @@
 
 #define SOFT_OPUS_H_
 
-#include "SimpleSoftOMXComponent.h"
+#include <media/stagefright/omx/SimpleSoftOMXComponent.h>
 
 struct OpusMSDecoder;
 
diff --git a/media/libstagefright/codecs/raw/SoftRaw.h b/media/libstagefright/codecs/raw/SoftRaw.h
index 80906b4..ebc2741 100644
--- a/media/libstagefright/codecs/raw/SoftRaw.h
+++ b/media/libstagefright/codecs/raw/SoftRaw.h
@@ -18,7 +18,7 @@
 
 #define SOFT_RAW_H_
 
-#include "SimpleSoftOMXComponent.h"
+#include <media/stagefright/omx/SimpleSoftOMXComponent.h>
 
 struct tPVMP4AudioDecoderExternal;
 
diff --git a/media/libstagefright/codecs/vorbis/dec/SoftVorbis.h b/media/libstagefright/codecs/vorbis/dec/SoftVorbis.h
index 30d137b..52d1632 100644
--- a/media/libstagefright/codecs/vorbis/dec/SoftVorbis.h
+++ b/media/libstagefright/codecs/vorbis/dec/SoftVorbis.h
@@ -18,7 +18,7 @@
 
 #define SOFT_VORBIS_H_
 
-#include "SimpleSoftOMXComponent.h"
+#include <media/stagefright/omx/SimpleSoftOMXComponent.h>
 
 struct vorbis_dsp_state;
 struct vorbis_info;
diff --git a/media/libstagefright/colorconversion/ColorConverter.cpp b/media/libstagefright/colorconversion/ColorConverter.cpp
index 3ca7cc0..0982006 100644
--- a/media/libstagefright/colorconversion/ColorConverter.cpp
+++ b/media/libstagefright/colorconversion/ColorConverter.cpp
@@ -23,6 +23,7 @@
 #include <media/stagefright/MediaErrors.h>
 
 #include "libyuv/convert_from.h"
+#include "libyuv/video_common.h"
 
 #define USE_LIBYUV
 
@@ -41,17 +42,17 @@
 }
 
 bool ColorConverter::isValid() const {
-    if (mDstFormat != OMX_COLOR_Format16bitRGB565) {
-        return false;
-    }
-
     switch (mSrcFormat) {
         case OMX_COLOR_FormatYUV420Planar:
+            return mDstFormat == OMX_COLOR_Format16bitRGB565
+                    || mDstFormat == OMX_COLOR_Format32BitRGBA8888
+                    || mDstFormat == OMX_COLOR_Format32bitBGRA8888;
+
         case OMX_COLOR_FormatCbYCrY:
         case OMX_QCOM_COLOR_FormatYVU420SemiPlanar:
         case OMX_COLOR_FormatYUV420SemiPlanar:
         case OMX_TI_COLOR_FormatYUV420PackedSemiPlanar:
-            return true;
+            return mDstFormat == OMX_COLOR_Format16bitRGB565;
 
         default:
             return false;
@@ -62,14 +63,43 @@
         void *bits,
         size_t width, size_t height,
         size_t cropLeft, size_t cropTop,
-        size_t cropRight, size_t cropBottom)
+        size_t cropRight, size_t cropBottom,
+        OMX_COLOR_FORMATTYPE colorFormat)
     : mBits(bits),
+      mColorFormat(colorFormat),
       mWidth(width),
       mHeight(height),
       mCropLeft(cropLeft),
       mCropTop(cropTop),
       mCropRight(cropRight),
       mCropBottom(cropBottom) {
+    switch (mColorFormat) {
+    case OMX_COLOR_Format16bitRGB565:
+        mBpp = 2;
+        mStride = 2 * mWidth;
+        break;
+
+    case OMX_COLOR_Format32bitBGRA8888:
+    case OMX_COLOR_Format32BitRGBA8888:
+        mBpp = 4;
+        mStride = 4 * mWidth;
+        break;
+
+    case OMX_COLOR_FormatYUV420Planar:
+    case OMX_COLOR_FormatCbYCrY:
+    case OMX_QCOM_COLOR_FormatYVU420SemiPlanar:
+    case OMX_COLOR_FormatYUV420SemiPlanar:
+    case OMX_TI_COLOR_FormatYUV420PackedSemiPlanar:
+        mBpp = 1;
+        mStride = mWidth;
+        break;
+
+    default:
+        ALOGE("Unsupported color format %d", mColorFormat);
+        mBpp = 1;
+        mStride = mWidth;
+        break;
+    }
 }
 
 size_t ColorConverter::BitmapParams::cropWidth() const {
@@ -89,19 +119,15 @@
         size_t dstWidth, size_t dstHeight,
         size_t dstCropLeft, size_t dstCropTop,
         size_t dstCropRight, size_t dstCropBottom) {
-    if (mDstFormat != OMX_COLOR_Format16bitRGB565) {
-        return ERROR_UNSUPPORTED;
-    }
-
     BitmapParams src(
             const_cast<void *>(srcBits),
             srcWidth, srcHeight,
-            srcCropLeft, srcCropTop, srcCropRight, srcCropBottom);
+            srcCropLeft, srcCropTop, srcCropRight, srcCropBottom, mSrcFormat);
 
     BitmapParams dst(
             dstBits,
             dstWidth, dstHeight,
-            dstCropLeft, dstCropTop, dstCropRight, dstCropBottom);
+            dstCropLeft, dstCropTop, dstCropRight, dstCropBottom, mDstFormat);
 
     status_t err;
 
@@ -212,26 +238,104 @@
         return ERROR_UNSUPPORTED;
     }
 
-    uint16_t *dst_ptr = (uint16_t *)dst.mBits
-        + dst.mCropTop * dst.mWidth + dst.mCropLeft;
+    uint8_t *dst_ptr = (uint8_t *)dst.mBits
+        + dst.mCropTop * dst.mStride + dst.mCropLeft * dst.mBpp;
 
     const uint8_t *src_y =
-        (const uint8_t *)src.mBits + src.mCropTop * src.mWidth + src.mCropLeft;
+        (const uint8_t *)src.mBits + src.mCropTop * src.mStride + src.mCropLeft;
 
     const uint8_t *src_u =
-        (const uint8_t *)src_y + src.mWidth * src.mHeight
-        + src.mCropTop * (src.mWidth / 2) + src.mCropLeft / 2;
+        (const uint8_t *)src.mBits + src.mStride * src.mHeight
+        + (src.mCropTop / 2) * (src.mStride / 2) + (src.mCropLeft / 2);
 
     const uint8_t *src_v =
-        src_u + (src.mWidth / 2) * (src.mHeight / 2);
+        src_u + (src.mStride / 2) * (src.mHeight / 2);
 
+    switch (mDstFormat) {
+    case OMX_COLOR_Format16bitRGB565:
+        libyuv::I420ToRGB565(src_y, src.mStride, src_u, src.mStride / 2, src_v, src.mStride / 2,
+                (uint8 *)dst_ptr, dst.mStride, src.cropWidth(), src.cropHeight());
+        break;
 
-    libyuv::I420ToRGB565(src_y, src.mWidth, src_u, src.mWidth / 2, src_v, src.mWidth / 2,
-            (uint8 *)dst_ptr, dst.mWidth * 2, dst.mWidth, dst.mHeight);
+    case OMX_COLOR_Format32BitRGBA8888:
+        libyuv::ConvertFromI420(src_y, src.mStride, src_u, src.mStride / 2, src_v, src.mStride / 2,
+                (uint8 *)dst_ptr, dst.mStride, src.cropWidth(), src.cropHeight(), libyuv::FOURCC_ABGR);
+        break;
+
+    case OMX_COLOR_Format32bitBGRA8888:
+        libyuv::ConvertFromI420(src_y, src.mStride, src_u, src.mStride / 2, src_v, src.mStride / 2,
+                (uint8 *)dst_ptr, dst.mStride, src.cropWidth(), src.cropHeight(), libyuv::FOURCC_ARGB);
+        break;
+
+    default:
+        return ERROR_UNSUPPORTED;
+    }
 
     return OK;
 }
 
+void ColorConverter::writeToDst(
+        void *dst_ptr, uint8_t *kAdjustedClip, bool uncropped,
+        signed r1, signed g1, signed b1,
+        signed r2, signed g2, signed b2) {
+    switch (mDstFormat) {
+    case OMX_COLOR_Format16bitRGB565:
+    {
+        uint32_t rgb1 =
+            ((kAdjustedClip[r1] >> 3) << 11)
+            | ((kAdjustedClip[g1] >> 2) << 5)
+            | (kAdjustedClip[b1] >> 3);
+
+        if (uncropped) {
+            uint32_t rgb2 =
+                ((kAdjustedClip[r2] >> 3) << 11)
+                | ((kAdjustedClip[g2] >> 2) << 5)
+                | (kAdjustedClip[b2] >> 3);
+
+            *(uint32_t *)dst_ptr = (rgb2 << 16) | rgb1;
+        } else {
+            *(uint16_t *)dst_ptr = rgb1;
+        }
+        break;
+    }
+    case OMX_COLOR_Format32BitRGBA8888:
+    {
+        ((uint32_t *)dst_ptr)[0] =
+                (kAdjustedClip[r1])
+                | (kAdjustedClip[g1] << 8)
+                | (kAdjustedClip[b1] << 16)
+                | (0xFF << 24);
+
+        if (uncropped) {
+            ((uint32_t *)dst_ptr)[1] =
+                    (kAdjustedClip[r2])
+                    | (kAdjustedClip[g2] << 8)
+                    | (kAdjustedClip[b2] << 16)
+                    | (0xFF << 24);
+        }
+        break;
+    }
+    case OMX_COLOR_Format32bitBGRA8888:
+    {
+        ((uint32_t *)dst_ptr)[0] =
+                (kAdjustedClip[b1])
+                | (kAdjustedClip[g1] << 8)
+                | (kAdjustedClip[r1] << 16)
+                | (0xFF << 24);
+
+        if (uncropped) {
+            ((uint32_t *)dst_ptr)[1] =
+                    (kAdjustedClip[b2])
+                    | (kAdjustedClip[g2] << 8)
+                    | (kAdjustedClip[r2] << 16)
+                    | (0xFF << 24);
+        }
+        break;
+    }
+    default:
+        break;
+    }
+}
 status_t ColorConverter::convertYUV420Planar(
         const BitmapParams &src, const BitmapParams &dst) {
     if (!((src.mCropLeft & 1) == 0
@@ -242,18 +346,18 @@
 
     uint8_t *kAdjustedClip = initClip();
 
-    uint16_t *dst_ptr = (uint16_t *)dst.mBits
-        + dst.mCropTop * dst.mWidth + dst.mCropLeft;
+    uint8_t *dst_ptr = (uint8_t *)dst.mBits
+        + dst.mCropTop * dst.mStride + dst.mCropLeft * dst.mBpp;
 
     const uint8_t *src_y =
-        (const uint8_t *)src.mBits + src.mCropTop * src.mWidth + src.mCropLeft;
+        (const uint8_t *)src.mBits + src.mCropTop * src.mStride + src.mCropLeft;
 
     const uint8_t *src_u =
-        (const uint8_t *)src_y + src.mWidth * src.mHeight
-        + src.mCropTop * (src.mWidth / 2) + src.mCropLeft / 2;
+        (const uint8_t *)src.mBits + src.mStride * src.mHeight
+        + (src.mCropTop / 2) * (src.mStride / 2) + src.mCropLeft / 2;
 
     const uint8_t *src_v =
-        src_u + (src.mWidth / 2) * (src.mHeight / 2);
+        src_u + (src.mStride / 2) * (src.mHeight / 2);
 
     for (size_t y = 0; y < src.cropHeight(); ++y) {
         for (size_t x = 0; x < src.cropWidth(); x += 2) {
@@ -296,31 +400,19 @@
             signed g2 = (tmp2 + v_g + u_g) / 256;
             signed r2 = (tmp2 + v_r) / 256;
 
-            uint32_t rgb1 =
-                ((kAdjustedClip[r1] >> 3) << 11)
-                | ((kAdjustedClip[g1] >> 2) << 5)
-                | (kAdjustedClip[b1] >> 3);
-
-            uint32_t rgb2 =
-                ((kAdjustedClip[r2] >> 3) << 11)
-                | ((kAdjustedClip[g2] >> 2) << 5)
-                | (kAdjustedClip[b2] >> 3);
-
-            if (x + 1 < src.cropWidth()) {
-                *(uint32_t *)(&dst_ptr[x]) = (rgb2 << 16) | rgb1;
-            } else {
-                dst_ptr[x] = rgb1;
-            }
+            bool uncropped = x + 1 < src.cropWidth();
+            (void)writeToDst(dst_ptr + x * dst.mBpp,
+                    kAdjustedClip, uncropped, r1, g1, b1, r2, g2, b2);
         }
 
-        src_y += src.mWidth;
+        src_y += src.mStride;
 
         if (y & 1) {
-            src_u += src.mWidth / 2;
-            src_v += src.mWidth / 2;
+            src_u += src.mStride / 2;
+            src_v += src.mStride / 2;
         }
 
-        dst_ptr += dst.mWidth;
+        dst_ptr += dst.mStride;
     }
 
     return OK;
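
The ColorConverter rework hinges on two small pieces: per-format bytes-per-pixel and stride carried in BitmapParams, and per-destination pixel packing in writeToDst. A worked sketch of both; the struct and the 1280-pixel width are illustrative, while the bit layout follows the diff.

```cpp
#include <cstdint>
#include <cstdio>

struct Params { size_t mWidth, mBpp, mStride; };

Params forRGB565(size_t width)   { return { width, 2, 2 * width }; }
Params forRGBA8888(size_t width) { return { width, 4, 4 * width }; }
Params forYUV420(size_t width)   { return { width, 1, width };     }  // per plane

uint16_t packRGB565(uint8_t r, uint8_t g, uint8_t b) {
    // 5 bits red, 6 bits green, 5 bits blue, as in writeToDst above.
    return ((r >> 3) << 11) | ((g >> 2) << 5) | (b >> 3);
}

int main() {
    Params p = forRGBA8888(1280);
    printf("bpp=%zu stride=%zu\n", p.mBpp, p.mStride);       // bpp=4 stride=5120
    printf("0x%04x\n", (unsigned)packRGB565(255, 255, 255)); // 0xffff, pure white
    return 0;
}
```
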
diff --git a/media/libstagefright/foundation/Android.bp b/media/libstagefright/foundation/Android.bp
index 9108ce1..221af1d 100644
--- a/media/libstagefright/foundation/Android.bp
+++ b/media/libstagefright/foundation/Android.bp
@@ -7,6 +7,9 @@
 cc_library_shared {
     name: "libstagefright_foundation",
     vendor_available: true,
+    vndk: {
+        enabled: true,
+    },
     include_dirs: [
         "frameworks/av/include",
         "frameworks/native/include",
diff --git a/media/libstagefright/include/StagefrightMetadataRetriever.h b/media/libstagefright/include/StagefrightMetadataRetriever.h
index b7ac718..277eb3e 100644
--- a/media/libstagefright/include/StagefrightMetadataRetriever.h
+++ b/media/libstagefright/include/StagefrightMetadataRetriever.h
@@ -38,9 +38,9 @@
             const KeyedVector<String8, String8> *headers);
 
     virtual status_t setDataSource(int fd, int64_t offset, int64_t length);
-    virtual status_t setDataSource(const sp<DataSource>& source);
+    virtual status_t setDataSource(const sp<DataSource>& source, const char *mime);
 
-    virtual VideoFrame *getFrameAtTime(int64_t timeUs, int option);
+    virtual VideoFrame *getFrameAtTime(int64_t timeUs, int option, int colorFormat, bool metaOnly);
     virtual MediaAlbumArt *extractAlbumArt();
     virtual const char *extractMetadata(int keyCode);
 
diff --git a/media/libstagefright/include/media/stagefright/CameraSource.h b/media/libstagefright/include/media/stagefright/CameraSource.h
index 2aaa884..d6149c0 100644
--- a/media/libstagefright/include/media/stagefright/CameraSource.h
+++ b/media/libstagefright/include/media/stagefright/CameraSource.h
@@ -29,7 +29,7 @@
 #include <utils/List.h>
 #include <utils/RefBase.h>
 #include <utils/String16.h>
-#include <MetadataBufferType.h>
+#include <media/hardware/MetadataBufferType.h>
 
 namespace android {
 
diff --git a/media/libstagefright/include/media/stagefright/ColorConverter.h b/media/libstagefright/include/media/stagefright/ColorConverter.h
index 270c809..7ac9b37 100644
--- a/media/libstagefright/include/media/stagefright/ColorConverter.h
+++ b/media/libstagefright/include/media/stagefright/ColorConverter.h
@@ -49,14 +49,17 @@
                 void *bits,
                 size_t width, size_t height,
                 size_t cropLeft, size_t cropTop,
-                size_t cropRight, size_t cropBottom);
+                size_t cropRight, size_t cropBottom,
+                OMX_COLOR_FORMATTYPE colorFormat);
 
         size_t cropWidth() const;
         size_t cropHeight() const;
 
         void *mBits;
+        OMX_COLOR_FORMATTYPE mColorFormat;
         size_t mWidth, mHeight;
         size_t mCropLeft, mCropTop, mCropRight, mCropBottom;
+        size_t mBpp, mStride;
     };
 
     OMX_COLOR_FORMATTYPE mSrcFormat, mDstFormat;
@@ -82,6 +85,10 @@
     status_t convertTIYUV420PackedSemiPlanar(
             const BitmapParams &src, const BitmapParams &dst);
 
+    void writeToDst(void *dst_ptr, uint8_t *kAdjustedClip, bool uncropped,
+            signed r1, signed g1, signed b1,
+            signed r2, signed g2, signed b2);
+
     ColorConverter(const ColorConverter &);
     ColorConverter &operator=(const ColorConverter &);
 };
diff --git a/media/libstagefright/include/media/stagefright/SurfaceMediaSource.h b/media/libstagefright/include/media/stagefright/SurfaceMediaSource.h
index d38c337..d1677fa 100644
--- a/media/libstagefright/include/media/stagefright/SurfaceMediaSource.h
+++ b/media/libstagefright/include/media/stagefright/SurfaceMediaSource.h
@@ -25,7 +25,7 @@
 #include <media/stagefright/MediaSource.h>
 #include <media/stagefright/MediaBuffer.h>
 
-#include <MetadataBufferType.h>
+#include <media/hardware/MetadataBufferType.h>
 
 #include "foundation/ABase.h"
 
diff --git a/media/libstagefright/include/media/stagefright/Utils.h b/media/libstagefright/include/media/stagefright/Utils.h
index 88a416a..77cbd4c 100644
--- a/media/libstagefright/include/media/stagefright/Utils.h
+++ b/media/libstagefright/include/media/stagefright/Utils.h
@@ -95,7 +95,7 @@
 void readFromAMessage(const sp<AMessage> &msg, BufferingSettings *buffering /* nonnull */);
 
 AString nameForFd(int fd);
-
+void MakeFourCCString(uint32_t x, char *s);
 }  // namespace android
 
 #endif  // UTILS_H_
diff --git a/media/libstagefright/omx/1.0/Omx.cpp b/media/libstagefright/omx/1.0/Omx.cpp
index 789379a..dfab3b0 100644
--- a/media/libstagefright/omx/1.0/Omx.cpp
+++ b/media/libstagefright/omx/1.0/Omx.cpp
@@ -19,20 +19,19 @@
 
 #include <android-base/logging.h>
 #include <gui/IGraphicBufferProducer.h>
-#include <OMX_Core.h>
-#include <OMX_AsString.h>
+#include <media/openmax/OMX_Core.h>
+#include <media/openmax/OMX_AsString.h>
 
-#include "../OMXUtils.h"
-#include "../OMXMaster.h"
-#include "../GraphicBufferSource.h"
+#include <media/stagefright/omx/OMXUtils.h>
+#include <media/stagefright/omx/OMXMaster.h>
+#include <media/stagefright/omx/GraphicBufferSource.h>
 
-#include "WOmxNode.h"
-#include "WOmxObserver.h"
-#include "WGraphicBufferProducer.h"
-#include "WGraphicBufferSource.h"
-#include "Conversion.h"
-
-#include "Omx.h"
+#include <media/stagefright/omx/1.0/WOmxNode.h>
+#include <media/stagefright/omx/1.0/WOmxObserver.h>
+#include <media/stagefright/omx/1.0/WGraphicBufferProducer.h>
+#include <media/stagefright/omx/1.0/WGraphicBufferSource.h>
+#include <media/stagefright/omx/1.0/Conversion.h>
+#include <media/stagefright/omx/1.0/Omx.h>
 
 namespace android {
 namespace hardware {
diff --git a/media/libstagefright/omx/1.0/OmxStore.cpp b/media/libstagefright/omx/1.0/OmxStore.cpp
index 0e37af9..a82625a 100644
--- a/media/libstagefright/omx/1.0/OmxStore.cpp
+++ b/media/libstagefright/omx/1.0/OmxStore.cpp
@@ -19,8 +19,8 @@
 
 #include <android-base/logging.h>
 
-#include "Conversion.h"
-#include "OmxStore.h"
+#include <media/stagefright/omx/1.0/Conversion.h>
+#include <media/stagefright/omx/1.0/OmxStore.h>
 
 namespace android {
 namespace hardware {
diff --git a/media/libstagefright/omx/1.0/WGraphicBufferProducer.cpp b/media/libstagefright/omx/1.0/WGraphicBufferProducer.cpp
index acda060..fcf1092 100644
--- a/media/libstagefright/omx/1.0/WGraphicBufferProducer.cpp
+++ b/media/libstagefright/omx/1.0/WGraphicBufferProducer.cpp
@@ -18,9 +18,9 @@
 
 #include <android-base/logging.h>
 
-#include "WGraphicBufferProducer.h"
-#include "WProducerListener.h"
-#include "Conversion.h"
+#include <media/stagefright/omx/1.0/WGraphicBufferProducer.h>
+#include <media/stagefright/omx/1.0/WProducerListener.h>
+#include <media/stagefright/omx/1.0/Conversion.h>
 #include <system/window.h>
 
 namespace android {
@@ -64,10 +64,9 @@
     sp<Fence> fence;
     ::android::FrameEventHistoryDelta outTimestamps;
     status_t status = mBase->dequeueBuffer(
-            &slot, &fence,
-            width, height,
-            static_cast<::android::PixelFormat>(format), usage,
-            getFrameTimestamps ? &outTimestamps : nullptr);
+        &slot, &fence, width, height,
+        static_cast<::android::PixelFormat>(format), usage, nullptr,
+        getFrameTimestamps ? &outTimestamps : nullptr);
     hidl_handle tFence;
     FrameEventHistoryDelta tOutTimestamps;
 
diff --git a/media/libstagefright/omx/1.0/WGraphicBufferSource.cpp b/media/libstagefright/omx/1.0/WGraphicBufferSource.cpp
index d8540f8..3697429 100644
--- a/media/libstagefright/omx/1.0/WGraphicBufferSource.cpp
+++ b/media/libstagefright/omx/1.0/WGraphicBufferSource.cpp
@@ -17,15 +17,14 @@
 //#define LOG_NDEBUG 0
 #define LOG_TAG "TWGraphicBufferSource"
 
+#include <media/stagefright/omx/1.0/WGraphicBufferSource.h>
+#include <media/stagefright/omx/1.0/WOmxNode.h>
+#include <media/stagefright/omx/1.0/Conversion.h>
+#include <media/stagefright/omx/OMXUtils.h>
 #include <android/hardware/media/omx/1.0/IOmxBufferSource.h>
 #include <android/hardware/media/omx/1.0/IOmxNode.h>
-#include <OMX_Component.h>
-#include <OMX_IndexExt.h>
-
-#include "omx/OMXUtils.h"
-#include "WGraphicBufferSource.h"
-#include "WOmxNode.h"
-#include "Conversion.h"
+#include <media/openmax/OMX_Component.h>
+#include <media/openmax/OMX_IndexExt.h>
 
 namespace android {
 namespace hardware {
diff --git a/media/libstagefright/omx/1.0/WOmxBufferSource.cpp b/media/libstagefright/omx/1.0/WOmxBufferSource.cpp
index 803283a..c8c963f 100644
--- a/media/libstagefright/omx/1.0/WOmxBufferSource.cpp
+++ b/media/libstagefright/omx/1.0/WOmxBufferSource.cpp
@@ -16,8 +16,8 @@
 
 #include <utils/String8.h>
 
-#include "WOmxBufferSource.h"
-#include "Conversion.h"
+#include <media/stagefright/omx/1.0/WOmxBufferSource.h>
+#include <media/stagefright/omx/1.0/Conversion.h>
 
 namespace android {
 namespace hardware {
diff --git a/media/libstagefright/omx/1.0/WOmxNode.cpp b/media/libstagefright/omx/1.0/WOmxNode.cpp
index 91d1010..9f82283 100644
--- a/media/libstagefright/omx/1.0/WOmxNode.cpp
+++ b/media/libstagefright/omx/1.0/WOmxNode.cpp
@@ -16,9 +16,9 @@
 
 #include <algorithm>
 
-#include "WOmxNode.h"
-#include "WOmxBufferSource.h"
-#include "Conversion.h"
+#include <media/stagefright/omx/1.0/WOmxNode.h>
+#include <media/stagefright/omx/1.0/WOmxBufferSource.h>
+#include <media/stagefright/omx/1.0/Conversion.h>
 
 namespace android {
 namespace hardware {
diff --git a/media/libstagefright/omx/1.0/WOmxObserver.cpp b/media/libstagefright/omx/1.0/WOmxObserver.cpp
index 354db29..ccbe25c 100644
--- a/media/libstagefright/omx/1.0/WOmxObserver.cpp
+++ b/media/libstagefright/omx/1.0/WOmxObserver.cpp
@@ -16,14 +16,14 @@
 
 #define LOG_TAG "WOmxObserver-impl"
 
-#include <vector>
-
 #include <android-base/logging.h>
 #include <cutils/native_handle.h>
 #include <binder/Binder.h>
 
-#include "WOmxObserver.h"
-#include "Conversion.h"
+#include <media/stagefright/omx/1.0/WOmxObserver.h>
+#include <media/stagefright/omx/1.0/Conversion.h>
+
+#include <vector>
 
 namespace android {
 namespace hardware {
diff --git a/media/libstagefright/omx/1.0/WProducerListener.cpp b/media/libstagefright/omx/1.0/WProducerListener.cpp
index be0d4d5..bdc3aa1 100644
--- a/media/libstagefright/omx/1.0/WProducerListener.cpp
+++ b/media/libstagefright/omx/1.0/WProducerListener.cpp
@@ -14,7 +14,7 @@
  * limitations under the License.
  */
 
-#include "WProducerListener.h"
+#include <media/stagefright/omx/1.0/WProducerListener.h>
 
 namespace android {
 namespace hardware {
diff --git a/media/libstagefright/omx/Android.bp b/media/libstagefright/omx/Android.bp
index b60ce16..3027cdd 100644
--- a/media/libstagefright/omx/Android.bp
+++ b/media/libstagefright/omx/Android.bp
@@ -25,6 +25,10 @@
         "1.0/WOmxBufferSource.cpp",
     ],
 
+    export_include_dirs: [
+        "include",
+    ],
+
     include_dirs: [
         "frameworks/av/include/media/",
         "frameworks/av/media/libstagefright",
@@ -34,6 +38,14 @@
         "frameworks/native/include/media/openmax",
     ],
 
+    header_libs: [
+        "media_plugin_headers",
+    ],
+
+    export_header_lib_headers: [
+        "media_plugin_headers",
+    ],
+
     shared_libs: [
         "libbase",
         "libbinder",
@@ -60,7 +72,9 @@
 
     export_shared_lib_headers: [
         "android.hidl.memory@1.0",
-	"libstagefright_xmlparser",
+        "libmedia_omx",
+        "libstagefright_foundation",
+        "libstagefright_xmlparser",
     ],
 
     cflags: [
@@ -85,12 +99,21 @@
 cc_library_static {
     name: "libstagefright_omx_utils",
     srcs: ["OMXUtils.cpp"],
-    include_dirs: [
-        "frameworks/av/media/libstagefright",
-        "frameworks/native/include/media/hardware",
-        "frameworks/native/include/media/openmax",
+    export_include_dirs: [
+        "include",
     ],
-    shared_libs: ["libmedia"],
+    header_libs: [
+        "media_plugin_headers",
+    ],
+    export_header_lib_headers: [
+        "media_plugin_headers",
+    ],
+    shared_libs: [
+        "libmedia",
+    ],
+    export_shared_lib_headers: [
+        "libmedia",
+    ],
     sanitize: {
         misc_undefined: [
             "signed-integer-overflow",
diff --git a/media/libstagefright/omx/BWGraphicBufferSource.cpp b/media/libstagefright/omx/BWGraphicBufferSource.cpp
index 79f6d93..94ef598 100644
--- a/media/libstagefright/omx/BWGraphicBufferSource.cpp
+++ b/media/libstagefright/omx/BWGraphicBufferSource.cpp
@@ -17,15 +17,13 @@
 //#define LOG_NDEBUG 0
 #define LOG_TAG "BWGraphicBufferSource"
 
-#include <OMX_Component.h>
-#include <OMX_IndexExt.h>
-
+#include <media/stagefright/omx/BWGraphicBufferSource.h>
+#include <media/stagefright/omx/OMXUtils.h>
+#include <media/openmax/OMX_Component.h>
+#include <media/openmax/OMX_IndexExt.h>
 #include <media/OMXBuffer.h>
 #include <media/IOMX.h>
 
-#include "OMXUtils.h"
-#include "BWGraphicBufferSource.h"
-
 namespace android {
 
 static const OMX_U32 kPortIndexInput = 0;
diff --git a/media/libstagefright/omx/FrameDropper.cpp b/media/libstagefright/omx/FrameDropper.cpp
index 9a4952e..0c50c58 100644
--- a/media/libstagefright/omx/FrameDropper.cpp
+++ b/media/libstagefright/omx/FrameDropper.cpp
@@ -18,8 +18,7 @@
 #define LOG_TAG "FrameDropper"
 #include <utils/Log.h>
 
-#include "FrameDropper.h"
-
+#include <media/stagefright/omx/FrameDropper.h>
 #include <media/stagefright/foundation/ADebug.h>
 
 namespace android {
diff --git a/media/libstagefright/omx/GraphicBufferSource.cpp b/media/libstagefright/omx/GraphicBufferSource.cpp
index ef4d745..caf3ac8 100644
--- a/media/libstagefright/omx/GraphicBufferSource.cpp
+++ b/media/libstagefright/omx/GraphicBufferSource.cpp
@@ -22,7 +22,9 @@
 
 #define STRINGIFY_ENUMS // for asString in HardwareAPI.h/VideoAPI.h
 
-#include "GraphicBufferSource.h"
+#include <media/stagefright/omx/GraphicBufferSource.h>
+#include <media/stagefright/omx/FrameDropper.h>
+#include <media/stagefright/omx/OMXUtils.h>
 #include <media/stagefright/foundation/ADebug.h>
 #include <media/stagefright/foundation/AMessage.h>
 #include <media/stagefright/foundation/ColorUtils.h>
@@ -31,14 +33,12 @@
 #include <media/hardware/MetadataBufferType.h>
 #include <ui/GraphicBuffer.h>
 #include <gui/BufferItem.h>
-#include <HardwareAPI.h>
-#include "omx/OMXUtils.h"
-#include <OMX_Component.h>
-#include <OMX_IndexExt.h>
-#include "media/OMXBuffer.h"
+#include <media/hardware/HardwareAPI.h>
+#include <media/openmax/OMX_Component.h>
+#include <media/openmax/OMX_IndexExt.h>
+#include <media/OMXBuffer.h>
 
 #include <inttypes.h>
-#include "FrameDropper.h"
 
 #include <functional>
 #include <memory>
diff --git a/media/libstagefright/omx/OMX.cpp b/media/libstagefright/omx/OMX.cpp
index 8c1141d..93b4dbe 100644
--- a/media/libstagefright/omx/OMX.cpp
+++ b/media/libstagefright/omx/OMX.cpp
@@ -22,15 +22,12 @@
 
 #include <dlfcn.h>
 
-#include "../include/OMX.h"
-
-#include "../include/OMXNodeInstance.h"
-
+#include <media/stagefright/omx/OMX.h>
+#include <media/stagefright/omx/OMXNodeInstance.h>
+#include <media/stagefright/omx/BWGraphicBufferSource.h>
+#include <media/stagefright/omx/OMXMaster.h>
+#include <media/stagefright/omx/OMXUtils.h>
 #include <media/stagefright/foundation/ADebug.h>
-#include "BWGraphicBufferSource.h"
-
-#include "OMXMaster.h"
-#include "OMXUtils.h"
 
 namespace android {
 
diff --git a/media/libstagefright/omx/OMXMaster.cpp b/media/libstagefright/omx/OMXMaster.cpp
index ac9b0c3..fd97fdc 100644
--- a/media/libstagefright/omx/OMXMaster.cpp
+++ b/media/libstagefright/omx/OMXMaster.cpp
@@ -18,15 +18,13 @@
 #define LOG_TAG "OMXMaster"
 #include <utils/Log.h>
 
-#include "OMXMaster.h"
-
-#include "SoftOMXPlugin.h"
+#include <media/stagefright/omx/OMXMaster.h>
+#include <media/stagefright/omx/SoftOMXPlugin.h>
+#include <media/stagefright/foundation/ADebug.h>
 
 #include <dlfcn.h>
 #include <fcntl.h>
 
-#include <media/stagefright/foundation/ADebug.h>
-
 namespace android {
 
 OMXMaster::OMXMaster()
diff --git a/media/libstagefright/omx/OMXNodeInstance.cpp b/media/libstagefright/omx/OMXNodeInstance.cpp
index bc4ce9d..c749454 100644
--- a/media/libstagefright/omx/OMXNodeInstance.cpp
+++ b/media/libstagefright/omx/OMXNodeInstance.cpp
@@ -20,20 +20,20 @@
 
 #include <inttypes.h>
 
-#include "../include/OMXNodeInstance.h"
-#include "OMXMaster.h"
-#include "OMXUtils.h"
+#include <media/stagefright/omx/OMXNodeInstance.h>
+#include <media/stagefright/omx/OMXMaster.h>
+#include <media/stagefright/omx/OMXUtils.h>
 #include <android/IOMXBufferSource.h>
 
-#include <OMX_Component.h>
-#include <OMX_IndexExt.h>
-#include <OMX_VideoExt.h>
-#include <OMX_AsString.h>
+#include <media/openmax/OMX_Component.h>
+#include <media/openmax/OMX_IndexExt.h>
+#include <media/openmax/OMX_VideoExt.h>
+#include <media/openmax/OMX_AsString.h>
 
 #include <binder/IMemory.h>
 #include <cutils/properties.h>
 #include <gui/BufferQueue.h>
-#include <HardwareAPI.h>
+#include <media/hardware/HardwareAPI.h>
 #include <media/stagefright/foundation/ADebug.h>
 #include <media/stagefright/foundation/ABuffer.h>
 #include <media/stagefright/foundation/ColorUtils.h>
diff --git a/media/libstagefright/omx/OMXUtils.cpp b/media/libstagefright/omx/OMXUtils.cpp
index a66d565..5894837 100644
--- a/media/libstagefright/omx/OMXUtils.cpp
+++ b/media/libstagefright/omx/OMXUtils.cpp
@@ -19,13 +19,13 @@
 
 #include <string.h>
 
-#include <media/hardware/HardwareAPI.h>
+#include <media/stagefright/omx/OMXUtils.h>
 #include <media/stagefright/foundation/ADebug.h>
 #include <media/stagefright/foundation/AUtils.h>
 #include <media/stagefright/MediaErrors.h>
+#include <media/hardware/HardwareAPI.h>
 #include <media/MediaDefs.h>
 #include <system/graphics-base.h>
-#include "OMXUtils.h"
 
 namespace android {
 
diff --git a/media/libstagefright/omx/SimpleSoftOMXComponent.cpp b/media/libstagefright/omx/SimpleSoftOMXComponent.cpp
index 761b425..09e6d75 100644
--- a/media/libstagefright/omx/SimpleSoftOMXComponent.cpp
+++ b/media/libstagefright/omx/SimpleSoftOMXComponent.cpp
@@ -18,8 +18,7 @@
 #define LOG_TAG "SimpleSoftOMXComponent"
 #include <utils/Log.h>
 
-#include "include/SimpleSoftOMXComponent.h"
-
+#include <media/stagefright/omx/SimpleSoftOMXComponent.h>
 #include <media/stagefright/foundation/ADebug.h>
 #include <media/stagefright/foundation/ALooper.h>
 #include <media/stagefright/foundation/AMessage.h>
diff --git a/media/libstagefright/omx/SoftOMXComponent.cpp b/media/libstagefright/omx/SoftOMXComponent.cpp
index df978f8..ee269e1 100644
--- a/media/libstagefright/omx/SoftOMXComponent.cpp
+++ b/media/libstagefright/omx/SoftOMXComponent.cpp
@@ -18,8 +18,7 @@
 #define LOG_TAG "SoftOMXComponent"
 #include <utils/Log.h>
 
-#include "include/SoftOMXComponent.h"
-
+#include <media/stagefright/omx/SoftOMXComponent.h>
 #include <media/stagefright/foundation/ADebug.h>
 
 namespace android {
diff --git a/media/libstagefright/omx/SoftOMXPlugin.cpp b/media/libstagefright/omx/SoftOMXPlugin.cpp
index 0bc65e1..4946ada 100644
--- a/media/libstagefright/omx/SoftOMXPlugin.cpp
+++ b/media/libstagefright/omx/SoftOMXPlugin.cpp
@@ -18,8 +18,8 @@
 #define LOG_TAG "SoftOMXPlugin"
 #include <utils/Log.h>
 
-#include "SoftOMXPlugin.h"
-#include "include/SoftOMXComponent.h"
+#include <media/stagefright/omx/SoftOMXPlugin.h>
+#include <media/stagefright/omx/SoftOMXComponent.h>
 
 #include <media/stagefright/foundation/ADebug.h>
 #include <media/stagefright/foundation/AString.h>
diff --git a/media/libstagefright/omx/SoftVideoDecoderOMXComponent.cpp b/media/libstagefright/omx/SoftVideoDecoderOMXComponent.cpp
index 920dd18..24ed981 100644
--- a/media/libstagefright/omx/SoftVideoDecoderOMXComponent.cpp
+++ b/media/libstagefright/omx/SoftVideoDecoderOMXComponent.cpp
@@ -20,14 +20,14 @@
 #define LOG_TAG "SoftVideoDecoderOMXComponent"
 #include <utils/Log.h>
 
-#include "include/SoftVideoDecoderOMXComponent.h"
+#include <media/stagefright/omx/SoftVideoDecoderOMXComponent.h>
 
-#include <media/hardware/HardwareAPI.h>
 #include <media/stagefright/foundation/ADebug.h>
 #include <media/stagefright/foundation/ALooper.h>
 #include <media/stagefright/foundation/AMessage.h>
 #include <media/stagefright/foundation/AUtils.h>
-#include <media/stagefright/MediaDefs.h>
+#include <media/hardware/HardwareAPI.h>
+#include <media/MediaDefs.h>
 
 namespace android {
 
diff --git a/media/libstagefright/omx/SoftVideoEncoderOMXComponent.cpp b/media/libstagefright/omx/SoftVideoEncoderOMXComponent.cpp
index 7ecfbbb..f33bdc0 100644
--- a/media/libstagefright/omx/SoftVideoEncoderOMXComponent.cpp
+++ b/media/libstagefright/omx/SoftVideoEncoderOMXComponent.cpp
@@ -21,25 +21,22 @@
 #include <utils/Log.h>
 #include <utils/misc.h>
 
-#include "include/SoftVideoEncoderOMXComponent.h"
-
-#include <media/hardware/HardwareAPI.h>
+#include <media/stagefright/omx/SoftVideoEncoderOMXComponent.h>
 #include <media/stagefright/foundation/ADebug.h>
 #include <media/stagefright/foundation/ALooper.h>
 #include <media/stagefright/foundation/AMessage.h>
 #include <media/stagefright/foundation/AUtils.h>
-#include <media/stagefright/MediaDefs.h>
+#include <media/hardware/HardwareAPI.h>
+#include <media/openmax/OMX_IndexExt.h>
+#include <media/MediaDefs.h>
 
 #include <ui/Fence.h>
 #include <ui/GraphicBufferMapper.h>
 #include <ui/Rect.h>
 
 #include <hardware/gralloc.h>
-
 #include <nativebase/nativebase.h>
 
-#include <OMX_IndexExt.h>
-
 namespace android {
 
 const static OMX_COLOR_FORMATTYPE kSupportedColorFormats[] = {
diff --git a/media/libstagefright/omx/1.0/Conversion.h b/media/libstagefright/omx/include/media/stagefright/omx/1.0/Conversion.h
similarity index 99%
rename from media/libstagefright/omx/1.0/Conversion.h
rename to media/libstagefright/omx/include/media/stagefright/omx/1.0/Conversion.h
index fd91574..f319bdc 100644
--- a/media/libstagefright/omx/1.0/Conversion.h
+++ b/media/libstagefright/omx/include/media/stagefright/omx/1.0/Conversion.h
@@ -29,12 +29,12 @@
 #include <binder/Binder.h>
 #include <binder/Status.h>
 #include <ui/FenceTime.h>
-#include <media/OMXFenceParcelable.h>
 #include <cutils/native_handle.h>
 #include <gui/IGraphicBufferProducer.h>
 
+#include <media/OMXFenceParcelable.h>
 #include <media/OMXBuffer.h>
-#include <VideoAPI.h>
+#include <media/hardware/VideoAPI.h>
 
 #include <android/hidl/memory/1.0/IMemory.h>
 #include <android/hardware/graphics/bufferqueue/1.0/IProducerListener.h>
diff --git a/media/libstagefright/omx/1.0/Omx.h b/media/libstagefright/omx/include/media/stagefright/omx/1.0/Omx.h
similarity index 97%
rename from media/libstagefright/omx/1.0/Omx.h
rename to media/libstagefright/omx/include/media/stagefright/omx/1.0/Omx.h
index 5fdf38e..a6a9d3e 100644
--- a/media/libstagefright/omx/1.0/Omx.h
+++ b/media/libstagefright/omx/include/media/stagefright/omx/1.0/Omx.h
@@ -20,10 +20,9 @@
 #include <hidl/MQDescriptor.h>
 #include <hidl/Status.h>
 
-#include "../../include/OMXNodeInstance.h"
-
-#include <android/hardware/media/omx/1.0/IOmx.h>
+#include <media/stagefright/omx/OMXNodeInstance.h>
 #include <media/stagefright/xmlparser/MediaCodecsXmlParser.h>
+#include <android/hardware/media/omx/1.0/IOmx.h>
 
 namespace android {
 
diff --git a/media/libstagefright/omx/1.0/OmxStore.h b/media/libstagefright/omx/include/media/stagefright/omx/1.0/OmxStore.h
similarity index 100%
rename from media/libstagefright/omx/1.0/OmxStore.h
rename to media/libstagefright/omx/include/media/stagefright/omx/1.0/OmxStore.h
diff --git a/media/libstagefright/omx/1.0/WGraphicBufferProducer.h b/media/libstagefright/omx/include/media/stagefright/omx/1.0/WGraphicBufferProducer.h
similarity index 100%
rename from media/libstagefright/omx/1.0/WGraphicBufferProducer.h
rename to media/libstagefright/omx/include/media/stagefright/omx/1.0/WGraphicBufferProducer.h
diff --git a/media/libstagefright/omx/1.0/WGraphicBufferSource.h b/media/libstagefright/omx/include/media/stagefright/omx/1.0/WGraphicBufferSource.h
similarity index 98%
rename from media/libstagefright/omx/1.0/WGraphicBufferSource.h
rename to media/libstagefright/omx/include/media/stagefright/omx/1.0/WGraphicBufferSource.h
index 4549c97..b9f22ab 100644
--- a/media/libstagefright/omx/1.0/WGraphicBufferSource.h
+++ b/media/libstagefright/omx/include/media/stagefright/omx/1.0/WGraphicBufferSource.h
@@ -28,7 +28,7 @@
 
 #include <android/BnGraphicBufferSource.h>
 
-#include "../GraphicBufferSource.h"
+#include <media/stagefright/omx/GraphicBufferSource.h>
 
 namespace android {
 namespace hardware {
diff --git a/media/libstagefright/omx/1.0/WOmxBufferSource.h b/media/libstagefright/omx/include/media/stagefright/omx/1.0/WOmxBufferSource.h
similarity index 100%
rename from media/libstagefright/omx/1.0/WOmxBufferSource.h
rename to media/libstagefright/omx/include/media/stagefright/omx/1.0/WOmxBufferSource.h
diff --git a/media/libstagefright/omx/1.0/WOmxNode.h b/media/libstagefright/omx/include/media/stagefright/omx/1.0/WOmxNode.h
similarity index 98%
rename from media/libstagefright/omx/1.0/WOmxNode.h
rename to media/libstagefright/omx/include/media/stagefright/omx/1.0/WOmxNode.h
index d715374..38d5885 100644
--- a/media/libstagefright/omx/1.0/WOmxNode.h
+++ b/media/libstagefright/omx/include/media/stagefright/omx/1.0/WOmxNode.h
@@ -22,7 +22,7 @@
 
 #include <utils/Errors.h>
 
-#include "../../include/OMXNodeInstance.h"
+#include <media/stagefright/omx/OMXNodeInstance.h>
 
 #include <android/hardware/media/omx/1.0/IOmxNode.h>
 #include <android/hardware/media/omx/1.0/IOmxObserver.h>
diff --git a/media/libstagefright/omx/1.0/WOmxObserver.h b/media/libstagefright/omx/include/media/stagefright/omx/1.0/WOmxObserver.h
similarity index 100%
rename from media/libstagefright/omx/1.0/WOmxObserver.h
rename to media/libstagefright/omx/include/media/stagefright/omx/1.0/WOmxObserver.h
diff --git a/media/libstagefright/omx/1.0/WProducerListener.h b/media/libstagefright/omx/include/media/stagefright/omx/1.0/WProducerListener.h
similarity index 100%
rename from media/libstagefright/omx/1.0/WProducerListener.h
rename to media/libstagefright/omx/include/media/stagefright/omx/1.0/WProducerListener.h
diff --git a/media/libstagefright/omx/BWGraphicBufferSource.h b/media/libstagefright/omx/include/media/stagefright/omx/BWGraphicBufferSource.h
similarity index 100%
rename from media/libstagefright/omx/BWGraphicBufferSource.h
rename to media/libstagefright/omx/include/media/stagefright/omx/BWGraphicBufferSource.h
diff --git a/media/libstagefright/omx/FrameDropper.h b/media/libstagefright/omx/include/media/stagefright/omx/FrameDropper.h
similarity index 100%
rename from media/libstagefright/omx/FrameDropper.h
rename to media/libstagefright/omx/include/media/stagefright/omx/FrameDropper.h
diff --git a/media/libstagefright/omx/GraphicBufferSource.h b/media/libstagefright/omx/include/media/stagefright/omx/GraphicBufferSource.h
similarity index 100%
rename from media/libstagefright/omx/GraphicBufferSource.h
rename to media/libstagefright/omx/include/media/stagefright/omx/GraphicBufferSource.h
diff --git a/media/libstagefright/omx/IOmxNodeWrapper.h b/media/libstagefright/omx/include/media/stagefright/omx/IOmxNodeWrapper.h
similarity index 100%
rename from media/libstagefright/omx/IOmxNodeWrapper.h
rename to media/libstagefright/omx/include/media/stagefright/omx/IOmxNodeWrapper.h
diff --git a/media/libstagefright/include/OMX.h b/media/libstagefright/omx/include/media/stagefright/omx/OMX.h
similarity index 100%
rename from media/libstagefright/include/OMX.h
rename to media/libstagefright/omx/include/media/stagefright/omx/OMX.h
diff --git a/media/libstagefright/omx/OMXMaster.h b/media/libstagefright/omx/include/media/stagefright/omx/OMXMaster.h
similarity index 100%
rename from media/libstagefright/omx/OMXMaster.h
rename to media/libstagefright/omx/include/media/stagefright/omx/OMXMaster.h
diff --git a/media/libstagefright/include/OMXNodeInstance.h b/media/libstagefright/omx/include/media/stagefright/omx/OMXNodeInstance.h
similarity index 100%
rename from media/libstagefright/include/OMXNodeInstance.h
rename to media/libstagefright/omx/include/media/stagefright/omx/OMXNodeInstance.h
diff --git a/media/libstagefright/omx/OMXUtils.h b/media/libstagefright/omx/include/media/stagefright/omx/OMXUtils.h
similarity index 100%
rename from media/libstagefright/omx/OMXUtils.h
rename to media/libstagefright/omx/include/media/stagefright/omx/OMXUtils.h
diff --git a/media/libstagefright/include/SimpleSoftOMXComponent.h b/media/libstagefright/omx/include/media/stagefright/omx/SimpleSoftOMXComponent.h
similarity index 100%
rename from media/libstagefright/include/SimpleSoftOMXComponent.h
rename to media/libstagefright/omx/include/media/stagefright/omx/SimpleSoftOMXComponent.h
diff --git a/media/libstagefright/include/SoftOMXComponent.h b/media/libstagefright/omx/include/media/stagefright/omx/SoftOMXComponent.h
similarity index 100%
rename from media/libstagefright/include/SoftOMXComponent.h
rename to media/libstagefright/omx/include/media/stagefright/omx/SoftOMXComponent.h
diff --git a/media/libstagefright/omx/SoftOMXPlugin.h b/media/libstagefright/omx/include/media/stagefright/omx/SoftOMXPlugin.h
similarity index 100%
rename from media/libstagefright/omx/SoftOMXPlugin.h
rename to media/libstagefright/omx/include/media/stagefright/omx/SoftOMXPlugin.h
diff --git a/media/libstagefright/include/SoftVideoDecoderOMXComponent.h b/media/libstagefright/omx/include/media/stagefright/omx/SoftVideoDecoderOMXComponent.h
similarity index 100%
rename from media/libstagefright/include/SoftVideoDecoderOMXComponent.h
rename to media/libstagefright/omx/include/media/stagefright/omx/SoftVideoDecoderOMXComponent.h
diff --git a/media/libstagefright/include/SoftVideoEncoderOMXComponent.h b/media/libstagefright/omx/include/media/stagefright/omx/SoftVideoEncoderOMXComponent.h
similarity index 100%
rename from media/libstagefright/include/SoftVideoEncoderOMXComponent.h
rename to media/libstagefright/omx/include/media/stagefright/omx/SoftVideoEncoderOMXComponent.h
diff --git a/media/libstagefright/omx/tests/FrameDropper_test.cpp b/media/libstagefright/omx/tests/FrameDropper_test.cpp
index f966b5e..a925da6 100644
--- a/media/libstagefright/omx/tests/FrameDropper_test.cpp
+++ b/media/libstagefright/omx/tests/FrameDropper_test.cpp
@@ -20,7 +20,7 @@
 
 #include <gtest/gtest.h>
 
-#include "FrameDropper.h"
+#include <media/stagefright/omx/FrameDropper.h>
 #include <media/stagefright/foundation/ADebug.h>
 
 namespace android {
diff --git a/media/mtp/MtpFfsHandle.cpp b/media/mtp/MtpFfsHandle.cpp
index c50af2f..23fd7ab 100644
--- a/media/mtp/MtpFfsHandle.cpp
+++ b/media/mtp/MtpFfsHandle.cpp
@@ -719,9 +719,22 @@
 }
 
 int MtpFfsHandle::sendEvent(mtp_event me) {
+    // Mimic the behavior of f_mtp by sending the event async.
+    // Events aren't critical to the connection, so we don't need to check the return value.
+    char *temp = new char[me.length];
+    memcpy(temp, me.data, me.length);
+    me.data = temp;
+    // Capture "me" by value: the detached thread must own its copy of the
+    // event, or the capture would dangle once sendEvent() returns.
+    std::thread t([me](MtpFfsHandle *h) { h->doSendEvent(me); }, this);
+    t.detach();
+    return 0;
+}
+
+void MtpFfsHandle::doSendEvent(mtp_event me) {
     unsigned length = me.length;
-    int ret = writeHandle(mIntr, me.data, length);
-    return static_cast<unsigned>(ret) == length ? 0 : -1;
+    int ret = ::write(mIntr, me.data, length);
+    delete[] reinterpret_cast<char*>(me.data);
+    if (static_cast<unsigned>(ret) != length)
+        PLOG(ERROR) << "Mtp error sending event from event thread!";
 }
 
 } // namespace android
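
The sendEvent() change above mimics f_mtp by writing the event from a detached thread: the payload is copied to a heap buffer first, so it outlives the caller's stack frame, and the result is deliberately ignored because events are best-effort. A minimal standalone sketch of that owned-copy pattern (Event and sendEventAsync are illustrative names, not the MTP types):

    #include <thread>
    #include <utility>
    #include <vector>

    // Illustrative stand-in for mtp_event: just an owned byte payload.
    struct Event {
        std::vector<char> data;
    };

    // Fire-and-forget: the detached worker owns its own copy of the payload,
    // so the caller's buffer can be reused as soon as this returns. Errors
    // would be logged, never propagated, since events are best-effort.
    void sendEventAsync(Event ev) {
        std::thread([ev = std::move(ev)]() {
            // ::write(intrFd, ev.data.data(), ev.data.size()) would go here.
        }).detach();
    }
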
diff --git a/media/mtp/MtpFfsHandle.h b/media/mtp/MtpFfsHandle.h
index 98669ff..b637d65 100644
--- a/media/mtp/MtpFfsHandle.h
+++ b/media/mtp/MtpFfsHandle.h
@@ -33,6 +33,7 @@
     bool initFunctionfs();
     void closeConfig();
     void closeEndpoints();
+    void doSendEvent(mtp_event me);
 
     bool mPtp;
 
diff --git a/media/ndk/NdkImageReader.cpp b/media/ndk/NdkImageReader.cpp
index 5d1a20b..a450dd3 100644
--- a/media/ndk/NdkImageReader.cpp
+++ b/media/ndk/NdkImageReader.cpp
@@ -44,7 +44,11 @@
 const char* AImageReader::kGraphicBufferKey = "GraphicBuffer";
 
 bool
-AImageReader::isSupportedFormat(int32_t format) {
+AImageReader::isSupportedFormatAndUsage(int32_t format, uint64_t usage) {
+    // Check whether usage has either CPU_READ_OFTEN or CPU_READ set. Note that check against
+    // AHARDWAREBUFFER_USAGE_CPU_READ_OFTEN (0x6) is sufficient as it implies
+    // AHARDWAREBUFFER_USAGE_CPU_READ (0x2).
+    bool hasCpuUsage = usage & AHARDWAREBUFFER_USAGE_CPU_READ_OFTEN;
     switch (format) {
         case AIMAGE_FORMAT_RGBA_8888:
         case AIMAGE_FORMAT_RGBX_8888:
@@ -60,6 +64,9 @@
         case AIMAGE_FORMAT_DEPTH16:
         case AIMAGE_FORMAT_DEPTH_POINT_CLOUD:
             return true;
+        case AIMAGE_FORMAT_PRIVATE:
+            // For private format, cpu usage is prohibited.
+            return !hasCpuUsage;
         default:
             return false;
     }
@@ -83,6 +90,8 @@
         case AIMAGE_FORMAT_DEPTH16:
         case AIMAGE_FORMAT_DEPTH_POINT_CLOUD:
             return 1;
+        case AIMAGE_FORMAT_PRIVATE:
+            return 0;
         default:
             return -1;
     }
@@ -606,9 +615,9 @@
         return AMEDIA_ERROR_INVALID_PARAMETER;
     }
 
-    if (!AImageReader::isSupportedFormat(format)) {
-        ALOGE("%s: format %d is not supported by AImageReader",
-                __FUNCTION__, format);
+    if (!AImageReader::isSupportedFormatAndUsage(format, usage)) {
+        ALOGE("%s: format %d is not supported with usage 0x%" PRIx64 " by AImageReader",
+                __FUNCTION__, format, usage);
         return AMEDIA_ERROR_INVALID_PARAMETER;
     }
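
The usage check above relies on the bit layout described in its comment: CPU_READ_OFTEN (0x6) has the plain CPU_READ bit (0x2) set, so one mask test catches both. A tiny standalone sketch of that bit-implication check, with the constants inlined for illustration:

    #include <cstdint>
    #include <cstdio>

    // Values from the comment in the hunk above: OFTEN (0x6) is a strict
    // superset of plain CPU_READ (0x2), so masking with OFTEN catches both.
    constexpr uint64_t kCpuRead      = 0x2;
    constexpr uint64_t kCpuReadOften = 0x6;  // 0x4 | kCpuRead

    bool hasCpuReadUsage(uint64_t usage) {
        return (usage & kCpuReadOften) != 0;
    }

    int main() {
        printf("%d %d %d\n",
               hasCpuReadUsage(kCpuRead),       // 1: plain CPU_READ is caught
               hasCpuReadUsage(kCpuReadOften),  // 1
               hasCpuReadUsage(0x10));          // 0: non-CPU usage
        return 0;
    }
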
 
diff --git a/media/ndk/NdkImageReaderPriv.h b/media/ndk/NdkImageReaderPriv.h
index 989c1fd..989b937 100644
--- a/media/ndk/NdkImageReaderPriv.h
+++ b/media/ndk/NdkImageReaderPriv.h
@@ -49,7 +49,7 @@
 
 struct AImageReader : public RefBase {
   public:
-    static bool isSupportedFormat(int32_t format);
+    static bool isSupportedFormatAndUsage(int32_t format, uint64_t usage);
     static int getNumPlanesForFormat(int32_t format);
 
     AImageReader(int32_t width,
diff --git a/media/ndk/include/media/NdkImage.h b/media/ndk/include/media/NdkImage.h
index d7443be..1931496 100644
--- a/media/ndk/include/media/NdkImage.h
+++ b/media/ndk/include/media/NdkImage.h
@@ -495,7 +495,13 @@
     /**
      * Android private opaque image format.
      *
-     * <p>This format is not currently supported by {@link AImageReader}.</p>
+     * <p>The choices of the actual format and pixel data layout are entirely up to the
+     * device-specific and framework internal implementations, and may vary depending on use cases
+     * even for the same device. Also note that the contents of these buffers are not directly
+     * accessible to the application.</p>
+     *
+     * <p>When an {@link AImage} of this format is obtained from an {@link AImageReader}, the
+     * {@link AImage_getNumberOfPlanes()} method will return zero.</p>
      */
     AIMAGE_FORMAT_PRIVATE           = 0x22
 };
diff --git a/media/ndk/include/media/NdkImageReader.h b/media/ndk/include/media/NdkImageReader.h
index 59ae507..7a0c17b 100644
--- a/media/ndk/include/media/NdkImageReader.h
+++ b/media/ndk/include/media/NdkImageReader.h
@@ -70,7 +70,9 @@
  * @param height The default height in pixels of the Images that this reader will produce.
  * @param format The format of the Image that this reader will produce. This must be one of the
 *            AIMAGE_FORMAT_* enum values defined in {@link AIMAGE_FORMATS}. Note that not all
- *            formats are supported, like {@link AIMAGE_FORMAT_PRIVATE}.
+ *            formats are supported. One example is {@link AIMAGE_FORMAT_PRIVATE}, as it is not
+ *            intended to be read by applications directly. That format is supported by
+ *            {@link AImageReader_newWithUsage} introduced in API 26.
  * @param maxImages The maximum number of images the user will want to access simultaneously. This
  *            should be as small as possible to limit memory use. Once maxImages Images are obtained
  *            by the user, one of them has to be released before a new {@link AImage} will become
@@ -307,6 +309,28 @@
  * for the consumer usage. All other parameters and the return values are identical to those passed
 * to {@link AImageReader_new}.
  *
+ * <p>If the {@code format} is {@link AIMAGE_FORMAT_PRIVATE}, the created {@link AImageReader}
+ * will produce images whose contents are not directly accessible by the application. The application can
+ * still acquire images from this {@link AImageReader} and access {@link AHardwareBuffer} via
+ * {@link AImage_getHardwareBuffer()}. The {@link AHardwareBuffer} gained this way can then
+ * be passed back to hardware (such as GPU or hardware encoder if supported) for future processing.
+ * For example, you can obtain an {@link EGLClientBuffer} from the {@link AHardwareBuffer} by using
+ * the {@link eglGetNativeClientBufferANDROID} extension and pass that {@link EGLClientBuffer} to {@link
+ * eglCreateImageKHR} to create an {@link EGLImage} resource type, which may then be bound to a
+ * texture via {@link glEGLImageTargetTexture2DOES} on supported devices. This can be useful for
+ * transporting textures that may be shared cross-process.</p>
+ * <p>In general, when software access to image data is not necessary, an {@link AImageReader}
+ * created with {@link AIMAGE_FORMAT_PRIVATE} format is more efficient, compared with {@link
+ * AImageReader}s using other formats such as {@link AIMAGE_FORMAT_YUV_420_888}.</p>
+ *
+ * <p>Note that not all format and usage flag combinations are supported by the {@link
+ * AImageReader}. In particular, if {@code format} is {@link AIMAGE_FORMAT_PRIVATE}, {@code usage}
+ * must not include either {@link AHARDWAREBUFFER_USAGE_CPU_READ_RARELY} or
+ * {@link AHARDWAREBUFFER_USAGE_CPU_READ_OFTEN}.</p>
+ *
+ * @param width The default width in pixels of the Images that this reader will produce.
+ * @param height The default height in pixels of the Images that this reader will produce.
+ * @param format The format of the Image that this reader will produce. This must be one of the
+ *            AIMAGE_FORMAT_* enum values defined in {@link AIMAGE_FORMATS}.
  * @param usage specifies how the consumer will access the AImage, using combination of the
  *            AHARDWAREBUFFER_USAGE flags described in {@link hardware_buffer.h}.
  *            Passing {@link AHARDWAREBUFFER_USAGE_CPU_READ_OFTEN} is equivalent to calling
@@ -331,6 +355,11 @@
  *   {@link AHARDWAREBUFFER_USAGE_GPU_SAMPLED_IMAGE}, or combined</td>
  * </tr>
  * </table>
+ * @return <ul>
+ *         <li>{@link AMEDIA_OK} if the method call succeeds.</li>
+ *         <li>{@link AMEDIA_ERROR_INVALID_PARAMETER} if reader is NULL, or one or more of width,
+ *                 height, format, maxImages, or usage arguments is not supported.</li>
+ *         <li>{@link AMEDIA_ERROR_UNKNOWN} if the method fails for some other reasons.</li></ul>
  *
  * @see AImage
  * @see AImageReader_new
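
The paragraphs above describe the AHardwareBuffer-to-GL path for AIMAGE_FORMAT_PRIVATE readers. A hedged sketch of that flow, assuming the EGL/GL extension prototypes (EGL_ANDROID_get_native_client_buffer, EGL_KHR_image_base, GL_OES_EGL_image_external) are declared by the headers; otherwise the entry points must be fetched with eglGetProcAddress. All error handling is omitted:

    #include <android/hardware_buffer.h>
    #include <EGL/egl.h>
    #include <EGL/eglext.h>
    #include <GLES2/gl2.h>
    #include <GLES2/gl2ext.h>

    // Bind an AHardwareBuffer (e.g. from AImage_getHardwareBuffer()) to an
    // external GL texture, following the documentation above. Sketch only.
    GLuint bindHardwareBufferAsTexture(EGLDisplay display, AHardwareBuffer* buffer) {
        // Wrap the AHardwareBuffer as an EGLClientBuffer.
        EGLClientBuffer clientBuffer = eglGetNativeClientBufferANDROID(buffer);

        // Create an EGLImage from the client buffer.
        const EGLint attrs[] = { EGL_IMAGE_PRESERVED_KHR, EGL_TRUE, EGL_NONE };
        EGLImageKHR image = eglCreateImageKHR(display, EGL_NO_CONTEXT,
                                              EGL_NATIVE_BUFFER_ANDROID,
                                              clientBuffer, attrs);

        // Bind the image to an external texture target.
        GLuint tex = 0;
        glGenTextures(1, &tex);
        glBindTexture(GL_TEXTURE_EXTERNAL_OES, tex);
        glEGLImageTargetTexture2DOES(GL_TEXTURE_EXTERNAL_OES,
                                     (GLeglImageOES) image);
        return tex;
    }
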
diff --git a/services/audioflinger/AudioFlinger.h b/services/audioflinger/AudioFlinger.h
index 9023b2d..63898a0 100644
--- a/services/audioflinger/AudioFlinger.h
+++ b/services/audioflinger/AudioFlinger.h
@@ -604,7 +604,7 @@
         virtual status_t standby();
 
     private:
-        sp<MmapThread> mThread;
+        const sp<MmapThread> mThread;
     };
 
               ThreadBase *checkThread_l(audio_io_handle_t ioHandle) const;
diff --git a/services/audioflinger/Threads.cpp b/services/audioflinger/Threads.cpp
index 8efcce6..8c4531a 100644
--- a/services/audioflinger/Threads.cpp
+++ b/services/audioflinger/Threads.cpp
@@ -7503,34 +7503,22 @@
 AudioFlinger::MmapThreadHandle::MmapThreadHandle(const sp<MmapThread>& thread)
     : mThread(thread)
 {
+    assert(thread != 0); // thread must start non-null and stay non-null
 }
 
 AudioFlinger::MmapThreadHandle::~MmapThreadHandle()
 {
-    MmapThread *thread = mThread.get();
-    // clear our strong reference before disconnecting the thread: the last strong reference
-    // will be removed when closeInput/closeOutput is executed upon call from audio policy manager
-    // and the thread removed from mMMapThreads list causing the thread destruction.
-    mThread.clear();
-    if (thread != nullptr) {
-        thread->disconnect();
-    }
+    mThread->disconnect();
 }
 
 status_t AudioFlinger::MmapThreadHandle::createMmapBuffer(int32_t minSizeFrames,
                                   struct audio_mmap_buffer_info *info)
 {
-    if (mThread == 0) {
-        return NO_INIT;
-    }
     return mThread->createMmapBuffer(minSizeFrames, info);
 }
 
 status_t AudioFlinger::MmapThreadHandle::getMmapPosition(struct audio_mmap_position *position)
 {
-    if (mThread == 0) {
-        return NO_INIT;
-    }
     return mThread->getMmapPosition(position);
 }
 
@@ -7538,25 +7526,16 @@
         audio_port_handle_t *handle)
 
 {
-    if (mThread == 0) {
-        return NO_INIT;
-    }
     return mThread->start(client, handle);
 }
 
 status_t AudioFlinger::MmapThreadHandle::stop(audio_port_handle_t handle)
 {
-    if (mThread == 0) {
-        return NO_INIT;
-    }
     return mThread->stop(handle);
 }
 
 status_t AudioFlinger::MmapThreadHandle::standby()
 {
-    if (mThread == 0) {
-        return NO_INIT;
-    }
     return mThread->standby();
 }
 
@@ -7588,7 +7567,7 @@
     for (const sp<MmapTrack> &t : mActiveTracks) {
         stop(t->portId());
     }
-    // this will cause the destruction of this thread.
+    // This will decrement references and may cause the destruction of this thread.
     if (isOutput()) {
         AudioSystem::releaseOutput(mId, streamType(), mSessionId);
     } else {
@@ -7877,17 +7856,34 @@
 {
     AudioParameter param = AudioParameter(keyValuePair);
     int value;
+    bool sendToHal = true;
     if (param.getInt(String8(AudioParameter::keyRouting), value) == NO_ERROR) {
+        audio_devices_t device = (audio_devices_t)value;
         // forward device change to effects that have requested to be
         // aware of attached audio device.
-        if (value != AUDIO_DEVICE_NONE) {
-            mOutDevice = value;
+        if (device != AUDIO_DEVICE_NONE) {
             for (size_t i = 0; i < mEffectChains.size(); i++) {
-                mEffectChains[i]->setDevice_l(mOutDevice);
+                mEffectChains[i]->setDevice_l(device);
             }
         }
+        if (audio_is_output_devices(device)) {
+            mOutDevice = device;
+            if (!isOutput()) {
+                sendToHal = false;
+            }
+        } else {
+            mInDevice = device;
+            if (device != AUDIO_DEVICE_NONE) {
+                mPrevInDevice = device;
+            }
+            // TODO: implement and call checkBtNrec_l();
+        }
     }
-    status = mHalStream->setParameters(keyValuePair);
+    if (sendToHal) {
+        status = mHalStream->setParameters(keyValuePair);
+    } else {
+        status = NO_ERROR;
+    }
 
     return false;
 }
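
The routing hunk above makes the MMAP thread direction-aware: an output-device routing arriving at a capture thread still updates mOutDevice and the effect chains, but is withheld from the HAL stream. A small sketch of just that decision (the type and constant are simplified stand-ins for the system/audio.h definitions):

    #include <cstdint>

    using audio_devices_t = uint32_t;
    constexpr audio_devices_t AUDIO_DEVICE_BIT_IN = 0x80000000u;

    // Output devices have the IN bit clear, which is what
    // audio_is_output_devices() checks.
    static bool isOutputDevice(audio_devices_t device) {
        return (device & AUDIO_DEVICE_BIT_IN) == 0;
    }

    // Mirrors the hunk above: only an output-device routing arriving at an
    // input (capture) thread is withheld from the HAL stream.
    bool shouldSendRoutingToHal(bool threadIsOutput, audio_devices_t device) {
        return !(isOutputDevice(device) && !threadIsOutput);
    }
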
diff --git a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
index 55c364f..78f195d 100644
--- a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
+++ b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
@@ -54,6 +54,11 @@
 //FIXME: workaround for truncated touch sounds
 // to be removed when the problem is handled by system UI
 #define TOUCH_SOUND_FIXED_DELAY_MS 100
+
+// Largest difference in dB on earpiece in call between the voice volume and another
+// media / notification / system volume.
+constexpr float IN_CALL_EARPIECE_HEADROOM_DB = 3.f;
+
 // ----------------------------------------------------------------------------
 // AudioPolicyInterface implementation
 // ----------------------------------------------------------------------------
@@ -933,7 +938,6 @@
     if (stream == AUDIO_STREAM_TTS) {
         flags = AUDIO_OUTPUT_FLAG_TTS;
     } else if (stream == AUDIO_STREAM_VOICE_CALL &&
-               getPhoneState() == AUDIO_MODE_IN_COMMUNICATION &&
                audio_is_linear_pcm(format)) {
         flags = (audio_output_flags_t)(AUDIO_OUTPUT_FLAG_VOIP_RX |
                                        AUDIO_OUTPUT_FLAG_DIRECT);
@@ -1647,7 +1651,6 @@
             halInputSource = AUDIO_SOURCE_VOICE_RECOGNITION;
         }
     } else if (inputSource == AUDIO_SOURCE_VOICE_COMMUNICATION &&
-               getPhoneState() == AUDIO_MODE_IN_COMMUNICATION &&
                audio_is_linear_pcm(format)) {
         flags = (audio_input_flags_t)(flags | AUDIO_INPUT_FLAG_VOIP_TX);
     }
@@ -5350,6 +5353,30 @@
         return ringVolumeDB - 4 > volumeDB ? ringVolumeDB - 4 : volumeDB;
     }
 
+    // in-call: always cap earpiece volume by voice volume + some low headroom
+    if ((stream != AUDIO_STREAM_VOICE_CALL) && (device & AUDIO_DEVICE_OUT_EARPIECE) && isInCall()) {
+        switch (stream) {
+        case AUDIO_STREAM_SYSTEM:
+        case AUDIO_STREAM_RING:
+        case AUDIO_STREAM_MUSIC:
+        case AUDIO_STREAM_ALARM:
+        case AUDIO_STREAM_NOTIFICATION:
+        case AUDIO_STREAM_ENFORCED_AUDIBLE:
+        case AUDIO_STREAM_DTMF:
+        case AUDIO_STREAM_ACCESSIBILITY: {
+            const float maxVoiceVolDb = computeVolume(AUDIO_STREAM_VOICE_CALL, index, device)
+                    + IN_CALL_EARPIECE_HEADROOM_DB;
+            if (volumeDB > maxVoiceVolDb) {
+                ALOGV("computeVolume() stream %d at vol=%f overridden by stream %d at vol=%f",
+                        stream, volumeDB, AUDIO_STREAM_VOICE_CALL, maxVoiceVolDb);
+                volumeDB = maxVoiceVolDb;
+            }
+            } break;
+        default:
+            break;
+        }
+    }
+
     // if a headset is connected, apply the following rules to ring tones and notifications
     // to avoid sound level bursts in user's ears:
     // - always attenuate notifications volume by 6dB
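
The new in-call rule above caps every non-voice stream routed to the earpiece at the voice volume plus IN_CALL_EARPIECE_HEADROOM_DB. A small numeric sketch of the clamp:

    #include <algorithm>
    #include <cstdio>

    constexpr float IN_CALL_EARPIECE_HEADROOM_DB = 3.f;

    // Clamp a media/notification volume (dB) against the current voice
    // volume plus the allowed headroom, as in the hunk above.
    float capForInCallEarpiece(float volumeDb, float voiceVolumeDb) {
        return std::min(volumeDb, voiceVolumeDb + IN_CALL_EARPIECE_HEADROOM_DB);
    }

    int main() {
        // Voice at -20 dB: a stream asking for -5 dB is pulled down to -17 dB,
        // while one already at -30 dB is left alone.
        printf("%.1f\n", capForInCallEarpiece(-5.f,  -20.f));   // -17.0
        printf("%.1f\n", capForInCallEarpiece(-30.f, -20.f));   // -30.0
        return 0;
    }
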
diff --git a/services/camera/libcameraservice/api2/CameraDeviceClient.cpp b/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
index 0429e7f..fc5a28e 100644
--- a/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
+++ b/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
@@ -313,11 +313,13 @@
 
 binder::Status CameraDeviceClient::beginConfigure() {
     // TODO: Implement this.
+    ATRACE_CALL();
     ALOGV("%s: Not implemented yet.", __FUNCTION__);
     return binder::Status::ok();
 }
 
 binder::Status CameraDeviceClient::endConfigure(int operatingMode) {
+    ATRACE_CALL();
     ALOGV("%s: ending configure (%d input stream, %zu output surfaces)",
             __FUNCTION__, mInputStream.configured ? 1 : 0,
             mStreamMap.size());
diff --git a/services/camera/libcameraservice/api2/CameraDeviceClient.h b/services/camera/libcameraservice/api2/CameraDeviceClient.h
index 2bf73a0..e8fc080 100644
--- a/services/camera/libcameraservice/api2/CameraDeviceClient.h
+++ b/services/camera/libcameraservice/api2/CameraDeviceClient.h
@@ -85,7 +85,7 @@
 
     virtual binder::Status endConfigure(int operatingMode) override;
 
-    // Returns -EBUSY if device is not idle
+    // Returns -EBUSY if device is not idle or in error state
     virtual binder::Status deleteStream(int streamId) override;
 
     virtual binder::Status createStream(
diff --git a/services/camera/libcameraservice/device3/Camera3Device.cpp b/services/camera/libcameraservice/device3/Camera3Device.cpp
index 0fc3740..89d2c65 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.cpp
+++ b/services/camera/libcameraservice/device3/Camera3Device.cpp
@@ -240,6 +240,7 @@
 
     status_t res = OK;
     std::vector<wp<Camera3StreamInterface>> streams;
+    nsecs_t maxExpectedDuration = getExpectedInFlightDuration();
     {
         Mutex::Autolock l(mLock);
         if (mStatus == STATUS_UNINITIALIZED) return res;
@@ -251,7 +252,6 @@
                 SET_ERR_L("Can't stop streaming");
                 // Continue to close device even in case of error
             } else {
-                nsecs_t maxExpectedDuration = getExpectedInFlightDurationLocked();
                 res = waitUntilStateThenRelock(/*active*/ false, maxExpectedDuration);
                 if (res != OK) {
                     SET_ERR_L("Timeout waiting for HAL to drain (% " PRIi64 " ns)",
@@ -311,10 +311,10 @@
 
     {
         Mutex::Autolock l(mLock);
-        mExpectedInflightDuration = 0;
         mInterface->clear();
         mOutputStreams.clear();
         mInputStream.clear();
+        mDeletedStreams.clear();
         mBufferManager.clear();
         internalUpdateStatusLocked(STATUS_UNINITIALIZED);
     }
@@ -1141,6 +1141,7 @@
         uint32_t width, uint32_t height, int format, int *id) {
     ATRACE_CALL();
     Mutex::Autolock il(mInterfaceLock);
+    nsecs_t maxExpectedDuration = getExpectedInFlightDuration();
     Mutex::Autolock l(mLock);
     ALOGV("Camera %s: Creating new input stream %d: %d x %d, format %d",
             mId.string(), mNextStreamId, width, height, format);
@@ -1161,7 +1162,7 @@
             break;
         case STATUS_ACTIVE:
             ALOGV("%s: Stopping activity to reconfigure streams", __FUNCTION__);
-            res = internalPauseAndWaitLocked();
+            res = internalPauseAndWaitLocked(maxExpectedDuration);
             if (res != OK) {
                 SET_ERR_L("Can't pause captures to reconfigure streams!");
                 return res;
@@ -1228,6 +1229,7 @@
         int streamSetId, bool isShared, uint32_t consumerUsage) {
     ATRACE_CALL();
     Mutex::Autolock il(mInterfaceLock);
+    nsecs_t maxExpectedDuration = getExpectedInFlightDuration();
     Mutex::Autolock l(mLock);
     ALOGV("Camera %s: Creating new stream %d: %d x %d, format %d, dataspace %d rotation %d"
             " consumer usage 0x%x, isShared %d", mId.string(), mNextStreamId, width, height, format,
@@ -1249,7 +1251,7 @@
             break;
         case STATUS_ACTIVE:
             ALOGV("%s: Stopping activity to reconfigure streams", __FUNCTION__);
-            res = internalPauseAndWaitLocked();
+            res = internalPauseAndWaitLocked(maxExpectedDuration);
             if (res != OK) {
                 SET_ERR_L("Can't pause captures to reconfigure streams!");
                 return res;
@@ -1428,6 +1430,12 @@
         return -EBUSY;
     }
 
+    if (mStatus == STATUS_ERROR) {
+        ALOGW("%s: Camera %s: deleteStream not allowed in ERROR state",
+                __FUNCTION__, mId.string());
+        return -EBUSY;
+    }
+
     sp<Camera3StreamInterface> deletedStream;
     ssize_t outputStreamIdx = mOutputStreams.indexOfKey(id);
     if (mInputStream != NULL && id == mInputStream->getId()) {
@@ -1470,6 +1478,7 @@
 
 status_t Camera3Device::getInputBufferProducer(
         sp<IGraphicBufferProducer> *producer) {
+    ATRACE_CALL();
     Mutex::Autolock il(mInterfaceLock);
     Mutex::Autolock l(mLock);
 
@@ -1494,59 +1503,66 @@
     }
 
     Mutex::Autolock il(mInterfaceLock);
-    Mutex::Autolock l(mLock);
 
-    switch (mStatus) {
-        case STATUS_ERROR:
-            CLOGE("Device has encountered a serious error");
-            return INVALID_OPERATION;
-        case STATUS_UNINITIALIZED:
-            CLOGE("Device is not initialized!");
-            return INVALID_OPERATION;
-        case STATUS_UNCONFIGURED:
-        case STATUS_CONFIGURED:
-        case STATUS_ACTIVE:
-            // OK
-            break;
-        default:
-            SET_ERR_L("Unexpected status: %d", mStatus);
-            return INVALID_OPERATION;
-    }
+    {
+        Mutex::Autolock l(mLock);
+        switch (mStatus) {
+            case STATUS_ERROR:
+                CLOGE("Device has encountered a serious error");
+                return INVALID_OPERATION;
+            case STATUS_UNINITIALIZED:
+                CLOGE("Device is not initialized!");
+                return INVALID_OPERATION;
+            case STATUS_UNCONFIGURED:
+            case STATUS_CONFIGURED:
+            case STATUS_ACTIVE:
+                // OK
+                break;
+            default:
+                SET_ERR_L("Unexpected status: %d", mStatus);
+                return INVALID_OPERATION;
+        }
 
-    if (!mRequestTemplateCache[templateId].isEmpty()) {
-        *request = mRequestTemplateCache[templateId];
-        return OK;
+        if (!mRequestTemplateCache[templateId].isEmpty()) {
+            *request = mRequestTemplateCache[templateId];
+            return OK;
+        }
     }
 
     camera_metadata_t *rawRequest;
     status_t res = mInterface->constructDefaultRequestSettings(
             (camera3_request_template_t) templateId, &rawRequest);
-    if (res == BAD_VALUE) {
-        ALOGI("%s: template %d is not supported on this camera device",
-              __FUNCTION__, templateId);
-        return res;
-    } else if (res != OK) {
-        CLOGE("Unable to construct request template %d: %s (%d)",
-                templateId, strerror(-res), res);
-        return res;
+
+    {
+        Mutex::Autolock l(mLock);
+        if (res == BAD_VALUE) {
+            ALOGI("%s: template %d is not supported on this camera device",
+                  __FUNCTION__, templateId);
+            return res;
+        } else if (res != OK) {
+            CLOGE("Unable to construct request template %d: %s (%d)",
+                    templateId, strerror(-res), res);
+            return res;
+        }
+
+        set_camera_metadata_vendor_id(rawRequest, mVendorTagId);
+        mRequestTemplateCache[templateId].acquire(rawRequest);
+
+        *request = mRequestTemplateCache[templateId];
     }
-
-    set_camera_metadata_vendor_id(rawRequest, mVendorTagId);
-    mRequestTemplateCache[templateId].acquire(rawRequest);
-
-    *request = mRequestTemplateCache[templateId];
     return OK;
 }
 
 status_t Camera3Device::waitUntilDrained() {
     ATRACE_CALL();
     Mutex::Autolock il(mInterfaceLock);
+    nsecs_t maxExpectedDuration = getExpectedInFlightDuration();
     Mutex::Autolock l(mLock);
 
-    return waitUntilDrainedLocked();
+    return waitUntilDrainedLocked(maxExpectedDuration);
 }
 
-status_t Camera3Device::waitUntilDrainedLocked() {
+status_t Camera3Device::waitUntilDrainedLocked(nsecs_t maxExpectedDuration) {
     switch (mStatus) {
         case STATUS_UNINITIALIZED:
         case STATUS_UNCONFIGURED:
@@ -1562,9 +1578,6 @@
             SET_ERR_L("Unexpected status: %d",mStatus);
             return INVALID_OPERATION;
     }
-
-    nsecs_t maxExpectedDuration = getExpectedInFlightDurationLocked();
-
     ALOGV("%s: Camera %s: Waiting until idle (%" PRIi64 "ns)", __FUNCTION__, mId.string(),
             maxExpectedDuration);
     status_t res = waitUntilStateThenRelock(/*active*/ false, maxExpectedDuration);
@@ -1583,11 +1596,10 @@
 }
 
 // Pause to reconfigure
-status_t Camera3Device::internalPauseAndWaitLocked() {
+status_t Camera3Device::internalPauseAndWaitLocked(nsecs_t maxExpectedDuration) {
     mRequestThread->setPaused(true);
     mPauseStateNotify = true;
 
-    nsecs_t maxExpectedDuration = getExpectedInFlightDurationLocked();
     ALOGV("%s: Camera %s: Internal wait until idle (% " PRIi64 " ns)", __FUNCTION__, mId.string(),
           maxExpectedDuration);
     status_t res = waitUntilStateThenRelock(/*active*/ false, maxExpectedDuration);
@@ -1680,6 +1692,7 @@
 }
 
 status_t Camera3Device::waitForNextFrame(nsecs_t timeout) {
+    ATRACE_CALL();
     status_t res;
     Mutex::Autolock l(mOutputLock);
 
@@ -1873,6 +1886,7 @@
  */
 
 void Camera3Device::notifyStatus(bool idle) {
+    ATRACE_CALL();
     {
         // Need mLock to safely update state and synchronize to current
         // state of methods in flight.
@@ -2306,6 +2320,7 @@
 }
 
 void Camera3Device::setErrorState(const char *fmt, ...) {
+    ATRACE_CALL();
     Mutex::Autolock l(mLock);
     va_list args;
     va_start(args, fmt);
@@ -2316,6 +2331,7 @@
 }
 
 void Camera3Device::setErrorStateV(const char *fmt, va_list args) {
+    ATRACE_CALL();
     Mutex::Autolock l(mLock);
     setErrorStateLockedV(fmt, args);
 }
@@ -2339,7 +2355,9 @@
 
     mErrorCause = errorCause;
 
-    mRequestThread->setPaused(true);
+    if (mRequestThread != nullptr) {
+        mRequestThread->setPaused(true);
+    }
     internalUpdateStatusLocked(STATUS_ERROR);
 
     // Notify upstream about a device error
@@ -2377,9 +2395,7 @@
         }
     }
 
-    Mutex::Autolock ml(mLock);
     mExpectedInflightDuration += maxExpectedDuration;
-
     return OK;
 }
 
@@ -2400,6 +2416,7 @@
 }
 
 void Camera3Device::removeInFlightMapEntryLocked(int idx) {
+    ATRACE_CALL();
     nsecs_t duration = mInFlightMap.valueAt(idx).maxExpectedDuration;
     mInFlightMap.removeItemsAt(idx, 1);
 
@@ -2411,7 +2428,6 @@
             mStatusTracker->markComponentIdle(mInFlightStatusId, Fence::NO_FENCE);
         }
     }
-    Mutex::Autolock l(mLock);
     mExpectedInflightDuration -= duration;
 }
 
@@ -2485,6 +2501,7 @@
 }
 
 void Camera3Device::flushInflightRequests() {
+    ATRACE_CALL();
     { // First return buffers cached in mInFlightMap
         Mutex::Autolock l(mInFlightLock);
         for (size_t idx = 0; idx < mInFlightMap.size(); idx++) {
@@ -2493,6 +2510,7 @@
                 request.pendingOutputBuffers.size(), 0);
         }
         mInFlightMap.clear();
+        mExpectedInflightDuration = 0;
     }
 
     // Then return all inflight buffers not returned by HAL
@@ -2516,16 +2534,60 @@
         streamBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
         streamBuffer.acquire_fence = -1;
         streamBuffer.release_fence = -1;
+
+        // First check if the buffer belongs to deleted stream
+        bool streamDeleted = false;
+        for (auto& stream : mDeletedStreams) {
+            if (streamId == stream->getId()) {
+                streamDeleted = true;
+                // Return buffer to deleted stream
+                camera3_stream* halStream = stream->asHalStream();
+                streamBuffer.stream = halStream;
+                switch (halStream->stream_type) {
+                    case CAMERA3_STREAM_OUTPUT:
+                        res = stream->returnBuffer(streamBuffer, /*timestamp*/ 0);
+                        if (res != OK) {
+                            ALOGE("%s: Can't return output buffer for frame %d to"
+                                  " stream %d: %s (%d)",  __FUNCTION__,
+                                  frameNumber, streamId, strerror(-res), res);
+                        }
+                        break;
+                    case CAMERA3_STREAM_INPUT:
+                        res = stream->returnInputBuffer(streamBuffer);
+                        if (res != OK) {
+                            ALOGE("%s: Can't return input buffer for frame %d to"
+                                  " stream %d: %s (%d)",  __FUNCTION__,
+                                  frameNumber, streamId, strerror(-res), res);
+                        }
+                        break;
+                    default: // Bi-directional stream is deprecated
+                        ALOGE("%s: stream %d has unknown stream type %d",
+                                __FUNCTION__, streamId, halStream->stream_type);
+                        break;
+                }
+                break;
+            }
+        }
+        if (streamDeleted) {
+            continue;
+        }
+
+        // Then check against configured streams
         if (streamId == inputStreamId) {
             streamBuffer.stream = mInputStream->asHalStream();
             res = mInputStream->returnInputBuffer(streamBuffer);
             if (res != OK) {
                 ALOGE("%s: Can't return input buffer for frame %d to"
-                      "  its stream:%s (%d)",  __FUNCTION__,
-                      frameNumber, strerror(-res), res);
+                      " stream %d: %s (%d)",  __FUNCTION__,
+                      frameNumber, streamId, strerror(-res), res);
             }
         } else {
-            streamBuffer.stream = mOutputStreams.valueFor(streamId)->asHalStream();
+            ssize_t idx = mOutputStreams.indexOfKey(streamId);
+            if (idx == NAME_NOT_FOUND) {
+                ALOGE("%s: Output stream id %d not found!", __FUNCTION__, streamId);
+                continue;
+            }
+            streamBuffer.stream = mOutputStreams.valueAt(idx)->asHalStream();
             returnOutputBuffers(&streamBuffer, /*size*/1, /*timestamp*/ 0);
         }
     }
@@ -2566,6 +2628,7 @@
 
 void Camera3Device::sendPartialCaptureResult(const camera_metadata_t * partialResult,
         const CaptureResultExtras &resultExtras, uint32_t frameNumber) {
+    ATRACE_CALL();
     Mutex::Autolock l(mOutputLock);
 
     CaptureResult captureResult;
@@ -2581,6 +2644,7 @@
         CameraMetadata &collectedPartialResult,
         uint32_t frameNumber,
         bool reprocess) {
+    ATRACE_CALL();
     if (pendingMetadata.isEmpty())
         return;
 
@@ -2829,7 +2893,7 @@
 
 void Camera3Device::notifyError(const camera3_error_msg_t &msg,
         sp<NotificationListener> listener) {
-
+    ATRACE_CALL();
     // Map camera HAL error codes to ICameraDeviceCallback error codes
     // Index into this with the HAL error code
     static const int32_t halErrorMap[CAMERA3_MSG_NUM_ERRORS] = {
@@ -2907,6 +2971,7 @@
 
 void Camera3Device::notifyShutter(const camera3_shutter_msg_t &msg,
         sp<NotificationListener> listener) {
+    ATRACE_CALL();
     ssize_t idx;
 
     // Set timestamp for the request in the in-flight tracking
@@ -3237,7 +3302,7 @@
 void Camera3Device::HalInterface::wrapAsHidlRequest(camera3_capture_request_t* request,
         /*out*/device::V3_2::CaptureRequest* captureRequest,
         /*out*/std::vector<native_handle_t*>* handlesCreated) {
-
+    ATRACE_CALL();
     if (captureRequest == nullptr || handlesCreated == nullptr) {
         ALOGE("%s: captureRequest (%p) and handlesCreated (%p) must not be null",
                 __FUNCTION__, captureRequest, handlesCreated);
@@ -3559,11 +3624,13 @@
 
 void Camera3Device::RequestThread::setNotificationListener(
         wp<NotificationListener> listener) {
+    ATRACE_CALL();
     Mutex::Autolock l(mRequestLock);
     mListener = listener;
 }
 
 void Camera3Device::RequestThread::configurationComplete(bool isConstrainedHighSpeed) {
+    ATRACE_CALL();
     Mutex::Autolock l(mRequestLock);
     mReconfigured = true;
     // Prepare video stream for high speed recording.
@@ -3574,6 +3641,7 @@
         List<sp<CaptureRequest> > &requests,
         /*out*/
         int64_t *lastFrameNumber) {
+    ATRACE_CALL();
     Mutex::Autolock l(mRequestLock);
     for (List<sp<CaptureRequest> >::iterator it = requests.begin(); it != requests.end();
             ++it) {
@@ -3596,7 +3664,7 @@
 status_t Camera3Device::RequestThread::queueTrigger(
         RequestTrigger trigger[],
         size_t count) {
-
+    ATRACE_CALL();
     Mutex::Autolock l(mTriggerMutex);
     status_t ret;
 
@@ -3653,6 +3721,7 @@
         const RequestList &requests,
         /*out*/
         int64_t *lastFrameNumber) {
+    ATRACE_CALL();
     Mutex::Autolock l(mRequestLock);
     if (lastFrameNumber != NULL) {
         *lastFrameNumber = mRepeatingLastFrameNumber;
@@ -3679,6 +3748,7 @@
 }
 
 status_t Camera3Device::RequestThread::clearRepeatingRequests(/*out*/int64_t *lastFrameNumber) {
+    ATRACE_CALL();
     Mutex::Autolock l(mRequestLock);
     return clearRepeatingRequestsLocked(lastFrameNumber);
 
@@ -3695,6 +3765,7 @@
 
 status_t Camera3Device::RequestThread::clear(
         /*out*/int64_t *lastFrameNumber) {
+    ATRACE_CALL();
     Mutex::Autolock l(mRequestLock);
     ALOGV("RequestThread::%s:", __FUNCTION__);
 
@@ -3750,6 +3821,7 @@
 }
 
 void Camera3Device::RequestThread::setPaused(bool paused) {
+    ATRACE_CALL();
     Mutex::Autolock l(mPauseLock);
     mDoPause = paused;
     mDoPauseSignal.signal();
@@ -3757,6 +3829,7 @@
 
 status_t Camera3Device::RequestThread::waitUntilRequestProcessed(
         int32_t requestId, nsecs_t timeout) {
+    ATRACE_CALL();
     Mutex::Autolock l(mLatestRequestMutex);
     status_t res;
     while (mLatestRequestId != requestId) {
@@ -3783,6 +3856,7 @@
 }
 
 void Camera3Device::RequestThread::checkAndStopRepeatingRequest() {
+    ATRACE_CALL();
     bool surfaceAbandoned = false;
     int64_t lastFrameNumber = 0;
     sp<NotificationListener> listener;
@@ -3811,6 +3885,7 @@
 }
 
 bool Camera3Device::RequestThread::sendRequestsBatch() {
+    ATRACE_CALL();
     status_t res;
     size_t batchSize = mNextRequests.size();
     std::vector<camera3_capture_request_t*> requests(batchSize);
@@ -4206,6 +4281,7 @@
 }
 
 CameraMetadata Camera3Device::RequestThread::getLatestRequest() const {
+    ATRACE_CALL();
     Mutex::Autolock al(mLatestRequestMutex);
 
     ALOGV("RequestThread::%s", __FUNCTION__);
@@ -4215,6 +4291,7 @@
 
 bool Camera3Device::RequestThread::isStreamPending(
         sp<Camera3StreamInterface>& stream) {
+    ATRACE_CALL();
     Mutex::Autolock l(mRequestLock);
 
     for (const auto& nextRequest : mNextRequests) {
@@ -4243,7 +4320,9 @@
     return false;
 }
 
-nsecs_t Camera3Device::getExpectedInFlightDurationLocked() {
+nsecs_t Camera3Device::getExpectedInFlightDuration() {
+    ATRACE_CALL();
+    Mutex::Autolock al(mInFlightLock);
     return mExpectedInflightDuration > kMinInflightDuration ?
             mExpectedInflightDuration : kMinInflightDuration;
 }
@@ -4314,6 +4393,7 @@
 }
 
 void Camera3Device::RequestThread::waitForNextRequestBatch() {
+    ATRACE_CALL();
     // Optimized a bit for the simple steady-state case (single repeating
     // request), to avoid putting that request in the queue temporarily.
     Mutex::Autolock l(mRequestLock);
@@ -4463,6 +4543,7 @@
 }
 
 bool Camera3Device::RequestThread::waitIfPaused() {
+    ATRACE_CALL();
     status_t res;
     Mutex::Autolock l(mPauseLock);
     while (mDoPause) {
@@ -4487,6 +4568,7 @@
 }
 
 void Camera3Device::RequestThread::unpauseForNewRequests() {
+    ATRACE_CALL();
     // With work to do, mark thread as unpaused.
     // If paused by request (setPaused), don't resume, to avoid
     // extra signaling/waiting overhead to waitUntilPaused
@@ -4518,7 +4600,7 @@
 
 status_t Camera3Device::RequestThread::insertTriggers(
         const sp<CaptureRequest> &request) {
-
+    ATRACE_CALL();
     Mutex::Autolock al(mTriggerMutex);
 
     sp<Camera3Device> parent = mParent.promote();
@@ -4607,6 +4689,7 @@
 
 status_t Camera3Device::RequestThread::removeTriggers(
         const sp<CaptureRequest> &request) {
+    ATRACE_CALL();
     Mutex::Autolock al(mTriggerMutex);
 
     CameraMetadata &metadata = request->mSettings;
@@ -4723,6 +4806,7 @@
 }
 
 status_t Camera3Device::PreparerThread::prepare(int maxCount, sp<Camera3StreamInterface>& stream) {
+    ATRACE_CALL();
     status_t res;
 
     Mutex::Autolock l(mLock);
@@ -4766,6 +4850,7 @@
 }
 
 status_t Camera3Device::PreparerThread::clear() {
+    ATRACE_CALL();
     Mutex::Autolock l(mLock);
 
     for (const auto& stream : mPendingStreams) {
@@ -4778,6 +4863,7 @@
 }
 
 void Camera3Device::PreparerThread::setNotificationListener(wp<NotificationListener> listener) {
+    ATRACE_CALL();
     Mutex::Autolock l(mLock);
     mListener = listener;
 }
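
A pattern recurs throughout the Camera3Device.cpp hunks above: getExpectedInFlightDuration() now takes mInFlightLock itself, and every caller computes the duration before acquiring mLock, so the two mutexes are never held together from those paths. A minimal sketch of that hoisting pattern (names hypothetical, not the Camera3Device API):

    #include <mutex>

    class Device {
      public:
        // Self-locking accessor: takes only the in-flight lock.
        long expectedInFlightDuration() {
            std::lock_guard<std::mutex> lock(mInFlightLock);
            return mExpectedDuration > kMinDuration ? mExpectedDuration
                                                    : kMinDuration;
        }

        void reconfigure() {
            // Hoist: read the duration before taking the state lock, so this
            // path never holds mInFlightLock and mStateLock at the same time.
            long maxExpected = expectedInFlightDuration();
            std::lock_guard<std::mutex> lock(mStateLock);
            waitUntilIdleLocked(maxExpected);
        }

      private:
        static constexpr long kMinDuration = 100;
        void waitUntilIdleLocked(long /*timeoutNs*/) {}
        std::mutex mInFlightLock;  // protects mExpectedDuration
        std::mutex mStateLock;     // protects device state
        long mExpectedDuration = 0;
    };
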
diff --git a/services/camera/libcameraservice/device3/Camera3Device.h b/services/camera/libcameraservice/device3/Camera3Device.h
index 686a28b..d700e03 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.h
+++ b/services/camera/libcameraservice/device3/Camera3Device.h
@@ -486,7 +486,7 @@
      * CameraDeviceBase interface we shouldn't need to.
      * Must be called with mLock and mInterfaceLock both held.
      */
-    status_t internalPauseAndWaitLocked();
+    status_t internalPauseAndWaitLocked(nsecs_t maxExpectedDuration);
 
     /**
      * Resume work after internalPauseAndWaitLocked()
@@ -512,7 +512,7 @@
      *
      * Need to be called with mLock and mInterfaceLock held.
      */
-    status_t waitUntilDrainedLocked();
+    status_t waitUntilDrainedLocked(nsecs_t maxExpectedDuration);
 
     /**
      * Do common work for setting up a streaming or single capture request.
@@ -915,11 +915,14 @@
     // Map from frame number to the in-flight request state
     typedef KeyedVector<uint32_t, InFlightRequest> InFlightMap;
 
-    nsecs_t                mExpectedInflightDuration = 0;
-    Mutex                  mInFlightLock; // Protects mInFlightMap
+
+    Mutex                  mInFlightLock; // Protects mInFlightMap and
+                                          // mExpectedInflightDuration
     InFlightMap            mInFlightMap;
+    nsecs_t                mExpectedInflightDuration = 0;
     int                    mInFlightStatusId;
 
+
     status_t registerInFlight(uint32_t frameNumber,
             int32_t numBuffers, CaptureResultExtras resultExtras, bool hasInput,
             bool callback, nsecs_t maxExpectedDuration);
@@ -928,7 +931,7 @@
      * Returns the maximum expected time it'll take for all currently in-flight
      * requests to complete, based on their settings
      */
-    nsecs_t getExpectedInFlightDurationLocked();
+    nsecs_t getExpectedInFlightDuration();
 
     /**
      * Tracking for idle detection
diff --git a/services/camera/libcameraservice/device3/Camera3InputStream.cpp b/services/camera/libcameraservice/device3/Camera3InputStream.cpp
index 35096eb..ff2dcef 100644
--- a/services/camera/libcameraservice/device3/Camera3InputStream.cpp
+++ b/services/camera/libcameraservice/device3/Camera3InputStream.cpp
@@ -293,6 +293,15 @@
 void Camera3InputStream::onBufferFreed(const wp<GraphicBuffer>& gb) {
     const sp<GraphicBuffer> buffer = gb.promote();
     if (buffer != nullptr) {
+        camera3_stream_buffer streamBuffer =
+                {nullptr, &buffer->handle, 0, -1, -1};
+        // Check if this buffer is outstanding.
+        if (isOutstandingBuffer(streamBuffer)) {
+            ALOGV("%s: Stream %d: Trying to free a buffer that is still being "
+                    "processed.", __FUNCTION__, mId);
+            return;
+        }
+
         sp<Camera3StreamBufferFreedListener> callback = mBufferFreedListener.promote();
         if (callback != nullptr) {
             callback->onBufferFreed(mId, buffer->handle);
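
The guard added above keeps onBufferFreed() from notifying the listener about a buffer the HAL may still be processing: only handles no longer on the outstanding list propagate the callback. A stripped-down sketch of that check (types illustrative):

    #include <algorithm>
    #include <list>

    using BufferHandle = const void*;

    class Stream {
      public:
        void onBufferFreed(BufferHandle handle) {
            if (isOutstanding(handle)) {
                return;  // still being processed; suppress the callback
            }
            notifyFreed(handle);
        }

      private:
        bool isOutstanding(BufferHandle handle) const {
            return std::find(mOutstanding.begin(), mOutstanding.end(), handle)
                    != mOutstanding.end();
        }
        void notifyFreed(BufferHandle /*handle*/) {}
        std::list<BufferHandle> mOutstanding;  // buffers dequeued to the HAL
    };
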
diff --git a/services/camera/libcameraservice/device3/Camera3Stream.cpp b/services/camera/libcameraservice/device3/Camera3Stream.cpp
index e77421a..9e6ac79 100644
--- a/services/camera/libcameraservice/device3/Camera3Stream.cpp
+++ b/services/camera/libcameraservice/device3/Camera3Stream.cpp
@@ -479,6 +479,7 @@
     if (res == OK) {
         fireBufferListenersLocked(*buffer, /*acquired*/true, /*output*/true);
         if (buffer->buffer) {
+            Mutex::Autolock l(mOutstandingBuffersLock);
             mOutstandingBuffers.push_back(*buffer->buffer);
         }
     }
@@ -486,11 +487,13 @@
     return res;
 }
 
-bool Camera3Stream::isOutstandingBuffer(const camera3_stream_buffer &buffer) {
+bool Camera3Stream::isOutstandingBuffer(const camera3_stream_buffer &buffer) const {
     if (buffer.buffer == nullptr) {
         return false;
     }
 
+    Mutex::Autolock l(mOutstandingBuffersLock);
+
     for (auto b : mOutstandingBuffers) {
         if (b == *buffer.buffer) {
             return true;
@@ -504,6 +507,8 @@
         return;
     }
 
+    Mutex::Autolock l(mOutstandingBuffersLock);
+
     for (auto b = mOutstandingBuffers.begin(); b != mOutstandingBuffers.end(); b++) {
         if (*b == *buffer.buffer) {
             mOutstandingBuffers.erase(b);
@@ -575,6 +580,7 @@
     if (res == OK) {
         fireBufferListenersLocked(*buffer, /*acquired*/true, /*output*/false);
         if (buffer->buffer) {
+            Mutex::Autolock l(mOutstandingBuffersLock);
             mOutstandingBuffers.push_back(*buffer->buffer);
         }
     }
diff --git a/services/camera/libcameraservice/device3/Camera3Stream.h b/services/camera/libcameraservice/device3/Camera3Stream.h
index 0940d62..44fe6b6 100644
--- a/services/camera/libcameraservice/device3/Camera3Stream.h
+++ b/services/camera/libcameraservice/device3/Camera3Stream.h
@@ -461,6 +461,9 @@
     // INVALID_OPERATION if they cannot be obtained.
     virtual status_t getEndpointUsage(uint32_t *usage) const = 0;
 
+    // Return whether the buffer is in the list of outstanding buffers.
+    bool isOutstandingBuffer(const camera3_stream_buffer& buffer) const;
+
     // Tracking for idle state
     wp<StatusTracker> mStatusTracker;
     // Status tracker component ID
@@ -483,9 +486,6 @@
 
     status_t        cancelPrepareLocked();
 
-    // Return whether the buffer is in the list of outstanding buffers.
-    bool isOutstandingBuffer(const camera3_stream_buffer& buffer);
-
     // Remove the buffer from the list of outstanding buffers.
     void removeOutstandingBuffer(const camera3_stream_buffer& buffer);
 
@@ -502,6 +502,7 @@
     // Number of buffers allocated on last prepare call.
     size_t mLastMaxCount;
 
+    mutable Mutex mOutstandingBuffersLock;
     // Outstanding buffers dequeued from the stream's buffer queue.
     List<buffer_handle_t> mOutstandingBuffers;
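
isOutstandingBuffer() is now const yet must take the new lock, which is why mOutstandingBuffersLock is declared mutable above. A minimal sketch of that idiom:

    #include <list>
    #include <mutex>

    class Tracker {
      public:
        // Logically read-only, so the method is const; the mutex still has
        // to be acquired, hence the mutable qualifier below.
        bool contains(int handle) const {
            std::lock_guard<std::mutex> lock(mLock);
            for (int h : mOutstanding) {
                if (h == handle) return true;
            }
            return false;
        }

        void add(int handle) {
            std::lock_guard<std::mutex> lock(mLock);
            mOutstanding.push_back(handle);
        }

      private:
        mutable std::mutex mLock;     // mutable: lockable from const methods
        std::list<int> mOutstanding;  // guarded by mLock
    };
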
 
diff --git a/services/mediacodec/MediaCodecService.h b/services/mediacodec/MediaCodecService.h
index d64debb..0d2c9d8 100644
--- a/services/mediacodec/MediaCodecService.h
+++ b/services/mediacodec/MediaCodecService.h
@@ -19,7 +19,7 @@
 
 #include <binder/BinderService.h>
 #include <media/IMediaCodecService.h>
-#include <include/OMX.h>
+#include <media/stagefright/omx/OMX.h>
 
 namespace android {
 
diff --git a/services/mediacodec/main_codecservice.cpp b/services/mediacodec/main_codecservice.cpp
index c59944a..79d6da5 100644
--- a/services/mediacodec/main_codecservice.cpp
+++ b/services/mediacodec/main_codecservice.cpp
@@ -32,8 +32,8 @@
 #include "minijail.h"
 
 #include <hidl/HidlTransportSupport.h>
-#include <omx/1.0/Omx.h>
-#include <omx/1.0/OmxStore.h>
+#include <media/stagefright/omx/1.0/Omx.h>
+#include <media/stagefright/omx/1.0/OmxStore.h>
 
 using namespace android;
 
diff --git a/services/mediacodec/seccomp_policy/mediacodec-arm.policy b/services/mediacodec/seccomp_policy/mediacodec-arm.policy
index 52658d1..cbd7fb9 100644
--- a/services/mediacodec/seccomp_policy/mediacodec-arm.policy
+++ b/services/mediacodec/seccomp_policy/mediacodec-arm.policy
@@ -12,6 +12,7 @@
 dup: 1
 ppoll: 1
 mmap2: 1
+getrandom: 1
 
 # mremap: Ensure |flags| are (MREMAP_MAYMOVE | MREMAP_FIXED) TODO: Once minijail
 # parser support for '<' is in this needs to be modified to also prevent
diff --git a/services/oboeservice/AAudioMixer.cpp b/services/oboeservice/AAudioMixer.cpp
index 43203d4..952aa82 100644
--- a/services/oboeservice/AAudioMixer.cpp
+++ b/services/oboeservice/AAudioMixer.cpp
@@ -18,9 +18,17 @@
 //#define LOG_NDEBUG 0
 #include <utils/Log.h>
 
+#define ATRACE_TAG ATRACE_TAG_AUDIO
+
 #include <cstring>
+#include <utils/Trace.h>
+
 #include "AAudioMixer.h"
 
+#ifndef AAUDIO_MIXER_ATRACE_ENABLED
+#define AAUDIO_MIXER_ATRACE_ENABLED    1
+#endif
+
 using android::WrappingBuffer;
 using android::FifoBuffer;
 using android::fifo_frames_t;
@@ -41,13 +49,28 @@
     memset(mOutputBuffer, 0, mBufferSizeInBytes);
 }
 
-bool AAudioMixer::mix(FifoBuffer *fifo, float volume) {
+bool AAudioMixer::mix(int trackIndex, FifoBuffer *fifo, float volume) {
     WrappingBuffer wrappingBuffer;
     float *destination = mOutputBuffer;
     fifo_frames_t framesLeft = mFramesPerBurst;
 
+#if AAUDIO_MIXER_ATRACE_ENABLED
+    ATRACE_BEGIN("aaMix");
+#endif /* AAUDIO_MIXER_ATRACE_ENABLED */
+
     // Gather the data from the client. May be in two parts.
-    fifo->getFullDataAvailable(&wrappingBuffer);
+    fifo_frames_t fullFrames = fifo->getFullDataAvailable(&wrappingBuffer);
+#if AAUDIO_MIXER_ATRACE_ENABLED
+    if (ATRACE_ENABLED()) {
+        char rdyText[] = "aaMixRdy#";
+        char letter = 'A' + (trackIndex % 26);
+        rdyText[sizeof(rdyText) - 2] = letter;
+        ATRACE_INT(rdyText, fullFrames);
+    }
+#else /* AAUDIO_MIXER_ATRACE_ENABLED */
+    (void) trackIndex;
+    (void) fullFrames;
+#endif /* AAUDIO_MIXER_ATRACE_ENABLED */
 
     // Mix data in one or two parts.
     int partIndex = 0;
@@ -65,11 +88,15 @@
         }
         partIndex++;
     }
-    fifo->getFifoControllerBase()->advanceReadIndex(mFramesPerBurst - framesLeft);
-    if (framesLeft > 0) {
-        //ALOGW("AAudioMixer::mix() UNDERFLOW by %d / %d frames ----- UNDERFLOW !!!!!!!!!!",
-        //      framesLeft, mFramesPerBurst);
-    }
+    // Always advance by one burst even if we do not have the data.
+    // Otherwise the stream timing will drift whenever there is an underflow.
+    // This actual underflow can then be detected by the client for XRun counting.
+    fifo->getFifoControllerBase()->advanceReadIndex(mFramesPerBurst);
+
+#if AAUDIO_MIXER_ATRACE_ENABLED
+    ATRACE_END();
+#endif /* AAUDIO_MIXER_ATRACE_ENABLED */
+
     return (framesLeft > 0); // did not get all the frames we needed, i.e. "underflow"
 }
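
The key change above: the read index always advances by a full burst, even when the FIFO held less, so the client's timing model never drifts; the shortfall is reported as an underflow (XRun) instead of silently stretching the stream. A small sketch of that policy:

    #include <algorithm>
    #include <cstdio>

    struct Fifo {
        long readIndex  = 0;
        long writeIndex = 0;
        long framesAvailable() const { return writeIndex - readIndex; }
    };

    // Returns true on underflow; the caller counts it as an XRun.
    bool mixOneBurst(Fifo& fifo, long framesPerBurst) {
        long have = std::min(fifo.framesAvailable(), framesPerBurst);
        // ... mix `have` frames into the output buffer here ...
        fifo.readIndex += framesPerBurst;  // full burst, not just `have`
        return have < framesPerBurst;
    }

    int main() {
        Fifo fifo;
        fifo.writeIndex = 96;                // only 96 frames queued
        bool xrun = mixOneBurst(fifo, 128);  // burst of 128
        printf("xrun=%d readIndex=%ld\n", xrun, fifo.readIndex);  // 1, 128
        return 0;
    }
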
 
diff --git a/services/oboeservice/AAudioMixer.h b/services/oboeservice/AAudioMixer.h
index 9155fec..a8090bc 100644
--- a/services/oboeservice/AAudioMixer.h
+++ b/services/oboeservice/AAudioMixer.h
@@ -37,7 +37,7 @@
      * @param volume
      * @return true if underflowed
      */
-    bool mix(android::FifoBuffer *fifo, float volume);
+    bool mix(int trackIndex, android::FifoBuffer *fifo, float volume);
 
     void mixPart(float *destination, float *source, int32_t numFrames, float volume);
 
diff --git a/services/oboeservice/AAudioServiceEndpoint.cpp b/services/oboeservice/AAudioServiceEndpoint.cpp
index 0f863fe..81f1d1b 100644
--- a/services/oboeservice/AAudioServiceEndpoint.cpp
+++ b/services/oboeservice/AAudioServiceEndpoint.cpp
@@ -64,8 +64,9 @@
     }
 
     result << "    Registered Streams:" << "\n";
+    result << AAudioServiceStreamShared::dumpHeader() << "\n";
     for (sp<AAudioServiceStreamShared> sharedStream : mRegisteredStreams) {
-        result << sharedStream->dump();
+        result << sharedStream->dump() << "\n";
     }
 
     if (isLocked) {
@@ -177,7 +178,10 @@
             configuration.getSamplesPerFrame() != mStreamInternal->getSamplesPerFrame()) {
         return false;
     }
-
     return true;
 }
 
+
+aaudio_result_t AAudioServiceEndpoint::getTimestamp(int64_t *positionFrames, int64_t *timeNanos) {
+    return mStreamInternal->getTimestamp(CLOCK_MONOTONIC, positionFrames, timeNanos);
+}
diff --git a/services/oboeservice/AAudioServiceEndpoint.h b/services/oboeservice/AAudioServiceEndpoint.h
index e40a670..603d497 100644
--- a/services/oboeservice/AAudioServiceEndpoint.h
+++ b/services/oboeservice/AAudioServiceEndpoint.h
@@ -69,6 +69,8 @@
         mReferenceCount = count;
     }
 
+    aaudio_result_t getTimestamp(int64_t *positionFrames, int64_t *timeNanos);
+
     bool matches(const AAudioStreamConfiguration& configuration);
 
     virtual AudioStreamInternal *getStreamInternal() = 0;
diff --git a/services/oboeservice/AAudioServiceEndpointCapture.cpp b/services/oboeservice/AAudioServiceEndpointCapture.cpp
index 6a37330..6504cc1 100644
--- a/services/oboeservice/AAudioServiceEndpointCapture.cpp
+++ b/services/oboeservice/AAudioServiceEndpointCapture.cpp
@@ -62,6 +62,9 @@
 
     // result might be a frame count
     while (mCallbackEnabled.load() && getStreamInternal()->isActive() && (result >= 0)) {
+
+        int64_t mmapFramesRead = getStreamInternal()->getFramesRead();
+
         // Read audio data from stream using a blocking read.
         result = getStreamInternal()->read(mDistributionBuffer, getFramesPerBurst(), timeoutNanos);
         if (result == AAUDIO_ERROR_DISCONNECTED) {
@@ -74,18 +77,32 @@
         }
 
         // Distribute data to each active stream.
-        { // use lock guard
+        { // brackets are for lock_guard
+
             std::lock_guard <std::mutex> lock(mLockStreams);
-            for (sp<AAudioServiceStreamShared> sharedStream : mRegisteredStreams) {
-                if (sharedStream->isRunning()) {
-                    FifoBuffer *fifo = sharedStream->getDataFifoBuffer();
+            for (sp<AAudioServiceStreamShared> clientStream : mRegisteredStreams) {
+                if (clientStream->isRunning()) {
+                    FifoBuffer *fifo = clientStream->getDataFifoBuffer();
+
+                    // Determine offset between framePosition in client's stream vs the underlying
+                    // MMAP stream.
+                    int64_t clientFramesWritten = fifo->getWriteCounter();
+                    // There are two indices that refer to the same frame.
+                    int64_t positionOffset = mmapFramesRead - clientFramesWritten;
+                    clientStream->setTimestampPositionOffset(positionOffset);
+
                     if (fifo->getFifoControllerBase()->getEmptyFramesAvailable() <
                         getFramesPerBurst()) {
                         underflowCount++;
                     } else {
                         fifo->write(mDistributionBuffer, getFramesPerBurst());
                     }
-                    sharedStream->markTransferTime(AudioClock::getNanoseconds());
+
+                    // This timestamp represents the completion of data being written into the
+                    // client buffer. It is sent to the client and used in the timing model
+                    // to decide when data will be available to read.
+                    Timestamp timestamp(fifo->getWriteCounter(), AudioClock::getNanoseconds());
+                    clientStream->markTransferTime(timestamp);
                 }
             }
         }
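
The offset bookkeeping above pins down the relationship between the two frame counters: at the moment a burst is distributed, the MMAP stream's read counter and the client's write counter refer to the same frame, so their difference is a fixed offset the service can use to translate MMAP positions into client positions. A worked sketch:

    #include <cstdint>
    #include <cstdio>

    // At distribution time these two counters name the same frame.
    int64_t positionOffset(int64_t mmapFramesRead, int64_t clientFramesWritten) {
        return mmapFramesRead - clientFramesWritten;
    }

    // Map a later MMAP-side position into the client's frame space.
    int64_t toClientPosition(int64_t mmapPosition, int64_t offset) {
        return mmapPosition - offset;
    }

    int main() {
        int64_t offset = positionOffset(/*mmapFramesRead=*/ 100480,
                                        /*clientFramesWritten=*/ 4160);
        printf("offset=%lld client=%lld\n",
               (long long) offset,                            // 96320
               (long long) toClientPosition(101000, offset)); // 4680
        return 0;
    }
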
diff --git a/services/oboeservice/AAudioServiceEndpointPlay.cpp b/services/oboeservice/AAudioServiceEndpointPlay.cpp
index e609ab5..8b1cc9f 100644
--- a/services/oboeservice/AAudioServiceEndpointPlay.cpp
+++ b/services/oboeservice/AAudioServiceEndpointPlay.cpp
@@ -65,7 +65,6 @@
 
 // Mix data from each application stream and write result to the shared MMAP stream.
 void *AAudioServiceEndpointPlay::callbackLoop() {
-    int32_t underflowCount = 0;
     aaudio_result_t result = AAUDIO_OK;
     int64_t timeoutNanos = getStreamInternal()->calculateReasonableTimeout();
 
@@ -73,17 +72,35 @@
     while (mCallbackEnabled.load() && getStreamInternal()->isActive() && (result >= 0)) {
         // Mix data from each active stream.
         mMixer.clear();
-        { // use lock guard
+        { // brackets are for lock_guard
+            int index = 0;
+            int64_t mmapFramesWritten = getStreamInternal()->getFramesWritten();
+
             std::lock_guard <std::mutex> lock(mLockStreams);
-            for (sp<AAudioServiceStreamShared> sharedStream : mRegisteredStreams) {
-                if (sharedStream->isRunning()) {
-                    FifoBuffer *fifo = sharedStream->getDataFifoBuffer();
+            for (sp<AAudioServiceStreamShared> clientStream : mRegisteredStreams) {
+                if (clientStream->isRunning()) {
+                    FifoBuffer *fifo = clientStream->getDataFifoBuffer();
+                    // Determine offset between framePosition in client's stream vs the underlying
+                    // MMAP stream.
+                    int64_t clientFramesRead = fifo->getReadCounter();
+                    // These two indices refer to the same frame.
+                    int64_t positionOffset = mmapFramesWritten - clientFramesRead;
+                    clientStream->setTimestampPositionOffset(positionOffset);
+
                     float volume = 1.0; // to match legacy volume
-                    bool underflowed = mMixer.mix(fifo, volume);
-                    underflowCount += underflowed ? 1 : 0;
-                    // TODO log underflows in each stream
-                    sharedStream->markTransferTime(AudioClock::getNanoseconds());
+                    bool underflowed = mMixer.mix(index, fifo, volume);
+
+                    // This timestamp represents the completion of data being read out of the
+                    // client buffer. It is sent to the client and used in the timing model
+                    // to decide when the client has room to write more data.
+                    Timestamp timestamp(fifo->getReadCounter(), AudioClock::getNanoseconds());
+                    clientStream->markTransferTime(timestamp);
+
+                    if (underflowed) {
+                        clientStream->incrementXRunCount();
+                    }
                 }
+                index++;
             }
         }
 
@@ -100,8 +117,5 @@
         }
     }
 
-    ALOGW_IF((underflowCount > 0),
-             "AAudioServiceEndpointPlay(): callbackLoop() had %d underflows", underflowCount);
-
     return NULL; // TODO review
 }
diff --git a/services/oboeservice/AAudioServiceStreamBase.cpp b/services/oboeservice/AAudioServiceStreamBase.cpp
index 5f7d179..e5f916c 100644
--- a/services/oboeservice/AAudioServiceStreamBase.cpp
+++ b/services/oboeservice/AAudioServiceStreamBase.cpp
@@ -18,6 +18,8 @@
 //#define LOG_NDEBUG 0
 #include <utils/Log.h>
 
+#include <iomanip>
+#include <iostream>
 #include <mutex>
 
 #include "binding/IAAudioService.h"
@@ -37,7 +39,8 @@
 
 AAudioServiceStreamBase::AAudioServiceStreamBase()
         : mUpMessageQueue(nullptr)
-        , mAAudioThread() {
+        , mAAudioThread()
+        , mAtomicTimestamp() {
     mMmapClient.clientUid = -1;
     mMmapClient.clientPid = -1;
     mMmapClient.packageName = String16("");
@@ -53,16 +56,22 @@
                         "service stream still open, state = %d", mState);
 }
 
+std::string AAudioServiceStreamBase::dumpHeader() {
+    return std::string("    T   Handle   UId Run State Format Burst Chan Capacity");
+}
+
 std::string AAudioServiceStreamBase::dump() const {
     std::stringstream result;
 
-    result << "      -------- handle = 0x" << std::hex << mHandle << std::dec << "\n";
-    result << "      state          = " << AAudio_convertStreamStateToText(mState) << "\n";
-    result << "      format         = " << mAudioFormat << "\n";
-    result << "      framesPerBurst = " << mFramesPerBurst << "\n";
-    result << "      channelCount   = " << mSamplesPerFrame << "\n";
-    result << "      capacityFrames = " << mCapacityInFrames << "\n";
-    result << "      owner uid      = " << mMmapClient.clientUid << "\n";
+    result << "    0x" << std::setfill('0') << std::setw(8) << std::hex << mHandle
+           << std::dec << std::setfill(' ');
+    result << std::setw(6) << mMmapClient.clientUid;
+    result << std::setw(4) << (isRunning() ? "yes" : " no");
+    result << std::setw(6) << mState;
+    result << std::setw(7) << mAudioFormat;
+    result << std::setw(6) << mFramesPerBurst;
+    result << std::setw(5) << mSamplesPerFrame;
+    result << std::setw(9) << mCapacityInFrames;
 
     return result.str();
 }
@@ -211,15 +220,25 @@
 
 aaudio_result_t AAudioServiceStreamBase::sendCurrentTimestamp() {
     AAudioServiceMessage command;
+    // Send a timestamp for the clock model.
     aaudio_result_t result = getFreeRunningPosition(&command.timestamp.position,
                                                     &command.timestamp.timestamp);
     if (result == AAUDIO_OK) {
-    //    ALOGD("sendCurrentTimestamp(): position = %lld, nanos = %lld",
-    //          (long long) command.timestamp.position,
-    //          (long long) command.timestamp.timestamp);
-        command.what = AAudioServiceMessage::code::TIMESTAMP;
+        command.what = AAudioServiceMessage::code::TIMESTAMP_SERVICE;
         result = writeUpMessageQueue(&command);
-    } else if (result == AAUDIO_ERROR_UNAVAILABLE) {
+
+        if (result == AAUDIO_OK) {
+            // Send a hardware timestamp for presentation time.
+            result = getHardwareTimestamp(&command.timestamp.position,
+                                          &command.timestamp.timestamp);
+            if (result == AAUDIO_OK) {
+                command.what = AAudioServiceMessage::code::TIMESTAMP_HARDWARE;
+                result = writeUpMessageQueue(&command);
+            }
+        }
+    }
+
+    if (result == AAUDIO_ERROR_UNAVAILABLE) {
         result = AAUDIO_OK; // just not available yet, try again later
     }
     return result;
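
sendCurrentTimestamp() now emits two messages per call: TIMESTAMP_SERVICE carries the free-running position that drives the client's clock model, and TIMESTAMP_HARDWARE carries the presentation-time estimate. A sketch of the client-side dispatch, where MsgCode and TimestampMsg are illustrative stand-ins for the real types in binding/AAudioServiceMessage.h:

    #include <cstdint>
    #include <cstdio>

    // Illustrative stand-ins; the real message types live in binding/AAudioServiceMessage.h.
    enum class MsgCode { TIMESTAMP_SERVICE, TIMESTAMP_HARDWARE };
    struct TimestampMsg { MsgCode what; int64_t position; int64_t timeNanos; };

    void handleTimestamp(const TimestampMsg &msg) {
        switch (msg.what) {
        case MsgCode::TIMESTAMP_SERVICE:
            // Feeds the clock model that predicts when the client buffer has room.
            printf("service: frame %lld at %lld ns\n",
                   (long long) msg.position, (long long) msg.timeNanos);
            break;
        case MsgCode::TIMESTAMP_HARDWARE:
            // Surfaced to the app as the presentation time of the given frame.
            printf("hardware: frame %lld at %lld ns\n",
                   (long long) msg.position, (long long) msg.timeNanos);
            break;
        }
    }

    int main() {
        handleTimestamp({MsgCode::TIMESTAMP_SERVICE, 48000, 1000000000LL});
        handleTimestamp({MsgCode::TIMESTAMP_HARDWARE, 48000, 1003000000LL});
        return 0;
    }
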
diff --git a/services/oboeservice/AAudioServiceStreamBase.h b/services/oboeservice/AAudioServiceStreamBase.h
index cebefec..2f94614 100644
--- a/services/oboeservice/AAudioServiceStreamBase.h
+++ b/services/oboeservice/AAudioServiceStreamBase.h
@@ -20,6 +20,7 @@
 #include <assert.h>
 #include <mutex>
 
+#include <media/AudioClient.h>
 #include <utils/RefBase.h>
 
 #include "fifo/FifoBuffer.h"
@@ -27,7 +28,7 @@
 #include "binding/AudioEndpointParcelable.h"
 #include "binding/AAudioServiceMessage.h"
 #include "utility/AAudioUtilities.h"
-#include <media/AudioClient.h>
+#include "utility/AudioClock.h"
 
 #include "SharedRingBuffer.h"
 #include "AAudioThread.h"
@@ -53,7 +54,10 @@
         ILLEGAL_THREAD_ID = 0
     };
 
-    std::string dump() const;
+    static std::string dumpHeader();
+
+    // does not include EOL
+    virtual std::string dump() const;
 
     // -------------------------------------------------------------------
     /**
@@ -170,6 +174,8 @@
      */
     virtual aaudio_result_t getFreeRunningPosition(int64_t *positionFrames, int64_t *timeNanos) = 0;
 
+    virtual aaudio_result_t getHardwareTimestamp(int64_t *positionFrames, int64_t *timeNanos) = 0;
+
     virtual aaudio_result_t getDownDataDescription(AudioEndpointParcelable &parcelable) = 0;
 
     aaudio_stream_state_t   mState = AAUDIO_STREAM_STATE_UNINITIALIZED;
@@ -191,6 +197,8 @@
     android::AudioClient    mMmapClient;
     audio_port_handle_t     mClientHandle = AUDIO_PORT_HANDLE_NONE;
 
+    SimpleDoubleBuffer<Timestamp>  mAtomicTimestamp;
+
 private:
     aaudio_handle_t         mHandle = -1;
 };
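
mAtomicTimestamp is a SimpleDoubleBuffer<Timestamp>, which lets the real-time thread publish a (position, nanoseconds) pair that readers can never observe half-updated. A minimal sketch of the general single-writer double-buffer technique, assuming the real utility class differs in detail:

    #include <atomic>
    #include <cstdint>

    // Minimal sketch of a single-writer, torn-read-free double buffer;
    // the real SimpleDoubleBuffer<T> may differ in detail.
    template <typename T>
    class DoubleBufferSketch {
    public:
        // Called only from the (real-time) writer thread.
        void write(const T &value) {
            int64_t counter = mCounter.load(std::memory_order_relaxed);
            mValues[(counter + 1) & 1] = value;                      // fill the spare slot
            mCounter.store(counter + 1, std::memory_order_release);  // then publish it
        }
        // Readers retry if a publish raced the copy, so they never see a torn T.
        T read() const {
            T result;
            int64_t before, after;
            do {
                before = mCounter.load(std::memory_order_acquire);
                result = mValues[before & 1];
                after = mCounter.load(std::memory_order_acquire);
            } while (before != after);
            return result;
        }
        bool isValid() const { return mCounter.load(std::memory_order_acquire) > 0; }
    private:
        T mValues[2];
        std::atomic<int64_t> mCounter{0};
    };

    int main() {
        DoubleBufferSketch<int64_t> buf;
        buf.write(42);
        return (buf.isValid() && buf.read() == 42) ? 0 : 1;
    }
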
diff --git a/services/oboeservice/AAudioServiceStreamMMAP.cpp b/services/oboeservice/AAudioServiceStreamMMAP.cpp
index afa2ff0..970d734 100644
--- a/services/oboeservice/AAudioServiceStreamMMAP.cpp
+++ b/services/oboeservice/AAudioServiceStreamMMAP.cpp
@@ -37,6 +37,11 @@
 #define AAUDIO_BUFFER_CAPACITY_MIN    4 * 512
 #define AAUDIO_SAMPLE_RATE_DEFAULT    48000
 
+// This is an estimate of the time difference between the hardware time and the MMAP timestamp.
+// TODO Get presentation timestamps from the HAL instead of using these estimates.
+#define OUTPUT_ESTIMATED_HARDWARE_OFFSET_NANOS  (3 * AAUDIO_NANOS_PER_MILLISECOND)
+#define INPUT_ESTIMATED_HARDWARE_OFFSET_NANOS   (-1 * AAUDIO_NANOS_PER_MILLISECOND)
+
 /**
  * Service Stream that uses an MMAP buffer.
  */
@@ -113,10 +118,14 @@
         config.channel_mask = (aaudioSamplesPerFrame == AAUDIO_UNSPECIFIED)
                             ? AUDIO_CHANNEL_OUT_STEREO
                             : audio_channel_out_mask_from_count(aaudioSamplesPerFrame);
+        mHardwareTimeOffsetNanos = OUTPUT_ESTIMATED_HARDWARE_OFFSET_NANOS; // frames reach the DAC later
+
     } else if (direction == AAUDIO_DIRECTION_INPUT) {
         config.channel_mask =  (aaudioSamplesPerFrame == AAUDIO_UNSPECIFIED)
                             ? AUDIO_CHANNEL_IN_STEREO
                             : audio_channel_in_mask_from_count(aaudioSamplesPerFrame);
+        mHardwareTimeOffsetNanos = INPUT_ESTIMATED_HARDWARE_OFFSET_NANOS; // frames were captured at the ADC earlier
+
     } else {
         ALOGE("openMmapStream - invalid direction = %d", direction);
         return AAUDIO_ERROR_ILLEGAL_ARGUMENT;
@@ -154,7 +163,7 @@
               status);
         return AAUDIO_ERROR_UNAVAILABLE;
     } else {
-        ALOGD("createMmapBuffer status %d, buffer_size, %d burst_size %d"
+        ALOGD("createMmapBuffer status = %d, buffer_size = %d, burst_size %d"
                 ", Sharable FD: %s",
               status,
               abs(mMmapBufferinfo.buffer_size_frames),
@@ -289,6 +298,7 @@
     return AAudioConvert_androidToAAudioResult(mMmapStream->stop(clientHandle));
 }
 
+// Get free-running DSP or DMA hardware position from the HAL.
 aaudio_result_t AAudioServiceStreamMMAP::getFreeRunningPosition(int64_t *positionFrames,
                                                                 int64_t *timeNanos) {
     struct audio_mmap_position position;
@@ -305,12 +315,29 @@
         disconnect();
     } else {
         mFramesRead.update32(position.position_frames);
-        *positionFrames = mFramesRead.get();
-        *timeNanos = position.time_nanoseconds;
+
+        Timestamp timestamp(mFramesRead.get(), position.time_nanoseconds);
+        mAtomicTimestamp.write(timestamp);
+        *positionFrames = timestamp.getPosition();
+        *timeNanos = timestamp.getNanoseconds();
     }
     return result;
 }
 
+// Get timestamp that was written by getFreeRunningPosition()
+aaudio_result_t AAudioServiceStreamMMAP::getHardwareTimestamp(int64_t *positionFrames,
+                                                                int64_t *timeNanos) {
+    // TODO Get presentation timestamp from the HAL
+    if (mAtomicTimestamp.isValid()) {
+        Timestamp timestamp = mAtomicTimestamp.read();
+        *positionFrames = timestamp.getPosition();
+        *timeNanos = timestamp.getNanoseconds() + mHardwareTimeOffsetNanos;
+        return AAUDIO_OK;
+    } else {
+        return AAUDIO_ERROR_UNAVAILABLE;
+    }
+}
+
 void AAudioServiceStreamMMAP::onTearDown() {
     ALOGD("AAudioServiceStreamMMAP::onTearDown() called");
     disconnect();
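
getFreeRunningPosition() feeds the HAL's 32-bit frame counter through MonotonicCounter::update32() to get a 64-bit position that survives wrap-around. A sketch of the standard extension technique (the real MonotonicCounter may differ in detail):

    #include <cstdint>
    #include <cstdio>

    // Extend a wrapping 32-bit hardware counter to 64 bits. Correct as long as
    // update32() is called at least once per half wrap (2^31 frames, roughly
    // 12 hours at 48 kHz).
    class MonotonicCounterSketch {
    public:
        void update32(int32_t counter32) {
            // Unsigned subtraction gives the true delta even across a 32-bit wrap.
            int32_t delta = (int32_t) ((uint32_t) counter32 - (uint32_t) mPrevious32);
            mValue64 += delta;
            mPrevious32 = counter32;
        }
        int64_t get() const { return mValue64; }
    private:
        int64_t mValue64 = 0;
        int32_t mPrevious32 = 0;
    };

    int main() {
        MonotonicCounterSketch frames;
        frames.update32(0x7FFFFF00);            // near the top of the 32-bit range
        frames.update32((int32_t) 0x80000100);  // wrapped; the delta is still +512
        printf("64-bit position = %lld\n", (long long) frames.get());
        return 0;
    }
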
diff --git a/services/oboeservice/AAudioServiceStreamMMAP.h b/services/oboeservice/AAudioServiceStreamMMAP.h
index 533e5a8..e6f8fad 100644
--- a/services/oboeservice/AAudioServiceStreamMMAP.h
+++ b/services/oboeservice/AAudioServiceStreamMMAP.h
@@ -100,6 +100,8 @@
     aaudio_result_t getDownDataDescription(AudioEndpointParcelable &parcelable) override;
 
     aaudio_result_t getFreeRunningPosition(int64_t *positionFrames, int64_t *timeNanos) override;
+    aaudio_result_t getHardwareTimestamp(int64_t *positionFrames,
+                                         int64_t *timeNanos) override;
 
 private:
     // This proxy class was needed to prevent a crash in AudioFlinger
@@ -132,6 +134,7 @@
     MonotonicCounter                    mFramesRead;
     int32_t                             mPreviousFrameCounter = 0;   // from HAL
     int                                 mAudioDataFileDescriptor = -1;
+    int64_t                             mHardwareTimeOffsetNanos = 0; // TODO get from HAL
 
     // Interface to the AudioFlinger MMAP support.
     android::sp<android::MmapStreamInterface> mMmapStream;
diff --git a/services/oboeservice/AAudioServiceStreamShared.cpp b/services/oboeservice/AAudioServiceStreamShared.cpp
index 5654113..d648c6d 100644
--- a/services/oboeservice/AAudioServiceStreamShared.cpp
+++ b/services/oboeservice/AAudioServiceStreamShared.cpp
@@ -18,6 +18,8 @@
 //#define LOG_NDEBUG 0
 #include <utils/Log.h>
 
+#include <iomanip>
+#include <iostream>
 #include <mutex>
 
 #include <aaudio/AAudio.h>
@@ -41,9 +43,33 @@
 
 AAudioServiceStreamShared::AAudioServiceStreamShared(AAudioService &audioService)
     : mAudioService(audioService)
+    , mTimestampPositionOffset(0)
+    , mXRunCount(0)
     {
 }
 
+std::string AAudioServiceStreamShared::dumpHeader() {
+    std::stringstream result;
+    result << AAudioServiceStreamBase::dumpHeader();
+    result << "    Write#     Read#   Avail   XRuns";
+    return result.str();
+}
+
+std::string AAudioServiceStreamShared::dump() const {
+    std::stringstream result;
+    result << AAudioServiceStreamBase::dump();
+
+    auto fifo = mAudioDataQueue->getFifoBuffer();
+    int64_t readCounter = fifo->getReadCounter();
+    int64_t writeCounter = fifo->getWriteCounter();
+    result << std::setw(10) << writeCounter;
+    result << std::setw(10) << readCounter;
+    result << std::setw(8) << (writeCounter - readCounter);
+    result << std::setw(8) << getXRunCount();
+
+    return result.str();
+}
+
 int32_t AAudioServiceStreamShared::calculateBufferCapacity(int32_t requestedCapacityFrames,
                                                            int32_t framesPerBurst) {
 
@@ -196,6 +222,7 @@
     }
     AAudioServiceEndpoint *endpoint = mServiceEndpoint;
     if (endpoint == nullptr) {
+        ALOGE("AAudioServiceStreamShared::start() missing endpoint");
         return AAUDIO_ERROR_INVALID_STATE;
     }
     // For output streams, this will add the stream to the mixer.
@@ -223,6 +250,7 @@
     }
     AAudioServiceEndpoint *endpoint = mServiceEndpoint;
     if (endpoint == nullptr) {
+        ALOGE("AAudioServiceStreamShared::pause() missing endpoint");
         return AAUDIO_ERROR_INVALID_STATE;
     }
     endpoint->getStreamInternal()->stopClient(mClientHandle);
@@ -240,6 +268,7 @@
     }
     AAudioServiceEndpoint *endpoint = mServiceEndpoint;
     if (endpoint == nullptr) {
+        ALOGE("AAudioServiceStreamShared::stop() missing endpoint");
         return AAUDIO_ERROR_INVALID_STATE;
     }
     endpoint->getStreamInternal()->stopClient(mClientHandle);
@@ -259,6 +288,7 @@
 aaudio_result_t AAudioServiceStreamShared::flush()  {
     AAudioServiceEndpoint *endpoint = mServiceEndpoint;
     if (endpoint == nullptr) {
+        ALOGE("AAudioServiceStreamShared::flush() missing endpoint");
         return AAUDIO_ERROR_INVALID_STATE;
     }
     if (mState != AAUDIO_STREAM_STATE_PAUSED) {
@@ -307,15 +337,34 @@
     return AAUDIO_OK;
 }
 
-void AAudioServiceStreamShared::markTransferTime(int64_t nanoseconds) {
-    mMarkedPosition = mAudioDataQueue->getFifoBuffer()->getReadCounter();
-    mMarkedTime = nanoseconds;
+void AAudioServiceStreamShared::markTransferTime(Timestamp &timestamp) {
+    mAtomicTimestamp.write(timestamp);
 }
 
+// Get the timestamp that was written by the real-time service thread, e.g. the mixer.
 aaudio_result_t AAudioServiceStreamShared::getFreeRunningPosition(int64_t *positionFrames,
                                                                 int64_t *timeNanos) {
-    // TODO get these two numbers as an atomic pair
-    *positionFrames = mMarkedPosition;
-    *timeNanos = mMarkedTime;
-    return AAUDIO_OK;
+    if (mAtomicTimestamp.isValid()) {
+        Timestamp timestamp = mAtomicTimestamp.read();
+        *positionFrames = timestamp.getPosition();
+        *timeNanos = timestamp.getNanoseconds();
+        return AAUDIO_OK;
+    } else {
+        return AAUDIO_ERROR_UNAVAILABLE;
+    }
+}
+
+// Get timestamp from the lower-level service.
+aaudio_result_t AAudioServiceStreamShared::getHardwareTimestamp(int64_t *positionFrames,
+                                                                int64_t *timeNanos) {
+    AAudioServiceEndpoint *endpoint = mServiceEndpoint;
+    if (endpoint == nullptr) {
+        ALOGE("AAudioServiceStreamShared::getHardwareTimestamp() missing endpoint");
+        return AAUDIO_ERROR_INVALID_STATE;
+    }
+    aaudio_result_t result = endpoint->getTimestamp(positionFrames, timeNanos);
+    if (result == AAUDIO_OK) {
+        *positionFrames -= mTimestampPositionOffset.load(); // Offset from the shared MMAP stream
+    }
+    return result;
 }
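
The Write#/Read#/Avail/XRuns columns in dump() come straight from the FIFO counters and the atomic xrun counter that the mixer bumps on underflow. A small illustration of that accounting (the counter values and the underflow test are illustrative; the real detection happens inside the mixer's mix() call):

    #include <atomic>
    #include <cstdint>
    #include <cstdio>

    int main() {
        int64_t writeCounter = 12000;  // frames the client has written
        int64_t readCounter  = 11520;  // frames the mixer has read
        std::atomic<int32_t> xRunCount{0};

        int64_t available = writeCounter - readCounter;   // frames ready to mix
        int64_t framesPerBurst = 960;
        if (available < framesPerBurst) {
            // This burst would underflow; count it against the stream.
            xRunCount++;
        }
        printf("avail = %lld, xruns = %d\n", (long long) available, xRunCount.load());
        return 0;
    }
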
diff --git a/services/oboeservice/AAudioServiceStreamShared.h b/services/oboeservice/AAudioServiceStreamShared.h
index 6b67337..36a56b8 100644
--- a/services/oboeservice/AAudioServiceStreamShared.h
+++ b/services/oboeservice/AAudioServiceStreamShared.h
@@ -46,6 +46,10 @@
     AAudioServiceStreamShared(android::AAudioService &aAudioService);
     virtual ~AAudioServiceStreamShared() = default;
 
+    static std::string dumpHeader();
+
+    std::string dump() const override;
+
     aaudio_result_t open(const aaudio::AAudioStreamRequest &request,
                          aaudio::AAudioStreamConfiguration &configurationOutput) override;
 
@@ -85,7 +89,19 @@
     /* Keep a record of when a buffer transfer completed.
      * This allows for a more accurate timing model.
      */
-    void markTransferTime(int64_t nanoseconds);
+    void markTransferTime(Timestamp &timestamp);
+
+    void setTimestampPositionOffset(int64_t deltaFrames) {
+        mTimestampPositionOffset.store(deltaFrames);
+    }
+
+    void incrementXRunCount() {
+        mXRunCount++;
+    }
+
+    int32_t getXRunCount() const {
+        return mXRunCount.load();
+    }
 
 protected:
 
@@ -93,6 +109,9 @@
 
     aaudio_result_t getFreeRunningPosition(int64_t *positionFrames, int64_t *timeNanos) override;
 
+    aaudio_result_t getHardwareTimestamp(int64_t *positionFrames,
+                                         int64_t *timeNanos) override;
+
     /**
      * @param requestedCapacityFrames
      * @param framesPerBurst
@@ -106,8 +125,8 @@
     AAudioServiceEndpoint   *mServiceEndpoint = nullptr;
     SharedRingBuffer        *mAudioDataQueue = nullptr;
 
-    int64_t                  mMarkedPosition = 0;
-    int64_t                  mMarkedTime = 0;
+    std::atomic<int64_t>     mTimestampPositionOffset;
+    std::atomic<int32_t>     mXRunCount;
 };
 
 } /* namespace aaudio */
diff --git a/services/oboeservice/AAudioThread.cpp b/services/oboeservice/AAudioThread.cpp
index ebb50f8..c6fb57d 100644
--- a/services/oboeservice/AAudioThread.cpp
+++ b/services/oboeservice/AAudioThread.cpp
@@ -53,7 +53,7 @@
 
 aaudio_result_t AAudioThread::start(Runnable *runnable) {
     if (mHasThread) {
-        ALOGE("AAudioThread::start() - mHasThread.load() already true");
+        ALOGE("AAudioThread::start() - mHasThread already true");
         return AAUDIO_ERROR_INVALID_STATE;
     }
     // mRunnable will be read by the new thread when it starts.
@@ -71,6 +71,7 @@
 
 aaudio_result_t AAudioThread::stop() {
     if (!mHasThread) {
+        ALOGE("AAudioThread::stop() but no thread running");
         return AAUDIO_ERROR_INVALID_STATE;
     }
     int err = pthread_join(mThread, nullptr);