Merge "MPEG4Writer: Save container metadata during switching file output" into oc-mr1-dev
diff --git a/camera/ndk/include/camera/NdkCameraMetadataTags.h b/camera/ndk/include/camera/NdkCameraMetadataTags.h
index 8b76cdf..629d75a 100644
--- a/camera/ndk/include/camera/NdkCameraMetadataTags.h
+++ b/camera/ndk/include/camera/NdkCameraMetadataTags.h
@@ -115,11 +115,13 @@
      * <p>The mode control selects how the image data is converted from the
      * sensor's native color into linear sRGB color.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: byte (acamera_metadata_enum_android_color_correction_mode_t)</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
      *   <li>ACaptureRequest</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>When auto-white balance (AWB) is enabled with ACAMERA_CONTROL_AWB_MODE, this
      * control is overridden by the AWB routine. When AWB is disabled, the
@@ -164,17 +166,19 @@
      * @see ACAMERA_COLOR_CORRECTION_TRANSFORM
      * @see ACAMERA_CONTROL_AWB_MODE
      */
-    ACAMERA_COLOR_CORRECTION_MODE =                             // byte (enum)
+    ACAMERA_COLOR_CORRECTION_MODE =                             // byte (acamera_metadata_enum_android_color_correction_mode_t)
             ACAMERA_COLOR_CORRECTION_START,
     /**
      * <p>A color transform matrix to use to transform
      * from sensor RGB color space to output linear sRGB color space.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: rational[3*3]</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
      *   <li>ACaptureRequest</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>This matrix is either set by the camera device when the request
      * ACAMERA_COLOR_CORRECTION_MODE is not TRANSFORM_MATRIX, or
@@ -196,11 +200,13 @@
      * <p>Gains applying to Bayer raw color channels for
      * white-balance.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: float[4]</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
      *   <li>ACaptureRequest</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>These per-channel gains are either set by the camera device
      * when the request ACAMERA_COLOR_CORRECTION_MODE is not
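
As a usage sketch (not part of the header itself): a minimal way to drive these two controls from the NDK. The function name, `request` handle (assumed to come from ACameraDevice_createCaptureRequest), and gain values are illustrative only.

    #include <camera/NdkCameraMetadataTags.h>
    #include <camera/NdkCaptureRequest.h>

    // Illustrative: take AWB out of the loop so manual settings are honored.
    static void setManualWhiteBalance(ACaptureRequest* request) {
        uint8_t awbMode = ACAMERA_CONTROL_AWB_MODE_OFF;
        ACaptureRequest_setEntry_u8(request, ACAMERA_CONTROL_AWB_MODE, 1, &awbMode);

        uint8_t ccMode = ACAMERA_COLOR_CORRECTION_MODE_TRANSFORM_MATRIX;
        ACaptureRequest_setEntry_u8(request, ACAMERA_COLOR_CORRECTION_MODE, 1, &ccMode);

        // float[4] gains, ordered [R, G_even, G_odd, B]; values are made up.
        float gains[4] = {2.0f, 1.0f, 1.0f, 1.8f};
        ACaptureRequest_setEntry_float(request, ACAMERA_COLOR_CORRECTION_GAINS, 4, gains);
    }
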
@@ -221,11 +227,13 @@
     /**
      * <p>Mode of operation for the chromatic aberration correction algorithm.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: byte (acamera_metadata_enum_android_color_correction_aberration_mode_t)</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
      *   <li>ACaptureRequest</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>Chromatic (color) aberration is caused by the fact that different wavelengths of light
     * cannot focus on the same point after exiting the lens. This metadata defines
@@ -239,7 +247,7 @@
      * applying aberration correction.</p>
      * <p>LEGACY devices will always be in FAST mode.</p>
      */
-    ACAMERA_COLOR_CORRECTION_ABERRATION_MODE =                  // byte (enum)
+    ACAMERA_COLOR_CORRECTION_ABERRATION_MODE =                  // byte (acamera_metadata_enum_android_color_correction_aberration_mode_t)
             ACAMERA_COLOR_CORRECTION_START + 3,
     /**
      * <p>List of aberration correction modes for ACAMERA_COLOR_CORRECTION_ABERRATION_MODE that are
@@ -247,10 +255,12 @@
      *
      * @see ACAMERA_COLOR_CORRECTION_ABERRATION_MODE
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: byte[n]</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>This key lists the valid modes for ACAMERA_COLOR_CORRECTION_ABERRATION_MODE.  If no
      * aberration correction modes are available for a device, this list will solely include
@@ -269,11 +279,13 @@
      * <p>The desired setting for the camera device's auto-exposure
      * algorithm's antibanding compensation.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: byte (acamera_metadata_enum_android_control_ae_antibanding_mode_t)</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
      *   <li>ACaptureRequest</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>Some kinds of lighting fixtures, such as some fluorescent
      * lights, flicker at the rate of the power supply frequency
@@ -310,17 +322,19 @@
      * @see ACAMERA_CONTROL_MODE
      * @see ACAMERA_STATISTICS_SCENE_FLICKER
      */
-    ACAMERA_CONTROL_AE_ANTIBANDING_MODE =                       // byte (enum)
+    ACAMERA_CONTROL_AE_ANTIBANDING_MODE =                       // byte (acamera_metadata_enum_android_control_ae_antibanding_mode_t)
             ACAMERA_CONTROL_START,
     /**
      * <p>Adjustment to auto-exposure (AE) target image
      * brightness.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: int32</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
      *   <li>ACaptureRequest</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>The adjustment is measured as a count of steps, with the
      * step size defined by ACAMERA_CONTROL_AE_COMPENSATION_STEP and the
@@ -350,11 +364,13 @@
      * <p>Whether auto-exposure (AE) is currently locked to its latest
      * calculated values.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: byte (acamera_metadata_enum_android_control_ae_lock_t)</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
      *   <li>ACaptureRequest</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>When set to <code>true</code> (ON), the AE algorithm is locked to its latest parameters,
      * and will not change exposure settings until the lock is set to <code>false</code> (OFF).</p>
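
A usage sketch for this tag (assuming `request` is a valid ACaptureRequest); releasing the lock is the same call with ACAMERA_CONTROL_AE_LOCK_OFF:

    #include <camera/NdkCameraMetadataTags.h>
    #include <camera/NdkCaptureRequest.h>

    // Illustrative: freeze AE on a repeating request.
    static void lockAutoExposure(ACaptureRequest* request) {
        uint8_t aeLock = ACAMERA_CONTROL_AE_LOCK_ON;
        ACaptureRequest_setEntry_u8(request, ACAMERA_CONTROL_AE_LOCK, 1, &aeLock);
    }
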
@@ -398,17 +414,19 @@
      * @see ACAMERA_SENSOR_EXPOSURE_TIME
      * @see ACAMERA_SENSOR_SENSITIVITY
      */
-    ACAMERA_CONTROL_AE_LOCK =                                   // byte (enum)
+    ACAMERA_CONTROL_AE_LOCK =                                   // byte (acamera_metadata_enum_android_control_ae_lock_t)
             ACAMERA_CONTROL_START + 2,
     /**
      * <p>The desired mode for the camera device's
      * auto-exposure routine.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: byte (acamera_metadata_enum_android_control_ae_mode_t)</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
      *   <li>ACaptureRequest</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>This control is only effective if ACAMERA_CONTROL_MODE is
      * AUTO.</p>
@@ -436,16 +454,18 @@
      * @see ACAMERA_SENSOR_FRAME_DURATION
      * @see ACAMERA_SENSOR_SENSITIVITY
      */
-    ACAMERA_CONTROL_AE_MODE =                                   // byte (enum)
+    ACAMERA_CONTROL_AE_MODE =                                   // byte (acamera_metadata_enum_android_control_ae_mode_t)
             ACAMERA_CONTROL_START + 3,
     /**
      * <p>List of metering areas to use for auto-exposure adjustment.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: int32[5*area_count]</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
      *   <li>ACaptureRequest</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>Not available if android.control.maxRegionsAe is 0.
      * Otherwise will always be present.</p>
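
A usage sketch for the int32[5*area_count] layout: every five values encode one metering rectangle as (xmin, ymin, xmax, ymax, weight) in active-array coordinates, with weight in [0, 1000]. The coordinates below are made up.

    #include <camera/NdkCameraMetadataTags.h>
    #include <camera/NdkCaptureRequest.h>

    // Illustrative: a single AE metering rectangle.
    static void setAeRegion(ACaptureRequest* request) {
        int32_t aeRegion[5] = {100, 100, 900, 700, 1000};  // (xmin, ymin, xmax, ymax, weight)
        ACaptureRequest_setEntry_i32(request, ACAMERA_CONTROL_AE_REGIONS, 5, aeRegion);
    }
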
@@ -486,11 +506,13 @@
      * adjust the capture frame rate to maintain good
      * exposure.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: int32[2]</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
      *   <li>ACaptureRequest</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>Only constrains auto-exposure (AE) algorithm, not
      * manual control of ACAMERA_SENSOR_EXPOSURE_TIME and
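
A usage sketch for this int32[2] tag; the chosen range must be one advertised in ACAMERA_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES, and the fixed 30/30 below is only an example:

    #include <camera/NdkCameraMetadataTags.h>
    #include <camera/NdkCaptureRequest.h>

    // Illustrative: pin the capture rate to a fixed 30fps for video.
    static void pinFrameRate(ACaptureRequest* request) {
        int32_t fpsRange[2] = {30, 30};  // {min, max}
        ACaptureRequest_setEntry_i32(request, ACAMERA_CONTROL_AE_TARGET_FPS_RANGE, 2, fpsRange);
    }
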
@@ -505,11 +527,13 @@
      * <p>Whether the camera device will trigger a precapture
      * metering sequence when it processes this request.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: byte (acamera_metadata_enum_android_control_ae_precapture_trigger_t)</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
      *   <li>ACaptureRequest</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>This entry is normally set to IDLE, or is not
      * included at all in the request settings. When included and
@@ -563,17 +587,19 @@
      * @see ACAMERA_CONTROL_AF_TRIGGER
      * @see ACAMERA_CONTROL_CAPTURE_INTENT
      */
-    ACAMERA_CONTROL_AE_PRECAPTURE_TRIGGER =                     // byte (enum)
+    ACAMERA_CONTROL_AE_PRECAPTURE_TRIGGER =                     // byte (acamera_metadata_enum_android_control_ae_precapture_trigger_t)
             ACAMERA_CONTROL_START + 6,
     /**
      * <p>Whether auto-focus (AF) is currently enabled, and what
      * mode it is set to.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: byte (acamera_metadata_enum_android_control_af_mode_t)</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
      *   <li>ACaptureRequest</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>Only effective if ACAMERA_CONTROL_MODE = AUTO and the lens is not fixed focus
      * (i.e. <code>ACAMERA_LENS_INFO_MINIMUM_FOCUS_DISTANCE &gt; 0</code>). Also note that
@@ -590,16 +616,18 @@
      * @see ACAMERA_CONTROL_MODE
      * @see ACAMERA_LENS_INFO_MINIMUM_FOCUS_DISTANCE
      */
-    ACAMERA_CONTROL_AF_MODE =                                   // byte (enum)
+    ACAMERA_CONTROL_AF_MODE =                                   // byte (acamera_metadata_enum_android_control_af_mode_t)
             ACAMERA_CONTROL_START + 7,
     /**
      * <p>List of metering areas to use for auto-focus.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: int32[5*area_count]</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
      *   <li>ACaptureRequest</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>Not available if android.control.maxRegionsAf is 0.
      * Otherwise will always be present.</p>
@@ -638,11 +666,13 @@
     /**
      * <p>Whether the camera device will trigger autofocus for this request.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: byte (acamera_metadata_enum_android_control_af_trigger_t)</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
      *   <li>ACaptureRequest</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>This entry is normally set to IDLE, or is not
      * included at all in the request settings.</p>
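
A usage sketch (assuming `request` is valid): send START in exactly one capture, return the field to IDLE afterwards, and track the scan through ACAMERA_CONTROL_AF_STATE in the results.

    #include <camera/NdkCameraMetadataTags.h>
    #include <camera/NdkCaptureRequest.h>

    // Illustrative: kick off a one-shot autofocus scan.
    static void startAutofocusScan(ACaptureRequest* request) {
        uint8_t afMode = ACAMERA_CONTROL_AF_MODE_AUTO;
        ACaptureRequest_setEntry_u8(request, ACAMERA_CONTROL_AF_MODE, 1, &afMode);

        uint8_t afTrigger = ACAMERA_CONTROL_AF_TRIGGER_START;
        ACaptureRequest_setEntry_u8(request, ACAMERA_CONTROL_AF_TRIGGER, 1, &afTrigger);
    }
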
@@ -665,17 +695,19 @@
      * @see ACAMERA_CONTROL_AE_PRECAPTURE_TRIGGER
      * @see ACAMERA_CONTROL_AF_STATE
      */
-    ACAMERA_CONTROL_AF_TRIGGER =                                // byte (enum)
+    ACAMERA_CONTROL_AF_TRIGGER =                                // byte (acamera_metadata_enum_android_control_af_trigger_t)
             ACAMERA_CONTROL_START + 9,
     /**
      * <p>Whether auto-white balance (AWB) is currently locked to its
      * latest calculated values.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: byte (acamera_metadata_enum_android_control_awb_lock_t)</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
      *   <li>ACaptureRequest</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>When set to <code>true</code> (ON), the AWB algorithm is locked to its latest parameters,
      * and will not change color balance settings until the lock is set to <code>false</code> (OFF).</p>
@@ -699,18 +731,20 @@
      *
      * @see ACAMERA_CONTROL_AWB_MODE
      */
-    ACAMERA_CONTROL_AWB_LOCK =                                  // byte (enum)
+    ACAMERA_CONTROL_AWB_LOCK =                                  // byte (acamera_metadata_enum_android_control_awb_lock_t)
             ACAMERA_CONTROL_START + 10,
     /**
      * <p>Whether auto-white balance (AWB) is currently setting the color
      * transform fields, and what its illumination target
      * is.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: byte (acamera_metadata_enum_android_control_awb_mode_t)</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
      *   <li>ACaptureRequest</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>This control is only effective if ACAMERA_CONTROL_MODE is AUTO.</p>
      * <p>When set to the ON mode, the camera device's auto-white balance
@@ -739,17 +773,19 @@
      * @see ACAMERA_CONTROL_AWB_LOCK
      * @see ACAMERA_CONTROL_MODE
      */
-    ACAMERA_CONTROL_AWB_MODE =                                  // byte (enum)
+    ACAMERA_CONTROL_AWB_MODE =                                  // byte (acamera_metadata_enum_android_control_awb_mode_t)
             ACAMERA_CONTROL_START + 11,
     /**
      * <p>List of metering areas to use for auto-white-balance illuminant
      * estimation.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: int32[5*area_count]</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
      *   <li>ACaptureRequest</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>Not available if android.control.maxRegionsAwb is 0.
      * Otherwise will always be present.</p>
@@ -791,11 +827,13 @@
      * of this capture, to help the camera device to decide optimal 3A
      * strategy.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: byte (acamera_metadata_enum_android_control_capture_intent_t)</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
      *   <li>ACaptureRequest</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>This control (except for MANUAL) is only effective if
      * <code>ACAMERA_CONTROL_MODE != OFF</code> and any 3A routine is active.</p>
@@ -807,16 +845,18 @@
      * @see ACAMERA_CONTROL_MODE
      * @see ACAMERA_REQUEST_AVAILABLE_CAPABILITIES
      */
-    ACAMERA_CONTROL_CAPTURE_INTENT =                            // byte (enum)
+    ACAMERA_CONTROL_CAPTURE_INTENT =                            // byte (acamera_metadata_enum_android_control_capture_intent_t)
             ACAMERA_CONTROL_START + 13,
     /**
      * <p>A special color effect to apply.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: byte (acamera_metadata_enum_android_control_effect_mode_t)</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
      *   <li>ACaptureRequest</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>When this mode is set, a color effect will be applied
      * to images produced by the camera device. The interpretation
@@ -825,17 +865,19 @@
      * depended on to be consistent (or present) across all
      * devices.</p>
      */
-    ACAMERA_CONTROL_EFFECT_MODE =                               // byte (enum)
+    ACAMERA_CONTROL_EFFECT_MODE =                               // byte (acamera_metadata_enum_android_control_effect_mode_t)
             ACAMERA_CONTROL_START + 14,
     /**
      * <p>Overall mode of 3A (auto-exposure, auto-white-balance, auto-focus) control
      * routines.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: byte (acamera_metadata_enum_android_control_mode_t)</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
      *   <li>ACaptureRequest</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>This is a top-level 3A control switch. When set to OFF, all 3A control
      * by the camera device is disabled. The application must set the fields for
@@ -856,16 +898,18 @@
      *
      * @see ACAMERA_CONTROL_AF_MODE
      */
-    ACAMERA_CONTROL_MODE =                                      // byte (enum)
+    ACAMERA_CONTROL_MODE =                                      // byte (acamera_metadata_enum_android_control_mode_t)
             ACAMERA_CONTROL_START + 15,
     /**
      * <p>Control for which scene mode is currently active.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: byte (acamera_metadata_enum_android_control_scene_mode_t)</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
      *   <li>ACaptureRequest</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>Scene modes are custom camera modes optimized for a certain set of conditions and
      * capture settings.</p>
@@ -883,17 +927,19 @@
      * @see ACAMERA_CONTROL_AWB_MODE
      * @see ACAMERA_CONTROL_MODE
      */
-    ACAMERA_CONTROL_SCENE_MODE =                                // byte (enum)
+    ACAMERA_CONTROL_SCENE_MODE =                                // byte (acamera_metadata_enum_android_control_scene_mode_t)
             ACAMERA_CONTROL_START + 16,
     /**
      * <p>Whether video stabilization is
      * active.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: byte (acamera_metadata_enum_android_control_video_stabilization_mode_t)</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
      *   <li>ACaptureRequest</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>Video stabilization automatically warps images from
      * the camera in order to stabilize motion between consecutive frames.</p>
@@ -923,7 +969,7 @@
      * @see ACAMERA_LENS_OPTICAL_STABILIZATION_MODE
      * @see ACAMERA_SCALER_CROP_REGION
      */
-    ACAMERA_CONTROL_VIDEO_STABILIZATION_MODE =                  // byte (enum)
+    ACAMERA_CONTROL_VIDEO_STABILIZATION_MODE =                  // byte (acamera_metadata_enum_android_control_video_stabilization_mode_t)
             ACAMERA_CONTROL_START + 17,
     /**
      * <p>List of auto-exposure antibanding modes for ACAMERA_CONTROL_AE_ANTIBANDING_MODE that are
@@ -931,10 +977,12 @@
      *
      * @see ACAMERA_CONTROL_AE_ANTIBANDING_MODE
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: byte[n]</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>Not all of the auto-exposure anti-banding modes may be
      * supported by a given camera device. This field lists the
@@ -952,10 +1000,12 @@
      *
      * @see ACAMERA_CONTROL_AE_MODE
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: byte[n]</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>Not all the auto-exposure modes may be supported by a
      * given camera device, especially if no flash unit is
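
All the byte[n] characteristics lists in this header are read the same way. A sketch, assuming `manager` comes from ACameraManager_create and `cameraId` from ACameraManager_getCameraIdList:

    #include <camera/NdkCameraManager.h>
    #include <camera/NdkCameraMetadata.h>
    #include <camera/NdkCameraMetadataTags.h>
    #include <stdbool.h>

    // Illustrative: scan the advertised AE modes for a specific one.
    static bool supportsAeMode(ACameraManager* manager, const char* cameraId,
                               uint8_t wanted) {
        bool found = false;
        ACameraMetadata* chars = NULL;
        if (ACameraManager_getCameraCharacteristics(manager, cameraId, &chars) == ACAMERA_OK) {
            ACameraMetadata_const_entry entry;
            if (ACameraMetadata_getConstEntry(chars,
                    ACAMERA_CONTROL_AE_AVAILABLE_MODES, &entry) == ACAMERA_OK) {
                for (uint32_t i = 0; i < entry.count; i++) {
                    if (entry.data.u8[i] == wanted) found = true;
                }
            }
            ACameraMetadata_free(chars);
        }
        return found;
    }
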
@@ -980,10 +1030,12 @@
      *
      * @see ACAMERA_CONTROL_AE_TARGET_FPS_RANGE
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: int32[2*n]</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>For devices at the LEGACY level or above:</p>
      * <ul>
@@ -1025,12 +1077,13 @@
      * @see ACAMERA_CONTROL_AE_COMPENSATION_STEP
      * @see ACAMERA_CONTROL_AE_EXPOSURE_COMPENSATION
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: int32[2]</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
-     * </ul>
+     * </ul></p>
      *
-     * <p>None</p>
      */
     ACAMERA_CONTROL_AE_COMPENSATION_RANGE =                     // int32[2]
             ACAMERA_CONTROL_START + 21,
@@ -1038,10 +1091,12 @@
      * <p>Smallest step by which the exposure compensation
      * can be changed.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: rational</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>This is the unit for ACAMERA_CONTROL_AE_EXPOSURE_COMPENSATION. For example, if this key has
      * a value of <code>1/2</code>, then a setting of <code>-2</code> for ACAMERA_CONTROL_AE_EXPOSURE_COMPENSATION means
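
A worked sketch of that arithmetic, assuming `chars` holds the camera characteristics and `request` is valid: with a step of 1/2, an index of -2 yields -1 EV.

    #include <camera/NdkCameraMetadata.h>
    #include <camera/NdkCameraMetadataTags.h>
    #include <camera/NdkCaptureRequest.h>

    // Illustrative: convert an exposure-compensation index to EV.
    static void applyExposureCompensation(const ACameraMetadata* chars,
                                          ACaptureRequest* request) {
        ACameraMetadata_const_entry stepEntry;
        if (ACameraMetadata_getConstEntry(chars,
                ACAMERA_CONTROL_AE_COMPENSATION_STEP, &stepEntry) == ACAMERA_OK) {
            ACameraMetadata_rational step = stepEntry.data.r[0];
            int32_t index = -2;  // must lie within ACAMERA_CONTROL_AE_COMPENSATION_RANGE
            float ev = index * (float)step.numerator / (float)step.denominator;
            (void)ev;  // -1.0f in this example
            ACaptureRequest_setEntry_i32(request,
                    ACAMERA_CONTROL_AE_EXPOSURE_COMPENSATION, 1, &index);
        }
    }
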
@@ -1059,10 +1114,12 @@
      *
      * @see ACAMERA_CONTROL_AF_MODE
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: byte[n]</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>Not all the auto-focus modes may be supported by a
      * given camera device. This entry lists the valid modes for
@@ -1086,10 +1143,12 @@
      *
      * @see ACAMERA_CONTROL_EFFECT_MODE
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: byte[n]</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>This list contains the color effect modes that can be applied to
      * images produced by the camera device.
@@ -1111,10 +1170,12 @@
      *
      * @see ACAMERA_CONTROL_SCENE_MODE
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: byte[n]</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>This list contains scene modes that can be set for the camera device.
      * Only scene modes that have been fully implemented for the
@@ -1136,10 +1197,12 @@
      *
      * @see ACAMERA_CONTROL_VIDEO_STABILIZATION_MODE
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: byte[n]</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>OFF will always be listed.</p>
      */
@@ -1151,10 +1214,12 @@
      *
      * @see ACAMERA_CONTROL_AWB_MODE
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: byte[n]</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>Not all the auto-white-balance modes may be supported by a
      * given camera device. This entry lists the valid modes for
@@ -1183,22 +1248,25 @@
      * @see ACAMERA_CONTROL_AF_REGIONS
      * @see ACAMERA_CONTROL_AWB_REGIONS
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: int32[3]</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
-     * </ul>
+     * </ul></p>
      *
-     * <p>None</p>
      */
     ACAMERA_CONTROL_MAX_REGIONS =                               // int32[3]
             ACAMERA_CONTROL_START + 28,
     /**
      * <p>Current state of the auto-exposure (AE) algorithm.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: byte (acamera_metadata_enum_android_control_ae_state_t)</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>Switching between or enabling AE modes (ACAMERA_CONTROL_AE_MODE) always
      * resets the AE state to INACTIVE. Similarly, switching between ACAMERA_CONTROL_MODE,
@@ -1257,15 +1325,17 @@
      * @see ACAMERA_CONTROL_MODE
      * @see ACAMERA_CONTROL_SCENE_MODE
      */
-    ACAMERA_CONTROL_AE_STATE =                                  // byte (enum)
+    ACAMERA_CONTROL_AE_STATE =                                  // byte (acamera_metadata_enum_android_control_ae_state_t)
             ACAMERA_CONTROL_START + 31,
     /**
      * <p>Current state of auto-focus (AF) algorithm.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: byte (acamera_metadata_enum_android_control_af_state_t)</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>Switching between or enabling AF modes (ACAMERA_CONTROL_AF_MODE) always
      * resets the AF state to INACTIVE. Similarly, switching between ACAMERA_CONTROL_MODE,
@@ -1357,15 +1427,17 @@
      * @see ACAMERA_CONTROL_MODE
      * @see ACAMERA_CONTROL_SCENE_MODE
      */
-    ACAMERA_CONTROL_AF_STATE =                                  // byte (enum)
+    ACAMERA_CONTROL_AF_STATE =                                  // byte (acamera_metadata_enum_android_control_af_state_t)
             ACAMERA_CONTROL_START + 32,
     /**
      * <p>Current state of auto-white balance (AWB) algorithm.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: byte (acamera_metadata_enum_android_control_awb_state_t)</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>Switching between or enabling AWB modes (ACAMERA_CONTROL_AWB_MODE) always
      * resets the AWB state to INACTIVE. Similarly, switching between ACAMERA_CONTROL_MODE,
@@ -1408,37 +1480,41 @@
      * @see ACAMERA_CONTROL_MODE
      * @see ACAMERA_CONTROL_SCENE_MODE
      */
-    ACAMERA_CONTROL_AWB_STATE =                                 // byte (enum)
+    ACAMERA_CONTROL_AWB_STATE =                                 // byte (acamera_metadata_enum_android_control_awb_state_t)
             ACAMERA_CONTROL_START + 34,
     /**
      * <p>Whether the camera device supports ACAMERA_CONTROL_AE_LOCK</p>
      *
      * @see ACAMERA_CONTROL_AE_LOCK
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: byte (acamera_metadata_enum_android_control_ae_lock_available_t)</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>Devices with MANUAL_SENSOR capability or BURST_CAPTURE capability will always
      * list <code>true</code>. This includes FULL devices.</p>
      */
-    ACAMERA_CONTROL_AE_LOCK_AVAILABLE =                         // byte (enum)
+    ACAMERA_CONTROL_AE_LOCK_AVAILABLE =                         // byte (acamera_metadata_enum_android_control_ae_lock_available_t)
             ACAMERA_CONTROL_START + 36,
     /**
      * <p>Whether the camera device supports ACAMERA_CONTROL_AWB_LOCK</p>
      *
      * @see ACAMERA_CONTROL_AWB_LOCK
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: byte (acamera_metadata_enum_android_control_awb_lock_available_t)</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>Devices with MANUAL_POST_PROCESSING capability or BURST_CAPTURE capability will
      * always list <code>true</code>. This includes FULL devices.</p>
      */
-    ACAMERA_CONTROL_AWB_LOCK_AVAILABLE =                        // byte (enum)
+    ACAMERA_CONTROL_AWB_LOCK_AVAILABLE =                        // byte (acamera_metadata_enum_android_control_awb_lock_available_t)
             ACAMERA_CONTROL_START + 37,
     /**
      * <p>List of control modes for ACAMERA_CONTROL_MODE that are supported by this camera
@@ -1446,10 +1522,12 @@
      *
      * @see ACAMERA_CONTROL_MODE
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: byte[n]</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>This list contains control modes that can be set for the camera device.
      * LEGACY mode devices will always support AUTO mode. LIMITED and FULL
@@ -1463,10 +1541,12 @@
      *
      * @see ACAMERA_CONTROL_POST_RAW_SENSITIVITY_BOOST
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: int32[2]</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
-     * </ul>
+     * </ul></p>
      *
     * <p>Devices that support post RAW sensitivity boost will advertise
     * ACAMERA_CONTROL_POST_RAW_SENSITIVITY_BOOST key for controlling
@@ -1484,11 +1564,13 @@
      * <p>The amount of additional sensitivity boost applied to output images
      * after RAW sensor data is captured.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: int32</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
      *   <li>ACaptureRequest</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>Some camera devices support additional digital sensitivity boosting in the
      * camera processing pipeline after sensor RAW image is captured.
@@ -1521,11 +1603,13 @@
      *
      * @see ACAMERA_CONTROL_CAPTURE_INTENT
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: byte (acamera_metadata_enum_android_control_enable_zsl_t)</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
      *   <li>ACaptureRequest</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>If enableZsl is <code>true</code>, the camera device may enable zero-shutter-lag mode for requests with
      * STILL_CAPTURE capture intent. The camera device may use images captured in the past to
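
A usage sketch (assuming `request` is valid); as the text above says, the device may still ignore the hint:

    #include <camera/NdkCameraMetadataTags.h>
    #include <camera/NdkCaptureRequest.h>

    // Illustrative: opt a still-capture request into zero-shutter-lag.
    static void requestZeroShutterLag(ACaptureRequest* request) {
        uint8_t intent = ACAMERA_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
        ACaptureRequest_setEntry_u8(request, ACAMERA_CONTROL_CAPTURE_INTENT, 1, &intent);

        uint8_t zsl = ACAMERA_CONTROL_ENABLE_ZSL_TRUE;
        ACaptureRequest_setEntry_u8(request, ACAMERA_CONTROL_ENABLE_ZSL, 1, &zsl);
    }
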
@@ -1552,7 +1636,7 @@
      * @see ACAMERA_CONTROL_CAPTURE_INTENT
      * @see ACAMERA_SENSOR_TIMESTAMP
      */
-    ACAMERA_CONTROL_ENABLE_ZSL =                                // byte (enum)
+    ACAMERA_CONTROL_ENABLE_ZSL =                                // byte (acamera_metadata_enum_android_control_enable_zsl_t)
             ACAMERA_CONTROL_START + 41,
     ACAMERA_CONTROL_END,
 
@@ -1560,11 +1644,13 @@
      * <p>Operation mode for edge
      * enhancement.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: byte (acamera_metadata_enum_android_edge_mode_t)</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
      *   <li>ACaptureRequest</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>Edge enhancement improves sharpness and details in the captured image. OFF means
      * no enhancement will be applied by the camera device.</p>
@@ -1586,7 +1672,7 @@
      * The camera device may adjust its internal edge enhancement parameters for best
      * image quality based on the android.reprocess.effectiveExposureFactor, if it is set.</p>
      */
-    ACAMERA_EDGE_MODE =                                         // byte (enum)
+    ACAMERA_EDGE_MODE =                                         // byte (acamera_metadata_enum_android_edge_mode_t)
             ACAMERA_EDGE_START,
     /**
      * <p>List of edge enhancement modes for ACAMERA_EDGE_MODE that are supported by this camera
@@ -1594,10 +1680,12 @@
      *
      * @see ACAMERA_EDGE_MODE
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: byte[n]</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>Full-capability camera devices must always support OFF; camera devices that support
      * YUV_REPROCESSING or PRIVATE_REPROCESSING will list ZERO_SHUTTER_LAG; all devices will
@@ -1610,11 +1698,13 @@
     /**
     * <p>The desired mode for the camera device's flash control.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: byte (acamera_metadata_enum_android_flash_mode_t)</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
      *   <li>ACaptureRequest</li>
-     * </ul>
+     * </ul></p>
      *
     * <p>This control is only effective when a flash unit is available
      * (<code>ACAMERA_FLASH_INFO_AVAILABLE == true</code>).</p>
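
A usage sketch (assuming a flash unit is present and `request` is valid); TORCH and SINGLE are only honored when the AE mode is ON or OFF, not an ON_AUTO_FLASH variant:

    #include <camera/NdkCameraMetadataTags.h>
    #include <camera/NdkCaptureRequest.h>

    // Illustrative: force the torch on for the duration of the request.
    static void enableTorch(ACaptureRequest* request) {
        uint8_t aeMode = ACAMERA_CONTROL_AE_MODE_ON;
        ACaptureRequest_setEntry_u8(request, ACAMERA_CONTROL_AE_MODE, 1, &aeMode);

        uint8_t flashMode = ACAMERA_FLASH_MODE_TORCH;
        ACaptureRequest_setEntry_u8(request, ACAMERA_FLASH_MODE, 1, &flashMode);
    }
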
@@ -1635,16 +1725,18 @@
      * @see ACAMERA_FLASH_INFO_AVAILABLE
      * @see ACAMERA_FLASH_STATE
      */
-    ACAMERA_FLASH_MODE =                                        // byte (enum)
+    ACAMERA_FLASH_MODE =                                        // byte (acamera_metadata_enum_android_flash_mode_t)
             ACAMERA_FLASH_START + 2,
     /**
      * <p>Current state of the flash
      * unit.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: byte (acamera_metadata_enum_android_flash_state_t)</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
-     * </ul>
+     * </ul></p>
      *
     * <p>When the camera device doesn't have a flash unit
      * (i.e. <code>ACAMERA_FLASH_INFO_AVAILABLE == false</code>), this state will always be UNAVAILABLE.
@@ -1664,7 +1756,7 @@
      * @see ACAMERA_FLASH_INFO_AVAILABLE
      * @see ACAMERA_FLASH_MODE
      */
-    ACAMERA_FLASH_STATE =                                       // byte (enum)
+    ACAMERA_FLASH_STATE =                                       // byte (acamera_metadata_enum_android_flash_state_t)
             ACAMERA_FLASH_START + 5,
     ACAMERA_FLASH_END,
 
@@ -1672,33 +1764,37 @@
      * <p>Whether this camera device has a
      * flash unit.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: byte (acamera_metadata_enum_android_flash_info_available_t)</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>Will be <code>false</code> if no flash is available.</p>
      * <p>If there is no flash unit, none of the flash controls do
      * anything.</p>
      */
-    ACAMERA_FLASH_INFO_AVAILABLE =                              // byte (enum)
+    ACAMERA_FLASH_INFO_AVAILABLE =                              // byte (acamera_metadata_enum_android_flash_info_available_t)
             ACAMERA_FLASH_INFO_START,
     ACAMERA_FLASH_INFO_END,
 
     /**
      * <p>Operational mode for hot pixel correction.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: byte (acamera_metadata_enum_android_hot_pixel_mode_t)</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
      *   <li>ACaptureRequest</li>
-     * </ul>
+     * </ul></p>
      *
     * <p>Hot pixel correction interpolates out, or otherwise removes, pixels
      * that do not accurately measure the incoming light (i.e. pixels that
      * are stuck at an arbitrary value or are oversensitive).</p>
      */
-    ACAMERA_HOT_PIXEL_MODE =                                    // byte (enum)
+    ACAMERA_HOT_PIXEL_MODE =                                    // byte (acamera_metadata_enum_android_hot_pixel_mode_t)
             ACAMERA_HOT_PIXEL_START,
     /**
      * <p>List of hot pixel correction modes for ACAMERA_HOT_PIXEL_MODE that are supported by this
@@ -1706,10 +1802,12 @@
      *
      * @see ACAMERA_HOT_PIXEL_MODE
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: byte[n]</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>FULL mode camera devices will always support FAST.</p>
      */
@@ -1721,13 +1819,14 @@
      * <p>GPS coordinates to include in output JPEG
      * EXIF.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: double[3]</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
      *   <li>ACaptureRequest</li>
-     * </ul>
+     * </ul></p>
      *
-     * <p>None</p>
      */
     ACAMERA_JPEG_GPS_COORDINATES =                              // double[3]
             ACAMERA_JPEG_START,
@@ -1735,13 +1834,14 @@
      * <p>32 characters describing GPS algorithm to
      * include in EXIF.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: byte</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
      *   <li>ACaptureRequest</li>
-     * </ul>
+     * </ul></p>
      *
-     * <p>None</p>
      */
     ACAMERA_JPEG_GPS_PROCESSING_METHOD =                        // byte
             ACAMERA_JPEG_START + 1,
@@ -1749,24 +1849,27 @@
      * <p>Time GPS fix was made to include in
      * EXIF.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: int64</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
      *   <li>ACaptureRequest</li>
-     * </ul>
+     * </ul></p>
      *
-     * <p>None</p>
      */
     ACAMERA_JPEG_GPS_TIMESTAMP =                                // int64
             ACAMERA_JPEG_START + 2,
     /**
      * <p>The orientation for a JPEG image.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: int32</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
      *   <li>ACaptureRequest</li>
-     * </ul>
+     * </ul></p>
      *
     * <p>The clockwise rotation angle in degrees, relative to the orientation
     * of the camera, that the JPEG picture needs to be rotated by, to be viewed
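
A usage sketch (assuming `request` is valid); only 0, 90, 180, and 270 are accepted:

    #include <camera/NdkCameraMetadataTags.h>
    #include <camera/NdkCaptureRequest.h>

    // Illustrative: request a 90-degree clockwise rotation of the JPEG.
    static void setJpegOrientation(ACaptureRequest* request) {
        int32_t jpegOrientation = 90;
        ACaptureRequest_setEntry_i32(request, ACAMERA_JPEG_ORIENTATION, 1, &jpegOrientation);
    }
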
@@ -1805,11 +1908,13 @@
      * <p>Compression quality of the final JPEG
      * image.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: byte</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
      *   <li>ACaptureRequest</li>
-     * </ul>
+     * </ul></p>
      *
     * <p>85-95 is the typical usage range.</p>
      */
@@ -1819,24 +1924,27 @@
      * <p>Compression quality of JPEG
      * thumbnail.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: byte</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
      *   <li>ACaptureRequest</li>
-     * </ul>
+     * </ul></p>
      *
-     * <p>None</p>
      */
     ACAMERA_JPEG_THUMBNAIL_QUALITY =                            // byte
             ACAMERA_JPEG_START + 5,
     /**
      * <p>Resolution of embedded JPEG thumbnail.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: int32[2]</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
      *   <li>ACaptureRequest</li>
-     * </ul>
+     * </ul></p>
      *
     * <p>When set to (0, 0), the JPEG EXIF will not contain a thumbnail,
      * but the captured JPEG will still be a valid image.</p>
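
A usage sketch of that (0, 0) case (assuming `request` is valid); any non-zero size must come from ACAMERA_JPEG_AVAILABLE_THUMBNAIL_SIZES:

    #include <camera/NdkCameraMetadataTags.h>
    #include <camera/NdkCaptureRequest.h>

    // Illustrative: suppress the EXIF thumbnail entirely.
    static void disableJpegThumbnail(ACaptureRequest* request) {
        int32_t thumbSize[2] = {0, 0};  // (width, height)
        ACaptureRequest_setEntry_i32(request, ACAMERA_JPEG_THUMBNAIL_SIZE, 2, thumbSize);
    }
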
@@ -1871,10 +1979,12 @@
      *
      * @see ACAMERA_JPEG_THUMBNAIL_SIZE
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: int32[2*n]</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>This list will include at least one non-zero resolution, plus <code>(0,0)</code> for indicating no
      * thumbnail should be generated.</p>
@@ -1902,11 +2012,13 @@
      * <p>The desired lens aperture size, as a ratio of lens focal length to the
      * effective aperture diameter.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: float</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
      *   <li>ACaptureRequest</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>Setting this value is only supported on the camera devices that have a variable
      * aperture lens.</p>
@@ -1934,11 +2046,13 @@
     /**
      * <p>The desired setting for the lens neutral density filter(s).</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: float</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
      *   <li>ACaptureRequest</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>This control will not be supported on most camera devices.</p>
      * <p>Lens filters are typically used to lower the amount of light the
@@ -1960,11 +2074,13 @@
     /**
      * <p>The desired lens focal length; used for optical zoom.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: float</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
      *   <li>ACaptureRequest</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>This setting controls the physical focal length of the camera
      * device's lens. Changing the focal length changes the field of
@@ -1986,11 +2102,13 @@
      * <p>Desired distance to plane of sharpest focus,
      * measured from frontmost surface of the lens.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: float</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
      *   <li>ACaptureRequest</li>
-     * </ul>
+     * </ul></p>
      *
     * <p>Should be zero for fixed-focus cameras.</p>
      */
@@ -2000,11 +2118,13 @@
      * <p>Sets whether the camera device uses optical image stabilization (OIS)
      * when capturing images.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: byte (acamera_metadata_enum_android_lens_optical_stabilization_mode_t)</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
      *   <li>ACaptureRequest</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>OIS is used to compensate for motion blur due to small
      * movements of the camera during capture. Unlike digital image
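
A usage sketch (assuming `request` is valid, and that ON is listed in ACAMERA_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION); OIS should not be combined with video stabilization:

    #include <camera/NdkCameraMetadataTags.h>
    #include <camera/NdkCaptureRequest.h>

    // Illustrative: turn on optical image stabilization.
    static void enableOis(ACaptureRequest* request) {
        uint8_t ois = ACAMERA_LENS_OPTICAL_STABILIZATION_MODE_ON;
        ACaptureRequest_setEntry_u8(request, ACAMERA_LENS_OPTICAL_STABILIZATION_MODE, 1, &ois);
    }
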
@@ -2027,30 +2147,33 @@
      * @see ACAMERA_CONTROL_VIDEO_STABILIZATION_MODE
      * @see ACAMERA_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION
      */
-    ACAMERA_LENS_OPTICAL_STABILIZATION_MODE =                   // byte (enum)
+    ACAMERA_LENS_OPTICAL_STABILIZATION_MODE =                   // byte (acamera_metadata_enum_android_lens_optical_stabilization_mode_t)
             ACAMERA_LENS_START + 4,
     /**
      * <p>Direction the camera faces relative to
      * device screen.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: byte (acamera_metadata_enum_android_lens_facing_t)</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
-     * </ul>
+     * </ul></p>
      *
-     * <p>None</p>
      */
-    ACAMERA_LENS_FACING =                                       // byte (enum)
+    ACAMERA_LENS_FACING =                                       // byte (acamera_metadata_enum_android_lens_facing_t)
             ACAMERA_LENS_START + 5,
     /**
      * <p>The orientation of the camera relative to the sensor
      * coordinate system.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: float[4]</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
      *   <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>The four coefficients that describe the quaternion
      * rotation from the Android sensor coordinate system to a
@@ -2084,11 +2207,13 @@
     /**
      * <p>Position of the camera optical center.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: float[3]</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
      *   <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>The position of the camera device's lens optical center,
      * as a three-dimensional vector <code>(x,y,z)</code>, relative to the
@@ -2129,10 +2254,12 @@
      * <p>The range of scene distances that are in
      * sharp focus (depth of field).</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: float[2]</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
-     * </ul>
+     * </ul></p>
      *
     * <p>If variable focus is not supported, the camera device can still
     * report a fixed depth of field range.</p>
@@ -2142,10 +2269,12 @@
     /**
      * <p>Current lens status.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: byte (acamera_metadata_enum_android_lens_state_t)</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>For lens parameters ACAMERA_LENS_FOCAL_LENGTH, ACAMERA_LENS_FOCUS_DISTANCE,
      * ACAMERA_LENS_FILTER_DENSITY and ACAMERA_LENS_APERTURE, when changes are requested,
@@ -2176,17 +2305,19 @@
      * @see ACAMERA_LENS_INFO_AVAILABLE_FOCAL_LENGTHS
      * @see ACAMERA_LENS_INFO_MINIMUM_FOCUS_DISTANCE
      */
-    ACAMERA_LENS_STATE =                                        // byte (enum)
+    ACAMERA_LENS_STATE =                                        // byte (acamera_metadata_enum_android_lens_state_t)
             ACAMERA_LENS_START + 9,
     /**
      * <p>The parameters for this camera device's intrinsic
      * calibration.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: float[5]</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
      *   <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>The five calibration parameters that describe the
      * transform from camera-centric 3D coordinates to sensor
@@ -2245,11 +2376,13 @@
      * <p>The correction coefficients to correct for this camera device's
      * radial and tangential lens distortion.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: float[6]</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
      *   <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>Four radial distortion coefficients <code>[kappa_0, kappa_1, kappa_2,
      * kappa_3]</code> and two tangential distortion coefficients
@@ -2290,10 +2423,12 @@
      *
      * @see ACAMERA_LENS_APERTURE
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: float[n]</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>If the camera device doesn't support a variable lens aperture,
      * this list will contain only one value, which is the fixed aperture size.</p>
@@ -2308,10 +2443,12 @@
      *
      * @see ACAMERA_LENS_FILTER_DENSITY
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: float[n]</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>If a neutral density filter is not supported by this camera device,
      * this list will contain only 0. Otherwise, this list will include every
@@ -2325,10 +2462,12 @@
      *
      * @see ACAMERA_LENS_FOCAL_LENGTH
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: float[n]</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>If optical zoom is not supported, this list will only contain
      * a single value corresponding to the fixed focal length of the
@@ -2343,10 +2482,12 @@
      *
      * @see ACAMERA_LENS_OPTICAL_STABILIZATION_MODE
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: byte[n]</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>If OIS is not supported by a given camera device, this list will
      * contain only OFF.</p>
@@ -2356,10 +2497,12 @@
     /**
      * <p>Hyperfocal distance for this lens.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: float</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>If the lens is not fixed focus, the camera device will report this
      * field when ACAMERA_LENS_INFO_FOCUS_DISTANCE_CALIBRATION is APPROXIMATE or CALIBRATED.</p>
@@ -2372,10 +2515,12 @@
      * <p>Shortest distance from frontmost surface
      * of the lens that can be brought into sharp focus.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: float</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>If the lens is fixed-focus, this will be
      * 0.</p>
@@ -2385,10 +2530,12 @@
     /**
      * <p>Dimensions of lens shading map.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: int32[2]</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>The map should be on the order of 30-40 rows and columns, and
      * must be smaller than 64x64.</p>
@@ -2398,10 +2545,12 @@
     /**
      * <p>The lens focus distance calibration quality.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: byte (acamera_metadata_enum_android_lens_info_focus_distance_calibration_t)</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>The lens focus distance calibration quality determines the reliability of
      * focus related metadata entries, i.e. ACAMERA_LENS_FOCUS_DISTANCE,
@@ -2422,18 +2571,20 @@
      * @see ACAMERA_LENS_INFO_HYPERFOCAL_DISTANCE
      * @see ACAMERA_LENS_INFO_MINIMUM_FOCUS_DISTANCE
      */
-    ACAMERA_LENS_INFO_FOCUS_DISTANCE_CALIBRATION =              // byte (enum)
+    ACAMERA_LENS_INFO_FOCUS_DISTANCE_CALIBRATION =              // byte (acamera_metadata_enum_android_lens_info_focus_distance_calibration_t)
             ACAMERA_LENS_INFO_START + 7,
     ACAMERA_LENS_INFO_END,
 
     /**
      * <p>Mode of operation for the noise reduction algorithm.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: byte (acamera_metadata_enum_android_noise_reduction_mode_t)</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
      *   <li>ACaptureRequest</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>The noise reduction algorithm attempts to improve image quality by removing
      * excessive noise added by the capture process, especially in dark conditions.</p>
@@ -2463,7 +2614,7 @@
      *
      * @see ACAMERA_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES
      */
-    ACAMERA_NOISE_REDUCTION_MODE =                              // byte (enum)
+    ACAMERA_NOISE_REDUCTION_MODE =                              // byte (acamera_metadata_enum_android_noise_reduction_mode_t)
             ACAMERA_NOISE_REDUCTION_START,
     /**
      * <p>List of noise reduction modes for ACAMERA_NOISE_REDUCTION_MODE that are supported
@@ -2471,10 +2622,12 @@
      *
      * @see ACAMERA_NOISE_REDUCTION_MODE
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: byte[n]</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>Full-capability camera devices will always support OFF and FAST.</p>
      * <p>Camera devices that support YUV_REPROCESSING or PRIVATE_REPROCESSING will support
@@ -2489,10 +2642,12 @@
      * <p>The maximum numbers of different types of output streams
      * that can be configured and used simultaneously by a camera device.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: int32[3]</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
-     * </ul>
+     * </ul></p>
      *
     * <p>This is a 3 element tuple that contains the max number of simultaneous
     * output streams for raw sensor, processed (but not stalling), and processed (and stalling)
@@ -2523,10 +2678,12 @@
      * through from when it was exposed to when the final completed result
      * was available to the framework.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: byte</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>Depending on what settings are used in the request, and
      * what streams are configured, the data may undergo less processing,
@@ -2542,10 +2699,12 @@
      * has to go through from when it's exposed to when it's available
      * to the framework.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: byte</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>A typical minimum value for this is 2 (one stage to expose,
      * one stage to readout) from the sensor. The ISP then usually adds
@@ -2568,10 +2727,12 @@
      * <p>Defines how many sub-components
      * a result will be composed of.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: int32</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>In order to combat the pipeline latency, partial results
      * may be delivered to the application layer from the camera device as
@@ -2592,10 +2753,12 @@
      * <p>List of capabilities that this camera device
      * advertises as fully supporting.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: byte[n] (acamera_metadata_enum_android_request_available_capabilities_t)</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>A capability is a contract that the camera device makes in order
      * to be able to satisfy one or more use cases.</p>
@@ -2620,16 +2783,18 @@
      * @see ACAMERA_REQUEST_AVAILABLE_REQUEST_KEYS
      * @see ACAMERA_REQUEST_AVAILABLE_RESULT_KEYS
      */
-    ACAMERA_REQUEST_AVAILABLE_CAPABILITIES =                    // byte[n] (enum)
+    ACAMERA_REQUEST_AVAILABLE_CAPABILITIES =                    // byte[n] (acamera_metadata_enum_android_request_available_capabilities_t)
             ACAMERA_REQUEST_START + 12,
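
A minimal sketch (assumed helper, not from this patch) of scanning that capability list:

    // Returns true if the device advertises the given
    // ACAMERA_REQUEST_AVAILABLE_CAPABILITIES_* value.
    bool hasCapability(const ACameraMetadata* chars, uint8_t cap) {
        ACameraMetadata_const_entry entry;
        if (ACameraMetadata_getConstEntry(chars,
                ACAMERA_REQUEST_AVAILABLE_CAPABILITIES,
                &entry) != ACAMERA_OK) {
            return false;
        }
        for (uint32_t i = 0; i < entry.count; i++) {
            if (entry.data.u8[i] == cap) {
                return true;
            }
        }
        return false;
    }

For example, `hasCapability(chars, ACAMERA_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR)` can gate a manual-exposure UI.
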
     /**
      * <p>A list of all keys that the camera device has available
      * to use with {@link ACaptureRequest}.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: int32[n]</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>Attempting to set a key into a CaptureRequest that is not
      * listed here will result in an invalid request and will be rejected
@@ -2648,10 +2813,12 @@
      * to query with {@link ACameraMetadata} from
      * {@link ACameraCaptureSession_captureCallback_result}.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: int32[n]</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>Attempting to get a key from a CaptureResult that is not
      * listed here will always return a <code>null</code> value. Getting a key from
@@ -2679,10 +2846,12 @@
      * to query with {@link ACameraMetadata} from
      * {@link ACameraManager_getCameraCharacteristics}.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: int32[n]</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>This entry follows the same rules as
      * ACAMERA_REQUEST_AVAILABLE_RESULT_KEYS (except that it applies for
@@ -2698,11 +2867,13 @@
     /**
      * <p>The desired region of the sensor to read out for this capture.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: int32[4]</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
      *   <li>ACaptureRequest</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>This control can be used to implement digital zoom.</p>
      * <p>The data representation is int[4], which maps to (left, top, width, height).</p>
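
A sketch of the digital-zoom use mentioned above (hypothetical helper; `activeW`/`activeH` are assumed to come from ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE):

    // Crop to the center half of the active array for roughly 2x zoom.
    void applyCenterZoom2x(ACaptureRequest* request,
                           int32_t activeW, int32_t activeH) {
        int32_t crop[4] = {
            activeW / 4, activeH / 4,   // left, top
            activeW / 2, activeH / 2,   // width, height
        };
        ACaptureRequest_setEntry_i32(request,
                ACAMERA_SCALER_CROP_REGION, 4, crop);
    }
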
@@ -2748,10 +2919,12 @@
      *
      * @see ACAMERA_SCALER_CROP_REGION
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: float</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>This represents the maximum amount of zooming possible by
      * the camera device, or equivalently, the minimum cropping
@@ -2767,10 +2940,12 @@
      * camera device supports
      * (i.e. format, width, height, output/input stream).</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: int32[n*4] (acamera_metadata_enum_android_scaler_available_stream_configurations_t)</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>The configurations are listed as <code>(format, width, height, input?)</code>
      * tuples.</p>
@@ -2805,16 +2980,18 @@
      * @see ACAMERA_REQUEST_AVAILABLE_CAPABILITIES
      * @see ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE
      */
-    ACAMERA_SCALER_AVAILABLE_STREAM_CONFIGURATIONS =            // int32[n*4] (enum)
+    ACAMERA_SCALER_AVAILABLE_STREAM_CONFIGURATIONS =            // int32[n*4] (acamera_metadata_enum_android_scaler_available_stream_configurations_t)
             ACAMERA_SCALER_START + 10,
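
A sketch (assumed helper) of walking those (format, width, height, input?) tuples; |format| is compared against the raw format codes stored in the entry:

    #include <log/log.h>

    // Log every output size advertised for |format|.
    void logOutputSizes(const ACameraMetadata* chars, int32_t format) {
        ACameraMetadata_const_entry entry;
        if (ACameraMetadata_getConstEntry(chars,
                ACAMERA_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
                &entry) != ACAMERA_OK) {
            return;
        }
        for (uint32_t i = 0; i + 3 < entry.count; i += 4) {
            if (entry.data.i32[i] == format &&
                    entry.data.i32[i + 3] ==
                    ACAMERA_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT) {
                ALOGI("output: %dx%d",
                        entry.data.i32[i + 1], entry.data.i32[i + 2]);
            }
        }
    }
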
     /**
      * <p>This lists the minimum frame duration for each
      * format/size combination.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: int64[4*n]</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>This should correspond to the frame duration when only that
      * stream is active, with all processing (typically in android.*.mode)
@@ -2836,10 +3013,12 @@
      * <p>This lists the maximum stall duration for each
      * output format/size combination.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: int64[4*n]</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>A stall duration is how much extra time would get added
      * to the normal minimum frame duration for a repeating request
@@ -2904,10 +3083,12 @@
     /**
      * <p>The crop type that this camera device supports.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: byte (acamera_metadata_enum_android_scaler_cropping_type_t)</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>When passing a non-centered crop region (ACAMERA_SCALER_CROP_REGION) to a camera
      * device that only supports CENTER_ONLY cropping, the camera device will move the
@@ -2922,7 +3103,7 @@
      * @see ACAMERA_SCALER_CROP_REGION
      * @see ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE
      */
-    ACAMERA_SCALER_CROPPING_TYPE =                              // byte (enum)
+    ACAMERA_SCALER_CROPPING_TYPE =                              // byte (acamera_metadata_enum_android_scaler_cropping_type_t)
             ACAMERA_SCALER_START + 13,
     ACAMERA_SCALER_END,
 
@@ -2930,11 +3111,13 @@
      * <p>Duration each pixel is exposed to
      * light.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: int64</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
      *   <li>ACaptureRequest</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>If the sensor can't expose this exact duration, it will shorten the
      * duration exposed to the nearest possible value (rather than expose longer).
@@ -2951,11 +3134,13 @@
      * <p>Duration from start of frame exposure to
      * start of next frame exposure.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: int64</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
      *   <li>ACaptureRequest</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>The maximum frame rate that can be supported by a camera subsystem is
      * a function of many factors:</p>
@@ -3037,11 +3222,13 @@
      * <p>The amount of gain applied to sensor data
      * before processing.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: int32</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
      *   <li>ACaptureRequest</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>The sensitivity is the standard ISO sensitivity value,
      * as defined in ISO 12232:2006.</p>
@@ -3072,10 +3259,12 @@
      * @see ACAMERA_SENSOR_COLOR_TRANSFORM1
      * @see ACAMERA_SENSOR_FORWARD_MATRIX1
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: byte (acamera_metadata_enum_android_sensor_reference_illuminant1_t)</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>The values in this key correspond to the values defined for the
      * EXIF LightSource tag. These illuminants are standard light sources
@@ -3092,7 +3281,7 @@
      * @see ACAMERA_SENSOR_FORWARD_MATRIX1
      * @see ACAMERA_SENSOR_REFERENCE_ILLUMINANT2
      */
-    ACAMERA_SENSOR_REFERENCE_ILLUMINANT1 =                      // byte (enum)
+    ACAMERA_SENSOR_REFERENCE_ILLUMINANT1 =                      // byte (acamera_metadata_enum_android_sensor_reference_illuminant1_t)
             ACAMERA_SENSOR_START + 3,
     /**
      * <p>The standard reference illuminant used as the scene light source when
@@ -3104,10 +3293,12 @@
      * @see ACAMERA_SENSOR_COLOR_TRANSFORM2
      * @see ACAMERA_SENSOR_FORWARD_MATRIX2
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: byte</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>See ACAMERA_SENSOR_REFERENCE_ILLUMINANT1 for more details.</p>
      * <p>If this key is present, then ACAMERA_SENSOR_COLOR_TRANSFORM2,
@@ -3125,10 +3316,12 @@
      * <p>A per-device calibration transform matrix that maps from the
      * reference sensor colorspace to the actual device sensor colorspace.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: rational[3*3]</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>This matrix is used to correct for per-device variations in the
      * sensor colorspace, and is used for processing raw buffer data.</p>
@@ -3148,10 +3341,12 @@
      * reference sensor colorspace to the actual device sensor colorspace
      * (this is the colorspace of the raw buffer data).</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: rational[3*3]</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>This matrix is used to correct for per-device variations in the
      * sensor colorspace, and is used for processing raw buffer data.</p>
@@ -3172,10 +3367,12 @@
      * <p>A matrix that transforms color values from CIE XYZ color space to
      * reference sensor color space.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: rational[3*3]</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>This matrix is used to convert from the standard CIE XYZ color
      * space to the reference sensor colorspace, and is used when processing
@@ -3198,10 +3395,12 @@
      * <p>A matrix that transforms color values from CIE XYZ color space to
      * reference sensor color space.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: rational[3*3]</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>This matrix is used to convert from the standard CIE XYZ color
      * space to the reference sensor colorspace, and is used when processing
@@ -3226,10 +3425,12 @@
      * <p>A matrix that transforms white balanced camera colors from the reference
      * sensor colorspace to the CIE XYZ colorspace with a D50 whitepoint.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: rational[3*3]</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>This matrix is used to convert to the standard CIE XYZ colorspace, and
      * is used when processing raw buffer data.</p>
@@ -3250,10 +3451,12 @@
      * <p>A matrix that transforms white balanced camera colors from the reference
      * sensor colorspace to the CIE XYZ colorspace with a D50 whitepoint.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: rational[3*3]</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>This matrix is used to convert to the standard CIE XYZ colorspace, and
      * is used when processing raw buffer data.</p>
@@ -3276,10 +3479,12 @@
      * <p>A fixed black level offset for each of the color filter arrangement
      * (CFA) mosaic channels.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: int32[4]</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>This key specifies the zero light value for each of the CFA mosaic
      * channels in the camera sensor.  The maximal value output by the
@@ -3310,10 +3515,12 @@
      * <p>Maximum sensitivity that is implemented
      * purely through analog gain.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: int32</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>For ACAMERA_SENSOR_SENSITIVITY values less than or
      * equal to this, all applied gain must be analog. For
@@ -3328,10 +3535,12 @@
      * <p>Clockwise angle through which the output image needs to be rotated to be
      * upright on the device screen in its native orientation.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: int32</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>Also defines the direction of rolling shutter readout, which is from top to bottom in
      * the sensor's coordinate system.</p>
@@ -3342,10 +3551,12 @@
      * <p>Time at start of exposure of first
      * row of the image sensor active array, in nanoseconds.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: int64</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>The timestamps are also included in all image
      * buffers produced for the same capture, and will be identical
@@ -3374,10 +3585,12 @@
      * <p>The estimated camera neutral color in the native sensor colorspace at
      * the time of capture.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: rational[3]</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>This value gives the neutral color point encoded as an RGB value in the
      * native sensor color space.  The neutral color point indicates the
@@ -3391,10 +3604,12 @@
     /**
      * <p>Noise model coefficients for each CFA mosaic channel.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: double[2*CFA Channels]</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>This key contains two noise model coefficients for each CFA channel
      * corresponding to the sensor amplification (S) and sensor readout
@@ -3421,10 +3636,12 @@
     /**
      * <p>The worst-case divergence between Bayer green channels.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: float</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>This value is an estimate of the worst case split between the
      * Bayer green channels in the red and blue rows in the sensor color
@@ -3465,11 +3682,13 @@
      *
      * @see ACAMERA_SENSOR_TEST_PATTERN_MODE
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: int32[4]</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
      *   <li>ACaptureRequest</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>Each color channel is treated as an unsigned 32-bit integer.
      * The camera device then uses the most significant X bits
@@ -3484,11 +3703,13 @@
      * <p>When enabled, the sensor sends a test pattern instead of
      * doing a real exposure from the camera.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: int32 (acamera_metadata_enum_android_sensor_test_pattern_mode_t)</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
      *   <li>ACaptureRequest</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>When a test pattern is enabled, all manual sensor controls specified
      * by ACAMERA_SENSOR_* will be ignored. All other controls should
@@ -3498,7 +3719,7 @@
      * would not actually affect it).</p>
      * <p>Defaults to OFF.</p>
      */
-    ACAMERA_SENSOR_TEST_PATTERN_MODE =                          // int32 (enum)
+    ACAMERA_SENSOR_TEST_PATTERN_MODE =                          // int32 (acamera_metadata_enum_android_sensor_test_pattern_mode_t)
             ACAMERA_SENSOR_START + 24,
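
A sketch (assumed helper) of enabling a solid-color pattern; all four channels are set to the same value, so the Bayer channel ordering does not matter here:

    // Request black frames via the SOLID_COLOR test pattern.
    void enableBlackTestPattern(ACaptureRequest* request) {
        int32_t mode = ACAMERA_SENSOR_TEST_PATTERN_MODE_SOLID_COLOR;
        int32_t data[4] = {0, 0, 0, 0};  // all channels zero (order-independent)
        ACaptureRequest_setEntry_i32(request,
                ACAMERA_SENSOR_TEST_PATTERN_MODE, 1, &mode);
        ACaptureRequest_setEntry_i32(request,
                ACAMERA_SENSOR_TEST_PATTERN_DATA, 4, data);
    }
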
     /**
      * <p>List of sensor test pattern modes for ACAMERA_SENSOR_TEST_PATTERN_MODE
@@ -3506,10 +3727,12 @@
      *
      * @see ACAMERA_SENSOR_TEST_PATTERN_MODE
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: int32[n]</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>Defaults to OFF, and always includes OFF if defined.</p>
      */
@@ -3519,10 +3742,12 @@
      * <p>Duration between the start of first row exposure
      * and the start of last row exposure.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: int64</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>This is the exposure time skew between the first and last
      * row exposure start times. The first row and the last row are
@@ -3539,10 +3764,12 @@
      * <p>List of disjoint rectangles indicating the sensor
      * optically shielded black pixel regions.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: int32[4*num_regions]</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>In most camera sensors, the active array is surrounded by some
      * optically shielded pixel areas. By blocking light, these pixels
@@ -3569,10 +3796,12 @@
      * <p>A per-frame dynamic black level offset for each of the color filter
      * arrangement (CFA) mosaic channels.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: float[4]</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>Camera sensor black levels may vary dramatically for different
      * capture settings (e.g. ACAMERA_SENSOR_SENSITIVITY). The fixed black
@@ -3610,10 +3839,12 @@
     /**
      * <p>Maximum raw value output by sensor for this frame.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: int32</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>Since the ACAMERA_SENSOR_BLACK_LEVEL_PATTERN may change for different
      * capture settings (e.g., ACAMERA_SENSOR_SENSITIVITY), the white
@@ -3637,10 +3868,12 @@
      * <p>The area of the image sensor which corresponds to active pixels after any geometric
      * distortion correction has been applied.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: int32[4]</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>This is the rectangle representing the size of the active region of the sensor (i.e.
      * the region that actually receives light from the scene) after any geometric correction
@@ -3668,10 +3901,12 @@
      *
      * @see ACAMERA_SENSOR_SENSITIVITY
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: int32[2]</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>The values are the standard ISO sensitivity values,
      * as defined in ISO 12232:2006.</p>
@@ -3683,14 +3918,15 @@
      * represents the colors in the top-left 2x2 section of
      * the sensor, in reading order.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: byte (acamera_metadata_enum_android_sensor_info_color_filter_arrangement_t)</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
-     * </ul>
+     * </ul></p>
      *
-     * <p>None</p>
      */
-    ACAMERA_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT =              // byte (enum)
+    ACAMERA_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT =              // byte (acamera_metadata_enum_android_sensor_info_color_filter_arrangement_t)
             ACAMERA_SENSOR_INFO_START + 2,
     /**
      * <p>The range of image exposure times for ACAMERA_SENSOR_EXPOSURE_TIME supported
@@ -3698,12 +3934,13 @@
      *
      * @see ACAMERA_SENSOR_EXPOSURE_TIME
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: int64[2]</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
-     * </ul>
+     * </ul></p>
      *
-     * <p>None</p>
      */
     ACAMERA_SENSOR_INFO_EXPOSURE_TIME_RANGE =                   // int64[2]
             ACAMERA_SENSOR_INFO_START + 3,
@@ -3713,10 +3950,12 @@
      *
      * @see ACAMERA_SENSOR_FRAME_DURATION
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: int64</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>Attempting to use frame durations beyond the maximum will result in the frame
      * duration being clipped to the maximum. See that control for a full definition of frame
@@ -3731,10 +3970,12 @@
      * <p>The physical dimensions of the full pixel
      * array.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: float[2]</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>This is the physical size of the sensor pixel
      * array defined by ACAMERA_SENSOR_INFO_PIXEL_ARRAY_SIZE.</p>
@@ -3747,10 +3988,12 @@
      * <p>Dimensions of the full pixel array, possibly
      * including black calibration pixels.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: int32[2]</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>The pixel count of the full pixel array of the image sensor, which covers
      * ACAMERA_SENSOR_INFO_PHYSICAL_SIZE area.  This represents the full pixel dimensions of
@@ -3773,10 +4016,12 @@
     /**
      * <p>Maximum raw value output by sensor.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: int32</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>This specifies the fully-saturated encoding level for the raw
      * sample values from the sensor.  This is typically caused by the
@@ -3802,26 +4047,30 @@
     /**
      * <p>The time base source for sensor capture start timestamps.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: byte (acamera_metadata_enum_android_sensor_info_timestamp_source_t)</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>The timestamps provided for captures are always in nanoseconds and monotonic, but
      * may not be based on a time source that can be compared to other system time sources.</p>
      * <p>This characteristic defines the source for the timestamps, and therefore whether they
      * can be compared against other system time sources/timestamps.</p>
      */
-    ACAMERA_SENSOR_INFO_TIMESTAMP_SOURCE =                      // byte (enum)
+    ACAMERA_SENSOR_INFO_TIMESTAMP_SOURCE =                      // byte (acamera_metadata_enum_android_sensor_info_timestamp_source_t)
             ACAMERA_SENSOR_INFO_START + 8,
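
A sketch (assumed helper) of acting on that characteristic:

    // True if ACAMERA_SENSOR_TIMESTAMP values share a time base with
    // CLOCK_BOOTTIME and can be compared against other system timestamps.
    bool timestampsAreRealtime(const ACameraMetadata* chars) {
        ACameraMetadata_const_entry entry;
        if (ACameraMetadata_getConstEntry(chars,
                ACAMERA_SENSOR_INFO_TIMESTAMP_SOURCE,
                &entry) != ACAMERA_OK) {
            return false;
        }
        return entry.data.u8[0] ==
                ACAMERA_SENSOR_INFO_TIMESTAMP_SOURCE_REALTIME;
    }
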
     /**
      * <p>Whether the RAW images output from this camera device are subject to
      * lens shading correction.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: byte (acamera_metadata_enum_android_sensor_info_lens_shading_applied_t)</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>If TRUE, all images produced by the camera device in the RAW image formats will
      * have lens shading correction already applied to them. If FALSE, the images will
@@ -3830,16 +4079,18 @@
      * <p>This key will be <code>null</code> for all devices that do not report this information.
      * Devices with RAW capability will always report this information in this key.</p>
      */
-    ACAMERA_SENSOR_INFO_LENS_SHADING_APPLIED =                  // byte (enum)
+    ACAMERA_SENSOR_INFO_LENS_SHADING_APPLIED =                  // byte (acamera_metadata_enum_android_sensor_info_lens_shading_applied_t)
             ACAMERA_SENSOR_INFO_START + 9,
     /**
      * <p>The area of the image sensor which corresponds to active pixels prior to the
      * application of any geometric distortion correction.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: int32[4]</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>The data representation is int[4], which maps to (left, top, width, height).</p>
      * <p>This is the rectangle representing the size of the active region of the sensor (i.e.
@@ -3906,11 +4157,13 @@
      * <p>Quality of lens shading correction applied
      * to the image data.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: byte (acamera_metadata_enum_android_shading_mode_t)</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
      *   <li>ACaptureRequest</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>When set to OFF mode, no lens shading correction will be applied by the
      * camera device, and identity lens shading map data will be provided
@@ -3940,17 +4193,19 @@
      * @see ACAMERA_CONTROL_AWB_MODE
      * @see ACAMERA_STATISTICS_LENS_SHADING_MAP_MODE
      */
-    ACAMERA_SHADING_MODE =                                      // byte (enum)
+    ACAMERA_SHADING_MODE =                                      // byte (acamera_metadata_enum_android_shading_mode_t)
             ACAMERA_SHADING_START,
     /**
      * <p>List of lens shading modes for ACAMERA_SHADING_MODE that are supported by this camera device.</p>
      *
      * @see ACAMERA_SHADING_MODE
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: byte[n]</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>This list contains lens shading modes that can be set for the camera device.
      * Camera devices that support the MANUAL_POST_PROCESSING capability will always
@@ -3965,41 +4220,47 @@
      * <p>Operating mode for the face detector
      * unit.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: byte (acamera_metadata_enum_android_statistics_face_detect_mode_t)</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
      *   <li>ACaptureRequest</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>Whether face detection is enabled, and whether it
      * should output just the basic fields or the full set of
      * fields.</p>
      */
-    ACAMERA_STATISTICS_FACE_DETECT_MODE =                       // byte (enum)
+    ACAMERA_STATISTICS_FACE_DETECT_MODE =                       // byte (acamera_metadata_enum_android_statistics_face_detect_mode_t)
             ACAMERA_STATISTICS_START,
     /**
      * <p>Operating mode for hot pixel map generation.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: byte (acamera_metadata_enum_android_statistics_hot_pixel_map_mode_t)</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
      *   <li>ACaptureRequest</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>If set to <code>true</code>, a hot pixel map is returned in ACAMERA_STATISTICS_HOT_PIXEL_MAP.
      * If set to <code>false</code>, no hot pixel map will be returned.</p>
      *
      * @see ACAMERA_STATISTICS_HOT_PIXEL_MAP
      */
-    ACAMERA_STATISTICS_HOT_PIXEL_MAP_MODE =                     // byte (enum)
+    ACAMERA_STATISTICS_HOT_PIXEL_MAP_MODE =                     // byte (acamera_metadata_enum_android_statistics_hot_pixel_map_mode_t)
             ACAMERA_STATISTICS_START + 3,
     /**
      * <p>List of unique IDs for detected faces.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: int32[n]</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>Each detected face is given a unique ID that is valid for as long as the face is visible
      * to the camera device.  A face that leaves the field of view and later returns may be
@@ -4014,10 +4275,12 @@
      * <p>List of landmarks for detected
      * faces.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: int32[n*6]</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>The coordinate system is that of ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE, with
      * <code>(0, 0)</code> being the top-left pixel of the active array.</p>
@@ -4032,10 +4295,12 @@
      * <p>List of the bounding rectangles for detected
      * faces.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: int32[n*4]</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>The data representation is int[4], which maps to (left, top, width, height).</p>
      * <p>The coordinate system is that of ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE, with
@@ -4051,10 +4316,12 @@
      * <p>List of the face confidence scores for
      * detected faces.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: byte[n]</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>Only available if ACAMERA_STATISTICS_FACE_DETECT_MODE != OFF.</p>
      *
@@ -4067,10 +4334,12 @@
      * that lists the coefficients used to correct for vignetting and color shading,
      * for each Bayer color channel of RAW image data.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: float[4*n*m]</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>The map provided here is the same map that is used by the camera device to
      * correct both color shading and vignetting for output non-RAW images.</p>
@@ -4144,10 +4413,12 @@
      * <p>The camera device estimated scene illumination lighting
      * frequency.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: byte (acamera_metadata_enum_android_statistics_scene_flicker_t)</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>Many light sources, such as most fluorescent lights, flicker at a rate
      * that depends on the local utility power standards. This flicker must be
@@ -4167,15 +4438,17 @@
      * @see ACAMERA_CONTROL_AE_MODE
      * @see ACAMERA_CONTROL_MODE
      */
-    ACAMERA_STATISTICS_SCENE_FLICKER =                          // byte (enum)
+    ACAMERA_STATISTICS_SCENE_FLICKER =                          // byte (acamera_metadata_enum_android_statistics_scene_flicker_t)
             ACAMERA_STATISTICS_START + 14,
     /**
      * <p>List of <code>(x, y)</code> coordinates of hot/defective pixels on the sensor.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: int32[2*n]</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>A coordinate <code>(x, y)</code> must lie between <code>(0, 0)</code> and
      * <code>(width - 1, height - 1)</code> (inclusive), which are the top-left and
@@ -4193,11 +4466,13 @@
      * <p>Whether the camera device will output the lens
      * shading map in output result metadata.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: byte (acamera_metadata_enum_android_statistics_lens_shading_map_mode_t)</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
      *   <li>ACaptureRequest</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>When set to ON,
      * ACAMERA_STATISTICS_LENS_SHADING_MAP will be provided in
@@ -4206,7 +4481,7 @@
      *
      * @see ACAMERA_STATISTICS_LENS_SHADING_MAP
      */
-    ACAMERA_STATISTICS_LENS_SHADING_MAP_MODE =                  // byte (enum)
+    ACAMERA_STATISTICS_LENS_SHADING_MAP_MODE =                  // byte (acamera_metadata_enum_android_statistics_lens_shading_map_mode_t)
             ACAMERA_STATISTICS_START + 16,
     ACAMERA_STATISTICS_END,
 
@@ -4216,10 +4491,12 @@
      *
      * @see ACAMERA_STATISTICS_FACE_DETECT_MODE
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: byte[n]</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>OFF is always supported.</p>
      */
@@ -4229,12 +4506,13 @@
      * <p>The maximum number of simultaneously detectable
      * faces.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: int32</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
-     * </ul>
+     * </ul></p>
      *
-     * <p>None</p>
      */
     ACAMERA_STATISTICS_INFO_MAX_FACE_COUNT =                    // int32
             ACAMERA_STATISTICS_INFO_START + 2,
@@ -4244,10 +4522,12 @@
      *
      * @see ACAMERA_STATISTICS_HOT_PIXEL_MAP_MODE
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: byte[n]</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>If no hot pixel map output is available for this camera device, this will contain only
      * <code>false</code>.</p>
@@ -4261,10 +4541,12 @@
      *
      * @see ACAMERA_STATISTICS_LENS_SHADING_MAP_MODE
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: byte[n]</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>If no lens shading map output is available for this camera device, this key will
      * contain only OFF.</p>
@@ -4282,11 +4564,13 @@
      *
      * @see ACAMERA_TONEMAP_MODE
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: float[n*2]</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
      *   <li>ACaptureRequest</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>See ACAMERA_TONEMAP_CURVE_RED for more details.</p>
      *
@@ -4301,11 +4585,13 @@
      *
      * @see ACAMERA_TONEMAP_MODE
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: float[n*2]</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
      *   <li>ACaptureRequest</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>See ACAMERA_TONEMAP_CURVE_RED for more details.</p>
      *
@@ -4320,11 +4606,13 @@
      *
      * @see ACAMERA_TONEMAP_MODE
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: float[n*2]</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
      *   <li>ACaptureRequest</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>Each channel's curve is defined by an array of control points:</p>
      * <pre><code>ACAMERA_TONEMAP_CURVE_RED =
@@ -4375,11 +4663,13 @@
     /**
      * <p>High-level global contrast/gamma/tonemapping control.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: byte (acamera_metadata_enum_android_tonemap_mode_t)</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
      *   <li>ACaptureRequest</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>When switching to an application-defined contrast curve by setting
      * ACAMERA_TONEMAP_MODE to CONTRAST_CURVE, the curve is defined
@@ -4402,16 +4692,18 @@
      *
      * @see ACAMERA_TONEMAP_MODE
      */
-    ACAMERA_TONEMAP_MODE =                                      // byte (enum)
+    ACAMERA_TONEMAP_MODE =                                      // byte (acamera_metadata_enum_android_tonemap_mode_t)
             ACAMERA_TONEMAP_START + 3,
     /**
      * <p>Maximum number of supported points in the
      * tonemap curve that can be used for android.tonemap.curve.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: int32</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>If the actual number of points provided by the application (in ACAMERA_TONEMAPCURVE_*) is
      * less than this maximum, the camera device will resample the curve to its internal
@@ -4428,10 +4720,12 @@
      *
      * @see ACAMERA_TONEMAP_MODE
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: byte[n]</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>Camera devices that support the MANUAL_POST_PROCESSING capability will always contain
      * at least one of the mode combinations below:</p>
@@ -4449,11 +4743,13 @@
      *
      * @see ACAMERA_TONEMAP_MODE
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: float</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
      *   <li>ACaptureRequest</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>The tonemap curve will be defined by the following formula:
      * * OUT = pow(IN, 1.0 / gamma)
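
As a worked instance of that formula (assuming pixel values normalized to [0.0, 1.0]): with gamma = 2.2, an input of 0.5 maps to pow(0.5, 1.0 / 2.2) ≈ 0.73. A fragment applying it per-request (assumes a valid `request`):

    // Select the gamma-based tonemap and set the exponent.
    uint8_t mode = ACAMERA_TONEMAP_MODE_GAMMA_VALUE;
    float gamma = 2.2f;
    ACaptureRequest_setEntry_u8(request, ACAMERA_TONEMAP_MODE, 1, &mode);
    ACaptureRequest_setEntry_float(request, ACAMERA_TONEMAP_GAMMA, 1, &gamma);
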
@@ -4474,11 +4770,13 @@
      *
      * @see ACAMERA_TONEMAP_MODE
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: byte (acamera_metadata_enum_android_tonemap_preset_curve_t)</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
      *   <li>ACaptureRequest</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>The tonemap curve will be defined by the specified standard.</p>
      * <p>sRGB (approximated by 16 control points):</p>
@@ -4488,17 +4786,19 @@
      * <p>Note that the above figures show a 16-control-point approximation of the preset
      * curves. Camera devices may apply a different approximation to the curve.</p>
      */
-    ACAMERA_TONEMAP_PRESET_CURVE =                              // byte (enum)
+    ACAMERA_TONEMAP_PRESET_CURVE =                              // byte (acamera_metadata_enum_android_tonemap_preset_curve_t)
             ACAMERA_TONEMAP_START + 7,
     ACAMERA_TONEMAP_END,
 
     /**
      * <p>Generally classifies the overall set of the camera device functionality.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: byte (acamera_metadata_enum_android_info_supported_hardware_level_t)</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>The supported hardware level is a high-level description of the camera device's
      * capabilities, summarizing several capabilities into one field.  Each level adds additional
@@ -4551,7 +4851,7 @@
      * @see ACAMERA_SENSOR_INFO_TIMESTAMP_SOURCE
      * @see ACAMERA_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES
      */
-    ACAMERA_INFO_SUPPORTED_HARDWARE_LEVEL =                     // byte (enum)
+    ACAMERA_INFO_SUPPORTED_HARDWARE_LEVEL =                     // byte (acamera_metadata_enum_android_info_supported_hardware_level_t)
             ACAMERA_INFO_START,
     ACAMERA_INFO_END,
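
A sketch (assumed helper) of the common "at least FULL" gate; the level is an enum rather than an ordered integer, so explicit comparisons are used:

    // True for devices reporting FULL or LEVEL_3 hardware support.
    bool isAtLeastFull(const ACameraMetadata* chars) {
        ACameraMetadata_const_entry entry;
        if (ACameraMetadata_getConstEntry(chars,
                ACAMERA_INFO_SUPPORTED_HARDWARE_LEVEL,
                &entry) != ACAMERA_OK) {
            return false;
        }
        uint8_t level = entry.data.u8[0];
        return level == ACAMERA_INFO_SUPPORTED_HARDWARE_LEVEL_FULL ||
               level == ACAMERA_INFO_SUPPORTED_HARDWARE_LEVEL_3;
    }
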
 
@@ -4559,11 +4859,13 @@
      * <p>Whether black-level compensation is locked
      * to its current values, or is free to vary.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: byte (acamera_metadata_enum_android_black_level_lock_t)</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
      *   <li>ACaptureRequest</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>Whether the black level offset was locked for this frame.  Should be
      * ON if ACAMERA_BLACK_LEVEL_LOCK was ON in the capture request, unless
@@ -4572,7 +4874,7 @@
      *
      * @see ACAMERA_BLACK_LEVEL_LOCK
      */
-    ACAMERA_BLACK_LEVEL_LOCK =                                  // byte (enum)
+    ACAMERA_BLACK_LEVEL_LOCK =                                  // byte (acamera_metadata_enum_android_black_level_lock_t)
             ACAMERA_BLACK_LEVEL_START,
     ACAMERA_BLACK_LEVEL_END,
 
@@ -4581,10 +4883,12 @@
      * with which the output result (metadata + buffers) has been fully
      * synchronized.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: int64 (acamera_metadata_enum_android_sync_frame_number_t)</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>When a request is submitted to the camera device, there is usually a
      * delay of several frames before the controls get applied. A camera
@@ -4638,17 +4942,19 @@
      * @see ACAMERA_REQUEST_PIPELINE_MAX_DEPTH
      * @see ACAMERA_SYNC_FRAME_NUMBER
      */
-    ACAMERA_SYNC_FRAME_NUMBER =                                 // int64 (enum)
+    ACAMERA_SYNC_FRAME_NUMBER =                                 // int64 (acamera_metadata_enum_android_sync_frame_number_t)
             ACAMERA_SYNC_START,
     /**
      * <p>The maximum number of frames that can occur after a request
      * (different than the previous) has been submitted, and before the
      * result's state becomes synchronized.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: int32 (acamera_metadata_enum_android_sync_max_latency_t)</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>This defines the maximum distance (in number of metadata results),
      * between the frame number of the request that has new controls to apply
@@ -4657,7 +4963,7 @@
      * must occur before the camera device knows for a fact that the new
      * submitted camera settings have been applied in outgoing frames.</p>
      */
-    ACAMERA_SYNC_MAX_LATENCY =                                  // int32 (enum)
+    ACAMERA_SYNC_MAX_LATENCY =                                  // int32 (acamera_metadata_enum_android_sync_max_latency_t)
             ACAMERA_SYNC_START + 1,
     ACAMERA_SYNC_END,
 
@@ -4666,10 +4972,12 @@
      * configurations that this camera device supports
      * (i.e. format, width, height, output/input stream).</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: int32[n*4] (acamera_metadata_enum_android_depth_available_depth_stream_configurations_t)</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>These are output stream configurations for use with
      * dataSpace HAL_DATASPACE_DEPTH. The configurations are
@@ -4683,16 +4991,18 @@
      * android.depth.maxDepthSamples, 1, OUTPUT)</code> in addition to
      * the entries for HAL_PIXEL_FORMAT_Y16.</p>
      */
-    ACAMERA_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS =       // int32[n*4] (enum)
+    ACAMERA_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS =       // int32[n*4] (acamera_metadata_enum_android_depth_available_depth_stream_configurations_t)
             ACAMERA_DEPTH_START + 1,
     /**
      * <p>This lists the minimum frame duration for each
      * format/size combination for depth output formats.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: int64[4*n]</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>This should correspond to the frame duration when only that
      * stream is active, with all processing (typically in android.*.mode)
@@ -4714,10 +5024,12 @@
      * <p>This lists the maximum stall duration for each
      * output format/size combination for depth streams.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: int64[4*n]</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>A stall duration is how much extra time would get added
      * to the normal minimum frame duration for a repeating request
@@ -4737,10 +5049,12 @@
      * DEPTH16 / DEPTH_POINT_CLOUD output, and normal color outputs (such as
      * YUV_420_888, JPEG, or RAW) simultaneously.</p>
      *
-     * <p>This tag may appear in:</p>
+     * <p>Type: byte (acamera_metadata_enum_android_depth_depth_is_exclusive_t)</p>
+     *
+     * <p>This tag may appear in:
      * <ul>
      *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
-     * </ul>
+     * </ul></p>
      *
      * <p>If TRUE, including both depth and color outputs in a single
      * capture request is not supported. An application must interleave color
@@ -4751,7 +5065,7 @@
      * measure depth values, which causes the color image to be
      * corrupted during depth measurement.</p>
      */
-    ACAMERA_DEPTH_DEPTH_IS_EXCLUSIVE =                          // byte (enum)
+    ACAMERA_DEPTH_DEPTH_IS_EXCLUSIVE =                          // byte (acamera_metadata_enum_android_depth_depth_is_exclusive_t)
             ACAMERA_DEPTH_START + 4,
     ACAMERA_DEPTH_END,
 
@@ -6966,6 +7280,7 @@
 
 } acamera_metadata_enum_android_depth_depth_is_exclusive_t;
 
+
 #endif /* __ANDROID_API__ >= 24 */
 
 __END_DECLS
diff --git a/cmds/screenrecord/screenrecord.cpp b/cmds/screenrecord/screenrecord.cpp
index de0167a..bc32bbe 100644
--- a/cmds/screenrecord/screenrecord.cpp
+++ b/cmds/screenrecord/screenrecord.cpp
@@ -78,7 +78,7 @@
 static bool gWantFrameTime = false;     // do we want times on each frame?
 static uint32_t gVideoWidth = 0;        // default width+height
 static uint32_t gVideoHeight = 0;
-static uint32_t gBitRate = 4000000;     // 4Mbps
+static uint32_t gBitRate = 20000000;     // 20Mbps
 static uint32_t gTimeLimitSec = kMaxTimeLimitSec;
 
 // Set by signal handler to stop recording.
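
The previous 4Mbps default remains reachable via the existing `--bit-rate` flag, e.g. `adb shell screenrecord --bit-rate 4000000 /sdcard/demo.mp4` (usage sketch).
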
diff --git a/cmds/stagefright/stagefright.cpp b/cmds/stagefright/stagefright.cpp
index d7c2e87..d70282b 100644
--- a/cmds/stagefright/stagefright.cpp
+++ b/cmds/stagefright/stagefright.cpp
@@ -870,7 +870,9 @@
 
             sp<IMemory> mem =
                     retriever->getFrameAtTime(-1,
-                                    MediaSource::ReadOptions::SEEK_PREVIOUS_SYNC);
+                            MediaSource::ReadOptions::SEEK_PREVIOUS_SYNC,
+                            HAL_PIXEL_FORMAT_RGB_565,
+                            false /*metaOnly*/);
 
             if (mem != NULL) {
                 failed = false;
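
With the extended signature above, a meta-only variant is also possible (sketch; assumes the same surrounding scope, and that passing true for metaOnly skips decoding pixel data and returns frame information only):

    sp<IMemory> meta = retriever->getFrameAtTime(-1,
            MediaSource::ReadOptions::SEEK_PREVIOUS_SYNC,
            HAL_PIXEL_FORMAT_RGB_565,
            true /*metaOnly*/);
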
diff --git a/drm/libmediadrm/DrmHal.cpp b/drm/libmediadrm/DrmHal.cpp
index bac60ff..bc37557 100644
--- a/drm/libmediadrm/DrmHal.cpp
+++ b/drm/libmediadrm/DrmHal.cpp
@@ -257,6 +257,11 @@
                 plugin = hPlugin;
             }
         );
+
+    if (!hResult.isOk()) {
+        ALOGE("createPlugin remote call failed");
+    }
+
     return plugin;
 }
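
The added check follows the usual HIDL pattern; a slightly fuller variant (an assumption, not what this patch adds) also logs the transport error string:

    if (!hResult.isOk()) {
        ALOGE("createPlugin remote call failed: %s",
                hResult.description().c_str());
    }
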
 
diff --git a/drm/mediadrm/plugins/clearkey/ClearKeyUUID.cpp b/drm/mediadrm/plugins/clearkey/ClearKeyUUID.cpp
index ed050f7..0259a42 100644
--- a/drm/mediadrm/plugins/clearkey/ClearKeyUUID.cpp
+++ b/drm/mediadrm/plugins/clearkey/ClearKeyUUID.cpp
@@ -21,12 +21,19 @@
 namespace clearkeydrm {
 
 bool isClearKeyUUID(const uint8_t uuid[16]) {
-    static const uint8_t kClearKeyUUID[16] = {
+    static const uint8_t kCommonPsshBoxUUID[16] = {
         0x10,0x77,0xEF,0xEC,0xC0,0xB2,0x4D,0x02,
         0xAC,0xE3,0x3C,0x1E,0x52,0xE2,0xFB,0x4B
     };
 
-    return !memcmp(uuid, kClearKeyUUID, sizeof(kClearKeyUUID));
+    // Used in an MPD (DASH manifest) to specify the DRM scheme for players
+    static const uint8_t kClearKeyUUID[16] = {
+        0xE2,0x71,0x9D,0x58,0xA9,0x85,0xB3,0xC9,
+        0x78,0x1A,0xB0,0x30,0xAF,0x78,0xD3,0x0E
+    };
+
+    return !memcmp(uuid, kCommonPsshBoxUUID, sizeof(kCommonPsshBoxUUID)) ||
+           !memcmp(uuid, kClearKeyUUID, sizeof(kClearKeyUUID));
 }
 
 } // namespace clearkeydrm
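
A quick sanity sketch (assumed test code, requires <assert.h>) showing that both UUIDs now match:

    static const uint8_t kCommon[16] = {
        0x10,0x77,0xEF,0xEC,0xC0,0xB2,0x4D,0x02,
        0xAC,0xE3,0x3C,0x1E,0x52,0xE2,0xFB,0x4B
    };
    static const uint8_t kClearKey[16] = {
        0xE2,0x71,0x9D,0x58,0xA9,0x85,0xB3,0xC9,
        0x78,0x1A,0xB0,0x30,0xAF,0x78,0xD3,0x0E
    };
    assert(clearkeydrm::isClearKeyUUID(kCommon));    // Common PSSH box UUID
    assert(clearkeydrm::isClearKeyUUID(kClearKey));  // ClearKey scheme UUID
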
diff --git a/include/private/media/VideoFrame.h b/include/private/media/VideoFrame.h
index 51050cd..a9d4dd1 100644
--- a/include/private/media/VideoFrame.h
+++ b/include/private/media/VideoFrame.h
@@ -30,14 +30,41 @@
 class VideoFrame
 {
 public:
-    VideoFrame(): mWidth(0), mHeight(0), mDisplayWidth(0), mDisplayHeight(0), mSize(0),
-            mRotationAngle(0), mData(0) {}
+    // Construct a VideoFrame object with the specified parameters.
+    // Allocates the frame buffer if |allocate| is true, and allocates
+    // a buffer to hold the ICC data if |iccData| and |iccSize| indicate
+    // its presence.
+    VideoFrame(uint32_t width, uint32_t height,
+            uint32_t displayWidth, uint32_t displayHeight,
+            uint32_t angle, uint32_t bpp, bool allocate,
+            const void *iccData, size_t iccSize):
+        mWidth(width), mHeight(height),
+        mDisplayWidth(displayWidth), mDisplayHeight(displayHeight),
+        mRotationAngle(angle), mBytesPerPixel(bpp), mRowBytes(bpp * width),
+        mSize(0), mIccSize(0), mReserved(0), mData(0), mIccData(0) {
+        if (allocate) {
+            mSize = mRowBytes * mHeight;
+            mData = new uint8_t[mSize];
+            if (mData == NULL) {
+                mSize = 0;
+            }
+        }
 
+        if (iccData != NULL && iccSize > 0) {
+            mIccSize = iccSize;
+            mIccData = new uint8_t[iccSize];
+            if (mIccData != NULL) {
+                memcpy(mIccData, iccData, iccSize);
+            } else {
+                mIccSize = 0;
+            }
+        }
+    }
+
+    // Deep copy of both the information fields and the frame data
     VideoFrame(const VideoFrame& copy) {
-        mWidth = copy.mWidth;
-        mHeight = copy.mHeight;
-        mDisplayWidth = copy.mDisplayWidth;
-        mDisplayHeight = copy.mDisplayHeight;
+        copyInfoOnly(copy);
+
         mSize = copy.mSize;
         mData = NULL;  // initialize it first
         if (mSize > 0 && copy.mData != NULL) {
@@ -48,26 +75,99 @@
                 mSize = 0;
             }
         }
-        mRotationAngle = copy.mRotationAngle;
+
+        mIccSize = copy.mIccSize;
+        mIccData = NULL;  // initialize it first
+        if (mIccSize > 0 && copy.mIccData != NULL) {
+            mIccData = new uint8_t[mIccSize];
+            if (mIccData != NULL) {
+                memcpy(mIccData, copy.mIccData, mIccSize);
+            } else {
+                mIccSize = 0;
+            }
+        }
     }
 
     ~VideoFrame() {
         if (mData != 0) {
             delete[] mData;
         }
+        if (mIccData != 0) {
+            delete[] mIccData;
+        }
+    }
+
+    // Copy |copy| to a flattened VideoFrame in IMemory; 'this' must point to
+    // a chunk of memory backed by IMemory that is at least getFlattenedSize()
+    // of |copy| in size.
+    void copyFlattened(const VideoFrame& copy) {
+        copyInfoOnly(copy);
+
+        mSize = copy.mSize;
+        mData = NULL;  // initialize it first
+        if (copy.mSize > 0 && copy.mData != NULL) {
+            memcpy(getFlattenedData(), copy.mData, copy.mSize);
+        }
+
+        mIccSize = copy.mIccSize;
+        mIccData = NULL;  // initialize it first
+        if (copy.mIccSize > 0 && copy.mIccData != NULL) {
+            memcpy(getFlattenedIccData(), copy.mIccData, copy.mIccSize);
+        }
+    }
+
+    // Calculate the size required to store this VideoFrame flattened in IMemory
+    size_t getFlattenedSize() const {
+        return sizeof(VideoFrame) + mSize + mIccSize;
+    }
+
+    // Get the pointer to the frame data in a flattened VideoFrame in IMemory
+    uint8_t* getFlattenedData() const {
+        return (uint8_t*)this + sizeof(VideoFrame);
+    }
+
+    // Get the pointer to the ICC data in a flattened VideoFrame in IMemory
+    uint8_t* getFlattenedIccData() const {
+        return (uint8_t*)this + sizeof(VideoFrame) + mSize;
     }
 
     // Intentional public access modifier:
-    uint32_t mWidth;
-    uint32_t mHeight;
-    uint32_t mDisplayWidth;
-    uint32_t mDisplayHeight;
+    uint32_t mWidth;           // Decoded image width before rotation
+    uint32_t mHeight;          // Decoded image height before rotation
+    uint32_t mDisplayWidth;    // Display width before rotation
+    uint32_t mDisplayHeight;   // Display height before rotation
+    int32_t  mRotationAngle;   // Rotation angle, clockwise, should be multiple of 90
+    uint32_t mBytesPerPixel;   // Number of bytes per pixel
+    uint32_t mRowBytes;        // Number of bytes per row before rotation
     uint32_t mSize;            // Number of bytes in mData
-    int32_t  mRotationAngle;   // rotation angle, clockwise, should be multiple of 90
-    // mData should be 64 bit aligned to prevent additional padding
+    uint32_t mIccSize;         // Number of bytes in mIccData
+    uint32_t mReserved;        // (padding to make mData 64-bit aligned)
+
+    // mData should be 64-bit aligned to prevent additional padding
     uint8_t* mData;            // Actual binary data
-    // pad structure so it's the same size on 64 bit and 32 bit
+    // pad structure so it's the same size on 64-bit and 32-bit
     char     mPadding[8 - sizeof(mData)];
+
+    // mIccData should be 64-bit aligned to prevent additional padding
+    uint8_t* mIccData;            // Actual ICC binary data
+    // pad structure so it's the same size on 64-bit and 32-bit
+    char     mIccPadding[8 - sizeof(mIccData)];
+
+private:
+    //
+    // Utility methods used only within VideoFrame struct
+    //
+
+    // Copy the information fields only
+    void copyInfoOnly(const VideoFrame& copy) {
+        mWidth = copy.mWidth;
+        mHeight = copy.mHeight;
+        mDisplayWidth = copy.mDisplayWidth;
+        mDisplayHeight = copy.mDisplayHeight;
+        mRotationAngle = copy.mRotationAngle;
+        mBytesPerPixel = copy.mBytesPerPixel;
+        mRowBytes = copy.mRowBytes;
+    }
 };
 
 }; // namespace android
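
For illustration, a minimal sketch of how a caller might use the new flattening
helpers to place a decoded frame into shared memory (not part of this change;
the helper name flattenFrame and the MemoryDealer sizing are illustrative):

    #include <binder/IMemory.h>
    #include <binder/MemoryDealer.h>
    #include <private/media/VideoFrame.h>

    // Flatten |frame| (which owns its pixel/ICC buffers) into one IMemory
    // chunk: VideoFrame header first, then mSize bytes of pixel data, then
    // mIccSize bytes of ICC data.
    android::sp<android::IMemory> flattenFrame(const android::VideoFrame& frame) {
        const size_t size = frame.getFlattenedSize();
        android::sp<android::MemoryDealer> dealer =
                new android::MemoryDealer(size, "VideoFrameSketch");
        android::sp<android::IMemory> mem = dealer->allocate(size);
        if (mem == NULL || mem->pointer() == NULL) {
            return NULL;
        }
        android::VideoFrame* dst = static_cast<android::VideoFrame*>(mem->pointer());
        dst->copyFlattened(frame);  // writes header + data contiguously
        return mem;
    }
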
diff --git a/media/libaaudio/examples/write_sine/src/write_sine.cpp b/media/libaaudio/examples/write_sine/src/write_sine.cpp
index 0125c0f..87fb40b 100644
--- a/media/libaaudio/examples/write_sine/src/write_sine.cpp
+++ b/media/libaaudio/examples/write_sine/src/write_sine.cpp
@@ -16,11 +16,14 @@
 
 // Play sine waves using AAudio.
 
-#include <stdio.h>
-#include <stdlib.h>
-#include <math.h>
 #include <aaudio/AAudio.h>
 #include <aaudio/AAudioTesting.h>
+#include <errno.h>
+#include <fcntl.h>
+#include <stdio.h>
+#include <stdlib.h>
+#include <string.h>
+#include <sys/stat.h>
+#include <unistd.h>
+
 #include "AAudioExampleUtils.h"
 #include "AAudioSimplePlayer.h"
 #include "AAudioArgsParser.h"
@@ -48,6 +51,8 @@
     float   *floatData = nullptr;
     int16_t *shortData = nullptr;
 
+    int      testFd = -1;
+
     // Make printf print immediately so that debug info is not stuck
     // in a buffer if we hang or crash.
     setvbuf(stdout, nullptr, _IONBF, (size_t) 0);
@@ -95,6 +100,9 @@
         goto finish;
     }
 
+    testFd = open("/data/aaudio_temp.raw", O_CREAT | O_RDWR, S_IRWXU);
+    printf("testFd = %d, pid = %d\n", testFd, getpid());
+
     // Start the stream.
     printf("call player.start()\n");
     result = player.start();
@@ -176,7 +184,17 @@
     }
 
 finish:
+    printf("testFd = %d, fcntl before aaudio close returns 0x%08X\n",
+           testFd, fcntl(testFd, F_GETFD));
     player.close();
+    printf("testFd = %d, fcntl after aaudio close returns 0x%08X\n",
+           testFd, fcntl(testFd, F_GETFD));
+    if (::close(testFd) != 0) {
+        printf("ERROR SharedMemoryParcelable::close() of testFd = %d, errno = %s\n",
+               testFd, strerror(errno));
+    }
+    printf("testFd = %d, fcntl after close() returns 0x%08X\n", testFd, fcntl(testFd, F_GETFD));
+
     delete[] floatData;
     delete[] shortData;
     printf("exiting - AAudio result = %d = %s\n", result, AAudio_convertResultToText(result));
diff --git a/media/libaaudio/include/aaudio/AAudio.h b/media/libaaudio/include/aaudio/AAudio.h
index 30fbdd6..3c23736 100644
--- a/media/libaaudio/include/aaudio/AAudio.h
+++ b/media/libaaudio/include/aaudio/AAudio.h
@@ -234,6 +234,15 @@
                                                    int32_t channelCount);
 
 /**
+ * Identical to AAudioStreamBuilder_setChannelCount().
+ *
+ * @param builder reference provided by AAudio_createStreamBuilder()
+ * @param samplesPerFrame Number of samples in a frame.
+ */
+AAUDIO_API void AAudioStreamBuilder_setSamplesPerFrame(AAudioStreamBuilder* builder,
+                                                       int32_t samplesPerFrame);
+
+/**
  * Request a sample data format, for example AAUDIO_FORMAT_PCM_I16.
  *
  * The default, if you do not call this function, is AAUDIO_UNSPECIFIED.
@@ -721,6 +730,14 @@
 AAUDIO_API int32_t AAudioStream_getChannelCount(AAudioStream* stream);
 
 /**
+ * Identical to AAudioStream_getChannelCount().
+ *
+ * @param stream reference provided by AAudioStreamBuilder_openStream()
+ * @return actual number of samples per frame
+ */
+AAUDIO_API int32_t AAudioStream_getSamplesPerFrame(AAudioStream* stream);
+
+/**
  * @param stream reference provided by AAudioStreamBuilder_openStream()
  * @return actual device ID
  */
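
A usage sketch for the new aliases, assuming the standard AAudio builder flow
(openStereoStream is an illustrative name; error handling abbreviated):

    #include <aaudio/AAudio.h>

    aaudio_result_t openStereoStream(AAudioStream **streamOut) {
        AAudioStreamBuilder *builder = nullptr;
        aaudio_result_t result = AAudio_createStreamBuilder(&builder);
        if (result != AAUDIO_OK) return result;
        // Same effect as AAudioStreamBuilder_setChannelCount(builder, 2).
        AAudioStreamBuilder_setSamplesPerFrame(builder, 2);
        result = AAudioStreamBuilder_openStream(builder, streamOut);
        if (result == AAUDIO_OK) {
            // Same value as AAudioStream_getChannelCount(*streamOut).
            int32_t samplesPerFrame = AAudioStream_getSamplesPerFrame(*streamOut);
            (void) samplesPerFrame;
        }
        AAudioStreamBuilder_delete(builder);
        return result;
    }
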
diff --git a/media/libaaudio/libaaudio.map.txt b/media/libaaudio/libaaudio.map.txt
index b9012e5..2ba5250 100644
--- a/media/libaaudio/libaaudio.map.txt
+++ b/media/libaaudio/libaaudio.map.txt
@@ -11,6 +11,7 @@
     AAudioStreamBuilder_setErrorCallback;
     AAudioStreamBuilder_setFramesPerDataCallback;
     AAudioStreamBuilder_setSampleRate;
+    AAudioStreamBuilder_setSamplesPerFrame;
     AAudioStreamBuilder_setChannelCount;
     AAudioStreamBuilder_setFormat;
     AAudioStreamBuilder_setSharingMode;
@@ -34,6 +35,7 @@
     AAudioStream_getBufferCapacityInFrames;
     AAudioStream_getXRunCount;
     AAudioStream_getSampleRate;
+    AAudioStream_getSamplesPerFrame;
     AAudioStream_getChannelCount;
     AAudioStream_getPerformanceMode;
     AAudioStream_getDeviceId;
diff --git a/media/libaaudio/src/binding/AudioEndpointParcelable.cpp b/media/libaaudio/src/binding/AudioEndpointParcelable.cpp
index d05abb0..1a97555 100644
--- a/media/libaaudio/src/binding/AudioEndpointParcelable.cpp
+++ b/media/libaaudio/src/binding/AudioEndpointParcelable.cpp
@@ -28,6 +28,7 @@
 #include "binding/RingBufferParcelable.h"
 #include "binding/AudioEndpointParcelable.h"
 
+using android::base::unique_fd;
 using android::NO_ERROR;
 using android::status_t;
 using android::Parcel;
@@ -49,7 +50,8 @@
  * Add the file descriptor to the table.
  * @return index in table or negative error
  */
-int32_t AudioEndpointParcelable::addFileDescriptor(int fd, int32_t sizeInBytes) {
+int32_t AudioEndpointParcelable::addFileDescriptor(const unique_fd& fd,
+                                                   int32_t sizeInBytes) {
     if (mNumSharedMemories >= MAX_SHARED_MEMORIES) {
         return AAUDIO_ERROR_OUT_OF_RANGE;
     }
diff --git a/media/libaaudio/src/binding/AudioEndpointParcelable.h b/media/libaaudio/src/binding/AudioEndpointParcelable.h
index 993075c..aa8573f 100644
--- a/media/libaaudio/src/binding/AudioEndpointParcelable.h
+++ b/media/libaaudio/src/binding/AudioEndpointParcelable.h
@@ -20,6 +20,7 @@
 #include <stdint.h>
 
 //#include <sys/mman.h>
+#include <android-base/unique_fd.h>
 #include <binder/Parcel.h>
 #include <binder/Parcelable.h>
 
@@ -47,7 +48,7 @@
      * Add the file descriptor to the table.
      * @return index in table or negative error
      */
-    int32_t addFileDescriptor(int fd, int32_t sizeInBytes);
+    int32_t addFileDescriptor(const android::base::unique_fd& fd, int32_t sizeInBytes);
 
     virtual status_t writeToParcel(Parcel* parcel) const override;
 
diff --git a/media/libaaudio/src/binding/SharedMemoryParcelable.cpp b/media/libaaudio/src/binding/SharedMemoryParcelable.cpp
index 899eb04..90217ab 100644
--- a/media/libaaudio/src/binding/SharedMemoryParcelable.cpp
+++ b/media/libaaudio/src/binding/SharedMemoryParcelable.cpp
@@ -24,11 +24,13 @@
 #include <sys/mman.h>
 #include <aaudio/AAudio.h>
 
+#include <android-base/unique_fd.h>
 #include <binder/Parcelable.h>
 #include <utility/AAudioUtilities.h>
 
 #include "binding/SharedMemoryParcelable.h"
 
+using android::base::unique_fd;
 using android::NO_ERROR;
 using android::status_t;
 using android::Parcel;
@@ -39,17 +41,19 @@
 SharedMemoryParcelable::SharedMemoryParcelable() {}
 SharedMemoryParcelable::~SharedMemoryParcelable() {};
 
-void SharedMemoryParcelable::setup(int fd, int32_t sizeInBytes) {
-    mFd = fd;
+void SharedMemoryParcelable::setup(const unique_fd& fd, int32_t sizeInBytes) {
+    mFd.reset(dup(fd.get())); // store a duplicate fd
+    ALOGV("SharedMemoryParcelable::setup(%d -> %d, %d) this = %p\n",
+          fd.get(), mFd.get(), sizeInBytes, this);
     mSizeInBytes = sizeInBytes;
-
 }
 
 status_t SharedMemoryParcelable::writeToParcel(Parcel* parcel) const {
     status_t status = parcel->writeInt32(mSizeInBytes);
     if (status != NO_ERROR) return status;
     if (mSizeInBytes > 0) {
-        status = parcel->writeDupFileDescriptor(mFd);
+        ALOGV("SharedMemoryParcelable::writeToParcel() mFd = %d, this = %p\n", mFd.get(), this);
+        status = parcel->writeUniqueFileDescriptor(mFd);
-        ALOGE_IF(status != NO_ERROR, "SharedMemoryParcelable writeDupFileDescriptor failed : %d",
+        ALOGE_IF(status != NO_ERROR, "SharedMemoryParcelable writeUniqueFileDescriptor failed : %d",
                  status);
     }
@@ -62,15 +66,16 @@
         return status;
     }
     if (mSizeInBytes > 0) {
-        // Keep the original FD until you are done with the mFd.
-        // If you close it in here then it will prevent mFd from working.
-        mOriginalFd = parcel->readFileDescriptor();
-        ALOGV("SharedMemoryParcelable::readFromParcel() LEAK? mOriginalFd = %d\n", mOriginalFd);
-        mFd = fcntl(mOriginalFd, F_DUPFD_CLOEXEC, 0);
-        ALOGV("SharedMemoryParcelable::readFromParcel() LEAK? mFd = %d\n", mFd);
-        if (mFd == -1) {
-            status = -errno;
-            ALOGE("SharedMemoryParcelable readFromParcel fcntl() failed : %d", status);
+        // The Parcel owns the file descriptor and will close it later.
+        unique_fd mmapFd;
+        status = parcel->readUniqueFileDescriptor(&mmapFd);
+        if (status != NO_ERROR) {
+            ALOGE("SharedMemoryParcelable::readFromParcel() readUniqueFileDescriptor() failed : %d",
+                  status);
+        } else {
+            // Resolve the memory now while we still have the FD from the Parcel.
+            // Closing the FD will not affect the shared memory once mmap() has been called.
+            status = AAudioConvert_androidToAAudioResult(resolveSharedMemory(mmapFd));
         }
     }
     return status;
@@ -85,45 +90,50 @@
         }
         mResolvedAddress = MMAP_UNRESOLVED_ADDRESS;
     }
-    if (mFd != -1) {
-        ALOGV("SharedMemoryParcelable::close() LEAK? mFd = %d\n", mFd);
-        ::close(mFd);
-        mFd = -1;
-    }
-    if (mOriginalFd != -1) {
-        ALOGV("SharedMemoryParcelable::close() LEAK? mOriginalFd = %d\n", mOriginalFd);
-        ::close(mOriginalFd);
-        mOriginalFd = -1;
+    return AAUDIO_OK;
+}
+
+aaudio_result_t SharedMemoryParcelable::resolveSharedMemory(const unique_fd& fd) {
+    mResolvedAddress = (uint8_t *) mmap(0, mSizeInBytes, PROT_READ | PROT_WRITE,
+                                        MAP_SHARED, fd.get(), 0);
+    if (mResolvedAddress == MMAP_UNRESOLVED_ADDRESS) {
+        ALOGE("SharedMemoryParcelable mmap() failed for fd = %d, errno = %s",
+              fd.get(), strerror(errno));
+        return AAUDIO_ERROR_INTERNAL;
     }
     return AAUDIO_OK;
 }
 
 aaudio_result_t SharedMemoryParcelable::resolve(int32_t offsetInBytes, int32_t sizeInBytes,
                                               void **regionAddressPtr) {
-
     if (offsetInBytes < 0) {
         ALOGE("SharedMemoryParcelable illegal offsetInBytes = %d", offsetInBytes);
         return AAUDIO_ERROR_OUT_OF_RANGE;
     } else if ((offsetInBytes + sizeInBytes) > mSizeInBytes) {
         ALOGE("SharedMemoryParcelable out of range, offsetInBytes = %d, "
-              "sizeInBytes = %d, mSizeInBytes = %d",
+                      "sizeInBytes = %d, mSizeInBytes = %d",
               offsetInBytes, sizeInBytes, mSizeInBytes);
         return AAUDIO_ERROR_OUT_OF_RANGE;
     }
+
+    aaudio_result_t result = AAUDIO_OK;
+
     if (mResolvedAddress == MMAP_UNRESOLVED_ADDRESS) {
-        mResolvedAddress = (uint8_t *) mmap(0, mSizeInBytes, PROT_READ|PROT_WRITE,
-                                          MAP_SHARED, mFd, 0);
-        if (mResolvedAddress == MMAP_UNRESOLVED_ADDRESS) {
-            ALOGE("SharedMemoryParcelable mmap failed for fd = %d, errno = %s",
-                  mFd, strerror(errno));
-            return AAUDIO_ERROR_INTERNAL;
+        if (mFd.get() != -1) {
+            result = resolveSharedMemory(mFd);
+        } else {
+            ALOGE("SharedMemoryParcelable has no file descriptor for shared memory.");
+            result = AAUDIO_ERROR_INTERNAL;
         }
     }
-    *regionAddressPtr = mResolvedAddress + offsetInBytes;
-    ALOGV("SharedMemoryParcelable mResolvedAddress = %p", mResolvedAddress);
-    ALOGV("SharedMemoryParcelable offset by %d, *regionAddressPtr = %p",
-          offsetInBytes, *regionAddressPtr);
-    return AAUDIO_OK;
+
+    if (result == AAUDIO_OK && mResolvedAddress != MMAP_UNRESOLVED_ADDRESS) {
+        *regionAddressPtr = mResolvedAddress + offsetInBytes;
+        ALOGV("SharedMemoryParcelable mResolvedAddress = %p", mResolvedAddress);
+        ALOGV("SharedMemoryParcelable offset by %d, *regionAddressPtr = %p",
+              offsetInBytes, *regionAddressPtr);
+    }
+    return result;
 }
 
 int32_t SharedMemoryParcelable::getSizeInBytes() {
@@ -135,17 +145,11 @@
         ALOGE("SharedMemoryParcelable invalid mSizeInBytes = %d", mSizeInBytes);
         return AAUDIO_ERROR_OUT_OF_RANGE;
     }
-    if (mSizeInBytes > 0) {
-        if (mFd == -1) {
-            ALOGE("SharedMemoryParcelable uninitialized mFd = %d", mFd);
-            return AAUDIO_ERROR_INTERNAL;
-        }
-    }
     return AAUDIO_OK;
 }
 
 void SharedMemoryParcelable::dump() {
-    ALOGD("SharedMemoryParcelable mFd = %d", mFd);
+    ALOGD("SharedMemoryParcelable mFd = %d", mFd.get());
     ALOGD("SharedMemoryParcelable mSizeInBytes = %d", mSizeInBytes);
     ALOGD("SharedMemoryParcelable mResolvedAddress = %p", mResolvedAddress);
 }
diff --git a/media/libaaudio/src/binding/SharedMemoryParcelable.h b/media/libaaudio/src/binding/SharedMemoryParcelable.h
index 4b94b46..2a634e0 100644
--- a/media/libaaudio/src/binding/SharedMemoryParcelable.h
+++ b/media/libaaudio/src/binding/SharedMemoryParcelable.h
@@ -18,15 +18,12 @@
 #define ANDROID_AAUDIO_SHARED_MEMORY_PARCELABLE_H
 
 #include <stdint.h>
-
 #include <sys/mman.h>
+
+#include <android-base/unique_fd.h>
 #include <binder/Parcel.h>
 #include <binder/Parcelable.h>
 
-using android::status_t;
-using android::Parcel;
-using android::Parcelable;
-
 namespace aaudio {
 
 // Arbitrary limits for sanity checks. TODO remove after debugging.
@@ -37,17 +34,24 @@
 /**
  * This is a parcelable description of a shared memory referenced by a file descriptor.
  * It may be divided into several regions.
+ * The memory can be shared using Binder or simply shared between threads.
  */
-class SharedMemoryParcelable : public Parcelable {
+class SharedMemoryParcelable : public android::Parcelable {
 public:
     SharedMemoryParcelable();
     virtual ~SharedMemoryParcelable();
 
-    void setup(int fd, int32_t sizeInBytes);
+    /**
+     * Make a dup() of the fd and store it for later use.
+     *
+     * @param fd file descriptor of the shared memory region
+     * @param sizeInBytes size of the region in bytes
+     */
+    void setup(const android::base::unique_fd& fd, int32_t sizeInBytes);
 
-    virtual status_t writeToParcel(Parcel* parcel) const override;
+    virtual android::status_t writeToParcel(android::Parcel* parcel) const override;
 
-    virtual status_t readFromParcel(const Parcel* parcel) override;
+    virtual android::status_t readFromParcel(const android::Parcel* parcel) override;
 
     // mmap() shared memory
     aaudio_result_t resolve(int32_t offsetInBytes, int32_t sizeInBytes, void **regionAddressPtr);
@@ -55,8 +59,6 @@
     // munmap() any mapped memory
     aaudio_result_t close();
 
-    bool isFileDescriptorSafe();
-
     int32_t getSizeInBytes();
 
     aaudio_result_t validate();
@@ -67,10 +69,11 @@
 
 #define MMAP_UNRESOLVED_ADDRESS    reinterpret_cast<uint8_t*>(MAP_FAILED)
 
-    int      mFd = -1;
-    int      mOriginalFd = -1;
-    int32_t  mSizeInBytes = 0;
-    uint8_t *mResolvedAddress = MMAP_UNRESOLVED_ADDRESS;
+    aaudio_result_t resolveSharedMemory(const android::base::unique_fd& fd);
+
+    android::base::unique_fd   mFd;
+    int32_t     mSizeInBytes = 0;
+    uint8_t    *mResolvedAddress = MMAP_UNRESOLVED_ADDRESS;
 };
 
 } /* namespace aaudio */
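
To make the new dup()-based ownership contract concrete, a small sketch
(shareRegion is an illustrative name; compare test_marshalling.cpp below;
error handling omitted):

    #include <android-base/unique_fd.h>
    #include <cutils/ashmem.h>
    #include "binding/SharedMemoryParcelable.h"

    void shareRegion() {
        android::base::unique_fd fd(ashmem_create_region("sketch", 4096));
        aaudio::SharedMemoryParcelable shm;
        shm.setup(fd, 4096);  // setup() stores dup(fd.get())
        // |fd| still owns the original descriptor and closes it when it goes
        // out of scope; |shm| keeps its own duplicate alive until destroyed.
    }
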
diff --git a/media/libaaudio/src/client/AudioEndpoint.cpp b/media/libaaudio/src/client/AudioEndpoint.cpp
index 6ec285f..604eed5 100644
--- a/media/libaaudio/src/client/AudioEndpoint.cpp
+++ b/media/libaaudio/src/client/AudioEndpoint.cpp
@@ -32,14 +32,17 @@
 #define RIDICULOUSLY_LARGE_FRAME_SIZE        4096
 
 AudioEndpoint::AudioEndpoint()
-    : mFreeRunning(false)
+    : mUpCommandQueue(nullptr)
+    , mDataQueue(nullptr)
+    , mFreeRunning(false)
     , mDataReadCounter(0)
     , mDataWriteCounter(0)
 {
 }
 
-AudioEndpoint::~AudioEndpoint()
-{
+AudioEndpoint::~AudioEndpoint() {
+    delete mDataQueue;
+    delete mUpCommandQueue;
 }
 
 static aaudio_result_t AudioEndpoint_validateQueueDescriptor(const char *type,
@@ -118,24 +121,28 @@
 {
     aaudio_result_t result = AudioEndpoint_validateDescriptor(pEndpointDescriptor);
     if (result != AAUDIO_OK) {
-        ALOGE("AudioEndpoint_validateQueueDescriptor returned %d %s",
-              result, AAudio_convertResultToText(result));
         return result;
     }
 
     // ============================ up message queue =============================
     const RingBufferDescriptor *descriptor = &pEndpointDescriptor->upMessageQueueDescriptor;
     if(descriptor->bytesPerFrame != sizeof(AAudioServiceMessage)) {
-        ALOGE("AudioEndpoint::configure() bytesPerFrame != sizeof(AAudioServiceMessage) = %d",
+        ALOGE("AudioEndpoint.configure() bytesPerFrame != sizeof(AAudioServiceMessage) = %d",
               descriptor->bytesPerFrame);
         return AAUDIO_ERROR_INTERNAL;
     }
 
     if(descriptor->readCounterAddress == nullptr || descriptor->writeCounterAddress == nullptr) {
-        ALOGE("AudioEndpoint_validateQueueDescriptor() NULL counter address");
+        ALOGE("AudioEndpoint.configure() NULL counter address");
         return AAUDIO_ERROR_NULL;
     }
 
+    // Prevent a memory leak if configure() is called more than once.
+    if (mUpCommandQueue != nullptr || mDataQueue != nullptr) {
+        ALOGE("AudioEndpoint.configure() endpoint already used");
+        return AAUDIO_ERROR_INTERNAL;
+    }
+
     mUpCommandQueue = new FifoBuffer(
             descriptor->bytesPerFrame,
             descriptor->capacityInFrames,
@@ -146,8 +153,8 @@
 
     // ============================ data queue =============================
     descriptor = &pEndpointDescriptor->dataQueueDescriptor;
-    ALOGV("AudioEndpoint::configure() data framesPerBurst = %d", descriptor->framesPerBurst);
-    ALOGV("AudioEndpoint::configure() data readCounterAddress = %p",
+    ALOGV("AudioEndpoint.configure() data framesPerBurst = %d", descriptor->framesPerBurst);
+    ALOGV("AudioEndpoint.configure() data readCounterAddress = %p",
           descriptor->readCounterAddress);
 
     // An example of free running is when the other side is read or written by hardware DMA
@@ -156,7 +163,7 @@
                              ? descriptor->readCounterAddress // read by other side
                              : descriptor->writeCounterAddress; // written by other side
     mFreeRunning = (remoteCounter == nullptr);
-    ALOGV("AudioEndpoint::configure() mFreeRunning = %d", mFreeRunning ? 1 : 0);
+    ALOGV("AudioEndpoint.configure() mFreeRunning = %d", mFreeRunning ? 1 : 0);
 
     int64_t *readCounterAddress = (descriptor->readCounterAddress == nullptr)
                                   ? &mDataReadCounter
@@ -254,3 +261,7 @@
     ALOGD("AudioEndpoint: data readCounter  = %lld", (long long) mDataQueue->getReadCounter());
     ALOGD("AudioEndpoint: data writeCounter = %lld", (long long) mDataQueue->getWriteCounter());
 }
+
+void AudioEndpoint::eraseDataMemory() {
+    if (mDataQueue != nullptr) {
+        mDataQueue->eraseMemory();
+    }
+}
diff --git a/media/libaaudio/src/client/AudioEndpoint.h b/media/libaaudio/src/client/AudioEndpoint.h
index 81a4f7b..f5b67e8 100644
--- a/media/libaaudio/src/client/AudioEndpoint.h
+++ b/media/libaaudio/src/client/AudioEndpoint.h
@@ -86,6 +86,11 @@
 
     int32_t getBufferCapacityInFrames() const;
 
+    /**
+     * Write zeros to the data queue memory.
+     */
+    void eraseDataMemory();
+
     void dump() const;
 
 private:
diff --git a/media/libaaudio/src/client/AudioStreamInternal.cpp b/media/libaaudio/src/client/AudioStreamInternal.cpp
index a136c0d..bdebf8f 100644
--- a/media/libaaudio/src/client/AudioStreamInternal.cpp
+++ b/media/libaaudio/src/client/AudioStreamInternal.cpp
@@ -80,9 +80,16 @@
 aaudio_result_t AudioStreamInternal::open(const AudioStreamBuilder &builder) {
 
     aaudio_result_t result = AAUDIO_OK;
+    int32_t capacity;
     AAudioStreamRequest request;
-    AAudioStreamConfiguration configuration;
+    AAudioStreamConfiguration configurationOutput;
 
+    if (getState() != AAUDIO_STREAM_STATE_UNINITIALIZED) {
+        ALOGE("AudioStreamInternal::open(): already open! state = %d", getState());
+        return AAUDIO_ERROR_INVALID_STATE;
+    }
+
+    // Copy requested parameters to the stream.
     result = AudioStream::open(builder);
     if (result < 0) {
         return result;
@@ -109,87 +116,95 @@
 
     request.getConfiguration().setBufferCapacity(builder.getBufferCapacity());
 
-    mServiceStreamHandle = mServiceInterface.openStream(request, configuration);
+    mServiceStreamHandle = mServiceInterface.openStream(request, configurationOutput);
     if (mServiceStreamHandle < 0) {
         result = mServiceStreamHandle;
-        ALOGE("AudioStreamInternal.open(): openStream() returned %d", result);
-    } else {
-        result = configuration.validate();
-        if (result != AAUDIO_OK) {
-            close();
-            return result;
-        }
-        // Save results of the open.
-        setSampleRate(configuration.getSampleRate());
-        setSamplesPerFrame(configuration.getSamplesPerFrame());
-        setDeviceId(configuration.getDeviceId());
-        setSharingMode(configuration.getSharingMode());
-
-        // Save device format so we can do format conversion and volume scaling together.
-        mDeviceFormat = configuration.getFormat();
-
-        result = mServiceInterface.getStreamDescription(mServiceStreamHandle, mEndPointParcelable);
-        if (result != AAUDIO_OK) {
-            mServiceInterface.closeStream(mServiceStreamHandle);
-            return result;
-        }
-
-        // resolve parcelable into a descriptor
-        result = mEndPointParcelable.resolve(&mEndpointDescriptor);
-        if (result != AAUDIO_OK) {
-            mServiceInterface.closeStream(mServiceStreamHandle);
-            return result;
-        }
-
-        // Configure endpoint based on descriptor.
-        mAudioEndpoint.configure(&mEndpointDescriptor, getDirection());
-
-        mFramesPerBurst = mEndpointDescriptor.dataQueueDescriptor.framesPerBurst;
-        int32_t capacity = mEndpointDescriptor.dataQueueDescriptor.capacityInFrames;
-
-        // Validate result from server.
-        if (mFramesPerBurst < 16 || mFramesPerBurst > 16 * 1024) {
-            ALOGE("AudioStream::open(): framesPerBurst out of range = %d", mFramesPerBurst);
-            return AAUDIO_ERROR_OUT_OF_RANGE;
-        }
-        if (capacity < mFramesPerBurst || capacity > 32 * 1024) {
-            ALOGE("AudioStream::open(): bufferCapacity out of range = %d", capacity);
-            return AAUDIO_ERROR_OUT_OF_RANGE;
-        }
-
-        mClockModel.setSampleRate(getSampleRate());
-        mClockModel.setFramesPerBurst(mFramesPerBurst);
-
-        if (getDataCallbackProc()) {
-            mCallbackFrames = builder.getFramesPerDataCallback();
-            if (mCallbackFrames > getBufferCapacity() / 2) {
-                ALOGE("AudioStreamInternal::open(): framesPerCallback too big = %d, capacity = %d",
-                      mCallbackFrames, getBufferCapacity());
-                mServiceInterface.closeStream(mServiceStreamHandle);
-                return AAUDIO_ERROR_OUT_OF_RANGE;
-
-            } else if (mCallbackFrames < 0) {
-                ALOGE("AudioStreamInternal::open(): framesPerCallback negative");
-                mServiceInterface.closeStream(mServiceStreamHandle);
-                return AAUDIO_ERROR_OUT_OF_RANGE;
-
-            }
-            if (mCallbackFrames == AAUDIO_UNSPECIFIED) {
-                mCallbackFrames = mFramesPerBurst;
-            }
-
-            int32_t bytesPerFrame = getSamplesPerFrame()
-                                    * AAudioConvert_formatToSizeInBytes(getFormat());
-            int32_t callbackBufferSize = mCallbackFrames * bytesPerFrame;
-            mCallbackBuffer = new uint8_t[callbackBufferSize];
-        }
-
-        setState(AAUDIO_STREAM_STATE_OPEN);
-        // only connect to AudioManager if this is a playback stream running in client process
-        if (!mInService && getDirection() == AAUDIO_DIRECTION_OUTPUT) {
-            init(android::PLAYER_TYPE_AAUDIO, AUDIO_USAGE_MEDIA);
-        }
+        ALOGE("AudioStreamInternal::open(): openStream() returned %d", result);
+        return result;
     }
+
+    result = configurationOutput.validate();
+    if (result != AAUDIO_OK) {
+        goto error;
+    }
+    // Save results of the open.
+    setSampleRate(configurationOutput.getSampleRate());
+    setSamplesPerFrame(configurationOutput.getSamplesPerFrame());
+    setDeviceId(configurationOutput.getDeviceId());
+    setSharingMode(configurationOutput.getSharingMode());
+
+    // Save device format so we can do format conversion and volume scaling together.
+    mDeviceFormat = configurationOutput.getFormat();
+
+    result = mServiceInterface.getStreamDescription(mServiceStreamHandle, mEndPointParcelable);
+    if (result != AAUDIO_OK) {
+        goto error;
+    }
+
+    // Resolve parcelable into a descriptor.
+    result = mEndPointParcelable.resolve(&mEndpointDescriptor);
+    if (result != AAUDIO_OK) {
+        goto error;
+    }
+
+    // Configure endpoint based on descriptor.
+    result = mAudioEndpoint.configure(&mEndpointDescriptor, getDirection());
+    if (result != AAUDIO_OK) {
+        goto error;
+    }
+
+    mFramesPerBurst = mEndpointDescriptor.dataQueueDescriptor.framesPerBurst;
+    capacity = mEndpointDescriptor.dataQueueDescriptor.capacityInFrames;
+
+    // Validate result from server.
+    if (mFramesPerBurst < 16 || mFramesPerBurst > 16 * 1024) {
+        ALOGE("AudioStreamInternal::open(): framesPerBurst out of range = %d", mFramesPerBurst);
+        result = AAUDIO_ERROR_OUT_OF_RANGE;
+        goto error;
+    }
+    if (capacity < mFramesPerBurst || capacity > 32 * 1024) {
+        ALOGE("AudioStreamInternal::open(): bufferCapacity out of range = %d", capacity);
+        result = AAUDIO_ERROR_OUT_OF_RANGE;
+        goto error;
+    }
+
+    mClockModel.setSampleRate(getSampleRate());
+    mClockModel.setFramesPerBurst(mFramesPerBurst);
+
+    if (getDataCallbackProc()) {
+        mCallbackFrames = builder.getFramesPerDataCallback();
+        if (mCallbackFrames > getBufferCapacity() / 2) {
+            ALOGE("AudioStreamInternal::open(): framesPerCallback too big = %d, capacity = %d",
+                  mCallbackFrames, getBufferCapacity());
+            result = AAUDIO_ERROR_OUT_OF_RANGE;
+            goto error;
+
+        } else if (mCallbackFrames < 0) {
+            ALOGE("AudioStreamInternal::open(): framesPerCallback negative");
+            result = AAUDIO_ERROR_OUT_OF_RANGE;
+            goto error;
+
+        }
+        if (mCallbackFrames == AAUDIO_UNSPECIFIED) {
+            mCallbackFrames = mFramesPerBurst;
+        }
+
+        int32_t bytesPerFrame = getSamplesPerFrame()
+                                * AAudioConvert_formatToSizeInBytes(getFormat());
+        int32_t callbackBufferSize = mCallbackFrames * bytesPerFrame;
+        mCallbackBuffer = new uint8_t[callbackBufferSize];
+    }
+
+    setState(AAUDIO_STREAM_STATE_OPEN);
+    // Only connect to AudioManager if this is a playback stream running in client process.
+    if (!mInService && getDirection() == AAUDIO_DIRECTION_OUTPUT) {
+        init(android::PLAYER_TYPE_AAUDIO, AUDIO_USAGE_MEDIA);
+    }
+
+    return result;
+
+error:
+    close();
     return result;
 }
 
@@ -224,7 +239,6 @@
     }
 }
 
-
 static void *aaudio_callback_thread_proc(void *context)
 {
     AudioStreamInternal *stream = (AudioStreamInternal *)context;
@@ -440,10 +454,14 @@
             setState(AAUDIO_STREAM_STATE_CLOSED);
             break;
         case AAUDIO_SERVICE_EVENT_DISCONNECTED:
+            // Prevent hardware from looping on old data and making buzzing sounds.
+            if (getDirection() == AAUDIO_DIRECTION_OUTPUT) {
+                mAudioEndpoint.eraseDataMemory();
+            }
             result = AAUDIO_ERROR_DISCONNECTED;
             setState(AAUDIO_STREAM_STATE_DISCONNECTED);
             ALOGW("WARNING - AudioStreamInternal::onEventFromServer()"
-                          " AAUDIO_SERVICE_EVENT_DISCONNECTED");
+                          " AAUDIO_SERVICE_EVENT_DISCONNECTED - FIFO cleared");
             break;
         case AAUDIO_SERVICE_EVENT_VOLUME:
             mStreamVolume = (float)message->event.dataDouble;
diff --git a/media/libaaudio/src/core/AAudioAudio.cpp b/media/libaaudio/src/core/AAudioAudio.cpp
index 82d96e0..5089b00 100644
--- a/media/libaaudio/src/core/AAudioAudio.cpp
+++ b/media/libaaudio/src/core/AAudioAudio.cpp
@@ -101,7 +101,6 @@
  */
 static aaudio_policy_t s_MMapPolicy = AAUDIO_UNSPECIFIED;
 
-
 static AudioStream *convertAAudioStreamToAudioStream(AAudioStream* stream)
 {
     return (AudioStream*) stream;
@@ -144,12 +143,18 @@
 }
 
 AAUDIO_API void AAudioStreamBuilder_setChannelCount(AAudioStreamBuilder* builder,
-                                                       int32_t channelCount)
+                                                    int32_t channelCount)
 {
     AudioStreamBuilder *streamBuilder = convertAAudioBuilderToStreamBuilder(builder);
     streamBuilder->setSamplesPerFrame(channelCount);
 }
 
+AAUDIO_API void AAudioStreamBuilder_setSamplesPerFrame(AAudioStreamBuilder* builder,
+                                                       int32_t channelCount)
+{
+    AAudioStreamBuilder_setChannelCount(builder, channelCount);
+}
+
 AAUDIO_API void AAudioStreamBuilder_setDirection(AAudioStreamBuilder* builder,
                                              aaudio_direction_t direction)
 {
@@ -350,6 +355,11 @@
     return audioStream->getSamplesPerFrame();
 }
 
+AAUDIO_API int32_t AAudioStream_getSamplesPerFrame(AAudioStream* stream)
+{
+    return AAudioStream_getChannelCount(stream);
+}
+
 AAUDIO_API aaudio_stream_state_t AAudioStream_getState(AAudioStream* stream)
 {
     AudioStream *audioStream = convertAAudioStreamToAudioStream(stream);
diff --git a/media/libaaudio/src/fifo/FifoBuffer.cpp b/media/libaaudio/src/fifo/FifoBuffer.cpp
index 8d2c62d..a869886 100644
--- a/media/libaaudio/src/fifo/FifoBuffer.cpp
+++ b/media/libaaudio/src/fifo/FifoBuffer.cpp
@@ -210,3 +210,9 @@
     return mFifo->getCapacity();
 }
 
+void FifoBuffer::eraseMemory() {
+    int32_t numBytes = convertFramesToBytes(getBufferCapacityInFrames());
+    if (numBytes > 0) {
+        memset(mStorage, 0, (size_t) numBytes);
+    }
+}
diff --git a/media/libaaudio/src/fifo/FifoBuffer.h b/media/libaaudio/src/fifo/FifoBuffer.h
index a94e9b0..f5a9e27 100644
--- a/media/libaaudio/src/fifo/FifoBuffer.h
+++ b/media/libaaudio/src/fifo/FifoBuffer.h
@@ -111,6 +111,11 @@
         mFifo->setWriteCounter(n);
     }
 
+    /**
+     * Write zeros to the buffer memory.
+     * This is generally only called before or after the buffer is used.
+     */
+    void eraseMemory();
+
 private:
 
     void fillWrappingBuffer(WrappingBuffer *wrappingBuffer,
diff --git a/media/libaaudio/tests/test_marshalling.cpp b/media/libaaudio/tests/test_marshalling.cpp
index 79beed6..c51fbce 100644
--- a/media/libaaudio/tests/test_marshalling.cpp
+++ b/media/libaaudio/tests/test_marshalling.cpp
@@ -19,6 +19,7 @@
 #include <stdlib.h>
 #include <math.h>
 
+#include <android-base/unique_fd.h>
 #include <binder/Parcel.h>
 #include <binder/Parcelable.h>
 #include <cutils/ashmem.h>
@@ -28,6 +29,7 @@
 #include <aaudio/AAudio.h>
 #include <binding/AudioEndpointParcelable.h>
 
+using android::base::unique_fd;
 using namespace android;
 using namespace aaudio;
 
@@ -48,7 +50,7 @@
     SharedMemoryParcelable sharedMemoryA;
     SharedMemoryParcelable sharedMemoryB;
     const size_t memSizeBytes = 840;
-    int fd = ashmem_create_region("TestMarshalling", memSizeBytes);
+    unique_fd fd(ashmem_create_region("TestMarshalling", memSizeBytes));
     ASSERT_LE(0, fd);
     sharedMemoryA.setup(fd, memSizeBytes);
     void *region1;
@@ -81,7 +83,7 @@
     SharedRegionParcelable sharedRegionA;
     SharedRegionParcelable sharedRegionB;
     const size_t memSizeBytes = 840;
-    int fd = ashmem_create_region("TestMarshalling", memSizeBytes);
+    unique_fd fd(ashmem_create_region("TestMarshalling", memSizeBytes));
     ASSERT_LE(0, fd);
     sharedMemories[0].setup(fd, memSizeBytes);
     int32_t regionOffset1 = 32;
@@ -119,7 +121,7 @@
     const int32_t counterSizeBytes = sizeof(int64_t);
     const size_t memSizeBytes = dataSizeBytes + (2 * counterSizeBytes);
 
-    int fd = ashmem_create_region("TestMarshalling", memSizeBytes);
+    unique_fd fd(ashmem_create_region("TestMarshalling Z", memSizeBytes));
     ASSERT_LE(0, fd);
     sharedMemories[0].setup(fd, memSizeBytes);
 
diff --git a/media/libaudiohal/EffectHalHidl.cpp b/media/libaudiohal/EffectHalHidl.cpp
index b49b975..61fb6bab 100644
--- a/media/libaudiohal/EffectHalHidl.cpp
+++ b/media/libaudiohal/EffectHalHidl.cpp
@@ -40,7 +40,7 @@
 namespace android {
 
 EffectHalHidl::EffectHalHidl(const sp<IEffect>& effect, uint64_t effectId)
-        : mEffect(effect), mEffectId(effectId), mBuffersChanged(true) {
+        : mEffect(effect), mEffectId(effectId), mBuffersChanged(true), mEfGroup(nullptr) {
 }
 
 EffectHalHidl::~EffectHalHidl() {
@@ -49,6 +49,9 @@
         mEffect.clear();
         hardware::IPCThreadState::self()->flushCommands();
     }
+    if (mEfGroup) {
+        EventFlag::deleteEventFlag(&mEfGroup);
+    }
 }
 
 // static
diff --git a/media/libeffects/downmix/EffectDownmix.c b/media/libeffects/downmix/EffectDownmix.c
index 519f4a8..b4a1d77 100644
--- a/media/libeffects/downmix/EffectDownmix.c
+++ b/media/libeffects/downmix/EffectDownmix.c
@@ -563,6 +563,10 @@
             return -EINVAL;
         }
         effect_param_t *cmd = (effect_param_t *) pCmdData;
+        if (cmd->psize != sizeof(int32_t)) {
+            android_errorWriteLog(0x534e4554, "63662938");
+            return -EINVAL;
+        }
         *(int *)pReplyData = Downmix_setParameter(pDownmixer, *(int32_t *)cmd->data,
                 cmd->vsize, cmd->data + sizeof(int32_t));
         break;
diff --git a/media/libeffects/lvm/wrapper/Reverb/EffectReverb.cpp b/media/libeffects/lvm/wrapper/Reverb/EffectReverb.cpp
index ab6b63c..1717b49 100644
--- a/media/libeffects/lvm/wrapper/Reverb/EffectReverb.cpp
+++ b/media/libeffects/lvm/wrapper/Reverb/EffectReverb.cpp
@@ -188,12 +188,13 @@
 void Reverb_free            (ReverbContext *pContext);
 int  Reverb_setConfig       (ReverbContext *pContext, effect_config_t *pConfig);
 void Reverb_getConfig       (ReverbContext *pContext, effect_config_t *pConfig);
-int  Reverb_setParameter    (ReverbContext *pContext, void *pParam, void *pValue);
+int  Reverb_setParameter    (ReverbContext *pContext, void *pParam, void *pValue, int vsize);
 int  Reverb_getParameter    (ReverbContext *pContext,
                              void          *pParam,
                              uint32_t      *pValueSize,
                              void          *pValue);
 int Reverb_LoadPreset       (ReverbContext   *pContext);
+int Reverb_paramValueSize   (int32_t param);
 
 /* Effect Library Interface Implementation */
 
@@ -1870,12 +1871,13 @@
 //  pContext         - handle to instance data
 //  pParam           - pointer to parameter
 //  pValue           - pointer to value
+//  vsize            - value size
 //
 // Outputs:
 //
 //----------------------------------------------------------------------------
 
-int Reverb_setParameter (ReverbContext *pContext, void *pParam, void *pValue){
+int Reverb_setParameter (ReverbContext *pContext, void *pParam, void *pValue, int vsize){
     int status = 0;
     int16_t level;
     int16_t ratio;
@@ -1899,6 +1901,11 @@
         return 0;
     }
 
+    if (vsize < Reverb_paramValueSize(param)) {
+        android_errorWriteLog(0x534e4554, "63526567");
+        return -EINVAL;
+    }
+
     switch (param){
         case REVERB_PARAM_PROPERTIES:
             ALOGV("\tReverb_setParameter() REVERB_PARAM_PROPERTIES");
@@ -1974,6 +1981,31 @@
     return status;
 } /* end Reverb_setParameter */
 
+
+/**
+ * Returns the size in bytes of the value of each environmental reverb parameter.
+ */
+int Reverb_paramValueSize(int32_t param) {
+    switch (param) {
+    case REVERB_PARAM_ROOM_LEVEL:
+    case REVERB_PARAM_ROOM_HF_LEVEL:
+    case REVERB_PARAM_REFLECTIONS_LEVEL:
+    case REVERB_PARAM_REVERB_LEVEL:
+        return sizeof(int16_t); // millibel
+    case REVERB_PARAM_DECAY_TIME:
+    case REVERB_PARAM_REFLECTIONS_DELAY:
+    case REVERB_PARAM_REVERB_DELAY:
+        return sizeof(uint32_t); // milliseconds
+    case REVERB_PARAM_DECAY_HF_RATIO:
+    case REVERB_PARAM_DIFFUSION:
+    case REVERB_PARAM_DENSITY:
+        return sizeof(int16_t); // permille
+    case REVERB_PARAM_PROPERTIES:
+        return sizeof(s_reverb_settings); // struct of all reverb properties
+    }
+    return sizeof(int32_t);
+}
+
 } // namespace
 } // namespace
 
@@ -2144,7 +2176,8 @@
 
             *(int *)pReplyData = android::Reverb_setParameter(pContext,
                                                              (void *)p->data,
-                                                              p->data + p->psize);
+                                                              p->data + p->psize,
+                                                              p->vsize);
         } break;
 
         case EFFECT_CMD_ENABLE:
diff --git a/media/libheif/Android.bp b/media/libheif/Android.bp
new file mode 100644
index 0000000..7d5a4eb
--- /dev/null
+++ b/media/libheif/Android.bp
@@ -0,0 +1,23 @@
+cc_library_shared {
+    name: "libheif",
+
+    srcs: [
+        "HeifDecoderImpl.cpp",
+    ],
+
+    shared_libs: [
+        "libbinder",
+        "liblog",
+        "libutils",
+        "libmedia",
+    ],
+
+    cflags: [
+        "-Werror",
+        "-Wall",
+    ],
+
+    include_dirs: [],
+
+    export_include_dirs: ["include"],
+}
diff --git a/media/libheif/HeifDecoderImpl.cpp b/media/libheif/HeifDecoderImpl.cpp
new file mode 100644
index 0000000..080313c
--- /dev/null
+++ b/media/libheif/HeifDecoderImpl.cpp
@@ -0,0 +1,316 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "HeifDecoderImpl"
+
+#include "HeifDecoderImpl.h"
+
+#include <stdio.h>
+
+#include <binder/IMemory.h>
+#include <binder/MemoryDealer.h>
+#include <drm/drm_framework_common.h>
+#include <media/IDataSource.h>
+#include <media/mediametadataretriever.h>
+#include <media/stagefright/MediaSource.h>
+#include <private/media/VideoFrame.h>
+#include <utils/Log.h>
+#include <utils/RefBase.h>
+
+HeifDecoder* createHeifDecoder() {
+    return new android::HeifDecoderImpl();
+}
+
+namespace android {
+
+/*
+ * HeifDataSource
+ *
+ * Proxies data requests over IDataSource interface from MediaMetadataRetriever
+ * to the HeifStream interface we received from the heif decoder client.
+ */
+class HeifDataSource : public BnDataSource {
+public:
+    /*
+     * Constructs HeifDataSource; will take ownership of |stream|.
+     */
+    HeifDataSource(HeifStream* stream)
+        : mStream(stream), mReadPos(0), mEOS(false) {}
+
+    ~HeifDataSource() override {}
+
+    /*
+     * Initializes internal resources.
+     */
+    bool init();
+
+    sp<IMemory> getIMemory() override { return mMemory; }
+    ssize_t readAt(off64_t offset, size_t size) override;
+    status_t getSize(off64_t* size) override;
+    void close() override {}
+    uint32_t getFlags() override { return 0; }
+    String8 toString() override { return String8("HeifDataSource"); }
+    sp<DecryptHandle> DrmInitialization(const char*) override {
+        return nullptr;
+    }
+
+private:
+    /*
+     * Buffer size for passing the read data to mediaserver. Set to 64K
+     * (which is what MediaDataSource Java API's jni implementation uses).
+     */
+    enum {
+        kBufferSize = 64 * 1024,
+    };
+    sp<IMemory> mMemory;
+    std::unique_ptr<HeifStream> mStream;
+    off64_t mReadPos;
+    bool mEOS;
+};
+
+bool HeifDataSource::init() {
+    sp<MemoryDealer> memoryDealer =
+            new MemoryDealer(kBufferSize, "HeifDataSource");
+    mMemory = memoryDealer->allocate(kBufferSize);
+    if (mMemory == nullptr) {
+        ALOGE("Failed to allocate shared memory!");
+        return false;
+    }
+    return true;
+}
+
+ssize_t HeifDataSource::readAt(off64_t offset, size_t size) {
+    ALOGV("readAt: offset=%lld, size=%zu", (long long)offset, size);
+
+    if (size == 0) {
+        return mEOS ? ERROR_END_OF_STREAM : 0;
+    }
+
+    if (offset < mReadPos) {
+        // try seek, then rewind/skip, fail if none worked
+        if (mStream->seek(offset)) {
+            ALOGV("readAt: seek to offset=%lld", (long long)offset);
+            mReadPos = offset;
+            mEOS = false;
+        } else if (mStream->rewind()) {
+            ALOGV("readAt: rewind to offset=0");
+            mReadPos = 0;
+            mEOS = false;
+        } else {
+            ALOGE("readAt: couldn't seek or rewind!");
+            mEOS = true;
+        }
+    }
+
+    if (mEOS) {
+        ALOGV("readAt: EOS");
+        return ERROR_END_OF_STREAM;
+    }
+
+    if (offset > mReadPos) {
+        // skipping
+        size_t skipSize = offset - mReadPos;
+        size_t bytesSkipped = mStream->read(nullptr, skipSize);
+        if (bytesSkipped <= skipSize) {
+            mReadPos += bytesSkipped;
+        }
+        if (bytesSkipped != skipSize) {
+            mEOS = true;
+            return ERROR_END_OF_STREAM;
+        }
+    }
+
+    if (size > kBufferSize) {
+        size = kBufferSize;
+    }
+    size_t bytesRead = mStream->read(mMemory->pointer(), size);
+    if (bytesRead > size || bytesRead == 0) {
+        // bytesRead is invalid
+        mEOS = true;
+        return ERROR_END_OF_STREAM;
+    } else if (bytesRead < size) {
+        // Read some bytes but not all; set EOS so the next call returns ERROR_END_OF_STREAM.
+        mEOS = true;
+    }
+    mReadPos += bytesRead;
+    return bytesRead;
+}
+
+status_t HeifDataSource::getSize(off64_t* size) {
+    if (!mStream->hasLength()) {
+        *size = -1;
+        ALOGE("getSize: not supported!");
+        return ERROR_UNSUPPORTED;
+    }
+    *size = mStream->getLength();
+    ALOGV("getSize: size=%lld", (long long)*size);
+    return OK;
+}
+
+/////////////////////////////////////////////////////////////////////////
+
+HeifDecoderImpl::HeifDecoderImpl() :
+    // The output color format should always be set via setOutputColor();
+    // in case it's not, default to HAL_PIXEL_FORMAT_RGB_565.
+    mOutputColor(HAL_PIXEL_FORMAT_RGB_565),
+    mCurScanline(0) {
+}
+
+HeifDecoderImpl::~HeifDecoderImpl() {
+}
+
+bool HeifDecoderImpl::init(HeifStream* stream, HeifFrameInfo* frameInfo) {
+    sp<HeifDataSource> dataSource = new HeifDataSource(stream);
+    if (!dataSource->init()) {
+        return false;
+    }
+    mDataSource = dataSource;
+
+    mRetriever = new MediaMetadataRetriever();
+    status_t err = mRetriever->setDataSource(mDataSource, "video/mp4");
+    if (err != OK) {
+        ALOGE("failed to set data source!");
+
+        mRetriever.clear();
+        mDataSource.clear();
+        return false;
+    }
+    ALOGV("successfully set data source.");
+
+    const char* hasVideo = mRetriever->extractMetadata(METADATA_KEY_HAS_VIDEO);
+    if (!hasVideo || strcasecmp(hasVideo, "yes")) {
+        ALOGE("no video: %s", hasVideo ? hasVideo : "null");
+        return false;
+    }
+
+    mFrameMemory = mRetriever->getFrameAtTime(0,
+            IMediaSource::ReadOptions::SEEK_PREVIOUS_SYNC,
+            mOutputColor, true /*metaOnly*/);
+    if (mFrameMemory == nullptr || mFrameMemory->pointer() == nullptr) {
+        ALOGE("getFrameAtTime: videoFrame is a nullptr");
+        return false;
+    }
+
+    VideoFrame* videoFrame = static_cast<VideoFrame*>(mFrameMemory->pointer());
+
+    ALOGV("Meta dimension %dx%d, display %dx%d, angle %d, iccSize %d",
+            videoFrame->mWidth,
+            videoFrame->mHeight,
+            videoFrame->mDisplayWidth,
+            videoFrame->mDisplayHeight,
+            videoFrame->mRotationAngle,
+            videoFrame->mIccSize);
+
+    if (frameInfo != nullptr) {
+        frameInfo->set(
+                videoFrame->mDisplayWidth,
+                videoFrame->mDisplayHeight,
+                videoFrame->mRotationAngle,
+                videoFrame->mBytesPerPixel,
+                videoFrame->mIccSize,
+                videoFrame->getFlattenedIccData());
+    }
+    return true;
+}
+
+bool HeifDecoderImpl::getEncodedColor(HeifEncodedColor* /*outColor*/) const {
+    ALOGW("getEncodedColor: not implemented!");
+    return false;
+}
+
+bool HeifDecoderImpl::setOutputColor(HeifColorFormat heifColor) {
+    switch(heifColor) {
+        case kHeifColorFormat_RGB565:
+        {
+            mOutputColor = HAL_PIXEL_FORMAT_RGB_565;
+            return true;
+        }
+        case kHeifColorFormat_RGBA_8888:
+        {
+            mOutputColor = HAL_PIXEL_FORMAT_RGBA_8888;
+            return true;
+        }
+        case kHeifColorFormat_BGRA_8888:
+        {
+            mOutputColor = HAL_PIXEL_FORMAT_BGRA_8888;
+            return true;
+        }
+        default:
+            break;
+    }
+    ALOGE("Unsupported output color format %d", heifColor);
+    return false;
+}
+
+bool HeifDecoderImpl::decode(HeifFrameInfo* frameInfo) {
+    mFrameMemory = mRetriever->getFrameAtTime(0,
+            IMediaSource::ReadOptions::SEEK_PREVIOUS_SYNC, mOutputColor);
+    if (mFrameMemory == nullptr || mFrameMemory->pointer() == nullptr) {
+        ALOGE("getFrameAtTime: videoFrame is a nullptr");
+        return false;
+    }
+
+    VideoFrame* videoFrame = static_cast<VideoFrame*>(mFrameMemory->pointer());
+    ALOGV("Decoded dimension %dx%d, display %dx%d, angle %d, rowbytes %d, size %d",
+            videoFrame->mWidth,
+            videoFrame->mHeight,
+            videoFrame->mDisplayWidth,
+            videoFrame->mDisplayHeight,
+            videoFrame->mRotationAngle,
+            videoFrame->mRowBytes,
+            videoFrame->mSize);
+
+    if (frameInfo != nullptr) {
+        frameInfo->set(
+                videoFrame->mDisplayWidth,
+                videoFrame->mDisplayHeight,
+                videoFrame->mRotationAngle,
+                videoFrame->mBytesPerPixel,
+                videoFrame->mIccSize,
+                videoFrame->getFlattenedIccData());
+    }
+    return true;
+}
+
+bool HeifDecoderImpl::getScanline(uint8_t* dst) {
+    if (mFrameMemory == nullptr || mFrameMemory->pointer() == nullptr) {
+        return false;
+    }
+    VideoFrame* videoFrame = static_cast<VideoFrame*>(mFrameMemory->pointer());
+    if (mCurScanline >= videoFrame->mDisplayHeight) {
+        ALOGE("no more scanline available");
+        return false;
+    }
+    uint8_t* src = videoFrame->getFlattenedData() + videoFrame->mRowBytes * mCurScanline++;
+    memcpy(dst, src, videoFrame->mBytesPerPixel * videoFrame->mDisplayWidth);
+    return true;
+}
+
+size_t HeifDecoderImpl::skipScanlines(size_t count) {
+    if (mFrameMemory == nullptr || mFrameMemory->pointer() == nullptr) {
+        return 0;
+    }
+    VideoFrame* videoFrame = static_cast<VideoFrame*>(mFrameMemory->pointer());
+
+    uint32_t oldScanline = mCurScanline;
+    mCurScanline += count;
+    if (mCurScanline > videoFrame->mDisplayHeight) {
+        mCurScanline = videoFrame->mDisplayHeight;
+    }
+    return (mCurScanline > oldScanline) ? (mCurScanline - oldScanline) : 0;
+}
+
+} // namespace android
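
For reference, a hypothetical client of this decoder (decodeToRgb565 and its
buffer handling are illustrative, not part of this change):

    #include <vector>
    #include "include/HeifDecoderAPI.h"

    bool decodeToRgb565(HeifStream* stream, std::vector<uint8_t>* out) {
        std::unique_ptr<HeifDecoder> decoder(createHeifDecoder());
        if (!decoder->setOutputColor(kHeifColorFormat_RGB565)) {
            return false;
        }
        HeifFrameInfo info;
        if (!decoder->init(stream, &info)) {  // init() takes ownership of |stream|
            return false;
        }
        if (!decoder->decode(&info)) {
            return false;
        }
        const size_t rowBytes = info.mWidth * info.mBytesPerPixel;
        out->resize(rowBytes * info.mHeight);
        for (uint32_t y = 0; y < info.mHeight; y++) {
            if (!decoder->getScanline(out->data() + y * rowBytes)) {
                return false;
            }
        }
        return true;
    }
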
diff --git a/media/libheif/HeifDecoderImpl.h b/media/libheif/HeifDecoderImpl.h
new file mode 100644
index 0000000..2f8f0f8
--- /dev/null
+++ b/media/libheif/HeifDecoderImpl.h
@@ -0,0 +1,61 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef _HEIF_DECODER_IMPL_
+#define _HEIF_DECODER_IMPL_
+
+#include "include/HeifDecoderAPI.h"
+#include <system/graphics.h>
+#include <utils/RefBase.h>
+
+namespace android {
+
+class IDataSource;
+class IMemory;
+class MediaMetadataRetriever;
+
+/*
+ * An implementation of HeifDecoder based on Android's MediaMetadataRetriever.
+ */
+class HeifDecoderImpl : public HeifDecoder {
+public:
+
+    HeifDecoderImpl();
+    ~HeifDecoderImpl() override;
+
+    bool init(HeifStream* stream, HeifFrameInfo* frameInfo) override;
+
+    bool getEncodedColor(HeifEncodedColor* outColor) const override;
+
+    bool setOutputColor(HeifColorFormat heifColor) override;
+
+    bool decode(HeifFrameInfo* frameInfo) override;
+
+    bool getScanline(uint8_t* dst) override;
+
+    size_t skipScanlines(size_t count) override;
+
+private:
+    sp<IDataSource> mDataSource;
+    sp<MediaMetadataRetriever> mRetriever;
+    sp<IMemory> mFrameMemory;
+    android_pixel_format_t mOutputColor;
+    size_t mCurScanline;
+};
+
+} // namespace android
+
+#endif // _HEIF_DECODER_IMPL_
diff --git a/media/libheif/include/HeifDecoderAPI.h b/media/libheif/include/HeifDecoderAPI.h
new file mode 100644
index 0000000..5183c39
--- /dev/null
+++ b/media/libheif/include/HeifDecoderAPI.h
@@ -0,0 +1,181 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef _HEIF_DECODER_API_
+#define _HEIF_DECODER_API_
+
+#include <stdint.h>
+#include <string.h>
+
+#include <memory>
+
+/*
+ * The output color pixel format of heif decoder.
+ */
+typedef enum {
+    kHeifColorFormat_RGB565     = 0,
+    kHeifColorFormat_RGBA_8888  = 1,
+    kHeifColorFormat_BGRA_8888  = 2,
+} HeifColorFormat;
+
+/*
+ * The color spaces encoded in the heif image.
+ */
+typedef enum {
+    kHeifEncodedColor_RGB = 0,
+    kHeifEncodedColor_YUV = 1,
+    kHeifEncodedColor_CMYK = 2,
+} HeifEncodedColor;
+
+/*
+ * Represents a color converted (RGB-based) video frame
+ */
+struct HeifFrameInfo
+{
+    HeifFrameInfo() :
+        mWidth(0), mHeight(0), mRotationAngle(0), mBytesPerPixel(0),
+        mIccSize(0), mIccData(nullptr) {}
+
+    // Update the frame info; makes a copy of |iccData| internally.
+    void set(uint32_t width, uint32_t height, int32_t rotation, uint32_t bpp,
+            uint32_t iccSize, uint8_t* iccData) {
+        mWidth = width;
+        mHeight = height;
+        mRotationAngle = rotation;
+        mBytesPerPixel = bpp;
+
+        if (mIccData != nullptr) {
+            mIccData.reset(nullptr);
+        }
+        mIccSize = iccSize;
+        if (iccSize > 0) {
+            mIccData.reset(new uint8_t[iccSize]);
+            if (mIccData.get() != nullptr) {
+                memcpy(mIccData.get(), iccData, iccSize);
+            } else {
+                mIccSize = 0;
+            }
+        }
+    }
+
+    // Intentional public access modifiers:
+    uint32_t mWidth;
+    uint32_t mHeight;
+    int32_t  mRotationAngle;           // Rotation angle, clockwise, should be multiple of 90
+    uint32_t mBytesPerPixel;           // Number of bytes for one pixel
+    uint32_t mIccSize;                 // Number of bytes in mIccData
+    std::unique_ptr<uint8_t[]> mIccData; // Actual ICC data, memory is owned by this structure
+};
+
+/*
+ * Abstract interface to provide data to HeifDecoder.
+ */
+struct HeifStream {
+    HeifStream() {}
+
+    virtual ~HeifStream() {}
+
+    /*
+     * Reads or skips |size| bytes. Returns the number of bytes actually
+     * read or skipped.
+     * If |buffer| == NULL, skips |size| bytes and returns how many were skipped.
+     * If |buffer| != NULL, copies |size| bytes into |buffer| and returns how many were copied.
+     */
+    virtual size_t read(void* buffer, size_t size) = 0;
+
+    /*
+     * Rewinds to the beginning of the stream. Returns true if the stream is known
+     * to be at the beginning after this call returns.
+     */
+    virtual bool rewind() = 0;
+
+    /*
+     * Seeks to an absolute position in the stream. If this cannot be done, returns false.
+     * If an attempt is made to seek past the end of the stream, the position will be set
+     * to the end of the stream.
+     */
+    virtual bool seek(size_t /*position*/) = 0;
+
+    /** Returns true if this stream can report its total length. */
+    virtual bool hasLength() const = 0;
+
+    /** Returns the total length of the stream. If this cannot be done, returns 0. */
+    virtual size_t getLength() const = 0;
+
+private:
+    HeifStream(const HeifStream&) = delete;
+    HeifStream& operator=(const HeifStream&) = delete;
+};
+
+/*
+ * Abstract interface to decode heif images from a HeifStream data source.
+ */
+struct HeifDecoder {
+    HeifDecoder() {}
+
+    virtual ~HeifDecoder() {}
+
+    /*
+     * Returns true if it successfully sets outColor to the encoded color,
+     * and false otherwise.
+     */
+    virtual bool getEncodedColor(HeifEncodedColor* outColor) const = 0;
+
+    /*
+     * Returns true if it successfully sets the output color format to color,
+     * and false otherwise.
+     */
+    virtual bool setOutputColor(HeifColorFormat color) = 0;
+
+    /*
+     * Returns true if it successfully initializes the heif decoder with |stream|,
+     * and false otherwise. |frameInfo| will be filled with information of
+     * the primary picture upon success and unmodified upon failure.
+     * Takes ownership of |stream| regardless of result.
+     */
+    virtual bool init(HeifStream* stream, HeifFrameInfo* frameInfo) = 0;
+
+    /*
+     * Decodes the picture internally, returning whether it succeeded.
+     * |frameInfo| will be filled with information about the primary picture
+     * upon success and left unmodified upon failure.
+     *
+     * After this succeeds, getScanline can be called to read the scanlines
+     * that were decoded.
+     */
+    virtual bool decode(HeifFrameInfo* frameInfo) = 0;
+
+    /*
+     * Reads the next scanline (in top-down order) into |dst|; returns true
+     * upon success and false otherwise.
+     */
+    virtual bool getScanline(uint8_t* dst) = 0;
+
+    /*
+     * Skips the next |count| scanlines; returns the number of scanlines
+     * actually skipped.
+     */
+    virtual size_t skipScanlines(size_t count) = 0;
+
+private:
+    HeifDecoder(const HeifDecoder&) = delete;
+    HeifDecoder& operator=(const HeifDecoder&) = delete;
+};
+
+/*
+ * Creates a HeifDecoder. Returns a HeifDecoder instance upon success, or NULL
+ * if creation failed.
+ */
+ */
+HeifDecoder* createHeifDecoder();
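+
+/*
+ * Editor's note: a minimal usage sketch of this API, illustrative only.
+ * |stream| is assumed to be a caller-provided HeifStream implementation, and
+ * kHeifColorFormat_RGBA_8888 is assumed to be one of the HeifColorFormat
+ * values declared earlier in this header. init() takes ownership of |stream|
+ * regardless of the result, so the caller must not delete it.
+ *
+ *   HeifFrameInfo info;
+ *   HeifDecoder* decoder = createHeifDecoder();
+ *   if (decoder != nullptr
+ *           && decoder->init(stream, &info)
+ *           && decoder->setOutputColor(kHeifColorFormat_RGBA_8888)
+ *           && decoder->decode(&info)) {
+ *       std::vector<uint8_t> row(info.mWidth * info.mBytesPerPixel);
+ *       for (uint32_t i = 0; i < info.mHeight; ++i) {
+ *           if (!decoder->getScanline(row.data())) break;
+ *       }
+ *   }
+ *   delete decoder;
+ */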
+
+#endif // _HEIF_DECODER_API_
diff --git a/media/libmedia/Android.bp b/media/libmedia/Android.bp
index fb4fe4b..a462f3a 100644
--- a/media/libmedia/Android.bp
+++ b/media/libmedia/Android.bp
@@ -15,6 +15,9 @@
 cc_library {
     name: "libmedia_helper",
     vendor_available: true,
+    vndk: {
+        enabled: true,
+    },
     srcs: ["AudioParameter.cpp", "TypeConverter.cpp"],
     cflags: [
         "-Werror",
@@ -121,6 +124,9 @@
 cc_library_shared {
     name: "libmedia_omx",
     vendor_available: true,
+    vndk: {
+        enabled: true,
+    },
 
     defaults: ["libmedia_omx_defaults"],
 }
diff --git a/media/libmedia/IMediaMetadataRetriever.cpp b/media/libmedia/IMediaMetadataRetriever.cpp
index 7058ee8..5ea2e8b 100644
--- a/media/libmedia/IMediaMetadataRetriever.cpp
+++ b/media/libmedia/IMediaMetadataRetriever.cpp
@@ -127,22 +127,32 @@
         return reply.readInt32();
     }
 
-    status_t setDataSource(const sp<IDataSource>& source)
+    status_t setDataSource(const sp<IDataSource>& source, const char *mime)
     {
         Parcel data, reply;
         data.writeInterfaceToken(IMediaMetadataRetriever::getInterfaceDescriptor());
         data.writeStrongBinder(IInterface::asBinder(source));
+
+        if (mime != NULL) {
+            data.writeInt32(1);
+            data.writeCString(mime);
+        } else {
+            data.writeInt32(0);
+        }
         remote()->transact(SET_DATA_SOURCE_CALLBACK, data, &reply);
         return reply.readInt32();
     }
 
-    sp<IMemory> getFrameAtTime(int64_t timeUs, int option)
+    sp<IMemory> getFrameAtTime(int64_t timeUs, int option, int colorFormat, bool metaOnly)
     {
-        ALOGV("getTimeAtTime: time(%" PRId64 " us) and option(%d)", timeUs, option);
+        ALOGV("getTimeAtTime: time(%" PRId64 " us), option(%d), colorFormat(%d) metaOnly(%d)",
+                timeUs, option, colorFormat, metaOnly);
         Parcel data, reply;
         data.writeInterfaceToken(IMediaMetadataRetriever::getInterfaceDescriptor());
         data.writeInt64(timeUs);
         data.writeInt32(option);
+        data.writeInt32(colorFormat);
+        data.writeInt32(metaOnly);
 #ifndef DISABLE_GROUP_SCHEDULE_HACK
         sendSchedPolicy(data);
 #endif
@@ -258,7 +268,12 @@
             if (source == NULL) {
                 reply->writeInt32(BAD_VALUE);
             } else {
-                reply->writeInt32(setDataSource(source));
+                int32_t hasMime = data.readInt32();
+                const char *mime = NULL;
+                if (hasMime) {
+                    mime = data.readCString();
+                }
+                reply->writeInt32(setDataSource(source, mime));
             }
             return NO_ERROR;
         } break;
@@ -266,11 +281,14 @@
             CHECK_INTERFACE(IMediaMetadataRetriever, data, reply);
             int64_t timeUs = data.readInt64();
             int option = data.readInt32();
-            ALOGV("getTimeAtTime: time(%" PRId64 " us) and option(%d)", timeUs, option);
+            int colorFormat = data.readInt32();
+            bool metaOnly = (data.readInt32() != 0);
+            ALOGV("getTimeAtTime: time(%" PRId64 " us), option(%d), colorFormat(%d), metaOnly(%d)",
+                    timeUs, option, colorFormat, metaOnly);
 #ifndef DISABLE_GROUP_SCHEDULE_HACK
             setSchedPolicy(data);
 #endif
-            sp<IMemory> bitmap = getFrameAtTime(timeUs, option);
+            sp<IMemory> bitmap = getFrameAtTime(timeUs, option, colorFormat, metaOnly);
             if (bitmap != 0) {  // Don't send NULL across the binder interface
                 reply->writeInt32(NO_ERROR);
                 reply->writeStrongBinder(IInterface::asBinder(bitmap));
diff --git a/media/libmedia/MediaUtils.cpp b/media/libmedia/MediaUtils.cpp
index dc2bc82..bcdc3bd 100644
--- a/media/libmedia/MediaUtils.cpp
+++ b/media/libmedia/MediaUtils.cpp
@@ -24,6 +24,8 @@
 
 #include "MediaUtils.h"
 
+extern "C" size_t __cfi_shadow_size();
+
 namespace android {
 
 void limitProcessMemory(
@@ -62,6 +64,19 @@
     if (propVal > 0 && uint64_t(propVal) <= SIZE_MAX) {
         maxMem = propVal;
     }
+
+    // Increase by the size of the CFI shadow mapping. Most of the shadow is not
+    // backed with physical pages, and it is possible for the result to be
+    // higher than total physical memory. This is fine for RLIMIT_AS.
+    size_t cfi_size = __cfi_shadow_size();
+    if (cfi_size) {
+      ALOGV("cfi shadow size: %zu", cfi_size);
+      if (maxMem <= SIZE_MAX - cfi_size) {
+        maxMem += cfi_size;
+      } else {
+        maxMem = SIZE_MAX;
+      }
+    }
     ALOGV("actual limit: %zu", maxMem);
 
     struct rlimit limit;
diff --git a/media/libmedia/include/media/IDataSource.h b/media/libmedia/include/media/IDataSource.h
index 655f337..3858f78 100644
--- a/media/libmedia/include/media/IDataSource.h
+++ b/media/libmedia/include/media/IDataSource.h
@@ -35,7 +35,9 @@
     // Get the memory that readAt writes into.
     virtual sp<IMemory> getIMemory() = 0;
     // Read up to |size| bytes into the memory returned by getIMemory(). Returns
-    // the number of bytes read, or -1 on error. |size| must not be larger than
+    // the number of bytes read, or a negative value on error (e.g.
+    // ERROR_END_OF_STREAM indicating EOS; CallbackDataSource needs this to
+    // properly handle reading the last chunk). |size| must not be larger than
     // the buffer.
     virtual ssize_t readAt(off64_t offset, size_t size) = 0;
     // Get the size, or -1 if the size is unknown.
diff --git a/media/libmedia/include/media/IMediaMetadataRetriever.h b/media/libmedia/include/media/IMediaMetadataRetriever.h
index c90f254..ea95161 100644
--- a/media/libmedia/include/media/IMediaMetadataRetriever.h
+++ b/media/libmedia/include/media/IMediaMetadataRetriever.h
@@ -19,13 +19,12 @@
 #define ANDROID_IMEDIAMETADATARETRIEVER_H
 
 #include <binder/IInterface.h>
-#include <binder/Parcel.h>
 #include <binder/IMemory.h>
 #include <utils/KeyedVector.h>
 #include <utils/RefBase.h>
 
 namespace android {
-
+class Parcel;
 class IDataSource;
 struct IMediaHTTPService;
 
@@ -41,8 +40,10 @@
             const KeyedVector<String8, String8> *headers = NULL) = 0;
 
     virtual status_t        setDataSource(int fd, int64_t offset, int64_t length) = 0;
-    virtual status_t        setDataSource(const sp<IDataSource>& dataSource) = 0;
-    virtual sp<IMemory>     getFrameAtTime(int64_t timeUs, int option) = 0;
+    virtual status_t        setDataSource(
+            const sp<IDataSource>& dataSource, const char *mime) = 0;
+    virtual sp<IMemory>     getFrameAtTime(
+            int64_t timeUs, int option, int colorFormat, bool metaOnly) = 0;
     virtual sp<IMemory>     extractAlbumArt() = 0;
     virtual const char*     extractMetadata(int keyCode) = 0;
 };
diff --git a/media/libmedia/include/media/MediaMetadataRetrieverInterface.h b/media/libmedia/include/media/MediaMetadataRetrieverInterface.h
index a5e1350..257002d 100644
--- a/media/libmedia/include/media/MediaMetadataRetrieverInterface.h
+++ b/media/libmedia/include/media/MediaMetadataRetrieverInterface.h
@@ -41,8 +41,9 @@
             const KeyedVector<String8, String8> *headers = NULL) = 0;
 
     virtual status_t    setDataSource(int fd, int64_t offset, int64_t length) = 0;
-    virtual status_t setDataSource(const sp<DataSource>& source) = 0;
-    virtual VideoFrame* getFrameAtTime(int64_t timeUs, int option) = 0;
+    virtual status_t setDataSource(const sp<DataSource>& source, const char *mime) = 0;
+    virtual VideoFrame* getFrameAtTime(
+            int64_t timeUs, int option, int colorFormat, bool metaOnly) = 0;
     virtual MediaAlbumArt* extractAlbumArt() = 0;
     virtual const char* extractMetadata(int keyCode) = 0;
 };
@@ -54,7 +55,9 @@
     MediaMetadataRetrieverInterface() {}
 
     virtual             ~MediaMetadataRetrieverInterface() {}
-    virtual VideoFrame* getFrameAtTime(int64_t /*timeUs*/, int /*option*/) { return NULL; }
+    virtual VideoFrame* getFrameAtTime(
+            int64_t /*timeUs*/, int /*option*/, int /*colorFormat*/, bool /*metaOnly*/)
+    { return NULL; }
     virtual MediaAlbumArt* extractAlbumArt() { return NULL; }
     virtual const char* extractMetadata(int /*keyCode*/) { return NULL; }
 };
diff --git a/media/libmedia/include/media/mediametadataretriever.h b/media/libmedia/include/media/mediametadataretriever.h
index 8ed07ee..65c266b 100644
--- a/media/libmedia/include/media/mediametadataretriever.h
+++ b/media/libmedia/include/media/mediametadataretriever.h
@@ -76,8 +76,10 @@
             const KeyedVector<String8, String8> *headers = NULL);
 
     status_t setDataSource(int fd, int64_t offset, int64_t length);
-    status_t setDataSource(const sp<IDataSource>& dataSource);
-    sp<IMemory> getFrameAtTime(int64_t timeUs, int option);
+    status_t setDataSource(
+            const sp<IDataSource>& dataSource, const char *mime = NULL);
+    sp<IMemory> getFrameAtTime(int64_t timeUs, int option,
+            int colorFormat = HAL_PIXEL_FORMAT_RGB_565, bool metaOnly = false);
     sp<IMemory> extractAlbumArt();
     const char* extractMetadata(int keyCode);
 
diff --git a/media/libmedia/mediametadataretriever.cpp b/media/libmedia/mediametadataretriever.cpp
index 08a9e6a..7d27d57 100644
--- a/media/libmedia/mediametadataretriever.cpp
+++ b/media/libmedia/mediametadataretriever.cpp
@@ -130,7 +130,7 @@
 }
 
 status_t MediaMetadataRetriever::setDataSource(
-    const sp<IDataSource>& dataSource)
+    const sp<IDataSource>& dataSource, const char *mime)
 {
     ALOGV("setDataSource(IDataSource)");
     Mutex::Autolock _l(mLock);
@@ -138,18 +138,20 @@
         ALOGE("retriever is not initialized");
         return INVALID_OPERATION;
     }
-    return mRetriever->setDataSource(dataSource);
+    return mRetriever->setDataSource(dataSource, mime);
 }
 
-sp<IMemory> MediaMetadataRetriever::getFrameAtTime(int64_t timeUs, int option)
+sp<IMemory> MediaMetadataRetriever::getFrameAtTime(
+        int64_t timeUs, int option, int colorFormat, bool metaOnly)
 {
-    ALOGV("getFrameAtTime: time(%" PRId64 " us) option(%d)", timeUs, option);
+    ALOGV("getFrameAtTime: time(%" PRId64 " us) option(%d) colorFormat(%d) metaOnly(%d)",
+            timeUs, option, colorFormat, metaOnly);
     Mutex::Autolock _l(mLock);
     if (mRetriever == 0) {
         ALOGE("retriever is not initialized");
         return NULL;
     }
-    return mRetriever->getFrameAtTime(timeUs, option);
+    return mRetriever->getFrameAtTime(timeUs, option, colorFormat, metaOnly);
 }
 
 const char* MediaMetadataRetriever::extractMetadata(int keyCode)
diff --git a/media/libmediaplayerservice/MetadataRetrieverClient.cpp b/media/libmediaplayerservice/MetadataRetrieverClient.cpp
index 793f476..5a468f3 100644
--- a/media/libmediaplayerservice/MetadataRetrieverClient.cpp
+++ b/media/libmediaplayerservice/MetadataRetrieverClient.cpp
@@ -175,7 +175,7 @@
 }
 
 status_t MetadataRetrieverClient::setDataSource(
-        const sp<IDataSource>& source)
+        const sp<IDataSource>& source, const char *mime)
 {
     ALOGV("setDataSource(IDataSource)");
     Mutex::Autolock lock(mLock);
@@ -186,16 +186,18 @@
     ALOGV("player type = %d", playerType);
     sp<MediaMetadataRetrieverBase> p = createRetriever(playerType);
     if (p == NULL) return NO_INIT;
-    status_t ret = p->setDataSource(dataSource);
+    status_t ret = p->setDataSource(dataSource, mime);
     if (ret == NO_ERROR) mRetriever = p;
     return ret;
 }
 
 Mutex MetadataRetrieverClient::sLock;
 
-sp<IMemory> MetadataRetrieverClient::getFrameAtTime(int64_t timeUs, int option)
+sp<IMemory> MetadataRetrieverClient::getFrameAtTime(
+        int64_t timeUs, int option, int colorFormat, bool metaOnly)
 {
-    ALOGV("getFrameAtTime: time(%lld us) option(%d)", (long long)timeUs, option);
+    ALOGV("getFrameAtTime: time(%lld us) option(%d) colorFormat(%d), metaOnly(%d)",
+            (long long)timeUs, option, colorFormat, metaOnly);
     Mutex::Autolock lock(mLock);
     Mutex::Autolock glock(sLock);
     mThumbnail.clear();
@@ -203,12 +205,13 @@
         ALOGE("retriever is not initialized");
         return NULL;
     }
-    VideoFrame *frame = mRetriever->getFrameAtTime(timeUs, option);
+    VideoFrame *frame = mRetriever->getFrameAtTime(
+            timeUs, option, colorFormat, metaOnly);
     if (frame == NULL) {
         ALOGE("failed to capture a video frame");
         return NULL;
     }
-    size_t size = sizeof(VideoFrame) + frame->mSize;
+    size_t size = frame->getFlattenedSize();
     sp<MemoryHeapBase> heap = new MemoryHeapBase(size, 0, "MetadataRetrieverClient");
     if (heap == NULL) {
         ALOGE("failed to create MemoryDealer");
@@ -222,16 +225,7 @@
         return NULL;
     }
     VideoFrame *frameCopy = static_cast<VideoFrame *>(mThumbnail->pointer());
-    frameCopy->mWidth = frame->mWidth;
-    frameCopy->mHeight = frame->mHeight;
-    frameCopy->mDisplayWidth = frame->mDisplayWidth;
-    frameCopy->mDisplayHeight = frame->mDisplayHeight;
-    frameCopy->mSize = frame->mSize;
-    frameCopy->mRotationAngle = frame->mRotationAngle;
-    ALOGV("rotation: %d", frameCopy->mRotationAngle);
-    frameCopy->mData = (uint8_t *)frameCopy + sizeof(VideoFrame);
-    memcpy(frameCopy->mData, frame->mData, frame->mSize);
-    frameCopy->mData = 0;
+    frameCopy->copyFlattened(*frame);
     delete frame;  // Fix memory leakage
     return mThumbnail;
 }
diff --git a/media/libmediaplayerservice/MetadataRetrieverClient.h b/media/libmediaplayerservice/MetadataRetrieverClient.h
index fe7547c..c78cd4b 100644
--- a/media/libmediaplayerservice/MetadataRetrieverClient.h
+++ b/media/libmediaplayerservice/MetadataRetrieverClient.h
@@ -49,8 +49,9 @@
             const KeyedVector<String8, String8> *headers);
 
     virtual status_t                setDataSource(int fd, int64_t offset, int64_t length);
-    virtual status_t                setDataSource(const sp<IDataSource>& source);
-    virtual sp<IMemory>             getFrameAtTime(int64_t timeUs, int option);
+    virtual status_t                setDataSource(const sp<IDataSource>& source, const char *mime);
+    virtual sp<IMemory>             getFrameAtTime(
+            int64_t timeUs, int option, int colorFormat, bool metaOnly);
     virtual sp<IMemory>             extractAlbumArt();
     virtual const char*             extractMetadata(int keyCode);
 
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerDecoder.cpp b/media/libmediaplayerservice/nuplayer/NuPlayerDecoder.cpp
index 8fe255b..ac187cc 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayerDecoder.cpp
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerDecoder.cpp
@@ -343,7 +343,7 @@
             format, mSurface, crypto, 0 /* flags */);
 
     if (err != OK) {
-        ALOGE("Failed to configure %s decoder (err=%d)", mComponentName.c_str(), err);
+        ALOGE("Failed to configure [%s] decoder (err=%d)", mComponentName.c_str(), err);
         mCodec->release();
         mCodec.clear();
         handleError(err);
@@ -372,7 +372,7 @@
 
     err = mCodec->start();
     if (err != OK) {
-        ALOGE("Failed to start %s decoder (err=%d)", mComponentName.c_str(), err);
+        ALOGE("Failed to start [%s] decoder (err=%d)", mComponentName.c_str(), err);
         mCodec->release();
         mCodec.clear();
         handleError(err);
@@ -460,6 +460,12 @@
     if (notifyComplete) {
         mResumePending = true;
     }
+
+    if (mCodec == NULL) {
+        ALOGE("[%s] onResume without a valid codec", mComponentName.c_str());
+        handleError(NO_INIT);
+        return;
+    }
     mCodec->start();
 }
 
@@ -481,7 +487,7 @@
     }
 
     if (err != OK) {
-        ALOGE("failed to flush %s (err=%d)", mComponentName.c_str(), err);
+        ALOGE("failed to flush [%s] (err=%d)", mComponentName.c_str(), err);
         handleError(err);
         // finish with posting kWhatFlushCompleted.
         // we attempt to release the buffers even if flush fails.
@@ -530,7 +536,7 @@
     releaseAndResetMediaBuffers();
 
     if (err != OK) {
-        ALOGE("failed to release %s (err=%d)", mComponentName.c_str(), err);
+        ALOGE("failed to release [%s] (err=%d)", mComponentName.c_str(), err);
         handleError(err);
         // finish with posting kWhatShutdownCompleted.
     }
@@ -631,10 +637,17 @@
         return false;
     }
 
+    if (mCodec == NULL) {
+        ALOGE("[%s] handleAnInputBuffer without a valid codec", mComponentName.c_str());
+        handleError(NO_INIT);
+        return false;
+    }
+
     sp<MediaCodecBuffer> buffer;
     mCodec->getInputBuffer(index, &buffer);
 
     if (buffer == NULL) {
+        ALOGE("[%s] handleAnInputBuffer, failed to get input buffer", mComponentName.c_str());
         handleError(UNKNOWN_ERROR);
         return false;
     }
@@ -697,11 +710,18 @@
         size_t size,
         int64_t timeUs,
         int32_t flags) {
+    if (mCodec == NULL) {
+        ALOGE("[%s] handleAnOutputBuffer without a valid codec", mComponentName.c_str());
+        handleError(NO_INIT);
+        return false;
+    }
+
 //    CHECK_LT(bufferIx, mOutputBuffers.size());
     sp<MediaCodecBuffer> buffer;
     mCodec->getOutputBuffer(index, &buffer);
 
     if (buffer == NULL) {
+        ALOGE("[%s] handleAnOutputBuffer, failed to get output buffer", mComponentName.c_str());
         handleError(UNKNOWN_ERROR);
         return false;
     }
@@ -949,6 +969,12 @@
 }
 
 bool NuPlayer::Decoder::onInputBufferFetched(const sp<AMessage> &msg) {
+    if (mCodec == NULL) {
+        ALOGE("[%s] onInputBufferFetched without a valid codec", mComponentName.c_str());
+        handleError(NO_INIT);
+        return false;
+    }
+
     size_t bufferIx;
     CHECK(msg->findSize("buffer-ix", &bufferIx));
     CHECK_LT(bufferIx, mInputBuffers.size());
@@ -979,7 +1005,7 @@
         }
 
         if (streamErr != ERROR_END_OF_STREAM) {
-            ALOGE("Stream error for %s (err=%d), EOS %s queued",
+            ALOGE("Stream error for [%s] (err=%d), EOS %s queued",
                     mComponentName.c_str(),
                     streamErr,
                     err == OK ? "successfully" : "unsuccessfully");
@@ -1073,7 +1099,7 @@
         } // no cryptInfo
 
         if (err != OK) {
-            ALOGE("onInputBufferFetched: queue%sInputBuffer failed for %s (err=%d, %s)",
+            ALOGE("onInputBufferFetched: queue%sInputBuffer failed for [%s] (err=%d, %s)",
                     (cryptInfo != NULL ? "Secure" : ""),
                     mComponentName.c_str(), err, errorDetailMsg.c_str());
             handleError(err);
@@ -1102,7 +1128,9 @@
         }
     }
 
-    if (msg->findInt32("render", &render) && render) {
+    if (mCodec == NULL) {
+        err = NO_INIT;
+    } else if (msg->findInt32("render", &render) && render) {
         int64_t timestampNs;
         CHECK(msg->findInt64("timestampNs", &timestampNs));
         err = mCodec->renderOutputBufferAndRelease(bufferIx, timestampNs);
@@ -1111,7 +1139,7 @@
         err = mCodec->releaseOutputBuffer(bufferIx);
     }
     if (err != OK) {
-        ALOGE("failed to release output buffer for %s (err=%d)",
+        ALOGE("failed to release output buffer for [%s] (err=%d)",
                 mComponentName.c_str(), err);
         handleError(err);
     }
diff --git a/media/libstagefright/ACodec.cpp b/media/libstagefright/ACodec.cpp
index d4ec30d..63ad0e0 100644
--- a/media/libstagefright/ACodec.cpp
+++ b/media/libstagefright/ACodec.cpp
@@ -1866,16 +1866,15 @@
             mFlags |= kFlagIsGrallocUsageProtected;
             mFlags |= kFlagPushBlankBuffersToNativeWindowOnShutdown;
         }
+    }
+    if (mFlags & kFlagIsSecure) {
+        // use native_handles for secure input buffers
+        err = setPortMode(kPortIndexInput, IOMX::kPortModePresetSecureBuffer);
 
-        if (mFlags & kFlagIsSecure) {
-            // use native_handles for secure input buffers
-            err = setPortMode(kPortIndexInput, IOMX::kPortModePresetSecureBuffer);
-
-            if (err != OK) {
-                ALOGI("falling back to non-native_handles");
-                setPortMode(kPortIndexInput, IOMX::kPortModePresetByteBuffer);
-                err = OK; // ignore error for now
-            }
+        if (err != OK) {
+            ALOGI("falling back to non-native_handles");
+            setPortMode(kPortIndexInput, IOMX::kPortModePresetByteBuffer);
+            err = OK; // ignore error for now
         }
     }
     if (haveNativeWindow) {
diff --git a/media/libstagefright/Android.bp b/media/libstagefright/Android.bp
index 19973bd..348abf8 100644
--- a/media/libstagefright/Android.bp
+++ b/media/libstagefright/Android.bp
@@ -30,6 +30,7 @@
         "FrameRenderTracker.cpp",
         "HTTPBase.cpp",
         "HevcUtils.cpp",
+        "ItemTable.cpp",
         "JPEGSource.cpp",
         "MP3Extractor.cpp",
         "MPEG2TSWriter.cpp",
diff --git a/media/libstagefright/CallbackDataSource.cpp b/media/libstagefright/CallbackDataSource.cpp
index 4309372..6dfe2de 100644
--- a/media/libstagefright/CallbackDataSource.cpp
+++ b/media/libstagefright/CallbackDataSource.cpp
@@ -127,10 +127,6 @@
 }
 
 ssize_t TinyCacheSource::readAt(off64_t offset, void* data, size_t size) {
-    if (size >= kCacheSize) {
-        return mSource->readAt(offset, data, size);
-    }
-
     // Check if the cache satisfies the read.
     if (mCachedOffset <= offset
             && offset < (off64_t) (mCachedOffset + mCachedSize)) {
@@ -154,6 +150,9 @@
         }
     }
 
+    if (size >= kCacheSize) {
+        return mSource->readAt(offset, data, size);
+    }
 
     // Fill the cache and copy to the caller.
     const ssize_t numRead = mSource->readAt(offset, mCache, kCacheSize);
diff --git a/media/libstagefright/DataSource.cpp b/media/libstagefright/DataSource.cpp
index a5760d1..c22053e 100644
--- a/media/libstagefright/DataSource.cpp
+++ b/media/libstagefright/DataSource.cpp
@@ -92,6 +92,48 @@
     return true;
 }
 
+bool DataSource::getUInt16Var(off64_t offset, uint16_t *x, size_t size) {
+    if (size == 2) {
+        return getUInt16(offset, x);
+    }
+    if (size == 1) {
+        uint8_t tmp;
+        if (readAt(offset, &tmp, 1) == 1) {
+            *x = tmp;
+            return true;
+        }
+    }
+    return false;
+}
+
+bool DataSource::getUInt32Var(off64_t offset, uint32_t *x, size_t size) {
+    if (size == 4) {
+        return getUInt32(offset, x);
+    }
+    if (size == 2) {
+        uint16_t tmp;
+        if (getUInt16(offset, &tmp)) {
+            *x = tmp;
+            return true;
+        }
+    }
+    return false;
+}
+
+bool DataSource::getUInt64Var(off64_t offset, uint64_t *x, size_t size) {
+    if (size == 8) {
+        return getUInt64(offset, x);
+    }
+    if (size == 4) {
+        uint32_t tmp;
+        if (getUInt32(offset, &tmp)) {
+            *x = tmp;
+            return true;
+        }
+    }
+    return false;
+}
+
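+// Editor's note: the *Var helpers above read big-endian fields whose byte
+// width is chosen at runtime by the container, delegating to the fixed-size
+// getters where possible. A hedged usage sketch, mirroring how ItemTable.cpp
+// below reads HEIF 'iloc' fields whose declared width may be 0, 4 or 8:
+//
+//   uint64_t base_offset = 0;
+//   if (base_offset_size != 0
+//           && !source->getUInt64Var(offset, &base_offset, base_offset_size)) {
+//       return ERROR_IO;
+//   }
+//   offset += base_offset_size;
+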
 status_t DataSource::getSize(off64_t *size) {
     *size = 0;
 
diff --git a/media/libstagefright/ItemTable.cpp b/media/libstagefright/ItemTable.cpp
new file mode 100644
index 0000000..3ec416b
--- /dev/null
+++ b/media/libstagefright/ItemTable.cpp
@@ -0,0 +1,1553 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "ItemTable"
+
+#include <include/ItemTable.h>
+#include <media/MediaDefs.h>
+#include <media/stagefright/DataSource.h>
+#include <media/stagefright/MetaData.h>
+#include <media/stagefright/MediaErrors.h>
+#include <media/stagefright/Utils.h>
+#include <media/stagefright/foundation/ABuffer.h>
+#include <media/stagefright/foundation/hexdump.h>
+#include <utils/Log.h>
+
+namespace android {
+
+namespace heif {
+
+/////////////////////////////////////////////////////////////////////
+//
+//  struct to keep track of one image item
+//
+
+struct ImageItem {
+    friend struct ItemReference;
+    friend struct ItemProperty;
+
+    ImageItem() : ImageItem(0) {}
+    ImageItem(uint32_t _type) : type(_type),
+            rows(0), columns(0), width(0), height(0), rotation(0),
+            offset(0), size(0), nextTileIndex(0) {}
+
+    bool isGrid() const {
+        return type == FOURCC('g', 'r', 'i', 'd');
+    }
+
+    status_t getNextTileItemId(uint32_t *nextTileItemId, bool reset) {
+        if (reset) {
+            nextTileIndex = 0;
+        }
+        if (nextTileIndex >= dimgRefs.size()) {
+            return ERROR_END_OF_STREAM;
+        }
+        *nextTileItemId = dimgRefs[nextTileIndex++];
+        return OK;
+    }
+
+    uint32_t type;
+    int32_t rows;
+    int32_t columns;
+    int32_t width;
+    int32_t height;
+    int32_t rotation;
+    off64_t offset;
+    size_t size;
+    sp<ABuffer> hvcc;
+    sp<ABuffer> icc;
+
+    Vector<uint32_t> thumbnails;
+    Vector<uint32_t> dimgRefs;
+    size_t nextTileIndex;
+};
+
+
+/////////////////////////////////////////////////////////////////////
+//
+//  ISO boxes
+//
+
+struct Box {
+protected:
+    Box(const sp<DataSource> source, uint32_t type) :
+        mDataSource(source), mType(type) {}
+
+    virtual ~Box() {}
+
+    virtual status_t onChunkData(
+            uint32_t /*type*/, off64_t /*offset*/, size_t /*size*/) {
+        return OK;
+    }
+
+    inline uint32_t type() const { return mType; }
+
+    inline sp<DataSource> source() const { return mDataSource; }
+
+    status_t parseChunk(off64_t *offset);
+
+    status_t parseChunks(off64_t offset, size_t size);
+
+private:
+    sp<DataSource> mDataSource;
+    uint32_t mType;
+};
+
+status_t Box::parseChunk(off64_t *offset) {
+    if (*offset < 0) {
+        ALOGE("b/23540914");
+        return ERROR_MALFORMED;
+    }
+    uint32_t hdr[2];
+    if (mDataSource->readAt(*offset, hdr, 8) < 8) {
+        return ERROR_IO;
+    }
+    uint64_t chunk_size = ntohl(hdr[0]);
+    int32_t chunk_type = ntohl(hdr[1]);
+    off64_t data_offset = *offset + 8;
+
+    if (chunk_size == 1) {
+        if (mDataSource->readAt(*offset + 8, &chunk_size, 8) < 8) {
+            return ERROR_IO;
+        }
+        chunk_size = ntoh64(chunk_size);
+        data_offset += 8;
+
+        if (chunk_size < 16) {
+            // The smallest valid chunk is 16 bytes long in this case.
+            return ERROR_MALFORMED;
+        }
+    } else if (chunk_size == 0) {
+        // A chunk size of 0 means the box extends to end of file, which is
+        // only valid at top level; we never parse at top level here.
+        ALOGE("invalid chunk size 0 for non-top level box");
+        return ERROR_MALFORMED;
+    } else if (chunk_size < 8) {
+        // The smallest valid chunk is 8 bytes long.
+        ALOGE("invalid chunk size: %lld", (long long)chunk_size);
+        return ERROR_MALFORMED;
+    }
+
+    char chunk[5];
+    MakeFourCCString(chunk_type, chunk);
+    ALOGV("chunk: %s @ %lld", chunk, (long long)*offset);
+
+    off64_t chunk_data_size = chunk_size - (data_offset - *offset);
+    if (chunk_data_size < 0) {
+        ALOGE("b/23540914");
+        return ERROR_MALFORMED;
+    }
+
+    status_t err = onChunkData(chunk_type, data_offset, chunk_data_size);
+
+    if (err != OK) {
+        return err;
+    }
+    *offset += chunk_size;
+    return OK;
+}
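+
+// Editor's note: parseChunk() above decodes the standard ISO/IEC 14496-12
+// box header. For reference, the layout it expects:
+//
+//   bytes 0-3: size (big-endian). 0 means the box extends to end of file
+//              (top-level only, rejected here); 1 means a 64-bit "largesize"
+//              follows the type.
+//   bytes 4-7: type (FOURCC).
+//   bytes 8-15 (only when size == 1): 64-bit largesize, so the smallest
+//              valid box of that form is 16 bytes.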
+
+status_t Box::parseChunks(off64_t offset, size_t size) {
+    off64_t stopOffset = offset + size;
+    while (offset < stopOffset) {
+        status_t err = parseChunk(&offset);
+        if (err != OK) {
+            return err;
+        }
+    }
+    if (offset != stopOffset) {
+        return ERROR_MALFORMED;
+    }
+    return OK;
+}
+
+///////////////////////////////////////////////////////////////////////
+
+struct FullBox : public Box {
+protected:
+    FullBox(const sp<DataSource> source, uint32_t type) :
+        Box(source, type), mVersion(0), mFlags(0) {}
+
+    inline uint8_t version() const { return mVersion; }
+
+    inline uint32_t flags() const { return mFlags; }
+
+    status_t parseFullBoxHeader(off64_t *offset, size_t *size);
+
+private:
+    uint8_t mVersion;
+    uint32_t mFlags;
+};
+
+status_t FullBox::parseFullBoxHeader(off64_t *offset, size_t *size) {
+    if (*size < 4) {
+        return ERROR_MALFORMED;
+    }
+    if (!source()->readAt(*offset, &mVersion, 1)) {
+        return ERROR_IO;
+    }
+    if (!source()->getUInt24(*offset + 1, &mFlags)) {
+        return ERROR_IO;
+    }
+    *offset += 4;
+    *size -= 4;
+    return OK;
+}
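+
+// Editor's note: a FullBox prepends a 4-byte header to the box payload:
+// one byte of version followed by a 24-bit flags field (read above via
+// getUInt24), which is why parseFullBoxHeader() advances |offset| by 4.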
+
+/////////////////////////////////////////////////////////////////////
+//
+//  PrimaryImage box
+//
+
+struct PitmBox : public FullBox {
+    PitmBox(const sp<DataSource> source) :
+        FullBox(source, FOURCC('p', 'i', 't', 'm')) {}
+
+    status_t parse(off64_t offset, size_t size, uint32_t *primaryItemId);
+};
+
+status_t PitmBox::parse(off64_t offset, size_t size, uint32_t *primaryItemId) {
+    status_t err = parseFullBoxHeader(&offset, &size);
+    if (err != OK) {
+        return err;
+    }
+
+    size_t itemIdSize = (version() == 0) ? 2 : 4;
+    if (size < itemIdSize) {
+        return ERROR_MALFORMED;
+    }
+    uint32_t itemId;
+    if (!source()->getUInt32Var(offset, &itemId, itemIdSize)) {
+        return ERROR_IO;
+    }
+
+    ALOGV("primary id %d", itemId);
+    *primaryItemId = itemId;
+
+    return OK;
+}
+
+/////////////////////////////////////////////////////////////////////
+//
+//  ItemLocation related boxes
+//
+
+struct ExtentEntry {
+    uint64_t extentIndex;
+    uint64_t extentOffset;
+    uint64_t extentLength;
+};
+
+struct ItemLoc {
+    ItemLoc() : ItemLoc(0, 0, 0, 0) {}
+    ItemLoc(uint32_t item_id, uint16_t construction_method,
+            uint16_t data_reference_index, uint64_t base_offset) :
+        itemId(item_id),
+        constructionMethod(construction_method),
+        dataReferenceIndex(data_reference_index),
+        baseOffset(base_offset) {}
+
+    void addExtent(const ExtentEntry& extent) {
+        extents.push_back(extent);
+    }
+
+    status_t getLoc(off64_t *offset, size_t *size,
+            off64_t idatOffset, size_t idatSize) const {
+        // TODO: fix extent handling, fix constructionMethod = 2
+        CHECK(extents.size() == 1);
+        if (constructionMethod == 0) {
+            *offset = baseOffset + extents[0].extentOffset;
+            *size = extents[0].extentLength;
+            return OK;
+        } else if (constructionMethod == 1) {
+            if (baseOffset + extents[0].extentOffset + extents[0].extentLength
+                    > idatSize) {
+                return ERROR_MALFORMED;
+            }
+            *offset = baseOffset + extents[0].extentOffset + idatOffset;
+            *size = extents[0].extentLength;
+            return OK;
+        }
+        return ERROR_UNSUPPORTED;
+    }
+
+    // parsed info
+    uint32_t itemId;
+    uint16_t constructionMethod;
+    uint16_t dataReferenceIndex;
+    off64_t baseOffset;
+    Vector<ExtentEntry> extents;
+};
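+
+// Editor's note: a worked example of ItemLoc::getLoc() above. With
+// construction method 1 (item data stored inside the 'idat' box),
+// baseOffset = 16, extentOffset = 100 and idatOffset = 1024 resolve to
+// absolute file offset 16 + 100 + 1024 = 1140; construction method 0 would
+// resolve the same extent to file offset 16 + 100 = 116.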
+
+struct IlocBox : public FullBox {
+    IlocBox(const sp<DataSource> source, KeyedVector<uint32_t, ItemLoc> *itemLocs) :
+        FullBox(source, FOURCC('i', 'l', 'o', 'c')),
+        mItemLocs(itemLocs), mHasConstructMethod1(false) {}
+
+    status_t parse(off64_t offset, size_t size);
+
+    bool hasConstructMethod1() { return mHasConstructMethod1; }
+
+private:
+    static bool isSizeFieldValid(uint32_t offset_size) {
+        return offset_size == 0 || offset_size == 4 || offset_size == 8;
+    }
+    KeyedVector<uint32_t, ItemLoc> *mItemLocs;
+    bool mHasConstructMethod1;
+};
+
+status_t IlocBox::parse(off64_t offset, size_t size) {
+    status_t err = parseFullBoxHeader(&offset, &size);
+    if (err != OK) {
+        return err;
+    }
+    if (version() > 2) {
+        ALOGE("%s: invalid version %d", __FUNCTION__, version());
+        return ERROR_MALFORMED;
+    }
+
+    if (size < 2) {
+        return ERROR_MALFORMED;
+    }
+    uint8_t offset_size;
+    if (!source()->readAt(offset++, &offset_size, 1)) {
+        return ERROR_IO;
+    }
+    uint8_t length_size = (offset_size & 0xF);
+    offset_size >>= 4;
+
+    uint8_t base_offset_size;
+    if (!source()->readAt(offset++, &base_offset_size, 1)) {
+        return ERROR_IO;
+    }
+    uint8_t index_size = 0;
+    if (version() == 1 || version() == 2) {
+        index_size = (base_offset_size & 0xF);
+    }
+    base_offset_size >>= 4;
+    size -= 2;
+
+    if (!isSizeFieldValid(offset_size)
+            || !isSizeFieldValid(length_size)
+            || !isSizeFieldValid(base_offset_size)
+            || !isSizeFieldValid(index_size)) {
+        ALOGE("%s: offset size not valid: %d, %d, %d, %d", __FUNCTION__,
+                offset_size, length_size, base_offset_size, index_size);
+        return ERROR_MALFORMED;
+    }
+
+    uint32_t item_count;
+    size_t itemFieldSize = version() < 2 ? 2 : 4;
+    if (size < itemFieldSize) {
+        return ERROR_MALFORMED;
+    }
+    if (!source()->getUInt32Var(offset, &item_count, itemFieldSize)) {
+        return ERROR_IO;
+    }
+
+    ALOGV("item_count %lld", (long long) item_count);
+    offset += itemFieldSize;
+    size -= itemFieldSize;
+
+    for (size_t i = 0; i < item_count; i++) {
+        uint32_t item_id;
+        if (!source()->getUInt32Var(offset, &item_id, itemFieldSize)) {
+            return ERROR_IO;
+        }
+        ALOGV("item[%zu]: id %lld", i, (long long)item_id);
+        offset += itemFieldSize;
+
+        uint8_t construction_method = 0;
+        if (version() == 1 || version() == 2) {
+            uint8_t buf[2];
+            if (!source()->readAt(offset, buf, 2)) {
+                return ERROR_IO;
+            }
+            construction_method = (buf[1] & 0xF);
+            ALOGV("construction_method %d", construction_method);
+            if (construction_method == 1) {
+                mHasConstructMethod1 = true;
+            }
+
+            offset += 2;
+        }
+
+        uint16_t data_reference_index;
+        if (!source()->getUInt16(offset, &data_reference_index)) {
+            return ERROR_IO;
+        }
+        ALOGV("data_reference_index %d", data_reference_index);
+        if (data_reference_index != 0) {
+            // we don't support reference to other files
+            return ERROR_UNSUPPORTED;
+        }
+        offset += 2;
+
+        uint64_t base_offset = 0;
+        if (base_offset_size != 0) {
+            if (!source()->getUInt64Var(offset, &base_offset, base_offset_size)) {
+                return ERROR_IO;
+            }
+            offset += base_offset_size;
+        }
+        ALOGV("base_offset %lld", (long long) base_offset);
+
+        ssize_t index = mItemLocs->add(item_id, ItemLoc(
+                item_id, construction_method, data_reference_index, base_offset));
+        ItemLoc &item = mItemLocs->editValueAt(index);
+
+        uint16_t extent_count;
+        if (!source()->getUInt16(offset, &extent_count)) {
+            return ERROR_IO;
+        }
+        ALOGV("extent_count %d", extent_count);
+
+        if (extent_count > 1 && (offset_size == 0 || length_size == 0)) {
+            // If the item is divided into more than one extent, the offset
+            // and length fields must be present.
+            return ERROR_MALFORMED;
+        }
+        offset += 2;
+
+        for (size_t j = 0; j < extent_count; j++) {
+            uint64_t extent_index = 1; // default=1
+            if ((version() == 1 || version() == 2) && (index_size > 0)) {
+                if (!source()->getUInt64Var(offset, &extent_index, index_size)) {
+                    return ERROR_IO;
+                }
+                // TODO: add support for this mode
+                offset += index_size;
+                ALOGV("extent_index %lld", (long long)extent_index);
+            }
+
+            uint64_t extent_offset = 0; // default=0
+            if (offset_size > 0) {
+                if (!source()->getUInt64Var(offset, &extent_offset, offset_size)) {
+                    return ERROR_IO;
+                }
+                offset += offset_size;
+            }
+            ALOGV("extent_offset %lld", (long long)extent_offset);
+
+            uint64_t extent_length = 0; // this indicates full length of file
+            if (length_size > 0) {
+                if (!source()->getUInt64Var(offset, &extent_length, length_size)) {
+                    return ERROR_IO;
+                }
+                offset += length_size;
+            }
+            ALOGV("extent_length %lld", (long long)extent_length);
+
+            item.addExtent({ extent_index, extent_offset, extent_length });
+        }
+    }
+    return OK;
+}
+
+/////////////////////////////////////////////////////////////////////
+//
+//  ItemReference related boxes
+//
+
+struct ItemReference : public Box, public RefBase {
+    ItemReference(const sp<DataSource> source, uint32_t type, uint32_t itemIdSize) :
+        Box(source, type), mItemId(0), mRefIdSize(itemIdSize) {}
+
+    status_t parse(off64_t offset, size_t size);
+
+    uint32_t itemId() { return mItemId; }
+
+    void apply(KeyedVector<uint32_t, ImageItem> &itemIdToImageMap) const {
+        ssize_t imageIndex = itemIdToImageMap.indexOfKey(mItemId);
+
+        // ignore non-image items
+        if (imageIndex < 0) {
+            return;
+        }
+
+        ALOGV("attach reference type 0x%x to item id %d)", type(), mItemId);
+
+        if (type() == FOURCC('d', 'i', 'm', 'g')) {
+            ImageItem &image = itemIdToImageMap.editValueAt(imageIndex);
+            if (!image.dimgRefs.empty()) {
+                ALOGW("dimgRefs if not clean!");
+            }
+            image.dimgRefs.appendVector(mRefs);
+        } else if (type() == FOURCC('t', 'h', 'm', 'b')) {
+            for (size_t i = 0; i < mRefs.size(); i++) {
+                imageIndex = itemIdToImageMap.indexOfKey(mRefs[i]);
+
+                // ignore non-image items
+                if (imageIndex < 0) {
+                    continue;
+                }
+                ALOGV("Image item id %d uses thumbnail item id %d", mRefs[i], mItemId);
+                ImageItem &image = itemIdToImageMap.editValueAt(imageIndex);
+                if (!image.thumbnails.empty()) {
+                    ALOGW("already has thumbnails!");
+                }
+                image.thumbnails.push_back(mItemId);
+            }
+        } else {
+            ALOGW("ignoring unsupported ref type 0x%x", type());
+        }
+    }
+
+private:
+    uint32_t mItemId;
+    uint32_t mRefIdSize;
+    Vector<uint32_t> mRefs;
+
+    DISALLOW_EVIL_CONSTRUCTORS(ItemReference);
+};
+
+status_t ItemReference::parse(off64_t offset, size_t size) {
+    if (size < mRefIdSize + 2) {
+        return ERROR_MALFORMED;
+    }
+    if (!source()->getUInt32Var(offset, &mItemId, mRefIdSize)) {
+        return ERROR_IO;
+    }
+    offset += mRefIdSize;
+
+    uint16_t count;
+    if (!source()->getUInt16(offset, &count)) {
+        return ERROR_IO;
+    }
+    offset += 2;
+    size -= (mRefIdSize + 2);
+
+    if (size < count * mRefIdSize) {
+        return ERROR_MALFORMED;
+    }
+
+    for (size_t i = 0; i < count; i++) {
+        uint32_t refItemId;
+        if (!source()->getUInt32Var(offset, &refItemId, mRefIdSize)) {
+            return ERROR_IO;
+        }
+        offset += mRefIdSize;
+        mRefs.push_back(refItemId);
+        ALOGV("item id %d: referencing item id %d", mItemId, refItemId);
+    }
+
+    return OK;
+}
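+
+// Editor's note: the reference direction in ItemReference::apply() above is
+// easy to misread. For 'dimg', |mItemId| is the derived (e.g. grid) image and
+// |mRefs| lists its source tiles; for 'thmb', |mItemId| is the thumbnail and
+// each entry of |mRefs| is a master image that uses it.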
+
+struct IrefBox : public FullBox {
+    IrefBox(const sp<DataSource> source, Vector<sp<ItemReference> > *itemRefs) :
+        FullBox(source, FOURCC('i', 'r', 'e', 'f')), mRefIdSize(0), mItemRefs(itemRefs) {}
+
+    status_t parse(off64_t offset, size_t size);
+
+protected:
+    status_t onChunkData(uint32_t type, off64_t offset, size_t size) override;
+
+private:
+    uint32_t mRefIdSize;
+    Vector<sp<ItemReference> > *mItemRefs;
+};
+
+status_t IrefBox::parse(off64_t offset, size_t size) {
+    ALOGV("%s: offset %lld, size %zu", __FUNCTION__, (long long)offset, size);
+    status_t err = parseFullBoxHeader(&offset, &size);
+    if (err != OK) {
+        return err;
+    }
+
+    mRefIdSize = (version() == 0) ? 2 : 4;
+    return parseChunks(offset, size);
+}
+
+status_t IrefBox::onChunkData(uint32_t type, off64_t offset, size_t size) {
+    sp<ItemReference> itemRef = new ItemReference(source(), type, mRefIdSize);
+
+    status_t err = itemRef->parse(offset, size);
+    if (err != OK) {
+        return err;
+    }
+    mItemRefs->push_back(itemRef);
+    return OK;
+}
+
+/////////////////////////////////////////////////////////////////////
+//
+//  ItemProperty related boxes
+//
+
+struct AssociationEntry {
+    uint32_t itemId;
+    bool essential;
+    uint16_t index;
+};
+
+struct ItemProperty : public RefBase {
+    ItemProperty() {}
+
+    virtual void attachTo(ImageItem &/*image*/) const {
+        ALOGW("Unrecognized property");
+    }
+    virtual status_t parse(off64_t /*offset*/, size_t /*size*/) {
+        ALOGW("Unrecognized property");
+        return OK;
+    }
+
+private:
+    DISALLOW_EVIL_CONSTRUCTORS(ItemProperty);
+};
+
+struct IspeBox : public FullBox, public ItemProperty {
+    IspeBox(const sp<DataSource> source) :
+        FullBox(source, FOURCC('i', 's', 'p', 'e')), mWidth(0), mHeight(0) {}
+
+    status_t parse(off64_t offset, size_t size) override;
+
+    void attachTo(ImageItem &image) const override {
+        image.width = mWidth;
+        image.height = mHeight;
+    }
+
+private:
+    uint32_t mWidth;
+    uint32_t mHeight;
+};
+
+status_t IspeBox::parse(off64_t offset, size_t size) {
+    ALOGV("%s: offset %lld, size %zu", __FUNCTION__, (long long)offset, size);
+
+    status_t err = parseFullBoxHeader(&offset, &size);
+    if (err != OK) {
+        return err;
+    }
+
+    if (size < 8) {
+        return ERROR_MALFORMED;
+    }
+    if (!source()->getUInt32(offset, &mWidth)
+            || !source()->getUInt32(offset + 4, &mHeight)) {
+        return ERROR_IO;
+    }
+    ALOGV("property ispe: %dx%d", mWidth, mHeight);
+
+    return OK;
+}
+
+struct HvccBox : public Box, public ItemProperty {
+    HvccBox(const sp<DataSource> source) :
+        Box(source, FOURCC('h', 'v', 'c', 'C')) {}
+
+    status_t parse(off64_t offset, size_t size) override;
+
+    void attachTo(ImageItem &image) const override {
+        image.hvcc = mHVCC;
+    }
+
+private:
+    sp<ABuffer> mHVCC;
+};
+
+status_t HvccBox::parse(off64_t offset, size_t size) {
+    ALOGV("%s: offset %lld, size %zu", __FUNCTION__, (long long)offset, size);
+
+    mHVCC = new ABuffer(size);
+
+    if (mHVCC->data() == NULL) {
+        ALOGE("b/28471206");
+        return NO_MEMORY;
+    }
+
+    if (source()->readAt(offset, mHVCC->data(), size) < (ssize_t)size) {
+        return ERROR_IO;
+    }
+
+    ALOGV("property hvcC");
+
+    return OK;
+}
+
+struct IrotBox : public Box, public ItemProperty {
+    IrotBox(const sp<DataSource> source) :
+        Box(source, FOURCC('i', 'r', 'o', 't')), mAngle(0) {}
+
+    status_t parse(off64_t offset, size_t size) override;
+
+    void attachTo(ImageItem &image) const override {
+        image.rotation = mAngle * 90;
+    }
+
+private:
+    uint8_t mAngle;
+};
+
+status_t IrotBox::parse(off64_t offset, size_t size) {
+    ALOGV("%s: offset %lld, size %zu", __FUNCTION__, (long long)offset, size);
+
+    if (size < 1) {
+        return ERROR_MALFORMED;
+    }
+    if (source()->readAt(offset, &mAngle, 1) != 1) {
+        return ERROR_IO;
+    }
+    mAngle &= 0x3;
+    ALOGV("property irot: %d", mAngle);
+
+    return OK;
+}
+
+struct ColrBox : public Box, public ItemProperty {
+    ColrBox(const sp<DataSource> source) :
+        Box(source, FOURCC('c', 'o', 'l', 'r')) {}
+
+    status_t parse(off64_t offset, size_t size) override;
+
+    void attachTo(ImageItem &image) const override {
+        image.icc = mICCData;
+    }
+
+private:
+    sp<ABuffer> mICCData;
+};
+
+status_t ColrBox::parse(off64_t offset, size_t size) {
+    ALOGV("%s: offset %lld, size %zu", __FUNCTION__, (long long)offset, size);
+
+    if (size < 4) {
+        return ERROR_MALFORMED;
+    }
+    uint32_t colour_type;
+    if (!source()->getUInt32(offset, &colour_type)) {
+        return ERROR_IO;
+    }
+    offset += 4;
+    size -= 4;
+    if (colour_type == FOURCC('n', 'c', 'l', 'x')) {
+        return OK;
+    }
+    if ((colour_type != FOURCC('r', 'I', 'C', 'C')) &&
+        (colour_type != FOURCC('p', 'r', 'o', 'f'))) {
+        return ERROR_MALFORMED;
+    }
+
+    mICCData = new ABuffer(size);
+    if (mICCData->data() == NULL) {
+        ALOGE("b/28471206");
+        return NO_MEMORY;
+    }
+
+    if (source()->readAt(offset, mICCData->data(), size) != (ssize_t)size) {
+        return ERROR_IO;
+    }
+
+    ALOGV("property Colr: size %zd", size);
+    return OK;
+}
+
+struct IpmaBox : public FullBox {
+    IpmaBox(const sp<DataSource> source, Vector<AssociationEntry> *associations) :
+        FullBox(source, FOURCC('i', 'p', 'm', 'a')), mAssociations(associations) {}
+
+    status_t parse(off64_t offset, size_t size);
+private:
+    Vector<AssociationEntry> *mAssociations;
+};
+
+status_t IpmaBox::parse(off64_t offset, size_t size) {
+    status_t err = parseFullBoxHeader(&offset, &size);
+    if (err != OK) {
+        return err;
+    }
+
+    if (size < 4) {
+        return ERROR_MALFORMED;
+    }
+    uint32_t entryCount;
+    if (!source()->getUInt32(offset, &entryCount)) {
+        return ERROR_IO;
+    }
+    offset += 4;
+    size -= 4;
+
+    for (size_t k = 0; k < entryCount; ++k) {
+        uint32_t itemId = 0;
+        size_t itemIdSize = (version() < 1) ? 2 : 4;
+
+        if (size < itemIdSize + 1) {
+            return ERROR_MALFORMED;
+        }
+
+        if (!source()->getUInt32Var(offset, &itemId, itemIdSize)) {
+            return ERROR_IO;
+        }
+        offset += itemIdSize;
+        size -= itemIdSize;
+
+        uint8_t associationCount;
+        if (!source()->readAt(offset, &associationCount, 1)) {
+            return ERROR_IO;
+        }
+        offset++;
+        size--;
+
+        for (size_t i = 0; i < associationCount; ++i) {
+            size_t propIndexSize = (flags() & 1) ? 2 : 1;
+            if (size < propIndexSize) {
+                return ERROR_MALFORMED;
+            }
+            uint16_t propIndex;
+            if (!source()->getUInt16Var(offset, &propIndex, propIndexSize)) {
+                return ERROR_IO;
+            }
+            offset += propIndexSize;
+            size -= propIndexSize;
+            uint16_t bitmask = (1 << (8 * propIndexSize - 1));
+            AssociationEntry entry = {
+                    .itemId = itemId,
+                    .essential = !!(propIndex & bitmask),
+                    .index = (uint16_t) (propIndex & ~bitmask)
+            };
+
+            ALOGV("item id %d associated to property %d (essential %d)",
+                    itemId, entry.index, entry.essential);
+
+            mAssociations->push_back(entry);
+        }
+    }
+
+    return OK;
+}
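+
+// Editor's note: a worked example of the association encoding above. With
+// one-byte property indices, i.e. (flags() & 1) == 0, bitmask is 0x80, so a
+// raw propIndex of 0x83 yields essential = true and index = 3 (a 1-based
+// index into the 'ipco' property array).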
+
+struct IpcoBox : public Box {
+    IpcoBox(const sp<DataSource> source, Vector<sp<ItemProperty> > *properties) :
+        Box(source, FOURCC('i', 'p', 'c', 'o')), mItemProperties(properties) {}
+
+    status_t parse(off64_t offset, size_t size);
+protected:
+    status_t onChunkData(uint32_t type, off64_t offset, size_t size) override;
+
+private:
+    Vector<sp<ItemProperty> > *mItemProperties;
+};
+
+status_t IpcoBox::parse(off64_t offset, size_t size) {
+    ALOGV("%s: offset %lld, size %zu", __FUNCTION__, (long long)offset, size);
+    // push dummy as the index is 1-based
+    mItemProperties->push_back(new ItemProperty());
+    return parseChunks(offset, size);
+}
+
+status_t IpcoBox::onChunkData(uint32_t type, off64_t offset, size_t size) {
+    sp<ItemProperty> itemProperty;
+    switch(type) {
+        case FOURCC('h', 'v', 'c', 'C'):
+        {
+            itemProperty = new HvccBox(source());
+            break;
+        }
+        case FOURCC('i', 's', 'p', 'e'):
+        {
+            itemProperty = new IspeBox(source());
+            break;
+        }
+        case FOURCC('i', 'r', 'o', 't'):
+        {
+            itemProperty = new IrotBox(source());
+            break;
+        }
+        case FOURCC('c', 'o', 'l', 'r'):
+        {
+            itemProperty = new ColrBox(source());
+            break;
+        }
+        default:
+        {
+            // push dummy to maintain correct item property index
+            itemProperty = new ItemProperty();
+            break;
+        }
+    }
+    status_t err = itemProperty->parse(offset, size);
+    if (err != OK) {
+        return err;
+    }
+    mItemProperties->push_back(itemProperty);
+    return OK;
+}
+
+struct IprpBox : public Box {
+    IprpBox(const sp<DataSource> source,
+            Vector<sp<ItemProperty> > *properties,
+            Vector<AssociationEntry> *associations) :
+        Box(source, FOURCC('i', 'p', 'r', 'p')),
+        mProperties(properties), mAssociations(associations) {}
+
+    status_t parse(off64_t offset, size_t size);
+protected:
+    status_t onChunkData(uint32_t type, off64_t offset, size_t size) override;
+
+private:
+    Vector<sp<ItemProperty> > *mProperties;
+    Vector<AssociationEntry> *mAssociations;
+};
+
+status_t IprpBox::parse(off64_t offset, size_t size) {
+    ALOGV("%s: offset %lld, size %zu", __FUNCTION__, (long long)offset, size);
+
+    status_t err = parseChunks(offset, size);
+    if (err != OK) {
+        return err;
+    }
+    return OK;
+}
+
+status_t IprpBox::onChunkData(uint32_t type, off64_t offset, size_t size) {
+    switch(type) {
+        case FOURCC('i', 'p', 'c', 'o'):
+        {
+            IpcoBox ipcoBox(source(), mProperties);
+            return ipcoBox.parse(offset, size);
+        }
+        case FOURCC('i', 'p', 'm', 'a'):
+        {
+            IpmaBox ipmaBox(source(), mAssociations);
+            return ipmaBox.parse(offset, size);
+        }
+        default:
+        {
+            ALOGW("Unrecognized box.");
+            break;
+        }
+    }
+    return OK;
+}
+
+/////////////////////////////////////////////////////////////////////
+//
+//  ItemInfo related boxes
+//
+struct ItemInfo {
+    uint32_t itemId;
+    uint32_t itemType;
+};
+
+struct InfeBox : public FullBox {
+    InfeBox(const sp<DataSource> source) :
+        FullBox(source, FOURCC('i', 'n', 'f', 'e')) {}
+
+    status_t parse(off64_t offset, size_t size, ItemInfo *itemInfo);
+
+private:
+    bool parseNullTerminatedString(off64_t *offset, size_t *size, String8 *out);
+};
+
+bool InfeBox::parseNullTerminatedString(
+        off64_t *offset, size_t *size, String8 *out) {
+    char tmp[256];
+    size_t len = 0;
+    off64_t newOffset = *offset;
+    off64_t stopOffset = *offset + *size;
+    while (newOffset < stopOffset) {
+        if (!source()->readAt(newOffset++, &tmp[len], 1)) {
+            return false;
+        }
+        if (tmp[len] == 0) {
+            out->append(tmp, len);
+
+            *offset = newOffset;
+            *size = stopOffset - newOffset;
+
+            return true;
+        }
+        if (++len >= sizeof(tmp)) {
+            out->append(tmp, len);
+            len = 0;
+        }
+    }
+    return false;
+}
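+
+// Editor's note: the helper above consumes bytes up to and including the NUL
+// terminator, appending to |out| in up to 256-byte chunks. On success,
+// |*offset| and |*size| are advanced past the terminator; it fails if no NUL
+// appears within the remaining |*size| bytes.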
+
+status_t InfeBox::parse(off64_t offset, size_t size, ItemInfo *itemInfo) {
+    status_t err = parseFullBoxHeader(&offset, &size);
+    if (err != OK) {
+        return err;
+    }
+
+    if (version() == 0 || version() == 1) {
+        if (size < 4) {
+            return ERROR_MALFORMED;
+        }
+        uint16_t item_id;
+        if (!source()->getUInt16(offset, &item_id)) {
+            return ERROR_IO;
+        }
+        ALOGV("item_id %d", item_id);
+        uint16_t item_protection_index;
+        if (!source()->getUInt16(offset + 2, &item_protection_index)) {
+            return ERROR_IO;
+        }
+        offset += 4;
+        size -= 4;
+
+        String8 item_name;
+        if (!parseNullTerminatedString(&offset, &size, &item_name)) {
+            return ERROR_MALFORMED;
+        }
+
+        String8 content_type;
+        if (!parseNullTerminatedString(&offset, &size, &content_type)) {
+            return ERROR_MALFORMED;
+        }
+
+        String8 content_encoding;
+        if (!parseNullTerminatedString(&offset, &size, &content_encoding)) {
+            return ERROR_MALFORMED;
+        }
+
+        if (version() == 1) {
+            uint32_t extension_type;
+            if (!source()->getUInt32(offset, &extension_type)) {
+                return ERROR_IO;
+            }
+            offset += 4;
+            size -= 4;
+            // TODO: handle this case
+        }
+    } else { // version >= 2
+        uint32_t item_id;
+        size_t itemIdSize = (version() == 2) ? 2 : 4;
+        if (size < itemIdSize + 6) {
+            return ERROR_MALFORMED;
+        }
+        if (!source()->getUInt32Var(offset, &item_id, itemIdSize)) {
+            return ERROR_IO;
+        }
+        ALOGV("item_id %d", item_id);
+        offset += itemIdSize;
+        uint16_t item_protection_index;
+        if (!source()->getUInt16(offset, &item_protection_index)) {
+            return ERROR_IO;
+        }
+        ALOGV("item_protection_index %d", item_protection_index);
+        offset += 2;
+        uint32_t item_type;
+        if (!source()->getUInt32(offset, &item_type)) {
+            return ERROR_IO;
+        }
+
+        itemInfo->itemId = item_id;
+        itemInfo->itemType = item_type;
+
+        char itemTypeString[5];
+        MakeFourCCString(item_type, itemTypeString);
+        ALOGV("item_type %s", itemTypeString);
+        offset += 4;
+        size -= itemIdSize + 6;
+
+        String8 item_name;
+        if (!parseNullTerminatedString(&offset, &size, &item_name)) {
+            return ERROR_MALFORMED;
+        }
+        ALOGV("item_name %s", item_name.c_str());
+
+        if (item_type == FOURCC('m', 'i', 'm', 'e')) {
+            String8 content_type;
+            if (!parseNullTerminatedString(&offset, &size, &content_type)) {
+                return ERROR_MALFORMED;
+            }
+
+            String8 content_encoding;
+            if (!parseNullTerminatedString(&offset, &size, &content_encoding)) {
+                return ERROR_MALFORMED;
+            }
+        } else if (item_type == FOURCC('u', 'r', 'i', ' ')) {
+            String8 item_uri_type;
+            if (!parseNullTerminatedString(&offset, &size, &item_uri_type)) {
+                return ERROR_MALFORMED;
+            }
+        }
+    }
+    return OK;
+}
+
+struct IinfBox : public FullBox {
+    IinfBox(const sp<DataSource> source, Vector<ItemInfo> *itemInfos) :
+        FullBox(source, FOURCC('i', 'i', 'n', 'f')),
+        mItemInfos(itemInfos), mHasGrids(false) {}
+
+    status_t parse(off64_t offset, size_t size);
+
+    bool hasGrids() { return mHasGrids; }
+
+protected:
+    status_t onChunkData(uint32_t type, off64_t offset, size_t size) override;
+
+private:
+    Vector<ItemInfo> *mItemInfos;
+    bool mHasGrids;
+};
+
+status_t IinfBox::parse(off64_t offset, size_t size) {
+    ALOGV("%s: offset %lld, size %zu", __FUNCTION__, (long long)offset, size);
+
+    status_t err = parseFullBoxHeader(&offset, &size);
+    if (err != OK) {
+        return err;
+    }
+
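+    // 'iinf' version 0 stores entry_count as 16 bits; later versions use 32 bits.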
+    size_t entryCountSize = version() == 0 ? 2 : 4;
+    if (size < entryCountSize) {
+        return ERROR_MALFORMED;
+    }
+    uint32_t entry_count;
+    if (!source()->getUInt32Var(offset, &entry_count, entryCountSize)) {
+        return ERROR_IO;
+    }
+    ALOGV("entry_count %d", entry_count);
+
+    off64_t stopOffset = offset + size;
+    offset += entryCountSize;
+    for (size_t i = 0; i < entry_count && offset < stopOffset; i++) {
+        ALOGV("entry %zu", i);
+        status_t err = parseChunk(&offset);
+        if (err != OK) {
+            return err;
+        }
+    }
+    if (offset != stopOffset) {
+        return ERROR_MALFORMED;
+    }
+
+    return OK;
+}
+
+status_t IinfBox::onChunkData(uint32_t type, off64_t offset, size_t size) {
+    if (type != FOURCC('i', 'n', 'f', 'e')) {
+        return OK;
+    }
+
+    InfeBox infeBox(source());
+    ItemInfo itemInfo;
+    status_t err = infeBox.parse(offset, size, &itemInfo);
+    if (err != OK) {
+        return err;
+    }
+    mItemInfos->push_back(itemInfo);
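+    // Grid (tiled) items reference their tiles through 'dimg' entries in the
+    // 'iref' box, so track whether 'iref' must be parsed before the image
+    // table can be built.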
+    mHasGrids |= (itemInfo.itemType == FOURCC('g', 'r', 'i', 'd'));
+    return OK;
+}
+
+//////////////////////////////////////////////////////////////////
+
+ItemTable::ItemTable(const sp<DataSource> &source)
+    : mDataSource(source),
+      mPrimaryItemId(0),
+      mIdatOffset(0),
+      mIdatSize(0),
+      mImageItemsValid(false),
+      mCurrentImageIndex(0) {
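+    // All of these boxes must be seen before the image list can be built;
+    // 'idat' and 'iref' are added to the required set later if needed.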
+    mRequiredBoxes.insert('iprp');
+    mRequiredBoxes.insert('iloc');
+    mRequiredBoxes.insert('pitm');
+    mRequiredBoxes.insert('iinf');
+}
+
+ItemTable::~ItemTable() {}
+
+status_t ItemTable::parse(uint32_t type, off64_t data_offset, size_t chunk_data_size) {
+    switch(type) {
+        case FOURCC('i', 'l', 'o', 'c'):
+        {
+            return parseIlocBox(data_offset, chunk_data_size);
+        }
+        case FOURCC('i', 'i', 'n', 'f'):
+        {
+            return parseIinfBox(data_offset, chunk_data_size);
+        }
+        case FOURCC('i', 'p', 'r', 'p'):
+        {
+            return parseIprpBox(data_offset, chunk_data_size);
+        }
+        case FOURCC('p', 'i', 't', 'm'):
+        {
+            return parsePitmBox(data_offset, chunk_data_size);
+        }
+        case FOURCC('i', 'd', 'a', 't'):
+        {
+            return parseIdatBox(data_offset, chunk_data_size);
+        }
+        case FOURCC('i', 'r', 'e', 'f'):
+        {
+            return parseIrefBox(data_offset, chunk_data_size);
+        }
+        case FOURCC('i', 'p', 'r', 'o'):
+        {
+            ALOGW("ipro box not supported!");
+            break;
+        }
+        default:
+        {
+            ALOGW("unrecognized box type: 0x%x", type);
+            break;
+        }
+    }
+    return ERROR_UNSUPPORTED;
+}
+
+status_t ItemTable::parseIlocBox(off64_t offset, size_t size) {
+    ALOGV("%s: offset %lld, size %zu", __FUNCTION__, (long long)offset, size);
+
+    IlocBox ilocBox(mDataSource, &mItemLocs);
+    status_t err = ilocBox.parse(offset, size);
+    if (err != OK) {
+        return err;
+    }
+
+    if (ilocBox.hasConstructMethod1()) {
+        mRequiredBoxes.insert('idat');
+    }
+
+    return buildImageItemsIfPossible('iloc');
+}
+
+status_t ItemTable::parseIinfBox(off64_t offset, size_t size) {
+    ALOGV("%s: offset %lld, size %zu", __FUNCTION__, (long long)offset, size);
+
+    IinfBox iinfBox(mDataSource, &mItemInfos);
+    status_t err = iinfBox.parse(offset, size);
+    if (err != OK) {
+        return err;
+    }
+
+    if (iinfBox.hasGrids()) {
+        mRequiredBoxes.insert('iref');
+    }
+
+    return buildImageItemsIfPossible('iinf');
+}
+
+status_t ItemTable::parsePitmBox(off64_t offset, size_t size) {
+    ALOGV("%s: offset %lld, size %zu", __FUNCTION__, (long long)offset, size);
+
+    PitmBox pitmBox(mDataSource);
+    status_t err = pitmBox.parse(offset, size, &mPrimaryItemId);
+    if (err != OK) {
+        return err;
+    }
+
+    return buildImageItemsIfPossible('pitm');
+}
+
+status_t ItemTable::parseIprpBox(off64_t offset, size_t size) {
+    ALOGV("%s: offset %lld, size %zu", __FUNCTION__, (long long)offset, size);
+
+    IprpBox iprpBox(mDataSource, &mItemProperties, &mAssociations);
+    status_t err = iprpBox.parse(offset, size);
+    if (err != OK) {
+        return err;
+    }
+
+    return buildImageItemsIfPossible('iprp');
+}
+
+status_t ItemTable::parseIdatBox(off64_t offset, size_t size) {
+    ALOGV("%s: idat offset %lld, size %zu", __FUNCTION__, (long long)offset, size);
+
+    // only remember the offset and size of idat box for later use
+    mIdatOffset = offset;
+    mIdatSize = size;
+
+    return buildImageItemsIfPossible('idat');
+}
+
+status_t ItemTable::parseIrefBox(off64_t offset, size_t size) {
+    ALOGV("%s: offset %lld, size %zu", __FUNCTION__, (long long)offset, size);
+
+    IrefBox irefBox(mDataSource, &mItemReferences);
+    status_t err = irefBox.parse(offset, size);
+    if (err != OK) {
+        return err;
+    }
+
+    return buildImageItemsIfPossible('iref');
+}
+
+status_t ItemTable::buildImageItemsIfPossible(uint32_t type) {
+    if (mImageItemsValid) {
+        return OK;
+    }
+
+    mBoxesSeen.insert(type);
+
+    // need at least 'iprp', 'iloc', 'pitm', 'iinf';
+    // need 'idat' if any items used construction_method of 1 (data in 'idat');
+    // need 'iref' if there are grids.
+    if (!std::includes(
+            mBoxesSeen.begin(), mBoxesSeen.end(),
+            mRequiredBoxes.begin(), mRequiredBoxes.end())) {
+        return OK;
+    }
+
+    ALOGV("building image table...");
+
+    for (size_t i = 0; i < mItemInfos.size(); i++) {
+        const ItemInfo &info = mItemInfos[i];
+
+        // ignore non-image items
+        if (info.itemType != FOURCC('g', 'r', 'i', 'd') &&
+            info.itemType != FOURCC('h', 'v', 'c', '1')) {
+            continue;
+        }
+
+        ssize_t imageIndex = mItemIdToImageMap.indexOfKey(info.itemId);
+        if (imageIndex >= 0) {
+            ALOGW("ignoring duplicate image item id %d", info.itemId);
+            continue;
+        }
+
+        ssize_t ilocIndex = mItemLocs.indexOfKey(info.itemId);
+        if (ilocIndex < 0) {
+            ALOGE("iloc missing for image item id %d", info.itemId);
+            continue;
+        }
+        const ItemLoc &iloc = mItemLocs[ilocIndex];
+
+        off64_t offset;
+        size_t size;
+        if (iloc.getLoc(&offset, &size, mIdatOffset, mIdatSize) != OK) {
+            return ERROR_MALFORMED;
+        }
+
+        ImageItem image(info.itemType);
+
+        ALOGV("adding %s: itemId %d", image.isGrid() ? "grid" : "image", info.itemId);
+
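+        // ImageGrid body (ISO/IEC 23008-12): version(1), flags(1),
+        // rows_minus_one(1), columns_minus_one(1), then output_width and
+        // output_height (16 or 32 bits each, per flags), at most 12 bytes.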
+        if (image.isGrid()) {
+            if (size < 4 || size > 12) {
+                return ERROR_MALFORMED;
+            }
+            uint8_t buf[12];
+            if (mDataSource->readAt(offset, buf, size) < (ssize_t)size) {
+                return ERROR_IO;
+            }
+
+            image.rows = buf[2] + 1;
+            image.columns = buf[3] + 1;
+
+            ALOGV("rows %d, columns %d", image.rows, image.columns);
+        } else {
+            image.offset = offset;
+            image.size = size;
+        }
+        mItemIdToImageMap.add(info.itemId, image);
+    }
+
+    for (size_t i = 0; i < mAssociations.size(); i++) {
+        attachProperty(mAssociations[i]);
+    }
+
+    for (size_t i = 0; i < mItemReferences.size(); i++) {
+        mItemReferences[i]->apply(mItemIdToImageMap);
+    }
+
+    mImageItemsValid = true;
+    return OK;
+}
+
+void ItemTable::attachProperty(const AssociationEntry &association) {
+    ssize_t imageIndex = mItemIdToImageMap.indexOfKey(association.itemId);
+
+    // ignore non-image items
+    if (imageIndex < 0) {
+        return;
+    }
+
+    uint16_t propertyIndex = association.index;
+    if (propertyIndex >= mItemProperties.size()) {
+        ALOGW("Ignoring invalid property index %d", propertyIndex);
+        return;
+    }
+
+    ALOGV("attach property %d to item id %d)",
+            propertyIndex, association.itemId);
+
+    mItemProperties[propertyIndex]->attachTo(
+            mItemIdToImageMap.editValueAt(imageIndex));
+}
+
+sp<MetaData> ItemTable::getImageMeta() {
+    if (!mImageItemsValid) {
+        return NULL;
+    }
+
+    ssize_t imageIndex = mItemIdToImageMap.indexOfKey(mPrimaryItemId);
+    if (imageIndex < 0) {
+        ALOGE("Primary item id %d not found!", mPrimaryItemId);
+        return NULL;
+    }
+
+    ALOGV("primary image index %zu", imageIndex);
+
+    const ImageItem *image = &mItemIdToImageMap[imageIndex];
+
+    sp<MetaData> meta = new MetaData;
+    meta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_HEVC);
+
+    ALOGV("setting image size %dx%d", image->width, image->height);
+    meta->setInt32(kKeyWidth, image->width);
+    meta->setInt32(kKeyHeight, image->height);
+    if (image->rotation != 0) {
+        meta->setInt32(kKeyRotation, image->rotation);
+    }
+    meta->setInt32(kKeyMaxInputSize, image->width * image->height * 1.5);
+
+    if (!image->thumbnails.empty()) {
+        ssize_t thumbnailIndex = mItemIdToImageMap.indexOfKey(image->thumbnails[0]);
+        if (thumbnailIndex >= 0) {
+            const ImageItem &thumbnail = mItemIdToImageMap[thumbnailIndex];
+
+            meta->setInt32(kKeyThumbnailWidth, thumbnail.width);
+            meta->setInt32(kKeyThumbnailHeight, thumbnail.height);
+            meta->setData(kKeyThumbnailHVCC, kTypeHVCC,
+                    thumbnail.hvcc->data(), thumbnail.hvcc->size());
+            ALOGV("thumbnail meta: %dx%d, index %zd",
+                    thumbnail.width, thumbnail.height, thumbnailIndex);
+        } else {
+            ALOGW("Referenced thumbnail does not exist!");
+        }
+    }
+
+    if (image->isGrid()) {
+        if (image->dimgRefs.empty()) {
+            ALOGE("grid image has no tile references!");
+            return NULL;
+        }
+        ssize_t tileIndex = mItemIdToImageMap.indexOfKey(image->dimgRefs[0]);
+        if (tileIndex < 0) {
+            return NULL;
+        }
+        // when there are tiles, (kKeyWidth, kKeyHeight) is the full tiled area,
+        // and (kKeyDisplayWidth, kKeyDisplayHeight) may be smaller than that.
+        meta->setInt32(kKeyDisplayWidth, image->width);
+        meta->setInt32(kKeyDisplayHeight, image->height);
+        int32_t gridRows = image->rows, gridCols = image->columns;
+
+        // point image to the first tile for grid size and HVCC
+        image = &mItemIdToImageMap.editValueAt(tileIndex);
+        meta->setInt32(kKeyWidth, image->width * gridCols);
+        meta->setInt32(kKeyHeight, image->height * gridRows);
+        meta->setInt32(kKeyGridWidth, image->width);
+        meta->setInt32(kKeyGridHeight, image->height);
+        meta->setInt32(kKeyMaxInputSize, image->width * image->height * 1.5);
+    }
+
+    if (image->hvcc == NULL) {
+        ALOGE("hvcc is missing!");
+        return NULL;
+    }
+    meta->setData(kKeyHVCC, kTypeHVCC, image->hvcc->data(), image->hvcc->size());
+
+    if (image->icc != NULL) {
+        meta->setData(kKeyIccProfile, 0, image->icc->data(), image->icc->size());
+    }
+    return meta;
+}
+
+uint32_t ItemTable::countImages() const {
+    return mImageItemsValid ? mItemIdToImageMap.size() : 0;
+}
+
+status_t ItemTable::findPrimaryImage(uint32_t *imageIndex) {
+    if (!mImageItemsValid) {
+        return INVALID_OPERATION;
+    }
+
+    ssize_t index = mItemIdToImageMap.indexOfKey(mPrimaryItemId);
+    if (index < 0) {
+        return ERROR_MALFORMED;
+    }
+
+    *imageIndex = index;
+    return OK;
+}
+
+status_t ItemTable::findThumbnail(uint32_t *imageIndex) {
+    if (!mImageItemsValid) {
+        return INVALID_OPERATION;
+    }
+
+    ssize_t primaryIndex = mItemIdToImageMap.indexOfKey(mPrimaryItemId);
+    if (primaryIndex < 0) {
+        ALOGE("Primary item id %d not found!", mPrimaryItemId);
+        return ERROR_MALFORMED;
+    }
+
+    const ImageItem &primaryImage = mItemIdToImageMap[primaryIndex];
+    if (primaryImage.thumbnails.empty()) {
+        ALOGW("Using primary in place of thumbnail.");
+        *imageIndex = primaryIndex;
+        return OK;
+    }
+
+    ssize_t thumbnailIndex = mItemIdToImageMap.indexOfKey(
+            primaryImage.thumbnails[0]);
+    if (thumbnailIndex < 0) {
+        ALOGE("Thumbnail item id %d not found!", primaryImage.thumbnails[0]);
+        return ERROR_MALFORMED;
+    }
+
+    *imageIndex = thumbnailIndex;
+    return OK;
+}
+
+status_t ItemTable::getImageOffsetAndSize(
+        uint32_t *imageIndex, off64_t *offset, size_t *size) {
+    if (!mImageItemsValid) {
+        return INVALID_OPERATION;
+    }
+
+    if (imageIndex != NULL) {
+        if (*imageIndex >= mItemIdToImageMap.size()) {
+            ALOGE("Bad image index!");
+            return BAD_VALUE;
+        }
+        mCurrentImageIndex = *imageIndex;
+    }
+
+    ImageItem &image = mItemIdToImageMap.editValueAt(mCurrentImageIndex);
+    if (image.isGrid()) {
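+        // Grid images are read one tile at a time; a non-NULL imageIndex
+        // signals a fresh seek, which restarts iteration at the first tile.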
+        uint32_t tileItemId;
+        status_t err = image.getNextTileItemId(&tileItemId, imageIndex != NULL);
+        if (err != OK) {
+            return err;
+        }
+        ssize_t tileImageIndex = mItemIdToImageMap.indexOfKey(tileItemId);
+        if (tileImageIndex < 0) {
+            return ERROR_END_OF_STREAM;
+        }
+        *offset = mItemIdToImageMap[tileImageIndex].offset;
+        *size = mItemIdToImageMap[tileImageIndex].size;
+    } else {
+        if (imageIndex == NULL) {
+            // For single images, we only allow it to be read once, after that
+            // it's EOS.  New image index must be requested each time.
+            return ERROR_END_OF_STREAM;
+        }
+        *offset = mItemIdToImageMap[mCurrentImageIndex].offset;
+        *size = mItemIdToImageMap[mCurrentImageIndex].size;
+    }
+
+    return OK;
+}
+
+} // namespace heif
+
+}  // namespace android
diff --git a/media/libstagefright/MPEG4Extractor.cpp b/media/libstagefright/MPEG4Extractor.cpp
index 459a1cb..5ef0f56 100644
--- a/media/libstagefright/MPEG4Extractor.cpp
+++ b/media/libstagefright/MPEG4Extractor.cpp
@@ -28,6 +28,7 @@
 
 #include "include/MPEG4Extractor.h"
 #include "include/SampleTable.h"
+#include "include/ItemTable.h"
 #include "include/ESDS.h"
 
 #include <media/stagefright/foundation/ABitReader.h>
@@ -36,6 +37,7 @@
 #include <media/stagefright/foundation/AMessage.h>
 #include <media/stagefright/foundation/AUtils.h>
 #include <media/stagefright/foundation/ColorUtils.h>
+#include <media/stagefright/foundation/hexdump.h>
 #include <media/stagefright/MediaBuffer.h>
 #include <media/stagefright/MediaBufferGroup.h>
 #include <media/stagefright/MediaDefs.h>
@@ -72,7 +74,8 @@
                 const sp<SampleTable> &sampleTable,
                 Vector<SidxEntry> &sidx,
                 const Trex *trex,
-                off64_t firstMoofOffset);
+                off64_t firstMoofOffset,
+                const sp<ItemTable> &itemTable);
     virtual status_t init();
 
     virtual status_t start(MetaData *params = NULL);
@@ -134,6 +137,9 @@
 
     uint8_t *mSrcBuffer;
 
+    bool mIsHEIF;
+    sp<ItemTable> mItemTable;
+
     size_t parseNALSize(const uint8_t *data) const;
     status_t parseChunk(off64_t *offset);
     status_t parseTrackFragmentHeader(off64_t offset, off64_t size);
@@ -285,45 +291,6 @@
 
 static const bool kUseHexDump = false;
 
-static void hexdump(const void *_data, size_t size) {
-    const uint8_t *data = (const uint8_t *)_data;
-    size_t offset = 0;
-    while (offset < size) {
-        printf("0x%04zx  ", offset);
-
-        size_t n = size - offset;
-        if (n > 16) {
-            n = 16;
-        }
-
-        for (size_t i = 0; i < 16; ++i) {
-            if (i == 8) {
-                printf(" ");
-            }
-
-            if (offset + i < size) {
-                printf("%02x ", data[offset + i]);
-            } else {
-                printf("   ");
-            }
-        }
-
-        printf(" ");
-
-        for (size_t i = 0; i < n; ++i) {
-            if (isprint(data[offset + i])) {
-                printf("%c", data[offset + i]);
-            } else {
-                printf(".");
-            }
-        }
-
-        printf("\n");
-
-        offset += 16;
-    }
-}
-
 static const char *FourCC2MIME(uint32_t fourcc) {
     switch (fourcc) {
         case FOURCC('m', 'p', '4', 'a'):
@@ -378,6 +345,7 @@
       mInitCheck(NO_INIT),
       mHeaderTimescale(0),
       mIsQT(false),
+      mIsHEIF(false),
       mFirstTrack(NULL),
       mLastTrack(NULL),
       mFileMetaData(new MetaData),
@@ -512,14 +480,6 @@
     return track->meta;
 }
 
-static void MakeFourCCString(uint32_t x, char *s) {
-    s[0] = x >> 24;
-    s[1] = (x >> 16) & 0xff;
-    s[2] = (x >> 8) & 0xff;
-    s[3] = x & 0xff;
-    s[4] = '\0';
-}
-
 status_t MPEG4Extractor::readMetaData() {
     if (mInitCheck != NO_INIT) {
         return mInitCheck;
@@ -529,7 +489,8 @@
     status_t err;
     bool sawMoovOrSidx = false;
 
-    while (!(sawMoovOrSidx && (mMdatFound || mMoofFound))) {
+    while (!((sawMoovOrSidx && (mMdatFound || mMoofFound)) ||
+            (mIsHEIF && (mItemTable != NULL) && mItemTable->isValid()))) {
         off64_t orig_offset = offset;
         err = parseChunk(&offset, 0);
 
@@ -581,6 +542,29 @@
         mFileMetaData->setData(kKeyPssh, 'pssh', buf, psshsize);
         free(buf);
     }
+
+    if (mIsHEIF) {
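+        // A HEIF still image has no real 'trak'; synthesize a single image
+        // "track" (ID 0) from the primary item's metadata so the usual track
+        // plumbing can serve it.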
+        sp<MetaData> meta = mItemTable->getImageMeta();
+        if (meta == NULL) {
+            return ERROR_MALFORMED;
+        }
+
+        Track *track = mLastTrack;
+        if (track != NULL) {
+            ALOGW("track is set before metadata is fully processed");
+        } else {
+            track = new Track;
+            track->next = NULL;
+            mFirstTrack = mLastTrack = track;
+        }
+
+        track->meta = meta;
+        track->meta->setInt32(kKeyTrackID, 0);
+        track->includes_expensive_metadata = false;
+        track->skipTrack = false;
+        track->timescale = 0;
+    }
+
     return mInitCheck;
 }
 
@@ -967,8 +951,9 @@
                     }
                 }
 
-                if (mLastTrack == NULL)
+                if (mLastTrack == NULL) {
                     return ERROR_MALFORMED;
+                }
 
                 mLastTrack->sampleTable = new SampleTable(mDataSource);
             }
@@ -1140,8 +1125,9 @@
             original_fourcc = ntohl(original_fourcc);
             ALOGV("read original format: %d", original_fourcc);
 
-            if (mLastTrack == NULL)
+            if (mLastTrack == NULL) {
                 return ERROR_MALFORMED;
+            }
 
             mLastTrack->meta->setCString(kKeyMIMEType, FourCC2MIME(original_fourcc));
             uint32_t num_channels = 0;
@@ -1581,8 +1567,9 @@
         case FOURCC('s', 't', 'c', 'o'):
         case FOURCC('c', 'o', '6', '4'):
         {
-            if ((mLastTrack == NULL) || (mLastTrack->sampleTable == NULL))
+            if ((mLastTrack == NULL) || (mLastTrack->sampleTable == NULL)) {
                 return ERROR_MALFORMED;
+            }
 
             status_t err =
                 mLastTrack->sampleTable->setChunkOffsetParams(
@@ -1618,8 +1605,9 @@
         case FOURCC('s', 't', 's', 'z'):
         case FOURCC('s', 't', 'z', '2'):
         {
-            if ((mLastTrack == NULL) || (mLastTrack->sampleTable == NULL))
+            if ((mLastTrack == NULL) || (mLastTrack->sampleTable == NULL)) {
                 return ERROR_MALFORMED;
+            }
 
             status_t err =
                 mLastTrack->sampleTable->setSampleSizeParams(
@@ -2035,6 +2023,28 @@
             break;
         }
 
+        case FOURCC('i', 'l', 'o', 'c'):
+        case FOURCC('i', 'i', 'n', 'f'):
+        case FOURCC('i', 'p', 'r', 'p'):
+        case FOURCC('p', 'i', 't', 'm'):
+        case FOURCC('i', 'd', 'a', 't'):
+        case FOURCC('i', 'r', 'e', 'f'):
+        case FOURCC('i', 'p', 'r', 'o'):
+        {
+            if (mIsHEIF) {
+                if (mItemTable == NULL) {
+                    mItemTable = new ItemTable(mDataSource);
+                }
+                status_t err = mItemTable->parse(
+                        chunk_type, data_offset, chunk_data_size);
+                if (err != OK) {
+                    return err;
+                }
+            }
+            *offset += chunk_size;
+            break;
+        }
+
         case FOURCC('m', 'e', 'a', 'n'):
         case FOURCC('n', 'a', 'm', 'e'):
         case FOURCC('d', 'a', 't', 'a'):
@@ -2382,6 +2392,7 @@
 
             off64_t stop_offset = *offset + chunk_size;
             uint32_t numCompatibleBrands = (chunk_data_size - 8) / 4;
+            std::set<uint32_t> brandSet;
             for (size_t i = 0; i < numCompatibleBrands + 2; ++i) {
                 if (i == 1) {
                     // Skip this index, it refers to the minorVersion,
@@ -2395,10 +2406,15 @@
                 }
 
                 brand = ntohl(brand);
-                if (brand == FOURCC('q', 't', ' ', ' ')) {
-                    mIsQT = true;
-                    break;
-                }
+                brandSet.insert(brand);
+            }
+
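+            // A HEIF still image declares both the structural brand 'mif1'
+            // and the HEVC coding brand 'heic'; require both before enabling
+            // item-table parsing.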
+            if (brandSet.count(FOURCC('q', 't', ' ', ' ')) > 0) {
+                mIsQT = true;
+            } else if (brandSet.count(FOURCC('m', 'i', 'f', '1')) > 0
+                    && brandSet.count(FOURCC('h', 'e', 'i', 'c')) > 0) {
+                mIsHEIF = true;
+                ALOGV("identified HEIF image");
             }
 
             *offset = stop_offset;
@@ -3347,7 +3363,7 @@
 
     sp<MPEG4Source> source =  new MPEG4Source(this,
             track->meta, mDataSource, track->timescale, track->sampleTable,
-            mSidxEntries, trex, mMoofOffset);
+            mSidxEntries, trex, mMoofOffset, mItemTable);
     if (source->init() != OK) {
         return NULL;
     }
@@ -3736,7 +3752,8 @@
         const sp<SampleTable> &sampleTable,
         Vector<SidxEntry> &sidx,
         const Trex *trex,
-        off64_t firstMoofOffset)
+        off64_t firstMoofOffset,
+        const sp<ItemTable> &itemTable)
     : mOwner(owner),
       mFormat(format),
       mDataSource(dataSource),
@@ -3761,7 +3778,9 @@
       mGroup(NULL),
       mBuffer(NULL),
       mWantsNALFragments(false),
-      mSrcBuffer(NULL) {
+      mSrcBuffer(NULL),
+      mIsHEIF(itemTable != NULL),
+      mItemTable(itemTable) {
 
     memset(&mTrackFragmentHeaderInfo, 0, sizeof(mTrackFragmentHeaderInfo));
 
@@ -4536,77 +4555,93 @@
     int64_t seekTimeUs;
     ReadOptions::SeekMode mode;
     if (options && options->getSeekTo(&seekTimeUs, &mode)) {
-        uint32_t findFlags = 0;
-        switch (mode) {
-            case ReadOptions::SEEK_PREVIOUS_SYNC:
-                findFlags = SampleTable::kFlagBefore;
-                break;
-            case ReadOptions::SEEK_NEXT_SYNC:
-                findFlags = SampleTable::kFlagAfter;
-                break;
-            case ReadOptions::SEEK_CLOSEST_SYNC:
-            case ReadOptions::SEEK_CLOSEST:
-                findFlags = SampleTable::kFlagClosest;
-                break;
-            default:
-                CHECK(!"Should not be here.");
-                break;
-        }
+        if (mIsHEIF) {
+            CHECK(mSampleTable == NULL);
+            CHECK(mItemTable != NULL);
 
-        uint32_t sampleIndex;
-        status_t err = mSampleTable->findSampleAtTime(
-                seekTimeUs, 1000000, mTimescale,
-                &sampleIndex, findFlags);
-
-        if (mode == ReadOptions::SEEK_CLOSEST) {
-            // We found the closest sample already, now we want the sync
-            // sample preceding it (or the sample itself of course), even
-            // if the subsequent sync sample is closer.
-            findFlags = SampleTable::kFlagBefore;
-        }
-
-        uint32_t syncSampleIndex;
-        if (err == OK) {
-            err = mSampleTable->findSyncSampleNear(
-                    sampleIndex, &syncSampleIndex, findFlags);
-        }
-
-        uint32_t sampleTime;
-        if (err == OK) {
-            err = mSampleTable->getMetaDataForSample(
-                    sampleIndex, NULL, NULL, &sampleTime);
-        }
-
-        if (err != OK) {
-            if (err == ERROR_OUT_OF_RANGE) {
-                // An attempt to seek past the end of the stream would
-                // normally cause this ERROR_OUT_OF_RANGE error. Propagating
-                // this all the way to the MediaPlayer would cause abnormal
-                // termination. Legacy behaviour appears to be to behave as if
-                // we had seeked to the end of stream, ending normally.
-                err = ERROR_END_OF_STREAM;
+            status_t err;
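+            // By convention, a negative seek time requests the thumbnail;
+            // any non-negative time maps to the primary image item.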
+            if (seekTimeUs >= 0) {
+                err = mItemTable->findPrimaryImage(&mCurrentSampleIndex);
+            } else {
+                err = mItemTable->findThumbnail(&mCurrentSampleIndex);
             }
-            ALOGV("end of stream");
-            return err;
-        }
+            if (err != OK) {
+                return err;
+            }
+        } else {
+            uint32_t findFlags = 0;
+            switch (mode) {
+                case ReadOptions::SEEK_PREVIOUS_SYNC:
+                    findFlags = SampleTable::kFlagBefore;
+                    break;
+                case ReadOptions::SEEK_NEXT_SYNC:
+                    findFlags = SampleTable::kFlagAfter;
+                    break;
+                case ReadOptions::SEEK_CLOSEST_SYNC:
+                case ReadOptions::SEEK_CLOSEST:
+                    findFlags = SampleTable::kFlagClosest;
+                    break;
+                default:
+                    CHECK(!"Should not be here.");
+                    break;
+            }
 
-        if (mode == ReadOptions::SEEK_CLOSEST) {
-            targetSampleTimeUs = (sampleTime * 1000000ll) / mTimescale;
-        }
+            uint32_t sampleIndex;
+            status_t err = mSampleTable->findSampleAtTime(
+                    seekTimeUs, 1000000, mTimescale,
+                    &sampleIndex, findFlags);
+
+            if (mode == ReadOptions::SEEK_CLOSEST) {
+                // We found the closest sample already, now we want the sync
+                // sample preceding it (or the sample itself of course), even
+                // if the subsequent sync sample is closer.
+                findFlags = SampleTable::kFlagBefore;
+            }
+
+            uint32_t syncSampleIndex;
+            if (err == OK) {
+                err = mSampleTable->findSyncSampleNear(
+                        sampleIndex, &syncSampleIndex, findFlags);
+            }
+
+            uint32_t sampleTime;
+            if (err == OK) {
+                err = mSampleTable->getMetaDataForSample(
+                        sampleIndex, NULL, NULL, &sampleTime);
+            }
+
+            if (err != OK) {
+                if (err == ERROR_OUT_OF_RANGE) {
+                    // An attempt to seek past the end of the stream would
+                    // normally cause this ERROR_OUT_OF_RANGE error. Propagating
+                    // this all the way to the MediaPlayer would cause abnormal
+                    // termination. Legacy behaviour appears to be to behave as if
+                    // we had seeked to the end of stream, ending normally.
+                    err = ERROR_END_OF_STREAM;
+                }
+                ALOGV("end of stream");
+                return err;
+            }
+
+            if (mode == ReadOptions::SEEK_CLOSEST) {
+                targetSampleTimeUs = (sampleTime * 1000000ll) / mTimescale;
+            }
 
 #if 0
-        uint32_t syncSampleTime;
-        CHECK_EQ(OK, mSampleTable->getMetaDataForSample(
-                    syncSampleIndex, NULL, NULL, &syncSampleTime));
+            uint32_t syncSampleTime;
+            CHECK_EQ(OK, mSampleTable->getMetaDataForSample(
+                        syncSampleIndex, NULL, NULL, &syncSampleTime));
 
-        ALOGI("seek to time %lld us => sample at time %lld us, "
-             "sync sample at time %lld us",
-             seekTimeUs,
-             sampleTime * 1000000ll / mTimescale,
-             syncSampleTime * 1000000ll / mTimescale);
+            ALOGI("seek to time %lld us => sample at time %lld us, "
+                 "sync sample at time %lld us",
+                 seekTimeUs,
+                 sampleTime * 1000000ll / mTimescale,
+                 syncSampleTime * 1000000ll / mTimescale);
 #endif
 
-        mCurrentSampleIndex = syncSampleIndex;
+            mCurrentSampleIndex = syncSampleIndex;
+        }
+
         if (mBuffer != NULL) {
             mBuffer->release();
             mBuffer = NULL;
@@ -4623,9 +4658,19 @@
     if (mBuffer == NULL) {
         newBuffer = true;
 
-        status_t err =
-            mSampleTable->getMetaDataForSample(
+        status_t err;
+        if (!mIsHEIF) {
+            err = mSampleTable->getMetaDataForSample(
                     mCurrentSampleIndex, &offset, &size, &cts, &isSyncSample, &stts);
+        } else {
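+            // Pass the sample index only when a new seek was requested; a
+            // NULL index advances through grid tiles, or signals EOS for a
+            // single image that was already read.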
+            err = mItemTable->getImageOffsetAndSize(
+                    options && options->getSeekTo(&seekTimeUs, &mode) ?
+                            &mCurrentSampleIndex : NULL, &offset, &size);
+
+            cts = stts = 0;
+            isSyncSample = 0;
+            ALOGV("image offset %lld, size %zu", (long long)offset, size);
+        }
 
         if (err != OK) {
             return err;
@@ -5195,7 +5240,8 @@
         || !memcmp(header, "ftyp3ge6", 8) || !memcmp(header, "ftyp3gg6", 8)
         || !memcmp(header, "ftypisom", 8) || !memcmp(header, "ftypM4V ", 8)
         || !memcmp(header, "ftypM4A ", 8) || !memcmp(header, "ftypf4v ", 8)
-        || !memcmp(header, "ftypkddi", 8) || !memcmp(header, "ftypM4VP", 8)) {
+        || !memcmp(header, "ftypkddi", 8) || !memcmp(header, "ftypM4VP", 8)
+        || !memcmp(header, "ftypmif1", 8) || !memcmp(header, "ftypheic", 8)) {
         *mimeType = MEDIA_MIMETYPE_CONTAINER_MPEG4;
         *confidence = 0.4;
 
@@ -5224,6 +5270,8 @@
 
         FOURCC('3', 'g', '2', 'a'),  // 3GPP2
         FOURCC('3', 'g', '2', 'b'),
+        FOURCC('m', 'i', 'f', '1'),  // HEIF image
+        FOURCC('h', 'e', 'i', 'c'),  // HEIF image
     };
 
     for (size_t i = 0;
diff --git a/media/libstagefright/StagefrightMediaScanner.cpp b/media/libstagefright/StagefrightMediaScanner.cpp
index f0c27ac..4ff2bfe 100644
--- a/media/libstagefright/StagefrightMediaScanner.cpp
+++ b/media/libstagefright/StagefrightMediaScanner.cpp
@@ -85,7 +85,8 @@
     status_t status;
     if (fd < 0) {
         // couldn't open it locally, maybe the media server can?
-        status = mRetriever->setDataSource(NULL /* httpService */, path);
+        sp<IMediaHTTPService> nullService;
+        status = mRetriever->setDataSource(nullService, path);
     } else {
         status = mRetriever->setDataSource(fd, 0, 0x7ffffffffffffffL);
         close(fd);
diff --git a/media/libstagefright/StagefrightMetadataRetriever.cpp b/media/libstagefright/StagefrightMetadataRetriever.cpp
index 57af772..a53897f 100644
--- a/media/libstagefright/StagefrightMetadataRetriever.cpp
+++ b/media/libstagefright/StagefrightMetadataRetriever.cpp
@@ -121,12 +121,12 @@
 }
 
 status_t StagefrightMetadataRetriever::setDataSource(
-        const sp<DataSource>& source) {
+        const sp<DataSource>& source, const char *mime) {
     ALOGV("setDataSource(DataSource)");
 
     clearMetadata();
     mSource = source;
-    mExtractor = MediaExtractor::Create(mSource);
+    mExtractor = MediaExtractor::Create(mSource, mime);
 
     if (mExtractor == NULL) {
         ALOGE("Failed to instantiate a MediaExtractor.");
@@ -137,17 +137,173 @@
     return OK;
 }
 
+static VideoFrame *allocVideoFrame(
+        const sp<MetaData> &trackMeta, int32_t width, int32_t height, int32_t bpp, bool metaOnly) {
+    int32_t rotationAngle;
+    if (!trackMeta->findInt32(kKeyRotation, &rotationAngle)) {
+        rotationAngle = 0;  // By default, no rotation
+    }
+
+    uint32_t type;
+    const void *iccData;
+    size_t iccSize;
+    if (!trackMeta->findData(kKeyIccProfile, &type, &iccData, &iccSize)) {
+        iccData = NULL;
+        iccSize = 0;
+    }
+
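+    // Prefer a display size derived from the sample aspect ratio; otherwise
+    // fall back to explicit display dimensions (e.g. a tiled HEIF whose coded
+    // area exceeds the displayed area), else use the coded size.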
+    int32_t sarWidth, sarHeight;
+    int32_t displayWidth, displayHeight;
+    if (trackMeta->findInt32(kKeySARWidth, &sarWidth)
+            && trackMeta->findInt32(kKeySARHeight, &sarHeight)
+            && sarHeight != 0) {
+        displayWidth = (width * sarWidth) / sarHeight;
+        displayHeight = height;
+    } else if (trackMeta->findInt32(kKeyDisplayWidth, &displayWidth)
+                && trackMeta->findInt32(kKeyDisplayHeight, &displayHeight)
+                && displayWidth > 0 && displayHeight > 0
+                && width > 0 && height > 0) {
+        ALOGV("found display size %dx%d", displayWidth, displayHeight);
+    } else {
+        displayWidth = width;
+        displayHeight = height;
+    }
+
+    return new VideoFrame(width, height, displayWidth, displayHeight,
+            rotationAngle, bpp, !metaOnly, iccData, iccSize);
+}
+
+static bool getDstColorFormat(android_pixel_format_t colorFormat,
+        OMX_COLOR_FORMATTYPE *omxColorFormat, int32_t *bpp) {
+    switch (colorFormat) {
+        case HAL_PIXEL_FORMAT_RGB_565:
+        {
+            *omxColorFormat = OMX_COLOR_Format16bitRGB565;
+            *bpp = 2;
+            return true;
+        }
+        case HAL_PIXEL_FORMAT_RGBA_8888:
+        {
+            *omxColorFormat = OMX_COLOR_Format32BitRGBA8888;
+            *bpp = 4;
+            return true;
+        }
+        case HAL_PIXEL_FORMAT_BGRA_8888:
+        {
+            *omxColorFormat = OMX_COLOR_Format32bitBGRA8888;
+            *bpp = 4;
+            return true;
+        }
+        default:
+        {
+            ALOGE("Unsupported color format: %d", colorFormat);
+            break;
+        }
+    }
+    return false;
+}
+
 static VideoFrame *extractVideoFrame(
         const AString &componentName,
         const sp<MetaData> &trackMeta,
         const sp<IMediaSource> &source,
         int64_t frameTimeUs,
-        int seekMode) {
-
+        int seekMode,
+        int colorFormat,
+        bool metaOnly) {
     sp<MetaData> format = source->getFormat();
 
+    MediaSource::ReadOptions::SeekMode mode =
+            static_cast<MediaSource::ReadOptions::SeekMode>(seekMode);
+    if (seekMode < MediaSource::ReadOptions::SEEK_PREVIOUS_SYNC ||
+        seekMode > MediaSource::ReadOptions::SEEK_CLOSEST) {
+        ALOGE("Unknown seek mode: %d", seekMode);
+        return NULL;
+    }
+
+    int32_t dstBpp;
+    OMX_COLOR_FORMATTYPE dstFormat;
+    if (!getDstColorFormat(
+            (android_pixel_format_t)colorFormat, &dstFormat, &dstBpp)) {
+        return NULL;
+    }
+
+    if (metaOnly) {
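+        // metaOnly requests only the frame parameters (dimensions, rotation,
+        // ICC profile); no pixel buffer is allocated and no decoding happens.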
+        int32_t width, height;
+        CHECK(trackMeta->findInt32(kKeyWidth, &width));
+        CHECK(trackMeta->findInt32(kKeyHeight, &height));
+        return allocVideoFrame(trackMeta, width, height, dstBpp, true);
+    }
+
+    MediaSource::ReadOptions options;
+    sp<MetaData> overrideMeta;
+    if (frameTimeUs < 0) {
+        uint32_t type;
+        const void *data;
+        size_t size;
+        int64_t thumbNailTime;
+        int32_t thumbnailWidth, thumbnailHeight;
+
+        // if we have a stand-alone thumbnail, set up the override meta,
+        // and set seekTo time to -1.
+        if (trackMeta->findInt32(kKeyThumbnailWidth, &thumbnailWidth)
+         && trackMeta->findInt32(kKeyThumbnailHeight, &thumbnailHeight)
+         && trackMeta->findData(kKeyThumbnailHVCC, &type, &data, &size)){
+            overrideMeta = new MetaData(*trackMeta);
+            overrideMeta->remove(kKeyDisplayWidth);
+            overrideMeta->remove(kKeyDisplayHeight);
+            overrideMeta->setInt32(kKeyWidth, thumbnailWidth);
+            overrideMeta->setInt32(kKeyHeight, thumbnailHeight);
+            overrideMeta->setData(kKeyHVCC, type, data, size);
+            thumbNailTime = -1ll;
+            ALOGV("thumbnail: %dx%d", thumbnailWidth, thumbnailHeight);
+        } else if (!trackMeta->findInt64(kKeyThumbnailTime, &thumbNailTime)
+                || thumbNailTime < 0) {
+            thumbNailTime = 0;
+        }
+
+        options.setSeekTo(thumbNailTime, mode);
+    } else {
+        options.setSeekTo(frameTimeUs, mode);
+    }
+
+    int32_t gridRows = 1, gridCols = 1;
+    if (overrideMeta == NULL) {
+        // check if we're dealing with a tiled heif
+        int32_t gridWidth, gridHeight;
+        if (trackMeta->findInt32(kKeyGridWidth, &gridWidth) && gridWidth > 0
+         && trackMeta->findInt32(kKeyGridHeight, &gridHeight) && gridHeight > 0) {
+            int32_t width, height, displayWidth, displayHeight;
+            CHECK(trackMeta->findInt32(kKeyWidth, &width));
+            CHECK(trackMeta->findInt32(kKeyHeight, &height));
+            CHECK(trackMeta->findInt32(kKeyDisplayWidth, &displayWidth));
+            CHECK(trackMeta->findInt32(kKeyDisplayHeight, &displayHeight));
+
+            if (width >= displayWidth && height >= displayHeight
+                    && (width % gridWidth == 0) && (height % gridHeight == 0)) {
+                ALOGV("grid config: %dx%d, display %dx%d, grid %dx%d",
+                        width, height, displayWidth, displayHeight, gridWidth, gridHeight);
+
+                overrideMeta = new MetaData(*trackMeta);
+                overrideMeta->remove(kKeyDisplayWidth);
+                overrideMeta->remove(kKeyDisplayHeight);
+                overrideMeta->setInt32(kKeyWidth, gridWidth);
+                overrideMeta->setInt32(kKeyHeight, gridHeight);
+                gridCols = width / gridWidth;
+                gridRows = height / gridHeight;
+            } else {
+                ALOGE("Bad grid config: %dx%d, display %dx%d, grid %dx%d",
+                        width, height, displayWidth, displayHeight, gridWidth, gridHeight);
+            }
+        }
+        if (overrideMeta == NULL) {
+            overrideMeta = trackMeta;
+        }
+    }
+    int32_t numTiles = gridRows * gridCols;
+
     sp<AMessage> videoFormat;
-    if (convertMetaDataToMessage(trackMeta, &videoFormat) != OK) {
+    if (convertMetaDataToMessage(overrideMeta, &videoFormat) != OK) {
         ALOGE("b/23680780");
         ALOGW("Failed to convert meta data to message");
         return NULL;
@@ -160,7 +316,8 @@
     // input and output ports, if seeking to a sync frame. NOTE: This request may
     // fail if component requires more than that for decoding.
     bool isSeekingClosest = (seekMode == MediaSource::ReadOptions::SEEK_CLOSEST);
-    if (!isSeekingClosest) {
+    bool decodeSingleFrame = !isSeekingClosest && (numTiles == 1);
+    if (decodeSingleFrame) {
         videoFormat->setInt32("android._num-input-buffers", 1);
         videoFormat->setInt32("android._num-output-buffers", 1);
     }
@@ -190,30 +347,6 @@
         return NULL;
     }
 
-    MediaSource::ReadOptions options;
-    if (seekMode < MediaSource::ReadOptions::SEEK_PREVIOUS_SYNC ||
-        seekMode > MediaSource::ReadOptions::SEEK_CLOSEST) {
-
-        ALOGE("Unknown seek mode: %d", seekMode);
-        decoder->release();
-        return NULL;
-    }
-
-    MediaSource::ReadOptions::SeekMode mode =
-            static_cast<MediaSource::ReadOptions::SeekMode>(seekMode);
-
-    int64_t thumbNailTime;
-    if (frameTimeUs < 0) {
-        if (!trackMeta->findInt64(kKeyThumbnailTime, &thumbNailTime)
-                || thumbNailTime < 0) {
-            thumbNailTime = 0;
-        }
-        options.setSeekTo(thumbNailTime, mode);
-    } else {
-        thumbNailTime = -1;
-        options.setSeekTo(frameTimeUs, mode);
-    }
-
     err = source->start();
     if (err != OK) {
         ALOGW("source failed to start: %d (%s)", err, asString(err));
@@ -258,6 +391,9 @@
     bool firstSample = true;
     int64_t targetTimeUs = -1ll;
 
+    VideoFrame *frame = NULL;
+    int32_t tilesDecoded = 0;
+
     do {
         size_t inputIndex = -1;
         int64_t ptsUs = 0ll;
@@ -282,6 +418,9 @@
             if (err != OK) {
                 ALOGW("Input Error or EOS");
                 haveMoreInputs = false;
+                if (err == ERROR_END_OF_STREAM) {
+                    err = OK;
+                }
                 break;
             }
             if (firstSample && isSeekingClosest) {
@@ -293,6 +432,7 @@
             if (mediaBuffer->range_length() > codecBuffer->capacity()) {
                 ALOGE("buffer size (%zu) too large for codec input size (%zu)",
                         mediaBuffer->range_length(), codecBuffer->capacity());
+                haveMoreInputs = false;
                 err = BAD_VALUE;
             } else {
                 codecBuffer->setRange(0, mediaBuffer->range_length());
@@ -301,19 +441,20 @@
                 memcpy(codecBuffer->data(),
                         (const uint8_t*)mediaBuffer->data() + mediaBuffer->range_offset(),
                         mediaBuffer->range_length());
-                if (isAvcOrHevc && IsIDR(codecBuffer) && !isSeekingClosest) {
-                    // Only need to decode one IDR frame, unless we're seeking with CLOSEST
-                    // option, in which case we need to actually decode to targetTimeUs.
-                    haveMoreInputs = false;
-                    flags |= MediaCodec::BUFFER_FLAG_EOS;
-                }
             }
 
             mediaBuffer->release();
             break;
         }
 
-        if (err == OK && inputIndex < inputBuffers.size()) {
+        if (haveMoreInputs && inputIndex < inputBuffers.size()) {
+            if (isAvcOrHevc && IsIDR(codecBuffer) && decodeSingleFrame) {
+                // Only need to decode one IDR frame, unless we're seeking with CLOSEST
+                // option, in which case we need to actually decode to targetTimeUs.
+                haveMoreInputs = false;
+                flags |= MediaCodec::BUFFER_FLAG_EOS;
+            }
+
             ALOGV("QueueInput: size=%zu ts=%" PRId64 " us flags=%x",
                     codecBuffer->size(), ptsUs, flags);
             err = decoder->queueInputBuffer(
@@ -352,11 +493,70 @@
                 } else if (err == OK) {
                     // If we're seeking with CLOSEST option and obtained a valid targetTimeUs
                     // from the extractor, decode to the specified frame. Otherwise we're done.
-                    done = (targetTimeUs < 0ll) || (timeUs >= targetTimeUs);
                     ALOGV("Received an output buffer, timeUs=%lld", (long long)timeUs);
-                    if (!done) {
-                        err = decoder->releaseOutputBuffer(index);
+                    sp<MediaCodecBuffer> videoFrameBuffer = outputBuffers.itemAt(index);
+
+                    int32_t width, height;
+                    CHECK(outputFormat != NULL);
+                    CHECK(outputFormat->findInt32("width", &width));
+                    CHECK(outputFormat->findInt32("height", &height));
+
+                    int32_t crop_left, crop_top, crop_right, crop_bottom;
+                    if (!outputFormat->findRect("crop", &crop_left, &crop_top, &crop_right, &crop_bottom)) {
+                        crop_left = crop_top = 0;
+                        crop_right = width - 1;
+                        crop_bottom = height - 1;
                     }
+
+                    if (frame == NULL) {
+                        frame = allocVideoFrame(
+                                trackMeta,
+                                (crop_right - crop_left + 1) * gridCols,
+                                (crop_bottom - crop_top + 1) * gridRows,
+                                dstBpp,
+                                false /*metaOnly*/);
+                    }
+
+                    int32_t srcFormat;
+                    CHECK(outputFormat->findInt32("color-format", &srcFormat));
+
+                    ColorConverter converter((OMX_COLOR_FORMATTYPE)srcFormat, dstFormat);
+
+                    int32_t dstLeft, dstTop, dstRight, dstBottom;
+                    if (numTiles == 1) {
+                        dstLeft = crop_left;
+                        dstTop = crop_top;
+                        dstRight = crop_right;
+                        dstBottom = crop_bottom;
+                    } else {
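+                        // Tiles arrive in row-major order; place this tile at
+                        // its slot within the full-size output frame.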
+                        dstLeft = tilesDecoded % gridCols * width;
+                        dstTop = tilesDecoded / gridCols * height;
+                        dstRight = dstLeft + width - 1;
+                        dstBottom = dstTop + height - 1;
+                    }
+
+                    if (converter.isValid()) {
+                        err = converter.convert(
+                                (const uint8_t *)videoFrameBuffer->data(),
+                                width, height,
+                                crop_left, crop_top, crop_right, crop_bottom,
+                                frame->mData,
+                                frame->mWidth,
+                                frame->mHeight,
+                                dstLeft, dstTop, dstRight, dstBottom);
+                    } else {
+                        ALOGE("Unable to convert from format 0x%08x to 0x%08x",
+                                srcFormat, dstFormat);
+
+                        err = ERROR_UNSUPPORTED;
+                    }
+
+                    done = (targetTimeUs < 0ll) || (timeUs >= targetTimeUs);
+                    if (numTiles > 1) {
+                        tilesDecoded++;
+                        done &= (tilesDecoded >= numTiles);
+                    }
+                    err = decoder->releaseOutputBuffer(index);
                 } else {
                     ALOGW("Received error %d (%s) instead of output", err, asString(err));
                     done = true;
@@ -366,95 +566,11 @@
         }
     } while (err == OK && !done);
 
-    if (err != OK || size <= 0 || outputFormat == NULL) {
-        ALOGE("Failed to decode thumbnail frame");
-        source->stop();
-        decoder->release();
-        return NULL;
-    }
-
-    ALOGV("successfully decoded video frame.");
-    sp<MediaCodecBuffer> videoFrameBuffer = outputBuffers.itemAt(index);
-
-    if (thumbNailTime >= 0) {
-        if (timeUs != thumbNailTime) {
-            AString mime;
-            CHECK(outputFormat->findString("mime", &mime));
-
-            ALOGV("thumbNailTime = %lld us, timeUs = %lld us, mime = %s",
-                    (long long)thumbNailTime, (long long)timeUs, mime.c_str());
-        }
-    }
-
-    int32_t width, height;
-    CHECK(outputFormat->findInt32("width", &width));
-    CHECK(outputFormat->findInt32("height", &height));
-
-    int32_t crop_left, crop_top, crop_right, crop_bottom;
-    if (!outputFormat->findRect("crop", &crop_left, &crop_top, &crop_right, &crop_bottom)) {
-        crop_left = crop_top = 0;
-        crop_right = width - 1;
-        crop_bottom = height - 1;
-    }
-
-    int32_t rotationAngle;
-    if (!trackMeta->findInt32(kKeyRotation, &rotationAngle)) {
-        rotationAngle = 0;  // By default, no rotation
-    }
-
-    VideoFrame *frame = new VideoFrame;
-    frame->mWidth = crop_right - crop_left + 1;
-    frame->mHeight = crop_bottom - crop_top + 1;
-    frame->mDisplayWidth = frame->mWidth;
-    frame->mDisplayHeight = frame->mHeight;
-    frame->mSize = frame->mWidth * frame->mHeight * 2;
-    frame->mData = new uint8_t[frame->mSize];
-    frame->mRotationAngle = rotationAngle;
-
-    int32_t sarWidth, sarHeight;
-    if (trackMeta->findInt32(kKeySARWidth, &sarWidth)
-            && trackMeta->findInt32(kKeySARHeight, &sarHeight)
-            && sarHeight != 0) {
-        frame->mDisplayWidth = (frame->mDisplayWidth * sarWidth) / sarHeight;
-    } else {
-        int32_t width, height;
-        if (trackMeta->findInt32(kKeyDisplayWidth, &width)
-                && trackMeta->findInt32(kKeyDisplayHeight, &height)
-                && frame->mDisplayWidth > 0 && frame->mDisplayHeight > 0
-                && width > 0 && height > 0) {
-            frame->mDisplayWidth = width;
-            frame->mDisplayHeight = height;
-        }
-    }
-
-    int32_t srcFormat;
-    CHECK(outputFormat->findInt32("color-format", &srcFormat));
-
-    ColorConverter converter((OMX_COLOR_FORMATTYPE)srcFormat, OMX_COLOR_Format16bitRGB565);
-
-    if (converter.isValid()) {
-        err = converter.convert(
-                (const uint8_t *)videoFrameBuffer->data(),
-                width, height,
-                crop_left, crop_top, crop_right, crop_bottom,
-                frame->mData,
-                frame->mWidth,
-                frame->mHeight,
-                0, 0, frame->mWidth - 1, frame->mHeight - 1);
-    } else {
-        ALOGE("Unable to convert from format 0x%08x to RGB565", srcFormat);
-
-        err = ERROR_UNSUPPORTED;
-    }
-
-    videoFrameBuffer.clear();
     source->stop();
-    decoder->releaseOutputBuffer(index);
     decoder->release();
 
     if (err != OK) {
-        ALOGE("Colorconverter failed to convert frame.");
-
+        ALOGE("failed to get video frame (err %d)", err);
         delete frame;
         frame = NULL;
     }
@@ -463,9 +579,10 @@
 }
 
 VideoFrame *StagefrightMetadataRetriever::getFrameAtTime(
-        int64_t timeUs, int option) {
+        int64_t timeUs, int option, int colorFormat, bool metaOnly) {
 
-    ALOGV("getFrameAtTime: %" PRId64 " us option: %d", timeUs, option);
+    ALOGV("getFrameAtTime: %" PRId64 " us option: %d colorFormat: %d, metaOnly: %d",
+            timeUs, option, colorFormat, metaOnly);
 
     if (mExtractor.get() == NULL) {
         ALOGV("no extractor.");
@@ -533,8 +650,8 @@
 
     for (size_t i = 0; i < matchingCodecs.size(); ++i) {
         const AString &componentName = matchingCodecs[i];
-        VideoFrame *frame =
-            extractVideoFrame(componentName, trackMeta, source, timeUs, option);
+        VideoFrame *frame = extractVideoFrame(
+                componentName, trackMeta, source, timeUs, option, colorFormat, metaOnly);
 
         if (frame != NULL) {
             return frame;
diff --git a/media/libstagefright/Utils.cpp b/media/libstagefright/Utils.cpp
index 0aea8e1..a3bda5d 100644
--- a/media/libstagefright/Utils.cpp
+++ b/media/libstagefright/Utils.cpp
@@ -1874,5 +1874,13 @@
     return result;
 }
 
+void MakeFourCCString(uint32_t x, char *s) {
+    s[0] = x >> 24;
+    s[1] = (x >> 16) & 0xff;
+    s[2] = (x >> 8) & 0xff;
+    s[3] = x & 0xff;
+    s[4] = '\0';
+}
+
 }  // namespace android
 
diff --git a/media/libstagefright/codecs/aacdec/Android.bp b/media/libstagefright/codecs/aacdec/Android.bp
index 6e04c1e..1daaf49 100644
--- a/media/libstagefright/codecs/aacdec/Android.bp
+++ b/media/libstagefright/codecs/aacdec/Android.bp
@@ -1,5 +1,9 @@
 cc_library_shared {
     name: "libstagefright_soft_aacdec",
+    vendor_available: true,
+    vndk: {
+        enabled: true,
+    },
 
     srcs: [
         "SoftAAC2.cpp",
diff --git a/media/libstagefright/codecs/aacenc/Android.bp b/media/libstagefright/codecs/aacenc/Android.bp
index 1a7ffca..4b478ea 100644
--- a/media/libstagefright/codecs/aacenc/Android.bp
+++ b/media/libstagefright/codecs/aacenc/Android.bp
@@ -1,5 +1,6 @@
 cc_library_static {
     name: "libstagefright_aacenc",
+    vendor_available: true,
 
     srcs: [
         "basic_op/basicop2.c",
@@ -111,6 +112,10 @@
 
 cc_library_shared {
     name: "libstagefright_soft_aacenc",
+    vendor_available: true,
+    vndk: {
+        enabled: true,
+    },
 
     srcs: ["SoftAACEncoder2.cpp"],
 
diff --git a/media/libstagefright/codecs/amrnb/common/Android.bp b/media/libstagefright/codecs/amrnb/common/Android.bp
index c5ac558..5177593 100644
--- a/media/libstagefright/codecs/amrnb/common/Android.bp
+++ b/media/libstagefright/codecs/amrnb/common/Android.bp
@@ -1,5 +1,9 @@
 cc_library_shared {
     name: "libstagefright_amrnb_common",
+    vendor_available: true,
+    vndk: {
+        enabled: true,
+    },
 
     srcs: [
         "src/add.cpp",
diff --git a/media/libstagefright/codecs/amrnb/dec/Android.bp b/media/libstagefright/codecs/amrnb/dec/Android.bp
index 996183b..a61fb57 100644
--- a/media/libstagefright/codecs/amrnb/dec/Android.bp
+++ b/media/libstagefright/codecs/amrnb/dec/Android.bp
@@ -1,5 +1,6 @@
 cc_library_static {
     name: "libstagefright_amrnbdec",
+    vendor_available: true,
 
     srcs: [
         "src/a_refl.cpp",
@@ -55,13 +56,20 @@
     //    ],
     //},
 
-    shared_libs: ["libstagefright_amrnb_common"],
+    shared_libs: [
+        "libstagefright_amrnb_common",
+        "liblog",
+    ],
 }
 
 //###############################################################################
 
 cc_library_shared {
     name: "libstagefright_soft_amrdec",
+    vendor_available: true,
+    vndk: {
+        enabled: true,
+    },
 
     srcs: ["SoftAMR.cpp"],
 
diff --git a/media/libstagefright/codecs/amrnb/enc/Android.bp b/media/libstagefright/codecs/amrnb/enc/Android.bp
index af0f8c2..04ed07f 100644
--- a/media/libstagefright/codecs/amrnb/enc/Android.bp
+++ b/media/libstagefright/codecs/amrnb/enc/Android.bp
@@ -1,5 +1,6 @@
 cc_library_static {
     name: "libstagefright_amrnbenc",
+    vendor_available: true,
 
     srcs: [
         "src/amrencode.cpp",
@@ -83,6 +84,10 @@
 
 cc_library_shared {
     name: "libstagefright_soft_amrnbenc",
+    vendor_available: true,
+    vndk: {
+        enabled: true,
+    },
 
     srcs: ["SoftAMRNBEncoder.cpp"],
 
diff --git a/media/libstagefright/codecs/amrwb/Android.bp b/media/libstagefright/codecs/amrwb/Android.bp
index e261c04..b932ccc 100644
--- a/media/libstagefright/codecs/amrwb/Android.bp
+++ b/media/libstagefright/codecs/amrwb/Android.bp
@@ -1,5 +1,6 @@
 cc_library_static {
     name: "libstagefright_amrwbdec",
+    vendor_available: true,
 
     srcs: [
         "src/agc2_amr_wb.cpp",
diff --git a/media/libstagefright/codecs/amrwbenc/Android.bp b/media/libstagefright/codecs/amrwbenc/Android.bp
index 5c5a122..d337cde 100644
--- a/media/libstagefright/codecs/amrwbenc/Android.bp
+++ b/media/libstagefright/codecs/amrwbenc/Android.bp
@@ -1,5 +1,6 @@
 cc_library_static {
     name: "libstagefright_amrwbenc",
+    vendor_available: true,
 
     srcs: [
         "src/autocorr.c",
@@ -144,6 +145,10 @@
 
 cc_library_shared {
     name: "libstagefright_soft_amrwbenc",
+    vendor_available: true,
+    vndk: {
+        enabled: true,
+    },
 
     srcs: ["SoftAMRWBEncoder.cpp"],
 
diff --git a/media/libstagefright/codecs/avcdec/Android.bp b/media/libstagefright/codecs/avcdec/Android.bp
index 6b996a7..3fa8d7f 100644
--- a/media/libstagefright/codecs/avcdec/Android.bp
+++ b/media/libstagefright/codecs/avcdec/Android.bp
@@ -1,5 +1,9 @@
 cc_library_shared {
     name: "libstagefright_soft_avcdec",
+    vendor_available: true,
+    vndk: {
+        enabled: true,
+    },
 
     static_libs: ["libavcdec"],
     srcs: ["SoftAVCDec.cpp"],
@@ -12,7 +16,7 @@
     ],
 
     shared_libs: [
-        "libmedia",
+        "libmedia_omx",
         "libstagefright_omx",
         "libstagefright_foundation",
         "libutils",
diff --git a/media/libstagefright/codecs/avcenc/Android.bp b/media/libstagefright/codecs/avcenc/Android.bp
index 49021a9..6c4311b 100644
--- a/media/libstagefright/codecs/avcenc/Android.bp
+++ b/media/libstagefright/codecs/avcenc/Android.bp
@@ -1,5 +1,9 @@
 cc_library_shared {
     name: "libstagefright_soft_avcenc",
+    vendor_available: true,
+    vndk: {
+        enabled: true,
+    },
 
     static_libs: ["libavcenc"],
     srcs: ["SoftAVCEnc.cpp"],
@@ -13,7 +17,7 @@
     ],
 
     shared_libs: [
-        "libmedia",
+        "libmedia_omx",
         "libstagefright_omx",
         "libutils",
         "liblog",
diff --git a/media/libstagefright/codecs/avcenc/SoftAVCEnc.cpp b/media/libstagefright/codecs/avcenc/SoftAVCEnc.cpp
index b1af17b..326207b 100644
--- a/media/libstagefright/codecs/avcenc/SoftAVCEnc.cpp
+++ b/media/libstagefright/codecs/avcenc/SoftAVCEnc.cpp
@@ -27,7 +27,6 @@
 #include <media/stagefright/MediaDefs.h>
 #include <media/stagefright/MediaErrors.h>
 #include <media/stagefright/MetaData.h>
-#include <media/stagefright/Utils.h>
 #include <OMX_IndexExt.h>
 #include <OMX_VideoExt.h>
 
diff --git a/media/libstagefright/codecs/common/Android.bp b/media/libstagefright/codecs/common/Android.bp
index 021e6af..3726922 100644
--- a/media/libstagefright/codecs/common/Android.bp
+++ b/media/libstagefright/codecs/common/Android.bp
@@ -1,5 +1,9 @@
 cc_library {
     name: "libstagefright_enc_common",
+    vendor_available: true,
+    vndk: {
+        enabled: true,
+    },
 
     srcs: ["cmnMemory.c"],
 
diff --git a/media/libstagefright/codecs/flac/dec/Android.bp b/media/libstagefright/codecs/flac/dec/Android.bp
index 6ac264d..5652594 100644
--- a/media/libstagefright/codecs/flac/dec/Android.bp
+++ b/media/libstagefright/codecs/flac/dec/Android.bp
@@ -1,5 +1,9 @@
 cc_library_shared {
     name: "libstagefright_soft_flacdec",
+    vendor_available: true,
+    vndk: {
+        enabled: true,
+    },
 
     srcs: [
         "SoftFlacDecoder.cpp",
diff --git a/media/libstagefright/codecs/flac/enc/Android.bp b/media/libstagefright/codecs/flac/enc/Android.bp
index d1413f6..6197157 100644
--- a/media/libstagefright/codecs/flac/enc/Android.bp
+++ b/media/libstagefright/codecs/flac/enc/Android.bp
@@ -22,7 +22,7 @@
     },
 
     shared_libs: [
-        "libmedia",
+        "libmedia_omx",
         "libstagefright_omx",
         "libstagefright_foundation",
         "libutils",
@@ -32,5 +32,9 @@
     static_libs: ["libFLAC"],
 
     name: "libstagefright_soft_flacenc",
+    vendor_available: true,
+    vndk: {
+        enabled: true,
+    },
 
 }
diff --git a/media/libstagefright/codecs/g711/dec/Android.bp b/media/libstagefright/codecs/g711/dec/Android.bp
index b78b689..0e6f468 100644
--- a/media/libstagefright/codecs/g711/dec/Android.bp
+++ b/media/libstagefright/codecs/g711/dec/Android.bp
@@ -1,5 +1,9 @@
 cc_library_shared {
     name: "libstagefright_soft_g711dec",
+    vendor_available: true,
+    vndk: {
+        enabled: true,
+    },
 
     srcs: ["SoftG711.cpp"],
 
@@ -9,7 +13,7 @@
     ],
 
     shared_libs: [
-        "libmedia",
+        "libmedia_omx",
         "libstagefright_omx",
         "libutils",
         "liblog",
diff --git a/media/libstagefright/codecs/gsm/dec/Android.bp b/media/libstagefright/codecs/gsm/dec/Android.bp
index 8e86ad6..7be86a4 100644
--- a/media/libstagefright/codecs/gsm/dec/Android.bp
+++ b/media/libstagefright/codecs/gsm/dec/Android.bp
@@ -1,5 +1,9 @@
 cc_library_shared {
     name: "libstagefright_soft_gsmdec",
+    vendor_available: true,
+    vndk: {
+        enabled: true,
+    },
 
     srcs: ["SoftGSM.cpp"],
 
@@ -23,7 +27,7 @@
     },
 
     shared_libs: [
-        "libmedia",
+        "libmedia_omx",
         "libstagefright_omx",
         "libutils",
         "liblog",
diff --git a/media/libstagefright/codecs/hevcdec/Android.bp b/media/libstagefright/codecs/hevcdec/Android.bp
index cd75c97..3fd1652 100644
--- a/media/libstagefright/codecs/hevcdec/Android.bp
+++ b/media/libstagefright/codecs/hevcdec/Android.bp
@@ -1,5 +1,9 @@
 cc_library_shared {
     name: "libstagefright_soft_hevcdec",
+    vendor_available: true,
+    vndk: {
+        enabled: true,
+    },
 
     static_libs: ["libhevcdec"],
     srcs: ["SoftHEVC.cpp"],
@@ -22,7 +26,7 @@
     },
 
     shared_libs: [
-        "libmedia",
+        "libmedia_omx",
         "libstagefright_omx",
         "libstagefright_foundation",
         "libutils",
diff --git a/media/libstagefright/codecs/m4v_h263/dec/Android.bp b/media/libstagefright/codecs/m4v_h263/dec/Android.bp
index 04ea075..2619131 100644
--- a/media/libstagefright/codecs/m4v_h263/dec/Android.bp
+++ b/media/libstagefright/codecs/m4v_h263/dec/Android.bp
@@ -1,5 +1,7 @@
 cc_library_static {
     name: "libstagefright_m4vh263dec",
+    vendor_available: true,
+    shared_libs: ["liblog"],
 
     srcs: [
         "src/adaptive_smooth_no_mmx.cpp",
@@ -66,6 +68,10 @@
 
 cc_library_shared {
     name: "libstagefright_soft_mpeg4dec",
+    vendor_available: true,
+    vndk: {
+        enabled: true,
+    },
 
     srcs: ["SoftMPEG4.cpp"],
 
@@ -87,7 +93,7 @@
     static_libs: ["libstagefright_m4vh263dec"],
 
     shared_libs: [
-        "libmedia",
+        "libmedia_omx",
         "libstagefright_omx",
         "libstagefright_foundation",
         "libutils",
diff --git a/media/libstagefright/codecs/m4v_h263/enc/Android.bp b/media/libstagefright/codecs/m4v_h263/enc/Android.bp
index da5b162..919b9d4 100644
--- a/media/libstagefright/codecs/m4v_h263/enc/Android.bp
+++ b/media/libstagefright/codecs/m4v_h263/enc/Android.bp
@@ -1,5 +1,6 @@
 cc_library_static {
     name: "libstagefright_m4vh263enc",
+    vendor_available: true,
 
     srcs: [
         "src/bitstream_io.cpp",
@@ -52,6 +53,10 @@
 
 cc_library_shared {
     name: "libstagefright_soft_mpeg4enc",
+    vendor_available: true,
+    vndk: {
+        enabled: true,
+    },
 
     srcs: ["SoftMPEG4Encoder.cpp"],
 
@@ -75,7 +80,7 @@
     static_libs: ["libstagefright_m4vh263enc"],
 
     shared_libs: [
-        "libmedia",
+        "libmedia_omx",
         "libstagefright_omx",
         "libutils",
         "liblog",
diff --git a/media/libstagefright/codecs/m4v_h263/enc/SoftMPEG4Encoder.cpp b/media/libstagefright/codecs/m4v_h263/enc/SoftMPEG4Encoder.cpp
index 6d4cb69..7b90a01 100644
--- a/media/libstagefright/codecs/m4v_h263/enc/SoftMPEG4Encoder.cpp
+++ b/media/libstagefright/codecs/m4v_h263/enc/SoftMPEG4Encoder.cpp
@@ -29,7 +29,6 @@
 #include <media/stagefright/MediaDefs.h>
 #include <media/stagefright/MediaErrors.h>
 #include <media/stagefright/MetaData.h>
-#include <media/stagefright/Utils.h>
 
 #include "SoftMPEG4Encoder.h"
 
diff --git a/media/libstagefright/codecs/mp3dec/Android.bp b/media/libstagefright/codecs/mp3dec/Android.bp
index 0d0a2c6..b2e8f9b 100644
--- a/media/libstagefright/codecs/mp3dec/Android.bp
+++ b/media/libstagefright/codecs/mp3dec/Android.bp
@@ -1,5 +1,6 @@
 cc_library_static {
     name: "libstagefright_mp3dec",
+    vendor_available: true,
 
     srcs: [
         "src/pvmp3_normalize.cpp",
@@ -77,6 +78,10 @@
 
 cc_library_shared {
     name: "libstagefright_soft_mp3dec",
+    vendor_available: true,
+    vndk: {
+        enabled: true,
+    },
 
     srcs: ["SoftMP3.cpp"],
 
@@ -102,7 +107,7 @@
     },
 
     shared_libs: [
-        "libmedia",
+        "libmedia_omx",
         "libstagefright_omx",
         "libstagefright_foundation",
         "libutils",
diff --git a/media/libstagefright/codecs/mpeg2dec/Android.bp b/media/libstagefright/codecs/mpeg2dec/Android.bp
index 0144581..ed51797 100644
--- a/media/libstagefright/codecs/mpeg2dec/Android.bp
+++ b/media/libstagefright/codecs/mpeg2dec/Android.bp
@@ -1,5 +1,9 @@
 cc_library_shared {
     name: "libstagefright_soft_mpeg2dec",
+    vendor_available: true,
+    vndk: {
+        enabled: true,
+    },
 
     static_libs: ["libmpeg2dec"],
     srcs: ["SoftMPEG2.cpp"],
@@ -12,7 +16,7 @@
     ],
 
     shared_libs: [
-        "libmedia",
+        "libmedia_omx",
         "libstagefright_omx",
         "libstagefright_foundation",
         "libutils",
diff --git a/media/libstagefright/codecs/on2/dec/Android.bp b/media/libstagefright/codecs/on2/dec/Android.bp
index c4242c2..249ab92 100644
--- a/media/libstagefright/codecs/on2/dec/Android.bp
+++ b/media/libstagefright/codecs/on2/dec/Android.bp
@@ -1,5 +1,9 @@
 cc_library_shared {
     name: "libstagefright_soft_vpxdec",
+    vendor_available: true,
+    vndk: {
+        enabled: true,
+    },
 
     srcs: ["SoftVPX.cpp"],
 
@@ -11,7 +15,7 @@
     static_libs: ["libvpx"],
 
     shared_libs: [
-        "libmedia",
+        "libmedia_omx",
         "libstagefright_omx",
         "libstagefright_foundation",
         "libutils",
diff --git a/media/libstagefright/codecs/on2/enc/Android.bp b/media/libstagefright/codecs/on2/enc/Android.bp
index 114c1be..0284719 100644
--- a/media/libstagefright/codecs/on2/enc/Android.bp
+++ b/media/libstagefright/codecs/on2/enc/Android.bp
@@ -1,5 +1,9 @@
 cc_library_shared {
     name: "libstagefright_soft_vpxenc",
+    vendor_available: true,
+    vndk: {
+        enabled: true,
+    },
 
     srcs: [
         "SoftVPXEncoder.cpp",
@@ -26,7 +30,7 @@
     static_libs: ["libvpx"],
 
     shared_libs: [
-        "libmedia",
+        "libmedia_omx",
         "libstagefright_omx",
         "libstagefright_foundation",
         "libutils",
diff --git a/media/libstagefright/codecs/opus/dec/Android.bp b/media/libstagefright/codecs/opus/dec/Android.bp
index 5d9c4c8..d7569a9 100644
--- a/media/libstagefright/codecs/opus/dec/Android.bp
+++ b/media/libstagefright/codecs/opus/dec/Android.bp
@@ -1,5 +1,9 @@
 cc_library_shared {
     name: "libstagefright_soft_opusdec",
+    vendor_available: true,
+    vndk: {
+        enabled: true,
+    },
 
     srcs: ["SoftOpus.cpp"],
 
@@ -10,13 +14,15 @@
 
     shared_libs: [
         "libopus",
-        "libmedia",
+        "libmedia_omx",
         "libstagefright_omx",
         "libstagefright_foundation",
         "libutils",
         "liblog",
     ],
 
+    cflags: ["-Werror"],
+
     sanitize: {
         misc_undefined: [
             "signed-integer-overflow",
diff --git a/media/libstagefright/codecs/opus/dec/SoftOpus.cpp b/media/libstagefright/codecs/opus/dec/SoftOpus.cpp
index d1f5e59..813004b 100644
--- a/media/libstagefright/codecs/opus/dec/SoftOpus.cpp
+++ b/media/libstagefright/codecs/opus/dec/SoftOpus.cpp
@@ -62,6 +62,7 @@
       mSeekPreRoll(0),
       mAnchorTimeUs(0),
       mNumFramesOutput(0),
+      mHaveEOS(false),
       mOutputPortSettingsChange(NONE) {
     initPorts();
     CHECK_EQ(initDecoder(), (status_t)OK);
@@ -384,7 +385,31 @@
     return static_cast<double>(ns) * kRate / 1000000000;
 }
 
-void SoftOpus::onQueueFilled(OMX_U32 portIndex) {
+void SoftOpus::handleEOS() {
+    List<BufferInfo *> &inQueue = getPortQueue(0);
+    List<BufferInfo *> &outQueue = getPortQueue(1);
+    CHECK(!inQueue.empty() && !outQueue.empty());
+
+    BufferInfo *outInfo = *outQueue.begin();
+    OMX_BUFFERHEADERTYPE *outHeader = outInfo->mHeader;
+    outHeader->nFilledLen = 0;
+    outHeader->nFlags = OMX_BUFFERFLAG_EOS;
+    mHaveEOS = true;
+
+    outQueue.erase(outQueue.begin());
+    outInfo->mOwnedByUs = false;
+    notifyFillBufferDone(outHeader);
+
+    BufferInfo *inInfo = *inQueue.begin();
+    OMX_BUFFERHEADERTYPE *inHeader = inInfo->mHeader;
+    inQueue.erase(inQueue.begin());
+    inInfo->mOwnedByUs = false;
+    notifyEmptyBufferDone(inHeader);
+
+    ++mInputBufferCount;
+}
+
+void SoftOpus::onQueueFilled(OMX_U32 /* portIndex */) {
     List<BufferInfo *> &inQueue = getPortQueue(0);
     List<BufferInfo *> &outQueue = getPortQueue(1);
 
@@ -392,104 +417,108 @@
         return;
     }
 
-    if (portIndex == 0 && mInputBufferCount < 3) {
-        BufferInfo *info = *inQueue.begin();
-        OMX_BUFFERHEADERTYPE *header = info->mHeader;
-
-        const uint8_t *data = header->pBuffer + header->nOffset;
-        size_t size = header->nFilledLen;
-
-        if (mInputBufferCount == 0) {
-            CHECK(mHeader == NULL);
-            mHeader = new OpusHeader();
-            memset(mHeader, 0, sizeof(*mHeader));
-            if (!ParseOpusHeader(data, size, mHeader)) {
-                ALOGV("Parsing Opus Header failed.");
-                notify(OMX_EventError, OMX_ErrorUndefined, 0, NULL);
-                return;
-            }
-
-            uint8_t channel_mapping[kMaxChannels] = {0};
-            if (mHeader->channels <= kMaxChannelsWithDefaultLayout) {
-                memcpy(&channel_mapping,
-                       kDefaultOpusChannelLayout,
-                       kMaxChannelsWithDefaultLayout);
-            } else {
-                memcpy(&channel_mapping,
-                       mHeader->stream_map,
-                       mHeader->channels);
-            }
-
-            int status = OPUS_INVALID_STATE;
-            mDecoder = opus_multistream_decoder_create(kRate,
-                                                       mHeader->channels,
-                                                       mHeader->num_streams,
-                                                       mHeader->num_coupled,
-                                                       channel_mapping,
-                                                       &status);
-            if (!mDecoder || status != OPUS_OK) {
-                ALOGV("opus_multistream_decoder_create failed status=%s",
-                      opus_strerror(status));
-                notify(OMX_EventError, OMX_ErrorUndefined, 0, NULL);
-                return;
-            }
-            status =
-                opus_multistream_decoder_ctl(mDecoder,
-                                             OPUS_SET_GAIN(mHeader->gain_db));
-            if (status != OPUS_OK) {
-                ALOGV("Failed to set OPUS header gain; status=%s",
-                      opus_strerror(status));
-                notify(OMX_EventError, OMX_ErrorUndefined, 0, NULL);
-                return;
-            }
-        } else if (mInputBufferCount == 1) {
-            mCodecDelay = ns_to_samples(
-                              *(reinterpret_cast<int64_t*>(header->pBuffer +
-                                                           header->nOffset)),
-                              kRate);
-            mSamplesToDiscard = mCodecDelay;
-        } else {
-            mSeekPreRoll = ns_to_samples(
-                               *(reinterpret_cast<int64_t*>(header->pBuffer +
-                                                            header->nOffset)),
-                               kRate);
-            notify(OMX_EventPortSettingsChanged, 1, 0, NULL);
-            mOutputPortSettingsChange = AWAITING_DISABLED;
-        }
-
-        inQueue.erase(inQueue.begin());
-        info->mOwnedByUs = false;
-        notifyEmptyBufferDone(header);
-        ++mInputBufferCount;
-        return;
-    }
-
-    while (!inQueue.empty() && !outQueue.empty()) {
+    while (!mHaveEOS && !inQueue.empty() && !outQueue.empty()) {
         BufferInfo *inInfo = *inQueue.begin();
         OMX_BUFFERHEADERTYPE *inHeader = inInfo->mHeader;
 
-        // Ignore CSD re-submissions.
-        if (inHeader->nFlags & OMX_BUFFERFLAG_CODECCONFIG) {
+        if (mInputBufferCount < 3) {
+            const uint8_t *data = inHeader->pBuffer + inHeader->nOffset;
+            size_t size = inHeader->nFilledLen;
+
+            if ((inHeader->nFlags & OMX_BUFFERFLAG_EOS) && size == 0) {
+                handleEOS();
+                return;
+            }
+
+            if (mInputBufferCount == 0) {
+                CHECK(mHeader == NULL);
+                mHeader = new OpusHeader();
+                memset(mHeader, 0, sizeof(*mHeader));
+                if (!ParseOpusHeader(data, size, mHeader)) {
+                    ALOGV("Parsing Opus Header failed.");
+                    notify(OMX_EventError, OMX_ErrorUndefined, 0, NULL);
+                    return;
+                }
+
+                uint8_t channel_mapping[kMaxChannels] = {0};
+                if (mHeader->channels <= kMaxChannelsWithDefaultLayout) {
+                    memcpy(&channel_mapping,
+                           kDefaultOpusChannelLayout,
+                           kMaxChannelsWithDefaultLayout);
+                } else {
+                    memcpy(&channel_mapping,
+                           mHeader->stream_map,
+                           mHeader->channels);
+                }
+
+                int status = OPUS_INVALID_STATE;
+                mDecoder = opus_multistream_decoder_create(kRate,
+                                                           mHeader->channels,
+                                                           mHeader->num_streams,
+                                                           mHeader->num_coupled,
+                                                           channel_mapping,
+                                                           &status);
+                if (!mDecoder || status != OPUS_OK) {
+                    ALOGV("opus_multistream_decoder_create failed status=%s",
+                          opus_strerror(status));
+                    notify(OMX_EventError, OMX_ErrorUndefined, 0, NULL);
+                    return;
+                }
+                status =
+                    opus_multistream_decoder_ctl(mDecoder,
+                                                 OPUS_SET_GAIN(mHeader->gain_db));
+                if (status != OPUS_OK) {
+                    ALOGV("Failed to set OPUS header gain; status=%s",
+                          opus_strerror(status));
+                    notify(OMX_EventError, OMX_ErrorUndefined, 0, NULL);
+                    return;
+                }
+            } else if (mInputBufferCount == 1) {
+                mCodecDelay = ns_to_samples(
+                                  *(reinterpret_cast<int64_t*>(inHeader->pBuffer +
+                                                               inHeader->nOffset)),
+                                  kRate);
+                mSamplesToDiscard = mCodecDelay;
+            } else {
+                mSeekPreRoll = ns_to_samples(
+                                   *(reinterpret_cast<int64_t*>(inHeader->pBuffer +
+                                                                inHeader->nOffset)),
+                                   kRate);
+                notify(OMX_EventPortSettingsChanged, 1, 0, NULL);
+                mOutputPortSettingsChange = AWAITING_DISABLED;
+            }
+
+            if (inHeader->nFlags & OMX_BUFFERFLAG_EOS) {
+                handleEOS();
+                return;
+            }
+
             inQueue.erase(inQueue.begin());
             inInfo->mOwnedByUs = false;
             notifyEmptyBufferDone(inHeader);
-            return;
+            ++mInputBufferCount;
+
+            continue;
+        }
+
+        // Ignore CSD re-submissions.
+        if (mInputBufferCount >= 3 && (inHeader->nFlags & OMX_BUFFERFLAG_CODECCONFIG)) {
+            if (inHeader->nFlags & OMX_BUFFERFLAG_EOS) {
+                handleEOS();
+                return;
+            }
+
+            inQueue.erase(inQueue.begin());
+            inInfo->mOwnedByUs = false;
+            notifyEmptyBufferDone(inHeader);
+            continue;
         }
 
         BufferInfo *outInfo = *outQueue.begin();
         OMX_BUFFERHEADERTYPE *outHeader = outInfo->mHeader;
 
         if ((inHeader->nFlags & OMX_BUFFERFLAG_EOS) && inHeader->nFilledLen == 0) {
-            inQueue.erase(inQueue.begin());
-            inInfo->mOwnedByUs = false;
-            notifyEmptyBufferDone(inHeader);
-
-            outHeader->nFilledLen = 0;
-            outHeader->nFlags = OMX_BUFFERFLAG_EOS;
-
-            outQueue.erase(outQueue.begin());
-            outInfo->mOwnedByUs = false;
-            notifyFillBufferDone(outHeader);
+            handleEOS();
             return;
         }
 
@@ -539,7 +568,6 @@
         }
 
         outHeader->nFilledLen = numFrames * sizeof(int16_t) * mHeader->channels;
-        outHeader->nFlags = 0;
 
         outHeader->nTimeStamp = mAnchorTimeUs +
                                 (mNumFramesOutput * 1000000ll) /
@@ -548,22 +576,20 @@
         mNumFramesOutput += numFrames;
 
         if (inHeader->nFlags & OMX_BUFFERFLAG_EOS) {
-            inHeader->nFilledLen = 0;
+            outHeader->nFlags = OMX_BUFFERFLAG_EOS;
+            mHaveEOS = true;
         } else {
-            inInfo->mOwnedByUs = false;
-            inQueue.erase(inQueue.begin());
-            inInfo = NULL;
-            notifyEmptyBufferDone(inHeader);
-            inHeader = NULL;
+            outHeader->nFlags = 0;
         }
 
+        inInfo->mOwnedByUs = false;
+        inQueue.erase(inQueue.begin());
+        notifyEmptyBufferDone(inHeader);
+        ++mInputBufferCount;
+
         outInfo->mOwnedByUs = false;
         outQueue.erase(outQueue.begin());
-        outInfo = NULL;
         notifyFillBufferDone(outHeader);
-        outHeader = NULL;
-
-        ++mInputBufferCount;
     }
 }
 
@@ -575,6 +601,7 @@
         opus_multistream_decoder_ctl(mDecoder, OPUS_RESET_STATE);
         mAnchorTimeUs = 0;
         mSamplesToDiscard = mSeekPreRoll;
+        mHaveEOS = false;
     }
 }
 
@@ -591,6 +618,7 @@
     }
 
     mOutputPortSettingsChange = NONE;
+    mHaveEOS = false;
 }
 
 void SoftOpus::onPortEnableCompleted(OMX_U32 portIndex, bool enabled) {
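
For context on the refactor above: in OMX, an input buffer flagged OMX_BUFFERFLAG_EOS must still be returned to its owner, and the component must emit exactly one final output buffer carrying the EOS flag. handleEOS() centralizes that hand-off so every early-return path behaves the same way, and mHaveEOS keeps the work loop from decoding past it. A minimal standalone sketch of the contract, with hypothetical types rather than the OMX/AOSP API:

    #include <cstdint>
    #include <cstdio>

    // Hypothetical stand-ins for the OMX buffer-header fields used here.
    enum : uint32_t { BUFFERFLAG_EOS = 1 };
    struct Buffer { uint32_t filledLen; uint32_t flags; };

    // Mirrors what handleEOS() does: emit one empty, EOS-flagged output
    // buffer, consume the pending input buffer, and latch a flag that the
    // onQueueFilled() work loop checks before decoding anything else.
    void drainEOS(Buffer &in, Buffer &out, bool &haveEOS) {
        out.filledLen = 0;           // no more decoded data to deliver
        out.flags = BUFFERFLAG_EOS;  // downstream sees end-of-stream once
        in.filledLen = 0;            // input is fully consumed
        haveEOS = true;
    }

    int main() {
        Buffer in{0, BUFFERFLAG_EOS}, out{0, 0};
        bool haveEOS = false;
        drainEOS(in, out, haveEOS);
        printf("out.flags=%u haveEOS=%d\n", out.flags, haveEOS);
        return 0;
    }
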
diff --git a/media/libstagefright/codecs/opus/dec/SoftOpus.h b/media/libstagefright/codecs/opus/dec/SoftOpus.h
index fab925d..91cafa1 100644
--- a/media/libstagefright/codecs/opus/dec/SoftOpus.h
+++ b/media/libstagefright/codecs/opus/dec/SoftOpus.h
@@ -75,6 +75,7 @@
     int64_t mSamplesToDiscard;
     int64_t mAnchorTimeUs;
     int64_t mNumFramesOutput;
+    bool mHaveEOS;
 
     enum {
         NONE,
@@ -85,6 +86,7 @@
     void initPorts();
     status_t initDecoder();
     bool isConfigured() const;
+    void handleEOS();
 
     DISALLOW_EVIL_CONSTRUCTORS(SoftOpus);
 };
diff --git a/media/libstagefright/codecs/raw/Android.bp b/media/libstagefright/codecs/raw/Android.bp
index c64027b..1bd75c6 100644
--- a/media/libstagefright/codecs/raw/Android.bp
+++ b/media/libstagefright/codecs/raw/Android.bp
@@ -1,5 +1,9 @@
 cc_library_shared {
     name: "libstagefright_soft_rawdec",
+    vendor_available: true,
+    vndk: {
+        enabled: true,
+    },
 
     srcs: ["SoftRaw.cpp"],
 
diff --git a/media/libstagefright/codecs/vorbis/dec/Android.bp b/media/libstagefright/codecs/vorbis/dec/Android.bp
index 1a4de60..fedfb67 100644
--- a/media/libstagefright/codecs/vorbis/dec/Android.bp
+++ b/media/libstagefright/codecs/vorbis/dec/Android.bp
@@ -1,5 +1,9 @@
 cc_library_shared {
     name: "libstagefright_soft_vorbisdec",
+    vendor_available: true,
+    vndk: {
+        enabled: true,
+    },
 
     srcs: ["SoftVorbis.cpp"],
 
@@ -10,7 +14,7 @@
 
     shared_libs: [
         "libvorbisidec",
-        "libmedia",
+        "libmedia_omx",
         "libstagefright_omx",
         "libstagefright_foundation",
         "libutils",
diff --git a/media/libstagefright/codecs/vorbis/dec/SoftVorbis.cpp b/media/libstagefright/codecs/vorbis/dec/SoftVorbis.cpp
index 96e01b6..8912f8a 100644
--- a/media/libstagefright/codecs/vorbis/dec/SoftVorbis.cpp
+++ b/media/libstagefright/codecs/vorbis/dec/SoftVorbis.cpp
@@ -309,7 +309,33 @@
     oggpack_readinit(bits, ref);
 }
 
-void SoftVorbis::onQueueFilled(OMX_U32 portIndex) {
+void SoftVorbis::handleEOS() {
+    List<BufferInfo *> &inQueue = getPortQueue(0);
+    List<BufferInfo *> &outQueue = getPortQueue(1);
+
+    CHECK(!inQueue.empty() && !outQueue.empty());
+
+    mSawInputEos = true;
+
+    BufferInfo *outInfo = *outQueue.begin();
+    OMX_BUFFERHEADERTYPE *outHeader = outInfo->mHeader;
+    outHeader->nFilledLen = 0;
+    outHeader->nFlags = OMX_BUFFERFLAG_EOS;
+
+    outQueue.erase(outQueue.begin());
+    outInfo->mOwnedByUs = false;
+    notifyFillBufferDone(outHeader);
+    mSignalledOutputEos = true;
+
+    BufferInfo *inInfo = *inQueue.begin();
+    OMX_BUFFERHEADERTYPE *inHeader = inInfo->mHeader;
+    inQueue.erase(inQueue.begin());
+    inInfo->mOwnedByUs = false;
+    notifyEmptyBufferDone(inHeader);
+    ++mInputBufferCount;
+}
+
+void SoftVorbis::onQueueFilled(OMX_U32 /* portIndex */) {
     List<BufferInfo *> &inQueue = getPortQueue(0);
     List<BufferInfo *> &outQueue = getPortQueue(1);
 
@@ -317,69 +343,7 @@
         return;
     }
 
-    if (portIndex == 0 && mInputBufferCount < 2) {
-        BufferInfo *info = *inQueue.begin();
-        OMX_BUFFERHEADERTYPE *header = info->mHeader;
-
-        const uint8_t *data = header->pBuffer + header->nOffset;
-        size_t size = header->nFilledLen;
-        if (size < 7) {
-            ALOGE("Too small input buffer: %zu bytes", size);
-            android_errorWriteLog(0x534e4554, "27833616");
-            notify(OMX_EventError, OMX_ErrorUndefined, 0, NULL);
-            mSignalledError = true;
-            return;
-        }
-
-        ogg_buffer buf;
-        ogg_reference ref;
-        oggpack_buffer bits;
-
-        makeBitReader(
-                (const uint8_t *)data + 7, size - 7,
-                &buf, &ref, &bits);
-
-        if (mInputBufferCount == 0) {
-            CHECK(mVi == NULL);
-            mVi = new vorbis_info;
-            vorbis_info_init(mVi);
-
-            int ret = _vorbis_unpack_info(mVi, &bits);
-            if (ret != 0) {
-                notify(OMX_EventError, OMX_ErrorUndefined, ret, NULL);
-                mSignalledError = true;
-                return;
-            }
-        } else {
-            int ret = _vorbis_unpack_books(mVi, &bits);
-            if (ret != 0) {
-                notify(OMX_EventError, OMX_ErrorUndefined, ret, NULL);
-                mSignalledError = true;
-                return;
-            }
-
-            CHECK(mState == NULL);
-            mState = new vorbis_dsp_state;
-            CHECK_EQ(0, vorbis_dsp_init(mState, mVi));
-
-            if (mVi->rate != kDefaultSamplingRate ||
-                    mVi->channels != kDefaultChannelCount) {
-                ALOGV("vorbis: rate/channels changed: %ld/%d", mVi->rate, mVi->channels);
-                notify(OMX_EventPortSettingsChanged, 1, 0, NULL);
-                mOutputPortSettingsChange = AWAITING_DISABLED;
-            }
-        }
-
-        inQueue.erase(inQueue.begin());
-        info->mOwnedByUs = false;
-        notifyEmptyBufferDone(header);
-
-        ++mInputBufferCount;
-
-        return;
-    }
-
-    while ((!inQueue.empty() || (mSawInputEos && !mSignalledOutputEos)) && !outQueue.empty()) {
+    while (!mSignalledOutputEos && (!inQueue.empty() || mSawInputEos) && !outQueue.empty()) {
         BufferInfo *inInfo = NULL;
         OMX_BUFFERHEADERTYPE *inHeader = NULL;
         if (!inQueue.empty()) {
@@ -393,6 +357,73 @@
         int32_t numPageSamples = 0;
 
         if (inHeader) {
+            if (mInputBufferCount < 2) {
+                const uint8_t *data = inHeader->pBuffer + inHeader->nOffset;
+                size_t size = inHeader->nFilledLen;
+
+                if ((inHeader->nFlags & OMX_BUFFERFLAG_EOS) && size == 0) {
+                    handleEOS();
+                    return;
+                }
+
+                if (size < 7) {
+                    ALOGE("Too small input buffer: %zu bytes", size);
+                    android_errorWriteLog(0x534e4554, "27833616");
+                    notify(OMX_EventError, OMX_ErrorUndefined, 0, NULL);
+                    mSignalledError = true;
+                    return;
+                }
+
+                ogg_buffer buf;
+                ogg_reference ref;
+                oggpack_buffer bits;
+
+                makeBitReader((const uint8_t *)data + 7, size - 7, &buf, &ref, &bits);
+
+                if (mInputBufferCount == 0) {
+                    CHECK(mVi == NULL);
+                    mVi = new vorbis_info;
+                    vorbis_info_init(mVi);
+
+                    int ret = _vorbis_unpack_info(mVi, &bits);
+                    if (ret != 0) {
+                        notify(OMX_EventError, OMX_ErrorUndefined, ret, NULL);
+                        mSignalledError = true;
+                        return;
+                    }
+                } else {
+                    int ret = _vorbis_unpack_books(mVi, &bits);
+                    if (ret != 0) {
+                        notify(OMX_EventError, OMX_ErrorUndefined, ret, NULL);
+                        mSignalledError = true;
+                        return;
+                    }
+
+                    CHECK(mState == NULL);
+                    mState = new vorbis_dsp_state;
+                    CHECK_EQ(0, vorbis_dsp_init(mState, mVi));
+
+                    if (mVi->rate != kDefaultSamplingRate ||
+                            mVi->channels != kDefaultChannelCount) {
+                        ALOGV("vorbis: rate/channels changed: %ld/%d", mVi->rate, mVi->channels);
+                        notify(OMX_EventPortSettingsChanged, 1, 0, NULL);
+                        mOutputPortSettingsChange = AWAITING_DISABLED;
+                    }
+                }
+
+                if (inHeader->nFlags & OMX_BUFFERFLAG_EOS) {
+                    handleEOS();
+                    return;
+                }
+
+                inQueue.erase(inQueue.begin());
+                inInfo->mOwnedByUs = false;
+                notifyEmptyBufferDone(inHeader);
+                ++mInputBufferCount;
+
+                continue;
+            }
+
             if (inHeader->nFlags & OMX_BUFFERFLAG_EOS) {
                 mSawInputEos = true;
             }
@@ -406,8 +437,7 @@
                     return;
                 }
                 memcpy(&numPageSamples,
-                       inHeader->pBuffer
-                        + inHeader->nOffset + inHeader->nFilledLen - 4,
+                       inHeader->pBuffer + inHeader->nOffset + inHeader->nFilledLen - 4,
                        sizeof(numPageSamples));
 
                 if (inHeader->nOffset == 0) {
@@ -446,6 +476,14 @@
         int numFrames = 0;
 
         outHeader->nFlags = 0;
+
+        if (mState == nullptr || mVi == nullptr) {
+            notify(OMX_EventError, OMX_ErrorStreamCorrupt, 0, NULL);
+            mSignalledError = true;
+            ALOGE("onQueueFilled, input does not have CSD");
+            return;
+        }
+
         int err = vorbis_dsp_synthesis(mState, &pack, 1);
         if (err != 0) {
             // FIXME temporary workaround for log spam
@@ -495,18 +533,13 @@
         if (inHeader) {
             inInfo->mOwnedByUs = false;
             inQueue.erase(inQueue.begin());
-            inInfo = NULL;
             notifyEmptyBufferDone(inHeader);
-            inHeader = NULL;
+            ++mInputBufferCount;
         }
 
         outInfo->mOwnedByUs = false;
         outQueue.erase(outQueue.begin());
-        outInfo = NULL;
         notifyFillBufferDone(outHeader);
-        outHeader = NULL;
-
-        ++mInputBufferCount;
     }
 }
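
The 7-byte skip in makeBitReader(data + 7, size - 7, ...) reflects standard Vorbis header framing: every header packet opens with a one-byte packet type (1 = identification, 3 = comment, 5 = setup) followed by the six ASCII bytes "vorbis", which is also why inputs shorter than 7 bytes are rejected above. A standalone check of that framing (illustrative only, not AOSP code):

    #include <cstddef>
    #include <cstdint>
    #include <cstdio>
    #include <cstring>

    // True if the buffer looks like a Vorbis header packet of the given type.
    static bool isVorbisHeader(const uint8_t *data, size_t size, uint8_t type) {
        return size >= 7 && data[0] == type && memcmp(data + 1, "vorbis", 6) == 0;
    }

    int main() {
        const uint8_t ident[7] = {1, 'v', 'o', 'r', 'b', 'i', 's'};
        printf("%d\n", isVorbisHeader(ident, sizeof(ident), 1));  // prints 1
        return 0;
    }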
 
diff --git a/media/libstagefright/codecs/vorbis/dec/SoftVorbis.h b/media/libstagefright/codecs/vorbis/dec/SoftVorbis.h
index 52d1632..5ff8ea4 100644
--- a/media/libstagefright/codecs/vorbis/dec/SoftVorbis.h
+++ b/media/libstagefright/codecs/vorbis/dec/SoftVorbis.h
@@ -72,6 +72,7 @@
     void initPorts();
     status_t initDecoder();
     bool isConfigured() const;
+    void handleEOS();
 
     DISALLOW_EVIL_CONSTRUCTORS(SoftVorbis);
 };
diff --git a/media/libstagefright/colorconversion/ColorConverter.cpp b/media/libstagefright/colorconversion/ColorConverter.cpp
index 3ca7cc0..0982006 100644
--- a/media/libstagefright/colorconversion/ColorConverter.cpp
+++ b/media/libstagefright/colorconversion/ColorConverter.cpp
@@ -23,6 +23,7 @@
 #include <media/stagefright/MediaErrors.h>
 
 #include "libyuv/convert_from.h"
+#include "libyuv/video_common.h"
 
 #define USE_LIBYUV
 
@@ -41,17 +42,17 @@
 }
 
 bool ColorConverter::isValid() const {
-    if (mDstFormat != OMX_COLOR_Format16bitRGB565) {
-        return false;
-    }
-
     switch (mSrcFormat) {
         case OMX_COLOR_FormatYUV420Planar:
+            return mDstFormat == OMX_COLOR_Format16bitRGB565
+                    || mDstFormat == OMX_COLOR_Format32BitRGBA8888
+                    || mDstFormat == OMX_COLOR_Format32bitBGRA8888;
+
         case OMX_COLOR_FormatCbYCrY:
         case OMX_QCOM_COLOR_FormatYVU420SemiPlanar:
         case OMX_COLOR_FormatYUV420SemiPlanar:
         case OMX_TI_COLOR_FormatYUV420PackedSemiPlanar:
-            return true;
+            return mDstFormat == OMX_COLOR_Format16bitRGB565;
 
         default:
             return false;
@@ -62,14 +63,43 @@
         void *bits,
         size_t width, size_t height,
         size_t cropLeft, size_t cropTop,
-        size_t cropRight, size_t cropBottom)
+        size_t cropRight, size_t cropBottom,
+        OMX_COLOR_FORMATTYPE colorFormat)
     : mBits(bits),
+      mColorFormat(colorFormat),
       mWidth(width),
       mHeight(height),
       mCropLeft(cropLeft),
       mCropTop(cropTop),
       mCropRight(cropRight),
       mCropBottom(cropBottom) {
+    switch (mColorFormat) {
+    case OMX_COLOR_Format16bitRGB565:
+        mBpp = 2;
+        mStride = 2 * mWidth;
+        break;
+
+    case OMX_COLOR_Format32bitBGRA8888:
+    case OMX_COLOR_Format32BitRGBA8888:
+        mBpp = 4;
+        mStride = 4 * mWidth;
+        break;
+
+    case OMX_COLOR_FormatYUV420Planar:
+    case OMX_COLOR_FormatCbYCrY:
+    case OMX_QCOM_COLOR_FormatYVU420SemiPlanar:
+    case OMX_COLOR_FormatYUV420SemiPlanar:
+    case OMX_TI_COLOR_FormatYUV420PackedSemiPlanar:
+        mBpp = 1;
+        mStride = mWidth;
+        break;
+
+    default:
+        ALOGE("Unsupported color format %d", mColorFormat);
+        mBpp = 1;
+        mStride = mWidth;
+        break;
+    }
 }
 
 size_t ColorConverter::BitmapParams::cropWidth() const {
@@ -89,19 +119,15 @@
         size_t dstWidth, size_t dstHeight,
         size_t dstCropLeft, size_t dstCropTop,
         size_t dstCropRight, size_t dstCropBottom) {
-    if (mDstFormat != OMX_COLOR_Format16bitRGB565) {
-        return ERROR_UNSUPPORTED;
-    }
-
     BitmapParams src(
             const_cast<void *>(srcBits),
             srcWidth, srcHeight,
-            srcCropLeft, srcCropTop, srcCropRight, srcCropBottom);
+            srcCropLeft, srcCropTop, srcCropRight, srcCropBottom, mSrcFormat);
 
     BitmapParams dst(
             dstBits,
             dstWidth, dstHeight,
-            dstCropLeft, dstCropTop, dstCropRight, dstCropBottom);
+            dstCropLeft, dstCropTop, dstCropRight, dstCropBottom, mDstFormat);
 
     status_t err;
 
@@ -212,26 +238,104 @@
         return ERROR_UNSUPPORTED;
     }
 
-    uint16_t *dst_ptr = (uint16_t *)dst.mBits
-        + dst.mCropTop * dst.mWidth + dst.mCropLeft;
+    uint8_t *dst_ptr = (uint8_t *)dst.mBits
+        + dst.mCropTop * dst.mStride + dst.mCropLeft * dst.mBpp;
 
     const uint8_t *src_y =
-        (const uint8_t *)src.mBits + src.mCropTop * src.mWidth + src.mCropLeft;
+        (const uint8_t *)src.mBits + src.mCropTop * src.mStride + src.mCropLeft;
 
     const uint8_t *src_u =
-        (const uint8_t *)src_y + src.mWidth * src.mHeight
-        + src.mCropTop * (src.mWidth / 2) + src.mCropLeft / 2;
+        (const uint8_t *)src.mBits + src.mStride * src.mHeight
+        + (src.mCropTop / 2) * (src.mStride / 2) + (src.mCropLeft / 2);
 
     const uint8_t *src_v =
-        src_u + (src.mWidth / 2) * (src.mHeight / 2);
+        src_u + (src.mStride / 2) * (src.mHeight / 2);
 
+    switch (mDstFormat) {
+    case OMX_COLOR_Format16bitRGB565:
+        libyuv::I420ToRGB565(src_y, src.mStride, src_u, src.mStride / 2, src_v, src.mStride / 2,
+                (uint8 *)dst_ptr, dst.mStride, src.cropWidth(), src.cropHeight());
+        break;
 
-    libyuv::I420ToRGB565(src_y, src.mWidth, src_u, src.mWidth / 2, src_v, src.mWidth / 2,
-            (uint8 *)dst_ptr, dst.mWidth * 2, dst.mWidth, dst.mHeight);
+    case OMX_COLOR_Format32BitRGBA8888:
+        libyuv::ConvertFromI420(src_y, src.mStride, src_u, src.mStride / 2, src_v, src.mStride / 2,
+                (uint8 *)dst_ptr, dst.mStride, src.cropWidth(), src.cropHeight(), libyuv::FOURCC_ABGR);
+        break;
+
+    case OMX_COLOR_Format32bitBGRA8888:
+        libyuv::ConvertFromI420(src_y, src.mStride, src_u, src.mStride / 2, src_v, src.mStride / 2,
+                (uint8 *)dst_ptr, dst.mStride, src.cropWidth(), src.cropHeight(), libyuv::FOURCC_ARGB);
+        break;
+
+    default:
+        return ERROR_UNSUPPORTED;
+    }
 
     return OK;
 }
 
+void ColorConverter::writeToDst(
+        void *dst_ptr, uint8_t *kAdjustedClip, bool uncropped,
+        signed r1, signed g1, signed b1,
+        signed r2, signed g2, signed b2) {
+    switch (mDstFormat) {
+    case OMX_COLOR_Format16bitRGB565:
+    {
+        uint32_t rgb1 =
+            ((kAdjustedClip[r1] >> 3) << 11)
+            | ((kAdjustedClip[g1] >> 2) << 5)
+            | (kAdjustedClip[b1] >> 3);
+
+        if (uncropped) {
+            uint32_t rgb2 =
+                ((kAdjustedClip[r2] >> 3) << 11)
+                | ((kAdjustedClip[g2] >> 2) << 5)
+                | (kAdjustedClip[b2] >> 3);
+
+            *(uint32_t *)dst_ptr = (rgb2 << 16) | rgb1;
+        } else {
+            *(uint16_t *)dst_ptr = rgb1;
+        }
+        break;
+    }
+    case OMX_COLOR_Format32BitRGBA8888:
+    {
+        ((uint32_t *)dst_ptr)[0] =
+                (kAdjustedClip[r1])
+                | (kAdjustedClip[g1] << 8)
+                | (kAdjustedClip[b1] << 16)
+                | (0xFF << 24);
+
+        if (uncropped) {
+            ((uint32_t *)dst_ptr)[1] =
+                    (kAdjustedClip[r2])
+                    | (kAdjustedClip[g2] << 8)
+                    | (kAdjustedClip[b2] << 16)
+                    | (0xFF << 24);
+        }
+        break;
+    }
+    case OMX_COLOR_Format32bitBGRA8888:
+    {
+        ((uint32_t *)dst_ptr)[0] =
+                (kAdjustedClip[b1])
+                | (kAdjustedClip[g1] << 8)
+                | (kAdjustedClip[r1] << 16)
+                | (0xFF << 24);
+
+        if (uncropped) {
+            ((uint32_t *)dst_ptr)[1] =
+                    (kAdjustedClip[b2])
+                    | (kAdjustedClip[g2] << 8)
+                    | (kAdjustedClip[r2] << 16)
+                    | (0xFF << 24);
+        }
+        break;
+    }
+    default:
+        break;
+    }
+}
+
 status_t ColorConverter::convertYUV420Planar(
         const BitmapParams &src, const BitmapParams &dst) {
     if (!((src.mCropLeft & 1) == 0
@@ -242,18 +346,18 @@
 
     uint8_t *kAdjustedClip = initClip();
 
-    uint16_t *dst_ptr = (uint16_t *)dst.mBits
-        + dst.mCropTop * dst.mWidth + dst.mCropLeft;
+    uint8_t *dst_ptr = (uint8_t *)dst.mBits
+        + dst.mCropTop * dst.mStride + dst.mCropLeft * dst.mBpp;
 
     const uint8_t *src_y =
-        (const uint8_t *)src.mBits + src.mCropTop * src.mWidth + src.mCropLeft;
+        (const uint8_t *)src.mBits + src.mCropTop * src.mStride + src.mCropLeft;
 
     const uint8_t *src_u =
-        (const uint8_t *)src_y + src.mWidth * src.mHeight
-        + src.mCropTop * (src.mWidth / 2) + src.mCropLeft / 2;
+        (const uint8_t *)src.mBits + src.mStride * src.mHeight
+        + (src.mCropTop / 2) * (src.mStride / 2) + src.mCropLeft / 2;
 
     const uint8_t *src_v =
-        src_u + (src.mWidth / 2) * (src.mHeight / 2);
+        src_u + (src.mStride / 2) * (src.mHeight / 2);
 
     for (size_t y = 0; y < src.cropHeight(); ++y) {
         for (size_t x = 0; x < src.cropWidth(); x += 2) {
@@ -296,31 +400,19 @@
             signed g2 = (tmp2 + v_g + u_g) / 256;
             signed r2 = (tmp2 + v_r) / 256;
 
-            uint32_t rgb1 =
-                ((kAdjustedClip[r1] >> 3) << 11)
-                | ((kAdjustedClip[g1] >> 2) << 5)
-                | (kAdjustedClip[b1] >> 3);
-
-            uint32_t rgb2 =
-                ((kAdjustedClip[r2] >> 3) << 11)
-                | ((kAdjustedClip[g2] >> 2) << 5)
-                | (kAdjustedClip[b2] >> 3);
-
-            if (x + 1 < src.cropWidth()) {
-                *(uint32_t *)(&dst_ptr[x]) = (rgb2 << 16) | rgb1;
-            } else {
-                dst_ptr[x] = rgb1;
-            }
+            bool uncropped = x + 1 < src.cropWidth();
+            writeToDst(dst_ptr + x * dst.mBpp,
+                    kAdjustedClip, uncropped, r1, g1, b1, r2, g2, b2);
         }
 
-        src_y += src.mWidth;
+        src_y += src.mStride;
 
         if (y & 1) {
-            src_u += src.mWidth / 2;
-            src_v += src.mWidth / 2;
+            src_u += src.mStride / 2;
+            src_v += src.mStride / 2;
         }
 
-        dst_ptr += dst.mWidth;
+        dst_ptr += dst.mStride;
     }
 
     return OK;
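
The stride rewrite above matters because I420 chroma is subsampled 2x2: the U and V planes use stride/2, and a crop at (cropLeft, cropTop) lands at (cropLeft/2, cropTop/2) in them. The old code offset the chroma planes from the already-cropped Y pointer and advanced them by cropTop full rows instead of cropTop/2 half-stride rows, which broke any conversion with a non-zero top crop. A small sketch of the corrected plane addressing (hypothetical helper, not the AOSP API):

    #include <cstddef>
    #include <cstdio>

    struct I420Offsets { size_t y, u, v; };

    // Byte offsets of the cropped Y/U/V plane origins inside an I420 buffer,
    // matching the arithmetic the diff installs in ColorConverter.
    static I420Offsets i420CropOffsets(size_t stride, size_t height,
                                       size_t cropLeft, size_t cropTop) {
        size_t y = cropTop * stride + cropLeft;
        size_t u = stride * height                   // whole Y plane
                 + (cropTop / 2) * (stride / 2)      // half-res rows
                 + cropLeft / 2;                     // half-res columns
        size_t v = u + (stride / 2) * (height / 2);  // one chroma plane later
        return {y, u, v};
    }

    int main() {
        I420Offsets o = i420CropOffsets(1920, 1080, 16, 16);
        printf("y=%zu u=%zu v=%zu\n", o.y, o.u, o.v);
        return 0;
    }
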
diff --git a/media/libstagefright/data/media_codecs_google_video.xml b/media/libstagefright/data/media_codecs_google_video.xml
index ce164a2..a127843 100644
--- a/media/libstagefright/data/media_codecs_google_video.xml
+++ b/media/libstagefright/data/media_codecs_google_video.xml
@@ -34,20 +34,21 @@
             <Feature name="adaptive-playback" />
         </MediaCodec>
         <MediaCodec name="OMX.google.h264.decoder" type="video/avc">
-            <!-- profiles and levels:  ProfileHigh : Level41 -->
-            <Limit name="size" min="16x16" max="1920x1088" />
+            <!-- profiles and levels:  ProfileHigh : Level52 -->
+            <Limit name="size" min="2x2" max="4096x4096" />
             <Limit name="alignment" value="2x2" />
             <Limit name="block-size" value="16x16" />
-            <Limit name="blocks-per-second" range="1-244800" />
-            <Limit name="bitrate" range="1-12000000" />
+            <Limit name="block-count" range="1-32768" /> <!-- max 4096x2048 -->
+            <Limit name="blocks-per-second" range="1-1966080" />
+            <Limit name="bitrate" range="1-48000000" />
             <Feature name="adaptive-playback" />
         </MediaCodec>
         <MediaCodec name="OMX.google.hevc.decoder" type="video/hevc">
             <!-- profiles and levels:  ProfileMain : MainTierLevel51 -->
-            <Limit name="size" min="2x2" max="2048x2048" />
+            <Limit name="size" min="2x2" max="4096x4096" />
             <Limit name="alignment" value="2x2" />
             <Limit name="block-size" value="8x8" />
-            <Limit name="block-count" range="1-139264" />
+            <Limit name="block-count" range="1-196608" /> <!-- max 4096x3072 -->
             <Limit name="blocks-per-second" range="1-2000000" />
             <Limit name="bitrate" range="1-10000000" />
             <Feature name="adaptive-playback" />
@@ -56,6 +57,7 @@
             <Limit name="size" min="2x2" max="2048x2048" />
             <Limit name="alignment" value="2x2" />
             <Limit name="block-size" value="16x16" />
+            <Limit name="block-count" range="1-16384" />
             <Limit name="blocks-per-second" range="1-1000000" />
             <Limit name="bitrate" range="1-40000000" />
             <Feature name="adaptive-playback" />
@@ -64,6 +66,7 @@
             <Limit name="size" min="2x2" max="2048x2048" />
             <Limit name="alignment" value="2x2" />
             <Limit name="block-size" value="16x16" />
+            <Limit name="block-count" range="1-16384" />
             <Limit name="blocks-per-second" range="1-500000" />
             <Limit name="bitrate" range="1-40000000" />
             <Feature name="adaptive-playback" />
@@ -79,10 +82,11 @@
         </MediaCodec>
         <MediaCodec name="OMX.google.h264.encoder" type="video/avc">
             <!-- profiles and levels:  ProfileBaseline : Level41 -->
-            <Limit name="size" min="16x16" max="1920x1088" />
+            <Limit name="size" min="16x16" max="2048x2048" />
             <Limit name="alignment" value="2x2" />
             <Limit name="block-size" value="16x16" />
-            <Limit name="blocks-per-second" range="1-244800" />
+            <Limit name="block-count" range="1-8192" /> <!-- max 2048x1024 -->
+            <Limit name="blocks-per-second" range="1-245760" />
             <Limit name="bitrate" range="1-12000000" />
             <Feature name="intra-refresh" />
         </MediaCodec>
@@ -98,6 +102,9 @@
             <!-- profiles and levels:  ProfileMain : Level_Version0-3 -->
             <Limit name="size" min="2x2" max="2048x2048" />
             <Limit name="alignment" value="2x2" />
+            <Limit name="block-size" value="16x16" />
+            <!-- 2016 devices can encode at about 10fps at this block count -->
+            <Limit name="block-count" range="1-16384" />
             <Limit name="bitrate" range="1-40000000" />
             <Feature name="bitrate-modes" value="VBR,CBR" />
         </MediaCodec>
@@ -105,6 +112,9 @@
             <!-- profiles and levels:  ProfileMain : Level_Version0-3 -->
             <Limit name="size" min="2x2" max="2048x2048" />
             <Limit name="alignment" value="2x2" />
+            <Limit name="block-size" value="16x16" />
+            <!-- 2016 devices can encode at about 8fps at this block count -->
+            <Limit name="block-count" range="1-3600" /> <!-- max 1280x720 -->
             <Limit name="bitrate" range="1-40000000" />
             <Feature name="bitrate-modes" value="VBR,CBR" />
         </MediaCodec>
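
The new block-count limits carry real information beyond the size limits: with a block-size of BxB, block-count = ceil(w/B) * ceil(h/B), so the AVC decoder's range of 1-32768 at 16x16 blocks caps frames at 4096x2048 even though the size limit alone would allow 4096x4096. The arithmetic, spelled out:

    #include <cstdio>

    // Number of BxB codec blocks needed to cover a w x h frame.
    static int blockCount(int w, int h, int b) {
        return ((w + b - 1) / b) * ((h + b - 1) / b);
    }

    int main() {
        printf("%d\n", blockCount(4096, 2048, 16));  // 32768 (AVC, 16x16)
        printf("%d\n", blockCount(4096, 3072, 8));   // 196608 (HEVC, 8x8)
        return 0;
    }
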
diff --git a/media/libstagefright/data/media_codecs_google_video_le.xml b/media/libstagefright/data/media_codecs_google_video_le.xml
index 034a038..d7c6570 100644
--- a/media/libstagefright/data/media_codecs_google_video_le.xml
+++ b/media/libstagefright/data/media_codecs_google_video_le.xml
@@ -34,22 +34,22 @@
             <Feature name="adaptive-playback" />
         </MediaCodec>
         <MediaCodec name="OMX.google.h264.decoder" type="video/avc">
-            <!-- profiles and levels:  ProfileBaseline : Level51 -->
+            <!-- profiles and levels:  ProfileHigh : Level51 -->
             <Limit name="size" min="2x2" max="2048x2048" />
             <Limit name="alignment" value="2x2" />
             <Limit name="block-size" value="16x16" />
-            <Limit name="block-count" range="1-8160" />
-            <Limit name="blocks-per-second" range="1-489600" />
+            <Limit name="block-count" range="1-16384" />
+            <Limit name="blocks-per-second" range="1-491520" />
             <Limit name="bitrate" range="1-40000000" />
             <Feature name="adaptive-playback" />
         </MediaCodec>
         <MediaCodec name="OMX.google.hevc.decoder" type="video/hevc">
             <!-- profiles and levels:  ProfileMain : MainTierLevel51 -->
-            <Limit name="size" min="2x2" max="1280x1280" />
+            <Limit name="size" min="2x2" max="2048x2048" />
             <Limit name="alignment" value="2x2" />
             <Limit name="block-size" value="8x8" />
-            <Limit name="block-count" range="1-139264" />
-            <Limit name="blocks-per-second" range="1-432000" />
+            <Limit name="block-count" range="1-65536" />
+            <Limit name="blocks-per-second" range="1-491520" />
             <Limit name="bitrate" range="1-5000000" />
             <Feature name="adaptive-playback" />
         </MediaCodec>
@@ -57,7 +57,7 @@
             <Limit name="size" min="2x2" max="2048x2048" />
             <Limit name="alignment" value="2x2" />
             <Limit name="block-size" value="16x16" />
-            <Limit name="block-count" range="1-8160" />
+            <Limit name="block-count" range="1-8192" /> <!-- max 2048x1024 -->
             <Limit name="blocks-per-second" range="1-500000" />
             <Limit name="bitrate" range="1-40000000" />
             <Feature name="adaptive-playback" />
@@ -66,7 +66,7 @@
             <Limit name="size" min="2x2" max="1280x1280" />
             <Limit name="alignment" value="2x2" />
             <Limit name="block-size" value="16x16" />
-            <Limit name="block-count" range="1-3600" />
+            <Limit name="block-count" range="1-3600" /> <!-- max 1280x720 -->
             <Limit name="blocks-per-second" range="1-108000" />
             <Limit name="bitrate" range="1-5000000" />
             <Feature name="adaptive-playback" />
@@ -81,12 +81,14 @@
             <Limit name="bitrate" range="1-128000" />
         </MediaCodec>
         <MediaCodec name="OMX.google.h264.encoder" type="video/avc">
-            <!-- profiles and levels:  ProfileBaseline : Level2 -->
-            <Limit name="size" min="16x16" max="896x896" />
-            <Limit name="alignment" value="16x16" />
+            <!-- profiles and levels:  ProfileBaseline : Level3 -->
+            <Limit name="size" min="16x16" max="1808x1808" />
+            <Limit name="alignment" value="2x2" />
             <Limit name="block-size" value="16x16" />
-            <Limit name="blocks-per-second" range="1-11880" />
+            <Limit name="block-count" range="1-1620" />
+            <Limit name="blocks-per-second" range="1-40500" />
             <Limit name="bitrate" range="1-2000000" />
+            <Feature name="intra-refresh" />
         </MediaCodec>
         <MediaCodec name="OMX.google.mpeg4.encoder" type="video/mp4v-es">
             <!-- profiles and levels:  ProfileCore : Level2 -->
@@ -100,7 +102,8 @@
             <!-- profiles and levels:  ProfileMain : Level_Version0-3 -->
             <Limit name="size" min="2x2" max="1280x1280" />
             <Limit name="alignment" value="2x2" />
-            <Limit name="block-count" range="1-3600" />
+            <Limit name="block-size" value="16x16" />
+            <Limit name="block-count" range="1-3600" /> <!-- max 1280x720 -->
             <Limit name="bitrate" range="1-20000000" />
             <Feature name="bitrate-modes" value="VBR,CBR" />
         </MediaCodec>
diff --git a/media/libstagefright/flac/dec/Android.bp b/media/libstagefright/flac/dec/Android.bp
index 284c25f..1b9fe0f 100644
--- a/media/libstagefright/flac/dec/Android.bp
+++ b/media/libstagefright/flac/dec/Android.bp
@@ -1,5 +1,9 @@
 cc_library_shared {
     name: "libstagefright_flacdec",
+    vendor_available: true,
+    vndk: {
+        enabled: true,
+    },
 
     srcs: [
         "FLACDecoder.cpp",
@@ -31,4 +35,5 @@
         "libstagefright_foundation",
         "libutils",
     ],
+    header_libs: ["libmedia_headers"],
 }
diff --git a/media/libstagefright/foundation/Android.bp b/media/libstagefright/foundation/Android.bp
index 9108ce1..221af1d 100644
--- a/media/libstagefright/foundation/Android.bp
+++ b/media/libstagefright/foundation/Android.bp
@@ -7,6 +7,9 @@
 cc_library_shared {
     name: "libstagefright_foundation",
     vendor_available: true,
+    vndk: {
+        enabled: true,
+    },
     include_dirs: [
         "frameworks/av/include",
         "frameworks/native/include",
diff --git a/media/libstagefright/foundation/base64.cpp b/media/libstagefright/foundation/base64.cpp
index 7da7db9..cc89064 100644
--- a/media/libstagefright/foundation/base64.cpp
+++ b/media/libstagefright/foundation/base64.cpp
@@ -78,8 +78,7 @@
         accum = (accum << 6) | value;
 
         if (((i + 1) % 4) == 0) {
-            out[j++] = (accum >> 16);
-
+            if (j < outLen) { out[j++] = (accum >> 16); }
             if (j < outLen) { out[j++] = (accum >> 8) & 0xff; }
             if (j < outLen) { out[j++] = accum & 0xff; }
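
The added j < outLen guard closes a one-byte overflow: a padded base64 quantum decodes to one or two bytes rather than three, so a correctly sized output buffer can be smaller than 3 * (inLen / 4), and all three stores need the bound check, not just the last two. The usual size rule, as a standalone sketch (not the AOSP helper):

    #include <cstdio>
    #include <cstring>

    // Decoded size of a padded base64 string; 0 if the length is invalid.
    static size_t decodedSize(const char *in) {
        size_t n = strlen(in);
        if (n == 0 || n % 4 != 0) return 0;
        size_t pad = (in[n - 1] == '=') + (in[n - 2] == '=');
        return (n / 4) * 3 - pad;
    }

    int main() {
        printf("%zu\n", decodedSize("TWFu"));  // 3 ("Man")
        printf("%zu\n", decodedSize("TWE="));  // 2 ("Ma")
        printf("%zu\n", decodedSize("TQ=="));  // 1 ("M")
        return 0;
    }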
 
diff --git a/media/libstagefright/include/ItemTable.h b/media/libstagefright/include/ItemTable.h
new file mode 100644
index 0000000..5a6af5e
--- /dev/null
+++ b/media/libstagefright/include/ItemTable.h
@@ -0,0 +1,98 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ITEM_TABLE_H_
+#define ITEM_TABLE_H_
+
+#include <set>
+
+#include <media/stagefright/foundation/ADebug.h>
+#include <utils/KeyedVector.h>
+#include <utils/RefBase.h>
+
+namespace android {
+
+class DataSource;
+class MetaData;
+
+namespace heif {
+
+struct AssociationEntry;
+struct ImageItem;
+struct ItemLoc;
+struct ItemInfo;
+struct ItemProperty;
+struct ItemReference;
+
+/*
+ * ItemTable keeps track of all image items (including coded images, grids and
+ * tiles) inside a HEIF still image (ISO/IEC FDIS 23008-12.2:2017(E)).
+ */
+
+class ItemTable : public RefBase {
+public:
+    explicit ItemTable(const sp<DataSource> &source);
+
+    status_t parse(uint32_t type, off64_t offset, size_t size);
+
+    bool isValid() { return mImageItemsValid; }
+    sp<MetaData> getImageMeta();
+    uint32_t countImages() const;
+    status_t findPrimaryImage(uint32_t *imageIndex);
+    status_t findThumbnail(uint32_t *thumbnailIndex);
+    status_t getImageOffsetAndSize(
+            uint32_t *imageIndex, off64_t *offset, size_t *size);
+
+protected:
+    ~ItemTable();
+
+private:
+    sp<DataSource> mDataSource;
+
+    KeyedVector<uint32_t, ItemLoc> mItemLocs;
+    Vector<ItemInfo> mItemInfos;
+    Vector<AssociationEntry> mAssociations;
+    Vector<sp<ItemProperty> > mItemProperties;
+    Vector<sp<ItemReference> > mItemReferences;
+
+    uint32_t mPrimaryItemId;
+    off64_t mIdatOffset;
+    size_t mIdatSize;
+
+    std::set<uint32_t> mRequiredBoxes;
+    std::set<uint32_t> mBoxesSeen;
+
+    bool mImageItemsValid;
+    uint32_t mCurrentImageIndex;
+    KeyedVector<uint32_t, ImageItem> mItemIdToImageMap;
+
+    status_t parseIlocBox(off64_t offset, size_t size);
+    status_t parseIinfBox(off64_t offset, size_t size);
+    status_t parsePitmBox(off64_t offset, size_t size);
+    status_t parseIprpBox(off64_t offset, size_t size);
+    status_t parseIdatBox(off64_t offset, size_t size);
+    status_t parseIrefBox(off64_t offset, size_t size);
+
+    void attachProperty(const AssociationEntry &association);
+    status_t buildImageItemsIfPossible(uint32_t type);
+
+    DISALLOW_EVIL_CONSTRUCTORS(ItemTable);
+};
+
+} // namespace heif
+} // namespace android
+
+#endif  // ITEM_TABLE_H_
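
A hypothetical caller-side sketch of the new ItemTable API, using only the methods declared above. The real consumer is MPEG4Extractor (see the header change below); the include paths, box-feeding convention, and error handling here are assumptions, not AOSP code:

    #include <media/stagefright/MetaData.h>
    #include "include/ItemTable.h"

    using namespace android;

    // Locate the primary HEIF image and return its metadata, or NULL.
    static sp<MetaData> readPrimaryImage(
            const sp<DataSource> &source,
            uint32_t boxType, off64_t offset, size_t size) {
        sp<heif::ItemTable> table = new heif::ItemTable(source);
        // In reality parse() is invoked once per relevant metadata box.
        if (table->parse(boxType, offset, size) != OK || !table->isValid()) {
            return NULL;  // boxes missing or malformed
        }
        uint32_t index = 0;
        if (table->findPrimaryImage(&index) != OK) {
            return NULL;
        }
        off64_t imageOffset;
        size_t imageSize;
        if (table->getImageOffsetAndSize(&index, &imageOffset, &imageSize) != OK) {
            return NULL;
        }
        return table->getImageMeta();  // e.g. kKeyGridWidth/kKeyGridHeight
    }
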
diff --git a/media/libstagefright/include/MPEG4Extractor.h b/media/libstagefright/include/MPEG4Extractor.h
index f847119..4a4c538 100644
--- a/media/libstagefright/include/MPEG4Extractor.h
+++ b/media/libstagefright/include/MPEG4Extractor.h
@@ -28,11 +28,14 @@
 #include <utils/String8.h>
 
 namespace android {
-
 struct AMessage;
 class DataSource;
 class SampleTable;
 class String8;
+namespace heif {
+class ItemTable;
+}
+using heif::ItemTable;
 
 struct SidxEntry {
     size_t mSize;
@@ -97,6 +100,7 @@
     status_t mInitCheck;
     uint32_t mHeaderTimescale;
     bool mIsQT;
+    bool mIsHEIF;
 
     Track *mFirstTrack, *mLastTrack;
 
@@ -134,6 +138,8 @@
     SINF *mFirstSINF;
 
     bool mIsDrm;
+    sp<ItemTable> mItemTable;
+
     status_t parseDrmSINF(off64_t *offset, off64_t data_offset);
 
     status_t parseTrackHeader(off64_t data_offset, off64_t data_size);
diff --git a/media/libstagefright/include/StagefrightMetadataRetriever.h b/media/libstagefright/include/StagefrightMetadataRetriever.h
index b7ac718..277eb3e 100644
--- a/media/libstagefright/include/StagefrightMetadataRetriever.h
+++ b/media/libstagefright/include/StagefrightMetadataRetriever.h
@@ -38,9 +38,9 @@
             const KeyedVector<String8, String8> *headers);
 
     virtual status_t setDataSource(int fd, int64_t offset, int64_t length);
-    virtual status_t setDataSource(const sp<DataSource>& source);
+    virtual status_t setDataSource(const sp<DataSource>& source, const char *mime);
 
-    virtual VideoFrame *getFrameAtTime(int64_t timeUs, int option);
+    virtual VideoFrame *getFrameAtTime(int64_t timeUs, int option, int colorFormat, bool metaOnly);
     virtual MediaAlbumArt *extractAlbumArt();
     virtual const char *extractMetadata(int keyCode);
 
diff --git a/media/libstagefright/include/media/stagefright/ColorConverter.h b/media/libstagefright/include/media/stagefright/ColorConverter.h
index 270c809..7ac9b37 100644
--- a/media/libstagefright/include/media/stagefright/ColorConverter.h
+++ b/media/libstagefright/include/media/stagefright/ColorConverter.h
@@ -49,14 +49,17 @@
                 void *bits,
                 size_t width, size_t height,
                 size_t cropLeft, size_t cropTop,
-                size_t cropRight, size_t cropBottom);
+                size_t cropRight, size_t cropBottom,
+                OMX_COLOR_FORMATTYPE colorFormat);
 
         size_t cropWidth() const;
         size_t cropHeight() const;
 
         void *mBits;
+        OMX_COLOR_FORMATTYPE mColorFormat;
         size_t mWidth, mHeight;
         size_t mCropLeft, mCropTop, mCropRight, mCropBottom;
+        size_t mBpp, mStride;
     };
 
     OMX_COLOR_FORMATTYPE mSrcFormat, mDstFormat;
@@ -82,6 +85,10 @@
     status_t convertTIYUV420PackedSemiPlanar(
             const BitmapParams &src, const BitmapParams &dst);
 
+    void writeToDst(void *dst_ptr, uint8_t *kAdjustedClip, bool uncropped,
+            signed r1, signed g1, signed b1,
+            signed r2, signed g2, signed b2);
+
     ColorConverter(const ColorConverter &);
     ColorConverter &operator=(const ColorConverter &);
 };
diff --git a/media/libstagefright/include/media/stagefright/DataSource.h b/media/libstagefright/include/media/stagefright/DataSource.h
index 63eccea..bd863ba 100644
--- a/media/libstagefright/include/media/stagefright/DataSource.h
+++ b/media/libstagefright/include/media/stagefright/DataSource.h
@@ -73,6 +73,11 @@
     bool getUInt32(off64_t offset, uint32_t *x);
     bool getUInt64(off64_t offset, uint64_t *x);
 
+    // Read either a uint<N> or a uint<2N> into a uint<2N>_t; "size" is the integer's width in bytes.
+    bool getUInt16Var(off64_t offset, uint16_t *x, size_t size);
+    bool getUInt32Var(off64_t offset, uint32_t *x, size_t size);
+    bool getUInt64Var(off64_t offset, uint64_t *x, size_t size);
+
     // Reads in "count" entries of type T into vector *x.
     // Returns true if "count" entries can be read.
     // If fewer than "count" entries can be read, return false. In this case,
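
A presumed reading of the new getUInt*Var() helpers (their implementations are not part of this hunk): a read of the wide type succeeds when size matches either the narrow or the wide width, zero-extending big-endian data in the narrow case, and fails otherwise. Sketched against a plain buffer instead of a DataSource offset:

    #include <cstdint>
    #include <cstdio>

    // Illustrative analogue of getUInt32Var(): accept a 2- or 4-byte
    // big-endian integer and widen it into a uint32_t.
    static bool getUInt32Var(const uint8_t *buf, uint32_t *x, size_t size) {
        if (size == 2) {
            *x = (uint32_t(buf[0]) << 8) | buf[1];
            return true;
        }
        if (size == 4) {
            *x = (uint32_t(buf[0]) << 24) | (uint32_t(buf[1]) << 16)
               | (uint32_t(buf[2]) << 8)  |  uint32_t(buf[3]);
            return true;
        }
        return false;  // unsupported width
    }

    int main() {
        const uint8_t two[] = {0x12, 0x34};
        uint32_t v = 0;
        if (getUInt32Var(two, &v, sizeof(two))) printf("0x%x\n", v);  // 0x1234
        return 0;
    }
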
diff --git a/media/libstagefright/include/media/stagefright/MetaData.h b/media/libstagefright/include/media/stagefright/MetaData.h
index 9676b97..6cfde9c 100644
--- a/media/libstagefright/include/media/stagefright/MetaData.h
+++ b/media/libstagefright/include/media/stagefright/MetaData.h
@@ -38,6 +38,8 @@
     kKeyDisplayHeight     = 'dHgt',  // int32_t, display/presentation
     kKeySARWidth          = 'sarW',  // int32_t, sampleAspectRatio width
     kKeySARHeight         = 'sarH',  // int32_t, sampleAspectRatio height
+    kKeyThumbnailWidth    = 'thbW',  // int32_t, thumbnail width
+    kKeyThumbnailHeight   = 'thbH',  // int32_t, thumbnail height
 
     // a rectangle, if absent assumed to be (0, 0, width - 1, height - 1)
     kKeyCropRect          = 'crop',
@@ -58,6 +60,7 @@
     kKeyAACProfile        = 'aacp',  // int32_t
     kKeyAVCC              = 'avcc',  // raw data
     kKeyHVCC              = 'hvcc',  // raw data
+    kKeyThumbnailHVCC     = 'thvc',  // raw data
     kKeyD263              = 'd263',  // raw data
     kKeyVorbisInfo        = 'vinf',  // raw data
     kKeyVorbisBooks       = 'vboo',  // raw data
@@ -209,6 +212,10 @@
                                    // color Matrix, value defined by ColorAspects.MatrixCoeffs.
     kKeyTemporalLayerId  = 'iLyr', // int32_t, temporal layer-id. 0-based (0 => base layer)
     kKeyTemporalLayerCount = 'cLyr', // int32_t, number of temporal layers encoded
+
+    kKeyGridWidth        = 'grdW', // int32_t, HEIF grid width
+    kKeyGridHeight       = 'grdH', // int32_t, HEIF grid height
+    kKeyIccProfile       = 'prof', // raw data, ICC profile data
 };
 
 enum {
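
Note: the new keys follow MetaData's existing FourCC convention and are set
and read through the usual setInt32/findInt32 accessors. A caller-side sketch
with made-up values:

    // Writer: record the HEIF tile grid on the track's metadata.
    sp<MetaData> meta = new MetaData;
    meta->setInt32(kKeyGridWidth, 512);
    meta->setInt32(kKeyGridHeight, 512);

    // Reader: absence of the key means the image is not tiled.
    int32_t gridW;
    if (meta->findInt32(kKeyGridWidth, &gridW)) {
        // reassemble the output image from gridW-pixel-wide tiles
    }
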
diff --git a/media/libstagefright/include/media/stagefright/Utils.h b/media/libstagefright/include/media/stagefright/Utils.h
index 88a416a..77cbd4c 100644
--- a/media/libstagefright/include/media/stagefright/Utils.h
+++ b/media/libstagefright/include/media/stagefright/Utils.h
@@ -95,7 +95,7 @@
 void readFromAMessage(const sp<AMessage> &msg, BufferingSettings *buffering /* nonnull */);
 
 AString nameForFd(int fd);
-
+void MakeFourCCString(uint32_t x, char *s);
 }  // namespace android
 
 #endif  // UTILS_H_
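
Note: MakeFourCCString is now exported from Utils.h. A minimal sketch of the
conventional implementation (stagefright packs FourCCs high byte first):

    void MakeFourCCString(uint32_t x, char *s) {
        s[0] = x >> 24;
        s[1] = (x >> 16) & 0xff;
        s[2] = (x >> 8) & 0xff;
        s[3] = x & 0xff;
        s[4] = '\0';  // caller must supply at least 5 bytes
    }

    // e.g. MakeFourCCString('hvcc', buf) yields the printable tag "hvcc".
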
diff --git a/media/libstagefright/omx/Android.bp b/media/libstagefright/omx/Android.bp
index 3027cdd..bb05740 100644
--- a/media/libstagefright/omx/Android.bp
+++ b/media/libstagefright/omx/Android.bp
@@ -1,6 +1,9 @@
 cc_library_shared {
     name: "libstagefright_omx",
     vendor_available: true,
+    vndk: {
+        enabled: true,
+    },
 
     srcs: [
         "FrameDropper.cpp",
diff --git a/media/libstagefright/omx/GraphicBufferSource.cpp b/media/libstagefright/omx/GraphicBufferSource.cpp
index caf3ac8..109460d 100644
--- a/media/libstagefright/omx/GraphicBufferSource.cpp
+++ b/media/libstagefright/omx/GraphicBufferSource.cpp
@@ -733,7 +733,7 @@
         } else {
             // snap to nearest capture point
             int64_t nFrames = std::llround(
-                    (timeUs - mPrevCaptureUs) * mCaptureFps);
+                    (timeUs - mPrevCaptureUs) * mCaptureFps / 1000000);
             if (nFrames <= 0) {
                 // skip this frame as it's too close to previous capture
                 ALOGV("skipping frame, timeUs %lld", static_cast<long long>(timeUs));
@@ -741,9 +741,9 @@
             }
             mFrameCount += nFrames;
             mPrevCaptureUs = mBaseCaptureUs + std::llround(
-                    mFrameCount / mCaptureFps);
+                    mFrameCount * 1000000 / mCaptureFps);
             mPrevFrameUs = mBaseFrameUs + std::llround(
-                    mFrameCount / mFps);
+                    mFrameCount * 1000000 / mFps);
         }
 
         ALOGV("timeUs %lld, captureUs %lld, frameUs %lld",
diff --git a/media/libstagefright/omx/SimpleSoftOMXComponent.cpp b/media/libstagefright/omx/SimpleSoftOMXComponent.cpp
index 09e6d75..87c2411 100644
--- a/media/libstagefright/omx/SimpleSoftOMXComponent.cpp
+++ b/media/libstagefright/omx/SimpleSoftOMXComponent.cpp
@@ -213,6 +213,13 @@
     Mutex::Autolock autoLock(mLock);
     CHECK_LT(portIndex, mPorts.size());
 
+    PortInfo *port = &mPorts.editItemAt(portIndex);
+    if (size < port->mDef.nBufferSize) {
+        ALOGE("b/63522430, Buffer size is too small.");
+        android_errorWriteLog(0x534e4554, "63522430");
+        return OMX_ErrorBadParameter;
+    }
+
     *header = new OMX_BUFFERHEADERTYPE;
     (*header)->nSize = sizeof(OMX_BUFFERHEADERTYPE);
     (*header)->nVersion.s.nVersionMajor = 1;
@@ -235,8 +242,6 @@
     (*header)->nOutputPortIndex = portIndex;
     (*header)->nInputPortIndex = portIndex;
 
-    PortInfo *port = &mPorts.editItemAt(portIndex);
-
     CHECK(mState == OMX_StateLoaded || port->mDef.bEnabled == OMX_FALSE);
 
     CHECK_LT(port->mBuffers.size(), port->mDef.nBufferCountActual);
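
Note: the 0x534e4554 tag passed to android_errorWriteLog above is the
standard marker for security-relevant log events; it is simply "SNET" in
ASCII:

    #include <cstdint>
    #include <cstdio>

    int main() {
        uint32_t tag = 0x534e4554;  // 0x53='S' 0x4e='N' 0x45='E' 0x54='T'
        printf("%c%c%c%c\n", (char)(tag >> 24), (char)((tag >> 16) & 0xff),
                (char)((tag >> 8) & 0xff), (char)(tag & 0xff));  // prints SNET
    }
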
diff --git a/media/libstagefright/omx/SoftVideoDecoderOMXComponent.cpp b/media/libstagefright/omx/SoftVideoDecoderOMXComponent.cpp
index 24ed981..cb811a0 100644
--- a/media/libstagefright/omx/SoftVideoDecoderOMXComponent.cpp
+++ b/media/libstagefright/omx/SoftVideoDecoderOMXComponent.cpp
@@ -182,11 +182,11 @@
 
 
 uint32_t SoftVideoDecoderOMXComponent::outputBufferWidth() {
-    return mIsAdaptive ? mAdaptiveMaxWidth : mWidth;
+    return max(mIsAdaptive ? mAdaptiveMaxWidth : 0, mWidth);
 }
 
 uint32_t SoftVideoDecoderOMXComponent::outputBufferHeight() {
-    return mIsAdaptive ? mAdaptiveMaxHeight : mHeight;
+    return max(mIsAdaptive ? mAdaptiveMaxHeight : 0, mHeight);
 }
 
 void SoftVideoDecoderOMXComponent::handlePortSettingsChange(
diff --git a/media/libstagefright/xmlparser/Android.bp b/media/libstagefright/xmlparser/Android.bp
index 8e22f9e..ab893de 100644
--- a/media/libstagefright/xmlparser/Android.bp
+++ b/media/libstagefright/xmlparser/Android.bp
@@ -1,6 +1,9 @@
 cc_library_shared {
     name: "libstagefright_xmlparser",
     vendor_available: true,
+    vndk: {
+        enabled: true,
+    },
 
     srcs: [
         "MediaCodecsXmlParser.cpp",
diff --git a/media/mtp/MtpDebug.cpp b/media/mtp/MtpDebug.cpp
index 1c04bcf..d2ede1c 100644
--- a/media/mtp/MtpDebug.cpp
+++ b/media/mtp/MtpDebug.cpp
@@ -102,6 +102,7 @@
     { "MTP_FORMAT_JP2",                             0x380F },
     { "MTP_FORMAT_JPX",                             0x3810 },
     { "MTP_FORMAT_DNG",                             0x3811 },
+    { "MTP_FORMAT_HEIF",                            0x3812 },
     { "MTP_FORMAT_UNDEFINED_FIRMWARE",              0xB802 },
     { "MTP_FORMAT_WINDOWS_IMAGE_FORMAT",            0xB881 },
     { "MTP_FORMAT_UNDEFINED_AUDIO",                 0xB900 },
diff --git a/media/mtp/MtpFfsHandle.cpp b/media/mtp/MtpFfsHandle.cpp
index c50af2f..4132fed 100644
--- a/media/mtp/MtpFfsHandle.cpp
+++ b/media/mtp/MtpFfsHandle.cpp
@@ -719,9 +719,22 @@
 }
 
 int MtpFfsHandle::sendEvent(mtp_event me) {
+    // Mimic the behavior of f_mtp by sending the event async.
+    // Events aren't critical to the connection, so we don't need to check the return value.
+    char *temp = new char[me.length];
+    memcpy(temp, me.data, me.length);
+    me.data = temp;
+    std::thread t([this, me]() { return this->doSendEvent(me); });
+    t.detach();
+    return 0;
+}
+
+void MtpFfsHandle::doSendEvent(mtp_event me) {
     unsigned length = me.length;
-    int ret = writeHandle(mIntr, me.data, length);
-    return static_cast<unsigned>(ret) == length ? 0 : -1;
+    int ret = ::write(mIntr, me.data, length);
+    if (static_cast<unsigned>(ret) != length)
+        PLOG(ERROR) << "Mtp error sending event thread!";
+    delete[] reinterpret_cast<char*>(me.data);
 }
 
 } // namespace android
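
Note: the pattern above copies the payload to the heap because the caller's
buffer may be reclaimed once sendEvent() returns, then hands ownership to a
detached thread. A minimal standalone sketch of the same idiom (names are
illustrative, not from the patch):

    #include <cstring>
    #include <thread>
    #include <unistd.h>

    struct Event { void *data; unsigned length; };

    void sendAsync(int fd, Event ev) {
        char *copy = new char[ev.length];  // must outlive the caller's buffer
        memcpy(copy, ev.data, ev.length);
        std::thread([fd, copy, len = ev.length]() {
            (void)::write(fd, copy, len);  // event loss is non-fatal
            delete[] copy;
        }).detach();
    }
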
diff --git a/media/mtp/MtpFfsHandle.h b/media/mtp/MtpFfsHandle.h
index 98669ff..b637d65 100644
--- a/media/mtp/MtpFfsHandle.h
+++ b/media/mtp/MtpFfsHandle.h
@@ -33,6 +33,7 @@
     bool initFunctionfs();
     void closeConfig();
     void closeEndpoints();
+    void doSendEvent(mtp_event me);
 
     bool mPtp;
 
diff --git a/media/mtp/mtp.h b/media/mtp/mtp.h
index adfb102..13cc859 100644
--- a/media/mtp/mtp.h
+++ b/media/mtp/mtp.h
@@ -94,6 +94,7 @@
 #define MTP_FORMAT_JP2                                  0x380F   // JPEG2000 Baseline File Format
 #define MTP_FORMAT_JPX                                  0x3810   // JPEG2000 Extended File Format
 #define MTP_FORMAT_DNG                                  0x3811   // Digital Negative
+#define MTP_FORMAT_HEIF                                 0x3812   // HEIF images
 #define MTP_FORMAT_UNDEFINED_FIRMWARE                   0xB802
 #define MTP_FORMAT_WINDOWS_IMAGE_FORMAT                 0xB881
 #define MTP_FORMAT_UNDEFINED_AUDIO                      0xB900
diff --git a/media/ndk/Android.bp b/media/ndk/Android.bp
index 40974f3..0d48de1 100644
--- a/media/ndk/Android.bp
+++ b/media/ndk/Android.bp
@@ -90,3 +90,9 @@
         },
     },
 }
+
+llndk_library {
+    name: "libmediandk",
+    symbol_file: "libmediandk.map.txt",
+    export_include_dirs: ["include"],
+}
diff --git a/services/audioflinger/AudioFlinger.h b/services/audioflinger/AudioFlinger.h
index 9023b2d..63898a0 100644
--- a/services/audioflinger/AudioFlinger.h
+++ b/services/audioflinger/AudioFlinger.h
@@ -604,7 +604,7 @@
         virtual status_t standby();
 
     private:
-        sp<MmapThread> mThread;
+        const sp<MmapThread> mThread;
     };
 
               ThreadBase *checkThread_l(audio_io_handle_t ioHandle) const;
diff --git a/services/audioflinger/ServiceUtilities.cpp b/services/audioflinger/ServiceUtilities.cpp
index 3c73543..c1044ef 100644
--- a/services/audioflinger/ServiceUtilities.cpp
+++ b/services/audioflinger/ServiceUtilities.cpp
@@ -113,10 +113,15 @@
     return ok;
 }
 
-bool captureHotwordAllowed() {
-    static const String16 sCaptureHotwordAllowed("android.permission.CAPTURE_AUDIO_HOTWORD");
-    // IMPORTANT: Use PermissionCache - not a runtime permission and may not change.
-    bool ok = PermissionCache::checkCallingPermission(sCaptureHotwordAllowed);
+bool captureHotwordAllowed(pid_t pid, uid_t uid) {
+    // CAPTURE_AUDIO_HOTWORD permission implies RECORD_AUDIO permission
+    bool ok = recordingAllowed(String16(""), pid, uid);
+
+    if (ok) {
+        static const String16 sCaptureHotwordAllowed("android.permission.CAPTURE_AUDIO_HOTWORD");
+        // IMPORTANT: Use PermissionCache - not a runtime permission and may not change.
+        ok = PermissionCache::checkCallingPermission(sCaptureHotwordAllowed);
+    }
     if (!ok) ALOGE("android.permission.CAPTURE_AUDIO_HOTWORD");
     return ok;
 }
diff --git a/services/audioflinger/ServiceUtilities.h b/services/audioflinger/ServiceUtilities.h
index 8b1bc00..04cb9cd 100644
--- a/services/audioflinger/ServiceUtilities.h
+++ b/services/audioflinger/ServiceUtilities.h
@@ -22,7 +22,7 @@
 bool isTrustedCallingUid(uid_t uid);
 bool recordingAllowed(const String16& opPackageName, pid_t pid, uid_t uid);
 bool captureAudioOutputAllowed(pid_t pid, uid_t uid);
-bool captureHotwordAllowed();
+bool captureHotwordAllowed(pid_t pid, uid_t uid);
 bool settingsAllowed();
 bool modifyAudioRoutingAllowed();
 bool dumpAllowed();
diff --git a/services/audioflinger/Threads.cpp b/services/audioflinger/Threads.cpp
index 459e4fb..8c4531a 100644
--- a/services/audioflinger/Threads.cpp
+++ b/services/audioflinger/Threads.cpp
@@ -7503,34 +7503,22 @@
 AudioFlinger::MmapThreadHandle::MmapThreadHandle(const sp<MmapThread>& thread)
     : mThread(thread)
 {
+    assert(thread != 0); // thread must start non-null and stay non-null
 }
 
 AudioFlinger::MmapThreadHandle::~MmapThreadHandle()
 {
-    MmapThread *thread = mThread.get();
-    // clear our strong reference before disconnecting the thread: the last strong reference
-    // will be removed when closeInput/closeOutput is executed upon call from audio policy manager
-    // and the thread removed from mMMapThreads list causing the thread destruction.
-    mThread.clear();
-    if (thread != nullptr) {
-        thread->disconnect();
-    }
+    mThread->disconnect();
 }
 
 status_t AudioFlinger::MmapThreadHandle::createMmapBuffer(int32_t minSizeFrames,
                                   struct audio_mmap_buffer_info *info)
 {
-    if (mThread == 0) {
-        return NO_INIT;
-    }
     return mThread->createMmapBuffer(minSizeFrames, info);
 }
 
 status_t AudioFlinger::MmapThreadHandle::getMmapPosition(struct audio_mmap_position *position)
 {
-    if (mThread == 0) {
-        return NO_INIT;
-    }
     return mThread->getMmapPosition(position);
 }
 
@@ -7538,25 +7526,16 @@
         audio_port_handle_t *handle)
 
 {
-    if (mThread == 0) {
-        return NO_INIT;
-    }
     return mThread->start(client, handle);
 }
 
 status_t AudioFlinger::MmapThreadHandle::stop(audio_port_handle_t handle)
 {
-    if (mThread == 0) {
-        return NO_INIT;
-    }
     return mThread->stop(handle);
 }
 
 status_t AudioFlinger::MmapThreadHandle::standby()
 {
-    if (mThread == 0) {
-        return NO_INIT;
-    }
     return mThread->standby();
 }
 
@@ -7588,7 +7567,7 @@
     for (const sp<MmapTrack> &t : mActiveTracks) {
         stop(t->portId());
     }
-    // this will cause the destruction of this thread.
+    // This will decrement references and may cause the destruction of this thread.
     if (isOutput()) {
         AudioSystem::releaseOutput(mId, streamType(), mSessionId);
     } else {
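
Note: the simplification above rests on an invariant: mThread is set from a
non-null sp<> at construction and, being const, can never be cleared, so the
per-method NO_INIT checks were dead code. A minimal sketch of the idiom,
using std::shared_ptr in place of sp<>:

    #include <cassert>
    #include <memory>

    class Worker { public: void run() {} };

    class Handle {
      public:
        explicit Handle(const std::shared_ptr<Worker>& w) : mWorker(w) {
            assert(w != nullptr);   // non-null at construction...
        }
        void run() { mWorker->run(); }  // ...so no per-call null check
      private:
        const std::shared_ptr<Worker> mWorker;  // const: cannot be reset
    };
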
diff --git a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
index 1a7db26..78f195d 100644
--- a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
+++ b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
@@ -54,6 +54,11 @@
 //FIXME: workaround for truncated touch sounds
 // to be removed when the problem is handled by system UI
 #define TOUCH_SOUND_FIXED_DELAY_MS 100
+
+// Largest difference in dB on earpiece in call between the voice volume and another
+// media / notification / system volume.
+constexpr float IN_CALL_EARPIECE_HEADROOM_DB = 3.f;
+
 // ----------------------------------------------------------------------------
 // AudioPolicyInterface implementation
 // ----------------------------------------------------------------------------
@@ -5348,6 +5353,30 @@
         return ringVolumeDB - 4 > volumeDB ? ringVolumeDB - 4 : volumeDB;
     }
 
+    // in-call: always cap earpiece volume by voice volume + some low headroom
+    if ((stream != AUDIO_STREAM_VOICE_CALL) && (device & AUDIO_DEVICE_OUT_EARPIECE) && isInCall()) {
+        switch (stream) {
+        case AUDIO_STREAM_SYSTEM:
+        case AUDIO_STREAM_RING:
+        case AUDIO_STREAM_MUSIC:
+        case AUDIO_STREAM_ALARM:
+        case AUDIO_STREAM_NOTIFICATION:
+        case AUDIO_STREAM_ENFORCED_AUDIBLE:
+        case AUDIO_STREAM_DTMF:
+        case AUDIO_STREAM_ACCESSIBILITY: {
+            const float maxVoiceVolDb = computeVolume(AUDIO_STREAM_VOICE_CALL, index, device)
+                    + IN_CALL_EARPIECE_HEADROOM_DB;
+            if (volumeDB > maxVoiceVolDb) {
+                ALOGV("computeVolume() stream %d at vol=%f overriden by stream %d at vol=%f",
+                        stream, volumeDB, AUDIO_STREAM_VOICE_CALL, maxVoiceVolDb);
+                volumeDB = maxVoiceVolDb;
+            }
+            } break;
+        default:
+            break;
+        }
+    }
+
     // if a headset is connected, apply the following rules to ring tones and notifications
     // to avoid sound level bursts in user's ears:
     // - always attenuate notifications volume by 6dB
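
Note: a worked example of the cap introduced above, with hypothetical
volumes: if the voice stream computes to -20 dB on the earpiece, any of the
listed streams is limited to -20 + 3 = -17 dB, so a -10 dB music volume is
clamped:

    #include <algorithm>
    #include <cstdio>

    int main() {
        constexpr float kHeadroomDb = 3.f;  // IN_CALL_EARPIECE_HEADROOM_DB
        float voiceDb = -20.f;              // current voice call volume
        float musicDb = -10.f;              // requested, louder than the cap
        float capped = std::min(musicDb, voiceDb + kHeadroomDb);
        printf("capped music volume: %.1f dB\n", capped);  // -17.0 dB
    }
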
diff --git a/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp b/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp
index 7d7cd93..1d4386c 100644
--- a/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp
+++ b/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp
@@ -289,13 +289,6 @@
         return BAD_VALUE;
     }
 
-    if ((attr->source == AUDIO_SOURCE_HOTWORD) && !captureHotwordAllowed()) {
-        return BAD_VALUE;
-    }
-    sp<AudioPolicyEffects>audioPolicyEffects;
-    status_t status;
-    AudioPolicyInterface::input_type_t inputType;
-
     bool updatePid = (pid == -1);
     const uid_t callingUid = IPCThreadState::self()->getCallingUid();
     if (!isTrustedCallingUid(callingUid)) {
@@ -313,7 +306,15 @@
         pid = callingPid;
     }
 
+    if ((attr->source == AUDIO_SOURCE_HOTWORD) && !captureHotwordAllowed(pid, uid)) {
+        return BAD_VALUE;
+    }
+
+    sp<AudioPolicyEffects>audioPolicyEffects;
     {
+        status_t status;
+        AudioPolicyInterface::input_type_t inputType;
+
         Mutex::Autolock _l(mLock);
         // the audio_in_acoustics_t parameter is ignored by get_input()
         status = mAudioPolicyManager->getInputForAttr(attr, input, session, uid,
diff --git a/services/camera/libcameraservice/api2/CameraDeviceClient.cpp b/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
index 0429e7f..6fd9263 100644
--- a/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
+++ b/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
@@ -313,11 +313,13 @@
 
 binder::Status CameraDeviceClient::beginConfigure() {
     // TODO: Implement this.
+    ATRACE_CALL();
     ALOGV("%s: Not implemented yet.", __FUNCTION__);
     return binder::Status::ok();
 }
 
 binder::Status CameraDeviceClient::endConfigure(int operatingMode) {
+    ATRACE_CALL();
     ALOGV("%s: ending configure (%d input stream, %zu output surfaces)",
             __FUNCTION__, mInputStream.configured ? 1 : 0,
             mStreamMap.size());
@@ -568,7 +570,7 @@
         /*out*/
         int* newStreamId) {
     int width, height, format, surfaceType;
-    int32_t consumerUsage;
+    uint64_t consumerUsage;
     android_dataspace dataSpace;
     status_t err;
     binder::Status res;
@@ -764,24 +766,23 @@
     // Query consumer usage bits to set async operation mode for
     // GLConsumer using controlledByApp parameter.
     bool useAsync = false;
-    int32_t consumerUsage;
+    uint64_t consumerUsage = 0;
     status_t err;
-    if ((err = gbp->query(NATIVE_WINDOW_CONSUMER_USAGE_BITS,
-            &consumerUsage)) != OK) {
+    if ((err = gbp->getConsumerUsage(&consumerUsage)) != OK) {
         String8 msg = String8::format("Camera %s: Failed to query Surface consumer usage: %s (%d)",
                 mCameraIdStr.string(), strerror(-err), err);
         ALOGE("%s: %s", __FUNCTION__, msg.string());
         return STATUS_ERROR(CameraService::ERROR_INVALID_OPERATION, msg.string());
     }
     if (consumerUsage & GraphicBuffer::USAGE_HW_TEXTURE) {
-        ALOGW("%s: Camera %s with consumer usage flag: 0x%x: Forcing asynchronous mode for stream",
+        ALOGW("%s: Camera %s with consumer usage flag: %" PRIu64 ": Forcing asynchronous mode for stream",
                 __FUNCTION__, mCameraIdStr.string(), consumerUsage);
         useAsync = true;
     }
 
-    int32_t disallowedFlags = GraphicBuffer::USAGE_HW_VIDEO_ENCODER |
+    uint64_t disallowedFlags = GraphicBuffer::USAGE_HW_VIDEO_ENCODER |
                               GRALLOC_USAGE_RENDERSCRIPT;
-    int32_t allowedFlags = GraphicBuffer::USAGE_SW_READ_MASK |
+    uint64_t allowedFlags = GraphicBuffer::USAGE_SW_READ_MASK |
                            GraphicBuffer::USAGE_HW_TEXTURE |
                            GraphicBuffer::USAGE_HW_COMPOSER;
     bool flexibleConsumer = (consumerUsage & disallowedFlags) == 0 &&
@@ -874,7 +875,7 @@
         //surface class type. Use usage flag to approximate the comparison.
         if (consumerUsage != streamInfo.consumerUsage) {
             String8 msg = String8::format(
-                    "Camera %s:Surface usage flag doesn't match 0x%x vs 0x%x",
+                    "Camera %s:Surface usage flag doesn't match %" PRIu64 " vs %" PRIu64 "",
                     mCameraIdStr.string(), consumerUsage, streamInfo.consumerUsage);
             ALOGE("%s: %s", __FUNCTION__, msg.string());
             return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
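
Note: widening the usage flags to uint64_t also forces the format strings to
change: passing a 64-bit value where "%x" expects an int is undefined
behavior, hence the PRIu64/PRIx64 conversions above. A minimal illustration:

    #include <cinttypes>
    #include <cstdio>

    int main() {
        uint64_t usage = 0x100000000ULL;  // a flag above the 32-bit range
        printf("usage: 0x%" PRIx64 "\n", usage);  // plain "0x%x" is undefined here
    }
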
diff --git a/services/camera/libcameraservice/api2/CameraDeviceClient.h b/services/camera/libcameraservice/api2/CameraDeviceClient.h
index e8fc080..50661cb 100644
--- a/services/camera/libcameraservice/api2/CameraDeviceClient.h
+++ b/services/camera/libcameraservice/api2/CameraDeviceClient.h
@@ -213,13 +213,13 @@
         int height;
         int format;
         android_dataspace dataSpace;
-        int32_t consumerUsage;
+        uint64_t consumerUsage;
         bool finalized = false;
         OutputStreamInfo() :
                 width(-1), height(-1), format(-1), dataSpace(HAL_DATASPACE_UNKNOWN),
                 consumerUsage(0) {}
         OutputStreamInfo(int _width, int _height, int _format, android_dataspace _dataSpace,
-                int32_t _consumerUsage) :
+                uint64_t _consumerUsage) :
                     width(_width), height(_height), format(_format),
                     dataSpace(_dataSpace), consumerUsage(_consumerUsage) {}
     };
diff --git a/services/camera/libcameraservice/common/CameraDeviceBase.h b/services/camera/libcameraservice/common/CameraDeviceBase.h
index d9059f3..54fcb0a 100644
--- a/services/camera/libcameraservice/common/CameraDeviceBase.h
+++ b/services/camera/libcameraservice/common/CameraDeviceBase.h
@@ -119,7 +119,7 @@
             uint32_t width, uint32_t height, int format,
             android_dataspace dataSpace, camera3_stream_rotation_t rotation, int *id,
             int streamSetId = camera3::CAMERA3_STREAM_SET_ID_INVALID,
-            bool isShared = false, uint32_t consumerUsage = 0) = 0;
+            bool isShared = false, uint64_t consumerUsage = 0) = 0;
 
     /**
      * Create an output stream of the requested size, format, rotation and
@@ -132,7 +132,7 @@
             bool hasDeferredConsumer, uint32_t width, uint32_t height, int format,
             android_dataspace dataSpace, camera3_stream_rotation_t rotation, int *id,
             int streamSetId = camera3::CAMERA3_STREAM_SET_ID_INVALID,
-            bool isShared = false, uint32_t consumerUsage = 0) = 0;
+            bool isShared = false, uint64_t consumerUsage = 0) = 0;
 
     /**
      * Create an input stream of width, height, and format.
diff --git a/services/camera/libcameraservice/device1/CameraHardwareInterface.cpp b/services/camera/libcameraservice/device1/CameraHardwareInterface.cpp
index 469c86c..991b50f 100644
--- a/services/camera/libcameraservice/device1/CameraHardwareInterface.cpp
+++ b/services/camera/libcameraservice/device1/CameraHardwareInterface.cpp
@@ -29,11 +29,6 @@
 CameraHardwareInterface::~CameraHardwareInterface()
 {
     ALOGI("Destroying camera %s", mName.string());
-    if (mDevice) {
-        int rc = mDevice->common.close(&mDevice->common);
-        if (rc != OK)
-            ALOGE("Could not close camera %s: %d", mName.string(), rc);
-    }
     if (mHidlDevice != nullptr) {
         mHidlDevice->close();
         mHidlDevice.clear();
@@ -42,12 +37,6 @@
 }
 
 status_t CameraHardwareInterface::initialize(sp<CameraProviderManager> manager) {
-    if (mDevice) {
-        ALOGE("%s: camera hardware interface has been initialized to libhardware path!",
-                __FUNCTION__);
-        return INVALID_OPERATION;
-    }
-
     ALOGI("Opening camera %s", mName.string());
 
     status_t ret = manager->openSession(mName.string(), this, &mHidlDevice);
@@ -372,7 +361,7 @@
         ALOGE("%s: preview window is null", __FUNCTION__);
         return s;
     }
-    mPreviewUsage = (int) usage;
+    mPreviewUsage = static_cast<uint64_t>(usage);
     int rc = native_window_set_usage(a, mPreviewUsage);
     if (rc == OK) {
         cleanupCirculatingBuffers();
@@ -444,23 +433,6 @@
         }
         return CameraProviderManager::mapToStatusT(
                 mHidlDevice->setPreviewWindow(buf.get() ? this : nullptr));
-    } else if (mDevice) {
-        if (mDevice->ops->set_preview_window) {
-            mPreviewWindow = buf;
-            if (buf != nullptr) {
-                if (mPreviewScalingMode != NOT_SET) {
-                    setPreviewScalingMode(mPreviewScalingMode);
-                }
-                if (mPreviewTransform != NOT_SET) {
-                    setPreviewTransform(mPreviewTransform);
-                }
-            }
-            mHalPreviewWindow.user = this;
-            ALOGV("%s &mHalPreviewWindow %p mHalPreviewWindow.user %p",__FUNCTION__,
-                    &mHalPreviewWindow, mHalPreviewWindow.user);
-            return mDevice->ops->set_preview_window(mDevice,
-                    buf.get() ? &mHalPreviewWindow.nw : 0);
-        }
     }
     return INVALID_OPERATION;
 }
@@ -478,15 +450,6 @@
     mCbUser = user;
 
     ALOGV("%s(%s)", __FUNCTION__, mName.string());
-
-    if (mDevice && mDevice->ops->set_callbacks) {
-        mDevice->ops->set_callbacks(mDevice,
-                               sNotifyCb,
-                               sDataCb,
-                               sDataCbTimestamp,
-                               sGetMemory,
-                               this);
-    }
 }
 
 void CameraHardwareInterface::enableMsgType(int32_t msgType)
@@ -494,8 +457,6 @@
     ALOGV("%s(%s)", __FUNCTION__, mName.string());
     if (CC_LIKELY(mHidlDevice != nullptr)) {
         mHidlDevice->enableMsgType(msgType);
-    } else if (mDevice && mDevice->ops->enable_msg_type) {
-        mDevice->ops->enable_msg_type(mDevice, msgType);
     }
 }
 
@@ -504,8 +465,6 @@
     ALOGV("%s(%s)", __FUNCTION__, mName.string());
     if (CC_LIKELY(mHidlDevice != nullptr)) {
         mHidlDevice->disableMsgType(msgType);
-    } else if (mDevice && mDevice->ops->disable_msg_type) {
-        mDevice->ops->disable_msg_type(mDevice, msgType);
     }
 }
 
@@ -514,8 +473,6 @@
     ALOGV("%s(%s)", __FUNCTION__, mName.string());
     if (CC_LIKELY(mHidlDevice != nullptr)) {
         return mHidlDevice->msgTypeEnabled(msgType);
-    } else if (mDevice && mDevice->ops->msg_type_enabled) {
-        return mDevice->ops->msg_type_enabled(mDevice, msgType);
     }
     return false;
 }
@@ -526,8 +483,6 @@
     if (CC_LIKELY(mHidlDevice != nullptr)) {
         return CameraProviderManager::mapToStatusT(
                 mHidlDevice->startPreview());
-    } else if (mDevice && mDevice->ops->start_preview) {
-        return mDevice->ops->start_preview(mDevice);
     }
     return INVALID_OPERATION;
 }
@@ -537,8 +492,6 @@
     ALOGV("%s(%s)", __FUNCTION__, mName.string());
     if (CC_LIKELY(mHidlDevice != nullptr)) {
         mHidlDevice->stopPreview();
-    } else if (mDevice && mDevice->ops->stop_preview) {
-        mDevice->ops->stop_preview(mDevice);
     }
 }
 
@@ -547,8 +500,6 @@
     ALOGV("%s(%s)", __FUNCTION__, mName.string());
     if (CC_LIKELY(mHidlDevice != nullptr)) {
         return mHidlDevice->previewEnabled();
-    } else if (mDevice && mDevice->ops->preview_enabled) {
-        return mDevice->ops->preview_enabled(mDevice);
     }
     return false;
 }
@@ -559,8 +510,6 @@
     if (CC_LIKELY(mHidlDevice != nullptr)) {
         return CameraProviderManager::mapToStatusT(
                 mHidlDevice->storeMetaDataInBuffers(enable));
-    } else if (mDevice && mDevice->ops->store_meta_data_in_buffers) {
-        return mDevice->ops->store_meta_data_in_buffers(mDevice, enable);
     }
     return enable ? INVALID_OPERATION: OK;
 }
@@ -571,8 +520,6 @@
     if (CC_LIKELY(mHidlDevice != nullptr)) {
         return CameraProviderManager::mapToStatusT(
                 mHidlDevice->startRecording());
-    } else if (mDevice && mDevice->ops->start_recording) {
-        return mDevice->ops->start_recording(mDevice);
     }
     return INVALID_OPERATION;
 }
@@ -585,8 +532,6 @@
     ALOGV("%s(%s)", __FUNCTION__, mName.string());
     if (CC_LIKELY(mHidlDevice != nullptr)) {
         mHidlDevice->stopRecording();
-    } else if (mDevice && mDevice->ops->stop_recording) {
-        mDevice->ops->stop_recording(mDevice);
     }
 }
 
@@ -598,8 +543,6 @@
     ALOGV("%s(%s)", __FUNCTION__, mName.string());
     if (CC_LIKELY(mHidlDevice != nullptr)) {
         return mHidlDevice->recordingEnabled();
-    } else if (mDevice && mDevice->ops->recording_enabled) {
-        return mDevice->ops->recording_enabled(mDevice);
     }
     return false;
 }
@@ -624,9 +567,6 @@
         } else {
             mHidlDevice->releaseRecordingFrame(heapId, bufferIndex);
         }
-    } else if (mDevice && mDevice->ops->release_recording_frame) {
-        void *data = ((uint8_t *)heap->base()) + offset;
-        return mDevice->ops->release_recording_frame(mDevice, data);
     }
 }
 
@@ -653,9 +593,6 @@
                 ALOGE("%s only supports VideoNativeHandleMetadata mode", __FUNCTION__);
                 return;
             }
-        } else {
-            ALOGE("Non HIDL mode do not support %s", __FUNCTION__);
-            return;
         }
     }
 
@@ -674,8 +611,6 @@
     if (CC_LIKELY(mHidlDevice != nullptr)) {
         return CameraProviderManager::mapToStatusT(
                 mHidlDevice->autoFocus());
-    } else if (mDevice && mDevice->ops->auto_focus) {
-        return mDevice->ops->auto_focus(mDevice);
     }
     return INVALID_OPERATION;
 }
@@ -686,8 +621,6 @@
     if (CC_LIKELY(mHidlDevice != nullptr)) {
         return CameraProviderManager::mapToStatusT(
                 mHidlDevice->cancelAutoFocus());
-    } else if (mDevice && mDevice->ops->cancel_auto_focus) {
-        return mDevice->ops->cancel_auto_focus(mDevice);
     }
     return INVALID_OPERATION;
 }
@@ -698,8 +631,6 @@
     if (CC_LIKELY(mHidlDevice != nullptr)) {
         return CameraProviderManager::mapToStatusT(
                 mHidlDevice->takePicture());
-    } else if (mDevice && mDevice->ops->take_picture) {
-        return mDevice->ops->take_picture(mDevice);
     }
     return INVALID_OPERATION;
 }
@@ -710,8 +641,6 @@
     if (CC_LIKELY(mHidlDevice != nullptr)) {
         return CameraProviderManager::mapToStatusT(
                 mHidlDevice->cancelPicture());
-    } else if (mDevice && mDevice->ops->cancel_picture) {
-        return mDevice->ops->cancel_picture(mDevice);
     }
     return INVALID_OPERATION;
 }
@@ -722,8 +651,6 @@
     if (CC_LIKELY(mHidlDevice != nullptr)) {
         return CameraProviderManager::mapToStatusT(
                 mHidlDevice->setParameters(params.flatten().string()));
-    } else if (mDevice && mDevice->ops->set_parameters) {
-        return mDevice->ops->set_parameters(mDevice, params.flatten().string());
     }
     return INVALID_OPERATION;
 }
@@ -740,14 +667,6 @@
                 });
         String8 tmp(outParam.c_str());
         parms.unflatten(tmp);
-    } else if (mDevice && mDevice->ops->get_parameters) {
-        char *temp = mDevice->ops->get_parameters(mDevice);
-        String8 str_parms(temp);
-        if (mDevice->ops->put_parameters)
-            mDevice->ops->put_parameters(mDevice, temp);
-        else
-            free(temp);
-        parms.unflatten(str_parms);
     }
     return parms;
 }
@@ -758,8 +677,6 @@
     if (CC_LIKELY(mHidlDevice != nullptr)) {
         return CameraProviderManager::mapToStatusT(
                 mHidlDevice->sendCommand((CommandType) cmd, arg1, arg2));
-    } else if (mDevice && mDevice->ops->send_command) {
-        return mDevice->ops->send_command(mDevice, cmd, arg1, arg2);
     }
     return INVALID_OPERATION;
 }
@@ -773,8 +690,6 @@
     if (CC_LIKELY(mHidlDevice != nullptr)) {
         mHidlDevice->close();
         mHidlDevice.clear();
-    } else if (mDevice && mDevice->ops->release) {
-        mDevice->ops->release(mDevice);
     }
 }
 
@@ -790,15 +705,10 @@
         Status s = mHidlDevice->dumpState(handle);
         native_handle_delete(handle);
         return CameraProviderManager::mapToStatusT(s);
-    } else if (mDevice && mDevice->ops->dump) {
-        return mDevice->ops->dump(mDevice, fd);
     }
     return OK; // It's fine if the HAL doesn't implement dump()
 }
 
-/**
- * Methods for legacy (non-HIDL) path follows
- */
 void CameraHardwareInterface::sNotifyCb(int32_t msg_type, int32_t ext1,
                         int32_t ext2, void *user)
 {
@@ -868,177 +778,4 @@
     mem->decStrong(mem);
 }
 
-ANativeWindow* CameraHardwareInterface::sToAnw(void *user)
-{
-    CameraHardwareInterface *object =
-            reinterpret_cast<CameraHardwareInterface *>(user);
-    return object->mPreviewWindow.get();
-}
-#define anw(n) sToAnw(((struct camera_preview_window *)(n))->user)
-#define hwi(n) reinterpret_cast<CameraHardwareInterface *>(\
-    ((struct camera_preview_window *)(n))->user)
-
-int CameraHardwareInterface::sDequeueBuffer(struct preview_stream_ops* w,
-                            buffer_handle_t** buffer, int *stride)
-{
-    int rc;
-    ANativeWindow *a = anw(w);
-    ANativeWindowBuffer* anb;
-    rc = native_window_dequeue_buffer_and_wait(a, &anb);
-    if (rc == OK) {
-        *buffer = &anb->handle;
-        *stride = anb->stride;
-    }
-    return rc;
-}
-
-#ifndef container_of
-#define container_of(ptr, type, member) ({                      \
-    const __typeof__(((type *) 0)->member) *__mptr = (ptr);     \
-    (type *) ((char *) __mptr - (char *)(&((type *)0)->member)); })
-#endif
-
-int CameraHardwareInterface::sLockBuffer(struct preview_stream_ops* w,
-                  buffer_handle_t* /*buffer*/)
-{
-    ANativeWindow *a = anw(w);
-    (void)a;
-    return 0;
-}
-
-int CameraHardwareInterface::sEnqueueBuffer(struct preview_stream_ops* w,
-                  buffer_handle_t* buffer)
-{
-    ANativeWindow *a = anw(w);
-    return a->queueBuffer(a,
-              container_of(buffer, ANativeWindowBuffer, handle), -1);
-}
-
-int CameraHardwareInterface::sCancelBuffer(struct preview_stream_ops* w,
-                  buffer_handle_t* buffer)
-{
-    ANativeWindow *a = anw(w);
-    return a->cancelBuffer(a,
-              container_of(buffer, ANativeWindowBuffer, handle), -1);
-}
-
-int CameraHardwareInterface::sSetBufferCount(struct preview_stream_ops* w, int count)
-{
-    ANativeWindow *a = anw(w);
-
-    if (a != nullptr) {
-        // Workaround for b/27039775
-        // Previously, setting the buffer count would reset the buffer
-        // queue's flag that allows for all buffers to be dequeued on the
-        // producer side, instead of just the producer's declared max count,
-        // if no filled buffers have yet been queued by the producer.  This
-        // reset no longer happens, but some HALs depend on this behavior,
-        // so it needs to be maintained for HAL backwards compatibility.
-        // Simulate the prior behavior by disconnecting/reconnecting to the
-        // window and setting the values again.  This has the drawback of
-        // actually causing memory reallocation, which may not have happened
-        // in the past.
-        CameraHardwareInterface *hw = hwi(w);
-        native_window_api_disconnect(a, NATIVE_WINDOW_API_CAMERA);
-        native_window_api_connect(a, NATIVE_WINDOW_API_CAMERA);
-        if (hw->mPreviewScalingMode != NOT_SET) {
-            native_window_set_scaling_mode(a, hw->mPreviewScalingMode);
-        }
-        if (hw->mPreviewTransform != NOT_SET) {
-            native_window_set_buffers_transform(a, hw->mPreviewTransform);
-        }
-        if (hw->mPreviewWidth != NOT_SET) {
-            native_window_set_buffers_dimensions(a,
-                    hw->mPreviewWidth, hw->mPreviewHeight);
-            native_window_set_buffers_format(a, hw->mPreviewFormat);
-        }
-        if (hw->mPreviewUsage != 0) {
-            native_window_set_usage(a, hw->mPreviewUsage);
-        }
-        if (hw->mPreviewSwapInterval != NOT_SET) {
-            a->setSwapInterval(a, hw->mPreviewSwapInterval);
-        }
-        if (hw->mPreviewCrop.left != NOT_SET) {
-            native_window_set_crop(a, &(hw->mPreviewCrop));
-        }
-    }
-
-    return native_window_set_buffer_count(a, count);
-}
-
-int CameraHardwareInterface::sSetBuffersGeometry(struct preview_stream_ops* w,
-                  int width, int height, int format)
-{
-    int rc;
-    ANativeWindow *a = anw(w);
-    CameraHardwareInterface *hw = hwi(w);
-    hw->mPreviewWidth = width;
-    hw->mPreviewHeight = height;
-    hw->mPreviewFormat = format;
-    rc = native_window_set_buffers_dimensions(a, width, height);
-    if (rc == OK) {
-        rc = native_window_set_buffers_format(a, format);
-    }
-    return rc;
-}
-
-int CameraHardwareInterface::sSetCrop(struct preview_stream_ops *w,
-                  int left, int top, int right, int bottom)
-{
-    ANativeWindow *a = anw(w);
-    CameraHardwareInterface *hw = hwi(w);
-    hw->mPreviewCrop.left = left;
-    hw->mPreviewCrop.top = top;
-    hw->mPreviewCrop.right = right;
-    hw->mPreviewCrop.bottom = bottom;
-    return native_window_set_crop(a, &(hw->mPreviewCrop));
-}
-
-int CameraHardwareInterface::sSetTimestamp(struct preview_stream_ops *w,
-                           int64_t timestamp) {
-    ANativeWindow *a = anw(w);
-    return native_window_set_buffers_timestamp(a, timestamp);
-}
-
-int CameraHardwareInterface::sSetUsage(struct preview_stream_ops* w, int usage)
-{
-    ANativeWindow *a = anw(w);
-    CameraHardwareInterface *hw = hwi(w);
-    hw->mPreviewUsage = usage;
-    return native_window_set_usage(a, usage);
-}
-
-int CameraHardwareInterface::sSetSwapInterval(struct preview_stream_ops *w, int interval)
-{
-    ANativeWindow *a = anw(w);
-    CameraHardwareInterface *hw = hwi(w);
-    hw->mPreviewSwapInterval = interval;
-    return a->setSwapInterval(a, interval);
-}
-
-int CameraHardwareInterface::sGetMinUndequeuedBufferCount(
-                  const struct preview_stream_ops *w,
-                  int *count)
-{
-    ANativeWindow *a = anw(w);
-    return a->query(a, NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS, count);
-}
-
-void CameraHardwareInterface::initHalPreviewWindow()
-{
-    mHalPreviewWindow.nw.cancel_buffer = sCancelBuffer;
-    mHalPreviewWindow.nw.lock_buffer = sLockBuffer;
-    mHalPreviewWindow.nw.dequeue_buffer = sDequeueBuffer;
-    mHalPreviewWindow.nw.enqueue_buffer = sEnqueueBuffer;
-    mHalPreviewWindow.nw.set_buffer_count = sSetBufferCount;
-    mHalPreviewWindow.nw.set_buffers_geometry = sSetBuffersGeometry;
-    mHalPreviewWindow.nw.set_crop = sSetCrop;
-    mHalPreviewWindow.nw.set_timestamp = sSetTimestamp;
-    mHalPreviewWindow.nw.set_usage = sSetUsage;
-    mHalPreviewWindow.nw.set_swap_interval = sSetSwapInterval;
-
-    mHalPreviewWindow.nw.get_min_undequeued_buffer_count =
-            sGetMinUndequeuedBufferCount;
-}
-
 }; // namespace android
diff --git a/services/camera/libcameraservice/device1/CameraHardwareInterface.h b/services/camera/libcameraservice/device1/CameraHardwareInterface.h
index 1c38d00..6a1b4fb 100644
--- a/services/camera/libcameraservice/device1/CameraHardwareInterface.h
+++ b/services/camera/libcameraservice/device1/CameraHardwareInterface.h
@@ -90,7 +90,6 @@
 
 public:
     explicit CameraHardwareInterface(const char *name):
-            mDevice(nullptr),
             mHidlDevice(nullptr),
             mName(name),
             mPreviewScalingMode(NOT_SET),
@@ -299,7 +298,6 @@
     status_t dump(int fd, const Vector<String16>& /*args*/) const;
 
 private:
-    camera_device_t *mDevice;
     sp<hardware::camera::device::V1_0::ICameraDevice> mHidlDevice;
     String8 mName;
 
@@ -369,41 +367,6 @@
 
     static void sPutMemory(camera_memory_t *data);
 
-    static ANativeWindow *sToAnw(void *user);
-
-    static int sDequeueBuffer(struct preview_stream_ops* w,
-                                buffer_handle_t** buffer, int *stride);
-
-    static int sLockBuffer(struct preview_stream_ops* w,
-                      buffer_handle_t* /*buffer*/);
-
-    static int sEnqueueBuffer(struct preview_stream_ops* w,
-                      buffer_handle_t* buffer);
-
-    static int sCancelBuffer(struct preview_stream_ops* w,
-                      buffer_handle_t* buffer);
-
-    static int sSetBufferCount(struct preview_stream_ops* w, int count);
-
-    static int sSetBuffersGeometry(struct preview_stream_ops* w,
-                      int width, int height, int format);
-
-    static int sSetCrop(struct preview_stream_ops *w,
-                      int left, int top, int right, int bottom);
-
-    static int sSetTimestamp(struct preview_stream_ops *w,
-                               int64_t timestamp);
-
-    static int sSetUsage(struct preview_stream_ops* w, int usage);
-
-    static int sSetSwapInterval(struct preview_stream_ops *w, int interval);
-
-    static int sGetMinUndequeuedBufferCount(
-                      const struct preview_stream_ops *w,
-                      int *count);
-
-    void initHalPreviewWindow();
-
     std::pair<bool, uint64_t> getBufferId(ANativeWindowBuffer* anb);
     void cleanupCirculatingBuffers();
 
@@ -459,13 +422,6 @@
 
     sp<ANativeWindow>        mPreviewWindow;
 
-    struct camera_preview_window {
-        struct preview_stream_ops nw;
-        void *user;
-    };
-
-    struct camera_preview_window mHalPreviewWindow;
-
     notify_callback               mNotifyCb;
     data_callback                 mDataCb;
     data_callback_timestamp       mDataCbTimestamp;
@@ -479,7 +435,7 @@
     int mPreviewWidth;
     int mPreviewHeight;
     int mPreviewFormat;
-    int mPreviewUsage;
+    uint64_t mPreviewUsage;
     int mPreviewSwapInterval;
     android_native_rect_t mPreviewCrop;
 
diff --git a/services/camera/libcameraservice/device3/Camera3Device.cpp b/services/camera/libcameraservice/device3/Camera3Device.cpp
index 69b1d7d..02f5424 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.cpp
+++ b/services/camera/libcameraservice/device3/Camera3Device.cpp
@@ -407,7 +407,7 @@
 }
 
 BufferUsageFlags Camera3Device::mapToConsumerUsage(
-        uint32_t usage) {
+        uint64_t usage) {
     return usage;
 }
 
@@ -460,12 +460,12 @@
     return static_cast<uint32_t>(pixelFormat);
 }
 
-uint32_t Camera3Device::mapConsumerToFrameworkUsage(
+uint64_t Camera3Device::mapConsumerToFrameworkUsage(
         BufferUsageFlags usage) {
     return usage;
 }
 
-uint32_t Camera3Device::mapProducerToFrameworkUsage(
+uint64_t Camera3Device::mapProducerToFrameworkUsage(
         BufferUsageFlags usage) {
     return usage;
 }
@@ -1208,7 +1208,7 @@
 status_t Camera3Device::createStream(sp<Surface> consumer,
             uint32_t width, uint32_t height, int format,
             android_dataspace dataSpace, camera3_stream_rotation_t rotation, int *id,
-            int streamSetId, bool isShared, uint32_t consumerUsage) {
+            int streamSetId, bool isShared, uint64_t consumerUsage) {
     ATRACE_CALL();
 
     if (consumer == nullptr) {
@@ -1226,13 +1226,13 @@
 status_t Camera3Device::createStream(const std::vector<sp<Surface>>& consumers,
         bool hasDeferredConsumer, uint32_t width, uint32_t height, int format,
         android_dataspace dataSpace, camera3_stream_rotation_t rotation, int *id,
-        int streamSetId, bool isShared, uint32_t consumerUsage) {
+        int streamSetId, bool isShared, uint64_t consumerUsage) {
     ATRACE_CALL();
     Mutex::Autolock il(mInterfaceLock);
     nsecs_t maxExpectedDuration = getExpectedInFlightDuration();
     Mutex::Autolock l(mLock);
     ALOGV("Camera %s: Creating new stream %d: %d x %d, format %d, dataspace %d rotation %d"
-            " consumer usage 0x%x, isShared %d", mId.string(), mNextStreamId, width, height, format,
+            " consumer usage %" PRIu64 ", isShared %d", mId.string(), mNextStreamId, width, height, format,
             dataSpace, rotation, consumerUsage, isShared);
 
     status_t res;
@@ -1478,6 +1478,7 @@
 
 status_t Camera3Device::getInputBufferProducer(
         sp<IGraphicBufferProducer> *producer) {
+    ATRACE_CALL();
     Mutex::Autolock il(mInterfaceLock);
     Mutex::Autolock l(mLock);
 
@@ -1691,6 +1692,7 @@
 }
 
 status_t Camera3Device::waitForNextFrame(nsecs_t timeout) {
+    ATRACE_CALL();
     status_t res;
     Mutex::Autolock l(mOutputLock);
 
@@ -1884,6 +1886,7 @@
  */
 
 void Camera3Device::notifyStatus(bool idle) {
+    ATRACE_CALL();
     {
         // Need mLock to safely update state and synchronize to current
         // state of methods in flight.
@@ -2317,6 +2320,7 @@
 }
 
 void Camera3Device::setErrorState(const char *fmt, ...) {
+    ATRACE_CALL();
     Mutex::Autolock l(mLock);
     va_list args;
     va_start(args, fmt);
@@ -2327,6 +2331,7 @@
 }
 
 void Camera3Device::setErrorStateV(const char *fmt, va_list args) {
+    ATRACE_CALL();
     Mutex::Autolock l(mLock);
     setErrorStateLockedV(fmt, args);
 }
@@ -2411,6 +2416,7 @@
 }
 
 void Camera3Device::removeInFlightMapEntryLocked(int idx) {
+    ATRACE_CALL();
     nsecs_t duration = mInFlightMap.valueAt(idx).maxExpectedDuration;
     mInFlightMap.removeItemsAt(idx, 1);
 
@@ -2495,6 +2501,7 @@
 }
 
 void Camera3Device::flushInflightRequests() {
+    ATRACE_CALL();
     { // First return buffers cached in mInFlightMap
         Mutex::Autolock l(mInFlightLock);
         for (size_t idx = 0; idx < mInFlightMap.size(); idx++) {
@@ -2621,6 +2628,7 @@
 
 void Camera3Device::sendPartialCaptureResult(const camera_metadata_t * partialResult,
         const CaptureResultExtras &resultExtras, uint32_t frameNumber) {
+    ATRACE_CALL();
     Mutex::Autolock l(mOutputLock);
 
     CaptureResult captureResult;
@@ -2636,6 +2644,7 @@
         CameraMetadata &collectedPartialResult,
         uint32_t frameNumber,
         bool reprocess) {
+    ATRACE_CALL();
     if (pendingMetadata.isEmpty())
         return;
 
@@ -2884,7 +2893,7 @@
 
 void Camera3Device::notifyError(const camera3_error_msg_t &msg,
         sp<NotificationListener> listener) {
-
+    ATRACE_CALL();
     // Map camera HAL error codes to ICameraDeviceCallback error codes
     // Index into this with the HAL error code
     static const int32_t halErrorMap[CAMERA3_MSG_NUM_ERRORS] = {
@@ -2962,6 +2971,7 @@
 
 void Camera3Device::notifyShutter(const camera3_shutter_msg_t &msg,
         sp<NotificationListener> listener) {
+    ATRACE_CALL();
     ssize_t idx;
 
     // Set timestamp for the request in the in-flight tracking
@@ -3048,24 +3058,20 @@
 Camera3Device::HalInterface::HalInterface(
             sp<ICameraDeviceSession> &session,
             std::shared_ptr<RequestMetadataQueue> queue) :
-        mHal3Device(nullptr),
         mHidlSession(session),
         mRequestMetadataQueue(queue) {}
 
-Camera3Device::HalInterface::HalInterface() :
-        mHal3Device(nullptr) {}
+Camera3Device::HalInterface::HalInterface() {}
 
 Camera3Device::HalInterface::HalInterface(const HalInterface& other) :
-        mHal3Device(other.mHal3Device),
         mHidlSession(other.mHidlSession),
         mRequestMetadataQueue(other.mRequestMetadataQueue) {}
 
 bool Camera3Device::HalInterface::valid() {
-    return (mHal3Device != nullptr) || (mHidlSession != nullptr);
+    return (mHidlSession != nullptr);
 }
 
 void Camera3Device::HalInterface::clear() {
-    mHal3Device = nullptr;
     mHidlSession.clear();
 }
 
@@ -3080,72 +3086,60 @@
     if (!valid()) return INVALID_OPERATION;
     status_t res = OK;
 
-    if (mHal3Device != nullptr) {
-        const camera_metadata *r;
-        r = mHal3Device->ops->construct_default_request_settings(
-                mHal3Device, templateId);
-        if (r == nullptr) return BAD_VALUE;
-        *requestTemplate = clone_camera_metadata(r);
-        if (requestTemplate == nullptr) {
-            ALOGE("%s: Unable to clone camera metadata received from HAL",
-                    __FUNCTION__);
-            return INVALID_OPERATION;
-        }
-    } else {
-        common::V1_0::Status status;
-        RequestTemplate id;
-        switch (templateId) {
-            case CAMERA3_TEMPLATE_PREVIEW:
-                id = RequestTemplate::PREVIEW;
-                break;
-            case CAMERA3_TEMPLATE_STILL_CAPTURE:
-                id = RequestTemplate::STILL_CAPTURE;
-                break;
-            case CAMERA3_TEMPLATE_VIDEO_RECORD:
-                id = RequestTemplate::VIDEO_RECORD;
-                break;
-            case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
-                id = RequestTemplate::VIDEO_SNAPSHOT;
-                break;
-            case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
-                id = RequestTemplate::ZERO_SHUTTER_LAG;
-                break;
-            case CAMERA3_TEMPLATE_MANUAL:
-                id = RequestTemplate::MANUAL;
-                break;
-            default:
-                // Unknown template ID
-                return BAD_VALUE;
-        }
-        auto err = mHidlSession->constructDefaultRequestSettings(id,
-                [&status, &requestTemplate]
-                (common::V1_0::Status s, const device::V3_2::CameraMetadata& request) {
-                    status = s;
-                    if (status == common::V1_0::Status::OK) {
-                        const camera_metadata *r =
-                                reinterpret_cast<const camera_metadata_t*>(request.data());
-                        size_t expectedSize = request.size();
-                        int ret = validate_camera_metadata_structure(r, &expectedSize);
-                        if (ret == OK || ret == CAMERA_METADATA_VALIDATION_SHIFTED) {
-                            *requestTemplate = clone_camera_metadata(r);
-                            if (*requestTemplate == nullptr) {
-                                ALOGE("%s: Unable to clone camera metadata received from HAL",
-                                        __FUNCTION__);
-                                status = common::V1_0::Status::INTERNAL_ERROR;
-                            }
-                        } else {
-                            ALOGE("%s: Malformed camera metadata received from HAL", __FUNCTION__);
+    common::V1_0::Status status;
+    RequestTemplate id;
+    switch (templateId) {
+        case CAMERA3_TEMPLATE_PREVIEW:
+            id = RequestTemplate::PREVIEW;
+            break;
+        case CAMERA3_TEMPLATE_STILL_CAPTURE:
+            id = RequestTemplate::STILL_CAPTURE;
+            break;
+        case CAMERA3_TEMPLATE_VIDEO_RECORD:
+            id = RequestTemplate::VIDEO_RECORD;
+            break;
+        case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
+            id = RequestTemplate::VIDEO_SNAPSHOT;
+            break;
+        case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
+            id = RequestTemplate::ZERO_SHUTTER_LAG;
+            break;
+        case CAMERA3_TEMPLATE_MANUAL:
+            id = RequestTemplate::MANUAL;
+            break;
+        default:
+            // Unknown template ID
+            return BAD_VALUE;
+    }
+    auto err = mHidlSession->constructDefaultRequestSettings(id,
+            [&status, &requestTemplate]
+            (common::V1_0::Status s, const device::V3_2::CameraMetadata& request) {
+                status = s;
+                if (status == common::V1_0::Status::OK) {
+                    const camera_metadata *r =
+                            reinterpret_cast<const camera_metadata_t*>(request.data());
+                    size_t expectedSize = request.size();
+                    int ret = validate_camera_metadata_structure(r, &expectedSize);
+                    if (ret == OK || ret == CAMERA_METADATA_VALIDATION_SHIFTED) {
+                        *requestTemplate = clone_camera_metadata(r);
+                        if (*requestTemplate == nullptr) {
+                            ALOGE("%s: Unable to clone camera metadata received from HAL",
+                                    __FUNCTION__);
                             status = common::V1_0::Status::INTERNAL_ERROR;
                         }
+                    } else {
+                        ALOGE("%s: Malformed camera metadata received from HAL", __FUNCTION__);
+                        status = common::V1_0::Status::INTERNAL_ERROR;
                     }
-                });
-        if (!err.isOk()) {
-            ALOGE("%s: Transaction error: %s", __FUNCTION__, err.description().c_str());
-            res = DEAD_OBJECT;
-        } else {
-            res = CameraProviderManager::mapToStatusT(status);
-        }
+                }
+            });
+    if (!err.isOk()) {
+        ALOGE("%s: Transaction error: %s", __FUNCTION__, err.description().c_str());
+        res = DEAD_OBJECT;
+    } else {
+        res = CameraProviderManager::mapToStatusT(status);
     }
+
     return res;
 }
 
@@ -3154,145 +3148,144 @@
     if (!valid()) return INVALID_OPERATION;
     status_t res = OK;
 
-    if (mHal3Device != nullptr) {
-        res = mHal3Device->ops->configure_streams(mHal3Device, config);
-    } else {
-        // Convert stream config to HIDL
-        std::set<int> activeStreams;
-        StreamConfiguration requestedConfiguration;
-        requestedConfiguration.streams.resize(config->num_streams);
-        for (size_t i = 0; i < config->num_streams; i++) {
-            Stream &dst = requestedConfiguration.streams[i];
-            camera3_stream_t *src = config->streams[i];
+    // Convert stream config to HIDL
+    std::set<int> activeStreams;
+    StreamConfiguration requestedConfiguration;
+    requestedConfiguration.streams.resize(config->num_streams);
+    for (size_t i = 0; i < config->num_streams; i++) {
+        Stream &dst = requestedConfiguration.streams[i];
+        camera3_stream_t *src = config->streams[i];
 
-            Camera3Stream* cam3stream = Camera3Stream::cast(src);
-            cam3stream->setBufferFreedListener(this);
-            int streamId = cam3stream->getId();
-            StreamType streamType;
-            switch (src->stream_type) {
-                case CAMERA3_STREAM_OUTPUT:
-                    streamType = StreamType::OUTPUT;
-                    break;
-                case CAMERA3_STREAM_INPUT:
-                    streamType = StreamType::INPUT;
-                    break;
-                default:
-                    ALOGE("%s: Stream %d: Unsupported stream type %d",
-                            __FUNCTION__, streamId, config->streams[i]->stream_type);
-                    return BAD_VALUE;
+        Camera3Stream* cam3stream = Camera3Stream::cast(src);
+        cam3stream->setBufferFreedListener(this);
+        int streamId = cam3stream->getId();
+        StreamType streamType;
+        switch (src->stream_type) {
+            case CAMERA3_STREAM_OUTPUT:
+                streamType = StreamType::OUTPUT;
+                break;
+            case CAMERA3_STREAM_INPUT:
+                streamType = StreamType::INPUT;
+                break;
+            default:
+                ALOGE("%s: Stream %d: Unsupported stream type %d",
+                        __FUNCTION__, streamId, config->streams[i]->stream_type);
+                return BAD_VALUE;
+        }
+        dst.id = streamId;
+        dst.streamType = streamType;
+        dst.width = src->width;
+        dst.height = src->height;
+        dst.format = mapToPixelFormat(src->format);
+        dst.usage = mapToConsumerUsage(cam3stream->getUsage());
+        dst.dataSpace = mapToHidlDataspace(src->data_space);
+        dst.rotation = mapToStreamRotation((camera3_stream_rotation_t) src->rotation);
+
+        activeStreams.insert(streamId);
+        // Create Buffer ID map if necessary
+        if (mBufferIdMaps.count(streamId) == 0) {
+            mBufferIdMaps.emplace(streamId, BufferIdMap{});
+        }
+    }
+    // remove BufferIdMap for deleted streams
+    for(auto it = mBufferIdMaps.begin(); it != mBufferIdMaps.end();) {
+        int streamId = it->first;
+        bool active = activeStreams.count(streamId) > 0;
+        if (!active) {
+            it = mBufferIdMaps.erase(it);
+        } else {
+            ++it;
+        }
+    }
+
+    res = mapToStreamConfigurationMode(
+            (camera3_stream_configuration_mode_t) config->operation_mode,
+            /*out*/ &requestedConfiguration.operationMode);
+    if (res != OK) {
+        return res;
+    }
+
+    // Invoke configureStreams
+
+    HalStreamConfiguration finalConfiguration;
+    common::V1_0::Status status;
+    auto err = mHidlSession->configureStreams(requestedConfiguration,
+            [&status, &finalConfiguration]
+            (common::V1_0::Status s, const HalStreamConfiguration& halConfiguration) {
+                finalConfiguration = halConfiguration;
+                status = s;
+            });
+    if (!err.isOk()) {
+        ALOGE("%s: Transaction error: %s", __FUNCTION__, err.description().c_str());
+        return DEAD_OBJECT;
+    }
+
+    if (status != common::V1_0::Status::OK ) {
+        return CameraProviderManager::mapToStatusT(status);
+    }
+
+    // And convert output stream configuration from HIDL
+
+    for (size_t i = 0; i < config->num_streams; i++) {
+        camera3_stream_t *dst = config->streams[i];
+        int streamId = Camera3Stream::cast(dst)->getId();
+
+        // Start scan at i, with the assumption that the stream order matches
+        size_t realIdx = i;
+        bool found = false;
+        for (size_t idx = 0; idx < finalConfiguration.streams.size(); idx++) {
+            if (finalConfiguration.streams[realIdx].id == streamId) {
+                found = true;
+                break;
             }
-            dst.id = streamId;
-            dst.streamType = streamType;
-            dst.width = src->width;
-            dst.height = src->height;
-            dst.format = mapToPixelFormat(src->format);
-            dst.usage = mapToConsumerUsage(src->usage);
-            dst.dataSpace = mapToHidlDataspace(src->data_space);
-            dst.rotation = mapToStreamRotation((camera3_stream_rotation_t) src->rotation);
+            realIdx = (realIdx >= finalConfiguration.streams.size() - 1) ? 0 : realIdx + 1;
+        }
+        if (!found) {
+            ALOGE("%s: Stream %d not found in stream configuration response from HAL",
+                    __FUNCTION__, streamId);
+            return INVALID_OPERATION;
+        }
+        HalStream &src = finalConfiguration.streams[realIdx];
 
-            activeStreams.insert(streamId);
-            // Create Buffer ID map if necessary
-            if (mBufferIdMaps.count(streamId) == 0) {
-                mBufferIdMaps.emplace(streamId, BufferIdMap{});
+        int overrideFormat = mapToFrameworkFormat(src.overrideFormat);
+        if (dst->format != HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED) {
+            if (dst->format != overrideFormat) {
+                ALOGE("%s: Stream %d: Format override not allowed for format 0x%x", __FUNCTION__,
+                        streamId, dst->format);
             }
-        }
-        // remove BufferIdMap for deleted streams
-        for(auto it = mBufferIdMaps.begin(); it != mBufferIdMaps.end();) {
-            int streamId = it->first;
-            bool active = activeStreams.count(streamId) > 0;
-            if (!active) {
-                it = mBufferIdMaps.erase(it);
-            } else {
-                ++it;
-            }
+        } else {
+            // Override allowed with IMPLEMENTATION_DEFINED
+            dst->format = overrideFormat;
         }
 
-        res = mapToStreamConfigurationMode(
-                (camera3_stream_configuration_mode_t) config->operation_mode,
-                /*out*/ &requestedConfiguration.operationMode);
-        if (res != OK) {
-            return res;
-        }
-
-        // Invoke configureStreams
-
-        HalStreamConfiguration finalConfiguration;
-        common::V1_0::Status status;
-        auto err = mHidlSession->configureStreams(requestedConfiguration,
-                [&status, &finalConfiguration]
-                (common::V1_0::Status s, const HalStreamConfiguration& halConfiguration) {
-                    finalConfiguration = halConfiguration;
-                    status = s;
-                });
-        if (!err.isOk()) {
-            ALOGE("%s: Transaction error: %s", __FUNCTION__, err.description().c_str());
-            return DEAD_OBJECT;
-        }
-
-        if (status != common::V1_0::Status::OK ) {
-            return CameraProviderManager::mapToStatusT(status);
-        }
-
-        // And convert output stream configuration from HIDL
-
-        for (size_t i = 0; i < config->num_streams; i++) {
-            camera3_stream_t *dst = config->streams[i];
-            int streamId = Camera3Stream::cast(dst)->getId();
-
-            // Start scan at i, with the assumption that the stream order matches
-            size_t realIdx = i;
-            bool found = false;
-            for (size_t idx = 0; idx < finalConfiguration.streams.size(); idx++) {
-                if (finalConfiguration.streams[realIdx].id == streamId) {
-                    found = true;
-                    break;
-                }
-                realIdx = (realIdx >= finalConfiguration.streams.size()) ? 0 : realIdx + 1;
-            }
-            if (!found) {
-                ALOGE("%s: Stream %d not found in stream configuration response from HAL",
+        if (dst->stream_type == CAMERA3_STREAM_INPUT) {
+            if (src.producerUsage != 0) {
+                ALOGE("%s: Stream %d: INPUT streams must have 0 for producer usage",
                         __FUNCTION__, streamId);
                 return INVALID_OPERATION;
             }
-            HalStream &src = finalConfiguration.streams[realIdx];
-
-            int overrideFormat = mapToFrameworkFormat(src.overrideFormat);
-            if (dst->format != HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED) {
-                if (dst->format != overrideFormat) {
-                    ALOGE("%s: Stream %d: Format override not allowed for format 0x%x", __FUNCTION__,
-                            streamId, dst->format);
-                }
-            } else {
-                // Override allowed with IMPLEMENTATION_DEFINED
-                dst->format = overrideFormat;
+            Camera3Stream::cast(dst)->setUsage(
+                    mapConsumerToFrameworkUsage(src.consumerUsage));
+        } else {
+            // OUTPUT
+            if (src.consumerUsage != 0) {
+                ALOGE("%s: Stream %d: OUTPUT streams must have 0 for consumer usage",
+                        __FUNCTION__, streamId);
+                return INVALID_OPERATION;
             }
-
-            if (dst->stream_type == CAMERA3_STREAM_INPUT) {
-                if (src.producerUsage != 0) {
-                    ALOGE("%s: Stream %d: INPUT streams must have 0 for producer usage",
-                            __FUNCTION__, streamId);
-                    return INVALID_OPERATION;
-                }
-                dst->usage = mapConsumerToFrameworkUsage(src.consumerUsage);
-            } else {
-                // OUTPUT
-                if (src.consumerUsage != 0) {
-                    ALOGE("%s: Stream %d: OUTPUT streams must have 0 for consumer usage",
-                            __FUNCTION__, streamId);
-                    return INVALID_OPERATION;
-                }
-                dst->usage = mapProducerToFrameworkUsage(src.producerUsage);
-            }
-            dst->max_buffers = src.maxBuffers;
+            Camera3Stream::cast(dst)->setUsage(
+                    mapProducerToFrameworkUsage(src.producerUsage));
         }
+        dst->max_buffers = src.maxBuffers;
     }
+
     return res;
 }
 
 void Camera3Device::HalInterface::wrapAsHidlRequest(camera3_capture_request_t* request,
         /*out*/device::V3_2::CaptureRequest* captureRequest,
         /*out*/std::vector<native_handle_t*>* handlesCreated) {
-
+    ATRACE_CALL();
     if (captureRequest == nullptr || handlesCreated == nullptr) {
         ALOGE("%s: captureRequest (%p) and handlesCreated (%p) must not be null",
                 __FUNCTION__, captureRequest, handlesCreated);
@@ -3441,14 +3434,11 @@
     if (!valid()) return INVALID_OPERATION;
     status_t res = OK;
 
-    if (mHal3Device != nullptr) {
-        res = mHal3Device->ops->process_capture_request(mHal3Device, request);
-    } else {
-        uint32_t numRequestProcessed = 0;
-        std::vector<camera3_capture_request_t*> requests(1);
-        requests[0] = request;
-        res = processBatchCaptureRequests(requests, &numRequestProcessed);
-    }
+    uint32_t numRequestProcessed = 0;
+    std::vector<camera3_capture_request_t*> requests(1);
+    requests[0] = request;
+    res = processBatchCaptureRequests(requests, &numRequestProcessed);
+
     return res;
 }
 
@@ -3457,31 +3447,24 @@
     if (!valid()) return INVALID_OPERATION;
     status_t res = OK;
 
-    if (mHal3Device != nullptr) {
-        res = mHal3Device->ops->flush(mHal3Device);
+    auto err = mHidlSession->flush();
+    if (!err.isOk()) {
+        ALOGE("%s: Transaction error: %s", __FUNCTION__, err.description().c_str());
+        res = DEAD_OBJECT;
     } else {
-        auto err = mHidlSession->flush();
-        if (!err.isOk()) {
-            ALOGE("%s: Transaction error: %s", __FUNCTION__, err.description().c_str());
-            res = DEAD_OBJECT;
-        } else {
-            res = CameraProviderManager::mapToStatusT(err);
-        }
+        res = CameraProviderManager::mapToStatusT(err);
     }
+
     return res;
 }
 
-status_t Camera3Device::HalInterface::dump(int fd) {
+status_t Camera3Device::HalInterface::dump(int /*fd*/) {
     ATRACE_NAME("CameraHal::dump");
     if (!valid()) return INVALID_OPERATION;
-    status_t res = OK;
 
-    if (mHal3Device != nullptr) {
-        mHal3Device->ops->dump(mHal3Device, fd);
-    } else {
-        // Handled by CameraProviderManager::dump
-    }
-    return res;
+    // Handled by CameraProviderManager::dump
+
+    return OK;
 }
 
 status_t Camera3Device::HalInterface::close() {
@@ -3489,15 +3472,12 @@
     if (!valid()) return INVALID_OPERATION;
     status_t res = OK;
 
-    if (mHal3Device != nullptr) {
-        mHal3Device->common.close(&mHal3Device->common);
-    } else {
-        auto err = mHidlSession->close();
-        // Interface will be dead shortly anyway, so don't log errors
-        if (!err.isOk()) {
-            res = DEAD_OBJECT;
-        }
+    auto err = mHidlSession->close();
+    // Interface will be dead shortly anyway, so don't log errors
+    if (!err.isOk()) {
+        res = DEAD_OBJECT;
     }
+
     return res;
 }
 
@@ -3614,11 +3594,13 @@
 
 void Camera3Device::RequestThread::setNotificationListener(
         wp<NotificationListener> listener) {
+    ATRACE_CALL();
     Mutex::Autolock l(mRequestLock);
     mListener = listener;
 }
 
 void Camera3Device::RequestThread::configurationComplete(bool isConstrainedHighSpeed) {
+    ATRACE_CALL();
     Mutex::Autolock l(mRequestLock);
     mReconfigured = true;
     // Prepare video stream for high speed recording.
@@ -3629,6 +3611,7 @@
         List<sp<CaptureRequest> > &requests,
         /*out*/
         int64_t *lastFrameNumber) {
+    ATRACE_CALL();
     Mutex::Autolock l(mRequestLock);
     for (List<sp<CaptureRequest> >::iterator it = requests.begin(); it != requests.end();
             ++it) {
@@ -3651,7 +3634,7 @@
 status_t Camera3Device::RequestThread::queueTrigger(
         RequestTrigger trigger[],
         size_t count) {
-
+    ATRACE_CALL();
     Mutex::Autolock l(mTriggerMutex);
     status_t ret;
 
@@ -3708,6 +3691,7 @@
         const RequestList &requests,
         /*out*/
         int64_t *lastFrameNumber) {
+    ATRACE_CALL();
     Mutex::Autolock l(mRequestLock);
     if (lastFrameNumber != NULL) {
         *lastFrameNumber = mRepeatingLastFrameNumber;
@@ -3734,6 +3718,7 @@
 }
 
 status_t Camera3Device::RequestThread::clearRepeatingRequests(/*out*/int64_t *lastFrameNumber) {
+    ATRACE_CALL();
     Mutex::Autolock l(mRequestLock);
     return clearRepeatingRequestsLocked(lastFrameNumber);
 
@@ -3750,6 +3735,7 @@
 
 status_t Camera3Device::RequestThread::clear(
         /*out*/int64_t *lastFrameNumber) {
+    ATRACE_CALL();
     Mutex::Autolock l(mRequestLock);
     ALOGV("RequestThread::%s:", __FUNCTION__);
 
@@ -3805,6 +3791,7 @@
 }
 
 void Camera3Device::RequestThread::setPaused(bool paused) {
+    ATRACE_CALL();
     Mutex::Autolock l(mPauseLock);
     mDoPause = paused;
     mDoPauseSignal.signal();
@@ -3812,6 +3799,7 @@
 
 status_t Camera3Device::RequestThread::waitUntilRequestProcessed(
         int32_t requestId, nsecs_t timeout) {
+    ATRACE_CALL();
     Mutex::Autolock l(mLatestRequestMutex);
     status_t res;
     while (mLatestRequestId != requestId) {
@@ -3838,6 +3826,7 @@
 }
 
 void Camera3Device::RequestThread::checkAndStopRepeatingRequest() {
+    ATRACE_CALL();
     bool surfaceAbandoned = false;
     int64_t lastFrameNumber = 0;
     sp<NotificationListener> listener;
@@ -3866,6 +3855,7 @@
 }
 
 bool Camera3Device::RequestThread::sendRequestsBatch() {
+    ATRACE_CALL();
     status_t res;
     size_t batchSize = mNextRequests.size();
     std::vector<camera3_capture_request_t*> requests(batchSize);
@@ -4261,6 +4251,7 @@
 }
 
 CameraMetadata Camera3Device::RequestThread::getLatestRequest() const {
+    ATRACE_CALL();
     Mutex::Autolock al(mLatestRequestMutex);
 
     ALOGV("RequestThread::%s", __FUNCTION__);
@@ -4270,6 +4261,7 @@
 
 bool Camera3Device::RequestThread::isStreamPending(
         sp<Camera3StreamInterface>& stream) {
+    ATRACE_CALL();
     Mutex::Autolock l(mRequestLock);
 
     for (const auto& nextRequest : mNextRequests) {
@@ -4299,6 +4291,7 @@
 }
 
 nsecs_t Camera3Device::getExpectedInFlightDuration() {
+    ATRACE_CALL();
     Mutex::Autolock al(mInFlightLock);
     return mExpectedInflightDuration > kMinInflightDuration ?
             mExpectedInflightDuration : kMinInflightDuration;
@@ -4370,6 +4363,7 @@
 }
 
 void Camera3Device::RequestThread::waitForNextRequestBatch() {
+    ATRACE_CALL();
     // Optimized a bit for the simple steady-state case (single repeating
     // request), to avoid putting that request in the queue temporarily.
     Mutex::Autolock l(mRequestLock);
@@ -4519,6 +4513,7 @@
 }
 
 bool Camera3Device::RequestThread::waitIfPaused() {
+    ATRACE_CALL();
     status_t res;
     Mutex::Autolock l(mPauseLock);
     while (mDoPause) {
@@ -4543,6 +4538,7 @@
 }
 
 void Camera3Device::RequestThread::unpauseForNewRequests() {
+    ATRACE_CALL();
     // With work to do, mark thread as unpaused.
     // If paused by request (setPaused), don't resume, to avoid
     // extra signaling/waiting overhead to waitUntilPaused
@@ -4574,7 +4570,7 @@
 
 status_t Camera3Device::RequestThread::insertTriggers(
         const sp<CaptureRequest> &request) {
-
+    ATRACE_CALL();
     Mutex::Autolock al(mTriggerMutex);
 
     sp<Camera3Device> parent = mParent.promote();
@@ -4663,6 +4659,7 @@
 
 status_t Camera3Device::RequestThread::removeTriggers(
         const sp<CaptureRequest> &request) {
+    ATRACE_CALL();
     Mutex::Autolock al(mTriggerMutex);
 
     CameraMetadata &metadata = request->mSettings;
@@ -4779,6 +4776,7 @@
 }
 
 status_t Camera3Device::PreparerThread::prepare(int maxCount, sp<Camera3StreamInterface>& stream) {
+    ATRACE_CALL();
     status_t res;
 
     Mutex::Autolock l(mLock);
@@ -4822,6 +4820,7 @@
 }
 
 status_t Camera3Device::PreparerThread::clear() {
+    ATRACE_CALL();
     Mutex::Autolock l(mLock);
 
     for (const auto& stream : mPendingStreams) {
@@ -4834,6 +4833,7 @@
 }
 
 void Camera3Device::PreparerThread::setNotificationListener(wp<NotificationListener> listener) {
+    ATRACE_CALL();
     Mutex::Autolock l(mLock);
     mListener = listener;
 }
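
The configureStreams conversion above matches each framework stream against
the HAL's response by id, starting the scan at the same index on the
assumption that the HAL usually preserves stream order, and wrapping around
otherwise. A standalone sketch of that circular scan, with illustrative
names (HalStreamLite, findHalStream) that are not part of the AOSP sources:

    #include <cstdio>
    #include <vector>

    struct HalStreamLite { int id; };

    // Returns the index of the matching entry, or -1 if the id is absent.
    static int findHalStream(const std::vector<HalStreamLite>& streams,
                             size_t startIdx, int streamId) {
        size_t realIdx = startIdx;
        for (size_t n = 0; n < streams.size(); n++) {
            if (streams[realIdx].id == streamId) {
                return static_cast<int>(realIdx);
            }
            // Wrap before stepping past the last element, so realIdx stays
            // in bounds even when the scan starts mid-vector.
            realIdx = (realIdx >= streams.size() - 1) ? 0 : realIdx + 1;
        }
        return -1;
    }

    int main() {
        std::vector<HalStreamLite> halResponse = {{2}, {0}, {1}};
        printf("stream 1 found at index %d\n", findHalStream(halResponse, 1, 1));
        printf("stream 9 found at index %d\n", findHalStream(halResponse, 0, 9));
        return 0;
    }

Wrapping at size() - 1 matters: stepping past the last index before the
bounds test would read one element beyond the end of the response vector
whenever the scan starts mid-vector and wraps.
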
diff --git a/services/camera/libcameraservice/device3/Camera3Device.h b/services/camera/libcameraservice/device3/Camera3Device.h
index d700e03..b5f19d7 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.h
+++ b/services/camera/libcameraservice/device3/Camera3Device.h
@@ -117,12 +117,12 @@
             uint32_t width, uint32_t height, int format,
             android_dataspace dataSpace, camera3_stream_rotation_t rotation, int *id,
             int streamSetId = camera3::CAMERA3_STREAM_SET_ID_INVALID,
-            bool isShared = false, uint32_t consumerUsage = 0) override;
+            bool isShared = false, uint64_t consumerUsage = 0) override;
     status_t createStream(const std::vector<sp<Surface>>& consumers,
             bool hasDeferredConsumer, uint32_t width, uint32_t height, int format,
             android_dataspace dataSpace, camera3_stream_rotation_t rotation, int *id,
             int streamSetId = camera3::CAMERA3_STREAM_SET_ID_INVALID,
-            bool isShared = false, uint32_t consumerUsage = 0) override;
+            bool isShared = false, uint64_t consumerUsage = 0) override;
 
     status_t createInputStream(
             uint32_t width, uint32_t height, int format,
@@ -271,7 +271,6 @@
         void getInflightBufferKeys(std::vector<std::pair<int32_t, int32_t>>* out);
 
       private:
-        camera3_device_t *mHal3Device;
         sp<hardware::camera::device::V3_2::ICameraDeviceSession> mHidlSession;
         std::shared_ptr<RequestMetadataQueue> mRequestMetadataQueue;
 
@@ -590,7 +589,7 @@
     static hardware::graphics::common::V1_0::PixelFormat mapToPixelFormat(int frameworkFormat);
     static hardware::camera::device::V3_2::DataspaceFlags mapToHidlDataspace(
             android_dataspace dataSpace);
-    static hardware::camera::device::V3_2::BufferUsageFlags mapToConsumerUsage(uint32_t usage);
+    static hardware::camera::device::V3_2::BufferUsageFlags mapToConsumerUsage(uint64_t usage);
     static hardware::camera::device::V3_2::StreamRotation mapToStreamRotation(
             camera3_stream_rotation_t rotation);
     // Returns a negative error code if the passed-in operation mode is not valid.
@@ -598,9 +597,9 @@
             /*out*/ hardware::camera::device::V3_2::StreamConfigurationMode *mode);
     static camera3_buffer_status_t mapHidlBufferStatus(hardware::camera::device::V3_2::BufferStatus status);
     static int mapToFrameworkFormat(hardware::graphics::common::V1_0::PixelFormat pixelFormat);
-    static uint32_t mapConsumerToFrameworkUsage(
+    static uint64_t mapConsumerToFrameworkUsage(
             hardware::camera::device::V3_2::BufferUsageFlags usage);
-    static uint32_t mapProducerToFrameworkUsage(
+    static uint64_t mapProducerToFrameworkUsage(
             hardware::camera::device::V3_2::BufferUsageFlags usage);
 
     struct RequestTrigger {
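
The signature changes in this header widen every usage-flag path from
uint32_t to uint64_t, matching the 64-bit BufferUsageFlags bit field used
by the HIDL interface. A minimal sketch, assuming a hypothetical vendor
flag above bit 31, of what 32-bit plumbing silently drops:

    #include <cstdint>
    #include <cstdio>

    int main() {
        // Hypothetical flag above bit 31; real values come from the gralloc HAL.
        const uint64_t kHypotheticalVendorUsage = 1ULL << 48;
        uint64_t usage64 = kHypotheticalVendorUsage | 0x100;  // plus a low bit

        uint32_t truncated = static_cast<uint32_t>(usage64);  // 32-bit plumbing
        printf("64-bit flags: %#llx, after uint32_t: %#x\n",
               (unsigned long long) usage64, truncated);      // high bit lost
        return 0;
    }
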
diff --git a/services/camera/libcameraservice/device3/Camera3DummyStream.cpp b/services/camera/libcameraservice/device3/Camera3DummyStream.cpp
index 9c951b7..6e2978f 100644
--- a/services/camera/libcameraservice/device3/Camera3DummyStream.cpp
+++ b/services/camera/libcameraservice/device3/Camera3DummyStream.cpp
@@ -95,7 +95,7 @@
     return OK;
 }
 
-status_t Camera3DummyStream::getEndpointUsage(uint32_t *usage) const {
+status_t Camera3DummyStream::getEndpointUsage(uint64_t *usage) const {
     *usage = DUMMY_USAGE;
     return OK;
 }
diff --git a/services/camera/libcameraservice/device3/Camera3DummyStream.h b/services/camera/libcameraservice/device3/Camera3DummyStream.h
index 35a6a18..492fb49 100644
--- a/services/camera/libcameraservice/device3/Camera3DummyStream.h
+++ b/services/camera/libcameraservice/device3/Camera3DummyStream.h
@@ -94,7 +94,7 @@
     static const int DUMMY_FORMAT = HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
     static const android_dataspace DUMMY_DATASPACE = HAL_DATASPACE_UNKNOWN;
     static const camera3_stream_rotation_t DUMMY_ROTATION = CAMERA3_STREAM_ROTATION_0;
-    static const uint32_t DUMMY_USAGE = GRALLOC_USAGE_HW_COMPOSER;
+    static const uint64_t DUMMY_USAGE = GRALLOC_USAGE_HW_COMPOSER;
 
     /**
      * Internal Camera3Stream interface
@@ -107,7 +107,7 @@
 
     virtual status_t configureQueueLocked();
 
-    virtual status_t getEndpointUsage(uint32_t *usage) const;
+    virtual status_t getEndpointUsage(uint64_t *usage) const;
 
 }; // class Camera3DummyStream
 
diff --git a/services/camera/libcameraservice/device3/Camera3IOStreamBase.cpp b/services/camera/libcameraservice/device3/Camera3IOStreamBase.cpp
index 7ad2300..a52422d 100644
--- a/services/camera/libcameraservice/device3/Camera3IOStreamBase.cpp
+++ b/services/camera/libcameraservice/device3/Camera3IOStreamBase.cpp
@@ -69,7 +69,7 @@
     (void) args;
     String8 lines;
 
-    uint32_t consumerUsage = 0;
+    uint64_t consumerUsage = 0;
     status_t res = getEndpointUsage(&consumerUsage);
     if (res != OK) consumerUsage = 0;
 
@@ -78,8 +78,8 @@
             camera3_stream::width, camera3_stream::height,
             camera3_stream::format, camera3_stream::data_space);
     lines.appendFormat("      Max size: %zu\n", mMaxSize);
-    lines.appendFormat("      Combined usage: %d, max HAL buffers: %d\n",
-            camera3_stream::usage | consumerUsage, camera3_stream::max_buffers);
+    lines.appendFormat("      Combined usage: %" PRIu64 ", max HAL buffers: %d\n",
+            mUsage | consumerUsage, camera3_stream::max_buffers);
     lines.appendFormat("      Frames produced: %d, last timestamp: %" PRId64 " ns\n",
             mFrameCount, mLastTimestamp);
     lines.appendFormat("      Total buffers: %zu, currently dequeued: %zu\n",
diff --git a/services/camera/libcameraservice/device3/Camera3IOStreamBase.h b/services/camera/libcameraservice/device3/Camera3IOStreamBase.h
index 35dda39..2376058 100644
--- a/services/camera/libcameraservice/device3/Camera3IOStreamBase.h
+++ b/services/camera/libcameraservice/device3/Camera3IOStreamBase.h
@@ -85,7 +85,7 @@
 
     virtual size_t   getHandoutInputBufferCountLocked();
 
-    virtual status_t getEndpointUsage(uint32_t *usage) const = 0;
+    virtual status_t getEndpointUsage(uint64_t *usage) const = 0;
 
     status_t getBufferPreconditionCheckLocked() const;
     status_t returnBufferPreconditionCheckLocked() const;
diff --git a/services/camera/libcameraservice/device3/Camera3InputStream.cpp b/services/camera/libcameraservice/device3/Camera3InputStream.cpp
index ff2dcef..2cb1ea7 100644
--- a/services/camera/libcameraservice/device3/Camera3InputStream.cpp
+++ b/services/camera/libcameraservice/device3/Camera3InputStream.cpp
@@ -258,7 +258,7 @@
             camera3_stream::max_buffers : minBufs;
         // TODO: somehow set the total buffer count when producer connects?
 
-        mConsumer = new BufferItemConsumer(consumer, camera3_stream::usage,
+        mConsumer = new BufferItemConsumer(consumer, mUsage,
                                            mTotalBufferCount);
         mConsumer->setName(String8::format("Camera3-InputStream-%d", mId));
 
@@ -284,7 +284,7 @@
     return OK;
 }
 
-status_t Camera3InputStream::getEndpointUsage(uint32_t *usage) const {
+status_t Camera3InputStream::getEndpointUsage(uint64_t *usage) const {
     // Per HAL3 spec, input streams have 0 for their initial usage field.
     *usage = 0;
     return OK;
diff --git a/services/camera/libcameraservice/device3/Camera3InputStream.h b/services/camera/libcameraservice/device3/Camera3InputStream.h
index 8f5b431..81226f8 100644
--- a/services/camera/libcameraservice/device3/Camera3InputStream.h
+++ b/services/camera/libcameraservice/device3/Camera3InputStream.h
@@ -76,7 +76,7 @@
 
     virtual status_t configureQueueLocked();
 
-    virtual status_t getEndpointUsage(uint32_t *usage) const;
+    virtual status_t getEndpointUsage(uint64_t *usage) const;
 
     /**
      * BufferItemConsumer::BufferFreedListener interface
diff --git a/services/camera/libcameraservice/device3/Camera3OutputStream.cpp b/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
index b02cd6a..dcaefe3 100644
--- a/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
+++ b/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
@@ -90,7 +90,7 @@
 
 Camera3OutputStream::Camera3OutputStream(int id,
         uint32_t width, uint32_t height, int format,
-        uint32_t consumerUsage, android_dataspace dataSpace,
+        uint64_t consumerUsage, android_dataspace dataSpace,
         camera3_stream_rotation_t rotation, nsecs_t timestampOffset, int setId) :
         Camera3IOStreamBase(id, CAMERA3_STREAM_OUTPUT, width, height,
                             /*maxSize*/0, format, dataSpace, rotation, setId),
@@ -111,7 +111,8 @@
     // Sanity check for the consumer usage flag.
     if ((consumerUsage & GraphicBuffer::USAGE_HW_TEXTURE) == 0 &&
             (consumerUsage & GraphicBuffer::USAGE_HW_COMPOSER) == 0) {
-        ALOGE("%s: Deferred consumer usage flag is illegal (0x%x)!", __FUNCTION__, consumerUsage);
+        ALOGE("%s: Deferred consumer usage flag is illegal %" PRIu64 "!",
+              __FUNCTION__, consumerUsage);
         mState = STATE_ERROR;
     }
 
@@ -127,7 +128,7 @@
                                          int format,
                                          android_dataspace dataSpace,
                                          camera3_stream_rotation_t rotation,
-                                         uint32_t consumerUsage, nsecs_t timestampOffset,
+                                         uint64_t consumerUsage, nsecs_t timestampOffset,
                                          int setId) :
         Camera3IOStreamBase(id, type, width, height,
                             /*maxSize*/0,
@@ -365,10 +366,10 @@
 
     mConsumerName = mConsumer->getConsumerName();
 
-    res = native_window_set_usage(mConsumer.get(), camera3_stream::usage);
+    res = native_window_set_usage(mConsumer.get(), mUsage);
     if (res != OK) {
-        ALOGE("%s: Unable to configure usage %08x for stream %d",
-                __FUNCTION__, camera3_stream::usage, mId);
+        ALOGE("%s: Unable to configure usage %" PRIu64 " for stream %d",
+                __FUNCTION__, mUsage, mId);
         return res;
     }
 
@@ -461,11 +462,11 @@
      * HAL3.2 devices may not support dynamic buffer registration.
      */
     if (mBufferManager != 0 && mSetId > CAMERA3_STREAM_SET_ID_INVALID) {
-        uint32_t consumerUsage = 0;
+        uint64_t consumerUsage = 0;
         getEndpointUsage(&consumerUsage);
         StreamInfo streamInfo(
                 getId(), getStreamSetId(), getWidth(), getHeight(), getFormat(), getDataSpace(),
-                camera3_stream::usage | consumerUsage, mTotalBufferCount,
+                mUsage | consumerUsage, mTotalBufferCount,
                 /*isConfigured*/true);
         wp<Camera3OutputStream> weakThis(this);
         res = mBufferManager->registerStream(weakThis,
@@ -628,7 +629,7 @@
     return OK;
 }
 
-status_t Camera3OutputStream::getEndpointUsage(uint32_t *usage) const {
+status_t Camera3OutputStream::getEndpointUsage(uint64_t *usage) const {
 
     status_t res;
 
@@ -643,14 +644,12 @@
     return res;
 }
 
-status_t Camera3OutputStream::getEndpointUsageForSurface(uint32_t *usage,
+status_t Camera3OutputStream::getEndpointUsageForSurface(uint64_t *usage,
         const sp<Surface>& surface) const {
     status_t res;
-    int32_t u = 0;
+    uint64_t u = 0;
 
-    res = static_cast<ANativeWindow*>(surface.get())->query(surface.get(),
-            NATIVE_WINDOW_CONSUMER_USAGE_BITS, &u);
-
+    res = native_window_get_consumer_usage(static_cast<ANativeWindow*>(surface.get()), &u);
     // If an opaque output stream's endpoint is ImageReader, add
     // GRALLOC_USAGE_HW_CAMERA_ZSL to the usage so HAL knows it will be used
     // for the ZSL use case.
@@ -670,7 +669,7 @@
 }
 
 bool Camera3OutputStream::isVideoStream() const {
-    uint32_t usage = 0;
+    uint64_t usage = 0;
     status_t res = getEndpointUsage(&usage);
     if (res != OK) {
         ALOGE("%s: getting end point usage failed: %s (%d).", __FUNCTION__, strerror(-res), res);
@@ -813,7 +812,7 @@
 }
 
 bool Camera3OutputStream::isConsumedByHWComposer() const {
-    uint32_t usage = 0;
+    uint64_t usage = 0;
     status_t res = getEndpointUsage(&usage);
     if (res != OK) {
         ALOGE("%s: getting end point usage failed: %s (%d).", __FUNCTION__, strerror(-res), res);
@@ -824,7 +823,7 @@
 }
 
 bool Camera3OutputStream::isConsumedByHWTexture() const {
-    uint32_t usage = 0;
+    uint64_t usage = 0;
     status_t res = getEndpointUsage(&usage);
     if (res != OK) {
         ALOGE("%s: getting end point usage failed: %s (%d).", __FUNCTION__, strerror(-res), res);
diff --git a/services/camera/libcameraservice/device3/Camera3OutputStream.h b/services/camera/libcameraservice/device3/Camera3OutputStream.h
index 97aa7d4..7023d5d 100644
--- a/services/camera/libcameraservice/device3/Camera3OutputStream.h
+++ b/services/camera/libcameraservice/device3/Camera3OutputStream.h
@@ -44,7 +44,7 @@
     uint32_t height;
     uint32_t format;
     android_dataspace dataSpace;
-    uint32_t combinedUsage;
+    uint64_t combinedUsage;
     size_t totalBufferCount;
     bool isConfigured;
     explicit StreamInfo(int id = CAMERA3_STREAM_ID_INVALID,
@@ -53,7 +53,7 @@
             uint32_t h = 0,
             uint32_t fmt = 0,
             android_dataspace ds = HAL_DATASPACE_UNKNOWN,
-            uint32_t usage = 0,
+            uint64_t usage = 0,
             size_t bufferCount = 0,
             bool configured = false) :
                 streamId(id),
@@ -101,7 +101,7 @@
      * stream set id needs to be set to support buffer sharing between multiple streams.
      */
     Camera3OutputStream(int id, uint32_t width, uint32_t height, int format,
-            uint32_t consumerUsage, android_dataspace dataSpace,
+            uint64_t consumerUsage, android_dataspace dataSpace,
             camera3_stream_rotation_t rotation, nsecs_t timestampOffset,
             int setId = CAMERA3_STREAM_SET_ID_INVALID);
 
@@ -176,7 +176,7 @@
     Camera3OutputStream(int id, camera3_stream_type_t type,
             uint32_t width, uint32_t height, int format,
             android_dataspace dataSpace, camera3_stream_rotation_t rotation,
-            uint32_t consumerUsage = 0, nsecs_t timestampOffset = 0,
+            uint64_t consumerUsage = 0, nsecs_t timestampOffset = 0,
             int setId = CAMERA3_STREAM_SET_ID_INVALID);
 
     /**
@@ -191,14 +191,14 @@
 
     virtual status_t disconnectLocked();
 
-    status_t getEndpointUsageForSurface(uint32_t *usage,
+    status_t getEndpointUsageForSurface(uint64_t *usage,
             const sp<Surface>& surface) const;
     status_t configureConsumerQueueLocked();
 
     // Consumer as the output of camera HAL
     sp<Surface> mConsumer;
 
-    uint32_t getPresetConsumerUsage() const { return mConsumerUsage; }
+    uint64_t getPresetConsumerUsage() const { return mConsumerUsage; }
 
     static const nsecs_t       kDequeueBufferTimeout   = 1000000000; // 1 sec
 
@@ -245,7 +245,7 @@
      * Consumer end point usage flag set by the constructor for the deferred
      * consumer case.
      */
-    uint32_t    mConsumerUsage;
+    uint64_t    mConsumerUsage;
 
     /**
      * Internal Camera3Stream interface
@@ -262,7 +262,7 @@
 
     virtual status_t configureQueueLocked();
 
-    virtual status_t getEndpointUsage(uint32_t *usage) const;
+    virtual status_t getEndpointUsage(uint64_t *usage) const;
 
     /**
      * Private methods
diff --git a/services/camera/libcameraservice/device3/Camera3SharedOutputStream.cpp b/services/camera/libcameraservice/device3/Camera3SharedOutputStream.cpp
index 2ae5660..5051711 100644
--- a/services/camera/libcameraservice/device3/Camera3SharedOutputStream.cpp
+++ b/services/camera/libcameraservice/device3/Camera3SharedOutputStream.cpp
@@ -23,7 +23,7 @@
 Camera3SharedOutputStream::Camera3SharedOutputStream(int id,
         const std::vector<sp<Surface>>& surfaces,
         uint32_t width, uint32_t height, int format,
-        uint32_t consumerUsage, android_dataspace dataSpace,
+        uint64_t consumerUsage, android_dataspace dataSpace,
         camera3_stream_rotation_t rotation,
         nsecs_t timestampOffset, int setId) :
         Camera3OutputStream(id, CAMERA3_STREAM_OUTPUT, width, height,
@@ -41,7 +41,7 @@
 
     mStreamSplitter = new Camera3StreamSplitter();
 
-    uint32_t usage;
+    uint64_t usage;
     getEndpointUsage(&usage);
 
     res = mStreamSplitter->connect(mSurfaces, usage, camera3_stream::max_buffers, &mConsumer);
@@ -191,10 +191,10 @@
     return res;
 }
 
-status_t Camera3SharedOutputStream::getEndpointUsage(uint32_t *usage) const {
+status_t Camera3SharedOutputStream::getEndpointUsage(uint64_t *usage) const {
 
     status_t res = OK;
-    uint32_t u = 0;
+    uint64_t u = 0;
 
     if (mConsumer == nullptr) {
         // Called before shared buffer queue is constructed.
diff --git a/services/camera/libcameraservice/device3/Camera3SharedOutputStream.h b/services/camera/libcameraservice/device3/Camera3SharedOutputStream.h
index 7be0940..22bb2fc 100644
--- a/services/camera/libcameraservice/device3/Camera3SharedOutputStream.h
+++ b/services/camera/libcameraservice/device3/Camera3SharedOutputStream.h
@@ -34,7 +34,7 @@
      */
     Camera3SharedOutputStream(int id, const std::vector<sp<Surface>>& surfaces,
             uint32_t width, uint32_t height, int format,
-            uint32_t consumerUsage, android_dataspace dataSpace,
+            uint64_t consumerUsage, android_dataspace dataSpace,
             camera3_stream_rotation_t rotation, nsecs_t timestampOffset,
             int setId = CAMERA3_STREAM_SET_ID_INVALID);
 
@@ -74,7 +74,7 @@
 
     virtual status_t disconnectLocked();
 
-    virtual status_t getEndpointUsage(uint32_t *usage) const;
+    virtual status_t getEndpointUsage(uint64_t *usage) const;
 
 }; // class Camera3SharedOutputStream
 
diff --git a/services/camera/libcameraservice/device3/Camera3Stream.cpp b/services/camera/libcameraservice/device3/Camera3Stream.cpp
index 9e6ac79..25e44a5 100644
--- a/services/camera/libcameraservice/device3/Camera3Stream.cpp
+++ b/services/camera/libcameraservice/device3/Camera3Stream.cpp
@@ -56,6 +56,7 @@
     mState(STATE_CONSTRUCTED),
     mStatusId(StatusTracker::NO_STATUS_ID),
     mStreamUnpreparable(true),
+    mUsage(0),
     mOldUsage(0),
     mOldMaxBuffers(0),
     mPrepared(false),
@@ -69,7 +70,6 @@
     camera3_stream::format = format;
     camera3_stream::data_space = dataSpace;
     camera3_stream::rotation = rotation;
-    camera3_stream::usage = 0;
     camera3_stream::max_buffers = 0;
     camera3_stream::priv = NULL;
 
@@ -104,6 +104,14 @@
     return camera3_stream::data_space;
 }
 
+uint64_t Camera3Stream::getUsage() const {
+    return mUsage;
+}
+
+void Camera3Stream::setUsage(uint64_t usage) {
+    mUsage = usage;
+}
+
 camera3_stream* Camera3Stream::startConfiguration() {
     ATRACE_CALL();
     Mutex::Autolock l(mLock);
@@ -133,10 +141,10 @@
             return NULL;
     }
 
-    mOldUsage = camera3_stream::usage;
+    mOldUsage = mUsage;
     mOldMaxBuffers = camera3_stream::max_buffers;
 
-    res = getEndpointUsage(&(camera3_stream::usage));
+    res = getEndpointUsage(&mUsage);
     if (res != OK) {
         ALOGE("%s: Cannot query consumer endpoint usage!",
                 __FUNCTION__);
@@ -197,7 +205,7 @@
     // Check if the stream configuration is unchanged, and skip reallocation if
     // so. As documented in hardware/camera3.h:configure_streams().
     if (mState == STATE_IN_RECONFIG &&
-            mOldUsage == camera3_stream::usage &&
+            mOldUsage == mUsage &&
             mOldMaxBuffers == camera3_stream::max_buffers) {
         mState = STATE_CONFIGURED;
         return OK;
@@ -243,7 +251,7 @@
             return INVALID_OPERATION;
     }
 
-    camera3_stream::usage = mOldUsage;
+    mUsage = mOldUsage;
     camera3_stream::max_buffers = mOldMaxBuffers;
 
     mState = (mState == STATE_IN_RECONFIG) ? STATE_CONFIGURED : STATE_CONSTRUCTED;
diff --git a/services/camera/libcameraservice/device3/Camera3Stream.h b/services/camera/libcameraservice/device3/Camera3Stream.h
index 44fe6b6..9090f83 100644
--- a/services/camera/libcameraservice/device3/Camera3Stream.h
+++ b/services/camera/libcameraservice/device3/Camera3Stream.h
@@ -144,6 +144,8 @@
     uint32_t          getHeight() const;
     int               getFormat() const;
     android_dataspace getDataSpace() const;
+    uint64_t          getUsage() const;
+    void              setUsage(uint64_t usage);
 
     camera3_stream*   asHalStream() override {
         return this;
@@ -459,7 +461,7 @@
 
     // Get the usage flags for the other endpoint, or return
     // INVALID_OPERATION if they cannot be obtained.
-    virtual status_t getEndpointUsage(uint32_t *usage) const = 0;
+    virtual status_t getEndpointUsage(uint64_t *usage) const = 0;
 
     // Return whether the buffer is in the list of outstanding buffers.
     bool isOutstandingBuffer(const camera3_stream_buffer& buffer) const;
@@ -473,8 +475,10 @@
     // prepareNextBuffer called on it.
     bool mStreamUnpreparable;
 
+    uint64_t mUsage;
+
   private:
-    uint32_t mOldUsage;
+    uint64_t mOldUsage;
     uint32_t mOldMaxBuffers;
     Condition mOutputBufferReturnedSignal;
     Condition mInputBufferReturnedSignal;
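
With the usage flags moved out of the raw camera3_stream struct into the
private mUsage member, the snapshot-and-compare shortcut around
reconfiguration (mOldUsage) keeps working unchanged. A standalone sketch of
that pattern; StreamLite and its members are illustrative, not the real
class:

    #include <cstdint>
    #include <cstdio>

    class StreamLite {
      public:
        void setUsage(uint64_t usage) { mUsage = usage; }
        uint64_t getUsage() const { return mUsage; }

        // Snapshot the current flags when configuration starts.
        void startConfiguration() { mOldUsage = mUsage; }

        // Returns true when nothing changed and reallocation can be skipped.
        bool finishConfiguration() const { return mOldUsage == mUsage; }

        // Roll back to the snapshot if configuration is abandoned.
        void cancelConfiguration() { mUsage = mOldUsage; }

      private:
        uint64_t mUsage = 0;
        uint64_t mOldUsage = 0;
    };

    int main() {
        StreamLite stream;
        stream.setUsage(0x100);
        stream.startConfiguration();
        stream.setUsage(0x100);  // endpoint reports the same flags
        printf("skip reallocation: %s\n",
               stream.finishConfiguration() ? "yes" : "no");
        return 0;
    }
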
diff --git a/services/camera/libcameraservice/device3/Camera3StreamSplitter.cpp b/services/camera/libcameraservice/device3/Camera3StreamSplitter.cpp
index 869e93a..a0a50c2 100644
--- a/services/camera/libcameraservice/device3/Camera3StreamSplitter.cpp
+++ b/services/camera/libcameraservice/device3/Camera3StreamSplitter.cpp
@@ -39,7 +39,7 @@
 namespace android {
 
 status_t Camera3StreamSplitter::connect(const std::vector<sp<Surface> >& surfaces,
-        uint32_t consumerUsage, size_t halMaxBuffers, sp<Surface>* consumer) {
+        uint64_t consumerUsage, size_t halMaxBuffers, sp<Surface>* consumer) {
     ATRACE_CALL();
     if (consumer == nullptr) {
         SP_LOGE("%s: consumer pointer is NULL", __FUNCTION__);
@@ -195,10 +195,8 @@
 
     // Set dequeueBuffer/attachBuffer timeout if the consumer is not hw composer or hw texture.
     // We need to skip these cases, as a timeout would disable the non-blocking (async) mode.
-    int32_t usage = 0;
-    static_cast<ANativeWindow*>(outputQueue.get())->query(
-            outputQueue.get(),
-            NATIVE_WINDOW_CONSUMER_USAGE_BITS, &usage);
+    uint64_t usage = 0;
+    res = native_window_get_consumer_usage(static_cast<ANativeWindow*>(outputQueue.get()), &usage);
     if (!(usage & (GRALLOC_USAGE_HW_COMPOSER | GRALLOC_USAGE_HW_TEXTURE))) {
         outputQueue->setDequeueTimeout(kDequeueBufferTimeout);
     }
diff --git a/services/camera/libcameraservice/device3/Camera3StreamSplitter.h b/services/camera/libcameraservice/device3/Camera3StreamSplitter.h
index cc623e0..3b8839e 100644
--- a/services/camera/libcameraservice/device3/Camera3StreamSplitter.h
+++ b/services/camera/libcameraservice/device3/Camera3StreamSplitter.h
@@ -52,7 +52,7 @@
     // Connect to the stream splitter by creating buffer queue and connecting it
     // with output surfaces.
     status_t connect(const std::vector<sp<Surface> >& surfaces,
-            uint32_t consumerUsage, size_t halMaxBuffers,
+            uint64_t consumerUsage, size_t halMaxBuffers,
             sp<Surface>* consumer);
 
     // addOutput adds an output BufferQueue to the splitter. The splitter
diff --git a/services/camera/libcameraservice/gui/RingBufferConsumer.cpp b/services/camera/libcameraservice/gui/RingBufferConsumer.cpp
index 3d54460..ee018c3 100644
--- a/services/camera/libcameraservice/gui/RingBufferConsumer.cpp
+++ b/services/camera/libcameraservice/gui/RingBufferConsumer.cpp
@@ -38,7 +38,7 @@
 namespace android {
 
 RingBufferConsumer::RingBufferConsumer(const sp<IGraphicBufferConsumer>& consumer,
-        uint32_t consumerUsage,
+        uint64_t consumerUsage,
         int bufferCount) :
     ConsumerBase(consumer),
     mBufferCount(bufferCount),
@@ -368,7 +368,7 @@
     return mConsumer->setDefaultBufferFormat(defaultFormat);
 }
 
-status_t RingBufferConsumer::setConsumerUsage(uint32_t usage) {
+status_t RingBufferConsumer::setConsumerUsage(uint64_t usage) {
     Mutex::Autolock _l(mMutex);
     return mConsumer->setConsumerUsageBits(usage);
 }
diff --git a/services/camera/libcameraservice/gui/RingBufferConsumer.h b/services/camera/libcameraservice/gui/RingBufferConsumer.h
index 2bafe4a..b737469 100644
--- a/services/camera/libcameraservice/gui/RingBufferConsumer.h
+++ b/services/camera/libcameraservice/gui/RingBufferConsumer.h
@@ -60,7 +60,7 @@
     // the consumer usage flags passed to the graphics allocator. The
     // bufferCount parameter specifies how many buffers can be pinned for user
     // access at the same time.
-    RingBufferConsumer(const sp<IGraphicBufferConsumer>& consumer, uint32_t consumerUsage,
+    RingBufferConsumer(const sp<IGraphicBufferConsumer>& consumer, uint64_t consumerUsage,
             int bufferCount);
 
     virtual ~RingBufferConsumer();
@@ -80,7 +80,7 @@
 
     // setConsumerUsage allows the BufferQueue consumer usage to be
     // set at a later time after construction.
-    status_t setConsumerUsage(uint32_t usage);
+    status_t setConsumerUsage(uint64_t usage);
 
     // Buffer info, minus the graphics buffer/slot itself.
     struct BufferInfo {
diff --git a/services/oboeservice/AAudioServiceStreamMMAP.cpp b/services/oboeservice/AAudioServiceStreamMMAP.cpp
index 970d734..68dcaff 100644
--- a/services/oboeservice/AAudioServiceStreamMMAP.cpp
+++ b/services/oboeservice/AAudioServiceStreamMMAP.cpp
@@ -31,6 +31,7 @@
 #include "SharedMemoryProxy.h"
 #include "utility/AAudioUtilities.h"
 
+using android::base::unique_fd;
 using namespace android;
 using namespace aaudio;
 
@@ -69,11 +70,6 @@
         AudioClock::sleepForNanos(100 * AAUDIO_NANOS_PER_MILLISECOND);
     }
 
-    if (mAudioDataFileDescriptor != -1) {
-        ::close(mAudioDataFileDescriptor);
-        mAudioDataFileDescriptor = -1;
-    }
-
     return AAudioServiceStreamBase::close();
 }
 
@@ -193,7 +189,13 @@
                            ? audio_channel_count_from_out_mask(config.channel_mask)
                            : audio_channel_count_from_in_mask(config.channel_mask);
 
-    mAudioDataFileDescriptor = mMmapBufferinfo.shared_memory_fd;
+    // AAudio creates a copy of this FD and retains ownership of the copy.
+    // Assume that AudioFlinger will close the original shared_memory_fd.
+    mAudioDataFileDescriptor.reset(dup(mMmapBufferinfo.shared_memory_fd));
+    if (mAudioDataFileDescriptor.get() == -1) {
+        ALOGE("AAudioServiceStreamMMAP::open() - could not dup shared_memory_fd");
+        return AAUDIO_ERROR_INTERNAL; // TODO review
+    }
     mFramesPerBurst = mMmapBufferinfo.burst_size_frames;
     mAudioFormat = AAudioConvert_androidToAAudioDataFormat(config.format);
     mSampleRate = config.sample_rate;
diff --git a/services/oboeservice/AAudioServiceStreamMMAP.h b/services/oboeservice/AAudioServiceStreamMMAP.h
index e6f8fad..e631fd3 100644
--- a/services/oboeservice/AAudioServiceStreamMMAP.h
+++ b/services/oboeservice/AAudioServiceStreamMMAP.h
@@ -19,6 +19,7 @@
 
 #include <atomic>
 
+#include <android-base/unique_fd.h>
 #include <media/audiohal/StreamHalInterface.h>
 #include <media/MmapStreamCallback.h>
 #include <media/MmapStreamInterface.h>
@@ -33,6 +34,7 @@
 #include "TimestampScheduler.h"
 #include "utility/MonotonicCounter.h"
 
+
 namespace aaudio {
 
     /**
@@ -133,9 +135,9 @@
     MonotonicCounter                    mFramesWritten;
     MonotonicCounter                    mFramesRead;
     int32_t                             mPreviousFrameCounter = 0;   // from HAL
-    int                                 mAudioDataFileDescriptor = -1;
     int64_t                             mHardwareTimeOffsetNanos = 0; // TODO get from HAL
 
+
     // Interface to the AudioFlinger MMAP support.
     android::sp<android::MmapStreamInterface> mMmapStream;
     struct audio_mmap_buffer_info             mMmapBufferinfo;
@@ -143,6 +145,7 @@
     audio_port_handle_t                       mDeviceId = AUDIO_PORT_HANDLE_NONE;
     android::AudioClient                      mServiceClient;
     bool                                      mInService = false;
+    android::base::unique_fd                  mAudioDataFileDescriptor;
 };
 
 } // namespace aaudio
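
Replacing the raw int member with android::base::unique_fd removes the
manual close() blocks deleted above: the descriptor is closed exactly once,
when its owner is reset or destroyed. A minimal stand-in sketch of that
RAII pattern, including the dup-and-own step from open(); UniqueFd here is
not the real class:

    #include <cstdio>
    #include <fcntl.h>
    #include <unistd.h>

    class UniqueFd {
      public:
        explicit UniqueFd(int fd = -1) : mFd(fd) {}
        ~UniqueFd() { reset(); }
        UniqueFd(const UniqueFd&) = delete;
        UniqueFd& operator=(const UniqueFd&) = delete;

        // Close the current descriptor, if any, and adopt a new one.
        void reset(int fd = -1) {
            if (mFd != -1) close(mFd);
            mFd = fd;
        }
        int get() const { return mFd; }

      private:
        int mFd;
    };

    int main() {
        UniqueFd fd(open("/dev/null", O_RDONLY));
        if (fd.get() == -1) {
            perror("open");
            return 1;
        }
        // Mirror the dup-and-own step: the copy is owned, the original is not.
        UniqueFd copy(dup(fd.get()));
        printf("original fd %d, owned dup %d\n", fd.get(), copy.get());
        return 0;  // both descriptors closed by the destructors
    }
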
diff --git a/services/oboeservice/AAudioServiceStreamShared.cpp b/services/oboeservice/AAudioServiceStreamShared.cpp
index d648c6d..57990ce 100644
--- a/services/oboeservice/AAudioServiceStreamShared.cpp
+++ b/services/oboeservice/AAudioServiceStreamShared.cpp
@@ -131,7 +131,7 @@
     aaudio_direction_t direction = request.getDirection();
 
     AAudioEndpointManager &mEndpointManager = AAudioEndpointManager::getInstance();
-    mServiceEndpoint = mEndpointManager.openEndpoint(mAudioService, configurationOutput, direction);
+    mServiceEndpoint = mEndpointManager.openEndpoint(mAudioService, configurationInput, direction);
     if (mServiceEndpoint == nullptr) {
         ALOGE("AAudioServiceStreamShared::open() mServiceEndPoint = %p", mServiceEndpoint);
         return AAUDIO_ERROR_UNAVAILABLE;
diff --git a/services/oboeservice/SharedRingBuffer.cpp b/services/oboeservice/SharedRingBuffer.cpp
index 6b3fb4c..83b25b3 100644
--- a/services/oboeservice/SharedRingBuffer.cpp
+++ b/services/oboeservice/SharedRingBuffer.cpp
@@ -35,11 +35,6 @@
         munmap(mSharedMemory, mSharedMemorySizeInBytes);
         mSharedMemory = nullptr;
     }
-    if (mFileDescriptor != -1) {
-        ALOGV("SharedRingBuffer: LEAK? close(mFileDescriptor = %d)\n", mFileDescriptor);
-        close(mFileDescriptor);
-        mFileDescriptor = -1;
-    }
 }
 
 aaudio_result_t SharedRingBuffer::allocate(fifo_frames_t   bytesPerFrame,
@@ -49,17 +44,17 @@
     // Create shared memory large enough to hold the data and the read and write counters.
     mDataMemorySizeInBytes = bytesPerFrame * capacityInFrames;
     mSharedMemorySizeInBytes = mDataMemorySizeInBytes + (2 * (sizeof(fifo_counter_t)));
-    mFileDescriptor = ashmem_create_region("AAudioSharedRingBuffer", mSharedMemorySizeInBytes);
-    ALOGV("SharedRingBuffer::allocate() LEAK? mFileDescriptor = %d\n", mFileDescriptor);
-    if (mFileDescriptor < 0) {
+    mFileDescriptor.reset(ashmem_create_region("AAudioSharedRingBuffer", mSharedMemorySizeInBytes));
+    if (mFileDescriptor.get() == -1) {
         ALOGE("SharedRingBuffer::allocate() ashmem_create_region() failed %d", errno);
         return AAUDIO_ERROR_INTERNAL;
     }
+    ALOGV("SharedRingBuffer::allocate() mFileDescriptor = %d\n", mFileDescriptor.get());
 
-    int err = ashmem_set_prot_region(mFileDescriptor, PROT_READ|PROT_WRITE); // TODO error handling?
+    int err = ashmem_set_prot_region(mFileDescriptor.get(), PROT_READ|PROT_WRITE); // TODO error handling?
     if (err < 0) {
         ALOGE("SharedRingBuffer::allocate() ashmem_set_prot_region() failed %d", errno);
-        close(mFileDescriptor);
+        mFileDescriptor.reset();
         return AAUDIO_ERROR_INTERNAL; // TODO convert errno to a better AAUDIO_ERROR;
     }
 
@@ -67,10 +62,10 @@
     mSharedMemory = (uint8_t *) mmap(0, mSharedMemorySizeInBytes,
                          PROT_READ|PROT_WRITE,
                          MAP_SHARED,
-                         mFileDescriptor, 0);
+                         mFileDescriptor.get(), 0);
     if (mSharedMemory == MAP_FAILED) {
         ALOGE("SharedRingBuffer::allocate() mmap() failed %d", errno);
-        close(mFileDescriptor);
+        mFileDescriptor.reset();
         return AAUDIO_ERROR_INTERNAL; // TODO convert errno to a better AAUDIO_ERROR;
     }
 
diff --git a/services/oboeservice/SharedRingBuffer.h b/services/oboeservice/SharedRingBuffer.h
index a2c3766..79169bc 100644
--- a/services/oboeservice/SharedRingBuffer.h
+++ b/services/oboeservice/SharedRingBuffer.h
@@ -17,6 +17,7 @@
 #ifndef AAUDIO_SHARED_RINGBUFFER_H
 #define AAUDIO_SHARED_RINGBUFFER_H
 
+#include <android-base/unique_fd.h>
 #include <stdint.h>
 #include <cutils/ashmem.h>
 #include <sys/mman.h>
@@ -51,12 +52,12 @@
     }
 
 private:
-    int                    mFileDescriptor = -1;
-    android::FifoBuffer   *mFifoBuffer = nullptr;
-    uint8_t               *mSharedMemory = nullptr;
-    int32_t                mSharedMemorySizeInBytes = 0;
-    int32_t                mDataMemorySizeInBytes = 0;
-    android::fifo_frames_t mCapacityInFrames = 0;
+    android::base::unique_fd  mFileDescriptor;
+    android::FifoBuffer      *mFifoBuffer = nullptr;
+    uint8_t                  *mSharedMemory = nullptr;
+    int32_t                   mSharedMemorySizeInBytes = 0;
+    int32_t                   mDataMemorySizeInBytes = 0;
+    android::fifo_frames_t    mCapacityInFrames = 0;
 };
 
 } /* namespace aaudio */
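
The allocate() changes keep the same create-protect-map sequence but let
unique_fd clean up on every early return. A runnable sketch of that flow,
using a plain temporary file as a stand-in for ashmem_create_region(); all
names are illustrative:

    #include <cstdio>
    #include <cstdlib>
    #include <cstring>
    #include <sys/mman.h>
    #include <unistd.h>

    int main() {
        char path[] = "/tmp/ringbuf-XXXXXX";
        int rawFd = mkstemp(path);       // stand-in for ashmem_create_region
        if (rawFd == -1) { perror("mkstemp"); return 1; }
        unlink(path);                    // keep only the descriptor alive

        const size_t kSize = 4096;
        if (ftruncate(rawFd, kSize) == -1) {
            perror("ftruncate");
            close(rawFd);                // with unique_fd this is automatic
            return 1;
        }

        void* mem = mmap(nullptr, kSize, PROT_READ | PROT_WRITE, MAP_SHARED,
                         rawFd, 0);
        if (mem == MAP_FAILED) { perror("mmap"); close(rawFd); return 1; }

        memset(mem, 0, kSize);           // data region plus the two counters
        printf("mapped %zu shared bytes\n", kSize);

        munmap(mem, kSize);
        close(rawFd);
        return 0;
    }
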
diff --git a/services/soundtrigger/SoundTriggerHwService.cpp b/services/soundtrigger/SoundTriggerHwService.cpp
index 5b8d990..8891aba 100644
--- a/services/soundtrigger/SoundTriggerHwService.cpp
+++ b/services/soundtrigger/SoundTriggerHwService.cpp
@@ -89,7 +89,8 @@
                              uint32_t *numModules)
 {
     ALOGV("listModules");
-    if (!captureHotwordAllowed()) {
+    if (!captureHotwordAllowed(IPCThreadState::self()->getCallingPid(),
+                               IPCThreadState::self()->getCallingUid())) {
         return PERMISSION_DENIED;
     }
 
@@ -110,7 +111,8 @@
                         sp<ISoundTrigger>& moduleInterface)
 {
     ALOGV("attach module %d", handle);
-    if (!captureHotwordAllowed()) {
+    if (!captureHotwordAllowed(IPCThreadState::self()->getCallingPid(),
+                               IPCThreadState::self()->getCallingUid())) {
         return PERMISSION_DENIED;
     }
 
@@ -942,7 +944,8 @@
 
 void SoundTriggerHwService::ModuleClient::detach() {
     ALOGV("detach()");
-    if (!captureHotwordAllowed()) {
+    if (!captureHotwordAllowed(IPCThreadState::self()->getCallingPid(),
+                               IPCThreadState::self()->getCallingUid())) {
         return;
     }
 
@@ -965,7 +968,8 @@
                                 sound_model_handle_t *handle)
 {
     ALOGV("loadSoundModel() handle");
-    if (!captureHotwordAllowed()) {
+    if (!captureHotwordAllowed(IPCThreadState::self()->getCallingPid(),
+                               IPCThreadState::self()->getCallingUid())) {
         return PERMISSION_DENIED;
     }
 
@@ -979,7 +983,8 @@
 status_t SoundTriggerHwService::ModuleClient::unloadSoundModel(sound_model_handle_t handle)
 {
     ALOGV("unloadSoundModel() model handle %d", handle);
-    if (!captureHotwordAllowed()) {
+    if (!captureHotwordAllowed(IPCThreadState::self()->getCallingPid(),
+                               IPCThreadState::self()->getCallingUid())) {
         return PERMISSION_DENIED;
     }
 
@@ -994,7 +999,8 @@
                                  const sp<IMemory>& dataMemory)
 {
     ALOGV("startRecognition() model handle %d", handle);
-    if (!captureHotwordAllowed()) {
+    if (!captureHotwordAllowed(IPCThreadState::self()->getCallingPid(),
+                               IPCThreadState::self()->getCallingUid())) {
         return PERMISSION_DENIED;
     }
 
@@ -1008,7 +1014,8 @@
 status_t SoundTriggerHwService::ModuleClient::stopRecognition(sound_model_handle_t handle)
 {
     ALOGV("stopRecognition() model handle %d", handle);
-    if (!captureHotwordAllowed()) {
+    if (!captureHotwordAllowed(IPCThreadState::self()->getCallingPid(),
+                               IPCThreadState::self()->getCallingUid())) {
         return PERMISSION_DENIED;
     }
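
Each hunk in this file captures the binder caller's pid and uid at the
entry point and passes them into captureHotwordAllowed() explicitly,
rather than letting the check re-read a calling identity that may no
longer belong to the original caller. A standalone sketch of that pattern
with stand-in types; neither the policy below nor the helper names are
the real implementation:

    #include <cstdio>
    #include <sys/types.h>

    // Stand-in for the real permission check, which consults the
    // CAPTURE_AUDIO_HOTWORD permission for the given (pid, uid).
    static bool captureHotwordAllowedFor(pid_t pid, uid_t uid) {
        printf("checking hotword permission for pid %d / uid %d\n",
               (int) pid, (int) uid);
        return uid == 1000;  // placeholder policy for the sketch
    }

    // Stand-in service entry point: the identity is captured once, at the
    // binder boundary, and threaded through explicitly.
    static int listModules(pid_t callingPid, uid_t callingUid) {
        if (!captureHotwordAllowedFor(callingPid, callingUid)) {
            return -1;  // PERMISSION_DENIED in the real service
        }
        return 0;
    }

    int main() {
        // In the service these come from IPCThreadState::self().
        printf("result: %d\n", listModules(/*pid*/ 1234, /*uid*/ 1000));
        return 0;
    }
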