Merge "omx: check buffer port before using" into nyc-dev
diff --git a/camera/ndk/impl/ACameraMetadata.h b/camera/ndk/impl/ACameraMetadata.h
index ab651a1..8d050c4 100644
--- a/camera/ndk/impl/ACameraMetadata.h
+++ b/camera/ndk/impl/ACameraMetadata.h
@@ -79,9 +79,15 @@
 
         Mutex::Autolock _l(mLock);
 
-        // Here we have to use reinterpret_cast because the NDK data type is
-        // exact copy of internal data type but they do not inherit from each other
-        status_t ret = mData.update(tag, reinterpret_cast<const INTERNAL_T*>(data), count);
+        status_t ret = OK;
+        if (count == 0 && data == nullptr) {
+            ret = mData.erase(tag);
+        } else {
+            // Here we have to use reinterpret_cast because the NDK data type is an
+            // exact copy of the internal data type, but they do not inherit from each other
+            ret = mData.update(tag, reinterpret_cast<const INTERNAL_T*>(data), count);
+        }
+
         if (ret == OK) {
             mTags.clear();
             return ACAMERA_OK;
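
With this change, passing `data == nullptr` together with `count == 0` erases the entry instead of handing a null source to `mData.update()`. A minimal sketch of how an application could clear a previously set tag through the public setter, assuming `ACaptureRequest_setEntry_i32` routes into this `update<T>` path (the helper name is hypothetical):

```c
#include <camera/NdkCaptureRequest.h>
#include <camera/NdkCameraMetadataTags.h>

/* Hypothetical helper: removes any previously set AE regions from a request.
 * With the change above, count == 0 and data == NULL erase the entry
 * (mData.erase) instead of calling mData.update() with a null source. */
static camera_status_t clear_ae_regions(ACaptureRequest* request) {
    return ACaptureRequest_setEntry_i32(
            request, ACAMERA_CONTROL_AE_REGIONS, /*count*/ 0, /*data*/ NULL);
}
```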
diff --git a/include/camera/ndk/NdkCameraCaptureSession.h b/include/camera/ndk/NdkCameraCaptureSession.h
index 68eff7a..7b314e9 100644
--- a/include/camera/ndk/NdkCameraCaptureSession.h
+++ b/include/camera/ndk/NdkCameraCaptureSession.h
@@ -177,7 +177,8 @@
  *                capture request sent by application, so the address is different to what
  *                application sent but the content will match. This request will be freed by
  *                framework immediately after this callback returns.
- * @param result The capture result metadata reported by camera device
+ * @param result The capture result metadata reported by camera device. The memory is managed by
+ *                the camera framework. Do not access this pointer after this callback returns.
  */
 typedef void (*ACameraCaptureSession_captureCallback_result)(
         void* context, ACameraCaptureSession* session,
@@ -193,7 +194,9 @@
  *                capture request sent by application, so the address is different to what
  *                application sent but the content will match. This request will be freed by
  *                framework immediately after this callback returns.
- * @param failure The {@link ACameraCaptureFailure} desribes the capture failure.
+ * @param failure The {@link ACameraCaptureFailure} describing the capture failure. The memory is
+ *                managed by the camera framework. Do not access this pointer after this callback
+ *                returns.
  */
 typedef void (*ACameraCaptureSession_captureCallback_failed)(
         void* context, ACameraCaptureSession* session,
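
Because both pointers above are only valid for the duration of the callback, an application that needs the result later must duplicate it. A minimal sketch, assuming the full callback signature passes the request and result as the last two arguments and that the `context` pointer was registered to receive the copy (both assumptions, not shown in this diff):

```c
#include <camera/NdkCameraCaptureSession.h>
#include <camera/NdkCameraMetadata.h>

/* Keeps a private copy of the capture result; `result` itself must not be
 * touched after this callback returns. */
static void on_capture_result(void* context, ACameraCaptureSession* session,
                              ACaptureRequest* request, const ACameraMetadata* result) {
    (void) session;
    (void) request;
    ACameraMetadata** saved = (ACameraMetadata**) context;  /* assumed context layout */
    *saved = ACameraMetadata_copy(result);  /* returns NULL if the copy fails */
    /* ... once done with *saved, release it with ACameraMetadata_free(*saved). */
}
```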
diff --git a/include/camera/ndk/NdkCameraMetadata.h b/include/camera/ndk/NdkCameraMetadata.h
index 8a8865d..d929854 100644
--- a/include/camera/ndk/NdkCameraMetadata.h
+++ b/include/camera/ndk/NdkCameraMetadata.h
@@ -43,35 +43,78 @@
 extern "C" {
 #endif
 
+/**
+ * ACameraMetadata is an opaque type that provides access to read-only camera metadata like camera
+ * characteristics (via {@link ACameraManager_getCameraCharacteristics}) or capture results (via
+ * {@link ACameraCaptureSession_captureCallback_result}).
+ */
 typedef struct ACameraMetadata ACameraMetadata;
 
-// Keep in sync with system/media/include/system/camera_metadata.h
+/**
+ * Possible data types of a metadata entry.
+ *
+ * Keep in sync with system/media/include/system/camera_metadata.h
+ */
 enum {
-    // Unsigned 8-bit integer (uint8_t)
+    /// Unsigned 8-bit integer (uint8_t)
     ACAMERA_TYPE_BYTE = 0,
-    // Signed 32-bit integer (int32_t)
+    /// Signed 32-bit integer (int32_t)
     ACAMERA_TYPE_INT32 = 1,
-    // 32-bit float (float)
+    /// 32-bit float (float)
     ACAMERA_TYPE_FLOAT = 2,
-    // Signed 64-bit integer (int64_t)
+    /// Signed 64-bit integer (int64_t)
     ACAMERA_TYPE_INT64 = 3,
-    // 64-bit float (double)
+    /// 64-bit float (double)
     ACAMERA_TYPE_DOUBLE = 4,
-    // A 64-bit fraction (ACameraMetadata_rational)
+    /// A 64-bit fraction (ACameraMetadata_rational)
     ACAMERA_TYPE_RATIONAL = 5,
-    // Number of type fields
+    /// Number of type fields
     ACAMERA_NUM_TYPES
 };
 
+/**
+ * Definition of the rational data type used in {@link ACameraMetadata}.
+ */
 typedef struct ACameraMetadata_rational {
     int32_t numerator;
     int32_t denominator;
 } ACameraMetadata_rational;
 
+/**
+ * A single camera metadata entry.
+ *
+ * <p>Each entry is an array of values, though many metadata fields may only have 1 entry in the
+ * array.</p>
+ */
 typedef struct ACameraMetadata_entry {
+    /**
+     * The tag identifying the entry.
+     *
+     * <p> It is one of the values defined in {@link NdkCameraMetadataTags.h}, and defines how the
+     * entry should be interpreted and which parts of the API provide it.
+     * See {@link NdkCameraMetadataTags.h} for more details. </p>
+     */
     uint32_t tag;
+
+    /**
+     * The data type of this metadata entry.
+     *
+     * <p>Must be one of ACAMERA_TYPE_* enum values defined above. A particular tag always has the
+     * same type.</p>
+     */
     uint8_t  type;
+
+    /**
+     * Count of elements (NOT count of bytes) in this metadata entry.
+     */
     uint32_t count;
+
+    /**
+     * Pointer to the data held in this metadata entry.
+     *
+     * <p>The type field above defines which union member pointer is valid. The count field above
+     * defines the length of the data in number of elements.</p>
+     */
     union {
         uint8_t *u8;
         int32_t *i32;
@@ -82,10 +125,41 @@
     } data;
 } ACameraMetadata_entry;
 
+/**
+ * A single read-only camera metadata entry.
+ *
+ * <p>Each entry is an array of values, though many metadata fields may only have 1 entry in the
+ * array.</p>
+ */
 typedef struct ACameraMetadata_const_entry {
+    /**
+     * The tag identifying the entry.
+     *
+     * <p> It is one of the values defined in {@link NdkCameraMetadataTags.h}, and defines how the
+     * entry should be interpreted and which parts of the API provide it.
+     * See {@link NdkCameraMetadataTags.h} for more details. </p>
+     */
     uint32_t tag;
+
+    /**
+     * The data type of this metadata entry.
+     *
+     * <p>Must be one of ACAMERA_TYPE_* enum values defined above. A particular tag always has the
+     * same type.</p>
+     */
     uint8_t  type;
+
+    /**
+     * Count of elements (NOT count of bytes) in this metadata entry.
+     */
     uint32_t count;
+
+    /**
+     * Pointer to the data held in this metadata entry.
+     *
+     * <p>The type field above defines which union member pointer is valid. The count field above
+     * defines the length of the data in number of elements.</p>
+     */
     union {
         const uint8_t *u8;
         const int32_t *i32;
@@ -96,32 +170,61 @@
     } data;
 } ACameraMetadata_const_entry;
 
-/*
- * Get a metadata entry
+/**
+ * Get a metadata entry from an input {@link ACameraMetadata}.
+ *
+ * <p>The memory of the data field in the returned entry is managed by the camera framework. Do
+ * not attempt to free it.</p>
+ *
+ * @param metadata the {@link ACameraMetadata} of interest.
+ * @param tag the tag value of the camera metadata entry to be retrieved.
+ * @param entry the output {@link ACameraMetadata_const_entry} will be filled here if the method
+ *        call succeeds.
+ *
+ * @return <ul>
+ *         <li>{@link ACAMERA_OK} if the method call succeeds.</li>
+ *         <li>{@link ACAMERA_ERROR_INVALID_PARAMETER} if metadata or entry is NULL.</li>
+ *         <li>{@link ACAMERA_ERROR_METADATA_NOT_FOUND} if the input metadata does not contain an
+ *             entry of the input tag value.</li></ul>
  */
 camera_status_t ACameraMetadata_getConstEntry(
-        const ACameraMetadata*, uint32_t tag, ACameraMetadata_const_entry* entry);
-
-/*
- * List all the entry tags in this metadata.
- * The memory of tags is managed by ACameraMetadata itself and must NOT be free/delete
- * by application. Do NOT access tags after calling ACameraMetadata_free
- */
-camera_status_t ACameraMetadata_getAllTags(
-        const ACameraMetadata*, /*out*/int32_t* numTags, /*out*/const uint32_t** tags);
+        const ACameraMetadata* metadata, uint32_t tag, /*out*/ACameraMetadata_const_entry* entry);
 
 /**
- * Copy a metadata. Duplicates a metadata structure.
- * The destination ACameraMetadata must be freed by the application with ACameraMetadata_free
- * after application is done using it.
- * Returns NULL when src cannot be copied
+ * List all the entry tags in the input {@link ACameraMetadata}.
+ *
+ * @param metadata the {@link ACameraMetadata} of interest.
+ * @param numEntries the number of metadata entries in the input {@link ACameraMetadata}.
+ * @param tags the tag values of the metadata entries. The length of tags is returned in the
+ *             numEntries argument. The memory is managed by ACameraMetadata itself and must NOT
+ *             be freed/deleted by the application. Do NOT access tags after calling ACameraMetadata_free.
+ *
+ * @return <ul>
+ *         <li>{@link ACAMERA_OK} if the method call succeeds.</li>
+ *         <li>{@link ACAMERA_ERROR_INVALID_PARAMETER} if metadata, numEntries or tags is NULL.</li>
+ *         <li>{@link ACAMERA_ERROR_UNKNOWN} if the method fails for some other reasons.</li></ul>
+ */
+camera_status_t ACameraMetadata_getAllTags(
+        const ACameraMetadata* metadata, /*out*/int32_t* numEntries, /*out*/const uint32_t** tags);
+
+/**
+ * Create a copy of input {@link ACameraMetadata}.
+ *
+ * <p>The returned ACameraMetadata must be freed by the application with
+ * {@link ACameraMetadata_free} after the application is done using it.</p>
+ *
+ * @param src the input {@link ACameraMetadata} to be copied.
+ *
+ * @return a valid ACameraMetadata pointer or NULL if the input metadata cannot be copied.
  */
 ACameraMetadata* ACameraMetadata_copy(const ACameraMetadata* src);
 
 /**
- * Frees a metadata structure.
+ * Free a {@link ACameraMetadata} structure.
+ *
+ * @param metadata the {@link ACameraMetadata} to be freed.
  */
-void ACameraMetadata_free(ACameraMetadata*);
+void ACameraMetadata_free(ACameraMetadata* metadata);
 
 #ifdef __cplusplus
 } // extern "C"
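
The lookup functions above compose naturally: `ACameraMetadata_getAllTags` yields the tag list, and each tag can then be passed to `ACameraMetadata_getConstEntry`. A minimal sketch that dumps every entry's tag, type, and element count, assuming `chars` was obtained from `ACameraManager_getCameraCharacteristics` (not part of this diff):

```c
#include <stdio.h>
#include <camera/NdkCameraMetadata.h>

static void dump_metadata(const ACameraMetadata* chars) {
    int32_t numEntries = 0;
    const uint32_t* tags = NULL;  /* owned by `chars`; must not be freed */
    if (ACameraMetadata_getAllTags(chars, &numEntries, &tags) != ACAMERA_OK) {
        return;
    }
    for (int32_t i = 0; i < numEntries; i++) {
        ACameraMetadata_const_entry entry;
        if (ACameraMetadata_getConstEntry(chars, tags[i], &entry) == ACAMERA_OK) {
            printf("tag 0x%x: type %u, %u element(s)\n",
                   (unsigned) entry.tag, (unsigned) entry.type, (unsigned) entry.count);
        }
    }
}
```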
diff --git a/include/camera/ndk/NdkCameraMetadataTags.h b/include/camera/ndk/NdkCameraMetadataTags.h
index 6fa0517..e7f6989 100644
--- a/include/camera/ndk/NdkCameraMetadataTags.h
+++ b/include/camera/ndk/NdkCameraMetadataTags.h
@@ -128,7 +128,7 @@
      * FAST or HIGH_QUALITY will yield a picture with the same white point
      * as what was produced by the camera device in the earlier frame.</p>
      * <p>The expected processing pipeline is as follows:</p>
-     * <p><img alt="White balance processing pipeline" src="../../../../images/camera2/metadata/android.colorCorrection.mode/processing_pipeline.png" /></p>
+     * <p><img alt="White balance processing pipeline" src="../images/camera2/metadata/android.colorCorrection.mode/processing_pipeline.png" /></p>
      * <p>The white balance is encoded by two values, a 4-channel white-balance
      * gain vector (applied in the Bayer domain), and a 3x3 color transform
      * matrix (applied after demosaic).</p>
@@ -445,6 +445,10 @@
      * Otherwise will always be present.</p>
      * <p>The maximum number of regions supported by the device is determined by the value
      * of android.control.maxRegionsAe.</p>
+     * <p>The data representation is int[5 * area_count].
+     * Every five elements represent a metering region of (xmin, ymin, xmax, ymax, weight).
+     * The rectangle is defined to be inclusive on xmin and ymin, but exclusive on xmax and
+     * ymax.</p>
      * <p>The coordinate system is based on the active pixel array,
      * with (0,0) being the top-left pixel in the active pixel array, and
      * (ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE.width - 1,
@@ -595,6 +599,10 @@
      * Otherwise will always be present.</p>
      * <p>The maximum number of focus areas supported by the device is determined by the value
      * of android.control.maxRegionsAf.</p>
+     * <p>The data representation is int[5 * area_count].
+     * Every five elements represent a metering region of (xmin, ymin, xmax, ymax, weight).
+     * The rectangle is defined to be inclusive on xmin and ymin, but exclusive on xmax and
+     * ymax.</p>
      * <p>The coordinate system is based on the active pixel array,
      * with (0,0) being the top-left pixel in the active pixel array, and
      * (ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE.width - 1,
@@ -741,6 +749,10 @@
      * Otherwise will always be present.</p>
      * <p>The maximum number of regions supported by the device is determined by the value
      * of android.control.maxRegionsAwb.</p>
+     * <p>The data representation is int[5 * area_count].
+     * Every five elements represent a metering region of (xmin, ymin, xmax, ymax, weight).
+     * The rectangle is defined to be inclusive on xmin and ymin, but exclusive on xmax and
+     * ymax.</p>
      * <p>The coordinate system is based on the active pixel array,
      * with (0,0) being the top-left pixel in the active pixel array, and
      * (ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE.width - 1,
@@ -828,7 +840,8 @@
      * ACAMERA_CONTROL_* are mostly disabled, and the camera device implements
      * one of the scene mode settings (such as ACTION, SUNSET, or PARTY)
      * as it wishes. The camera device scene mode 3A settings are provided by
-     * {@link android.hardware.camera2.CaptureResult capture results}.</p>
+     * capture results {@link ACameraMetadata} from
+     * {@link ACameraCaptureSession_captureCallback_result}.</p>
      * <p>When set to OFF_KEEP_STATE, it is similar to OFF mode, the only difference
      * is that this frame will not be used by camera device background 3A statistics
      * update, as if this frame is never captured. This mode can be used in the scenario
@@ -970,21 +983,23 @@
      * <ul>
      * <li>
      * <p>For constant-framerate recording, for each normal
-     * {@link android.media.CamcorderProfile CamcorderProfile}, that is, a
-     * {@link android.media.CamcorderProfile CamcorderProfile} that has
-     * {@link android.media.CamcorderProfile#quality quality} in
-     * the range [{@link android.media.CamcorderProfile#QUALITY_LOW QUALITY_LOW},
-     * {@link android.media.CamcorderProfile#QUALITY_2160P QUALITY_2160P}], if the profile is
-     * supported by the device and has
-     * {@link android.media.CamcorderProfile#videoFrameRate videoFrameRate} <code>x</code>, this list will
-     * always include (<code>x</code>,<code>x</code>).</p>
+     * <a href="https://developer.android.com/reference/android/media/CamcorderProfile.html">CamcorderProfile</a>, that is, a
+     * <a href="https://developer.android.com/reference/android/media/CamcorderProfile.html">CamcorderProfile</a> that has
+     * <a href="https://developer.android.com/reference/android/media/CamcorderProfile.html#quality">quality</a>
+     * in the range [
+     * <a href="https://developer.android.com/reference/android/media/CamcorderProfile.html#QUALITY_LOW">QUALITY_LOW</a>,
+     * <a href="https://developer.android.com/reference/android/media/CamcorderProfile.html#QUALITY_2160P">QUALITY_2160P</a>],
+     * if the profile is supported by the device and has
+     * <a href="https://developer.android.com/reference/android/media/CamcorderProfile.html#videoFrameRate">videoFrameRate</a>
+     * <code>x</code>, this list will always include (<code>x</code>,<code>x</code>).</p>
      * </li>
      * <li>
      * <p>Also, a camera device must either not support any
-     * {@link android.media.CamcorderProfile CamcorderProfile},
+     * <a href="https://developer.android.com/reference/android/media/CamcorderProfile.html">CamcorderProfile</a>,
      * or support at least one
-     * normal {@link android.media.CamcorderProfile CamcorderProfile} that has
-     * {@link android.media.CamcorderProfile#videoFrameRate videoFrameRate} <code>x</code> &gt;= 24.</p>
+     * normal <a href="https://developer.android.com/reference/android/media/CamcorderProfile.html">CamcorderProfile</a>
+     * that has
+     * <a href="https://developer.android.com/reference/android/media/CamcorderProfile.html#videoFrameRate">videoFrameRate</a> <code>x</code> &gt;= 24.</p>
      * </li>
      * </ul>
      * <p>For devices at the LIMITED level or above:</p>
@@ -1190,205 +1205,45 @@
      * AE state becomes CONVERGED, then the image data associated with this result should
      * be good to use.</p>
      * <p>Below are state transition tables for different AE modes.</p>
-     * <table>
-     * <thead>
-     * <tr>
-     * <th align="center">State</th>
-     * <th align="center">Transition Cause</th>
-     * <th align="center">New State</th>
-     * <th align="center">Notes</th>
-     * </tr>
-     * </thead>
-     * <tbody>
-     * <tr>
-     * <td align="center">INACTIVE</td>
-     * <td align="center"></td>
-     * <td align="center">INACTIVE</td>
-     * <td align="center">Camera device auto exposure algorithm is disabled</td>
-     * </tr>
-     * </tbody>
-     * </table>
+     * <p>State       | Transition Cause | New State | Notes
+     * :------------:|:----------------:|:---------:|:-----------------------:
+     * INACTIVE      |                  | INACTIVE  | Camera device auto exposure algorithm is disabled</p>
      * <p>When ACAMERA_CONTROL_AE_MODE is AE_MODE_ON_*:</p>
-     * <table>
-     * <thead>
-     * <tr>
-     * <th align="center">State</th>
-     * <th align="center">Transition Cause</th>
-     * <th align="center">New State</th>
-     * <th align="center">Notes</th>
-     * </tr>
-     * </thead>
-     * <tbody>
-     * <tr>
-     * <td align="center">INACTIVE</td>
-     * <td align="center">Camera device initiates AE scan</td>
-     * <td align="center">SEARCHING</td>
-     * <td align="center">Values changing</td>
-     * </tr>
-     * <tr>
-     * <td align="center">INACTIVE</td>
-     * <td align="center">ACAMERA_CONTROL_AE_LOCK is ON</td>
-     * <td align="center">LOCKED</td>
-     * <td align="center">Values locked</td>
-     * </tr>
-     * <tr>
-     * <td align="center">SEARCHING</td>
-     * <td align="center">Camera device finishes AE scan</td>
-     * <td align="center">CONVERGED</td>
-     * <td align="center">Good values, not changing</td>
-     * </tr>
-     * <tr>
-     * <td align="center">SEARCHING</td>
-     * <td align="center">Camera device finishes AE scan</td>
-     * <td align="center">FLASH_REQUIRED</td>
-     * <td align="center">Converged but too dark w/o flash</td>
-     * </tr>
-     * <tr>
-     * <td align="center">SEARCHING</td>
-     * <td align="center">ACAMERA_CONTROL_AE_LOCK is ON</td>
-     * <td align="center">LOCKED</td>
-     * <td align="center">Values locked</td>
-     * </tr>
-     * <tr>
-     * <td align="center">CONVERGED</td>
-     * <td align="center">Camera device initiates AE scan</td>
-     * <td align="center">SEARCHING</td>
-     * <td align="center">Values changing</td>
-     * </tr>
-     * <tr>
-     * <td align="center">CONVERGED</td>
-     * <td align="center">ACAMERA_CONTROL_AE_LOCK is ON</td>
-     * <td align="center">LOCKED</td>
-     * <td align="center">Values locked</td>
-     * </tr>
-     * <tr>
-     * <td align="center">FLASH_REQUIRED</td>
-     * <td align="center">Camera device initiates AE scan</td>
-     * <td align="center">SEARCHING</td>
-     * <td align="center">Values changing</td>
-     * </tr>
-     * <tr>
-     * <td align="center">FLASH_REQUIRED</td>
-     * <td align="center">ACAMERA_CONTROL_AE_LOCK is ON</td>
-     * <td align="center">LOCKED</td>
-     * <td align="center">Values locked</td>
-     * </tr>
-     * <tr>
-     * <td align="center">LOCKED</td>
-     * <td align="center">ACAMERA_CONTROL_AE_LOCK is OFF</td>
-     * <td align="center">SEARCHING</td>
-     * <td align="center">Values not good after unlock</td>
-     * </tr>
-     * <tr>
-     * <td align="center">LOCKED</td>
-     * <td align="center">ACAMERA_CONTROL_AE_LOCK is OFF</td>
-     * <td align="center">CONVERGED</td>
-     * <td align="center">Values good after unlock</td>
-     * </tr>
-     * <tr>
-     * <td align="center">LOCKED</td>
-     * <td align="center">ACAMERA_CONTROL_AE_LOCK is OFF</td>
-     * <td align="center">FLASH_REQUIRED</td>
-     * <td align="center">Exposure good, but too dark</td>
-     * </tr>
-     * <tr>
-     * <td align="center">PRECAPTURE</td>
-     * <td align="center">Sequence done. ACAMERA_CONTROL_AE_LOCK is OFF</td>
-     * <td align="center">CONVERGED</td>
-     * <td align="center">Ready for high-quality capture</td>
-     * </tr>
-     * <tr>
-     * <td align="center">PRECAPTURE</td>
-     * <td align="center">Sequence done. ACAMERA_CONTROL_AE_LOCK is ON</td>
-     * <td align="center">LOCKED</td>
-     * <td align="center">Ready for high-quality capture</td>
-     * </tr>
-     * <tr>
-     * <td align="center">LOCKED</td>
-     * <td align="center">aeLock is ON and aePrecaptureTrigger is START</td>
-     * <td align="center">LOCKED</td>
-     * <td align="center">Precapture trigger is ignored when AE is already locked</td>
-     * </tr>
-     * <tr>
-     * <td align="center">LOCKED</td>
-     * <td align="center">aeLock is ON and aePrecaptureTrigger is CANCEL</td>
-     * <td align="center">LOCKED</td>
-     * <td align="center">Precapture trigger is ignored when AE is already locked</td>
-     * </tr>
-     * <tr>
-     * <td align="center">Any state (excluding LOCKED)</td>
-     * <td align="center">ACAMERA_CONTROL_AE_PRECAPTURE_TRIGGER is START</td>
-     * <td align="center">PRECAPTURE</td>
-     * <td align="center">Start AE precapture metering sequence</td>
-     * </tr>
-     * <tr>
-     * <td align="center">Any state (excluding LOCKED)</td>
-     * <td align="center">ACAMERA_CONTROL_AE_PRECAPTURE_TRIGGER is CANCEL</td>
-     * <td align="center">INACTIVE</td>
-     * <td align="center">Currently active precapture metering sequence is canceled</td>
-     * </tr>
-     * </tbody>
-     * </table>
+     * <p>State        | Transition Cause                             | New State      | Notes
+     * :-------------:|:--------------------------------------------:|:--------------:|:-----------------:
+     * INACTIVE       | Camera device initiates AE scan              | SEARCHING      | Values changing
+     * INACTIVE       | ACAMERA_CONTROL_AE_LOCK is ON                 | LOCKED         | Values locked
+     * SEARCHING      | Camera device finishes AE scan               | CONVERGED      | Good values, not changing
+     * SEARCHING      | Camera device finishes AE scan               | FLASH_REQUIRED | Converged but too dark w/o flash
+     * SEARCHING      | ACAMERA_CONTROL_AE_LOCK is ON                 | LOCKED         | Values locked
+     * CONVERGED      | Camera device initiates AE scan              | SEARCHING      | Values changing
+     * CONVERGED      | ACAMERA_CONTROL_AE_LOCK is ON                 | LOCKED         | Values locked
+     * FLASH_REQUIRED | Camera device initiates AE scan              | SEARCHING      | Values changing
+     * FLASH_REQUIRED | ACAMERA_CONTROL_AE_LOCK is ON                 | LOCKED         | Values locked
+     * LOCKED         | ACAMERA_CONTROL_AE_LOCK is OFF                | SEARCHING      | Values not good after unlock
+     * LOCKED         | ACAMERA_CONTROL_AE_LOCK is OFF                | CONVERGED      | Values good after unlock
+     * LOCKED         | ACAMERA_CONTROL_AE_LOCK is OFF                | FLASH_REQUIRED | Exposure good, but too dark
+     * PRECAPTURE     | Sequence done. ACAMERA_CONTROL_AE_LOCK is OFF | CONVERGED      | Ready for high-quality capture
+     * PRECAPTURE     | Sequence done. ACAMERA_CONTROL_AE_LOCK is ON  | LOCKED         | Ready for high-quality capture
+     * LOCKED         | aeLock is ON and aePrecaptureTrigger is START | LOCKED        | Precapture trigger is ignored when AE is already locked
+     * LOCKED         | aeLock is ON and aePrecaptureTrigger is CANCEL| LOCKED        | Precapture trigger is ignored when AE is already locked
+     * Any state (excluding LOCKED) | ACAMERA_CONTROL_AE_PRECAPTURE_TRIGGER is START | PRECAPTURE     | Start AE precapture metering sequence
+     * Any state (excluding LOCKED) | ACAMERA_CONTROL_AE_PRECAPTURE_TRIGGER is CANCEL| INACTIVE       | Currently active precapture metering sequence is canceled</p>
      * <p>For the above table, the camera device may skip reporting any state changes that happen
      * without application intervention (i.e. mode switch, trigger, locking). Any state that
      * can be skipped in that manner is called a transient state.</p>
      * <p>For example, for above AE modes (AE_MODE_ON_*), in addition to the state transitions
      * listed in above table, it is also legal for the camera device to skip one or more
      * transient states between two results. See below table for examples:</p>
-     * <table>
-     * <thead>
-     * <tr>
-     * <th align="center">State</th>
-     * <th align="center">Transition Cause</th>
-     * <th align="center">New State</th>
-     * <th align="center">Notes</th>
-     * </tr>
-     * </thead>
-     * <tbody>
-     * <tr>
-     * <td align="center">INACTIVE</td>
-     * <td align="center">Camera device finished AE scan</td>
-     * <td align="center">CONVERGED</td>
-     * <td align="center">Values are already good, transient states are skipped by camera device.</td>
-     * </tr>
-     * <tr>
-     * <td align="center">Any state (excluding LOCKED)</td>
-     * <td align="center">ACAMERA_CONTROL_AE_PRECAPTURE_TRIGGER is START, sequence done</td>
-     * <td align="center">FLASH_REQUIRED</td>
-     * <td align="center">Converged but too dark w/o flash after a precapture sequence, transient states are skipped by camera device.</td>
-     * </tr>
-     * <tr>
-     * <td align="center">Any state (excluding LOCKED)</td>
-     * <td align="center">ACAMERA_CONTROL_AE_PRECAPTURE_TRIGGER is START, sequence done</td>
-     * <td align="center">CONVERGED</td>
-     * <td align="center">Converged after a precapture sequence, transient states are skipped by camera device.</td>
-     * </tr>
-     * <tr>
-     * <td align="center">Any state (excluding LOCKED)</td>
-     * <td align="center">ACAMERA_CONTROL_AE_PRECAPTURE_TRIGGER is CANCEL, converged</td>
-     * <td align="center">FLASH_REQUIRED</td>
-     * <td align="center">Converged but too dark w/o flash after a precapture sequence is canceled, transient states are skipped by camera device.</td>
-     * </tr>
-     * <tr>
-     * <td align="center">Any state (excluding LOCKED)</td>
-     * <td align="center">ACAMERA_CONTROL_AE_PRECAPTURE_TRIGGER is CANCEL, converged</td>
-     * <td align="center">CONVERGED</td>
-     * <td align="center">Converged after a precapture sequenceis canceled, transient states are skipped by camera device.</td>
-     * </tr>
-     * <tr>
-     * <td align="center">CONVERGED</td>
-     * <td align="center">Camera device finished AE scan</td>
-     * <td align="center">FLASH_REQUIRED</td>
-     * <td align="center">Converged but too dark w/o flash after a new scan, transient states are skipped by camera device.</td>
-     * </tr>
-     * <tr>
-     * <td align="center">FLASH_REQUIRED</td>
-     * <td align="center">Camera device finished AE scan</td>
-     * <td align="center">CONVERGED</td>
-     * <td align="center">Converged after a new scan, transient states are skipped by camera device.</td>
-     * </tr>
-     * </tbody>
-     * </table>
+     * <p>State        | Transition Cause                                            | New State      | Notes
+     * :-------------:|:-----------------------------------------------------------:|:--------------:|:-----------------:
+     * INACTIVE       | Camera device finished AE scan                              | CONVERGED      | Values are already good, transient states are skipped by camera device.
+     * Any state (excluding LOCKED) | ACAMERA_CONTROL_AE_PRECAPTURE_TRIGGER is START, sequence done | FLASH_REQUIRED | Converged but too dark w/o flash after a precapture sequence, transient states are skipped by camera device.
+     * Any state (excluding LOCKED) | ACAMERA_CONTROL_AE_PRECAPTURE_TRIGGER is START, sequence done | CONVERGED      | Converged after a precapture sequence, transient states are skipped by camera device.
+     * Any state (excluding LOCKED) | ACAMERA_CONTROL_AE_PRECAPTURE_TRIGGER is CANCEL, converged    | FLASH_REQUIRED | Converged but too dark w/o flash after a precapture sequence is canceled, transient states are skipped by camera device.
+     * Any state (excluding LOCKED) | ACAMERA_CONTROL_AE_PRECAPTURE_TRIGGER is CANCEL, converged    | CONVERGED      | Converged after a precapture sequence is canceled, transient states are skipped by camera device.
+     * CONVERGED      | Camera device finished AE scan                              | FLASH_REQUIRED | Converged but too dark w/o flash after a new scan, transient states are skipped by camera device.
+     * FLASH_REQUIRED | Camera device finished AE scan                              | CONVERGED      | Converged after a new scan, transient states are skipped by camera device.</p>
      *
      * @see ACAMERA_CONTROL_AE_LOCK
      * @see ACAMERA_CONTROL_AE_MODE
@@ -1418,374 +1273,79 @@
      * be sharp.</p>
      * <p>Below are state transition tables for different AF modes.</p>
      * <p>When ACAMERA_CONTROL_AF_MODE is AF_MODE_OFF or AF_MODE_EDOF:</p>
-     * <table>
-     * <thead>
-     * <tr>
-     * <th align="center">State</th>
-     * <th align="center">Transition Cause</th>
-     * <th align="center">New State</th>
-     * <th align="center">Notes</th>
-     * </tr>
-     * </thead>
-     * <tbody>
-     * <tr>
-     * <td align="center">INACTIVE</td>
-     * <td align="center"></td>
-     * <td align="center">INACTIVE</td>
-     * <td align="center">Never changes</td>
-     * </tr>
-     * </tbody>
-     * </table>
+     * <p>State       | Transition Cause | New State | Notes
+     * :------------:|:----------------:|:---------:|:-----------:
+     * INACTIVE      |                  | INACTIVE  | Never changes</p>
      * <p>When ACAMERA_CONTROL_AF_MODE is AF_MODE_AUTO or AF_MODE_MACRO:</p>
-     * <table>
-     * <thead>
-     * <tr>
-     * <th align="center">State</th>
-     * <th align="center">Transition Cause</th>
-     * <th align="center">New State</th>
-     * <th align="center">Notes</th>
-     * </tr>
-     * </thead>
-     * <tbody>
-     * <tr>
-     * <td align="center">INACTIVE</td>
-     * <td align="center">AF_TRIGGER</td>
-     * <td align="center">ACTIVE_SCAN</td>
-     * <td align="center">Start AF sweep, Lens now moving</td>
-     * </tr>
-     * <tr>
-     * <td align="center">ACTIVE_SCAN</td>
-     * <td align="center">AF sweep done</td>
-     * <td align="center">FOCUSED_LOCKED</td>
-     * <td align="center">Focused, Lens now locked</td>
-     * </tr>
-     * <tr>
-     * <td align="center">ACTIVE_SCAN</td>
-     * <td align="center">AF sweep done</td>
-     * <td align="center">NOT_FOCUSED_LOCKED</td>
-     * <td align="center">Not focused, Lens now locked</td>
-     * </tr>
-     * <tr>
-     * <td align="center">ACTIVE_SCAN</td>
-     * <td align="center">AF_CANCEL</td>
-     * <td align="center">INACTIVE</td>
-     * <td align="center">Cancel/reset AF, Lens now locked</td>
-     * </tr>
-     * <tr>
-     * <td align="center">FOCUSED_LOCKED</td>
-     * <td align="center">AF_CANCEL</td>
-     * <td align="center">INACTIVE</td>
-     * <td align="center">Cancel/reset AF</td>
-     * </tr>
-     * <tr>
-     * <td align="center">FOCUSED_LOCKED</td>
-     * <td align="center">AF_TRIGGER</td>
-     * <td align="center">ACTIVE_SCAN</td>
-     * <td align="center">Start new sweep, Lens now moving</td>
-     * </tr>
-     * <tr>
-     * <td align="center">NOT_FOCUSED_LOCKED</td>
-     * <td align="center">AF_CANCEL</td>
-     * <td align="center">INACTIVE</td>
-     * <td align="center">Cancel/reset AF</td>
-     * </tr>
-     * <tr>
-     * <td align="center">NOT_FOCUSED_LOCKED</td>
-     * <td align="center">AF_TRIGGER</td>
-     * <td align="center">ACTIVE_SCAN</td>
-     * <td align="center">Start new sweep, Lens now moving</td>
-     * </tr>
-     * <tr>
-     * <td align="center">Any state</td>
-     * <td align="center">Mode change</td>
-     * <td align="center">INACTIVE</td>
-     * <td align="center"></td>
-     * </tr>
-     * </tbody>
-     * </table>
+     * <p>State            | Transition Cause | New State          | Notes
+     * :-----------------:|:----------------:|:------------------:|:--------------:
+     * INACTIVE           | AF_TRIGGER       | ACTIVE_SCAN        | Start AF sweep, Lens now moving
+     * ACTIVE_SCAN        | AF sweep done    | FOCUSED_LOCKED     | Focused, Lens now locked
+     * ACTIVE_SCAN        | AF sweep done    | NOT_FOCUSED_LOCKED | Not focused, Lens now locked
+     * ACTIVE_SCAN        | AF_CANCEL        | INACTIVE           | Cancel/reset AF, Lens now locked
+     * FOCUSED_LOCKED     | AF_CANCEL        | INACTIVE           | Cancel/reset AF
+     * FOCUSED_LOCKED     | AF_TRIGGER       | ACTIVE_SCAN        | Start new sweep, Lens now moving
+     * NOT_FOCUSED_LOCKED | AF_CANCEL        | INACTIVE           | Cancel/reset AF
+     * NOT_FOCUSED_LOCKED | AF_TRIGGER       | ACTIVE_SCAN        | Start new sweep, Lens now moving
+     * Any state          | Mode change      | INACTIVE           |</p>
      * <p>For the above table, the camera device may skip reporting any state changes that happen
      * without application intervention (i.e. mode switch, trigger, locking). Any state that
      * can be skipped in that manner is called a transient state.</p>
      * <p>For example, for these AF modes (AF_MODE_AUTO and AF_MODE_MACRO), in addition to the
      * state transitions listed in above table, it is also legal for the camera device to skip
      * one or more transient states between two results. See below table for examples:</p>
-     * <table>
-     * <thead>
-     * <tr>
-     * <th align="center">State</th>
-     * <th align="center">Transition Cause</th>
-     * <th align="center">New State</th>
-     * <th align="center">Notes</th>
-     * </tr>
-     * </thead>
-     * <tbody>
-     * <tr>
-     * <td align="center">INACTIVE</td>
-     * <td align="center">AF_TRIGGER</td>
-     * <td align="center">FOCUSED_LOCKED</td>
-     * <td align="center">Focus is already good or good after a scan, lens is now locked.</td>
-     * </tr>
-     * <tr>
-     * <td align="center">INACTIVE</td>
-     * <td align="center">AF_TRIGGER</td>
-     * <td align="center">NOT_FOCUSED_LOCKED</td>
-     * <td align="center">Focus failed after a scan, lens is now locked.</td>
-     * </tr>
-     * <tr>
-     * <td align="center">FOCUSED_LOCKED</td>
-     * <td align="center">AF_TRIGGER</td>
-     * <td align="center">FOCUSED_LOCKED</td>
-     * <td align="center">Focus is already good or good after a scan, lens is now locked.</td>
-     * </tr>
-     * <tr>
-     * <td align="center">NOT_FOCUSED_LOCKED</td>
-     * <td align="center">AF_TRIGGER</td>
-     * <td align="center">FOCUSED_LOCKED</td>
-     * <td align="center">Focus is good after a scan, lens is not locked.</td>
-     * </tr>
-     * </tbody>
-     * </table>
+     * <p>State            | Transition Cause | New State          | Notes
+     * :-----------------:|:----------------:|:------------------:|:--------------:
+     * INACTIVE           | AF_TRIGGER       | FOCUSED_LOCKED     | Focus is already good or good after a scan, lens is now locked.
+     * INACTIVE           | AF_TRIGGER       | NOT_FOCUSED_LOCKED | Focus failed after a scan, lens is now locked.
+     * FOCUSED_LOCKED     | AF_TRIGGER       | FOCUSED_LOCKED     | Focus is already good or good after a scan, lens is now locked.
+     * NOT_FOCUSED_LOCKED | AF_TRIGGER       | FOCUSED_LOCKED     | Focus is good after a scan, lens is not locked.</p>
      * <p>When ACAMERA_CONTROL_AF_MODE is AF_MODE_CONTINUOUS_VIDEO:</p>
-     * <table>
-     * <thead>
-     * <tr>
-     * <th align="center">State</th>
-     * <th align="center">Transition Cause</th>
-     * <th align="center">New State</th>
-     * <th align="center">Notes</th>
-     * </tr>
-     * </thead>
-     * <tbody>
-     * <tr>
-     * <td align="center">INACTIVE</td>
-     * <td align="center">Camera device initiates new scan</td>
-     * <td align="center">PASSIVE_SCAN</td>
-     * <td align="center">Start AF scan, Lens now moving</td>
-     * </tr>
-     * <tr>
-     * <td align="center">INACTIVE</td>
-     * <td align="center">AF_TRIGGER</td>
-     * <td align="center">NOT_FOCUSED_LOCKED</td>
-     * <td align="center">AF state query, Lens now locked</td>
-     * </tr>
-     * <tr>
-     * <td align="center">PASSIVE_SCAN</td>
-     * <td align="center">Camera device completes current scan</td>
-     * <td align="center">PASSIVE_FOCUSED</td>
-     * <td align="center">End AF scan, Lens now locked</td>
-     * </tr>
-     * <tr>
-     * <td align="center">PASSIVE_SCAN</td>
-     * <td align="center">Camera device fails current scan</td>
-     * <td align="center">PASSIVE_UNFOCUSED</td>
-     * <td align="center">End AF scan, Lens now locked</td>
-     * </tr>
-     * <tr>
-     * <td align="center">PASSIVE_SCAN</td>
-     * <td align="center">AF_TRIGGER</td>
-     * <td align="center">FOCUSED_LOCKED</td>
-     * <td align="center">Immediate transition, if focus is good. Lens now locked</td>
-     * </tr>
-     * <tr>
-     * <td align="center">PASSIVE_SCAN</td>
-     * <td align="center">AF_TRIGGER</td>
-     * <td align="center">NOT_FOCUSED_LOCKED</td>
-     * <td align="center">Immediate transition, if focus is bad. Lens now locked</td>
-     * </tr>
-     * <tr>
-     * <td align="center">PASSIVE_SCAN</td>
-     * <td align="center">AF_CANCEL</td>
-     * <td align="center">INACTIVE</td>
-     * <td align="center">Reset lens position, Lens now locked</td>
-     * </tr>
-     * <tr>
-     * <td align="center">PASSIVE_FOCUSED</td>
-     * <td align="center">Camera device initiates new scan</td>
-     * <td align="center">PASSIVE_SCAN</td>
-     * <td align="center">Start AF scan, Lens now moving</td>
-     * </tr>
-     * <tr>
-     * <td align="center">PASSIVE_UNFOCUSED</td>
-     * <td align="center">Camera device initiates new scan</td>
-     * <td align="center">PASSIVE_SCAN</td>
-     * <td align="center">Start AF scan, Lens now moving</td>
-     * </tr>
-     * <tr>
-     * <td align="center">PASSIVE_FOCUSED</td>
-     * <td align="center">AF_TRIGGER</td>
-     * <td align="center">FOCUSED_LOCKED</td>
-     * <td align="center">Immediate transition, lens now locked</td>
-     * </tr>
-     * <tr>
-     * <td align="center">PASSIVE_UNFOCUSED</td>
-     * <td align="center">AF_TRIGGER</td>
-     * <td align="center">NOT_FOCUSED_LOCKED</td>
-     * <td align="center">Immediate transition, lens now locked</td>
-     * </tr>
-     * <tr>
-     * <td align="center">FOCUSED_LOCKED</td>
-     * <td align="center">AF_TRIGGER</td>
-     * <td align="center">FOCUSED_LOCKED</td>
-     * <td align="center">No effect</td>
-     * </tr>
-     * <tr>
-     * <td align="center">FOCUSED_LOCKED</td>
-     * <td align="center">AF_CANCEL</td>
-     * <td align="center">INACTIVE</td>
-     * <td align="center">Restart AF scan</td>
-     * </tr>
-     * <tr>
-     * <td align="center">NOT_FOCUSED_LOCKED</td>
-     * <td align="center">AF_TRIGGER</td>
-     * <td align="center">NOT_FOCUSED_LOCKED</td>
-     * <td align="center">No effect</td>
-     * </tr>
-     * <tr>
-     * <td align="center">NOT_FOCUSED_LOCKED</td>
-     * <td align="center">AF_CANCEL</td>
-     * <td align="center">INACTIVE</td>
-     * <td align="center">Restart AF scan</td>
-     * </tr>
-     * </tbody>
-     * </table>
+     * <p>State            | Transition Cause                    | New State          | Notes
+     * :-----------------:|:-----------------------------------:|:------------------:|:--------------:
+     * INACTIVE           | Camera device initiates new scan    | PASSIVE_SCAN       | Start AF scan, Lens now moving
+     * INACTIVE           | AF_TRIGGER                          | NOT_FOCUSED_LOCKED | AF state query, Lens now locked
+     * PASSIVE_SCAN       | Camera device completes current scan| PASSIVE_FOCUSED    | End AF scan, Lens now locked
+     * PASSIVE_SCAN       | Camera device fails current scan    | PASSIVE_UNFOCUSED  | End AF scan, Lens now locked
+     * PASSIVE_SCAN       | AF_TRIGGER                          | FOCUSED_LOCKED     | Immediate transition, if focus is good. Lens now locked
+     * PASSIVE_SCAN       | AF_TRIGGER                          | NOT_FOCUSED_LOCKED | Immediate transition, if focus is bad. Lens now locked
+     * PASSIVE_SCAN       | AF_CANCEL                           | INACTIVE           | Reset lens position, Lens now locked
+     * PASSIVE_FOCUSED    | Camera device initiates new scan    | PASSIVE_SCAN       | Start AF scan, Lens now moving
+     * PASSIVE_UNFOCUSED  | Camera device initiates new scan    | PASSIVE_SCAN       | Start AF scan, Lens now moving
+     * PASSIVE_FOCUSED    | AF_TRIGGER                          | FOCUSED_LOCKED     | Immediate transition, lens now locked
+     * PASSIVE_UNFOCUSED  | AF_TRIGGER                          | NOT_FOCUSED_LOCKED | Immediate transition, lens now locked
+     * FOCUSED_LOCKED     | AF_TRIGGER                          | FOCUSED_LOCKED     | No effect
+     * FOCUSED_LOCKED     | AF_CANCEL                           | INACTIVE           | Restart AF scan
+     * NOT_FOCUSED_LOCKED | AF_TRIGGER                          | NOT_FOCUSED_LOCKED | No effect
+     * NOT_FOCUSED_LOCKED | AF_CANCEL                           | INACTIVE           | Restart AF scan</p>
      * <p>When ACAMERA_CONTROL_AF_MODE is AF_MODE_CONTINUOUS_PICTURE:</p>
-     * <table>
-     * <thead>
-     * <tr>
-     * <th align="center">State</th>
-     * <th align="center">Transition Cause</th>
-     * <th align="center">New State</th>
-     * <th align="center">Notes</th>
-     * </tr>
-     * </thead>
-     * <tbody>
-     * <tr>
-     * <td align="center">INACTIVE</td>
-     * <td align="center">Camera device initiates new scan</td>
-     * <td align="center">PASSIVE_SCAN</td>
-     * <td align="center">Start AF scan, Lens now moving</td>
-     * </tr>
-     * <tr>
-     * <td align="center">INACTIVE</td>
-     * <td align="center">AF_TRIGGER</td>
-     * <td align="center">NOT_FOCUSED_LOCKED</td>
-     * <td align="center">AF state query, Lens now locked</td>
-     * </tr>
-     * <tr>
-     * <td align="center">PASSIVE_SCAN</td>
-     * <td align="center">Camera device completes current scan</td>
-     * <td align="center">PASSIVE_FOCUSED</td>
-     * <td align="center">End AF scan, Lens now locked</td>
-     * </tr>
-     * <tr>
-     * <td align="center">PASSIVE_SCAN</td>
-     * <td align="center">Camera device fails current scan</td>
-     * <td align="center">PASSIVE_UNFOCUSED</td>
-     * <td align="center">End AF scan, Lens now locked</td>
-     * </tr>
-     * <tr>
-     * <td align="center">PASSIVE_SCAN</td>
-     * <td align="center">AF_TRIGGER</td>
-     * <td align="center">FOCUSED_LOCKED</td>
-     * <td align="center">Eventual transition once the focus is good. Lens now locked</td>
-     * </tr>
-     * <tr>
-     * <td align="center">PASSIVE_SCAN</td>
-     * <td align="center">AF_TRIGGER</td>
-     * <td align="center">NOT_FOCUSED_LOCKED</td>
-     * <td align="center">Eventual transition if cannot find focus. Lens now locked</td>
-     * </tr>
-     * <tr>
-     * <td align="center">PASSIVE_SCAN</td>
-     * <td align="center">AF_CANCEL</td>
-     * <td align="center">INACTIVE</td>
-     * <td align="center">Reset lens position, Lens now locked</td>
-     * </tr>
-     * <tr>
-     * <td align="center">PASSIVE_FOCUSED</td>
-     * <td align="center">Camera device initiates new scan</td>
-     * <td align="center">PASSIVE_SCAN</td>
-     * <td align="center">Start AF scan, Lens now moving</td>
-     * </tr>
-     * <tr>
-     * <td align="center">PASSIVE_UNFOCUSED</td>
-     * <td align="center">Camera device initiates new scan</td>
-     * <td align="center">PASSIVE_SCAN</td>
-     * <td align="center">Start AF scan, Lens now moving</td>
-     * </tr>
-     * <tr>
-     * <td align="center">PASSIVE_FOCUSED</td>
-     * <td align="center">AF_TRIGGER</td>
-     * <td align="center">FOCUSED_LOCKED</td>
-     * <td align="center">Immediate trans. Lens now locked</td>
-     * </tr>
-     * <tr>
-     * <td align="center">PASSIVE_UNFOCUSED</td>
-     * <td align="center">AF_TRIGGER</td>
-     * <td align="center">NOT_FOCUSED_LOCKED</td>
-     * <td align="center">Immediate trans. Lens now locked</td>
-     * </tr>
-     * <tr>
-     * <td align="center">FOCUSED_LOCKED</td>
-     * <td align="center">AF_TRIGGER</td>
-     * <td align="center">FOCUSED_LOCKED</td>
-     * <td align="center">No effect</td>
-     * </tr>
-     * <tr>
-     * <td align="center">FOCUSED_LOCKED</td>
-     * <td align="center">AF_CANCEL</td>
-     * <td align="center">INACTIVE</td>
-     * <td align="center">Restart AF scan</td>
-     * </tr>
-     * <tr>
-     * <td align="center">NOT_FOCUSED_LOCKED</td>
-     * <td align="center">AF_TRIGGER</td>
-     * <td align="center">NOT_FOCUSED_LOCKED</td>
-     * <td align="center">No effect</td>
-     * </tr>
-     * <tr>
-     * <td align="center">NOT_FOCUSED_LOCKED</td>
-     * <td align="center">AF_CANCEL</td>
-     * <td align="center">INACTIVE</td>
-     * <td align="center">Restart AF scan</td>
-     * </tr>
-     * </tbody>
-     * </table>
+     * <p>State            | Transition Cause                     | New State          | Notes
+     * :-----------------:|:------------------------------------:|:------------------:|:--------------:
+     * INACTIVE           | Camera device initiates new scan     | PASSIVE_SCAN       | Start AF scan, Lens now moving
+     * INACTIVE           | AF_TRIGGER                           | NOT_FOCUSED_LOCKED | AF state query, Lens now locked
+     * PASSIVE_SCAN       | Camera device completes current scan | PASSIVE_FOCUSED    | End AF scan, Lens now locked
+     * PASSIVE_SCAN       | Camera device fails current scan     | PASSIVE_UNFOCUSED  | End AF scan, Lens now locked
+     * PASSIVE_SCAN       | AF_TRIGGER                           | FOCUSED_LOCKED     | Eventual transition once the focus is good. Lens now locked
+     * PASSIVE_SCAN       | AF_TRIGGER                           | NOT_FOCUSED_LOCKED | Eventual transition if cannot find focus. Lens now locked
+     * PASSIVE_SCAN       | AF_CANCEL                            | INACTIVE           | Reset lens position, Lens now locked
+     * PASSIVE_FOCUSED    | Camera device initiates new scan     | PASSIVE_SCAN       | Start AF scan, Lens now moving
+     * PASSIVE_UNFOCUSED  | Camera device initiates new scan     | PASSIVE_SCAN       | Start AF scan, Lens now moving
+     * PASSIVE_FOCUSED    | AF_TRIGGER                           | FOCUSED_LOCKED     | Immediate trans. Lens now locked
+     * PASSIVE_UNFOCUSED  | AF_TRIGGER                           | NOT_FOCUSED_LOCKED | Immediate trans. Lens now locked
+     * FOCUSED_LOCKED     | AF_TRIGGER                           | FOCUSED_LOCKED     | No effect
+     * FOCUSED_LOCKED     | AF_CANCEL                            | INACTIVE           | Restart AF scan
+     * NOT_FOCUSED_LOCKED | AF_TRIGGER                           | NOT_FOCUSED_LOCKED | No effect
+     * NOT_FOCUSED_LOCKED | AF_CANCEL                            | INACTIVE           | Restart AF scan</p>
      * <p>When switch between AF_MODE_CONTINUOUS_* (CAF modes) and AF_MODE_AUTO/AF_MODE_MACRO
      * (AUTO modes), the initial INACTIVE or PASSIVE_SCAN states may be skipped by the
      * camera device. When a trigger is included in a mode switch request, the trigger
      * will be evaluated in the context of the new mode in the request.
      * See below table for examples:</p>
-     * <table>
-     * <thead>
-     * <tr>
-     * <th align="center">State</th>
-     * <th align="center">Transition Cause</th>
-     * <th align="center">New State</th>
-     * <th align="center">Notes</th>
-     * </tr>
-     * </thead>
-     * <tbody>
-     * <tr>
-     * <td align="center">any state</td>
-     * <td align="center">CAF--&gt;AUTO mode switch</td>
-     * <td align="center">INACTIVE</td>
-     * <td align="center">Mode switch without trigger, initial state must be INACTIVE</td>
-     * </tr>
-     * <tr>
-     * <td align="center">any state</td>
-     * <td align="center">CAF--&gt;AUTO mode switch with AF_TRIGGER</td>
-     * <td align="center">trigger-reachable states from INACTIVE</td>
-     * <td align="center">Mode switch with trigger, INACTIVE is skipped</td>
-     * </tr>
-     * <tr>
-     * <td align="center">any state</td>
-     * <td align="center">AUTO--&gt;CAF mode switch</td>
-     * <td align="center">passively reachable states from INACTIVE</td>
-     * <td align="center">Mode switch without trigger, passive transient state is skipped</td>
-     * </tr>
-     * </tbody>
-     * </table>
+     * <p>State      | Transition Cause                       | New State                                | Notes
+     * :-----------:|:--------------------------------------:|:----------------------------------------:|:--------------:
+     * any state    | CAF--&gt;AUTO mode switch                 | INACTIVE                                 | Mode switch without trigger, initial state must be INACTIVE
+     * any state    | CAF--&gt;AUTO mode switch with AF_TRIGGER | trigger-reachable states from INACTIVE   | Mode switch with trigger, INACTIVE is skipped
+     * any state    | AUTO--&gt;CAF mode switch                 | passively reachable states from INACTIVE | Mode switch without trigger, passive transient state is skipped</p>
      *
      * @see ACAMERA_CONTROL_AF_MODE
      * @see ACAMERA_CONTROL_MODE
@@ -1813,109 +1373,29 @@
      * be good to use.</p>
      * <p>Below are state transition tables for different AWB modes.</p>
      * <p>When <code>ACAMERA_CONTROL_AWB_MODE != AWB_MODE_AUTO</code>:</p>
-     * <table>
-     * <thead>
-     * <tr>
-     * <th align="center">State</th>
-     * <th align="center">Transition Cause</th>
-     * <th align="center">New State</th>
-     * <th align="center">Notes</th>
-     * </tr>
-     * </thead>
-     * <tbody>
-     * <tr>
-     * <td align="center">INACTIVE</td>
-     * <td align="center"></td>
-     * <td align="center">INACTIVE</td>
-     * <td align="center">Camera device auto white balance algorithm is disabled</td>
-     * </tr>
-     * </tbody>
-     * </table>
+     * <p>State       | Transition Cause | New State | Notes
+     * :------------:|:----------------:|:---------:|:-----------------------:
+     * INACTIVE      |                  | INACTIVE  | Camera device auto white balance algorithm is disabled</p>
      * <p>When ACAMERA_CONTROL_AWB_MODE is AWB_MODE_AUTO:</p>
-     * <table>
-     * <thead>
-     * <tr>
-     * <th align="center">State</th>
-     * <th align="center">Transition Cause</th>
-     * <th align="center">New State</th>
-     * <th align="center">Notes</th>
-     * </tr>
-     * </thead>
-     * <tbody>
-     * <tr>
-     * <td align="center">INACTIVE</td>
-     * <td align="center">Camera device initiates AWB scan</td>
-     * <td align="center">SEARCHING</td>
-     * <td align="center">Values changing</td>
-     * </tr>
-     * <tr>
-     * <td align="center">INACTIVE</td>
-     * <td align="center">ACAMERA_CONTROL_AWB_LOCK is ON</td>
-     * <td align="center">LOCKED</td>
-     * <td align="center">Values locked</td>
-     * </tr>
-     * <tr>
-     * <td align="center">SEARCHING</td>
-     * <td align="center">Camera device finishes AWB scan</td>
-     * <td align="center">CONVERGED</td>
-     * <td align="center">Good values, not changing</td>
-     * </tr>
-     * <tr>
-     * <td align="center">SEARCHING</td>
-     * <td align="center">ACAMERA_CONTROL_AWB_LOCK is ON</td>
-     * <td align="center">LOCKED</td>
-     * <td align="center">Values locked</td>
-     * </tr>
-     * <tr>
-     * <td align="center">CONVERGED</td>
-     * <td align="center">Camera device initiates AWB scan</td>
-     * <td align="center">SEARCHING</td>
-     * <td align="center">Values changing</td>
-     * </tr>
-     * <tr>
-     * <td align="center">CONVERGED</td>
-     * <td align="center">ACAMERA_CONTROL_AWB_LOCK is ON</td>
-     * <td align="center">LOCKED</td>
-     * <td align="center">Values locked</td>
-     * </tr>
-     * <tr>
-     * <td align="center">LOCKED</td>
-     * <td align="center">ACAMERA_CONTROL_AWB_LOCK is OFF</td>
-     * <td align="center">SEARCHING</td>
-     * <td align="center">Values not good after unlock</td>
-     * </tr>
-     * </tbody>
-     * </table>
+     * <p>State        | Transition Cause                 | New State     | Notes
+     * :-------------:|:--------------------------------:|:-------------:|:-----------------:
+     * INACTIVE       | Camera device initiates AWB scan | SEARCHING     | Values changing
+     * INACTIVE       | ACAMERA_CONTROL_AWB_LOCK is ON    | LOCKED        | Values locked
+     * SEARCHING      | Camera device finishes AWB scan  | CONVERGED     | Good values, not changing
+     * SEARCHING      | ACAMERA_CONTROL_AWB_LOCK is ON    | LOCKED        | Values locked
+     * CONVERGED      | Camera device initiates AWB scan | SEARCHING     | Values changing
+     * CONVERGED      | ACAMERA_CONTROL_AWB_LOCK is ON    | LOCKED        | Values locked
+     * LOCKED         | ACAMERA_CONTROL_AWB_LOCK is OFF   | SEARCHING     | Values not good after unlock</p>
      * <p>For the above table, the camera device may skip reporting any state changes that happen
      * without application intervention (i.e. mode switch, trigger, locking). Any state that
      * can be skipped in that manner is called a transient state.</p>
      * <p>For example, for this AWB mode (AWB_MODE_AUTO), in addition to the state transitions
      * listed in above table, it is also legal for the camera device to skip one or more
      * transient states between two results. See below table for examples:</p>
-     * <table>
-     * <thead>
-     * <tr>
-     * <th align="center">State</th>
-     * <th align="center">Transition Cause</th>
-     * <th align="center">New State</th>
-     * <th align="center">Notes</th>
-     * </tr>
-     * </thead>
-     * <tbody>
-     * <tr>
-     * <td align="center">INACTIVE</td>
-     * <td align="center">Camera device finished AWB scan</td>
-     * <td align="center">CONVERGED</td>
-     * <td align="center">Values are already good, transient states are skipped by camera device.</td>
-     * </tr>
-     * <tr>
-     * <td align="center">LOCKED</td>
-     * <td align="center">ACAMERA_CONTROL_AWB_LOCK is OFF</td>
-     * <td align="center">CONVERGED</td>
-     * <td align="center">Values good after unlock, transient states are skipped by camera device.</td>
-     * </tr>
-     * </tbody>
-     * </table>
+     * <p>State        | Transition Cause                 | New State     | Notes
+     * :-------------:|:--------------------------------:|:-------------:|:-----------------:
+     * INACTIVE       | Camera device finished AWB scan  | CONVERGED     | Values are already good, transient states are skipped by camera device.
+     * LOCKED         | ACAMERA_CONTROL_AWB_LOCK is OFF   | CONVERGED     | Values good after unlock, transient states are skipped by camera device.</p>
      *
      * @see ACAMERA_CONTROL_AWB_LOCK
      * @see ACAMERA_CONTROL_AWB_MODE
@@ -2326,14 +1806,14 @@
      * <p>When an ACAMERA_JPEG_ORIENTATION of non-zero degree is requested,
      * the camera device will handle thumbnail rotation in one of the following ways:</p>
      * <ul>
-     * <li>Set the {@link android.media.ExifInterface#TAG_ORIENTATION EXIF orientation flag}
+     * <li>Set the
+     *   <a href="https://developer.android.com/reference/android/media/ExifInterface.html#TAG_ORIENTATION">EXIF orientation flag</a>
      *   and keep jpeg and thumbnail image data unrotated.</li>
      * <li>Rotate the jpeg and thumbnail image data and not set
-     *   {@link android.media.ExifInterface#TAG_ORIENTATION EXIF orientation flag}. In this
-     *   case, LIMITED or FULL hardware level devices will report rotated thumnail size in
-     *   capture result, so the width and height will be interchanged if 90 or 270 degree
-     *   orientation is requested. LEGACY device will always report unrotated thumbnail
-     *   size.</li>
+     *   <a href="https://developer.android.com/reference/android/media/ExifInterface.html#TAG_ORIENTATION">EXIF orientation flag</a>.
+     *   In this case, LIMITED or FULL hardware level devices will report the rotated thumbnail
+     *   size in the capture result, so the width and height will be interchanged if 90 or 270
+     *   degree orientation is requested. LEGACY devices will always report the unrotated thumbnail size.</li>
      * </ul>
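+     * <p>For example, a 90-degree JPEG orientation could be requested as below (a sketch;
+     * "request" is assumed to be a previously created ACaptureRequest, and the device may
+     * pick either rotation strategy above):</p>
+     * <pre><code>int32_t orientation = 90; // clockwise degrees; must be 0, 90, 180 or 270
+     * ACaptureRequest_setEntry_i32(request, ACAMERA_JPEG_ORIENTATION, 1, &amp;orientation);
+     * </code></pre>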
      *
      * @see ACAMERA_JPEG_ORIENTATION
@@ -2568,9 +2048,9 @@
      * <p>The position of the camera device's lens optical center,
      * as a three-dimensional vector <code>(x,y,z)</code>, relative to the
      * optical center of the largest camera device facing in the
-     * same direction as this camera, in the {@link
-     * android.hardware.SensorEvent Android sensor coordinate
-     * axes}. Note that only the axis definitions are shared with
+     * same direction as this camera, in the
+     * <a href="https://developer.android.com/reference/android/hardware/SensorEvent.html">Android sensor coordinate axes</a>.
+     * Note that only the axis definitions are shared with
      * the sensor coordinate system, but not the origin.</p>
      * <p>If this device is the largest or only camera device with a
      * given facing, then this position will be <code>(0, 0, 0)</code>; a
@@ -2982,14 +2462,11 @@
      * into the 3 stream types as below:</p>
      * <ul>
      * <li>Processed (but stalling): any non-RAW format with a stallDurations &gt; 0.
-     *   Typically {@link android.graphics.ImageFormat#JPEG JPEG format}.</li>
-     * <li>Raw formats: {@link android.graphics.ImageFormat#RAW_SENSOR RAW_SENSOR}, {@link
-     *   android.graphics.ImageFormat#RAW10 RAW10}, or {@link android.graphics.ImageFormat#RAW12
-     *   RAW12}.</li>
+     *   Typically {@link AIMAGE_FORMAT_JPEG} format.</li>
+     * <li>Raw formats: {@link AIMAGE_FORMAT_RAW16}, {@link AIMAGE_FORMAT_RAW10}, or
+     *   {@link AIMAGE_FORMAT_RAW12}.</li>
      * <li>Processed (but not-stalling): any non-RAW format without a stall duration.
-     *   Typically {@link android.graphics.ImageFormat#YUV_420_888 YUV_420_888},
-     *   {@link android.graphics.ImageFormat#NV21 NV21}, or
-     *   {@link android.graphics.ImageFormat#YV12 YV12}.</li>
+     *   Typically {@link AIMAGE_FORMAT_YUV_420_888}.</li>
      * </ul>
      *
      * @see ACAMERA_SCALER_AVAILABLE_STREAM_CONFIGURATIONS
@@ -2997,29 +2474,6 @@
     ACAMERA_REQUEST_MAX_NUM_OUTPUT_STREAMS =                    // int32[3]
             ACAMERA_REQUEST_START + 6,
     /**
-     * <p>The maximum numbers of any type of input streams
-     * that can be configured and used simultaneously by a camera device.</p>
-     *
-     * <p>This tag may appear in:</p>
-     * <ul>
-     *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
-     * </ul>
-     *
-     * <p>When set to 0, it means no input stream is supported.</p>
-     * <p>The image format for a input stream can be any supported format returned by {@link
-     * android.hardware.camera2.params.StreamConfigurationMap#getInputFormats}. When using an
-     * input stream, there must be at least one output stream configured to to receive the
-     * reprocessed images.</p>
-     * <p>When an input stream and some output streams are used in a reprocessing request,
-     * only the input buffer will be used to produce these output stream buffers, and a
-     * new sensor image will not be captured.</p>
-     * <p>For example, for Zero Shutter Lag (ZSL) still capture use case, the input
-     * stream image format will be PRIVATE, the associated output stream image format
-     * should be JPEG.</p>
-     */
-    ACAMERA_REQUEST_MAX_NUM_INPUT_STREAMS =                     // int32
-            ACAMERA_REQUEST_START + 8,
-    /**
      * <p>Specifies the number of pipeline stages the frame went
      * through from when it was exposed to when the final completed result
      * was available to the framework.</p>
@@ -3125,7 +2579,7 @@
             ACAMERA_REQUEST_START + 12,
     /**
      * <p>A list of all keys that the camera device has available
-     * to use with {@link android.hardware.camera2.CaptureRequest}.</p>
+     * to use with {@link ACaptureRequest}.</p>
      *
      * <p>This tag may appear in:</p>
      * <ul>
@@ -3146,7 +2600,8 @@
             ACAMERA_REQUEST_START + 13,
     /**
      * <p>A list of all keys that the camera device has available
-     * to use with {@link android.hardware.camera2.CaptureResult}.</p>
+     * to query with {@link ACameraMetadata} from
+     * {@link ACameraCaptureSession_captureCallback_result}.</p>
      *
      * <p>This tag may appear in:</p>
      * <ul>
@@ -3176,7 +2631,8 @@
             ACAMERA_REQUEST_START + 14,
     /**
      * <p>A list of all keys that the camera device has available
-     * to use with {@link android.hardware.camera2.CameraCharacteristics}.</p>
+     * to query with {@link ACameraMetadata} from
+     * {@link ACameraManager_getCameraCharacteristics}.</p>
      *
      * <p>This tag may appear in:</p>
      * <ul>
@@ -3204,6 +2660,7 @@
      * </ul>
      *
      * <p>This control can be used to implement digital zoom.</p>
+     * <p>The data representation is int[4], which maps to (left, top, width, height).</p>
      * <p>The crop region coordinate system is based off
      * ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE, with <code>(0, 0)</code> being the
      * top-left corner of the sensor active array.</p>
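+     * <p>As a sketch, a 2x digital zoom on a hypothetical 4000x3000 active array could be
+     * requested by centering a half-size crop region ("request" is assumed to be a previously
+     * created ACaptureRequest):</p>
+     * <pre><code>int32_t crop[4] = {1000, 750, 2000, 1500}; // (left, top, width, height)
+     * ACaptureRequest_setEntry_i32(request, ACAMERA_SCALER_CROP_REGION, 4, crop);
+     * </code></pre>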
@@ -3286,66 +2743,16 @@
      * <p>The following table describes the minimum required output stream
      * configurations based on the hardware level
      * (ACAMERA_INFO_SUPPORTED_HARDWARE_LEVEL):</p>
-     * <table>
-     * <thead>
-     * <tr>
-     * <th align="center">Format</th>
-     * <th align="center">Size</th>
-     * <th align="center">Hardware Level</th>
-     * <th align="center">Notes</th>
-     * </tr>
-     * </thead>
-     * <tbody>
-     * <tr>
-     * <td align="center">JPEG</td>
-     * <td align="center">ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE</td>
-     * <td align="center">Any</td>
-     * <td align="center"></td>
-     * </tr>
-     * <tr>
-     * <td align="center">JPEG</td>
-     * <td align="center">1920x1080 (1080p)</td>
-     * <td align="center">Any</td>
-     * <td align="center">if 1080p &lt;= activeArraySize</td>
-     * </tr>
-     * <tr>
-     * <td align="center">JPEG</td>
-     * <td align="center">1280x720 (720)</td>
-     * <td align="center">Any</td>
-     * <td align="center">if 720p &lt;= activeArraySize</td>
-     * </tr>
-     * <tr>
-     * <td align="center">JPEG</td>
-     * <td align="center">640x480 (480p)</td>
-     * <td align="center">Any</td>
-     * <td align="center">if 480p &lt;= activeArraySize</td>
-     * </tr>
-     * <tr>
-     * <td align="center">JPEG</td>
-     * <td align="center">320x240 (240p)</td>
-     * <td align="center">Any</td>
-     * <td align="center">if 240p &lt;= activeArraySize</td>
-     * </tr>
-     * <tr>
-     * <td align="center">YUV_420_888</td>
-     * <td align="center">all output sizes available for JPEG</td>
-     * <td align="center">FULL</td>
-     * <td align="center"></td>
-     * </tr>
-     * <tr>
-     * <td align="center">YUV_420_888</td>
-     * <td align="center">all output sizes available for JPEG, up to the maximum video size</td>
-     * <td align="center">LIMITED</td>
-     * <td align="center"></td>
-     * </tr>
-     * <tr>
-     * <td align="center">IMPLEMENTATION_DEFINED</td>
-     * <td align="center">same as YUV_420_888</td>
-     * <td align="center">Any</td>
-     * <td align="center"></td>
-     * </tr>
-     * </tbody>
-     * </table>
+     * <p>Format         | Size                                         | Hardware Level | Notes
+     * :-------------:|:--------------------------------------------:|:--------------:|:--------------:
+     * JPEG           | ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE          | Any            |
+     * JPEG           | 1920x1080 (1080p)                            | Any            | if 1080p &lt;= activeArraySize
+     * JPEG           | 1280x720 (720p)                              | Any            | if 720p &lt;= activeArraySize
+     * JPEG           | 640x480 (480p)                               | Any            | if 480p &lt;= activeArraySize
+     * JPEG           | 320x240 (240p)                               | Any            | if 240p &lt;= activeArraySize
+     * YUV_420_888    | all output sizes available for JPEG          | FULL           |
+     * YUV_420_888    | all output sizes available for JPEG, up to the maximum video size | LIMITED        |
+     * IMPLEMENTATION_DEFINED | same as YUV_420_888                  | Any            |</p>
      * <p>Refer to ACAMERA_REQUEST_AVAILABLE_CAPABILITIES for additional
      * mandatory stream configurations on a per-capability basis.</p>
      *
@@ -3374,8 +2781,6 @@
      * <p>See ACAMERA_SENSOR_FRAME_DURATION and
      * ACAMERA_SCALER_AVAILABLE_STALL_DURATIONS for more details about
      * calculating the max frame rate.</p>
-     * <p>(Keep in sync with
-     * {@link android.hardware.camera2.params.StreamConfigurationMap#getOutputMinFrameDuration})</p>
      *
      * @see ACAMERA_SCALER_AVAILABLE_STALL_DURATIONS
      * @see ACAMERA_SENSOR_FRAME_DURATION
@@ -3432,21 +2837,19 @@
      * ignored).</p>
      * <p>The following formats may always have a stall duration:</p>
      * <ul>
-     * <li>{@link android.graphics.ImageFormat#JPEG}</li>
-     * <li>{@link android.graphics.ImageFormat#RAW_SENSOR}</li>
+     * <li>{@link AIMAGE_FORMAT_JPEG}</li>
+     * <li>{@link AIMAGE_FORMAT_RAW16}</li>
      * </ul>
      * <p>The following formats will never have a stall duration:</p>
      * <ul>
-     * <li>{@link android.graphics.ImageFormat#YUV_420_888}</li>
-     * <li>{@link android.graphics.ImageFormat#RAW10}</li>
+     * <li>{@link AIMAGE_FORMAT_YUV_420_888}</li>
+     * <li>{@link AIMAGE_FORMAT_RAW10}</li>
      * </ul>
      * <p>All other formats may or may not have an allowed stall duration on
      * a per-capability basis; refer to ACAMERA_REQUEST_AVAILABLE_CAPABILITIES
      * for more details.</p>
      * <p>See ACAMERA_SENSOR_FRAME_DURATION for more information about
      * calculating the max frame rate (absent stalls).</p>
-     * <p>(Keep up to date with
-     * {@link android.hardware.camera2.params.StreamConfigurationMap#getOutputStallDuration} )</p>
      *
      * @see ACAMERA_REQUEST_AVAILABLE_CAPABILITIES
      * @see ACAMERA_SENSOR_FRAME_DURATION
@@ -3545,8 +2948,8 @@
      * cannot process more than 1 capture at a time.</li>
      * </ul>
      * <p>The necessary information for the application, given the model above,
-     * is provided via the android.scaler.streamConfigurationMap field using
-     * {@link android.hardware.camera2.params.StreamConfigurationMap#getOutputMinFrameDuration}.
+     * is provided via
+     * {@link ACAMERA_SCALER_AVAILABLE_MIN_FRAME_DURATIONS}.
      * These are used to determine the maximum frame rate / minimum frame
      * duration that is possible for a given stream configuration.</p>
      * <p>Specifically, the application can use the following rules to
@@ -3556,8 +2959,7 @@
      * <li>Let the set of currently configured input/output streams
      * be called <code>S</code>.</li>
      * <li>Find the minimum frame durations for each stream in <code>S</code>, by looking
-     * it up in android.scaler.streamConfigurationMap using {@link
-     * android.hardware.camera2.params.StreamConfigurationMap#getOutputMinFrameDuration}
+     * it up in {@link ACAMERA_SCALER_AVAILABLE_MIN_FRAME_DURATIONS}
      * (with its respective size/format). Let this set of frame durations be
      * called <code>F</code>.</li>
      * <li>For any given request <code>R</code>, the minimum frame duration allowed
@@ -3565,7 +2967,7 @@
      * used in <code>R</code> be called <code>S_r</code>.</li>
      * </ol>
      * <p>If none of the streams in <code>S_r</code> have a stall time (listed in {@link
-     * android.hardware.camera2.params.StreamConfigurationMap#getOutputStallDuration}
+     * ACAMERA_SCALER_AVAILABLE_STALL_DURATIONS}
      * using its respective size/format), then the frame duration in <code>F</code>
      * determines the steady state frame rate that the application will get
      * if it uses <code>R</code> as a repeating request. Let this special kind of
@@ -3577,7 +2979,7 @@
      * if all buffers from the previous <code>Rstall</code> have already been
      * delivered.</p>
      * <p>For more details about stalling, see
-     * {@link android.hardware.camera2.params.StreamConfigurationMap#getOutputStallDuration}.</p>
+     * {@link ACAMERA_SCALER_AVAILABLE_STALL_DURATIONS}.</p>
      * <p>This control is only effective if ACAMERA_CONTROL_AE_MODE or ACAMERA_CONTROL_MODE is set to
      * OFF; otherwise the auto-exposure algorithm will override this value.</p>
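+     * <p>As a sketch (assuming auto-exposure is OFF and "request" is a previously created
+     * ACaptureRequest), a ~30fps frame duration could be requested with:</p>
+     * <pre><code>int64_t frameDuration = 33333333LL; // nanoseconds, ~1/30s
+     * ACaptureRequest_setEntry_i64(request, ACAMERA_SENSOR_FRAME_DURATION, 1, &amp;frameDuration);
+     * </code></pre>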
      *
@@ -3909,8 +3311,9 @@
      * timestamps for other captures from the same camera device, but are
      * not guaranteed to be comparable to any other time source.</p>
      * <p>When ACAMERA_SENSOR_INFO_TIMESTAMP_SOURCE <code>==</code> REALTIME, the
-     * timestamps measure time in the same timebase as {@link
-     * android.os.SystemClock#elapsedRealtimeNanos}, and they can
+     * timestamps measure time in the same timebase as
+     * <a href="https://developer.android.com/reference/android/os/SystemClock.html#elapsedRealtimeNanos">elapsedRealtimeNanos</a>
+     * (or CLOCK_BOOTTIME), and they can
      * be compared to other timestamps from other subsystems that
      * are using that base.</p>
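+     * <p>A sketch of comparing a REALTIME sensor timestamp against the current boottime clock
+     * (assumes POSIX clock_gettime is available; "sensorTimestampNs" is a hypothetical value
+     * read from ACAMERA_SENSOR_TIMESTAMP):</p>
+     * <pre><code>struct timespec ts;
+     * clock_gettime(CLOCK_BOOTTIME, &amp;ts);
+     * int64_t nowNs = ts.tv_sec * 1000000000LL + ts.tv_nsec;
+     * int64_t pipelineDelayNs = nowNs - sensorTimestampNs;
+     * </code></pre>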
      * <p>For reprocessing, the timestamp will match the start of exposure of
@@ -4100,6 +3503,7 @@
      * optically shielded pixel areas. By blocking light, these pixels
      * provides a reliable black reference for black level compensation
      * in active array region.</p>
+     * <p>The data representation is int[4], which maps to (left, top, width, height).</p>
      * <p>This key provides a list of disjoint rectangles specifying the
      * regions of optically shielded (with metal shield) black pixel
      * regions if the camera device is capable of reading out these black
@@ -4149,7 +3553,7 @@
      * color channel listed in the CFA.</p>
      * <p>This key will be available if ACAMERA_SENSOR_OPTICAL_BLACK_REGIONS is
      * available or the camera device advertises this key via
-     * {@link android.hardware.camera2.CameraCharacteristics#getAvailableCaptureResultKeys}.</p>
+     * {@link ACAMERA_REQUEST_AVAILABLE_RESULT_KEYS}.</p>
      *
      * @see ACAMERA_SENSOR_BLACK_LEVEL_PATTERN
      * @see ACAMERA_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT
@@ -4173,7 +3577,7 @@
      * estimated white level for each frame.</p>
      * <p>This key will be available if ACAMERA_SENSOR_OPTICAL_BLACK_REGIONS is
      * available or the camera device advertises this key via
-     * {@link android.hardware.camera2.CameraCharacteristics#getAvailableCaptureRequestKeys}.</p>
+     * {@link ACAMERA_REQUEST_AVAILABLE_RESULT_KEYS}.</p>
      *
      * @see ACAMERA_SENSOR_BLACK_LEVEL_PATTERN
      * @see ACAMERA_SENSOR_INFO_WHITE_LEVEL
@@ -4200,6 +3604,7 @@
      * <p>This rectangle is defined relative to the full pixel array; (0,0) is the top-left of
      * the full pixel array, and the size of the full pixel array is given by
      * ACAMERA_SENSOR_INFO_PIXEL_ARRAY_SIZE.</p>
+     * <p>The data representation is int[4], which maps to (left, top, width, height).</p>
      * <p>The coordinate system for most other keys that list pixel coordinates, including
      * ACAMERA_SCALER_CROP_REGION, is defined relative to the active array rectangle given in
      * this field, with <code>(0, 0)</code> being the top-left of this rectangle.</p>
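+     * <p>A sketch of reading this rectangle ("chars" is assumed to be an ACameraMetadata
+     * obtained from ACameraManager_getCameraCharacteristics):</p>
+     * <pre><code>ACameraMetadata_const_entry entry;
+     * ACameraMetadata_getConstEntry(chars, ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE, &amp;entry);
+     * int32_t left = entry.data.i32[0], top = entry.data.i32[1];
+     * int32_t width = entry.data.i32[2], height = entry.data.i32[3];
+     * </code></pre>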
@@ -4272,7 +3677,7 @@
      * duration being clipped to the maximum. See that control for a full definition of frame
      * durations.</p>
      * <p>Refer to {@link
-     * android.hardware.camera2.params.StreamConfigurationMap#getOutputMinFrameDuration}
+     * ACAMERA_SCALER_AVAILABLE_MIN_FRAME_DURATIONS}
      * for the minimum frame duration values.</p>
      */
     ACAMERA_SENSOR_INFO_MAX_FRAME_DURATION =                    // int64
@@ -4307,7 +3712,7 @@
      * the raw buffers produced by this sensor.</p>
      * <p>If a camera device supports raw sensor formats, either this or
      * ACAMERA_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE is the maximum dimensions for the raw
-     * output formats listed in android.scaler.streamConfigurationMap (this depends on
+     * output formats listed in ACAMERA_SCALER_AVAILABLE_STREAM_CONFIGURATIONS (this depends on
      * whether or not the image sensor returns buffers containing pixels that are not
      * part of the active array region for blacklevel calibration or other purposes).</p>
      * <p>Some parts of the full pixel array may not receive light from the scene,
@@ -4391,6 +3796,7 @@
      *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
      * </ul>
      *
+     * <p>The data representation is int[4], which maps to (left, top, width, height).</p>
      * <p>This is the rectangle representing the size of the active region of the sensor (i.e.
      * the region that actually receives light from the scene) before any geometric correction
      * has been applied, and should be treated as the active region rectangle for any of the
@@ -4465,7 +3871,7 @@
      * camera device, and an identity lens shading map data will be provided
      * if <code>ACAMERA_STATISTICS_LENS_SHADING_MAP_MODE == ON</code>. For example, for lens
      * shading map with size of <code>[ 4, 3 ]</code>,
-     * the output ACAMERA_STATISTICS_LENS_SHADING_CORRECTION_MAP for this case will be an identity
+     * the output android.statistics.lensShadingCorrectionMap for this case will be an identity
      * map shown below:</p>
      * <pre><code>[ 1.0, 1.0, 1.0, 1.0,  1.0, 1.0, 1.0, 1.0,
      *  1.0, 1.0, 1.0, 1.0,  1.0, 1.0, 1.0, 1.0,
@@ -4477,7 +3883,7 @@
      * <p>When set to other modes, lens shading correction will be applied by the camera
      * device. Applications can request lens shading map data by setting
      * ACAMERA_STATISTICS_LENS_SHADING_MAP_MODE to ON, and then the camera device will provide lens
-     * shading map data in ACAMERA_STATISTICS_LENS_SHADING_CORRECTION_MAP; the returned shading map
+     * shading map data in android.statistics.lensShadingCorrectionMap; the returned shading map
      * data will be the one applied by the camera device for this capture request.</p>
      * <p>The shading map data may depend on the auto-exposure (AE) and AWB statistics, therefore
      * the reliability of the map data may be affected by the AE and AWB algorithms. When AE and
@@ -4487,7 +3893,6 @@
      *
      * @see ACAMERA_CONTROL_AE_MODE
      * @see ACAMERA_CONTROL_AWB_MODE
-     * @see ACAMERA_STATISTICS_LENS_SHADING_CORRECTION_MAP
      * @see ACAMERA_STATISTICS_LENS_SHADING_MAP_MODE
      */
     ACAMERA_SHADING_MODE =                                      // byte (enum)
@@ -4587,6 +3992,7 @@
      *   <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
      * </ul>
      *
+     * <p>The data representation is int[4], which maps to (left, top, width, height).</p>
      * <p>The coordinate system is that of ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE, with
      * <code>(0, 0)</code> being the top-left pixel of the active array.</p>
      * <p>Only available if ACAMERA_STATISTICS_FACE_DETECT_MODE != OFF</p>
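+     * <p>A sketch of reading these rectangles from a capture result ("result" is assumed to be
+     * the ACameraMetadata delivered to ACameraCaptureSession_captureCallback_result, and the
+     * tag is assumed to be ACAMERA_STATISTICS_FACE_RECTANGLES):</p>
+     * <pre><code>ACameraMetadata_const_entry entry;
+     * if (ACameraMetadata_getConstEntry(result, ACAMERA_STATISTICS_FACE_RECTANGLES, &amp;entry) == ACAMERA_OK) {
+     *     for (uint32_t i = 0; i + 3 &lt; entry.count; i += 4) {
+     *         // entry.data.i32[i .. i+3] is one (left, top, width, height) rectangle
+     *     }
+     * }
+     * </code></pre>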
@@ -4613,57 +4019,6 @@
             ACAMERA_STATISTICS_START + 7,
     /**
      * <p>The shading map is a low-resolution floating-point map
-     * that lists the coefficients used to correct for vignetting, for each
-     * Bayer color channel.</p>
-     *
-     * <p>This tag may appear in:</p>
-     * <ul>
-     *   <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
-     * </ul>
-     *
-     * <p>The least shaded section of the image should have a gain factor
-     * of 1; all other sections should have gains above 1.</p>
-     * <p>When ACAMERA_COLOR_CORRECTION_MODE = TRANSFORM_MATRIX, the map
-     * must take into account the colorCorrection settings.</p>
-     * <p>The shading map is for the entire active pixel array, and is not
-     * affected by the crop region specified in the request. Each shading map
-     * entry is the value of the shading compensation map over a specific
-     * pixel on the sensor.  Specifically, with a (N x M) resolution shading
-     * map, and an active pixel array size (W x H), shading map entry
-     * (x,y) ϵ (0 ... N-1, 0 ... M-1) is the value of the shading map at
-     * pixel ( ((W-1)/(N-1)) * x, ((H-1)/(M-1)) * y) for the four color channels.
-     * The map is assumed to be bilinearly interpolated between the sample points.</p>
-     * <p>The channel order is [R, Geven, Godd, B], where Geven is the green
-     * channel for the even rows of a Bayer pattern, and Godd is the odd rows.
-     * The shading map is stored in a fully interleaved format.</p>
-     * <p>The shading map should have on the order of 30-40 rows and columns,
-     * and must be smaller than 64x64.</p>
-     * <p>As an example, given a very small map defined as:</p>
-     * <pre><code>width,height = [ 4, 3 ]
-     * values =
-     * [ 1.3, 1.2, 1.15, 1.2,  1.2, 1.2, 1.15, 1.2,
-     *     1.1, 1.2, 1.2, 1.2,  1.3, 1.2, 1.3, 1.3,
-     *   1.2, 1.2, 1.25, 1.1,  1.1, 1.1, 1.1, 1.0,
-     *     1.0, 1.0, 1.0, 1.0,  1.2, 1.3, 1.25, 1.2,
-     *   1.3, 1.2, 1.2, 1.3,   1.2, 1.15, 1.1, 1.2,
-     *     1.2, 1.1, 1.0, 1.2,  1.3, 1.15, 1.2, 1.3 ]
-     * </code></pre>
-     * <p>The low-resolution scaling map images for each channel are
-     * (displayed using nearest-neighbor interpolation):</p>
-     * <p><img alt="Red lens shading map" src="../../../../images/camera2/metadata/android.statistics.lensShadingMap/red_shading.png" />
-     * <img alt="Green (even rows) lens shading map" src="../../../../images/camera2/metadata/android.statistics.lensShadingMap/green_e_shading.png" />
-     * <img alt="Green (odd rows) lens shading map" src="../../../../images/camera2/metadata/android.statistics.lensShadingMap/green_o_shading.png" />
-     * <img alt="Blue lens shading map" src="../../../../images/camera2/metadata/android.statistics.lensShadingMap/blue_shading.png" /></p>
-     * <p>As a visualization only, inverting the full-color map to recover an
-     * image of a gray wall (using bicubic interpolation for visual quality) as captured by the sensor gives:</p>
-     * <p><img alt="Image of a uniform white wall (inverse shading map)" src="../../../../images/camera2/metadata/android.statistics.lensShadingMap/inv_shading.png" /></p>
-     *
-     * @see ACAMERA_COLOR_CORRECTION_MODE
-     */
-    ACAMERA_STATISTICS_LENS_SHADING_CORRECTION_MAP =            // byte
-            ACAMERA_STATISTICS_START + 10,
-    /**
-     * <p>The shading map is a low-resolution floating-point map
      * that lists the coefficients used to correct for vignetting and color shading,
      * for each Bayer color channel of RAW image data.</p>
      *
@@ -4672,20 +4027,21 @@
      *   <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
      * </ul>
      *
-     * <p>The lens shading correction is defined as a full shading correction that
-     * corrects both color shading for the output non-RAW images. After the
-     * shading map is applied, the output non-RAW images will be flat-field images
-     * for flat scenes under uniform illumination.</p>
-     * <p>When there is no lens shading correction applied to RAW output images
-     * (ACAMERA_SENSOR_INFO_LENS_SHADING_APPLIED <code>==</code> false), this map is a full lens
-     * shading correction map; when there is some lens shading correction applied
-     * to the RAW output image (ACAMERA_SENSOR_INFO_LENS_SHADING_APPLIED <code>==</code> true),
-     * this map reports the remaining lens shading correction map that needs to be
-     * applied to get fully shading corrected images.</p>
-     * <p>For a full shading correction map, the least shaded section of the image
-     * should have a gain factor of 1; all other sections should have gains above 1.</p>
+     * <p>The map provided here is the same map that is used by the camera device to
+     * correct both color shading and vignetting for output non-RAW images.</p>
+     * <p>When there is no lens shading correction applied to RAW
+     * output images (ACAMERA_SENSOR_INFO_LENS_SHADING_APPLIED <code>==</code>
+     * false), this map is the complete lens shading correction
+     * map; when there is some lens shading correction applied to
+     * the RAW output image (ACAMERA_SENSOR_INFO_LENS_SHADING_APPLIED <code>==</code> true),
+     * this map reports the remaining lens shading correction map that needs to be
+     * applied to get shading corrected images that match the camera device's output
+     * for non-RAW formats.</p>
+     * <p>For a complete shading correction map, the least shaded
+     * section of the image will have a gain factor of 1; all
+     * other sections will have gains above 1.</p>
      * <p>When ACAMERA_COLOR_CORRECTION_MODE = TRANSFORM_MATRIX, the map
-     * must take into account the colorCorrection settings.</p>
+     * will take into account the colorCorrection settings.</p>
      * <p>The shading map is for the entire active pixel array, and is not
      * affected by the crop region specified in the request. Each shading map
      * entry is the value of the shading compensation map over a specific
@@ -4698,8 +4054,8 @@
      * channel for the even rows of a Bayer pattern, and Godd is the odd rows.
      * The shading map is stored in a fully interleaved format, and its size
      * is provided in the camera static metadata by ACAMERA_LENS_INFO_SHADING_MAP_SIZE.</p>
-     * <p>The shading map should have on the order of 30-40 rows and columns,
-     * and must be smaller than 64x64.</p>
+     * <p>The shading map will generally have on the order of 30-40 rows and columns,
+     * and will be smaller than 64x64.</p>
      * <p>As an example, given a very small map defined as:</p>
      * <pre><code>ACAMERA_LENS_INFO_SHADING_MAP_SIZE = [ 4, 3 ]
      * ACAMERA_STATISTICS_LENS_SHADING_MAP =
@@ -4712,14 +4068,14 @@
      * </code></pre>
      * <p>The low-resolution scaling map images for each channel are
      * (displayed using nearest-neighbor interpolation):</p>
-     * <p><img alt="Red lens shading map" src="../../../../images/camera2/metadata/android.statistics.lensShadingMap/red_shading.png" />
-     * <img alt="Green (even rows) lens shading map" src="../../../../images/camera2/metadata/android.statistics.lensShadingMap/green_e_shading.png" />
-     * <img alt="Green (odd rows) lens shading map" src="../../../../images/camera2/metadata/android.statistics.lensShadingMap/green_o_shading.png" />
-     * <img alt="Blue lens shading map" src="../../../../images/camera2/metadata/android.statistics.lensShadingMap/blue_shading.png" /></p>
+     * <p><img alt="Red lens shading map" src="../images/camera2/metadata/android.statistics.lensShadingMap/red_shading.png" />
+     * <img alt="Green (even rows) lens shading map" src="../images/camera2/metadata/android.statistics.lensShadingMap/green_e_shading.png" />
+     * <img alt="Green (odd rows) lens shading map" src="../images/camera2/metadata/android.statistics.lensShadingMap/green_o_shading.png" />
+     * <img alt="Blue lens shading map" src="../images/camera2/metadata/android.statistics.lensShadingMap/blue_shading.png" /></p>
      * <p>As a visualization only, inverting the full-color map to recover an
      * image of a gray wall (using bicubic interpolation for visual quality)
      * as captured by the sensor gives:</p>
-     * <p><img alt="Image of a uniform white wall (inverse shading map)" src="../../../../images/camera2/metadata/android.statistics.lensShadingMap/inv_shading.png" /></p>
+     * <p><img alt="Image of a uniform white wall (inverse shading map)" src="../images/camera2/metadata/android.statistics.lensShadingMap/inv_shading.png" /></p>
      * <p>Note that the RAW image data might be subject to lens shading
      * correction not reported on this map. Query
      * ACAMERA_SENSOR_INFO_LENS_SHADING_APPLIED to see if RAW image data has subject
@@ -4944,11 +4300,11 @@
      * <p>Linear mapping:</p>
      * <pre><code>ACAMERA_TONEMAP_CURVE_RED = [ 0, 0, 1.0, 1.0 ]
      * </code></pre>
-     * <p><img alt="Linear mapping curve" src="../../../../images/camera2/metadata/android.tonemap.curveRed/linear_tonemap.png" /></p>
+     * <p><img alt="Linear mapping curve" src="../images/camera2/metadata/android.tonemap.curveRed/linear_tonemap.png" /></p>
      * <p>Invert mapping:</p>
      * <pre><code>ACAMERA_TONEMAP_CURVE_RED = [ 0, 1.0, 1.0, 0 ]
      * </code></pre>
-     * <p><img alt="Inverting mapping curve" src="../../../../images/camera2/metadata/android.tonemap.curveRed/inverse_tonemap.png" /></p>
+     * <p><img alt="Inverting mapping curve" src="../images/camera2/metadata/android.tonemap.curveRed/inverse_tonemap.png" /></p>
      * <p>Gamma 1/2.2 mapping, with 16 control points:</p>
      * <pre><code>ACAMERA_TONEMAP_CURVE_RED = [
      *   0.0000, 0.0000, 0.0667, 0.2920, 0.1333, 0.4002, 0.2000, 0.4812,
@@ -4956,7 +4312,7 @@
      *   0.5333, 0.7515, 0.6000, 0.7928, 0.6667, 0.8317, 0.7333, 0.8685,
      *   0.8000, 0.9035, 0.8667, 0.9370, 0.9333, 0.9691, 1.0000, 1.0000 ]
      * </code></pre>
-     * <p><img alt="Gamma = 1/2.2 tonemapping curve" src="../../../../images/camera2/metadata/android.tonemap.curveRed/gamma_tonemap.png" /></p>
+     * <p><img alt="Gamma = 1/2.2 tonemapping curve" src="../images/camera2/metadata/android.tonemap.curveRed/gamma_tonemap.png" /></p>
      * <p>Standard sRGB gamma mapping, per IEC 61966-2-1:1999, with 16 control points:</p>
      * <pre><code>ACAMERA_TONEMAP_CURVE_RED = [
      *   0.0000, 0.0000, 0.0667, 0.2864, 0.1333, 0.4007, 0.2000, 0.4845,
@@ -4964,7 +4320,7 @@
      *   0.5333, 0.7569, 0.6000, 0.7977, 0.6667, 0.8360, 0.7333, 0.8721,
      *   0.8000, 0.9063, 0.8667, 0.9389, 0.9333, 0.9701, 1.0000, 1.0000 ]
      * </code></pre>
-     * <p><img alt="sRGB tonemapping curve" src="../../../../images/camera2/metadata/android.tonemap.curveRed/srgb_tonemap.png" /></p>
+     * <p><img alt="sRGB tonemapping curve" src="../images/camera2/metadata/android.tonemap.curveRed/srgb_tonemap.png" /></p>
      *
      * @see ACAMERA_TONEMAP_CURVE_RED
      * @see ACAMERA_TONEMAP_MAX_CURVE_POINTS
@@ -5081,9 +4437,9 @@
      *
      * <p>The tonemap curve will be defined by specified standard.</p>
      * <p>sRGB (approximated by 16 control points):</p>
-     * <p><img alt="sRGB tonemapping curve" src="../../../../images/camera2/metadata/android.tonemap.curveRed/srgb_tonemap.png" /></p>
+     * <p><img alt="sRGB tonemapping curve" src="../images/camera2/metadata/android.tonemap.curveRed/srgb_tonemap.png" /></p>
      * <p>Rec. 709 (approximated by 16 control points):</p>
-     * <p><img alt="Rec. 709 tonemapping curve" src="../../../../images/camera2/metadata/android.tonemap.curveRed/rec709_tonemap.png" /></p>
+     * <p><img alt="Rec. 709 tonemapping curve" src="../images/camera2/metadata/android.tonemap.curveRed/rec709_tonemap.png" /></p>
      * <p>Note that above figures show a 16 control points approximation of preset
      * curves. Camera devices may apply a different approximation to the curve.</p>
      */
@@ -5131,7 +4487,7 @@
      * <p>See the individual level enums for full descriptions of the supported capabilities.  The
      * ACAMERA_REQUEST_AVAILABLE_CAPABILITIES entry describes the device's capabilities at a
      * finer-grain level, if needed. In addition, many controls have their available settings or
-     * ranges defined in individual {@link android.hardware.camera2.CameraCharacteristics} entries.</p>
+     * ranges defined in individual metadata tag entries in this document.</p>
      * <p>Some features are not part of any particular hardware level or capability and must be
      * queried separately. These include:</p>
      * <ul>
@@ -5303,8 +4659,6 @@
      * <p>See ACAMERA_SENSOR_FRAME_DURATION and
      * ACAMERA_SCALER_AVAILABLE_STALL_DURATIONS for more details about
      * calculating the max frame rate.</p>
-     * <p>(Keep in sync with {@link
-     * android.hardware.camera2.params.StreamConfigurationMap#getOutputMinFrameDuration})</p>
      *
      * @see ACAMERA_SCALER_AVAILABLE_STALL_DURATIONS
      * @see ACAMERA_SENSOR_FRAME_DURATION
@@ -6160,91 +5514,6 @@
     ACAMERA_CONTROL_SCENE_MODE_BARCODE                               = 16,
 
     /**
-     * <p>This is deprecated, please use {@link
-     * android.hardware.camera2.CameraDevice#createConstrainedHighSpeedCaptureSession}
-     * and {@link
-     * android.hardware.camera2.CameraConstrainedHighSpeedCaptureSession#createHighSpeedRequestList}
-     * for high speed video recording.</p>
-     * <p>Optimized for high speed video recording (frame rate &gt;=60fps) use case.</p>
-     * <p>The supported high speed video sizes and fps ranges are specified in
-     * android.control.availableHighSpeedVideoConfigurations. To get desired
-     * output frame rates, the application is only allowed to select video size
-     * and fps range combinations listed in this static metadata. The fps range
-     * can be control via ACAMERA_CONTROL_AE_TARGET_FPS_RANGE.</p>
-     * <p>In this mode, the camera device will override aeMode, awbMode, and afMode to
-     * ON, ON, and CONTINUOUS_VIDEO, respectively. All post-processing block mode
-     * controls will be overridden to be FAST. Therefore, no manual control of capture
-     * and post-processing parameters is possible. All other controls operate the
-     * same as when ACAMERA_CONTROL_MODE == AUTO. This means that all other
-     * ACAMERA_CONTROL_* fields continue to work, such as</p>
-     * <ul>
-     * <li>ACAMERA_CONTROL_AE_TARGET_FPS_RANGE</li>
-     * <li>ACAMERA_CONTROL_AE_EXPOSURE_COMPENSATION</li>
-     * <li>ACAMERA_CONTROL_AE_LOCK</li>
-     * <li>ACAMERA_CONTROL_AWB_LOCK</li>
-     * <li>ACAMERA_CONTROL_EFFECT_MODE</li>
-     * <li>ACAMERA_CONTROL_AE_REGIONS</li>
-     * <li>ACAMERA_CONTROL_AF_REGIONS</li>
-     * <li>ACAMERA_CONTROL_AWB_REGIONS</li>
-     * <li>ACAMERA_CONTROL_AF_TRIGGER</li>
-     * <li>ACAMERA_CONTROL_AE_PRECAPTURE_TRIGGER</li>
-     * </ul>
-     * <p>Outside of ACAMERA_CONTROL_*, the following controls will work:</p>
-     * <ul>
-     * <li>ACAMERA_FLASH_MODE (automatic flash for still capture will not work since aeMode is ON)</li>
-     * <li>ACAMERA_LENS_OPTICAL_STABILIZATION_MODE (if it is supported)</li>
-     * <li>ACAMERA_SCALER_CROP_REGION</li>
-     * <li>ACAMERA_STATISTICS_FACE_DETECT_MODE</li>
-     * </ul>
-     * <p>For high speed recording use case, the actual maximum supported frame rate may
-     * be lower than what camera can output, depending on the destination Surfaces for
-     * the image data. For example, if the destination surface is from video encoder,
-     * the application need check if the video encoder is capable of supporting the
-     * high frame rate for a given video size, or it will end up with lower recording
-     * frame rate. If the destination surface is from preview window, the preview frame
-     * rate will be bounded by the screen refresh rate.</p>
-     * <p>The camera device will only support up to 2 output high speed streams
-     * (processed non-stalling format defined in ACAMERA_REQUEST_MAX_NUM_OUTPUT_STREAMS)
-     * in this mode. This control will be effective only if all of below conditions are true:</p>
-     * <ul>
-     * <li>The application created no more than maxNumHighSpeedStreams processed non-stalling
-     * format output streams, where maxNumHighSpeedStreams is calculated as
-     * min(2, ACAMERA_REQUEST_MAX_NUM_OUTPUT_STREAMS[Processed (but not-stalling)]).</li>
-     * <li>The stream sizes are selected from the sizes reported by
-     * android.control.availableHighSpeedVideoConfigurations.</li>
-     * <li>No processed non-stalling or raw streams are configured.</li>
-     * </ul>
-     * <p>When above conditions are NOT satistied, the controls of this mode and
-     * ACAMERA_CONTROL_AE_TARGET_FPS_RANGE will be ignored by the camera device,
-     * the camera device will fall back to ACAMERA_CONTROL_MODE <code>==</code> AUTO,
-     * and the returned capture result metadata will give the fps range choosen
-     * by the camera device.</p>
-     * <p>Switching into or out of this mode may trigger some camera ISP/sensor
-     * reconfigurations, which may introduce extra latency. It is recommended that
-     * the application avoids unnecessary scene mode switch as much as possible.</p>
-     *
-     * @see ACAMERA_CONTROL_AE_EXPOSURE_COMPENSATION
-     * @see ACAMERA_CONTROL_AE_LOCK
-     * @see ACAMERA_CONTROL_AE_PRECAPTURE_TRIGGER
-     * @see ACAMERA_CONTROL_AE_REGIONS
-     * @see ACAMERA_CONTROL_AE_TARGET_FPS_RANGE
-     * @see ACAMERA_CONTROL_AF_REGIONS
-     * @see ACAMERA_CONTROL_AF_TRIGGER
-     * @see ACAMERA_CONTROL_AWB_LOCK
-     * @see ACAMERA_CONTROL_AWB_REGIONS
-     * @see ACAMERA_CONTROL_EFFECT_MODE
-     * @see ACAMERA_CONTROL_MODE
-     * @see ACAMERA_FLASH_MODE
-     * @see ACAMERA_LENS_OPTICAL_STABILIZATION_MODE
-     * @see ACAMERA_REQUEST_MAX_NUM_OUTPUT_STREAMS
-     * @see ACAMERA_SCALER_CROP_REGION
-     * @see ACAMERA_STATISTICS_FACE_DETECT_MODE
-     *
-     * <b>Deprecated</b>: please refer to this API documentation to find the alternatives
-     */
-    ACAMERA_CONTROL_SCENE_MODE_HIGH_SPEED_VIDEO                      = 17,
-
-    /**
      * <p>Turn on a device-specific high dynamic range (HDR) mode.</p>
      * <p>In this scene mode, the camera device captures images
      * that keep a larger range of scene illumination levels
@@ -6511,7 +5780,7 @@
     /**
      * <p>Edge enhancement is applied at different levels for different output streams,
      * based on resolution. Streams at maximum recording resolution (see {@link
-     * android.hardware.camera2.CameraDevice#createCaptureSession}) or below have
+     * ACameraDevice_createCaptureSession}) or below have
      * edge enhancement applied, while higher-resolution streams have no edge enhancement
      * applied. The level of edge enhancement for low-resolution streams is tuned so that
      * frame rate is not impacted, and the quality is equal to or better than FAST (since it
@@ -6765,7 +6034,7 @@
     /**
      * <p>Noise reduction is applied at different levels for different output streams,
      * based on resolution. Streams at maximum recording resolution (see {@link
-     * android.hardware.camera2.CameraDevice#createCaptureSession}) or below have noise
+     * ACameraDevice_createCaptureSession}) or below have noise
      * reduction applied, while higher-resolution streams have MINIMAL (if supported) or no
      * noise reduction applied (if MINIMAL is not supported.) The degree of noise reduction
      * for low-resolution streams is tuned so that frame rate is not impacted, and the quality
@@ -6987,26 +6256,18 @@
      * to FAST. Additionally, maximum-resolution images can be captured at &gt;= 10 frames
      * per second.  Here, 'high resolution' means at least 8 megapixels, or the maximum
      * resolution of the device, whichever is smaller.</p>
-     * <p>More specifically, this means that a size matching the camera device's active array
-     * size is listed as a supported size for the {@link
-     * android.graphics.ImageFormat#YUV_420_888} format in either {@link
-     * android.hardware.camera2.params.StreamConfigurationMap#getOutputSizes} or {@link
-     * android.hardware.camera2.params.StreamConfigurationMap#getHighResolutionOutputSizes},
-     * with a minimum frame duration for that format and size of either &lt;= 1/20 s, or
-     * &lt;= 1/10 s, respectively; and the ACAMERA_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES entry
-     * lists at least one FPS range where the minimum FPS is &gt;= 1 / minimumFrameDuration
-     * for the maximum-size YUV_420_888 format.  If that maximum size is listed in {@link
-     * android.hardware.camera2.params.StreamConfigurationMap#getHighResolutionOutputSizes},
-     * then the list of resolutions for YUV_420_888 from {@link
-     * android.hardware.camera2.params.StreamConfigurationMap#getOutputSizes} contains at
-     * least one resolution &gt;= 8 megapixels, with a minimum frame duration of &lt;= 1/20
-     * s.</p>
-     * <p>If the device supports the {@link android.graphics.ImageFormat#RAW10}, {@link
-     * android.graphics.ImageFormat#RAW12}, then those can also be captured at the same rate
+     * <p>More specifically, this means that at least one output {@link
+     * AIMAGE_FORMAT_YUV_420_888} size listed in
+     * {@link ACAMERA_SCALER_AVAILABLE_STREAM_CONFIGURATIONS} is larger than or equal to the
+     * 'high resolution' defined above, and can be captured at a rate of at least 20 fps.
+     * For the largest {@link AIMAGE_FORMAT_YUV_420_888} size listed in
+     * {@link ACAMERA_SCALER_AVAILABLE_STREAM_CONFIGURATIONS}, the camera device can capture
+     * this size at a rate of at least 10 frames per second.
+     * Also, the ACAMERA_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES entry lists at least one FPS range
+     * where the minimum FPS is &gt;= 1 / minimumFrameDuration for the largest YUV_420_888 size.</p>
+     * <p>If the device supports the {@link AIMAGE_FORMAT_RAW10} or {@link
+     * AIMAGE_FORMAT_RAW12} format, then those can also be captured at the same rate
+     * as the maximum-size YUV_420_888 resolution.</p>
-     * <p>If the device supports the PRIVATE_REPROCESSING capability, then the same guarantees
-     * as for the YUV_420_888 format also apply to the {@link
-     * android.graphics.ImageFormat#PRIVATE} format.</p>
      * <p>In addition, the ACAMERA_SYNC_MAX_LATENCY field is guaranteed to have a value between 0
      * and 4, inclusive. ACAMERA_CONTROL_AE_LOCK_AVAILABLE and ACAMERA_CONTROL_AWB_LOCK_AVAILABLE
      * are also guaranteed to be <code>true</code> so burst capture with these two locks ON yields
@@ -7023,13 +6284,13 @@
      * <p>The camera device can produce depth measurements from its field of view.</p>
      * <p>This capability requires the camera device to support the following:</p>
      * <ul>
-     * <li>{@link android.graphics.ImageFormat#DEPTH16} is supported as an output format.</li>
-     * <li>{@link android.graphics.ImageFormat#DEPTH_POINT_CLOUD} is optionally supported as an
+     * <li>{@link AIMAGE_FORMAT_DEPTH16} is supported as an output format.</li>
+     * <li>{@link AIMAGE_FORMAT_DEPTH_POINT_CLOUD} is optionally supported as an
      *   output format.</li>
      * <li>This camera device, and all camera devices with the same ACAMERA_LENS_FACING,
-     *   will list the following calibration entries in both
-     *   {@link android.hardware.camera2.CameraCharacteristics} and
-     *   {@link android.hardware.camera2.CaptureResult}:<ul>
+     *   will list the following calibration entries in {@link ACameraMetadata} from both
+     *   {@link ACameraManager_getCameraCharacteristics} and
+     *   {@link ACameraCaptureSession_captureCallback_result}:<ul>
      * <li>ACAMERA_LENS_POSE_TRANSLATION</li>
      * <li>ACAMERA_LENS_POSE_ROTATION</li>
      * <li>ACAMERA_LENS_INTRINSIC_CALIBRATION</li>
@@ -7044,7 +6305,7 @@
      * <p>Generally, depth output operates at a slower frame rate than standard color capture,
      * so the DEPTH16 and DEPTH_POINT_CLOUD formats will commonly have a stall duration that
      * should be accounted for (see
-     * {@link android.hardware.camera2.params.StreamConfigurationMap#getOutputStallDuration}).
+     * {@link ACAMERA_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS}).
      * On a device that supports both depth and color-based output, to enable smooth preview,
      * using a repeating burst is recommended, where a depth-output target is only included
      * once every N frames, where N is the ratio between preview output rate and depth output
@@ -7278,8 +6539,8 @@
 
     /**
      * <p>Timestamps from ACAMERA_SENSOR_TIMESTAMP are in the same timebase as
-     * {@link android.os.SystemClock#elapsedRealtimeNanos},
-     * and they can be compared to other timestamps using that base.</p>
+     * <a href="https://developer.android.com/reference/android/os/SystemClock.html#elapsedRealtimeNanos">elapsedRealtimeNanos</a>
+     * (or CLOCK_BOOTTIME), and they can be compared to other timestamps using that base.</p>
      *
      * @see ACAMERA_SENSOR_TIMESTAMP
      */
@@ -7464,8 +6725,7 @@
      * <p>This camera device does not have enough capabilities to qualify as a <code>FULL</code> device or
      * better.</p>
      * <p>Only the stream configurations listed in the <code>LEGACY</code> and <code>LIMITED</code> tables in the
-     * {@link android.hardware.camera2.CameraDevice#createCaptureSession
-     * createCaptureSession} documentation are guaranteed to be supported.</p>
+     * {@link ACameraDevice_createCaptureSession} documentation are guaranteed to be supported.</p>
      * <p>All <code>LIMITED</code> devices support the <code>BACKWARDS_COMPATIBLE</code> capability, indicating basic
      * support for color image capture. The only exception is that the device may
      * alternatively support only the <code>DEPTH_OUTPUT</code> capability, if it can only output depth
@@ -7491,8 +6751,7 @@
     /**
      * <p>This camera device is capable of supporting advanced imaging applications.</p>
      * <p>The stream configurations listed in the <code>FULL</code>, <code>LEGACY</code> and <code>LIMITED</code> tables in the
-     * {@link android.hardware.camera2.CameraDevice#createCaptureSession
-     * createCaptureSession} documentation are guaranteed to be supported.</p>
+     * {@link ACameraDevice_createCaptureSession} documentation are guaranteed to be supported.</p>
      * <p>A <code>FULL</code> device will support below capabilities:</p>
      * <ul>
      * <li><code>BURST_CAPTURE</code> capability (ACAMERA_REQUEST_AVAILABLE_CAPABILITIES contains
@@ -7520,8 +6779,7 @@
     /**
      * <p>This camera device is running in backward compatibility mode.</p>
      * <p>Only the stream configurations listed in the <code>LEGACY</code> table in the {@link
-     * android.hardware.camera2.CameraDevice#createCaptureSession createCaptureSession}
-     * documentation are supported.</p>
+     * ACameraDevice_createCaptureSession} documentation are supported.</p>
      * <p>A <code>LEGACY</code> device does not support per-frame control, manual sensor control, manual
      * post-processing, arbitrary cropping regions, and has relaxed performance constraints.
      * No additional capabilities beyond <code>BACKWARD_COMPATIBLE</code> will ever be listed by a
@@ -7543,7 +6801,7 @@
      * FULL-level capabilities.</p>
      * <p>The stream configurations listed in the <code>LEVEL_3</code>, <code>RAW</code>, <code>FULL</code>, <code>LEGACY</code> and
      * <code>LIMITED</code> tables in the {@link
-     * android.hardware.camera2.CameraDevice#createCaptureSession createCaptureSession}
+     * ACameraDevice_createCaptureSession}
      * documentation are guaranteed to be supported.</p>
      * <p>The following additional capabilities are guaranteed to be supported:</p>
      * <ul>
diff --git a/include/camera/ndk/NdkCaptureRequest.h b/include/camera/ndk/NdkCaptureRequest.h
index e278196..cd97f4d 100644
--- a/include/camera/ndk/NdkCaptureRequest.h
+++ b/include/camera/ndk/NdkCaptureRequest.h
@@ -49,54 +49,255 @@
 // Container for a single output target
 typedef struct ACameraOutputTarget ACameraOutputTarget;
 
+/**
+ * ACaptureRequest is an opaque type that contains settings and output targets needed to capture
+ * a single image from the camera device.
+ *
+ * <p>ACaptureRequest contains the configuration for the capture hardware (sensor, lens, flash),
+ * the processing pipeline, the control algorithms, and the output buffers. Also
+ * contains the list of target {@link ANativeWindow}s to send image data to for this
+ * capture.</p>
+ *
+ * <p>ACaptureRequest is created by {@link ACameraDevice_createCaptureRequest}.</p>
+ *
+ * <p>ACaptureRequest is given to {@link ACameraCaptureSession_capture} or
+ * {@link ACameraCaptureSession_setRepeatingRequest} to capture images from a camera.</p>
+ *
+ * <p>Each request can specify a different subset of target {@link ANativeWindow}s for the
+ * camera to send the captured data to. All the {@link ANativeWindow}s used in a request must
+ * be part of the {@link ANativeWindow} list given to the last call to
+ * {@link ACameraDevice_createCaptureSession}, when the request is submitted to the
+ * session.</p>
+ *
+ * <p>For example, a request meant for repeating preview might only include the
+ * {@link ANativeWindow} for the preview SurfaceView or SurfaceTexture, while a
+ * high-resolution still capture would also include a {@link ANativeWindow} from a
+ * {@link AImageReader} configured for high-resolution JPEG images.</p>
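+ *
+ * <p>A minimal lifecycle sketch (error checks omitted; "device", "session" and "jpegTarget"
+ * are assumed to be a previously opened ACameraDevice, a configured ACameraCaptureSession and
+ * an ACameraOutputTarget for a JPEG {@link AImageReader} window):</p>
+ * <pre><code>ACaptureRequest* request = NULL;
+ * ACameraDevice_createCaptureRequest(device, TEMPLATE_STILL_CAPTURE, &amp;request);
+ * ACaptureRequest_addTarget(request, jpegTarget);
+ * ACameraCaptureSession_capture(session, NULL, 1, &amp;request, NULL); // NULL: no callbacks
+ * ACaptureRequest_free(request);
+ * </code></pre>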
+ *
+ * @see ACameraDevice_createCaptureRequest
+ * @see ACameraCaptureSession_capture
+ * @see ACameraCaptureSession_setRepeatingRequest
+ */
 typedef struct ACaptureRequest ACaptureRequest;
 
-camera_status_t ACameraOutputTarget_create(ANativeWindow* window, ACameraOutputTarget** out);
-void ACameraOutputTarget_free(ACameraOutputTarget*);
+/**
+ * Create an ACameraOutputTarget object.
+ *
+ * <p>The ACameraOutputTarget is used in the {@link ACaptureRequest_addTarget} method to add an
+ * output {@link ANativeWindow} to ACaptureRequest. Use {@link ACameraOutputTarget_free} to free
+ * the object and its memory after the application no longer needs the
+ * {@link ACameraOutputTarget}.</p>
+ *
+ * @param window the {@link ANativeWindow} to be associated with the {@link ACameraOutputTarget}
+ * @param output the output {@link ACameraOutputTarget} will be stored here if the
+ *                  method call succeeds.
+ *
+ * @return <ul>
+ *         <li>{@link ACAMERA_OK} if the method call succeeds. The created ACameraOutputTarget will
+ *                                be filled in the output argument.</li>
+ *         <li>{@link ACAMERA_ERROR_INVALID_PARAMETER} if window or output is NULL.</li></ul>
+ *
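+ * <p>A hypothetical lifecycle sketch, assuming "window" was obtained from an
+ * {@link AImageReader} (for example via AImageReader_getWindow):</p>
+ * <pre><code>ACameraOutputTarget* target = NULL;
+ * ACameraOutputTarget_create(window, &amp;target);
+ * // ... add to capture requests with ACaptureRequest_addTarget ...
+ * ACameraOutputTarget_free(target);
+ * </code></pre>
+ *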
+ * @see ACaptureRequest_addTarget
+ */
+camera_status_t ACameraOutputTarget_create(ANativeWindow* window, ACameraOutputTarget** output);
 
-camera_status_t ACaptureRequest_addTarget(ACaptureRequest*, const ACameraOutputTarget*);
-camera_status_t ACaptureRequest_removeTarget(ACaptureRequest*, const ACameraOutputTarget*);
-//TODO: do we need API to query added targets?
+/**
+ * Free an ACameraOutputTarget object.
+ *
+ * @param output the {@link ACameraOutputTarget} to be freed.
+ *
+ * @see ACameraOutputTarget_create
+ */
+void ACameraOutputTarget_free(ACameraOutputTarget* output);
 
-/*
- * Get a metadata entry
+/**
+ * Add an {@link ACameraOutputTarget} object to {@link ACaptureRequest}.
+ *
+ * @param request the {@link ACaptureRequest} of interest.
+ * @param output the output {@link ACameraOutputTarget} to be added to the capture request.
+ *
+ * @return <ul>
+ *         <li>{@link ACAMERA_OK} if the method call succeeds.</li>
+ *         <li>{@link ACAMERA_ERROR_INVALID_PARAMETER} if request or output is NULL.</li></ul>
+ */
+camera_status_t ACaptureRequest_addTarget(ACaptureRequest* request,
+        const ACameraOutputTarget* output);
+
+/**
+ * Remove an {@link ACameraOutputTarget} object from {@link ACaptureRequest}.
+ *
+ * <p>This method has no effect if the ACameraOutputTarget does not exist in ACaptureRequest.</p>
+ *
+ * @param request the {@link ACaptureRequest} of interest.
+ * @param output the output {@link ACameraOutputTarget} to be removed from the capture request.
+ *
+ * @return <ul>
+ *         <li>{@link ACAMERA_OK} if the method call succeeds.</li>
+ *         <li>{@link ACAMERA_ERROR_INVALID_PARAMETER} if request or output is NULL.</li></ul>
+ */
+camera_status_t ACaptureRequest_removeTarget(ACaptureRequest* request,
+        const ACameraOutputTarget* output);
+
+/**
+ * Get a metadata entry from input {@link ACaptureRequest}.
+ *
+ * <p>The memory of the data field in the returned entry is managed by the camera framework. Do
+ * not attempt to free it.</p>
+ *
+ * @param request the {@link ACaptureRequest} of interest.
+ * @param tag the tag value of the camera metadata entry to be queried.
+ * @param entry the output {@link ACameraMetadata_const_entry} will be filled here if the method
+ *        call succeeds.
+ *
+ * @return <ul>
+ *         <li>{@link ACAMERA_OK} if the method call succeeds.</li>
+ *         <li>{@link ACAMERA_ERROR_INVALID_PARAMETER} if request or entry is NULL.</li>
+ *         <li>{@link ACAMERA_ERROR_METADATA_NOT_FOUND} if the capture request does not contain an
+ *             entry of input tag value.</li></ul>
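+ *
+ * <p>For example, a sketch of reading back the JPEG orientation currently set on a request:</p>
+ * <pre><code>ACameraMetadata_const_entry entry;
+ * if (ACaptureRequest_getConstEntry(request, ACAMERA_JPEG_ORIENTATION, &amp;entry) == ACAMERA_OK) {
+ *     int32_t orientation = entry.data.i32[0];
+ * }
+ * </code></pre>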
  */
 camera_status_t ACaptureRequest_getConstEntry(
-        const ACaptureRequest*, uint32_t tag, ACameraMetadata_const_entry* entry);
+        const ACaptureRequest* request, uint32_t tag, ACameraMetadata_const_entry* entry);
 
 /*
- * List all the entry tags in this capture request.
- * The memory of tags is managed by ACaptureRequest itself and must NOT be free/delete
- * by application. Calling ACaptureRequest_setEntry_* API will invalidate previous
- * output of ACaptureRequest_getAllTags. Do not access tags after calling
- * ACaptureRequest_setEntry_*. To get new list of tags after updating capture request,
- * application must call ACaptureRequest_getAllTags again.
- * Do NOT access tags after calling ACaptureRequest_free.
+ * List all the entry tags in input {@link ACaptureRequest}.
+ *
+ * @param request the {@link ACaptureRequest} of interest.
+ * @param numTags number of metadata entries in input {@link ACaptureRequest}.
+ * @param tags the tag values of the metadata entries. Length of tags is returned in numTags
+ *             argument. The memory is managed by ACaptureRequest itself and must NOT be
+ *             freed/deleted by the application. Calling ACaptureRequest_setEntry_* methods will
+ *             invalidate previous output of ACaptureRequest_getAllTags. Do not access tags after
+ *             calling ACaptureRequest_setEntry_*. To get a new list of tags after updating the
+ *             capture request, the application must call ACaptureRequest_getAllTags again. Do NOT
+ *             access tags after calling ACaptureRequest_free.
+ *
+ * @return <ul>
+ *         <li>{@link ACAMERA_OK} if the method call succeeds.</li>
+ *         <li>{@link ACAMERA_ERROR_INVALID_PARAMETER} if request, numTags or tags is NULL.</li>
+ *         <li>{@link ACAMERA_ERROR_UNKNOWN} if the method fails for some other reasons.</li></ul>
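+ *
+ * <p>A sketch of iterating over all tags in a request:</p>
+ * <pre><code>int32_t numTags = 0;
+ * const uint32_t* tags = NULL;
+ * ACaptureRequest_getAllTags(request, &amp;numTags, &amp;tags);
+ * for (int32_t i = 0; i &lt; numTags; i++) {
+ *     // inspect tags[i]
+ * }
+ * </code></pre>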
  */
 camera_status_t ACaptureRequest_getAllTags(
-        const ACaptureRequest*, /*out*/int32_t* numTags, /*out*/const uint32_t** tags);
+        const ACaptureRequest* request, /*out*/int32_t* numTags, /*out*/const uint32_t** tags);
 
-/*
- * Set an entry of corresponding type.
- * The entry tag's type must match corresponding set API or an
- * ACAMERA_ERROR_INVALID_PARAMETER error will occur.
- * Also, the input ACameraMetadata* must belong to a capture request or an
- * ACAMERA_ERROR_INVALID_PARAMETER error will occur.
+/**
+ * Set/change a camera capture control entry with unsigned 8 bits data type.
+ *
+ * <p>Set count to 0 and data to NULL to remove a tag from the capture request.</p>
+ *
+ * @param request the {@link ACaptureRequest} of interest.
+ * @param tag the tag value of the camera metadata entry to be set.
+ * @param count number of elements to be set in data argument
+ * @param data the entries to be set/changed in the capture request.
+ *
+ * @return <ul>
+ *         <li>{@link ACAMERA_OK} if the method call succeeds.</li>
+ *         <li>{@link ACAMERA_ERROR_INVALID_PARAMETER} if request is NULL, count is larger than
+ *             zero while data is NULL, the data type of the tag is not unsigned 8 bits, or
+ *             the tag is not controllable by application.</li></ul>
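+ *
+ * <p>For example, a sketch of enabling auto-exposure, or removing the entry entirely:</p>
+ * <pre><code>uint8_t aeMode = ACAMERA_CONTROL_AE_MODE_ON;
+ * ACaptureRequest_setEntry_u8(request, ACAMERA_CONTROL_AE_MODE, 1, &amp;aeMode); // set
+ * ACaptureRequest_setEntry_u8(request, ACAMERA_CONTROL_AE_MODE, 0, NULL);    // remove
+ * </code></pre>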
  */
 camera_status_t ACaptureRequest_setEntry_u8(
-        ACaptureRequest*, uint32_t tag, uint32_t count, const uint8_t* data);
-camera_status_t ACaptureRequest_setEntry_i32(
-        ACaptureRequest*, uint32_t tag, uint32_t count, const int32_t* data);
-camera_status_t ACaptureRequest_setEntry_float(
-        ACaptureRequest*, uint32_t tag, uint32_t count, const float* data);
-camera_status_t ACaptureRequest_setEntry_i64(
-        ACaptureRequest*, uint32_t tag, uint32_t count, const int64_t* data);
-camera_status_t ACaptureRequest_setEntry_double(
-        ACaptureRequest*, uint32_t tag, uint32_t count, const double* data);
-camera_status_t ACaptureRequest_setEntry_rational(
-        ACaptureRequest*, uint32_t tag, uint32_t count, const ACameraMetadata_rational* data);
+        ACaptureRequest* request, uint32_t tag, uint32_t count, const uint8_t* data);
 
-// free the capture request created by ACameraDevice_createCaptureRequest
+/**
+ * Set/change a camera capture control entry with a signed 32-bit data type.
+ *
+ * <p>Set count to 0 and data to NULL to remove a tag from the capture request.</p>
+ *
+ * @param request the {@link ACaptureRequest} of interest.
+ * @param tag the tag value of the camera metadata entry to be set.
+ * @param count the number of elements to be set in the data argument.
+ * @param data the entries to be set/changed in the capture request.
+ *
+ * @return <ul>
+ *         <li>{@link ACAMERA_OK} if the method call succeeds.</li>
+ *         <li>{@link ACAMERA_ERROR_INVALID_PARAMETER} if request is NULL, count is larger than
+ *             zero while data is NULL, the data type of the tag is not signed 32-bit, or
+ *             the tag is not controllable by the application.</li></ul>
+ */
+camera_status_t ACaptureRequest_setEntry_i32(
+        ACaptureRequest* request, uint32_t tag, uint32_t count, const int32_t* data);
+
+/**
+ * Set/change a camera capture control entry with a float data type.
+ *
+ * <p>Set count to 0 and data to NULL to remove a tag from the capture request.</p>
+ *
+ * @param request the {@link ACaptureRequest} of interest.
+ * @param tag the tag value of the camera metadata entry to be set.
+ * @param count the number of elements to be set in the data argument.
+ * @param data the entries to be set/changed in the capture request.
+ *
+ * @return <ul>
+ *         <li>{@link ACAMERA_OK} if the method call succeeds.</li>
+ *         <li>{@link ACAMERA_ERROR_INVALID_PARAMETER} if request is NULL, count is larger than
+ *             zero while data is NULL, the data type of the tag is not float, or
+ *             the tag is not controllable by the application.</li></ul>
+ */
+camera_status_t ACaptureRequest_setEntry_float(
+        ACaptureRequest* request, uint32_t tag, uint32_t count, const float* data);
+
+/**
+ * Set/change a camera capture control entry with a signed 64-bit data type.
+ *
+ * <p>Set count to 0 and data to NULL to remove a tag from the capture request.</p>
+ *
+ * @param request the {@link ACaptureRequest} of interest.
+ * @param tag the tag value of the camera metadata entry to be set.
+ * @param count the number of elements to be set in the data argument.
+ * @param data the entries to be set/changed in the capture request.
+ *
+ * @return <ul>
+ *         <li>{@link ACAMERA_OK} if the method call succeeds.</li>
+ *         <li>{@link ACAMERA_ERROR_INVALID_PARAMETER} if request is NULL, count is larger than
+ *             zero while data is NULL, the data type of the tag is not signed 64-bit, or
+ *             the tag is not controllable by the application.</li></ul>
+ */
+camera_status_t ACaptureRequest_setEntry_i64(
+        ACaptureRequest* request, uint32_t tag, uint32_t count, const int64_t* data);
+
+/**
+ * Set/change a camera capture control entry with a double data type.
+ *
+ * <p>Set count to 0 and data to NULL to remove a tag from the capture request.</p>
+ *
+ * @param request the {@link ACaptureRequest} of interest.
+ * @param tag the tag value of the camera metadata entry to be set.
+ * @param count the number of elements to be set in the data argument.
+ * @param data the entries to be set/changed in the capture request.
+ *
+ * @return <ul>
+ *         <li>{@link ACAMERA_OK} if the method call succeeds.</li>
+ *         <li>{@link ACAMERA_ERROR_INVALID_PARAMETER} if request is NULL, count is larger than
+ *             zero while data is NULL, the data type of the tag is not double, or
+ *             the tag is not controllable by the application.</li></ul>
+ */
+camera_status_t ACaptureRequest_setEntry_double(
+        ACaptureRequest* request, uint32_t tag, uint32_t count, const double* data);
+
+/**
+ * Set/change a camera capture control entry with a rational data type.
+ *
+ * <p>Set count to 0 and data to NULL to remove a tag from the capture request.</p>
+ *
+ * @param request the {@link ACaptureRequest} of interest.
+ * @param tag the tag value of the camera metadata entry to be set.
+ * @param count the number of elements to be set in the data argument.
+ * @param data the entries to be set/changed in the capture request.
+ *
+ * @return <ul>
+ *         <li>{@link ACAMERA_OK} if the method call succeeds.</li>
+ *         <li>{@link ACAMERA_ERROR_INVALID_PARAMETER} if request is NULL, count is larger than
+ *             zero while data is NULL, the data type of the tag is not rational, or
+ *             the tag is not controllable by the application.</li></ul>
+ */
+camera_status_t ACaptureRequest_setEntry_rational(
+        ACaptureRequest* request, uint32_t tag, uint32_t count,
+        const ACameraMetadata_rational* data);
+
+/**
+ * Free a {@link ACaptureRequest} structure.
+ *
+ * @param request the {@link ACaptureRequest} to be freed.
+ */
 void ACaptureRequest_free(ACaptureRequest* request);
 
 #ifdef __cplusplus
diff --git a/include/media/AudioTimestamp.h b/include/media/AudioTimestamp.h
index 44d6c0b..498de8e 100644
--- a/include/media/AudioTimestamp.h
+++ b/include/media/AudioTimestamp.h
@@ -34,7 +34,7 @@
     struct timespec mTime;     // corresponding CLOCK_MONOTONIC when frame is expected to present
 };
 
-struct ExtendedTimestamp {
+struct alignas(8) /* bug 29096183, bug 29108507 */ ExtendedTimestamp {
     enum Location {
         LOCATION_INVALID = -1,
         // Locations in the audio playback / record pipeline.
diff --git a/include/media/IMediaDeathNotifier.h b/include/media/IMediaDeathNotifier.h
index bb3d0d8..aca6678 100644
--- a/include/media/IMediaDeathNotifier.h
+++ b/include/media/IMediaDeathNotifier.h
@@ -30,7 +30,7 @@
     virtual ~IMediaDeathNotifier() { removeObitRecipient(this); }
 
     virtual void died() = 0;
-    static const sp<IMediaPlayerService>& getMediaPlayerService();
+    static const sp<IMediaPlayerService> getMediaPlayerService();
 
 private:
     IMediaDeathNotifier &operator=(const IMediaDeathNotifier &);
diff --git a/include/ndk/NdkImage.h b/include/ndk/NdkImage.h
index eab7ead..cd0b11e 100644
--- a/include/ndk/NdkImage.h
+++ b/include/ndk/NdkImage.h
@@ -42,21 +42,371 @@
 extern "C" {
 #endif
 
+/**
+ * AImage is an opaque type that provides access to an image generated by {@link AImageReader}.
+ */
 typedef struct AImage AImage;
 
 // Formats not listed here will not be supported by AImageReader
-enum {
+enum AIMAGE_FORMATS {
+    /**
+     * Multi-plane Android YUV 420 format.
+     *
+     * <p>This format is a generic YCbCr format, capable of describing any 4:2:0
+     * chroma-subsampled planar or semiplanar buffer (but not fully interleaved),
+     * with 8 bits per color sample.</p>
+     *
+     * <p>Images in this format are always represented by three separate buffers
+     * of data, one for each color plane. Additional information always
+     * accompanies the buffers, describing the row stride and the pixel stride
+     * for each plane.</p>
+     *
+     * <p>The order of planes is guaranteed such that plane #0 is always Y, plane #1 is always
+     * U (Cb), and plane #2 is always V (Cr).</p>
+     *
+     * <p>The Y-plane is guaranteed not to be interleaved with the U/V planes
+     * (in particular, pixel stride is always 1 in {@link AImage_getPlanePixelStride}).</p>
+     *
+     * <p>The U/V planes are guaranteed to have the same row stride and pixel stride, that is, the
+     * return value of {@link AImage_getPlaneRowStride} for the U/V plane are guaranteed to be the
+     * same, and the return value of {@link AImage_getPlanePixelStride} for the U/V plane are also
+     * guaranteed to be the same.</p>
+     *
+     * <p>For example, the {@link AImage} object can provide data
+     * in this format from a {@link ACameraDevice} through an {@link AImageReader} object.</p>
+     *
+     * <p>This format is always supported as an output format for the android Camera2 NDK API.</p>
+     *
+     * @see AImage
+     * @see AImageReader
+     * @see ACameraDevice
+     */
     AIMAGE_FORMAT_YUV_420_888       = 0x23,
+
+    /**
+     * Compressed JPEG format.
+     *
+     * <p>This format is always supported as an output format for the android Camera2 NDK API.</p>
+     */
     AIMAGE_FORMAT_JPEG              = 0x100,
+
+    /**
+     * 16 bits per pixel raw camera sensor image format, usually representing a single-channel
+     * Bayer-mosaic image.
+     *
+     * <p>The layout of the color mosaic, the maximum and minimum encoding
+     * values of the raw pixel data, the color space of the image, and all other
+     * needed information to interpret a raw sensor image must be queried from
+     * the {@link ACameraDevice} which produced the image.</p>
+     */
     AIMAGE_FORMAT_RAW16             = 0x20,
+
+    /**
+     * Private raw camera sensor image format, a single-channel image with an
+     * implementation-dependent pixel layout.
+     *
+     * <p>AIMAGE_FORMAT_RAW_PRIVATE is a format for unprocessed raw image buffers coming from an
+     * image sensor. The actual structure of buffers of this format is implementation-dependent.</p>
+     *
+     */
     AIMAGE_FORMAT_RAW_PRIVATE       = 0x24,
+
+    /**
+     * Android 10-bit raw format.
+     *
+     * <p>
+     * This is a single-plane, 10-bit per pixel, densely packed (in each row),
+     * unprocessed format, usually representing raw Bayer-pattern images coming
+     * from an image sensor.
+     * </p>
+     * <p>
+     * In an image buffer with this format, starting from the first pixel of
+     * each row, each 4 consecutive pixels are packed into 5 bytes (40 bits).
+     * Each of the first 4 bytes contains the top 8 bits of one pixel. The
+     * fifth byte contains the 2 least significant bits of all 4 pixels; the
+     * exact layout for each 4 consecutive pixels is illustrated below
+     * (Pi[j] stands for the jth bit of the ith pixel):
+     * </p>
+     * <table>
+     * <tr>
+     * <th align="center"></th>
+     * <th align="center">bit 7</th>
+     * <th align="center">bit 6</th>
+     * <th align="center">bit 5</th>
+     * <th align="center">bit 4</th>
+     * <th align="center">bit 3</th>
+     * <th align="center">bit 2</th>
+     * <th align="center">bit 1</th>
+     * <th align="center">bit 0</th>
+     * </tr>
+     * <tr>
+     * <td align="center">Byte 0:</td>
+     * <td align="center">P0[9]</td>
+     * <td align="center">P0[8]</td>
+     * <td align="center">P0[7]</td>
+     * <td align="center">P0[6]</td>
+     * <td align="center">P0[5]</td>
+     * <td align="center">P0[4]</td>
+     * <td align="center">P0[3]</td>
+     * <td align="center">P0[2]</td>
+     * </tr>
+     * <tr>
+     * <td align="center">Byte 1:</td>
+     * <td align="center">P1[9]</td>
+     * <td align="center">P1[8]</td>
+     * <td align="center">P1[7]</td>
+     * <td align="center">P1[6]</td>
+     * <td align="center">P1[5]</td>
+     * <td align="center">P1[4]</td>
+     * <td align="center">P1[3]</td>
+     * <td align="center">P1[2]</td>
+     * </tr>
+     * <tr>
+     * <td align="center">Byte 2:</td>
+     * <td align="center">P2[9]</td>
+     * <td align="center">P2[8]</td>
+     * <td align="center">P2[7]</td>
+     * <td align="center">P2[6]</td>
+     * <td align="center">P2[5]</td>
+     * <td align="center">P2[4]</td>
+     * <td align="center">P2[3]</td>
+     * <td align="center">P2[2]</td>
+     * </tr>
+     * <tr>
+     * <td align="center">Byte 3:</td>
+     * <td align="center">P3[9]</td>
+     * <td align="center">P3[8]</td>
+     * <td align="center">P3[7]</td>
+     * <td align="center">P3[6]</td>
+     * <td align="center">P3[5]</td>
+     * <td align="center">P3[4]</td>
+     * <td align="center">P3[3]</td>
+     * <td align="center">P3[2]</td>
+     * </tr>
+     * <tr>
+     * <td align="center">Byte 4:</td>
+     * <td align="center">P3[1]</td>
+     * <td align="center">P3[0]</td>
+     * <td align="center">P2[1]</td>
+     * <td align="center">P2[0]</td>
+     * <td align="center">P1[1]</td>
+     * <td align="center">P1[0]</td>
+     * <td align="center">P0[1]</td>
+     * <td align="center">P0[0]</td>
+     * </tr>
+     * </table>
+     * <p>
+     * This format assumes
+     * <ul>
+     * <li>a width multiple of 4 pixels</li>
+     * <li>an even height</li>
+     * </ul>
+     * </p>
+     *
+     * <pre>size = row stride * height</pre> where the row stride is in <em>bytes</em>,
+     * not pixels.
+     *
+     * <p>
+     * Since this is a densely packed format, the pixel stride is always 0. The
+     * application must use the pixel data layout defined in the table above to
+     * access each row's data. When the row stride is equal to (width * 10 / 8),
+     * there will be no padding bytes at the end of each row and the entire image
+     * data is densely packed. When the stride is larger than (width * 10 / 8),
+     * padding bytes will be present at the end of each row.
+     * </p>
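+     * <p>As an illustrative sketch (assuming the plane data pointer and row stride were obtained
+     * via {@link AImage_getPlaneData} and {@link AImage_getPlaneRowStride}), the first 4 pixels
+     * of row r can be unpacked as:</p>
+     * <pre>
+     *    const uint8_t* row = data + r * rowStride;
+     *    uint16_t p0 = ((uint16_t)row[0] << 2) | ( row[4]       & 0x3);
+     *    uint16_t p1 = ((uint16_t)row[1] << 2) | ((row[4] >> 2) & 0x3);
+     *    uint16_t p2 = ((uint16_t)row[2] << 2) | ((row[4] >> 4) & 0x3);
+     *    uint16_t p3 = ((uint16_t)row[3] << 2) | ((row[4] >> 6) & 0x3);
+     * </pre>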
+     * <p>
+     * For example, the {@link AImage} object can provide data in this format from a
+     * {@link ACameraDevice} (if supported) through a {@link AImageReader} object.
+     * The number of planes returned by {@link AImage_getNumberOfPlanes} will always be 1.
+     * The pixel stride is undefined ({@link AImage_getPlanePixelStride} will return
+     * {@link AMEDIA_ERROR_UNSUPPORTED}), and {@link AImage_getPlaneRowStride} describes the
+     * vertical distance (in bytes) between adjacent rows.
+     * </p>
+     *
+     * @see AImage
+     * @see AImageReader
+     * @see ACameraDevice
+     */
     AIMAGE_FORMAT_RAW10             = 0x25,
+
+    /**
+     * Android 12-bit raw format.
+     *
+     * <p>
+     * This is a single-plane, 12-bit per pixel, densely packed (in each row),
+     * unprocessed format, usually representing raw Bayer-pattern images coming
+     * from an image sensor.
+     * </p>
+     * <p>
+     * In an image buffer with this format, starting from the first pixel of each
+     * row, each two consecutive pixels are packed into 3 bytes (24 bits). The first
+     * and second bytes contain the top 8 bits of the first and second pixels,
+     * respectively. The third byte contains the 4 least significant bits of the two
+     * pixels; the exact layout for each two consecutive pixels is illustrated below (Pi[j] stands for
+     * the jth bit of the ith pixel):
+     * </p>
+     * <table>
+     * <tr>
+     * <th align="center"></th>
+     * <th align="center">bit 7</th>
+     * <th align="center">bit 6</th>
+     * <th align="center">bit 5</th>
+     * <th align="center">bit 4</th>
+     * <th align="center">bit 3</th>
+     * <th align="center">bit 2</th>
+     * <th align="center">bit 1</th>
+     * <th align="center">bit 0</th>
+     * </tr>
+     * <tr>
+     * <td align="center">Byte 0:</td>
+     * <td align="center">P0[11]</td>
+     * <td align="center">P0[10]</td>
+     * <td align="center">P0[ 9]</td>
+     * <td align="center">P0[ 8]</td>
+     * <td align="center">P0[ 7]</td>
+     * <td align="center">P0[ 6]</td>
+     * <td align="center">P0[ 5]</td>
+     * <td align="center">P0[ 4]</td>
+     * </tr>
+     * <tr>
+     * <td align="center">Byte 1:</td>
+     * <td align="center">P1[11]</td>
+     * <td align="center">P1[10]</td>
+     * <td align="center">P1[ 9]</td>
+     * <td align="center">P1[ 8]</td>
+     * <td align="center">P1[ 7]</td>
+     * <td align="center">P1[ 6]</td>
+     * <td align="center">P1[ 5]</td>
+     * <td align="center">P1[ 4]</td>
+     * </tr>
+     * <tr>
+     * <td align="center">Byte 2:</td>
+     * <td align="center">P1[ 3]</td>
+     * <td align="center">P1[ 2]</td>
+     * <td align="center">P1[ 1]</td>
+     * <td align="center">P1[ 0]</td>
+     * <td align="center">P0[ 3]</td>
+     * <td align="center">P0[ 2]</td>
+     * <td align="center">P0[ 1]</td>
+     * <td align="center">P0[ 0]</td>
+     * </tr>
+     * </table>
+     * <p>
+     * This format assumes
+     * <ul>
+     * <li>a width multiple of 4 pixels</li>
+     * <li>an even height</li>
+     * </ul>
+     * </p>
+     *
+     * <pre>size = row stride * height</pre> where the row stride is in <em>bytes</em>,
+     * not pixels.
+     *
+     * <p>
+     * Since this is a densely packed format, the pixel stride is always 0. The
+     * application must use the pixel data layout defined in the table above to
+     * access each row's data. When the row stride is equal to (width * 12 / 8),
+     * there will be no padding bytes at the end of each row and the entire image
+     * data is densely packed. When the stride is larger than (width * 12 / 8),
+     * padding bytes will be present at the end of each row.
+     * </p>
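+     * <p>As an illustrative sketch (same assumptions as for {@link AIMAGE_FORMAT_RAW10}), the
+     * first 2 pixels of row r can be unpacked as:</p>
+     * <pre>
+     *    const uint8_t* row = data + r * rowStride;
+     *    uint16_t p0 = ((uint16_t)row[0] << 4) | ( row[2]       & 0xF);
+     *    uint16_t p1 = ((uint16_t)row[1] << 4) | ((row[2] >> 4) & 0xF);
+     * </pre>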
+     * <p>
+     * For example, the {@link AImage} object can provide data in this format from a
+     * {@link ACameraDevice} (if supported) through a {@link AImageReader} object.
+     * The number of planes returned by {@link AImage_getNumberOfPlanes} will always be 1.
+     * The pixel stride is undefined ({@link AImage_getPlanePixelStride} will return
+     * {@link AMEDIA_ERROR_UNSUPPORTED}), and {@link AImage_getPlaneRowStride} describes the
+     * vertical distance (in bytes) between adjacent rows.
+     * </p>
+     *
+     * @see AImage
+     * @see AImageReader
+     * @see ACameraDevice
+     */
     AIMAGE_FORMAT_RAW12             = 0x26,
+
+    /**
+     * Android dense depth image format.
+     *
+     * <p>Each pixel is 16 bits, representing a depth ranging measurement from a depth camera or
+     * similar sensor. The 16-bit sample consists of a confidence value and the actual ranging
+     * measurement.</p>
+     *
+     * <p>The confidence value is an estimate of correctness for this sample.  It is encoded in the
+     * 3 most significant bits of the sample, with a value of 0 representing 100% confidence, a
+     * value of 1 representing 0% confidence, a value of 2 representing 1/7, a value of 3
+     * representing 2/7, and so on.</p>
+     *
+     * <p>As an example, the following sample extracts the range and confidence from the first pixel
+     * of a DEPTH16-format {@link AImage}, and converts the confidence to a floating-point value
+     * between 0 and 1.f inclusive, with 1.f representing maximum confidence:
+     *
+     * <pre>
+     *    uint16_t* data;
+     *    int dataLength;
+     *    AImage_getPlaneData(image, 0, (uint8_t**)&data, &dataLength);
+     *    uint16_t depthSample = data[0];
+     *    uint16_t depthRange = (depthSample & 0x1FFF);
+     *    uint16_t depthConfidence = ((depthSample >> 13) & 0x7);
+     *    float depthPercentage = depthConfidence == 0 ? 1.f : (depthConfidence - 1) / 7.f;
+     * </pre>
+     * </p>
+     *
+     * <p>This format assumes
+     * <ul>
+     * <li>an even width</li>
+     * <li>an even height</li>
+     * <li>a horizontal stride multiple of 16 pixels</li>
+     * </ul>
+     * </p>
+     *
+     * <pre> y_size = stride * height </pre>
+     *
+     * When produced by a camera, the units for the range are millimeters.
+     */
     AIMAGE_FORMAT_DEPTH16           = 0x44363159,
+
+    /**
+     * Android sparse depth point cloud format.
+     *
+     * <p>A variable-length list of 3D points plus a confidence value, with each point represented
+     * by four floats; first the X, Y, Z position coordinates, and then the confidence value.</p>
+     *
+     * <p>The number of points is ((size of the buffer in bytes) / 16).
+     *
+     * <p>The coordinate system and units of the position values depend on the source of the point
+     * cloud data. The confidence value is between 0.f and 1.f, inclusive, with 0 representing 0%
+     * confidence and 1.f representing 100% confidence in the measured position values.</p>
+     *
+     * <p>As an example, the following code extracts the first depth point in a DEPTH_POINT_CLOUD
+     * format {@link AImage}:
+     * <pre>
+     *    float* data;
+     *    int dataLength;
+     *    AImage_getPlaneData(image, 0, (uint8_t**)&data, &dataLength);
+     *    float x = data[0];
+     *    float y = data[1];
+     *    float z = data[2];
+     *    float confidence = data[3];
+     * </pre>
+     *
+     */
     AIMAGE_FORMAT_DEPTH_POINT_CLOUD = 0x101,
-    AIMAGE_FORMAT_PRIVATE           = 0x22 ///> Not supported by AImageReader yet
+
+    /**
+     * Android private opaque image format.
+     *
+     * <p>This format is not currently supported by {@link AImageReader}.</p>
+     */
+    AIMAGE_FORMAT_PRIVATE           = 0x22
 };
 
+/**
+ * Data type describing a cropped rectangle returned by {@link AImage_getCropRect}.
+ *
+ * <p>Note that the right and bottom coordinates are exclusive, so the width of the rectangle is
+ * (right - left) and the height of the rectangle is (bottom - top).</p>
+ */
 typedef struct AImageCropRect {
     int32_t left;
     int32_t top;
@@ -64,40 +414,192 @@
     int32_t bottom;
 } AImageCropRect;
 
-// Return the image back to system and delete the AImage from memory
-// Do NOT use `image` after this call
+/**
+ * Return the image back to the system and delete the AImage object from memory.
+ *
+ * <p>Do NOT use the image pointer after this method returns.
+ * Note that if the parent {@link AImageReader} is closed, all the {@link AImage} objects acquired
+ * from the parent reader will be returned to system. All AImage_* methods except this method will
+ * return {@link AMEDIA_ERROR_INVALID_OBJECT}. Application still needs to call this method on those
+ * {@link AImage} objects to fully delete the {@link AImage} object from memory.</p>
+ *
+ * @param image The {@link AImage} to be deleted.
+ */
 void AImage_delete(AImage* image);
 
-// AMEDIA_ERROR_INVALID_OBJECT will be returned if the parent AImageReader is deleted
+/**
+ * Query the width of the input {@link AImage}.
+ *
+ * @param image the {@link AImage} of interest.
+ * @param width the width of the image will be filled here if the method call succeeds.
+ *
+ * @return <ul>
+ *         <li>{@link AMEDIA_OK} if the method call succeeds.</li>
+ *         <li>{@link AMEDIA_ERROR_INVALID_PARAMETER} if image or width is NULL.</li>
+ *         <li>{@link AMEDIA_ERROR_INVALID_OBJECT} if the {@link AImageReader} generated this
+ *                 image has been deleted.</li></ul>
+ */
 media_status_t AImage_getWidth(const AImage* image, /*out*/int32_t* width);
 
-// AMEDIA_ERROR_INVALID_OBJECT will be returned if the parent AImageReader is deleted
+/**
+ * Query the height of the input {@link AImage}.
+ *
+ * @param image the {@link AImage} of interest.
+ * @param height the height of the image will be filled here if the method call succeeds.
+ *
+ * @return <ul>
+ *         <li>{@link AMEDIA_OK} if the method call succeeds.</li>
+ *         <li>{@link AMEDIA_ERROR_INVALID_PARAMETER} if image or height is NULL.</li>
+ *         <li>{@link AMEDIA_ERROR_INVALID_OBJECT} if the {@link AImageReader} generated this
+ *                 image has been deleted.</li></ul>
+ */
 media_status_t AImage_getHeight(const AImage* image, /*out*/int32_t* height);
 
-// AMEDIA_ERROR_INVALID_OBJECT will be returned if the parent AImageReader is deleted
+/**
+ * Query the format of the input {@link AImage}.
+ *
+ * <p>The format value will be one of AIMAGE_FORMAT_* enum value.</p>
+ *
+ * @param image the {@link AImage} of interest.
+ * @param format the format of the image will be filled here if the method call succeeds.
+ *
+ * @return <ul>
+ *         <li>{@link AMEDIA_OK} if the method call succeeds.</li>
+ *         <li>{@link AMEDIA_ERROR_INVALID_PARAMETER} if image or format is NULL.</li>
+ *         <li>{@link AMEDIA_ERROR_INVALID_OBJECT} if the {@link AImageReader} generated this
+ *                 image has been deleted.</li></ul>
+ */
 media_status_t AImage_getFormat(const AImage* image, /*out*/int32_t* format);
 
-// AMEDIA_ERROR_INVALID_OBJECT will be returned if the parent AImageReader is deleted
+/**
+ * Query the cropped rectangle of the input {@link AImage}.
+ *
+ * <p>The crop rectangle specifies the region of valid pixels in the image, using coordinates in the
+ * largest-resolution plane.</p>
+ *
+ * @param image the {@link AImage} of interest.
+ * @param rect the cropped rectangle of the image will be filled here if the method call succeeds.
+ *
+ * @return <ul>
+ *         <li>{@link AMEDIA_OK} if the method call succeeds.</li>
+ *         <li>{@link AMEDIA_ERROR_INVALID_PARAMETER} if image or rect is NULL.</li>
+ *         <li>{@link AMEDIA_ERROR_INVALID_OBJECT} if the {@link AImageReader} generated this
+ *                 image has been deleted.</li></ul>
+ */
 media_status_t AImage_getCropRect(const AImage* image, /*out*/AImageCropRect* rect);
 
-// AMEDIA_ERROR_INVALID_OBJECT will be returned if the parent AImageReader is deleted
+/**
+ * Query the timestamp of the input {@link AImage}.
+ *
+ * <p>
+ * The timestamp is measured in nanoseconds, and is normally monotonically increasing. The
+ * timestamps for the images from different sources may have different timebases therefore may not
+ * be comparable. The specific meaning and timebase of the timestamp depend on the source providing
+ * images. For images generated by camera, the timestamp value will match
+ * {@link ACAMERA_SENSOR_TIMESTAMP} of the {@link ACameraMetadata} in
+ * {@link ACameraCaptureSession_captureCallbacks#onCaptureStarted} and
+ * {@link ACameraCaptureSession_captureCallbacks#onCaptureCompleted} callback.
+ * </p>
+ *
+ * @param image the {@link AImage} of interest.
+ * @param timestampNs the timestamp of the image will be filled here if the method call succeeds.
+ *
+ * @return <ul>
+ *         <li>{@link AMEDIA_OK} if the method call succeeds.</li>
+ *         <li>{@link AMEDIA_ERROR_INVALID_PARAMETER} if image or timestampNs is NULL.</li>
+ *         <li>{@link AMEDIA_ERROR_INVALID_OBJECT} if the {@link AImageReader} generated this
+ *                 image has been deleted.</li></ul>
+ */
 media_status_t AImage_getTimestamp(const AImage* image, /*out*/int64_t* timestampNs);
 
-// AMEDIA_ERROR_INVALID_OBJECT will be returned if the parent AImageReader is deleted
+/**
+ * Query the number of planes of the input {@link AImage}.
+ *
+ * <p>The number of planes of an {@link AImage} is determined by its format, which can be queried
+ * with the {@link AImage_getFormat} method.</p>
+ *
+ * @param image the {@link AImage} of interest.
+ * @param numPlanes the number of planes of the image will be filled here if the method call
+ *         succeeds.
+ *
+ * @return <ul>
+ *         <li>{@link AMEDIA_OK} if the method call succeeds.</li>
+ *         <li>{@link AMEDIA_ERROR_INVALID_PARAMETER} if image or numPlanes is NULL.</li>
+ *         <li>{@link AMEDIA_ERROR_INVALID_OBJECT} if the {@link AImageReader} generated this
+ *                 image has been deleted.</li></ul>
+ */
 media_status_t AImage_getNumberOfPlanes(const AImage* image, /*out*/int32_t* numPlanes);
 
-// AMEDIA_ERROR_INVALID_OBJECT will be returned if the parent AImageReader is deleted
+/**
+ * Query the pixel stride of the input {@link AImage}.
+ *
+ * <p>This is the distance between two consecutive pixel values in a row of pixels. It may be
+ * larger than the size of a single pixel to account for interleaved image data or padded formats.
+ * Note that pixel stride is undefined for some formats such as {@link AIMAGE_FORMAT_RAW_PRIVATE},
+ * and calling this method on images of these formats will cause {@link AMEDIA_ERROR_UNSUPPORTED}
+ * to be returned.
+ * For formats where pixel stride is well defined, the pixel stride is always greater than 0.</p>
+ *
+ * @param image the {@link AImage} of interest.
+ * @param planeIdx the index of the plane. Must be less than the number of planes of input image.
+ * @param pixelStride the pixel stride of the image will be filled here if the method call succeeds.
+ *
+ * @return <ul>
+ *         <li>{@link AMEDIA_OK} if the method call succeeds.</li>
+ *         <li>{@link AMEDIA_ERROR_INVALID_PARAMETER} if image or pixelStride is NULL, or planeIdx
+ *                 is out of the range of [0, numOfPlanes - 1].</li>
+ *         <li>{@link AMEDIA_ERROR_UNSUPPORTED} if pixel stride is undefined for the format of input
+ *                 image.</li>
+ *         <li>{@link AMEDIA_ERROR_INVALID_OBJECT} if the {@link AImageReader} generated this
+ *                 image has been deleted.</li></ul>
+ */
 media_status_t AImage_getPlanePixelStride(
         const AImage* image, int planeIdx, /*out*/int32_t* pixelStride);
 
-// AMEDIA_ERROR_INVALID_OBJECT will be returned if the parent AImageReader is deleted
+/**
+ * Query the row stride of the input {@link AImage}.
+ *
+ * <p>This is the distance between the start of two consecutive rows of pixels in the image. Note
+ * that row stride is undefined for some formats such as {@link AIMAGE_FORMAT_RAW_PRIVATE}, and
+ * calling this method on images of these formats will cause {@link AMEDIA_ERROR_UNSUPPORTED}
+ * to be returned.
+ * For formats where row stride is well defined, the row stride is always greater than 0.</p>
+ *
+ * @param image the {@link AImage} of interest.
+ * @param planeIdx the index of the plane. Must be less than the number of planes of input image.
+ * @param rowStride the row stride of the image will be filled here if the method call succeeds.
+ *
+ * @return <ul>
+ *         <li>{@link AMEDIA_OK} if the method call succeeds.</li>
+ *         <li>{@link AMEDIA_ERROR_INVALID_PARAMETER} if image or rowStride is NULL, or planeIdx
+ *                 is out of the range of [0, numOfPlanes - 1].</li>
+ *         <li>{@link AMEDIA_ERROR_UNSUPPORTED} if row stride is undefined for the format of input
+ *                 image.</li>
+ *         <li>{@link AMEDIA_ERROR_INVALID_OBJECT} if the {@link AImageReader} generated this
+ *                 image has been deleted.</li></ul>
+ */
 media_status_t AImage_getPlaneRowStride(
         const AImage* image, int planeIdx, /*out*/int32_t* rowStride);
 
-// AMEDIA_ERROR_INVALID_OBJECT will be returned if the parent AImageReader is deleted
-// Note that once the AImage or the parent AImageReader is deleted, the `*data` returned from
-// previous AImage_getPlaneData call becomes dangling pointer. Do NOT use it after
-// AImage or AImageReader is deleted
+/**
+ * Get the data pointer of the input image for direct application access.
+ *
+ * <p>Note that once the {@link AImage} or the parent {@link AImageReader} is deleted, the data
+ * pointer from a previous AImage_getPlaneData call becomes invalid. Do NOT use it after the
+ * {@link AImage} or the parent {@link AImageReader} is deleted.</p>
+ *
+ * @param image the {@link AImage} of interest.
+ * @param planeIdx the index of the plane. Must be less than the number of planes of input image.
+ * @param data the data pointer of the image will be filled here if the method call succeeds.
+ * @param dataLength the valid length of data will be filled here if the method call succeeds.
+ *
+ * @return <ul>
+ *         <li>{@link AMEDIA_OK} if the method call succeeds.</li>
+ *         <li>{@link AMEDIA_ERROR_INVALID_PARAMETER} if image, data or dataLength is NULL, or
+ *                 planeIdx is out of the range of [0, numOfPlanes - 1].</li>
+ *         <li>{@link AMEDIA_ERROR_INVALID_OBJECT} if the {@link AImageReader} generated this
+ *                 image has been deleted.</li></ul>
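+ *
+ * <p>As an illustrative sketch (error handling omitted), a sample at position (x, y) of a plane
+ * with well-defined pixel and row strides can be located as follows:</p>
+ *
+ * <pre>
+ *    uint8_t* data = NULL;
+ *    int dataLength = 0;
+ *    int32_t pixelStride = 0, rowStride = 0;
+ *    AImage_getPlanePixelStride(image, planeIdx, &pixelStride);
+ *    AImage_getPlaneRowStride(image, planeIdx, &rowStride);
+ *    AImage_getPlaneData(image, planeIdx, &data, &dataLength);
+ *    uint8_t sample = data[y * rowStride + x * pixelStride];
+ * </pre>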
+ */
 media_status_t AImage_getPlaneData(
         const AImage* image, int planeIdx,
         /*out*/uint8_t** data, /*out*/int* dataLength);
diff --git a/include/ndk/NdkImageReader.h b/include/ndk/NdkImageReader.h
index 9e7483d..7c7ec6a 100644
--- a/include/ndk/NdkImageReader.h
+++ b/include/ndk/NdkImageReader.h
@@ -44,38 +44,253 @@
 extern "C" {
 #endif
 
+/**
+ * AImageReader is an opaque type that allows direct application access to image data rendered
+ * into a {@link ANativeWindow}.
+ */
 typedef struct AImageReader AImageReader;
 
+/**
+ * Create a new reader for images of the desired size and format.
+ *
+ * <p>
+ * The maxImages parameter determines the maximum number of {@link AImage} objects that can be
+ * acquired from the {@link AImageReader} simultaneously. Requesting more buffers will use up
+ * more memory, so it is important to use only the minimum number necessary for the use case.
+ * </p>
+ * <p>
+ * The valid sizes and formats depend on the source of the image data.
+ * </p>
+ *
+ * @param width The default width in pixels of the Images that this reader will produce.
+ * @param height The default height in pixels of the Images that this reader will produce.
+ * @param format The format of the Image that this reader will produce. This must be one of the
+ *            AIMAGE_FORMAT_* enum values defined in {@link AIMAGE_FORMATS}. Note that not all
+ *            formats are supported; for example, {@link AIMAGE_FORMAT_PRIVATE} is not.
+ * @param maxImages The maximum number of images the user will want to access simultaneously. This
+ *            should be as small as possible to limit memory use. Once maxImages Images are obtained
+ *            by the user, one of them has to be released before a new {@link AImage} will become
+ *            available for access through {@link AImageReader_acquireLatestImage} or
+ *            {@link AImageReader_acquireNextImage}. Must be greater than 0.
+ * @param reader The created image reader will be filled here if the method call succeeds.
+ *
+ * @return <ul>
+ *         <li>{@link AMEDIA_OK} if the method call succeeds.</li>
+ *         <li>{@link AMEDIA_ERROR_INVALID_PARAMETER} if reader is NULL, or one or more of width,
+ *                 height, format, maxImages arguments is not supported.</li>
+ *         <li>{@link AMEDIA_ERROR_UNKNOWN} if the method fails for some other reasons.</li></ul>
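+ *
+ * <p>A minimal creation sketch (the width, height, and maxImages values are illustrative):</p>
+ *
+ * <pre>
+ *    AImageReader* reader = NULL;
+ *    media_status_t status =
+ *            AImageReader_new(640, 480, AIMAGE_FORMAT_YUV_420_888, 2, &reader);
+ *    if (status != AMEDIA_OK) {
+ *        // handle error
+ *    }
+ * </pre>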
+ *
+ * @see AImage
+ */
 media_status_t AImageReader_new(
         int32_t width, int32_t height, int32_t format, int32_t maxImages,
         /*out*/AImageReader** reader);
 
-// Return all images acquired from this AImageReader back to system and delete
-// the AImageReader instance from memory
-// Do NOT use `reader` after this call
+/**
+ * Delete an {@link AImageReader} and return all images generated by this reader to system.
+ *
+ * <p>This method will return all {@link AImage} objects acquired by this reader (via
+ * {@link AImageReader_acquireNextImage} or {@link AImageReader_acquireLatestImage}) to the system,
+ * making any data pointers obtained from {@link AImage_getPlaneData} invalid. Do NOT access
+ * the reader object or any of those data pointers after this method returns.</p>
+ *
+ * @param reader The image reader to be deleted.
+ */
 void AImageReader_delete(AImageReader* reader);
 
-// Do NOT call ANativeWindow_release on the output. Just use AImageReader_delete.
-media_status_t AImageReader_getWindow(AImageReader*, /*out*/ANativeWindow** window);
+/**
+ * Get a {@link ANativeWindow} that can be used to produce {@link AImage} for this image reader.
+ *
+ * @param reader The image reader of interest.
+ * @param window The output {@link ANativeWindow} will be filled here if the method call succeeds.
+ *                The {@link ANativeWindow} is managed by this image reader. Do NOT call
+ *                {@link ANativeWindow_release} on it. Instead, use {@link AImageReader_delete}.
+ *
+ * @return <ul>
+ *         <li>{@link AMEDIA_OK} if the method call succeeds.</li>
+ *         <li>{@link AMEDIA_ERROR_INVALID_PARAMETER} if reader or window is NULL.</li></ul>
+ */
+media_status_t AImageReader_getWindow(AImageReader* reader, /*out*/ANativeWindow** window);
 
+/**
+ * Query the default width of the {@link AImage} generated by this reader, in pixels.
+ *
+ * <p>The width may be overridden by the producer sending buffers to this reader's
+ * {@link ANativeWindow}. If so, the actual width of the images can be found using
+ * {@link AImage_getWidth}.</p>
+ *
+ * @param reader The image reader of interest.
+ * @param width the default width of the reader will be filled here if the method call succeeds.
+ *
+ * @return <ul>
+ *         <li>{@link AMEDIA_OK} if the method call succeeds.</li>
+ *         <li>{@link AMEDIA_ERROR_INVALID_PARAMETER} if reader or width is NULL.</li></ul>
+ */
 media_status_t AImageReader_getWidth(const AImageReader* reader, /*out*/int32_t* width);
+
+/**
+ * Query the default height of the {@link AImage} generated by this reader, in pixels.
+ *
+ * <p>The height may be overridden by the producer sending buffers to this reader's
+ * {@link ANativeWindow}. If so, the actual height of the images can be found using
+ * {@link AImage_getHeight}.</p>
+ *
+ * @param reader The image reader of interest.
+ * @param height the default height of the reader will be filled here if the method call succeeds.
+ *
+ * @return <ul>
+ *         <li>{@link AMEDIA_OK} if the method call succeeds.</li>
+ *         <li>{@link AMEDIA_ERROR_INVALID_PARAMETER} if reader or height is NULL.</li></ul>
+ */
 media_status_t AImageReader_getHeight(const AImageReader* reader, /*out*/int32_t* height);
+
+/**
+ * Query the format of the {@link AImage} generated by this reader.
+ *
+ * @param reader The image reader of interest.
+ * @param format the format of the reader will be filled here if the method call succeeds. The
+ *                value will be one of the AIMAGE_FORMAT_* enum values defined in {@link NdkImage.h}.
+ *
+ * @return <ul>
+ *         <li>{@link AMEDIA_OK} if the method call succeeds.</li>
+ *         <li>{@link AMEDIA_ERROR_INVALID_PARAMETER} if reader or format is NULL.</li></ul>
+ */
 media_status_t AImageReader_getFormat(const AImageReader* reader, /*out*/int32_t* format);
+
+/**
+ * Query the maximum number of concurrently acquired {@link AImage}s of this reader.
+ *
+ * @param reader The image reader of interest.
+ * @param maxImages the maximum number of concurrently acquired images of the reader will be filled
+ *                here if the method call succeeds.
+ *
+ * @return <ul>
+ *         <li>{@link AMEDIA_OK} if the method call succeeds.</li>
+ *         <li>{@link AMEDIA_ERROR_INVALID_PARAMETER} if reader or maxImages is NULL.</li></ul>
+ */
 media_status_t AImageReader_getMaxImages(const AImageReader* reader, /*out*/int32_t* maxImages);
 
+/**
+ * Acquire the next {@link AImage} from the image reader's queue.
+ *
+ * <p>Warning: Consider using {@link AImageReader_acquireLatestImage} instead, as it will
+ * automatically release older images, and allow slower-running processing routines to catch
+ * up to the newest frame. Usage of {@link AImageReader_acquireNextImage} is recommended for
+ * batch/background processing. Incorrectly using this method can cause images to appear
+ * with an ever-increasing delay, followed by a complete stall where no new images seem to appear.
+ * </p>
+ *
+ * <p>
+ * This method will fail if {@link AImageReader_getMaxImages maxImages} have been acquired with
+ * {@link AImageReader_acquireNextImage} or {@link AImageReader_acquireLatestImage}. In particular
+ * a sequence of {@link AImageReader_acquireNextImage} or {@link AImageReader_acquireLatestImage}
+ * calls greater than {@link AImageReader_getMaxImages maxImages} without calling
+ * {@link AImage_delete} in-between will exhaust the underlying queue. At such a time,
+ * {@link AMEDIA_IMGREADER_MAX_IMAGES_ACQUIRED} will be returned until more images are released with
+ * {@link AImage_delete}.
+ * </p>
+ *
+ * @param reader The image reader of interest.
+ * @param image the acquired {@link AImage} will be filled here if the method call succeeds.
+ *
+ * @return <ul>
+ *         <li>{@link AMEDIA_OK} if the method call succeeds.</li>
+ *         <li>{@link AMEDIA_ERROR_INVALID_PARAMETER} if reader or image is NULL.</li>
+ *         <li>{@link AMEDIA_IMGREADER_MAX_IMAGES_ACQUIRED} if the number of concurrently acquired
+ *                 images has reached the limit.</li>
+ *         <li>{@link AMEDIA_IMGREADER_NO_BUFFER_AVAILABLE} if there are no buffers currently
+ *                 available in the reader queue.</li>
+ *         <li>{@link AMEDIA_ERROR_UNKNOWN} if the method fails for some other reasons.</li></ul>
+ *
+ * @see AImageReader_acquireLatestImage
+ */
 media_status_t AImageReader_acquireNextImage(AImageReader* reader, /*out*/AImage** image);
 
+/**
+ * Acquire the latest {@link AImage} from the image reader's queue, dropping older images.
+ *
+ * <p>
+ * This operation will acquire all the images available from the image reader, then call
+ * {@link AImage_delete} on all images that aren't the latest. This function is recommended over
+ * {@link AImageReader_acquireNextImage} for most use cases, as it's more suited for real-time
+ * processing.
+ * </p>
+ * <p>
+ * Note that {@link AImageReader_getMaxImages maxImages} should be at least 2 for
+ * {@link AImageReader_acquireLatestImage} to be any different from
+ * {@link AImageReader_acquireNextImage}; discarding all-but-the-newest {@link AImage} requires
+ * temporarily acquiring two {@link AImage}s at once. More generally, calling
+ * {@link AImageReader_acquireLatestImage} with less than two images of margin, that is,
+ * (maxImages - currentAcquiredImages) < 2, will not discard as expected.
+ * </p>
+ * <p>
+ * This method will fail if {@link AImageReader_getMaxImages maxImages} have been acquired with
+ * {@link AImageReader_acquireNextImage} or {@link AImageReader_acquireLatestImage}. In particular
+ * a sequence of {@link AImageReader_acquireNextImage} or {@link AImageReader_acquireLatestImage}
+ * calls greater than {@link AImageReader_getMaxImages maxImages} without calling
+ * {@link AImage_delete} in-between will exhaust the underlying queue. At such a time,
+ * {@link AMEDIA_IMGREADER_MAX_IMAGES_ACQUIRED} will be returned until more images are released with
+ * {@link AImage_delete}.
+ * </p>
+ *
+ * @param reader The image reader of interest.
+ * @param image the acquired {@link AImage} will be filled here if the method call succeeds.
+ *
+ * @return <ul>
+ *         <li>{@link AMEDIA_OK} if the method call succeeds.</li>
+ *         <li>{@link AMEDIA_ERROR_INVALID_PARAMETER} if reader or image is NULL.</li>
+ *         <li>{@link AMEDIA_IMGREADER_MAX_IMAGES_ACQUIRED} if the number of concurrently acquired
+ *                 images has reached the limit.</li>
+ *         <li>{@link AMEDIA_IMGREADER_NO_BUFFER_AVAILABLE} if there are no buffers currently
+ *                 available in the reader queue.</li>
+ *         <li>{@link AMEDIA_ERROR_UNKNOWN} if the method fails for some other reasons.</li></ul>
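+ *
+ * <p>A typical usage sketch (error handling beyond the status check omitted):</p>
+ *
+ * <pre>
+ *    AImage* image = NULL;
+ *    if (AImageReader_acquireLatestImage(reader, &image) == AMEDIA_OK) {
+ *        // ... process the image ...
+ *        AImage_delete(image); // return the image to the system
+ *    }
+ * </pre>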
+ *
+ * @see AImageReader_acquireNextImage
+ */
 media_status_t AImageReader_acquireLatestImage(AImageReader* reader, /*out*/AImage** image);
 
-// The callback happens on one dedicated thread per AImageReader instance
-// It's okay to use AImageReader_*/AImage_* APIs within the callback
+
+/**
+ * The definition of the {@link AImageReader} new image available callback.
+ *
+ * @param context The optional application context provided by the user in
+ *                {@link AImageReader_setImageListener}.
+ * @param reader The {@link AImageReader} that has a new image available.
+ */
 typedef void (*AImageReader_ImageCallback)(void* context, AImageReader* reader);
 
 typedef struct AImageReader_ImageListener {
-    void*                      context; // optional application context.
+    /// optional application context.
+    void*                      context;
+
+    /**
+     * This callback is called when there is a new image available in the image reader's queue.
+     *
+     * <p>The callback happens on one dedicated thread per {@link AImageReader} instance. It is okay
+     * to use AImageReader_* and AImage_* methods within the callback. Note that it is possible that
+     * calling {@link AImageReader_acquireNextImage} or {@link AImageReader_acquireLatestImage}
+     * returns {@link AMEDIA_IMGREADER_NO_BUFFER_AVAILABLE} within this callback. For example, when
+     * there are multiple images and callbacks queued, if the application calls
+     * {@link AImageReader_acquireLatestImage}, some images will be returned to the system before
+     * their corresponding callback is executed.</p>
+     */
     AImageReader_ImageCallback onImageAvailable;
 } AImageReader_ImageListener;
 
+/**
+ * Set the onImageAvailable listener of this image reader.
+ *
+ * <p>Note that calling this method will replace previously registered listeners.</p>
+ *
+ * @param reader The image reader of interest.
+ * @param listener the {@link AImageReader_ImageListener} to be registered. Set this to NULL if
+ *                 the application no longer needs to listen for new images.
+ *
+ * @return <ul>
+ *         <li>{@link AMEDIA_OK} if the method call succeeds.</li>
+ *         <li>{@link AMEDIA_ERROR_INVALID_PARAMETER} if reader is NULL.</li></ul>
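+ *
+ * <p>A registration sketch; onImageAvailableCallback is an application-defined function matching
+ * {@link AImageReader_ImageCallback}, and appContext is optional application state (both names
+ * are illustrative):</p>
+ *
+ * <pre>
+ *    AImageReader_ImageListener listener = { appContext, onImageAvailableCallback };
+ *    AImageReader_setImageListener(reader, &listener);
+ * </pre>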
+ */
 media_status_t AImageReader_setImageListener(
         AImageReader* reader, AImageReader_ImageListener* listener);
 
diff --git a/media/libmedia/Android.mk b/media/libmedia/Android.mk
index 6586f41..2bdfd43 100644
--- a/media/libmedia/Android.mk
+++ b/media/libmedia/Android.mk
@@ -49,6 +49,7 @@
     IResourceManagerService.cpp \
     IStreamSource.cpp \
     MediaCodecInfo.cpp \
+    MediaUtils.cpp \
     Metadata.cpp \
     mediarecorder.cpp \
     IMediaMetadataRetriever.cpp \
diff --git a/media/libmedia/IMediaDeathNotifier.cpp b/media/libmedia/IMediaDeathNotifier.cpp
index d4360ea..c43ef66 100644
--- a/media/libmedia/IMediaDeathNotifier.cpp
+++ b/media/libmedia/IMediaDeathNotifier.cpp
@@ -31,7 +31,7 @@
 SortedVector< wp<IMediaDeathNotifier> > IMediaDeathNotifier::sObitRecipients;
 
 // establish binder interface to MediaPlayerService
-/*static*/const sp<IMediaPlayerService>&
+/*static*/const sp<IMediaPlayerService>
 IMediaDeathNotifier::getMediaPlayerService()
 {
     ALOGV("getMediaPlayerService");
diff --git a/media/libmedia/MediaUtils.cpp b/media/libmedia/MediaUtils.cpp
new file mode 100644
index 0000000..a02ca65
--- /dev/null
+++ b/media/libmedia/MediaUtils.cpp
@@ -0,0 +1,74 @@
+/*
+ * Copyright (C) 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "MediaUtils"
+#define LOG_NDEBUG 0
+#include <utils/Log.h>
+
+#include <cutils/properties.h>
+#include <sys/resource.h>
+#include <unistd.h>
+
+#include "MediaUtils.h"
+
+namespace android {
+
+void limitProcessMemory(
+    const char *property,
+    size_t numberOfBytes,
+    size_t percentageOfTotalMem) {
+
+    long pageSize = sysconf(_SC_PAGESIZE);
+    long numPages = sysconf(_SC_PHYS_PAGES);
+    size_t maxMem = SIZE_MAX;
+
+    if (pageSize > 0 && numPages > 0) {
+        if (size_t(numPages) < SIZE_MAX / size_t(pageSize)) {
+            maxMem = size_t(numPages) * size_t(pageSize);
+        }
+        ALOGV("physMem: %zu", maxMem);
+        if (percentageOfTotalMem > 100) {
+            ALOGW("requested %zu%% of total memory, using 100%%", percentageOfTotalMem);
+            percentageOfTotalMem = 100;
+        }
+        maxMem = maxMem / 100 * percentageOfTotalMem;
+        if (numberOfBytes < maxMem) {
+            maxMem = numberOfBytes;
+        }
+        ALOGV("requested limit: %zu", maxMem);
+    } else {
+        ALOGW("couldn't determine total RAM");
+    }
+
+    int64_t propVal = property_get_int64(property, maxMem);
+    if (propVal > 0 && uint64_t(propVal) <= SIZE_MAX) {
+        maxMem = propVal;
+    }
+    ALOGV("actual limit: %zu", maxMem);
+
+    struct rlimit limit;
+    getrlimit(RLIMIT_AS, &limit);
+    ALOGV("original limits: %lld/%lld", (long long)limit.rlim_cur, (long long)limit.rlim_max);
+    limit.rlim_cur = maxMem;
+    setrlimit(RLIMIT_AS, &limit);
+    limit.rlim_cur = -1;
+    limit.rlim_max = -1;
+    getrlimit(RLIMIT_AS, &limit);
+    ALOGV("new limits: %lld/%lld", (long long)limit.rlim_cur, (long long)limit.rlim_max);
+
+}
+
+} // namespace android
diff --git a/media/libmedia/MediaUtils.h b/media/libmedia/MediaUtils.h
new file mode 100644
index 0000000..f80dd30
--- /dev/null
+++ b/media/libmedia/MediaUtils.h
@@ -0,0 +1,35 @@
+/*
+ * Copyright (C) 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef _MEDIA_UTILS_H
+#define _MEDIA_UTILS_H
+
+namespace android {
+
+/**
+   Limit the amount of memory a process can allocate using setrlimit(RLIMIT_AS).
+   The value to use will be read from the specified system property, or if the
+   property doesn't exist it will use the specified number of bytes or the
+   specified percentage of total memory, whichever is smaller.
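+
+   Example (illustrative only; the property name below is hypothetical):
+
+     limitProcessMemory(
+         "media.example.maxmem",  // hypothetical property holding a byte limit
+         SIZE_MAX,                // no absolute byte cap
+         20);                     // cap at 20% of total RAM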
+*/
+void limitProcessMemory(
+    const char *property,
+    size_t numberOfBytes,
+    size_t percentageOfTotalMem);
+
+}   // namespace android
+
+#endif  // _MEDIA_UTILS_H
diff --git a/media/libmedia/mediaplayer.cpp b/media/libmedia/mediaplayer.cpp
index 2795101..8725dfe 100644
--- a/media/libmedia/mediaplayer.cpp
+++ b/media/libmedia/mediaplayer.cpp
@@ -152,7 +152,7 @@
     ALOGV("setDataSource(%s)", url);
     status_t err = BAD_VALUE;
     if (url != NULL) {
-        const sp<IMediaPlayerService>& service(getMediaPlayerService());
+        const sp<IMediaPlayerService> service(getMediaPlayerService());
         if (service != 0) {
             sp<IMediaPlayer> player(service->create(this, mAudioSessionId));
             if ((NO_ERROR != doSetRetransmitEndpoint(player)) ||
@@ -169,7 +169,7 @@
 {
     ALOGV("setDataSource(%d, %" PRId64 ", %" PRId64 ")", fd, offset, length);
     status_t err = UNKNOWN_ERROR;
-    const sp<IMediaPlayerService>& service(getMediaPlayerService());
+    const sp<IMediaPlayerService> service(getMediaPlayerService());
     if (service != 0) {
         sp<IMediaPlayer> player(service->create(this, mAudioSessionId));
         if ((NO_ERROR != doSetRetransmitEndpoint(player)) ||
@@ -185,7 +185,7 @@
 {
     ALOGV("setDataSource(IDataSource)");
     status_t err = UNKNOWN_ERROR;
-    const sp<IMediaPlayerService>& service(getMediaPlayerService());
+    const sp<IMediaPlayerService> service(getMediaPlayerService());
     if (service != 0) {
         sp<IMediaPlayer> player(service->create(this, mAudioSessionId));
         if ((NO_ERROR != doSetRetransmitEndpoint(player)) ||
@@ -396,11 +396,22 @@
     }
     Mutex::Autolock _l(mLock);
     if (mPlayer == 0) return INVALID_OPERATION;
+
+    if (rate.mSpeed != 0.f && !(mCurrentState & MEDIA_PLAYER_STARTED)
+            && (mCurrentState & (MEDIA_PLAYER_PREPARED | MEDIA_PLAYER_PAUSED
+                    | MEDIA_PLAYER_PLAYBACK_COMPLETE))) {
+        mPlayer->setLooping(mLoop);
+        mPlayer->setVolume(mLeftVolume, mRightVolume);
+        mPlayer->setAuxEffectSendLevel(mSendLevel);
+    }
+
     status_t err = mPlayer->setPlaybackSettings(rate);
     if (err == OK) {
         if (rate.mSpeed == 0.f && mCurrentState == MEDIA_PLAYER_STARTED) {
             mCurrentState = MEDIA_PLAYER_PAUSED;
-        } else if (rate.mSpeed != 0.f && mCurrentState == MEDIA_PLAYER_PAUSED) {
+        } else if (rate.mSpeed != 0.f
+                && (mCurrentState & (MEDIA_PLAYER_PREPARED | MEDIA_PLAYER_PAUSED
+                    | MEDIA_PLAYER_PLAYBACK_COMPLETE))) {
             mCurrentState = MEDIA_PLAYER_STARTED;
         }
     }
diff --git a/media/libmedia/mediarecorder.cpp b/media/libmedia/mediarecorder.cpp
index de3b214..59c077a 100644
--- a/media/libmedia/mediarecorder.cpp
+++ b/media/libmedia/mediarecorder.cpp
@@ -662,7 +662,7 @@
 {
     ALOGV("constructor");
 
-    const sp<IMediaPlayerService>& service(getMediaPlayerService());
+    const sp<IMediaPlayerService> service(getMediaPlayerService());
     if (service != NULL) {
         mMediaRecorder = service->createMediaRecorder(opPackageName);
     }
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayer.cpp b/media/libmediaplayerservice/nuplayer/NuPlayer.cpp
index 9f63027..0b10ae4 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayer.cpp
+++ b/media/libmediaplayerservice/nuplayer/NuPlayer.cpp
@@ -188,6 +188,7 @@
       mPlaybackSettings(AUDIO_PLAYBACK_RATE_DEFAULT),
       mVideoFpsHint(-1.f),
       mStarted(false),
+      mPrepared(false),
       mResetting(false),
       mSourceStarted(false),
       mPaused(false),
@@ -768,9 +769,17 @@
                     newRate.mSpeed = mPlaybackSettings.mSpeed;
                     mPlaybackSettings = newRate;
                 } else { /* rate.mSpeed != 0.f */
-                    onResume();
-                    mPausedByClient = false;
                     mPlaybackSettings = rate;
+                    if (mStarted) {
+                        // do not resume yet if the source is still buffering
+                        if (!mPausedForBuffering) {
+                            onResume();
+                        }
+                    } else if (mPrepared) {
+                        onStart();
+                    }
+
+                    mPausedByClient = false;
                 }
             }
 
@@ -1323,6 +1332,7 @@
     mAudioEOS = false;
     mVideoEOS = false;
     mStarted = true;
+    mPaused = false;
 
     uint32_t flags = 0;
 
@@ -2012,6 +2022,7 @@
     }
 
     mStarted = false;
+    mPrepared = false;
     mResetting = false;
     mSourceStarted = false;
 }
@@ -2124,6 +2135,8 @@
                         new FlushDecoderAction(FLUSH_CMD_SHUTDOWN /* audio */,
                                                FLUSH_CMD_SHUTDOWN /* video */));
                 processDeferredActions();
+            } else {
+                mPrepared = true;
             }
 
             sp<NuPlayerDriver> driver = mDriver.promote();
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayer.h b/media/libmediaplayerservice/nuplayer/NuPlayer.h
index 7431532..ae17c76 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayer.h
+++ b/media/libmediaplayerservice/nuplayer/NuPlayer.h
@@ -198,6 +198,7 @@
     AVSyncSettings mSyncSettings;
     float mVideoFpsHint;
     bool mStarted;
+    bool mPrepared;
     bool mResetting;
     bool mSourceStarted;
 
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp b/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp
index 06bb53d..0f4dce9 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp
@@ -243,7 +243,10 @@
 status_t NuPlayerDriver::start() {
     ALOGD("start(%p), state is %d, eos is %d", this, mState, mAtEOS);
     Mutex::Autolock autoLock(mLock);
+    return start_l();
+}
 
+status_t NuPlayerDriver::start_l() {
     switch (mState) {
         case STATE_UNPREPARED:
         {
@@ -357,8 +360,11 @@
         if (rate.mSpeed == 0.f && mState == STATE_RUNNING) {
             mState = STATE_PAUSED;
             notifyListener_l(MEDIA_PAUSED);
-        } else if (rate.mSpeed != 0.f && mState == STATE_PAUSED) {
-            mState = STATE_RUNNING;
+        } else if (rate.mSpeed != 0.f
+                && (mState == STATE_PAUSED
+                    || mState == STATE_STOPPED_AND_PREPARED
+                    || mState == STATE_PREPARED)) {
+            err = start_l();
         }
     }
     return err;
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerDriver.h b/media/libmediaplayerservice/nuplayer/NuPlayerDriver.h
index d5b4ba1..26d3a60 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayerDriver.h
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerDriver.h
@@ -123,6 +123,7 @@
     bool mAutoLoop;
 
     status_t prepare_l();
+    status_t start_l();
     void notifyListener_l(int msg, int ext1 = 0, int ext2 = 0, const Parcel *in = NULL);
 
     DISALLOW_EVIL_CONSTRUCTORS(NuPlayerDriver);
diff --git a/media/libstagefright/ACodec.cpp b/media/libstagefright/ACodec.cpp
index bb4497b..cbc28e3 100644
--- a/media/libstagefright/ACodec.cpp
+++ b/media/libstagefright/ACodec.cpp
@@ -1732,6 +1732,14 @@
         return INVALID_OPERATION;
     }
 
+    // propagate bitrate to the output so that the muxer has it
+    if (encoder && msg->findInt32("bitrate", &bitRate)) {
+        // Technically ISO spec says that 'bitrate' should be 0 for VBR even though it is the
+        // average bitrate. We've been setting both bitrate and max-bitrate to this same value.
+        outputFormat->setInt32("bitrate", bitRate);
+        outputFormat->setInt32("max-bitrate", bitRate);
+    }
+
     int32_t storeMeta;
     if (encoder
             && msg->findInt32("android._input-metadata-buffer-type", &storeMeta)
diff --git a/media/libstagefright/MPEG4Writer.cpp b/media/libstagefright/MPEG4Writer.cpp
index cc5f7a0..24fb987 100644
--- a/media/libstagefright/MPEG4Writer.cpp
+++ b/media/libstagefright/MPEG4Writer.cpp
@@ -3041,9 +3041,9 @@
     mOwner->writeInt16(0x03);  // XXX
     mOwner->writeInt8(0x00);   // buffer size 24-bit (0x300)
 
-    int32_t avgBitrate = 256000;
+    int32_t avgBitrate = 0;
     (void)mMeta->findInt32(kKeyBitRate, &avgBitrate);
-    int32_t maxBitrate = avgBitrate;
+    int32_t maxBitrate = 0;
     (void)mMeta->findInt32(kKeyMaxBitRate, &maxBitrate);
     mOwner->writeInt32(maxBitrate);
     mOwner->writeInt32(avgBitrate);
@@ -3084,9 +3084,9 @@
     };
     mOwner->write(kData, sizeof(kData));
 
-    int32_t avgBitrate = 256000;
+    int32_t avgBitrate = 0;
     (void)mMeta->findInt32(kKeyBitRate, &avgBitrate);
-    int32_t maxBitrate = avgBitrate;
+    int32_t maxBitrate = 0;
     (void)mMeta->findInt32(kKeyMaxBitRate, &maxBitrate);
     mOwner->writeInt32(maxBitrate);
     mOwner->writeInt32(avgBitrate);
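
These two hunks work together: ACodec now publishes "bitrate" and "max-bitrate" on the encoder's output format, and MPEG4Writer's esds defaults drop from a fabricated 256 kbps to 0 (the ISO-defined "unknown" value). Because findInt32 leaves its output untouched when the key is absent, the old code also silently forced maxBitrate to equal avgBitrate. The effective lookup semantics, condensed from the patch:

    // findInt32 returns false and leaves the variable unchanged when the key
    // is missing, so the initializer is the effective default.
    int32_t avgBitrate = 0;                        // was 256000
    (void)mMeta->findInt32(kKeyBitRate, &avgBitrate);
    int32_t maxBitrate = 0;                        // was avgBitrate
    (void)mMeta->findInt32(kKeyMaxBitRate, &maxBitrate);
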
diff --git a/media/libstagefright/SurfaceUtils.cpp b/media/libstagefright/SurfaceUtils.cpp
index f64bb3e..9940822 100644
--- a/media/libstagefright/SurfaceUtils.cpp
+++ b/media/libstagefright/SurfaceUtils.cpp
@@ -168,10 +168,6 @@
             break;
         }
 
-        if (img == NULL) {
-            ALOGE("error pushing blank frames: lock returned NULL buffer");
-            break;
-        }
         *img = 0;
 
         err = buf->unlock();
diff --git a/media/libstagefright/codecs/aacdec/SoftAAC2.cpp b/media/libstagefright/codecs/aacdec/SoftAAC2.cpp
index e88dfa8..ff76bc8 100644
--- a/media/libstagefright/codecs/aacdec/SoftAAC2.cpp
+++ b/media/libstagefright/codecs/aacdec/SoftAAC2.cpp
@@ -26,6 +26,7 @@
 #include <media/stagefright/foundation/ADebug.h>
 #include <media/stagefright/foundation/hexdump.h>
 #include <media/stagefright/MediaErrors.h>
+#include <utils/misc.h>
 
 #include <math.h>
 
@@ -55,6 +56,14 @@
     params->nVersion.s.nStep = 0;
 }
 
+static const OMX_U32 kSupportedProfiles[] = {
+    OMX_AUDIO_AACObjectLC,
+    OMX_AUDIO_AACObjectHE,
+    OMX_AUDIO_AACObjectHE_PS,
+    OMX_AUDIO_AACObjectLD,
+    OMX_AUDIO_AACObjectELD,
+};
+
 SoftAAC2::SoftAAC2(
         const char *name,
         const OMX_CALLBACKTYPE *callbacks,
@@ -207,7 +216,7 @@
 
 OMX_ERRORTYPE SoftAAC2::internalGetParameter(
         OMX_INDEXTYPE index, OMX_PTR params) {
-    switch (index) {
+    switch ((OMX_U32) index) {
         case OMX_IndexParamAudioAac:
         {
             OMX_AUDIO_PARAM_AACPROFILETYPE *aacParams =
@@ -283,6 +292,29 @@
             return OMX_ErrorNone;
         }
 
+        case OMX_IndexParamAudioProfileQuerySupported:
+        {
+            OMX_AUDIO_PARAM_ANDROID_PROFILETYPE *profileParams =
+                (OMX_AUDIO_PARAM_ANDROID_PROFILETYPE *)params;
+
+            if (!isValidOMXParam(profileParams)) {
+                return OMX_ErrorBadParameter;
+            }
+
+            if (profileParams->nPortIndex != 0) {
+                return OMX_ErrorUndefined;
+            }
+
+            if (profileParams->nProfileIndex >= NELEM(kSupportedProfiles)) {
+                return OMX_ErrorNoMore;
+            }
+
+            profileParams->eProfile =
+                kSupportedProfiles[profileParams->nProfileIndex];
+
+            return OMX_ErrorNone;
+        }
+
         default:
             return SimpleSoftOMXComponent::internalGetParameter(index, params);
     }
@@ -616,12 +648,15 @@
                         signalError = true;
                     } else {
                         adtsHeaderSize = (protectionAbsent ? 7 : 9);
+                        if (aac_frame_length < adtsHeaderSize) {
+                            signalError = true;
+                        } else {
+                            inBuffer[0] = (UCHAR *)adtsHeader + adtsHeaderSize;
+                            inBufferLength[0] = aac_frame_length - adtsHeaderSize;
 
-                        inBuffer[0] = (UCHAR *)adtsHeader + adtsHeaderSize;
-                        inBufferLength[0] = aac_frame_length - adtsHeaderSize;
-
-                        inHeader->nOffset += adtsHeaderSize;
-                        inHeader->nFilledLen -= adtsHeaderSize;
+                            inHeader->nOffset += adtsHeaderSize;
+                            inHeader->nFilledLen -= adtsHeaderSize;
+                        }
                     }
                 }
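
The new OMX_IndexParamAudioProfileQuerySupported case implements the standard OMX index-based enumeration: a client raises nProfileIndex until the component answers OMX_ErrorNoMore. A sketch of the client side, assuming a valid OMX component handle and the InitOMXParams helper shown earlier:

    OMX_AUDIO_PARAM_ANDROID_PROFILETYPE param;
    InitOMXParams(&param);
    param.nPortIndex = 0;                          // input port
    for (OMX_U32 i = 0; ; ++i) {
        param.nProfileIndex = i;
        OMX_ERRORTYPE err = OMX_GetParameter(
                handle,
                (OMX_INDEXTYPE) OMX_IndexParamAudioProfileQuerySupported,
                &param);
        if (err != OMX_ErrorNone) {
            break;                                 // OMX_ErrorNoMore ends the walk
        }
        // param.eProfile is one supported profile, e.g. OMX_AUDIO_AACObjectLC
    }
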
 
diff --git a/media/libstagefright/codecs/amrwbenc/Android.mk b/media/libstagefright/codecs/amrwbenc/Android.mk
index 77a7b1e..026006e 100644
--- a/media/libstagefright/codecs/amrwbenc/Android.mk
+++ b/media/libstagefright/codecs/amrwbenc/Android.mk
@@ -80,7 +80,8 @@
         src/asm/ARMV7/Syn_filt_32_neon.s \
         src/asm/ARMV7/syn_filt_neon.s
 
-    LOCAL_CFLAGS_arm := -DARM -DARMV7 -DASM_OPT
+    # don't actually generate neon instructions, see bug 26932980
+    LOCAL_CFLAGS_arm := -DARM -DARMV7 -DASM_OPT -mfpu=vfpv3
     LOCAL_C_INCLUDES_arm := $(LOCAL_PATH)/src/asm/ARMV5E
     LOCAL_C_INCLUDES_arm += $(LOCAL_PATH)/src/asm/ARMV7
 endif
@@ -102,7 +103,7 @@
 
 LOCAL_CFLAGS += -Werror
 LOCAL_CLANG := true
-LOCAL_SANITIZE := signed-integer-overflow
+#LOCAL_SANITIZE := signed-integer-overflow
 
 include $(BUILD_STATIC_LIBRARY)
 
diff --git a/media/libstagefright/codecs/avcdec/SoftAVCDec.cpp b/media/libstagefright/codecs/avcdec/SoftAVCDec.cpp
index 973c528..61b9bfd 100644
--- a/media/libstagefright/codecs/avcdec/SoftAVCDec.cpp
+++ b/media/libstagefright/codecs/avcdec/SoftAVCDec.cpp
@@ -497,16 +497,6 @@
     List<BufferInfo *> &inQueue = getPortQueue(kInputPortIndex);
     List<BufferInfo *> &outQueue = getPortQueue(kOutputPortIndex);
 
-    /* If input EOS is seen and decoder is not in flush mode,
-     * set the decoder in flush mode.
-     * There can be a case where EOS is sent along with last picture data
-     * In that case, only after decoding that input data, decoder has to be
-     * put in flush. This case is handled here  */
-
-    if (mReceivedEOS && !mIsInFlush) {
-        setFlushMode();
-    }
-
     while (!outQueue.empty()) {
         BufferInfo *inInfo;
         OMX_BUFFERHEADERTYPE *inHeader;
@@ -674,7 +664,7 @@
                 outInfo = NULL;
                 notifyFillBufferDone(outHeader);
                 outHeader = NULL;
-            } else {
+            } else if (mIsInFlush) {
                 /* If in flush mode and no output is returned by the codec,
                  * then come out of flush mode */
                 mIsInFlush = false;
@@ -695,6 +685,16 @@
             }
         }
 
+        /* If input EOS is seen and decoder is not in flush mode,
+         * set the decoder in flush mode.
+         * There can be a case where EOS is sent along with last picture data
+         * In that case, only after decoding that input data, decoder has to be
+         * put in flush. This case is handled here  */
+
+        if (mReceivedEOS && !mIsInFlush) {
+            setFlushMode();
+        }
+
         if (inHeader != NULL) {
             inInfo->mOwnedByUs = false;
             inQueue.erase(inQueue.begin());
diff --git a/media/libstagefright/codecs/hevcdec/SoftHEVC.cpp b/media/libstagefright/codecs/hevcdec/SoftHEVC.cpp
index 54736f8..0215a11 100644
--- a/media/libstagefright/codecs/hevcdec/SoftHEVC.cpp
+++ b/media/libstagefright/codecs/hevcdec/SoftHEVC.cpp
@@ -444,6 +444,9 @@
 
     if (NULL == mCodecCtx) {
         if (OK != initDecoder()) {
+            ALOGE("Failed to initialize decoder");
+            notify(OMX_EventError, OMX_ErrorUnsupportedSetting, 0, NULL);
+            mSignalledError = true;
             return;
         }
     }
@@ -456,16 +459,6 @@
     List<BufferInfo *> &inQueue = getPortQueue(kInputPortIndex);
     List<BufferInfo *> &outQueue = getPortQueue(kOutputPortIndex);
 
-    /* If input EOS is seen and decoder is not in flush mode,
-     * set the decoder in flush mode.
-     * There can be a case where EOS is sent along with last picture data
-     * In that case, only after decoding that input data, decoder has to be
-     * put in flush. This case is handled here  */
-
-    if (mReceivedEOS && !mIsInFlush) {
-        setFlushMode();
-    }
-
     while (!outQueue.empty()) {
         BufferInfo *inInfo;
         OMX_BUFFERHEADERTYPE *inHeader;
@@ -540,6 +533,25 @@
             IV_API_CALL_STATUS_T status;
             status = ivdec_api_function(mCodecCtx, (void *)&s_dec_ip, (void *)&s_dec_op);
 
+            bool unsupportedResolution =
+                (IVD_STREAM_WIDTH_HEIGHT_NOT_SUPPORTED == (s_dec_op.u4_error_code & 0xFF));
+
+            /* Check for unsupported dimensions */
+            if (unsupportedResolution) {
+                ALOGE("Unsupported resolution : %dx%d", mWidth, mHeight);
+                notify(OMX_EventError, OMX_ErrorUnsupportedSetting, 0, NULL);
+                mSignalledError = true;
+                return;
+            }
+
+            bool allocationFailed = (IVD_MEM_ALLOC_FAILED == (s_dec_op.u4_error_code & 0xFF));
+            if (allocationFailed) {
+                ALOGE("Allocation failure in decoder");
+                notify(OMX_EventError, OMX_ErrorUnsupportedSetting, 0, NULL);
+                mSignalledError = true;
+                return;
+            }
+
             bool resChanged = (IVD_RES_CHANGED == (s_dec_op.u4_error_code & 0xFF));
 
             GETTIME(&mTimeEnd, NULL);
@@ -600,7 +612,7 @@
                 outInfo = NULL;
                 notifyFillBufferDone(outHeader);
                 outHeader = NULL;
-            } else {
+            } else if (mIsInFlush) {
                 /* If in flush mode and no output is returned by the codec,
                  * then come out of flush mode */
                 mIsInFlush = false;
@@ -621,6 +633,16 @@
             }
         }
 
+        /* If input EOS is seen and decoder is not in flush mode,
+         * set the decoder in flush mode.
+         * There can be a case where EOS is sent along with last picture data
+         * In that case, only after decoding that input data, decoder has to be
+         * put in flush. This case is handled here  */
+
+        if (mReceivedEOS && !mIsInFlush) {
+            setFlushMode();
+        }
+
         // TODO: Handle more than one picture data
         if (inHeader != NULL) {
             inInfo->mOwnedByUs = false;
diff --git a/media/libstagefright/codecs/mpeg2dec/SoftMPEG2.cpp b/media/libstagefright/codecs/mpeg2dec/SoftMPEG2.cpp
index 27f860e..5210683 100644
--- a/media/libstagefright/codecs/mpeg2dec/SoftMPEG2.cpp
+++ b/media/libstagefright/codecs/mpeg2dec/SoftMPEG2.cpp
@@ -558,16 +558,6 @@
         setParams(mStride);
     }
 
-    /* If input EOS is seen and decoder is not in flush mode,
-     * set the decoder in flush mode.
-     * There can be a case where EOS is sent along with last picture data
-     * In that case, only after decoding that input data, decoder has to be
-     * put in flush. This case is handled here  */
-
-    if (mReceivedEOS && !mIsInFlush) {
-        setFlushMode();
-    }
-
     while (!outQueue.empty()) {
         BufferInfo *inInfo;
         OMX_BUFFERHEADERTYPE *inHeader;
@@ -751,7 +741,7 @@
                     notifyFillBufferDone(outHeader);
                     outHeader = NULL;
                 }
-            } else {
+            } else if (mIsInFlush) {
                 /* If in flush mode and no output is returned by the codec,
                  * then come out of flush mode */
                 mIsInFlush = false;
@@ -772,6 +762,16 @@
             }
         }
 
+        /* If input EOS is seen and decoder is not in flush mode,
+         * set the decoder in flush mode.
+         * There can be a case where EOS is sent along with last picture data
+         * In that case, only after decoding that input data, decoder has to be
+         * put in flush. This case is handled here  */
+
+        if (mReceivedEOS && !mIsInFlush) {
+            setFlushMode();
+        }
+
         // TODO: Handle more than one picture data
         if (inHeader != NULL) {
             inInfo->mOwnedByUs = false;
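
All three software decoders (AVC, HEVC, MPEG-2) receive the same reordering: the EOS-triggered flush now happens after the decode call inside the output loop, so an input buffer that carries the last picture together with the EOS flag is decoded before the codec is put into flush. In outline, with hypothetical helper names standing in for the per-codec bodies:

    while (!outQueue.empty()) {
        decodeOneAccessUnit();            // may consume the EOS-carrying input
        if (mReceivedEOS && !mIsInFlush) {
            setFlushMode();               // flush only after the last decode
        }
        // ... return decoded output / recycle input buffers ...
    }
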
diff --git a/media/libstagefright/codecs/on2/h264dec/source/h264bsd_dpb.c b/media/libstagefright/codecs/on2/h264dec/source/h264bsd_dpb.c
index 9517d0a..799bd16 100644
--- a/media/libstagefright/codecs/on2/h264dec/source/h264bsd_dpb.c
+++ b/media/libstagefright/codecs/on2/h264dec/source/h264bsd_dpb.c
@@ -60,6 +60,7 @@
 #include "h264bsd_util.h"
 #include "basetype.h"
 
+#include <log/log.h>
 /*------------------------------------------------------------------------------
     2. External compiler flags
 --------------------------------------------------------------------------------
@@ -998,6 +999,13 @@
     ASSERT(maxFrameNum);
     ASSERT(dpbSize);
 
+    // see comment in loop below about size calculation
+    if (picSizeInMbs > (UINT32_MAX - 32 - 15) / 384) {
+        ALOGE("b/28533562");
+        android_errorWriteLog(0x534e4554, "28533562");
+        return(MEMORY_ALLOCATION_ERROR);
+    }
+
     dpb->maxLongTermFrameIdx = NO_LONG_TERM_FRAME_INDICES;
     dpb->maxRefFrames        = MAX(maxRefFrames, 1);
     if (noReordering)
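
The guard rejects picSizeInMbs values for which the per-picture allocation, roughly picSizeInMbs * 384 + 32 + 15 bytes, would wrap 32-bit unsigned arithmetic. The general pattern as a self-contained check:

    #include <cstdint>

    // n * 384 + 32 + 15 fits in uint32_t iff n <= (UINT32_MAX - 32 - 15) / 384.
    bool picSizeFits(uint32_t picSizeInMbs) {
        return picSizeInMbs <= (UINT32_MAX - 32 - 15) / 384;
    }
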
diff --git a/media/libstagefright/httplive/M3UParser.cpp b/media/libstagefright/httplive/M3UParser.cpp
index 935f1dc..c04549a 100644
--- a/media/libstagefright/httplive/M3UParser.cpp
+++ b/media/libstagefright/httplive/M3UParser.cpp
@@ -701,6 +701,22 @@
         mLastSeqNumber = mFirstSeqNumber + mItems.size() - 1;
     }
 
+    for (size_t i = 0; i < mItems.size(); ++i) {
+        sp<AMessage> meta = mItems.itemAt(i).mMeta;
+        const char *keys[] = {"audio", "video", "subtitles"};
+        for (size_t j = 0; j < sizeof(keys) / sizeof(const char *); ++j) {
+            AString groupID;
+            if (meta->findString(keys[j], &groupID)) {
+                ssize_t groupIndex = mMediaGroups.indexOfKey(groupID);
+                if (groupIndex < 0) {
+                    ALOGE("Undefined media group '%s' referenced in stream info.",
+                          groupID.c_str());
+                    return ERROR_MALFORMED;
+                }
+            }
+        }
+    }
+
     return OK;
 }
 
@@ -873,15 +889,6 @@
             }
 
             const AString &groupID = unquoteString(val);
-            ssize_t groupIndex = mMediaGroups.indexOfKey(groupID);
-
-            if (groupIndex < 0) {
-                ALOGE("Undefined media group '%s' referenced in stream info.",
-                      groupID.c_str());
-
-                return ERROR_MALFORMED;
-            }
-
             key.tolower();
             if (meta->get() == NULL) {
                 *meta = new AMessage;
diff --git a/media/libstagefright/rtsp/MyHandler.h b/media/libstagefright/rtsp/MyHandler.h
index 6fa83fa..f9a9ab9 100644
--- a/media/libstagefright/rtsp/MyHandler.h
+++ b/media/libstagefright/rtsp/MyHandler.h
@@ -64,6 +64,10 @@
 
 static int64_t kPauseDelayUs = 3000000ll;
 
+// The maximum number of stale access units allowed at the beginning of
+// a new sequence.
+static int32_t kMaxAllowedStaleAccessUnits = 20;
+
 namespace android {
 
 static bool GetAttribute(const char *s, const char *key, AString *value) {
@@ -1048,16 +1052,39 @@
                     break;
                 }
 
+                if (track->mNewSegment) {
+                    // The sequence number from RTP packet has only 16 bits and is extended
+                    // by ARTPSource. Only the low 16 bits of seq in RTP-Info of reply of
+                    // RTSP "PLAY" command should be used to detect the first RTP packet
+                    // after seeking.
+                    if (track->mAllowedStaleAccessUnits > 0) {
+                        if (((seqNum ^ track->mFirstSeqNumInSegment) & 0xffff) != 0) {
+                            // Not the first rtp packet of the stream after seeking, discarding.
+                            track->mAllowedStaleAccessUnits--;
+                            ALOGV("discarding stale access unit (0x%x : 0x%x)",
+                                 seqNum, track->mFirstSeqNumInSegment);
+                            break;
+                        }
+                    } else { // track->mAllowedStaleAccessUnits <= 0
+                        mNumAccessUnitsReceived = 0;
+                        ALOGW_IF(track->mAllowedStaleAccessUnits == 0,
+                             "Still no first rtp packet after %d stale ones",
+                             kMaxAllowedStaleAccessUnits);
+                        track->mAllowedStaleAccessUnits = -1;
+                        break;
+                    }
+
+                    // Now found the first rtp packet of the stream after seeking.
+                    track->mFirstSeqNumInSegment = seqNum;
+                    track->mNewSegment = false;
+                }
+
                 if (seqNum < track->mFirstSeqNumInSegment) {
                     ALOGV("dropping stale access-unit (%d < %d)",
                          seqNum, track->mFirstSeqNumInSegment);
                     break;
                 }
 
-                if (track->mNewSegment) {
-                    track->mNewSegment = false;
-                }
-
                 onAccessUnitComplete(trackIndex, accessUnit);
                 break;
             }
@@ -1336,6 +1363,12 @@
                 mPausing = false;
                 mSeekPending = false;
 
+                // Discard all stale access units.
+                for (size_t i = 0; i < mTracks.size(); ++i) {
+                    TrackInfo *track = &mTracks.editItemAt(i);
+                    track->mPackets.clear();
+                }
+
                 sp<AMessage> msg = mNotify->dup();
                 msg->setInt32("what", kWhatSeekDone);
                 msg->post();
@@ -1497,6 +1530,7 @@
             TrackInfo *info = &mTracks.editItemAt(trackIndex);
             info->mFirstSeqNumInSegment = seq;
             info->mNewSegment = true;
+            info->mAllowedStaleAccessUnits = kMaxAllowedStaleAccessUnits;
 
             CHECK(GetAttribute((*it).c_str(), "rtptime", &val));
 
@@ -1540,6 +1574,7 @@
         bool mUsingInterleavedTCP;
         uint32_t mFirstSeqNumInSegment;
         bool mNewSegment;
+        int32_t mAllowedStaleAccessUnits;
 
         uint32_t mRTPAnchor;
         int64_t mNTPAnchorUs;
@@ -1623,6 +1658,7 @@
         info->mUsingInterleavedTCP = false;
         info->mFirstSeqNumInSegment = 0;
         info->mNewSegment = true;
+        info->mAllowedStaleAccessUnits = kMaxAllowedStaleAccessUnits;
         info->mRTPSocket = -1;
         info->mRTCPSocket = -1;
         info->mRTPAnchor = 0;
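
The XOR-and-mask test works because ARTPSource extends RTP's 16-bit sequence numbers to 32 bits, while the seq field in the PLAY response's RTP-Info is only 16 bits wide, so equality must be decided on the low 16 bits alone. As a standalone predicate:

    #include <cstdint>

    // True iff the extended sequence number matches the 16-bit RTP-Info seq.
    bool matchesRtpInfoSeq(uint32_t extendedSeq, uint32_t rtpInfoSeq) {
        return ((extendedSeq ^ rtpInfoSeq) & 0xffff) == 0;
    }
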
diff --git a/media/libstagefright/webm/WebmWriter.cpp b/media/libstagefright/webm/WebmWriter.cpp
index a1ac253..e58964d 100644
--- a/media/libstagefright/webm/WebmWriter.cpp
+++ b/media/libstagefright/webm/WebmWriter.cpp
@@ -84,9 +84,13 @@
 sp<WebmElement> WebmWriter::videoTrack(const sp<MetaData>& md) {
     int32_t width, height;
     const char *mimeType;
-    CHECK(md->findInt32(kKeyWidth, &width));
-    CHECK(md->findInt32(kKeyHeight, &height));
-    CHECK(md->findCString(kKeyMIMEType, &mimeType));
+    if (!md->findInt32(kKeyWidth, &width)
+            || !md->findInt32(kKeyHeight, &height)
+            || !md->findCString(kKeyMIMEType, &mimeType)) {
+        ALOGE("Missing format keys for video track");
+        md->dumpToLog();
+        return NULL;
+    }
     const char *codec;
     if (!strncasecmp(
             mimeType,
@@ -99,7 +103,8 @@
             strlen(MEDIA_MIMETYPE_VIDEO_VP9))) {
         codec = "V_VP9";
     } else {
-        CHECK(!"Unsupported codec");
+        ALOGE("Unsupported codec: %s", mimeType);
+        return NULL;
     }
     return WebmElement::VideoTrackEntry(codec, width, height, md);
 }
@@ -114,10 +119,14 @@
     const void *headerData3;
     size_t headerSize1, headerSize2 = sizeof(headerData2), headerSize3;
 
-    CHECK(md->findInt32(kKeyChannelCount, &nChannels));
-    CHECK(md->findInt32(kKeySampleRate, &samplerate));
-    CHECK(md->findData(kKeyVorbisInfo, &type, &headerData1, &headerSize1));
-    CHECK(md->findData(kKeyVorbisBooks, &type, &headerData3, &headerSize3));
+    if (!md->findInt32(kKeyChannelCount, &nChannels)
+            || !md->findInt32(kKeySampleRate, &samplerate)
+            || !md->findData(kKeyVorbisInfo, &type, &headerData1, &headerSize1)
+            || !md->findData(kKeyVorbisBooks, &type, &headerData3, &headerSize3)) {
+        ALOGE("Missing format keys for audio track");
+        md->dumpToLog();
+        return NULL;
+    }
 
     size_t codecPrivateSize = 1;
     codecPrivateSize += XiphLaceCodeLen(headerSize1);
@@ -243,6 +252,11 @@
     mFd = -1;
     mInitCheck = NO_INIT;
     mStarted = false;
+    for (size_t ix = 0; ix < kMaxStreams; ++ix) {
+        mStreams[ix].mTrackEntry.clear();
+        mStreams[ix].mSource.clear();
+    }
+    mStreamsInOrder.clear();
 }
 
 status_t WebmWriter::reset() {
@@ -275,6 +289,8 @@
         if (durationUs < minDurationUs) {
             minDurationUs = durationUs;
         }
+
+        mStreams[i].mThread.clear();
     }
 
     if (numTracks() > 1) {
@@ -389,6 +405,11 @@
     // Go ahead to add the track.
     mStreams[streamIndex].mSource = source;
     mStreams[streamIndex].mTrackEntry = mStreams[streamIndex].mMakeTrack(source->getFormat());
+    if (mStreams[streamIndex].mTrackEntry == NULL) {
+        mStreams[streamIndex].mSource.clear();
+        return BAD_VALUE;
+    }
+    mStreamsInOrder.push_back(mStreams[streamIndex].mTrackEntry);
 
     return OK;
 }
@@ -429,7 +450,10 @@
             mTimeCodeScale = tcsl;
         }
     }
-    CHECK_GT(mTimeCodeScale, 0);
+    if (mTimeCodeScale == 0) {
+        ALOGE("movie time scale is 0");
+        return BAD_VALUE;
+    }
     ALOGV("movie time scale: %" PRIu64, mTimeCodeScale);
 
     /*
@@ -451,10 +475,8 @@
     info = WebmElement::SegmentInfo(mTimeCodeScale, 0);
 
     List<sp<WebmElement> > children;
-    for (size_t i = 0; i < kMaxStreams; ++i) {
-        if (mStreams[i].mTrackEntry != NULL) {
-            children.push_back(mStreams[i].mTrackEntry);
-        }
+    for (size_t i = 0; i < mStreamsInOrder.size(); ++i) {
+        children.push_back(mStreamsInOrder[i]);
     }
     tracks = new WebmMaster(kMkvTracks, children);
 
diff --git a/media/libstagefright/webm/WebmWriter.h b/media/libstagefright/webm/WebmWriter.h
index 4a7f506..dd1fba3 100644
--- a/media/libstagefright/webm/WebmWriter.h
+++ b/media/libstagefright/webm/WebmWriter.h
@@ -110,6 +110,7 @@
         }
     };
     WebmStream mStreams[kMaxStreams];
+    Vector<sp<WebmElement>> mStreamsInOrder;
 
     sp<WebmFrameSinkThread> mSinkThread;
 
diff --git a/media/ndk/NdkImageReader.cpp b/media/ndk/NdkImageReader.cpp
index 387a302..30aa7fb 100644
--- a/media/ndk/NdkImageReader.cpp
+++ b/media/ndk/NdkImageReader.cpp
@@ -95,10 +95,11 @@
 AImageReader::FrameListener::setImageListener(AImageReader_ImageListener* listener) {
     Mutex::Autolock _l(mLock);
     if (listener == nullptr) {
-        ALOGE("AImageReader: listener is null!");
-        return AMEDIA_ERROR_INVALID_PARAMETER;
+        mListener.context = nullptr;
+        mListener.onImageAvailable = nullptr;
+    } else {
+        mListener = *listener;
     }
-    mListener = *listener;
     return AMEDIA_OK;
 }
 
@@ -575,8 +576,8 @@
 media_status_t AImageReader_setImageListener(
         AImageReader* reader, AImageReader_ImageListener* listener) {
     ALOGV("%s", __FUNCTION__);
-    if (reader == nullptr || listener == nullptr) {
-        ALOGE("%s: invalid argument! read %p listener %p", __FUNCTION__, reader, listener);
+    if (reader == nullptr) {
+        ALOGE("%s: invalid argument! reader %p", __FUNCTION__, reader);
         return AMEDIA_ERROR_INVALID_PARAMETER;
     }
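
With this change a null listener is an explicit "unregister" rather than an error; only a null reader is rejected. Hypothetical usage, assuming 'reader' is a valid AImageReader:

    // Stop receiving onImageAvailable callbacks; the stored context and
    // callback pointer are cleared rather than rejected.
    media_status_t res = AImageReader_setImageListener(reader, nullptr);
    // res == AMEDIA_OK on success.
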
 
diff --git a/services/audiopolicy/engineconfigurable/parameter-framework/examples/Settings/device_for_input_source.pfw b/services/audiopolicy/engineconfigurable/parameter-framework/examples/Settings/device_for_input_source.pfw
index 9fd459a..a990879 100644
--- a/services/audiopolicy/engineconfigurable/parameter-framework/examples/Settings/device_for_input_source.pfw
+++ b/services/audiopolicy/engineconfigurable/parameter-framework/examples/Settings/device_for_input_source.pfw
@@ -242,6 +242,7 @@
 				loopback = 0
 				ip = 0
 				bus = 0
+				stub = 0
 
 	domain: DefaultAndMic
 		conf: A2dp
@@ -380,7 +381,7 @@
 				back_mic = 0
 				builtin_mic = 0
 
-	domain: VoiceRecognitionAndHotword
+	domain: VoiceRecognitionAndUnprocessedAndHotword
 		conf: ScoHeadset
 			ForceUseForRecord Is ForceBtSco
 			AvailableInputDevices Includes BluetoothScoHeadset
@@ -391,6 +392,11 @@
 					wired_headset = 0
 					usb_device = 0
 					builtin_mic = 0
+				component: unprocessed/applicable_input_device/mask
+					bluetooth_sco_headset = 1
+					wired_headset = 0
+					usb_device = 0
+					builtin_mic = 0
 				component: hotword/applicable_input_device/mask
 					bluetooth_sco_headset = 1
 					wired_headset = 0
@@ -406,6 +412,11 @@
 					wired_headset = 1
 					usb_device = 0
 					builtin_mic = 0
+				component: unprocessed/applicable_input_device/mask
+					bluetooth_sco_headset = 0
+					wired_headset = 1
+					usb_device = 0
+					builtin_mic = 0
 				component: hotword/applicable_input_device/mask
 					bluetooth_sco_headset = 0
 					wired_headset = 1
@@ -421,6 +432,11 @@
 					wired_headset = 0
 					usb_device = 1
 					builtin_mic = 0
+				component: unprocessed/applicable_input_device/mask
+					bluetooth_sco_headset = 0
+					wired_headset = 0
+					usb_device = 1
+					builtin_mic = 0
 				component: hotword/applicable_input_device/mask
 					bluetooth_sco_headset = 0
 					wired_headset = 0
@@ -436,6 +452,11 @@
 					wired_headset = 0
 					usb_device = 0
 					builtin_mic = 1
+				component: unprocessed/applicable_input_device/mask
+					bluetooth_sco_headset = 0
+					wired_headset = 0
+					usb_device = 0
+					builtin_mic = 1
 				component: hotword/applicable_input_device/mask
 					bluetooth_sco_headset = 0
 					wired_headset = 0
@@ -449,6 +470,11 @@
 					wired_headset = 0
 					usb_device = 0
 					builtin_mic = 0
+				component: unprocessed/applicable_input_device/mask
+					bluetooth_sco_headset = 0
+					wired_headset = 0
+					usb_device = 0
+					builtin_mic = 0
 				component: hotword/applicable_input_device/mask
 					bluetooth_sco_headset = 0
 					wired_headset = 0
diff --git a/services/camera/libcameraservice/device3/Camera3BufferManager.cpp b/services/camera/libcameraservice/device3/Camera3BufferManager.cpp
index ae20887..1f01144 100644
--- a/services/camera/libcameraservice/device3/Camera3BufferManager.cpp
+++ b/services/camera/libcameraservice/device3/Camera3BufferManager.cpp
@@ -43,7 +43,8 @@
 Camera3BufferManager::~Camera3BufferManager() {
 }
 
-status_t Camera3BufferManager::registerStream(const StreamInfo& streamInfo) {
+status_t Camera3BufferManager::registerStream(wp<Camera3OutputStream>& stream,
+        const StreamInfo& streamInfo) {
     ATRACE_CALL();
 
     int streamId = streamInfo.streamId;
@@ -112,6 +113,8 @@
     }
     currentStreamSet.streamInfoMap.add(streamId, streamInfo);
     currentStreamSet.handoutBufferCountMap.add(streamId, 0);
+    currentStreamSet.attachedBufferCountMap.add(streamId, 0);
+    mStreamMap.add(streamId, stream);
 
     // The max allowed buffer count should be the max of buffer count of each stream inside a stream
     // set.
@@ -124,6 +127,7 @@
 
 status_t Camera3BufferManager::unregisterStream(int streamId, int streamSetId) {
     ATRACE_CALL();
+
     Mutex::Autolock l(mLock);
     ALOGV("%s: unregister stream %d with stream set %d", __FUNCTION__,
             streamId, streamSetId);
@@ -142,9 +146,11 @@
     StreamSet& currentSet = mStreamSetMap.editValueFor(streamSetId);
     BufferList& freeBufs = currentSet.freeBuffers;
     BufferCountMap& handOutBufferCounts = currentSet.handoutBufferCountMap;
+    BufferCountMap& attachedBufferCounts = currentSet.attachedBufferCountMap;
     InfoMap& infoMap = currentSet.streamInfoMap;
     removeBuffersFromBufferListLocked(freeBufs, streamId);
     handOutBufferCounts.removeItem(streamId);
+    attachedBufferCounts.removeItem(streamId);
 
     // Remove the stream info from info map and recalculate the buffer count water mark.
     infoMap.removeItem(streamId);
@@ -154,6 +160,8 @@
             currentSet.maxAllowedBufferCount = infoMap[i].totalBufferCount;
         }
     }
+    mStreamMap.removeItem(streamId);
+
     // Lazy solution: when a stream is unregistered, the streams will be reconfigured, reset
     // the water mark and let it grow again.
     currentSet.allocatedBufferWaterMark = 0;
@@ -193,6 +201,16 @@
         return INVALID_OPERATION;
     }
 
+    BufferCountMap& attachedBufferCounts = streamSet.attachedBufferCountMap;
+    size_t& attachedBufferCount = attachedBufferCounts.editValueFor(streamId);
+    if (attachedBufferCount > bufferCount) {
+        // We've already attached more buffers to this stream than we currently have
+        // outstanding, so have the stream just use an already-attached buffer
+        bufferCount++;
+        return ALREADY_EXISTS;
+    }
+    ALOGV("Stream %d set %d: Get buffer for stream: Allocate new", streamId, streamSetId);
+
     GraphicBufferEntry buffer =
             getFirstBufferFromBufferListLocked(streamSet.freeBuffers, streamId);
 
@@ -215,8 +233,9 @@
             ALOGV("%s: allocation done", __FUNCTION__);
         }
 
-        // Increase the hand-out buffer count for tracking purpose.
+        // Increase the hand-out and attached buffer counts for tracking purposes.
         bufferCount++;
+        attachedBufferCount++;
         // Update the water mark to be the max hand-out buffer count + 1. An additional buffer is
         // added to reduce the chance of buffer allocation during stream steady state, especially
         // for cases where one stream is active, the other stream may request some buffers randomly.
@@ -235,12 +254,25 @@
         // buffers for them.
         StreamId firstOtherStreamId = CAMERA3_STREAM_ID_INVALID;
         if (streamSet.streamInfoMap.size() > 1) {
+            bool freeBufferIsAttached = false;
             for (size_t i = 0; i < streamSet.streamInfoMap.size(); i++) {
                 firstOtherStreamId = streamSet.streamInfoMap[i].streamId;
-                if (firstOtherStreamId != streamId &&
-                        hasBufferForStreamLocked(streamSet.freeBuffers, firstOtherStreamId)) {
-                    break;
+                if (firstOtherStreamId != streamId) {
+
+                    size_t otherBufferCount =
+                            streamSet.handoutBufferCountMap.valueFor(firstOtherStreamId);
+                    size_t otherAttachedBufferCount =
+                            streamSet.attachedBufferCountMap.valueFor(firstOtherStreamId);
+                    if (otherAttachedBufferCount > otherBufferCount) {
+                        freeBufferIsAttached = true;
+                        break;
+                    }
+                    if (hasBufferForStreamLocked(streamSet.freeBuffers, firstOtherStreamId)) {
+                        freeBufferIsAttached = false;
+                        break;
+                    }
                 }
+                firstOtherStreamId = CAMERA3_STREAM_ID_INVALID;
             }
             if (firstOtherStreamId == CAMERA3_STREAM_ID_INVALID) {
                 return OK;
@@ -249,12 +281,39 @@
             // This will drop the reference to one free buffer, which will effectively free one
             // buffer (from the free buffer list) for the inactive streams.
             size_t totalAllocatedBufferCount = streamSet.freeBuffers.size();
-            for (size_t i = 0; i < streamSet.handoutBufferCountMap.size(); i++) {
-                totalAllocatedBufferCount += streamSet.handoutBufferCountMap[i];
+            for (size_t i = 0; i < streamSet.attachedBufferCountMap.size(); i++) {
+                totalAllocatedBufferCount += streamSet.attachedBufferCountMap[i];
             }
             if (totalAllocatedBufferCount > streamSet.allocatedBufferWaterMark) {
                 ALOGV("%s: free a buffer from stream %d", __FUNCTION__, firstOtherStreamId);
-                getFirstBufferFromBufferListLocked(streamSet.freeBuffers, firstOtherStreamId);
+                if (freeBufferIsAttached) {
+                    ALOGV("Stream %d: Freeing buffer: detach", firstOtherStreamId);
+                    sp<Camera3OutputStream> stream =
+                            mStreamMap.valueFor(firstOtherStreamId).promote();
+                    if (stream == nullptr) {
+                        ALOGE("%s: unable to promote stream %d to detach buffer", __FUNCTION__,
+                                firstOtherStreamId);
+                        return INVALID_OPERATION;
+                    }
+
+                    // Detach and then drop the buffer.
+                    //
+                    // Need to unlock because the stream may also be calling
+                    // into the buffer manager in parallel to signal buffer
+                    // release, or acquire a new buffer.
+                    {
+                        mLock.unlock();
+                        sp<GraphicBuffer> buffer;
+                        stream->detachBuffer(&buffer, /*fenceFd*/ nullptr);
+                        mLock.lock();
+                    }
+                    size_t& otherAttachedBufferCount =
+                            streamSet.attachedBufferCountMap.editValueFor(firstOtherStreamId);
+                    otherAttachedBufferCount--;
+                } else {
+                    // Droppable buffer is in the free buffer list, grab and drop
+                    getFirstBufferFromBufferListLocked(streamSet.freeBuffers, firstOtherStreamId);
+                }
             }
         }
     } else {
@@ -265,6 +324,37 @@
     return OK;
 }
 
+status_t Camera3BufferManager::onBufferReleased(int streamId, int streamSetId) {
+    ATRACE_CALL();
+    Mutex::Autolock l(mLock);
+
+    ALOGV("Stream %d set %d: Buffer released", streamId, streamSetId);
+    if (mAllocator == NULL) {
+        ALOGE("%s: allocator is NULL, buffer manager is bad state.", __FUNCTION__);
+        return INVALID_OPERATION;
+    }
+
+    if (!checkIfStreamRegisteredLocked(streamId, streamSetId)) {
+        ALOGV("%s: signaling buffer release for an already unregistered stream "
+                "(stream %d with set id %d)", __FUNCTION__, streamId, streamSetId);
+        return OK;
+    }
+
+    if (mGrallocVersion < HARDWARE_DEVICE_API_VERSION(1,0)) {
+        StreamSet& streamSet = mStreamSetMap.editValueFor(streamSetId);
+        BufferCountMap& handOutBufferCounts = streamSet.handoutBufferCountMap;
+        size_t& bufferCount = handOutBufferCounts.editValueFor(streamId);
+        bufferCount--;
+        ALOGV("%s: Stream %d set %d: Buffer count now %zu", __FUNCTION__, streamId, streamSetId,
+                bufferCount);
+    } else {
+        // TODO: implement gralloc V1 support
+        return BAD_VALUE;
+    }
+
+    return OK;
+}
+
 status_t Camera3BufferManager::returnBufferForStream(int streamId,
         int streamSetId, const sp<GraphicBuffer>& buffer, int fenceFd) {
     ATRACE_CALL();
@@ -295,10 +385,12 @@
             }
         }
 
-        // Update the hand-out buffer count for this buffer.
+        // Update the handed out and attached buffer count for this buffer.
         BufferCountMap& handOutBufferCounts = streamSet.handoutBufferCountMap;
         size_t& bufferCount = handOutBufferCounts.editValueFor(streamId);
         bufferCount--;
+        size_t& attachedBufferCount = streamSet.attachedBufferCountMap.editValueFor(streamId);
+        attachedBufferCount--;
     } else {
         // TODO: implement this.
         return BAD_VALUE;
@@ -329,6 +421,13 @@
             lines.appendFormat("            stream id: %d, buffer count: %zu.\n",
                     streamId, bufferCount);
         }
+        lines.appendFormat("          Attached buffer counts:\n");
+        for (size_t m = 0; m < mStreamSetMap[i].attachedBufferCountMap.size(); m++) {
+            int streamId = mStreamSetMap[i].attachedBufferCountMap.keyAt(m);
+            size_t bufferCount = mStreamSetMap[i].attachedBufferCountMap.valueAt(m);
+            lines.appendFormat("            stream id: %d, attached buffer count: %zu.\n",
+                    streamId, bufferCount);
+        }
 
         lines.appendFormat("          Free buffer count: %zu\n",
                 mStreamSetMap[i].freeBuffers.size());
@@ -394,9 +493,6 @@
         }
     }
 
-    ALOGW_IF(i == bufferList.end(), "%s: Unable to find buffers for stream %d",
-            __FUNCTION__, streamId);
-
     return OK;
 }
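
One subtlety in getBufferForStream() above: the manager lock is dropped around stream->detachBuffer() because the stream can call back into the buffer manager (e.g. onBufferReleased) on the same path, which would otherwise deadlock. A minimal sketch of the pattern, using std::mutex and a hypothetical Stream type:

    #include <mutex>

    struct Stream {
        void detachBuffer() { /* may re-enter the buffer manager */ }
    };

    void detachWithoutDeadlock(std::mutex &managerLock, Stream &stream) {
        // managerLock is held on entry.
        managerLock.unlock();
        stream.detachBuffer();     // touch no manager state while unlocked
        managerLock.lock();
        // Caution: manager state read before unlock() may be stale here.
    }
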
 
diff --git a/services/camera/libcameraservice/device3/Camera3BufferManager.h b/services/camera/libcameraservice/device3/Camera3BufferManager.h
index 7942ae6..ab6541e 100644
--- a/services/camera/libcameraservice/device3/Camera3BufferManager.h
+++ b/services/camera/libcameraservice/device3/Camera3BufferManager.h
@@ -29,6 +29,7 @@
 namespace camera3 {
 
 struct StreamInfo;
+class Camera3OutputStream;
 
 /**
  * A class managing the graphic buffers that is used by camera output streams. It allocates and
@@ -81,7 +82,7 @@
      *                     and other streams that were already registered with the same stream set
      *                     ID.
      */
-    status_t registerStream(const StreamInfo &streamInfo);
+    status_t registerStream(wp<Camera3OutputStream>& stream, const StreamInfo &streamInfo);
 
     /**
      * This method unregisters a stream from this buffer manager.
@@ -114,6 +115,8 @@
      * Return values:
      *
      *  OK:        Getting buffer for this stream was successful.
+     *  ALREADY_EXISTS: Enough free buffers are already attached to this output buffer queue;
+     *             the caller should just dequeue from the buffer queue.
      *  BAD_VALUE: stream ID or streamSetId are invalid, or stream ID and stream set ID
      *             combination doesn't match what was registered, or this stream wasn't registered
      *             to this buffer manager before.
@@ -122,6 +125,28 @@
     status_t getBufferForStream(int streamId, int streamSetId, sp<GraphicBuffer>* gb, int* fenceFd);
 
     /**
+     * This method notifies the manager that a buffer has been released by the consumer.
+     *
+     * The buffer is not returned to the buffer manager; it remains attached to
+     * its stream and is available to that stream for dequeuing.
+     *
+     * The notification lets the manager know how many buffers are directly available to the stream.
+     *
+     * If onBufferReleased is called for a given released buffer,
+     * returnBufferForStream must not be called for the same buffer until the
+     * buffer has been reused. If the manager needs the released buffer, it
+     * will call detachBuffer on the stream instead.
+     *
+     * Return values:
+     *
+     *  OK:        Buffer release was processed successfully
+     *  BAD_VALUE: stream ID or streamSetId are invalid, or stream ID and stream set ID
+     *             combination doesn't match what was registered, or this stream wasn't registered
+     *             to this buffer manager before.
+     */
+    status_t onBufferReleased(int streamId, int streamSetId);
+
+    /**
      * This method returns a buffer for a stream to this buffer manager.
      *
      * When a buffer is returned, it is treated as a free buffer and may either be reused for future
@@ -245,6 +270,12 @@
          * The count of the buffers that were handed out to the streams of this set.
          */
         BufferCountMap handoutBufferCountMap;
+        /**
+         * The count of the buffers that are attached to the streams of this set.
+         * An attached buffer may be free or handed out.
+         */
+        BufferCountMap attachedBufferCountMap;
+
         StreamSet() {
             allocatedBufferWaterMark = 0;
             maxAllowedBufferCount = 0;
@@ -256,6 +287,7 @@
      */
     typedef int StreamSetId;
     KeyedVector<StreamSetId, StreamSet> mStreamSetMap;
+    KeyedVector<StreamId, wp<Camera3OutputStream>> mStreamMap;
 
     // TODO: There is no easy way to query the Gralloc version in this code yet, we have different
     // code paths for different Gralloc versions, hardcode something here for now.
diff --git a/services/camera/libcameraservice/device3/Camera3Device.cpp b/services/camera/libcameraservice/device3/Camera3Device.cpp
index c0de95a..96f9338 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.cpp
+++ b/services/camera/libcameraservice/device3/Camera3Device.cpp
@@ -794,19 +794,12 @@
 
     if (mStatus == STATUS_UNCONFIGURED || mNeedConfig) {
         res = configureStreamsLocked();
-        // Stream configuration failed due to unsupported configuration.
-        // Device back to unconfigured state. Client might try other configuraitons
-        if (res == BAD_VALUE && mStatus == STATUS_UNCONFIGURED) {
-            CLOGE("No streams configured");
-            return NULL;
-        }
-        // Stream configuration failed for other reason. Fatal.
+        // Stream configuration failed. Client might try other configurations.
         if (res != OK) {
-            SET_ERR_L("Can't set up streams: %s (%d)", strerror(-res), res);
+            CLOGE("Can't set up streams: %s (%d)", strerror(-res), res);
             return NULL;
-        }
-        // Stream configuration successfully configure to empty stream configuration.
-        if (mStatus == STATUS_UNCONFIGURED) {
+        } else if (mStatus == STATUS_UNCONFIGURED) {
+            // Stream configuration succeeded but resulted in an empty stream configuration.
             CLOGE("No streams configured");
             return NULL;
         }
@@ -1823,6 +1816,33 @@
     return false;
 }
 
+void Camera3Device::cancelStreamsConfigurationLocked() {
+    int res = OK;
+    if (mInputStream != NULL && mInputStream->isConfiguring()) {
+        res = mInputStream->cancelConfiguration();
+        if (res != OK) {
+            CLOGE("Can't cancel configuring input stream %d: %s (%d)",
+                    mInputStream->getId(), strerror(-res), res);
+        }
+    }
+
+    for (size_t i = 0; i < mOutputStreams.size(); i++) {
+        sp<Camera3OutputStreamInterface> outputStream = mOutputStreams.editValueAt(i);
+        if (outputStream->isConfiguring()) {
+            res = outputStream->cancelConfiguration();
+            if (res != OK) {
+                CLOGE("Can't cancel configuring output stream %d: %s (%d)",
+                        outputStream->getId(), strerror(-res), res);
+            }
+        }
+    }
+
+    // Return state to that at start of call, so that future configures
+    // properly clean things up
+    internalUpdateStatusLocked(STATUS_UNCONFIGURED);
+    mNeedConfig = true;
+}
+
 status_t Camera3Device::configureStreamsLocked() {
     ATRACE_CALL();
     status_t res;
@@ -1862,7 +1882,8 @@
         camera3_stream_t *inputStream;
         inputStream = mInputStream->startConfiguration();
         if (inputStream == NULL) {
-            SET_ERR_L("Can't start input stream configuration");
+            CLOGE("Can't start input stream configuration");
+            cancelStreamsConfigurationLocked();
             return INVALID_OPERATION;
         }
         streams.add(inputStream);
@@ -1881,7 +1902,8 @@
         camera3_stream_t *outputStream;
         outputStream = mOutputStreams.editValueAt(i)->startConfiguration();
         if (outputStream == NULL) {
-            SET_ERR_L("Can't start output stream configuration");
+            CLOGE("Can't start output stream configuration");
+            cancelStreamsConfigurationLocked();
             return INVALID_OPERATION;
         }
         streams.add(outputStream);
@@ -1898,35 +1920,8 @@
     if (res == BAD_VALUE) {
         // HAL rejected this set of streams as unsupported, clean up config
         // attempt and return to unconfigured state
-        if (mInputStream != NULL && mInputStream->isConfiguring()) {
-            res = mInputStream->cancelConfiguration();
-            if (res != OK) {
-                SET_ERR_L("Can't cancel configuring input stream %d: %s (%d)",
-                        mInputStream->getId(), strerror(-res), res);
-                return res;
-            }
-        }
-
-        for (size_t i = 0; i < mOutputStreams.size(); i++) {
-            sp<Camera3OutputStreamInterface> outputStream =
-                    mOutputStreams.editValueAt(i);
-            if (outputStream->isConfiguring()) {
-                res = outputStream->cancelConfiguration();
-                if (res != OK) {
-                    SET_ERR_L(
-                        "Can't cancel configuring output stream %d: %s (%d)",
-                        outputStream->getId(), strerror(-res), res);
-                    return res;
-                }
-            }
-        }
-
-        // Return state to that at start of call, so that future configures
-        // properly clean things up
-        internalUpdateStatusLocked(STATUS_UNCONFIGURED);
-        mNeedConfig = true;
-
-        ALOGV("%s: Camera %d: Stream configuration failed", __FUNCTION__, mId);
+        CLOGE("Set of requested inputs/outputs not supported by HAL");
+        cancelStreamsConfigurationLocked();
         return BAD_VALUE;
     } else if (res != OK) {
         // Some other kind of error from configure_streams - this is not
@@ -1943,9 +1938,10 @@
     if (mInputStream != NULL && mInputStream->isConfiguring()) {
         res = mInputStream->finishConfiguration(mHal3Device);
         if (res != OK) {
-            SET_ERR_L("Can't finish configuring input stream %d: %s (%d)",
+            CLOGE("Can't finish configuring input stream %d: %s (%d)",
                     mInputStream->getId(), strerror(-res), res);
-            return res;
+            cancelStreamsConfigurationLocked();
+            return BAD_VALUE;
         }
     }
 
@@ -1955,16 +1951,17 @@
         if (outputStream->isConfiguring()) {
             res = outputStream->finishConfiguration(mHal3Device);
             if (res != OK) {
-                SET_ERR_L("Can't finish configuring output stream %d: %s (%d)",
+                CLOGE("Can't finish configuring output stream %d: %s (%d)",
                         outputStream->getId(), strerror(-res), res);
-                return res;
+                cancelStreamsConfigurationLocked();
+                return BAD_VALUE;
             }
         }
     }
 
     // Request thread needs to know to avoid using repeat-last-settings protocol
     // across configure_streams() calls
-    mRequestThread->configurationComplete();
+    mRequestThread->configurationComplete(mIsConstrainedHighSpeedConfiguration);
 
     // Boost priority of request thread for high speed recording to SCHED_FIFO
     if (mIsConstrainedHighSpeedConfiguration) {
@@ -2673,6 +2670,7 @@
         mParent(parent),
         mStatusTracker(statusTracker),
         mHal3Device(hal3Device),
+        mListener(nullptr),
         mId(getId(parent)),
         mReconfigured(false),
         mDoPause(false),
@@ -2683,7 +2681,8 @@
         mCurrentPreCaptureTriggerId(0),
         mRepeatingLastFrameNumber(
             hardware::camera2::ICameraDeviceUser::NO_IN_FLIGHT_REPEATING_FRAMES),
-        mAeLockAvailable(aeLockAvailable) {
+        mAeLockAvailable(aeLockAvailable),
+        mPrepareVideoStream(false) {
     mStatusId = statusTracker->addComponent();
 }
 
@@ -2693,9 +2692,11 @@
     mListener = listener;
 }
 
-void Camera3Device::RequestThread::configurationComplete() {
+void Camera3Device::RequestThread::configurationComplete(bool isConstrainedHighSpeed) {
     Mutex::Autolock l(mRequestLock);
     mReconfigured = true;
+    // Prepare video stream for high speed recording.
+    mPrepareVideoStream = isConstrainedHighSpeed;
 }
 
 status_t Camera3Device::RequestThread::queueRequestList(
@@ -3197,8 +3198,25 @@
                 captureRequest->mOutputStreams.size());
         halRequest->output_buffers = outputBuffers->array();
         for (size_t i = 0; i < captureRequest->mOutputStreams.size(); i++) {
-            res = captureRequest->mOutputStreams.editItemAt(i)->
-                    getBuffer(&outputBuffers->editItemAt(i));
+            sp<Camera3OutputStreamInterface> outputStream = captureRequest->mOutputStreams.editItemAt(i);
+
+            // Prepare video buffers for high speed recording on the first video request.
+            if (mPrepareVideoStream && outputStream->isVideoStream()) {
+                // Only try to prepare video stream on the first video request.
+                mPrepareVideoStream = false;
+
+                res = outputStream->startPrepare(Camera3StreamInterface::ALLOCATE_PIPELINE_MAX);
+                while (res == NOT_ENOUGH_DATA) {
+                    res = outputStream->prepareNextBuffer();
+                }
+                if (res != OK) {
+                    ALOGW("%s: Preparing video buffers for high speed failed: %s (%d)",
+                        __FUNCTION__, strerror(-res), res);
+                    outputStream->cancelPrepare();
+                }
+            }
+
+            res = outputStream->getBuffer(&outputBuffers->editItemAt(i));
             if (res != OK) {
                 // Can't get output buffer from gralloc queue - this could be due to
                 // abandoned queue or other consumer misbehavior, so not a fatal
@@ -3710,7 +3728,8 @@
  */
 
 Camera3Device::PreparerThread::PreparerThread() :
-        Thread(/*canCallJava*/false), mActive(false), mCancelNow(false) {
+        Thread(/*canCallJava*/false), mListener(nullptr),
+        mActive(false), mCancelNow(false) {
 }
 
 Camera3Device::PreparerThread::~PreparerThread() {
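
The high-speed path pre-allocates the video stream's buffers on the first video request by pumping the prepare sequence: startPrepare() and prepareNextBuffer() keep returning NOT_ENOUGH_DATA while buffers remain to be allocated. Condensed into a helper (status codes from utils/Errors.h; a failure is non-fatal and falls back to lazy allocation):

    status_t prepareFully(const sp<Camera3OutputStreamInterface> &stream) {
        status_t res = stream->startPrepare(
                Camera3StreamInterface::ALLOCATE_PIPELINE_MAX);
        while (res == NOT_ENOUGH_DATA) {   // more buffers still to allocate
            res = stream->prepareNextBuffer();
        }
        if (res != OK) {
            stream->cancelPrepare();       // roll back the partial prepare
        }
        return res;
    }
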
diff --git a/services/camera/libcameraservice/device3/Camera3Device.h b/services/camera/libcameraservice/device3/Camera3Device.h
index 0366ef6..2aca57d 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.h
+++ b/services/camera/libcameraservice/device3/Camera3Device.h
@@ -367,6 +367,11 @@
     status_t           configureStreamsLocked();
 
     /**
+     * Cancel stream configuration that did not finish successfully.
+     */
+    void               cancelStreamsConfigurationLocked();
+
+    /**
      * Add a dummy stream to the current stream set as a workaround for
      * not allowing 0 streams in the camera HAL spec.
      */
@@ -450,7 +455,7 @@
         /**
          * Call after stream (re)-configuration is completed.
          */
-        void     configurationComplete();
+        void     configurationComplete(bool isConstrainedHighSpeed);
 
         /**
          * Set or clear the list of repeating requests. Does not block
@@ -638,6 +643,9 @@
 
         // Whether the device supports AE lock
         bool               mAeLockAvailable;
+
+        // Flag indicating if we should prepare video stream for video requests.
+        bool               mPrepareVideoStream;
     };
     sp<RequestThread> mRequestThread;
 
diff --git a/services/camera/libcameraservice/device3/Camera3DummyStream.cpp b/services/camera/libcameraservice/device3/Camera3DummyStream.cpp
index 5bf76bd..6354ef7 100644
--- a/services/camera/libcameraservice/device3/Camera3DummyStream.cpp
+++ b/services/camera/libcameraservice/device3/Camera3DummyStream.cpp
@@ -76,6 +76,13 @@
     return OK;
 }
 
+status_t Camera3DummyStream::detachBuffer(sp<GraphicBuffer>* buffer, int* fenceFd) {
+    (void) buffer;
+    (void) fenceFd;
+    // Do nothing
+    return OK;
+}
+
 status_t Camera3DummyStream::configureQueueLocked() {
     // Do nothing
     return OK;
diff --git a/services/camera/libcameraservice/device3/Camera3DummyStream.h b/services/camera/libcameraservice/device3/Camera3DummyStream.h
index 97c0c96..7b48daa 100644
--- a/services/camera/libcameraservice/device3/Camera3DummyStream.h
+++ b/services/camera/libcameraservice/device3/Camera3DummyStream.h
@@ -54,6 +54,8 @@
 
     status_t         setTransform(int transform);
 
+    virtual status_t detachBuffer(sp<GraphicBuffer>* buffer, int* fenceFd);
+
     /**
      * Return if this output stream is for video encoding.
      */
diff --git a/services/camera/libcameraservice/device3/Camera3OutputStream.cpp b/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
index d2b98e6..d09951a 100644
--- a/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
+++ b/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
@@ -120,24 +120,35 @@
 
     ANativeWindowBuffer* anb;
     int fenceFd = -1;
+    bool gotBufferFromManager = false;
+
     if (mUseBufferManager) {
         sp<GraphicBuffer> gb;
         res = mBufferManager->getBufferForStream(getId(), getStreamSetId(), &gb, &fenceFd);
-        if (res != OK) {
+        if (res == OK) {
+            // Attach this buffer to the bufferQueue: the buffer will be in dequeue state after a
+            // successful return.
+            anb = gb.get();
+            res = mConsumer->attachBuffer(anb);
+            if (res != OK) {
+                ALOGE("%s: Stream %d: Can't attach the output buffer to this surface: %s (%d)",
+                        __FUNCTION__, mId, strerror(-res), res);
+                return res;
+            }
+            gotBufferFromManager = true;
+            ALOGV("Stream %d: Attached new buffer", getId());
+        } else if (res == ALREADY_EXISTS) {
+            // Have sufficient free buffers already attached, can just
+            // dequeue from buffer queue
+            ALOGV("Stream %d: Reusing attached buffer", getId());
+            gotBufferFromManager = false;
+        } else if (res != OK) {
             ALOGE("%s: Stream %d: Can't get next output buffer from buffer manager: %s (%d)",
                     __FUNCTION__, mId, strerror(-res), res);
             return res;
         }
-        // Attach this buffer to the bufferQueue: the buffer will be in dequeue state after a
-        // successful return.
-        anb = gb.get();
-        res = mConsumer->attachBuffer(anb);
-        if (res != OK) {
-            ALOGE("%s: Stream %d: Can't attach the output buffer to this surface: %s (%d)",
-                    __FUNCTION__, mId, strerror(-res), res);
-            return res;
-        }
-    } else {
+    }
+    if (!gotBufferFromManager) {
         /**
          * Release the lock briefly to avoid deadlock for below scenario:
          * Thread 1: StreamingProcessor::startStream -> Camera3Stream::isConfiguring().
@@ -433,10 +444,15 @@
      * HAL3.2 devices may not support the dynamic buffer registeration.
      */
     if (mBufferManager != 0 && mSetId > CAMERA3_STREAM_SET_ID_INVALID) {
+        uint32_t consumerUsage = 0;
+        getEndpointUsage(&consumerUsage);
         StreamInfo streamInfo(
                 getId(), getStreamSetId(), getWidth(), getHeight(), getFormat(), getDataSpace(),
-                camera3_stream::usage, mTotalBufferCount, /*isConfigured*/true);
-        res = mBufferManager->registerStream(streamInfo);
+                camera3_stream::usage | consumerUsage, mTotalBufferCount,
+                /*isConfigured*/true);
+        wp<Camera3OutputStream> weakThis(this);
+        res = mBufferManager->registerStream(weakThis,
+                streamInfo);
         if (res == OK) {
             // Disable buffer allocation for this BufferQueue, buffer manager will take over
             // the buffer allocation responsibility.
@@ -561,34 +577,49 @@
         return;
     }
 
+    ALOGV("Stream %d: Buffer released", stream->getId());
+    status_t res = stream->mBufferManager->onBufferReleased(
+        stream->getId(), stream->getStreamSetId());
+    if (res != OK) {
+        ALOGE("%s: signaling buffer release to buffer manager failed: %s (%d).", __FUNCTION__,
+                strerror(-res), res);
+        stream->mState = STATE_ERROR;
+    }
+}
+
+status_t Camera3OutputStream::detachBuffer(sp<GraphicBuffer>* buffer, int* fenceFd) {
+    Mutex::Autolock l(mLock);
+
+    ALOGV("Stream %d: detachBuffer", getId());
+    if (buffer == nullptr) {
+        return BAD_VALUE;
+    }
+
     sp<Fence> fence;
-    sp<GraphicBuffer> buffer;
-    int fenceFd = -1;
-    status_t res = stream->mConsumer->detachNextBuffer(&buffer, &fence);
+    status_t res = mConsumer->detachNextBuffer(buffer, &fence);
     if (res == NO_MEMORY) {
        // This may rarely happen, which indicates that the released buffer was freed by another
        // call (e.g., attachBuffer or dequeueBuffer) before reaching here. We should notify the
        // buffer manager that this buffer has been freed. It's not fatal, but it should be
        // avoided, so we log a warning.
-        buffer = 0;
+        *buffer = 0;
         ALOGW("%s: the released buffer has already been freed by the buffer queue!", __FUNCTION__);
     } else if (res != OK) {
         // Other errors are fatal.
         ALOGE("%s: detach next buffer failed: %s (%d).", __FUNCTION__, strerror(-res), res);
-        stream->mState = STATE_ERROR;
-        return;
+        mState = STATE_ERROR;
+        return res;
     }
 
-    if (fence!= 0 && fence->isValid()) {
-        fenceFd = fence->dup();
+    if (fenceFd != nullptr) {
+        if (fence != 0 && fence->isValid()) {
+            *fenceFd = fence->dup();
+        } else {
+            *fenceFd = -1;
+        }
     }
-    res = stream->mBufferManager->returnBufferForStream(stream->getId(), stream->getStreamSetId(),
-                buffer, fenceFd);
-    if (res != OK) {
-        ALOGE("%s: return buffer to buffer manager failed: %s (%d).", __FUNCTION__,
-                strerror(-res), res);
-       stream->mState = STATE_ERROR;
-    }
+
+    return OK;
 }
 
 bool Camera3OutputStream::isConsumedByHWComposer() const {
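With the detach logic factored into detachBuffer, the caller (typically the buffer manager) drives the return path itself. A hypothetical caller following the contract — note that the NO_MEMORY race surfaces as OK with a null buffer, and that a dup'ed fence fd becomes the caller's to close:

    sp<GraphicBuffer> buffer;
    int fenceFd = -1;
    status_t res = stream->detachBuffer(&buffer, &fenceFd);
    if (res != OK) {
        return res;   // stream has already moved to STATE_ERROR
    }
    if (buffer == nullptr) {
        return OK;    // the queue freed the buffer first; nothing to recycle
    }
    // returnBufferToPool is a hypothetical helper that takes ownership of fenceFd.
    returnBufferToPool(buffer, fenceFd);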
diff --git a/services/camera/libcameraservice/device3/Camera3OutputStream.h b/services/camera/libcameraservice/device3/Camera3OutputStream.h
index a883448..7d28b05 100644
--- a/services/camera/libcameraservice/device3/Camera3OutputStream.h
+++ b/services/camera/libcameraservice/device3/Camera3OutputStream.h
@@ -131,6 +131,8 @@
           wp<Camera3OutputStream> mParent;
     };
 
+    virtual status_t detachBuffer(sp<GraphicBuffer>* buffer, int* fenceFd);
+
     /**
      * Set the graphic buffer manager to get/return the stream buffers.
      *
diff --git a/services/camera/libcameraservice/device3/Camera3OutputStreamInterface.h b/services/camera/libcameraservice/device3/Camera3OutputStreamInterface.h
index df89b34..50dce55 100644
--- a/services/camera/libcameraservice/device3/Camera3OutputStreamInterface.h
+++ b/services/camera/libcameraservice/device3/Camera3OutputStreamInterface.h
@@ -39,6 +39,17 @@
     * Return whether this output stream is for video encoding.
      */
     virtual bool isVideoStream() const = 0;
+
+    /**
+     * Detach an unused buffer from the stream.
+     *
+     * buffer must be non-null. fenceFd may be null; if it is non-null but
+     * there is no valid fence associated with the detached buffer, it will
+     * be set to -1.
+     */
+    virtual status_t detachBuffer(sp<GraphicBuffer>* buffer, int* fenceFd) = 0;
+
 };
 
 } // namespace camera3
diff --git a/services/mediaextractor/Android.mk b/services/mediaextractor/Android.mk
index bc2b641..a9a2d3c 100644
--- a/services/mediaextractor/Android.mk
+++ b/services/mediaextractor/Android.mk
@@ -19,6 +19,7 @@
 LOCAL_MODULE:= mediaextractor
 LOCAL_32_BIT_ONLY := true
 LOCAL_INIT_RC := mediaextractor.rc
+LOCAL_C_INCLUDES := frameworks/av/media/libmedia
 include $(BUILD_EXECUTABLE)
 
 include $(call all-makefiles-under, $(LOCAL_PATH))
diff --git a/services/mediaextractor/main_extractorservice.cpp b/services/mediaextractor/main_extractorservice.cpp
index a7f3fbe..245489e 100644
--- a/services/mediaextractor/main_extractorservice.cpp
+++ b/services/mediaextractor/main_extractorservice.cpp
@@ -29,12 +29,18 @@
 // from LOCAL_C_INCLUDES
 #include "IcuUtils.h"
 #include "MediaExtractorService.h"
+#include "MediaUtils.h"
 #include "minijail/minijail.h"
 
 using namespace android;
 
 int main(int argc __unused, char** argv)
 {
+    limitProcessMemory(
+        "ro.media.maxmem", /* property that defines limit */
+        SIZE_MAX, /* upper limit in bytes */
+        20 /* upper limit as percentage of physical RAM */);
+
     signal(SIGPIPE, SIG_IGN);
     MiniJail();
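limitProcessMemory (declared in libmedia's MediaUtils.h, hence the new LOCAL_C_INCLUDES entry above) caps the extractor's address space at the smaller of an absolute byte cap and a percentage of physical RAM, with the named system property able to override the cap. A minimal POSIX sketch of the underlying mechanism, with the property handling omitted (capAddressSpace is an illustrative name, not the real helper):

    #include <sys/resource.h>
    #include <unistd.h>
    #include <algorithm>
    #include <cstdint>

    // Cap the address space at min(absolute cap, percent of physical RAM).
    static void capAddressSpace(uint64_t maxBytes, unsigned percentOfRam) {
        const uint64_t phys = (uint64_t)sysconf(_SC_PHYS_PAGES)
                            * (uint64_t)sysconf(_SC_PAGE_SIZE);
        const uint64_t cap = std::min(maxBytes, phys * percentOfRam / 100);
        struct rlimit lim = { (rlim_t)cap, (rlim_t)cap };
        setrlimit(RLIMIT_AS, &lim);  // best effort; check the result in real code
    }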