Merge "Revert "Handle lock() returning a NULL buffer in SurfaceUtils."" into nyc-dev
diff --git a/camera/ndk/impl/ACameraMetadata.h b/camera/ndk/impl/ACameraMetadata.h
index ab651a1..8d050c4 100644
--- a/camera/ndk/impl/ACameraMetadata.h
+++ b/camera/ndk/impl/ACameraMetadata.h
@@ -79,9 +79,15 @@
 
         Mutex::Autolock _l(mLock);
 
-        // Here we have to use reinterpret_cast because the NDK data type is
-        // exact copy of internal data type but they do not inherit from each other
-        status_t ret = mData.update(tag, reinterpret_cast<const INTERNAL_T*>(data), count);
+        status_t ret = OK;
+        if (count == 0 && data == nullptr) {
+            ret = mData.erase(tag);
+        } else {
+            // Here we have to use reinterpret_cast because the NDK data type is
+            // exact copy of internal data type but they do not inherit from each other
+            ret = mData.update(tag, reinterpret_cast<const INTERNAL_T*>(data), count);
+        }
+
         if (ret == OK) {
             mTags.clear();
             return ACAMERA_OK;
diff --git a/cmds/stagefright/muxer.cpp b/cmds/stagefright/muxer.cpp
index 36fa3b5..0a3bdf3 100644
--- a/cmds/stagefright/muxer.cpp
+++ b/cmds/stagefright/muxer.cpp
@@ -43,6 +43,7 @@
     fprintf(stderr, "       -h help\n");
     fprintf(stderr, "       -a use audio\n");
     fprintf(stderr, "       -v use video\n");
+    fprintf(stderr, "       -w mux into WebM container (default is MP4)\n");
     fprintf(stderr, "       -s Time in milli-seconds when the trim should start\n");
     fprintf(stderr, "       -e Time in milli-seconds when the trim should end\n");
     fprintf(stderr, "       -o output file name. Default is /sdcard/muxeroutput.mp4\n");
@@ -60,7 +61,8 @@
         bool enableTrim,
         int trimStartTimeMs,
         int trimEndTimeMs,
-        int rotationDegrees) {
+        int rotationDegrees,
+        MediaMuxer::OutputFormat container = MediaMuxer::OUTPUT_FORMAT_MPEG_4) {
     sp<NuMediaExtractor> extractor = new NuMediaExtractor;
     if (extractor->setDataSource(NULL /* httpService */, path) != OK) {
         fprintf(stderr, "unable to instantiate extractor. %s\n", path);
@@ -80,8 +82,7 @@
         ALOGE("couldn't open file");
         return fd;
     }
-    sp<MediaMuxer> muxer = new MediaMuxer(fd,
-                                          MediaMuxer::OUTPUT_FORMAT_MPEG_4);
+    sp<MediaMuxer> muxer = new MediaMuxer(fd, container);
     close(fd);
 
     size_t trackCount = extractor->countTracks();
@@ -237,9 +238,10 @@
     // When trimStartTimeMs and trimEndTimeMs seems valid, we turn this switch
     // to true.
     bool enableTrim = false;
+    MediaMuxer::OutputFormat container = MediaMuxer::OUTPUT_FORMAT_MPEG_4;
 
     int res;
-    while ((res = getopt(argc, argv, "h?avo:s:e:r:")) >= 0) {
+    while ((res = getopt(argc, argv, "h?avo:s:e:r:w")) >= 0) {
         switch (res) {
             case 'a':
             {
@@ -253,6 +255,12 @@
                 break;
             }
 
+            case 'w':
+            {
+                container = MediaMuxer::OUTPUT_FORMAT_WEBM;
+                break;
+            }
+
             case 'o':
             {
                 outputFileName = optarg;
@@ -318,7 +326,7 @@
     looper->start();
 
     int result = muxing(argv[0], useAudio, useVideo, outputFileName,
-                        enableTrim, trimStartTimeMs, trimEndTimeMs, rotationDegrees);
+                        enableTrim, trimStartTimeMs, trimEndTimeMs, rotationDegrees, container);
 
     looper->stop();
 
diff --git a/cmds/stagefright/record.cpp b/cmds/stagefright/record.cpp
index f8b2f68..9aa0156 100644
--- a/cmds/stagefright/record.cpp
+++ b/cmds/stagefright/record.cpp
@@ -215,7 +215,7 @@
     enc_meta->setInt32("width", width);
     enc_meta->setInt32("height", height);
     enc_meta->setInt32("sample-rate", kFramerate);
-    enc_meta->setInt32("bit-rate", kVideoBitRate);
+    enc_meta->setInt32("bitrate", kVideoBitRate);
     // enc_meta->setInt32("stride", width);
     // enc_meta->setInt32("slice-height", height);
     enc_meta->setInt32("i-frame-interval", kIFramesIntervalSec);
diff --git a/drm/drmserver/DrmManagerService.cpp b/drm/drmserver/DrmManagerService.cpp
index 814ffcc..dad599b 100644
--- a/drm/drmserver/DrmManagerService.cpp
+++ b/drm/drmserver/DrmManagerService.cpp
@@ -337,7 +337,7 @@
     return mDrmManager->pread(uniqueId, decryptHandle, buffer, numBytes, offset);
 }
 
-status_t DrmManagerService::dump(int fd, const Vector<String16>& /* args */)
+status_t DrmManagerService::dump(int fd, const Vector<String16>& args)
 {
     const size_t SIZE = 256;
     char buffer[SIZE];
@@ -357,8 +357,12 @@
             }
         }
         if (dumpMem) {
-            dumpMemoryAddresses(fd);
+            result.append("\nDumping memory:\n");
+            std::string s = dumpMemoryAddresses(100 /* limit */);
+            result.append(s.c_str(), s.size());
         }
+#else
+        (void)args;
 #endif
     }
     write(fd, result.string(), result.size());
diff --git a/include/camera/ndk/NdkCameraCaptureSession.h b/include/camera/ndk/NdkCameraCaptureSession.h
index 68eff7a..7b314e9 100644
--- a/include/camera/ndk/NdkCameraCaptureSession.h
+++ b/include/camera/ndk/NdkCameraCaptureSession.h
@@ -177,7 +177,8 @@
  *                capture request sent by application, so the address is different to what
  *                application sent but the content will match. This request will be freed by
  *                framework immediately after this callback returns.
- * @param result The capture result metadata reported by camera device
+ * @param result The capture result metadata reported by camera device. The memory is managed by
+ *                camera framework. Do not access this pointer after this callback returns.
  */
 typedef void (*ACameraCaptureSession_captureCallback_result)(
         void* context, ACameraCaptureSession* session,
@@ -193,7 +194,9 @@
  *                capture request sent by application, so the address is different to what
  *                application sent but the content will match. This request will be freed by
  *                framework immediately after this callback returns.
- * @param failure The {@link ACameraCaptureFailure} desribes the capture failure.
+ * @param failure The {@link ACameraCaptureFailure} describes the capture failure. The memory is
+ *                managed by camera framework. Do not access this pointer after this callback
+ *                returns.
  */
 typedef void (*ACameraCaptureSession_captureCallback_failed)(
         void* context, ACameraCaptureSession* session,
diff --git a/include/camera/ndk/NdkCameraMetadata.h b/include/camera/ndk/NdkCameraMetadata.h
index 8a8865d..d929854 100644
--- a/include/camera/ndk/NdkCameraMetadata.h
+++ b/include/camera/ndk/NdkCameraMetadata.h
@@ -43,35 +43,78 @@
 extern "C" {
 #endif
 
+/**
+ * ACameraMetadata is an opaque type that provides access to read-only camera metadata like camera
+ * characteristics (via {@link ACameraManager_getCameraCharacteristics}) or capture results (via
+ * {@link ACameraCaptureSession_captureCallback_result}).
+ */
 typedef struct ACameraMetadata ACameraMetadata;
 
-// Keep in sync with system/media/include/system/camera_metadata.h
+/**
+ * Possible data types of a metadata entry.
+ *
+ * Keep in sync with system/media/include/system/camera_metadata.h
+ */
 enum {
-    // Unsigned 8-bit integer (uint8_t)
+    /// Unsigned 8-bit integer (uint8_t)
     ACAMERA_TYPE_BYTE = 0,
-    // Signed 32-bit integer (int32_t)
+    /// Signed 32-bit integer (int32_t)
     ACAMERA_TYPE_INT32 = 1,
-    // 32-bit float (float)
+    /// 32-bit float (float)
     ACAMERA_TYPE_FLOAT = 2,
-    // Signed 64-bit integer (int64_t)
+    /// Signed 64-bit integer (int64_t)
     ACAMERA_TYPE_INT64 = 3,
-    // 64-bit float (double)
+    /// 64-bit float (double)
     ACAMERA_TYPE_DOUBLE = 4,
-    // A 64-bit fraction (ACameraMetadata_rational)
+    /// A 64-bit fraction (ACameraMetadata_rational)
     ACAMERA_TYPE_RATIONAL = 5,
-    // Number of type fields
+    /// Number of type fields
     ACAMERA_NUM_TYPES
 };
 
+/**
+ * Definition of rational data type in {@link ACameraMetadata}.
+ */
 typedef struct ACameraMetadata_rational {
     int32_t numerator;
     int32_t denominator;
 } ACameraMetadata_rational;
 
+/**
+ * A single camera metadata entry.
+ *
+ * <p>Each entry is an array of values, though many metadata fields may only have 1 entry in the
+ * array.</p>
+ */
 typedef struct ACameraMetadata_entry {
+    /**
+     * The tag identifying the entry.
+     *
+     * <p> It is one of the values defined in {@link NdkCameraMetadataTags.h}, and defines how the
+     * entry should be interpreted and which parts of the API provide it.
+     * See {@link NdkCameraMetadataTags.h} for more details. </p>
+     */
     uint32_t tag;
+
+    /**
+     * The data type of this metadata entry.
+     *
+     * <p>Must be one of ACAMERA_TYPE_* enum values defined above. A particular tag always has the
+     * same type.</p>
+     */
     uint8_t  type;
+
+    /**
+     * Count of elements (NOT count of bytes) in this metadata entry.
+     */
     uint32_t count;
+
+    /**
+     * Pointer to the data held in this metadata entry.
+     *
+     * <p>The type field above defines which union member pointer is valid. The count field above
+     * defines the length of the data in number of elements.</p>
+     */
     union {
         uint8_t *u8;
         int32_t *i32;
@@ -82,10 +125,41 @@
     } data;
 } ACameraMetadata_entry;
 
+/**
+ * A single read-only camera metadata entry.
+ *
+ * <p>Each entry is an array of values, though many metadata fields may only have 1 entry in the
+ * array.</p>
+ */
 typedef struct ACameraMetadata_const_entry {
+    /**
+     * The tag identifying the entry.
+     *
+     * <p> It is one of the values defined in {@link NdkCameraMetadataTags.h}, and defines how the
+     * entry should be interpreted and which parts of the API provide it.
+     * See {@link NdkCameraMetadataTags.h} for more details. </p>
+     */
     uint32_t tag;
+
+    /**
+     * The data type of this metadata entry.
+     *
+     * <p>Must be one of ACAMERA_TYPE_* enum values defined above. A particular tag always has the
+     * same type.</p>
+     */
     uint8_t  type;
+
+    /**
+     * Count of elements (NOT count of bytes) in this metadata entry.
+     */
     uint32_t count;
+
+    /**
+     * Pointer to the data held in this metadata entry.
+     *
+     * <p>The type field above defines which union member pointer is valid. The count field above
+     * defines the length of the data in number of elements.</p>
+     */
     union {
         const uint8_t *u8;
         const int32_t *i32;
@@ -96,32 +170,61 @@
     } data;
 } ACameraMetadata_const_entry;
 
-/*
- * Get a metadata entry
+/**
+ * Get a metadata entry from an input {@link ACameraMetadata}.
+ *
+ * <p>The memory of the data field in the returned entry is managed by camera framework. Do not
+ * attempt to free it.</p>
+ *
+ * @param metadata the {@link ACameraMetadata} of interest.
+ * @param tag the tag value of the camera metadata entry to be queried.
+ * @param entry the output {@link ACameraMetadata_const_entry} will be filled here if the method
+ *        call succeeds.
+ *
+ * @return <ul>
+ *         <li>{@link ACAMERA_OK} if the method call succeeds.</li>
+ *         <li>{@link ACAMERA_ERROR_INVALID_PARAMETER} if metadata or entry is NULL.</li>
+ *         <li>{@link ACAMERA_ERROR_METADATA_NOT_FOUND} if input metadata does not contain an entry
+ *             of input tag value.</li></ul>
  */
 camera_status_t ACameraMetadata_getConstEntry(
-        const ACameraMetadata*, uint32_t tag, ACameraMetadata_const_entry* entry);
-
-/*
- * List all the entry tags in this metadata.
- * The memory of tags is managed by ACameraMetadata itself and must NOT be free/delete
- * by application. Do NOT access tags after calling ACameraMetadata_free
- */
-camera_status_t ACameraMetadata_getAllTags(
-        const ACameraMetadata*, /*out*/int32_t* numTags, /*out*/const uint32_t** tags);
+        const ACameraMetadata* metadata, uint32_t tag, /*out*/ACameraMetadata_const_entry* entry);
 
 /**
- * Copy a metadata. Duplicates a metadata structure.
- * The destination ACameraMetadata must be freed by the application with ACameraMetadata_free
- * after application is done using it.
- * Returns NULL when src cannot be copied
+ * List all the entry tags in input {@link ACameraMetadata}.
+ *
+ * @param metadata the {@link ACameraMetadata} of interest.
+ * @param numEntries number of metadata entries in input {@link ACameraMetadata}
+ * @param tags the tag values of the metadata entries. Length of tags is returned in numEntries
+ *             argument. The memory is managed by ACameraMetadata itself and must NOT be freed/deleted
+ *             by the application. Do NOT access tags after calling ACameraMetadata_free.
+ *
+ * @return <ul>
+ *         <li>{@link ACAMERA_OK} if the method call succeeds.</li>
+ *         <li>{@link ACAMERA_ERROR_INVALID_PARAMETER} if metadata, numEntries or tags is NULL.</li>
+ *         <li>{@link ACAMERA_ERROR_UNKNOWN} if the method fails for some other reasons.</li></ul>
+ */
+camera_status_t ACameraMetadata_getAllTags(
+        const ACameraMetadata* metadata, /*out*/int32_t* numEntries, /*out*/const uint32_t** tags);
+
+/**
+ * Create a copy of input {@link ACameraMetadata}.
+ *
+ * <p>The returned ACameraMetadata must be freed by the application by {@link ACameraMetadata_free}
+ * after application is done using it.</p>
+ *
+ * @param src the input {@link ACameraMetadata} to be copied.
+ *
+ * @return a valid ACameraMetadata pointer or NULL if the input metadata cannot be copied.
  */
 ACameraMetadata* ACameraMetadata_copy(const ACameraMetadata* src);
 
 /**
- * Frees a metadata structure.
+ * Free a {@link ACameraMetadata} structure.
+ *
+ * @param metadata the {@link ACameraMetadata} to be freed.
  */
-void ACameraMetadata_free(ACameraMetadata*);
+void ACameraMetadata_free(ACameraMetadata* metadata);
 
 #ifdef __cplusplus
 } // extern "C"
diff --git a/include/camera/ndk/NdkCameraMetadataTags.h b/include/camera/ndk/NdkCameraMetadataTags.h
index 6fa0517..e7f6989 100644
--- a/include/camera/ndk/NdkCameraMetadataTags.h
+++ b/include/camera/ndk/NdkCameraMetadataTags.h
@@ -128,7 +128,7 @@
      * FAST or HIGH_QUALITY will yield a picture with the same white point
      * as what was produced by the camera device in the earlier frame.</p>
      * <p>The expected processing pipeline is as follows:</p>
-     * <p><img alt="White balance processing pipeline" src="../../../../images/camera2/metadata/android.colorCorrection.mode/processing_pipeline.png" /></p>
+     * <p><img alt="White balance processing pipeline" src="../images/camera2/metadata/android.colorCorrection.mode/processing_pipeline.png" /></p>
      * <p>The white balance is encoded by two values, a 4-channel white-balance
      * gain vector (applied in the Bayer domain), and a 3x3 color transform
      * matrix (applied after demosaic).</p>
@@ -445,6 +445,10 @@
      * Otherwise will always be present.</p>
      * <p>The maximum number of regions supported by the device is determined by the value
      * of android.control.maxRegionsAe.</p>
+     * <p>The data representation is int[5 * area_count].
+     * Every five elements represent a metering region of (xmin, ymin, xmax, ymax, weight).
+     * The rectangle is defined to be inclusive on xmin and ymin, but exclusive on xmax and
+     * ymax.</p>
      * <p>The coordinate system is based on the active pixel array,
      * with (0,0) being the top-left pixel in the active pixel array, and
      * (ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE.width - 1,
@@ -595,6 +599,10 @@
      * Otherwise will always be present.</p>
      * <p>The maximum number of focus areas supported by the device is determined by the value
      * of android.control.maxRegionsAf.</p>
+     * <p>The data representation is int[5 * area_count].
+     * Every five elements represent a metering region of (xmin, ymin, xmax, ymax, weight).
+     * The rectangle is defined to be inclusive on xmin and ymin, but exclusive on xmax and
+     * ymax.</p>
      * <p>The coordinate system is based on the active pixel array,
      * with (0,0) being the top-left pixel in the active pixel array, and
      * (ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE.width - 1,
@@ -741,6 +749,10 @@
      * Otherwise will always be present.</p>
      * <p>The maximum number of regions supported by the device is determined by the value
      * of android.control.maxRegionsAwb.</p>
+     * <p>The data representation is int[5 * area_count].
+     * Every five elements represent a metering region of (xmin, ymin, xmax, ymax, weight).
+     * The rectangle is defined to be inclusive on xmin and ymin, but exclusive on xmax and
+     * ymax.</p>
      * <p>The coordinate system is based on the active pixel array,
      * with (0,0) being the top-left pixel in the active pixel array, and
      * (ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE.width - 1,
@@ -828,7 +840,8 @@
      * ACAMERA_CONTROL_* are mostly disabled, and the camera device implements
      * one of the scene mode settings (such as ACTION, SUNSET, or PARTY)
      * as it wishes. The camera device scene mode 3A settings are provided by
-     * {@link android.hardware.camera2.CaptureResult capture results}.</p>
+     * capture results {@link ACameraMetadata} from
+     * {@link ACameraCaptureSession_captureCallback_result}.</p>
      * <p>When set to OFF_KEEP_STATE, it is similar to OFF mode, the only difference
      * is that this frame will not be used by camera device background 3A statistics
      * update, as if this frame is never captured. This mode can be used in the scenario
@@ -970,21 +983,23 @@
      * <ul>
      * <li>
      * <p>For constant-framerate recording, for each normal
-     * {@link android.media.CamcorderProfile CamcorderProfile}, that is, a
-     * {@link android.media.CamcorderProfile CamcorderProfile} that has
-     * {@link android.media.CamcorderProfile#quality quality} in
-     * the range [{@link android.media.CamcorderProfile#QUALITY_LOW QUALITY_LOW},
-     * {@link android.media.CamcorderProfile#QUALITY_2160P QUALITY_2160P}], if the profile is
-     * supported by the device and has
-     * {@link android.media.CamcorderProfile#videoFrameRate videoFrameRate} <code>x</code>, this list will
-     * always include (<code>x</code>,<code>x</code>).</p>
+     * <a href="https://developer.android.com/reference/android/media/CamcorderProfile.html">CamcorderProfile</a>, that is, a
+     * <a href="https://developer.android.com/reference/android/media/CamcorderProfile.html">CamcorderProfile</a> that has
+     * <a href="https://developer.android.com/reference/android/media/CamcorderProfile.html#quality">quality</a>
+     * in the range [
+     * <a href="https://developer.android.com/reference/android/media/CamcorderProfile.html#QUALITY_LOW">QUALITY_LOW</a>,
+     * <a href="https://developer.android.com/reference/android/media/CamcorderProfile.html#QUALITY_2160P">QUALITY_2160P</a>],
+     * if the profile is supported by the device and has
+     * <a href="https://developer.android.com/reference/android/media/CamcorderProfile.html#videoFrameRate">videoFrameRate</a>
+     * <code>x</code>, this list will always include (<code>x</code>,<code>x</code>).</p>
      * </li>
      * <li>
      * <p>Also, a camera device must either not support any
-     * {@link android.media.CamcorderProfile CamcorderProfile},
+     * <a href="https://developer.android.com/reference/android/media/CamcorderProfile.html">CamcorderProfile</a>,
      * or support at least one
-     * normal {@link android.media.CamcorderProfile CamcorderProfile} that has
-     * {@link android.media.CamcorderProfile#videoFrameRate videoFrameRate} <code>x</code> &gt;= 24.</p>
+     * normal <a href="https://developer.android.com/reference/android/media/CamcorderProfile.html">CamcorderProfile</a>
+     * that has
+     * <a href="https://developer.android.com/reference/android/media/CamcorderProfile.html#videoFrameRate">videoFrameRate</a> <code>x</code> &gt;= 24.</p>
      * </li>
      * </ul>
      * <p>For devices at the LIMITED level or above:</p>
@@ -1190,205 +1205,45 @@
      * AE state becomes CONVERGED, then the image data associated with this result should
      * be good to use.</p>
      * <p>Below are state transition tables for different AE modes.</p>
-     * <table>
-     * <thead>
-     * <tr>
-     * <th align="center">State</th>
-     * <th align="center">Transition Cause</th>
-     * <th align="center">New State</th>
-     * <th align="center">Notes</th>
-     * </tr>
-     * </thead>
-     * <tbody>
-     * <tr>
-     * <td align="center">INACTIVE</td>
-     * <td align="center"></td>
-     * <td align="center">INACTIVE</td>
-     * <td align="center">Camera device auto exposure algorithm is disabled</td>
-     * </tr>
-     * </tbody>
-     * </table>
+     * <p>State       | Transition Cause | New State | Notes
+     * :------------:|:----------------:|:---------:|:-----------------------:
+     * INACTIVE      |                  | INACTIVE  | Camera device auto exposure algorithm is disabled</p>
      * <p>When ACAMERA_CONTROL_AE_MODE is AE_MODE_ON_*:</p>
-     * <table>
-     * <thead>
-     * <tr>
-     * <th align="center">State</th>
-     * <th align="center">Transition Cause</th>
-     * <th align="center">New State</th>
-     * <th align="center">Notes</th>
-     * </tr>
-     * </thead>
-     * <tbody>
-     * <tr>
-     * <td align="center">INACTIVE</td>
-     * <td align="center">Camera device initiates AE scan</td>
-     * <td align="center">SEARCHING</td>
-     * <td align="center">Values changing</td>
-     * </tr>
-     * <tr>
-     * <td align="center">INACTIVE</td>
-     * <td align="center">ACAMERA_CONTROL_AE_LOCK is ON</td>
-     * <td align="center">LOCKED</td>
-     * <td align="center">Values locked</td>
-     * </tr>
-     * <tr>
-     * <td align="center">SEARCHING</td>
-     * <td align="center">Camera device finishes AE scan</td>
-     * <td align="center">CONVERGED</td>
-     * <td align="center">Good values, not changing</td>
-     * </tr>
-     * <tr>
-     * <td align="center">SEARCHING</td>
-     * <td align="center">Camera device finishes AE scan</td>
-     * <td align="center">FLASH_REQUIRED</td>
-     * <td align="center">Converged but too dark w/o flash</td>
-     * </tr>
-     * <tr>
-     * <td align="center">SEARCHING</td>
-     * <td align="center">ACAMERA_CONTROL_AE_LOCK is ON</td>
-     * <td align="center">LOCKED</td>
-     * <td align="center">Values locked</td>
-     * </tr>
-     * <tr>
-     * <td align="center">CONVERGED</td>
-     * <td align="center">Camera device initiates AE scan</td>
-     * <td align="center">SEARCHING</td>
-     * <td align="center">Values changing</td>
-     * </tr>
-     * <tr>
-     * <td align="center">CONVERGED</td>
-     * <td align="center">ACAMERA_CONTROL_AE_LOCK is ON</td>
-     * <td align="center">LOCKED</td>
-     * <td align="center">Values locked</td>
-     * </tr>
-     * <tr>
-     * <td align="center">FLASH_REQUIRED</td>
-     * <td align="center">Camera device initiates AE scan</td>
-     * <td align="center">SEARCHING</td>
-     * <td align="center">Values changing</td>
-     * </tr>
-     * <tr>
-     * <td align="center">FLASH_REQUIRED</td>
-     * <td align="center">ACAMERA_CONTROL_AE_LOCK is ON</td>
-     * <td align="center">LOCKED</td>
-     * <td align="center">Values locked</td>
-     * </tr>
-     * <tr>
-     * <td align="center">LOCKED</td>
-     * <td align="center">ACAMERA_CONTROL_AE_LOCK is OFF</td>
-     * <td align="center">SEARCHING</td>
-     * <td align="center">Values not good after unlock</td>
-     * </tr>
-     * <tr>
-     * <td align="center">LOCKED</td>
-     * <td align="center">ACAMERA_CONTROL_AE_LOCK is OFF</td>
-     * <td align="center">CONVERGED</td>
-     * <td align="center">Values good after unlock</td>
-     * </tr>
-     * <tr>
-     * <td align="center">LOCKED</td>
-     * <td align="center">ACAMERA_CONTROL_AE_LOCK is OFF</td>
-     * <td align="center">FLASH_REQUIRED</td>
-     * <td align="center">Exposure good, but too dark</td>
-     * </tr>
-     * <tr>
-     * <td align="center">PRECAPTURE</td>
-     * <td align="center">Sequence done. ACAMERA_CONTROL_AE_LOCK is OFF</td>
-     * <td align="center">CONVERGED</td>
-     * <td align="center">Ready for high-quality capture</td>
-     * </tr>
-     * <tr>
-     * <td align="center">PRECAPTURE</td>
-     * <td align="center">Sequence done. ACAMERA_CONTROL_AE_LOCK is ON</td>
-     * <td align="center">LOCKED</td>
-     * <td align="center">Ready for high-quality capture</td>
-     * </tr>
-     * <tr>
-     * <td align="center">LOCKED</td>
-     * <td align="center">aeLock is ON and aePrecaptureTrigger is START</td>
-     * <td align="center">LOCKED</td>
-     * <td align="center">Precapture trigger is ignored when AE is already locked</td>
-     * </tr>
-     * <tr>
-     * <td align="center">LOCKED</td>
-     * <td align="center">aeLock is ON and aePrecaptureTrigger is CANCEL</td>
-     * <td align="center">LOCKED</td>
-     * <td align="center">Precapture trigger is ignored when AE is already locked</td>
-     * </tr>
-     * <tr>
-     * <td align="center">Any state (excluding LOCKED)</td>
-     * <td align="center">ACAMERA_CONTROL_AE_PRECAPTURE_TRIGGER is START</td>
-     * <td align="center">PRECAPTURE</td>
-     * <td align="center">Start AE precapture metering sequence</td>
-     * </tr>
-     * <tr>
-     * <td align="center">Any state (excluding LOCKED)</td>
-     * <td align="center">ACAMERA_CONTROL_AE_PRECAPTURE_TRIGGER is CANCEL</td>
-     * <td align="center">INACTIVE</td>
-     * <td align="center">Currently active precapture metering sequence is canceled</td>
-     * </tr>
-     * </tbody>
-     * </table>
+     * <p>State        | Transition Cause                             | New State      | Notes
+     * :-------------:|:--------------------------------------------:|:--------------:|:-----------------:
+     * INACTIVE       | Camera device initiates AE scan              | SEARCHING      | Values changing
+     * INACTIVE       | ACAMERA_CONTROL_AE_LOCK is ON                 | LOCKED         | Values locked
+     * SEARCHING      | Camera device finishes AE scan               | CONVERGED      | Good values, not changing
+     * SEARCHING      | Camera device finishes AE scan               | FLASH_REQUIRED | Converged but too dark w/o flash
+     * SEARCHING      | ACAMERA_CONTROL_AE_LOCK is ON                 | LOCKED         | Values locked
+     * CONVERGED      | Camera device initiates AE scan              | SEARCHING      | Values changing
+     * CONVERGED      | ACAMERA_CONTROL_AE_LOCK is ON                 | LOCKED         | Values locked
+     * FLASH_REQUIRED | Camera device initiates AE scan              | SEARCHING      | Values changing
+     * FLASH_REQUIRED | ACAMERA_CONTROL_AE_LOCK is ON                 | LOCKED         | Values locked
+     * LOCKED         | ACAMERA_CONTROL_AE_LOCK is OFF                | SEARCHING      | Values not good after unlock
+     * LOCKED         | ACAMERA_CONTROL_AE_LOCK is OFF                | CONVERGED      | Values good after unlock
+     * LOCKED         | ACAMERA_CONTROL_AE_LOCK is OFF                | FLASH_REQUIRED | Exposure good, but too dark
+     * PRECAPTURE     | Sequence done. ACAMERA_CONTROL_AE_LOCK is OFF | CONVERGED      | Ready for high-quality capture
+     * PRECAPTURE     | Sequence done. ACAMERA_CONTROL_AE_LOCK is ON  | LOCKED         | Ready for high-quality capture
+     * LOCKED         | aeLock is ON and aePrecaptureTrigger is START | LOCKED        | Precapture trigger is ignored when AE is already locked
+     * LOCKED         | aeLock is ON and aePrecaptureTrigger is CANCEL| LOCKED        | Precapture trigger is ignored when AE is already locked
+     * Any state (excluding LOCKED) | ACAMERA_CONTROL_AE_PRECAPTURE_TRIGGER is START | PRECAPTURE     | Start AE precapture metering sequence
+     * Any state (excluding LOCKED) | ACAMERA_CONTROL_AE_PRECAPTURE_TRIGGER is CANCEL| INACTIVE       | Currently active precapture metering sequence is canceled</p>
      * <p>For the above table, the camera device may skip reporting any state changes that happen
      * without application intervention (i.e. mode switch, trigger, locking). Any state that
      * can be skipped in that manner is called a transient state.</p>
      * <p>For example, for above AE modes (AE_MODE_ON_*), in addition to the state transitions
      * listed in above table, it is also legal for the camera device to skip one or more
      * transient states between two results. See below table for examples:</p>
-     * <table>
-     * <thead>
-     * <tr>
-     * <th align="center">State</th>
-     * <th align="center">Transition Cause</th>
-     * <th align="center">New State</th>
-     * <th align="center">Notes</th>
-     * </tr>
-     * </thead>
-     * <tbody>
-     * <tr>
-     * <td align="center">INACTIVE</td>
-     * <td align="center">Camera device finished AE scan</td>
-     * <td align="center">CONVERGED</td>
-     * <td align="center">Values are already good, transient states are skipped by camera device.</td>
-     * </tr>
-     * <tr>
-     * <td align="center">Any state (excluding LOCKED)</td>
-     * <td align="center">ACAMERA_CONTROL_AE_PRECAPTURE_TRIGGER is START, sequence done</td>
-     * <td align="center">FLASH_REQUIRED</td>
-     * <td align="center">Converged but too dark w/o flash after a precapture sequence, transient states are skipped by camera device.</td>
-     * </tr>
-     * <tr>
-     * <td align="center">Any state (excluding LOCKED)</td>
-     * <td align="center">ACAMERA_CONTROL_AE_PRECAPTURE_TRIGGER is START, sequence done</td>
-     * <td align="center">CONVERGED</td>
-     * <td align="center">Converged after a precapture sequence, transient states are skipped by camera device.</td>
-     * </tr>
-     * <tr>
-     * <td align="center">Any state (excluding LOCKED)</td>
-     * <td align="center">ACAMERA_CONTROL_AE_PRECAPTURE_TRIGGER is CANCEL, converged</td>
-     * <td align="center">FLASH_REQUIRED</td>
-     * <td align="center">Converged but too dark w/o flash after a precapture sequence is canceled, transient states are skipped by camera device.</td>
-     * </tr>
-     * <tr>
-     * <td align="center">Any state (excluding LOCKED)</td>
-     * <td align="center">ACAMERA_CONTROL_AE_PRECAPTURE_TRIGGER is CANCEL, converged</td>
-     * <td align="center">CONVERGED</td>
-     * <td align="center">Converged after a precapture sequenceis canceled, transient states are skipped by camera device.</td>
-     * </tr>
-     * <tr>
-     * <td align="center">CONVERGED</td>
-     * <td align="center">Camera device finished AE scan</td>
-     * <td align="center">FLASH_REQUIRED</td>
-     * <td align="center">Converged but too dark w/o flash after a new scan, transient states are skipped by camera device.</td>
-     * </tr>
-     * <tr>
-     * <td align="center">FLASH_REQUIRED</td>
-     * <td align="center">Camera device finished AE scan</td>
-     * <td align="center">CONVERGED</td>
-     * <td align="center">Converged after a new scan, transient states are skipped by camera device.</td>
-     * </tr>
-     * </tbody>
-     * </table>
+     * <p>State        | Transition Cause                                            | New State      | Notes
+     * :-------------:|:-----------------------------------------------------------:|:--------------:|:-----------------:
+     * INACTIVE       | Camera device finished AE scan                              | CONVERGED      | Values are already good, transient states are skipped by camera device.
+     * Any state (excluding LOCKED) | ACAMERA_CONTROL_AE_PRECAPTURE_TRIGGER is START, sequence done | FLASH_REQUIRED | Converged but too dark w/o flash after a precapture sequence, transient states are skipped by camera device.
+     * Any state (excluding LOCKED) | ACAMERA_CONTROL_AE_PRECAPTURE_TRIGGER is START, sequence done | CONVERGED      | Converged after a precapture sequence, transient states are skipped by camera device.
+     * Any state (excluding LOCKED) | ACAMERA_CONTROL_AE_PRECAPTURE_TRIGGER is CANCEL, converged    | FLASH_REQUIRED | Converged but too dark w/o flash after a precapture sequence is canceled, transient states are skipped by camera device.
+     * Any state (excluding LOCKED) | ACAMERA_CONTROL_AE_PRECAPTURE_TRIGGER is CANCEL, converged    | CONVERGED      | Converged after a precapture sequence is canceled, transient states are skipped by camera device.
+     * CONVERGED      | Camera device finished AE scan                              | FLASH_REQUIRED | Converged but too dark w/o flash after a new scan, transient states are skipped by camera device.
+     * FLASH_REQUIRED | Camera device finished AE scan                              | CONVERGED      | Converged after a new scan, transient states are skipped by camera device.</p>
      *
      * @see ACAMERA_CONTROL_AE_LOCK
      * @see ACAMERA_CONTROL_AE_MODE
@@ -1418,374 +1273,79 @@
      * be sharp.</p>
      * <p>Below are state transition tables for different AF modes.</p>
      * <p>When ACAMERA_CONTROL_AF_MODE is AF_MODE_OFF or AF_MODE_EDOF:</p>
-     * <table>
-     * <thead>
-     * <tr>
-     * <th align="center">State</th>
-     * <th align="center">Transition Cause</th>
-     * <th align="center">New State</th>
-     * <th align="center">Notes</th>
-     * </tr>
-     * </thead>
-     * <tbody>
-     * <tr>
-     * <td align="center">INACTIVE</td>
-     * <td align="center"></td>
-     * <td align="center">INACTIVE</td>
-     * <td align="center">Never changes</td>
-     * </tr>
-     * </tbody>
-     * </table>
+     * <p>State       | Transition Cause | New State | Notes
+     * :------------:|:----------------:|:---------:|:-----------:
+     * INACTIVE      |                  | INACTIVE  | Never changes</p>
      * <p>When ACAMERA_CONTROL_AF_MODE is AF_MODE_AUTO or AF_MODE_MACRO:</p>
-     * <table>
-     * <thead>
-     * <tr>
-     * <th align="center">State</th>
-     * <th align="center">Transition Cause</th>
-     * <th align="center">New State</th>
-     * <th align="center">Notes</th>
-     * </tr>
-     * </thead>
-     * <tbody>
-     * <tr>
-     * <td align="center">INACTIVE</td>
-     * <td align="center">AF_TRIGGER</td>
-     * <td align="center">ACTIVE_SCAN</td>
-     * <td align="center">Start AF sweep, Lens now moving</td>
-     * </tr>
-     * <tr>
-     * <td align="center">ACTIVE_SCAN</td>
-     * <td align="center">AF sweep done</td>
-     * <td align="center">FOCUSED_LOCKED</td>
-     * <td align="center">Focused, Lens now locked</td>
-     * </tr>
-     * <tr>
-     * <td align="center">ACTIVE_SCAN</td>
-     * <td align="center">AF sweep done</td>
-     * <td align="center">NOT_FOCUSED_LOCKED</td>
-     * <td align="center">Not focused, Lens now locked</td>
-     * </tr>
-     * <tr>
-     * <td align="center">ACTIVE_SCAN</td>
-     * <td align="center">AF_CANCEL</td>
-     * <td align="center">INACTIVE</td>
-     * <td align="center">Cancel/reset AF, Lens now locked</td>
-     * </tr>
-     * <tr>
-     * <td align="center">FOCUSED_LOCKED</td>
-     * <td align="center">AF_CANCEL</td>
-     * <td align="center">INACTIVE</td>
-     * <td align="center">Cancel/reset AF</td>
-     * </tr>
-     * <tr>
-     * <td align="center">FOCUSED_LOCKED</td>
-     * <td align="center">AF_TRIGGER</td>
-     * <td align="center">ACTIVE_SCAN</td>
-     * <td align="center">Start new sweep, Lens now moving</td>
-     * </tr>
-     * <tr>
-     * <td align="center">NOT_FOCUSED_LOCKED</td>
-     * <td align="center">AF_CANCEL</td>
-     * <td align="center">INACTIVE</td>
-     * <td align="center">Cancel/reset AF</td>
-     * </tr>
-     * <tr>
-     * <td align="center">NOT_FOCUSED_LOCKED</td>
-     * <td align="center">AF_TRIGGER</td>
-     * <td align="center">ACTIVE_SCAN</td>
-     * <td align="center">Start new sweep, Lens now moving</td>
-     * </tr>
-     * <tr>
-     * <td align="center">Any state</td>
-     * <td align="center">Mode change</td>
-     * <td align="center">INACTIVE</td>
-     * <td align="center"></td>
-     * </tr>
-     * </tbody>
-     * </table>
+     * <p>State            | Transition Cause | New State          | Notes
+     * :-----------------:|:----------------:|:------------------:|:--------------:
+     * INACTIVE           | AF_TRIGGER       | ACTIVE_SCAN        | Start AF sweep, Lens now moving
+     * ACTIVE_SCAN        | AF sweep done    | FOCUSED_LOCKED     | Focused, Lens now locked
+     * ACTIVE_SCAN        | AF sweep done    | NOT_FOCUSED_LOCKED | Not focused, Lens now locked
+     * ACTIVE_SCAN        | AF_CANCEL        | INACTIVE           | Cancel/reset AF, Lens now locked
+     * FOCUSED_LOCKED     | AF_CANCEL        | INACTIVE           | Cancel/reset AF
+     * FOCUSED_LOCKED     | AF_TRIGGER       | ACTIVE_SCAN        | Start new sweep, Lens now moving
+     * NOT_FOCUSED_LOCKED | AF_CANCEL        | INACTIVE           | Cancel/reset AF
+     * NOT_FOCUSED_LOCKED | AF_TRIGGER       | ACTIVE_SCAN        | Start new sweep, Lens now moving
+     * Any state          | Mode change      | INACTIVE           |</p>
      * <p>For the above table, the camera device may skip reporting any state changes that happen
      * without application intervention (i.e. mode switch, trigger, locking). Any state that
      * can be skipped in that manner is called a transient state.</p>
      * <p>For example, for these AF modes (AF_MODE_AUTO and AF_MODE_MACRO), in addition to the
      * state transitions listed in above table, it is also legal for the camera device to skip
      * one or more transient states between two results. See below table for examples:</p>
-     * <table>
-     * <thead>
-     * <tr>
-     * <th align="center">State</th>
-     * <th align="center">Transition Cause</th>
-     * <th align="center">New State</th>
-     * <th align="center">Notes</th>
-     * </tr>
-     * </thead>
-     * <tbody>
-     * <tr>
-     * <td align="center">INACTIVE</td>
-     * <td align="center">AF_TRIGGER</td>
-     * <td align="center">FOCUSED_LOCKED</td>
-     * <td align="center">Focus is already good or good after a scan, lens is now locked.</td>
-     * </tr>
-     * <tr>
-     * <td align="center">INACTIVE</td>
-     * <td align="center">AF_TRIGGER</td>
-     * <td align="center">NOT_FOCUSED_LOCKED</td>
-     * <td align="center">Focus failed after a scan, lens is now locked.</td>
-     * </tr>
-     * <tr>
-     * <td align="center">FOCUSED_LOCKED</td>
-     * <td align="center">AF_TRIGGER</td>
-     * <td align="center">FOCUSED_LOCKED</td>
-     * <td align="center">Focus is already good or good after a scan, lens is now locked.</td>
-     * </tr>
-     * <tr>
-     * <td align="center">NOT_FOCUSED_LOCKED</td>
-     * <td align="center">AF_TRIGGER</td>
-     * <td align="center">FOCUSED_LOCKED</td>
-     * <td align="center">Focus is good after a scan, lens is not locked.</td>
-     * </tr>
-     * </tbody>
-     * </table>
+     * <p>State            | Transition Cause | New State          | Notes
+     * :-----------------:|:----------------:|:------------------:|:--------------:
+     * INACTIVE           | AF_TRIGGER       | FOCUSED_LOCKED     | Focus is already good or good after a scan, lens is now locked.
+     * INACTIVE           | AF_TRIGGER       | NOT_FOCUSED_LOCKED | Focus failed after a scan, lens is now locked.
+     * FOCUSED_LOCKED     | AF_TRIGGER       | FOCUSED_LOCKED     | Focus is already good or good after a scan, lens is now locked.
+     * NOT_FOCUSED_LOCKED | AF_TRIGGER       | FOCUSED_LOCKED     | Focus is good after a scan, lens is not locked.</p>
      * <p>When ACAMERA_CONTROL_AF_MODE is AF_MODE_CONTINUOUS_VIDEO:</p>
-     * <table>
-     * <thead>
-     * <tr>
-     * <th align="center">State</th>
-     * <th align="center">Transition Cause</th>
-     * <th align="center">New State</th>
-     * <th align="center">Notes</th>
-     * </tr>
-     * </thead>
-     * <tbody>
-     * <tr>
-     * <td align="center">INACTIVE</td>
-     * <td align="center">Camera device initiates new scan</td>
-     * <td align="center">PASSIVE_SCAN</td>
-     * <td align="center">Start AF scan, Lens now moving</td>
-     * </tr>
-     * <tr>
-     * <td align="center">INACTIVE</td>
-     * <td align="center">AF_TRIGGER</td>
-     * <td align="center">NOT_FOCUSED_LOCKED</td>
-     * <td align="center">AF state query, Lens now locked</td>
-     * </tr>
-     * <tr>
-     * <td align="center">PASSIVE_SCAN</td>
-     * <td align="center">Camera device completes current scan</td>
-     * <td align="center">PASSIVE_FOCUSED</td>
-     * <td align="center">End AF scan, Lens now locked</td>
-     * </tr>
-     * <tr>
-     * <td align="center">PASSIVE_SCAN</td>
-     * <td align="center">Camera device fails current scan</td>
-     * <td align="center">PASSIVE_UNFOCUSED</td>
-     * <td align="center">End AF scan, Lens now locked</td>
-     * </tr>
-     * <tr>
-     * <td align="center">PASSIVE_SCAN</td>
-     * <td align="center">AF_TRIGGER</td>
-     * <td align="center">FOCUSED_LOCKED</td>
-     * <td align="center">Immediate transition, if focus is good. Lens now locked</td>
-     * </tr>
-     * <tr>
-     * <td align="center">PASSIVE_SCAN</td>
-     * <td align="center">AF_TRIGGER</td>
-     * <td align="center">NOT_FOCUSED_LOCKED</td>
-     * <td align="center">Immediate transition, if focus is bad. Lens now locked</td>
-     * </tr>
-     * <tr>
-     * <td align="center">PASSIVE_SCAN</td>
-     * <td align="center">AF_CANCEL</td>
-     * <td align="center">INACTIVE</td>
-     * <td align="center">Reset lens position, Lens now locked</td>
-     * </tr>
-     * <tr>
-     * <td align="center">PASSIVE_FOCUSED</td>
-     * <td align="center">Camera device initiates new scan</td>
-     * <td align="center">PASSIVE_SCAN</td>
-     * <td align="center">Start AF scan, Lens now moving</td>
-     * </tr>
-     * <tr>
-     * <td align="center">PASSIVE_UNFOCUSED</td>
-     * <td align="center">Camera device initiates new scan</td>
-     * <td align="center">PASSIVE_SCAN</td>
-     * <td align="center">Start AF scan, Lens now moving</td>
-     * </tr>
-     * <tr>
-     * <td align="center">PASSIVE_FOCUSED</td>
-     * <td align="center">AF_TRIGGER</td>
-     * <td align="center">FOCUSED_LOCKED</td>
-     * <td align="center">Immediate transition, lens now locked</td>
-     * </tr>
-     * <tr>
-     * <td align="center">PASSIVE_UNFOCUSED</td>
-     * <td align="center">AF_TRIGGER</td>
-     * <td align="center">NOT_FOCUSED_LOCKED</td>
-     * <td align="center">Immediate transition, lens now locked</td>
-     * </tr>
-     * <tr>
-     * <td align="center">FOCUSED_LOCKED</td>
-     * <td align="center">AF_TRIGGER</td>
-     * <td align="center">FOCUSED_LOCKED</td>
-     * <td align="center">No effect</td>
-     * </tr>
-     * <tr>
-     * <td align="center">FOCUSED_LOCKED</td>
-     * <td align="center">AF_CANCEL</td>
-     * <td align="center">INACTIVE</td>
-     * <td align="center">Restart AF scan</td>
-     * </tr>
-     * <tr>
-     * <td align="center">NOT_FOCUSED_LOCKED</td>
-     * <td align="center">AF_TRIGGER</td>
-     * <td align="center">NOT_FOCUSED_LOCKED</td>
-     * <td align="center">No effect</td>
-     * </tr>
-     * <tr>
-     * <td align="center">NOT_FOCUSED_LOCKED</td>
-     * <td align="center">AF_CANCEL</td>
-     * <td align="center">INACTIVE</td>
-     * <td align="center">Restart AF scan</td>
-     * </tr>
-     * </tbody>
-     * </table>
+     * <p>State            | Transition Cause                    | New State          | Notes
+     * :-----------------:|:-----------------------------------:|:------------------:|:--------------:
+     * INACTIVE           | Camera device initiates new scan    | PASSIVE_SCAN       | Start AF scan, Lens now moving
+     * INACTIVE           | AF_TRIGGER                          | NOT_FOCUSED_LOCKED | AF state query, Lens now locked
+     * PASSIVE_SCAN       | Camera device completes current scan| PASSIVE_FOCUSED    | End AF scan, Lens now locked
+     * PASSIVE_SCAN       | Camera device fails current scan    | PASSIVE_UNFOCUSED  | End AF scan, Lens now locked
+     * PASSIVE_SCAN       | AF_TRIGGER                          | FOCUSED_LOCKED     | Immediate transition, if focus is good. Lens now locked
+     * PASSIVE_SCAN       | AF_TRIGGER                          | NOT_FOCUSED_LOCKED | Immediate transition, if focus is bad. Lens now locked
+     * PASSIVE_SCAN       | AF_CANCEL                           | INACTIVE           | Reset lens position, Lens now locked
+     * PASSIVE_FOCUSED    | Camera device initiates new scan    | PASSIVE_SCAN       | Start AF scan, Lens now moving
+     * PASSIVE_UNFOCUSED  | Camera device initiates new scan    | PASSIVE_SCAN       | Start AF scan, Lens now moving
+     * PASSIVE_FOCUSED    | AF_TRIGGER                          | FOCUSED_LOCKED     | Immediate transition, lens now locked
+     * PASSIVE_UNFOCUSED  | AF_TRIGGER                          | NOT_FOCUSED_LOCKED | Immediate transition, lens now locked
+     * FOCUSED_LOCKED     | AF_TRIGGER                          | FOCUSED_LOCKED     | No effect
+     * FOCUSED_LOCKED     | AF_CANCEL                           | INACTIVE           | Restart AF scan
+     * NOT_FOCUSED_LOCKED | AF_TRIGGER                          | NOT_FOCUSED_LOCKED | No effect
+     * NOT_FOCUSED_LOCKED | AF_CANCEL                           | INACTIVE           | Restart AF scan</p>
      * <p>When ACAMERA_CONTROL_AF_MODE is AF_MODE_CONTINUOUS_PICTURE:</p>
-     * <table>
-     * <thead>
-     * <tr>
-     * <th align="center">State</th>
-     * <th align="center">Transition Cause</th>
-     * <th align="center">New State</th>
-     * <th align="center">Notes</th>
-     * </tr>
-     * </thead>
-     * <tbody>
-     * <tr>
-     * <td align="center">INACTIVE</td>
-     * <td align="center">Camera device initiates new scan</td>
-     * <td align="center">PASSIVE_SCAN</td>
-     * <td align="center">Start AF scan, Lens now moving</td>
-     * </tr>
-     * <tr>
-     * <td align="center">INACTIVE</td>
-     * <td align="center">AF_TRIGGER</td>
-     * <td align="center">NOT_FOCUSED_LOCKED</td>
-     * <td align="center">AF state query, Lens now locked</td>
-     * </tr>
-     * <tr>
-     * <td align="center">PASSIVE_SCAN</td>
-     * <td align="center">Camera device completes current scan</td>
-     * <td align="center">PASSIVE_FOCUSED</td>
-     * <td align="center">End AF scan, Lens now locked</td>
-     * </tr>
-     * <tr>
-     * <td align="center">PASSIVE_SCAN</td>
-     * <td align="center">Camera device fails current scan</td>
-     * <td align="center">PASSIVE_UNFOCUSED</td>
-     * <td align="center">End AF scan, Lens now locked</td>
-     * </tr>
-     * <tr>
-     * <td align="center">PASSIVE_SCAN</td>
-     * <td align="center">AF_TRIGGER</td>
-     * <td align="center">FOCUSED_LOCKED</td>
-     * <td align="center">Eventual transition once the focus is good. Lens now locked</td>
-     * </tr>
-     * <tr>
-     * <td align="center">PASSIVE_SCAN</td>
-     * <td align="center">AF_TRIGGER</td>
-     * <td align="center">NOT_FOCUSED_LOCKED</td>
-     * <td align="center">Eventual transition if cannot find focus. Lens now locked</td>
-     * </tr>
-     * <tr>
-     * <td align="center">PASSIVE_SCAN</td>
-     * <td align="center">AF_CANCEL</td>
-     * <td align="center">INACTIVE</td>
-     * <td align="center">Reset lens position, Lens now locked</td>
-     * </tr>
-     * <tr>
-     * <td align="center">PASSIVE_FOCUSED</td>
-     * <td align="center">Camera device initiates new scan</td>
-     * <td align="center">PASSIVE_SCAN</td>
-     * <td align="center">Start AF scan, Lens now moving</td>
-     * </tr>
-     * <tr>
-     * <td align="center">PASSIVE_UNFOCUSED</td>
-     * <td align="center">Camera device initiates new scan</td>
-     * <td align="center">PASSIVE_SCAN</td>
-     * <td align="center">Start AF scan, Lens now moving</td>
-     * </tr>
-     * <tr>
-     * <td align="center">PASSIVE_FOCUSED</td>
-     * <td align="center">AF_TRIGGER</td>
-     * <td align="center">FOCUSED_LOCKED</td>
-     * <td align="center">Immediate trans. Lens now locked</td>
-     * </tr>
-     * <tr>
-     * <td align="center">PASSIVE_UNFOCUSED</td>
-     * <td align="center">AF_TRIGGER</td>
-     * <td align="center">NOT_FOCUSED_LOCKED</td>
-     * <td align="center">Immediate trans. Lens now locked</td>
-     * </tr>
-     * <tr>
-     * <td align="center">FOCUSED_LOCKED</td>
-     * <td align="center">AF_TRIGGER</td>
-     * <td align="center">FOCUSED_LOCKED</td>
-     * <td align="center">No effect</td>
-     * </tr>
-     * <tr>
-     * <td align="center">FOCUSED_LOCKED</td>
-     * <td align="center">AF_CANCEL</td>
-     * <td align="center">INACTIVE</td>
-     * <td align="center">Restart AF scan</td>
-     * </tr>
-     * <tr>
-     * <td align="center">NOT_FOCUSED_LOCKED</td>
-     * <td align="center">AF_TRIGGER</td>
-     * <td align="center">NOT_FOCUSED_LOCKED</td>
-     * <td align="center">No effect</td>
-     * </tr>
-     * <tr>
-     * <td align="center">NOT_FOCUSED_LOCKED</td>
-     * <td align="center">AF_CANCEL</td>
-     * <td align="center">INACTIVE</td>
-     * <td align="center">Restart AF scan</td>
-     * </tr>
-     * </tbody>
-     * </table>
+     * <p>State            | Transition Cause                     | New State          | Notes
+     * :-----------------:|:------------------------------------:|:------------------:|:--------------:
+     * INACTIVE           | Camera device initiates new scan     | PASSIVE_SCAN       | Start AF scan, Lens now moving
+     * INACTIVE           | AF_TRIGGER                           | NOT_FOCUSED_LOCKED | AF state query, Lens now locked
+     * PASSIVE_SCAN       | Camera device completes current scan | PASSIVE_FOCUSED    | End AF scan, Lens now locked
+     * PASSIVE_SCAN       | Camera device fails current scan     | PASSIVE_UNFOCUSED  | End AF scan, Lens now locked
+     * PASSIVE_SCAN       | AF_TRIGGER                           | FOCUSED_LOCKED     | Eventual transition once the focus is good. Lens now locked
+     * PASSIVE_SCAN       | AF_TRIGGER                           | NOT_FOCUSED_LOCKED | Eventual transition if cannot find focus. Lens now locked
+     * PASSIVE_SCAN       | AF_CANCEL                            | INACTIVE           | Reset lens position, Lens now locked
+     * PASSIVE_FOCUSED    | Camera device initiates new scan     | PASSIVE_SCAN       | Start AF scan, Lens now moving
+     * PASSIVE_UNFOCUSED  | Camera device initiates new scan     | PASSIVE_SCAN       | Start AF scan, Lens now moving
+     * PASSIVE_FOCUSED    | AF_TRIGGER                           | FOCUSED_LOCKED     | Immediate trans. Lens now locked
+     * PASSIVE_UNFOCUSED  | AF_TRIGGER                           | NOT_FOCUSED_LOCKED | Immediate trans. Lens now locked
+     * FOCUSED_LOCKED     | AF_TRIGGER                           | FOCUSED_LOCKED     | No effect
+     * FOCUSED_LOCKED     | AF_CANCEL                            | INACTIVE           | Restart AF scan
+     * NOT_FOCUSED_LOCKED | AF_TRIGGER                           | NOT_FOCUSED_LOCKED | No effect
+     * NOT_FOCUSED_LOCKED | AF_CANCEL                            | INACTIVE           | Restart AF scan</p>
      * <p>When switch between AF_MODE_CONTINUOUS_* (CAF modes) and AF_MODE_AUTO/AF_MODE_MACRO
      * (AUTO modes), the initial INACTIVE or PASSIVE_SCAN states may be skipped by the
      * camera device. When a trigger is included in a mode switch request, the trigger
      * will be evaluated in the context of the new mode in the request.
      * See below table for examples:</p>
-     * <table>
-     * <thead>
-     * <tr>
-     * <th align="center">State</th>
-     * <th align="center">Transition Cause</th>
-     * <th align="center">New State</th>
-     * <th align="center">Notes</th>
-     * </tr>
-     * </thead>
-     * <tbody>
-     * <tr>
-     * <td align="center">any state</td>
-     * <td align="center">CAF--&gt;AUTO mode switch</td>
-     * <td align="center">INACTIVE</td>
-     * <td align="center">Mode switch without trigger, initial state must be INACTIVE</td>
-     * </tr>
-     * <tr>
-     * <td align="center">any state</td>
-     * <td align="center">CAF--&gt;AUTO mode switch with AF_TRIGGER</td>
-     * <td align="center">trigger-reachable states from INACTIVE</td>
-     * <td align="center">Mode switch with trigger, INACTIVE is skipped</td>
-     * </tr>
-     * <tr>
-     * <td align="center">any state</td>
-     * <td align="center">AUTO--&gt;CAF mode switch</td>
-     * <td align="center">passively reachable states from INACTIVE</td>
-     * <td align="center">Mode switch without trigger, passive transient state is skipped</td>
-     * </tr>
-     * </tbody>
-     * </table>
+     * <p>State      | Transition Cause                       | New State                                | Notes
+     * :-----------:|:--------------------------------------:|:----------------------------------------:|:--------------:
+     * any state    | CAF--&gt;AUTO mode switch                 | INACTIVE                                 | Mode switch without trigger, initial state must be INACTIVE
+     * any state    | CAF--&gt;AUTO mode switch with AF_TRIGGER | trigger-reachable states from INACTIVE   | Mode switch with trigger, INACTIVE is skipped
+     * any state    | AUTO--&gt;CAF mode switch                 | passively reachable states from INACTIVE | Mode switch without trigger, passive transient state is skipped</p>
      *
      * @see ACAMERA_CONTROL_AF_MODE
      * @see ACAMERA_CONTROL_MODE
@@ -1813,109 +1373,29 @@
      * be good to use.</p>
      * <p>Below are state transition tables for different AWB modes.</p>
      * <p>When <code>ACAMERA_CONTROL_AWB_MODE != AWB_MODE_AUTO</code>:</p>
-     * <table>
-     * <thead>
-     * <tr>
-     * <th align="center">State</th>
-     * <th align="center">Transition Cause</th>
-     * <th align="center">New State</th>
-     * <th align="center">Notes</th>
-     * </tr>
-     * </thead>
-     * <tbody>
-     * <tr>
-     * <td align="center">INACTIVE</td>
-     * <td align="center"></td>
-     * <td align="center">INACTIVE</td>
-     * <td align="center">Camera device auto white balance algorithm is disabled</td>
-     * </tr>
-     * </tbody>
-     * </table>
+     * <p>State       | Transition Cause | New State | Notes
+     * :------------:|:----------------:|:---------:|:-----------------------:
+     * INACTIVE      |                  | INACTIVE  | Camera device auto white balance algorithm is disabled</p>
      * <p>When ACAMERA_CONTROL_AWB_MODE is AWB_MODE_AUTO:</p>
-     * <table>
-     * <thead>
-     * <tr>
-     * <th align="center">State</th>
-     * <th align="center">Transition Cause</th>
-     * <th align="center">New State</th>
-     * <th align="center">Notes</th>
-     * </tr>
-     * </thead>
-     * <tbody>
-     * <tr>
-     * <td align="center">INACTIVE</td>
-     * <td align="center">Camera device initiates AWB scan</td>
-     * <td align="center">SEARCHING</td>
-     * <td align="center">Values changing</td>
-     * </tr>
-     * <tr>
-     * <td align="center">INACTIVE</td>
-     * <td align="center">ACAMERA_CONTROL_AWB_LOCK is ON</td>
-     * <td align="center">LOCKED</td>
-     * <td align="center">Values locked</td>
-     * </tr>
-     * <tr>
-     * <td align="center">SEARCHING</td>
-     * <td align="center">Camera device finishes AWB scan</td>
-     * <td align="center">CONVERGED</td>
-     * <td align="center">Good values, not changing</td>
-     * </tr>
-     * <tr>
-     * <td align="center">SEARCHING</td>
-     * <td align="center">ACAMERA_CONTROL_AWB_LOCK is ON</td>
-     * <td align="center">LOCKED</td>
-     * <td align="center">Values locked</td>
-     * </tr>
-     * <tr>
-     * <td align="center">CONVERGED</td>
-     * <td align="center">Camera device initiates AWB scan</td>
-     * <td align="center">SEARCHING</td>
-     * <td align="center">Values changing</td>
-     * </tr>
-     * <tr>
-     * <td align="center">CONVERGED</td>
-     * <td align="center">ACAMERA_CONTROL_AWB_LOCK is ON</td>
-     * <td align="center">LOCKED</td>
-     * <td align="center">Values locked</td>
-     * </tr>
-     * <tr>
-     * <td align="center">LOCKED</td>
-     * <td align="center">ACAMERA_CONTROL_AWB_LOCK is OFF</td>
-     * <td align="center">SEARCHING</td>
-     * <td align="center">Values not good after unlock</td>
-     * </tr>
-     * </tbody>
-     * </table>
+     * <p>State        | Transition Cause                 | New State     | Notes
+     * :-------------:|:--------------------------------:|:-------------:|:-----------------:
+     * INACTIVE       | Camera device initiates AWB scan | SEARCHING     | Values changing
+     * INACTIVE       | ACAMERA_CONTROL_AWB_LOCK is ON    | LOCKED        | Values locked
+     * SEARCHING      | Camera device finishes AWB scan  | CONVERGED     | Good values, not changing
+     * SEARCHING      | ACAMERA_CONTROL_AWB_LOCK is ON    | LOCKED        | Values locked
+     * CONVERGED      | Camera device initiates AWB scan | SEARCHING     | Values changing
+     * CONVERGED      | ACAMERA_CONTROL_AWB_LOCK is ON    | LOCKED        | Values locked
+     * LOCKED         | ACAMERA_CONTROL_AWB_LOCK is OFF   | SEARCHING     | Values not good after unlock</p>
      * <p>For the above table, the camera device may skip reporting any state changes that happen
      * without application intervention (i.e. mode switch, trigger, locking). Any state that
      * can be skipped in that manner is called a transient state.</p>
      * <p>For example, for this AWB mode (AWB_MODE_AUTO), in addition to the state transitions
      * listed in above table, it is also legal for the camera device to skip one or more
      * transient states between two results. See below table for examples:</p>
-     * <table>
-     * <thead>
-     * <tr>
-     * <th align="center">State</th>
-     * <th align="center">Transition Cause</th>
-     * <th align="center">New State</th>
-     * <th align="center">Notes</th>
-     * </tr>
-     * </thead>
-     * <tbody>
-     * <tr>
-     * <td align="center">INACTIVE</td>
-     * <td align="center">Camera device finished AWB scan</td>
-     * <td align="center">CONVERGED</td>
-     * <td align="center">Values are already good, transient states are skipped by camera device.</td>
-     * </tr>
-     * <tr>
-     * <td align="center">LOCKED</td>
-     * <td align="center">ACAMERA_CONTROL_AWB_LOCK is OFF</td>
-     * <td align="center">CONVERGED</td>
-     * <td align="center">Values good after unlock, transient states are skipped by camera device.</td>
-     * </tr>
-     * </tbody>
-     * </table>
+     * <p>State        | Transition Cause                 | New State     | Notes
+     * :-------------:|:--------------------------------:|:-------------:|:-----------------:
+     * INACTIVE       | Camera device finished AWB scan  | CONVERGED     | Values are already good, transient states are skipped by camera device.
+     * LOCKED         | ACAMERA_CONTROL_AWB_LOCK is OFF   | CONVERGED     | Values good after unlock, transient states are skipped by camera device.</p>
      *
      * @see ACAMERA_CONTROL_AWB_LOCK
      * @see ACAMERA_CONTROL_AWB_MODE
@@ -2326,14 +1806,14 @@
      * <p>When an ACAMERA_JPEG_ORIENTATION of non-zero degree is requested,
      * the camera device will handle thumbnail rotation in one of the following ways:</p>
      * <ul>
-     * <li>Set the {@link android.media.ExifInterface#TAG_ORIENTATION EXIF orientation flag}
+     * <li>Set the
+     *   <a href="https://developer.android.com/reference/android/media/ExifInterface.html#TAG_ORIENTATION">EXIF orientation flag</a>
      *   and keep jpeg and thumbnail image data unrotated.</li>
      * <li>Rotate the jpeg and thumbnail image data and not set
-     *   {@link android.media.ExifInterface#TAG_ORIENTATION EXIF orientation flag}. In this
-     *   case, LIMITED or FULL hardware level devices will report rotated thumnail size in
-     *   capture result, so the width and height will be interchanged if 90 or 270 degree
-     *   orientation is requested. LEGACY device will always report unrotated thumbnail
-     *   size.</li>
+     *   <a href="https://developer.android.com/reference/android/media/ExifInterface.html#TAG_ORIENTATION">EXIF orientation flag</a>.
+     *   In this case, LIMITED or FULL hardware level devices will report rotated thumbnail size
+     *   in capture result, so the width and height will be interchanged if 90 or 270 degree
+     *   orientation is requested. LEGACY device will always report unrotated thumbnail size.</li>
      * </ul>
      *
      * @see ACAMERA_JPEG_ORIENTATION
@@ -2568,9 +2048,9 @@
      * <p>The position of the camera device's lens optical center,
      * as a three-dimensional vector <code>(x,y,z)</code>, relative to the
      * optical center of the largest camera device facing in the
-     * same direction as this camera, in the {@link
-     * android.hardware.SensorEvent Android sensor coordinate
-     * axes}. Note that only the axis definitions are shared with
+     * same direction as this camera, in the
+     * <a href="https://developer.android.com/reference/android/hardware/SensorEvent.html">Android sensor coordinate axes</a>.
+     * Note that only the axis definitions are shared with
      * the sensor coordinate system, but not the origin.</p>
      * <p>If this device is the largest or only camera device with a
      * given facing, then this position will be <code>(0, 0, 0)</code>; a
@@ -2982,14 +2462,11 @@
      * into the 3 stream types as below:</p>
      * <ul>
      * <li>Processed (but stalling): any non-RAW format with a stallDurations &gt; 0.
-     *   Typically {@link android.graphics.ImageFormat#JPEG JPEG format}.</li>
-     * <li>Raw formats: {@link android.graphics.ImageFormat#RAW_SENSOR RAW_SENSOR}, {@link
-     *   android.graphics.ImageFormat#RAW10 RAW10}, or {@link android.graphics.ImageFormat#RAW12
-     *   RAW12}.</li>
+     *   Typically {@link AIMAGE_FORMAT_JPEG} format.</li>
+     * <li>Raw formats: {@link AIMAGE_FORMAT_RAW16}, {@link AIMAGE_FORMAT_RAW10}, or
+     *   {@link AIMAGE_FORMAT_RAW12}.</li>
      * <li>Processed (but not-stalling): any non-RAW format without a stall duration.
-     *   Typically {@link android.graphics.ImageFormat#YUV_420_888 YUV_420_888},
-     *   {@link android.graphics.ImageFormat#NV21 NV21}, or
-     *   {@link android.graphics.ImageFormat#YV12 YV12}.</li>
+     *   Typically {@link AIMAGE_FORMAT_YUV_420_888}.</li>
      * </ul>
      *
      * @see ACAMERA_SCALER_AVAILABLE_STREAM_CONFIGURATIONS
@@ -2997,29 +2474,6 @@
     ACAMERA_REQUEST_MAX_NUM_OUTPUT_STREAMS =                    // int32[3]
             ACAMERA_REQUEST_START + 6,
     /**
-     * <p>The maximum numbers of any type of input streams
-     * that can be configured and used simultaneously by a camera device.</p>
-     *
-     * <p>This tag may appear in:</p>
-     * <ul>
-     *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
-     * </ul>
-     *
-     * <p>When set to 0, it means no input stream is supported.</p>
-     * <p>The image format for a input stream can be any supported format returned by {@link
-     * android.hardware.camera2.params.StreamConfigurationMap#getInputFormats}. When using an
-     * input stream, there must be at least one output stream configured to to receive the
-     * reprocessed images.</p>
-     * <p>When an input stream and some output streams are used in a reprocessing request,
-     * only the input buffer will be used to produce these output stream buffers, and a
-     * new sensor image will not be captured.</p>
-     * <p>For example, for Zero Shutter Lag (ZSL) still capture use case, the input
-     * stream image format will be PRIVATE, the associated output stream image format
-     * should be JPEG.</p>
-     */
-    ACAMERA_REQUEST_MAX_NUM_INPUT_STREAMS =                     // int32
-            ACAMERA_REQUEST_START + 8,
-    /**
      * <p>Specifies the number of pipeline stages the frame went
      * through from when it was exposed to when the final completed result
      * was available to the framework.</p>
@@ -3125,7 +2579,7 @@
             ACAMERA_REQUEST_START + 12,
     /**
      * <p>A list of all keys that the camera device has available
-     * to use with {@link android.hardware.camera2.CaptureRequest}.</p>
+     * to use with {@link ACaptureRequest}.</p>
      *
      * <p>This tag may appear in:</p>
      * <ul>
@@ -3146,7 +2600,8 @@
             ACAMERA_REQUEST_START + 13,
     /**
      * <p>A list of all keys that the camera device has available
-     * to use with {@link android.hardware.camera2.CaptureResult}.</p>
+     * to query with {@link ACameraMetadata} from
+     * {@link ACameraCaptureSession_captureCallback_result}.</p>
      *
      * <p>This tag may appear in:</p>
      * <ul>
@@ -3176,7 +2631,8 @@
             ACAMERA_REQUEST_START + 14,
     /**
      * <p>A list of all keys that the camera device has available
-     * to use with {@link android.hardware.camera2.CameraCharacteristics}.</p>
+     * to query with {@link ACameraMetadata} from
+     * {@link ACameraManager_getCameraCharacteristics}.</p>
      *
      * <p>This tag may appear in:</p>
      * <ul>
@@ -3204,6 +2660,7 @@
      * </ul>
      *
      * <p>This control can be used to implement digital zoom.</p>
+     * <p>The data representation is int[4], which maps to (left, top, width, height).</p>
      * <p>The crop region coordinate system is based off
      * ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE, with <code>(0, 0)</code> being the
      * top-left corner of the sensor active array.</p>
@@ -3286,66 +2743,16 @@
      * <p>The following table describes the minimum required output stream
      * configurations based on the hardware level
      * (ACAMERA_INFO_SUPPORTED_HARDWARE_LEVEL):</p>
-     * <table>
-     * <thead>
-     * <tr>
-     * <th align="center">Format</th>
-     * <th align="center">Size</th>
-     * <th align="center">Hardware Level</th>
-     * <th align="center">Notes</th>
-     * </tr>
-     * </thead>
-     * <tbody>
-     * <tr>
-     * <td align="center">JPEG</td>
-     * <td align="center">ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE</td>
-     * <td align="center">Any</td>
-     * <td align="center"></td>
-     * </tr>
-     * <tr>
-     * <td align="center">JPEG</td>
-     * <td align="center">1920x1080 (1080p)</td>
-     * <td align="center">Any</td>
-     * <td align="center">if 1080p &lt;= activeArraySize</td>
-     * </tr>
-     * <tr>
-     * <td align="center">JPEG</td>
-     * <td align="center">1280x720 (720)</td>
-     * <td align="center">Any</td>
-     * <td align="center">if 720p &lt;= activeArraySize</td>
-     * </tr>
-     * <tr>
-     * <td align="center">JPEG</td>
-     * <td align="center">640x480 (480p)</td>
-     * <td align="center">Any</td>
-     * <td align="center">if 480p &lt;= activeArraySize</td>
-     * </tr>
-     * <tr>
-     * <td align="center">JPEG</td>
-     * <td align="center">320x240 (240p)</td>
-     * <td align="center">Any</td>
-     * <td align="center">if 240p &lt;= activeArraySize</td>
-     * </tr>
-     * <tr>
-     * <td align="center">YUV_420_888</td>
-     * <td align="center">all output sizes available for JPEG</td>
-     * <td align="center">FULL</td>
-     * <td align="center"></td>
-     * </tr>
-     * <tr>
-     * <td align="center">YUV_420_888</td>
-     * <td align="center">all output sizes available for JPEG, up to the maximum video size</td>
-     * <td align="center">LIMITED</td>
-     * <td align="center"></td>
-     * </tr>
-     * <tr>
-     * <td align="center">IMPLEMENTATION_DEFINED</td>
-     * <td align="center">same as YUV_420_888</td>
-     * <td align="center">Any</td>
-     * <td align="center"></td>
-     * </tr>
-     * </tbody>
-     * </table>
+     * <p>Format         | Size                                         | Hardware Level | Notes
+     * :-------------:|:--------------------------------------------:|:--------------:|:--------------:
+     * JPEG           | ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE          | Any            |
+     * JPEG           | 1920x1080 (1080p)                            | Any            | if 1080p &lt;= activeArraySize
+     * JPEG           | 1280x720 (720p)                              | Any            | if 720p &lt;= activeArraySize
+     * JPEG           | 640x480 (480p)                               | Any            | if 480p &lt;= activeArraySize
+     * JPEG           | 320x240 (240p)                               | Any            | if 240p &lt;= activeArraySize
+     * YUV_420_888    | all output sizes available for JPEG          | FULL           |
+     * YUV_420_888    | all output sizes available for JPEG, up to the maximum video size | LIMITED        |
+     * IMPLEMENTATION_DEFINED | same as YUV_420_888                  | Any            |</p>
      * <p>Refer to ACAMERA_REQUEST_AVAILABLE_CAPABILITIES for additional
      * mandatory stream configurations on a per-capability basis.</p>
      *
@@ -3374,8 +2781,6 @@
      * <p>See ACAMERA_SENSOR_FRAME_DURATION and
      * ACAMERA_SCALER_AVAILABLE_STALL_DURATIONS for more details about
      * calculating the max frame rate.</p>
-     * <p>(Keep in sync with
-     * {@link android.hardware.camera2.params.StreamConfigurationMap#getOutputMinFrameDuration})</p>
      *
      * @see ACAMERA_SCALER_AVAILABLE_STALL_DURATIONS
      * @see ACAMERA_SENSOR_FRAME_DURATION
@@ -3432,21 +2837,19 @@
      * ignored).</p>
      * <p>The following formats may always have a stall duration:</p>
      * <ul>
-     * <li>{@link android.graphics.ImageFormat#JPEG}</li>
-     * <li>{@link android.graphics.ImageFormat#RAW_SENSOR}</li>
+     * <li>{@link AIMAGE_FORMAT_JPEG}</li>
+     * <li>{@link AIMAGE_FORMAT_RAW16}</li>
      * </ul>
      * <p>The following formats will never have a stall duration:</p>
      * <ul>
-     * <li>{@link android.graphics.ImageFormat#YUV_420_888}</li>
-     * <li>{@link android.graphics.ImageFormat#RAW10}</li>
+     * <li>{@link AIMAGE_FORMAT_YUV_420_888}</li>
+     * <li>{@link AIMAGE_FORMAT_RAW10}</li>
      * </ul>
      * <p>All other formats may or may not have an allowed stall duration on
      * a per-capability basis; refer to ACAMERA_REQUEST_AVAILABLE_CAPABILITIES
      * for more details.</p>
      * <p>See ACAMERA_SENSOR_FRAME_DURATION for more information about
      * calculating the max frame rate (absent stalls).</p>
-     * <p>(Keep up to date with
-     * {@link android.hardware.camera2.params.StreamConfigurationMap#getOutputStallDuration} )</p>
      *
      * @see ACAMERA_REQUEST_AVAILABLE_CAPABILITIES
      * @see ACAMERA_SENSOR_FRAME_DURATION
@@ -3545,8 +2948,8 @@
      * cannot process more than 1 capture at a time.</li>
      * </ul>
      * <p>The necessary information for the application, given the model above,
-     * is provided via the android.scaler.streamConfigurationMap field using
-     * {@link android.hardware.camera2.params.StreamConfigurationMap#getOutputMinFrameDuration}.
+     * is provided via
+     * {@link ACAMERA_SCALER_AVAILABLE_MIN_FRAME_DURATIONS}.
      * These are used to determine the maximum frame rate / minimum frame
      * duration that is possible for a given stream configuration.</p>
      * <p>Specifically, the application can use the following rules to
@@ -3556,8 +2959,7 @@
      * <li>Let the set of currently configured input/output streams
      * be called <code>S</code>.</li>
      * <li>Find the minimum frame durations for each stream in <code>S</code>, by looking
-     * it up in android.scaler.streamConfigurationMap using {@link
-     * android.hardware.camera2.params.StreamConfigurationMap#getOutputMinFrameDuration}
+     * it up in {@link ACAMERA_SCALER_AVAILABLE_MIN_FRAME_DURATIONS}
      * (with its respective size/format). Let this set of frame durations be
      * called <code>F</code>.</li>
      * <li>For any given request <code>R</code>, the minimum frame duration allowed
@@ -3565,7 +2967,7 @@
      * used in <code>R</code> be called <code>S_r</code>.</li>
      * </ol>
      * <p>If none of the streams in <code>S_r</code> have a stall time (listed in {@link
-     * android.hardware.camera2.params.StreamConfigurationMap#getOutputStallDuration}
+     * ACAMERA_SCALER_AVAILABLE_STALL_DURATIONS}
      * using its respective size/format), then the frame duration in <code>F</code>
      * determines the steady state frame rate that the application will get
      * if it uses <code>R</code> as a repeating request. Let this special kind of
@@ -3577,7 +2979,7 @@
      * if all buffers from the previous <code>Rstall</code> have already been
      * delivered.</p>
      * <p>For more details about stalling, see
-     * {@link android.hardware.camera2.params.StreamConfigurationMap#getOutputStallDuration}.</p>
+     * {@link ACAMERA_SCALER_AVAILABLE_STALL_DURATIONS}.</p>
      * <p>This control is only effective if ACAMERA_CONTROL_AE_MODE or ACAMERA_CONTROL_MODE is set to
      * OFF; otherwise the auto-exposure algorithm will override this value.</p>
      *
@@ -3909,8 +3311,9 @@
      * timestamps for other captures from the same camera device, but are
      * not guaranteed to be comparable to any other time source.</p>
      * <p>When ACAMERA_SENSOR_INFO_TIMESTAMP_SOURCE <code>==</code> REALTIME, the
-     * timestamps measure time in the same timebase as {@link
-     * android.os.SystemClock#elapsedRealtimeNanos}, and they can
+     * timestamps measure time in the same timebase as
+     * <a href="https://developer.android.com/reference/android/os/SystemClock.html#elapsedRealtimeNanos">elapsedRealtimeNanos</a>
+     * (or CLOCK_BOOTTIME), and they can
      * be compared to other timestamps from other subsystems that
      * are using that base.</p>
      * <p>For reprocessing, the timestamp will match the start of exposure of
@@ -4100,6 +3503,7 @@
      * optically shielded pixel areas. By blocking light, these pixels
      * provides a reliable black reference for black level compensation
      * in active array region.</p>
+     * <p>The data representation is int[4], which maps to (left, top, width, height).</p>
      * <p>This key provides a list of disjoint rectangles specifying the
      * regions of optically shielded (with metal shield) black pixel
      * regions if the camera device is capable of reading out these black
@@ -4149,7 +3553,7 @@
      * color channel listed in the CFA.</p>
      * <p>This key will be available if ACAMERA_SENSOR_OPTICAL_BLACK_REGIONS is
      * available or the camera device advertises this key via
-     * {@link android.hardware.camera2.CameraCharacteristics#getAvailableCaptureResultKeys}.</p>
+     * {@link ACAMERA_REQUEST_AVAILABLE_RESULT_KEYS}.</p>
      *
      * @see ACAMERA_SENSOR_BLACK_LEVEL_PATTERN
      * @see ACAMERA_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT
@@ -4173,7 +3577,7 @@
      * estimated white level for each frame.</p>
      * <p>This key will be available if ACAMERA_SENSOR_OPTICAL_BLACK_REGIONS is
      * available or the camera device advertises this key via
-     * {@link android.hardware.camera2.CameraCharacteristics#getAvailableCaptureRequestKeys}.</p>
+     * {@link ACAMERA_REQUEST_AVAILABLE_RESULT_KEYS}.</p>
      *
      * @see ACAMERA_SENSOR_BLACK_LEVEL_PATTERN
      * @see ACAMERA_SENSOR_INFO_WHITE_LEVEL
@@ -4200,6 +3604,7 @@
      * <p>This rectangle is defined relative to the full pixel array; (0,0) is the top-left of
      * the full pixel array, and the size of the full pixel array is given by
      * ACAMERA_SENSOR_INFO_PIXEL_ARRAY_SIZE.</p>
+     * <p>The data representation is int[4], which maps to (left, top, width, height).</p>
      * <p>The coordinate system for most other keys that list pixel coordinates, including
      * ACAMERA_SCALER_CROP_REGION, is defined relative to the active array rectangle given in
      * this field, with <code>(0, 0)</code> being the top-left of this rectangle.</p>
@@ -4272,7 +3677,7 @@
      * duration being clipped to the maximum. See that control for a full definition of frame
      * durations.</p>
      * <p>Refer to {@link
-     * android.hardware.camera2.params.StreamConfigurationMap#getOutputMinFrameDuration}
+     * ACAMERA_SCALER_AVAILABLE_MIN_FRAME_DURATIONS}
      * for the minimum frame duration values.</p>
      */
     ACAMERA_SENSOR_INFO_MAX_FRAME_DURATION =                    // int64
@@ -4307,7 +3712,7 @@
      * the raw buffers produced by this sensor.</p>
      * <p>If a camera device supports raw sensor formats, either this or
      * ACAMERA_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE is the maximum dimensions for the raw
-     * output formats listed in android.scaler.streamConfigurationMap (this depends on
+     * output formats listed in ACAMERA_SCALER_AVAILABLE_STREAM_CONFIGURATIONS (this depends on
      * whether or not the image sensor returns buffers containing pixels that are not
      * part of the active array region for blacklevel calibration or other purposes).</p>
      * <p>Some parts of the full pixel array may not receive light from the scene,
@@ -4391,6 +3796,7 @@
      *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
      * </ul>
      *
+     * <p>The data representation is int[4], which maps to (left, top, width, height).</p>
      * <p>This is the rectangle representing the size of the active region of the sensor (i.e.
      * the region that actually receives light from the scene) before any geometric correction
      * has been applied, and should be treated as the active region rectangle for any of the
@@ -4465,7 +3871,7 @@
      * camera device, and an identity lens shading map data will be provided
      * if <code>ACAMERA_STATISTICS_LENS_SHADING_MAP_MODE == ON</code>. For example, for lens
      * shading map with size of <code>[ 4, 3 ]</code>,
-     * the output ACAMERA_STATISTICS_LENS_SHADING_CORRECTION_MAP for this case will be an identity
+     * the output android.statistics.lensShadingCorrectionMap for this case will be an identity
      * map shown below:</p>
      * <pre><code>[ 1.0, 1.0, 1.0, 1.0,  1.0, 1.0, 1.0, 1.0,
      *  1.0, 1.0, 1.0, 1.0,  1.0, 1.0, 1.0, 1.0,
@@ -4477,7 +3883,7 @@
      * <p>When set to other modes, lens shading correction will be applied by the camera
      * device. Applications can request lens shading map data by setting
      * ACAMERA_STATISTICS_LENS_SHADING_MAP_MODE to ON, and then the camera device will provide lens
-     * shading map data in ACAMERA_STATISTICS_LENS_SHADING_CORRECTION_MAP; the returned shading map
+     * shading map data in android.statistics.lensShadingCorrectionMap; the returned shading map
      * data will be the one applied by the camera device for this capture request.</p>
      * <p>The shading map data may depend on the auto-exposure (AE) and AWB statistics, therefore
      * the reliability of the map data may be affected by the AE and AWB algorithms. When AE and
@@ -4487,7 +3893,6 @@
      *
      * @see ACAMERA_CONTROL_AE_MODE
      * @see ACAMERA_CONTROL_AWB_MODE
-     * @see ACAMERA_STATISTICS_LENS_SHADING_CORRECTION_MAP
      * @see ACAMERA_STATISTICS_LENS_SHADING_MAP_MODE
      */
     ACAMERA_SHADING_MODE =                                      // byte (enum)
@@ -4587,6 +3992,7 @@
      *   <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
      * </ul>
      *
+     * <p>The data representation is int[4], which maps to (left, top, width, height).</p>
      * <p>The coordinate system is that of ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE, with
      * <code>(0, 0)</code> being the top-left pixel of the active array.</p>
      * <p>Only available if ACAMERA_STATISTICS_FACE_DETECT_MODE != OFF</p>
@@ -4613,57 +4019,6 @@
             ACAMERA_STATISTICS_START + 7,
     /**
      * <p>The shading map is a low-resolution floating-point map
-     * that lists the coefficients used to correct for vignetting, for each
-     * Bayer color channel.</p>
-     *
-     * <p>This tag may appear in:</p>
-     * <ul>
-     *   <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
-     * </ul>
-     *
-     * <p>The least shaded section of the image should have a gain factor
-     * of 1; all other sections should have gains above 1.</p>
-     * <p>When ACAMERA_COLOR_CORRECTION_MODE = TRANSFORM_MATRIX, the map
-     * must take into account the colorCorrection settings.</p>
-     * <p>The shading map is for the entire active pixel array, and is not
-     * affected by the crop region specified in the request. Each shading map
-     * entry is the value of the shading compensation map over a specific
-     * pixel on the sensor.  Specifically, with a (N x M) resolution shading
-     * map, and an active pixel array size (W x H), shading map entry
-     * (x,y) ϵ (0 ... N-1, 0 ... M-1) is the value of the shading map at
-     * pixel ( ((W-1)/(N-1)) * x, ((H-1)/(M-1)) * y) for the four color channels.
-     * The map is assumed to be bilinearly interpolated between the sample points.</p>
-     * <p>The channel order is [R, Geven, Godd, B], where Geven is the green
-     * channel for the even rows of a Bayer pattern, and Godd is the odd rows.
-     * The shading map is stored in a fully interleaved format.</p>
-     * <p>The shading map should have on the order of 30-40 rows and columns,
-     * and must be smaller than 64x64.</p>
-     * <p>As an example, given a very small map defined as:</p>
-     * <pre><code>width,height = [ 4, 3 ]
-     * values =
-     * [ 1.3, 1.2, 1.15, 1.2,  1.2, 1.2, 1.15, 1.2,
-     *     1.1, 1.2, 1.2, 1.2,  1.3, 1.2, 1.3, 1.3,
-     *   1.2, 1.2, 1.25, 1.1,  1.1, 1.1, 1.1, 1.0,
-     *     1.0, 1.0, 1.0, 1.0,  1.2, 1.3, 1.25, 1.2,
-     *   1.3, 1.2, 1.2, 1.3,   1.2, 1.15, 1.1, 1.2,
-     *     1.2, 1.1, 1.0, 1.2,  1.3, 1.15, 1.2, 1.3 ]
-     * </code></pre>
-     * <p>The low-resolution scaling map images for each channel are
-     * (displayed using nearest-neighbor interpolation):</p>
-     * <p><img alt="Red lens shading map" src="../../../../images/camera2/metadata/android.statistics.lensShadingMap/red_shading.png" />
-     * <img alt="Green (even rows) lens shading map" src="../../../../images/camera2/metadata/android.statistics.lensShadingMap/green_e_shading.png" />
-     * <img alt="Green (odd rows) lens shading map" src="../../../../images/camera2/metadata/android.statistics.lensShadingMap/green_o_shading.png" />
-     * <img alt="Blue lens shading map" src="../../../../images/camera2/metadata/android.statistics.lensShadingMap/blue_shading.png" /></p>
-     * <p>As a visualization only, inverting the full-color map to recover an
-     * image of a gray wall (using bicubic interpolation for visual quality) as captured by the sensor gives:</p>
-     * <p><img alt="Image of a uniform white wall (inverse shading map)" src="../../../../images/camera2/metadata/android.statistics.lensShadingMap/inv_shading.png" /></p>
-     *
-     * @see ACAMERA_COLOR_CORRECTION_MODE
-     */
-    ACAMERA_STATISTICS_LENS_SHADING_CORRECTION_MAP =            // byte
-            ACAMERA_STATISTICS_START + 10,
-    /**
-     * <p>The shading map is a low-resolution floating-point map
      * that lists the coefficients used to correct for vignetting and color shading,
      * for each Bayer color channel of RAW image data.</p>
      *
@@ -4672,20 +4027,21 @@
      *   <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
      * </ul>
      *
-     * <p>The lens shading correction is defined as a full shading correction that
-     * corrects both color shading for the output non-RAW images. After the
-     * shading map is applied, the output non-RAW images will be flat-field images
-     * for flat scenes under uniform illumination.</p>
-     * <p>When there is no lens shading correction applied to RAW output images
-     * (ACAMERA_SENSOR_INFO_LENS_SHADING_APPLIED <code>==</code> false), this map is a full lens
-     * shading correction map; when there is some lens shading correction applied
-     * to the RAW output image (ACAMERA_SENSOR_INFO_LENS_SHADING_APPLIED <code>==</code> true),
-     * this map reports the remaining lens shading correction map that needs to be
-     * applied to get fully shading corrected images.</p>
-     * <p>For a full shading correction map, the least shaded section of the image
-     * should have a gain factor of 1; all other sections should have gains above 1.</p>
+     * <p>The map provided here is the same map that is used by the camera device to
+     * correct both color shading and vignetting for output non-RAW images.</p>
+     * <p>When there is no lens shading correction applied to RAW
+     * output images (ACAMERA_SENSOR_INFO_LENS_SHADING_APPLIED <code>==</code>
+     * false), this map is the complete lens shading correction
+     * map; when there is some lens shading correction applied to
+     * the RAW output image (ACAMERA_SENSOR_INFO_LENS_SHADING_APPLIED <code>==</code> true), this map reports the remaining lens shading
+     * correction map that needs to be applied to get shading
+     * corrected images that match the camera device's output for
+     * non-RAW formats.</p>
+     * <p>For a complete shading correction map, the least shaded
+     * section of the image will have a gain factor of 1; all
+     * other sections will have gains above 1.</p>
      * <p>When ACAMERA_COLOR_CORRECTION_MODE = TRANSFORM_MATRIX, the map
-     * must take into account the colorCorrection settings.</p>
+     * will take into account the colorCorrection settings.</p>
      * <p>The shading map is for the entire active pixel array, and is not
      * affected by the crop region specified in the request. Each shading map
      * entry is the value of the shading compensation map over a specific
@@ -4698,8 +4054,8 @@
      * channel for the even rows of a Bayer pattern, and Godd is the odd rows.
      * The shading map is stored in a fully interleaved format, and its size
      * is provided in the camera static metadata by ACAMERA_LENS_INFO_SHADING_MAP_SIZE.</p>
-     * <p>The shading map should have on the order of 30-40 rows and columns,
-     * and must be smaller than 64x64.</p>
+     * <p>The shading map will generally have on the order of 30-40 rows and columns,
+     * and will be smaller than 64x64.</p>
      * <p>As an example, given a very small map defined as:</p>
      * <pre><code>ACAMERA_LENS_INFO_SHADING_MAP_SIZE = [ 4, 3 ]
      * ACAMERA_STATISTICS_LENS_SHADING_MAP =
@@ -4712,14 +4068,14 @@
      * </code></pre>
      * <p>The low-resolution scaling map images for each channel are
      * (displayed using nearest-neighbor interpolation):</p>
-     * <p><img alt="Red lens shading map" src="../../../../images/camera2/metadata/android.statistics.lensShadingMap/red_shading.png" />
-     * <img alt="Green (even rows) lens shading map" src="../../../../images/camera2/metadata/android.statistics.lensShadingMap/green_e_shading.png" />
-     * <img alt="Green (odd rows) lens shading map" src="../../../../images/camera2/metadata/android.statistics.lensShadingMap/green_o_shading.png" />
-     * <img alt="Blue lens shading map" src="../../../../images/camera2/metadata/android.statistics.lensShadingMap/blue_shading.png" /></p>
+     * <p><img alt="Red lens shading map" src="../images/camera2/metadata/android.statistics.lensShadingMap/red_shading.png" />
+     * <img alt="Green (even rows) lens shading map" src="../images/camera2/metadata/android.statistics.lensShadingMap/green_e_shading.png" />
+     * <img alt="Green (odd rows) lens shading map" src="../images/camera2/metadata/android.statistics.lensShadingMap/green_o_shading.png" />
+     * <img alt="Blue lens shading map" src="../images/camera2/metadata/android.statistics.lensShadingMap/blue_shading.png" /></p>
      * <p>As a visualization only, inverting the full-color map to recover an
      * image of a gray wall (using bicubic interpolation for visual quality)
      * as captured by the sensor gives:</p>
-     * <p><img alt="Image of a uniform white wall (inverse shading map)" src="../../../../images/camera2/metadata/android.statistics.lensShadingMap/inv_shading.png" /></p>
+     * <p><img alt="Image of a uniform white wall (inverse shading map)" src="../images/camera2/metadata/android.statistics.lensShadingMap/inv_shading.png" /></p>
      * <p>Note that the RAW image data might be subject to lens shading
      * correction not reported on this map. Query
      * ACAMERA_SENSOR_INFO_LENS_SHADING_APPLIED to see if RAW image data has subject
@@ -4944,11 +4300,11 @@
      * <p>Linear mapping:</p>
      * <pre><code>ACAMERA_TONEMAP_CURVE_RED = [ 0, 0, 1.0, 1.0 ]
      * </code></pre>
-     * <p><img alt="Linear mapping curve" src="../../../../images/camera2/metadata/android.tonemap.curveRed/linear_tonemap.png" /></p>
+     * <p><img alt="Linear mapping curve" src="../images/camera2/metadata/android.tonemap.curveRed/linear_tonemap.png" /></p>
      * <p>Invert mapping:</p>
      * <pre><code>ACAMERA_TONEMAP_CURVE_RED = [ 0, 1.0, 1.0, 0 ]
      * </code></pre>
-     * <p><img alt="Inverting mapping curve" src="../../../../images/camera2/metadata/android.tonemap.curveRed/inverse_tonemap.png" /></p>
+     * <p><img alt="Inverting mapping curve" src="../images/camera2/metadata/android.tonemap.curveRed/inverse_tonemap.png" /></p>
      * <p>Gamma 1/2.2 mapping, with 16 control points:</p>
      * <pre><code>ACAMERA_TONEMAP_CURVE_RED = [
      *   0.0000, 0.0000, 0.0667, 0.2920, 0.1333, 0.4002, 0.2000, 0.4812,
@@ -4956,7 +4312,7 @@
      *   0.5333, 0.7515, 0.6000, 0.7928, 0.6667, 0.8317, 0.7333, 0.8685,
      *   0.8000, 0.9035, 0.8667, 0.9370, 0.9333, 0.9691, 1.0000, 1.0000 ]
      * </code></pre>
-     * <p><img alt="Gamma = 1/2.2 tonemapping curve" src="../../../../images/camera2/metadata/android.tonemap.curveRed/gamma_tonemap.png" /></p>
+     * <p><img alt="Gamma = 1/2.2 tonemapping curve" src="../images/camera2/metadata/android.tonemap.curveRed/gamma_tonemap.png" /></p>
      * <p>Standard sRGB gamma mapping, per IEC 61966-2-1:1999, with 16 control points:</p>
      * <pre><code>ACAMERA_TONEMAP_CURVE_RED = [
      *   0.0000, 0.0000, 0.0667, 0.2864, 0.1333, 0.4007, 0.2000, 0.4845,
@@ -4964,7 +4320,7 @@
      *   0.5333, 0.7569, 0.6000, 0.7977, 0.6667, 0.8360, 0.7333, 0.8721,
      *   0.8000, 0.9063, 0.8667, 0.9389, 0.9333, 0.9701, 1.0000, 1.0000 ]
      * </code></pre>
-     * <p><img alt="sRGB tonemapping curve" src="../../../../images/camera2/metadata/android.tonemap.curveRed/srgb_tonemap.png" /></p>
+     * <p><img alt="sRGB tonemapping curve" src="../images/camera2/metadata/android.tonemap.curveRed/srgb_tonemap.png" /></p>
      *
      * @see ACAMERA_TONEMAP_CURVE_RED
      * @see ACAMERA_TONEMAP_MAX_CURVE_POINTS
@@ -5081,9 +4437,9 @@
      *
      * <p>The tonemap curve will be defined by specified standard.</p>
      * <p>sRGB (approximated by 16 control points):</p>
-     * <p><img alt="sRGB tonemapping curve" src="../../../../images/camera2/metadata/android.tonemap.curveRed/srgb_tonemap.png" /></p>
+     * <p><img alt="sRGB tonemapping curve" src="../images/camera2/metadata/android.tonemap.curveRed/srgb_tonemap.png" /></p>
      * <p>Rec. 709 (approximated by 16 control points):</p>
-     * <p><img alt="Rec. 709 tonemapping curve" src="../../../../images/camera2/metadata/android.tonemap.curveRed/rec709_tonemap.png" /></p>
+     * <p><img alt="Rec. 709 tonemapping curve" src="../images/camera2/metadata/android.tonemap.curveRed/rec709_tonemap.png" /></p>
      * <p>Note that above figures show a 16 control points approximation of preset
      * curves. Camera devices may apply a different approximation to the curve.</p>
      */
@@ -5131,7 +4487,7 @@
      * <p>See the individual level enums for full descriptions of the supported capabilities.  The
      * ACAMERA_REQUEST_AVAILABLE_CAPABILITIES entry describes the device's capabilities at a
      * finer-grain level, if needed. In addition, many controls have their available settings or
-     * ranges defined in individual {@link android.hardware.camera2.CameraCharacteristics} entries.</p>
+     * ranges defined in individual metadata tag entries in this document.</p>
      * <p>Some features are not part of any particular hardware level or capability and must be
      * queried separately. These include:</p>
      * <ul>
@@ -5303,8 +4659,6 @@
      * <p>See ACAMERA_SENSOR_FRAME_DURATION and
      * ACAMERA_SCALER_AVAILABLE_STALL_DURATIONS for more details about
      * calculating the max frame rate.</p>
-     * <p>(Keep in sync with {@link
-     * android.hardware.camera2.params.StreamConfigurationMap#getOutputMinFrameDuration})</p>
      *
      * @see ACAMERA_SCALER_AVAILABLE_STALL_DURATIONS
      * @see ACAMERA_SENSOR_FRAME_DURATION
@@ -6160,91 +5514,6 @@
     ACAMERA_CONTROL_SCENE_MODE_BARCODE                               = 16,
 
     /**
-     * <p>This is deprecated, please use {@link
-     * android.hardware.camera2.CameraDevice#createConstrainedHighSpeedCaptureSession}
-     * and {@link
-     * android.hardware.camera2.CameraConstrainedHighSpeedCaptureSession#createHighSpeedRequestList}
-     * for high speed video recording.</p>
-     * <p>Optimized for high speed video recording (frame rate &gt;=60fps) use case.</p>
-     * <p>The supported high speed video sizes and fps ranges are specified in
-     * android.control.availableHighSpeedVideoConfigurations. To get desired
-     * output frame rates, the application is only allowed to select video size
-     * and fps range combinations listed in this static metadata. The fps range
-     * can be control via ACAMERA_CONTROL_AE_TARGET_FPS_RANGE.</p>
-     * <p>In this mode, the camera device will override aeMode, awbMode, and afMode to
-     * ON, ON, and CONTINUOUS_VIDEO, respectively. All post-processing block mode
-     * controls will be overridden to be FAST. Therefore, no manual control of capture
-     * and post-processing parameters is possible. All other controls operate the
-     * same as when ACAMERA_CONTROL_MODE == AUTO. This means that all other
-     * ACAMERA_CONTROL_* fields continue to work, such as</p>
-     * <ul>
-     * <li>ACAMERA_CONTROL_AE_TARGET_FPS_RANGE</li>
-     * <li>ACAMERA_CONTROL_AE_EXPOSURE_COMPENSATION</li>
-     * <li>ACAMERA_CONTROL_AE_LOCK</li>
-     * <li>ACAMERA_CONTROL_AWB_LOCK</li>
-     * <li>ACAMERA_CONTROL_EFFECT_MODE</li>
-     * <li>ACAMERA_CONTROL_AE_REGIONS</li>
-     * <li>ACAMERA_CONTROL_AF_REGIONS</li>
-     * <li>ACAMERA_CONTROL_AWB_REGIONS</li>
-     * <li>ACAMERA_CONTROL_AF_TRIGGER</li>
-     * <li>ACAMERA_CONTROL_AE_PRECAPTURE_TRIGGER</li>
-     * </ul>
-     * <p>Outside of ACAMERA_CONTROL_*, the following controls will work:</p>
-     * <ul>
-     * <li>ACAMERA_FLASH_MODE (automatic flash for still capture will not work since aeMode is ON)</li>
-     * <li>ACAMERA_LENS_OPTICAL_STABILIZATION_MODE (if it is supported)</li>
-     * <li>ACAMERA_SCALER_CROP_REGION</li>
-     * <li>ACAMERA_STATISTICS_FACE_DETECT_MODE</li>
-     * </ul>
-     * <p>For high speed recording use case, the actual maximum supported frame rate may
-     * be lower than what camera can output, depending on the destination Surfaces for
-     * the image data. For example, if the destination surface is from video encoder,
-     * the application need check if the video encoder is capable of supporting the
-     * high frame rate for a given video size, or it will end up with lower recording
-     * frame rate. If the destination surface is from preview window, the preview frame
-     * rate will be bounded by the screen refresh rate.</p>
-     * <p>The camera device will only support up to 2 output high speed streams
-     * (processed non-stalling format defined in ACAMERA_REQUEST_MAX_NUM_OUTPUT_STREAMS)
-     * in this mode. This control will be effective only if all of below conditions are true:</p>
-     * <ul>
-     * <li>The application created no more than maxNumHighSpeedStreams processed non-stalling
-     * format output streams, where maxNumHighSpeedStreams is calculated as
-     * min(2, ACAMERA_REQUEST_MAX_NUM_OUTPUT_STREAMS[Processed (but not-stalling)]).</li>
-     * <li>The stream sizes are selected from the sizes reported by
-     * android.control.availableHighSpeedVideoConfigurations.</li>
-     * <li>No processed non-stalling or raw streams are configured.</li>
-     * </ul>
-     * <p>When above conditions are NOT satistied, the controls of this mode and
-     * ACAMERA_CONTROL_AE_TARGET_FPS_RANGE will be ignored by the camera device,
-     * the camera device will fall back to ACAMERA_CONTROL_MODE <code>==</code> AUTO,
-     * and the returned capture result metadata will give the fps range choosen
-     * by the camera device.</p>
-     * <p>Switching into or out of this mode may trigger some camera ISP/sensor
-     * reconfigurations, which may introduce extra latency. It is recommended that
-     * the application avoids unnecessary scene mode switch as much as possible.</p>
-     *
-     * @see ACAMERA_CONTROL_AE_EXPOSURE_COMPENSATION
-     * @see ACAMERA_CONTROL_AE_LOCK
-     * @see ACAMERA_CONTROL_AE_PRECAPTURE_TRIGGER
-     * @see ACAMERA_CONTROL_AE_REGIONS
-     * @see ACAMERA_CONTROL_AE_TARGET_FPS_RANGE
-     * @see ACAMERA_CONTROL_AF_REGIONS
-     * @see ACAMERA_CONTROL_AF_TRIGGER
-     * @see ACAMERA_CONTROL_AWB_LOCK
-     * @see ACAMERA_CONTROL_AWB_REGIONS
-     * @see ACAMERA_CONTROL_EFFECT_MODE
-     * @see ACAMERA_CONTROL_MODE
-     * @see ACAMERA_FLASH_MODE
-     * @see ACAMERA_LENS_OPTICAL_STABILIZATION_MODE
-     * @see ACAMERA_REQUEST_MAX_NUM_OUTPUT_STREAMS
-     * @see ACAMERA_SCALER_CROP_REGION
-     * @see ACAMERA_STATISTICS_FACE_DETECT_MODE
-     *
-     * <b>Deprecated</b>: please refer to this API documentation to find the alternatives
-     */
-    ACAMERA_CONTROL_SCENE_MODE_HIGH_SPEED_VIDEO                      = 17,
-
-    /**
      * <p>Turn on a device-specific high dynamic range (HDR) mode.</p>
      * <p>In this scene mode, the camera device captures images
      * that keep a larger range of scene illumination levels
@@ -6511,7 +5780,7 @@
     /**
      * <p>Edge enhancement is applied at different levels for different output streams,
      * based on resolution. Streams at maximum recording resolution (see {@link
-     * android.hardware.camera2.CameraDevice#createCaptureSession}) or below have
+     * ACameraDevice_createCaptureSession}) or below have
      * edge enhancement applied, while higher-resolution streams have no edge enhancement
      * applied. The level of edge enhancement for low-resolution streams is tuned so that
      * frame rate is not impacted, and the quality is equal to or better than FAST (since it
@@ -6765,7 +6034,7 @@
     /**
      * <p>Noise reduction is applied at different levels for different output streams,
      * based on resolution. Streams at maximum recording resolution (see {@link
-     * android.hardware.camera2.CameraDevice#createCaptureSession}) or below have noise
+     * ACameraDevice_createCaptureSession}) or below have noise
      * reduction applied, while higher-resolution streams have MINIMAL (if supported) or no
      * noise reduction applied (if MINIMAL is not supported.) The degree of noise reduction
      * for low-resolution streams is tuned so that frame rate is not impacted, and the quality
@@ -6987,26 +6256,18 @@
      * to FAST. Additionally, maximum-resolution images can be captured at &gt;= 10 frames
      * per second.  Here, 'high resolution' means at least 8 megapixels, or the maximum
      * resolution of the device, whichever is smaller.</p>
-     * <p>More specifically, this means that a size matching the camera device's active array
-     * size is listed as a supported size for the {@link
-     * android.graphics.ImageFormat#YUV_420_888} format in either {@link
-     * android.hardware.camera2.params.StreamConfigurationMap#getOutputSizes} or {@link
-     * android.hardware.camera2.params.StreamConfigurationMap#getHighResolutionOutputSizes},
-     * with a minimum frame duration for that format and size of either &lt;= 1/20 s, or
-     * &lt;= 1/10 s, respectively; and the ACAMERA_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES entry
-     * lists at least one FPS range where the minimum FPS is &gt;= 1 / minimumFrameDuration
-     * for the maximum-size YUV_420_888 format.  If that maximum size is listed in {@link
-     * android.hardware.camera2.params.StreamConfigurationMap#getHighResolutionOutputSizes},
-     * then the list of resolutions for YUV_420_888 from {@link
-     * android.hardware.camera2.params.StreamConfigurationMap#getOutputSizes} contains at
-     * least one resolution &gt;= 8 megapixels, with a minimum frame duration of &lt;= 1/20
-     * s.</p>
-     * <p>If the device supports the {@link android.graphics.ImageFormat#RAW10}, {@link
-     * android.graphics.ImageFormat#RAW12}, then those can also be captured at the same rate
+     * <p>More specifically, this means that at least one output {@link
+     * AIMAGE_FORMAT_YUV_420_888} size listed in
+     * {@link ACAMERA_SCALER_AVAILABLE_STREAM_CONFIGURATIONS} is larger or equal to the
+     * 'high resolution' defined above, and can be captured at at least 20 fps.
+     * For the largest {@link AIMAGE_FORMAT_YUV_420_888} size listed in
+     * {@link ACAMERA_SCALER_AVAILABLE_STREAM_CONFIGURATIONS}, camera device can capture this
+     * size for at least 10 frames per second.
+     * Also the ACAMERA_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES entry lists at least one FPS range
+     * where the minimum FPS is &gt;= 1 / minimumFrameDuration for the largest YUV_420_888 size.</p>
+     * <p>If the device supports the {@link AIMAGE_FORMAT_RAW10}, {@link
+     * AIMAGE_FORMAT_RAW12}, then those can also be captured at the same rate
      * as the maximum-size YUV_420_888 resolution is.</p>
-     * <p>If the device supports the PRIVATE_REPROCESSING capability, then the same guarantees
-     * as for the YUV_420_888 format also apply to the {@link
-     * android.graphics.ImageFormat#PRIVATE} format.</p>
      * <p>In addition, the ACAMERA_SYNC_MAX_LATENCY field is guaranted to have a value between 0
      * and 4, inclusive. ACAMERA_CONTROL_AE_LOCK_AVAILABLE and ACAMERA_CONTROL_AWB_LOCK_AVAILABLE
      * are also guaranteed to be <code>true</code> so burst capture with these two locks ON yields
@@ -7023,13 +6284,13 @@
      * <p>The camera device can produce depth measurements from its field of view.</p>
      * <p>This capability requires the camera device to support the following:</p>
      * <ul>
-     * <li>{@link android.graphics.ImageFormat#DEPTH16} is supported as an output format.</li>
-     * <li>{@link android.graphics.ImageFormat#DEPTH_POINT_CLOUD} is optionally supported as an
+     * <li>{@link AIMAGE_FORMAT_DEPTH16} is supported as an output format.</li>
+     * <li>{@link AIMAGE_FORMAT_DEPTH_POINT_CLOUD} is optionally supported as an
      *   output format.</li>
      * <li>This camera device, and all camera devices with the same ACAMERA_LENS_FACING,
-     *   will list the following calibration entries in both
-     *   {@link android.hardware.camera2.CameraCharacteristics} and
-     *   {@link android.hardware.camera2.CaptureResult}:<ul>
+     *   will list the following calibration entries in {@link ACameraMetadata} from both
+     *   {@link ACameraManager_getCameraCharacteristics} and
+     *   {@link ACameraCaptureSession_captureCallback_result}:<ul>
      * <li>ACAMERA_LENS_POSE_TRANSLATION</li>
      * <li>ACAMERA_LENS_POSE_ROTATION</li>
      * <li>ACAMERA_LENS_INTRINSIC_CALIBRATION</li>
@@ -7044,7 +6305,7 @@
      * <p>Generally, depth output operates at a slower frame rate than standard color capture,
      * so the DEPTH16 and DEPTH_POINT_CLOUD formats will commonly have a stall duration that
      * should be accounted for (see
-     * {@link android.hardware.camera2.params.StreamConfigurationMap#getOutputStallDuration}).
+     * {@link ACAMERA_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS}).
      * On a device that supports both depth and color-based output, to enable smooth preview,
      * using a repeating burst is recommended, where a depth-output target is only included
      * once every N frames, where N is the ratio between preview output rate and depth output
@@ -7278,8 +6539,8 @@
 
     /**
      * <p>Timestamps from ACAMERA_SENSOR_TIMESTAMP are in the same timebase as
-     * {@link android.os.SystemClock#elapsedRealtimeNanos},
-     * and they can be compared to other timestamps using that base.</p>
+     * <a href="https://developer.android.com/reference/android/os/SystemClock.html#elapsedRealtimeNanos">elapsedRealtimeNanos</a>
+     * (or CLOCK_BOOTTIME), and they can be compared to other timestamps using that base.</p>
      *
      * @see ACAMERA_SENSOR_TIMESTAMP
      */
@@ -7464,8 +6725,7 @@
      * <p>This camera device does not have enough capabilities to qualify as a <code>FULL</code> device or
      * better.</p>
      * <p>Only the stream configurations listed in the <code>LEGACY</code> and <code>LIMITED</code> tables in the
-     * {@link android.hardware.camera2.CameraDevice#createCaptureSession
-     * createCaptureSession} documentation are guaranteed to be supported.</p>
+     * {@link ACameraDevice_createCaptureSession} documentation are guaranteed to be supported.</p>
      * <p>All <code>LIMITED</code> devices support the <code>BACKWARDS_COMPATIBLE</code> capability, indicating basic
      * support for color image capture. The only exception is that the device may
      * alternatively support only the <code>DEPTH_OUTPUT</code> capability, if it can only output depth
@@ -7491,8 +6751,7 @@
     /**
      * <p>This camera device is capable of supporting advanced imaging applications.</p>
      * <p>The stream configurations listed in the <code>FULL</code>, <code>LEGACY</code> and <code>LIMITED</code> tables in the
-     * {@link android.hardware.camera2.CameraDevice#createCaptureSession
-     * createCaptureSession} documentation are guaranteed to be supported.</p>
+     * {@link ACameraDevice_createCaptureSession} documentation are guaranteed to be supported.</p>
      * <p>A <code>FULL</code> device will support below capabilities:</p>
      * <ul>
      * <li><code>BURST_CAPTURE</code> capability (ACAMERA_REQUEST_AVAILABLE_CAPABILITIES contains
@@ -7520,8 +6779,7 @@
     /**
      * <p>This camera device is running in backward compatibility mode.</p>
      * <p>Only the stream configurations listed in the <code>LEGACY</code> table in the {@link
-     * android.hardware.camera2.CameraDevice#createCaptureSession createCaptureSession}
-     * documentation are supported.</p>
+     * ACameraDevice_createCaptureSession} documentation are supported.</p>
      * <p>A <code>LEGACY</code> device does not support per-frame control, manual sensor control, manual
      * post-processing, arbitrary cropping regions, and has relaxed performance constraints.
      * No additional capabilities beyond <code>BACKWARD_COMPATIBLE</code> will ever be listed by a
@@ -7543,7 +6801,7 @@
      * FULL-level capabilities.</p>
      * <p>The stream configurations listed in the <code>LEVEL_3</code>, <code>RAW</code>, <code>FULL</code>, <code>LEGACY</code> and
      * <code>LIMITED</code> tables in the {@link
-     * android.hardware.camera2.CameraDevice#createCaptureSession createCaptureSession}
+     * ACameraDevice_createCaptureSession}
      * documentation are guaranteed to be supported.</p>
      * <p>The following additional capabilities are guaranteed to be supported:</p>
      * <ul>
diff --git a/include/camera/ndk/NdkCaptureRequest.h b/include/camera/ndk/NdkCaptureRequest.h
index e278196..cd97f4d 100644
--- a/include/camera/ndk/NdkCaptureRequest.h
+++ b/include/camera/ndk/NdkCaptureRequest.h
@@ -49,54 +49,255 @@
 // Container for a single output target
 typedef struct ACameraOutputTarget ACameraOutputTarget;
 
+/**
+ * ACaptureRequest is an opaque type that contains settings and output targets needed to capture
+ * a single image from camera device.
+ *
+ * <p>ACaptureRequest contains the configuration for the capture hardware (sensor, lens, flash),
+ * the processing pipeline, the control algorithms, and the output buffers. Also
+ * contains the list of target {@link ANativeWindow}s to send image data to for this
+ * capture.</p>
+ *
+ * <p>ACaptureRequest is created by {@link ACameraDevice_createCaptureRequest}.</p>
+ *
+ * <p>ACaptureRequest is given to {@link ACameraCaptureSession_capture} or
+ * {@link ACameraCaptureSession_setRepeatingRequest} to capture images from a camera.</p>
+ *
+ * <p>Each request can specify a different subset of target {@link ANativeWindow}s for the
+ * camera to send the captured data to. All the {@link ANativeWindow}s used in a request must
+ * be part of the {@link ANativeWindow} list given to the last call to
+ * {@link ACameraDevice_createCaptureSession}, when the request is submitted to the
+ * session.</p>
+ *
+ * <p>For example, a request meant for repeating preview might only include the
+ * {@link ANativeWindow} for the preview SurfaceView or SurfaceTexture, while a
+ * high-resolution still capture would also include a {@link ANativeWindow} from a
+ * {@link AImageReader} configured for high-resolution JPEG images.</p>
+ *
+ * @see ACameraDevice_createCaptureRequest
+ * @see ACameraCaptureSession_capture
+ * @see ACameraCaptureSession_setRepeatingRequest
+ */
 typedef struct ACaptureRequest ACaptureRequest;
 
-camera_status_t ACameraOutputTarget_create(ANativeWindow* window, ACameraOutputTarget** out);
-void ACameraOutputTarget_free(ACameraOutputTarget*);
+/**
+ * Create a ACameraOutputTarget object.
+ *
+ * <p>The ACameraOutputTarget is used in {@link ACaptureRequest_addTarget} method to add an output
+ * {@link ANativeWindow} to ACaptureRequest. Use {@link ACameraOutputTarget_free} to free the object
+ * and its memory after application no longer needs the {@link ACameraOutputTarget}.</p>
+ *
+ * @param window the {@link ANativeWindow} to be associated with the {@link ACameraOutputTarget}
+ * @param output the output {@link ACameraOutputTarget} will be stored here if the
+ *                  method call succeeds.
+ *
+ * @return <ul>
+ *         <li>{@link ACAMERA_OK} if the method call succeeds. The created ACameraOutputTarget will
+ *                                be filled in the output argument.</li>
+ *         <li>{@link ACAMERA_ERROR_INVALID_PARAMETER} if window or output is NULL.</li></ul>
+ *
+ * @see ACaptureRequest_addTarget
+ */
+camera_status_t ACameraOutputTarget_create(ANativeWindow* window, ACameraOutputTarget** output);
 
-camera_status_t ACaptureRequest_addTarget(ACaptureRequest*, const ACameraOutputTarget*);
-camera_status_t ACaptureRequest_removeTarget(ACaptureRequest*, const ACameraOutputTarget*);
-//TODO: do we need API to query added targets?
+/**
+ * Free a ACameraOutputTarget object.
+ *
+ * @param output the {@link ACameraOutputTarget} to be freed.
+ *
+ * @see ACameraOutputTarget_create
+ */
+void ACameraOutputTarget_free(ACameraOutputTarget* output);
 
-/*
- * Get a metadata entry
+/**
+ * Add an {@link ACameraOutputTarget} object to {@link ACaptureRequest}.
+ *
+ * @param request the {@link ACaptureRequest} of interest.
+ * @param output the output {@link ACameraOutputTarget} to be added to capture request.
+ *
+ * @return <ul>
+ *         <li>{@link ACAMERA_OK} if the method call succeeds.</li>
+ *         <li>{@link ACAMERA_ERROR_INVALID_PARAMETER} if request or output is NULL.</li></ul>
+ */
+camera_status_t ACaptureRequest_addTarget(ACaptureRequest* request,
+        const ACameraOutputTarget* output);
+
+/**
+ * Remove an {@link ACameraOutputTarget} object from {@link ACaptureRequest}.
+ *
+ * <p>This method has no effect if the ACameraOutputTarget does not exist in ACaptureRequest.</p>
+ *
+ * @param request the {@link ACaptureRequest} of interest.
+ * @param output the output {@link ACameraOutputTarget} to be removed from capture request.
+ *
+ * @return <ul>
+ *         <li>{@link ACAMERA_OK} if the method call succeeds.</li>
+ *         <li>{@link ACAMERA_ERROR_INVALID_PARAMETER} if request or output is NULL.</li></ul>
+ */
+camera_status_t ACaptureRequest_removeTarget(ACaptureRequest* request,
+        const ACameraOutputTarget* output);
+
+/**
+ * Get a metadata entry from input {@link ACaptureRequest}.
+ *
+ * <p>The memory of the data field in returned entry is managed by camera framework. Do not
+ * attempt to free it.</p>
+ *
+ * @param request the {@link ACaptureRequest} of interest.
+ * @param tag the tag value of the camera metadata entry to be queried.
+ * @param entry the output {@link ACameraMetadata_const_entry} will be filled here if the method
+ *        call succeeds.
+ *
+ * @return <ul>
+ *         <li>{@link ACAMERA_OK} if the method call succeeds.</li>
+ *         <li>{@link ACAMERA_ERROR_INVALID_PARAMETER} if request or entry is NULL.</li>
+ *         <li>{@link ACAMERA_ERROR_METADATA_NOT_FOUND} if the capture request does not contain an
+ *             entry of input tag value.</li></ul>
  */
 camera_status_t ACaptureRequest_getConstEntry(
-        const ACaptureRequest*, uint32_t tag, ACameraMetadata_const_entry* entry);
+        const ACaptureRequest* request, uint32_t tag, ACameraMetadata_const_entry* entry);
 
 /*
- * List all the entry tags in this capture request.
- * The memory of tags is managed by ACaptureRequest itself and must NOT be free/delete
- * by application. Calling ACaptureRequest_setEntry_* API will invalidate previous
- * output of ACaptureRequest_getAllTags. Do not access tags after calling
- * ACaptureRequest_setEntry_*. To get new list of tags after updating capture request,
- * application must call ACaptureRequest_getAllTags again.
- * Do NOT access tags after calling ACaptureRequest_free.
+ * List all the entry tags in input {@link ACaptureRequest}.
+ *
+ * @param request the {@link ACaptureRequest} of interest.
+ * @param numTags number of metadata entries in input {@link ACaptureRequest}
+ * @param tags the tag values of the metadata entries. Length of tags is returned in numTags
+ *             argument. The memory is managed by ACaptureRequest itself and must NOT be freed/deleted
+ *             by the application. Calling ACaptureRequest_setEntry_* methods will invalidate previous
+ *             output of ACaptureRequest_getAllTags. Do not access tags after calling
+ *             ACaptureRequest_setEntry_*. To get new list of tags after updating capture request,
+ *             application must call ACaptureRequest_getAllTags again. Do NOT access tags after
+ *             calling ACaptureRequest_free.
+ *
+ * @return <ul>
+ *         <li>{@link ACAMERA_OK} if the method call succeeds.</li>
+ *         <li>{@link ACAMERA_ERROR_INVALID_PARAMETER} if request, numTags or tags is NULL.</li>
+ *         <li>{@link ACAMERA_ERROR_UNKNOWN} if the method fails for some other reasons.</li></ul>
  */
 camera_status_t ACaptureRequest_getAllTags(
-        const ACaptureRequest*, /*out*/int32_t* numTags, /*out*/const uint32_t** tags);
+        const ACaptureRequest* request, /*out*/int32_t* numTags, /*out*/const uint32_t** tags);
 
-/*
- * Set an entry of corresponding type.
- * The entry tag's type must match corresponding set API or an
- * ACAMERA_ERROR_INVALID_PARAMETER error will occur.
- * Also, the input ACameraMetadata* must belong to a capture request or an
- * ACAMERA_ERROR_INVALID_PARAMETER error will occur.
+/**
+ * Set/change a camera capture control entry with unsigned 8 bits data type.
+ *
+ * <p>Set count to 0 and data to NULL to remove a tag from the capture request.</p>
+ *
+ * @param request the {@link ACaptureRequest} of interest.
+ * @param tag the tag value of the camera metadata entry to be set.
+ * @param count number of elements to be set in data argument
+ * @param data the entries to be set/change in the capture request.
+ *
+ * @return <ul>
+ *         <li>{@link ACAMERA_OK} if the method call succeeds.</li>
+ *         <li>{@link ACAMERA_ERROR_INVALID_PARAMETER} if request is NULL, count is larger than
+ *             zero while data is NULL, the data type of the tag is not unsigned 8 bits, or
+ *             the tag is not controllable by application.</li></ul>
  */
 camera_status_t ACaptureRequest_setEntry_u8(
-        ACaptureRequest*, uint32_t tag, uint32_t count, const uint8_t* data);
-camera_status_t ACaptureRequest_setEntry_i32(
-        ACaptureRequest*, uint32_t tag, uint32_t count, const int32_t* data);
-camera_status_t ACaptureRequest_setEntry_float(
-        ACaptureRequest*, uint32_t tag, uint32_t count, const float* data);
-camera_status_t ACaptureRequest_setEntry_i64(
-        ACaptureRequest*, uint32_t tag, uint32_t count, const int64_t* data);
-camera_status_t ACaptureRequest_setEntry_double(
-        ACaptureRequest*, uint32_t tag, uint32_t count, const double* data);
-camera_status_t ACaptureRequest_setEntry_rational(
-        ACaptureRequest*, uint32_t tag, uint32_t count, const ACameraMetadata_rational* data);
+        ACaptureRequest* request, uint32_t tag, uint32_t count, const uint8_t* data);
 
-// free the capture request created by ACameraDevice_createCaptureRequest
+/**
+ * Set/change a camera capture control entry with signed 32 bits data type.
+ *
+ * <p>Set count to 0 and data to NULL to remove a tag from the capture request.</p>
+ *
+ * @param request the {@link ACaptureRequest} of interest.
+ * @param tag the tag value of the camera metadata entry to be set.
+ * @param count number of elements to be set in data argument
+ * @param data the entries to be set/change in the capture request.
+ *
+ * @return <ul>
+ *         <li>{@link ACAMERA_OK} if the method call succeeds.</li>
+ *         <li>{@link ACAMERA_ERROR_INVALID_PARAMETER} if request is NULL, count is larger than
+ *             zero while data is NULL, the data type of the tag is not signed 32 bits, or
+ *             the tag is not controllable by application.</li></ul>
+ */
+camera_status_t ACaptureRequest_setEntry_i32(
+        ACaptureRequest* request, uint32_t tag, uint32_t count, const int32_t* data);
+
+/**
+ * Set/change a camera capture control entry with float data type.
+ *
+ * <p>Set count to 0 and data to NULL to remove a tag from the capture request.</p>
+ *
+ * @param request the {@link ACaptureRequest} of interest.
+ * @param tag the tag value of the camera metadata entry to be set.
+ * @param count number of elements to be set in data argument
+ * @param data the entries to be set/change in the capture request.
+ *
+ * @return <ul>
+ *         <li>{@link ACAMERA_OK} if the method call succeeds.</li>
+ *         <li>{@link ACAMERA_ERROR_INVALID_PARAMETER} if request is NULL, count is larger than
+ *             zero while data is NULL, the data type of the tag is not float, or
+ *             the tag is not controllable by application.</li></ul>
+ */
+camera_status_t ACaptureRequest_setEntry_float(
+        ACaptureRequest* request, uint32_t tag, uint32_t count, const float* data);
+
+/**
+ * Set/change a camera capture control entry with signed 64 bits data type.
+ *
+ * <p>Set count to 0 and data to NULL to remove a tag from the capture request.</p>
+ *
+ * @param request the {@link ACaptureRequest} of interest.
+ * @param tag the tag value of the camera metadata entry to be set.
+ * @param count number of elements to be set in data argument
+ * @param data the entries to be set/change in the capture request.
+ *
+ * @return <ul>
+ *         <li>{@link ACAMERA_OK} if the method call succeeds.</li>
+ *         <li>{@link ACAMERA_ERROR_INVALID_PARAMETER} if request is NULL, count is larger than
+ *             zero while data is NULL, the data type of the tag is not signed 64 bits, or
+ *             the tag is not controllable by application.</li></ul>
+ */
+camera_status_t ACaptureRequest_setEntry_i64(
+        ACaptureRequest* request, uint32_t tag, uint32_t count, const int64_t* data);
+
+/**
+ * Set/change a camera capture control entry with double data type.
+ *
+ * <p>Set count to 0 and data to NULL to remove a tag from the capture request.</p>
+ *
+ * @param request the {@link ACaptureRequest} of interest.
+ * @param tag the tag value of the camera metadata entry to be set.
+ * @param count number of elements to be set in data argument
+ * @param data the entries to be set/change in the capture request.
+ *
+ * @return <ul>
+ *         <li>{@link ACAMERA_OK} if the method call succeeds.</li>
+ *         <li>{@link ACAMERA_ERROR_INVALID_PARAMETER} if request is NULL, count is larger than
+ *             zero while data is NULL, the data type of the tag is not double, or
+ *             the tag is not controllable by application.</li></ul>
+ */
+camera_status_t ACaptureRequest_setEntry_double(
+        ACaptureRequest* request, uint32_t tag, uint32_t count, const double* data);
+
+/**
+ * Set/change a camera capture control entry with rational data type.
+ *
+ * <p>Set count to 0 and data to NULL to remove a tag from the capture request.</p>
+ *
+ * @param request the {@link ACaptureRequest} of interest.
+ * @param tag the tag value of the camera metadata entry to be set.
+ * @param count number of elements to be set in data argument
+ * @param data the entries to be set/change in the capture request.
+ *
+ * @return <ul>
+ *         <li>{@link ACAMERA_OK} if the method call succeeds.</li>
+ *         <li>{@link ACAMERA_ERROR_INVALID_PARAMETER} if request is NULL, count is larger than
+ *             zero while data is NULL, the data type of the tag is not rational, or
+ *             the tag is not controllable by application.</li></ul>
+ */
+camera_status_t ACaptureRequest_setEntry_rational(
+        ACaptureRequest* request, uint32_t tag, uint32_t count,
+        const ACameraMetadata_rational* data);
+
+/**
+ * Free a {@link ACaptureRequest} structure.
+ *
+ * @param request the {@link ACaptureRequest} to be freed.
+ */
 void ACaptureRequest_free(ACaptureRequest* request);
 
 #ifdef __cplusplus
diff --git a/include/media/AudioSystem.h b/include/media/AudioSystem.h
index 585ef59..2e6646a 100644
--- a/include/media/AudioSystem.h
+++ b/include/media/AudioSystem.h
@@ -238,6 +238,7 @@
     static status_t getInputForAttr(const audio_attributes_t *attr,
                                     audio_io_handle_t *input,
                                     audio_session_t session,
+                                    pid_t pid,
                                     uid_t uid,
                                     uint32_t samplingRate,
                                     audio_format_t format,
diff --git a/include/media/AudioTimestamp.h b/include/media/AudioTimestamp.h
index 44d6c0b..498de8e 100644
--- a/include/media/AudioTimestamp.h
+++ b/include/media/AudioTimestamp.h
@@ -34,7 +34,7 @@
     struct timespec mTime;     // corresponding CLOCK_MONOTONIC when frame is expected to present
 };
 
-struct ExtendedTimestamp {
+struct alignas(8) /* bug 29096183, bug 29108507 */ ExtendedTimestamp {
     enum Location {
         LOCATION_INVALID = -1,
         // Locations in the audio playback / record pipeline.
diff --git a/include/media/IAudioFlinger.h b/include/media/IAudioFlinger.h
index 1ade4ba..984bc02 100644
--- a/include/media/IAudioFlinger.h
+++ b/include/media/IAudioFlinger.h
@@ -73,6 +73,7 @@
                                 // reference and will release it when the track is destroyed.
                                 // However on failure, the client is responsible for release.
                                 audio_io_handle_t output,
+                                pid_t pid,
                                 pid_t tid,  // -1 means unused, otherwise must be valid non-0
                                 audio_session_t *sessionId,
                                 int clientUid,
@@ -89,6 +90,7 @@
                                 const String16& callingPackage,
                                 size_t *pFrameCount,
                                 track_flags_t *flags,
+                                pid_t pid,
                                 pid_t tid,  // -1 means unused, otherwise must be valid non-0
                                 int clientUid,
                                 audio_session_t *sessionId,
diff --git a/include/media/IAudioPolicyService.h b/include/media/IAudioPolicyService.h
index 80437dc..0e9e3bc 100644
--- a/include/media/IAudioPolicyService.h
+++ b/include/media/IAudioPolicyService.h
@@ -81,6 +81,7 @@
     virtual status_t  getInputForAttr(const audio_attributes_t *attr,
                               audio_io_handle_t *input,
                               audio_session_t session,
+                              pid_t pid,
                               uid_t uid,
                               uint32_t samplingRate,
                               audio_format_t format,
diff --git a/include/media/IMediaDeathNotifier.h b/include/media/IMediaDeathNotifier.h
index bb3d0d8..aca6678 100644
--- a/include/media/IMediaDeathNotifier.h
+++ b/include/media/IMediaDeathNotifier.h
@@ -30,7 +30,7 @@
     virtual ~IMediaDeathNotifier() { removeObitRecipient(this); }
 
     virtual void died() = 0;
-    static const sp<IMediaPlayerService>& getMediaPlayerService();
+    static const sp<IMediaPlayerService> getMediaPlayerService();
 
 private:
     IMediaDeathNotifier &operator=(const IMediaDeathNotifier &);
diff --git a/include/media/IMediaSource.h b/include/media/IMediaSource.h
index f7586a7..709f425 100644
--- a/include/media/IMediaSource.h
+++ b/include/media/IMediaSource.h
@@ -32,6 +32,11 @@
 public:
     DECLARE_META_INTERFACE(MediaSource);
 
+    enum {
+        // Maximum number of buffers that can be read in readMultiple.
+        kMaxNumReadMultiple = 128,
+    };
+
     // To be called before any other methods on this object, except
     // getFormat().
     virtual status_t start(MetaData *params = NULL) = 0;
@@ -87,7 +92,7 @@
     };
 
     // Returns a new buffer of data. Call blocks until a
-    // buffer is available, an error is encountered of the end of the stream
+    // buffer is available, an error is encountered or the end of the stream
     // is reached.
     // End of stream is signalled by a result of ERROR_END_OF_STREAM.
     // A result of INFO_FORMAT_CHANGED indicates that the format of this
@@ -96,6 +101,19 @@
     virtual status_t read(
             MediaBuffer **buffer, const ReadOptions *options = NULL) = 0;
 
+    // Returns a vector of new buffers of data. The vector size could be
+    // <= |maxNumBuffers|. Used for buffers with small size
+    // since all buffer data are passed back by binder, not shared memory.
+    // Call blocks until an error is encountered, or the end of the stream is
+    // reached, or format change is hit, or |kMaxNumReadMultiple| buffers have
+    // been read.
+    // End of stream is signalled by a result of ERROR_END_OF_STREAM.
+    // A result of INFO_FORMAT_CHANGED indicates that the format of this
+    // MediaSource has changed mid-stream, the client can continue reading
+    // but should be prepared for buffers of the new configuration.
+    virtual status_t readMultiple(
+            Vector<MediaBuffer *> *buffers, uint32_t maxNumBuffers = 1) = 0;
+
     // Causes this source to suspend pulling data from its upstream source
     // until a subsequent read-with-seek. Currently only supported by
     // OMXCodec.
@@ -126,6 +144,10 @@
         return ERROR_UNSUPPORTED;
     }
 
+    virtual status_t readMultiple(
+            Vector<MediaBuffer *> * /* buffers */, uint32_t /* maxNumBuffers = 1 */) {
+        return ERROR_UNSUPPORTED;
+    }
 protected:
     virtual ~BnMediaSource();
 
diff --git a/include/media/MemoryLeakTrackUtil.h b/include/media/MemoryLeakTrackUtil.h
index d2618aa..4c1a60c 100644
--- a/include/media/MemoryLeakTrackUtil.h
+++ b/include/media/MemoryLeakTrackUtil.h
@@ -16,11 +16,16 @@
 #ifndef MEMORY_LEAK_TRACK_UTIL_H
 #define MEMORY_LEAK_TRACK_UTIL_H
 
+#include <iostream>
+
 namespace android {
 /*
- * Dump the memory address of the calling process to the given fd.
+ * Dump the heap memory of the calling process, sorted by total size
+ * (allocation size * number of allocations).
+ *
+ *    limit is the number of unique allocations to return.
  */
-extern void dumpMemoryAddresses(int fd);
+extern std::string dumpMemoryAddresses(size_t limit);
 
 };
 
diff --git a/include/media/stagefright/ACodec.h b/include/media/stagefright/ACodec.h
index d14bb7b..f4d0acd 100644
--- a/include/media/stagefright/ACodec.h
+++ b/include/media/stagefright/ACodec.h
@@ -250,6 +250,8 @@
     bool mUsingNativeWindow;
     sp<ANativeWindow> mNativeWindow;
     int mNativeWindowUsageBits;
+    android_native_rect_t mLastNativeWindowCrop;
+    int32_t mLastNativeWindowDataSpace;
     sp<AMessage> mConfigFormat;
     sp<AMessage> mInputFormat;
     sp<AMessage> mOutputFormat;
diff --git a/include/media/stagefright/AudioSource.h b/include/media/stagefright/AudioSource.h
index 3074910..8fc410d 100644
--- a/include/media/stagefright/AudioSource.h
+++ b/include/media/stagefright/AudioSource.h
@@ -38,7 +38,9 @@
             const String16 &opPackageName,
             uint32_t sampleRate,
             uint32_t channels,
-            uint32_t outSampleRate = 0);
+            uint32_t outSampleRate = 0,
+            uid_t uid = -1,
+            pid_t pid = -1);
 
     status_t initCheck() const;
 
diff --git a/include/media/stagefright/MetaData.h b/include/media/stagefright/MetaData.h
index ba375a2..be7e5c1 100644
--- a/include/media/stagefright/MetaData.h
+++ b/include/media/stagefright/MetaData.h
@@ -52,6 +52,8 @@
     kKeyPcmEncoding       = 'PCMe',  // int32_t (audio encoding enum)
     kKeyFrameRate         = 'frmR',  // int32_t (video frame rate fps)
     kKeyBitRate           = 'brte',  // int32_t (bps)
+    kKeyMaxBitRate        = 'mxBr',  // int32_t (bps)
+    kKeyStreamHeader      = 'stHd',  // raw data
     kKeyESDS              = 'esds',  // raw data
     kKeyAACProfile        = 'aacp',  // int32_t
     kKeyAVCC              = 'avcc',  // raw data
@@ -192,8 +194,7 @@
     kKeyNalLengthSize     = 'nals', // int32_t
 
     // HDR related
-    kKeyMinLuminance     = 'minL', // int32_t, min luminance of the content in cd/m2.
-    kKeyMaxLuminance     = 'maxL', // int32_t, max luminance of the content in cd/m2.
+    kKeyHdrStaticInfo    = 'hdrS', // HDRStaticInfo
 
     // color aspects
     kKeyColorRange       = 'cRng', // int32_t, color range, value defined by ColorAspects.Range
diff --git a/include/media/stagefright/NuMediaExtractor.h b/include/media/stagefright/NuMediaExtractor.h
index 6606c58..03e2185 100644
--- a/include/media/stagefright/NuMediaExtractor.h
+++ b/include/media/stagefright/NuMediaExtractor.h
@@ -44,6 +44,11 @@
         SAMPLE_FLAG_ENCRYPTED   = 2,
     };
 
+    // identical to IMediaExtractor::GetTrackMetaDataFlags
+    enum GetTrackFormatFlags {
+        kIncludeExtensiveMetaData = 1, // reads sample table and possibly stream headers
+    };
+
     NuMediaExtractor();
 
     status_t setDataSource(
@@ -56,7 +61,7 @@
     status_t setDataSource(const sp<DataSource> &datasource);
 
     size_t countTracks() const;
-    status_t getTrackFormat(size_t index, sp<AMessage> *format) const;
+    status_t getTrackFormat(size_t index, sp<AMessage> *format, uint32_t flags = 0) const;
 
     status_t getFileFormat(sp<AMessage> *format) const;
 
diff --git a/include/media/stagefright/Utils.h b/include/media/stagefright/Utils.h
index 17631a0..01b3e3f 100644
--- a/include/media/stagefright/Utils.h
+++ b/include/media/stagefright/Utils.h
@@ -48,6 +48,11 @@
 void convertMessageToMetaData(
         const sp<AMessage> &format, sp<MetaData> &meta);
 
+// Returns a pointer to the next NAL start code in buffer of size |length| starting at |data|, or
+// a pointer to the end of the buffer if the start code is not found.
+// TODO: combine this with avc_utils::getNextNALUnit
+const uint8_t *findNextNalStartCode(const uint8_t *data, size_t length);
+
 AString MakeUserAgent();
 
 // Convert a MIME type to a AudioSystem::audio_format
diff --git a/include/media/stagefright/foundation/ABitReader.h b/include/media/stagefright/foundation/ABitReader.h
index c3bf0ff..a30dd2e 100644
--- a/include/media/stagefright/foundation/ABitReader.h
+++ b/include/media/stagefright/foundation/ABitReader.h
@@ -30,23 +30,44 @@
     ABitReader(const uint8_t *data, size_t size);
     virtual ~ABitReader();
 
-    uint32_t getBits(size_t n);
-    void skipBits(size_t n);
+    // Tries to get |n| bits. If not successful, returns |fallback|. Otherwise, returns result.
+    // Reading 0 bits will always succeed and return 0.
+    uint32_t getBitsWithFallback(size_t n, uint32_t fallback);
 
+    // Tries to get |n| bits. If not successful, returns false. Otherwise, stores result in |out|
+    // and returns true. Use !overRead() to determine if this call was successful. Reading 0 bits
+    // will always succeed and write 0 in |out|.
+    bool getBitsGraceful(size_t n, uint32_t *out);
+
+    // Gets |n| bits and returns result. ABORTS if unsuccessful. Reading 0 bits will always
+    // succeed.
+    uint32_t getBits(size_t n);
+
+    // Tries to skip |n| bits. Returns true iff successful. Skipping 0 bits will always succeed.
+    bool skipBits(size_t n);
+
+    // "Puts" |n| bits with the value |x| back virtually into the bit stream. The put-back bits
+    // are not actually written into the data, but are tracked in a separate buffer that can
+    // store at most 32 bits. This is a no-op if the stream has already been over-read.
     void putBits(uint32_t x, size_t n);
 
     size_t numBitsLeft() const;
 
     const uint8_t *data() const;
 
+    // Returns true iff the stream was over-read (e.g. any getBits operation has been unsuccessful
+    // due to overread (and not trying to read >32 bits).)
+    bool overRead() const { return mOverRead; }
+
 protected:
     const uint8_t *mData;
     size_t mSize;
 
     uint32_t mReservoir;  // left-aligned bits
     size_t mNumBitsLeft;
+    bool mOverRead;
 
-    virtual void fillReservoir();
+    virtual bool fillReservoir();
 
     DISALLOW_EVIL_CONSTRUCTORS(ABitReader);
 };
@@ -60,7 +81,7 @@
 private:
     int32_t mNumZeros;
 
-    virtual void fillReservoir();
+    virtual bool fillReservoir();
 
     DISALLOW_EVIL_CONSTRUCTORS(NALBitReader);
 };
diff --git a/include/ndk/NdkImage.h b/include/ndk/NdkImage.h
index eab7ead..cd0b11e 100644
--- a/include/ndk/NdkImage.h
+++ b/include/ndk/NdkImage.h
@@ -42,21 +42,371 @@
 extern "C" {
 #endif
 
+/**
+ * AImage is an opaque type that provides access to images generated by {@link AImageReader}.
+ */
 typedef struct AImage AImage;
 
 // Formats not listed here will not be supported by AImageReader
-enum {
+enum AIMAGE_FORMATS {
+    /**
+     * Multi-plane Android YUV 420 format.
+     *
+     * <p>This format is a generic YCbCr format, capable of describing any 4:2:0
+     * chroma-subsampled planar or semiplanar buffer (but not fully interleaved),
+     * with 8 bits per color sample.</p>
+     *
+     * <p>Images in this format are always represented by three separate buffers
+     * of data, one for each color plane. Additional information always
+     * accompanies the buffers, describing the row stride and the pixel stride
+     * for each plane.</p>
+     *
+     * <p>The order of planes is guaranteed such that plane #0 is always Y, plane #1 is always
+     * U (Cb), and plane #2 is always V (Cr).</p>
+     *
+     * <p>The Y-plane is guaranteed not to be interleaved with the U/V planes
+     * (in particular, pixel stride is always 1 in {@link AImage_getPlanePixelStride}).</p>
+     *
+     * <p>The U/V planes are guaranteed to have the same row stride and pixel stride, that is, the
+     * return value of {@link AImage_getPlaneRowStride} for the U/V plane are guaranteed to be the
+     * same, and the return value of {@link AImage_getPlanePixelStride} for the U/V plane are also
+     * guaranteed to be the same.</p>
+     *
+     * <p>For example, the {@link AImage} object can provide data
+     * in this format from a {@link ACameraDevice} through an {@link AImageReader} object.</p>
+     *
+     * <p>This format is always supported as an output format for the android Camera2 NDK API.</p>
+     *
+     * @see AImage
+     * @see AImageReader
+     * @see ACameraDevice
+     */
     AIMAGE_FORMAT_YUV_420_888       = 0x23,
+
+    /**
+     * Compressed JPEG format.
+     *
+     * <p>This format is always supported as an output format for the android Camera2 NDK API.</p>
+     */
     AIMAGE_FORMAT_JPEG              = 0x100,
+
+    /**
+     * 16 bits per pixel raw camera sensor image format, usually representing a single-channel
+     * Bayer-mosaic image.
+     *
+     * <p>The layout of the color mosaic, the maximum and minimum encoding
+     * values of the raw pixel data, the color space of the image, and all other
+     * needed information to interpret a raw sensor image must be queried from
+     * the {@link ACameraDevice} which produced the image.</p>
+     */
     AIMAGE_FORMAT_RAW16             = 0x20,
+
+    /**
+     * Private raw camera sensor image format, a single channel image with implementation dependent
+     * pixel layout.
+     *
+     * <p>AIMAGE_FORMAT_RAW_PRIVATE is a format for unprocessed raw image buffers coming from an
+     * image sensor. The actual structure of buffers of this format is implementation-dependent.</p>
+     *
+     */
     AIMAGE_FORMAT_RAW_PRIVATE       = 0x24,
+
+    /**
+     * Android 10-bit raw format.
+     *
+     * <p>
+     * This is a single-plane, 10-bit per pixel, densely packed (in each row),
+     * unprocessed format, usually representing raw Bayer-pattern images coming
+     * from an image sensor.
+     * </p>
+     * <p>
+     * In an image buffer with this format, starting from the first pixel of
+     * each row, each 4 consecutive pixels are packed into 5 bytes (40 bits).
+     * Each one of the first 4 bytes contains the top 8 bits of each pixel, The
+     * fifth byte contains the 2 least significant bits of the 4 pixels, the
+     * exact layout data for each 4 consecutive pixels is illustrated below
+     * (Pi[j] stands for the jth bit of the ith pixel):
+     * </p>
+     * <table>
+     * <tr>
+     * <th align="center"></th>
+     * <th align="center">bit 7</th>
+     * <th align="center">bit 6</th>
+     * <th align="center">bit 5</th>
+     * <th align="center">bit 4</th>
+     * <th align="center">bit 3</th>
+     * <th align="center">bit 2</th>
+     * <th align="center">bit 1</th>
+     * <th align="center">bit 0</th>
+     * </tr>
+     * <tr>
+     * <td align="center">Byte 0:</td>
+     * <td align="center">P0[9]</td>
+     * <td align="center">P0[8]</td>
+     * <td align="center">P0[7]</td>
+     * <td align="center">P0[6]</td>
+     * <td align="center">P0[5]</td>
+     * <td align="center">P0[4]</td>
+     * <td align="center">P0[3]</td>
+     * <td align="center">P0[2]</td>
+     * </tr>
+     * <tr>
+     * <td align="center">Byte 1:</td>
+     * <td align="center">P1[9]</td>
+     * <td align="center">P1[8]</td>
+     * <td align="center">P1[7]</td>
+     * <td align="center">P1[6]</td>
+     * <td align="center">P1[5]</td>
+     * <td align="center">P1[4]</td>
+     * <td align="center">P1[3]</td>
+     * <td align="center">P1[2]</td>
+     * </tr>
+     * <tr>
+     * <td align="center">Byte 2:</td>
+     * <td align="center">P2[9]</td>
+     * <td align="center">P2[8]</td>
+     * <td align="center">P2[7]</td>
+     * <td align="center">P2[6]</td>
+     * <td align="center">P2[5]</td>
+     * <td align="center">P2[4]</td>
+     * <td align="center">P2[3]</td>
+     * <td align="center">P2[2]</td>
+     * </tr>
+     * <tr>
+     * <td align="center">Byte 3:</td>
+     * <td align="center">P3[9]</td>
+     * <td align="center">P3[8]</td>
+     * <td align="center">P3[7]</td>
+     * <td align="center">P3[6]</td>
+     * <td align="center">P3[5]</td>
+     * <td align="center">P3[4]</td>
+     * <td align="center">P3[3]</td>
+     * <td align="center">P3[2]</td>
+     * </tr>
+     * <tr>
+     * <td align="center">Byte 4:</td>
+     * <td align="center">P3[1]</td>
+     * <td align="center">P3[0]</td>
+     * <td align="center">P2[1]</td>
+     * <td align="center">P2[0]</td>
+     * <td align="center">P1[1]</td>
+     * <td align="center">P1[0]</td>
+     * <td align="center">P0[1]</td>
+     * <td align="center">P0[0]</td>
+     * </tr>
+     * </table>
+     * <p>
+     * This format assumes
+     * <ul>
+     * <li>a width multiple of 4 pixels</li>
+     * <li>an even height</li>
+     * </ul>
+     * </p>
+     *
+     * <pre>size = row stride * height</pre> where the row stride is in <em>bytes</em>,
+     * not pixels.
+     *
+     * <p>
+     * Since this is a densely packed format, the pixel stride is always 0. The
+     * application must use the pixel data layout defined in above table to
+     * access each row data. When row stride is equal to (width * (10 / 8)), there
+     * will be no padding bytes at the end of each row, the entire image data is
+     * densely packed. When stride is larger than (width * (10 / 8)), padding
+     * bytes will be present at the end of each row.
+     * </p>
+     * <p>
+     * For example, the {@link AImage} object can provide data in this format from a
+     * {@link ACameraDevice} (if supported) through a {@link AImageReader} object.
+     * The number of planes returned by {@link AImage_getNumberOfPlanes} will always be 1.
+     * The pixel stride is undefined ({@link AImage_getPlanePixelStride} will return
+     * {@link AMEDIA_ERROR_UNSUPPORTED}), and the {@link AImage_getPlaneRowStride} describes the
+     * vertical neighboring pixel distance (in bytes) between adjacent rows.
+     * </p>
+     *
+     * @see AImage
+     * @see AImageReader
+     * @see ACameraDevice
+     */
     AIMAGE_FORMAT_RAW10             = 0x25,
+
+    /**
+     * Android 12-bit raw format.
+     *
+     * <p>
+     * This is a single-plane, 12-bit per pixel, densely packed (in each row),
+     * unprocessed format, usually representing raw Bayer-pattern images coming
+     * from an image sensor.
+     * </p>
+     * <p>
+     * In an image buffer with this format, starting from the first pixel of each
+     * row, each two consecutive pixels are packed into 3 bytes (24 bits). The first
+     * and second byte contains the top 8 bits of first and second pixel. The third
+     * byte contains the 4 least significant bits of the two pixels, the exact layout
+     * data for each two consecutive pixels is illustrated below (Pi[j] stands for
+     * the jth bit of the ith pixel):
+     * </p>
+     * <table>
+     * <tr>
+     * <th align="center"></th>
+     * <th align="center">bit 7</th>
+     * <th align="center">bit 6</th>
+     * <th align="center">bit 5</th>
+     * <th align="center">bit 4</th>
+     * <th align="center">bit 3</th>
+     * <th align="center">bit 2</th>
+     * <th align="center">bit 1</th>
+     * <th align="center">bit 0</th>
+     * </tr>
+     * <tr>
+     * <td align="center">Byte 0:</td>
+     * <td align="center">P0[11]</td>
+     * <td align="center">P0[10]</td>
+     * <td align="center">P0[ 9]</td>
+     * <td align="center">P0[ 8]</td>
+     * <td align="center">P0[ 7]</td>
+     * <td align="center">P0[ 6]</td>
+     * <td align="center">P0[ 5]</td>
+     * <td align="center">P0[ 4]</td>
+     * </tr>
+     * <tr>
+     * <td align="center">Byte 1:</td>
+     * <td align="center">P1[11]</td>
+     * <td align="center">P1[10]</td>
+     * <td align="center">P1[ 9]</td>
+     * <td align="center">P1[ 8]</td>
+     * <td align="center">P1[ 7]</td>
+     * <td align="center">P1[ 6]</td>
+     * <td align="center">P1[ 5]</td>
+     * <td align="center">P1[ 4]</td>
+     * </tr>
+     * <tr>
+     * <td align="center">Byte 2:</td>
+     * <td align="center">P1[ 3]</td>
+     * <td align="center">P1[ 2]</td>
+     * <td align="center">P1[ 1]</td>
+     * <td align="center">P1[ 0]</td>
+     * <td align="center">P0[ 3]</td>
+     * <td align="center">P0[ 2]</td>
+     * <td align="center">P0[ 1]</td>
+     * <td align="center">P0[ 0]</td>
+     * </tr>
+     * </table>
+     * <p>
+     * This format assumes
+     * <ul>
+     * <li>a width multiple of 4 pixels</li>
+     * <li>an even height</li>
+     * </ul>
+     * </p>
+     *
+     * <pre>size = row stride * height</pre> where the row stride is in <em>bytes</em>,
+     * not pixels.
+     *
+     * <p>
+     * Since this is a densely packed format, the pixel stride is always 0. The
+     * application must use the pixel data layout defined in above table to
+     * access each row data. When row stride is equal to (width * (12 / 8)), there
+     * will be no padding bytes at the end of each row, the entire image data is
+     * densely packed. When stride is larger than (width * (12 / 8)), padding
+     * bytes will be present at the end of each row.
+     * </p>
+     * <p>
+     * For example, the {@link AImage} object can provide data in this format from a
+     * {@link ACameraDevice} (if supported) through a {@link AImageReader} object.
+     * The number of planes returned by {@link AImage_getNumberOfPlanes} will always be 1.
+     * The pixel stride is undefined ({@link AImage_getPlanePixelStride} will return
+     * {@link AMEDIA_ERROR_UNSUPPORTED}), and the {@link AImage_getPlaneRowStride} describes the
+     * vertical neighboring pixel distance (in bytes) between adjacent rows.
+     * </p>
+     *
+     * @see AImage
+     * @see AImageReader
+     * @see ACameraDevice
+     */
     AIMAGE_FORMAT_RAW12             = 0x26,
+
+    /**
+     * Android dense depth image format.
+     *
+     * <p>Each pixel is 16 bits, representing a depth ranging measurement from a depth camera or
+     * similar sensor. The 16-bit sample consists of a confidence value and the actual ranging
+     * measurement.</p>
+     *
+     * <p>The confidence value is an estimate of correctness for this sample.  It is encoded in the
+     * 3 most significant bits of the sample, with a value of 0 representing 100% confidence, a
+     * value of 1 representing 0% confidence, a value of 2 representing 1/7, a value of 3
+     * representing 2/7, and so on.</p>
+     *
+     * <p>As an example, the following sample extracts the range and confidence from the first pixel
+     * of a DEPTH16-format {@link AImage}, and converts the confidence to a floating-point value
+     * between 0 and 1.f inclusive, with 1.f representing maximum confidence:
+     *
+     * <pre>
+     *    uint16_t* data;
+     *    int dataLength;
+     *    AImage_getPlaneData(image, 0, (uint8_t**)&data, &dataLength);
+     *    uint16_t depthSample = data[0];
+     *    uint16_t depthRange = (depthSample & 0x1FFF);
+     *    uint16_t depthConfidence = ((depthSample >> 13) & 0x7);
+     *    float depthPercentage = depthConfidence == 0 ? 1.f : (depthConfidence - 1) / 7.f;
+     * </pre>
+     * </p>
+     *
+     * <p>This format assumes
+     * <ul>
+     * <li>an even width</li>
+     * <li>an even height</li>
+     * <li>a horizontal stride multiple of 16 pixels</li>
+     * </ul>
+     * </p>
+     *
+     * <pre> y_size = stride * height </pre>
+     *
+     * When produced by a camera, the units for the range are millimeters.
+     */
     AIMAGE_FORMAT_DEPTH16           = 0x44363159,
+
+    /**
+     * Android sparse depth point cloud format.
+     *
+     * <p>A variable-length list of 3D points plus a confidence value, with each point represented
+     * by four floats; first the X, Y, Z position coordinates, and then the confidence value.</p>
+     *
+     * <p>The number of points is ((size of the buffer in bytes) / 16).
+     *
+     * <p>The coordinate system and units of the position values depend on the source of the point
+     * cloud data. The confidence value is between 0.f and 1.f, inclusive, with 0 representing 0%
+     * confidence and 1.f representing 100% confidence in the measured position values.</p>
+     *
+     * <p>As an example, the following code extracts the first depth point in a DEPTH_POINT_CLOUD
+     * format {@link AImage}:
+     * <pre>
+     *    float* data;
+     *    int dataLength;
+     *    AImage_getPlaneData(image, 0, (uint8_t**)&data, &dataLength);
+     *    float x = data[0];
+     *    float y = data[1];
+     *    float z = data[2];
+     *    float confidence = data[3];
+     * </pre>
+     *
+     */
     AIMAGE_FORMAT_DEPTH_POINT_CLOUD = 0x101,
-    AIMAGE_FORMAT_PRIVATE           = 0x22 ///> Not supported by AImageReader yet
+
+    /**
+     * Android private opaque image format.
+     *
+     * <p>This format is not currently supported by {@link AImageReader}.</p>
+     */
+    AIMAGE_FORMAT_PRIVATE           = 0x22
 };
 
+/**
+ * Data type describing a cropped rectangle returned by {@link AImage_getCropRect}.
+ *
+ * <p>Note that the right and bottom coordinates are exclusive, so the width of the rectangle is
+ * (right - left) and the height of the rectangle is (bottom - top).</p>
+ */
 typedef struct AImageCropRect {
     int32_t left;
     int32_t top;
@@ -64,40 +414,192 @@
     int32_t bottom;
 } AImageCropRect;
 
-// Return the image back to system and delete the AImage from memory
-// Do NOT use `image` after this call
+/**
+ * Return the image back to the system and delete the AImage object from memory.
+ *
+ * <p>Do NOT use the image pointer after this method returns.
+ * Note that if the parent {@link AImageReader} is closed, all the {@link AImage} objects acquired
+ * from the parent reader will be returned to system. All AImage_* methods except this method will
+ * return {@link AMEDIA_ERROR_INVALID_OBJECT}. Application still needs to call this method on those
+ * {@link AImage} objects to fully delete the {@link AImage} object from memory.</p>
+ *
+ * @param image The {@link AImage} to be deleted.
+ */
 void AImage_delete(AImage* image);
 
-// AMEDIA_ERROR_INVALID_OBJECT will be returned if the parent AImageReader is deleted
+/**
+ * Query the width of the input {@link AImage}.
+ *
+ * @param image the {@link AImage} of interest.
+ * @param width the width of the image will be filled here if the method call succeeds.
+ *
+ * @return <ul>
+ *         <li>{@link AMEDIA_OK} if the method call succeeds.</li>
+ *         <li>{@link AMEDIA_ERROR_INVALID_PARAMETER} if image or width is NULL.</li>
+ *         <li>{@link AMEDIA_ERROR_INVALID_OBJECT} if the {@link AImageReader} generated this
+ *                 image has been deleted.</li></ul>
+ */
 media_status_t AImage_getWidth(const AImage* image, /*out*/int32_t* width);
 
-// AMEDIA_ERROR_INVALID_OBJECT will be returned if the parent AImageReader is deleted
+/**
+ * Query the height of the input {@link AImage}.
+ *
+ * @param image the {@link AImage} of interest.
+ * @param height the height of the image will be filled here if the method call succeeds.
+ *
+ * @return <ul>
+ *         <li>{@link AMEDIA_OK} if the method call succeeds.</li>
+ *         <li>{@link AMEDIA_ERROR_INVALID_PARAMETER} if image or height is NULL.</li>
+ *         <li>{@link AMEDIA_ERROR_INVALID_OBJECT} if the {@link AImageReader} generated this
+ *                 image has been deleted.</li></ul>
+ */
 media_status_t AImage_getHeight(const AImage* image, /*out*/int32_t* height);
 
-// AMEDIA_ERROR_INVALID_OBJECT will be returned if the parent AImageReader is deleted
+/**
+ * Query the format of the input {@link AImage}.
+ *
+ * <p>The format value will be one of AIMAGE_FORMAT_* enum value.</p>
+ *
+ * @param image the {@link AImage} of interest.
+ * @param format the format of the image will be filled here if the method call succeeds.
+ *
+ * @return <ul>
+ *         <li>{@link AMEDIA_OK} if the method call succeeds.</li>
+ *         <li>{@link AMEDIA_ERROR_INVALID_PARAMETER} if image or format is NULL.</li>
+ *         <li>{@link AMEDIA_ERROR_INVALID_OBJECT} if the {@link AImageReader} generated this
+ *                 image has been deleted.</li></ul>
+ */
 media_status_t AImage_getFormat(const AImage* image, /*out*/int32_t* format);
 
-// AMEDIA_ERROR_INVALID_OBJECT will be returned if the parent AImageReader is deleted
+/**
+ * Query the cropped rectangle of the input {@link AImage}.
+ *
+ * <p>The crop rectangle specifies the region of valid pixels in the image, using coordinates in the
+ * largest-resolution plane.</p>
+ *
+ * @param image the {@link AImage} of interest.
+ * @param rect the cropped rectangle of the image will be filled here if the method call succeeds.
+ *
+ * @return <ul>
+ *         <li>{@link AMEDIA_OK} if the method call succeeds.</li>
+ *         <li>{@link AMEDIA_ERROR_INVALID_PARAMETER} if image or rect is NULL.</li>
+ *         <li>{@link AMEDIA_ERROR_INVALID_OBJECT} if the {@link AImageReader} generated this
+ *                 image has been deleted.</li></ul>
+ */
 media_status_t AImage_getCropRect(const AImage* image, /*out*/AImageCropRect* rect);
 
-// AMEDIA_ERROR_INVALID_OBJECT will be returned if the parent AImageReader is deleted
+/**
+ * Query the timestamp of the input {@link AImage}.
+ *
+ * <p>
+ * The timestamp is measured in nanoseconds, and is normally monotonically increasing. The
+ * timestamps for the images from different sources may have different timebases therefore may not
+ * be comparable. The specific meaning and timebase of the timestamp depend on the source providing
+ * images. For images generated by camera, the timestamp value will match
+ * {@link ACAMERA_SENSOR_TIMESTAMP} of the {@link ACameraMetadata} in
+ * {@link ACameraCaptureSession_captureCallbacks#onCaptureStarted} and
+ * {@link ACameraCaptureSession_captureCallbacks#onCaptureCompleted} callback.
+ * </p>
+ *
+ * @param image the {@link AImage} of interest.
+ * @param timestampNs the timestamp of the image will be filled here if the method call succeeds.
+ *
+ * @return <ul>
+ *         <li>{@link AMEDIA_OK} if the method call succeeds.</li>
+ *         <li>{@link AMEDIA_ERROR_INVALID_PARAMETER} if image or timestampNs is NULL.</li>
+ *         <li>{@link AMEDIA_ERROR_INVALID_OBJECT} if the {@link AImageReader} generated this
+ *                 image has been deleted.</li></ul>
+ */
 media_status_t AImage_getTimestamp(const AImage* image, /*out*/int64_t* timestampNs);
 
-// AMEDIA_ERROR_INVALID_OBJECT will be returned if the parent AImageReader is deleted
+/**
+ * Query the number of planes of the input {@link AImage}.
+ *
+ * <p>The number of planes of an {@link AImage} is determined by its format, which can be queried by
+ * {@link AImage_getFormat} method.</p>
+ *
+ * @param image the {@link AImage} of interest.
+ * @param numPlanes the number of planes of the image will be filled here if the method call
+ *         succeeds.
+ *
+ * @return <ul>
+ *         <li>{@link AMEDIA_OK} if the method call succeeds.</li>
+ *         <li>{@link AMEDIA_ERROR_INVALID_PARAMETER} if image or numPlanes is NULL.</li>
+ *         <li>{@link AMEDIA_ERROR_INVALID_OBJECT} if the {@link AImageReader} generated this
+ *                 image has been deleted.</li></ul>
+ */
 media_status_t AImage_getNumberOfPlanes(const AImage* image, /*out*/int32_t* numPlanes);
 
-// AMEDIA_ERROR_INVALID_OBJECT will be returned if the parent AImageReader is deleted
+/**
+ * Query the pixel stride of the input {@link AImage}.
+ *
+ * <p>This is the distance between two consecutive pixel values in a row of pixels. It may be
+ * larger than the size of a single pixel to account for interleaved image data or padded formats.
+ * Note that pixel stride is undefined for some formats such as {@link AIMAGE_FORMAT_RAW_PRIVATE},
+ * and calling this method on images of these formats will cause {@link AMEDIA_ERROR_UNSUPPORTED}
+ * being returned.
+ * For formats where pixel stride is well defined, the pixel stride is always greater than 0.</p>
+ *
+ * @param image the {@link AImage} of interest.
+ * @param planeIdx the index of the plane. Must be less than the number of planes of input image.
+ * @param pixelStride the pixel stride of the image will be filled here if the method call succeeds.
+ *
+ * @return <ul>
+ *         <li>{@link AMEDIA_OK} if the method call succeeds.</li>
+ *         <li>{@link AMEDIA_ERROR_INVALID_PARAMETER} if image or pixelStride is NULL, or planeIdx
+ *                 is out of the range of [0, numOfPlanes - 1].</li>
+ *         <li>{@link AMEDIA_ERROR_UNSUPPORTED} if pixel stride is undefined for the format of input
+ *                 image.</li>
+ *         <li>{@link AMEDIA_ERROR_INVALID_OBJECT} if the {@link AImageReader} generated this
+ *                 image has been deleted.</li></ul>
+ */
 media_status_t AImage_getPlanePixelStride(
         const AImage* image, int planeIdx, /*out*/int32_t* pixelStride);
 
-// AMEDIA_ERROR_INVALID_OBJECT will be returned if the parent AImageReader is deleted
+/**
+ * Query the row stride of the input {@link AImage}.
+ *
+ * <p>This is the distance between the start of two consecutive rows of pixels in the image. Note
+ * that row stride is undefined for some formats such as {@link AIMAGE_FORMAT_RAW_PRIVATE}, and
+ * calling this method on images of these formats will cause {@link AMEDIA_ERROR_UNSUPPORTED}
+ * being returned.
+ * For formats where row stride is well defined, the row stride is always greater than 0.</p>
+ *
+ * @param image the {@link AImage} of interest.
+ * @param planeIdx the index of the plane. Must be less than the number of planes of input image.
+ * @param rowStride the row stride of the image will be filled here if the method call succeeds.
+ *
+ * @return <ul>
+ *         <li>{@link AMEDIA_OK} if the method call succeeds.</li>
+ *         <li>{@link AMEDIA_ERROR_INVALID_PARAMETER} if image or rowStride is NULL, or planeIdx
+ *                 is out of the range of [0, numOfPlanes - 1].</li>
+ *         <li>{@link AMEDIA_ERROR_UNSUPPORTED} if row stride is undefined for the format of input
+ *                 image.</li>
+ *         <li>{@link AMEDIA_ERROR_INVALID_OBJECT} if the {@link AImageReader} generated this
+ *                 image has been deleted.</li></ul>
+ */
 media_status_t AImage_getPlaneRowStride(
         const AImage* image, int planeIdx, /*out*/int32_t* rowStride);
 
-// AMEDIA_ERROR_INVALID_OBJECT will be returned if the parent AImageReader is deleted
-// Note that once the AImage or the parent AImageReader is deleted, the `*data` returned from
-// previous AImage_getPlaneData call becomes dangling pointer. Do NOT use it after
-// AImage or AImageReader is deleted
+/**
+ * Get the data pointer of the input image for direct application access.
+ *
+ * <p>Note that once the {@link AImage} or the parent {@link AImageReader} is deleted, the data
+ * pointer from previous AImage_getPlaneData call becomes invalid. Do NOT use it after the
+ * {@link AImage} or the parent {@link AImageReader} is deleted.</p>
+ *
+ * @param image the {@link AImage} of interest.
+ * @param planeIdx the index of the plane. Must be less than the number of planes of input image.
+ * @param data the data pointer of the image will be filled here if the method call succeeds.
+ * @param dataLength the valid length of data will be filled here if the method call succeeds.
+ *
+ * @return <ul>
+ *         <li>{@link AMEDIA_OK} if the method call succeeds.</li>
+ *         <li>{@link AMEDIA_ERROR_INVALID_PARAMETER} if image, data or dataLength is NULL, or
+ *                 planeIdx is out of the range of [0, numOfPlanes - 1].</li>
+ *         <li>{@link AMEDIA_ERROR_INVALID_OBJECT} if the {@link AImageReader} generated this
+ *                 image has been deleted.</li></ul>
+ */
 media_status_t AImage_getPlaneData(
         const AImage* image, int planeIdx,
         /*out*/uint8_t** data, /*out*/int* dataLength);
diff --git a/include/ndk/NdkImageReader.h b/include/ndk/NdkImageReader.h
index 9e7483d..7c7ec6a 100644
--- a/include/ndk/NdkImageReader.h
+++ b/include/ndk/NdkImageReader.h
@@ -44,38 +44,253 @@
 extern "C" {
 #endif
 
+/**
+ * AImageReader is an opaque type that provides access to {@link AImage} objects generated from
+ * image data rendered into a {@link ANativeWindow}.
+ */
 typedef struct AImageReader AImageReader;
 
+/**
+ * Create a new reader for images of the desired size and format.
+ *
+ * <p>
+ * The maxImages parameter determines the maximum number of {@link AImage} objects that can be
+ * acquired from the {@link AImageReader} simultaneously. Requesting more buffers will use up
+ * more memory, so it is important to use only the minimum number necessary for the use case.
+ * </p>
+ * <p>
+ * The valid sizes and formats depend on the source of the image data.
+ * </p>
+ *
+ * @param width The default width in pixels of the Images that this reader will produce.
+ * @param height The default height in pixels of the Images that this reader will produce.
+ * @param format The format of the Image that this reader will produce. This must be one of the
+ *            AIMAGE_FORMAT_* enum values defined in {@link AIMAGE_FORMATS}. Note that not all
+ *            formats are supported; for example, {@link AIMAGE_FORMAT_PRIVATE} is not.
+ * @param maxImages The maximum number of images the user will want to access simultaneously. This
+ *            should be as small as possible to limit memory use. Once maxImages Images are obtained
+ *            by the user, one of them has to be released before a new {@link AImage} will become
+ *            available for access through {@link AImageReader_acquireLatestImage} or
+ *            {@link AImageReader_acquireNextImage}. Must be greater than 0.
+ * @param reader The created image reader will be filled here if the method call succeeds.
+ *
+ * @return <ul>
+ *         <li>{@link AMEDIA_OK} if the method call succeeds.</li>
+ *         <li>{@link AMEDIA_ERROR_INVALID_PARAMETER} if reader is NULL, or one or more of width,
+ *                 height, format, maxImages arguments is not supported.</li>
+ *         <li>{@link AMEDIA_ERROR_UNKNOWN} if the method fails for some other reasons.</li></ul>
+ *
+ * @see AImage
+ */
 media_status_t AImageReader_new(
         int32_t width, int32_t height, int32_t format, int32_t maxImages,
         /*out*/AImageReader** reader);
 
-// Return all images acquired from this AImageReader back to system and delete
-// the AImageReader instance from memory
-// Do NOT use `reader` after this call
+/**
+ * Delete an {@link AImageReader} and return all images generated by this reader to system.
+ *
+ * <p>This method will return all {@link AImage} objects acquired by this reader (via
+ * {@link AImageReader_acquireNextImage} or {@link AImageReader_acquireLatestImage}) to system,
+ * making any of data pointers obtained from {@link AImage_getPlaneData} invalid. Do NOT access
+ * the reader object or any of those data pointers after this method returns.</p>
+ *
+ * @param reader The image reader to be deleted.
+ */
 void AImageReader_delete(AImageReader* reader);
 
-// Do NOT call ANativeWindow_release on the output. Just use AImageReader_delete.
-media_status_t AImageReader_getWindow(AImageReader*, /*out*/ANativeWindow** window);
+/**
+ * Get a {@link ANativeWindow} that can be used to produce {@link AImage} for this image reader.
+ *
+ * @param reader The image reader of interest.
+ * @param window The output {@link ANativeWindow} will be filled here if the method call succeeds.
+ *                The {@link ANativeWindow} is managed by this image reader. Do NOT call
+ *                {@link ANativeWindow_release} on it. Instead, use {@link AImageReader_delete}.
+ *
+ * @return <ul>
+ *         <li>{@link AMEDIA_OK} if the method call succeeds.</li>
+ *         <li>{@link AMEDIA_ERROR_INVALID_PARAMETER} if reader or window is NULL.</li></ul>
+ */
+media_status_t AImageReader_getWindow(AImageReader* reader, /*out*/ANativeWindow** window);
 
+/**
+ * Query the default width of the {@link AImage} generated by this reader, in pixels.
+ *
+ * <p>The width may be overridden by the producer sending buffers to this reader's
+ * {@link ANativeWindow}. If so, the actual width of the images can be found using
+ * {@link AImage_getWidth}.</p>
+ *
+ * @param reader The image reader of interest.
+ * @param width the default width of the reader will be filled here if the method call succeeds.
+ *
+ * @return <ul>
+ *         <li>{@link AMEDIA_OK} if the method call succeeds.</li>
+ *         <li>{@link AMEDIA_ERROR_INVALID_PARAMETER} if reader or width is NULL.</li></ul>
+ */
 media_status_t AImageReader_getWidth(const AImageReader* reader, /*out*/int32_t* width);
+
+/**
+ * Query the default height of the {@link AImage} generated by this reader, in pixels.
+ *
+ * <p>The height may be overridden by the producer sending buffers to this reader's
+ * {@link ANativeWindow}. If so, the actual height of the images can be found using
+ * {@link AImage_getHeight}.</p>
+ *
+ * @param reader The image reader of interest.
+ * @param height the default height of the reader will be filled here if the method call succeeds.
+ *
+ * @return <ul>
+ *         <li>{@link AMEDIA_OK} if the method call succeeds.</li>
+ *         <li>{@link AMEDIA_ERROR_INVALID_PARAMETER} if reader or height is NULL.</li></ul>
+ */
 media_status_t AImageReader_getHeight(const AImageReader* reader, /*out*/int32_t* height);
+
+/**
+ * Query the format of the {@link AImage} generated by this reader.
+ *
+ * @param reader The image reader of interest.
+ * @param format the format of the reader will be filled here if the method call succeeds. The
+ *                value will be one of the AIMAGE_FORMAT_* enum values defined in {@link NdkImage.h}.
+ *
+ * @return <ul>
+ *         <li>{@link AMEDIA_OK} if the method call succeeds.</li>
+ *         <li>{@link AMEDIA_ERROR_INVALID_PARAMETER} if reader or format is NULL.</li></ul>
+ */
 media_status_t AImageReader_getFormat(const AImageReader* reader, /*out*/int32_t* format);
+
+/**
+ * Query the maximum number of concurrently acquired {@link AImage}s of this reader.
+ *
+ * @param reader The image reader of interest.
+ * @param maxImages the maximum number of concurrently acquired images of the reader will be filled
+ *                here if the method call succeeds.
+ *
+ * @return <ul>
+ *         <li>{@link AMEDIA_OK} if the method call succeeds.</li>
+ *         <li>{@link AMEDIA_ERROR_INVALID_PARAMETER} if reader or maxImages is NULL.</li></ul>
+ */
 media_status_t AImageReader_getMaxImages(const AImageReader* reader, /*out*/int32_t* maxImages);
 
+/**
+ * Acquire the next {@link AImage} from the image reader's queue.
+ *
+ * <p>Warning: Consider using {@link AImageReader_acquireLatestImage} instead, as it will
+ * automatically release older images, and allow slower-running processing routines to catch
+ * up to the newest frame. Usage of {@link AImageReader_acquireNextImage} is recommended for
+ * batch/background processing. Incorrectly using this method can cause images to appear
+ * with an ever-increasing delay, followed by a complete stall where no new images seem to appear.
+ * </p>
+ *
+ * <p>
+ * This method will fail if {@link AImageReader_getMaxImages maxImages} have been acquired with
+ * {@link AImageReader_acquireNextImage} or {@link AImageReader_acquireLatestImage}. In particular
+ * a sequence of {@link AImageReader_acquireNextImage} or {@link AImageReader_acquireLatestImage}
+ * calls greater than {@link AImageReader_getMaxImages maxImages} without calling
+ * {@link AImage_delete} in-between will exhaust the underlying queue. At such a time,
+ * {@link AMEDIA_IMGREADER_MAX_IMAGES_ACQUIRED} will be returned until more images are released with
+ * {@link AImage_delete}.
+ * </p>
+ *
+ * @param reader The image reader of interest.
+ * @param image the acquired {@link AImage} will be filled here if the method call succeeds.
+ *
+ * @return <ul>
+ *         <li>{@link AMEDIA_OK} if the method call succeeds.</li>
+ *         <li>{@link AMEDIA_ERROR_INVALID_PARAMETER} if reader or image is NULL.</li>
+ *         <li>{@link AMEDIA_IMGREADER_MAX_IMAGES_ACQUIRED} if the number of concurrently acquired
+ *                 images has reached the limit.</li>
+ *         <li>{@link AMEDIA_IMGREADER_NO_BUFFER_AVAILABLE} if there are no buffers currently
+ *                 available in the reader queue.</li>
+ *         <li>{@link AMEDIA_ERROR_UNKNOWN} if the method fails for some other reasons.</li></ul>
+ *
+ * @see AImageReader_acquireLatestImage
+ */
 media_status_t AImageReader_acquireNextImage(AImageReader* reader, /*out*/AImage** image);
 
+/**
+ *
+ * Acquire the latest {@link AImage} from the image reader's queue, dropping older images.
+ *
+ * <p>
+ * This operation will acquire all the images possible from the image reader, but
+ * {@link AImage_delete} all images that aren't the latest. This function is recommended to use over
+ * {@link AImageReader_acquireNextImage} for most use-cases, as it's more suited for real-time
+ * processing.
+ * </p>
+ * <p>
+ * Note that {@link AImageReader_getMaxImages maxImages} should be at least 2 for
+ * {@link AImageReader_acquireLatestImage} to be any different than
+ * {@link AImageReader_acquireNextImage} - discarding all-but-the-newest {@link AImage} requires
+ * temporarily acquiring two {@link AImage}s at once. Or more generally, calling
+ * {@link AImageReader_acquireLatestImage} with less than two images of margin, that is
+ * (maxImages - currentAcquiredImages < 2) will not discard as expected.
+ * </p>
+ * <p>
+ * This method will fail if {@link AImageReader_getMaxImages maxImages} have been acquired with
+ * {@link AImageReader_acquireNextImage} or {@link AImageReader_acquireLatestImage}. In particular
+ * a sequence of {@link AImageReader_acquireNextImage} or {@link AImageReader_acquireLatestImage}
+ * calls greater than {@link AImageReader_getMaxImages maxImages} without calling
+ * {@link AImage_delete} in-between will exhaust the underlying queue. At such a time,
+ * {@link AMEDIA_IMGREADER_MAX_IMAGES_ACQUIRED} will be returned until more images are released with
+ * {@link AImage_delete}.
+ * </p>
+ *
+ * @param reader The image reader of interest.
+ * @param image the acquired {@link AImage} will be filled here if the method call succeeds.
+ *
+ * @return <ul>
+ *         <li>{@link AMEDIA_OK} if the method call succeeds.</li>
+ *         <li>{@link AMEDIA_ERROR_INVALID_PARAMETER} if reader or image is NULL.</li>
+ *         <li>{@link AMEDIA_IMGREADER_MAX_IMAGES_ACQUIRED} if the number of concurrently acquired
+ *                 images has reached the limit.</li>
+ *         <li>{@link AMEDIA_IMGREADER_NO_BUFFER_AVAILABLE} if there are no buffers currently
+ *                 available in the reader queue.</li>
+ *         <li>{@link AMEDIA_ERROR_UNKNOWN} if the method fails for some other reasons.</li></ul>
+ *
+ * @see AImageReader_acquireNextImage
+ */
 media_status_t AImageReader_acquireLatestImage(AImageReader* reader, /*out*/AImage** image);
 
-// The callback happens on one dedicated thread per AImageReader instance
-// It's okay to use AImageReader_*/AImage_* APIs within the callback
+
+/**
+ * The definition of {@link AImageReader} new image available callback.
+ *
+ * @param context The optional application context provided by user in
+ *                {@link AImageReader_setImageListener}.
+ * @param reader The {@link AImageReader} this callback is attached to.
+ */
 typedef void (*AImageReader_ImageCallback)(void* context, AImageReader* reader);
 
 typedef struct AImageReader_ImageListener {
-    void*                      context; // optional application context.
+    /// optional application context.
+    void*                      context;
+
+    /**
+     * This callback is called when there is a new image available in the image reader's queue.
+     *
+     * <p>The callback happens on one dedicated thread per {@link AImageReader} instance. It is okay
+     * to use AImageReader_* and AImage_* methods within the callback. Note that it is possible that
+     * calling {@link AImageReader_acquireNextImage} or {@link AImageReader_acquireLatestImage}
+     * returns {@link AMEDIA_IMGREADER_NO_BUFFER_AVAILABLE} within this callback. For example, when
+     * there are multiple images and callbacks queued, if application called
+     * {@link AImageReader_acquireLatestImage}, some images will be returned to system before their
+     * corresponding callback is executed.</p>
+     */
     AImageReader_ImageCallback onImageAvailable;
 } AImageReader_ImageListener;
 
+/**
+ * Set the onImageAvailable listener of this image reader.
+ *
+ * <p>Note that calling this method will replace previously registered listeners.</p>
+ *
+ * @param reader The image reader of interest.
+ * @param listener the {@link AImageReader_ImageListener} to be registered. Set this to NULL if
+ *                 application no longer needs to listen to new images.
+ *
+ * @return <ul>
+ *         <li>{@link AMEDIA_OK} if the method call succeeds.</li>
+ *         <li>{@link AMEDIA_ERROR_INVALID_PARAMETER} if reader is NULL.</li></ul>
+ */
 media_status_t AImageReader_setImageListener(
         AImageReader* reader, AImageReader_ImageListener* listener);
 
diff --git a/media/libmedia/Android.mk b/media/libmedia/Android.mk
index 63f9ed7..2bdfd43 100644
--- a/media/libmedia/Android.mk
+++ b/media/libmedia/Android.mk
@@ -49,6 +49,7 @@
     IResourceManagerService.cpp \
     IStreamSource.cpp \
     MediaCodecInfo.cpp \
+    MediaUtils.cpp \
     Metadata.cpp \
     mediarecorder.cpp \
     IMediaMetadataRetriever.cpp \
@@ -81,6 +82,9 @@
 
 LOCAL_WHOLE_STATIC_LIBRARIES := libmedia_helper
 
+# for memory heap analysis
+LOCAL_STATIC_LIBRARIES := libc_malloc_debug_backtrace libc_logging
+
 LOCAL_MODULE:= libmedia
 
 LOCAL_ADDITIONAL_DEPENDENCIES := $(LOCAL_PATH)/Android.mk
diff --git a/media/libmedia/AudioRecord.cpp b/media/libmedia/AudioRecord.cpp
index 2976a5c..d9bb856 100644
--- a/media/libmedia/AudioRecord.cpp
+++ b/media/libmedia/AudioRecord.cpp
@@ -533,7 +533,8 @@
     status = AudioSystem::getInputForAttr(&mAttributes, &input,
                                         mSessionId,
                                         // FIXME compare to AudioTrack
-                                        IPCThreadState::self()->getCallingUid(),
+                                        mClientPid,
+                                        mClientUid,
                                         mSampleRate, mFormat, mChannelMask,
                                         mFlags, mSelectedDeviceId);
 
@@ -615,6 +616,7 @@
                                                        opPackageName,
                                                        &temp,
                                                        &trackFlags,
+                                                       mClientPid,
                                                        tid,
                                                        mClientUid,
                                                        &mSessionId,
diff --git a/media/libmedia/AudioSystem.cpp b/media/libmedia/AudioSystem.cpp
index bbdf65e..808b3ab 100644
--- a/media/libmedia/AudioSystem.cpp
+++ b/media/libmedia/AudioSystem.cpp
@@ -836,6 +836,7 @@
 status_t AudioSystem::getInputForAttr(const audio_attributes_t *attr,
                                 audio_io_handle_t *input,
                                 audio_session_t session,
+                                pid_t pid,
                                 uid_t uid,
                                 uint32_t samplingRate,
                                 audio_format_t format,
@@ -846,7 +847,8 @@
     const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
     if (aps == 0) return NO_INIT;
     return aps->getInputForAttr(
-            attr, input, session, uid, samplingRate, format, channelMask, flags, selectedDeviceId);
+            attr, input, session, pid, uid,
+            samplingRate, format, channelMask, flags, selectedDeviceId);
 }
 
 status_t AudioSystem::startInput(audio_io_handle_t input,
diff --git a/media/libmedia/AudioTrack.cpp b/media/libmedia/AudioTrack.cpp
index 22a5acd..1963da3 100644
--- a/media/libmedia/AudioTrack.cpp
+++ b/media/libmedia/AudioTrack.cpp
@@ -1401,6 +1401,7 @@
                                                       &trackFlags,
                                                       mSharedBuffer,
                                                       output,
+                                                      mClientPid,
                                                       tid,
                                                       &mSessionId,
                                                       mClientUid,
@@ -2389,6 +2390,9 @@
                     } else {
                         timestamp.mPosition = (uint32_t)(ets.mPosition[location] - frames);
                     }
+                } else if (location == ExtendedTimestamp::LOCATION_KERNEL) {
+                    ALOGV_IF(mPreviousLocation == ExtendedTimestamp::LOCATION_SERVER,
+                            "getTimestamp() location moved from server to kernel");
                 }
                 mPreviousLocation = location;
             } else {
diff --git a/media/libmedia/IAudioFlinger.cpp b/media/libmedia/IAudioFlinger.cpp
index aa75188..92e65e4 100644
--- a/media/libmedia/IAudioFlinger.cpp
+++ b/media/libmedia/IAudioFlinger.cpp
@@ -104,6 +104,7 @@
                                 track_flags_t *flags,
                                 const sp<IMemory>& sharedBuffer,
                                 audio_io_handle_t output,
+                                pid_t pid,
                                 pid_t tid,
                                 audio_session_t *sessionId,
                                 int clientUid,
@@ -128,6 +129,7 @@
             data.writeInt32(false);
         }
         data.writeInt32((int32_t) output);
+        data.writeInt32((int32_t) pid);
         data.writeInt32((int32_t) tid);
         audio_session_t lSessionId = AUDIO_SESSION_ALLOCATE;
         if (sessionId != NULL) {
@@ -179,6 +181,7 @@
                                 const String16& opPackageName,
                                 size_t *pFrameCount,
                                 track_flags_t *flags,
+                                pid_t pid,
                                 pid_t tid,
                                 int clientUid,
                                 audio_session_t *sessionId,
@@ -199,6 +202,7 @@
         data.writeInt64(frameCount);
         track_flags_t lFlags = flags != NULL ? *flags : (track_flags_t) TRACK_DEFAULT;
         data.writeInt32(lFlags);
+        data.writeInt32((int32_t) pid);
         data.writeInt32((int32_t) tid);
         data.writeInt32((int32_t) clientUid);
         audio_session_t lSessionId = AUDIO_SESSION_ALLOCATE;
@@ -950,6 +954,7 @@
                 buffer = interface_cast<IMemory>(data.readStrongBinder());
             }
             audio_io_handle_t output = (audio_io_handle_t) data.readInt32();
+            pid_t pid = (pid_t) data.readInt32();
             pid_t tid = (pid_t) data.readInt32();
             audio_session_t sessionId = (audio_session_t) data.readInt32();
             int clientUid = data.readInt32();
@@ -962,7 +967,7 @@
             } else {
                 track = createTrack(
                         (audio_stream_type_t) streamType, sampleRate, format,
-                        channelMask, &frameCount, &flags, buffer, output, tid,
+                        channelMask, &frameCount, &flags, buffer, output, pid, tid,
                         &sessionId, clientUid, &status);
                 LOG_ALWAYS_FATAL_IF((track != 0) != (status == NO_ERROR));
             }
@@ -982,6 +987,7 @@
             const String16& opPackageName = data.readString16();
             size_t frameCount = data.readInt64();
             track_flags_t flags = (track_flags_t) data.readInt32();
+            pid_t pid = (pid_t) data.readInt32();
             pid_t tid = (pid_t) data.readInt32();
             int clientUid = data.readInt32();
             audio_session_t sessionId = (audio_session_t) data.readInt32();
@@ -990,8 +996,9 @@
             sp<IMemory> buffers;
             status_t status = NO_ERROR;
             sp<IAudioRecord> record = openRecord(input,
-                    sampleRate, format, channelMask, opPackageName, &frameCount, &flags, tid,
-                    clientUid, &sessionId, &notificationFrames, cblk, buffers, &status);
+                    sampleRate, format, channelMask, opPackageName, &frameCount, &flags,
+                    pid, tid, clientUid, &sessionId, &notificationFrames, cblk, buffers,
+                    &status);
             LOG_ALWAYS_FATAL_IF((record != 0) != (status == NO_ERROR));
             reply->writeInt64(frameCount);
             reply->writeInt32(flags);
diff --git a/media/libmedia/IAudioPolicyService.cpp b/media/libmedia/IAudioPolicyService.cpp
index 4ea67da..6405d6d 100644
--- a/media/libmedia/IAudioPolicyService.cpp
+++ b/media/libmedia/IAudioPolicyService.cpp
@@ -280,6 +280,7 @@
     virtual status_t getInputForAttr(const audio_attributes_t *attr,
                                      audio_io_handle_t *input,
                                      audio_session_t session,
+                                     pid_t pid,
                                      uid_t uid,
                                      uint32_t samplingRate,
                                      audio_format_t format,
@@ -299,6 +300,7 @@
         }
         data.write(attr, sizeof(audio_attributes_t));
         data.writeInt32(session);
+        data.writeInt32(pid);
         data.writeInt32(uid);
         data.writeInt32(samplingRate);
         data.writeInt32(static_cast <uint32_t>(format));
@@ -959,6 +961,7 @@
             audio_attributes_t attr;
             data.read(&attr, sizeof(audio_attributes_t));
             audio_session_t session = (audio_session_t)data.readInt32();
+            pid_t pid = (pid_t)data.readInt32();
             uid_t uid = (uid_t)data.readInt32();
             uint32_t samplingRate = data.readInt32();
             audio_format_t format = (audio_format_t) data.readInt32();
@@ -966,7 +969,7 @@
             audio_input_flags_t flags = (audio_input_flags_t) data.readInt32();
             audio_port_handle_t selectedDeviceId = (audio_port_handle_t) data.readInt32();
             audio_io_handle_t input = AUDIO_IO_HANDLE_NONE;
-            status_t status = getInputForAttr(&attr, &input, session, uid,
+            status_t status = getInputForAttr(&attr, &input, session, pid, uid,
                                               samplingRate, format, channelMask,
                                               flags, selectedDeviceId);
             reply->writeInt32(status);
diff --git a/media/libmedia/IMediaDeathNotifier.cpp b/media/libmedia/IMediaDeathNotifier.cpp
index d4360ea..c43ef66 100644
--- a/media/libmedia/IMediaDeathNotifier.cpp
+++ b/media/libmedia/IMediaDeathNotifier.cpp
@@ -31,7 +31,7 @@
 SortedVector< wp<IMediaDeathNotifier> > IMediaDeathNotifier::sObitRecipients;
 
 // establish binder interface to MediaPlayerService
-/*static*/const sp<IMediaPlayerService>&
+/*static*/const sp<IMediaPlayerService>
 IMediaDeathNotifier::getMediaPlayerService()
 {
     ALOGV("getMediaPlayerService");
diff --git a/media/libmedia/IMediaSource.cpp b/media/libmedia/IMediaSource.cpp
index 84f1181..7e40e4f 100644
--- a/media/libmedia/IMediaSource.cpp
+++ b/media/libmedia/IMediaSource.cpp
@@ -37,6 +37,7 @@
     PAUSE,
     GETFORMAT,
     READ,
+    READMULTIPLE,
     RELEASE_BUFFER
 };
 
@@ -189,6 +190,37 @@
         return ret;
     }
 
+    virtual status_t readMultiple(Vector<MediaBuffer *> *buffers, uint32_t maxNumBuffers) {
+        ALOGV("readMultiple");
+        if (buffers == NULL || !buffers->isEmpty()) {
+            return BAD_VALUE;
+        }
+        Parcel data, reply;
+        data.writeInterfaceToken(BpMediaSource::getInterfaceDescriptor());
+        data.writeUint32(maxNumBuffers);
+        status_t ret = remote()->transact(READMULTIPLE, data, &reply);
+        if (ret != NO_ERROR) {
+            return ret;
+        }
+        // wrap the returned data in a vector of MediaBuffers
+        int32_t bufCount = 0;
+        while (1) {
+            if (reply.readInt32() == 0) {
+                break;
+            }
+            int32_t len = reply.readInt32();
+            ALOGV("got len %d", len);
+            MediaBuffer *buf = new MediaBuffer(len);
+            reply.read(buf->data(), len);
+            buf->meta_data()->updateFromParcel(reply);
+            buffers->push_back(buf);
+            ++bufCount;
+        }
+        ret = reply.readInt32();
+        ALOGV("got status %d, bufCount %d", ret, bufCount);
+        return ret;
+    }
+
     virtual status_t pause() {
         ALOGV("pause");
         Parcel data, reply;
@@ -340,6 +372,37 @@
             }
             return NO_ERROR;
         }
+        case READMULTIPLE: {
+            ALOGV("readmultiple");
+            CHECK_INTERFACE(IMediaSource, data, reply);
+            uint32_t maxNumBuffers;
+            data.readUint32(&maxNumBuffers);
+            status_t ret = NO_ERROR;
+            uint32_t bufferCount = 0;
+            if (maxNumBuffers > kMaxNumReadMultiple) {
+                maxNumBuffers = kMaxNumReadMultiple;
+            }
+            while (bufferCount < maxNumBuffers) {
+                if (reply->dataSize() >= MediaBuffer::kSharedMemThreshold) {
+                    break;
+                }
+
+                MediaBuffer *buf = NULL;
+                ret = read(&buf, NULL);
+                if (ret != NO_ERROR || buf == NULL) {
+                    break;
+                }
+                ++bufferCount;
+                reply->writeInt32(1);  // indicate one more MediaBuffer.
+                reply->writeByteArray(
+                        buf->range_length(), (uint8_t*)buf->data() + buf->range_offset());
+                buf->meta_data()->writeToParcel(*reply);
+                buf->release();
+            }
+            reply->writeInt32(0);  // indicate no more MediaBuffer.
+            reply->writeInt32(ret);
+            return NO_ERROR;
+        }
         default:
             return BBinder::onTransact(code, data, reply, flags);
     }
diff --git a/media/libmedia/MediaUtils.cpp b/media/libmedia/MediaUtils.cpp
new file mode 100644
index 0000000..a02ca65
--- /dev/null
+++ b/media/libmedia/MediaUtils.cpp
@@ -0,0 +1,74 @@
+/*
+ * Copyright (C) 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "MediaUtils"
+#define LOG_NDEBUG 0
+#include <utils/Log.h>
+
+#include <cutils/properties.h>
+#include <sys/resource.h>
+#include <unistd.h>
+
+#include "MediaUtils.h"
+
+namespace android {
+
+void limitProcessMemory(
+    const char *property,
+    size_t numberOfBytes,
+    size_t percentageOfTotalMem) {
+
+    long pageSize = sysconf(_SC_PAGESIZE);
+    long numPages = sysconf(_SC_PHYS_PAGES);
+    size_t maxMem = SIZE_MAX;
+
+    if (pageSize > 0 && numPages > 0) {
+        if (size_t(numPages) < SIZE_MAX / size_t(pageSize)) {
+            maxMem = size_t(numPages) * size_t(pageSize);
+        }
+        ALOGV("physMem: %zu", maxMem);
+        if (percentageOfTotalMem > 100) {
+            ALOGW("requested %zu%% of total memory, using 100%%", percentageOfTotalMem);
+            percentageOfTotalMem = 100;
+        }
+        maxMem = maxMem / 100 * percentageOfTotalMem;
+        if (numberOfBytes < maxMem) {
+            maxMem = numberOfBytes;
+        }
+        ALOGV("requested limit: %zu", maxMem);
+    } else {
+        ALOGW("couldn't determine total RAM");
+    }
+
+    int64_t propVal = property_get_int64(property, maxMem);
+    if (propVal > 0 && uint64_t(propVal) <= SIZE_MAX) {
+        maxMem = propVal;
+    }
+    ALOGV("actual limit: %zu", maxMem);
+
+    struct rlimit limit;
+    getrlimit(RLIMIT_AS, &limit);
+    ALOGV("original limits: %lld/%lld", (long long)limit.rlim_cur, (long long)limit.rlim_max);
+    limit.rlim_cur = maxMem;
+    setrlimit(RLIMIT_AS, &limit);
+    limit.rlim_cur = -1;
+    limit.rlim_max = -1;
+    getrlimit(RLIMIT_AS, &limit);
+    ALOGV("new limits: %lld/%lld", (long long)limit.rlim_cur, (long long)limit.rlim_max);
+
+}
+
+} // namespace android
diff --git a/media/libmedia/MediaUtils.h b/media/libmedia/MediaUtils.h
new file mode 100644
index 0000000..f80dd30
--- /dev/null
+++ b/media/libmedia/MediaUtils.h
@@ -0,0 +1,35 @@
+/*
+ * Copyright (C) 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef _MEDIA_UTILS_H
+#define _MEDIA_UTILS_H
+
+namespace android {
+
+/**
+   Limit the amount of memory a process can allocate using setrlimit(RLIMIT_AS).
+   The value to use will be read from the specified system property, or if the
+   property doesn't exist it will use the specified number of bytes or the
+   specified percentage of total memory, whichever is smaller.
+*/
+void limitProcessMemory(
+    const char *property,
+    size_t numberOfBytes,
+    size_t percentageOfTotalMem);
+
+}   // namespace android
+
+#endif  // _MEDIA_UTILS_H
diff --git a/media/libmedia/MemoryLeakTrackUtil.cpp b/media/libmedia/MemoryLeakTrackUtil.cpp
index 554dbae..18f5f25 100644
--- a/media/libmedia/MemoryLeakTrackUtil.cpp
+++ b/media/libmedia/MemoryLeakTrackUtil.cpp
@@ -14,166 +14,84 @@
  * limitations under the License.
  */
 
-#include <media/MemoryLeakTrackUtil.h>
 
-#include <stdio.h>
-#include <stdlib.h>
-#include <string.h>
-#include <sys/types.h>
-#include <unistd.h>
+//#define LOG_NDEBUG 0
+#define LOG_TAG "MemoryLeakTrackUtil"
+#include <utils/Log.h>
+
+#include "media/MemoryLeakTrackUtil.h"
+#include <sstream>
 
 /*
- * The code here originally resided in MediaPlayerService.cpp and was
- * shamelessly copied over to support memory leak tracking from
- * multiple places.
+ * The code here originally resided in MediaPlayerService.cpp
  */
-namespace android {
 
+// Figure out the abi based on defined macros.
 #if defined(__arm__)
+#define ABI_STRING "arm"
+#elif defined(__aarch64__)
+#define ABI_STRING "arm64"
+#elif defined(__mips__) && !defined(__LP64__)
+#define ABI_STRING "mips"
+#elif defined(__mips__) && defined(__LP64__)
+#define ABI_STRING "mips64"
+#elif defined(__i386__)
+#define ABI_STRING "x86"
+#elif defined(__x86_64__)
+#define ABI_STRING "x86_64"
+#else
+#error "Unsupported ABI"
+#endif
+
+extern std::string backtrace_string(const uintptr_t* frames, size_t frame_count);
+
+namespace android {
 
 extern "C" void get_malloc_leak_info(uint8_t** info, size_t* overallSize,
         size_t* infoSize, size_t* totalMemory, size_t* backtraceSize);
 
 extern "C" void free_malloc_leak_info(uint8_t* info);
 
-// Use the String-class below instead of String8 to allocate all memory
-// beforehand and not reenter the heap while we are examining it...
-struct MyString8 {
-    static const size_t MAX_SIZE = 256 * 1024;
-
-    MyString8()
-        : mPtr((char *)malloc(MAX_SIZE)) {
-        *mPtr = '\0';
-    }
-
-    ~MyString8() {
-        free(mPtr);
-    }
-
-    void append(const char *s) {
-        strncat(mPtr, s, MAX_SIZE - size() - 1);
-    }
-
-    const char *string() const {
-        return mPtr;
-    }
-
-    size_t size() const {
-        return strlen(mPtr);
-    }
-
-    void clear() {
-        *mPtr = '\0';
-    }
-
-private:
-    char *mPtr;
-
-    MyString8(const MyString8 &);
-    MyString8 &operator=(const MyString8 &);
-};
-
-void dumpMemoryAddresses(int fd)
+std::string dumpMemoryAddresses(size_t limit)
 {
-    const size_t SIZE = 256;
-    char buffer[SIZE];
-    MyString8 result;
-
-    typedef struct {
-        size_t size;
-        size_t dups;
-        intptr_t * backtrace;
-    } AllocEntry;
-
-    uint8_t *info = NULL;
-    size_t overallSize = 0;
-    size_t infoSize = 0;
-    size_t totalMemory = 0;
-    size_t backtraceSize = 0;
-
+    uint8_t *info;
+    size_t overallSize;
+    size_t infoSize;
+    size_t totalMemory;
+    size_t backtraceSize;
     get_malloc_leak_info(&info, &overallSize, &infoSize, &totalMemory, &backtraceSize);
-    if (info) {
-        uint8_t *ptr = info;
-        size_t count = overallSize / infoSize;
 
-        snprintf(buffer, SIZE, " Allocation count %i\n", count);
-        result.append(buffer);
-        snprintf(buffer, SIZE, " Total memory %i\n", totalMemory);
-        result.append(buffer);
-
-        AllocEntry * entries = new AllocEntry[count];
-
-        for (size_t i = 0; i < count; i++) {
-            // Each entry should be size_t, size_t, intptr_t[backtraceSize]
-            AllocEntry *e = &entries[i];
-
-            e->size = *reinterpret_cast<size_t *>(ptr);
-            ptr += sizeof(size_t);
-
-            e->dups = *reinterpret_cast<size_t *>(ptr);
-            ptr += sizeof(size_t);
-
-            e->backtrace = reinterpret_cast<intptr_t *>(ptr);
-            ptr += sizeof(intptr_t) * backtraceSize;
-        }
-
-        // Now we need to sort the entries.  They come sorted by size but
-        // not by stack trace which causes problems using diff.
-        bool moved;
-        do {
-            moved = false;
-            for (size_t i = 0; i < (count - 1); i++) {
-                AllocEntry *e1 = &entries[i];
-                AllocEntry *e2 = &entries[i+1];
-
-                bool swap = e1->size < e2->size;
-                if (e1->size == e2->size) {
-                    for(size_t j = 0; j < backtraceSize; j++) {
-                        if (e1->backtrace[j] == e2->backtrace[j]) {
-                            continue;
-                        }
-                        swap = e1->backtrace[j] < e2->backtrace[j];
-                        break;
-                    }
-                }
-                if (swap) {
-                    AllocEntry t = entries[i];
-                    entries[i] = entries[i+1];
-                    entries[i+1] = t;
-                    moved = true;
-                }
-            }
-        } while (moved);
-
-        write(fd, result.string(), result.size());
-        result.clear();
-
-        for (size_t i = 0; i < count; i++) {
-            AllocEntry *e = &entries[i];
-
-            snprintf(buffer, SIZE, "size %8i, dup %4i, ", e->size, e->dups);
-            result.append(buffer);
-            for (size_t ct = 0; (ct < backtraceSize) && e->backtrace[ct]; ct++) {
-                if (ct) {
-                    result.append(", ");
-                }
-                snprintf(buffer, SIZE, "0x%08x", e->backtrace[ct]);
-                result.append(buffer);
-            }
-            result.append("\n");
-
-            write(fd, result.string(), result.size());
-            result.clear();
-        }
-
-        delete[] entries;
-        free_malloc_leak_info(info);
+    size_t count;
+    if (info == nullptr || overallSize == 0 || infoSize == 0
+            || (count = overallSize / infoSize) == 0) {
+        ALOGD("no malloc info, libc.debug.malloc.program property should be set");
+        return std::string();
     }
+
+    std::ostringstream oss;
+    oss << totalMemory << " bytes in " << count << " allocations\n";
+    oss << "  ABI: '" ABI_STRING "'" << "\n\n";
+    if (count > limit) count = limit;
+
+    // The memory is sorted based on total size which is useful for finding
+    // worst memory offenders. For diffs, sometimes it is preferable to sort
+    // based on the backtrace.
+    for (size_t i = 0; i < count; i++) {
+        struct AllocEntry {
+            size_t size;  // bit 31 is set if this is zygote allocated memory
+            size_t allocations;
+            uintptr_t backtrace[];
+        };
+
+        const AllocEntry * const e = (AllocEntry *)(info + i * infoSize);
+
+        oss << (e->size * e->allocations)
+                << " bytes ( " << e->size << " bytes * " << e->allocations << " allocations )\n";
+        oss << backtrace_string(e->backtrace, backtraceSize) << "\n";
+    }
+    oss << "\n";
+    free_malloc_leak_info(info);
+    return oss.str();
 }
 
-#else
-// Does nothing
-void dumpMemoryAddresses(int fd __unused) {}
-
-#endif
 }  // namespace android
diff --git a/media/libmedia/mediaplayer.cpp b/media/libmedia/mediaplayer.cpp
index 2795101..8725dfe 100644
--- a/media/libmedia/mediaplayer.cpp
+++ b/media/libmedia/mediaplayer.cpp
@@ -152,7 +152,7 @@
     ALOGV("setDataSource(%s)", url);
     status_t err = BAD_VALUE;
     if (url != NULL) {
-        const sp<IMediaPlayerService>& service(getMediaPlayerService());
+        const sp<IMediaPlayerService> service(getMediaPlayerService());
         if (service != 0) {
             sp<IMediaPlayer> player(service->create(this, mAudioSessionId));
             if ((NO_ERROR != doSetRetransmitEndpoint(player)) ||
@@ -169,7 +169,7 @@
 {
     ALOGV("setDataSource(%d, %" PRId64 ", %" PRId64 ")", fd, offset, length);
     status_t err = UNKNOWN_ERROR;
-    const sp<IMediaPlayerService>& service(getMediaPlayerService());
+    const sp<IMediaPlayerService> service(getMediaPlayerService());
     if (service != 0) {
         sp<IMediaPlayer> player(service->create(this, mAudioSessionId));
         if ((NO_ERROR != doSetRetransmitEndpoint(player)) ||
@@ -185,7 +185,7 @@
 {
     ALOGV("setDataSource(IDataSource)");
     status_t err = UNKNOWN_ERROR;
-    const sp<IMediaPlayerService>& service(getMediaPlayerService());
+    const sp<IMediaPlayerService> service(getMediaPlayerService());
     if (service != 0) {
         sp<IMediaPlayer> player(service->create(this, mAudioSessionId));
         if ((NO_ERROR != doSetRetransmitEndpoint(player)) ||
@@ -396,11 +396,22 @@
     }
     Mutex::Autolock _l(mLock);
     if (mPlayer == 0) return INVALID_OPERATION;
+
+    if (rate.mSpeed != 0.f && !(mCurrentState & MEDIA_PLAYER_STARTED)
+            && (mCurrentState & (MEDIA_PLAYER_PREPARED | MEDIA_PLAYER_PAUSED
+                    | MEDIA_PLAYER_PLAYBACK_COMPLETE))) {
+        mPlayer->setLooping(mLoop);
+        mPlayer->setVolume(mLeftVolume, mRightVolume);
+        mPlayer->setAuxEffectSendLevel(mSendLevel);
+    }
+
     status_t err = mPlayer->setPlaybackSettings(rate);
     if (err == OK) {
         if (rate.mSpeed == 0.f && mCurrentState == MEDIA_PLAYER_STARTED) {
             mCurrentState = MEDIA_PLAYER_PAUSED;
-        } else if (rate.mSpeed != 0.f && mCurrentState == MEDIA_PLAYER_PAUSED) {
+        } else if (rate.mSpeed != 0.f
+                && (mCurrentState & (MEDIA_PLAYER_PREPARED | MEDIA_PLAYER_PAUSED
+                    | MEDIA_PLAYER_PLAYBACK_COMPLETE))) {
             mCurrentState = MEDIA_PLAYER_STARTED;
         }
     }
diff --git a/media/libmedia/mediarecorder.cpp b/media/libmedia/mediarecorder.cpp
index de3b214..59c077a 100644
--- a/media/libmedia/mediarecorder.cpp
+++ b/media/libmedia/mediarecorder.cpp
@@ -662,7 +662,7 @@
 {
     ALOGV("constructor");
 
-    const sp<IMediaPlayerService>& service(getMediaPlayerService());
+    const sp<IMediaPlayerService> service(getMediaPlayerService());
     if (service != NULL) {
         mMediaRecorder = service->createMediaRecorder(opPackageName);
     }
diff --git a/media/libmediaplayerservice/MediaPlayerService.cpp b/media/libmediaplayerservice/MediaPlayerService.cpp
index cd91e72..acba6d7 100644
--- a/media/libmediaplayerservice/MediaPlayerService.cpp
+++ b/media/libmediaplayerservice/MediaPlayerService.cpp
@@ -536,7 +536,9 @@
             }
         }
         if (dumpMem) {
-            dumpMemoryAddresses(fd);
+            result.append("\nDumping memory:\n");
+            std::string s = dumpMemoryAddresses(100 /* limit */);
+            result.append(s.c_str(), s.size());
         }
         if (unreachableMemory) {
             result.append("\nDumping unreachable memory:\n");
@@ -682,10 +684,6 @@
     mCodecDeathListener = new ServiceDeathNotifier(binder, p, MEDIACODEC_PROCESS_DEATH);
     binder->linkToDeath(mCodecDeathListener);
 
-    binder = sm->getService(String16("media.audio_flinger"));
-    mAudioDeathListener = new ServiceDeathNotifier(binder, p, AUDIO_PROCESS_DEATH);
-    binder->linkToDeath(mAudioDeathListener);
-
     if (!p->hardwareOutput()) {
         Mutex::Autolock l(mLock);
         mAudioOutput = new AudioOutput(mAudioSessionId, IPCThreadState::self()->getCallingUid(),
diff --git a/media/libmediaplayerservice/MediaPlayerService.h b/media/libmediaplayerservice/MediaPlayerService.h
index 0ecfdbc..01977f5 100644
--- a/media/libmediaplayerservice/MediaPlayerService.h
+++ b/media/libmediaplayerservice/MediaPlayerService.h
@@ -231,7 +231,7 @@
         MEDIASERVER_PROCESS_DEATH = 0,
         MEDIAEXTRACTOR_PROCESS_DEATH = 1,
         MEDIACODEC_PROCESS_DEATH = 2,
-        AUDIO_PROCESS_DEATH = 3,
+        AUDIO_PROCESS_DEATH = 3,   // currently no need to track this
         CAMERA_PROCESS_DEATH = 4
     };
 
@@ -419,7 +419,6 @@
 
         sp<IBinder::DeathRecipient> mExtractorDeathListener;
         sp<IBinder::DeathRecipient> mCodecDeathListener;
-        sp<IBinder::DeathRecipient> mAudioDeathListener;
 #if CALLBACK_ANTAGONIZER
                     Antagonizer*                mAntagonizer;
 #endif
diff --git a/media/libmediaplayerservice/MediaRecorderClient.cpp b/media/libmediaplayerservice/MediaRecorderClient.cpp
index 2832166..d011d70 100644
--- a/media/libmediaplayerservice/MediaRecorderClient.cpp
+++ b/media/libmediaplayerservice/MediaRecorderClient.cpp
@@ -378,11 +378,6 @@
             MediaPlayerService::MEDIACODEC_PROCESS_DEATH);
     binder->linkToDeath(mCodecDeathListener);
 
-    binder = sm->getService(String16("media.audio_flinger"));
-    mAudioDeathListener = new ServiceDeathNotifier(binder, listener,
-            MediaPlayerService::AUDIO_PROCESS_DEATH);
-    binder->linkToDeath(mAudioDeathListener);
-
     return OK;
 }
 
diff --git a/media/libmediaplayerservice/MediaRecorderClient.h b/media/libmediaplayerservice/MediaRecorderClient.h
index 6e70194..eceb653 100644
--- a/media/libmediaplayerservice/MediaRecorderClient.h
+++ b/media/libmediaplayerservice/MediaRecorderClient.h
@@ -87,7 +87,6 @@
 
     sp<IBinder::DeathRecipient> mCameraDeathListener;
     sp<IBinder::DeathRecipient> mCodecDeathListener;
-    sp<IBinder::DeathRecipient> mAudioDeathListener;
 
     pid_t                  mPid;
     Mutex                  mLock;
diff --git a/media/libmediaplayerservice/StagefrightRecorder.cpp b/media/libmediaplayerservice/StagefrightRecorder.cpp
index 6114af8..97ba76b 100644
--- a/media/libmediaplayerservice/StagefrightRecorder.cpp
+++ b/media/libmediaplayerservice/StagefrightRecorder.cpp
@@ -934,7 +934,9 @@
                 mOpPackageName,
                 sourceSampleRate,
                 mAudioChannels,
-                mSampleRate);
+                mSampleRate,
+                mClientUid,
+                mClientPid);
 
     status_t err = audioSource->initCheck();
 
diff --git a/media/libmediaplayerservice/nuplayer/GenericSource.cpp b/media/libmediaplayerservice/nuplayer/GenericSource.cpp
index 9e33cb5..56042d4 100644
--- a/media/libmediaplayerservice/nuplayer/GenericSource.cpp
+++ b/media/libmediaplayerservice/nuplayer/GenericSource.cpp
@@ -1420,14 +1420,28 @@
         options.setNonBlocking();
     }
 
+    bool couldReadMultiple = (!mIsWidevine && trackType == MEDIA_TRACK_TYPE_AUDIO);
     for (size_t numBuffers = 0; numBuffers < maxBuffers; ) {
-        MediaBuffer *mbuf;
-        status_t err = track->mSource->read(&mbuf, &options);
+        Vector<MediaBuffer *> mediaBuffers;
+        status_t err = NO_ERROR;
+
+        if (!seeking && couldReadMultiple) {
+            err = track->mSource->readMultiple(&mediaBuffers, (maxBuffers - numBuffers));
+        } else {
+            MediaBuffer *mbuf = NULL;
+            err = track->mSource->read(&mbuf, &options);
+            if (err == OK && mbuf != NULL) {
+                mediaBuffers.push_back(mbuf);
+            }
+        }
 
         options.clearSeekTo();
 
-        if (err == OK) {
+        size_t id = 0;
+        size_t count = mediaBuffers.size();
+        for (; id < count; ++id) {
             int64_t timeUs;
+            MediaBuffer *mbuf = mediaBuffers[id];
             if (!mbuf->meta_data()->findInt64(kKeyTime, &timeUs)) {
                 mbuf->meta_data()->dumpToLog();
                 track->mPackets->signalEOS(ERROR_MALFORMED);
@@ -1450,7 +1464,16 @@
             formatChange = false;
             seeking = false;
             ++numBuffers;
-        } else if (err == WOULD_BLOCK) {
+        }
+        if (id < count) {
+            // Error, some mediaBuffer doesn't have kKeyTime.
+            for (; id < count; ++id) {
+                mediaBuffers[id]->release();
+            }
+            break;
+        }
+
+        if (err == WOULD_BLOCK) {
             break;
         } else if (err == INFO_FORMAT_CHANGED) {
 #if 0
@@ -1459,7 +1482,7 @@
                     NULL,
                     false /* discard */);
 #endif
-        } else {
+        } else if (err != OK) {
             queueDiscontinuityIfNeeded(seeking, formatChange, trackType, track);
             track->mPackets->signalEOS(err);
             break;
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayer.cpp b/media/libmediaplayerservice/nuplayer/NuPlayer.cpp
index 46a51ce..0b10ae4 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayer.cpp
+++ b/media/libmediaplayerservice/nuplayer/NuPlayer.cpp
@@ -188,11 +188,11 @@
       mPlaybackSettings(AUDIO_PLAYBACK_RATE_DEFAULT),
       mVideoFpsHint(-1.f),
       mStarted(false),
+      mPrepared(false),
       mResetting(false),
       mSourceStarted(false),
       mPaused(false),
       mPausedByClient(true),
-      mPendingBufferingFlag(PENDING_BUFFERING_FLAG_NONE),
       mPausedForBuffering(false) {
     clearFlushComplete();
 }
@@ -723,10 +723,6 @@
                 onStart();
             }
             mPausedByClient = false;
-            if (mPendingBufferingFlag != PENDING_BUFFERING_FLAG_NONE) {
-                notifyListener(MEDIA_INFO, mPendingBufferingFlag, 0);
-                mPendingBufferingFlag = PENDING_BUFFERING_FLAG_NONE;
-            }
             break;
         }
 
@@ -773,9 +769,17 @@
                     newRate.mSpeed = mPlaybackSettings.mSpeed;
                     mPlaybackSettings = newRate;
                 } else { /* rate.mSpeed != 0.f */
-                    onResume();
-                    mPausedByClient = false;
                     mPlaybackSettings = rate;
+                    if (mStarted) {
+                        // do not resume yet if the source is still buffering
+                        if (!mPausedForBuffering) {
+                            onResume();
+                        }
+                    } else if (mPrepared) {
+                        onStart();
+                    }
+
+                    mPausedByClient = false;
                 }
             }
 
@@ -1218,8 +1222,6 @@
                 break;
             }
 
-            mPendingBufferingFlag = PENDING_BUFFERING_FLAG_NONE;
-
             mDeferredActions.push_back(
                     new FlushDecoderAction(FLUSH_CMD_FLUSH /* audio */,
                                            FLUSH_CMD_FLUSH /* video */));
@@ -1330,6 +1332,7 @@
     mAudioEOS = false;
     mVideoEOS = false;
     mStarted = true;
+    mPaused = false;
 
     uint32_t flags = 0;
 
@@ -1480,7 +1483,8 @@
     mScanSourcesPending = true;
 }
 
-void NuPlayer::tryOpenAudioSinkForOffload(const sp<AMessage> &format, bool hasVideo) {
+void NuPlayer::tryOpenAudioSinkForOffload(
+        const sp<AMessage> &format, const sp<MetaData> &audioMeta, bool hasVideo) {
     // Note: This is called early in NuPlayer to determine whether offloading
     // is possible; otherwise the decoders call the renderer openAudioSink directly.
 
@@ -1490,8 +1494,6 @@
         // Any failure we turn off mOffloadAudio.
         mOffloadAudio = false;
     } else if (mOffloadAudio) {
-        sp<MetaData> audioMeta =
-                mSource->getFormatMeta(true /* audio */);
         sendMetaDataToHal(mAudioSink, audioMeta);
     }
 }
@@ -1538,7 +1540,7 @@
     }
 }
 
-void NuPlayer::determineAudioModeChange() {
+void NuPlayer::determineAudioModeChange(const sp<AMessage> &audioFormat) {
     if (mSource == NULL || mAudioSink == NULL) {
         return;
     }
@@ -1561,8 +1563,7 @@
             mRenderer->signalEnableOffloadAudio();
         }
         // open audio sink early under offload mode.
-        sp<AMessage> format = mSource->getFormat(true /*audio*/);
-        tryOpenAudioSinkForOffload(format, hasVideo);
+        tryOpenAudioSinkForOffload(audioFormat, audioMeta, hasVideo);
     } else {
         if (mOffloadAudio) {
             mRenderer->signalDisableOffloadAudio();
@@ -1621,7 +1622,7 @@
         notify->setInt32("generation", mAudioDecoderGeneration);
 
         if (checkAudioModeChange) {
-            determineAudioModeChange();
+            determineAudioModeChange(format);
         }
         if (mOffloadAudio) {
             mSource->setOffloadAudio(true /* offload */);
@@ -1963,7 +1964,6 @@
     }
     mPreviousSeekTimeUs = seekTimeUs;
     mSource->seekTo(seekTimeUs);
-    mPendingBufferingFlag = PENDING_BUFFERING_FLAG_NONE;
     ++mTimedTextGeneration;
 
     // everything's flushed, continue playback.
@@ -2022,6 +2022,7 @@
     }
 
     mStarted = false;
+    mPrepared = false;
     mResetting = false;
     mSourceStarted = false;
 }
@@ -2134,6 +2135,8 @@
                         new FlushDecoderAction(FLUSH_CMD_SHUTDOWN /* audio */,
                                                FLUSH_CMD_SHUTDOWN /* video */));
                 processDeferredActions();
+            } else {
+                mPrepared = true;
             }
 
             sp<NuPlayerDriver> driver = mDriver.promote();
@@ -2204,17 +2207,7 @@
                 mPausedForBuffering = true;
                 onPause();
             }
-            // fall-thru
-        }
-
-        case Source::kWhatBufferingStart:
-        {
-            if (mPausedByClient) {
-                mPendingBufferingFlag = PENDING_BUFFERING_FLAG_START;
-            } else {
-                notifyListener(MEDIA_INFO, MEDIA_INFO_BUFFERING_START, 0);
-                mPendingBufferingFlag = PENDING_BUFFERING_FLAG_NONE;
-            }
+            notifyListener(MEDIA_INFO, MEDIA_INFO_BUFFERING_START, 0);
             break;
         }
 
@@ -2231,13 +2224,7 @@
                     onResume();
                 }
             }
-            // fall-thru
-        }
-
-        case Source::kWhatBufferingEnd:
-        {
             notifyListener(MEDIA_INFO, MEDIA_INFO_BUFFERING_END, 0);
-            mPendingBufferingFlag = PENDING_BUFFERING_FLAG_NONE;
             break;
         }
 
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayer.h b/media/libmediaplayerservice/nuplayer/NuPlayer.h
index f6eb49e..ae17c76 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayer.h
+++ b/media/libmediaplayerservice/nuplayer/NuPlayer.h
@@ -183,12 +183,6 @@
         FLUSH_CMD_SHUTDOWN,
     };
 
-    enum PendingBufferingFlag {
-        PENDING_BUFFERING_FLAG_NONE = MEDIA_INFO_UNKNOWN,
-        PENDING_BUFFERING_FLAG_START = MEDIA_INFO_BUFFERING_START,
-        PENDING_BUFFERING_FLAG_END = MEDIA_INFO_BUFFERING_END,
-    };
-
     // Status of flush responses from the decoder and renderer.
     bool mFlushComplete[2][2];
 
@@ -204,6 +198,7 @@
     AVSyncSettings mSyncSettings;
     float mVideoFpsHint;
     bool mStarted;
+    bool mPrepared;
     bool mResetting;
     bool mSourceStarted;
 
@@ -215,9 +210,6 @@
     // still become true, when we pause internally due to buffering.
     bool mPausedByClient;
 
-    // Pending buffering flag which is not sent to client due to being paused.
-    PendingBufferingFlag mPendingBufferingFlag;
-
     // Pause state as requested by source (internally) due to buffering
     bool mPausedForBuffering;
 
@@ -232,11 +224,12 @@
         mFlushComplete[1][1] = false;
     }
 
-    void tryOpenAudioSinkForOffload(const sp<AMessage> &format, bool hasVideo);
+    void tryOpenAudioSinkForOffload(
+            const sp<AMessage> &format, const sp<MetaData> &audioMeta, bool hasVideo);
     void closeAudioSink();
     void restartAudio(
             int64_t currentPositionUs, bool forceNonOffload, bool needsToCreateAudioDecoder);
-    void determineAudioModeChange();
+    void determineAudioModeChange(const sp<AMessage> &audioFormat);
 
     status_t instantiateDecoder(
             bool audio, sp<DecoderBase> *decoder, bool checkAudioModeChange = true);
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp b/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp
index 06bb53d..0f4dce9 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp
@@ -243,7 +243,10 @@
 status_t NuPlayerDriver::start() {
     ALOGD("start(%p), state is %d, eos is %d", this, mState, mAtEOS);
     Mutex::Autolock autoLock(mLock);
+    return start_l();
+}
 
+status_t NuPlayerDriver::start_l() {
     switch (mState) {
         case STATE_UNPREPARED:
         {
@@ -357,8 +360,11 @@
         if (rate.mSpeed == 0.f && mState == STATE_RUNNING) {
             mState = STATE_PAUSED;
             notifyListener_l(MEDIA_PAUSED);
-        } else if (rate.mSpeed != 0.f && mState == STATE_PAUSED) {
-            mState = STATE_RUNNING;
+        } else if (rate.mSpeed != 0.f
+                && (mState == STATE_PAUSED
+                    || mState == STATE_STOPPED_AND_PREPARED
+                    || mState == STATE_PREPARED)) {
+            err = start_l();
         }
     }
     return err;
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerDriver.h b/media/libmediaplayerservice/nuplayer/NuPlayerDriver.h
index d5b4ba1..26d3a60 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayerDriver.h
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerDriver.h
@@ -123,6 +123,7 @@
     bool mAutoLoop;
 
     status_t prepare_l();
+    status_t start_l();
     void notifyListener_l(int msg, int ext1 = 0, int ext2 = 0, const Parcel *in = NULL);
 
     DISALLOW_EVIL_CONSTRUCTORS(NuPlayerDriver);
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp b/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp
index 4ae8e82..b47a4f1 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp
@@ -962,7 +962,15 @@
         }
 
         entry->mOffset += written;
-        if (entry->mOffset == entry->mBuffer->size()) {
+        size_t remainder = entry->mBuffer->size() - entry->mOffset;
+        if ((ssize_t)remainder < mAudioSink->frameSize()) {
+            if (remainder > 0) {
+                ALOGW("Corrupted audio buffer has fractional frames, discarding %zu bytes.",
+                        remainder);
+                entry->mOffset += remainder;
+                copy -= remainder;
+            }
+
             entry->mNotifyConsumed->post();
             mAudioQueue.erase(mAudioQueue.begin());
 
@@ -990,7 +998,8 @@
             // AudioSink write is called in non-blocking mode.
             // It may return with a short count when:
             //
-            // 1) Size to be copied is not a multiple of the frame size. We consider this fatal.
+            // 1) Size to be copied is not a multiple of the frame size. Fractional frames are
+            //    discarded.
             // 2) The data to be copied exceeds the available buffer in AudioSink.
             // 3) An error occurs and data has been partially copied to the buffer in AudioSink.
             // 4) AudioSink is an AudioCache for data retrieval, and the AudioCache is exceeded.
@@ -1770,7 +1779,7 @@
                     mime.c_str(), audioFormat);
 
             int avgBitRate = -1;
-            format->findInt32("bit-rate", &avgBitRate);
+            format->findInt32("bitrate", &avgBitRate);
 
             int32_t aacProfile = -1;
             if (audioFormat == AUDIO_FORMAT_AAC
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerSource.h b/media/libmediaplayerservice/nuplayer/NuPlayerSource.h
index fba4540..0176eafa 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayerSource.h
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerSource.h
@@ -46,8 +46,6 @@
         kWhatFlagsChanged,
         kWhatVideoSizeChanged,
         kWhatBufferingUpdate,
-        kWhatBufferingStart,
-        kWhatBufferingEnd,
         kWhatPauseOnBufferingStart,
         kWhatResumeOnBufferingEnd,
         kWhatCacheStats,
diff --git a/media/libmediaplayerservice/nuplayer/RTSPSource.cpp b/media/libmediaplayerservice/nuplayer/RTSPSource.cpp
index ba40876..1b7dff5 100644
--- a/media/libmediaplayerservice/nuplayer/RTSPSource.cpp
+++ b/media/libmediaplayerservice/nuplayer/RTSPSource.cpp
@@ -778,7 +778,7 @@
         mBuffering = true;
 
         sp<AMessage> notify = dupNotify();
-        notify->setInt32("what", kWhatBufferingStart);
+        notify->setInt32("what", kWhatPauseOnBufferingStart);
         notify->post();
     }
 }
@@ -794,7 +794,7 @@
         mBuffering = false;
 
         sp<AMessage> notify = dupNotify();
-        notify->setInt32("what", kWhatBufferingEnd);
+        notify->setInt32("what", kWhatResumeOnBufferingEnd);
         notify->post();
     }
 
diff --git a/media/libmediaplayerservice/nuplayer/StreamingSource.cpp b/media/libmediaplayerservice/nuplayer/StreamingSource.cpp
index d4c88de..c4147e1 100644
--- a/media/libmediaplayerservice/nuplayer/StreamingSource.cpp
+++ b/media/libmediaplayerservice/nuplayer/StreamingSource.cpp
@@ -29,6 +29,7 @@
 #include <media/stagefright/foundation/AMessage.h>
 #include <media/stagefright/MediaSource.h>
 #include <media/stagefright/MetaData.h>
+#include <media/stagefright/Utils.h>
 
 namespace android {
 
@@ -217,14 +218,21 @@
     return static_cast<AnotherPacketSource *>(source.get());
 }
 
-sp<MetaData> NuPlayer::StreamingSource::getFormatMeta(bool audio) {
+sp<AMessage> NuPlayer::StreamingSource::getFormat(bool audio) {
     sp<AnotherPacketSource> source = getSource(audio);
 
+    sp<AMessage> format = new AMessage;
     if (source == NULL) {
-        return NULL;
+        format->setInt32("err", -EWOULDBLOCK);
+        return format;
     }
 
-    return source->getFormat();
+    sp<MetaData> meta = source->getFormat();
+    status_t err = convertMetaDataToMessage(meta, &format);
+    if (err != OK) {
+        format->setInt32("err", err);
+    }
+    return format;
 }
 
 status_t NuPlayer::StreamingSource::dequeueAccessUnit(
diff --git a/media/libmediaplayerservice/nuplayer/StreamingSource.h b/media/libmediaplayerservice/nuplayer/StreamingSource.h
index 1f95f3c..db88c7f 100644
--- a/media/libmediaplayerservice/nuplayer/StreamingSource.h
+++ b/media/libmediaplayerservice/nuplayer/StreamingSource.h
@@ -46,7 +46,7 @@
 
     virtual void onMessageReceived(const sp<AMessage> &msg);
 
-    virtual sp<MetaData> getFormatMeta(bool audio);
+    virtual sp<AMessage> getFormat(bool audio);
 
 private:
     enum {
diff --git a/media/libstagefright/ACodec.cpp b/media/libstagefright/ACodec.cpp
index 99996ed..cbc28e3 100644
--- a/media/libstagefright/ACodec.cpp
+++ b/media/libstagefright/ACodec.cpp
@@ -498,6 +498,7 @@
       mNode(0),
       mUsingNativeWindow(false),
       mNativeWindowUsageBits(0),
+      mLastNativeWindowDataSpace(HAL_DATASPACE_UNKNOWN),
       mIsVideo(false),
       mIsEncoder(false),
       mFatalError(false),
@@ -540,6 +541,8 @@
     mPortEOS[kPortIndexInput] = mPortEOS[kPortIndexOutput] = false;
     mInputEOSResult = OK;
 
+    memset(&mLastNativeWindowCrop, 0, sizeof(mLastNativeWindowCrop));
+
     changeState(mUninitializedState);
 }
 
@@ -973,6 +976,9 @@
     usage |= kVideoGrallocUsage;
     *finalUsage = usage;
 
+    memset(&mLastNativeWindowCrop, 0, sizeof(mLastNativeWindowCrop));
+    mLastNativeWindowDataSpace = HAL_DATASPACE_UNKNOWN;
+
     ALOGV("gralloc usage: %#x(OMX) => %#x(ACodec)", omxUsage, usage);
     return setNativeWindowSizeFormatAndUsage(
             nativeWindow,
@@ -1726,6 +1732,14 @@
         return INVALID_OPERATION;
     }
 
+    // propagate bitrate to the output so that the muxer has it
+    if (encoder && msg->findInt32("bitrate", &bitRate)) {
+        // Technically ISO spec says that 'bitrate' should be 0 for VBR even though it is the
+        // average bitrate. We've been setting both bitrate and max-bitrate to this same value.
+        outputFormat->setInt32("bitrate", bitRate);
+        outputFormat->setInt32("max-bitrate", bitRate);
+    }
+
     int32_t storeMeta;
     if (encoder
             && msg->findInt32("android._input-metadata-buffer-type", &storeMeta)
@@ -5999,6 +6013,10 @@
                 }
                 mCodec->addKeyFormatChangesToRenderBufferNotification(reply);
                 mCodec->sendFormatChange();
+            } else if (rangeLength > 0 && mCodec->mNativeWindow != NULL) {
+                // If potentially rendering onto a surface, always save key format data (crop &
+                // data space) so that we can set it if and once the buffer is rendered.
+                mCodec->addKeyFormatChangesToRenderBufferNotification(reply);
             }
 
             if (mCodec->usingMetadataOnEncoderOutput()) {
@@ -6099,15 +6117,19 @@
     }
 
     android_native_rect_t crop;
-    if (msg->findRect("crop", &crop.left, &crop.top, &crop.right, &crop.bottom)) {
+    if (msg->findRect("crop", &crop.left, &crop.top, &crop.right, &crop.bottom)
+            && memcmp(&crop, &mCodec->mLastNativeWindowCrop, sizeof(crop)) != 0) {
+        mCodec->mLastNativeWindowCrop = crop;
         status_t err = native_window_set_crop(mCodec->mNativeWindow.get(), &crop);
         ALOGW_IF(err != NO_ERROR, "failed to set crop: %d", err);
     }
 
     int32_t dataSpace;
-    if (msg->findInt32("dataspace", &dataSpace)) {
+    if (msg->findInt32("dataspace", &dataSpace)
+            && dataSpace != mCodec->mLastNativeWindowDataSpace) {
         status_t err = native_window_set_buffers_data_space(
                 mCodec->mNativeWindow.get(), (android_dataspace)dataSpace);
+        mCodec->mLastNativeWindowDataSpace = dataSpace;
         ALOGW_IF(err != NO_ERROR, "failed to set dataspace: %d", err);
     }
 
diff --git a/media/libstagefright/AudioSource.cpp b/media/libstagefright/AudioSource.cpp
index c8b61ca..790c6da 100644
--- a/media/libstagefright/AudioSource.cpp
+++ b/media/libstagefright/AudioSource.cpp
@@ -51,7 +51,8 @@
 
 AudioSource::AudioSource(
         audio_source_t inputSource, const String16 &opPackageName,
-        uint32_t sampleRate, uint32_t channelCount, uint32_t outSampleRate)
+        uint32_t sampleRate, uint32_t channelCount, uint32_t outSampleRate,
+        uid_t uid, pid_t pid)
     : mStarted(false),
       mSampleRate(sampleRate),
       mOutSampleRate(outSampleRate > 0 ? outSampleRate : sampleRate),
@@ -91,7 +92,12 @@
                     (size_t) (bufCount * frameCount),
                     AudioRecordCallbackFunction,
                     this,
-                    frameCount /*notificationFrames*/);
+                    frameCount /*notificationFrames*/,
+                    AUDIO_SESSION_ALLOCATE,
+                    AudioRecord::TRANSFER_DEFAULT,
+                    AUDIO_INPUT_FLAG_NONE,
+                    uid,
+                    pid);
         mInitCheck = mRecord->initCheck();
         if (mInitCheck != OK) {
             mRecord.clear();
diff --git a/media/libstagefright/DRMExtractor.cpp b/media/libstagefright/DRMExtractor.cpp
index 255dcd0..d36ac65 100644
--- a/media/libstagefright/DRMExtractor.cpp
+++ b/media/libstagefright/DRMExtractor.cpp
@@ -200,7 +200,17 @@
                 continue;
             }
 
-            CHECK(dstOffset + 4 <= (*buffer)->size());
+            if (dstOffset > SIZE_MAX - 4 ||
+                dstOffset + 4 > SIZE_MAX - nalLength ||
+                dstOffset + 4 + nalLength > (*buffer)->size()) {
+                (*buffer)->release();
+                (*buffer) = NULL;
+                if (decryptedDrmBuffer.data) {
+                    delete [] decryptedDrmBuffer.data;
+                    decryptedDrmBuffer.data = NULL;
+                }
+                return ERROR_MALFORMED;
+            }
 
             dstData[dstOffset++] = 0;
             dstData[dstOffset++] = 0;
diff --git a/media/libstagefright/ESDS.cpp b/media/libstagefright/ESDS.cpp
index 8fbb57c..c31720d 100644
--- a/media/libstagefright/ESDS.cpp
+++ b/media/libstagefright/ESDS.cpp
@@ -18,6 +18,8 @@
 #define LOG_TAG "ESDS"
 #include <utils/Log.h>
 
+#include <media/stagefright/Utils.h>
+
 #include "include/ESDS.h"
 
 #include <string.h>
@@ -194,12 +196,25 @@
     return err;
 }
 
+status_t ESDS::getBitRate(uint32_t *brateMax, uint32_t *brateAvg) const {
+    if (mInitCheck != OK) {
+        return mInitCheck;
+    }
+
+    *brateMax = mBitRateMax;
+    *brateAvg = mBitRateAvg;
+
+    return OK;
+};
+
 status_t ESDS::parseDecoderConfigDescriptor(size_t offset, size_t size) {
     if (size < 13) {
         return ERROR_MALFORMED;
     }
 
     mObjectTypeIndication = mData[offset];
+    mBitRateMax = U32_AT(mData + offset + 5);
+    mBitRateAvg = U32_AT(mData + offset + 9);
 
     offset += 13;
     size -= 13;
diff --git a/media/libstagefright/HevcUtils.cpp b/media/libstagefright/HevcUtils.cpp
index 087c903..718710a 100644
--- a/media/libstagefright/HevcUtils.cpp
+++ b/media/libstagefright/HevcUtils.cpp
@@ -18,6 +18,7 @@
 #define LOG_TAG "HevcUtils"
 
 #include <cstring>
+#include <utility>
 
 #include "include/HevcUtils.h"
 #include "include/avc_utils.h"
@@ -39,7 +40,8 @@
     kHevcNalUnitTypeSuffixSei,
 };
 
-HevcParameterSets::HevcParameterSets() {
+HevcParameterSets::HevcParameterSets()
+    : mInfo(kInfoNone) {
 }
 
 status_t HevcParameterSets::addNalUnit(const uint8_t* data, size_t size) {
@@ -149,17 +151,21 @@
     // Skip reserved
     reader.skipBits(16);
 
-    mParams.add(kGeneralProfileSpace, reader.getBits(2));
-    mParams.add(kGeneralTierFlag, reader.getBits(1));
-    mParams.add(kGeneralProfileIdc, reader.getBits(5));
-    mParams.add(kGeneralProfileCompatibilityFlags, reader.getBits(32));
-    mParams.add(
-            kGeneralConstraintIndicatorFlags,
-            ((uint64_t)reader.getBits(16) << 32) | reader.getBits(32));
-    mParams.add(kGeneralLevelIdc, reader.getBits(8));
-    // 96 bits total for general profile.
+    if (reader.atLeastNumBitsLeft(96)) {
+        mParams.add(kGeneralProfileSpace, reader.getBits(2));
+        mParams.add(kGeneralTierFlag, reader.getBits(1));
+        mParams.add(kGeneralProfileIdc, reader.getBits(5));
+        mParams.add(kGeneralProfileCompatibilityFlags, reader.getBits(32));
+        mParams.add(
+                kGeneralConstraintIndicatorFlags,
+                ((uint64_t)reader.getBits(16) << 32) | reader.getBits(32));
+        mParams.add(kGeneralLevelIdc, reader.getBits(8));
+        // 96 bits total for general profile.
+    } else {
+        reader.skipBits(96);
+    }
 
-    return OK;
+    return reader.overRead() ? ERROR_MALFORMED : OK;
 }
 
 status_t HevcParameterSets::parseSps(const uint8_t* data, size_t size) {
@@ -167,7 +173,7 @@
     NALBitReader reader(data, size);
     // Skip sps_video_parameter_set_id
     reader.skipBits(4);
-    uint8_t maxSubLayersMinus1 = reader.getBits(3);
+    uint8_t maxSubLayersMinus1 = reader.getBitsWithFallback(3, 0);
     // Skip sps_temporal_id_nesting_flag;
     reader.skipBits(1);
     // Skip general profile
@@ -176,8 +182,8 @@
         bool subLayerProfilePresentFlag[8];
         bool subLayerLevelPresentFlag[8];
         for (int i = 0; i < maxSubLayersMinus1; ++i) {
-            subLayerProfilePresentFlag[i] = reader.getBits(1);
-            subLayerLevelPresentFlag[i] = reader.getBits(1);
+            subLayerProfilePresentFlag[i] = reader.getBitsWithFallback(1, 0);
+            subLayerLevelPresentFlag[i] = reader.getBitsWithFallback(1, 0);
         }
         // Skip reserved
         reader.skipBits(2 * (8 - maxSubLayersMinus1));
@@ -193,31 +199,152 @@
         }
     }
     // Skip sps_seq_parameter_set_id
-    parseUE(&reader);
-    uint8_t chromaFormatIdc = parseUE(&reader);
+    skipUE(&reader);
+    uint8_t chromaFormatIdc = parseUEWithFallback(&reader, 0);
     mParams.add(kChromaFormatIdc, chromaFormatIdc);
     if (chromaFormatIdc == 3) {
         // Skip separate_colour_plane_flag
         reader.skipBits(1);
     }
     // Skip pic_width_in_luma_samples
-    parseUE(&reader);
+    skipUE(&reader);
     // Skip pic_height_in_luma_samples
-    parseUE(&reader);
-    if (reader.getBits(1) /* i.e. conformance_window_flag */) {
+    skipUE(&reader);
+    if (reader.getBitsWithFallback(1, 0) /* i.e. conformance_window_flag */) {
         // Skip conf_win_left_offset
-        parseUE(&reader);
+        skipUE(&reader);
         // Skip conf_win_right_offset
-        parseUE(&reader);
+        skipUE(&reader);
         // Skip conf_win_top_offset
-        parseUE(&reader);
+        skipUE(&reader);
         // Skip conf_win_bottom_offset
-        parseUE(&reader);
+        skipUE(&reader);
     }
-    mParams.add(kBitDepthLumaMinus8, parseUE(&reader));
-    mParams.add(kBitDepthChromaMinus8, parseUE(&reader));
+    mParams.add(kBitDepthLumaMinus8, parseUEWithFallback(&reader, 0));
+    mParams.add(kBitDepthChromaMinus8, parseUEWithFallback(&reader, 0));
 
-    return OK;
+    // log2_max_pic_order_cnt_lsb_minus4
+    size_t log2MaxPicOrderCntLsb = parseUEWithFallback(&reader, 0) + (size_t)4;
+    bool spsSubLayerOrderingInfoPresentFlag = reader.getBitsWithFallback(1, 0);
+    for (uint32_t i = spsSubLayerOrderingInfoPresentFlag ? 0 : maxSubLayersMinus1;
+            i <= maxSubLayersMinus1; ++i) {
+        skipUE(&reader); // sps_max_dec_pic_buffering_minus1[i]
+        skipUE(&reader); // sps_max_num_reorder_pics[i]
+        skipUE(&reader); // sps_max_latency_increase_plus1[i]
+    }
+
+    skipUE(&reader); // log2_min_luma_coding_block_size_minus3
+    skipUE(&reader); // log2_diff_max_min_luma_coding_block_size
+    skipUE(&reader); // log2_min_luma_transform_block_size_minus2
+    skipUE(&reader); // log2_diff_max_min_luma_transform_block_size
+    skipUE(&reader); // max_transform_hierarchy_depth_inter
+    skipUE(&reader); // max_transform_hierarchy_depth_intra
+    if (reader.getBitsWithFallback(1, 0)) { // scaling_list_enabled_flag u(1)
+        // scaling_list_data
+        if (reader.getBitsWithFallback(1, 0)) { // sps_scaling_list_data_present_flag
+            for (uint32_t sizeId = 0; sizeId < 4; ++sizeId) {
+                for (uint32_t matrixId = 0; matrixId < 6; matrixId += (sizeId == 3) ? 3 : 1) {
+                    if (!reader.getBitsWithFallback(1, 1)) {
+                        // scaling_list_pred_mode_flag[sizeId][matrixId]
+                        skipUE(&reader); // scaling_list_pred_matrix_id_delta[sizeId][matrixId]
+                    } else {
+                        uint32_t coefNum = std::min(64, (1 << (4 + (sizeId << 1))));
+                        if (sizeId > 1) {
+                            skipSE(&reader); // scaling_list_dc_coef_minus8[sizeId - 2][matrixId]
+                        }
+                        for (uint32_t i = 0; i < coefNum; ++i) {
+                            skipSE(&reader); // scaling_list_delta_coef
+                        }
+                    }
+                }
+            }
+        }
+    }
+    reader.skipBits(1); // amp_enabled_flag
+    reader.skipBits(1); // sample_adaptive_offset_enabled_flag u(1)
+    if (reader.getBitsWithFallback(1, 0)) { // pcm_enabled_flag
+        reader.skipBits(4); // pcm_sample_bit_depth_luma_minus1
+        reader.skipBits(4); // pcm_sample_bit_depth_chroma_minus1 u(4)
+        skipUE(&reader); // log2_min_pcm_luma_coding_block_size_minus3
+        skipUE(&reader); // log2_diff_max_min_pcm_luma_coding_block_size
+        reader.skipBits(1); // pcm_loop_filter_disabled_flag
+    }
+    uint32_t numShortTermRefPicSets = parseUEWithFallback(&reader, 0);
+    uint32_t numPics = 0;
+    for (uint32_t i = 0; i < numShortTermRefPicSets; ++i) {
+        // st_ref_pic_set(i)
+        if (i != 0 && reader.getBitsWithFallback(1, 0)) { // inter_ref_pic_set_prediction_flag
+            reader.skipBits(1); // delta_rps_sign
+            skipUE(&reader); // abs_delta_rps_minus1
+            uint32_t nextNumPics = 0;
+            for (uint32_t j = 0; j <= numPics; ++j) {
+                if (reader.getBitsWithFallback(1, 0) // used_by_curr_pic_flag[j]
+                        || reader.getBitsWithFallback(1, 0)) { // use_delta_flag[j]
+                    ++nextNumPics;
+                }
+            }
+            numPics = nextNumPics;
+        } else {
+            uint32_t numNegativePics = parseUEWithFallback(&reader, 0);
+            uint32_t numPositivePics = parseUEWithFallback(&reader, 0);
+            if (numNegativePics > UINT32_MAX - numPositivePics) {
+                return ERROR_MALFORMED;
+            }
+            numPics = numNegativePics + numPositivePics;
+            for (uint32_t j = 0; j < numPics; ++j) {
+                skipUE(&reader); // delta_poc_s0|1_minus1[i]
+                reader.skipBits(1); // used_by_curr_pic_s0|1_flag[i]
+            }
+        }
+    }
+    if (reader.getBitsWithFallback(1, 0)) { // long_term_ref_pics_present_flag
+        uint32_t numLongTermRefPicSps = parseUEWithFallback(&reader, 0);
+        for (uint32_t i = 0; i < numLongTermRefPicSps; ++i) {
+            reader.skipBits(log2MaxPicOrderCntLsb); // lt_ref_pic_poc_lsb_sps[i]
+            reader.skipBits(1); // used_by_curr_pic_lt_sps_flag[i]
+        }
+    }
+    reader.skipBits(1); // sps_temporal_mvp_enabled_flag
+    reader.skipBits(1); // strong_intra_smoothing_enabled_flag
+    if (reader.getBitsWithFallback(1, 0)) { // vui_parameters_present_flag
+        if (reader.getBitsWithFallback(1, 0)) { // aspect_ratio_info_present_flag
+            uint32_t aspectRatioIdc = reader.getBitsWithFallback(8, 0);
+            if (aspectRatioIdc == 0xFF /* EXTENDED_SAR */) {
+                reader.skipBits(16); // sar_width
+                reader.skipBits(16); // sar_height
+            }
+        }
+        if (reader.getBitsWithFallback(1, 0)) { // overscan_info_present_flag
+            reader.skipBits(1); // overscan_appropriate_flag
+        }
+        if (reader.getBitsWithFallback(1, 0)) { // video_signal_type_present_flag
+            reader.skipBits(3); // video_format
+            uint32_t videoFullRangeFlag;
+            if (reader.getBitsGraceful(1, &videoFullRangeFlag)) {
+                mParams.add(kVideoFullRangeFlag, videoFullRangeFlag);
+            }
+            if (reader.getBitsWithFallback(1, 0)) { // colour_description_present_flag
+                mInfo = (Info)(mInfo | kInfoHasColorDescription);
+                uint32_t colourPrimaries, transferCharacteristics, matrixCoeffs;
+                if (reader.getBitsGraceful(8, &colourPrimaries)) {
+                    mParams.add(kColourPrimaries, colourPrimaries);
+                }
+                if (reader.getBitsGraceful(8, &transferCharacteristics)) {
+                    mParams.add(kTransferCharacteristics, transferCharacteristics);
+                    if (transferCharacteristics == 16 /* ST 2084 */
+                            || transferCharacteristics == 18 /* ARIB STD-B67 HLG */) {
+                        mInfo = (Info)(mInfo | kInfoIsHdr);
+                    }
+                }
+                if (reader.getBitsGraceful(8, &matrixCoeffs)) {
+                    mParams.add(kMatrixCoeffs, matrixCoeffs);
+                }
+            }
+            // skip rest of VUI
+        }
+    }
+
+    return reader.overRead() ? ERROR_MALFORMED : OK;
 }
 
 status_t HevcParameterSets::parsePps(
diff --git a/media/libstagefright/MPEG4Extractor.cpp b/media/libstagefright/MPEG4Extractor.cpp
index f296d9a..6a67fcf 100644
--- a/media/libstagefright/MPEG4Extractor.cpp
+++ b/media/libstagefright/MPEG4Extractor.cpp
@@ -51,6 +51,11 @@
 
 namespace android {
 
+enum {
+    // max track header chunk to return
+    kMaxTrackHeaderSize = 32,
+};
+
 class MPEG4Source : public MediaSource {
 public:
     // Caller retains ownership of both "dataSource" and "sampleTable".
@@ -476,6 +481,22 @@
                             ((int64_t)sampleTime * 1000000) / track->timescale);
                 }
             }
+
+            // MPEG2 tracks do not provide CSD, so read the stream header
+            if (!strcmp(mime, MEDIA_MIMETYPE_VIDEO_MPEG2)) {
+                off64_t offset;
+                size_t size;
+                if (track->sampleTable->getMetaDataForSample(
+                            0 /* sampleIndex */, &offset, &size, NULL /* sampleTime */) == OK) {
+                    if (size > kMaxTrackHeaderSize) {
+                        size = kMaxTrackHeaderSize;
+                    }
+                    uint8_t header[kMaxTrackHeaderSize];
+                    if (mDataSource->readAt(offset, &header, size) == (ssize_t)size) {
+                        track->meta->setData(kKeyStreamHeader, 'mdat', header, size);
+                    }
+                }
+            }
         }
     }
 
@@ -536,6 +557,10 @@
     }
     if (psshsize > 0 && psshsize <= UINT32_MAX) {
         char *buf = (char*)malloc(psshsize);
+        if (!buf) {
+            ALOGE("b/28471206");
+            return NO_MEMORY;
+        }
         char *ptr = buf;
         for (size_t i = 0; i < mPssh.size(); i++) {
             memcpy(ptr, mPssh[i].uuid, 20); // uuid + length
@@ -1541,8 +1566,9 @@
 
                 const char *mime;
                 CHECK(mLastTrack->meta->findCString(kKeyMIMEType, &mime));
-                if (!strcmp(mime, MEDIA_MIMETYPE_VIDEO_AVC)) {
-                    // AVC requires compression ratio of at least 2, and uses
+                if (!strcmp(mime, MEDIA_MIMETYPE_VIDEO_AVC)
+                        || !strcmp(mime, MEDIA_MIMETYPE_VIDEO_HEVC)) {
+                    // AVC & HEVC requires compression ratio of at least 2, and uses
                     // macroblocks
                     max_size = ((width + 15) / 16) * ((height + 15) / 16) * 192;
                 } else {
@@ -1738,12 +1764,42 @@
             break;
         }
 
+        case FOURCC('b', 't', 'r', 't'):
+        {
+            *offset += chunk_size;
+
+            uint8_t buffer[12];
+            if (chunk_data_size != sizeof(buffer)) {
+                return ERROR_MALFORMED;
+            }
+
+            if (mDataSource->readAt(
+                    data_offset, buffer, chunk_data_size) < chunk_data_size) {
+                return ERROR_IO;
+            }
+
+            uint32_t maxBitrate = U32_AT(&buffer[4]);
+            uint32_t avgBitrate = U32_AT(&buffer[8]);
+            if (maxBitrate > 0 && maxBitrate < INT32_MAX) {
+                mLastTrack->meta->setInt32(kKeyMaxBitRate, (int32_t)maxBitrate);
+            }
+            if (avgBitrate > 0 && avgBitrate < INT32_MAX) {
+                mLastTrack->meta->setInt32(kKeyBitRate, (int32_t)avgBitrate);
+            }
+            break;
+        }
+
         case FOURCC('a', 'v', 'c', 'C'):
         {
             *offset += chunk_size;
 
             sp<ABuffer> buffer = new ABuffer(chunk_data_size);
 
+            if (buffer->data() == NULL) {
+                ALOGE("b/28471206");
+                return NO_MEMORY;
+            }
+
             if (mDataSource->readAt(
                         data_offset, buffer->data(), chunk_data_size) < chunk_data_size) {
                 return ERROR_IO;
@@ -1761,6 +1817,11 @@
         {
             sp<ABuffer> buffer = new ABuffer(chunk_data_size);
 
+            if (buffer->data() == NULL) {
+                ALOGE("b/28471206");
+                return NO_MEMORY;
+            }
+
             if (mDataSource->readAt(
                         data_offset, buffer->data(), chunk_data_size) < chunk_data_size) {
                 return ERROR_IO;
@@ -2094,6 +2155,10 @@
                     return ERROR_MALFORMED;
                 }
                 sp<ABuffer> buffer = new ABuffer(chunk_data_size + 1);
+                if (buffer->data() == NULL) {
+                    ALOGE("b/28471206");
+                    return NO_MEMORY;
+                }
                 if (mDataSource->readAt(
                     data_offset, buffer->data(), chunk_data_size) != (ssize_t)chunk_data_size) {
                     return ERROR_IO;
diff --git a/media/libstagefright/MPEG4Writer.cpp b/media/libstagefright/MPEG4Writer.cpp
index db20590..24fb987 100644
--- a/media/libstagefright/MPEG4Writer.cpp
+++ b/media/libstagefright/MPEG4Writer.cpp
@@ -1929,22 +1929,6 @@
     *type = (byte & 0x1F);
 }
 
-static const uint8_t *findNextStartCode(
-        const uint8_t *data, size_t length) {
-
-    ALOGV("findNextStartCode: %p %zu", data, length);
-
-    size_t bytesLeft = length;
-    while (bytesLeft > 4  &&
-            memcmp("\x00\x00\x00\x01", &data[length - bytesLeft], 4)) {
-        --bytesLeft;
-    }
-    if (bytesLeft <= 4) {
-        bytesLeft = 0; // Last parameter set
-    }
-    return &data[length - bytesLeft];
-}
-
 const uint8_t *MPEG4Writer::Track::parseParamSet(
         const uint8_t *data, size_t length, int type, size_t *paramSetLen) {
 
@@ -1952,7 +1936,7 @@
     CHECK(type == kNalUnitTypeSeqParamSet ||
           type == kNalUnitTypePicParamSet);
 
-    const uint8_t *nextStartCode = findNextStartCode(data, length);
+    const uint8_t *nextStartCode = findNextNalStartCode(data, length);
     *paramSetLen = nextStartCode - data;
     if (*paramSetLen == 0) {
         ALOGE("Param set is malformed, since its length is 0");
@@ -1973,6 +1957,7 @@
             if (mProfileIdc != data[1] ||
                 mProfileCompatible != data[2] ||
                 mLevelIdc != data[3]) {
+                // COULD DO: set profile/level to the lowest required to support all SPSs
                 ALOGE("Inconsistent profile/level found in seq parameter sets");
                 return NULL;
             }
@@ -2198,10 +2183,7 @@
     const uint8_t *nextStartCode = data;
     size_t bytesLeft = size;
     while (bytesLeft > 4 && !memcmp("\x00\x00\x00\x01", tmp, 4)) {
-        nextStartCode = findNextStartCode(tmp + 4, bytesLeft - 4);
-        if (nextStartCode == NULL) {
-            return ERROR_MALFORMED;
-        }
+        nextStartCode = findNextNalStartCode(tmp + 4, bytesLeft - 4);
         status_t err = paramSets.addNalUnit(tmp + 4, (nextStartCode - tmp) - 4);
         if (err != OK) {
             return ERROR_MALFORMED;
@@ -3057,11 +3039,14 @@
     mOwner->writeInt8(0x15);   // streamType AudioStream
 
     mOwner->writeInt16(0x03);  // XXX
-    mOwner->writeInt8(0x00);   // buffer size 24-bit
-    int32_t bitRate;
-    bool success = mMeta->findInt32(kKeyBitRate, &bitRate);
-    mOwner->writeInt32(success ? bitRate : 96000); // max bit rate
-    mOwner->writeInt32(success ? bitRate : 96000); // avg bit rate
+    mOwner->writeInt8(0x00);   // buffer size 24-bit (0x300)
+
+    int32_t avgBitrate = 0;
+    (void)mMeta->findInt32(kKeyBitRate, &avgBitrate);
+    int32_t maxBitrate = 0;
+    (void)mMeta->findInt32(kKeyMaxBitRate, &maxBitrate);
+    mOwner->writeInt32(maxBitrate);
+    mOwner->writeInt32(avgBitrate);
 
     mOwner->writeInt8(0x05);   // DecoderSpecificInfoTag
     mOwner->writeInt8(mCodecSpecificDataSize);
@@ -3095,12 +3080,17 @@
     mOwner->writeInt8(0x11);  // streamType VisualStream
 
     static const uint8_t kData[] = {
-        0x01, 0x77, 0x00,
-        0x00, 0x03, 0xe8, 0x00,
-        0x00, 0x03, 0xe8, 0x00
+        0x01, 0x77, 0x00, // buffer size 96000 bytes
     };
     mOwner->write(kData, sizeof(kData));
 
+    int32_t avgBitrate = 0;
+    (void)mMeta->findInt32(kKeyBitRate, &avgBitrate);
+    int32_t maxBitrate = 0;
+    (void)mMeta->findInt32(kKeyMaxBitRate, &maxBitrate);
+    mOwner->writeInt32(maxBitrate);
+    mOwner->writeInt32(avgBitrate);
+
     mOwner->writeInt8(0x05);  // DecoderSpecificInfoTag
 
     mOwner->writeInt8(mCodecSpecificDataSize);
diff --git a/media/libstagefright/NuMediaExtractor.cpp b/media/libstagefright/NuMediaExtractor.cpp
index 271c69b..a669dca 100644
--- a/media/libstagefright/NuMediaExtractor.cpp
+++ b/media/libstagefright/NuMediaExtractor.cpp
@@ -232,7 +232,7 @@
 }
 
 status_t NuMediaExtractor::getTrackFormat(
-        size_t index, sp<AMessage> *format) const {
+        size_t index, sp<AMessage> *format, uint32_t flags) const {
     Mutex::Autolock autoLock(mLock);
 
     *format = NULL;
@@ -245,7 +245,7 @@
         return -ERANGE;
     }
 
-    sp<MetaData> meta = mImpl->getTrackMetaData(index);
+    sp<MetaData> meta = mImpl->getTrackMetaData(index, flags);
     // Extractors either support trackID-s or not, so either all tracks have trackIDs or none.
     // Generate trackID if missing.
     int32_t trackID;
diff --git a/media/libstagefright/OMXClient.cpp b/media/libstagefright/OMXClient.cpp
index 7279f6c..e994069 100644
--- a/media/libstagefright/OMXClient.cpp
+++ b/media/libstagefright/OMXClient.cpp
@@ -207,12 +207,14 @@
 // static
 MuxOMX::node_location MuxOMX::getPreferredCodecLocation(const char *name) {
     if (sCodecProcessEnabled) {
+        // all codecs go to codec process unless excluded using system property, in which case
         // all non-secure decoders, OMX.google.* codecs and encoders can go in the codec process
         // (non-OMX.google.* encoders can be excluded using system property.)
         if ((strcasestr(name, "decoder")
                         && strcasestr(name, ".secure") != name + strlen(name) - 7)
                 || (strcasestr(name, "encoder")
                         && !property_get_bool("media.stagefright.legacyencoder", false))
+                || !property_get_bool("media.stagefright.less-secure", false)
                 || !strncasecmp(name, "OMX.google.", 11)) {
             return CODECPROCESS;
         }
diff --git a/media/libstagefright/Utils.cpp b/media/libstagefright/Utils.cpp
index 3e1badf..4303d09 100644
--- a/media/libstagefright/Utils.cpp
+++ b/media/libstagefright/Utils.cpp
@@ -21,15 +21,20 @@
 #include <stdio.h>
 #include <sys/stat.h>
 
+#include <utility>
+
 #include "include/ESDS.h"
 #include "include/HevcUtils.h"
 
 #include <arpa/inet.h>
 #include <cutils/properties.h>
 #include <media/openmax/OMX_Audio.h>
+#include <media/openmax/OMX_Video.h>
+#include <media/openmax/OMX_VideoExt.h>
 #include <media/stagefright/CodecBase.h>
 #include <media/stagefright/foundation/ABuffer.h>
 #include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/foundation/ALookup.h>
 #include <media/stagefright/foundation/AMessage.h>
 #include <media/stagefright/MetaData.h>
 #include <media/stagefright/MediaDefs.h>
@@ -91,6 +96,7 @@
     return OK;
 }
 
+#if 0
 static void convertMetaDataToMessageInt32(
         const sp<MetaData> &meta, sp<AMessage> &msg, uint32_t key, const char *name) {
     int32_t value;
@@ -98,6 +104,7 @@
         msg->setInt32(name, value);
     }
 }
+#endif
 
 static void convertMetaDataToMessageColorAspects(const sp<MetaData> &meta, sp<AMessage> &msg) {
     // 0 values are unspecified
@@ -134,6 +141,456 @@
     }
 }
 
+static bool isHdr(const sp<AMessage> &format) {
+    // if CSD specifies HDR transfer(s), we assume HDR. Otherwise, if it specifies non-HDR
+    // transfers, we must assume non-HDR. This is because CSD trumps any color-transfer key
+    // in the format.
+    int32_t isHdr;
+    if (format->findInt32("android._is-hdr", &isHdr)) {
+        return isHdr;
+    }
+
+    // if user/container supplied HDR static info without transfer set, assume true
+    if (format->contains("hdr-static-info") && !format->contains("color-transfer")) {
+        return true;
+    }
+    // otherwise, verify that an HDR transfer function is set
+    int32_t transfer;
+    if (format->findInt32("color-transfer", &transfer)) {
+        return transfer == ColorUtils::kColorTransferST2084
+                || transfer == ColorUtils::kColorTransferHLG;
+    }
+    return false;
+}
+
+static void parseAacProfileFromCsd(const sp<ABuffer> &csd, sp<AMessage> &format) {
+    if (csd->size() < 2) {
+        return;
+    }
+
+    uint16_t audioObjectType = U16_AT((uint8_t*)csd->data());
+    if ((audioObjectType & 0xF800) == 0xF800) {
+        audioObjectType = 32 + ((audioObjectType >> 5) & 0x3F);
+    } else {
+        audioObjectType >>= 11;
+    }
+
+    const static ALookup<uint16_t, OMX_AUDIO_AACPROFILETYPE> profiles {
+        { 1,  OMX_AUDIO_AACObjectMain     },
+        { 2,  OMX_AUDIO_AACObjectLC       },
+        { 3,  OMX_AUDIO_AACObjectSSR      },
+        { 4,  OMX_AUDIO_AACObjectLTP      },
+        { 5,  OMX_AUDIO_AACObjectHE       },
+        { 6,  OMX_AUDIO_AACObjectScalable },
+        { 17, OMX_AUDIO_AACObjectERLC     },
+        { 23, OMX_AUDIO_AACObjectLD       },
+        { 29, OMX_AUDIO_AACObjectHE_PS    },
+        { 39, OMX_AUDIO_AACObjectELD      },
+    };
+
+    OMX_AUDIO_AACPROFILETYPE profile;
+    if (profiles.map(audioObjectType, &profile)) {
+        format->setInt32("profile", profile);
+    }
+}
+
+static void parseAvcProfileLevelFromAvcc(const uint8_t *ptr, size_t size, sp<AMessage> &format) {
+    if (size < 4 || ptr[0] != 1) {  // configurationVersion == 1
+        return;
+    }
+    const uint8_t profile = ptr[1];
+    const uint8_t constraints = ptr[2];
+    const uint8_t level = ptr[3];
+
+    const static ALookup<uint8_t, OMX_VIDEO_AVCLEVELTYPE> levels {
+        {  9, OMX_VIDEO_AVCLevel1b }, // technically, 9 is only used for High+ profiles
+        { 10, OMX_VIDEO_AVCLevel1  },
+        { 11, OMX_VIDEO_AVCLevel11 }, // prefer level 1.1 for the value 11
+        { 11, OMX_VIDEO_AVCLevel1b },
+        { 12, OMX_VIDEO_AVCLevel12 },
+        { 13, OMX_VIDEO_AVCLevel13 },
+        { 20, OMX_VIDEO_AVCLevel2  },
+        { 21, OMX_VIDEO_AVCLevel21 },
+        { 22, OMX_VIDEO_AVCLevel22 },
+        { 30, OMX_VIDEO_AVCLevel3  },
+        { 31, OMX_VIDEO_AVCLevel31 },
+        { 32, OMX_VIDEO_AVCLevel32 },
+        { 40, OMX_VIDEO_AVCLevel4  },
+        { 41, OMX_VIDEO_AVCLevel41 },
+        { 42, OMX_VIDEO_AVCLevel42 },
+        { 50, OMX_VIDEO_AVCLevel5  },
+        { 51, OMX_VIDEO_AVCLevel51 },
+        { 52, OMX_VIDEO_AVCLevel52 },
+    };
+    const static ALookup<uint8_t, OMX_VIDEO_AVCPROFILETYPE> profiles {
+        { 66, OMX_VIDEO_AVCProfileBaseline },
+        { 77, OMX_VIDEO_AVCProfileMain     },
+        { 88, OMX_VIDEO_AVCProfileExtended },
+        { 100, OMX_VIDEO_AVCProfileHigh    },
+        { 110, OMX_VIDEO_AVCProfileHigh10  },
+        { 122, OMX_VIDEO_AVCProfileHigh422 },
+        { 244, OMX_VIDEO_AVCProfileHigh444 },
+    };
+
+    // set profile & level if they are recognized
+    OMX_VIDEO_AVCPROFILETYPE codecProfile;
+    OMX_VIDEO_AVCLEVELTYPE codecLevel;
+    if (profiles.map(profile, &codecProfile)) {
+        format->setInt32("profile", codecProfile);
+        if (levels.map(level, &codecLevel)) {
+            // for 9 && 11 decide level based on profile and constraint_set3 flag
+            if (level == 11 && (profile == 66 || profile == 77 || profile == 88)) {
+                codecLevel = (constraints & 0x10) ? OMX_VIDEO_AVCLevel1b : OMX_VIDEO_AVCLevel11;
+            }
+            format->setInt32("level", codecLevel);
+        }
+    }
+}
+
+static void parseH263ProfileLevelFromD263(const uint8_t *ptr, size_t size, sp<AMessage> &format) {
+    if (size < 7) {
+        return;
+    }
+
+    const uint8_t profile = ptr[6];
+    const uint8_t level = ptr[5];
+
+    const static ALookup<uint8_t, OMX_VIDEO_H263PROFILETYPE> profiles {
+        { 0, OMX_VIDEO_H263ProfileBaseline },
+        { 1, OMX_VIDEO_H263ProfileH320Coding },
+        { 2, OMX_VIDEO_H263ProfileBackwardCompatible },
+        { 3, OMX_VIDEO_H263ProfileISWV2 },
+        { 4, OMX_VIDEO_H263ProfileISWV3 },
+        { 5, OMX_VIDEO_H263ProfileHighCompression },
+        { 6, OMX_VIDEO_H263ProfileInternet },
+        { 7, OMX_VIDEO_H263ProfileInterlace },
+        { 8, OMX_VIDEO_H263ProfileHighLatency },
+    };
+
+    const static ALookup<uint8_t, OMX_VIDEO_H263LEVELTYPE> levels {
+        { 10, OMX_VIDEO_H263Level10 },
+        { 20, OMX_VIDEO_H263Level20 },
+        { 30, OMX_VIDEO_H263Level30 },
+        { 40, OMX_VIDEO_H263Level40 },
+        { 45, OMX_VIDEO_H263Level45 },
+        { 50, OMX_VIDEO_H263Level50 },
+        { 60, OMX_VIDEO_H263Level60 },
+        { 70, OMX_VIDEO_H263Level70 },
+    };
+
+    // set profile & level if they are recognized
+    OMX_VIDEO_H263PROFILETYPE codecProfile;
+    OMX_VIDEO_H263LEVELTYPE codecLevel;
+    if (profiles.map(profile, &codecProfile)) {
+        format->setInt32("profile", codecProfile);
+        if (levels.map(level, &codecLevel)) {
+            format->setInt32("level", codecLevel);
+        }
+    }
+}
+
+static void parseHevcProfileLevelFromHvcc(const uint8_t *ptr, size_t size, sp<AMessage> &format) {  // parse profile/tier/level from an HEVCDecoderConfigurationRecord
+    if (size < 13 || ptr[0] != 1) {  // configurationVersion == 1; need bytes up to general_level_idc (byte 12)
+        return;
+    }
+
+    const uint8_t profile = ptr[1] & 0x1F;  // byte 1: profile_space(2) | tier_flag(1) | profile_idc(5)
+    const uint8_t tier = (ptr[1] & 0x20) >> 5;  // 0 = Main tier, 1 = High tier
+    const uint8_t level = ptr[12];  // general_level_idc = 30 x level number
+
+    const static ALookup<std::pair<uint8_t, uint8_t>, OMX_VIDEO_HEVCLEVELTYPE> levels {  // keyed by { tier_flag, general_level_idc }
+        { { 0, 30  }, OMX_VIDEO_HEVCMainTierLevel1  },
+        { { 0, 60  }, OMX_VIDEO_HEVCMainTierLevel2  },
+        { { 0, 63  }, OMX_VIDEO_HEVCMainTierLevel21 },
+        { { 0, 90  }, OMX_VIDEO_HEVCMainTierLevel3  },
+        { { 0, 93  }, OMX_VIDEO_HEVCMainTierLevel31 },
+        { { 0, 120 }, OMX_VIDEO_HEVCMainTierLevel4  },
+        { { 0, 123 }, OMX_VIDEO_HEVCMainTierLevel41 },
+        { { 0, 150 }, OMX_VIDEO_HEVCMainTierLevel5  },
+        { { 0, 153 }, OMX_VIDEO_HEVCMainTierLevel51 },
+        { { 0, 156 }, OMX_VIDEO_HEVCMainTierLevel52 },
+        { { 0, 180 }, OMX_VIDEO_HEVCMainTierLevel6  },
+        { { 0, 183 }, OMX_VIDEO_HEVCMainTierLevel61 },
+        { { 0, 186 }, OMX_VIDEO_HEVCMainTierLevel62 },
+        { { 1, 30  }, OMX_VIDEO_HEVCHighTierLevel1  },
+        { { 1, 60  }, OMX_VIDEO_HEVCHighTierLevel2  },
+        { { 1, 63  }, OMX_VIDEO_HEVCHighTierLevel21 },
+        { { 1, 90  }, OMX_VIDEO_HEVCHighTierLevel3  },
+        { { 1, 93  }, OMX_VIDEO_HEVCHighTierLevel31 },
+        { { 1, 120 }, OMX_VIDEO_HEVCHighTierLevel4  },
+        { { 1, 123 }, OMX_VIDEO_HEVCHighTierLevel41 },
+        { { 1, 150 }, OMX_VIDEO_HEVCHighTierLevel5  },
+        { { 1, 153 }, OMX_VIDEO_HEVCHighTierLevel51 },
+        { { 1, 156 }, OMX_VIDEO_HEVCHighTierLevel52 },
+        { { 1, 180 }, OMX_VIDEO_HEVCHighTierLevel6  },
+        { { 1, 183 }, OMX_VIDEO_HEVCHighTierLevel61 },
+        { { 1, 186 }, OMX_VIDEO_HEVCHighTierLevel62 },
+    };
+
+    const static ALookup<uint8_t, OMX_VIDEO_HEVCPROFILETYPE> profiles {
+        { 1, OMX_VIDEO_HEVCProfileMain   },
+        { 2, OMX_VIDEO_HEVCProfileMain10 },
+    };
+
+    // set profile & level if they are recognized
+    OMX_VIDEO_HEVCPROFILETYPE codecProfile;
+    OMX_VIDEO_HEVCLEVELTYPE codecLevel;
+    if (!profiles.map(profile, &codecProfile)) {  // unknown profile_idc: fall back to the compatibility flags in byte 2
+        if (ptr[2] & 0x40 /* general compatibility flag 1 */) {
+            codecProfile = OMX_VIDEO_HEVCProfileMain;
+        } else if (ptr[2] & 0x20 /* general compatibility flag 2 */) {
+            codecProfile = OMX_VIDEO_HEVCProfileMain10;
+        } else {
+            return;  // cannot determine a profile; leave format untouched
+        }
+    }
+
+    // bump to HDR profile
+    if (isHdr(format) && codecProfile == OMX_VIDEO_HEVCProfileMain10) {
+        codecProfile = OMX_VIDEO_HEVCProfileMain10HDR10;
+    }
+
+    format->setInt32("profile", codecProfile);
+    if (levels.map(std::make_pair(tier, level), &codecLevel)) {  // level is optional; profile is always set at this point
+        format->setInt32("level", codecLevel);
+    }
+}
+
+static void parseMpeg2ProfileLevelFromHeader(
+        const uint8_t *data, size_t size, sp<AMessage> &format) {  // extract MPEG-2 profile/level from a stream header's sequence extension
+    // find sequence extension (extension_start_code 0x000001B5)
+    const uint8_t *seq = (const uint8_t*)memmem(data, size, "\x00\x00\x01\xB5", 4);
+    if (seq != NULL && seq + 5 < data + size) {  // need seq[4] and seq[5] to be readable
+        const uint8_t start_code = seq[4] >> 4;  // top nibble after the start code is the extension ID
+        if (start_code != 1 /* sequence extension ID */) {
+            return;
+        }
+        const uint8_t indication = ((seq[4] & 0xF) << 4) | ((seq[5] & 0xF0) >> 4);  // 8-bit profile_and_level_indication straddles the two bytes
+
+        const static ALookup<uint8_t, OMX_VIDEO_MPEG2PROFILETYPE> profiles {  // bits 6-4 of the indication
+            { 0x50, OMX_VIDEO_MPEG2ProfileSimple  },
+            { 0x40, OMX_VIDEO_MPEG2ProfileMain    },
+            { 0x30, OMX_VIDEO_MPEG2ProfileSNR     },
+            { 0x20, OMX_VIDEO_MPEG2ProfileSpatial },
+            { 0x10, OMX_VIDEO_MPEG2ProfileHigh    },
+        };
+
+        const static ALookup<uint8_t, OMX_VIDEO_MPEG2LEVELTYPE> levels {  // bits 3-0 of the indication
+            { 0x0A, OMX_VIDEO_MPEG2LevelLL  },
+            { 0x08, OMX_VIDEO_MPEG2LevelML  },
+            { 0x06, OMX_VIDEO_MPEG2LevelH14 },
+            { 0x04, OMX_VIDEO_MPEG2LevelHL  },
+            { 0x02, OMX_VIDEO_MPEG2LevelHP  },
+        };
+
+        const static ALookup<uint8_t,
+                std::pair<OMX_VIDEO_MPEG2PROFILETYPE, OMX_VIDEO_MPEG2LEVELTYPE>> escapes {  // escape values (high bit set) encode profile+level jointly
+            /* unsupported
+            { 0x8E, { XXX_MPEG2ProfileMultiView, OMX_VIDEO_MPEG2LevelLL  } },
+            { 0x8D, { XXX_MPEG2ProfileMultiView, OMX_VIDEO_MPEG2LevelML  } },
+            { 0x8B, { XXX_MPEG2ProfileMultiView, OMX_VIDEO_MPEG2LevelH14 } },
+            { 0x8A, { XXX_MPEG2ProfileMultiView, OMX_VIDEO_MPEG2LevelHL  } }, */
+            { 0x85, { OMX_VIDEO_MPEG2Profile422, OMX_VIDEO_MPEG2LevelML  } },
+            { 0x82, { OMX_VIDEO_MPEG2Profile422, OMX_VIDEO_MPEG2LevelHL  } },
+        };
+
+        OMX_VIDEO_MPEG2PROFILETYPE profile;
+        OMX_VIDEO_MPEG2LEVELTYPE level;
+        std::pair<OMX_VIDEO_MPEG2PROFILETYPE, OMX_VIDEO_MPEG2LEVELTYPE> profileLevel;
+        if (escapes.map(indication, &profileLevel)) {  // try the whole-byte escape codes first
+            format->setInt32("profile", profileLevel.first);
+            format->setInt32("level", profileLevel.second);
+        } else if (profiles.map(indication & 0x70, &profile)) {  // otherwise decode the profile nibble, then the level nibble
+            format->setInt32("profile", profile);
+            if (levels.map(indication & 0xF, &level)) {
+                format->setInt32("level", level);
+            }
+        }
+    }
+}
+
+static void parseMpeg2ProfileLevelFromEsds(ESDS &esds, sp<AMessage> &format) {  // derive the MPEG-2 profile from the ESDS objectTypeIndication
+    // esds seems to only contain the profile for MPEG-2 (no level information)
+    uint8_t objType;
+    if (esds.getObjectTypeIndication(&objType) == OK) {
+        const static ALookup<uint8_t, OMX_VIDEO_MPEG2PROFILETYPE> profiles{  // 0x60-0x65: MPEG-2 visual objectTypeIndication values -- presumably per ISO/IEC 14496-1, TODO confirm
+            { 0x60, OMX_VIDEO_MPEG2ProfileSimple  },
+            { 0x61, OMX_VIDEO_MPEG2ProfileMain    },
+            { 0x62, OMX_VIDEO_MPEG2ProfileSNR     },
+            { 0x63, OMX_VIDEO_MPEG2ProfileSpatial },
+            { 0x64, OMX_VIDEO_MPEG2ProfileHigh    },
+            { 0x65, OMX_VIDEO_MPEG2Profile422     },
+        };
+
+        OMX_VIDEO_MPEG2PROFILETYPE profile;
+        if (profiles.map(objType, &profile)) {  // only "profile" is set; "level" is left untouched
+            format->setInt32("profile", profile);
+        }
+    }
+}
+
+static void parseMpeg4ProfileLevelFromCsd(const sp<ABuffer> &csd, sp<AMessage> &format) {  // derive MPEG-4 part 2 profile/level from codec-specific data
+    const uint8_t *data = csd->data();
+    // find visual object sequence (visual_object_sequence_start_code 0x000001B0)
+    const uint8_t *seq = (const uint8_t*)memmem(data, csd->size(), "\x00\x00\x01\xB0", 4);
+    if (seq != NULL && seq + 4 < data + csd->size()) {  // need seq[4] to be readable
+        const uint8_t indication = seq[4];  // profile_and_level_indication byte following the start code
+
+        const static ALookup<uint8_t,
+                std::pair<OMX_VIDEO_MPEG4PROFILETYPE, OMX_VIDEO_MPEG4LEVELTYPE>> table {  // joint profile+level codes (cf. ISO/IEC 14496-2 indication table)
+            { 0b00000001, { OMX_VIDEO_MPEG4ProfileSimple,            OMX_VIDEO_MPEG4Level1  } },
+            { 0b00000010, { OMX_VIDEO_MPEG4ProfileSimple,            OMX_VIDEO_MPEG4Level2  } },
+            { 0b00000011, { OMX_VIDEO_MPEG4ProfileSimple,            OMX_VIDEO_MPEG4Level3  } },
+            { 0b00000100, { OMX_VIDEO_MPEG4ProfileSimple,            OMX_VIDEO_MPEG4Level4a } },
+            { 0b00000101, { OMX_VIDEO_MPEG4ProfileSimple,            OMX_VIDEO_MPEG4Level5  } },
+            { 0b00000110, { OMX_VIDEO_MPEG4ProfileSimple,            OMX_VIDEO_MPEG4Level6  } },
+            { 0b00001000, { OMX_VIDEO_MPEG4ProfileSimple,            OMX_VIDEO_MPEG4Level0  } },
+            { 0b00001001, { OMX_VIDEO_MPEG4ProfileSimple,            OMX_VIDEO_MPEG4Level0b } },
+            { 0b00010000, { OMX_VIDEO_MPEG4ProfileSimpleScalable,    OMX_VIDEO_MPEG4Level0  } },
+            { 0b00010001, { OMX_VIDEO_MPEG4ProfileSimpleScalable,    OMX_VIDEO_MPEG4Level1  } },
+            { 0b00010010, { OMX_VIDEO_MPEG4ProfileSimpleScalable,    OMX_VIDEO_MPEG4Level2  } },
+            /* unsupported
+            { 0b00011101, { XXX_MPEG4ProfileSimpleScalableER,        OMX_VIDEO_MPEG4Level0  } },
+            { 0b00011110, { XXX_MPEG4ProfileSimpleScalableER,        OMX_VIDEO_MPEG4Level1  } },
+            { 0b00011111, { XXX_MPEG4ProfileSimpleScalableER,        OMX_VIDEO_MPEG4Level2  } }, */
+            { 0b00100001, { OMX_VIDEO_MPEG4ProfileCore,              OMX_VIDEO_MPEG4Level1  } },
+            { 0b00100010, { OMX_VIDEO_MPEG4ProfileCore,              OMX_VIDEO_MPEG4Level2  } },
+            { 0b00110010, { OMX_VIDEO_MPEG4ProfileMain,              OMX_VIDEO_MPEG4Level2  } },
+            { 0b00110011, { OMX_VIDEO_MPEG4ProfileMain,              OMX_VIDEO_MPEG4Level3  } },
+            { 0b00110100, { OMX_VIDEO_MPEG4ProfileMain,              OMX_VIDEO_MPEG4Level4  } },
+            /* deprecated
+            { 0b01000010, { OMX_VIDEO_MPEG4ProfileNbit,              OMX_VIDEO_MPEG4Level2  } }, */
+            { 0b01010001, { OMX_VIDEO_MPEG4ProfileScalableTexture,   OMX_VIDEO_MPEG4Level1  } },
+            { 0b01100001, { OMX_VIDEO_MPEG4ProfileSimpleFace,        OMX_VIDEO_MPEG4Level1  } },
+            { 0b01100010, { OMX_VIDEO_MPEG4ProfileSimpleFace,        OMX_VIDEO_MPEG4Level2  } },
+            { 0b01100011, { OMX_VIDEO_MPEG4ProfileSimpleFBA,         OMX_VIDEO_MPEG4Level1  } },
+            { 0b01100100, { OMX_VIDEO_MPEG4ProfileSimpleFBA,         OMX_VIDEO_MPEG4Level2  } },
+            { 0b01110001, { OMX_VIDEO_MPEG4ProfileBasicAnimated,     OMX_VIDEO_MPEG4Level1  } },
+            { 0b01110010, { OMX_VIDEO_MPEG4ProfileBasicAnimated,     OMX_VIDEO_MPEG4Level2  } },
+            { 0b10000001, { OMX_VIDEO_MPEG4ProfileHybrid,            OMX_VIDEO_MPEG4Level1  } },
+            { 0b10000010, { OMX_VIDEO_MPEG4ProfileHybrid,            OMX_VIDEO_MPEG4Level2  } },
+            { 0b10010001, { OMX_VIDEO_MPEG4ProfileAdvancedRealTime,  OMX_VIDEO_MPEG4Level1  } },
+            { 0b10010010, { OMX_VIDEO_MPEG4ProfileAdvancedRealTime,  OMX_VIDEO_MPEG4Level2  } },
+            { 0b10010011, { OMX_VIDEO_MPEG4ProfileAdvancedRealTime,  OMX_VIDEO_MPEG4Level3  } },
+            { 0b10010100, { OMX_VIDEO_MPEG4ProfileAdvancedRealTime,  OMX_VIDEO_MPEG4Level4  } },
+            { 0b10100001, { OMX_VIDEO_MPEG4ProfileCoreScalable,      OMX_VIDEO_MPEG4Level1  } },
+            { 0b10100010, { OMX_VIDEO_MPEG4ProfileCoreScalable,      OMX_VIDEO_MPEG4Level2  } },
+            { 0b10100011, { OMX_VIDEO_MPEG4ProfileCoreScalable,      OMX_VIDEO_MPEG4Level3  } },
+            { 0b10110001, { OMX_VIDEO_MPEG4ProfileAdvancedCoding,    OMX_VIDEO_MPEG4Level1  } },
+            { 0b10110010, { OMX_VIDEO_MPEG4ProfileAdvancedCoding,    OMX_VIDEO_MPEG4Level2  } },
+            { 0b10110011, { OMX_VIDEO_MPEG4ProfileAdvancedCoding,    OMX_VIDEO_MPEG4Level3  } },
+            { 0b10110100, { OMX_VIDEO_MPEG4ProfileAdvancedCoding,    OMX_VIDEO_MPEG4Level4  } },
+            { 0b11000001, { OMX_VIDEO_MPEG4ProfileAdvancedCore,      OMX_VIDEO_MPEG4Level1  } },
+            { 0b11000010, { OMX_VIDEO_MPEG4ProfileAdvancedCore,      OMX_VIDEO_MPEG4Level2  } },
+            { 0b11010001, { OMX_VIDEO_MPEG4ProfileAdvancedScalable,  OMX_VIDEO_MPEG4Level1  } },
+            { 0b11010010, { OMX_VIDEO_MPEG4ProfileAdvancedScalable,  OMX_VIDEO_MPEG4Level2  } },
+            { 0b11010011, { OMX_VIDEO_MPEG4ProfileAdvancedScalable,  OMX_VIDEO_MPEG4Level3  } },
+            /* unsupported
+            { 0b11100001, { XXX_MPEG4ProfileSimpleStudio,            OMX_VIDEO_MPEG4Level1  } },
+            { 0b11100010, { XXX_MPEG4ProfileSimpleStudio,            OMX_VIDEO_MPEG4Level2  } },
+            { 0b11100011, { XXX_MPEG4ProfileSimpleStudio,            OMX_VIDEO_MPEG4Level3  } },
+            { 0b11100100, { XXX_MPEG4ProfileSimpleStudio,            OMX_VIDEO_MPEG4Level4  } },
+            { 0b11100101, { XXX_MPEG4ProfileCoreStudio,              OMX_VIDEO_MPEG4Level1  } },
+            { 0b11100110, { XXX_MPEG4ProfileCoreStudio,              OMX_VIDEO_MPEG4Level2  } },
+            { 0b11100111, { XXX_MPEG4ProfileCoreStudio,              OMX_VIDEO_MPEG4Level3  } },
+            { 0b11101000, { XXX_MPEG4ProfileCoreStudio,              OMX_VIDEO_MPEG4Level4  } },
+            { 0b11101011, { XXX_MPEG4ProfileSimpleStudio,            OMX_VIDEO_MPEG4Level5  } },
+            { 0b11101100, { XXX_MPEG4ProfileSimpleStudio,            OMX_VIDEO_MPEG4Level6  } }, */
+            { 0b11110000, { OMX_VIDEO_MPEG4ProfileAdvancedSimple,    OMX_VIDEO_MPEG4Level0  } },
+            { 0b11110001, { OMX_VIDEO_MPEG4ProfileAdvancedSimple,    OMX_VIDEO_MPEG4Level1  } },
+            { 0b11110010, { OMX_VIDEO_MPEG4ProfileAdvancedSimple,    OMX_VIDEO_MPEG4Level2  } },
+            { 0b11110011, { OMX_VIDEO_MPEG4ProfileAdvancedSimple,    OMX_VIDEO_MPEG4Level3  } },
+            { 0b11110100, { OMX_VIDEO_MPEG4ProfileAdvancedSimple,    OMX_VIDEO_MPEG4Level4  } },
+            { 0b11110101, { OMX_VIDEO_MPEG4ProfileAdvancedSimple,    OMX_VIDEO_MPEG4Level5  } },
+            { 0b11110111, { OMX_VIDEO_MPEG4ProfileAdvancedSimple,    OMX_VIDEO_MPEG4Level3b } },
+            /* deprecated
+            { 0b11111000, { XXX_MPEG4ProfileFineGranularityScalable, OMX_VIDEO_MPEG4Level0  } },
+            { 0b11111001, { XXX_MPEG4ProfileFineGranularityScalable, OMX_VIDEO_MPEG4Level1  } },
+            { 0b11111010, { XXX_MPEG4ProfileFineGranularityScalable, OMX_VIDEO_MPEG4Level2  } },
+            { 0b11111011, { XXX_MPEG4ProfileFineGranularityScalable, OMX_VIDEO_MPEG4Level3  } },
+            { 0b11111100, { XXX_MPEG4ProfileFineGranularityScalable, OMX_VIDEO_MPEG4Level4  } },
+            { 0b11111101, { XXX_MPEG4ProfileFineGranularityScalable, OMX_VIDEO_MPEG4Level5  } }, */
+        };
+
+        std::pair<OMX_VIDEO_MPEG4PROFILETYPE, OMX_VIDEO_MPEG4LEVELTYPE> profileLevel;
+        if (table.map(indication, &profileLevel)) {  // unknown indications set neither key
+            format->setInt32("profile", profileLevel.first);
+            format->setInt32("level", profileLevel.second);
+        }
+    }
+}
+
<br>
+static void parseVp9ProfileLevelFromCsd(const sp<ABuffer> &csd, sp<AMessage> &format) {  // walk csd-0 as {id, length, payload} records -- presumably WebM VP9 CodecPrivate, TODO confirm
+    const uint8_t *data = csd->data();
+    size_t remaining = csd->size();
+
+    while (remaining >= 2) {  // each record needs at least the id and length bytes
+        const uint8_t id = data[0];
+        const uint8_t length = data[1];
+        remaining -= 2;
+        data += 2;
+        if (length > remaining) {  // truncated record: stop parsing
+            break;
+        }
+        switch (id) {
+            case 1 /* profileId */:
+                if (length >= 1) {
+                    const static ALookup<uint8_t, OMX_VIDEO_VP9PROFILETYPE> profiles {
+                        { 0, OMX_VIDEO_VP9Profile0 },
+                        { 1, OMX_VIDEO_VP9Profile1 },
+                        { 2, OMX_VIDEO_VP9Profile2 },
+                        { 3, OMX_VIDEO_VP9Profile3 },
+                    };
+
+                    const static ALookup<OMX_VIDEO_VP9PROFILETYPE, OMX_VIDEO_VP9PROFILETYPE> toHdr {  // only profiles 2/3 have HDR variants
+                        { OMX_VIDEO_VP9Profile2, OMX_VIDEO_VP9Profile2HDR },
+                        { OMX_VIDEO_VP9Profile3, OMX_VIDEO_VP9Profile3HDR },
+                    };
+
+                    OMX_VIDEO_VP9PROFILETYPE profile;
+                    if (profiles.map(data[0], &profile)) {
+                        // convert to HDR profile (no-op if the profile has no HDR variant)
+                        if (isHdr(format)) {
+                            toHdr.lookup(profile, &profile);
+                        }
+
+                        format->setInt32("profile", profile);
+                    }
+                }
+                break;
+            case 2 /* levelId */:
+                if (length >= 1) {
+                    const static ALookup<uint8_t, OMX_VIDEO_VP9LEVELTYPE> levels {  // level ids are 10 x level number
+                        { 10, OMX_VIDEO_VP9Level1  },
+                        { 11, OMX_VIDEO_VP9Level11 },
+                        { 20, OMX_VIDEO_VP9Level2  },
+                        { 21, OMX_VIDEO_VP9Level21 },
+                        { 30, OMX_VIDEO_VP9Level3  },
+                        { 31, OMX_VIDEO_VP9Level31 },
+                        { 40, OMX_VIDEO_VP9Level4  },
+                        { 41, OMX_VIDEO_VP9Level41 },
+                        { 50, OMX_VIDEO_VP9Level5  },
+                        { 51, OMX_VIDEO_VP9Level51 },
+                        { 52, OMX_VIDEO_VP9Level52 },
+                        { 60, OMX_VIDEO_VP9Level6  },
+                        { 61, OMX_VIDEO_VP9Level61 },
+                        { 62, OMX_VIDEO_VP9Level62 },
+                    };
+
+                    OMX_VIDEO_VP9LEVELTYPE level;
+                    if (levels.map(data[0], &level)) {
+                        format->setInt32("level", level);
+                    }
+                }
+                break;
+            default:
+                break;  // unknown record id: skip its payload
+        }
+        remaining -= length;  // advance past this record's payload
+        data += length;
+    }
+}
+
 status_t convertMetaDataToMessage(
         const sp<MetaData> &meta, sp<AMessage> *format) {
 
@@ -157,9 +614,15 @@
         msg->setInt64("durationUs", durationUs);
     }
 
-    int avgBitRate;
-    if (meta->findInt32(kKeyBitRate, &avgBitRate)) {
-        msg->setInt32("bit-rate", avgBitRate);
+    int32_t avgBitRate = 0;
+    if (meta->findInt32(kKeyBitRate, &avgBitRate) && avgBitRate > 0) {
+        msg->setInt32("bitrate", avgBitRate);
+    }
+
+    int32_t maxBitRate;
+    if (meta->findInt32(kKeyMaxBitRate, &maxBitRate)
+            && maxBitRate > 0 && maxBitRate >= avgBitRate) {
+        msg->setInt32("max-bitrate", maxBitRate);
     }
 
     int32_t isSync;
@@ -209,8 +672,14 @@
             msg->setInt32("rotation-degrees", rotationDegrees);
         }
 
-        convertMetaDataToMessageInt32(meta, msg, kKeyMinLuminance, "min-luminance");
-        convertMetaDataToMessageInt32(meta, msg, kKeyMaxLuminance, "max-luminance");
+        uint32_t type;
+        const void *data;
+        size_t size;
+        if (meta->findData(kKeyHdrStaticInfo, &type, &data, &size)
+                && type == 'hdrS' && size == sizeof(HDRStaticInfo)) {
+            ColorUtils::setHDRStaticInfoIntoFormat(*(HDRStaticInfo*)data, msg);
+        }
+
         convertMetaDataToMessageColorAspects(meta, msg);
     } else if (!strncasecmp("audio/", mime, 6)) {
         int32_t numChannels, sampleRate;
@@ -289,8 +758,8 @@
             ALOGE("b/23680780");
             return BAD_VALUE;
         }
-        uint8_t profile __unused = ptr[1];
-        uint8_t level __unused = ptr[3];
+
+        parseAvcProfileLevelFromAvcc(ptr, size, msg);
 
         // There is decodable content out there that fails the following
         // assertion, let's be lenient for now...
@@ -386,12 +855,11 @@
             ALOGE("b/23680780");
             return BAD_VALUE;
         }
-        uint8_t profile __unused = ptr[1] & 31;
-        uint8_t level __unused = ptr[12];
+
+        const size_t dataSize = size; // save for later
         ptr += 22;
         size -= 22;
 
-
         size_t numofArrays = (char)ptr[0];
         ptr += 1;
         size -= 1;
@@ -403,6 +871,8 @@
         }
         buffer->setRange(0, 0);
 
+        HevcParameterSets hvcc;
+
         for (i = 0; i < numofArrays; i++) {
             if (size < 3) {
                 ALOGE("b/23680780");
@@ -434,6 +904,7 @@
                 if (err != OK) {
                     return err;
                 }
+                (void)hvcc.addNalUnit(ptr, length);
 
                 ptr += length;
                 size -= length;
@@ -443,6 +914,14 @@
         buffer->meta()->setInt64("timeUs", 0);
         msg->setBuffer("csd-0", buffer);
 
+        // if we saw VUI color information we know whether this is HDR because VUI trumps other
+        // format parameters for HEVC.
+        HevcParameterSets::Info info = hvcc.getInfo();
+        if (info & hvcc.kInfoHasColorDescription) {
+            msg->setInt32("android._is-hdr", (info & hvcc.kInfoIsHdr) != 0);
+        }
+
+        parseHevcProfileLevelFromHvcc((const uint8_t *)data, dataSize, msg);
     } else if (meta->findData(kKeyESDS, &type, &data, &size)) {
         ESDS esds((const char *)data, size);
         if (esds.InitCheck() != (status_t)OK) {
@@ -465,6 +944,34 @@
         buffer->meta()->setInt32("csd", true);
         buffer->meta()->setInt64("timeUs", 0);
         msg->setBuffer("csd-0", buffer);
+
+        if (!strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_MPEG4)) {
+            parseMpeg4ProfileLevelFromCsd(buffer, msg);
+        } else if (!strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_MPEG2)) {
+            parseMpeg2ProfileLevelFromEsds(esds, msg);
+            if (meta->findData(kKeyStreamHeader, &type, &data, &size)) {
+                parseMpeg2ProfileLevelFromHeader((uint8_t*)data, size, msg);
+            }
+        } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AAC)) {
+            parseAacProfileFromCsd(buffer, msg);
+        }
+
+        uint32_t maxBitrate, avgBitrate;
+        if (esds.getBitRate(&maxBitrate, &avgBitrate) == OK) {
+            if (!meta->hasData(kKeyBitRate)
+                    && avgBitrate > 0 && avgBitrate <= INT32_MAX) {
+                msg->setInt32("bitrate", (int32_t)avgBitrate);
+            } else {
+                (void)msg->findInt32("bitrate", (int32_t*)&avgBitrate);
+            }
+            if (!meta->hasData(kKeyMaxBitRate)
+                    && maxBitrate > 0 && maxBitrate <= INT32_MAX && maxBitrate >= avgBitrate) {
+                msg->setInt32("max-bitrate", (int32_t)maxBitrate);
+            }
+        }
+    } else if (meta->findData(kTypeD263, &type, &data, &size)) {
+        const uint8_t *ptr = (const uint8_t *)data;
+        parseH263ProfileLevelFromD263(ptr, size, msg);
     } else if (meta->findData(kKeyVorbisInfo, &type, &data, &size)) {
         sp<ABuffer> buffer = new (std::nothrow) ABuffer(size);
         if (buffer.get() == NULL || buffer->base() == NULL) {
@@ -537,6 +1044,8 @@
         buffer->meta()->setInt32("csd", true);
         buffer->meta()->setInt64("timeUs", 0);
         msg->setBuffer("csd-0", buffer);
+
+        parseVp9ProfileLevelFromCsd(buffer, msg);
     }
 
     // TODO expose "crypto-key"/kKeyCryptoKey through public api
@@ -551,12 +1060,20 @@
     return OK;
 }
 
-static size_t reassembleAVCC(const sp<ABuffer> &csd0, const sp<ABuffer> csd1, char *avcc) {
+const uint8_t *findNextNalStartCode(const uint8_t *data, size_t length) {  // returns the next 4-byte Annex-B start code in [data, data+length), or end of buffer
+    uint8_t *res = NULL;
+    if (length > 4) {
+        // minus 1 as to not match NAL start code at end
+        res = (uint8_t *)memmem(data, length - 1, "\x00\x00\x00\x01", 4);
+    }
+    return res != NULL && res < data + length - 4 ? res : &data[length];  // a start code with no payload after it also counts as "none found"
+}
 
+static size_t reassembleAVCC(const sp<ABuffer> &csd0, const sp<ABuffer> csd1, char *avcc) {
     avcc[0] = 1;        // version
-    avcc[1] = 0x64;     // profile
-    avcc[2] = 0;        // unused (?)
-    avcc[3] = 0xd;      // level
+    avcc[1] = 0x64;     // profile (default to high)
+    avcc[2] = 0;        // constraints (default to none)
+    avcc[3] = 0xd;      // level (default to 1.3)
     avcc[4] = 0xff;     // reserved+size
 
     size_t i = 0;
@@ -564,26 +1081,28 @@
     int lastparamoffset = 0;
     int avccidx = 6;
     do {
-        if (i >= csd0->size() - 4 ||
-                memcmp(csd0->data() + i, "\x00\x00\x00\x01", 4) == 0) {
-            if (i >= csd0->size() - 4) {
-                // there can't be another param here, so use all the rest
-                i = csd0->size();
+        i = findNextNalStartCode(csd0->data() + i, csd0->size() - i) - csd0->data();
+        ALOGV("block at %zu, last was %d", i, lastparamoffset);
+        if (lastparamoffset > 0) {
+            const uint8_t *lastparam = csd0->data() + lastparamoffset;
+            int size = i - lastparamoffset;
+            if (size > 3) {
+                if (numparams && memcmp(avcc + 1, lastparam + 1, 3)) {
+                    ALOGW("Inconsisted profile/level found in SPS: %x,%x,%x vs %x,%x,%x",
+                            avcc[1], avcc[2], avcc[3], lastparam[1], lastparam[2], lastparam[3]);
+                } else if (!numparams) {
+                    // fill in profile, constraints and level
+                    memcpy(avcc + 1, lastparam + 1, 3);
+                }
             }
-            ALOGV("block at %zu, last was %d", i, lastparamoffset);
-            if (lastparamoffset > 0) {
-                int size = i - lastparamoffset;
-                avcc[avccidx++] = size >> 8;
-                avcc[avccidx++] = size & 0xff;
-                memcpy(avcc+avccidx, csd0->data() + lastparamoffset, size);
-                avccidx += size;
-                numparams++;
-            }
-            i += 4;
-            lastparamoffset = i;
-        } else {
-            i++;
+            avcc[avccidx++] = size >> 8;
+            avcc[avccidx++] = size & 0xff;
+            memcpy(avcc+avccidx, lastparam, size);
+            avccidx += size;
+            numparams++;
         }
+        i += 4;
+        lastparamoffset = i;
     } while(i < csd0->size());
     ALOGV("csd0 contains %d params", numparams);
 
@@ -595,26 +1114,18 @@
     int numpicparamsoffset = avccidx;
     avccidx++;
     do {
-        if (i >= csd1->size() - 4 ||
-                memcmp(csd1->data() + i, "\x00\x00\x00\x01", 4) == 0) {
-            if (i >= csd1->size() - 4) {
-                // there can't be another param here, so use all the rest
-                i = csd1->size();
-            }
-            ALOGV("block at %zu, last was %d", i, lastparamoffset);
-            if (lastparamoffset > 0) {
-                int size = i - lastparamoffset;
-                avcc[avccidx++] = size >> 8;
-                avcc[avccidx++] = size & 0xff;
-                memcpy(avcc+avccidx, csd1->data() + lastparamoffset, size);
-                avccidx += size;
-                numparams++;
-            }
-            i += 4;
-            lastparamoffset = i;
-        } else {
-            i++;
+        i = findNextNalStartCode(csd1->data() + i, csd1->size() - i) - csd1->data();
+        ALOGV("block at %zu, last was %d", i, lastparamoffset);
+        if (lastparamoffset > 0) {
+            int size = i - lastparamoffset;
+            avcc[avccidx++] = size >> 8;
+            avcc[avccidx++] = size & 0xff;
+            memcpy(avcc+avccidx, csd1->data() + lastparamoffset, size);
+            avccidx += size;
+            numparams++;
         }
+        i += 4;
+        lastparamoffset = i;
     } while(i < csd1->size());
     avcc[numpicparamsoffset] = numparams;
     return avccidx;
@@ -638,15 +1149,16 @@
     esds[11] = 0x80 | ((configdescriptorsize >> 7) & 0x7f);
     esds[12] = (configdescriptorsize & 0x7f);
     esds[13] = 0x40; // objectTypeIndication
-    esds[14] = 0x15; // not sure what 14-25 mean, they are ignored by ESDS.cpp,
-    esds[15] = 0x00; // but the actual values here were taken from a real file.
+    // bytes 14-25 are examples from a real file. they are unused/overwritten by muxers.
+    esds[14] = 0x15; // streamType(5), upStream(0),
+    esds[15] = 0x00; // 15-17: bufferSizeDB (6KB)
     esds[16] = 0x18;
     esds[17] = 0x00;
-    esds[18] = 0x00;
+    esds[18] = 0x00; // 18-21: maxBitrate (64kbps)
     esds[19] = 0x00;
     esds[20] = 0xfa;
     esds[21] = 0x00;
-    esds[22] = 0x00;
+    esds[22] = 0x00; // 22-25: avgBitrate (64kbps)
     esds[23] = 0x00;
     esds[24] = 0xfa;
     esds[25] = 0x00;
@@ -657,7 +1169,6 @@
     esds[30] = (csd0size & 0x7f);
     memcpy((void*)&esds[31], csd0->data(), csd0size);
     // data following this is ignored, so don't bother appending it
-
 }
 
 static size_t reassembleHVCC(const sp<ABuffer> &csd0, uint8_t *hvcc, size_t hvccSize, size_t nalSizeLength) {
@@ -695,6 +1206,7 @@
     return size;
 }
 
+#if 0
 static void convertMessageToMetaDataInt32(
         const sp<AMessage> &msg, sp<MetaData> &meta, uint32_t key, const char *name) {
     int32_t value;
@@ -702,6 +1214,7 @@
         meta->setInt32(key, value);
     }
 }
+#endif
 
 static void convertMessageToMetaDataColorAspects(const sp<AMessage> &msg, sp<MetaData> &meta) {
     // 0 values are unspecified
@@ -750,6 +1263,15 @@
         meta->setInt32(kKeyIsSyncFrame, 1);
     }
 
+    int32_t avgBitrate = 0;
+    int32_t maxBitrate;
+    if (msg->findInt32("bitrate", &avgBitrate) && avgBitrate > 0) {
+        meta->setInt32(kKeyBitRate, avgBitrate);
+    }
+    if (msg->findInt32("max-bitrate", &maxBitrate) && maxBitrate > 0 && maxBitrate >= avgBitrate) {
+        meta->setInt32(kKeyMaxBitRate, maxBitrate);
+    }
+
     if (mime.startsWith("video/")) {
         int32_t width;
         int32_t height;
@@ -786,8 +1308,13 @@
             meta->setInt32(kKeyRotation, rotationDegrees);
         }
 
-        convertMessageToMetaDataInt32(msg, meta, kKeyMinLuminance, "min-luminance");
-        convertMessageToMetaDataInt32(msg, meta, kKeyMaxLuminance, "max-luminance");
+        if (msg->contains("hdr-static-info")) {
+            HDRStaticInfo info;
+            if (ColorUtils::getHDRStaticInfoFromFormat(msg, &info)) {
+                meta->setData(kKeyHdrStaticInfo, 'hdrS', &info, sizeof(info));
+            }
+        }
+
         convertMessageToMetaDataColorAspects(msg, meta);
     } else if (mime.startsWith("audio/")) {
         int32_t numChannels;
@@ -848,7 +1375,7 @@
     }
 
     // reassemble the csd data into its original form
-    sp<ABuffer> csd0;
+    sp<ABuffer> csd0, csd1, csd2;
     if (msg->findBuffer("csd-0", &csd0)) {
         if (mime == MEDIA_MIMETYPE_VIDEO_AVC) {
             sp<ABuffer> csd1;
@@ -868,6 +1395,21 @@
             uint8_t hvcc[1024]; // that oughta be enough, right?
             size_t outsize = reassembleHVCC(csd0, hvcc, 1024, 4);
             meta->setData(kKeyHVCC, kKeyHVCC, hvcc, outsize);
+        } else if (mime == MEDIA_MIMETYPE_VIDEO_VP9) {
+            meta->setData(kKeyVp9CodecPrivate, 0, csd0->data(), csd0->size());
+        } else if (mime == MEDIA_MIMETYPE_AUDIO_OPUS) {
+            meta->setData(kKeyOpusHeader, 0, csd0->data(), csd0->size());
+            if (msg->findBuffer("csd-1", &csd1)) {
+                meta->setData(kKeyOpusCodecDelay, 0, csd1->data(), csd1->size());
+            }
+            if (msg->findBuffer("csd-2", &csd2)) {
+                meta->setData(kKeyOpusSeekPreRoll, 0, csd2->data(), csd2->size());
+            }
+        } else if (mime == MEDIA_MIMETYPE_AUDIO_VORBIS) {
+            meta->setData(kKeyVorbisInfo, 0, csd0->data(), csd0->size());
+            if (msg->findBuffer("csd-1", &csd1)) {
+                meta->setData(kKeyVorbisBooks, 0, csd1->data(), csd1->size());
+            }
         }
     }
 
@@ -1059,7 +1601,7 @@
     int32_t brate = -1;
     if (!meta->findInt32(kKeyBitRate, &brate)) {
         ALOGV("track of type '%s' does not publish bitrate", mime);
-     }
+    }
     info.bit_rate = brate;
 
 
diff --git a/media/libstagefright/avc_utils.cpp b/media/libstagefright/avc_utils.cpp
index 8ef2dca..ccf3440 100644
--- a/media/libstagefright/avc_utils.cpp
+++ b/media/libstagefright/avc_utils.cpp
@@ -41,10 +41,37 @@
     return x + (1u << numZeroes) - 1;
 }
 
+unsigned parseUEWithFallback(ABitReader *br, unsigned fallback) {  // Exp-Golomb ue(v) that yields |fallback| on truncated or oversized input instead of failing
+    unsigned numZeroes = 0;
+    while (br->getBitsWithFallback(1, 1) == 0) {  // count leading zero bits; the fallback bit 1 ends the loop at end of stream
+        ++numZeroes;
+    }
+    uint32_t x;
+    if (numZeroes < 32) {
+        if (br->getBitsGraceful(numZeroes, &x)) {
+            return x + (1u << numZeroes) - 1;  // standard ue(v) decode: value = suffix + 2^numZeroes - 1
+        } else {
+            return fallback;  // stream ended inside the suffix bits
+        }
+    } else {
+        br->skipBits(numZeroes);  // code too long for 32 bits: consume it and bail out
+        return fallback;
+    }
+}
+
 signed parseSE(ABitReader *br) {
     unsigned codeNum = parseUE(br);
 
-    return (codeNum & 1) ? (codeNum + 1) / 2 : -(codeNum / 2);
+    return (codeNum & 1) ? (codeNum + 1) / 2 : -signed(codeNum / 2);
+}
+
+signed parseSEWithFallback(ABitReader *br, signed fallback) {  // Exp-Golomb se(v) that yields |fallback| on malformed input
+    // NOTE: parseUE cannot normally return ~0 as the max supported value is 0xFFFE,
+    // so ~0U is a safe sentinel for "parse failed"
+    unsigned codeNum = parseUEWithFallback(br, ~0U);
+    if (codeNum == ~0U) {
+        return fallback;
+    }
+    return (codeNum & 1) ? (codeNum + 1) / 2 : -signed(codeNum / 2);  // ue->se mapping: odd codes are positive, even codes negative
+}
 
 static void skipScalingList(ABitReader *br, size_t sizeOfScalingList) {
diff --git a/media/libstagefright/codecs/aacdec/SoftAAC2.cpp b/media/libstagefright/codecs/aacdec/SoftAAC2.cpp
index e88dfa8..ff76bc8 100644
--- a/media/libstagefright/codecs/aacdec/SoftAAC2.cpp
+++ b/media/libstagefright/codecs/aacdec/SoftAAC2.cpp
@@ -26,6 +26,7 @@
 #include <media/stagefright/foundation/ADebug.h>
 #include <media/stagefright/foundation/hexdump.h>
 #include <media/stagefright/MediaErrors.h>
+#include <utils/misc.h>
 
 #include <math.h>
 
@@ -55,6 +56,14 @@
     params->nVersion.s.nStep = 0;
 }
 
+static const OMX_U32 kSupportedProfiles[] = {
+    OMX_AUDIO_AACObjectLC,
+    OMX_AUDIO_AACObjectHE,
+    OMX_AUDIO_AACObjectHE_PS,
+    OMX_AUDIO_AACObjectLD,
+    OMX_AUDIO_AACObjectELD,
+};
+
 SoftAAC2::SoftAAC2(
         const char *name,
         const OMX_CALLBACKTYPE *callbacks,
@@ -207,7 +216,7 @@
 
 OMX_ERRORTYPE SoftAAC2::internalGetParameter(
         OMX_INDEXTYPE index, OMX_PTR params) {
-    switch (index) {
+    switch ((OMX_U32) index) {
         case OMX_IndexParamAudioAac:
         {
             OMX_AUDIO_PARAM_AACPROFILETYPE *aacParams =
@@ -283,6 +292,29 @@
             return OMX_ErrorNone;
         }
 
+        case OMX_IndexParamAudioProfileQuerySupported:
+        {
+            OMX_AUDIO_PARAM_ANDROID_PROFILETYPE *profileParams =
+                (OMX_AUDIO_PARAM_ANDROID_PROFILETYPE *)params;
+
+            if (!isValidOMXParam(profileParams)) {
+                return OMX_ErrorBadParameter;
+            }
+
+            if (profileParams->nPortIndex != 0) {
+                return OMX_ErrorUndefined;
+            }
+
+            if (profileParams->nProfileIndex >= NELEM(kSupportedProfiles)) {
+                return OMX_ErrorNoMore;
+            }
+
+            profileParams->eProfile =
+                kSupportedProfiles[profileParams->nProfileIndex];
+
+            return OMX_ErrorNone;
+        }
+
         default:
             return SimpleSoftOMXComponent::internalGetParameter(index, params);
     }
@@ -616,12 +648,15 @@
                         signalError = true;
                     } else {
                         adtsHeaderSize = (protectionAbsent ? 7 : 9);
+                        if (aac_frame_length < adtsHeaderSize) {
+                            signalError = true;
+                        } else {
+                            inBuffer[0] = (UCHAR *)adtsHeader + adtsHeaderSize;
+                            inBufferLength[0] = aac_frame_length - adtsHeaderSize;
 
-                        inBuffer[0] = (UCHAR *)adtsHeader + adtsHeaderSize;
-                        inBufferLength[0] = aac_frame_length - adtsHeaderSize;
-
-                        inHeader->nOffset += adtsHeaderSize;
-                        inHeader->nFilledLen -= adtsHeaderSize;
+                            inHeader->nOffset += adtsHeaderSize;
+                            inHeader->nFilledLen -= adtsHeaderSize;
+                        }
                     }
                 }
 
diff --git a/media/libstagefright/codecs/amrwbenc/Android.mk b/media/libstagefright/codecs/amrwbenc/Android.mk
index 77a7b1e..026006e 100644
--- a/media/libstagefright/codecs/amrwbenc/Android.mk
+++ b/media/libstagefright/codecs/amrwbenc/Android.mk
@@ -80,7 +80,8 @@
         src/asm/ARMV7/Syn_filt_32_neon.s \
         src/asm/ARMV7/syn_filt_neon.s
 
-    LOCAL_CFLAGS_arm := -DARM -DARMV7 -DASM_OPT
+    # don't actually generate neon instructions, see bug 26932980
+    LOCAL_CFLAGS_arm := -DARM -DARMV7 -DASM_OPT -mfpu=vfpv3
     LOCAL_C_INCLUDES_arm := $(LOCAL_PATH)/src/asm/ARMV5E
     LOCAL_C_INCLUDES_arm += $(LOCAL_PATH)/src/asm/ARMV7
 endif
@@ -102,7 +103,7 @@
 
 LOCAL_CFLAGS += -Werror
 LOCAL_CLANG := true
-LOCAL_SANITIZE := signed-integer-overflow
+#LOCAL_SANITIZE := signed-integer-overflow
 
 include $(BUILD_STATIC_LIBRARY)
 
diff --git a/media/libstagefright/codecs/amrwbenc/src/convolve.c b/media/libstagefright/codecs/amrwbenc/src/convolve.c
index 9b8b3aa..8c24414 100644
--- a/media/libstagefright/codecs/amrwbenc/src/convolve.c
+++ b/media/libstagefright/codecs/amrwbenc/src/convolve.c
@@ -47,48 +47,53 @@
         s = vo_mult32((*tmpX++), (*tmpH--));i--;
         while(i>0)
         {
-            s += vo_mult32((*tmpX++), (*tmpH--));
-            s += vo_mult32((*tmpX++), (*tmpH--));
-            s += vo_mult32((*tmpX++), (*tmpH--));
-            s += vo_mult32((*tmpX++), (*tmpH--));
+            s = L_add(s, vo_mult32((*tmpX++), (*tmpH--)));
+            s = L_add(s, vo_mult32((*tmpX++), (*tmpH--)));
+            s = L_add(s, vo_mult32((*tmpX++), (*tmpH--)));
+            s = L_add(s, vo_mult32((*tmpX++), (*tmpH--)));
             i -= 4;
         }
-        y[n] = ((s<<1) + 0x8000)>>16;
+        y[n] = voround(L_shl(s, 1));
         n++;
 
         tmpH = h+n;
         tmpX = x;
         i=n+1;
-        s =  vo_mult32((*tmpX++), (*tmpH--));i--;
-        s += vo_mult32((*tmpX++), (*tmpH--));i--;
+        s =  vo_mult32((*tmpX++), (*tmpH--));
+        i--;
+        s = L_add(s, vo_mult32((*tmpX++), (*tmpH--)));
+        i--;
 
         while(i>0)
         {
-            s += vo_mult32((*tmpX++), (*tmpH--));
-            s += vo_mult32((*tmpX++), (*tmpH--));
-            s += vo_mult32((*tmpX++), (*tmpH--));
-            s += vo_mult32((*tmpX++), (*tmpH--));
+            s = L_add(s, vo_mult32((*tmpX++), (*tmpH--)));
+            s = L_add(s, vo_mult32((*tmpX++), (*tmpH--)));
+            s = L_add(s, vo_mult32((*tmpX++), (*tmpH--)));
+            s = L_add(s, vo_mult32((*tmpX++), (*tmpH--)));
             i -= 4;
         }
-        y[n] = ((s<<1) + 0x8000)>>16;
+        y[n] = voround(L_shl(s, 1));
         n++;
 
         tmpH = h+n;
         tmpX = x;
         i=n+1;
-        s =  vo_mult32((*tmpX++), (*tmpH--));i--;
-        s += vo_mult32((*tmpX++), (*tmpH--));i--;
-        s += vo_mult32((*tmpX++), (*tmpH--));i--;
+        s =  vo_mult32((*tmpX++), (*tmpH--));
+        i--;
+        s = L_add(s, vo_mult32((*tmpX++), (*tmpH--)));
+        i--;
+        s = L_add(s, vo_mult32((*tmpX++), (*tmpH--)));
+        i--;
 
         while(i>0)
         {
-            s += vo_mult32((*tmpX++), (*tmpH--));
-            s += vo_mult32((*tmpX++), (*tmpH--));
-            s += vo_mult32((*tmpX++), (*tmpH--));
-            s += vo_mult32((*tmpX++), (*tmpH--));
+            s = L_add(s, vo_mult32((*tmpX++), (*tmpH--)));
+            s = L_add(s, vo_mult32((*tmpX++), (*tmpH--)));
+            s = L_add(s, vo_mult32((*tmpX++), (*tmpH--)));
+            s = L_add(s, vo_mult32((*tmpX++), (*tmpH--)));
             i -= 4;
         }
-        y[n] = ((s<<1) + 0x8000)>>16;
+        y[n] = voround(L_shl(s, 1));
         n++;
 
         s = 0;
@@ -97,13 +102,13 @@
         i=n+1;
         while(i>0)
         {
-            s += vo_mult32((*tmpX++), (*tmpH--));
-            s += vo_mult32((*tmpX++), (*tmpH--));
-            s += vo_mult32((*tmpX++), (*tmpH--));
-            s += vo_mult32((*tmpX++), (*tmpH--));
+            s = L_add(s, vo_mult32((*tmpX++), (*tmpH--)));
+            s = L_add(s, vo_mult32((*tmpX++), (*tmpH--)));
+            s = L_add(s, vo_mult32((*tmpX++), (*tmpH--)));
+            s = L_add(s, vo_mult32((*tmpX++), (*tmpH--)));
             i -= 4;
         }
-        y[n] = ((s<<1) + 0x8000)>>16;
+        y[n] = voround(L_shl(s, 1));
         n++;
     }
     return;
diff --git a/media/libstagefright/codecs/amrwbenc/src/pitch_f4.c b/media/libstagefright/codecs/amrwbenc/src/pitch_f4.c
index de2a221..b453b25 100644
--- a/media/libstagefright/codecs/amrwbenc/src/pitch_f4.c
+++ b/media/libstagefright/codecs/amrwbenc/src/pitch_f4.c
@@ -84,8 +84,8 @@
 
     /* Find interval to compute normalized correlation */
 
-    t_min = t0_min - L_INTERPOL1;
-    t_max = t0_max + L_INTERPOL1;
+    t_min = L_sub(t0_min, L_INTERPOL1);
+    t_max = L_add(t0_max, L_INTERPOL1);
     corr = &corr_v[-t_min];
     /* Compute normalized correlation between target and filtered excitation */
 #ifdef ASM_OPT               /* asm optimization branch */
@@ -188,15 +188,15 @@
     L_tmp = 0;
     for (i = 0; i < 64; i+=4)
     {
-        L_tmp += (xn[i] * xn[i]);
-        L_tmp += (xn[i+1] * xn[i+1]);
-        L_tmp += (xn[i+2] * xn[i+2]);
-        L_tmp += (xn[i+3] * xn[i+3]);
+        L_tmp = L_add(L_tmp, (xn[i] * xn[i]));
+        L_tmp = L_add(L_tmp, (xn[i+1] * xn[i+1]));
+        L_tmp = L_add(L_tmp, (xn[i+2] * xn[i+2]));
+        L_tmp = L_add(L_tmp, (xn[i+3] * xn[i+3]));
     }
 
-    L_tmp = (L_tmp << 1) + 1;
+    L_tmp = L_add(L_shl(L_tmp, 1), 1);
     exp = norm_l(L_tmp);
-    exp = (32 - exp);
+    exp = L_sub(32, exp);
     //exp = exp + 2;                     /* energy of xn[] x 2 + rounded up     */
     scale = -(exp >> 1);           /* (1<<scale) < 1/sqrt(energy rounded) */
 
@@ -209,36 +209,36 @@
         L_tmp1 = 0;
         for (i = 0; i < 64; i+=4)
         {
-            L_tmp  += (xn[i] * excf[i]);
-            L_tmp1 += (excf[i] * excf[i]);
-            L_tmp  += (xn[i+1] * excf[i+1]);
-            L_tmp1 += (excf[i+1] * excf[i+1]);
-            L_tmp  += (xn[i+2] * excf[i+2]);
-            L_tmp1 += (excf[i+2] * excf[i+2]);
-            L_tmp  += (xn[i+3] * excf[i+3]);
-            L_tmp1 += (excf[i+3] * excf[i+3]);
+            L_tmp = L_add(L_tmp, (xn[i] * excf[i]));
+            L_tmp1 = L_add(L_tmp1, (excf[i] * excf[i]));
+            L_tmp = L_add(L_tmp, (xn[i+1] * excf[i+1]));
+            L_tmp1 = L_add(L_tmp1, (excf[i+1] * excf[i+1]));
+            L_tmp = L_add(L_tmp, (xn[i+2] * excf[i+2]));
+            L_tmp1 = L_add(L_tmp1, (excf[i+2] * excf[i+2]));
+            L_tmp = L_add(L_tmp, (xn[i+3] * excf[i+3]));
+            L_tmp1 = L_add(L_tmp1, (excf[i+3] * excf[i+3]));
         }
 
-        L_tmp = (L_tmp << 1) + 1;
-        L_tmp1 = (L_tmp1 << 1) + 1;
+        L_tmp = L_add(L_shl(L_tmp, 1), 1);
+        L_tmp1 = L_add(L_shl(L_tmp1, 1), 1);
 
         exp = norm_l(L_tmp);
-        L_tmp = (L_tmp << exp);
-        exp_corr = (30 - exp);
+        L_tmp = L_shl(L_tmp, exp);
+        exp_corr = L_sub(30, exp);
         corr = extract_h(L_tmp);
 
         exp = norm_l(L_tmp1);
-        L_tmp = (L_tmp1 << exp);
-        exp_norm = (30 - exp);
+        L_tmp = L_shl(L_tmp1, exp);
+        exp_norm = L_sub(30, exp);
 
         Isqrt_n(&L_tmp, &exp_norm);
         norm = extract_h(L_tmp);
 
         /* Normalize correlation = correlation * (1/sqrt(energy)) */
 
-        L_tmp = vo_L_mult(corr, norm);
+        L_tmp = L_mult(corr, norm);
 
-        L_tmp2 = exp_corr + exp_norm + scale;
+        L_tmp2 = L_add(exp_corr, exp_norm + scale);
         if(L_tmp2 < 0)
         {
             L_tmp2 = -L_tmp2;
@@ -246,10 +246,10 @@
         }
         else
         {
-            L_tmp = L_tmp << L_tmp2;
+            L_tmp = L_shl(L_tmp, L_tmp2);
         }
 
-        corr_norm[t] = vo_round(L_tmp);
+        corr_norm[t] = voround(L_tmp);
         /* modify the filtered excitation excf[] for the next iteration */
 
         if(t != t_max)
@@ -310,13 +310,13 @@
     ptr = &(inter4_1[k][0]);
 
     L_sum  = vo_mult32(x[0], (*ptr++));
-    L_sum += vo_mult32(x[1], (*ptr++));
-    L_sum += vo_mult32(x[2], (*ptr++));
-    L_sum += vo_mult32(x[3], (*ptr++));
-    L_sum += vo_mult32(x[4], (*ptr++));
-    L_sum += vo_mult32(x[5], (*ptr++));
-    L_sum += vo_mult32(x[6], (*ptr++));
-    L_sum += vo_mult32(x[7], (*ptr++));
+    L_sum = L_add(L_sum, vo_mult32(x[1], (*ptr++)));
+    L_sum = L_add(L_sum, vo_mult32(x[2], (*ptr++)));
+    L_sum = L_add(L_sum, vo_mult32(x[3], (*ptr++)));
+    L_sum = L_add(L_sum, vo_mult32(x[4], (*ptr++)));
+    L_sum = L_add(L_sum, vo_mult32(x[5], (*ptr++)));
+    L_sum = L_add(L_sum, vo_mult32(x[6], (*ptr++)));
+    L_sum = L_add(L_sum, vo_mult32(x[7], (*ptr++)));
 
     sum = extract_h(L_add(L_shl2(L_sum, 2), 0x8000));
     return (sum);
diff --git a/media/libstagefright/codecs/amrwbenc/src/voAMRWBEnc.c b/media/libstagefright/codecs/amrwbenc/src/voAMRWBEnc.c
index d59f129..b908ff8 100644
--- a/media/libstagefright/codecs/amrwbenc/src/voAMRWBEnc.c
+++ b/media/libstagefright/codecs/amrwbenc/src/voAMRWBEnc.c
@@ -810,10 +810,10 @@
             vo_p2 = vo_p0-1;
             for (j = 1; j <= M/4; j++)
             {
-                L_tmp -= *vo_p1++ * *vo_p2--;
-                L_tmp -= *vo_p1++ * *vo_p2--;
-                L_tmp -= *vo_p1++ * *vo_p2--;
-                L_tmp -= *vo_p1++ * *vo_p2--;
+                L_tmp = L_sub(L_tmp, *vo_p1++ * *vo_p2--);
+                L_tmp = L_sub(L_tmp, *vo_p1++ * *vo_p2--);
+                L_tmp = L_sub(L_tmp, *vo_p1++ * *vo_p2--);
+                L_tmp = L_sub(L_tmp, *vo_p1++ * *vo_p2--);
             }
             *vo_p3++ = *vo_p0++ = vo_round((L_tmp <<4));
         }
@@ -1205,7 +1205,7 @@
          *------------------------------------------------------*/
 
         /* y2 in Q9, gain_pit in Q14 */
-        L_tmp = (gain_code * y2[L_SUBFR - 1])<<1;
+        L_tmp = L_mult(gain_code, y2[L_SUBFR - 1]);
         L_tmp = L_shl(L_tmp, (5 + shift));
         L_tmp = L_negate(L_tmp);
         L_tmp += (xn[L_SUBFR - 1] * 16384)<<1;
@@ -1220,8 +1220,8 @@
         {
             Word32 tmp;
             /* code in Q9, gain_pit in Q14 */
-            L_tmp = (gain_code * code[i])<<1;
-            L_tmp = (L_tmp << 5);
+            L_tmp = L_mult(gain_code, code[i]);
+            L_tmp = L_shl(L_tmp, 5);
             tmp = L_mult(exc[i + i_subfr], gain_pit); // (exc[i + i_subfr] * gain_pit)<<1
             L_tmp = L_add(L_tmp, tmp);
             L_tmp = L_shl2(L_tmp, 1);
diff --git a/media/libstagefright/codecs/avcdec/SoftAVCDec.cpp b/media/libstagefright/codecs/avcdec/SoftAVCDec.cpp
index 973c528..61b9bfd 100644
--- a/media/libstagefright/codecs/avcdec/SoftAVCDec.cpp
+++ b/media/libstagefright/codecs/avcdec/SoftAVCDec.cpp
@@ -497,16 +497,6 @@
     List<BufferInfo *> &inQueue = getPortQueue(kInputPortIndex);
     List<BufferInfo *> &outQueue = getPortQueue(kOutputPortIndex);
 
-    /* If input EOS is seen and decoder is not in flush mode,
-     * set the decoder in flush mode.
-     * There can be a case where EOS is sent along with last picture data
-     * In that case, only after decoding that input data, decoder has to be
-     * put in flush. This case is handled here  */
-
-    if (mReceivedEOS && !mIsInFlush) {
-        setFlushMode();
-    }
-
     while (!outQueue.empty()) {
         BufferInfo *inInfo;
         OMX_BUFFERHEADERTYPE *inHeader;
@@ -674,7 +664,7 @@
                 outInfo = NULL;
                 notifyFillBufferDone(outHeader);
                 outHeader = NULL;
-            } else {
+            } else if (mIsInFlush) {
                 /* If in flush mode and no output is returned by the codec,
                  * then come out of flush mode */
                 mIsInFlush = false;
@@ -695,6 +685,16 @@
             }
         }
 
+        /* If input EOS is seen and decoder is not in flush mode,
+         * set the decoder in flush mode.
+         * There can be a case where EOS is sent along with last picture data
+         * In that case, only after decoding that input data, decoder has to be
+         * put in flush. This case is handled here  */
+
+        if (mReceivedEOS && !mIsInFlush) {
+            setFlushMode();
+        }
+
         if (inHeader != NULL) {
             inInfo->mOwnedByUs = false;
             inQueue.erase(inQueue.begin());
diff --git a/media/libstagefright/codecs/hevcdec/SoftHEVC.cpp b/media/libstagefright/codecs/hevcdec/SoftHEVC.cpp
index 54736f8..0215a11 100644
--- a/media/libstagefright/codecs/hevcdec/SoftHEVC.cpp
+++ b/media/libstagefright/codecs/hevcdec/SoftHEVC.cpp
@@ -444,6 +444,9 @@
 
     if (NULL == mCodecCtx) {
         if (OK != initDecoder()) {
+            ALOGE("Failed to initialize decoder");
+            notify(OMX_EventError, OMX_ErrorUnsupportedSetting, 0, NULL);
+            mSignalledError = true;
             return;
         }
     }
@@ -456,16 +459,6 @@
     List<BufferInfo *> &inQueue = getPortQueue(kInputPortIndex);
     List<BufferInfo *> &outQueue = getPortQueue(kOutputPortIndex);
 
-    /* If input EOS is seen and decoder is not in flush mode,
-     * set the decoder in flush mode.
-     * There can be a case where EOS is sent along with last picture data
-     * In that case, only after decoding that input data, decoder has to be
-     * put in flush. This case is handled here  */
-
-    if (mReceivedEOS && !mIsInFlush) {
-        setFlushMode();
-    }
-
     while (!outQueue.empty()) {
         BufferInfo *inInfo;
         OMX_BUFFERHEADERTYPE *inHeader;
@@ -540,6 +533,25 @@
             IV_API_CALL_STATUS_T status;
             status = ivdec_api_function(mCodecCtx, (void *)&s_dec_ip, (void *)&s_dec_op);
 
+            bool unsupportedResolution =
+                (IVD_STREAM_WIDTH_HEIGHT_NOT_SUPPORTED == (s_dec_op.u4_error_code & 0xFF));
+
+            /* Check for unsupported dimensions */
+            if (unsupportedResolution) {
+                ALOGE("Unsupported resolution : %dx%d", mWidth, mHeight);
+                notify(OMX_EventError, OMX_ErrorUnsupportedSetting, 0, NULL);
+                mSignalledError = true;
+                return;
+            }
+
+            bool allocationFailed = (IVD_MEM_ALLOC_FAILED == (s_dec_op.u4_error_code & 0xFF));
+            if (allocationFailed) {
+                ALOGE("Allocation failure in decoder");
+                notify(OMX_EventError, OMX_ErrorUnsupportedSetting, 0, NULL);
+                mSignalledError = true;
+                return;
+            }
+
             bool resChanged = (IVD_RES_CHANGED == (s_dec_op.u4_error_code & 0xFF));
 
             GETTIME(&mTimeEnd, NULL);
@@ -600,7 +612,7 @@
                 outInfo = NULL;
                 notifyFillBufferDone(outHeader);
                 outHeader = NULL;
-            } else {
+            } else if (mIsInFlush) {
                 /* If in flush mode and no output is returned by the codec,
                  * then come out of flush mode */
                 mIsInFlush = false;
@@ -621,6 +633,16 @@
             }
         }
 
+        /* If input EOS is seen and decoder is not in flush mode,
+         * set the decoder in flush mode.
+         * There can be a case where EOS is sent along with last picture data
+         * In that case, only after decoding that input data, decoder has to be
+         * put in flush. This case is handled here  */
+
+        if (mReceivedEOS && !mIsInFlush) {
+            setFlushMode();
+        }
+
         // TODO: Handle more than one picture data
         if (inHeader != NULL) {
             inInfo->mOwnedByUs = false;
diff --git a/media/libstagefright/codecs/mpeg2dec/SoftMPEG2.cpp b/media/libstagefright/codecs/mpeg2dec/SoftMPEG2.cpp
index 27f860e..5210683 100644
--- a/media/libstagefright/codecs/mpeg2dec/SoftMPEG2.cpp
+++ b/media/libstagefright/codecs/mpeg2dec/SoftMPEG2.cpp
@@ -558,16 +558,6 @@
         setParams(mStride);
     }
 
-    /* If input EOS is seen and decoder is not in flush mode,
-     * set the decoder in flush mode.
-     * There can be a case where EOS is sent along with last picture data
-     * In that case, only after decoding that input data, decoder has to be
-     * put in flush. This case is handled here  */
-
-    if (mReceivedEOS && !mIsInFlush) {
-        setFlushMode();
-    }
-
     while (!outQueue.empty()) {
         BufferInfo *inInfo;
         OMX_BUFFERHEADERTYPE *inHeader;
@@ -751,7 +741,7 @@
                     notifyFillBufferDone(outHeader);
                     outHeader = NULL;
                 }
-            } else {
+            } else if (mIsInFlush) {
                 /* If in flush mode and no output is returned by the codec,
                  * then come out of flush mode */
                 mIsInFlush = false;
@@ -772,6 +762,16 @@
             }
         }
 
+        /* If input EOS is seen and decoder is not in flush mode,
+         * set the decoder in flush mode.
+         * There can be a case where EOS is sent along with last picture data
+         * In that case, only after decoding that input data, decoder has to be
+         * put in flush. This case is handled here  */
+
+        if (mReceivedEOS && !mIsInFlush) {
+            setFlushMode();
+        }
+
         // TODO: Handle more than one picture data
         if (inHeader != NULL) {
             inInfo->mOwnedByUs = false;
diff --git a/media/libstagefright/codecs/on2/dec/SoftVPX.cpp b/media/libstagefright/codecs/on2/dec/SoftVPX.cpp
index ba1f263..8022467 100644
--- a/media/libstagefright/codecs/on2/dec/SoftVPX.cpp
+++ b/media/libstagefright/codecs/on2/dec/SoftVPX.cpp
@@ -233,10 +233,17 @@
         // (specified in http://www.webmproject.org/vp9/profiles/). Ignore it if
         // it was passed.
         if (inHeader->nFlags & OMX_BUFFERFLAG_CODECCONFIG) {
-            inQueue.erase(inQueue.begin());
-            inInfo->mOwnedByUs = false;
-            notifyEmptyBufferDone(inHeader);
-            continue;
+            // Only ignore CSD buffer for VP9.
+            if (mMode == MODE_VP9) {
+                inQueue.erase(inQueue.begin());
+                inInfo->mOwnedByUs = false;
+                notifyEmptyBufferDone(inHeader);
+                continue;
+            } else {
+                // Tolerate the CSD buffer for VP8. This is a workaround
+                // for b/28689536.
+                ALOGW("WARNING: Got CSD buffer for VP8.");
+            }
         }
 
         mTimeStamps[mTimeStampIdx] = inHeader->nTimeStamp;
@@ -257,7 +264,7 @@
                 notifyEmptyBufferDone(inHeader);
                 inHeader = NULL;
             } else {
-                ALOGE("on2 decoder failed to decode frame.");
+                ALOGE("on2 decoder failed to decode frame. err: %d", err);
                 notify(OMX_EventError, OMX_ErrorUndefined, 0, NULL);
                 return;
             }
diff --git a/media/libstagefright/codecs/on2/h264dec/source/h264bsd_dpb.c b/media/libstagefright/codecs/on2/h264dec/source/h264bsd_dpb.c
index 9517d0a..799bd16 100644
--- a/media/libstagefright/codecs/on2/h264dec/source/h264bsd_dpb.c
+++ b/media/libstagefright/codecs/on2/h264dec/source/h264bsd_dpb.c
@@ -60,6 +60,7 @@
 #include "h264bsd_util.h"
 #include "basetype.h"
 
+#include <log/log.h>
 /*------------------------------------------------------------------------------
     2. External compiler flags
 --------------------------------------------------------------------------------
@@ -998,6 +999,13 @@
     ASSERT(maxFrameNum);
     ASSERT(dpbSize);
 
+    // see comment in loop below about size calculation
+    if (picSizeInMbs > (UINT32_MAX - 32 - 15) / 384) {
+        ALOGE("b/28533562");
+        android_errorWriteLog(0x534e4554, "28533562");
+        return(MEMORY_ALLOCATION_ERROR);
+    }
+
     dpb->maxLongTermFrameIdx = NO_LONG_TERM_FRAME_INDICES;
     dpb->maxRefFrames        = MAX(maxRefFrames, 1);
     if (noReordering)
diff --git a/media/libstagefright/foundation/ABitReader.cpp b/media/libstagefright/foundation/ABitReader.cpp
index 1582b67..c5db9e6 100644
--- a/media/libstagefright/foundation/ABitReader.cpp
+++ b/media/libstagefright/foundation/ABitReader.cpp
@@ -24,14 +24,18 @@
     : mData(data),
       mSize(size),
       mReservoir(0),
-      mNumBitsLeft(0) {
+      mNumBitsLeft(0),
+      mOverRead(false) {
 }
 
 ABitReader::~ABitReader() {
 }
 
-void ABitReader::fillReservoir() {
-    CHECK_GT(mSize, 0u);
+bool ABitReader::fillReservoir() {
+    if (mSize == 0) {
+        mOverRead = true;
+        return false;
+    }
 
     mReservoir = 0;
     size_t i;
@@ -44,15 +48,32 @@
 
     mNumBitsLeft = 8 * i;
     mReservoir <<= 32 - mNumBitsLeft;
+    return true;
 }
 
 uint32_t ABitReader::getBits(size_t n) {
-    CHECK_LE(n, 32u);
+    uint32_t ret;
+    CHECK(getBitsGraceful(n, &ret));
+    return ret;
+}
+
+uint32_t ABitReader::getBitsWithFallback(size_t n, uint32_t fallback) {
+    uint32_t ret = fallback;
+    (void)getBitsGraceful(n, &ret);
+    return ret;
+}
+
+bool ABitReader::getBitsGraceful(size_t n, uint32_t *out) {
+    if (n > 32) {
+        return false;
+    }
 
     uint32_t result = 0;
     while (n > 0) {
         if (mNumBitsLeft == 0) {
-            fillReservoir();
+            if (!fillReservoir()) {
+                return false;
+            }
         }
 
         size_t m = n;
@@ -67,21 +88,30 @@
         n -= m;
     }
 
-    return result;
+    *out = result;
+    return true;
 }
 
-void ABitReader::skipBits(size_t n) {
+bool ABitReader::skipBits(size_t n) {
+    uint32_t dummy;
     while (n > 32) {
-        getBits(32);
+        if (!getBitsGraceful(32, &dummy)) {
+            return false;
+        }
         n -= 32;
     }
 
     if (n > 0) {
-        getBits(n);
+        return getBitsGraceful(n, &dummy);
     }
+    return true;
 }
 
 void ABitReader::putBits(uint32_t x, size_t n) {
+    if (mOverRead) {
+        return;
+    }
+
     CHECK_LE(n, 32u);
 
     while (mNumBitsLeft + n > 32) {
@@ -139,8 +169,11 @@
     return (numBitsRemaining <= 0);
 }
 
-void NALBitReader::fillReservoir() {
-    CHECK_GT(mSize, 0u);
+bool NALBitReader::fillReservoir() {
+    if (mSize == 0) {
+        mOverRead = true;
+        return false;
+    }
 
     mReservoir = 0;
     size_t i = 0;
@@ -165,6 +198,7 @@
 
     mNumBitsLeft = 8 * i;
     mReservoir <<= 32 - mNumBitsLeft;
+    return true;
 }
 
 }  // namespace android
diff --git a/media/libstagefright/include/HevcUtils.h b/media/libstagefright/include/HevcUtils.h
index 0d7bb2f..0f59631 100644
--- a/media/libstagefright/include/HevcUtils.h
+++ b/media/libstagefright/include/HevcUtils.h
@@ -56,10 +56,24 @@
     kBitDepthLumaMinus8,
     // uint8_t
     kBitDepthChromaMinus8,
+    // uint8_t
+    kVideoFullRangeFlag,
+    // uint8_t
+    kColourPrimaries,
+    // uint8_t
+    kTransferCharacteristics,
+    // uint8_t
+    kMatrixCoeffs,
 };
 
 class HevcParameterSets {
 public:
+    enum Info : uint32_t {
+        kInfoNone                = 0,
+        kInfoIsHdr               = 1 << 0,
+        kInfoHasColorDescription = 1 << 1,
+    };
+
     HevcParameterSets();
 
     status_t addNalUnit(const uint8_t* data, size_t size);
@@ -77,6 +91,8 @@
     bool write(size_t index, uint8_t* dest, size_t size);
     status_t makeHvcc(uint8_t *hvcc, size_t *hvccSize, size_t nalSizeLength);
 
+    Info getInfo() const { return mInfo; }
+
 private:
     status_t parseVps(const uint8_t* data, size_t size);
     status_t parseSps(const uint8_t* data, size_t size);
@@ -84,6 +100,7 @@
 
     KeyedVector<uint32_t, uint64_t> mParams;
     Vector<sp<ABuffer>> mNalUnits;
+    Info mInfo;
 
     DISALLOW_EVIL_CONSTRUCTORS(HevcParameterSets);
 };
diff --git a/media/libstagefright/include/avc_utils.h b/media/libstagefright/include/avc_utils.h
index dafa07e..7465b35 100644
--- a/media/libstagefright/include/avc_utils.h
+++ b/media/libstagefright/include/avc_utils.h
@@ -47,8 +47,34 @@
         int32_t *width, int32_t *height,
         int32_t *sarWidth = NULL, int32_t *sarHeight = NULL);
 
+// Gets and returns an unsigned exp-golomb (ue) value from a bit reader |br|. Aborts if the value
+// is more than 64 bits long (>=0xFFFF (!)) or the bit reader overflows.
 unsigned parseUE(ABitReader *br);
 
+// Gets and returns a signed exp-golomb (se) value from a bit reader |br|. Aborts if the value is
+// more than 64 bits long (>0x7FFF || <-0x7FFF (!)) or the bit reader overflows.
+signed parseSE(ABitReader *br);
+
+// Gets an unsigned exp-golomb (ue) value from a bit reader |br|, and returns it if it was
+// successful. Returns |fallback| if it was unsuccessful. Note: if the value was longer than 64
+// bits, it reads past the value and still returns |fallback|.
+unsigned parseUEWithFallback(ABitReader *br, unsigned fallback);
+
+// Gets a signed exp-golomb (se) value from a bit reader |br|, and returns it if it was successful.
+// Returns |fallback| if it was unsuccessful. Note: if the value was longer than 64 bits, it reads
+// past the value and still returns |fallback|.
+signed parseSEWithFallback(ABitReader *br, signed fallback);
+
+// Skips an unsigned exp-golomb (ue) value from bit reader |br|.
+inline void skipUE(ABitReader *br) {
+    (void)parseUEWithFallback(br, 0U);
+}
+
+// Skips a signed exp-golomb (se) value from bit reader |br|.
+inline void skipSE(ABitReader *br) {
+    (void)parseSEWithFallback(br, 0);
+}
+
 status_t getNextNALUnit(
         const uint8_t **_data, size_t *_size,
         const uint8_t **nalStart, size_t *nalSize,
diff --git a/media/libstagefright/matroska/MatroskaExtractor.cpp b/media/libstagefright/matroska/MatroskaExtractor.cpp
index 434be86..0f9430e 100644
--- a/media/libstagefright/matroska/MatroskaExtractor.cpp
+++ b/media/libstagefright/matroska/MatroskaExtractor.cpp
@@ -24,6 +24,7 @@
 #include <media/stagefright/foundation/ADebug.h>
 #include <media/stagefright/foundation/AUtils.h>
 #include <media/stagefright/foundation/ABuffer.h>
+#include <media/stagefright/foundation/ColorUtils.h>
 #include <media/stagefright/foundation/hexdump.h>
 #include <media/stagefright/DataSource.h>
 #include <media/stagefright/MediaBuffer.h>
@@ -1058,6 +1059,111 @@
     return OK;
 }
 
+static inline bool isValidInt32ColourValue(long long value) {
+    return value != mkvparser::Colour::kValueNotPresent
+            && value >= INT32_MIN
+            && value <= INT32_MAX;
+}
+
+static inline bool isValidUint16ColourValue(long long value) {
+    return value != mkvparser::Colour::kValueNotPresent
+            && value >= 0
+            && value <= UINT16_MAX;
+}
+
+static inline bool isValidPrimary(const mkvparser::PrimaryChromaticity *primary) {
+    return primary != NULL && primary->x >= 0 && primary->x <= 1
+             && primary->y >= 0 && primary->y <= 1;
+}
+
+void MatroskaExtractor::getColorInformation(
+        const mkvparser::VideoTrack *vtrack, sp<MetaData> &meta) {
+    const mkvparser::Colour *color = vtrack->GetColour();
+    if (color == NULL) {
+        return;
+    }
+
+    // Color Aspects
+    {
+        int32_t primaries = 2; // ISO unspecified
+        int32_t transfer = 2; // ISO unspecified
+        int32_t coeffs = 2; // ISO unspecified
+        bool fullRange = false; // default
+        bool rangeSpecified = false;
+
+        if (isValidInt32ColourValue(color->primaries)) {
+            primaries = color->primaries;
+        }
+        if (isValidInt32ColourValue(color->transfer_characteristics)) {
+            transfer = color->transfer_characteristics;
+        }
+        if (isValidInt32ColourValue(color->matrix_coefficients)) {
+            coeffs = color->matrix_coefficients;
+        }
+        if (color->range != mkvparser::Colour::kValueNotPresent
+                && color->range != 0 /* MKV unspecified */) {
+            // We only support MKV broadcast range (== limited) and full range.
+            // We treat all other value as the default limited range.
+            fullRange = color->range == 2 /* MKV fullRange */;
+            rangeSpecified = true;
+        }
+
+        ColorAspects aspects;
+        ColorUtils::convertIsoColorAspectsToCodecAspects(
+                primaries, transfer, coeffs, fullRange, aspects);
+        meta->setInt32(kKeyColorPrimaries, aspects.mPrimaries);
+        meta->setInt32(kKeyTransferFunction, aspects.mTransfer);
+        meta->setInt32(kKeyColorMatrix, aspects.mMatrixCoeffs);
+        meta->setInt32(
+                kKeyColorRange, rangeSpecified ? aspects.mRange : ColorAspects::RangeUnspecified);
+    }
+
+    // HDR Static Info
+    {
+        HDRStaticInfo info, nullInfo; // nullInfo is a fully unspecified static info
+        memset(&info, 0, sizeof(info));
+        memset(&nullInfo, 0, sizeof(nullInfo));
+        if (isValidUint16ColourValue(color->max_cll)) {
+            info.sType1.mMaxContentLightLevel = color->max_cll;
+        }
+        if (isValidUint16ColourValue(color->max_fall)) {
+            info.sType1.mMaxFrameAverageLightLevel = color->max_fall;
+        }
+        const mkvparser::MasteringMetadata *mastering = color->mastering_metadata;
+        if (mastering != NULL) {
+            // Convert matroska values to HDRStaticInfo equivalent values for each fully specified
+            // group. See CTA-861.3 section 3.2.1 for more info.
+            if (mastering->luminance_max >= 0.5 && mastering->luminance_max < 65535.5) {
+                info.sType1.mMaxDisplayLuminance = (uint16_t)(mastering->luminance_max + 0.5);
+            }
+            if (mastering->luminance_min >= 0.00005 && mastering->luminance_min < 6.55355) {
+                // HDRStaticInfo Type1 stores min luminance scaled 10000:1
+                info.sType1.mMinDisplayLuminance =
+                    (uint16_t)(10000 * mastering->luminance_min + 0.5);
+            }
+            // HDRStaticInfo Type1 stores primaries scaled 50000:1
+            if (isValidPrimary(mastering->white_point)) {
+                info.sType1.mW.x = (uint16_t)(50000 * mastering->white_point->x + 0.5);
+                info.sType1.mW.y = (uint16_t)(50000 * mastering->white_point->y + 0.5);
+            }
+            if (isValidPrimary(mastering->r) && isValidPrimary(mastering->g)
+                    && isValidPrimary(mastering->b)) {
+                info.sType1.mR.x = (uint16_t)(50000 * mastering->r->x + 0.5);
+                info.sType1.mR.y = (uint16_t)(50000 * mastering->r->y + 0.5);
+                info.sType1.mG.x = (uint16_t)(50000 * mastering->g->x + 0.5);
+                info.sType1.mG.y = (uint16_t)(50000 * mastering->g->y + 0.5);
+                info.sType1.mB.x = (uint16_t)(50000 * mastering->b->x + 0.5);
+                info.sType1.mB.y = (uint16_t)(50000 * mastering->b->y + 0.5);
+            }
+        }
+        // Only advertise static info if at least one of the groups have been specified.
+        if (memcmp(&info, &nullInfo, sizeof(info)) != 0) {
+            info.mID = HDRStaticInfo::kType1;
+            meta->setData(kKeyHdrStaticInfo, 'hdrS', &info, sizeof(info));
+        }
+    }
+}
+
 void MatroskaExtractor::addTracks() {
     const mkvparser::Tracks *tracks = mSegment->GetTracks();
 
@@ -1127,6 +1233,9 @@
 
                 meta->setInt32(kKeyWidth, vtrack->GetWidth());
                 meta->setInt32(kKeyHeight, vtrack->GetHeight());
+
+                getColorInformation(vtrack, meta);
+
                 break;
             }
 
diff --git a/media/libstagefright/matroska/MatroskaExtractor.h b/media/libstagefright/matroska/MatroskaExtractor.h
index 9406829..665e68e 100644
--- a/media/libstagefright/matroska/MatroskaExtractor.h
+++ b/media/libstagefright/matroska/MatroskaExtractor.h
@@ -29,6 +29,7 @@
 struct AMessage;
 class String8;
 
+class MetaData;
 struct DataSourceReader;
 struct MatroskaSource;
 
@@ -80,7 +81,7 @@
     status_t synthesizeAVCC(TrackInfo *trackInfo, size_t index);
     void addTracks();
     void findThumbnails();
-
+    void getColorInformation(const mkvparser::VideoTrack *vtrack, sp<MetaData> &meta);
     bool isLiveStreaming() const;
 
     MatroskaExtractor(const MatroskaExtractor &);
diff --git a/media/libstagefright/rtsp/MyHandler.h b/media/libstagefright/rtsp/MyHandler.h
index 6fa83fa..f9a9ab9 100644
--- a/media/libstagefright/rtsp/MyHandler.h
+++ b/media/libstagefright/rtsp/MyHandler.h
@@ -64,6 +64,10 @@
 
 static int64_t kPauseDelayUs = 3000000ll;
 
+// The allowed maximum number of stale access units at the beginning of
+// a new sequence.
+static int32_t kMaxAllowedStaleAccessUnits = 20;
+
 namespace android {
 
 static bool GetAttribute(const char *s, const char *key, AString *value) {
@@ -1048,16 +1052,39 @@
                     break;
                 }
 
+                if (track->mNewSegment) {
+                    // The sequence number from RTP packet has only 16 bits and is extended
+                    // by ARTPSource. Only the low 16 bits of seq in RTP-Info of reply of
+                    // RTSP "PLAY" command should be used to detect the first RTP packet
+                    // after seeking.
+                    if (track->mAllowedStaleAccessUnits > 0) {
+                        if ((((seqNum ^ track->mFirstSeqNumInSegment) & 0xffff) != 0)) {
+                            // Not the first rtp packet of the stream after seeking, discarding.
+                            track->mAllowedStaleAccessUnits--;
+                            ALOGV("discarding stale access unit (0x%x : 0x%x)",
+                                 seqNum, track->mFirstSeqNumInSegment);
+                            break;
+                        }
+                    } else { // track->mAllowedStaleAccessUnits <= 0
+                        mNumAccessUnitsReceived = 0;
+                        ALOGW_IF(track->mAllowedStaleAccessUnits == 0,
+                             "Still no first rtp packet after %d stale ones",
+                             kMaxAllowedStaleAccessUnits);
+                        track->mAllowedStaleAccessUnits = -1;
+                        break;
+                    }
+
+                    // Now found the first rtp packet of the stream after seeking.
+                    track->mFirstSeqNumInSegment = seqNum;
+                    track->mNewSegment = false;
+                }
+
                 if (seqNum < track->mFirstSeqNumInSegment) {
                     ALOGV("dropping stale access-unit (%d < %d)",
                          seqNum, track->mFirstSeqNumInSegment);
                     break;
                 }
 
-                if (track->mNewSegment) {
-                    track->mNewSegment = false;
-                }
-
                 onAccessUnitComplete(trackIndex, accessUnit);
                 break;
             }
@@ -1336,6 +1363,12 @@
                 mPausing = false;
                 mSeekPending = false;
 
+                // Discard all stale access units.
+                for (size_t i = 0; i < mTracks.size(); ++i) {
+                    TrackInfo *track = &mTracks.editItemAt(i);
+                    track->mPackets.clear();
+                }
+
                 sp<AMessage> msg = mNotify->dup();
                 msg->setInt32("what", kWhatSeekDone);
                 msg->post();
@@ -1497,6 +1530,7 @@
             TrackInfo *info = &mTracks.editItemAt(trackIndex);
             info->mFirstSeqNumInSegment = seq;
             info->mNewSegment = true;
+            info->mAllowedStaleAccessUnits = kMaxAllowedStaleAccessUnits;
 
             CHECK(GetAttribute((*it).c_str(), "rtptime", &val));
 
@@ -1540,6 +1574,7 @@
         bool mUsingInterleavedTCP;
         uint32_t mFirstSeqNumInSegment;
         bool mNewSegment;
+        int32_t mAllowedStaleAccessUnits;
 
         uint32_t mRTPAnchor;
         int64_t mNTPAnchorUs;
@@ -1623,6 +1658,7 @@
         info->mUsingInterleavedTCP = false;
         info->mFirstSeqNumInSegment = 0;
         info->mNewSegment = true;
+        info->mAllowedStaleAccessUnits = kMaxAllowedStaleAccessUnits;
         info->mRTPSocket = -1;
         info->mRTCPSocket = -1;
         info->mRTPAnchor = 0;
diff --git a/media/libstagefright/webm/WebmConstants.h b/media/libstagefright/webm/WebmConstants.h
index c53f458..3111559 100644
--- a/media/libstagefright/webm/WebmConstants.h
+++ b/media/libstagefright/webm/WebmConstants.h
@@ -98,6 +98,24 @@
     kMkvDisplayHeight = 0x54BA,
     kMkvDisplayUnit = 0x54B2,
     kMkvAspectRatioType = 0x54B3,
+    kMkvColour = 0x55B0,
+    kMkvMatrixCoefficients = 0x55B1,
+    kMkvRange = 0x55B9,
+    kMkvTransferCharacteristics = 0x55BA,
+    kMkvPrimaries = 0x55BB,
+    kMkvMaxCLL = 0x55BC,
+    kMkvMaxFALL = 0x55BD,
+    kMkvMasteringMetadata = 0x55D0,
+    kMkvPrimaryRChromaticityX = 0x55D1,
+    kMkvPrimaryRChromaticityY = 0x55D2,
+    kMkvPrimaryGChromaticityX = 0x55D3,
+    kMkvPrimaryGChromaticityY = 0x55D4,
+    kMkvPrimaryBChromaticityX = 0x55D5,
+    kMkvPrimaryBChromaticityY = 0x55D6,
+    kMkvWhitePointChromaticityX = 0x55D7,
+    kMkvWhitePointChromaticityY = 0x55D8,
+    kMkvLuminanceMax = 0x55D9,
+    kMkvLuminanceMin = 0x55DA,
     kMkvFrameRate = 0x2383E3,
     kMkvAudio = 0xE1,
     kMkvSamplingFrequency = 0xB5,
diff --git a/media/libstagefright/webm/WebmElement.cpp b/media/libstagefright/webm/WebmElement.cpp
index f454bf6..a5120b9 100644
--- a/media/libstagefright/webm/WebmElement.cpp
+++ b/media/libstagefright/webm/WebmElement.cpp
@@ -22,6 +22,8 @@
 #include "WebmConstants.h"
 
 #include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/foundation/ColorUtils.h>
+#include <media/stagefright/MetaData.h>
 #include <utils/Log.h>
 
 #include <string.h>
@@ -341,6 +343,7 @@
         const char *codec,
         uint64_t width,
         uint64_t height,
+        const sp<MetaData> &meta,
         uint64_t uid,
         bool lacing,
         const char *lang) {
@@ -358,10 +361,112 @@
             kVideoType,
             trackEntryFields);
 
+    // CSD
+    uint32_t type;
+    const void *data;
+    size_t size;
+    if (meta->findData(kKeyVp9CodecPrivate, &type, &data, &size)) {
+        sp<ABuffer> buf = new ABuffer((void *)data, size); // note: buf does not own data
+        trackEntryFields.push_back(new WebmBinary(kMkvCodecPrivate, buf));
+    }
+
     List<sp<WebmElement> > videoInfo;
     videoInfo.push_back(new WebmUnsigned(kMkvPixelWidth, width));
     videoInfo.push_back(new WebmUnsigned(kMkvPixelHeight, height));
 
+    // Color aspects
+    {
+        List<sp<WebmElement> > colorInfo;
+
+        ColorAspects aspects;
+        aspects.mPrimaries = ColorAspects::PrimariesUnspecified;
+        aspects.mTransfer = ColorAspects::TransferUnspecified;
+        aspects.mMatrixCoeffs = ColorAspects::MatrixUnspecified;
+        aspects.mRange = ColorAspects::RangeUnspecified;
+        bool havePrimaries = meta->findInt32(kKeyColorPrimaries, (int32_t*)&aspects.mPrimaries);
+        bool haveTransfer = meta->findInt32(kKeyTransferFunction, (int32_t*)&aspects.mTransfer);
+        bool haveCoeffs = meta->findInt32(kKeyColorMatrix, (int32_t*)&aspects.mMatrixCoeffs);
+        bool haveRange = meta->findInt32(kKeyColorRange, (int32_t*)&aspects.mRange);
+
+        int32_t primaries, transfer, coeffs;
+        bool fullRange;
+        ColorUtils::convertCodecColorAspectsToIsoAspects(
+                aspects, &primaries, &transfer, &coeffs, &fullRange);
+        if (havePrimaries) {
+            colorInfo.push_back(new WebmUnsigned(kMkvPrimaries, primaries));
+        }
+        if (haveTransfer) {
+            colorInfo.push_back(new WebmUnsigned(kMkvTransferCharacteristics, transfer));
+        }
+        if (haveCoeffs) {
+            colorInfo.push_back(new WebmUnsigned(kMkvMatrixCoefficients, coeffs));
+        }
+        if (haveRange) {
+            colorInfo.push_back(new WebmUnsigned(kMkvRange, fullRange ? 2 : 1));
+        }
+
+        // Also add HDR static info, some of which goes to MasteringMetadata element
+
+        const HDRStaticInfo *info;
+        uint32_t type;
+        const void *data;
+        size_t size;
+        if (meta->findData(kKeyHdrStaticInfo, &type, &data, &size)
+                && type == 'hdrS' && size == sizeof(*info)) {
+            info = (const HDRStaticInfo*)data;
+            if (info->mID == HDRStaticInfo::kType1) {
+                List<sp<WebmElement> > masteringInfo;
+
+                // convert HDRStaticInfo values to matroska equivalent values for each non-0 group
+                if (info->sType1.mMaxFrameAverageLightLevel) {
+                    colorInfo.push_back(new WebmUnsigned(
+                            kMkvMaxFALL, info->sType1.mMaxFrameAverageLightLevel));
+                }
+                if (info->sType1.mMaxContentLightLevel) {
+                    colorInfo.push_back(new WebmUnsigned(
+                            kMkvMaxCLL, info->sType1.mMaxContentLightLevel));
+                }
+                if (info->sType1.mMinDisplayLuminance) {
+                    // HDRStaticInfo Type1 stores min luminance scaled 10000:1
+                    masteringInfo.push_back(new WebmFloat(
+                            kMkvLuminanceMin, info->sType1.mMinDisplayLuminance * 0.0001));
+                }
+                if (info->sType1.mMaxDisplayLuminance) {
+                    masteringInfo.push_back(new WebmFloat(
+                            kMkvLuminanceMax, (float)info->sType1.mMaxDisplayLuminance));
+                }
+                // HDRStaticInfo Type1 stores primaries scaled 50000:1
+                if (info->sType1.mW.x || info->sType1.mW.y) {
+                    masteringInfo.push_back(new WebmFloat(
+                            kMkvWhitePointChromaticityX, info->sType1.mW.x * 0.00002));
+                    masteringInfo.push_back(new WebmFloat(
+                            kMkvWhitePointChromaticityY, info->sType1.mW.y * 0.00002));
+                }
+                if (info->sType1.mR.x || info->sType1.mR.y || info->sType1.mG.x
+                        || info->sType1.mG.y || info->sType1.mB.x || info->sType1.mB.y) {
+                    masteringInfo.push_back(new WebmFloat(
+                            kMkvPrimaryRChromaticityX, info->sType1.mR.x * 0.00002));
+                    masteringInfo.push_back(new WebmFloat(
+                            kMkvPrimaryRChromaticityY, info->sType1.mR.y * 0.00002));
+                    masteringInfo.push_back(new WebmFloat(
+                            kMkvPrimaryGChromaticityX, info->sType1.mG.x * 0.00002));
+                    masteringInfo.push_back(new WebmFloat(
+                            kMkvPrimaryGChromaticityY, info->sType1.mG.y * 0.00002));
+                    masteringInfo.push_back(new WebmFloat(
+                            kMkvPrimaryBChromaticityX, info->sType1.mB.x * 0.00002));
+                    masteringInfo.push_back(new WebmFloat(
+                            kMkvPrimaryBChromaticityY, info->sType1.mB.y * 0.00002));
+                }
+                if (masteringInfo.size()) {
+                    colorInfo.push_back(new WebmMaster(kMkvMasteringMetadata, masteringInfo));
+                }
+            }
+        }
+        if (colorInfo.size()) {
+            videoInfo.push_back(new WebmMaster(kMkvColour, colorInfo));
+        }
+    }
+
     trackEntryFields.push_back(new WebmMaster(kMkvVideo, videoInfo));
     return new WebmMaster(kMkvTrackEntry, trackEntryFields);
 }
diff --git a/media/libstagefright/webm/WebmElement.h b/media/libstagefright/webm/WebmElement.h
index 456c3c7..4e90793 100644
--- a/media/libstagefright/webm/WebmElement.h
+++ b/media/libstagefright/webm/WebmElement.h
@@ -24,6 +24,8 @@
 
 namespace android {
 
+class MetaData;
+
 struct WebmElement : public LightRefBase<WebmElement> {
     const uint64_t mId, mSize;
 
@@ -60,6 +62,7 @@
             const char *codec,
             uint64_t width,
             uint64_t height,
+            const sp<MetaData> &md,
             uint64_t uid = 0,
             bool lacing = false,
             const char *lang = "und");
diff --git a/media/libstagefright/webm/WebmWriter.cpp b/media/libstagefright/webm/WebmWriter.cpp
index 511260a..e58964d 100644
--- a/media/libstagefright/webm/WebmWriter.cpp
+++ b/media/libstagefright/webm/WebmWriter.cpp
@@ -84,9 +84,13 @@
 sp<WebmElement> WebmWriter::videoTrack(const sp<MetaData>& md) {
     int32_t width, height;
     const char *mimeType;
-    CHECK(md->findInt32(kKeyWidth, &width));
-    CHECK(md->findInt32(kKeyHeight, &height));
-    CHECK(md->findCString(kKeyMIMEType, &mimeType));
+    if (!md->findInt32(kKeyWidth, &width)
+            || !md->findInt32(kKeyHeight, &height)
+            || !md->findCString(kKeyMIMEType, &mimeType)) {
+        ALOGE("Missing format keys for video track");
+        md->dumpToLog();
+        return NULL;
+    }
     const char *codec;
     if (!strncasecmp(
             mimeType,
@@ -99,9 +103,10 @@
             strlen(MEDIA_MIMETYPE_VIDEO_VP9))) {
         codec = "V_VP9";
     } else {
-        CHECK(!"Unsupported codec");
+        ALOGE("Unsupported codec: %s", mimeType);
+        return NULL;
     }
-    return WebmElement::VideoTrackEntry(codec, width, height);
+    return WebmElement::VideoTrackEntry(codec, width, height, md);
 }
 
 // static
@@ -114,10 +119,14 @@
     const void *headerData3;
     size_t headerSize1, headerSize2 = sizeof(headerData2), headerSize3;
 
-    CHECK(md->findInt32(kKeyChannelCount, &nChannels));
-    CHECK(md->findInt32(kKeySampleRate, &samplerate));
-    CHECK(md->findData(kKeyVorbisInfo, &type, &headerData1, &headerSize1));
-    CHECK(md->findData(kKeyVorbisBooks, &type, &headerData3, &headerSize3));
+    if (!md->findInt32(kKeyChannelCount, &nChannels)
+            || !md->findInt32(kKeySampleRate, &samplerate)
+            || !md->findData(kKeyVorbisInfo, &type, &headerData1, &headerSize1)
+            || !md->findData(kKeyVorbisBooks, &type, &headerData3, &headerSize3)) {
+        ALOGE("Missing format keys for audio track");
+        md->dumpToLog();
+        return NULL;
+    }
 
     size_t codecPrivateSize = 1;
     codecPrivateSize += XiphLaceCodeLen(headerSize1);
@@ -243,6 +252,11 @@
     mFd = -1;
     mInitCheck = NO_INIT;
     mStarted = false;
+    for (size_t ix = 0; ix < kMaxStreams; ++ix) {
+        mStreams[ix].mTrackEntry.clear();
+        mStreams[ix].mSource.clear();
+    }
+    mStreamsInOrder.clear();
 }
 
 status_t WebmWriter::reset() {
@@ -275,6 +289,8 @@
         if (durationUs < minDurationUs) {
             minDurationUs = durationUs;
         }
+
+        mStreams[i].mThread.clear();
     }
 
     if (numTracks() > 1) {
@@ -389,6 +405,11 @@
     // Go ahead to add the track.
     mStreams[streamIndex].mSource = source;
     mStreams[streamIndex].mTrackEntry = mStreams[streamIndex].mMakeTrack(source->getFormat());
+    if (mStreams[streamIndex].mTrackEntry == NULL) {
+        mStreams[streamIndex].mSource.clear();
+        return BAD_VALUE;
+    }
+    mStreamsInOrder.push_back(mStreams[streamIndex].mTrackEntry);
 
     return OK;
 }
@@ -429,7 +450,10 @@
             mTimeCodeScale = tcsl;
         }
     }
-    CHECK_GT(mTimeCodeScale, 0);
+    if (mTimeCodeScale == 0) {
+        ALOGE("movie time scale is 0");
+        return BAD_VALUE;
+    }
     ALOGV("movie time scale: %" PRIu64, mTimeCodeScale);
 
     /*
@@ -451,10 +475,8 @@
     info = WebmElement::SegmentInfo(mTimeCodeScale, 0);
 
     List<sp<WebmElement> > children;
-    for (size_t i = 0; i < kMaxStreams; ++i) {
-        if (mStreams[i].mTrackEntry != NULL) {
-            children.push_back(mStreams[i].mTrackEntry);
-        }
+    for (size_t i = 0; i < mStreamsInOrder.size(); ++i) {
+        children.push_back(mStreamsInOrder[i]);
     }
     tracks = new WebmMaster(kMkvTracks, children);
 
diff --git a/media/libstagefright/webm/WebmWriter.h b/media/libstagefright/webm/WebmWriter.h
index 4a7f506..dd1fba3 100644
--- a/media/libstagefright/webm/WebmWriter.h
+++ b/media/libstagefright/webm/WebmWriter.h
@@ -110,6 +110,7 @@
         }
     };
     WebmStream mStreams[kMaxStreams];
+    Vector<sp<WebmElement>> mStreamsInOrder;
 
     sp<WebmFrameSinkThread> mSinkThread;
 
diff --git a/media/ndk/NdkImageReader.cpp b/media/ndk/NdkImageReader.cpp
index 387a302..30aa7fb 100644
--- a/media/ndk/NdkImageReader.cpp
+++ b/media/ndk/NdkImageReader.cpp
@@ -95,10 +95,11 @@
 AImageReader::FrameListener::setImageListener(AImageReader_ImageListener* listener) {
     Mutex::Autolock _l(mLock);
     if (listener == nullptr) {
-        ALOGE("AImageReader: listener is null!");
-        return AMEDIA_ERROR_INVALID_PARAMETER;
+        mListener.context = nullptr;
+        mListener.onImageAvailable = nullptr;
+    } else {
+        mListener = *listener;
     }
-    mListener = *listener;
     return AMEDIA_OK;
 }
 
@@ -575,8 +576,8 @@
 media_status_t AImageReader_setImageListener(
         AImageReader* reader, AImageReader_ImageListener* listener) {
     ALOGV("%s", __FUNCTION__);
-    if (reader == nullptr || listener == nullptr) {
-        ALOGE("%s: invalid argument! read %p listener %p", __FUNCTION__, reader, listener);
+    if (reader == nullptr) {
+        ALOGE("%s: invalid argument! reader %p", __FUNCTION__, reader);
         return AMEDIA_ERROR_INVALID_PARAMETER;
     }
 
diff --git a/services/audioflinger/AudioFlinger.cpp b/services/audioflinger/AudioFlinger.cpp
index 2b0d4c8..d2fee81 100644
--- a/services/audioflinger/AudioFlinger.cpp
+++ b/services/audioflinger/AudioFlinger.cpp
@@ -58,7 +58,7 @@
 #include <powermanager/PowerManager.h>
 
 #include <media/IMediaLogService.h>
-
+#include <media/MemoryLeakTrackUtil.h>
 #include <media/nbaio/Pipe.h>
 #include <media/nbaio/PipeReader.h>
 #include <media/AudioParameter.h>
@@ -471,17 +471,25 @@
         }
 
         // check for optional arguments
+        bool dumpMem = false;
         bool unreachableMemory = false;
         for (const auto &arg : args) {
-            if (arg == String16("--unreachable")) {
+            if (arg == String16("-m")) {
+                dumpMem = true;
+            } else if (arg == String16("--unreachable")) {
                 unreachableMemory = true;
             }
         }
 
+        if (dumpMem) {
+            dprintf(fd, "\nDumping memory:\n");
+            std::string s = dumpMemoryAddresses(100 /* limit */);
+            write(fd, s.c_str(), s.size());
+        }
         if (unreachableMemory) {
             dprintf(fd, "\nDumping unreachable memory:\n");
             // TODO - should limit be an argument parameter?
-            std::string s = GetUnreachableMemoryString(true /* contents */, 10000 /* limit */);
+            std::string s = GetUnreachableMemoryString(true /* contents */, 100 /* limit */);
             write(fd, s.c_str(), s.size());
         }
     }
@@ -571,6 +579,7 @@
         IAudioFlinger::track_flags_t *flags,
         const sp<IMemory>& sharedBuffer,
         audio_io_handle_t output,
+        pid_t pid,
         pid_t tid,
         audio_session_t *sessionId,
         int clientUid,
@@ -582,6 +591,15 @@
     status_t lStatus;
     audio_session_t lSessionId;
 
+    const uid_t callingUid = IPCThreadState::self()->getCallingUid();
+    if (pid == -1 || !isTrustedCallingUid(callingUid)) {
+        const pid_t callingPid = IPCThreadState::self()->getCallingPid();
+        ALOGW_IF(pid != -1 && pid != callingPid,
+                 "%s uid %d pid %d tried to pass itself off as pid %d",
+                 __func__, callingUid, callingPid, pid);
+        pid = callingPid;
+    }
+
     // client AudioTrack::set already implements AUDIO_STREAM_DEFAULT => AUDIO_STREAM_MUSIC,
     // but if someone uses binder directly they could bypass that and cause us to crash
     if (uint32_t(streamType) >= AUDIO_STREAM_CNT) {
@@ -626,7 +644,6 @@
             goto Exit;
         }
 
-        pid_t pid = IPCThreadState::self()->getCallingPid();
         client = registerPid(pid);
 
         PlaybackThread *effectThread = NULL;
@@ -1447,6 +1464,7 @@
         const String16& opPackageName,
         size_t *frameCount,
         IAudioFlinger::track_flags_t *flags,
+        pid_t pid,
         pid_t tid,
         int clientUid,
         audio_session_t *sessionId,
@@ -1464,11 +1482,21 @@
     cblk.clear();
     buffers.clear();
 
+    bool updatePid = (pid == -1);
     const uid_t callingUid = IPCThreadState::self()->getCallingUid();
     if (!isTrustedCallingUid(callingUid)) {
         ALOGW_IF((uid_t)clientUid != callingUid,
                 "%s uid %d tried to pass itself off as %d", __FUNCTION__, callingUid, clientUid);
         clientUid = callingUid;
+        updatePid = true;
+    }
+
+    if (updatePid) {
+        const pid_t callingPid = IPCThreadState::self()->getCallingPid();
+        ALOGW_IF(pid != -1 && pid != callingPid,
+                 "%s uid %d pid %d tried to pass itself off as pid %d",
+                 __func__, callingUid, callingPid, pid);
+        pid = callingPid;
     }
 
     // check calling permissions
@@ -1508,7 +1536,6 @@
             goto Exit;
         }
 
-        pid_t pid = IPCThreadState::self()->getCallingPid();
         client = registerPid(pid);
 
         if (sessionId != NULL && *sessionId != AUDIO_SESSION_ALLOCATE) {
diff --git a/services/audioflinger/AudioFlinger.h b/services/audioflinger/AudioFlinger.h
index 96d38d0..59ad688 100644
--- a/services/audioflinger/AudioFlinger.h
+++ b/services/audioflinger/AudioFlinger.h
@@ -107,6 +107,7 @@
                                 IAudioFlinger::track_flags_t *flags,
                                 const sp<IMemory>& sharedBuffer,
                                 audio_io_handle_t output,
+                                pid_t pid,
                                 pid_t tid,
                                 audio_session_t *sessionId,
                                 int clientUid,
@@ -120,6 +121,7 @@
                                 const String16& opPackageName,
                                 size_t *pFrameCount,
                                 IAudioFlinger::track_flags_t *flags,
+                                pid_t pid,
                                 pid_t tid,
                                 int clientUid,
                                 audio_session_t *sessionId,
diff --git a/services/audioflinger/PatchPanel.cpp b/services/audioflinger/PatchPanel.cpp
index 89de68e..f8671b5 100644
--- a/services/audioflinger/PatchPanel.cpp
+++ b/services/audioflinger/PatchPanel.cpp
@@ -168,6 +168,12 @@
                 ALOGV("createAudioPatch() removing patch handle %d", *handle);
                 halHandle = mPatches[index]->mHalHandle;
                 Patch *removedPatch = mPatches[index];
+                if ((removedPatch->mRecordPatchHandle
+                        != AUDIO_PATCH_HANDLE_NONE) ||
+                        (removedPatch->mPlaybackPatchHandle !=
+                                AUDIO_PATCH_HANDLE_NONE)) {
+                    clearPatchConnections(removedPatch);
+                }
                 mPatches.removeAt(index);
                 delete removedPatch;
                 break;
diff --git a/services/audioflinger/ServiceUtilities.cpp b/services/audioflinger/ServiceUtilities.cpp
index afc2440..3c73543 100644
--- a/services/audioflinger/ServiceUtilities.cpp
+++ b/services/audioflinger/ServiceUtilities.cpp
@@ -105,11 +105,10 @@
     return true;
 }
 
-bool captureAudioOutputAllowed() {
+bool captureAudioOutputAllowed(pid_t pid, uid_t uid) {
     if (getpid_cached == IPCThreadState::self()->getCallingPid()) return true;
     static const String16 sCaptureAudioOutput("android.permission.CAPTURE_AUDIO_OUTPUT");
-    // IMPORTANT: Use PermissionCache - not a runtime permission and may not change.
-    bool ok = PermissionCache::checkCallingPermission(sCaptureAudioOutput);
+    bool ok = checkPermission(sCaptureAudioOutput, pid, uid);
     if (!ok) ALOGE("Request requires android.permission.CAPTURE_AUDIO_OUTPUT");
     return ok;
 }
diff --git a/services/audioflinger/ServiceUtilities.h b/services/audioflinger/ServiceUtilities.h
index 1e79553..8b1bc00 100644
--- a/services/audioflinger/ServiceUtilities.h
+++ b/services/audioflinger/ServiceUtilities.h
@@ -21,7 +21,7 @@
 extern pid_t getpid_cached;
 bool isTrustedCallingUid(uid_t uid);
 bool recordingAllowed(const String16& opPackageName, pid_t pid, uid_t uid);
-bool captureAudioOutputAllowed();
+bool captureAudioOutputAllowed(pid_t pid, uid_t uid);
 bool captureHotwordAllowed();
 bool settingsAllowed();
 bool modifyAudioRoutingAllowed();
diff --git a/services/audioflinger/Threads.cpp b/services/audioflinger/Threads.cpp
index d296ee3..3759424 100644
--- a/services/audioflinger/Threads.cpp
+++ b/services/audioflinger/Threads.cpp
@@ -1576,7 +1576,7 @@
         mActiveTracksGeneration(0),
         // mStreamTypes[] initialized in constructor body
         mOutput(output),
-        mLastWriteTime(0), mNumWrites(0), mNumDelayedWrites(0), mInWrite(false),
+        mLastWriteTime(-1), mNumWrites(0), mNumDelayedWrites(0), mInWrite(false),
         mMixerStatus(MIXER_IDLE),
         mMixerStatusIgnoringFastTracks(MIXER_IDLE),
         mStandbyDelayNs(AudioFlinger::mStandbyTimeInNsecs),
@@ -2537,8 +2537,6 @@
 // shared by MIXER and DIRECT, overridden by DUPLICATING
 ssize_t AudioFlinger::PlaybackThread::threadLoop_write()
 {
-    // FIXME rewrite to reduce number of system calls
-    mLastWriteTime = systemTime();
     mInWrite = true;
     ssize_t bytesWritten;
     const size_t offset = mCurrentWriteLength - mBytesRemaining;
@@ -2659,18 +2657,20 @@
     }
 }
 
-void AudioFlinger::PlaybackThread::invalidateTracks_l(audio_stream_type_t streamType)
+bool AudioFlinger::PlaybackThread::invalidateTracks_l(audio_stream_type_t streamType)
 {
     ALOGV("MixerThread::invalidateTracks() mixer %p, streamType %d, mTracks.size %zu",
             this,  streamType, mTracks.size());
-
+    bool trackMatch = false;
     size_t size = mTracks.size();
     for (size_t i = 0; i < size; i++) {
         sp<Track> t = mTracks[i];
         if (t->streamType() == streamType && t->isExternalTrack()) {
             t->invalidate();
+            trackMatch = true;
         }
     }
+    return trackMatch;
 }
 
 void AudioFlinger::PlaybackThread::invalidateTracks(audio_stream_type_t streamType)
@@ -2832,6 +2832,8 @@
     Vector< sp<Track> > tracksToRemove;
 
     mStandbyTimeNs = systemTime();
+    nsecs_t lastWriteFinished = -1; // time last server write completed
+    int64_t lastFramesWritten = -1; // track changes in timestamp server frames written
 
     // MIXER
     nsecs_t lastWarning = 0;
@@ -2882,10 +2884,11 @@
             // Gather the framesReleased counters for all active tracks,
             // and associate with the sink frames written out.  We need
             // this to convert the sink timestamp to the track timestamp.
+            bool kernelLocationUpdate = false;
             if (mNormalSink != 0) {
                 // Note: The DuplicatingThread may not have a mNormalSink.
                 // We always fetch the timestamp here because often the downstream
-                // sink will block whie writing.
+                // sink will block while writing.
                 ExtendedTimestamp timestamp; // use private copy to fetch
                 (void) mNormalSink->getTimestamp(timestamp);
 
@@ -2902,6 +2905,10 @@
                             mTimestamp.mPosition[ExtendedTimestamp::LOCATION_SERVER];
                     mTimestamp.mTimeNs[ExtendedTimestamp::LOCATION_SERVER_LASTKERNELOK] =
                             mTimestamp.mTimeNs[ExtendedTimestamp::LOCATION_SERVER];
+                }
+
+                if (timestamp.mTimeNs[ExtendedTimestamp::LOCATION_KERNEL] >= 0) {
+                    kernelLocationUpdate = true;
                 } else {
                     ALOGV("getTimestamp error - no valid kernel position");
                 }
@@ -2915,16 +2922,33 @@
             // mFramesWritten for non-offloaded tracks are contiguous
             // even after standby() is called. This is useful for the track frame
             // to sink frame mapping.
-            mTimestamp.mPosition[ExtendedTimestamp::LOCATION_SERVER] = mFramesWritten;
-            mTimestamp.mTimeNs[ExtendedTimestamp::LOCATION_SERVER] = systemTime();
-            const size_t size = mActiveTracks.size();
-            for (size_t i = 0; i < size; ++i) {
-                sp<Track> t = mActiveTracks[i].promote();
-                if (t != 0 && !t->isFastTrack()) {
-                    t->updateTrackFrameInfo(
-                            t->mAudioTrackServerProxy->framesReleased(),
-                            mFramesWritten,
-                            mTimestamp);
+            bool serverLocationUpdate = false;
+            if (mFramesWritten != lastFramesWritten) {
+                serverLocationUpdate = true;
+                lastFramesWritten = mFramesWritten;
+            }
+            // Only update timestamps if there is a meaningful change.
+            // Either the kernel timestamp must be valid or we have written something.
+            if (kernelLocationUpdate || serverLocationUpdate) {
+                if (serverLocationUpdate) {
+                    // use the time before we called the HAL write - it is a bit more accurate
+                    // to when the server last read data than the current time here.
+                    //
+                    // If we haven't written anything, mLastWriteTime will be -1
+                    // and we use systemTime().
+                    mTimestamp.mPosition[ExtendedTimestamp::LOCATION_SERVER] = mFramesWritten;
+                    mTimestamp.mTimeNs[ExtendedTimestamp::LOCATION_SERVER] = mLastWriteTime == -1
+                            ? systemTime() : mLastWriteTime;
+                }
+                const size_t size = mActiveTracks.size();
+                for (size_t i = 0; i < size; ++i) {
+                    sp<Track> t = mActiveTracks[i].promote();
+                    if (t != 0 && !t->isFastTrack()) {
+                        t->updateTrackFrameInfo(
+                                t->mAudioTrackServerProxy->framesReleased(),
+                                mFramesWritten,
+                                mTimestamp);
+                    }
                 }
             }
 
@@ -3102,8 +3126,17 @@
             // mSleepTimeUs == 0 means we must write to audio hardware
             if (mSleepTimeUs == 0) {
                 ssize_t ret = 0;
+                // We save lastWriteFinished here, as previousLastWriteFinished,
+                // for throttling. On thread start, previousLastWriteFinished will be
+                // set to -1, which properly results in no throttling after the first write.
+                nsecs_t previousLastWriteFinished = lastWriteFinished;
+                nsecs_t delta = 0;
                 if (mBytesRemaining) {
+                    // FIXME rewrite to reduce number of system calls
+                    mLastWriteTime = systemTime();  // also used for dumpsys
                     ret = threadLoop_write();
+                    lastWriteFinished = systemTime();
+                    delta = lastWriteFinished - mLastWriteTime;
                     if (ret < 0) {
                         mBytesRemaining = 0;
                     } else {
@@ -3117,15 +3150,13 @@
                 }
                 if (mType == MIXER && !mStandby) {
                     // write blocked detection
-                    nsecs_t now = systemTime();
-                    nsecs_t delta = now - mLastWriteTime;
                     if (delta > maxPeriod) {
                         mNumDelayedWrites++;
-                        if ((now - lastWarning) > kWarningThrottleNs) {
+                        if ((lastWriteFinished - lastWarning) > kWarningThrottleNs) {
                             ATRACE_NAME("underrun");
                             ALOGW("write blocked for %llu msecs, %d delayed writes, thread %p",
                                     (unsigned long long) ns2ms(delta), mNumDelayedWrites, this);
-                            lastWarning = now;
+                            lastWarning = lastWriteFinished;
                         }
                     }
 
@@ -3145,7 +3176,9 @@
                         // (2) minimum buffer sized tracks (even if the track is full,
                         //     the app won't fill fast enough to handle the sudden draw).
 
-                        const int32_t deltaMs = delta / 1000000;
+                        // it's OK if deltaMs is an overestimate.
+                        const int32_t deltaMs =
+                                (lastWriteFinished - previousLastWriteFinished) / 1000000;
                         const int32_t throttleMs = mHalfBufferMs - deltaMs;
                         if ((signed)mHalfBufferMs >= throttleMs && throttleMs > 0) {
                             usleep(throttleMs * 1000);
@@ -5429,8 +5462,9 @@
 void AudioFlinger::OffloadThread::invalidateTracks(audio_stream_type_t streamType)
 {
     Mutex::Autolock _l(mLock);
-    mFlushPending = true;
-    PlaybackThread::invalidateTracks_l(streamType);
+    if (PlaybackThread::invalidateTracks_l(streamType)) {
+        mFlushPending = true;
+    }
 }
 
 // ----------------------------------------------------------------------------
diff --git a/services/audioflinger/Threads.h b/services/audioflinger/Threads.h
index 1cceb6d..787b5c4 100644
--- a/services/audioflinger/Threads.h
+++ b/services/audioflinger/Threads.h
@@ -613,7 +613,7 @@
                 virtual bool     isValidSyncEvent(const sp<SyncEvent>& event) const;
 
                 // called with AudioFlinger lock held
-                        void     invalidateTracks_l(audio_stream_type_t streamType);
+                        bool     invalidateTracks_l(audio_stream_type_t streamType);
                 virtual void     invalidateTracks(audio_stream_type_t streamType);
 
     virtual     size_t      frameCount() const { return mNormalFrameCount; }
diff --git a/services/audiopolicy/engineconfigurable/parameter-framework/example/Android.mk b/services/audiopolicy/engineconfigurable/parameter-framework/examples/Android.mk
similarity index 62%
rename from services/audiopolicy/engineconfigurable/parameter-framework/example/Android.mk
rename to services/audiopolicy/engineconfigurable/parameter-framework/examples/Android.mk
index 5775556..baaefd2 100644
--- a/services/audiopolicy/engineconfigurable/parameter-framework/example/Android.mk
+++ b/services/audiopolicy/engineconfigurable/parameter-framework/examples/Android.mk
@@ -107,4 +107,56 @@
 include $(BUILD_PREBUILT)
 endif # pfw_rebuild_settings
 
+######### Policy PFW Settings - No Output #########
+include $(CLEAR_VARS)
+LOCAL_MODULE := parameter-framework.policy.no-output
+LOCAL_MODULE_STEM := PolicyConfigurableDomains-NoOutputDevice.xml
+LOCAL_MODULE_CLASS := ETC
+LOCAL_MODULE_RELATIVE_PATH := parameter-framework/Settings/Policy
+LOCAL_ADDITIONAL_DEPENDENCIES := \
+        PolicyClass.xml \
+        PolicySubsystem.xml \
+        ParameterFrameworkConfigurationPolicy.xml
+
+PFW_TOPLEVEL_FILE := $(TARGET_OUT_ETC)/parameter-framework/ParameterFrameworkConfigurationPolicy.xml
+PFW_CRITERIA_FILE := $(LOCAL_PATH)/policy_criteria.txt
+PFW_EDD_FILES := \
+        $(LOCAL_PATH)/SettingsNoOutput/device_for_strategies.pfw \
+        $(LOCAL_PATH)/Settings/strategy_for_stream.pfw \
+        $(LOCAL_PATH)/Settings/strategy_for_usage.pfw \
+        $(LOCAL_PATH)/Settings/device_for_input_source.pfw \
+        $(LOCAL_PATH)/Settings/volumes.pfw
+
+include $(BUILD_PFW_SETTINGS)
+
+######### Policy PFW Settings - No Input #########
+include $(CLEAR_VARS)
+LOCAL_MODULE := parameter-framework.policy.no-input
+LOCAL_MODULE_STEM := PolicyConfigurableDomains-NoInputDevice.xml
+LOCAL_MODULE_CLASS := ETC
+LOCAL_MODULE_RELATIVE_PATH := parameter-framework/Settings/Policy
+LOCAL_ADDITIONAL_DEPENDENCIES := \
+        PolicyClass.xml \
+        PolicySubsystem.xml \
+        ParameterFrameworkConfigurationPolicy.xml
+
+PFW_TOPLEVEL_FILE := $(TARGET_OUT_ETC)/parameter-framework/ParameterFrameworkConfigurationPolicy.xml
+PFW_CRITERIA_FILE := $(LOCAL_PATH)/policy_criteria.txt
+PFW_EDD_FILES := \
+        $(LOCAL_PATH)/Settings/device_for_strategy_media.pfw \
+        $(LOCAL_PATH)/Settings/device_for_strategy_phone.pfw \
+        $(LOCAL_PATH)/Settings/device_for_strategy_sonification.pfw \
+        $(LOCAL_PATH)/Settings/device_for_strategy_sonification_respectful.pfw \
+        $(LOCAL_PATH)/Settings/device_for_strategy_dtmf.pfw \
+        $(LOCAL_PATH)/Settings/device_for_strategy_enforced_audible.pfw \
+        $(LOCAL_PATH)/Settings/device_for_strategy_transmitted_through_speaker.pfw \
+        $(LOCAL_PATH)/Settings/device_for_strategy_accessibility.pfw \
+        $(LOCAL_PATH)/Settings/device_for_strategy_rerouting.pfw \
+        $(LOCAL_PATH)/Settings/strategy_for_stream.pfw \
+        $(LOCAL_PATH)/Settings/strategy_for_usage.pfw \
+        $(LOCAL_PATH)/SettingsNoInput/device_for_input_source.pfw \
+        $(LOCAL_PATH)/Settings/volumes.pfw
+
+include $(BUILD_PFW_SETTINGS)
+
 endif # ifeq (1, 0)
diff --git a/services/audiopolicy/engineconfigurable/parameter-framework/example/ParameterFrameworkConfigurationPolicy.xml.in b/services/audiopolicy/engineconfigurable/parameter-framework/examples/ParameterFrameworkConfigurationPolicy.xml.in
similarity index 100%
rename from services/audiopolicy/engineconfigurable/parameter-framework/example/ParameterFrameworkConfigurationPolicy.xml.in
rename to services/audiopolicy/engineconfigurable/parameter-framework/examples/ParameterFrameworkConfigurationPolicy.xml.in
diff --git a/services/audiopolicy/engineconfigurable/parameter-framework/example/README.md b/services/audiopolicy/engineconfigurable/parameter-framework/examples/README.md
similarity index 100%
rename from services/audiopolicy/engineconfigurable/parameter-framework/example/README.md
rename to services/audiopolicy/engineconfigurable/parameter-framework/examples/README.md
diff --git a/services/audiopolicy/engineconfigurable/parameter-framework/example/Settings/PolicyConfigurableDomains.xml b/services/audiopolicy/engineconfigurable/parameter-framework/examples/Settings/PolicyConfigurableDomains.xml
similarity index 96%
rename from services/audiopolicy/engineconfigurable/parameter-framework/example/Settings/PolicyConfigurableDomains.xml
rename to services/audiopolicy/engineconfigurable/parameter-framework/examples/Settings/PolicyConfigurableDomains.xml
index bc7ad6b..aa2af0f 100644
--- a/services/audiopolicy/engineconfigurable/parameter-framework/example/Settings/PolicyConfigurableDomains.xml
+++ b/services/audiopolicy/engineconfigurable/parameter-framework/examples/Settings/PolicyConfigurableDomains.xml
@@ -15,6 +15,9 @@
       <ConfigurableElement Path="/Policy/policy/strategies/media/selected_output_devices/mask/bluetooth_sco_headset"/>
       <ConfigurableElement Path="/Policy/policy/strategies/media/selected_output_devices/mask/bluetooth_sco_carkit"/>
       <ConfigurableElement Path="/Policy/policy/strategies/media/selected_output_devices/mask/telephony_tx"/>
+      <ConfigurableElement Path="/Policy/policy/strategies/media/selected_output_devices/mask/ip"/>
+      <ConfigurableElement Path="/Policy/policy/strategies/media/selected_output_devices/mask/bus"/>
+      <ConfigurableElement Path="/Policy/policy/strategies/media/selected_output_devices/mask/stub"/>
     </ConfigurableElements>
     <Settings>
       <Configuration Name="Calibration">
@@ -39,6 +42,15 @@
         <ConfigurableElement Path="/Policy/policy/strategies/media/selected_output_devices/mask/telephony_tx">
           <BitParameter Name="telephony_tx">0</BitParameter>
         </ConfigurableElement>
+        <ConfigurableElement Path="/Policy/policy/strategies/media/selected_output_devices/mask/ip">
+          <BitParameter Name="ip">0</BitParameter>
+        </ConfigurableElement>
+        <ConfigurableElement Path="/Policy/policy/strategies/media/selected_output_devices/mask/bus">
+          <BitParameter Name="bus">0</BitParameter>
+        </ConfigurableElement>
+        <ConfigurableElement Path="/Policy/policy/strategies/media/selected_output_devices/mask/stub">
+          <BitParameter Name="stub">0</BitParameter>
+        </ConfigurableElement>
       </Configuration>
     </Settings>
   </ConfigurableDomain>
@@ -856,6 +868,9 @@
       <ConfigurableElement Path="/Policy/policy/strategies/phone/selected_output_devices/mask/spdif"/>
       <ConfigurableElement Path="/Policy/policy/strategies/phone/selected_output_devices/mask/fm"/>
       <ConfigurableElement Path="/Policy/policy/strategies/phone/selected_output_devices/mask/speaker_safe"/>
+      <ConfigurableElement Path="/Policy/policy/strategies/phone/selected_output_devices/mask/ip"/>
+      <ConfigurableElement Path="/Policy/policy/strategies/phone/selected_output_devices/mask/bus"/>
+      <ConfigurableElement Path="/Policy/policy/strategies/phone/selected_output_devices/mask/stub"/>
     </ConfigurableElements>
     <Settings>
       <Configuration Name="Calibration">
@@ -877,6 +892,15 @@
         <ConfigurableElement Path="/Policy/policy/strategies/phone/selected_output_devices/mask/speaker_safe">
           <BitParameter Name="speaker_safe">0</BitParameter>
         </ConfigurableElement>
+        <ConfigurableElement Path="/Policy/policy/strategies/phone/selected_output_devices/mask/ip">
+          <BitParameter Name="ip">0</BitParameter>
+        </ConfigurableElement>
+        <ConfigurableElement Path="/Policy/policy/strategies/phone/selected_output_devices/mask/bus">
+          <BitParameter Name="bus">0</BitParameter>
+        </ConfigurableElement>
+        <ConfigurableElement Path="/Policy/policy/strategies/phone/selected_output_devices/mask/stub">
+          <BitParameter Name="stub">0</BitParameter>
+        </ConfigurableElement>
       </Configuration>
     </Settings>
   </ConfigurableDomain>
@@ -1941,6 +1965,9 @@
       <ConfigurableElement Path="/Policy/policy/strategies/sonification/selected_output_devices/mask/speaker_safe"/>
       <ConfigurableElement Path="/Policy/policy/strategies/sonification/selected_output_devices/mask/aux_line"/>
       <ConfigurableElement Path="/Policy/policy/strategies/sonification/selected_output_devices/mask/hdmi"/>
+      <ConfigurableElement Path="/Policy/policy/strategies/sonification/selected_output_devices/mask/ip"/>
+      <ConfigurableElement Path="/Policy/policy/strategies/sonification/selected_output_devices/mask/bus"/>
+      <ConfigurableElement Path="/Policy/policy/strategies/sonification/selected_output_devices/mask/stub"/>
     </ConfigurableElements>
     <Settings>
       <Configuration Name="Calibration">
@@ -1965,6 +1992,15 @@
         <ConfigurableElement Path="/Policy/policy/strategies/sonification/selected_output_devices/mask/hdmi">
           <BitParameter Name="hdmi">0</BitParameter>
         </ConfigurableElement>
+        <ConfigurableElement Path="/Policy/policy/strategies/sonification/selected_output_devices/mask/ip">
+          <BitParameter Name="ip">0</BitParameter>
+        </ConfigurableElement>
+        <ConfigurableElement Path="/Policy/policy/strategies/sonification/selected_output_devices/mask/bus">
+          <BitParameter Name="bus">0</BitParameter>
+        </ConfigurableElement>
+        <ConfigurableElement Path="/Policy/policy/strategies/sonification/selected_output_devices/mask/stub">
+          <BitParameter Name="stub">0</BitParameter>
+        </ConfigurableElement>
       </Configuration>
     </Settings>
   </ConfigurableDomain>
@@ -2920,6 +2956,9 @@
       <ConfigurableElement Path="/Policy/policy/strategies/sonification_respectful/selected_output_devices/mask/spdif"/>
       <ConfigurableElement Path="/Policy/policy/strategies/sonification_respectful/selected_output_devices/mask/fm"/>
       <ConfigurableElement Path="/Policy/policy/strategies/sonification_respectful/selected_output_devices/mask/telephony_tx"/>
+      <ConfigurableElement Path="/Policy/policy/strategies/sonification_respectful/selected_output_devices/mask/ip"/>
+      <ConfigurableElement Path="/Policy/policy/strategies/sonification_respectful/selected_output_devices/mask/bus"/>
+      <ConfigurableElement Path="/Policy/policy/strategies/sonification_respectful/selected_output_devices/mask/stub"/>
     </ConfigurableElements>
     <Settings>
       <Configuration Name="Calibration">
@@ -2941,6 +2980,15 @@
         <ConfigurableElement Path="/Policy/policy/strategies/sonification_respectful/selected_output_devices/mask/telephony_tx">
           <BitParameter Name="telephony_tx">0</BitParameter>
         </ConfigurableElement>
+        <ConfigurableElement Path="/Policy/policy/strategies/sonification_respectful/selected_output_devices/mask/ip">
+          <BitParameter Name="ip">0</BitParameter>
+        </ConfigurableElement>
+        <ConfigurableElement Path="/Policy/policy/strategies/sonification_respectful/selected_output_devices/mask/bus">
+          <BitParameter Name="bus">0</BitParameter>
+        </ConfigurableElement>
+        <ConfigurableElement Path="/Policy/policy/strategies/sonification_respectful/selected_output_devices/mask/stub">
+          <BitParameter Name="stub">0</BitParameter>
+        </ConfigurableElement>
       </Configuration>
     </Settings>
   </ConfigurableDomain>
@@ -3922,6 +3970,9 @@
       <ConfigurableElement Path="/Policy/policy/strategies/dtmf/selected_output_devices/mask/fm"/>
       <ConfigurableElement Path="/Policy/policy/strategies/dtmf/selected_output_devices/mask/speaker_safe"/>
       <ConfigurableElement Path="/Policy/policy/strategies/dtmf/selected_output_devices/mask/bluetooth_sco_carkit"/>
+      <ConfigurableElement Path="/Policy/policy/strategies/dtmf/selected_output_devices/mask/ip"/>
+      <ConfigurableElement Path="/Policy/policy/strategies/dtmf/selected_output_devices/mask/bus"/>
+      <ConfigurableElement Path="/Policy/policy/strategies/dtmf/selected_output_devices/mask/stub"/>
     </ConfigurableElements>
     <Settings>
       <Configuration Name="Calibration">
@@ -3934,6 +3985,15 @@
         <ConfigurableElement Path="/Policy/policy/strategies/dtmf/selected_output_devices/mask/bluetooth_sco_carkit">
           <BitParameter Name="bluetooth_sco_carkit">0</BitParameter>
         </ConfigurableElement>
+        <ConfigurableElement Path="/Policy/policy/strategies/dtmf/selected_output_devices/mask/ip">
+          <BitParameter Name="ip">0</BitParameter>
+        </ConfigurableElement>
+        <ConfigurableElement Path="/Policy/policy/strategies/dtmf/selected_output_devices/mask/bus">
+          <BitParameter Name="bus">0</BitParameter>
+        </ConfigurableElement>
+        <ConfigurableElement Path="/Policy/policy/strategies/dtmf/selected_output_devices/mask/stub">
+          <BitParameter Name="stub">0</BitParameter>
+        </ConfigurableElement>
       </Configuration>
     </Settings>
   </ConfigurableDomain>
@@ -5210,6 +5270,10 @@
       <ConfigurableElement Path="/Policy/policy/strategies/enforced_audible/selected_output_devices/mask/spdif"/>
       <ConfigurableElement Path="/Policy/policy/strategies/enforced_audible/selected_output_devices/mask/aux_line"/>
       <ConfigurableElement Path="/Policy/policy/strategies/enforced_audible/selected_output_devices/mask/speaker_safe"/>
+      <ConfigurableElement Path="/Policy/policy/strategies/enforced_audible/selected_output_devices/mask/ip"/>
+      <ConfigurableElement Path="/Policy/policy/strategies/enforced_audible/selected_output_devices/mask/bus"/>
+      <ConfigurableElement Path="/Policy/policy/strategies/enforced_audible/selected_output_devices/mask/fm"/>
+      <ConfigurableElement Path="/Policy/policy/strategies/enforced_audible/selected_output_devices/mask/stub"/>
     </ConfigurableElements>
     <Settings>
       <Configuration Name="Calibration">
@@ -5225,6 +5289,18 @@
         <ConfigurableElement Path="/Policy/policy/strategies/enforced_audible/selected_output_devices/mask/speaker_safe">
           <BitParameter Name="speaker_safe">0</BitParameter>
         </ConfigurableElement>
+        <ConfigurableElement Path="/Policy/policy/strategies/enforced_audible/selected_output_devices/mask/ip">
+          <BitParameter Name="ip">0</BitParameter>
+        </ConfigurableElement>
+        <ConfigurableElement Path="/Policy/policy/strategies/enforced_audible/selected_output_devices/mask/bus">
+          <BitParameter Name="bus">0</BitParameter>
+        </ConfigurableElement>
+        <ConfigurableElement Path="/Policy/policy/strategies/enforced_audible/selected_output_devices/mask/fm">
+          <BitParameter Name="fm">0</BitParameter>
+        </ConfigurableElement>
+        <ConfigurableElement Path="/Policy/policy/strategies/enforced_audible/selected_output_devices/mask/stub">
+          <BitParameter Name="stub">0</BitParameter>
+        </ConfigurableElement>
       </Configuration>
     </Settings>
   </ConfigurableDomain>
@@ -5356,6 +5432,9 @@
           <SelectionCriterionRule SelectionCriterion="AvailableOutputDevices" MatchesWhen="Includes" Value="AnlgDockHeadset"/>
         </CompoundRule>
       </Configuration>
+      <Configuration Name="NoDevice">
+        <CompoundRule Type="All"/>
+      </Configuration>
     </Configurations>
     <ConfigurableElements>
       <ConfigurableElement Path="/Policy/policy/strategies/enforced_audible/selected_output_devices/mask/remote_submix"/>
@@ -5375,7 +5454,6 @@
       <ConfigurableElement Path="/Policy/policy/strategies/enforced_audible/selected_output_devices/mask/usb_device"/>
       <ConfigurableElement Path="/Policy/policy/strategies/enforced_audible/selected_output_devices/mask/telephony_tx"/>
       <ConfigurableElement Path="/Policy/policy/strategies/enforced_audible/selected_output_devices/mask/line"/>
-      <ConfigurableElement Path="/Policy/policy/strategies/enforced_audible/selected_output_devices/mask/fm"/>
     </ConfigurableElements>
     <Settings>
       <Configuration Name="RemoteSubmix">
@@ -5430,9 +5508,6 @@
         <ConfigurableElement Path="/Policy/policy/strategies/enforced_audible/selected_output_devices/mask/line">
           <BitParameter Name="line">0</BitParameter>
         </ConfigurableElement>
-        <ConfigurableElement Path="/Policy/policy/strategies/enforced_audible/selected_output_devices/mask/fm">
-          <BitParameter Name="fm">0</BitParameter>
-        </ConfigurableElement>
       </Configuration>
       <Configuration Name="BluetoothA2dp">
         <ConfigurableElement Path="/Policy/policy/strategies/enforced_audible/selected_output_devices/mask/remote_submix">
@@ -5486,9 +5561,6 @@
         <ConfigurableElement Path="/Policy/policy/strategies/enforced_audible/selected_output_devices/mask/line">
           <BitParameter Name="line">0</BitParameter>
         </ConfigurableElement>
-        <ConfigurableElement Path="/Policy/policy/strategies/enforced_audible/selected_output_devices/mask/fm">
-          <BitParameter Name="fm">0</BitParameter>
-        </ConfigurableElement>
       </Configuration>
       <Configuration Name="BluetoothA2dpHeadphones">
         <ConfigurableElement Path="/Policy/policy/strategies/enforced_audible/selected_output_devices/mask/remote_submix">
@@ -5542,9 +5614,6 @@
         <ConfigurableElement Path="/Policy/policy/strategies/enforced_audible/selected_output_devices/mask/line">
           <BitParameter Name="line">0</BitParameter>
         </ConfigurableElement>
-        <ConfigurableElement Path="/Policy/policy/strategies/enforced_audible/selected_output_devices/mask/fm">
-          <BitParameter Name="fm">0</BitParameter>
-        </ConfigurableElement>
       </Configuration>
       <Configuration Name="BluetoothA2dpSpeaker">
         <ConfigurableElement Path="/Policy/policy/strategies/enforced_audible/selected_output_devices/mask/remote_submix">
@@ -5598,9 +5667,6 @@
         <ConfigurableElement Path="/Policy/policy/strategies/enforced_audible/selected_output_devices/mask/line">
           <BitParameter Name="line">0</BitParameter>
         </ConfigurableElement>
-        <ConfigurableElement Path="/Policy/policy/strategies/enforced_audible/selected_output_devices/mask/fm">
-          <BitParameter Name="fm">0</BitParameter>
-        </ConfigurableElement>
       </Configuration>
       <Configuration Name="WiredHeadphone">
         <ConfigurableElement Path="/Policy/policy/strategies/enforced_audible/selected_output_devices/mask/remote_submix">
@@ -5654,9 +5720,6 @@
         <ConfigurableElement Path="/Policy/policy/strategies/enforced_audible/selected_output_devices/mask/line">
           <BitParameter Name="line">0</BitParameter>
         </ConfigurableElement>
-        <ConfigurableElement Path="/Policy/policy/strategies/enforced_audible/selected_output_devices/mask/fm">
-          <BitParameter Name="fm">0</BitParameter>
-        </ConfigurableElement>
       </Configuration>
       <Configuration Name="Line">
         <ConfigurableElement Path="/Policy/policy/strategies/enforced_audible/selected_output_devices/mask/remote_submix">
@@ -5710,9 +5773,6 @@
         <ConfigurableElement Path="/Policy/policy/strategies/enforced_audible/selected_output_devices/mask/line">
           <BitParameter Name="line">1</BitParameter>
         </ConfigurableElement>
-        <ConfigurableElement Path="/Policy/policy/strategies/enforced_audible/selected_output_devices/mask/fm">
-          <BitParameter Name="fm">0</BitParameter>
-        </ConfigurableElement>
       </Configuration>
       <Configuration Name="WiredHeadset">
         <ConfigurableElement Path="/Policy/policy/strategies/enforced_audible/selected_output_devices/mask/remote_submix">
@@ -5766,9 +5826,6 @@
         <ConfigurableElement Path="/Policy/policy/strategies/enforced_audible/selected_output_devices/mask/line">
           <BitParameter Name="line">0</BitParameter>
         </ConfigurableElement>
-        <ConfigurableElement Path="/Policy/policy/strategies/enforced_audible/selected_output_devices/mask/fm">
-          <BitParameter Name="fm">0</BitParameter>
-        </ConfigurableElement>
       </Configuration>
       <Configuration Name="UsbAccessory">
         <ConfigurableElement Path="/Policy/policy/strategies/enforced_audible/selected_output_devices/mask/remote_submix">
@@ -5822,9 +5879,6 @@
         <ConfigurableElement Path="/Policy/policy/strategies/enforced_audible/selected_output_devices/mask/line">
           <BitParameter Name="line">0</BitParameter>
         </ConfigurableElement>
-        <ConfigurableElement Path="/Policy/policy/strategies/enforced_audible/selected_output_devices/mask/fm">
-          <BitParameter Name="fm">0</BitParameter>
-        </ConfigurableElement>
       </Configuration>
       <Configuration Name="UsbDevice">
         <ConfigurableElement Path="/Policy/policy/strategies/enforced_audible/selected_output_devices/mask/remote_submix">
@@ -5878,9 +5932,6 @@
         <ConfigurableElement Path="/Policy/policy/strategies/enforced_audible/selected_output_devices/mask/line">
           <BitParameter Name="line">0</BitParameter>
         </ConfigurableElement>
-        <ConfigurableElement Path="/Policy/policy/strategies/enforced_audible/selected_output_devices/mask/fm">
-          <BitParameter Name="fm">0</BitParameter>
-        </ConfigurableElement>
       </Configuration>
       <Configuration Name="DgtlDockHeadset">
         <ConfigurableElement Path="/Policy/policy/strategies/enforced_audible/selected_output_devices/mask/remote_submix">
@@ -5934,9 +5985,6 @@
         <ConfigurableElement Path="/Policy/policy/strategies/enforced_audible/selected_output_devices/mask/line">
           <BitParameter Name="line">0</BitParameter>
         </ConfigurableElement>
-        <ConfigurableElement Path="/Policy/policy/strategies/enforced_audible/selected_output_devices/mask/fm">
-          <BitParameter Name="fm">0</BitParameter>
-        </ConfigurableElement>
       </Configuration>
       <Configuration Name="Hdmi">
         <ConfigurableElement Path="/Policy/policy/strategies/enforced_audible/selected_output_devices/mask/remote_submix">
@@ -5990,9 +6038,6 @@
         <ConfigurableElement Path="/Policy/policy/strategies/enforced_audible/selected_output_devices/mask/line">
           <BitParameter Name="line">0</BitParameter>
         </ConfigurableElement>
-        <ConfigurableElement Path="/Policy/policy/strategies/enforced_audible/selected_output_devices/mask/fm">
-          <BitParameter Name="fm">0</BitParameter>
-        </ConfigurableElement>
       </Configuration>
       <Configuration Name="AnlgDockHeadset">
         <ConfigurableElement Path="/Policy/policy/strategies/enforced_audible/selected_output_devices/mask/remote_submix">
@@ -6046,8 +6091,58 @@
         <ConfigurableElement Path="/Policy/policy/strategies/enforced_audible/selected_output_devices/mask/line">
           <BitParameter Name="line">0</BitParameter>
         </ConfigurableElement>
-        <ConfigurableElement Path="/Policy/policy/strategies/enforced_audible/selected_output_devices/mask/fm">
-          <BitParameter Name="fm">0</BitParameter>
+      </Configuration>
+      <Configuration Name="NoDevice">
+        <ConfigurableElement Path="/Policy/policy/strategies/enforced_audible/selected_output_devices/mask/remote_submix">
+          <BitParameter Name="remote_submix">0</BitParameter>
+        </ConfigurableElement>
+        <ConfigurableElement Path="/Policy/policy/strategies/enforced_audible/selected_output_devices/mask/earpiece">
+          <BitParameter Name="earpiece">0</BitParameter>
+        </ConfigurableElement>
+        <ConfigurableElement Path="/Policy/policy/strategies/enforced_audible/selected_output_devices/mask/wired_headset">
+          <BitParameter Name="wired_headset">0</BitParameter>
+        </ConfigurableElement>
+        <ConfigurableElement Path="/Policy/policy/strategies/enforced_audible/selected_output_devices/mask/wired_headphone">
+          <BitParameter Name="wired_headphone">0</BitParameter>
+        </ConfigurableElement>
+        <ConfigurableElement Path="/Policy/policy/strategies/enforced_audible/selected_output_devices/mask/bluetooth_sco">
+          <BitParameter Name="bluetooth_sco">0</BitParameter>
+        </ConfigurableElement>
+        <ConfigurableElement Path="/Policy/policy/strategies/enforced_audible/selected_output_devices/mask/bluetooth_sco_headset">
+          <BitParameter Name="bluetooth_sco_headset">0</BitParameter>
+        </ConfigurableElement>
+        <ConfigurableElement Path="/Policy/policy/strategies/enforced_audible/selected_output_devices/mask/bluetooth_sco_carkit">
+          <BitParameter Name="bluetooth_sco_carkit">0</BitParameter>
+        </ConfigurableElement>
+        <ConfigurableElement Path="/Policy/policy/strategies/enforced_audible/selected_output_devices/mask/bluetooth_a2dp">
+          <BitParameter Name="bluetooth_a2dp">0</BitParameter>
+        </ConfigurableElement>
+        <ConfigurableElement Path="/Policy/policy/strategies/enforced_audible/selected_output_devices/mask/bluetooth_a2dp_headphones">
+          <BitParameter Name="bluetooth_a2dp_headphones">0</BitParameter>
+        </ConfigurableElement>
+        <ConfigurableElement Path="/Policy/policy/strategies/enforced_audible/selected_output_devices/mask/bluetooth_a2dp_speaker">
+          <BitParameter Name="bluetooth_a2dp_speaker">0</BitParameter>
+        </ConfigurableElement>
+        <ConfigurableElement Path="/Policy/policy/strategies/enforced_audible/selected_output_devices/mask/hdmi">
+          <BitParameter Name="hdmi">0</BitParameter>
+        </ConfigurableElement>
+        <ConfigurableElement Path="/Policy/policy/strategies/enforced_audible/selected_output_devices/mask/angl_dock_headset">
+          <BitParameter Name="angl_dock_headset">0</BitParameter>
+        </ConfigurableElement>
+        <ConfigurableElement Path="/Policy/policy/strategies/enforced_audible/selected_output_devices/mask/dgtl_dock_headset">
+          <BitParameter Name="dgtl_dock_headset">0</BitParameter>
+        </ConfigurableElement>
+        <ConfigurableElement Path="/Policy/policy/strategies/enforced_audible/selected_output_devices/mask/usb_accessory">
+          <BitParameter Name="usb_accessory">0</BitParameter>
+        </ConfigurableElement>
+        <ConfigurableElement Path="/Policy/policy/strategies/enforced_audible/selected_output_devices/mask/usb_device">
+          <BitParameter Name="usb_device">0</BitParameter>
+        </ConfigurableElement>
+        <ConfigurableElement Path="/Policy/policy/strategies/enforced_audible/selected_output_devices/mask/telephony_tx">
+          <BitParameter Name="telephony_tx">0</BitParameter>
+        </ConfigurableElement>
+        <ConfigurableElement Path="/Policy/policy/strategies/enforced_audible/selected_output_devices/mask/line">
+          <BitParameter Name="line">0</BitParameter>
         </ConfigurableElement>
       </Configuration>
     </Settings>
@@ -6081,6 +6176,9 @@
       <ConfigurableElement Path="/Policy/policy/strategies/transmitted_through_speaker/selected_output_devices/mask/usb_device"/>
       <ConfigurableElement Path="/Policy/policy/strategies/transmitted_through_speaker/selected_output_devices/mask/telephony_tx"/>
       <ConfigurableElement Path="/Policy/policy/strategies/transmitted_through_speaker/selected_output_devices/mask/line"/>
+      <ConfigurableElement Path="/Policy/policy/strategies/transmitted_through_speaker/selected_output_devices/mask/ip"/>
+      <ConfigurableElement Path="/Policy/policy/strategies/transmitted_through_speaker/selected_output_devices/mask/bus"/>
+      <ConfigurableElement Path="/Policy/policy/strategies/transmitted_through_speaker/selected_output_devices/mask/stub"/>
     </ConfigurableElements>
     <Settings>
       <Configuration Name="Calibration">
@@ -6150,6 +6248,15 @@
         <ConfigurableElement Path="/Policy/policy/strategies/transmitted_through_speaker/selected_output_devices/mask/line">
           <BitParameter Name="line">0</BitParameter>
         </ConfigurableElement>
+        <ConfigurableElement Path="/Policy/policy/strategies/transmitted_through_speaker/selected_output_devices/mask/ip">
+          <BitParameter Name="ip">0</BitParameter>
+        </ConfigurableElement>
+        <ConfigurableElement Path="/Policy/policy/strategies/transmitted_through_speaker/selected_output_devices/mask/bus">
+          <BitParameter Name="bus">0</BitParameter>
+        </ConfigurableElement>
+        <ConfigurableElement Path="/Policy/policy/strategies/transmitted_through_speaker/selected_output_devices/mask/stub">
+          <BitParameter Name="stub">0</BitParameter>
+        </ConfigurableElement>
       </Configuration>
     </Settings>
   </ConfigurableDomain>
@@ -6193,6 +6300,9 @@
       <ConfigurableElement Path="/Policy/policy/strategies/accessibility/selected_output_devices/mask/fm"/>
       <ConfigurableElement Path="/Policy/policy/strategies/accessibility/selected_output_devices/mask/speaker_safe"/>
       <ConfigurableElement Path="/Policy/policy/strategies/accessibility/selected_output_devices/mask/telephony_tx"/>
+      <ConfigurableElement Path="/Policy/policy/strategies/accessibility/selected_output_devices/mask/ip"/>
+      <ConfigurableElement Path="/Policy/policy/strategies/accessibility/selected_output_devices/mask/bus"/>
+      <ConfigurableElement Path="/Policy/policy/strategies/accessibility/selected_output_devices/mask/stub"/>
     </ConfigurableElements>
     <Settings>
       <Configuration Name="Calibration">
@@ -6214,6 +6324,15 @@
         <ConfigurableElement Path="/Policy/policy/strategies/accessibility/selected_output_devices/mask/telephony_tx">
           <BitParameter Name="telephony_tx">0</BitParameter>
         </ConfigurableElement>
+        <ConfigurableElement Path="/Policy/policy/strategies/accessibility/selected_output_devices/mask/ip">
+          <BitParameter Name="ip">0</BitParameter>
+        </ConfigurableElement>
+        <ConfigurableElement Path="/Policy/policy/strategies/accessibility/selected_output_devices/mask/bus">
+          <BitParameter Name="bus">0</BitParameter>
+        </ConfigurableElement>
+        <ConfigurableElement Path="/Policy/policy/strategies/accessibility/selected_output_devices/mask/stub">
+          <BitParameter Name="stub">0</BitParameter>
+        </ConfigurableElement>
       </Configuration>
     </Settings>
   </ConfigurableDomain>
@@ -7453,6 +7572,9 @@
       <ConfigurableElement Path="/Policy/policy/strategies/rerouting/selected_output_devices/mask/bluetooth_sco_headset"/>
       <ConfigurableElement Path="/Policy/policy/strategies/rerouting/selected_output_devices/mask/bluetooth_sco_carkit"/>
       <ConfigurableElement Path="/Policy/policy/strategies/rerouting/selected_output_devices/mask/telephony_tx"/>
+      <ConfigurableElement Path="/Policy/policy/strategies/rerouting/selected_output_devices/mask/ip"/>
+      <ConfigurableElement Path="/Policy/policy/strategies/rerouting/selected_output_devices/mask/bus"/>
+      <ConfigurableElement Path="/Policy/policy/strategies/rerouting/selected_output_devices/mask/stub"/>
     </ConfigurableElements>
     <Settings>
       <Configuration Name="Calibration">
@@ -7486,6 +7608,15 @@
         <ConfigurableElement Path="/Policy/policy/strategies/rerouting/selected_output_devices/mask/telephony_tx">
           <BitParameter Name="telephony_tx">0</BitParameter>
         </ConfigurableElement>
+        <ConfigurableElement Path="/Policy/policy/strategies/rerouting/selected_output_devices/mask/ip">
+          <BitParameter Name="ip">0</BitParameter>
+        </ConfigurableElement>
+        <ConfigurableElement Path="/Policy/policy/strategies/rerouting/selected_output_devices/mask/bus">
+          <BitParameter Name="bus">0</BitParameter>
+        </ConfigurableElement>
+        <ConfigurableElement Path="/Policy/policy/strategies/rerouting/selected_output_devices/mask/stub">
+          <BitParameter Name="stub">0</BitParameter>
+        </ConfigurableElement>
       </Configuration>
     </Settings>
   </ConfigurableDomain>
@@ -8378,7 +8509,6 @@
       </Configuration>
     </Configurations>
     <ConfigurableElements>
-      <ConfigurableElement Path="/Policy/policy/input_sources/default/applicable_input_device/mask/in"/>
       <ConfigurableElement Path="/Policy/policy/input_sources/default/applicable_input_device/mask/communication"/>
       <ConfigurableElement Path="/Policy/policy/input_sources/default/applicable_input_device/mask/ambient"/>
       <ConfigurableElement Path="/Policy/policy/input_sources/default/applicable_input_device/mask/hdmi"/>
@@ -8393,7 +8523,9 @@
       <ConfigurableElement Path="/Policy/policy/input_sources/default/applicable_input_device/mask/line"/>
       <ConfigurableElement Path="/Policy/policy/input_sources/default/applicable_input_device/mask/spdif"/>
       <ConfigurableElement Path="/Policy/policy/input_sources/default/applicable_input_device/mask/loopback"/>
-      <ConfigurableElement Path="/Policy/policy/input_sources/mic/applicable_input_device/mask/in"/>
+      <ConfigurableElement Path="/Policy/policy/input_sources/default/applicable_input_device/mask/ip"/>
+      <ConfigurableElement Path="/Policy/policy/input_sources/default/applicable_input_device/mask/bus"/>
+      <ConfigurableElement Path="/Policy/policy/input_sources/default/applicable_input_device/mask/stub"/>
       <ConfigurableElement Path="/Policy/policy/input_sources/mic/applicable_input_device/mask/communication"/>
       <ConfigurableElement Path="/Policy/policy/input_sources/mic/applicable_input_device/mask/ambient"/>
       <ConfigurableElement Path="/Policy/policy/input_sources/mic/applicable_input_device/mask/hdmi"/>
@@ -8408,7 +8540,9 @@
       <ConfigurableElement Path="/Policy/policy/input_sources/mic/applicable_input_device/mask/line"/>
       <ConfigurableElement Path="/Policy/policy/input_sources/mic/applicable_input_device/mask/spdif"/>
       <ConfigurableElement Path="/Policy/policy/input_sources/mic/applicable_input_device/mask/loopback"/>
-      <ConfigurableElement Path="/Policy/policy/input_sources/voice_downlink/applicable_input_device/mask/in"/>
+      <ConfigurableElement Path="/Policy/policy/input_sources/mic/applicable_input_device/mask/ip"/>
+      <ConfigurableElement Path="/Policy/policy/input_sources/mic/applicable_input_device/mask/bus"/>
+      <ConfigurableElement Path="/Policy/policy/input_sources/mic/applicable_input_device/mask/stub"/>
       <ConfigurableElement Path="/Policy/policy/input_sources/voice_downlink/applicable_input_device/mask/communication"/>
       <ConfigurableElement Path="/Policy/policy/input_sources/voice_downlink/applicable_input_device/mask/ambient"/>
       <ConfigurableElement Path="/Policy/policy/input_sources/voice_downlink/applicable_input_device/mask/builtin_mic"/>
@@ -8427,7 +8561,9 @@
       <ConfigurableElement Path="/Policy/policy/input_sources/voice_downlink/applicable_input_device/mask/spdif"/>
       <ConfigurableElement Path="/Policy/policy/input_sources/voice_downlink/applicable_input_device/mask/bluetooth_a2dp"/>
       <ConfigurableElement Path="/Policy/policy/input_sources/voice_downlink/applicable_input_device/mask/loopback"/>
-      <ConfigurableElement Path="/Policy/policy/input_sources/voice_call/applicable_input_device/mask/in"/>
+      <ConfigurableElement Path="/Policy/policy/input_sources/voice_downlink/applicable_input_device/mask/ip"/>
+      <ConfigurableElement Path="/Policy/policy/input_sources/voice_downlink/applicable_input_device/mask/bus"/>
+      <ConfigurableElement Path="/Policy/policy/input_sources/voice_downlink/applicable_input_device/mask/stub"/>
       <ConfigurableElement Path="/Policy/policy/input_sources/voice_call/applicable_input_device/mask/communication"/>
       <ConfigurableElement Path="/Policy/policy/input_sources/voice_call/applicable_input_device/mask/ambient"/>
       <ConfigurableElement Path="/Policy/policy/input_sources/voice_call/applicable_input_device/mask/builtin_mic"/>
@@ -8446,7 +8582,9 @@
       <ConfigurableElement Path="/Policy/policy/input_sources/voice_call/applicable_input_device/mask/spdif"/>
       <ConfigurableElement Path="/Policy/policy/input_sources/voice_call/applicable_input_device/mask/bluetooth_a2dp"/>
       <ConfigurableElement Path="/Policy/policy/input_sources/voice_call/applicable_input_device/mask/loopback"/>
-      <ConfigurableElement Path="/Policy/policy/input_sources/voice_uplink/applicable_input_device/mask/in"/>
+      <ConfigurableElement Path="/Policy/policy/input_sources/voice_call/applicable_input_device/mask/ip"/>
+      <ConfigurableElement Path="/Policy/policy/input_sources/voice_call/applicable_input_device/mask/bus"/>
+      <ConfigurableElement Path="/Policy/policy/input_sources/voice_call/applicable_input_device/mask/stub"/>
       <ConfigurableElement Path="/Policy/policy/input_sources/voice_uplink/applicable_input_device/mask/communication"/>
       <ConfigurableElement Path="/Policy/policy/input_sources/voice_uplink/applicable_input_device/mask/ambient"/>
       <ConfigurableElement Path="/Policy/policy/input_sources/voice_uplink/applicable_input_device/mask/builtin_mic"/>
@@ -8465,7 +8603,9 @@
       <ConfigurableElement Path="/Policy/policy/input_sources/voice_uplink/applicable_input_device/mask/spdif"/>
       <ConfigurableElement Path="/Policy/policy/input_sources/voice_uplink/applicable_input_device/mask/bluetooth_a2dp"/>
       <ConfigurableElement Path="/Policy/policy/input_sources/voice_uplink/applicable_input_device/mask/loopback"/>
-      <ConfigurableElement Path="/Policy/policy/input_sources/camcorder/applicable_input_device/mask/in"/>
+      <ConfigurableElement Path="/Policy/policy/input_sources/voice_uplink/applicable_input_device/mask/ip"/>
+      <ConfigurableElement Path="/Policy/policy/input_sources/voice_uplink/applicable_input_device/mask/bus"/>
+      <ConfigurableElement Path="/Policy/policy/input_sources/voice_uplink/applicable_input_device/mask/stub"/>
       <ConfigurableElement Path="/Policy/policy/input_sources/camcorder/applicable_input_device/mask/communication"/>
       <ConfigurableElement Path="/Policy/policy/input_sources/camcorder/applicable_input_device/mask/ambient"/>
       <ConfigurableElement Path="/Policy/policy/input_sources/camcorder/applicable_input_device/mask/bluetooth_sco_headset"/>
@@ -8483,7 +8623,9 @@
       <ConfigurableElement Path="/Policy/policy/input_sources/camcorder/applicable_input_device/mask/spdif"/>
       <ConfigurableElement Path="/Policy/policy/input_sources/camcorder/applicable_input_device/mask/bluetooth_a2dp"/>
       <ConfigurableElement Path="/Policy/policy/input_sources/camcorder/applicable_input_device/mask/loopback"/>
-      <ConfigurableElement Path="/Policy/policy/input_sources/voice_recognition/applicable_input_device/mask/in"/>
+      <ConfigurableElement Path="/Policy/policy/input_sources/camcorder/applicable_input_device/mask/ip"/>
+      <ConfigurableElement Path="/Policy/policy/input_sources/camcorder/applicable_input_device/mask/bus"/>
+      <ConfigurableElement Path="/Policy/policy/input_sources/camcorder/applicable_input_device/mask/stub"/>
       <ConfigurableElement Path="/Policy/policy/input_sources/voice_recognition/applicable_input_device/mask/communication"/>
       <ConfigurableElement Path="/Policy/policy/input_sources/voice_recognition/applicable_input_device/mask/ambient"/>
       <ConfigurableElement Path="/Policy/policy/input_sources/voice_recognition/applicable_input_device/mask/hdmi"/>
@@ -8499,7 +8641,9 @@
       <ConfigurableElement Path="/Policy/policy/input_sources/voice_recognition/applicable_input_device/mask/spdif"/>
       <ConfigurableElement Path="/Policy/policy/input_sources/voice_recognition/applicable_input_device/mask/bluetooth_a2dp"/>
       <ConfigurableElement Path="/Policy/policy/input_sources/voice_recognition/applicable_input_device/mask/loopback"/>
-      <ConfigurableElement Path="/Policy/policy/input_sources/voice_communication/applicable_input_device/mask/in"/>
+      <ConfigurableElement Path="/Policy/policy/input_sources/voice_recognition/applicable_input_device/mask/ip"/>
+      <ConfigurableElement Path="/Policy/policy/input_sources/voice_recognition/applicable_input_device/mask/bus"/>
+      <ConfigurableElement Path="/Policy/policy/input_sources/voice_recognition/applicable_input_device/mask/stub"/>
       <ConfigurableElement Path="/Policy/policy/input_sources/voice_communication/applicable_input_device/mask/communication"/>
       <ConfigurableElement Path="/Policy/policy/input_sources/voice_communication/applicable_input_device/mask/ambient"/>
       <ConfigurableElement Path="/Policy/policy/input_sources/voice_communication/applicable_input_device/mask/hdmi"/>
@@ -8514,7 +8658,9 @@
       <ConfigurableElement Path="/Policy/policy/input_sources/voice_communication/applicable_input_device/mask/spdif"/>
       <ConfigurableElement Path="/Policy/policy/input_sources/voice_communication/applicable_input_device/mask/bluetooth_a2dp"/>
       <ConfigurableElement Path="/Policy/policy/input_sources/voice_communication/applicable_input_device/mask/loopback"/>
-      <ConfigurableElement Path="/Policy/policy/input_sources/remote_submix/applicable_input_device/mask/in"/>
+      <ConfigurableElement Path="/Policy/policy/input_sources/voice_communication/applicable_input_device/mask/ip"/>
+      <ConfigurableElement Path="/Policy/policy/input_sources/voice_communication/applicable_input_device/mask/bus"/>
+      <ConfigurableElement Path="/Policy/policy/input_sources/voice_communication/applicable_input_device/mask/stub"/>
       <ConfigurableElement Path="/Policy/policy/input_sources/remote_submix/applicable_input_device/mask/communication"/>
       <ConfigurableElement Path="/Policy/policy/input_sources/remote_submix/applicable_input_device/mask/ambient"/>
       <ConfigurableElement Path="/Policy/policy/input_sources/remote_submix/applicable_input_device/mask/builtin_mic"/>
@@ -8533,7 +8679,9 @@
       <ConfigurableElement Path="/Policy/policy/input_sources/remote_submix/applicable_input_device/mask/spdif"/>
       <ConfigurableElement Path="/Policy/policy/input_sources/remote_submix/applicable_input_device/mask/bluetooth_a2dp"/>
       <ConfigurableElement Path="/Policy/policy/input_sources/remote_submix/applicable_input_device/mask/loopback"/>
-      <ConfigurableElement Path="/Policy/policy/input_sources/hotword/applicable_input_device/mask/in"/>
+      <ConfigurableElement Path="/Policy/policy/input_sources/remote_submix/applicable_input_device/mask/ip"/>
+      <ConfigurableElement Path="/Policy/policy/input_sources/remote_submix/applicable_input_device/mask/bus"/>
+      <ConfigurableElement Path="/Policy/policy/input_sources/remote_submix/applicable_input_device/mask/stub"/>
       <ConfigurableElement Path="/Policy/policy/input_sources/hotword/applicable_input_device/mask/communication"/>
       <ConfigurableElement Path="/Policy/policy/input_sources/hotword/applicable_input_device/mask/ambient"/>
       <ConfigurableElement Path="/Policy/policy/input_sources/hotword/applicable_input_device/mask/hdmi"/>
@@ -8549,7 +8697,9 @@
       <ConfigurableElement Path="/Policy/policy/input_sources/hotword/applicable_input_device/mask/spdif"/>
       <ConfigurableElement Path="/Policy/policy/input_sources/hotword/applicable_input_device/mask/bluetooth_a2dp"/>
       <ConfigurableElement Path="/Policy/policy/input_sources/hotword/applicable_input_device/mask/loopback"/>
-      <ConfigurableElement Path="/Policy/policy/input_sources/unprocessed/applicable_input_device/mask/in"/>
+      <ConfigurableElement Path="/Policy/policy/input_sources/hotword/applicable_input_device/mask/ip"/>
+      <ConfigurableElement Path="/Policy/policy/input_sources/hotword/applicable_input_device/mask/bus"/>
+      <ConfigurableElement Path="/Policy/policy/input_sources/hotword/applicable_input_device/mask/stub"/>
       <ConfigurableElement Path="/Policy/policy/input_sources/unprocessed/applicable_input_device/mask/communication"/>
       <ConfigurableElement Path="/Policy/policy/input_sources/unprocessed/applicable_input_device/mask/ambient"/>
       <ConfigurableElement Path="/Policy/policy/input_sources/unprocessed/applicable_input_device/mask/hdmi"/>
@@ -8565,7 +8715,9 @@
       <ConfigurableElement Path="/Policy/policy/input_sources/unprocessed/applicable_input_device/mask/spdif"/>
       <ConfigurableElement Path="/Policy/policy/input_sources/unprocessed/applicable_input_device/mask/bluetooth_a2dp"/>
       <ConfigurableElement Path="/Policy/policy/input_sources/unprocessed/applicable_input_device/mask/loopback"/>
-      <ConfigurableElement Path="/Policy/policy/input_sources/fm_tuner/applicable_input_device/mask/in"/>
+      <ConfigurableElement Path="/Policy/policy/input_sources/unprocessed/applicable_input_device/mask/ip"/>
+      <ConfigurableElement Path="/Policy/policy/input_sources/unprocessed/applicable_input_device/mask/bus"/>
+      <ConfigurableElement Path="/Policy/policy/input_sources/unprocessed/applicable_input_device/mask/stub"/>
       <ConfigurableElement Path="/Policy/policy/input_sources/fm_tuner/applicable_input_device/mask/communication"/>
       <ConfigurableElement Path="/Policy/policy/input_sources/fm_tuner/applicable_input_device/mask/ambient"/>
       <ConfigurableElement Path="/Policy/policy/input_sources/fm_tuner/applicable_input_device/mask/bluetooth_sco_headset"/>
@@ -8584,12 +8736,11 @@
       <ConfigurableElement Path="/Policy/policy/input_sources/fm_tuner/applicable_input_device/mask/spdif"/>
       <ConfigurableElement Path="/Policy/policy/input_sources/fm_tuner/applicable_input_device/mask/bluetooth_a2dp"/>
       <ConfigurableElement Path="/Policy/policy/input_sources/fm_tuner/applicable_input_device/mask/loopback"/>
+      <ConfigurableElement Path="/Policy/policy/input_sources/fm_tuner/applicable_input_device/mask/ip"/>
+      <ConfigurableElement Path="/Policy/policy/input_sources/fm_tuner/applicable_input_device/mask/bus"/>
     </ConfigurableElements>
     <Settings>
       <Configuration Name="Calibration">
-        <ConfigurableElement Path="/Policy/policy/input_sources/default/applicable_input_device/mask/in">
-          <BitParameter Name="in">1</BitParameter>
-        </ConfigurableElement>
         <ConfigurableElement Path="/Policy/policy/input_sources/default/applicable_input_device/mask/communication">
           <BitParameter Name="communication">0</BitParameter>
         </ConfigurableElement>
@@ -8632,8 +8783,14 @@
         <ConfigurableElement Path="/Policy/policy/input_sources/default/applicable_input_device/mask/loopback">
           <BitParameter Name="loopback">0</BitParameter>
         </ConfigurableElement>
-        <ConfigurableElement Path="/Policy/policy/input_sources/mic/applicable_input_device/mask/in">
-          <BitParameter Name="in">1</BitParameter>
+        <ConfigurableElement Path="/Policy/policy/input_sources/default/applicable_input_device/mask/ip">
+          <BitParameter Name="ip">0</BitParameter>
+        </ConfigurableElement>
+        <ConfigurableElement Path="/Policy/policy/input_sources/default/applicable_input_device/mask/bus">
+          <BitParameter Name="bus">0</BitParameter>
+        </ConfigurableElement>
+        <ConfigurableElement Path="/Policy/policy/input_sources/default/applicable_input_device/mask/stub">
+          <BitParameter Name="stub">0</BitParameter>
         </ConfigurableElement>
         <ConfigurableElement Path="/Policy/policy/input_sources/mic/applicable_input_device/mask/communication">
           <BitParameter Name="communication">0</BitParameter>
@@ -8677,8 +8834,14 @@
         <ConfigurableElement Path="/Policy/policy/input_sources/mic/applicable_input_device/mask/loopback">
           <BitParameter Name="loopback">0</BitParameter>
         </ConfigurableElement>
-        <ConfigurableElement Path="/Policy/policy/input_sources/voice_downlink/applicable_input_device/mask/in">
-          <BitParameter Name="in">1</BitParameter>
+        <ConfigurableElement Path="/Policy/policy/input_sources/mic/applicable_input_device/mask/ip">
+          <BitParameter Name="ip">0</BitParameter>
+        </ConfigurableElement>
+        <ConfigurableElement Path="/Policy/policy/input_sources/mic/applicable_input_device/mask/bus">
+          <BitParameter Name="bus">0</BitParameter>
+        </ConfigurableElement>
+        <ConfigurableElement Path="/Policy/policy/input_sources/mic/applicable_input_device/mask/stub">
+          <BitParameter Name="stub">0</BitParameter>
         </ConfigurableElement>
         <ConfigurableElement Path="/Policy/policy/input_sources/voice_downlink/applicable_input_device/mask/communication">
           <BitParameter Name="communication">0</BitParameter>
@@ -8734,8 +8897,14 @@
         <ConfigurableElement Path="/Policy/policy/input_sources/voice_downlink/applicable_input_device/mask/loopback">
           <BitParameter Name="loopback">0</BitParameter>
         </ConfigurableElement>
-        <ConfigurableElement Path="/Policy/policy/input_sources/voice_call/applicable_input_device/mask/in">
-          <BitParameter Name="in">1</BitParameter>
+        <ConfigurableElement Path="/Policy/policy/input_sources/voice_downlink/applicable_input_device/mask/ip">
+          <BitParameter Name="ip">0</BitParameter>
+        </ConfigurableElement>
+        <ConfigurableElement Path="/Policy/policy/input_sources/voice_downlink/applicable_input_device/mask/bus">
+          <BitParameter Name="bus">0</BitParameter>
+        </ConfigurableElement>
+        <ConfigurableElement Path="/Policy/policy/input_sources/voice_downlink/applicable_input_device/mask/stub">
+          <BitParameter Name="stub">0</BitParameter>
         </ConfigurableElement>
         <ConfigurableElement Path="/Policy/policy/input_sources/voice_call/applicable_input_device/mask/communication">
           <BitParameter Name="communication">0</BitParameter>
@@ -8791,8 +8960,14 @@
         <ConfigurableElement Path="/Policy/policy/input_sources/voice_call/applicable_input_device/mask/loopback">
           <BitParameter Name="loopback">0</BitParameter>
         </ConfigurableElement>
-        <ConfigurableElement Path="/Policy/policy/input_sources/voice_uplink/applicable_input_device/mask/in">
-          <BitParameter Name="in">1</BitParameter>
+        <ConfigurableElement Path="/Policy/policy/input_sources/voice_call/applicable_input_device/mask/ip">
+          <BitParameter Name="ip">0</BitParameter>
+        </ConfigurableElement>
+        <ConfigurableElement Path="/Policy/policy/input_sources/voice_call/applicable_input_device/mask/bus">
+          <BitParameter Name="bus">0</BitParameter>
+        </ConfigurableElement>
+        <ConfigurableElement Path="/Policy/policy/input_sources/voice_call/applicable_input_device/mask/stub">
+          <BitParameter Name="stub">0</BitParameter>
         </ConfigurableElement>
         <ConfigurableElement Path="/Policy/policy/input_sources/voice_uplink/applicable_input_device/mask/communication">
           <BitParameter Name="communication">0</BitParameter>
@@ -8848,8 +9023,14 @@
         <ConfigurableElement Path="/Policy/policy/input_sources/voice_uplink/applicable_input_device/mask/loopback">
           <BitParameter Name="loopback">0</BitParameter>
         </ConfigurableElement>
-        <ConfigurableElement Path="/Policy/policy/input_sources/camcorder/applicable_input_device/mask/in">
-          <BitParameter Name="in">1</BitParameter>
+        <ConfigurableElement Path="/Policy/policy/input_sources/voice_uplink/applicable_input_device/mask/ip">
+          <BitParameter Name="ip">0</BitParameter>
+        </ConfigurableElement>
+        <ConfigurableElement Path="/Policy/policy/input_sources/voice_uplink/applicable_input_device/mask/bus">
+          <BitParameter Name="bus">0</BitParameter>
+        </ConfigurableElement>
+        <ConfigurableElement Path="/Policy/policy/input_sources/voice_uplink/applicable_input_device/mask/stub">
+          <BitParameter Name="stub">0</BitParameter>
         </ConfigurableElement>
         <ConfigurableElement Path="/Policy/policy/input_sources/camcorder/applicable_input_device/mask/communication">
           <BitParameter Name="communication">0</BitParameter>
@@ -8902,8 +9083,14 @@
         <ConfigurableElement Path="/Policy/policy/input_sources/camcorder/applicable_input_device/mask/loopback">
           <BitParameter Name="loopback">0</BitParameter>
         </ConfigurableElement>
-        <ConfigurableElement Path="/Policy/policy/input_sources/voice_recognition/applicable_input_device/mask/in">
-          <BitParameter Name="in">1</BitParameter>
+        <ConfigurableElement Path="/Policy/policy/input_sources/camcorder/applicable_input_device/mask/ip">
+          <BitParameter Name="ip">0</BitParameter>
+        </ConfigurableElement>
+        <ConfigurableElement Path="/Policy/policy/input_sources/camcorder/applicable_input_device/mask/bus">
+          <BitParameter Name="bus">0</BitParameter>
+        </ConfigurableElement>
+        <ConfigurableElement Path="/Policy/policy/input_sources/camcorder/applicable_input_device/mask/stub">
+          <BitParameter Name="stub">0</BitParameter>
         </ConfigurableElement>
         <ConfigurableElement Path="/Policy/policy/input_sources/voice_recognition/applicable_input_device/mask/communication">
           <BitParameter Name="communication">0</BitParameter>
@@ -8950,8 +9137,14 @@
         <ConfigurableElement Path="/Policy/policy/input_sources/voice_recognition/applicable_input_device/mask/loopback">
           <BitParameter Name="loopback">0</BitParameter>
         </ConfigurableElement>
-        <ConfigurableElement Path="/Policy/policy/input_sources/voice_communication/applicable_input_device/mask/in">
-          <BitParameter Name="in">1</BitParameter>
+        <ConfigurableElement Path="/Policy/policy/input_sources/voice_recognition/applicable_input_device/mask/ip">
+          <BitParameter Name="ip">0</BitParameter>
+        </ConfigurableElement>
+        <ConfigurableElement Path="/Policy/policy/input_sources/voice_recognition/applicable_input_device/mask/bus">
+          <BitParameter Name="bus">0</BitParameter>
+        </ConfigurableElement>
+        <ConfigurableElement Path="/Policy/policy/input_sources/voice_recognition/applicable_input_device/mask/stub">
+          <BitParameter Name="stub">0</BitParameter>
         </ConfigurableElement>
         <ConfigurableElement Path="/Policy/policy/input_sources/voice_communication/applicable_input_device/mask/communication">
           <BitParameter Name="communication">0</BitParameter>
@@ -8995,8 +9188,14 @@
         <ConfigurableElement Path="/Policy/policy/input_sources/voice_communication/applicable_input_device/mask/loopback">
           <BitParameter Name="loopback">0</BitParameter>
         </ConfigurableElement>
-        <ConfigurableElement Path="/Policy/policy/input_sources/remote_submix/applicable_input_device/mask/in">
-          <BitParameter Name="in">1</BitParameter>
+        <ConfigurableElement Path="/Policy/policy/input_sources/voice_communication/applicable_input_device/mask/ip">
+          <BitParameter Name="ip">0</BitParameter>
+        </ConfigurableElement>
+        <ConfigurableElement Path="/Policy/policy/input_sources/voice_communication/applicable_input_device/mask/bus">
+          <BitParameter Name="bus">0</BitParameter>
+        </ConfigurableElement>
+        <ConfigurableElement Path="/Policy/policy/input_sources/voice_communication/applicable_input_device/mask/stub">
+          <BitParameter Name="stub">0</BitParameter>
         </ConfigurableElement>
         <ConfigurableElement Path="/Policy/policy/input_sources/remote_submix/applicable_input_device/mask/communication">
           <BitParameter Name="communication">0</BitParameter>
@@ -9052,8 +9251,14 @@
         <ConfigurableElement Path="/Policy/policy/input_sources/remote_submix/applicable_input_device/mask/loopback">
           <BitParameter Name="loopback">0</BitParameter>
         </ConfigurableElement>
-        <ConfigurableElement Path="/Policy/policy/input_sources/hotword/applicable_input_device/mask/in">
-          <BitParameter Name="in">1</BitParameter>
+        <ConfigurableElement Path="/Policy/policy/input_sources/remote_submix/applicable_input_device/mask/ip">
+          <BitParameter Name="ip">0</BitParameter>
+        </ConfigurableElement>
+        <ConfigurableElement Path="/Policy/policy/input_sources/remote_submix/applicable_input_device/mask/bus">
+          <BitParameter Name="bus">0</BitParameter>
+        </ConfigurableElement>
+        <ConfigurableElement Path="/Policy/policy/input_sources/remote_submix/applicable_input_device/mask/stub">
+          <BitParameter Name="stub">0</BitParameter>
         </ConfigurableElement>
         <ConfigurableElement Path="/Policy/policy/input_sources/hotword/applicable_input_device/mask/communication">
           <BitParameter Name="communication">0</BitParameter>
@@ -9100,8 +9305,14 @@
         <ConfigurableElement Path="/Policy/policy/input_sources/hotword/applicable_input_device/mask/loopback">
           <BitParameter Name="loopback">0</BitParameter>
         </ConfigurableElement>
-        <ConfigurableElement Path="/Policy/policy/input_sources/unprocessed/applicable_input_device/mask/in">
-          <BitParameter Name="in">1</BitParameter>
+        <ConfigurableElement Path="/Policy/policy/input_sources/hotword/applicable_input_device/mask/ip">
+          <BitParameter Name="ip">0</BitParameter>
+        </ConfigurableElement>
+        <ConfigurableElement Path="/Policy/policy/input_sources/hotword/applicable_input_device/mask/bus">
+          <BitParameter Name="bus">0</BitParameter>
+        </ConfigurableElement>
+        <ConfigurableElement Path="/Policy/policy/input_sources/hotword/applicable_input_device/mask/stub">
+          <BitParameter Name="stub">0</BitParameter>
         </ConfigurableElement>
         <ConfigurableElement Path="/Policy/policy/input_sources/unprocessed/applicable_input_device/mask/communication">
           <BitParameter Name="communication">0</BitParameter>
@@ -9148,8 +9359,14 @@
         <ConfigurableElement Path="/Policy/policy/input_sources/unprocessed/applicable_input_device/mask/loopback">
           <BitParameter Name="loopback">0</BitParameter>
         </ConfigurableElement>
-        <ConfigurableElement Path="/Policy/policy/input_sources/fm_tuner/applicable_input_device/mask/in">
-          <BitParameter Name="in">1</BitParameter>
+        <ConfigurableElement Path="/Policy/policy/input_sources/unprocessed/applicable_input_device/mask/ip">
+          <BitParameter Name="ip">0</BitParameter>
+        </ConfigurableElement>
+        <ConfigurableElement Path="/Policy/policy/input_sources/unprocessed/applicable_input_device/mask/bus">
+          <BitParameter Name="bus">0</BitParameter>
+        </ConfigurableElement>
+        <ConfigurableElement Path="/Policy/policy/input_sources/unprocessed/applicable_input_device/mask/stub">
+          <BitParameter Name="stub">0</BitParameter>
         </ConfigurableElement>
         <ConfigurableElement Path="/Policy/policy/input_sources/fm_tuner/applicable_input_device/mask/communication">
           <BitParameter Name="communication">0</BitParameter>
@@ -9205,6 +9422,12 @@
         <ConfigurableElement Path="/Policy/policy/input_sources/fm_tuner/applicable_input_device/mask/loopback">
           <BitParameter Name="loopback">0</BitParameter>
         </ConfigurableElement>
+        <ConfigurableElement Path="/Policy/policy/input_sources/fm_tuner/applicable_input_device/mask/ip">
+          <BitParameter Name="ip">0</BitParameter>
+        </ConfigurableElement>
+        <ConfigurableElement Path="/Policy/policy/input_sources/fm_tuner/applicable_input_device/mask/bus">
+          <BitParameter Name="bus">0</BitParameter>
+        </ConfigurableElement>
       </Configuration>
     </Settings>
   </ConfigurableDomain>
diff --git a/services/audiopolicy/engineconfigurable/parameter-framework/example/Settings/device_for_input_source.pfw b/services/audiopolicy/engineconfigurable/parameter-framework/examples/Settings/device_for_input_source.pfw
similarity index 91%
rename from services/audiopolicy/engineconfigurable/parameter-framework/example/Settings/device_for_input_source.pfw
rename to services/audiopolicy/engineconfigurable/parameter-framework/examples/Settings/device_for_input_source.pfw
index 16bcb01..a990879 100644
--- a/services/audiopolicy/engineconfigurable/parameter-framework/example/Settings/device_for_input_source.pfw
+++ b/services/audiopolicy/engineconfigurable/parameter-framework/examples/Settings/device_for_input_source.pfw
@@ -1,12 +1,7 @@
 supDomain: DeviceForInputSource
 	domain: Calibration
 		conf: Calibration
-			#
-			# Note that ALL input devices must have the sign bit set to 1.
-			# As the devices is a mask, use the "in" bit as a direction indicator.
-			#
 			component: /Policy/policy/input_sources/default/applicable_input_device/mask
-				in = 1
 				communication = 0
 				ambient = 0
 				hdmi = 0
@@ -21,8 +16,10 @@
 				line = 0
 				spdif = 0
 				loopback = 0
+				ip = 0
+				bus = 0
+				stub = 0
 			component: /Policy/policy/input_sources/mic/applicable_input_device/mask
-				in = 1
 				communication = 0
 				ambient = 0
 				hdmi = 0
@@ -37,8 +34,10 @@
 				line = 0
 				spdif = 0
 				loopback = 0
+				ip = 0
+				bus = 0
+				stub = 0
 			component: /Policy/policy/input_sources/voice_downlink/applicable_input_device/mask
-				in = 1
 				communication = 0
 				ambient = 0
 				builtin_mic = 0
@@ -57,8 +56,10 @@
 				spdif = 0
 				bluetooth_a2dp = 0
 				loopback = 0
+				ip = 0
+				bus = 0
+				stub = 0
 			component: /Policy/policy/input_sources/voice_call/applicable_input_device/mask
-				in = 1
 				communication = 0
 				ambient = 0
 				builtin_mic = 0
@@ -77,8 +78,10 @@
 				spdif = 0
 				bluetooth_a2dp = 0
 				loopback = 0
+				ip = 0
+				bus = 0
+				stub = 0
 			component: /Policy/policy/input_sources/voice_uplink/applicable_input_device/mask
-				in = 1
 				communication = 0
 				ambient = 0
 				builtin_mic = 0
@@ -97,8 +100,10 @@
 				spdif = 0
 				bluetooth_a2dp = 0
 				loopback = 0
+				ip = 0
+				bus = 0
+				stub = 0
 			component: /Policy/policy/input_sources/camcorder/applicable_input_device/mask
-				in = 1
 				communication = 0
 				ambient = 0
 				bluetooth_sco_headset = 0
@@ -116,8 +121,10 @@
 				spdif = 0
 				bluetooth_a2dp = 0
 				loopback = 0
+				ip = 0
+				bus = 0
+				stub = 0
 			component: /Policy/policy/input_sources/voice_recognition/applicable_input_device/mask
-				in = 1
 				communication = 0
 				ambient = 0
 				hdmi = 0
@@ -133,8 +140,10 @@
 				spdif = 0
 				bluetooth_a2dp = 0
 				loopback = 0
+				ip = 0
+				bus = 0
+				stub = 0
 			component: /Policy/policy/input_sources/voice_communication/applicable_input_device/mask
-				in = 1
 				communication = 0
 				ambient = 0
 				hdmi = 0
@@ -149,8 +158,10 @@
 				spdif = 0
 				bluetooth_a2dp = 0
 				loopback = 0
+				ip = 0
+				bus = 0
+				stub = 0
 			component: /Policy/policy/input_sources/remote_submix/applicable_input_device/mask
-				in = 1
 				communication = 0
 				ambient = 0
 				builtin_mic = 0
@@ -169,8 +180,10 @@
 				spdif = 0
 				bluetooth_a2dp = 0
 				loopback = 0
+				ip = 0
+				bus = 0
+				stub = 0
 			component: /Policy/policy/input_sources/hotword/applicable_input_device/mask
-				in = 1
 				communication = 0
 				ambient = 0
 				hdmi = 0
@@ -186,8 +199,10 @@
 				spdif = 0
 				bluetooth_a2dp = 0
 				loopback = 0
+				ip = 0
+				bus = 0
+				stub = 0
 			component: /Policy/policy/input_sources/unprocessed/applicable_input_device/mask
-				in = 1
 				communication = 0
 				ambient = 0
 				hdmi = 0
@@ -203,8 +218,10 @@
 				spdif = 0
 				bluetooth_a2dp = 0
 				loopback = 0
+				ip = 0
+				bus = 0
+				stub = 0
 			component: /Policy/policy/input_sources/fm_tuner/applicable_input_device/mask
-				in = 1
 				communication = 0
 				ambient = 0
 				bluetooth_sco_headset = 0
@@ -223,6 +240,9 @@
 				spdif = 0
 				bluetooth_a2dp = 0
 				loopback = 0
+				ip = 0
+				bus = 0
+				stub = 0
 
 	domain: DefaultAndMic
 		conf: A2dp
@@ -361,7 +381,7 @@
 				back_mic = 0
 				builtin_mic = 0
 
-	domain: VoiceRecognitionAndHotword
+	domain: VoiceRecognitionAndUnprocessedAndHotword
 		conf: ScoHeadset
 			ForceUseForRecord Is ForceBtSco
 			AvailableInputDevices Includes BluetoothScoHeadset
@@ -372,6 +392,11 @@
 					wired_headset = 0
 					usb_device = 0
 					builtin_mic = 0
+				component: unprocessed/applicable_input_device/mask
+					bluetooth_sco_headset = 1
+					wired_headset = 0
+					usb_device = 0
+					builtin_mic = 0
 				component: hotword/applicable_input_device/mask
 					bluetooth_sco_headset = 1
 					wired_headset = 0
@@ -387,6 +412,11 @@
 					wired_headset = 1
 					usb_device = 0
 					builtin_mic = 0
+				component: unprocessed/applicable_input_device/mask
+					bluetooth_sco_headset = 0
+					wired_headset = 1
+					usb_device = 0
+					builtin_mic = 0
 				component: hotword/applicable_input_device/mask
 					bluetooth_sco_headset = 0
 					wired_headset = 1
@@ -402,6 +432,11 @@
 					wired_headset = 0
 					usb_device = 1
 					builtin_mic = 0
+				component: unprocessed/applicable_input_device/mask
+					bluetooth_sco_headset = 0
+					wired_headset = 0
+					usb_device = 1
+					builtin_mic = 0
 				component: hotword/applicable_input_device/mask
 					bluetooth_sco_headset = 0
 					wired_headset = 0
@@ -417,6 +452,11 @@
 					wired_headset = 0
 					usb_device = 0
 					builtin_mic = 1
+				component: unprocessed/applicable_input_device/mask
+					bluetooth_sco_headset = 0
+					wired_headset = 0
+					usb_device = 0
+					builtin_mic = 1
 				component: hotword/applicable_input_device/mask
 					bluetooth_sco_headset = 0
 					wired_headset = 0
@@ -430,6 +470,11 @@
 					wired_headset = 0
 					usb_device = 0
 					builtin_mic = 0
+				component: unprocessed/applicable_input_device/mask
+					bluetooth_sco_headset = 0
+					wired_headset = 0
+					usb_device = 0
+					builtin_mic = 0
 				component: hotword/applicable_input_device/mask
 					bluetooth_sco_headset = 0
 					wired_headset = 0
diff --git a/services/audiopolicy/engineconfigurable/parameter-framework/example/Settings/device_for_strategy_accessibility.pfw b/services/audiopolicy/engineconfigurable/parameter-framework/examples/Settings/device_for_strategy_accessibility.pfw
similarity index 99%
rename from services/audiopolicy/engineconfigurable/parameter-framework/example/Settings/device_for_strategy_accessibility.pfw
rename to services/audiopolicy/engineconfigurable/parameter-framework/examples/Settings/device_for_strategy_accessibility.pfw
index dacf5b2..ecd56b0 100644
--- a/services/audiopolicy/engineconfigurable/parameter-framework/example/Settings/device_for_strategy_accessibility.pfw
+++ b/services/audiopolicy/engineconfigurable/parameter-framework/examples/Settings/device_for_strategy_accessibility.pfw
@@ -16,6 +16,9 @@
 					fm = 0
 					speaker_safe = 0
 					telephony_tx = 0
+					ip = 0
+					bus = 0
+					stub = 0
 
 		domain: Device
 			conf: RemoteSubmix
diff --git a/services/audiopolicy/engineconfigurable/parameter-framework/example/Settings/device_for_strategy_dtmf.pfw b/services/audiopolicy/engineconfigurable/parameter-framework/examples/Settings/device_for_strategy_dtmf.pfw
similarity index 99%
rename from services/audiopolicy/engineconfigurable/parameter-framework/example/Settings/device_for_strategy_dtmf.pfw
rename to services/audiopolicy/engineconfigurable/parameter-framework/examples/Settings/device_for_strategy_dtmf.pfw
index d9469c0..883c741 100644
--- a/services/audiopolicy/engineconfigurable/parameter-framework/example/Settings/device_for_strategy_dtmf.pfw
+++ b/services/audiopolicy/engineconfigurable/parameter-framework/examples/Settings/device_for_strategy_dtmf.pfw
@@ -8,6 +8,9 @@
 					fm = 0
 					speaker_safe = 0
 					bluetooth_sco_carkit = 0
+					ip = 0
+					bus = 0
+					stub = 0
 
 		domain: Device2
 			conf: RemoteSubmix
diff --git a/services/audiopolicy/engineconfigurable/parameter-framework/example/Settings/device_for_strategy_enforced_audible.pfw b/services/audiopolicy/engineconfigurable/parameter-framework/examples/Settings/device_for_strategy_enforced_audible.pfw
similarity index 94%
rename from services/audiopolicy/engineconfigurable/parameter-framework/example/Settings/device_for_strategy_enforced_audible.pfw
rename to services/audiopolicy/engineconfigurable/parameter-framework/examples/Settings/device_for_strategy_enforced_audible.pfw
index 593ef64..f504631 100644
--- a/services/audiopolicy/engineconfigurable/parameter-framework/example/Settings/device_for_strategy_enforced_audible.pfw
+++ b/services/audiopolicy/engineconfigurable/parameter-framework/examples/Settings/device_for_strategy_enforced_audible.pfw
@@ -10,6 +10,10 @@
 					spdif = 0
 					aux_line = 0
 					speaker_safe = 0
+					ip = 0
+					bus = 0
+					fm = 0
+					stub = 0
 
 		domain: Speaker
 			conf: Selected
@@ -76,7 +80,6 @@
 					usb_device = 0
 					telephony_tx = 0
 					line = 0
-					fm = 0
 
 			conf: BluetoothA2dp
 				AvailableOutputDevices Includes BluetoothA2dp
@@ -100,7 +103,6 @@
 					usb_device = 0
 					telephony_tx = 0
 					line = 0
-					fm = 0
 
 			conf: BluetoothA2dpHeadphones
 				AvailableOutputDevices Includes BluetoothA2dpHeadphones
@@ -124,7 +126,6 @@
 					usb_device = 0
 					telephony_tx = 0
 					line = 0
-					fm = 0
 
 			conf: BluetoothA2dpSpeaker
 				AvailableOutputDevices Includes BluetoothA2dpSpeaker
@@ -148,7 +149,6 @@
 					usb_device = 0
 					telephony_tx = 0
 					line = 0
-					fm = 0
 
 			conf: WiredHeadphone
 				ForceUseForMedia IsNot ForceSpeaker
@@ -172,7 +172,6 @@
 					usb_device = 0
 					telephony_tx = 0
 					line = 0
-					fm = 0
 
 			conf: Line
 				ForceUseForMedia IsNot ForceSpeaker
@@ -196,7 +195,6 @@
 					usb_device = 0
 					telephony_tx = 0
 					line = 1
-					fm = 0
 
 			conf: WiredHeadset
 				ForceUseForMedia IsNot ForceSpeaker
@@ -220,7 +218,6 @@
 					usb_device = 0
 					telephony_tx = 0
 					line = 0
-					fm = 0
 
 			conf: UsbAccessory
 				ForceUseForMedia IsNot ForceSpeaker
@@ -244,7 +241,6 @@
 					usb_device = 0
 					telephony_tx = 0
 					line = 0
-					fm = 0
 
 			conf: UsbDevice
 				ForceUseForMedia IsNot ForceSpeaker
@@ -268,7 +264,6 @@
 					usb_device = 1
 					telephony_tx = 0
 					line = 0
-					fm = 0
 
 			conf: DgtlDockHeadset
 				ForceUseForMedia IsNot ForceSpeaker
@@ -292,7 +287,6 @@
 					usb_device = 0
 					telephony_tx = 0
 					line = 0
-					fm = 0
 
 			conf: Hdmi
 				ForceUseForMedia IsNot ForceSpeaker
@@ -316,7 +310,6 @@
 					usb_device = 0
 					telephony_tx = 0
 					line = 0
-					fm = 0
 
 			conf: AnlgDockHeadset
 				ForceUseForMedia IsNot ForceSpeaker
@@ -341,5 +334,25 @@
 					usb_device = 0
 					telephony_tx = 0
 					line = 0
-					fm = 0
+
+			conf: NoDevice
+				component: /Policy/policy/strategies/enforced_audible/selected_output_devices/mask
+					remote_submix = 0
+					earpiece = 0
+					wired_headset = 0
+					wired_headphone = 0
+					bluetooth_sco = 0
+					bluetooth_sco_headset = 0
+					bluetooth_sco_carkit = 0
+					bluetooth_a2dp = 0
+					bluetooth_a2dp_headphones = 0
+					bluetooth_a2dp_speaker = 0
+					hdmi = 0
+					angl_dock_headset = 0
+					dgtl_dock_headset = 0
+					usb_accessory = 0
+					usb_device = 0
+					telephony_tx = 0
+					line = 0
+
 
diff --git a/services/audiopolicy/engineconfigurable/parameter-framework/example/Settings/device_for_strategy_media.pfw b/services/audiopolicy/engineconfigurable/parameter-framework/examples/Settings/device_for_strategy_media.pfw
similarity index 99%
rename from services/audiopolicy/engineconfigurable/parameter-framework/example/Settings/device_for_strategy_media.pfw
rename to services/audiopolicy/engineconfigurable/parameter-framework/examples/Settings/device_for_strategy_media.pfw
index 006ac60..bdb6ae0 100644
--- a/services/audiopolicy/engineconfigurable/parameter-framework/example/Settings/device_for_strategy_media.pfw
+++ b/services/audiopolicy/engineconfigurable/parameter-framework/examples/Settings/device_for_strategy_media.pfw
@@ -12,6 +12,9 @@
 					bluetooth_sco_headset = 0
 					bluetooth_sco_carkit = 0
 					telephony_tx = 0
+					ip = 0
+					bus = 0
+					stub = 0
 
 		domain: Device2
 			conf: RemoteSubmix
diff --git a/services/audiopolicy/engineconfigurable/parameter-framework/example/Settings/device_for_strategy_phone.pfw b/services/audiopolicy/engineconfigurable/parameter-framework/examples/Settings/device_for_strategy_phone.pfw
similarity index 99%
rename from services/audiopolicy/engineconfigurable/parameter-framework/example/Settings/device_for_strategy_phone.pfw
rename to services/audiopolicy/engineconfigurable/parameter-framework/examples/Settings/device_for_strategy_phone.pfw
index 0dad830..d371ad9 100644
--- a/services/audiopolicy/engineconfigurable/parameter-framework/example/Settings/device_for_strategy_phone.pfw
+++ b/services/audiopolicy/engineconfigurable/parameter-framework/examples/Settings/device_for_strategy_phone.pfw
@@ -12,6 +12,9 @@
 					spdif = 0
 					fm = 0
 					speaker_safe = 0
+					ip = 0
+					bus = 0
+					stub = 0
 
 		domain: Device
 			conf: ScoCarkit
diff --git a/services/audiopolicy/engineconfigurable/parameter-framework/example/Settings/device_for_strategy_rerouting.pfw b/services/audiopolicy/engineconfigurable/parameter-framework/examples/Settings/device_for_strategy_rerouting.pfw
similarity index 99%
rename from services/audiopolicy/engineconfigurable/parameter-framework/example/Settings/device_for_strategy_rerouting.pfw
rename to services/audiopolicy/engineconfigurable/parameter-framework/examples/Settings/device_for_strategy_rerouting.pfw
index d390a33..04e62f7 100644
--- a/services/audiopolicy/engineconfigurable/parameter-framework/example/Settings/device_for_strategy_rerouting.pfw
+++ b/services/audiopolicy/engineconfigurable/parameter-framework/examples/Settings/device_for_strategy_rerouting.pfw
@@ -17,6 +17,9 @@
 					bluetooth_sco_headset = 0
 					bluetooth_sco_carkit = 0
 					telephony_tx = 0
+					ip = 0
+					bus = 0
+					stub = 0
 
 		domain: Device2
 			conf: RemoteSubmix
diff --git a/services/audiopolicy/engineconfigurable/parameter-framework/example/Settings/device_for_strategy_sonification.pfw b/services/audiopolicy/engineconfigurable/parameter-framework/examples/Settings/device_for_strategy_sonification.pfw
similarity index 99%
rename from services/audiopolicy/engineconfigurable/parameter-framework/example/Settings/device_for_strategy_sonification.pfw
rename to services/audiopolicy/engineconfigurable/parameter-framework/examples/Settings/device_for_strategy_sonification.pfw
index 96723f6..70740d1 100644
--- a/services/audiopolicy/engineconfigurable/parameter-framework/example/Settings/device_for_strategy_sonification.pfw
+++ b/services/audiopolicy/engineconfigurable/parameter-framework/examples/Settings/device_for_strategy_sonification.pfw
@@ -16,6 +16,9 @@
 					# Sonification follows phone strategy if in call but HDMI is not reachable
 					#
 					hdmi = 0
+					ip = 0
+					bus = 0
+					stub = 0
 
 		domain: Speaker
 
diff --git a/services/audiopolicy/engineconfigurable/parameter-framework/example/Settings/device_for_strategy_sonification_respectful.pfw b/services/audiopolicy/engineconfigurable/parameter-framework/examples/Settings/device_for_strategy_sonification_respectful.pfw
similarity index 99%
rename from services/audiopolicy/engineconfigurable/parameter-framework/example/Settings/device_for_strategy_sonification_respectful.pfw
rename to services/audiopolicy/engineconfigurable/parameter-framework/examples/Settings/device_for_strategy_sonification_respectful.pfw
index 7626944..b30aa4c 100644
--- a/services/audiopolicy/engineconfigurable/parameter-framework/example/Settings/device_for_strategy_sonification_respectful.pfw
+++ b/services/audiopolicy/engineconfigurable/parameter-framework/examples/Settings/device_for_strategy_sonification_respectful.pfw
@@ -22,6 +22,9 @@
 					spdif = 0
 					fm = 0
 					telephony_tx = 0
+					ip = 0
+					bus = 0
+					stub = 0
 
 		domain: Speakers
 
diff --git a/services/audiopolicy/engineconfigurable/parameter-framework/example/Settings/device_for_strategy_transmitted_through_speaker.pfw b/services/audiopolicy/engineconfigurable/parameter-framework/examples/Settings/device_for_strategy_transmitted_through_speaker.pfw
similarity index 96%
rename from services/audiopolicy/engineconfigurable/parameter-framework/example/Settings/device_for_strategy_transmitted_through_speaker.pfw
rename to services/audiopolicy/engineconfigurable/parameter-framework/examples/Settings/device_for_strategy_transmitted_through_speaker.pfw
index e5ae9d9..9f9c211 100644
--- a/services/audiopolicy/engineconfigurable/parameter-framework/example/Settings/device_for_strategy_transmitted_through_speaker.pfw
+++ b/services/audiopolicy/engineconfigurable/parameter-framework/examples/Settings/device_for_strategy_transmitted_through_speaker.pfw
@@ -26,6 +26,9 @@
 					usb_device = 0
 					telephony_tx = 0
 					line = 0
+					ip = 0
+					bus = 0
+					stub = 0
 
 		domain: Speaker
 			conf: Selected
diff --git a/services/audiopolicy/engineconfigurable/parameter-framework/example/Settings/strategy_for_stream.pfw b/services/audiopolicy/engineconfigurable/parameter-framework/examples/Settings/strategy_for_stream.pfw
similarity index 100%
rename from services/audiopolicy/engineconfigurable/parameter-framework/example/Settings/strategy_for_stream.pfw
rename to services/audiopolicy/engineconfigurable/parameter-framework/examples/Settings/strategy_for_stream.pfw
diff --git a/services/audiopolicy/engineconfigurable/parameter-framework/example/Settings/strategy_for_usage.pfw b/services/audiopolicy/engineconfigurable/parameter-framework/examples/Settings/strategy_for_usage.pfw
similarity index 100%
rename from services/audiopolicy/engineconfigurable/parameter-framework/example/Settings/strategy_for_usage.pfw
rename to services/audiopolicy/engineconfigurable/parameter-framework/examples/Settings/strategy_for_usage.pfw
diff --git a/services/audiopolicy/engineconfigurable/parameter-framework/example/Settings/volumes.pfw b/services/audiopolicy/engineconfigurable/parameter-framework/examples/Settings/volumes.pfw
similarity index 100%
rename from services/audiopolicy/engineconfigurable/parameter-framework/example/Settings/volumes.pfw
rename to services/audiopolicy/engineconfigurable/parameter-framework/examples/Settings/volumes.pfw
diff --git a/services/audiopolicy/engineconfigurable/parameter-framework/examples/SettingsNoInput/device_for_input_source.pfw b/services/audiopolicy/engineconfigurable/parameter-framework/examples/SettingsNoInput/device_for_input_source.pfw
new file mode 100644
index 0000000..611d8f5
--- /dev/null
+++ b/services/audiopolicy/engineconfigurable/parameter-framework/examples/SettingsNoInput/device_for_input_source.pfw
@@ -0,0 +1,285 @@
+supDomain: DeviceForInputSource
+	domain: Calibration
+		conf: Calibration
+			#
+			# Input device masks in this structure no longer carry the "in"
+			# direction sign bit; direction is implied by the input_sources
+			# component path itself.
+			#
+			component: /Policy/policy/input_sources/default/applicable_input_device/mask
+				communication = 0
+				ambient = 0
+				builtin_mic = 0
+				bluetooth_sco_headset = 0
+				wired_headset = 0
+				hdmi = 0
+				telephony_rx = 0
+				back_mic = 0
+				remote_submix = 0
+				anlg_dock_headset = 0
+				dgtl_dock_headset = 0
+				usb_accessory = 0
+				usb_device = 0
+				fm_tuner = 0
+				tv_tuner = 0
+				line = 0
+				spdif = 0
+				bluetooth_a2dp = 0
+				loopback = 0
+				ip = 0
+				bus = 0
+				stub = 1
+			component: /Policy/policy/input_sources/mic/applicable_input_device/mask
+				communication = 0
+				ambient = 0
+				builtin_mic = 0
+				bluetooth_sco_headset = 0
+				wired_headset = 0
+				hdmi = 0
+				telephony_rx = 0
+				back_mic = 0
+				remote_submix = 0
+				anlg_dock_headset = 0
+				dgtl_dock_headset = 0
+				usb_accessory = 0
+				usb_device = 0
+				fm_tuner = 0
+				tv_tuner = 0
+				line = 0
+				spdif = 0
+				bluetooth_a2dp = 0
+				loopback = 0
+				ip = 0
+				bus = 0
+				stub = 1
+			component: /Policy/policy/input_sources/voice_downlink/applicable_input_device/mask
+				communication = 0
+				ambient = 0
+				builtin_mic = 0
+				bluetooth_sco_headset = 0
+				wired_headset = 0
+				hdmi = 0
+				telephony_rx = 0
+				back_mic = 0
+				remote_submix = 0
+				anlg_dock_headset = 0
+				dgtl_dock_headset = 0
+				usb_accessory = 0
+				usb_device = 0
+				fm_tuner = 0
+				tv_tuner = 0
+				line = 0
+				spdif = 0
+				bluetooth_a2dp = 0
+				loopback = 0
+				ip = 0
+				bus = 0
+				stub = 1
+			component: /Policy/policy/input_sources/voice_call/applicable_input_device/mask
+				communication = 0
+				ambient = 0
+				builtin_mic = 0
+				bluetooth_sco_headset = 0
+				wired_headset = 0
+				hdmi = 0
+				telephony_rx = 0
+				back_mic = 0
+				remote_submix = 0
+				anlg_dock_headset = 0
+				dgtl_dock_headset = 0
+				usb_accessory = 0
+				usb_device = 0
+				fm_tuner = 0
+				tv_tuner = 0
+				line = 0
+				spdif = 0
+				bluetooth_a2dp = 0
+				loopback = 0
+				ip = 0
+				bus = 0
+				stub = 1
+			component: /Policy/policy/input_sources/voice_uplink/applicable_input_device/mask
+				communication = 0
+				ambient = 0
+				builtin_mic = 0
+				bluetooth_sco_headset = 0
+				wired_headset = 0
+				hdmi = 0
+				telephony_rx = 0
+				back_mic = 0
+				remote_submix = 0
+				anlg_dock_headset = 0
+				dgtl_dock_headset = 0
+				usb_accessory = 0
+				usb_device = 0
+				fm_tuner = 0
+				tv_tuner = 0
+				line = 0
+				spdif = 0
+				bluetooth_a2dp = 0
+				loopback = 0
+				ip = 0
+				bus = 0
+				stub = 1
+			component: /Policy/policy/input_sources/camcorder/applicable_input_device/mask
+				communication = 0
+				ambient = 0
+				builtin_mic = 0
+				bluetooth_sco_headset = 0
+				wired_headset = 0
+				hdmi = 0
+				telephony_rx = 0
+				back_mic = 0
+				remote_submix = 0
+				anlg_dock_headset = 0
+				dgtl_dock_headset = 0
+				usb_accessory = 0
+				usb_device = 0
+				fm_tuner = 0
+				tv_tuner = 0
+				line = 0
+				spdif = 0
+				bluetooth_a2dp = 0
+				loopback = 0
+				ip = 0
+				bus = 0
+				stub = 1
+			component: /Policy/policy/input_sources/voice_recognition/applicable_input_device/mask
+				communication = 0
+				ambient = 0
+				builtin_mic = 0
+				bluetooth_sco_headset = 0
+				wired_headset = 0
+				hdmi = 0
+				telephony_rx = 0
+				back_mic = 0
+				remote_submix = 0
+				anlg_dock_headset = 0
+				dgtl_dock_headset = 0
+				usb_accessory = 0
+				usb_device = 0
+				fm_tuner = 0
+				tv_tuner = 0
+				line = 0
+				spdif = 0
+				bluetooth_a2dp = 0
+				loopback = 0
+				ip = 0
+				bus = 0
+				stub = 1
+			component: /Policy/policy/input_sources/voice_communication/applicable_input_device/mask
+				communication = 0
+				ambient = 0
+				builtin_mic = 0
+				bluetooth_sco_headset = 0
+				wired_headset = 0
+				hdmi = 0
+				telephony_rx = 0
+				back_mic = 0
+				remote_submix = 0
+				anlg_dock_headset = 0
+				dgtl_dock_headset = 0
+				usb_accessory = 0
+				usb_device = 0
+				fm_tuner = 0
+				tv_tuner = 0
+				line = 0
+				spdif = 0
+				bluetooth_a2dp = 0
+				loopback = 0
+				ip = 0
+				bus = 0
+				stub = 1
+			component: /Policy/policy/input_sources/remote_submix/applicable_input_device/mask
+				communication = 0
+				ambient = 0
+				builtin_mic = 0
+				bluetooth_sco_headset = 0
+				wired_headset = 0
+				hdmi = 0
+				telephony_rx = 0
+				back_mic = 0
+				remote_submix = 0
+				anlg_dock_headset = 0
+				dgtl_dock_headset = 0
+				usb_accessory = 0
+				usb_device = 0
+				fm_tuner = 0
+				tv_tuner = 0
+				line = 0
+				spdif = 0
+				bluetooth_a2dp = 0
+				loopback = 0
+				ip = 0
+				bus = 0
+				stub = 1
+			component: /Policy/policy/input_sources/hotword/applicable_input_device/mask
+				communication = 0
+				ambient = 0
+				builtin_mic = 0
+				bluetooth_sco_headset = 0
+				wired_headset = 0
+				hdmi = 0
+				telephony_rx = 0
+				back_mic = 0
+				remote_submix = 0
+				anlg_dock_headset = 0
+				dgtl_dock_headset = 0
+				usb_accessory = 0
+				usb_device = 0
+				fm_tuner = 0
+				tv_tuner = 0
+				line = 0
+				spdif = 0
+				bluetooth_a2dp = 0
+				loopback = 0
+				ip = 0
+				bus = 0
+				stub = 1
+			component: /Policy/policy/input_sources/unprocessed/applicable_input_device/mask
+				communication = 0
+				ambient = 0
+				builtin_mic = 0
+				bluetooth_sco_headset = 0
+				wired_headset = 0
+				hdmi = 0
+				telephony_rx = 0
+				back_mic = 0
+				remote_submix = 0
+				anlg_dock_headset = 0
+				dgtl_dock_headset = 0
+				usb_accessory = 0
+				usb_device = 0
+				fm_tuner = 0
+				tv_tuner = 0
+				line = 0
+				spdif = 0
+				bluetooth_a2dp = 0
+				loopback = 0
+				ip = 0
+				bus = 0
+				stub = 1
+			component: /Policy/policy/input_sources/fm_tuner/applicable_input_device/mask
+				communication = 0
+				ambient = 0
+				builtin_mic = 0
+				bluetooth_sco_headset = 0
+				wired_headset = 0
+				hdmi = 0
+				telephony_rx = 0
+				back_mic = 0
+				remote_submix = 0
+				anlg_dock_headset = 0
+				dgtl_dock_headset = 0
+				usb_accessory = 0
+				usb_device = 0
+				fm_tuner = 0
+				tv_tuner = 0
+				line = 0
+				spdif = 0
+				bluetooth_a2dp = 0
+				loopback = 0
+				ip = 0
+				bus = 0
+				stub = 1
+
+
diff --git a/services/audiopolicy/engineconfigurable/parameter-framework/examples/SettingsNoOutput/device_for_strategies.pfw b/services/audiopolicy/engineconfigurable/parameter-framework/examples/SettingsNoOutput/device_for_strategies.pfw
new file mode 100644
index 0000000..917d4a7
--- /dev/null
+++ b/services/audiopolicy/engineconfigurable/parameter-framework/examples/SettingsNoOutput/device_for_strategies.pfw
@@ -0,0 +1,255 @@
+domain: DeviceForStrategy
+	conf: Calibration
+		component: /Policy/policy/strategies
+			component: media/selected_output_devices/mask
+				earpiece = 0
+				speaker = 0
+				wired_headset = 0
+				wired_headphone = 0
+				bluetooth_sco = 0
+				bluetooth_sco_headset = 0
+				bluetooth_sco_carkit = 0
+				bluetooth_a2dp = 0
+				bluetooth_a2dp_headphones = 0
+				bluetooth_a2dp_speaker = 0
+				hdmi = 0
+				angl_dock_headset = 0
+				dgtl_dock_headset = 0
+				usb_accessory = 0
+				usb_device = 0
+				remote_submix = 0
+				telephony_tx = 0
+				line = 0
+				hdmi_arc = 0
+				spdif = 0
+				fm = 0
+				aux_line = 0
+				speaker_safe = 0
+				ip = 0
+				bus = 0
+				stub = 1
+
+			component: phone/selected_output_devices/mask
+				earpiece = 0
+				speaker = 0
+				wired_headset = 0
+				wired_headphone = 0
+				bluetooth_sco = 0
+				bluetooth_sco_headset = 0
+				bluetooth_sco_carkit = 0
+				bluetooth_a2dp = 0
+				bluetooth_a2dp_headphones = 0
+				bluetooth_a2dp_speaker = 0
+				hdmi = 0
+				angl_dock_headset = 0
+				dgtl_dock_headset = 0
+				usb_accessory = 0
+				usb_device = 0
+				remote_submix = 0
+				telephony_tx = 0
+				line = 0
+				hdmi_arc = 0
+				spdif = 0
+				fm = 0
+				aux_line = 0
+				speaker_safe = 0
+				ip = 0
+				bus = 0
+				stub = 1
+
+			component: sonification/selected_output_devices/mask
+				earpiece = 0
+				speaker = 0
+				wired_headset = 0
+				wired_headphone = 0
+				bluetooth_sco = 0
+				bluetooth_sco_headset = 0
+				bluetooth_sco_carkit = 0
+				bluetooth_a2dp = 0
+				bluetooth_a2dp_headphones = 0
+				bluetooth_a2dp_speaker = 0
+				hdmi = 0
+				angl_dock_headset = 0
+				dgtl_dock_headset = 0
+				usb_accessory = 0
+				usb_device = 0
+				remote_submix = 0
+				telephony_tx = 0
+				line = 0
+				hdmi_arc = 0
+				spdif = 0
+				fm = 0
+				aux_line = 0
+				speaker_safe = 0
+				ip = 0
+				bus = 0
+				stub = 1
+
+			component: sonification_respectful/selected_output_devices/mask
+				earpiece = 0
+				speaker = 0
+				wired_headset = 0
+				wired_headphone = 0
+				bluetooth_sco = 0
+				bluetooth_sco_headset = 0
+				bluetooth_sco_carkit = 0
+				bluetooth_a2dp = 0
+				bluetooth_a2dp_headphones = 0
+				bluetooth_a2dp_speaker = 0
+				hdmi = 0
+				angl_dock_headset = 0
+				dgtl_dock_headset = 0
+				usb_accessory = 0
+				usb_device = 0
+				remote_submix = 0
+				telephony_tx = 0
+				line = 0
+				hdmi_arc = 0
+				spdif = 0
+				fm = 0
+				aux_line = 0
+				speaker_safe = 0
+				ip = 0
+				bus = 0
+				stub = 1
+
+			component: dtmf/selected_output_devices/mask
+				earpiece = 0
+				speaker = 0
+				wired_headset = 0
+				wired_headphone = 0
+				bluetooth_sco = 0
+				bluetooth_sco_headset = 0
+				bluetooth_sco_carkit = 0
+				bluetooth_a2dp = 0
+				bluetooth_a2dp_headphones = 0
+				bluetooth_a2dp_speaker = 0
+				hdmi = 0
+				angl_dock_headset = 0
+				dgtl_dock_headset = 0
+				usb_accessory = 0
+				usb_device = 0
+				remote_submix = 0
+				telephony_tx = 0
+				line = 0
+				hdmi_arc = 0
+				spdif = 0
+				fm = 0
+				aux_line = 0
+				speaker_safe = 0
+				ip = 0
+				bus = 0
+				stub = 1
+
+			component: enforced_audible/selected_output_devices/mask
+				earpiece = 0
+				speaker = 0
+				wired_headset = 0
+				wired_headphone = 0
+				bluetooth_sco = 0
+				bluetooth_sco_headset = 0
+				bluetooth_sco_carkit = 0
+				bluetooth_a2dp = 0
+				bluetooth_a2dp_headphones = 0
+				bluetooth_a2dp_speaker = 0
+				hdmi = 0
+				angl_dock_headset = 0
+				dgtl_dock_headset = 0
+				usb_accessory = 0
+				usb_device = 0
+				remote_submix = 0
+				telephony_tx = 0
+				line = 0
+				hdmi_arc = 0
+				spdif = 0
+				fm = 0
+				aux_line = 0
+				speaker_safe = 0
+				ip = 0
+				bus = 0
+				stub = 1
+
+			component: transmitted_through_speaker/selected_output_devices/mask
+				earpiece = 0
+				speaker = 0
+				wired_headset = 0
+				wired_headphone = 0
+				bluetooth_sco = 0
+				bluetooth_sco_headset = 0
+				bluetooth_sco_carkit = 0
+				bluetooth_a2dp = 0
+				bluetooth_a2dp_headphones = 0
+				bluetooth_a2dp_speaker = 0
+				hdmi = 0
+				angl_dock_headset = 0
+				dgtl_dock_headset = 0
+				usb_accessory = 0
+				usb_device = 0
+				remote_submix = 0
+				telephony_tx = 0
+				line = 0
+				hdmi_arc = 0
+				spdif = 0
+				fm = 0
+				aux_line = 0
+				speaker_safe = 0
+				ip = 0
+				bus = 0
+				stub = 1
+
+			component: accessibility/selected_output_devices/mask
+				earpiece = 0
+				speaker = 0
+				wired_headset = 0
+				wired_headphone = 0
+				bluetooth_sco = 0
+				bluetooth_sco_headset = 0
+				bluetooth_sco_carkit = 0
+				bluetooth_a2dp = 0
+				bluetooth_a2dp_headphones = 0
+				bluetooth_a2dp_speaker = 0
+				hdmi = 0
+				angl_dock_headset = 0
+				dgtl_dock_headset = 0
+				usb_accessory = 0
+				usb_device = 0
+				remote_submix = 0
+				telephony_tx = 0
+				line = 0
+				hdmi_arc = 0
+				spdif = 0
+				fm = 0
+				aux_line = 0
+				speaker_safe = 0
+				ip = 0
+				bus = 0
+				stub = 1
+
+			component: rerouting/selected_output_devices/mask
+				earpiece = 0
+				speaker = 0
+				wired_headset = 0
+				wired_headphone = 0
+				bluetooth_sco = 0
+				bluetooth_sco_headset = 0
+				bluetooth_sco_carkit = 0
+				bluetooth_a2dp = 0
+				bluetooth_a2dp_headphones = 0
+				bluetooth_a2dp_speaker = 0
+				hdmi = 0
+				angl_dock_headset = 0
+				dgtl_dock_headset = 0
+				usb_accessory = 0
+				usb_device = 0
+				remote_submix = 0
+				telephony_tx = 0
+				line = 0
+				hdmi_arc = 0
+				spdif = 0
+				fm = 0
+				aux_line = 0
+				speaker_safe = 0
+				ip = 0
+				bus = 0
+				stub = 1
+
diff --git a/services/audiopolicy/engineconfigurable/parameter-framework/example/Structure/PolicyClass.xml b/services/audiopolicy/engineconfigurable/parameter-framework/examples/Structure/PolicyClass.xml
similarity index 100%
rename from services/audiopolicy/engineconfigurable/parameter-framework/example/Structure/PolicyClass.xml
rename to services/audiopolicy/engineconfigurable/parameter-framework/examples/Structure/PolicyClass.xml
diff --git a/services/audiopolicy/engineconfigurable/parameter-framework/example/Structure/PolicySubsystem-CommonTypes.xml b/services/audiopolicy/engineconfigurable/parameter-framework/examples/Structure/PolicySubsystem-CommonTypes.xml
similarity index 93%
rename from services/audiopolicy/engineconfigurable/parameter-framework/example/Structure/PolicySubsystem-CommonTypes.xml
rename to services/audiopolicy/engineconfigurable/parameter-framework/examples/Structure/PolicySubsystem-CommonTypes.xml
index 6d6145a..461e44a 100755
--- a/services/audiopolicy/engineconfigurable/parameter-framework/example/Structure/PolicySubsystem-CommonTypes.xml
+++ b/services/audiopolicy/engineconfigurable/parameter-framework/examples/Structure/PolicySubsystem-CommonTypes.xml
@@ -35,6 +35,9 @@
             <BitParameter Name="fm" Size="1" Pos="20"/>
             <BitParameter Name="aux_line" Size="1" Pos="21"/>
             <BitParameter Name="speaker_safe" Size="1" Pos="22"/>
+            <BitParameter Name="ip" Size="1" Pos="23"/>
+            <BitParameter Name="bus" Size="1" Pos="24"/>
+            <BitParameter Name="stub" Size="1" Pos="30"/>
         </BitParameterBlock>
     </ComponentType>
 
@@ -62,7 +65,9 @@
             <BitParameter Name="spdif" Size="1" Pos="16"/>
             <BitParameter Name="bluetooth_a2dp" Size="1" Pos="17"/>
             <BitParameter Name="loopback" Size="1" Pos="18"/>
-            <BitParameter Name="in" Size="1" Pos="31"/>
+            <BitParameter Name="ip" Size="1" Pos="19"/>
+            <BitParameter Name="bus" Size="1" Pos="20"/>
+            <BitParameter Name="stub" Size="1" Pos="30"/>
         </BitParameterBlock>
     </ComponentType>
 
@@ -83,6 +88,10 @@
             <BitParameter Name="compress_offload" Size="1" Pos="4"/>
             <BitParameter Name="non_blocking" Size="1" Pos="5"/>
             <BitParameter Name="hw_av_sync" Size="1" Pos="6"/>
+            <BitParameter Name="tts" Size="1" Pos="7"/>
+            <BitParameter Name="raw" Size="1" Pos="8"/>
+            <BitParameter Name="sync" Size="1" Pos="9"/>
+            <BitParameter Name="iec958_nonaudio" Size="1" Pos="10"/>
         </BitParameterBlock>
     </ComponentType>
 
@@ -94,6 +103,8 @@
         <BitParameterBlock Name="mask" Size="32">
             <BitParameter Name="fast" Size="1" Pos="0"/>
             <BitParameter Name="hw_hotword" Size="1" Pos="2"/>
+            <BitParameter Name="raw" Size="1" Pos="3"/>
+            <BitParameter Name="sync" Size="1" Pos="4"/>
         </BitParameterBlock>
     </ComponentType>
 
diff --git a/services/audiopolicy/engineconfigurable/parameter-framework/example/Structure/PolicySubsystem.xml b/services/audiopolicy/engineconfigurable/parameter-framework/examples/Structure/PolicySubsystem.xml
similarity index 100%
rename from services/audiopolicy/engineconfigurable/parameter-framework/example/Structure/PolicySubsystem.xml
rename to services/audiopolicy/engineconfigurable/parameter-framework/examples/Structure/PolicySubsystem.xml
diff --git a/services/audiopolicy/engineconfigurable/parameter-framework/example/policy_criteria.txt b/services/audiopolicy/engineconfigurable/parameter-framework/examples/policy_criteria.txt
similarity index 96%
rename from services/audiopolicy/engineconfigurable/parameter-framework/example/policy_criteria.txt
rename to services/audiopolicy/engineconfigurable/parameter-framework/examples/policy_criteria.txt
index 28a7ef1..480cbe1 100755
--- a/services/audiopolicy/engineconfigurable/parameter-framework/example/policy_criteria.txt
+++ b/services/audiopolicy/engineconfigurable/parameter-framework/examples/policy_criteria.txt
@@ -1,6 +1,6 @@
 ExclusiveCriterion TelephonyMode                :   Normal          RingTone                InCall              InCommunication
-InclusiveCriterion AvailableInputDevices        :   Communication Ambient BuiltinMic BluetoothScoHeadset WiredHeadset Hdmi TelephonyRx BackMic RemoteSubmix AnlgDockHeadset DgtlDockHeadset UsbAccessory UsbDevice FmTuner TvTuner Line Spdif BluetoothA2dp Loopback
-InclusiveCriterion AvailableOutputDevices       :   Earpiece Speaker WiredSpeaker WiredHeadset WiredHeadphone BluetoothSco BluetoothScoHeadset BluetoothScoCarkit BluetoothA2dp BluetoothA2dpHeadphones BluetoothA2dpSpeaker Hdmi AnlgDockHeadset DgtlDockHeadset UsbAccessory UsbDevice RemoteSubmix TelephonyTx Line HdmiArc Spdif Fm AuxLine SpeakerSafe
+InclusiveCriterion AvailableInputDevices        :   Communication Ambient BuiltinMic BluetoothScoHeadset WiredHeadset Hdmi TelephonyRx BackMic RemoteSubmix AnlgDockHeadset DgtlDockHeadset UsbAccessory UsbDevice FmTuner TvTuner Line Spdif BluetoothA2dp Loopback Ip Bus Stub
+InclusiveCriterion AvailableOutputDevices       :   Earpiece Speaker WiredSpeaker WiredHeadset WiredHeadphone BluetoothSco BluetoothScoHeadset BluetoothScoCarkit BluetoothA2dp BluetoothA2dpHeadphones BluetoothA2dpSpeaker Hdmi AnlgDockHeadset DgtlDockHeadset UsbAccessory UsbDevice RemoteSubmix TelephonyTx Line HdmiArc Spdif Fm AuxLine SpeakerSafe Ip Bus Stub
 ExclusiveCriterion ForceUseForCommunication     :   ForceNone       ForceSpeaker            ForceBtSco
 ExclusiveCriterion ForceUseForMedia             :   ForceNone       ForceSpeaker			ForceHeadphones         ForceBtA2dp         ForceWiredAccessory ForceAnalogDock ForceDigitalDock    ForceNoBtA2dp       ForceSystemEnforced
 ExclusiveCriterion ForceUseForRecord            :   ForceNone       ForceBtSco              ForceWiredAccessory
diff --git a/services/audiopolicy/engineconfigurable/src/InputSource.cpp b/services/audiopolicy/engineconfigurable/src/InputSource.cpp
index 9ff1538..ae39fef 100755
--- a/services/audiopolicy/engineconfigurable/src/InputSource.cpp
+++ b/services/audiopolicy/engineconfigurable/src/InputSource.cpp
@@ -36,8 +36,7 @@
 
 /**
 * Set the device associated to this source.
-* It checks if the input device is valid but allows to set a NONE device
-* (i.e. only the IN BIT is set).
+* It checks if the input device is valid.
 *
 * @param[in] devices selected for the given input source.
 * @tparam audio_devices_t: Applicable input device for this input source.
@@ -47,7 +46,10 @@
 template <>
 status_t Element<audio_source_t>::set(audio_devices_t devices)
 {
-    if (!audio_is_input_device(devices) && devices != AUDIO_DEVICE_BIT_IN) {
+    if (devices != AUDIO_DEVICE_NONE) {
+        devices |= AUDIO_DEVICE_BIT_IN;
+    }
+    if (!audio_is_input_device(devices)) {
         ALOGE("%s: trying to set an invalid device 0x%X for input source %s",
               __FUNCTION__, devices, getName().c_str());
         return BAD_VALUE;
diff --git a/services/audiopolicy/engineconfigurable/src/Strategy.cpp b/services/audiopolicy/engineconfigurable/src/Strategy.cpp
index 847443a..a539914 100755
--- a/services/audiopolicy/engineconfigurable/src/Strategy.cpp
+++ b/services/audiopolicy/engineconfigurable/src/Strategy.cpp
@@ -37,7 +37,7 @@
 
 /**
  * Set the device associated to this strategy.
- * It checks if the output device is valid but allows to set a NONE device
+ * It checks if the output device is valid.
  *
  * @param[in] devices selected for the given strategy.
  *
@@ -46,7 +46,7 @@
 template <>
 status_t Element<routing_strategy>::set<audio_devices_t>(audio_devices_t devices)
 {
-    if (!audio_is_output_devices(devices) && devices != AUDIO_DEVICE_NONE) {
+    if (!audio_is_output_devices(devices) || devices == AUDIO_DEVICE_NONE) {
         ALOGE("%s: trying to set an invalid device 0x%X for strategy %s",
               __FUNCTION__, devices, getName().c_str());
         return BAD_VALUE;
diff --git a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
index 21ce8c9..00fd05a 100644
--- a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
+++ b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
@@ -4898,7 +4898,7 @@
                                         int index,
                                         audio_devices_t device)
 {
-    float volumeDb = mVolumeCurves->volIndexToDb(stream, Volume::getDeviceCategory(device), index);
+    float volumeDB = mVolumeCurves->volIndexToDb(stream, Volume::getDeviceCategory(device), index);
     // if a headset is connected, apply the following rules to ring tones and notifications
     // to avoid sound level bursts in user's ears:
     // - always attenuate notifications volume by 6dB
@@ -4922,7 +4922,7 @@
         // just stopped
         if (isStreamActive(AUDIO_STREAM_MUSIC, SONIFICATION_HEADSET_MUSIC_DELAY) ||
                 mLimitRingtoneVolume) {
-            volumeDb += SONIFICATION_HEADSET_VOLUME_FACTOR_DB;
+            volumeDB += SONIFICATION_HEADSET_VOLUME_FACTOR_DB;
             audio_devices_t musicDevice = getDeviceForStrategy(STRATEGY_MEDIA, true /*fromCache*/);
             float musicVolDB = computeVolume(AUDIO_STREAM_MUSIC,
                                              mVolumeCurves->getVolumeIndex(AUDIO_STREAM_MUSIC,
@@ -4930,17 +4930,29 @@
                                              musicDevice);
             float minVolDB = (musicVolDB > SONIFICATION_HEADSET_VOLUME_MIN_DB) ?
                     musicVolDB : SONIFICATION_HEADSET_VOLUME_MIN_DB;
-            if (volumeDb > minVolDB) {
-                volumeDb = minVolDB;
+            if (volumeDB > minVolDB) {
+                volumeDB = minVolDB;
                 ALOGV("computeVolume limiting volume to %f musicVol %f", minVolDB, musicVolDB);
             }
+            if (device & (AUDIO_DEVICE_OUT_BLUETOOTH_A2DP |
+                    AUDIO_DEVICE_OUT_BLUETOOTH_A2DP_HEADPHONES)) {
+                // on A2DP, also ensure notification volume is not too low compared to media when
+                // intended to be played
+                if ((volumeDB > -96.0f) &&
+                        (musicVolDB - SONIFICATION_A2DP_MAX_MEDIA_DIFF_DB > volumeDB)) {
+                    ALOGV("computeVolume increasing volume for stream=%d device=0x%X from %f to %f",
+                            stream, device,
+                            volumeDB, musicVolDB - SONIFICATION_A2DP_MAX_MEDIA_DIFF_DB);
+                    volumeDB = musicVolDB - SONIFICATION_A2DP_MAX_MEDIA_DIFF_DB;
+                }
+            }
         } else if ((Volume::getDeviceForVolume(device) != AUDIO_DEVICE_OUT_SPEAKER) ||
                 stream_strategy != STRATEGY_SONIFICATION) {
-            volumeDb += SONIFICATION_HEADSET_VOLUME_FACTOR_DB;
+            volumeDB += SONIFICATION_HEADSET_VOLUME_FACTOR_DB;
         }
     }
 
-    return volumeDb;
+    return volumeDB;
 }
 
 status_t AudioPolicyManager::checkAndSetVolume(audio_stream_type_t stream,
diff --git a/services/audiopolicy/managerdefault/AudioPolicyManager.h b/services/audiopolicy/managerdefault/AudioPolicyManager.h
index 2d6a873..6c3e416 100644
--- a/services/audiopolicy/managerdefault/AudioPolicyManager.h
+++ b/services/audiopolicy/managerdefault/AudioPolicyManager.h
@@ -52,6 +52,8 @@
 #define SONIFICATION_HEADSET_VOLUME_FACTOR_DB (-6)
 // Min volume for STRATEGY_SONIFICATION streams when limited by music volume: -36dB
 #define SONIFICATION_HEADSET_VOLUME_MIN_DB  (-36)
+// Max volume difference on A2DP between playing media and STRATEGY_SONIFICATION streams: 12dB
+#define SONIFICATION_A2DP_MAX_MEDIA_DIFF_DB (12)
 
 // Time in milliseconds during which we consider that music is still active after a music
 // track was stopped - see computeVolume()
diff --git a/services/audiopolicy/service/AudioPolicyClientImplLegacy.cpp b/services/audiopolicy/service/AudioPolicyClientImplLegacy.cpp
index 09a931f..151d066 100644
--- a/services/audiopolicy/service/AudioPolicyClientImplLegacy.cpp
+++ b/services/audiopolicy/service/AudioPolicyClientImplLegacy.cpp
@@ -190,7 +190,8 @@
     }
 
     if (((*pDevices & AUDIO_DEVICE_IN_REMOTE_SUBMIX) == AUDIO_DEVICE_IN_REMOTE_SUBMIX)
-            && !captureAudioOutputAllowed()) {
+            && !captureAudioOutputAllowed(IPCThreadState::self()->getCallingPid(),
+                                          IPCThreadState::self()->getCallingUid())) {
         ALOGE("open_input() permission denied: capture not allowed");
         return AUDIO_IO_HANDLE_NONE;
     }
diff --git a/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp b/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp
index 92a1285..c9b3abc 100644
--- a/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp
+++ b/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp
@@ -260,6 +260,7 @@
 status_t AudioPolicyService::getInputForAttr(const audio_attributes_t *attr,
                                              audio_io_handle_t *input,
                                              audio_session_t session,
+                                             pid_t pid,
                                              uid_t uid,
                                              uint32_t samplingRate,
                                              audio_format_t format,
@@ -282,11 +283,22 @@
     sp<AudioPolicyEffects>audioPolicyEffects;
     status_t status;
     AudioPolicyInterface::input_type_t inputType;
+
+    bool updatePid = (pid == -1);
     const uid_t callingUid = IPCThreadState::self()->getCallingUid();
-    if (!isTrustedCallingUid(callingUid) || uid == (uid_t)-1) {
+    if (!isTrustedCallingUid(callingUid)) {
         ALOGW_IF(uid != (uid_t)-1 && uid != callingUid,
                 "%s uid %d tried to pass itself off as %d", __FUNCTION__, callingUid, uid);
         uid = callingUid;
+        updatePid = true;
+    }
+
+    if (updatePid) {
+        const pid_t callingPid = IPCThreadState::self()->getCallingPid();
+        ALOGW_IF(pid != (pid_t)-1 && pid != callingPid,
+                 "%s uid %d pid %d tried to pass itself off as pid %d",
+                 __func__, callingUid, callingPid, pid);
+        pid = callingPid;
     }
 
     {
@@ -306,7 +318,7 @@
             case AudioPolicyInterface::API_INPUT_TELEPHONY_RX:
                 // FIXME: use the same permission as for remote submix for now.
             case AudioPolicyInterface::API_INPUT_MIX_CAPTURE:
-                if (!captureAudioOutputAllowed()) {
+                if (!captureAudioOutputAllowed(pid, uid)) {
                     ALOGE("getInputForAttr() permission denied: capture not allowed");
                     status = PERMISSION_DENIED;
                 }
diff --git a/services/audiopolicy/service/AudioPolicyInterfaceImplLegacy.cpp b/services/audiopolicy/service/AudioPolicyInterfaceImplLegacy.cpp
index c830454..7c9315d 100644
--- a/services/audiopolicy/service/AudioPolicyInterfaceImplLegacy.cpp
+++ b/services/audiopolicy/service/AudioPolicyInterfaceImplLegacy.cpp
@@ -234,6 +234,7 @@
 status_t AudioPolicyService::getInputForAttr(const audio_attributes_t *attr,
                                              audio_io_handle_t *input,
                                              audio_session_t session,
+                                             pid_t pid __unused,
                                              uid_t uid __unused,
                                              uint32_t samplingRate,
                                              audio_format_t format,
diff --git a/services/audiopolicy/service/AudioPolicyService.h b/services/audiopolicy/service/AudioPolicyService.h
index 2710ac7..0b2cb35 100644
--- a/services/audiopolicy/service/AudioPolicyService.h
+++ b/services/audiopolicy/service/AudioPolicyService.h
@@ -99,6 +99,7 @@
     virtual status_t getInputForAttr(const audio_attributes_t *attr,
                                      audio_io_handle_t *input,
                                      audio_session_t session,
+                                     pid_t pid,
                                      uid_t uid,
                                      uint32_t samplingRate,
                                      audio_format_t format,
diff --git a/services/camera/libcameraservice/device3/Camera3Device.cpp b/services/camera/libcameraservice/device3/Camera3Device.cpp
index 0e4e244..cd60ab5 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.cpp
+++ b/services/camera/libcameraservice/device3/Camera3Device.cpp
@@ -794,19 +794,12 @@
 
     if (mStatus == STATUS_UNCONFIGURED || mNeedConfig) {
         res = configureStreamsLocked();
-        // Stream configuration failed due to unsupported configuration.
-        // Device back to unconfigured state. Client might try other configuraitons
-        if (res == BAD_VALUE && mStatus == STATUS_UNCONFIGURED) {
-            CLOGE("No streams configured");
-            return NULL;
-        }
-        // Stream configuration failed for other reason. Fatal.
+        // Stream configuration failed. Client might try other configurations.
         if (res != OK) {
-            SET_ERR_L("Can't set up streams: %s (%d)", strerror(-res), res);
+            CLOGE("Can't set up streams: %s (%d)", strerror(-res), res);
             return NULL;
-        }
-        // Stream configuration successfully configure to empty stream configuration.
-        if (mStatus == STATUS_UNCONFIGURED) {
+        } else if (mStatus == STATUS_UNCONFIGURED) {
+            // Stream configuration successfully configured to empty stream configuration.
             CLOGE("No streams configured");
             return NULL;
         }
@@ -1823,6 +1816,33 @@
     return false;
 }
 
+void Camera3Device::cancelStreamsConfigurationLocked() {
+    int res = OK;
+    if (mInputStream != NULL && mInputStream->isConfiguring()) {
+        res = mInputStream->cancelConfiguration();
+        if (res != OK) {
+            CLOGE("Can't cancel configuring input stream %d: %s (%d)",
+                    mInputStream->getId(), strerror(-res), res);
+        }
+    }
+
+    for (size_t i = 0; i < mOutputStreams.size(); i++) {
+        sp<Camera3OutputStreamInterface> outputStream = mOutputStreams.editValueAt(i);
+        if (outputStream->isConfiguring()) {
+            res = outputStream->cancelConfiguration();
+            if (res != OK) {
+                CLOGE("Can't cancel configuring output stream %d: %s (%d)",
+                        outputStream->getId(), strerror(-res), res);
+            }
+        }
+    }
+
+    // Return state to that at start of call, so that future configures
+    // properly clean things up
+    internalUpdateStatusLocked(STATUS_UNCONFIGURED);
+    mNeedConfig = true;
+}
+
 status_t Camera3Device::configureStreamsLocked() {
     ATRACE_CALL();
     status_t res;
@@ -1862,7 +1882,8 @@
         camera3_stream_t *inputStream;
         inputStream = mInputStream->startConfiguration();
         if (inputStream == NULL) {
-            SET_ERR_L("Can't start input stream configuration");
+            CLOGE("Can't start input stream configuration");
+            cancelStreamsConfigurationLocked();
             return INVALID_OPERATION;
         }
         streams.add(inputStream);
@@ -1881,7 +1902,8 @@
         camera3_stream_t *outputStream;
         outputStream = mOutputStreams.editValueAt(i)->startConfiguration();
         if (outputStream == NULL) {
-            SET_ERR_L("Can't start output stream configuration");
+            CLOGE("Can't start output stream configuration");
+            cancelStreamsConfigurationLocked();
             return INVALID_OPERATION;
         }
         streams.add(outputStream);
@@ -1898,35 +1920,8 @@
     if (res == BAD_VALUE) {
         // HAL rejected this set of streams as unsupported, clean up config
         // attempt and return to unconfigured state
-        if (mInputStream != NULL && mInputStream->isConfiguring()) {
-            res = mInputStream->cancelConfiguration();
-            if (res != OK) {
-                SET_ERR_L("Can't cancel configuring input stream %d: %s (%d)",
-                        mInputStream->getId(), strerror(-res), res);
-                return res;
-            }
-        }
-
-        for (size_t i = 0; i < mOutputStreams.size(); i++) {
-            sp<Camera3OutputStreamInterface> outputStream =
-                    mOutputStreams.editValueAt(i);
-            if (outputStream->isConfiguring()) {
-                res = outputStream->cancelConfiguration();
-                if (res != OK) {
-                    SET_ERR_L(
-                        "Can't cancel configuring output stream %d: %s (%d)",
-                        outputStream->getId(), strerror(-res), res);
-                    return res;
-                }
-            }
-        }
-
-        // Return state to that at start of call, so that future configures
-        // properly clean things up
-        internalUpdateStatusLocked(STATUS_UNCONFIGURED);
-        mNeedConfig = true;
-
-        ALOGV("%s: Camera %d: Stream configuration failed", __FUNCTION__, mId);
+        CLOGE("Set of requested inputs/outputs not supported by HAL");
+        cancelStreamsConfigurationLocked();
         return BAD_VALUE;
     } else if (res != OK) {
         // Some other kind of error from configure_streams - this is not
@@ -1943,9 +1938,10 @@
     if (mInputStream != NULL && mInputStream->isConfiguring()) {
         res = mInputStream->finishConfiguration(mHal3Device);
         if (res != OK) {
-            SET_ERR_L("Can't finish configuring input stream %d: %s (%d)",
+            CLOGE("Can't finish configuring input stream %d: %s (%d)",
                     mInputStream->getId(), strerror(-res), res);
-            return res;
+            cancelStreamsConfigurationLocked();
+            return BAD_VALUE;
         }
     }
 
@@ -1955,16 +1951,17 @@
         if (outputStream->isConfiguring()) {
             res = outputStream->finishConfiguration(mHal3Device);
             if (res != OK) {
-                SET_ERR_L("Can't finish configuring output stream %d: %s (%d)",
+                CLOGE("Can't finish configuring output stream %d: %s (%d)",
                         outputStream->getId(), strerror(-res), res);
-                return res;
+                cancelStreamsConfigurationLocked();
+                return BAD_VALUE;
             }
         }
     }
 
     // Request thread needs to know to avoid using repeat-last-settings protocol
     // across configure_streams() calls
-    mRequestThread->configurationComplete();
+    mRequestThread->configurationComplete(mIsConstrainedHighSpeedConfiguration);
 
     // Boost priority of request thread for high speed recording to SCHED_FIFO
     if (mIsConstrainedHighSpeedConfiguration) {
@@ -2683,7 +2680,8 @@
         mCurrentPreCaptureTriggerId(0),
         mRepeatingLastFrameNumber(
             hardware::camera2::ICameraDeviceUser::NO_IN_FLIGHT_REPEATING_FRAMES),
-        mAeLockAvailable(aeLockAvailable) {
+        mAeLockAvailable(aeLockAvailable),
+        mPrepareVideoStream(false) {
     mStatusId = statusTracker->addComponent();
 }
 
@@ -2693,9 +2691,11 @@
     mListener = listener;
 }
 
-void Camera3Device::RequestThread::configurationComplete() {
+void Camera3Device::RequestThread::configurationComplete(bool isConstrainedHighSpeed) {
     Mutex::Autolock l(mRequestLock);
     mReconfigured = true;
+    // Prepare video stream for high speed recording.
+    mPrepareVideoStream = isConstrainedHighSpeed;
 }
 
 status_t Camera3Device::RequestThread::queueRequestList(
@@ -2967,19 +2967,28 @@
 }
 
 void Camera3Device::RequestThread::checkAndStopRepeatingRequest() {
-    Mutex::Autolock l(mRequestLock);
-    // Check all streams needed by repeating requests are still valid. Otherwise, stop
-    // repeating requests.
-    for (const auto& request : mRepeatingRequests) {
-        for (const auto& s : request->mOutputStreams) {
-            if (s->isAbandoned()) {
-                int64_t lastFrameNumber = 0;
-                clearRepeatingRequestsLocked(&lastFrameNumber);
-                mListener->notifyRepeatingRequestError(lastFrameNumber);
-                return;
+    bool surfaceAbandoned = false;
+    int64_t lastFrameNumber = 0;
+    {
+        Mutex::Autolock l(mRequestLock);
+        // Check all streams needed by repeating requests are still valid. Otherwise, stop
+        // repeating requests.
+        for (const auto& request : mRepeatingRequests) {
+            for (const auto& s : request->mOutputStreams) {
+                if (s->isAbandoned()) {
+                    surfaceAbandoned = true;
+                    clearRepeatingRequestsLocked(&lastFrameNumber);
+                    break;
+                }
+            }
+            if (surfaceAbandoned) {
+                break;
             }
         }
     }
+    if (surfaceAbandoned) {
+        mListener->notifyRepeatingRequestError(lastFrameNumber);
+    }
 }
 
 bool Camera3Device::RequestThread::threadLoop() {
@@ -3188,8 +3197,25 @@
                 captureRequest->mOutputStreams.size());
         halRequest->output_buffers = outputBuffers->array();
         for (size_t i = 0; i < captureRequest->mOutputStreams.size(); i++) {
-            res = captureRequest->mOutputStreams.editItemAt(i)->
-                    getBuffer(&outputBuffers->editItemAt(i));
+            sp<Camera3OutputStreamInterface> outputStream = captureRequest->mOutputStreams.editItemAt(i);
+
+            // Prepare video buffers for high speed recording on the first video request.
+            if (mPrepareVideoStream && outputStream->isVideoStream()) {
+                // Only try to prepare video stream on the first video request.
+                mPrepareVideoStream = false;
+
+                res = outputStream->startPrepare(Camera3StreamInterface::ALLOCATE_PIPELINE_MAX);
+                while (res == NOT_ENOUGH_DATA) {
+                    res = outputStream->prepareNextBuffer();
+                }
+                if (res != OK) {
+                    ALOGW("%s: Preparing video buffers for high speed failed: %s (%d)",
+                        __FUNCTION__, strerror(-res), res);
+                    outputStream->cancelPrepare();
+                }
+            }
+
+            res = outputStream->getBuffer(&outputBuffers->editItemAt(i));
             if (res != OK) {
                 // Can't get output buffer from gralloc queue - this could be due to
                 // abandoned queue or other consumer misbehavior, so not a fatal
diff --git a/services/camera/libcameraservice/device3/Camera3Device.h b/services/camera/libcameraservice/device3/Camera3Device.h
index 0366ef6..2aca57d 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.h
+++ b/services/camera/libcameraservice/device3/Camera3Device.h
@@ -367,6 +367,11 @@
     status_t           configureStreamsLocked();
 
     /**
+     * Cancel stream configuration that did not finish successfully.
+     */
+    void               cancelStreamsConfigurationLocked();
+
+    /**
      * Add a dummy stream to the current stream set as a workaround for
      * not allowing 0 streams in the camera HAL spec.
      */
@@ -450,7 +455,7 @@
         /**
          * Call after stream (re)-configuration is completed.
          */
-        void     configurationComplete();
+        void     configurationComplete(bool isConstrainedHighSpeed);
 
         /**
          * Set or clear the list of repeating requests. Does not block
@@ -638,6 +643,9 @@
 
         // Whether the device supports AE lock
         bool               mAeLockAvailable;
+
+        // Flag indicating if we should prepare video stream for video requests.
+        bool               mPrepareVideoStream;
     };
     sp<RequestThread> mRequestThread;
 
diff --git a/services/mediaextractor/Android.mk b/services/mediaextractor/Android.mk
index bc2b641..a9a2d3c 100644
--- a/services/mediaextractor/Android.mk
+++ b/services/mediaextractor/Android.mk
@@ -19,6 +19,7 @@
 LOCAL_MODULE:= mediaextractor
 LOCAL_32_BIT_ONLY := true
 LOCAL_INIT_RC := mediaextractor.rc
+LOCAL_C_INCLUDES := frameworks/av/media/libmedia
 include $(BUILD_EXECUTABLE)
 
 include $(call all-makefiles-under, $(LOCAL_PATH))
diff --git a/services/mediaextractor/main_extractorservice.cpp b/services/mediaextractor/main_extractorservice.cpp
index a7f3fbe..245489e 100644
--- a/services/mediaextractor/main_extractorservice.cpp
+++ b/services/mediaextractor/main_extractorservice.cpp
@@ -29,12 +29,18 @@
 // from LOCAL_C_INCLUDES
 #include "IcuUtils.h"
 #include "MediaExtractorService.h"
+#include "MediaUtils.h"
 #include "minijail/minijail.h"
 
 using namespace android;
 
 int main(int argc __unused, char** argv)
 {
+    limitProcessMemory(
+        "ro.media.maxmem", /* property that defines limit */
+        SIZE_MAX, /* upper limit in bytes */
+        20 /* upper limit as percentage of physical RAM */);
+
     signal(SIGPIPE, SIG_IGN);
     MiniJail();