Merge "stagefright: MetadataRetriever API to specify mime and color format" into oc-mr1-dev
diff --git a/camera/ndk/include/camera/NdkCameraMetadataTags.h b/camera/ndk/include/camera/NdkCameraMetadataTags.h
index 8b76cdf..629d75a 100644
--- a/camera/ndk/include/camera/NdkCameraMetadataTags.h
+++ b/camera/ndk/include/camera/NdkCameraMetadataTags.h
@@ -115,11 +115,13 @@
* <p>The mode control selects how the image data is converted from the
* sensor's native color into linear sRGB color.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: byte (acamera_metadata_enum_android_color_correction_mode_t)</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
* <li>ACaptureRequest</li>
- * </ul>
+ * </ul></p>
*
* <p>When auto-white balance (AWB) is enabled with ACAMERA_CONTROL_AWB_MODE, this
* control is overridden by the AWB routine. When AWB is disabled, the
@@ -164,17 +166,19 @@
* @see ACAMERA_COLOR_CORRECTION_TRANSFORM
* @see ACAMERA_CONTROL_AWB_MODE
*/
- ACAMERA_COLOR_CORRECTION_MODE = // byte (enum)
+ ACAMERA_COLOR_CORRECTION_MODE = // byte (acamera_metadata_enum_android_color_correction_mode_t)
ACAMERA_COLOR_CORRECTION_START,
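
As an illustration of how these color correction tags are consumed (a sketch, not part of this change): an application can switch a request to TRANSFORM_MATRIX mode and supply its own transform and gains through the NDK request API. The identity values below are placeholders, and the [R, G_even, G_odd, B] gain ordering is an assumption.

    #include <camera/NdkCameraMetadata.h>
    #include <camera/NdkCameraMetadataTags.h>
    #include <camera/NdkCaptureRequest.h>

    /* Sketch: manual color correction; only takes effect when AWB does
     * not override it (e.g. ACAMERA_CONTROL_AWB_MODE set to OFF). */
    static void use_manual_color_correction(ACaptureRequest* request) {
        uint8_t mode = ACAMERA_COLOR_CORRECTION_MODE_TRANSFORM_MATRIX;
        ACaptureRequest_setEntry_u8(request, ACAMERA_COLOR_CORRECTION_MODE, 1, &mode);

        /* rational[3*3]: row-major identity transform as a placeholder. */
        ACameraMetadata_rational transform[9] = {
            {1,1},{0,1},{0,1},
            {0,1},{1,1},{0,1},
            {0,1},{0,1},{1,1},
        };
        ACaptureRequest_setEntry_rational(
                request, ACAMERA_COLOR_CORRECTION_TRANSFORM, 9, transform);

        /* float[4] white-balance gains, assumed order [R, G_even, G_odd, B]. */
        float gains[4] = {1.0f, 1.0f, 1.0f, 1.0f};
        ACaptureRequest_setEntry_float(request, ACAMERA_COLOR_CORRECTION_GAINS, 4, gains);
    }
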
/**
* <p>A color transform matrix to use to transform
* from sensor RGB color space to output linear sRGB color space.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: rational[3*3]</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
* <li>ACaptureRequest</li>
- * </ul>
+ * </ul></p>
*
* <p>This matrix is either set by the camera device when the request
* ACAMERA_COLOR_CORRECTION_MODE is not TRANSFORM_MATRIX, or
@@ -196,11 +200,13 @@
* <p>Gains applying to Bayer raw color channels for
* white-balance.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: float[4]</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
* <li>ACaptureRequest</li>
- * </ul>
+ * </ul></p>
*
* <p>These per-channel gains are either set by the camera device
* when the request ACAMERA_COLOR_CORRECTION_MODE is not
@@ -221,11 +227,13 @@
/**
* <p>Mode of operation for the chromatic aberration correction algorithm.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: byte (acamera_metadata_enum_android_color_correction_aberration_mode_t)</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
* <li>ACaptureRequest</li>
- * </ul>
+ * </ul></p>
*
* <p>Chromatic (color) aberration is caused by the fact that different wavelengths of light
* can not focus on the same point after exiting from the lens. This metadata defines
@@ -239,7 +247,7 @@
* applying aberration correction.</p>
* <p>LEGACY devices will always be in FAST mode.</p>
*/
- ACAMERA_COLOR_CORRECTION_ABERRATION_MODE = // byte (enum)
+ ACAMERA_COLOR_CORRECTION_ABERRATION_MODE = // byte (acamera_metadata_enum_android_color_correction_aberration_mode_t)
ACAMERA_COLOR_CORRECTION_START + 3,
/**
* <p>List of aberration correction modes for ACAMERA_COLOR_CORRECTION_ABERRATION_MODE that are
@@ -247,10 +255,12 @@
*
* @see ACAMERA_COLOR_CORRECTION_ABERRATION_MODE
*
- * <p>This tag may appear in:</p>
+ * <p>Type: byte[n]</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
- * </ul>
+ * </ul></p>
*
* <p>This key lists the valid modes for ACAMERA_COLOR_CORRECTION_ABERRATION_MODE. If no
* aberration correction modes are available for a device, this list will solely include
@@ -269,11 +279,13 @@
* <p>The desired setting for the camera device's auto-exposure
* algorithm's antibanding compensation.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: byte (acamera_metadata_enum_android_control_ae_antibanding_mode_t)</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
* <li>ACaptureRequest</li>
- * </ul>
+ * </ul></p>
*
* <p>Some kinds of lighting fixtures, such as some fluorescent
* lights, flicker at the rate of the power supply frequency
@@ -310,17 +322,19 @@
* @see ACAMERA_CONTROL_MODE
* @see ACAMERA_STATISTICS_SCENE_FLICKER
*/
- ACAMERA_CONTROL_AE_ANTIBANDING_MODE = // byte (enum)
+ ACAMERA_CONTROL_AE_ANTIBANDING_MODE = // byte (acamera_metadata_enum_android_control_ae_antibanding_mode_t)
ACAMERA_CONTROL_START,
/**
* <p>Adjustment to auto-exposure (AE) target image
* brightness.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: int32</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
* <li>ACaptureRequest</li>
- * </ul>
+ * </ul></p>
*
* <p>The adjustment is measured as a count of steps, with the
* step size defined by ACAMERA_CONTROL_AE_COMPENSATION_STEP and the
@@ -350,11 +364,13 @@
* <p>Whether auto-exposure (AE) is currently locked to its latest
* calculated values.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: byte (acamera_metadata_enum_android_control_ae_lock_t)</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
* <li>ACaptureRequest</li>
- * </ul>
+ * </ul></p>
*
* <p>When set to <code>true</code> (ON), the AE algorithm is locked to its latest parameters,
* and will not change exposure settings until the lock is set to <code>false</code> (OFF).</p>
@@ -398,17 +414,19 @@
* @see ACAMERA_SENSOR_EXPOSURE_TIME
* @see ACAMERA_SENSOR_SENSITIVITY
*/
- ACAMERA_CONTROL_AE_LOCK = // byte (enum)
+ ACAMERA_CONTROL_AE_LOCK = // byte (acamera_metadata_enum_android_control_ae_lock_t)
ACAMERA_CONTROL_START + 2,
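
A minimal usage sketch for the AE lock (not part of this change), assuming a valid ACaptureRequest that is then re-submitted as the repeating request:

    #include <stdbool.h>
    #include <camera/NdkCameraMetadataTags.h>
    #include <camera/NdkCaptureRequest.h>

    /* Sketch: freeze AE at its latest parameters; setting OFF resumes
     * normal auto-exposure operation. */
    static void set_ae_lock(ACaptureRequest* request, bool lock) {
        uint8_t ae_lock = lock ? ACAMERA_CONTROL_AE_LOCK_ON
                               : ACAMERA_CONTROL_AE_LOCK_OFF;
        ACaptureRequest_setEntry_u8(request, ACAMERA_CONTROL_AE_LOCK, 1, &ae_lock);
    }
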
/**
* <p>The desired mode for the camera device's
* auto-exposure routine.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: byte (acamera_metadata_enum_android_control_ae_mode_t)</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
* <li>ACaptureRequest</li>
- * </ul>
+ * </ul></p>
*
* <p>This control is only effective if ACAMERA_CONTROL_MODE is
* AUTO.</p>
@@ -436,16 +454,18 @@
* @see ACAMERA_SENSOR_FRAME_DURATION
* @see ACAMERA_SENSOR_SENSITIVITY
*/
- ACAMERA_CONTROL_AE_MODE = // byte (enum)
+ ACAMERA_CONTROL_AE_MODE = // byte (acamera_metadata_enum_android_control_ae_mode_t)
ACAMERA_CONTROL_START + 3,
/**
* <p>List of metering areas to use for auto-exposure adjustment.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: int32[5*area_count]</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
* <li>ACaptureRequest</li>
- * </ul>
+ * </ul></p>
*
* <p>Not available if android.control.maxRegionsAe is 0.
* Otherwise will always be present.</p>
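
A sketch of setting a single AE metering rectangle (not part of this change). It assumes the int32[5*area_count] layout is (xmin, ymin, xmax, ymax, weight) in active-array pixel coordinates, and that android.control.maxRegionsAe is at least 1:

    #include <camera/NdkCameraMetadataTags.h>
    #include <camera/NdkCaptureRequest.h>

    /* Sketch: meter exposure on one rectangle of the active array.
     * Assumed per-region layout: (xmin, ymin, xmax, ymax, weight). */
    static void set_ae_region(ACaptureRequest* request,
                              int32_t xmin, int32_t ymin,
                              int32_t xmax, int32_t ymax) {
        int32_t region[5] = {xmin, ymin, xmax, ymax, /*weight=*/1000};
        ACaptureRequest_setEntry_i32(request, ACAMERA_CONTROL_AE_REGIONS, 5, region);
    }
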
@@ -486,11 +506,13 @@
* adjust the capture frame rate to maintain good
* exposure.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: int32[2]</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
* <li>ACaptureRequest</li>
- * </ul>
+ * </ul></p>
*
* <p>Only constrains auto-exposure (AE) algorithm, not
* manual control of ACAMERA_SENSOR_EXPOSURE_TIME and
@@ -505,11 +527,13 @@
* <p>Whether the camera device will trigger a precapture
* metering sequence when it processes this request.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: byte (acamera_metadata_enum_android_control_ae_precapture_trigger_t)</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
* <li>ACaptureRequest</li>
- * </ul>
+ * </ul></p>
*
* <p>This entry is normally set to IDLE, or is not
* included at all in the request settings. When included and
@@ -563,17 +587,19 @@
* @see ACAMERA_CONTROL_AF_TRIGGER
* @see ACAMERA_CONTROL_CAPTURE_INTENT
*/
- ACAMERA_CONTROL_AE_PRECAPTURE_TRIGGER = // byte (enum)
+ ACAMERA_CONTROL_AE_PRECAPTURE_TRIGGER = // byte (acamera_metadata_enum_android_control_ae_precapture_trigger_t)
ACAMERA_CONTROL_START + 6,
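
Usage sketch (assumed flow, not from this change): the trigger is sent in a single one-shot request ahead of a still capture, then left at IDLE in subsequent requests while ACAMERA_CONTROL_AE_STATE is monitored in the results:

    #include <camera/NdkCameraMetadataTags.h>
    #include <camera/NdkCaptureRequest.h>

    /* Sketch: kick off one precapture metering sequence. */
    static void trigger_ae_precapture(ACaptureRequest* one_shot_request) {
        uint8_t trigger = ACAMERA_CONTROL_AE_PRECAPTURE_TRIGGER_START;
        ACaptureRequest_setEntry_u8(one_shot_request,
                ACAMERA_CONTROL_AE_PRECAPTURE_TRIGGER, 1, &trigger);
    }
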
/**
* <p>Whether auto-focus (AF) is currently enabled, and what
* mode it is set to.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: byte (acamera_metadata_enum_android_control_af_mode_t)</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
* <li>ACaptureRequest</li>
- * </ul>
+ * </ul></p>
*
* <p>Only effective if ACAMERA_CONTROL_MODE = AUTO and the lens is not fixed focus
* (i.e. <code>ACAMERA_LENS_INFO_MINIMUM_FOCUS_DISTANCE > 0</code>). Also note that
@@ -590,16 +616,18 @@
* @see ACAMERA_CONTROL_MODE
* @see ACAMERA_LENS_INFO_MINIMUM_FOCUS_DISTANCE
*/
- ACAMERA_CONTROL_AF_MODE = // byte (enum)
+ ACAMERA_CONTROL_AF_MODE = // byte (acamera_metadata_enum_android_control_af_mode_t)
ACAMERA_CONTROL_START + 7,
/**
* <p>List of metering areas to use for auto-focus.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: int32[5*area_count]</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
* <li>ACaptureRequest</li>
- * </ul>
+ * </ul></p>
*
* <p>Not available if android.control.maxRegionsAf is 0.
* Otherwise will always be present.</p>
@@ -638,11 +666,13 @@
/**
* <p>Whether the camera device will trigger autofocus for this request.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: byte (acamera_metadata_enum_android_control_af_trigger_t)</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
* <li>ACaptureRequest</li>
- * </ul>
+ * </ul></p>
*
* <p>This entry is normally set to IDLE, or is not
* included at all in the request settings.</p>
@@ -665,17 +695,19 @@
* @see ACAMERA_CONTROL_AE_PRECAPTURE_TRIGGER
* @see ACAMERA_CONTROL_AF_STATE
*/
- ACAMERA_CONTROL_AF_TRIGGER = // byte (enum)
+ ACAMERA_CONTROL_AF_TRIGGER = // byte (acamera_metadata_enum_android_control_af_trigger_t)
ACAMERA_CONTROL_START + 9,
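
A matching sketch for the AF trigger (not from this change), e.g. on a half-press of a shutter button; focus progress is then tracked through ACAMERA_CONTROL_AF_STATE in capture results:

    #include <camera/NdkCameraMetadataTags.h>
    #include <camera/NdkCaptureRequest.h>

    /* Sketch: start an autofocus scan with the current AF mode. */
    static void trigger_autofocus(ACaptureRequest* one_shot_request) {
        uint8_t trigger = ACAMERA_CONTROL_AF_TRIGGER_START;
        ACaptureRequest_setEntry_u8(one_shot_request,
                ACAMERA_CONTROL_AF_TRIGGER, 1, &trigger);
    }
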
/**
* <p>Whether auto-white balance (AWB) is currently locked to its
* latest calculated values.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: byte (acamera_metadata_enum_android_control_awb_lock_t)</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
* <li>ACaptureRequest</li>
- * </ul>
+ * </ul></p>
*
* <p>When set to <code>true</code> (ON), the AWB algorithm is locked to its latest parameters,
* and will not change color balance settings until the lock is set to <code>false</code> (OFF).</p>
@@ -699,18 +731,20 @@
*
* @see ACAMERA_CONTROL_AWB_MODE
*/
- ACAMERA_CONTROL_AWB_LOCK = // byte (enum)
+ ACAMERA_CONTROL_AWB_LOCK = // byte (acamera_metadata_enum_android_control_awb_lock_t)
ACAMERA_CONTROL_START + 10,
/**
* <p>Whether auto-white balance (AWB) is currently setting the color
* transform fields, and what its illumination target
* is.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: byte (acamera_metadata_enum_android_control_awb_mode_t)</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
* <li>ACaptureRequest</li>
- * </ul>
+ * </ul></p>
*
* <p>This control is only effective if ACAMERA_CONTROL_MODE is AUTO.</p>
* <p>When set to the ON mode, the camera device's auto-white balance
@@ -739,17 +773,19 @@
* @see ACAMERA_CONTROL_AWB_LOCK
* @see ACAMERA_CONTROL_MODE
*/
- ACAMERA_CONTROL_AWB_MODE = // byte (enum)
+ ACAMERA_CONTROL_AWB_MODE = // byte (acamera_metadata_enum_android_control_awb_mode_t)
ACAMERA_CONTROL_START + 11,
/**
* <p>List of metering areas to use for auto-white-balance illuminant
* estimation.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: int32[5*area_count]</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
* <li>ACaptureRequest</li>
- * </ul>
+ * </ul></p>
*
* <p>Not available if android.control.maxRegionsAwb is 0.
* Otherwise will always be present.</p>
@@ -791,11 +827,13 @@
* of this capture, to help the camera device to decide optimal 3A
* strategy.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: byte (acamera_metadata_enum_android_control_capture_intent_t)</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
* <li>ACaptureRequest</li>
- * </ul>
+ * </ul></p>
*
* <p>This control (except for MANUAL) is only effective if
* <code>ACAMERA_CONTROL_MODE != OFF</code> and any 3A routine is active.</p>
@@ -807,16 +845,18 @@
* @see ACAMERA_CONTROL_MODE
* @see ACAMERA_REQUEST_AVAILABLE_CAPABILITIES
*/
- ACAMERA_CONTROL_CAPTURE_INTENT = // byte (enum)
+ ACAMERA_CONTROL_CAPTURE_INTENT = // byte (acamera_metadata_enum_android_control_capture_intent_t)
ACAMERA_CONTROL_START + 13,
/**
* <p>A special color effect to apply.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: byte (acamera_metadata_enum_android_control_effect_mode_t)</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
* <li>ACaptureRequest</li>
- * </ul>
+ * </ul></p>
*
* <p>When this mode is set, a color effect will be applied
* to images produced by the camera device. The interpretation
@@ -825,17 +865,19 @@
* depended on to be consistent (or present) across all
* devices.</p>
*/
- ACAMERA_CONTROL_EFFECT_MODE = // byte (enum)
+ ACAMERA_CONTROL_EFFECT_MODE = // byte (acamera_metadata_enum_android_control_effect_mode_t)
ACAMERA_CONTROL_START + 14,
/**
* <p>Overall mode of 3A (auto-exposure, auto-white-balance, auto-focus) control
* routines.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: byte (acamera_metadata_enum_android_control_mode_t)</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
* <li>ACaptureRequest</li>
- * </ul>
+ * </ul></p>
*
* <p>This is a top-level 3A control switch. When set to OFF, all 3A control
* by the camera device is disabled. The application must set the fields for
@@ -856,16 +898,18 @@
*
* @see ACAMERA_CONTROL_AF_MODE
*/
- ACAMERA_CONTROL_MODE = // byte (enum)
+ ACAMERA_CONTROL_MODE = // byte (acamera_metadata_enum_android_control_mode_t)
ACAMERA_CONTROL_START + 15,
/**
* <p>Control for which scene mode is currently active.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: byte (acamera_metadata_enum_android_control_scene_mode_t)</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
* <li>ACaptureRequest</li>
- * </ul>
+ * </ul></p>
*
* <p>Scene modes are custom camera modes optimized for a certain set of conditions and
* capture settings.</p>
@@ -883,17 +927,19 @@
* @see ACAMERA_CONTROL_AWB_MODE
* @see ACAMERA_CONTROL_MODE
*/
- ACAMERA_CONTROL_SCENE_MODE = // byte (enum)
+ ACAMERA_CONTROL_SCENE_MODE = // byte (acamera_metadata_enum_android_control_scene_mode_t)
ACAMERA_CONTROL_START + 16,
/**
* <p>Whether video stabilization is
* active.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: byte (acamera_metadata_enum_android_control_video_stabilization_mode_t)</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
* <li>ACaptureRequest</li>
- * </ul>
+ * </ul></p>
*
* <p>Video stabilization automatically warps images from
* the camera in order to stabilize motion between consecutive frames.</p>
@@ -923,7 +969,7 @@
* @see ACAMERA_LENS_OPTICAL_STABILIZATION_MODE
* @see ACAMERA_SCALER_CROP_REGION
*/
- ACAMERA_CONTROL_VIDEO_STABILIZATION_MODE = // byte (enum)
+ ACAMERA_CONTROL_VIDEO_STABILIZATION_MODE = // byte (acamera_metadata_enum_android_control_video_stabilization_mode_t)
ACAMERA_CONTROL_START + 17,
/**
* <p>List of auto-exposure antibanding modes for ACAMERA_CONTROL_AE_ANTIBANDING_MODE that are
@@ -931,10 +977,12 @@
*
* @see ACAMERA_CONTROL_AE_ANTIBANDING_MODE
*
- * <p>This tag may appear in:</p>
+ * <p>Type: byte[n]</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
- * </ul>
+ * </ul></p>
*
* <p>Not all of the auto-exposure anti-banding modes may be
* supported by a given camera device. This field lists the
@@ -952,10 +1000,12 @@
*
* @see ACAMERA_CONTROL_AE_MODE
*
- * <p>This tag may appear in:</p>
+ * <p>Type: byte[n]</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
- * </ul>
+ * </ul></p>
*
* <p>Not all the auto-exposure modes may be supported by a
* given camera device, especially if no flash unit is
@@ -980,10 +1030,12 @@
*
* @see ACAMERA_CONTROL_AE_TARGET_FPS_RANGE
*
- * <p>This tag may appear in:</p>
+ * <p>Type: int32[2*n]</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
- * </ul>
+ * </ul></p>
*
* <p>For devices at the LEGACY level or above:</p>
* <ul>
@@ -1025,12 +1077,13 @@
* @see ACAMERA_CONTROL_AE_COMPENSATION_STEP
* @see ACAMERA_CONTROL_AE_EXPOSURE_COMPENSATION
*
- * <p>This tag may appear in:</p>
+ * <p>Type: int32[2]</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
- * </ul>
+ * </ul></p>
*
- * <p>None</p>
*/
ACAMERA_CONTROL_AE_COMPENSATION_RANGE = // int32[2]
ACAMERA_CONTROL_START + 21,
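
A worked sketch combining this range with ACAMERA_CONTROL_AE_EXPOSURE_COMPENSATION (not part of this change): the requested index is clamped to the advertised [min, max] before being applied. With a step of 1/2 EV, an index of -2 would mean -1 EV.

    #include <camera/NdkCameraMetadata.h>
    #include <camera/NdkCameraMetadataTags.h>
    #include <camera/NdkCaptureRequest.h>

    /* Sketch: clamp an exposure-compensation index to the supported
     * range read from the static characteristics, then apply it. */
    static void set_exposure_compensation(const ACameraMetadata* chars,
                                          ACaptureRequest* request,
                                          int32_t index) {
        ACameraMetadata_const_entry range;
        if (ACameraMetadata_getConstEntry(chars,
                ACAMERA_CONTROL_AE_COMPENSATION_RANGE, &range) == ACAMERA_OK) {
            if (index < range.data.i32[0]) index = range.data.i32[0];
            if (index > range.data.i32[1]) index = range.data.i32[1];
        }
        ACaptureRequest_setEntry_i32(request,
                ACAMERA_CONTROL_AE_EXPOSURE_COMPENSATION, 1, &index);
    }
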
@@ -1038,10 +1091,12 @@
* <p>Smallest step by which the exposure compensation
* can be changed.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: rational</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
- * </ul>
+ * </ul></p>
*
* <p>This is the unit for ACAMERA_CONTROL_AE_EXPOSURE_COMPENSATION. For example, if this key has
* a value of <code>1/2</code>, then a setting of <code>-2</code> for ACAMERA_CONTROL_AE_EXPOSURE_COMPENSATION means
@@ -1059,10 +1114,12 @@
*
* @see ACAMERA_CONTROL_AF_MODE
*
- * <p>This tag may appear in:</p>
+ * <p>Type: byte[n]</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
- * </ul>
+ * </ul></p>
*
* <p>Not all the auto-focus modes may be supported by a
* given camera device. This entry lists the valid modes for
@@ -1086,10 +1143,12 @@
*
* @see ACAMERA_CONTROL_EFFECT_MODE
*
- * <p>This tag may appear in:</p>
+ * <p>Type: byte[n]</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
- * </ul>
+ * </ul></p>
*
* <p>This list contains the color effect modes that can be applied to
* images produced by the camera device.
@@ -1111,10 +1170,12 @@
*
* @see ACAMERA_CONTROL_SCENE_MODE
*
- * <p>This tag may appear in:</p>
+ * <p>Type: byte[n]</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
- * </ul>
+ * </ul></p>
*
* <p>This list contains scene modes that can be set for the camera device.
* Only scene modes that have been fully implemented for the
@@ -1136,10 +1197,12 @@
*
* @see ACAMERA_CONTROL_VIDEO_STABILIZATION_MODE
*
- * <p>This tag may appear in:</p>
+ * <p>Type: byte[n]</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
- * </ul>
+ * </ul></p>
*
* <p>OFF will always be listed.</p>
*/
@@ -1151,10 +1214,12 @@
*
* @see ACAMERA_CONTROL_AWB_MODE
*
- * <p>This tag may appear in:</p>
+ * <p>Type: byte[n]</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
- * </ul>
+ * </ul></p>
*
* <p>Not all the auto-white-balance modes may be supported by a
* given camera device. This entry lists the valid modes for
@@ -1183,22 +1248,25 @@
* @see ACAMERA_CONTROL_AF_REGIONS
* @see ACAMERA_CONTROL_AWB_REGIONS
*
- * <p>This tag may appear in:</p>
+ * <p>Type: int32[3]</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
- * </ul>
+ * </ul></p>
*
- * <p>None</p>
*/
ACAMERA_CONTROL_MAX_REGIONS = // int32[3]
ACAMERA_CONTROL_START + 28,
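
Reading sketch (not from this change), assuming the int32[3] tuple is ordered (AE, AWB, AF) to match android.control.maxRegions{Ae,Awb,Af}:

    #include <camera/NdkCameraMetadata.h>
    #include <camera/NdkCameraMetadataTags.h>

    /* Sketch: how many metering regions each 3A routine accepts. */
    static void read_max_regions(const ACameraMetadata* chars) {
        ACameraMetadata_const_entry e;
        if (ACameraMetadata_getConstEntry(chars,
                ACAMERA_CONTROL_MAX_REGIONS, &e) == ACAMERA_OK && e.count == 3) {
            int32_t max_ae  = e.data.i32[0];  /* assumed ordering */
            int32_t max_awb = e.data.i32[1];
            int32_t max_af  = e.data.i32[2];
            (void)max_ae; (void)max_awb; (void)max_af;
        }
    }
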
/**
* <p>Current state of the auto-exposure (AE) algorithm.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: byte (acamera_metadata_enum_android_control_ae_state_t)</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
- * </ul>
+ * </ul></p>
*
* <p>Switching between or enabling AE modes (ACAMERA_CONTROL_AE_MODE) always
* resets the AE state to INACTIVE. Similarly, switching between ACAMERA_CONTROL_MODE,
@@ -1257,15 +1325,17 @@
* @see ACAMERA_CONTROL_MODE
* @see ACAMERA_CONTROL_SCENE_MODE
*/
- ACAMERA_CONTROL_AE_STATE = // byte (enum)
+ ACAMERA_CONTROL_AE_STATE = // byte (acamera_metadata_enum_android_control_ae_state_t)
ACAMERA_CONTROL_START + 31,
/**
* <p>Current state of auto-focus (AF) algorithm.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: byte (acamera_metadata_enum_android_control_af_state_t)</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
- * </ul>
+ * </ul></p>
*
* <p>Switching between or enabling AF modes (ACAMERA_CONTROL_AF_MODE) always
* resets the AF state to INACTIVE. Similarly, switching between ACAMERA_CONTROL_MODE,
@@ -1357,15 +1427,17 @@
* @see ACAMERA_CONTROL_MODE
* @see ACAMERA_CONTROL_SCENE_MODE
*/
- ACAMERA_CONTROL_AF_STATE = // byte (enum)
+ ACAMERA_CONTROL_AF_STATE = // byte (acamera_metadata_enum_android_control_af_state_t)
ACAMERA_CONTROL_START + 32,
/**
* <p>Current state of auto-white balance (AWB) algorithm.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: byte (acamera_metadata_enum_android_control_awb_state_t)</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
- * </ul>
+ * </ul></p>
*
* <p>Switching between or enabling AWB modes (ACAMERA_CONTROL_AWB_MODE) always
* resets the AWB state to INACTIVE. Similarly, switching between ACAMERA_CONTROL_MODE,
@@ -1408,37 +1480,41 @@
* @see ACAMERA_CONTROL_MODE
* @see ACAMERA_CONTROL_SCENE_MODE
*/
- ACAMERA_CONTROL_AWB_STATE = // byte (enum)
+ ACAMERA_CONTROL_AWB_STATE = // byte (acamera_metadata_enum_android_control_awb_state_t)
ACAMERA_CONTROL_START + 34,
/**
* <p>Whether the camera device supports ACAMERA_CONTROL_AE_LOCK</p>
*
* @see ACAMERA_CONTROL_AE_LOCK
*
- * <p>This tag may appear in:</p>
+ * <p>Type: byte (acamera_metadata_enum_android_control_ae_lock_available_t)</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
- * </ul>
+ * </ul></p>
*
* <p>Devices with MANUAL_SENSOR capability or BURST_CAPTURE capability will always
* list <code>true</code>. This includes FULL devices.</p>
*/
- ACAMERA_CONTROL_AE_LOCK_AVAILABLE = // byte (enum)
+ ACAMERA_CONTROL_AE_LOCK_AVAILABLE = // byte (acamera_metadata_enum_android_control_ae_lock_available_t)
ACAMERA_CONTROL_START + 36,
/**
* <p>Whether the camera device supports ACAMERA_CONTROL_AWB_LOCK</p>
*
* @see ACAMERA_CONTROL_AWB_LOCK
*
- * <p>This tag may appear in:</p>
+ * <p>Type: byte (acamera_metadata_enum_android_control_awb_lock_available_t)</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
- * </ul>
+ * </ul></p>
*
* <p>Devices with MANUAL_POST_PROCESSING capability or BURST_CAPTURE capability will
* always list <code>true</code>. This includes FULL devices.</p>
*/
- ACAMERA_CONTROL_AWB_LOCK_AVAILABLE = // byte (enum)
+ ACAMERA_CONTROL_AWB_LOCK_AVAILABLE = // byte (acamera_metadata_enum_android_control_awb_lock_available_t)
ACAMERA_CONTROL_START + 37,
/**
* <p>List of control modes for ACAMERA_CONTROL_MODE that are supported by this camera
@@ -1446,10 +1522,12 @@
*
* @see ACAMERA_CONTROL_MODE
*
- * <p>This tag may appear in:</p>
+ * <p>Type: byte[n]</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
- * </ul>
+ * </ul></p>
*
* <p>This list contains control modes that can be set for the camera device.
* LEGACY mode devices will always support AUTO mode. LIMITED and FULL
@@ -1463,10 +1541,12 @@
*
* @see ACAMERA_CONTROL_POST_RAW_SENSITIVITY_BOOST
*
- * <p>This tag may appear in:</p>
+ * <p>Type: int32[2]</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
- * </ul>
+ * </ul></p>
*
 * <p>Devices that support post-RAW sensitivity boost will advertise the
 * ACAMERA_CONTROL_POST_RAW_SENSITIVITY_BOOST key for controlling

@@ -1484,11 +1564,13 @@
* <p>The amount of additional sensitivity boost applied to output images
* after RAW sensor data is captured.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: int32</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
* <li>ACaptureRequest</li>
- * </ul>
+ * </ul></p>
*
* <p>Some camera devices support additional digital sensitivity boosting in the
* camera processing pipeline after sensor RAW image is captured.
@@ -1521,11 +1603,13 @@
*
* @see ACAMERA_CONTROL_CAPTURE_INTENT
*
- * <p>This tag may appear in:</p>
+ * <p>Type: byte (acamera_metadata_enum_android_control_enable_zsl_t)</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
* <li>ACaptureRequest</li>
- * </ul>
+ * </ul></p>
*
* <p>If enableZsl is <code>true</code>, the camera device may enable zero-shutter-lag mode for requests with
* STILL_CAPTURE capture intent. The camera device may use images captured in the past to
@@ -1552,7 +1636,7 @@
* @see ACAMERA_CONTROL_CAPTURE_INTENT
* @see ACAMERA_SENSOR_TIMESTAMP
*/
- ACAMERA_CONTROL_ENABLE_ZSL = // byte (enum)
+ ACAMERA_CONTROL_ENABLE_ZSL = // byte (acamera_metadata_enum_android_control_enable_zsl_t)
ACAMERA_CONTROL_START + 41,
ACAMERA_CONTROL_END,
@@ -1560,11 +1644,13 @@
* <p>Operation mode for edge
* enhancement.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: byte (acamera_metadata_enum_android_edge_mode_t)</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
* <li>ACaptureRequest</li>
- * </ul>
+ * </ul></p>
*
* <p>Edge enhancement improves sharpness and details in the captured image. OFF means
* no enhancement will be applied by the camera device.</p>
@@ -1586,7 +1672,7 @@
* The camera device may adjust its internal edge enhancement parameters for best
* image quality based on the android.reprocess.effectiveExposureFactor, if it is set.</p>
*/
- ACAMERA_EDGE_MODE = // byte (enum)
+ ACAMERA_EDGE_MODE = // byte (acamera_metadata_enum_android_edge_mode_t)
ACAMERA_EDGE_START,
/**
* <p>List of edge enhancement modes for ACAMERA_EDGE_MODE that are supported by this camera
@@ -1594,10 +1680,12 @@
*
* @see ACAMERA_EDGE_MODE
*
- * <p>This tag may appear in:</p>
+ * <p>Type: byte[n]</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
- * </ul>
+ * </ul></p>
*
* <p>Full-capability camera devices must always support OFF; camera devices that support
* YUV_REPROCESSING or PRIVATE_REPROCESSING will list ZERO_SHUTTER_LAG; all devices will
@@ -1610,11 +1698,13 @@
/**
 * <p>The desired mode for the camera device's flash control.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: byte (acamera_metadata_enum_android_flash_mode_t)</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
* <li>ACaptureRequest</li>
- * </ul>
+ * </ul></p>
*
 * <p>This control is only effective when a flash unit is available
* (<code>ACAMERA_FLASH_INFO_AVAILABLE == true</code>).</p>
@@ -1635,16 +1725,18 @@
* @see ACAMERA_FLASH_INFO_AVAILABLE
* @see ACAMERA_FLASH_STATE
*/
- ACAMERA_FLASH_MODE = // byte (enum)
+ ACAMERA_FLASH_MODE = // byte (acamera_metadata_enum_android_flash_mode_t)
ACAMERA_FLASH_START + 2,
/**
* <p>Current state of the flash
* unit.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: byte (acamera_metadata_enum_android_flash_state_t)</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
- * </ul>
+ * </ul></p>
*
 * <p>When the camera device doesn't have a flash unit
* (i.e. <code>ACAMERA_FLASH_INFO_AVAILABLE == false</code>), this state will always be UNAVAILABLE.
@@ -1664,7 +1756,7 @@
* @see ACAMERA_FLASH_INFO_AVAILABLE
* @see ACAMERA_FLASH_MODE
*/
- ACAMERA_FLASH_STATE = // byte (enum)
+ ACAMERA_FLASH_STATE = // byte (acamera_metadata_enum_android_flash_state_t)
ACAMERA_FLASH_START + 5,
ACAMERA_FLASH_END,
@@ -1672,33 +1764,37 @@
* <p>Whether this camera device has a
* flash unit.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: byte (acamera_metadata_enum_android_flash_info_available_t)</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
- * </ul>
+ * </ul></p>
*
* <p>Will be <code>false</code> if no flash is available.</p>
* <p>If there is no flash unit, none of the flash controls do
* anything.</p>
*/
- ACAMERA_FLASH_INFO_AVAILABLE = // byte (enum)
+ ACAMERA_FLASH_INFO_AVAILABLE = // byte (acamera_metadata_enum_android_flash_info_available_t)
ACAMERA_FLASH_INFO_START,
ACAMERA_FLASH_INFO_END,
/**
* <p>Operational mode for hot pixel correction.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: byte (acamera_metadata_enum_android_hot_pixel_mode_t)</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
* <li>ACaptureRequest</li>
- * </ul>
+ * </ul></p>
*
 * <p>Hot pixel correction interpolates out, or otherwise removes, pixels
* that do not accurately measure the incoming light (i.e. pixels that
* are stuck at an arbitrary value or are oversensitive).</p>
*/
- ACAMERA_HOT_PIXEL_MODE = // byte (enum)
+ ACAMERA_HOT_PIXEL_MODE = // byte (acamera_metadata_enum_android_hot_pixel_mode_t)
ACAMERA_HOT_PIXEL_START,
/**
* <p>List of hot pixel correction modes for ACAMERA_HOT_PIXEL_MODE that are supported by this
@@ -1706,10 +1802,12 @@
*
* @see ACAMERA_HOT_PIXEL_MODE
*
- * <p>This tag may appear in:</p>
+ * <p>Type: byte[n]</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
- * </ul>
+ * </ul></p>
*
* <p>FULL mode camera devices will always support FAST.</p>
*/
@@ -1721,13 +1819,14 @@
* <p>GPS coordinates to include in output JPEG
* EXIF.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: double[3]</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
* <li>ACaptureRequest</li>
- * </ul>
+ * </ul></p>
*
- * <p>None</p>
*/
ACAMERA_JPEG_GPS_COORDINATES = // double[3]
ACAMERA_JPEG_START,
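
A sketch of stamping GPS EXIF data onto JPEG captures (not from this change); the double[3] layout is assumed to be (latitude, longitude, altitude):

    #include <camera/NdkCameraMetadataTags.h>
    #include <camera/NdkCaptureRequest.h>

    /* Sketch: attach a GPS fix to the next JPEG capture request.
     * Assumed coordinate order: latitude, longitude, altitude. */
    static void set_jpeg_gps(ACaptureRequest* request,
                             double lat, double lon, double alt_m,
                             int64_t fix_time_utc_s) {
        double coords[3] = {lat, lon, alt_m};
        ACaptureRequest_setEntry_double(request,
                ACAMERA_JPEG_GPS_COORDINATES, 3, coords);
        ACaptureRequest_setEntry_i64(request,
                ACAMERA_JPEG_GPS_TIMESTAMP, 1, &fix_time_utc_s);
    }
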
@@ -1735,13 +1834,14 @@
 * <p>32 characters describing the GPS algorithm to
* include in EXIF.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: byte</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
* <li>ACaptureRequest</li>
- * </ul>
+ * </ul></p>
*
- * <p>None</p>
*/
ACAMERA_JPEG_GPS_PROCESSING_METHOD = // byte
ACAMERA_JPEG_START + 1,
@@ -1749,24 +1849,27 @@
* <p>Time GPS fix was made to include in
* EXIF.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: int64</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
* <li>ACaptureRequest</li>
- * </ul>
+ * </ul></p>
*
- * <p>None</p>
*/
ACAMERA_JPEG_GPS_TIMESTAMP = // int64
ACAMERA_JPEG_START + 2,
/**
* <p>The orientation for a JPEG image.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: int32</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
* <li>ACaptureRequest</li>
- * </ul>
+ * </ul></p>
*
* <p>The clockwise rotation angle in degrees, relative to the orientation
 * of the camera, that the JPEG picture needs to be rotated by, to be viewed
@@ -1805,11 +1908,13 @@
* <p>Compression quality of the final JPEG
* image.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: byte</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
* <li>ACaptureRequest</li>
- * </ul>
+ * </ul></p>
*
* <p>85-95 is typical usage range.</p>
*/
@@ -1819,24 +1924,27 @@
* <p>Compression quality of JPEG
* thumbnail.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: byte</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
* <li>ACaptureRequest</li>
- * </ul>
+ * </ul></p>
*
- * <p>None</p>
*/
ACAMERA_JPEG_THUMBNAIL_QUALITY = // byte
ACAMERA_JPEG_START + 5,
/**
* <p>Resolution of embedded JPEG thumbnail.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: int32[2]</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
* <li>ACaptureRequest</li>
- * </ul>
+ * </ul></p>
*
* <p>When set to (0, 0) value, the JPEG EXIF will not contain thumbnail,
* but the captured JPEG will still be a valid image.</p>
@@ -1871,10 +1979,12 @@
*
* @see ACAMERA_JPEG_THUMBNAIL_SIZE
*
- * <p>This tag may appear in:</p>
+ * <p>Type: int32[2*n]</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
- * </ul>
+ * </ul></p>
*
* <p>This list will include at least one non-zero resolution, plus <code>(0,0)</code> for indicating no
* thumbnail should be generated.</p>
@@ -1902,11 +2012,13 @@
* <p>The desired lens aperture size, as a ratio of lens focal length to the
* effective aperture diameter.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: float</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
* <li>ACaptureRequest</li>
- * </ul>
+ * </ul></p>
*
 * <p>Setting this value is only supported on camera devices that have a variable
* aperture lens.</p>
@@ -1934,11 +2046,13 @@
/**
* <p>The desired setting for the lens neutral density filter(s).</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: float</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
* <li>ACaptureRequest</li>
- * </ul>
+ * </ul></p>
*
* <p>This control will not be supported on most camera devices.</p>
* <p>Lens filters are typically used to lower the amount of light the
@@ -1960,11 +2074,13 @@
/**
* <p>The desired lens focal length; used for optical zoom.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: float</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
* <li>ACaptureRequest</li>
- * </ul>
+ * </ul></p>
*
* <p>This setting controls the physical focal length of the camera
* device's lens. Changing the focal length changes the field of
@@ -1986,11 +2102,13 @@
* <p>Desired distance to plane of sharpest focus,
* measured from frontmost surface of the lens.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: float</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
* <li>ACaptureRequest</li>
- * </ul>
+ * </ul></p>
*
 * <p>Should be zero for fixed-focus cameras.</p>
*/
@@ -2000,11 +2118,13 @@
* <p>Sets whether the camera device uses optical image stabilization (OIS)
* when capturing images.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: byte (acamera_metadata_enum_android_lens_optical_stabilization_mode_t)</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
* <li>ACaptureRequest</li>
- * </ul>
+ * </ul></p>
*
* <p>OIS is used to compensate for motion blur due to small
* movements of the camera during capture. Unlike digital image
@@ -2027,30 +2147,33 @@
* @see ACAMERA_CONTROL_VIDEO_STABILIZATION_MODE
* @see ACAMERA_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION
*/
- ACAMERA_LENS_OPTICAL_STABILIZATION_MODE = // byte (enum)
+ ACAMERA_LENS_OPTICAL_STABILIZATION_MODE = // byte (acamera_metadata_enum_android_lens_optical_stabilization_mode_t)
ACAMERA_LENS_START + 4,
/**
* <p>Direction the camera faces relative to
* device screen.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: byte (acamera_metadata_enum_android_lens_facing_t)</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
- * </ul>
+ * </ul></p>
*
- * <p>None</p>
*/
- ACAMERA_LENS_FACING = // byte (enum)
+ ACAMERA_LENS_FACING = // byte (acamera_metadata_enum_android_lens_facing_t)
ACAMERA_LENS_START + 5,
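
An enumeration sketch using this tag (not from this change), assuming an ACameraIdList already obtained from ACameraManager_getCameraIdList:

    #include <stdbool.h>
    #include <stddef.h>
    #include <camera/NdkCameraManager.h>
    #include <camera/NdkCameraMetadata.h>
    #include <camera/NdkCameraMetadataTags.h>

    /* Sketch: return the first back-facing camera id, or NULL.  The
     * pointer stays valid only until the id list is deleted. */
    static const char* find_back_camera(ACameraManager* mgr,
                                        const ACameraIdList* ids) {
        for (int i = 0; i < ids->numCameras; i++) {
            ACameraMetadata* chars = NULL;
            if (ACameraManager_getCameraCharacteristics(mgr,
                    ids->cameraIds[i], &chars) != ACAMERA_OK) continue;
            ACameraMetadata_const_entry facing;
            bool is_back =
                ACameraMetadata_getConstEntry(chars, ACAMERA_LENS_FACING,
                        &facing) == ACAMERA_OK &&
                facing.data.u8[0] == ACAMERA_LENS_FACING_BACK;
            ACameraMetadata_free(chars);
            if (is_back) return ids->cameraIds[i];
        }
        return NULL;
    }
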
/**
* <p>The orientation of the camera relative to the sensor
* coordinate system.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: float[4]</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
* <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
- * </ul>
+ * </ul></p>
*
* <p>The four coefficients that describe the quaternion
* rotation from the Android sensor coordinate system to a
@@ -2084,11 +2207,13 @@
/**
* <p>Position of the camera optical center.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: float[3]</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
* <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
- * </ul>
+ * </ul></p>
*
* <p>The position of the camera device's lens optical center,
* as a three-dimensional vector <code>(x,y,z)</code>, relative to the
@@ -2129,10 +2254,12 @@
* <p>The range of scene distances that are in
* sharp focus (depth of field).</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: float[2]</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
- * </ul>
+ * </ul></p>
*
 * <p>If variable focus is not supported, the camera device can still report a
 * fixed depth of field range.</p>
@@ -2142,10 +2269,12 @@
/**
* <p>Current lens status.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: byte (acamera_metadata_enum_android_lens_state_t)</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
- * </ul>
+ * </ul></p>
*
* <p>For lens parameters ACAMERA_LENS_FOCAL_LENGTH, ACAMERA_LENS_FOCUS_DISTANCE,
* ACAMERA_LENS_FILTER_DENSITY and ACAMERA_LENS_APERTURE, when changes are requested,
@@ -2176,17 +2305,19 @@
* @see ACAMERA_LENS_INFO_AVAILABLE_FOCAL_LENGTHS
* @see ACAMERA_LENS_INFO_MINIMUM_FOCUS_DISTANCE
*/
- ACAMERA_LENS_STATE = // byte (enum)
+ ACAMERA_LENS_STATE = // byte (acamera_metadata_enum_android_lens_state_t)
ACAMERA_LENS_START + 9,
/**
* <p>The parameters for this camera device's intrinsic
* calibration.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: float[5]</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
* <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
- * </ul>
+ * </ul></p>
*
* <p>The five calibration parameters that describe the
* transform from camera-centric 3D coordinates to sensor
@@ -2245,11 +2376,13 @@
* <p>The correction coefficients to correct for this camera device's
* radial and tangential lens distortion.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: float[6]</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
* <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
- * </ul>
+ * </ul></p>
*
* <p>Four radial distortion coefficients <code>[kappa_0, kappa_1, kappa_2,
* kappa_3]</code> and two tangential distortion coefficients
@@ -2290,10 +2423,12 @@
*
* @see ACAMERA_LENS_APERTURE
*
- * <p>This tag may appear in:</p>
+ * <p>Type: float[n]</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
- * </ul>
+ * </ul></p>
*
* <p>If the camera device doesn't support a variable lens aperture,
* this list will contain only one value, which is the fixed aperture size.</p>
@@ -2308,10 +2443,12 @@
*
* @see ACAMERA_LENS_FILTER_DENSITY
*
- * <p>This tag may appear in:</p>
+ * <p>Type: float[n]</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
- * </ul>
+ * </ul></p>
*
* <p>If a neutral density filter is not supported by this camera device,
* this list will contain only 0. Otherwise, this list will include every
@@ -2325,10 +2462,12 @@
*
* @see ACAMERA_LENS_FOCAL_LENGTH
*
- * <p>This tag may appear in:</p>
+ * <p>Type: float[n]</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
- * </ul>
+ * </ul></p>
*
* <p>If optical zoom is not supported, this list will only contain
* a single value corresponding to the fixed focal length of the
@@ -2343,10 +2482,12 @@
*
* @see ACAMERA_LENS_OPTICAL_STABILIZATION_MODE
*
- * <p>This tag may appear in:</p>
+ * <p>Type: byte[n]</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
- * </ul>
+ * </ul></p>
*
* <p>If OIS is not supported by a given camera device, this list will
* contain only OFF.</p>
@@ -2356,10 +2497,12 @@
/**
* <p>Hyperfocal distance for this lens.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: float</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
- * </ul>
+ * </ul></p>
*
* <p>If the lens is not fixed focus, the camera device will report this
* field when ACAMERA_LENS_INFO_FOCUS_DISTANCE_CALIBRATION is APPROXIMATE or CALIBRATED.</p>
@@ -2372,10 +2515,12 @@
* <p>Shortest distance from frontmost surface
* of the lens that can be brought into sharp focus.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: float</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
- * </ul>
+ * </ul></p>
*
* <p>If the lens is fixed-focus, this will be
* 0.</p>
@@ -2385,10 +2530,12 @@
/**
* <p>Dimensions of lens shading map.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: int32[2]</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
- * </ul>
+ * </ul></p>
*
* <p>The map should be on the order of 30-40 rows and columns, and
* must be smaller than 64x64.</p>
@@ -2398,10 +2545,12 @@
/**
* <p>The lens focus distance calibration quality.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: byte (acamera_metadata_enum_android_lens_info_focus_distance_calibration_t)</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
- * </ul>
+ * </ul></p>
*
* <p>The lens focus distance calibration quality determines the reliability of
* focus related metadata entries, i.e. ACAMERA_LENS_FOCUS_DISTANCE,
@@ -2422,18 +2571,20 @@
* @see ACAMERA_LENS_INFO_HYPERFOCAL_DISTANCE
* @see ACAMERA_LENS_INFO_MINIMUM_FOCUS_DISTANCE
*/
- ACAMERA_LENS_INFO_FOCUS_DISTANCE_CALIBRATION = // byte (enum)
+ ACAMERA_LENS_INFO_FOCUS_DISTANCE_CALIBRATION = // byte (acamera_metadata_enum_android_lens_info_focus_distance_calibration_t)
ACAMERA_LENS_INFO_START + 7,
ACAMERA_LENS_INFO_END,
/**
* <p>Mode of operation for the noise reduction algorithm.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: byte (acamera_metadata_enum_android_noise_reduction_mode_t)</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
* <li>ACaptureRequest</li>
- * </ul>
+ * </ul></p>
*
* <p>The noise reduction algorithm attempts to improve image quality by removing
* excessive noise added by the capture process, especially in dark conditions.</p>
@@ -2463,7 +2614,7 @@
*
* @see ACAMERA_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES
*/
- ACAMERA_NOISE_REDUCTION_MODE = // byte (enum)
+ ACAMERA_NOISE_REDUCTION_MODE = // byte (acamera_metadata_enum_android_noise_reduction_mode_t)
ACAMERA_NOISE_REDUCTION_START,
/**
* <p>List of noise reduction modes for ACAMERA_NOISE_REDUCTION_MODE that are supported
@@ -2471,10 +2622,12 @@
*
* @see ACAMERA_NOISE_REDUCTION_MODE
*
- * <p>This tag may appear in:</p>
+ * <p>Type: byte[n]</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
- * </ul>
+ * </ul></p>
*
* <p>Full-capability camera devices will always support OFF and FAST.</p>
* <p>Camera devices that support YUV_REPROCESSING or PRIVATE_REPROCESSING will support
@@ -2489,10 +2642,12 @@
* <p>The maximum numbers of different types of output streams
* that can be configured and used simultaneously by a camera device.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: int32[3]</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
- * </ul>
+ * </ul></p>
*
 * <p>This is a 3-element tuple that contains the max number of simultaneous output
* streams for raw sensor, processed (but not stalling), and processed (and stalling)
@@ -2523,10 +2678,12 @@
* through from when it was exposed to when the final completed result
* was available to the framework.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: byte</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
- * </ul>
+ * </ul></p>
*
* <p>Depending on what settings are used in the request, and
* what streams are configured, the data may undergo less processing,
@@ -2542,10 +2699,12 @@
* has to go through from when it's exposed to when it's available
* to the framework.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: byte</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
- * </ul>
+ * </ul></p>
*
* <p>A typical minimum value for this is 2 (one stage to expose,
 * one stage to read out) from the sensor. The ISP then usually adds
@@ -2568,10 +2727,12 @@
* <p>Defines how many sub-components
* a result will be composed of.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: int32</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
- * </ul>
+ * </ul></p>
*
* <p>In order to combat the pipeline latency, partial results
* may be delivered to the application layer from the camera device as
@@ -2592,10 +2753,12 @@
* <p>List of capabilities that this camera device
* advertises as fully supporting.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: byte[n] (acamera_metadata_enum_android_request_available_capabilities_t)</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
- * </ul>
+ * </ul></p>
*
* <p>A capability is a contract that the camera device makes in order
* to be able to satisfy one or more use cases.</p>
@@ -2620,16 +2783,18 @@
* @see ACAMERA_REQUEST_AVAILABLE_REQUEST_KEYS
* @see ACAMERA_REQUEST_AVAILABLE_RESULT_KEYS
*/
- ACAMERA_REQUEST_AVAILABLE_CAPABILITIES = // byte[n] (enum)
+ ACAMERA_REQUEST_AVAILABLE_CAPABILITIES = // byte[n] (acamera_metadata_enum_android_request_available_capabilities_t)
ACAMERA_REQUEST_START + 12,
/**
* <p>A list of all keys that the camera device has available
* to use with {@link ACaptureRequest}.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: int32[n]</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
- * </ul>
+ * </ul></p>
*
* <p>Attempting to set a key into a CaptureRequest that is not
* listed here will result in an invalid request and will be rejected
@@ -2648,10 +2813,12 @@
* to query with {@link ACameraMetadata} from
* {@link ACameraCaptureSession_captureCallback_result}.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: int32[n]</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
- * </ul>
+ * </ul></p>
*
* <p>Attempting to get a key from a CaptureResult that is not
* listed here will always return a <code>null</code> value. Getting a key from
@@ -2679,10 +2846,12 @@
* to query with {@link ACameraMetadata} from
* {@link ACameraManager_getCameraCharacteristics}.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: int32[n]</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
- * </ul>
+ * </ul></p>
*
* <p>This entry follows the same rules as
* ACAMERA_REQUEST_AVAILABLE_RESULT_KEYS (except that it applies for
@@ -2698,11 +2867,13 @@
/**
* <p>The desired region of the sensor to read out for this capture.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: int32[4]</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
* <li>ACaptureRequest</li>
- * </ul>
+ * </ul></p>
*
* <p>This control can be used to implement digital zoom.</p>
* <p>The data representation is int[4], which maps to (left, top, width, height).</p>
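
A digital-zoom sketch built on this representation (not from this change). It assumes the full active-array width and height were read beforehand from ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE:

    #include <camera/NdkCameraMetadataTags.h>
    #include <camera/NdkCaptureRequest.h>

    /* Sketch: centered 2x digital zoom.  Cropping to half the active
     * array in each dimension doubles the effective magnification. */
    static void set_2x_zoom(ACaptureRequest* request,
                            int32_t array_w, int32_t array_h) {
        int32_t crop[4] = {
            array_w / 4,   /* left */
            array_h / 4,   /* top */
            array_w / 2,   /* width */
            array_h / 2,   /* height */
        };
        ACaptureRequest_setEntry_i32(request, ACAMERA_SCALER_CROP_REGION, 4, crop);
    }
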
@@ -2748,10 +2919,12 @@
*
* @see ACAMERA_SCALER_CROP_REGION
*
- * <p>This tag may appear in:</p>
+ * <p>Type: float</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
- * </ul>
+ * </ul></p>
*
* <p>This represents the maximum amount of zooming possible by
* the camera device, or equivalently, the minimum cropping
@@ -2767,10 +2940,12 @@
* camera device supports
* (i.e. format, width, height, output/input stream).</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: int32[n*4] (acamera_metadata_enum_android_scaler_available_stream_configurations_t)</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
- * </ul>
+ * </ul></p>
*
* <p>The configurations are listed as <code>(format, width, height, input?)</code>
* tuples.</p>
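
A parsing sketch for these tuples (not from this change), collecting the output sizes advertised for one pixel format:

    #include <camera/NdkCameraMetadata.h>
    #include <camera/NdkCameraMetadataTags.h>

    /* Sketch: walk the (format, width, height, input?) tuples and pick
     * out output configurations matching the requested format. */
    static void list_output_sizes(const ACameraMetadata* chars, int32_t format) {
        ACameraMetadata_const_entry e;
        if (ACameraMetadata_getConstEntry(chars,
                ACAMERA_SCALER_AVAILABLE_STREAM_CONFIGURATIONS, &e) != ACAMERA_OK)
            return;
        for (uint32_t i = 0; i + 3 < e.count; i += 4) {
            if (e.data.i32[i + 3] ==
                    ACAMERA_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_INPUT)
                continue;  /* skip input configurations */
            if (e.data.i32[i] == format) {
                int32_t w = e.data.i32[i + 1], h = e.data.i32[i + 2];
                (void)w; (void)h;  /* e.g. log or store the size */
            }
        }
    }
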
@@ -2805,16 +2980,18 @@
* @see ACAMERA_REQUEST_AVAILABLE_CAPABILITIES
* @see ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE
*/
- ACAMERA_SCALER_AVAILABLE_STREAM_CONFIGURATIONS = // int32[n*4] (enum)
+ ACAMERA_SCALER_AVAILABLE_STREAM_CONFIGURATIONS = // int32[n*4] (acamera_metadata_enum_android_scaler_available_stream_configurations_t)
ACAMERA_SCALER_START + 10,
/**
* <p>This lists the minimum frame duration for each
* format/size combination.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: int64[4*n]</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
- * </ul>
+ * </ul></p>
*
* <p>This should correspond to the frame duration when only that
* stream is active, with all processing (typically in android.*.mode)
@@ -2836,10 +3013,12 @@
* <p>This lists the maximum stall duration for each
* output format/size combination.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: int64[4*n]</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
- * </ul>
+ * </ul></p>
*
* <p>A stall duration is how much extra time would get added
* to the normal minimum frame duration for a repeating request
@@ -2904,10 +3083,12 @@
/**
* <p>The crop type that this camera device supports.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: byte (acamera_metadata_enum_android_scaler_cropping_type_t)</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
- * </ul>
+ * </ul></p>
*
* <p>When passing a non-centered crop region (ACAMERA_SCALER_CROP_REGION) to a camera
* device that only supports CENTER_ONLY cropping, the camera device will move the
@@ -2922,7 +3103,7 @@
* @see ACAMERA_SCALER_CROP_REGION
* @see ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE
*/
- ACAMERA_SCALER_CROPPING_TYPE = // byte (enum)
+ ACAMERA_SCALER_CROPPING_TYPE = // byte (acamera_metadata_enum_android_scaler_cropping_type_t)
ACAMERA_SCALER_START + 13,
ACAMERA_SCALER_END,
@@ -2930,11 +3111,13 @@
* <p>Duration each pixel is exposed to
* light.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: int64</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
* <li>ACaptureRequest</li>
- * </ul>
+ * </ul></p>
*
* <p>If the sensor can't expose this exact duration, it will shorten the
* duration exposed to the nearest possible value (rather than expose longer).
@@ -2951,11 +3134,13 @@
* <p>Duration from start of frame exposure to
* start of next frame exposure.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: int64</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
* <li>ACaptureRequest</li>
- * </ul>
+ * </ul></p>
*
* <p>The maximum frame rate that can be supported by a camera subsystem is
* a function of many factors:</p>
@@ -3037,11 +3222,13 @@
* <p>The amount of gain applied to sensor data
* before processing.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: int32</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
* <li>ACaptureRequest</li>
- * </ul>
+ * </ul></p>
*
* <p>The sensitivity is the standard ISO sensitivity value,
* as defined in ISO 12232:2006.</p>
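
A sketch of full manual exposure using this tag together with ACAMERA_SENSOR_EXPOSURE_TIME and ACAMERA_SENSOR_FRAME_DURATION (not from this change; assumes the MANUAL_SENSOR capability, with illustrative values):

    #include <camera/NdkCameraMetadataTags.h>
    #include <camera/NdkCaptureRequest.h>

    /* Sketch: disable AE so the manual sensor controls take effect. */
    static void set_manual_exposure(ACaptureRequest* request) {
        uint8_t ae_mode = ACAMERA_CONTROL_AE_MODE_OFF;
        ACaptureRequest_setEntry_u8(request, ACAMERA_CONTROL_AE_MODE, 1, &ae_mode);

        int32_t iso = 400;               /* ISO sensitivity */
        ACaptureRequest_setEntry_i32(request, ACAMERA_SENSOR_SENSITIVITY, 1, &iso);

        int64_t exposure_ns = 10000000;  /* 10 ms exposure */
        ACaptureRequest_setEntry_i64(request,
                ACAMERA_SENSOR_EXPOSURE_TIME, 1, &exposure_ns);

        int64_t frame_ns = 33333333;     /* ~30 fps frame duration */
        ACaptureRequest_setEntry_i64(request,
                ACAMERA_SENSOR_FRAME_DURATION, 1, &frame_ns);
    }
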
@@ -3072,10 +3259,12 @@
* @see ACAMERA_SENSOR_COLOR_TRANSFORM1
* @see ACAMERA_SENSOR_FORWARD_MATRIX1
*
- * <p>This tag may appear in:</p>
+ * <p>Type: byte (acamera_metadata_enum_android_sensor_reference_illuminant1_t)</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
- * </ul>
+ * </ul></p>
*
* <p>The values in this key correspond to the values defined for the
* EXIF LightSource tag. These illuminants are standard light sources
@@ -3092,7 +3281,7 @@
* @see ACAMERA_SENSOR_FORWARD_MATRIX1
* @see ACAMERA_SENSOR_REFERENCE_ILLUMINANT2
*/
- ACAMERA_SENSOR_REFERENCE_ILLUMINANT1 = // byte (enum)
+ ACAMERA_SENSOR_REFERENCE_ILLUMINANT1 = // byte (acamera_metadata_enum_android_sensor_reference_illuminant1_t)
ACAMERA_SENSOR_START + 3,
/**
* <p>The standard reference illuminant used as the scene light source when
@@ -3104,10 +3293,12 @@
* @see ACAMERA_SENSOR_COLOR_TRANSFORM2
* @see ACAMERA_SENSOR_FORWARD_MATRIX2
*
- * <p>This tag may appear in:</p>
+ * <p>Type: byte</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
- * </ul>
+ * </ul></p>
*
* <p>See ACAMERA_SENSOR_REFERENCE_ILLUMINANT1 for more details.</p>
* <p>If this key is present, then ACAMERA_SENSOR_COLOR_TRANSFORM2,
@@ -3125,10 +3316,12 @@
* <p>A per-device calibration transform matrix that maps from the
* reference sensor colorspace to the actual device sensor colorspace.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: rational[3*3]</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
- * </ul>
+ * </ul></p>
*
* <p>This matrix is used to correct for per-device variations in the
* sensor colorspace, and is used for processing raw buffer data.</p>
@@ -3148,10 +3341,12 @@
* reference sensor colorspace to the actual device sensor colorspace
* (this is the colorspace of the raw buffer data).</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: rational[3*3]</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
- * </ul>
+ * </ul></p>
*
* <p>This matrix is used to correct for per-device variations in the
* sensor colorspace, and is used for processing raw buffer data.</p>
@@ -3172,10 +3367,12 @@
* <p>A matrix that transforms color values from CIE XYZ color space to
* reference sensor color space.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: rational[3*3]</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
- * </ul>
+ * </ul></p>
*
* <p>This matrix is used to convert from the standard CIE XYZ color
* space to the reference sensor colorspace, and is used when processing
@@ -3198,10 +3395,12 @@
* <p>A matrix that transforms color values from CIE XYZ color space to
* reference sensor color space.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: rational[3*3]</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
- * </ul>
+ * </ul></p>
*
* <p>This matrix is used to convert from the standard CIE XYZ color
* space to the reference sensor colorspace, and is used when processing
@@ -3226,10 +3425,12 @@
* <p>A matrix that transforms white balanced camera colors from the reference
* sensor colorspace to the CIE XYZ colorspace with a D50 whitepoint.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: rational[3*3]</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
- * </ul>
+ * </ul></p>
*
* <p>This matrix is used to convert to the standard CIE XYZ colorspace, and
* is used when processing raw buffer data.</p>
@@ -3250,10 +3451,12 @@
* <p>A matrix that transforms white balanced camera colors from the reference
* sensor colorspace to the CIE XYZ colorspace with a D50 whitepoint.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: rational[3*3]</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
- * </ul>
+ * </ul></p>
*
* <p>This matrix is used to convert to the standard CIE XYZ colorspace, and
* is used when processing raw buffer data.</p>
@@ -3276,10 +3479,12 @@
* <p>A fixed black level offset for each of the color filter arrangement
* (CFA) mosaic channels.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: int32[4]</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
- * </ul>
+ * </ul></p>
*
* <p>This key specifies the zero light value for each of the CFA mosaic
* channels in the camera sensor. The maximal value output by the
@@ -3310,10 +3515,12 @@
* <p>Maximum sensitivity that is implemented
* purely through analog gain.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: int32</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
- * </ul>
+ * </ul></p>
*
* <p>For ACAMERA_SENSOR_SENSITIVITY values less than or
* equal to this, all applied gain must be analog. For
@@ -3328,10 +3535,12 @@
* <p>Clockwise angle through which the output image needs to be rotated to be
* upright on the device screen in its native orientation.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: int32</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
- * </ul>
+ * </ul></p>
*
* <p>Also defines the direction of rolling shutter readout, which is from top to bottom in
* the sensor's coordinate system.</p>
@@ -3342,10 +3551,12 @@
* <p>Time at start of exposure of first
* row of the image sensor active array, in nanoseconds.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: int64</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
- * </ul>
+ * </ul></p>
*
* <p>The timestamps are also included in all image
* buffers produced for the same capture, and will be identical
@@ -3374,10 +3585,12 @@
* <p>The estimated camera neutral color in the native sensor colorspace at
* the time of capture.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: rational[3]</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
- * </ul>
+ * </ul></p>
*
* <p>This value gives the neutral color point encoded as an RGB value in the
* native sensor color space. The neutral color point indicates the
@@ -3391,10 +3604,12 @@
/**
* <p>Noise model coefficients for each CFA mosaic channel.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: double[2*CFA Channels]</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
- * </ul>
+ * </ul></p>
*
* <p>This key contains two noise model coefficients for each CFA channel
* corresponding to the sensor amplification (S) and sensor readout
@@ -3421,10 +3636,12 @@
/**
* <p>The worst-case divergence between Bayer green channels.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: float</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
- * </ul>
+ * </ul></p>
*
* <p>This value is an estimate of the worst case split between the
* Bayer green channels in the red and blue rows in the sensor color
@@ -3465,11 +3682,13 @@
*
* @see ACAMERA_SENSOR_TEST_PATTERN_MODE
*
- * <p>This tag may appear in:</p>
+ * <p>Type: int32[4]</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
* <li>ACaptureRequest</li>
- * </ul>
+ * </ul></p>
*
* <p>Each color channel is treated as an unsigned 32-bit integer.
* The camera device then uses the most significant X bits
@@ -3484,11 +3703,13 @@
* <p>When enabled, the sensor sends a test pattern instead of
* doing a real exposure from the camera.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: int32 (acamera_metadata_enum_android_sensor_test_pattern_mode_t)</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
* <li>ACaptureRequest</li>
- * </ul>
+ * </ul></p>
*
* <p>When a test pattern is enabled, all manual sensor controls specified
* by ACAMERA_SENSOR_* will be ignored. All other controls should
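Under the same assumptions (a valid `request`, no error handling), switching the sensor to a color-bar test pattern is a sketch like:

    // All manual ACAMERA_SENSOR_* controls are ignored while a pattern is active.
    const int32_t mode = ACAMERA_SENSOR_TEST_PATTERN_MODE_COLOR_BARS;
    ACaptureRequest_setEntry_i32(request, ACAMERA_SENSOR_TEST_PATTERN_MODE, 1, &mode);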
@@ -3498,7 +3719,7 @@
* would not actually affect it).</p>
* <p>Defaults to OFF.</p>
*/
- ACAMERA_SENSOR_TEST_PATTERN_MODE = // int32 (enum)
+ ACAMERA_SENSOR_TEST_PATTERN_MODE = // int32 (acamera_metadata_enum_android_sensor_test_pattern_mode_t)
ACAMERA_SENSOR_START + 24,
/**
* <p>List of sensor test pattern modes for ACAMERA_SENSOR_TEST_PATTERN_MODE
@@ -3506,10 +3727,12 @@
*
* @see ACAMERA_SENSOR_TEST_PATTERN_MODE
*
- * <p>This tag may appear in:</p>
+ * <p>Type: int32[n]</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
- * </ul>
+ * </ul></p>
*
* <p>Defaults to OFF, and always includes OFF if defined.</p>
*/
@@ -3519,10 +3742,12 @@
* <p>Duration between the start of first row exposure
* and the start of last row exposure.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: int64</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
- * </ul>
+ * </ul></p>
*
* <p>This is the exposure time skew between the first and last
* row exposure start times. The first row and the last row are
@@ -3539,10 +3764,12 @@
* <p>List of disjoint rectangles indicating the sensor
* optically shielded black pixel regions.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: int32[4*num_regions]</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
- * </ul>
+ * </ul></p>
*
* <p>In most camera sensors, the active array is surrounded by some
* optically shielded pixel areas. By blocking light, these pixels
@@ -3569,10 +3796,12 @@
* <p>A per-frame dynamic black level offset for each of the color filter
* arrangement (CFA) mosaic channels.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: float[4]</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
- * </ul>
+ * </ul></p>
*
* <p>Camera sensor black levels may vary dramatically for different
* capture settings (e.g. ACAMERA_SENSOR_SENSITIVITY). The fixed black
@@ -3610,10 +3839,12 @@
/**
* <p>Maximum raw value output by sensor for this frame.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: int32</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
- * </ul>
+ * </ul></p>
*
* <p>Since the ACAMERA_SENSOR_BLACK_LEVEL_PATTERN may change for different
* capture settings (e.g., ACAMERA_SENSOR_SENSITIVITY), the white
@@ -3637,10 +3868,12 @@
* <p>The area of the image sensor which corresponds to active pixels after any geometric
* distortion correction has been applied.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: int32[4]</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
- * </ul>
+ * </ul></p>
*
* <p>This is the rectangle representing the size of the active region of the sensor (i.e.
* the region that actually receives light from the scene) after any geometric correction
@@ -3668,10 +3901,12 @@
*
* @see ACAMERA_SENSOR_SENSITIVITY
*
- * <p>This tag may appear in:</p>
+ * <p>Type: int32[2]</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
- * </ul>
+ * </ul></p>
*
* <p>The values are the standard ISO sensitivity values,
* as defined in ISO 12232:2006.</p>
@@ -3683,14 +3918,15 @@
* represents the colors in the top-left 2x2 section of
* the sensor, in reading order.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: byte (acamera_metadata_enum_android_sensor_info_color_filter_arrangement_t)</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
- * </ul>
+ * </ul></p>
*
- * <p>None</p>
*/
- ACAMERA_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT = // byte (enum)
+ ACAMERA_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT = // byte (acamera_metadata_enum_android_sensor_info_color_filter_arrangement_t)
ACAMERA_SENSOR_INFO_START + 2,
/**
* <p>The range of image exposure times for ACAMERA_SENSOR_EXPOSURE_TIME supported
@@ -3698,12 +3934,13 @@
*
* @see ACAMERA_SENSOR_EXPOSURE_TIME
*
- * <p>This tag may appear in:</p>
+ * <p>Type: int64[2]</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
- * </ul>
+ * </ul></p>
*
- * <p>None</p>
*/
ACAMERA_SENSOR_INFO_EXPOSURE_TIME_RANGE = // int64[2]
ACAMERA_SENSOR_INFO_START + 3,
@@ -3713,10 +3950,12 @@
*
* @see ACAMERA_SENSOR_FRAME_DURATION
*
- * <p>This tag may appear in:</p>
+ * <p>Type: int64</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
- * </ul>
+ * </ul></p>
*
* <p>Attempting to use frame durations beyond the maximum will result in the frame
* duration being clipped to the maximum. See that control for a full definition of frame
@@ -3731,10 +3970,12 @@
* <p>The physical dimensions of the full pixel
* array.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: float[2]</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
- * </ul>
+ * </ul></p>
*
* <p>This is the physical size of the sensor pixel
* array defined by ACAMERA_SENSOR_INFO_PIXEL_ARRAY_SIZE.</p>
@@ -3747,10 +3988,12 @@
* <p>Dimensions of the full pixel array, possibly
* including black calibration pixels.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: int32[2]</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
- * </ul>
+ * </ul></p>
*
* <p>The pixel count of the full pixel array of the image sensor, which covers
* ACAMERA_SENSOR_INFO_PHYSICAL_SIZE area. This represents the full pixel dimensions of
@@ -3773,10 +4016,12 @@
/**
* <p>Maximum raw value output by sensor.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: int32</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
- * </ul>
+ * </ul></p>
*
* <p>This specifies the fully-saturated encoding level for the raw
* sample values from the sensor. This is typically caused by the
@@ -3802,26 +4047,30 @@
/**
* <p>The time base source for sensor capture start timestamps.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: byte (acamera_metadata_enum_android_sensor_info_timestamp_source_t)</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
- * </ul>
+ * </ul></p>
*
* <p>The timestamps provided for captures are always in nanoseconds and monotonic, but
* may not be based on a time source that can be compared to other system time sources.</p>
* <p>This characteristic defines the source for the timestamps, and therefore whether they
* can be compared against other system time sources/timestamps.</p>
*/
- ACAMERA_SENSOR_INFO_TIMESTAMP_SOURCE = // byte (enum)
+ ACAMERA_SENSOR_INFO_TIMESTAMP_SOURCE = // byte (acamera_metadata_enum_android_sensor_info_timestamp_source_t)
ACAMERA_SENSOR_INFO_START + 8,
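For illustration, a minimal sketch of checking this characteristic; `mgr` and `cameraId` are assumed valid, and a REALTIME source means the capture timestamps share the timebase used by other system timestamps:

    #include <camera/NdkCameraManager.h>
    #include <camera/NdkCameraMetadata.h>
    #include <camera/NdkCameraMetadataTags.h>

    static bool timestampsAreRealtime(ACameraManager *mgr, const char *cameraId) {
        ACameraMetadata *chars = nullptr;
        if (ACameraManager_getCameraCharacteristics(mgr, cameraId, &chars) != ACAMERA_OK) {
            return false;
        }
        ACameraMetadata_const_entry entry;
        bool realtime = false;
        if (ACameraMetadata_getConstEntry(chars,
                ACAMERA_SENSOR_INFO_TIMESTAMP_SOURCE, &entry) == ACAMERA_OK) {
            realtime = (entry.data.u8[0] ==
                    ACAMERA_SENSOR_INFO_TIMESTAMP_SOURCE_REALTIME);
        }
        ACameraMetadata_free(chars);
        return realtime;
    }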
/**
* <p>Whether the RAW images output from this camera device are subject to
* lens shading correction.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: byte (acamera_metadata_enum_android_sensor_info_lens_shading_applied_t)</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
- * </ul>
+ * </ul></p>
*
* <p>If TRUE, all images produced by the camera device in the RAW image formats will
* have lens shading correction already applied to it. If FALSE, the images will
@@ -3830,16 +4079,18 @@
* <p>This key will be <code>null</code> for all devices that do not report this information.
* Devices with RAW capability will always report this information in this key.</p>
*/
- ACAMERA_SENSOR_INFO_LENS_SHADING_APPLIED = // byte (enum)
+ ACAMERA_SENSOR_INFO_LENS_SHADING_APPLIED = // byte (acamera_metadata_enum_android_sensor_info_lens_shading_applied_t)
ACAMERA_SENSOR_INFO_START + 9,
/**
* <p>The area of the image sensor which corresponds to active pixels prior to the
* application of any geometric distortion correction.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: int32[4]</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
- * </ul>
+ * </ul></p>
*
* <p>The data representation is int[4], which maps to (left, top, width, height).</p>
* <p>This is the rectangle representing the size of the active region of the sensor (i.e.
@@ -3906,11 +4157,13 @@
* <p>Quality of lens shading correction applied
* to the image data.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: byte (acamera_metadata_enum_android_shading_mode_t)</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
* <li>ACaptureRequest</li>
- * </ul>
+ * </ul></p>
*
* <p>When set to OFF mode, no lens shading correction will be applied by the
* camera device, and an identity lens shading map data will be provided
@@ -3940,17 +4193,19 @@
* @see ACAMERA_CONTROL_AWB_MODE
* @see ACAMERA_STATISTICS_LENS_SHADING_MAP_MODE
*/
- ACAMERA_SHADING_MODE = // byte (enum)
+ ACAMERA_SHADING_MODE = // byte (acamera_metadata_enum_android_shading_mode_t)
ACAMERA_SHADING_START,
/**
* <p>List of lens shading modes for ACAMERA_SHADING_MODE that are supported by this camera device.</p>
*
* @see ACAMERA_SHADING_MODE
*
- * <p>This tag may appear in:</p>
+ * <p>Type: byte[n]</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
- * </ul>
+ * </ul></p>
*
* <p>This list contains lens shading modes that can be set for the camera device.
* Camera devices that support the MANUAL_POST_PROCESSING capability will always
@@ -3965,41 +4220,47 @@
* <p>Operating mode for the face detector
* unit.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: byte (acamera_metadata_enum_android_statistics_face_detect_mode_t)</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
* <li>ACaptureRequest</li>
- * </ul>
+ * </ul></p>
*
* <p>Whether face detection is enabled, and whether it
* should output just the basic fields or the full set of
* fields.</p>
*/
- ACAMERA_STATISTICS_FACE_DETECT_MODE = // byte (enum)
+ ACAMERA_STATISTICS_FACE_DETECT_MODE = // byte (acamera_metadata_enum_android_statistics_face_detect_mode_t)
ACAMERA_STATISTICS_START,
/**
* <p>Operating mode for hot pixel map generation.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: byte (acamera_metadata_enum_android_statistics_hot_pixel_map_mode_t)</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
* <li>ACaptureRequest</li>
- * </ul>
+ * </ul></p>
*
* <p>If set to <code>true</code>, a hot pixel map is returned in ACAMERA_STATISTICS_HOT_PIXEL_MAP.
* If set to <code>false</code>, no hot pixel map will be returned.</p>
*
* @see ACAMERA_STATISTICS_HOT_PIXEL_MAP
*/
- ACAMERA_STATISTICS_HOT_PIXEL_MAP_MODE = // byte (enum)
+ ACAMERA_STATISTICS_HOT_PIXEL_MAP_MODE = // byte (acamera_metadata_enum_android_statistics_hot_pixel_map_mode_t)
ACAMERA_STATISTICS_START + 3,
/**
* <p>List of unique IDs for detected faces.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: int32[n]</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
- * </ul>
+ * </ul></p>
*
* <p>Each detected face is given a unique ID that is valid for as long as the face is visible
* to the camera device. A face that leaves the field of view and later returns may be
@@ -4014,10 +4275,12 @@
* <p>List of landmarks for detected
* faces.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: int32[n*6]</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
- * </ul>
+ * </ul></p>
*
* <p>The coordinate system is that of ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE, with
* <code>(0, 0)</code> being the top-left pixel of the active array.</p>
@@ -4032,10 +4295,12 @@
* <p>List of the bounding rectangles for detected
* faces.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: int32[n*4]</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
- * </ul>
+ * </ul></p>
*
* <p>The data representation is int[4], which maps to (left, top, width, height).</p>
* <p>The coordinate system is that of ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE, with
@@ -4051,10 +4316,12 @@
* <p>List of the face confidence scores for
* detected faces</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: byte[n]</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
- * </ul>
+ * </ul></p>
*
* <p>Only available if ACAMERA_STATISTICS_FACE_DETECT_MODE != OFF.</p>
*
@@ -4067,10 +4334,12 @@
* that lists the coefficients used to correct for vignetting and color shading,
* for each Bayer color channel of RAW image data.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: float[4*n*m]</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
- * </ul>
+ * </ul></p>
*
* <p>The map provided here is the same map that is used by the camera device to
* correct both color shading and vignetting for output non-RAW images.</p>
@@ -4144,10 +4413,12 @@
* <p>The camera device estimated scene illumination lighting
* frequency.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: byte (acamera_metadata_enum_android_statistics_scene_flicker_t)</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
- * </ul>
+ * </ul></p>
*
* <p>Many light sources, such as most fluorescent lights, flicker at a rate
* that depends on the local utility power standards. This flicker must be
@@ -4167,15 +4438,17 @@
* @see ACAMERA_CONTROL_AE_MODE
* @see ACAMERA_CONTROL_MODE
*/
- ACAMERA_STATISTICS_SCENE_FLICKER = // byte (enum)
+ ACAMERA_STATISTICS_SCENE_FLICKER = // byte (acamera_metadata_enum_android_statistics_scene_flicker_t)
ACAMERA_STATISTICS_START + 14,
/**
* <p>List of <code>(x, y)</code> coordinates of hot/defective pixels on the sensor.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: int32[2*n]</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
- * </ul>
+ * </ul></p>
*
* <p>A coordinate <code>(x, y)</code> must lie between <code>(0, 0)</code> and
* <code>(width - 1, height - 1)</code> (inclusive), which are the top-left and
@@ -4193,11 +4466,13 @@
* <p>Whether the camera device will output the lens
* shading map in output result metadata.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: byte (acamera_metadata_enum_android_statistics_lens_shading_map_mode_t)</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
* <li>ACaptureRequest</li>
- * </ul>
+ * </ul></p>
*
* <p>When set to ON,
* ACAMERA_STATISTICS_LENS_SHADING_MAP will be provided in
@@ -4206,7 +4481,7 @@
*
* @see ACAMERA_STATISTICS_LENS_SHADING_MAP
*/
- ACAMERA_STATISTICS_LENS_SHADING_MAP_MODE = // byte (enum)
+ ACAMERA_STATISTICS_LENS_SHADING_MAP_MODE = // byte (acamera_metadata_enum_android_statistics_lens_shading_map_mode_t)
ACAMERA_STATISTICS_START + 16,
ACAMERA_STATISTICS_END,
@@ -4216,10 +4491,12 @@
*
* @see ACAMERA_STATISTICS_FACE_DETECT_MODE
*
- * <p>This tag may appear in:</p>
+ * <p>Type: byte[n]</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
- * </ul>
+ * </ul></p>
*
* <p>OFF is always supported.</p>
*/
@@ -4229,12 +4506,13 @@
* <p>The maximum number of simultaneously detectable
* faces.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: int32</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
- * </ul>
+ * </ul></p>
*
- * <p>None</p>
*/
ACAMERA_STATISTICS_INFO_MAX_FACE_COUNT = // int32
ACAMERA_STATISTICS_INFO_START + 2,
@@ -4244,10 +4522,12 @@
*
* @see ACAMERA_STATISTICS_HOT_PIXEL_MAP_MODE
*
- * <p>This tag may appear in:</p>
+ * <p>Type: byte[n]</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
- * </ul>
+ * </ul></p>
*
* <p>If no hot pixel map output is available for this camera device, this will contain only
* <code>false</code>.</p>
@@ -4261,10 +4541,12 @@
*
* @see ACAMERA_STATISTICS_LENS_SHADING_MAP_MODE
*
- * <p>This tag may appear in:</p>
+ * <p>Type: byte[n]</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
- * </ul>
+ * </ul></p>
*
* <p>If no lens shading map output is available for this camera device, this key will
* contain only OFF.</p>
@@ -4282,11 +4564,13 @@
*
* @see ACAMERA_TONEMAP_MODE
*
- * <p>This tag may appear in:</p>
+ * <p>Type: float[n*2]</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
* <li>ACaptureRequest</li>
- * </ul>
+ * </ul></p>
*
* <p>See ACAMERA_TONEMAP_CURVE_RED for more details.</p>
*
@@ -4301,11 +4585,13 @@
*
* @see ACAMERA_TONEMAP_MODE
*
- * <p>This tag may appear in:</p>
+ * <p>Type: float[n*2]</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
* <li>ACaptureRequest</li>
- * </ul>
+ * </ul></p>
*
* <p>See ACAMERA_TONEMAP_CURVE_RED for more details.</p>
*
@@ -4320,11 +4606,13 @@
*
* @see ACAMERA_TONEMAP_MODE
*
- * <p>This tag may appear in:</p>
+ * <p>Type: float[n*2]</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
* <li>ACaptureRequest</li>
- * </ul>
+ * </ul></p>
*
* <p>Each channel's curve is defined by an array of control points:</p>
* <pre><code>ACAMERA_TONEMAP_CURVE_RED =
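A minimal sketch of supplying such control points (a two-point identity ramp; `request` is again an assumed ACaptureRequest, and the same float layout applies to all three channels):

    const uint8_t mode = ACAMERA_TONEMAP_MODE_CONTRAST_CURVE;
    ACaptureRequest_setEntry_u8(request, ACAMERA_TONEMAP_MODE, 1, &mode);
    // Control points are (Pin, Pout) pairs in [0.0, 1.0]; two points = linear ramp.
    const float curve[] = { 0.0f, 0.0f, 1.0f, 1.0f };
    ACaptureRequest_setEntry_float(request, ACAMERA_TONEMAP_CURVE_RED,   4, curve);
    ACaptureRequest_setEntry_float(request, ACAMERA_TONEMAP_CURVE_GREEN, 4, curve);
    ACaptureRequest_setEntry_float(request, ACAMERA_TONEMAP_CURVE_BLUE,  4, curve);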
@@ -4375,11 +4663,13 @@
/**
* <p>High-level global contrast/gamma/tonemapping control.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: byte (acamera_metadata_enum_android_tonemap_mode_t)</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
* <li>ACaptureRequest</li>
- * </ul>
+ * </ul></p>
*
* <p>When switching to an application-defined contrast curve by setting
* ACAMERA_TONEMAP_MODE to CONTRAST_CURVE, the curve is defined
@@ -4402,16 +4692,18 @@
*
* @see ACAMERA_TONEMAP_MODE
*/
- ACAMERA_TONEMAP_MODE = // byte (enum)
+ ACAMERA_TONEMAP_MODE = // byte (acamera_metadata_enum_android_tonemap_mode_t)
ACAMERA_TONEMAP_START + 3,
/**
* <p>Maximum number of supported points in the
* tonemap curve that can be used for android.tonemap.curve.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: int32</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
- * </ul>
+ * </ul></p>
*
* <p>If the actual number of points provided by the application (in ACAMERA_TONEMAPCURVE_*) is
* less than this maximum, the camera device will resample the curve to its internal
@@ -4428,10 +4720,12 @@
*
* @see ACAMERA_TONEMAP_MODE
*
- * <p>This tag may appear in:</p>
+ * <p>Type: byte[n]</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
- * </ul>
+ * </ul></p>
*
* <p>Camera devices that support the MANUAL_POST_PROCESSING capability will always contain
* at least one of below mode combinations:</p>
@@ -4449,11 +4743,13 @@
*
* @see ACAMERA_TONEMAP_MODE
*
- * <p>This tag may appear in:</p>
+ * <p>Type: float</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
* <li>ACaptureRequest</li>
- * </ul>
+ * </ul></p>
*
* <p>The tonemap curve will be defined by the following formula:
* * OUT = pow(IN, 1.0 / gamma)
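A sketch of the GAMMA_VALUE path under the same assumptions; for gamma = 2.2 the formula maps an input of 0.5 to pow(0.5, 1/2.2) ≈ 0.73:

    const uint8_t mode = ACAMERA_TONEMAP_MODE_GAMMA_VALUE;
    ACaptureRequest_setEntry_u8(request, ACAMERA_TONEMAP_MODE, 1, &mode);
    const float gamma = 2.2f;  // OUT = pow(IN, 1.0 / gamma)
    ACaptureRequest_setEntry_float(request, ACAMERA_TONEMAP_GAMMA, 1, &gamma);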
@@ -4474,11 +4770,13 @@
*
* @see ACAMERA_TONEMAP_MODE
*
- * <p>This tag may appear in:</p>
+ * <p>Type: byte (acamera_metadata_enum_android_tonemap_preset_curve_t)</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
* <li>ACaptureRequest</li>
- * </ul>
+ * </ul></p>
*
* <p>The tonemap curve will be defined by the specified standard.</p>
* <p>sRGB (approximated by 16 control points):</p>
@@ -4488,17 +4786,19 @@
* <p>Note that the above figures show a 16-control-point approximation of the preset
* curves. Camera devices may apply a different approximation to the curve.</p>
*/
- ACAMERA_TONEMAP_PRESET_CURVE = // byte (enum)
+ ACAMERA_TONEMAP_PRESET_CURVE = // byte (acamera_metadata_enum_android_tonemap_preset_curve_t)
ACAMERA_TONEMAP_START + 7,
ACAMERA_TONEMAP_END,
/**
* <p>Generally classifies the overall set of the camera device functionality.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: byte (acamera_metadata_enum_android_info_supported_hardware_level_t)</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
- * </ul>
+ * </ul></p>
*
* <p>The supported hardware level is a high-level description of the camera device's
* capabilities, summarizing several capabilities into one field. Each level adds additional
@@ -4551,7 +4851,7 @@
* @see ACAMERA_SENSOR_INFO_TIMESTAMP_SOURCE
* @see ACAMERA_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES
*/
- ACAMERA_INFO_SUPPORTED_HARDWARE_LEVEL = // byte (enum)
+ ACAMERA_INFO_SUPPORTED_HARDWARE_LEVEL = // byte (acamera_metadata_enum_android_info_supported_hardware_level_t)
ACAMERA_INFO_START,
ACAMERA_INFO_END,
@@ -4559,11 +4859,13 @@
* <p>Whether black-level compensation is locked
* to its current values, or is free to vary.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: byte (acamera_metadata_enum_android_black_level_lock_t)</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
* <li>ACaptureRequest</li>
- * </ul>
+ * </ul></p>
*
* <p>Whether the black level offset was locked for this frame. Should be
* ON if ACAMERA_BLACK_LEVEL_LOCK was ON in the capture request, unless
@@ -4572,7 +4874,7 @@
*
* @see ACAMERA_BLACK_LEVEL_LOCK
*/
- ACAMERA_BLACK_LEVEL_LOCK = // byte (enum)
+ ACAMERA_BLACK_LEVEL_LOCK = // byte (acamera_metadata_enum_android_black_level_lock_t)
ACAMERA_BLACK_LEVEL_START,
ACAMERA_BLACK_LEVEL_END,
@@ -4581,10 +4883,12 @@
* with which the output result (metadata + buffers) has been fully
* synchronized.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: int64 (acamera_metadata_enum_android_sync_frame_number_t)</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
- * </ul>
+ * </ul></p>
*
* <p>When a request is submitted to the camera device, there is usually a
* delay of several frames before the controls get applied. A camera
@@ -4638,17 +4942,19 @@
* @see ACAMERA_REQUEST_PIPELINE_MAX_DEPTH
* @see ACAMERA_SYNC_FRAME_NUMBER
*/
- ACAMERA_SYNC_FRAME_NUMBER = // int64 (enum)
+ ACAMERA_SYNC_FRAME_NUMBER = // int64 (acamera_metadata_enum_android_sync_frame_number_t)
ACAMERA_SYNC_START,
/**
* <p>The maximum number of frames that can occur after a request
* (different than the previous) has been submitted, and before the
* result's state becomes synchronized.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: int32 (acamera_metadata_enum_android_sync_max_latency_t)</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
- * </ul>
+ * </ul></p>
*
* <p>This defines the maximum distance (in number of metadata results),
* between the frame number of the request that has new controls to apply
@@ -4657,7 +4963,7 @@
* must occur before the camera device knows for a fact that the newly
* submitted camera settings have been applied in outgoing frames.</p>
*/
- ACAMERA_SYNC_MAX_LATENCY = // int32 (enum)
+ ACAMERA_SYNC_MAX_LATENCY = // int32 (acamera_metadata_enum_android_sync_max_latency_t)
ACAMERA_SYNC_START + 1,
ACAMERA_SYNC_END,
@@ -4666,10 +4972,12 @@
* configurations that this camera device supports
* (i.e. format, width, height, output/input stream).</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: int32[n*4] (acamera_metadata_enum_android_depth_available_depth_stream_configurations_t)</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
- * </ul>
+ * </ul></p>
*
* <p>These are output stream configurations for use with
* dataSpace HAL_DATASPACE_DEPTH. The configurations are
@@ -4683,16 +4991,18 @@
* android.depth.maxDepthSamples, 1, OUTPUT)</code> in addition to
* the entries for HAL_PIXEL_FORMAT_Y16.</p>
*/
- ACAMERA_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS = // int32[n*4] (enum)
+ ACAMERA_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS = // int32[n*4] (acamera_metadata_enum_android_depth_available_depth_stream_configurations_t)
ACAMERA_DEPTH_START + 1,
/**
* <p>This lists the minimum frame duration for each
* format/size combination for depth output formats.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: int64[4*n]</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
- * </ul>
+ * </ul></p>
*
* <p>This should correspond to the frame duration when only that
* stream is active, with all processing (typically in android.*.mode)
@@ -4714,10 +5024,12 @@
* <p>This lists the maximum stall duration for each
* output format/size combination for depth streams.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: int64[4*n]</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
- * </ul>
+ * </ul></p>
*
* <p>A stall duration is how much extra time would get added
* to the normal minimum frame duration for a repeating request
@@ -4737,10 +5049,12 @@
* DEPTH16 / DEPTH_POINT_CLOUD output, and normal color outputs (such as
* YUV_420_888, JPEG, or RAW) simultaneously.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: byte (acamera_metadata_enum_android_depth_depth_is_exclusive_t)</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
- * </ul>
+ * </ul></p>
*
* <p>If TRUE, including both depth and color outputs in a single
* capture request is not supported. An application must interleave color
@@ -4751,7 +5065,7 @@
* measure depth values, which causes the color image to be
* corrupted during depth measurement.</p>
*/
- ACAMERA_DEPTH_DEPTH_IS_EXCLUSIVE = // byte (enum)
+ ACAMERA_DEPTH_DEPTH_IS_EXCLUSIVE = // byte (acamera_metadata_enum_android_depth_depth_is_exclusive_t)
ACAMERA_DEPTH_START + 4,
ACAMERA_DEPTH_END,
@@ -6966,6 +7280,7 @@
} acamera_metadata_enum_android_depth_depth_is_exclusive_t;
+
#endif /* __ANDROID_API__ >= 24 */
__END_DECLS
diff --git a/drm/libmediadrm/DrmHal.cpp b/drm/libmediadrm/DrmHal.cpp
index 3150e3c..bc37557 100644
--- a/drm/libmediadrm/DrmHal.cpp
+++ b/drm/libmediadrm/DrmHal.cpp
@@ -257,6 +257,11 @@
plugin = hPlugin;
}
);
+
+ if (!hResult.isOk()) {
+ ALOGE("createPlugin remote call failed");
+ }
+
return plugin;
}
@@ -408,8 +413,11 @@
if (mPlugin == NULL) {
mInitCheck = ERROR_UNSUPPORTED;
} else {
- mInitCheck = OK;
- mPlugin->setListener(this);
+ if (!mPlugin->setListener(this).isOk()) {
+ mInitCheck = DEAD_OBJECT;
+ } else {
+ mInitCheck = OK;
+ }
}
return mInitCheck;
@@ -424,12 +432,14 @@
closeOpenSessions();
reportMetrics();
setListener(NULL);
- if (mPlugin != NULL) {
- mPlugin->setListener(NULL);
- }
- mPlugin.clear();
mInitCheck = NO_INIT;
+ if (mPlugin != NULL) {
+ if (!mPlugin->setListener(NULL).isOk()) {
+ mInitCheck = DEAD_OBJECT;
+ }
+ }
+ mPlugin.clear();
return OK;
}
@@ -486,18 +496,21 @@
return mInitCheck;
}
- Status status = mPlugin->closeSession(toHidlVec(sessionId));
- if (status == Status::OK) {
- DrmSessionManager::Instance()->removeSession(sessionId);
- for (size_t i = 0; i < mOpenSessions.size(); i++) {
- if (mOpenSessions[i] == sessionId) {
- mOpenSessions.removeAt(i);
- break;
+ Return<Status> status = mPlugin->closeSession(toHidlVec(sessionId));
+ if (status.isOk()) {
+ if (status == Status::OK) {
+ DrmSessionManager::Instance()->removeSession(sessionId);
+ for (size_t i = 0; i < mOpenSessions.size(); i++) {
+ if (mOpenSessions[i] == sessionId) {
+ mOpenSessions.removeAt(i);
+ break;
+ }
}
}
+ reportMetrics();
+ return toStatusT(status);
}
- reportMetrics();
- return toStatusT(status);
+ return DEAD_OBJECT;
}
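The pattern applied throughout this file, sketched generically (someCall/args are hypothetical, not a real IDrmPlugin method): a HIDL call now returns Return<T>, where isOk() reports transport health separately from the plugin-level Status payload, and both must be checked.

    Return<Status> ret = mPlugin->someCall(args);  // hypothetical HIDL method
    if (!ret.isOk()) {
        return DEAD_OBJECT;   // transport failed; the remote process likely died
    }
    return toStatusT(ret);    // otherwise convert the plugin-level Status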
status_t DrmHal::getKeyRequest(Vector<uint8_t> const &sessionId,
@@ -997,11 +1010,14 @@
Mutex::Autolock autoLock(mLock);
closeOpenSessions();
setListener(NULL);
+ mInitCheck = NO_INIT;
+
if (mPlugin != NULL) {
- mPlugin->setListener(NULL);
+ if (!mPlugin->setListener(NULL).isOk()) {
+ mInitCheck = DEAD_OBJECT;
+ }
}
mPlugin.clear();
- mInitCheck = NO_INIT;
}
void DrmHal::writeByteArray(Parcel &obj, hidl_vec<uint8_t> const &vec)
diff --git a/media/libaaudio/examples/input_monitor/src/input_monitor.cpp b/media/libaaudio/examples/input_monitor/src/input_monitor.cpp
index edf644a..2dfd0a7 100644
--- a/media/libaaudio/examples/input_monitor/src/input_monitor.cpp
+++ b/media/libaaudio/examples/input_monitor/src/input_monitor.cpp
@@ -27,9 +27,11 @@
#include "AAudioSimpleRecorder.h"
// TODO support FLOAT
-#define REQUIRED_FORMAT AAUDIO_FORMAT_PCM_I16
+#define REQUIRED_FORMAT AAUDIO_FORMAT_PCM_I16
#define MIN_FRAMES_TO_READ 48 /* arbitrary, 1 msec at 48000 Hz */
+static const int FRAMES_PER_LINE = 20000;
+
int main(int argc, const char **argv)
{
AAudioArgsParser argParser;
@@ -46,7 +48,10 @@
int32_t framesPerRead = 0;
int32_t framesToRecord = 0;
int32_t framesLeft = 0;
+ int32_t nextFrameCount = 0;
+ int32_t frameCount = 0;
int32_t xRunCount = 0;
+ int64_t previousFramePosition = -1;
int16_t *data = nullptr;
float peakLevel = 0.0;
int loopCounter = 0;
@@ -56,7 +61,7 @@
// in a buffer if we hang or crash.
setvbuf(stdout, nullptr, _IONBF, (size_t) 0);
- printf("%s - Monitor input level using AAudio\n", argv[0]);
+ printf("%s - Monitor input level using AAudio V0.1.1\n", argv[0]);
argParser.setFormat(REQUIRED_FORMAT);
if (argParser.parseArgs(argc, argv)) {
@@ -133,6 +138,7 @@
goto finish;
}
framesLeft -= actual;
+ frameCount += actual;
// Peak finder.
for (int frameIndex = 0; frameIndex < actual; frameIndex++) {
@@ -143,9 +149,36 @@
}
// Display level as stars, eg. "******".
- if ((loopCounter++ % 10) == 0) {
+ if (frameCount > nextFrameCount) {
displayPeakLevel(peakLevel);
peakLevel = 0.0;
+ nextFrameCount += FRAMES_PER_LINE;
+ }
+
+ // Print timestamps.
+ int64_t framePosition = 0;
+ int64_t frameTime = 0;
+ aaudio_result_t timeResult;
+ timeResult = AAudioStream_getTimestamp(aaudioStream, CLOCK_MONOTONIC,
+ &framePosition, &frameTime);
+
+ if (timeResult == AAUDIO_OK) {
+ if (framePosition > (previousFramePosition + FRAMES_PER_LINE)) {
+ int64_t realTime = getNanoseconds();
+ int64_t framesRead = AAudioStream_getFramesRead(aaudioStream);
+
+ double latencyMillis = calculateLatencyMillis(framesRead, realTime,
+ framePosition, frameTime,
+ actualSampleRate);
+
+ printf("--- timestamp: result = %4d, position = %lld, at %lld nanos"
+ ", latency = %7.2f msec\n",
+ timeResult,
+ (long long) framePosition,
+ (long long) frameTime,
+ latencyMillis);
+ previousFramePosition = framePosition;
+ }
}
}
diff --git a/media/libaaudio/examples/loopback/src/loopback.cpp b/media/libaaudio/examples/loopback/src/loopback.cpp
index 144c941..df0df04 100644
--- a/media/libaaudio/examples/loopback/src/loopback.cpp
+++ b/media/libaaudio/examples/loopback/src/loopback.cpp
@@ -417,11 +417,18 @@
sleep(1);
printf("%4d: ", i);
loopbackData.loopbackProcessor->printStatus();
- int64_t framesWritten = AAudioStream_getFramesWritten(loopbackData.inputStream);
- int64_t framesRead = AAudioStream_getFramesRead(loopbackData.inputStream);
- printf(" input written = %lld, read %lld, xruns = %d\n",
- (long long) framesWritten,
- (long long) framesRead,
+
+ int64_t inputFramesWritten = AAudioStream_getFramesWritten(loopbackData.inputStream);
+ int64_t inputFramesRead = AAudioStream_getFramesRead(loopbackData.inputStream);
+ int64_t outputFramesWritten = AAudioStream_getFramesWritten(outputStream);
+ int64_t outputFramesRead = AAudioStream_getFramesRead(outputStream);
+ printf(" INPUT: wr %lld rd %lld state %s, OUTPUT: wr %lld rd %lld state %s, xruns %d\n",
+ (long long) inputFramesWritten,
+ (long long) inputFramesRead,
+ AAudio_convertStreamStateToText(AAudioStream_getState(loopbackData.inputStream)),
+ (long long) outputFramesWritten,
+ (long long) outputFramesRead,
+ AAudio_convertStreamStateToText(AAudioStream_getState(outputStream)),
AAudioStream_getXRunCount(outputStream)
);
}
diff --git a/media/libaaudio/examples/utils/AAudioArgsParser.h b/media/libaaudio/examples/utils/AAudioArgsParser.h
index 46bc99e..30c3ccd 100644
--- a/media/libaaudio/examples/utils/AAudioArgsParser.h
+++ b/media/libaaudio/examples/utils/AAudioArgsParser.h
@@ -24,7 +24,8 @@
#include <aaudio/AAudio.h>
#include <aaudio/AAudioTesting.h>
-#include <AAudioExampleUtils.h>
+
+#include "AAudioExampleUtils.h"
// TODO use this as a base class within AAudio
class AAudioParameters {
@@ -239,7 +240,7 @@
* Print stream parameters in comparison with requested values.
* @param stream
*/
- void compareWithStream(AAudioStream *stream) {
+ void compareWithStream(AAudioStream *stream) const {
printf(" DeviceId: requested = %d, actual = %d\n",
getDeviceId(), AAudioStream_getDeviceId(stream));
diff --git a/media/libaaudio/examples/utils/AAudioExampleUtils.h b/media/libaaudio/examples/utils/AAudioExampleUtils.h
index 66de25f..6cbcc58 100644
--- a/media/libaaudio/examples/utils/AAudioExampleUtils.h
+++ b/media/libaaudio/examples/utils/AAudioExampleUtils.h
@@ -25,7 +25,7 @@
#define NANOS_PER_MILLISECOND (NANOS_PER_MICROSECOND * 1000)
#define NANOS_PER_SECOND (NANOS_PER_MILLISECOND * 1000)
-static const char *getSharingModeText(aaudio_sharing_mode_t mode) {
+const char *getSharingModeText(aaudio_sharing_mode_t mode) {
const char *modeText = "unknown";
switch (mode) {
case AAUDIO_SHARING_MODE_EXCLUSIVE:
@@ -49,7 +49,7 @@
return (time.tv_sec * NANOS_PER_SECOND) + time.tv_nsec;
}
-void displayPeakLevel(float peakLevel) {
+static void displayPeakLevel(float peakLevel) {
printf("%5.3f ", peakLevel);
const int maxStars = 50; // arbitrary, fits on one line
int numStars = (int) (peakLevel * maxStars);
@@ -59,4 +59,24 @@
printf("\n");
}
+/**
+ * @param position1 position of hardware frame
+ * @param nanoseconds1 time when that hardware frame position was measured
+ * @param position2 position of client read/write
+ * @param nanoseconds2 time when that client position was measured
+ * @param sampleRate frames per second of the stream
+ * @return latency in milliseconds
+ */
+static double calculateLatencyMillis(int64_t position1, int64_t nanoseconds1,
+ int64_t position2, int64_t nanoseconds2,
+ int64_t sampleRate) {
+ int64_t deltaFrames = position2 - position1;
+ int64_t deltaTime =
+ (NANOS_PER_SECOND * deltaFrames / sampleRate);
+ int64_t timeCurrentFramePlayed = nanoseconds1 + deltaTime;
+ int64_t latencyNanos = timeCurrentFramePlayed - nanoseconds2;
+ double latencyMillis = latencyNanos / 1000000.0;
+ return latencyMillis;
+}
+
#endif // AAUDIO_EXAMPLE_UTILS_H
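A worked example of the helper above: at 48 kHz, if the hardware reports frame 48000 at some time t while the client has already written frame 52800 at that same t, the queued 4800 frames take 100 ms to play out, so the function returns 100.0:

    int64_t t = getNanoseconds();
    double latencyMs = calculateLatencyMillis(48000, t,   // hardware position/time
                                              52800, t,   // client position/time
                                              48000);     // sample rate
    // deltaFrames = 4800 -> deltaTime = 100 ms -> latencyMs == 100.0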
diff --git a/media/libaaudio/include/aaudio/AAudio.h b/media/libaaudio/include/aaudio/AAudio.h
index 30fbdd6..3c23736 100644
--- a/media/libaaudio/include/aaudio/AAudio.h
+++ b/media/libaaudio/include/aaudio/AAudio.h
@@ -234,6 +234,15 @@
int32_t channelCount);
/**
+ * Identical to AAudioStreamBuilder_setChannelCount().
+ *
+ * @param builder reference provided by AAudio_createStreamBuilder()
+ * @param samplesPerFrame Number of samples in a frame.
+ */
+AAUDIO_API void AAudioStreamBuilder_setSamplesPerFrame(AAudioStreamBuilder* builder,
+ int32_t samplesPerFrame);
+
+/**
* Request a sample data format, for example AAUDIO_FORMAT_PCM_I16.
*
* The default, if you do not call this function, is AAUDIO_UNSPECIFIED.
@@ -721,6 +730,14 @@
AAUDIO_API int32_t AAudioStream_getChannelCount(AAudioStream* stream);
/**
+ * Identical to AAudioStream_getChannelCount().
+ *
+ * @param stream reference provided by AAudioStreamBuilder_openStream()
+ * @return actual number of samples frame
+ */
+AAUDIO_API int32_t AAudioStream_getSamplesPerFrame(AAudioStream* stream);
+
+/**
* @param stream reference provided by AAudioStreamBuilder_openStream()
* @return actual device ID
*/
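A short sketch of the new aliases in use; they are pure synonyms for the channel-count calls (error handling trimmed):

    AAudioStreamBuilder *builder = nullptr;
    AAudio_createStreamBuilder(&builder);
    AAudioStreamBuilder_setSamplesPerFrame(builder, 2);  // same as setChannelCount(builder, 2)
    AAudioStream *stream = nullptr;
    AAudioStreamBuilder_openStream(builder, &stream);
    int32_t n = AAudioStream_getSamplesPerFrame(stream); // == AAudioStream_getChannelCount(stream)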
diff --git a/media/libaaudio/libaaudio.map.txt b/media/libaaudio/libaaudio.map.txt
index b9012e5..2ba5250 100644
--- a/media/libaaudio/libaaudio.map.txt
+++ b/media/libaaudio/libaaudio.map.txt
@@ -11,6 +11,7 @@
AAudioStreamBuilder_setErrorCallback;
AAudioStreamBuilder_setFramesPerDataCallback;
AAudioStreamBuilder_setSampleRate;
+ AAudioStreamBuilder_setSamplesPerFrame;
AAudioStreamBuilder_setChannelCount;
AAudioStreamBuilder_setFormat;
AAudioStreamBuilder_setSharingMode;
@@ -34,6 +35,7 @@
AAudioStream_getBufferCapacityInFrames;
AAudioStream_getXRunCount;
AAudioStream_getSampleRate;
+ AAudioStream_getSamplesPerFrame;
AAudioStream_getChannelCount;
AAudioStream_getPerformanceMode;
AAudioStream_getDeviceId;
diff --git a/media/libaaudio/src/binding/AAudioServiceMessage.h b/media/libaaudio/src/binding/AAudioServiceMessage.h
index b4377fb..54e8001 100644
--- a/media/libaaudio/src/binding/AAudioServiceMessage.h
+++ b/media/libaaudio/src/binding/AAudioServiceMessage.h
@@ -28,7 +28,6 @@
// Used to send information about the HAL to the client.
struct AAudioMessageTimestamp {
int64_t position; // number of frames transferred so far
- int64_t deviceOffset; // add to client position to get device position
int64_t timestamp; // time when that position was reached
};
@@ -51,7 +50,8 @@
typedef struct AAudioServiceMessage_s {
enum class code : uint32_t {
NOTHING,
- TIMESTAMP,
+ TIMESTAMP_SERVICE, // when frame is read or written by the service to the client
+ TIMESTAMP_HARDWARE, // when frame is at DAC or ADC
EVENT,
};
diff --git a/media/libaaudio/src/binding/SharedMemoryParcelable.cpp b/media/libaaudio/src/binding/SharedMemoryParcelable.cpp
index 899eb04..b582b99 100644
--- a/media/libaaudio/src/binding/SharedMemoryParcelable.cpp
+++ b/media/libaaudio/src/binding/SharedMemoryParcelable.cpp
@@ -64,9 +64,9 @@
if (mSizeInBytes > 0) {
// Keep the original FD until you are done with the mFd.
// If you close it in here then it will prevent mFd from working.
- mOriginalFd = parcel->readFileDescriptor();
- ALOGV("SharedMemoryParcelable::readFromParcel() LEAK? mOriginalFd = %d\n", mOriginalFd);
- mFd = fcntl(mOriginalFd, F_DUPFD_CLOEXEC, 0);
+ int originalFd = parcel->readFileDescriptor();
+ ALOGV("SharedMemoryParcelable::readFromParcel() LEAK? originalFd = %d\n", originalFd);
+ mFd = fcntl(originalFd, F_DUPFD_CLOEXEC, 0);
ALOGV("SharedMemoryParcelable::readFromParcel() LEAK? mFd = %d\n", mFd);
if (mFd == -1) {
status = -errno;
@@ -87,14 +87,13 @@
}
if (mFd != -1) {
ALOGV("SharedMemoryParcelable::close() LEAK? mFd = %d\n", mFd);
- ::close(mFd);
+ if (::close(mFd) < 0) {
+ int err = errno;
+ ALOGE("SharedMemoryParcelable close failed for fd = %d, errno = %d (%s)",
+ mFd, err, strerror(err));
+ }
mFd = -1;
}
- if (mOriginalFd != -1) {
- ALOGV("SharedMemoryParcelable::close() LEAK? mOriginalFd = %d\n", mOriginalFd);
- ::close(mOriginalFd);
- mOriginalFd = -1;
- }
return AAUDIO_OK;
}
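The ownership rule behind this change, as a sketch: the descriptor returned by Parcel::readFileDescriptor() remains owned by the Parcel, so the object keeps only a CLOEXEC duplicate and closes just that copy.

    int parcelFd = parcel->readFileDescriptor();        // owned by the Parcel; never close
    int ownedFd  = fcntl(parcelFd, F_DUPFD_CLOEXEC, 0); // our copy; close this one later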
diff --git a/media/libaaudio/src/binding/SharedMemoryParcelable.h b/media/libaaudio/src/binding/SharedMemoryParcelable.h
index 4b94b46..c5107d3 100644
--- a/media/libaaudio/src/binding/SharedMemoryParcelable.h
+++ b/media/libaaudio/src/binding/SharedMemoryParcelable.h
@@ -68,7 +68,6 @@
#define MMAP_UNRESOLVED_ADDRESS reinterpret_cast<uint8_t*>(MAP_FAILED)
int mFd = -1;
- int mOriginalFd = -1;
int32_t mSizeInBytes = 0;
uint8_t *mResolvedAddress = MMAP_UNRESOLVED_ADDRESS;
};
diff --git a/media/libaaudio/src/client/AudioEndpoint.cpp b/media/libaaudio/src/client/AudioEndpoint.cpp
index 6ec285f..6ae5379 100644
--- a/media/libaaudio/src/client/AudioEndpoint.cpp
+++ b/media/libaaudio/src/client/AudioEndpoint.cpp
@@ -254,3 +254,7 @@
ALOGD("AudioEndpoint: data readCounter = %lld", (long long) mDataQueue->getReadCounter());
ALOGD("AudioEndpoint: data writeCounter = %lld", (long long) mDataQueue->getWriteCounter());
}
+
+void AudioEndpoint::eraseDataMemory() {
+ mDataQueue->eraseMemory();
+}
diff --git a/media/libaaudio/src/client/AudioEndpoint.h b/media/libaaudio/src/client/AudioEndpoint.h
index 81a4f7b..f5b67e8 100644
--- a/media/libaaudio/src/client/AudioEndpoint.h
+++ b/media/libaaudio/src/client/AudioEndpoint.h
@@ -86,6 +86,11 @@
int32_t getBufferCapacityInFrames() const;
+ /**
+ * Write zeros to the data queue memory.
+ */
+ void eraseDataMemory();
+
void dump() const;
private:
diff --git a/media/libaaudio/src/client/AudioStreamInternal.cpp b/media/libaaudio/src/client/AudioStreamInternal.cpp
index 4c7d0f7..41d4909 100644
--- a/media/libaaudio/src/client/AudioStreamInternal.cpp
+++ b/media/libaaudio/src/client/AudioStreamInternal.cpp
@@ -68,6 +68,7 @@
, mServiceInterface(serviceInterface)
, mWakeupDelayNanos(AAudioProperty_getWakeupDelayMicros() * AAUDIO_NANOS_PER_MICROSECOND)
, mMinimumSleepNanos(AAudioProperty_getMinimumSleepMicros() * AAUDIO_NANOS_PER_MICROSECOND)
+ , mAtomicTimestamp()
{
ALOGD("AudioStreamInternal(): mWakeupDelayNanos = %d, mMinimumSleepNanos = %d",
mWakeupDelayNanos, mMinimumSleepNanos);
@@ -240,9 +241,11 @@
int64_t startTime;
ALOGD("AudioStreamInternal()::requestStart()");
if (mServiceStreamHandle == AAUDIO_HANDLE_INVALID) {
+ ALOGE("AudioStreamInternal::requestStart() mServiceStreamHandle invalid");
return AAUDIO_ERROR_INVALID_STATE;
}
if (isActive()) {
+ ALOGE("AudioStreamInternal::requestStart() already active");
return AAUDIO_ERROR_INVALID_STATE;
}
aaudio_stream_state_t originalState = getState();
@@ -319,6 +322,7 @@
aaudio_result_t AudioStreamInternal::registerThread() {
if (mServiceStreamHandle == AAUDIO_HANDLE_INVALID) {
+ ALOGE("AudioStreamInternal::registerThread() mServiceStreamHandle invalid");
return AAUDIO_ERROR_INVALID_STATE;
}
return mServiceInterface.registerAudioThread(mServiceStreamHandle,
@@ -328,6 +332,7 @@
aaudio_result_t AudioStreamInternal::unregisterThread() {
if (mServiceStreamHandle == AAUDIO_HANDLE_INVALID) {
+ ALOGE("AudioStreamInternal::unregisterThread() mServiceStreamHandle invalid");
return AAUDIO_ERROR_INVALID_STATE;
}
return mServiceInterface.unregisterAudioThread(mServiceStreamHandle, gettid());
@@ -351,15 +356,18 @@
aaudio_result_t AudioStreamInternal::getTimestamp(clockid_t clockId,
int64_t *framePosition,
int64_t *timeNanoseconds) {
- // TODO Generate in server and pass to client. Return latest.
- int64_t time = AudioClock::getNanoseconds();
- *framePosition = mClockModel.convertTimeToPosition(time) + mFramesOffsetFromService;
- // TODO Get a more accurate timestamp from the service. This code just adds a fudge factor.
- *timeNanoseconds = time + (6 * AAUDIO_NANOS_PER_MILLISECOND);
- return AAUDIO_OK;
+ // Generated in server and passed to client. Return latest.
+ if (mAtomicTimestamp.isValid()) {
+ Timestamp timestamp = mAtomicTimestamp.read();
+ *framePosition = timestamp.getPosition();
+ *timeNanoseconds = timestamp.getNanoseconds();
+ return AAUDIO_OK;
+ } else {
+ return AAUDIO_ERROR_UNAVAILABLE;
+ }
}
-aaudio_result_t AudioStreamInternal::updateStateWhileWaiting() {
+aaudio_result_t AudioStreamInternal::updateStateMachine() {
if (isDataCallbackActive()) {
return AAUDIO_OK; // state is getting updated by the callback thread read/write call
}
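With timestamps now generated by the service, a caller should treat AAUDIO_ERROR_UNAVAILABLE as "no timestamp yet" rather than a hard failure; a sketch, with `stream` an assumed open AAudioStream:

    int64_t position = 0;
    int64_t timeNanos = 0;
    aaudio_result_t res = AAudioStream_getTimestamp(stream, CLOCK_MONOTONIC,
                                                    &position, &timeNanos);
    if (res == AAUDIO_OK) {
        // position/timeNanos describe when that frame was at the DAC/ADC.
    } else if (res == AAUDIO_ERROR_UNAVAILABLE) {
        // No hardware timestamp has arrived yet; retry on a later callback.
    }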
@@ -385,7 +393,7 @@
oldTime = nanoTime;
}
-aaudio_result_t AudioStreamInternal::onTimestampFromServer(AAudioServiceMessage *message) {
+aaudio_result_t AudioStreamInternal::onTimestampService(AAudioServiceMessage *message) {
#if LOG_TIMESTAMPS
logTimestamp(*message);
#endif
@@ -393,23 +401,29 @@
return AAUDIO_OK;
}
+aaudio_result_t AudioStreamInternal::onTimestampHardware(AAudioServiceMessage *message) {
+ Timestamp timestamp(message->timestamp.position, message->timestamp.timestamp);
+ mAtomicTimestamp.write(timestamp);
+ return AAUDIO_OK;
+}
+
aaudio_result_t AudioStreamInternal::onEventFromServer(AAudioServiceMessage *message) {
aaudio_result_t result = AAUDIO_OK;
switch (message->event.event) {
case AAUDIO_SERVICE_EVENT_STARTED:
- ALOGD("AudioStreamInternal::onEventFromServergot() AAUDIO_SERVICE_EVENT_STARTED");
+ ALOGD("AudioStreamInternal::onEventFromServer() got AAUDIO_SERVICE_EVENT_STARTED");
if (getState() == AAUDIO_STREAM_STATE_STARTING) {
setState(AAUDIO_STREAM_STATE_STARTED);
}
break;
case AAUDIO_SERVICE_EVENT_PAUSED:
- ALOGD("AudioStreamInternal::onEventFromServergot() AAUDIO_SERVICE_EVENT_PAUSED");
+ ALOGD("AudioStreamInternal::onEventFromServer() got AAUDIO_SERVICE_EVENT_PAUSED");
if (getState() == AAUDIO_STREAM_STATE_PAUSING) {
setState(AAUDIO_STREAM_STATE_PAUSED);
}
break;
case AAUDIO_SERVICE_EVENT_STOPPED:
- ALOGD("AudioStreamInternal::onEventFromServergot() AAUDIO_SERVICE_EVENT_STOPPED");
+ ALOGD("AudioStreamInternal::onEventFromServer() got AAUDIO_SERVICE_EVENT_STOPPED");
if (getState() == AAUDIO_STREAM_STATE_STOPPING) {
setState(AAUDIO_STREAM_STATE_STOPPED);
}
@@ -426,10 +440,14 @@
setState(AAUDIO_STREAM_STATE_CLOSED);
break;
case AAUDIO_SERVICE_EVENT_DISCONNECTED:
+ // Prevent hardware from looping on old data and making buzzing sounds.
+ if (getDirection() == AAUDIO_DIRECTION_OUTPUT) {
+ mAudioEndpoint.eraseDataMemory();
+ }
result = AAUDIO_ERROR_DISCONNECTED;
setState(AAUDIO_STREAM_STATE_DISCONNECTED);
ALOGW("WARNING - AudioStreamInternal::onEventFromServer()"
- " AAUDIO_SERVICE_EVENT_DISCONNECTED");
+ " AAUDIO_SERVICE_EVENT_DISCONNECTED - FIFO cleared");
break;
case AAUDIO_SERVICE_EVENT_VOLUME:
mStreamVolume = (float)message->event.dataDouble;
@@ -456,8 +474,12 @@
break; // no command this time, no problem
}
switch (message.what) {
- case AAudioServiceMessage::code::TIMESTAMP:
- result = onTimestampFromServer(&message);
+ case AAudioServiceMessage::code::TIMESTAMP_SERVICE:
+ result = onTimestampService(&message);
+ break;
+
+ case AAudioServiceMessage::code::TIMESTAMP_HARDWARE:
+ result = onTimestampHardware(&message);
break;
case AAudioServiceMessage::code::EVENT:
diff --git a/media/libaaudio/src/client/AudioStreamInternal.h b/media/libaaudio/src/client/AudioStreamInternal.h
index 1b991de..13cf16c 100644
--- a/media/libaaudio/src/client/AudioStreamInternal.h
+++ b/media/libaaudio/src/client/AudioStreamInternal.h
@@ -50,7 +50,7 @@
int64_t *framePosition,
int64_t *timeNanoseconds) override;
- virtual aaudio_result_t updateStateWhileWaiting() override;
+ virtual aaudio_result_t updateStateMachine() override;
aaudio_result_t open(const AudioStreamBuilder &builder) override;
@@ -122,7 +122,9 @@
aaudio_result_t onEventFromServer(AAudioServiceMessage *message);
- aaudio_result_t onTimestampFromServer(AAudioServiceMessage *message);
+ aaudio_result_t onTimestampService(AAudioServiceMessage *message);
+
+ aaudio_result_t onTimestampHardware(AAudioServiceMessage *message);
void logTimestamp(AAudioServiceMessage &message);
@@ -181,6 +183,11 @@
AudioEndpointParcelable mEndPointParcelable; // description of the buffers filled by service
EndpointDescriptor mEndpointDescriptor; // buffer description with resolved addresses
+
+ SimpleDoubleBuffer<Timestamp> mAtomicTimestamp;
+
+ int64_t mServiceLatencyNanos = 0;
+
};
} /* namespace aaudio */
diff --git a/media/libaaudio/src/core/AAudioAudio.cpp b/media/libaaudio/src/core/AAudioAudio.cpp
index ca42444..5089b00 100644
--- a/media/libaaudio/src/core/AAudioAudio.cpp
+++ b/media/libaaudio/src/core/AAudioAudio.cpp
@@ -101,7 +101,6 @@
*/
static aaudio_policy_t s_MMapPolicy = AAUDIO_UNSPECIFIED;
-
static AudioStream *convertAAudioStreamToAudioStream(AAudioStream* stream)
{
return (AudioStream*) stream;
@@ -144,12 +143,18 @@
}
AAUDIO_API void AAudioStreamBuilder_setChannelCount(AAudioStreamBuilder* builder,
- int32_t channelCount)
+ int32_t channelCount)
{
AudioStreamBuilder *streamBuilder = convertAAudioBuilderToStreamBuilder(builder);
streamBuilder->setSamplesPerFrame(channelCount);
}
+AAUDIO_API void AAudioStreamBuilder_setSamplesPerFrame(AAudioStreamBuilder* builder,
+ int32_t channelCount)
+{
+ AAudioStreamBuilder_setChannelCount(builder, channelCount);
+}
+
AAUDIO_API void AAudioStreamBuilder_setDirection(AAudioStreamBuilder* builder,
aaudio_direction_t direction)
{
@@ -248,7 +253,7 @@
AudioStream *audioStream = convertAAudioStreamToAudioStream(stream);
ALOGD("AAudioStream_requestStart(%p) called --------------", stream);
aaudio_result_t result = audioStream->requestStart();
- ALOGD("AAudioStream_requestStart(%p) returned ------------", stream);
+ ALOGD("AAudioStream_requestStart(%p) returned %d ---------", stream, result);
return result;
}
@@ -350,6 +355,11 @@
return audioStream->getSamplesPerFrame();
}
+AAUDIO_API int32_t AAudioStream_getSamplesPerFrame(AAudioStream* stream)
+{
+ return AAudioStream_getChannelCount(stream);
+}
+
AAUDIO_API aaudio_stream_state_t AAudioStream_getState(AAudioStream* stream)
{
AudioStream *audioStream = convertAAudioStreamToAudioStream(stream);
diff --git a/media/libaaudio/src/core/AudioStream.cpp b/media/libaaudio/src/core/AudioStream.cpp
index 4859c69..4f1cc37 100644
--- a/media/libaaudio/src/core/AudioStream.cpp
+++ b/media/libaaudio/src/core/AudioStream.cpp
@@ -98,7 +98,7 @@
aaudio_stream_state_t *nextState,
int64_t timeoutNanoseconds)
{
- aaudio_result_t result = updateStateWhileWaiting();
+ aaudio_result_t result = updateStateMachine();
if (result != AAUDIO_OK) {
return result;
}
@@ -112,7 +112,7 @@
AudioClock::sleepForNanos(durationNanos);
timeoutNanoseconds -= durationNanos;
- aaudio_result_t result = updateStateWhileWaiting();
+ aaudio_result_t result = updateStateMachine();
if (result != AAUDIO_OK) {
return result;
}
@@ -153,6 +153,7 @@
void* threadArg)
{
if (mHasThread) {
+ ALOGE("AudioStream::createThread() - mHasThread already true");
return AAUDIO_ERROR_INVALID_STATE;
}
if (threadProc == nullptr) {
@@ -174,6 +175,7 @@
aaudio_result_t AudioStream::joinThread(void** returnArg, int64_t timeoutNanoseconds)
{
if (!mHasThread) {
+ ALOGE("AudioStream::joinThread() - but has no thread");
return AAUDIO_ERROR_INVALID_STATE;
}
#if 0
diff --git a/media/libaaudio/src/core/AudioStream.h b/media/libaaudio/src/core/AudioStream.h
index e5fdcc6..ad18751 100644
--- a/media/libaaudio/src/core/AudioStream.h
+++ b/media/libaaudio/src/core/AudioStream.h
@@ -68,10 +68,10 @@
/**
- * Update state while in the middle of waitForStateChange()
+ * Update the state machine.
* @return
*/
- virtual aaudio_result_t updateStateWhileWaiting() = 0;
+ virtual aaudio_result_t updateStateMachine() = 0;
// =========== End ABSTRACT methods ===========================
diff --git a/media/libaaudio/src/fifo/FifoBuffer.cpp b/media/libaaudio/src/fifo/FifoBuffer.cpp
index 8d2c62d..a869886 100644
--- a/media/libaaudio/src/fifo/FifoBuffer.cpp
+++ b/media/libaaudio/src/fifo/FifoBuffer.cpp
@@ -210,3 +210,9 @@
return mFifo->getCapacity();
}
+void FifoBuffer::eraseMemory() {
+ int32_t numBytes = convertFramesToBytes(getBufferCapacityInFrames());
+ if (numBytes > 0) {
+ memset(mStorage, 0, (size_t) numBytes);
+ }
+}
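A sketch of the arithmetic behind eraseMemory(), assuming FifoBuffer's convertFramesToBytes() multiplies frames by bytes-per-frame (channel count times sample size); the helper name below is a hypothetical stand-in, not the actual member:

    #include <cstdint>
    #include <cstring>

    // Hypothetical stand-in for FifoBuffer::convertFramesToBytes().
    static int32_t framesToBytes(int32_t frames, int32_t channels, int32_t bytesPerSample) {
        return frames * channels * bytesPerSample;
    }

    // e.g. zeroing a 512-frame stereo float buffer clears 512 * 2 * 4 = 4096 bytes:
    //   memset(storage, 0, (size_t) framesToBytes(512, 2, sizeof(float)));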
diff --git a/media/libaaudio/src/fifo/FifoBuffer.h b/media/libaaudio/src/fifo/FifoBuffer.h
index a94e9b0..f5a9e27 100644
--- a/media/libaaudio/src/fifo/FifoBuffer.h
+++ b/media/libaaudio/src/fifo/FifoBuffer.h
@@ -111,6 +111,11 @@
mFifo->setWriteCounter(n);
}
+ /*
+ * Set the FIFO storage to all zeros.
+ * This is generally only called before or after the buffer is actively used.
+ */
+ void eraseMemory();
+
private:
void fillWrappingBuffer(WrappingBuffer *wrappingBuffer,
diff --git a/media/libaaudio/src/legacy/AudioStreamLegacy.cpp b/media/libaaudio/src/legacy/AudioStreamLegacy.cpp
index dd5e3c0..2816bac 100644
--- a/media/libaaudio/src/legacy/AudioStreamLegacy.cpp
+++ b/media/libaaudio/src/legacy/AudioStreamLegacy.cpp
@@ -94,12 +94,15 @@
} else {
audioBuffer->size = 0;
}
- break;
+
+ if (updateStateMachine() == AAUDIO_OK) {
+ break; // don't fall through
+ }
}
}
/// FALL THROUGH
- // Stream got rerouted so we disconnect.
+ // Stream got rerouted so we disconnect.
case AAUDIO_CALLBACK_OPERATION_DISCONNECTED: {
setState(AAUDIO_STREAM_STATE_DISCONNECTED);
ALOGD("processCallbackCommon() stream disconnected");
diff --git a/media/libaaudio/src/legacy/AudioStreamRecord.cpp b/media/libaaudio/src/legacy/AudioStreamRecord.cpp
index 8e8070c..041280d 100644
--- a/media/libaaudio/src/legacy/AudioStreamRecord.cpp
+++ b/media/libaaudio/src/legacy/AudioStreamRecord.cpp
@@ -159,6 +159,9 @@
actualPerformanceMode = AAUDIO_PERFORMANCE_MODE_LOW_LATENCY;
}
setPerformanceMode(actualPerformanceMode);
+
+ setSharingMode(AAUDIO_SHARING_MODE_SHARED); // EXCLUSIVE mode not supported in legacy
+
// Log warning if we did not get what we asked for.
ALOGW_IF(actualFlags != flags,
"AudioStreamRecord::open() flags changed from 0x%08X to 0x%08X",
@@ -207,7 +210,7 @@
if (mAudioRecord.get() == nullptr) {
return AAUDIO_ERROR_INVALID_STATE;
}
- // Get current position so we can detect when the track is playing.
+ // Get current position so we can detect when the track is recording.
status_t err = mAudioRecord->getPosition(&mPositionWhenStarting);
if (err != OK) {
return AAudioConvert_androidToAAudioResult(err);
@@ -235,7 +238,7 @@
return AAUDIO_OK;
}
-aaudio_result_t AudioStreamRecord::updateStateWhileWaiting()
+aaudio_result_t AudioStreamRecord::updateStateMachine()
{
aaudio_result_t result = AAUDIO_OK;
aaudio_wrapping_frames_t position;
@@ -292,6 +295,12 @@
}
int32_t framesRead = (int32_t)(bytesRead / bytesPerFrame);
incrementFramesRead(framesRead);
+
+ result = updateStateMachine();
+ if (result != AAUDIO_OK) {
+ return result;
+ }
+
return (aaudio_result_t) framesRead;
}
@@ -330,3 +339,21 @@
}
return getBestTimestamp(clockId, framePosition, timeNanoseconds, &extendedTimestamp);
}
+
+int64_t AudioStreamRecord::getFramesWritten() {
+ aaudio_wrapping_frames_t position;
+ status_t result;
+ switch (getState()) {
+ case AAUDIO_STREAM_STATE_STARTING:
+ case AAUDIO_STREAM_STATE_STARTED:
+ case AAUDIO_STREAM_STATE_STOPPING:
+ result = mAudioRecord->getPosition(&position);
+ if (result == OK) {
+ mFramesWritten.update32(position);
+ }
+ break;
+ default:
+ break;
+ }
+ return AudioStreamLegacy::getFramesWritten();
+}
diff --git a/media/libaaudio/src/legacy/AudioStreamRecord.h b/media/libaaudio/src/legacy/AudioStreamRecord.h
index 2c6a7eb..c1723ba 100644
--- a/media/libaaudio/src/legacy/AudioStreamRecord.h
+++ b/media/libaaudio/src/legacy/AudioStreamRecord.h
@@ -59,9 +59,11 @@
int32_t getXRunCount() const override;
+ int64_t getFramesWritten() override;
+
int32_t getFramesPerBurst() const override;
- aaudio_result_t updateStateWhileWaiting() override;
+ aaudio_result_t updateStateMachine() override;
aaudio_direction_t getDirection() const override {
return AAUDIO_DIRECTION_INPUT;
diff --git a/media/libaaudio/src/legacy/AudioStreamTrack.cpp b/media/libaaudio/src/legacy/AudioStreamTrack.cpp
index 77f31e2..51440d6 100644
--- a/media/libaaudio/src/legacy/AudioStreamTrack.cpp
+++ b/media/libaaudio/src/legacy/AudioStreamTrack.cpp
@@ -183,6 +183,9 @@
actualPerformanceMode = AAUDIO_PERFORMANCE_MODE_POWER_SAVING;
}
setPerformanceMode(actualPerformanceMode);
+
+ setSharingMode(AAUDIO_SHARING_MODE_SHARED); // EXCLUSIVE mode not supported in legacy
+
// Log warning if we did not get what we asked for.
ALOGW_IF(actualFlags != flags,
"AudioStreamTrack::open() flags changed from 0x%08X to 0x%08X",
@@ -227,6 +230,7 @@
std::lock_guard<std::mutex> lock(mStreamMutex);
if (mAudioTrack.get() == nullptr) {
+ ALOGE("AudioStreamTrack::requestStart() no AudioTrack");
return AAUDIO_ERROR_INVALID_STATE;
}
// Get current position so we can detect when the track is playing.
@@ -250,6 +254,7 @@
std::lock_guard<std::mutex> lock(mStreamMutex);
if (mAudioTrack.get() == nullptr) {
+ ALOGE("AudioStreamTrack::requestPause() no AudioTrack");
return AAUDIO_ERROR_INVALID_STATE;
} else if (getState() != AAUDIO_STREAM_STATE_STARTING
&& getState() != AAUDIO_STREAM_STATE_STARTED) {
@@ -271,8 +276,10 @@
std::lock_guard<std::mutex> lock(mStreamMutex);
if (mAudioTrack.get() == nullptr) {
+ ALOGE("AudioStreamTrack::requestFlush() no AudioTrack");
return AAUDIO_ERROR_INVALID_STATE;
} else if (getState() != AAUDIO_STREAM_STATE_PAUSED) {
+ ALOGE("AudioStreamTrack::requestFlush() not paused");
return AAUDIO_ERROR_INVALID_STATE;
}
setState(AAUDIO_STREAM_STATE_FLUSHING);
@@ -286,6 +293,7 @@
std::lock_guard<std::mutex> lock(mStreamMutex);
if (mAudioTrack.get() == nullptr) {
+ ALOGE("AudioStreamTrack::requestStop() no AudioTrack");
return AAUDIO_ERROR_INVALID_STATE;
}
onStop();
@@ -296,7 +304,7 @@
return AAUDIO_OK;
}
-aaudio_result_t AudioStreamTrack::updateStateWhileWaiting()
+aaudio_result_t AudioStreamTrack::updateStateMachine()
{
status_t err;
aaudio_wrapping_frames_t position;
@@ -373,6 +381,12 @@
}
int32_t framesWritten = (int32_t)(bytesWritten / bytesPerFrame);
incrementFramesWritten(framesWritten);
+
+ result = updateStateMachine();
+ if (result != AAUDIO_OK) {
+ return result;
+ }
+
return framesWritten;
}
diff --git a/media/libaaudio/src/legacy/AudioStreamTrack.h b/media/libaaudio/src/legacy/AudioStreamTrack.h
index ff429ea..3230ac8 100644
--- a/media/libaaudio/src/legacy/AudioStreamTrack.h
+++ b/media/libaaudio/src/legacy/AudioStreamTrack.h
@@ -67,7 +67,7 @@
return AAUDIO_DIRECTION_OUTPUT;
}
- aaudio_result_t updateStateWhileWaiting() override;
+ aaudio_result_t updateStateMachine() override;
// This is public so it can be called from the C callback function.
void processCallback(int event, void *info) override;
@@ -81,8 +81,7 @@
// adapts between variable sized blocks and fixed size blocks
FixedBlockReader mFixedBlockReader;
- // TODO add 64-bit position reporting to AudioRecord and use it.
- aaudio_wrapping_frames_t mPositionWhenStarting = 0;
+ // TODO add 64-bit position reporting to AudioTrack and use it.
aaudio_wrapping_frames_t mPositionWhenPausing = 0;
};
diff --git a/media/libaaudio/src/utility/AAudioUtilities.h b/media/libaaudio/src/utility/AAudioUtilities.h
index acd319b..b0c6c94 100644
--- a/media/libaaudio/src/utility/AAudioUtilities.h
+++ b/media/libaaudio/src/utility/AAudioUtilities.h
@@ -258,4 +258,74 @@
}
}
+
+/**
+ * Simple double buffer for a structure that can be written occasionally and read occasionally.
+ * This allows a SINGLE writer with multiple readers.
+ *
+ * It is OK if a new write overwrites an old value before anyone reads it.
+ * It is also OK if we read an old value.
+ * A reader may get a stale or torn result if the other thread is rapidly
+ * writing new values on another core.
+ */
+template <class T>
+class SimpleDoubleBuffer {
+public:
+ SimpleDoubleBuffer()
+ : mValues()
+ , mCounter(0) {}
+
+ __attribute__((no_sanitize("integer")))
+ void write(T value) {
+ int index = mCounter.load() & 1;
+ mValues[index] = value;
+ mCounter++; // Increment AFTER updating storage, OK if it wraps.
+ }
+
+ T read() const {
+ T result;
+ int before;
+ int after;
+ int timeout = 3;
+ do {
+ // Check to see if a write occurred while we were reading.
+ before = mCounter.load();
+ int index = (before & 1) ^ 1;
+ result = mValues[index];
+ after = mCounter.load();
+ } while ((after != before) && --timeout > 0);
+ return result;
+ }
+
+ /**
+ * @return true if at least one value has been written
+ */
+ bool isValid() const {
+ return mCounter.load() > 0;
+ }
+
+private:
+ T mValues[2];
+ std::atomic<int> mCounter;
+};
+
+class Timestamp {
+public:
+ Timestamp()
+ : mPosition(0)
+ , mNanoseconds(0) {}
+ Timestamp(int64_t position, int64_t nanoseconds)
+ : mPosition(position)
+ , mNanoseconds(nanoseconds) {}
+
+ int64_t getPosition() const { return mPosition; }
+
+ int64_t getNanoseconds() const { return mNanoseconds; }
+
+private:
+ // These cannot be const because we need to implement the copy assignment operator.
+ int64_t mPosition;
+ int64_t mNanoseconds;
+};
+
#endif //UTILITY_AAUDIO_UTILITIES_H
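A usage sketch for the two new classes above (not part of this patch; assumes the header is on the include path): a single writer thread publishes Timestamps and any number of reader threads poll them without locks.

    #include <cstdio>
    #include "utility/AAudioUtilities.h"

    static SimpleDoubleBuffer<Timestamp> sShared;

    void writerSide() {   // e.g. the real-time mixer thread
        sShared.write(Timestamp(/*position*/ 4800, /*nanoseconds*/ 123456789LL));
    }

    void readerSide() {   // e.g. a binder thread servicing getTimestamp()
        if (sShared.isValid()) {            // at least one write has landed
            Timestamp t = sShared.read();   // retries up to 3 times if racing the writer
            printf("pos = %lld at %lld ns\n",
                   (long long) t.getPosition(), (long long) t.getNanoseconds());
        }
    }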
diff --git a/media/libaaudio/tests/Android.mk b/media/libaaudio/tests/Android.mk
index e4eef06..4402919 100644
--- a/media/libaaudio/tests/Android.mk
+++ b/media/libaaudio/tests/Android.mk
@@ -34,6 +34,17 @@
LOCAL_C_INCLUDES := \
$(call include-path-for, audio-utils) \
frameworks/av/media/libaaudio/include \
+ frameworks/av/media/libaaudio/src \
+ frameworks/av/media/libaaudio/examples
+LOCAL_SRC_FILES:= test_timestamps.cpp
+LOCAL_SHARED_LIBRARIES := libaaudio
+LOCAL_MODULE := test_timestamps
+include $(BUILD_NATIVE_TEST)
+
+include $(CLEAR_VARS)
+LOCAL_C_INCLUDES := \
+ $(call include-path-for, audio-utils) \
+ frameworks/av/media/libaaudio/include \
frameworks/av/media/libaaudio/src
LOCAL_SRC_FILES:= test_linear_ramp.cpp
LOCAL_SHARED_LIBRARIES := libaaudio
diff --git a/media/libaaudio/tests/test_timestamps.cpp b/media/libaaudio/tests/test_timestamps.cpp
new file mode 100644
index 0000000..d9ca391
--- /dev/null
+++ b/media/libaaudio/tests/test_timestamps.cpp
@@ -0,0 +1,188 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// Play silence while monitoring the stream timestamps and reported latency.
+
+#include <stdio.h>
+#include <unistd.h>
+
+#include <aaudio/AAudio.h>
+#include <aaudio/AAudioTesting.h>
+
+#include "utils/AAudioExampleUtils.h"
+
+#define DEFAULT_TIMEOUT_NANOS ((int64_t)1000000000)
+
+int main(int argc, char **argv) {
+ (void) argc;
+ (void) argv;
+
+ aaudio_result_t result = AAUDIO_OK;
+
+ int32_t triesLeft = 3;
+ int32_t bufferCapacity;
+ int32_t framesPerBurst = 0;
+ float *buffer = nullptr;
+
+ int32_t actualChannelCount = 0;
+ int32_t actualSampleRate = 0;
+ int32_t originalBufferSize = 0;
+ int32_t requestedBufferSize = 0;
+ int32_t finalBufferSize = 0;
+ aaudio_format_t actualDataFormat = AAUDIO_FORMAT_PCM_FLOAT;
+ aaudio_sharing_mode_t actualSharingMode = AAUDIO_SHARING_MODE_SHARED;
+ int32_t framesMax;
+ int64_t framesTotal;
+ int64_t printAt;
+ int samplesPerBurst;
+ int64_t previousFramePosition = -1;
+
+ AAudioStreamBuilder *aaudioBuilder = nullptr;
+ AAudioStream *aaudioStream = nullptr;
+
+ // Make printf print immediately so that debug info is not stuck
+ // in a buffer if we hang or crash.
+ setvbuf(stdout, nullptr, _IONBF, (size_t) 0);
+
+ printf("Test Timestamps V0.1.1\n");
+
+ AAudio_setMMapPolicy(AAUDIO_POLICY_AUTO);
+
+ // Use an AAudioStreamBuilder to contain requested parameters.
+ result = AAudio_createStreamBuilder(&aaudioBuilder);
+ if (result != AAUDIO_OK) {
+ printf("AAudio_createStreamBuilder returned %s",
+ AAudio_convertResultToText(result));
+ goto finish;
+ }
+
+ // Request stream properties.
+ AAudioStreamBuilder_setFormat(aaudioBuilder, AAUDIO_FORMAT_PCM_FLOAT);
+ //AAudioStreamBuilder_setPerformanceMode(aaudioBuilder, AAUDIO_PERFORMANCE_MODE_NONE);
+ AAudioStreamBuilder_setPerformanceMode(aaudioBuilder, AAUDIO_PERFORMANCE_MODE_LOW_LATENCY);
+
+ // Create an AAudioStream using the Builder.
+ result = AAudioStreamBuilder_openStream(aaudioBuilder, &aaudioStream);
+ if (result != AAUDIO_OK) {
+ printf("AAudioStreamBuilder_openStream returned %s",
+ AAudio_convertResultToText(result));
+ goto finish;
+ }
+
+ // Check to see what kind of stream we actually got.
+ actualSampleRate = AAudioStream_getSampleRate(aaudioStream);
+ actualChannelCount = AAudioStream_getChannelCount(aaudioStream);
+ actualDataFormat = AAudioStream_getFormat(aaudioStream);
+
+ printf("-------- chans = %3d, rate = %6d format = %d\n",
+ actualChannelCount, actualSampleRate, actualDataFormat);
+ printf(" Is MMAP used? %s\n", AAudioStream_isMMapUsed(aaudioStream)
+ ? "yes" : "no");
+
+ // This is the number of frames that are read in one chunk by a DMA controller
+ // or a DSP or a mixer.
+ framesPerBurst = AAudioStream_getFramesPerBurst(aaudioStream);
+ printf(" framesPerBurst = %3d\n", framesPerBurst);
+
+ originalBufferSize = AAudioStream_getBufferSizeInFrames(aaudioStream);
+ requestedBufferSize = 2 * framesPerBurst;
+ finalBufferSize = AAudioStream_setBufferSizeInFrames(aaudioStream, requestedBufferSize);
+
+ printf(" BufferSize: original = %4d, requested = %4d, final = %4d\n",
+ originalBufferSize, requestedBufferSize, finalBufferSize);
+
+ samplesPerBurst = framesPerBurst * actualChannelCount;
+ buffer = new float[samplesPerBurst];
+
+ result = AAudioStream_requestStart(aaudioStream);
+ if (result != AAUDIO_OK) {
+ printf("AAudioStream_requestStart returned %s",
+ AAudio_convertResultToText(result));
+ goto finish;
+ }
+
+ // Play silence for about four seconds.
+ framesMax = actualSampleRate * 4;
+ framesTotal = 0;
+ printAt = actualSampleRate;
+ while (result == AAUDIO_OK && framesTotal < framesMax) {
+ int32_t framesWritten = AAudioStream_write(aaudioStream,
+ buffer, framesPerBurst,
+ DEFAULT_TIMEOUT_NANOS);
+ if (framesWritten < 0) {
+ result = framesWritten;
+ printf("write() returned %s, frames = %d\n",
+ AAudio_convertResultToText(result), (int)framesTotal);
+ printf(" frames = %d\n", (int)framesTotal);
+ } else if (framesWritten != framesPerBurst) {
+ printf("write() returned %d, frames = %d\n", framesWritten, (int)framesTotal);
+ result = AAUDIO_ERROR_TIMEOUT;
+ } else {
+ framesTotal += framesWritten;
+ if (framesTotal >= printAt) {
+ printf("frames = %d\n", (int)framesTotal);
+ printAt += actualSampleRate;
+ }
+ }
+
+ // Print timestamps.
+ int64_t framePosition = 0;
+ int64_t frameTime = 0;
+ aaudio_result_t timeResult;
+ timeResult = AAudioStream_getTimestamp(aaudioStream, CLOCK_MONOTONIC,
+ &framePosition, &frameTime);
+
+ if (timeResult == AAUDIO_OK) {
+ if (framePosition > (previousFramePosition + 5000)) {
+ int64_t realTime = getNanoseconds();
+ int64_t framesWritten = AAudioStream_getFramesWritten(aaudioStream);
+
+ double latencyMillis = calculateLatencyMillis(framePosition, frameTime,
+ framesWritten, realTime,
+ actualSampleRate);
+
+ printf("--- timestamp: result = %4d, position = %lld, at %lld nanos"
+ ", latency = %7.2f msec\n",
+ timeResult,
+ (long long) framePosition,
+ (long long) frameTime,
+ latencyMillis);
+ previousFramePosition = framePosition;
+ }
+ }
+ }
+
+ result = AAudioStream_requestStop(aaudioStream);
+ if (result != AAUDIO_OK) {
+ printf("AAudioStream_requestStop returned %s\n",
+ AAudio_convertResultToText(result));
+ }
+ result = AAudioStream_close(aaudioStream);
+ if (result != AAUDIO_OK) {
+ printf("AAudioStream_close returned %s\n",
+ AAudio_convertResultToText(result));
+ }
+ aaudioStream = nullptr;
+
+
+finish:
+ if (aaudioStream != nullptr) {
+ AAudioStream_close(aaudioStream);
+ }
+ AAudioStreamBuilder_delete(aaudioBuilder);
+ delete[] buffer;
+ printf("result = %d = %s\n", result, AAudio_convertResultToText(result));
+}
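The test leans on getNanoseconds() and calculateLatencyMillis() from the examples utilities (AAudioExampleUtils.h). A hedged reconstruction of that latency math (an assumption, not the actual helper): extrapolate when the most recently written frame will reach the DAC, then compare that to "now".

    #include <cstdint>

    static double sketchLatencyMillis(int64_t framePosition, int64_t frameTimeNanos,
                                      int64_t framesWritten, int64_t nowNanos,
                                      int32_t sampleRate) {
        // The timestamp says framePosition was at the DAC at frameTimeNanos.
        // Extrapolate when the last written frame will get there.
        int64_t frameDelta = framesWritten - framePosition;
        int64_t timeDeltaNanos = (frameDelta * 1000000000LL) / sampleRate;
        int64_t writtenFramePlayTime = frameTimeNanos + timeDeltaNanos;
        return (writtenFramePlayTime - nowNanos) / 1000000.0; // nanos -> millis
    }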
diff --git a/media/libaudioclient/AudioTrackShared.cpp b/media/libaudioclient/AudioTrackShared.cpp
index e49945b..7bf4f99 100644
--- a/media/libaudioclient/AudioTrackShared.cpp
+++ b/media/libaudioclient/AudioTrackShared.cpp
@@ -111,7 +111,8 @@
status_t ClientProxy::obtainBuffer(Buffer* buffer, const struct timespec *requested,
struct timespec *elapsed)
{
- LOG_ALWAYS_FATAL_IF(buffer == NULL || buffer->mFrameCount == 0);
+ LOG_ALWAYS_FATAL_IF(buffer == NULL || buffer->mFrameCount == 0,
+ "%s: null or zero frame buffer, buffer:%p", __func__, buffer);
struct timespec total; // total elapsed time spent waiting
total.tv_sec = 0;
total.tv_nsec = 0;
@@ -345,7 +346,10 @@
buffer->mNonContig = 0;
return;
}
- LOG_ALWAYS_FATAL_IF(!(stepCount <= mUnreleased && mUnreleased <= mFrameCount));
+ LOG_ALWAYS_FATAL_IF(!(stepCount <= mUnreleased && mUnreleased <= mFrameCount),
+ "%s: mUnreleased out of range, "
+ "!(stepCount:%zu <= mUnreleased:%zu <= mFrameCount:%zu), BufferSizeInFrames:%u",
+ __func__, stepCount, mUnreleased, mFrameCount, getBufferSizeInFrames());
mUnreleased -= stepCount;
audio_track_cblk_t* cblk = mCblk;
// Both of these barriers are required
@@ -675,7 +679,8 @@
__attribute__((no_sanitize("integer")))
status_t ServerProxy::obtainBuffer(Buffer* buffer, bool ackFlush)
{
- LOG_ALWAYS_FATAL_IF(buffer == NULL || buffer->mFrameCount == 0);
+ LOG_ALWAYS_FATAL_IF(buffer == NULL || buffer->mFrameCount == 0,
+ "%s: null or zero frame buffer, buffer:%p", __func__, buffer);
if (mIsShutdown) {
goto no_init;
}
@@ -761,7 +766,10 @@
buffer->mNonContig = 0;
return;
}
- LOG_ALWAYS_FATAL_IF(!(stepCount <= mUnreleased && mUnreleased <= mFrameCount));
+ LOG_ALWAYS_FATAL_IF(!(stepCount <= mUnreleased && mUnreleased <= mFrameCount),
+ "%s: mUnreleased out of range, "
+ "!(stepCount:%zu <= mUnreleased:%zu <= mFrameCount:%zu)",
+ __func__, stepCount, mUnreleased, mFrameCount);
mUnreleased -= stepCount;
audio_track_cblk_t* cblk = mCblk;
if (mIsOut) {
@@ -1056,7 +1064,9 @@
}
// As mFramesReady is the total remaining frames in the static audio track,
// it is always larger or equal to avail.
- LOG_ALWAYS_FATAL_IF(mFramesReady < (int64_t) avail);
+ LOG_ALWAYS_FATAL_IF(mFramesReady < (int64_t) avail,
+ "%s: mFramesReady out of range, mFramesReady:%lld < avail:%zu",
+ __func__, (long long)mFramesReady, avail);
buffer->mNonContig = mFramesReady == INT64_MAX ? SIZE_MAX : clampToSize(mFramesReady - avail);
if (!ackFlush) {
mUnreleased = avail;
@@ -1068,8 +1078,14 @@
void StaticAudioTrackServerProxy::releaseBuffer(Buffer* buffer)
{
size_t stepCount = buffer->mFrameCount;
- LOG_ALWAYS_FATAL_IF(!((int64_t) stepCount <= mFramesReady));
- LOG_ALWAYS_FATAL_IF(!(stepCount <= mUnreleased));
+ LOG_ALWAYS_FATAL_IF(!((int64_t) stepCount <= mFramesReady),
+ "%s: stepCount out of range, "
+ "!(stepCount:%zu <= mFramesReady:%lld)",
+ __func__, stepCount, (long long)mFramesReady);
+ LOG_ALWAYS_FATAL_IF(!(stepCount <= mUnreleased),
+ "%s: stepCount out of range, "
+ "!(stepCount:%zu <= mUnreleased:%zu)",
+ __func__, stepCount, mUnreleased);
if (stepCount == 0) {
// prevent accidental re-use of buffer
buffer->mRaw = NULL;
diff --git a/media/libmedia/Android.bp b/media/libmedia/Android.bp
index fb4fe4b..7abf09f 100644
--- a/media/libmedia/Android.bp
+++ b/media/libmedia/Android.bp
@@ -15,6 +15,9 @@
cc_library {
name: "libmedia_helper",
vendor_available: true,
+ vndk: {
+ enabled: true,
+ },
srcs: ["AudioParameter.cpp", "TypeConverter.cpp"],
cflags: [
"-Werror",
diff --git a/media/libmedia/TypeConverter.cpp b/media/libmedia/TypeConverter.cpp
index a6eba86..e6c8f9c 100644
--- a/media/libmedia/TypeConverter.cpp
+++ b/media/libmedia/TypeConverter.cpp
@@ -375,7 +375,7 @@
audio_channel_mask_t channelMaskFromString(const std::string &literalChannels)
{
audio_channel_mask_t channels;
- if (!OutputChannelConverter::fromString(literalChannels, channels) ||
+ if (!OutputChannelConverter::fromString(literalChannels, channels) &&
!InputChannelConverter::fromString(literalChannels, channels)) {
return AUDIO_CHANNEL_INVALID;
}
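The operator fix above is a De Morgan correction: the string should be rejected only when it parses as neither an output mask nor an input mask. A tiny self-check with illustrative booleans:

    #include <cassert>

    int main() {
        const bool outOk = true;   // string parsed as an output channel mask
        const bool inOk  = false;  // ... but not as an input channel mask
        assert((!outOk || !inOk) == true);   // old condition: wrongly rejected
        assert((!outOk && !inOk) == false);  // fixed condition: accepted
        return 0;
    }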
diff --git a/media/libstagefright/MPEG4Extractor.cpp b/media/libstagefright/MPEG4Extractor.cpp
index 2e14786..a0d9db0 100644
--- a/media/libstagefright/MPEG4Extractor.cpp
+++ b/media/libstagefright/MPEG4Extractor.cpp
@@ -895,6 +895,12 @@
ALOGE("moov: depth %d", depth);
return ERROR_MALFORMED;
}
+
+ if (chunk_type == FOURCC('m', 'o', 'o', 'v') && mInitCheck == OK) {
+ ALOGE("duplicate moov");
+ return ERROR_MALFORMED;
+ }
+
if (chunk_type == FOURCC('m', 'o', 'o', 'f') && !mMoofFound) {
// store the offset of the first segment
mMoofFound = true;
@@ -969,6 +975,12 @@
if (!mLastTrack->meta->findInt32(kKeyTrackID, &trackId)) {
mLastTrack->skipTrack = true;
}
+
+ status_t err = verifyTrack(mLastTrack);
+ if (err != OK) {
+ mLastTrack->skipTrack = true;
+ }
+
if (mLastTrack->skipTrack) {
Track *cur = mFirstTrack;
@@ -988,12 +1000,6 @@
return OK;
}
-
- status_t err = verifyTrack(mLastTrack);
-
- if (err != OK) {
- return err;
- }
} else if (chunk_type == FOURCC('m', 'o', 'o', 'v')) {
mInitCheck = OK;
diff --git a/media/libstagefright/OggExtractor.cpp b/media/libstagefright/OggExtractor.cpp
index e31c37c..810b0d6 100644
--- a/media/libstagefright/OggExtractor.cpp
+++ b/media/libstagefright/OggExtractor.cpp
@@ -578,6 +578,10 @@
}
// First two pages are header pages.
if (err == ERROR_END_OF_STREAM || mCurrentPage.mPageNo > 2) {
+ if (mBuf != NULL) {
+ mBuf->release();
+ mBuf = NULL;
+ }
break;
}
curGranulePosition = mCurrentPage.mGranulePosition;
diff --git a/media/libstagefright/foundation/Android.bp b/media/libstagefright/foundation/Android.bp
index 9108ce1..221af1d 100644
--- a/media/libstagefright/foundation/Android.bp
+++ b/media/libstagefright/foundation/Android.bp
@@ -7,6 +7,9 @@
cc_library_shared {
name: "libstagefright_foundation",
vendor_available: true,
+ vndk: {
+ enabled: true,
+ },
include_dirs: [
"frameworks/av/include",
"frameworks/native/include",
diff --git a/media/mtp/MtpFfsHandle.cpp b/media/mtp/MtpFfsHandle.cpp
index c50af2f..23fd7ab 100644
--- a/media/mtp/MtpFfsHandle.cpp
+++ b/media/mtp/MtpFfsHandle.cpp
@@ -719,9 +719,22 @@
}
int MtpFfsHandle::sendEvent(mtp_event me) {
+ // Mimic the behavior of f_mtp by sending the event async.
+ // Events aren't critical to the connection, so we don't need to check the return value.
+ char *temp = new char[me.length];
+ memcpy(temp, me.data, me.length);
+ me.data = temp;
+ std::thread t([&me](MtpFfsHandle *h) { return h->doSendEvent(me); }, this);
+ t.detach();
+ return 0;
+}
+
+void MtpFfsHandle::doSendEvent(mtp_event me) {
unsigned length = me.length;
- int ret = writeHandle(mIntr, me.data, length);
- return static_cast<unsigned>(ret) == length ? 0 : -1;
+ int ret = ::write(mIntr, me.data, length);
+ delete[] reinterpret_cast<char*>(me.data);
+ if (static_cast<unsigned>(ret) != length)
+ PLOG(ERROR) << "MTP error sending event from async thread!";
}
} // namespace android
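Note on the async pattern above: the event payload is copied to the heap before the detached thread is spawned, so the caller can return immediately without racing the write. A minimal sketch of the same ownership idea with standard types (names hypothetical):

    #include <cstddef>
    #include <thread>
    #include <vector>

    static void sendAsync(const char *data, size_t length) {
        std::vector<char> copy(data, data + length);    // own the payload
        std::thread([buf = std::move(copy)]() {
            // ... write(fd, buf.data(), buf.size()) would go here ...
            (void) buf;
        }).detach();                                    // fire and forget
    }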
diff --git a/media/mtp/MtpFfsHandle.h b/media/mtp/MtpFfsHandle.h
index 98669ff..b637d65 100644
--- a/media/mtp/MtpFfsHandle.h
+++ b/media/mtp/MtpFfsHandle.h
@@ -33,6 +33,7 @@
bool initFunctionfs();
void closeConfig();
void closeEndpoints();
+ void doSendEvent(mtp_event me);
bool mPtp;
diff --git a/services/audioflinger/AudioFlinger.h b/services/audioflinger/AudioFlinger.h
index 9023b2d..63898a0 100644
--- a/services/audioflinger/AudioFlinger.h
+++ b/services/audioflinger/AudioFlinger.h
@@ -604,7 +604,7 @@
virtual status_t standby();
private:
- sp<MmapThread> mThread;
+ const sp<MmapThread> mThread;
};
ThreadBase *checkThread_l(audio_io_handle_t ioHandle) const;
diff --git a/services/audioflinger/Threads.cpp b/services/audioflinger/Threads.cpp
index 459e4fb..8c4531a 100644
--- a/services/audioflinger/Threads.cpp
+++ b/services/audioflinger/Threads.cpp
@@ -7503,34 +7503,22 @@
AudioFlinger::MmapThreadHandle::MmapThreadHandle(const sp<MmapThread>& thread)
: mThread(thread)
{
+ assert(thread != 0); // thread must start non-null and stay non-null
}
AudioFlinger::MmapThreadHandle::~MmapThreadHandle()
{
- MmapThread *thread = mThread.get();
- // clear our strong reference before disconnecting the thread: the last strong reference
- // will be removed when closeInput/closeOutput is executed upon call from audio policy manager
- // and the thread removed from mMMapThreads list causing the thread destruction.
- mThread.clear();
- if (thread != nullptr) {
- thread->disconnect();
- }
+ mThread->disconnect();
}
status_t AudioFlinger::MmapThreadHandle::createMmapBuffer(int32_t minSizeFrames,
struct audio_mmap_buffer_info *info)
{
- if (mThread == 0) {
- return NO_INIT;
- }
return mThread->createMmapBuffer(minSizeFrames, info);
}
status_t AudioFlinger::MmapThreadHandle::getMmapPosition(struct audio_mmap_position *position)
{
- if (mThread == 0) {
- return NO_INIT;
- }
return mThread->getMmapPosition(position);
}
@@ -7538,25 +7526,16 @@
audio_port_handle_t *handle)
{
- if (mThread == 0) {
- return NO_INIT;
- }
return mThread->start(client, handle);
}
status_t AudioFlinger::MmapThreadHandle::stop(audio_port_handle_t handle)
{
- if (mThread == 0) {
- return NO_INIT;
- }
return mThread->stop(handle);
}
status_t AudioFlinger::MmapThreadHandle::standby()
{
- if (mThread == 0) {
- return NO_INIT;
- }
return mThread->standby();
}
@@ -7588,7 +7567,7 @@
for (const sp<MmapTrack> &t : mActiveTracks) {
stop(t->portId());
}
- // this will cause the destruction of this thread.
+ // This will decrement references and may cause the destruction of this thread.
if (isOutput()) {
AudioSystem::releaseOutput(mId, streamType(), mSessionId);
} else {
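The simplification above relies on mThread being a const sp<> that is checked once at construction; after that, every handle method can dereference it without re-testing for null. A sketch of the invariant using standard smart pointers as stand-ins for sp<>:

    #include <cassert>
    #include <memory>
    #include <utility>

    struct Thread { int standby() { return 0; } };

    class Handle {
    public:
        explicit Handle(std::shared_ptr<Thread> t) : mThread(std::move(t)) {
            assert(mThread != nullptr);   // must start non-null...
        }
        int standby() { return mThread->standby(); }  // ...and cannot become null
    private:
        const std::shared_ptr<Thread> mThread;        // const: cannot be reseated
    };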
diff --git a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
index 1a7db26..78f195d 100644
--- a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
+++ b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
@@ -54,6 +54,11 @@
//FIXME: workaround for truncated touch sounds
// to be removed when the problem is handled by system UI
#define TOUCH_SOUND_FIXED_DELAY_MS 100
+
+// Largest difference in dB on earpiece in call between the voice volume and another
+// media / notification / system volume.
+constexpr float IN_CALL_EARPIECE_HEADROOM_DB = 3.f;
+
// ----------------------------------------------------------------------------
// AudioPolicyInterface implementation
// ----------------------------------------------------------------------------
@@ -5348,6 +5353,30 @@
return ringVolumeDB - 4 > volumeDB ? ringVolumeDB - 4 : volumeDB;
}
+ // in-call: always cap earpiece volume by voice volume + some low headroom
+ if ((stream != AUDIO_STREAM_VOICE_CALL) && (device & AUDIO_DEVICE_OUT_EARPIECE) && isInCall()) {
+ switch (stream) {
+ case AUDIO_STREAM_SYSTEM:
+ case AUDIO_STREAM_RING:
+ case AUDIO_STREAM_MUSIC:
+ case AUDIO_STREAM_ALARM:
+ case AUDIO_STREAM_NOTIFICATION:
+ case AUDIO_STREAM_ENFORCED_AUDIBLE:
+ case AUDIO_STREAM_DTMF:
+ case AUDIO_STREAM_ACCESSIBILITY: {
+ const float maxVoiceVolDb = computeVolume(AUDIO_STREAM_VOICE_CALL, index, device)
+ + IN_CALL_EARPIECE_HEADROOM_DB;
+ if (volumeDB > maxVoiceVolDb) {
+ ALOGV("computeVolume() stream %d at vol=%f overriden by stream %d at vol=%f",
+ stream, volumeDB, AUDIO_STREAM_VOICE_CALL, maxVoiceVolDb);
+ volumeDB = maxVoiceVolDb;
+ }
+ } break;
+ default:
+ break;
+ }
+ }
+
// if a headset is connected, apply the following rules to ring tones and notifications
// to avoid sound level bursts in user's ears:
// - always attenuate notifications volume by 6dB
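A worked example of the new in-call cap (illustrative dB values, not from the patch): if the voice stream computes to -20.0 dB, any media stream routed to the earpiece is clamped to -20.0 + 3.0 = -17.0 dB, no matter how high its own index maps.

    #include <algorithm>
    #include <cstdio>

    int main() {
        const float voiceVolDb = -20.0f;
        const float headroomDb = 3.0f;      // IN_CALL_EARPIECE_HEADROOM_DB
        float mediaVolDb = -5.0f;           // louder than allowed while in call
        mediaVolDb = std::min(mediaVolDb, voiceVolDb + headroomDb);
        printf("capped media volume = %.1f dB\n", mediaVolDb);  // -17.0 dB
    }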
diff --git a/services/camera/libcameraservice/api2/CameraDeviceClient.cpp b/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
index 0429e7f..fc5a28e 100644
--- a/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
+++ b/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
@@ -313,11 +313,13 @@
binder::Status CameraDeviceClient::beginConfigure() {
// TODO: Implement this.
+ ATRACE_CALL();
ALOGV("%s: Not implemented yet.", __FUNCTION__);
return binder::Status::ok();
}
binder::Status CameraDeviceClient::endConfigure(int operatingMode) {
+ ATRACE_CALL();
ALOGV("%s: ending configure (%d input stream, %zu output surfaces)",
__FUNCTION__, mInputStream.configured ? 1 : 0,
mStreamMap.size());
diff --git a/services/camera/libcameraservice/device3/Camera3Device.cpp b/services/camera/libcameraservice/device3/Camera3Device.cpp
index 69b1d7d..89d2c65 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.cpp
+++ b/services/camera/libcameraservice/device3/Camera3Device.cpp
@@ -1478,6 +1478,7 @@
status_t Camera3Device::getInputBufferProducer(
sp<IGraphicBufferProducer> *producer) {
+ ATRACE_CALL();
Mutex::Autolock il(mInterfaceLock);
Mutex::Autolock l(mLock);
@@ -1691,6 +1692,7 @@
}
status_t Camera3Device::waitForNextFrame(nsecs_t timeout) {
+ ATRACE_CALL();
status_t res;
Mutex::Autolock l(mOutputLock);
@@ -1884,6 +1886,7 @@
*/
void Camera3Device::notifyStatus(bool idle) {
+ ATRACE_CALL();
{
// Need mLock to safely update state and synchronize to current
// state of methods in flight.
@@ -2317,6 +2320,7 @@
}
void Camera3Device::setErrorState(const char *fmt, ...) {
+ ATRACE_CALL();
Mutex::Autolock l(mLock);
va_list args;
va_start(args, fmt);
@@ -2327,6 +2331,7 @@
}
void Camera3Device::setErrorStateV(const char *fmt, va_list args) {
+ ATRACE_CALL();
Mutex::Autolock l(mLock);
setErrorStateLockedV(fmt, args);
}
@@ -2411,6 +2416,7 @@
}
void Camera3Device::removeInFlightMapEntryLocked(int idx) {
+ ATRACE_CALL();
nsecs_t duration = mInFlightMap.valueAt(idx).maxExpectedDuration;
mInFlightMap.removeItemsAt(idx, 1);
@@ -2495,6 +2501,7 @@
}
void Camera3Device::flushInflightRequests() {
+ ATRACE_CALL();
{ // First return buffers cached in mInFlightMap
Mutex::Autolock l(mInFlightLock);
for (size_t idx = 0; idx < mInFlightMap.size(); idx++) {
@@ -2621,6 +2628,7 @@
void Camera3Device::sendPartialCaptureResult(const camera_metadata_t * partialResult,
const CaptureResultExtras &resultExtras, uint32_t frameNumber) {
+ ATRACE_CALL();
Mutex::Autolock l(mOutputLock);
CaptureResult captureResult;
@@ -2636,6 +2644,7 @@
CameraMetadata &collectedPartialResult,
uint32_t frameNumber,
bool reprocess) {
+ ATRACE_CALL();
if (pendingMetadata.isEmpty())
return;
@@ -2884,7 +2893,7 @@
void Camera3Device::notifyError(const camera3_error_msg_t &msg,
sp<NotificationListener> listener) {
-
+ ATRACE_CALL();
// Map camera HAL error codes to ICameraDeviceCallback error codes
// Index into this with the HAL error code
static const int32_t halErrorMap[CAMERA3_MSG_NUM_ERRORS] = {
@@ -2962,6 +2971,7 @@
void Camera3Device::notifyShutter(const camera3_shutter_msg_t &msg,
sp<NotificationListener> listener) {
+ ATRACE_CALL();
ssize_t idx;
// Set timestamp for the request in the in-flight tracking
@@ -3292,7 +3302,7 @@
void Camera3Device::HalInterface::wrapAsHidlRequest(camera3_capture_request_t* request,
/*out*/device::V3_2::CaptureRequest* captureRequest,
/*out*/std::vector<native_handle_t*>* handlesCreated) {
-
+ ATRACE_CALL();
if (captureRequest == nullptr || handlesCreated == nullptr) {
ALOGE("%s: captureRequest (%p) and handlesCreated (%p) must not be null",
__FUNCTION__, captureRequest, handlesCreated);
@@ -3614,11 +3624,13 @@
void Camera3Device::RequestThread::setNotificationListener(
wp<NotificationListener> listener) {
+ ATRACE_CALL();
Mutex::Autolock l(mRequestLock);
mListener = listener;
}
void Camera3Device::RequestThread::configurationComplete(bool isConstrainedHighSpeed) {
+ ATRACE_CALL();
Mutex::Autolock l(mRequestLock);
mReconfigured = true;
// Prepare video stream for high speed recording.
@@ -3629,6 +3641,7 @@
List<sp<CaptureRequest> > &requests,
/*out*/
int64_t *lastFrameNumber) {
+ ATRACE_CALL();
Mutex::Autolock l(mRequestLock);
for (List<sp<CaptureRequest> >::iterator it = requests.begin(); it != requests.end();
++it) {
@@ -3651,7 +3664,7 @@
status_t Camera3Device::RequestThread::queueTrigger(
RequestTrigger trigger[],
size_t count) {
-
+ ATRACE_CALL();
Mutex::Autolock l(mTriggerMutex);
status_t ret;
@@ -3708,6 +3721,7 @@
const RequestList &requests,
/*out*/
int64_t *lastFrameNumber) {
+ ATRACE_CALL();
Mutex::Autolock l(mRequestLock);
if (lastFrameNumber != NULL) {
*lastFrameNumber = mRepeatingLastFrameNumber;
@@ -3734,6 +3748,7 @@
}
status_t Camera3Device::RequestThread::clearRepeatingRequests(/*out*/int64_t *lastFrameNumber) {
+ ATRACE_CALL();
Mutex::Autolock l(mRequestLock);
return clearRepeatingRequestsLocked(lastFrameNumber);
@@ -3750,6 +3765,7 @@
status_t Camera3Device::RequestThread::clear(
/*out*/int64_t *lastFrameNumber) {
+ ATRACE_CALL();
Mutex::Autolock l(mRequestLock);
ALOGV("RequestThread::%s:", __FUNCTION__);
@@ -3805,6 +3821,7 @@
}
void Camera3Device::RequestThread::setPaused(bool paused) {
+ ATRACE_CALL();
Mutex::Autolock l(mPauseLock);
mDoPause = paused;
mDoPauseSignal.signal();
@@ -3812,6 +3829,7 @@
status_t Camera3Device::RequestThread::waitUntilRequestProcessed(
int32_t requestId, nsecs_t timeout) {
+ ATRACE_CALL();
Mutex::Autolock l(mLatestRequestMutex);
status_t res;
while (mLatestRequestId != requestId) {
@@ -3838,6 +3856,7 @@
}
void Camera3Device::RequestThread::checkAndStopRepeatingRequest() {
+ ATRACE_CALL();
bool surfaceAbandoned = false;
int64_t lastFrameNumber = 0;
sp<NotificationListener> listener;
@@ -3866,6 +3885,7 @@
}
bool Camera3Device::RequestThread::sendRequestsBatch() {
+ ATRACE_CALL();
status_t res;
size_t batchSize = mNextRequests.size();
std::vector<camera3_capture_request_t*> requests(batchSize);
@@ -4261,6 +4281,7 @@
}
CameraMetadata Camera3Device::RequestThread::getLatestRequest() const {
+ ATRACE_CALL();
Mutex::Autolock al(mLatestRequestMutex);
ALOGV("RequestThread::%s", __FUNCTION__);
@@ -4270,6 +4291,7 @@
bool Camera3Device::RequestThread::isStreamPending(
sp<Camera3StreamInterface>& stream) {
+ ATRACE_CALL();
Mutex::Autolock l(mRequestLock);
for (const auto& nextRequest : mNextRequests) {
@@ -4299,6 +4321,7 @@
}
nsecs_t Camera3Device::getExpectedInFlightDuration() {
+ ATRACE_CALL();
Mutex::Autolock al(mInFlightLock);
return mExpectedInflightDuration > kMinInflightDuration ?
mExpectedInflightDuration : kMinInflightDuration;
@@ -4370,6 +4393,7 @@
}
void Camera3Device::RequestThread::waitForNextRequestBatch() {
+ ATRACE_CALL();
// Optimized a bit for the simple steady-state case (single repeating
// request), to avoid putting that request in the queue temporarily.
Mutex::Autolock l(mRequestLock);
@@ -4519,6 +4543,7 @@
}
bool Camera3Device::RequestThread::waitIfPaused() {
+ ATRACE_CALL();
status_t res;
Mutex::Autolock l(mPauseLock);
while (mDoPause) {
@@ -4543,6 +4568,7 @@
}
void Camera3Device::RequestThread::unpauseForNewRequests() {
+ ATRACE_CALL();
// With work to do, mark thread as unpaused.
// If paused by request (setPaused), don't resume, to avoid
// extra signaling/waiting overhead to waitUntilPaused
@@ -4574,7 +4600,7 @@
status_t Camera3Device::RequestThread::insertTriggers(
const sp<CaptureRequest> &request) {
-
+ ATRACE_CALL();
Mutex::Autolock al(mTriggerMutex);
sp<Camera3Device> parent = mParent.promote();
@@ -4663,6 +4689,7 @@
status_t Camera3Device::RequestThread::removeTriggers(
const sp<CaptureRequest> &request) {
+ ATRACE_CALL();
Mutex::Autolock al(mTriggerMutex);
CameraMetadata &metadata = request->mSettings;
@@ -4779,6 +4806,7 @@
}
status_t Camera3Device::PreparerThread::prepare(int maxCount, sp<Camera3StreamInterface>& stream) {
+ ATRACE_CALL();
status_t res;
Mutex::Autolock l(mLock);
@@ -4822,6 +4850,7 @@
}
status_t Camera3Device::PreparerThread::clear() {
+ ATRACE_CALL();
Mutex::Autolock l(mLock);
for (const auto& stream : mPendingStreams) {
@@ -4834,6 +4863,7 @@
}
void Camera3Device::PreparerThread::setNotificationListener(wp<NotificationListener> listener) {
+ ATRACE_CALL();
Mutex::Autolock l(mLock);
mListener = listener;
}
diff --git a/services/oboeservice/AAudioServiceEndpoint.cpp b/services/oboeservice/AAudioServiceEndpoint.cpp
index 0f863fe..81f1d1b 100644
--- a/services/oboeservice/AAudioServiceEndpoint.cpp
+++ b/services/oboeservice/AAudioServiceEndpoint.cpp
@@ -64,8 +64,9 @@
}
result << " Registered Streams:" << "\n";
+ result << AAudioServiceStreamShared::dumpHeader() << "\n";
for (sp<AAudioServiceStreamShared> sharedStream : mRegisteredStreams) {
- result << sharedStream->dump();
+ result << sharedStream->dump() << "\n";
}
if (isLocked) {
@@ -177,7 +178,10 @@
configuration.getSamplesPerFrame() != mStreamInternal->getSamplesPerFrame()) {
return false;
}
-
return true;
}
+
+aaudio_result_t AAudioServiceEndpoint::getTimestamp(int64_t *positionFrames, int64_t *timeNanos) {
+ return mStreamInternal->getTimestamp(CLOCK_MONOTONIC, positionFrames, timeNanos);
+}
diff --git a/services/oboeservice/AAudioServiceEndpoint.h b/services/oboeservice/AAudioServiceEndpoint.h
index e40a670..603d497 100644
--- a/services/oboeservice/AAudioServiceEndpoint.h
+++ b/services/oboeservice/AAudioServiceEndpoint.h
@@ -69,6 +69,8 @@
mReferenceCount = count;
}
+ aaudio_result_t getTimestamp(int64_t *positionFrames, int64_t *timeNanos);
+
bool matches(const AAudioStreamConfiguration& configuration);
virtual AudioStreamInternal *getStreamInternal() = 0;
diff --git a/services/oboeservice/AAudioServiceEndpointCapture.cpp b/services/oboeservice/AAudioServiceEndpointCapture.cpp
index 6a37330..6504cc1 100644
--- a/services/oboeservice/AAudioServiceEndpointCapture.cpp
+++ b/services/oboeservice/AAudioServiceEndpointCapture.cpp
@@ -62,6 +62,9 @@
// result might be a frame count
while (mCallbackEnabled.load() && getStreamInternal()->isActive() && (result >= 0)) {
+
+ int64_t mmapFramesRead = getStreamInternal()->getFramesRead();
+
// Read audio data from stream using a blocking read.
result = getStreamInternal()->read(mDistributionBuffer, getFramesPerBurst(), timeoutNanos);
if (result == AAUDIO_ERROR_DISCONNECTED) {
@@ -74,18 +77,32 @@
}
// Distribute data to each active stream.
- { // use lock guard
+ { // brackets are for lock_guard
+
std::lock_guard <std::mutex> lock(mLockStreams);
- for (sp<AAudioServiceStreamShared> sharedStream : mRegisteredStreams) {
- if (sharedStream->isRunning()) {
- FifoBuffer *fifo = sharedStream->getDataFifoBuffer();
+ for (sp<AAudioServiceStreamShared> clientStream : mRegisteredStreams) {
+ if (clientStream->isRunning()) {
+ FifoBuffer *fifo = clientStream->getDataFifoBuffer();
+
+ // Determine offset between framePosition in client's stream vs the underlying
+ // MMAP stream.
+ int64_t clientFramesWritten = fifo->getWriteCounter();
+ // There are two indices that refer to the same frame.
+ int64_t positionOffset = mmapFramesRead - clientFramesWritten;
+ clientStream->setTimestampPositionOffset(positionOffset);
+
if (fifo->getFifoControllerBase()->getEmptyFramesAvailable() <
getFramesPerBurst()) {
underflowCount++;
} else {
fifo->write(mDistributionBuffer, getFramesPerBurst());
}
- sharedStream->markTransferTime(AudioClock::getNanoseconds());
+
+ // This timestamp represents the completion of data being written into the
+ // client buffer. It is sent to the client and used in the timing model
+ // to decide when data will be available to read.
+ Timestamp timestamp(fifo->getWriteCounter(), AudioClock::getNanoseconds());
+ clientStream->markTransferTime(timestamp);
}
}
}
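A worked example of the offset bookkeeping above (illustrative frame counts): the same audio frame has one index in the shared MMAP stream and another in the client FIFO, so the service stores the difference and later subtracts it when reporting hardware positions in the client's frame numbering.

    #include <cstdint>
    #include <cstdio>

    int main() {
        int64_t mmapFramesRead      = 100480; // frames the endpoint read from HW
        int64_t clientFramesWritten = 480;    // frames written into this client's FIFO
        int64_t positionOffset = mmapFramesRead - clientFramesWritten; // 100000

        // Later, in getHardwareTimestamp(): translate a HW position to client frames.
        int64_t hardwarePosition = 101440;
        int64_t clientPosition = hardwarePosition - positionOffset;    // 1440
        printf("client-relative position = %lld\n", (long long) clientPosition);
    }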
diff --git a/services/oboeservice/AAudioServiceEndpointPlay.cpp b/services/oboeservice/AAudioServiceEndpointPlay.cpp
index b83b918..8b1cc9f 100644
--- a/services/oboeservice/AAudioServiceEndpointPlay.cpp
+++ b/services/oboeservice/AAudioServiceEndpointPlay.cpp
@@ -65,7 +65,6 @@
// Mix data from each application stream and write result to the shared MMAP stream.
void *AAudioServiceEndpointPlay::callbackLoop() {
- int32_t underflowCount = 0;
aaudio_result_t result = AAUDIO_OK;
int64_t timeoutNanos = getStreamInternal()->calculateReasonableTimeout();
@@ -73,17 +72,33 @@
while (mCallbackEnabled.load() && getStreamInternal()->isActive() && (result >= 0)) {
// Mix data from each active stream.
mMixer.clear();
- { // use lock guard
+ { // brackets are for lock_guard
int index = 0;
+ int64_t mmapFramesWritten = getStreamInternal()->getFramesWritten();
+
std::lock_guard <std::mutex> lock(mLockStreams);
- for (sp<AAudioServiceStreamShared> sharedStream : mRegisteredStreams) {
- if (sharedStream->isRunning()) {
- FifoBuffer *fifo = sharedStream->getDataFifoBuffer();
+ for (sp<AAudioServiceStreamShared> clientStream : mRegisteredStreams) {
+ if (clientStream->isRunning()) {
+ FifoBuffer *fifo = clientStream->getDataFifoBuffer();
+ // Determine offset between framePosition in client's stream vs the underlying
+ // MMAP stream.
+ int64_t clientFramesRead = fifo->getReadCounter();
+ // These two indices refer to the same frame.
+ int64_t positionOffset = mmapFramesWritten - clientFramesRead;
+ clientStream->setTimestampPositionOffset(positionOffset);
+
float volume = 1.0; // to match legacy volume
bool underflowed = mMixer.mix(index, fifo, volume);
- underflowCount += underflowed ? 1 : 0;
- // TODO log underflows in each stream
- sharedStream->markTransferTime(AudioClock::getNanoseconds());
+
+ // This timestamp represents the completion of data being read out of the
+ // client buffer. It is sent to the client and used in the timing model
+ // to decide when the client has room to write more data.
+ Timestamp timestamp(fifo->getReadCounter(), AudioClock::getNanoseconds());
+ clientStream->markTransferTime(timestamp);
+
+ if (underflowed) {
+ clientStream->incrementXRunCount();
+ }
}
index++;
}
@@ -102,8 +117,5 @@
}
}
- ALOGW_IF((underflowCount > 0),
- "AAudioServiceEndpointPlay(): callbackLoop() had %d underflows", underflowCount);
-
return NULL; // TODO review
}
diff --git a/services/oboeservice/AAudioServiceStreamBase.cpp b/services/oboeservice/AAudioServiceStreamBase.cpp
index 5f7d179..e5f916c 100644
--- a/services/oboeservice/AAudioServiceStreamBase.cpp
+++ b/services/oboeservice/AAudioServiceStreamBase.cpp
@@ -18,6 +18,8 @@
//#define LOG_NDEBUG 0
#include <utils/Log.h>
+#include <iomanip>
+#include <iostream>
#include <mutex>
#include "binding/IAAudioService.h"
@@ -37,7 +39,8 @@
AAudioServiceStreamBase::AAudioServiceStreamBase()
: mUpMessageQueue(nullptr)
- , mAAudioThread() {
+ , mAAudioThread()
+ , mAtomicTimestamp() {
mMmapClient.clientUid = -1;
mMmapClient.clientPid = -1;
mMmapClient.packageName = String16("");
@@ -53,16 +56,22 @@
"service stream still open, state = %d", mState);
}
+std::string AAudioServiceStreamBase::dumpHeader() {
+ return std::string(" T Handle UId Run State Format Burst Chan Capacity");
+}
+
std::string AAudioServiceStreamBase::dump() const {
std::stringstream result;
- result << " -------- handle = 0x" << std::hex << mHandle << std::dec << "\n";
- result << " state = " << AAudio_convertStreamStateToText(mState) << "\n";
- result << " format = " << mAudioFormat << "\n";
- result << " framesPerBurst = " << mFramesPerBurst << "\n";
- result << " channelCount = " << mSamplesPerFrame << "\n";
- result << " capacityFrames = " << mCapacityInFrames << "\n";
- result << " owner uid = " << mMmapClient.clientUid << "\n";
+ result << " 0x" << std::setfill('0') << std::setw(8) << std::hex << mHandle
+ << std::dec << std::setfill(' ') ;
+ result << std::setw(6) << mMmapClient.clientUid;
+ result << std::setw(4) << (isRunning() ? "yes" : " no");
+ result << std::setw(6) << mState;
+ result << std::setw(7) << mAudioFormat;
+ result << std::setw(6) << mFramesPerBurst;
+ result << std::setw(5) << mSamplesPerFrame;
+ result << std::setw(9) << mCapacityInFrames;
return result.str();
}
@@ -211,15 +220,25 @@
aaudio_result_t AAudioServiceStreamBase::sendCurrentTimestamp() {
AAudioServiceMessage command;
+ // Send a timestamp for the clock model.
aaudio_result_t result = getFreeRunningPosition(&command.timestamp.position,
&command.timestamp.timestamp);
if (result == AAUDIO_OK) {
- // ALOGD("sendCurrentTimestamp(): position = %lld, nanos = %lld",
- // (long long) command.timestamp.position,
- // (long long) command.timestamp.timestamp);
- command.what = AAudioServiceMessage::code::TIMESTAMP;
+ command.what = AAudioServiceMessage::code::TIMESTAMP_SERVICE;
result = writeUpMessageQueue(&command);
- } else if (result == AAUDIO_ERROR_UNAVAILABLE) {
+
+ if (result == AAUDIO_OK) {
+ // Send a hardware timestamp for presentation time.
+ result = getHardwareTimestamp(&command.timestamp.position,
+ &command.timestamp.timestamp);
+ if (result == AAUDIO_OK) {
+ command.what = AAudioServiceMessage::code::TIMESTAMP_HARDWARE;
+ result = writeUpMessageQueue(&command);
+ }
+ }
+ }
+
+ if (result == AAUDIO_ERROR_UNAVAILABLE) {
result = AAUDIO_OK; // just not available yet, try again later
}
return result;
diff --git a/services/oboeservice/AAudioServiceStreamBase.h b/services/oboeservice/AAudioServiceStreamBase.h
index cebefec..2f94614 100644
--- a/services/oboeservice/AAudioServiceStreamBase.h
+++ b/services/oboeservice/AAudioServiceStreamBase.h
@@ -20,6 +20,7 @@
#include <assert.h>
#include <mutex>
+#include <media/AudioClient.h>
#include <utils/RefBase.h>
#include "fifo/FifoBuffer.h"
@@ -27,7 +28,7 @@
#include "binding/AudioEndpointParcelable.h"
#include "binding/AAudioServiceMessage.h"
#include "utility/AAudioUtilities.h"
-#include <media/AudioClient.h>
+#include "utility/AudioClock.h"
#include "SharedRingBuffer.h"
#include "AAudioThread.h"
@@ -53,7 +54,10 @@
ILLEGAL_THREAD_ID = 0
};
- std::string dump() const;
+ static std::string dumpHeader();
+
+ // does not include EOL
+ virtual std::string dump() const;
// -------------------------------------------------------------------
/**
@@ -170,6 +174,8 @@
*/
virtual aaudio_result_t getFreeRunningPosition(int64_t *positionFrames, int64_t *timeNanos) = 0;
+ virtual aaudio_result_t getHardwareTimestamp(int64_t *positionFrames, int64_t *timeNanos) = 0;
+
virtual aaudio_result_t getDownDataDescription(AudioEndpointParcelable &parcelable) = 0;
aaudio_stream_state_t mState = AAUDIO_STREAM_STATE_UNINITIALIZED;
@@ -191,6 +197,8 @@
android::AudioClient mMmapClient;
audio_port_handle_t mClientHandle = AUDIO_PORT_HANDLE_NONE;
+ SimpleDoubleBuffer<Timestamp> mAtomicTimestamp;
+
private:
aaudio_handle_t mHandle = -1;
};
diff --git a/services/oboeservice/AAudioServiceStreamMMAP.cpp b/services/oboeservice/AAudioServiceStreamMMAP.cpp
index ff02c0f..970d734 100644
--- a/services/oboeservice/AAudioServiceStreamMMAP.cpp
+++ b/services/oboeservice/AAudioServiceStreamMMAP.cpp
@@ -37,6 +37,11 @@
#define AAUDIO_BUFFER_CAPACITY_MIN 4 * 512
#define AAUDIO_SAMPLE_RATE_DEFAULT 48000
+// This is an estimate of the time difference between the HW and the MMAP time.
+// TODO Get presentation timestamps from the HAL instead of using these estimates.
+#define OUTPUT_ESTIMATED_HARDWARE_OFFSET_NANOS (3 * AAUDIO_NANOS_PER_MILLISECOND)
+#define INPUT_ESTIMATED_HARDWARE_OFFSET_NANOS (-1 * AAUDIO_NANOS_PER_MILLISECOND)
+
/**
* Service Stream that uses an MMAP buffer.
*/
@@ -113,10 +118,14 @@
config.channel_mask = (aaudioSamplesPerFrame == AAUDIO_UNSPECIFIED)
? AUDIO_CHANNEL_OUT_STEREO
: audio_channel_out_mask_from_count(aaudioSamplesPerFrame);
+ mHardwareTimeOffsetNanos = OUTPUT_ESTIMATED_HARDWARE_OFFSET_NANOS; // frames at DAC later
+
} else if (direction == AAUDIO_DIRECTION_INPUT) {
config.channel_mask = (aaudioSamplesPerFrame == AAUDIO_UNSPECIFIED)
? AUDIO_CHANNEL_IN_STEREO
: audio_channel_in_mask_from_count(aaudioSamplesPerFrame);
+ mHardwareTimeOffsetNanos = INPUT_ESTIMATED_HARDWARE_OFFSET_NANOS; // frames at ADC earlier
+
} else {
ALOGE("openMmapStream - invalid direction = %d", direction);
return AAUDIO_ERROR_ILLEGAL_ARGUMENT;
@@ -289,6 +298,7 @@
return AAudioConvert_androidToAAudioResult(mMmapStream->stop(clientHandle));
}
+// Get free-running DSP or DMA hardware position from the HAL.
aaudio_result_t AAudioServiceStreamMMAP::getFreeRunningPosition(int64_t *positionFrames,
int64_t *timeNanos) {
struct audio_mmap_position position;
@@ -305,12 +315,29 @@
disconnect();
} else {
mFramesRead.update32(position.position_frames);
- *positionFrames = mFramesRead.get();
- *timeNanos = position.time_nanoseconds;
+
+ Timestamp timestamp(mFramesRead.get(), position.time_nanoseconds);
+ mAtomicTimestamp.write(timestamp);
+ *positionFrames = timestamp.getPosition();
+ *timeNanos = timestamp.getNanoseconds();
}
return result;
}
+// Get timestamp that was written by getFreeRunningPosition()
+aaudio_result_t AAudioServiceStreamMMAP::getHardwareTimestamp(int64_t *positionFrames,
+ int64_t *timeNanos) {
+ // TODO Get presentation timestamp from the HAL
+ if (mAtomicTimestamp.isValid()) {
+ Timestamp timestamp = mAtomicTimestamp.read();
+ *positionFrames = timestamp.getPosition();
+ *timeNanos = timestamp.getNanoseconds() + mHardwareTimeOffsetNanos;
+ return AAUDIO_OK;
+ } else {
+ return AAUDIO_ERROR_UNAVAILABLE;
+ }
+}
+
void AAudioServiceStreamMMAP::onTearDown() {
ALOGD("AAudioServiceStreamMMAP::onTearDown() called");
disconnect();
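Illustrative arithmetic for the estimated hardware offsets defined earlier in this file (output frames hit the DAC about 3 ms after the MMAP timestamp; input frames left the ADC about 1 ms before it); getHardwareTimestamp() simply adds the signed offset to the stored MMAP time.

    #include <cstdint>
    #include <cstdio>

    int main() {
        const int64_t NANOS_PER_MS = 1000000;
        int64_t mmapTimeNanos = 500 * NANOS_PER_MS;        // from the HAL position
        int64_t outputOffset  = 3 * NANOS_PER_MS;          // OUTPUT_ESTIMATED_HARDWARE_OFFSET_NANOS
        int64_t inputOffset   = -1 * NANOS_PER_MS;         // INPUT_ESTIMATED_HARDWARE_OFFSET_NANOS
        printf("output presentation time = %lld ns\n",
               (long long)(mmapTimeNanos + outputOffset)); // 503 ms
        printf("input capture time       = %lld ns\n",
               (long long)(mmapTimeNanos + inputOffset));  // 499 ms
    }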
diff --git a/services/oboeservice/AAudioServiceStreamMMAP.h b/services/oboeservice/AAudioServiceStreamMMAP.h
index 533e5a8..e6f8fad 100644
--- a/services/oboeservice/AAudioServiceStreamMMAP.h
+++ b/services/oboeservice/AAudioServiceStreamMMAP.h
@@ -100,6 +100,8 @@
aaudio_result_t getDownDataDescription(AudioEndpointParcelable &parcelable) override;
aaudio_result_t getFreeRunningPosition(int64_t *positionFrames, int64_t *timeNanos) override;
+ virtual aaudio_result_t getHardwareTimestamp(int64_t *positionFrames,
+ int64_t *timeNanos) override;
private:
// This proxy class was needed to prevent a crash in AudioFlinger
@@ -132,6 +134,7 @@
MonotonicCounter mFramesRead;
int32_t mPreviousFrameCounter = 0; // from HAL
int mAudioDataFileDescriptor = -1;
+ int64_t mHardwareTimeOffsetNanos = 0; // TODO get from HAL
// Interface to the AudioFlinger MMAP support.
android::sp<android::MmapStreamInterface> mMmapStream;
diff --git a/services/oboeservice/AAudioServiceStreamShared.cpp b/services/oboeservice/AAudioServiceStreamShared.cpp
index 5654113..d648c6d 100644
--- a/services/oboeservice/AAudioServiceStreamShared.cpp
+++ b/services/oboeservice/AAudioServiceStreamShared.cpp
@@ -18,6 +18,8 @@
//#define LOG_NDEBUG 0
#include <utils/Log.h>
+#include <iomanip>
+#include <iostream>
#include <mutex>
#include <aaudio/AAudio.h>
@@ -41,9 +43,33 @@
AAudioServiceStreamShared::AAudioServiceStreamShared(AAudioService &audioService)
: mAudioService(audioService)
+ , mTimestampPositionOffset(0)
+ , mXRunCount(0)
{
}
+std::string AAudioServiceStreamShared::dumpHeader() {
+ std::stringstream result;
+ result << AAudioServiceStreamBase::dumpHeader();
+ result << " Write# Read# Avail XRuns";
+ return result.str();
+}
+
+std::string AAudioServiceStreamShared::dump() const {
+ std::stringstream result;
+ result << AAudioServiceStreamBase::dump();
+
+ auto fifo = mAudioDataQueue->getFifoBuffer();
+ int32_t readCounter = fifo->getReadCounter();
+ int32_t writeCounter = fifo->getWriteCounter();
+ result << std::setw(10) << writeCounter;
+ result << std::setw(10) << readCounter;
+ result << std::setw(8) << (writeCounter - readCounter);
+ result << std::setw(8) << getXRunCount();
+
+ return result.str();
+}
+
int32_t AAudioServiceStreamShared::calculateBufferCapacity(int32_t requestedCapacityFrames,
int32_t framesPerBurst) {
@@ -196,6 +222,7 @@
}
AAudioServiceEndpoint *endpoint = mServiceEndpoint;
if (endpoint == nullptr) {
+ ALOGE("AAudioServiceStreamShared::start() missing endpoint");
return AAUDIO_ERROR_INVALID_STATE;
}
// For output streams, this will add the stream to the mixer.
@@ -223,6 +250,7 @@
}
AAudioServiceEndpoint *endpoint = mServiceEndpoint;
if (endpoint == nullptr) {
+ ALOGE("AAudioServiceStreamShared::pause() missing endpoint");
return AAUDIO_ERROR_INVALID_STATE;
}
endpoint->getStreamInternal()->stopClient(mClientHandle);
@@ -240,6 +268,7 @@
}
AAudioServiceEndpoint *endpoint = mServiceEndpoint;
if (endpoint == nullptr) {
+ ALOGE("AAudioServiceStreamShared::stop() missing endpoint");
return AAUDIO_ERROR_INVALID_STATE;
}
endpoint->getStreamInternal()->stopClient(mClientHandle);
@@ -259,6 +288,7 @@
aaudio_result_t AAudioServiceStreamShared::flush() {
AAudioServiceEndpoint *endpoint = mServiceEndpoint;
if (endpoint == nullptr) {
+ ALOGE("AAudioServiceStreamShared::flush() missing endpoint");
return AAUDIO_ERROR_INVALID_STATE;
}
if (mState != AAUDIO_STREAM_STATE_PAUSED) {
@@ -307,15 +337,30 @@
return AAUDIO_OK;
}
-void AAudioServiceStreamShared::markTransferTime(int64_t nanoseconds) {
- mMarkedPosition = mAudioDataQueue->getFifoBuffer()->getReadCounter();
- mMarkedTime = nanoseconds;
+void AAudioServiceStreamShared::markTransferTime(Timestamp &timestamp) {
+ mAtomicTimestamp.write(timestamp);
}
+// Get the timestamp that was written by the real-time service thread, e.g. the mixer.
aaudio_result_t AAudioServiceStreamShared::getFreeRunningPosition(int64_t *positionFrames,
int64_t *timeNanos) {
- // TODO get these two numbers as an atomic pair
- *positionFrames = mMarkedPosition;
- *timeNanos = mMarkedTime;
- return AAUDIO_OK;
+ if (mAtomicTimestamp.isValid()) {
+ Timestamp timestamp = mAtomicTimestamp.read();
+ *positionFrames = timestamp.getPosition();
+ *timeNanos = timestamp.getNanoseconds();
+ return AAUDIO_OK;
+ } else {
+ return AAUDIO_ERROR_UNAVAILABLE;
+ }
+}
+
+// Get timestamp from lower level service.
+aaudio_result_t AAudioServiceStreamShared::getHardwareTimestamp(int64_t *positionFrames,
+ int64_t *timeNanos) {
+
+ aaudio_result_t result = mServiceEndpoint->getTimestamp(positionFrames, timeNanos);
+ if (result == AAUDIO_OK) {
+ *positionFrames -= mTimestampPositionOffset.load(); // Offset from shared MMAP stream
+ }
+ return result;
}
diff --git a/services/oboeservice/AAudioServiceStreamShared.h b/services/oboeservice/AAudioServiceStreamShared.h
index 6b67337..36a56b8 100644
--- a/services/oboeservice/AAudioServiceStreamShared.h
+++ b/services/oboeservice/AAudioServiceStreamShared.h
@@ -46,6 +46,10 @@
AAudioServiceStreamShared(android::AAudioService &aAudioService);
virtual ~AAudioServiceStreamShared() = default;
+ static std::string dumpHeader();
+
+ std::string dump() const override;
+
aaudio_result_t open(const aaudio::AAudioStreamRequest &request,
aaudio::AAudioStreamConfiguration &configurationOutput) override;
@@ -85,7 +89,19 @@
/* Keep a record of when a buffer transfer completed.
* This allows for a more accurate timing model.
*/
- void markTransferTime(int64_t nanoseconds);
+ void markTransferTime(Timestamp &timestamp);
+
+ void setTimestampPositionOffset(int64_t deltaFrames) {
+ mTimestampPositionOffset.store(deltaFrames);
+ }
+
+ void incrementXRunCount() {
+ mXRunCount++;
+ }
+
+ int32_t getXRunCount() const {
+ return mXRunCount.load();
+ }
protected:
@@ -93,6 +109,9 @@
aaudio_result_t getFreeRunningPosition(int64_t *positionFrames, int64_t *timeNanos) override;
+ virtual aaudio_result_t getHardwareTimestamp(int64_t *positionFrames,
+ int64_t *timeNanos) override;
+
/**
* @param requestedCapacityFrames
* @param framesPerBurst
@@ -106,8 +125,8 @@
AAudioServiceEndpoint *mServiceEndpoint = nullptr;
SharedRingBuffer *mAudioDataQueue = nullptr;
- int64_t mMarkedPosition = 0;
- int64_t mMarkedTime = 0;
+ std::atomic<int64_t> mTimestampPositionOffset;
+ std::atomic<int32_t> mXRunCount;
};
} /* namespace aaudio */
diff --git a/services/oboeservice/AAudioThread.cpp b/services/oboeservice/AAudioThread.cpp
index ebb50f8..c6fb57d 100644
--- a/services/oboeservice/AAudioThread.cpp
+++ b/services/oboeservice/AAudioThread.cpp
@@ -53,7 +53,7 @@
aaudio_result_t AAudioThread::start(Runnable *runnable) {
if (mHasThread) {
- ALOGE("AAudioThread::start() - mHasThread.load() already true");
+ ALOGE("AAudioThread::start() - mHasThread already true");
return AAUDIO_ERROR_INVALID_STATE;
}
// mRunnable will be read by the new thread when it starts.
@@ -71,6 +71,7 @@
aaudio_result_t AAudioThread::stop() {
if (!mHasThread) {
+ ALOGE("AAudioThread::stop() but no thread running");
return AAUDIO_ERROR_INVALID_STATE;
}
int err = pthread_join(mThread, nullptr);