Merge "camera: Let SurfaceView transform preview only when rotation override isn't rotation only" into main
diff --git a/apex/Android.bp b/apex/Android.bp
index b0d7c02..356bf03 100644
--- a/apex/Android.bp
+++ b/apex/Android.bp
@@ -173,6 +173,7 @@
"mediaswcodec",
],
native_shared_libs: [
+ "libapexcodecs",
"libcodec2_hidl@1.0",
"libcodec2_hidl@1.1",
"libcodec2_hidl@1.2",
diff --git a/camera/camera_platform.aconfig b/camera/camera_platform.aconfig
index cabfbc4..ebfd7d7 100644
--- a/camera/camera_platform.aconfig
+++ b/camera/camera_platform.aconfig
@@ -3,6 +3,14 @@
flag {
namespace: "camera_platform"
+ name: "camera_heif_gainmap"
+ is_exported: true
+ description: "Extend HEIC/HEIF still capture with HDR gainmap"
+ bug: "362608343"
+}
+
+flag {
+ namespace: "camera_platform"
name: "camera_hsum_permission"
is_exported: true
description: "Camera access by headless system user"
@@ -167,16 +175,10 @@
flag {
namespace: "camera_platform"
- name: "use_context_attribution_source"
- description: "Use the context-provided AttributionSource when checking for client permissions"
+ name: "data_delivery_permission_checks"
+ description: "Pass the full AttributionSource chain to PermissionChecker for data delivery"
bug: "190657833"
-}
-
-flag {
- namespace: "camera_platform"
- name: "check_full_attribution_source_chain"
- description: "Pass the full AttributionSource chain to PermissionChecker"
- bug: "190657833"
+ is_fixed_read_only: true
}
flag {
@@ -186,3 +188,50 @@
description: "Support setting and getting mirror mode for shared surfaces"
bug: "298899993"
}
+
+flag {
+ namespace: "camera_platform"
+ is_exported: true
+ name: "multiresolution_imagereader_usage_public"
+ description: "Make constructor for MultiResolutionImageReader with usage public"
+ bug: "338621560"
+}
+
+flag {
+ namespace: "camera_platform"
+ is_exported: true
+ name: "color_temperature"
+ description: "Add keys to manually set color temperature and color tint"
+ bug: "359409044"
+}
+
+flag {
+ namespace: "camera_platform"
+ name: "night_mode_indicator"
+ is_exported: true
+ description: "Indicates when to activate Night Mode Camera Extension"
+ bug: "335902696"
+}
+
+flag {
+ namespace: "camera_platform"
+ name: "zoom_method"
+ is_exported: true
+ description: "Gives apps explicit control on reflects zoom via ZOOM_RATIO capture result"
+ bug: "298899993"
+}
+
+flag {
+ namespace: "camera_platform"
+ is_exported: true
+ name: "ae_priority"
+ description: "Add AE priority modes"
+ bug: "359944765"
+}
+
+flag {
+ namespace: "camera_platform"
+ name: "feature_combination_baklava"
+ description: "Add new feature combination query version for Baklava"
+ bug: "370778206"
+}
diff --git a/camera/ndk/impl/ACameraMetadata.cpp b/camera/ndk/impl/ACameraMetadata.cpp
index 69b30f7..32e2f3d 100644
--- a/camera/ndk/impl/ACameraMetadata.cpp
+++ b/camera/ndk/impl/ACameraMetadata.cpp
@@ -536,6 +536,8 @@
case ACAMERA_COLOR_CORRECTION_TRANSFORM:
case ACAMERA_COLOR_CORRECTION_GAINS:
case ACAMERA_COLOR_CORRECTION_ABERRATION_MODE:
+ case ACAMERA_COLOR_CORRECTION_COLOR_TEMPERATURE:
+ case ACAMERA_COLOR_CORRECTION_COLOR_TINT:
case ACAMERA_CONTROL_AE_ANTIBANDING_MODE:
case ACAMERA_CONTROL_AE_EXPOSURE_COMPENSATION:
case ACAMERA_CONTROL_AE_LOCK:
@@ -560,6 +562,8 @@
case ACAMERA_CONTROL_ZOOM_RATIO:
case ACAMERA_CONTROL_SETTINGS_OVERRIDE:
case ACAMERA_CONTROL_AUTOFRAMING:
+ case ACAMERA_CONTROL_ZOOM_METHOD:
+ case ACAMERA_CONTROL_AE_PRIORITY_MODE:
case ACAMERA_EDGE_MODE:
case ACAMERA_FLASH_MODE:
case ACAMERA_FLASH_STRENGTH_LEVEL:
diff --git a/camera/ndk/include/camera/NdkCameraMetadataTags.h b/camera/ndk/include/camera/NdkCameraMetadataTags.h
index acc3c7c..b792de0 100644
--- a/camera/ndk/include/camera/NdkCameraMetadataTags.h
+++ b/camera/ndk/include/camera/NdkCameraMetadataTags.h
@@ -307,6 +307,100 @@
*/
ACAMERA_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES = // byte[n]
ACAMERA_COLOR_CORRECTION_START + 4,
+ /**
+ * <p>Specifies the color temperature for CCT mode in Kelvin
+ * to adjust the white balance of the image.</p>
+ *
+ * <p>Type: int32</p>
+ *
+ * <p>This tag may appear in:
+ * <ul>
+ * <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
+ * <li>ACaptureRequest</li>
+ * </ul></p>
+ *
+ * <p>Sets the color temperature in Kelvin units for when
+ * ACAMERA_COLOR_CORRECTION_MODE is CCT to adjust the
+ * white balance of the image.</p>
+ * <p>If CCT mode is enabled without a requested color temperature,
+ * a default value will be set by the camera device. The default value can be
+ * retrieved by checking the corresponding capture result. Color temperatures
+ * requested outside the advertised ACAMERA_COLOR_CORRECTION_COLOR_TEMPERATURE_RANGE
+ * will be clamped.</p>
+ *
+ * @see ACAMERA_COLOR_CORRECTION_COLOR_TEMPERATURE_RANGE
+ * @see ACAMERA_COLOR_CORRECTION_MODE
+ */
+ ACAMERA_COLOR_CORRECTION_COLOR_TEMPERATURE = // int32
+ ACAMERA_COLOR_CORRECTION_START + 5,
+ /**
+ * <p>Specifies the color tint for CCT mode to adjust the white
+ * balance of the image.</p>
+ *
+ * <p>Type: int32</p>
+ *
+ * <p>This tag may appear in:
+ * <ul>
+ * <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
+ * <li>ACaptureRequest</li>
+ * </ul></p>
+ *
+ * <p>Sets the color tint for when ACAMERA_COLOR_CORRECTION_MODE
+ * is CCT to adjust the white balance of the image.</p>
+ * <p>If CCT mode is enabled without a requested color tint,
+ * a default value will be set by the camera device. The default value can be
+ * retrieved by checking the corresponding capture result. Color tints requested
+ * outside the supported range will be clamped to the nearest limit (-50 or +50).</p>
+ *
+ * @see ACAMERA_COLOR_CORRECTION_MODE
+ */
+ ACAMERA_COLOR_CORRECTION_COLOR_TINT = // int32
+ ACAMERA_COLOR_CORRECTION_START + 6,
+ /**
+ * <p>The range of supported color temperature values for
+ * ACAMERA_COLOR_CORRECTION_COLOR_TEMPERATURE.</p>
+ *
+ * @see ACAMERA_COLOR_CORRECTION_COLOR_TEMPERATURE
+ *
+ * <p>Type: int32[2]</p>
+ *
+ * <p>This tag may appear in:
+ * <ul>
+ * <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+ * </ul></p>
+ *
+ * <p>This key lists the valid range of color temperature values for
+ * ACAMERA_COLOR_CORRECTION_COLOR_TEMPERATURE supported by this camera device.</p>
+ * <p>This key will be null on devices that do not support CCT mode for
+ * ACAMERA_COLOR_CORRECTION_MODE.</p>
+ *
+ * @see ACAMERA_COLOR_CORRECTION_COLOR_TEMPERATURE
+ * @see ACAMERA_COLOR_CORRECTION_MODE
+ */
+ ACAMERA_COLOR_CORRECTION_COLOR_TEMPERATURE_RANGE = // int32[2]
+ ACAMERA_COLOR_CORRECTION_START + 7,
+ /**
+ * <p>List of color correction modes for ACAMERA_COLOR_CORRECTION_MODE that are
+ * supported by this camera device.</p>
+ *
+ * @see ACAMERA_COLOR_CORRECTION_MODE
+ *
+ * <p>Type: byte[n]</p>
+ *
+ * <p>This tag may appear in:
+ * <ul>
+ * <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+ * </ul></p>
+ *
+ * <p>This key lists the valid modes for ACAMERA_COLOR_CORRECTION_MODE. If no
+ * color correction modes are available for a device, this key will be null.</p>
+ * <p>Camera devices that have a FULL hardware level will always include at least
+ * FAST, HIGH_QUALITY, and TRANSFORM_MATRIX modes.</p>
+ *
+ * @see ACAMERA_COLOR_CORRECTION_MODE
+ */
+ ACAMERA_COLOR_CORRECTION_AVAILABLE_MODES = // byte[n]
+ ACAMERA_COLOR_CORRECTION_START + 8,
ACAMERA_COLOR_CORRECTION_END,
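For illustration, a minimal NDK sketch tying the new CCT keys together: confirm the range key is present, clamp the request, and disable AWB so CCT takes effect. This is not part of the change; `chars` and `request` stand in for a valid characteristics object and capture request.

```cpp
#include <camera/NdkCameraMetadata.h>
#include <camera/NdkCaptureRequest.h>

// Sketch: request manual white balance via CCT mode.
static void requestCctWhiteBalance(const ACameraMetadata* chars, ACaptureRequest* request) {
    ACameraMetadata_const_entry range;
    if (ACameraMetadata_getConstEntry(chars,
            ACAMERA_COLOR_CORRECTION_COLOR_TEMPERATURE_RANGE, &range) != ACAMERA_OK) {
        return;  // key is absent on devices without CCT support, per the docs above
    }
    uint8_t awbOff = ACAMERA_CONTROL_AWB_MODE_OFF;  // CCT is ignored while AWB is enabled
    uint8_t mode = ACAMERA_COLOR_CORRECTION_MODE_CCT;
    int32_t temperatureK = 6500;                    // clamp into the advertised range
    if (temperatureK < range.data.i32[0]) temperatureK = range.data.i32[0];
    if (temperatureK > range.data.i32[1]) temperatureK = range.data.i32[1];
    int32_t tint = 0;                               // out-of-range tints clamp to [-50, +50]
    ACaptureRequest_setEntry_u8(request, ACAMERA_CONTROL_AWB_MODE, 1, &awbOff);
    ACaptureRequest_setEntry_u8(request, ACAMERA_COLOR_CORRECTION_MODE, 1, &mode);
    ACaptureRequest_setEntry_i32(request,
            ACAMERA_COLOR_CORRECTION_COLOR_TEMPERATURE, 1, &temperatureK);
    ACaptureRequest_setEntry_i32(request, ACAMERA_COLOR_CORRECTION_COLOR_TINT, 1, &tint);
}
```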
/**
@@ -469,7 +563,9 @@
* application's selected exposure time, sensor sensitivity,
* and frame duration (ACAMERA_SENSOR_EXPOSURE_TIME,
* ACAMERA_SENSOR_SENSITIVITY, and
- * ACAMERA_SENSOR_FRAME_DURATION). If one of the FLASH modes
+ * ACAMERA_SENSOR_FRAME_DURATION). If ACAMERA_CONTROL_AE_PRIORITY_MODE is
+ * enabled, the relevant priority CaptureRequest settings will not be overridden.
+ * See ACAMERA_CONTROL_AE_PRIORITY_MODE for more details. If one of the FLASH modes
* is selected, the camera device's flash unit controls are
* also overridden.</p>
* <p>The FLASH modes are only available if the camera device
@@ -489,6 +585,7 @@
* different ACAMERA_FLASH_STRENGTH_LEVEL.</p>
*
* @see ACAMERA_CONTROL_AE_MODE
+ * @see ACAMERA_CONTROL_AE_PRIORITY_MODE
* @see ACAMERA_CONTROL_MODE
* @see ACAMERA_FLASH_INFO_AVAILABLE
* @see ACAMERA_FLASH_MODE
@@ -2307,6 +2404,95 @@
*/
ACAMERA_CONTROL_LOW_LIGHT_BOOST_STATE = // byte (acamera_metadata_enum_android_control_low_light_boost_state_t)
ACAMERA_CONTROL_START + 59,
+ /**
+ * <p>Whether the application uses ACAMERA_SCALER_CROP_REGION or ACAMERA_CONTROL_ZOOM_RATIO
+ * to control zoom levels.</p>
+ *
+ * @see ACAMERA_CONTROL_ZOOM_RATIO
+ * @see ACAMERA_SCALER_CROP_REGION
+ *
+ * <p>Type: byte (acamera_metadata_enum_android_control_zoom_method_t)</p>
+ *
+ * <p>This tag may appear in:
+ * <ul>
+ * <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
+ * <li>ACaptureRequest</li>
+ * </ul></p>
+ *
+ * <p>If set to AUTO, the camera device detects which capture request key the application uses
+ * to do zoom, ACAMERA_SCALER_CROP_REGION or ACAMERA_CONTROL_ZOOM_RATIO. If
+ * the application doesn't set ACAMERA_CONTROL_ZOOM_RATIO or sets it to 1.0 in the capture
+ * request, the effective zoom level is reflected in ACAMERA_SCALER_CROP_REGION in capture
+ * results. If ACAMERA_CONTROL_ZOOM_RATIO is set to values other than 1.0, the effective
+ * zoom level is reflected in ACAMERA_CONTROL_ZOOM_RATIO. AUTO is the default value
+ * for this control, and also the behavior of the OS before Android version
+ * <a href="https://developer.android.com/reference/android/os/Build.VERSION_CODES.html#BAKLAVA">BAKLAVA</a>.</p>
+ * <p>If set to ZOOM_RATIO, the application explicitly specifies that the zoom level is
+ * controlled by ACAMERA_CONTROL_ZOOM_RATIO, and the effective zoom level is reflected in
+ * ACAMERA_CONTROL_ZOOM_RATIO in capture results. This addresses an ambiguity with AUTO,
+ * with which the camera device cannot know if the application is using cropRegion or
+ * zoomRatio at 1.0x.</p>
+ *
+ * @see ACAMERA_CONTROL_ZOOM_RATIO
+ * @see ACAMERA_SCALER_CROP_REGION
+ */
+ ACAMERA_CONTROL_ZOOM_METHOD = // byte (acamera_metadata_enum_android_control_zoom_method_t)
+ ACAMERA_CONTROL_START + 60,
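To make the 1.0x disambiguation above concrete, a short sketch (not part of the change; ACAMERA_CONTROL_ZOOM_METHOD_ZOOM_RATIO is assumed to be the enum value paired with this tag):

```cpp
#include <camera/NdkCaptureRequest.h>

// Sketch: opt in to ratio-based zoom so even 1.0x is reported back through
// ACAMERA_CONTROL_ZOOM_RATIO rather than ACAMERA_SCALER_CROP_REGION.
static void useExplicitZoomRatio(ACaptureRequest* request, float ratio) {
    uint8_t method = ACAMERA_CONTROL_ZOOM_METHOD_ZOOM_RATIO;  // assumed enum value name
    ACaptureRequest_setEntry_u8(request, ACAMERA_CONTROL_ZOOM_METHOD, 1, &method);
    ACaptureRequest_setEntry_float(request, ACAMERA_CONTROL_ZOOM_RATIO, 1, &ratio);
}
```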
+ /**
+ * <p>Turn on AE priority mode.</p>
+ *
+ * <p>Type: byte (acamera_metadata_enum_android_control_ae_priority_mode_t)</p>
+ *
+ * <p>This tag may appear in:
+ * <ul>
+ * <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
+ * <li>ACaptureRequest</li>
+ * </ul></p>
+ *
+ * <p>This control is only effective if ACAMERA_CONTROL_MODE is
+ * AUTO and ACAMERA_CONTROL_AE_MODE is set to one of its
+ * ON modes, with the exception of ON_LOW_LIGHT_BOOST_BRIGHTNESS_PRIORITY.</p>
+ * <p>When a priority mode is enabled, the camera device's
+ * auto-exposure routine will maintain the application's
+ * selected parameters relevant to the priority mode while overriding
+ * the remaining exposure parameters
+ * (ACAMERA_SENSOR_EXPOSURE_TIME, ACAMERA_SENSOR_SENSITIVITY, and
+ * ACAMERA_SENSOR_FRAME_DURATION). For example, if
+ * SENSOR_SENSITIVITY_PRIORITY mode is enabled, the camera device will
+ * maintain the application-selected ACAMERA_SENSOR_SENSITIVITY
+ * while adjusting ACAMERA_SENSOR_EXPOSURE_TIME
+ * and ACAMERA_SENSOR_FRAME_DURATION. The overridden fields for a
+ * given capture will be available in its CaptureResult.</p>
+ *
+ * @see ACAMERA_CONTROL_AE_MODE
+ * @see ACAMERA_CONTROL_MODE
+ * @see ACAMERA_SENSOR_EXPOSURE_TIME
+ * @see ACAMERA_SENSOR_FRAME_DURATION
+ * @see ACAMERA_SENSOR_SENSITIVITY
+ */
+ ACAMERA_CONTROL_AE_PRIORITY_MODE = // byte (acamera_metadata_enum_android_control_ae_priority_mode_t)
+ ACAMERA_CONTROL_START + 61,
+ /**
+ * <p>List of auto-exposure priority modes for ACAMERA_CONTROL_AE_PRIORITY_MODE
+ * that are supported by this camera device.</p>
+ *
+ * @see ACAMERA_CONTROL_AE_PRIORITY_MODE
+ *
+ * <p>Type: byte[n]</p>
+ *
+ * <p>This tag may appear in:
+ * <ul>
+ * <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+ * </ul></p>
+ *
+ * <p>This entry lists the valid modes for
+ * ACAMERA_CONTROL_AE_PRIORITY_MODE for this camera device.
+ * If no AE priority modes are available for a device, this will only list OFF.</p>
+ *
+ * @see ACAMERA_CONTROL_AE_PRIORITY_MODE
+ */
+ ACAMERA_CONTROL_AE_AVAILABLE_PRIORITY_MODES = // byte[n]
+ ACAMERA_CONTROL_START + 62,
ACAMERA_CONTROL_END,
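A sketch of the AE priority flow described above: check availability, then pin ISO and let AE adjust exposure time and frame duration. Illustrative only; `chars` and `request` are assumed valid, and ACAMERA_CONTROL_AE_MODE must already be in one of its ON modes.

```cpp
#include <camera/NdkCameraMetadata.h>
#include <camera/NdkCaptureRequest.h>

// Sketch: fix sensitivity at ISO 400 and let the AE routine drive the rest.
static void pinIso(const ACameraMetadata* chars, ACaptureRequest* request) {
    ACameraMetadata_const_entry modes;
    if (ACameraMetadata_getConstEntry(chars,
            ACAMERA_CONTROL_AE_AVAILABLE_PRIORITY_MODES, &modes) != ACAMERA_OK) {
        return;
    }
    for (uint32_t i = 0; i < modes.count; i++) {
        if (modes.data.u8[i] != ACAMERA_CONTROL_AE_PRIORITY_MODE_SENSOR_SENSITIVITY_PRIORITY) {
            continue;
        }
        uint8_t priority = ACAMERA_CONTROL_AE_PRIORITY_MODE_SENSOR_SENSITIVITY_PRIORITY;
        int32_t iso = 400;  // kept by AE; exposure time and frame duration are overridden
        ACaptureRequest_setEntry_u8(request, ACAMERA_CONTROL_AE_PRIORITY_MODE, 1, &priority);
        ACaptureRequest_setEntry_i32(request, ACAMERA_SENSOR_SENSITIVITY, 1, &iso);
        break;
    }
}
```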
/**
@@ -4793,9 +4979,12 @@
* duration exposed to the nearest possible value (rather than expose longer).
* The final exposure time used will be available in the output capture result.</p>
* <p>This control is only effective if ACAMERA_CONTROL_AE_MODE or ACAMERA_CONTROL_MODE is set to
- * OFF; otherwise the auto-exposure algorithm will override this value.</p>
+ * OFF; otherwise the auto-exposure algorithm will override this value. However, in the
+ * case that ACAMERA_CONTROL_AE_PRIORITY_MODE is set to SENSOR_EXPOSURE_TIME_PRIORITY, this
+ * control will be effective and not controlled by the auto-exposure algorithm.</p>
*
* @see ACAMERA_CONTROL_AE_MODE
+ * @see ACAMERA_CONTROL_AE_PRIORITY_MODE
* @see ACAMERA_CONTROL_MODE
*/
ACAMERA_SENSOR_EXPOSURE_TIME = // int64
@@ -4904,7 +5093,9 @@
* value. The final sensitivity used will be available in the
* output capture result.</p>
* <p>This control is only effective if ACAMERA_CONTROL_AE_MODE or ACAMERA_CONTROL_MODE is set to
- * OFF; otherwise the auto-exposure algorithm will override this value.</p>
+ * OFF; otherwise the auto-exposure algorithm will override this value. However, in the
+ * case that ACAMERA_CONTROL_AE_PRIORITY_MODE is set to SENSOR_SENSITIVITY_PRIORITY, this
+ * control will be effective and not controlled by the auto-exposure algorithm.</p>
* <p>Note that for devices supporting postRawSensitivityBoost, the total sensitivity applied
* to the final processed image is the combination of ACAMERA_SENSOR_SENSITIVITY and
* ACAMERA_CONTROL_POST_RAW_SENSITIVITY_BOOST. In case the application uses the sensor
@@ -4913,6 +5104,7 @@
* set postRawSensitivityBoost.</p>
*
* @see ACAMERA_CONTROL_AE_MODE
+ * @see ACAMERA_CONTROL_AE_PRIORITY_MODE
* @see ACAMERA_CONTROL_MODE
* @see ACAMERA_CONTROL_POST_RAW_SENSITIVITY_BOOST
* @see ACAMERA_SENSOR_INFO_SENSITIVITY_RANGE
@@ -7865,6 +8057,145 @@
ACAMERA_HEIC_AVAILABLE_HEIC_STALL_DURATIONS_MAXIMUM_RESOLUTION =
// int64[4*n]
ACAMERA_HEIC_START + 5,
+ /**
+ * <p>The available HEIC (ISO/IEC 23008-12/24) UltraHDR stream
+ * configurations that this camera device supports
+ * (i.e. format, width, height, output/input stream).</p>
+ *
+ * <p>Type: int32[n*4] (acamera_metadata_enum_android_heic_available_heic_ultra_hdr_stream_configurations_t)</p>
+ *
+ * <p>This tag may appear in:
+ * <ul>
+ * <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+ * </ul></p>
+ *
+ * <p>The configurations are listed as <code>(format, width, height, input?)</code> tuples.</p>
+ * <p>All the static, control, and dynamic metadata tags related to JPEG apply to HEIC formats.
+ * Configuring JPEG and HEIC streams at the same time is not supported.</p>
+ * <p>All the configuration tuples <code>(format, width, height, input?)</code> will contain
+ * AIMAGE_FORMAT_HEIC format as OUTPUT only.</p>
+ */
+ ACAMERA_HEIC_AVAILABLE_HEIC_ULTRA_HDR_STREAM_CONFIGURATIONS =
+ // int32[n*4] (acamera_metadata_enum_android_heic_available_heic_ultra_hdr_stream_configurations_t)
+ ACAMERA_HEIC_START + 6,
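Since the key uses the familiar (format, width, height, input?) packing, enumerating UltraHDR output sizes is a stride-4 walk over the int32 array (sketch, not part of the change; the OUTPUT constant comes from the enum added later in this file):

```cpp
#include <camera/NdkCameraMetadata.h>

// Sketch: collect HEIC UltraHDR output sizes from the stride-4 tuple array.
static void listUltraHdrSizes(const ACameraMetadata* chars) {
    ACameraMetadata_const_entry cfgs;
    if (ACameraMetadata_getConstEntry(chars,
            ACAMERA_HEIC_AVAILABLE_HEIC_ULTRA_HDR_STREAM_CONFIGURATIONS, &cfgs) != ACAMERA_OK) {
        return;
    }
    for (uint32_t i = 0; i + 3 < cfgs.count; i += 4) {
        if (cfgs.data.i32[i + 3] !=
                ACAMERA_HEIC_AVAILABLE_HEIC_ULTRA_HDR_STREAM_CONFIGURATIONS_OUTPUT) {
            continue;  // the docs above say only OUTPUT entries are listed anyway
        }
        int32_t width = cfgs.data.i32[i + 1];   // i32[i] is the format (AIMAGE_FORMAT_HEIC)
        int32_t height = cfgs.data.i32[i + 2];
        // ... record (width, height) as a supported UltraHDR still size
    }
}
```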
+ /**
+ * <p>This lists the minimum frame duration for each
+ * format/size combination for HEIC UltraHDR output formats.</p>
+ *
+ * <p>Type: int64[4*n]</p>
+ *
+ * <p>This tag may appear in:
+ * <ul>
+ * <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+ * </ul></p>
+ *
+ * <p>This should correspond to the frame duration when only that
+ * stream is active, with all processing (typically in android.*.mode)
+ * set to either OFF or FAST.</p>
+ * <p>When multiple streams are used in a request, the minimum frame
+ * duration will be max(individual stream min durations).</p>
+ * <p>See ACAMERA_SENSOR_FRAME_DURATION and
+ * ACAMERA_SCALER_AVAILABLE_STALL_DURATIONS for more details about
+ * calculating the max frame rate.</p>
+ *
+ * @see ACAMERA_SCALER_AVAILABLE_STALL_DURATIONS
+ * @see ACAMERA_SENSOR_FRAME_DURATION
+ */
+ ACAMERA_HEIC_AVAILABLE_HEIC_ULTRA_HDR_MIN_FRAME_DURATIONS = // int64[4*n]
+ ACAMERA_HEIC_START + 7,
+ /**
+ * <p>This lists the maximum stall duration for each
+ * output format/size combination for HEIC UltraHDR streams.</p>
+ *
+ * <p>Type: int64[4*n]</p>
+ *
+ * <p>This tag may appear in:
+ * <ul>
+ * <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+ * </ul></p>
+ *
+ * <p>A stall duration is how much extra time would get added
+ * to the normal minimum frame duration for a repeating request
+ * that has streams with non-zero stall.</p>
+ * <p>This functions similarly to
+ * ACAMERA_SCALER_AVAILABLE_STALL_DURATIONS for HEIC UltraHDR
+ * streams.</p>
+ * <p>All HEIC output stream formats may have a nonzero stall
+ * duration.</p>
+ *
+ * @see ACAMERA_SCALER_AVAILABLE_STALL_DURATIONS
+ */
+ ACAMERA_HEIC_AVAILABLE_HEIC_ULTRA_HDR_STALL_DURATIONS = // int64[4*n]
+ ACAMERA_HEIC_START + 8,
+ /**
+ * <p>The available HEIC (ISO/IEC 23008-12/24) UltraHDR stream
+ * configurations that this camera device supports
+ * (i.e. format, width, height, output/input stream) for CaptureRequests where
+ * ACAMERA_SENSOR_PIXEL_MODE is set to
+ * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION">CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION</a>.</p>
+ *
+ * @see ACAMERA_SENSOR_PIXEL_MODE
+ *
+ * <p>Type: int32[n*4] (acamera_metadata_enum_android_heic_available_heic_ultra_hdr_stream_configurations_maximum_resolution_t)</p>
+ *
+ * <p>This tag may appear in:
+ * <ul>
+ * <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+ * </ul></p>
+ *
+ * <p>Refer to ACAMERA_HEIC_AVAILABLE_HEIC_STREAM_CONFIGURATIONS for details.</p>
+ * <p>All the configuration tuples <code>(format, width, height, input?)</code> will contain
+ * AIMAGE_FORMAT_HEIC format as OUTPUT only.</p>
+ *
+ * @see ACAMERA_HEIC_AVAILABLE_HEIC_STREAM_CONFIGURATIONS
+ */
+ ACAMERA_HEIC_AVAILABLE_HEIC_ULTRA_HDR_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION =
+ // int32[n*4] (acamera_metadata_enum_android_heic_available_heic_ultra_hdr_stream_configurations_maximum_resolution_t)
+ ACAMERA_HEIC_START + 9,
+ /**
+ * <p>This lists the minimum frame duration for each
+ * format/size combination for HEIC UltraHDR output formats for CaptureRequests where
+ * ACAMERA_SENSOR_PIXEL_MODE is set to
+ * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION">CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION</a>.</p>
+ *
+ * @see ACAMERA_SENSOR_PIXEL_MODE
+ *
+ * <p>Type: int64[4*n]</p>
+ *
+ * <p>This tag may appear in:
+ * <ul>
+ * <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+ * </ul></p>
+ *
+ * <p>Refer to ACAMERA_HEIC_AVAILABLE_HEIC_MIN_FRAME_DURATIONS for details.</p>
+ *
+ * @see ACAMERA_HEIC_AVAILABLE_HEIC_MIN_FRAME_DURATIONS
+ */
+ ACAMERA_HEIC_AVAILABLE_HEIC_ULTRA_HDR_MIN_FRAME_DURATIONS_MAXIMUM_RESOLUTION =
+ // int64[4*n]
+ ACAMERA_HEIC_START + 10,
+ /**
+ * <p>This lists the maximum stall duration for each
+ * output format/size combination for HEIC UltraHDR streams for CaptureRequests where
+ * ACAMERA_SENSOR_PIXEL_MODE is set to
+ * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION">CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION</a>.</p>
+ *
+ * @see ACAMERA_SENSOR_PIXEL_MODE
+ *
+ * <p>Type: int64[4*n]</p>
+ *
+ * <p>This tag may appear in:
+ * <ul>
+ * <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+ * </ul></p>
+ *
+ * <p>Refer to ACAMERA_HEIC_AVAILABLE_HEIC_STALL_DURATIONS for details.</p>
+ *
+ * @see ACAMERA_HEIC_AVAILABLE_HEIC_STALL_DURATIONS
+ */
+ ACAMERA_HEIC_AVAILABLE_HEIC_ULTRA_HDR_STALL_DURATIONS_MAXIMUM_RESOLUTION =
+ // int64[4*n]
+ ACAMERA_HEIC_START + 11,
ACAMERA_HEIC_END,
/**
@@ -7949,6 +8280,33 @@
ACAMERA_AUTOMOTIVE_LENS_END,
/**
+ * <p>Indicates when to activate Night Mode Camera Extension for high-quality
+ * still captures in low-light conditions.</p>
+ *
+ * <p>Type: int32 (acamera_metadata_enum_android_extension_night_mode_indicator_t)</p>
+ *
+ * <p>This tag may appear in:
+ * <ul>
+ * <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
+ * </ul></p>
+ *
+ * <p>Provides awareness to the application when the current scene can benefit from using a
+ * Night Mode Camera Extension to take a high-quality photo.</p>
+ * <p>Support for this capture result can be queried via
+ * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraCharacteristics.html#getAvailableCaptureResultKeys">CameraCharacteristics#getAvailableCaptureResultKeys</a>.</p>
+ * <p>If the device supports this capability then it will also support
+ * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraExtensionCharacteristics.html#EXTENSION_NIGHT">NIGHT</a>,
+ * and this result will be available in both regular
+ * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraCaptureSession.html">capture sessions</a> and
+ * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraExtensionSession.html">extension sessions</a>.</p>
+ * <p>The value will be {@code UNKNOWN} in the following auto exposure modes: ON_AUTO_FLASH,
+ * ON_ALWAYS_FLASH, ON_AUTO_FLASH_REDEYE, or ON_EXTERNAL_FLASH.</p>
+ */
+ ACAMERA_EXTENSION_NIGHT_MODE_INDICATOR = // int32 (acamera_metadata_enum_android_extension_night_mode_indicator_t)
+ ACAMERA_EXTENSION_START + 2,
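On the result path this is a single int32 read; a sketch of consuming it inside a capture-result callback (illustrative; the enum values are defined later in this header):

```cpp
#include <camera/NdkCameraMetadata.h>

// Sketch: decide whether to surface a "switch to Night Mode" hint from a result.
static bool shouldSuggestNightMode(const ACameraMetadata* result) {
    ACameraMetadata_const_entry indicator;
    return ACameraMetadata_getConstEntry(result,
                   ACAMERA_EXTENSION_NIGHT_MODE_INDICATOR, &indicator) == ACAMERA_OK &&
           indicator.count == 1 &&
           indicator.data.i32[0] == ACAMERA_EXTENSION_NIGHT_MODE_INDICATOR_ON;
}
```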
+ ACAMERA_EXTENSION_END,
+
+ /**
* <p>The available Jpeg/R stream
* configurations that this camera device supports
* (i.e. format, width, height, output/input stream).</p>
@@ -8137,6 +8495,20 @@
*/
ACAMERA_COLOR_CORRECTION_MODE_HIGH_QUALITY = 2,
+ /**
+ * <p>Use
+ * ACAMERA_COLOR_CORRECTION_COLOR_TEMPERATURE and
+ * ACAMERA_COLOR_CORRECTION_COLOR_TINT to adjust the white balance based
+ * on correlated color temperature.</p>
+ * <p>If AWB is enabled with <code>ACAMERA_CONTROL_AWB_MODE != OFF</code>, then
+ * CCT is ignored.</p>
+ *
+ * @see ACAMERA_COLOR_CORRECTION_COLOR_TEMPERATURE
+ * @see ACAMERA_COLOR_CORRECTION_COLOR_TINT
+ * @see ACAMERA_CONTROL_AWB_MODE
+ */
+ ACAMERA_COLOR_CORRECTION_MODE_CCT = 3,
+
} acamera_metadata_enum_android_color_correction_mode_t;
// ACAMERA_COLOR_CORRECTION_ABERRATION_MODE
@@ -9378,6 +9750,42 @@
} acamera_metadata_enum_android_control_low_light_boost_state_t;
+// ACAMERA_CONTROL_AE_PRIORITY_MODE
+typedef enum acamera_metadata_enum_acamera_control_ae_priority_mode {
+ /**
+ * <p>Disable AE priority mode. This is the default value.</p>
+ */
+ ACAMERA_CONTROL_AE_PRIORITY_MODE_OFF = 0,
+
+ /**
+ * <p>The camera device's auto-exposure routine is active and
+ * prioritizes the application-selected ISO (ACAMERA_SENSOR_SENSITIVITY).</p>
+ * <p>The application has control over ACAMERA_SENSOR_SENSITIVITY while
+ * the application's values for ACAMERA_SENSOR_EXPOSURE_TIME and
+ * ACAMERA_SENSOR_FRAME_DURATION are ignored.</p>
+ *
+ * @see ACAMERA_SENSOR_EXPOSURE_TIME
+ * @see ACAMERA_SENSOR_FRAME_DURATION
+ * @see ACAMERA_SENSOR_SENSITIVITY
+ */
+ ACAMERA_CONTROL_AE_PRIORITY_MODE_SENSOR_SENSITIVITY_PRIORITY = 1,
+
+ /**
+ * <p>The camera device's auto-exposure routine is active and
+ * prioritizes the application-selected exposure time
+ * (ACAMERA_SENSOR_EXPOSURE_TIME).</p>
+ * <p>The application has control over ACAMERA_SENSOR_EXPOSURE_TIME while
+ * the application's values for ACAMERA_SENSOR_SENSITIVITY and
+ * ACAMERA_SENSOR_FRAME_DURATION are ignored.</p>
+ *
+ * @see ACAMERA_SENSOR_EXPOSURE_TIME
+ * @see ACAMERA_SENSOR_FRAME_DURATION
+ * @see ACAMERA_SENSOR_SENSITIVITY
+ */
+ ACAMERA_CONTROL_AE_PRIORITY_MODE_SENSOR_EXPOSURE_TIME_PRIORITY = 2,
+
+} acamera_metadata_enum_android_control_ae_priority_mode_t;
+
// ACAMERA_EDGE_MODE
@@ -11418,6 +11826,26 @@
} acamera_metadata_enum_android_heic_available_heic_stream_configurations_maximum_resolution_t;
+// ACAMERA_HEIC_AVAILABLE_HEIC_ULTRA_HDR_STREAM_CONFIGURATIONS
+typedef enum acamera_metadata_enum_acamera_heic_available_heic_ultra_hdr_stream_configurations {
+ ACAMERA_HEIC_AVAILABLE_HEIC_ULTRA_HDR_STREAM_CONFIGURATIONS_OUTPUT
+ = 0,
+
+ ACAMERA_HEIC_AVAILABLE_HEIC_ULTRA_HDR_STREAM_CONFIGURATIONS_INPUT
+ = 1,
+
+} acamera_metadata_enum_android_heic_available_heic_ultra_hdr_stream_configurations_t;
+
+// ACAMERA_HEIC_AVAILABLE_HEIC_ULTRA_HDR_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION
+typedef enum acamera_metadata_enum_acamera_heic_available_heic_ultra_hdr_stream_configurations_maximum_resolution {
+ ACAMERA_HEIC_AVAILABLE_HEIC_ULTRA_HDR_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION_OUTPUT
+ = 0,
+
+ ACAMERA_HEIC_AVAILABLE_HEIC_ULTRA_HDR_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION_INPUT
+ = 1,
+
+} acamera_metadata_enum_android_heic_available_heic_ultra_hdr_stream_configurations_maximum_resolution_t;
+
// ACAMERA_AUTOMOTIVE_LOCATION
@@ -11581,6 +12009,33 @@
} acamera_metadata_enum_android_automotive_lens_facing_t;
+// ACAMERA_EXTENSION_NIGHT_MODE_INDICATOR
+typedef enum acamera_metadata_enum_acamera_extension_night_mode_indicator {
+ /**
+ * <p>The camera can't accurately assess the scene's lighting to determine if a Night Mode
+ * Camera Extension capture would improve the photo. This can happen when the current
+ * camera configuration doesn't support night mode indicator detection, such as when
+ * the auto exposure mode is ON_AUTO_FLASH, ON_ALWAYS_FLASH, ON_AUTO_FLASH_REDEYE, or
+ * ON_EXTERNAL_FLASH.</p>
+ */
+ ACAMERA_EXTENSION_NIGHT_MODE_INDICATOR_UNKNOWN = 0,
+
+ /**
+ * <p>The camera has detected lighting conditions that are sufficiently bright. The
+ * Night Mode Camera Extension is available but may not be able to optimize the camera
+ * settings to take a higher quality photo.</p>
+ */
+ ACAMERA_EXTENSION_NIGHT_MODE_INDICATOR_OFF = 1,
+
+ /**
+ * <p>The camera has detected low-light conditions. It is recommended to use Night Mode
+ * Camera Extension to optimize the camera settings to take a high-quality photo in
+ * the dark.</p>
+ */
+ ACAMERA_EXTENSION_NIGHT_MODE_INDICATOR_ON = 2,
+
+} acamera_metadata_enum_android_extension_night_mode_indicator_t;
+
// ACAMERA_JPEGR_AVAILABLE_JPEG_R_STREAM_CONFIGURATIONS
typedef enum acamera_metadata_enum_acamera_jpegr_available_jpeg_r_stream_configurations {
diff --git a/camera/ndk/ndk_vendor/impl/ACameraDevice.cpp b/camera/ndk/ndk_vendor/impl/ACameraDevice.cpp
index 3325da6..9ed8197 100644
--- a/camera/ndk/ndk_vendor/impl/ACameraDevice.cpp
+++ b/camera/ndk/ndk_vendor/impl/ACameraDevice.cpp
@@ -1684,8 +1684,9 @@
__FUNCTION__, burstId, cbh.mRequests.size());
dev->setCameraDeviceErrorLocked(ACAMERA_ERROR_CAMERA_SERVICE);
}
+
sp<CaptureRequest> request = cbh.mRequests[burstId];
- ALOGE("%s: request = %p", __FUNCTION__, request.get());
+ ALOGV("%s: request = %p", __FUNCTION__, request.get());
sp<AMessage> msg = nullptr;
if (v2Callback) {
msg = new AMessage(kWhatCaptureStart2, dev->mHandler);
diff --git a/include/media/MmapStreamCallback.h b/include/media/MmapStreamCallback.h
index 76ee6d7..a3876d9 100644
--- a/include/media/MmapStreamCallback.h
+++ b/include/media/MmapStreamCallback.h
@@ -17,6 +17,7 @@
#ifndef ANDROID_AUDIO_MMAP_STREAM_CALLBACK_H
#define ANDROID_AUDIO_MMAP_STREAM_CALLBACK_H
+#include <media/AudioContainers.h>
#include <system/audio.h>
#include <utils/Errors.h>
#include <utils/RefBase.h>
@@ -42,10 +43,10 @@
virtual void onVolumeChanged(float volume) = 0;
/**
- * The device the stream is routed to/from has changed
- * \param[in] onRoutingChanged the unique device ID of the new device.
+ * The devices the stream is routed to/from have changed.
+ * \param[in] deviceIds a set of the device IDs of the new devices.
*/
- virtual void onRoutingChanged(audio_port_handle_t deviceId) = 0;
+ virtual void onRoutingChanged(const DeviceIdVector& deviceIds) = 0;
protected:
MmapStreamCallback() {}
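For callers migrating to the new signature, a minimal sketch of handling the vector payload, assuming DeviceIdVector is the std::vector<audio_port_handle_t> alias from <media/AudioContainers.h> (the helper name here is illustrative, not from the patch):

```cpp
#include <string>
#include <vector>
#include <system/audio.h>

// Assumed alias, matching what <media/AudioContainers.h> provides.
using DeviceIdVector = std::vector<audio_port_handle_t>;

// An onRoutingChanged(const DeviceIdVector&) override can no longer assume a
// single route; format every device ID, e.g. for logging.
static std::string deviceIdsToString(const DeviceIdVector& deviceIds) {
    std::string out;
    for (audio_port_handle_t id : deviceIds) {
        if (!out.empty()) out += ", ";
        out += std::to_string(id);
    }
    return out.empty() ? "<none>" : out;
}
```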
diff --git a/include/media/MmapStreamInterface.h b/include/media/MmapStreamInterface.h
index 7725175..3d29335 100644
--- a/include/media/MmapStreamInterface.h
+++ b/include/media/MmapStreamInterface.h
@@ -19,6 +19,7 @@
#include <system/audio.h>
#include <media/AudioClient.h>
+#include <media/AudioContainers.h>
#include <utils/Errors.h>
#include <utils/RefBase.h>
@@ -51,9 +52,10 @@
* Requested parameters as input,
* Actual parameters as output
* \param[in] client a AudioClient struct describing the first client using this stream.
- * \param[in,out] deviceId audio device the stream should preferably be routed to/from
- * Requested as input,
- * Actual as output
+ * \param[in,out] deviceIds audio devices the stream should preferably be routed to/from.
+ * Leave empty if there are no preferred devices.
+ * Requested as input,
+ * Actual as output
* \param[in,out] sessionId audio sessionId for the stream
* Requested as input, may be AUDIO_SESSION_ALLOCATE
* Actual as output
@@ -70,7 +72,7 @@
const audio_attributes_t *attr,
audio_config_base_t *config,
const AudioClient& client,
- audio_port_handle_t *deviceId,
+ DeviceIdVector *deviceIds,
audio_session_t *sessionId,
const sp<MmapStreamCallback>& callback,
sp<MmapStreamInterface>& interface,
diff --git a/media/aconfig/Android.bp b/media/aconfig/Android.bp
index 16beb28..1e5eafb 100644
--- a/media/aconfig/Android.bp
+++ b/media/aconfig/Android.bp
@@ -50,3 +50,22 @@
],
aconfig_declarations: "aconfig_codec_fwk_flags",
}
+
+aconfig_declarations {
+ name: "aconfig_media_swcodec_flags",
+ package: "android.media.swcodec.flags",
+ container: "com.android.media.swcodec",
+ srcs: ["swcodec_flags.aconfig"],
+}
+
+cc_aconfig_library {
+ name: "android.media.swcodec.flags-aconfig-cc",
+ aconfig_declarations: "aconfig_media_swcodec_flags",
+ min_sdk_version: "apex_inherit",
+ vendor_available: true,
+ double_loadable: true,
+ apex_available: [
+ "//apex_available:platform",
+ "com.android.media.swcodec",
+ ],
+}
diff --git a/media/aconfig/codec_fwk.aconfig b/media/aconfig/codec_fwk.aconfig
index b5c7edf..c820a2c 100644
--- a/media/aconfig/codec_fwk.aconfig
+++ b/media/aconfig/codec_fwk.aconfig
@@ -21,6 +21,13 @@
}
flag {
+ name: "codec_availability"
+ namespace: "codec_fwk"
+ description: "Feature flag for codec availability HAL API support"
+ bug: "363282971"
+}
+
+flag {
name: "codec_buffer_state_cleanup"
namespace: "codec_fwk"
description: "Bugfix flag for more buffer state cleanup in MediaCodec"
@@ -111,6 +118,13 @@
}
flag {
+ name: "num_input_slots"
+ namespace: "codec_fwk"
+ description: "Feature flag for exposing number of input slots"
+ bug: "159891571"
+}
+
+flag {
name: "p210_format_support"
is_exported: true
namespace: "codec_fwk"
@@ -181,6 +195,13 @@
}
flag {
+ name: "subsession_metrics"
+ namespace: "codec_fwk"
+ description: "Feature flag for subsession codec metrics"
+ bug: "363382811"
+}
+
+flag {
name: "teamfood"
namespace: "codec_fwk"
description: "Feature flag to track teamfood population"
diff --git a/media/aconfig/swcodec_flags.aconfig b/media/aconfig/swcodec_flags.aconfig
new file mode 100644
index 0000000..a435a43
--- /dev/null
+++ b/media/aconfig/swcodec_flags.aconfig
@@ -0,0 +1,14 @@
+# Media SW Codec Flags.
+#
+# !!! Please add flags in alphabetical order. !!!
+package: "android.media.swcodec.flags"
+container: "com.android.media.swcodec"
+
+flag {
+ name: "apv_software_codec"
+ is_exported: true
+ is_fixed_read_only: true
+ namespace: "codec_fwk"
+ description: "Feature flag for APV Software C2 codec"
+ bug: "376770121"
+}
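Native code that links the cc_aconfig_library above (as the APV codec targets later in this change do) gates on the generated accessor; a sketch assuming the standard aconfig C++ codegen naming:

```cpp
#include <android_media_swcodec_flags.h>  // generated from aconfig_media_swcodec_flags

// Sketch: only enable the APV software codec path when the flag is on.
// The accessor path follows aconfig codegen conventions for the package
// "android.media.swcodec.flags"; treat it as an assumption, not API reference.
static bool apvSoftwareCodecEnabled() {
    return android::media::swcodec::flags::apv_software_codec();
}
```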
diff --git a/media/audio/aconfig/Android.bp b/media/audio/aconfig/Android.bp
index 2da6758..cab126f 100644
--- a/media/audio/aconfig/Android.bp
+++ b/media/audio/aconfig/Android.bp
@@ -120,6 +120,7 @@
"//frameworks/base/api",
"//frameworks/base/core/res",
],
+ exportable: true,
}
aconfig_declarations {
@@ -152,6 +153,20 @@
}
java_aconfig_library {
+ name: "android.media.audio-aconfig-exported-java",
+ aconfig_declarations: "android.media.audio-aconfig",
+ defaults: ["framework-minus-apex-aconfig-java-defaults"],
+ min_sdk_version: "Tiramisu",
+ mode: "exported",
+ apex_available: [
+ "com.android.btservices",
+ ],
+ visibility: [
+ "//packages/modules/Bluetooth:__subpackages__",
+ ],
+}
+
+java_aconfig_library {
name: "android.media.audiopolicy-aconfig-java",
aconfig_declarations: "android.media.audiopolicy-aconfig",
defaults: ["framework-minus-apex-aconfig-java-defaults"],
diff --git a/media/audio/aconfig/README.md b/media/audio/aconfig/README.md
index 8ce1259..83370fe 100644
--- a/media/audio/aconfig/README.md
+++ b/media/audio/aconfig/README.md
@@ -126,11 +126,13 @@
### TestApis
-TestApis do not require flagging, since their existence in the tree implies that they should
-be accessible to callers (xTS not building on trunk enables this).
-
+TestApis do not require flagging, unless they are API additions associated with new features.
+For testing existing features, we have full control over the set of callers.
### Api Changes
-Currently, the flag infra does not support any type of Api modification (arguments, annotation,
-renaming, deletion, etc.) In any of these cases (including for SystemApi), exceptions will need to
-be granted.
+There is partial (work ongoing) support for modifying API surfaces:
+ - SystemApi -> public is supported
+ - UAU -> SystemApi is supported, but the @UAU annotation must remain until the flag is in `next`
+Other modifications (moving between surfaces, annotation changes, etc.) may not be supported:
+check the [FAQ](https://g3doc.corp.google.com/company/teams/android-api-council/guidelines/faq.md?cl=head#i-cannot-use-flaggedapi-with-data-classes-generated-by-codegen)
+for the up-to-date list of supported changes.
diff --git a/media/audio/aconfig/aaudio.aconfig b/media/audio/aconfig/aaudio.aconfig
index f9fb4c7..7896a75 100644
--- a/media/audio/aconfig/aaudio.aconfig
+++ b/media/audio/aconfig/aaudio.aconfig
@@ -6,6 +6,13 @@
container: "system"
flag {
+ name: "offload_support"
+ namespace: "media_audio"
+ description: "Enable offload support in AAudio."
+ bug: "372041799"
+}
+
+flag {
name: "sample_rate_conversion"
namespace: "media_audio"
description: "Enable the AAudio sample rate converter."
diff --git a/media/audio/aconfig/audio.aconfig b/media/audio/aconfig/audio.aconfig
index c732708..fe53824 100644
--- a/media/audio/aconfig/audio.aconfig
+++ b/media/audio/aconfig/audio.aconfig
@@ -35,6 +35,13 @@
}
flag {
+ name: "audio_eraser_effect"
+ namespace: "media_audio"
+ description: "Enable audio eraser effect"
+ bug: "367667349"
+}
+
+flag {
name: "bluetooth_mac_address_anonymization"
namespace: "media_audio"
description:
@@ -69,6 +76,22 @@
}
flag {
+ name: "hardening_impl"
+ is_exported: true
+ namespace: "media_audio"
+ description: "Flag for overall implementation of hardening"
+ bug: "376480814"
+}
+
+flag {
+ name: "hardening_strict"
+ is_exported: true
+ namespace: "media_audio"
+ description: "Flag for strict enforcement (deny access) of hardening"
+ bug: "376480814"
+}
+
+flag {
name: "music_fx_edge_to_edge"
namespace: "media_audio"
description: "Enable Edge-to-edge feature for MusicFx and handle insets"
diff --git a/media/audio/aconfig/audio_framework.aconfig b/media/audio/aconfig/audio_framework.aconfig
index 084caf9..ae7eb36 100644
--- a/media/audio/aconfig/audio_framework.aconfig
+++ b/media/audio/aconfig/audio_framework.aconfig
@@ -32,6 +32,25 @@
bug: "374751406"
}
+flag {
+ name: "deprecate_stream_bt_sco"
+ namespace: "media_audio"
+ description: "Deprecate STREAM_BLUETOOTH_SCO"
+ is_exported: true
+ bug: "376756660"
+}
+
+flag {
+ name: "enable_multichannel_group_device"
+ namespace: "media_audio"
+ description:
+ "Enable new audio device type for wireless connected speaker group"
+ "supporting multichannel content."
+ is_exported: true
+ is_fixed_read_only: true
+ bug: "344031109"
+}
+
flag{
name: "enable_ringtone_haptics_customization"
namespace: "media_audio"
@@ -93,6 +112,22 @@
}
flag {
+ name: "hardening_permission_api"
+ is_exported: true
+ namespace: "media_audio"
+ description: "API flag for additional appop/perm constructs for hardening."
+ bug: "376480814"
+}
+
+flag {
+ name: "iamf_definitions_api"
+ is_exported: true
+ namespace: "media_audio"
+ description: "API definitions for the IAMF format"
+ bug: "337522902"
+}
+
+flag {
name: "loudness_configurator_api"
is_exported: true
namespace: "media_audio"
@@ -164,12 +199,33 @@
}
flag {
+ name: "spatial_audio_settings_versioning"
+ namespace: "media_audio"
+ description: "introduce versioning of spatial audio settings"
+ bug: "377977731"
+}
+
+flag {
+ name: "spatializer_capabilities"
+ namespace: "media_audio"
+ description: "spatializer reports effective channel masks"
+ bug: "377582613"
+}
+
+flag {
name: "speaker_cleanup_usage"
namespace: "media_audio"
description: "Support new AudioAttributes usage for speaker cleanup"
bug: "355050846"
}
+flag {
+ name: "speaker_layout_api"
+ namespace: "media_audio"
+ description: "Surface new API method for returning speaker layout channel mask for devices"
+ bug: "337522902"
+}
+
# TODO remove
flag {
name: "volume_ringer_api_hardening"
diff --git a/media/audioaidlconversion/AidlConversionCppNdk.cpp b/media/audioaidlconversion/AidlConversionCppNdk.cpp
index cdcce08..95a8a69 100644
--- a/media/audioaidlconversion/AidlConversionCppNdk.cpp
+++ b/media/audioaidlconversion/AidlConversionCppNdk.cpp
@@ -480,6 +480,11 @@
AudioDeviceType::OUT_BROADCAST,
GET_DEVICE_DESC_CONNECTION(BT_LE))
},
+ {
+ AUDIO_DEVICE_OUT_MULTICHANNEL_GROUP, make_AudioDeviceDescription(
+ AudioDeviceType::OUT_MULTICHANNEL_GROUP,
+ GET_DEVICE_DESC_CONNECTION(VIRTUAL))
+ },
// AUDIO_DEVICE_IN_AMBIENT and IN_COMMUNICATION are removed since they were deprecated.
{
AUDIO_DEVICE_IN_BUILTIN_MIC, make_AudioDeviceDescription(
@@ -1797,6 +1802,8 @@
return AUDIO_USAGE_VEHICLE_STATUS;
case AudioUsage::ANNOUNCEMENT:
return AUDIO_USAGE_ANNOUNCEMENT;
+ case AudioUsage::SPEAKER_CLEANUP:
+ return AUDIO_USAGE_SPEAKER_CLEANUP;
}
return unexpected(BAD_VALUE);
}
@@ -1848,6 +1855,8 @@
return AudioUsage::VEHICLE_STATUS;
case AUDIO_USAGE_ANNOUNCEMENT:
return AudioUsage::ANNOUNCEMENT;
+ case AUDIO_USAGE_SPEAKER_CLEANUP:
+ return AudioUsage::SPEAKER_CLEANUP;
}
return unexpected(BAD_VALUE);
}
@@ -2343,6 +2352,15 @@
audio_port_config_device_ext legacy{};
RETURN_IF_ERROR(aidl2legacy_AudioDevice_audio_device(
aidl.device, &legacy.type, legacy.address));
+ const bool isInput = false; // speaker_layout_channel_mask only represents output.
+ if (aidl.speakerLayout.has_value()) {
+ legacy.speaker_layout_channel_mask =
+ VALUE_OR_RETURN(aidl2legacy_AudioChannelLayout_audio_channel_mask_t(
+ aidl.speakerLayout.value(), isInput));
+ } else {
+ // Default to none when the field is null in the AIDL.
+ legacy.speaker_layout_channel_mask = AUDIO_CHANNEL_NONE;
+ }
return legacy;
}
@@ -2351,6 +2369,14 @@
AudioPortDeviceExt aidl;
aidl.device = VALUE_OR_RETURN(
legacy2aidl_audio_device_AudioDevice(legacy.type, legacy.address));
+ const bool isInput = false; // speaker_layout_channel_mask only represents output.
+ // The AIDL speakerLayout is nullable and if set, can only be a layoutMask.
+ if (audio_channel_mask_is_valid(legacy.speaker_layout_channel_mask) &&
+ audio_channel_mask_get_representation(legacy.speaker_layout_channel_mask) ==
+ AUDIO_CHANNEL_REPRESENTATION_POSITION) {
+ aidl.speakerLayout = VALUE_OR_RETURN(legacy2aidl_audio_channel_mask_t_AudioChannelLayout(
+ legacy.speaker_layout_channel_mask, isInput));
+ }
return aidl;
}
diff --git a/media/codec2/components/apv/Android.bp b/media/codec2/components/apv/Android.bp
new file mode 100644
index 0000000..f565978
--- /dev/null
+++ b/media/codec2/components/apv/Android.bp
@@ -0,0 +1,58 @@
+package {
+ // See: http://go/android-license-faq
+ // A large-scale-change added 'default_applicable_licenses' to import
+ // all of the 'license_kinds' from "frameworks_av_license"
+ // to get the below license kinds:
+ // SPDX-license-identifier-Apache-2.0
+ default_applicable_licenses: ["frameworks_av_license"],
+}
+
+cc_library {
+ enabled: true,
+ name: "libcodec2_soft_apvenc",
+ defaults: [
+ "libcodec2_soft-defaults",
+ "libcodec2_soft_sanitize_signed-defaults",
+ "libcodec2_soft_sanitize_cfi-defaults",
+ ],
+
+ static_libs: [
+ "libopenapv",
+ "android.media.swcodec.flags-aconfig-cc",
+ ],
+
+ srcs: ["C2SoftApvEnc.cpp"],
+
+ cflags: [
+ "-DOAPV_STATIC_DEFINE",
+ "-Wno-unused-variable",
+ "-Wno-unused-parameter",
+ "-Wno-unused-function",
+ "-Wno-reorder-ctor",
+ ],
+}
+
+cc_library {
+ enabled: true,
+ name: "libcodec2_soft_apvdec",
+ defaults: [
+ "libcodec2_soft-defaults",
+ "libcodec2_soft_sanitize_signed-defaults",
+ "libcodec2_soft_sanitize_cfi-defaults",
+ ],
+
+ static_libs: [
+ "libopenapv",
+ "android.media.swcodec.flags-aconfig-cc",
+ ],
+
+ srcs: ["C2SoftApvDec.cpp"],
+
+ cflags: [
+ "-DOAPV_STATIC_DEFINE",
+ "-Wno-unused-variable",
+ "-Wno-unused-parameter",
+ "-Wno-unused-function",
+ "-Wno-reorder-ctor",
+ ],
+}
diff --git a/media/codec2/components/apv/C2SoftApvCommon.h b/media/codec2/components/apv/C2SoftApvCommon.h
new file mode 100644
index 0000000..9325f28
--- /dev/null
+++ b/media/codec2/components/apv/C2SoftApvCommon.h
@@ -0,0 +1,169 @@
+/*
+ * Copyright 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_C2_SOFT_APV_COMMON_H__
+#define ANDROID_C2_SOFT_APV_COMMON_H__
+
+typedef enum {
+ PIX_CHROMA_NA = 0xFFFFFFFF,
+ PIX_YUV_420P = 0x1,
+ PIX_YUV_422P = 0x2,
+ PIX_420_UV_INTL = 0x3,
+ PIX_YUV_422IBE = 0x4,
+ PIX_YUV_422ILE = 0x5,
+ PIX_YUV_444P = 0x6,
+ PIX_YUV_411P = 0x7,
+ PIX_GRAY = 0x8,
+ PIX_RGB_565 = 0x9,
+ PIX_RGB_24 = 0xa,
+ PIX_YUV_420SP_UV = 0xb,
+ PIX_YUV_420SP_VU = 0xc,
+ PIX_YUV_422SP_UV = 0xd,
+ PIX_YUV_422SP_VU = 0xe
+} PIX_COLOR_FORMAT_T;
+
+#define CLIP_VAL(n, min, max) (((n) > (max)) ? (max) : (((n) < (min)) ? (min) : (n)))
+#define ALIGN_VAL(val, align) ((((val) + (align) - 1) / (align)) * (align))
+
+static int atomic_inc(volatile int* pcnt) {
+    // Use a real atomic read-modify-write: a plain increment through a volatile
+    // pointer is not atomic, so concurrent refcount updates could be lost.
+    return __atomic_add_fetch(pcnt, 1, __ATOMIC_SEQ_CST);
+}
+
+static int atomic_dec(volatile int* pcnt) {
+    return __atomic_sub_fetch(pcnt, 1, __ATOMIC_SEQ_CST);
+}
+
+/* Function to allocate memory for a picture buffer.
+   This function might need to be modified for the target OS or CPU platform.
+*/
+static void* picbuf_alloc(int size) {
+ return malloc(size);
+}
+
+/* Function to free memory allocated for a picture buffer.
+   This function might need to be modified for the target OS or CPU platform.
+*/
+static void picbuf_free(void* p) {
+ if (p) {
+ free(p);
+ }
+}
+
+static int imgb_addref(oapv_imgb_t* imgb) {
+ return atomic_inc(&imgb->refcnt);
+}
+
+static int imgb_getref(oapv_imgb_t* imgb) {
+ return imgb->refcnt;
+}
+
+static int imgb_release(oapv_imgb_t* imgb) {
+ int refcnt, i;
+ refcnt = atomic_dec(&imgb->refcnt);
+ if (refcnt == 0) {
+ for (i = 0; i < OAPV_MAX_CC; i++) {
+ if (imgb->baddr[i]) picbuf_free(imgb->baddr[i]);
+ }
+ free(imgb);
+ }
+ return refcnt;
+}
+
+static oapv_imgb_t* imgb_create(int w, int h, int cs) {
+ int i, bd;
+ oapv_imgb_t* imgb;
+
+ imgb = (oapv_imgb_t*)malloc(sizeof(oapv_imgb_t));
+ if (imgb == NULL) goto ERR;
+ memset(imgb, 0, sizeof(oapv_imgb_t));
+
+ bd = OAPV_CS_GET_BYTE_DEPTH(cs); /* byte unit */
+
+ imgb->w[0] = w;
+ imgb->h[0] = h;
+ switch (OAPV_CS_GET_FORMAT(cs)) {
+ case OAPV_CF_YCBCR400:
+ imgb->w[1] = imgb->w[2] = w;
+ imgb->h[1] = imgb->h[2] = h;
+ imgb->np = 1;
+ break;
+ case OAPV_CF_YCBCR420:
+ imgb->w[1] = imgb->w[2] = (w + 1) >> 1;
+ imgb->h[1] = imgb->h[2] = (h + 1) >> 1;
+ imgb->np = 3;
+ break;
+ case OAPV_CF_YCBCR422:
+ imgb->w[1] = imgb->w[2] = (w + 1) >> 1;
+ imgb->h[1] = imgb->h[2] = h;
+ imgb->np = 3;
+ break;
+ case OAPV_CF_YCBCR444:
+ imgb->w[1] = imgb->w[2] = w;
+ imgb->h[1] = imgb->h[2] = h;
+ imgb->np = 3;
+ break;
+ case OAPV_CF_YCBCR4444:
+ imgb->w[1] = imgb->w[2] = imgb->w[3] = w;
+ imgb->h[1] = imgb->h[2] = imgb->h[3] = h;
+ imgb->np = 4;
+ break;
+ case OAPV_CF_PLANAR2:
+ imgb->w[1] = w;
+ imgb->h[1] = h;
+ imgb->np = 2;
+ break;
+ default:
+ goto ERR;
+ }
+
+ for (i = 0; i < imgb->np; i++) {
+ // width and height need to be aligned to macroblock size
+ imgb->aw[i] = ALIGN_VAL(imgb->w[i], OAPV_MB_W);
+ imgb->s[i] = imgb->aw[i] * bd;
+ imgb->ah[i] = ALIGN_VAL(imgb->h[i], OAPV_MB_H);
+ imgb->e[i] = imgb->ah[i];
+
+ imgb->bsize[i] = imgb->s[i] * imgb->e[i];
+ imgb->a[i] = imgb->baddr[i] = picbuf_alloc(imgb->bsize[i]);
+ if (imgb->a[i] == NULL) goto ERR; // avoid memset on a failed allocation
+ memset(imgb->a[i], 0, imgb->bsize[i]);
+ }
+ imgb->cs = cs;
+ imgb->addref = imgb_addref;
+ imgb->getref = imgb_getref;
+ imgb->release = imgb_release;
+
+ imgb->addref(imgb); /* increase reference count */
+ return imgb;
+
+ERR:
+ if (imgb) {
+ for (int i = 0; i < OAPV_MAX_CC; i++) {
+ if (imgb->a[i]) picbuf_free(imgb->a[i]);
+ }
+ free(imgb);
+ }
+ return NULL;
+}
+
+#endif // ANDROID_C2_SOFT_APV_COMMON_H__
\ No newline at end of file
diff --git a/media/codec2/components/apv/C2SoftApvDec.cpp b/media/codec2/components/apv/C2SoftApvDec.cpp
new file mode 100644
index 0000000..0064cec
--- /dev/null
+++ b/media/codec2/components/apv/C2SoftApvDec.cpp
@@ -0,0 +1,1247 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "C2SoftApvDec"
+#include <log/log.h>
+
+#include <android_media_swcodec_flags.h>
+
+#include <media/stagefright/foundation/AUtils.h>
+#include <media/stagefright/foundation/MediaDefs.h>
+
+#include <C2Debug.h>
+#include <C2PlatformSupport.h>
+#include <Codec2BufferUtils.h>
+#include <Codec2CommonUtils.h>
+#include <Codec2Mapper.h>
+#include <SimpleC2Interface.h>
+#include "C2SoftApvDec.h"
+
+#include <cutils/properties.h>
+
+const char* MEDIA_MIMETYPE_VIDEO_APV = "video/apv";
+
+#define MAX_NUM_FRMS (1) // supports only 1-frame output
+#define FRM_IDX (0) // supports only 1-frame output
+// check generic frame or not
+#define IS_NON_AUX_FRM(frm) \
+ (((frm)->pbu_type == OAPV_PBU_TYPE_PRIMARY_FRAME) || \
+ ((frm)->pbu_type == OAPV_PBU_TYPE_NON_PRIMARY_FRAME))
+// check auxiliary frame or not
+#define IS_AUX_FRM(frm) (!(IS_NON_AUX_FRM(frm)))
+#define OUTPUT_CSP_NATIVE (0)
+#define OUTPUT_CSP_P210 (1)
+
+namespace android {
+namespace {
+constexpr char COMPONENT_NAME[] = "c2.android.apv.decoder";
+constexpr uint32_t kDefaultOutputDelay = 8;
+constexpr uint32_t kMaxOutputDelay = 16;
+constexpr size_t kMinInputBufferSize = 2 * 1024 * 1024;
+} // namespace
+
+class C2SoftApvDec::IntfImpl : public SimpleInterface<void>::BaseParams {
+ public:
+ explicit IntfImpl(const std::shared_ptr<C2ReflectorHelper>& helper)
+ : SimpleInterface<void>::BaseParams(helper, COMPONENT_NAME, C2Component::KIND_DECODER,
+ C2Component::DOMAIN_VIDEO, MEDIA_MIMETYPE_VIDEO_APV) {
+ noPrivateBuffers(); // TODO: account for our buffers here.
+ noInputReferences();
+ noOutputReferences();
+ noInputLatency();
+ noTimeStretch();
+
+ addParameter(DefineParam(mAttrib, C2_PARAMKEY_COMPONENT_ATTRIBUTES)
+ .withConstValue(new C2ComponentAttributesSetting(
+ C2Component::ATTRIB_IS_TEMPORAL))
+ .build());
+
+ addParameter(DefineParam(mSize, C2_PARAMKEY_PICTURE_SIZE)
+ .withDefault(new C2StreamPictureSizeInfo::output(0u, 320, 240))
+ .withFields({
+ C2F(mSize, width).inRange(2, 4096),
+ C2F(mSize, height).inRange(2, 4096),
+ })
+ .withSetter(SizeSetter)
+ .build());
+
+ addParameter(
+ DefineParam(mProfileLevel, C2_PARAMKEY_PROFILE_LEVEL)
+ .withDefault(new C2StreamProfileLevelInfo::input(
+ 0u, C2Config::PROFILE_APV_422_10))
+ .withFields(
+ {C2F(mProfileLevel, profile).oneOf({C2Config::PROFILE_APV_422_10}),
+ C2F(mProfileLevel, level)
+ .oneOf({
+ C2Config::LEVEL_APV_1_BAND_0,
+ C2Config::LEVEL_APV_1_1_BAND_0,
+ C2Config::LEVEL_APV_2_BAND_0,
+ C2Config::LEVEL_APV_2_1_BAND_0,
+ C2Config::LEVEL_APV_3_BAND_0,
+ C2Config::LEVEL_APV_3_1_BAND_0,
+ C2Config::LEVEL_APV_4_BAND_0,
+ C2Config::LEVEL_APV_4_1_BAND_0,
+ C2Config::LEVEL_APV_5_BAND_0,
+ C2Config::LEVEL_APV_5_1_BAND_0,
+ C2Config::LEVEL_APV_6_BAND_0,
+ C2Config::LEVEL_APV_6_1_BAND_0,
+ C2Config::LEVEL_APV_7_BAND_0,
+ C2Config::LEVEL_APV_7_1_BAND_0,
+ C2Config::LEVEL_APV_1_BAND_1,
+ C2Config::LEVEL_APV_1_1_BAND_1,
+ C2Config::LEVEL_APV_2_BAND_1,
+ C2Config::LEVEL_APV_2_1_BAND_1,
+ C2Config::LEVEL_APV_3_BAND_1,
+ C2Config::LEVEL_APV_3_1_BAND_1,
+ C2Config::LEVEL_APV_4_BAND_1,
+ C2Config::LEVEL_APV_4_1_BAND_1,
+ C2Config::LEVEL_APV_5_BAND_1,
+ C2Config::LEVEL_APV_5_1_BAND_1,
+ C2Config::LEVEL_APV_6_BAND_1,
+ C2Config::LEVEL_APV_6_1_BAND_1,
+ C2Config::LEVEL_APV_7_BAND_1,
+ C2Config::LEVEL_APV_7_1_BAND_1,
+ C2Config::LEVEL_APV_1_BAND_2,
+ C2Config::LEVEL_APV_1_1_BAND_2,
+ C2Config::LEVEL_APV_2_BAND_2,
+ C2Config::LEVEL_APV_2_1_BAND_2,
+ C2Config::LEVEL_APV_3_BAND_2,
+ C2Config::LEVEL_APV_3_1_BAND_2,
+ C2Config::LEVEL_APV_4_BAND_2,
+ C2Config::LEVEL_APV_4_1_BAND_2,
+ C2Config::LEVEL_APV_5_BAND_2,
+ C2Config::LEVEL_APV_5_1_BAND_2,
+ C2Config::LEVEL_APV_6_BAND_2,
+ C2Config::LEVEL_APV_6_1_BAND_2,
+ C2Config::LEVEL_APV_7_BAND_2,
+ C2Config::LEVEL_APV_7_1_BAND_2,
+ C2Config::LEVEL_APV_1_BAND_3,
+ C2Config::LEVEL_APV_1_1_BAND_3,
+ C2Config::LEVEL_APV_2_BAND_3,
+ C2Config::LEVEL_APV_2_1_BAND_3,
+ C2Config::LEVEL_APV_3_BAND_3,
+ C2Config::LEVEL_APV_3_1_BAND_3,
+ C2Config::LEVEL_APV_4_BAND_3,
+ C2Config::LEVEL_APV_4_1_BAND_3,
+ C2Config::LEVEL_APV_5_BAND_3,
+ C2Config::LEVEL_APV_5_1_BAND_3,
+ C2Config::LEVEL_APV_6_BAND_3,
+ C2Config::LEVEL_APV_6_1_BAND_3,
+ C2Config::LEVEL_APV_7_BAND_3,
+ C2Config::LEVEL_APV_7_1_BAND_3,
+ })})
+ .withSetter(ProfileLevelSetter, mSize)
+ .build());
+
+ mHdr10PlusInfoInput = C2StreamHdr10PlusInfo::input::AllocShared(0);
+ addParameter(DefineParam(mHdr10PlusInfoInput, C2_PARAMKEY_INPUT_HDR10_PLUS_INFO)
+ .withDefault(mHdr10PlusInfoInput)
+ .withFields({
+ C2F(mHdr10PlusInfoInput, m.value).any(),
+ })
+ .withSetter(Hdr10PlusInfoInputSetter)
+ .build());
+
+ mHdr10PlusInfoOutput = C2StreamHdr10PlusInfo::output::AllocShared(0);
+ addParameter(DefineParam(mHdr10PlusInfoOutput, C2_PARAMKEY_OUTPUT_HDR10_PLUS_INFO)
+ .withDefault(mHdr10PlusInfoOutput)
+ .withFields({
+ C2F(mHdr10PlusInfoOutput, m.value).any(),
+ })
+ .withSetter(Hdr10PlusInfoOutputSetter)
+ .build());
+
+ // default static info
+ C2HdrStaticMetadataStruct defaultStaticInfo{};
+ helper->addStructDescriptors<C2MasteringDisplayColorVolumeStruct, C2ColorXyStruct>();
+ addParameter(
+ DefineParam(mHdrStaticInfo, C2_PARAMKEY_HDR_STATIC_INFO)
+ .withDefault(new C2StreamHdrStaticInfo::output(0u, defaultStaticInfo))
+ .withFields({C2F(mHdrStaticInfo, mastering.red.x).inRange(0, 1),
+ C2F(mHdrStaticInfo, mastering.red.y).inRange(0, 1),
+ C2F(mHdrStaticInfo, mastering.green.x).inRange(0, 1),
+ C2F(mHdrStaticInfo, mastering.green.y).inRange(0, 1),
+ C2F(mHdrStaticInfo, mastering.blue.x).inRange(0, 1),
+ C2F(mHdrStaticInfo, mastering.blue.y).inRange(0, 1),
+ C2F(mHdrStaticInfo, mastering.white.x).inRange(0, 1),
+ C2F(mHdrStaticInfo, mastering.white.y).inRange(0, 1),
+ C2F(mHdrStaticInfo, mastering.maxLuminance).inRange(0, 65535),
+ C2F(mHdrStaticInfo, mastering.minLuminance).inRange(0, 6.5535),
+ C2F(mHdrStaticInfo, maxCll).inRange(0, 0XFFFF),
+ C2F(mHdrStaticInfo, maxFall).inRange(0, 0XFFFF)})
+ .withSetter(HdrStaticInfoSetter)
+ .build());
+
+ addParameter(DefineParam(mMaxSize, C2_PARAMKEY_MAX_PICTURE_SIZE)
+ .withDefault(new C2StreamMaxPictureSizeTuning::output(0u, 320, 240))
+ .withFields({
+ C2F(mSize, width).inRange(2, 4096, 2),
+ C2F(mSize, height).inRange(2, 4096, 2),
+ })
+ .withSetter(MaxPictureSizeSetter, mSize)
+ .build());
+
+ addParameter(
+ DefineParam(mMaxInputSize, C2_PARAMKEY_INPUT_MAX_BUFFER_SIZE)
+ .withDefault(new C2StreamMaxBufferSizeInfo::input(0u, kMinInputBufferSize))
+ .withFields({
+ C2F(mMaxInputSize, value).any(),
+ })
+ .calculatedAs(MaxInputSizeSetter, mMaxSize)
+ .build());
+
+ C2ChromaOffsetStruct locations[1] = {C2ChromaOffsetStruct::ITU_YUV_420_0()};
+ std::shared_ptr<C2StreamColorInfo::output> defaultColorInfo =
+ C2StreamColorInfo::output::AllocShared(1u, 0u, 8u /* bitDepth */, C2Color::YUV_420);
+ memcpy(defaultColorInfo->m.locations, locations, sizeof(locations));
+
+ defaultColorInfo = C2StreamColorInfo::output::AllocShared(
+ {C2ChromaOffsetStruct::ITU_YUV_420_0()}, 0u, 8u /* bitDepth */, C2Color::YUV_420);
+ helper->addStructDescriptors<C2ChromaOffsetStruct>();
+ addParameter(DefineParam(mColorInfo, C2_PARAMKEY_CODED_COLOR_INFO)
+ .withConstValue(defaultColorInfo)
+ .build());
+
+ addParameter(DefineParam(mDefaultColorAspects, C2_PARAMKEY_DEFAULT_COLOR_ASPECTS)
+ .withDefault(new C2StreamColorAspectsTuning::output(
+ 0u, C2Color::RANGE_UNSPECIFIED, C2Color::PRIMARIES_UNSPECIFIED,
+ C2Color::TRANSFER_UNSPECIFIED, C2Color::MATRIX_UNSPECIFIED))
+ .withFields({C2F(mDefaultColorAspects, range)
+ .inRange(C2Color::RANGE_UNSPECIFIED,
+ C2Color::RANGE_OTHER),
+ C2F(mDefaultColorAspects, primaries)
+ .inRange(C2Color::PRIMARIES_UNSPECIFIED,
+ C2Color::PRIMARIES_OTHER),
+ C2F(mDefaultColorAspects, transfer)
+ .inRange(C2Color::TRANSFER_UNSPECIFIED,
+ C2Color::TRANSFER_OTHER),
+ C2F(mDefaultColorAspects, matrix)
+ .inRange(C2Color::MATRIX_UNSPECIFIED,
+ C2Color::MATRIX_OTHER)})
+ .withSetter(DefaultColorAspectsSetter)
+ .build());
+
+ addParameter(DefineParam(mCodedColorAspects, C2_PARAMKEY_VUI_COLOR_ASPECTS)
+ .withDefault(new C2StreamColorAspectsInfo::input(
+ 0u, C2Color::RANGE_LIMITED, C2Color::PRIMARIES_UNSPECIFIED,
+ C2Color::TRANSFER_UNSPECIFIED, C2Color::MATRIX_UNSPECIFIED))
+ .withFields({C2F(mCodedColorAspects, range)
+ .inRange(C2Color::RANGE_UNSPECIFIED,
+ C2Color::RANGE_OTHER),
+ C2F(mCodedColorAspects, primaries)
+ .inRange(C2Color::PRIMARIES_UNSPECIFIED,
+ C2Color::PRIMARIES_OTHER),
+ C2F(mCodedColorAspects, transfer)
+ .inRange(C2Color::TRANSFER_UNSPECIFIED,
+ C2Color::TRANSFER_OTHER),
+ C2F(mCodedColorAspects, matrix)
+ .inRange(C2Color::MATRIX_UNSPECIFIED,
+ C2Color::MATRIX_OTHER)})
+ .withSetter(CodedColorAspectsSetter)
+ .build());
+
+ addParameter(
+ DefineParam(mColorAspects, C2_PARAMKEY_COLOR_ASPECTS)
+ .withDefault(new C2StreamColorAspectsInfo::output(
+ 0u, C2Color::RANGE_UNSPECIFIED, C2Color::PRIMARIES_UNSPECIFIED,
+ C2Color::TRANSFER_UNSPECIFIED, C2Color::MATRIX_UNSPECIFIED))
+ .withFields(
+ {C2F(mColorAspects, range)
+ .inRange(C2Color::RANGE_UNSPECIFIED, C2Color::RANGE_OTHER),
+ C2F(mColorAspects, primaries)
+ .inRange(C2Color::PRIMARIES_UNSPECIFIED,
+ C2Color::PRIMARIES_OTHER),
+ C2F(mColorAspects, transfer)
+ .inRange(C2Color::TRANSFER_UNSPECIFIED,
+ C2Color::TRANSFER_OTHER),
+ C2F(mColorAspects, matrix)
+ .inRange(C2Color::MATRIX_UNSPECIFIED,
+ C2Color::MATRIX_OTHER)})
+ .withSetter(ColorAspectsSetter, mDefaultColorAspects, mCodedColorAspects)
+ .build());
+
+ // TODO: support more formats?
+ std::vector<uint32_t> pixelFormats = {HAL_PIXEL_FORMAT_YCBCR_420_888};
+ if (isHalPixelFormatSupported((AHardwareBuffer_Format)HAL_PIXEL_FORMAT_YCBCR_P010)) {
+ pixelFormats.push_back(HAL_PIXEL_FORMAT_YCBCR_P010);
+ }
+ // If the surface color format (IMPLEMENTATION_DEFINED) isn't added to the supported
+ // formats, there is no way to know when the color format is configured to "surface".
+ // Knowing this is necessary to be able to choose a 10-bit format while decoding
+ // 10-bit clips in surface mode.
+ pixelFormats.push_back(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED);
+ addParameter(DefineParam(mPixelFormat, C2_PARAMKEY_PIXEL_FORMAT)
+ .withDefault(new C2StreamPixelFormatInfo::output(
+ 0u, HAL_PIXEL_FORMAT_YCBCR_420_888))
+ .withFields({C2F(mPixelFormat, value).oneOf(pixelFormats)})
+ .withSetter((Setter<decltype(*mPixelFormat)>::StrictValueWithNoDeps))
+ .build());
+ }
+
+ static C2R SizeSetter(bool mayBlock, const C2P<C2StreamPictureSizeInfo::output>& oldMe,
+ C2P<C2StreamPictureSizeInfo::output>& me) {
+ (void)mayBlock;
+ ALOGV("%s - %d x %d", __FUNCTION__, me.v.width, me.v.height);
+ C2R res = C2R::Ok();
+ if (!me.F(me.v.width).supportsAtAll(me.v.width)) {
+ res = res.plus(C2SettingResultBuilder::BadValue(me.F(me.v.width)));
+ me.set().width = oldMe.v.width;
+ }
+ if (!me.F(me.v.height).supportsAtAll(me.v.height)) {
+ res = res.plus(C2SettingResultBuilder::BadValue(me.F(me.v.height)));
+ me.set().height = oldMe.v.height;
+ }
+ return res;
+ }
+
+ static C2R MaxPictureSizeSetter(bool mayBlock, C2P<C2StreamMaxPictureSizeTuning::output>& me,
+ const C2P<C2StreamPictureSizeInfo::output>& size) {
+ (void)mayBlock;
+ ALOGV("%s - %d x %d", __FUNCTION__, me.v.width, me.v.height);
+ // TODO: get max width/height from the size's field helpers vs.
+ // hardcoding
+ me.set().width = c2_min(c2_max(me.v.width, size.v.width), 4096u);
+ me.set().height = c2_min(c2_max(me.v.height, size.v.height), 4096u);
+ return C2R::Ok();
+ }
+
+ static C2R MaxInputSizeSetter(bool mayBlock, C2P<C2StreamMaxBufferSizeInfo::input>& me,
+ const C2P<C2StreamMaxPictureSizeTuning::output>& maxSize) {
+ (void)mayBlock;
+ ALOGV("%s", __FUNCTION__);
+ // assume compression ratio of 2, but enforce a floor
+ me.set().value =
+ c2_max((((maxSize.v.width + 63) / 64) * ((maxSize.v.height + 63) / 64) * 3072),
+ kMinInputBufferSize);
+ return C2R::Ok();
+ }
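+
+ // Worked example of the sizing heuristic above (illustrative): for a
+ // 4096x4096 max picture, ((4096 + 63) / 64) = 64 blocks per dimension,
+ // so the suggested input size is 64 * 64 * 3072 = 12,582,912 bytes,
+ // i.e. half of the ~24 MiB an uncompressed 8-bit YUV420 frame would occupy.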
+
+ static C2R DefaultColorAspectsSetter(bool mayBlock,
+ C2P<C2StreamColorAspectsTuning::output>& me) {
+ (void)mayBlock;
+ ALOGV("%s - range: %u, primary: %u, transfer: %u, matrix: %u", __FUNCTION__, me.v.range,
+ me.v.primaries, me.v.transfer, me.v.matrix);
+ if (me.v.range > C2Color::RANGE_OTHER) {
+ me.set().range = C2Color::RANGE_OTHER;
+ }
+ if (me.v.primaries > C2Color::PRIMARIES_OTHER) {
+ me.set().primaries = C2Color::PRIMARIES_OTHER;
+ }
+ if (me.v.transfer > C2Color::TRANSFER_OTHER) {
+ me.set().transfer = C2Color::TRANSFER_OTHER;
+ }
+ if (me.v.matrix > C2Color::MATRIX_OTHER) {
+ me.set().matrix = C2Color::MATRIX_OTHER;
+ }
+ return C2R::Ok();
+ }
+
+ static C2R CodedColorAspectsSetter(bool mayBlock, C2P<C2StreamColorAspectsInfo::input>& me) {
+ (void)mayBlock;
+ ALOGV("%s - range: %u, primaries: %u, transfer: %u, matrix: %u", __func__, me.v.range,
+ me.v.primaries, me.v.transfer, me.v.matrix);
+ if (me.v.range > C2Color::RANGE_OTHER) {
+ me.set().range = C2Color::RANGE_OTHER;
+ }
+ if (me.v.primaries > C2Color::PRIMARIES_OTHER) {
+ me.set().primaries = C2Color::PRIMARIES_OTHER;
+ }
+ if (me.v.transfer > C2Color::TRANSFER_OTHER) {
+ me.set().transfer = C2Color::TRANSFER_OTHER;
+ }
+ if (me.v.matrix > C2Color::MATRIX_OTHER) {
+ me.set().matrix = C2Color::MATRIX_OTHER;
+ }
+ return C2R::Ok();
+ }
+
+ static C2R ColorAspectsSetter(bool mayBlock, C2P<C2StreamColorAspectsInfo::output>& me,
+ const C2P<C2StreamColorAspectsTuning::output>& def,
+ const C2P<C2StreamColorAspectsInfo::input>& coded) {
+ (void)mayBlock;
+ ALOGV("%s", __FUNCTION__);
+ // take default values for all unspecified fields, and coded values for specified ones
+ me.set().range = coded.v.range == RANGE_UNSPECIFIED ? def.v.range : coded.v.range;
+ me.set().primaries =
+ coded.v.primaries == PRIMARIES_UNSPECIFIED ? def.v.primaries : coded.v.primaries;
+ me.set().transfer =
+ coded.v.transfer == TRANSFER_UNSPECIFIED ? def.v.transfer : coded.v.transfer;
+ me.set().matrix = coded.v.matrix == MATRIX_UNSPECIFIED ? def.v.matrix : coded.v.matrix;
+ ALOGV("%s - me.v.range = %u, me.v.primaries = %u, me.v.transfer = %u, me.v.matrix = %u",
+ __func__, me.v.range, me.v.primaries, me.v.transfer, me.v.matrix);
+ return C2R::Ok();
+ }
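+
+ // Illustrative merge: if the bitstream signals only RANGE_LIMITED and leaves
+ // primaries/transfer/matrix unspecified while the client default is BT.709,
+ // the resolved aspects are RANGE_LIMITED with BT.709 primaries, transfer
+ // and matrix.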
+
+ static C2R ProfileLevelSetter(bool mayBlock, C2P<C2StreamProfileLevelInfo::input>& me,
+ const C2P<C2StreamPictureSizeInfo::output>& size) {
+ (void)mayBlock;
+ ALOGV("%s", __FUNCTION__);
+ (void)size;
+ (void)me; // TODO: validate
+ return C2R::Ok();
+ }
+
+ std::shared_ptr<C2StreamColorAspectsTuning::output> getDefaultColorAspects_l() {
+ ALOGV("%s - mDefaultColorAspects: %u", __FUNCTION__, mDefaultColorAspects->primaries);
+ return mDefaultColorAspects;
+ }
+
+ std::shared_ptr<C2StreamColorAspectsInfo::output> getColorAspects_l() {
+ ALOGV("%s - mColorAspects: %u", __FUNCTION__, mColorAspects->primaries);
+ return mColorAspects;
+ }
+
+ static C2R Hdr10PlusInfoInputSetter(bool mayBlock, C2P<C2StreamHdr10PlusInfo::input>& me) {
+ (void)mayBlock;
+ ALOGV("%s", __FUNCTION__);
+ (void)me; // TODO: validate
+ return C2R::Ok();
+ }
+
+ static C2R Hdr10PlusInfoOutputSetter(bool mayBlock, C2P<C2StreamHdr10PlusInfo::output>& me) {
+ (void)mayBlock;
+ ALOGV("%s", __FUNCTION__);
+ (void)me; // TODO: validate
+ return C2R::Ok();
+ }
+
+ // unsafe getters
+ std::shared_ptr<C2StreamPixelFormatInfo::output> getPixelFormat_l() const {
+ return mPixelFormat;
+ }
+
+ static C2R HdrStaticInfoSetter(bool mayBlock, C2P<C2StreamHdrStaticInfo::output>& me) {
+ (void)mayBlock;
+ ALOGV("%s", __FUNCTION__);
+ if (me.v.mastering.red.x > 1) {
+ me.set().mastering.red.x = 1;
+ }
+ if (me.v.mastering.red.y > 1) {
+ me.set().mastering.red.y = 1;
+ }
+ if (me.v.mastering.green.x > 1) {
+ me.set().mastering.green.x = 1;
+ }
+ if (me.v.mastering.green.y > 1) {
+ me.set().mastering.green.y = 1;
+ }
+ if (me.v.mastering.blue.x > 1) {
+ me.set().mastering.blue.x = 1;
+ }
+ if (me.v.mastering.blue.y > 1) {
+ me.set().mastering.blue.y = 1;
+ }
+ if (me.v.mastering.white.x > 1) {
+ me.set().mastering.white.x = 1;
+ }
+ if (me.v.mastering.white.y > 1) {
+ me.set().mastering.white.y = 1;
+ }
+ if (me.v.mastering.maxLuminance > 65535.0) {
+ me.set().mastering.maxLuminance = 65535.0;
+ }
+ if (me.v.mastering.minLuminance > 6.5535) {
+ me.set().mastering.minLuminance = 6.5535;
+ }
+ if (me.v.maxCll > 65535.0) {
+ me.set().maxCll = 65535.0;
+ }
+ if (me.v.maxFall > 65535.0) {
+ me.set().maxFall = 65535.0;
+ }
+ return C2R::Ok();
+ }
+
+ private:
+ std::shared_ptr<C2StreamProfileLevelInfo::input> mProfileLevel;
+ std::shared_ptr<C2StreamPictureSizeInfo::output> mSize;
+ std::shared_ptr<C2StreamMaxPictureSizeTuning::output> mMaxSize;
+ std::shared_ptr<C2StreamMaxBufferSizeInfo::input> mMaxInputSize;
+ std::shared_ptr<C2StreamColorInfo::output> mColorInfo;
+ std::shared_ptr<C2StreamPixelFormatInfo::output> mPixelFormat;
+ std::shared_ptr<C2StreamColorAspectsTuning::output> mDefaultColorAspects;
+ std::shared_ptr<C2StreamColorAspectsInfo::input> mCodedColorAspects;
+ std::shared_ptr<C2StreamColorAspectsInfo::output> mColorAspects;
+ std::shared_ptr<C2StreamHdr10PlusInfo::input> mHdr10PlusInfoInput;
+ std::shared_ptr<C2StreamHdr10PlusInfo::output> mHdr10PlusInfoOutput;
+ std::shared_ptr<C2StreamHdrStaticInfo::output> mHdrStaticInfo;
+};
+
+static void ivd_aligned_free(void* ctxt, void* mem) {
+ (void)ctxt;
+ free(mem);
+}
+
+C2SoftApvDec::C2SoftApvDec(const char* name, c2_node_id_t id,
+ const std::shared_ptr<IntfImpl>& intfImpl)
+ : SimpleC2Component(std::make_shared<SimpleInterface<IntfImpl>>(name, id, intfImpl)),
+ mIntf(intfImpl),
+ mDecHandle(nullptr),
+ mOutBufferFlush(nullptr),
+ mIvColorformat(IV_YUV_420P),
+ mOutputDelay(kDefaultOutputDelay),
+ mHeaderDecoded(false),
+ mOutIndex(0u),
+ mHalPixelFormat(HAL_PIXEL_FORMAT_YV12),
+ mWidth(320),
+ mHeight(240),
+ mSignalledOutputEos(false),
+ mSignalledError(false) {
+ oapvdHandle = NULL;
+ oapvmHandle = NULL;
+ outputCsp = OUTPUT_CSP_NATIVE;
+}
+
+C2SoftApvDec::~C2SoftApvDec() {
+ onRelease();
+}
+
+c2_status_t C2SoftApvDec::onInit() {
+ ALOGV("%s", __FUNCTION__);
+ status_t err = initDecoder();
+ return err == OK ? C2_OK : C2_CORRUPTED;
+}
+
+c2_status_t C2SoftApvDec::onStop() {
+ ALOGV("%s", __FUNCTION__);
+ if (OK != resetDecoder()) return C2_CORRUPTED;
+ resetPlugin();
+ return C2_OK;
+}
+
+void C2SoftApvDec::onReset() {
+ ALOGV("%s", __FUNCTION__);
+ (void)onStop();
+}
+
+status_t C2SoftApvDec::deleteDecoder() {
+ ALOGV("%s", __FUNCTION__);
+ if (oapvdHandle) {
+ oapvd_delete(oapvdHandle);
+ oapvdHandle = NULL;
+ }
+ if (oapvmHandle) {
+ oapvm_delete(oapvmHandle);
+ oapvmHandle = NULL;
+ }
+ for (int i = 0; i < ofrms.num_frms; i++) {
+ if (ofrms.frm[i].imgb != NULL) {
+ ofrms.frm[i].imgb->release(ofrms.frm[i].imgb);
+ ofrms.frm[i].imgb = NULL;
+ }
+ }
+ return OK;
+}
+
+void C2SoftApvDec::onRelease() {
+ ALOGV("%s", __FUNCTION__);
+ (void)deleteDecoder();
+ if (mOutBufferFlush) {
+ ivd_aligned_free(nullptr, mOutBufferFlush);
+ mOutBufferFlush = nullptr;
+ }
+ if (mOutBlock) {
+ mOutBlock.reset();
+ }
+}
+
+c2_status_t C2SoftApvDec::onFlush_sm() {
+ ALOGV("%s", __FUNCTION__);
+ mSignalledError = false;
+ mSignalledOutputEos = false;
+ return C2_OK;
+}
+
+status_t C2SoftApvDec::createDecoder() {
+ ALOGV("%s", __FUNCTION__);
+ return OK;
+}
+
+status_t C2SoftApvDec::initDecoder() {
+ int ret;
+ mSignalledError = false;
+ mSignalledOutputEos = false;
+
+ mHalPixelFormat = HAL_PIXEL_FORMAT_YV12;
+ {
+ IntfImpl::Lock lock = mIntf->lock();
+ mPixelFormatInfo = mIntf->getPixelFormat_l();
+ ALOGW("Hal pixel format = %d", mPixelFormatInfo->value);
+ }
+ memset(&cdesc, 0, sizeof(oapvd_cdesc_t));
+
+ cdesc.threads = 1; // default
+ oapvdHandle = oapvd_create(&cdesc, &ret);
+ if (oapvdHandle == NULL) {
+ ALOGE("ERROR: cannot create APV decoder (err=%d)\n", ret);
+ return C2_NO_INIT;
+ }
+
+ memset(&ofrms, 0, sizeof(oapv_frms_t));
+
+ oapvmHandle = oapvm_create(&ret);
+ if (OAPV_FAILED(ret)) {
+ ALOGE("oapvm create failed");
+ oapvd_delete(oapvdHandle);
+ oapvdHandle = NULL;
+ return NO_INIT;
+ }
+
+ ALOGV("oapvd init done");
+ return OK;
+}
+
+status_t C2SoftApvDec::setFlushMode() {
+ ALOGV("%s", __FUNCTION__);
+ return OK;
+}
+
+status_t C2SoftApvDec::resetDecoder() {
+ ALOGV("%s", __FUNCTION__);
+ return OK;
+}
+
+void C2SoftApvDec::resetPlugin() {
+ ALOGV("%s", __FUNCTION__);
+ mSignalledOutputEos = false;
+ if (mOutBlock) {
+ mOutBlock.reset();
+ }
+}
+
+static void fillEmptyWork(const std::unique_ptr<C2Work>& work) {
+ uint32_t flags = 0;
+ if (work->input.flags & C2FrameData::FLAG_END_OF_STREAM) {
+ flags |= C2FrameData::FLAG_END_OF_STREAM;
+ ALOGV("signalling eos");
+ }
+ work->worklets.front()->output.flags = (C2FrameData::flags_t)flags;
+ work->worklets.front()->output.buffers.clear();
+ work->worklets.front()->output.ordinal = work->input.ordinal;
+ work->workletsProcessed = 1u;
+}
+
+void C2SoftApvDec::finishWork(uint64_t index, const std::unique_ptr<C2Work>& work,
+ const std::shared_ptr<C2GraphicBlock>& block) {
+ std::shared_ptr<C2Buffer> buffer = createGraphicBuffer(block, C2Rect(mWidth, mHeight));
+
+ {
+ IntfImpl::Lock lock = mIntf->lock();
+ buffer->setInfo(mIntf->getColorAspects_l());
+ }
+
+ class FillWork {
+ public:
+ FillWork(uint32_t flags, C2WorkOrdinalStruct ordinal,
+ const std::shared_ptr<C2Buffer>& buffer)
+ : mFlags(flags), mOrdinal(ordinal), mBuffer(buffer) {}
+ ~FillWork() = default;
+
+ void operator()(const std::unique_ptr<C2Work>& work) {
+ work->worklets.front()->output.flags = (C2FrameData::flags_t)mFlags;
+ work->worklets.front()->output.buffers.clear();
+ work->worklets.front()->output.ordinal = mOrdinal;
+ work->workletsProcessed = 1u;
+ work->result = C2_OK;
+ if (mBuffer) {
+ work->worklets.front()->output.buffers.push_back(mBuffer);
+ }
+ ALOGV("timestamp = %lld, index = %lld, w/%s buffer", mOrdinal.timestamp.peekll(),
+ mOrdinal.frameIndex.peekll(), mBuffer ? "" : "o");
+ }
+
+ private:
+ const uint32_t mFlags;
+ const C2WorkOrdinalStruct mOrdinal;
+ const std::shared_ptr<C2Buffer> mBuffer;
+ };
+
+ auto fillWork = [buffer](const std::unique_ptr<C2Work>& work) {
+ work->worklets.front()->output.flags = (C2FrameData::flags_t)0;
+ work->worklets.front()->output.buffers.clear();
+ work->worklets.front()->output.buffers.push_back(buffer);
+ work->worklets.front()->output.ordinal = work->input.ordinal;
+ work->workletsProcessed = 1u;
+ };
+
+ if (work && c2_cntr64_t(index) == work->input.ordinal.frameIndex) {
+ bool eos = ((work->input.flags & C2FrameData::FLAG_END_OF_STREAM) != 0);
+ // TODO: Check if cloneAndSend can be avoided by tracking number of frames remaining
+ if (eos) {
+ if (buffer) {
+ mOutIndex = index;
+ C2WorkOrdinalStruct outOrdinal = work->input.ordinal;
+ cloneAndSend(mOutIndex, work,
+ FillWork(C2FrameData::FLAG_INCOMPLETE, outOrdinal, buffer));
+ buffer.reset();
+ }
+ } else {
+ fillWork(work);
+ }
+ } else {
+ finish(index, fillWork);
+ }
+}
+
+static void copyBufferFromYUV420ToYV12(uint8_t* dstY, uint8_t* dstU, uint8_t* dstV,
+ const uint8_t* srcY, const uint8_t* srcU,
+ const uint8_t* srcV, size_t srcYStride, size_t srcUStride,
+ size_t srcVStride, size_t dstYStride, size_t dstUStride,
+ size_t dstVStride, uint32_t width, uint32_t height) {
+ for (size_t i = 0; i < height; ++i) {
+ memcpy(dstY, srcY, width);
+ srcY += srcYStride;
+ dstY += dstYStride;
+ }
+
+ for (size_t i = 0; i < height / 2; ++i) {
+ memcpy(dstU, srcU, width / 2);
+ memcpy(dstV, srcV, width / 2);
+ dstU += dstUStride;
+ srcU += srcUStride;
+ dstV += dstVStride;
+ srcV += srcVStride;
+ }
+}
+
+static void copyBufferFromYUV422ToYV12(uint8_t* dstY, uint8_t* dstU, uint8_t* dstV,
+ const uint8_t* srcY, const uint8_t* srcU,
+ const uint8_t* srcV, size_t srcYStride, size_t srcUStride,
+ size_t srcVStride, size_t dstYStride, size_t dstUStride,
+ size_t dstVStride, uint32_t width, uint32_t height) {
+ for (size_t i = 0; i < height; ++i) {
+ memcpy(dstY, srcY, width);
+ srcY += srcYStride;
+ dstY += dstYStride;
+ }
+
+ for (size_t i = 0; i < height / 2; ++i) {
+ memcpy(dstU, srcU, width / 2);
+ memcpy(dstV, srcV, width / 2);
+ dstU += dstUStride;
+ srcU += srcUStride * 2;
+ dstV += dstVStride;
+ srcV += srcVStride * 2;
+ }
+}
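+
+// Note on the two 4:2:2 -> 4:2:0 copies above: chroma is downsampled vertically
+// by advancing the source chroma pointers two rows at a time (srcUStride * 2,
+// srcVStride * 2), i.e. every other chroma row is dropped rather than filtered.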
+
+static void copyBufferFromYUV42010bitToP010(uint16_t* dstY, uint16_t* dstUV, const uint16_t* srcY,
+ const uint16_t* srcU, const uint16_t* srcV,
+ size_t srcYStride, size_t srcUStride, size_t srcVStride,
+ size_t dstYStride, size_t dstUVStride, size_t width,
+ size_t height) {
+ for (size_t y = 0; y < height; ++y) {
+ for (size_t x = 0; x < width; ++x) {
+ dstY[x] = srcY[x] << 6;
+ }
+ srcY += srcYStride;
+ dstY += dstYStride;
+ }
+
+ for (size_t y = 0; y < height / 2; ++y) {
+ for (size_t x = 0; x < width / 2; ++x) {
+ dstUV[2 * x] = srcU[x] << 6;
+ dstUV[2 * x + 1] = srcV[x] << 6;
+ }
+ srcU += srcUStride;
+ srcV += srcVStride;
+ dstUV += dstUVStride;
+ }
+}
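+
+// The `<< 6` above stores each 10-bit sample MSB-aligned in a 16-bit P010 word,
+// as the format requires; e.g. the maximum code 0x3FF becomes 0xFFC0, with the
+// low 6 bits left as zero padding.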
+
+static void copyBufferFromYUV42210bitToP010(uint16_t* dstY, uint16_t* dstUV, const uint16_t* srcY,
+ const uint16_t* srcU, const uint16_t* srcV,
+ size_t srcYStride, size_t srcUStride, size_t srcVStride,
+ size_t dstYStride, size_t dstUVStride, size_t width,
+ size_t height) {
+ for (size_t y = 0; y < height; ++y) {
+ for (size_t x = 0; x < width; ++x) {
+ dstY[x] = srcY[x] << 6;
+ }
+ srcY += srcYStride;
+ dstY += dstYStride;
+ }
+
+ for (size_t y = 0; y < height / 2; ++y) {
+ for (size_t x = 0; x < width / 2; ++x) {
+ dstUV[2 * x] = srcU[x] << 6;
+ dstUV[2 * x + 1] = srcV[x] << 6;
+ }
+ srcU += srcUStride * 2;
+ srcV += srcVStride * 2;
+ dstUV += dstUVStride;
+ }
+}
+
+static void copyBufferFromP210ToP010(uint16_t* dstY, uint16_t* dstUV, const uint16_t* srcY,
+ const uint16_t* srcUV, size_t srcYStride, size_t srcUVStride,
+ size_t dstYStride, size_t dstUVStride, size_t width,
+ size_t height) {
+ for (size_t y = 0; y < height; ++y) {
+ memcpy(dstY, srcY, width * sizeof(uint16_t));
+ srcY += srcYStride;
+ dstY += dstYStride;
+ }
+
+ for (size_t y = 0; y < height / 2; ++y) {
+ memcpy(dstUV, srcUV, width * 2);
+ srcUV += srcUVStride * 2;
+ dstUV += dstUVStride;
+ }
+}
+
+static void copyBufferFromYUV42010bitToYV12(uint8_t* dstY, uint8_t* dstU, uint8_t* dstV,
+ const uint16_t* srcY, const uint16_t* srcU,
+ const uint16_t* srcV, size_t srcYStride,
+ size_t srcUStride, size_t srcVStride, size_t dstYStride,
+ size_t dstUStride, size_t dstVStride, uint32_t width,
+ uint32_t height) {
+ for (size_t i = 0; i < height; ++i) {
+ for (size_t j = 0; j < width; ++j) {
+ dstY[i * dstYStride + j] = (srcY[i * srcYStride + j] >> 2) & 0xFF;
+ }
+ }
+
+ for (size_t i = 0; i < height / 2; ++i) {
+ for (size_t j = 0; j < width / 2; ++j) {
+ dstU[i * dstUStride + j] = (srcU[i * srcUStride + j] >> 2) & 0xFF;
+ }
+ }
+
+ for (size_t i = 0; i < height / 2; ++i) {
+ for (size_t j = 0; j < width / 2; ++j) {
+ dstV[i * dstVStride + j] = (srcV[i * srcVStride + j] >> 2) & 0xFF;
+ }
+ }
+}
+
+static void copyBufferFromYUV42210bitToYV12(uint8_t* dstY, uint8_t* dstU, uint8_t* dstV,
+ const uint16_t* srcY, const uint16_t* srcU,
+ const uint16_t* srcV, size_t srcYStride,
+ size_t srcUStride, size_t srcVStride, size_t dstYStride,
+ size_t dstUStride, size_t dstVStride, uint32_t width,
+ uint32_t height) {
+ for (size_t i = 0; i < height; ++i) {
+ for (size_t j = 0; j < width; ++j) {
+ dstY[i * dstYStride + j] = (srcY[i * srcYStride + j] >> 2) & 0xFF;
+ }
+ }
+
+ for (size_t i = 0; i < height / 2; ++i) {
+ for (size_t j = 0; j < width / 2; ++j) {
+ dstU[i * dstUStride + j] = (srcU[i * srcUStride * 2 + j] >> 2) & 0xFF;
+ }
+ }
+
+ for (size_t i = 0; i < height / 2; ++i) {
+ for (size_t j = 0; j < width / 2; ++j) {
+ dstV[i * dstVStride + j] = (srcV[i * srcVStride * 2 + j] >> 2) & 0xFF;
+ }
+ }
+}
+
+static void copyBufferFromP210ToYV12(uint8_t* dstY, uint8_t* dstU, uint8_t* dstV,
+ const uint16_t* srcY, const uint16_t* srcUV, size_t srcYStride,
+ size_t srcUVStride, size_t dstYStride, size_t dstUStride,
+ size_t dstVStride, size_t width, size_t height) {
+ for (size_t i = 0; i < height; ++i) {
+ for (size_t j = 0; j < width; ++j) {
+ dstY[i * dstYStride + j] = (srcY[i * srcYStride + j] >> 8) & 0xFF;
+ }
+ }
+
+ for (size_t i = 0; i < height / 2; ++i) {
+ for (size_t j = 0; j < width / 2; ++j) {
+ dstV[i * dstVStride + j] = (srcUV[i * srcUVStride * 2 + j * 2] >> 8) & 0xFF;
+ dstU[i * dstUStride + j] = (srcUV[i * srcUVStride * 2 + j * 2 + 1] >> 8) & 0xFF;
+ }
+ }
+}
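+
+// The `>> 8` above matches the `>> 2` used in the planar 10-bit paths: P210
+// samples are MSB-aligned (value << 6), and (value << 6) >> 8 == value >> 2,
+// i.e. the two least significant bits are truncated to produce 8-bit output.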
+
+void C2SoftApvDec::process(const std::unique_ptr<C2Work>& work,
+ const std::shared_ptr<C2BlockPool>& pool) {
+ // Initialize output work
+ work->result = C2_OK;
+ work->workletsProcessed = 0u;
+ work->worklets.front()->output.configUpdate.clear();
+ work->worklets.front()->output.flags = work->input.flags;
+ if (mSignalledError || mSignalledOutputEos) {
+ work->result = C2_BAD_VALUE;
+ return;
+ }
+
+ int ret = 0;
+ size_t inOffset = 0u;
+ size_t inSize = 0u;
+ C2ReadView rView = mDummyReadView;
+ if (!work->input.buffers.empty()) {
+ rView = work->input.buffers[0]->data().linearBlocks().front().map().get();
+ inSize = rView.capacity();
+ if (inSize && rView.error()) {
+ ALOGE("read view map failed %d", rView.error());
+ work->result = C2_CORRUPTED;
+ return;
+ }
+ }
+
+ bool codecConfig = ((work->input.flags & C2FrameData::FLAG_CODEC_CONFIG) != 0);
+ bool eos = ((work->input.flags & C2FrameData::FLAG_END_OF_STREAM) != 0);
+
+ ALOGV("in buffer attr. size %zu timestamp %llu frameindex %d, flags %x", inSize,
+ work->input.ordinal.timestamp.peekull(), (int)work->input.ordinal.frameIndex.peeku(),
+ work->input.flags);
+
+ if (codecConfig) {
+ fillEmptyWork(work);
+ return;
+ }
+
+ if (inSize > 0) {
+ uint8_t* bitstream = const_cast<uint8_t*>(rView.data() + inOffset);
+ oapv_au_info_t aui;
+ oapv_bitb_t bitb;
+ bitb.addr = bitstream + 4; // skip the 4-byte au size field
+ bitb.ssize = inSize - 4;
+
+ if (OAPV_FAILED(oapvd_info(bitb.addr, bitb.ssize, &aui))) {
+ ALOGE("cannot get information from bitstream");
+ return;
+ }
+
+ /* create decoding frame buffers */
+ ofrms.num_frms = aui.num_frms;
+ if (ofrms.num_frms <= 0) {
+ ALOGE("Parse error - no output frame(%d)", ofrms.num_frms);
+ fillEmptyWork(work);
+ return;
+ }
+ for (int i = 0; i < ofrms.num_frms; i++) {
+ oapv_frm_info_t* finfo = &aui.frm_info[FRM_IDX];
+ oapv_frm_t* frm = &ofrms.frm[i];
+
+ if (mWidth != finfo->w || mHeight != finfo->h) {
+ mWidth = finfo->w;
+ mHeight = finfo->h;
+ }
+
+ if (frm->imgb != NULL && (frm->imgb->w[0] != finfo->w || frm->imgb->h[0] != finfo->h)) {
+ frm->imgb->release(frm->imgb);
+ frm->imgb = NULL;
+ }
+
+ if (frm->imgb == NULL) {
+ if (outputCsp == OUTPUT_CSP_P210) {
+ frm->imgb = imgb_create(finfo->w, finfo->h, OAPV_CS_P210);
+ } else {
+ frm->imgb = imgb_create(finfo->w, finfo->h, finfo->cs);
+ }
+ if (frm->imgb == NULL) {
+ ALOGE("cannot allocate image buffer (w:%d, h:%d, cs:%d)", finfo->w, finfo->h,
+ finfo->cs);
+ fillEmptyWork(work);
+ return;
+ }
+ }
+ }
+
+ oapvd_stat_t stat;
+ ret = oapvd_decode(oapvdHandle, &bitb, &ofrms, oapvmHandle, &stat);
+ if (bitb.ssize != stat.read) {
+ ALOGW("decode done, input size: %d, processed size: %d", bitb.ssize, stat.read);
+ }
+
+ if (OAPV_FAILED(ret)) {
+ ALOGE("failed to decode bitstream\n");
+ fillEmptyWork(work);
+ return;
+ }
+
+ status_t err = outputBuffer(pool, work);
+ if (err == NOT_ENOUGH_DATA) {
+ if (inSize > 0) {
+ ALOGV("Maybe non-display frame at %lld.", work->input.ordinal.frameIndex.peekll());
+ // send the work back with empty buffer.
+ inSize = 0;
+ }
+ } else if (err != OK) {
+ ALOGD("Error while getting the output frame out");
+ // work->result would be already filled; do fillEmptyWork() below to
+ // send the work back.
+ inSize = 0;
+ }
+ }
+
+ if (eos) {
+ drainInternal(DRAIN_COMPONENT_WITH_EOS, pool, work);
+ mSignalledOutputEos = true;
+ } else if (!inSize) {
+ fillEmptyWork(work);
+ }
+}
+
+status_t C2SoftApvDec::outputBuffer(const std::shared_ptr<C2BlockPool>& pool,
+ const std::unique_ptr<C2Work>& work) {
+ if (!(work && pool)) return BAD_VALUE;
+
+ oapv_imgb_t* imgbOutput;
+ std::shared_ptr<C2GraphicBlock> block;
+
+ if (ofrms.num_frms > 0) {
+ oapv_frm_t* frm = &ofrms.frm[0];
+ imgbOutput = frm->imgb;
+ } else {
+ ALOGW("No output frames");
+ return NOT_ENOUGH_DATA;
+ }
+ bool isMonochrome = OAPV_CS_GET_FORMAT(imgbOutput->cs) == OAPV_CS_YCBCR400;
+
+ uint32_t format = HAL_PIXEL_FORMAT_YV12;
+ std::shared_ptr<C2StreamColorAspectsInfo::output> codedColorAspects;
+ if (OAPV_CS_GET_BIT_DEPTH(imgbOutput->cs) == 10 &&
+ mPixelFormatInfo->value != HAL_PIXEL_FORMAT_YCBCR_420_888) {
+ IntfImpl::Lock lock = mIntf->lock();
+ codedColorAspects = mIntf->getColorAspects_l();
+ bool allowRGBA1010102 = false;
+ if (codedColorAspects->primaries == C2Color::PRIMARIES_BT2020 &&
+ codedColorAspects->matrix == C2Color::MATRIX_BT2020 &&
+ codedColorAspects->transfer == C2Color::TRANSFER_ST2084) {
+ allowRGBA1010102 = true;
+ }
+ format = getHalPixelFormatForBitDepth10(allowRGBA1010102);
+ }
+
+ if (mHalPixelFormat != format) {
+ C2StreamPixelFormatInfo::output pixelFormat(0u, format);
+ std::vector<std::unique_ptr<C2SettingResult>> failures;
+ c2_status_t err = mIntf->config({&pixelFormat}, C2_MAY_BLOCK, &failures);
+ if (err == C2_OK) {
+ work->worklets.front()->output.configUpdate.push_back(C2Param::Copy(pixelFormat));
+ } else {
+ ALOGE("Config update pixelFormat failed");
+ mSignalledError = true;
+ work->workletsProcessed = 1u;
+ work->result = C2_CORRUPTED;
+ return UNKNOWN_ERROR;
+ }
+ mHalPixelFormat = format;
+ }
+ ALOGV("mHalPixelFormat: %u, format: %d", mHalPixelFormat, format);
+
+ C2MemoryUsage usage = {C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE};
+
+ // Note: aligning the height to a multiple of 2 is not sufficient here; align to 16.
+ c2_status_t err =
+ pool->fetchGraphicBlock(align(mWidth, 16), align(mHeight, 16), format, usage, &block);
+
+ if (err != C2_OK) {
+ ALOGE("fetchGraphicBlock for Output failed with status %d", err);
+ work->result = err;
+ return UNKNOWN_ERROR;
+ }
+
+ C2GraphicView wView = block->map().get();
+ if (wView.error()) {
+ ALOGE("graphic view map failed %d", wView.error());
+ work->result = C2_CORRUPTED;
+ return UNKNOWN_ERROR;
+ }
+
+ ALOGV("provided (%dx%d) required (%dx%d)", block->width(), block->height(), mWidth, mHeight);
+
+ uint8_t* dstY = const_cast<uint8_t*>(wView.data()[C2PlanarLayout::PLANE_Y]);
+ uint8_t* dstU = const_cast<uint8_t*>(wView.data()[C2PlanarLayout::PLANE_U]);
+ uint8_t* dstV = const_cast<uint8_t*>(wView.data()[C2PlanarLayout::PLANE_V]);
+
+ C2PlanarLayout layout = wView.layout();
+ size_t dstYStride = layout.planes[C2PlanarLayout::PLANE_Y].rowInc;
+ size_t dstUStride = layout.planes[C2PlanarLayout::PLANE_U].rowInc;
+ size_t dstVStride = layout.planes[C2PlanarLayout::PLANE_V].rowInc;
+
+ if (format == HAL_PIXEL_FORMAT_YCBCR_P010) {
+ if (OAPV_CS_GET_BIT_DEPTH(imgbOutput->cs) == 10) {
+ const uint16_t* srcY = (const uint16_t*)imgbOutput->a[0];
+ const uint16_t* srcU = (const uint16_t*)imgbOutput->a[1];
+ const uint16_t* srcV = (const uint16_t*)imgbOutput->a[2];
+ size_t srcYStride = imgbOutput->s[0] / 2;
+ size_t srcUStride = imgbOutput->s[1] / 2;
+ size_t srcVStride = imgbOutput->s[2] / 2;
+ dstYStride /= 2;
+ dstUStride /= 2;
+ dstVStride /= 2;
+ if (OAPV_CS_GET_FORMAT(imgbOutput->cs) == OAPV_CF_YCBCR420) {
+ ALOGV("OAPV_CS_YUV420 10bit to P010");
+ copyBufferFromYUV42010bitToP010((uint16_t*)dstY, (uint16_t*)dstU, srcY, srcU, srcV,
+ srcYStride, srcUStride, srcVStride, dstYStride,
+ dstUStride, mWidth, mHeight);
+ } else if (OAPV_CS_GET_FORMAT(imgbOutput->cs) == OAPV_CF_YCBCR422) {
+ ALOGV("OAPV_CS_YUV422 10bit to P010");
+ copyBufferFromYUV42210bitToP010((uint16_t*)dstY, (uint16_t*)dstU, srcY, srcU, srcV,
+ srcYStride, srcUStride, srcVStride, dstYStride,
+ dstUStride, mWidth, mHeight);
+ } else if (OAPV_CS_GET_FORMAT(imgbOutput->cs) == OAPV_CF_PLANAR2) {
+ ALOGV("OAPV_CS_P210 to P010");
+ copyBufferFromP210ToP010((uint16_t*)dstY, (uint16_t*)dstU, srcY, srcU, srcYStride,
+ srcUStride, dstYStride, dstUStride, mWidth, mHeight);
+ } else {
+ ALOGE("Not supported convert format : %d", OAPV_CS_GET_FORMAT(imgbOutput->cs));
+ }
+ } else {
+ ALOGE("Not supported convder from bd:%d, format: %d(%s), to format: %d(%s)",
+ OAPV_CS_GET_BIT_DEPTH(imgbOutput->cs), OAPV_CS_GET_FORMAT(imgbOutput->cs),
+ OAPV_CS_GET_FORMAT(imgbOutput->cs) == OAPV_CF_YCBCR420
+ ? "YUV420"
+ : (OAPV_CS_GET_FORMAT(imgbOutput->cs) == OAPV_CF_YCBCR422 ? "YUV422"
+ : "UNKNOWN"),
+ format,
+ format == HAL_PIXEL_FORMAT_YCBCR_P010
+ ? "P010"
+ : (format == HAL_PIXEL_FORMAT_YCBCR_420_888
+ ? "YUV420"
+ : (format == HAL_PIXEL_FORMAT_YV12 ? "YV12" : "UNKNOWN")));
+ }
+ } else { // HAL_PIXEL_FORMAT_YV12
+ if (OAPV_CS_GET_BIT_DEPTH(imgbOutput->cs) == 10) {
+ const uint16_t* srcY = (const uint16_t*)imgbOutput->a[0];
+ const uint16_t* srcV = (const uint16_t*)imgbOutput->a[1];
+ const uint16_t* srcU = (const uint16_t*)imgbOutput->a[2];
+ size_t srcYStride = imgbOutput->s[0] / 2;
+ size_t srcVStride = imgbOutput->s[1] / 2;
+ size_t srcUStride = imgbOutput->s[2] / 2;
+ if (OAPV_CS_GET_FORMAT(imgbOutput->cs) == OAPV_CF_YCBCR420) {
+ ALOGV("OAPV_CS_YUV420 10bit to YV12");
+ copyBufferFromYUV42010bitToYV12(dstY, dstU, dstV, srcY, srcU, srcV, srcYStride,
+ srcUStride, srcVStride, dstYStride, dstUStride,
+ dstVStride, mWidth, mHeight);
+ } else if (OAPV_CS_GET_FORMAT(imgbOutput->cs) == OAPV_CF_YCBCR422) {
+ ALOGV("OAPV_CS_YUV422 10bit to YV12");
+ copyBufferFromYUV42210bitToYV12(dstY, dstU, dstV, srcY, srcU, srcV, srcYStride,
+ srcUStride, srcVStride, dstYStride, dstUStride,
+ dstVStride, mWidth, mHeight);
+ } else if (OAPV_CS_GET_FORMAT(imgbOutput->cs) == OAPV_CF_PLANAR2) {
+ ALOGV("OAPV_CS_P210 to YV12");
+ copyBufferFromP210ToYV12(dstY, dstU, dstV, srcY, srcV, srcYStride, srcVStride,
+ dstYStride, dstUStride, dstVStride, mWidth, mHeight);
+ } else {
+ ALOGE("Not supported convert format : %d", OAPV_CS_GET_FORMAT(imgbOutput->cs));
+ }
+ } else if (OAPV_CS_GET_BIT_DEPTH(imgbOutput->cs) == 8) {
+ const uint8_t* srcY = (const uint8_t*)imgbOutput->a[0];
+ const uint8_t* srcV = (const uint8_t*)imgbOutput->a[1];
+ const uint8_t* srcU = (const uint8_t*)imgbOutput->a[2];
+ size_t srcYStride = imgbOutput->s[0];
+ size_t srcVStride = imgbOutput->s[1];
+ size_t srcUStride = imgbOutput->s[2];
+ if (OAPV_CS_GET_FORMAT(imgbOutput->cs) == OAPV_CF_YCBCR420) {
+ ALOGV("OAPV_CS_YUV420 to YV12");
+ copyBufferFromYUV420ToYV12(dstY, dstU, dstV, srcY, srcU, srcV, srcYStride,
+ srcUStride, srcVStride, dstYStride, dstUStride,
+ dstVStride, mWidth, mHeight);
+ } else if (OAPV_CS_GET_FORMAT(imgbOutput->cs) == OAPV_CF_YCBCR422) {
+ ALOGV("OAPV_CS_YUV422 to YV12");
+ copyBufferFromYUV422ToYV12(dstY, dstU, dstV, srcY, srcU, srcV, srcYStride,
+ srcUStride, srcVStride, dstYStride, dstUStride,
+ dstVStride, mWidth, mHeight);
+ } else {
+ ALOGE("Not supported convert format : %d", OAPV_CS_GET_FORMAT(imgbOutput->cs));
+ }
+ } else {
+ ALOGE("Not supported convert from bd:%d, format: %d(%s), to format: %d(%s)",
+ OAPV_CS_GET_BIT_DEPTH(imgbOutput->cs), OAPV_CS_GET_FORMAT(imgbOutput->cs),
+ OAPV_CS_GET_FORMAT(imgbOutput->cs) == OAPV_CF_YCBCR420
+ ? "YUV420"
+ : (OAPV_CS_GET_FORMAT(imgbOutput->cs) == OAPV_CF_YCBCR422 ? "YUV422"
+ : "UNKNOWN"),
+ format,
+ format == HAL_PIXEL_FORMAT_YCBCR_P010
+ ? "P010"
+ : (format == HAL_PIXEL_FORMAT_YCBCR_420_888
+ ? "YUV420"
+ : (format == HAL_PIXEL_FORMAT_YV12 ? "YV12" : "UNKNOWN")));
+ }
+ }
+
+ finishWork(work->input.ordinal.frameIndex.peekll(), work, std::move(block));
+ return OK;
+}
+
+c2_status_t C2SoftApvDec::drainInternal(uint32_t drainMode,
+ const std::shared_ptr<C2BlockPool>& pool,
+ const std::unique_ptr<C2Work>& work) {
+ if (drainMode == NO_DRAIN) {
+ ALOGW("drain with NO_DRAIN: no-op");
+ return C2_OK;
+ }
+ if (drainMode == DRAIN_CHAIN) {
+ ALOGW("DRAIN_CHAIN not supported");
+ return C2_OMITTED;
+ }
+
+ if (drainMode == DRAIN_COMPONENT_WITH_EOS && work && work->workletsProcessed == 0u) {
+ fillEmptyWork(work);
+ }
+ return C2_OK;
+}
+
+c2_status_t C2SoftApvDec::drain(uint32_t drainMode, const std::shared_ptr<C2BlockPool>& pool) {
+ return drainInternal(drainMode, pool, nullptr);
+}
+
+class C2SoftApvDecFactory : public C2ComponentFactory {
+ public:
+ C2SoftApvDecFactory()
+ : mHelper(std::static_pointer_cast<C2ReflectorHelper>(
+ GetCodec2PlatformComponentStore()->getParamReflector())) {}
+
+ virtual c2_status_t createComponent(c2_node_id_t id,
+ std::shared_ptr<C2Component>* const component,
+ std::function<void(C2Component*)> deleter) override {
+ *component = std::shared_ptr<C2Component>(
+ new C2SoftApvDec(COMPONENT_NAME, id,
+ std::make_shared<C2SoftApvDec::IntfImpl>(mHelper)),
+ deleter);
+ return C2_OK;
+ }
+
+ virtual c2_status_t createInterface(
+ c2_node_id_t id, std::shared_ptr<C2ComponentInterface>* const interface,
+ std::function<void(C2ComponentInterface*)> deleter) override {
+ *interface = std::shared_ptr<C2ComponentInterface>(
+ new SimpleInterface<C2SoftApvDec::IntfImpl>(
+ COMPONENT_NAME, id, std::make_shared<C2SoftApvDec::IntfImpl>(mHelper)),
+ deleter);
+ return C2_OK;
+ }
+
+ virtual ~C2SoftApvDecFactory() override = default;
+
+ private:
+ std::shared_ptr<C2ReflectorHelper> mHelper;
+};
+
+} // namespace android
+
+__attribute__((cfi_canonical_jump_table)) extern "C" ::C2ComponentFactory* CreateCodec2Factory() {
+ if (!android::media::swcodec::flags::apv_software_codec()) {
+ ALOGV("APV SW Codec is not enabled");
+ return nullptr;
+ }
+ return new ::android::C2SoftApvDecFactory();
+}
+
+__attribute__((cfi_canonical_jump_table)) extern "C" void DestroyCodec2Factory(
+ ::C2ComponentFactory* factory) {
+ delete factory;
+}
diff --git a/media/codec2/components/apv/C2SoftApvDec.h b/media/codec2/components/apv/C2SoftApvDec.h
new file mode 100644
index 0000000..f5beb8f
--- /dev/null
+++ b/media/codec2/components/apv/C2SoftApvDec.h
@@ -0,0 +1,134 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_C2_SOFT_APV_DEC_H_
+#define ANDROID_C2_SOFT_APV_DEC_H_
+
+#include <media/stagefright/foundation/ColorUtils.h>
+
+#include <SimpleC2Component.h>
+#include <inttypes.h>
+#include <atomic>
+
+#include "oapv.h"
+#include <C2SoftApvCommon.h>
+
+typedef unsigned int UWORD32;
+
+typedef enum {
+ IV_CHROMA_NA = 0xFFFFFFFF,
+ IV_YUV_420P = 0x1,
+ IV_YUV_422P = 0x2,
+ IV_420_UV_INTL = 0x3,
+ IV_YUV_422IBE = 0x4,
+ IV_YUV_422ILE = 0x5,
+ IV_YUV_444P = 0x6,
+ IV_YUV_411P = 0x7,
+ IV_GRAY = 0x8,
+ IV_RGB_565 = 0x9,
+ IV_RGB_24 = 0xa,
+ IV_YUV_420SP_UV = 0xb,
+ IV_YUV_420SP_VU = 0xc,
+ IV_YUV_422SP_UV = 0xd,
+ IV_YUV_422SP_VU = 0xe
+
+} IV_COLOR_FORMAT_T;
+
+typedef struct {
+ /**
+ * u4_size of the structure
+ */
+ UWORD32 u4_size;
+
+ /**
+ * Pointer to the API function pointer table of the codec
+ */
+ void* pv_fxns;
+
+ /**
+ * Pointer to the handle of the codec
+ */
+ void* pv_codec_handle;
+} iv_obj_t;
+
+namespace android {
+
+struct C2SoftApvDec final : public SimpleC2Component {
+ class IntfImpl;
+
+ C2SoftApvDec(const char* name, c2_node_id_t id, const std::shared_ptr<IntfImpl>& intfImpl);
+ virtual ~C2SoftApvDec();
+
+ // From SimpleC2Component
+ c2_status_t onInit() override;
+ c2_status_t onStop() override;
+ void onReset() override;
+ void onRelease() override;
+ c2_status_t onFlush_sm() override;
+ void process(const std::unique_ptr<C2Work>& work,
+ const std::shared_ptr<C2BlockPool>& pool) override;
+ c2_status_t drain(uint32_t drainMode, const std::shared_ptr<C2BlockPool>& pool) override;
+
+ private:
+ status_t createDecoder();
+ status_t initDecoder();
+ bool isConfigured() const;
+ void drainDecoder();
+ status_t setFlushMode();
+ status_t resetDecoder();
+ void resetPlugin();
+ status_t deleteDecoder();
+ void finishWork(uint64_t index, const std::unique_ptr<C2Work>& work,
+ const std::shared_ptr<C2GraphicBlock>& block);
+ void drainRingBuffer(const std::unique_ptr<C2Work>& work,
+ const std::shared_ptr<C2BlockPool>& pool, bool eos);
+ c2_status_t drainInternal(uint32_t drainMode, const std::shared_ptr<C2BlockPool>& pool,
+ const std::unique_ptr<C2Work>& work);
+
+ status_t outputBuffer(const std::shared_ptr<C2BlockPool>& pool,
+ const std::unique_ptr<C2Work>& work);
+
+ std::shared_ptr<IntfImpl> mIntf;
+ iv_obj_t* mDecHandle;
+ uint8_t* mOutBufferFlush;
+ IV_COLOR_FORMAT_T mIvColorformat;
+ uint32_t mOutputDelay;
+ bool mHeaderDecoded;
+ std::atomic_uint64_t mOutIndex;
+ std::shared_ptr<C2GraphicBlock> mOutBlock;
+
+ std::shared_ptr<C2StreamPixelFormatInfo::output> mPixelFormatInfo;
+
+ std::shared_ptr<C2StreamPictureSizeInfo::input> mSize;
+ uint32_t mHalPixelFormat;
+ uint32_t mWidth;
+ uint32_t mHeight;
+ bool mSignalledOutputEos;
+ bool mSignalledError;
+
+ oapvd_t oapvdHandle;
+ oapvm_t oapvmHandle;
+ oapvd_cdesc_t cdesc;
+ oapv_frms_t ofrms;
+
+ int outputCsp;
+
+ C2_DO_NOT_COPY(C2SoftApvDec);
+};
+
+} // namespace android
+
+#endif
diff --git a/media/codec2/components/apv/C2SoftApvEnc.cpp b/media/codec2/components/apv/C2SoftApvEnc.cpp
new file mode 100644
index 0000000..a61cfd6
--- /dev/null
+++ b/media/codec2/components/apv/C2SoftApvEnc.cpp
@@ -0,0 +1,1285 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "C2SoftApvEnc"
+#include <log/log.h>
+
+#include <android_media_swcodec_flags.h>
+
+#include <media/hardware/VideoAPI.h>
+#include <media/stagefright/MediaDefs.h>
+#include <media/stagefright/MediaErrors.h>
+#include <media/stagefright/MetaData.h>
+#include <media/stagefright/foundation/AUtils.h>
+
+#include <C2Debug.h>
+#include <C2PlatformSupport.h>
+#include <Codec2BufferUtils.h>
+#include <Codec2CommonUtils.h>
+#include <Codec2Mapper.h>
+#include <SimpleC2Interface.h>
+#include <media/stagefright/foundation/ABitReader.h>
+#include <util/C2InterfaceHelper.h>
+#include <cmath>
+#include <new>
+#include "C2SoftApvEnc.h"
+
+namespace android {
+
+namespace {
+
+constexpr char COMPONENT_NAME[] = "c2.android.apv.encoder";
+constexpr uint32_t kMinOutBufferSize = 524288;
+constexpr uint32_t kMaxBitstreamBufSize = 16 * 1024 * 1024;
+constexpr int32_t kApvQpMin = 0;
+constexpr int32_t kApvQpMax = 51;
+constexpr int32_t kApvDefaultQP = 32;
+
+#define PROFILE_APV_DEFAULT 0
+#define LEVEL_APV_DEFAULT 0
+#define MAX_NUM_FRMS (1) // supports only 1-frame input
+
+} // namespace
+
+class C2SoftApvEnc::IntfImpl : public SimpleInterface<void>::BaseParams {
+ public:
+ explicit IntfImpl(const std::shared_ptr<C2ReflectorHelper>& helper)
+ : SimpleInterface<void>::BaseParams(helper, COMPONENT_NAME, C2Component::KIND_ENCODER,
+ C2Component::DOMAIN_VIDEO, MEDIA_MIMETYPE_VIDEO_APV) {
+ noPrivateBuffers();
+ noInputReferences();
+ noOutputReferences();
+ noTimeStretch();
+ setDerivedInstance(this);
+
+ addParameter(DefineParam(mAttrib, C2_PARAMKEY_COMPONENT_ATTRIBUTES)
+ .withConstValue(new C2ComponentAttributesSetting(
+ C2Component::ATTRIB_IS_TEMPORAL))
+ .build());
+
+ addParameter(DefineParam(mUsage, C2_PARAMKEY_INPUT_STREAM_USAGE)
+ .withConstValue(new C2StreamUsageTuning::input(
+ 0u, (uint64_t)C2MemoryUsage::CPU_READ))
+ .build());
+
+ // matches size limits in codec library
+ addParameter(DefineParam(mSize, C2_PARAMKEY_PICTURE_SIZE)
+ .withDefault(new C2StreamPictureSizeInfo::input(0u, 320, 240))
+ .withFields({
+ C2F(mSize, width).inRange(2, 4096, 2),
+ C2F(mSize, height).inRange(2, 4096, 2),
+ })
+ .withSetter(SizeSetter)
+ .build());
+
+ // matches limits in codec library
+ addParameter(DefineParam(mBitrateMode, C2_PARAMKEY_BITRATE_MODE)
+ .withDefault(new C2StreamBitrateModeTuning::output(
+ 0u, C2Config::BITRATE_VARIABLE))
+ .withFields({C2F(mBitrateMode, value)
+ .oneOf({C2Config::BITRATE_CONST,
+ C2Config::BITRATE_VARIABLE,
+ C2Config::BITRATE_IGNORE})})
+ .withSetter(Setter<decltype(*mBitrateMode)>::StrictValueWithNoDeps)
+ .build());
+
+ addParameter(DefineParam(mBitrate, C2_PARAMKEY_BITRATE)
+ .withDefault(new C2StreamBitrateInfo::output(0u, 512000))
+ .withFields({C2F(mBitrate, value).inRange(512000, 240000000)})
+ .withSetter(BitrateSetter)
+ .build());
+
+ addParameter(DefineParam(mFrameRate, C2_PARAMKEY_FRAME_RATE)
+ .withDefault(new C2StreamFrameRateInfo::output(0u, 15.))
+ .withFields({C2F(mFrameRate, value).greaterThan(0.)})
+ .withSetter(Setter<decltype(*mFrameRate)>::StrictValueWithNoDeps)
+ .build());
+
+ addParameter(DefineParam(mQuality, C2_PARAMKEY_QUALITY)
+ .withDefault(new C2StreamQualityTuning::output(0u, 40))
+ .withFields({C2F(mQuality, value).inRange(0, 100)})
+ .withSetter(Setter<decltype(*mQuality)>::NonStrictValueWithNoDeps)
+ .build());
+
+ addParameter(
+ DefineParam(mProfileLevel, C2_PARAMKEY_PROFILE_LEVEL)
+ .withDefault(new C2StreamProfileLevelInfo::output(
+ 0u, C2Config::PROFILE_APV_422_10, LEVEL_APV_1_BAND_0))
+ .withFields({
+ C2F(mProfileLevel, profile).oneOf({C2Config::PROFILE_APV_422_10}),
+ C2F(mProfileLevel, level)
+ .oneOf({
+ C2Config::LEVEL_APV_1_BAND_0,
+ C2Config::LEVEL_APV_1_1_BAND_0,
+ C2Config::LEVEL_APV_2_BAND_0,
+ C2Config::LEVEL_APV_2_1_BAND_0,
+ C2Config::LEVEL_APV_3_BAND_0,
+ C2Config::LEVEL_APV_3_1_BAND_0,
+ C2Config::LEVEL_APV_4_BAND_0,
+ C2Config::LEVEL_APV_4_1_BAND_0,
+ C2Config::LEVEL_APV_5_BAND_0,
+ C2Config::LEVEL_APV_5_1_BAND_0,
+ C2Config::LEVEL_APV_6_BAND_0,
+ C2Config::LEVEL_APV_6_1_BAND_0,
+ C2Config::LEVEL_APV_7_BAND_0,
+ C2Config::LEVEL_APV_7_1_BAND_0,
+ C2Config::LEVEL_APV_1_BAND_1,
+ C2Config::LEVEL_APV_1_1_BAND_1,
+ C2Config::LEVEL_APV_2_BAND_1,
+ C2Config::LEVEL_APV_2_1_BAND_1,
+ C2Config::LEVEL_APV_3_BAND_1,
+ C2Config::LEVEL_APV_3_1_BAND_1,
+ C2Config::LEVEL_APV_4_BAND_1,
+ C2Config::LEVEL_APV_4_1_BAND_1,
+ C2Config::LEVEL_APV_5_BAND_1,
+ C2Config::LEVEL_APV_5_1_BAND_1,
+ C2Config::LEVEL_APV_6_BAND_1,
+ C2Config::LEVEL_APV_6_1_BAND_1,
+ C2Config::LEVEL_APV_7_BAND_1,
+ C2Config::LEVEL_APV_7_1_BAND_1,
+ C2Config::LEVEL_APV_1_BAND_2,
+ C2Config::LEVEL_APV_1_1_BAND_2,
+ C2Config::LEVEL_APV_2_BAND_2,
+ C2Config::LEVEL_APV_2_1_BAND_2,
+ C2Config::LEVEL_APV_3_BAND_2,
+ C2Config::LEVEL_APV_3_1_BAND_2,
+ C2Config::LEVEL_APV_4_BAND_2,
+ C2Config::LEVEL_APV_4_1_BAND_2,
+ C2Config::LEVEL_APV_5_BAND_2,
+ C2Config::LEVEL_APV_5_1_BAND_2,
+ C2Config::LEVEL_APV_6_BAND_2,
+ C2Config::LEVEL_APV_6_1_BAND_2,
+ C2Config::LEVEL_APV_7_BAND_2,
+ C2Config::LEVEL_APV_7_1_BAND_2,
+ C2Config::LEVEL_APV_1_BAND_3,
+ C2Config::LEVEL_APV_1_1_BAND_3,
+ C2Config::LEVEL_APV_2_BAND_3,
+ C2Config::LEVEL_APV_2_1_BAND_3,
+ C2Config::LEVEL_APV_3_BAND_3,
+ C2Config::LEVEL_APV_3_1_BAND_3,
+ C2Config::LEVEL_APV_4_BAND_3,
+ C2Config::LEVEL_APV_4_1_BAND_3,
+ C2Config::LEVEL_APV_5_BAND_3,
+ C2Config::LEVEL_APV_5_1_BAND_3,
+ C2Config::LEVEL_APV_6_BAND_3,
+ C2Config::LEVEL_APV_6_1_BAND_3,
+ C2Config::LEVEL_APV_7_BAND_3,
+ C2Config::LEVEL_APV_7_1_BAND_3,
+ }),
+ })
+ .withSetter(ProfileLevelSetter, mSize, mFrameRate, mBitrate)
+ .build());
+
+ addParameter(DefineParam(mColorAspects, C2_PARAMKEY_COLOR_ASPECTS)
+ .withDefault(new C2StreamColorAspectsInfo::input(
+ 0u, C2Color::RANGE_UNSPECIFIED, C2Color::PRIMARIES_UNSPECIFIED,
+ C2Color::TRANSFER_UNSPECIFIED, C2Color::MATRIX_UNSPECIFIED))
+ .withFields({C2F(mColorAspects, range)
+ .inRange(C2Color::RANGE_UNSPECIFIED,
+ C2Color::RANGE_OTHER),
+ C2F(mColorAspects, primaries)
+ .inRange(C2Color::PRIMARIES_UNSPECIFIED,
+ C2Color::PRIMARIES_OTHER),
+ C2F(mColorAspects, transfer)
+ .inRange(C2Color::TRANSFER_UNSPECIFIED,
+ C2Color::TRANSFER_OTHER),
+ C2F(mColorAspects, matrix)
+ .inRange(C2Color::MATRIX_UNSPECIFIED,
+ C2Color::MATRIX_OTHER)})
+ .withSetter(ColorAspectsSetter)
+ .build());
+
+ addParameter(DefineParam(mCodedColorAspects, C2_PARAMKEY_VUI_COLOR_ASPECTS)
+ .withDefault(new C2StreamColorAspectsInfo::output(
+ 0u, C2Color::RANGE_LIMITED, C2Color::PRIMARIES_UNSPECIFIED,
+ C2Color::TRANSFER_UNSPECIFIED, C2Color::MATRIX_UNSPECIFIED))
+ .withFields({C2F(mCodedColorAspects, range)
+ .inRange(C2Color::RANGE_UNSPECIFIED,
+ C2Color::RANGE_OTHER),
+ C2F(mCodedColorAspects, primaries)
+ .inRange(C2Color::PRIMARIES_UNSPECIFIED,
+ C2Color::PRIMARIES_OTHER),
+ C2F(mCodedColorAspects, transfer)
+ .inRange(C2Color::TRANSFER_UNSPECIFIED,
+ C2Color::TRANSFER_OTHER),
+ C2F(mCodedColorAspects, matrix)
+ .inRange(C2Color::MATRIX_UNSPECIFIED,
+ C2Color::MATRIX_OTHER)})
+ .withSetter(CodedColorAspectsSetter, mColorAspects)
+ .build());
+ std::vector<uint32_t> pixelFormats = {
+ HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
+ };
+ if (isHalPixelFormatSupported((AHardwareBuffer_Format)HAL_PIXEL_FORMAT_YCBCR_P010)) {
+ pixelFormats.push_back(HAL_PIXEL_FORMAT_YCBCR_P010);
+ }
+ addParameter(DefineParam(mPixelFormat, C2_PARAMKEY_PIXEL_FORMAT)
+ .withDefault(new C2StreamPixelFormatInfo::input(
+ 0u, HAL_PIXEL_FORMAT_YCBCR_P010))
+ .withFields({C2F(mPixelFormat, value).oneOf(pixelFormats)})
+ .withSetter((Setter<decltype(*mPixelFormat)>::StrictValueWithNoDeps))
+ .build());
+ }
+
+ static C2R BitrateSetter(bool mayBlock, C2P<C2StreamBitrateInfo::output>& me) {
+ (void)mayBlock;
+ C2R res = C2R::Ok();
+ // clamp to the declared minimum of the bitrate field
+ if (me.v.value < 512000) {
+ me.set().value = 512000;
+ }
+ return res;
+ }
+
+ static C2R SizeSetter(bool mayBlock, const C2P<C2StreamPictureSizeInfo::input>& oldMe,
+ C2P<C2StreamPictureSizeInfo::input>& me) {
+ (void)mayBlock;
+ C2R res = C2R::Ok();
+ if (!me.F(me.v.width).supportsAtAll(me.v.width)) {
+ res = res.plus(C2SettingResultBuilder::BadValue(me.F(me.v.width)));
+ me.set().width = oldMe.v.width;
+ }
+ if (!me.F(me.v.height).supportsAtAll(me.v.height)) {
+ res = res.plus(C2SettingResultBuilder::BadValue(me.F(me.v.height)));
+ me.set().height = oldMe.v.height;
+ }
+ return res;
+ }
+
+ static C2R ProfileLevelSetter(bool mayBlock, C2P<C2StreamProfileLevelInfo::output>& me,
+ const C2P<C2StreamPictureSizeInfo::input>& size,
+ const C2P<C2StreamFrameRateInfo::output>& frameRate,
+ const C2P<C2StreamBitrateInfo::output>& bitrate) {
+ (void)mayBlock;
+ (void)size;
+ (void)frameRate;
+ (void)bitrate;
+ if (!me.F(me.v.profile).supportsAtAll(me.v.profile)) {
+ me.set().profile = C2Config::PROFILE_APV_422_10;
+ }
+ if (!me.F(me.v.level).supportsAtAll(me.v.level)) {
+ me.set().level = LEVEL_APV_1_BAND_0;
+ }
+ return C2R::Ok();
+ }
+
+ static C2R ColorAspectsSetter(bool mayBlock, C2P<C2StreamColorAspectsInfo::input>& me) {
+ (void)mayBlock;
+ if (me.v.range > C2Color::RANGE_OTHER) {
+ me.set().range = C2Color::RANGE_OTHER;
+ }
+ if (me.v.primaries > C2Color::PRIMARIES_OTHER) {
+ me.set().primaries = C2Color::PRIMARIES_OTHER;
+ }
+ if (me.v.transfer > C2Color::TRANSFER_OTHER) {
+ me.set().transfer = C2Color::TRANSFER_OTHER;
+ }
+ if (me.v.matrix > C2Color::MATRIX_OTHER) {
+ me.set().matrix = C2Color::MATRIX_OTHER;
+ }
+ return C2R::Ok();
+ }
+
+ static C2R CodedColorAspectsSetter(bool mayBlock, C2P<C2StreamColorAspectsInfo::output>& me,
+ const C2P<C2StreamColorAspectsInfo::input>& coded) {
+ (void)mayBlock;
+ me.set().range = coded.v.range;
+ me.set().primaries = coded.v.primaries;
+ me.set().transfer = coded.v.transfer;
+ me.set().matrix = coded.v.matrix;
+ return C2R::Ok();
+ }
+
+ uint32_t getProfile_l() const {
+ int32_t profile = PROFILE_UNUSED;
+
+ switch (mProfileLevel->profile) {
+ case C2Config::PROFILE_APV_422_10:
+ profile = 33;
+ break;
+ case C2Config::PROFILE_APV_422_12:
+ profile = 44;
+ break;
+ case C2Config::PROFILE_APV_444_10:
+ profile = 55;
+ break;
+ case C2Config::PROFILE_APV_444_12:
+ profile = 66;
+ break;
+ case C2Config::PROFILE_APV_4444_10:
+ profile = 77;
+ break;
+ case C2Config::PROFILE_APV_4444_12:
+ profile = 88;
+ break;
+ case C2Config::PROFILE_APV_400_10:
+ profile = 99;
+ break;
+ default:
+ ALOGD("Unrecognized profile: %x", mProfileLevel->profile);
+ }
+ return profile;
+ }
+
+ uint32_t getLevel_l() const {
+ int32_t level = LEVEL_UNUSED;
+
+ // TODO: Add Band settings
+ switch (mProfileLevel->level) {
+ case C2Config::LEVEL_APV_1_BAND_0:
+ level = 10;
+ break;
+ case C2Config::LEVEL_APV_1_1_BAND_0:
+ level = 11;
+ break;
+ case C2Config::LEVEL_APV_2_BAND_0:
+ level = 20;
+ break;
+ case C2Config::LEVEL_APV_2_1_BAND_0:
+ level = 21;
+ break;
+ case C2Config::LEVEL_APV_3_BAND_0:
+ level = 30;
+ break;
+ case C2Config::LEVEL_APV_3_1_BAND_0:
+ level = 31;
+ break;
+ case C2Config::LEVEL_APV_4_BAND_0:
+ level = 40;
+ break;
+ case C2Config::LEVEL_APV_4_1_BAND_0:
+ level = 41;
+ break;
+ case C2Config::LEVEL_APV_5_BAND_0:
+ level = 50;
+ break;
+ case C2Config::LEVEL_APV_5_1_BAND_0:
+ level = 51;
+ break;
+ case C2Config::LEVEL_APV_6_BAND_0:
+ level = 60;
+ break;
+ case C2Config::LEVEL_APV_6_1_BAND_0:
+ level = 61;
+ break;
+ case C2Config::LEVEL_APV_7_BAND_0:
+ level = 70;
+ break;
+ case C2Config::LEVEL_APV_7_1_BAND_0:
+ level = 71;
+ break;
+ default:
+ ALOGD("Unrecognized level: %x", mProfileLevel->level);
+ }
+ // Convert to APV level_idc according to APV spec
+ return level * 3;
+ }
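+
+ // Illustrative mapping (assuming the level_idc = level * 30 convention noted
+ // above): LEVEL_APV_1_BAND_0 -> 10 -> level_idc 30, LEVEL_APV_4_1_BAND_0 ->
+ // 41 -> level_idc 123.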
+
+ int32_t getBitrateMode_l() const {
+ int32_t bitrateMode = OAPV_RC_CQP; // fall back to CQP if the mode is unrecognized
+
+ switch (mBitrateMode->value) {
+ case C2Config::BITRATE_CONST:
+ bitrateMode = OAPV_RC_CQP;
+ break;
+ case C2Config::BITRATE_VARIABLE:
+ bitrateMode = OAPV_RC_ABR;
+ break;
+ case C2Config::BITRATE_IGNORE:
+ bitrateMode = 0;
+ break;
+ default:
+ ALOGE("Unrecognized bitrate mode: %x", mBitrateMode->value);
+ }
+ return bitrateMode;
+ }
+
+ std::shared_ptr<C2StreamPictureSizeInfo::input> getSize_l() const { return mSize; }
+ std::shared_ptr<C2StreamFrameRateInfo::output> getFrameRate_l() const { return mFrameRate; }
+ std::shared_ptr<C2StreamBitrateInfo::output> getBitrate_l() const { return mBitrate; }
+ std::shared_ptr<C2StreamQualityTuning::output> getQuality_l() const { return mQuality; }
+ std::shared_ptr<C2StreamColorAspectsInfo::input> getColorAspects_l() const {
+ return mColorAspects;
+ }
+ std::shared_ptr<C2StreamColorAspectsInfo::output> getCodedColorAspects_l() const {
+ return mCodedColorAspects;
+ }
+ std::shared_ptr<C2StreamPictureQuantizationTuning::output> getPictureQuantization_l() const {
+ return mPictureQuantization;
+ }
+ std::shared_ptr<C2StreamProfileLevelInfo::output> getProfileLevel_l() const {
+ return mProfileLevel;
+ }
+ std::shared_ptr<C2StreamPixelFormatInfo::input> getPixelFormat_l() const {
+ return mPixelFormat;
+ }
+
+ private:
+ std::shared_ptr<C2StreamProfileLevelInfo::output> mProfileLevel;
+ std::shared_ptr<C2StreamUsageTuning::input> mUsage;
+ std::shared_ptr<C2StreamPictureSizeInfo::input> mSize;
+ std::shared_ptr<C2StreamFrameRateInfo::output> mFrameRate;
+ std::shared_ptr<C2StreamBitrateInfo::output> mBitrate;
+ std::shared_ptr<C2StreamBitrateModeTuning::output> mBitrateMode;
+ std::shared_ptr<C2StreamQualityTuning::output> mQuality;
+ std::shared_ptr<C2StreamColorAspectsInfo::input> mColorAspects;
+ std::shared_ptr<C2StreamColorAspectsInfo::output> mCodedColorAspects;
+ std::shared_ptr<C2StreamPictureQuantizationTuning::output> mPictureQuantization;
+ std::shared_ptr<C2StreamColorInfo::input> mColorFormat;
+ std::shared_ptr<C2StreamPixelFormatInfo::input> mPixelFormat;
+};
+
+C2SoftApvEnc::C2SoftApvEnc(const char* name, c2_node_id_t id,
+ const std::shared_ptr<IntfImpl>& intfImpl)
+ : SimpleC2Component(std::make_shared<SimpleInterface<IntfImpl>>(name, id, intfImpl)),
+ mIntf(intfImpl),
+ mColorFormat(OAPV_CF_PLANAR2),
+ mStarted(false),
+ mSignalledEos(false),
+ mSignalledError(false),
+ mOutBlock(nullptr) {
+ reset();
+}
+
+C2SoftApvEnc::~C2SoftApvEnc() {
+ onRelease();
+}
+
+c2_status_t C2SoftApvEnc::onInit() {
+ return C2_OK;
+}
+
+c2_status_t C2SoftApvEnc::onStop() {
+ return C2_OK;
+}
+
+void C2SoftApvEnc::onReset() {
+ releaseEncoder();
+ reset();
+}
+
+void C2SoftApvEnc::onRelease() {
+ releaseEncoder();
+}
+
+c2_status_t C2SoftApvEnc::onFlush_sm() {
+ return C2_OK;
+}
+
+static void fillEmptyWork(const std::unique_ptr<C2Work>& work) {
+ uint32_t flags = 0;
+ if (work->input.flags & C2FrameData::FLAG_END_OF_STREAM) {
+ flags |= C2FrameData::FLAG_END_OF_STREAM;
+ ALOGV("Signalling EOS");
+ }
+ work->worklets.front()->output.flags = (C2FrameData::flags_t)flags;
+ work->worklets.front()->output.buffers.clear();
+ work->worklets.front()->output.ordinal = work->input.ordinal;
+ work->workletsProcessed = 1u;
+}
+
+int32_t C2SoftApvEnc::getQpFromQuality(int32_t quality) {
+ int32_t qp = ((kApvQpMin - kApvQpMax) * quality / 100) + kApvQpMax;
+ qp = std::min(qp, (int)kApvQpMax);
+ qp = std::max(qp, (int)kApvQpMin);
+ return qp;
+}
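+
+// The linear mapping above runs from quality 0 -> kApvQpMax (51) down to
+// quality 100 -> kApvQpMin (0); e.g. quality 50 yields
+// ((0 - 51) * 50 / 100) + 51 = -25 + 51 = 26 with integer division.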
+
+c2_status_t C2SoftApvEnc::reset() {
+ ALOGV("reset");
+ mInitEncoder = false;
+ mStarted = false;
+ mSignalledEos = false;
+ mSignalledError = false;
+ mBitDepth = 10;
+ mMaxFrames = MAX_NUM_FRMS;
+ mReceivedFrames = 0;
+ mReceivedFirstFrame = false;
+ mColorFormat = OAPV_CF_PLANAR2;
+ memset(&mInputFrames, 0, sizeof(mInputFrames));
+ memset(&mReconFrames, 0, sizeof(mReconFrames));
+ return C2_OK;
+}
+
+c2_status_t C2SoftApvEnc::releaseEncoder() {
+ for (int32_t i = 0; i < MAX_NUM_FRMS; i++) {
+ if (mInputFrames.frm[i].imgb != nullptr) {
+ imgb_release(mInputFrames.frm[i].imgb);
+ mInputFrames.frm[i].imgb = nullptr;
+ }
+ }
+
+ if (mBitstreamBuf) {
+ delete[] mBitstreamBuf; // allocated with new[] in initEncoder()
+ mBitstreamBuf = nullptr;
+ }
+ return C2_OK;
+}
+
+c2_status_t C2SoftApvEnc::drain(uint32_t drainMode, const std::shared_ptr<C2BlockPool>& pool) {
+ return drainInternal(drainMode, pool, nullptr);
+}
+
+void C2SoftApvEnc::showEncoderParams(oapve_cdesc_t* cdsc) {
+ std::string title = "APV encoder params:";
+ ALOGD("%s width = %d, height = %d", title.c_str(), cdsc->param[0].w, cdsc->param[0].h);
+ ALOGD("%s FrameRate = %f", title.c_str(),
+ (double)cdsc->param[0].fps_num / cdsc->param[0].fps_den);
+ ALOGD("%s BitRate = %d Kbps", title.c_str(), cdsc->param[0].bitrate);
+ ALOGD("%s QP = %d", title.c_str(), cdsc->param[0].qp);
+ ALOGD("%s profile_idc = %d, level_idc = %d, band_idc = %d", title.c_str(),
+ cdsc->param[0].profile_idc, cdsc->param[0].level_idc / 3, cdsc->param[0].band_idc);
+ ALOGD("%s Bitrate Mode: %d", title.c_str(), cdsc->param[0].rc_type);
+ ALOGD("%s mColorAspects primaries: %d, transfer: %d, matrix: %d, range: %d", title.c_str(),
+ mColorAspects->primaries, mColorAspects->transfer, mColorAspects->matrix,
+ mColorAspects->range);
+ ALOGD("%s mCodedColorAspects primaries: %d, transfer: %d, matrix: %d, range: %d", title.c_str(),
+ mCodedColorAspects->primaries, mCodedColorAspects->transfer, mCodedColorAspects->matrix,
+ mCodedColorAspects->range);
+ ALOGD("%s Input color format: %s", title.c_str(),
+ mColorFormat == OAPV_CF_YCBCR422 ? "YUV422P10LE" : "P210");
+ ALOGD("%s max_num_frms: %d", title.c_str(), cdsc->max_num_frms);
+}
+
+c2_status_t C2SoftApvEnc::initEncoder() {
+ if (mInitEncoder) {
+ return C2_OK;
+ }
+ ALOGV("initEncoder");
+
+ mSize = mIntf->getSize_l();
+ mFrameRate = mIntf->getFrameRate_l();
+ mBitrate = mIntf->getBitrate_l();
+ mQuality = mIntf->getQuality_l();
+ mColorAspects = mIntf->getColorAspects_l();
+ mCodedColorAspects = mIntf->getCodedColorAspects_l();
+ mProfileLevel = mIntf->getProfileLevel_l();
+ mPixelFormat = mIntf->getPixelFormat_l();
+
+ mCodecDesc = std::make_unique<oapve_cdesc_t>();
+ if (mCodecDesc == nullptr) {
+ ALOGE("Allocate ctx failed");
+ return C2_NO_INIT;
+ }
+ mCodecDesc->max_bs_buf_size = kMaxBitstreamBufSize;
+ mCodecDesc->max_num_frms = MAX_NUM_FRMS;
+ // TODO: Bound parameters to CPU count
+ mCodecDesc->threads = 4;
+
+ int32_t ret = C2_OK;
+ /* set params */
+ for (int32_t i = 0; i < mMaxFrames; i++) {
+ oapve_param_t* param = &mCodecDesc->param[i];
+ ret = oapve_param_default(param);
+ if (OAPV_FAILED(ret)) {
+ ALOGE("cannot set default parameter");
+ return C2_NO_INIT;
+ }
+ setParams(*param);
+ }
+
+ showEncoderParams(mCodecDesc.get());
+
+ /* create encoder */
+ mEncoderId = oapve_create(mCodecDesc.get(), NULL);
+ if (mEncoderId == NULL) {
+ ALOGE("cannot create APV encoder");
+ return C2_CORRUPTED;
+ }
+
+ /* create metadata */
+ mMetaId = oapvm_create(&ret);
+ if (mMetaId == NULL) {
+ ALOGE("cannot create APV encoder");
+ return C2_NO_MEMORY;
+ }
+
+ /* create image buffers */
+ for (int32_t i = 0; i < mMaxFrames; i++) {
+ if (mBitDepth == 10) {
+ mInputFrames.frm[i].imgb = imgb_create(mCodecDesc->param[0].w, mCodecDesc->param[0].h,
+ OAPV_CS_SET(mColorFormat, mBitDepth, 0));
+ mReconFrames.frm[i].imgb = nullptr;
+ } else {
+ mInputFrames.frm[i].imgb = imgb_create(mCodecDesc->param[0].w, mCodecDesc->param[0].h,
+ OAPV_CS_SET(mColorFormat, 10, 0));
+ mReconFrames.frm[i].imgb = nullptr;
+ }
+ }
+
+ /* allocate bitstream buffer */
+ // plain new would throw instead of returning nullptr, making the check below dead
+ mBitstreamBuf = new (std::nothrow) unsigned char[kMaxBitstreamBufSize];
+ if (mBitstreamBuf == nullptr) {
+ ALOGE("cannot allocate bitstream buffer, size= %d", kMaxBitstreamBufSize);
+ return C2_NO_MEMORY;
+ }
+
+ /* Calculate SDR to HDR mapping values */
+ mSdrToHdrMapping.clear();
+ for (int32_t i = 0; i < 256; i++) {
+ mSdrToHdrMapping.push_back((uint16_t)(i * 1023.0 / 255.0 + 0.5));
+ }
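+ // The table maps each 8-bit code i to round(i * 1023 / 255), stretching the
+ // full 8-bit range onto 10 bits; e.g. 0 -> 0, 128 -> 514, 255 -> 1023.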
+
+ mStarted = true;
+ mInitEncoder = true;
+ return C2_OK;
+}
+
+void C2SoftApvEnc::setParams(oapve_param_t& param) {
+ param.w = mSize->width;
+ param.h = mSize->height;
+ param.fps_num = (int)(mFrameRate->value * 100 + 0.5); // round, don't truncate
+ param.fps_den = 100;
+ param.bitrate = mBitrate->value / 1000;
+ param.rc_type = mIntf->getBitrateMode_l();
+
+ int ApvQP = kApvDefaultQP;
+ if (param.rc_type == OAPV_RC_CQP) {
+ ApvQP = getQpFromQuality(mQuality->value);
+ ALOGI("Bitrate mode is CQ, so QP value is derived from Quality. Quality is %d, QP is %d",
+ mQuality->value, ApvQP);
+ }
+ param.qp = ApvQP;
+ param.band_idc = 0; // TODO: Get from the Level setting
+ param.profile_idc = mIntf->getProfile_l();
+ C2Config::level_t level = decisionApvLevel(
+ param.w, param.h, (int)(param.fps_num / param.fps_den), param.bitrate, param.band_idc);
+ if (mProfileLevel->level != level) {
+ mProfileLevel->level = level;
+ ALOGI("Need to update level to %d", mIntf->getLevel_l());
+ }
+ param.level_idc = mIntf->getLevel_l();
+}
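+
+// Frame rate is carried as a rational with a fixed denominator of 100 so that
+// fractional rates survive, e.g. 29.97 fps -> fps_num = 2997, fps_den = 100
+// (with the rounding above), and 30 fps -> 3000/100.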
+
+c2_status_t C2SoftApvEnc::setEncodeArgs(oapv_frms_t* inputFrames, const C2GraphicView* const input,
+ uint64_t workIndex) {
+ if (input->width() < mSize->width || input->height() < mSize->height) {
+ /* Expect width height to be configured */
+ ALOGW("unexpected Capacity Aspect %d(%d) x %d(%d)", input->width(), mSize->width,
+ input->height(), mSize->height);
+ return C2_BAD_VALUE;
+ }
+ const C2PlanarLayout& layout = input->layout();
+ uint8_t* yPlane = const_cast<uint8_t*>(input->data()[C2PlanarLayout::PLANE_Y]);
+ uint8_t* uPlane = const_cast<uint8_t*>(input->data()[C2PlanarLayout::PLANE_U]);
+ uint8_t* vPlane = const_cast<uint8_t*>(input->data()[C2PlanarLayout::PLANE_V]);
+ int32_t yStride = layout.planes[C2PlanarLayout::PLANE_Y].rowInc;
+ int32_t uStride = layout.planes[C2PlanarLayout::PLANE_U].rowInc;
+ int32_t vStride = layout.planes[C2PlanarLayout::PLANE_V].rowInc;
+
+ uint32_t width = mSize->width;
+ uint32_t height = mSize->height;
+
+ /* width and height must be even */
+ if (width & 1u || height & 1u) {
+ ALOGW("height(%u) and width(%u) must both be even", height, width);
+ return C2_BAD_VALUE;
+ }
+
+ /* Set num frames */
+ inputFrames->num_frms = MAX_NUM_FRMS;
+ inputFrames->frm[mReceivedFrames].group_id = 1;
+ inputFrames->frm[mReceivedFrames].pbu_type = OAPV_PBU_TYPE_PRIMARY_FRAME;
+
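+    /* The encoder consumes 10-bit 4:2:2 input (planar YUV422P10le or semi-planar P210);
+       8-bit NV12/NV21 and 10-bit P010 sources are converted below. */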
+ switch (layout.type) {
+ case C2PlanarLayout::TYPE_RGB:
+ [[fallthrough]];
+ case C2PlanarLayout::TYPE_RGBA: {
+ // TODO: Add RGBA1010102 support
+ ALOGE("Not supported RGB color format");
+ return C2_BAD_VALUE;
+ }
+        case C2PlanarLayout::TYPE_YUV: {
+            if (IsP010(*input)) {
+                if (mColorFormat == OAPV_CF_YCBCR422) {
+                    ColorConvertP010ToYUV422P10le(input, inputFrames->frm[0].imgb);
+                } else if (mColorFormat == OAPV_CF_PLANAR2) {
+                    ColorConvertP010ToP210(input, inputFrames->frm[0].imgb);
+                } else {
+                    ALOGE("Unsupported color format: %d", mColorFormat);
+                    return C2_BAD_VALUE;
+                }
+            } else if (IsNV12(*input) || IsNV21(*input)) {
+                // NV21 reuses the NV12 path, so its Cb/Cr order is not swapped here.
+                ColorConvertNv12ToP210(input, inputFrames->frm[0].imgb);
+            } else if (IsYUV420(*input) || IsI420(*input)) {
+                ALOGE("8-bit planar YUV420 input is not supported");
+                return C2_BAD_VALUE;
+            } else {
+                ALOGE("Unsupported input pixel format (configured color format: %d)",
+                      mColorFormat);
+                return C2_BAD_VALUE;
+            }
+            break;
+        }
+
+ default:
+ ALOGE("Unrecognized plane type: %d", layout.type);
+ return C2_BAD_VALUE;
+ }
+
+ return C2_OK;
+}
+
+void C2SoftApvEnc::ColorConvertNv12ToP210(const C2GraphicView* const input, oapv_imgb_t* imgb) {
+ auto width = input->width();
+ auto height = input->height();
+
+ auto* yPlane = (uint8_t*)input->data()[0];
+ auto* uvPlane = (uint8_t*)input->data()[1];
+
+ auto* dst = (uint16_t*)imgb->a[0];
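+    /* Widen each mapped 10-bit sample to a 16-bit MSB-aligned P210 sample: shift into the
+       high bits and replicate the top bits into the low bits. */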
+ int32_t lumaOffset = 0;
+ for (int32_t y = 0; y < height; ++y) {
+ for (int32_t x = 0; x < width; ++x) {
+ lumaOffset = y * width + x;
+ dst[lumaOffset] = (mSdrToHdrMapping[yPlane[lumaOffset]] << 6) |
+ ((mSdrToHdrMapping[yPlane[lumaOffset]] & 0x300) >> 3);
+ }
+ }
+
+    auto* dst_uv = (uint16_t*)imgb->a[1];
+    int32_t srcOffset = 0;
+    int32_t dstOffset1 = 0, dstOffset2 = 0;
+    int32_t tmp1 = 0, tmp2 = 0;
+ for (int32_t y = 0; y < height / 2; ++y) {
+ for (int32_t x = 0; x < width; x += 2) {
+ srcOffset = y * width + x;
+ dstOffset1 = (y * 2) * width + x;
+ dstOffset2 = ((y * 2) + 1) * width + x;
+
+ tmp1 = (mSdrToHdrMapping[uvPlane[srcOffset]] << 6) |
+ ((mSdrToHdrMapping[uvPlane[srcOffset]] & 0x300) >> 3);
+ tmp2 = (mSdrToHdrMapping[uvPlane[srcOffset + 1]] << 6) |
+ ((mSdrToHdrMapping[uvPlane[srcOffset + 1]] & 0x300) >> 3);
+ dst_uv[dstOffset1] = (uint16_t)tmp1;
+ dst_uv[dstOffset1 + 1] = (uint16_t)tmp2;
+ dst_uv[dstOffset2] = (uint16_t)tmp1;
+ dst_uv[dstOffset2 + 1] = (uint16_t)tmp2;
+ }
+ }
+}
+
+C2Config::level_t C2SoftApvEnc::decisionApvLevel(int32_t width, int32_t height, int32_t fps,
+ int32_t bitrate, int32_t band) {
+ C2Config::level_t level = C2Config::LEVEL_APV_1_BAND_0;
+
+ struct LevelLimits {
+ C2Config::level_t level;
+ uint64_t samplesPerSec;
+ uint32_t bitratesOfBand;
+ };
+
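+    /* Per-band limits: samplesPerSec is in luma samples per second and bitratesOfBand is in
+       kbps, matching param.bitrate as derived in setParams() (bps / 1000). */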
+ constexpr LevelLimits kLimitsBand0[] = {
+ {LEVEL_APV_1_BAND_0, 3'041'280, 7'000},
+ {LEVEL_APV_1_1_BAND_0, 6'082'560, 14'000},
+ {LEVEL_APV_2_BAND_0, 15'667'200, 36'000},
+ {LEVEL_APV_2_1_BAND_0, 31'334'400, 71'000},
+ {LEVEL_APV_3_BAND_0, 66'846'720, 101'000},
+ {LEVEL_APV_3_1_BAND_0, 133'693'440, 201'000},
+ {LEVEL_APV_4_BAND_0, 265'420'800, 401'000},
+ {LEVEL_APV_4_1_BAND_0, 530'841'600, 780'000},
+ {LEVEL_APV_5_BAND_0, 1'061'683'200, 1'560'000},
+ {LEVEL_APV_5_1_BAND_0, 2'123'366'400, 3'324'000},
+ {LEVEL_APV_6_BAND_0, 4'777'574'400, 6'648'000},
+ {LEVEL_APV_6_1_BAND_0, 8'493'465'600, 13'296'000},
+ {LEVEL_APV_7_BAND_0, 16'986'931'200, 26'592'000},
+ {LEVEL_APV_7_1_BAND_0, 33'973'862'400, 53'184'000},
+ };
+
+ constexpr LevelLimits kLimitsBand1[] = {
+ {LEVEL_APV_1_BAND_1, 3'041'280, 11'000},
+ {LEVEL_APV_1_1_BAND_1, 6'082'560, 21'000},
+ {LEVEL_APV_2_BAND_1, 15'667'200, 53'000},
+            {LEVEL_APV_2_1_BAND_1, 31'334'400, 106'000},
+ {LEVEL_APV_3_BAND_1, 66'846'720, 151'000},
+ {LEVEL_APV_3_1_BAND_1, 133'693'440, 301'000},
+ {LEVEL_APV_4_BAND_1, 265'420'800, 602'000},
+ {LEVEL_APV_4_1_BAND_1, 530'841'600, 1'170'000},
+ {LEVEL_APV_5_BAND_1, 1'061'683'200, 2'340'000},
+ {LEVEL_APV_5_1_BAND_1, 2'123'366'400, 4'986'000},
+ {LEVEL_APV_6_BAND_1, 4'777'574'400, 9'972'000},
+ {LEVEL_APV_6_1_BAND_1, 8'493'465'600, 19'944'000},
+ {LEVEL_APV_7_BAND_1, 16'986'931'200, 39'888'000},
+ {LEVEL_APV_7_1_BAND_1, 33'973'862'400, 79'776'000},
+ };
+
+ constexpr LevelLimits kLimitsBand2[] = {
+ {LEVEL_APV_1_BAND_2, 3'041'280, 14'000},
+ {LEVEL_APV_1_1_BAND_2, 6'082'560, 28'000},
+ {LEVEL_APV_2_BAND_2, 15'667'200, 71'000},
+ {LEVEL_APV_2_1_BAND_2, 31'334'400, 141'000},
+ {LEVEL_APV_3_BAND_2, 66'846'720, 201'000},
+ {LEVEL_APV_3_1_BAND_2, 133'693'440, 401'000},
+ {LEVEL_APV_4_BAND_2, 265'420'800, 780'000},
+ {LEVEL_APV_4_1_BAND_2, 530'841'600, 1'560'000},
+ {LEVEL_APV_5_BAND_2, 1'061'683'200, 3'324'000},
+ {LEVEL_APV_5_1_BAND_2, 2'123'366'400, 6'648'000},
+ {LEVEL_APV_6_BAND_2, 4'777'574'400, 13'296'000},
+ {LEVEL_APV_6_1_BAND_2, 8'493'465'600, 26'592'000},
+ {LEVEL_APV_7_BAND_2, 16'986'931'200, 53'184'000},
+ {LEVEL_APV_7_1_BAND_2, 33'973'862'400, 106'368'000},
+ };
+
+ constexpr LevelLimits kLimitsBand3[] = {
+ {LEVEL_APV_1_BAND_3, 3'041'280, 21'000},
+ {LEVEL_APV_1_1_BAND_3, 6'082'560, 42'000},
+ {LEVEL_APV_2_BAND_3, 15'667'200, 106'000},
+ {LEVEL_APV_2_1_BAND_3, 31'334'400, 212'000},
+ {LEVEL_APV_3_BAND_3, 66'846'720, 301'000},
+ {LEVEL_APV_3_1_BAND_3, 133'693'440, 602'000},
+ {LEVEL_APV_4_BAND_3, 265'420'800, 1'170'000},
+ {LEVEL_APV_4_1_BAND_3, 530'841'600, 2'340'000},
+ {LEVEL_APV_5_BAND_3, 1'061'683'200, 4'986'000},
+ {LEVEL_APV_5_1_BAND_3, 2'123'366'400, 9'972'000},
+ {LEVEL_APV_6_BAND_3, 4'777'574'400, 19'944'000},
+ {LEVEL_APV_6_1_BAND_3, 8'493'465'600, 39'888'000},
+ {LEVEL_APV_7_BAND_3, 16'986'931'200, 79'776'000},
+ {LEVEL_APV_7_1_BAND_3, 33'973'862'400, 159'552'000},
+ };
+
+    uint64_t samplesPerSec = (uint64_t)width * height * fps;
+
+    const LevelLimits* limits = nullptr;
+    size_t numLimits = 0;
+    if (band == 0) {
+        limits = kLimitsBand0;
+        numLimits = sizeof(kLimitsBand0) / sizeof(kLimitsBand0[0]);
+    } else if (band == 1) {
+        limits = kLimitsBand1;
+        numLimits = sizeof(kLimitsBand1) / sizeof(kLimitsBand1[0]);
+    } else if (band == 2) {
+        limits = kLimitsBand2;
+        numLimits = sizeof(kLimitsBand2) / sizeof(kLimitsBand2[0]);
+    } else if (band == 3) {
+        limits = kLimitsBand3;
+        numLimits = sizeof(kLimitsBand3) / sizeof(kLimitsBand3[0]);
+    } else {
+        ALOGE("Invalid band_idc while calculating the level");
+        return level;
+    }
+
+    /* Pick the first (smallest) level whose sample-rate and bitrate limits are not exceeded. */
+    for (size_t i = 0; i < numLimits; i++) {
+        if (samplesPerSec <= limits[i].samplesPerSec &&
+            (uint32_t)bitrate <= limits[i].bitratesOfBand) {
+            level = limits[i].level;
+            break;
+        }
+    }
+
+    return level;
+}
+
+void C2SoftApvEnc::ColorConvertP010ToP210(const C2GraphicView* const input, oapv_imgb_t* imgb) {
+ auto width = input->width();
+ auto height = input->height();
+
+ auto* yPlane = (uint8_t*)input->data()[0];
+ auto* uvPlane = (uint8_t*)input->data()[1];
+ uint32_t uvStride = width * 2;
+
+ auto* src = yPlane;
+ auto* dst = (uint8_t*)imgb->a[0];
+ std::memcpy(dst, src, width * height * 2);
+
+ auto* dst_uv = (uint8_t*)imgb->a[1];
+ int32_t offset1 = 0, offset2 = 0;
+ for (int32_t y = 0; y < height / 2; ++y) {
+ offset1 = (y * 2) * uvStride;
+ offset2 = (y * 2 + 1) * uvStride;
+ src = uvPlane + (y * uvStride);
+
+ std::memcpy(dst_uv + offset1, src, uvStride);
+ std::memcpy(dst_uv + offset2, src, uvStride);
+ }
+}
+
+void C2SoftApvEnc::ColorConvertP010ToYUV422P10le(const C2GraphicView* const input,
+ oapv_imgb_t* imgb) {
+ uint32_t width = input->width();
+ uint32_t height = input->height();
+
+ uint8_t* yPlane = (uint8_t*)input->data()[0];
+ auto* uvPlane = (uint8_t*)input->data()[1];
+ uint32_t stride[3];
+ stride[0] = width * 2;
+ stride[1] = stride[2] = width;
+
+ uint8_t *dst, *src;
+ uint16_t tmp;
+ for (int32_t y = 0; y < height; ++y) {
+ src = yPlane + y * stride[0];
+ dst = (uint8_t*)imgb->a[0] + y * stride[0];
+ for (int32_t x = 0; x < stride[0]; x += 2) {
+ tmp = (src[x + 1] << 2) | (src[x] >> 6);
+ dst[x] = tmp & 0xFF;
+ dst[x + 1] = tmp >> 8;
+ }
+ }
+
+ uint8_t *dst_u, *dst_v;
+ for (int32_t y = 0; y < height / 2; ++y) {
+ src = uvPlane + y * stride[1] * 2;
+ dst_u = (uint8_t*)imgb->a[1] + (y * 2) * stride[1];
+ dst_v = (uint8_t*)imgb->a[2] + (y * 2) * stride[2];
+ for (int32_t x = 0; x < stride[1] * 2; x += 4) {
+ tmp = (src[x + 1] << 2) | (src[x] >> 6); // cb
+ dst_u[x / 2] = tmp & 0xFF;
+ dst_u[x / 2 + 1] = tmp >> 8;
+ dst_u[x / 2 + stride[1]] = dst_u[x / 2];
+ dst_u[x / 2 + stride[1] + 1] = dst_u[x / 2 + 1];
+
+ tmp = (src[x + 3] << 2) | (src[x + 2] >> 6); // cr
+ dst_v[x / 2] = tmp & 0xFF;
+ dst_v[x / 2 + 1] = tmp >> 8;
+ dst_v[x / 2 + stride[2]] = dst_v[x / 2];
+ dst_v[x / 2 + stride[2] + 1] = dst_v[x / 2 + 1];
+ }
+ }
+}
+
+void C2SoftApvEnc::finishWork(uint64_t workIndex, const std::unique_ptr<C2Work>& work,
+ const std::shared_ptr<C2BlockPool>& pool, oapv_bitb_t* bitb,
+ oapve_stat_t* stat) {
+ std::shared_ptr<C2LinearBlock> block;
+ C2MemoryUsage usage = {C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE};
+ c2_status_t status = pool->fetchLinearBlock(stat->write, usage, &block);
+ if (C2_OK != status) {
+ ALOGE("fetchLinearBlock for Output failed with status 0x%x", status);
+ mSignalledError = true;
+ work->result = status;
+ work->workletsProcessed = 1u;
+ return;
+ }
+
+ C2WriteView wView = block->map().get();
+ if (C2_OK != wView.error()) {
+ ALOGE("write view map failed with status 0x%x", wView.error());
+ mSignalledError = true;
+ work->result = wView.error();
+ work->workletsProcessed = 1u;
+ return;
+ }
+    if (!mReceivedFirstFrame) {
+ createCsdData(work, bitb, stat->write);
+ mReceivedFirstFrame = true;
+ }
+
+ memcpy(wView.data(), bitb->addr, stat->write);
+ std::shared_ptr<C2Buffer> buffer = createLinearBuffer(block, 0, stat->write);
+
+ /* All frames are SYNC FRAME */
+ buffer->setInfo(std::make_shared<C2StreamPictureTypeMaskInfo::output>(0u /* stream id */,
+ C2Config::SYNC_FRAME));
+
+ auto fillWork = [buffer](const std::unique_ptr<C2Work>& work) {
+ work->worklets.front()->output.flags = (C2FrameData::flags_t)0;
+ work->worklets.front()->output.buffers.clear();
+ work->worklets.front()->output.buffers.push_back(buffer);
+ work->worklets.front()->output.ordinal = work->input.ordinal;
+ work->workletsProcessed = 1u;
+ };
+ if (work && c2_cntr64_t(workIndex) == work->input.ordinal.frameIndex) {
+ fillWork(work);
+ if (mSignalledEos) {
+ work->worklets.front()->output.flags = C2FrameData::FLAG_END_OF_STREAM;
+ }
+ } else {
+ finish(workIndex, fillWork);
+ }
+}
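+
+/*
+ * Extract codec-specific data (CSD) from the first encoded frame: the bytes from the PBU type
+ * field through the end of the frame header are copied out and attached as a config update.
+ */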
+void C2SoftApvEnc::createCsdData(const std::unique_ptr<C2Work>& work, oapv_bitb_t* bitb,
+ uint32_t encodedSize) {
+ uint32_t csdStart = 0, csdEnd = 0;
+ uint32_t bitOffset = 0;
+ uint8_t* buf = (uint8_t*)bitb->addr + csdStart;
+
+ if (encodedSize == 0) {
+ ALOGE("the first frame size is zero, so no csd data will be created.");
+ return;
+ }
+ ABitReader reader(buf, encodedSize);
+
+ /* pbu_header() */
+ reader.skipBits(32);
+ bitOffset += 32; // pbu_size
+ reader.skipBits(32);
+ bitOffset += 32; // currReadSize
+ csdStart = bitOffset / 8;
+
+ int32_t pbu_type = reader.getBits(8);
+ bitOffset += 8; // pbu_type
+ reader.skipBits(16);
+ bitOffset += 16; // group_id
+ reader.skipBits(8);
+ bitOffset += 8; // reserved_zero_8bits
+
+ /* frame info() */
+ int32_t profile_idc = reader.getBits(8);
+ bitOffset += 8; // profile_idc
+ int32_t level_idc = reader.getBits(8);
+ bitOffset += 8; // level_idc
+ int32_t band_idc = reader.getBits(3);
+ bitOffset += 3; // band_idc
+ reader.skipBits(5);
+ bitOffset += 5; // reserved_zero_5bits
+ int32_t width = reader.getBits(32);
+ bitOffset += 32; // width
+ int32_t height = reader.getBits(32);
+ bitOffset += 32; // height
+ int32_t chroma_idc = reader.getBits(4);
+ bitOffset += 4; // chroma_format_idc
+ reader.skipBits(4);
+ bitOffset += 4; // bit_depth
+ reader.skipBits(8);
+ bitOffset += 8; // capture_time_distance
+ reader.skipBits(8);
+ bitOffset += 8; // reserved_zero_8bits
+
+ /* frame header() */
+ reader.skipBits(8);
+ bitOffset += 8; // reserved_zero_8bit
+ bool color_description_present_flag = reader.getBits(1);
+ bitOffset += 1; // color_description_present_flag
+ if (color_description_present_flag) {
+ reader.skipBits(8);
+ bitOffset += 8; // color_primaries
+ reader.skipBits(8);
+ bitOffset += 8; // transfer_characteristics
+ reader.skipBits(8);
+ bitOffset += 8; // matrix_coefficients
+ }
+ bool use_q_matrix = reader.getBits(1);
+ bitOffset += 1; // use_q_matrix
+ if (use_q_matrix) {
+ /* quantization_matrix() */
+ int32_t numComp = chroma_idc == 0 ? 1
+ : chroma_idc == 2 ? 3
+ : chroma_idc == 3 ? 3
+ : chroma_idc == 4 ? 4
+ : -1;
+ int32_t needBitsForQ = 64 * 8 * numComp;
+ reader.skipBits(needBitsForQ);
+ bitOffset += needBitsForQ;
+ }
+
+ /* tile_info() */
+ int32_t tile_width_in_mbs_minus1 = reader.getBits(28);
+ bitOffset += 28;
+ int32_t tile_height_in_mbs_minus1 = reader.getBits(28);
+ bitOffset += 28;
+ bool tile_size_present_in_fh_flag = reader.getBits(1);
+ bitOffset += 1;
+ if (tile_size_present_in_fh_flag) {
+ int32_t numTiles = ceil((double)width / (double)tile_width_in_mbs_minus1) *
+ ceil((double)height / (double)tile_height_in_mbs_minus1);
+ reader.skipBits(32 * numTiles);
+ bitOffset += (32 * numTiles);
+ }
+
+ reader.skipBits(8);
+ bitOffset += 8; // reserved_zero_8bits
+
+    /* byte_alignment() */
+ while (bitOffset % 8) {
+ reader.skipBits(1);
+ bitOffset += 1;
+ }
+ csdEnd = bitOffset / 8;
+ int32_t csdSize = csdEnd - csdStart + 1;
+
+ std::unique_ptr<C2StreamInitDataInfo::output> csd =
+ C2StreamInitDataInfo::output::AllocUnique(csdSize, 0u);
+ if (!csd) {
+ ALOGE("CSD allocation failed");
+ mSignalledError = true;
+ work->result = C2_NO_MEMORY;
+ work->workletsProcessed = 1u;
+ return;
+ }
+
+ buf = buf + csdStart;
+ memcpy(csd->m.value, buf, csdSize);
+ work->worklets.front()->output.configUpdate.push_back(std::move(csd));
+}
+
+c2_status_t C2SoftApvEnc::drainInternal(uint32_t drainMode,
+ const std::shared_ptr<C2BlockPool>& pool,
+ const std::unique_ptr<C2Work>& work) {
+ fillEmptyWork(work);
+ return C2_OK;
+}
+
+void C2SoftApvEnc::process(const std::unique_ptr<C2Work>& work,
+ const std::shared_ptr<C2BlockPool>& pool) {
+ c2_status_t error;
+ work->result = C2_OK;
+ work->workletsProcessed = 0u;
+ work->worklets.front()->output.flags = work->input.flags;
+
+    uint64_t workIndex = work->input.ordinal.frameIndex.peekull();
+
+ mSignalledEos = false;
+ mOutBlock = nullptr;
+
+ if (work->input.flags & C2FrameData::FLAG_END_OF_STREAM) {
+ ALOGV("Got FLAG_END_OF_STREAM");
+ mSignalledEos = true;
+ }
+
+    /* Initialize encoder if not already initialized */
+    if (initEncoder() != C2_OK) {
+        ALOGE("[%s] Failed to initialize the APV encoder", __func__);
+        mSignalledError = true;
+        work->result = C2_CORRUPTED;
+        work->workletsProcessed = 1u;
+        return;
+    }
+ if (mSignalledError) {
+ ALOGE("[%s] Received signalled error", __func__);
+ return;
+ }
+
+ if (mSignalledEos) {
+ drainInternal(DRAIN_COMPONENT_WITH_EOS, pool, work);
+ return;
+ }
+
+    if (work->input.buffers.empty()) {
+        fillEmptyWork(work);
+        return;
+    }
+
+    std::shared_ptr<C2Buffer> inputBuffer = work->input.buffers[0];
+    std::shared_ptr<C2GraphicView> view = std::make_shared<C2GraphicView>(
+            inputBuffer->data().graphicBlocks().front().map().get());
+    if (view->error() != C2_OK) {
+        ALOGE("graphic view map err = %d", view->error());
+        work->workletsProcessed = 1u;
+        return;
+    }
+
+    oapve_stat_t stat = {};
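+    /* Worst-case bitstream allocation: 4 bytes per pixel of the configured frame. */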
+ auto outBufferSize =
+ mCodecDesc->param[mReceivedFrames].w * mCodecDesc->param[mReceivedFrames].h * 4;
+ if (!mOutBlock) {
+ C2MemoryUsage usage = {C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE};
+ c2_status_t err = pool->fetchLinearBlock(outBufferSize, usage, &mOutBlock);
+ if (err != C2_OK) {
+ work->result = err;
+ work->workletsProcessed = 1u;
+ ALOGE("fetchLinearBlock has failed. err = %d", err);
+ return;
+ }
+ }
+
+ C2WriteView wView = mOutBlock->map().get();
+ if (wView.error() != C2_OK) {
+ work->result = wView.error();
+ work->workletsProcessed = 1u;
+ return;
+ }
+
+ error = setEncodeArgs(&mInputFrames, view.get(), workIndex);
+ if (error != C2_OK) {
+ mSignalledError = true;
+ work->result = error;
+ work->workletsProcessed = 1u;
+ return;
+ }
+
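+    /* Batch input: hold frames until mMaxFrames have been received, then encode the group. */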
+ if (++mReceivedFrames < mMaxFrames) {
+ return;
+ }
+ mReceivedFrames = 0;
+
+ std::shared_ptr<oapv_bitb_t> bits = std::make_shared<oapv_bitb_t>();
+ std::memset(mBitstreamBuf, 0, kMaxBitstreamBufSize);
+ bits->addr = mBitstreamBuf;
+ bits->bsize = kMaxBitstreamBufSize;
+    bits->err = 0;  // oapv status code, not a c2_status_t
+
+ if (mInputFrames.frm[0].imgb) {
+        int32_t status =
+                oapve_encode(mEncoderId, &mInputFrames, mMetaId, bits.get(), &stat, &mReconFrames);
+        if (OAPV_FAILED(status)) {
+ mSignalledError = true;
+ work->result = C2_CORRUPTED;
+ work->workletsProcessed = 1u;
+ return;
+ }
+    } else if (!mSignalledEos) {
+        fillEmptyWork(work);
+        return;
+    }
+ finishWork(workIndex, work, pool, bits.get(), &stat);
+}
+
+class C2SoftApvEncFactory : public C2ComponentFactory {
+ public:
+ C2SoftApvEncFactory()
+ : mHelper(std::static_pointer_cast<C2ReflectorHelper>(
+ GetCodec2PlatformComponentStore()->getParamReflector())) {}
+
+ virtual c2_status_t createComponent(c2_node_id_t id,
+ std::shared_ptr<C2Component>* const component,
+ std::function<void(C2Component*)> deleter) override {
+ *component = std::shared_ptr<C2Component>(
+ new C2SoftApvEnc(COMPONENT_NAME, id,
+ std::make_shared<C2SoftApvEnc::IntfImpl>(mHelper)),
+ deleter);
+ return C2_OK;
+ }
+
+ c2_status_t createInterface(c2_node_id_t id,
+ std::shared_ptr<C2ComponentInterface>* const interface,
+ std::function<void(C2ComponentInterface*)> deleter) override {
+ *interface = std::shared_ptr<C2ComponentInterface>(
+ new SimpleInterface<C2SoftApvEnc::IntfImpl>(
+ COMPONENT_NAME, id, std::make_shared<C2SoftApvEnc::IntfImpl>(mHelper)),
+ deleter);
+ return C2_OK;
+ }
+
+ ~C2SoftApvEncFactory() override = default;
+
+ private:
+ std::shared_ptr<C2ReflectorHelper> mHelper;
+};
+
+} // namespace android
+
+__attribute__((cfi_canonical_jump_table)) extern "C" ::C2ComponentFactory* CreateCodec2Factory() {
+ if (!android::media::swcodec::flags::apv_software_codec()) {
+ ALOGV("APV SW Codec is not enabled");
+ return nullptr;
+ }
+ return new ::android::C2SoftApvEncFactory();
+}
+
+__attribute__((cfi_canonical_jump_table)) extern "C" void DestroyCodec2Factory(
+ ::C2ComponentFactory* factory) {
+ delete factory;
+}
diff --git a/media/codec2/components/apv/C2SoftApvEnc.h b/media/codec2/components/apv/C2SoftApvEnc.h
new file mode 100644
index 0000000..441c663
--- /dev/null
+++ b/media/codec2/components/apv/C2SoftApvEnc.h
@@ -0,0 +1,115 @@
+/*
+ * Copyright 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_C2_SOFT_APV_ENC_H_
+#define ANDROID_C2_SOFT_APV_ENC_H_
+
+#include <SimpleC2Component.h>
+#include <utils/Vector.h>
+#include <map>
+#include "oapv.h"
+
+#include <C2SoftApvCommon.h>
+
+namespace android {
+
+#define CODEC_MAX_CORES 4
+
+#define APV_QP_MIN 1
+#define APV_QP_MAX 51
+
+struct C2SoftApvEnc final : public SimpleC2Component {
+ class IntfImpl;
+
+ C2SoftApvEnc(const char* name, c2_node_id_t id, const std::shared_ptr<IntfImpl>& intfImpl);
+ virtual ~C2SoftApvEnc();
+
+ // From SimpleC2Component
+ c2_status_t onInit() override;
+ c2_status_t onStop() override;
+ void onReset() override;
+ void onRelease() override;
+ c2_status_t onFlush_sm() override;
+ void process(const std::unique_ptr<C2Work>& work,
+ const std::shared_ptr<C2BlockPool>& pool) override;
+ c2_status_t drain(uint32_t drainMode, const std::shared_ptr<C2BlockPool>& pool) override;
+
+ private:
+ c2_status_t reset();
+ c2_status_t initEncoder();
+ c2_status_t releaseEncoder();
+ c2_status_t setEncodeArgs(oapv_frms_t* imgb_inp, const C2GraphicView* const input,
+ uint64_t workIndex);
+ void finishWork(uint64_t workIndex, const std::unique_ptr<C2Work>& work,
+ const std::shared_ptr<C2BlockPool>& pool, oapv_bitb_t* bitb,
+ oapve_stat_t* stat);
+ c2_status_t drainInternal(uint32_t drainMode, const std::shared_ptr<C2BlockPool>& pool,
+ const std::unique_ptr<C2Work>& work);
+ void setParams(oapve_param_t& param);
+ int32_t getQpFromQuality(int quality);
+ C2Config::level_t decisionApvLevel(int32_t width, int32_t height, int32_t fps, int32_t bitrate,
+ int32_t band);
+
+ void showEncoderParams(oapve_cdesc_t* cdsc);
+
+ void ColorConvertNv12ToP210(const C2GraphicView* const input, oapv_imgb_t* imgb);
+ void ColorConvertP010ToYUV422P10le(const C2GraphicView* const input, oapv_imgb_t* imgb);
+ void ColorConvertP010ToP210(const C2GraphicView* const input, oapv_imgb_t* imgb);
+
+ void createCsdData(const std::unique_ptr<C2Work>& work, oapv_bitb_t* bitb,
+ uint32_t encodedSize);
+
+ std::shared_ptr<IntfImpl> mIntf;
+ int32_t mBitDepth;
+ int32_t mColorFormat;
+
+ bool mStarted;
+ bool mSignalledEos;
+ bool mSignalledError;
+
+ std::shared_ptr<C2StreamBitrateInfo::output> mBitrate;
+ std::shared_ptr<C2StreamPictureSizeInfo::input> mSize;
+ std::shared_ptr<C2StreamFrameRateInfo::output> mFrameRate;
+ std::shared_ptr<C2StreamProfileLevelInfo::output> mProfileLevel;
+ std::shared_ptr<C2StreamColorAspectsInfo::input> mColorAspects;
+ std::shared_ptr<C2StreamColorAspectsInfo::output> mCodedColorAspects;
+ std::shared_ptr<C2StreamPictureQuantizationTuning::output> mPictureQuantization;
+ std::shared_ptr<C2StreamQualityTuning::output> mQuality;
+ std::shared_ptr<C2StreamBitrateModeTuning::output> mBitrateMode;
+ std::shared_ptr<C2LinearBlock> mOutBlock;
+ std::shared_ptr<C2StreamComplexityTuning::output> mComplexity;
+ std::shared_ptr<C2StreamPixelFormatInfo::input> mPixelFormat;
+
+ std::map<const void*, std::shared_ptr<C2Buffer>> mBuffers;
+ MemoryBlockPool mConversionBuffers;
+ std::map<const void*, MemoryBlock> mConversionBuffersInUse;
+
+ bool mInitEncoder;
+ int32_t mMaxFrames;
+ int32_t mReceivedFrames;
+ std::unique_ptr<oapve_cdesc_t> mCodecDesc;
+ oapv_frms_t mInputFrames;
+ oapv_frms_t mReconFrames;
+ oapve_t mEncoderId;
+ oapvm_t mMetaId;
+ uint8_t* mBitstreamBuf = nullptr;
+ std::vector<uint16_t> mSdrToHdrMapping;
+ bool mReceivedFirstFrame = false;
+ C2_DO_NOT_COPY(C2SoftApvEnc);
+};
+} // namespace android
+
+#endif // ANDROID_C2_SOFT_APV_ENC_H_
diff --git a/media/codec2/components/dav1d/C2SoftDav1dDec.cpp b/media/codec2/components/dav1d/C2SoftDav1dDec.cpp
index 4ec26d6..44a8dd1 100644
--- a/media/codec2/components/dav1d/C2SoftDav1dDec.cpp
+++ b/media/codec2/components/dav1d/C2SoftDav1dDec.cpp
@@ -139,8 +139,8 @@
addParameter(DefineParam(mMaxSize, C2_PARAMKEY_MAX_PICTURE_SIZE)
.withDefault(new C2StreamMaxPictureSizeTuning::output(0u, 320, 240))
.withFields({
- C2F(mSize, width).inRange(2, 2048, 2),
- C2F(mSize, height).inRange(2, 2048, 2),
+ C2F(mSize, width).inRange(2, 4096, 2),
+ C2F(mSize, height).inRange(2, 4096, 2),
})
.withSetter(MaxPictureSizeSetter, mSize)
.build());
diff --git a/media/codec2/components/vpx/C2SoftVpxDec.cpp b/media/codec2/components/vpx/C2SoftVpxDec.cpp
index 318f093..83cbe47 100644
--- a/media/codec2/components/vpx/C2SoftVpxDec.cpp
+++ b/media/codec2/components/vpx/C2SoftVpxDec.cpp
@@ -69,8 +69,8 @@
DefineParam(mSize, C2_PARAMKEY_PICTURE_SIZE)
.withDefault(new C2StreamPictureSizeInfo::output(0u, 320, 240))
.withFields({
- C2F(mSize, width).inRange(2, 2048),
- C2F(mSize, height).inRange(2, 2048),
+ C2F(mSize, width).inRange(2, 4096),
+ C2F(mSize, height).inRange(2, 4096),
})
.withSetter(SizeSetter)
.build());
@@ -167,8 +167,8 @@
DefineParam(mMaxSize, C2_PARAMKEY_MAX_PICTURE_SIZE)
.withDefault(new C2StreamMaxPictureSizeTuning::output(0u, 320, 240))
.withFields({
- C2F(mSize, width).inRange(2, 2048, 2),
- C2F(mSize, height).inRange(2, 2048, 2),
+ C2F(mSize, width).inRange(2, 4096, 2),
+ C2F(mSize, height).inRange(2, 4096, 2),
})
.withSetter(MaxPictureSizeSetter, mSize)
.build());
diff --git a/media/codec2/core/include/C2Config.h b/media/codec2/core/include/C2Config.h
index e6782a9..069d6ad 100644
--- a/media/codec2/core/include/C2Config.h
+++ b/media/codec2/core/include/C2Config.h
@@ -430,6 +430,7 @@
_C2_PL_AV1_BASE = 0x9000,
_C2_PL_VP8_BASE = 0xA000,
_C2_PL_MPEGH_BASE = 0xB000, // MPEG-H 3D Audio
+ _C2_PL_APV_BASE = 0xC000, // APV
C2_PROFILE_LEVEL_VENDOR_START = 0x70000000,
};
@@ -597,6 +598,15 @@
PROFILE_MPEGH_HIGH, ///< MPEG-H High
PROFILE_MPEGH_LC, ///< MPEG-H Low-complexity
PROFILE_MPEGH_BASELINE, ///< MPEG-H Baseline
+
+    // Advanced Professional Video codec (APV)
+ PROFILE_APV_422_10 = _C2_PL_APV_BASE, ///< APV 422-10 Profile
+ PROFILE_APV_422_12, ///< APV 422-12 Profile
+ PROFILE_APV_444_10, ///< APV 444-10 Profile
+ PROFILE_APV_444_12, ///< APV 444-12 Profile
+ PROFILE_APV_4444_10, ///< APV 4444-10 Profile
+ PROFILE_APV_4444_12, ///< APV 4444-12 Profile
+ PROFILE_APV_400_10, ///< APV 400-10 Profile
};
enum C2Config::level_t : uint32_t {
@@ -752,6 +762,68 @@
LEVEL_MPEGH_3, ///< MPEG-H L3
LEVEL_MPEGH_4, ///< MPEG-H L4
LEVEL_MPEGH_5, ///< MPEG-H L5
+
+    // Advanced Professional Video codec (APV) levels/bands
+ LEVEL_APV_1_BAND_0 = _C2_PL_APV_BASE, ///< APV L 1, BAND 0
+ LEVEL_APV_1_1_BAND_0, ///< APV L 1.1, BAND 0
+ LEVEL_APV_2_BAND_0, ///< APV L 2, BAND 0
+ LEVEL_APV_2_1_BAND_0, ///< APV L 2.1, BAND 0
+ LEVEL_APV_3_BAND_0, ///< APV L 3, BAND 0
+ LEVEL_APV_3_1_BAND_0, ///< APV L 3.1, BAND 0
+ LEVEL_APV_4_BAND_0, ///< APV L 4, BAND 0
+ LEVEL_APV_4_1_BAND_0, ///< APV L 4.1, BAND 0
+ LEVEL_APV_5_BAND_0, ///< APV L 5, BAND 0
+ LEVEL_APV_5_1_BAND_0, ///< APV L 5.1, BAND 0
+ LEVEL_APV_6_BAND_0, ///< APV L 6, BAND 0
+ LEVEL_APV_6_1_BAND_0, ///< APV L 6.1, BAND 0
+ LEVEL_APV_7_BAND_0, ///< APV L 7, BAND 0
+ LEVEL_APV_7_1_BAND_0, ///< APV L 7.1, BAND 0
+
+ LEVEL_APV_1_BAND_1 = _C2_PL_APV_BASE + 0x100, ///< APV L 1, BAND 1
+ LEVEL_APV_1_1_BAND_1, ///< APV L 1.1, BAND 1
+ LEVEL_APV_2_BAND_1, ///< APV L 2, BAND 1
+ LEVEL_APV_2_1_BAND_1, ///< APV L 2.1, BAND 1
+ LEVEL_APV_3_BAND_1, ///< APV L 3, BAND 1
+ LEVEL_APV_3_1_BAND_1, ///< APV L 3.1, BAND 1
+ LEVEL_APV_4_BAND_1, ///< APV L 4, BAND 1
+ LEVEL_APV_4_1_BAND_1, ///< APV L 4.1, BAND 1
+ LEVEL_APV_5_BAND_1, ///< APV L 5, BAND 1
+ LEVEL_APV_5_1_BAND_1, ///< APV L 5.1, BAND 1
+ LEVEL_APV_6_BAND_1, ///< APV L 6, BAND 1
+ LEVEL_APV_6_1_BAND_1, ///< APV L 6.1, BAND 1
+ LEVEL_APV_7_BAND_1, ///< APV L 7, BAND 1
+ LEVEL_APV_7_1_BAND_1, ///< APV L 7.1, BAND 1
+
+ LEVEL_APV_1_BAND_2 = _C2_PL_APV_BASE + 0x200, ///< APV L 1, BAND 2
+ LEVEL_APV_1_1_BAND_2, ///< APV L 1.1, BAND 2
+ LEVEL_APV_2_BAND_2, ///< APV L 2, BAND 2
+ LEVEL_APV_2_1_BAND_2, ///< APV L 2.1, BAND 2
+ LEVEL_APV_3_BAND_2, ///< APV L 3, BAND 2
+ LEVEL_APV_3_1_BAND_2, ///< APV L 3.1, BAND 2
+ LEVEL_APV_4_BAND_2, ///< APV L 4, BAND 2
+ LEVEL_APV_4_1_BAND_2, ///< APV L 4.1, BAND 2
+ LEVEL_APV_5_BAND_2, ///< APV L 5, BAND 2
+ LEVEL_APV_5_1_BAND_2, ///< APV L 5.1, BAND 2
+ LEVEL_APV_6_BAND_2, ///< APV L 6, BAND 2
+ LEVEL_APV_6_1_BAND_2, ///< APV L 6.1, BAND 2
+ LEVEL_APV_7_BAND_2, ///< APV L 7, BAND 2
+ LEVEL_APV_7_1_BAND_2, ///< APV L 7.1, BAND 2
+
+ LEVEL_APV_1_BAND_3 = _C2_PL_APV_BASE + 0x300, ///< APV L 1, BAND 3
+ LEVEL_APV_1_1_BAND_3, ///< APV L 1.1, BAND 3
+ LEVEL_APV_2_BAND_3, ///< APV L 2, BAND 3
+ LEVEL_APV_2_1_BAND_3, ///< APV L 2.1, BAND 3
+ LEVEL_APV_3_BAND_3, ///< APV L 3, BAND 3
+ LEVEL_APV_3_1_BAND_3, ///< APV L 3.1, BAND 3
+ LEVEL_APV_4_BAND_3, ///< APV L 4, BAND 3
+ LEVEL_APV_4_1_BAND_3, ///< APV L 4.1, BAND 3
+ LEVEL_APV_5_BAND_3, ///< APV L 5, BAND 3
+ LEVEL_APV_5_1_BAND_3, ///< APV L 5.1, BAND 3
+ LEVEL_APV_6_BAND_3, ///< APV L 6, BAND 3
+ LEVEL_APV_6_1_BAND_3, ///< APV L 6.1, BAND 3
+ LEVEL_APV_7_BAND_3, ///< APV L 7, BAND 3
+ LEVEL_APV_7_1_BAND_3, ///< APV L 7.1, BAND 3
+
};
struct C2ProfileLevelStruct {
diff --git a/media/codec2/sfplugin/CCodec.cpp b/media/codec2/sfplugin/CCodec.cpp
index 632eaed..72b5a61 100644
--- a/media/codec2/sfplugin/CCodec.cpp
+++ b/media/codec2/sfplugin/CCodec.cpp
@@ -2877,6 +2877,7 @@
// handle configuration changes in work done
std::shared_ptr<const C2StreamInitDataInfo::output> initData;
+ sp<AMessage> inputFormat = nullptr;
sp<AMessage> outputFormat = nullptr;
{
Mutexed<std::unique_ptr<Config>>::Locked configLocked(mConfig);
@@ -2964,10 +2965,12 @@
initData->m.value, initData->flexCount(), config->mCodingMediaType,
config->mOutputFormat);
}
+ inputFormat = config->mInputFormat;
outputFormat = config->mOutputFormat;
}
mChannel->onWorkDone(
- std::move(work), outputFormat, initData ? initData.get() : nullptr);
+ std::move(work), inputFormat, outputFormat,
+ initData ? initData.get() : nullptr);
// log metrics to MediaCodec
if (mMetrics->countEntries() == 0) {
Mutexed<std::unique_ptr<Config>>::Locked configLocked(mConfig);
diff --git a/media/codec2/sfplugin/CCodecBufferChannel.cpp b/media/codec2/sfplugin/CCodecBufferChannel.cpp
index 4353521..d67a876 100644
--- a/media/codec2/sfplugin/CCodecBufferChannel.cpp
+++ b/media/codec2/sfplugin/CCodecBufferChannel.cpp
@@ -335,7 +335,7 @@
void CCodecBufferChannel::setComponent(
const std::shared_ptr<Codec2Client::Component> &component) {
- mComponent = component;
+ std::atomic_store(&mComponent, component);
mComponentName = component->getName() + StringPrintf("#%d", int(uintptr_t(component.get()) % 997));
mName = mComponentName.c_str();
}
@@ -351,7 +351,7 @@
inputSurface->numProcessingBuffersBalance = 0;
inputSurface->surface = surface;
mHasInputSurface = true;
- return inputSurface->surface->connect(mComponent);
+ return inputSurface->surface->connect(std::atomic_load(&mComponent));
}
status_t CCodecBufferChannel::signalEndOfInputStream() {
@@ -547,7 +547,7 @@
now);
}
}
- err = mComponent->queue(&items);
+ err = std::atomic_load(&mComponent)->queue(&items);
}
if (err != C2_OK) {
Mutexed<PipelineWatcher>::Locked watcher(mPipelineWatcher);
@@ -1457,7 +1457,7 @@
qbi.setSurfaceDamage(Region::INVALID_REGION); // we don't have dirty regions
qbi.getFrameTimestamps = true; // we need to know when a frame is rendered
IGraphicBufferProducer::QueueBufferOutput qbo;
- status_t result = mComponent->queueToOutputSurface(block, qbi, &qbo);
+ status_t result = std::atomic_load(&mComponent)->queueToOutputSurface(block, qbi, &qbo);
if (result != OK) {
ALOGI("[%s] queueBuffer failed: %d", mName, result);
if (result == NO_INIT) {
@@ -1596,7 +1596,7 @@
void CCodecBufferChannel::pollForRenderedBuffers() {
FrameEventHistoryDelta delta;
- mComponent->pollForRenderedFrames(&delta);
+ std::atomic_load(&mComponent)->pollForRenderedFrames(&delta);
processRenderedFrames(delta);
}
@@ -1605,7 +1605,7 @@
// knowing the internal state of CCodec/CCodecBufferChannel,
// prevent mComponent from being destroyed by holding the shared reference
// during this interface being executed.
- std::shared_ptr<Codec2Client::Component> comp = mComponent;
+ std::shared_ptr<Codec2Client::Component> comp = std::atomic_load(&mComponent);
if (comp) {
SurfaceCallbackHandler::GetInstance().post(
SurfaceCallbackHandler::ON_BUFFER_RELEASED, comp, generation);
@@ -1617,7 +1617,7 @@
// knowing the internal state of CCodec/CCodecBufferChannel,
// prevent mComponent from being destroyed by holding the shared reference
// during this interface being executed.
- std::shared_ptr<Codec2Client::Component> comp = mComponent;
+ std::shared_ptr<Codec2Client::Component> comp = std::atomic_load(&mComponent);
if (comp) {
SurfaceCallbackHandler::GetInstance().post(
SurfaceCallbackHandler::ON_BUFFER_ATTACHED, comp, generation);
@@ -1691,7 +1691,7 @@
C2ActualPipelineDelayTuning pipelineDelay(0);
C2SecureModeTuning secureMode(C2Config::SM_UNPROTECTED);
- c2_status_t err = mComponent->query(
+ c2_status_t err = std::atomic_load(&mComponent)->query(
{
&iStreamFormat,
&oStreamFormat,
@@ -1722,7 +1722,7 @@
size_t numOutputSlots = outputDelayValue + kSmoothnessFactor;
// TODO: get this from input format
- bool secure = mComponent->getName().find(".secure") != std::string::npos;
+ bool secure = std::atomic_load(&mComponent)->getName().find(".secure") != std::string::npos;
// secure mode is a static parameter (shall not change in the executing state)
mSendEncryptedInfoBuffer = secureMode.value == C2Config::SM_READ_PROTECTED_WITH_ENCRYPTED;
@@ -1768,7 +1768,7 @@
channelCount.invalidate();
pcmEncoding.invalidate();
}
- err = mComponent->query(stackParams,
+ err = std::atomic_load(&mComponent)->query(stackParams,
{ C2PortAllocatorsTuning::input::PARAM_TYPE },
C2_DONT_BLOCK,
¶ms);
@@ -1836,6 +1836,9 @@
channelCount.value,
pcmEncoding ? pcmEncoding.value : C2Config::PCM_16);
}
+ if (!buffersBoundToCodec) {
+ inputFormat->setInt32(KEY_NUM_SLOTS, numInputSlots);
+ }
bool conforming = (apiFeatures & API_SAME_INPUT_BUFFER);
// For encrypted content, framework decrypts source buffer (ashmem) into
// C2Buffers. Thus non-conforming codecs can process these.
@@ -1929,7 +1932,7 @@
// query C2PortAllocatorsTuning::output from component, or use default allocator if
// unsuccessful.
std::vector<std::unique_ptr<C2Param>> params;
- err = mComponent->query({ },
+ err = std::atomic_load(&mComponent)->query({ },
{ C2PortAllocatorsTuning::output::PARAM_TYPE },
C2_DONT_BLOCK,
¶ms);
@@ -1957,7 +1960,7 @@
// if unsuccessful.
if (outputSurface) {
params.clear();
- err = mComponent->query({ },
+ err = std::atomic_load(&mComponent)->query({ },
{ C2PortSurfaceAllocatorTuning::output::PARAM_TYPE },
C2_DONT_BLOCK,
¶ms);
@@ -1988,7 +1991,7 @@
}
if ((poolMask >> pools->outputAllocatorId) & 1) {
- err = mComponent->createBlockPool(
+ err = std::atomic_load(&mComponent)->createBlockPool(
pools->outputAllocatorId, &pools->outputPoolId, &pools->outputPoolIntf);
ALOGI("[%s] Created output block pool with allocatorID %u => poolID %llu - %s",
mName, pools->outputAllocatorId,
@@ -2009,7 +2012,8 @@
C2PortBlockPoolsTuning::output::AllocUnique({ pools->outputPoolId });
std::vector<std::unique_ptr<C2SettingResult>> failures;
- err = mComponent->config({ poolIdsTuning.get() }, C2_MAY_BLOCK, &failures);
+ err = std::atomic_load(&mComponent)->config(
+ { poolIdsTuning.get() }, C2_MAY_BLOCK, &failures);
ALOGD("[%s] Configured output block pool ids %llu => %s",
mName, (unsigned long long)poolIdsTuning->m.values[0], asString(err));
outputPoolId_ = pools->outputPoolId;
@@ -2017,7 +2021,7 @@
if (prevOutputPoolId != C2BlockPool::BASIC_LINEAR
&& prevOutputPoolId != C2BlockPool::BASIC_GRAPHIC) {
- c2_status_t err = mComponent->destroyBlockPool(prevOutputPoolId);
+ c2_status_t err = std::atomic_load(&mComponent)->destroyBlockPool(prevOutputPoolId);
if (err != C2_OK) {
ALOGW("Failed to clean up previous block pool %llu - %s (%d)\n",
(unsigned long long) prevOutputPoolId, asString(err), err);
@@ -2049,7 +2053,7 @@
// Try to set output surface to created block pool if given.
if (outputSurface) {
- mComponent->setOutputSurface(
+ std::atomic_load(&mComponent)->setOutputSurface(
outputPoolId_,
outputSurface,
outputGeneration,
@@ -2058,7 +2062,7 @@
// configure CPU read consumer usage
C2StreamUsageTuning::output outputUsage{0u, C2MemoryUsage::CPU_READ};
std::vector<std::unique_ptr<C2SettingResult>> failures;
- err = mComponent->config({ &outputUsage }, C2_MAY_BLOCK, &failures);
+ err = std::atomic_load(&mComponent)->config({ &outputUsage }, C2_MAY_BLOCK, &failures);
// do not print error message for now as most components may not yet
// support this setting
ALOGD_IF(err != C2_BAD_INDEX, "[%s] Configured output usage [%#llx]",
@@ -2180,7 +2184,8 @@
}
C2StreamBufferTypeSetting::output oStreamFormat(0u);
C2PrependHeaderModeSetting prepend(PREPEND_HEADER_TO_NONE);
- c2_status_t err = mComponent->query({ &oStreamFormat, &prepend }, {}, C2_DONT_BLOCK, nullptr);
+ c2_status_t err = std::atomic_load(&mComponent)->query(
+ { &oStreamFormat, &prepend }, {}, C2_DONT_BLOCK, nullptr);
if (err != C2_OK && err != C2_BAD_INDEX) {
return UNKNOWN_ERROR;
}
@@ -2198,7 +2203,7 @@
now);
}
}
- err = mComponent->queue(&flushedConfigs);
+ err = std::atomic_load(&mComponent)->queue(&flushedConfigs);
if (err != C2_OK) {
ALOGW("[%s] Error while queueing a flushed config", mName);
return UNKNOWN_ERROR;
@@ -2249,7 +2254,8 @@
Mutexed<BlockPools>::Locked pools(mBlockPools);
outputPoolId = pools->outputPoolId;
}
- if (mComponent) mComponent->stopUsingOutputSurface(outputPoolId);
+ std::shared_ptr<Codec2Client::Component> comp = std::atomic_load(&mComponent);
+ if (comp) comp->stopUsingOutputSurface(outputPoolId);
if (pushBlankBuffer) {
sp<ANativeWindow> anw = static_cast<ANativeWindow *>(surface.get());
@@ -2283,7 +2289,8 @@
void CCodecBufferChannel::release() {
mInfoBuffers.clear();
- mComponent.reset();
+ std::shared_ptr<Codec2Client::Component> nullComp;
+ std::atomic_store(&mComponent, nullComp);
mInputAllocator.reset();
mOutputSurface.lock()->surface.clear();
{
@@ -2352,9 +2359,11 @@
}
void CCodecBufferChannel::onWorkDone(
- std::unique_ptr<C2Work> work, const sp<AMessage> &outputFormat,
+ std::unique_ptr<C2Work> work,
+ const sp<AMessage> &inputFormat,
+ const sp<AMessage> &outputFormat,
const C2StreamInitDataInfo::output *initData) {
- if (handleWork(std::move(work), outputFormat, initData)) {
+ if (handleWork(std::move(work), inputFormat, outputFormat, initData)) {
feedInputBufferIfAvailable();
}
}
@@ -2384,6 +2393,7 @@
bool CCodecBufferChannel::handleWork(
std::unique_ptr<C2Work> work,
+ const sp<AMessage> &inputFormat,
const sp<AMessage> &outputFormat,
const C2StreamInitDataInfo::output *initData) {
{
@@ -2557,6 +2567,9 @@
} else {
input->numSlots = newNumSlots;
}
+ if (inputFormat->contains(KEY_NUM_SLOTS)) {
+ inputFormat->setInt32(KEY_NUM_SLOTS, input->numSlots);
+ }
}
size_t numOutputSlots = 0;
uint32_t reorderDepth = 0;
@@ -2605,7 +2618,7 @@
}
}
if (maxDequeueCount > 0) {
- mComponent->setOutputSurfaceMaxDequeueCount(maxDequeueCount);
+ std::atomic_load(&mComponent)->setOutputSurfaceMaxDequeueCount(maxDequeueCount);
}
}
@@ -2853,7 +2866,7 @@
}
if (outputPoolIntf) {
- if (mComponent->setOutputSurface(
+ if (std::atomic_load(&mComponent)->setOutputSurface(
outputPoolId,
producer,
generation,
diff --git a/media/codec2/sfplugin/CCodecBufferChannel.h b/media/codec2/sfplugin/CCodecBufferChannel.h
index 4d296fd..6493b87 100644
--- a/media/codec2/sfplugin/CCodecBufferChannel.h
+++ b/media/codec2/sfplugin/CCodecBufferChannel.h
@@ -193,12 +193,15 @@
/**
* Notify input client about work done.
*
- * @param workItems finished work item.
+ * @param work         finished work item.
+ * @param inputFormat  input format; updated in place when the number of input slots changes
* @param outputFormat new output format if it has changed, otherwise nullptr
- * @param initData new init data (CSD) if it has changed, otherwise nullptr
+ * @param initData new init data (CSD) if it has changed, otherwise nullptr
*/
void onWorkDone(
- std::unique_ptr<C2Work> work, const sp<AMessage> &outputFormat,
+ std::unique_ptr<C2Work> work,
+ const sp<AMessage> &inputFormat,
+ const sp<AMessage> &outputFormat,
const C2StreamInitDataInfo::output *initData);
/**
@@ -311,7 +314,9 @@
std::shared_ptr<C2LinearBlock> encryptedBlock = nullptr,
size_t blockSize = 0);
bool handleWork(
- std::unique_ptr<C2Work> work, const sp<AMessage> &outputFormat,
+ std::unique_ptr<C2Work> work,
+ const sp<AMessage> &inputFormat,
+ const sp<AMessage> &outputFormat,
const C2StreamInitDataInfo::output *initData);
void sendOutputBuffers();
void ensureDecryptDestination(size_t size);
diff --git a/media/codec2/sfplugin/utils/Codec2Mapper.cpp b/media/codec2/sfplugin/utils/Codec2Mapper.cpp
index 9297520..3841831 100644
--- a/media/codec2/sfplugin/utils/Codec2Mapper.cpp
+++ b/media/codec2/sfplugin/utils/Codec2Mapper.cpp
@@ -436,6 +436,86 @@
{ C2Config::hdr_format_t::HDR10_PLUS, AV1ProfileMain10HDR10Plus },
};
+// APV
+ALookup<C2Config::profile_t, int32_t> sApvProfiles = {
+ { C2Config::PROFILE_APV_422_10, APVProfile422_10 },
+ { C2Config::PROFILE_APV_422_10, APVProfile422_10HDR10 },
+ { C2Config::PROFILE_APV_422_10, APVProfile422_10HDR10Plus },
+};
+
+ALookup<C2Config::profile_t, int32_t> sApvHdrProfiles = {
+ { C2Config::PROFILE_APV_422_10, APVProfile422_10HDR10 },
+};
+
+ALookup<C2Config::profile_t, int32_t> sApvHdr10PlusProfiles = {
+ { C2Config::PROFILE_APV_422_10, APVProfile422_10HDR10Plus },
+};
+
+ALookup<C2Config::level_t, int32_t> sApvLevels = {
+ { C2Config::LEVEL_APV_1_BAND_0, APVLevel1Band0 },
+ { C2Config::LEVEL_APV_1_BAND_1, APVLevel1Band1 },
+ { C2Config::LEVEL_APV_1_BAND_2, APVLevel1Band2 },
+ { C2Config::LEVEL_APV_1_BAND_3, APVLevel1Band3 },
+ { C2Config::LEVEL_APV_1_1_BAND_0, APVLevel11Band0 },
+ { C2Config::LEVEL_APV_1_1_BAND_1, APVLevel11Band1 },
+ { C2Config::LEVEL_APV_1_1_BAND_2, APVLevel11Band2 },
+ { C2Config::LEVEL_APV_1_1_BAND_3, APVLevel11Band3 },
+ { C2Config::LEVEL_APV_2_BAND_0, APVLevel2Band0 },
+ { C2Config::LEVEL_APV_2_BAND_1, APVLevel2Band1 },
+ { C2Config::LEVEL_APV_2_BAND_2, APVLevel2Band2 },
+ { C2Config::LEVEL_APV_2_BAND_3, APVLevel2Band3 },
+ { C2Config::LEVEL_APV_2_1_BAND_0, APVLevel21Band0 },
+ { C2Config::LEVEL_APV_2_1_BAND_1, APVLevel21Band1 },
+ { C2Config::LEVEL_APV_2_1_BAND_2, APVLevel21Band2 },
+ { C2Config::LEVEL_APV_2_1_BAND_3, APVLevel21Band3 },
+ { C2Config::LEVEL_APV_3_BAND_0, APVLevel3Band0 },
+ { C2Config::LEVEL_APV_3_BAND_1, APVLevel3Band1 },
+ { C2Config::LEVEL_APV_3_BAND_2, APVLevel3Band2 },
+ { C2Config::LEVEL_APV_3_BAND_3, APVLevel3Band3 },
+ { C2Config::LEVEL_APV_3_1_BAND_0, APVLevel31Band0 },
+ { C2Config::LEVEL_APV_3_1_BAND_1, APVLevel31Band1 },
+ { C2Config::LEVEL_APV_3_1_BAND_2, APVLevel31Band2 },
+ { C2Config::LEVEL_APV_3_1_BAND_3, APVLevel31Band3 },
+ { C2Config::LEVEL_APV_4_BAND_0, APVLevel4Band0 },
+ { C2Config::LEVEL_APV_4_BAND_1, APVLevel4Band1 },
+ { C2Config::LEVEL_APV_4_BAND_2, APVLevel4Band2 },
+ { C2Config::LEVEL_APV_4_BAND_3, APVLevel4Band3 },
+ { C2Config::LEVEL_APV_4_1_BAND_0, APVLevel41Band0 },
+ { C2Config::LEVEL_APV_4_1_BAND_1, APVLevel41Band1 },
+ { C2Config::LEVEL_APV_4_1_BAND_2, APVLevel41Band2 },
+ { C2Config::LEVEL_APV_4_1_BAND_3, APVLevel41Band3 },
+ { C2Config::LEVEL_APV_5_BAND_0, APVLevel5Band0 },
+ { C2Config::LEVEL_APV_5_BAND_1, APVLevel5Band1 },
+ { C2Config::LEVEL_APV_5_BAND_2, APVLevel5Band2 },
+ { C2Config::LEVEL_APV_5_BAND_3, APVLevel5Band3 },
+ { C2Config::LEVEL_APV_5_1_BAND_0, APVLevel51Band0 },
+ { C2Config::LEVEL_APV_5_1_BAND_1, APVLevel51Band1 },
+ { C2Config::LEVEL_APV_5_1_BAND_2, APVLevel51Band2 },
+ { C2Config::LEVEL_APV_5_1_BAND_3, APVLevel51Band3 },
+ { C2Config::LEVEL_APV_6_BAND_0, APVLevel6Band0 },
+ { C2Config::LEVEL_APV_6_BAND_1, APVLevel6Band1 },
+ { C2Config::LEVEL_APV_6_BAND_2, APVLevel6Band2 },
+ { C2Config::LEVEL_APV_6_BAND_3, APVLevel6Band3 },
+ { C2Config::LEVEL_APV_6_1_BAND_0, APVLevel61Band0 },
+ { C2Config::LEVEL_APV_6_1_BAND_1, APVLevel61Band1 },
+ { C2Config::LEVEL_APV_6_1_BAND_2, APVLevel61Band2 },
+ { C2Config::LEVEL_APV_6_1_BAND_3, APVLevel61Band3 },
+ { C2Config::LEVEL_APV_7_BAND_0, APVLevel7Band0 },
+ { C2Config::LEVEL_APV_7_BAND_1, APVLevel7Band1 },
+ { C2Config::LEVEL_APV_7_BAND_2, APVLevel7Band2 },
+ { C2Config::LEVEL_APV_7_BAND_3, APVLevel7Band3 },
+ { C2Config::LEVEL_APV_7_1_BAND_0, APVLevel71Band0 },
+ { C2Config::LEVEL_APV_7_1_BAND_1, APVLevel71Band1 },
+ { C2Config::LEVEL_APV_7_1_BAND_2, APVLevel71Band2 },
+ { C2Config::LEVEL_APV_7_1_BAND_3, APVLevel71Band3 },
+};
+
+ALookup<C2Config::hdr_format_t, int32_t> sApvHdrFormats = {
+ { C2Config::hdr_format_t::HLG, APVProfile422_10 },
+ { C2Config::hdr_format_t::HDR10, APVProfile422_10HDR10 },
+ { C2Config::hdr_format_t::HDR10_PLUS, APVProfile422_10HDR10Plus },
+};
+
// HAL_PIXEL_FORMAT_* -> COLOR_Format*
ALookup<uint32_t, int32_t> sPixelFormats = {
{ HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, COLOR_FormatSurface },
@@ -720,6 +800,37 @@
int32_t mBitDepth;
};
+// APV
+struct ApvProfileLevelMapper : ProfileLevelMapperHelper {
+ ApvProfileLevelMapper(bool isHdr = false, bool isHdr10Plus = false) :
+ ProfileLevelMapperHelper(),
+ mIsHdr(isHdr), mIsHdr10Plus(isHdr10Plus) {}
+
+ virtual bool simpleMap(C2Config::level_t from, int32_t *to) {
+ return sApvLevels.map(from, to);
+ }
+ virtual bool simpleMap(int32_t from, C2Config::level_t *to) {
+ return sApvLevels.map(from, to);
+ }
+ virtual bool simpleMap(C2Config::profile_t from, int32_t *to) {
+ return mIsHdr10Plus ? sApvHdr10PlusProfiles.map(from, to) :
+ mIsHdr ? sApvHdrProfiles.map(from, to) :
+ sApvProfiles.map(from, to);
+ }
+ virtual bool simpleMap(int32_t from, C2Config::profile_t *to) {
+ return mIsHdr10Plus ? sApvHdr10PlusProfiles.map(from, to) :
+ mIsHdr ? sApvHdrProfiles.map(from, to) :
+ sApvProfiles.map(from, to);
+ }
+ virtual bool mapHdrFormat(int32_t from, C2Config::hdr_format_t *to) override {
+ return sApvHdrFormats.map(from, to);
+ }
+
+private:
+ bool mIsHdr;
+ bool mIsHdr10Plus;
+};
+
} // namespace
// the default mapper is used for media types that do not support HDR
@@ -753,6 +864,8 @@
return std::make_shared<Vp9ProfileLevelMapper>();
} else if (mediaType == MIMETYPE_VIDEO_AV1) {
return std::make_shared<Av1ProfileLevelMapper>();
+ } else if (mediaType == MIMETYPE_VIDEO_APV) {
+ return std::make_shared<ApvProfileLevelMapper>();
}
return nullptr;
}
@@ -767,6 +880,8 @@
return std::make_shared<Vp9ProfileLevelMapper>(true, isHdr10Plus);
} else if (mediaType == MIMETYPE_VIDEO_AV1) {
return std::make_shared<Av1ProfileLevelMapper>(true, isHdr10Plus);
+ } else if (mediaType == MIMETYPE_VIDEO_APV) {
+ return std::make_shared<ApvProfileLevelMapper>(true, isHdr10Plus);
}
return nullptr;
}
@@ -779,6 +894,8 @@
return GetProfileLevelMapper(mediaType);
} else if (mediaType == MIMETYPE_VIDEO_AV1 && bitDepth == 10) {
return std::make_shared<Av1ProfileLevelMapper>(false, false, bitDepth);
+ } else if (mediaType == MIMETYPE_VIDEO_APV) {
+ return std::make_shared<ApvProfileLevelMapper>();
}
return nullptr;
}
diff --git a/media/codec2/vndk/C2Store.cpp b/media/codec2/vndk/C2Store.cpp
index 0987da2..6ec9d6b 100644
--- a/media/codec2/vndk/C2Store.cpp
+++ b/media/codec2/vndk/C2Store.cpp
@@ -1206,7 +1206,8 @@
emplace("libcodec2_soft_vp8enc.so");
emplace("libcodec2_soft_vp9dec.so");
emplace("libcodec2_soft_vp9enc.so");
-
+ emplace("libcodec2_soft_apvenc.so");
+ emplace("libcodec2_soft_apvdec.so");
}
// For testing only
diff --git a/media/janitors/media_solutions_OWNERS b/media/janitors/media_solutions_OWNERS
index 3243726..95c2b97 100644
--- a/media/janitors/media_solutions_OWNERS
+++ b/media/janitors/media_solutions_OWNERS
@@ -4,6 +4,7 @@
andrewlewis@google.com
bachinger@google.com
claincly@google.com
+dancho@google.com
ibaker@google.com
ivanbuper@google.com
jbibik@google.com
diff --git a/media/libaaudio/fuzzer/libaaudio_fuzzer.cpp b/media/libaaudio/fuzzer/libaaudio_fuzzer.cpp
index 1b06ea7..c3b43ab 100644
--- a/media/libaaudio/fuzzer/libaaudio_fuzzer.cpp
+++ b/media/libaaudio/fuzzer/libaaudio_fuzzer.cpp
@@ -17,6 +17,7 @@
#include "aaudio/AAudio.h"
#include "aaudio/AAudioTesting.h"
+#include "system/aaudio/AAudio.h"
#include <fuzzer/FuzzedDataProvider.h>
#include <functional>
@@ -183,6 +184,12 @@
fdp.PickValueInArray({AAUDIO_UNSPECIFIED, fdp.ConsumeIntegral<int32_t>()});
AAudioStreamBuilder_setFramesPerDataCallback(mAaudioBuilder, framesPerDataCallback);
+ const size_t tagsNumBytes = fdp.ConsumeIntegralInRange<size_t>(
+ 0, AAUDIO_ATTRIBUTES_TAGS_MAX_SIZE + 10);
+ AAudioStreamBuilder_setTags(mAaudioBuilder,
+ (tagsNumBytes == 0 ? nullptr
+ : fdp.ConsumeBytesAsString(tagsNumBytes).c_str()));
+
aaudio_policy_t policy =
fdp.PickValueInArray({fdp.PickValueInArray(kPolicies), fdp.ConsumeIntegral<int32_t>()});
AAudio_setMMapPolicy(policy);
@@ -193,6 +200,7 @@
int32_t maxFrames = 0;
int32_t count = 0;
aaudio_stream_state_t state = AAUDIO_STREAM_STATE_UNKNOWN;
+ char tags[AAUDIO_ATTRIBUTES_TAGS_MAX_SIZE + 1];
invokeAAudioSetAPIs(fdp);
@@ -312,6 +320,9 @@
(void)AAudioStream_getBufferSizeInFrames(mAaudioStream);
},
[&]() {
+ (void)AAudioStream_getTags(mAaudioStream, tags);
+ },
+ [&]() {
(void)AAudioStream_isMMapUsed(mAaudioStream);
},
});
diff --git a/media/libaaudio/include/system/aaudio/AAudio.h b/media/libaaudio/include/system/aaudio/AAudio.h
new file mode 100644
index 0000000..933ad35
--- /dev/null
+++ b/media/libaaudio/include/system/aaudio/AAudio.h
@@ -0,0 +1,80 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * These are the system APIs for AAudio.
+ */
+#ifndef SYSTEM_AAUDIO_H
+#define SYSTEM_AAUDIO_H
+
+#include <aaudio/AAudio.h>
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+/**
+ * The tags string attribute allows OEMs to extend the
+ * <a href="/reference/android/media/AudioAttributes">AudioAttributes</a>.
+ *
+ * Note that the maximum length includes all tags, delimiters, and the null terminator.
+ *
+ * Note that it matches the equivalent value in
+ * <a href="/reference/android/system/media/audio">AUDIO_ATTRIBUTES_TAGS_MAX_SIZE</a>
+ * in the Android native API.
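+ *
+ * For example, the illustrative string "oem_tag_a;oem_tag_b" carries two tags and consumes
+ * 20 bytes of this budget, including the null terminator.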
+ */
+#define AAUDIO_ATTRIBUTES_TAGS_MAX_SIZE 256
+
+/**
+ * Set one or more vendor extension tags that the output stream will carry.
+ *
+ * The tags can be used by the audio policy engine for routing purposes.
+ * Routing is based on audio attributes, translated into a legacy stream type.
+ * Stream types cannot be extended, so product strategies were introduced to allow
+ * vendor extension of routing capabilities.
+ * This could, for example, affect how volume and routing are handled for the stream.
+ *
+ * The tags can also be used by a System App to pass vendor-specific information through the
+ * framework to the HAL. That info could affect routing, ducking, or other audio behavior in the HAL.
+ *
+ * By default, audio attributes tags are empty if this method is not called.
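+ *
+ * An illustrative usage sketch (the tag strings below are hypothetical, not defined by the
+ * framework):
+ * <pre>{@code
+ * AAudioStreamBuilder* builder = nullptr;
+ * AAudio_createStreamBuilder(&builder);
+ * AAudioStreamBuilder_setTags(builder, "oem_game_mode;oem_zone=rear");
+ * // ... continue configuring and open the stream ...
+ * }</pre>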
+ *
+ * @param builder reference provided by AAudio_createStreamBuilder()
+ * @param tags the desired tags to add, which must be UTF-8 encoded and null-terminated. The size
+ *             of the tags must be at most {@link #AAUDIO_ATTRIBUTES_TAGS_MAX_SIZE}. Multiple tags
+ *             must be separated by semicolons.
+ * @return {@link #AAUDIO_OK} on success or {@link #AAUDIO_ERROR_ILLEGAL_ARGUMENT} if the given
+ *         tags is null or its length, including the null terminator, exceeds
+ *         {@link #AAUDIO_ATTRIBUTES_TAGS_MAX_SIZE}.
+ */
+aaudio_result_t AAudioStreamBuilder_setTags(AAudioStreamBuilder* _Nonnull builder,
+ const char* _Nonnull tags);
+
+/**
+ * Read the audio attributes' tags for the stream into a buffer.
+ * The caller is responsible for allocating a buffer of at least
+ * {@link #AAUDIO_ATTRIBUTES_TAGS_MAX_SIZE} bytes to receive the tags.
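+ *
+ * A minimal usage sketch, assuming a successfully opened stream:
+ * <pre>{@code
+ * char tags[AAUDIO_ATTRIBUTES_TAGS_MAX_SIZE];
+ * if (AAudioStream_getTags(stream, tags) == AAUDIO_OK) {
+ *     // tags now holds a null-terminated UTF-8 string; empty if no tags were set
+ * }
+ * }</pre>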
+ *
+ * @param stream reference provided by AAudioStreamBuilder_openStream()
+ * @param tags   pointer to a buffer that receives the UTF-8, null-terminated tags string. The
+ *               buffer must hold at least {@link #AAUDIO_ATTRIBUTES_TAGS_MAX_SIZE} bytes.
+ * @return {@link #AAUDIO_OK} or {@link #AAUDIO_ERROR_ILLEGAL_ARGUMENT} if the given tags is null.
+ */
+aaudio_result_t AAudioStream_getTags(AAudioStream* _Nonnull stream, char* _Nonnull tags);
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif //SYSTEM_AAUDIO_H
diff --git a/media/libaaudio/src/binding/AAudioStreamConfiguration.cpp b/media/libaaudio/src/binding/AAudioStreamConfiguration.cpp
index c4692ce..c53a897 100644
--- a/media/libaaudio/src/binding/AAudioStreamConfiguration.cpp
+++ b/media/libaaudio/src/binding/AAudioStreamConfiguration.cpp
@@ -50,7 +50,7 @@
setUsage(parcelable.usage);
static_assert(sizeof(aaudio_content_type_t) == sizeof(parcelable.contentType));
setContentType(parcelable.contentType);
-
+ setTags(parcelable.tags);
static_assert(sizeof(aaudio_spatialization_behavior_t) ==
sizeof(parcelable.spatializationBehavior));
setSpatializationBehavior(parcelable.spatializationBehavior);
@@ -106,6 +106,8 @@
result.usage = getUsage();
static_assert(sizeof(aaudio_content_type_t) == sizeof(result.contentType));
result.contentType = getContentType();
+    result.tags = getTags().value_or("");
static_assert(
sizeof(aaudio_spatialization_behavior_t) == sizeof(result.spatializationBehavior));
result.spatializationBehavior = getSpatializationBehavior();
diff --git a/media/libaaudio/src/binding/aidl/aaudio/StreamParameters.aidl b/media/libaaudio/src/binding/aidl/aaudio/StreamParameters.aidl
index fa46e0d..a301da8 100644
--- a/media/libaaudio/src/binding/aidl/aaudio/StreamParameters.aidl
+++ b/media/libaaudio/src/binding/aidl/aaudio/StreamParameters.aidl
@@ -27,6 +27,7 @@
int /* aaudio_direction_t */ direction; // = AAUDIO_DIRECTION_OUTPUT;
int /* aaudio_usage_t */ usage; // = AAUDIO_UNSPECIFIED;
int /* aaudio_content_type_t */ contentType; // = AAUDIO_UNSPECIFIED;
+ @utf8InCpp String tags; /* UTF8 */
int /* aaudio_spatialization_behavior_t */spatializationBehavior; //= AAUDIO_UNSPECIFIED;
boolean isContentSpatialized; // = false;
int /* aaudio_input_preset_t */ inputPreset; // = AAUDIO_UNSPECIFIED;
diff --git a/media/libaaudio/src/client/AudioStreamInternal.cpp b/media/libaaudio/src/client/AudioStreamInternal.cpp
index fa3f5a0..99b90e2 100644
--- a/media/libaaudio/src/client/AudioStreamInternal.cpp
+++ b/media/libaaudio/src/client/AudioStreamInternal.cpp
@@ -129,6 +129,7 @@
request.getConfiguration().setUsage(getUsage());
request.getConfiguration().setContentType(getContentType());
+ request.getConfiguration().setTags(getTags());
request.getConfiguration().setSpatializationBehavior(getSpatializationBehavior());
request.getConfiguration().setIsContentSpatialized(isContentSpatialized());
request.getConfiguration().setInputPreset(getInputPreset());
@@ -185,6 +186,7 @@
setUsage(configurationOutput.getUsage());
setContentType(configurationOutput.getContentType());
+ setTags(configurationOutput.getTags());
setSpatializationBehavior(configurationOutput.getSpatializationBehavior());
setIsContentSpatialized(configurationOutput.isContentSpatialized());
setInputPreset(configurationOutput.getInputPreset());
diff --git a/media/libaaudio/src/core/AAudioAudio.cpp b/media/libaaudio/src/core/AAudioAudio.cpp
index 3315344..fb87dd9 100644
--- a/media/libaaudio/src/core/AAudioAudio.cpp
+++ b/media/libaaudio/src/core/AAudioAudio.cpp
@@ -25,6 +25,7 @@
#include <aaudio/AAudio.h>
#include <aaudio/AAudioTesting.h>
+#include <system/aaudio/AAudio.h>
#include "AudioClock.h"
#include "AudioGlobal.h"
#include "AudioStreamBuilder.h"
@@ -177,6 +178,17 @@
streamBuilder->setContentType(contentType);
}
+AAUDIO_API aaudio_result_t AAudioStreamBuilder_setTags(AAudioStreamBuilder* builder,
+ const char* tags) {
+ if (tags == nullptr || strlen(tags) >= AAUDIO_ATTRIBUTES_TAGS_MAX_SIZE) {
+ return AAUDIO_ERROR_ILLEGAL_ARGUMENT;
+ }
+ AudioStreamBuilder *streamBuilder = convertAAudioBuilderToStreamBuilder(builder);
+    streamBuilder->setTags(std::string(tags));
+ return AAUDIO_OK;
+}
+
AAUDIO_API void AAudioStreamBuilder_setSpatializationBehavior(AAudioStreamBuilder* builder,
aaudio_spatialization_behavior_t spatializationBehavior) {
AudioStreamBuilder *streamBuilder = convertAAudioBuilderToStreamBuilder(builder);
@@ -546,6 +558,22 @@
return audioStream->getContentType();
}
+AAUDIO_API aaudio_result_t AAudioStream_getTags(AAudioStream* stream, char* tags)
+{
+ if (tags == nullptr) {
+ return AAUDIO_ERROR_ILLEGAL_ARGUMENT;
+ }
+ AudioStream *audioStream = convertAAudioStreamToAudioStream(stream);
+ std::optional<std::string> optTags = audioStream->getTags();
+ if (optTags.has_value() && !optTags->empty()) {
+ strncpy(tags, optTags.value().c_str(), AAUDIO_ATTRIBUTES_TAGS_MAX_SIZE);
+ tags[AAUDIO_ATTRIBUTES_TAGS_MAX_SIZE-1] = '\0';
+ } else {
+ tags[0] = '\0';
+ }
+ return AAUDIO_OK;
+}
+
AAUDIO_API aaudio_spatialization_behavior_t AAudioStream_getSpatializationBehavior(
AAudioStream* stream)
{
diff --git a/media/libaaudio/src/core/AAudioStreamParameters.cpp b/media/libaaudio/src/core/AAudioStreamParameters.cpp
index 67fc668..056918a 100644
--- a/media/libaaudio/src/core/AAudioStreamParameters.cpp
+++ b/media/libaaudio/src/core/AAudioStreamParameters.cpp
@@ -18,6 +18,7 @@
#define LOG_TAG "AAudioStreamParameters"
#include <utils/Log.h>
#include <system/audio.h>
+#include <system/aaudio/AAudio.h>
#include "AAudioStreamParameters.h"
@@ -34,6 +35,7 @@
mBufferCapacity = other.mBufferCapacity;
mUsage = other.mUsage;
mContentType = other.mContentType;
+ mTags = other.mTags;
mSpatializationBehavior = other.mSpatializationBehavior;
mIsContentSpatialized = other.mIsContentSpatialized;
mInputPreset = other.mInputPreset;
@@ -199,6 +201,10 @@
// break;
}
+ if (mTags.has_value() && mTags->size() >= AAUDIO_ATTRIBUTES_TAGS_MAX_SIZE) {
+ return AAUDIO_ERROR_ILLEGAL_ARGUMENT;
+ }
+
return validateChannelMask();
}
@@ -301,6 +307,7 @@
ALOGD("mBufferCapacity = %6d", mBufferCapacity);
ALOGD("mUsage = %6d", mUsage);
ALOGD("mContentType = %6d", mContentType);
+ ALOGD("mTags = %s", mTags.has_value() ? mTags.value().c_str() : "");
ALOGD("mSpatializationBehavior = %6d", mSpatializationBehavior);
ALOGD("mIsContentSpatialized = %s", mIsContentSpatialized ? "true" : "false");
ALOGD("mInputPreset = %6d", mInputPreset);
diff --git a/media/libaaudio/src/core/AAudioStreamParameters.h b/media/libaaudio/src/core/AAudioStreamParameters.h
index 7c78f03..cad27a7 100644
--- a/media/libaaudio/src/core/AAudioStreamParameters.h
+++ b/media/libaaudio/src/core/AAudioStreamParameters.h
@@ -97,6 +97,14 @@
mContentType = contentType;
}
+ void setTags(const std::optional<std::string>& tags) {
+ mTags = tags;
+ }
+
+ const std::optional<std::string> getTags() const {
+ return mTags;
+ }
+
aaudio_spatialization_behavior_t getSpatializationBehavior() const {
return mSpatializationBehavior;
}
@@ -223,6 +231,7 @@
aaudio_direction_t mDirection = AAUDIO_DIRECTION_OUTPUT;
aaudio_usage_t mUsage = AAUDIO_UNSPECIFIED;
aaudio_content_type_t mContentType = AAUDIO_UNSPECIFIED;
+ std::optional<std::string> mTags = {};
aaudio_spatialization_behavior_t mSpatializationBehavior
= AAUDIO_UNSPECIFIED;
bool mIsContentSpatialized = false;
diff --git a/media/libaaudio/src/core/AudioStream.cpp b/media/libaaudio/src/core/AudioStream.cpp
index e0fd325..a75a2a1 100644
--- a/media/libaaudio/src/core/AudioStream.cpp
+++ b/media/libaaudio/src/core/AudioStream.cpp
@@ -93,6 +93,7 @@
if (mContentType == AAUDIO_UNSPECIFIED) {
mContentType = AAUDIO_CONTENT_TYPE_MUSIC;
}
+ mTags = builder.getTags();
mSpatializationBehavior = builder.getSpatializationBehavior();
// for consistency with other properties, note UNSPECIFIED is the same as AUTO
if (mSpatializationBehavior == AAUDIO_UNSPECIFIED) {
diff --git a/media/libaaudio/src/core/AudioStream.h b/media/libaaudio/src/core/AudioStream.h
index 49a63c4..3271882 100644
--- a/media/libaaudio/src/core/AudioStream.h
+++ b/media/libaaudio/src/core/AudioStream.h
@@ -290,6 +290,10 @@
return mContentType;
}
+ const std::optional<std::string> getTags() const {
+ return mTags;
+ }
+
aaudio_spatialization_behavior_t getSpatializationBehavior() const {
return mSpatializationBehavior;
}
@@ -687,6 +691,13 @@
mContentType = contentType;
}
+ /**
+ * This should not be called after the open() call.
+ */
+ void setTags(const std::optional<std::string> &tags) {
+ mTags = tags;
+ }
+
void setSpatializationBehavior(aaudio_spatialization_behavior_t spatializationBehavior) {
mSpatializationBehavior = spatializationBehavior;
}
@@ -776,6 +787,7 @@
aaudio_usage_t mUsage = AAUDIO_UNSPECIFIED;
aaudio_content_type_t mContentType = AAUDIO_UNSPECIFIED;
+ std::optional<std::string> mTags = {};
aaudio_spatialization_behavior_t mSpatializationBehavior = AAUDIO_UNSPECIFIED;
bool mIsContentSpatialized = false;
aaudio_input_preset_t mInputPreset = AAUDIO_UNSPECIFIED;
diff --git a/media/libaaudio/src/legacy/AudioStreamTrack.cpp b/media/libaaudio/src/legacy/AudioStreamTrack.cpp
index d729047..16c0bcd 100644
--- a/media/libaaudio/src/legacy/AudioStreamTrack.cpp
+++ b/media/libaaudio/src/legacy/AudioStreamTrack.cpp
@@ -146,14 +146,14 @@
builder.isContentSpatialized(),
flags);
- const audio_attributes_t attributes = {
- .content_type = contentType,
- .usage = usage,
- .source = AUDIO_SOURCE_DEFAULT, // only used for recording
- .flags = attributesFlags,
- .tags = ""
- };
-
+ const std::optional<std::string> tags = builder.getTags();
+ audio_attributes_t attributes = AUDIO_ATTRIBUTES_INITIALIZER;
+ attributes.content_type = contentType;
+ attributes.usage = usage;
+ attributes.flags = attributesFlags;
+ if (tags.has_value() && !tags.value().empty()) {
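+ // Tag length was already validated (see AAudioStreamBuilder_setTags and
+ // AAudioStreamParameters::validate), so this copy is assumed to fit
+ // attributes.tags.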
+ strcpy(attributes.tags, tags.value().c_str());
+ }
mAudioTrack = new AudioTrack();
// TODO b/182392769: use attribution source util
mAudioTrack->set(
diff --git a/media/libaaudio/src/libaaudio.map.txt b/media/libaaudio/src/libaaudio.map.txt
index 7213393..13c19a1 100644
--- a/media/libaaudio/src/libaaudio.map.txt
+++ b/media/libaaudio/src/libaaudio.map.txt
@@ -72,6 +72,9 @@
AAudioStream_getHardwareSampleRate; # introduced=UpsideDownCake
AAudio_getPlatformMMapPolicy; # introduced=36
AAudio_getPlatformMMapExclusivePolicy; # introduced=36
+
+ AAudioStreamBuilder_setTags; # systemapi
+ AAudioStream_getTags; # systemapi
local:
*;
};
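
For reference, a minimal sketch of how a privileged client could exercise the two entry points exported above; the tag string is illustrative only, and error handling is elided apart from the length rule that AAudioStreamBuilder_setTags enforces:

    #include <aaudio/AAudio.h>
    #include <system/aaudio/AAudio.h>  // system API: AAudioStreamBuilder_setTags / AAudioStream_getTags

    AAudioStreamBuilder *builder = nullptr;
    AAudio_createStreamBuilder(&builder);
    // Rejected with AAUDIO_ERROR_ILLEGAL_ARGUMENT unless strlen(tags) is
    // smaller than AAUDIO_ATTRIBUTES_TAGS_MAX_SIZE.
    AAudioStreamBuilder_setTags(builder, "oem=routing_extension");
    AAudioStream *stream = nullptr;
    AAudioStreamBuilder_openStream(builder, &stream);
    char tags[AAUDIO_ATTRIBUTES_TAGS_MAX_SIZE] = {};
    AAudioStream_getTags(stream, tags);  // yields "" when no tags were set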
diff --git a/media/libaaudio/tests/test_attributes.cpp b/media/libaaudio/tests/test_attributes.cpp
index e5676a7..045c236 100644
--- a/media/libaaudio/tests/test_attributes.cpp
+++ b/media/libaaudio/tests/test_attributes.cpp
@@ -26,6 +26,8 @@
#include <aaudio/AAudio.h>
#include <gtest/gtest.h>
+#include <system/audio.h>
+#include <system/aaudio/AAudio.h>
constexpr int64_t kNanosPerSecond = 1000000000;
constexpr int kNumFrames = 256;
@@ -36,6 +38,7 @@
static void checkAttributes(aaudio_performance_mode_t perfMode,
aaudio_usage_t usage,
aaudio_content_type_t contentType,
+ const char * tags = nullptr,
aaudio_input_preset_t preset = DONT_SET,
aaudio_allowed_capture_policy_t capturePolicy = DONT_SET,
int privacyMode = DONT_SET,
@@ -45,6 +48,7 @@
AAudioStreamBuilder *aaudioBuilder = nullptr;
AAudioStream *aaudioStream = nullptr;
+ aaudio_result_t expectedSetTagsResult = AAUDIO_OK;
// Use an AAudioStreamBuilder to contain requested parameters.
ASSERT_EQ(AAUDIO_OK, AAudio_createStreamBuilder(&aaudioBuilder));
@@ -60,6 +64,12 @@
if (contentType != DONT_SET) {
AAudioStreamBuilder_setContentType(aaudioBuilder, contentType);
}
+ if (tags != nullptr) {
+ aaudio_result_t result = AAudioStreamBuilder_setTags(aaudioBuilder, tags);
+ expectedSetTagsResult = (strlen(tags) >= AUDIO_ATTRIBUTES_TAGS_MAX_SIZE) ?
+ AAUDIO_ERROR_ILLEGAL_ARGUMENT : AAUDIO_OK;
+ EXPECT_EQ(result, expectedSetTagsResult);
+ }
if (preset != DONT_SET) {
AAudioStreamBuilder_setInputPreset(aaudioBuilder, preset);
}
@@ -87,6 +97,20 @@
: contentType;
EXPECT_EQ(expectedContentType, AAudioStream_getContentType(aaudioStream));
+ char readTags[AAUDIO_ATTRIBUTES_TAGS_MAX_SIZE] = {};
+ EXPECT_EQ(AAUDIO_OK, AAudioStream_getTags(aaudioStream, readTags))
+ << "Expected tags=" << (tags != nullptr ? tags : "null") << ", got tags=" << readTags;;
+ EXPECT_LT(strlen(readTags), AUDIO_ATTRIBUTES_TAGS_MAX_SIZE)
+ << "expected tags len " << strlen(readTags) << " less than "
+ << AUDIO_ATTRIBUTES_TAGS_MAX_SIZE;
+
+ // If tags are null or setting them failed, expect empty tags (default initializer)
+ const char * expectedTags = tags == nullptr ?
+ "" : (expectedSetTagsResult != AAUDIO_OK ? "" : tags);
+ // Oversized tags will be discarded
+ EXPECT_TRUE(std::strcmp(expectedTags, readTags) == 0)
+ << "Expected tags=" << expectedTags << ", got tags=" << readTags;
+
aaudio_input_preset_t expectedPreset =
(preset == DONT_SET || preset == AAUDIO_UNSPECIFIED)
? AAUDIO_INPUT_PRESET_VOICE_RECOGNITION // default
@@ -139,6 +163,21 @@
// Note that the AAUDIO_SYSTEM_USAGE_* values requires special permission.
};
+static const std::string oversizedTags2 = std::string(AUDIO_ATTRIBUTES_TAGS_MAX_SIZE + 1, 'A');
+static const std::string oversizedTags = std::string(AUDIO_ATTRIBUTES_TAGS_MAX_SIZE, 'B');
+static const std::string maxSizeTags = std::string(AUDIO_ATTRIBUTES_TAGS_MAX_SIZE - 1, 'C');
+
+static const char * const sTags[] = {
+ nullptr,
+ "",
+ "oem=routing_extension",
+ "VX_OEM_ROUTING_EXTENSION",
+ maxSizeTags.c_str(),
+ // intentionally use oversized tags
+ oversizedTags.c_str(),
+ oversizedTags2.c_str()
+};
+
static const aaudio_content_type_t sContentypes[] = {
DONT_SET,
AAUDIO_UNSPECIFIED,
@@ -185,11 +224,18 @@
}
}
+static void checkAttributesTags(aaudio_performance_mode_t perfMode) {
+ for (const char * const tags : sTags) {
+ checkAttributes(perfMode, DONT_SET, DONT_SET, tags);
+ }
+}
+
static void checkAttributesInputPreset(aaudio_performance_mode_t perfMode) {
for (aaudio_input_preset_t inputPreset : sInputPresets) {
checkAttributes(perfMode,
DONT_SET,
DONT_SET,
+ nullptr,
inputPreset,
DONT_SET,
DONT_SET,
@@ -202,6 +248,7 @@
checkAttributes(perfMode,
DONT_SET,
DONT_SET,
+ nullptr,
DONT_SET,
policy,
AAUDIO_DIRECTION_INPUT);
@@ -213,6 +260,7 @@
checkAttributes(perfMode,
DONT_SET,
DONT_SET,
+ nullptr,
DONT_SET,
DONT_SET,
privacyMode,
@@ -228,6 +276,10 @@
checkAttributesContentType(AAUDIO_PERFORMANCE_MODE_NONE);
}
+TEST(test_attributes, aaudio_tags_perfnone) {
+ checkAttributesTags(AAUDIO_PERFORMANCE_MODE_NONE);
+}
+
TEST(test_attributes, aaudio_input_preset_perfnone) {
checkAttributesInputPreset(AAUDIO_PERFORMANCE_MODE_NONE);
}
@@ -244,6 +296,10 @@
checkAttributesContentType(AAUDIO_PERFORMANCE_MODE_LOW_LATENCY);
}
+TEST(test_attributes, aaudio_tags_lowlat) {
+ checkAttributesTags(AAUDIO_PERFORMANCE_MODE_LOW_LATENCY);
+}
+
TEST(test_attributes, aaudio_input_preset_lowlat) {
checkAttributesInputPreset(AAUDIO_PERFORMANCE_MODE_LOW_LATENCY);
}
diff --git a/media/libaudioclient/AudioSystem.cpp b/media/libaudioclient/AudioSystem.cpp
index 483a1ef..bb0deb1 100644
--- a/media/libaudioclient/AudioSystem.cpp
+++ b/media/libaudioclient/AudioSystem.cpp
@@ -95,7 +95,7 @@
// ServiceSingleton to provide interaction with the service notifications and
// binder death notifications.
//
-// If the AF/AP service is unavailable for kServiceWaitMs from ServiceManager,
+// If the AF/AP service is unavailable for kServiceClientWaitMs from ServiceManager,
// ServiceSingleton will return a nullptr service handle resulting in the same dead object error
// as if the service died (which it did, otherwise we'd be returning the cached handle).
//
@@ -129,9 +129,10 @@
//
// TODO(b/375691003) We use 10s as a conservative timeout value, and will tune closer to 3s.
// Too small a value (i.e. less than 1s) would churn repeated calls to get the service.
-static constexpr int32_t kServiceWaitMs = 10'000;
+// The value can be tuned by the property audio.service.client_wait_ms.
+static constexpr int32_t kServiceClientWaitMs = 10'000;
-static constexpr const char kServiceWaitProperty[] = "audio.service.wait_ms";
+static constexpr const char kServiceWaitProperty[] = "audio.service.client_wait_ms";
// AudioFlingerServiceTraits is a collection of methods that parameterize the
// ServiceSingleton handler for IAudioFlinger
@@ -172,7 +173,7 @@
}
mediautils::initService<media::IAudioFlingerService, AudioFlingerServiceTraits>();
mWaitMs = std::chrono::milliseconds(
- property_get_int32(kServiceWaitProperty, kServiceWaitMs));
+ property_get_int32(kServiceWaitProperty, kServiceClientWaitMs));
init = true;
}
if (mValid) return mService;
@@ -272,7 +273,8 @@
static inline constinit std::mutex mMutex;
static inline constinit sp<AudioSystem::AudioFlingerClient> mClient GUARDED_BY(mMutex);
static inline constinit sp<IAudioFlinger> mService GUARDED_BY(mMutex);
- static inline constinit std::chrono::milliseconds mWaitMs GUARDED_BY(mMutex) {kServiceWaitMs};
+ static inline constinit std::chrono::milliseconds mWaitMs
+ GUARDED_BY(mMutex) {kServiceClientWaitMs};
static inline constinit bool mValid GUARDED_BY(mMutex) = false;
static inline constinit std::atomic_bool mDisableThreadPoolStart = false;
};
@@ -1014,7 +1016,7 @@
}
mediautils::initService<IAudioPolicyService, AudioPolicyServiceTraits>();
mWaitMs = std::chrono::milliseconds(
- property_get_int32(kServiceWaitProperty, kServiceWaitMs));
+ property_get_int32(kServiceWaitProperty, kServiceClientWaitMs));
init = true;
}
if (mValid) return mService;
@@ -1071,7 +1073,8 @@
static inline constinit sp<AudioSystem::AudioPolicyServiceClient> mClient GUARDED_BY(mMutex);
static inline constinit sp<IAudioPolicyService> mService GUARDED_BY(mMutex);
static inline constinit bool mValid GUARDED_BY(mMutex) = false;
- static inline constinit std::chrono::milliseconds mWaitMs GUARDED_BY(mMutex) {kServiceWaitMs};
+ static inline constinit std::chrono::milliseconds mWaitMs
+ GUARDED_BY(mMutex) {kServiceClientWaitMs};
static inline constinit std::atomic_bool mDisableThreadPoolStart = false;
};
@@ -1224,7 +1227,7 @@
const AttributionSourceState& attributionSource,
audio_config_t* config,
audio_output_flags_t flags,
- audio_port_handle_t* selectedDeviceId,
+ DeviceIdVector* selectedDeviceIds,
audio_port_handle_t* portId,
std::vector<audio_io_handle_t>* secondaryOutputs,
bool *isSpatialized,
@@ -1239,8 +1242,8 @@
ALOGE("%s NULL output - shouldn't happen", __func__);
return BAD_VALUE;
}
- if (selectedDeviceId == nullptr) {
- ALOGE("%s NULL selectedDeviceId - shouldn't happen", __func__);
+ if (selectedDeviceIds == nullptr) {
+ ALOGE("%s NULL selectedDeviceIds - shouldn't happen", __func__);
return BAD_VALUE;
}
if (portId == nullptr) {
@@ -1262,20 +1265,20 @@
legacy2aidl_audio_config_t_AudioConfig(*config, false /*isInput*/));
int32_t flagsAidl = VALUE_OR_RETURN_STATUS(
legacy2aidl_audio_output_flags_t_int32_t_mask(flags));
- int32_t selectedDeviceIdAidl = VALUE_OR_RETURN_STATUS(
- legacy2aidl_audio_port_handle_t_int32_t(*selectedDeviceId));
+ auto selectedDeviceIdsAidl = VALUE_OR_RETURN_STATUS(convertContainer<std::vector<int32_t>>(
+ *selectedDeviceIds, legacy2aidl_audio_port_handle_t_int32_t));
media::GetOutputForAttrResponse responseAidl;
status_t status = statusTFromBinderStatus(
aps->getOutputForAttr(attrAidl, sessionAidl, attributionSource, configAidl, flagsAidl,
- selectedDeviceIdAidl, &responseAidl));
+ selectedDeviceIdsAidl, &responseAidl));
if (status != NO_ERROR) {
config->format = VALUE_OR_RETURN_STATUS(
- aidl2legacy_AudioFormatDescription_audio_format_t(responseAidl.configBase.format));
+ aidl2legacy_AudioFormatDescription_audio_format_t(responseAidl.configBase.format));
config->channel_mask = VALUE_OR_RETURN_STATUS(
- aidl2legacy_AudioChannelLayout_audio_channel_mask_t(
- responseAidl.configBase.channelMask, false /*isInput*/));
+ aidl2legacy_AudioChannelLayout_audio_channel_mask_t(
+ responseAidl.configBase.channelMask, false /*isInput*/));
config->sample_rate = responseAidl.configBase.sampleRate;
return status;
}
@@ -1287,8 +1290,8 @@
*stream = VALUE_OR_RETURN_STATUS(
aidl2legacy_AudioStreamType_audio_stream_type_t(responseAidl.stream));
}
- *selectedDeviceId = VALUE_OR_RETURN_STATUS(
- aidl2legacy_int32_t_audio_port_handle_t(responseAidl.selectedDeviceId));
+ *selectedDeviceIds = VALUE_OR_RETURN_STATUS(convertContainer<DeviceIdVector>(
+ responseAidl.selectedDeviceIds, aidl2legacy_int32_t_audio_port_handle_t));
*portId = VALUE_OR_RETURN_STATUS(aidl2legacy_int32_t_audio_port_handle_t(responseAidl.portId));
*secondaryOutputs = VALUE_OR_RETURN_STATUS(convertContainer<std::vector<audio_io_handle_t>>(
responseAidl.secondaryOutputs, aidl2legacy_int32_t_audio_io_handle_t));
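
As a worked example of the container-conversion pattern used above, a short sketch (assumed to run inside a function returning status_t, which VALUE_OR_RETURN_STATUS requires; the ids are arbitrary):

    // Round-trip a DeviceIdVector through its AIDL int32[] representation.
    DeviceIdVector deviceIds = {42, 43};
    auto aidlIds = VALUE_OR_RETURN_STATUS(convertContainer<std::vector<int32_t>>(
            deviceIds, legacy2aidl_audio_port_handle_t_int32_t));
    DeviceIdVector roundTrip = VALUE_OR_RETURN_STATUS(convertContainer<DeviceIdVector>(
            aidlIds, aidl2legacy_int32_t_audio_port_handle_t));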
diff --git a/media/libaudioclient/aidl/android/media/GetOutputForAttrResponse.aidl b/media/libaudioclient/aidl/android/media/GetOutputForAttrResponse.aidl
index d3975c0..5d066bb 100644
--- a/media/libaudioclient/aidl/android/media/GetOutputForAttrResponse.aidl
+++ b/media/libaudioclient/aidl/android/media/GetOutputForAttrResponse.aidl
@@ -26,8 +26,8 @@
/** Interpreted as audio_io_handle_t. */
int output;
AudioStreamType stream;
- /** Interpreted as audio_port_handle_t. */
- int selectedDeviceId;
+ /** Interpreted as audio_port_handle_t[]. */
+ int[] selectedDeviceIds;
/** Interpreted as audio_port_handle_t. */
int portId;
/** Interpreted as audio_io_handle_t[]. */
diff --git a/media/libaudioclient/aidl/android/media/IAudioPolicyService.aidl b/media/libaudioclient/aidl/android/media/IAudioPolicyService.aidl
index 956acce..7f4a7dd 100644
--- a/media/libaudioclient/aidl/android/media/IAudioPolicyService.aidl
+++ b/media/libaudioclient/aidl/android/media/IAudioPolicyService.aidl
@@ -94,7 +94,7 @@
in AttributionSourceState attributionSource,
in AudioConfig config,
int /* Bitmask, indexed by AudioOutputFlags */ flags,
- int /* audio_port_handle_t */ selectedDeviceId);
+ in int[] /* audio_port_handle_t */ selectedDeviceIds);
void startOutput(int /* audio_port_handle_t */ portId);
diff --git a/media/libaudioclient/include/media/AudioSystem.h b/media/libaudioclient/include/media/AudioSystem.h
index 5565281..fbc7629 100644
--- a/media/libaudioclient/include/media/AudioSystem.h
+++ b/media/libaudioclient/include/media/AudioSystem.h
@@ -342,7 +342,7 @@
const AttributionSourceState& attributionSource,
audio_config_t *config,
audio_output_flags_t flags,
- audio_port_handle_t *selectedDeviceId,
+ DeviceIdVector *selectedDeviceIds,
audio_port_handle_t *portId,
std::vector<audio_io_handle_t> *secondaryOutputs,
bool *isSpatialized,
diff --git a/media/libaudioclient/tests/audio_aidl_legacy_conversion_tests.cpp b/media/libaudioclient/tests/audio_aidl_legacy_conversion_tests.cpp
index 2076045..2cb5f09 100644
--- a/media/libaudioclient/tests/audio_aidl_legacy_conversion_tests.cpp
+++ b/media/libaudioclient/tests/audio_aidl_legacy_conversion_tests.cpp
@@ -689,6 +689,25 @@
AudioEncapsulationMetadataType::FRAMEWORK_TUNER,
AudioEncapsulationMetadataType::DVB_AD_DESCRIPTOR));
+TEST(AudioPortDeviceExt_speakerLayoutRoundTripTest, Aidl2Legacy2Aidl_layoutMask) {
+ AudioPortDeviceExt initial{};
+ initial.speakerLayout = make_ACL_Stereo();
+ auto conv = aidl2legacy_AudioPortDeviceExt_audio_port_config_device_ext(initial);
+ ASSERT_TRUE(conv.ok());
+ auto convBack = legacy2aidl_audio_port_config_device_ext_AudioPortDeviceExt(conv.value());
+ ASSERT_TRUE(convBack.ok());
+ EXPECT_EQ(initial, convBack.value());
+}
+
+TEST(AudioPortDeviceExt_speakerLayoutRoundTripTest, Aidl2Legacy2Aidl_null) {
+ const AudioPortDeviceExt initial{}; // speakerLayout is null
+ auto conv = aidl2legacy_AudioPortDeviceExt_audio_port_config_device_ext(initial);
+ ASSERT_TRUE(conv.ok());
+ auto convBack = legacy2aidl_audio_port_config_device_ext_AudioPortDeviceExt(conv.value());
+ ASSERT_TRUE(convBack.ok());
+ EXPECT_EQ(initial, convBack.value());
+}
+
class AudioGainModeRoundTripTest : public testing::TestWithParam<AudioGainMode> {};
TEST_P(AudioGainModeRoundTripTest, Aidl2Legacy2Aidl) {
const auto initial = GetParam();
diff --git a/media/libaudiofoundation/AudioContainers.cpp b/media/libaudiofoundation/AudioContainers.cpp
index e1265cf..3e2066b 100644
--- a/media/libaudiofoundation/AudioContainers.cpp
+++ b/media/libaudiofoundation/AudioContainers.cpp
@@ -130,6 +130,27 @@
return ss.str();
}
+std::string toString(const DeviceIdVector& deviceIds) {
+ if (deviceIds.empty()) {
+ return "AUDIO_PORT_HANDLE_NONE";
+ }
+ std::stringstream ss;
+ for (auto it = deviceIds.begin(); it != deviceIds.end(); ++it) {
+ if (it != deviceIds.begin()) {
+ ss << ", ";
+ }
+ ss << *it;
+ }
+ return ss.str();
+}
+
+audio_port_handle_t getFirstDeviceId(const DeviceIdVector& deviceIds) {
+ if (deviceIds.empty()) {
+ return AUDIO_PORT_HANDLE_NONE;
+ }
+ return deviceIds[0];
+}
+
AudioProfileAttributesMultimap createAudioProfilesAttrMap(audio_profile profiles[],
uint32_t first,
uint32_t last) {
diff --git a/media/libaudiofoundation/include/media/AudioContainers.h b/media/libaudiofoundation/include/media/AudioContainers.h
index 46fd620..8d4665e 100644
--- a/media/libaudiofoundation/include/media/AudioContainers.h
+++ b/media/libaudiofoundation/include/media/AudioContainers.h
@@ -34,6 +34,7 @@
using SampleRateSet = std::set<uint32_t>;
using MixerBehaviorSet = std::set<audio_mixer_behavior_t>;
+using DeviceIdVector = std::vector<audio_port_handle_t>;
using FormatVector = std::vector<audio_format_t>;
using AudioProfileAttributesMultimap =
std::multimap<audio_format_t, std::pair<SampleRateSet, ChannelMaskSet>>;
@@ -139,6 +140,16 @@
}
/**
+ * Returns a human-readable string for a vector of device ids.
+ */
+std::string toString(const DeviceIdVector& deviceIds);
+
+/**
+ * Returns the first device id of a vector of device ids, or AUDIO_PORT_HANDLE_NONE when it is empty.
+ */
+audio_port_handle_t getFirstDeviceId(const DeviceIdVector& deviceIds);
+
+/**
* Create audio profile attributes map by given audio profile array from the range of [first, last).
*
* @param profiles the array of audio profiles.
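
A brief sketch of the two helpers declared above (the device ids are arbitrary):

    DeviceIdVector ids;                                 // empty vector
    ALOGD("ids = %s", toString(ids).c_str());           // "AUDIO_PORT_HANDLE_NONE"
    ids.push_back(7);
    ids.push_back(9);
    ALOGD("ids = %s", toString(ids).c_str());           // "7, 9"
    audio_port_handle_t first = getFirstDeviceId(ids);  // 7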
diff --git a/media/libaudiohal/impl/Android.bp b/media/libaudiohal/impl/Android.bp
index f5dec56..ddef852 100644
--- a/media/libaudiohal/impl/Android.bp
+++ b/media/libaudiohal/impl/Android.bp
@@ -206,6 +206,7 @@
shared_libs: [
"android.hardware.common-V2-ndk",
"android.hardware.common.fmq-V1-ndk",
+ "com.android.media.audio-aconfig-cc",
"libaudio_aidl_conversion_common_cpp",
"libaudio_aidl_conversion_common_ndk",
"libaudio_aidl_conversion_common_ndk_cpp",
diff --git a/media/libaudiohal/impl/EffectsFactoryHalAidl.cpp b/media/libaudiohal/impl/EffectsFactoryHalAidl.cpp
index 2753906..ac69b26 100644
--- a/media/libaudiohal/impl/EffectsFactoryHalAidl.cpp
+++ b/media/libaudiohal/impl/EffectsFactoryHalAidl.cpp
@@ -25,10 +25,12 @@
#include <error/expected_utils.h>
#include <aidl/android/media/audio/common/AudioStreamType.h>
#include <android/binder_manager.h>
+#include <com_android_media_audio.h>
#include <media/AidlConversionCppNdk.h>
#include <media/AidlConversionEffect.h>
#include <system/audio.h>
#include <system/audio_aidl_utils.h>
+#include <system/audio_effects/effect_uuid.h>
#include <utils/Log.h>
#include "AidlUtils.h"
@@ -68,6 +70,7 @@
std::vector<Descriptor> list;
if (mFactory) {
mFactory->queryEffects(std::nullopt, std::nullopt, std::nullopt, &list).isOk();
+ filterHalDescriptors(list);
}
return list;
}()),
@@ -180,6 +183,11 @@
AudioUuid aidlUuid =
VALUE_OR_RETURN_STATUS(::aidl::android::legacy2aidl_audio_uuid_t_AudioUuid(*uuid));
+ if (!com_android_media_audio_audio_eraser_effect() && isAudioEraser(aidlUuid)) {
+ ALOGE("%s Audio eraser effect not supported yet", __func__);
+ return BAD_VALUE;
+ }
+
std::shared_ptr<IEffect> aidlEffect;
// Use EffectProxy interface instead of IFactory to create
const bool isProxy = isProxyEffect(aidlUuid);
@@ -367,6 +375,23 @@
return 0;
}
+
+bool EffectsFactoryHalAidl::isAudioEraser(const AudioUuid& uuid) {
+ return uuid == getEffectTypeUuidEraser();
+}
+
+void EffectsFactoryHalAidl::filterHalDescriptors(std::vector<Descriptor>& descs) {
+ if (!com_android_media_audio_audio_eraser_effect()) {
+ descs.erase(std::remove_if(descs.begin(), descs.end(),
+ [](const Descriptor& desc) {
+ return isAudioEraser(desc.common.id.type);
+ }),
+ descs.end());
+ }
+}
+
} // namespace effect
// When a shared library is built from a static library, even explicit
diff --git a/media/libaudiohal/impl/EffectsFactoryHalAidl.h b/media/libaudiohal/impl/EffectsFactoryHalAidl.h
index 3b8628c..a3cd165 100644
--- a/media/libaudiohal/impl/EffectsFactoryHalAidl.h
+++ b/media/libaudiohal/impl/EffectsFactoryHalAidl.h
@@ -94,6 +94,11 @@
std::vector<effect_descriptor_t>* descriptors);
bool isProxyEffect(const aidl::android::media::audio::common::AudioUuid& uuid) const;
+
+ static bool isAudioEraser(const aidl::android::media::audio::common::AudioUuid& uuid);
+
+ // filter out descriptors which are not supported by the framework
+ static void filterHalDescriptors(std::vector<Descriptor>& descs);
};
} // namespace effect
diff --git a/media/libaudiohal/impl/StreamHalAidl.cpp b/media/libaudiohal/impl/StreamHalAidl.cpp
index 0d65f8c..e138cea 100644
--- a/media/libaudiohal/impl/StreamHalAidl.cpp
+++ b/media/libaudiohal/impl/StreamHalAidl.cpp
@@ -232,7 +232,9 @@
RETURN_STATUS_IF_ERROR(pause(&reply));
if (reply.state != StreamDescriptor::State::PAUSED &&
reply.state != StreamDescriptor::State::DRAIN_PAUSED &&
- reply.state != StreamDescriptor::State::TRANSFER_PAUSED) {
+ reply.state != StreamDescriptor::State::TRANSFER_PAUSED &&
+ (state != StreamDescriptor::State::DRAINING ||
+ reply.state != StreamDescriptor::State::IDLE)) {
AUGMENT_LOG(E, "unexpected stream state: %s (expected PAUSED)",
toString(reply.state).c_str());
return INVALID_OPERATION;
diff --git a/media/libeffects/preprocessing/Android.bp b/media/libeffects/preprocessing/Android.bp
index 44b7d97..d791fab 100644
--- a/media/libeffects/preprocessing/Android.bp
+++ b/media/libeffects/preprocessing/Android.bp
@@ -55,8 +55,8 @@
defaults: ["libaudiopreprocessing-defaults"],
relative_install_path: "soundfx",
srcs: ["PreProcessing.cpp"],
- header_libs: [
- "libwebrtc_absl_headers",
+ static_libs: [
+ "libabsl",
],
}
@@ -77,6 +77,7 @@
"libutils",
],
static_libs: [
+ "libabsl",
"webrtc_audio_processing",
],
header_libs: [
diff --git a/media/libstagefright/Android.bp b/media/libstagefright/Android.bp
index d084f10..92bf35d 100644
--- a/media/libstagefright/Android.bp
+++ b/media/libstagefright/Android.bp
@@ -317,6 +317,7 @@
"server_configurable_flags",
"libaconfig_storage_read_api_cc",
"aconfig_mediacodec_flags_c_lib",
+ "camera_platform_flags_c_lib",
],
static_libs: [
diff --git a/media/libstagefright/MPEG4Writer.cpp b/media/libstagefright/MPEG4Writer.cpp
index 3aa0107..1e233cf 100644
--- a/media/libstagefright/MPEG4Writer.cpp
+++ b/media/libstagefright/MPEG4Writer.cpp
@@ -53,7 +53,9 @@
#include <media/esds/ESDS.h>
#include "include/HevcUtils.h"
+#include <com_android_internal_camera_flags.h>
#include <com_android_media_editing_flags.h>
+namespace editing_flags = com::android::media::editing::flags;
#ifndef __predict_false
#define __predict_false(exp) __builtin_expect((exp) != 0, 0)
@@ -65,6 +67,8 @@
true; \
}))
+namespace flags_camera = com::android::internal::camera::flags;
+
namespace android {
static const int64_t kMinStreamableFileSizeInBytes = 5 * 1024 * 1024;
@@ -91,6 +95,8 @@
static const int kTimestampDebugCount = 10;
static const int kItemIdBase = 10000;
static const char kExifHeader[] = {'E', 'x', 'i', 'f', '\0', '\0'};
+static const char kGainmapMetaHeader[] = {'t', 'm', 'a', 'p', '\0', '\0'};
+static const char kGainmapHeader[] = {'g', 'm', 'a', 'p', '\0', '\0'};
static const uint8_t kExifApp1Marker[] = {'E', 'x', 'i', 'f', 0xff, 0xe1};
static const uint8_t kMandatoryHevcNalUnitTypes[3] = {
@@ -160,6 +166,7 @@
bool isAvc() const { return mIsAvc; }
bool isHevc() const { return mIsHevc; }
bool isAv1() const { return mIsAv1; }
+ bool isApv() const { return mIsApv; }
bool isHeic() const { return mIsHeic; }
bool isAvif() const { return mIsAvif; }
bool isHeif() const { return mIsHeif; }
@@ -167,8 +174,11 @@
bool isMPEG4() const { return mIsMPEG4; }
bool usePrefix() const { return mIsAvc || mIsHevc || mIsHeic || mIsDovi; }
bool isExifData(MediaBufferBase *buffer, uint32_t *tiffHdrOffset) const;
+ bool isGainmapMetaData(MediaBufferBase* buffer, uint32_t* offset) const;
+ bool isGainmapData(MediaBufferBase* buffer, uint32_t* offset) const;
void addChunkOffset(off64_t offset);
- void addItemOffsetAndSize(off64_t offset, size_t size, bool isExif);
+ void addItemOffsetAndSize(off64_t offset, size_t size, bool isExif,
+ bool isGainmapMeta = false, bool isGainmap = false);
void flushItemRefs();
TrackId& getTrackId() { return mTrackId; }
status_t dump(int fd, const Vector<String16>& args) const;
@@ -178,8 +188,11 @@
void resetInternal();
int64_t trackMetaDataSize();
bool isTimestampValid(int64_t timeUs);
+ uint16_t getImageItemId() { return mImageItemId; }
+ uint16_t getGainmapItemId() { return mGainmapItemId; }
+ uint16_t getGainmapMetaItemId() { return mGainmapMetadataItemId; }
-private:
+ private:
// A helper class to handle faster write box with table entries
template<class TYPE, unsigned ENTRY_SIZE>
// ENTRY_SIZE: # of values in each entry
@@ -328,6 +341,7 @@
bool mIsAvc;
bool mIsHevc;
bool mIsAv1;
+ bool mIsApv;
bool mIsDovi;
bool mIsAudio;
bool mIsVideo;
@@ -405,6 +419,7 @@
Vector<uint16_t> mProperties;
ItemRefs mDimgRefs;
+ ItemRefs mGainmapDimgRefs;
Vector<uint16_t> mExifList;
uint16_t mImageItemId;
uint16_t mItemIdBase;
@@ -413,6 +428,10 @@
int32_t mTileWidth, mTileHeight;
int32_t mGridRows, mGridCols;
size_t mNumTiles, mTileIndex;
+ uint16_t mGainmapItemId, mGainmapMetadataItemId;
+ ColorAspects mColorAspects;
+ bool mColorAspectsValid;
+ Vector<uint8_t> mBitsPerChannel;
// Update the audio track's drift information.
void updateDriftTime(const sp<MetaData>& meta);
@@ -479,6 +498,7 @@
void writeAvccBox();
void writeHvccBox();
void writeAv1cBox();
+ void writeApvcBox();
void writeDoviConfigBox();
void writeUrlBox();
void writeDrefBox();
@@ -680,6 +700,9 @@
return "hvc1";
} else if (!strcasecmp(MEDIA_MIMETYPE_VIDEO_AV1, mime)) {
return "av01";
+ } else if (editing_flags::muxer_mp4_enable_apv() &&
+ !strcasecmp(MEDIA_MIMETYPE_VIDEO_APV, mime)) {
+ return "apv1";
}
} else if (!strncasecmp(mime, "application/", 12)) {
return "mett";
@@ -814,6 +837,10 @@
+ 12 // iref box (when empty)
;
+ if (flags_camera::camera_heif_gainmap()) {
+ metaSize += 36; // grpl box (when empty)
+ }
+
for (List<Track *>::iterator it = mTracks.begin();
it != mTracks.end(); ++it) {
if ((*it)->isHeif()) {
@@ -2213,8 +2240,7 @@
////////////////////////////////////////////////////////////////////////////////
-MPEG4Writer::Track::Track(
- MPEG4Writer *owner, const sp<MediaSource> &source, uint32_t aTrackId)
+MPEG4Writer::Track::Track(MPEG4Writer* owner, const sp<MediaSource>& source, uint32_t aTrackId)
: mOwner(owner),
mMeta(source->getFormat()),
mSource(source),
@@ -2234,7 +2260,7 @@
mStssTableEntries(new ListTableEntries<uint32_t, 1>(1000)),
mSttsTableEntries(new ListTableEntries<uint32_t, 2>(1000)),
mCttsTableEntries(new ListTableEntries<uint32_t, 2>(1000)),
- mElstTableEntries(new ListTableEntries<uint32_t, 3>(3)), // Reserve 3 rows, a row has 3 items
+ mElstTableEntries(new ListTableEntries<uint32_t, 3>(3)), // Reserve 3 rows, a row has 3 items
mMinCttsOffsetTimeUs(0),
mMinCttsOffsetTicks(0),
mMaxCttsOffsetTicks(0),
@@ -2248,6 +2274,7 @@
mFirstSampleStartOffsetUs(0),
mRotation(0),
mDimgRefs("dimg"),
+ mGainmapDimgRefs("dimg"),
mImageItemId(0),
mItemIdBase(0),
mIsPrimary(0),
@@ -2258,7 +2285,10 @@
mGridRows(0),
mGridCols(0),
mNumTiles(1),
- mTileIndex(0) {
+ mTileIndex(0),
+ mGainmapItemId(0),
+ mGainmapMetadataItemId(0),
+ mColorAspectsValid(false) {
getCodecSpecificDataFromInputFormatIfPossible();
const char *mime;
@@ -2266,6 +2296,7 @@
mIsAvc = !strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_AVC);
mIsHevc = !strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_HEVC);
mIsAv1 = !strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_AV1);
+ mIsApv = editing_flags::muxer_mp4_enable_apv() && !strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_APV);
mIsDovi = !strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_DOLBY_VISION);
mIsAudio = !strncasecmp(mime, "audio/", 6);
mIsVideo = !strncasecmp(mime, "video/", 6);
@@ -2446,25 +2477,57 @@
return OK;
}
-bool MPEG4Writer::Track::isExifData(
- MediaBufferBase *buffer, uint32_t *tiffHdrOffset) const {
+bool MPEG4Writer::Track::isGainmapMetaData(MediaBufferBase* buffer, uint32_t* offset) const {
+ if (!mIsHeif) {
+ return false;
+ }
+
+ // Gainmap metadata block starting with 'tmap\0\0'
+ size_t length = buffer->range_length();
+ uint8_t *data = (uint8_t *)buffer->data() + buffer->range_offset();
+ if ((length > sizeof(kGainmapMetaHeader)) &&
+ !memcmp(data, kGainmapMetaHeader, sizeof(kGainmapMetaHeader))) {
+ *offset = sizeof(kGainmapMetaHeader);
+ return true;
+ }
+
+ return false;
+}
+
+bool MPEG4Writer::Track::isGainmapData(MediaBufferBase* buffer, uint32_t* offset) const {
+ if (!mIsHeif) {
+ return false;
+ }
+
+ // Gainmap block starting with 'gmap\0\0'
+ size_t length = buffer->range_length();
+ uint8_t* data = (uint8_t*)buffer->data() + buffer->range_offset();
+ if ((length > sizeof(kGainmapHeader)) &&
+ !memcmp(data, kGainmapHeader, sizeof(kGainmapHeader))) {
+ *offset = sizeof(kGainmapHeader);
+ return true;
+ }
+
+ return false;
+}
+
+bool MPEG4Writer::Track::isExifData(MediaBufferBase* buffer, uint32_t* tiffHdrOffset) const {
if (!mIsHeif) {
return false;
}
// Exif block starting with 'Exif\0\0'
size_t length = buffer->range_length();
- uint8_t *data = (uint8_t *)buffer->data() + buffer->range_offset();
- if ((length > sizeof(kExifHeader))
- && !memcmp(data, kExifHeader, sizeof(kExifHeader))) {
+ uint8_t* data = (uint8_t*)buffer->data() + buffer->range_offset();
+ if ((length > sizeof(kExifHeader)) && !memcmp(data, kExifHeader, sizeof(kExifHeader))) {
*tiffHdrOffset = sizeof(kExifHeader);
return true;
}
// Exif block starting with fourcc 'Exif' followed by APP1 marker
- if ((length > sizeof(kExifApp1Marker) + 2 + sizeof(kExifHeader))
- && !memcmp(data, kExifApp1Marker, sizeof(kExifApp1Marker))
- && !memcmp(data + sizeof(kExifApp1Marker) + 2, kExifHeader, sizeof(kExifHeader))) {
+ if ((length > sizeof(kExifApp1Marker) + 2 + sizeof(kExifHeader)) &&
+ !memcmp(data, kExifApp1Marker, sizeof(kExifApp1Marker)) &&
+ !memcmp(data + sizeof(kExifApp1Marker) + 2, kExifHeader, sizeof(kExifHeader))) {
// skip 'Exif' fourcc
buffer->set_range(4, buffer->range_length() - 4);
@@ -2481,7 +2544,8 @@
mCo64TableEntries->add(hton64(offset));
}
-void MPEG4Writer::Track::addItemOffsetAndSize(off64_t offset, size_t size, bool isExif) {
+void MPEG4Writer::Track::addItemOffsetAndSize(off64_t offset, size_t size, bool isExif,
+ bool isGainmapMeta, bool isGainmap) {
CHECK(mIsHeif);
if (offset > UINT32_MAX || size > UINT32_MAX) {
@@ -2510,6 +2574,46 @@
return;
}
+ bool hasGrid = (mTileWidth > 0);
+
+ if (isGainmapMeta && flags_camera::camera_heif_gainmap()) {
+ uint16_t metaItemId;
+ if (mOwner->reserveItemId_l(1, &metaItemId) != OK) {
+ return;
+ }
+
+ Vector<uint16_t> props;
+ if (mColorAspectsValid) {
+ ItemProperty property;
+ property.type = FOURCC('c', 'o', 'l', 'r');
+ ColorUtils::convertCodecColorAspectsToIsoAspects(
+ mColorAspects, &property.colorPrimaries, &property.colorTransfer,
+ &property.colorMatrix, &property.colorRange);
+ props.push_back(mOwner->addProperty_l(property));
+ }
+ if (!mBitsPerChannel.empty()) {
+ ItemProperty property;
+ property.type = FOURCC('p', 'i', 'x', 'i');
+ property.bitsPerChannel.appendVector(mBitsPerChannel);
+ props.push_back(mOwner->addProperty_l(property));
+ }
+ props.push_back(mOwner->addProperty_l({
+ .type = FOURCC('i', 's', 'p', 'e'),
+ .width = hasGrid ? mTileWidth : mWidth,
+ .height = hasGrid ? mTileHeight : mHeight,
+ }));
+ mGainmapMetadataItemId = mOwner->addItem_l({
+ .itemType = "tmap",
+ .itemId = metaItemId,
+ .isPrimary = false,
+ .isHidden = false,
+ .offset = (uint32_t)offset,
+ .size = (uint32_t)size,
+ .properties = props,
+ });
+ return;
+ }
+
if (mTileIndex >= mNumTiles) {
ALOGW("Ignoring excess tiles!");
return;
@@ -2524,8 +2628,6 @@
default: break; // don't set if invalid
}
- bool hasGrid = (mTileWidth > 0);
-
if (mProperties.empty()) {
mProperties.push_back(mOwner->addProperty_l({
.type = static_cast<uint32_t>(mIsAvif ?
@@ -2550,7 +2652,7 @@
mTileIndex++;
if (hasGrid) {
- mDimgRefs.value.push_back(mOwner->addItem_l({
+ uint16_t id = mOwner->addItem_l({
.itemType = mIsAvif ? "av01" : "hvc1",
.itemId = mItemIdBase++,
.isPrimary = false,
@@ -2558,7 +2660,12 @@
.offset = (uint32_t)offset,
.size = (uint32_t)size,
.properties = mProperties,
- }));
+ });
+ if (isGainmap && flags_camera::camera_heif_gainmap()) {
+ mGainmapDimgRefs.value.push_back(id);
+ } else {
+ mDimgRefs.value.push_back(id);
+ }
if (mTileIndex == mNumTiles) {
mProperties.clear();
@@ -2573,28 +2680,71 @@
.rotation = heifRotation,
}));
}
- mImageItemId = mOwner->addItem_l({
- .itemType = "grid",
- .itemId = mItemIdBase++,
- .isPrimary = (mIsPrimary != 0),
- .isHidden = false,
- .rows = (uint32_t)mGridRows,
- .cols = (uint32_t)mGridCols,
- .width = (uint32_t)mWidth,
- .height = (uint32_t)mHeight,
- .properties = mProperties,
+ if (mColorAspectsValid && flags_camera::camera_heif_gainmap()) {
+ ItemProperty property;
+ property.type = FOURCC('c', 'o', 'l', 'r');
+ ColorUtils::convertCodecColorAspectsToIsoAspects(
+ mColorAspects, &property.colorPrimaries, &property.colorTransfer,
+ &property.colorMatrix, &property.colorRange);
+ mProperties.push_back(mOwner->addProperty_l(property));
+ }
+ if (!mBitsPerChannel.empty() && flags_camera::camera_heif_gainmap()) {
+ ItemProperty property;
+ property.type = FOURCC('p', 'i', 'x', 'i');
+ property.bitsPerChannel.appendVector(mBitsPerChannel);
+ mProperties.push_back(mOwner->addProperty_l(property));
+ }
+ uint16_t itemId = mOwner->addItem_l({
+ .itemType = "grid",
+ .itemId = mItemIdBase++,
+ .isPrimary = isGainmap && flags_camera::camera_heif_gainmap()
+ ? false
+ : (mIsPrimary != 0),
+ .isHidden = false,
+ .rows = (uint32_t)mGridRows,
+ .cols = (uint32_t)mGridCols,
+ .width = (uint32_t)mWidth,
+ .height = (uint32_t)mHeight,
+ .properties = mProperties,
});
+
+ if (isGainmap && flags_camera::camera_heif_gainmap()) {
+ mGainmapItemId = itemId;
+ } else {
+ mImageItemId = itemId;
+ }
}
} else {
- mImageItemId = mOwner->addItem_l({
- .itemType = mIsAvif ? "av01" : "hvc1",
- .itemId = mItemIdBase++,
- .isPrimary = (mIsPrimary != 0),
- .isHidden = false,
- .offset = (uint32_t)offset,
- .size = (uint32_t)size,
- .properties = mProperties,
+ if (mColorAspectsValid && flags_camera::camera_heif_gainmap()) {
+ ItemProperty property;
+ property.type = FOURCC('c', 'o', 'l', 'r');
+ ColorUtils::convertCodecColorAspectsToIsoAspects(
+ mColorAspects, &property.colorPrimaries, &property.colorTransfer,
+ &property.colorMatrix, &property.colorRange);
+ mProperties.push_back(mOwner->addProperty_l(property));
+ }
+ if (!mBitsPerChannel.empty() && flags_camera::camera_heif_gainmap()) {
+ ItemProperty property;
+ property.type = FOURCC('p', 'i', 'x', 'i');
+ property.bitsPerChannel.appendVector(mBitsPerChannel);
+ mProperties.push_back(mOwner->addProperty_l(property));
+ }
+ uint16_t itemId = mOwner->addItem_l({
+ .itemType = mIsAvif ? "av01" : "hvc1",
+ .itemId = mItemIdBase++,
+ .isPrimary = (isGainmap && flags_camera::camera_heif_gainmap()) ? false
+ : (mIsPrimary != 0),
+ .isHidden = false,
+ .offset = (uint32_t)offset,
+ .size = (uint32_t)size,
+ .properties = mProperties,
});
+
+ if (isGainmap && flags_camera::camera_heif_gainmap()) {
+ mGainmapItemId = itemId;
+ } else {
+ mImageItemId = itemId;
+ }
}
}
@@ -2619,6 +2769,10 @@
}
}
}
+
+ if ((mGainmapItemId > 0) && flags_camera::camera_heif_gainmap()) {
+ mOwner->addRefs_l(mGainmapItemId, mGainmapDimgRefs);
+ }
}
void MPEG4Writer::Track::setTimeScale() {
@@ -2708,6 +2862,9 @@
} else if (!strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_AV1) ||
!strcasecmp(mime, MEDIA_MIMETYPE_IMAGE_AVIF)) {
mMeta->findData(kKeyAV1C, &type, &data, &size);
+ } else if (editing_flags::muxer_mp4_enable_apv() &&
+ !strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_APV)) {
+ mMeta->findData(kKeyAPVC, &type, &data, &size);
} else if (!strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_DOLBY_VISION)) {
getDolbyVisionProfile();
if (!mMeta->findData(kKeyAVCC, &type, &data, &size) &&
@@ -3609,7 +3766,7 @@
(const uint8_t *)buffer->data()
+ buffer->range_offset(),
buffer->range_length());
- } else if (mIsMPEG4 || mIsAv1) {
+ } else if (mIsMPEG4 || mIsAv1 || mIsApv) {
err = copyCodecSpecificData((const uint8_t *)buffer->data() + buffer->range_offset(),
buffer->range_length());
}
@@ -3660,19 +3817,68 @@
break;
}
+ bool isGainmapMeta = false;
+ bool isGainmap = false;
bool isExif = false;
uint32_t tiffHdrOffset = 0;
+ uint32_t gainmapOffset = 0;
int32_t isMuxerData;
if (buffer->meta_data().findInt32(kKeyIsMuxerData, &isMuxerData) && isMuxerData) {
- // We only support one type of muxer data, which is Exif data block.
+ if (flags_camera::camera_heif_gainmap()) {
+ isGainmapMeta = isGainmapMetaData(buffer, &gainmapOffset);
+ isGainmap = isGainmapData(buffer, &gainmapOffset);
+ if ((isGainmap || isGainmapMeta) && (gainmapOffset > 0) &&
+ (gainmapOffset < buffer->range_length())) {
+ // Don't include the tmap/gmap header
+ buffer->set_range(gainmapOffset, buffer->range_length() - gainmapOffset);
+ }
+ }
isExif = isExifData(buffer, &tiffHdrOffset);
- if (!isExif) {
- ALOGW("Ignoring bad Exif data block");
+ if (!isExif && !isGainmap && !isGainmapMeta) {
+ ALOGW("Ignoring bad muxer data block");
buffer->release();
buffer = NULL;
continue;
}
}
+ if (flags_camera::camera_heif_gainmap()) {
+ int32_t val32;
+ if (buffer->meta_data().findInt32(kKeyColorPrimaries, &val32)) {
+ mColorAspects.mPrimaries = static_cast<ColorAspects::Primaries>(val32);
+ mColorAspectsValid = true;
+ } else {
+ mColorAspectsValid = false;
+ }
+ if (buffer->meta_data().findInt32(kKeyTransferFunction, &val32)) {
+ mColorAspects.mTransfer = static_cast<ColorAspects::Transfer>(val32);
+ } else {
+ mColorAspectsValid = false;
+ }
+ if (buffer->meta_data().findInt32(kKeyColorMatrix, &val32)) {
+ mColorAspects.mMatrixCoeffs = static_cast<ColorAspects::MatrixCoeffs>(val32);
+ } else {
+ mColorAspectsValid = false;
+ }
+ if (buffer->meta_data().findInt32(kKeyColorRange, &val32)) {
+ mColorAspects.mRange = static_cast<ColorAspects::Range>(val32);
+ } else {
+ mColorAspectsValid = false;
+ }
+ if (mBitsPerChannel.empty() && buffer->meta_data().findInt32(kKeyColorFormat, &val32)) {
+ switch (val32) {
+ case COLOR_FormatYUV420Flexible:
+ case COLOR_FormatYUV420Planar:
+ case COLOR_FormatYUV420SemiPlanar: {
+ uint8_t bitsPerChannel[] = {8, 8, 8};
+ mBitsPerChannel.appendArray(bitsPerChannel, sizeof(bitsPerChannel));
+ }
+ break;
+ default:
+ break;
+ }
+ }
+ }
+
if (!buffer->meta_data().findInt64(kKeySampleFileOffset, &sampleFileOffset)) {
sampleFileOffset = -1;
}
@@ -3698,7 +3904,7 @@
// Make a deep copy of the MediaBuffer and Metadata and release
// the original as soon as we can
- MediaBuffer *copy = new MediaBuffer(buffer->range_length());
+ MediaBuffer* copy = new MediaBuffer(buffer->range_length());
if (sampleFileOffset != -1) {
copy->meta_data().setInt64(kKeySampleFileOffset, sampleFileOffset);
} else {
@@ -3995,13 +4201,13 @@
trackProgressStatus(timestampUs);
}
}
- if (!hasMultipleTracks) {
+ if (!hasMultipleTracks || isGainmapMeta || isGainmap) {
size_t bytesWritten;
off64_t offset = mOwner->addSample_l(
copy, usePrefix, tiffHdrOffset, &bytesWritten);
if (mIsHeif) {
- addItemOffsetAndSize(offset, bytesWritten, isExif);
+ addItemOffsetAndSize(offset, bytesWritten, isExif, isGainmapMeta, isGainmap);
} else {
if (mCo64TableEntries->count() == 0) {
addChunkOffset(offset);
@@ -4304,6 +4510,15 @@
increase += 9; // 'irot' property (worst case)
}
+ if (flags_camera::camera_heif_gainmap()) {
+ // assume we have HDR gainmap and associated metadata
+ increase += (8 + mCodecSpecificDataSize) // 'hvcC' property (HDR gainmap)
+ + (2 * 20) // 'ispe' property
+ + (2 * 16) // 'pixi' property
+ + (2 * 19) // 'colr' property
+ ;
+ }
+
// increase to iref and idat
if (grid) {
increase += (12 + mNumTiles * 2) // 'dimg' in iref
@@ -4317,6 +4532,12 @@
+ 21) // increase to 'iinf'
* (mNumTiles + grid + 1); // "+1" is for 'Exif'
+ if (flags_camera::camera_heif_gainmap()) {
+ increase += (16 // increase to 'iloc'
+ + 21) // increase to 'iinf'
+ * 2; // "2" is for 'tmap', 'gmap'
+ }
+
// When total # of properties is > 127, the properties id becomes 2-byte.
// We write 4 properties at most for each image (2x'ispe', 1x'hvcC', 1x'irot').
// Set the threshold to be 30.
@@ -4338,6 +4559,7 @@
!strcasecmp(MEDIA_MIMETYPE_VIDEO_AVC, mime) ||
!strcasecmp(MEDIA_MIMETYPE_VIDEO_HEVC, mime) ||
!strcasecmp(MEDIA_MIMETYPE_VIDEO_AV1, mime) ||
+ (editing_flags::muxer_mp4_enable_apv() && !strcasecmp(MEDIA_MIMETYPE_VIDEO_APV, mime)) ||
!strcasecmp(MEDIA_MIMETYPE_VIDEO_DOLBY_VISION, mime) ||
!strcasecmp(MEDIA_MIMETYPE_IMAGE_ANDROID_HEIC, mime) ||
!strcasecmp(MEDIA_MIMETYPE_IMAGE_AVIF, mime)) {
@@ -4512,6 +4734,9 @@
writeHvccBox();
} else if (!strcasecmp(MEDIA_MIMETYPE_VIDEO_AV1, mime)) {
writeAv1cBox();
+ } else if (editing_flags::muxer_mp4_enable_apv() &&
+ !strcasecmp(MEDIA_MIMETYPE_VIDEO_APV, mime)) {
+ writeApvcBox();
} else if (!strcasecmp(MEDIA_MIMETYPE_VIDEO_DOLBY_VISION, mime)) {
if (mDoviProfile <= DolbyVisionProfileDvheSt) {
writeHvccBox();
@@ -5103,6 +5328,15 @@
mOwner->endBox(); // av1C
}
+void MPEG4Writer::Track::writeApvcBox() {
+ CHECK(mCodecSpecificData);
+ CHECK_GE(mCodecSpecificDataSize, 4u);
+
+ mOwner->beginBox("apvC");
+ mOwner->write(mCodecSpecificData, mCodecSpecificDataSize);
+ mOwner->endBox(); // apvC
+}
+
void MPEG4Writer::Track::writeDoviConfigBox() {
CHECK_NE(mDoviProfile, 0u);
@@ -5475,6 +5709,21 @@
endBox();
}
+void MPEG4Writer::writeGrplBox(const Vector<uint16_t> &items) {
+ if (flags_camera::camera_heif_gainmap()) {
+ beginBox("grpl");
+ beginBox("altr");
+ writeInt32(0); // Version = 0, Flags = 0
+ writeInt32(1); // Group Id
+ writeInt32(items.size()); // Number of entities
+ for (size_t i = 0; i < items.size(); i++) {
+ writeInt32(items[i]); // Item Id
+ }
+ endBox();
+ endBox();
+ }
+}
+
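
For reference, the byte layout writeGrplBox() produces for two item ids, read directly off the write calls above:

    grpl                      // container box
      altr                    // alternative-entities group
        int32  0              // version = 0, flags = 0
        int32  1              // group id
        int32  2              // number of entities (items.size())
        int32  items[0]       // e.g. the 'tmap' metadata item id
        int32  items[1]       // e.g. the primary image item id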
void MPEG4Writer::writeIpcoBox() {
beginBox("ipco");
size_t numProperties = mProperties.size();
@@ -5520,6 +5769,32 @@
endBox();
break;
}
+ case FOURCC('c', 'o', 'l', 'r'):
+ {
+ if (flags_camera::camera_heif_gainmap()) {
+ beginBox("colr");
+ writeFourcc("nclx");
+ writeInt16(mProperties[propIndex].colorPrimaries);
+ writeInt16(mProperties[propIndex].colorTransfer);
+ writeInt16(mProperties[propIndex].colorMatrix);
+ writeInt8(int8_t(mProperties[propIndex].colorRange ? 0x80 : 0x0));
+ endBox();
+ }
+ break;
+ }
+ case FOURCC('p', 'i', 'x', 'i'):
+ {
+ if (flags_camera::camera_heif_gainmap()) {
+ beginBox("pixi");
+ writeInt32(0); // Version = 0, Flags = 0
+ writeInt8(mProperties[propIndex].bitsPerChannel.size()); // Number of channels
+ for (size_t i = 0; i < mProperties[propIndex].bitsPerChannel.size(); i++) {
+ writeInt8(mProperties[propIndex].bitsPerChannel[i]); // Channel bit depth
+ }
+ endBox();
+ }
+ break;
+ }
default:
ALOGW("Skipping unrecognized property: type 0x%08x",
mProperties[propIndex].type);
@@ -5574,6 +5849,12 @@
for (auto it = mItems.begin(); it != mItems.end(); it++) {
ItemInfo &item = it->second;
+ if (item.isGainmapMeta() && !item.properties.empty() &&
+ flags_camera::camera_heif_gainmap()) {
+ mAssociationEntryCount++;
+ continue;
+ }
+
if (!item.isImage()) continue;
if (item.isPrimary) {
@@ -5605,11 +5886,27 @@
}
}
+ uint16_t gainmapItemId = 0;
+ uint16_t gainmapMetaItemId = 0;
for (List<Track *>::iterator it = mTracks.begin();
it != mTracks.end(); ++it) {
if ((*it)->isHeif()) {
(*it)->flushItemRefs();
}
+ if (flags_camera::camera_heif_gainmap()) {
+ if ((*it)->getGainmapItemId() > 0) {
+ gainmapItemId = (*it)->getGainmapItemId();
+ }
+ if ((*it)->getGainmapMetaItemId() > 0) {
+ gainmapMetaItemId = (*it)->getGainmapMetaItemId();
+ }
+ }
+ }
+ if ((gainmapItemId > 0) && (gainmapMetaItemId > 0) && flags_camera::camera_heif_gainmap()) {
+ ItemRefs gainmapRefs("dimg");
+ gainmapRefs.value.push_back(mPrimaryItemId);
+ gainmapRefs.value.push_back(gainmapItemId);
+ addRefs_l(gainmapMetaItemId, gainmapRefs);
}
beginBox("meta");
@@ -5625,6 +5922,12 @@
if (mHasRefs) {
writeIrefBox();
}
+ if ((gainmapItemId > 0) && (gainmapMetaItemId > 0) && flags_camera::camera_heif_gainmap()) {
+ Vector<uint16_t> itemIds;
+ itemIds.push_back(gainmapMetaItemId);
+ itemIds.push_back(mPrimaryItemId);
+ writeGrplBox(itemIds);
+ }
endBox();
}
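
Tying the gainmap plumbing above together, a minimal sketch of how a caller might hand a gainmap metadata block to the writer; meta/metaSize are assumed to hold the serialized metadata, and allocation checks are elided:

    // The 'tmap\0\0' prefix routes the block into isGainmapMetaData();
    // a 'gmap\0\0' prefix would route it into isGainmapData() instead.
    static const char kTmapHeader[] = {'t', 'm', 'a', 'p', '\0', '\0'};
    MediaBuffer *buffer = new MediaBuffer(sizeof(kTmapHeader) + metaSize);
    uint8_t *data = (uint8_t *)buffer->data();
    memcpy(data, kTmapHeader, sizeof(kTmapHeader));
    memcpy(data + sizeof(kTmapHeader), meta, metaSize);
    buffer->meta_data().setInt32(kKeyIsMuxerData, 1);  // routes the buffer into the muxer-data path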
diff --git a/media/libstagefright/MediaCodec.cpp b/media/libstagefright/MediaCodec.cpp
index 9abe037..6b64ed7 100644
--- a/media/libstagefright/MediaCodec.cpp
+++ b/media/libstagefright/MediaCodec.cpp
@@ -1298,7 +1298,12 @@
CHECK_EQ(mState, UNINITIALIZED);
mResourceManagerProxy->removeClient();
- flushMediametrics();
+ flushMediametrics(); // this deletes mMetricsHandle
+ // don't keep the last metrics handle around
+ if (mLastMetricsHandle != 0) {
+ mediametrics_delete(mLastMetricsHandle);
+ mLastMetricsHandle = 0;
+ }
// clean any saved metrics info we stored as part of configure()
if (mConfigureMsg != nullptr) {
@@ -1309,7 +1314,7 @@
}
}
-// except for in constructor, called from the looper thread (and therefore mutexed)
+// except for in constructor, called from the looper thread (and therefore not mutexed)
void MediaCodec::initMediametrics() {
if (mMetricsHandle == 0) {
mMetricsHandle = mediametrics_create(kCodecKeyName);
@@ -1335,6 +1340,7 @@
mInputBufferCounter = 0;
}
+ mSubsessionCount = 0;
mLifetimeStartNs = systemTime(SYSTEM_TIME_MONOTONIC);
resetMetricsFields();
}
@@ -1346,6 +1352,17 @@
mReliabilityContextMetrics = ReliabilityContextMetrics();
}
+// always called from the looper thread (and therefore not mutexed)
+void MediaCodec::resetSubsessionMetricsFields() {
+ mBytesEncoded = 0;
+ mFramesEncoded = 0;
+ mFramesInput = 0;
+ mBytesInput = 0;
+ mEarliestEncodedPtsUs = INT64_MAX;
+ mLatestEncodedPtsUs = INT64_MIN;
+}
+
+// always called from the looper thread
void MediaCodec::updateMediametrics() {
if (mMetricsHandle == 0) {
ALOGV("no metrics handle found");
@@ -1710,6 +1727,7 @@
}
}
+// except for in destructor, called from the looper thread
void MediaCodec::flushMediametrics() {
ALOGV("flushMediametrics");
@@ -1723,7 +1741,14 @@
if (mMetricsToUpload && mediametrics_count(mMetricsHandle) > 0) {
mediametrics_selfRecord(mMetricsHandle);
}
- mediametrics_delete(mMetricsHandle);
+ // keep previous metrics handle for subsequent getMetrics() calls.
+ // NOTE: There could be multiple error events, each flushing the metrics.
+ // We keep the last non-empty metrics handle, so getMetrics() in the
+ // next call will get the latest metrics prior to the errors.
+ if (mLastMetricsHandle != 0) {
+ mediametrics_delete(mLastMetricsHandle);
+ }
+ mLastMetricsHandle = mMetricsHandle;
mMetricsHandle = 0;
}
// we no longer have anything pending upload
@@ -1888,7 +1913,10 @@
});
}
- if (mDomain == DOMAIN_VIDEO && (mFlags & kFlagIsEncoder)) {
+ // NOTE: these were erroneously restricted to video encoders, but we want them for all
+ // codecs.
+ if (android::media::codec::provider_->subsession_metrics()
+ || (mDomain == DOMAIN_VIDEO && (mFlags & kFlagIsEncoder))) {
mBytesInput += buffer->size();
mFramesInput++;
}
@@ -1910,12 +1938,15 @@
++mInputBufferCounter;
}
-// when we get a buffer back from the codec
+// when we get a buffer back from the codec, always called from the looper thread
void MediaCodec::statsBufferReceived(int64_t presentationUs, const sp<MediaCodecBuffer> &buffer) {
CHECK_NE(mState, UNINITIALIZED);
- if (mDomain == DOMAIN_VIDEO && (mFlags & kFlagIsEncoder)) {
+ // NOTE: these were erroneously restricted to video encoders, but we want them for all
+ // codecs.
+ if (android::media::codec::provider_->subsession_metrics()
+ || (mDomain == DOMAIN_VIDEO && (mFlags & kFlagIsEncoder))) {
int32_t flags = 0;
(void) buffer->meta()->findInt32("flags", &flags);
@@ -2463,12 +2494,8 @@
mediametrics_setInt32(nextMetricsHandle, kCodecCrypto, 1);
}
} else if (mFlags & kFlagIsSecure) {
- if (android::media::codec::provider_->secure_codecs_require_crypto()) {
- mErrorLog.log(LOG_TAG, "Crypto or descrambler must be given for secure codec");
- return INVALID_OPERATION;
- } else {
- ALOGW("Crypto or descrambler should be given for secure codec");
- }
+ // We'll catch this later when we process the buffers.
+ ALOGW("Crypto or descrambler should be given for secure codec");
}
if (mConfigureMsg != nullptr) {
@@ -3617,6 +3644,10 @@
updateMediametrics();
results = mediametrics_dup(mMetricsHandle);
updateEphemeralMediametrics(results);
+ } else if (mLastMetricsHandle != 0) {
+ // After error, mMetricsHandle is cleared, but we keep the last
+ // metrics around so that it can be queried by getMetrics().
+ results = mediametrics_dup(mLastMetricsHandle);
} else {
results = mediametrics_dup(mMetricsHandle);
}
@@ -3886,6 +3917,7 @@
return true;
}
+// always called from the looper thread
MediaCodec::DequeueOutputResult MediaCodec::handleDequeueOutputBuffer(
const sp<AReplyToken> &replyID, bool newRequest) {
if (!isExecuting()) {
@@ -3941,6 +3973,9 @@
response->setInt32("flags", flags);
+ // NOTE: we must account for the stats of an output buffer only after we
+ // have already handled a potential output format change that could have
+ // started a new subsession.
statsBufferReceived(timeUs, buffer);
response->postReply(replyID);
@@ -5845,6 +5880,7 @@
}
}
+// always called from the looper thread
void MediaCodec::handleOutputFormatChangeIfNeeded(const sp<MediaCodecBuffer> &buffer) {
sp<AMessage> format = buffer->format();
if (mOutputFormat == format) {
@@ -5928,6 +5964,24 @@
}
}
}
+
+ // Update the width and the height.
+ int32_t left = 0, top = 0, right = 0, bottom = 0, width = 0, height = 0;
+ bool newSubsession = false;
+ if (android::media::codec::provider_->subsession_metrics()
+ && mOutputFormat->findInt32("width", &width)
+ && mOutputFormat->findInt32("height", &height)
+ && (width != mWidth || height != mHeight)) {
+ // consider a new subsession if the width or height changes.
+ newSubsession = true;
+ }
+ // TODO: properly detect new audio subsession
+
+ // Only consider a new subsession if we already have output (from a previous subsession).
+ if (newSubsession && mMetricsToUpload && mBytesEncoded > 0) {
+ handleStartingANewSubsession();
+ }
+
if (mFlags & kFlagIsAsync) {
onOutputFormatChanged();
} else {
@@ -5935,8 +5989,6 @@
postActivityNotificationIfPossible();
}
- // Update the width and the height.
- int32_t left = 0, top = 0, right = 0, bottom = 0, width = 0, height = 0;
bool resolutionChanged = false;
if (mOutputFormat->findRect("crop", &left, &top, &right, &bottom)) {
mWidth = right - left + 1;
@@ -5963,6 +6015,35 @@
updateHdrMetrics(false /* isConfig */);
}
+// always called from the looper thread (and therefore not mutexed)
+void MediaCodec::handleStartingANewSubsession() {
+ // create a new metrics item for the subsession with the new resolution.
+ // TODO: properly account input counts for the previous and the new
+ // subsessions. We only find out that a new subsession started from the
+ // output format, but by that time we already accounted the input counts
+ // to the previous subsession.
+ flushMediametrics(); // this deletes mMetricsHandle, but stores it in mLastMetricsHandle
+
+ // hence mLastMetricsHandle has the metrics item for the previous subsession.
+ if ((mFlags & kFlagIsAsync) && mCallback != nullptr) {
+ sp<AMessage> msg = mCallback->dup();
+ msg->setInt32("callbackID", CB_METRICS_FLUSHED);
+ std::unique_ptr<mediametrics::Item> flushedMetrics(
+ mediametrics::Item::convert(mediametrics_dup(mLastMetricsHandle)));
+ msg->setObject("metrics", new WrapperObject<std::unique_ptr<mediametrics::Item>>(
+ std::move(flushedMetrics)));
+ msg->post();
+ }
+
+ // reuse/continue old metrics item for the new subsession.
+ mMetricsHandle = mediametrics_dup(mLastMetricsHandle);
+ mMetricsToUpload = true;
+ // TODO: configured width/height for the new subsession should be the
+ // previous width/height.
+ mSubsessionCount++;
+ resetSubsessionMetricsFields();
+}
+
void MediaCodec::extractCSD(const sp<AMessage> &format) {
mCSD.clear();
@@ -6213,6 +6294,12 @@
CryptoPlugin::SubSample ss;
CryptoPlugin::Pattern pattern;
+ if (android::media::codec::provider_->secure_codecs_require_crypto()
+ && (mFlags & kFlagIsSecure) && !hasCryptoOrDescrambler()) {
+ mErrorLog.log(LOG_TAG, "Crypto or descrambler must be given for secure codec");
+ return INVALID_OPERATION;
+ }
+
if (msg->findSize("size", &size)) {
if (hasCryptoOrDescrambler()) {
ss.mNumBytesOfClearData = size;
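
A hedged sketch of consuming the new CB_METRICS_FLUSHED payload above on the receiving side, assuming the WrapperObject helper exposes its payload as "value" as it does elsewhere in MediaCodec:

    sp<RefBase> obj;
    if (msg->findObject("metrics", &obj)) {
        auto *wrapper = static_cast<
                WrapperObject<std::unique_ptr<mediametrics::Item>> *>(obj.get());
        mediametrics::Item *flushed = wrapper->value.get();  // prior subsession's metrics
    }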
diff --git a/media/libstagefright/MediaMuxer.cpp b/media/libstagefright/MediaMuxer.cpp
index 1008445..96e399b 100644
--- a/media/libstagefright/MediaMuxer.cpp
+++ b/media/libstagefright/MediaMuxer.cpp
@@ -19,6 +19,8 @@
#include "webm/WebmWriter.h"
+#include <com_android_internal_camera_flags.h>
+
#include <utils/Log.h>
#include <media/stagefright/MediaMuxer.h>
@@ -38,6 +40,8 @@
#include <media/stagefright/OggWriter.h>
#include <media/stagefright/Utils.h>
+namespace flags_camera = com::android::internal::camera::flags;
+
namespace android {
static bool isMp4Format(MediaMuxer::OutputFormat format) {
@@ -270,6 +274,25 @@
sampleMetaData.setInt64(kKeyLastSampleIndexInChunk, val64);
}
+ if (flags_camera::camera_heif_gainmap()) {
+ int32_t val32;
+ if (bufMeta->findInt32("color-primaries", &val32)) {
+ sampleMetaData.setInt32(kKeyColorPrimaries, val32);
+ }
+ if (bufMeta->findInt32("color-transfer", &val32)) {
+ sampleMetaData.setInt32(kKeyTransferFunction, val32);
+ }
+ if (bufMeta->findInt32("color-matrix", &val32)) {
+ sampleMetaData.setInt32(kKeyColorMatrix, val32);
+ }
+ if (bufMeta->findInt32("color-range", &val32)) {
+ sampleMetaData.setInt32(kKeyColorRange, val32);
+ }
+ if (bufMeta->findInt32(KEY_COLOR_FORMAT, &val32)) {
+ sampleMetaData.setInt32(kKeyColorFormat, val32);
+ }
+ }
+
sp<MediaAdapter> currentTrack = mTrackList[trackIndex];
// This pushBuffer will wait until the mediaBuffer is consumed.
return currentTrack->pushBuffer(mediaBuffer);
diff --git a/media/libstagefright/data/media_codecs_google_c2_video.xml b/media/libstagefright/data/media_codecs_google_c2_video.xml
index 137b282..fc0a5e9 100644
--- a/media/libstagefright/data/media_codecs_google_c2_video.xml
+++ b/media/libstagefright/data/media_codecs_google_c2_video.xml
@@ -95,6 +95,14 @@
<Feature name="adaptive-playback" />
<Attribute name="software-codec" />
</MediaCodec>
+ <MediaCodec name="c2.android.apv.decoder" type="video/apv">
+ <Limit name="size" min="16x16" max="1920x1920"/>
+ <Limit name="alignment" value="2x2"/>
+ <Limit name="bitrate" range="1-240000000"/>
+ <Limit name="block-size" value="16x16" />
+ <Limit name="block-count" range="1-32768" /> <!-- max 4096x2048 equivalent -->
+ <Attribute name="software-codec"/>
+ </MediaCodec>
</Decoders>
<Encoders>
@@ -160,5 +168,13 @@
<Limit name="bitrate" range="1-40000000" />
<Feature name="bitrate-modes" value="VBR,CBR" />
</MediaCodec>
+ <MediaCodec name="c2.android.apv.encoder" type="video/apv">
+ <Limit name="size" min="2x2" max="1920x1920" />
+ <Limit name="alignment" value="2x2" />
+ <Limit name="block-size" value="16x16" />
+ <Limit name="block-count" range="1-32768" /> <!-- max 4096x2048 equivalent -->
+ <Limit name="bitrate" range="1-240000000" />
+ <Feature name="bitrate-modes" value="VBR,CBR" />
+ </MediaCodec>
</Encoders>
</Included>
diff --git a/media/libstagefright/data/media_codecs_sw.xml b/media/libstagefright/data/media_codecs_sw.xml
index c18ab94..61b7198 100644
--- a/media/libstagefright/data/media_codecs_sw.xml
+++ b/media/libstagefright/data/media_codecs_sw.xml
@@ -255,6 +255,14 @@
<Feature name="adaptive-playback" />
<Attribute name="software-codec" />
</MediaCodec>
+ <MediaCodec name="c2.android.apv.decoder" type="video/apv">
+ <Limit name="size" min="16x16" max="1920x1920"/>
+ <Limit name="alignment" value="2x2"/>
+ <Limit name="bitrate" range="1-240000000"/>
+ <Limit name="block-size" value="16x16" />
+ <Limit name="block-count" range="1-32768" /> <!-- max 4096x2048 equivalent -->
+ <Attribute name="software-codec"/>
+ </MediaCodec>
</Decoders>
<Encoders>
<MediaCodec name="c2.android.aac.encoder" type="audio/mp4a-latm">
@@ -409,5 +417,14 @@
<Feature name="qp-bounds" />
<Attribute name="software-codec" />
</MediaCodec>
+ <MediaCodec name="c2.android.apv.encoder" type="video/apv" variant="!slow-cpu">
+ <Limit name="size" min="2x2" max="1920x1920" />
+ <Limit name="alignment" value="2x2" />
+ <Limit name="block-size" value="16x16" />
+ <Limit name="block-count" range="1-32768" /> <!-- max 4096x2048 equivalent -->
+ <Limit name="bitrate" range="1-240000000" />
+ <Feature name="bitrate-modes" value="VBR,CBR" />
+ <Attribute name="software-codec" />
+ </MediaCodec>
</Encoders>
</MediaCodecs>
diff --git a/media/libstagefright/include/media/stagefright/MPEG4Writer.h b/media/libstagefright/include/media/stagefright/MPEG4Writer.h
index ee75129..a409e46 100644
--- a/media/libstagefright/include/media/stagefright/MPEG4Writer.h
+++ b/media/libstagefright/include/media/stagefright/MPEG4Writer.h
@@ -200,6 +200,9 @@
bool isImage() const {
return !strcmp("hvc1", itemType) || !strcmp("av01", itemType) || isGrid();
}
+ bool isGainmapMeta() const {
+ return !strcmp("tmap", itemType);
+ }
const char *itemType;
uint16_t itemId;
bool isPrimary;
@@ -227,6 +230,11 @@
int32_t width;
int32_t height;
int32_t rotation;
+ int32_t colorPrimaries;
+ int32_t colorTransfer;
+ int32_t colorMatrix;
+ bool colorRange;
+ Vector<uint8_t> bitsPerChannel;
sp<ABuffer> data;
} ItemProperty;
@@ -347,6 +355,7 @@
void writeIdatBox();
void writeIrefBox();
void writePitmBox();
+ void writeGrplBox(const Vector<uint16_t> &items);
void writeFileLevelMetaBox();
void sendSessionSummary();
diff --git a/media/libstagefright/include/media/stagefright/MediaCodec.h b/media/libstagefright/include/media/stagefright/MediaCodec.h
index 7169b1e..ca2fa3d 100644
--- a/media/libstagefright/include/media/stagefright/MediaCodec.h
+++ b/media/libstagefright/include/media/stagefright/MediaCodec.h
@@ -123,6 +123,13 @@
CB_RESOURCE_RECLAIMED = 5,
CB_CRYPTO_ERROR = 6,
CB_LARGE_FRAME_OUTPUT_AVAILABLE = 7,
+
+ /** Callback ID for when the metrics for this codec have been flushed
+ * due to the start of a new subsession. The associated AMessage will
+ * contain an sp<WrapperObject<std::unique_ptr<mediametrics::Item>>>
+ * Object at the "metrics" key.
+ */
+ CB_METRICS_FLUSHED = 8,
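+ // For illustration only, a client's handler might retrieve the payload
+ // roughly as follows (a hypothetical sketch, not part of this change):
+ // sp<RefBase> obj;
+ // if (msg->findObject("metrics", &obj)) {
+ // auto *metrics = static_cast<
+ // WrapperObject<std::unique_ptr<mediametrics::Item>> *>(obj.get());
+ // // read metrics->value
+ // }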
};
static const pid_t kNoPid = -1;
@@ -484,12 +491,21 @@
Mutex mMetricsLock;
mediametrics_handle_t mMetricsHandle = 0;
+ mediametrics_handle_t mLastMetricsHandle = 0; // only accessed from the looper or destructor
bool mMetricsToUpload = false;
nsecs_t mLifetimeStartNs = 0;
void initMediametrics();
void updateMediametrics();
void flushMediametrics();
void resetMetricsFields();
+
+ // Reset the metrics fields for a new subsession.
+ void resetSubsessionMetricsFields();
+
+ // Start a new subsession (for metrics). This includes flushing the current
+ // metrics, notifying the client and resetting the session fields.
+ void handleStartingANewSubsession();
+
void updateEphemeralMediametrics(mediametrics_handle_t item);
void updateLowLatency(const sp<AMessage> &msg);
void updateCodecImportance(const sp<AMessage>& msg);
@@ -551,6 +567,7 @@
int32_t setOutputSurfaceCount;
int32_t resolutionChangeCount;
} mReliabilityContextMetrics;
+ int32_t mSubsessionCount;
// initial create parameters
AString mInitName;
diff --git a/media/libstagefright/include/media/stagefright/MediaCodecConstants.h b/media/libstagefright/include/media/stagefright/MediaCodecConstants.h
index b1cf665..8f2f162 100644
--- a/media/libstagefright/include/media/stagefright/MediaCodecConstants.h
+++ b/media/libstagefright/include/media/stagefright/MediaCodecConstants.h
@@ -586,6 +586,139 @@
}
}
+// APV ProfileLevel
+inline constexpr int32_t APVProfile422_10 = 0x01;
+inline constexpr int32_t APVProfile422_10HDR10 = 0x1000;
+inline constexpr int32_t APVProfile422_10HDR10Plus = 0x2000;
+
+inline static const char *asString_APVProfile(int32_t i, const char *def = "??") {
+ switch (i) {
+ case APVProfile422_10: return "APVProfile422_10";
+ case APVProfile422_10HDR10: return "APVProfile422_10HDR10";
+ case APVProfile422_10HDR10Plus: return "APVProfile422_10HDR10Plus";
+ default: return def;
+ }
+}
+
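+// Each APV level constant encodes (0x100 << levelIndex) | (1 << band): the
+// high bits select the level group (1, 1.1, 2, ... 7.1) and the low nibble
+// selects band 0-3.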
+inline constexpr int32_t APVLevel1Band0 = 0x101;
+inline constexpr int32_t APVLevel1Band1 = 0x102;
+inline constexpr int32_t APVLevel1Band2 = 0x104;
+inline constexpr int32_t APVLevel1Band3 = 0x108;
+inline constexpr int32_t APVLevel11Band0 = 0x201;
+inline constexpr int32_t APVLevel11Band1 = 0x202;
+inline constexpr int32_t APVLevel11Band2 = 0x204;
+inline constexpr int32_t APVLevel11Band3 = 0x208;
+inline constexpr int32_t APVLevel2Band0 = 0x401;
+inline constexpr int32_t APVLevel2Band1 = 0x402;
+inline constexpr int32_t APVLevel2Band2 = 0x404;
+inline constexpr int32_t APVLevel2Band3 = 0x408;
+inline constexpr int32_t APVLevel21Band0 = 0x801;
+inline constexpr int32_t APVLevel21Band1 = 0x802;
+inline constexpr int32_t APVLevel21Band2 = 0x804;
+inline constexpr int32_t APVLevel21Band3 = 0x808;
+inline constexpr int32_t APVLevel3Band0 = 0x1001;
+inline constexpr int32_t APVLevel3Band1 = 0x1002;
+inline constexpr int32_t APVLevel3Band2 = 0x1004;
+inline constexpr int32_t APVLevel3Band3 = 0x1008;
+inline constexpr int32_t APVLevel31Band0 = 0x2001;
+inline constexpr int32_t APVLevel31Band1 = 0x2002;
+inline constexpr int32_t APVLevel31Band2 = 0x2004;
+inline constexpr int32_t APVLevel31Band3 = 0x2008;
+inline constexpr int32_t APVLevel4Band0 = 0x4001;
+inline constexpr int32_t APVLevel4Band1 = 0x4002;
+inline constexpr int32_t APVLevel4Band2 = 0x4004;
+inline constexpr int32_t APVLevel4Band3 = 0x4008;
+inline constexpr int32_t APVLevel41Band0 = 0x8001;
+inline constexpr int32_t APVLevel41Band1 = 0x8002;
+inline constexpr int32_t APVLevel41Band2 = 0x8004;
+inline constexpr int32_t APVLevel41Band3 = 0x8008;
+inline constexpr int32_t APVLevel5Band0 = 0x10001;
+inline constexpr int32_t APVLevel5Band1 = 0x10002;
+inline constexpr int32_t APVLevel5Band2 = 0x10004;
+inline constexpr int32_t APVLevel5Band3 = 0x10008;
+inline constexpr int32_t APVLevel51Band0 = 0x20001;
+inline constexpr int32_t APVLevel51Band1 = 0x20002;
+inline constexpr int32_t APVLevel51Band2 = 0x20004;
+inline constexpr int32_t APVLevel51Band3 = 0x20008;
+inline constexpr int32_t APVLevel6Band0 = 0x40001;
+inline constexpr int32_t APVLevel6Band1 = 0x40002;
+inline constexpr int32_t APVLevel6Band2 = 0x40004;
+inline constexpr int32_t APVLevel6Band3 = 0x40008;
+inline constexpr int32_t APVLevel61Band0 = 0x80001;
+inline constexpr int32_t APVLevel61Band1 = 0x80002;
+inline constexpr int32_t APVLevel61Band2 = 0x80004;
+inline constexpr int32_t APVLevel61Band3 = 0x80008;
+inline constexpr int32_t APVLevel7Band0 = 0x100001;
+inline constexpr int32_t APVLevel7Band1 = 0x100002;
+inline constexpr int32_t APVLevel7Band2 = 0x100004;
+inline constexpr int32_t APVLevel7Band3 = 0x100008;
+inline constexpr int32_t APVLevel71Band0 = 0x200001;
+inline constexpr int32_t APVLevel71Band1 = 0x200002;
+inline constexpr int32_t APVLevel71Band2 = 0x200004;
+inline constexpr int32_t APVLevel71Band3 = 0x200008;
+
+inline static const char *asString_APVBandLevel(int32_t i, const char *def = "??") {
+ switch (i) {
+ case APVLevel1Band0: return "Level 1, Band 0";
+ case APVLevel1Band1: return "Level 1, Band 1";
+ case APVLevel1Band2: return "Level 1, Band 2";
+ case APVLevel1Band3: return "Level 1, Band 3";
+ case APVLevel11Band0: return "Level 1.1, Band 0";
+ case APVLevel11Band1: return "Level 1.1, Band 1";
+ case APVLevel11Band2: return "Level 1.1, Band 2";
+ case APVLevel11Band3: return "Level 1.1, Band 3";
+ case APVLevel2Band0: return "Level 2, Band 0";
+ case APVLevel2Band1: return "Level 2, Band 1";
+ case APVLevel2Band2: return "Level 2, Band 2";
+ case APVLevel2Band3: return "Level 2, Band 3";
+ case APVLevel21Band0: return "Level 2.1, Band 0";
+ case APVLevel21Band1: return "Level 2.1, Band 1";
+ case APVLevel21Band2: return "Level 2.1, Band 2";
+ case APVLevel21Band3: return "Level 2.1, Band 3";
+ case APVLevel3Band0: return "Level 3, Band 0";
+ case APVLevel3Band1: return "Level 3, Band 1";
+ case APVLevel3Band2: return "Level 3, Band 2";
+ case APVLevel3Band3: return "Level 3, Band 3";
+ case APVLevel31Band0: return "Level 3.1, Band 0";
+ case APVLevel31Band1: return "Level 3.1, Band 1";
+ case APVLevel31Band2: return "Level 3.1, Band 2";
+ case APVLevel31Band3: return "Level 3.1, Band 3";
+ case APVLevel4Band0: return "Level 4, Band 0";
+ case APVLevel4Band1: return "Level 4, Band 1";
+ case APVLevel4Band2: return "Level 4, Band 2";
+ case APVLevel4Band3: return "Level 4, Band 3";
+ case APVLevel41Band0: return "Level 4.1, Band 0";
+ case APVLevel41Band1: return "Level 4.1, Band 1";
+ case APVLevel41Band2: return "Level 4.1, Band 2";
+ case APVLevel41Band3: return "Level 4.1, Band 3";
+ case APVLevel5Band0: return "Level 5, Band 0";
+ case APVLevel5Band1: return "Level 5, Band 1";
+ case APVLevel5Band2: return "Level 5, Band 2";
+ case APVLevel5Band3: return "Level 5, Band 3";
+ case APVLevel51Band0: return "Level 5.1, Band 0";
+ case APVLevel51Band1: return "Level 5.1, Band 1";
+ case APVLevel51Band2: return "Level 5.1, Band 2";
+ case APVLevel51Band3: return "Level 5.1, Band 3";
+ case APVLevel6Band0: return "Level 6, Band 0";
+ case APVLevel6Band1: return "Level 6, Band 1";
+ case APVLevel6Band2: return "Level 6, Band 2";
+ case APVLevel6Band3: return "Level 6, Band 3";
+ case APVLevel61Band0: return "Level 6.1, Band 0";
+ case APVLevel61Band1: return "Level 6.1, Band 1";
+ case APVLevel61Band2: return "Level 6.1, Band 2";
+ case APVLevel61Band3: return "Level 6.1, Band 3";
+ case APVLevel7Band0: return "Level 7, Band 0";
+ case APVLevel7Band1: return "Level 7, Band 1";
+ case APVLevel7Band2: return "Level 7, Band 2";
+ case APVLevel7Band3: return "Level 7, Band 3";
+ case APVLevel71Band0: return "Level 7.1, Band 0";
+ case APVLevel71Band1: return "Level 7.1, Band 1";
+ case APVLevel71Band2: return "Level 7.1, Band 2";
+ case APVLevel71Band3: return "Level 7.1, Band 3";
+ default: return def;
+ }
+}
+
inline constexpr int32_t BITRATE_MODE_CBR = 2;
inline constexpr int32_t BITRATE_MODE_CBR_FD = 3;
inline constexpr int32_t BITRATE_MODE_CQ = 0;
@@ -654,6 +787,7 @@
inline constexpr int32_t COLOR_FormatYUV444Flexible = 0x7F444888;
inline constexpr int32_t COLOR_FormatYUV444Interleaved = 29;
inline constexpr int32_t COLOR_FormatYUVP010 = 54;
+inline constexpr int32_t COLOR_FormatYUVP210 = 60;
inline constexpr int32_t COLOR_QCOM_FormatYUV420SemiPlanar = 0x7fa30c00;
inline constexpr int32_t COLOR_TI_FormatYUV420PackedSemiPlanar = 0x7f000100;
@@ -712,6 +846,7 @@
case COLOR_FormatYUV444Flexible: return "YUV444Flexible";
case COLOR_FormatYUV444Interleaved: return "YUV444Interleaved";
case COLOR_FormatYUVP010: return "YUVP010";
+ case COLOR_FormatYUVP210: return "YUVP210";
case COLOR_QCOM_FormatYUV420SemiPlanar: return "QCOM_YUV420SemiPlanar";
case COLOR_TI_FormatYUV420PackedSemiPlanar: return "TI_YUV420PackedSemiPlanar";
default: return def;
@@ -731,6 +866,7 @@
inline constexpr char MIMETYPE_VIDEO_VP8[] = "video/x-vnd.on2.vp8";
inline constexpr char MIMETYPE_VIDEO_VP9[] = "video/x-vnd.on2.vp9";
inline constexpr char MIMETYPE_VIDEO_AV1[] = "video/av01";
+inline constexpr char MIMETYPE_VIDEO_APV[] = "video/apv";
inline constexpr char MIMETYPE_VIDEO_AVC[] = "video/avc";
inline constexpr char MIMETYPE_VIDEO_HEVC[] = "video/hevc";
inline constexpr char MIMETYPE_VIDEO_MPEG4[] = "video/mp4v-es";
@@ -847,6 +983,7 @@
inline constexpr char KEY_MAX_PTS_GAP_TO_ENCODER[] = "max-pts-gap-to-encoder";
inline constexpr char KEY_MAX_WIDTH[] = "max-width";
inline constexpr char KEY_MIME[] = "mime";
+inline constexpr char KEY_NUM_SLOTS[] = "num-slots";
inline constexpr char KEY_OPERATING_RATE[] = "operating-rate";
inline constexpr char KEY_OUTPUT_REORDER_DEPTH[] = "output-reorder-depth";
inline constexpr char KEY_PCM_ENCODING[] = "pcm-encoding";
diff --git a/media/libstagefright/include/media/stagefright/MetaDataBase.h b/media/libstagefright/include/media/stagefright/MetaDataBase.h
index a7d2eb9..9dce55b 100644
--- a/media/libstagefright/include/media/stagefright/MetaDataBase.h
+++ b/media/libstagefright/include/media/stagefright/MetaDataBase.h
@@ -63,6 +63,7 @@
kKeyDVVC = 'dvvc', // raw data
kKeyDVWC = 'dvwc', // raw data
kKeyAV1C = 'av1c', // raw data
+ kKeyAPVC = 'apvc', // raw data
kKeyThumbnailHVCC = 'thvc', // raw data
kKeyThumbnailAV1C = 'tav1', // raw data
kKeyD263 = 'd263', // raw data
diff --git a/media/libstagefright/writer_fuzzers/Android.bp b/media/libstagefright/writer_fuzzers/Android.bp
index 840c6b3c..483175c 100644
--- a/media/libstagefright/writer_fuzzers/Android.bp
+++ b/media/libstagefright/writer_fuzzers/Android.bp
@@ -47,6 +47,7 @@
"libcutils",
"libutils",
"server_configurable_flags",
+ "camera_platform_flags_c_lib",
],
}
diff --git a/media/module/codecs/amrnb/common/Android.bp b/media/module/codecs/amrnb/common/Android.bp
index 0bc6ed2..35937cb 100644
--- a/media/module/codecs/amrnb/common/Android.bp
+++ b/media/module/codecs/amrnb/common/Android.bp
@@ -1,4 +1,5 @@
package {
+ default_team: "trendy_team_media_codec_framework",
default_applicable_licenses: [
"frameworks_av_media_codecs_amrnb_common_license",
],
@@ -42,8 +43,8 @@
"src/gains_tbl.cpp",
"src/gc_pred.cpp",
"src/gmed_n.cpp",
- "src/grid_tbl.cpp",
"src/gray_tbl.cpp",
+ "src/grid_tbl.cpp",
"src/int_lpc.cpp",
"src/inv_sqrt.cpp",
"src/inv_sqrt_tbl.cpp",
@@ -91,9 +92,9 @@
export_include_dirs: ["include"],
cflags: [
- "-DOSCL_UNUSED_ARG(x)=(void)(x)",
- "-DOSCL_IMPORT_REF=",
"-DOSCL_EXPORT_REF=",
+ "-DOSCL_IMPORT_REF=",
+ "-DOSCL_UNUSED_ARG(x)=(void)(x)",
"-Werror",
],
diff --git a/media/module/codecs/amrnb/dec/Android.bp b/media/module/codecs/amrnb/dec/Android.bp
index 70741d2..a28500a 100644
--- a/media/module/codecs/amrnb/dec/Android.bp
+++ b/media/module/codecs/amrnb/dec/Android.bp
@@ -1,4 +1,5 @@
package {
+ default_team: "trendy_team_media_codec_framework",
default_applicable_licenses: [
"frameworks_av_media_codecs_amrnb_dec_license",
],
@@ -47,12 +48,12 @@
"src/b_cn_cod.cpp",
"src/bgnscd.cpp",
"src/c_g_aver.cpp",
- "src/d1035pf.cpp",
- "src/d2_11pf.cpp",
"src/d2_9pf.cpp",
+ "src/d2_11pf.cpp",
"src/d3_14pf.cpp",
"src/d4_17pf.cpp",
"src/d8_31pf.cpp",
+ "src/d1035pf.cpp",
"src/d_gain_c.cpp",
"src/d_gain_p.cpp",
"src/d_plsf.cpp",
@@ -81,8 +82,8 @@
export_include_dirs: ["src"],
cflags: [
- "-DOSCL_UNUSED_ARG(x)=(void)(x)",
"-DOSCL_IMPORT_REF=",
+ "-DOSCL_UNUSED_ARG(x)=(void)(x)",
"-Werror",
],
@@ -94,8 +95,8 @@
//},
shared_libs: [
- "libstagefright_amrnb_common",
"liblog",
+ "libstagefright_amrnb_common",
],
target: {
@@ -113,19 +114,22 @@
srcs: ["test/amrnbdec_test.cpp"],
- cflags: ["-Wall", "-Werror"],
+ cflags: [
+ "-Wall",
+ "-Werror",
+ ],
local_include_dirs: ["src"],
static_libs: [
- "libstagefright_amrnbdec",
"libsndfile",
+ "libstagefright_amrnbdec",
],
shared_libs: [
- "libstagefright_amrnb_common",
"libaudioutils",
"liblog",
+ "libstagefright_amrnb_common",
],
target: {
diff --git a/media/module/codecs/amrnb/enc/Android.bp b/media/module/codecs/amrnb/enc/Android.bp
index 3c6566e..13bb29c 100644
--- a/media/module/codecs/amrnb/enc/Android.bp
+++ b/media/module/codecs/amrnb/enc/Android.bp
@@ -1,4 +1,5 @@
package {
+ default_team: "trendy_team_media_codec_framework",
default_applicable_licenses: [
"frameworks_av_media_codecs_amrnb_enc_license",
],
@@ -42,12 +43,12 @@
srcs: [
"src/amrencode.cpp",
"src/autocorr.cpp",
- "src/c1035pf.cpp",
- "src/c2_11pf.cpp",
"src/c2_9pf.cpp",
+ "src/c2_11pf.cpp",
"src/c3_14pf.cpp",
"src/c4_17pf.cpp",
"src/c8_31pf.cpp",
+ "src/c1035pf.cpp",
"src/calc_cor.cpp",
"src/calc_en.cpp",
"src/cbsearch.cpp",
@@ -132,7 +133,10 @@
srcs: ["test/amrnb_enc_test.cpp"],
- cflags: ["-Wall", "-Werror"],
+ cflags: [
+ "-Wall",
+ "-Werror",
+ ],
local_include_dirs: ["src"],
diff --git a/media/module/codecs/amrnb/enc/fuzzer/Android.bp b/media/module/codecs/amrnb/enc/fuzzer/Android.bp
index bcbcee2..1b2ec87 100644
--- a/media/module/codecs/amrnb/enc/fuzzer/Android.bp
+++ b/media/module/codecs/amrnb/enc/fuzzer/Android.bp
@@ -19,6 +19,7 @@
*/
package {
+ default_team: "trendy_team_media_codec_framework",
// See: http://go/android-license-faq
// A large-scale-change added 'default_applicable_licenses' to import
// all of the 'license_kinds' from "frameworks_av_media_codecs_amrnb_enc_license"
@@ -39,8 +40,8 @@
static_libs: [
"liblog",
- "libstagefright_amrnbenc",
"libstagefright_amrnb_common",
+ "libstagefright_amrnbenc",
],
fuzz_config: {
diff --git a/media/module/codecs/amrnb/fuzzer/Android.bp b/media/module/codecs/amrnb/fuzzer/Android.bp
index 3f29267..c5cbbe2 100644
--- a/media/module/codecs/amrnb/fuzzer/Android.bp
+++ b/media/module/codecs/amrnb/fuzzer/Android.bp
@@ -19,6 +19,7 @@
*/
package {
+ default_team: "trendy_team_media_codec_framework",
// See: http://go/android-license-faq
// A large-scale-change added 'default_applicable_licenses' to import
// all of the 'license_kinds' from "frameworks_av_license"
@@ -34,9 +35,9 @@
"amrnb_dec_fuzzer.cpp",
],
static_libs: [
- "libstagefright_amrnbdec",
- "libstagefright_amrnb_common",
"liblog",
+ "libstagefright_amrnb_common",
+ "libstagefright_amrnbdec",
],
target: {
darwin: {
diff --git a/media/module/codecserviceregistrant/Android.bp b/media/module/codecserviceregistrant/Android.bp
index 56cd8b8..1d1dd71 100644
--- a/media/module/codecserviceregistrant/Android.bp
+++ b/media/module/codecserviceregistrant/Android.bp
@@ -43,6 +43,8 @@
"libcodec2_soft_flacdec",
"libcodec2_soft_flacenc",
"libcodec2_soft_gsmdec",
+ "libcodec2_soft_apvenc",
+ "libcodec2_soft_apvdec",
],
}
diff --git a/media/module/extractors/Android.bp b/media/module/extractors/Android.bp
index e29d3e6..cbaabe3 100644
--- a/media/module/extractors/Android.bp
+++ b/media/module/extractors/Android.bp
@@ -81,6 +81,12 @@
srcs: ["extractor.aconfig"],
}
+java_aconfig_library {
+ name: "android.media.extractor.flags-aconfig-java",
+ aconfig_declarations: "android.media.extractor.flags-aconfig",
+ defaults: ["framework-minus-apex-aconfig-java-defaults"],
+}
+
cc_aconfig_library {
name: "android.media.extractor.flags-aconfig-cc",
aconfig_declarations: "android.media.extractor.flags-aconfig",
diff --git a/media/module/extractors/extractor.aconfig b/media/module/extractors/extractor.aconfig
index c9bf694..a7d3397 100644
--- a/media/module/extractors/extractor.aconfig
+++ b/media/module/extractors/extractor.aconfig
@@ -8,7 +8,16 @@
name: "extractor_sniff_midi_optimizations"
is_exported: true
is_fixed_read_only: true
- namespace: "media_extractor"
+ namespace: "media_solutions"
description: "Enable SniffMidi optimizations."
bug: "359920208"
}
+
+flag {
+ name: "extractor_mp4_enable_apv"
+ is_exported: true
+ is_fixed_read_only: true
+ namespace: "media_solutions"
+ description: "Enable APV support in mp4 extractor."
+ bug: "370061501"
+}
diff --git a/media/module/extractors/fuzzers/Android.bp b/media/module/extractors/fuzzers/Android.bp
index 3da1589..f3da389 100644
--- a/media/module/extractors/fuzzers/Android.bp
+++ b/media/module/extractors/fuzzers/Android.bp
@@ -129,12 +129,18 @@
],
static_libs: [
+ "android.media.extractor.flags-aconfig-cc",
+ "libaconfig_storage_read_api_cc",
"libstagefright_id3",
"libstagefright_esds",
"libmp4extractor",
"libstagefright_metadatautils",
],
+ shared_libs: [
+ "server_configurable_flags",
+ ],
+
dictionary: "mp4_extractor_fuzzer.dict",
corpus: ["corpus_mp4/*"],
diff --git a/media/module/extractors/mp4/Android.bp b/media/module/extractors/mp4/Android.bp
index 8072002..effd24a 100644
--- a/media/module/extractors/mp4/Android.bp
+++ b/media/module/extractors/mp4/Android.bp
@@ -42,12 +42,18 @@
],
static_libs: [
+ "android.media.extractor.flags-aconfig-cc",
+ "libaconfig_storage_read_api_cc",
"libstagefright_esds",
"libstagefright_foundation",
"libstagefright_id3",
"libutils",
],
+ shared_libs: [
+ "server_configurable_flags",
+ ],
+
host_supported: true,
target: {
diff --git a/media/module/extractors/mp4/MPEG4Extractor.cpp b/media/module/extractors/mp4/MPEG4Extractor.cpp
index 12c0aaf..f062491 100644
--- a/media/module/extractors/mp4/MPEG4Extractor.cpp
+++ b/media/module/extractors/mp4/MPEG4Extractor.cpp
@@ -33,6 +33,7 @@
#include "SampleTable.h"
#include "ItemTable.h"
+#include <com_android_media_extractor_flags.h>
#include <media/esds/ESDS.h>
#include <ID3.h>
#include <media/stagefright/DataSourceBase.h>
@@ -147,6 +148,7 @@
bool mIsAVC;
bool mIsHEVC;
+ bool mIsAPV;
bool mIsDolbyVision;
bool mIsAC4;
bool mIsMpegH = false;
@@ -366,6 +368,13 @@
case FOURCC("hev1"):
return MEDIA_MIMETYPE_VIDEO_HEVC;
+ case FOURCC("apv1"):
+ if (!com::android::media::extractor::flags::extractor_mp4_enable_apv()) {
+ ALOGV("APV support not enabled");
+ return "application/octet-stream";
+ }
+ return MEDIA_MIMETYPE_VIDEO_APV;
+
case FOURCC("dvav"):
case FOURCC("dva1"):
case FOURCC("dvhe"):
@@ -2106,6 +2115,7 @@
case FOURCC("dav1"):
case FOURCC("av01"):
case FOURCC("vp09"):
+ case FOURCC("apv1"):
{
uint8_t buffer[78];
if (chunk_data_size < (ssize_t)sizeof(buffer)) {
@@ -2623,8 +2633,16 @@
break;
}
+ case FOURCC("apvC"):
case FOURCC("av1C"):
{
+ if (!com::android::media::extractor::flags::extractor_mp4_enable_apv() &&
+ chunk_type == FOURCC("apvC")) {
+ ALOGV("APV support not enabled");
+ *offset += chunk_size;
+ break;
+ }
+
auto buffer = heapbuffer<uint8_t>(chunk_data_size);
if (buffer.get() == NULL) {
@@ -5145,6 +5163,7 @@
mCurrentSampleInfoOffsets(NULL),
mIsAVC(false),
mIsHEVC(false),
+ mIsAPV(false),
mIsDolbyVision(false),
mIsAC4(false),
mIsPcm(false),
@@ -5187,6 +5206,8 @@
mIsAVC = !strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_AVC);
mIsHEVC = !strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_HEVC) ||
!strcasecmp(mime, MEDIA_MIMETYPE_IMAGE_ANDROID_HEIC);
+ mIsAPV = com::android::media::extractor::flags::extractor_mp4_enable_apv() &&
+ !strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_APV);
mIsAC4 = !strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AC4);
mIsDolbyVision = !strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_DOLBY_VISION);
mIsHeif = !strcasecmp(mime, MEDIA_MIMETYPE_IMAGE_ANDROID_HEIC) && mItemTable != NULL;
diff --git a/media/module/extractors/tests/Android.bp b/media/module/extractors/tests/Android.bp
index d6e79c7..5f0f4fa 100644
--- a/media/module/extractors/tests/Android.bp
+++ b/media/module/extractors/tests/Android.bp
@@ -21,6 +21,7 @@
// to get the below license kinds:
// SPDX-license-identifier-Apache-2.0
default_applicable_licenses: ["frameworks_av_license"],
+ default_team: "trendy_team_android_media_solutions_playback",
}
cc_test {
@@ -31,6 +32,8 @@
srcs: ["ExtractorUnitTest.cpp"],
static_libs: [
+ "android.media.extractor.flags-aconfig-cc",
+ "libaconfig_storage_read_api_cc",
"libaacextractor",
"libamrextractor",
"libmp3extractor",
@@ -77,6 +80,7 @@
"libhidlmemory",
"libhidlbase",
"libbase",
+ "server_configurable_flags",
],
compile_multilib: "first",
diff --git a/media/module/foundation/MediaDefs.cpp b/media/module/foundation/MediaDefs.cpp
index 7abab63..a890696 100644
--- a/media/module/foundation/MediaDefs.cpp
+++ b/media/module/foundation/MediaDefs.cpp
@@ -25,6 +25,7 @@
const char *MEDIA_MIMETYPE_VIDEO_VP8 = "video/x-vnd.on2.vp8";
const char *MEDIA_MIMETYPE_VIDEO_VP9 = "video/x-vnd.on2.vp9";
const char *MEDIA_MIMETYPE_VIDEO_AV1 = "video/av01";
+const char *MEDIA_MIMETYPE_VIDEO_APV = "video/apv";
const char *MEDIA_MIMETYPE_VIDEO_AVC = "video/avc";
const char *MEDIA_MIMETYPE_VIDEO_HEVC = "video/hevc";
const char *MEDIA_MIMETYPE_VIDEO_MPEG4 = "video/mp4v-es";
diff --git a/media/module/foundation/include/media/stagefright/foundation/MediaDefs.h b/media/module/foundation/include/media/stagefright/foundation/MediaDefs.h
index 05ee7fc..2b3f446 100644
--- a/media/module/foundation/include/media/stagefright/foundation/MediaDefs.h
+++ b/media/module/foundation/include/media/stagefright/foundation/MediaDefs.h
@@ -27,6 +27,7 @@
extern const char *MEDIA_MIMETYPE_VIDEO_VP8;
extern const char *MEDIA_MIMETYPE_VIDEO_VP9;
extern const char *MEDIA_MIMETYPE_VIDEO_AV1;
+extern const char *MEDIA_MIMETYPE_VIDEO_APV;
extern const char *MEDIA_MIMETYPE_VIDEO_AVC;
extern const char *MEDIA_MIMETYPE_VIDEO_HEVC;
extern const char *MEDIA_MIMETYPE_VIDEO_MPEG4;
diff --git a/media/module/libapexcodecs/Android.bp b/media/module/libapexcodecs/Android.bp
new file mode 100644
index 0000000..790b749
--- /dev/null
+++ b/media/module/libapexcodecs/Android.bp
@@ -0,0 +1,71 @@
+//
+// Copyright (C) 2024 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+
+package {
+ default_applicable_licenses: ["frameworks_av_license"],
+}
+
+cc_defaults {
+ name: "libapexcodecs-defaults",
+ header_libs: [
+ "libbase_headers",
+ ],
+
+ srcs: ["ApexCodecs.cpp"],
+
+ shared_libs: [
+ "libbase",
+ "libnativewindow",
+ ],
+
+ export_include_dirs: ["include"],
+
+ export_shared_lib_headers: [
+ "libbase",
+ "libnativewindow",
+ ],
+
+}
+
+cc_library {
+ name: "libapexcodecs-testing",
+ defaults: ["libapexcodecs-defaults"],
+
+ visibility: [
+ ":__subpackages__",
+ ],
+}
+
+cc_library {
+ name: "libapexcodecs",
+ defaults: ["libapexcodecs-defaults"],
+
+ visibility: [
+ "//frameworks/av/apex:__subpackages__",
+ "//frameworks/av/media/codec2/hal/client",
+ ],
+
+ min_sdk_version: "apex_inherit",
+ version_script: "libapexcodecs.map.txt",
+ stubs: {
+ symbol_file: "libapexcodecs.map.txt",
+ versions: ["36"],
+ },
+
+ apex_available: [
+ "com.android.media.swcodec",
+ ],
+}
diff --git a/media/module/libapexcodecs/ApexCodecs.cpp b/media/module/libapexcodecs/ApexCodecs.cpp
new file mode 100644
index 0000000..7101677
--- /dev/null
+++ b/media/module/libapexcodecs/ApexCodecs.cpp
@@ -0,0 +1,148 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <new>
+
+#include <android-base/no_destructor.h>
+#include <apex/ApexCodecs.h>
+
+// TODO: remove when we have real implementations
+#pragma clang diagnostic push
+#pragma clang diagnostic ignored "-Wunused-parameter"
+
+struct ApexCodec_ComponentStore {
+ ApexCodec_ComponentStore() = default;
+};
+
+ApexCodec_ComponentStore *ApexCodec_GetComponentStore() {
+ ::android::base::NoDestructor<ApexCodec_ComponentStore> store;
+ return store.get();
+}
+
+ApexCodec_ComponentTraits *ApexCodec_Traits_get(
+ ApexCodec_ComponentStore *store, size_t index) {
+ return nullptr;
+}
+
+ApexCodec_Status ApexCodec_Component_create(
+ ApexCodec_ComponentStore *store, const char *name, ApexCodec_Component **comp) {
+ *comp = nullptr;
+ return APEXCODEC_STATUS_NOT_FOUND;
+}
+
+void ApexCodec_Component_destroy(ApexCodec_Component *comp) {}
+
+ApexCodec_Status ApexCodec_Component_start(ApexCodec_Component *comp) {
+ return APEXCODEC_STATUS_OMITTED;
+}
+
+ApexCodec_Status ApexCodec_Component_flush(ApexCodec_Component *comp) {
+ return APEXCODEC_STATUS_OMITTED;
+}
+
+ApexCodec_Status ApexCodec_Component_reset(ApexCodec_Component *comp) {
+ return APEXCODEC_STATUS_OMITTED;
+}
+
+ApexCodec_Configurable *ApexCodec_Component_getConfigurable(
+ ApexCodec_Component *comp) {
+ return nullptr;
+}
+
+ApexCodec_Status ApexCodec_SupportedValues_getTypeAndValues(
+ ApexCodec_SupportedValues *supportedValues,
+ ApexCodec_SupportedValuesType *type,
+ ApexCodec_SupportedValuesNumberType *numberType,
+ ApexCodec_Value **values,
+ uint32_t *numValues) {
+ return APEXCODEC_STATUS_OMITTED;
+}
+
+void ApexCodec_SupportedValues_release(ApexCodec_SupportedValues *values) {}
+
+ApexCodec_Status ApexCodec_SettingResults_getResultAtIndex(
+ ApexCodec_SettingResults *results,
+ size_t index,
+ ApexCodec_SettingResultFailure *failure,
+ ApexCodec_ParamFieldValues *field,
+ ApexCodec_ParamFieldValues **conflicts,
+ size_t *numConflicts) {
+ return APEXCODEC_STATUS_OMITTED;
+}
+
+void ApexCodec_SettingResults_release(ApexCodec_SettingResults *results) {}
+
+ApexCodec_Status ApexCodec_Component_process(
+ ApexCodec_Component *comp,
+ const ApexCodec_Buffer *input,
+ ApexCodec_Buffer *output,
+ size_t *consumed,
+ size_t *produced) {
+ return APEXCODEC_STATUS_OMITTED;
+}
+
+ApexCodec_Status ApexCodec_Configurable_config(
+ ApexCodec_Configurable *comp,
+ ApexCodec_LinearBuffer *config,
+ ApexCodec_SettingResults **results) {
+ return APEXCODEC_STATUS_OMITTED;
+}
+
+ApexCodec_Status ApexCodec_Configurable_query(
+ ApexCodec_Configurable *comp,
+ uint32_t indices[],
+ size_t numIndices,
+ ApexCodec_LinearBuffer *config,
+ size_t *written) {
+ return APEXCODEC_STATUS_OMITTED;
+}
+
+ApexCodec_Status ApexCodec_ParamDescriptors_getIndices(
+ ApexCodec_ParamDescriptors *descriptors,
+ uint32_t **indices,
+ size_t *numIndices) {
+ return APEXCODEC_STATUS_OMITTED;
+}
+
+ApexCodec_Status ApexCodec_ParamDescriptors_getDescriptor(
+ ApexCodec_ParamDescriptors *descriptors,
+ uint32_t index,
+ ApexCodec_ParamAttribute *attr,
+ const char **name,
+ uint32_t **dependencies,
+ size_t *numDependencies) {
+ return APEXCODEC_STATUS_OMITTED;
+}
+
+ApexCodec_Status ApexCodec_ParamDescriptors_release(
+ ApexCodec_ParamDescriptors *descriptors) {
+ return APEXCODEC_STATUS_OMITTED;
+}
+
+ApexCodec_Status ApexCodec_Configurable_querySupportedParams(
+ ApexCodec_Configurable *comp,
+ ApexCodec_ParamDescriptors **descriptors) {
+ return APEXCODEC_STATUS_OMITTED;
+}
+
+ApexCodec_Status ApexCodec_Configurable_querySupportedValues(
+ ApexCodec_Configurable *comp,
+ ApexCodec_SupportedValuesQuery *queries,
+ size_t numQueries) {
+ return APEXCODEC_STATUS_OMITTED;
+}
+
+#pragma clang diagnostic pop
\ No newline at end of file
diff --git a/media/module/libapexcodecs/include/apex/ApexCodecs.h b/media/module/libapexcodecs/include/apex/ApexCodecs.h
new file mode 100644
index 0000000..b9f2e83
--- /dev/null
+++ b/media/module/libapexcodecs/include/apex/ApexCodecs.h
@@ -0,0 +1,768 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <sys/cdefs.h>
+#include <errno.h>
+#include <stdint.h>
+
+#include <android/api-level.h>
+#include <android/hardware_buffer.h>
+#include <android/versioning.h>
+
+__BEGIN_DECLS
+
+/**
+ * An API to access and operate codecs implemented within an APEX module,
+ * used only by the OS when using the codecs within a client process
+ * (instead of via a HAL).
+ *
+ * NOTE: Many of the constants and types mirror the ones in the Codec 2.0 API.
+ */
+
+/**
+ * Error code for ApexCodec APIs.
+ *
+ * Introduced in API 36.
+ */
+typedef enum ApexCodec_Status : int32_t {
+ APEXCODEC_STATUS_OK = 0,
+
+ /* bad input */
+ APEXCODEC_STATUS_BAD_VALUE = EINVAL,
+ APEXCODEC_STATUS_BAD_INDEX = ENXIO,
+ APEXCODEC_STATUS_CANNOT_DO = ENOTSUP,
+
+ /* bad sequencing of events */
+ APEXCODEC_STATUS_DUPLICATE = EEXIST,
+ APEXCODEC_STATUS_NOT_FOUND = ENOENT,
+ APEXCODEC_STATUS_BAD_STATE = EPERM,
+ APEXCODEC_STATUS_BLOCKING = EWOULDBLOCK,
+ APEXCODEC_STATUS_CANCELED = EINTR,
+
+ /* bad environment */
+ APEXCODEC_STATUS_NO_MEMORY = ENOMEM,
+ APEXCODEC_STATUS_REFUSED = EACCES,
+
+ APEXCODEC_STATUS_TIMED_OUT = ETIMEDOUT,
+
+ /* bad versioning */
+ APEXCODEC_STATUS_OMITTED = ENOSYS,
+
+ /* unknown fatal */
+ APEXCODEC_STATUS_CORRUPTED = EFAULT,
+ APEXCODEC_STATUS_NO_INIT = ENODEV,
+} ApexCodec_Status;
+
+/**
+ * Enum that represents the kind of component
+ *
+ * Introduced in API 36.
+ */
+typedef enum ApexCodec_Kind : uint32_t {
+ /**
+ * The component is of a kind that is not listed below.
+ */
+ APEXCODEC_KIND_OTHER = 0x0,
+ /**
+ * The component is a decoder, which decodes coded bitstream
+ * into raw buffers.
+ *
+ * Introduced in API 36.
+ */
+ APEXCODEC_KIND_DECODER = 0x1,
+ /**
+ * The component is an encoder, which encodes raw buffers
+ * into coded bitstream.
+ *
+ * Introduced in API 36.
+ */
+ APEXCODEC_KIND_ENCODER = 0x2,
+} ApexCodec_Kind;
+
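+/**
+ * Enum that represents the domain on which a component operates.
+ *
+ * Introduced in API 36.
+ */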
+typedef enum ApexCodec_Domain : uint32_t {
+ /**
+ * A component domain that is not listed below.
+ *
+ * Introduced in API 36.
+ */
+ APEXCODEC_DOMAIN_OTHER = 0x0,
+ /**
+ * A component domain that operates on video.
+ *
+ * Introduced in API 36.
+ */
+ APEXCODEC_DOMAIN_VIDEO = 0x1,
+ /**
+ * A component domain that operates on audio.
+ *
+ * Introduced in API 36.
+ */
+ APEXCODEC_DOMAIN_AUDIO = 0x2,
+ /**
+ * A component domain that operates on image.
+ *
+ * Introduced in API 36.
+ */
+ APEXCODEC_DOMAIN_IMAGE = 0x3,
+} ApexCodec_Domain;
+
+/**
+ * Handle for component traits such as name, media type, kind (decoder/encoder),
+ * domain (audio/video/image), etc.
+ *
+ * Introduced in API 36.
+ */
+typedef struct ApexCodec_ComponentTraits {
+ /**
+ * The name of the component.
+ */
+ const char *name;
+ /**
+ * The supported media type of the component.
+ */
+ const char *mediaType;
+ /**
+ * The kind of the component.
+ */
+ ApexCodec_Kind kind;
+ /**
+ * The domain on which the component operates.
+ */
+ ApexCodec_Domain domain;
+} ApexCodec_ComponentTraits;
+
+/**
+ * An opaque struct that represents a component store.
+ *
+ * Introduced in API 36.
+ */
+typedef struct ApexCodec_ComponentStore ApexCodec_ComponentStore;
+
+/**
+ * Get the component store object. This function never fails.
+ *
+ * \return component store object.
+ */
+ApexCodec_ComponentStore *ApexCodec_GetComponentStore()
+ __INTRODUCED_IN(36);
+
+/**
+ * Get the traits object of a component at the given index. The fields of the
+ * returned ApexCodec_ComponentTraits object describe the component.
+ *
+ * Returns nullptr if the index is out of bounds. The returned object is owned by the
+ * ApexCodec_ComponentStore object and the client should not delete it.
+ *
+ * The client can iterate through the traits objects by calling this function
+ * with an index incrementing from 0 until it gets a nullptr.
+ *
+ * \param index index of the traits object to query
+ * \return traits object at the index, or nullptr if the index is out of bounds.
+ */
+ApexCodec_ComponentTraits *ApexCodec_Traits_get(
+ ApexCodec_ComponentStore *store, size_t index) __INTRODUCED_IN(36);
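+
+/**
+ * For illustration only, enumerating all components might look like the
+ * following (a hypothetical client sketch; variable names are placeholders):
+ *
+ *   ApexCodec_ComponentStore *store = ApexCodec_GetComponentStore();
+ *   for (size_t i = 0; ; ++i) {
+ *       ApexCodec_ComponentTraits *traits = ApexCodec_Traits_get(store, i);
+ *       if (traits == nullptr) break;  // past the last component
+ *       // inspect traits->name, traits->mediaType, traits->kind, traits->domain
+ *   }
+ */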
+
+/**
+ * An opaque struct that represents a codec.
+ */
+typedef struct ApexCodec_Component ApexCodec_Component;
+
+/**
+ * Create a component by the name.
+ *
+ * \param store the component store
+ * \param name the name of the component
+ * \param component out-param to be filled with the component; must not be null
+ * \return APEXCODEC_STATUS_OK if successful
+ * \return APEXCODEC_STATUS_NOT_FOUND if the name is not found
+ */
+ApexCodec_Status ApexCodec_Component_create(
+ ApexCodec_ComponentStore *store, const char *name, ApexCodec_Component **comp)
+ __INTRODUCED_IN(36);
+
+/**
+ * Destroy the component by the handle. It is invalid to call component methods on the handle
+ * after calling this method. Calling this method with |comp| == nullptr is a no-op.
+ *
+ * \param comp the handle for the component
+ */
+void ApexCodec_Component_destroy(ApexCodec_Component *comp) __INTRODUCED_IN(36);
+
+/**
+ * Start the component. The component is ready to process buffers after this call.
+ *
+ * \param comp the handle for the component
+ */
+ApexCodec_Status ApexCodec_Component_start(
+ ApexCodec_Component *comp) __INTRODUCED_IN(36);
+
+/**
+ * Flush the component's internal states. This operation preserves the existing configurations.
+ *
+ * \param comp the handle for the component
+ */
+ApexCodec_Status ApexCodec_Component_flush(
+ ApexCodec_Component *comp) __INTRODUCED_IN(36);
+
+/**
+ * Reset the component to the initial state, as if right after creation. Note that the
+ * configuration also reverts to the initial state, so any required configuration must be
+ * applied again before the component is used.
+ *
+ * \param comp the handle for the component
+ */
+ApexCodec_Status ApexCodec_Component_reset(
+ ApexCodec_Component *comp) __INTRODUCED_IN(36);
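+
+/**
+ * For illustration only, a typical lifecycle under this API might look like
+ * the following (a hypothetical client sketch; error handling elided, and the
+ * component name is only an example):
+ *
+ *   ApexCodec_Component *comp = nullptr;
+ *   if (ApexCodec_Component_create(ApexCodec_GetComponentStore(),
+ *                                  "c2.android.apv.decoder", &comp)
+ *           == APEXCODEC_STATUS_OK) {
+ *       ApexCodec_Component_start(comp);
+ *       // ... process buffers ...
+ *       ApexCodec_Component_flush(comp);  // drops pending work, keeps config
+ *       ApexCodec_Component_reset(comp);  // back to the just-created state
+ *       ApexCodec_Component_destroy(comp);
+ *   }
+ */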
+
+/**
+ * An opaque struct that represents a configurable part of the component.
+ *
+ * Introduced in API 36.
+ */
+typedef struct ApexCodec_Configurable ApexCodec_Configurable;
+
+/**
+ * Return the configurable object for the given ApexCodec_Component.
+ * The returned object has the same lifecycle as |comp|.
+ *
+ * \param comp the handle for the component
+ * \return the configurable object handle
+ */
+ApexCodec_Configurable *ApexCodec_Component_getConfigurable(
+ ApexCodec_Component *comp) __INTRODUCED_IN(36);
+
+/**
+ * Enum that represents the flags for ApexCodec_Buffer.
+ *
+ * Introduced in API 36.
+ */
+typedef enum ApexCodec_BufferFlags : uint32_t {
+ APEXCODEC_FLAG_DROP_FRAME = (1 << 0),
+ APEXCODEC_FLAG_END_OF_STREAM = (1 << 1),
+ APEXCODEC_FLAG_DISCARD_FRAME = (1 << 2),
+ APEXCODEC_FLAG_INCOMPLETE = (1 << 3),
+ APEXCODEC_FLAG_CORRECTED = (1 << 4),
+ APEXCODEC_FLAG_CORRUPT = (1 << 5),
+ APEXCODEC_FLAG_CODEC_CONFIG = (1u << 31),
+} ApexCodec_BufferFlags;
+
+/**
+ * Enum that represents the type of buffer.
+ *
+ * Introduced in API 36.
+ */
+typedef enum ApexCodec_BufferType : uint32_t {
+ APEXCODEC_BUFFER_TYPE_INVALID,
+ APEXCODEC_BUFFER_TYPE_LINEAR,
+ APEXCODEC_BUFFER_TYPE_LINEAR_CHUNKS,
+ APEXCODEC_BUFFER_TYPE_GRAPHIC,
+ APEXCODEC_BUFFER_TYPE_GRAPHIC_CHUNKS,
+} ApexCodec_BufferType;
+
+/**
+ * Struct that represents the memory for ApexCodec_Buffer.
+ *
+ * All memory regions have the simple 1D representation.
+ *
+ * Introduced in API 36.
+ */
+typedef struct ApexCodec_LinearBuffer {
+ /**
+ * A pointer to the start of the buffer. The pointer is not guaranteed to be aligned.
+ */
+ uint8_t *data;
+ /**
+ * Size of the buffer. The memory region between |data| (inclusive) and
+ * |data + size| (exclusive) is assumed to be valid for read/write.
+ */
+ size_t size;
+} ApexCodec_LinearBuffer;
+
+/**
+ * Struct that represents a buffer for ApexCodec_Component.
+ *
+ * Introduced in API 36.
+ */
+typedef struct ApexCodec_Buffer {
+ /**
+ * Flags associated with the buffer.
+ */
+ ApexCodec_BufferFlags flags;
+ /**
+ * For input buffers, the client assigns a unique, sequential index to each buffer. For output
+ * buffers, it is the same as the associated input buffer's frame index.
+ */
+ uint64_t frameIndex;
+ /**
+ * A timestamp associated with the buffer in microseconds.
+ */
+ uint64_t timestampUs;
+ /**
+ * The type of the buffer. The component may reject a request to process a buffer with the wrong
+ * type. For example, a video decoder will reject an input buffer with type BUFFER_TYPE_GRAPHIC,
+ * or an output buffer with type BUFFER_TYPE_LINEAR.
+ */
+ ApexCodec_BufferType type;
+ /**
+ * The actual memory for the buffer.
+ */
+ union {
+ ApexCodec_LinearBuffer linear;
+ AHardwareBuffer *graphic;
+ } memory;
+ /**
+ * Config updates associated with the buffer. For input buffers these are sent to the component
+ * at the specific input frame. For output buffers these are config updates as a result of
+ * processing the buffer.
+ */
+ ApexCodec_LinearBuffer configUpdates;
+} ApexCodec_Buffer;
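+
+/**
+ * For illustration only, filling a linear input buffer might look like the
+ * following (a hypothetical client sketch; |data| and |size| are placeholders):
+ *
+ *   ApexCodec_Buffer input;
+ *   input.flags = APEXCODEC_FLAG_CODEC_CONFIG;
+ *   input.frameIndex = 0;
+ *   input.timestampUs = 0;
+ *   input.type = APEXCODEC_BUFFER_TYPE_LINEAR;
+ *   input.memory.linear.data = data;
+ *   input.memory.linear.size = size;
+ *   input.configUpdates.data = nullptr;
+ *   input.configUpdates.size = 0;
+ */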
+
+/**
+ * Enum that represents the query type for the supported values.
+ *
+ * Introduced in API 36.
+ */
+typedef enum ApexCodec_SupportedValuesQueryType : uint32_t {
+ /** Query all possible supported values regardless of current configuration */
+ APEXCODEC_SUPPORTED_VALUES_QUERY_POSSIBLE,
+ /** Query supported values at current configuration */
+ APEXCODEC_SUPPORTED_VALUES_QUERY_CURRENT,
+} ApexCodec_SupportedValuesQueryType;
+
+/**
+ * Enum that represents the type of the supported values.
+ *
+ * Introduced in API 36.
+ */
+typedef enum ApexCodec_SupportedValuesType : uint32_t {
+ /** The supported values are empty. */
+ APEXCODEC_SUPPORTED_VALUES_EMPTY,
+ /**
+ * The supported values are represented by a range defined with {min, max, step, num, denom}.
+ *
+ * If step is 0 and num and denom are both 1, any value with min <= value <= max is supported.
+ *
+ * Otherwise, the range represents a geometric/arithmetic/multiply-accumulate series, where
+ * successive supported values can be derived from previous values (starting at min), using the
+ * following formula:
+ * v[0] = min
+ * v[i] = v[i-1] * num / denom + step for i >= 1, while min < v[i] <= max.
+ * For example, {min=2, max=32, step=0, num=2, denom=1} yields the geometric
+ * series 2, 4, 8, 16, 32.
+ */
+ APEXCODEC_SUPPORTED_VALUES_RANGE,
+ /** The supported values are represented by a list of values. */
+ APEXCODEC_SUPPORTED_VALUES_VALUES,
+ /** The supported values are represented by a list of flags. */
+ APEXCODEC_SUPPORTED_VALUES_FLAGS,
+} ApexCodec_SupportedValuesType;
+
+/**
+ * Enum that represents numeric types of the supported values.
+ *
+ * Introduced in API 36.
+ */
+typedef enum ApexCodec_SupportedValuesNumberType : uint32_t {
+ APEXCODEC_SUPPORTED_VALUES_TYPE_NONE = 0,
+ APEXCODEC_SUPPORTED_VALUES_TYPE_INT32 = 1,
+ APEXCODEC_SUPPORTED_VALUES_TYPE_UINT32 = 2,
+ // RESERVED = 3,
+ APEXCODEC_SUPPORTED_VALUES_TYPE_INT64 = 4,
+ APEXCODEC_SUPPORTED_VALUES_TYPE_UINT64 = 5,
+ // RESERVED = 6,
+ APEXCODEC_SUPPORTED_VALUES_TYPE_FLOAT = 7,
+} ApexCodec_SupportedValuesNumberType;
+
+/**
+ * Union of primitive types.
+ *
+ * Introduced in API 36.
+ */
+typedef union {
+ int32_t i32;
+ uint32_t u32;
+ int64_t i64;
+ uint64_t u64;
+ float f;
+} ApexCodec_Value;
+
+/**
+ * An opaque struct that represents the supported values of a parameter.
+ *
+ * Introduced in API 36.
+ */
+typedef struct ApexCodec_SupportedValues ApexCodec_SupportedValues;
+
+/**
+ * Extract information from ApexCodec_SupportedValues object.
+ *
+ * \param [in] supportedValues the supported values object
+ * \param [out] type pointer to be filled with the type of the supported values
+ * \param [out] numberType pointer to be filled with the numeric type of the supported values
+ * \param [out] values pointer to be filled with the array of the actual supported values.
+ * if type == APEXCODEC_SUPPORTED_VALUES_EMPTY: nullptr
+ * if type == APEXCODEC_SUPPORTED_VALUES_RANGE: {min, max, step, num, den}
+ * if type == APEXCODEC_SUPPORTED_VALUES_VALUES/_FLAGS:
+ * the array of supported values/flags
+ * the array is owned by the |supportedValues| object and the client
+ * should not free it.
+ * \param [out] numValues pointer to be filled with the number of values.
+ * if type == APEXCODEC_SUPPORTED_VALUES_EMPTY: 0
+ * if type == APEXCODEC_SUPPORTED_VALUES_RANGE: 5
+ * if type == APEXCODEC_SUPPORTED_VALUES_VALUES/_FLAGS: varies
+ */
+ApexCodec_Status ApexCodec_SupportedValues_getTypeAndValues(
+ ApexCodec_SupportedValues *supportedValues,
+ ApexCodec_SupportedValuesType *type,
+ ApexCodec_SupportedValuesNumberType *numberType,
+ ApexCodec_Value **values,
+ uint32_t *numValues) __INTRODUCED_IN(36);
+
+/**
+ * Release the supported values object.
+ *
+ * \param values the supported values object
+ */
+void ApexCodec_SupportedValues_release(
+ ApexCodec_SupportedValues *values) __INTRODUCED_IN(36);
+
+/**
+ * An opaque struct that represents the results of ApexCodec_Configurable_config().
+ *
+ * Introduced in API 36.
+ */
+typedef struct ApexCodec_SettingResults ApexCodec_SettingResults;
+
+/**
+ * Enum that represents the failure code of ApexCodec_SettingResults.
+ *
+ * Introduced in API 36.
+ */
+typedef enum ApexCodec_SettingResultFailure : uint32_t {
+ /** parameter type is not supported */
+ APEXCODEC_SETTING_RESULT_BAD_TYPE,
+ /** parameter is not supported on the specific port */
+ APEXCODEC_SETTING_RESULT_BAD_PORT,
+ /** parameter is not supported on the specific stream */
+ APEXCODEC_SETTING_RESULT_BAD_INDEX,
+ /** parameter is read-only */
+ APEXCODEC_SETTING_RESULT_READ_ONLY,
+ /** parameter mismatches input data */
+ APEXCODEC_SETTING_RESULT_MISMATCH,
+ /** strict parameter does not accept value for the field at all */
+ APEXCODEC_SETTING_RESULT_BAD_VALUE,
+ /** strict parameter field value conflicts with other settings */
+ APEXCODEC_SETTING_RESULT_CONFLICT,
+ /** strict parameter field is out of range due to other settings */
+ APEXCODEC_SETTING_RESULT_UNSUPPORTED,
+ /**
+ * the field does not accept the requested parameter value at all; it has been corrected to
+ * the closest supported value. This failure mode is provided to give guidance as to the
+ * currently supported values for this field (which may be a subset of all potentially
+ * supported values).
+ */
+ APEXCODEC_SETTING_RESULT_INFO_BAD_VALUE,
+ /**
+ * the requested parameter value conflicts with one or more other settings
+ * and has been corrected to the closest supported value. This failure
+ * mode is provided to give guidance as to the currently supported values, and
+ * optionally to suggest to the client how to enable the requested
+ * parameter value.
+ */
+ APEXCODEC_SETTING_RESULT_INFO_CONFLICT,
+} ApexCodec_SettingResultFailure;
+
+/**
+ * Struct that represents a field and its supported values of a parameter.
+ *
+ * The offset and size of the field are where the field is located in the blob representation of
+ * the parameter, as used in the ApexCodec_Configurable_query() and ApexCodec_Configurable_config(),
+ * for example.
+ *
+ * Introduced in API 36.
+ */
+typedef struct ApexCodec_ParamFieldValues {
+ /** index of the param */
+ uint32_t index;
+ /** offset of the param field */
+ uint32_t offset;
+ /** size of the param field */
+ uint32_t size;
+ /** currently supported values of the param field */
+ ApexCodec_SupportedValues *values;
+} ApexCodec_ParamFieldValues;
+
+/**
+ * Extract the result of ApexCodec_Configurable_config.
+ * The client can iterate through the results with index starting from 0 until this function returns
+ * APEXCODEC_STATUS_NOT_FOUND.
+ *
+ * \param [in] results the results object
+ * \param [in] index the index of the result to extract, starts from 0.
+ * \param [out] failure pointer to be filled with the failure code
+ * \param [out] field pointer to be filled with the field that failed.
+ * |field->values| is owned by the |results| object and the client should not
+ * free it.
+ * \param [out] conflicts pointer to be filled with the array of conflicts.
+ * nullptr if |numConflicts| is 0.
+ * the array and its content are owned by the |results| object and the client
+ * should not free them.
+ * \param [out] numConflicts pointer to be filled with the number of conflicts
+ * may be 0 if there are no conflicts
+ * \return APEXCODEC_STATUS_OK if successful
+ * \return APEXCODEC_STATUS_NOT_FOUND if index is out of range
+ */
+ApexCodec_Status ApexCodec_SettingResults_getResultAtIndex(
+ ApexCodec_SettingResults *results,
+ size_t index,
+ ApexCodec_SettingResultFailure *failure,
+ ApexCodec_ParamFieldValues *field,
+ ApexCodec_ParamFieldValues **conflicts,
+ size_t *numConflicts) __INTRODUCED_IN(36);
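+
+/**
+ * For illustration only, iterating over the setting results might look like
+ * the following (a hypothetical client sketch):
+ *
+ *   ApexCodec_SettingResultFailure failure;
+ *   ApexCodec_ParamFieldValues field;
+ *   ApexCodec_ParamFieldValues *conflicts = nullptr;
+ *   size_t numConflicts = 0;
+ *   for (size_t i = 0;
+ *           ApexCodec_SettingResults_getResultAtIndex(
+ *                   results, i, &failure, &field, &conflicts,
+ *                   &numConflicts) == APEXCODEC_STATUS_OK;
+ *           ++i) {
+ *       // handle |failure| for |field|; |conflicts| has |numConflicts| entries
+ *   }
+ */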
+
+/**
+ * Release the setting result object.
+ *
+ * \param result the setting result object
+ */
+void ApexCodec_SettingResults_release(
+ ApexCodec_SettingResults *results) __INTRODUCED_IN(36);
+
+/**
+ * Process one frame from |input|, and produce one frame to |output| if possible.
+ * When successfully filled, |output->memory.linear| has the size adjusted to the produced
+ * output size, in case of linear buffers. |input->configUpdates| is applied with the input
+ * buffer; |output->configUpdates| contains config updates as a result of processing the frame.
+ *
+ * \param comp the component to process the buffers
+ * \param input the input buffer; when nullptr, the component should fill |output| if there are
+ * any pending output buffers.
+ * \param output the output buffer, should not be nullptr.
+ * \param consumed the number of consumed bytes from the input buffer
+ * set to 0 if no input buffer has been consumed, including when |input| is nullptr.
+ * for graphic buffers, any non-zero value means that the input buffer is consumed.
+ * \param produced the number of bytes produced on the output buffer
+ * set to 0 if no output buffer has been produced.
+ * for graphic buffers, any non-zero value means that the output buffer is filled.
+ * \return APEXCODEC_STATUS_OK if successful
+ * \return APEXCODEC_STATUS_NO_MEMORY if the output buffer is not suitable to hold the output
+ * frame; the client should retry with a new output buffer.
+ * |output->configUpdates| should have the information to update
+ * the buffer size.
+ * \return APEXCODEC_STATUS_BAD_VALUE if the parameters are bad
+ * \return APEXCODEC_STATUS_BAD_STATE if the component is not in the right state
+ * to process the frame
+ * \return APEXCODEC_STATUS_CORRUPTED if an unexpected error has occurred
+ */
+ApexCodec_Status ApexCodec_Component_process(
+ ApexCodec_Component *comp,
+ const ApexCodec_Buffer *input,
+ ApexCodec_Buffer *output,
+ size_t *consumed,
+ size_t *produced) __INTRODUCED_IN(36);
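+
+/**
+ * For illustration only, one processing step might look like the following
+ * (a hypothetical client sketch; buffer setup elided):
+ *
+ *   size_t consumed = 0;
+ *   size_t produced = 0;
+ *   ApexCodec_Status status = ApexCodec_Component_process(
+ *           comp, &input, &output, &consumed, &produced);
+ *   if (status == APEXCODEC_STATUS_NO_MEMORY) {
+ *       // enlarge |output| per the returned config updates and retry
+ *   } else if (status == APEXCODEC_STATUS_OK && produced > 0) {
+ *       // consume the produced output frame
+ *   }
+ */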
+
+/**
+ * Configure the component with the given config.
+ *
+ * Configurations are Codec 2.0 configs in binary blobs,
+ * concatenated if there are multiple configs.
+ *
+ * frameworks/av/media/codec2/core/include/C2Param.h contains more details about the configuration
+ * blob layout.
+ *
+ * The component may correct the configured parameters to the closest supported values, and could
+ * fail in case there are no values that the component can auto-correct to. |result| contains the
+ * information about the failures. See ApexCodec_SettingResultFailure and ApexCodec_SettingResults
+ * for more details.
+ *
+ * \param [in] comp the handle for the component
+ * \param [inout] config the config blob; after the call, the config blob is updated to the actual
+ * config by the component.
+ * \param [out] results the results of the configuration.
+ * the client should call ApexCodec_SettingResults_getResultAtIndex()
+ * to extract the results. The results object is owned by the client and should
+ * be released with ApexCodec_SettingResults_release().
+ * |results| may be nullptr if empty.
+ * \return APEXCODEC_STATUS_OK if successful
+ * \return APEXCODEC_STATUS_BAD_VALUE if the config is invalid
+ * \return APEXCODEC_STATUS_BAD_STATE if the component is not in the right state to be configured
+ * \return APEXCODEC_STATUS_CORRUPTED if an unexpected error has occurred
+ */
+ApexCodec_Status ApexCodec_Configurable_config(
+ ApexCodec_Configurable *comp,
+ ApexCodec_LinearBuffer *config,
+ ApexCodec_SettingResults **results) __INTRODUCED_IN(36);
+
+/**
+ * Query the component for the given indices.
+ *
+ * Parameter indices are defined in frameworks/av/media/codec2/core/include/C2Config.h.
+ *
+ * \param [in] comp the handle for the component
+ * \param [in] indices the array of indices to query
+ * \param [in] numIndices the size of the indices array
+ * \param [inout] config the output buffer for the config blob, allocated by the client.
+ * if |config->size| is insufficient, it is set to the required size
+ * and |config->data| remains unchanged.
+ * \param [out] written the number of bytes written to |config|.
+ * \return APEXCODEC_STATUS_OK if successful
+ * \return APEXCODEC_STATUS_NO_MEMORY if |config->size| is too small; |config->size| is updated
+ * to the required buffer size.
+ * \return APEXCODEC_STATUS_BAD_VALUE if the parameters are bad, e.g. |indices|, |config|,
+ * |config->data| or |written| is nullptr.
+ */
+ApexCodec_Status ApexCodec_Configurable_query(
+ ApexCodec_Configurable *comp,
+ uint32_t indices[],
+ size_t numIndices,
+ ApexCodec_LinearBuffer *config,
+ size_t *written) __INTRODUCED_IN(36);
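+
+/**
+ * For illustration only, the two-pass sizing pattern might look like the
+ * following (a hypothetical client sketch; |configurable|, |indices| and
+ * |numIndices| are placeholders):
+ *
+ *   uint8_t scratch[1];
+ *   ApexCodec_LinearBuffer config = { scratch, sizeof(scratch) };
+ *   size_t written = 0;
+ *   if (ApexCodec_Configurable_query(configurable, indices, numIndices,
+ *                                    &config, &written)
+ *           == APEXCODEC_STATUS_NO_MEMORY) {
+ *       // |config.size| now holds the required size; point |config.data| at a
+ *       // buffer of that size and call ApexCodec_Configurable_query() again.
+ *   }
+ */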
+
+/**
+ * An opaque struct that represents a collection of parameter descriptors.
+ *
+ * Introduced in API 36.
+ */
+typedef struct ApexCodec_ParamDescriptors ApexCodec_ParamDescriptors;
+
+/**
+ * Enum that represents the attributes of a parameter.
+ *
+ * Introduced in API 36.
+ */
+typedef enum ApexCodec_ParamAttribute : uint32_t {
+ /** parameter is required to be specified */
+ APEXCODEC_PARAM_IS_REQUIRED = 1u << 0,
+ /** parameter retains its value */
+ APEXCODEC_PARAM_IS_PERSISTENT = 1u << 1,
+ /** parameter is strict */
+ APEXCODEC_PARAM_IS_STRICT = 1u << 2,
+ /** parameter is read-only */
+ APEXCODEC_PARAM_IS_READ_ONLY = 1u << 3,
+ /** parameter shall not be visible to clients */
+ APEXCODEC_PARAM_IS_HIDDEN = 1u << 4,
+ /** parameter shall not be used by the framework (other than testing) */
+ APEXCODEC_PARAM_IS_INTERNAL = 1u << 5,
+ /** parameter is publicly const (hence read-only) */
+ APEXCODEC_PARAM_IS_CONST = 1u << 6 | APEXCODEC_PARAM_IS_READ_ONLY,
+} ApexCodec_ParamAttribute;
+
+/**
+ * Get the parameter indices of the param descriptors.
+ *
+ * \param [in] descriptors the param descriptors object
+ * \param [out] indices the pointer to be filled with the array of the indices;
+ * the array is owned by |descriptors| and should not be freed by the client.
+ * \param [out] numIndices the size of the indices array
+ * \return APEXCODEC_STATUS_OK if successful
+ * \return APEXCODEC_STATUS_BAD_VALUE if parameters are bad, e.g. |descriptors|, |indices| or
+ * |numIndices| is nullptr.
+ */
+ApexCodec_Status ApexCodec_ParamDescriptors_getIndices(
+ ApexCodec_ParamDescriptors *descriptors,
+ uint32_t **indices,
+ size_t *numIndices) __INTRODUCED_IN(36);
+
+/**
+ * Get the descriptor of the param.
+ *
+ * \param [in] descriptors the param descriptors object
+ * \param [in] index the index of the param
+ * \param [out] attr the attribute of the param
+ * \param [out] name the pointer to be filled with the name of the param
+ * the string is owned by |descriptors| and should not be freed by the client.
+ * \param [out] dependencies the pointer to be filled with an array of the parameter indices
+ * that the parameter with |index| depends on.
+ * may be null if empty.
+ * the array is owned by |descriptors| and should not be freed by the client.
+ * \param [out] numDependencies the number of dependencies
+ * \return APEXCODEC_STATUS_OK if successful
+ * \return APEXCODEC_STATUS_BAD_VALUE if parameters are bad, e.g. |descriptors|, |attr|, |name|,
+ * |dependencies| or |numDependencies| is nullptr.
+ * \return APEXCODEC_STATUS_BAD_INDEX if the index is not included in the param descriptors.
+ */
+ApexCodec_Status ApexCodec_ParamDescriptors_getDescriptor(
+ ApexCodec_ParamDescriptors *descriptors,
+ uint32_t index,
+ ApexCodec_ParamAttribute *attr,
+ const char **name,
+ uint32_t **dependencies,
+ size_t *numDependencies) __INTRODUCED_IN(36);
+
+/**
+ * Release the param descriptors object.
+ *
+ * \param descriptors the param descriptors object
+ * \return APEXCODEC_STATUS_OK if successful
+ */
+ApexCodec_Status ApexCodec_ParamDescriptors_release(
+ ApexCodec_ParamDescriptors *descriptors) __INTRODUCED_IN(36);
+
+/**
+ * Query the component for the supported parameters.
+ *
+ * \param comp the handle for the component
+ * \param descriptors the pointer to be filled with the param descriptors object;
+ * the object should be released with ApexCodec_ParamDescriptors_release().
+ * \return APEXCODEC_STATUS_OK if successful
+ * \return APEXCODEC_STATUS_BAD_VALUE if parameters are bad, e.g. |descriptors| is nullptr.
+ */
+ApexCodec_Status ApexCodec_Configurable_querySupportedParams(
+ ApexCodec_Configurable *comp,
+ ApexCodec_ParamDescriptors **descriptors) __INTRODUCED_IN(36);
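Putting the descriptor APIs together, a sketch of enumerating a component's supported parameters (error handling abbreviated):

```cpp
#include <apex/ApexCodecs.h>

#include <cstdint>
#include <cstdio>

void dumpSupportedParams(ApexCodec_Configurable *comp) {
    ApexCodec_ParamDescriptors *descriptors = nullptr;
    if (ApexCodec_Configurable_querySupportedParams(comp, &descriptors)
            != APEXCODEC_STATUS_OK) {
        return;
    }
    uint32_t *indices = nullptr;
    size_t numIndices = 0;
    if (ApexCodec_ParamDescriptors_getIndices(descriptors, &indices, &numIndices)
            == APEXCODEC_STATUS_OK) {
        for (size_t i = 0; i < numIndices; ++i) {
            ApexCodec_ParamAttribute attr;
            const char *name = nullptr;
            uint32_t *deps = nullptr;
            size_t numDeps = 0;
            if (ApexCodec_ParamDescriptors_getDescriptor(
                        descriptors, indices[i], &attr, &name, &deps, &numDeps)
                    == APEXCODEC_STATUS_OK) {
                // |name| and |deps| are owned by |descriptors|; do not free them.
                printf("param %s (index %u): %zu dependencies\n",
                       name, (unsigned) indices[i], numDeps);
            }
        }
    }
    ApexCodec_ParamDescriptors_release(descriptors);
}
```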
+
+/**
+ * Struct that represents the query for the supported values of a parameter.
+ *
+ * The offset of the field can be found in the layout of the parameter blob.
+ *
+ * Introduced in API 36.
+ */
+typedef struct ApexCodec_SupportedValuesQuery {
+ /* in-params */
+
+ /** index of the param */
+ uint32_t index;
+ /** offset to the param field */
+ size_t offset;
+ /** query type */
+ ApexCodec_SupportedValuesQueryType type;
+
+ /* out-params */
+
+ /** status of the query */
+ ApexCodec_Status status;
+
+ /** supported values. must be released with ApexCodec_SupportedValues_release(). */
+ ApexCodec_SupportedValues *values;
+} ApexCodec_SupportedValuesQuery;
+
+/**
+ * Query the component for the supported values of the fields specified in |queries|.
+ *
+ * \param comp the handle for the component
+ * \param queries the array of queries
+ * \param numQueries the size of the queries array
+ * \return APEXCODEC_STATUS_OK if successful
+ * \return APEXCODEC_STATUS_CORRUPTED if an unexpected error has occurred
+ */
+ApexCodec_Status ApexCodec_Configurable_querySupportedValues(
+ ApexCodec_Configurable *comp,
+ ApexCodec_SupportedValuesQuery *queries,
+ size_t numQueries) __INTRODUCED_IN(36);
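For the batched query above, each entry reports its own `status` alongside the call-level status, and any returned `values` must be released individually. A sketch for a single field (|index| and |offset| are placeholders that would come from the parameter blob layout):

```cpp
#include <apex/ApexCodecs.h>

// Query the possible values of one parameter field; both the call-level
// status and the per-entry status must be checked.
void queryFieldValues(ApexCodec_Configurable *comp, uint32_t index, size_t offset) {
    ApexCodec_SupportedValuesQuery query = {};
    query.index = index;
    query.offset = offset;
    query.type = APEXCODEC_SUPPORTED_VALUES_QUERY_POSSIBLE;
    if (ApexCodec_Configurable_querySupportedValues(comp, &query, 1)
                == APEXCODEC_STATUS_OK
            && query.status == APEXCODEC_STATUS_OK) {
        // Inspect query.values (e.g. via ApexCodec_SupportedValues_getTypeAndValues),
        // then release it as documented.
        ApexCodec_SupportedValues_release(query.values);
    }
}
```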
+
+__END_DECLS
\ No newline at end of file
diff --git a/media/module/libapexcodecs/libapexcodecs.map.txt b/media/module/libapexcodecs/libapexcodecs.map.txt
new file mode 100644
index 0000000..672cf89
--- /dev/null
+++ b/media/module/libapexcodecs/libapexcodecs.map.txt
@@ -0,0 +1,26 @@
+LIBAPEXCODECS_36 { # introduced=36
+ global:
+ ApexCodec_Component_create; # apex
+ ApexCodec_Component_destroy; # apex
+ ApexCodec_Component_flush; # apex
+ ApexCodec_Component_getConfigurable; # apex
+ ApexCodec_Component_process; # apex
+ ApexCodec_Component_start; # apex
+ ApexCodec_Component_reset; # apex
+ ApexCodec_Configurable_config; # apex
+ ApexCodec_Configurable_query; # apex
+ ApexCodec_Configurable_querySupportedParams; # apex
+ ApexCodec_Configurable_querySupportedValues; # apex
+ ApexCodec_GetComponentStore; # apex
+ ApexCodec_ParamDescriptors_getDescriptor; # apex
+ ApexCodec_ParamDescriptors_getIndices; # apex
+ ApexCodec_ParamDescriptors_release; # apex
+ ApexCodec_SettingResults_getResultAtIndex; # apex
+ ApexCodec_SettingResults_release; # apex
+ ApexCodec_SupportedValues_getTypeAndValues; # apex
+ ApexCodec_SupportedValues_release; # apex
+ ApexCodec_Traits_get; # apex
+
+ local:
+ *;
+};
\ No newline at end of file
diff --git a/media/module/libapexcodecs/tests/Android.bp b/media/module/libapexcodecs/tests/Android.bp
new file mode 100644
index 0000000..162d12c
--- /dev/null
+++ b/media/module/libapexcodecs/tests/Android.bp
@@ -0,0 +1,30 @@
+//
+// Copyright (C) 2024 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+
+package {
+ default_team: "trendy_team_android_media_codec_framework",
+ default_applicable_licenses: ["frameworks_av_license"],
+}
+
+cc_test {
+ name: "libapexcodecs_tests",
+ shared_libs: [
+ "libapexcodecs-testing",
+ "libcodec2",
+ ],
+
+ srcs: ["ApexCodecsTest.cpp"],
+}
diff --git a/media/module/libapexcodecs/tests/ApexCodecsTest.cpp b/media/module/libapexcodecs/tests/ApexCodecsTest.cpp
new file mode 100644
index 0000000..cd5ebba
--- /dev/null
+++ b/media/module/libapexcodecs/tests/ApexCodecsTest.cpp
@@ -0,0 +1,100 @@
+#include <C2.h>
+#include <C2Component.h>
+
+#include <apex/ApexCodecs.h>
+
+// static_asserts for enum values match
+static_assert((uint32_t)APEXCODEC_STATUS_OK == (uint32_t)C2_OK);
+static_assert((uint32_t)APEXCODEC_STATUS_BAD_VALUE == (uint32_t)C2_BAD_VALUE);
+static_assert((uint32_t)APEXCODEC_STATUS_BAD_INDEX == (uint32_t)C2_BAD_INDEX);
+static_assert((uint32_t)APEXCODEC_STATUS_CANNOT_DO == (uint32_t)C2_CANNOT_DO);
+static_assert((uint32_t)APEXCODEC_STATUS_DUPLICATE == (uint32_t)C2_DUPLICATE);
+static_assert((uint32_t)APEXCODEC_STATUS_NOT_FOUND == (uint32_t)C2_NOT_FOUND);
+static_assert((uint32_t)APEXCODEC_STATUS_BAD_STATE == (uint32_t)C2_BAD_STATE);
+static_assert((uint32_t)APEXCODEC_STATUS_BLOCKING == (uint32_t)C2_BLOCKING);
+static_assert((uint32_t)APEXCODEC_STATUS_CANCELED == (uint32_t)C2_CANCELED);
+static_assert((uint32_t)APEXCODEC_STATUS_NO_MEMORY == (uint32_t)C2_NO_MEMORY);
+static_assert((uint32_t)APEXCODEC_STATUS_REFUSED == (uint32_t)C2_REFUSED);
+static_assert((uint32_t)APEXCODEC_STATUS_TIMED_OUT == (uint32_t)C2_TIMED_OUT);
+static_assert((uint32_t)APEXCODEC_STATUS_OMITTED == (uint32_t)C2_OMITTED);
+static_assert((uint32_t)APEXCODEC_STATUS_CORRUPTED == (uint32_t)C2_CORRUPTED);
+static_assert((uint32_t)APEXCODEC_STATUS_NO_INIT == (uint32_t)C2_NO_INIT);
+
+static_assert((uint32_t)APEXCODEC_KIND_OTHER == (uint32_t)C2Component::KIND_OTHER);
+static_assert((uint32_t)APEXCODEC_KIND_DECODER == (uint32_t)C2Component::KIND_DECODER);
+static_assert((uint32_t)APEXCODEC_KIND_ENCODER == (uint32_t)C2Component::KIND_ENCODER);
+
+static_assert((uint32_t)APEXCODEC_DOMAIN_OTHER == (uint32_t)C2Component::DOMAIN_OTHER);
+static_assert((uint32_t)APEXCODEC_DOMAIN_VIDEO == (uint32_t)C2Component::DOMAIN_VIDEO);
+static_assert((uint32_t)APEXCODEC_DOMAIN_AUDIO == (uint32_t)C2Component::DOMAIN_AUDIO);
+static_assert((uint32_t)APEXCODEC_DOMAIN_IMAGE == (uint32_t)C2Component::DOMAIN_IMAGE);
+
+static_assert((uint32_t)APEXCODEC_FLAG_DROP_FRAME == (uint32_t)C2FrameData::FLAG_DROP_FRAME);
+static_assert((uint32_t)APEXCODEC_FLAG_END_OF_STREAM == (uint32_t)C2FrameData::FLAG_END_OF_STREAM);
+static_assert((uint32_t)APEXCODEC_FLAG_DISCARD_FRAME == (uint32_t)C2FrameData::FLAG_DISCARD_FRAME);
+static_assert((uint32_t)APEXCODEC_FLAG_INCOMPLETE == (uint32_t)C2FrameData::FLAG_INCOMPLETE);
+static_assert((uint32_t)APEXCODEC_FLAG_CORRECTED == (uint32_t)C2FrameData::FLAG_CORRECTED);
+static_assert((uint32_t)APEXCODEC_FLAG_CORRUPT == (uint32_t)C2FrameData::FLAG_CORRUPT);
+static_assert((uint32_t)APEXCODEC_FLAG_CODEC_CONFIG == (uint32_t)C2FrameData::FLAG_CODEC_CONFIG);
+
+static_assert((uint32_t)APEXCODEC_BUFFER_TYPE_INVALID ==
+ (uint32_t)C2BufferData::INVALID);
+static_assert((uint32_t)APEXCODEC_BUFFER_TYPE_LINEAR ==
+ (uint32_t)C2BufferData::LINEAR);
+static_assert((uint32_t)APEXCODEC_BUFFER_TYPE_LINEAR_CHUNKS ==
+ (uint32_t)C2BufferData::LINEAR_CHUNKS);
+static_assert((uint32_t)APEXCODEC_BUFFER_TYPE_GRAPHIC ==
+ (uint32_t)C2BufferData::GRAPHIC);
+static_assert((uint32_t)APEXCODEC_BUFFER_TYPE_GRAPHIC_CHUNKS ==
+ (uint32_t)C2BufferData::GRAPHIC_CHUNKS);
+
+static_assert((uint32_t)APEXCODEC_SUPPORTED_VALUES_QUERY_CURRENT ==
+ (uint32_t)C2FieldSupportedValuesQuery::CURRENT);
+static_assert((uint32_t)APEXCODEC_SUPPORTED_VALUES_QUERY_POSSIBLE ==
+ (uint32_t)C2FieldSupportedValuesQuery::POSSIBLE);
+
+static_assert((uint32_t)APEXCODEC_SUPPORTED_VALUES_EMPTY ==
+ (uint32_t)C2FieldSupportedValues::EMPTY);
+static_assert((uint32_t)APEXCODEC_SUPPORTED_VALUES_RANGE ==
+ (uint32_t)C2FieldSupportedValues::RANGE);
+static_assert((uint32_t)APEXCODEC_SUPPORTED_VALUES_VALUES ==
+ (uint32_t)C2FieldSupportedValues::VALUES);
+static_assert((uint32_t)APEXCODEC_SUPPORTED_VALUES_FLAGS ==
+ (uint32_t)C2FieldSupportedValues::FLAGS);
+
+static_assert((uint32_t)APEXCODEC_SUPPORTED_VALUES_TYPE_NONE == (uint32_t)C2Value::NO_INIT);
+static_assert((uint32_t)APEXCODEC_SUPPORTED_VALUES_TYPE_INT32 == (uint32_t)C2Value::INT32);
+static_assert((uint32_t)APEXCODEC_SUPPORTED_VALUES_TYPE_UINT32 == (uint32_t)C2Value::UINT32);
+static_assert((uint32_t)APEXCODEC_SUPPORTED_VALUES_TYPE_INT64 == (uint32_t)C2Value::INT64);
+static_assert((uint32_t)APEXCODEC_SUPPORTED_VALUES_TYPE_UINT64 == (uint32_t)C2Value::UINT64);
+static_assert((uint32_t)APEXCODEC_SUPPORTED_VALUES_TYPE_FLOAT == (uint32_t)C2Value::FLOAT);
+
+static_assert((uint32_t)APEXCODEC_SETTING_RESULT_BAD_TYPE ==
+ (uint32_t)C2SettingResult::BAD_TYPE);
+static_assert((uint32_t)APEXCODEC_SETTING_RESULT_BAD_PORT ==
+ (uint32_t)C2SettingResult::BAD_PORT);
+static_assert((uint32_t)APEXCODEC_SETTING_RESULT_BAD_INDEX ==
+ (uint32_t)C2SettingResult::BAD_INDEX);
+static_assert((uint32_t)APEXCODEC_SETTING_RESULT_READ_ONLY ==
+ (uint32_t)C2SettingResult::READ_ONLY);
+static_assert((uint32_t)APEXCODEC_SETTING_RESULT_MISMATCH ==
+ (uint32_t)C2SettingResult::MISMATCH);
+static_assert((uint32_t)APEXCODEC_SETTING_RESULT_BAD_VALUE ==
+ (uint32_t)C2SettingResult::BAD_VALUE);
+static_assert((uint32_t)APEXCODEC_SETTING_RESULT_CONFLICT ==
+ (uint32_t)C2SettingResult::CONFLICT);
+static_assert((uint32_t)APEXCODEC_SETTING_RESULT_UNSUPPORTED ==
+ (uint32_t)C2SettingResult::UNSUPPORTED);
+static_assert((uint32_t)APEXCODEC_SETTING_RESULT_INFO_BAD_VALUE ==
+ (uint32_t)C2SettingResult::INFO_BAD_VALUE);
+static_assert((uint32_t)APEXCODEC_SETTING_RESULT_INFO_CONFLICT ==
+ (uint32_t)C2SettingResult::INFO_CONFLICT);
+
+static_assert((uint32_t)APEXCODEC_PARAM_IS_REQUIRED == (uint32_t)C2ParamDescriptor::IS_REQUIRED);
+static_assert((uint32_t)APEXCODEC_PARAM_IS_PERSISTENT ==
+ (uint32_t)C2ParamDescriptor::IS_PERSISTENT);
+static_assert((uint32_t)APEXCODEC_PARAM_IS_STRICT == (uint32_t)C2ParamDescriptor::IS_STRICT);
+static_assert((uint32_t)APEXCODEC_PARAM_IS_READ_ONLY == (uint32_t)C2ParamDescriptor::IS_READ_ONLY);
+static_assert((uint32_t)APEXCODEC_PARAM_IS_HIDDEN == (uint32_t)C2ParamDescriptor::IS_HIDDEN);
+static_assert((uint32_t)APEXCODEC_PARAM_IS_INTERNAL == (uint32_t)C2ParamDescriptor::IS_INTERNAL);
+static_assert((uint32_t)APEXCODEC_PARAM_IS_CONST == (uint32_t)C2ParamDescriptor::IS_CONST);
\ No newline at end of file
diff --git a/media/mtp/OWNERS b/media/mtp/OWNERS
index 6b5336e..bdb6cdb 100644
--- a/media/mtp/OWNERS
+++ b/media/mtp/OWNERS
@@ -1,10 +1,9 @@
set noparent
-aprasath@google.com
anothermark@google.com
-kumarashishg@google.com
-sarup@google.com
+febinthattil@google.com
+aprasath@google.com
jsharkey@android.com
jameswei@google.com
rmojumder@google.com
-
+kumarashishg@google.com
\ No newline at end of file
diff --git a/media/utils/ServiceUtilities.cpp b/media/utils/ServiceUtilities.cpp
index 679b111..ddb93fe 100644
--- a/media/utils/ServiceUtilities.cpp
+++ b/media/utils/ServiceUtilities.cpp
@@ -45,6 +45,7 @@
namespace android {
namespace {
+constexpr auto PERMISSION_GRANTED = permission::PermissionChecker::PERMISSION_GRANTED;
constexpr auto PERMISSION_HARD_DENIED = permission::PermissionChecker::PERMISSION_HARD_DENIED;
}
@@ -78,19 +79,32 @@
int32_t getOpForSource(audio_source_t source) {
switch (source) {
- case AUDIO_SOURCE_HOTWORD:
- return AppOpsManager::OP_RECORD_AUDIO_HOTWORD;
+ case AUDIO_SOURCE_FM_TUNER:
+ return AppOpsManager::OP_NONE;
case AUDIO_SOURCE_ECHO_REFERENCE: // fallthrough
case AUDIO_SOURCE_REMOTE_SUBMIX:
return AppOpsManager::OP_RECORD_AUDIO_OUTPUT;
case AUDIO_SOURCE_VOICE_DOWNLINK:
return AppOpsManager::OP_RECORD_INCOMING_PHONE_AUDIO;
+ case AUDIO_SOURCE_HOTWORD:
+ return AppOpsManager::OP_RECORD_AUDIO_HOTWORD;
case AUDIO_SOURCE_DEFAULT:
default:
return AppOpsManager::OP_RECORD_AUDIO;
}
}
+bool isRecordOpRequired(audio_source_t source) {
+ switch (source) {
+ case AUDIO_SOURCE_FM_TUNER:
+ case AUDIO_SOURCE_ECHO_REFERENCE: // fallthrough
+ case AUDIO_SOURCE_REMOTE_SUBMIX:
+ return false;
+ default:
+ return true;
+ }
+}
+
std::optional<AttributionSourceState> resolveAttributionSource(
const AttributionSourceState& callerAttributionSource, const uint32_t virtualDeviceId) {
AttributionSourceState nextAttributionSource = callerAttributionSource;
@@ -122,7 +136,8 @@
return std::optional<AttributionSourceState>{myAttributionSource};
}
- static int checkRecordingInternal(const AttributionSourceState &attributionSource,
+
+static int checkRecordingInternal(const AttributionSourceState &attributionSource,
const uint32_t virtualDeviceId,
const String16 &msg, bool start, audio_source_t source) {
// Okay to not track in app ops as audio server or media server is us and if
@@ -131,32 +146,47 @@
// user is active, but it is a core system service so let it through.
// TODO(b/141210120): UserManager.DISALLOW_RECORD_AUDIO should not affect system user 0
uid_t uid = VALUE_OR_FATAL(aidl2legacy_int32_t_uid_t(attributionSource.uid));
- if (isAudioServerOrMediaServerOrSystemServerOrRootUid(uid)) return true;
-
- // We specify a pid and uid here as mediaserver (aka MediaRecorder or StageFrightRecorder)
- // may open a record track on behalf of a client. Note that pid may be a tid.
- // IMPORTANT: DON'T USE PermissionCache - RUNTIME PERMISSIONS CHANGE.
- std::optional<AttributionSourceState> resolvedAttributionSource =
- resolveAttributionSource(attributionSource, virtualDeviceId);
- if (!resolvedAttributionSource.has_value()) {
- return false;
- }
+ if (isAudioServerOrMediaServerOrSystemServerOrRootUid(uid)) return PERMISSION_GRANTED;
const int32_t attributedOpCode = getOpForSource(source);
+ if (isRecordOpRequired(source)) {
+ // We specify a pid and uid here as mediaserver (aka MediaRecorder or StageFrightRecorder)
+ // may open a record track on behalf of a client. Note that pid may be a tid.
+ // IMPORTANT: DON'T USE PermissionCache - RUNTIME PERMISSIONS CHANGE.
+ std::optional<AttributionSourceState> resolvedAttributionSource =
+ resolveAttributionSource(attributionSource, virtualDeviceId);
+ if (!resolvedAttributionSource.has_value()) {
+ return PERMISSION_HARD_DENIED;
+ }
- permission::PermissionChecker permissionChecker;
- int permitted;
- if (start) {
- permitted = permissionChecker.checkPermissionForStartDataDeliveryFromDatasource(
- sAndroidPermissionRecordAudio, resolvedAttributionSource.value(), msg,
- attributedOpCode);
+ permission::PermissionChecker permissionChecker;
+ int permitted;
+ if (start) {
+ permitted = permissionChecker.checkPermissionForStartDataDeliveryFromDatasource(
+ sAndroidPermissionRecordAudio, resolvedAttributionSource.value(), msg,
+ attributedOpCode);
+ } else {
+ permitted = permissionChecker.checkPermissionForPreflightFromDatasource(
+ sAndroidPermissionRecordAudio, resolvedAttributionSource.value(), msg,
+ attributedOpCode);
+ }
+
+ return permitted;
} else {
- permitted = permissionChecker.checkPermissionForPreflightFromDatasource(
- sAndroidPermissionRecordAudio, resolvedAttributionSource.value(), msg,
- attributedOpCode);
+ if (attributedOpCode == AppOpsManager::OP_NONE) return PERMISSION_GRANTED; // nothing to do
+ AppOpsManager ap{};
+ PermissionController pc{};
+ return ap.startOpNoThrow(
+ attributedOpCode, attributionSource.uid,
+ resolveCallingPackage(pc,
+ String16{attributionSource.packageName.value_or("").c_str()},
+ attributionSource.uid),
+ false,
+ attributionSource.attributionTag.has_value()
+ ? String16{attributionSource.attributionTag.value().c_str()}
+ : String16{},
+ msg);
}
-
- return permitted;
}
static constexpr int DEVICE_ID_DEFAULT = 0;
@@ -188,19 +218,32 @@
uid_t uid = VALUE_OR_FATAL(aidl2legacy_int32_t_uid_t(attributionSource.uid));
if (isAudioServerOrMediaServerOrSystemServerOrRootUid(uid)) return;
- // We specify a pid and uid here as mediaserver (aka MediaRecorder or StageFrightRecorder)
- // may open a record track on behalf of a client. Note that pid may be a tid.
- // IMPORTANT: DON'T USE PermissionCache - RUNTIME PERMISSIONS CHANGE.
- const std::optional<AttributionSourceState> resolvedAttributionSource =
- resolveAttributionSource(attributionSource, virtualDeviceId);
- if (!resolvedAttributionSource.has_value()) {
- return;
- }
-
const int32_t attributedOpCode = getOpForSource(source);
- permission::PermissionChecker permissionChecker;
- permissionChecker.finishDataDeliveryFromDatasource(attributedOpCode,
- resolvedAttributionSource.value());
+ if (isRecordOpRequired(source)) {
+ // We specify a pid and uid here as mediaserver (aka MediaRecorder or StageFrightRecorder)
+ // may open a record track on behalf of a client. Note that pid may be a tid.
+ // IMPORTANT: DON'T USE PermissionCache - RUNTIME PERMISSIONS CHANGE.
+ const std::optional<AttributionSourceState> resolvedAttributionSource =
+ resolveAttributionSource(attributionSource, virtualDeviceId);
+ if (!resolvedAttributionSource.has_value()) {
+ return;
+ }
+
+ permission::PermissionChecker permissionChecker;
+ permissionChecker.finishDataDeliveryFromDatasource(attributedOpCode,
+ resolvedAttributionSource.value());
+ } else {
+ if (attributedOpCode == AppOpsManager::OP_NONE) return; // nothing to do
+ AppOpsManager ap{};
+ PermissionController pc{};
+ ap.finishOp(attributedOpCode, attributionSource.uid,
+ resolveCallingPackage(
+ pc, String16{attributionSource.packageName.value_or("").c_str()},
+ attributionSource.uid),
+ attributionSource.attributionTag.has_value()
+ ? String16{attributionSource.attributionTag.value().c_str()}
+ : String16{});
+ }
}
bool captureAudioOutputAllowed(const AttributionSourceState& attributionSource) {
diff --git a/media/utils/include/mediautils/ServiceUtilities.h b/media/utils/include/mediautils/ServiceUtilities.h
index 461e190..9abdbf1 100644
--- a/media/utils/include/mediautils/ServiceUtilities.h
+++ b/media/utils/include/mediautils/ServiceUtilities.h
@@ -118,6 +118,7 @@
const AttributionSourceState& attributionSource, const String16& caller);
void anonymizeBluetoothAddress(char *address);
+bool isRecordOpRequired(audio_source_t source);
int32_t getOpForSource(audio_source_t source);
AttributionSourceState getCallingAttributionSource();
diff --git a/media/utils/include/mediautils/SharedMemoryAllocator.h b/media/utils/include/mediautils/SharedMemoryAllocator.h
index 4243b9c..7d4f62e 100644
--- a/media/utils/include/mediautils/SharedMemoryAllocator.h
+++ b/media/utils/include/mediautils/SharedMemoryAllocator.h
@@ -28,6 +28,7 @@
#include <type_traits>
#include <unordered_map>
+#include <android-base/thread_annotations.h>
#include <binder/MemoryBase.h>
#include <binder/MemoryHeapBase.h>
#include <log/log_main.h>
@@ -425,8 +426,57 @@
[[no_unique_address]] SecondaryAllocator mSecondary;
};
+// Wrap an allocator with a lock if it backs multiple allocators through indirection
+template <typename Allocator>
+class LockedAllocator {
+ public:
+ static size_t alignment() { return Allocator::alignment(); }
+
+ explicit LockedAllocator(Allocator allocator) : mAllocator(allocator) {}
+
+ LockedAllocator() = default;
+
+ template <typename T>
+ AllocationType allocate(T&& request) {
+ static_assert(std::is_base_of_v<android::mediautils::BasicAllocRequest, std::decay_t<T>>);
+ std::lock_guard l_{mMutex};
+ return mAllocator.allocate(std::forward<T>(request));
+ }
+
+ void deallocate(const AllocationType& allocation) {
+ std::lock_guard l_{mMutex};
+ mAllocator.deallocate(allocation);
+ }
+
+ template <typename Enable = void>
+ auto deallocate_all()
+ -> std::enable_if_t<shared_allocator_impl::has_deallocate_all<Allocator>, Enable> {
+ std::lock_guard l_{mMutex};
+ mAllocator.deallocate_all();
+ }
+
+ template <typename Enable = bool>
+ auto owns(const AllocationType& allocation) const
+ -> std::enable_if_t<shared_allocator_impl::has_owns<Allocator>, Enable> {
+ std::lock_guard l_{mMutex};
+ return mAllocator.owns(allocation);
+ }
+
+ template <typename Enable = std::string>
+ auto dump() const -> std::enable_if_t<shared_allocator_impl::has_dump<Allocator>, Enable> {
+ std::lock_guard l_{mMutex};
+ return mAllocator.dump();
+ }
+
+ private:
+ std::mutex mMutex;
+ [[no_unique_address]] Allocator mAllocator GUARDED_BY(mMutex);
+};
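As a rough illustration of the intended layering (the `IndirectAllocator` constructor shape is assumed from its description below, not confirmed by this patch), one mutex-guarded backing pool can be shared by several indirect handles:

```cpp
#include <memory>

#include <mediautils/SharedMemoryAllocator.h>

using android::mediautils::IndirectAllocator;
using android::mediautils::LockedAllocator;
using android::mediautils::MemoryHeapBaseAllocator;

using Backing = LockedAllocator<MemoryHeapBaseAllocator>;

// Every allocate()/deallocate() issued through either handle serializes
// on the shared pool's internal mutex.
const auto pool = std::make_shared<Backing>();
IndirectAllocator<Backing> handleA{pool};
IndirectAllocator<Backing> handleB{pool};
```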
+
// An allocator which is backed by a shared_ptr to an allocator, so multiple
// allocators can share the same backing allocator (and thus the same state).
+// When the same backing allocator is used by multiple higher level allocators,
+// locking at the sharing level is necessary.
template <typename Allocator>
class IndirectAllocator {
public:
diff --git a/services/audioflinger/AudioFlinger.cpp b/services/audioflinger/AudioFlinger.cpp
index a1a0634..2322780 100644
--- a/services/audioflinger/AudioFlinger.cpp
+++ b/services/audioflinger/AudioFlinger.cpp
@@ -541,7 +541,7 @@
const audio_attributes_t *attr,
audio_config_base_t *config,
const AudioClient& client,
- audio_port_handle_t *deviceId,
+ DeviceIdVector *deviceIds,
audio_session_t *sessionId,
const sp<MmapStreamCallback>& callback,
sp<MmapStreamInterface>& interface,
@@ -553,7 +553,7 @@
status_t ret = NO_INIT;
if (af != 0) {
ret = af->openMmapStream(
- direction, attr, config, client, deviceId,
+ direction, attr, config, client, deviceIds,
sessionId, callback, interface, handle);
}
return ret;
@@ -563,7 +563,7 @@
const audio_attributes_t *attr,
audio_config_base_t *config,
const AudioClient& client,
- audio_port_handle_t *deviceId,
+ DeviceIdVector *deviceIds,
audio_session_t *sessionId,
const sp<MmapStreamCallback>& callback,
sp<MmapStreamInterface>& interface,
@@ -636,7 +636,8 @@
&fullConfig,
(audio_output_flags_t)(AUDIO_OUTPUT_FLAG_MMAP_NOIRQ |
AUDIO_OUTPUT_FLAG_DIRECT),
- deviceId, &portId, &secondaryOutputs, &isSpatialized,
+ deviceIds, &portId, &secondaryOutputs,
+ &isSpatialized,
&isBitPerfect,
&volume,
&muted);
@@ -648,12 +649,17 @@
ALOGW_IF(!secondaryOutputs.empty(),
"%s does not support secondary outputs, ignoring them", __func__);
} else {
+ audio_port_handle_t deviceId = getFirstDeviceId(*deviceIds);
ret = AudioSystem::getInputForAttr(&localAttr, &io,
RECORD_RIID_INVALID,
actualSessionId,
adjAttributionSource,
config,
- AUDIO_INPUT_FLAG_MMAP_NOIRQ, deviceId, &portId);
+ AUDIO_INPUT_FLAG_MMAP_NOIRQ, &deviceId, &portId);
+ deviceIds->clear();
+ if (deviceId != AUDIO_PORT_HANDLE_NONE) {
+ deviceIds->push_back(deviceId);
+ }
}
if (ret != NO_ERROR) {
return ret;
@@ -667,7 +673,7 @@
const sp<IAfMmapThread> thread = mMmapThreads.valueFor(io);
if (thread != 0) {
interface = IAfMmapThread::createMmapStreamInterfaceAdapter(thread);
- thread->configure(&localAttr, streamType, actualSessionId, callback, *deviceId, portId);
+ thread->configure(&localAttr, streamType, actualSessionId, callback, *deviceIds, portId);
*handle = portId;
*sessionId = actualSessionId;
config->sample_rate = thread->sampleRate();
@@ -1166,6 +1172,7 @@
adjAttributionSource = std::move(validatedAttrSource).unwrapInto();
}
+ DeviceIdVector selectedDeviceIds;
audio_session_t sessionId = input.sessionId;
if (sessionId == AUDIO_SESSION_ALLOCATE) {
sessionId = (audio_session_t) newAudioUniqueId(AUDIO_UNIQUE_ID_USE_SESSION);
@@ -1176,11 +1183,14 @@
output.sessionId = sessionId;
output.outputId = AUDIO_IO_HANDLE_NONE;
- output.selectedDeviceId = input.selectedDeviceId;
+ if (input.selectedDeviceId != AUDIO_PORT_HANDLE_NONE) {
+ selectedDeviceIds.push_back(input.selectedDeviceId);
+ }
lStatus = AudioSystem::getOutputForAttr(&localAttr, &output.outputId, sessionId, &streamType,
adjAttributionSource, &input.config, input.flags,
- &output.selectedDeviceId, &portId, &secondaryOutputs,
+ &selectedDeviceIds, &portId, &secondaryOutputs,
&isSpatialized, &isBitPerfect, &volume, &muted);
+ output.selectedDeviceId = getFirstDeviceId(selectedDeviceIds);
if (lStatus != NO_ERROR || output.outputId == AUDIO_IO_HANDLE_NONE) {
ALOGE("createTrack() getOutputForAttr() return error %d or invalid output handle", lStatus);
diff --git a/services/audioflinger/AudioFlinger.h b/services/audioflinger/AudioFlinger.h
index 042194f..133410e 100644
--- a/services/audioflinger/AudioFlinger.h
+++ b/services/audioflinger/AudioFlinger.h
@@ -439,7 +439,7 @@
const audio_attributes_t *attr,
audio_config_base_t *config,
const AudioClient& client,
- audio_port_handle_t *deviceId,
+ DeviceIdVector *deviceIds,
audio_session_t *sessionId,
const sp<MmapStreamCallback>& callback,
sp<MmapStreamInterface>& interface,
diff --git a/services/audioflinger/IAfThread.h b/services/audioflinger/IAfThread.h
index a13819c..3163d4c 100644
--- a/services/audioflinger/IAfThread.h
+++ b/services/audioflinger/IAfThread.h
@@ -661,7 +661,7 @@
audio_stream_type_t streamType,
audio_session_t sessionId,
const sp<MmapStreamCallback>& callback,
- audio_port_handle_t deviceId,
+ const DeviceIdVector& deviceIds,
audio_port_handle_t portId) EXCLUDES_ThreadBase_Mutex = 0;
virtual void disconnect() EXCLUDES_ThreadBase_Mutex = 0;
diff --git a/services/audioflinger/Threads.cpp b/services/audioflinger/Threads.cpp
index 060c72b..e42b39e 100644
--- a/services/audioflinger/Threads.cpp
+++ b/services/audioflinger/Threads.cpp
@@ -338,28 +338,32 @@
// under #ifdef __cplusplus #endif
static std::string patchSinksToString(const struct audio_patch *patch)
{
- std::stringstream ss;
+ std::string s;
for (size_t i = 0; i < patch->num_sinks; ++i) {
- if (i > 0) {
- ss << "|";
+ if (i > 0) s.append("|");
+ if (patch->sinks[i].ext.device.address[0]) {
+ s.append("(").append(toString(patch->sinks[i].ext.device.type))
+ .append(", ").append(patch->sinks[i].ext.device.address).append(")");
+ } else {
+ s.append(toString(patch->sinks[i].ext.device.type));
}
- ss << "(" << toString(patch->sinks[i].ext.device.type)
- << ", " << patch->sinks[i].ext.device.address << ")";
}
- return ss.str();
+ return s;
}
static std::string patchSourcesToString(const struct audio_patch *patch)
{
- std::stringstream ss;
+ std::string s;
for (size_t i = 0; i < patch->num_sources; ++i) {
- if (i > 0) {
- ss << "|";
+ if (i > 0) s.append("|");
+ if (patch->sources[i].ext.device.address[0]) {
+ s.append("(").append(toString(patch->sources[i].ext.device.type))
+ .append(", ").append(patch->sources[i].ext.device.address).append(")");
+ } else {
+ s.append(toString(patch->sources[i].ext.device.type));
}
- ss << "(" << toString(patch->sources[i].ext.device.type)
- << ", " << patch->sources[i].ext.device.address << ")";
}
- return ss.str();
+ return s;
}
static std::string toString(audio_latency_mode_t mode) {
@@ -10393,13 +10397,13 @@
audio_stream_type_t streamType __unused,
audio_session_t sessionId,
const sp<MmapStreamCallback>& callback,
- audio_port_handle_t deviceId,
+ const DeviceIdVector& deviceIds,
audio_port_handle_t portId)
{
mAttr = *attr;
mSessionId = sessionId;
mCallback = callback;
- mDeviceId = deviceId;
+ mDeviceIds = deviceIds;
mPortId = portId;
}
@@ -10492,7 +10496,7 @@
audio_stream_type_t stream = streamType_l();
audio_output_flags_t flags =
(audio_output_flags_t)(AUDIO_OUTPUT_FLAG_MMAP_NOIRQ | AUDIO_OUTPUT_FLAG_DIRECT);
- audio_port_handle_t deviceId = mDeviceId;
+ DeviceIdVector deviceIds = mDeviceIds;
std::vector<audio_io_handle_t> secondaryOutputs;
bool isSpatialized;
bool isBitPerfect;
@@ -10503,7 +10507,7 @@
adjAttributionSource,
&config,
flags,
- &deviceId,
+ &deviceIds,
&portId,
&secondaryOutputs,
&isSpatialized,
@@ -10519,7 +10523,7 @@
config.sample_rate = mSampleRate;
config.channel_mask = mChannelMask;
config.format = mFormat;
- audio_port_handle_t deviceId = mDeviceId;
+ audio_port_handle_t deviceId = getFirstDeviceId(mDeviceIds);
mutex().unlock();
ret = AudioSystem::getInputForAttr(&localAttr, &io,
RECORD_RIID_INVALID,
@@ -10872,7 +10876,7 @@
// store new device and send to effects
audio_devices_t type = AUDIO_DEVICE_NONE;
- audio_port_handle_t deviceId;
+ DeviceIdVector deviceIds;
AudioDeviceTypeAddrVector sinkDeviceTypeAddrs;
AudioDeviceTypeAddr sourceDeviceTypeAddr;
uint32_t numDevices = 0;
@@ -10886,12 +10890,12 @@
type = static_cast<audio_devices_t>(type | patch->sinks[i].ext.device.type);
sinkDeviceTypeAddrs.emplace_back(patch->sinks[i].ext.device.type,
patch->sinks[i].ext.device.address);
+ deviceIds.push_back(patch->sinks[i].id);
}
- deviceId = patch->sinks[0].id;
numDevices = mPatch.num_sinks;
} else {
type = patch->sources[0].ext.device.type;
- deviceId = patch->sources[0].id;
+ deviceIds.push_back(patch->sources[0].id);
numDevices = mPatch.num_sources;
sourceDeviceTypeAddr.mType = patch->sources[0].ext.device.type;
sourceDeviceTypeAddr.setAddress(patch->sources[0].ext.device.address);
@@ -10917,11 +10921,11 @@
// For mmap streams, once the routing has changed, they will be disconnected. It should be
// okay to notify the client earlier before the new patch creation.
- if (mDeviceId != deviceId) {
+ if (mDeviceIds != deviceIds) {
if (const sp<MmapStreamCallback> callback = mCallback.promote()) {
// The aaudioservice handle the routing changed event asynchronously. In that case,
// it is safe to hold the lock here.
- callback->onRoutingChanged(deviceId);
+ callback->onRoutingChanged(deviceIds);
}
}
@@ -10941,7 +10945,7 @@
*handle = AUDIO_PATCH_HANDLE_NONE;
}
- if (numDevices == 0 || mDeviceId != deviceId) {
+ if (numDevices == 0 || mDeviceIds != deviceIds) {
if (isOutput()) {
sendIoConfigEvent_l(AUDIO_OUTPUT_CONFIG_CHANGED);
mOutDeviceTypeAddrs = sinkDeviceTypeAddrs;
@@ -10951,7 +10955,7 @@
mInDeviceTypeAddr = sourceDeviceTypeAddr;
}
mPatch = *patch;
- mDeviceId = deviceId;
+ mDeviceIds = deviceIds;
}
// Force metadata update after a route change
mActiveTracks.setHasChanged();
@@ -11106,7 +11110,8 @@
if (const sp<MmapStreamCallback> callback = mCallback.promote()) {
// The aaudioservice handle the routing changed event asynchronously. In that case,
// it is safe to hold the lock here.
- callback->onRoutingChanged(AUDIO_PORT_HANDLE_NONE);
+ DeviceIdVector emptyDeviceIdVector;
+ callback->onRoutingChanged(emptyDeviceIdVector);
} else if (mNoCallbackWarningCount < kMaxNoCallbackWarnings) {
ALOGW("Could not notify MMAP stream tear down: no onRoutingChanged callback!");
mNoCallbackWarningCount++;
@@ -11198,11 +11203,11 @@
audio_stream_type_t streamType,
audio_session_t sessionId,
const sp<MmapStreamCallback>& callback,
- audio_port_handle_t deviceId,
+ const DeviceIdVector& deviceIds,
audio_port_handle_t portId)
{
audio_utils::lock_guard l(mutex());
- MmapThread::configure_l(attr, streamType, sessionId, callback, deviceId, portId);
+ MmapThread::configure_l(attr, streamType, sessionId, callback, deviceIds, portId);
mStreamType = streamType;
}
diff --git a/services/audioflinger/Threads.h b/services/audioflinger/Threads.h
index 0c5a2c3..1d6e244 100644
--- a/services/audioflinger/Threads.h
+++ b/services/audioflinger/Threads.h
@@ -2243,17 +2243,17 @@
audio_stream_type_t streamType,
audio_session_t sessionId,
const sp<MmapStreamCallback>& callback,
- audio_port_handle_t deviceId,
+ const DeviceIdVector& deviceIds,
audio_port_handle_t portId) override EXCLUDES_ThreadBase_Mutex {
audio_utils::lock_guard l(mutex());
- configure_l(attr, streamType, sessionId, callback, deviceId, portId);
+ configure_l(attr, streamType, sessionId, callback, deviceIds, portId);
}
void configure_l(const audio_attributes_t* attr,
audio_stream_type_t streamType,
audio_session_t sessionId,
const sp<MmapStreamCallback>& callback,
- audio_port_handle_t deviceId,
+ const DeviceIdVector& deviceIds,
audio_port_handle_t portId) REQUIRES(mutex());
void disconnect() final EXCLUDES_ThreadBase_Mutex;
@@ -2363,9 +2363,9 @@
void dumpTracks_l(int fd, const Vector<String16>& args) final REQUIRES(mutex());
/**
- * @brief mDeviceId current device port unique identifier
+ * @brief mDeviceIds current device port unique identifiers
*/
- audio_port_handle_t mDeviceId GUARDED_BY(mutex()) = AUDIO_PORT_HANDLE_NONE;
+ DeviceIdVector mDeviceIds GUARDED_BY(mutex());
audio_attributes_t mAttr GUARDED_BY(mutex());
audio_session_t mSessionId GUARDED_BY(mutex());
@@ -2397,7 +2397,7 @@
audio_stream_type_t streamType,
audio_session_t sessionId,
const sp<MmapStreamCallback>& callback,
- audio_port_handle_t deviceId,
+ const DeviceIdVector& deviceIds,
audio_port_handle_t portId) final EXCLUDES_ThreadBase_Mutex;
AudioStreamOut* clearOutput() final EXCLUDES_ThreadBase_Mutex;
diff --git a/services/audioflinger/afutils/AllocatorFactory.h b/services/audioflinger/afutils/AllocatorFactory.h
index 7534607..4c290a0 100644
--- a/services/audioflinger/afutils/AllocatorFactory.h
+++ b/services/audioflinger/afutils/AllocatorFactory.h
@@ -33,25 +33,36 @@
constexpr inline size_t SHARED_SIZE_SMALL = SHARED_SIZE - SHARED_SIZE_LARGE; // 20 MiB
constexpr inline size_t SMALL_THRESHOLD = 1024 * 40; // 40 KiB
+template <typename Policy>
+inline auto getSharedPool() {
+ using namespace mediautils;
+ return std::make_shared<LockedAllocator<PolicyAllocator<MemoryHeapBaseAllocator, Policy>>>();
+}
+
+// The following pools are global but lazily initialized. They are stored in shared_ptr
+// since they are referenced by clients, but they could also be leaked.
+
+// Pool from which every client gets their dedicated, exclusive quota.
inline auto getDedicated() {
using namespace mediautils;
- static const auto allocator =
- std::make_shared<PolicyAllocator<MemoryHeapBaseAllocator, SizePolicy<DED_SIZE>>>();
+ static const auto allocator = getSharedPool<SizePolicy<DED_SIZE>>();
return allocator;
}
+// Pool to which clients with large allocation sizes can fall back when their dedicated
+// allocation is exceeded. More likely to fill.
inline auto getSharedLarge() {
using namespace mediautils;
- static const auto allocator = std::make_shared<
- PolicyAllocator<MemoryHeapBaseAllocator, SizePolicy<SHARED_SIZE_LARGE>>>();
+ static const auto allocator = getSharedPool<SizePolicy<SHARED_SIZE_LARGE>>();
return allocator;
}
+// Pool to which clients with reasonable allocation sizes can fall back when
+// their dedicated allocation is exceeded, so that small-buffer clients are always served.
inline auto getSharedSmall() {
using namespace mediautils;
static const auto allocator =
- std::make_shared<PolicyAllocator<MemoryHeapBaseAllocator,
- SizePolicy<SHARED_SIZE_SMALL, 0, SMALL_THRESHOLD>>>();
+ getSharedPool<SizePolicy<SHARED_SIZE_SMALL, 0, SMALL_THRESHOLD>>();
return allocator;
}
@@ -78,8 +89,7 @@
getSharedLarge(), "Large Shared");
};
const auto makeSmallShared = []() {
- return wrapWithPolicySnooping<
- SizePolicy<SHARED_SIZE_SMALL / ADV_THRESHOLD_INV>>(
+ return wrapWithPolicySnooping<SizePolicy<SHARED_SIZE_SMALL / ADV_THRESHOLD_INV>>(
getSharedSmall(), "Small Shared");
};
diff --git a/services/audiopolicy/AudioPolicyInterface.h b/services/audiopolicy/AudioPolicyInterface.h
index c047a89..8dd247a 100644
--- a/services/audiopolicy/AudioPolicyInterface.h
+++ b/services/audiopolicy/AudioPolicyInterface.h
@@ -144,7 +144,7 @@
const AttributionSourceState& attributionSource,
audio_config_t *config,
audio_output_flags_t *flags,
- audio_port_handle_t *selectedDeviceId,
+ DeviceIdVector *selectedDeviceIds,
audio_port_handle_t *portId,
std::vector<audio_io_handle_t> *secondaryOutputs,
output_type_t *outputType,
diff --git a/services/audiopolicy/OWNERS b/services/audiopolicy/OWNERS
index 50ceadf..4a65069 100644
--- a/services/audiopolicy/OWNERS
+++ b/services/audiopolicy/OWNERS
@@ -1,4 +1,5 @@
# Bug component: 48436
+atneya@google.com
elaurent@google.com
jiabin@google.com
jmtrivi@google.com
diff --git a/services/audiopolicy/engine/common/src/EngineDefaultConfig.h b/services/audiopolicy/engine/common/src/EngineDefaultConfig.h
index c4bf64a..229c5e2 100644
--- a/services/audiopolicy/engine/common/src/EngineDefaultConfig.h
+++ b/services/audiopolicy/engine/common/src/EngineDefaultConfig.h
@@ -131,6 +131,8 @@
{AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_UNKNOWN, AUDIO_SOURCE_DEFAULT,
AUDIO_FLAG_BEACON, ""},
{AUDIO_CONTENT_TYPE_ULTRASOUND, AUDIO_USAGE_UNKNOWN, AUDIO_SOURCE_DEFAULT,
+ AUDIO_FLAG_NONE, ""},
+ {AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_SPEAKER_CLEANUP, AUDIO_SOURCE_DEFAULT,
AUDIO_FLAG_NONE, ""}
}
}
diff --git a/services/audiopolicy/engineconfigurable/config/src/CapEngineConfig.cpp b/services/audiopolicy/engineconfigurable/config/src/CapEngineConfig.cpp
index a1b4470..b72e517 100644
--- a/services/audiopolicy/engineconfigurable/config/src/CapEngineConfig.cpp
+++ b/services/audiopolicy/engineconfigurable/config/src/CapEngineConfig.cpp
@@ -122,18 +122,24 @@
}
ConversionResult<std::string> aidl2legacy_AudioHalCapCriterionV2Type_CapCriterionValue(
- const AudioHalCapCriterionV2::Type& aidl) {
+ const AudioHalCapCriterionV2& aidl) {
switch (aidl.getTag()) {
- case AudioHalCapCriterionV2::Type::availableDevicesType:
+ case AudioHalCapCriterionV2::availableInputDevices:
return aidl2legacy_AudioHalCapCriterionV2TypeDevice_CapCriterionValue(
- aidl.get<AudioHalCapCriterionV2::Type::availableDevicesType>());
- case AudioHalCapCriterionV2::Type::availableDevicesAddressesType:
- return aidl.get<AudioHalCapCriterionV2::Type::availableDevicesAddressesType>().template
- get<AudioDeviceAddress::id>();
- case AudioHalCapCriterionV2::Type::telephonyModeType:
- return toString(aidl.get<AudioHalCapCriterionV2::Type::telephonyModeType>());
- case AudioHalCapCriterionV2::Type::forcedConfigType:
- return toString(aidl.get<AudioHalCapCriterionV2::Type::forcedConfigType>());
+ aidl.get<AudioHalCapCriterionV2::availableInputDevices>().values[0]);
+ case AudioHalCapCriterionV2::availableOutputDevices:
+ return aidl2legacy_AudioHalCapCriterionV2TypeDevice_CapCriterionValue(
+ aidl.get<AudioHalCapCriterionV2::availableOutputDevices>().values[0]);
+ case AudioHalCapCriterionV2::availableInputDevicesAddresses:
+ return aidl.get<AudioHalCapCriterionV2::availableInputDevicesAddresses>().values[0].
+ template get<AudioDeviceAddress::id>();
+ case AudioHalCapCriterionV2::availableOutputDevicesAddresses:
+ return aidl.get<AudioHalCapCriterionV2::availableOutputDevicesAddresses>().values[0].
+ template get<AudioDeviceAddress::id>();
+ case AudioHalCapCriterionV2::telephonyMode:
+ return toString(aidl.get<AudioHalCapCriterionV2::telephonyMode>().values[0]);
+ case AudioHalCapCriterionV2::forceConfigForUse:
+ return toString(aidl.get<AudioHalCapCriterionV2::forceConfigForUse>().values[0]);
default:
return unexpected(BAD_VALUE);
}
@@ -168,11 +174,11 @@
}
isFirstCriterionRule = false;
std::string selectionCriterion = VALUE_OR_RETURN(
- aidl2legacy_AudioHalCapCriterionV2_CapName(criterionRule.criterion));
+ aidl2legacy_AudioHalCapCriterionV2_CapName(criterionRule.criterionAndValue));
std::string matchesWhen;
std::string value = VALUE_OR_RETURN(
aidl2legacy_AudioHalCapCriterionV2Type_CapCriterionValue(
- criterionRule.criterionTypeValue));
+ criterionRule.criterionAndValue));
switch (criterionRule.matchingRule) {
case AudioHalCapRule::MatchingRule::IS:
diff --git a/services/audiopolicy/enginedefault/src/Engine.cpp b/services/audiopolicy/enginedefault/src/Engine.cpp
index 403d1ba..1082d31 100644
--- a/services/audiopolicy/enginedefault/src/Engine.cpp
+++ b/services/audiopolicy/enginedefault/src/Engine.cpp
@@ -156,40 +156,21 @@
return EngineBase::setForceUse(usage, config);
}
-bool Engine::isBtScoActive(DeviceVector& availableOutputDevices,
- const SwAudioOutputCollection &outputs) const {
+bool Engine::isBtScoActive(DeviceVector& availableOutputDevices) const {
+ // SCO is considered active if:
+ // 1) a SCO device is connected
+ // 2) the preferred device for PHONE strategy is BT SCO: this is controlled only by java
+ // AudioService and is only true if the SCO audio link has been confirmed active by BT.
if (availableOutputDevices.getDevicesFromTypes(getAudioDeviceOutAllScoSet()).isEmpty()) {
return false;
}
- // SCO is active if:
- // 1) we are in a call and SCO is the preferred device for PHONE strategy
- if (isInCall() && audio_is_bluetooth_out_sco_device(
+
+ if (!audio_is_bluetooth_out_sco_device(
getPreferredDeviceTypeForLegacyStrategy(availableOutputDevices, STRATEGY_PHONE))) {
- return true;
+ return false;
}
- // 2) A strategy for which the preferred device is SCO is active
- for (const auto &ps : getOrderedProductStrategies()) {
- if (outputs.isStrategyActive(ps) &&
- !getPreferredAvailableDevicesForProductStrategy(availableOutputDevices, ps)
- .getDevicesFromTypes(getAudioDeviceOutAllScoSet()).isEmpty()) {
- return true;
- }
- }
- // 3) a ringtone is active and SCO is used for ringing
- if (outputs.isActiveLocally(toVolumeSource(AUDIO_STREAM_RING))
- && (getForceUse(AUDIO_POLICY_FORCE_FOR_VIBRATE_RINGING)
- == AUDIO_POLICY_FORCE_BT_SCO)) {
- return true;
- }
- // 4) an active input is routed from SCO
- DeviceVector availableInputDevices = getApmObserver()->getAvailableInputDevices();
- const auto &inputs = getApmObserver()->getInputs();
- if (inputs.activeInputsCountOnDevices(availableInputDevices.getDevicesFromType(
- AUDIO_DEVICE_IN_BLUETOOTH_SCO_HEADSET)) > 0) {
- return true;
- }
- return false;
+ return true;
}
void Engine::filterOutputDevicesForStrategy(legacy_strategy strategy,
@@ -200,7 +181,7 @@
if (com::android::media::audioserver::use_bt_sco_for_media()) {
// remove A2DP and LE Audio devices whenever BT SCO is in use
- if (isBtScoActive(availableOutputDevices, outputs)) {
+ if (isBtScoActive(availableOutputDevices)) {
availableOutputDevices.remove(
availableOutputDevices.getDevicesFromTypes(getAudioDeviceOutAllA2dpSet()));
availableOutputDevices.remove(
@@ -486,6 +467,18 @@
// Get the last connected device of wired and bluetooth a2dp
devices2 = availableOutputDevices.getFirstDevicesFromTypes(
getLastRemovableMediaDevices(GROUP_NONE, excludedDevices));
+ if (com::android::media::audioserver::use_bt_sco_for_media()) {
+ if (isBtScoActive(availableOutputDevices)
+ && !(devices2.getDevicesFromTypes(
+ getAudioDeviceOutAllA2dpSet()).isEmpty()
+ && devices2.getDevicesFromTypes(
+ getAudioDeviceOutAllBleSet()).isEmpty())) {
+ devices2 = availableOutputDevices.getFirstDevicesFromTypes(
+ { AUDIO_DEVICE_OUT_BLUETOOTH_SCO_CARKIT,
+ AUDIO_DEVICE_OUT_BLUETOOTH_SCO_HEADSET,
+ AUDIO_DEVICE_OUT_BLUETOOTH_SCO});
+ }
+ }
} else {
// Get the last connected device of wired except bluetooth a2dp
devices2 = availableOutputDevices.getFirstDevicesFromTypes(
@@ -493,15 +486,6 @@
}
}
- if (com::android::media::audioserver::use_bt_sco_for_media()) {
- if (devices2.isEmpty() && isBtScoActive(availableOutputDevices, outputs)) {
- devices2 = availableOutputDevices.getFirstDevicesFromTypes(
- { AUDIO_DEVICE_OUT_BLUETOOTH_SCO_CARKIT,
- AUDIO_DEVICE_OUT_BLUETOOTH_SCO_HEADSET,
- AUDIO_DEVICE_OUT_BLUETOOTH_SCO});
- }
- }
-
if ((devices2.isEmpty()) &&
(getForceUse(AUDIO_POLICY_FORCE_FOR_DOCK) == AUDIO_POLICY_FORCE_ANALOG_DOCK)) {
devices2 = availableOutputDevices.getDevicesFromType(
diff --git a/services/audiopolicy/enginedefault/src/Engine.h b/services/audiopolicy/enginedefault/src/Engine.h
index 862b5fd..e9c71dd 100644
--- a/services/audiopolicy/enginedefault/src/Engine.h
+++ b/services/audiopolicy/enginedefault/src/Engine.h
@@ -95,8 +95,7 @@
DeviceVector getDisabledDevicesForInputSource(
const DeviceVector& availableInputDevices, audio_source_t inputSource) const;
- bool isBtScoActive(DeviceVector& availableOutputDevices,
- const SwAudioOutputCollection &outputs) const;
+ bool isBtScoActive(DeviceVector& availableOutputDevices) const;
std::map<product_strategy_t, legacy_strategy> mLegacyStrategyMap;
};
diff --git a/services/audiopolicy/fuzzer/audiopolicy_fuzzer.cpp b/services/audiopolicy/fuzzer/audiopolicy_fuzzer.cpp
index a5f37b0..1c6248a 100644
--- a/services/audiopolicy/fuzzer/audiopolicy_fuzzer.cpp
+++ b/services/audiopolicy/fuzzer/audiopolicy_fuzzer.cpp
@@ -198,7 +198,7 @@
virtual ~AudioPolicyManagerFuzzer() = default;
virtual bool initialize();
virtual void SetUpManagerConfig();
- bool getOutputForAttr(audio_port_handle_t *selectedDeviceId, audio_format_t format,
+ bool getOutputForAttr(DeviceIdVector *selectedDeviceIds, audio_format_t format,
audio_channel_mask_t channelMask, int sampleRate,
audio_output_flags_t flags = AUDIO_OUTPUT_FLAG_NONE,
audio_io_handle_t *output = nullptr,
@@ -248,7 +248,7 @@
void AudioPolicyManagerFuzzer::SetUpManagerConfig() { mConfig->setDefault(); }
bool AudioPolicyManagerFuzzer::getOutputForAttr(
- audio_port_handle_t *selectedDeviceId, audio_format_t format, audio_channel_mask_t channelMask,
+ DeviceIdVector *selectedDeviceIds, audio_format_t format, audio_channel_mask_t channelMask,
int sampleRate, audio_output_flags_t flags, audio_io_handle_t *output,
audio_port_handle_t *portId, audio_attributes_t attr) {
audio_io_handle_t localOutput;
@@ -273,7 +273,7 @@
attributionSource.uid = 0;
attributionSource.token = sp<BBinder>::make();
if (mManager->getOutputForAttr(&attr, output, AUDIO_SESSION_NONE, &stream, attributionSource,
- &config, &flags, selectedDeviceId, portId, {}, &outputType, &isSpatialized,
+ &config, &flags, selectedDeviceIds, portId, {}, &outputType, &isSpatialized,
&isBitPerfect, &volume, &muted) != OK) {
return false;
}
@@ -726,8 +726,8 @@
std::string tags(mFdp->ConsumeBool() ? "" : "addr=remote_submix_media");
strncpy(attr.tags, tags.c_str(), AUDIO_ATTRIBUTES_TAGS_MAX_SIZE - 1);
- audio_port_handle_t playbackRoutedPortId = AUDIO_PORT_HANDLE_NONE;
- getOutputForAttr(&playbackRoutedPortId, mAudioConfig.format, mAudioConfig.channel_mask,
+ DeviceIdVector playbackRoutedPortIds;
+ getOutputForAttr(&playbackRoutedPortIds, mAudioConfig.format, mAudioConfig.channel_mask,
mAudioConfig.sample_rate, AUDIO_OUTPUT_FLAG_NONE, nullptr /*output*/,
nullptr /*portId*/, attr);
}
@@ -807,13 +807,13 @@
findDevicePort(AUDIO_PORT_ROLE_SINK, getValueFromVector<audio_devices_t>(mFdp, kAudioDevices),
mMixAddress, &injectionPort);
- audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
+ DeviceIdVector selectedDeviceIds;
audio_usage_t usage = getValueFromVector<audio_usage_t>(mFdp, kAudioUsages);
audio_attributes_t attr = {AUDIO_CONTENT_TYPE_UNKNOWN, usage, AUDIO_SOURCE_DEFAULT,
AUDIO_FLAG_NONE, ""};
std::string tags = std::string("addr=") + mMixAddress;
strncpy(attr.tags, tags.c_str(), AUDIO_ATTRIBUTES_TAGS_MAX_SIZE - 1);
- getOutputForAttr(&selectedDeviceId, mAudioConfig.format, mAudioConfig.channel_mask,
+ getOutputForAttr(&selectedDeviceIds, mAudioConfig.format, mAudioConfig.channel_mask,
mAudioConfig.sample_rate /*sampleRate*/, AUDIO_OUTPUT_FLAG_NONE,
nullptr /*output*/, &mPortId, attr);
ret = mManager->startOutput(mPortId);
@@ -903,15 +903,17 @@
audio_is_output_device(type) ? AUDIO_PORT_ROLE_SINK : AUDIO_PORT_ROLE_SOURCE;
findDevicePort(role, type, address, &devicePort);
- audio_port_handle_t routedPortId = devicePort.id;
// Try start input or output according to the device type
if (audio_is_output_devices(type)) {
- getOutputForAttr(&routedPortId, getValueFromVector<audio_format_t>(mFdp, kAudioFormats),
+ DeviceIdVector routedPortIds = { devicePort.id };
+ getOutputForAttr(&routedPortIds,
+ getValueFromVector<audio_format_t>(mFdp, kAudioFormats),
getValueFromVector<audio_channel_mask_t>(mFdp, kAudioChannelOutMasks),
getValueFromVector<uint32_t>(mFdp, kSamplingRates),
AUDIO_OUTPUT_FLAG_NONE);
} else if (audio_is_input_device(type)) {
RecordingActivityTracker tracker;
+ audio_port_handle_t routedPortId = devicePort.id;
getInputForAttr({}, tracker.getRiid(), &routedPortId,
getValueFromVector<audio_format_t>(mFdp, kAudioFormats),
getValueFromVector<audio_channel_mask_t>(mFdp, kAudioChannelInMasks),
@@ -984,10 +986,10 @@
if (ret != NO_ERROR) {
return;
}
- audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
+ DeviceIdVector selectedDeviceIds;
audio_io_handle_t output;
audio_port_handle_t portId;
- getOutputForAttr(&selectedDeviceId, getValueFromVector<audio_format_t>(mFdp, kAudioFormats),
+ getOutputForAttr(&selectedDeviceIds, getValueFromVector<audio_format_t>(mFdp, kAudioFormats),
getValueFromVector<audio_channel_mask_t>(mFdp, kAudioChannelOutMasks),
getValueFromVector<uint32_t>(mFdp, kSamplingRates), flags, &output, &portId);
sp<SwAudioOutputDescriptor> outDesc = mManager->getOutputs().valueFor(output);
diff --git a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
index 54ef5d5..04ebb57 100644
--- a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
+++ b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
@@ -1262,7 +1262,7 @@
uid_t uid,
audio_config_t *config,
audio_output_flags_t *flags,
- audio_port_handle_t *selectedDeviceId,
+ DeviceIdVector *selectedDeviceIds,
bool *isRequestedDeviceForExclusiveUse,
std::vector<sp<AudioPolicyMix>> *secondaryMixes,
output_type_t *outputType,
@@ -1270,7 +1270,8 @@
bool *isBitPerfect)
{
DeviceVector outputDevices;
- const audio_port_handle_t requestedPortId = *selectedDeviceId;
+ audio_port_handle_t requestedPortId = getFirstDeviceId(*selectedDeviceIds);
+ selectedDeviceIds->clear();
DeviceVector msdDevices = getMsdAudioOutDevices();
const sp<DeviceDescriptor> requestedDevice =
mAvailableOutputDevices.getDeviceFromId(requestedPortId);
@@ -1347,8 +1348,9 @@
if (policyDesc != nullptr) {
policyDesc->mPolicyMix = primaryMix;
*output = policyDesc->mIoHandle;
- *selectedDeviceId = policyMixDevice != nullptr ? policyMixDevice->getId()
- : AUDIO_PORT_HANDLE_NONE;
+ if (policyMixDevice != nullptr) {
+ selectedDeviceIds->push_back(policyMixDevice->getId());
+ }
if ((policyDesc->mFlags & AUDIO_OUTPUT_FLAG_DIRECT) != AUDIO_OUTPUT_FLAG_DIRECT) {
// Remove direct flag as it is not on a direct output.
*flags = (audio_output_flags_t) (*flags & ~AUDIO_OUTPUT_FLAG_DIRECT);
@@ -1485,11 +1487,13 @@
return INVALID_OPERATION;
}
- *selectedDeviceId = getFirstDeviceId(outputDevices);
for (auto &outputDevice : outputDevices) {
- if (outputDevice->getId() == mConfig->getDefaultOutputDevice()->getId()) {
- *selectedDeviceId = outputDevice->getId();
- break;
+ if (std::find(selectedDeviceIds->begin(), selectedDeviceIds->end(),
+ outputDevice->getId()) == selectedDeviceIds->end()) {
+ selectedDeviceIds->push_back(outputDevice->getId());
+ if (outputDevice->getId() == mConfig->getDefaultOutputDevice()->getId()) {
+ std::swap(selectedDeviceIds->front(), selectedDeviceIds->back());
+ }
}
}
@@ -1499,7 +1503,8 @@
*outputType = API_OUTPUT_LEGACY;
}
- ALOGV("%s returns output %d selectedDeviceId %d", __func__, *output, *selectedDeviceId);
+ ALOGV("%s returns output %d selectedDeviceIds %s", __func__, *output,
+ toString(*selectedDeviceIds).c_str());
return NO_ERROR;
}
@@ -1511,7 +1516,7 @@
const AttributionSourceState& attributionSource,
audio_config_t *config,
audio_output_flags_t *flags,
- audio_port_handle_t *selectedDeviceId,
+ DeviceIdVector *selectedDeviceIds,
audio_port_handle_t *portId,
std::vector<audio_io_handle_t> *secondaryOutputs,
output_type_t *outputType,
@@ -1526,20 +1531,22 @@
}
const uid_t uid = VALUE_OR_RETURN_STATUS(
aidl2legacy_int32_t_uid_t(attributionSource.uid));
- const audio_port_handle_t requestedPortId = *selectedDeviceId;
audio_attributes_t resultAttr;
bool isRequestedDeviceForExclusiveUse = false;
std::vector<sp<AudioPolicyMix>> secondaryMixes;
- const sp<DeviceDescriptor> requestedDevice =
- mAvailableOutputDevices.getDeviceFromId(requestedPortId);
+ DeviceIdVector requestedDeviceIds = *selectedDeviceIds;
// Prevent from storing invalid requested device id in clients
- const audio_port_handle_t sanitizedRequestedPortId =
- requestedDevice != nullptr ? requestedPortId : AUDIO_PORT_HANDLE_NONE;
- *selectedDeviceId = sanitizedRequestedPortId;
+ DeviceIdVector sanitizedRequestedPortIds;
+ for (auto deviceId : *selectedDeviceIds) {
+ if (mAvailableOutputDevices.getDeviceFromId(deviceId) != nullptr) {
+ sanitizedRequestedPortIds.push_back(deviceId);
+ }
+ }
+ *selectedDeviceIds = sanitizedRequestedPortIds;
status_t status = getOutputForAttrInt(&resultAttr, output, session, attr, stream, uid,
- config, flags, selectedDeviceId, &isRequestedDeviceForExclusiveUse,
+ config, flags, selectedDeviceIds, &isRequestedDeviceForExclusiveUse,
secondaryOutputs != nullptr ? &secondaryMixes : nullptr, outputType, isSpatialized,
isBitPerfect);
if (status != NO_ERROR) {
@@ -1564,9 +1571,10 @@
*portId = PolicyAudioPort::getNextUniqueId();
sp<SwAudioOutputDescriptor> outputDesc = mOutputs.valueFor(*output);
+ // TODO(b/367816690): Add device id sets to TrackClientDescriptor
sp<TrackClientDescriptor> clientDesc =
new TrackClientDescriptor(*portId, uid, session, resultAttr, clientConfig,
- sanitizedRequestedPortId, *stream,
+ getFirstDeviceId(sanitizedRequestedPortIds), *stream,
mEngine->getProductStrategyForAttributes(resultAttr),
toVolumeSource(resultAttr),
*flags, isRequestedDeviceForExclusiveUse,
@@ -1577,8 +1585,9 @@
*volume = Volume::DbToAmpl(outputDesc->getCurVolume(toVolumeSource(resultAttr)));
*muted = outputDesc->isMutedByGroup(toVolumeSource(resultAttr));
- ALOGV("%s() returns output %d requestedPortId %d selectedDeviceId %d for port ID %d", __func__,
- *output, requestedPortId, *selectedDeviceId, *portId);
+ ALOGV("%s() returns output %d requestedPortIds %s selectedDeviceIds %s for port ID %d",
+ __func__, *output, toString(requestedDeviceIds).c_str(),
+ toString(*selectedDeviceIds).c_str(), *portId);
return NO_ERROR;
}
@@ -3564,19 +3573,26 @@
ALOGI("%s: deviceType 0x%X, enabled %d, streamToDriveAbs %d", __func__, deviceType, enabled,
streamToDriveAbs);
- if (!enabled) {
- mAbsoluteVolumeDrivingStreams.erase(deviceType);
- return NO_ERROR;
- }
-
audio_attributes_t attributesToDriveAbs = mEngine->getAttributesForStreamType(streamToDriveAbs);
- if (attributesToDriveAbs == AUDIO_ATTRIBUTES_INITIALIZER) {
- ALOGW("%s: no attributes for stream %s, bailing out", __func__,
- toString(streamToDriveAbs).c_str());
- return BAD_VALUE;
+ if (enabled) {
+ if (attributesToDriveAbs == AUDIO_ATTRIBUTES_INITIALIZER) {
+ ALOGW("%s: no attributes for stream %s, bailing out", __func__,
+ toString(streamToDriveAbs).c_str());
+ return BAD_VALUE;
+ }
+
+ mAbsoluteVolumeDrivingStreams[deviceType] = attributesToDriveAbs;
+ } else {
+ mAbsoluteVolumeDrivingStreams.erase(deviceType);
}
- mAbsoluteVolumeDrivingStreams[deviceType] = attributesToDriveAbs;
+ // apply the stream volumes regarding the new absolute mode to all the outputs
+ for (size_t i = 0; i < mOutputs.size(); i++) {
+ sp<SwAudioOutputDescriptor> desc = mOutputs.valueAt(i);
+ ALOGV("%s: apply stream volumes for portId %d", __func__, desc->getId());
+ applyStreamVolumes(desc, {deviceType}, static_cast<int>(desc->latency()) * 2);
+ }
+
return NO_ERROR;
}
@@ -3775,7 +3791,7 @@
if (isVolumeConsistentForCalls(vs, {mCallRxSourceClient->sinkDevice()->type()},
isVoiceVolSrc, isBtScoVolSrc, __func__)
&& (isVoiceVolSrc || isBtScoVolSrc)) {
- bool voiceVolumeManagedByHost = isVoiceVolSrc &&
+ bool voiceVolumeManagedByHost = !isBtScoVolSrc &&
!audio_is_ble_out_device(mCallRxSourceClient->sinkDevice()->type());
setVoiceVolume(index, curves, voiceVolumeManagedByHost, 0);
}
@@ -5596,14 +5612,14 @@
: audio_channel_mask_in_to_out(sourceMask);
config.format = sourceDesc->config().format;
audio_output_flags_t flags = AUDIO_OUTPUT_FLAG_NONE;
- audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
+ DeviceIdVector selectedDeviceIds;
bool isRequestedDeviceForExclusiveUse = false;
output_type_t outputType;
bool isSpatialized;
bool isBitPerfect;
getOutputForAttrInt(&resultAttr, &output, AUDIO_SESSION_NONE, &attributes,
&stream, sourceDesc->uid(), &config, &flags,
- &selectedDeviceId, &isRequestedDeviceForExclusiveUse,
+ &selectedDeviceIds, &isRequestedDeviceForExclusiveUse,
nullptr, &outputType, &isSpatialized, &isBitPerfect);
if (output == AUDIO_IO_HANDLE_NONE) {
ALOGV("%s no output for device %s",
@@ -8334,7 +8350,9 @@
VolumeSource vsToDriveAbs = toVolumeSource(groupToDriveAbs);
if (vsToDriveAbs == volumeSource) {
// attenuation is applied by the abs volume controller
- return (index != 0) ? volumeDbMax : volumeDb;
+ // do not mute LE broadcast to allow the secondary device to continue playing
+ return (index != 0 || volumeDevice == AUDIO_DEVICE_OUT_BLE_BROADCAST) ? volumeDbMax
+ : volumeDb;
} else {
IVolumeCurves &curvesAbs = getVolumeCurves(vsToDriveAbs);
int indexAbs = curvesAbs.getVolumeIndex({volumeDevice});
@@ -8581,7 +8599,7 @@
deviceTypes, delayMs, force, isVoiceVolSrc);
if (outputDesc == mPrimaryOutput && (isVoiceVolSrc || isBtScoVolSrc)) {
- bool voiceVolumeManagedByHost = isVoiceVolSrc &&
+ bool voiceVolumeManagedByHost = !isBtScoVolSrc &&
!isSingleDeviceType(deviceTypes, audio_is_ble_out_device);
setVoiceVolume(index, curves, voiceVolumeManagedByHost, delayMs);
}
@@ -8716,8 +8734,12 @@
bool AudioPolicyManager::isValidAttributes(const audio_attributes_t *paa)
{
+ if ((paa->flags & AUDIO_FLAG_SCO) != 0) {
+ ALOGW("%s: deprecated use of AUDIO_FLAG_SCO in attributes flags %d", __func__, paa->flags);
+ }
+
// has flags that map to a stream type?
- if ((paa->flags & (AUDIO_FLAG_AUDIBILITY_ENFORCED | AUDIO_FLAG_SCO | AUDIO_FLAG_BEACON)) != 0) {
+ if ((paa->flags & (AUDIO_FLAG_AUDIBILITY_ENFORCED | AUDIO_FLAG_BEACON)) != 0) {
return true;
}
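
AUDIO_FLAG_SCO is demoted from "maps to a stream type" to a logged deprecation, so attributes carrying only that flag now fall through to the usage switch below. A sketch of the gate after the change (flag values illustrative):

    #include <cstdint>

    constexpr uint32_t AUDIO_FLAG_AUDIBILITY_ENFORCED = 1u << 0;  // illustrative
    constexpr uint32_t AUDIO_FLAG_BEACON              = 1u << 3;  // illustrative

    // SCO no longer short-circuits validation; only these two flags do.
    bool flagsShortCircuitValidation(uint32_t flags) {
        return (flags & (AUDIO_FLAG_AUDIBILITY_ENFORCED | AUDIO_FLAG_BEACON)) != 0;
    }
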
@@ -8745,6 +8767,7 @@
case AUDIO_USAGE_SAFETY:
case AUDIO_USAGE_VEHICLE_STATUS:
case AUDIO_USAGE_ANNOUNCEMENT:
+ case AUDIO_USAGE_SPEAKER_CLEANUP:
break;
default:
return false;
diff --git a/services/audiopolicy/managerdefault/AudioPolicyManager.h b/services/audiopolicy/managerdefault/AudioPolicyManager.h
index 1ca0c32..e0cafd4 100644
--- a/services/audiopolicy/managerdefault/AudioPolicyManager.h
+++ b/services/audiopolicy/managerdefault/AudioPolicyManager.h
@@ -123,7 +123,7 @@
const AttributionSourceState& attributionSource,
audio_config_t *config,
audio_output_flags_t *flags,
- audio_port_handle_t *selectedDeviceId,
+ DeviceIdVector *selectedDeviceIds,
audio_port_handle_t *portId,
std::vector<audio_io_handle_t> *secondaryOutputs,
output_type_t *outputType,
@@ -893,15 +893,7 @@
return mAvailableInputDevices.getDevicesFromHwModule(
mPrimaryOutput->getModuleHandle());
}
- /**
- * @brief getFirstDeviceId of the Device Vector
- * @return if the collection is not empty, it returns the first device Id,
- * otherwise AUDIO_PORT_HANDLE_NONE
- */
- audio_port_handle_t getFirstDeviceId(const DeviceVector &devices) const
- {
- return (devices.size() > 0) ? devices.itemAt(0)->getId() : AUDIO_PORT_HANDLE_NONE;
- }
+
String8 getFirstDeviceAddress(const DeviceVector &devices) const
{
return (devices.size() > 0) ?
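
getFirstDeviceId(const DeviceVector&) is deleted from the manager header, yet service code later in this diff still calls getFirstDeviceId(client->deviceIds) on a DeviceIdVector, so an equivalent helper presumably now lives in a shared utility. A sketch of what that replacement would look like (hypothetical placement; the empty-vector fallback mirrors the deleted doc comment):

    #include <cstdint>
    #include <vector>

    using audio_port_handle_t = int32_t;                       // stand-in typedef
    constexpr audio_port_handle_t AUDIO_PORT_HANDLE_NONE = 0;  // illustrative value
    using DeviceIdVector = std::vector<audio_port_handle_t>;

    // First device id of the vector, or AUDIO_PORT_HANDLE_NONE when empty.
    audio_port_handle_t getFirstDeviceId(const DeviceIdVector& ids) {
        return ids.empty() ? AUDIO_PORT_HANDLE_NONE : ids[0];
    }
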
@@ -1142,7 +1134,7 @@
uid_t uid,
audio_config_t *config,
audio_output_flags_t *flags,
- audio_port_handle_t *selectedDeviceId,
+ DeviceIdVector *selectedDeviceIds,
bool *isRequestedDeviceForExclusiveUse,
std::vector<sp<AudioPolicyMix>> *secondaryMixes,
output_type_t *outputType,
diff --git a/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp b/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp
index 1ffa176..3589de1 100644
--- a/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp
+++ b/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp
@@ -24,6 +24,7 @@
#include <android/content/AttributionSourceState.h>
#include <android_media_audiopolicy.h>
#include <com_android_media_audio.h>
+#include <cutils/properties.h>
#include <error/expected_utils.h>
#include <media/AidlConversion.h>
#include <media/AudioPolicy.h>
@@ -369,7 +370,7 @@
const AttributionSourceState& attributionSource,
const AudioConfig& configAidl,
int32_t flagsAidl,
- int32_t selectedDeviceIdAidl,
+ const std::vector<int32_t>& selectedDeviceIdsAidl,
media::GetOutputForAttrResponse* _aidl_return)
{
audio_attributes_t attr = VALUE_OR_RETURN_BINDER_STATUS(
@@ -381,8 +382,9 @@
aidl2legacy_AudioConfig_audio_config_t(configAidl, false /*isInput*/));
audio_output_flags_t flags = VALUE_OR_RETURN_BINDER_STATUS(
aidl2legacy_int32_t_audio_output_flags_t_mask(flagsAidl));
- audio_port_handle_t selectedDeviceId = VALUE_OR_RETURN_BINDER_STATUS(
- aidl2legacy_int32_t_audio_port_handle_t(selectedDeviceIdAidl));
+ DeviceIdVector selectedDeviceIds = VALUE_OR_RETURN_BINDER_STATUS(
+ convertContainer<DeviceIdVector>(selectedDeviceIdsAidl,
+ aidl2legacy_int32_t_audio_port_handle_t));
audio_io_handle_t output;
audio_port_handle_t portId;
@@ -425,6 +427,17 @@
}
}
+ // TODO: this permission check should extend to all system usages
+ if (attr.usage == AUDIO_USAGE_SPEAKER_CLEANUP) {
+ if (!(audioserver_permissions() ?
+ CHECK_PERM(MODIFY_AUDIO_ROUTING, attributionSource.uid)
+ : modifyAudioRoutingAllowed())) {
+ ALOGE("%s: permission denied: SPEAKER_CLEANUP not allowed for uid %d pid %d",
+ __func__, attributionSource.uid, attributionSource.pid);
+ return binderStatusFromStatusT(PERMISSION_DENIED);
+ }
+ }
+
AutoCallerClear acc;
AudioPolicyInterface::output_type_t outputType;
bool isSpatialized = false;
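
The new SPEAKER_CLEANUP usage is gated on MODIFY_AUDIO_ROUTING before any output is handed out, with the TODO noting that the same gate should eventually cover all system usages. Its control flow, reduced to booleans (permission plumbing elided; status values illustrative):

    enum Status { OK = 0, PERMISSION_DENIED = -1 };  // illustrative values

    // Mirrors the early return above: non-system callers are rejected before
    // any output is opened; other usages pass through untouched here.
    Status checkSpeakerCleanup(bool isSpeakerCleanupUsage, bool hasModifyAudioRouting) {
        if (isSpeakerCleanupUsage && !hasModifyAudioRouting) {
            return PERMISSION_DENIED;
        }
        return OK;
    }
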
@@ -435,7 +448,7 @@
&stream,
attributionSource,
&config,
- &flags, &selectedDeviceId, &portId,
+ &flags, &selectedDeviceIds, &portId,
&secondaryOutputs,
&outputType,
&isSpatialized,
@@ -482,20 +495,24 @@
}
if (result == NO_ERROR) {
- attr = VALUE_OR_RETURN_BINDER_STATUS(
- mUsecaseValidator->verifyAudioAttributes(output, attributionSource, attr));
+ // usecase validator is disabled by default
+ if (property_get_bool("ro.audio.usecase_validator_enabled", false /* default */)) {
+ attr = VALUE_OR_RETURN_BINDER_STATUS(
+ mUsecaseValidator->verifyAudioAttributes(output, attributionSource, attr));
+ }
sp<AudioPlaybackClient> client =
new AudioPlaybackClient(attr, output, attributionSource, session,
- portId, selectedDeviceId, stream, isSpatialized, config.channel_mask);
+ portId, selectedDeviceIds, stream, isSpatialized, config.channel_mask);
mAudioPlaybackClients.add(portId, client);
_aidl_return->output = VALUE_OR_RETURN_BINDER_STATUS(
legacy2aidl_audio_io_handle_t_int32_t(output));
_aidl_return->stream = VALUE_OR_RETURN_BINDER_STATUS(
legacy2aidl_audio_stream_type_t_AudioStreamType(stream));
- _aidl_return->selectedDeviceId = VALUE_OR_RETURN_BINDER_STATUS(
- legacy2aidl_audio_port_handle_t_int32_t(selectedDeviceId));
+ _aidl_return->selectedDeviceIds = VALUE_OR_RETURN_BINDER_STATUS(
+ convertContainer<std::vector<int32_t>>(selectedDeviceIds,
+ legacy2aidl_audio_port_handle_t_int32_t));
_aidl_return->portId = VALUE_OR_RETURN_BINDER_STATUS(
legacy2aidl_audio_port_handle_t_int32_t(portId));
_aidl_return->secondaryOutputs = VALUE_OR_RETURN_BINDER_STATUS(
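
The usecase validator now runs only when ro.audio.usecase_validator_enabled is set, defaulting off; property_get_bool is the standard libcutils accessor. A sketch of the gating pattern (verify() stands in for the verifyAudioAttributes call):

    #include <cutils/properties.h>

    // Run the opt-in validation only on builds that set the system property.
    template <typename Fn>
    void maybeVerifyUsecase(Fn&& verify) {
        if (property_get_bool("ro.audio.usecase_validator_enabled", false /* default */)) {
            verify();
        }
    }
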
@@ -853,8 +870,9 @@
return binderStatusFromStatusT(status);
}
+ DeviceIdVector selectedDeviceIds = { selectedDeviceId };
sp<AudioRecordClient> client = new AudioRecordClient(attr, input, session, portId,
- selectedDeviceId, attributionSource,
+ selectedDeviceIds, attributionSource,
virtualDeviceId,
canCaptureOutput, canCaptureHotword,
mOutputCommandThread);
@@ -888,6 +906,17 @@
return {};
}
+std::string AudioPolicyService::getDeviceTypeStrForPortIds(DeviceIdVector portIds) {
+ std::string output = {};
+ for (auto it = portIds.begin(); it != portIds.end(); ++it) {
+ if (it != portIds.begin()) {
+ output += ", ";
+ }
+ output += getDeviceTypeStrForPortId(*it);
+ }
+ return output;
+}
+
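
getDeviceTypeStrForPortIds joins the per-port type strings with ", " for the new mediametrics field added below. Equivalent standalone join logic, with the per-port lookup stubbed out:

    #include <cstdint>
    #include <string>
    #include <vector>

    // Stands in for the lookup getDeviceTypeStrForPortId() performs.
    std::string typeForPort(int32_t portId) { return "TYPE_" + std::to_string(portId); }

    // Same join logic as the new helper: comma-space separated, in order.
    std::string joinPortTypes(const std::vector<int32_t>& portIds) {
        std::string out;
        for (size_t i = 0; i < portIds.size(); ++i) {
            if (i != 0) out += ", ";
            out += typeForPort(portIds[i]);
        }
        return out;  // {1, 2} -> "TYPE_1, TYPE_2"
    }
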
Status AudioPolicyService::startInput(int32_t portIdAidl)
{
audio_port_handle_t portId = VALUE_OR_RETURN_BINDER_STATUS(
@@ -909,13 +938,12 @@
std::stringstream msg;
msg << "Audio recording on session " << client->session;
+
const auto permitted = startRecording(client->attributionSource, client->virtualDeviceId,
String16(msg.str().c_str()), client->attributes.source);
// check calling permissions
- if (permitted == PERMISSION_HARD_DENIED && client->attributes.source != AUDIO_SOURCE_FM_TUNER
- && client->attributes.source != AUDIO_SOURCE_REMOTE_SUBMIX
- && client->attributes.source != AUDIO_SOURCE_ECHO_REFERENCE) {
+ if (permitted == PERMISSION_HARD_DENIED) {
ALOGE("%s permission denied: recording not allowed for attribution source %s",
__func__, client->attributionSource.toString().c_str());
return binderStatusFromStatusT(PERMISSION_DENIED);
@@ -978,6 +1006,8 @@
"android.media.audiopolicy.active.session";
static constexpr char kAudioPolicyActiveDevice[] =
"android.media.audiopolicy.active.device";
+ static constexpr char kAudioPolicyActiveDevices[] =
+ "android.media.audiopolicy.active.devices";
mediametrics::Item *item = mediametrics::Item::create(kAudioPolicy);
if (item != NULL) {
@@ -995,8 +1025,8 @@
item->setCString(kAudioPolicyRqstPkg,
std::to_string(client->attributionSource.uid).c_str());
}
- item->setCString(
- kAudioPolicyRqstDevice, getDeviceTypeStrForPortId(client->deviceId).c_str());
+ item->setCString(kAudioPolicyRqstDevice,
+ getDeviceTypeStrForPortId(getFirstDeviceId(client->deviceIds)).c_str());
int count = mAudioRecordClients.size();
for (int i = 0; i < count ; i++) {
@@ -1018,7 +1048,9 @@
other->attributionSource.uid).c_str());
}
item->setCString(kAudioPolicyActiveDevice,
- getDeviceTypeStrForPortId(other->deviceId).c_str());
+ getDeviceTypeStrForPortId(getFirstDeviceId(other->deviceIds)).c_str());
+ item->setCString(kAudioPolicyActiveDevices,
+ getDeviceTypeStrForPortIds(other->deviceIds).c_str());
}
}
item->selfrecord();
diff --git a/services/audiopolicy/service/AudioPolicyService.h b/services/audiopolicy/service/AudioPolicyService.h
index 44a0e7d..eeac9a6 100644
--- a/services/audiopolicy/service/AudioPolicyService.h
+++ b/services/audiopolicy/service/AudioPolicyService.h
@@ -117,7 +117,7 @@
int32_t session,
const AttributionSourceState &attributionSource,
const AudioConfig& config,
- int32_t flags, int32_t selectedDeviceId,
+ int32_t flags, const std::vector<int32_t>& selectedDeviceIds,
media::GetOutputForAttrResponse* _aidl_return) override;
binder::Status startOutput(int32_t portId) override;
binder::Status stopOutput(int32_t portId) override;
@@ -474,6 +474,8 @@
std::string getDeviceTypeStrForPortId(audio_port_handle_t portId);
+ std::string getDeviceTypeStrForPortIds(DeviceIdVector portIds);
+
status_t getAudioPolicyEffects(sp<AudioPolicyEffects>& audioPolicyEffects);
app_state_t apmStatFromAmState(int amState);
@@ -1015,10 +1017,10 @@
const audio_io_handle_t io,
const AttributionSourceState& attributionSource,
const audio_session_t session, audio_port_handle_t portId,
- const audio_port_handle_t deviceId) :
+ const DeviceIdVector deviceIds) :
attributes(attributes), io(io), attributionSource(
attributionSource), session(session), portId(portId),
- deviceId(deviceId), active(false) {}
+ deviceIds(deviceIds), active(false) {}
~AudioClient() override = default;
@@ -1027,7 +1029,7 @@
const AttributionSourceState attributionSource; //client attributionsource
const audio_session_t session; // audio session ID
const audio_port_handle_t portId;
- const audio_port_handle_t deviceId; // selected input device port ID
+ const DeviceIdVector deviceIds; // selected input device port IDs
bool active; // Playback/Capture is active or inactive
};
private:
@@ -1042,10 +1044,10 @@
AudioPlaybackClient(const audio_attributes_t attributes,
const audio_io_handle_t io, AttributionSourceState attributionSource,
const audio_session_t session, audio_port_handle_t portId,
- audio_port_handle_t deviceId, audio_stream_type_t stream,
+ DeviceIdVector deviceIds, audio_stream_type_t stream,
bool isSpatialized, audio_channel_mask_t channelMask) :
AudioClient(attributes, io, attributionSource, session, portId,
- deviceId), stream(stream), isSpatialized(isSpatialized),
+ deviceIds), stream(stream), isSpatialized(isSpatialized),
channelMask(channelMask) {}
~AudioPlaybackClient() override = default;
diff --git a/services/audiopolicy/service/AudioRecordClient.cpp b/services/audiopolicy/service/AudioRecordClient.cpp
index 733f0d6..fd344d9 100644
--- a/services/audiopolicy/service/AudioRecordClient.cpp
+++ b/services/audiopolicy/service/AudioRecordClient.cpp
@@ -19,6 +19,7 @@
#include "AudioRecordClient.h"
#include "AudioPolicyService.h"
#include "binder/AppOpsManager.h"
+#include "mediautils/ServiceUtilities.h"
#include <android_media_audiopolicy.h>
#include <algorithm>
@@ -118,16 +119,20 @@
}
return new OpRecordAudioMonitor(attributionSource, virtualDeviceId, attr,
- getOpForSource(attr.source), commandThread);
+ getOpForSource(attr.source),
+ isRecordOpRequired(attr.source),
+ commandThread);
}
OpRecordAudioMonitor::OpRecordAudioMonitor(
const AttributionSourceState &attributionSource,
const uint32_t virtualDeviceId, const audio_attributes_t &attr,
int32_t appOp,
+ bool shouldMonitorRecord,
wp<AudioPolicyService::AudioCommandThread> commandThread) :
mHasOp(true), mAttributionSource(attributionSource),
mVirtualDeviceId(virtualDeviceId), mAttr(attr), mAppOp(appOp),
+ mShouldMonitorRecord(shouldMonitorRecord),
mCommandThread(commandThread) {
}
@@ -160,7 +165,7 @@
});
};
reg(mAppOp);
- if (mAppOp != AppOpsManager::OP_RECORD_AUDIO) {
+ if (mAppOp != AppOpsManager::OP_RECORD_AUDIO && mShouldMonitorRecord) {
reg(AppOpsManager::OP_RECORD_AUDIO);
}
}
@@ -186,7 +191,7 @@
});
};
bool hasIt = check(mAppOp);
- if (mAppOp != AppOpsManager::OP_RECORD_AUDIO) {
+ if (mAppOp != AppOpsManager::OP_RECORD_AUDIO && mShouldMonitorRecord) {
hasIt = hasIt && check(AppOpsManager::OP_RECORD_AUDIO);
}
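
With mShouldMonitorRecord, sources whose app-op is not OP_RECORD_AUDIO only track OP_RECORD_AUDIO in addition when isRecordOpRequired(source) says so; otherwise the source-specific op alone decides. The effective grant check after both hunks, as a sketch:

    // Whether the client holds the op(s) needed to stay unsilenced.
    bool hasRequiredOps(bool hasSourceOp, bool hasRecordOp,
                        bool sourceOpIsRecordAudio, bool shouldMonitorRecord) {
        bool ok = hasSourceOp;
        if (!sourceOpIsRecordAudio && shouldMonitorRecord) {
            ok = ok && hasRecordOp;  // both ops must be granted
        }
        return ok;
    }
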
diff --git a/services/audiopolicy/service/AudioRecordClient.h b/services/audiopolicy/service/AudioRecordClient.h
index 76aff41..76bc17a 100644
--- a/services/audiopolicy/service/AudioRecordClient.h
+++ b/services/audiopolicy/service/AudioRecordClient.h
@@ -47,6 +47,7 @@
uint32_t virtualDeviceId,
const audio_attributes_t &attr,
int32_t appOp,
+ bool shouldMonitorRecord,
wp<AudioPolicyService::AudioCommandThread> commandThread);
void onFirstRef() override;
@@ -74,6 +75,7 @@
const uint32_t mVirtualDeviceId;
const audio_attributes_t mAttr;
const int32_t mAppOp;
+ const bool mShouldMonitorRecord;
wp<AudioPolicyService::AudioCommandThread> mCommandThread;
};
@@ -85,13 +87,13 @@
AudioRecordClient(const audio_attributes_t attributes,
const audio_io_handle_t io,
const audio_session_t session, audio_port_handle_t portId,
- const audio_port_handle_t deviceId,
+ const DeviceIdVector deviceIds,
const AttributionSourceState& attributionSource,
const uint32_t virtualDeviceId,
bool canCaptureOutput, bool canCaptureHotword,
wp<AudioPolicyService::AudioCommandThread> commandThread) :
AudioClient(attributes, io, attributionSource,
- session, portId, deviceId), attributionSource(attributionSource),
+ session, portId, deviceIds), attributionSource(attributionSource),
virtualDeviceId(virtualDeviceId),
startTimeNs(0), canCaptureOutput(canCaptureOutput),
canCaptureHotword(canCaptureHotword), silenced(false),
diff --git a/services/audiopolicy/tests/audiopolicymanager_tests.cpp b/services/audiopolicy/tests/audiopolicymanager_tests.cpp
index e901cfd..c37540c 100644
--- a/services/audiopolicy/tests/audiopolicymanager_tests.cpp
+++ b/services/audiopolicy/tests/audiopolicymanager_tests.cpp
@@ -191,7 +191,7 @@
// When explicit routing is needed, selectedDeviceIds needs to contain the wanted port
// ids. Otherwise, selectedDeviceIds needs to be passed in as an empty vector.
void getOutputForAttr(
- audio_port_handle_t *selectedDeviceId,
+ DeviceIdVector *selectedDeviceIds,
audio_format_t format,
audio_channel_mask_t channelMask,
int sampleRate,
@@ -284,7 +284,7 @@
}
void AudioPolicyManagerTest::getOutputForAttr(
- audio_port_handle_t *selectedDeviceId,
+ DeviceIdVector *selectedDeviceIds,
audio_format_t format,
audio_channel_mask_t channelMask,
int sampleRate,
@@ -314,7 +314,7 @@
AttributionSourceState attributionSource = createAttributionSourceState(uid);
ASSERT_EQ(OK, mManager->getOutputForAttr(
&attr, output, session, &stream, attributionSource, &config, &flags,
- selectedDeviceId, portId, {}, &outputType, &isSpatialized,
+ selectedDeviceIds, portId, {}, &outputType, &isSpatialized,
isBitPerfect == nullptr ? &isBitPerfectInternal : isBitPerfect, &volume,
&muted));
ASSERT_NE(AUDIO_PORT_HANDLE_NONE, *portId);
@@ -648,42 +648,42 @@
TEST_P(AudioPolicyManagerTestMsd, GetOutputForAttrEncodedRoutesToMsd) {
const PatchCountCheck patchCount = snapshotPatchCount();
- audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
- getOutputForAttr(&selectedDeviceId, AUDIO_FORMAT_AC3, AUDIO_CHANNEL_OUT_5POINT1,
+ DeviceIdVector selectedDeviceIds;
+ getOutputForAttr(&selectedDeviceIds, AUDIO_FORMAT_AC3, AUDIO_CHANNEL_OUT_5POINT1,
k48000SamplingRate, AUDIO_OUTPUT_FLAG_DIRECT);
- ASSERT_EQ(selectedDeviceId, mDefaultOutputDevice->getId());
+ ASSERT_EQ(mDefaultOutputDevice->getId(), selectedDeviceIds[0]);
ASSERT_EQ(mExpectedAudioPatchCount, patchCount.deltaFromSnapshot());
}
TEST_P(AudioPolicyManagerTestMsd, GetOutputForAttrPcmRoutesToMsd) {
const PatchCountCheck patchCount = snapshotPatchCount();
- audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
- getOutputForAttr(&selectedDeviceId,
+ DeviceIdVector selectedDeviceIds;
+ getOutputForAttr(&selectedDeviceIds,
AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO, k48000SamplingRate);
- ASSERT_EQ(selectedDeviceId, mDefaultOutputDevice->getId());
+ ASSERT_EQ(mDefaultOutputDevice->getId(), selectedDeviceIds[0]);
ASSERT_EQ(mExpectedAudioPatchCount - 1, patchCount.deltaFromSnapshot());
}
TEST_P(AudioPolicyManagerTestMsd, GetOutputForAttrEncodedPlusPcmRoutesToMsd) {
const PatchCountCheck patchCount = snapshotPatchCount();
- audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
- getOutputForAttr(&selectedDeviceId, AUDIO_FORMAT_AC3, AUDIO_CHANNEL_OUT_5POINT1,
+ DeviceIdVector selectedDeviceIds;
+ getOutputForAttr(&selectedDeviceIds, AUDIO_FORMAT_AC3, AUDIO_CHANNEL_OUT_5POINT1,
k48000SamplingRate, AUDIO_OUTPUT_FLAG_DIRECT);
- ASSERT_EQ(selectedDeviceId, mDefaultOutputDevice->getId());
+ ASSERT_EQ(mDefaultOutputDevice->getId(), selectedDeviceIds[0]);
ASSERT_EQ(mExpectedAudioPatchCount, patchCount.deltaFromSnapshot());
- selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
- getOutputForAttr(&selectedDeviceId,
+ selectedDeviceIds.clear();
+ getOutputForAttr(&selectedDeviceIds,
AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO, k48000SamplingRate);
- ASSERT_EQ(selectedDeviceId, mDefaultOutputDevice->getId());
+ ASSERT_EQ(mDefaultOutputDevice->getId(), selectedDeviceIds[0]);
ASSERT_EQ(mExpectedAudioPatchCount, patchCount.deltaFromSnapshot());
}
TEST_P(AudioPolicyManagerTestMsd, GetOutputForAttrUnsupportedFormatBypassesMsd) {
const PatchCountCheck patchCount = snapshotPatchCount();
- audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
- getOutputForAttr(&selectedDeviceId, AUDIO_FORMAT_DTS, AUDIO_CHANNEL_OUT_5POINT1,
+ DeviceIdVector selectedDeviceIds;
+ getOutputForAttr(&selectedDeviceIds, AUDIO_FORMAT_DTS, AUDIO_CHANNEL_OUT_5POINT1,
k48000SamplingRate, AUDIO_OUTPUT_FLAG_DIRECT);
- ASSERT_NE(selectedDeviceId, mMsdOutputDevice->getId());
+ ASSERT_NE(mMsdOutputDevice->getId(), selectedDeviceIds[0]);
ASSERT_EQ(1, patchCount.deltaFromSnapshot());
}
@@ -691,32 +691,33 @@
// Switch between formats that are supported and not supported by MSD.
{
const PatchCountCheck patchCount = snapshotPatchCount();
- audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
+ DeviceIdVector selectedDeviceIds;
audio_port_handle_t portId;
- getOutputForAttr(&selectedDeviceId, AUDIO_FORMAT_AC3, AUDIO_CHANNEL_OUT_5POINT1,
+ getOutputForAttr(&selectedDeviceIds, AUDIO_FORMAT_AC3, AUDIO_CHANNEL_OUT_5POINT1,
k48000SamplingRate, AUDIO_OUTPUT_FLAG_DIRECT, nullptr /*output*/, &portId);
- ASSERT_EQ(selectedDeviceId, mDefaultOutputDevice->getId());
+ ASSERT_EQ(mDefaultOutputDevice->getId(), selectedDeviceIds[0]);
ASSERT_EQ(mExpectedAudioPatchCount, patchCount.deltaFromSnapshot());
mManager->releaseOutput(portId);
ASSERT_EQ(mExpectedAudioPatchCount - 1, patchCount.deltaFromSnapshot());
}
{
const PatchCountCheck patchCount = snapshotPatchCount();
- audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
+ DeviceIdVector selectedDeviceIds;
audio_port_handle_t portId;
- getOutputForAttr(&selectedDeviceId, AUDIO_FORMAT_DTS, AUDIO_CHANNEL_OUT_5POINT1,
+ getOutputForAttr(&selectedDeviceIds, AUDIO_FORMAT_DTS, AUDIO_CHANNEL_OUT_5POINT1,
k48000SamplingRate, AUDIO_OUTPUT_FLAG_DIRECT, nullptr /*output*/, &portId);
- ASSERT_NE(selectedDeviceId, mMsdOutputDevice->getId());
+ ASSERT_GT(selectedDeviceIds.size(), 0);
+ ASSERT_NE(mMsdOutputDevice->getId(), selectedDeviceIds[0]);
ASSERT_EQ(-static_cast<int>(mExpectedAudioPatchCount) + 2, patchCount.deltaFromSnapshot());
mManager->releaseOutput(portId);
ASSERT_EQ(0, patchCount.deltaFromSnapshot());
}
{
const PatchCountCheck patchCount = snapshotPatchCount();
- audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
- getOutputForAttr(&selectedDeviceId, AUDIO_FORMAT_AC3, AUDIO_CHANNEL_OUT_5POINT1,
+ DeviceIdVector selectedDeviceIds;
+ getOutputForAttr(&selectedDeviceIds, AUDIO_FORMAT_AC3, AUDIO_CHANNEL_OUT_5POINT1,
k48000SamplingRate, AUDIO_OUTPUT_FLAG_DIRECT);
- ASSERT_EQ(selectedDeviceId, mDefaultOutputDevice->getId());
+ ASSERT_EQ(mDefaultOutputDevice->getId(), selectedDeviceIds[0]);
ASSERT_EQ(1, patchCount.deltaFromSnapshot());
}
}
@@ -1138,14 +1139,14 @@
&mediaAttr, usbPortId, uid, &mixerAttributes[0]));
audio_io_handle_t output = AUDIO_IO_HANDLE_NONE;
- audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
+ DeviceIdVector selectedDeviceIds;
audio_port_handle_t portId = AUDIO_PORT_HANDLE_NONE;
- getOutputForAttr(&selectedDeviceId, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
+ getOutputForAttr(&selectedDeviceIds, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
k48000SamplingRate, AUDIO_OUTPUT_FLAG_NONE, &output, &portId, mediaAttr,
AUDIO_SESSION_NONE, uid);
status_t status = mManager->startOutput(portId);
if (status == DEAD_OBJECT) {
- getOutputForAttr(&selectedDeviceId, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
+ getOutputForAttr(&selectedDeviceIds, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
k48000SamplingRate, AUDIO_OUTPUT_FLAG_NONE, &output, &portId, mediaAttr,
AUDIO_SESSION_NONE, uid);
status = mManager->startOutput(portId);
@@ -1172,6 +1173,56 @@
"", "", AUDIO_FORMAT_LDAC));
}
+template <typename T>
+bool hasDuplicates(const T& container) {
+ return std::unordered_set<typename T::value_type>(container.begin(),
+ container.end()).size() != container.size();
+}
+
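
The helper detects duplicates by comparing the container's size against a set built from its elements; any standard container of hashable values works. A self-contained usage example of the same shape:

    #include <cassert>
    #include <unordered_set>
    #include <vector>

    template <typename T>
    bool hasDuplicates(const T& c) {   // same shape as the test helper above
        return std::unordered_set<typename T::value_type>(c.begin(), c.end()).size()
                != c.size();
    }

    int main() {
        assert(!hasDuplicates(std::vector<int>{10, 20, 30}));
        assert(hasDuplicates(std::vector<int>{10, 20, 20}));
    }
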
+TEST_F(AudioPolicyManagerTestWithConfigurationFile, UniqueSelectedDeviceIds) {
+ mClient->addSupportedFormat(AUDIO_FORMAT_PCM_16_BIT);
+ mClient->addSupportedChannelMask(AUDIO_CHANNEL_OUT_STEREO);
+ ASSERT_EQ(NO_ERROR, mManager->setDeviceConnectionState(AUDIO_DEVICE_OUT_USB_DEVICE,
+ AUDIO_POLICY_DEVICE_STATE_AVAILABLE,
+ "", "", AUDIO_FORMAT_DEFAULT));
+ auto devices = mManager->getAvailableOutputDevices();
+ audio_port_handle_t usbPortId = AUDIO_PORT_HANDLE_NONE;
+ audio_port_handle_t speakerPortId = AUDIO_PORT_HANDLE_NONE;
+ for (auto device : devices) {
+ if (device->type() == AUDIO_DEVICE_OUT_USB_DEVICE) {
+ usbPortId = device->getId();
+ }
+ if (device->type() == AUDIO_DEVICE_OUT_SPEAKER) {
+ speakerPortId = device->getId();
+ }
+ }
+ EXPECT_NE(AUDIO_PORT_HANDLE_NONE, usbPortId);
+ EXPECT_NE(AUDIO_PORT_HANDLE_NONE, speakerPortId);
+
+ const uid_t uid = 1234;
+ const audio_attributes_t mediaAttr = {
+ .content_type = AUDIO_CONTENT_TYPE_SONIFICATION,
+ .usage = AUDIO_USAGE_ALARM,
+ };
+
+ audio_io_handle_t output = AUDIO_IO_HANDLE_NONE;
+ DeviceIdVector selectedDeviceIds;
+ audio_port_handle_t portId = AUDIO_PORT_HANDLE_NONE;
+ ASSERT_NO_FATAL_FAILURE(getOutputForAttr(&selectedDeviceIds, AUDIO_FORMAT_PCM_16_BIT,
+ AUDIO_CHANNEL_OUT_STEREO, k48000SamplingRate, AUDIO_OUTPUT_FLAG_NONE, &output,
+ &portId, mediaAttr, AUDIO_SESSION_NONE, uid));
+ EXPECT_FALSE(selectedDeviceIds.empty());
+ EXPECT_NE(std::find(selectedDeviceIds.begin(), selectedDeviceIds.end(), usbPortId),
+ selectedDeviceIds.end());
+ EXPECT_NE(std::find(selectedDeviceIds.begin(), selectedDeviceIds.end(), speakerPortId),
+ selectedDeviceIds.end());
+ EXPECT_FALSE(hasDuplicates(selectedDeviceIds));
+
+ ASSERT_EQ(NO_ERROR, mManager->setDeviceConnectionState(AUDIO_DEVICE_OUT_USB_DEVICE,
+ AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE,
+ "", "", AUDIO_FORMAT_DEFAULT));
+}
+
TEST_F(AudioPolicyManagerTestWithConfigurationFile, PreferExactConfigForInput) {
const audio_channel_mask_t deviceChannelMask = AUDIO_CHANNEL_IN_3POINT1;
mClient->addSupportedFormat(AUDIO_FORMAT_PCM_16_BIT);
@@ -1953,14 +2004,15 @@
const DPTestParam param = GetParam();
const audio_attributes_t& attr = param.attributes;
- audio_port_handle_t playbackRoutedPortId = AUDIO_PORT_HANDLE_NONE;
- getOutputForAttr(&playbackRoutedPortId, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
+ DeviceIdVector playbackRoutedPortIds;
+ getOutputForAttr(&playbackRoutedPortIds, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
k48000SamplingRate, AUDIO_OUTPUT_FLAG_NONE, nullptr /*output*/, nullptr /*portId*/,
attr, param.session);
if (param.expected_match) {
- EXPECT_EQ(mInjectionPort.id, playbackRoutedPortId);
+ ASSERT_EQ(mInjectionPort.id, playbackRoutedPortIds[0]);
} else {
- EXPECT_NE(mInjectionPort.id, playbackRoutedPortId);
+ ASSERT_GT(playbackRoutedPortIds.size(), 0);
+ ASSERT_NE(mInjectionPort.id, playbackRoutedPortIds[0]);
}
}
@@ -2129,7 +2181,7 @@
audio_config_t audioConfig;
audio_io_handle_t mOutput;
audio_stream_type_t mStream = AUDIO_STREAM_DEFAULT;
- audio_port_handle_t mSelectedDeviceId = AUDIO_PORT_HANDLE_NONE;
+ DeviceIdVector mSelectedDeviceIds;
audio_port_handle_t mPortId = AUDIO_PORT_HANDLE_NONE;
AudioPolicyInterface::output_type_t mOutputType;
audio_attributes_t attr = AUDIO_ATTRIBUTES_INITIALIZER;
@@ -2154,7 +2206,7 @@
ASSERT_EQ(INVALID_OPERATION,
mManager->getOutputForAttr(&attr, &mOutput, AUDIO_SESSION_NONE, &mStream,
createAttributionSourceState(testUid), &audioConfig,
- &outputFlags, &mSelectedDeviceId, &mPortId, {},
+ &outputFlags, &mSelectedDeviceIds, &mPortId, {},
&mOutputType, &mIsSpatialized, &mIsBitPerfect, &mVolume,
&mMuted));
}
@@ -2174,7 +2226,7 @@
ASSERT_EQ(NO_ERROR,
mManager->getOutputForAttr(&attr, &mOutput, AUDIO_SESSION_NONE, &mStream,
createAttributionSourceState(testUid), &audioConfig,
- &outputFlags, &mSelectedDeviceId, &mPortId, {},
+ &outputFlags, &mSelectedDeviceIds, &mPortId, {},
&mOutputType, &mIsSpatialized, &mIsBitPerfect, &mVolume,
&mMuted));
}
@@ -2206,10 +2258,9 @@
ASSERT_EQ(NO_ERROR,
mManager->getOutputForAttr(&attr, &mOutput, AUDIO_SESSION_NONE, &mStream,
createAttributionSourceState(testUid), &audioConfig,
- &outputFlags, &mSelectedDeviceId, &mPortId, {},
+ &outputFlags, &mSelectedDeviceIds, &mPortId, {},
&mOutputType, &mIsSpatialized, &mIsBitPerfect, &mVolume,
&mMuted));
- ASSERT_EQ(usbDevicePort.id, mSelectedDeviceId);
auto outputDesc = mManager->getOutputs().valueFor(mOutput);
ASSERT_NE(nullptr, outputDesc);
ASSERT_EQ(mmapDirectFlags, outputDesc->getFlags().output);
@@ -2223,10 +2274,10 @@
ASSERT_EQ(NO_ERROR,
mManager->getOutputForAttr(&attr, &mOutput, AUDIO_SESSION_NONE, &mStream,
createAttributionSourceState(testUid), &audioConfig,
- &outputFlags, &mSelectedDeviceId, &mPortId, {},
+ &outputFlags, &mSelectedDeviceIds, &mPortId, {},
&mOutputType, &mIsSpatialized, &mIsBitPerfect, &mVolume,
&mMuted));
- ASSERT_EQ(usbDevicePort.id, mSelectedDeviceId);
+ ASSERT_EQ(usbDevicePort.id, mSelectedDeviceIds[0]);
outputDesc = mManager->getOutputs().valueFor(mOutput);
ASSERT_NE(nullptr, outputDesc);
ASSERT_NE(mmapDirectFlags, outputDesc->getFlags().output);
@@ -2253,7 +2304,7 @@
ASSERT_EQ(INVALID_OPERATION,
mManager->getOutputForAttr(&attr, &mOutput, AUDIO_SESSION_NONE, &mStream,
createAttributionSourceState(testUid), &audioConfig,
- &outputFlags, &mSelectedDeviceId, &mPortId, {},
+ &outputFlags, &mSelectedDeviceIds, &mPortId, {},
&mOutputType, &mIsSpatialized, &mIsBitPerfect, &mVolume,
&mMuted));
}
@@ -2298,13 +2349,13 @@
ASSERT_TRUE(findDevicePort(AUDIO_PORT_ROLE_SINK, AUDIO_DEVICE_OUT_REMOTE_SUBMIX,
mMixAddress, &injectionPort));
- audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
+ DeviceIdVector selectedDeviceIds;
audio_usage_t usage = AUDIO_USAGE_VIRTUAL_SOURCE;
audio_attributes_t attr =
{AUDIO_CONTENT_TYPE_UNKNOWN, usage, AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""};
std::string tags = std::string("addr=") + mMixAddress;
strncpy(attr.tags, tags.c_str(), AUDIO_ATTRIBUTES_TAGS_MAX_SIZE - 1);
- getOutputForAttr(&selectedDeviceId, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
+ getOutputForAttr(&selectedDeviceIds, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
k48000SamplingRate, AUDIO_OUTPUT_FLAG_NONE, nullptr /*output*/, &mPortId, attr);
ASSERT_EQ(NO_ERROR, mManager->startOutput(mPortId));
ASSERT_EQ(injectionPort.id, getDeviceIdFromPatch(mClient->getLastAddedPatch()));
@@ -2502,19 +2553,21 @@
? AUDIO_PORT_ROLE_SINK : AUDIO_PORT_ROLE_SOURCE;
ASSERT_TRUE(findDevicePort(role, type, address, &devicePort));
- audio_port_handle_t routedPortId = devicePort.id;
// Try start input or output according to the device type
if (audio_is_output_devices(type)) {
- getOutputForAttr(&routedPortId, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
+ DeviceIdVector routedPortIds = { devicePort.id };
+ getOutputForAttr(&routedPortIds, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
k48000SamplingRate, AUDIO_OUTPUT_FLAG_NONE);
+ ASSERT_EQ(devicePort.id, routedPortIds[0]);
} else if (audio_is_input_device(type)) {
+ audio_port_handle_t routedPortId = devicePort.id;
RecordingActivityTracker tracker;
audio_io_handle_t input = AUDIO_PORT_HANDLE_NONE;
getInputForAttr({}, &input, AUDIO_SESSION_NONE, tracker.getRiid(), &routedPortId,
AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_IN_STEREO, k48000SamplingRate,
AUDIO_INPUT_FLAG_NONE);
+ ASSERT_EQ(devicePort.id, routedPortId);
}
- ASSERT_EQ(devicePort.id, routedPortId);
ASSERT_EQ(NO_ERROR, mManager->setDeviceConnectionState(
type, AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE,
@@ -2775,24 +2828,24 @@
AUDIO_DEVICE_OUT_BUS, sCarBusMediaOutput, audioConfig);
ASSERT_EQ(NO_ERROR, ret);
- audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
+ DeviceIdVector selectedDeviceIds;
audio_io_handle_t output;
audio_port_handle_t portId;
- getOutputForAttr(&selectedDeviceId, AUDIO_FORMAT_E_AC3_JOC, AUDIO_CHANNEL_OUT_5POINT1,
+ getOutputForAttr(&selectedDeviceIds, AUDIO_FORMAT_E_AC3_JOC, AUDIO_CHANNEL_OUT_5POINT1,
k48000SamplingRate, AUDIO_OUTPUT_FLAG_DIRECT, &output, &portId);
- ASSERT_NE(AUDIO_PORT_HANDLE_NONE, selectedDeviceId);
+ ASSERT_GT(selectedDeviceIds.size(), 0);
sp<SwAudioOutputDescriptor> outDesc = mManager->getOutputs().valueFor(output);
ASSERT_NE(nullptr, outDesc.get());
ASSERT_EQ(AUDIO_FORMAT_E_AC3_JOC, outDesc->getFormat());
ASSERT_EQ(AUDIO_CHANNEL_OUT_5POINT1, outDesc->getChannelMask());
ASSERT_EQ(k48000SamplingRate, outDesc->getSamplingRate());
- selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
+ selectedDeviceIds.clear();
output = AUDIO_IO_HANDLE_NONE;
portId = AUDIO_PORT_HANDLE_NONE;
- getOutputForAttr(&selectedDeviceId, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_7POINT1POINT4,
+ getOutputForAttr(&selectedDeviceIds, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_7POINT1POINT4,
k48000SamplingRate, AUDIO_OUTPUT_FLAG_DIRECT, &output, &portId);
- ASSERT_NE(AUDIO_PORT_HANDLE_NONE, selectedDeviceId);
+ ASSERT_GT(selectedDeviceIds.size(), 0);
outDesc = mManager->getOutputs().valueFor(output);
ASSERT_NE(nullptr, outDesc.get());
ASSERT_EQ(AUDIO_FORMAT_PCM_16_BIT, outDesc->getFormat());
@@ -2812,25 +2865,25 @@
AUDIO_DEVICE_OUT_BUS, sCarBusMediaOutput, audioConfig, mediaMatchCriteria);
ASSERT_EQ(NO_ERROR, ret);
std::vector<AudioMixMatchCriterion> navMatchCriteria = {
- createUsageCriterion(AUDIO_USAGE_ASSISTANCE_NAVIGATION_GUIDANCE,
+ createUsageCriterion(AUDIO_USAGE_ASSISTANCE_NAVIGATION_GUIDANCE,
/*exclude=*/ false)};
ret = addPolicyMix(MIX_TYPE_PLAYERS, MIX_ROUTE_FLAG_RENDER,
- AUDIO_DEVICE_OUT_BUS, sCarBusNavigationOutput, audioConfig, navMatchCriteria);
+ AUDIO_DEVICE_OUT_BUS, sCarBusNavigationOutput, audioConfig, navMatchCriteria);
ASSERT_EQ(NO_ERROR, ret);
audio_port_v7 mediaDevicePort;
ASSERT_TRUE(findDevicePort(AUDIO_PORT_ROLE_SINK, AUDIO_DEVICE_OUT_BUS,
sCarBusMediaOutput, &mediaDevicePort));
- audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
+ DeviceIdVector selectedDeviceIds;
audio_io_handle_t output;
audio_port_handle_t portId;
const audio_attributes_t mediaAttribute = {
AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_MEDIA,
AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""};
- getOutputForAttr(&selectedDeviceId, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
+ getOutputForAttr(&selectedDeviceIds, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
k48000SamplingRate, AUDIO_OUTPUT_FLAG_DIRECT, &output, &portId, mediaAttribute);
- ASSERT_EQ(mediaDevicePort.id, selectedDeviceId);
+ ASSERT_EQ(mediaDevicePort.id, selectedDeviceIds[0]);
}
TEST_F(AudioPolicyManagerCarTest, GetOutputForAttrWithSelectedOutputAfterRegisteringPolicyMix) {
@@ -2845,25 +2898,25 @@
AUDIO_DEVICE_OUT_BUS, sCarBusMediaOutput, audioConfig, mediaMatchCriteria);
ASSERT_EQ(NO_ERROR, ret);
std::vector<AudioMixMatchCriterion> navMatchCriteria = {
- createUsageCriterion(AUDIO_USAGE_ASSISTANCE_NAVIGATION_GUIDANCE,
+ createUsageCriterion(AUDIO_USAGE_ASSISTANCE_NAVIGATION_GUIDANCE,
/*exclude=*/ false)};
ret = addPolicyMix(MIX_TYPE_PLAYERS, MIX_ROUTE_FLAG_RENDER,
- AUDIO_DEVICE_OUT_BUS, sCarBusNavigationOutput, audioConfig, navMatchCriteria);
+ AUDIO_DEVICE_OUT_BUS, sCarBusNavigationOutput, audioConfig, navMatchCriteria);
ASSERT_EQ(NO_ERROR, ret);
audio_port_v7 navDevicePort;
ASSERT_TRUE(findDevicePort(AUDIO_PORT_ROLE_SINK, AUDIO_DEVICE_OUT_BUS,
sCarBusNavigationOutput, &navDevicePort));
- audio_port_handle_t selectedDeviceId = navDevicePort.id;
+ DeviceIdVector selectedDeviceIds = { navDevicePort.id };
audio_io_handle_t output;
audio_port_handle_t portId;
const audio_attributes_t mediaAttribute = {
AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_MEDIA,
AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""};
- getOutputForAttr(&selectedDeviceId, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
+ getOutputForAttr(&selectedDeviceIds, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
k48000SamplingRate, AUDIO_OUTPUT_FLAG_DIRECT, &output, &portId, mediaAttribute);
- ASSERT_EQ(navDevicePort.id, selectedDeviceId);
+ ASSERT_EQ(navDevicePort.id, selectedDeviceIds[0]);
}
TEST_F(AudioPolicyManagerCarTest, GetOutputForAttrWithSelectedOutputAfterUserAffinities) {
@@ -2878,10 +2931,10 @@
AUDIO_DEVICE_OUT_BUS, sCarBusMediaOutput, audioConfig, mediaMatchCriteria);
ASSERT_EQ(NO_ERROR, ret);
std::vector<AudioMixMatchCriterion> navMatchCriteria = {
- createUsageCriterion(AUDIO_USAGE_ASSISTANCE_NAVIGATION_GUIDANCE,
+ createUsageCriterion(AUDIO_USAGE_ASSISTANCE_NAVIGATION_GUIDANCE,
/*exclude=*/ false)};
ret = addPolicyMix(MIX_TYPE_PLAYERS, MIX_ROUTE_FLAG_RENDER,
- AUDIO_DEVICE_OUT_BUS, sCarBusNavigationOutput, audioConfig, navMatchCriteria);
+ AUDIO_DEVICE_OUT_BUS, sCarBusNavigationOutput, audioConfig, navMatchCriteria);
ASSERT_EQ(NO_ERROR, ret);
const AudioDeviceTypeAddr mediaOutputDevice(AUDIO_DEVICE_OUT_BUS, sCarBusMediaOutput);
const AudioDeviceTypeAddrVector outputDevices = {mediaOutputDevice};
@@ -2889,17 +2942,18 @@
audio_port_v7 navDevicePort;
ASSERT_TRUE(findDevicePort(AUDIO_PORT_ROLE_SINK, AUDIO_DEVICE_OUT_BUS,
sCarBusNavigationOutput, &navDevicePort));
- audio_port_handle_t selectedDeviceId = navDevicePort.id;
+ DeviceIdVector selectedDeviceIds = { navDevicePort.id };
audio_io_handle_t output;
audio_port_handle_t portId;
const audio_attributes_t mediaAttribute = {
AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_MEDIA,
AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""};
- getOutputForAttr(&selectedDeviceId, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
+ getOutputForAttr(&selectedDeviceIds, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
k48000SamplingRate, AUDIO_OUTPUT_FLAG_DIRECT, &output, &portId, mediaAttribute);
- ASSERT_NE(navDevicePort.id, selectedDeviceId);
+ ASSERT_GT(selectedDeviceIds.size(), 0);
+ ASSERT_NE(navDevicePort.id, selectedDeviceIds[0]);
}
TEST_F(AudioPolicyManagerCarTest, GetOutputForAttrWithExcludeUserIdCriteria) {
@@ -2914,11 +2968,11 @@
AUDIO_DEVICE_OUT_BUS, sCarBusMediaOutput, audioConfig, mediaMatchCriteria);
ASSERT_EQ(NO_ERROR, ret);
std::vector<AudioMixMatchCriterion> navMatchCriteria = {
- createUsageCriterion(AUDIO_USAGE_ASSISTANCE_NAVIGATION_GUIDANCE,
+ createUsageCriterion(AUDIO_USAGE_ASSISTANCE_NAVIGATION_GUIDANCE,
/*exclude=*/ false),
- createUserIdCriterion(/* userId */ 0, /* exclude */ true)};
+ createUserIdCriterion(/* userId */ 0, /* exclude */ true)};
ret = addPolicyMix(MIX_TYPE_PLAYERS, MIX_ROUTE_FLAG_RENDER,
- AUDIO_DEVICE_OUT_BUS, sCarBusNavigationOutput, audioConfig, navMatchCriteria);
+ AUDIO_DEVICE_OUT_BUS, sCarBusNavigationOutput, audioConfig, navMatchCriteria);
ASSERT_EQ(NO_ERROR, ret);
audio_port_v7 navDevicePort;
ASSERT_TRUE(findDevicePort(AUDIO_PORT_ROLE_SINK, AUDIO_DEVICE_OUT_BUS,
@@ -2926,14 +2980,15 @@
audio_io_handle_t output;
audio_port_handle_t portId;
const audio_attributes_t navigationAttribute = {
- AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_ASSISTANCE_NAVIGATION_GUIDANCE,
- AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""};
- audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
+ AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_ASSISTANCE_NAVIGATION_GUIDANCE,
+ AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""};
+ DeviceIdVector selectedDeviceIds;
- getOutputForAttr(&selectedDeviceId, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
+ getOutputForAttr(&selectedDeviceIds, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
k48000SamplingRate, AUDIO_OUTPUT_FLAG_DIRECT, &output, &portId, navigationAttribute);
- ASSERT_NE(navDevicePort.id, selectedDeviceId);
+ ASSERT_GT(selectedDeviceIds.size(), 0);
+ ASSERT_NE(navDevicePort.id, selectedDeviceIds[0]);
}
TEST_F(AudioPolicyManagerCarTest, GetOutputForAttrWithSelectedOutputExcludeUserIdCriteria) {
@@ -2948,30 +3003,30 @@
AUDIO_DEVICE_OUT_BUS, sCarBusMediaOutput, audioConfig, mediaMatchCriteria);
ASSERT_EQ(NO_ERROR, ret);
std::vector<AudioMixMatchCriterion> navMatchCriteria = {
- createUsageCriterion(AUDIO_USAGE_ASSISTANCE_NAVIGATION_GUIDANCE,
+ createUsageCriterion(AUDIO_USAGE_ASSISTANCE_NAVIGATION_GUIDANCE,
/*exclude=*/ false),
- createUserIdCriterion(0 /* userId */, /* exclude */ true)};
+ createUserIdCriterion(0 /* userId */, /* exclude */ true)};
ret = addPolicyMix(MIX_TYPE_PLAYERS, MIX_ROUTE_FLAG_RENDER,
- AUDIO_DEVICE_OUT_BUS, sCarBusNavigationOutput, audioConfig, navMatchCriteria);
+ AUDIO_DEVICE_OUT_BUS, sCarBusNavigationOutput, audioConfig, navMatchCriteria);
ASSERT_EQ(NO_ERROR, ret);
audio_port_v7 navDevicePort;
ASSERT_TRUE(findDevicePort(AUDIO_PORT_ROLE_SINK, AUDIO_DEVICE_OUT_BUS,
- sCarBusNavigationOutput, &navDevicePort));
- audio_port_handle_t selectedDeviceId = navDevicePort.id;
+ sCarBusNavigationOutput, &navDevicePort));
+ DeviceIdVector selectedDeviceIds = { navDevicePort.id };
audio_io_handle_t output;
audio_port_handle_t portId;
const audio_attributes_t mediaAttribute = {
- AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_MEDIA,
- AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""};
+ AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_MEDIA,
+ AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""};
- getOutputForAttr(&selectedDeviceId, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
+ getOutputForAttr(&selectedDeviceIds, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
k48000SamplingRate, AUDIO_OUTPUT_FLAG_DIRECT, &output, &portId, mediaAttribute);
- ASSERT_EQ(navDevicePort.id, selectedDeviceId);
+ ASSERT_EQ(navDevicePort.id, selectedDeviceIds[0]);
}
TEST_F(AudioPolicyManagerCarTest,
- GetOutputForAttrWithMatchingMixAndSelectedOutputAfterUserAffinities) {
+ GetOutputForAttrWithMatchingMixAndSelectedOutputAfterUserAffinities) {
status_t ret;
audio_config_t audioConfig = AUDIO_CONFIG_INITIALIZER;
audioConfig.channel_mask = AUDIO_CHANNEL_OUT_STEREO;
@@ -2983,10 +3038,10 @@
AUDIO_DEVICE_OUT_BUS, sCarBusMediaOutput, audioConfig, mediaMatchCriteria);
ASSERT_EQ(NO_ERROR, ret);
std::vector<AudioMixMatchCriterion> navMatchCriteria = {
- createUsageCriterion(AUDIO_USAGE_ASSISTANCE_NAVIGATION_GUIDANCE,
+ createUsageCriterion(AUDIO_USAGE_ASSISTANCE_NAVIGATION_GUIDANCE,
/*exclude=*/ false)};
ret = addPolicyMix(MIX_TYPE_PLAYERS, MIX_ROUTE_FLAG_RENDER,
- AUDIO_DEVICE_OUT_BUS, sCarBusNavigationOutput, audioConfig, navMatchCriteria);
+ AUDIO_DEVICE_OUT_BUS, sCarBusNavigationOutput, audioConfig, navMatchCriteria);
ASSERT_EQ(NO_ERROR, ret);
const AudioDeviceTypeAddr mediaOutputDevice(AUDIO_DEVICE_OUT_BUS, sCarBusMediaOutput);
const AudioDeviceTypeAddr navOutputDevice(AUDIO_DEVICE_OUT_BUS, sCarBusNavigationOutput);
@@ -2995,21 +3050,21 @@
audio_port_v7 navDevicePort;
ASSERT_TRUE(findDevicePort(AUDIO_PORT_ROLE_SINK, AUDIO_DEVICE_OUT_BUS,
sCarBusNavigationOutput, &navDevicePort));
- audio_port_handle_t selectedDeviceId = navDevicePort.id;
+ DeviceIdVector selectedDeviceIds = { navDevicePort.id };
audio_io_handle_t output;
audio_port_handle_t portId;
const audio_attributes_t mediaAttribute = {
- AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_MEDIA,
- AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""};
+ AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_MEDIA,
+ AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""};
- getOutputForAttr(&selectedDeviceId, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
+ getOutputForAttr(&selectedDeviceIds, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
k48000SamplingRate, AUDIO_OUTPUT_FLAG_DIRECT, &output, &portId, mediaAttribute);
- ASSERT_EQ(navDevicePort.id, selectedDeviceId);
+ ASSERT_EQ(navDevicePort.id, selectedDeviceIds[0]);
}
TEST_F(AudioPolicyManagerCarTest,
- GetOutputForAttrWithNoMatchingMaxAndSelectedOutputAfterUserAffinities) {
+ GetOutputForAttrWithNoMatchingMaxAndSelectedOutputAfterUserAffinities) {
status_t ret;
audio_config_t audioConfig = AUDIO_CONFIG_INITIALIZER;
audioConfig.channel_mask = AUDIO_CHANNEL_OUT_STEREO;
@@ -3021,10 +3076,10 @@
AUDIO_DEVICE_OUT_BUS, sCarBusMediaOutput, audioConfig, mediaMatchCriteria);
ASSERT_EQ(NO_ERROR, ret);
std::vector<AudioMixMatchCriterion> navMatchCriteria = {
- createUsageCriterion(AUDIO_USAGE_ASSISTANCE_NAVIGATION_GUIDANCE,
+ createUsageCriterion(AUDIO_USAGE_ASSISTANCE_NAVIGATION_GUIDANCE,
/*exclude=*/ false)};
ret = addPolicyMix(MIX_TYPE_PLAYERS, MIX_ROUTE_FLAG_RENDER,
- AUDIO_DEVICE_OUT_BUS, sCarBusNavigationOutput, audioConfig, navMatchCriteria);
+ AUDIO_DEVICE_OUT_BUS, sCarBusNavigationOutput, audioConfig, navMatchCriteria);
ASSERT_EQ(NO_ERROR, ret);
const AudioDeviceTypeAddr mediaOutputDevice(AUDIO_DEVICE_OUT_BUS, sCarBusMediaOutput);
const AudioDeviceTypeAddr navOutputDevice(AUDIO_DEVICE_OUT_BUS, sCarBusNavigationOutput);
@@ -3033,21 +3088,21 @@
audio_port_v7 navDevicePort;
ASSERT_TRUE(findDevicePort(AUDIO_PORT_ROLE_SINK, AUDIO_DEVICE_OUT_BUS,
sCarBusNavigationOutput, &navDevicePort));
- audio_port_handle_t selectedDeviceId = navDevicePort.id;
+ DeviceIdVector selectedDeviceIds = { navDevicePort.id };
audio_io_handle_t output;
audio_port_handle_t portId;
const audio_attributes_t alarmAttribute = {
- AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_ALARM,
- AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""};
+ AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_ALARM,
+ AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""};
- getOutputForAttr(&selectedDeviceId, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
+ getOutputForAttr(&selectedDeviceIds, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
k48000SamplingRate, AUDIO_OUTPUT_FLAG_DIRECT, &output, &portId, alarmAttribute);
- ASSERT_EQ(navDevicePort.id, selectedDeviceId);
+ ASSERT_EQ(navDevicePort.id, selectedDeviceIds[0]);
}
TEST_F(AudioPolicyManagerCarTest,
- GetOutputForAttrWithMatMixAfterUserAffinitiesForOneUser) {
+ GetOutputForAttrWithMatMixAfterUserAffinitiesForOneUser) {
status_t ret;
audio_config_t audioConfig = AUDIO_CONFIG_INITIALIZER;
audioConfig.channel_mask = AUDIO_CHANNEL_OUT_STEREO;
@@ -3070,23 +3125,23 @@
audio_port_v7 primaryZoneDevicePort;
ASSERT_TRUE(findDevicePort(AUDIO_PORT_ROLE_SINK, AUDIO_DEVICE_OUT_BUS,
sCarBusMediaOutput, &primaryZoneDevicePort));
- audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
+ DeviceIdVector selectedDeviceIds;
audio_io_handle_t output;
audio_port_handle_t portId;
const audio_attributes_t mediaAttribute = {
- AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_MEDIA,
- AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""};
+ AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_MEDIA,
+ AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""};
uid_t user11AppUid = multiuser_get_uid(/* user_id */ 11, /* app_id */ 12345);
- getOutputForAttr(&selectedDeviceId, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
+ getOutputForAttr(&selectedDeviceIds, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
k48000SamplingRate, AUDIO_OUTPUT_FLAG_DIRECT, &output, &portId, mediaAttribute,
AUDIO_SESSION_NONE, user11AppUid);
- ASSERT_EQ(primaryZoneDevicePort.id, selectedDeviceId);
+ ASSERT_EQ(primaryZoneDevicePort.id, selectedDeviceIds[0]);
}
TEST_F(AudioPolicyManagerCarTest,
- GetOutputForAttrWithMatMixAfterUserAffinitiesForTwoUsers) {
+ GetOutputForAttrWithMatMixAfterUserAffinitiesForTwoUsers) {
status_t ret;
audio_config_t audioConfig = AUDIO_CONFIG_INITIALIZER;
audioConfig.channel_mask = AUDIO_CHANNEL_OUT_STEREO;
@@ -3112,23 +3167,23 @@
audio_port_v7 secondaryZoneDevicePort;
ASSERT_TRUE(findDevicePort(AUDIO_PORT_ROLE_SINK, AUDIO_DEVICE_OUT_BUS,
sCarRearZoneOneOutput, &secondaryZoneDevicePort));
- audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
+ DeviceIdVector selectedDeviceIds;
audio_io_handle_t output;
audio_port_handle_t portId;
const audio_attributes_t mediaAttribute = {
- AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_MEDIA,
- AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""};
+ AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_MEDIA,
+ AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""};
uid_t user11AppUid = multiuser_get_uid(/* user_id */ 11, /* app_id */ 12345);
- getOutputForAttr(&selectedDeviceId, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
+ getOutputForAttr(&selectedDeviceIds, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
k48000SamplingRate, AUDIO_OUTPUT_FLAG_DIRECT, &output, &portId, mediaAttribute,
AUDIO_SESSION_NONE, user11AppUid);
- ASSERT_EQ(secondaryZoneDevicePort.id, selectedDeviceId);
+ ASSERT_EQ(secondaryZoneDevicePort.id, selectedDeviceIds[0]);
}
TEST_F(AudioPolicyManagerCarTest,
- GetOutputForAttrWithMatMixAfterUserAffinitiesForThreeUsers) {
+ GetOutputForAttrWithMatMixAfterUserAffinitiesForThreeUsers) {
status_t ret;
audio_config_t audioConfig = AUDIO_CONFIG_INITIALIZER;
audioConfig.channel_mask = AUDIO_CHANNEL_OUT_STEREO;
@@ -3157,19 +3212,19 @@
audio_port_v7 tertiaryZoneDevicePort;
ASSERT_TRUE(findDevicePort(AUDIO_PORT_ROLE_SINK, AUDIO_DEVICE_OUT_BUS,
sCarRearZoneTwoOutput, &tertiaryZoneDevicePort));
- audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
+ DeviceIdVector selectedDeviceIds;
audio_io_handle_t output;
audio_port_handle_t portId;
const audio_attributes_t mediaAttribute = {
- AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_MEDIA,
- AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""};
+ AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_MEDIA,
+ AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""};
uid_t user15AppUid = multiuser_get_uid(/* user_id */ 15, /* app_id */ 12345);
- getOutputForAttr(&selectedDeviceId, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
+ getOutputForAttr(&selectedDeviceIds, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
k48000SamplingRate, AUDIO_OUTPUT_FLAG_DIRECT, &output, &portId, mediaAttribute,
AUDIO_SESSION_NONE, user15AppUid);
- ASSERT_EQ(tertiaryZoneDevicePort.id, selectedDeviceId);
+ ASSERT_EQ(tertiaryZoneDevicePort.id, selectedDeviceIds[0]);
}
TEST_F(AudioPolicyManagerCarTest, GetOutputForAttrWithNoMatchingMix) {
@@ -3184,10 +3239,10 @@
AUDIO_DEVICE_OUT_BUS, sCarBusMediaOutput, audioConfig, mediaMatchCriteria);
ASSERT_EQ(NO_ERROR, ret);
std::vector<AudioMixMatchCriterion> navMatchCriteria = {
- createUsageCriterion(AUDIO_USAGE_ASSISTANCE_NAVIGATION_GUIDANCE,
+ createUsageCriterion(AUDIO_USAGE_ASSISTANCE_NAVIGATION_GUIDANCE,
/*exclude=*/ false)};
ret = addPolicyMix(MIX_TYPE_PLAYERS, MIX_ROUTE_FLAG_RENDER,
- AUDIO_DEVICE_OUT_BUS, sCarBusNavigationOutput, audioConfig, navMatchCriteria);
+ AUDIO_DEVICE_OUT_BUS, sCarBusNavigationOutput, audioConfig, navMatchCriteria);
ASSERT_EQ(NO_ERROR, ret);
const AudioDeviceTypeAddr mediaOutputDevice(AUDIO_DEVICE_OUT_BUS, sCarBusMediaOutput);
const AudioDeviceTypeAddr navOutputDevice(AUDIO_DEVICE_OUT_BUS, sCarBusNavigationOutput);
@@ -3196,17 +3251,17 @@
audio_port_v7 navDevicePort;
ASSERT_TRUE(findDevicePort(AUDIO_PORT_ROLE_SINK, AUDIO_DEVICE_OUT_BUS,
sCarBusNavigationOutput, &navDevicePort));
- audio_port_handle_t selectedDeviceId = navDevicePort.id;
+ DeviceIdVector selectedDeviceIds = { navDevicePort.id };
audio_io_handle_t output;
audio_port_handle_t portId;
const audio_attributes_t alarmAttribute = {
- AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_ALARM,
- AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""};
+ AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_ALARM,
+ AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""};
- getOutputForAttr(&selectedDeviceId, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
+ getOutputForAttr(&selectedDeviceIds, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
k48000SamplingRate, AUDIO_OUTPUT_FLAG_DIRECT, &output, &portId, alarmAttribute);
- ASSERT_EQ(navDevicePort.id, selectedDeviceId);
+ ASSERT_EQ(navDevicePort.id, selectedDeviceIds[0]);
}
TEST_F(AudioPolicyManagerCarTest, GetOutputForAttrForMMapWithPolicyMatched) {
@@ -3218,13 +3273,13 @@
std::vector<AudioMixMatchCriterion> mediaMatchCriteria = {
createUsageCriterion(AUDIO_USAGE_MEDIA, /*exclude=*/ false)};
ret = addPolicyMix(MIX_TYPE_PLAYERS, MIX_ROUTE_FLAG_RENDER,
- AUDIO_DEVICE_OUT_BUS, sCarBusMmapOutput, audioConfig, mediaMatchCriteria);
+ AUDIO_DEVICE_OUT_BUS, sCarBusMmapOutput, audioConfig, mediaMatchCriteria);
ASSERT_EQ(NO_ERROR, ret);
ASSERT_EQ(NO_ERROR, ret);
audio_port_v7 mmapDevicePort;
ASSERT_TRUE(findDevicePort(AUDIO_PORT_ROLE_SINK, AUDIO_DEVICE_OUT_BUS,
- sCarBusMmapOutput, &mmapDevicePort));
- audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
+ sCarBusMmapOutput, &mmapDevicePort));
+ DeviceIdVector selectedDeviceIds;
audio_io_handle_t output;
audio_port_handle_t portId;
const audio_attributes_t mediaAttribute = {
@@ -3232,12 +3287,13 @@
AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""};
getOutputForAttr(
- &selectedDeviceId, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
+ &selectedDeviceIds, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
k48000SamplingRate,
(audio_output_flags_t)(AUDIO_OUTPUT_FLAG_MMAP_NOIRQ | AUDIO_OUTPUT_FLAG_DIRECT),
&output, &portId, mediaAttribute);
- ASSERT_EQ(mmapDevicePort.id, selectedDeviceId);
+ ASSERT_EQ(mmapDevicePort.id, selectedDeviceIds[0]);
+
}
class AudioPolicyManagerTVTest : public AudioPolicyManagerTestWithConfigurationFile {
@@ -3257,10 +3313,10 @@
ASSERT_EQ(NO_ERROR, mManager->setDeviceConnectionState(
AUDIO_DEVICE_OUT_AUX_DIGITAL, AUDIO_POLICY_DEVICE_STATE_AVAILABLE,
"" /*address*/, "" /*name*/, AUDIO_FORMAT_DEFAULT));
- audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
+ DeviceIdVector selectedDeviceIds;
audio_io_handle_t output;
audio_port_handle_t portId;
- getOutputForAttr(&selectedDeviceId, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
+ getOutputForAttr(&selectedDeviceIds, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
k48000SamplingRate, flags, &output, &portId);
sp<SwAudioOutputDescriptor> outDesc = mManager->getOutputs().valueFor(output);
ASSERT_NE(nullptr, outDesc.get());
@@ -3339,7 +3395,7 @@
void AudioPolicyManagerPhoneTest::testOutputMixPortSelectionForAttr(
audio_output_flags_t flags, audio_format_t format, int samplingRate, bool isMusic,
const char* expectedMixPortName) {
- audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
+ DeviceIdVector selectedDeviceIds;
audio_io_handle_t output;
audio_port_handle_t portId;
audio_attributes_t attr = AUDIO_ATTRIBUTES_INITIALIZER;
@@ -3347,7 +3403,7 @@
attr.content_type = AUDIO_CONTENT_TYPE_MUSIC;
attr.usage = AUDIO_USAGE_MEDIA;
}
- getOutputForAttr(&selectedDeviceId, format, AUDIO_CHANNEL_OUT_STEREO, samplingRate, flags,
+ getOutputForAttr(&selectedDeviceIds, format, AUDIO_CHANNEL_OUT_STEREO, samplingRate, flags,
&output, &portId, attr);
EXPECT_NO_FATAL_FAILURE(verifyMixPortNameAndFlags(output, expectedMixPortName));
mManager->releaseOutput(portId);
@@ -3916,7 +3972,7 @@
audio_port_handle_t mUsbPortId = AUDIO_PORT_HANDLE_NONE;
audio_io_handle_t mBitPerfectOutput = AUDIO_IO_HANDLE_NONE;
- audio_port_handle_t mSelectedDeviceId = AUDIO_PORT_HANDLE_NONE;
+ DeviceIdVector mSelectedDeviceIds;
audio_port_handle_t mBitPerfectPortId = AUDIO_PORT_HANDLE_NONE;
static constexpr audio_attributes_t sMediaAttr = {
@@ -3975,12 +4031,12 @@
reset();
bool isBitPerfect;
- getOutputForAttr(&mSelectedDeviceId, mBitPerfectFormat, mBitPerfectChannelMask,
+ getOutputForAttr(&mSelectedDeviceIds, mBitPerfectFormat, mBitPerfectChannelMask,
mBitPerfectSampleRate, AUDIO_OUTPUT_FLAG_NONE, &mBitPerfectOutput,
&mBitPerfectPortId, sMediaAttr, AUDIO_SESSION_NONE, mUid, &isBitPerfect);
status_t status = mManager->startOutput(mBitPerfectPortId);
if (status == DEAD_OBJECT) {
- getOutputForAttr(&mSelectedDeviceId, mBitPerfectFormat, mBitPerfectChannelMask,
+ getOutputForAttr(&mSelectedDeviceIds, mBitPerfectFormat, mBitPerfectChannelMask,
mBitPerfectSampleRate, AUDIO_OUTPUT_FLAG_NONE, &mBitPerfectOutput,
&mBitPerfectPortId, sMediaAttr, AUDIO_SESSION_NONE, mUid, &isBitPerfect);
status = mManager->startOutput(mBitPerfectPortId);
@@ -3996,8 +4052,8 @@
void AudioPolicyManagerTestBitPerfectBase::reset() {
mBitPerfectOutput = AUDIO_IO_HANDLE_NONE;
- mSelectedDeviceId = AUDIO_PORT_HANDLE_NONE;
mBitPerfectPortId = AUDIO_PORT_HANDLE_NONE;
+ mSelectedDeviceIds.clear();
}
void AudioPolicyManagerTestBitPerfectBase::getBitPerfectOutput(status_t expected) {
@@ -4017,7 +4073,7 @@
EXPECT_EQ(expected,
mManager->getOutputForAttr(&sMediaAttr, &mBitPerfectOutput, AUDIO_SESSION_NONE,
&stream, attributionSource, &config, &flags,
- &mSelectedDeviceId, &mBitPerfectPortId, {}, &outputType,
+ &mSelectedDeviceIds, &mBitPerfectPortId, {}, &outputType,
&isSpatialized, &isBitPerfect, &volume, &muted));
}
@@ -4027,13 +4083,13 @@
TEST_F(AudioPolicyManagerTestBitPerfect, UseBitPerfectOutput) {
const uid_t anotherUid = 5678;
audio_io_handle_t output = AUDIO_IO_HANDLE_NONE;
- audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
+ DeviceIdVector selectedDeviceIds;
audio_port_handle_t portId = AUDIO_PORT_HANDLE_NONE;
bool isBitPerfect;
// When there is no active bit-perfect playback, the output selection will follow default
// routing strategy.
- getOutputForAttr(&selectedDeviceId, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_QUAD,
+ getOutputForAttr(&selectedDeviceIds, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_QUAD,
48000, AUDIO_OUTPUT_FLAG_NONE, &output, &portId, sMediaAttr,
AUDIO_SESSION_NONE, mUid, &isBitPerfect);
EXPECT_FALSE(isBitPerfect);
@@ -4047,14 +4103,14 @@
// If the playback is from preferred mixer attributes owner but the request doesn't match
// preferred mixer attributes, it will not be bit-perfect.
- getOutputForAttr(&selectedDeviceId, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_QUAD,
+ getOutputForAttr(&selectedDeviceIds, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_QUAD,
48000, AUDIO_OUTPUT_FLAG_NONE, &output, &portId, sMediaAttr,
AUDIO_SESSION_NONE, mUid, &isBitPerfect);
EXPECT_FALSE(isBitPerfect);
EXPECT_EQ(mBitPerfectOutput, output);
// When bit-perfect playback is active, all other playback will be routed to bit-perfect output.
- getOutputForAttr(&selectedDeviceId, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
+ getOutputForAttr(&selectedDeviceIds, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
48000, AUDIO_OUTPUT_FLAG_NONE, &output, &portId, sMediaAttr,
AUDIO_SESSION_NONE, anotherUid, &isBitPerfect);
EXPECT_FALSE(isBitPerfect);
@@ -4066,9 +4122,9 @@
.usage = AUDIO_USAGE_VOICE_COMMUNICATION_SIGNALLING,
};
audio_io_handle_t dtmfOutput = AUDIO_IO_HANDLE_NONE;
- selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
+ selectedDeviceIds.clear();
portId = AUDIO_PORT_HANDLE_NONE;
- getOutputForAttr(&selectedDeviceId, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
+ getOutputForAttr(&selectedDeviceIds, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
48000, AUDIO_OUTPUT_FLAG_NONE, &dtmfOutput, &portId, dtmfAttr,
AUDIO_SESSION_NONE, anotherUid, &isBitPerfect);
EXPECT_FALSE(isBitPerfect);
@@ -4076,7 +4132,7 @@
// When configuration matches preferred mixer attributes, which is bit-perfect, but the client
// is not the owner of preferred mixer attributes, the playback will not be bit-perfect.
- getOutputForAttr(&selectedDeviceId, mBitPerfectFormat, mBitPerfectChannelMask,
+ getOutputForAttr(&selectedDeviceIds, mBitPerfectFormat, mBitPerfectChannelMask,
mBitPerfectSampleRate, AUDIO_OUTPUT_FLAG_NONE, &output, &portId, sMediaAttr,
AUDIO_SESSION_NONE, anotherUid, &isBitPerfect);
EXPECT_FALSE(isBitPerfect);
@@ -4102,9 +4158,9 @@
.content_type = AUDIO_CONTENT_TYPE_SONIFICATION,
.usage = AUDIO_USAGE_ASSISTANCE_SONIFICATION,
};
- audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
+ DeviceIdVector selectedDeviceIds;
bool isBitPerfect;
- getOutputForAttr(&selectedDeviceId, mBitPerfectFormat, mBitPerfectChannelMask,
+ getOutputForAttr(&selectedDeviceIds, mBitPerfectFormat, mBitPerfectChannelMask,
anotherSampleRate, AUDIO_OUTPUT_FLAG_NONE, &systemSoundOutput,
&systemSoundPortId, systemSoundAttr, AUDIO_SESSION_NONE, mUid, &isBitPerfect);
EXPECT_FALSE(isBitPerfect);
@@ -4124,7 +4180,7 @@
.content_type = AUDIO_CONTENT_TYPE_SONIFICATION,
.usage = AUDIO_USAGE_NOTIFICATION,
};
- getOutputForAttr(&selectedDeviceId, mBitPerfectFormat, mBitPerfectChannelMask,
+ getOutputForAttr(&selectedDeviceIds, mBitPerfectFormat, mBitPerfectChannelMask,
anotherSampleRate, AUDIO_OUTPUT_FLAG_NONE, ¬ificationOutput,
¬ificationPortId, notificationAttr, AUDIO_SESSION_NONE, mUid,
&isBitPerfect);
@@ -4191,11 +4247,11 @@
.content_type = AUDIO_CONTENT_TYPE_UNKNOWN,
.usage = GetParam(),
};
- audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
+ DeviceIdVector selectedDeviceIds;
audio_port_handle_t portId = AUDIO_PORT_HANDLE_NONE;
audio_io_handle_t output = AUDIO_IO_HANDLE_NONE;
ASSERT_NO_FATAL_FAILURE(
- getOutputForAttr(&selectedDeviceId, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
+ getOutputForAttr(&selectedDeviceIds, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
48000, AUDIO_OUTPUT_FLAG_NONE, &output, &portId, attr));
EXPECT_NE(mBitPerfectOutput, output);
EXPECT_EQ(NO_ERROR, mManager->startOutput(portId));
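Reviewer note: the hunks above migrate the tests from a single audio_port_handle_t selectedDeviceId out-parameter to a DeviceIdVector, resetting with clear() instead of AUDIO_PORT_HANDLE_NONE. A minimal sketch of the pattern with stand-in types (the real DeviceIdVector is assumed to be a vector of audio_port_handle_t):

#include <cstdint>
#include <vector>

using audio_port_handle_t = int32_t;                       // stand-in for illustration
using DeviceIdVector = std::vector<audio_port_handle_t>;   // assumed alias
constexpr audio_port_handle_t AUDIO_PORT_HANDLE_NONE = 0;

// Hypothetical helper mirroring the test pattern: callers clear() the vector
// before each routing query and read the first routed device afterwards.
audio_port_handle_t firstDeviceIdOrNone(const DeviceIdVector& ids) {
    return ids.empty() ? AUDIO_PORT_HANDLE_NONE : ids.front();
}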
diff --git a/services/camera/libcameraservice/CameraService.cpp b/services/camera/libcameraservice/CameraService.cpp
index 6da1606..8d44e06 100644
--- a/services/camera/libcameraservice/CameraService.cpp
+++ b/services/camera/libcameraservice/CameraService.cpp
@@ -2254,9 +2254,12 @@
int callingPid = getCallingPid();
int callingUid = getCallingUid();
bool systemNativeClient = false;
+ AttributionSourceState resolvedClientAttribution(clientAttribution);
if (callerHasSystemUid() && (clientPackageNameMaybe.size() == 0)) {
std::string systemClient = fmt::sprintf("client.pid<%d>", callingPid);
clientPackageNameMaybe = systemClient;
+ // Pass in packageName since AttributionAndPermissionUtils can't resolve VNDK clients.
+ resolvedClientAttribution.packageName = clientPackageNameMaybe;
systemNativeClient = true;
}
@@ -2272,10 +2275,10 @@
bool isNonSystemNdk = clientPackageNameMaybe.size() == 0;
- AttributionSourceState resolvedClientAttribution(clientAttribution);
- if (!flags::use_context_attribution_source()) {
+ if (!flags::data_delivery_permission_checks()) {
resolvedClientAttribution.pid = USE_CALLING_PID;
}
+
ret = resolveAttributionSource(resolvedClientAttribution, __FUNCTION__, cameraId);
if (!ret.isOk()) {
logRejected(cameraId, getCallingPid(), clientAttribution.packageName.value_or(""),
@@ -4108,7 +4111,7 @@
mAppOpsManager = std::make_unique<AppOpsManager>();
}
- mUidIsTrusted = isTrustedCallingUid(getClientUid());
+ mUidIsTrusted = isTrustedCallingUid(mClientAttribution.uid);
}
CameraService::BasicClient::~BasicClient() {
@@ -4278,7 +4281,7 @@
ATRACE_CALL();
// Don't start watching until we're streaming when using permissionChecker for data delivery
- if (!flags::check_full_attribution_source_chain()) {
+ if (!flags::data_delivery_permission_checks()) {
ALOGD("%s: Start camera ops, package name = %s, client UID = %d", __FUNCTION__,
getPackageName().c_str(), getClientUid());
@@ -4300,7 +4303,7 @@
}
}
} else {
- // TODO: Remove when removing the check_full_attribution_source_chain flag
+ // TODO: Remove when removing the data_delivery_permission_checks flag
ALOGD("%s: Bypassing checkOp for uid %d", __FUNCTION__, getClientUid());
}
@@ -4334,7 +4337,7 @@
getPackageName().c_str(), getClientUid());
if (mAppOpsManager != nullptr) {
- if (flags::check_full_attribution_source_chain()) {
+ if (flags::data_delivery_permission_checks()) {
ALOGD("%s: Start data delivery for uid %d", __FUNCTION__, getClientUid());
const PermissionChecker::PermissionResult result =
@@ -4378,7 +4381,7 @@
// noteAppOp is only used for when camera mute is not supported, in order
// to trigger the sensor privacy "Unblock" dialog
- if (flags::check_full_attribution_source_chain()) {
+ if (flags::data_delivery_permission_checks()) {
// Ignore the result, since we're only triggering the dialog
ALOGD("%s: Check data delivery permissions for uid %d", __FUNCTION__, getClientUid());
hasPermissionsForCameraForDataDelivery(std::string(), mClientAttribution);
@@ -4410,7 +4413,7 @@
}
if (mAppOpsManager != nullptr) {
- if (flags::check_full_attribution_source_chain()) {
+ if (flags::data_delivery_permission_checks()) {
ALOGD("%s: finishDataDelivery for uid %d", __FUNCTION__, getClientUid());
finishDataDelivery(mClientAttribution);
@@ -4455,7 +4458,7 @@
}
// When using the data delivery permission checks, the open state does not involve AppOps
- if (!flags::check_full_attribution_source_chain()) {
+ if (!flags::data_delivery_permission_checks()) {
// Always stop watching, even if no camera op is active
if (mOpsCallback != nullptr && mAppOpsManager != nullptr) {
mAppOpsManager->stopWatchingMode(mOpsCallback);
@@ -4483,7 +4486,7 @@
}
PermissionChecker::PermissionResult res;
- if (flags::check_full_attribution_source_chain()) {
+ if (flags::data_delivery_permission_checks()) {
int32_t appOpMode = AppOpsManager::MODE_ALLOWED;
std::for_each(AttrSourceItr{mClientAttribution}, AttrSourceItr::end(),
[&](const auto& attr) {
@@ -4519,7 +4522,7 @@
// Uid may be active, but not visible to the user (e.g. PROCESS_STATE_FOREGROUND_SERVICE).
// If not visible, but still active, then we want to block instead of muting the camera.
int32_t procState = ActivityManager::PROCESS_STATE_NONEXISTENT;
- if (flags::check_full_attribution_source_chain()) {
+ if (flags::data_delivery_permission_checks()) {
// Use the proc state of the last uid in the chain (ultimately receiving the data)
// when determining whether to mute or block
int32_t uid = -1;
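Reviewer note: with data_delivery_permission_checks enabled, the code above walks the whole AttributionSource chain (AttrSourceItr) rather than checking a single caller. A minimal sketch of such a chain walk, using a simplified node type (the real AttributionSourceState nests its successor differently):

#include <cstdint>
#include <memory>

// Simplified stand-in for AttributionSourceState, for illustration only.
struct AttrNode {
    int32_t uid = -1;
    std::shared_ptr<AttrNode> next;  // successor in the attribution chain
};

// Return the uid of the last node, mirroring the "use the proc state of the
// last uid in the chain" logic above.
int32_t lastUidInChain(const AttrNode& head) {
    const AttrNode* cur = &head;
    while (cur->next != nullptr) {
        cur = cur->next.get();
    }
    return cur->uid;
}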
diff --git a/services/camera/libcameraservice/aidl/ExtensionMetadataTags.h b/services/camera/libcameraservice/aidl/ExtensionMetadataTags.h
index 61b150d..550b3ab 100644
--- a/services/camera/libcameraservice/aidl/ExtensionMetadataTags.h
+++ b/services/camera/libcameraservice/aidl/ExtensionMetadataTags.h
@@ -30,4 +30,5 @@
std::vector<camera_metadata_tag> extension_metadata_keys{
ANDROID_EXTENSION_STRENGTH,
ANDROID_EXTENSION_CURRENT_TYPE,
+ ANDROID_EXTENSION_NIGHT_MODE_INDICATOR,
};
diff --git a/services/camera/libcameraservice/aidl/VndkVersionMetadataTags.h b/services/camera/libcameraservice/aidl/VndkVersionMetadataTags.h
index b07d8d5..1835f2f 100644
--- a/services/camera/libcameraservice/aidl/VndkVersionMetadataTags.h
+++ b/services/camera/libcameraservice/aidl/VndkVersionMetadataTags.h
@@ -93,6 +93,17 @@
ANDROID_FLASH_TORCH_STRENGTH_MAX_LEVEL,
ANDROID_INFO_SESSION_CONFIGURATION_QUERY_VERSION,
} },
+ {36, {
+ ANDROID_COLOR_CORRECTION_AVAILABLE_MODES,
+ ANDROID_COLOR_CORRECTION_COLOR_TEMPERATURE_RANGE,
+ ANDROID_CONTROL_AE_AVAILABLE_PRIORITY_MODES,
+ ANDROID_HEIC_AVAILABLE_HEIC_ULTRA_HDR_MIN_FRAME_DURATIONS,
+ ANDROID_HEIC_AVAILABLE_HEIC_ULTRA_HDR_MIN_FRAME_DURATIONS_MAXIMUM_RESOLUTION,
+ ANDROID_HEIC_AVAILABLE_HEIC_ULTRA_HDR_STALL_DURATIONS,
+ ANDROID_HEIC_AVAILABLE_HEIC_ULTRA_HDR_STALL_DURATIONS_MAXIMUM_RESOLUTION,
+ ANDROID_HEIC_AVAILABLE_HEIC_ULTRA_HDR_STREAM_CONFIGURATIONS,
+ ANDROID_HEIC_AVAILABLE_HEIC_ULTRA_HDR_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION,
+ } },
};
/**
@@ -125,4 +136,11 @@
ANDROID_STATISTICS_LENS_INTRINSIC_SAMPLES,
ANDROID_STATISTICS_LENS_INTRINSIC_TIMESTAMPS,
} },
+ {36, {
+ ANDROID_COLOR_CORRECTION_COLOR_TEMPERATURE,
+ ANDROID_COLOR_CORRECTION_COLOR_TINT,
+ ANDROID_CONTROL_AE_PRIORITY_MODE,
+ ANDROID_CONTROL_ZOOM_METHOD,
+ ANDROID_EXTENSION_NIGHT_MODE_INDICATOR,
+ } },
};
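Reviewer note: both tables above gain a {36, {...}} entry for the new keys. A sketch of how a version-keyed table like this can be used to hide tags that are newer than a client's VNDK version (assumed usage; the actual filtering code is not part of this diff):

#include <cstdint>
#include <map>
#include <set>
#include <vector>

using tag_t = uint32_t;  // stand-in for camera_metadata_tag

// tagsAddedAt[v] lists the tags introduced at VNDK version v.
std::set<tag_t> tagsTooNewFor(int clientVndkVersion,
        const std::map<int, std::vector<tag_t>>& tagsAddedAt) {
    std::set<tag_t> hide;
    for (const auto& [version, tags] : tagsAddedAt) {
        if (version > clientVndkVersion) {
            hide.insert(tags.begin(), tags.end());  // client predates these tags
        }
    }
    return hide;
}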
diff --git a/services/camera/libcameraservice/api2/HeicCompositeStream.cpp b/services/camera/libcameraservice/api2/HeicCompositeStream.cpp
index 0f4ba65..768eaf8 100644
--- a/services/camera/libcameraservice/api2/HeicCompositeStream.cpp
+++ b/services/camera/libcameraservice/api2/HeicCompositeStream.cpp
@@ -27,54 +27,79 @@
#include <aidl/android/hardware/camera/device/CameraBlobId.h>
#include <camera/StringUtils.h>
#include <com_android_graphics_libgui_flags.h>
+#include <com_android_internal_camera_flags.h>
#include <gui/Surface.h>
#include <libyuv.h>
#include <utils/Log.h>
#include <utils/Trace.h>
+#include <ultrahdr/jpegr.h>
+#include <ultrahdr/ultrahdrcommon.h>
-#include <mediadrm/ICrypto.h>
#include <media/MediaCodecBuffer.h>
+#include <media/stagefright/MediaCodecConstants.h>
+#include <media/stagefright/MetaData.h>
#include <media/stagefright/foundation/ABuffer.h>
#include <media/stagefright/foundation/MediaDefs.h>
-#include <media/stagefright/MediaCodecConstants.h>
+#include <mediadrm/ICrypto.h>
+#include <memory>
+#include "HeicCompositeStream.h"
+#include "HeicEncoderInfoManager.h"
#include "common/CameraDeviceBase.h"
+#include "system/camera_metadata.h"
#include "utils/ExifUtils.h"
#include "utils/SessionConfigurationUtils.h"
#include "utils/Utils.h"
-#include "HeicEncoderInfoManager.h"
-#include "HeicCompositeStream.h"
using aidl::android::hardware::camera::device::CameraBlob;
using aidl::android::hardware::camera::device::CameraBlobId;
+namespace flags = com::android::internal::camera::flags;
+
namespace android {
namespace camera3 {
HeicCompositeStream::HeicCompositeStream(sp<CameraDeviceBase> device,
- wp<hardware::camera2::ICameraDeviceCallbacks> cb) :
- CompositeStream(device, cb),
- mUseHeic(false),
- mNumOutputTiles(1),
- mOutputWidth(0),
- mOutputHeight(0),
- mMaxHeicBufferSize(0),
- mGridWidth(HeicEncoderInfoManager::kGridWidth),
- mGridHeight(HeicEncoderInfoManager::kGridHeight),
- mGridRows(1),
- mGridCols(1),
- mUseGrid(false),
- mAppSegmentStreamId(-1),
- mAppSegmentSurfaceId(-1),
- mMainImageStreamId(-1),
- mMainImageSurfaceId(-1),
- mYuvBufferAcquired(false),
- mStreamSurfaceListener(new StreamSurfaceListener()),
- mDequeuedOutputBufferCnt(0),
- mCodecOutputCounter(0),
- mQuality(-1),
- mGridTimestampUs(0),
- mStatusId(StatusTracker::NO_STATUS_ID) {
+ wp<hardware::camera2::ICameraDeviceCallbacks> cb)
+ : CompositeStream(device, cb),
+ mUseHeic(false),
+ mNumOutputTiles(1),
+ mNumGainmapOutputTiles(1),
+ mOutputWidth(0),
+ mOutputHeight(0),
+ mGainmapOutputWidth(0),
+ mGainmapOutputHeight(0),
+ mMaxHeicBufferSize(0),
+ mGridWidth(HeicEncoderInfoManager::kGridWidth),
+ mGridHeight(HeicEncoderInfoManager::kGridHeight),
+ mGainmapGridWidth(HeicEncoderInfoManager::kGridWidth),
+ mGainmapGridHeight(HeicEncoderInfoManager::kGridHeight),
+ mGridRows(1),
+ mGridCols(1),
+ mGainmapGridRows(1),
+ mGainmapGridCols(1),
+ mUseGrid(false),
+ mGainmapUseGrid(false),
+ mAppSegmentStreamId(-1),
+ mAppSegmentSurfaceId(-1),
+ mMainImageStreamId(-1),
+ mMainImageSurfaceId(-1),
+ mYuvBufferAcquired(false),
+ mStreamSurfaceListener(new StreamSurfaceListener()),
+ mDequeuedOutputBufferCnt(0),
+ mCodecOutputCounter(0),
+ mCodecGainmapOutputCounter(0),
+ mQuality(-1),
+ mGridTimestampUs(0),
+ mStatusId(StatusTracker::NO_STATUS_ID) {
+ mStaticInfo = device->info();
+ camera_metadata_entry halHeicSupport = mStaticInfo.find(ANDROID_HEIC_INFO_SUPPORTED);
+ if (halHeicSupport.count == 1 &&
+ halHeicSupport.data.u8[0] == ANDROID_HEIC_INFO_SUPPORTED_TRUE) {
+ // The camera device supports the HEIC stream combination;
+ // use the standard stream combination.
+ mAppSegmentSupported = true;
+ }
}
HeicCompositeStream::~HeicCompositeStream() {
@@ -84,6 +109,7 @@
mInputAppSegmentBuffers.clear();
mCodecOutputBuffers.clear();
+ mGainmapCodecOutputBuffers.clear();
mAppSegmentStreamId = -1;
mAppSegmentSurfaceId = -1;
@@ -97,7 +123,8 @@
}
bool HeicCompositeStream::isHeicCompositeStreamInfo(const OutputStreamInfo& streamInfo) {
- return ((streamInfo.dataSpace == static_cast<android_dataspace_t>(HAL_DATASPACE_HEIF)) &&
+ return ((streamInfo.dataSpace == static_cast<android_dataspace_t>(HAL_DATASPACE_HEIF) ||
+ (streamInfo.dataSpace == static_cast<android_dataspace_t>(kUltraHDRDataSpace))) &&
(streamInfo.format == HAL_PIXEL_FORMAT_BLOB));
}
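Reviewer note: isHeicCompositeStreamInfo() now also matches the UltraHDR dataspace. The predicate in isolation, with placeholder constants (the real values come from the HAL dataspace headers and kUltraHDRDataSpace in HeicCompositeStream.h):

#include <cstdint>

constexpr int32_t kDataSpaceHeif = 0x1004;      // assumed HAL_DATASPACE_HEIF value
constexpr int32_t kUltraHDRDataSpace = 0x1006;  // assumed UltraHDR HEIF dataspace
constexpr int32_t kFormatBlob = 0x21;           // assumed HAL_PIXEL_FORMAT_BLOB

bool isHeicLikeBlob(int32_t format, int32_t dataSpace) {
    const bool heifLike = (dataSpace == kDataSpaceHeif) ||
                          (dataSpace == kUltraHDRDataSpace);
    return (format == kFormatBlob) && heifLike;
}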
@@ -120,7 +147,8 @@
return false;
}
- return ((format == HAL_PIXEL_FORMAT_BLOB) && (dataspace == HAL_DATASPACE_HEIF));
+ return ((format == HAL_PIXEL_FORMAT_BLOB) && ((dataspace == HAL_DATASPACE_HEIF) ||
+ (dataspace == static_cast<int>(kUltraHDRDataSpace))));
}
status_t HeicCompositeStream::createInternalStreams(const std::vector<SurfaceHolder>& consumers,
@@ -130,13 +158,27 @@
std::vector<int> *surfaceIds,
int /*streamSetId*/, bool /*isShared*/, int32_t colorSpace,
int64_t /*dynamicProfile*/, int64_t /*streamUseCase*/, bool useReadoutTimestamp) {
+
sp<CameraDeviceBase> device = mDevice.promote();
if (!device.get()) {
ALOGE("%s: Invalid camera device!", __FUNCTION__);
return NO_INIT;
}
- status_t res = initializeCodec(width, height, device);
+ ANativeWindow* anw = consumers[0].mSurface.get();
+ int dataspace;
+ status_t res;
+ if ((res = anw->query(anw, NATIVE_WINDOW_DEFAULT_DATASPACE, &dataspace)) != OK) {
+ ALOGE("%s: Failed to query Surface dataspace: %s (%d)", __FUNCTION__, strerror(-res),
+ res);
+ return res;
+ }
+ if ((dataspace == static_cast<int>(kUltraHDRDataSpace)) && flags::camera_heif_gainmap()) {
+ mHDRGainmapEnabled = true;
+ mInternalDataSpace = static_cast<android_dataspace_t>(HAL_DATASPACE_BT2020_HLG);
+ }
+
+ res = initializeCodec(width, height, device);
if (res != OK) {
ALOGE("%s: Failed to initialize HEIC/HEVC codec: %s (%d)",
__FUNCTION__, strerror(-res), res);
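Reviewer note: before codec init, createInternalStreams() now sniffs the consumer's default dataspace to decide whether the HDR gainmap path is active. The query pattern, reduced to a sketch (ANativeWindow and the query key come from system/window.h):

#include <system/window.h>

// Returns 0 on success and fills outDataSpace, mirroring the query above that
// gates mHDRGainmapEnabled on the UltraHDR dataspace.
int defaultDataSpaceOf(ANativeWindow* anw, int* outDataSpace) {
    return anw->query(anw, NATIVE_WINDOW_DEFAULT_DATASPACE, outDataSpace);
}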
@@ -144,42 +186,48 @@
}
#if COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
- mAppSegmentConsumer = new CpuConsumer(kMaxAcquiredAppSegment);
- mAppSegmentConsumer->setFrameAvailableListener(this);
- mAppSegmentConsumer->setName(String8("Camera3-HeicComposite-AppSegmentStream"));
- mAppSegmentSurface = mAppSegmentConsumer->getSurface();
- sp<IGraphicBufferProducer> producer = mAppSegmentSurface->getIGraphicBufferProducer();
+ if (mAppSegmentSupported) {
+ mAppSegmentConsumer = new CpuConsumer(kMaxAcquiredAppSegment);
+ mAppSegmentConsumer->setFrameAvailableListener(this);
+ mAppSegmentConsumer->setName(String8("Camera3-HeicComposite-AppSegmentStream"));
+ mAppSegmentSurface = mAppSegmentConsumer->getSurface();
+ }
+ sp<IGraphicBufferProducer> producer = mAppSegmentSurface.get() != nullptr ?
+ mAppSegmentSurface->getIGraphicBufferProducer() : nullptr;
#else
sp<IGraphicBufferProducer> producer;
sp<IGraphicBufferConsumer> consumer;
- BufferQueue::createBufferQueue(&producer, &consumer);
- mAppSegmentConsumer = new CpuConsumer(consumer, kMaxAcquiredAppSegment);
- mAppSegmentConsumer->setFrameAvailableListener(this);
- mAppSegmentConsumer->setName(String8("Camera3-HeicComposite-AppSegmentStream"));
- mAppSegmentSurface = new Surface(producer);
+ if (mAppSegmentSupported) {
+ BufferQueue::createBufferQueue(&producer, &consumer);
+ mAppSegmentConsumer = new CpuConsumer(consumer, kMaxAcquiredAppSegment);
+ mAppSegmentConsumer->setFrameAvailableListener(this);
+ mAppSegmentConsumer->setName(String8("Camera3-HeicComposite-AppSegmentStream"));
+ mAppSegmentSurface = new Surface(producer);
+ }
#endif // COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
- mStaticInfo = device->info();
-
- res = device->createStream(mAppSegmentSurface, mAppSegmentMaxSize, 1, format,
- kAppSegmentDataSpace, rotation, &mAppSegmentStreamId, physicalCameraId,
- sensorPixelModesUsed, surfaceIds, camera3::CAMERA3_STREAM_SET_ID_INVALID,
- /*isShared*/false, /*isMultiResolution*/false,
- /*consumerUsage*/0, ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD,
- ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
- OutputConfiguration::TIMESTAMP_BASE_DEFAULT,
- OutputConfiguration::MIRROR_MODE_AUTO,
- colorSpace,
- useReadoutTimestamp);
- if (res == OK) {
- mAppSegmentSurfaceId = (*surfaceIds)[0];
- } else {
- ALOGE("%s: Failed to create JPEG App segment stream: %s (%d)", __FUNCTION__,
- strerror(-res), res);
- return res;
+ if (mAppSegmentSupported) {
+ std::vector<int> sourceSurfaceId;
+ res = device->createStream(mAppSegmentSurface, mAppSegmentMaxSize, 1, format,
+ kAppSegmentDataSpace, rotation, &mAppSegmentStreamId, physicalCameraId,
+ sensorPixelModesUsed, &sourceSurfaceId, camera3::CAMERA3_STREAM_SET_ID_INVALID,
+ /*isShared*/false, /*isMultiResolution*/false,
+ /*consumerUsage*/0, ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD,
+ ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
+ OutputConfiguration::TIMESTAMP_BASE_DEFAULT,
+ OutputConfiguration::MIRROR_MODE_AUTO,
+ colorSpace,
+ useReadoutTimestamp);
+ if (res == OK) {
+ mAppSegmentSurfaceId = sourceSurfaceId[0];
+ } else {
+ ALOGE("%s: Failed to create JPEG App segment stream: %s (%d)", __FUNCTION__,
+ strerror(-res), res);
+ return res;
+ }
}
- if (!mUseGrid) {
+ if (!mUseGrid && !mHDRGainmapEnabled) {
res = mCodec->createInputSurface(&producer);
if (res != OK) {
ALOGE("%s: Failed to create input surface for Heic codec: %s (%d)",
@@ -206,21 +254,32 @@
return res;
}
- std::vector<int> sourceSurfaceId;
- //Use YUV_888 format if framework tiling is needed.
- int srcStreamFmt = mUseGrid ? HAL_PIXEL_FORMAT_YCbCr_420_888 :
- HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
- res = device->createStream(mMainImageSurface, width, height, srcStreamFmt, kHeifDataSpace,
- rotation, id, physicalCameraId, sensorPixelModesUsed, &sourceSurfaceId,
+ if (mHDRGainmapEnabled) {
+ res = mGainmapCodec->start();
+ if (res != OK) {
+ ALOGE("%s: Failed to start gainmap codec: %s (%d)", __FUNCTION__,
+ strerror(-res), res);
+ return res;
+ }
+ }
+
+ // Use P010 when the HDR gainmap is enabled; otherwise YUV_420_888 if framework tiling is needed.
+ int srcStreamFmt = mHDRGainmapEnabled ?
+ static_cast<android_pixel_format_t>(HAL_PIXEL_FORMAT_YCBCR_P010) : mUseGrid ?
+ HAL_PIXEL_FORMAT_YCbCr_420_888 : HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
+ res = device->createStream(mMainImageSurface, width, height, srcStreamFmt, mInternalDataSpace,
+ rotation, id, physicalCameraId, sensorPixelModesUsed, surfaceIds,
camera3::CAMERA3_STREAM_SET_ID_INVALID, /*isShared*/false, /*isMultiResolution*/false,
- /*consumerUsage*/0, ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD,
+ /*consumerUsage*/0, mHDRGainmapEnabled ?
+ ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_HLG10 :
+ ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD,
ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
OutputConfiguration::TIMESTAMP_BASE_DEFAULT,
OutputConfiguration::MIRROR_MODE_AUTO,
colorSpace,
useReadoutTimestamp);
if (res == OK) {
- mMainImageSurfaceId = sourceSurfaceId[0];
+ mMainImageSurfaceId = (*surfaceIds)[0];
mMainImageStreamId = *id;
} else {
ALOGE("%s: Failed to create main image stream: %s (%d)", __FUNCTION__,
@@ -236,11 +295,13 @@
return res;
}
- res = registerCompositeStreamListener(mAppSegmentStreamId);
- if (res != OK) {
- ALOGE("%s: Failed to register HAL app segment stream: %s (%d)", __FUNCTION__,
- strerror(-res), res);
- return res;
+ if (mAppSegmentSupported) {
+ res = registerCompositeStreamListener(mAppSegmentStreamId);
+ if (res != OK) {
+ ALOGE("%s: Failed to register HAL app segment stream: %s (%d)", __FUNCTION__,
+ strerror(-res), res);
+ return res;
+ }
}
initCopyRowFunction(width);
@@ -299,6 +360,9 @@
mCodecOutputBufferFrameNumbers.push(bufferInfo.mFrameNumber);
ALOGV("%s: [%" PRId64 "]: Adding main image frame number (%zu frame numbers in total)",
__FUNCTION__, bufferInfo.mFrameNumber, mMainImageFrameNumbers.size());
+ if (mHDRGainmapEnabled) {
+ mCodecGainmapOutputBufferFrameNumbers.push(bufferInfo.mFrameNumber);
+ }
} else if (bufferInfo.mStreamId == mAppSegmentStreamId) {
mAppSegmentFrameNumbers.push(bufferInfo.mFrameNumber);
ALOGV("%s: [%" PRId64 "]: Adding app segment frame number (%zu frame numbers in total)",
@@ -346,13 +410,13 @@
mInputAppSegmentBuffers.push_back(item.mTimestamp);
mInputReadyCondition.signal();
}
- } else if (item.mDataSpace == kHeifDataSpace) {
- ALOGV("%s: YUV_888 buffer with ts: %" PRIu64 " ms. arrived!",
+ } else if (item.mDataSpace == mInternalDataSpace) {
+ ALOGV("%s: YUV_420 buffer with ts: %" PRIu64 " ms. arrived!",
__func__, ns2ms(item.mTimestamp));
Mutex::Autolock l(mMutex);
- if (!mUseGrid) {
- ALOGE("%s: YUV_888 internal stream is only supported for HEVC tiling",
+ if (!mUseGrid && !mHDRGainmapEnabled) {
+ ALOGE("%s: YUV_420 internal stream is only supported for HEVC tiling",
__FUNCTION__);
return;
}
@@ -367,6 +431,7 @@
status_t HeicCompositeStream::getCompositeStreamInfo(const OutputStreamInfo &streamInfo,
const CameraMetadata& ch, std::vector<OutputStreamInfo>* compositeOutput /*out*/) {
+ bool gainmapEnabled = false;
if (compositeOutput == nullptr) {
return BAD_VALUE;
}
@@ -381,30 +446,44 @@
return OK;
}
- compositeOutput->insert(compositeOutput->end(), 2, streamInfo);
+ if (streamInfo.dataSpace == static_cast<android_dataspace_t>(kUltraHDRDataSpace)) {
+ gainmapEnabled = true;
+ }
- // JPEG APPS segments Blob stream info
- (*compositeOutput)[0].width = calcAppSegmentMaxSize(ch);
- (*compositeOutput)[0].height = 1;
- (*compositeOutput)[0].format = HAL_PIXEL_FORMAT_BLOB;
- (*compositeOutput)[0].dataSpace = kAppSegmentDataSpace;
- (*compositeOutput)[0].consumerUsage = GRALLOC_USAGE_SW_READ_OFTEN;
+ compositeOutput->clear();
+ compositeOutput->push_back({});
// YUV/IMPLEMENTATION_DEFINED stream info
- (*compositeOutput)[1].width = streamInfo.width;
- (*compositeOutput)[1].height = streamInfo.height;
- (*compositeOutput)[1].format = useGrid ? HAL_PIXEL_FORMAT_YCbCr_420_888 :
- HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
- (*compositeOutput)[1].dataSpace = kHeifDataSpace;
- (*compositeOutput)[1].consumerUsage = useHeic ? GRALLOC_USAGE_HW_IMAGE_ENCODER :
+ (*compositeOutput)[0].width = streamInfo.width;
+ (*compositeOutput)[0].height = streamInfo.height;
+ (*compositeOutput)[0].format = gainmapEnabled ?
+ static_cast<android_pixel_format_t>(HAL_PIXEL_FORMAT_YCBCR_P010) : useGrid ?
+ HAL_PIXEL_FORMAT_YCbCr_420_888 : HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
+ (*compositeOutput)[0].dataSpace = gainmapEnabled ?
+ static_cast<android_dataspace_t>(HAL_DATASPACE_BT2020_HLG) : kHeifDataSpace;
+ (*compositeOutput)[0].consumerUsage = useHeic ? GRALLOC_USAGE_HW_IMAGE_ENCODER :
useGrid ? GRALLOC_USAGE_SW_READ_OFTEN : GRALLOC_USAGE_HW_VIDEO_ENCODER;
+
+ camera_metadata_ro_entry halHeicSupport = ch.find(ANDROID_HEIC_INFO_SUPPORTED);
+ if (halHeicSupport.count == 1 &&
+ halHeicSupport.data.u8[0] == ANDROID_HEIC_INFO_SUPPORTED_TRUE) {
+
+ compositeOutput->push_back({});
+ // JPEG APPS segments Blob stream info
+ (*compositeOutput)[1].width = calcAppSegmentMaxSize(ch);
+ (*compositeOutput)[1].height = 1;
+ (*compositeOutput)[1].format = HAL_PIXEL_FORMAT_BLOB;
+ (*compositeOutput)[1].dataSpace = kAppSegmentDataSpace;
+ (*compositeOutput)[1].consumerUsage = GRALLOC_USAGE_SW_READ_OFTEN;
+ }
+
return NO_ERROR;
}
bool HeicCompositeStream::isSizeSupportedByHeifEncoder(int32_t width, int32_t height,
- bool* useHeic, bool* useGrid, int64_t* stall, AString* hevcName) {
- static HeicEncoderInfoManager& heicManager = HeicEncoderInfoManager::getInstance();
+ bool* useHeic, bool* useGrid, int64_t* stall, AString* hevcName, bool allowSWCodec) {
+ static HeicEncoderInfoManager& heicManager = HeicEncoderInfoManager::getInstance(allowSWCodec);
return heicManager.isSizeSupported(width, height, useHeic, useGrid, stall, hevcName);
}
@@ -421,7 +500,7 @@
}
void HeicCompositeStream::onHeicOutputFrameAvailable(
- const CodecOutputBufferInfo& outputBufferInfo) {
+ const CodecOutputBufferInfo& outputBufferInfo, bool isGainmap) {
Mutex::Autolock l(mMutex);
ALOGV("%s: index %d, offset %d, size %d, time %" PRId64 ", flags 0x%x",
@@ -431,31 +510,34 @@
if (!mErrorState) {
if ((outputBufferInfo.size > 0) &&
((outputBufferInfo.flags & MediaCodec::BUFFER_FLAG_CODECCONFIG) == 0)) {
- mCodecOutputBuffers.push_back(outputBufferInfo);
+ isGainmap ? mGainmapCodecOutputBuffers.push_back(outputBufferInfo) :
+ mCodecOutputBuffers.push_back(outputBufferInfo);
mInputReadyCondition.signal();
} else {
ALOGV("%s: Releasing output buffer: size %d flags: 0x%x ", __FUNCTION__,
outputBufferInfo.size, outputBufferInfo.flags);
- mCodec->releaseOutputBuffer(outputBufferInfo.index);
+ isGainmap ? mGainmapCodec->releaseOutputBuffer(outputBufferInfo.index) :
+ mCodec->releaseOutputBuffer(outputBufferInfo.index);
}
} else {
- mCodec->releaseOutputBuffer(outputBufferInfo.index);
+ isGainmap ? mGainmapCodec->releaseOutputBuffer(outputBufferInfo.index) :
+ mCodec->releaseOutputBuffer(outputBufferInfo.index);
}
}
-void HeicCompositeStream::onHeicInputFrameAvailable(int32_t index) {
+void HeicCompositeStream::onHeicInputFrameAvailable(int32_t index, bool isGainmap) {
Mutex::Autolock l(mMutex);
- if (!mUseGrid) {
+ if (!mUseGrid && !mHDRGainmapEnabled) {
ALOGE("%s: Codec YUV input mode must only be used for Hevc tiling mode", __FUNCTION__);
return;
}
- mCodecInputBuffers.push_back(index);
+ isGainmap ? mGainmapCodecInputBuffers.push_back(index) : mCodecInputBuffers.push_back(index);
mInputReadyCondition.signal();
}
-void HeicCompositeStream::onHeicFormatChanged(sp<AMessage>& newFormat) {
+void HeicCompositeStream::onHeicGainmapFormatChanged(sp<AMessage>& newFormat) {
if (newFormat == nullptr) {
ALOGE("%s: newFormat must not be null!", __FUNCTION__);
return;
@@ -470,6 +552,66 @@
// For HEVC codec, below keys need to be filled out or overwritten so that the
// muxer can handle them as HEIC output image.
newFormat->setString(KEY_MIME, mimeHeic);
+ newFormat->setInt32(KEY_WIDTH, mGainmapOutputWidth);
+ newFormat->setInt32(KEY_HEIGHT, mGainmapOutputHeight);
+ }
+
+ if (mGainmapUseGrid) {
+ int32_t gridRows, gridCols, tileWidth, tileHeight;
+ if (newFormat->findInt32(KEY_GRID_ROWS, &gridRows) &&
+ newFormat->findInt32(KEY_GRID_COLUMNS, &gridCols) &&
+ newFormat->findInt32(KEY_TILE_WIDTH, &tileWidth) &&
+ newFormat->findInt32(KEY_TILE_HEIGHT, &tileHeight)) {
+ mGainmapGridWidth = tileWidth;
+ mGainmapGridHeight = tileHeight;
+ mGainmapGridRows = gridRows;
+ mGainmapGridCols = gridCols;
+ } else {
+ newFormat->setInt32(KEY_TILE_WIDTH, mGainmapGridWidth);
+ newFormat->setInt32(KEY_TILE_HEIGHT, mGainmapGridHeight);
+ newFormat->setInt32(KEY_GRID_ROWS, mGainmapGridRows);
+ newFormat->setInt32(KEY_GRID_COLUMNS, mGainmapGridCols);
+ }
+ int32_t left, top, right, bottom;
+ if (newFormat->findRect("crop", &left, &top, &right, &bottom)) {
+ newFormat->setRect("crop", 0, 0, mGainmapOutputWidth - 1, mGainmapOutputHeight - 1);
+ }
+ }
+ newFormat->setInt32(KEY_IS_DEFAULT, 1 /*isPrimary*/);
+
+ int32_t gridRows, gridCols;
+ if (newFormat->findInt32(KEY_GRID_ROWS, &gridRows) &&
+ newFormat->findInt32(KEY_GRID_COLUMNS, &gridCols)) {
+ mNumGainmapOutputTiles = gridRows * gridCols;
+ } else {
+ mNumGainmapOutputTiles = 1;
+ }
+
+ mGainmapFormat = newFormat;
+
+ ALOGV("%s: mNumOutputTiles is %zu", __FUNCTION__, mNumOutputTiles);
+ mInputReadyCondition.signal();
+}
+
+
+void HeicCompositeStream::onHeicFormatChanged(sp<AMessage>& newFormat, bool isGainmap) {
+ if (newFormat == nullptr) {
+ ALOGE("%s: newFormat must not be null!", __FUNCTION__);
+ return;
+ }
+
+ if (isGainmap) {
+ return onHeicGainmapFormatChanged(newFormat);
+ }
+ Mutex::Autolock l(mMutex);
+
+ AString mime;
+ AString mimeHeic(MIMETYPE_IMAGE_ANDROID_HEIC);
+ newFormat->findString(KEY_MIME, &mime);
+ if (mime != mimeHeic) {
+ // For HEVC codec, below keys need to be filled out or overwritten so that the
+ // muxer can handle them as HEIC output image.
+ newFormat->setString(KEY_MIME, mimeHeic);
newFormat->setInt32(KEY_WIDTH, mOutputWidth);
newFormat->setInt32(KEY_HEIGHT, mOutputHeight);
}
@@ -577,10 +719,12 @@
status_t HeicCompositeStream::insertGbp(SurfaceMap* /*out*/outSurfaceMap,
Vector<int32_t>* /*out*/outputStreamIds, int32_t* /*out*/currentStreamId) {
- if (outSurfaceMap->find(mAppSegmentStreamId) == outSurfaceMap->end()) {
- outputStreamIds->push_back(mAppSegmentStreamId);
+ if (mAppSegmentSupported) {
+ if (outSurfaceMap->find(mAppSegmentStreamId) == outSurfaceMap->end()) {
+ outputStreamIds->push_back(mAppSegmentStreamId);
+ }
+ (*outSurfaceMap)[mAppSegmentStreamId].push_back(mAppSegmentSurfaceId);
}
- (*outSurfaceMap)[mAppSegmentStreamId].push_back(mAppSegmentSurfaceId);
if (outSurfaceMap->find(mMainImageStreamId) == outSurfaceMap->end()) {
outputStreamIds->push_back(mMainImageStreamId);
@@ -600,7 +744,9 @@
return BAD_VALUE;
}
- compositeStreamIds->push_back(mAppSegmentStreamId);
+ if (mAppSegmentSupported) {
+ compositeStreamIds->push_back(mAppSegmentStreamId);
+ }
compositeStreamIds->push_back(mMainImageStreamId);
return OK;
@@ -762,6 +908,31 @@
mCodecOutputBuffers.erase(it);
}
+ while (!mGainmapCodecOutputBuffers.empty()) {
+ auto it = mGainmapCodecOutputBuffers.begin();
+ // Assume encoder input to output is FIFO, use a queue to look up
+ // frameNumber when handling codec outputs.
+ int64_t bufferFrameNumber = -1;
+ if (mCodecGainmapOutputBufferFrameNumbers.empty()) {
+ ALOGV("%s: Failed to find buffer frameNumber for gainmap codec output buffer!",
+ __FUNCTION__);
+ break;
+ } else {
+ // Direct mapping between camera frame number and codec timestamp (in us).
+ bufferFrameNumber = mCodecGainmapOutputBufferFrameNumbers.front();
+ mCodecGainmapOutputCounter++;
+ if (mCodecGainmapOutputCounter == mNumGainmapOutputTiles) {
+ mCodecGainmapOutputBufferFrameNumbers.pop();
+ mCodecGainmapOutputCounter = 0;
+ }
+
+ mPendingInputFrames[bufferFrameNumber].gainmapCodecOutputBuffers.push_back(*it);
+ ALOGV("%s: [%" PRId64 "]: Pushing gainmap codecOutputBuffers (frameNumber %" PRId64 ")",
+ __FUNCTION__, bufferFrameNumber, it->timeUs);
+ }
+ mGainmapCodecOutputBuffers.erase(it);
+ }
+
while (!mCaptureResults.empty()) {
auto it = mCaptureResults.begin();
// Negative frame number indicates that something went wrong during the capture result
@@ -772,6 +943,9 @@
if (mPendingInputFrames[frameNumber].timestamp == it->first) {
mPendingInputFrames[frameNumber].result =
std::make_unique<CameraMetadata>(std::get<1>(it->second));
+ if (!mAppSegmentSupported) {
+ mPendingInputFrames[frameNumber].exifError = true;
+ }
} else {
ALOGE("%s: Capture result frameNumber/timestamp mapping changed between "
"shutter and capture result! before: %" PRId64 ", after: %" PRId64,
@@ -825,6 +999,27 @@
break;
}
}
+
+ // Distribute gainmap codec input buffers to be filled out from the gainmap image
+ for (auto it = mPendingInputFrames.begin();
+ it != mPendingInputFrames.end() && mGainmapCodecInputBuffers.size() > 0; it++) {
+ InputFrame& inputFrame(it->second);
+ if (inputFrame.gainmapCodecInputCounter < mGainmapGridRows * mGainmapGridCols) {
+ // Available input tiles that are required for the current input
+ // image.
+ size_t newInputTiles = std::min(mGainmapCodecInputBuffers.size(),
+ mGainmapGridRows * mGainmapGridCols - inputFrame.gainmapCodecInputCounter);
+ for (size_t i = 0; i < newInputTiles; i++) {
+ CodecInputBufferInfo inputInfo = { mGainmapCodecInputBuffers[0],
+ mGridTimestampUs++, inputFrame.gainmapCodecInputCounter };
+ inputFrame.gainmapCodecInputBuffers.push_back(inputInfo);
+
+ mGainmapCodecInputBuffers.erase(mGainmapCodecInputBuffers.begin());
+ inputFrame.gainmapCodecInputCounter++;
+ }
+ break;
+ }
+ }
}
bool HeicCompositeStream::getNextReadyInputLocked(int64_t *frameNumber /*out*/) {
@@ -845,7 +1040,8 @@
(it.second.appSegmentBuffer.data != nullptr || it.second.exifError) &&
!it.second.appSegmentWritten && it.second.result != nullptr &&
it.second.muxer != nullptr;
- bool codecOutputReady = !it.second.codecOutputBuffers.empty();
+ bool codecOutputReady = !it.second.codecOutputBuffers.empty() ||
+ !it.second.gainmapCodecOutputBuffers.empty();
bool codecInputReady = (it.second.yuvBuffer.data != nullptr) &&
(!it.second.codecInputBuffers.empty());
bool hasOutputBuffer = it.second.muxer != nullptr ||
@@ -856,6 +1052,9 @@
if (it.second.format == nullptr && mFormat != nullptr) {
it.second.format = mFormat->dup();
}
+ if (it.second.gainmapFormat == nullptr && mGainmapFormat != nullptr){
+ it.second.gainmapFormat = mGainmapFormat->dup();
+ }
newInputAvailable = true;
break;
}
@@ -886,11 +1085,15 @@
(inputFrame.appSegmentBuffer.data != nullptr || inputFrame.exifError) &&
!inputFrame.appSegmentWritten && inputFrame.result != nullptr &&
inputFrame.muxer != nullptr;
- bool codecOutputReady = inputFrame.codecOutputBuffers.size() > 0;
+ bool codecOutputReady = inputFrame.codecOutputBuffers.size() > 0 ||
+ inputFrame.gainmapCodecOutputBuffers.size() > 0;
bool codecInputReady = inputFrame.yuvBuffer.data != nullptr &&
!inputFrame.codecInputBuffers.empty();
+ bool gainmapCodecInputReady = inputFrame.gainmapImage.get() != nullptr &&
+ !inputFrame.gainmapCodecInputBuffers.empty();
bool hasOutputBuffer = inputFrame.muxer != nullptr ||
(mDequeuedOutputBufferCnt < kMaxOutputSurfaceProducerCount);
+ bool hasGainmapMetadata = !inputFrame.isoGainmapMetadata.empty();
ALOGV("%s: [%" PRId64 "]: appSegmentReady %d, codecOutputReady %d, codecInputReady %d,"
" dequeuedOutputBuffer %d, timestamp %" PRId64, __FUNCTION__, frameNumber,
@@ -899,6 +1102,15 @@
// Handle inputs for Hevc tiling
if (codecInputReady) {
+ if (mHDRGainmapEnabled && (inputFrame.baseBuffer.get() == nullptr)) {
+ auto res = generateBaseImageAndGainmap(inputFrame);
+ if (res != OK) {
+ ALOGE("%s: Error generating SDR base image and HDR gainmap: %s (%d)", __FUNCTION__,
+ strerror(-res), res);
+ return res;
+ }
+ }
+
res = processCodecInputFrame(inputFrame);
if (res != OK) {
ALOGE("%s: Failed to process codec input frame: %s (%d)", __FUNCTION__,
@@ -907,6 +1119,15 @@
}
}
+ if (gainmapCodecInputReady) {
+ res = processCodecGainmapInputFrame(inputFrame);
+ if (res != OK) {
+ ALOGE("%s: Failed to process gainmap codec input frame: %s (%d)", __FUNCTION__,
+ strerror(-res), res);
+ return res;
+ }
+ }
+
if (!(codecOutputReady && hasOutputBuffer) && !appSegmentReady) {
return OK;
}
@@ -923,6 +1144,31 @@
}
}
+ // Write the HDR gainmap metadata
+ if (hasGainmapMetadata) {
+ uint8_t kGainmapMetaMarker[] = {'t', 'm', 'a', 'p', '\0', '\0'};
+ sp<ABuffer> aBuffer =
+ new ABuffer(inputFrame.isoGainmapMetadata.size() + sizeof(kGainmapMetaMarker));
+ memcpy(aBuffer->data(), kGainmapMetaMarker, sizeof(kGainmapMetaMarker));
+ memcpy(aBuffer->data() + sizeof(kGainmapMetaMarker), inputFrame.isoGainmapMetadata.data(),
+ inputFrame.isoGainmapMetadata.size());
+
+ aBuffer->meta()->setInt32(KEY_COLOR_FORMAT, kCodecColorFormat);
+ aBuffer->meta()->setInt32("color-primaries", kCodecColorPrimaries);
+ aBuffer->meta()->setInt32("color-transfer", kCodecColorTransfer);
+ aBuffer->meta()->setInt32("color-matrix", kCodecColorMatrix);
+ aBuffer->meta()->setInt32("color-range", kCodecColorRange);
+ auto res = inputFrame.muxer->writeSampleData(aBuffer, inputFrame.trackIndex,
+ inputFrame.timestamp,
+ MediaCodec::BUFFER_FLAG_MUXER_DATA);
+ if (res != OK) {
+ ALOGE("%s: Failed to write HDR gainmap metadata to muxer: %s (%d)",
+ __FUNCTION__, strerror(-res), res);
+ return res;
+ }
+ inputFrame.isoGainmapMetadata.clear();
+ }
+
// Write JPEG APP segments data to the muxer.
if (appSegmentReady) {
res = processAppSegment(frameNumber, inputFrame);
@@ -943,7 +1189,17 @@
}
}
- if (inputFrame.pendingOutputTiles == 0) {
+ // Write media codec gainmap bitstream buffers to muxer.
+ while (!inputFrame.gainmapCodecOutputBuffers.empty()) {
+ res = processOneCodecGainmapOutputFrame(frameNumber, inputFrame);
+ if (res != OK) {
+ ALOGE("%s: Failed to process codec gainmap output frame: %s (%d)", __FUNCTION__,
+ strerror(-res), res);
+ return res;
+ }
+ }
+
+ if ((inputFrame.pendingOutputTiles == 0) && (inputFrame.gainmapPendingOutputTiles == 0)) {
if (inputFrame.appSegmentWritten) {
res = processCompletedInputFrame(frameNumber, inputFrame);
if (res != OK) {
@@ -1001,6 +1257,16 @@
inputFrame.trackIndex = trackId;
inputFrame.pendingOutputTiles = mNumOutputTiles;
+ if (inputFrame.gainmapFormat.get() != nullptr) {
+ trackId = inputFrame.muxer->addTrack(inputFrame.gainmapFormat);
+ if (trackId < 0) {
+ ALOGE("%s: Failed to addTrack to the muxer: %zd", __FUNCTION__, trackId);
+ return NO_INIT;
+ }
+ inputFrame.gainmapTrackIndex = trackId;
+ inputFrame.gainmapPendingOutputTiles = mNumGainmapOutputTiles;
+ }
+
res = inputFrame.muxer->start();
if (res != OK) {
ALOGE("%s: Failed to start MediaMuxer: %s (%d)",
@@ -1085,9 +1351,101 @@
inputFrame.appSegmentWritten = true;
// Release the buffer now so any pending input app segments can be processed
- mAppSegmentConsumer->unlockBuffer(inputFrame.appSegmentBuffer);
- inputFrame.appSegmentBuffer.data = nullptr;
- inputFrame.exifError = false;
+ if (!inputFrame.exifError) {
+ mAppSegmentConsumer->unlockBuffer(inputFrame.appSegmentBuffer);
+ inputFrame.appSegmentBuffer.data = nullptr;
+ inputFrame.exifError = false;
+ }
+
+ return OK;
+}
+
+status_t HeicCompositeStream::generateBaseImageAndGainmap(InputFrame &inputFrame) {
+ ultrahdr::JpegR jpegR(nullptr /*gles ctx*/, kGainmapScale);
+ inputFrame.baseBuffer = std::make_unique<ultrahdr::uhdr_raw_image_ext_t>(
+ kUltraHdrOutputFmt, kUltraHdrOutputGamut, kUltraHdrInputTransfer, kUltraHdrOutputRange,
+ inputFrame.yuvBuffer.width, inputFrame.yuvBuffer.height, 8/*stride*/);
+
+ uhdr_raw_image_t hdr_intent;
+ hdr_intent.fmt = kUltraHdrInputFmt;
+ hdr_intent.cg = kUltraHdrInputGamut;
+ hdr_intent.ct = kUltraHdrInputTransfer;
+ hdr_intent.range = kUltraHdrInputRange;
+ hdr_intent.w = inputFrame.yuvBuffer.width;
+ hdr_intent.h = inputFrame.yuvBuffer.height;
+ hdr_intent.planes[UHDR_PLANE_Y] = inputFrame.yuvBuffer.data;
+ hdr_intent.planes[UHDR_PLANE_UV] = inputFrame.yuvBuffer.dataCb;
+ hdr_intent.planes[UHDR_PLANE_V] = nullptr;
+ // libultrahdr expects strides in pixels; LockedBuffer strides are in bytes (P010: 2 bytes/sample)
+ hdr_intent.stride[UHDR_PLANE_Y] = inputFrame.yuvBuffer.stride / 2;
+ hdr_intent.stride[UHDR_PLANE_UV] = inputFrame.yuvBuffer.chromaStride / 2;
+ hdr_intent.stride[UHDR_PLANE_V] = 0;
+ auto res = jpegR.toneMap(&hdr_intent, inputFrame.baseBuffer.get());
+ if (res.error_code == UHDR_CODEC_OK) {
+ ALOGV("%s: Base image tonemapped successfully", __FUNCTION__);
+ } else {
+ ALOGE("%s: Failed during HDR to SDR tonemap: %d", __FUNCTION__, res.error_code);
+ return BAD_VALUE;
+ }
+
+ inputFrame.baseImage = std::make_unique<CpuConsumer::LockedBuffer>();
+ *inputFrame.baseImage = inputFrame.yuvBuffer;
+ inputFrame.baseImage->data = reinterpret_cast<uint8_t*>(
+ inputFrame.baseBuffer->planes[UHDR_PLANE_Y]);
+ inputFrame.baseImage->dataCb = reinterpret_cast<uint8_t*>(
+ inputFrame.baseBuffer->planes[UHDR_PLANE_U]);
+ inputFrame.baseImage->dataCr = reinterpret_cast<uint8_t*>(
+ inputFrame.baseBuffer->planes[UHDR_PLANE_V]);
+ inputFrame.baseImage->chromaStep = 1;
+ inputFrame.baseImage->stride = inputFrame.baseBuffer->stride[UHDR_PLANE_Y];
+ inputFrame.baseImage->chromaStride = inputFrame.baseBuffer->stride[UHDR_PLANE_UV];
+ inputFrame.baseImage->dataSpace = HAL_DATASPACE_V0_JFIF;
+
+ ultrahdr::uhdr_gainmap_metadata_ext_t metadata;
+ res = jpegR.generateGainMap(inputFrame.baseBuffer.get(), &hdr_intent, &metadata,
+ inputFrame.gainmap, false /*sdr_is_601*/, true /*use_luminance*/);
+ if (res.error_code == UHDR_CODEC_OK) {
+ ALOGV("%s: HDR gainmap generated successfully!", __FUNCTION__);
+ } else {
+ ALOGE("%s: Failed HDR gainmap: %d", __FUNCTION__, res.error_code);
+ return BAD_VALUE;
+ }
+ // Ensure the gainmap U/V planes are all 0
+ inputFrame.gainmapChroma = std::make_unique<uint8_t[]>(
+ inputFrame.gainmap->w * inputFrame.gainmap->h / 2);
+ memset(inputFrame.gainmapChroma.get(), 0, inputFrame.gainmap->w * inputFrame.gainmap->h / 2);
+
+ ultrahdr::uhdr_gainmap_metadata_frac iso_secondary_metadata;
+ res = ultrahdr::uhdr_gainmap_metadata_frac::gainmapMetadataFloatToFraction(
+ &metadata, &iso_secondary_metadata);
+ if (res.error_code == UHDR_CODEC_OK) {
+ ALOGV("%s: HDR gainmap converted to fractions successfully!", __FUNCTION__);
+ } else {
+ ALOGE("%s: Failed to convert HDR gainmap to fractions: %d", __FUNCTION__,
+ res.error_code);
+ return BAD_VALUE;
+ }
+
+ res = ultrahdr::uhdr_gainmap_metadata_frac::encodeGainmapMetadata(&iso_secondary_metadata,
+ inputFrame.isoGainmapMetadata);
+ if (res.error_code == UHDR_CODEC_OK) {
+ ALOGV("%s: HDR gainmap encoded to ISO format successfully!", __FUNCTION__);
+ } else {
+ ALOGE("%s: Failed to encode HDR gainmap to ISO format: %d", __FUNCTION__,
+ res.error_code);
+ return BAD_VALUE;
+ }
+
+ inputFrame.gainmapImage = std::make_unique<CpuConsumer::LockedBuffer>();
+ *inputFrame.gainmapImage = inputFrame.yuvBuffer;
+ inputFrame.gainmapImage->data = reinterpret_cast<uint8_t*>(
+ inputFrame.gainmap->planes[UHDR_PLANE_Y]);
+ inputFrame.gainmapImage->dataCb = inputFrame.gainmapChroma.get();
+ inputFrame.gainmapImage->dataCr = inputFrame.gainmapChroma.get() + 1;
+ inputFrame.gainmapImage->chromaStep = 2;
+ inputFrame.gainmapImage->stride = inputFrame.gainmap->stride[UHDR_PLANE_Y];
+ inputFrame.gainmapImage->chromaStride = inputFrame.gainmap->w;
+ inputFrame.gainmapImage->dataSpace = HAL_DATASPACE_V0_JFIF;
return OK;
}
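Reviewer note: generateBaseImageAndGainmap() converts the P010 byte strides of the LockedBuffer to the pixel strides libultrahdr expects. The arithmetic as a standalone sketch (P010 stores each Y sample and each interleaved CbCr element in 2 bytes):

#include <cstdint>

struct PixelStrides { uint32_t luma; uint32_t chroma; };

// Byte stride -> pixel stride for P010: divide by the 2-byte sample size.
PixelStrides toP010PixelStrides(uint32_t lumaByteStride, uint32_t chromaByteStride) {
    return { lumaByteStride / 2, chromaByteStride / 2 };
}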
@@ -1115,7 +1473,9 @@
" timeUs %" PRId64, __FUNCTION__, tileX, tileY, top, left, width, height,
inputBuffer.timeUs);
- res = copyOneYuvTile(buffer, inputFrame.yuvBuffer, top, left, width, height);
+ auto yuvInput = (inputFrame.baseImage.get() != nullptr) ?
+ *inputFrame.baseImage.get() : inputFrame.yuvBuffer;
+ res = copyOneYuvTile(buffer, yuvInput, top, left, width, height);
if (res != OK) {
ALOGE("%s: Failed to copy YUV tile %s (%d)", __FUNCTION__,
strerror(-res), res);
@@ -1135,6 +1495,50 @@
return OK;
}
+status_t HeicCompositeStream::processCodecGainmapInputFrame(InputFrame &inputFrame) {
+ for (auto& inputBuffer : inputFrame.gainmapCodecInputBuffers) {
+ sp<MediaCodecBuffer> buffer;
+ auto res = mGainmapCodec->getInputBuffer(inputBuffer.index, &buffer);
+ if (res != OK) {
+ ALOGE("%s: Error getting codec input buffer: %s (%d)", __FUNCTION__,
+ strerror(-res), res);
+ return res;
+ }
+
+ // Copy one tile from source to destination.
+ size_t tileX = inputBuffer.tileIndex % mGainmapGridCols;
+ size_t tileY = inputBuffer.tileIndex / mGainmapGridCols;
+ size_t top = mGainmapGridHeight * tileY;
+ size_t left = mGainmapGridWidth * tileX;
+ size_t width = (tileX == static_cast<size_t>(mGainmapGridCols) - 1) ?
+ mGainmapOutputWidth - tileX * mGainmapGridWidth : mGainmapGridWidth;
+ size_t height = (tileY == static_cast<size_t>(mGainmapGridRows) - 1) ?
+ mGainmapOutputHeight - tileY * mGainmapGridHeight : mGainmapGridHeight;
+ ALOGV("%s: gainmap inputBuffer tileIndex [%zu, %zu], top %zu, left %zu, width %zu, "
+ "height %zu, timeUs %" PRId64, __FUNCTION__, tileX, tileY, top, left, width, height,
+ inputBuffer.timeUs);
+
+ auto yuvInput = *inputFrame.gainmapImage;
+ res = copyOneYuvTile(buffer, yuvInput, top, left, width, height);
+ if (res != OK) {
+ ALOGE("%s: Failed to copy YUV tile %s (%d)", __FUNCTION__,
+ strerror(-res), res);
+ return res;
+ }
+
+ res = mGainmapCodec->queueInputBuffer(inputBuffer.index, 0, buffer->capacity(),
+ inputBuffer.timeUs, 0, nullptr /*errorDetailMsg*/);
+ if (res != OK) {
+ ALOGE("%s: Failed to queueInputBuffer to Codec: %s (%d)",
+ __FUNCTION__, strerror(-res), res);
+ return res;
+ }
+ }
+
+ inputFrame.gainmapCodecInputBuffers.clear();
+ return OK;
+}
+
status_t HeicCompositeStream::processOneCodecOutputFrame(int64_t frameNumber,
InputFrame &inputFrame) {
auto it = inputFrame.codecOutputBuffers.begin();
@@ -1152,6 +1556,13 @@
}
sp<ABuffer> aBuffer = new ABuffer(buffer->data(), buffer->size());
+ if (mHDRGainmapEnabled) {
+ aBuffer->meta()->setInt32(KEY_COLOR_FORMAT, kCodecColorFormat);
+ aBuffer->meta()->setInt32("color-primaries", kCodecColorPrimaries);
+ aBuffer->meta()->setInt32("color-transfer", kCodecColorTransfer);
+ aBuffer->meta()->setInt32("color-matrix", kCodecColorMatrix);
+ aBuffer->meta()->setInt32("color-range", kCodecColorRange);
+ }
res = inputFrame.muxer->writeSampleData(
aBuffer, inputFrame.trackIndex, inputFrame.timestamp, 0 /*flags*/);
if (res != OK) {
@@ -1174,6 +1585,54 @@
return OK;
}
+status_t HeicCompositeStream::processOneCodecGainmapOutputFrame(int64_t frameNumber,
+ InputFrame &inputFrame) {
+ auto it = inputFrame.gainmapCodecOutputBuffers.begin();
+ sp<MediaCodecBuffer> buffer;
+ status_t res = mGainmapCodec->getOutputBuffer(it->index, &buffer);
+ if (res != OK) {
+ ALOGE("%s: Error getting Heic gainmap codec output buffer at index %d: %s (%d)",
+ __FUNCTION__, it->index, strerror(-res), res);
+ return res;
+ }
+ if (buffer == nullptr) {
+ ALOGE("%s: Invalid Heic gainmap codec output buffer at index %d",
+ __FUNCTION__, it->index);
+ return BAD_VALUE;
+ }
+
+ uint8_t kGainmapMarker[] = {'g', 'm', 'a', 'p', '\0', '\0'};
+ sp<ABuffer> aBuffer = new ABuffer(buffer->size() + sizeof(kGainmapMarker));
+ memcpy(aBuffer->data(), kGainmapMarker, sizeof(kGainmapMarker));
+ memcpy(aBuffer->data() + sizeof(kGainmapMarker), buffer->data(), buffer->size());
+ aBuffer->meta()->setInt32(KEY_COLOR_FORMAT, kCodecGainmapColorFormat);
+ aBuffer->meta()->setInt32("color-primaries", kCodecGainmapColorPrimaries);
+ aBuffer->meta()->setInt32("color-transfer", kCodecGainmapColorTransfer);
+ aBuffer->meta()->setInt32("color-matrix", kCodecGainmapColorMatrix);
+ aBuffer->meta()->setInt32("color-range", kCodecGainmapColorRange);
+ res = inputFrame.muxer->writeSampleData(aBuffer, inputFrame.gainmapTrackIndex,
+ inputFrame.timestamp,
+ MediaCodec::BUFFER_FLAG_MUXER_DATA);
+ if (res != OK) {
+ ALOGE("%s: Failed to write buffer index %d to muxer: %s (%d)",
+ __FUNCTION__, it->index, strerror(-res), res);
+ return res;
+ }
+
+ mGainmapCodec->releaseOutputBuffer(it->index);
+ if (inputFrame.gainmapPendingOutputTiles == 0) {
+ ALOGW("%s: Codec generated more gainmap tiles than expected!", __FUNCTION__);
+ } else {
+ inputFrame.gainmapPendingOutputTiles--;
+ }
+
+ inputFrame.gainmapCodecOutputBuffers.erase(inputFrame.gainmapCodecOutputBuffers.begin());
+
+ ALOGV("%s: [%" PRId64 "]: Gainmap output buffer index %d",
+ __FUNCTION__, frameNumber, it->index);
+ return OK;
+}
+
status_t HeicCompositeStream::processCompletedInputFrame(int64_t frameNumber,
InputFrame &inputFrame) {
sp<ANativeWindow> outputANW = mOutputSurface;
@@ -1256,6 +1715,13 @@
inputFrame->codecOutputBuffers.erase(it);
}
+ while (!inputFrame->gainmapCodecOutputBuffers.empty()) {
+ auto it = inputFrame->gainmapCodecOutputBuffers.begin();
+ ALOGV("%s: release gainmap output buffer index %d", __FUNCTION__, it->index);
+ mGainmapCodec->releaseOutputBuffer(it->index);
+ inputFrame->gainmapCodecOutputBuffers.erase(it);
+ }
+
if (inputFrame->yuvBuffer.data != nullptr) {
mMainImageConsumer->unlockBuffer(inputFrame->yuvBuffer);
inputFrame->yuvBuffer.data = nullptr;
@@ -1267,6 +1733,11 @@
inputFrame->codecInputBuffers.erase(it);
}
+ while (!inputFrame->gainmapCodecInputBuffers.empty()) {
+ auto it = inputFrame->gainmapCodecInputBuffers.begin();
+ inputFrame->gainmapCodecInputBuffers.erase(it);
+ }
+
if (inputFrame->error || mErrorState) {
ALOGV("%s: notifyError called for frameNumber %" PRId64, __FUNCTION__, frameNumber);
notifyError(frameNumber, inputFrame->requestId);
@@ -1292,7 +1763,8 @@
while (it != mPendingInputFrames.end()) {
auto& inputFrame = it->second;
if (inputFrame.error ||
- (inputFrame.appSegmentWritten && inputFrame.pendingOutputTiles == 0)) {
+ (inputFrame.appSegmentWritten && inputFrame.pendingOutputTiles == 0 &&
+ inputFrame.gainmapPendingOutputTiles == 0)) {
releaseInputFrameLocked(it->first, &inputFrame);
it = mPendingInputFrames.erase(it);
inputFrameDone = true;
@@ -1318,6 +1790,110 @@
}
}
+status_t HeicCompositeStream::initializeGainmapCodec() {
+ ALOGV("%s", __FUNCTION__);
+
+ if (!mHDRGainmapEnabled) {
+ return OK;
+ }
+ uint32_t width = mOutputWidth / kGainmapScale;
+ uint32_t height = mOutputHeight / kGainmapScale;
+ bool useGrid = false;
+ bool useHeic = false;
+ AString hevcName;
+ bool isSizeSupported = isSizeSupportedByHeifEncoder(width, height,
+ &useHeic, &useGrid, nullptr, &hevcName);
+ if (!isSizeSupported) {
+ ALOGE("%s: Encoder doesn't support size %u x %u!",
+ __FUNCTION__, width, height);
+ return BAD_VALUE;
+ }
+
+ // Create HEVC codec.
+ mGainmapCodec = MediaCodec::CreateByComponentName(mCodecLooper, hevcName);
+ if (mGainmapCodec == nullptr) {
+ ALOGE("%s: Failed to create gainmap codec", __FUNCTION__);
+ return NO_INIT;
+ }
+
+ // Create Looper and handler for Codec callback.
+ mGainmapCodecCallbackHandler = new CodecCallbackHandler(this, true /*isGainmap*/);
+ if (mGainmapCodecCallbackHandler == nullptr) {
+ ALOGE("%s: Failed to create gainmap codec callback handler", __FUNCTION__);
+ return NO_MEMORY;
+ }
+ mGainmapCallbackLooper = new ALooper;
+ mGainmapCallbackLooper->setName("Camera3-HeicComposite-MediaCodecGainmapCallbackLooper");
+ auto res = mGainmapCallbackLooper->start(
+ false, // runOnCallingThread
+ false, // canCallJava
+ PRIORITY_AUDIO);
+ if (res != OK) {
+ ALOGE("%s: Failed to start gainmap media callback looper: %s (%d)",
+ __FUNCTION__, strerror(-res), res);
+ return NO_INIT;
+ }
+ mGainmapCallbackLooper->registerHandler(mGainmapCodecCallbackHandler);
+
+ mGainmapAsyncNotify = new AMessage(kWhatCallbackNotify, mGainmapCodecCallbackHandler);
+ res = mGainmapCodec->setCallback(mGainmapAsyncNotify);
+ if (res != OK) {
+ ALOGE("%s: Failed to set MediaCodec callback: %s (%d)", __FUNCTION__,
+ strerror(-res), res);
+ return res;
+ }
+
+ // Create output format and configure the Codec.
+ sp<AMessage> outputFormat = new AMessage();
+ outputFormat->setString(KEY_MIME, MIMETYPE_VIDEO_HEVC);
+ outputFormat->setInt32(KEY_BITRATE_MODE, BITRATE_MODE_CQ);
+ outputFormat->setInt32(KEY_QUALITY, kDefaultJpegQuality);
+ // Ask codec to skip timestamp check and encode all frames.
+ outputFormat->setInt64(KEY_MAX_PTS_GAP_TO_ENCODER, kNoFrameDropMaxPtsGap);
+
+ int32_t gridWidth, gridHeight, gridRows, gridCols;
+ if (useGrid) {
+ gridWidth = HeicEncoderInfoManager::kGridWidth;
+ gridHeight = HeicEncoderInfoManager::kGridHeight;
+ gridRows = (height + gridHeight - 1)/gridHeight;
+ gridCols = (width + gridWidth - 1)/gridWidth;
+ } else {
+ gridWidth = width;
+ gridHeight = height;
+ gridRows = 1;
+ gridCols = 1;
+ }
+
+ outputFormat->setInt32(KEY_WIDTH, !useGrid ? width : gridWidth);
+ outputFormat->setInt32(KEY_HEIGHT, !useGrid ? height : gridHeight);
+ outputFormat->setInt32(KEY_I_FRAME_INTERVAL, 0);
+ outputFormat->setInt32(KEY_COLOR_FORMAT, COLOR_FormatYUV420Flexible);
+ outputFormat->setInt32(KEY_FRAME_RATE, useGrid ? gridRows * gridCols : kNoGridOpRate);
+ // This only serves as a hint to encoder when encoding is not real-time.
+ outputFormat->setInt32(KEY_OPERATING_RATE, useGrid ? kGridOpRate : kNoGridOpRate);
+
+ res = mGainmapCodec->configure(outputFormat, nullptr /*nativeWindow*/,
+ nullptr /*crypto*/, CONFIGURE_FLAG_ENCODE);
+ if (res != OK) {
+ ALOGE("%s: Failed to configure codec: %s (%d)", __FUNCTION__,
+ strerror(-res), res);
+ return res;
+ }
+
+ mGainmapGridWidth = gridWidth;
+ mGainmapGridHeight = gridHeight;
+ mGainmapGridRows = gridRows;
+ mGainmapGridCols = gridCols;
+ mGainmapUseGrid = useGrid;
+ mGainmapOutputWidth = width;
+ mGainmapOutputHeight = height;
+ mMaxHeicBufferSize +=
+ ALIGN(mGainmapOutputWidth, HeicEncoderInfoManager::kGridWidth) *
+ ALIGN(mGainmapOutputHeight, HeicEncoderInfoManager::kGridHeight) * 3 / 2;
+
+ return OK;
+}
+
status_t HeicCompositeStream::initializeCodec(uint32_t width, uint32_t height,
const sp<CameraDeviceBase>& cameraDevice) {
ALOGV("%s", __FUNCTION__);
@@ -1331,6 +1907,12 @@
__FUNCTION__, width, height);
return BAD_VALUE;
}
+ if (mHDRGainmapEnabled) {
+ // HDR gainmap tonemapping and generation can only be done in SW
+ // with P010 input. HEIC codecs expect a private/implementation-defined
+ // format, which is opaque, so fall back to the HEVC path.
+ mUseHeic = false;
+ }
// Create Looper for MediaCodec.
auto desiredMime = mUseHeic ? MIMETYPE_IMAGE_ANDROID_HEIC : MIMETYPE_VIDEO_HEVC;
@@ -1417,7 +1999,7 @@
outputFormat->setInt32(KEY_HEIGHT, !useGrid ? height : gridHeight);
outputFormat->setInt32(KEY_I_FRAME_INTERVAL, 0);
outputFormat->setInt32(KEY_COLOR_FORMAT,
- useGrid ? COLOR_FormatYUV420Flexible : COLOR_FormatSurface);
+ useGrid || mHDRGainmapEnabled ? COLOR_FormatYUV420Flexible : COLOR_FormatSurface);
outputFormat->setInt32(KEY_FRAME_RATE, useGrid ? gridRows * gridCols : kNoGridOpRate);
// This only serves as a hint to encoder when encoding is not real-time.
outputFormat->setInt32(KEY_OPERATING_RATE, useGrid ? kGridOpRate : kNoGridOpRate);
@@ -1442,7 +2024,24 @@
ALIGN(mOutputWidth, HeicEncoderInfoManager::kGridWidth) *
ALIGN(mOutputHeight, HeicEncoderInfoManager::kGridHeight) * 3 / 2 + mAppSegmentMaxSize;
- return OK;
+ return initializeGainmapCodec();
+}
+
+void HeicCompositeStream::deinitGainmapCodec() {
+ ALOGV("%s", __FUNCTION__);
+ if (mGainmapCodec != nullptr) {
+ mGainmapCodec->stop();
+ mGainmapCodec->release();
+ mGainmapCodec.clear();
+ }
+
+ if (mGainmapCallbackLooper != nullptr) {
+ mGainmapCallbackLooper->stop();
+ mGainmapCallbackLooper.clear();
+ }
+
+ mGainmapAsyncNotify.clear();
+ mGainmapFormat.clear();
}
void HeicCompositeStream::deinitCodec() {
@@ -1453,6 +2052,8 @@
mCodec.clear();
}
+ deinitGainmapCodec();
+
if (mCodecLooper != nullptr) {
mCodecLooper->stop();
mCodecLooper.clear();
@@ -1873,7 +2474,7 @@
ALOGE("CB_INPUT_AVAILABLE: index is expected.");
break;
}
- parent->onHeicInputFrameAvailable(index);
+ parent->onHeicInputFrameAvailable(index, mIsGainmap);
break;
}
@@ -1912,7 +2513,7 @@
timeUs,
(uint32_t)flags};
- parent->onHeicOutputFrameAvailable(bufferInfo);
+ parent->onHeicOutputFrameAvailable(bufferInfo, mIsGainmap);
break;
}
@@ -1928,7 +2529,7 @@
if (format != nullptr) {
formatCopy = format->dup();
}
- parent->onHeicFormatChanged(formatCopy);
+ parent->onHeicFormatChanged(formatCopy, mIsGainmap);
break;
}
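Reviewer note: initializeGainmapCodec() sizes the gainmap at 1/kGainmapScale of the primary image in each dimension and derives the tile grid by ceiling division. The sizing math as a sketch (512x512 tiles are an assumption standing in for HeicEncoderInfoManager::kGridWidth/kGridHeight):

#include <cstdint>

struct Grid { int32_t rows; int32_t cols; };

constexpr uint32_t kGainmapScale = 4;  // matches HeicCompositeStream.h below
constexpr uint32_t kGridWidth = 512;   // assumed encoder tile width
constexpr uint32_t kGridHeight = 512;  // assumed encoder tile height

// Ceiling division: the last row/column may be a partial tile.
Grid gainmapGridFor(uint32_t mainWidth, uint32_t mainHeight) {
    const uint32_t w = mainWidth / kGainmapScale;
    const uint32_t h = mainHeight / kGainmapScale;
    return { static_cast<int32_t>((h + kGridHeight - 1) / kGridHeight),
             static_cast<int32_t>((w + kGridWidth - 1) / kGridWidth) };
}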
diff --git a/services/camera/libcameraservice/api2/HeicCompositeStream.h b/services/camera/libcameraservice/api2/HeicCompositeStream.h
index fad968a..beb08b0 100644
--- a/services/camera/libcameraservice/api2/HeicCompositeStream.h
+++ b/services/camera/libcameraservice/api2/HeicCompositeStream.h
@@ -17,6 +17,9 @@
#ifndef ANDROID_SERVERS_CAMERA_CAMERA3_HEIC_COMPOSITE_STREAM_H
#define ANDROID_SERVERS_CAMERA_CAMERA3_HEIC_COMPOSITE_STREAM_H
+#include <algorithm>
+#include <android/data_space.h>
+#include <memory>
#include <queue>
#include <gui/CpuConsumer.h>
@@ -27,6 +30,8 @@
#include <media/stagefright/foundation/AMessage.h>
#include <media/stagefright/MediaCodec.h>
#include <media/stagefright/MediaMuxer.h>
+#include <ultrahdr/ultrahdrcommon.h>
+#include <ultrahdr/gainmapmetadata.h>
#include "CompositeStream.h"
@@ -79,8 +84,13 @@
void getStreamStats(hardware::CameraStreamStats*) override {};
static bool isSizeSupportedByHeifEncoder(int32_t width, int32_t height,
- bool* useHeic, bool* useGrid, int64_t* stall, AString* hevcName = nullptr);
+ bool* useHeic, bool* useGrid, int64_t* stall, AString* hevcName = nullptr,
+ bool allowSWCodec = false);
static bool isInMemoryTempFileSupported();
+
+ // HDR Gainmap subsampling
+ static const size_t kGainmapScale = 4;
+
protected:
bool threadLoop() override;
@@ -108,12 +118,12 @@
class CodecCallbackHandler : public AHandler {
public:
- explicit CodecCallbackHandler(wp<HeicCompositeStream> parent) {
- mParent = parent;
- }
+ explicit CodecCallbackHandler(wp<HeicCompositeStream> parent, bool isGainmap = false) :
+ mParent(parent), mIsGainmap(isGainmap) {}
virtual void onMessageReceived(const sp<AMessage> &msg);
private:
wp<HeicCompositeStream> mParent;
+ bool mIsGainmap;
};
enum {
@@ -122,30 +132,34 @@
bool mUseHeic;
sp<MediaCodec> mCodec;
- sp<ALooper> mCodecLooper, mCallbackLooper;
- sp<CodecCallbackHandler> mCodecCallbackHandler;
- sp<AMessage> mAsyncNotify;
- sp<AMessage> mFormat;
- size_t mNumOutputTiles;
+ sp<MediaCodec> mGainmapCodec;
+ sp<ALooper> mCodecLooper, mCallbackLooper, mGainmapCallbackLooper;
+ sp<CodecCallbackHandler> mCodecCallbackHandler, mGainmapCodecCallbackHandler;
+ sp<AMessage> mAsyncNotify, mGainmapAsyncNotify;
+ sp<AMessage> mFormat, mGainmapFormat;
+ size_t mNumOutputTiles, mNumGainmapOutputTiles;
- int32_t mOutputWidth, mOutputHeight;
+ int32_t mOutputWidth, mOutputHeight, mGainmapOutputWidth, mGainmapOutputHeight;
size_t mMaxHeicBufferSize;
- int32_t mGridWidth, mGridHeight;
- size_t mGridRows, mGridCols;
- bool mUseGrid; // Whether to use framework YUV frame tiling.
+ int32_t mGridWidth, mGridHeight, mGainmapGridWidth, mGainmapGridHeight;
+ size_t mGridRows, mGridCols, mGainmapGridRows, mGainmapGridCols;
+ bool mUseGrid, mGainmapUseGrid; // Whether to use framework YUV frame tiling.
static const int64_t kNoFrameDropMaxPtsGap = -1000000;
static const int32_t kNoGridOpRate = 30;
static const int32_t kGridOpRate = 120;
- void onHeicOutputFrameAvailable(const CodecOutputBufferInfo& bufferInfo);
- void onHeicInputFrameAvailable(int32_t index); // Only called for YUV input mode.
- void onHeicFormatChanged(sp<AMessage>& newFormat);
+ void onHeicOutputFrameAvailable(const CodecOutputBufferInfo& bufferInfo, bool isGainmap);
+ void onHeicInputFrameAvailable(int32_t index, bool isGainmap); // Only called for YUV input mode.
+ void onHeicFormatChanged(sp<AMessage>& newFormat, bool isGainmap);
+ void onHeicGainmapFormatChanged(sp<AMessage>& newFormat);
void onHeicCodecError();
status_t initializeCodec(uint32_t width, uint32_t height,
const sp<CameraDeviceBase>& cameraDevice);
void deinitCodec();
+ status_t initializeGainmapCodec();
+ void deinitGainmapCodec();
//
// Composite stream related structures, utility functions and callbacks.
@@ -155,33 +169,51 @@
int32_t quality;
CpuConsumer::LockedBuffer appSegmentBuffer;
- std::vector<CodecOutputBufferInfo> codecOutputBuffers;
+ std::vector<CodecOutputBufferInfo> codecOutputBuffers, gainmapCodecOutputBuffers;
std::unique_ptr<CameraMetadata> result;
// Fields that are only applicable to HEVC tiling.
CpuConsumer::LockedBuffer yuvBuffer;
- std::vector<CodecInputBufferInfo> codecInputBuffers;
+ std::vector<CodecInputBufferInfo> codecInputBuffers, gainmapCodecInputBuffers;
bool error; // Main input image buffer error
bool exifError; // Exif/APP_SEGMENT buffer error
int64_t timestamp;
int32_t requestId;
- sp<AMessage> format;
+ sp<AMessage> format, gainmapFormat;
sp<MediaMuxer> muxer;
int fenceFd;
int fileFd;
- ssize_t trackIndex;
+ ssize_t trackIndex, gainmapTrackIndex;
ANativeWindowBuffer *anb;
bool appSegmentWritten;
- size_t pendingOutputTiles;
- size_t codecInputCounter;
+ size_t pendingOutputTiles, gainmapPendingOutputTiles;
+ size_t codecInputCounter, gainmapCodecInputCounter;
- InputFrame() : orientation(0), quality(kDefaultJpegQuality), error(false),
- exifError(false), timestamp(-1), requestId(-1), fenceFd(-1),
- fileFd(-1), trackIndex(-1), anb(nullptr), appSegmentWritten(false),
- pendingOutputTiles(0), codecInputCounter(0) { }
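+ // SDR base image, HDR gainmap buffers, and ISO gainmap metadata produced by
+ // the UltraHDR library during tonemapping.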
+ std::unique_ptr<CpuConsumer::LockedBuffer> baseImage, gainmapImage;
+ std::unique_ptr<ultrahdr::uhdr_raw_image_ext> baseBuffer, gainmap;
+ std::unique_ptr<uint8_t[]> gainmapChroma;
+ std::vector<uint8_t> isoGainmapMetadata;
+
+ InputFrame()
+ : orientation(0),
+ quality(kDefaultJpegQuality),
+ error(false),
+ exifError(false),
+ timestamp(-1),
+ requestId(-1),
+ fenceFd(-1),
+ fileFd(-1),
+ trackIndex(-1),
+ gainmapTrackIndex(-1),
+ anb(nullptr),
+ appSegmentWritten(false),
+ pendingOutputTiles(0),
+ gainmapPendingOutputTiles(0),
+ codecInputCounter(0),
+ gainmapCodecInputCounter(0) {}
};
void compilePendingInputLocked();
@@ -192,9 +224,11 @@
status_t processInputFrame(int64_t frameNumber, InputFrame &inputFrame);
status_t processCodecInputFrame(InputFrame &inputFrame);
+ status_t processCodecGainmapInputFrame(InputFrame &inputFrame);
status_t startMuxerForInputFrame(int64_t frameNumber, InputFrame &inputFrame);
status_t processAppSegment(int64_t frameNumber, InputFrame &inputFrame);
status_t processOneCodecOutputFrame(int64_t frameNumber, InputFrame &inputFrame);
+ status_t processOneCodecGainmapOutputFrame(int64_t frameNumber, InputFrame &inputFrame);
status_t processCompletedInputFrame(int64_t frameNumber, InputFrame &inputFrame);
void releaseInputFrameLocked(int64_t frameNumber, InputFrame *inputFrame /*out*/);
@@ -216,6 +250,7 @@
static_cast<android_dataspace>(HAL_DATASPACE_JPEG_APP_SEGMENTS);
static const android_dataspace kHeifDataSpace =
static_cast<android_dataspace>(HAL_DATASPACE_HEIF);
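+ // Internal dataspace of the main image stream; expected to switch to the
+ // UltraHDR dataspace when the HDR gainmap path is enabled.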
+ android_dataspace mInternalDataSpace = kHeifDataSpace;
// Use the limit of pipeline depth in the API spec as the maximum number of
// acquired app segment buffers.
static const uint32_t kMaxAcquiredAppSegment = 8;
@@ -260,15 +295,15 @@
std::vector<int64_t> mInputAppSegmentBuffers;
// Keep all incoming HEIC blob buffers pending further processing.
- std::vector<CodecOutputBufferInfo> mCodecOutputBuffers;
- std::queue<int64_t> mCodecOutputBufferFrameNumbers;
- size_t mCodecOutputCounter;
+ std::vector<CodecOutputBufferInfo> mCodecOutputBuffers, mGainmapCodecOutputBuffers;
+ std::queue<int64_t> mCodecOutputBufferFrameNumbers, mCodecGainmapOutputBufferFrameNumbers;
+ size_t mCodecOutputCounter, mCodecGainmapOutputCounter;
int32_t mQuality;
// Keep all incoming YUV buffers pending tiling and encoding (for HEVC YUV tiling only)
std::vector<int64_t> mInputYuvBuffers;
// Keep all codec input buffers ready to be filled out (for HEVC YUV tiling only)
- std::vector<int32_t> mCodecInputBuffers;
+ std::vector<int32_t> mCodecInputBuffers, mGainmapCodecInputBuffers;
// Artificial, strictly increasing YUV grid timestamp required to keep the encoder happy.
int64_t mGridTimestampUs;
@@ -286,6 +321,49 @@
// The status id for tracking the active/idle status of this composite stream
int mStatusId;
void markTrackerIdle();
+
+ // Whether the APP_SEGMENT stream is supported
+ bool mAppSegmentSupported = false;
+
+ bool mHDRGainmapEnabled = false;
+
+ // UltraHDR tonemap color and format aspects
+ static const uhdr_img_fmt_t kUltraHdrInputFmt = UHDR_IMG_FMT_24bppYCbCrP010;
+ static const uhdr_color_gamut kUltraHdrInputGamut = UHDR_CG_BT_2100;
+ static const uhdr_color_transfer kUltraHdrInputTransfer = UHDR_CT_HLG;
+ static const uhdr_color_range kUltraHdrInputRange = UHDR_CR_FULL_RANGE;
+
+ static const uhdr_img_fmt_t kUltraHdrOutputFmt = UHDR_IMG_FMT_12bppYCbCr420;
+ static const uhdr_color_gamut kUltraHdrOutputGamut = UHDR_CG_DISPLAY_P3;
+ static const uhdr_color_transfer kUltraHdrOutputTransfer = UHDR_CT_SRGB;
+ static const uhdr_color_range kUltraHdrOutputRange = UHDR_CR_FULL_RANGE;
+
+ static const auto kUltraHDRDataSpace = ADATASPACE_HEIF_ULTRAHDR;
+
+ // MediaMuxer/Codec color and format aspects for base image and gainmap metadata
+ static const int32_t kCodecColorFormat = COLOR_FormatYUV420Flexible;
+ static const ColorAspects::Primaries kCodecColorPrimaries =
+ ColorAspects::Primaries::PrimariesEG432;
+ static const ColorAspects::MatrixCoeffs kCodecColorMatrix =
+ ColorAspects::MatrixCoeffs::MatrixUnspecified;
+ static const ColorAspects::Transfer kCodecColorTransfer =
+ ColorAspects::Transfer::TransferSRGB;
+ static const ColorAspects::Range kCodecColorRange =
+ ColorAspects::Range::RangeFull;
+
+ // MediaMuxer/Codec color and format aspects for gainmap as per ISO 23008-12:2024
+ static const int32_t kCodecGainmapColorFormat = COLOR_FormatYUV420Flexible;
+ static const ColorAspects::Primaries kCodecGainmapColorPrimaries =
+ ColorAspects::Primaries::PrimariesUnspecified;
+ static const ColorAspects::MatrixCoeffs kCodecGainmapColorMatrix =
+ ColorAspects::MatrixCoeffs::MatrixUnspecified;
+ static const ColorAspects::Transfer kCodecGainmapColorTransfer =
+ ColorAspects::Transfer::TransferUnspecified;
+ static const ColorAspects::Range kCodecGainmapColorRange =
+ ColorAspects::Range::RangeFull;
+
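+ // Tonemaps the 10-bit input into an SDR base image and computes the HDR
+ // gainmap using the UltraHDR library.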
+ status_t generateBaseImageAndGainmap(InputFrame &inputFrame);
};
}; // namespace camera3
diff --git a/services/camera/libcameraservice/api2/HeicEncoderInfoManager.cpp b/services/camera/libcameraservice/api2/HeicEncoderInfoManager.cpp
index d36ca3b..92072b0 100644
--- a/services/camera/libcameraservice/api2/HeicEncoderInfoManager.cpp
+++ b/services/camera/libcameraservice/api2/HeicEncoderInfoManager.cpp
@@ -20,6 +20,7 @@
#include <cstdint>
#include <regex>
+#include <com_android_internal_camera_flags.h>
#include <cutils/properties.h>
#include <log/log_main.h>
#include <system/graphics.h>
@@ -33,14 +34,16 @@
namespace android {
namespace camera3 {
-HeicEncoderInfoManager::HeicEncoderInfoManager() :
+namespace flags = com::android::internal::camera::flags;
+
+HeicEncoderInfoManager::HeicEncoderInfoManager(bool useSWCodec) :
mIsInited(false),
mMinSizeHeic(0, 0),
mMaxSizeHeic(INT32_MAX, INT32_MAX),
mHasHEVC(false),
mHasHEIC(false),
mDisableGrid(false) {
- if (initialize() == OK) {
+ if (initialize(useSWCodec) == OK) {
mIsInited = true;
}
}
@@ -72,14 +75,15 @@
(width <= 1920 && height <= 1080))) {
enableGrid = false;
}
- if (hevcName != nullptr) {
- *hevcName = mHevcName;
- }
} else {
// No encoder available for the requested size.
return false;
}
+ if (hevcName != nullptr) {
+ *hevcName = mHevcName;
+ }
+
if (stall != nullptr) {
// Find preferred encoder which advertise
// "measured-frame-rate-WIDTHxHEIGHT-range" key.
@@ -109,7 +113,7 @@
return true;
}
-status_t HeicEncoderInfoManager::initialize() {
+status_t HeicEncoderInfoManager::initialize(bool allowSWCodec) {
mDisableGrid = property_get_bool("camera.heic.disable_grid", false);
sp<IMediaCodecList> codecsList = MediaCodecList::getInstance();
if (codecsList == nullptr) {
@@ -119,7 +123,7 @@
sp<AMessage> heicDetails = getCodecDetails(codecsList, MEDIA_MIMETYPE_IMAGE_ANDROID_HEIC);
- if (!getHevcCodecDetails(codecsList, MEDIA_MIMETYPE_VIDEO_HEVC)) {
+ if (!getHevcCodecDetails(codecsList, MEDIA_MIMETYPE_VIDEO_HEVC, allowSWCodec)) {
if (heicDetails != nullptr) {
ALOGE("%s: Device must support HEVC codec if HEIC codec is available!",
__FUNCTION__);
@@ -268,7 +272,7 @@
}
bool HeicEncoderInfoManager::getHevcCodecDetails(
- sp<IMediaCodecList> codecsList, const char* mime) {
+ sp<IMediaCodecList> codecsList, const char* mime, bool allowSWCodec) {
bool found = false;
ssize_t idx = 0;
while ((idx = codecsList->findCodecByType(mime, true /*encoder*/, idx)) >= 0) {
@@ -280,11 +284,13 @@
ALOGV("%s: [%s] codec found", __FUNCTION__,
info->getCodecName());
- // Filter out software ones as they may be too slow
- if (!(info->getAttributes() & MediaCodecInfo::kFlagIsHardwareAccelerated)) {
- ALOGV("%s: [%s] Filter out software ones as they may be too slow", __FUNCTION__,
- info->getCodecName());
- continue;
+ if (!allowSWCodec) {
+ // Filter out software ones as they may be too slow
+ if (!(info->getAttributes() & MediaCodecInfo::kFlagIsHardwareAccelerated)) {
+ ALOGV("%s: [%s] Filter out software ones as they may be too slow", __FUNCTION__,
+ info->getCodecName());
+ continue;
+ }
}
const sp<MediaCodecInfo::Capabilities> caps =
diff --git a/services/camera/libcameraservice/api2/HeicEncoderInfoManager.h b/services/camera/libcameraservice/api2/HeicEncoderInfoManager.h
index a65be9c..1e28eca 100644
--- a/services/camera/libcameraservice/api2/HeicEncoderInfoManager.h
+++ b/services/camera/libcameraservice/api2/HeicEncoderInfoManager.h
@@ -30,8 +30,8 @@
class HeicEncoderInfoManager {
public:
- static HeicEncoderInfoManager& getInstance() {
- static HeicEncoderInfoManager instance;
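+ // Note: as a function-local static, the instance is constructed once;
+ // useSWCodec only takes effect on the first call.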
+ static HeicEncoderInfoManager& getInstance(bool useSWCodec) {
+ static HeicEncoderInfoManager instance(useSWCodec);
return instance;
}
@@ -51,10 +51,10 @@
typedef std::unordered_map<std::pair<int32_t, int32_t>,
std::pair<int32_t, int32_t>, SizePairHash> FrameRateMaps;
- HeicEncoderInfoManager();
+ HeicEncoderInfoManager(bool useSWCodec);
virtual ~HeicEncoderInfoManager();
- status_t initialize();
+ status_t initialize(bool allowSWCodec);
status_t getFrameRateMaps(sp<AMessage> details, FrameRateMaps* maps);
status_t getCodecSizeRange(const char* codecName, sp<AMessage> details,
std::pair<int32_t, int32_t>* minSize, std::pair<int32_t, int32_t>* maxSize,
@@ -62,7 +62,8 @@
FrameRateMaps::const_iterator findClosestSize(const FrameRateMaps& maps,
int32_t width, int32_t height) const;
sp<AMessage> getCodecDetails(sp<IMediaCodecList> codecsList, const char* name);
- bool getHevcCodecDetails(sp<IMediaCodecList> codecsList, const char* mime);
+ bool getHevcCodecDetails(sp<IMediaCodecList> codecsList, const char* mime,
+ bool allowSWCodec = false);
bool mIsInited;
std::pair<int32_t, int32_t> mMinSizeHeic, mMaxSizeHeic;
diff --git a/services/camera/libcameraservice/common/CameraDeviceBase.h b/services/camera/libcameraservice/common/CameraDeviceBase.h
index f5e960b..e17d700 100644
--- a/services/camera/libcameraservice/common/CameraDeviceBase.h
+++ b/services/camera/libcameraservice/common/CameraDeviceBase.h
@@ -44,6 +44,10 @@
namespace camera3 {
+// TODO: Remove this once the GFX native dataspace
+// dependencies are available
+enum { HEIC_ULTRAHDR, ADATASPACE_HEIF_ULTRAHDR = 0x1006 };
+
typedef enum camera_stream_configuration_mode {
CAMERA_STREAM_CONFIGURATION_NORMAL_MODE = 0,
CAMERA_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE = 1,
diff --git a/services/camera/libcameraservice/common/CameraProviderManager.cpp b/services/camera/libcameraservice/common/CameraProviderManager.cpp
index a03d199..48f43e6 100644
--- a/services/camera/libcameraservice/common/CameraProviderManager.cpp
+++ b/services/camera/libcameraservice/common/CameraProviderManager.cpp
@@ -14,6 +14,7 @@
* limitations under the License.
*/
+#include "system/camera_metadata.h"
#include "system/graphics-base-v1.0.h"
#include "system/graphics-base-v1.1.h"
#define LOG_TAG "CameraProviderManager"
@@ -76,6 +77,10 @@
const float CameraProviderManager::kDepthARTolerance = .1f;
const bool CameraProviderManager::kFrameworkJpegRDisabled =
property_get_bool("ro.camera.disableJpegR", false);
+const bool CameraProviderManager::kFrameworkHeicUltraHDRDisabled =
+ property_get_bool("ro.camera.disableHeicUltraHDR", false);
+const bool CameraProviderManager::kFrameworkHeicAllowSWCodecs =
+ property_get_bool("ro.camera.enableSWHEVC", false);
CameraProviderManager::HidlServiceInteractionProxyImpl
CameraProviderManager::sHidlServiceInteractionProxy{};
@@ -1246,6 +1251,169 @@
return false;
}
+status_t CameraProviderManager::ProviderInfo::DeviceInfo3::deriveHeicUltraHDRTags(
+ bool maxResolution) {
+ if (!flags::camera_heif_gainmap() || kFrameworkHeicUltraHDRDisabled ||
+ mCompositeHeicUltraHDRDisabled ||
+ !camera3::HeicCompositeStream::isInMemoryTempFileSupported()) {
+ return OK;
+ }
+
+ const int32_t scalerSizesTag =
+ SessionConfigurationUtils::getAppropriateModeTag(
+ ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS, maxResolution);
+
+ const int32_t heicUltraHDRSizesTag =
+ SessionConfigurationUtils::getAppropriateModeTag(
+ ANDROID_HEIC_AVAILABLE_HEIC_ULTRA_HDR_STREAM_CONFIGURATIONS, maxResolution);
+ const int32_t heicUltraHDRStallDurationsTag =
+ SessionConfigurationUtils::getAppropriateModeTag(
+ ANDROID_HEIC_AVAILABLE_HEIC_ULTRA_HDR_STALL_DURATIONS, maxResolution);
+ const int32_t heicUltraHDRFrameDurationsTag =
+ SessionConfigurationUtils::getAppropriateModeTag(
+ ANDROID_HEIC_AVAILABLE_HEIC_ULTRA_HDR_MIN_FRAME_DURATIONS, maxResolution);
+
+ auto& c = mCameraCharacteristics;
+ std::vector<std::tuple<size_t, size_t>> supportedP010Sizes, filteredSizes;
+ auto capabilities = c.find(ANDROID_REQUEST_AVAILABLE_CAPABILITIES);
+ if (capabilities.count == 0) {
+ ALOGE("%s: Supported camera capabilities is empty!", __FUNCTION__);
+ return BAD_VALUE;
+ }
+
+ auto end = capabilities.data.u8 + capabilities.count;
+ bool isTenBitOutputSupported = std::find(capabilities.data.u8, end,
+ ANDROID_REQUEST_AVAILABLE_CAPABILITIES_DYNAMIC_RANGE_TEN_BIT) != end;
+ if (!isTenBitOutputSupported) {
+ // No 10-bit support, nothing more to do.
+ return OK;
+ }
+
+ getSupportedSizes(c, scalerSizesTag,
+ static_cast<android_pixel_format_t>(HAL_PIXEL_FORMAT_YCBCR_P010), &supportedP010Sizes);
+ if (supportedP010Sizes.empty()) {
+ // Nothing to do in this case.
+ return OK;
+ }
+
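+ // Advertise only sizes for which both the base image and the
+ // kGainmapScale-subsampled gainmap are supported by the HEIF encoder.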
+ std::vector<int32_t> heicUltraHDREntries;
+ int64_t stall = 0;
+ bool useHeic = false;
+ bool useGrid = false;
+ for (const auto& size : supportedP010Sizes) {
+ int32_t width = static_cast<int32_t>(std::get<0>(size));
+ int32_t height = static_cast<int32_t>(std::get<1>(size));
+ int32_t gainmapWidth = width / HeicCompositeStream::kGainmapScale;
+ int32_t gainmapHeight = height / HeicCompositeStream::kGainmapScale;
+ // Support gainmap sizes that are sufficiently aligned so CPU-specific copy
+ // optimizations can be utilized without side effects.
+ if (((gainmapWidth % 64) == 0) && ((gainmapHeight % 2) == 0) &&
+ camera3::HeicCompositeStream::isSizeSupportedByHeifEncoder(width, height,
+ &useHeic, &useGrid, &stall, nullptr /*hevcName*/,
+ kFrameworkHeicAllowSWCodecs) &&
+ camera3::HeicCompositeStream::isSizeSupportedByHeifEncoder(gainmapWidth,
+ gainmapHeight, &useHeic, &useGrid, &stall, nullptr /*hevcName*/,
+ kFrameworkHeicAllowSWCodecs)) {
+ int32_t entry[4] = {HAL_PIXEL_FORMAT_BLOB, width, height,
+ ANDROID_HEIC_AVAILABLE_HEIC_ULTRA_HDR_STREAM_CONFIGURATIONS_OUTPUT };
+ heicUltraHDREntries.insert(heicUltraHDREntries.end(), entry, entry + 4);
+ filteredSizes.push_back(size);
+ }
+ }
+
+ std::vector<int64_t> heicUltraHDRMinDurations, heicUltraHDRStallDurations;
+ auto ret = deriveBlobDurationEntries(c, maxResolution, filteredSizes,
+ &heicUltraHDRStallDurations, &heicUltraHDRMinDurations);
+ if (ret != OK) {
+ return ret;
+ }
+
+ return insertStreamConfigTags(heicUltraHDRSizesTag, heicUltraHDRFrameDurationsTag,
+ heicUltraHDRStallDurationsTag, heicUltraHDREntries,
+ heicUltraHDRMinDurations, heicUltraHDRStallDurations, &c);
+}
+
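+// Publishes a derived stream configuration: updates the size, min frame
+// duration, and stall duration tags and appends them to the available
+// characteristics keys.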
+status_t CameraProviderManager::ProviderInfo::DeviceInfo3::insertStreamConfigTags(
+ int32_t sizeTag, int32_t minFrameDurationTag, int32_t stallDurationTag,
+ const std::vector<int32_t>& sizeEntries,
+ const std::vector<int64_t>& minFrameDurationEntries,
+ const std::vector<int64_t>& stallDurationEntries, CameraMetadata* c /*out*/) {
+ std::vector<int32_t> supportedChTags;
+ auto chTags = c->find(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS);
+ if (chTags.count == 0) {
+ ALOGE("%s: No supported camera characteristics keys!", __FUNCTION__);
+ return BAD_VALUE;
+ }
+ supportedChTags.reserve(chTags.count + 3);
+ supportedChTags.insert(supportedChTags.end(), chTags.data.i32, chTags.data.i32 + chTags.count);
+ supportedChTags.push_back(sizeTag);
+ supportedChTags.push_back(minFrameDurationTag);
+ supportedChTags.push_back(stallDurationTag);
+ c->update(sizeTag, sizeEntries.data(), sizeEntries.size());
+ c->update(minFrameDurationTag, minFrameDurationEntries.data(), minFrameDurationEntries.size());
+ c->update(stallDurationTag, stallDurationEntries.data(), stallDurationEntries.size());
+ c->update(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS, supportedChTags.data(),
+ supportedChTags.size());
+
+ return OK;
+}
+
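+// Derives min frame duration and stall duration entries for a derived
+// composite stream by reusing the BLOB (JPEG) durations of each filtered size.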
+status_t CameraProviderManager::ProviderInfo::DeviceInfo3::deriveBlobDurationEntries(
+ const CameraMetadata& c, bool maxResolution,
+ const std::vector<std::tuple<size_t, size_t>>& filteredSizes,
+ std::vector<int64_t>* filteredStallDurations /*out*/,
+ std::vector<int64_t>* filteredMinDurations /*out*/) {
+ std::vector<int64_t> blobMinDurations, blobStallDurations;
+ const int32_t scalerMinFrameDurationsTag = SessionConfigurationUtils::getAppropriateModeTag(
+ ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS, maxResolution);
+ const int32_t scalerStallDurationsTag = SessionConfigurationUtils::getAppropriateModeTag(
+ ANDROID_SCALER_AVAILABLE_STALL_DURATIONS, maxResolution);
+ // We use the JPEG (BLOB) stall and min frame durations to approximate the
+ // respective durations of the derived composite stream (e.g. Jpeg/R or
+ // HEIC UltraHDR).
+ getSupportedDurations(c, scalerMinFrameDurationsTag, HAL_PIXEL_FORMAT_BLOB, filteredSizes,
+ &blobMinDurations);
+ getSupportedDurations(c, scalerStallDurationsTag, HAL_PIXEL_FORMAT_BLOB, filteredSizes,
+ &blobStallDurations);
+ if (blobStallDurations.empty() || blobMinDurations.empty() ||
+ filteredSizes.size() != blobMinDurations.size() ||
+ blobMinDurations.size() != blobStallDurations.size()) {
+ ALOGE("%s: Unexpected number of available blob durations! %zu vs. %zu with "
+ "filteredSizes size: %zu",
+ __FUNCTION__, blobMinDurations.size(), blobStallDurations.size(),
+ filteredSizes.size());
+ return BAD_VALUE;
+ }
+
+ auto itDuration = blobMinDurations.begin();
+ auto itSize = filteredSizes.begin();
+ while (itDuration != blobMinDurations.end()) {
+ int64_t entry[4] = {HAL_PIXEL_FORMAT_BLOB, static_cast<int32_t>(std::get<0>(*itSize)),
+ static_cast<int32_t>(std::get<1>(*itSize)), *itDuration};
+ filteredMinDurations->insert(filteredMinDurations->end(), entry, entry + 4);
+ itDuration++;
+ itSize++;
+ }
+
+ itDuration = blobStallDurations.begin();
+ itSize = filteredSizes.begin();
+ while (itDuration != blobStallDurations.end()) {
+ int64_t entry[4] = {HAL_PIXEL_FORMAT_BLOB, static_cast<int32_t>(std::get<0>(*itSize)),
+ static_cast<int32_t>(std::get<1>(*itSize)), *itDuration};
+ filteredStallDurations->insert(filteredStallDurations->end(), entry, entry + 4);
+ itDuration++;
+ itSize++;
+ }
+
+ return OK;
+}
+
status_t CameraProviderManager::ProviderInfo::DeviceInfo3::deriveJpegRTags(bool maxResolution) {
if (kFrameworkJpegRDisabled || mCompositeJpegRDisabled) {
return OK;
@@ -1271,13 +1439,6 @@
ANDROID_JPEGR_AVAILABLE_JPEG_R_MIN_FRAME_DURATIONS, maxResolution);
auto& c = mCameraCharacteristics;
- std::vector<int32_t> supportedChTags;
- auto chTags = c.find(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS);
- if (chTags.count == 0) {
- ALOGE("%s: No supported camera characteristics keys!", __FUNCTION__);
- return BAD_VALUE;
- }
-
std::vector<std::tuple<size_t, size_t>> supportedP010Sizes, supportedBlobSizes;
auto capabilities = c.find(ANDROID_REQUEST_AVAILABLE_CAPABILITIES);
if (capabilities.count == 0) {
@@ -1331,54 +1492,19 @@
jpegREntries.insert(jpegREntries.end(), entry, entry + 4);
}
- std::vector<int64_t> blobMinDurations, blobStallDurations;
std::vector<int64_t> jpegRMinDurations, jpegRStallDurations;
-
- // We use the jpeg stall and min frame durations to approximate the respective jpeg/r
- // durations.
- getSupportedDurations(c, scalerMinFrameDurationsTag, HAL_PIXEL_FORMAT_BLOB,
- supportedP010Sizes, &blobMinDurations);
- getSupportedDurations(c, scalerStallDurationsTag, HAL_PIXEL_FORMAT_BLOB,
- supportedP010Sizes, &blobStallDurations);
- if (blobStallDurations.empty() || blobMinDurations.empty() ||
- supportedP010Sizes.size() != blobMinDurations.size() ||
- blobMinDurations.size() != blobStallDurations.size()) {
- ALOGE("%s: Unexpected number of available blob durations! %zu vs. %zu with "
- "supportedP010Sizes size: %zu", __FUNCTION__, blobMinDurations.size(),
- blobStallDurations.size(), supportedP010Sizes.size());
- return BAD_VALUE;
+ auto ret = deriveBlobDurationEntries(c, maxResolution, supportedP010Sizes, &jpegRStallDurations,
+ &jpegRMinDurations);
+ if (ret != OK) {
+ return ret;
}
- auto itDuration = blobMinDurations.begin();
- auto itSize = supportedP010Sizes.begin();
- while (itDuration != blobMinDurations.end()) {
- int64_t entry[4] = {HAL_PIXEL_FORMAT_BLOB, static_cast<int32_t> (std::get<0>(*itSize)),
- static_cast<int32_t> (std::get<1>(*itSize)), *itDuration};
- jpegRMinDurations.insert(jpegRMinDurations.end(), entry, entry + 4);
- itDuration++; itSize++;
+ ret = insertStreamConfigTags(jpegRSizesTag, jpegRMinFrameDurationsTag, jpegRStallDurationsTag,
+ jpegREntries, jpegRMinDurations, jpegRStallDurations, &c);
+ if (ret != OK) {
+ return ret;
}
- itDuration = blobStallDurations.begin();
- itSize = supportedP010Sizes.begin();
- while (itDuration != blobStallDurations.end()) {
- int64_t entry[4] = {HAL_PIXEL_FORMAT_BLOB, static_cast<int32_t> (std::get<0>(*itSize)),
- static_cast<int32_t> (std::get<1>(*itSize)), *itDuration};
- jpegRStallDurations.insert(jpegRStallDurations.end(), entry, entry + 4);
- itDuration++; itSize++;
- }
-
- supportedChTags.reserve(chTags.count + 3);
- supportedChTags.insert(supportedChTags.end(), chTags.data.i32,
- chTags.data.i32 + chTags.count);
- supportedChTags.push_back(jpegRSizesTag);
- supportedChTags.push_back(jpegRMinFrameDurationsTag);
- supportedChTags.push_back(jpegRStallDurationsTag);
- c.update(jpegRSizesTag, jpegREntries.data(), jpegREntries.size());
- c.update(jpegRMinFrameDurationsTag, jpegRMinDurations.data(), jpegRMinDurations.size());
- c.update(jpegRStallDurationsTag, jpegRStallDurations.data(), jpegRStallDurations.size());
- c.update(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS, supportedChTags.data(),
- supportedChTags.size());
-
auto colorSpaces = c.find(ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP);
if (colorSpaces.count > 0 && !maxResolution) {
bool displayP3Support = false;
@@ -1772,6 +1898,36 @@
return res;
}
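+// Advertises CONTROL_AE_AVAILABLE_PRIORITY_MODES with the mandatory OFF mode
+// when the HAL doesn't report the key itself.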
+status_t CameraProviderManager::ProviderInfo::DeviceInfo3::addAePriorityModeTags() {
+ status_t res = OK;
+ auto& c = mCameraCharacteristics;
+
+ auto entry = c.find(ANDROID_CONTROL_AE_AVAILABLE_PRIORITY_MODES);
+ if (entry.count != 0) {
+ return res;
+ }
+
+ std::vector<int32_t> supportedChTags;
+ auto chTags = c.find(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS);
+ if (chTags.count == 0) {
+ ALOGE("%s: No supported camera characteristics keys!", __FUNCTION__);
+ return BAD_VALUE;
+ }
+
+ std::vector<uint8_t> aePriorityAvailableModes = {
+ ANDROID_CONTROL_AE_PRIORITY_MODE_OFF };
+ supportedChTags.reserve(chTags.count + 1);
+ supportedChTags.insert(supportedChTags.end(), chTags.data.i32,
+ chTags.data.i32 + chTags.count);
+ supportedChTags.push_back(ANDROID_CONTROL_AE_AVAILABLE_PRIORITY_MODES);
+ c.update(ANDROID_CONTROL_AE_AVAILABLE_PRIORITY_MODES,
+ aePriorityAvailableModes.data(), aePriorityAvailableModes.size());
+ c.update(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS, supportedChTags.data(),
+ supportedChTags.size());
+
+ return res;
+}
+
status_t CameraProviderManager::ProviderInfo::DeviceInfo3::addPreCorrectionActiveArraySize() {
status_t res = OK;
auto& c = mCameraCharacteristics;
@@ -1833,6 +1989,67 @@
return res;
}
+status_t CameraProviderManager::ProviderInfo::DeviceInfo3::addColorCorrectionAvailableModesTag(
+ CameraMetadata& c) {
+ status_t res = OK;
+
+ // The COLOR_CORRECTION_AVAILABLE_MODES key advertises the supported color
+ // correction modes. Previously, if color correction was supported at all
+ // (COLOR_CORRECTION_MODE was an available request key), it was assumed that
+ // all existing options, TRANSFORM_MATRIX, FAST, and HIGH_QUALITY, were
+ // supported. However, a new optional mode, CCT, has been introduced. To
+ // indicate whether CCT is supported, the camera device must now explicitly
+ // list all available modes using the COLOR_CORRECTION_AVAILABLE_MODES key.
+ // If the camera device doesn't set COLOR_CORRECTION_AVAILABLE_MODES, this
+ // code falls back to checking for COLOR_CORRECTION_MODE among the available
+ // request keys and, if present, adds the mandatory modes TRANSFORM_MATRIX,
+ // FAST, and HIGH_QUALITY.
+ auto entry = c.find(ANDROID_COLOR_CORRECTION_AVAILABLE_MODES);
+ if (entry.count != 0) {
+ return res;
+ }
+
+ auto reqKeys = c.find(ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS);
+ if (reqKeys.count == 0) {
+ ALOGE("%s: No supported camera request keys!", __FUNCTION__);
+ return BAD_VALUE;
+ }
+
+ bool colorCorrectionModeAvailable = false;
+ for (size_t i = 0; i < reqKeys.count; i++) {
+ if (reqKeys.data.i32[i] == ANDROID_COLOR_CORRECTION_MODE) {
+ colorCorrectionModeAvailable = true;
+ break;
+ }
+ }
+
+ if (!colorCorrectionModeAvailable) {
+ return res;
+ }
+
+ std::vector<int32_t> supportedChTags;
+ auto chTags = c.find(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS);
+ if (chTags.count == 0) {
+ ALOGE("%s: No supported camera characteristics keys!", __FUNCTION__);
+ return BAD_VALUE;
+ }
+
+ std::vector<uint8_t> colorCorrectionAvailableModes = {
+ ANDROID_COLOR_CORRECTION_MODE_TRANSFORM_MATRIX,
+ ANDROID_COLOR_CORRECTION_MODE_FAST,
+ ANDROID_COLOR_CORRECTION_MODE_HIGH_QUALITY };
+ supportedChTags.reserve(chTags.count + 1);
+ supportedChTags.insert(supportedChTags.end(), chTags.data.i32,
+ chTags.data.i32 + chTags.count);
+ supportedChTags.push_back(ANDROID_COLOR_CORRECTION_AVAILABLE_MODES);
+ c.update(ANDROID_COLOR_CORRECTION_AVAILABLE_MODES,
+ colorCorrectionAvailableModes.data(), colorCorrectionAvailableModes.size());
+ c.update(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS, supportedChTags.data(),
+ supportedChTags.size());
+
+ return res;
+}
+
status_t CameraProviderManager::ProviderInfo::DeviceInfo3::addSessionConfigQueryVersionTag() {
sp<ProviderInfo> parentProvider = mParentProvider.promote();
if (parentProvider == nullptr) {
@@ -1841,14 +2058,25 @@
int versionCode = ANDROID_INFO_SESSION_CONFIGURATION_QUERY_VERSION_UPSIDE_DOWN_CAKE;
IPCTransport ipcTransport = parentProvider->getIPCTransport();
- int deviceVersion = HARDWARE_DEVICE_API_VERSION(mVersion.get_major(), mVersion.get_minor());
- if (ipcTransport == IPCTransport::AIDL
- && deviceVersion >= CAMERA_DEVICE_API_VERSION_1_3) {
- versionCode = ANDROID_INFO_SESSION_CONFIGURATION_QUERY_VERSION_VANILLA_ICE_CREAM;
+ auto& c = mCameraCharacteristics;
+ status_t res = OK;
+ if (ipcTransport != IPCTransport::AIDL) {
+ res = c.update(ANDROID_INFO_SESSION_CONFIGURATION_QUERY_VERSION, &versionCode, 1);
+ mSessionConfigQueryVersion = versionCode;
+ return res;
}
- auto& c = mCameraCharacteristics;
- status_t res = c.update(ANDROID_INFO_SESSION_CONFIGURATION_QUERY_VERSION, &versionCode, 1);
+ int deviceVersion = HARDWARE_DEVICE_API_VERSION(mVersion.get_major(), mVersion.get_minor());
+ if (deviceVersion == CAMERA_DEVICE_API_VERSION_1_3) {
+ versionCode = ANDROID_INFO_SESSION_CONFIGURATION_QUERY_VERSION_VANILLA_ICE_CREAM;
+ } else if (deviceVersion >= CAMERA_DEVICE_API_VERSION_1_4) {
+ if (flags::feature_combination_baklava()) {
+ versionCode = ANDROID_INFO_SESSION_CONFIGURATION_QUERY_VERSION_BAKLAVA;
+ } else {
+ versionCode = ANDROID_INFO_SESSION_CONFIGURATION_QUERY_VERSION_VANILLA_ICE_CREAM;
+ }
+ }
+ res = c.update(ANDROID_INFO_SESSION_CONFIGURATION_QUERY_VERSION, &versionCode, 1);
mSessionConfigQueryVersion = versionCode;
return res;
}
@@ -1915,7 +2143,7 @@
bool useGrid = false;
if (camera3::HeicCompositeStream::isSizeSupportedByHeifEncoder(
halStreamConfigs.data.i32[i+1], halStreamConfigs.data.i32[i+2],
- &useHeic, &useGrid, &stall)) {
+ &useHeic, &useGrid, &stall, nullptr /*hevcName*/, kFrameworkHeicAllowSWCodecs)) {
if (useGrid != (format == HAL_PIXEL_FORMAT_YCBCR_420_888)) {
continue;
}
diff --git a/services/camera/libcameraservice/common/CameraProviderManager.h b/services/camera/libcameraservice/common/CameraProviderManager.h
index b686a58..803df40 100644
--- a/services/camera/libcameraservice/common/CameraProviderManager.h
+++ b/services/camera/libcameraservice/common/CameraProviderManager.h
@@ -88,6 +88,7 @@
#define CAMERA_DEVICE_API_VERSION_1_0 HARDWARE_DEVICE_API_VERSION(1, 0)
#define CAMERA_DEVICE_API_VERSION_1_2 HARDWARE_DEVICE_API_VERSION(1, 2)
#define CAMERA_DEVICE_API_VERSION_1_3 HARDWARE_DEVICE_API_VERSION(1, 3)
+#define CAMERA_DEVICE_API_VERSION_1_4 HARDWARE_DEVICE_API_VERSION(1, 4)
#define CAMERA_DEVICE_API_VERSION_3_0 HARDWARE_DEVICE_API_VERSION(3, 0)
#define CAMERA_DEVICE_API_VERSION_3_1 HARDWARE_DEVICE_API_VERSION(3, 1)
#define CAMERA_DEVICE_API_VERSION_3_2 HARDWARE_DEVICE_API_VERSION(3, 2)
@@ -470,6 +471,9 @@
static const float kDepthARTolerance;
static const bool kFrameworkJpegRDisabled;
+ static const bool kFrameworkHeicUltraHDRDisabled;
+ static const bool kFrameworkHeicAllowSWCodecs;
+
private:
// All private members, unless otherwise noted, expect mInterfaceMutex to be locked before use
mutable std::mutex mInterfaceMutex;
@@ -629,6 +633,7 @@
bool hasFlashUnit() const { return mHasFlashUnit; }
bool supportNativeZoomRatio() const { return mSupportNativeZoomRatio; }
bool isCompositeJpegRDisabled() const { return mCompositeJpegRDisabled; }
+ bool isCompositeHeicUltraHDRDisabled() const { return mCompositeHeicUltraHDRDisabled; }
virtual status_t setTorchMode(bool enabled) = 0;
virtual status_t turnOnTorchWithStrengthLevel(int32_t torchStrength) = 0;
virtual status_t getTorchStrengthLevel(int32_t *torchStrength) = 0;
@@ -685,14 +690,15 @@
mParentProvider(parentProvider), mTorchStrengthLevel(0),
mTorchMaximumStrengthLevel(0), mTorchDefaultStrengthLevel(0),
mHasFlashUnit(false), mSupportNativeZoomRatio(false),
- mPublicCameraIds(publicCameraIds), mCompositeJpegRDisabled(false) {}
+ mPublicCameraIds(publicCameraIds), mCompositeJpegRDisabled(false),
+ mCompositeHeicUltraHDRDisabled(false) {}
virtual ~DeviceInfo() {}
protected:
bool mHasFlashUnit; // const after constructor
bool mSupportNativeZoomRatio; // const after constructor
const std::vector<std::string>& mPublicCameraIds;
- bool mCompositeJpegRDisabled;
+ bool mCompositeJpegRDisabled, mCompositeHeicUltraHDRDisabled;
};
std::vector<std::unique_ptr<DeviceInfo>> mDevices;
std::unordered_set<std::string> mUniqueCameraIds;
@@ -757,10 +763,24 @@
status_t addDynamicDepthTags(bool maxResolution = false);
status_t deriveHeicTags(bool maxResolution = false);
status_t deriveJpegRTags(bool maxResolution = false);
+ status_t deriveHeicUltraHDRTags(bool maxResolution = false);
+ status_t deriveBlobDurationEntries(
+ const CameraMetadata& c, bool maxResolution,
+ const std::vector<std::tuple<size_t, size_t>>& filteredSizes,
+ std::vector<int64_t>* filteredStallDurations /*out*/,
+ std::vector<int64_t>* filteredMinDurations /*out*/);
+ status_t insertStreamConfigTags(int32_t sizeTag, int32_t minFrameDurationTag,
+ int32_t stallDurationTag,
+ const std::vector<int32_t>& sizeEntries,
+ const std::vector<int64_t>& minFrameDurationEntries,
+ const std::vector<int64_t>& stallDurationEntries,
+ CameraMetadata* c /*out*/);
status_t addRotateCropTags();
status_t addAutoframingTags();
status_t addPreCorrectionActiveArraySize();
status_t addReadoutTimestampTag(bool readoutTimestampSupported = true);
+ status_t addColorCorrectionAvailableModesTag(CameraMetadata& ch);
+ status_t addAePriorityModeTags();
status_t addSessionConfigQueryVersionTag();
static void getSupportedSizes(const CameraMetadata& ch, uint32_t tag,
diff --git a/services/camera/libcameraservice/common/aidl/AidlProviderInfo.cpp b/services/camera/libcameraservice/common/aidl/AidlProviderInfo.cpp
index 4bfe11d..3a0cfa1 100644
--- a/services/camera/libcameraservice/common/aidl/AidlProviderInfo.cpp
+++ b/services/camera/libcameraservice/common/aidl/AidlProviderInfo.cpp
@@ -517,6 +517,8 @@
mCompositeJpegRDisabled = mCameraCharacteristics.exists(
ANDROID_JPEGR_AVAILABLE_JPEG_R_STREAM_CONFIGURATIONS);
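+ // Framework-side HEIC UltraHDR derivation is skipped when the HAL already
+ // advertises the configurations natively.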
+ mCompositeHeicUltraHDRDisabled = mCameraCharacteristics.exists(
+ ANDROID_HEIC_AVAILABLE_HEIC_ULTRA_HDR_STREAM_CONFIGURATIONS);
mSystemCameraKind = getSystemCameraKind();
@@ -548,6 +550,12 @@
ALOGE("%s: Unable to derive Jpeg/R tags based on camera and media capabilities: %s (%d)",
__FUNCTION__, strerror(-res), res);
}
+ res = deriveHeicUltraHDRTags();
+ if (OK != res) {
+ ALOGE("%s: Unable to derive Heic UltraHDR tags based on camera and "
+ "media capabilities: %s (%d)",
+ __FUNCTION__, strerror(-res), res);
+ }
using camera3::SessionConfigurationUtils::supportsUltraHighResolutionCapture;
if (supportsUltraHighResolutionCapture(mCameraCharacteristics)) {
status_t status = addDynamicDepthTags(/*maxResolution*/true);
@@ -567,6 +575,12 @@
ALOGE("%s: Unable to derive Jpeg/R tags based on camera and media capabilities for"
"maximum resolution mode: %s (%d)", __FUNCTION__, strerror(-status), status);
}
+ status = deriveHeicUltraHDRTags(/*maxResolution*/true);
+ if (OK != status) {
+ ALOGE("%s: Unable to derive Heic UltraHDR tags based on camera and "
+ "media capabilities: %s (%d)",
+ __FUNCTION__, strerror(-status), status);
+ }
}
res = addRotateCropTags();
@@ -596,6 +610,22 @@
__FUNCTION__, strerror(-res), res);
}
+ if (flags::color_temperature()) {
+ res = addColorCorrectionAvailableModesTag(mCameraCharacteristics);
+ if (OK != res) {
+ ALOGE("%s: Unable to add COLOR_CORRECTION_AVAILABLE_MODES tag: %s (%d)",
+ __FUNCTION__, strerror(-res), res);
+ }
+ }
+
+ if (flags::ae_priority()) {
+ res = addAePriorityModeTags();
+ if (OK != res) {
+ ALOGE("%s: Unable to add CONTROL_AE_AVAILABLE_PRIORITY_MODES tag: %s (%d)",
+ __FUNCTION__, strerror(-res), res);
+ }
+ }
+
camera_metadata_entry flashAvailable =
mCameraCharacteristics.find(ANDROID_FLASH_INFO_AVAILABLE);
if (flashAvailable.count == 1 &&
@@ -683,6 +713,14 @@
__FUNCTION__, strerror(-res), res);
return;
}
+
+ if (flags::color_temperature()) {
+ res = addColorCorrectionAvailableModesTag(mPhysicalCameraCharacteristics[id]);
+ if (OK != res) {
+ ALOGE("%s: Unable to add COLOR_CORRECTION_AVAILABLE_MODES tag: %s (%d)",
+ __FUNCTION__, strerror(-res), res);
+ }
+ }
}
}
diff --git a/services/camera/libcameraservice/common/hidl/HidlProviderInfo.cpp b/services/camera/libcameraservice/common/hidl/HidlProviderInfo.cpp
index 6cedb04..756c255 100644
--- a/services/camera/libcameraservice/common/hidl/HidlProviderInfo.cpp
+++ b/services/camera/libcameraservice/common/hidl/HidlProviderInfo.cpp
@@ -675,6 +675,21 @@
ALOGE("%s: Unable to add sensorReadoutTimestamp tag: %s (%d)",
__FUNCTION__, strerror(-res), res);
}
+ if (flags::color_temperature()) {
+ res = addColorCorrectionAvailableModesTag(mCameraCharacteristics);
+ if (OK != res) {
+ ALOGE("%s: Unable to add COLOR_CORRECTION_AVAILABLE_MODES tag: %s (%d)",
+ __FUNCTION__, strerror(-res), res);
+ }
+ }
+
+ if (flags::ae_priority()) {
+ res = addAePriorityModeTags();
+ if (OK != res) {
+ ALOGE("%s: Unable to add CONTROL_AE_AVAILABLE_PRIORITY_MODES tag: %s (%d)",
+ __FUNCTION__, strerror(-res), res);
+ }
+ }
camera_metadata_entry flashAvailable =
mCameraCharacteristics.find(ANDROID_FLASH_INFO_AVAILABLE);
@@ -785,6 +800,14 @@
__FUNCTION__, strerror(-res), res);
return;
}
+
+ if (flags::color_temperature()) {
+ res = addColorCorrectionAvailableModesTag(mPhysicalCameraCharacteristics[id]);
+ if (OK != res) {
+ ALOGE("%s: Unable to add COLOR_CORRECTION_AVAILABLE_MODES tag: %s (%d)",
+ __FUNCTION__, strerror(-res), res);
+ }
+ }
}
}
}
diff --git a/services/camera/libcameraservice/device3/Camera3Device.cpp b/services/camera/libcameraservice/device3/Camera3Device.cpp
index 0375aa5..c6434a5 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.cpp
+++ b/services/camera/libcameraservice/device3/Camera3Device.cpp
@@ -44,6 +44,7 @@
#include <utility>
+#include <android/data_space.h>
#include <android-base/stringprintf.h>
#include <sched.h>
#include <utils/Log.h>
@@ -2564,6 +2565,8 @@
// always occupy the initial entry.
if ((outputStream->data_space == HAL_DATASPACE_V0_JFIF) ||
(outputStream->data_space ==
+ static_cast<android_dataspace_t>(ADATASPACE_HEIF_ULTRAHDR)) ||
+ (outputStream->data_space ==
static_cast<android_dataspace_t>(
aidl::android::hardware::graphics::common::Dataspace::JPEG_R))) {
bufferSizes[k] = static_cast<uint32_t>(
@@ -2885,7 +2888,7 @@
bool hasAppCallback, nsecs_t minExpectedDuration, nsecs_t maxExpectedDuration,
bool isFixedFps, const std::set<std::set<std::string>>& physicalCameraIds,
bool isStillCapture, bool isZslCapture, bool rotateAndCropAuto, bool autoframingAuto,
- const std::set<std::string>& cameraIdsWithZoom,
+ const std::set<std::string>& cameraIdsWithZoom, bool useZoomRatio,
const SurfaceMap& outputSurfaces, nsecs_t requestTimeNs) {
ATRACE_CALL();
std::lock_guard<std::mutex> l(mInFlightLock);
@@ -2894,7 +2897,7 @@
res = mInFlightMap.add(frameNumber, InFlightRequest(numBuffers, resultExtras, hasInput,
hasAppCallback, minExpectedDuration, maxExpectedDuration, isFixedFps, physicalCameraIds,
isStillCapture, isZslCapture, rotateAndCropAuto, autoframingAuto, cameraIdsWithZoom,
- requestTimeNs, outputSurfaces));
+ requestTimeNs, useZoomRatio, outputSurfaces));
if (res < 0) return res;
if (mInFlightMap.size() == 1) {
@@ -4182,6 +4185,7 @@
}
bool isStillCapture = false;
bool isZslCapture = false;
+ bool useZoomRatio = false;
const camera_metadata_t* settings = halRequest->settings;
bool shouldUnlockSettings = false;
if (settings == nullptr) {
@@ -4201,6 +4205,14 @@
if ((e.count > 0) && (e.data.u8[0] == ANDROID_CONTROL_ENABLE_ZSL_TRUE)) {
isZslCapture = true;
}
+
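+ // When ZOOM_METHOD is set to ZOOM_RATIO, the app explicitly operates in
+ // zoom-ratio terms, so results must report ZOOM_RATIO rather than folding
+ // zoom into SCALER_CROP_REGION.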
+ if (flags::zoom_method()) {
+ e = camera_metadata_ro_entry_t();
+ find_camera_metadata_ro_entry(settings, ANDROID_CONTROL_ZOOM_METHOD, &e);
+ if ((e.count > 0) && (e.data.u8[0] == ANDROID_CONTROL_ZOOM_METHOD_ZOOM_RATIO)) {
+ useZoomRatio = true;
+ }
+ }
}
bool passSurfaceMap =
mUseHalBufManager || containsHalBufferManagedStream;
@@ -4214,7 +4226,7 @@
expectedDurationInfo.isFixedFps,
requestedPhysicalCameras, isStillCapture, isZslCapture,
captureRequest->mRotateAndCropAuto, captureRequest->mAutoframingAuto,
- mPrevCameraIdsWithZoom,
+ mPrevCameraIdsWithZoom, useZoomRatio,
passSurfaceMap ? uniqueSurfaceIdMap :
SurfaceMap{}, captureRequest->mRequestTimeNs);
ALOGVV("%s: registered in flight requestId = %" PRId32 ", frameNumber = %" PRId64
diff --git a/services/camera/libcameraservice/device3/Camera3Device.h b/services/camera/libcameraservice/device3/Camera3Device.h
index 397ec5c..f1e088e 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.h
+++ b/services/camera/libcameraservice/device3/Camera3Device.h
@@ -1275,8 +1275,8 @@
bool callback, nsecs_t minExpectedDuration, nsecs_t maxExpectedDuration,
bool isFixedFps, const std::set<std::set<std::string>>& physicalCameraIds,
bool isStillCapture, bool isZslCapture, bool rotateAndCropAuto, bool autoframingAuto,
- const std::set<std::string>& cameraIdsWithZoom, const SurfaceMap& outputSurfaces,
- nsecs_t requestTimeNs);
+ const std::set<std::string>& cameraIdsWithZoom, bool useZoomRatio,
+ const SurfaceMap& outputSurfaces, nsecs_t requestTimeNs);
/**
* Tracking for idle detection
diff --git a/services/camera/libcameraservice/device3/Camera3OutputStream.cpp b/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
index dc663f3..2eba5a7 100644
--- a/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
+++ b/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
@@ -26,6 +26,7 @@
#include <aidl/android/hardware/camera/device/CameraBlobId.h>
#include "aidl/android/hardware/graphics/common/Dataspace.h"
+#include <android/data_space.h>
#include <android-base/unique_fd.h>
#include <com_android_internal_camera_flags.h>
#include <cutils/properties.h>
@@ -402,6 +403,8 @@
// Fix CameraBlob id type discrepancy between HIDL and AIDL, details : http://b/229688810
if (getFormat() == HAL_PIXEL_FORMAT_BLOB && (getDataSpace() == HAL_DATASPACE_V0_JFIF ||
(getDataSpace() ==
+ static_cast<android_dataspace_t>(ADATASPACE_HEIF_ULTRAHDR)) ||
+ (getDataSpace() ==
static_cast<android_dataspace_t>(
aidl::android::hardware::graphics::common::Dataspace::JPEG_R)))) {
if (mIPCTransport == IPCTransport::HIDL) {
diff --git a/services/camera/libcameraservice/device3/Camera3OutputUtils.cpp b/services/camera/libcameraservice/device3/Camera3OutputUtils.cpp
index f10df07..78f1698 100644
--- a/services/camera/libcameraservice/device3/Camera3OutputUtils.cpp
+++ b/services/camera/libcameraservice/device3/Camera3OutputUtils.cpp
@@ -304,7 +304,7 @@
CameraMetadata &collectedPartialResult,
uint32_t frameNumber,
bool reprocess, bool zslStillCapture, bool rotateAndCropAuto,
- const std::set<std::string>& cameraIdsWithZoom,
+ const std::set<std::string>& cameraIdsWithZoom, bool useZoomRatio,
const std::vector<PhysicalCaptureResultInfo>& physicalMetadatas) {
ATRACE_CALL();
if (pendingMetadata.isEmpty())
@@ -385,8 +385,9 @@
// Fix up result metadata to account for zoom ratio availabilities between
// HAL and app.
bool zoomRatioIs1 = cameraIdsWithZoom.find(states.cameraId) == cameraIdsWithZoom.end();
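+ // The app uses zoom-ratio semantics either implicitly (a non-1.0 ZOOM_RATIO
+ // was requested) or explicitly (ZOOM_METHOD_ZOOM_RATIO).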
+ bool appUsesZoomRatio = !zoomRatioIs1 || useZoomRatio;
res = states.zoomRatioMappers[states.cameraId].updateCaptureResult(
- &captureResult.mMetadata, zoomRatioIs1);
+ &captureResult.mMetadata, appUsesZoomRatio);
if (res != OK) {
SET_ERR("Failed to update capture result zoom ratio metadata for frame %d: %s (%d)",
frameNumber, strerror(-res), res);
@@ -452,9 +453,10 @@
}
}
- zoomRatioIs1 = cameraIdsWithZoom.find(cameraId) == cameraIdsWithZoom.end();
+ // Note: Physical camera continues to use SCALER_CROP_REGION to reflect
+ // zoom levels.
res = states.zoomRatioMappers[cameraId].updateCaptureResult(
- &physicalMetadata.mPhysicalCameraMetadata, zoomRatioIs1);
+ &physicalMetadata.mPhysicalCameraMetadata, /*appUsesZoomRatio*/ false);
if (res != OK) {
SET_ERR("Failed to update camera %s's physical zoom ratio metadata for "
"frame %d: %s(%d)", cameraId.c_str(), frameNumber, strerror(-res), res);
@@ -829,7 +831,7 @@
sendCaptureResult(states, metadata, request.resultExtras,
collectedPartialResult, frameNumber,
hasInputBufferInRequest, request.zslCapture && request.stillCapture,
- request.rotateAndCropAuto, cameraIdsWithZoom,
+ request.rotateAndCropAuto, cameraIdsWithZoom, request.useZoomRatio,
request.physicalMetadatas);
}
}
@@ -1098,7 +1100,8 @@
r.pendingMetadata, r.resultExtras,
r.collectedPartialResult, msg.frame_number,
r.hasInputBuffer, r.zslCapture && r.stillCapture,
- r.rotateAndCropAuto, cameraIdsWithZoom, r.physicalMetadatas);
+ r.rotateAndCropAuto, cameraIdsWithZoom, r.useZoomRatio,
+ r.physicalMetadatas);
}
collectAndRemovePendingOutputBuffers(
states.useHalBufManager, states.halBufManagedStreamIds,
diff --git a/services/camera/libcameraservice/device3/InFlightRequest.h b/services/camera/libcameraservice/device3/InFlightRequest.h
index 3626f20..62980c5 100644
--- a/services/camera/libcameraservice/device3/InFlightRequest.h
+++ b/services/camera/libcameraservice/device3/InFlightRequest.h
@@ -198,6 +198,9 @@
// Current output transformation
int32_t transform;
+ // Whether the app explicitly uses ZOOM_RATIO
+ bool useZoomRatio;
+
static const nsecs_t kDefaultMinExpectedDuration = 33333333; // 33 ms
static const nsecs_t kDefaultMaxExpectedDuration = 100000000; // 100 ms
@@ -220,14 +223,15 @@
rotateAndCropAuto(false),
autoframingAuto(false),
requestTimeNs(0),
- transform(-1) {
+ transform(-1),
+ useZoomRatio(false) {
}
InFlightRequest(int numBuffers, CaptureResultExtras extras, bool hasInput,
bool hasAppCallback, nsecs_t minDuration, nsecs_t maxDuration, bool fixedFps,
const std::set<std::set<std::string>>& physicalCameraIdSet, bool isStillCapture,
bool isZslCapture, bool rotateAndCropAuto, bool autoframingAuto,
- const std::set<std::string>& idsWithZoom, nsecs_t requestNs,
+ const std::set<std::string>& idsWithZoom, nsecs_t requestNs, bool useZoomRatio,
const SurfaceMap& outSurfaces = SurfaceMap{}) :
shutterTimestamp(0),
sensorTimestamp(0),
@@ -250,7 +254,8 @@
cameraIdsWithZoom(idsWithZoom),
requestTimeNs(requestNs),
outputSurfaces(outSurfaces),
- transform(-1) {
+ transform(-1),
+ useZoomRatio(useZoomRatio) {
}
};
diff --git a/services/camera/libcameraservice/device3/ZoomRatioMapper.cpp b/services/camera/libcameraservice/device3/ZoomRatioMapper.cpp
index 2016284..5260ad3 100644
--- a/services/camera/libcameraservice/device3/ZoomRatioMapper.cpp
+++ b/services/camera/libcameraservice/device3/ZoomRatioMapper.cpp
@@ -19,6 +19,8 @@
#include <algorithm>
+#include <com_android_internal_camera_flags.h>
+
#include "device3/ZoomRatioMapper.h"
#include "utils/SessionConfigurationUtilsHost.h"
@@ -42,13 +44,25 @@
}
status_t ZoomRatioMapper::initZoomRatioInTemplate(CameraMetadata *request) {
+ status_t res = OK;
+
+ if (flags::zoom_method()) {
+ uint8_t zoomMethod = ANDROID_CONTROL_ZOOM_METHOD_AUTO;
+ res = request->update(ANDROID_CONTROL_ZOOM_METHOD, &zoomMethod, 1);
+ if (res != OK) {
+ ALOGE("%s: Failed to update CONTROL_ZOOM_METHOD key: %s (%d)",
+ __FUNCTION__, strerror(-res), res);
+ return res;
+ }
+ }
+
camera_metadata_entry_t entry;
entry = request->find(ANDROID_CONTROL_ZOOM_RATIO);
float defaultZoomRatio = 1.0f;
if (entry.count == 0) {
- return request->update(ANDROID_CONTROL_ZOOM_RATIO, &defaultZoomRatio, 1);
+ res = request->update(ANDROID_CONTROL_ZOOM_RATIO, &defaultZoomRatio, 1);
}
- return OK;
+ return res;
}
status_t ZoomRatioMapper::overrideZoomRatioTags(
@@ -57,40 +71,69 @@
return BAD_VALUE;
}
+ bool halSupportZoomRatio = false;
camera_metadata_entry_t entry;
entry = deviceInfo->find(ANDROID_CONTROL_ZOOM_RATIO_RANGE);
if (entry.count != 2 && entry.count != 0) return BAD_VALUE;
-
// Hal has zoom ratio support
if (entry.count == 2) {
- *supportNativeZoomRatio = true;
- return OK;
+ halSupportZoomRatio = true;
}
- // Hal has no zoom ratio support
- *supportNativeZoomRatio = false;
-
- entry = deviceInfo->find(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM);
- if (entry.count != 1) {
- ALOGI("%s: Camera device doesn't support SCALER_AVAILABLE_MAX_DIGITAL_ZOOM key!",
- __FUNCTION__);
- return OK;
- }
-
- float zoomRange[] = {1.0f, entry.data.f[0]};
- status_t res = deviceInfo->update(ANDROID_CONTROL_ZOOM_RATIO_RANGE, zoomRange, 2);
- if (res != OK) {
- ALOGE("%s: Failed to update CONTROL_ZOOM_RATIO_RANGE key: %s (%d)",
- __FUNCTION__, strerror(-res), res);
- return res;
- }
-
+ // Add ZOOM_METHOD request and result keys
std::vector<int32_t> requestKeys;
entry = deviceInfo->find(ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS);
if (entry.count > 0) {
requestKeys.insert(requestKeys.end(), entry.data.i32, entry.data.i32 + entry.count);
}
- requestKeys.push_back(ANDROID_CONTROL_ZOOM_RATIO);
+ if (flags::zoom_method()) {
+ requestKeys.push_back(ANDROID_CONTROL_ZOOM_METHOD);
+ }
+ std::vector<int32_t> resultKeys;
+ entry = deviceInfo->find(ANDROID_REQUEST_AVAILABLE_RESULT_KEYS);
+ if (entry.count > 0) {
+ resultKeys.insert(resultKeys.end(), entry.data.i32, entry.data.i32 + entry.count);
+ }
+ if (flags::zoom_method()) {
+ resultKeys.push_back(ANDROID_CONTROL_ZOOM_METHOD);
+ }
+
+ // Add additional keys if the HAL doesn't support ZOOM_RATIO
+ status_t res = OK;
+ if (!halSupportZoomRatio) {
+ entry = deviceInfo->find(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM);
+ if (entry.count != 1) {
+ ALOGI("%s: Camera device doesn't support SCALER_AVAILABLE_MAX_DIGITAL_ZOOM key!",
+ __FUNCTION__);
+ return OK;
+ }
+ float zoomRange[] = {1.0f, entry.data.f[0]};
+ res = deviceInfo->update(ANDROID_CONTROL_ZOOM_RATIO_RANGE, zoomRange, 2);
+ if (res != OK) {
+ ALOGE("%s: Failed to update CONTROL_ZOOM_RATIO_RANGE key: %s (%d)",
+ __FUNCTION__, strerror(-res), res);
+ return res;
+ }
+
+ requestKeys.push_back(ANDROID_CONTROL_ZOOM_RATIO);
+ resultKeys.push_back(ANDROID_CONTROL_ZOOM_RATIO);
+
+ std::vector<int32_t> charKeys;
+ entry = deviceInfo->find(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS);
+ if (entry.count > 0) {
+ charKeys.insert(charKeys.end(), entry.data.i32, entry.data.i32 + entry.count);
+ }
+ charKeys.push_back(ANDROID_CONTROL_ZOOM_RATIO_RANGE);
+ res = deviceInfo->update(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS,
+ charKeys.data(), charKeys.size());
+ if (res != OK) {
+ ALOGE("%s: Failed to update REQUEST_AVAILABLE_CHARACTERISTICS_KEYS: %s (%d)",
+ __FUNCTION__, strerror(-res), res);
+ return res;
+ }
+ }
+
+ // Update available request and result keys
res = deviceInfo->update(ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS,
requestKeys.data(), requestKeys.size());
if (res != OK) {
@@ -98,13 +141,6 @@
__FUNCTION__, strerror(-res), res);
return res;
}
-
- std::vector<int32_t> resultKeys;
- entry = deviceInfo->find(ANDROID_REQUEST_AVAILABLE_RESULT_KEYS);
- if (entry.count > 0) {
- resultKeys.insert(resultKeys.end(), entry.data.i32, entry.data.i32 + entry.count);
- }
- resultKeys.push_back(ANDROID_CONTROL_ZOOM_RATIO);
res = deviceInfo->update(ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
resultKeys.data(), resultKeys.size());
if (res != OK) {
@@ -113,20 +149,7 @@
return res;
}
- std::vector<int32_t> charKeys;
- entry = deviceInfo->find(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS);
- if (entry.count > 0) {
- charKeys.insert(charKeys.end(), entry.data.i32, entry.data.i32 + entry.count);
- }
- charKeys.push_back(ANDROID_CONTROL_ZOOM_RATIO_RANGE);
- res = deviceInfo->update(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS,
- charKeys.data(), charKeys.size());
- if (res != OK) {
- ALOGE("%s: Failed to update REQUEST_AVAILABLE_CHARACTERISTICS_KEYS: %s (%d)",
- __FUNCTION__, strerror(-res), res);
- return res;
- }
-
+ *supportNativeZoomRatio = halSupportZoomRatio;
return OK;
}
@@ -223,7 +246,6 @@
if (!mIsValid) return INVALID_OPERATION;
status_t res = OK;
- bool zoomRatioIs1 = true;
camera_metadata_entry_t entry;
int arrayHeight, arrayWidth = 0;
res = getArrayDimensionsToBeUsed(request, &arrayWidth, &arrayHeight);
@@ -231,9 +253,14 @@
return res;
}
entry = request->find(ANDROID_CONTROL_ZOOM_RATIO);
- if (entry.count == 1 && entry.data.f[0] != 1.0f) {
- zoomRatioIs1 = false;
-
+ bool zoomRatioIs1 = (entry.count == 0 || entry.data.f[0] == 1.0f);
+ bool useZoomRatio = !zoomRatioIs1;
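+ // An explicit ZOOM_METHOD_ZOOM_RATIO selection forces zoom-ratio semantics
+ // even when the requested ratio is 1.0.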
+ if (flags::zoom_method()) {
+ entry = request->find(ANDROID_CONTROL_ZOOM_METHOD);
+ useZoomRatio |= (entry.count == 1
+ && entry.data.u8[0] == ANDROID_CONTROL_ZOOM_METHOD_ZOOM_RATIO);
+ }
+ if (useZoomRatio) {
// If cropRegion is windowboxing, override it with activeArray
camera_metadata_entry_t cropRegionEntry = request->find(ANDROID_SCALER_CROP_REGION);
if (cropRegionEntry.count == 4) {
@@ -248,9 +275,9 @@
}
}
- if (mHalSupportsZoomRatio && zoomRatioIs1) {
+ if (mHalSupportsZoomRatio && !useZoomRatio) {
res = separateZoomFromCropLocked(request, false/*isResult*/, arrayWidth, arrayHeight);
- } else if (!mHalSupportsZoomRatio && !zoomRatioIs1) {
+ } else if (!mHalSupportsZoomRatio && useZoomRatio) {
res = combineZoomAndCropLocked(request, false/*isResult*/, arrayWidth, arrayHeight);
}
@@ -263,7 +290,7 @@
return res;
}
-status_t ZoomRatioMapper::updateCaptureResult(CameraMetadata* result, bool requestedZoomRatioIs1) {
+status_t ZoomRatioMapper::updateCaptureResult(CameraMetadata* result, bool useZoomRatio) {
if (!mIsValid) return INVALID_OPERATION;
status_t res = OK;
@@ -273,9 +300,9 @@
if (res != OK) {
return res;
}
- if (mHalSupportsZoomRatio && requestedZoomRatioIs1) {
+ if (mHalSupportsZoomRatio && !useZoomRatio) {
res = combineZoomAndCropLocked(result, true/*isResult*/, arrayWidth, arrayHeight);
- } else if (!mHalSupportsZoomRatio && !requestedZoomRatioIs1) {
+ } else if (!mHalSupportsZoomRatio && useZoomRatio) {
res = separateZoomFromCropLocked(result, true/*isResult*/, arrayWidth, arrayHeight);
} else {
camera_metadata_entry_t entry = result->find(ANDROID_CONTROL_ZOOM_RATIO);
diff --git a/services/camera/libcameraservice/device3/ZoomRatioMapper.h b/services/camera/libcameraservice/device3/ZoomRatioMapper.h
index 1aa8e78..0ac2e09 100644
--- a/services/camera/libcameraservice/device3/ZoomRatioMapper.h
+++ b/services/camera/libcameraservice/device3/ZoomRatioMapper.h
@@ -64,7 +64,7 @@
/**
* Update capture result to handle both cropRegion and zoomRatio.
*/
- status_t updateCaptureResult(CameraMetadata *request, bool requestedZoomRatioIs1);
+ status_t updateCaptureResult(CameraMetadata *request, bool useZoomRatio);
public: // Visible for testing. Do not use concurently.
void scaleCoordinates(int32_t* coordPairs, int coordCount,
diff --git a/services/camera/libcameraservice/tests/ZoomRatioTest.cpp b/services/camera/libcameraservice/tests/ZoomRatioTest.cpp
index badd47a..a531e10 100644
--- a/services/camera/libcameraservice/tests/ZoomRatioTest.cpp
+++ b/services/camera/libcameraservice/tests/ZoomRatioTest.cpp
@@ -296,7 +296,7 @@
}
metadata.update(ANDROID_SCALER_CROP_REGION, test2xCropRegion[index], 4);
- res = mapper.updateCaptureResult(&metadata, true/*requestedZoomRatioIs1*/);
+ res = mapper.updateCaptureResult(&metadata, false/*useZoomRatio*/);
ASSERT_EQ(res, OK);
entry = metadata.find(ANDROID_SCALER_CROP_REGION);
ASSERT_EQ(entry.count, 4U);
@@ -340,7 +340,7 @@
entry = metadata.find(ANDROID_CONTROL_ZOOM_RATIO);
EXPECT_NEAR(entry.data.f[0], 2.0f, kMaxAllowedRatioError);
- res = mapper.updateCaptureResult(&metadata, true/*requestedZoomRatioIs1*/);
+ res = mapper.updateCaptureResult(&metadata, false/*useZoomRatio*/);
ASSERT_EQ(res, OK);
entry = metadata.find(ANDROID_CONTROL_ZOOM_RATIO);
EXPECT_NEAR(entry.data.f[0], 1.0f, kMaxAllowedRatioError);
@@ -364,7 +364,7 @@
entry = metadata.find(ANDROID_CONTROL_ZOOM_RATIO);
EXPECT_NEAR(entry.data.f[0], 1.0f, kMaxAllowedRatioError);
- res = mapper.updateCaptureResult(&metadata, true/*requestedZoomRatioIs1*/);
+ res = mapper.updateCaptureResult(&metadata, false/*useZoomRatio*/);
ASSERT_EQ(res, OK);
entry = metadata.find(ANDROID_SCALER_CROP_REGION);
ASSERT_EQ(entry.count, 4U);
@@ -452,7 +452,7 @@
entry = metadata.find(ANDROID_CONTROL_ZOOM_RATIO);
ASSERT_EQ(entry.data.f[0], zoomRatio);
- res = mapper.updateCaptureResult(&metadata, false/*requestedZoomRatioIs1*/);
+ res = mapper.updateCaptureResult(&metadata, true/*useZoomRatio*/);
ASSERT_EQ(res, OK);
entry = metadata.find(ANDROID_SCALER_CROP_REGION);
ASSERT_EQ(entry.count, 4U);
diff --git a/services/camera/libcameraservice/utils/AttributionAndPermissionUtils.cpp b/services/camera/libcameraservice/utils/AttributionAndPermissionUtils.cpp
index 4b63704..80af140 100644
--- a/services/camera/libcameraservice/utils/AttributionAndPermissionUtils.cpp
+++ b/services/camera/libcameraservice/utils/AttributionAndPermissionUtils.cpp
@@ -138,7 +138,7 @@
int32_t attributedOpCode, bool forDataDelivery, bool startDataDelivery,
bool checkAutomotive) {
AttributionSourceState clientAttribution = attributionSource;
- if (!flags::check_full_attribution_source_chain() && !clientAttribution.next.empty()) {
+ if (!flags::data_delivery_permission_checks() && !clientAttribution.next.empty()) {
clientAttribution.next.clear();
}
@@ -408,7 +408,7 @@
clientUid = callingUid;
} else {
validUid = isTrustedCallingUid(callingUid);
- if (flags::use_context_attribution_source()) {
+ if (flags::data_delivery_permission_checks()) {
validUid = validUid || (clientUid == callingUid);
}
}
@@ -426,7 +426,7 @@
clientPid = callingPid;
} else {
validPid = isTrustedCallingUid(callingUid);
- if (flags::use_context_attribution_source()) {
+ if (flags::data_delivery_permission_checks()) {
validPid = validPid || (clientPid == callingPid);
}
}
diff --git a/services/camera/libcameraservice/utils/AttributionAndPermissionUtils.h b/services/camera/libcameraservice/utils/AttributionAndPermissionUtils.h
index 3361eaa..1c5d6da 100644
--- a/services/camera/libcameraservice/utils/AttributionAndPermissionUtils.h
+++ b/services/camera/libcameraservice/utils/AttributionAndPermissionUtils.h
@@ -92,8 +92,8 @@
virtual void restoreCallingIdentity(int64_t token);
/**
- * If flag::use_context_attribution_source() is enabled, check the calling attribution source
- * and resolve its package name, or fill in the pid/uid/package name if necessary.
+ * If flags::data_delivery_permission_checks() is enabled, check the calling attribution
+ * source and resolve its package name, or fill in the pid/uid/package name if necessary.
*
* @param resolvedAttributionSource The resolved attribution source.
* @param methodName The name of the method calling this function (for logging only).
@@ -263,8 +263,20 @@
binder::Status resolveAttributionSource(AttributionSourceState& resolvedAttributionSource,
const std::string& methodName,
const std::optional<std::string>& cameraIdMaybe) {
- return mAttributionAndPermissionUtils->resolveAttributionSource(resolvedAttributionSource,
- methodName, cameraIdMaybe);
+ std::string passedPackageName;
+ if (resolvedAttributionSource.packageName.has_value()) {
+ passedPackageName = resolvedAttributionSource.packageName.value();
+ }
+ auto ret = mAttributionAndPermissionUtils->resolveAttributionSource(
+ resolvedAttributionSource, methodName, cameraIdMaybe);
+ if (!ret.isOk()) {
+ return ret;
+ }
+ // Restore the caller-provided package name, which the resolver may have overwritten
+ if (passedPackageName.size() != 0) {
+ resolvedAttributionSource.packageName = std::move(passedPackageName);
+ }
+ return ret;
}
// The word 'System' here does not refer to callers only on the system
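The wrapper now records the caller-supplied package name, delegates to the underlying resolver, and restores the name on success, so a resolver that fills in a default cannot clobber what the caller passed. A condensed sketch of that save/resolve/restore pattern with stub types:

#include <optional>
#include <string>
#include <utility>

struct SourceStub { std::optional<std::string> packageName; };

// Stand-in for the real resolver, which may overwrite packageName.
static bool resolveStub(SourceStub& source) {
    source.packageName = "resolved.default";
    return true;
}

static bool resolvePreservingPackageName(SourceStub& source) {
    std::string passed = source.packageName.value_or("");
    if (!resolveStub(source)) {
        return false;  // propagate resolver failure unchanged
    }
    if (!passed.empty()) {
        source.packageName = std::move(passed);  // restore the caller's name
    }
    return true;
}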
diff --git a/services/camera/libcameraservice/utils/SessionConfigurationUtils.cpp b/services/camera/libcameraservice/utils/SessionConfigurationUtils.cpp
index d937fe9..ee4df4e 100644
--- a/services/camera/libcameraservice/utils/SessionConfigurationUtils.cpp
+++ b/services/camera/libcameraservice/utils/SessionConfigurationUtils.cpp
@@ -17,6 +17,7 @@
#include <cutils/properties.h>
#include "SessionConfigurationUtils.h"
+#include <android/data_space.h>
#include "../api2/DepthCompositeStream.h"
#include "../api2/HeicCompositeStream.h"
#include "aidl/android/hardware/graphics/common/Dataspace.h"
@@ -40,6 +41,7 @@
namespace android {
namespace camera3 {
+namespace flags = com::android::internal::camera::flags;
void StreamConfiguration::getStreamConfigurations(
const CameraMetadata &staticInfo, int configuration,
@@ -167,11 +169,16 @@
getAppropriateModeTag(ANDROID_HEIC_AVAILABLE_HEIC_STREAM_CONFIGURATIONS, maxResolution);
const int32_t jpegRSizesTag = getAppropriateModeTag(
ANDROID_JPEGR_AVAILABLE_JPEG_R_STREAM_CONFIGURATIONS, maxResolution);
+ const int32_t heicUltraHDRSizesTag = getAppropriateModeTag(
+ ANDROID_HEIC_AVAILABLE_HEIC_ULTRA_HDR_STREAM_CONFIGURATIONS, maxResolution);
bool isJpegRDataSpace = (dataSpace == static_cast<android_dataspace_t>(
::aidl::android::hardware::graphics::common::Dataspace::JPEG_R));
+ bool isHeicUltraHDRDataSpace = (dataSpace == static_cast<android_dataspace_t>(
+ ADATASPACE_HEIF_ULTRAHDR));
camera_metadata_ro_entry streamConfigs =
(isJpegRDataSpace) ? info.find(jpegRSizesTag) :
+ (isHeicUltraHDRDataSpace) ? info.find(heicUltraHDRSizesTag) :
(dataSpace == HAL_DATASPACE_DEPTH) ? info.find(depthSizesTag) :
(dataSpace == static_cast<android_dataspace>(HAL_DATASPACE_HEIF)) ?
info.find(heicSizesTag) :
@@ -232,6 +239,8 @@
if (dataSpace == static_cast<android_dataspace_t>(
::aidl::android::hardware::graphics::common::Dataspace::JPEG_R)) {
return true;
+ } else if (dataSpace == static_cast<android_dataspace_t>(ADATASPACE_HEIF_ULTRAHDR)) {
+ return true;
}
return false;
@@ -341,6 +350,9 @@
static_cast<android_dataspace>(
::aidl::android::hardware::graphics::common::Dataspace::JPEG_R)) {
format64 = static_cast<int64_t>(PublicFormat::JPEG_R);
+ } else if (format == HAL_PIXEL_FORMAT_BLOB && dataSpace ==
+ static_cast<android_dataspace>(ADATASPACE_HEIF_ULTRAHDR)) {
+ format64 = static_cast<int64_t>(HEIC_ULTRAHDR);
}
camera_metadata_ro_entry_t entry =
@@ -1241,6 +1253,14 @@
request->update(ANDROID_CONTROL_AUTOFRAMING, &kDefaultAutoframingMode, 1);
}
+ if (flags::ae_priority()) {
+ // Fill in CONTROL_AE_PRIORITY_MODE if not available
+ if (!request->exists(ANDROID_CONTROL_AE_PRIORITY_MODE)) {
+ static const uint8_t kDefaultAePriorityMode = ANDROID_CONTROL_AE_PRIORITY_MODE_OFF;
+ request->update(ANDROID_CONTROL_AE_PRIORITY_MODE, &kDefaultAePriorityMode, 1);
+ }
+ }
+
return OK;
}
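The AE-priority hunk follows the same template this function already uses for other controls: behind the flag, a default is written only when the request lacks the key, so explicit request values always win. A generic sketch of that fill-if-absent pattern, using a plain map in place of CameraMetadata (tag value hypothetical):

#include <cstdint>
#include <map>

using MetadataStub = std::map<uint32_t, uint8_t>;
constexpr uint32_t kAePriorityModeTag = 0x1001;  // hypothetical tag value
constexpr uint8_t kAePriorityModeOff = 0;

static void fillDefaultIfAbsent(MetadataStub& request) {
    // Write the default only when the caller did not set the key.
    if (request.find(kAePriorityModeTag) == request.end()) {
        request[kAePriorityModeTag] = kAePriorityModeOff;
    }
}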
diff --git a/services/camera/libcameraservice/utils/SessionConfigurationUtilsHost.cpp b/services/camera/libcameraservice/utils/SessionConfigurationUtilsHost.cpp
index 7d344f8..2f4e83a 100644
--- a/services/camera/libcameraservice/utils/SessionConfigurationUtilsHost.cpp
+++ b/services/camera/libcameraservice/utils/SessionConfigurationUtilsHost.cpp
@@ -49,6 +49,12 @@
return ANDROID_HEIC_AVAILABLE_HEIC_MIN_FRAME_DURATIONS_MAXIMUM_RESOLUTION;
case ANDROID_HEIC_AVAILABLE_HEIC_STALL_DURATIONS:
return ANDROID_HEIC_AVAILABLE_HEIC_STALL_DURATIONS_MAXIMUM_RESOLUTION;
+ case ANDROID_HEIC_AVAILABLE_HEIC_ULTRA_HDR_STREAM_CONFIGURATIONS:
+ return ANDROID_HEIC_AVAILABLE_HEIC_ULTRA_HDR_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION;
+ case ANDROID_HEIC_AVAILABLE_HEIC_ULTRA_HDR_MIN_FRAME_DURATIONS:
+ return ANDROID_HEIC_AVAILABLE_HEIC_ULTRA_HDR_MIN_FRAME_DURATIONS_MAXIMUM_RESOLUTION;
+ case ANDROID_HEIC_AVAILABLE_HEIC_ULTRA_HDR_STALL_DURATIONS:
+ return ANDROID_HEIC_AVAILABLE_HEIC_ULTRA_HDR_STALL_DURATIONS_MAXIMUM_RESOLUTION;
case ANDROID_JPEGR_AVAILABLE_JPEG_R_STREAM_CONFIGURATIONS:
return ANDROID_JPEGR_AVAILABLE_JPEG_R_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION;
case ANDROID_JPEGR_AVAILABLE_JPEG_R_MIN_FRAME_DURATIONS:
diff --git a/services/camera/virtualcamera/VirtualCameraCaptureResult.cc b/services/camera/virtualcamera/VirtualCameraCaptureResult.cc
index a61f553..da1c208 100644
--- a/services/camera/virtualcamera/VirtualCameraCaptureResult.cc
+++ b/services/camera/virtualcamera/VirtualCameraCaptureResult.cc
@@ -16,6 +16,7 @@
#include "VirtualCameraCaptureResult.h"
#include <cstdint>
+#include <memory>
#include "VirtualCameraCaptureRequest.h"
#include "aidl/android/hardware/camera/device/CameraMetadata.h"
@@ -34,7 +35,7 @@
} // namespace
-CameraMetadata createCaptureResultMetadata(
+std::unique_ptr<CameraMetadata> createCaptureResultMetadata(
const std::chrono::nanoseconds timestamp,
const RequestSettings& requestSettings,
const Resolution reportedSensorSize) {
@@ -109,9 +110,9 @@
if (metadata == nullptr) {
ALOGE("%s: Failed to build capture result metadata", __func__);
- return CameraMetadata();
+ return std::make_unique<CameraMetadata>();
}
- return std::move(*metadata);
+ return metadata;
}
} // namespace virtualcamera
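Returning std::unique_ptr<CameraMetadata> lets the failure path above hand back a non-null empty object instead of a moved-from value, so callers can dereference the result unconditionally. A sketch of that contract with a stub metadata type:

#include <memory>
#include <vector>

struct MetadataStub { std::vector<int> entries; };

// Mirrors the shape of createCaptureResultMetadata after this change:
// never null, an empty object signals the failure path.
static std::unique_ptr<MetadataStub> makeMetadata(bool buildSucceeded) {
    if (!buildSucceeded) {
        return std::make_unique<MetadataStub>();  // empty, but safe to use
    }
    auto metadata = std::make_unique<MetadataStub>();
    metadata->entries.push_back(42);  // placeholder for real entries
    return metadata;
}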
diff --git a/services/camera/virtualcamera/VirtualCameraCaptureResult.h b/services/camera/virtualcamera/VirtualCameraCaptureResult.h
index 9e5b4d7..c3978f7 100644
--- a/services/camera/virtualcamera/VirtualCameraCaptureResult.h
+++ b/services/camera/virtualcamera/VirtualCameraCaptureResult.h
@@ -18,21 +18,10 @@
#define ANDROID_COMPANION_VIRTUALCAMERA_VIRTUALCAMERACAPTURERESULT_H
#include <chrono>
-#include <cstdint>
#include <cstring>
-#include <future>
#include <memory>
-#include <mutex>
-#include <thread>
-#include <utility>
-#include <vector>
-#include "Exif.h"
-#include "GLES/gl.h"
#include "VirtualCameraCaptureRequest.h"
-#include "VirtualCameraDevice.h"
-#include "VirtualCameraRenderThread.h"
-#include "VirtualCameraSessionContext.h"
#include "aidl/android/hardware/camera/device/CameraMetadata.h"
namespace android {
@@ -41,7 +30,7 @@
// Construct the Metadata for the Capture result based on the request
// settings, timestamp and reported sensor size
-::aidl::android::hardware::camera::device::CameraMetadata
+std::unique_ptr<::aidl::android::hardware::camera::device::CameraMetadata>
createCaptureResultMetadata(std::chrono::nanoseconds timestamp,
const RequestSettings& requestSettings,
Resolution reportedSensorSize);
diff --git a/services/camera/virtualcamera/VirtualCameraRenderThread.cc b/services/camera/virtualcamera/VirtualCameraRenderThread.cc
index becba90..58c6549 100644
--- a/services/camera/virtualcamera/VirtualCameraRenderThread.cc
+++ b/services/camera/virtualcamera/VirtualCameraRenderThread.cc
@@ -100,6 +100,9 @@
static constexpr UpdateTextureTask kUpdateTextureTask;
+// The number of nanoseconds to wait for the first frame to be drawn on the input surface
+static constexpr std::chrono::nanoseconds kMaxWaitFirstFrame = 3s;
+
NotifyMsg createShutterNotifyMsg(int frameNumber,
std::chrono::nanoseconds timestamp) {
NotifyMsg msg;
@@ -110,11 +113,13 @@
return msg;
}
-NotifyMsg createBufferErrorNotifyMsg(int frameNumber, int streamId) {
+// Create a NotifyMsg for an error case. The default error is ERROR_BUFFER.
+NotifyMsg createErrorNotifyMsg(int frameNumber, int streamId,
+ ErrorCode errorCode = ErrorCode::ERROR_BUFFER) {
NotifyMsg msg;
msg.set<NotifyMsg::Tag::error>(ErrorMsg{.frameNumber = frameNumber,
.errorStreamId = streamId,
- .errorCode = ErrorCode::ERROR_BUFFER});
+ .errorCode = errorCode});
return msg;
}
@@ -421,10 +426,15 @@
}
// Calculate the maximal amount of time we can afford to wait for next frame.
+ const bool isFirstFrameDrawn = mEglSurfaceTexture->isFirstFrameDrawn();
+ ALOGV("First Frame Drawn: %s", isFirstFrameDrawn ? "Yes" : "No");
+
const std::chrono::nanoseconds maxFrameDuration =
- getMaxFrameDuration(request.getRequestSettings());
+ isFirstFrameDrawn ? getMaxFrameDuration(request.getRequestSettings())
+ : kMaxWaitFirstFrame;
const std::chrono::nanoseconds elapsedDuration =
- timestamp - lastAcquisitionTimestamp;
+ isFirstFrameDrawn ? timestamp - lastAcquisitionTimestamp : 0ns;
+
if (elapsedDuration < maxFrameDuration) {
// We can afford to wait for next frame.
// Note that if there's already new frame in the input Surface, the call
@@ -434,6 +444,17 @@
timestamp = std::chrono::duration_cast<std::chrono::nanoseconds>(
std::chrono::steady_clock::now().time_since_epoch());
if (!gotNewFrame) {
+ if (!mEglSurfaceTexture->isFirstFrameDrawn()) {
+ // No input frame has ever been drawn. This is considered an error
+ // case. Notify the framework of the failure and return early.
+ ALOGW("Timed out waiting for first frame to be drawn.");
+ std::unique_ptr<CaptureResult> captureResult = createCaptureResult(
+ request.getFrameNumber(), /* metadata = */ nullptr);
+ notifyTimeout(request, *captureResult);
+ submitCaptureResult(std::move(captureResult));
+ return;
+ }
+
ALOGV(
"%s: No new frame received on input surface after waiting for "
"%" PRIu64 "ns, repeating last frame.",
@@ -457,75 +478,20 @@
captureTimestamp.count(), timestamp.count());
}
- CaptureResult captureResult;
- captureResult.fmqResultSize = 0;
- captureResult.frameNumber = request.getFrameNumber();
- // Partial result needs to be set to 1 when metadata are present.
- captureResult.partialResult = 1;
- captureResult.inputBuffer.streamId = -1;
- captureResult.physicalCameraMetadata.resize(0);
- captureResult.result = createCaptureResultMetadata(
- captureTimestamp, request.getRequestSettings(), mReportedSensorSize);
+ std::unique_ptr<CaptureResult> captureResult = createCaptureResult(
+ request.getFrameNumber(),
+ createCaptureResultMetadata(
+ captureTimestamp, request.getRequestSettings(), mReportedSensorSize));
+ renderOutputBuffers(request, *captureResult);
- const std::vector<CaptureRequestBuffer>& buffers = request.getBuffers();
- captureResult.outputBuffers.resize(buffers.size());
-
- for (int i = 0; i < buffers.size(); ++i) {
- const CaptureRequestBuffer& reqBuffer = buffers[i];
- StreamBuffer& resBuffer = captureResult.outputBuffers[i];
- resBuffer.streamId = reqBuffer.getStreamId();
- resBuffer.bufferId = reqBuffer.getBufferId();
- resBuffer.status = BufferStatus::OK;
-
- const std::optional<Stream> streamConfig =
- mSessionContext.getStreamConfig(reqBuffer.getStreamId());
-
- if (!streamConfig.has_value()) {
- resBuffer.status = BufferStatus::ERROR;
- continue;
- }
-
- auto status = streamConfig->format == PixelFormat::BLOB
- ? renderIntoBlobStreamBuffer(
- reqBuffer.getStreamId(), reqBuffer.getBufferId(),
- captureResult.result, request.getRequestSettings(),
- reqBuffer.getFence())
- : renderIntoImageStreamBuffer(reqBuffer.getStreamId(),
- reqBuffer.getBufferId(),
- reqBuffer.getFence());
- if (!status.isOk()) {
- resBuffer.status = BufferStatus::ERROR;
- }
- }
-
- std::vector<NotifyMsg> notifyMsg{
- createShutterNotifyMsg(request.getFrameNumber(), captureTimestamp)};
- for (const StreamBuffer& resBuffer : captureResult.outputBuffers) {
- if (resBuffer.status != BufferStatus::OK) {
- notifyMsg.push_back(createBufferErrorNotifyMsg(request.getFrameNumber(),
- resBuffer.streamId));
- }
- }
-
- auto status = mCameraDeviceCallback->notify(notifyMsg);
+ auto status = notifyShutter(request, *captureResult, captureTimestamp);
if (!status.isOk()) {
ALOGE("%s: notify call failed: %s", __func__,
status.getDescription().c_str());
return;
}
- std::vector<::aidl::android::hardware::camera::device::CaptureResult>
- captureResults(1);
- captureResults[0] = std::move(captureResult);
-
- status = mCameraDeviceCallback->processCaptureResult(captureResults);
- if (!status.isOk()) {
- ALOGE("%s: processCaptureResult call failed: %s", __func__,
- status.getDescription().c_str());
- return;
- }
-
- ALOGV("%s: Successfully called processCaptureResult", __func__);
+ submitCaptureResult(std::move(captureResult));
}
std::chrono::nanoseconds VirtualCameraRenderThread::throttleRendering(
@@ -558,22 +524,124 @@
std::chrono::nanoseconds timeSinceLastFrame) {
std::chrono::nanoseconds surfaceTimestamp = mEglSurfaceTexture->getTimestamp();
uint64_t lastSurfaceTimestamp = mLastSurfaceTimestampNanoseconds.load();
- if (surfaceTimestamp.count() < 0 ||
- surfaceTimestamp.count() == lastSurfaceTimestamp) {
- if (lastSurfaceTimestamp > 0) {
- // The timestamps were provided by the producer but we are
- // repeating the last frame, so we increase the previous timestamp by
- // the elapsed time sinced its capture, otherwise the camera framework
- // will discard the frame.
- surfaceTimestamp = std::chrono::nanoseconds(lastSurfaceTimestamp +
- timeSinceLastFrame.count());
- }
+ if (lastSurfaceTimestamp > 0 &&
+ surfaceTimestamp.count() <= lastSurfaceTimestamp) {
+ // The timestamps were provided by the producer but we are
+ // repeating the last frame, so we increase the previous timestamp by
+ // the elapsed time since its capture, otherwise the camera framework
+ // will discard the frame.
+ surfaceTimestamp = std::chrono::nanoseconds(lastSurfaceTimestamp +
+ timeSinceLastFrame.count());
+ ALOGI(
+ "Surface's timestamp is stall. Artificially increasing the surface "
+ "timestamp by %lld",
+ timeSinceLastFrame.count());
}
mLastSurfaceTimestampNanoseconds.store(surfaceTimestamp.count(),
std::memory_order_relaxed);
return surfaceTimestamp;
}
+std::unique_ptr<CaptureResult> VirtualCameraRenderThread::createCaptureResult(
+ int frameNumber, std::unique_ptr<CameraMetadata> metadata) {
+ std::unique_ptr<CaptureResult> captureResult =
+ std::make_unique<CaptureResult>();
+ captureResult->fmqResultSize = 0;
+ captureResult->frameNumber = frameNumber;
+ // Partial result needs to be set to 1 when metadata is present.
+ captureResult->partialResult = 1;
+ captureResult->inputBuffer.streamId = -1;
+ captureResult->physicalCameraMetadata.resize(0);
+ captureResult->result = metadata != nullptr ? *metadata : CameraMetadata();
+ return captureResult;
+}
+
+void VirtualCameraRenderThread::renderOutputBuffers(
+ const ProcessCaptureRequestTask& request, CaptureResult& captureResult) {
+ const std::vector<CaptureRequestBuffer>& buffers = request.getBuffers();
+ captureResult.outputBuffers.resize(buffers.size());
+
+ for (int i = 0; i < buffers.size(); ++i) {
+ const CaptureRequestBuffer& reqBuffer = buffers[i];
+ StreamBuffer& resBuffer = captureResult.outputBuffers[i];
+ resBuffer.streamId = reqBuffer.getStreamId();
+ resBuffer.bufferId = reqBuffer.getBufferId();
+ resBuffer.status = BufferStatus::OK;
+
+ const std::optional<Stream> streamConfig =
+ mSessionContext.getStreamConfig(reqBuffer.getStreamId());
+
+ if (!streamConfig.has_value()) {
+ resBuffer.status = BufferStatus::ERROR;
+ continue;
+ }
+
+ auto status = streamConfig->format == PixelFormat::BLOB
+ ? renderIntoBlobStreamBuffer(
+ reqBuffer.getStreamId(), reqBuffer.getBufferId(),
+ captureResult.result, request.getRequestSettings(),
+ reqBuffer.getFence())
+ : renderIntoImageStreamBuffer(reqBuffer.getStreamId(),
+ reqBuffer.getBufferId(),
+ reqBuffer.getFence());
+ if (!status.isOk()) {
+ resBuffer.status = BufferStatus::ERROR;
+ }
+ }
+}
+
+::ndk::ScopedAStatus VirtualCameraRenderThread::notifyTimeout(
+ const ProcessCaptureRequestTask& request, CaptureResult& captureResult) {
+ const std::vector<CaptureRequestBuffer>& buffers = request.getBuffers();
+ captureResult.outputBuffers.resize(buffers.size());
+
+ std::vector<NotifyMsg> notifyMsgs;
+
+ for (int i = 0; i < buffers.size(); ++i) {
+ const CaptureRequestBuffer& reqBuffer = buffers[i];
+ StreamBuffer& resBuffer = captureResult.outputBuffers[i];
+ resBuffer.streamId = reqBuffer.getStreamId();
+ resBuffer.bufferId = reqBuffer.getBufferId();
+ resBuffer.status = BufferStatus::ERROR;
+ notifyMsgs.push_back(createErrorNotifyMsg(
+ request.getFrameNumber(), resBuffer.streamId, ErrorCode::ERROR_REQUEST));
+ }
+ return mCameraDeviceCallback->notify(notifyMsgs);
+}
+
+::ndk::ScopedAStatus VirtualCameraRenderThread::notifyShutter(
+ const ProcessCaptureRequestTask& request, const CaptureResult& captureResult,
+ std::chrono::nanoseconds captureTimestamp) {
+ std::vector<NotifyMsg> notifyMsgs{
+ createShutterNotifyMsg(request.getFrameNumber(), captureTimestamp)};
+ for (const StreamBuffer& resBuffer : captureResult.outputBuffers) {
+ if (resBuffer.status != BufferStatus::OK) {
+ notifyMsgs.push_back(
+ createErrorNotifyMsg(request.getFrameNumber(), resBuffer.streamId));
+ }
+ }
+
+ return mCameraDeviceCallback->notify(notifyMsgs);
+}
+
+::ndk::ScopedAStatus VirtualCameraRenderThread::submitCaptureResult(
+ std::unique_ptr<CaptureResult> captureResult) {
+ std::vector<::aidl::android::hardware::camera::device::CaptureResult>
+ captureResults;
+ captureResults.push_back(std::move(*captureResult));
+
+ ::ndk::ScopedAStatus status =
+ mCameraDeviceCallback->processCaptureResult(captureResults);
+ if (!status.isOk()) {
+ ALOGE("%s: processCaptureResult call failed: %s", __func__,
+ status.getDescription().c_str());
+ return status;
+ }
+
+ ALOGV("%s: Successfully called processCaptureResult", __func__);
+ return status;
+}
+
void VirtualCameraRenderThread::flushCaptureRequest(
const ProcessCaptureRequestTask& request) {
CaptureResult captureResult;
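After this refactor the request path reads as four steps: build the result, render into its buffers, notify shutter plus any per-buffer errors, then submit. A compressed sketch of that control flow with stub types (not the real AIDL ones):

#include <memory>
#include <utility>

struct RequestStub { int frameNumber = 7; };
struct ResultStub { int frameNumber = 0; };

static std::unique_ptr<ResultStub> createResultStub(int frameNumber) {
    auto result = std::make_unique<ResultStub>();
    result->frameNumber = frameNumber;
    return result;
}
static void renderBuffersStub(const RequestStub&, ResultStub&) {}
static bool notifyShutterStub(const RequestStub&, const ResultStub&) { return true; }
static void submitResultStub(std::unique_ptr<ResultStub>) {}

static void processRequestStub(const RequestStub& request) {
    auto result = createResultStub(request.frameNumber);
    renderBuffersStub(request, *result);   // fills per-buffer statuses
    if (!notifyShutterStub(request, *result)) {
        return;                            // notify failed; bail out early
    }
    submitResultStub(std::move(result));   // hand the result to the framework
}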
diff --git a/services/camera/virtualcamera/VirtualCameraRenderThread.h b/services/camera/virtualcamera/VirtualCameraRenderThread.h
index 1fb4e84..4cad39e 100644
--- a/services/camera/virtualcamera/VirtualCameraRenderThread.h
+++ b/services/camera/virtualcamera/VirtualCameraRenderThread.h
@@ -19,6 +19,7 @@
#include <atomic>
#include <chrono>
+#include <cstddef>
#include <cstdint>
#include <deque>
#include <future>
@@ -205,6 +206,35 @@
std::chrono::nanoseconds getSurfaceTimestamp(
std::chrono::nanoseconds timeSinceLastFrame);
+ // Build a default capture result object populating the metadata from the request.
+ std::unique_ptr<::aidl::android::hardware::camera::device::CaptureResult>
+ createCaptureResult(
+ int frameNumber,
+ std::unique_ptr<aidl::android::hardware::camera::device::CameraMetadata>
+ metadata);
+
+ // Renders the images from the input surface into the request's buffers.
+ void renderOutputBuffers(
+ const ProcessCaptureRequestTask& request,
+ ::aidl::android::hardware::camera::device::CaptureResult& captureResult);
+
+ // Notify a shutter event for all the buffers in this request.
+ ::ndk::ScopedAStatus notifyShutter(
+ const ProcessCaptureRequestTask& request,
+ const ::aidl::android::hardware::camera::device::CaptureResult& captureResult,
+ std::chrono::nanoseconds captureTimestamp);
+
+ // Notify a timeout error for this request. The capture result still needs to
+ // be submitted after this call.
+ ::ndk::ScopedAStatus notifyTimeout(
+ const ProcessCaptureRequestTask& request,
+ ::aidl::android::hardware::camera::device::CaptureResult& captureResult);
+
+ // Submit the capture result to the camera callback.
+ ::ndk::ScopedAStatus submitCaptureResult(
+ std::unique_ptr<::aidl::android::hardware::camera::device::CaptureResult>
+ captureResult);
+
// Camera callback
const std::shared_ptr<
::aidl::android::hardware::camera::device::ICameraDeviceCallback>
diff --git a/services/camera/virtualcamera/util/EglSurfaceTexture.cc b/services/camera/virtualcamera/util/EglSurfaceTexture.cc
index be36ec4..fc469a0 100644
--- a/services/camera/virtualcamera/util/EglSurfaceTexture.cc
+++ b/services/camera/virtualcamera/util/EglSurfaceTexture.cc
@@ -105,6 +105,10 @@
return std::chrono::nanoseconds(mGlConsumer->getTimestamp());
}
+bool EglSurfaceTexture::isFirstFrameDrawn() {
+ return mGlConsumer->getFrameNumber() > 0;
+}
+
GLuint EglSurfaceTexture::updateTexture() {
int previousFrameId;
int framesAdvance = 0;
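isFirstFrameDrawn lets the render thread (see the hunks above) choose a different wait budget before the producer has ever supplied a frame: a fixed 3-second grace period instead of the per-frame duration, with the elapsed time forced to zero so the whole budget applies. A sketch of that selection:

#include <chrono>

using namespace std::chrono_literals;

static std::chrono::nanoseconds maxWaitForNextFrame(
        bool firstFrameDrawn, std::chrono::nanoseconds perFrameDuration) {
    // Before the first frame is drawn, wait up to the fixed grace period;
    // afterwards, wait at most one frame duration.
    constexpr std::chrono::nanoseconds kMaxWaitFirstFrame = 3s;
    return firstFrameDrawn ? perFrameDuration : kMaxWaitFirstFrame;
}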
diff --git a/services/camera/virtualcamera/util/EglSurfaceTexture.h b/services/camera/virtualcamera/util/EglSurfaceTexture.h
index c1f1169..9f75315 100644
--- a/services/camera/virtualcamera/util/EglSurfaceTexture.h
+++ b/services/camera/virtualcamera/util/EglSurfaceTexture.h
@@ -86,6 +86,9 @@
// set by the most recent call to updateTexture.
std::chrono::nanoseconds getTimestamp();
+ // Returns true if a frame has ever been drawn on this surface.
+ bool isFirstFrameDrawn();
+
private:
#if !COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
sp<IGraphicBufferProducer> mBufferProducer;
diff --git a/services/mediametrics/StringUtils.cpp b/services/mediametrics/StringUtils.cpp
index 5766f1c..3b2db85 100644
--- a/services/mediametrics/StringUtils.cpp
+++ b/services/mediametrics/StringUtils.cpp
@@ -80,33 +80,40 @@
{
std::vector<std::pair<std::string, std::string>> result;
- // Currently, the device format is EXACTLY
- // (device1, addr1)|(device2, addr2)|...
+ // Currently, the device format is
+ //
+ // devices = device_addr OR device_addr|devices
+ // device_addr = device OR (device, addr)
+ //
+ // EXAMPLE:
+ // device1|(device2, addr2)|...
static constexpr char delim[] = "()|,";
for (auto it = devices.begin(); ; ) {
- auto token = tokenizer(it, devices.end(), delim);
- if (token != "(") return result;
+ std::string address;
+ std::string device = tokenizer(it, devices.end(), delim);
+ if (device.empty()) return result;
+ if (device == "(") { // it is a pair; otherwise we consider it a device
+ device = tokenizer(it, devices.end(), delim); // get actual device
+ auto token = tokenizer(it, devices.end(), delim);
+ if (token != ",") return result; // malformed, must have a comma
- auto device = tokenizer(it, devices.end(), delim);
- if (device.empty() || !std::isalnum(device[0])) return result;
-
- token = tokenizer(it, devices.end(), delim);
- if (token != ",") return result;
-
- // special handling here for empty addresses
- auto address = tokenizer(it, devices.end(), delim);
- if (address.empty() || !std::isalnum(device[0])) return result;
- if (address == ")") { // no address, just the ")"
- address.clear();
- } else {
- token = tokenizer(it, devices.end(), delim);
- if (token != ")") return result;
+ // special handling here for empty addresses
+ address = tokenizer(it, devices.end(), delim);
+ if (address.empty()) return result;
+ if (address == ")") { // no address, just the ")"
+ address.clear();
+ } else {
+ token = tokenizer(it, devices.end(), delim);
+ if (token != ")") return result;
+ }
}
+ // Misaligned token: the device name must start with an alphanumeric character.
+ if (!std::isalnum(device[0])) return result;
result.emplace_back(std::move(device), std::move(address));
- token = tokenizer(it, devices.end(), delim);
+ auto token = tokenizer(it, devices.end(), delim);
if (token != "|") return result; // this includes end of string detection
}
}
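Under the relaxed grammar a bare token is now accepted as a device with an empty address, while the parenthesized form still requires the comma and the closing parenthesis. A usage sketch (expected values match the tests added in the next hunk; the include path is assumed):

// #include "StringUtils.h"  // assumed header for getDeviceAddressPairs
auto pairs = android::mediametrics::stringutils::getDeviceAddressPairs(
        " A | (B1, 10) | C ");
// pairs == { {"A", ""}, {"B1", "10"}, {"C", ""} }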
diff --git a/services/mediametrics/tests/mediametrics_tests.cpp b/services/mediametrics/tests/mediametrics_tests.cpp
index a7684f4..f3933a7 100644
--- a/services/mediametrics/tests/mediametrics_tests.cpp
+++ b/services/mediametrics/tests/mediametrics_tests.cpp
@@ -963,6 +963,31 @@
ASSERT_EQ("B", devaddr[0].second);
ASSERT_EQ("C", devaddr[1].first);
ASSERT_EQ("D2", devaddr[1].second);
+
+ devaddr = android::mediametrics::stringutils::getDeviceAddressPairs(
+ " Z ");
+ ASSERT_EQ((size_t)1, devaddr.size());
+ ASSERT_EQ("Z", devaddr[0].first);
+
+ devaddr = android::mediametrics::stringutils::getDeviceAddressPairs(
+ " A | B|C ");
+ ASSERT_EQ((size_t)3, devaddr.size());
+ ASSERT_EQ("A", devaddr[0].first);
+ ASSERT_EQ("", devaddr[0].second);
+ ASSERT_EQ("B", devaddr[1].first);
+ ASSERT_EQ("", devaddr[1].second);
+ ASSERT_EQ("C", devaddr[2].first);
+ ASSERT_EQ("", devaddr[2].second);
+
+ devaddr = android::mediametrics::stringutils::getDeviceAddressPairs(
+ " A | (B1, 10) |C ");
+ ASSERT_EQ((size_t)3, devaddr.size());
+ ASSERT_EQ("A", devaddr[0].first);
+ ASSERT_EQ("", devaddr[0].second);
+ ASSERT_EQ("B1", devaddr[1].first);
+ ASSERT_EQ("10", devaddr[1].second);
+ ASSERT_EQ("C", devaddr[2].first);
+ ASSERT_EQ("", devaddr[2].second);
}
TEST(mediametrics_tests, timed_action) {
diff --git a/services/oboeservice/AAudioServiceEndpoint.cpp b/services/oboeservice/AAudioServiceEndpoint.cpp
index e7d14a0..e49e9e7 100644
--- a/services/oboeservice/AAudioServiceEndpoint.cpp
+++ b/services/oboeservice/AAudioServiceEndpoint.cpp
@@ -25,6 +25,7 @@
#include <sstream>
#include <vector>
+#include <system/aaudio/AAudio.h>
#include <utils/Singleton.h>
@@ -195,20 +196,28 @@
? AAudioConvert_inputPresetToAudioSource(params->getInputPreset())
: AUDIO_SOURCE_DEFAULT;
audio_flags_mask_t flags;
+ std::optional<std::string> optTags = {};
if (direction == AAUDIO_DIRECTION_OUTPUT) {
flags = AAudio_computeAudioFlagsMask(
params->getAllowedCapturePolicy(),
params->getSpatializationBehavior(),
params->isContentSpatialized(),
AUDIO_OUTPUT_FLAG_FAST);
+ optTags = params->getTags();
} else {
flags = static_cast<audio_flags_mask_t>(AUDIO_FLAG_LOW_LATENCY
| AAudioConvert_privacySensitiveToAudioFlagsMask(params->isPrivacySensitive()));
}
- return {
+ audio_attributes_t nativeAttributes = {
.content_type = contentType,
.usage = usage,
.source = source,
.flags = flags,
- .tags = "" };
+ .tags = ""
+ };
+ if (optTags.has_value() && !optTags->empty()) {
+ strncpy(nativeAttributes.tags, optTags.value().c_str(), AAUDIO_ATTRIBUTES_TAGS_MAX_SIZE);
+ nativeAttributes.tags[AAUDIO_ATTRIBUTES_TAGS_MAX_SIZE - 1] = '\0';
+ }
+ return nativeAttributes;
}
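strncpy does not null-terminate when the source fills the destination, so the explicit terminator write on the last byte above is what makes the copy safe. A standalone sketch of the same pattern (buffer size stands in for AAUDIO_ATTRIBUTES_TAGS_MAX_SIZE):

#include <cstddef>
#include <cstring>
#include <optional>
#include <string>

constexpr std::size_t kTagsMaxSize = 256;  // stand-in for the real constant

static void copyTags(const std::optional<std::string>& tags,
                     char (&dst)[kTagsMaxSize]) {
    dst[0] = '\0';
    if (tags.has_value() && !tags->empty()) {
        strncpy(dst, tags->c_str(), kTagsMaxSize);
        // strncpy leaves dst unterminated when the source fills the buffer,
        // so force a terminator on the last byte.
        dst[kTagsMaxSize - 1] = '\0';
    }
}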
diff --git a/services/oboeservice/AAudioServiceEndpointMMAP.cpp b/services/oboeservice/AAudioServiceEndpointMMAP.cpp
index d663f37..59bb98e 100644
--- a/services/oboeservice/AAudioServiceEndpointMMAP.cpp
+++ b/services/oboeservice/AAudioServiceEndpointMMAP.cpp
@@ -206,12 +206,17 @@
__func__, config->format, config->sample_rate,
config->channel_mask, deviceId);
+ android::DeviceIdVector deviceIds;
+ if (deviceId != AAUDIO_UNSPECIFIED) {
+ deviceIds.push_back(deviceId);
+ }
+
const std::lock_guard<std::mutex> lock(mMmapStreamLock);
const status_t status = MmapStreamInterface::openMmapStream(streamDirection,
&attributes,
config,
mMmapClient,
- &deviceId,
+ &deviceIds,
&sessionId,
this, // callback
mMmapStream,
@@ -228,6 +233,7 @@
config->channel_mask = currentConfig.channel_mask;
return AAUDIO_ERROR_UNAVAILABLE;
}
+ deviceId = android::getFirstDeviceId(deviceIds);
if (deviceId == AAUDIO_UNSPECIFIED) {
ALOGW("%s() - openMmapStream() failed to set deviceId", __func__);
@@ -422,9 +428,17 @@
return AAUDIO_ERROR_NULL;
}
struct audio_mmap_position position;
- const status_t status = mMmapStream->getMmapPosition(&position);
+ status_t status = mMmapStream->getMmapPosition(&position);
ALOGV("%s() status= %d, pos = %d, nanos = %lld\n",
__func__, status, position.position_frames, (long long) position.time_nanoseconds);
+ if (status == INVALID_OPERATION) {
+ // The HAL can return INVALID_OPERATION when the position is UNKNOWN.
+ // That can cause SHARED MMAP to break. So coerce it to NOT_ENOUGH_DATA.
+ // That will get converted to AAUDIO_ERROR_UNAVAILABLE.
+ ALOGW("%s(): change INVALID_OPERATION to NOT_ENOUGH_DATA", __func__);
+ status = NOT_ENOUGH_DATA; // see b/376467258
+ }
+
const aaudio_result_t result = AAudioConvert_androidToAAudioResult(status);
if (result == AAUDIO_ERROR_UNAVAILABLE) {
ALOGW("%s(): getMmapPosition() has no position data available", __func__);
@@ -476,8 +490,9 @@
}
};
-void AAudioServiceEndpointMMAP::onRoutingChanged(audio_port_handle_t portHandle) {
- const auto deviceId = static_cast<int32_t>(portHandle);
+void AAudioServiceEndpointMMAP::onRoutingChanged(const android::DeviceIdVector& deviceIds) {
+ const auto deviceId = android::getFirstDeviceId(deviceIds);
+ // TODO(b/367816690): Compare the new and saved device sets.
ALOGD("%s() called with dev %d, old = %d", __func__, deviceId, getDeviceId());
if (getDeviceId() != deviceId) {
if (getDeviceId() != AUDIO_PORT_HANDLE_NONE) {
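The routing callback now receives a vector of device ids; the endpoint keeps its single-id behavior by taking the first entry, with an empty vector mapping back to "unspecified". A sketch of the assumed android::getFirstDeviceId semantics:

#include <cstdint>
#include <vector>

using DeviceIdVector = std::vector<int32_t>;
constexpr int32_t kUnspecifiedDeviceId = 0;  // stand-in for AAUDIO_UNSPECIFIED

// Assumed semantics: first entry wins, empty means unspecified.
static int32_t firstDeviceId(const DeviceIdVector& deviceIds) {
    return deviceIds.empty() ? kUnspecifiedDeviceId : deviceIds.front();
}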
diff --git a/services/oboeservice/AAudioServiceEndpointMMAP.h b/services/oboeservice/AAudioServiceEndpointMMAP.h
index 962d390..a4eeba1 100644
--- a/services/oboeservice/AAudioServiceEndpointMMAP.h
+++ b/services/oboeservice/AAudioServiceEndpointMMAP.h
@@ -83,7 +83,7 @@
void onVolumeChanged(float volume) override;
- void onRoutingChanged(audio_port_handle_t portHandle) override;
+ void onRoutingChanged(const android::DeviceIdVector& deviceIds) override;
// ------------------------------------------------------------------------------
aaudio_result_t getDownDataDescription(AudioEndpointParcelable* parcelable);
diff --git a/services/oboeservice/Android.bp b/services/oboeservice/Android.bp
index 67b319f..8200ab5 100644
--- a/services/oboeservice/Android.bp
+++ b/services/oboeservice/Android.bp
@@ -89,6 +89,7 @@
"libaaudio_internal",
"libaudioclient",
"libaudioclient_aidl_conversion",
+ "libaudiofoundation",
"libaudioutils",
"libbase",
"libbinder",
diff --git a/services/oboeservice/fuzzer/Android.bp b/services/oboeservice/fuzzer/Android.bp
index 97825b3..8f672e1 100644
--- a/services/oboeservice/fuzzer/Android.bp
+++ b/services/oboeservice/fuzzer/Android.bp
@@ -44,6 +44,7 @@
"libaudioclient",
"libaudioclient_aidl_conversion",
"libaudioflinger",
+ "libaudiofoundation",
"libaudioutils",
"libbase",
"libbinder",