Merge "Remove SCO flag from valid attributes check" into main
diff --git a/camera/camera_platform.aconfig b/camera/camera_platform.aconfig
index 2d0a364..4e36e01 100644
--- a/camera/camera_platform.aconfig
+++ b/camera/camera_platform.aconfig
@@ -3,6 +3,14 @@
flag {
namespace: "camera_platform"
+ name: "camera_heif_gainmap"
+ is_exported: true
+ description: "Extend HEIC/HEIF still capture with HDR gainmap"
+ bug: "362608343"
+}
+
+flag {
+ namespace: "camera_platform"
name: "camera_hsum_permission"
is_exported: true
description: "Camera access by headless system user"
diff --git a/camera/ndk/include/camera/NdkCameraMetadataTags.h b/camera/ndk/include/camera/NdkCameraMetadataTags.h
index 44aac29..15b165f 100644
--- a/camera/ndk/include/camera/NdkCameraMetadataTags.h
+++ b/camera/ndk/include/camera/NdkCameraMetadataTags.h
@@ -7959,6 +7959,145 @@
ACAMERA_HEIC_AVAILABLE_HEIC_STALL_DURATIONS_MAXIMUM_RESOLUTION =
// int64[4*n]
ACAMERA_HEIC_START + 5,
+ /**
+ * <p>The available HEIC (ISO/IEC 23008-12/24) UltraHDR stream
+ * configurations that this camera device supports
+ * (i.e. format, width, height, output/input stream).</p>
+ *
+ * <p>Type: int32[n*4] (acamera_metadata_enum_android_heic_available_heic_ultra_hdr_stream_configurations_t)</p>
+ *
+ * <p>This tag may appear in:
+ * <ul>
+ * <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+ * </ul></p>
+ *
+ * <p>The configurations are listed as <code>(format, width, height, input?)</code> tuples.</p>
+ * <p>All the static, control, and dynamic metadata tags related to JPEG apply to HEIC formats.
+ * Configuring JPEG and HEIC streams at the same time is not supported.</p>
+ * <p>All the configuration tuples <code>(format, width, height, input?)</code> will contain
+ * AIMAGE_FORMAT_HEIC format as OUTPUT only.</p>
+ */
+ ACAMERA_HEIC_AVAILABLE_HEIC_ULTRA_HDR_STREAM_CONFIGURATIONS =
+ // int32[n*4] (acamera_metadata_enum_android_heic_available_heic_ultra_hdr_stream_configurations_t)
+ ACAMERA_HEIC_START + 6,
+ /**
+ * <p>This lists the minimum frame duration for each
+ * format/size combination for HEIC UltraHDR output formats.</p>
+ *
+ * <p>Type: int64[4*n]</p>
+ *
+ * <p>This tag may appear in:
+ * <ul>
+ * <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+ * </ul></p>
+ *
+ * <p>This should correspond to the frame duration when only that
+ * stream is active, with all processing (typically in android.*.mode)
+ * set to either OFF or FAST.</p>
+ * <p>When multiple streams are used in a request, the minimum frame
+ * duration will be max(individual stream min durations).</p>
+ * <p>See ACAMERA_SENSOR_FRAME_DURATION and
+ * ACAMERA_SCALER_AVAILABLE_STALL_DURATIONS for more details about
+ * calculating the max frame rate.</p>
+ *
+ * @see ACAMERA_SCALER_AVAILABLE_STALL_DURATIONS
+ * @see ACAMERA_SENSOR_FRAME_DURATION
+ */
+ ACAMERA_HEIC_AVAILABLE_HEIC_ULTRA_HDR_MIN_FRAME_DURATIONS = // int64[4*n]
+ ACAMERA_HEIC_START + 7,
+ /**
+ * <p>This lists the maximum stall duration for each
+ * output format/size combination for HEIC UltraHDR streams.</p>
+ *
+ * <p>Type: int64[4*n]</p>
+ *
+ * <p>This tag may appear in:
+ * <ul>
+ * <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+ * </ul></p>
+ *
+ * <p>A stall duration is how much extra time would get added
+ * to the normal minimum frame duration for a repeating request
+ * that has streams with non-zero stall.</p>
+ * <p>This functions similarly to
+ * ACAMERA_SCALER_AVAILABLE_STALL_DURATIONS for HEIC UltraHDR
+ * streams.</p>
+ * <p>All HEIC output stream formats may have a nonzero stall
+ * duration.</p>
+ *
+ * @see ACAMERA_SCALER_AVAILABLE_STALL_DURATIONS
+ */
+ ACAMERA_HEIC_AVAILABLE_HEIC_ULTRA_HDR_STALL_DURATIONS = // int64[4*n]
+ ACAMERA_HEIC_START + 8,
+ /**
+ * <p>The available HEIC (ISO/IEC 23008-12/24) UltraHDR stream
+ * configurations that this camera device supports
+ * (i.e. format, width, height, output/input stream) for CaptureRequests where
+ * ACAMERA_SENSOR_PIXEL_MODE is set to
+ * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION">CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION</a>.</p>
+ *
+ * @see ACAMERA_SENSOR_PIXEL_MODE
+ *
+ * <p>Type: int32[n*4] (acamera_metadata_enum_android_heic_available_heic_ultra_hdr_stream_configurations_maximum_resolution_t)</p>
+ *
+ * <p>This tag may appear in:
+ * <ul>
+ * <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+ * </ul></p>
+ *
+ * <p>Refer to ACAMERA_HEIC_AVAILABLE_HEIC_STREAM_CONFIGURATIONS for details.</p>
+ * <p>All the configuration tuples <code>(format, width, height, input?)</code> will contain
+ * AIMAGE_FORMAT_HEIC format as OUTPUT only.</p>
+ *
+ * @see ACAMERA_HEIC_AVAILABLE_HEIC_STREAM_CONFIGURATIONS
+ */
+ ACAMERA_HEIC_AVAILABLE_HEIC_ULTRA_HDR_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION =
+ // int32[n*4] (acamera_metadata_enum_android_heic_available_heic_ultra_hdr_stream_configurations_maximum_resolution_t)
+ ACAMERA_HEIC_START + 9,
+ /**
+ * <p>This lists the minimum frame duration for each
+ * format/size combination for HEIC UltraHDR output formats for CaptureRequests where
+ * ACAMERA_SENSOR_PIXEL_MODE is set to
+ * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION">CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION</a>.</p>
+ *
+ * @see ACAMERA_SENSOR_PIXEL_MODE
+ *
+ * <p>Type: int64[4*n]</p>
+ *
+ * <p>This tag may appear in:
+ * <ul>
+ * <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+ * </ul></p>
+ *
+ * <p>Refer to ACAMERA_HEIC_AVAILABLE_HEIC_MIN_FRAME_DURATIONS for details.</p>
+ *
+ * @see ACAMERA_HEIC_AVAILABLE_HEIC_MIN_FRAME_DURATIONS
+ */
+ ACAMERA_HEIC_AVAILABLE_HEIC_ULTRA_HDR_MIN_FRAME_DURATIONS_MAXIMUM_RESOLUTION =
+ // int64[4*n]
+ ACAMERA_HEIC_START + 10,
+ /**
+ * <p>This lists the maximum stall duration for each
+ * output format/size combination for HEIC UltraHDR streams for CaptureRequests where
+ * ACAMERA_SENSOR_PIXEL_MODE is set to
+ * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION">CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION</a>.</p>
+ *
+ * @see ACAMERA_SENSOR_PIXEL_MODE
+ *
+ * <p>Type: int64[4*n]</p>
+ *
+ * <p>This tag may appear in:
+ * <ul>
+ * <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+ * </ul></p>
+ *
+ * <p>Refer to ACAMERA_HEIC_AVAILABLE_HEIC_STALL_DURATIONS for details.</p>
+ *
+ * @see ACAMERA_HEIC_AVAILABLE_HEIC_STALL_DURATIONS
+ */
+ ACAMERA_HEIC_AVAILABLE_HEIC_ULTRA_HDR_STALL_DURATIONS_MAXIMUM_RESOLUTION =
+ // int64[4*n]
+ ACAMERA_HEIC_START + 11,
ACAMERA_HEIC_END,
/**
@@ -11526,6 +11665,26 @@
} acamera_metadata_enum_android_heic_available_heic_stream_configurations_maximum_resolution_t;
+// ACAMERA_HEIC_AVAILABLE_HEIC_ULTRA_HDR_STREAM_CONFIGURATIONS
+typedef enum acamera_metadata_enum_acamera_heic_available_heic_ultra_hdr_stream_configurations {
+ ACAMERA_HEIC_AVAILABLE_HEIC_ULTRA_HDR_STREAM_CONFIGURATIONS_OUTPUT
+ = 0,
+
+ ACAMERA_HEIC_AVAILABLE_HEIC_ULTRA_HDR_STREAM_CONFIGURATIONS_INPUT
+ = 1,
+
+} acamera_metadata_enum_android_heic_available_heic_ultra_hdr_stream_configurations_t;
+
+// ACAMERA_HEIC_AVAILABLE_HEIC_ULTRA_HDR_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION
+typedef enum acamera_metadata_enum_acamera_heic_available_heic_ultra_hdr_stream_configurations_maximum_resolution {
+ ACAMERA_HEIC_AVAILABLE_HEIC_ULTRA_HDR_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION_OUTPUT
+ = 0,
+
+ ACAMERA_HEIC_AVAILABLE_HEIC_ULTRA_HDR_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION_INPUT
+ = 1,
+
+} acamera_metadata_enum_android_heic_available_heic_ultra_hdr_stream_configurations_maximum_resolution_t;
+
// ACAMERA_AUTOMOTIVE_LOCATION
diff --git a/include/media/MmapStreamCallback.h b/include/media/MmapStreamCallback.h
index 76ee6d7..a3876d9 100644
--- a/include/media/MmapStreamCallback.h
+++ b/include/media/MmapStreamCallback.h
@@ -17,6 +17,7 @@
#ifndef ANDROID_AUDIO_MMAP_STREAM_CALLBACK_H
#define ANDROID_AUDIO_MMAP_STREAM_CALLBACK_H
+#include <media/AudioContainers.h>
#include <system/audio.h>
#include <utils/Errors.h>
#include <utils/RefBase.h>
@@ -42,10 +43,10 @@
virtual void onVolumeChanged(float volume) = 0;
/**
- * The device the stream is routed to/from has changed
- * \param[in] onRoutingChanged the unique device ID of the new device.
+ * The devices the stream is routed to/from have changed
+ * \param[in] deviceIds a set of the device IDs of the new devices.
*/
- virtual void onRoutingChanged(audio_port_handle_t deviceId) = 0;
+ virtual void onRoutingChanged(const DeviceIdVector& deviceIds) = 0;
protected:
MmapStreamCallback() {}
diff --git a/include/media/MmapStreamInterface.h b/include/media/MmapStreamInterface.h
index 7725175..3d29335 100644
--- a/include/media/MmapStreamInterface.h
+++ b/include/media/MmapStreamInterface.h
@@ -19,6 +19,7 @@
#include <system/audio.h>
#include <media/AudioClient.h>
+#include <media/AudioContainers.h>
#include <utils/Errors.h>
#include <utils/RefBase.h>
@@ -51,9 +52,10 @@
* Requested parameters as input,
* Actual parameters as output
* \param[in] client a AudioClient struct describing the first client using this stream.
- * \param[in,out] deviceId audio device the stream should preferably be routed to/from
- * Requested as input,
- * Actual as output
+ * \param[in,out] deviceIds audio devices the stream should preferably be routed to/from.
+ * Leave empty if there are no preferred devices.
+ * Requested as input,
+ * Actual as output
* \param[in,out] sessionId audio sessionId for the stream
* Requested as input, may be AUDIO_SESSION_ALLOCATE
* Actual as output
@@ -70,7 +72,7 @@
const audio_attributes_t *attr,
audio_config_base_t *config,
const AudioClient& client,
- audio_port_handle_t *deviceId,
+ DeviceIdVector *deviceIds,
audio_session_t *sessionId,
const sp<MmapStreamCallback>& callback,
sp<MmapStreamInterface>& interface,
diff --git a/media/aconfig/Android.bp b/media/aconfig/Android.bp
index 16beb28..1e5eafb 100644
--- a/media/aconfig/Android.bp
+++ b/media/aconfig/Android.bp
@@ -50,3 +50,22 @@
],
aconfig_declarations: "aconfig_codec_fwk_flags",
}
+
+aconfig_declarations {
+ name: "aconfig_media_swcodec_flags",
+ package: "android.media.swcodec.flags",
+ container: "com.android.media.swcodec",
+ srcs: ["swcodec_flags.aconfig"],
+}
+
+cc_aconfig_library {
+ name: "android.media.swcodec.flags-aconfig-cc",
+ aconfig_declarations: "aconfig_media_swcodec_flags",
+ min_sdk_version: "apex_inherit",
+ vendor_available: true,
+ double_loadable: true,
+ apex_available: [
+ "//apex_available:platform",
+ "com.android.media.swcodec",
+ ],
+}
diff --git a/media/aconfig/swcodec_flags.aconfig b/media/aconfig/swcodec_flags.aconfig
new file mode 100644
index 0000000..a435a43
--- /dev/null
+++ b/media/aconfig/swcodec_flags.aconfig
@@ -0,0 +1,14 @@
+# Media SW Codec Flags.
+#
+# !!! Please add flags in alphabetical order. !!!
+package: "android.media.swcodec.flags"
+container: "com.android.media.swcodec"
+
+flag {
+ name: "apv_software_codec"
+ is_exported: true
+ is_fixed_read_only: true
+ namespace: "codec_fwk"
+ description: "Feature flag for APV Software C2 codec"
+ bug: "376770121"
+}
diff --git a/media/audio/aconfig/README.md b/media/audio/aconfig/README.md
index 8ce1259..83370fe 100644
--- a/media/audio/aconfig/README.md
+++ b/media/audio/aconfig/README.md
@@ -126,11 +126,13 @@
### TestApis
-TestApis do not require flagging, since their existence in the tree implies that they should
-be accessible to callers (xTS not building on trunk enables this).
-
+TestApis do not require flagging, unless they are API additions associated with new features.
+For testing existing features, we have full control over the set of callers.
### Api Changes
-Currently, the flag infra does not support any type of Api modification (arguments, annotation,
-renaming, deletion, etc.) In any of these cases (including for SystemApi), exceptions will need to
-be granted.
+There is partial (work ongoing) support for modifying API surfaces.
+ - SystemApi -> public is supported
+ - UAU -> SystemApi is supported, but the @UAU must remain until the flag is in next
+Other modifications involving moving between surfaces, or annotation changes may not be supported:
+check the [FAQ](https://g3doc.corp.google.com/company/teams/android-api-council/guidelines/faq.md?cl=head#i-cannot-use-flaggedapi-with-data-classes-generated-by-codegen)
+for the up to date list of support.
diff --git a/media/audio/aconfig/audio.aconfig b/media/audio/aconfig/audio.aconfig
index c732708..fe53824 100644
--- a/media/audio/aconfig/audio.aconfig
+++ b/media/audio/aconfig/audio.aconfig
@@ -35,6 +35,13 @@
}
flag {
+ name: "audio_eraser_effect"
+ namespace: "media_audio"
+ description: "Enable audio eraser effect"
+ bug: "367667349"
+}
+
+flag {
name: "bluetooth_mac_address_anonymization"
namespace: "media_audio"
description:
@@ -69,6 +76,22 @@
}
flag {
+ name: "hardening_impl"
+ is_exported: true
+ namespace: "media_audio"
+ description: "Flag for overall implementation of hardening"
+ bug: "376480814"
+}
+
+flag {
+ name: "hardening_strict"
+ is_exported: true
+ namespace: "media_audio"
+ description: "Flag for strict enforcement (deny access) of hardening"
+ bug: "376480814"
+}
+
+flag {
name: "music_fx_edge_to_edge"
namespace: "media_audio"
description: "Enable Edge-to-edge feature for MusicFx and handle insets"
diff --git a/media/audio/aconfig/audio_framework.aconfig b/media/audio/aconfig/audio_framework.aconfig
index c6479d0..9ca38a2 100644
--- a/media/audio/aconfig/audio_framework.aconfig
+++ b/media/audio/aconfig/audio_framework.aconfig
@@ -120,6 +120,14 @@
}
flag {
+ name: "iamf_definitions_api"
+ is_exported: true
+ namespace: "media_audio"
+ description: "API definitions for the IAMF format"
+ bug: "337522902"
+}
+
+flag {
name: "loudness_configurator_api"
is_exported: true
namespace: "media_audio"
@@ -197,6 +205,13 @@
bug: "355050846"
}
+flag {
+ name: "speaker_layout_api"
+ namespace: "media_audio"
+ description: "Surface new API method for returning speaker layout channel mask for devices"
+ bug: "337522902"
+}
+
# TODO remove
flag {
name: "volume_ringer_api_hardening"
diff --git a/media/audioaidlconversion/AidlConversionCppNdk.cpp b/media/audioaidlconversion/AidlConversionCppNdk.cpp
index 32e3ba2..95a8a69 100644
--- a/media/audioaidlconversion/AidlConversionCppNdk.cpp
+++ b/media/audioaidlconversion/AidlConversionCppNdk.cpp
@@ -2352,6 +2352,15 @@
audio_port_config_device_ext legacy{};
RETURN_IF_ERROR(aidl2legacy_AudioDevice_audio_device(
aidl.device, &legacy.type, legacy.address));
+ const bool isInput = false; // speaker_layout_channel_mask only represents output.
+ if (aidl.speakerLayout.has_value()) {
+ legacy.speaker_layout_channel_mask =
+ VALUE_OR_RETURN(aidl2legacy_AudioChannelLayout_audio_channel_mask_t(
+ aidl.speakerLayout.value(), isInput));
+ } else {
+ // Default to none when the field is null in the AIDL.
+ legacy.speaker_layout_channel_mask = AUDIO_CHANNEL_NONE;
+ }
return legacy;
}
@@ -2360,6 +2369,14 @@
AudioPortDeviceExt aidl;
aidl.device = VALUE_OR_RETURN(
legacy2aidl_audio_device_AudioDevice(legacy.type, legacy.address));
+ const bool isInput = false; // speaker_layout_channel_mask only represents output.
+ // The AIDL speakerLayout is nullable and if set, can only be a layoutMask.
+ if (audio_channel_mask_is_valid(legacy.speaker_layout_channel_mask) &&
+ audio_channel_mask_get_representation(legacy.speaker_layout_channel_mask) ==
+ AUDIO_CHANNEL_REPRESENTATION_POSITION) {
+ aidl.speakerLayout = VALUE_OR_RETURN(legacy2aidl_audio_channel_mask_t_AudioChannelLayout(
+ legacy.speaker_layout_channel_mask, isInput));
+ }
return aidl;
}
diff --git a/media/libaudioclient/AudioSystem.cpp b/media/libaudioclient/AudioSystem.cpp
index 483a1ef..f68b506 100644
--- a/media/libaudioclient/AudioSystem.cpp
+++ b/media/libaudioclient/AudioSystem.cpp
@@ -1224,7 +1224,7 @@
const AttributionSourceState& attributionSource,
audio_config_t* config,
audio_output_flags_t flags,
- audio_port_handle_t* selectedDeviceId,
+ DeviceIdVector* selectedDeviceIds,
audio_port_handle_t* portId,
std::vector<audio_io_handle_t>* secondaryOutputs,
bool *isSpatialized,
@@ -1239,8 +1239,8 @@
ALOGE("%s NULL output - shouldn't happen", __func__);
return BAD_VALUE;
}
- if (selectedDeviceId == nullptr) {
- ALOGE("%s NULL selectedDeviceId - shouldn't happen", __func__);
+ if (selectedDeviceIds == nullptr) {
+ ALOGE("%s NULL selectedDeviceIds - shouldn't happen", __func__);
return BAD_VALUE;
}
if (portId == nullptr) {
@@ -1262,20 +1262,20 @@
legacy2aidl_audio_config_t_AudioConfig(*config, false /*isInput*/));
int32_t flagsAidl = VALUE_OR_RETURN_STATUS(
legacy2aidl_audio_output_flags_t_int32_t_mask(flags));
- int32_t selectedDeviceIdAidl = VALUE_OR_RETURN_STATUS(
- legacy2aidl_audio_port_handle_t_int32_t(*selectedDeviceId));
+ auto selectedDeviceIdsAidl = VALUE_OR_RETURN_STATUS(convertContainer<std::vector<int32_t>>(
+ *selectedDeviceIds, legacy2aidl_audio_port_handle_t_int32_t));
media::GetOutputForAttrResponse responseAidl;
status_t status = statusTFromBinderStatus(
aps->getOutputForAttr(attrAidl, sessionAidl, attributionSource, configAidl, flagsAidl,
- selectedDeviceIdAidl, &responseAidl));
+ selectedDeviceIdsAidl, &responseAidl));
if (status != NO_ERROR) {
config->format = VALUE_OR_RETURN_STATUS(
- aidl2legacy_AudioFormatDescription_audio_format_t(responseAidl.configBase.format));
+ aidl2legacy_AudioFormatDescription_audio_format_t(responseAidl.configBase.format));
config->channel_mask = VALUE_OR_RETURN_STATUS(
- aidl2legacy_AudioChannelLayout_audio_channel_mask_t(
- responseAidl.configBase.channelMask, false /*isInput*/));
+ aidl2legacy_AudioChannelLayout_audio_channel_mask_t(
+ responseAidl.configBase.channelMask, false /*isInput*/));
config->sample_rate = responseAidl.configBase.sampleRate;
return status;
}
@@ -1287,8 +1287,8 @@
*stream = VALUE_OR_RETURN_STATUS(
aidl2legacy_AudioStreamType_audio_stream_type_t(responseAidl.stream));
}
- *selectedDeviceId = VALUE_OR_RETURN_STATUS(
- aidl2legacy_int32_t_audio_port_handle_t(responseAidl.selectedDeviceId));
+ *selectedDeviceIds = VALUE_OR_RETURN_STATUS(convertContainer<DeviceIdVector>(
+ responseAidl.selectedDeviceIds, aidl2legacy_int32_t_audio_port_handle_t));
*portId = VALUE_OR_RETURN_STATUS(aidl2legacy_int32_t_audio_port_handle_t(responseAidl.portId));
*secondaryOutputs = VALUE_OR_RETURN_STATUS(convertContainer<std::vector<audio_io_handle_t>>(
responseAidl.secondaryOutputs, aidl2legacy_int32_t_audio_io_handle_t));
diff --git a/media/libaudioclient/aidl/android/media/GetOutputForAttrResponse.aidl b/media/libaudioclient/aidl/android/media/GetOutputForAttrResponse.aidl
index d3975c0..5d066bb 100644
--- a/media/libaudioclient/aidl/android/media/GetOutputForAttrResponse.aidl
+++ b/media/libaudioclient/aidl/android/media/GetOutputForAttrResponse.aidl
@@ -26,8 +26,8 @@
/** Interpreted as audio_io_handle_t. */
int output;
AudioStreamType stream;
- /** Interpreted as audio_port_handle_t. */
- int selectedDeviceId;
+ /** Interpreted as audio_port_handle_t[]. */
+ int[] selectedDeviceIds;
/** Interpreted as audio_port_handle_t. */
int portId;
/** Interpreted as audio_io_handle_t[]. */
diff --git a/media/libaudioclient/aidl/android/media/IAudioPolicyService.aidl b/media/libaudioclient/aidl/android/media/IAudioPolicyService.aidl
index 956acce..7f4a7dd 100644
--- a/media/libaudioclient/aidl/android/media/IAudioPolicyService.aidl
+++ b/media/libaudioclient/aidl/android/media/IAudioPolicyService.aidl
@@ -94,7 +94,7 @@
in AttributionSourceState attributionSource,
in AudioConfig config,
int /* Bitmask, indexed by AudioOutputFlags */ flags,
- int /* audio_port_handle_t */ selectedDeviceId);
+ in int[] /* audio_port_handle_t */ selectedDeviceIds);
void startOutput(int /* audio_port_handle_t */ portId);
diff --git a/media/libaudioclient/include/media/AudioSystem.h b/media/libaudioclient/include/media/AudioSystem.h
index 5565281..fbc7629 100644
--- a/media/libaudioclient/include/media/AudioSystem.h
+++ b/media/libaudioclient/include/media/AudioSystem.h
@@ -342,7 +342,7 @@
const AttributionSourceState& attributionSource,
audio_config_t *config,
audio_output_flags_t flags,
- audio_port_handle_t *selectedDeviceId,
+ DeviceIdVector *selectedDeviceIds,
audio_port_handle_t *portId,
std::vector<audio_io_handle_t> *secondaryOutputs,
bool *isSpatialized,
diff --git a/media/libaudioclient/tests/audio_aidl_legacy_conversion_tests.cpp b/media/libaudioclient/tests/audio_aidl_legacy_conversion_tests.cpp
index 2076045..2cb5f09 100644
--- a/media/libaudioclient/tests/audio_aidl_legacy_conversion_tests.cpp
+++ b/media/libaudioclient/tests/audio_aidl_legacy_conversion_tests.cpp
@@ -689,6 +689,25 @@
AudioEncapsulationMetadataType::FRAMEWORK_TUNER,
AudioEncapsulationMetadataType::DVB_AD_DESCRIPTOR));
+TEST(AudioPortDeviceExt_speakerLayoutRoundTripTest, Aidl2Legacy2Aidl_layoutMask) {
+ AudioPortDeviceExt initial{};
+ initial.speakerLayout = make_ACL_Stereo();
+ auto conv = aidl2legacy_AudioPortDeviceExt_audio_port_config_device_ext(initial);
+ ASSERT_TRUE(conv.ok());
+ auto convBack = legacy2aidl_audio_port_config_device_ext_AudioPortDeviceExt(conv.value());
+ ASSERT_TRUE(convBack.ok());
+ EXPECT_EQ(initial, convBack.value());
+}
+
+TEST(AudioPortDeviceExt_speakerLayoutRoundTripTest, Aidl2Legacy2Aidl_null) {
+ const AudioPortDeviceExt initial{}; // speakerLayout is null
+ auto conv = aidl2legacy_AudioPortDeviceExt_audio_port_config_device_ext(initial);
+ ASSERT_TRUE(conv.ok());
+ auto convBack = legacy2aidl_audio_port_config_device_ext_AudioPortDeviceExt(conv.value());
+ ASSERT_TRUE(convBack.ok());
+ EXPECT_EQ(initial, convBack.value());
+}
+
class AudioGainModeRoundTripTest : public testing::TestWithParam<AudioGainMode> {};
TEST_P(AudioGainModeRoundTripTest, Aidl2Legacy2Aidl) {
const auto initial = GetParam();
diff --git a/media/libaudiofoundation/AudioContainers.cpp b/media/libaudiofoundation/AudioContainers.cpp
index 6727562..3e2066b 100644
--- a/media/libaudiofoundation/AudioContainers.cpp
+++ b/media/libaudiofoundation/AudioContainers.cpp
@@ -130,9 +130,9 @@
return ss.str();
}
-std::string toString(const DeviceIdSet& deviceIds) {
+std::string toString(const DeviceIdVector& deviceIds) {
if (deviceIds.empty()) {
- return "Empty device ids";
+ return "AUDIO_PORT_HANDLE_NONE";
}
std::stringstream ss;
for (auto it = deviceIds.begin(); it != deviceIds.end(); ++it) {
@@ -144,11 +144,11 @@
return ss.str();
}
-audio_port_handle_t getFirstDeviceId(const DeviceIdSet& deviceIds) {
+audio_port_handle_t getFirstDeviceId(const DeviceIdVector& deviceIds) {
if (deviceIds.empty()) {
return AUDIO_PORT_HANDLE_NONE;
}
- return *(deviceIds.begin());
+ return deviceIds[0];
}
AudioProfileAttributesMultimap createAudioProfilesAttrMap(audio_profile profiles[],
diff --git a/media/libaudiofoundation/include/media/AudioContainers.h b/media/libaudiofoundation/include/media/AudioContainers.h
index 3673871..8d4665e 100644
--- a/media/libaudiofoundation/include/media/AudioContainers.h
+++ b/media/libaudiofoundation/include/media/AudioContainers.h
@@ -33,8 +33,8 @@
using FormatSet = std::set<audio_format_t>;
using SampleRateSet = std::set<uint32_t>;
using MixerBehaviorSet = std::set<audio_mixer_behavior_t>;
-using DeviceIdSet = std::set<audio_port_handle_t>;
+using DeviceIdVector = std::vector<audio_port_handle_t>;
using FormatVector = std::vector<audio_format_t>;
using AudioProfileAttributesMultimap =
std::multimap<audio_format_t, std::pair<SampleRateSet, ChannelMaskSet>>;
@@ -142,12 +142,12 @@
/**
* Returns human readable string for a set of device ids.
*/
-std::string toString(const DeviceIdSet& deviceIds);
+std::string toString(const DeviceIdVector& deviceIds);
/**
* Returns the first device id of a set of device ids or AUDIO_PORT_HANDLE_NONE when its empty.
*/
-audio_port_handle_t getFirstDeviceId(const DeviceIdSet& deviceIds);
+audio_port_handle_t getFirstDeviceId(const DeviceIdVector& deviceIds);
/**
* Create audio profile attributes map by given audio profile array from the range of [first, last).
diff --git a/media/libstagefright/Android.bp b/media/libstagefright/Android.bp
index d084f10..92bf35d 100644
--- a/media/libstagefright/Android.bp
+++ b/media/libstagefright/Android.bp
@@ -317,6 +317,7 @@
"server_configurable_flags",
"libaconfig_storage_read_api_cc",
"aconfig_mediacodec_flags_c_lib",
+ "camera_platform_flags_c_lib",
],
static_libs: [
diff --git a/media/libstagefright/MPEG4Writer.cpp b/media/libstagefright/MPEG4Writer.cpp
index 3aa0107..1e233cf 100644
--- a/media/libstagefright/MPEG4Writer.cpp
+++ b/media/libstagefright/MPEG4Writer.cpp
@@ -53,7 +53,9 @@
#include <media/esds/ESDS.h>
#include "include/HevcUtils.h"
+#include <com_android_internal_camera_flags.h>
#include <com_android_media_editing_flags.h>
+namespace editing_flags = com::android::media::editing::flags;
#ifndef __predict_false
#define __predict_false(exp) __builtin_expect((exp) != 0, 0)
@@ -65,6 +67,8 @@
true; \
}))
+namespace flags_camera = com::android::internal::camera::flags;
+
namespace android {
static const int64_t kMinStreamableFileSizeInBytes = 5 * 1024 * 1024;
@@ -91,6 +95,8 @@
static const int kTimestampDebugCount = 10;
static const int kItemIdBase = 10000;
static const char kExifHeader[] = {'E', 'x', 'i', 'f', '\0', '\0'};
+static const char kGainmapMetaHeader[] = {'t', 'm', 'a', 'p', '\0', '\0'};
+static const char kGainmapHeader[] = {'g', 'm', 'a', 'p', '\0', '\0'};
static const uint8_t kExifApp1Marker[] = {'E', 'x', 'i', 'f', 0xff, 0xe1};
static const uint8_t kMandatoryHevcNalUnitTypes[3] = {
@@ -160,6 +166,7 @@
bool isAvc() const { return mIsAvc; }
bool isHevc() const { return mIsHevc; }
bool isAv1() const { return mIsAv1; }
+ bool isApv() const { return mIsApv; }
bool isHeic() const { return mIsHeic; }
bool isAvif() const { return mIsAvif; }
bool isHeif() const { return mIsHeif; }
@@ -167,8 +174,11 @@
bool isMPEG4() const { return mIsMPEG4; }
bool usePrefix() const { return mIsAvc || mIsHevc || mIsHeic || mIsDovi; }
bool isExifData(MediaBufferBase *buffer, uint32_t *tiffHdrOffset) const;
+ bool isGainmapMetaData(MediaBufferBase* buffer, uint32_t* offset) const;
+ bool isGainmapData(MediaBufferBase* buffer, uint32_t* offset) const;
void addChunkOffset(off64_t offset);
- void addItemOffsetAndSize(off64_t offset, size_t size, bool isExif);
+ void addItemOffsetAndSize(off64_t offset, size_t size, bool isExif,
+ bool isGainmapMeta = false, bool isGainmap = false);
void flushItemRefs();
TrackId& getTrackId() { return mTrackId; }
status_t dump(int fd, const Vector<String16>& args) const;
@@ -178,8 +188,11 @@
void resetInternal();
int64_t trackMetaDataSize();
bool isTimestampValid(int64_t timeUs);
+ uint16_t getImageItemId() { return mImageItemId; };
+ uint16_t getGainmapItemId() { return mGainmapItemId; };
+ uint16_t getGainmapMetaItemId() { return mGainmapMetadataItemId; };
-private:
+ private:
// A helper class to handle faster write box with table entries
template<class TYPE, unsigned ENTRY_SIZE>
// ENTRY_SIZE: # of values in each entry
@@ -328,6 +341,7 @@
bool mIsAvc;
bool mIsHevc;
bool mIsAv1;
+ bool mIsApv;
bool mIsDovi;
bool mIsAudio;
bool mIsVideo;
@@ -405,6 +419,7 @@
Vector<uint16_t> mProperties;
ItemRefs mDimgRefs;
+ ItemRefs mGainmapDimgRefs;
Vector<uint16_t> mExifList;
uint16_t mImageItemId;
uint16_t mItemIdBase;
@@ -413,6 +428,10 @@
int32_t mTileWidth, mTileHeight;
int32_t mGridRows, mGridCols;
size_t mNumTiles, mTileIndex;
+ uint16_t mGainmapItemId, mGainmapMetadataItemId;
+ ColorAspects mColorAspects;
+ bool mColorAspectsValid;
+ Vector<uint8_t> mBitsPerChannel;
// Update the audio track's drift information.
void updateDriftTime(const sp<MetaData>& meta);
@@ -479,6 +498,7 @@
void writeAvccBox();
void writeHvccBox();
void writeAv1cBox();
+ void writeApvcBox();
void writeDoviConfigBox();
void writeUrlBox();
void writeDrefBox();
@@ -680,6 +700,9 @@
return "hvc1";
} else if (!strcasecmp(MEDIA_MIMETYPE_VIDEO_AV1, mime)) {
return "av01";
+ } else if (editing_flags::muxer_mp4_enable_apv() &&
+ !strcasecmp(MEDIA_MIMETYPE_VIDEO_APV, mime)) {
+ return "apv1";
}
} else if (!strncasecmp(mime, "application/", 12)) {
return "mett";
@@ -814,6 +837,10 @@
+ 12 // iref box (when empty)
;
+ if (flags_camera::camera_heif_gainmap()) {
+ metaSize += 36; // grpl box (when empty)
+ }
+
for (List<Track *>::iterator it = mTracks.begin();
it != mTracks.end(); ++it) {
if ((*it)->isHeif()) {
@@ -2213,8 +2240,7 @@
////////////////////////////////////////////////////////////////////////////////
-MPEG4Writer::Track::Track(
- MPEG4Writer *owner, const sp<MediaSource> &source, uint32_t aTrackId)
+MPEG4Writer::Track::Track(MPEG4Writer* owner, const sp<MediaSource>& source, uint32_t aTrackId)
: mOwner(owner),
mMeta(source->getFormat()),
mSource(source),
@@ -2234,7 +2260,7 @@
mStssTableEntries(new ListTableEntries<uint32_t, 1>(1000)),
mSttsTableEntries(new ListTableEntries<uint32_t, 2>(1000)),
mCttsTableEntries(new ListTableEntries<uint32_t, 2>(1000)),
- mElstTableEntries(new ListTableEntries<uint32_t, 3>(3)), // Reserve 3 rows, a row has 3 items
+ mElstTableEntries(new ListTableEntries<uint32_t, 3>(3)), // Reserve 3 rows, a row has 3 items
mMinCttsOffsetTimeUs(0),
mMinCttsOffsetTicks(0),
mMaxCttsOffsetTicks(0),
@@ -2248,6 +2274,7 @@
mFirstSampleStartOffsetUs(0),
mRotation(0),
mDimgRefs("dimg"),
+ mGainmapDimgRefs("dimg"),
mImageItemId(0),
mItemIdBase(0),
mIsPrimary(0),
@@ -2258,7 +2285,10 @@
mGridRows(0),
mGridCols(0),
mNumTiles(1),
- mTileIndex(0) {
+ mTileIndex(0),
+ mGainmapItemId(0),
+ mGainmapMetadataItemId(0),
+ mColorAspectsValid(false) {
getCodecSpecificDataFromInputFormatIfPossible();
const char *mime;
@@ -2266,6 +2296,7 @@
mIsAvc = !strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_AVC);
mIsHevc = !strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_HEVC);
mIsAv1 = !strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_AV1);
+ mIsApv = editing_flags::muxer_mp4_enable_apv() && !strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_APV);
mIsDovi = !strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_DOLBY_VISION);
mIsAudio = !strncasecmp(mime, "audio/", 6);
mIsVideo = !strncasecmp(mime, "video/", 6);
@@ -2446,25 +2477,57 @@
return OK;
}
-bool MPEG4Writer::Track::isExifData(
- MediaBufferBase *buffer, uint32_t *tiffHdrOffset) const {
+bool MPEG4Writer::Track::isGainmapMetaData(MediaBufferBase* buffer, uint32_t* offset) const {
+ if (!mIsHeif) {
+ return false;
+ }
+
+ // Gainmap metadata block starting with 'tmap\0\0'
+ size_t length = buffer->range_length();
+ uint8_t *data = (uint8_t *)buffer->data() + buffer->range_offset();
+ if ((length > sizeof(kGainmapMetaHeader)) &&
+ !memcmp(data, kGainmapMetaHeader, sizeof(kGainmapMetaHeader))) {
+ *offset = sizeof(kGainmapMetaHeader);
+ return true;
+ }
+
+ return false;
+}
+
+bool MPEG4Writer::Track::isGainmapData(MediaBufferBase* buffer, uint32_t* offset) const {
+ if (!mIsHeif) {
+ return false;
+ }
+
+ // Gainmap block starting with 'gmap\0\0'
+ size_t length = buffer->range_length();
+ uint8_t* data = (uint8_t*)buffer->data() + buffer->range_offset();
+ if ((length > sizeof(kGainmapHeader)) &&
+ !memcmp(data, kGainmapHeader, sizeof(kGainmapHeader))) {
+ *offset = sizeof(kGainmapHeader);
+ return true;
+ }
+
+ return false;
+}
+
+bool MPEG4Writer::Track::isExifData(MediaBufferBase* buffer, uint32_t* tiffHdrOffset) const {
if (!mIsHeif) {
return false;
}
// Exif block starting with 'Exif\0\0'
size_t length = buffer->range_length();
- uint8_t *data = (uint8_t *)buffer->data() + buffer->range_offset();
- if ((length > sizeof(kExifHeader))
- && !memcmp(data, kExifHeader, sizeof(kExifHeader))) {
+ uint8_t* data = (uint8_t*)buffer->data() + buffer->range_offset();
+ if ((length > sizeof(kExifHeader)) && !memcmp(data, kExifHeader, sizeof(kExifHeader))) {
*tiffHdrOffset = sizeof(kExifHeader);
return true;
}
// Exif block starting with fourcc 'Exif' followed by APP1 marker
- if ((length > sizeof(kExifApp1Marker) + 2 + sizeof(kExifHeader))
- && !memcmp(data, kExifApp1Marker, sizeof(kExifApp1Marker))
- && !memcmp(data + sizeof(kExifApp1Marker) + 2, kExifHeader, sizeof(kExifHeader))) {
+ if ((length > sizeof(kExifApp1Marker) + 2 + sizeof(kExifHeader)) &&
+ !memcmp(data, kExifApp1Marker, sizeof(kExifApp1Marker)) &&
+ !memcmp(data + sizeof(kExifApp1Marker) + 2, kExifHeader, sizeof(kExifHeader))) {
// skip 'Exif' fourcc
buffer->set_range(4, buffer->range_length() - 4);
@@ -2481,7 +2544,8 @@
mCo64TableEntries->add(hton64(offset));
}
-void MPEG4Writer::Track::addItemOffsetAndSize(off64_t offset, size_t size, bool isExif) {
+void MPEG4Writer::Track::addItemOffsetAndSize(off64_t offset, size_t size, bool isExif,
+ bool isGainmapMeta, bool isGainmap) {
CHECK(mIsHeif);
if (offset > UINT32_MAX || size > UINT32_MAX) {
@@ -2510,6 +2574,46 @@
return;
}
+ bool hasGrid = (mTileWidth > 0);
+
+ if (isGainmapMeta && flags_camera::camera_heif_gainmap()) {
+ uint16_t metaItemId;
+ if (mOwner->reserveItemId_l(1, &metaItemId) != OK) {
+ return;
+ }
+
+ Vector<uint16_t> props;
+ if (mColorAspectsValid) {
+ ItemProperty property;
+ property.type = FOURCC('c', 'o', 'l', 'r');
+ ColorUtils::convertCodecColorAspectsToIsoAspects(
+ mColorAspects, &property.colorPrimaries, &property.colorTransfer,
+ &property.colorMatrix, &property.colorRange);
+ props.push_back(mOwner->addProperty_l(property));
+ }
+ if (!mBitsPerChannel.empty()) {
+ ItemProperty property;
+ property.type = FOURCC('p', 'i', 'x', 'i');
+ property.bitsPerChannel.appendVector(mBitsPerChannel);
+ props.push_back(mOwner->addProperty_l(property));
+ }
+ props.push_back(mOwner->addProperty_l({
+ .type = FOURCC('i', 's', 'p', 'e'),
+ .width = hasGrid ? mTileWidth : mWidth,
+ .height = hasGrid ? mTileHeight : mHeight,
+ }));
+ mGainmapMetadataItemId = mOwner->addItem_l({
+ .itemType = "tmap",
+ .itemId = metaItemId,
+ .isPrimary = false,
+ .isHidden = false,
+ .offset = (uint32_t)offset,
+ .size = (uint32_t)size,
+ .properties = props,
+ });
+ return;
+ }
+
if (mTileIndex >= mNumTiles) {
ALOGW("Ignoring excess tiles!");
return;
@@ -2524,8 +2628,6 @@
default: break; // don't set if invalid
}
- bool hasGrid = (mTileWidth > 0);
-
if (mProperties.empty()) {
mProperties.push_back(mOwner->addProperty_l({
.type = static_cast<uint32_t>(mIsAvif ?
@@ -2550,7 +2652,7 @@
mTileIndex++;
if (hasGrid) {
- mDimgRefs.value.push_back(mOwner->addItem_l({
+ uint16_t id = mOwner->addItem_l({
.itemType = mIsAvif ? "av01" : "hvc1",
.itemId = mItemIdBase++,
.isPrimary = false,
@@ -2558,7 +2660,12 @@
.offset = (uint32_t)offset,
.size = (uint32_t)size,
.properties = mProperties,
- }));
+ });
+ if (isGainmap && flags_camera::camera_heif_gainmap()) {
+ mGainmapDimgRefs.value.push_back(id);
+ } else {
+ mDimgRefs.value.push_back(id);
+ }
if (mTileIndex == mNumTiles) {
mProperties.clear();
@@ -2573,28 +2680,71 @@
.rotation = heifRotation,
}));
}
- mImageItemId = mOwner->addItem_l({
- .itemType = "grid",
- .itemId = mItemIdBase++,
- .isPrimary = (mIsPrimary != 0),
- .isHidden = false,
- .rows = (uint32_t)mGridRows,
- .cols = (uint32_t)mGridCols,
- .width = (uint32_t)mWidth,
- .height = (uint32_t)mHeight,
- .properties = mProperties,
+ if (mColorAspectsValid && flags_camera::camera_heif_gainmap()) {
+ ItemProperty property;
+ property.type = FOURCC('c', 'o', 'l', 'r');
+ ColorUtils::convertCodecColorAspectsToIsoAspects(
+ mColorAspects, &property.colorPrimaries, &property.colorTransfer,
+ &property.colorMatrix, &property.colorRange);
+ mProperties.push_back(mOwner->addProperty_l(property));
+ }
+ if (!mBitsPerChannel.empty() && flags_camera::camera_heif_gainmap()) {
+ ItemProperty property;
+ property.type = FOURCC('p', 'i', 'x', 'i');
+ property.bitsPerChannel.appendVector(mBitsPerChannel);
+ mProperties.push_back(mOwner->addProperty_l(property));
+ }
+ uint16_t itemId = mOwner->addItem_l({
+ .itemType = "grid",
+ .itemId = mItemIdBase++,
+ .isPrimary = isGainmap && flags_camera::camera_heif_gainmap()
+ ? false
+ : (mIsPrimary != 0),
+ .isHidden = false,
+ .rows = (uint32_t)mGridRows,
+ .cols = (uint32_t)mGridCols,
+ .width = (uint32_t)mWidth,
+ .height = (uint32_t)mHeight,
+ .properties = mProperties,
});
+
+ if (isGainmap && flags_camera::camera_heif_gainmap()) {
+ mGainmapItemId = itemId;
+ } else {
+ mImageItemId = itemId;
+ }
}
} else {
- mImageItemId = mOwner->addItem_l({
- .itemType = mIsAvif ? "av01" : "hvc1",
- .itemId = mItemIdBase++,
- .isPrimary = (mIsPrimary != 0),
- .isHidden = false,
- .offset = (uint32_t)offset,
- .size = (uint32_t)size,
- .properties = mProperties,
+ if (mColorAspectsValid && flags_camera::camera_heif_gainmap()) {
+ ItemProperty property;
+ property.type = FOURCC('c', 'o', 'l', 'r');
+ ColorUtils::convertCodecColorAspectsToIsoAspects(
+ mColorAspects, &property.colorPrimaries, &property.colorTransfer,
+ &property.colorMatrix, &property.colorRange);
+ mProperties.push_back(mOwner->addProperty_l(property));
+ }
+ if (!mBitsPerChannel.empty() && flags_camera::camera_heif_gainmap()) {
+ ItemProperty property;
+ property.type = FOURCC('p', 'i', 'x', 'i');
+ property.bitsPerChannel.appendVector(mBitsPerChannel);
+ mProperties.push_back(mOwner->addProperty_l(property));
+ }
+ uint16_t itemId = mOwner->addItem_l({
+ .itemType = mIsAvif ? "av01" : "hvc1",
+ .itemId = mItemIdBase++,
+ .isPrimary = (isGainmap && flags_camera::camera_heif_gainmap()) ? false
+ : (mIsPrimary != 0),
+ .isHidden = false,
+ .offset = (uint32_t)offset,
+ .size = (uint32_t)size,
+ .properties = mProperties,
});
+
+ if (isGainmap && flags_camera::camera_heif_gainmap()) {
+ mGainmapItemId = itemId;
+ } else {
+ mImageItemId = itemId;
+ }
}
}
@@ -2619,6 +2769,10 @@
}
}
}
+
+ if ((mGainmapItemId > 0) && flags_camera::camera_heif_gainmap()) {
+ mOwner->addRefs_l(mGainmapItemId, mGainmapDimgRefs);
+ }
}
void MPEG4Writer::Track::setTimeScale() {
@@ -2708,6 +2862,9 @@
} else if (!strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_AV1) ||
!strcasecmp(mime, MEDIA_MIMETYPE_IMAGE_AVIF)) {
mMeta->findData(kKeyAV1C, &type, &data, &size);
+ } else if (editing_flags::muxer_mp4_enable_apv() &&
+ !strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_APV)) {
+ mMeta->findData(kKeyAPVC, &type, &data, &size);
} else if (!strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_DOLBY_VISION)) {
getDolbyVisionProfile();
if (!mMeta->findData(kKeyAVCC, &type, &data, &size) &&
@@ -3609,7 +3766,7 @@
(const uint8_t *)buffer->data()
+ buffer->range_offset(),
buffer->range_length());
- } else if (mIsMPEG4 || mIsAv1) {
+ } else if (mIsMPEG4 || mIsAv1 || mIsApv) {
err = copyCodecSpecificData((const uint8_t *)buffer->data() + buffer->range_offset(),
buffer->range_length());
}
@@ -3660,19 +3817,68 @@
break;
}
+ bool isGainmapMeta = false;
+ bool isGainmap = false;
bool isExif = false;
uint32_t tiffHdrOffset = 0;
+ uint32_t gainmapOffset = 0;
int32_t isMuxerData;
if (buffer->meta_data().findInt32(kKeyIsMuxerData, &isMuxerData) && isMuxerData) {
- // We only support one type of muxer data, which is Exif data block.
+ if (flags_camera::camera_heif_gainmap()) {
+ isGainmapMeta = isGainmapMetaData(buffer, &gainmapOffset);
+ isGainmap = isGainmapData(buffer, &gainmapOffset);
+ if ((isGainmap || isGainmapMeta) && (gainmapOffset > 0) &&
+ (gainmapOffset < buffer->range_length())) {
+ // Don't include the tmap/gmap header
+ buffer->set_range(gainmapOffset, buffer->range_length() - gainmapOffset);
+ }
+ }
isExif = isExifData(buffer, &tiffHdrOffset);
- if (!isExif) {
- ALOGW("Ignoring bad Exif data block");
+ if (!isExif && !isGainmap && !isGainmapMeta) {
+ ALOGW("Ignoring bad muxer data block");
buffer->release();
buffer = NULL;
continue;
}
}
+ if (flags_camera::camera_heif_gainmap()) {
+ int32_t val32;
+ if (buffer->meta_data().findInt32(kKeyColorPrimaries, &val32)) {
+ mColorAspects.mPrimaries = static_cast<ColorAspects::Primaries>(val32);
+ mColorAspectsValid = true;
+ } else {
+ mColorAspectsValid = false;
+ }
+ if (buffer->meta_data().findInt32(kKeyTransferFunction, &val32)) {
+ mColorAspects.mTransfer = static_cast<ColorAspects::Transfer>(val32);
+ } else {
+ mColorAspectsValid = false;
+ }
+ if (buffer->meta_data().findInt32(kKeyColorMatrix, &val32)) {
+ mColorAspects.mMatrixCoeffs = static_cast<ColorAspects::MatrixCoeffs>(val32);
+ } else {
+ mColorAspectsValid = false;
+ }
+ if (buffer->meta_data().findInt32(kKeyColorRange, &val32)) {
+ mColorAspects.mRange = static_cast<ColorAspects::Range>(val32);
+ } else {
+ mColorAspectsValid = false;
+ }
+ if (mBitsPerChannel.empty() && buffer->meta_data().findInt32(kKeyColorFormat, &val32)) {
+ switch (val32) {
+ case COLOR_FormatYUV420Flexible:
+ case COLOR_FormatYUV420Planar:
+ case COLOR_FormatYUV420SemiPlanar: {
+ uint8_t bitsPerChannel[] = {8, 8, 8};
+ mBitsPerChannel.appendArray(bitsPerChannel, sizeof(bitsPerChannel));
+ }
+ break;
+ default:
+ break;
+ }
+ }
+ }
+
if (!buffer->meta_data().findInt64(kKeySampleFileOffset, &sampleFileOffset)) {
sampleFileOffset = -1;
}
@@ -3698,7 +3904,7 @@
// Make a deep copy of the MediaBuffer and Metadata and release
// the original as soon as we can
- MediaBuffer *copy = new MediaBuffer(buffer->range_length());
+ MediaBuffer* copy = new MediaBuffer(buffer->range_length());
if (sampleFileOffset != -1) {
copy->meta_data().setInt64(kKeySampleFileOffset, sampleFileOffset);
} else {
@@ -3995,13 +4201,13 @@
trackProgressStatus(timestampUs);
}
}
- if (!hasMultipleTracks) {
+ if (!hasMultipleTracks || isGainmapMeta || isGainmap) {
size_t bytesWritten;
off64_t offset = mOwner->addSample_l(
copy, usePrefix, tiffHdrOffset, &bytesWritten);
if (mIsHeif) {
- addItemOffsetAndSize(offset, bytesWritten, isExif);
+ addItemOffsetAndSize(offset, bytesWritten, isExif, isGainmapMeta, isGainmap);
} else {
if (mCo64TableEntries->count() == 0) {
addChunkOffset(offset);
@@ -4304,6 +4510,15 @@
increase += 9; // 'irot' property (worst case)
}
+ if (flags_camera::camera_heif_gainmap()) {
+ // assume we have HDR gainmap and associated metadata
+ increase += (8 + mCodecSpecificDataSize) // 'hvcC' property (HDR gainmap)
+ + (2 * 20) // 'ispe' property
+ + (2 * 16) // 'pixi' property
+ + (2 * 19) // 'colr' property
+ ;
+ }
+
// increase to iref and idat
if (grid) {
increase += (12 + mNumTiles * 2) // 'dimg' in iref
@@ -4317,6 +4532,12 @@
+ 21) // increase to 'iinf'
* (mNumTiles + grid + 1); // "+1" is for 'Exif'
+ if (flags_camera::camera_heif_gainmap()) {
+ increase += (16 // increase to 'iloc'
+ + 21) // increase to 'iinf'
+ * 2; // "2" is for 'tmap', 'gmap'
+ }
+
// When total # of properties is > 127, the properties id becomes 2-byte.
// We write 4 properties at most for each image (2x'ispe', 1x'hvcC', 1x'irot').
// Set the threshold to be 30.
@@ -4338,6 +4559,7 @@
!strcasecmp(MEDIA_MIMETYPE_VIDEO_AVC, mime) ||
!strcasecmp(MEDIA_MIMETYPE_VIDEO_HEVC, mime) ||
!strcasecmp(MEDIA_MIMETYPE_VIDEO_AV1, mime) ||
+ (editing_flags::muxer_mp4_enable_apv() && !strcasecmp(MEDIA_MIMETYPE_VIDEO_APV, mime)) ||
!strcasecmp(MEDIA_MIMETYPE_VIDEO_DOLBY_VISION, mime) ||
!strcasecmp(MEDIA_MIMETYPE_IMAGE_ANDROID_HEIC, mime) ||
!strcasecmp(MEDIA_MIMETYPE_IMAGE_AVIF, mime)) {
@@ -4512,6 +4734,9 @@
writeHvccBox();
} else if (!strcasecmp(MEDIA_MIMETYPE_VIDEO_AV1, mime)) {
writeAv1cBox();
+ } else if (editing_flags::muxer_mp4_enable_apv() &&
+ !strcasecmp(MEDIA_MIMETYPE_VIDEO_APV, mime)) {
+ writeApvcBox();
} else if (!strcasecmp(MEDIA_MIMETYPE_VIDEO_DOLBY_VISION, mime)) {
if (mDoviProfile <= DolbyVisionProfileDvheSt) {
writeHvccBox();
@@ -5103,6 +5328,15 @@
mOwner->endBox(); // av1C
}
+void MPEG4Writer::Track::writeApvcBox() {
+ CHECK(mCodecSpecificData);
+ CHECK_GE(mCodecSpecificDataSize, 4u);
+
+ mOwner->beginBox("apvC");
+ mOwner->write(mCodecSpecificData, mCodecSpecificDataSize);
+ mOwner->endBox(); // apvC
+}
+
void MPEG4Writer::Track::writeDoviConfigBox() {
CHECK_NE(mDoviProfile, 0u);
@@ -5475,6 +5709,21 @@
endBox();
}
+void MPEG4Writer::writeGrplBox(const Vector<uint16_t> &items) {
+ if (flags_camera::camera_heif_gainmap()) {
+ beginBox("grpl");
+ beginBox("altr");
+ writeInt32(0); // Version = 0, Flags = 0
+ writeInt32(1); // Group Id
+ writeInt32(items.size());// Number of entities
+ for (size_t i = 0; i < items.size(); i++) {
+ writeInt32(items[i]);// Item Id
+ }
+ endBox();
+ endBox();
+ }
+}
+
void MPEG4Writer::writeIpcoBox() {
beginBox("ipco");
size_t numProperties = mProperties.size();
@@ -5520,6 +5769,32 @@
endBox();
break;
}
+ case FOURCC('c', 'o', 'l', 'r'):
+ {
+ if (flags_camera::camera_heif_gainmap()) {
+ beginBox("colr");
+ writeFourcc("nclx");
+ writeInt16(mProperties[propIndex].colorPrimaries);
+ writeInt16(mProperties[propIndex].colorTransfer);
+ writeInt16(mProperties[propIndex].colorMatrix);
+ writeInt8(int8_t(mProperties[propIndex].colorRange ? 0x80 : 0x0));
+ endBox();
+ }
+ break;
+ }
+ case FOURCC('p', 'i', 'x', 'i'):
+ {
+ if (flags_camera::camera_heif_gainmap()) {
+ beginBox("pixi");
+ writeInt32(0); // Version = 0, Flags = 0
+ writeInt8(mProperties[propIndex].bitsPerChannel.size()); // Number of channels
+ for (size_t i = 0; i < mProperties[propIndex].bitsPerChannel.size(); i++) {
+ writeInt8(mProperties[propIndex].bitsPerChannel[i]); // Channel bit depth
+ }
+ endBox();
+ }
+ break;
+ }
default:
ALOGW("Skipping unrecognized property: type 0x%08x",
mProperties[propIndex].type);
@@ -5574,6 +5849,12 @@
for (auto it = mItems.begin(); it != mItems.end(); it++) {
ItemInfo &item = it->second;
+ if (item.isGainmapMeta() && !item.properties.empty() &&
+ flags_camera::camera_heif_gainmap()) {
+ mAssociationEntryCount++;
+ continue;
+ }
+
if (!item.isImage()) continue;
if (item.isPrimary) {
@@ -5605,11 +5886,27 @@
}
}
+ uint16_t gainmapItemId = 0;
+ uint16_t gainmapMetaItemId = 0;
for (List<Track *>::iterator it = mTracks.begin();
it != mTracks.end(); ++it) {
if ((*it)->isHeif()) {
(*it)->flushItemRefs();
}
+ if (flags_camera::camera_heif_gainmap()) {
+ if ((*it)->getGainmapItemId() > 0) {
+ gainmapItemId = (*it)->getGainmapItemId();
+ }
+ if ((*it)->getGainmapMetaItemId() > 0) {
+ gainmapMetaItemId = (*it)->getGainmapMetaItemId();
+ }
+ }
+ }
+ if ((gainmapItemId > 0) && (gainmapMetaItemId > 0) && flags_camera::camera_heif_gainmap()) {
+ ItemRefs gainmapRefs("dimg");
+ gainmapRefs.value.push_back(mPrimaryItemId);
+ gainmapRefs.value.push_back(gainmapItemId);
+ addRefs_l(gainmapMetaItemId, gainmapRefs);
}
beginBox("meta");
@@ -5625,6 +5922,12 @@
if (mHasRefs) {
writeIrefBox();
}
+ if ((gainmapItemId > 0) && (gainmapMetaItemId > 0) && flags_camera::camera_heif_gainmap()) {
+ Vector<uint16_t> itemIds;
+ itemIds.push_back(gainmapMetaItemId);
+ itemIds.push_back(mPrimaryItemId);
+ writeGrplBox(itemIds);
+ }
endBox();
}
diff --git a/media/libstagefright/MediaMuxer.cpp b/media/libstagefright/MediaMuxer.cpp
index 1008445..96e399b 100644
--- a/media/libstagefright/MediaMuxer.cpp
+++ b/media/libstagefright/MediaMuxer.cpp
@@ -19,6 +19,8 @@
#include "webm/WebmWriter.h"
+#include <com_android_internal_camera_flags.h>
+
#include <utils/Log.h>
#include <media/stagefright/MediaMuxer.h>
@@ -38,6 +40,8 @@
#include <media/stagefright/OggWriter.h>
#include <media/stagefright/Utils.h>
+namespace flags_camera = com::android::internal::camera::flags;
+
namespace android {
static bool isMp4Format(MediaMuxer::OutputFormat format) {
@@ -270,6 +274,25 @@
sampleMetaData.setInt64(kKeyLastSampleIndexInChunk, val64);
}
+ if (flags_camera::camera_heif_gainmap()) {
+ int32_t val32;
+ if (bufMeta->findInt32("color-primaries", &val32)) {
+ sampleMetaData.setInt32(kKeyColorPrimaries, val32);
+ }
+ if (bufMeta->findInt32("color-transfer", &val32)) {
+ sampleMetaData.setInt32(kKeyTransferFunction, val32);
+ }
+ if (bufMeta->findInt32("color-matrix", &val32)) {
+ sampleMetaData.setInt32(kKeyColorMatrix, val32);
+ }
+ if (bufMeta->findInt32("color-range", &val32)) {
+ sampleMetaData.setInt32(kKeyColorRange, val32);
+ }
+ if (bufMeta->findInt32(KEY_COLOR_FORMAT, &val32)) {
+ sampleMetaData.setInt32(kKeyColorFormat, val32);
+ }
+ }
+
sp<MediaAdapter> currentTrack = mTrackList[trackIndex];
// This pushBuffer will wait until the mediaBuffer is consumed.
return currentTrack->pushBuffer(mediaBuffer);
diff --git a/media/libstagefright/include/media/stagefright/MPEG4Writer.h b/media/libstagefright/include/media/stagefright/MPEG4Writer.h
index ee75129..a409e46 100644
--- a/media/libstagefright/include/media/stagefright/MPEG4Writer.h
+++ b/media/libstagefright/include/media/stagefright/MPEG4Writer.h
@@ -200,6 +200,9 @@
bool isImage() const {
return !strcmp("hvc1", itemType) || !strcmp("av01", itemType) || isGrid();
}
+ bool isGainmapMeta() const {
+ return !strcmp("tmap", itemType);
+ }
const char *itemType;
uint16_t itemId;
bool isPrimary;
@@ -227,6 +230,11 @@
int32_t width;
int32_t height;
int32_t rotation;
+ int32_t colorPrimaries;
+ int32_t colorTransfer;
+ int32_t colorMatrix;
+ bool colorRange;
+ Vector<uint8_t> bitsPerChannel;
sp<ABuffer> data;
} ItemProperty;
@@ -347,6 +355,7 @@
void writeIdatBox();
void writeIrefBox();
void writePitmBox();
+ void writeGrplBox(const Vector<uint16_t> &items);
void writeFileLevelMetaBox();
void sendSessionSummary();
diff --git a/media/libstagefright/include/media/stagefright/MetaDataBase.h b/media/libstagefright/include/media/stagefright/MetaDataBase.h
index a7d2eb9..9dce55b 100644
--- a/media/libstagefright/include/media/stagefright/MetaDataBase.h
+++ b/media/libstagefright/include/media/stagefright/MetaDataBase.h
@@ -63,6 +63,7 @@
kKeyDVVC = 'dvvc', // raw data
kKeyDVWC = 'dvwc', // raw data
kKeyAV1C = 'av1c', // raw data
+ kKeyAPVC = 'apvc', // raw data
kKeyThumbnailHVCC = 'thvc', // raw data
kKeyThumbnailAV1C = 'tav1', // raw data
kKeyD263 = 'd263', // raw data
diff --git a/media/libstagefright/writer_fuzzers/Android.bp b/media/libstagefright/writer_fuzzers/Android.bp
index 840c6b3c..483175c 100644
--- a/media/libstagefright/writer_fuzzers/Android.bp
+++ b/media/libstagefright/writer_fuzzers/Android.bp
@@ -47,6 +47,7 @@
"libcutils",
"libutils",
"server_configurable_flags",
+ "camera_platform_flags_c_lib",
],
}
diff --git a/media/module/codecs/amrnb/common/Android.bp b/media/module/codecs/amrnb/common/Android.bp
index 0bc6ed2..35937cb 100644
--- a/media/module/codecs/amrnb/common/Android.bp
+++ b/media/module/codecs/amrnb/common/Android.bp
@@ -1,4 +1,5 @@
package {
+ default_team: "trendy_team_media_codec_framework",
default_applicable_licenses: [
"frameworks_av_media_codecs_amrnb_common_license",
],
@@ -42,8 +43,8 @@
"src/gains_tbl.cpp",
"src/gc_pred.cpp",
"src/gmed_n.cpp",
- "src/grid_tbl.cpp",
"src/gray_tbl.cpp",
+ "src/grid_tbl.cpp",
"src/int_lpc.cpp",
"src/inv_sqrt.cpp",
"src/inv_sqrt_tbl.cpp",
@@ -91,9 +92,9 @@
export_include_dirs: ["include"],
cflags: [
- "-DOSCL_UNUSED_ARG(x)=(void)(x)",
- "-DOSCL_IMPORT_REF=",
"-DOSCL_EXPORT_REF=",
+ "-DOSCL_IMPORT_REF=",
+ "-DOSCL_UNUSED_ARG(x)=(void)(x)",
"-Werror",
],
diff --git a/media/module/codecs/amrnb/dec/Android.bp b/media/module/codecs/amrnb/dec/Android.bp
index 70741d2..a28500a 100644
--- a/media/module/codecs/amrnb/dec/Android.bp
+++ b/media/module/codecs/amrnb/dec/Android.bp
@@ -1,4 +1,5 @@
package {
+ default_team: "trendy_team_media_codec_framework",
default_applicable_licenses: [
"frameworks_av_media_codecs_amrnb_dec_license",
],
@@ -47,12 +48,12 @@
"src/b_cn_cod.cpp",
"src/bgnscd.cpp",
"src/c_g_aver.cpp",
- "src/d1035pf.cpp",
- "src/d2_11pf.cpp",
"src/d2_9pf.cpp",
+ "src/d2_11pf.cpp",
"src/d3_14pf.cpp",
"src/d4_17pf.cpp",
"src/d8_31pf.cpp",
+ "src/d1035pf.cpp",
"src/d_gain_c.cpp",
"src/d_gain_p.cpp",
"src/d_plsf.cpp",
@@ -81,8 +82,8 @@
export_include_dirs: ["src"],
cflags: [
- "-DOSCL_UNUSED_ARG(x)=(void)(x)",
"-DOSCL_IMPORT_REF=",
+ "-DOSCL_UNUSED_ARG(x)=(void)(x)",
"-Werror",
],
@@ -94,8 +95,8 @@
//},
shared_libs: [
- "libstagefright_amrnb_common",
"liblog",
+ "libstagefright_amrnb_common",
],
target: {
@@ -113,19 +114,22 @@
srcs: ["test/amrnbdec_test.cpp"],
- cflags: ["-Wall", "-Werror"],
+ cflags: [
+ "-Wall",
+ "-Werror",
+ ],
local_include_dirs: ["src"],
static_libs: [
- "libstagefright_amrnbdec",
"libsndfile",
+ "libstagefright_amrnbdec",
],
shared_libs: [
- "libstagefright_amrnb_common",
"libaudioutils",
"liblog",
+ "libstagefright_amrnb_common",
],
target: {
diff --git a/media/module/codecs/amrnb/enc/Android.bp b/media/module/codecs/amrnb/enc/Android.bp
index 3c6566e..13bb29c 100644
--- a/media/module/codecs/amrnb/enc/Android.bp
+++ b/media/module/codecs/amrnb/enc/Android.bp
@@ -1,4 +1,5 @@
package {
+ default_team: "trendy_team_media_codec_framework",
default_applicable_licenses: [
"frameworks_av_media_codecs_amrnb_enc_license",
],
@@ -42,12 +43,12 @@
srcs: [
"src/amrencode.cpp",
"src/autocorr.cpp",
- "src/c1035pf.cpp",
- "src/c2_11pf.cpp",
"src/c2_9pf.cpp",
+ "src/c2_11pf.cpp",
"src/c3_14pf.cpp",
"src/c4_17pf.cpp",
"src/c8_31pf.cpp",
+ "src/c1035pf.cpp",
"src/calc_cor.cpp",
"src/calc_en.cpp",
"src/cbsearch.cpp",
@@ -132,7 +133,10 @@
srcs: ["test/amrnb_enc_test.cpp"],
- cflags: ["-Wall", "-Werror"],
+ cflags: [
+ "-Wall",
+ "-Werror",
+ ],
local_include_dirs: ["src"],
diff --git a/media/module/codecs/amrnb/enc/fuzzer/Android.bp b/media/module/codecs/amrnb/enc/fuzzer/Android.bp
index bcbcee2..1b2ec87 100644
--- a/media/module/codecs/amrnb/enc/fuzzer/Android.bp
+++ b/media/module/codecs/amrnb/enc/fuzzer/Android.bp
@@ -19,6 +19,7 @@
*/
package {
+ default_team: "trendy_team_media_codec_framework",
// See: http://go/android-license-faq
// A large-scale-change added 'default_applicable_licenses' to import
// all of the 'license_kinds' from "frameworks_av_media_codecs_amrnb_enc_license"
@@ -39,8 +40,8 @@
static_libs: [
"liblog",
- "libstagefright_amrnbenc",
"libstagefright_amrnb_common",
+ "libstagefright_amrnbenc",
],
fuzz_config: {
diff --git a/media/module/codecs/amrnb/fuzzer/Android.bp b/media/module/codecs/amrnb/fuzzer/Android.bp
index 3f29267..c5cbbe2 100644
--- a/media/module/codecs/amrnb/fuzzer/Android.bp
+++ b/media/module/codecs/amrnb/fuzzer/Android.bp
@@ -19,6 +19,7 @@
*/
package {
+ default_team: "trendy_team_media_codec_framework",
// See: http://go/android-license-faq
// A large-scale-change added 'default_applicable_licenses' to import
// all of the 'license_kinds' from "frameworks_av_license"
@@ -34,9 +35,9 @@
"amrnb_dec_fuzzer.cpp",
],
static_libs: [
- "libstagefright_amrnbdec",
- "libstagefright_amrnb_common",
"liblog",
+ "libstagefright_amrnb_common",
+ "libstagefright_amrnbdec",
],
target: {
darwin: {
diff --git a/media/module/extractors/Android.bp b/media/module/extractors/Android.bp
index e29d3e6..cbaabe3 100644
--- a/media/module/extractors/Android.bp
+++ b/media/module/extractors/Android.bp
@@ -81,6 +81,12 @@
srcs: ["extractor.aconfig"],
}
+java_aconfig_library {
+ name: "android.media.extractor.flags-aconfig-java",
+ aconfig_declarations: "android.media.extractor.flags-aconfig",
+ defaults: ["framework-minus-apex-aconfig-java-defaults"],
+}
+
cc_aconfig_library {
name: "android.media.extractor.flags-aconfig-cc",
aconfig_declarations: "android.media.extractor.flags-aconfig",
diff --git a/media/module/extractors/extractor.aconfig b/media/module/extractors/extractor.aconfig
index 7bf8bc1..a7d3397 100644
--- a/media/module/extractors/extractor.aconfig
+++ b/media/module/extractors/extractor.aconfig
@@ -12,3 +12,12 @@
description: "Enable SniffMidi optimizations."
bug: "359920208"
}
+
+flag {
+ name: "extractor_mp4_enable_apv"
+ is_exported: true
+ is_fixed_read_only: true
+ namespace: "media_solutions"
+ description: "Enable APV support in mp4 extractor."
+ bug: "370061501"
+}
diff --git a/media/module/extractors/fuzzers/Android.bp b/media/module/extractors/fuzzers/Android.bp
index 3da1589..f3da389 100644
--- a/media/module/extractors/fuzzers/Android.bp
+++ b/media/module/extractors/fuzzers/Android.bp
@@ -129,12 +129,18 @@
],
static_libs: [
+ "android.media.extractor.flags-aconfig-cc",
+ "libaconfig_storage_read_api_cc",
"libstagefright_id3",
"libstagefright_esds",
"libmp4extractor",
"libstagefright_metadatautils",
],
+ shared_libs: [
+ "server_configurable_flags",
+ ],
+
dictionary: "mp4_extractor_fuzzer.dict",
corpus: ["corpus_mp4/*"],
diff --git a/media/module/extractors/mp4/Android.bp b/media/module/extractors/mp4/Android.bp
index 8072002..effd24a 100644
--- a/media/module/extractors/mp4/Android.bp
+++ b/media/module/extractors/mp4/Android.bp
@@ -42,12 +42,18 @@
],
static_libs: [
+ "android.media.extractor.flags-aconfig-cc",
+ "libaconfig_storage_read_api_cc",
"libstagefright_esds",
"libstagefright_foundation",
"libstagefright_id3",
"libutils",
],
+ shared_libs: [
+ "server_configurable_flags",
+ ],
+
host_supported: true,
target: {
diff --git a/media/module/extractors/mp4/MPEG4Extractor.cpp b/media/module/extractors/mp4/MPEG4Extractor.cpp
index 12c0aaf..f062491 100644
--- a/media/module/extractors/mp4/MPEG4Extractor.cpp
+++ b/media/module/extractors/mp4/MPEG4Extractor.cpp
@@ -33,6 +33,7 @@
#include "SampleTable.h"
#include "ItemTable.h"
+#include <com_android_media_extractor_flags.h>
#include <media/esds/ESDS.h>
#include <ID3.h>
#include <media/stagefright/DataSourceBase.h>
@@ -147,6 +148,7 @@
bool mIsAVC;
bool mIsHEVC;
+ bool mIsAPV;
bool mIsDolbyVision;
bool mIsAC4;
bool mIsMpegH = false;
@@ -366,6 +368,13 @@
case FOURCC("hev1"):
return MEDIA_MIMETYPE_VIDEO_HEVC;
+ case FOURCC("apv1"):
+ if (!com::android::media::extractor::flags::extractor_mp4_enable_apv()) {
+ ALOGV("APV support not enabled");
+ return "application/octet-stream";
+ }
+ return MEDIA_MIMETYPE_VIDEO_APV;
+
case FOURCC("dvav"):
case FOURCC("dva1"):
case FOURCC("dvhe"):
@@ -2106,6 +2115,7 @@
case FOURCC("dav1"):
case FOURCC("av01"):
case FOURCC("vp09"):
+ case FOURCC("apv1"):
{
uint8_t buffer[78];
if (chunk_data_size < (ssize_t)sizeof(buffer)) {
@@ -2623,8 +2633,16 @@
break;
}
+ case FOURCC("apvC"):
case FOURCC("av1C"):
{
+ if (!com::android::media::extractor::flags::extractor_mp4_enable_apv() &&
+ chunk_type == FOURCC("apvC")) {
+ ALOGV("APV support not enabled");
+ *offset += chunk_size;
+ break;
+ }
+
auto buffer = heapbuffer<uint8_t>(chunk_data_size);
if (buffer.get() == NULL) {
@@ -5145,6 +5163,7 @@
mCurrentSampleInfoOffsets(NULL),
mIsAVC(false),
mIsHEVC(false),
+ mIsAPV(false),
mIsDolbyVision(false),
mIsAC4(false),
mIsPcm(false),
@@ -5187,6 +5206,8 @@
mIsAVC = !strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_AVC);
mIsHEVC = !strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_HEVC) ||
!strcasecmp(mime, MEDIA_MIMETYPE_IMAGE_ANDROID_HEIC);
+ mIsAPV = com::android::media::extractor::flags::extractor_mp4_enable_apv() &&
+ !strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_APV);
mIsAC4 = !strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AC4);
mIsDolbyVision = !strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_DOLBY_VISION);
mIsHeif = !strcasecmp(mime, MEDIA_MIMETYPE_IMAGE_ANDROID_HEIC) && mItemTable != NULL;
diff --git a/media/module/extractors/tests/Android.bp b/media/module/extractors/tests/Android.bp
index d6e79c7..5f0f4fa 100644
--- a/media/module/extractors/tests/Android.bp
+++ b/media/module/extractors/tests/Android.bp
@@ -21,6 +21,7 @@
// to get the below license kinds:
// SPDX-license-identifier-Apache-2.0
default_applicable_licenses: ["frameworks_av_license"],
+ default_team: "trendy_team_android_media_solutions_playback",
}
cc_test {
@@ -31,6 +32,8 @@
srcs: ["ExtractorUnitTest.cpp"],
static_libs: [
+ "android.media.extractor.flags-aconfig-cc",
+ "libaconfig_storage_read_api_cc",
"libaacextractor",
"libamrextractor",
"libmp3extractor",
@@ -77,6 +80,7 @@
"libhidlmemory",
"libhidlbase",
"libbase",
+ "server_configurable_flags",
],
compile_multilib: "first",
diff --git a/services/audioflinger/AudioFlinger.cpp b/services/audioflinger/AudioFlinger.cpp
index a1a0634..2322780 100644
--- a/services/audioflinger/AudioFlinger.cpp
+++ b/services/audioflinger/AudioFlinger.cpp
@@ -541,7 +541,7 @@
const audio_attributes_t *attr,
audio_config_base_t *config,
const AudioClient& client,
- audio_port_handle_t *deviceId,
+ DeviceIdVector *deviceIds,
audio_session_t *sessionId,
const sp<MmapStreamCallback>& callback,
sp<MmapStreamInterface>& interface,
@@ -553,7 +553,7 @@
status_t ret = NO_INIT;
if (af != 0) {
ret = af->openMmapStream(
- direction, attr, config, client, deviceId,
+ direction, attr, config, client, deviceIds,
sessionId, callback, interface, handle);
}
return ret;
@@ -563,7 +563,7 @@
const audio_attributes_t *attr,
audio_config_base_t *config,
const AudioClient& client,
- audio_port_handle_t *deviceId,
+ DeviceIdVector *deviceIds,
audio_session_t *sessionId,
const sp<MmapStreamCallback>& callback,
sp<MmapStreamInterface>& interface,
@@ -636,7 +636,8 @@
&fullConfig,
(audio_output_flags_t)(AUDIO_OUTPUT_FLAG_MMAP_NOIRQ |
AUDIO_OUTPUT_FLAG_DIRECT),
- deviceId, &portId, &secondaryOutputs, &isSpatialized,
+ deviceIds, &portId, &secondaryOutputs,
+ &isSpatialized,
&isBitPerfect,
&volume,
&muted);
@@ -648,12 +649,17 @@
ALOGW_IF(!secondaryOutputs.empty(),
"%s does not support secondary outputs, ignoring them", __func__);
} else {
+ audio_port_handle_t deviceId = getFirstDeviceId(*deviceIds);
ret = AudioSystem::getInputForAttr(&localAttr, &io,
RECORD_RIID_INVALID,
actualSessionId,
adjAttributionSource,
config,
- AUDIO_INPUT_FLAG_MMAP_NOIRQ, deviceId, &portId);
+ AUDIO_INPUT_FLAG_MMAP_NOIRQ, &deviceId, &portId);
+ deviceIds->clear();
+ if (deviceId != AUDIO_PORT_HANDLE_NONE) {
+ deviceIds->push_back(deviceId);
+ }
}
if (ret != NO_ERROR) {
return ret;
@@ -667,7 +673,7 @@
const sp<IAfMmapThread> thread = mMmapThreads.valueFor(io);
if (thread != 0) {
interface = IAfMmapThread::createMmapStreamInterfaceAdapter(thread);
- thread->configure(&localAttr, streamType, actualSessionId, callback, *deviceId, portId);
+ thread->configure(&localAttr, streamType, actualSessionId, callback, *deviceIds, portId);
*handle = portId;
*sessionId = actualSessionId;
config->sample_rate = thread->sampleRate();
@@ -1166,6 +1172,7 @@
adjAttributionSource = std::move(validatedAttrSource).unwrapInto();
}
+ DeviceIdVector selectedDeviceIds;
audio_session_t sessionId = input.sessionId;
if (sessionId == AUDIO_SESSION_ALLOCATE) {
sessionId = (audio_session_t) newAudioUniqueId(AUDIO_UNIQUE_ID_USE_SESSION);
@@ -1176,11 +1183,14 @@
output.sessionId = sessionId;
output.outputId = AUDIO_IO_HANDLE_NONE;
- output.selectedDeviceId = input.selectedDeviceId;
+ if (input.selectedDeviceId != AUDIO_PORT_HANDLE_NONE) {
+ selectedDeviceIds.push_back(input.selectedDeviceId);
+ }
lStatus = AudioSystem::getOutputForAttr(&localAttr, &output.outputId, sessionId, &streamType,
adjAttributionSource, &input.config, input.flags,
- &output.selectedDeviceId, &portId, &secondaryOutputs,
+ &selectedDeviceIds, &portId, &secondaryOutputs,
&isSpatialized, &isBitPerfect, &volume, &muted);
+ output.selectedDeviceId = getFirstDeviceId(selectedDeviceIds);
if (lStatus != NO_ERROR || output.outputId == AUDIO_IO_HANDLE_NONE) {
ALOGE("createTrack() getOutputForAttr() return error %d or invalid output handle", lStatus);
diff --git a/services/audioflinger/AudioFlinger.h b/services/audioflinger/AudioFlinger.h
index 042194f..133410e 100644
--- a/services/audioflinger/AudioFlinger.h
+++ b/services/audioflinger/AudioFlinger.h
@@ -439,7 +439,7 @@
const audio_attributes_t *attr,
audio_config_base_t *config,
const AudioClient& client,
- audio_port_handle_t *deviceId,
+ DeviceIdVector *deviceIds,
audio_session_t *sessionId,
const sp<MmapStreamCallback>& callback,
sp<MmapStreamInterface>& interface,
diff --git a/services/audioflinger/IAfThread.h b/services/audioflinger/IAfThread.h
index a13819c..3163d4c 100644
--- a/services/audioflinger/IAfThread.h
+++ b/services/audioflinger/IAfThread.h
@@ -661,7 +661,7 @@
audio_stream_type_t streamType,
audio_session_t sessionId,
const sp<MmapStreamCallback>& callback,
- audio_port_handle_t deviceId,
+ const DeviceIdVector& deviceIds,
audio_port_handle_t portId) EXCLUDES_ThreadBase_Mutex = 0;
virtual void disconnect() EXCLUDES_ThreadBase_Mutex = 0;
diff --git a/services/audioflinger/Threads.cpp b/services/audioflinger/Threads.cpp
index 91c82a2..e42b39e 100644
--- a/services/audioflinger/Threads.cpp
+++ b/services/audioflinger/Threads.cpp
@@ -10397,13 +10397,13 @@
audio_stream_type_t streamType __unused,
audio_session_t sessionId,
const sp<MmapStreamCallback>& callback,
- audio_port_handle_t deviceId,
+ const DeviceIdVector& deviceIds,
audio_port_handle_t portId)
{
mAttr = *attr;
mSessionId = sessionId;
mCallback = callback;
- mDeviceId = deviceId;
+ mDeviceIds = deviceIds;
mPortId = portId;
}
@@ -10496,7 +10496,7 @@
audio_stream_type_t stream = streamType_l();
audio_output_flags_t flags =
(audio_output_flags_t)(AUDIO_OUTPUT_FLAG_MMAP_NOIRQ | AUDIO_OUTPUT_FLAG_DIRECT);
- audio_port_handle_t deviceId = mDeviceId;
+ DeviceIdVector deviceIds = mDeviceIds;
std::vector<audio_io_handle_t> secondaryOutputs;
bool isSpatialized;
bool isBitPerfect;
@@ -10507,7 +10507,7 @@
adjAttributionSource,
&config,
flags,
- &deviceId,
+ &deviceIds,
&portId,
&secondaryOutputs,
&isSpatialized,
@@ -10523,7 +10523,7 @@
config.sample_rate = mSampleRate;
config.channel_mask = mChannelMask;
config.format = mFormat;
- audio_port_handle_t deviceId = mDeviceId;
+ audio_port_handle_t deviceId = getFirstDeviceId(mDeviceIds);
mutex().unlock();
ret = AudioSystem::getInputForAttr(&localAttr, &io,
RECORD_RIID_INVALID,
@@ -10876,7 +10876,7 @@
// store new device and send to effects
audio_devices_t type = AUDIO_DEVICE_NONE;
- audio_port_handle_t deviceId;
+ DeviceIdVector deviceIds;
AudioDeviceTypeAddrVector sinkDeviceTypeAddrs;
AudioDeviceTypeAddr sourceDeviceTypeAddr;
uint32_t numDevices = 0;
@@ -10890,12 +10890,12 @@
type = static_cast<audio_devices_t>(type | patch->sinks[i].ext.device.type);
sinkDeviceTypeAddrs.emplace_back(patch->sinks[i].ext.device.type,
patch->sinks[i].ext.device.address);
+ deviceIds.push_back(patch->sinks[i].id);
}
- deviceId = patch->sinks[0].id;
numDevices = mPatch.num_sinks;
} else {
type = patch->sources[0].ext.device.type;
- deviceId = patch->sources[0].id;
+ deviceIds.push_back(patch->sources[0].id);
numDevices = mPatch.num_sources;
sourceDeviceTypeAddr.mType = patch->sources[0].ext.device.type;
sourceDeviceTypeAddr.setAddress(patch->sources[0].ext.device.address);
@@ -10921,11 +10921,11 @@
// For mmap streams, once the routing has changed, they will be disconnected. It should be
// okay to notify the client earlier before the new patch creation.
- if (mDeviceId != deviceId) {
+ if (mDeviceIds != deviceIds) {
if (const sp<MmapStreamCallback> callback = mCallback.promote()) {
// The aaudioservice handle the routing changed event asynchronously. In that case,
// it is safe to hold the lock here.
- callback->onRoutingChanged(deviceId);
+ callback->onRoutingChanged(deviceIds);
}
}
@@ -10945,7 +10945,7 @@
*handle = AUDIO_PATCH_HANDLE_NONE;
}
- if (numDevices == 0 || mDeviceId != deviceId) {
+ if (numDevices == 0 || mDeviceIds != deviceIds) {
if (isOutput()) {
sendIoConfigEvent_l(AUDIO_OUTPUT_CONFIG_CHANGED);
mOutDeviceTypeAddrs = sinkDeviceTypeAddrs;
@@ -10955,7 +10955,7 @@
mInDeviceTypeAddr = sourceDeviceTypeAddr;
}
mPatch = *patch;
- mDeviceId = deviceId;
+ mDeviceIds = deviceIds;
}
// Force meteadata update after a route change
mActiveTracks.setHasChanged();
@@ -11110,7 +11110,8 @@
if (const sp<MmapStreamCallback> callback = mCallback.promote()) {
// The aaudioservice handle the routing changed event asynchronously. In that case,
// it is safe to hold the lock here.
- callback->onRoutingChanged(AUDIO_PORT_HANDLE_NONE);
+ DeviceIdVector emptyDeviceIdVector;
+ callback->onRoutingChanged(emptyDeviceIdVector);
} else if (mNoCallbackWarningCount < kMaxNoCallbackWarnings) {
ALOGW("Could not notify MMAP stream tear down: no onRoutingChanged callback!");
mNoCallbackWarningCount++;
@@ -11202,11 +11203,11 @@
audio_stream_type_t streamType,
audio_session_t sessionId,
const sp<MmapStreamCallback>& callback,
- audio_port_handle_t deviceId,
+ const DeviceIdVector& deviceIds,
audio_port_handle_t portId)
{
audio_utils::lock_guard l(mutex());
- MmapThread::configure_l(attr, streamType, sessionId, callback, deviceId, portId);
+ MmapThread::configure_l(attr, streamType, sessionId, callback, deviceIds, portId);
mStreamType = streamType;
}
diff --git a/services/audioflinger/Threads.h b/services/audioflinger/Threads.h
index 0c5a2c3..1d6e244 100644
--- a/services/audioflinger/Threads.h
+++ b/services/audioflinger/Threads.h
@@ -2243,17 +2243,17 @@
audio_stream_type_t streamType,
audio_session_t sessionId,
const sp<MmapStreamCallback>& callback,
- audio_port_handle_t deviceId,
+ const DeviceIdVector& deviceIds,
audio_port_handle_t portId) override EXCLUDES_ThreadBase_Mutex {
audio_utils::lock_guard l(mutex());
- configure_l(attr, streamType, sessionId, callback, deviceId, portId);
+ configure_l(attr, streamType, sessionId, callback, deviceIds, portId);
}
void configure_l(const audio_attributes_t* attr,
audio_stream_type_t streamType,
audio_session_t sessionId,
const sp<MmapStreamCallback>& callback,
- audio_port_handle_t deviceId,
+ const DeviceIdVector& deviceIds,
audio_port_handle_t portId) REQUIRES(mutex());
void disconnect() final EXCLUDES_ThreadBase_Mutex;
@@ -2363,9 +2363,9 @@
void dumpTracks_l(int fd, const Vector<String16>& args) final REQUIRES(mutex());
/**
- * @brief mDeviceId current device port unique identifier
+ * @brief mDeviceIds current device port unique identifiers
*/
- audio_port_handle_t mDeviceId GUARDED_BY(mutex()) = AUDIO_PORT_HANDLE_NONE;
+ DeviceIdVector mDeviceIds GUARDED_BY(mutex());
audio_attributes_t mAttr GUARDED_BY(mutex());
audio_session_t mSessionId GUARDED_BY(mutex());
@@ -2397,7 +2397,7 @@
audio_stream_type_t streamType,
audio_session_t sessionId,
const sp<MmapStreamCallback>& callback,
- audio_port_handle_t deviceId,
+ const DeviceIdVector& deviceIds,
audio_port_handle_t portId) final EXCLUDES_ThreadBase_Mutex;
AudioStreamOut* clearOutput() final EXCLUDES_ThreadBase_Mutex;
diff --git a/services/audiopolicy/AudioPolicyInterface.h b/services/audiopolicy/AudioPolicyInterface.h
index c047a89..8dd247a 100644
--- a/services/audiopolicy/AudioPolicyInterface.h
+++ b/services/audiopolicy/AudioPolicyInterface.h
@@ -144,7 +144,7 @@
const AttributionSourceState& attributionSource,
audio_config_t *config,
audio_output_flags_t *flags,
- audio_port_handle_t *selectedDeviceId,
+ DeviceIdVector *selectedDeviceIds,
audio_port_handle_t *portId,
std::vector<audio_io_handle_t> *secondaryOutputs,
output_type_t *outputType,
diff --git a/services/audiopolicy/fuzzer/audiopolicy_fuzzer.cpp b/services/audiopolicy/fuzzer/audiopolicy_fuzzer.cpp
index a5f37b0..1c6248a 100644
--- a/services/audiopolicy/fuzzer/audiopolicy_fuzzer.cpp
+++ b/services/audiopolicy/fuzzer/audiopolicy_fuzzer.cpp
@@ -198,7 +198,7 @@
virtual ~AudioPolicyManagerFuzzer() = default;
virtual bool initialize();
virtual void SetUpManagerConfig();
- bool getOutputForAttr(audio_port_handle_t *selectedDeviceId, audio_format_t format,
+ bool getOutputForAttr(DeviceIdVector *selectedDeviceIds, audio_format_t format,
audio_channel_mask_t channelMask, int sampleRate,
audio_output_flags_t flags = AUDIO_OUTPUT_FLAG_NONE,
audio_io_handle_t *output = nullptr,
@@ -248,7 +248,7 @@
void AudioPolicyManagerFuzzer::SetUpManagerConfig() { mConfig->setDefault(); }
bool AudioPolicyManagerFuzzer::getOutputForAttr(
- audio_port_handle_t *selectedDeviceId, audio_format_t format, audio_channel_mask_t channelMask,
+ DeviceIdVector *selectedDeviceIds, audio_format_t format, audio_channel_mask_t channelMask,
int sampleRate, audio_output_flags_t flags, audio_io_handle_t *output,
audio_port_handle_t *portId, audio_attributes_t attr) {
audio_io_handle_t localOutput;
@@ -273,7 +273,7 @@
attributionSource.uid = 0;
attributionSource.token = sp<BBinder>::make();
if (mManager->getOutputForAttr(&attr, output, AUDIO_SESSION_NONE, &stream, attributionSource,
- &config, &flags, selectedDeviceId, portId, {}, &outputType, &isSpatialized,
+ &config, &flags, selectedDeviceIds, portId, {}, &outputType, &isSpatialized,
&isBitPerfect, &volume, &muted) != OK) {
return false;
}
@@ -726,8 +726,8 @@
std::string tags(mFdp->ConsumeBool() ? "" : "addr=remote_submix_media");
strncpy(attr.tags, tags.c_str(), AUDIO_ATTRIBUTES_TAGS_MAX_SIZE - 1);
- audio_port_handle_t playbackRoutedPortId = AUDIO_PORT_HANDLE_NONE;
- getOutputForAttr(&playbackRoutedPortId, mAudioConfig.format, mAudioConfig.channel_mask,
+ DeviceIdVector playbackRoutedPortIds;
+ getOutputForAttr(&playbackRoutedPortIds, mAudioConfig.format, mAudioConfig.channel_mask,
mAudioConfig.sample_rate, AUDIO_OUTPUT_FLAG_NONE, nullptr /*output*/,
nullptr /*portId*/, attr);
}
@@ -807,13 +807,13 @@
findDevicePort(AUDIO_PORT_ROLE_SINK, getValueFromVector<audio_devices_t>(mFdp, kAudioDevices),
mMixAddress, &injectionPort);
- audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
+ DeviceIdVector selectedDeviceIds;
audio_usage_t usage = getValueFromVector<audio_usage_t>(mFdp, kAudioUsages);
audio_attributes_t attr = {AUDIO_CONTENT_TYPE_UNKNOWN, usage, AUDIO_SOURCE_DEFAULT,
AUDIO_FLAG_NONE, ""};
std::string tags = std::string("addr=") + mMixAddress;
strncpy(attr.tags, tags.c_str(), AUDIO_ATTRIBUTES_TAGS_MAX_SIZE - 1);
- getOutputForAttr(&selectedDeviceId, mAudioConfig.format, mAudioConfig.channel_mask,
+ getOutputForAttr(&selectedDeviceIds, mAudioConfig.format, mAudioConfig.channel_mask,
mAudioConfig.sample_rate /*sampleRate*/, AUDIO_OUTPUT_FLAG_NONE,
nullptr /*output*/, &mPortId, attr);
ret = mManager->startOutput(mPortId);
@@ -903,15 +903,17 @@
audio_is_output_device(type) ? AUDIO_PORT_ROLE_SINK : AUDIO_PORT_ROLE_SOURCE;
findDevicePort(role, type, address, &devicePort);
- audio_port_handle_t routedPortId = devicePort.id;
// Try start input or output according to the device type
if (audio_is_output_devices(type)) {
- getOutputForAttr(&routedPortId, getValueFromVector<audio_format_t>(mFdp, kAudioFormats),
+ DeviceIdVector routedPortIds = { devicePort.id };
+ getOutputForAttr(&routedPortIds,
+ getValueFromVector<audio_format_t>(mFdp, kAudioFormats),
getValueFromVector<audio_channel_mask_t>(mFdp, kAudioChannelOutMasks),
getValueFromVector<uint32_t>(mFdp, kSamplingRates),
AUDIO_OUTPUT_FLAG_NONE);
} else if (audio_is_input_device(type)) {
RecordingActivityTracker tracker;
+ audio_port_handle_t routedPortId = devicePort.id;
getInputForAttr({}, tracker.getRiid(), &routedPortId,
getValueFromVector<audio_format_t>(mFdp, kAudioFormats),
getValueFromVector<audio_channel_mask_t>(mFdp, kAudioChannelInMasks),
@@ -984,10 +986,10 @@
if (ret != NO_ERROR) {
return;
}
- audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
+ DeviceIdVector selectedDeviceIds;
audio_io_handle_t output;
audio_port_handle_t portId;
- getOutputForAttr(&selectedDeviceId, getValueFromVector<audio_format_t>(mFdp, kAudioFormats),
+ getOutputForAttr(&selectedDeviceIds, getValueFromVector<audio_format_t>(mFdp, kAudioFormats),
getValueFromVector<audio_channel_mask_t>(mFdp, kAudioChannelOutMasks),
getValueFromVector<uint32_t>(mFdp, kSamplingRates), flags, &output, &portId);
sp<SwAudioOutputDescriptor> outDesc = mManager->getOutputs().valueFor(output);
diff --git a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
index 522451f..7667571 100644
--- a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
+++ b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
@@ -1262,7 +1262,7 @@
uid_t uid,
audio_config_t *config,
audio_output_flags_t *flags,
- audio_port_handle_t *selectedDeviceId,
+ DeviceIdVector *selectedDeviceIds,
bool *isRequestedDeviceForExclusiveUse,
std::vector<sp<AudioPolicyMix>> *secondaryMixes,
output_type_t *outputType,
@@ -1270,7 +1270,8 @@
bool *isBitPerfect)
{
DeviceVector outputDevices;
- const audio_port_handle_t requestedPortId = *selectedDeviceId;
+ audio_port_handle_t requestedPortId = getFirstDeviceId(*selectedDeviceIds);
+ selectedDeviceIds->clear();
DeviceVector msdDevices = getMsdAudioOutDevices();
const sp<DeviceDescriptor> requestedDevice =
mAvailableOutputDevices.getDeviceFromId(requestedPortId);
@@ -1347,8 +1348,9 @@
if (policyDesc != nullptr) {
policyDesc->mPolicyMix = primaryMix;
*output = policyDesc->mIoHandle;
- *selectedDeviceId = policyMixDevice != nullptr ? policyMixDevice->getId()
- : AUDIO_PORT_HANDLE_NONE;
+ if (policyMixDevice != nullptr) {
+ selectedDeviceIds->push_back(policyMixDevice->getId());
+ }
if ((policyDesc->mFlags & AUDIO_OUTPUT_FLAG_DIRECT) != AUDIO_OUTPUT_FLAG_DIRECT) {
// Remove direct flag as it is not on a direct output.
*flags = (audio_output_flags_t) (*flags & ~AUDIO_OUTPUT_FLAG_DIRECT);
@@ -1485,11 +1487,13 @@
return INVALID_OPERATION;
}
- *selectedDeviceId = getFirstDeviceId(outputDevices);
for (auto &outputDevice : outputDevices) {
- if (outputDevice->getId() == mConfig->getDefaultOutputDevice()->getId()) {
- *selectedDeviceId = outputDevice->getId();
- break;
+ if (std::find(selectedDeviceIds->begin(), selectedDeviceIds->end(),
+ outputDevice->getId()) == selectedDeviceIds->end()) {
+ selectedDeviceIds->push_back(outputDevice->getId());
+ if (outputDevice->getId() == mConfig->getDefaultOutputDevice()->getId()) {
+ std::swap(selectedDeviceIds->front(), selectedDeviceIds->back());
+ }
}
}
@@ -1499,7 +1503,8 @@
*outputType = API_OUTPUT_LEGACY;
}
- ALOGV("%s returns output %d selectedDeviceId %d", __func__, *output, *selectedDeviceId);
+ ALOGV("%s returns output %d selectedDeviceIds %s", __func__, *output,
+ toString(*selectedDeviceIds).c_str());
return NO_ERROR;
}
@@ -1511,7 +1516,7 @@
const AttributionSourceState& attributionSource,
audio_config_t *config,
audio_output_flags_t *flags,
- audio_port_handle_t *selectedDeviceId,
+ DeviceIdVector *selectedDeviceIds,
audio_port_handle_t *portId,
std::vector<audio_io_handle_t> *secondaryOutputs,
output_type_t *outputType,
@@ -1526,20 +1531,22 @@
}
const uid_t uid = VALUE_OR_RETURN_STATUS(
aidl2legacy_int32_t_uid_t(attributionSource.uid));
- const audio_port_handle_t requestedPortId = *selectedDeviceId;
audio_attributes_t resultAttr;
bool isRequestedDeviceForExclusiveUse = false;
std::vector<sp<AudioPolicyMix>> secondaryMixes;
- const sp<DeviceDescriptor> requestedDevice =
- mAvailableOutputDevices.getDeviceFromId(requestedPortId);
+ DeviceIdVector requestedDeviceIds = *selectedDeviceIds;
// Prevent from storing invalid requested device id in clients
- const audio_port_handle_t sanitizedRequestedPortId =
- requestedDevice != nullptr ? requestedPortId : AUDIO_PORT_HANDLE_NONE;
- *selectedDeviceId = sanitizedRequestedPortId;
+ DeviceIdVector sanitizedRequestedPortIds;
+ for (auto deviceId : *selectedDeviceIds) {
+ if (mAvailableOutputDevices.getDeviceFromId(deviceId) != nullptr) {
+ sanitizedRequestedPortIds.push_back(deviceId);
+ }
+ }
+ *selectedDeviceIds = sanitizedRequestedPortIds;
status_t status = getOutputForAttrInt(&resultAttr, output, session, attr, stream, uid,
- config, flags, selectedDeviceId, &isRequestedDeviceForExclusiveUse,
+ config, flags, selectedDeviceIds, &isRequestedDeviceForExclusiveUse,
secondaryOutputs != nullptr ? &secondaryMixes : nullptr, outputType, isSpatialized,
isBitPerfect);
if (status != NO_ERROR) {
@@ -1564,9 +1571,10 @@
*portId = PolicyAudioPort::getNextUniqueId();
sp<SwAudioOutputDescriptor> outputDesc = mOutputs.valueFor(*output);
+ // TODO(b/367816690): Add device id sets to TrackClientDescriptor
sp<TrackClientDescriptor> clientDesc =
new TrackClientDescriptor(*portId, uid, session, resultAttr, clientConfig,
- sanitizedRequestedPortId, *stream,
+ getFirstDeviceId(sanitizedRequestedPortIds), *stream,
mEngine->getProductStrategyForAttributes(resultAttr),
toVolumeSource(resultAttr),
*flags, isRequestedDeviceForExclusiveUse,
@@ -1577,8 +1585,9 @@
*volume = Volume::DbToAmpl(outputDesc->getCurVolume(toVolumeSource(resultAttr)));
*muted = outputDesc->isMutedByGroup(toVolumeSource(resultAttr));
- ALOGV("%s() returns output %d requestedPortId %d selectedDeviceId %d for port ID %d", __func__,
- *output, requestedPortId, *selectedDeviceId, *portId);
+ ALOGV("%s() returns output %d requestedPortIds %s selectedDeviceIds %s for port ID %d",
+ __func__, *output, toString(requestedDeviceIds).c_str(),
+ toString(*selectedDeviceIds).c_str(), *portId);
return NO_ERROR;
}
@@ -5603,14 +5612,14 @@
: audio_channel_mask_in_to_out(sourceMask);
config.format = sourceDesc->config().format;
audio_output_flags_t flags = AUDIO_OUTPUT_FLAG_NONE;
- audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
+ DeviceIdVector selectedDeviceIds;
bool isRequestedDeviceForExclusiveUse = false;
output_type_t outputType;
bool isSpatialized;
bool isBitPerfect;
getOutputForAttrInt(&resultAttr, &output, AUDIO_SESSION_NONE, &attributes,
&stream, sourceDesc->uid(), &config, &flags,
- &selectedDeviceId, &isRequestedDeviceForExclusiveUse,
+ &selectedDeviceIds, &isRequestedDeviceForExclusiveUse,
nullptr, &outputType, &isSpatialized, &isBitPerfect);
if (output == AUDIO_IO_HANDLE_NONE) {
ALOGV("%s no output for device %s",
diff --git a/services/audiopolicy/managerdefault/AudioPolicyManager.h b/services/audiopolicy/managerdefault/AudioPolicyManager.h
index 1ca0c32..e0cafd4 100644
--- a/services/audiopolicy/managerdefault/AudioPolicyManager.h
+++ b/services/audiopolicy/managerdefault/AudioPolicyManager.h
@@ -123,7 +123,7 @@
const AttributionSourceState& attributionSource,
audio_config_t *config,
audio_output_flags_t *flags,
- audio_port_handle_t *selectedDeviceId,
+ DeviceIdVector *selectedDeviceIds,
audio_port_handle_t *portId,
std::vector<audio_io_handle_t> *secondaryOutputs,
output_type_t *outputType,
@@ -893,15 +893,7 @@
return mAvailableInputDevices.getDevicesFromHwModule(
mPrimaryOutput->getModuleHandle());
}
- /**
- * @brief getFirstDeviceId of the Device Vector
- * @return if the collection is not empty, it returns the first device Id,
- * otherwise AUDIO_PORT_HANDLE_NONE
- */
- audio_port_handle_t getFirstDeviceId(const DeviceVector &devices) const
- {
- return (devices.size() > 0) ? devices.itemAt(0)->getId() : AUDIO_PORT_HANDLE_NONE;
- }
+
String8 getFirstDeviceAddress(const DeviceVector &devices) const
{
return (devices.size() > 0) ?
@@ -1142,7 +1134,7 @@
uid_t uid,
audio_config_t *config,
audio_output_flags_t *flags,
- audio_port_handle_t *selectedDeviceId,
+ DeviceIdVector *selectedDeviceIds,
bool *isRequestedDeviceForExclusiveUse,
std::vector<sp<AudioPolicyMix>> *secondaryMixes,
output_type_t *outputType,
diff --git a/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp b/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp
index 91a5d2d..3589de1 100644
--- a/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp
+++ b/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp
@@ -24,6 +24,7 @@
#include <android/content/AttributionSourceState.h>
#include <android_media_audiopolicy.h>
#include <com_android_media_audio.h>
+#include <cutils/properties.h>
#include <error/expected_utils.h>
#include <media/AidlConversion.h>
#include <media/AudioPolicy.h>
@@ -369,7 +370,7 @@
const AttributionSourceState& attributionSource,
const AudioConfig& configAidl,
int32_t flagsAidl,
- int32_t selectedDeviceIdAidl,
+ const std::vector<int32_t>& selectedDeviceIdsAidl,
media::GetOutputForAttrResponse* _aidl_return)
{
audio_attributes_t attr = VALUE_OR_RETURN_BINDER_STATUS(
@@ -381,8 +382,9 @@
aidl2legacy_AudioConfig_audio_config_t(configAidl, false /*isInput*/));
audio_output_flags_t flags = VALUE_OR_RETURN_BINDER_STATUS(
aidl2legacy_int32_t_audio_output_flags_t_mask(flagsAidl));
- audio_port_handle_t selectedDeviceId = VALUE_OR_RETURN_BINDER_STATUS(
- aidl2legacy_int32_t_audio_port_handle_t(selectedDeviceIdAidl));
+ DeviceIdVector selectedDeviceIds = VALUE_OR_RETURN_BINDER_STATUS(
+ convertContainer<DeviceIdVector>(selectedDeviceIdsAidl,
+ aidl2legacy_int32_t_audio_port_handle_t));
audio_io_handle_t output;
audio_port_handle_t portId;
@@ -446,7 +448,7 @@
&stream,
attributionSource,
&config,
- &flags, &selectedDeviceId, &portId,
+ &flags, &selectedDeviceIds, &portId,
&secondaryOutputs,
&outputType,
&isSpatialized,
@@ -493,20 +495,24 @@
}
if (result == NO_ERROR) {
- attr = VALUE_OR_RETURN_BINDER_STATUS(
- mUsecaseValidator->verifyAudioAttributes(output, attributionSource, attr));
+ // usecase validator is disabled by default
+ if (property_get_bool("ro.audio.usecase_validator_enabled", false /* default */)) {
+ attr = VALUE_OR_RETURN_BINDER_STATUS(
+ mUsecaseValidator->verifyAudioAttributes(output, attributionSource, attr));
+ }
sp<AudioPlaybackClient> client =
new AudioPlaybackClient(attr, output, attributionSource, session,
- portId, selectedDeviceId, stream, isSpatialized, config.channel_mask);
+ portId, selectedDeviceIds, stream, isSpatialized, config.channel_mask);
mAudioPlaybackClients.add(portId, client);
_aidl_return->output = VALUE_OR_RETURN_BINDER_STATUS(
legacy2aidl_audio_io_handle_t_int32_t(output));
_aidl_return->stream = VALUE_OR_RETURN_BINDER_STATUS(
legacy2aidl_audio_stream_type_t_AudioStreamType(stream));
- _aidl_return->selectedDeviceId = VALUE_OR_RETURN_BINDER_STATUS(
- legacy2aidl_audio_port_handle_t_int32_t(selectedDeviceId));
+ _aidl_return->selectedDeviceIds = VALUE_OR_RETURN_BINDER_STATUS(
+ convertContainer<std::vector<int32_t>>(selectedDeviceIds,
+ legacy2aidl_audio_port_handle_t_int32_t));
_aidl_return->portId = VALUE_OR_RETURN_BINDER_STATUS(
legacy2aidl_audio_port_handle_t_int32_t(portId));
_aidl_return->secondaryOutputs = VALUE_OR_RETURN_BINDER_STATUS(
@@ -864,8 +870,9 @@
return binderStatusFromStatusT(status);
}
+ DeviceIdVector selectedDeviceIds = { selectedDeviceId };
sp<AudioRecordClient> client = new AudioRecordClient(attr, input, session, portId,
- selectedDeviceId, attributionSource,
+ selectedDeviceIds, attributionSource,
virtualDeviceId,
canCaptureOutput, canCaptureHotword,
mOutputCommandThread);
@@ -899,6 +906,17 @@
return {};
}
+std::string AudioPolicyService::getDeviceTypeStrForPortIds(DeviceIdVector portIds) {
+ std::string output = {};
+ for (auto it = portIds.begin(); it != portIds.end(); ++it) {
+ if (it != portIds.begin()) {
+ output += ", ";
+ }
+ output += getDeviceTypeStrForPortId(*it);
+ }
+ return output;
+}
+
Status AudioPolicyService::startInput(int32_t portIdAidl)
{
audio_port_handle_t portId = VALUE_OR_RETURN_BINDER_STATUS(
@@ -988,6 +1006,8 @@
"android.media.audiopolicy.active.session";
static constexpr char kAudioPolicyActiveDevice[] =
"android.media.audiopolicy.active.device";
+ static constexpr char kAudioPolicyActiveDevices[] =
+ "android.media.audiopolicy.active.devices";
mediametrics::Item *item = mediametrics::Item::create(kAudioPolicy);
if (item != NULL) {
@@ -1005,8 +1025,8 @@
item->setCString(kAudioPolicyRqstPkg,
std::to_string(client->attributionSource.uid).c_str());
}
- item->setCString(
- kAudioPolicyRqstDevice, getDeviceTypeStrForPortId(client->deviceId).c_str());
+ item->setCString(kAudioPolicyRqstDevice,
+ getDeviceTypeStrForPortId(getFirstDeviceId(client->deviceIds)).c_str());
int count = mAudioRecordClients.size();
for (int i = 0; i < count ; i++) {
@@ -1028,7 +1048,9 @@
other->attributionSource.uid).c_str());
}
item->setCString(kAudioPolicyActiveDevice,
- getDeviceTypeStrForPortId(other->deviceId).c_str());
+ getDeviceTypeStrForPortId(getFirstDeviceId(other->deviceIds)).c_str());
+ item->setCString(kAudioPolicyActiveDevices,
+ getDeviceTypeStrForPortIds(other->deviceIds).c_str());
}
}
item->selfrecord();
diff --git a/services/audiopolicy/service/AudioPolicyService.h b/services/audiopolicy/service/AudioPolicyService.h
index 44a0e7d..eeac9a6 100644
--- a/services/audiopolicy/service/AudioPolicyService.h
+++ b/services/audiopolicy/service/AudioPolicyService.h
@@ -117,7 +117,7 @@
int32_t session,
const AttributionSourceState &attributionSource,
const AudioConfig& config,
- int32_t flags, int32_t selectedDeviceId,
+ int32_t flags, const std::vector<int32_t>& selectedDeviceIds,
media::GetOutputForAttrResponse* _aidl_return) override;
binder::Status startOutput(int32_t portId) override;
binder::Status stopOutput(int32_t portId) override;
@@ -474,6 +474,8 @@
std::string getDeviceTypeStrForPortId(audio_port_handle_t portId);
+ std::string getDeviceTypeStrForPortIds(DeviceIdVector portIds);
+
status_t getAudioPolicyEffects(sp<AudioPolicyEffects>& audioPolicyEffects);
app_state_t apmStatFromAmState(int amState);
@@ -1015,10 +1017,10 @@
const audio_io_handle_t io,
const AttributionSourceState& attributionSource,
const audio_session_t session, audio_port_handle_t portId,
- const audio_port_handle_t deviceId) :
+ const DeviceIdVector deviceIds) :
attributes(attributes), io(io), attributionSource(
attributionSource), session(session), portId(portId),
- deviceId(deviceId), active(false) {}
+ deviceIds(deviceIds), active(false) {}
~AudioClient() override = default;
@@ -1027,7 +1029,7 @@
const AttributionSourceState attributionSource; //client attributionsource
const audio_session_t session; // audio session ID
const audio_port_handle_t portId;
- const audio_port_handle_t deviceId; // selected input device port ID
+ const DeviceIdVector deviceIds; // selected input device port IDs
bool active; // Playback/Capture is active or inactive
};
private:
@@ -1042,10 +1044,10 @@
AudioPlaybackClient(const audio_attributes_t attributes,
const audio_io_handle_t io, AttributionSourceState attributionSource,
const audio_session_t session, audio_port_handle_t portId,
- audio_port_handle_t deviceId, audio_stream_type_t stream,
+ DeviceIdVector deviceIds, audio_stream_type_t stream,
bool isSpatialized, audio_channel_mask_t channelMask) :
AudioClient(attributes, io, attributionSource, session, portId,
- deviceId), stream(stream), isSpatialized(isSpatialized),
+ deviceIds), stream(stream), isSpatialized(isSpatialized),
channelMask(channelMask) {}
~AudioPlaybackClient() override = default;
diff --git a/services/audiopolicy/service/AudioRecordClient.h b/services/audiopolicy/service/AudioRecordClient.h
index 977d77b..76bc17a 100644
--- a/services/audiopolicy/service/AudioRecordClient.h
+++ b/services/audiopolicy/service/AudioRecordClient.h
@@ -87,13 +87,13 @@
AudioRecordClient(const audio_attributes_t attributes,
const audio_io_handle_t io,
const audio_session_t session, audio_port_handle_t portId,
- const audio_port_handle_t deviceId,
+ const DeviceIdVector deviceIds,
const AttributionSourceState& attributionSource,
const uint32_t virtualDeviceId,
bool canCaptureOutput, bool canCaptureHotword,
wp<AudioPolicyService::AudioCommandThread> commandThread) :
AudioClient(attributes, io, attributionSource,
- session, portId, deviceId), attributionSource(attributionSource),
+ session, portId, deviceIds), attributionSource(attributionSource),
virtualDeviceId(virtualDeviceId),
startTimeNs(0), canCaptureOutput(canCaptureOutput),
canCaptureHotword(canCaptureHotword), silenced(false),
diff --git a/services/audiopolicy/tests/audiopolicymanager_tests.cpp b/services/audiopolicy/tests/audiopolicymanager_tests.cpp
index e901cfd..c37540c 100644
--- a/services/audiopolicy/tests/audiopolicymanager_tests.cpp
+++ b/services/audiopolicy/tests/audiopolicymanager_tests.cpp
@@ -191,7 +191,7 @@
// When explicit routing is needed, selectedDeviceId needs to be set as the wanted port
// id. Otherwise, selectedDeviceId needs to be initialized as AUDIO_PORT_HANDLE_NONE.
void getOutputForAttr(
- audio_port_handle_t *selectedDeviceId,
+ DeviceIdVector *selectedDeviceIds,
audio_format_t format,
audio_channel_mask_t channelMask,
int sampleRate,
@@ -284,7 +284,7 @@
}
void AudioPolicyManagerTest::getOutputForAttr(
- audio_port_handle_t *selectedDeviceId,
+ DeviceIdVector *selectedDeviceIds,
audio_format_t format,
audio_channel_mask_t channelMask,
int sampleRate,
@@ -314,7 +314,7 @@
AttributionSourceState attributionSource = createAttributionSourceState(uid);
ASSERT_EQ(OK, mManager->getOutputForAttr(
&attr, output, session, &stream, attributionSource, &config, &flags,
- selectedDeviceId, portId, {}, &outputType, &isSpatialized,
+ selectedDeviceIds, portId, {}, &outputType, &isSpatialized,
isBitPerfect == nullptr ? &isBitPerfectInternal : isBitPerfect, &volume,
&muted));
ASSERT_NE(AUDIO_PORT_HANDLE_NONE, *portId);
@@ -648,42 +648,42 @@
TEST_P(AudioPolicyManagerTestMsd, GetOutputForAttrEncodedRoutesToMsd) {
const PatchCountCheck patchCount = snapshotPatchCount();
- audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
- getOutputForAttr(&selectedDeviceId, AUDIO_FORMAT_AC3, AUDIO_CHANNEL_OUT_5POINT1,
+ DeviceIdVector selectedDeviceIds;
+ getOutputForAttr(&selectedDeviceIds, AUDIO_FORMAT_AC3, AUDIO_CHANNEL_OUT_5POINT1,
k48000SamplingRate, AUDIO_OUTPUT_FLAG_DIRECT);
- ASSERT_EQ(selectedDeviceId, mDefaultOutputDevice->getId());
+ ASSERT_EQ(mDefaultOutputDevice->getId(), selectedDeviceIds[0]);
ASSERT_EQ(mExpectedAudioPatchCount, patchCount.deltaFromSnapshot());
}
TEST_P(AudioPolicyManagerTestMsd, GetOutputForAttrPcmRoutesToMsd) {
const PatchCountCheck patchCount = snapshotPatchCount();
- audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
- getOutputForAttr(&selectedDeviceId,
+ DeviceIdVector selectedDeviceIds;
+ getOutputForAttr(&selectedDeviceIds,
AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO, k48000SamplingRate);
- ASSERT_EQ(selectedDeviceId, mDefaultOutputDevice->getId());
+ ASSERT_EQ(mDefaultOutputDevice->getId(), selectedDeviceIds[0]);
ASSERT_EQ(mExpectedAudioPatchCount - 1, patchCount.deltaFromSnapshot());
}
TEST_P(AudioPolicyManagerTestMsd, GetOutputForAttrEncodedPlusPcmRoutesToMsd) {
const PatchCountCheck patchCount = snapshotPatchCount();
- audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
- getOutputForAttr(&selectedDeviceId, AUDIO_FORMAT_AC3, AUDIO_CHANNEL_OUT_5POINT1,
+ DeviceIdVector selectedDeviceIds;
+ getOutputForAttr(&selectedDeviceIds, AUDIO_FORMAT_AC3, AUDIO_CHANNEL_OUT_5POINT1,
k48000SamplingRate, AUDIO_OUTPUT_FLAG_DIRECT);
- ASSERT_EQ(selectedDeviceId, mDefaultOutputDevice->getId());
+ ASSERT_EQ(mDefaultOutputDevice->getId(), selectedDeviceIds[0]);
ASSERT_EQ(mExpectedAudioPatchCount, patchCount.deltaFromSnapshot());
- selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
- getOutputForAttr(&selectedDeviceId,
+ selectedDeviceIds.clear();
+ getOutputForAttr(&selectedDeviceIds,
AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO, k48000SamplingRate);
- ASSERT_EQ(selectedDeviceId, mDefaultOutputDevice->getId());
+ ASSERT_EQ(mDefaultOutputDevice->getId(), selectedDeviceIds[0]);
ASSERT_EQ(mExpectedAudioPatchCount, patchCount.deltaFromSnapshot());
}
TEST_P(AudioPolicyManagerTestMsd, GetOutputForAttrUnsupportedFormatBypassesMsd) {
const PatchCountCheck patchCount = snapshotPatchCount();
- audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
- getOutputForAttr(&selectedDeviceId, AUDIO_FORMAT_DTS, AUDIO_CHANNEL_OUT_5POINT1,
+ DeviceIdVector selectedDeviceIds;
+ getOutputForAttr(&selectedDeviceIds, AUDIO_FORMAT_DTS, AUDIO_CHANNEL_OUT_5POINT1,
k48000SamplingRate, AUDIO_OUTPUT_FLAG_DIRECT);
- ASSERT_NE(selectedDeviceId, mMsdOutputDevice->getId());
+ ASSERT_NE(mMsdOutputDevice->getId(), selectedDeviceIds[0]);
ASSERT_EQ(1, patchCount.deltaFromSnapshot());
}
@@ -691,32 +691,33 @@
// Switch between formats that are supported and not supported by MSD.
{
const PatchCountCheck patchCount = snapshotPatchCount();
- audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
+ DeviceIdVector selectedDeviceIds;
audio_port_handle_t portId;
- getOutputForAttr(&selectedDeviceId, AUDIO_FORMAT_AC3, AUDIO_CHANNEL_OUT_5POINT1,
+ getOutputForAttr(&selectedDeviceIds, AUDIO_FORMAT_AC3, AUDIO_CHANNEL_OUT_5POINT1,
k48000SamplingRate, AUDIO_OUTPUT_FLAG_DIRECT, nullptr /*output*/, &portId);
- ASSERT_EQ(selectedDeviceId, mDefaultOutputDevice->getId());
+ ASSERT_EQ(mDefaultOutputDevice->getId(), selectedDeviceIds[0]);
ASSERT_EQ(mExpectedAudioPatchCount, patchCount.deltaFromSnapshot());
mManager->releaseOutput(portId);
ASSERT_EQ(mExpectedAudioPatchCount - 1, patchCount.deltaFromSnapshot());
}
{
const PatchCountCheck patchCount = snapshotPatchCount();
- audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
+ DeviceIdVector selectedDeviceIds;
audio_port_handle_t portId;
- getOutputForAttr(&selectedDeviceId, AUDIO_FORMAT_DTS, AUDIO_CHANNEL_OUT_5POINT1,
+ getOutputForAttr(&selectedDeviceIds, AUDIO_FORMAT_DTS, AUDIO_CHANNEL_OUT_5POINT1,
k48000SamplingRate, AUDIO_OUTPUT_FLAG_DIRECT, nullptr /*output*/, &portId);
- ASSERT_NE(selectedDeviceId, mMsdOutputDevice->getId());
+ ASSERT_GT(selectedDeviceIds.size(), 0);
+ ASSERT_NE(mMsdOutputDevice->getId(), selectedDeviceIds[0]);
ASSERT_EQ(-static_cast<int>(mExpectedAudioPatchCount) + 2, patchCount.deltaFromSnapshot());
mManager->releaseOutput(portId);
ASSERT_EQ(0, patchCount.deltaFromSnapshot());
}
{
const PatchCountCheck patchCount = snapshotPatchCount();
- audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
- getOutputForAttr(&selectedDeviceId, AUDIO_FORMAT_AC3, AUDIO_CHANNEL_OUT_5POINT1,
+ DeviceIdVector selectedDeviceIds;
+ getOutputForAttr(&selectedDeviceIds, AUDIO_FORMAT_AC3, AUDIO_CHANNEL_OUT_5POINT1,
k48000SamplingRate, AUDIO_OUTPUT_FLAG_DIRECT);
- ASSERT_EQ(selectedDeviceId, mDefaultOutputDevice->getId());
+ ASSERT_EQ(mDefaultOutputDevice->getId(), selectedDeviceIds[0]);
ASSERT_EQ(1, patchCount.deltaFromSnapshot());
}
}
@@ -1138,14 +1139,14 @@
&mediaAttr, usbPortId, uid, &mixerAttributes[0]));
audio_io_handle_t output = AUDIO_IO_HANDLE_NONE;
- audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
+ DeviceIdVector selectedDeviceIds;
audio_port_handle_t portId = AUDIO_PORT_HANDLE_NONE;
- getOutputForAttr(&selectedDeviceId, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
+ getOutputForAttr(&selectedDeviceIds, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
k48000SamplingRate, AUDIO_OUTPUT_FLAG_NONE, &output, &portId, mediaAttr,
AUDIO_SESSION_NONE, uid);
status_t status = mManager->startOutput(portId);
if (status == DEAD_OBJECT) {
- getOutputForAttr(&selectedDeviceId, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
+ getOutputForAttr(&selectedDeviceIds, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
k48000SamplingRate, AUDIO_OUTPUT_FLAG_NONE, &output, &portId, mediaAttr,
AUDIO_SESSION_NONE, uid);
status = mManager->startOutput(portId);
@@ -1172,6 +1173,56 @@
"", "", AUDIO_FORMAT_LDAC));
}
+template <typename T>
+bool hasDuplicates(const T& container) {
+ return std::unordered_set<typename T::value_type>(container.begin(),
+ container.end()).size() != container.size();
+}
+
+TEST_F(AudioPolicyManagerTestWithConfigurationFile, UniqueSelectedDeviceIds) {
+ mClient->addSupportedFormat(AUDIO_FORMAT_PCM_16_BIT);
+ mClient->addSupportedChannelMask(AUDIO_CHANNEL_OUT_STEREO);
+ ASSERT_EQ(NO_ERROR, mManager->setDeviceConnectionState(AUDIO_DEVICE_OUT_USB_DEVICE,
+ AUDIO_POLICY_DEVICE_STATE_AVAILABLE,
+ "", "", AUDIO_FORMAT_DEFAULT));
+ auto devices = mManager->getAvailableOutputDevices();
+ audio_port_handle_t usbPortId = AUDIO_PORT_HANDLE_NONE;
+ audio_port_handle_t speakerPortId = AUDIO_PORT_HANDLE_NONE;
+ for (auto device : devices) {
+ if (device->type() == AUDIO_DEVICE_OUT_USB_DEVICE) {
+ usbPortId = device->getId();
+ }
+ if (device->type() == AUDIO_DEVICE_OUT_SPEAKER) {
+ speakerPortId = device->getId();
+ }
+ }
+ EXPECT_NE(AUDIO_PORT_HANDLE_NONE, usbPortId);
+ EXPECT_NE(AUDIO_PORT_HANDLE_NONE, speakerPortId);
+
+ const uid_t uid = 1234;
+ const audio_attributes_t mediaAttr = {
+ .content_type = AUDIO_CONTENT_TYPE_SONIFICATION,
+ .usage = AUDIO_USAGE_ALARM,
+ };
+
+ audio_io_handle_t output = AUDIO_IO_HANDLE_NONE;
+ DeviceIdVector selectedDeviceIds;
+ audio_port_handle_t portId = AUDIO_PORT_HANDLE_NONE;
+ ASSERT_NO_FATAL_FAILURE(getOutputForAttr(&selectedDeviceIds, AUDIO_FORMAT_PCM_16_BIT,
+ AUDIO_CHANNEL_OUT_STEREO, k48000SamplingRate, AUDIO_OUTPUT_FLAG_NONE, &output,
+ &portId, mediaAttr, AUDIO_SESSION_NONE, uid));
+ EXPECT_FALSE(selectedDeviceIds.empty());
+ EXPECT_NE(std::find(selectedDeviceIds.begin(), selectedDeviceIds.end(), usbPortId),
+ selectedDeviceIds.end());
+ EXPECT_NE(std::find(selectedDeviceIds.begin(), selectedDeviceIds.end(), speakerPortId),
+ selectedDeviceIds.end());
+ EXPECT_FALSE(hasDuplicates(selectedDeviceIds));
+
+ ASSERT_EQ(NO_ERROR, mManager->setDeviceConnectionState(AUDIO_DEVICE_OUT_USB_DEVICE,
+ AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE,
+ "", "", AUDIO_FORMAT_DEFAULT));
+}
+
TEST_F(AudioPolicyManagerTestWithConfigurationFile, PreferExactConfigForInput) {
const audio_channel_mask_t deviceChannelMask = AUDIO_CHANNEL_IN_3POINT1;
mClient->addSupportedFormat(AUDIO_FORMAT_PCM_16_BIT);
@@ -1953,14 +2004,15 @@
const DPTestParam param = GetParam();
const audio_attributes_t& attr = param.attributes;
- audio_port_handle_t playbackRoutedPortId = AUDIO_PORT_HANDLE_NONE;
- getOutputForAttr(&playbackRoutedPortId, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
+ DeviceIdVector playbackRoutedPortIds;
+ getOutputForAttr(&playbackRoutedPortIds, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
k48000SamplingRate, AUDIO_OUTPUT_FLAG_NONE, nullptr /*output*/, nullptr /*portId*/,
attr, param.session);
if (param.expected_match) {
- EXPECT_EQ(mInjectionPort.id, playbackRoutedPortId);
+ ASSERT_EQ(mInjectionPort.id, playbackRoutedPortIds.at(0));
} else {
- EXPECT_NE(mInjectionPort.id, playbackRoutedPortId);
+ ASSERT_GT(playbackRoutedPortIds.size(), 0);
+ ASSERT_NE(mInjectionPort.id, playbackRoutedPortIds[0]);
}
}
@@ -2129,7 +2181,7 @@
audio_config_t audioConfig;
audio_io_handle_t mOutput;
audio_stream_type_t mStream = AUDIO_STREAM_DEFAULT;
- audio_port_handle_t mSelectedDeviceId = AUDIO_PORT_HANDLE_NONE;
+ DeviceIdVector mSelectedDeviceIds;
audio_port_handle_t mPortId = AUDIO_PORT_HANDLE_NONE;
AudioPolicyInterface::output_type_t mOutputType;
audio_attributes_t attr = AUDIO_ATTRIBUTES_INITIALIZER;
@@ -2154,7 +2206,7 @@
ASSERT_EQ(INVALID_OPERATION,
mManager->getOutputForAttr(&attr, &mOutput, AUDIO_SESSION_NONE, &mStream,
createAttributionSourceState(testUid), &audioConfig,
- &outputFlags, &mSelectedDeviceId, &mPortId, {},
+ &outputFlags, &mSelectedDeviceIds, &mPortId, {},
&mOutputType, &mIsSpatialized, &mIsBitPerfect, &mVolume,
&mMuted));
}
@@ -2174,7 +2226,7 @@
ASSERT_EQ(NO_ERROR,
mManager->getOutputForAttr(&attr, &mOutput, AUDIO_SESSION_NONE, &mStream,
createAttributionSourceState(testUid), &audioConfig,
- &outputFlags, &mSelectedDeviceId, &mPortId, {},
+ &outputFlags, &mSelectedDeviceIds, &mPortId, {},
&mOutputType, &mIsSpatialized, &mIsBitPerfect, &mVolume,
&mMuted));
}
@@ -2206,10 +2258,9 @@
ASSERT_EQ(NO_ERROR,
mManager->getOutputForAttr(&attr, &mOutput, AUDIO_SESSION_NONE, &mStream,
createAttributionSourceState(testUid), &audioConfig,
- &outputFlags, &mSelectedDeviceId, &mPortId, {},
+ &outputFlags, &mSelectedDeviceIds, &mPortId, {},
&mOutputType, &mIsSpatialized, &mIsBitPerfect, &mVolume,
&mMuted));
- ASSERT_EQ(usbDevicePort.id, mSelectedDeviceId);
auto outputDesc = mManager->getOutputs().valueFor(mOutput);
ASSERT_NE(nullptr, outputDesc);
ASSERT_EQ(mmapDirectFlags, outputDesc->getFlags().output);
@@ -2223,10 +2274,10 @@
ASSERT_EQ(NO_ERROR,
mManager->getOutputForAttr(&attr, &mOutput, AUDIO_SESSION_NONE, &mStream,
createAttributionSourceState(testUid), &audioConfig,
- &outputFlags, &mSelectedDeviceId, &mPortId, {},
+ &outputFlags, &mSelectedDeviceIds, &mPortId, {},
&mOutputType, &mIsSpatialized, &mIsBitPerfect, &mVolume,
&mMuted));
- ASSERT_EQ(usbDevicePort.id, mSelectedDeviceId);
+ ASSERT_EQ(usbDevicePort.id, mSelectedDeviceIds[0]);
outputDesc = mManager->getOutputs().valueFor(mOutput);
ASSERT_NE(nullptr, outputDesc);
ASSERT_NE(mmapDirectFlags, outputDesc->getFlags().output);
@@ -2253,7 +2304,7 @@
ASSERT_EQ(INVALID_OPERATION,
mManager->getOutputForAttr(&attr, &mOutput, AUDIO_SESSION_NONE, &mStream,
createAttributionSourceState(testUid), &audioConfig,
- &outputFlags, &mSelectedDeviceId, &mPortId, {},
+ &outputFlags, &mSelectedDeviceIds, &mPortId, {},
&mOutputType, &mIsSpatialized, &mIsBitPerfect, &mVolume,
&mMuted));
}
@@ -2298,13 +2349,13 @@
ASSERT_TRUE(findDevicePort(AUDIO_PORT_ROLE_SINK, AUDIO_DEVICE_OUT_REMOTE_SUBMIX,
mMixAddress, &injectionPort));
- audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
+ DeviceIdVector selectedDeviceIds;
audio_usage_t usage = AUDIO_USAGE_VIRTUAL_SOURCE;
audio_attributes_t attr =
{AUDIO_CONTENT_TYPE_UNKNOWN, usage, AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""};
std::string tags = std::string("addr=") + mMixAddress;
strncpy(attr.tags, tags.c_str(), AUDIO_ATTRIBUTES_TAGS_MAX_SIZE - 1);
- getOutputForAttr(&selectedDeviceId, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
+ getOutputForAttr(&selectedDeviceIds, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
k48000SamplingRate, AUDIO_OUTPUT_FLAG_NONE, nullptr /*output*/, &mPortId, attr);
ASSERT_EQ(NO_ERROR, mManager->startOutput(mPortId));
ASSERT_EQ(injectionPort.id, getDeviceIdFromPatch(mClient->getLastAddedPatch()));
@@ -2502,19 +2553,21 @@
? AUDIO_PORT_ROLE_SINK : AUDIO_PORT_ROLE_SOURCE;
ASSERT_TRUE(findDevicePort(role, type, address, &devicePort));
- audio_port_handle_t routedPortId = devicePort.id;
// Try start input or output according to the device type
if (audio_is_output_devices(type)) {
- getOutputForAttr(&routedPortId, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
+ DeviceIdVector routedPortIds = { devicePort.id };
+ getOutputForAttr(&routedPortIds, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
k48000SamplingRate, AUDIO_OUTPUT_FLAG_NONE);
+ ASSERT_EQ(devicePort.id, routedPortIds.at(0));
} else if (audio_is_input_device(type)) {
+ audio_port_handle_t routedPortId = devicePort.id;
RecordingActivityTracker tracker;
audio_io_handle_t input = AUDIO_PORT_HANDLE_NONE;
getInputForAttr({}, &input, AUDIO_SESSION_NONE, tracker.getRiid(), &routedPortId,
AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_IN_STEREO, k48000SamplingRate,
AUDIO_INPUT_FLAG_NONE);
+ ASSERT_EQ(devicePort.id, routedPortId);
}
- ASSERT_EQ(devicePort.id, routedPortId);
ASSERT_EQ(NO_ERROR, mManager->setDeviceConnectionState(
type, AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE,
@@ -2775,24 +2828,24 @@
AUDIO_DEVICE_OUT_BUS, sCarBusMediaOutput, audioConfig);
ASSERT_EQ(NO_ERROR, ret);
- audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
+ DeviceIdVector selectedDeviceIds;
audio_io_handle_t output;
audio_port_handle_t portId;
- getOutputForAttr(&selectedDeviceId, AUDIO_FORMAT_E_AC3_JOC, AUDIO_CHANNEL_OUT_5POINT1,
+ getOutputForAttr(&selectedDeviceIds, AUDIO_FORMAT_E_AC3_JOC, AUDIO_CHANNEL_OUT_5POINT1,
k48000SamplingRate, AUDIO_OUTPUT_FLAG_DIRECT, &output, &portId);
- ASSERT_NE(AUDIO_PORT_HANDLE_NONE, selectedDeviceId);
+ ASSERT_GT(selectedDeviceIds.size(), 0);
sp<SwAudioOutputDescriptor> outDesc = mManager->getOutputs().valueFor(output);
ASSERT_NE(nullptr, outDesc.get());
ASSERT_EQ(AUDIO_FORMAT_E_AC3_JOC, outDesc->getFormat());
ASSERT_EQ(AUDIO_CHANNEL_OUT_5POINT1, outDesc->getChannelMask());
ASSERT_EQ(k48000SamplingRate, outDesc->getSamplingRate());
- selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
+ selectedDeviceIds.clear();
output = AUDIO_IO_HANDLE_NONE;
portId = AUDIO_PORT_HANDLE_NONE;
- getOutputForAttr(&selectedDeviceId, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_7POINT1POINT4,
+ getOutputForAttr(&selectedDeviceIds, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_7POINT1POINT4,
k48000SamplingRate, AUDIO_OUTPUT_FLAG_DIRECT, &output, &portId);
- ASSERT_NE(AUDIO_PORT_HANDLE_NONE, selectedDeviceId);
+ ASSERT_GT(selectedDeviceIds.size(), 0);
outDesc = mManager->getOutputs().valueFor(output);
ASSERT_NE(nullptr, outDesc.get());
ASSERT_EQ(AUDIO_FORMAT_PCM_16_BIT, outDesc->getFormat());
@@ -2812,25 +2865,25 @@
AUDIO_DEVICE_OUT_BUS, sCarBusMediaOutput, audioConfig, mediaMatchCriteria);
ASSERT_EQ(NO_ERROR, ret);
std::vector<AudioMixMatchCriterion> navMatchCriteria = {
- createUsageCriterion(AUDIO_USAGE_ASSISTANCE_NAVIGATION_GUIDANCE,
+ createUsageCriterion(AUDIO_USAGE_ASSISTANCE_NAVIGATION_GUIDANCE,
/*exclude=*/ false)};
ret = addPolicyMix(MIX_TYPE_PLAYERS, MIX_ROUTE_FLAG_RENDER,
- AUDIO_DEVICE_OUT_BUS, sCarBusNavigationOutput, audioConfig, navMatchCriteria);
+ AUDIO_DEVICE_OUT_BUS, sCarBusNavigationOutput, audioConfig, navMatchCriteria);
ASSERT_EQ(NO_ERROR, ret);
audio_port_v7 mediaDevicePort;
ASSERT_TRUE(findDevicePort(AUDIO_PORT_ROLE_SINK, AUDIO_DEVICE_OUT_BUS,
sCarBusMediaOutput, &mediaDevicePort));
- audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
+ DeviceIdVector selectedDeviceIds;
audio_io_handle_t output;
audio_port_handle_t portId;
const audio_attributes_t mediaAttribute = {
AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_MEDIA,
AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""};
- getOutputForAttr(&selectedDeviceId, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
+ getOutputForAttr(&selectedDeviceIds, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
k48000SamplingRate, AUDIO_OUTPUT_FLAG_DIRECT, &output, &portId, mediaAttribute);
- ASSERT_EQ(mediaDevicePort.id, selectedDeviceId);
+ ASSERT_EQ(mediaDevicePort.id, selectedDeviceIds[0]);
}
TEST_F(AudioPolicyManagerCarTest, GetOutputForAttrWithSelectedOutputAfterRegisteringPolicyMix) {
@@ -2845,25 +2898,25 @@
AUDIO_DEVICE_OUT_BUS, sCarBusMediaOutput, audioConfig, mediaMatchCriteria);
ASSERT_EQ(NO_ERROR, ret);
std::vector<AudioMixMatchCriterion> navMatchCriteria = {
- createUsageCriterion(AUDIO_USAGE_ASSISTANCE_NAVIGATION_GUIDANCE,
+ createUsageCriterion(AUDIO_USAGE_ASSISTANCE_NAVIGATION_GUIDANCE,
/*exclude=*/ false)};
ret = addPolicyMix(MIX_TYPE_PLAYERS, MIX_ROUTE_FLAG_RENDER,
- AUDIO_DEVICE_OUT_BUS, sCarBusNavigationOutput, audioConfig, navMatchCriteria);
+ AUDIO_DEVICE_OUT_BUS, sCarBusNavigationOutput, audioConfig, navMatchCriteria);
ASSERT_EQ(NO_ERROR, ret);
audio_port_v7 navDevicePort;
ASSERT_TRUE(findDevicePort(AUDIO_PORT_ROLE_SINK, AUDIO_DEVICE_OUT_BUS,
sCarBusNavigationOutput, &navDevicePort));
- audio_port_handle_t selectedDeviceId = navDevicePort.id;
+ DeviceIdVector selectedDeviceIds = { navDevicePort.id };
audio_io_handle_t output;
audio_port_handle_t portId;
const audio_attributes_t mediaAttribute = {
AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_MEDIA,
AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""};
- getOutputForAttr(&selectedDeviceId, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
+ getOutputForAttr(&selectedDeviceIds, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
k48000SamplingRate, AUDIO_OUTPUT_FLAG_DIRECT, &output, &portId, mediaAttribute);
- ASSERT_EQ(navDevicePort.id, selectedDeviceId);
+ ASSERT_EQ(navDevicePort.id, selectedDeviceIds[0]);
}
TEST_F(AudioPolicyManagerCarTest, GetOutputForAttrWithSelectedOutputAfterUserAffinities) {
@@ -2878,10 +2931,10 @@
AUDIO_DEVICE_OUT_BUS, sCarBusMediaOutput, audioConfig, mediaMatchCriteria);
ASSERT_EQ(NO_ERROR, ret);
std::vector<AudioMixMatchCriterion> navMatchCriteria = {
- createUsageCriterion(AUDIO_USAGE_ASSISTANCE_NAVIGATION_GUIDANCE,
+ createUsageCriterion(AUDIO_USAGE_ASSISTANCE_NAVIGATION_GUIDANCE,
/*exclude=*/ false)};
ret = addPolicyMix(MIX_TYPE_PLAYERS, MIX_ROUTE_FLAG_RENDER,
- AUDIO_DEVICE_OUT_BUS, sCarBusNavigationOutput, audioConfig, navMatchCriteria);
+ AUDIO_DEVICE_OUT_BUS, sCarBusNavigationOutput, audioConfig, navMatchCriteria);
ASSERT_EQ(NO_ERROR, ret);
const AudioDeviceTypeAddr mediaOutputDevice(AUDIO_DEVICE_OUT_BUS, sCarBusMediaOutput);
const AudioDeviceTypeAddrVector outputDevices = {mediaOutputDevice};
@@ -2889,17 +2942,18 @@
audio_port_v7 navDevicePort;
ASSERT_TRUE(findDevicePort(AUDIO_PORT_ROLE_SINK, AUDIO_DEVICE_OUT_BUS,
sCarBusNavigationOutput, &navDevicePort));
- audio_port_handle_t selectedDeviceId = navDevicePort.id;
+ DeviceIdVector selectedDeviceIds = { navDevicePort.id };
audio_io_handle_t output;
audio_port_handle_t portId;
const audio_attributes_t mediaAttribute = {
AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_MEDIA,
AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""};
- getOutputForAttr(&selectedDeviceId, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
+ getOutputForAttr(&selectedDeviceIds, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
k48000SamplingRate, AUDIO_OUTPUT_FLAG_DIRECT, &output, &portId, mediaAttribute);
- ASSERT_NE(navDevicePort.id, selectedDeviceId);
+ ASSERT_GT(selectedDeviceIds.size(), 0);
+ ASSERT_NE(navDevicePort.id, selectedDeviceIds[0]);
}
TEST_F(AudioPolicyManagerCarTest, GetOutputForAttrWithExcludeUserIdCriteria) {
@@ -2914,11 +2968,11 @@
AUDIO_DEVICE_OUT_BUS, sCarBusMediaOutput, audioConfig, mediaMatchCriteria);
ASSERT_EQ(NO_ERROR, ret);
std::vector<AudioMixMatchCriterion> navMatchCriteria = {
- createUsageCriterion(AUDIO_USAGE_ASSISTANCE_NAVIGATION_GUIDANCE,
+ createUsageCriterion(AUDIO_USAGE_ASSISTANCE_NAVIGATION_GUIDANCE,
/*exclude=*/ false),
- createUserIdCriterion(/* userId */ 0, /* exclude */ true)};
+ createUserIdCriterion(/* userId */ 0, /* exclude */ true)};
ret = addPolicyMix(MIX_TYPE_PLAYERS, MIX_ROUTE_FLAG_RENDER,
- AUDIO_DEVICE_OUT_BUS, sCarBusNavigationOutput, audioConfig, navMatchCriteria);
+ AUDIO_DEVICE_OUT_BUS, sCarBusNavigationOutput, audioConfig, navMatchCriteria);
ASSERT_EQ(NO_ERROR, ret);
audio_port_v7 navDevicePort;
ASSERT_TRUE(findDevicePort(AUDIO_PORT_ROLE_SINK, AUDIO_DEVICE_OUT_BUS,
@@ -2926,14 +2980,15 @@
audio_io_handle_t output;
audio_port_handle_t portId;
const audio_attributes_t navigationAttribute = {
- AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_ASSISTANCE_NAVIGATION_GUIDANCE,
- AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""};
- audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
+ AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_ASSISTANCE_NAVIGATION_GUIDANCE,
+ AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""};
+ DeviceIdVector selectedDeviceIds;
- getOutputForAttr(&selectedDeviceId, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
+ getOutputForAttr(&selectedDeviceIds, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
k48000SamplingRate, AUDIO_OUTPUT_FLAG_DIRECT, &output, &portId, navigationAttribute);
- ASSERT_NE(navDevicePort.id, selectedDeviceId);
+ ASSERT_GT(selectedDeviceIds.size(), 0);
+ ASSERT_NE(navDevicePort.id, selectedDeviceIds[0]);
}
TEST_F(AudioPolicyManagerCarTest, GetOutputForAttrWithSelectedOutputExcludeUserIdCriteria) {
@@ -2948,30 +3003,30 @@
AUDIO_DEVICE_OUT_BUS, sCarBusMediaOutput, audioConfig, mediaMatchCriteria);
ASSERT_EQ(NO_ERROR, ret);
std::vector<AudioMixMatchCriterion> navMatchCriteria = {
- createUsageCriterion(AUDIO_USAGE_ASSISTANCE_NAVIGATION_GUIDANCE,
+ createUsageCriterion(AUDIO_USAGE_ASSISTANCE_NAVIGATION_GUIDANCE,
/*exclude=*/ false),
- createUserIdCriterion(0 /* userId */, /* exclude */ true)};
+ createUserIdCriterion(0 /* userId */, /* exclude */ true)};
ret = addPolicyMix(MIX_TYPE_PLAYERS, MIX_ROUTE_FLAG_RENDER,
- AUDIO_DEVICE_OUT_BUS, sCarBusNavigationOutput, audioConfig, navMatchCriteria);
+ AUDIO_DEVICE_OUT_BUS, sCarBusNavigationOutput, audioConfig, navMatchCriteria);
ASSERT_EQ(NO_ERROR, ret);
audio_port_v7 navDevicePort;
ASSERT_TRUE(findDevicePort(AUDIO_PORT_ROLE_SINK, AUDIO_DEVICE_OUT_BUS,
- sCarBusNavigationOutput, &navDevicePort));
- audio_port_handle_t selectedDeviceId = navDevicePort.id;
+ sCarBusNavigationOutput, &navDevicePort));
+ DeviceIdVector selectedDeviceIds = { navDevicePort.id };
audio_io_handle_t output;
audio_port_handle_t portId;
const audio_attributes_t mediaAttribute = {
- AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_MEDIA,
- AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""};
+ AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_MEDIA,
+ AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""};
- getOutputForAttr(&selectedDeviceId, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
+ getOutputForAttr(&selectedDeviceIds, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
k48000SamplingRate, AUDIO_OUTPUT_FLAG_DIRECT, &output, &portId, mediaAttribute);
- ASSERT_EQ(navDevicePort.id, selectedDeviceId);
+ ASSERT_EQ(navDevicePort.id, selectedDeviceIds[0]);
}
TEST_F(AudioPolicyManagerCarTest,
- GetOutputForAttrWithMatchingMixAndSelectedOutputAfterUserAffinities) {
+ GetOutputForAttrWithMatchingMixAndSelectedOutputAfterUserAffinities) {
status_t ret;
audio_config_t audioConfig = AUDIO_CONFIG_INITIALIZER;
audioConfig.channel_mask = AUDIO_CHANNEL_OUT_STEREO;
@@ -2983,10 +3038,10 @@
AUDIO_DEVICE_OUT_BUS, sCarBusMediaOutput, audioConfig, mediaMatchCriteria);
ASSERT_EQ(NO_ERROR, ret);
std::vector<AudioMixMatchCriterion> navMatchCriteria = {
- createUsageCriterion(AUDIO_USAGE_ASSISTANCE_NAVIGATION_GUIDANCE,
+ createUsageCriterion(AUDIO_USAGE_ASSISTANCE_NAVIGATION_GUIDANCE,
/*exclude=*/ false)};
ret = addPolicyMix(MIX_TYPE_PLAYERS, MIX_ROUTE_FLAG_RENDER,
- AUDIO_DEVICE_OUT_BUS, sCarBusNavigationOutput, audioConfig, navMatchCriteria);
+ AUDIO_DEVICE_OUT_BUS, sCarBusNavigationOutput, audioConfig, navMatchCriteria);
ASSERT_EQ(NO_ERROR, ret);
const AudioDeviceTypeAddr mediaOutputDevice(AUDIO_DEVICE_OUT_BUS, sCarBusMediaOutput);
const AudioDeviceTypeAddr navOutputDevice(AUDIO_DEVICE_OUT_BUS, sCarBusNavigationOutput);
@@ -2995,21 +3050,21 @@
audio_port_v7 navDevicePort;
ASSERT_TRUE(findDevicePort(AUDIO_PORT_ROLE_SINK, AUDIO_DEVICE_OUT_BUS,
sCarBusNavigationOutput, &navDevicePort));
- audio_port_handle_t selectedDeviceId = navDevicePort.id;
+ DeviceIdVector selectedDeviceIds = { navDevicePort.id };
audio_io_handle_t output;
audio_port_handle_t portId;
const audio_attributes_t mediaAttribute = {
- AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_MEDIA,
- AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""};
+ AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_MEDIA,
+ AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""};
- getOutputForAttr(&selectedDeviceId, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
+ getOutputForAttr(&selectedDeviceIds, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
k48000SamplingRate, AUDIO_OUTPUT_FLAG_DIRECT, &output, &portId, mediaAttribute);
- ASSERT_EQ(navDevicePort.id, selectedDeviceId);
+ ASSERT_EQ(navDevicePort.id, selectedDeviceIds[0]);
}
TEST_F(AudioPolicyManagerCarTest,
- GetOutputForAttrWithNoMatchingMaxAndSelectedOutputAfterUserAffinities) {
+ GetOutputForAttrWithNoMatchingMaxAndSelectedOutputAfterUserAffinities) {
status_t ret;
audio_config_t audioConfig = AUDIO_CONFIG_INITIALIZER;
audioConfig.channel_mask = AUDIO_CHANNEL_OUT_STEREO;
@@ -3021,10 +3076,10 @@
AUDIO_DEVICE_OUT_BUS, sCarBusMediaOutput, audioConfig, mediaMatchCriteria);
ASSERT_EQ(NO_ERROR, ret);
std::vector<AudioMixMatchCriterion> navMatchCriteria = {
- createUsageCriterion(AUDIO_USAGE_ASSISTANCE_NAVIGATION_GUIDANCE,
+ createUsageCriterion(AUDIO_USAGE_ASSISTANCE_NAVIGATION_GUIDANCE,
/*exclude=*/ false)};
ret = addPolicyMix(MIX_TYPE_PLAYERS, MIX_ROUTE_FLAG_RENDER,
- AUDIO_DEVICE_OUT_BUS, sCarBusNavigationOutput, audioConfig, navMatchCriteria);
+ AUDIO_DEVICE_OUT_BUS, sCarBusNavigationOutput, audioConfig, navMatchCriteria);
ASSERT_EQ(NO_ERROR, ret);
const AudioDeviceTypeAddr mediaOutputDevice(AUDIO_DEVICE_OUT_BUS, sCarBusMediaOutput);
const AudioDeviceTypeAddr navOutputDevice(AUDIO_DEVICE_OUT_BUS, sCarBusNavigationOutput);
@@ -3033,21 +3088,21 @@
audio_port_v7 navDevicePort;
ASSERT_TRUE(findDevicePort(AUDIO_PORT_ROLE_SINK, AUDIO_DEVICE_OUT_BUS,
sCarBusNavigationOutput, &navDevicePort));
- audio_port_handle_t selectedDeviceId = navDevicePort.id;
+ DeviceIdVector selectedDeviceIds = { navDevicePort.id };
audio_io_handle_t output;
audio_port_handle_t portId;
const audio_attributes_t alarmAttribute = {
- AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_ALARM,
- AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""};
+ AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_ALARM,
+ AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""};
- getOutputForAttr(&selectedDeviceId, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
+ getOutputForAttr(&selectedDeviceIds, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
k48000SamplingRate, AUDIO_OUTPUT_FLAG_DIRECT, &output, &portId, alarmAttribute);
- ASSERT_EQ(navDevicePort.id, selectedDeviceId);
+ ASSERT_EQ(navDevicePort.id, selectedDeviceIds[0]);
}
TEST_F(AudioPolicyManagerCarTest,
- GetOutputForAttrWithMatMixAfterUserAffinitiesForOneUser) {
+ GetOutputForAttrWithMatMixAfterUserAffinitiesForOneUser) {
status_t ret;
audio_config_t audioConfig = AUDIO_CONFIG_INITIALIZER;
audioConfig.channel_mask = AUDIO_CHANNEL_OUT_STEREO;
@@ -3070,23 +3125,23 @@
audio_port_v7 primaryZoneDevicePort;
ASSERT_TRUE(findDevicePort(AUDIO_PORT_ROLE_SINK, AUDIO_DEVICE_OUT_BUS,
sCarBusMediaOutput, &primaryZoneDevicePort));
- audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
+ DeviceIdVector selectedDeviceIds;
audio_io_handle_t output;
audio_port_handle_t portId;
const audio_attributes_t mediaAttribute = {
- AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_MEDIA,
- AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""};
+ AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_MEDIA,
+ AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""};
uid_t user11AppUid = multiuser_get_uid(/* user_id */ 11, /* app_id */ 12345);
- getOutputForAttr(&selectedDeviceId, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
+ getOutputForAttr(&selectedDeviceIds, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
k48000SamplingRate, AUDIO_OUTPUT_FLAG_DIRECT, &output, &portId, mediaAttribute,
AUDIO_SESSION_NONE, user11AppUid);
- ASSERT_EQ(primaryZoneDevicePort.id, selectedDeviceId);
+ ASSERT_EQ(primaryZoneDevicePort.id, selectedDeviceIds[0]);
}
TEST_F(AudioPolicyManagerCarTest,
- GetOutputForAttrWithMatMixAfterUserAffinitiesForTwoUsers) {
+ GetOutputForAttrWithMatMixAfterUserAffinitiesForTwoUsers) {
status_t ret;
audio_config_t audioConfig = AUDIO_CONFIG_INITIALIZER;
audioConfig.channel_mask = AUDIO_CHANNEL_OUT_STEREO;
@@ -3112,23 +3167,23 @@
audio_port_v7 secondaryZoneDevicePort;
ASSERT_TRUE(findDevicePort(AUDIO_PORT_ROLE_SINK, AUDIO_DEVICE_OUT_BUS,
sCarRearZoneOneOutput, &secondaryZoneDevicePort));
- audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
+ DeviceIdVector selectedDeviceIds;
audio_io_handle_t output;
audio_port_handle_t portId;
const audio_attributes_t mediaAttribute = {
- AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_MEDIA,
- AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""};
+ AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_MEDIA,
+ AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""};
uid_t user11AppUid = multiuser_get_uid(/* user_id */ 11, /* app_id */ 12345);
- getOutputForAttr(&selectedDeviceId, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
+ getOutputForAttr(&selectedDeviceIds, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
k48000SamplingRate, AUDIO_OUTPUT_FLAG_DIRECT, &output, &portId, mediaAttribute,
AUDIO_SESSION_NONE, user11AppUid);
- ASSERT_EQ(secondaryZoneDevicePort.id, selectedDeviceId);
+ ASSERT_EQ(secondaryZoneDevicePort.id, selectedDeviceIds[0]);
}
TEST_F(AudioPolicyManagerCarTest,
- GetOutputForAttrWithMatMixAfterUserAffinitiesForThreeUsers) {
+ GetOutputForAttrWithMatMixAfterUserAffinitiesForThreeUsers) {
status_t ret;
audio_config_t audioConfig = AUDIO_CONFIG_INITIALIZER;
audioConfig.channel_mask = AUDIO_CHANNEL_OUT_STEREO;
@@ -3157,19 +3212,19 @@
audio_port_v7 tertiaryZoneDevicePort;
ASSERT_TRUE(findDevicePort(AUDIO_PORT_ROLE_SINK, AUDIO_DEVICE_OUT_BUS,
sCarRearZoneTwoOutput, &tertiaryZoneDevicePort));
- audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
+ DeviceIdVector selectedDeviceIds;
audio_io_handle_t output;
audio_port_handle_t portId;
const audio_attributes_t mediaAttribute = {
- AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_MEDIA,
- AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""};
+ AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_MEDIA,
+ AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""};
uid_t user15AppUid = multiuser_get_uid(/* user_id */ 15, /* app_id */ 12345);
- getOutputForAttr(&selectedDeviceId, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
+ getOutputForAttr(&selectedDeviceIds, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
k48000SamplingRate, AUDIO_OUTPUT_FLAG_DIRECT, &output, &portId, mediaAttribute,
AUDIO_SESSION_NONE, user15AppUid);
- ASSERT_EQ(tertiaryZoneDevicePort.id, selectedDeviceId);
+ ASSERT_EQ(tertiaryZoneDevicePort.id, selectedDeviceIds[0]);
}
TEST_F(AudioPolicyManagerCarTest, GetOutputForAttrWithNoMatchingMix) {
@@ -3184,10 +3239,10 @@
AUDIO_DEVICE_OUT_BUS, sCarBusMediaOutput, audioConfig, mediaMatchCriteria);
ASSERT_EQ(NO_ERROR, ret);
std::vector<AudioMixMatchCriterion> navMatchCriteria = {
- createUsageCriterion(AUDIO_USAGE_ASSISTANCE_NAVIGATION_GUIDANCE,
+ createUsageCriterion(AUDIO_USAGE_ASSISTANCE_NAVIGATION_GUIDANCE,
/*exclude=*/ false)};
ret = addPolicyMix(MIX_TYPE_PLAYERS, MIX_ROUTE_FLAG_RENDER,
- AUDIO_DEVICE_OUT_BUS, sCarBusNavigationOutput, audioConfig, navMatchCriteria);
+ AUDIO_DEVICE_OUT_BUS, sCarBusNavigationOutput, audioConfig, navMatchCriteria);
ASSERT_EQ(NO_ERROR, ret);
const AudioDeviceTypeAddr mediaOutputDevice(AUDIO_DEVICE_OUT_BUS, sCarBusMediaOutput);
const AudioDeviceTypeAddr navOutputDevice(AUDIO_DEVICE_OUT_BUS, sCarBusNavigationOutput);
@@ -3196,17 +3251,17 @@
audio_port_v7 navDevicePort;
ASSERT_TRUE(findDevicePort(AUDIO_PORT_ROLE_SINK, AUDIO_DEVICE_OUT_BUS,
sCarBusNavigationOutput, &navDevicePort));
- audio_port_handle_t selectedDeviceId = navDevicePort.id;
+ DeviceIdVector selectedDeviceIds = { navDevicePort.id };
audio_io_handle_t output;
audio_port_handle_t portId;
const audio_attributes_t alarmAttribute = {
- AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_ALARM,
- AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""};
+ AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_ALARM,
+ AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""};
- getOutputForAttr(&selectedDeviceId, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
+ getOutputForAttr(&selectedDeviceIds, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
k48000SamplingRate, AUDIO_OUTPUT_FLAG_DIRECT, &output, &portId, alarmAttribute);
- ASSERT_EQ(navDevicePort.id, selectedDeviceId);
+ ASSERT_EQ(navDevicePort.id, selectedDeviceIds[0]);
}
TEST_F(AudioPolicyManagerCarTest, GetOutputForAttrForMMapWithPolicyMatched) {
@@ -3218,13 +3273,13 @@
std::vector<AudioMixMatchCriterion> mediaMatchCriteria = {
createUsageCriterion(AUDIO_USAGE_MEDIA, /*exclude=*/ false)};
ret = addPolicyMix(MIX_TYPE_PLAYERS, MIX_ROUTE_FLAG_RENDER,
- AUDIO_DEVICE_OUT_BUS, sCarBusMmapOutput, audioConfig, mediaMatchCriteria);
+ AUDIO_DEVICE_OUT_BUS, sCarBusMmapOutput, audioConfig, mediaMatchCriteria);
ASSERT_EQ(NO_ERROR, ret);
ASSERT_EQ(NO_ERROR, ret);
audio_port_v7 mmapDevicePort;
ASSERT_TRUE(findDevicePort(AUDIO_PORT_ROLE_SINK, AUDIO_DEVICE_OUT_BUS,
- sCarBusMmapOutput, &mmapDevicePort));
- audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
+ sCarBusMmapOutput, &mmapDevicePort));
+ DeviceIdVector selectedDeviceIds;
audio_io_handle_t output;
audio_port_handle_t portId;
const audio_attributes_t mediaAttribute = {
@@ -3232,12 +3287,13 @@
AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""};
getOutputForAttr(
- &selectedDeviceId, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
+ &selectedDeviceIds, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
k48000SamplingRate,
(audio_output_flags_t)(AUDIO_OUTPUT_FLAG_MMAP_NOIRQ | AUDIO_OUTPUT_FLAG_DIRECT),
&output, &portId, mediaAttribute);
- ASSERT_EQ(mmapDevicePort.id, selectedDeviceId);
+ ASSERT_EQ(mmapDevicePort.id, selectedDeviceIds[0]);
+
}
class AudioPolicyManagerTVTest : public AudioPolicyManagerTestWithConfigurationFile {
@@ -3257,10 +3313,10 @@
ASSERT_EQ(NO_ERROR, mManager->setDeviceConnectionState(
AUDIO_DEVICE_OUT_AUX_DIGITAL, AUDIO_POLICY_DEVICE_STATE_AVAILABLE,
"" /*address*/, "" /*name*/, AUDIO_FORMAT_DEFAULT));
- audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
+ DeviceIdVector selectedDeviceIds;
audio_io_handle_t output;
audio_port_handle_t portId;
- getOutputForAttr(&selectedDeviceId, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
+ getOutputForAttr(&selectedDeviceIds, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
k48000SamplingRate, flags, &output, &portId);
sp<SwAudioOutputDescriptor> outDesc = mManager->getOutputs().valueFor(output);
ASSERT_NE(nullptr, outDesc.get());
@@ -3339,7 +3395,7 @@
void AudioPolicyManagerPhoneTest::testOutputMixPortSelectionForAttr(
audio_output_flags_t flags, audio_format_t format, int samplingRate, bool isMusic,
const char* expectedMixPortName) {
- audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
+ DeviceIdVector selectedDeviceIds;
audio_io_handle_t output;
audio_port_handle_t portId;
audio_attributes_t attr = AUDIO_ATTRIBUTES_INITIALIZER;
@@ -3347,7 +3403,7 @@
attr.content_type = AUDIO_CONTENT_TYPE_MUSIC;
attr.usage = AUDIO_USAGE_MEDIA;
}
- getOutputForAttr(&selectedDeviceId, format, AUDIO_CHANNEL_OUT_STEREO, samplingRate, flags,
+ getOutputForAttr(&selectedDeviceIds, format, AUDIO_CHANNEL_OUT_STEREO, samplingRate, flags,
&output, &portId, attr);
EXPECT_NO_FATAL_FAILURE(verifyMixPortNameAndFlags(output, expectedMixPortName));
mManager->releaseOutput(portId);
@@ -3916,7 +3972,7 @@
audio_port_handle_t mUsbPortId = AUDIO_PORT_HANDLE_NONE;
audio_io_handle_t mBitPerfectOutput = AUDIO_IO_HANDLE_NONE;
- audio_port_handle_t mSelectedDeviceId = AUDIO_PORT_HANDLE_NONE;
+ DeviceIdVector mSelectedDeviceIds;
audio_port_handle_t mBitPerfectPortId = AUDIO_PORT_HANDLE_NONE;
static constexpr audio_attributes_t sMediaAttr = {
@@ -3975,12 +4031,12 @@
reset();
bool isBitPerfect;
- getOutputForAttr(&mSelectedDeviceId, mBitPerfectFormat, mBitPerfectChannelMask,
+ getOutputForAttr(&mSelectedDeviceIds, mBitPerfectFormat, mBitPerfectChannelMask,
mBitPerfectSampleRate, AUDIO_OUTPUT_FLAG_NONE, &mBitPerfectOutput,
&mBitPerfectPortId, sMediaAttr, AUDIO_SESSION_NONE, mUid, &isBitPerfect);
status_t status = mManager->startOutput(mBitPerfectPortId);
if (status == DEAD_OBJECT) {
- getOutputForAttr(&mSelectedDeviceId, mBitPerfectFormat, mBitPerfectChannelMask,
+ getOutputForAttr(&mSelectedDeviceIds, mBitPerfectFormat, mBitPerfectChannelMask,
mBitPerfectSampleRate, AUDIO_OUTPUT_FLAG_NONE, &mBitPerfectOutput,
&mBitPerfectPortId, sMediaAttr, AUDIO_SESSION_NONE, mUid, &isBitPerfect);
status = mManager->startOutput(mBitPerfectPortId);
@@ -3996,8 +4052,8 @@
void AudioPolicyManagerTestBitPerfectBase::reset() {
mBitPerfectOutput = AUDIO_IO_HANDLE_NONE;
- mSelectedDeviceId = AUDIO_PORT_HANDLE_NONE;
mBitPerfectPortId = AUDIO_PORT_HANDLE_NONE;
+ mSelectedDeviceIds.clear();
}
void AudioPolicyManagerTestBitPerfectBase::getBitPerfectOutput(status_t expected) {
@@ -4017,7 +4073,7 @@
EXPECT_EQ(expected,
mManager->getOutputForAttr(&sMediaAttr, &mBitPerfectOutput, AUDIO_SESSION_NONE,
&stream, attributionSource, &config, &flags,
- &mSelectedDeviceId, &mBitPerfectPortId, {}, &outputType,
+ &mSelectedDeviceIds, &mBitPerfectPortId, {}, &outputType,
&isSpatialized, &isBitPerfect, &volume, &muted));
}
@@ -4027,13 +4083,13 @@
TEST_F(AudioPolicyManagerTestBitPerfect, UseBitPerfectOutput) {
const uid_t anotherUid = 5678;
audio_io_handle_t output = AUDIO_IO_HANDLE_NONE;
- audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
+ DeviceIdVector selectedDeviceIds;
audio_port_handle_t portId = AUDIO_PORT_HANDLE_NONE;
bool isBitPerfect;
// When there is no active bit-perfect playback, the output selection will follow default
// routing strategy.
- getOutputForAttr(&selectedDeviceId, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_QUAD,
+ getOutputForAttr(&selectedDeviceIds, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_QUAD,
48000, AUDIO_OUTPUT_FLAG_NONE, &output, &portId, sMediaAttr,
AUDIO_SESSION_NONE, mUid, &isBitPerfect);
EXPECT_FALSE(isBitPerfect);
@@ -4047,14 +4103,14 @@
// If the playback is from preferred mixer attributes owner but the request doesn't match
// preferred mixer attributes, it will not be bit-perfect.
- getOutputForAttr(&selectedDeviceId, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_QUAD,
+ getOutputForAttr(&selectedDeviceIds, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_QUAD,
48000, AUDIO_OUTPUT_FLAG_NONE, &output, &portId, sMediaAttr,
AUDIO_SESSION_NONE, mUid, &isBitPerfect);
EXPECT_FALSE(isBitPerfect);
EXPECT_EQ(mBitPerfectOutput, output);
// When bit-perfect playback is active, all other playback will be routed to bit-perfect output.
- getOutputForAttr(&selectedDeviceId, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
+ getOutputForAttr(&selectedDeviceIds, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
48000, AUDIO_OUTPUT_FLAG_NONE, &output, &portId, sMediaAttr,
AUDIO_SESSION_NONE, anotherUid, &isBitPerfect);
EXPECT_FALSE(isBitPerfect);
@@ -4066,9 +4122,9 @@
.usage = AUDIO_USAGE_VOICE_COMMUNICATION_SIGNALLING,
};
audio_io_handle_t dtmfOutput = AUDIO_IO_HANDLE_NONE;
- selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
+ selectedDeviceIds.clear();
portId = AUDIO_PORT_HANDLE_NONE;
- getOutputForAttr(&selectedDeviceId, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
+ getOutputForAttr(&selectedDeviceIds, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
48000, AUDIO_OUTPUT_FLAG_NONE, &dtmfOutput, &portId, dtmfAttr,
AUDIO_SESSION_NONE, anotherUid, &isBitPerfect);
EXPECT_FALSE(isBitPerfect);
@@ -4076,7 +4132,7 @@
// When configuration matches preferred mixer attributes, which is bit-perfect, but the client
// is not the owner of preferred mixer attributes, the playback will not be bit-perfect.
- getOutputForAttr(&selectedDeviceId, mBitPerfectFormat, mBitPerfectChannelMask,
+ getOutputForAttr(&selectedDeviceIds, mBitPerfectFormat, mBitPerfectChannelMask,
mBitPerfectSampleRate, AUDIO_OUTPUT_FLAG_NONE, &output, &portId, sMediaAttr,
AUDIO_SESSION_NONE, anotherUid, &isBitPerfect);
EXPECT_FALSE(isBitPerfect);
@@ -4102,9 +4158,9 @@
.content_type = AUDIO_CONTENT_TYPE_SONIFICATION,
.usage = AUDIO_USAGE_ASSISTANCE_SONIFICATION,
};
- audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
+ DeviceIdVector selectedDeviceIds;
bool isBitPerfect;
- getOutputForAttr(&selectedDeviceId, mBitPerfectFormat, mBitPerfectChannelMask,
+ getOutputForAttr(&selectedDeviceIds, mBitPerfectFormat, mBitPerfectChannelMask,
anotherSampleRate, AUDIO_OUTPUT_FLAG_NONE, &systemSoundOutput,
&systemSoundPortId, systemSoundAttr, AUDIO_SESSION_NONE, mUid, &isBitPerfect);
EXPECT_FALSE(isBitPerfect);
@@ -4124,7 +4180,7 @@
.content_type = AUDIO_CONTENT_TYPE_SONIFICATION,
.usage = AUDIO_USAGE_NOTIFICATION,
};
- getOutputForAttr(&selectedDeviceId, mBitPerfectFormat, mBitPerfectChannelMask,
+ getOutputForAttr(&selectedDeviceIds, mBitPerfectFormat, mBitPerfectChannelMask,
anotherSampleRate, AUDIO_OUTPUT_FLAG_NONE, ¬ificationOutput,
¬ificationPortId, notificationAttr, AUDIO_SESSION_NONE, mUid,
&isBitPerfect);
@@ -4191,11 +4247,11 @@
.content_type = AUDIO_CONTENT_TYPE_UNKNOWN,
.usage = GetParam(),
};
- audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
+ DeviceIdVector selectedDeviceIds;
audio_port_handle_t portId = AUDIO_PORT_HANDLE_NONE;
audio_io_handle_t output = AUDIO_IO_HANDLE_NONE;
ASSERT_NO_FATAL_FAILURE(
- getOutputForAttr(&selectedDeviceId, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
+ getOutputForAttr(&selectedDeviceIds, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
48000, AUDIO_OUTPUT_FLAG_NONE, &output, &portId, attr));
EXPECT_NE(mBitPerfectOutput, output);
EXPECT_EQ(NO_ERROR, mManager->startOutput(portId));
diff --git a/services/camera/libcameraservice/aidl/VndkVersionMetadataTags.h b/services/camera/libcameraservice/aidl/VndkVersionMetadataTags.h
index 158ee69..2030c68 100644
--- a/services/camera/libcameraservice/aidl/VndkVersionMetadataTags.h
+++ b/services/camera/libcameraservice/aidl/VndkVersionMetadataTags.h
@@ -96,6 +96,12 @@
{36, {
ANDROID_COLOR_CORRECTION_AVAILABLE_MODES,
ANDROID_COLOR_CORRECTION_COLOR_TEMPERATURE_RANGE,
+ ANDROID_HEIC_AVAILABLE_HEIC_ULTRA_HDR_MIN_FRAME_DURATIONS,
+ ANDROID_HEIC_AVAILABLE_HEIC_ULTRA_HDR_MIN_FRAME_DURATIONS_MAXIMUM_RESOLUTION,
+ ANDROID_HEIC_AVAILABLE_HEIC_ULTRA_HDR_STALL_DURATIONS,
+ ANDROID_HEIC_AVAILABLE_HEIC_ULTRA_HDR_STALL_DURATIONS_MAXIMUM_RESOLUTION,
+ ANDROID_HEIC_AVAILABLE_HEIC_ULTRA_HDR_STREAM_CONFIGURATIONS,
+ ANDROID_HEIC_AVAILABLE_HEIC_ULTRA_HDR_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION,
} },
};
diff --git a/services/camera/libcameraservice/api2/HeicCompositeStream.cpp b/services/camera/libcameraservice/api2/HeicCompositeStream.cpp
index 0f4ba65..768eaf8 100644
--- a/services/camera/libcameraservice/api2/HeicCompositeStream.cpp
+++ b/services/camera/libcameraservice/api2/HeicCompositeStream.cpp
@@ -27,54 +27,79 @@
#include <aidl/android/hardware/camera/device/CameraBlobId.h>
#include <camera/StringUtils.h>
#include <com_android_graphics_libgui_flags.h>
+#include <com_android_internal_camera_flags.h>
#include <gui/Surface.h>
#include <libyuv.h>
#include <utils/Log.h>
#include <utils/Trace.h>
+#include <ultrahdr/jpegr.h>
+#include <ultrahdr/ultrahdrcommon.h>
-#include <mediadrm/ICrypto.h>
#include <media/MediaCodecBuffer.h>
+#include <media/stagefright/MediaCodecConstants.h>
+#include <media/stagefright/MetaData.h>
#include <media/stagefright/foundation/ABuffer.h>
#include <media/stagefright/foundation/MediaDefs.h>
-#include <media/stagefright/MediaCodecConstants.h>
+#include <mediadrm/ICrypto.h>
+#include <memory>
+#include "HeicCompositeStream.h"
+#include "HeicEncoderInfoManager.h"
#include "common/CameraDeviceBase.h"
+#include "system/camera_metadata.h"
#include "utils/ExifUtils.h"
#include "utils/SessionConfigurationUtils.h"
#include "utils/Utils.h"
-#include "HeicEncoderInfoManager.h"
-#include "HeicCompositeStream.h"
using aidl::android::hardware::camera::device::CameraBlob;
using aidl::android::hardware::camera::device::CameraBlobId;
+namespace flags = com::android::internal::camera::flags;
+
namespace android {
namespace camera3 {
HeicCompositeStream::HeicCompositeStream(sp<CameraDeviceBase> device,
- wp<hardware::camera2::ICameraDeviceCallbacks> cb) :
- CompositeStream(device, cb),
- mUseHeic(false),
- mNumOutputTiles(1),
- mOutputWidth(0),
- mOutputHeight(0),
- mMaxHeicBufferSize(0),
- mGridWidth(HeicEncoderInfoManager::kGridWidth),
- mGridHeight(HeicEncoderInfoManager::kGridHeight),
- mGridRows(1),
- mGridCols(1),
- mUseGrid(false),
- mAppSegmentStreamId(-1),
- mAppSegmentSurfaceId(-1),
- mMainImageStreamId(-1),
- mMainImageSurfaceId(-1),
- mYuvBufferAcquired(false),
- mStreamSurfaceListener(new StreamSurfaceListener()),
- mDequeuedOutputBufferCnt(0),
- mCodecOutputCounter(0),
- mQuality(-1),
- mGridTimestampUs(0),
- mStatusId(StatusTracker::NO_STATUS_ID) {
+ wp<hardware::camera2::ICameraDeviceCallbacks> cb)
+ : CompositeStream(device, cb),
+ mUseHeic(false),
+ mNumOutputTiles(1),
+ mNumGainmapOutputTiles(1),
+ mOutputWidth(0),
+ mOutputHeight(0),
+ mGainmapOutputWidth(0),
+ mGainmapOutputHeight(0),
+ mMaxHeicBufferSize(0),
+ mGridWidth(HeicEncoderInfoManager::kGridWidth),
+ mGridHeight(HeicEncoderInfoManager::kGridHeight),
+ mGainmapGridWidth(HeicEncoderInfoManager::kGridWidth),
+ mGainmapGridHeight(HeicEncoderInfoManager::kGridHeight),
+ mGridRows(1),
+ mGridCols(1),
+ mGainmapGridRows(1),
+ mGainmapGridCols(1),
+ mUseGrid(false),
+ mGainmapUseGrid(false),
+ mAppSegmentStreamId(-1),
+ mAppSegmentSurfaceId(-1),
+ mMainImageStreamId(-1),
+ mMainImageSurfaceId(-1),
+ mYuvBufferAcquired(false),
+ mStreamSurfaceListener(new StreamSurfaceListener()),
+ mDequeuedOutputBufferCnt(0),
+ mCodecOutputCounter(0),
+ mCodecGainmapOutputCounter(0),
+ mQuality(-1),
+ mGridTimestampUs(0),
+ mStatusId(StatusTracker::NO_STATUS_ID) {
+ mStaticInfo = device->info();
+ camera_metadata_entry halHeicSupport = mStaticInfo.find(ANDROID_HEIC_INFO_SUPPORTED);
+ if (halHeicSupport.count == 1 &&
+ halHeicSupport.data.u8[0] == ANDROID_HEIC_INFO_SUPPORTED_TRUE) {
+ // The camera device supports the HEIC stream combination,
+ // use the standard stream combintion.
+ mAppSegmentSupported = true;
+ }
}
HeicCompositeStream::~HeicCompositeStream() {
@@ -84,6 +109,7 @@
mInputAppSegmentBuffers.clear();
mCodecOutputBuffers.clear();
+ mGainmapCodecOutputBuffers.clear();
mAppSegmentStreamId = -1;
mAppSegmentSurfaceId = -1;
@@ -97,7 +123,8 @@
}
bool HeicCompositeStream::isHeicCompositeStreamInfo(const OutputStreamInfo& streamInfo) {
- return ((streamInfo.dataSpace == static_cast<android_dataspace_t>(HAL_DATASPACE_HEIF)) &&
+ return ((streamInfo.dataSpace == static_cast<android_dataspace_t>(HAL_DATASPACE_HEIF) ||
+ (streamInfo.dataSpace == static_cast<android_dataspace_t>(kUltraHDRDataSpace))) &&
(streamInfo.format == HAL_PIXEL_FORMAT_BLOB));
}
@@ -120,7 +147,8 @@
return false;
}
- return ((format == HAL_PIXEL_FORMAT_BLOB) && (dataspace == HAL_DATASPACE_HEIF));
+ return ((format == HAL_PIXEL_FORMAT_BLOB) && ((dataspace == HAL_DATASPACE_HEIF) ||
+ (dataspace == static_cast<int>(kUltraHDRDataSpace))));
}
status_t HeicCompositeStream::createInternalStreams(const std::vector<SurfaceHolder>& consumers,
@@ -130,13 +158,27 @@
std::vector<int> *surfaceIds,
int /*streamSetId*/, bool /*isShared*/, int32_t colorSpace,
int64_t /*dynamicProfile*/, int64_t /*streamUseCase*/, bool useReadoutTimestamp) {
+
sp<CameraDeviceBase> device = mDevice.promote();
if (!device.get()) {
ALOGE("%s: Invalid camera device!", __FUNCTION__);
return NO_INIT;
}
- status_t res = initializeCodec(width, height, device);
+ ANativeWindow* anw = consumers[0].mSurface.get();
+ int dataspace;
+ status_t res;
+ if ((res = anw->query(anw, NATIVE_WINDOW_DEFAULT_DATASPACE, &dataspace)) != OK) {
+ ALOGE("%s: Failed to query Surface dataspace: %s (%d)", __FUNCTION__, strerror(-res),
+ res);
+ return res;
+ }
+ if ((dataspace == static_cast<int>(kUltraHDRDataSpace)) && flags::camera_heif_gainmap()) {
+ mHDRGainmapEnabled = true;
+ mInternalDataSpace = static_cast<android_dataspace_t>(HAL_DATASPACE_BT2020_HLG);
+ }
+
+ res = initializeCodec(width, height, device);
if (res != OK) {
ALOGE("%s: Failed to initialize HEIC/HEVC codec: %s (%d)",
__FUNCTION__, strerror(-res), res);
@@ -144,42 +186,48 @@
}
#if COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
- mAppSegmentConsumer = new CpuConsumer(kMaxAcquiredAppSegment);
- mAppSegmentConsumer->setFrameAvailableListener(this);
- mAppSegmentConsumer->setName(String8("Camera3-HeicComposite-AppSegmentStream"));
- mAppSegmentSurface = mAppSegmentConsumer->getSurface();
- sp<IGraphicBufferProducer> producer = mAppSegmentSurface->getIGraphicBufferProducer();
+ if (mAppSegmentSupported) {
+ mAppSegmentConsumer = new CpuConsumer(kMaxAcquiredAppSegment);
+ mAppSegmentConsumer->setFrameAvailableListener(this);
+ mAppSegmentConsumer->setName(String8("Camera3-HeicComposite-AppSegmentStream"));
+ mAppSegmentSurface = mAppSegmentConsumer->getSurface();
+ }
+ sp<IGraphicBufferProducer> producer = mAppSegmentSurface.get() != nullptr ?
+ mAppSegmentSurface->getIGraphicBufferProducer() : nullptr;
#else
sp<IGraphicBufferProducer> producer;
sp<IGraphicBufferConsumer> consumer;
- BufferQueue::createBufferQueue(&producer, &consumer);
- mAppSegmentConsumer = new CpuConsumer(consumer, kMaxAcquiredAppSegment);
- mAppSegmentConsumer->setFrameAvailableListener(this);
- mAppSegmentConsumer->setName(String8("Camera3-HeicComposite-AppSegmentStream"));
- mAppSegmentSurface = new Surface(producer);
+ if (mAppSegmentSupported) {
+ BufferQueue::createBufferQueue(&producer, &consumer);
+ mAppSegmentConsumer = new CpuConsumer(consumer, kMaxAcquiredAppSegment);
+ mAppSegmentConsumer->setFrameAvailableListener(this);
+ mAppSegmentConsumer->setName(String8("Camera3-HeicComposite-AppSegmentStream"));
+ mAppSegmentSurface = new Surface(producer);
+ }
#endif // COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
- mStaticInfo = device->info();
-
- res = device->createStream(mAppSegmentSurface, mAppSegmentMaxSize, 1, format,
- kAppSegmentDataSpace, rotation, &mAppSegmentStreamId, physicalCameraId,
- sensorPixelModesUsed, surfaceIds, camera3::CAMERA3_STREAM_SET_ID_INVALID,
- /*isShared*/false, /*isMultiResolution*/false,
- /*consumerUsage*/0, ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD,
- ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
- OutputConfiguration::TIMESTAMP_BASE_DEFAULT,
- OutputConfiguration::MIRROR_MODE_AUTO,
- colorSpace,
- useReadoutTimestamp);
- if (res == OK) {
- mAppSegmentSurfaceId = (*surfaceIds)[0];
- } else {
- ALOGE("%s: Failed to create JPEG App segment stream: %s (%d)", __FUNCTION__,
- strerror(-res), res);
- return res;
+ if (mAppSegmentSupported) {
+ std::vector<int> sourceSurfaceId;
+ res = device->createStream(mAppSegmentSurface, mAppSegmentMaxSize, 1, format,
+ kAppSegmentDataSpace, rotation, &mAppSegmentStreamId, physicalCameraId,
+ sensorPixelModesUsed, &sourceSurfaceId, camera3::CAMERA3_STREAM_SET_ID_INVALID,
+ /*isShared*/false, /*isMultiResolution*/false,
+ /*consumerUsage*/0, ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD,
+ ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
+ OutputConfiguration::TIMESTAMP_BASE_DEFAULT,
+ OutputConfiguration::MIRROR_MODE_AUTO,
+ colorSpace,
+ useReadoutTimestamp);
+ if (res == OK) {
+ mAppSegmentSurfaceId = sourceSurfaceId[0];
+ } else {
+ ALOGE("%s: Failed to create JPEG App segment stream: %s (%d)", __FUNCTION__,
+ strerror(-res), res);
+ return res;
+ }
}
- if (!mUseGrid) {
+ if (!mUseGrid && !mHDRGainmapEnabled) {
res = mCodec->createInputSurface(&producer);
if (res != OK) {
ALOGE("%s: Failed to create input surface for Heic codec: %s (%d)",
@@ -206,21 +254,32 @@
return res;
}
- std::vector<int> sourceSurfaceId;
- //Use YUV_888 format if framework tiling is needed.
- int srcStreamFmt = mUseGrid ? HAL_PIXEL_FORMAT_YCbCr_420_888 :
- HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
- res = device->createStream(mMainImageSurface, width, height, srcStreamFmt, kHeifDataSpace,
- rotation, id, physicalCameraId, sensorPixelModesUsed, &sourceSurfaceId,
+ if (mHDRGainmapEnabled) {
+ res = mGainmapCodec->start();
+ if (res != OK) {
+ ALOGE("%s: Failed to start gainmap codec: %s (%d)", __FUNCTION__,
+ strerror(-res), res);
+ return res;
+ }
+ }
+
+ //Use YUV_420 format if framework tiling is needed.
+ int srcStreamFmt = mHDRGainmapEnabled ?
+ static_cast<android_pixel_format_t>(HAL_PIXEL_FORMAT_YCBCR_P010) : mUseGrid ?
+ HAL_PIXEL_FORMAT_YCbCr_420_888 : HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
+ res = device->createStream(mMainImageSurface, width, height, srcStreamFmt, mInternalDataSpace,
+ rotation, id, physicalCameraId, sensorPixelModesUsed, surfaceIds,
camera3::CAMERA3_STREAM_SET_ID_INVALID, /*isShared*/false, /*isMultiResolution*/false,
- /*consumerUsage*/0, ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD,
+ /*consumerUsage*/0, mHDRGainmapEnabled ?
+ ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_HLG10 :
+ ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD,
ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
OutputConfiguration::TIMESTAMP_BASE_DEFAULT,
OutputConfiguration::MIRROR_MODE_AUTO,
colorSpace,
useReadoutTimestamp);
if (res == OK) {
- mMainImageSurfaceId = sourceSurfaceId[0];
+ mMainImageSurfaceId = (*surfaceIds)[0];
mMainImageStreamId = *id;
} else {
ALOGE("%s: Failed to create main image stream: %s (%d)", __FUNCTION__,
@@ -236,11 +295,13 @@
return res;
}
- res = registerCompositeStreamListener(mAppSegmentStreamId);
- if (res != OK) {
- ALOGE("%s: Failed to register HAL app segment stream: %s (%d)", __FUNCTION__,
- strerror(-res), res);
- return res;
+ if (mAppSegmentSupported) {
+ res = registerCompositeStreamListener(mAppSegmentStreamId);
+ if (res != OK) {
+ ALOGE("%s: Failed to register HAL app segment stream: %s (%d)", __FUNCTION__,
+ strerror(-res), res);
+ return res;
+ }
}
initCopyRowFunction(width);
@@ -299,6 +360,9 @@
mCodecOutputBufferFrameNumbers.push(bufferInfo.mFrameNumber);
ALOGV("%s: [%" PRId64 "]: Adding main image frame number (%zu frame numbers in total)",
__FUNCTION__, bufferInfo.mFrameNumber, mMainImageFrameNumbers.size());
+ if (mHDRGainmapEnabled) {
+ mCodecGainmapOutputBufferFrameNumbers.push(bufferInfo.mFrameNumber);
+ }
} else if (bufferInfo.mStreamId == mAppSegmentStreamId) {
mAppSegmentFrameNumbers.push(bufferInfo.mFrameNumber);
ALOGV("%s: [%" PRId64 "]: Adding app segment frame number (%zu frame numbers in total)",
@@ -346,13 +410,13 @@
mInputAppSegmentBuffers.push_back(item.mTimestamp);
mInputReadyCondition.signal();
}
- } else if (item.mDataSpace == kHeifDataSpace) {
- ALOGV("%s: YUV_888 buffer with ts: %" PRIu64 " ms. arrived!",
+ } else if (item.mDataSpace == mInternalDataSpace) {
+ ALOGV("%s: YUV_420 buffer with ts: %" PRIu64 " ms. arrived!",
__func__, ns2ms(item.mTimestamp));
Mutex::Autolock l(mMutex);
- if (!mUseGrid) {
- ALOGE("%s: YUV_888 internal stream is only supported for HEVC tiling",
+ if (!mUseGrid && !mHDRGainmapEnabled) {
+ ALOGE("%s: YUV_420 internal stream is only supported for HEVC tiling",
__FUNCTION__);
return;
}
@@ -367,6 +431,7 @@
status_t HeicCompositeStream::getCompositeStreamInfo(const OutputStreamInfo &streamInfo,
const CameraMetadata& ch, std::vector<OutputStreamInfo>* compositeOutput /*out*/) {
+ bool gainmapEnabled = false;
if (compositeOutput == nullptr) {
return BAD_VALUE;
}
@@ -381,30 +446,44 @@
return OK;
}
- compositeOutput->insert(compositeOutput->end(), 2, streamInfo);
+ if (streamInfo.dataSpace == static_cast<android_dataspace_t>(kUltraHDRDataSpace)) {
+ gainmapEnabled = true;
+ }
- // JPEG APPS segments Blob stream info
- (*compositeOutput)[0].width = calcAppSegmentMaxSize(ch);
- (*compositeOutput)[0].height = 1;
- (*compositeOutput)[0].format = HAL_PIXEL_FORMAT_BLOB;
- (*compositeOutput)[0].dataSpace = kAppSegmentDataSpace;
- (*compositeOutput)[0].consumerUsage = GRALLOC_USAGE_SW_READ_OFTEN;
+ compositeOutput->clear();
+ compositeOutput->push_back({});
// YUV/IMPLEMENTATION_DEFINED stream info
- (*compositeOutput)[1].width = streamInfo.width;
- (*compositeOutput)[1].height = streamInfo.height;
- (*compositeOutput)[1].format = useGrid ? HAL_PIXEL_FORMAT_YCbCr_420_888 :
- HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
- (*compositeOutput)[1].dataSpace = kHeifDataSpace;
- (*compositeOutput)[1].consumerUsage = useHeic ? GRALLOC_USAGE_HW_IMAGE_ENCODER :
+ (*compositeOutput)[0].width = streamInfo.width;
+ (*compositeOutput)[0].height = streamInfo.height;
+ (*compositeOutput)[0].format = gainmapEnabled ?
+ static_cast<android_pixel_format_t>(HAL_PIXEL_FORMAT_YCBCR_P010) : useGrid ?
+ HAL_PIXEL_FORMAT_YCbCr_420_888 : HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
+ (*compositeOutput)[0].dataSpace = gainmapEnabled ?
+ static_cast<android_dataspace_t>(HAL_DATASPACE_BT2020_HLG) : kHeifDataSpace;
+ (*compositeOutput)[0].consumerUsage = useHeic ? GRALLOC_USAGE_HW_IMAGE_ENCODER :
useGrid ? GRALLOC_USAGE_SW_READ_OFTEN : GRALLOC_USAGE_HW_VIDEO_ENCODER;
+
+ camera_metadata_ro_entry halHeicSupport = ch.find(ANDROID_HEIC_INFO_SUPPORTED);
+ if (halHeicSupport.count == 1 &&
+ halHeicSupport.data.u8[0] == ANDROID_HEIC_INFO_SUPPORTED_TRUE) {
+
+ compositeOutput->push_back({});
+ // JPEG APPS segments Blob stream info
+ (*compositeOutput)[1].width = calcAppSegmentMaxSize(ch);
+ (*compositeOutput)[1].height = 1;
+ (*compositeOutput)[1].format = HAL_PIXEL_FORMAT_BLOB;
+ (*compositeOutput)[1].dataSpace = kAppSegmentDataSpace;
+ (*compositeOutput)[1].consumerUsage = GRALLOC_USAGE_SW_READ_OFTEN;
+ }
+
return NO_ERROR;
}
bool HeicCompositeStream::isSizeSupportedByHeifEncoder(int32_t width, int32_t height,
- bool* useHeic, bool* useGrid, int64_t* stall, AString* hevcName) {
- static HeicEncoderInfoManager& heicManager = HeicEncoderInfoManager::getInstance();
+ bool* useHeic, bool* useGrid, int64_t* stall, AString* hevcName, bool allowSWCodec) {
+ static HeicEncoderInfoManager& heicManager = HeicEncoderInfoManager::getInstance(allowSWCodec);
return heicManager.isSizeSupported(width, height, useHeic, useGrid, stall, hevcName);
}
@@ -421,7 +500,7 @@
}
void HeicCompositeStream::onHeicOutputFrameAvailable(
- const CodecOutputBufferInfo& outputBufferInfo) {
+ const CodecOutputBufferInfo& outputBufferInfo, bool isGainmap) {
Mutex::Autolock l(mMutex);
ALOGV("%s: index %d, offset %d, size %d, time %" PRId64 ", flags 0x%x",
@@ -431,31 +510,34 @@
if (!mErrorState) {
if ((outputBufferInfo.size > 0) &&
((outputBufferInfo.flags & MediaCodec::BUFFER_FLAG_CODECCONFIG) == 0)) {
- mCodecOutputBuffers.push_back(outputBufferInfo);
+ isGainmap ? mGainmapCodecOutputBuffers.push_back(outputBufferInfo) :
+ mCodecOutputBuffers.push_back(outputBufferInfo);
mInputReadyCondition.signal();
} else {
ALOGV("%s: Releasing output buffer: size %d flags: 0x%x ", __FUNCTION__,
outputBufferInfo.size, outputBufferInfo.flags);
- mCodec->releaseOutputBuffer(outputBufferInfo.index);
+ isGainmap ? mGainmapCodec->releaseOutputBuffer(outputBufferInfo.index) :
+ mCodec->releaseOutputBuffer(outputBufferInfo.index);
}
} else {
- mCodec->releaseOutputBuffer(outputBufferInfo.index);
+ isGainmap ? mGainmapCodec->releaseOutputBuffer(outputBufferInfo.index) :
+ mCodec->releaseOutputBuffer(outputBufferInfo.index);
}
}
-void HeicCompositeStream::onHeicInputFrameAvailable(int32_t index) {
+void HeicCompositeStream::onHeicInputFrameAvailable(int32_t index, bool isGainmap) {
Mutex::Autolock l(mMutex);
- if (!mUseGrid) {
+ if (!mUseGrid && !mHDRGainmapEnabled) {
ALOGE("%s: Codec YUV input mode must only be used for Hevc tiling mode", __FUNCTION__);
return;
}
- mCodecInputBuffers.push_back(index);
+ isGainmap ? mGainmapCodecInputBuffers.push_back(index) : mCodecInputBuffers.push_back(index);
mInputReadyCondition.signal();
}
-void HeicCompositeStream::onHeicFormatChanged(sp<AMessage>& newFormat) {
+void HeicCompositeStream::onHeicGainmapFormatChanged(sp<AMessage>& newFormat) {
if (newFormat == nullptr) {
ALOGE("%s: newFormat must not be null!", __FUNCTION__);
return;
@@ -470,6 +552,66 @@
// For HEVC codec, below keys need to be filled out or overwritten so that the
// muxer can handle them as HEIC output image.
newFormat->setString(KEY_MIME, mimeHeic);
+ newFormat->setInt32(KEY_WIDTH, mGainmapOutputWidth);
+ newFormat->setInt32(KEY_HEIGHT, mGainmapOutputHeight);
+ }
+
+ if (mGainmapUseGrid) {
+ int32_t gridRows, gridCols, tileWidth, tileHeight;
+ if (newFormat->findInt32(KEY_GRID_ROWS, &gridRows) &&
+ newFormat->findInt32(KEY_GRID_COLUMNS, &gridCols) &&
+ newFormat->findInt32(KEY_TILE_WIDTH, &tileWidth) &&
+ newFormat->findInt32(KEY_TILE_HEIGHT, &tileHeight)) {
+ mGainmapGridWidth = tileWidth;
+ mGainmapGridHeight = tileHeight;
+ mGainmapGridRows = gridRows;
+ mGainmapGridCols = gridCols;
+ } else {
+ newFormat->setInt32(KEY_TILE_WIDTH, mGainmapGridWidth);
+ newFormat->setInt32(KEY_TILE_HEIGHT, mGainmapGridHeight);
+ newFormat->setInt32(KEY_GRID_ROWS, mGainmapGridRows);
+ newFormat->setInt32(KEY_GRID_COLUMNS, mGainmapGridCols);
+ }
+ int32_t left, top, right, bottom;
+ if (newFormat->findRect("crop", &left, &top, &right, &bottom)) {
+ newFormat->setRect("crop", 0, 0, mGainmapOutputWidth - 1, mGainmapOutputHeight - 1);
+ }
+ }
+ newFormat->setInt32(KEY_IS_DEFAULT, 1 /*isPrimary*/);
+
+ int32_t gridRows, gridCols;
+ if (newFormat->findInt32(KEY_GRID_ROWS, &gridRows) &&
+ newFormat->findInt32(KEY_GRID_COLUMNS, &gridCols)) {
+ mNumGainmapOutputTiles = gridRows * gridCols;
+ } else {
+ mNumGainmapOutputTiles = 1;
+ }
+
+ mGainmapFormat = newFormat;
+
+ ALOGV("%s: mNumOutputTiles is %zu", __FUNCTION__, mNumOutputTiles);
+ mInputReadyCondition.signal();
+}
+
+
+void HeicCompositeStream::onHeicFormatChanged(sp<AMessage>& newFormat, bool isGainmap) {
+ if (newFormat == nullptr) {
+ ALOGE("%s: newFormat must not be null!", __FUNCTION__);
+ return;
+ }
+
+ if (isGainmap) {
+ return onHeicGainmapFormatChanged(newFormat);
+ }
+ Mutex::Autolock l(mMutex);
+
+ AString mime;
+ AString mimeHeic(MIMETYPE_IMAGE_ANDROID_HEIC);
+ newFormat->findString(KEY_MIME, &mime);
+ if (mime != mimeHeic) {
+ // For HEVC codec, below keys need to be filled out or overwritten so that the
+ // muxer can handle them as HEIC output image.
+ newFormat->setString(KEY_MIME, mimeHeic);
newFormat->setInt32(KEY_WIDTH, mOutputWidth);
newFormat->setInt32(KEY_HEIGHT, mOutputHeight);
}
@@ -577,10 +719,12 @@
status_t HeicCompositeStream::insertGbp(SurfaceMap* /*out*/outSurfaceMap,
Vector<int32_t>* /*out*/outputStreamIds, int32_t* /*out*/currentStreamId) {
- if (outSurfaceMap->find(mAppSegmentStreamId) == outSurfaceMap->end()) {
- outputStreamIds->push_back(mAppSegmentStreamId);
+ if (mAppSegmentSupported) {
+ if (outSurfaceMap->find(mAppSegmentStreamId) == outSurfaceMap->end()) {
+ outputStreamIds->push_back(mAppSegmentStreamId);
+ }
+ (*outSurfaceMap)[mAppSegmentStreamId].push_back(mAppSegmentSurfaceId);
}
- (*outSurfaceMap)[mAppSegmentStreamId].push_back(mAppSegmentSurfaceId);
if (outSurfaceMap->find(mMainImageStreamId) == outSurfaceMap->end()) {
outputStreamIds->push_back(mMainImageStreamId);
@@ -600,7 +744,9 @@
return BAD_VALUE;
}
- compositeStreamIds->push_back(mAppSegmentStreamId);
+ if (mAppSegmentSupported) {
+ compositeStreamIds->push_back(mAppSegmentStreamId);
+ }
compositeStreamIds->push_back(mMainImageStreamId);
return OK;
@@ -762,6 +908,31 @@
mCodecOutputBuffers.erase(it);
}
+ while (!mGainmapCodecOutputBuffers.empty()) {
+ auto it = mGainmapCodecOutputBuffers.begin();
+ // Assume encoder input to output is FIFO, use a queue to look up
+ // frameNumber when handling codec outputs.
+ int64_t bufferFrameNumber = -1;
+ if (mCodecGainmapOutputBufferFrameNumbers.empty()) {
+ ALOGV("%s: Failed to find buffer frameNumber for gainmap codec output buffer!",
+ __FUNCTION__);
+ break;
+ } else {
+ // Direct mapping between camera frame number and codec timestamp (in us).
+ bufferFrameNumber = mCodecGainmapOutputBufferFrameNumbers.front();
+ mCodecGainmapOutputCounter++;
+ if (mCodecGainmapOutputCounter == mNumGainmapOutputTiles) {
+ mCodecGainmapOutputBufferFrameNumbers.pop();
+ mCodecGainmapOutputCounter = 0;
+ }
+
+ mPendingInputFrames[bufferFrameNumber].gainmapCodecOutputBuffers.push_back(*it);
+ ALOGV("%s: [%" PRId64 "]: Pushing gainmap codecOutputBuffers (frameNumber %" PRId64 ")",
+ __FUNCTION__, bufferFrameNumber, it->timeUs);
+ }
+ mGainmapCodecOutputBuffers.erase(it);
+ }
+
while (!mCaptureResults.empty()) {
auto it = mCaptureResults.begin();
// Negative frame number indicates that something went wrong during the capture result
@@ -772,6 +943,9 @@
if (mPendingInputFrames[frameNumber].timestamp == it->first) {
mPendingInputFrames[frameNumber].result =
std::make_unique<CameraMetadata>(std::get<1>(it->second));
+ if (!mAppSegmentSupported) {
+ mPendingInputFrames[frameNumber].exifError = true;
+ }
} else {
ALOGE("%s: Capture result frameNumber/timestamp mapping changed between "
"shutter and capture result! before: %" PRId64 ", after: %" PRId64,
@@ -825,6 +999,27 @@
break;
}
}
+
+ // Distribute codec input buffers to be filled out from YUV output
+ for (auto it = mPendingInputFrames.begin();
+ it != mPendingInputFrames.end() && mGainmapCodecInputBuffers.size() > 0; it++) {
+ InputFrame& inputFrame(it->second);
+ if (inputFrame.gainmapCodecInputCounter < mGainmapGridRows * mGainmapGridCols) {
+ // Available input tiles that are required for the current input
+ // image.
+ size_t newInputTiles = std::min(mGainmapCodecInputBuffers.size(),
+ mGainmapGridRows * mGainmapGridCols - inputFrame.gainmapCodecInputCounter);
+ for (size_t i = 0; i < newInputTiles; i++) {
+ CodecInputBufferInfo inputInfo = { mGainmapCodecInputBuffers[0],
+ mGridTimestampUs++, inputFrame.gainmapCodecInputCounter };
+ inputFrame.gainmapCodecInputBuffers.push_back(inputInfo);
+
+ mGainmapCodecInputBuffers.erase(mGainmapCodecInputBuffers.begin());
+ inputFrame.gainmapCodecInputCounter++;
+ }
+ break;
+ }
+ }
}
bool HeicCompositeStream::getNextReadyInputLocked(int64_t *frameNumber /*out*/) {
@@ -845,7 +1040,8 @@
(it.second.appSegmentBuffer.data != nullptr || it.second.exifError) &&
!it.second.appSegmentWritten && it.second.result != nullptr &&
it.second.muxer != nullptr;
- bool codecOutputReady = !it.second.codecOutputBuffers.empty();
+ bool codecOutputReady = !it.second.codecOutputBuffers.empty() ||
+ !it.second.gainmapCodecOutputBuffers.empty();
bool codecInputReady = (it.second.yuvBuffer.data != nullptr) &&
(!it.second.codecInputBuffers.empty());
bool hasOutputBuffer = it.second.muxer != nullptr ||
@@ -856,6 +1052,9 @@
if (it.second.format == nullptr && mFormat != nullptr) {
it.second.format = mFormat->dup();
}
+ if (it.second.gainmapFormat == nullptr && mGainmapFormat != nullptr){
+ it.second.gainmapFormat = mGainmapFormat->dup();
+ }
newInputAvailable = true;
break;
}
@@ -886,11 +1085,15 @@
(inputFrame.appSegmentBuffer.data != nullptr || inputFrame.exifError) &&
!inputFrame.appSegmentWritten && inputFrame.result != nullptr &&
inputFrame.muxer != nullptr;
- bool codecOutputReady = inputFrame.codecOutputBuffers.size() > 0;
+ bool codecOutputReady = inputFrame.codecOutputBuffers.size() > 0 ||
+ inputFrame.gainmapCodecOutputBuffers.size() > 0;
bool codecInputReady = inputFrame.yuvBuffer.data != nullptr &&
!inputFrame.codecInputBuffers.empty();
+ bool gainmapCodecInputReady = inputFrame.gainmapImage.get() != nullptr &&
+ !inputFrame.gainmapCodecInputBuffers.empty();
bool hasOutputBuffer = inputFrame.muxer != nullptr ||
(mDequeuedOutputBufferCnt < kMaxOutputSurfaceProducerCount);
+ bool hasGainmapMetadata = !inputFrame.isoGainmapMetadata.empty();
ALOGV("%s: [%" PRId64 "]: appSegmentReady %d, codecOutputReady %d, codecInputReady %d,"
" dequeuedOutputBuffer %d, timestamp %" PRId64, __FUNCTION__, frameNumber,
@@ -899,6 +1102,15 @@
// Handle inputs for Hevc tiling
if (codecInputReady) {
+ if (mHDRGainmapEnabled && (inputFrame.baseBuffer.get() == nullptr)) {
+ auto res = generateBaseImageAndGainmap(inputFrame);
+ if (res != OK) {
+ ALOGE("%s: Error generating SDR base image and HDR gainmap: %s (%d)", __FUNCTION__,
+ strerror(-res), res);
+ return res;
+ }
+ }
+
res = processCodecInputFrame(inputFrame);
if (res != OK) {
ALOGE("%s: Failed to process codec input frame: %s (%d)", __FUNCTION__,
@@ -907,6 +1119,15 @@
}
}
+ if (gainmapCodecInputReady) {
+ res = processCodecGainmapInputFrame(inputFrame);
+ if (res != OK) {
+ ALOGE("%s: Failed to process gainmap codec input frame: %s (%d)", __FUNCTION__,
+ strerror(-res), res);
+ return res;
+ }
+ }
+
if (!(codecOutputReady && hasOutputBuffer) && !appSegmentReady) {
return OK;
}
@@ -923,6 +1144,31 @@
}
}
+ // Write the HDR gainmap metadata
+ if (hasGainmapMetadata) {
+ uint8_t kGainmapMetaMarker[] = {'t', 'm', 'a', 'p', '\0', '\0'};
+ sp<ABuffer> aBuffer =
+ new ABuffer(inputFrame.isoGainmapMetadata.size() + sizeof(kGainmapMetaMarker));
+ memcpy(aBuffer->data(), kGainmapMetaMarker, sizeof(kGainmapMetaMarker));
+ memcpy(aBuffer->data() + sizeof(kGainmapMetaMarker), inputFrame.isoGainmapMetadata.data(),
+ inputFrame.isoGainmapMetadata.size());
+
+ aBuffer->meta()->setInt32(KEY_COLOR_FORMAT, kCodecColorFormat);
+ aBuffer->meta()->setInt32("color-primaries", kCodecColorPrimaries);
+ aBuffer->meta()->setInt32("color-transfer", kCodecColorTransfer);
+ aBuffer->meta()->setInt32("color-matrix", kCodecColorMatrix);
+ aBuffer->meta()->setInt32("color-range", kCodecColorRange);
+ auto res = inputFrame.muxer->writeSampleData(aBuffer, inputFrame.trackIndex,
+ inputFrame.timestamp,
+ MediaCodec::BUFFER_FLAG_MUXER_DATA);
+ if (res != OK) {
+ ALOGE("%s: Failed to write HDR gainmap metadata to muxer: %s (%d)",
+ __FUNCTION__, strerror(-res), res);
+ return res;
+ }
+ inputFrame.isoGainmapMetadata.clear();
+ }
+
// Write JPEG APP segments data to the muxer.
if (appSegmentReady) {
res = processAppSegment(frameNumber, inputFrame);
@@ -943,7 +1189,17 @@
}
}
- if (inputFrame.pendingOutputTiles == 0) {
+ // Write media codec gainmap bitstream buffers to muxer.
+ while (!inputFrame.gainmapCodecOutputBuffers.empty()) {
+ res = processOneCodecGainmapOutputFrame(frameNumber, inputFrame);
+ if (res != OK) {
+ ALOGE("%s: Failed to process codec gainmap output frame: %s (%d)", __FUNCTION__,
+ strerror(-res), res);
+ return res;
+ }
+ }
+
+ if ((inputFrame.pendingOutputTiles == 0) && (inputFrame.gainmapPendingOutputTiles == 0)) {
if (inputFrame.appSegmentWritten) {
res = processCompletedInputFrame(frameNumber, inputFrame);
if (res != OK) {
@@ -1001,6 +1257,16 @@
inputFrame.trackIndex = trackId;
inputFrame.pendingOutputTiles = mNumOutputTiles;
+ if (inputFrame.gainmapFormat.get() != nullptr) {
+ trackId = inputFrame.muxer->addTrack(inputFrame.gainmapFormat);
+ if (trackId < 0) {
+ ALOGE("%s: Failed to addTrack to the muxer: %zd", __FUNCTION__, trackId);
+ return NO_INIT;
+ }
+ inputFrame.gainmapTrackIndex = trackId;
+ inputFrame.gainmapPendingOutputTiles = mNumGainmapOutputTiles;
+ }
+
res = inputFrame.muxer->start();
if (res != OK) {
ALOGE("%s: Failed to start MediaMuxer: %s (%d)",
@@ -1085,9 +1351,101 @@
inputFrame.appSegmentWritten = true;
// Release the buffer now so any pending input app segments can be processed
- mAppSegmentConsumer->unlockBuffer(inputFrame.appSegmentBuffer);
- inputFrame.appSegmentBuffer.data = nullptr;
- inputFrame.exifError = false;
+ if (!inputFrame.exifError) {
+ mAppSegmentConsumer->unlockBuffer(inputFrame.appSegmentBuffer);
+ inputFrame.appSegmentBuffer.data = nullptr;
+ inputFrame.exifError = false;
+ }
+
+ return OK;
+}
+
+status_t HeicCompositeStream::generateBaseImageAndGainmap(InputFrame &inputFrame) {
+ ultrahdr::JpegR jpegR(nullptr /*gles ctx*/, kGainmapScale);
+ inputFrame.baseBuffer = std::make_unique<ultrahdr::uhdr_raw_image_ext_t>(
+ kUltraHdrOutputFmt, kUltraHdrOutputGamut, kUltraHdrInputTransfer, kUltraHdrOutputRange,
+ inputFrame.yuvBuffer.width, inputFrame.yuvBuffer.height, 8/*stride*/);
+
+ uhdr_raw_image_t hdr_intent;
+ hdr_intent.fmt = kUltraHdrInputFmt;
+ hdr_intent.cg = kUltraHdrInputGamut;
+ hdr_intent.ct = kUltraHdrInputTransfer;
+ hdr_intent.range = kUltraHdrInputRange;
+ hdr_intent.w = inputFrame.yuvBuffer.width;
+ hdr_intent.h = inputFrame.yuvBuffer.height;
+ hdr_intent.planes[UHDR_PLANE_Y] = inputFrame.yuvBuffer.data;
+ hdr_intent.planes[UHDR_PLANE_UV] = inputFrame.yuvBuffer.dataCb;
+ hdr_intent.planes[UHDR_PLANE_V] = nullptr;
+ //libUltraHDR expects the stride in pixels
+ hdr_intent.stride[UHDR_PLANE_Y] = inputFrame.yuvBuffer.stride / 2;
+ hdr_intent.stride[UHDR_PLANE_UV] = inputFrame.yuvBuffer.chromaStride / 2;
+ hdr_intent.stride[UHDR_PLANE_V] = 0;
+ auto res = jpegR.toneMap(&hdr_intent, inputFrame.baseBuffer.get());
+ if (res.error_code == UHDR_CODEC_OK) {
+ ALOGV("%s: Base image tonemapped successfully", __FUNCTION__);
+ } else {
+ ALOGE("%s: Failed during HDR to SDR tonemap: %d", __FUNCTION__, res.error_code);
+ return BAD_VALUE;
+ }
+
+ inputFrame.baseImage = std::make_unique<CpuConsumer::LockedBuffer>();
+ *inputFrame.baseImage = inputFrame.yuvBuffer;
+ inputFrame.baseImage->data = reinterpret_cast<uint8_t*>(
+ inputFrame.baseBuffer->planes[UHDR_PLANE_Y]);
+ inputFrame.baseImage->dataCb = reinterpret_cast<uint8_t*>(
+ inputFrame.baseBuffer->planes[UHDR_PLANE_U]);
+ inputFrame.baseImage->dataCr = reinterpret_cast<uint8_t*>(
+ inputFrame.baseBuffer->planes[UHDR_PLANE_V]);
+ inputFrame.baseImage->chromaStep = 1;
+ inputFrame.baseImage->stride = inputFrame.baseBuffer->stride[UHDR_PLANE_Y];
+ inputFrame.baseImage->chromaStride = inputFrame.baseBuffer->stride[UHDR_PLANE_UV];
+ inputFrame.baseImage->dataSpace = HAL_DATASPACE_V0_JFIF;
+
+ ultrahdr::uhdr_gainmap_metadata_ext_t metadata;
+ res = jpegR.generateGainMap(inputFrame.baseBuffer.get(), &hdr_intent, &metadata,
+ inputFrame.gainmap, false /*sdr_is_601*/, true /*use_luminance*/);
+ if (res.error_code == UHDR_CODEC_OK) {
+ ALOGV("%s: HDR gainmap generated successfully!", __FUNCTION__);
+ } else {
+ ALOGE("%s: Failed HDR gainmap: %d", __FUNCTION__, res.error_code);
+ return BAD_VALUE;
+ }
+ // Ensure the gainmap U/V planes are all 0
+ inputFrame.gainmapChroma = std::make_unique<uint8_t[]>(
+ inputFrame.gainmap->w * inputFrame.gainmap->h / 2);
+ memset(inputFrame.gainmapChroma.get(), 0, inputFrame.gainmap->w * inputFrame.gainmap->h / 2);
+
+ ultrahdr::uhdr_gainmap_metadata_frac iso_secondary_metadata;
+ res = ultrahdr::uhdr_gainmap_metadata_frac::gainmapMetadataFloatToFraction(
+ &metadata, &iso_secondary_metadata);
+ if (res.error_code == UHDR_CODEC_OK) {
+ ALOGV("%s: HDR gainmap converted to fractions successfully!", __FUNCTION__);
+ } else {
+ ALOGE("%s: Failed to convert HDR gainmap to fractions: %d", __FUNCTION__,
+ res.error_code);
+ return BAD_VALUE;
+ }
+
+ res = ultrahdr::uhdr_gainmap_metadata_frac::encodeGainmapMetadata(&iso_secondary_metadata,
+ inputFrame.isoGainmapMetadata);
+ if (res.error_code == UHDR_CODEC_OK) {
+ ALOGV("%s: HDR gainmap encoded to ISO format successfully!", __FUNCTION__);
+ } else {
+ ALOGE("%s: Failed to encode HDR gainmap to ISO format: %d", __FUNCTION__,
+ res.error_code);
+ return BAD_VALUE;
+ }
+
+ inputFrame.gainmapImage = std::make_unique<CpuConsumer::LockedBuffer>();
+ *inputFrame.gainmapImage = inputFrame.yuvBuffer;
+ inputFrame.gainmapImage->data = reinterpret_cast<uint8_t*>(
+ inputFrame.gainmap->planes[UHDR_PLANE_Y]);
+ inputFrame.gainmapImage->dataCb = inputFrame.gainmapChroma.get();
+ inputFrame.gainmapImage->dataCr = inputFrame.gainmapChroma.get() + 1;
+ inputFrame.gainmapImage->chromaStep = 2;
+ inputFrame.gainmapImage->stride = inputFrame.gainmap->stride[UHDR_PLANE_Y];
+ inputFrame.gainmapImage->chromaStride = inputFrame.gainmap->w;
+ inputFrame.gainmapImage->dataSpace = HAL_DATASPACE_V0_JFIF;
return OK;
}
@@ -1115,7 +1473,9 @@
" timeUs %" PRId64, __FUNCTION__, tileX, tileY, top, left, width, height,
inputBuffer.timeUs);
- res = copyOneYuvTile(buffer, inputFrame.yuvBuffer, top, left, width, height);
+ auto yuvInput = (inputFrame.baseImage.get() != nullptr) ?
+ *inputFrame.baseImage.get() : inputFrame.yuvBuffer;
+ res = copyOneYuvTile(buffer, yuvInput, top, left, width, height);
if (res != OK) {
ALOGE("%s: Failed to copy YUV tile %s (%d)", __FUNCTION__,
strerror(-res), res);
@@ -1135,6 +1495,50 @@
return OK;
}
+status_t HeicCompositeStream::processCodecGainmapInputFrame(InputFrame &inputFrame) {
+ for (auto& inputBuffer : inputFrame.gainmapCodecInputBuffers) {
+ sp<MediaCodecBuffer> buffer;
+ auto res = mGainmapCodec->getInputBuffer(inputBuffer.index, &buffer);
+ if (res != OK) {
+ ALOGE("%s: Error getting codec input buffer: %s (%d)", __FUNCTION__,
+ strerror(-res), res);
+ return res;
+ }
+
+ // Copy one tile from source to destination.
+ size_t tileX = inputBuffer.tileIndex % mGainmapGridCols;
+ size_t tileY = inputBuffer.tileIndex / mGainmapGridCols;
+ size_t top = mGainmapGridHeight * tileY;
+ size_t left = mGainmapGridWidth * tileX;
+ size_t width = (tileX == static_cast<size_t>(mGainmapGridCols) - 1) ?
+ mGainmapOutputWidth - tileX * mGainmapGridWidth : mGainmapGridWidth;
+ size_t height = (tileY == static_cast<size_t>(mGainmapGridRows) - 1) ?
+ mGainmapOutputHeight - tileY * mGainmapGridHeight : mGainmapGridHeight;
+ ALOGV("%s: gainmap inputBuffer tileIndex [%zu, %zu], top %zu, left %zu, width %zu, "
+ "height %zu, timeUs %" PRId64, __FUNCTION__, tileX, tileY, top, left, width, height,
+ inputBuffer.timeUs);
+
+ auto yuvInput = *inputFrame.gainmapImage;
+ res = copyOneYuvTile(buffer, yuvInput, top, left, width, height);
+ if (res != OK) {
+ ALOGE("%s: Failed to copy YUV tile %s (%d)", __FUNCTION__,
+ strerror(-res), res);
+ return res;
+ }
+
+ res = mGainmapCodec->queueInputBuffer(inputBuffer.index, 0, buffer->capacity(),
+ inputBuffer.timeUs, 0, nullptr /*errorDetailMsg*/);
+ if (res != OK) {
+ ALOGE("%s: Failed to queueInputBuffer to Codec: %s (%d)",
+ __FUNCTION__, strerror(-res), res);
+ return res;
+ }
+ }
+
+ inputFrame.gainmapCodecInputBuffers.clear();
+ return OK;
+}
+
status_t HeicCompositeStream::processOneCodecOutputFrame(int64_t frameNumber,
InputFrame &inputFrame) {
auto it = inputFrame.codecOutputBuffers.begin();
@@ -1152,6 +1556,13 @@
}
sp<ABuffer> aBuffer = new ABuffer(buffer->data(), buffer->size());
+ if (mHDRGainmapEnabled) {
+ aBuffer->meta()->setInt32(KEY_COLOR_FORMAT, kCodecColorFormat);
+ aBuffer->meta()->setInt32("color-primaries", kCodecColorPrimaries);
+ aBuffer->meta()->setInt32("color-transfer", kCodecColorTransfer);
+ aBuffer->meta()->setInt32("color-matrix", kCodecColorMatrix);
+ aBuffer->meta()->setInt32("color-range", kCodecColorRange);
+ }
res = inputFrame.muxer->writeSampleData(
aBuffer, inputFrame.trackIndex, inputFrame.timestamp, 0 /*flags*/);
if (res != OK) {
@@ -1174,6 +1585,54 @@
return OK;
}
+status_t HeicCompositeStream::processOneCodecGainmapOutputFrame(int64_t frameNumber,
+ InputFrame &inputFrame) {
+ auto it = inputFrame.gainmapCodecOutputBuffers.begin();
+ sp<MediaCodecBuffer> buffer;
+ status_t res = mGainmapCodec->getOutputBuffer(it->index, &buffer);
+ if (res != OK) {
+ ALOGE("%s: Error getting Heic gainmap codec output buffer at index %d: %s (%d)",
+ __FUNCTION__, it->index, strerror(-res), res);
+ return res;
+ }
+ if (buffer == nullptr) {
+ ALOGE("%s: Invalid Heic gainmap codec output buffer at index %d",
+ __FUNCTION__, it->index);
+ return BAD_VALUE;
+ }
+
+ uint8_t kGainmapMarker[] = {'g', 'm', 'a', 'p', '\0', '\0'};
+ sp<ABuffer> aBuffer = new ABuffer(buffer->size() + sizeof(kGainmapMarker));
+ memcpy(aBuffer->data(), kGainmapMarker, sizeof(kGainmapMarker));
+ memcpy(aBuffer->data() + sizeof(kGainmapMarker), buffer->data(), buffer->size());
+ aBuffer->meta()->setInt32(KEY_COLOR_FORMAT, kCodecGainmapColorFormat);
+ aBuffer->meta()->setInt32("color-primaries", kCodecGainmapColorPrimaries);
+ aBuffer->meta()->setInt32("color-transfer", kCodecGainmapColorTransfer);
+ aBuffer->meta()->setInt32("color-matrix", kCodecGainmapColorMatrix);
+ aBuffer->meta()->setInt32("color-range", kCodecGainmapColorRange);
+ res = inputFrame.muxer->writeSampleData(aBuffer, inputFrame.gainmapTrackIndex,
+ inputFrame.timestamp,
+ MediaCodec::BUFFER_FLAG_MUXER_DATA);
+ if (res != OK) {
+ ALOGE("%s: Failed to write buffer index %d to muxer: %s (%d)",
+ __FUNCTION__, it->index, strerror(-res), res);
+ return res;
+ }
+
+ mGainmapCodec->releaseOutputBuffer(it->index);
+ if (inputFrame.gainmapPendingOutputTiles == 0) {
+ ALOGW("%s: Codec generated more gainmap tiles than expected!", __FUNCTION__);
+ } else {
+ inputFrame.gainmapPendingOutputTiles--;
+ }
+
+ inputFrame.gainmapCodecOutputBuffers.erase(inputFrame.gainmapCodecOutputBuffers.begin());
+
+ ALOGV("%s: [%" PRId64 "]: Gainmap output buffer index %d",
+ __FUNCTION__, frameNumber, it->index);
+ return OK;
+}
+
status_t HeicCompositeStream::processCompletedInputFrame(int64_t frameNumber,
InputFrame &inputFrame) {
sp<ANativeWindow> outputANW = mOutputSurface;
@@ -1256,6 +1715,13 @@
inputFrame->codecOutputBuffers.erase(it);
}
+ while (!inputFrame->gainmapCodecOutputBuffers.empty()) {
+ auto it = inputFrame->gainmapCodecOutputBuffers.begin();
+ ALOGV("%s: release gainmap output buffer index %d", __FUNCTION__, it->index);
+ mGainmapCodec->releaseOutputBuffer(it->index);
+ inputFrame->gainmapCodecOutputBuffers.erase(it);
+ }
+
if (inputFrame->yuvBuffer.data != nullptr) {
mMainImageConsumer->unlockBuffer(inputFrame->yuvBuffer);
inputFrame->yuvBuffer.data = nullptr;
@@ -1267,6 +1733,11 @@
inputFrame->codecInputBuffers.erase(it);
}
+ while (!inputFrame->gainmapCodecInputBuffers.empty()) {
+ auto it = inputFrame->gainmapCodecInputBuffers.begin();
+ inputFrame->gainmapCodecInputBuffers.erase(it);
+ }
+
if (inputFrame->error || mErrorState) {
ALOGV("%s: notifyError called for frameNumber %" PRId64, __FUNCTION__, frameNumber);
notifyError(frameNumber, inputFrame->requestId);
@@ -1292,7 +1763,8 @@
while (it != mPendingInputFrames.end()) {
auto& inputFrame = it->second;
if (inputFrame.error ||
- (inputFrame.appSegmentWritten && inputFrame.pendingOutputTiles == 0)) {
+ (inputFrame.appSegmentWritten && inputFrame.pendingOutputTiles == 0 &&
+ inputFrame.gainmapPendingOutputTiles == 0)) {
releaseInputFrameLocked(it->first, &inputFrame);
it = mPendingInputFrames.erase(it);
inputFrameDone = true;
@@ -1318,6 +1790,110 @@
}
}
+status_t HeicCompositeStream::initializeGainmapCodec() {
+ ALOGV("%s", __FUNCTION__);
+
+ if (!mHDRGainmapEnabled) {
+ return OK;
+ }
+ uint32_t width = mOutputWidth / kGainmapScale;
+ uint32_t height = mOutputHeight / kGainmapScale;
+ bool useGrid = false;
+ bool useHeic = false;
+ AString hevcName;
+ bool isSizeSupported = isSizeSupportedByHeifEncoder(width, height,
+ &useHeic, &useGrid, nullptr, &hevcName);
+ if (!isSizeSupported) {
+ ALOGE("%s: Encoder doesn't support size %u x %u!",
+ __FUNCTION__, width, height);
+ return BAD_VALUE;
+ }
+
+ // Create HEVC codec.
+ mGainmapCodec = MediaCodec::CreateByComponentName(mCodecLooper, hevcName);
+ if (mGainmapCodec == nullptr) {
+ ALOGE("%s: Failed to create gainmap codec", __FUNCTION__);
+ return NO_INIT;
+ }
+
+ // Create Looper and handler for Codec callback.
+ mGainmapCodecCallbackHandler = new CodecCallbackHandler(this, true /*isGainmap*/);
+ if (mGainmapCodecCallbackHandler == nullptr) {
+ ALOGE("%s: Failed to create gainmap codec callback handler", __FUNCTION__);
+ return NO_MEMORY;
+ }
+ mGainmapCallbackLooper = new ALooper;
+ mGainmapCallbackLooper->setName("Camera3-HeicComposite-MediaCodecGainmapCallbackLooper");
+ auto res = mGainmapCallbackLooper->start(
+ false, // runOnCallingThread
+ false, // canCallJava
+ PRIORITY_AUDIO);
+ if (res != OK) {
+ ALOGE("%s: Failed to start gainmap media callback looper: %s (%d)",
+ __FUNCTION__, strerror(-res), res);
+ return NO_INIT;
+ }
+ mGainmapCallbackLooper->registerHandler(mGainmapCodecCallbackHandler);
+
+ mGainmapAsyncNotify = new AMessage(kWhatCallbackNotify, mGainmapCodecCallbackHandler);
+ res = mGainmapCodec->setCallback(mGainmapAsyncNotify);
+ if (res != OK) {
+ ALOGE("%s: Failed to set MediaCodec callback: %s (%d)", __FUNCTION__,
+ strerror(-res), res);
+ return res;
+ }
+
+ // Create output format and configure the Codec.
+ sp<AMessage> outputFormat = new AMessage();
+ outputFormat->setString(KEY_MIME, MIMETYPE_VIDEO_HEVC);
+ outputFormat->setInt32(KEY_BITRATE_MODE, BITRATE_MODE_CQ);
+ outputFormat->setInt32(KEY_QUALITY, kDefaultJpegQuality);
+ // Ask codec to skip timestamp check and encode all frames.
+ outputFormat->setInt64(KEY_MAX_PTS_GAP_TO_ENCODER, kNoFrameDropMaxPtsGap);
+
+ int32_t gridWidth, gridHeight, gridRows, gridCols;
+ if (useGrid){
+ gridWidth = HeicEncoderInfoManager::kGridWidth;
+ gridHeight = HeicEncoderInfoManager::kGridHeight;
+ gridRows = (height + gridHeight - 1)/gridHeight;
+ gridCols = (width + gridWidth - 1)/gridWidth;
+ } else {
+ gridWidth = width;
+ gridHeight = height;
+ gridRows = 1;
+ gridCols = 1;
+ }
+
+ outputFormat->setInt32(KEY_WIDTH, !useGrid ? width : gridWidth);
+ outputFormat->setInt32(KEY_HEIGHT, !useGrid ? height : gridHeight);
+ outputFormat->setInt32(KEY_I_FRAME_INTERVAL, 0);
+ outputFormat->setInt32(KEY_COLOR_FORMAT, COLOR_FormatYUV420Flexible);
+ outputFormat->setInt32(KEY_FRAME_RATE, useGrid ? gridRows * gridCols : kNoGridOpRate);
+ // This only serves as a hint to encoder when encoding is not real-time.
+ outputFormat->setInt32(KEY_OPERATING_RATE, useGrid ? kGridOpRate : kNoGridOpRate);
+
+ res = mGainmapCodec->configure(outputFormat, nullptr /*nativeWindow*/,
+ nullptr /*crypto*/, CONFIGURE_FLAG_ENCODE);
+ if (res != OK) {
+ ALOGE("%s: Failed to configure codec: %s (%d)", __FUNCTION__,
+ strerror(-res), res);
+ return res;
+ }
+
+ mGainmapGridWidth = gridWidth;
+ mGainmapGridHeight = gridHeight;
+ mGainmapGridRows = gridRows;
+ mGainmapGridCols = gridCols;
+ mGainmapUseGrid = useGrid;
+ mGainmapOutputWidth = width;
+ mGainmapOutputHeight = height;
+ mMaxHeicBufferSize +=
+ ALIGN(mGainmapOutputWidth, HeicEncoderInfoManager::kGridWidth) *
+ ALIGN(mGainmapOutputHeight, HeicEncoderInfoManager::kGridHeight) * 3 / 2;
+
+ return OK;
+}
+
status_t HeicCompositeStream::initializeCodec(uint32_t width, uint32_t height,
const sp<CameraDeviceBase>& cameraDevice) {
ALOGV("%s", __FUNCTION__);
@@ -1331,6 +1907,12 @@
__FUNCTION__, width, height);
return BAD_VALUE;
}
+ if (mHDRGainmapEnabled) {
+ // HDR Gainmap tonemapping and generation can only be done in SW
+ // using P010 as input. HEIC codecs expect private/impl.defined
+ // which is opaque.
+ mUseHeic = false;
+ }
// Create Looper for MediaCodec.
auto desiredMime = mUseHeic ? MIMETYPE_IMAGE_ANDROID_HEIC : MIMETYPE_VIDEO_HEVC;
@@ -1417,7 +1999,7 @@
outputFormat->setInt32(KEY_HEIGHT, !useGrid ? height : gridHeight);
outputFormat->setInt32(KEY_I_FRAME_INTERVAL, 0);
outputFormat->setInt32(KEY_COLOR_FORMAT,
- useGrid ? COLOR_FormatYUV420Flexible : COLOR_FormatSurface);
+ useGrid || mHDRGainmapEnabled ? COLOR_FormatYUV420Flexible : COLOR_FormatSurface);
outputFormat->setInt32(KEY_FRAME_RATE, useGrid ? gridRows * gridCols : kNoGridOpRate);
// This only serves as a hint to encoder when encoding is not real-time.
outputFormat->setInt32(KEY_OPERATING_RATE, useGrid ? kGridOpRate : kNoGridOpRate);
@@ -1442,7 +2024,24 @@
ALIGN(mOutputWidth, HeicEncoderInfoManager::kGridWidth) *
ALIGN(mOutputHeight, HeicEncoderInfoManager::kGridHeight) * 3 / 2 + mAppSegmentMaxSize;
- return OK;
+ return initializeGainmapCodec();
+}
+
+void HeicCompositeStream::deinitGainmapCodec() {
+ ALOGV("%s", __FUNCTION__);
+ if (mGainmapCodec != nullptr) {
+ mGainmapCodec->stop();
+ mGainmapCodec->release();
+ mGainmapCodec.clear();
+ }
+
+ if (mGainmapCallbackLooper != nullptr) {
+ mGainmapCallbackLooper->stop();
+ mGainmapCallbackLooper.clear();
+ }
+
+ mGainmapAsyncNotify.clear();
+ mGainmapFormat.clear();
}
void HeicCompositeStream::deinitCodec() {
@@ -1453,6 +2052,8 @@
mCodec.clear();
}
+ deinitGainmapCodec();
+
if (mCodecLooper != nullptr) {
mCodecLooper->stop();
mCodecLooper.clear();
@@ -1873,7 +2474,7 @@
ALOGE("CB_INPUT_AVAILABLE: index is expected.");
break;
}
- parent->onHeicInputFrameAvailable(index);
+ parent->onHeicInputFrameAvailable(index, mIsGainmap);
break;
}
@@ -1912,7 +2513,7 @@
timeUs,
(uint32_t)flags};
- parent->onHeicOutputFrameAvailable(bufferInfo);
+ parent->onHeicOutputFrameAvailable(bufferInfo, mIsGainmap);
break;
}
@@ -1928,7 +2529,7 @@
if (format != nullptr) {
formatCopy = format->dup();
}
- parent->onHeicFormatChanged(formatCopy);
+ parent->onHeicFormatChanged(formatCopy, mIsGainmap);
break;
}
diff --git a/services/camera/libcameraservice/api2/HeicCompositeStream.h b/services/camera/libcameraservice/api2/HeicCompositeStream.h
index fad968a..beb08b0 100644
--- a/services/camera/libcameraservice/api2/HeicCompositeStream.h
+++ b/services/camera/libcameraservice/api2/HeicCompositeStream.h
@@ -17,6 +17,9 @@
#ifndef ANDROID_SERVERS_CAMERA_CAMERA3_HEIC_COMPOSITE_STREAM_H
#define ANDROID_SERVERS_CAMERA_CAMERA3_HEIC_COMPOSITE_STREAM_H
+#include <algorithm>
+#include <android/data_space.h>
+#include <memory>
#include <queue>
#include <gui/CpuConsumer.h>
@@ -27,6 +30,8 @@
#include <media/stagefright/foundation/AMessage.h>
#include <media/stagefright/MediaCodec.h>
#include <media/stagefright/MediaMuxer.h>
+#include <ultrahdr/ultrahdrcommon.h>
+#include <ultrahdr/gainmapmetadata.h>
#include "CompositeStream.h"
@@ -79,8 +84,13 @@
void getStreamStats(hardware::CameraStreamStats*) override {};
static bool isSizeSupportedByHeifEncoder(int32_t width, int32_t height,
- bool* useHeic, bool* useGrid, int64_t* stall, AString* hevcName = nullptr);
+ bool* useHeic, bool* useGrid, int64_t* stall, AString* hevcName = nullptr,
+ bool allowSWCodec = false);
static bool isInMemoryTempFileSupported();
+
+ // HDR Gainmap subsampling
+ static const size_t kGainmapScale = 4;
+
protected:
bool threadLoop() override;
@@ -108,12 +118,12 @@
class CodecCallbackHandler : public AHandler {
public:
- explicit CodecCallbackHandler(wp<HeicCompositeStream> parent) {
- mParent = parent;
- }
+ explicit CodecCallbackHandler(wp<HeicCompositeStream> parent, bool isGainmap = false) :
+ mParent(parent), mIsGainmap(isGainmap) {}
virtual void onMessageReceived(const sp<AMessage> &msg);
private:
wp<HeicCompositeStream> mParent;
+ bool mIsGainmap;
};
enum {
@@ -122,30 +132,34 @@
bool mUseHeic;
sp<MediaCodec> mCodec;
- sp<ALooper> mCodecLooper, mCallbackLooper;
- sp<CodecCallbackHandler> mCodecCallbackHandler;
- sp<AMessage> mAsyncNotify;
- sp<AMessage> mFormat;
- size_t mNumOutputTiles;
+ sp<MediaCodec> mGainmapCodec;
+ sp<ALooper> mCodecLooper, mCallbackLooper, mGainmapCallbackLooper;
+ sp<CodecCallbackHandler> mCodecCallbackHandler, mGainmapCodecCallbackHandler;
+ sp<AMessage> mAsyncNotify, mGainmapAsyncNotify;
+ sp<AMessage> mFormat, mGainmapFormat;
+ size_t mNumOutputTiles, mNumGainmapOutputTiles;
- int32_t mOutputWidth, mOutputHeight;
+ int32_t mOutputWidth, mOutputHeight, mGainmapOutputWidth, mGainmapOutputHeight;
size_t mMaxHeicBufferSize;
- int32_t mGridWidth, mGridHeight;
- size_t mGridRows, mGridCols;
- bool mUseGrid; // Whether to use framework YUV frame tiling.
+ int32_t mGridWidth, mGridHeight, mGainmapGridWidth, mGainmapGridHeight;
+ size_t mGridRows, mGridCols, mGainmapGridRows, mGainmapGridCols;
+ bool mUseGrid, mGainmapUseGrid; // Whether to use framework YUV frame tiling.
static const int64_t kNoFrameDropMaxPtsGap = -1000000;
static const int32_t kNoGridOpRate = 30;
static const int32_t kGridOpRate = 120;
- void onHeicOutputFrameAvailable(const CodecOutputBufferInfo& bufferInfo);
- void onHeicInputFrameAvailable(int32_t index); // Only called for YUV input mode.
- void onHeicFormatChanged(sp<AMessage>& newFormat);
+ void onHeicOutputFrameAvailable(const CodecOutputBufferInfo& bufferInfo, bool isGainmap);
+ void onHeicInputFrameAvailable(int32_t index, bool isGainmap);// Only called for YUV input mode.
+ void onHeicFormatChanged(sp<AMessage>& newFormat, bool isGainmap);
+ void onHeicGainmapFormatChanged(sp<AMessage>& newFormat);
void onHeicCodecError();
status_t initializeCodec(uint32_t width, uint32_t height,
const sp<CameraDeviceBase>& cameraDevice);
void deinitCodec();
+ status_t initializeGainmapCodec();
+ void deinitGainmapCodec();
//
// Composite stream related structures, utility functions and callbacks.
@@ -155,33 +169,51 @@
int32_t quality;
CpuConsumer::LockedBuffer appSegmentBuffer;
- std::vector<CodecOutputBufferInfo> codecOutputBuffers;
+ std::vector<CodecOutputBufferInfo> codecOutputBuffers, gainmapCodecOutputBuffers;
std::unique_ptr<CameraMetadata> result;
// Fields that are only applicable to HEVC tiling.
CpuConsumer::LockedBuffer yuvBuffer;
- std::vector<CodecInputBufferInfo> codecInputBuffers;
+ std::vector<CodecInputBufferInfo> codecInputBuffers, gainmapCodecInputBuffers;
bool error; // Main input image buffer error
bool exifError; // Exif/APP_SEGMENT buffer error
int64_t timestamp;
int32_t requestId;
- sp<AMessage> format;
+ sp<AMessage> format, gainmapFormat;
sp<MediaMuxer> muxer;
int fenceFd;
int fileFd;
- ssize_t trackIndex;
+ ssize_t trackIndex, gainmapTrackIndex;
ANativeWindowBuffer *anb;
bool appSegmentWritten;
- size_t pendingOutputTiles;
- size_t codecInputCounter;
+ size_t pendingOutputTiles, gainmapPendingOutputTiles;
+ size_t codecInputCounter, gainmapCodecInputCounter;
- InputFrame() : orientation(0), quality(kDefaultJpegQuality), error(false),
- exifError(false), timestamp(-1), requestId(-1), fenceFd(-1),
- fileFd(-1), trackIndex(-1), anb(nullptr), appSegmentWritten(false),
- pendingOutputTiles(0), codecInputCounter(0) { }
+ std::unique_ptr<CpuConsumer::LockedBuffer> baseImage, gainmapImage;
+ std::unique_ptr<ultrahdr::uhdr_raw_image_ext> baseBuffer, gainmap;
+ std::unique_ptr<uint8_t[]> gainmapChroma;
+ std::vector<uint8_t> isoGainmapMetadata;
+
+ InputFrame()
+ : orientation(0),
+ quality(kDefaultJpegQuality),
+ error(false),
+ exifError(false),
+ timestamp(-1),
+ requestId(-1),
+ fenceFd(-1),
+ fileFd(-1),
+ trackIndex(-1),
+ gainmapTrackIndex(-1),
+ anb(nullptr),
+ appSegmentWritten(false),
+ pendingOutputTiles(0),
+ gainmapPendingOutputTiles(0),
+ codecInputCounter(0),
+ gainmapCodecInputCounter(0) {}
};
void compilePendingInputLocked();
@@ -192,9 +224,11 @@
status_t processInputFrame(int64_t frameNumber, InputFrame &inputFrame);
status_t processCodecInputFrame(InputFrame &inputFrame);
+ status_t processCodecGainmapInputFrame(InputFrame &inputFrame);
status_t startMuxerForInputFrame(int64_t frameNumber, InputFrame &inputFrame);
status_t processAppSegment(int64_t frameNumber, InputFrame &inputFrame);
status_t processOneCodecOutputFrame(int64_t frameNumber, InputFrame &inputFrame);
+ status_t processOneCodecGainmapOutputFrame(int64_t frameNumber, InputFrame &inputFrame);
status_t processCompletedInputFrame(int64_t frameNumber, InputFrame &inputFrame);
void releaseInputFrameLocked(int64_t frameNumber, InputFrame *inputFrame /*out*/);
@@ -216,6 +250,7 @@
static_cast<android_dataspace>(HAL_DATASPACE_JPEG_APP_SEGMENTS);
static const android_dataspace kHeifDataSpace =
static_cast<android_dataspace>(HAL_DATASPACE_HEIF);
+ android_dataspace mInternalDataSpace = kHeifDataSpace;
// Use the limit of pipeline depth in the API sepc as maximum number of acquired
// app segment buffers.
static const uint32_t kMaxAcquiredAppSegment = 8;
@@ -260,15 +295,15 @@
std::vector<int64_t> mInputAppSegmentBuffers;
// Keep all incoming HEIC blob buffer pending further processing.
- std::vector<CodecOutputBufferInfo> mCodecOutputBuffers;
- std::queue<int64_t> mCodecOutputBufferFrameNumbers;
- size_t mCodecOutputCounter;
+ std::vector<CodecOutputBufferInfo> mCodecOutputBuffers, mGainmapCodecOutputBuffers;
+ std::queue<int64_t> mCodecOutputBufferFrameNumbers, mCodecGainmapOutputBufferFrameNumbers;
+ size_t mCodecOutputCounter, mCodecGainmapOutputCounter;
int32_t mQuality;
// Keep all incoming Yuv buffer pending tiling and encoding (for HEVC YUV tiling only)
std::vector<int64_t> mInputYuvBuffers;
// Keep all codec input buffers ready to be filled out (for HEVC YUV tiling only)
- std::vector<int32_t> mCodecInputBuffers;
+ std::vector<int32_t> mCodecInputBuffers, mGainmapCodecInputBuffers;
// Artificial strictly incremental YUV grid timestamp to make encoder happy.
int64_t mGridTimestampUs;
@@ -286,6 +321,49 @@
// The status id for tracking the active/idle status of this composite stream
int mStatusId;
void markTrackerIdle();
+
+ //APP_SEGMENT stream supported
+ bool mAppSegmentSupported = false;
+
+ bool mHDRGainmapEnabled = false;
+
+ // UltraHDR tonemap color and format aspects
+ static const uhdr_img_fmt_t kUltraHdrInputFmt = UHDR_IMG_FMT_24bppYCbCrP010;
+ static const uhdr_color_gamut kUltraHdrInputGamut = UHDR_CG_BT_2100;
+ static const uhdr_color_transfer kUltraHdrInputTransfer = UHDR_CT_HLG;
+ static const uhdr_color_range kUltraHdrInputRange = UHDR_CR_FULL_RANGE;
+
+ static const uhdr_img_fmt_t kUltraHdrOutputFmt = UHDR_IMG_FMT_12bppYCbCr420;
+ static const uhdr_color_gamut kUltraHdrOutputGamut = UHDR_CG_DISPLAY_P3;
+ static const uhdr_color_transfer kUltraHdrOutputTransfer = UHDR_CT_SRGB;
+ static const uhdr_color_range kUltraHdrOutputRange = UHDR_CR_FULL_RANGE;
+
+ static const auto kUltraHDRDataSpace = ADATASPACE_HEIF_ULTRAHDR;
+
+ // MediaMuxer/Codec color and format aspects for base image and gainmap metadata
+ static const int32_t kCodecColorFormat = COLOR_FormatYUV420Flexible;
+ static const ColorAspects::Primaries kCodecColorPrimaries =
+ ColorAspects::Primaries::PrimariesEG432;
+ static const ColorAspects::MatrixCoeffs kCodecColorMatrix =
+ ColorAspects::MatrixCoeffs::MatrixUnspecified;
+ static const ColorAspects::Transfer kCodecColorTransfer =
+ ColorAspects::Transfer::TransferSRGB;
+ static const ColorAspects::Range kCodecColorRange =
+ ColorAspects::Range::RangeFull;
+
+ // MediaMuxer/Codec color and format aspects for gainmap as per ISO 23008-12:2024
+ static const int32_t kCodecGainmapColorFormat = COLOR_FormatYUV420Flexible;
+ static const ColorAspects::Primaries kCodecGainmapColorPrimaries =
+ ColorAspects::Primaries::PrimariesUnspecified;
+ static const ColorAspects::MatrixCoeffs kCodecGainmapColorMatrix =
+ ColorAspects::MatrixCoeffs::MatrixUnspecified;
+ static const ColorAspects::Transfer kCodecGainmapColorTransfer =
+ ColorAspects::Transfer::TransferUnspecified;
+ static const ColorAspects::Range kCodecGainmapColorRange =
+ ColorAspects::Range::RangeFull;
+
+
+ status_t generateBaseImageAndGainmap(InputFrame &inputFrame);
};
}; // namespace camera3
diff --git a/services/camera/libcameraservice/api2/HeicEncoderInfoManager.cpp b/services/camera/libcameraservice/api2/HeicEncoderInfoManager.cpp
index d36ca3b..92072b0 100644
--- a/services/camera/libcameraservice/api2/HeicEncoderInfoManager.cpp
+++ b/services/camera/libcameraservice/api2/HeicEncoderInfoManager.cpp
@@ -20,6 +20,7 @@
#include <cstdint>
#include <regex>
+#include <com_android_internal_camera_flags.h>
#include <cutils/properties.h>
#include <log/log_main.h>
#include <system/graphics.h>
@@ -33,14 +34,16 @@
namespace android {
namespace camera3 {
-HeicEncoderInfoManager::HeicEncoderInfoManager() :
+namespace flags = com::android::internal::camera::flags;
+
+HeicEncoderInfoManager::HeicEncoderInfoManager(bool useSWCodec) :
mIsInited(false),
mMinSizeHeic(0, 0),
mMaxSizeHeic(INT32_MAX, INT32_MAX),
mHasHEVC(false),
mHasHEIC(false),
mDisableGrid(false) {
- if (initialize() == OK) {
+ if (initialize(useSWCodec) == OK) {
mIsInited = true;
}
}
@@ -72,14 +75,15 @@
(width <= 1920 && height <= 1080))) {
enableGrid = false;
}
- if (hevcName != nullptr) {
- *hevcName = mHevcName;
- }
} else {
// No encoder available for the requested size.
return false;
}
+ if (hevcName != nullptr) {
+ *hevcName = mHevcName;
+ }
+
if (stall != nullptr) {
// Find preferred encoder which advertise
// "measured-frame-rate-WIDTHxHEIGHT-range" key.
@@ -109,7 +113,7 @@
return true;
}
-status_t HeicEncoderInfoManager::initialize() {
+status_t HeicEncoderInfoManager::initialize(bool allowSWCodec) {
mDisableGrid = property_get_bool("camera.heic.disable_grid", false);
sp<IMediaCodecList> codecsList = MediaCodecList::getInstance();
if (codecsList == nullptr) {
@@ -119,7 +123,7 @@
sp<AMessage> heicDetails = getCodecDetails(codecsList, MEDIA_MIMETYPE_IMAGE_ANDROID_HEIC);
- if (!getHevcCodecDetails(codecsList, MEDIA_MIMETYPE_VIDEO_HEVC)) {
+ if (!getHevcCodecDetails(codecsList, MEDIA_MIMETYPE_VIDEO_HEVC, allowSWCodec)) {
if (heicDetails != nullptr) {
ALOGE("%s: Device must support HEVC codec if HEIC codec is available!",
__FUNCTION__);
@@ -268,7 +272,7 @@
}
bool HeicEncoderInfoManager::getHevcCodecDetails(
- sp<IMediaCodecList> codecsList, const char* mime) {
+ sp<IMediaCodecList> codecsList, const char* mime, bool allowSWCodec) {
bool found = false;
ssize_t idx = 0;
while ((idx = codecsList->findCodecByType(mime, true /*encoder*/, idx)) >= 0) {
@@ -280,11 +284,13 @@
ALOGV("%s: [%s] codec found", __FUNCTION__,
info->getCodecName());
- // Filter out software ones as they may be too slow
- if (!(info->getAttributes() & MediaCodecInfo::kFlagIsHardwareAccelerated)) {
- ALOGV("%s: [%s] Filter out software ones as they may be too slow", __FUNCTION__,
- info->getCodecName());
- continue;
+ if (!allowSWCodec) {
+ // Filter out software ones as they may be too slow
+ if (!(info->getAttributes() & MediaCodecInfo::kFlagIsHardwareAccelerated)) {
+ ALOGV("%s: [%s] Filter out software ones as they may be too slow", __FUNCTION__,
+ info->getCodecName());
+ continue;
+ }
}
const sp<MediaCodecInfo::Capabilities> caps =
diff --git a/services/camera/libcameraservice/api2/HeicEncoderInfoManager.h b/services/camera/libcameraservice/api2/HeicEncoderInfoManager.h
index a65be9c..1e28eca 100644
--- a/services/camera/libcameraservice/api2/HeicEncoderInfoManager.h
+++ b/services/camera/libcameraservice/api2/HeicEncoderInfoManager.h
@@ -30,8 +30,8 @@
class HeicEncoderInfoManager {
public:
- static HeicEncoderInfoManager& getInstance() {
- static HeicEncoderInfoManager instance;
+ static HeicEncoderInfoManager& getInstance(bool useSWCodec) {
+ static HeicEncoderInfoManager instance(useSWCodec);
return instance;
}
@@ -51,10 +51,10 @@
typedef std::unordered_map<std::pair<int32_t, int32_t>,
std::pair<int32_t, int32_t>, SizePairHash> FrameRateMaps;
- HeicEncoderInfoManager();
+ HeicEncoderInfoManager(bool useSWCodec);
virtual ~HeicEncoderInfoManager();
- status_t initialize();
+ status_t initialize(bool allowSWCodec);
status_t getFrameRateMaps(sp<AMessage> details, FrameRateMaps* maps);
status_t getCodecSizeRange(const char* codecName, sp<AMessage> details,
std::pair<int32_t, int32_t>* minSize, std::pair<int32_t, int32_t>* maxSize,
@@ -62,7 +62,8 @@
FrameRateMaps::const_iterator findClosestSize(const FrameRateMaps& maps,
int32_t width, int32_t height) const;
sp<AMessage> getCodecDetails(sp<IMediaCodecList> codecsList, const char* name);
- bool getHevcCodecDetails(sp<IMediaCodecList> codecsList, const char* mime);
+ bool getHevcCodecDetails(sp<IMediaCodecList> codecsList, const char* mime,
+ bool allowSWCodec = false);
bool mIsInited;
std::pair<int32_t, int32_t> mMinSizeHeic, mMaxSizeHeic;
diff --git a/services/camera/libcameraservice/common/CameraDeviceBase.h b/services/camera/libcameraservice/common/CameraDeviceBase.h
index f5e960b..e17d700 100644
--- a/services/camera/libcameraservice/common/CameraDeviceBase.h
+++ b/services/camera/libcameraservice/common/CameraDeviceBase.h
@@ -44,6 +44,10 @@
namespace camera3 {
+// TODO: Remove this once the GFX native dataspace
+// dependencies are available
+enum { HEIC_ULTRAHDR, ADATASPACE_HEIF_ULTRAHDR = 0x1006 };
+
typedef enum camera_stream_configuration_mode {
CAMERA_STREAM_CONFIGURATION_NORMAL_MODE = 0,
CAMERA_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE = 1,
diff --git a/services/camera/libcameraservice/common/CameraProviderManager.cpp b/services/camera/libcameraservice/common/CameraProviderManager.cpp
index 6394ec1..2d58652 100644
--- a/services/camera/libcameraservice/common/CameraProviderManager.cpp
+++ b/services/camera/libcameraservice/common/CameraProviderManager.cpp
@@ -14,6 +14,7 @@
* limitations under the License.
*/
+#include "system/camera_metadata.h"
#include "system/graphics-base-v1.0.h"
#include "system/graphics-base-v1.1.h"
#define LOG_TAG "CameraProviderManager"
@@ -76,6 +77,10 @@
const float CameraProviderManager::kDepthARTolerance = .1f;
const bool CameraProviderManager::kFrameworkJpegRDisabled =
property_get_bool("ro.camera.disableJpegR", false);
+const bool CameraProviderManager::kFrameworkHeicUltraHDRDisabled =
+ property_get_bool("ro.camera.disableHeicUltraHDR", false);
+const bool CameraProviderManager::kFrameworkHeicAllowSWCodecs =
+ property_get_bool("ro.camera.enableSWHEVC", false);
CameraProviderManager::HidlServiceInteractionProxyImpl
CameraProviderManager::sHidlServiceInteractionProxy{};
@@ -1246,6 +1251,165 @@
return false;
}
+status_t CameraProviderManager::ProviderInfo::DeviceInfo3::deriveHeicUltraHDRTags(
+ bool maxResolution) {
+ if (!flags::camera_heif_gainmap() || kFrameworkHeicUltraHDRDisabled ||
+ mCompositeHeicUltraHDRDisabled ||
+ !camera3::HeicCompositeStream::isInMemoryTempFileSupported()) {
+ return OK;
+ }
+
+ const int32_t scalerSizesTag =
+ SessionConfigurationUtils::getAppropriateModeTag(
+ ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS, maxResolution);
+ const int32_t scalerMinFrameDurationsTag = SessionConfigurationUtils::getAppropriateModeTag(
+ ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS, maxResolution);
+ const int32_t scalerStallDurationsTag =
+ SessionConfigurationUtils::getAppropriateModeTag(
+ ANDROID_SCALER_AVAILABLE_STALL_DURATIONS, maxResolution);
+
+ const int32_t heicUltraHDRSizesTag =
+ SessionConfigurationUtils::getAppropriateModeTag(
+ ANDROID_HEIC_AVAILABLE_HEIC_ULTRA_HDR_STREAM_CONFIGURATIONS, maxResolution);
+ const int32_t heicUltraHDRStallDurationsTag =
+ SessionConfigurationUtils::getAppropriateModeTag(
+ ANDROID_HEIC_AVAILABLE_HEIC_ULTRA_HDR_STALL_DURATIONS, maxResolution);
+ const int32_t heicUltraHDRFrameDurationsTag =
+ SessionConfigurationUtils::getAppropriateModeTag(
+ ANDROID_HEIC_AVAILABLE_HEIC_ULTRA_HDR_MIN_FRAME_DURATIONS, maxResolution);
+
+ auto& c = mCameraCharacteristics;
+ std::vector<std::tuple<size_t, size_t>> supportedP010Sizes, filteredSizes;
+ auto capabilities = c.find(ANDROID_REQUEST_AVAILABLE_CAPABILITIES);
+ if (capabilities.count == 0) {
+ ALOGE("%s: Supported camera capabilities is empty!", __FUNCTION__);
+ return BAD_VALUE;
+ }
+
+ auto end = capabilities.data.u8 + capabilities.count;
+ bool isTenBitOutputSupported = std::find(capabilities.data.u8, end,
+ ANDROID_REQUEST_AVAILABLE_CAPABILITIES_DYNAMIC_RANGE_TEN_BIT) != end;
+ if (!isTenBitOutputSupported) {
+ // No 10-bit support, nothing more to do.
+ return OK;
+ }
+
+ getSupportedSizes(c, scalerSizesTag,
+ static_cast<android_pixel_format_t>(HAL_PIXEL_FORMAT_YCBCR_P010), &supportedP010Sizes);
+ auto it = supportedP010Sizes.begin();
+ if (supportedP010Sizes.empty()) {
+ // Nothing to do in this case.
+ return OK;
+ }
+
+ std::vector<int32_t> heicUltraHDREntries;
+ int64_t stall = 0;
+ bool useHeic = false;
+ bool useGrid = false;
+ for (const auto& it : supportedP010Sizes) {
+ int32_t width = std::get<0>(it);
+ int32_t height = std::get<1>(it);
+ int32_t gainmapWidth = std::get<0>(it) / HeicCompositeStream::kGainmapScale;
+ int32_t gainmapHeight = std::get<1>(it) / HeicCompositeStream::kGainmapScale;
+ if (camera3::HeicCompositeStream::isSizeSupportedByHeifEncoder(width, height,
+ &useHeic, &useGrid, &stall, nullptr /*hevcName*/, kFrameworkHeicAllowSWCodecs) &&
+ camera3::HeicCompositeStream::isSizeSupportedByHeifEncoder(gainmapWidth,
+ gainmapHeight, &useHeic, &useGrid, &stall, nullptr /*hevcName*/,
+ kFrameworkHeicAllowSWCodecs) ) {
+ int32_t entry[4] = {HAL_PIXEL_FORMAT_BLOB, static_cast<int32_t> (std::get<0>(it)),
+ static_cast<int32_t> (std::get<1>(it)),
+ ANDROID_HEIC_AVAILABLE_HEIC_ULTRA_HDR_STREAM_CONFIGURATIONS_OUTPUT };
+ heicUltraHDREntries.insert(heicUltraHDREntries.end(), entry, entry + 4);
+ filteredSizes.push_back(it);
+ }
+ }
+
+ std::vector<int64_t> heicUltraHDRMinDurations, heicUltraHDRStallDurations;
+ auto ret = deriveBlobDurationEntries(c, maxResolution, filteredSizes,
+ &heicUltraHDRStallDurations, &heicUltraHDRMinDurations);
+ if (ret != OK) {
+ return ret;
+ }
+
+ return insertStreamConfigTags(heicUltraHDRSizesTag, heicUltraHDRFrameDurationsTag,
+ heicUltraHDRStallDurationsTag, heicUltraHDREntries,
+ heicUltraHDRMinDurations, heicUltraHDRStallDurations, &c);
+}
+
+status_t CameraProviderManager::ProviderInfo::DeviceInfo3::insertStreamConfigTags(
+ int32_t sizeTag, int32_t minFrameDurationTag, int32_t stallDurationTag,
+ const std::vector<int32_t>& sizeEntries,
+ const std::vector<int64_t>& minFrameDurationEntries,
+ const std::vector<int64_t>& stallDurationEntries, CameraMetadata* c /*out*/) {
+ std::vector<int32_t> supportedChTags;
+ auto chTags = c->find(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS);
+ if (chTags.count == 0) {
+ ALOGE("%s: No supported camera characteristics keys!", __FUNCTION__);
+ return BAD_VALUE;
+ }
+ supportedChTags.reserve(chTags.count + 3);
+ supportedChTags.insert(supportedChTags.end(), chTags.data.i32, chTags.data.i32 + chTags.count);
+ supportedChTags.push_back(sizeTag);
+ supportedChTags.push_back(minFrameDurationTag);
+ supportedChTags.push_back(stallDurationTag);
+ c->update(sizeTag, sizeEntries.data(), sizeEntries.size());
+ c->update(minFrameDurationTag, minFrameDurationEntries.data(), minFrameDurationEntries.size());
+ c->update(stallDurationTag, stallDurationEntries.data(), stallDurationEntries.size());
+ c->update(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS, supportedChTags.data(),
+ supportedChTags.size());
+
+ return OK;
+}
+
+status_t CameraProviderManager::ProviderInfo::DeviceInfo3::deriveBlobDurationEntries(
+ const CameraMetadata& c, bool maxResolution,
+ const std::vector<std::tuple<size_t, size_t>>& filteredSizes,
+ std::vector<int64_t>* filteredStallDurations /*out*/,
+ std::vector<int64_t>* filteredMinDurations /*out*/) {
+ std::vector<int64_t> blobMinDurations, blobStallDurations;
+ const int32_t scalerMinFrameDurationsTag = SessionConfigurationUtils::getAppropriateModeTag(
+ ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS, maxResolution);
+ const int32_t scalerStallDurationsTag = SessionConfigurationUtils::getAppropriateModeTag(
+ ANDROID_SCALER_AVAILABLE_STALL_DURATIONS, maxResolution);
+ // We use the jpeg stall and min frame durations to approximate the respective Heic UltraHDR
+ // durations.
+ getSupportedDurations(c, scalerMinFrameDurationsTag, HAL_PIXEL_FORMAT_BLOB, filteredSizes,
+ &blobMinDurations);
+ getSupportedDurations(c, scalerStallDurationsTag, HAL_PIXEL_FORMAT_BLOB, filteredSizes,
+ &blobStallDurations);
+ if (blobStallDurations.empty() || blobMinDurations.empty() ||
+ filteredSizes.size() != blobMinDurations.size() ||
+ blobMinDurations.size() != blobStallDurations.size()) {
+ ALOGE("%s: Unexpected number of available blob durations! %zu vs. %zu with "
+ "filteredSizes size: %zu",
+ __FUNCTION__, blobMinDurations.size(), blobStallDurations.size(),
+ filteredSizes.size());
+ return BAD_VALUE;
+ }
+
+ auto itDuration = blobMinDurations.begin();
+ auto itSize = filteredSizes.begin();
+ while (itDuration != blobMinDurations.end()) {
+ int64_t entry[4] = {HAL_PIXEL_FORMAT_BLOB, static_cast<int32_t>(std::get<0>(*itSize)),
+ static_cast<int32_t>(std::get<1>(*itSize)), *itDuration};
+ filteredMinDurations->insert(filteredMinDurations->end(), entry, entry + 4);
+ itDuration++;
+ itSize++;
+ }
+
+ itDuration = blobStallDurations.begin();
+ itSize = filteredSizes.begin();
+ while (itDuration != blobStallDurations.end()) {
+ int64_t entry[4] = {HAL_PIXEL_FORMAT_BLOB, static_cast<int32_t>(std::get<0>(*itSize)),
+ static_cast<int32_t>(std::get<1>(*itSize)), *itDuration};
+ filteredStallDurations->insert(filteredStallDurations->end(), entry, entry + 4);
+ itDuration++;
+ itSize++;
+ }
+
+ return OK;
+}
+
status_t CameraProviderManager::ProviderInfo::DeviceInfo3::deriveJpegRTags(bool maxResolution) {
if (kFrameworkJpegRDisabled || mCompositeJpegRDisabled) {
return OK;
@@ -1271,13 +1435,6 @@
ANDROID_JPEGR_AVAILABLE_JPEG_R_MIN_FRAME_DURATIONS, maxResolution);
auto& c = mCameraCharacteristics;
- std::vector<int32_t> supportedChTags;
- auto chTags = c.find(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS);
- if (chTags.count == 0) {
- ALOGE("%s: No supported camera characteristics keys!", __FUNCTION__);
- return BAD_VALUE;
- }
-
std::vector<std::tuple<size_t, size_t>> supportedP010Sizes, supportedBlobSizes;
auto capabilities = c.find(ANDROID_REQUEST_AVAILABLE_CAPABILITIES);
if (capabilities.count == 0) {
@@ -1331,54 +1488,19 @@
jpegREntries.insert(jpegREntries.end(), entry, entry + 4);
}
- std::vector<int64_t> blobMinDurations, blobStallDurations;
std::vector<int64_t> jpegRMinDurations, jpegRStallDurations;
-
- // We use the jpeg stall and min frame durations to approximate the respective jpeg/r
- // durations.
- getSupportedDurations(c, scalerMinFrameDurationsTag, HAL_PIXEL_FORMAT_BLOB,
- supportedP010Sizes, &blobMinDurations);
- getSupportedDurations(c, scalerStallDurationsTag, HAL_PIXEL_FORMAT_BLOB,
- supportedP010Sizes, &blobStallDurations);
- if (blobStallDurations.empty() || blobMinDurations.empty() ||
- supportedP010Sizes.size() != blobMinDurations.size() ||
- blobMinDurations.size() != blobStallDurations.size()) {
- ALOGE("%s: Unexpected number of available blob durations! %zu vs. %zu with "
- "supportedP010Sizes size: %zu", __FUNCTION__, blobMinDurations.size(),
- blobStallDurations.size(), supportedP010Sizes.size());
- return BAD_VALUE;
+ auto ret = deriveBlobDurationEntries(c, maxResolution, supportedP010Sizes, &jpegRStallDurations,
+ &jpegRMinDurations);
+ if (ret != OK) {
+ return ret;
}
- auto itDuration = blobMinDurations.begin();
- auto itSize = supportedP010Sizes.begin();
- while (itDuration != blobMinDurations.end()) {
- int64_t entry[4] = {HAL_PIXEL_FORMAT_BLOB, static_cast<int32_t> (std::get<0>(*itSize)),
- static_cast<int32_t> (std::get<1>(*itSize)), *itDuration};
- jpegRMinDurations.insert(jpegRMinDurations.end(), entry, entry + 4);
- itDuration++; itSize++;
+ ret = insertStreamConfigTags(jpegRSizesTag, jpegRMinFrameDurationsTag, jpegRStallDurationsTag,
+ jpegREntries, jpegRMinDurations, jpegRStallDurations, &c);
+ if (ret != OK) {
+ return ret;
}
- itDuration = blobStallDurations.begin();
- itSize = supportedP010Sizes.begin();
- while (itDuration != blobStallDurations.end()) {
- int64_t entry[4] = {HAL_PIXEL_FORMAT_BLOB, static_cast<int32_t> (std::get<0>(*itSize)),
- static_cast<int32_t> (std::get<1>(*itSize)), *itDuration};
- jpegRStallDurations.insert(jpegRStallDurations.end(), entry, entry + 4);
- itDuration++; itSize++;
- }
-
- supportedChTags.reserve(chTags.count + 3);
- supportedChTags.insert(supportedChTags.end(), chTags.data.i32,
- chTags.data.i32 + chTags.count);
- supportedChTags.push_back(jpegRSizesTag);
- supportedChTags.push_back(jpegRMinFrameDurationsTag);
- supportedChTags.push_back(jpegRStallDurationsTag);
- c.update(jpegRSizesTag, jpegREntries.data(), jpegREntries.size());
- c.update(jpegRMinFrameDurationsTag, jpegRMinDurations.data(), jpegRMinDurations.size());
- c.update(jpegRStallDurationsTag, jpegRStallDurations.data(), jpegRStallDurations.size());
- c.update(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS, supportedChTags.data(),
- supportedChTags.size());
-
auto colorSpaces = c.find(ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP);
if (colorSpaces.count > 0 && !maxResolution) {
bool displayP3Support = false;
@@ -1976,7 +2098,7 @@
bool useGrid = false;
if (camera3::HeicCompositeStream::isSizeSupportedByHeifEncoder(
halStreamConfigs.data.i32[i+1], halStreamConfigs.data.i32[i+2],
- &useHeic, &useGrid, &stall)) {
+ &useHeic, &useGrid, &stall, nullptr /*hevcName*/, kFrameworkHeicAllowSWCodecs)) {
if (useGrid != (format == HAL_PIXEL_FORMAT_YCBCR_420_888)) {
continue;
}
diff --git a/services/camera/libcameraservice/common/CameraProviderManager.h b/services/camera/libcameraservice/common/CameraProviderManager.h
index f0db8bc..e629218 100644
--- a/services/camera/libcameraservice/common/CameraProviderManager.h
+++ b/services/camera/libcameraservice/common/CameraProviderManager.h
@@ -470,6 +470,9 @@
static const float kDepthARTolerance;
static const bool kFrameworkJpegRDisabled;
+ static const bool kFrameworkHeicUltraHDRDisabled;
+ static const bool kFrameworkHeicAllowSWCodecs;
+
private:
// All private members, unless otherwise noted, expect mInterfaceMutex to be locked before use
mutable std::mutex mInterfaceMutex;
@@ -629,6 +632,7 @@
bool hasFlashUnit() const { return mHasFlashUnit; }
bool supportNativeZoomRatio() const { return mSupportNativeZoomRatio; }
bool isCompositeJpegRDisabled() const { return mCompositeJpegRDisabled; }
+ bool isCompositeHeicUltraHDRDisabled() const { return mCompositeHeicUltraHDRDisabled; }
virtual status_t setTorchMode(bool enabled) = 0;
virtual status_t turnOnTorchWithStrengthLevel(int32_t torchStrength) = 0;
virtual status_t getTorchStrengthLevel(int32_t *torchStrength) = 0;
@@ -685,14 +689,15 @@
mParentProvider(parentProvider), mTorchStrengthLevel(0),
mTorchMaximumStrengthLevel(0), mTorchDefaultStrengthLevel(0),
mHasFlashUnit(false), mSupportNativeZoomRatio(false),
- mPublicCameraIds(publicCameraIds), mCompositeJpegRDisabled(false) {}
+ mPublicCameraIds(publicCameraIds), mCompositeJpegRDisabled(false),
+ mCompositeHeicUltraHDRDisabled(false) {}
virtual ~DeviceInfo() {}
protected:
bool mHasFlashUnit; // const after constructor
bool mSupportNativeZoomRatio; // const after constructor
const std::vector<std::string>& mPublicCameraIds;
- bool mCompositeJpegRDisabled;
+ bool mCompositeJpegRDisabled, mCompositeHeicUltraHDRDisabled;
};
std::vector<std::unique_ptr<DeviceInfo>> mDevices;
std::unordered_set<std::string> mUniqueCameraIds;
@@ -757,6 +762,18 @@
status_t addDynamicDepthTags(bool maxResolution = false);
status_t deriveHeicTags(bool maxResolution = false);
status_t deriveJpegRTags(bool maxResolution = false);
+ status_t deriveHeicUltraHDRTags(bool maxResolution = false);
+ status_t deriveBlobDurationEntries(
+ const CameraMetadata& c, bool maxResolution,
+ const std::vector<std::tuple<size_t, size_t>>& filteredSizes,
+ std::vector<int64_t>* filteredStallDurations /*out*/,
+ std::vector<int64_t>* filteredMinDurations /*out*/);
+ status_t insertStreamConfigTags(int32_t sizeTag, int32_t minFrameDurationTag,
+ int32_t stallDurationTag,
+ const std::vector<int32_t>& sizeEntries,
+ const std::vector<int64_t>& minFrameDurationEntries,
+ const std::vector<int64_t>& stallDurationEntries,
+ CameraMetadata* c /*out*/);
status_t addRotateCropTags();
status_t addAutoframingTags();
status_t addPreCorrectionActiveArraySize();
diff --git a/services/camera/libcameraservice/common/aidl/AidlProviderInfo.cpp b/services/camera/libcameraservice/common/aidl/AidlProviderInfo.cpp
index e1efd90..3d6a23f 100644
--- a/services/camera/libcameraservice/common/aidl/AidlProviderInfo.cpp
+++ b/services/camera/libcameraservice/common/aidl/AidlProviderInfo.cpp
@@ -517,6 +517,8 @@
mCompositeJpegRDisabled = mCameraCharacteristics.exists(
ANDROID_JPEGR_AVAILABLE_JPEG_R_STREAM_CONFIGURATIONS);
+ mCompositeHeicUltraHDRDisabled = mCameraCharacteristics.exists(
+ ANDROID_HEIC_AVAILABLE_HEIC_ULTRA_HDR_STREAM_CONFIGURATIONS);
mSystemCameraKind = getSystemCameraKind();
@@ -548,6 +550,12 @@
ALOGE("%s: Unable to derive Jpeg/R tags based on camera and media capabilities: %s (%d)",
__FUNCTION__, strerror(-res), res);
}
+ res = deriveHeicUltraHDRTags();
+ if (OK != res) {
+ ALOGE("%s: Unable to derive Heic UltraHDR tags based on camera and "
+ "media capabilities: %s (%d)",
+ __FUNCTION__, strerror(-res), res);
+ }
using camera3::SessionConfigurationUtils::supportsUltraHighResolutionCapture;
if (supportsUltraHighResolutionCapture(mCameraCharacteristics)) {
status_t status = addDynamicDepthTags(/*maxResolution*/true);
@@ -567,6 +575,12 @@
ALOGE("%s: Unable to derive Jpeg/R tags based on camera and media capabilities for"
"maximum resolution mode: %s (%d)", __FUNCTION__, strerror(-status), status);
}
+ status = deriveHeicUltraHDRTags(/*maxResolution*/true);
+ if (OK != status) {
+ ALOGE("%s: Unable to derive Heic UltraHDR tags based on camera and "
+ "media capabilities: %s (%d)",
+ __FUNCTION__, strerror(-status), status);
+ }
}
res = addRotateCropTags();
diff --git a/services/camera/libcameraservice/device3/Camera3Device.cpp b/services/camera/libcameraservice/device3/Camera3Device.cpp
index eb8cb9d..5295442 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.cpp
+++ b/services/camera/libcameraservice/device3/Camera3Device.cpp
@@ -44,6 +44,7 @@
#include <utility>
+#include <android/data_space.h>
#include <android-base/stringprintf.h>
#include <sched.h>
#include <utils/Log.h>
@@ -2561,6 +2562,8 @@
// always occupy the initial entry.
if ((outputStream->data_space == HAL_DATASPACE_V0_JFIF) ||
(outputStream->data_space ==
+ static_cast<android_dataspace_t>(ADATASPACE_HEIF_ULTRAHDR)) ||
+ (outputStream->data_space ==
static_cast<android_dataspace_t>(
aidl::android::hardware::graphics::common::Dataspace::JPEG_R))) {
bufferSizes[k] = static_cast<uint32_t>(
diff --git a/services/camera/libcameraservice/device3/Camera3OutputStream.cpp b/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
index dc663f3..2eba5a7 100644
--- a/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
+++ b/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
@@ -26,6 +26,7 @@
#include <aidl/android/hardware/camera/device/CameraBlobId.h>
#include "aidl/android/hardware/graphics/common/Dataspace.h"
+#include <android/data_space.h>
#include <android-base/unique_fd.h>
#include <com_android_internal_camera_flags.h>
#include <cutils/properties.h>
@@ -402,6 +403,8 @@
// Fix CameraBlob id type discrepancy between HIDL and AIDL, details : http://b/229688810
if (getFormat() == HAL_PIXEL_FORMAT_BLOB && (getDataSpace() == HAL_DATASPACE_V0_JFIF ||
(getDataSpace() ==
+ static_cast<android_dataspace_t>(ADATASPACE_HEIF_ULTRAHDR)) ||
+ (getDataSpace() ==
static_cast<android_dataspace_t>(
aidl::android::hardware::graphics::common::Dataspace::JPEG_R)))) {
if (mIPCTransport == IPCTransport::HIDL) {
diff --git a/services/camera/libcameraservice/utils/SessionConfigurationUtils.cpp b/services/camera/libcameraservice/utils/SessionConfigurationUtils.cpp
index d937fe9..08f6314 100644
--- a/services/camera/libcameraservice/utils/SessionConfigurationUtils.cpp
+++ b/services/camera/libcameraservice/utils/SessionConfigurationUtils.cpp
@@ -17,6 +17,7 @@
#include <cutils/properties.h>
#include "SessionConfigurationUtils.h"
+#include <android/data_space.h>
#include "../api2/DepthCompositeStream.h"
#include "../api2/HeicCompositeStream.h"
#include "aidl/android/hardware/graphics/common/Dataspace.h"
@@ -167,11 +168,16 @@
getAppropriateModeTag(ANDROID_HEIC_AVAILABLE_HEIC_STREAM_CONFIGURATIONS, maxResolution);
const int32_t jpegRSizesTag = getAppropriateModeTag(
ANDROID_JPEGR_AVAILABLE_JPEG_R_STREAM_CONFIGURATIONS, maxResolution);
+ const int32_t heicUltraHDRSizesTag = getAppropriateModeTag(
+ ANDROID_HEIC_AVAILABLE_HEIC_ULTRA_HDR_STREAM_CONFIGURATIONS, maxResolution);
bool isJpegRDataSpace = (dataSpace == static_cast<android_dataspace_t>(
::aidl::android::hardware::graphics::common::Dataspace::JPEG_R));
+ bool isHeicUltraHDRDataSpace = (dataSpace == static_cast<android_dataspace_t>(
+ ADATASPACE_HEIF_ULTRAHDR));
camera_metadata_ro_entry streamConfigs =
(isJpegRDataSpace) ? info.find(jpegRSizesTag) :
+ (isHeicUltraHDRDataSpace) ? info.find(heicUltraHDRSizesTag) :
(dataSpace == HAL_DATASPACE_DEPTH) ? info.find(depthSizesTag) :
(dataSpace == static_cast<android_dataspace>(HAL_DATASPACE_HEIF)) ?
info.find(heicSizesTag) :
@@ -232,6 +238,8 @@
if (dataSpace == static_cast<android_dataspace_t>(
::aidl::android::hardware::graphics::common::Dataspace::JPEG_R)) {
return true;
+ } else if (dataSpace == static_cast<android_dataspace_t>(ADATASPACE_HEIF_ULTRAHDR)) {
+ return true;
}
return false;
@@ -341,6 +349,9 @@
static_cast<android_dataspace>(
::aidl::android::hardware::graphics::common::Dataspace::JPEG_R)) {
format64 = static_cast<int64_t>(PublicFormat::JPEG_R);
+ } else if (format == HAL_PIXEL_FORMAT_BLOB && dataSpace ==
+ static_cast<android_dataspace>(ADATASPACE_HEIF_ULTRAHDR)) {
+ format64 = static_cast<int64_t>(HEIC_ULTRAHDR);
}
camera_metadata_ro_entry_t entry =
diff --git a/services/camera/libcameraservice/utils/SessionConfigurationUtilsHost.cpp b/services/camera/libcameraservice/utils/SessionConfigurationUtilsHost.cpp
index 7d344f8..2f4e83a 100644
--- a/services/camera/libcameraservice/utils/SessionConfigurationUtilsHost.cpp
+++ b/services/camera/libcameraservice/utils/SessionConfigurationUtilsHost.cpp
@@ -49,6 +49,12 @@
return ANDROID_HEIC_AVAILABLE_HEIC_MIN_FRAME_DURATIONS_MAXIMUM_RESOLUTION;
case ANDROID_HEIC_AVAILABLE_HEIC_STALL_DURATIONS:
return ANDROID_HEIC_AVAILABLE_HEIC_STALL_DURATIONS_MAXIMUM_RESOLUTION;
+ case ANDROID_HEIC_AVAILABLE_HEIC_ULTRA_HDR_STREAM_CONFIGURATIONS:
+ return ANDROID_HEIC_AVAILABLE_HEIC_ULTRA_HDR_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION;
+ case ANDROID_HEIC_AVAILABLE_HEIC_ULTRA_HDR_MIN_FRAME_DURATIONS:
+ return ANDROID_HEIC_AVAILABLE_HEIC_ULTRA_HDR_MIN_FRAME_DURATIONS_MAXIMUM_RESOLUTION;
+ case ANDROID_HEIC_AVAILABLE_HEIC_ULTRA_HDR_STALL_DURATIONS:
+ return ANDROID_HEIC_AVAILABLE_HEIC_ULTRA_HDR_STALL_DURATIONS_MAXIMUM_RESOLUTION;
case ANDROID_JPEGR_AVAILABLE_JPEG_R_STREAM_CONFIGURATIONS:
return ANDROID_JPEGR_AVAILABLE_JPEG_R_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION;
case ANDROID_JPEGR_AVAILABLE_JPEG_R_MIN_FRAME_DURATIONS:
diff --git a/services/oboeservice/AAudioServiceEndpointMMAP.cpp b/services/oboeservice/AAudioServiceEndpointMMAP.cpp
index d663f37..59bb98e 100644
--- a/services/oboeservice/AAudioServiceEndpointMMAP.cpp
+++ b/services/oboeservice/AAudioServiceEndpointMMAP.cpp
@@ -206,12 +206,17 @@
__func__, config->format, config->sample_rate,
config->channel_mask, deviceId);
+ android::DeviceIdVector deviceIds;
+ if (deviceId != AAUDIO_UNSPECIFIED) {
+ deviceIds.push_back(deviceId);
+ }
+
const std::lock_guard<std::mutex> lock(mMmapStreamLock);
const status_t status = MmapStreamInterface::openMmapStream(streamDirection,
&attributes,
config,
mMmapClient,
- &deviceId,
+ &deviceIds,
&sessionId,
this, // callback
mMmapStream,
@@ -228,6 +233,7 @@
config->channel_mask = currentConfig.channel_mask;
return AAUDIO_ERROR_UNAVAILABLE;
}
+ deviceId = android::getFirstDeviceId(deviceIds);
if (deviceId == AAUDIO_UNSPECIFIED) {
ALOGW("%s() - openMmapStream() failed to set deviceId", __func__);
@@ -422,9 +428,17 @@
return AAUDIO_ERROR_NULL;
}
struct audio_mmap_position position;
- const status_t status = mMmapStream->getMmapPosition(&position);
+ status_t status = mMmapStream->getMmapPosition(&position);
ALOGV("%s() status= %d, pos = %d, nanos = %lld\n",
__func__, status, position.position_frames, (long long) position.time_nanoseconds);
+ if (status == INVALID_OPERATION) {
+ // The HAL can return INVALID_OPERATION when the position is UNKNOWN.
+ // That can cause SHARED MMAP to break. So coerce it to NOT_ENOUGH_DATA.
+ // That will get converted to AAUDIO_ERROR_UNAVAILABLE.
+ ALOGW("%s(): change INVALID_OPERATION to NOT_ENOUGH_DATA", __func__);
+ status = NOT_ENOUGH_DATA; // see b/376467258
+ }
+
const aaudio_result_t result = AAudioConvert_androidToAAudioResult(status);
if (result == AAUDIO_ERROR_UNAVAILABLE) {
ALOGW("%s(): getMmapPosition() has no position data available", __func__);
@@ -476,8 +490,9 @@
}
};
-void AAudioServiceEndpointMMAP::onRoutingChanged(audio_port_handle_t portHandle) {
- const auto deviceId = static_cast<int32_t>(portHandle);
+void AAudioServiceEndpointMMAP::onRoutingChanged(const android::DeviceIdVector& deviceIds) {
+ const auto deviceId = android::getFirstDeviceId(deviceIds);
+ // TODO(b/367816690): Compare the new and saved device sets.
ALOGD("%s() called with dev %d, old = %d", __func__, deviceId, getDeviceId());
if (getDeviceId() != deviceId) {
if (getDeviceId() != AUDIO_PORT_HANDLE_NONE) {
diff --git a/services/oboeservice/AAudioServiceEndpointMMAP.h b/services/oboeservice/AAudioServiceEndpointMMAP.h
index 962d390..a4eeba1 100644
--- a/services/oboeservice/AAudioServiceEndpointMMAP.h
+++ b/services/oboeservice/AAudioServiceEndpointMMAP.h
@@ -83,7 +83,7 @@
void onVolumeChanged(float volume) override;
- void onRoutingChanged(audio_port_handle_t portHandle) override;
+ void onRoutingChanged(const android::DeviceIdVector& deviceIds) override;
// ------------------------------------------------------------------------------
aaudio_result_t getDownDataDescription(AudioEndpointParcelable* parcelable);
diff --git a/services/oboeservice/Android.bp b/services/oboeservice/Android.bp
index 67b319f..8200ab5 100644
--- a/services/oboeservice/Android.bp
+++ b/services/oboeservice/Android.bp
@@ -89,6 +89,7 @@
"libaaudio_internal",
"libaudioclient",
"libaudioclient_aidl_conversion",
+ "libaudiofoundation",
"libaudioutils",
"libbase",
"libbinder",
diff --git a/services/oboeservice/fuzzer/Android.bp b/services/oboeservice/fuzzer/Android.bp
index 97825b3..8f672e1 100644
--- a/services/oboeservice/fuzzer/Android.bp
+++ b/services/oboeservice/fuzzer/Android.bp
@@ -44,6 +44,7 @@
"libaudioclient",
"libaudioclient_aidl_conversion",
"libaudioflinger",
+ "libaudiofoundation",
"libaudioutils",
"libbase",
"libbinder",