Merge "Add audio device type MULTICHANNEL_GROUP" into main
diff --git a/camera/camera_platform.aconfig b/camera/camera_platform.aconfig
index cabfbc4..323b23a 100644
--- a/camera/camera_platform.aconfig
+++ b/camera/camera_platform.aconfig
@@ -186,3 +186,19 @@
description: "Support setting and getting mirror mode for shared surfaces"
bug: "298899993"
}
+
+flag {
+ namespace: "camera_platform"
+ is_exported: true
+ name: "multiresolution_imagereader_usage_public"
+ description: "Make constructor for MultiResolutionImageReader with usage public"
+ bug: "338621560"
+}
+
+flag {
+ namespace: "camera_platform"
+ name: "color_temperature"
+ description: "Add keys to manually set color temperature and color tint"
+ bug: "359409044"
+}
+
diff --git a/camera/ndk/impl/ACameraMetadata.cpp b/camera/ndk/impl/ACameraMetadata.cpp
index 69b30f7..fef6443 100644
--- a/camera/ndk/impl/ACameraMetadata.cpp
+++ b/camera/ndk/impl/ACameraMetadata.cpp
@@ -536,6 +536,8 @@
case ACAMERA_COLOR_CORRECTION_TRANSFORM:
case ACAMERA_COLOR_CORRECTION_GAINS:
case ACAMERA_COLOR_CORRECTION_ABERRATION_MODE:
+ case ACAMERA_COLOR_CORRECTION_COLOR_TEMPERATURE:
+ case ACAMERA_COLOR_CORRECTION_COLOR_TINT:
case ACAMERA_CONTROL_AE_ANTIBANDING_MODE:
case ACAMERA_CONTROL_AE_EXPOSURE_COMPENSATION:
case ACAMERA_CONTROL_AE_LOCK:
diff --git a/camera/ndk/include/camera/NdkCameraMetadataTags.h b/camera/ndk/include/camera/NdkCameraMetadataTags.h
index acc3c7c..44aac29 100644
--- a/camera/ndk/include/camera/NdkCameraMetadataTags.h
+++ b/camera/ndk/include/camera/NdkCameraMetadataTags.h
@@ -307,6 +307,100 @@
*/
ACAMERA_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES = // byte[n]
ACAMERA_COLOR_CORRECTION_START + 4,
+ /**
+ * <p>Specifies the color temperature for CCT mode in Kelvin
+ * to adjust the white balance of the image.</p>
+ *
+ * <p>Type: int32</p>
+ *
+ * <p>This tag may appear in:
+ * <ul>
+ * <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
+ * <li>ACaptureRequest</li>
+ * </ul></p>
+ *
+ * <p>Sets the color temperature in Kelvin units for when
+ * ACAMERA_COLOR_CORRECTION_MODE is CCT to adjust the
+ * white balance of the image.</p>
+ * <p>If CCT mode is enabled without a requested color temperature,
+ * a default value will be set by the camera device. The default value can be
+ * retrieved by checking the corresponding capture result. Color temperatures
+ * requested outside the advertised ACAMERA_COLOR_CORRECTION_COLOR_TEMPERATURE_RANGE
+ * will be clamped.</p>
+ *
+ * @see ACAMERA_COLOR_CORRECTION_COLOR_TEMPERATURE_RANGE
+ * @see ACAMERA_COLOR_CORRECTION_MODE
+ */
+ ACAMERA_COLOR_CORRECTION_COLOR_TEMPERATURE = // int32
+ ACAMERA_COLOR_CORRECTION_START + 5,
+ /**
+ * <p>Specifies the color tint for CCT mode to adjust the white
+ * balance of the image.</p>
+ *
+ * <p>Type: int32</p>
+ *
+ * <p>This tag may appear in:
+ * <ul>
+ * <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
+ * <li>ACaptureRequest</li>
+ * </ul></p>
+ *
+ * <p>Sets the color tint for when ACAMERA_COLOR_CORRECTION_MODE
+ * is CCT to adjust the white balance of the image.</p>
+ * <p>If CCT mode is enabled without a requested color tint,
+ * a default value will be set by the camera device. The default value can be
+ * retrieved by checking the corresponding capture result. Color tints requested
+ * outside the supported range will be clamped to the nearest limit (-50 or +50).</p>
+ *
+ * @see ACAMERA_COLOR_CORRECTION_MODE
+ */
+ ACAMERA_COLOR_CORRECTION_COLOR_TINT = // int32
+ ACAMERA_COLOR_CORRECTION_START + 6,
+ /**
+ * <p>The range of supported color temperature values for
+ * ACAMERA_COLOR_CORRECTION_COLOR_TEMPERATURE.</p>
+ *
+ * @see ACAMERA_COLOR_CORRECTION_COLOR_TEMPERATURE
+ *
+ * <p>Type: int32[2]</p>
+ *
+ * <p>This tag may appear in:
+ * <ul>
+ * <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+ * </ul></p>
+ *
+ * <p>This key lists the valid range of color temperature values for
+ * ACAMERA_COLOR_CORRECTION_COLOR_TEMPERATURE supported by this camera device.</p>
+ * <p>This key will be null on devices that do not support CCT mode for
+ * ACAMERA_COLOR_CORRECTION_MODE.</p>
+ *
+ * @see ACAMERA_COLOR_CORRECTION_COLOR_TEMPERATURE
+ * @see ACAMERA_COLOR_CORRECTION_MODE
+ */
+ ACAMERA_COLOR_CORRECTION_COLOR_TEMPERATURE_RANGE = // int32[2]
+ ACAMERA_COLOR_CORRECTION_START + 7,
+ /**
+ * <p>List of color correction modes for ACAMERA_COLOR_CORRECTION_MODE that are
+ * supported by this camera device.</p>
+ *
+ * @see ACAMERA_COLOR_CORRECTION_MODE
+ *
+ * <p>Type: byte[n]</p>
+ *
+ * <p>This tag may appear in:
+ * <ul>
+ * <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+ * </ul></p>
+ *
+ * <p>This key lists the valid modes for ACAMERA_COLOR_CORRECTION_MODE. If no
+ * color correction modes are available for a device, this key will be null.</p>
+ * <p>Camera devices that have a FULL hardware level will always include at least
+ * FAST, HIGH_QUALITY, and TRANSFORM_MATRIX modes.</p>
+ *
+ * @see ACAMERA_COLOR_CORRECTION_MODE
+ */
+ ACAMERA_COLOR_CORRECTION_AVAILABLE_MODES = // byte[n]
+ ACAMERA_COLOR_CORRECTION_START + 8,
ACAMERA_COLOR_CORRECTION_END,
/**
@@ -8137,6 +8231,20 @@
*/
ACAMERA_COLOR_CORRECTION_MODE_HIGH_QUALITY = 2,
+ /**
+ * <p>Use
+ * ACAMERA_COLOR_CORRECTION_COLOR_TEMPERATURE and
+ * ACAMERA_COLOR_CORRECTION_COLOR_TINT to adjust the white balance based
+ * on correlated color temperature.</p>
+ * <p>If AWB is enabled with <code>ACAMERA_CONTROL_AWB_MODE != OFF</code>, then
+ * CCT is ignored.</p>
+ *
+ * @see ACAMERA_COLOR_CORRECTION_COLOR_TEMPERATURE
+ * @see ACAMERA_COLOR_CORRECTION_COLOR_TINT
+ * @see ACAMERA_CONTROL_AWB_MODE
+ */
+ ACAMERA_COLOR_CORRECTION_MODE_CCT = 3,
+
} acamera_metadata_enum_android_color_correction_mode_t;
// ACAMERA_COLOR_CORRECTION_ABERRATION_MODE
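For context, a minimal NDK usage sketch for the new CCT keys (not part of this change): it assumes a valid ACaptureRequest on a device whose ACAMERA_COLOR_CORRECTION_AVAILABLE_MODES includes CCT, and turns AWB off as required by the ACAMERA_COLOR_CORRECTION_MODE_CCT documentation above.

    #include <camera/NdkCameraMetadataTags.h>
    #include <camera/NdkCaptureRequest.h>

    // Sketch: request a manual white balance of ~5600K with a slight tint.
    static camera_status_t applyCctWhiteBalance(ACaptureRequest* request) {
        // CCT is ignored unless AWB is off (see ACAMERA_COLOR_CORRECTION_MODE_CCT above).
        const uint8_t awbMode = ACAMERA_CONTROL_AWB_MODE_OFF;
        camera_status_t status = ACaptureRequest_setEntry_u8(
                request, ACAMERA_CONTROL_AWB_MODE, 1, &awbMode);
        if (status != ACAMERA_OK) return status;

        const uint8_t ccMode = ACAMERA_COLOR_CORRECTION_MODE_CCT;
        status = ACaptureRequest_setEntry_u8(
                request, ACAMERA_COLOR_CORRECTION_MODE, 1, &ccMode);
        if (status != ACAMERA_OK) return status;

        const int32_t kelvin = 5600;  // clamped by the device to COLOR_TEMPERATURE_RANGE
        status = ACaptureRequest_setEntry_i32(
                request, ACAMERA_COLOR_CORRECTION_COLOR_TEMPERATURE, 1, &kelvin);
        if (status != ACAMERA_OK) return status;

        const int32_t tint = 10;  // out-of-range values are clamped to [-50, 50]
        return ACaptureRequest_setEntry_i32(
                request, ACAMERA_COLOR_CORRECTION_COLOR_TINT, 1, &tint);
    }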
diff --git a/media/audio/aconfig/audio_framework.aconfig b/media/audio/aconfig/audio_framework.aconfig
index aebdf1e..c6479d0 100644
--- a/media/audio/aconfig/audio_framework.aconfig
+++ b/media/audio/aconfig/audio_framework.aconfig
@@ -33,6 +33,14 @@
}
flag {
+ name: "deprecate_stream_bt_sco"
+ namespace: "media_audio"
+ description: "Deprecate STREAM_BLUETOOTH_SCO"
+ is_exported: true
+ bug: "376756660"
+}
+
+flag {
name: "enable_multichannel_group_device"
namespace: "media_audio"
description:
@@ -104,6 +112,14 @@
}
flag {
+ name: "hardening_permission_api"
+ is_exported: true
+ namespace: "media_audio"
+ description: "API flag for additional appop/perm constructs for hardening."
+ bug: "376480814"
+}
+
+flag {
name: "loudness_configurator_api"
is_exported: true
namespace: "media_audio"
diff --git a/media/codec2/core/include/C2Config.h b/media/codec2/core/include/C2Config.h
index e6782a9..069d6ad 100644
--- a/media/codec2/core/include/C2Config.h
+++ b/media/codec2/core/include/C2Config.h
@@ -430,6 +430,7 @@
_C2_PL_AV1_BASE = 0x9000,
_C2_PL_VP8_BASE = 0xA000,
_C2_PL_MPEGH_BASE = 0xB000, // MPEG-H 3D Audio
+ _C2_PL_APV_BASE = 0xC000, // APV
C2_PROFILE_LEVEL_VENDOR_START = 0x70000000,
};
@@ -597,6 +598,15 @@
PROFILE_MPEGH_HIGH, ///< MPEG-H High
PROFILE_MPEGH_LC, ///< MPEG-H Low-complexity
PROFILE_MPEGH_BASELINE, ///< MPEG-H Baseline
+
+ // Advanced Professional Video Codec (APV)
+ PROFILE_APV_422_10 = _C2_PL_APV_BASE, ///< APV 422-10 Profile
+ PROFILE_APV_422_12, ///< APV 422-12 Profile
+ PROFILE_APV_444_10, ///< APV 444-10 Profile
+ PROFILE_APV_444_12, ///< APV 444-12 Profile
+ PROFILE_APV_4444_10, ///< APV 4444-10 Profile
+ PROFILE_APV_4444_12, ///< APV 4444-12 Profile
+ PROFILE_APV_400_10, ///< APV 400-10 Profile
};
enum C2Config::level_t : uint32_t {
@@ -752,6 +762,68 @@
LEVEL_MPEGH_3, ///< MPEG-H L3
LEVEL_MPEGH_4, ///< MPEG-H L4
LEVEL_MPEGH_5, ///< MPEG-H L5
+
+ // Advanced Professional Video Codec (APV) levels/bands
+ LEVEL_APV_1_BAND_0 = _C2_PL_APV_BASE, ///< APV L 1, BAND 0
+ LEVEL_APV_1_1_BAND_0, ///< APV L 1.1, BAND 0
+ LEVEL_APV_2_BAND_0, ///< APV L 2, BAND 0
+ LEVEL_APV_2_1_BAND_0, ///< APV L 2.1, BAND 0
+ LEVEL_APV_3_BAND_0, ///< APV L 3, BAND 0
+ LEVEL_APV_3_1_BAND_0, ///< APV L 3.1, BAND 0
+ LEVEL_APV_4_BAND_0, ///< APV L 4, BAND 0
+ LEVEL_APV_4_1_BAND_0, ///< APV L 4.1, BAND 0
+ LEVEL_APV_5_BAND_0, ///< APV L 5, BAND 0
+ LEVEL_APV_5_1_BAND_0, ///< APV L 5.1, BAND 0
+ LEVEL_APV_6_BAND_0, ///< APV L 6, BAND 0
+ LEVEL_APV_6_1_BAND_0, ///< APV L 6.1, BAND 0
+ LEVEL_APV_7_BAND_0, ///< APV L 7, BAND 0
+ LEVEL_APV_7_1_BAND_0, ///< APV L 7.1, BAND 0
+
+ LEVEL_APV_1_BAND_1 = _C2_PL_APV_BASE + 0x100, ///< APV L 1, BAND 1
+ LEVEL_APV_1_1_BAND_1, ///< APV L 1.1, BAND 1
+ LEVEL_APV_2_BAND_1, ///< APV L 2, BAND 1
+ LEVEL_APV_2_1_BAND_1, ///< APV L 2.1, BAND 1
+ LEVEL_APV_3_BAND_1, ///< APV L 3, BAND 1
+ LEVEL_APV_3_1_BAND_1, ///< APV L 3.1, BAND 1
+ LEVEL_APV_4_BAND_1, ///< APV L 4, BAND 1
+ LEVEL_APV_4_1_BAND_1, ///< APV L 4.1, BAND 1
+ LEVEL_APV_5_BAND_1, ///< APV L 5, BAND 1
+ LEVEL_APV_5_1_BAND_1, ///< APV L 5.1, BAND 1
+ LEVEL_APV_6_BAND_1, ///< APV L 6, BAND 1
+ LEVEL_APV_6_1_BAND_1, ///< APV L 6.1, BAND 1
+ LEVEL_APV_7_BAND_1, ///< APV L 7, BAND 1
+ LEVEL_APV_7_1_BAND_1, ///< APV L 7.1, BAND 1
+
+ LEVEL_APV_1_BAND_2 = _C2_PL_APV_BASE + 0x200, ///< APV L 1, BAND 2
+ LEVEL_APV_1_1_BAND_2, ///< APV L 1.1, BAND 2
+ LEVEL_APV_2_BAND_2, ///< APV L 2, BAND 2
+ LEVEL_APV_2_1_BAND_2, ///< APV L 2.1, BAND 2
+ LEVEL_APV_3_BAND_2, ///< APV L 3, BAND 2
+ LEVEL_APV_3_1_BAND_2, ///< APV L 3.1, BAND 2
+ LEVEL_APV_4_BAND_2, ///< APV L 4, BAND 2
+ LEVEL_APV_4_1_BAND_2, ///< APV L 4.1, BAND 2
+ LEVEL_APV_5_BAND_2, ///< APV L 5, BAND 2
+ LEVEL_APV_5_1_BAND_2, ///< APV L 5.1, BAND 2
+ LEVEL_APV_6_BAND_2, ///< APV L 6, BAND 2
+ LEVEL_APV_6_1_BAND_2, ///< APV L 6.1, BAND 2
+ LEVEL_APV_7_BAND_2, ///< APV L 7, BAND 2
+ LEVEL_APV_7_1_BAND_2, ///< APV L 7.1, BAND 2
+
+ LEVEL_APV_1_BAND_3 = _C2_PL_APV_BASE + 0x300, ///< APV L 1, BAND 3
+ LEVEL_APV_1_1_BAND_3, ///< APV L 1.1, BAND 3
+ LEVEL_APV_2_BAND_3, ///< APV L 2, BAND 3
+ LEVEL_APV_2_1_BAND_3, ///< APV L 2.1, BAND 3
+ LEVEL_APV_3_BAND_3, ///< APV L 3, BAND 3
+ LEVEL_APV_3_1_BAND_3, ///< APV L 3.1, BAND 3
+ LEVEL_APV_4_BAND_3, ///< APV L 4, BAND 3
+ LEVEL_APV_4_1_BAND_3, ///< APV L 4.1, BAND 3
+ LEVEL_APV_5_BAND_3, ///< APV L 5, BAND 3
+ LEVEL_APV_5_1_BAND_3, ///< APV L 5.1, BAND 3
+ LEVEL_APV_6_BAND_3, ///< APV L 6, BAND 3
+ LEVEL_APV_6_1_BAND_3, ///< APV L 6.1, BAND 3
+ LEVEL_APV_7_BAND_3, ///< APV L 7, BAND 3
+ LEVEL_APV_7_1_BAND_3, ///< APV L 7.1, BAND 3
+
};
struct C2ProfileLevelStruct {
diff --git a/media/codec2/sfplugin/utils/Codec2Mapper.cpp b/media/codec2/sfplugin/utils/Codec2Mapper.cpp
index 9297520..3841831 100644
--- a/media/codec2/sfplugin/utils/Codec2Mapper.cpp
+++ b/media/codec2/sfplugin/utils/Codec2Mapper.cpp
@@ -436,6 +436,86 @@
{ C2Config::hdr_format_t::HDR10_PLUS, AV1ProfileMain10HDR10Plus },
};
+// APV
+ALookup<C2Config::profile_t, int32_t> sApvProfiles = {
+ { C2Config::PROFILE_APV_422_10, APVProfile422_10 },
+ { C2Config::PROFILE_APV_422_10, APVProfile422_10HDR10 },
+ { C2Config::PROFILE_APV_422_10, APVProfile422_10HDR10Plus },
+};
+
+ALookup<C2Config::profile_t, int32_t> sApvHdrProfiles = {
+ { C2Config::PROFILE_APV_422_10, APVProfile422_10HDR10 },
+};
+
+ALookup<C2Config::profile_t, int32_t> sApvHdr10PlusProfiles = {
+ { C2Config::PROFILE_APV_422_10, APVProfile422_10HDR10Plus },
+};
+
+ALookup<C2Config::level_t, int32_t> sApvLevels = {
+ { C2Config::LEVEL_APV_1_BAND_0, APVLevel1Band0 },
+ { C2Config::LEVEL_APV_1_BAND_1, APVLevel1Band1 },
+ { C2Config::LEVEL_APV_1_BAND_2, APVLevel1Band2 },
+ { C2Config::LEVEL_APV_1_BAND_3, APVLevel1Band3 },
+ { C2Config::LEVEL_APV_1_1_BAND_0, APVLevel11Band0 },
+ { C2Config::LEVEL_APV_1_1_BAND_1, APVLevel11Band1 },
+ { C2Config::LEVEL_APV_1_1_BAND_2, APVLevel11Band2 },
+ { C2Config::LEVEL_APV_1_1_BAND_3, APVLevel11Band3 },
+ { C2Config::LEVEL_APV_2_BAND_0, APVLevel2Band0 },
+ { C2Config::LEVEL_APV_2_BAND_1, APVLevel2Band1 },
+ { C2Config::LEVEL_APV_2_BAND_2, APVLevel2Band2 },
+ { C2Config::LEVEL_APV_2_BAND_3, APVLevel2Band3 },
+ { C2Config::LEVEL_APV_2_1_BAND_0, APVLevel21Band0 },
+ { C2Config::LEVEL_APV_2_1_BAND_1, APVLevel21Band1 },
+ { C2Config::LEVEL_APV_2_1_BAND_2, APVLevel21Band2 },
+ { C2Config::LEVEL_APV_2_1_BAND_3, APVLevel21Band3 },
+ { C2Config::LEVEL_APV_3_BAND_0, APVLevel3Band0 },
+ { C2Config::LEVEL_APV_3_BAND_1, APVLevel3Band1 },
+ { C2Config::LEVEL_APV_3_BAND_2, APVLevel3Band2 },
+ { C2Config::LEVEL_APV_3_BAND_3, APVLevel3Band3 },
+ { C2Config::LEVEL_APV_3_1_BAND_0, APVLevel31Band0 },
+ { C2Config::LEVEL_APV_3_1_BAND_1, APVLevel31Band1 },
+ { C2Config::LEVEL_APV_3_1_BAND_2, APVLevel31Band2 },
+ { C2Config::LEVEL_APV_3_1_BAND_3, APVLevel31Band3 },
+ { C2Config::LEVEL_APV_4_BAND_0, APVLevel4Band0 },
+ { C2Config::LEVEL_APV_4_BAND_1, APVLevel4Band1 },
+ { C2Config::LEVEL_APV_4_BAND_2, APVLevel4Band2 },
+ { C2Config::LEVEL_APV_4_BAND_3, APVLevel4Band3 },
+ { C2Config::LEVEL_APV_4_1_BAND_0, APVLevel41Band0 },
+ { C2Config::LEVEL_APV_4_1_BAND_1, APVLevel41Band1 },
+ { C2Config::LEVEL_APV_4_1_BAND_2, APVLevel41Band2 },
+ { C2Config::LEVEL_APV_4_1_BAND_3, APVLevel41Band3 },
+ { C2Config::LEVEL_APV_5_BAND_0, APVLevel5Band0 },
+ { C2Config::LEVEL_APV_5_BAND_1, APVLevel5Band1 },
+ { C2Config::LEVEL_APV_5_BAND_2, APVLevel5Band2 },
+ { C2Config::LEVEL_APV_5_BAND_3, APVLevel5Band3 },
+ { C2Config::LEVEL_APV_5_1_BAND_0, APVLevel51Band0 },
+ { C2Config::LEVEL_APV_5_1_BAND_1, APVLevel51Band1 },
+ { C2Config::LEVEL_APV_5_1_BAND_2, APVLevel51Band2 },
+ { C2Config::LEVEL_APV_5_1_BAND_3, APVLevel51Band3 },
+ { C2Config::LEVEL_APV_6_BAND_0, APVLevel6Band0 },
+ { C2Config::LEVEL_APV_6_BAND_1, APVLevel6Band1 },
+ { C2Config::LEVEL_APV_6_BAND_2, APVLevel6Band2 },
+ { C2Config::LEVEL_APV_6_BAND_3, APVLevel6Band3 },
+ { C2Config::LEVEL_APV_6_1_BAND_0, APVLevel61Band0 },
+ { C2Config::LEVEL_APV_6_1_BAND_1, APVLevel61Band1 },
+ { C2Config::LEVEL_APV_6_1_BAND_2, APVLevel61Band2 },
+ { C2Config::LEVEL_APV_6_1_BAND_3, APVLevel61Band3 },
+ { C2Config::LEVEL_APV_7_BAND_0, APVLevel7Band0 },
+ { C2Config::LEVEL_APV_7_BAND_1, APVLevel7Band1 },
+ { C2Config::LEVEL_APV_7_BAND_2, APVLevel7Band2 },
+ { C2Config::LEVEL_APV_7_BAND_3, APVLevel7Band3 },
+ { C2Config::LEVEL_APV_7_1_BAND_0, APVLevel71Band0 },
+ { C2Config::LEVEL_APV_7_1_BAND_1, APVLevel71Band1 },
+ { C2Config::LEVEL_APV_7_1_BAND_2, APVLevel71Band2 },
+ { C2Config::LEVEL_APV_7_1_BAND_3, APVLevel71Band3 },
+};
+
+ALookup<C2Config::hdr_format_t, int32_t> sApvHdrFormats = {
+ { C2Config::hdr_format_t::HLG, APVProfile422_10 },
+ { C2Config::hdr_format_t::HDR10, APVProfile422_10HDR10 },
+ { C2Config::hdr_format_t::HDR10_PLUS, APVProfile422_10HDR10Plus },
+};
+
// HAL_PIXEL_FORMAT_* -> COLOR_Format*
ALookup<uint32_t, int32_t> sPixelFormats = {
{ HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, COLOR_FormatSurface },
@@ -720,6 +800,37 @@
int32_t mBitDepth;
};
+// APV
+struct ApvProfileLevelMapper : ProfileLevelMapperHelper {
+ ApvProfileLevelMapper(bool isHdr = false, bool isHdr10Plus = false) :
+ ProfileLevelMapperHelper(),
+ mIsHdr(isHdr), mIsHdr10Plus(isHdr10Plus) {}
+
+ virtual bool simpleMap(C2Config::level_t from, int32_t *to) {
+ return sApvLevels.map(from, to);
+ }
+ virtual bool simpleMap(int32_t from, C2Config::level_t *to) {
+ return sApvLevels.map(from, to);
+ }
+ virtual bool simpleMap(C2Config::profile_t from, int32_t *to) {
+ return mIsHdr10Plus ? sApvHdr10PlusProfiles.map(from, to) :
+ mIsHdr ? sApvHdrProfiles.map(from, to) :
+ sApvProfiles.map(from, to);
+ }
+ virtual bool simpleMap(int32_t from, C2Config::profile_t *to) {
+ return mIsHdr10Plus ? sApvHdr10PlusProfiles.map(from, to) :
+ mIsHdr ? sApvHdrProfiles.map(from, to) :
+ sApvProfiles.map(from, to);
+ }
+ virtual bool mapHdrFormat(int32_t from, C2Config::hdr_format_t *to) override {
+ return sApvHdrFormats.map(from, to);
+ }
+
+private:
+ bool mIsHdr;
+ bool mIsHdr10Plus;
+};
+
} // namespace
// the default mapper is used for media types that do not support HDR
@@ -753,6 +864,8 @@
return std::make_shared<Vp9ProfileLevelMapper>();
} else if (mediaType == MIMETYPE_VIDEO_AV1) {
return std::make_shared<Av1ProfileLevelMapper>();
+ } else if (mediaType == MIMETYPE_VIDEO_APV) {
+ return std::make_shared<ApvProfileLevelMapper>();
}
return nullptr;
}
@@ -767,6 +880,8 @@
return std::make_shared<Vp9ProfileLevelMapper>(true, isHdr10Plus);
} else if (mediaType == MIMETYPE_VIDEO_AV1) {
return std::make_shared<Av1ProfileLevelMapper>(true, isHdr10Plus);
+ } else if (mediaType == MIMETYPE_VIDEO_APV) {
+ return std::make_shared<ApvProfileLevelMapper>(true, isHdr10Plus);
}
return nullptr;
}
@@ -779,6 +894,8 @@
return GetProfileLevelMapper(mediaType);
} else if (mediaType == MIMETYPE_VIDEO_AV1 && bitDepth == 10) {
return std::make_shared<Av1ProfileLevelMapper>(false, false, bitDepth);
+ } else if (mediaType == MIMETYPE_VIDEO_APV) {
+ return std::make_shared<ApvProfileLevelMapper>();
}
return nullptr;
}
diff --git a/media/janitors/media_solutions_OWNERS b/media/janitors/media_solutions_OWNERS
index 3243726..95c2b97 100644
--- a/media/janitors/media_solutions_OWNERS
+++ b/media/janitors/media_solutions_OWNERS
@@ -4,6 +4,7 @@
andrewlewis@google.com
bachinger@google.com
claincly@google.com
+dancho@google.com
ibaker@google.com
ivanbuper@google.com
jbibik@google.com
diff --git a/media/libaaudio/include/system/aaudio/AAudio.h b/media/libaaudio/include/system/aaudio/AAudio.h
new file mode 100644
index 0000000..933ad35
--- /dev/null
+++ b/media/libaaudio/include/system/aaudio/AAudio.h
@@ -0,0 +1,80 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * These are the system APIs for AAudio.
+ */
+#ifndef SYSTEM_AAUDIO_H
+#define SYSTEM_AAUDIO_H
+
+#include <aaudio/AAudio.h>
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+/**
+ * The tags string attribute allows OEMs to extend the
+ * <a href="/reference/android/media/AudioAttributes">AudioAttributes</a>.
+ *
+ * Note that the maximum length includes all tags combined with delimiters and the null terminator.
+ *
+ * Note that it matches the equivalent value in
+ * <a href="/reference/android/system/media/audio">AUDIO_ATTRIBUTES_TAGS_MAX_SIZE</a>
+ * in the Android native API.
+ */
+#define AAUDIO_ATTRIBUTES_TAGS_MAX_SIZE 256
+
+/**
+ * Set one or more vendor extension tags that the output stream will carry.
+ *
+ * The tags can be used by the audio policy engine for routing purposes.
+ * Routing is based on audio attributes, which are translated into a legacy stream type.
+ * Since stream types cannot be extended, product strategies were introduced to allow
+ * vendor extension of routing capabilities.
+ * This could, for example, affect how volume and routing are handled for the stream.
+ *
+ * The tags can also be used by a System App to pass vendor specific information through the
+ * framework to the HAL. That info could affect routing, ducking or other audio behavior in the HAL.
+ *
+ * By default, audio attributes tags are empty if this method is not called.
+ *
+ * @param builder reference provided by AAudio_createStreamBuilder()
+ * @param tags the desired tags to add, which must be in UTF-8 format and null-terminated. The size
+ * of the tags must be at most {@link #AAUDIO_ATTRIBUTES_TAGS_MAX_SIZE}. Multiple tags
+ * must be separated by semicolons.
+ * @return {@link #AAUDIO_OK} on success or {@link #AAUDIO_ERROR_ILLEGAL_ARGUMENT} if the given
+ * tags is null or its length, including the null terminator, exceeds {@link #AAUDIO_ATTRIBUTES_TAGS_MAX_SIZE}.
+ */
+aaudio_result_t AAudioStreamBuilder_setTags(AAudioStreamBuilder* _Nonnull builder,
+ const char* _Nonnull tags);
+
+/**
+ * Read the audio attributes' tags for the stream into a buffer.
+ * The caller is responsible for allocating the buffer and for freeing it.
+ *
+ * @param stream reference provided by AAudioStreamBuilder_openStream()
+ * @param tags pointer to a buffer receiving the UTF-8 tags that contain the OEM extensions. The
+ * buffer must be at least {@link #AAUDIO_ATTRIBUTES_TAGS_MAX_SIZE} bytes.
+ * @return {@link #AAUDIO_OK} or {@link #AAUDIO_ERROR_ILLEGAL_ARGUMENT} if the given tags pointer is null.
+ */
+aaudio_result_t AAudioStream_getTags(AAudioStream* _Nonnull stream, char* _Nonnull tags);
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif //SYSTEM_AAUDIO_H
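A minimal usage sketch for the two new system APIs (assumes a system caller with access to <system/aaudio/AAudio.h>; most error handling trimmed):

    #include <aaudio/AAudio.h>
    #include <system/aaudio/AAudio.h>

    // Sketch: attach an OEM routing tag to an output stream and read it back.
    static aaudio_result_t openTaggedStream(AAudioStream** outStream) {
        AAudioStreamBuilder* builder = nullptr;
        aaudio_result_t result = AAudio_createStreamBuilder(&builder);
        if (result != AAUDIO_OK) return result;

        // Multiple tags are separated by semicolons; the total size, including the
        // null terminator, must stay within AAUDIO_ATTRIBUTES_TAGS_MAX_SIZE.
        result = AAudioStreamBuilder_setTags(builder, "oem=routing_extension");
        if (result == AAUDIO_OK) {
            result = AAudioStreamBuilder_openStream(builder, outStream);
        }
        AAudioStreamBuilder_delete(builder);
        if (result != AAUDIO_OK) return result;

        char tags[AAUDIO_ATTRIBUTES_TAGS_MAX_SIZE] = {};
        return AAudioStream_getTags(*outStream, tags);  // expect "oem=routing_extension"
    }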
diff --git a/media/libaaudio/src/binding/AAudioStreamConfiguration.cpp b/media/libaaudio/src/binding/AAudioStreamConfiguration.cpp
index c4692ce..c53a897 100644
--- a/media/libaaudio/src/binding/AAudioStreamConfiguration.cpp
+++ b/media/libaaudio/src/binding/AAudioStreamConfiguration.cpp
@@ -50,7 +50,7 @@
setUsage(parcelable.usage);
static_assert(sizeof(aaudio_content_type_t) == sizeof(parcelable.contentType));
setContentType(parcelable.contentType);
-
+ setTags(parcelable.tags);
static_assert(sizeof(aaudio_spatialization_behavior_t) ==
sizeof(parcelable.spatializationBehavior));
setSpatializationBehavior(parcelable.spatializationBehavior);
@@ -106,6 +106,8 @@
result.usage = getUsage();
static_assert(sizeof(aaudio_content_type_t) == sizeof(result.contentType));
result.contentType = getContentType();
+ std::optional<std::string> tags = getTags();
+ result.tags = tags.has_value() ? tags.value() : "";
static_assert(
sizeof(aaudio_spatialization_behavior_t) == sizeof(result.spatializationBehavior));
result.spatializationBehavior = getSpatializationBehavior();
diff --git a/media/libaaudio/src/binding/aidl/aaudio/StreamParameters.aidl b/media/libaaudio/src/binding/aidl/aaudio/StreamParameters.aidl
index fa46e0d..a301da8 100644
--- a/media/libaaudio/src/binding/aidl/aaudio/StreamParameters.aidl
+++ b/media/libaaudio/src/binding/aidl/aaudio/StreamParameters.aidl
@@ -27,6 +27,7 @@
int /* aaudio_direction_t */ direction; // = AAUDIO_DIRECTION_OUTPUT;
int /* aaudio_usage_t */ usage; // = AAUDIO_UNSPECIFIED;
int /* aaudio_content_type_t */ contentType; // = AAUDIO_UNSPECIFIED;
+ @utf8InCpp String tags; /* UTF8 */
int /* aaudio_spatialization_behavior_t */spatializationBehavior; //= AAUDIO_UNSPECIFIED;
boolean isContentSpatialized; // = false;
int /* aaudio_input_preset_t */ inputPreset; // = AAUDIO_UNSPECIFIED;
diff --git a/media/libaaudio/src/client/AudioStreamInternal.cpp b/media/libaaudio/src/client/AudioStreamInternal.cpp
index fa3f5a0..99b90e2 100644
--- a/media/libaaudio/src/client/AudioStreamInternal.cpp
+++ b/media/libaaudio/src/client/AudioStreamInternal.cpp
@@ -129,6 +129,7 @@
request.getConfiguration().setUsage(getUsage());
request.getConfiguration().setContentType(getContentType());
+ request.getConfiguration().setTags(getTags());
request.getConfiguration().setSpatializationBehavior(getSpatializationBehavior());
request.getConfiguration().setIsContentSpatialized(isContentSpatialized());
request.getConfiguration().setInputPreset(getInputPreset());
@@ -185,6 +186,7 @@
setUsage(configurationOutput.getUsage());
setContentType(configurationOutput.getContentType());
+ setTags(configurationOutput.getTags());
setSpatializationBehavior(configurationOutput.getSpatializationBehavior());
setIsContentSpatialized(configurationOutput.isContentSpatialized());
setInputPreset(configurationOutput.getInputPreset());
diff --git a/media/libaaudio/src/core/AAudioAudio.cpp b/media/libaaudio/src/core/AAudioAudio.cpp
index 3315344..fb87dd9 100644
--- a/media/libaaudio/src/core/AAudioAudio.cpp
+++ b/media/libaaudio/src/core/AAudioAudio.cpp
@@ -25,6 +25,7 @@
#include <aaudio/AAudio.h>
#include <aaudio/AAudioTesting.h>
+#include <system/aaudio/AAudio.h>
#include "AudioClock.h"
#include "AudioGlobal.h"
#include "AudioStreamBuilder.h"
@@ -177,6 +178,17 @@
streamBuilder->setContentType(contentType);
}
+AAUDIO_API aaudio_result_t AAudioStreamBuilder_setTags(AAudioStreamBuilder* builder,
+ const char* tags) {
+ if (tags == nullptr || strlen(tags) >= AAUDIO_ATTRIBUTES_TAGS_MAX_SIZE) {
+ return AAUDIO_ERROR_ILLEGAL_ARGUMENT;
+ }
+ AudioStreamBuilder *streamBuilder = convertAAudioBuilderToStreamBuilder(builder);
+ std::optional<std::string> optionalTags = std::string(tags);
+ streamBuilder->setTags(optionalTags);
+ return AAUDIO_OK;
+}
+
AAUDIO_API void AAudioStreamBuilder_setSpatializationBehavior(AAudioStreamBuilder* builder,
aaudio_spatialization_behavior_t spatializationBehavior) {
AudioStreamBuilder *streamBuilder = convertAAudioBuilderToStreamBuilder(builder);
@@ -546,6 +558,22 @@
return audioStream->getContentType();
}
+AAUDIO_API aaudio_result_t AAudioStream_getTags(AAudioStream* stream, char* tags)
+{
+ if (tags == nullptr) {
+ return AAUDIO_ERROR_ILLEGAL_ARGUMENT;
+ }
+ AudioStream *audioStream = convertAAudioStreamToAudioStream(stream);
+ std::optional<std::string> optTags = audioStream->getTags();
+ if (optTags.has_value() && !optTags->empty()) {
+ strncpy(tags, optTags.value().c_str(), AAUDIO_ATTRIBUTES_TAGS_MAX_SIZE);
+ tags[AAUDIO_ATTRIBUTES_TAGS_MAX_SIZE-1] = '\0';
+ } else {
+ tags[0] = '\0';
+ }
+ return AAUDIO_OK;
+}
+
AAUDIO_API aaudio_spatialization_behavior_t AAudioStream_getSpatializationBehavior(
AAudioStream* stream)
{
diff --git a/media/libaaudio/src/core/AAudioStreamParameters.cpp b/media/libaaudio/src/core/AAudioStreamParameters.cpp
index 67fc668..056918a 100644
--- a/media/libaaudio/src/core/AAudioStreamParameters.cpp
+++ b/media/libaaudio/src/core/AAudioStreamParameters.cpp
@@ -18,6 +18,7 @@
#define LOG_TAG "AAudioStreamParameters"
#include <utils/Log.h>
#include <system/audio.h>
+#include <system/aaudio/AAudio.h>
#include "AAudioStreamParameters.h"
@@ -34,6 +35,7 @@
mBufferCapacity = other.mBufferCapacity;
mUsage = other.mUsage;
mContentType = other.mContentType;
+ mTags = other.mTags;
mSpatializationBehavior = other.mSpatializationBehavior;
mIsContentSpatialized = other.mIsContentSpatialized;
mInputPreset = other.mInputPreset;
@@ -199,6 +201,10 @@
// break;
}
+ if (mTags.has_value() && mTags->size() >= AAUDIO_ATTRIBUTES_TAGS_MAX_SIZE) {
+ return AAUDIO_ERROR_ILLEGAL_ARGUMENT;
+ }
+
return validateChannelMask();
}
@@ -301,6 +307,7 @@
ALOGD("mBufferCapacity = %6d", mBufferCapacity);
ALOGD("mUsage = %6d", mUsage);
ALOGD("mContentType = %6d", mContentType);
+ ALOGD("mTags = %s", mTags.has_value() ? mTags.value().c_str() : "");
ALOGD("mSpatializationBehavior = %6d", mSpatializationBehavior);
ALOGD("mIsContentSpatialized = %s", mIsContentSpatialized ? "true" : "false");
ALOGD("mInputPreset = %6d", mInputPreset);
diff --git a/media/libaaudio/src/core/AAudioStreamParameters.h b/media/libaaudio/src/core/AAudioStreamParameters.h
index 7c78f03..cad27a7 100644
--- a/media/libaaudio/src/core/AAudioStreamParameters.h
+++ b/media/libaaudio/src/core/AAudioStreamParameters.h
@@ -97,6 +97,14 @@
mContentType = contentType;
}
+ void setTags(const std::optional<std::string>& tags) {
+ mTags = tags;
+ }
+
+ const std::optional<std::string> getTags() const {
+ return mTags;
+ }
+
aaudio_spatialization_behavior_t getSpatializationBehavior() const {
return mSpatializationBehavior;
}
@@ -223,6 +231,7 @@
aaudio_direction_t mDirection = AAUDIO_DIRECTION_OUTPUT;
aaudio_usage_t mUsage = AAUDIO_UNSPECIFIED;
aaudio_content_type_t mContentType = AAUDIO_UNSPECIFIED;
+ std::optional<std::string> mTags = {};
aaudio_spatialization_behavior_t mSpatializationBehavior
= AAUDIO_UNSPECIFIED;
bool mIsContentSpatialized = false;
diff --git a/media/libaaudio/src/core/AudioStream.cpp b/media/libaaudio/src/core/AudioStream.cpp
index e0fd325..a75a2a1 100644
--- a/media/libaaudio/src/core/AudioStream.cpp
+++ b/media/libaaudio/src/core/AudioStream.cpp
@@ -93,6 +93,7 @@
if (mContentType == AAUDIO_UNSPECIFIED) {
mContentType = AAUDIO_CONTENT_TYPE_MUSIC;
}
+ mTags = builder.getTags();
mSpatializationBehavior = builder.getSpatializationBehavior();
// for consistency with other properties, note UNSPECIFIED is the same as AUTO
if (mSpatializationBehavior == AAUDIO_UNSPECIFIED) {
diff --git a/media/libaaudio/src/core/AudioStream.h b/media/libaaudio/src/core/AudioStream.h
index 49a63c4..3271882 100644
--- a/media/libaaudio/src/core/AudioStream.h
+++ b/media/libaaudio/src/core/AudioStream.h
@@ -290,6 +290,10 @@
return mContentType;
}
+ const std::optional<std::string> getTags() const {
+ return mTags;
+ }
+
aaudio_spatialization_behavior_t getSpatializationBehavior() const {
return mSpatializationBehavior;
}
@@ -687,6 +691,13 @@
mContentType = contentType;
}
+ /**
+ * This should not be called after the open() call.
+ */
+ void setTags(const std::optional<std::string> &tags) {
+ mTags = tags;
+ }
+
void setSpatializationBehavior(aaudio_spatialization_behavior_t spatializationBehavior) {
mSpatializationBehavior = spatializationBehavior;
}
@@ -776,6 +787,7 @@
aaudio_usage_t mUsage = AAUDIO_UNSPECIFIED;
aaudio_content_type_t mContentType = AAUDIO_UNSPECIFIED;
+ std::optional<std::string> mTags = {};
aaudio_spatialization_behavior_t mSpatializationBehavior = AAUDIO_UNSPECIFIED;
bool mIsContentSpatialized = false;
aaudio_input_preset_t mInputPreset = AAUDIO_UNSPECIFIED;
diff --git a/media/libaaudio/src/legacy/AudioStreamTrack.cpp b/media/libaaudio/src/legacy/AudioStreamTrack.cpp
index d729047..16c0bcd 100644
--- a/media/libaaudio/src/legacy/AudioStreamTrack.cpp
+++ b/media/libaaudio/src/legacy/AudioStreamTrack.cpp
@@ -146,14 +146,14 @@
builder.isContentSpatialized(),
flags);
- const audio_attributes_t attributes = {
- .content_type = contentType,
- .usage = usage,
- .source = AUDIO_SOURCE_DEFAULT, // only used for recording
- .flags = attributesFlags,
- .tags = ""
- };
-
+ const std::optional<std::string> tags = builder.getTags();
+ audio_attributes_t attributes = AUDIO_ATTRIBUTES_INITIALIZER;
+ attributes.content_type = contentType;
+ attributes.usage = usage;
+ attributes.flags = attributesFlags;
+ if (tags.has_value() && !tags.value().empty()) {
+ strcpy(attributes.tags, tags.value().c_str());
+ }
mAudioTrack = new AudioTrack();
// TODO b/182392769: use attribution source util
mAudioTrack->set(
diff --git a/media/libaaudio/src/libaaudio.map.txt b/media/libaaudio/src/libaaudio.map.txt
index 7213393..13c19a1 100644
--- a/media/libaaudio/src/libaaudio.map.txt
+++ b/media/libaaudio/src/libaaudio.map.txt
@@ -72,6 +72,9 @@
AAudioStream_getHardwareSampleRate; # introduced=UpsideDownCake
AAudio_getPlatformMMapPolicy; # introduced=36
AAudio_getPlatformMMapExclusivePolicy; #introduced=36
+
+ AAudioStreamBuilder_setTags; # systemapi
+ AAudioStream_getTags; # systemapi
local:
*;
};
diff --git a/media/libaaudio/tests/test_attributes.cpp b/media/libaaudio/tests/test_attributes.cpp
index e5676a7..045c236 100644
--- a/media/libaaudio/tests/test_attributes.cpp
+++ b/media/libaaudio/tests/test_attributes.cpp
@@ -26,6 +26,8 @@
#include <aaudio/AAudio.h>
#include <gtest/gtest.h>
+#include <system/audio.h>
+#include <system/aaudio/AAudio.h>
constexpr int64_t kNanosPerSecond = 1000000000;
constexpr int kNumFrames = 256;
@@ -36,6 +38,7 @@
static void checkAttributes(aaudio_performance_mode_t perfMode,
aaudio_usage_t usage,
aaudio_content_type_t contentType,
+ const char * tags = nullptr,
aaudio_input_preset_t preset = DONT_SET,
aaudio_allowed_capture_policy_t capturePolicy = DONT_SET,
int privacyMode = DONT_SET,
@@ -45,6 +48,7 @@
AAudioStreamBuilder *aaudioBuilder = nullptr;
AAudioStream *aaudioStream = nullptr;
+ aaudio_result_t expectedSetTagsResult = AAUDIO_OK;
// Use an AAudioStreamBuilder to contain requested parameters.
ASSERT_EQ(AAUDIO_OK, AAudio_createStreamBuilder(&aaudioBuilder));
@@ -60,6 +64,12 @@
if (contentType != DONT_SET) {
AAudioStreamBuilder_setContentType(aaudioBuilder, contentType);
}
+ if (tags != nullptr) {
+ aaudio_result_t result = AAudioStreamBuilder_setTags(aaudioBuilder, tags);
+ expectedSetTagsResult = (strlen(tags) >= AUDIO_ATTRIBUTES_TAGS_MAX_SIZE) ?
+ AAUDIO_ERROR_ILLEGAL_ARGUMENT : AAUDIO_OK;
+ EXPECT_EQ(result, expectedSetTagsResult);
+ }
if (preset != DONT_SET) {
AAudioStreamBuilder_setInputPreset(aaudioBuilder, preset);
}
@@ -87,6 +97,20 @@
: contentType;
EXPECT_EQ(expectedContentType, AAudioStream_getContentType(aaudioStream));
+ char readTags[AAUDIO_ATTRIBUTES_TAGS_MAX_SIZE] = {};
+ EXPECT_EQ(AAUDIO_OK, AAudioStream_getTags(aaudioStream, readTags))
+ << "Expected tags=" << (tags != nullptr ? tags : "null") << ", got tags=" << readTags;;
+ EXPECT_LT(strlen(readTags), AUDIO_ATTRIBUTES_TAGS_MAX_SIZE)
+ << "expected tags len " << strlen(readTags) << " less than "
+ << AUDIO_ATTRIBUTES_TAGS_MAX_SIZE;
+
+ // For null tags or a failed set, empty tags are expected (default initializer)
+ const char * expectedTags = tags == nullptr ?
+ "" : (expectedSetTagsResult != AAUDIO_OK ? "" : tags);
+ // Oversized tags will be discarded
+ EXPECT_TRUE(std::strcmp(expectedTags, readTags) == 0)
+ << "Expected tags=" << expectedTags << ", got tags=" << readTags;
+
aaudio_input_preset_t expectedPreset =
(preset == DONT_SET || preset == AAUDIO_UNSPECIFIED)
? AAUDIO_INPUT_PRESET_VOICE_RECOGNITION // default
@@ -139,6 +163,21 @@
// Note that the AAUDIO_SYSTEM_USAGE_* values requires special permission.
};
+static const std::string oversizedTags2 = std::string(AUDIO_ATTRIBUTES_TAGS_MAX_SIZE + 1, 'A');
+static const std::string oversizedTags = std::string(AUDIO_ATTRIBUTES_TAGS_MAX_SIZE, 'B');
+static const std::string maxSizeTags = std::string(AUDIO_ATTRIBUTES_TAGS_MAX_SIZE - 1, 'C');
+
+static const char * const sTags[] = {
+ nullptr,
+ "",
+ "oem=routing_extension",
+ "VX_OEM_ROUTING_EXTENSION",
+ maxSizeTags.c_str(),
+ // intentionally use oversized tags
+ oversizedTags.c_str(),
+ oversizedTags2.c_str()
+};
+
static const aaudio_content_type_t sContentypes[] = {
DONT_SET,
AAUDIO_UNSPECIFIED,
@@ -185,11 +224,18 @@
}
}
+static void checkAttributesTags(aaudio_performance_mode_t perfMode) {
+ for (const char * const tags : sTags) {
+ checkAttributes(perfMode, DONT_SET, DONT_SET, tags);
+ }
+}
+
static void checkAttributesInputPreset(aaudio_performance_mode_t perfMode) {
for (aaudio_input_preset_t inputPreset : sInputPresets) {
checkAttributes(perfMode,
DONT_SET,
DONT_SET,
+ nullptr,
inputPreset,
DONT_SET,
DONT_SET,
@@ -202,6 +248,7 @@
checkAttributes(perfMode,
DONT_SET,
DONT_SET,
+ nullptr,
DONT_SET,
policy,
AAUDIO_DIRECTION_INPUT);
@@ -213,6 +260,7 @@
checkAttributes(perfMode,
DONT_SET,
DONT_SET,
+ nullptr,
DONT_SET,
DONT_SET,
privacyMode,
@@ -228,6 +276,10 @@
checkAttributesContentType(AAUDIO_PERFORMANCE_MODE_NONE);
}
+TEST(test_attributes, aaudio_tags_perfnone) {
+ checkAttributesTags(AAUDIO_PERFORMANCE_MODE_NONE);
+}
+
TEST(test_attributes, aaudio_input_preset_perfnone) {
checkAttributesInputPreset(AAUDIO_PERFORMANCE_MODE_NONE);
}
@@ -244,6 +296,10 @@
checkAttributesContentType(AAUDIO_PERFORMANCE_MODE_LOW_LATENCY);
}
+TEST(test_attributes, aaudio_tags_lowlat) {
+ checkAttributesTags(AAUDIO_PERFORMANCE_MODE_LOW_LATENCY);
+}
+
TEST(test_attributes, aaudio_input_preset_lowlat) {
checkAttributesInputPreset(AAUDIO_PERFORMANCE_MODE_LOW_LATENCY);
}
diff --git a/media/libaudiofoundation/AudioContainers.cpp b/media/libaudiofoundation/AudioContainers.cpp
index e1265cf..6727562 100644
--- a/media/libaudiofoundation/AudioContainers.cpp
+++ b/media/libaudiofoundation/AudioContainers.cpp
@@ -130,6 +130,27 @@
return ss.str();
}
+std::string toString(const DeviceIdSet& deviceIds) {
+ if (deviceIds.empty()) {
+ return "Empty device ids";
+ }
+ std::stringstream ss;
+ for (auto it = deviceIds.begin(); it != deviceIds.end(); ++it) {
+ if (it != deviceIds.begin()) {
+ ss << ", ";
+ }
+ ss << *it;
+ }
+ return ss.str();
+}
+
+audio_port_handle_t getFirstDeviceId(const DeviceIdSet& deviceIds) {
+ if (deviceIds.empty()) {
+ return AUDIO_PORT_HANDLE_NONE;
+ }
+ return *(deviceIds.begin());
+}
+
AudioProfileAttributesMultimap createAudioProfilesAttrMap(audio_profile profiles[],
uint32_t first,
uint32_t last) {
diff --git a/media/libaudiofoundation/include/media/AudioContainers.h b/media/libaudiofoundation/include/media/AudioContainers.h
index 46fd620..3673871 100644
--- a/media/libaudiofoundation/include/media/AudioContainers.h
+++ b/media/libaudiofoundation/include/media/AudioContainers.h
@@ -33,6 +33,7 @@
using FormatSet = std::set<audio_format_t>;
using SampleRateSet = std::set<uint32_t>;
using MixerBehaviorSet = std::set<audio_mixer_behavior_t>;
+using DeviceIdSet = std::set<audio_port_handle_t>;
using FormatVector = std::vector<audio_format_t>;
using AudioProfileAttributesMultimap =
@@ -139,6 +140,16 @@
}
/**
+ * Returns a human-readable string for a set of device ids.
+ */
+std::string toString(const DeviceIdSet& deviceIds);
+
+/**
+ * Returns the first device id of a set of device ids, or AUDIO_PORT_HANDLE_NONE when it is empty.
+ */
+audio_port_handle_t getFirstDeviceId(const DeviceIdSet& deviceIds);
+
+/**
* Create audio profile attributes map by given audio profile array from the range of [first, last).
*
* @param profiles the array of audio profiles.
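For reference, a small sketch (hypothetical caller, not part of this change) of how the two new DeviceIdSet helpers are meant to be used:

    #define LOG_TAG "DeviceIdSetExample"
    #include <utils/Log.h>
    #include <media/AudioContainers.h>

    void logSelectedDevices(const android::DeviceIdSet& deviceIds) {
        // toString() yields "Empty device ids" for an empty set, otherwise "1, 2, 3"-style output.
        ALOGD("selected device ids: %s", android::toString(deviceIds).c_str());
        // getFirstDeviceId() is a convenience for call sites that still expect a single id;
        // it returns AUDIO_PORT_HANDLE_NONE when the set is empty.
        const audio_port_handle_t first = android::getFirstDeviceId(deviceIds);
        ALOGD("first device id: %d", first);
    }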
diff --git a/media/libstagefright/include/media/stagefright/MediaCodecConstants.h b/media/libstagefright/include/media/stagefright/MediaCodecConstants.h
index b1cf665..c9e0a97 100644
--- a/media/libstagefright/include/media/stagefright/MediaCodecConstants.h
+++ b/media/libstagefright/include/media/stagefright/MediaCodecConstants.h
@@ -586,6 +586,139 @@
}
}
+// APV ProfileLevel
+inline constexpr int32_t APVProfile422_10 = 0x01;
+inline constexpr int32_t APVProfile422_10HDR10 = 0x1000;
+inline constexpr int32_t APVProfile422_10HDR10Plus = 0x2000;
+
+inline static const char *asString_APVProfile(int32_t i, const char *def = "??") {
+ switch (i) {
+ case APVProfile422_10: return "APVProfile422_10";
+ case APVProfile422_10HDR10: return "APVProfile422_10HDR10";
+ case APVProfile422_10HDR10Plus: return "APVProfile422_10HDR10Plus";
+ default: return def;
+ }
+}
+
+inline constexpr int32_t APVLevel1Band0 = 0x101;
+inline constexpr int32_t APVLevel1Band1 = 0x102;
+inline constexpr int32_t APVLevel1Band2 = 0x104;
+inline constexpr int32_t APVLevel1Band3 = 0x108;
+inline constexpr int32_t APVLevel11Band0 = 0x201;
+inline constexpr int32_t APVLevel11Band1 = 0x202;
+inline constexpr int32_t APVLevel11Band2 = 0x204;
+inline constexpr int32_t APVLevel11Band3 = 0x208;
+inline constexpr int32_t APVLevel2Band0 = 0x401;
+inline constexpr int32_t APVLevel2Band1 = 0x402;
+inline constexpr int32_t APVLevel2Band2 = 0x404;
+inline constexpr int32_t APVLevel2Band3 = 0x408;
+inline constexpr int32_t APVLevel21Band0 = 0x801;
+inline constexpr int32_t APVLevel21Band1 = 0x802;
+inline constexpr int32_t APVLevel21Band2 = 0x804;
+inline constexpr int32_t APVLevel21Band3 = 0x808;
+inline constexpr int32_t APVLevel3Band0 = 0x1001;
+inline constexpr int32_t APVLevel3Band1 = 0x1002;
+inline constexpr int32_t APVLevel3Band2 = 0x1004;
+inline constexpr int32_t APVLevel3Band3 = 0x1008;
+inline constexpr int32_t APVLevel31Band0 = 0x2001;
+inline constexpr int32_t APVLevel31Band1 = 0x2002;
+inline constexpr int32_t APVLevel31Band2 = 0x2004;
+inline constexpr int32_t APVLevel31Band3 = 0x2008;
+inline constexpr int32_t APVLevel4Band0 = 0x4001;
+inline constexpr int32_t APVLevel4Band1 = 0x4002;
+inline constexpr int32_t APVLevel4Band2 = 0x4004;
+inline constexpr int32_t APVLevel4Band3 = 0x4008;
+inline constexpr int32_t APVLevel41Band0 = 0x8001;
+inline constexpr int32_t APVLevel41Band1 = 0x8002;
+inline constexpr int32_t APVLevel41Band2 = 0x8004;
+inline constexpr int32_t APVLevel41Band3 = 0x8008;
+inline constexpr int32_t APVLevel5Band0 = 0x10001;
+inline constexpr int32_t APVLevel5Band1 = 0x10002;
+inline constexpr int32_t APVLevel5Band2 = 0x10004;
+inline constexpr int32_t APVLevel5Band3 = 0x10008;
+inline constexpr int32_t APVLevel51Band0 = 0x20001;
+inline constexpr int32_t APVLevel51Band1 = 0x20002;
+inline constexpr int32_t APVLevel51Band2 = 0x20004;
+inline constexpr int32_t APVLevel51Band3 = 0x20008;
+inline constexpr int32_t APVLevel6Band0 = 0x40001;
+inline constexpr int32_t APVLevel6Band1 = 0x40002;
+inline constexpr int32_t APVLevel6Band2 = 0x40004;
+inline constexpr int32_t APVLevel6Band3 = 0x40008;
+inline constexpr int32_t APVLevel61Band0 = 0x80001;
+inline constexpr int32_t APVLevel61Band1 = 0x80002;
+inline constexpr int32_t APVLevel61Band2 = 0x80004;
+inline constexpr int32_t APVLevel61Band3 = 0x80008;
+inline constexpr int32_t APVLevel7Band0 = 0x100001;
+inline constexpr int32_t APVLevel7Band1 = 0x100002;
+inline constexpr int32_t APVLevel7Band2 = 0x100004;
+inline constexpr int32_t APVLevel7Band3 = 0x100008;
+inline constexpr int32_t APVLevel71Band0 = 0x200001;
+inline constexpr int32_t APVLevel71Band1 = 0x200002;
+inline constexpr int32_t APVLevel71Band2 = 0x200004;
+inline constexpr int32_t APVLevel71Band3 = 0x200008;
+
+inline static const char *asString_APVBandLevel(int32_t i, const char *def = "??") {
+ switch (i) {
+ case APVLevel1Band0: return "Level 1, Band 0";
+ case APVLevel1Band1: return "Level 1, Band 1";
+ case APVLevel1Band2: return "Level 1, Band 2";
+ case APVLevel1Band3: return "Level 1, Band 3";
+ case APVLevel11Band0: return "Level 1.1, Band 0";
+ case APVLevel11Band1: return "Level 1.1, Band 1";
+ case APVLevel11Band2: return "Level 1.1, Band 2";
+ case APVLevel11Band3: return "Level 1.1, Band 3";
+ case APVLevel2Band0: return "Level 2, Band 0";
+ case APVLevel2Band1: return "Level 2, Band 1";
+ case APVLevel2Band2: return "Level 2, Band 2";
+ case APVLevel2Band3: return "Level 2, Band 3";
+ case APVLevel21Band0: return "Level 2.1, Band 0";
+ case APVLevel21Band1: return "Level 2.1, Band 1";
+ case APVLevel21Band2: return "Level 2.1, Band 2";
+ case APVLevel21Band3: return "Level 2.1, Band 3";
+ case APVLevel3Band0: return "Level 3, Band 0";
+ case APVLevel3Band1: return "Level 3, Band 1";
+ case APVLevel3Band2: return "Level 3, Band 2";
+ case APVLevel3Band3: return "Level 3, Band 3";
+ case APVLevel31Band0: return "Level 3.1, Band 0";
+ case APVLevel31Band1: return "Level 3.1, Band 1";
+ case APVLevel31Band2: return "Level 3.1, Band 2";
+ case APVLevel31Band3: return "Level 3.1, Band 3";
+ case APVLevel4Band0: return "Level 4, Band 0";
+ case APVLevel4Band1: return "Level 4, Band 1";
+ case APVLevel4Band2: return "Level 4, Band 2";
+ case APVLevel4Band3: return "Level 4, Band 3";
+ case APVLevel41Band0: return "Level 4.1, Band 0";
+ case APVLevel41Band1: return "Level 4.1, Band 1";
+ case APVLevel41Band2: return "Level 4.1, Band 2";
+ case APVLevel41Band3: return "Level 4.1, Band 3";
+ case APVLevel5Band0: return "Level 5, Band 0";
+ case APVLevel5Band1: return "Level 5, Band 1";
+ case APVLevel5Band2: return "Level 5, Band 2";
+ case APVLevel5Band3: return "Level 5, Band 3";
+ case APVLevel51Band0: return "Level 5.1, Band 0";
+ case APVLevel51Band1: return "Level 5.1, Band 1";
+ case APVLevel51Band2: return "Level 5.1, Band 2";
+ case APVLevel51Band3: return "Level 5.1, Band 3";
+ case APVLevel6Band0: return "Level 6, Band 0";
+ case APVLevel6Band1: return "Level 6, Band 1";
+ case APVLevel6Band2: return "Level 6, Band 2";
+ case APVLevel6Band3: return "Level 6, Band 3";
+ case APVLevel61Band0: return "Level 6.1, Band 0";
+ case APVLevel61Band1: return "Level 6.1, Band 1";
+ case APVLevel61Band2: return "Level 6.1, Band 2";
+ case APVLevel61Band3: return "Level 6.1, Band 3";
+ case APVLevel7Band0: return "Level 7, Band 0";
+ case APVLevel7Band1: return "Level 7, Band 1";
+ case APVLevel7Band2: return "Level 7, Band 2";
+ case APVLevel7Band3: return "Level 7, Band 3";
+ case APVLevel71Band0: return "Level 7.1, Band 0";
+ case APVLevel71Band1: return "Level 7.1, Band 1";
+ case APVLevel71Band2: return "Level 7.1, Band 2";
+ case APVLevel71Band3: return "Level 7.1, Band 3";
+ default: return def;
+ }
+}
+
inline constexpr int32_t BITRATE_MODE_CBR = 2;
inline constexpr int32_t BITRATE_MODE_CBR_FD = 3;
inline constexpr int32_t BITRATE_MODE_CQ = 0;
@@ -654,6 +787,7 @@
inline constexpr int32_t COLOR_FormatYUV444Flexible = 0x7F444888;
inline constexpr int32_t COLOR_FormatYUV444Interleaved = 29;
inline constexpr int32_t COLOR_FormatYUVP010 = 54;
+inline constexpr int32_t COLOR_FormatYUVP210 = 60;
inline constexpr int32_t COLOR_QCOM_FormatYUV420SemiPlanar = 0x7fa30c00;
inline constexpr int32_t COLOR_TI_FormatYUV420PackedSemiPlanar = 0x7f000100;
@@ -712,6 +846,7 @@
case COLOR_FormatYUV444Flexible: return "YUV444Flexible";
case COLOR_FormatYUV444Interleaved: return "YUV444Interleaved";
case COLOR_FormatYUVP010: return "YUVP010";
+ case COLOR_FormatYUVP210: return "YUVP210";
case COLOR_QCOM_FormatYUV420SemiPlanar: return "QCOM_YUV420SemiPlanar";
case COLOR_TI_FormatYUV420PackedSemiPlanar: return "TI_YUV420PackedSemiPlanar";
default: return def;
@@ -731,6 +866,7 @@
inline constexpr char MIMETYPE_VIDEO_VP8[] = "video/x-vnd.on2.vp8";
inline constexpr char MIMETYPE_VIDEO_VP9[] = "video/x-vnd.on2.vp9";
inline constexpr char MIMETYPE_VIDEO_AV1[] = "video/av01";
+inline constexpr char MIMETYPE_VIDEO_APV[] = "video/apv";
inline constexpr char MIMETYPE_VIDEO_AVC[] = "video/avc";
inline constexpr char MIMETYPE_VIDEO_HEVC[] = "video/hevc";
inline constexpr char MIMETYPE_VIDEO_MPEG4[] = "video/mp4v-es";
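The APV level/band constants above follow a regular bit layout. The helper below is an inference from the constant values, not something defined by this change: bit (8 + levelIndex) selects the level (index 0..13 for levels 1, 1.1, 2, 2.1, ..., 7, 7.1) and a one-hot low nibble selects the band (0..3).

    #include <cstdint>

    // Inferred encoding, checked against the constants above (assumption, not normative).
    constexpr int32_t apvBandLevel(int levelIndex, int band) {
        return (INT32_C(1) << (8 + levelIndex)) | (INT32_C(1) << band);
    }

    static_assert(apvBandLevel(0, 0) == 0x101);      // APVLevel1Band0
    static_assert(apvBandLevel(1, 2) == 0x204);      // APVLevel11Band2
    static_assert(apvBandLevel(13, 3) == 0x200008);  // APVLevel71Band3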
diff --git a/media/libstagefright/omx/OMXUtils.cpp b/media/libstagefright/omx/OMXUtils.cpp
index 49b2dec..d62e1ed 100644
--- a/media/libstagefright/omx/OMXUtils.cpp
+++ b/media/libstagefright/omx/OMXUtils.cpp
@@ -140,6 +140,8 @@
"audio_decoder.g711alaw", "audio_encoder.g711alaw" },
{ MEDIA_MIMETYPE_VIDEO_AVC,
"video_decoder.avc", "video_encoder.avc" },
+ { MEDIA_MIMETYPE_VIDEO_APV,
+ "video_decoder.apv", "video_encoder.apv" },
{ MEDIA_MIMETYPE_VIDEO_HEVC,
"video_decoder.hevc", "video_encoder.hevc" },
{ MEDIA_MIMETYPE_VIDEO_MPEG4,
diff --git a/media/module/foundation/MediaDefs.cpp b/media/module/foundation/MediaDefs.cpp
index 7abab63..a890696 100644
--- a/media/module/foundation/MediaDefs.cpp
+++ b/media/module/foundation/MediaDefs.cpp
@@ -25,6 +25,7 @@
const char *MEDIA_MIMETYPE_VIDEO_VP8 = "video/x-vnd.on2.vp8";
const char *MEDIA_MIMETYPE_VIDEO_VP9 = "video/x-vnd.on2.vp9";
const char *MEDIA_MIMETYPE_VIDEO_AV1 = "video/av01";
+const char *MEDIA_MIMETYPE_VIDEO_APV = "video/apv";
const char *MEDIA_MIMETYPE_VIDEO_AVC = "video/avc";
const char *MEDIA_MIMETYPE_VIDEO_HEVC = "video/hevc";
const char *MEDIA_MIMETYPE_VIDEO_MPEG4 = "video/mp4v-es";
diff --git a/media/module/foundation/include/media/stagefright/foundation/MediaDefs.h b/media/module/foundation/include/media/stagefright/foundation/MediaDefs.h
index 05ee7fc..2b3f446 100644
--- a/media/module/foundation/include/media/stagefright/foundation/MediaDefs.h
+++ b/media/module/foundation/include/media/stagefright/foundation/MediaDefs.h
@@ -27,6 +27,7 @@
extern const char *MEDIA_MIMETYPE_VIDEO_VP8;
extern const char *MEDIA_MIMETYPE_VIDEO_VP9;
extern const char *MEDIA_MIMETYPE_VIDEO_AV1;
+extern const char *MEDIA_MIMETYPE_VIDEO_APV;
extern const char *MEDIA_MIMETYPE_VIDEO_AVC;
extern const char *MEDIA_MIMETYPE_VIDEO_HEVC;
extern const char *MEDIA_MIMETYPE_VIDEO_MPEG4;
diff --git a/media/utils/ServiceUtilities.cpp b/media/utils/ServiceUtilities.cpp
index 679b111..ddb93fe 100644
--- a/media/utils/ServiceUtilities.cpp
+++ b/media/utils/ServiceUtilities.cpp
@@ -45,6 +45,7 @@
namespace android {
namespace {
+constexpr auto PERMISSION_GRANTED = permission::PermissionChecker::PERMISSION_GRANTED;
constexpr auto PERMISSION_HARD_DENIED = permission::PermissionChecker::PERMISSION_HARD_DENIED;
}
@@ -78,19 +79,32 @@
int32_t getOpForSource(audio_source_t source) {
switch (source) {
- case AUDIO_SOURCE_HOTWORD:
- return AppOpsManager::OP_RECORD_AUDIO_HOTWORD;
+ case AUDIO_SOURCE_FM_TUNER:
+ return AppOpsManager::OP_NONE;
case AUDIO_SOURCE_ECHO_REFERENCE: // fallthrough
case AUDIO_SOURCE_REMOTE_SUBMIX:
return AppOpsManager::OP_RECORD_AUDIO_OUTPUT;
case AUDIO_SOURCE_VOICE_DOWNLINK:
return AppOpsManager::OP_RECORD_INCOMING_PHONE_AUDIO;
+ case AUDIO_SOURCE_HOTWORD:
+ return AppOpsManager::OP_RECORD_AUDIO_HOTWORD;
case AUDIO_SOURCE_DEFAULT:
default:
return AppOpsManager::OP_RECORD_AUDIO;
}
}
+bool isRecordOpRequired(audio_source_t source) {
+ switch (source) {
+ case AUDIO_SOURCE_FM_TUNER:
+ case AUDIO_SOURCE_ECHO_REFERENCE: // fallthrough
+ case AUDIO_SOURCE_REMOTE_SUBMIX:
+ return false;
+ default:
+ return true;
+ }
+}
+
std::optional<AttributionSourceState> resolveAttributionSource(
const AttributionSourceState& callerAttributionSource, const uint32_t virtualDeviceId) {
AttributionSourceState nextAttributionSource = callerAttributionSource;
@@ -122,7 +136,8 @@
return std::optional<AttributionSourceState>{myAttributionSource};
}
- static int checkRecordingInternal(const AttributionSourceState &attributionSource,
+
+static int checkRecordingInternal(const AttributionSourceState &attributionSource,
const uint32_t virtualDeviceId,
const String16 &msg, bool start, audio_source_t source) {
// Okay to not track in app ops as audio server or media server is us and if
@@ -131,32 +146,47 @@
// user is active, but it is a core system service so let it through.
// TODO(b/141210120): UserManager.DISALLOW_RECORD_AUDIO should not affect system user 0
uid_t uid = VALUE_OR_FATAL(aidl2legacy_int32_t_uid_t(attributionSource.uid));
- if (isAudioServerOrMediaServerOrSystemServerOrRootUid(uid)) return true;
-
- // We specify a pid and uid here as mediaserver (aka MediaRecorder or StageFrightRecorder)
- // may open a record track on behalf of a client. Note that pid may be a tid.
- // IMPORTANT: DON'T USE PermissionCache - RUNTIME PERMISSIONS CHANGE.
- std::optional<AttributionSourceState> resolvedAttributionSource =
- resolveAttributionSource(attributionSource, virtualDeviceId);
- if (!resolvedAttributionSource.has_value()) {
- return false;
- }
+ if (isAudioServerOrMediaServerOrSystemServerOrRootUid(uid)) return PERMISSION_GRANTED;
const int32_t attributedOpCode = getOpForSource(source);
+ if (isRecordOpRequired(source)) {
+ // We specify a pid and uid here as mediaserver (aka MediaRecorder or StageFrightRecorder)
+ // may open a record track on behalf of a client. Note that pid may be a tid.
+ // IMPORTANT: DON'T USE PermissionCache - RUNTIME PERMISSIONS CHANGE.
+ std::optional<AttributionSourceState> resolvedAttributionSource =
+ resolveAttributionSource(attributionSource, virtualDeviceId);
+ if (!resolvedAttributionSource.has_value()) {
+ return PERMISSION_HARD_DENIED;
+ }
- permission::PermissionChecker permissionChecker;
- int permitted;
- if (start) {
- permitted = permissionChecker.checkPermissionForStartDataDeliveryFromDatasource(
- sAndroidPermissionRecordAudio, resolvedAttributionSource.value(), msg,
- attributedOpCode);
+ permission::PermissionChecker permissionChecker;
+ int permitted;
+ if (start) {
+ permitted = permissionChecker.checkPermissionForStartDataDeliveryFromDatasource(
+ sAndroidPermissionRecordAudio, resolvedAttributionSource.value(), msg,
+ attributedOpCode);
+ } else {
+ permitted = permissionChecker.checkPermissionForPreflightFromDatasource(
+ sAndroidPermissionRecordAudio, resolvedAttributionSource.value(), msg,
+ attributedOpCode);
+ }
+
+ return permitted;
} else {
- permitted = permissionChecker.checkPermissionForPreflightFromDatasource(
- sAndroidPermissionRecordAudio, resolvedAttributionSource.value(), msg,
- attributedOpCode);
+ if (attributedOpCode == AppOpsManager::OP_NONE) return PERMISSION_GRANTED; // nothing to do
+ AppOpsManager ap{};
+ PermissionController pc{};
+ return ap.startOpNoThrow(
+ attributedOpCode, attributionSource.uid,
+ resolveCallingPackage(pc,
+ String16{attributionSource.packageName.value_or("").c_str()},
+ attributionSource.uid),
+ false,
+ attributionSource.attributionTag.has_value()
+ ? String16{attributionSource.attributionTag.value().c_str()}
+ : String16{},
+ msg);
}
-
- return permitted;
}
static constexpr int DEVICE_ID_DEFAULT = 0;
@@ -188,19 +218,32 @@
uid_t uid = VALUE_OR_FATAL(aidl2legacy_int32_t_uid_t(attributionSource.uid));
if (isAudioServerOrMediaServerOrSystemServerOrRootUid(uid)) return;
- // We specify a pid and uid here as mediaserver (aka MediaRecorder or StageFrightRecorder)
- // may open a record track on behalf of a client. Note that pid may be a tid.
- // IMPORTANT: DON'T USE PermissionCache - RUNTIME PERMISSIONS CHANGE.
- const std::optional<AttributionSourceState> resolvedAttributionSource =
- resolveAttributionSource(attributionSource, virtualDeviceId);
- if (!resolvedAttributionSource.has_value()) {
- return;
- }
-
const int32_t attributedOpCode = getOpForSource(source);
- permission::PermissionChecker permissionChecker;
- permissionChecker.finishDataDeliveryFromDatasource(attributedOpCode,
- resolvedAttributionSource.value());
+ if (isRecordOpRequired(source)) {
+ // We specify a pid and uid here as mediaserver (aka MediaRecorder or StageFrightRecorder)
+ // may open a record track on behalf of a client. Note that pid may be a tid.
+ // IMPORTANT: DON'T USE PermissionCache - RUNTIME PERMISSIONS CHANGE.
+ const std::optional<AttributionSourceState> resolvedAttributionSource =
+ resolveAttributionSource(attributionSource, virtualDeviceId);
+ if (!resolvedAttributionSource.has_value()) {
+ return;
+ }
+
+ permission::PermissionChecker permissionChecker;
+ permissionChecker.finishDataDeliveryFromDatasource(attributedOpCode,
+ resolvedAttributionSource.value());
+ } else {
+ if (attributedOpCode == AppOpsManager::OP_NONE) return; // nothing to do
+ AppOpsManager ap{};
+ PermissionController pc{};
+ ap.finishOp(attributedOpCode, attributionSource.uid,
+ resolveCallingPackage(
+ pc, String16{attributionSource.packageName.value_or("").c_str()},
+ attributionSource.uid),
+ attributionSource.attributionTag.has_value()
+ ? String16{attributionSource.attributionTag.value().c_str()}
+ : String16{});
+ }
}
bool captureAudioOutputAllowed(const AttributionSourceState& attributionSource) {
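To make the behavioral split explicit, here is an illustrative (non-normative) summary of what the reordered getOpForSource() and the new isRecordOpRequired() return for a few sources, mirroring the switch statements above:

    #include <cassert>
    #include <binder/AppOpsManager.h>
    #include <mediautils/ServiceUtilities.h>

    void checkSourceOpMapping() {
        using namespace android;
        // FM tuner capture needs no app op and skips the RECORD_AUDIO data-delivery path.
        assert(getOpForSource(AUDIO_SOURCE_FM_TUNER) == AppOpsManager::OP_NONE);
        assert(!isRecordOpRequired(AUDIO_SOURCE_FM_TUNER));
        // Remote submix / echo reference map to OP_RECORD_AUDIO_OUTPUT and are now
        // tracked via startOpNoThrow()/finishOp() instead of the RECORD_AUDIO check.
        assert(getOpForSource(AUDIO_SOURCE_REMOTE_SUBMIX) == AppOpsManager::OP_RECORD_AUDIO_OUTPUT);
        assert(!isRecordOpRequired(AUDIO_SOURCE_REMOTE_SUBMIX));
        // Ordinary microphone capture keeps the full RECORD_AUDIO permission check.
        assert(getOpForSource(AUDIO_SOURCE_MIC) == AppOpsManager::OP_RECORD_AUDIO);
        assert(isRecordOpRequired(AUDIO_SOURCE_MIC));
    }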
diff --git a/media/utils/include/mediautils/ServiceUtilities.h b/media/utils/include/mediautils/ServiceUtilities.h
index 461e190..9abdbf1 100644
--- a/media/utils/include/mediautils/ServiceUtilities.h
+++ b/media/utils/include/mediautils/ServiceUtilities.h
@@ -118,6 +118,7 @@
const AttributionSourceState& attributionSource, const String16& caller);
void anonymizeBluetoothAddress(char *address);
+bool isRecordOpRequired(audio_source_t source);
int32_t getOpForSource(audio_source_t source);
AttributionSourceState getCallingAttributionSource();
diff --git a/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp b/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp
index 1ffa176..f298541 100644
--- a/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp
+++ b/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp
@@ -909,13 +909,12 @@
std::stringstream msg;
msg << "Audio recording on session " << client->session;
+
const auto permitted = startRecording(client->attributionSource, client->virtualDeviceId,
String16(msg.str().c_str()), client->attributes.source);
// check calling permissions
- if (permitted == PERMISSION_HARD_DENIED && client->attributes.source != AUDIO_SOURCE_FM_TUNER
- && client->attributes.source != AUDIO_SOURCE_REMOTE_SUBMIX
- && client->attributes.source != AUDIO_SOURCE_ECHO_REFERENCE) {
+ if (permitted == PERMISSION_HARD_DENIED) {
ALOGE("%s permission denied: recording not allowed for attribution source %s",
__func__, client->attributionSource.toString().c_str());
return binderStatusFromStatusT(PERMISSION_DENIED);
diff --git a/services/audiopolicy/service/AudioRecordClient.cpp b/services/audiopolicy/service/AudioRecordClient.cpp
index 733f0d6..fd344d9 100644
--- a/services/audiopolicy/service/AudioRecordClient.cpp
+++ b/services/audiopolicy/service/AudioRecordClient.cpp
@@ -19,6 +19,7 @@
#include "AudioRecordClient.h"
#include "AudioPolicyService.h"
#include "binder/AppOpsManager.h"
+#include "mediautils/ServiceUtilities.h"
#include <android_media_audiopolicy.h>
#include <algorithm>
@@ -118,16 +119,20 @@
}
return new OpRecordAudioMonitor(attributionSource, virtualDeviceId, attr,
- getOpForSource(attr.source), commandThread);
+ getOpForSource(attr.source),
+ isRecordOpRequired(attr.source),
+ commandThread);
}
OpRecordAudioMonitor::OpRecordAudioMonitor(
const AttributionSourceState &attributionSource,
const uint32_t virtualDeviceId, const audio_attributes_t &attr,
int32_t appOp,
+ bool shouldMonitorRecord,
wp<AudioPolicyService::AudioCommandThread> commandThread) :
mHasOp(true), mAttributionSource(attributionSource),
mVirtualDeviceId(virtualDeviceId), mAttr(attr), mAppOp(appOp),
+ mShouldMonitorRecord(shouldMonitorRecord),
mCommandThread(commandThread) {
}
@@ -160,7 +165,7 @@
});
};
reg(mAppOp);
- if (mAppOp != AppOpsManager::OP_RECORD_AUDIO) {
+ if (mAppOp != AppOpsManager::OP_RECORD_AUDIO && mShouldMonitorRecord) {
reg(AppOpsManager::OP_RECORD_AUDIO);
}
}
@@ -186,7 +191,7 @@
});
};
bool hasIt = check(mAppOp);
- if (mAppOp != AppOpsManager::OP_RECORD_AUDIO) {
+ if (mAppOp != AppOpsManager::OP_RECORD_AUDIO && mShouldMonitorRecord) {
hasIt = hasIt && check(AppOpsManager::OP_RECORD_AUDIO);
}
diff --git a/services/audiopolicy/service/AudioRecordClient.h b/services/audiopolicy/service/AudioRecordClient.h
index 76aff41..977d77b 100644
--- a/services/audiopolicy/service/AudioRecordClient.h
+++ b/services/audiopolicy/service/AudioRecordClient.h
@@ -47,6 +47,7 @@
uint32_t virtualDeviceId,
const audio_attributes_t &attr,
int32_t appOp,
+ bool shouldMonitorRecord,
wp<AudioPolicyService::AudioCommandThread> commandThread);
void onFirstRef() override;
@@ -74,6 +75,7 @@
const uint32_t mVirtualDeviceId;
const audio_attributes_t mAttr;
const int32_t mAppOp;
+ const bool mShouldMonitorRecord;
wp<AudioPolicyService::AudioCommandThread> mCommandThread;
};
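
The shouldMonitorRecord flag threaded through OpRecordAudioMonitor above comes from the new isRecordOpRequired(audio_source_t) helper, whose definition is not part of this diff. A minimal sketch of what such a predicate could look like, assuming it exempts the same non-microphone sources that the removed special case in AudioPolicyInterfaceImpl.cpp enumerated (FM tuner, remote submix, echo reference); the real implementation in media/utils/ServiceUtilities.cpp may differ:

// Hypothetical sketch only; not the actual ServiceUtilities.cpp code.
bool isRecordOpRequired(audio_source_t source) {
    switch (source) {
        case AUDIO_SOURCE_FM_TUNER:
        case AUDIO_SOURCE_REMOTE_SUBMIX:
        case AUDIO_SOURCE_ECHO_REFERENCE:
            // These capture paths do not read the microphone, so
            // OP_RECORD_AUDIO does not need to be monitored for them.
            return false;
        default:
            return true;
    }
}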
diff --git a/services/camera/libcameraservice/aidl/VndkVersionMetadataTags.h b/services/camera/libcameraservice/aidl/VndkVersionMetadataTags.h
index b07d8d5..158ee69 100644
--- a/services/camera/libcameraservice/aidl/VndkVersionMetadataTags.h
+++ b/services/camera/libcameraservice/aidl/VndkVersionMetadataTags.h
@@ -93,6 +93,10 @@
ANDROID_FLASH_TORCH_STRENGTH_MAX_LEVEL,
ANDROID_INFO_SESSION_CONFIGURATION_QUERY_VERSION,
} },
+ {36, {
+ ANDROID_COLOR_CORRECTION_AVAILABLE_MODES,
+ ANDROID_COLOR_CORRECTION_COLOR_TEMPERATURE_RANGE,
+ } },
};
/**
@@ -125,4 +129,8 @@
ANDROID_STATISTICS_LENS_INTRINSIC_SAMPLES,
ANDROID_STATISTICS_LENS_INTRINSIC_TIMESTAMPS,
} },
+ {36, {
+ ANDROID_COLOR_CORRECTION_COLOR_TEMPERATURE,
+ ANDROID_COLOR_CORRECTION_COLOR_TINT,
+ } },
};
diff --git a/services/camera/libcameraservice/common/CameraProviderManager.cpp b/services/camera/libcameraservice/common/CameraProviderManager.cpp
index a03d199..6394ec1 100644
--- a/services/camera/libcameraservice/common/CameraProviderManager.cpp
+++ b/services/camera/libcameraservice/common/CameraProviderManager.cpp
@@ -1833,6 +1833,67 @@
return res;
}
+status_t CameraProviderManager::ProviderInfo::DeviceInfo3::addColorCorrectionAvailableModesTag(
+ CameraMetadata& c) {
+ status_t res = OK;
+
+ // The COLOR_CORRECTION_AVAILABLE_MODES key advertises the
+ // supported color correction modes. Previously, if color correction was
+ // supported (COLOR_CORRECTION_MODE was not null), it was assumed
+ // that all existing options, TRANSFORM_MATRIX, FAST, and HIGH_QUALITY, were supported.
+ // However, a new optional mode, CCT, has been introduced. To indicate
+ // whether CCT is supported, the camera device must now explicitly list all
+ // available modes using the COLOR_CORRECTION_AVAILABLE_MODES key.
+ // If the camera device doesn't set COLOR_CORRECTION_AVAILABLE_MODES,
+ // this code falls back to checking for the COLOR_CORRECTION_MODE key.
+ // If that key is present, this method adds the mandatory modes TRANSFORM_MATRIX,
+ // FAST, and HIGH_QUALITY.
+ auto entry = c.find(ANDROID_COLOR_CORRECTION_AVAILABLE_MODES);
+ if (entry.count != 0) {
+ return res;
+ }
+
+ auto reqKeys = c.find(ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS);
+ if (reqKeys.count == 0) {
+ ALOGE("%s: No supported camera request keys!", __FUNCTION__);
+ return BAD_VALUE;
+ }
+
+ bool colorCorrectionModeAvailable = false;
+ for (size_t i = 0; i < reqKeys.count; i++) {
+ if (reqKeys.data.i32[i] == ANDROID_COLOR_CORRECTION_MODE) {
+ colorCorrectionModeAvailable = true;
+ break;
+ }
+ }
+
+ if (!colorCorrectionModeAvailable) {
+ return res;
+ }
+
+ std::vector<int32_t> supportedChTags;
+ auto chTags = c.find(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS);
+ if (chTags.count == 0) {
+ ALOGE("%s: No supported camera characteristics keys!", __FUNCTION__);
+ return BAD_VALUE;
+ }
+
+ std::vector<uint8_t> colorCorrectionAvailableModes = {
+ ANDROID_COLOR_CORRECTION_MODE_TRANSFORM_MATRIX,
+ ANDROID_COLOR_CORRECTION_MODE_FAST,
+ ANDROID_COLOR_CORRECTION_MODE_HIGH_QUALITY };
+ supportedChTags.reserve(chTags.count + 1);
+ supportedChTags.insert(supportedChTags.end(), chTags.data.i32,
+ chTags.data.i32 + chTags.count);
+ supportedChTags.push_back(ANDROID_COLOR_CORRECTION_AVAILABLE_MODES);
+ c.update(ANDROID_COLOR_CORRECTION_AVAILABLE_MODES,
+ colorCorrectionAvailableModes.data(), colorCorrectionAvailableModes.size());
+ c.update(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS, supportedChTags.data(),
+ supportedChTags.size());
+
+ return res;
+}
+
status_t CameraProviderManager::ProviderInfo::DeviceInfo3::addSessionConfigQueryVersionTag() {
sp<ProviderInfo> parentProvider = mParentProvider.promote();
if (parentProvider == nullptr) {
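
With COLOR_CORRECTION_AVAILABLE_MODES now guaranteed to be populated (either by the HAL or by the fallback above), an NDK client can check for CCT support before requesting a manual color temperature. A rough usage sketch, assuming the new CCT enum is exposed as ACAMERA_COLOR_CORRECTION_MODE_CCT and that chars and request are an already-obtained ACameraMetadata and ACaptureRequest:

// Sketch only: 'chars' and 'request' are assumed to be valid handles obtained
// through the usual NDK camera2 calls; the CCT enum name is an assumption.
bool supportsCct = false;
ACameraMetadata_const_entry modes;
if (ACameraMetadata_getConstEntry(chars,
        ACAMERA_COLOR_CORRECTION_AVAILABLE_MODES, &modes) == ACAMERA_OK) {
    for (uint32_t i = 0; i < modes.count; ++i) {
        if (modes.data.u8[i] == ACAMERA_COLOR_CORRECTION_MODE_CCT) {
            supportsCct = true;
            break;
        }
    }
}
if (supportsCct) {
    uint8_t mode = ACAMERA_COLOR_CORRECTION_MODE_CCT;
    int32_t temperatureK = 6500;  // clamped to the advertised temperature range
    int32_t tint = 0;             // clamped to [-50, 50]
    ACaptureRequest_setEntry_u8(request, ACAMERA_COLOR_CORRECTION_MODE, 1, &mode);
    ACaptureRequest_setEntry_i32(request, ACAMERA_COLOR_CORRECTION_COLOR_TEMPERATURE,
                                 1, &temperatureK);
    ACaptureRequest_setEntry_i32(request, ACAMERA_COLOR_CORRECTION_COLOR_TINT, 1, &tint);
}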
diff --git a/services/camera/libcameraservice/common/CameraProviderManager.h b/services/camera/libcameraservice/common/CameraProviderManager.h
index b686a58..f0db8bc 100644
--- a/services/camera/libcameraservice/common/CameraProviderManager.h
+++ b/services/camera/libcameraservice/common/CameraProviderManager.h
@@ -761,6 +761,7 @@
status_t addAutoframingTags();
status_t addPreCorrectionActiveArraySize();
status_t addReadoutTimestampTag(bool readoutTimestampSupported = true);
+ status_t addColorCorrectionAvailableModesTag(CameraMetadata& ch);
status_t addSessionConfigQueryVersionTag();
static void getSupportedSizes(const CameraMetadata& ch, uint32_t tag,
diff --git a/services/camera/libcameraservice/common/aidl/AidlProviderInfo.cpp b/services/camera/libcameraservice/common/aidl/AidlProviderInfo.cpp
index 4bfe11d..e1efd90 100644
--- a/services/camera/libcameraservice/common/aidl/AidlProviderInfo.cpp
+++ b/services/camera/libcameraservice/common/aidl/AidlProviderInfo.cpp
@@ -596,6 +596,14 @@
__FUNCTION__, strerror(-res), res);
}
+ if (flags::color_temperature()) {
+ res = addColorCorrectionAvailableModesTag(mCameraCharacteristics);
+ if (OK != res) {
+ ALOGE("%s: Unable to add COLOR_CORRECTION_AVAILABLE_MODES tag: %s (%d)",
+ __FUNCTION__, strerror(-res), res);
+ }
+ }
+
camera_metadata_entry flashAvailable =
mCameraCharacteristics.find(ANDROID_FLASH_INFO_AVAILABLE);
if (flashAvailable.count == 1 &&
@@ -683,6 +691,14 @@
__FUNCTION__, strerror(-res), res);
return;
}
+
+ if (flags::color_temperature()) {
+ res = addColorCorrectionAvailableModesTag(mPhysicalCameraCharacteristics[id]);
+ if (OK != res) {
+ ALOGE("%s: Unable to add COLOR_CORRECTION_AVAILABLE_MODES tag: %s (%d)",
+ __FUNCTION__, strerror(-res), res);
+ }
+ }
}
}
diff --git a/services/camera/libcameraservice/common/hidl/HidlProviderInfo.cpp b/services/camera/libcameraservice/common/hidl/HidlProviderInfo.cpp
index 6cedb04..edaee6e 100644
--- a/services/camera/libcameraservice/common/hidl/HidlProviderInfo.cpp
+++ b/services/camera/libcameraservice/common/hidl/HidlProviderInfo.cpp
@@ -675,6 +675,13 @@
ALOGE("%s: Unable to add sensorReadoutTimestamp tag: %s (%d)",
__FUNCTION__, strerror(-res), res);
}
+ if (flags::color_temperature()) {
+ res = addColorCorrectionAvailableModesTag(mCameraCharacteristics);
+ if (OK != res) {
+ ALOGE("%s: Unable to add COLOR_CORRECTION_AVAILABLE_MODES tag: %s (%d)",
+ __FUNCTION__, strerror(-res), res);
+ }
+ }
camera_metadata_entry flashAvailable =
mCameraCharacteristics.find(ANDROID_FLASH_INFO_AVAILABLE);
@@ -785,6 +792,14 @@
__FUNCTION__, strerror(-res), res);
return;
}
+
+ if (flags::color_temperature()) {
+ res = addColorCorrectionAvailableModesTag(mPhysicalCameraCharacteristics[id]);
+ if (OK != res) {
+ ALOGE("%s: Unable to add COLOR_CORRECTION_AVAILABLE_MODES tag: %s (%d)",
+ __FUNCTION__, strerror(-res), res);
+ }
+ }
}
}
}
diff --git a/services/camera/virtualcamera/VirtualCameraRenderThread.cc b/services/camera/virtualcamera/VirtualCameraRenderThread.cc
index 836d4e0..58c6549 100644
--- a/services/camera/virtualcamera/VirtualCameraRenderThread.cc
+++ b/services/camera/virtualcamera/VirtualCameraRenderThread.cc
@@ -524,16 +524,18 @@
std::chrono::nanoseconds timeSinceLastFrame) {
std::chrono::nanoseconds surfaceTimestamp = mEglSurfaceTexture->getTimestamp();
uint64_t lastSurfaceTimestamp = mLastSurfaceTimestampNanoseconds.load();
- if (surfaceTimestamp.count() < 0 ||
- surfaceTimestamp.count() == lastSurfaceTimestamp) {
- if (lastSurfaceTimestamp > 0) {
- // The timestamps were provided by the producer but we are
- // repeating the last frame, so we increase the previous timestamp by
- // the elapsed time sinced its capture, otherwise the camera framework
- // will discard the frame.
- surfaceTimestamp = std::chrono::nanoseconds(lastSurfaceTimestamp +
- timeSinceLastFrame.count());
- }
+ if (lastSurfaceTimestamp > 0 &&
+ surfaceTimestamp.count() <= lastSurfaceTimestamp) {
+ // The timestamps were provided by the producer but we are
+ // repeating the last frame, so we increase the previous timestamp by
+ // the elapsed time since its capture, otherwise the camera framework
+ // will discard the frame.
+ surfaceTimestamp = std::chrono::nanoseconds(lastSurfaceTimestamp +
+ timeSinceLastFrame.count());
+ ALOGI(
+ "Surface's timestamp is stall. Artificially increasing the surface "
+ "timestamp by %lld",
+ timeSinceLastFrame.count());
}
mLastSurfaceTimestampNanoseconds.store(surfaceTimestamp.count(),
std::memory_order_relaxed);
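
The new branch above only advances the timestamp when the producer has already delivered at least one timestamped frame and the current reading has not moved forward (the old code missed readings that went backwards). A condensed, standalone illustration of the resulting policy, not the actual VirtualCameraRenderThread code:

// Illustration only: remap a repeated or non-advancing surface timestamp so
// the camera framework still sees a monotonically increasing value.
int64_t adjustTimestampNs(int64_t surfaceNs, int64_t lastNs, int64_t elapsedNs) {
    if (lastNs > 0 && surfaceNs <= lastNs) {
        // Producer repeated (or rewound) the frame; keep the timeline moving
        // by crediting the wall-clock time since the previous frame.
        return lastNs + elapsedNs;
    }
    return surfaceNs;  // fresh frame, use the producer-provided timestamp
}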
diff --git a/services/oboeservice/AAudioServiceEndpoint.cpp b/services/oboeservice/AAudioServiceEndpoint.cpp
index e7d14a0..e49e9e7 100644
--- a/services/oboeservice/AAudioServiceEndpoint.cpp
+++ b/services/oboeservice/AAudioServiceEndpoint.cpp
@@ -25,6 +25,7 @@
#include <sstream>
#include <vector>
+#include <system/aaudio/AAudio.h>
#include <utils/Singleton.h>
@@ -195,20 +196,28 @@
? AAudioConvert_inputPresetToAudioSource(params->getInputPreset())
: AUDIO_SOURCE_DEFAULT;
audio_flags_mask_t flags;
+ std::optional<std::string> optTags = {};
if (direction == AAUDIO_DIRECTION_OUTPUT) {
flags = AAudio_computeAudioFlagsMask(
params->getAllowedCapturePolicy(),
params->getSpatializationBehavior(),
params->isContentSpatialized(),
AUDIO_OUTPUT_FLAG_FAST);
+ optTags = params->getTags();
} else {
flags = static_cast<audio_flags_mask_t>(AUDIO_FLAG_LOW_LATENCY
| AAudioConvert_privacySensitiveToAudioFlagsMask(params->isPrivacySensitive()));
}
- return {
+ audio_attributes_t nativeAttributes = {
.content_type = contentType,
.usage = usage,
.source = source,
.flags = flags,
- .tags = "" };
+ .tags = ""
+ };
+ if (optTags.has_value() && !optTags->empty()) {
+ strncpy(nativeAttributes.tags, optTags.value().c_str(), AAUDIO_ATTRIBUTES_TAGS_MAX_SIZE);
+ nativeAttributes.tags[AAUDIO_ATTRIBUTES_TAGS_MAX_SIZE - 1] = '\0';
+ }
+ return nativeAttributes;
}
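
The explicit terminator written after the strncpy above matters because strncpy() does not NUL-terminate the destination when the source string is at least as long as the buffer. A standalone sketch of the same copy-and-truncate pattern; the helper name and template form are illustrative only, not part of the patch:

// Illustrative helper: copy an optional tags string into a fixed-size char
// array, truncating if necessary and always NUL-terminating.
#include <cstring>
#include <optional>
#include <string>

template <size_t N>
void copyTags(const std::optional<std::string>& tags, char (&dst)[N]) {
    dst[0] = '\0';
    if (tags.has_value() && !tags->empty()) {
        std::strncpy(dst, tags->c_str(), N);
        dst[N - 1] = '\0';  // strncpy leaves no terminator when tags->size() >= N
    }
}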