Merge "Avoid using fromServiceSpecificError on success paths" into sc-dev
diff --git a/camera/ndk/impl/ACameraMetadata.cpp b/camera/ndk/impl/ACameraMetadata.cpp
index 7387442..dab2fef 100644
--- a/camera/ndk/impl/ACameraMetadata.cpp
+++ b/camera/ndk/impl/ACameraMetadata.cpp
@@ -24,6 +24,28 @@
using namespace android;
+// Formats not listed in the public API, but still available to AImageReader
+// Enum value must match corresponding enum in ui/PublicFormat.h (which is not
+// available to VNDK)
+enum AIMAGE_PRIVATE_FORMATS {
+ /**
+ * Unprocessed implementation-dependent raw
+ * depth measurements, opaque with 16 bit
+ * samples.
+ *
+ */
+
+ AIMAGE_FORMAT_RAW_DEPTH = 0x1002,
+
+ /**
+ * Device-specific 10-bit depth RAW image format.
+ *
+ * <p>Unprocessed implementation-dependent raw depth measurements, opaque with 10-bit samples
+ * and a device-specific bit layout.</p>
+ */
+ AIMAGE_FORMAT_RAW_DEPTH10 = 0x1003,
+};
+
/**
* ACameraMetadata Implementation
*/
@@ -290,6 +312,10 @@
format = AIMAGE_FORMAT_DEPTH_POINT_CLOUD;
} else if (format == HAL_PIXEL_FORMAT_Y16) {
format = AIMAGE_FORMAT_DEPTH16;
+ } else if (format == HAL_PIXEL_FORMAT_RAW16) {
+ format = static_cast<int32_t>(AIMAGE_FORMAT_RAW_DEPTH);
+ } else if (format == HAL_PIXEL_FORMAT_RAW10) {
+ format = static_cast<int32_t>(AIMAGE_FORMAT_RAW_DEPTH10);
}
filteredDepthStreamConfigs.push_back(format);
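
Note (illustrative, not part of the patch): with the filtering above, RAW16/RAW10 depth configurations now surface through the NDK metadata using the private format values. A minimal sketch of how a client could look for them, assuming the usual (format, width, height, isInput) int32 layout of ACAMERA_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS; 0x1003 is hard-coded because the private enum is not visible to applications.

#include <camera/NdkCameraMetadata.h>

// Hypothetical helper, for illustration only.
static bool hasRawDepth10(const ACameraMetadata* chars) {
    constexpr int32_t kRawDepth10 = 0x1003;  // value of AIMAGE_FORMAT_RAW_DEPTH10 above
    ACameraMetadata_const_entry entry = {};
    if (ACameraMetadata_getConstEntry(chars,
            ACAMERA_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS, &entry) != ACAMERA_OK) {
        return false;
    }
    for (uint32_t i = 0; i + 3 < entry.count; i += 4) {
        if (entry.data.i32[i] == kRawDepth10) return true;  // format is the first element
    }
    return false;
}
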
diff --git a/camera/ndk/include/camera/NdkCameraMetadataTags.h b/camera/ndk/include/camera/NdkCameraMetadataTags.h
index 70ce864..90515ab 100644
--- a/camera/ndk/include/camera/NdkCameraMetadataTags.h
+++ b/camera/ndk/include/camera/NdkCameraMetadataTags.h
@@ -3502,7 +3502,7 @@
* preCorrectionActiveArraySize covers the camera device's field of view "after" zoom. See
* ACAMERA_CONTROL_ZOOM_RATIO for details.</p>
* <p>For camera devices with the
- * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#REQUEST_AVAILABLE_CAPABILTIES_ULTRA_HIGH_RESOLUTION_SENSOR">CameraMetadata#REQUEST_AVAILABLE_CAPABILTIES_ULTRA_HIGH_RESOLUTION_SENSOR</a>
+ * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR">CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR</a>
* capability, ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE_MAXIMUM_RESOLUTION /
* ACAMERA_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE_MAXIMUM_RESOLUTION must be used as the
* coordinate system for requests where ACAMERA_SENSOR_PIXEL_MODE is set to
@@ -3964,7 +3964,7 @@
* configurations which belong to this physical camera, and it will advertise and will only
* advertise the maximum supported resolutions for a particular format.</p>
* <p>If this camera device isn't a physical camera device constituting a logical camera,
- * but a standalone <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#REQUEST_AVAILABLE_CAPABILTIES_ULTRA_HIGH_RESOLUTION_SENSOR">CameraMetadata#REQUEST_AVAILABLE_CAPABILTIES_ULTRA_HIGH_RESOLUTION_SENSOR</a>
+ * but a standalone <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR">CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR</a>
* camera, this field represents the multi-resolution input/output stream configurations of
* default mode and max resolution modes. The sizes will be the maximum resolution of a
* particular format for default mode and max resolution mode.</p>
@@ -4867,12 +4867,12 @@
* <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_DEFAULT">CameraMetadata#SENSOR_PIXEL_MODE_DEFAULT</a> mode.
* When operating in
* <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_DEFAULT">CameraMetadata#SENSOR_PIXEL_MODE_DEFAULT</a> mode, sensors
- * with <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#REQUEST_AVAILABLE_CAPABILTIES_ULTRA_HIGH_RESOLUTION_SENSOR">CameraMetadata#REQUEST_AVAILABLE_CAPABILTIES_ULTRA_HIGH_RESOLUTION_SENSOR</a>
+ * with <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR">CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR</a>
* capability would typically perform pixel binning in order to improve low light
* performance, noise reduction etc. However, in
* <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION">CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION</a>
* mode (supported only
- * by <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#REQUEST_AVAILABLE_CAPABILTIES_ULTRA_HIGH_RESOLUTION_SENSOR">CameraMetadata#REQUEST_AVAILABLE_CAPABILTIES_ULTRA_HIGH_RESOLUTION_SENSOR</a>
+ * by <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR">CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR</a>
* sensors), sensors typically operate in unbinned mode allowing for a larger image size.
* The stream configurations supported in
* <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION">CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION</a>
@@ -4905,7 +4905,7 @@
* </ul></p>
*
* <p>This key will only be present in devices advertisting the
- * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#REQUEST_AVAILABLE_CAPABILTIES_ULTRA_HIGH_RESOLUTION_SENSOR">CameraMetadata#REQUEST_AVAILABLE_CAPABILTIES_ULTRA_HIGH_RESOLUTION_SENSOR</a>
+ * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR">CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR</a>
* capability which also advertise <code>REMOSAIC_REPROCESSING</code> capability. On all other devices
* RAW targets will have a regular bayer pattern.</p>
*/
@@ -5231,7 +5231,7 @@
* <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION">CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION</a>
* counterparts.
* This key will only be present for devices which advertise the
- * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#REQUEST_AVAILABLE_CAPABILTIES_ULTRA_HIGH_RESOLUTION_SENSOR">CameraMetadata#REQUEST_AVAILABLE_CAPABILTIES_ULTRA_HIGH_RESOLUTION_SENSOR</a>
+ * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR">CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR</a>
* capability.</p>
* <p>The data representation is <code>int[4]</code>, which maps to <code>(left, top, width, height)</code>.</p>
*
@@ -5263,7 +5263,7 @@
* is, when ACAMERA_SENSOR_PIXEL_MODE is set to
* <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION">CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION</a>.
* This key will only be present for devices which advertise the
- * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#REQUEST_AVAILABLE_CAPABILTIES_ULTRA_HIGH_RESOLUTION_SENSOR">CameraMetadata#REQUEST_AVAILABLE_CAPABILTIES_ULTRA_HIGH_RESOLUTION_SENSOR</a>
+ * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR">CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR</a>
* capability.</p>
*
* @see ACAMERA_SENSOR_INFO_PHYSICAL_SIZE
@@ -5291,7 +5291,7 @@
* when ACAMERA_SENSOR_PIXEL_MODE is set to
* <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION">CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION</a>.
* This key will only be present for devices which advertise the
- * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#REQUEST_AVAILABLE_CAPABILTIES_ULTRA_HIGH_RESOLUTION_SENSOR">CameraMetadata#REQUEST_AVAILABLE_CAPABILTIES_ULTRA_HIGH_RESOLUTION_SENSOR</a>
+ * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR">CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR</a>
* capability.</p>
* <p>The data representation is <code>int[4]</code>, which maps to <code>(left, top, width, height)</code>.</p>
*
@@ -5321,7 +5321,7 @@
* <p>This key will not be present if REMOSAIC_REPROCESSING is not supported, since RAW images
* will have a regular bayer pattern.</p>
* <p>This key will not be present for sensors which don't have the
- * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#REQUEST_AVAILABLE_CAPABILTIES_ULTRA_HIGH_RESOLUTION_SENSOR">CameraMetadata#REQUEST_AVAILABLE_CAPABILTIES_ULTRA_HIGH_RESOLUTION_SENSOR</a>
+ * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR">CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR</a>
* capability.</p>
*/
ACAMERA_SENSOR_INFO_BINNING_FACTOR = // int32[2]
@@ -9264,13 +9264,13 @@
/**
* <p>This is the default sensor pixel mode. This is the only sensor pixel mode
* supported unless a camera device advertises
- * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#REQUEST_AVAILABLE_CAPABILTIES_ULTRA_HIGH_RESOLUTION_SENSOR">CameraMetadata#REQUEST_AVAILABLE_CAPABILTIES_ULTRA_HIGH_RESOLUTION_SENSOR</a>.</p>
+ * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR">CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR</a>.</p>
*/
ACAMERA_SENSOR_PIXEL_MODE_DEFAULT = 0,
/**
* <p>This sensor pixel mode is offered by devices with capability
- * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#REQUEST_AVAILABLE_CAPABILTIES_ULTRA_HIGH_RESOLUTION_SENSOR">CameraMetadata#REQUEST_AVAILABLE_CAPABILTIES_ULTRA_HIGH_RESOLUTION_SENSOR</a>.
+ * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR">CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR</a>.
* In this mode, sensors typically do not bin pixels, as a result can offer larger
* image sizes.</p>
*/
diff --git a/media/codec2/components/aac/C2SoftAacEnc.cpp b/media/codec2/components/aac/C2SoftAacEnc.cpp
index ea76cbb..d865ab2 100644
--- a/media/codec2/components/aac/C2SoftAacEnc.cpp
+++ b/media/codec2/components/aac/C2SoftAacEnc.cpp
@@ -272,8 +272,9 @@
return UNKNOWN_ERROR;
}
- if (sbrMode != -1 && aacProfile == C2Config::PROFILE_AAC_ELD) {
- if (AACENC_OK != aacEncoder_SetParam(mAACEncoder, AACENC_SBR_MODE, sbrMode)) {
+ if (sbrMode != C2Config::AAC_SBR_AUTO && aacProfile == C2Config::PROFILE_AAC_ELD) {
+ int aacSbrMode = sbrMode != C2Config::AAC_SBR_OFF;
+ if (AACENC_OK != aacEncoder_SetParam(mAACEncoder, AACENC_SBR_MODE, aacSbrMode)) {
ALOGE("Failed to set AAC encoder parameters");
return UNKNOWN_ERROR;
}
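
Note (illustrative, not part of the patch): the condition above now keys off the C2 enum rather than passing the raw value straight to the FDK encoder, and only the on/off information is forwarded. A minimal sketch of the resulting mapping, assuming the usual C2Config::aac_sbr_mode_t values (OFF, SINGLE_RATE, DUAL_RATE, AUTO):

// AAC_SBR_AUTO  -> AACENC_SBR_MODE is left untouched (encoder default).
// Anything else -> collapsed to the FDK on/off switch, mirroring the new code:
static int c2SbrModeToAacEncSbr(C2Config::aac_sbr_mode_t mode) {
    return (mode != C2Config::AAC_SBR_OFF) ? 1 : 0;  // 1 enables SBR, 0 disables it
}
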
diff --git a/media/codec2/hidl/1.0/vts/.clang-format b/media/codec2/hidl/1.0/vts/.clang-format
new file mode 120000
index 0000000..136279c
--- /dev/null
+++ b/media/codec2/hidl/1.0/vts/.clang-format
@@ -0,0 +1 @@
+../../../../../../../build/soong/scripts/system-clang-format
\ No newline at end of file
diff --git a/media/codec2/hidl/1.0/vts/functional/audio/VtsHalMediaC2V1_0TargetAudioDecTest.cpp b/media/codec2/hidl/1.0/vts/functional/audio/VtsHalMediaC2V1_0TargetAudioDecTest.cpp
index 1f95eaf..efc5813 100644
--- a/media/codec2/hidl/1.0/vts/functional/audio/VtsHalMediaC2V1_0TargetAudioDecTest.cpp
+++ b/media/codec2/hidl/1.0/vts/functional/audio/VtsHalMediaC2V1_0TargetAudioDecTest.cpp
@@ -33,11 +33,11 @@
using android::C2AllocatorIon;
#include "media_c2_hidl_test_common.h"
+using DecodeTestParameters = std::tuple<std::string, std::string, uint32_t, bool>;
+static std::vector<DecodeTestParameters> kDecodeTestParameters;
-static std::vector<std::tuple<std::string, std::string, std::string, std::string>>
- kDecodeTestParameters;
-
-static std::vector<std::tuple<std::string, std::string, std::string>> kCsdFlushTestParameters;
+using CsdFlushTestParameters = std::tuple<std::string, std::string, bool>;
+static std::vector<CsdFlushTestParameters> kCsdFlushTestParameters;
struct CompToURL {
std::string mime;
@@ -46,36 +46,26 @@
};
std::vector<CompToURL> kCompToURL = {
- {"mp4a-latm",
- "bbb_aac_stereo_128kbps_48000hz.aac", "bbb_aac_stereo_128kbps_48000hz.info"},
- {"mp4a-latm",
- "bbb_aac_stereo_128kbps_48000hz.aac", "bbb_aac_stereo_128kbps_48000hz_multi_frame.info"},
- {"audio/mpeg",
- "bbb_mp3_stereo_192kbps_48000hz.mp3", "bbb_mp3_stereo_192kbps_48000hz.info"},
- {"audio/mpeg",
- "bbb_mp3_stereo_192kbps_48000hz.mp3", "bbb_mp3_stereo_192kbps_48000hz_multi_frame.info"},
- {"3gpp",
- "sine_amrnb_1ch_12kbps_8000hz.amrnb", "sine_amrnb_1ch_12kbps_8000hz.info"},
- {"3gpp",
- "sine_amrnb_1ch_12kbps_8000hz.amrnb", "sine_amrnb_1ch_12kbps_8000hz_multi_frame.info"},
- {"amr-wb",
- "bbb_amrwb_1ch_14kbps_16000hz.amrwb", "bbb_amrwb_1ch_14kbps_16000hz.info"},
- {"amr-wb",
- "bbb_amrwb_1ch_14kbps_16000hz.amrwb", "bbb_amrwb_1ch_14kbps_16000hz_multi_frame.info"},
- {"vorbis",
- "bbb_vorbis_stereo_128kbps_48000hz.vorbis", "bbb_vorbis_stereo_128kbps_48000hz.info"},
- {"opus",
- "bbb_opus_stereo_128kbps_48000hz.opus", "bbb_opus_stereo_128kbps_48000hz.info"},
- {"g711-alaw",
- "bbb_g711alaw_1ch_8khz.raw", "bbb_g711alaw_1ch_8khz.info"},
- {"g711-mlaw",
- "bbb_g711mulaw_1ch_8khz.raw", "bbb_g711mulaw_1ch_8khz.info"},
- {"gsm",
- "bbb_gsm_1ch_8khz_13kbps.raw", "bbb_gsm_1ch_8khz_13kbps.info"},
- {"raw",
- "bbb_raw_1ch_8khz_s32le.raw", "bbb_raw_1ch_8khz_s32le.info"},
- {"flac",
- "bbb_flac_stereo_680kbps_48000hz.flac", "bbb_flac_stereo_680kbps_48000hz.info"},
+ {"mp4a-latm", "bbb_aac_stereo_128kbps_48000hz.aac", "bbb_aac_stereo_128kbps_48000hz.info"},
+ {"mp4a-latm", "bbb_aac_stereo_128kbps_48000hz.aac",
+ "bbb_aac_stereo_128kbps_48000hz_multi_frame.info"},
+ {"audio/mpeg", "bbb_mp3_stereo_192kbps_48000hz.mp3", "bbb_mp3_stereo_192kbps_48000hz.info"},
+ {"audio/mpeg", "bbb_mp3_stereo_192kbps_48000hz.mp3",
+ "bbb_mp3_stereo_192kbps_48000hz_multi_frame.info"},
+ {"3gpp", "sine_amrnb_1ch_12kbps_8000hz.amrnb", "sine_amrnb_1ch_12kbps_8000hz.info"},
+ {"3gpp", "sine_amrnb_1ch_12kbps_8000hz.amrnb",
+ "sine_amrnb_1ch_12kbps_8000hz_multi_frame.info"},
+ {"amr-wb", "bbb_amrwb_1ch_14kbps_16000hz.amrwb", "bbb_amrwb_1ch_14kbps_16000hz.info"},
+ {"amr-wb", "bbb_amrwb_1ch_14kbps_16000hz.amrwb",
+ "bbb_amrwb_1ch_14kbps_16000hz_multi_frame.info"},
+ {"vorbis", "bbb_vorbis_stereo_128kbps_48000hz.vorbis",
+ "bbb_vorbis_stereo_128kbps_48000hz.info"},
+ {"opus", "bbb_opus_stereo_128kbps_48000hz.opus", "bbb_opus_stereo_128kbps_48000hz.info"},
+ {"g711-alaw", "bbb_g711alaw_1ch_8khz.raw", "bbb_g711alaw_1ch_8khz.info"},
+ {"g711-mlaw", "bbb_g711mulaw_1ch_8khz.raw", "bbb_g711mulaw_1ch_8khz.info"},
+ {"gsm", "bbb_gsm_1ch_8khz_13kbps.raw", "bbb_gsm_1ch_8khz_13kbps.info"},
+ {"raw", "bbb_raw_1ch_8khz_s32le.raw", "bbb_raw_1ch_8khz_s32le.info"},
+ {"flac", "bbb_flac_stereo_680kbps_48000hz.flac", "bbb_flac_stereo_680kbps_48000hz.info"},
};
class LinearBuffer : public C2Buffer {
@@ -212,9 +202,8 @@
}
};
-class Codec2AudioDecHidlTest
- : public Codec2AudioDecHidlTestBase,
- public ::testing::WithParamInterface<std::tuple<std::string, std::string>> {
+class Codec2AudioDecHidlTest : public Codec2AudioDecHidlTestBase,
+ public ::testing::WithParamInterface<TestParameters> {
void getParams() {
mInstanceName = std::get<0>(GetParam());
mComponentName = std::get<1>(GetParam());
@@ -438,10 +427,8 @@
ASSERT_EQ(mComponent->stop(), C2_OK);
}
-class Codec2AudioDecDecodeTest
- : public Codec2AudioDecHidlTestBase,
- public ::testing::WithParamInterface<
- std::tuple<std::string, std::string, std::string, std::string>> {
+class Codec2AudioDecDecodeTest : public Codec2AudioDecHidlTestBase,
+ public ::testing::WithParamInterface<DecodeTestParameters> {
void getParams() {
mInstanceName = std::get<0>(GetParam());
mComponentName = std::get<1>(GetParam());
@@ -452,9 +439,8 @@
description("Decodes input file");
if (mDisableTest) GTEST_SKIP() << "Test is disabled";
- uint32_t streamIndex = std::stoi(std::get<2>(GetParam()));
- ;
- bool signalEOS = !std::get<3>(GetParam()).compare("true");
+ uint32_t streamIndex = std::get<2>(GetParam());
+ bool signalEOS = std::get<3>(GetParam());
mTimestampDevTest = true;
char mURL[512], info[512];
android::Vector<FrameInfo> Info;
@@ -771,9 +757,8 @@
ASSERT_EQ(mComponent->stop(), C2_OK);
}
-class Codec2AudioDecCsdInputTests
- : public Codec2AudioDecHidlTestBase,
- public ::testing::WithParamInterface<std::tuple<std::string, std::string, std::string>> {
+class Codec2AudioDecCsdInputTests : public Codec2AudioDecHidlTestBase,
+ public ::testing::WithParamInterface<CsdFlushTestParameters> {
void getParams() {
mInstanceName = std::get<0>(GetParam());
mComponentName = std::get<1>(GetParam());
@@ -819,7 +804,7 @@
ASSERT_EQ(eleStream.is_open(), true);
bool signalEOS = false;
- bool flushCsd = !std::get<2>(GetParam()).compare("true");
+ bool flushCsd = std::get<2>(GetParam());
ALOGV("sending %d csd data ", numCsds);
int framesToDecode = numCsds;
ASSERT_NO_FATAL_FAILURE(decodeNFrames(mComponent, mQueueLock, mQueueCondition, mWorkQueue,
@@ -875,16 +860,16 @@
}
INSTANTIATE_TEST_SUITE_P(PerInstance, Codec2AudioDecHidlTest, testing::ValuesIn(kTestParameters),
- android::hardware::PrintInstanceTupleNameToString<>);
+ PrintInstanceTupleNameToString<>);
// DecodeTest with StreamIndex and EOS / No EOS
INSTANTIATE_TEST_SUITE_P(StreamIndexAndEOS, Codec2AudioDecDecodeTest,
testing::ValuesIn(kDecodeTestParameters),
- android::hardware::PrintInstanceTupleNameToString<>);
+ PrintInstanceTupleNameToString<>);
INSTANTIATE_TEST_SUITE_P(CsdInputs, Codec2AudioDecCsdInputTests,
testing::ValuesIn(kCsdFlushTestParameters),
- android::hardware::PrintInstanceTupleNameToString<>);
+ PrintInstanceTupleNameToString<>);
} // anonymous namespace
@@ -893,18 +878,18 @@
kTestParameters = getTestParameters(C2Component::DOMAIN_AUDIO, C2Component::KIND_DECODER);
for (auto params : kTestParameters) {
kDecodeTestParameters.push_back(
- std::make_tuple(std::get<0>(params), std::get<1>(params), "0", "false"));
+ std::make_tuple(std::get<0>(params), std::get<1>(params), 0, false));
kDecodeTestParameters.push_back(
- std::make_tuple(std::get<0>(params), std::get<1>(params), "0", "true"));
+ std::make_tuple(std::get<0>(params), std::get<1>(params), 0, true));
kDecodeTestParameters.push_back(
- std::make_tuple(std::get<0>(params), std::get<1>(params), "1", "false"));
+ std::make_tuple(std::get<0>(params), std::get<1>(params), 1, false));
kDecodeTestParameters.push_back(
- std::make_tuple(std::get<0>(params), std::get<1>(params), "1", "true"));
+ std::make_tuple(std::get<0>(params), std::get<1>(params), 1, true));
kCsdFlushTestParameters.push_back(
- std::make_tuple(std::get<0>(params), std::get<1>(params), "true"));
+ std::make_tuple(std::get<0>(params), std::get<1>(params), true));
kCsdFlushTestParameters.push_back(
- std::make_tuple(std::get<0>(params), std::get<1>(params), "false"));
+ std::make_tuple(std::get<0>(params), std::get<1>(params), false));
}
::testing::InitGoogleTest(&argc, argv);
diff --git a/media/codec2/hidl/1.0/vts/functional/audio/VtsHalMediaC2V1_0TargetAudioEncTest.cpp b/media/codec2/hidl/1.0/vts/functional/audio/VtsHalMediaC2V1_0TargetAudioEncTest.cpp
index 5e8809e..562c77f 100644
--- a/media/codec2/hidl/1.0/vts/functional/audio/VtsHalMediaC2V1_0TargetAudioEncTest.cpp
+++ b/media/codec2/hidl/1.0/vts/functional/audio/VtsHalMediaC2V1_0TargetAudioEncTest.cpp
@@ -35,8 +35,9 @@
#include "media_c2_hidl_test_common.h"
-static std::vector<std::tuple<std::string, std::string, std::string, std::string>>
- kEncodeTestParameters;
+using EncodeTestParameters = std::tuple<std::string, std::string, bool, int32_t>;
+
+static std::vector<EncodeTestParameters> kEncodeTestParameters;
class LinearBuffer : public C2Buffer {
public:
@@ -170,9 +171,8 @@
}
};
-class Codec2AudioEncHidlTest
- : public Codec2AudioEncHidlTestBase,
- public ::testing::WithParamInterface<std::tuple<std::string, std::string>> {
+class Codec2AudioEncHidlTest : public Codec2AudioEncHidlTestBase,
+ public ::testing::WithParamInterface<TestParameters> {
void getParams() {
mInstanceName = std::get<0>(GetParam());
mComponentName = std::get<1>(GetParam());
@@ -246,7 +246,8 @@
*nChannels = 1;
*nSampleRate = 16000;
*samplesPerFrame = 160;
- } else return false;
+ } else
+ return false;
return true;
}
@@ -258,10 +259,8 @@
const char* mURL;
};
static const CompToURL kCompToURL[] = {
- {"mp4a-latm", "bbb_raw_2ch_48khz_s16le.raw"},
- {"3gpp", "bbb_raw_1ch_8khz_s16le.raw"},
- {"amr-wb", "bbb_raw_1ch_16khz_s16le.raw"},
- {"flac", "bbb_raw_2ch_48khz_s16le.raw"},
+ {"mp4a-latm", "bbb_raw_2ch_48khz_s16le.raw"}, {"3gpp", "bbb_raw_1ch_8khz_s16le.raw"},
+ {"amr-wb", "bbb_raw_1ch_16khz_s16le.raw"}, {"flac", "bbb_raw_2ch_48khz_s16le.raw"},
{"opus", "bbb_raw_2ch_48khz_s16le.raw"},
};
@@ -363,10 +362,8 @@
ASSERT_EQ(mDisableTest, false);
}
-class Codec2AudioEncEncodeTest
- : public Codec2AudioEncHidlTestBase,
- public ::testing::WithParamInterface<
- std::tuple<std::string, std::string, std::string, std::string>> {
+class Codec2AudioEncEncodeTest : public Codec2AudioEncHidlTestBase,
+ public ::testing::WithParamInterface<EncodeTestParameters> {
void getParams() {
mInstanceName = std::get<0>(GetParam());
mComponentName = std::get<1>(GetParam());
@@ -379,9 +376,9 @@
char mURL[512];
strcpy(mURL, sResourceDir.c_str());
GetURLForComponent(mURL);
- bool signalEOS = !std::get<2>(GetParam()).compare("true");
+ bool signalEOS = std::get<2>(GetParam());
// Ratio w.r.t to mInputMaxBufSize
- int32_t inputMaxBufRatio = std::stoi(std::get<3>(GetParam()));
+ int32_t inputMaxBufRatio = std::get<3>(GetParam());
int32_t nChannels;
int32_t nSampleRate;
@@ -428,8 +425,7 @@
ALOGE("framesReceived : %d inputFrames : %u", mFramesReceived, numFrames);
ASSERT_TRUE(false);
}
- if ((mMime.find("flac") != std::string::npos) ||
- (mMime.find("opus") != std::string::npos) ||
+ if ((mMime.find("flac") != std::string::npos) || (mMime.find("opus") != std::string::npos) ||
(mMime.find("mp4a-latm") != std::string::npos)) {
ASSERT_TRUE(mCsd) << "CSD buffer missing";
}
@@ -753,13 +749,13 @@
}
INSTANTIATE_TEST_SUITE_P(PerInstance, Codec2AudioEncHidlTest, testing::ValuesIn(kTestParameters),
- android::hardware::PrintInstanceTupleNameToString<>);
+ PrintInstanceTupleNameToString<>);
// EncodeTest with EOS / No EOS and inputMaxBufRatio
// inputMaxBufRatio is ratio w.r.t. to mInputMaxBufSize
INSTANTIATE_TEST_SUITE_P(EncodeTest, Codec2AudioEncEncodeTest,
testing::ValuesIn(kEncodeTestParameters),
- android::hardware::PrintInstanceTupleNameToString<>);
+ PrintInstanceTupleNameToString<>);
} // anonymous namespace
@@ -768,13 +764,13 @@
kTestParameters = getTestParameters(C2Component::DOMAIN_AUDIO, C2Component::KIND_ENCODER);
for (auto params : kTestParameters) {
kEncodeTestParameters.push_back(
- std::make_tuple(std::get<0>(params), std::get<1>(params), "false", "1"));
+ std::make_tuple(std::get<0>(params), std::get<1>(params), false, 1));
kEncodeTestParameters.push_back(
- std::make_tuple(std::get<0>(params), std::get<1>(params), "false", "2"));
+ std::make_tuple(std::get<0>(params), std::get<1>(params), false, 2));
kEncodeTestParameters.push_back(
- std::make_tuple(std::get<0>(params), std::get<1>(params), "true", "1"));
+ std::make_tuple(std::get<0>(params), std::get<1>(params), true, 1));
kEncodeTestParameters.push_back(
- std::make_tuple(std::get<0>(params), std::get<1>(params), "true", "2"));
+ std::make_tuple(std::get<0>(params), std::get<1>(params), true, 2));
}
::testing::InitGoogleTest(&argc, argv);
diff --git a/media/codec2/hidl/1.0/vts/functional/common/media_c2_hidl_test_common.cpp b/media/codec2/hidl/1.0/vts/functional/common/media_c2_hidl_test_common.cpp
index de34705..1f1681d 100644
--- a/media/codec2/hidl/1.0/vts/functional/common/media_c2_hidl_test_common.cpp
+++ b/media/codec2/hidl/1.0/vts/functional/common/media_c2_hidl_test_common.cpp
@@ -27,13 +27,13 @@
std::string sComponentNamePrefix = "";
static constexpr struct option kArgOptions[] = {
- {"res", required_argument, 0, 'P'},
- {"prefix", required_argument, 0, 'p'},
- {"help", required_argument, 0, 'h'},
- {nullptr, 0, nullptr, 0},
+ {"res", required_argument, 0, 'P'},
+ {"prefix", required_argument, 0, 'p'},
+ {"help", required_argument, 0, 'h'},
+ {nullptr, 0, nullptr, 0},
};
-void printUsage(char *me) {
+void printUsage(char* me) {
std::cerr << "VTS tests to test codec2 components \n";
std::cerr << "Usage: " << me << " [options] \n";
std::cerr << "\t -P, --res: Mandatory path to a folder that contains test resources \n";
@@ -49,17 +49,17 @@
int option_index;
while ((arg = getopt_long(argc, argv, ":P:p:h", kArgOptions, &option_index)) != -1) {
switch (arg) {
- case 'P':
- sResourceDir = optarg;
- break;
- case 'p':
- sComponentNamePrefix = optarg;
- break;
- case 'h':
- printUsage(argv[0]);
- break;
- default:
- break;
+ case 'P':
+ sResourceDir = optarg;
+ break;
+ case 'p':
+ sComponentNamePrefix = optarg;
+ break;
+ case 'h':
+ printUsage(argv[0]);
+ break;
+ default:
+ break;
}
}
}
@@ -134,8 +134,7 @@
for (size_t i = 0; i < updates.size(); ++i) {
C2Param* param = updates[i].get();
if (param->index() == C2StreamInitDataInfo::output::PARAM_TYPE) {
- C2StreamInitDataInfo::output* csdBuffer =
- (C2StreamInitDataInfo::output*)(param);
+ C2StreamInitDataInfo::output* csdBuffer = (C2StreamInitDataInfo::output*)(param);
size_t csdSize = csdBuffer->flexCount();
if (csdSize > 0) csd = true;
} else if ((param->index() == C2StreamSampleRateInfo::output::PARAM_TYPE) ||
@@ -160,8 +159,7 @@
typedef std::unique_lock<std::mutex> ULock;
ULock l(queueLock);
workQueue.push_back(std::move(work));
- if (!flushedIndices.empty() &&
- (frameIndexIt != flushedIndices.end())) {
+ if (!flushedIndices.empty() && (frameIndexIt != flushedIndices.end())) {
flushedIndices.erase(frameIndexIt);
}
queueCondition.notify_all();
@@ -178,15 +176,15 @@
}
// Return all test parameters, a list of tuple of <instance, component>
-const std::vector<std::tuple<std::string, std::string>>& getTestParameters() {
+const std::vector<TestParameters>& getTestParameters() {
return getTestParameters(C2Component::DOMAIN_OTHER, C2Component::KIND_OTHER);
}
// Return all test parameters, a list of tuple of <instance, component> with matching domain and
// kind.
-const std::vector<std::tuple<std::string, std::string>>& getTestParameters(
- C2Component::domain_t domain, C2Component::kind_t kind) {
- static std::vector<std::tuple<std::string, std::string>> parameters;
+const std::vector<TestParameters>& getTestParameters(C2Component::domain_t domain,
+ C2Component::kind_t kind) {
+ static std::vector<TestParameters> parameters;
auto instances = android::Codec2Client::GetServiceNames();
for (std::string instance : instances) {
diff --git a/media/codec2/hidl/1.0/vts/functional/common/media_c2_hidl_test_common.h b/media/codec2/hidl/1.0/vts/functional/common/media_c2_hidl_test_common.h
index a2f1561..e74f247 100644
--- a/media/codec2/hidl/1.0/vts/functional/common/media_c2_hidl_test_common.h
+++ b/media/codec2/hidl/1.0/vts/functional/common/media_c2_hidl_test_common.h
@@ -40,7 +40,8 @@
using namespace ::std::chrono;
-static std::vector<std::tuple<std::string, std::string>> kTestParameters;
+using TestParameters = std::tuple<std::string, std::string>;
+static std::vector<TestParameters> kTestParameters;
// Resource directory
extern std::string sResourceDir;
@@ -54,6 +55,18 @@
int64_t timestamp;
};
+template <typename... T>
+static inline std::string PrintInstanceTupleNameToString(
+ const testing::TestParamInfo<std::tuple<T...>>& info) {
+ std::stringstream ss;
+ std::apply([&ss](auto&&... elems) { ((ss << elems << '_'), ...); }, info.param);
+ ss << info.index;
+ std::string param_string = ss.str();
+ auto isNotAlphaNum = [](char c) { return !std::isalnum(c); };
+ std::replace_if(param_string.begin(), param_string.end(), isNotAlphaNum, '_');
+ return param_string;
+}
+
/*
* Handle Callback functions onWorkDone(), onTripped(),
* onError(), onDeath(), onFramesRendered()
@@ -114,12 +127,12 @@
void parseArgs(int argc, char** argv);
// Return all test parameters, a list of tuple of <instance, component>.
-const std::vector<std::tuple<std::string, std::string>>& getTestParameters();
+const std::vector<TestParameters>& getTestParameters();
// Return all test parameters, a list of tuple of <instance, component> with matching domain and
// kind.
-const std::vector<std::tuple<std::string, std::string>>& getTestParameters(
- C2Component::domain_t domain, C2Component::kind_t kind);
+const std::vector<TestParameters>& getTestParameters(C2Component::domain_t domain,
+ C2Component::kind_t kind);
/*
* common functions declarations
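
Note (illustrative, not part of the patch): the PrintInstanceTupleNameToString helper added above accepts tuples of any arity and element type, which is what lets the INSTANTIATE_TEST_SUITE_P calls drop android::hardware::PrintInstanceTupleNameToString. A rough sketch of the names it generates, using a made-up parameter value:

using DecodeTestParameters = std::tuple<std::string, std::string, uint32_t, bool>;

// A parameter {"default", "c2.android.avc.decoder", 1, false} at list index 3:
testing::TestParamInfo<DecodeTestParameters> info(
        std::make_tuple(std::string("default"), std::string("c2.android.avc.decoder"), 1u, false),
        /*index=*/3);
// PrintInstanceTupleNameToString(info) yields "default_c2_android_avc_decoder_1_0_3":
// each element is streamed and joined with '_', the index is appended, and any
// non-alphanumeric character (here the dots in the component name) becomes '_'.
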
diff --git a/media/codec2/hidl/1.0/vts/functional/component/VtsHalMediaC2V1_0TargetComponentTest.cpp b/media/codec2/hidl/1.0/vts/functional/component/VtsHalMediaC2V1_0TargetComponentTest.cpp
index 0648dd9..29acd33 100644
--- a/media/codec2/hidl/1.0/vts/functional/component/VtsHalMediaC2V1_0TargetComponentTest.cpp
+++ b/media/codec2/hidl/1.0/vts/functional/component/VtsHalMediaC2V1_0TargetComponentTest.cpp
@@ -53,9 +53,8 @@
}
namespace {
-
-static std::vector<std::tuple<std::string, std::string, std::string, std::string>>
- kInputTestParameters;
+using InputTestParameters = std::tuple<std::string, std::string, uint32_t, bool>;
+static std::vector<InputTestParameters> kInputTestParameters;
// google.codec2 Component test setup
class Codec2ComponentHidlTestBase : public ::testing::Test {
@@ -120,9 +119,8 @@
}
};
-class Codec2ComponentHidlTest
- : public Codec2ComponentHidlTestBase,
- public ::testing::WithParamInterface<std::tuple<std::string, std::string>> {
+class Codec2ComponentHidlTest : public Codec2ComponentHidlTestBase,
+ public ::testing::WithParamInterface<TestParameters> {
void getParams() {
mInstanceName = std::get<0>(GetParam());
mComponentName = std::get<1>(GetParam());
@@ -317,10 +315,8 @@
ASSERT_EQ(err, C2_OK);
}
-class Codec2ComponentInputTests
- : public Codec2ComponentHidlTestBase,
- public ::testing::WithParamInterface<
- std::tuple<std::string, std::string, std::string, std::string>> {
+class Codec2ComponentInputTests : public Codec2ComponentHidlTestBase,
+ public ::testing::WithParamInterface<InputTestParameters> {
void getParams() {
mInstanceName = std::get<0>(GetParam());
mComponentName = std::get<1>(GetParam());
@@ -330,8 +326,8 @@
TEST_P(Codec2ComponentInputTests, InputBufferTest) {
description("Tests for different inputs");
- uint32_t flags = std::stoul(std::get<2>(GetParam()));
- bool isNullBuffer = !std::get<3>(GetParam()).compare("true");
+ uint32_t flags = std::get<2>(GetParam());
+ bool isNullBuffer = std::get<3>(GetParam());
if (isNullBuffer)
ALOGD("Testing for null input buffer with flag : %u", flags);
else
@@ -350,11 +346,10 @@
}
INSTANTIATE_TEST_SUITE_P(PerInstance, Codec2ComponentHidlTest, testing::ValuesIn(kTestParameters),
- android::hardware::PrintInstanceTupleNameToString<>);
+ PrintInstanceTupleNameToString<>);
INSTANTIATE_TEST_CASE_P(NonStdInputs, Codec2ComponentInputTests,
- testing::ValuesIn(kInputTestParameters),
- android::hardware::PrintInstanceTupleNameToString<>);
+ testing::ValuesIn(kInputTestParameters), PrintInstanceTupleNameToString<>);
} // anonymous namespace
// TODO: Add test for Invalid work,
@@ -364,18 +359,15 @@
kTestParameters = getTestParameters();
for (auto params : kTestParameters) {
kInputTestParameters.push_back(
- std::make_tuple(std::get<0>(params), std::get<1>(params), "0", "true"));
+ std::make_tuple(std::get<0>(params), std::get<1>(params), 0, true));
+ kInputTestParameters.push_back(std::make_tuple(std::get<0>(params), std::get<1>(params),
+ C2FrameData::FLAG_END_OF_STREAM, true));
kInputTestParameters.push_back(
- std::make_tuple(std::get<0>(params), std::get<1>(params),
- std::to_string(C2FrameData::FLAG_END_OF_STREAM), "true"));
- kInputTestParameters.push_back(
- std::make_tuple(std::get<0>(params), std::get<1>(params), "0", "false"));
- kInputTestParameters.push_back(
- std::make_tuple(std::get<0>(params), std::get<1>(params),
- std::to_string(C2FrameData::FLAG_CODEC_CONFIG), "false"));
- kInputTestParameters.push_back(
- std::make_tuple(std::get<0>(params), std::get<1>(params),
- std::to_string(C2FrameData::FLAG_END_OF_STREAM), "false"));
+ std::make_tuple(std::get<0>(params), std::get<1>(params), 0, false));
+ kInputTestParameters.push_back(std::make_tuple(std::get<0>(params), std::get<1>(params),
+ C2FrameData::FLAG_CODEC_CONFIG, false));
+ kInputTestParameters.push_back(std::make_tuple(std::get<0>(params), std::get<1>(params),
+ C2FrameData::FLAG_END_OF_STREAM, false));
}
::testing::InitGoogleTest(&argc, argv);
diff --git a/media/codec2/hidl/1.0/vts/functional/video/VtsHalMediaC2V1_0TargetVideoDecTest.cpp b/media/codec2/hidl/1.0/vts/functional/video/VtsHalMediaC2V1_0TargetVideoDecTest.cpp
index f29da0e..d0a1c31 100644
--- a/media/codec2/hidl/1.0/vts/functional/video/VtsHalMediaC2V1_0TargetVideoDecTest.cpp
+++ b/media/codec2/hidl/1.0/vts/functional/video/VtsHalMediaC2V1_0TargetVideoDecTest.cpp
@@ -40,10 +40,11 @@
#include "media_c2_hidl_test_common.h"
#include "media_c2_video_hidl_test_common.h"
-static std::vector<std::tuple<std::string, std::string, std::string, std::string>>
- kDecodeTestParameters;
+using DecodeTestParameters = std::tuple<std::string, std::string, uint32_t, bool>;
+static std::vector<DecodeTestParameters> kDecodeTestParameters;
-static std::vector<std::tuple<std::string, std::string, std::string>> kCsdFlushTestParameters;
+using CsdFlushTestParameters = std::tuple<std::string, std::string, bool>;
+static std::vector<CsdFlushTestParameters> kCsdFlushTestParameters;
struct CompToURL {
std::string mime;
@@ -52,43 +53,30 @@
std::string chksum;
};
std::vector<CompToURL> kCompToURL = {
- {"avc",
- "bbb_avc_176x144_300kbps_60fps.h264", "bbb_avc_176x144_300kbps_60fps.info",
- "bbb_avc_176x144_300kbps_60fps_chksum.md5"},
- {"avc",
- "bbb_avc_640x360_768kbps_30fps.h264", "bbb_avc_640x360_768kbps_30fps.info",
- "bbb_avc_640x360_768kbps_30fps_chksum.md5"},
- {"hevc",
- "bbb_hevc_176x144_176kbps_60fps.hevc", "bbb_hevc_176x144_176kbps_60fps.info",
- "bbb_hevc_176x144_176kbps_60fps_chksum.md5"},
- {"hevc",
- "bbb_hevc_640x360_1600kbps_30fps.hevc", "bbb_hevc_640x360_1600kbps_30fps.info",
- "bbb_hevc_640x360_1600kbps_30fps_chksum.md5"},
- {"mpeg2",
- "bbb_mpeg2_176x144_105kbps_25fps.m2v", "bbb_mpeg2_176x144_105kbps_25fps.info", ""},
- {"mpeg2",
- "bbb_mpeg2_352x288_1mbps_60fps.m2v","bbb_mpeg2_352x288_1mbps_60fps.info", ""},
- {"3gpp",
- "bbb_h263_352x288_300kbps_12fps.h263", "bbb_h263_352x288_300kbps_12fps.info", ""},
- {"mp4v-es",
- "bbb_mpeg4_352x288_512kbps_30fps.m4v", "bbb_mpeg4_352x288_512kbps_30fps.info", ""},
- {"vp8",
- "bbb_vp8_176x144_240kbps_60fps.vp8", "bbb_vp8_176x144_240kbps_60fps.info", ""},
- {"vp8",
- "bbb_vp8_640x360_2mbps_30fps.vp8", "bbb_vp8_640x360_2mbps_30fps.info",
- "bbb_vp8_640x360_2mbps_30fps_chksm.md5"},
- {"vp9",
- "bbb_vp9_176x144_285kbps_60fps.vp9", "bbb_vp9_176x144_285kbps_60fps.info", ""},
- {"vp9",
- "bbb_vp9_640x360_1600kbps_30fps.vp9", "bbb_vp9_640x360_1600kbps_30fps.info",
- "bbb_vp9_640x360_1600kbps_30fps_chksm.md5"},
- {"vp9",
- "bbb_vp9_704x480_280kbps_24fps_altref_2.vp9",
- "bbb_vp9_704x480_280kbps_24fps_altref_2.info", ""},
- {"av01",
- "bbb_av1_640_360.av1", "bbb_av1_640_360.info", "bbb_av1_640_360_chksum.md5"},
- {"av01",
- "bbb_av1_176_144.av1", "bbb_av1_176_144.info", "bbb_av1_176_144_chksm.md5"},
+ {"avc", "bbb_avc_176x144_300kbps_60fps.h264", "bbb_avc_176x144_300kbps_60fps.info",
+ "bbb_avc_176x144_300kbps_60fps_chksum.md5"},
+ {"avc", "bbb_avc_640x360_768kbps_30fps.h264", "bbb_avc_640x360_768kbps_30fps.info",
+ "bbb_avc_640x360_768kbps_30fps_chksum.md5"},
+ {"hevc", "bbb_hevc_176x144_176kbps_60fps.hevc", "bbb_hevc_176x144_176kbps_60fps.info",
+ "bbb_hevc_176x144_176kbps_60fps_chksum.md5"},
+ {"hevc", "bbb_hevc_640x360_1600kbps_30fps.hevc", "bbb_hevc_640x360_1600kbps_30fps.info",
+ "bbb_hevc_640x360_1600kbps_30fps_chksum.md5"},
+ {"mpeg2", "bbb_mpeg2_176x144_105kbps_25fps.m2v", "bbb_mpeg2_176x144_105kbps_25fps.info",
+ ""},
+ {"mpeg2", "bbb_mpeg2_352x288_1mbps_60fps.m2v", "bbb_mpeg2_352x288_1mbps_60fps.info", ""},
+ {"3gpp", "bbb_h263_352x288_300kbps_12fps.h263", "bbb_h263_352x288_300kbps_12fps.info", ""},
+ {"mp4v-es", "bbb_mpeg4_352x288_512kbps_30fps.m4v", "bbb_mpeg4_352x288_512kbps_30fps.info",
+ ""},
+ {"vp8", "bbb_vp8_176x144_240kbps_60fps.vp8", "bbb_vp8_176x144_240kbps_60fps.info", ""},
+ {"vp8", "bbb_vp8_640x360_2mbps_30fps.vp8", "bbb_vp8_640x360_2mbps_30fps.info",
+ "bbb_vp8_640x360_2mbps_30fps_chksm.md5"},
+ {"vp9", "bbb_vp9_176x144_285kbps_60fps.vp9", "bbb_vp9_176x144_285kbps_60fps.info", ""},
+ {"vp9", "bbb_vp9_640x360_1600kbps_30fps.vp9", "bbb_vp9_640x360_1600kbps_30fps.info",
+ "bbb_vp9_640x360_1600kbps_30fps_chksm.md5"},
+ {"vp9", "bbb_vp9_704x480_280kbps_24fps_altref_2.vp9",
+ "bbb_vp9_704x480_280kbps_24fps_altref_2.info", ""},
+ {"av01", "bbb_av1_640_360.av1", "bbb_av1_640_360.info", "bbb_av1_640_360_chksum.md5"},
+ {"av01", "bbb_av1_176_144.av1", "bbb_av1_176_144.info", "bbb_av1_176_144_chksm.md5"},
};
class LinearBuffer : public C2Buffer {
@@ -251,8 +239,7 @@
if (!codecConfig && !work->worklets.front()->output.buffers.empty()) {
if (mReorderDepth < 0) {
C2PortReorderBufferDepthTuning::output reorderBufferDepth;
- mComponent->query({&reorderBufferDepth}, {}, C2_MAY_BLOCK,
- nullptr);
+ mComponent->query({&reorderBufferDepth}, {}, C2_MAY_BLOCK, nullptr);
mReorderDepth = reorderBufferDepth.value;
if (mReorderDepth > 0) {
// TODO: Add validation for reordered output
@@ -333,9 +320,8 @@
}
};
-class Codec2VideoDecHidlTest
- : public Codec2VideoDecHidlTestBase,
- public ::testing::WithParamInterface<std::tuple<std::string, std::string>> {
+class Codec2VideoDecHidlTest : public Codec2VideoDecHidlTestBase,
+ public ::testing::WithParamInterface<TestParameters> {
void getParams() {
mInstanceName = std::get<0>(GetParam());
mComponentName = std::get<1>(GetParam());
@@ -541,10 +527,8 @@
return false;
}
-class Codec2VideoDecDecodeTest
- : public Codec2VideoDecHidlTestBase,
- public ::testing::WithParamInterface<
- std::tuple<std::string, std::string, std::string, std::string>> {
+class Codec2VideoDecDecodeTest : public Codec2VideoDecHidlTestBase,
+ public ::testing::WithParamInterface<DecodeTestParameters> {
void getParams() {
mInstanceName = std::get<0>(GetParam());
mComponentName = std::get<1>(GetParam());
@@ -556,8 +540,8 @@
description("Decodes input file");
if (mDisableTest) GTEST_SKIP() << "Test is disabled";
- uint32_t streamIndex = std::stoi(std::get<2>(GetParam()));
- bool signalEOS = !std::get<2>(GetParam()).compare("true");
+ uint32_t streamIndex = std::get<2>(GetParam());
+ bool signalEOS = std::get<3>(GetParam());
mTimestampDevTest = true;
char mURL[512], info[512], chksum[512];
@@ -657,8 +641,8 @@
description("Adaptive Decode Test");
if (mDisableTest) GTEST_SKIP() << "Test is disabled";
if (!(strcasestr(mMime.c_str(), "avc") || strcasestr(mMime.c_str(), "hevc") ||
- strcasestr(mMime.c_str(), "vp8") || strcasestr(mMime.c_str(), "vp9") ||
- strcasestr(mMime.c_str(), "mpeg2"))) {
+ strcasestr(mMime.c_str(), "vp8") || strcasestr(mMime.c_str(), "vp9") ||
+ strcasestr(mMime.c_str(), "mpeg2"))) {
return;
}
@@ -987,9 +971,8 @@
}
}
-class Codec2VideoDecCsdInputTests
- : public Codec2VideoDecHidlTestBase,
- public ::testing::WithParamInterface<std::tuple<std::string, std::string, std::string>> {
+class Codec2VideoDecCsdInputTests : public Codec2VideoDecHidlTestBase,
+ public ::testing::WithParamInterface<CsdFlushTestParameters> {
void getParams() {
mInstanceName = std::get<0>(GetParam());
mComponentName = std::get<1>(GetParam());
@@ -1022,7 +1005,7 @@
bool flushedDecoder = false;
bool signalEOS = false;
bool keyFrame = false;
- bool flushCsd = !std::get<2>(GetParam()).compare("true");
+ bool flushCsd = std::get<2>(GetParam());
ALOGV("sending %d csd data ", numCsds);
int framesToDecode = numCsds;
@@ -1092,16 +1075,16 @@
}
INSTANTIATE_TEST_SUITE_P(PerInstance, Codec2VideoDecHidlTest, testing::ValuesIn(kTestParameters),
- android::hardware::PrintInstanceTupleNameToString<>);
+ PrintInstanceTupleNameToString<>);
// DecodeTest with StreamIndex and EOS / No EOS
INSTANTIATE_TEST_SUITE_P(StreamIndexAndEOS, Codec2VideoDecDecodeTest,
testing::ValuesIn(kDecodeTestParameters),
- android::hardware::PrintInstanceTupleNameToString<>);
+ PrintInstanceTupleNameToString<>);
INSTANTIATE_TEST_SUITE_P(CsdInputs, Codec2VideoDecCsdInputTests,
testing::ValuesIn(kCsdFlushTestParameters),
- android::hardware::PrintInstanceTupleNameToString<>);
+ PrintInstanceTupleNameToString<>);
} // anonymous namespace
@@ -1111,22 +1094,22 @@
kTestParameters = getTestParameters(C2Component::DOMAIN_VIDEO, C2Component::KIND_DECODER);
for (auto params : kTestParameters) {
kDecodeTestParameters.push_back(
- std::make_tuple(std::get<0>(params), std::get<1>(params), "0", "false"));
+ std::make_tuple(std::get<0>(params), std::get<1>(params), 0, false));
kDecodeTestParameters.push_back(
- std::make_tuple(std::get<0>(params), std::get<1>(params), "0", "true"));
+ std::make_tuple(std::get<0>(params), std::get<1>(params), 0, true));
kDecodeTestParameters.push_back(
- std::make_tuple(std::get<0>(params), std::get<1>(params), "1", "false"));
+ std::make_tuple(std::get<0>(params), std::get<1>(params), 1, false));
kDecodeTestParameters.push_back(
- std::make_tuple(std::get<0>(params), std::get<1>(params), "1", "true"));
+ std::make_tuple(std::get<0>(params), std::get<1>(params), 1, true));
kDecodeTestParameters.push_back(
- std::make_tuple(std::get<0>(params), std::get<1>(params), "2", "false"));
+ std::make_tuple(std::get<0>(params), std::get<1>(params), 2, false));
kDecodeTestParameters.push_back(
- std::make_tuple(std::get<0>(params), std::get<1>(params), "2", "true"));
+ std::make_tuple(std::get<0>(params), std::get<1>(params), 2, true));
kCsdFlushTestParameters.push_back(
- std::make_tuple(std::get<0>(params), std::get<1>(params), "true"));
+ std::make_tuple(std::get<0>(params), std::get<1>(params), true));
kCsdFlushTestParameters.push_back(
- std::make_tuple(std::get<0>(params), std::get<1>(params), "false"));
+ std::make_tuple(std::get<0>(params), std::get<1>(params), false));
}
::testing::InitGoogleTest(&argc, argv);
diff --git a/media/codec2/hidl/1.0/vts/functional/video/VtsHalMediaC2V1_0TargetVideoEncTest.cpp b/media/codec2/hidl/1.0/vts/functional/video/VtsHalMediaC2V1_0TargetVideoEncTest.cpp
index e116fe1..23ceff4 100644
--- a/media/codec2/hidl/1.0/vts/functional/video/VtsHalMediaC2V1_0TargetVideoEncTest.cpp
+++ b/media/codec2/hidl/1.0/vts/functional/video/VtsHalMediaC2V1_0TargetVideoEncTest.cpp
@@ -41,10 +41,11 @@
: C2Buffer({block->share(C2Rect(block->width(), block->height()), ::C2Fence())}) {}
};
-static std::vector<std::tuple<std::string, std::string, std::string, std::string, std::string>>
- kEncodeTestParameters;
-static std::vector<std::tuple<std::string, std::string, std::string, std::string>>
- kEncodeResolutionTestParameters;
+using EncodeTestParameters = std::tuple<std::string, std::string, bool, bool, bool>;
+static std::vector<EncodeTestParameters> kEncodeTestParameters;
+
+using EncodeResolutionTestParameters = std::tuple<std::string, std::string, int32_t, int32_t>;
+static std::vector<EncodeResolutionTestParameters> kEncodeResolutionTestParameters;
namespace {
@@ -205,9 +206,8 @@
}
};
-class Codec2VideoEncHidlTest
- : public Codec2VideoEncHidlTestBase,
- public ::testing::WithParamInterface<std::tuple<std::string, std::string>> {
+class Codec2VideoEncHidlTest : public Codec2VideoEncHidlTestBase,
+ public ::testing::WithParamInterface<TestParameters> {
void getParams() {
mInstanceName = std::get<0>(GetParam());
mComponentName = std::get<1>(GetParam());
@@ -376,10 +376,8 @@
ASSERT_EQ(mDisableTest, false);
}
-class Codec2VideoEncEncodeTest
- : public Codec2VideoEncHidlTestBase,
- public ::testing::WithParamInterface<
- std::tuple<std::string, std::string, std::string, std::string, std::string>> {
+class Codec2VideoEncEncodeTest : public Codec2VideoEncHidlTestBase,
+ public ::testing::WithParamInterface<EncodeTestParameters> {
void getParams() {
mInstanceName = std::get<0>(GetParam());
mComponentName = std::get<1>(GetParam());
@@ -393,10 +391,10 @@
char mURL[512];
int32_t nWidth = ENC_DEFAULT_FRAME_WIDTH;
int32_t nHeight = ENC_DEFAULT_FRAME_HEIGHT;
- bool signalEOS = !std::get<2>(GetParam()).compare("true");
+ bool signalEOS = std::get<2>(GetParam());
// Send an empty frame to receive CSD data from encoder.
- bool sendEmptyFirstFrame = !std::get<3>(GetParam()).compare("true");
- mConfigBPictures = !std::get<4>(GetParam()).compare("true");
+ bool sendEmptyFirstFrame = std::get<3>(GetParam());
+ mConfigBPictures = std::get<4>(GetParam());
strcpy(mURL, sResourceDir.c_str());
GetURLForComponent(mURL);
@@ -484,8 +482,7 @@
ASSERT_TRUE(false);
}
- if ((mMime.find("vp8") != std::string::npos) ||
- (mMime.find("3gpp") != std::string::npos)) {
+ if ((mMime.find("vp8") != std::string::npos) || (mMime.find("3gpp") != std::string::npos)) {
ASSERT_FALSE(mCsd) << "CSD Buffer not expected";
} else if (mMime.find("vp9") == std::string::npos) {
ASSERT_TRUE(mCsd) << "CSD Buffer not received";
@@ -665,8 +662,7 @@
class Codec2VideoEncResolutionTest
: public Codec2VideoEncHidlTestBase,
- public ::testing::WithParamInterface<
- std::tuple<std::string, std::string, std::string, std::string>> {
+ public ::testing::WithParamInterface<EncodeResolutionTestParameters> {
void getParams() {
mInstanceName = std::get<0>(GetParam());
mComponentName = std::get<1>(GetParam());
@@ -678,8 +674,8 @@
if (mDisableTest) GTEST_SKIP() << "Test is disabled";
std::ifstream eleStream;
- int32_t nWidth = std::stoi(std::get<2>(GetParam()));
- int32_t nHeight = std::stoi(std::get<3>(GetParam()));
+ int32_t nWidth = std::get<2>(GetParam());
+ int32_t nHeight = std::get<3>(GetParam());
ALOGD("Trying encode for width %d height %d", nWidth, nHeight);
mEos = false;
@@ -711,14 +707,16 @@
}
INSTANTIATE_TEST_SUITE_P(PerInstance, Codec2VideoEncHidlTest, testing::ValuesIn(kTestParameters),
- android::hardware::PrintInstanceTupleNameToString<>);
+ PrintInstanceTupleNameToString<>);
INSTANTIATE_TEST_SUITE_P(NonStdSizes, Codec2VideoEncResolutionTest,
- ::testing::ValuesIn(kEncodeResolutionTestParameters));
+ ::testing::ValuesIn(kEncodeResolutionTestParameters),
+ PrintInstanceTupleNameToString<>);
// EncodeTest with EOS / No EOS
INSTANTIATE_TEST_SUITE_P(EncodeTestwithEOS, Codec2VideoEncEncodeTest,
- ::testing::ValuesIn(kEncodeTestParameters));
+ ::testing::ValuesIn(kEncodeTestParameters),
+ PrintInstanceTupleNameToString<>);
TEST_P(Codec2VideoEncHidlTest, AdaptiveBitrateTest) {
description("Encodes input file for different bitrates");
@@ -812,27 +810,23 @@
parseArgs(argc, argv);
kTestParameters = getTestParameters(C2Component::DOMAIN_VIDEO, C2Component::KIND_ENCODER);
for (auto params : kTestParameters) {
- constexpr char const* kBoolString[] = { "false", "true" };
for (size_t i = 0; i < 1 << 3; ++i) {
kEncodeTestParameters.push_back(std::make_tuple(
- std::get<0>(params), std::get<1>(params),
- kBoolString[i & 1],
- kBoolString[(i >> 1) & 1],
- kBoolString[(i >> 2) & 1]));
+ std::get<0>(params), std::get<1>(params), i & 1, (i >> 1) & 1, (i >> 2) & 1));
}
kEncodeResolutionTestParameters.push_back(
- std::make_tuple(std::get<0>(params), std::get<1>(params), "52", "18"));
+ std::make_tuple(std::get<0>(params), std::get<1>(params), 52, 18));
kEncodeResolutionTestParameters.push_back(
- std::make_tuple(std::get<0>(params), std::get<1>(params), "365", "365"));
+ std::make_tuple(std::get<0>(params), std::get<1>(params), 365, 365));
kEncodeResolutionTestParameters.push_back(
- std::make_tuple(std::get<0>(params), std::get<1>(params), "484", "362"));
+ std::make_tuple(std::get<0>(params), std::get<1>(params), 484, 362));
kEncodeResolutionTestParameters.push_back(
- std::make_tuple(std::get<0>(params), std::get<1>(params), "244", "488"));
+ std::make_tuple(std::get<0>(params), std::get<1>(params), 244, 488));
kEncodeResolutionTestParameters.push_back(
- std::make_tuple(std::get<0>(params), std::get<1>(params), "852", "608"));
+ std::make_tuple(std::get<0>(params), std::get<1>(params), 852, 608));
kEncodeResolutionTestParameters.push_back(
- std::make_tuple(std::get<0>(params), std::get<1>(params), "1400", "442"));
+ std::make_tuple(std::get<0>(params), std::get<1>(params), 1400, 442));
}
::testing::InitGoogleTest(&argc, argv);
diff --git a/media/codec2/sfplugin/CCodecConfig.cpp b/media/codec2/sfplugin/CCodecConfig.cpp
index 7969a6f..27e87e6 100644
--- a/media/codec2/sfplugin/CCodecConfig.cpp
+++ b/media/codec2/sfplugin/CCodecConfig.cpp
@@ -362,7 +362,10 @@
.limitTo(D::OUTPUT & D::READ));
add(ConfigMapper(KEY_BIT_RATE, C2_PARAMKEY_BITRATE, "value")
- .limitTo(D::ENCODER & D::OUTPUT));
+ .limitTo(D::ENCODER & D::CODED));
+ // Some audio decoders require bitrate information to be set
+ add(ConfigMapper(KEY_BIT_RATE, C2_PARAMKEY_BITRATE, "value")
+ .limitTo(D::AUDIO & D::DECODER & D::CODED));
// we also need to put the bitrate in the max bitrate field
add(ConfigMapper(KEY_MAX_BIT_RATE, C2_PARAMKEY_BITRATE, "value")
.limitTo(D::ENCODER & D::READ & D::OUTPUT));
@@ -730,6 +733,17 @@
return C2Value();
}));
+ add(ConfigMapper(KEY_AAC_PROFILE, C2_PARAMKEY_PROFILE_LEVEL, "profile")
+ .limitTo(D::AUDIO & D::ENCODER & (D::CONFIG | D::PARAM))
+ .withMapper([mapper](C2Value v) -> C2Value {
+ C2Config::profile_t c2 = PROFILE_UNUSED;
+ int32_t sdk;
+ if (mapper && v.get(&sdk) && mapper->mapProfile(sdk, &c2)) {
+ return c2;
+ }
+ return PROFILE_UNUSED;
+ }));
+
// convert to dBFS and add default
add(ConfigMapper(KEY_AAC_DRC_TARGET_REFERENCE_LEVEL, C2_PARAMKEY_DRC_TARGET_REFERENCE_LEVEL, "value")
.limitTo(D::AUDIO & D::DECODER & (D::CONFIG | D::PARAM | D::READ))
@@ -1322,6 +1336,14 @@
}
}
+ // Remove KEY_AAC_SBR_MODE from SDK message if it is outside supported range
+ // as SDK doesn't have a way to signal default sbr mode based on profile and
+ // requires that the key isn't present in format to signal that
+ int sbrMode;
+ if (msg->findInt32(KEY_AAC_SBR_MODE, &sbrMode) && (sbrMode < 0 || sbrMode > 2)) {
+ msg->removeEntryAt(msg->findEntryByName(KEY_AAC_SBR_MODE));
+ }
+
{ // convert color info
// move default color to color aspect if not read from the component
int32_t tmp;
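
Note (illustrative, not part of the patch): with the new KEY_AAC_PROFILE mapper, an application-supplied AAC profile on an audio encoder is now forwarded to the component as C2_PARAMKEY_PROFILE_LEVEL, and the extra bitrate mapper covers audio decoders that need bitrate information. A minimal NDK-side sketch of the configuration this serves; the literal 39 (AACObjectELD) is an assumed value taken from MediaCodecInfo.CodecProfileLevel.

#include <media/NdkMediaFormat.h>

AMediaFormat* fmt = AMediaFormat_new();
AMediaFormat_setString(fmt, AMEDIAFORMAT_KEY_MIME, "audio/mp4a-latm");
AMediaFormat_setInt32(fmt, AMEDIAFORMAT_KEY_AAC_PROFILE, 39 /* AACObjectELD, assumed */);
AMediaFormat_setInt32(fmt, AMEDIAFORMAT_KEY_BIT_RATE, 64000);
// ... then pass fmt to AMediaCodec_configure(..., AMEDIACODEC_CONFIGURE_FLAG_ENCODE) as usual.
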
diff --git a/media/codec2/sfplugin/FrameReassembler.cpp b/media/codec2/sfplugin/FrameReassembler.cpp
index 9cec23f..af054c7 100644
--- a/media/codec2/sfplugin/FrameReassembler.cpp
+++ b/media/codec2/sfplugin/FrameReassembler.cpp
@@ -143,6 +143,7 @@
if (buffer->size() > 0) {
mCurrentOrdinal.timestamp = timeUs;
+ mCurrentOrdinal.customOrdinal = timeUs;
}
size_t frameSizeBytes = mFrameSize.value() * mChannelCount * bytesPerSample();
@@ -219,6 +220,7 @@
++mCurrentOrdinal.frameIndex;
mCurrentOrdinal.timestamp += mFrameSize.value() * 1000000 / mSampleRate;
+ mCurrentOrdinal.customOrdinal = mCurrentOrdinal.timestamp;
mCurrentBlock.reset();
mWriteView.reset();
}
diff --git a/media/libstagefright/FrameDecoder.cpp b/media/libstagefright/FrameDecoder.cpp
index 01190b5..0fd4ef2 100644
--- a/media/libstagefright/FrameDecoder.cpp
+++ b/media/libstagefright/FrameDecoder.cpp
@@ -113,7 +113,7 @@
return NULL;
}
sp<IMemory> frameMem = new MemoryBase(heap, 0, size);
- if (frameMem == NULL) {
+ if (frameMem == NULL || frameMem->unsecurePointer() == NULL) {
ALOGE("not enough memory for VideoFrame size=%zu", size);
return NULL;
}
diff --git a/media/libstagefright/MediaCodec.cpp b/media/libstagefright/MediaCodec.cpp
index c21ea8f..50ebeef 100644
--- a/media/libstagefright/MediaCodec.cpp
+++ b/media/libstagefright/MediaCodec.cpp
@@ -29,7 +29,6 @@
#include <C2Buffer.h>
#include "include/SoftwareRenderer.h"
-#include "PlaybackDurationAccumulator.h"
#include <android/hardware/cas/native/1.0/IDescrambler.h>
#include <android/hardware/media/omx/1.0/IGraphicBufferSource.h>
@@ -140,8 +139,6 @@
static const char *kCodecRecentLatencyAvg = "android.media.mediacodec.recent.avg"; /* in us */
static const char *kCodecRecentLatencyCount = "android.media.mediacodec.recent.n";
static const char *kCodecRecentLatencyHist = "android.media.mediacodec.recent.hist"; /* in us */
-static const char *kCodecPlaybackDuration =
- "android.media.mediacodec.playback-duration"; /* in sec */
static const char *kCodecShapingEnhanced = "android.media.mediacodec.shaped"; /* 0/1 */
@@ -722,8 +719,6 @@
mHaveInputSurface(false),
mHavePendingInputBuffers(false),
mCpuBoostRequested(false),
- mPlaybackDurationAccumulator(new PlaybackDurationAccumulator()),
- mIsSurfaceToScreen(false),
mLatencyUnknown(0),
mBytesEncoded(0),
mEarliestEncodedPtsUs(INT64_MAX),
@@ -830,10 +825,6 @@
if (mLatencyUnknown > 0) {
mediametrics_setInt64(mMetricsHandle, kCodecLatencyUnknown, mLatencyUnknown);
}
- int64_t playbackDuration = mPlaybackDurationAccumulator->getDurationInSeconds();
- if (playbackDuration > 0) {
- mediametrics_setInt64(mMetricsHandle, kCodecPlaybackDuration, playbackDuration);
- }
if (mLifetimeStartNs > 0) {
nsecs_t lifetime = systemTime(SYSTEM_TIME_MONOTONIC) - mLifetimeStartNs;
lifetime = lifetime / (1000 * 1000); // emitted in ms, truncated not rounded
@@ -971,22 +962,6 @@
ALOGV("Ignoring tunnel-peek=%d for %s", tunnelPeek, asString(mTunnelPeekState));
}
-void MediaCodec::updatePlaybackDuration(const sp<AMessage> &msg) {
- if (msg->what() != kWhatOutputFramesRendered) {
- ALOGE("updatePlaybackDuration: expected kWhatOuputFramesRendered (%d)", msg->what());
- return;
- }
- // Playback duration only counts if the buffers are going to the screen.
- if (!mIsSurfaceToScreen) {
- return;
- }
- int64_t renderTimeNs;
- size_t index = 0;
- while (msg->findInt64(AStringPrintf("%zu-system-nano", index++).c_str(), &renderTimeNs)) {
- mPlaybackDurationAccumulator->processRenderTime(renderTimeNs);
- }
-}
-
bool MediaCodec::Histogram::setup(int nbuckets, int64_t width, int64_t floor)
{
if (nbuckets <= 0 || width <= 0) {
@@ -3149,7 +3124,6 @@
ALOGV("TunnelPeekState: %s -> %s",
asString(previousState),
asString(TunnelPeekState::kBufferRendered));
- updatePlaybackDuration(msg);
// check that we have a notification set
if (mOnFrameRenderedNotification != NULL) {
sp<AMessage> notify = mOnFrameRenderedNotification->dup();
@@ -4850,10 +4824,6 @@
return ALREADY_EXISTS;
}
- // in case we don't connect, ensure that we don't signal the surface is
- // connected to the screen
- mIsSurfaceToScreen = false;
-
err = nativeWindowConnect(surface.get(), "connectToSurface");
if (err == OK) {
// Require a fresh set of buffers after each connect by using a unique generation
@@ -4879,10 +4849,6 @@
if (!mAllowFrameDroppingBySurface) {
disableLegacyBufferDropPostQ(surface);
}
- // keep track whether or not the buffers of the connected surface go to the screen
- int result = 0;
- surface->query(NATIVE_WINDOW_QUEUES_TO_WINDOW_COMPOSER, &result);
- mIsSurfaceToScreen = result != 0;
}
}
// do not return ALREADY_EXISTS unless surfaces are the same
@@ -4900,7 +4866,6 @@
}
// assume disconnected even on error
mSurface.clear();
- mIsSurfaceToScreen = false;
}
return err;
}
diff --git a/media/libstagefright/PlaybackDurationAccumulator.h b/media/libstagefright/PlaybackDurationAccumulator.h
deleted file mode 100644
index cb5f0c4..0000000
--- a/media/libstagefright/PlaybackDurationAccumulator.h
+++ /dev/null
@@ -1,65 +0,0 @@
-/*
- * Copyright 2021 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef PLAYBACK_DURATION_ACCUMULATOR_H_
-
-namespace android {
-
-// Accumulates playback duration by processing render times of individual frames and by ignoring
-// frames rendered during inactive playbacks such as seeking, pausing, or re-buffering.
-class PlaybackDurationAccumulator {
-private:
- // Controls the maximum delta between render times before considering the playback is not
- // active and has stalled.
- static const int64_t MAX_PRESENTATION_DURATION_NS = 500 * 1000 * 1000;
-
-public:
- PlaybackDurationAccumulator() {
- mPlaybackDurationNs = 0;
- mPreviousRenderTimeNs = 0;
- }
-
- // Process a render time expressed in nanoseconds.
- void processRenderTime(int64_t newRenderTimeNs) {
- // If we detect wrap-around or out of order frames, just ignore the duration for this
- // and the next frame.
- if (newRenderTimeNs < mPreviousRenderTimeNs) {
- mPreviousRenderTimeNs = 0;
- }
- if (mPreviousRenderTimeNs > 0) {
- int64_t presentationDurationNs = newRenderTimeNs - mPreviousRenderTimeNs;
- if (presentationDurationNs < MAX_PRESENTATION_DURATION_NS) {
- mPlaybackDurationNs += presentationDurationNs;
- }
- }
- mPreviousRenderTimeNs = newRenderTimeNs;
- }
-
- int64_t getDurationInSeconds() {
- return mPlaybackDurationNs / 1000 / 1000 / 1000; // Nanoseconds to seconds.
- }
-
-private:
- // The playback duration accumulated so far.
- int64_t mPlaybackDurationNs;
- // The previous render time used to compute the next presentation duration.
- int64_t mPreviousRenderTimeNs;
-};
-
-}
-
-#endif
-
diff --git a/media/libstagefright/include/media/stagefright/MediaCodec.h b/media/libstagefright/include/media/stagefright/MediaCodec.h
index d7b1794..3f93e6d 100644
--- a/media/libstagefright/include/media/stagefright/MediaCodec.h
+++ b/media/libstagefright/include/media/stagefright/MediaCodec.h
@@ -58,7 +58,6 @@
struct PersistentSurface;
class SoftwareRenderer;
class Surface;
-class PlaybackDurationAccumulator;
namespace hardware {
namespace cas {
namespace native {
@@ -414,7 +413,6 @@
void updateLowLatency(const sp<AMessage> &msg);
constexpr const char *asString(TunnelPeekState state, const char *default_string="?");
void updateTunnelPeek(const sp<AMessage> &msg);
- void updatePlaybackDuration(const sp<AMessage> &msg);
sp<AMessage> mOutputFormat;
sp<AMessage> mInputFormat;
@@ -482,9 +480,6 @@
std::shared_ptr<BufferChannelBase> mBufferChannel;
- PlaybackDurationAccumulator * mPlaybackDurationAccumulator;
- bool mIsSurfaceToScreen;
-
MediaCodec(
const sp<ALooper> &looper, pid_t pid, uid_t uid,
std::function<sp<CodecBase>(const AString &, const char *)> getCodecBase = nullptr,
diff --git a/media/ndk/NdkImagePriv.h b/media/ndk/NdkImagePriv.h
index b019448..05115b9 100644
--- a/media/ndk/NdkImagePriv.h
+++ b/media/ndk/NdkImagePriv.h
@@ -40,6 +40,14 @@
*/
AIMAGE_FORMAT_RAW_DEPTH = 0x1002,
+
+ /**
+ * Device specific 10 bits depth RAW image format.
+ *
+ * <p>Unprocessed implementation-dependent raw depth measurements, opaque with 10 bit samples
+ * and device specific bit layout.</p>
+ */
+ AIMAGE_FORMAT_RAW_DEPTH10 = 0x1003,
};
// TODO: this only supports ImageReader
diff --git a/media/ndk/NdkImageReader.cpp b/media/ndk/NdkImageReader.cpp
index b75901a..1067e24 100644
--- a/media/ndk/NdkImageReader.cpp
+++ b/media/ndk/NdkImageReader.cpp
@@ -72,6 +72,7 @@
case AIMAGE_FORMAT_Y8:
case AIMAGE_FORMAT_HEIC:
case AIMAGE_FORMAT_DEPTH_JPEG:
+ case AIMAGE_FORMAT_RAW_DEPTH10:
return true;
case AIMAGE_FORMAT_PRIVATE:
// For private format, cpu usage is prohibited.
@@ -102,6 +103,7 @@
case AIMAGE_FORMAT_Y8:
case AIMAGE_FORMAT_HEIC:
case AIMAGE_FORMAT_DEPTH_JPEG:
+ case AIMAGE_FORMAT_RAW_DEPTH10:
return 1;
case AIMAGE_FORMAT_PRIVATE:
return 0;
diff --git a/media/ndk/include/media/NdkImage.h b/media/ndk/include/media/NdkImage.h
index e19dd3a..71bc6d9 100644
--- a/media/ndk/include/media/NdkImage.h
+++ b/media/ndk/include/media/NdkImage.h
@@ -50,7 +50,10 @@
*/
typedef struct AImage AImage;
-// Formats not listed here will not be supported by AImageReader
+/**
+ * AImage supported formats: AImageReader only guarantees support for the formats
+ * listed here.
+ */
enum AIMAGE_FORMATS {
/**
* 32 bits RGBA format, 8 bits for each of the four channels.
@@ -813,7 +816,7 @@
* Available since API level 26.
*
* @param image the {@link AImage} of interest.
- * @param outBuffer The memory area pointed to by buffer will contain the acquired AHardwareBuffer
+ * @param buffer The memory area pointed to by buffer will contain the acquired AHardwareBuffer
* handle.
* @return <ul>
* <li>{@link AMEDIA_OK} if the method call succeeds.</li>
diff --git a/media/ndk/include/media/NdkImageReader.h b/media/ndk/include/media/NdkImageReader.h
index d86f3c7..4bd7f2a 100644
--- a/media/ndk/include/media/NdkImageReader.h
+++ b/media/ndk/include/media/NdkImageReader.h
@@ -328,10 +328,10 @@
* still acquire images from this {@link AImageReader} and access {@link AHardwareBuffer} via
* {@link AImage_getHardwareBuffer()}. The {@link AHardwareBuffer} gained this way can then
* be passed back to hardware (such as GPU or hardware encoder if supported) for future processing.
- * For example, you can obtain an {@link EGLClientBuffer} from the {@link AHardwareBuffer} by using
- * {@link eglGetNativeClientBufferANDROID} extension and pass that {@link EGLClientBuffer} to {@link
- * eglCreateImageKHR} to create an {@link EGLImage} resource type, which may then be bound to a
- * texture via {@link glEGLImageTargetTexture2DOES} on supported devices. This can be useful for
+ * For example, you can obtain an EGLClientBuffer from the {@link AHardwareBuffer} by using
+ * eglGetNativeClientBufferANDROID extension and pass that EGLClientBuffer to
+ * eglCreateImageKHR to create an EGLImage resource type, which may then be bound to a
+ * texture via glEGLImageTargetTexture2DOES on supported devices. This can be useful for
* transporting textures that may be shared cross-process.</p>
* <p>In general, when software access to image data is not necessary, an {@link AImageReader}
* created with {@link AIMAGE_FORMAT_PRIVATE} format is more efficient, compared with {@link
@@ -339,7 +339,7 @@
*
* <p>Note that not all format and usage flag combination is supported by the {@link AImageReader},
* especially if \c format is {@link AIMAGE_FORMAT_PRIVATE}, \c usage must not include either
- * {@link AHARDWAREBUFFER_USAGE_READ_RARELY} or {@link AHARDWAREBUFFER_USAGE_READ_OFTEN}</p>
+ * {@link AHARDWAREBUFFER_USAGE_CPU_READ_RARELY} or {@link AHARDWAREBUFFER_USAGE_CPU_READ_OFTEN}</p>
*
* @param width The default width in pixels of the Images that this reader will produce.
* @param height The default height in pixels of the Images that this reader will produce.
@@ -358,7 +358,7 @@
* <th>Compatible usage flags</th>
* </tr>
* <tr>
- * <td>non-{@link AIMAGE_FORMAT_PRIVATE PRIVATE} formats defined in {@link AImage.h}
+ * <td>non-{@link AIMAGE_FORMAT_PRIVATE} formats defined in {@link NdkImage.h}
* </td>
* <td>{@link AHARDWAREBUFFER_USAGE_CPU_READ_RARELY} or
* {@link AHARDWAREBUFFER_USAGE_CPU_READ_OFTEN}</td>
@@ -441,6 +441,10 @@
AImageReader* reader,
AHardwareBuffer* buffer);
+/**
+ * A listener for AHardwareBuffer removal events. Use
+ * {@link AImageReader_setBufferRemovedListener} to register the listener object with the AImageReader.
+ */
typedef struct AImageReader_BufferRemovedListener {
/// Optional application context passed as the first parameter of the callback.
void* context;
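
For reference, a minimal sketch of the GPU sampling path that the NdkImageReader.h comment above describes. AImage_getHardwareBuffer, eglGetNativeClientBufferANDROID, eglCreateImageKHR, and glEGLImageTargetTexture2DOES are the standard NDK/EGL/GLES entry points; the surrounding setup (a current EGL context and an already allocated external texture name) is assumed rather than shown, and this is not part of the change itself.

    #define EGL_EGLEXT_PROTOTYPES
    #define GL_GLEXT_PROTOTYPES
    #include <media/NdkImage.h>
    #include <EGL/egl.h>
    #include <EGL/eglext.h>
    #include <GLES2/gl2.h>
    #include <GLES2/gl2ext.h>

    // Sketch only: bind the AHardwareBuffer behind an acquired AImage to an external GL texture.
    static bool bindImageToTexture(AImage* image, GLuint externalTextureId) {
        AHardwareBuffer* hardwareBuffer = nullptr;
        if (AImage_getHardwareBuffer(image, &hardwareBuffer) != AMEDIA_OK || hardwareBuffer == nullptr) {
            return false;
        }
        EGLClientBuffer clientBuffer = eglGetNativeClientBufferANDROID(hardwareBuffer);
        const EGLint attrs[] = { EGL_IMAGE_PRESERVED_KHR, EGL_TRUE, EGL_NONE };
        EGLImageKHR eglImage = eglCreateImageKHR(eglGetCurrentDisplay(), EGL_NO_CONTEXT,
                                                 EGL_NATIVE_BUFFER_ANDROID, clientBuffer, attrs);
        if (eglImage == EGL_NO_IMAGE_KHR) {
            return false;
        }
        glBindTexture(GL_TEXTURE_EXTERNAL_OES, externalTextureId);
        glEGLImageTargetTexture2DOES(GL_TEXTURE_EXTERNAL_OES, eglImage);
        return true;
    }
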
diff --git a/media/ndk/include/media/NdkMediaError.h b/media/ndk/include/media/NdkMediaError.h
index 2be1d6e..02fdc79 100644
--- a/media/ndk/include/media/NdkMediaError.h
+++ b/media/ndk/include/media/NdkMediaError.h
@@ -40,7 +40,11 @@
__BEGIN_DECLS
+/**
+ * Media status codes returned by NDK media functions.
+ */
typedef enum {
+ /** The requested media operation completed successfully. */
AMEDIA_OK = 0,
/**
@@ -55,14 +59,34 @@
AMEDIACODEC_ERROR_RECLAIMED = 1101,
AMEDIA_ERROR_BASE = -10000,
+
+ /** The called media function failed with an unknown error. */
AMEDIA_ERROR_UNKNOWN = AMEDIA_ERROR_BASE,
+
+ /** The input media data is corrupt or incomplete. */
AMEDIA_ERROR_MALFORMED = AMEDIA_ERROR_BASE - 1,
+
+ /** The requested operation or media format is not supported. */
AMEDIA_ERROR_UNSUPPORTED = AMEDIA_ERROR_BASE - 2,
+
+ /** An invalid (or already closed) object is used in the function call. */
AMEDIA_ERROR_INVALID_OBJECT = AMEDIA_ERROR_BASE - 3,
+
+ /** At least one of the supplied parameters is invalid. */
AMEDIA_ERROR_INVALID_PARAMETER = AMEDIA_ERROR_BASE - 4,
+
+ /** The media object is not in the right state for the required operation. */
AMEDIA_ERROR_INVALID_OPERATION = AMEDIA_ERROR_BASE - 5,
+
+ /** The media stream ended while processing the requested operation. */
AMEDIA_ERROR_END_OF_STREAM = AMEDIA_ERROR_BASE - 6,
+
+ /** An error occurred while the media object was performing an I/O operation. */
AMEDIA_ERROR_IO = AMEDIA_ERROR_BASE - 7,
+
+ /** The requested operation would have to block (on I/O or otherwise),
+ * but blocking is not enabled.
+ */
AMEDIA_ERROR_WOULD_BLOCK = AMEDIA_ERROR_BASE - 8,
AMEDIA_DRM_ERROR_BASE = -20000,
@@ -77,10 +101,20 @@
AMEDIA_DRM_LICENSE_EXPIRED = AMEDIA_DRM_ERROR_BASE - 9,
AMEDIA_IMGREADER_ERROR_BASE = -30000,
+
+ /** There are no more image buffers to read/write image data. */
AMEDIA_IMGREADER_NO_BUFFER_AVAILABLE = AMEDIA_IMGREADER_ERROR_BASE - 1,
+
+ /** The AImage object has used up the maximum number of image buffers allowed. */
AMEDIA_IMGREADER_MAX_IMAGES_ACQUIRED = AMEDIA_IMGREADER_ERROR_BASE - 2,
+
+ /** The required image buffer could not be locked to read. */
AMEDIA_IMGREADER_CANNOT_LOCK_IMAGE = AMEDIA_IMGREADER_ERROR_BASE - 3,
+
+ /** The media data or buffer could not be unlocked. */
AMEDIA_IMGREADER_CANNOT_UNLOCK_IMAGE = AMEDIA_IMGREADER_ERROR_BASE - 4,
+
+ /** The media/buffer needs to be locked to perform the required operation. */
AMEDIA_IMGREADER_IMAGE_NOT_LOCKED = AMEDIA_IMGREADER_ERROR_BASE - 5,
} media_status_t;
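
As a usage note, a minimal sketch of how these status codes are typically consumed when draining an AImageReader. AImageReader_acquireNextImage and AImage_delete are the standard NDK calls; the reader setup is assumed, and this snippet is illustration only, not part of the change.

    #include <media/NdkImageReader.h>

    // Sketch only: drain whatever images are currently ready, tolerating the "no buffer" case.
    static void drainReader(AImageReader* reader) {
        while (true) {
            AImage* image = nullptr;
            media_status_t status = AImageReader_acquireNextImage(reader, &image);
            if (status == AMEDIA_IMGREADER_NO_BUFFER_AVAILABLE) {
                break;                  // nothing ready yet; not an error
            }
            if (status != AMEDIA_OK) {
                // AMEDIA_IMGREADER_MAX_IMAGES_ACQUIRED and other failures land here.
                break;
            }
            // ... read or forward the image data ...
            AImage_delete(image);       // release the buffer back to the reader
        }
    }
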
diff --git a/services/audioflinger/Android.bp b/services/audioflinger/Android.bp
index 2294c49..a7d47fb 100644
--- a/services/audioflinger/Android.bp
+++ b/services/audioflinger/Android.bp
@@ -74,6 +74,7 @@
"libmediautils",
"libnbaio",
"libnblog",
+ "libpermission",
"libpowermanager",
"libmediautils",
"libmemunreachable",
@@ -95,6 +96,7 @@
],
export_shared_lib_headers: [
+ "libpermission",
"media_permission-aidl-cpp",
],
diff --git a/services/audioflinger/PlaybackTracks.h b/services/audioflinger/PlaybackTracks.h
index 2e59baa..2436248 100644
--- a/services/audioflinger/PlaybackTracks.h
+++ b/services/audioflinger/PlaybackTracks.h
@@ -219,6 +219,10 @@
void flushAck();
bool isResumePending();
void resumeAck();
+ // For direct or offloaded tracks, ensure that the pause state is acknowledged
+ // by the playback thread even if an immediate flush follows.
+ bool isPausePending() const { return mPauseHwPending; }
+ void pauseAck();
void updateTrackFrameInfo(int64_t trackFramesReleased, int64_t sinkFramesWritten,
uint32_t halSampleRate, const ExtendedTimestamp &timeStamp);
@@ -314,6 +318,7 @@
sp<AudioTrackServerProxy> mAudioTrackServerProxy;
bool mResumeToStopping; // track was paused in stopping state.
bool mFlushHwPending; // track requests for thread flush
+ bool mPauseHwPending = false; // direct/offload track request for thread pause
audio_output_flags_t mFlags;
// If the last track change was notified to the client with readAndClearHasChanged
std::atomic_flag mChangeNotified = ATOMIC_FLAG_INIT;
diff --git a/services/audioflinger/Threads.cpp b/services/audioflinger/Threads.cpp
index c83fc80..997f24a 100644
--- a/services/audioflinger/Threads.cpp
+++ b/services/audioflinger/Threads.cpp
@@ -5880,8 +5880,15 @@
sp<Track> l = mActiveTracks.getLatest();
bool last = l.get() == track;
- if (track->isPausing()) {
- track->setPaused();
+ if (track->isPausePending()) {
+ track->pauseAck();
+ // The track might have been flushed or stopped in the meantime.
+ // Other operations, such as a pending flush, may occur on the next prepare.
+ if (track->isPausing()) {
+ track->setPaused();
+ }
+ // Always perform the pause, as an immediate flush may have changed
+ // the state so that it is no longer isPausing().
if (mHwSupportsPause && last && !mHwPaused) {
doHwPause = true;
mHwPaused = true;
@@ -6423,8 +6430,15 @@
continue;
}
- if (track->isPausing()) {
- track->setPaused();
+ if (track->isPausePending()) {
+ track->pauseAck();
+ // The track might have been flushed or stopped in the meantime.
+ // Other operations, such as a pending flush, may occur on the next prepare.
+ if (track->isPausing()) {
+ track->setPaused();
+ }
+ // Always perform the pause if this is the last track, as an immediate flush
+ // may have changed the state so that it is no longer isPausing().
if (last) {
if (mHwSupportsPause && !mHwPaused) {
doHwPause = true;
@@ -8094,6 +8108,9 @@
{
ALOGV("RecordThread::getActiveMicrophones");
AutoMutex _l(mLock);
+ if (mInput == nullptr || mInput->stream == nullptr) {
+ return NO_INIT;
+ }
status_t status = mInput->stream->getActiveMicrophones(activeMicrophones);
return status;
}
@@ -8103,6 +8120,9 @@
{
ALOGV("setPreferredMicrophoneDirection(%d)", direction);
AutoMutex _l(mLock);
+ if (mInput == nullptr || mInput->stream == nullptr) {
+ return NO_INIT;
+ }
return mInput->stream->setPreferredMicrophoneDirection(direction);
}
@@ -8110,6 +8130,9 @@
{
ALOGV("setPreferredMicrophoneFieldDimension(%f)", zoom);
AutoMutex _l(mLock);
+ if (mInput == nullptr || mInput->stream == nullptr) {
+ return NO_INIT;
+ }
return mInput->stream->setPreferredMicrophoneFieldDimension(zoom);
}
diff --git a/services/audioflinger/Tracks.cpp b/services/audioflinger/Tracks.cpp
index db7528d..21651af 100644
--- a/services/audioflinger/Tracks.cpp
+++ b/services/audioflinger/Tracks.cpp
@@ -1219,6 +1219,9 @@
mState = PAUSING;
ALOGV("%s(%d): ACTIVE/RESUMING => PAUSING on thread %d",
__func__, mId, (int)mThreadIoHandle);
+ if (isOffloadedOrDirect()) {
+ mPauseHwPending = true;
+ }
playbackThread->broadcast_l();
break;
@@ -1306,6 +1309,11 @@
mFlushHwPending = false;
}
+void AudioFlinger::PlaybackThread::Track::pauseAck()
+{
+ mPauseHwPending = false;
+}
+
void AudioFlinger::PlaybackThread::Track::reset()
{
// Do not reset twice to avoid discarding data written just after a flush and before
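
The isPausePending()/pauseAck() additions implement a small handshake: pause() raises mPauseHwPending for direct/offloaded tracks, and the playback thread acknowledges it on its next pass even when an immediate flush has already cleared the PAUSING state, so the hardware pause is not lost. Below is a standalone sketch of that handshake with illustrative names (TrackModel, preparePass) rather than the real AudioFlinger types, assuming a single playback thread.

    #include <atomic>

    // Toy model of the pending-pause flag; TrackModel is a hypothetical stand-in for Track.
    struct TrackModel {
        enum State { ACTIVE, PAUSING, PAUSED, FLUSHED } state = ACTIVE;
        std::atomic<bool> pauseHwPending{false};

        void pause() {                         // client side: request a pause
            state = PAUSING;
            pauseHwPending.store(true);        // in AudioFlinger this is set only for direct/offload
        }
        void flush() { state = FLUSHED; }      // an immediate flush overwrites PAUSING

        // playback-thread side: consume the pending pause exactly once
        bool takePendingPause() { return pauseHwPending.exchange(false); }
    };

    // Playback-thread pass, mirroring the Threads.cpp change: the hardware pause is still
    // issued even when a flush raced ahead and the track is no longer PAUSING.
    static void preparePass(TrackModel& track, bool& hwPaused) {
        if (track.takePendingPause()) {
            if (track.state == TrackModel::PAUSING) {
                track.state = TrackModel::PAUSED;
            }
            if (!hwPaused) {
                hwPaused = true;               // corresponds to doHwPause in the real code
            }
        }
    }
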
diff --git a/services/audiopolicy/common/include/policy.h b/services/audiopolicy/common/include/policy.h
index 0537365..552919d 100644
--- a/services/audiopolicy/common/include/policy.h
+++ b/services/audiopolicy/common/include/policy.h
@@ -226,6 +226,8 @@
return AUDIO_DEVICE_OUT_SPEAKER_SAFE;
} else if (deviceTypes.count(AUDIO_DEVICE_OUT_HDMI_ARC) != 0) {
return AUDIO_DEVICE_OUT_HDMI_ARC;
+ } else if (deviceTypes.count(AUDIO_DEVICE_OUT_HDMI_EARC) != 0) {
+ return AUDIO_DEVICE_OUT_HDMI_EARC;
} else if (deviceTypes.count(AUDIO_DEVICE_OUT_AUX_LINE) != 0) {
return AUDIO_DEVICE_OUT_AUX_LINE;
} else if (deviceTypes.count(AUDIO_DEVICE_OUT_SPDIF) != 0) {
@@ -240,4 +242,4 @@
return a2dpDevices.empty() ? AUDIO_DEVICE_NONE : a2dpDevices[0];
}
}
-}
\ No newline at end of file
+}
diff --git a/services/audiopolicy/enginedefault/src/Engine.cpp b/services/audiopolicy/enginedefault/src/Engine.cpp
index 27f89e3..edcdf5a 100644
--- a/services/audiopolicy/enginedefault/src/Engine.cpp
+++ b/services/audiopolicy/enginedefault/src/Engine.cpp
@@ -196,7 +196,7 @@
if (desc->isActive() && !audio_is_linear_pcm(desc->getFormat())) {
availableOutputDevices.remove(desc->devices().getDevicesFromTypes({
AUDIO_DEVICE_OUT_HDMI, AUDIO_DEVICE_OUT_SPDIF,
- AUDIO_DEVICE_OUT_HDMI_ARC}));
+ AUDIO_DEVICE_OUT_HDMI_ARC, AUDIO_DEVICE_OUT_HDMI_EARC}));
}
}
} break;
@@ -366,7 +366,9 @@
if (strategy == STRATEGY_MEDIA) {
// ARC, SPDIF and AUX_LINE can co-exist with others.
devices3 = availableOutputDevices.getDevicesFromTypes({
- AUDIO_DEVICE_OUT_HDMI_ARC, AUDIO_DEVICE_OUT_SPDIF, AUDIO_DEVICE_OUT_AUX_LINE});
+ AUDIO_DEVICE_OUT_HDMI_ARC, AUDIO_DEVICE_OUT_HDMI_EARC,
+ AUDIO_DEVICE_OUT_SPDIF, AUDIO_DEVICE_OUT_AUX_LINE,
+ });
}
devices2.add(devices3);
diff --git a/services/mediametrics/AudioTypes.cpp b/services/mediametrics/AudioTypes.cpp
index 1756c98..838cdd5 100644
--- a/services/mediametrics/AudioTypes.cpp
+++ b/services/mediametrics/AudioTypes.cpp
@@ -77,6 +77,7 @@
{"AUDIO_DEVICE_IN_DEFAULT", 1LL << 28},
// R values above.
{"AUDIO_DEVICE_IN_BLE_HEADSET", 1LL << 29},
+ {"AUDIO_DEVICE_IN_HDMI_EARC", 1LL << 30},
};
return map;
}
@@ -123,7 +124,8 @@
{"AUDIO_DEVICE_OUT_DEFAULT", 1LL << 30},
// R values above.
{"AUDIO_DEVICE_OUT_BLE_HEADSET", 1LL << 31},
- {"AUDIO_DEVICE_OUT_BLE_SPAEKER", 1LL << 32},
+ {"AUDIO_DEVICE_OUT_BLE_SPEAKER", 1LL << 32},
+ {"AUDIO_DEVICE_OUT_HDMI_EARC", 1LL << 33},
};
return map;
}
diff --git a/services/oboeservice/AAudioServiceStreamBase.cpp b/services/oboeservice/AAudioServiceStreamBase.cpp
index 694094c..8638f36 100644
--- a/services/oboeservice/AAudioServiceStreamBase.cpp
+++ b/services/oboeservice/AAudioServiceStreamBase.cpp
@@ -291,10 +291,6 @@
.set(AMEDIAMETRICS_PROP_STATUS, (int32_t)result)
.record(); });
- // Send it now because the timestamp gets rounded up when stopStream() is called below.
- // Also we don't need the timestamps while we are shutting down.
- sendCurrentTimestamp();
-
result = stopTimestampThread();
if (result != AAUDIO_OK) {
disconnect_l();
@@ -340,9 +336,6 @@
setState(AAUDIO_STREAM_STATE_STOPPING);
- // Send it now because the timestamp gets rounded up when stopStream() is called below.
- // Also we don't need the timestamps while we are shutting down.
- sendCurrentTimestamp(); // warning - this calls a virtual function
result = stopTimestampThread();
if (result != AAUDIO_OK) {
disconnect_l();
@@ -408,10 +401,11 @@
timestampScheduler.start(AudioClock::getNanoseconds());
int64_t nextTime = timestampScheduler.nextAbsoluteTime();
int32_t loopCount = 0;
+ aaudio_result_t result = AAUDIO_OK;
while(mThreadEnabled.load()) {
loopCount++;
if (AudioClock::getNanoseconds() >= nextTime) {
- aaudio_result_t result = sendCurrentTimestamp();
+ result = sendCurrentTimestamp();
if (result != AAUDIO_OK) {
ALOGE("%s() timestamp thread got result = %d", __func__, result);
break;
@@ -423,6 +417,11 @@
AudioClock::sleepUntilNanoTime(nextTime);
}
}
+ // This call was moved out of stop_l() and pause_l(), where it could cause a deadlock
+ // if it resulted in a call to disconnect().
+ if (result == AAUDIO_OK) {
+ (void) sendCurrentTimestamp();
+ }
ALOGD("%s() %s exiting after %d loops <<<<<<<<<<<<<< TIMESTAMPS",
__func__, getTypeText(), loopCount);
}
diff --git a/services/oboeservice/AAudioServiceStreamBase.h b/services/oboeservice/AAudioServiceStreamBase.h
index 06c9f21..8e5c8ef 100644
--- a/services/oboeservice/AAudioServiceStreamBase.h
+++ b/services/oboeservice/AAudioServiceStreamBase.h
@@ -80,7 +80,7 @@
// because we had to wait until we generated the handle.
void logOpen(aaudio_handle_t streamHandle);
- aaudio_result_t close();
+ aaudio_result_t close() EXCLUDES(mLock);
/**
* Start the flow of audio data.
@@ -88,7 +88,7 @@
* This is not guaranteed to be synchronous but it currently is.
* An AAUDIO_SERVICE_EVENT_STARTED will be sent to the client when complete.
*/
- aaudio_result_t start();
+ aaudio_result_t start() EXCLUDES(mLock);
/**
* Stop the flow of data so that start() can resume without loss of data.
@@ -96,7 +96,7 @@
* This is not guaranteed to be synchronous but it currently is.
* An AAUDIO_SERVICE_EVENT_PAUSED will be sent to the client when complete.
*/
- aaudio_result_t pause();
+ aaudio_result_t pause() EXCLUDES(mLock);
/**
* Stop the flow of data after the currently queued data has finished playing.
@@ -105,14 +105,14 @@
* An AAUDIO_SERVICE_EVENT_STOPPED will be sent to the client when complete.
*
*/
- aaudio_result_t stop();
+ aaudio_result_t stop() EXCLUDES(mLock);
/**
* Discard any data held by the underlying HAL or Service.
*
* An AAUDIO_SERVICE_EVENT_FLUSHED will be sent to the client when complete.
*/
- aaudio_result_t flush();
+ aaudio_result_t flush() EXCLUDES(mLock);
virtual aaudio_result_t startClient(const android::AudioClient& client,
const audio_attributes_t *attr __unused,
@@ -126,9 +126,9 @@
return AAUDIO_ERROR_UNAVAILABLE;
}
- aaudio_result_t registerAudioThread(pid_t clientThreadId, int priority);
+ aaudio_result_t registerAudioThread(pid_t clientThreadId, int priority) EXCLUDES(mLock);
- aaudio_result_t unregisterAudioThread(pid_t clientThreadId);
+ aaudio_result_t unregisterAudioThread(pid_t clientThreadId) EXCLUDES(mLock);
bool isRunning() const {
return mState == AAUDIO_STREAM_STATE_STARTED;
@@ -137,7 +137,7 @@
/**
* Fill in a parcelable description of stream.
*/
- aaudio_result_t getDescription(AudioEndpointParcelable &parcelable);
+ aaudio_result_t getDescription(AudioEndpointParcelable &parcelable) EXCLUDES(mLock);
void setRegisteredThread(pid_t pid) {
mRegisteredClientThread = pid;
@@ -153,7 +153,7 @@
void run() override; // to implement Runnable
- void disconnect();
+ void disconnect() EXCLUDES(mLock);
const android::AudioClient &getAudioClient() {
return mMmapClient;
@@ -248,7 +248,7 @@
aaudio_result_t writeUpMessageQueue(AAudioServiceMessage *command);
- aaudio_result_t sendCurrentTimestamp();
+ aaudio_result_t sendCurrentTimestamp() EXCLUDES(mLock);
aaudio_result_t sendXRunCount(int32_t xRunCount);
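
The EXCLUDES(mLock) annotations added above rely on Clang's -Wthread-safety analysis: they declare that the caller must not already hold mLock because the method acquires it internally. Below is a minimal self-contained sketch of that contract, using the attribute spellings from the Clang thread-safety documentation (Android's own headers define equivalent macros); the classes are illustrative, not the AAudio ones.

    // Compile with: clang++ -std=c++17 -Wthread-safety -c example.cpp
    #include <mutex>

    #define CAPABILITY(x)   __attribute__((capability(x)))
    #define GUARDED_BY(x)   __attribute__((guarded_by(x)))
    #define ACQUIRE(...)    __attribute__((acquire_capability(__VA_ARGS__)))
    #define RELEASE(...)    __attribute__((release_capability(__VA_ARGS__)))
    #define EXCLUDES(...)   __attribute__((locks_excluded(__VA_ARGS__)))

    class CAPABILITY("mutex") Mutex {
    public:
        void lock() ACQUIRE() { m.lock(); }
        void unlock() RELEASE() { m.unlock(); }
    private:
        std::mutex m;
    };

    class StreamModel {
    public:
        // Acquires mLock itself, so the analysis warns if a caller already holds it.
        void start() EXCLUDES(mLock) {
            mLock.lock();
            mState = 1;           // guarded member may only be touched while mLock is held
            mLock.unlock();
        }
    private:
        Mutex mLock;
        int mState GUARDED_BY(mLock) = 0;
    };
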
diff --git a/services/oboeservice/AAudioServiceStreamMMAP.h b/services/oboeservice/AAudioServiceStreamMMAP.h
index 6ba1725..667465a 100644
--- a/services/oboeservice/AAudioServiceStreamMMAP.h
+++ b/services/oboeservice/AAudioServiceStreamMMAP.h
@@ -73,7 +73,8 @@
aaudio_result_t getAudioDataDescription(AudioEndpointParcelable &parcelable) override;
- aaudio_result_t getFreeRunningPosition(int64_t *positionFrames, int64_t *timeNanos) override;
+ aaudio_result_t getFreeRunningPosition(int64_t *positionFrames,
+ int64_t *timeNanos) EXCLUDES(mLock) override;
aaudio_result_t getHardwareTimestamp(int64_t *positionFrames, int64_t *timeNanos) override;