Merge "Updated the fuzzer to use native service"
diff --git a/MainlineFiles.cfg b/MainlineFiles.cfg
index 490bbbf..bf39c1a 100644
--- a/MainlineFiles.cfg
+++ b/MainlineFiles.cfg
@@ -21,11 +21,20 @@
#
# matching is purely prefix
# so 'foo' will match 'foo', 'foo.c', 'foo/bar/baz'
-# if you want to exclude a directory, best to use a pattern like "foo/"
+# if you want to specify a directory, best to use a pattern like "foo/"
#
+apex/
media/codec2/components/
-media/codecs/
-media/extractors/
-media/libstagefright/mpeg2ts/
-media/libstagefright/flac/
+media/codec2/core/
+media/codec2/hidl/
+media/codec2/sfplugin/utils/
+media/codec2/vndk/
+media/libstagefright/data/media_codecs_sw.xml
+media/module/
+services/mediacodec/
+
+# source code used in both framework and mainline libraries
+media/libstagefright/HevcUtils.cpp
+media/libstagefright/MediaSource.cpp
+media/libstagefright/Utils.cpp
diff --git a/OWNERS b/OWNERS
index 87bc809..3c7a3ab 100644
--- a/OWNERS
+++ b/OWNERS
@@ -10,3 +10,7 @@
## Only contact for media changes as a fallback
etalvala@google.com
shuzhenwang@google.com
+
+# mainline related
+per-file MainlineFiles.cfg=essick@google.com
+per-file MainlineFiles.cfg=file:/media/janitors/reliability_mainline_OWNERS
diff --git a/camera/Android.bp b/camera/Android.bp
index e44202b..a933553 100644
--- a/camera/Android.bp
+++ b/camera/Android.bp
@@ -47,7 +47,7 @@
name: "camera_headers",
export_include_dirs: ["include"],
}
-cc_library_shared {
+cc_library {
name: "libcamera_client",
aidl: {
diff --git a/camera/CameraSessionStats.cpp b/camera/CameraSessionStats.cpp
index d1aa36a..0706ac1 100644
--- a/camera/CameraSessionStats.cpp
+++ b/camera/CameraSessionStats.cpp
@@ -130,6 +130,12 @@
return err;
}
+ int32_t colorSpace = ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED;
+ if ((err = parcel->readInt32(&colorSpace)) != OK) {
+ ALOGE("%s: Failed to read color space from parcel", __FUNCTION__);
+ return err;
+ }
+
mWidth = width;
mHeight = height;
mFormat = format;
@@ -146,6 +152,7 @@
mHistogramCounts = std::move(histogramCounts);
mDynamicRangeProfile = dynamicRangeProfile;
mStreamUseCase = streamUseCase;
+ mColorSpace = colorSpace;
return OK;
}
@@ -238,6 +245,11 @@
return err;
}
+ if ((err = parcel->writeInt32(mColorSpace)) != OK) {
+ ALOGE("%s: Failed to write color space", __FUNCTION__);
+ return err;
+ }
+
return OK;
}
diff --git a/camera/camera2/OutputConfiguration.cpp b/camera/camera2/OutputConfiguration.cpp
index 11d4960..d50566d 100644
--- a/camera/camera2/OutputConfiguration.cpp
+++ b/camera/camera2/OutputConfiguration.cpp
@@ -26,8 +26,8 @@
#include <system/camera_metadata.h>
#include <utils/String8.h>
-namespace android {
+namespace android {
const int OutputConfiguration::INVALID_ROTATION = -1;
const int OutputConfiguration::INVALID_SET_ID = -1;
@@ -81,6 +81,10 @@
return mDynamicRangeProfile;
}
+int32_t OutputConfiguration::getColorSpace() const {
+ return mColorSpace;
+}
+
int64_t OutputConfiguration::getStreamUseCase() const {
return mStreamUseCase;
}
@@ -103,6 +107,7 @@
mIsShared(false),
mIsMultiResolution(false),
mDynamicRangeProfile(ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD),
+ mColorSpace(ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED),
mStreamUseCase(ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT),
mTimestampBase(TIMESTAMP_BASE_DEFAULT),
mMirrorMode(MIRROR_MODE_AUTO) {
@@ -191,6 +196,11 @@
ALOGE("%s: Failed to read surface dynamic range profile flag from parcel", __FUNCTION__);
return err;
}
+ int32_t colorSpace;
+ if ((err = parcel->readInt32(&colorSpace)) != OK) {
+ ALOGE("%s: Failed to read surface color space flag from parcel", __FUNCTION__);
+ return err;
+ }
int64_t streamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT;
if ((err = parcel->readInt64(&streamUseCase)) != OK) {
@@ -230,6 +240,7 @@
mSensorPixelModesUsed = std::move(sensorPixelModesUsed);
mDynamicRangeProfile = dynamicProfile;
+ mColorSpace = colorSpace;
ALOGV("%s: OutputConfiguration: rotation = %d, setId = %d, surfaceType = %d,"
" physicalCameraId = %s, isMultiResolution = %d, streamUseCase = %" PRId64
@@ -252,6 +263,7 @@
mPhysicalCameraId = physicalId;
mIsMultiResolution = false;
mDynamicRangeProfile = ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD;
+ mColorSpace = ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED;
mStreamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT;
mTimestampBase = TIMESTAMP_BASE_DEFAULT;
mMirrorMode = MIRROR_MODE_AUTO;
@@ -265,6 +277,7 @@
mWidth(width), mHeight(height), mIsDeferred(false), mIsShared(isShared),
mPhysicalCameraId(physicalCameraId), mIsMultiResolution(false),
mDynamicRangeProfile(ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD),
+ mColorSpace(ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED),
mStreamUseCase(ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT),
mTimestampBase(TIMESTAMP_BASE_DEFAULT),
mMirrorMode(MIRROR_MODE_AUTO) { }
@@ -317,6 +330,9 @@
err = parcel->writeInt64(mDynamicRangeProfile);
if (err != OK) return err;
+ err = parcel->writeInt32(mColorSpace);
+ if (err != OK) return err;
+
err = parcel->writeInt64(mStreamUseCase);
if (err != OK) return err;
diff --git a/camera/cameraserver/Android.bp b/camera/cameraserver/Android.bp
index 094a3c1..27ebb7a 100644
--- a/camera/cameraserver/Android.bp
+++ b/camera/cameraserver/Android.bp
@@ -43,7 +43,7 @@
"android.hardware.camera.provider@2.5",
"android.hardware.camera.provider@2.6",
"android.hardware.camera.provider@2.7",
- "android.hardware.camera.provider-V1-ndk",
+ "android.hardware.camera.provider-V2-ndk",
"android.hardware.camera.device@1.0",
"android.hardware.camera.device@3.2",
"android.hardware.camera.device@3.4",
diff --git a/camera/include/camera/CameraSessionStats.h b/camera/include/camera/CameraSessionStats.h
index aaa88b2..90ee924 100644
--- a/camera/include/camera/CameraSessionStats.h
+++ b/camera/include/camera/CameraSessionStats.h
@@ -67,22 +67,26 @@
int64_t mDynamicRangeProfile;
// Stream use case
int64_t mStreamUseCase;
+ // Color space
+ int32_t mColorSpace;
CameraStreamStats() :
mWidth(0), mHeight(0), mFormat(0), mMaxPreviewFps(0), mDataSpace(0), mUsage(0),
mRequestCount(0), mErrorCount(0), mStartLatencyMs(0),
mMaxHalBuffers(0), mMaxAppBuffers(0), mHistogramType(HISTOGRAM_TYPE_UNKNOWN),
mDynamicRangeProfile(ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD),
- mStreamUseCase(ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT) {}
+ mStreamUseCase(ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT),
+ mColorSpace(ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED) {}
CameraStreamStats(int width, int height, int format, float maxPreviewFps, int dataSpace,
int64_t usage, int maxHalBuffers, int maxAppBuffers, int dynamicRangeProfile,
- int streamUseCase)
+ int streamUseCase, int32_t colorSpace)
: mWidth(width), mHeight(height), mFormat(format), mMaxPreviewFps(maxPreviewFps),
mDataSpace(dataSpace), mUsage(usage), mRequestCount(0), mErrorCount(0),
mStartLatencyMs(0), mMaxHalBuffers(maxHalBuffers), mMaxAppBuffers(maxAppBuffers),
mHistogramType(HISTOGRAM_TYPE_UNKNOWN),
mDynamicRangeProfile(dynamicRangeProfile),
- mStreamUseCase(streamUseCase) {}
+ mStreamUseCase(streamUseCase),
+ mColorSpace(colorSpace) {}
virtual status_t readFromParcel(const android::Parcel* parcel) override;
virtual status_t writeToParcel(android::Parcel* parcel) const override;
diff --git a/camera/include/camera/camera2/OutputConfiguration.h b/camera/include/camera/camera2/OutputConfiguration.h
index b7c7f7f..a713b40 100644
--- a/camera/include/camera/camera2/OutputConfiguration.h
+++ b/camera/include/camera/camera2/OutputConfiguration.h
@@ -61,6 +61,7 @@
int getWidth() const;
int getHeight() const;
int64_t getDynamicRangeProfile() const;
+ int32_t getColorSpace() const;
bool isDeferred() const;
bool isShared() const;
String16 getPhysicalCameraId() const;
@@ -111,6 +112,7 @@
mIsMultiResolution == other.mIsMultiResolution &&
sensorPixelModesUsedEqual(other) &&
mDynamicRangeProfile == other.mDynamicRangeProfile &&
+ mColorSpace == other.mColorSpace &&
mStreamUseCase == other.mStreamUseCase &&
mTimestampBase == other.mTimestampBase &&
mMirrorMode == other.mMirrorMode);
@@ -153,6 +155,9 @@
if (mDynamicRangeProfile != other.mDynamicRangeProfile) {
return mDynamicRangeProfile < other.mDynamicRangeProfile;
}
+ if (mColorSpace != other.mColorSpace) {
+ return mColorSpace < other.mColorSpace;
+ }
if (mStreamUseCase != other.mStreamUseCase) {
return mStreamUseCase < other.mStreamUseCase;
}
@@ -187,6 +192,7 @@
bool mIsMultiResolution;
std::vector<int32_t> mSensorPixelModesUsed;
int64_t mDynamicRangeProfile;
+ int32_t mColorSpace;
int64_t mStreamUseCase;
int mTimestampBase;
int mMirrorMode;
diff --git a/camera/ndk/impl/ACameraMetadata.cpp b/camera/ndk/impl/ACameraMetadata.cpp
index 05124c0..1018b41 100644
--- a/camera/ndk/impl/ACameraMetadata.cpp
+++ b/camera/ndk/impl/ACameraMetadata.cpp
@@ -537,6 +537,7 @@
case ACAMERA_CONTROL_ENABLE_ZSL:
case ACAMERA_CONTROL_EXTENDED_SCENE_MODE:
case ACAMERA_CONTROL_ZOOM_RATIO:
+ case ACAMERA_CONTROL_SETTINGS_OVERRIDE:
case ACAMERA_EDGE_MODE:
case ACAMERA_FLASH_MODE:
case ACAMERA_HOT_PIXEL_MODE:
diff --git a/camera/ndk/include/camera/NdkCameraManager.h b/camera/ndk/include/camera/NdkCameraManager.h
index 729182e..7388678 100644
--- a/camera/ndk/include/camera/NdkCameraManager.h
+++ b/camera/ndk/include/camera/NdkCameraManager.h
@@ -209,7 +209,8 @@
* Query the capabilities of a camera device. These capabilities are
* immutable for a given camera.
*
- * <p>See {@link ACameraMetadata} document and {@link NdkCameraMetadataTags.h} for more details.</p>
+ * <p>See {@link ACameraMetadata} document and <a href="https://cs.android.com/android/platform/superproject/+/master:frameworks/av/camera/ndk/include/camera/NdkCameraMetadataTags.h">NdkCameraMetadataTags.h</a>
+ * for more details.</p>
*
* <p>The caller must call {@link ACameraMetadata_free} to free the memory of the output
* characteristics.</p>
diff --git a/camera/ndk/include/camera/NdkCameraMetadata.h b/camera/ndk/include/camera/NdkCameraMetadata.h
index b331d50..a9f53dd 100644
--- a/camera/ndk/include/camera/NdkCameraMetadata.h
+++ b/camera/ndk/include/camera/NdkCameraMetadata.h
@@ -96,9 +96,12 @@
/**
* The tag identifying the entry.
*
- * <p> It is one of the values defined in {@link NdkCameraMetadataTags.h}, and defines how the
+ * <p> It is one of the values defined in
+ * <a href="https://cs.android.com/android/platform/superproject/+/master:frameworks/av/camera/ndk/include/camera/NdkCameraMetadataTags.h">NdkCameraMetadataTags.h</a>,
+ * and defines how the
* entry should be interpreted and which parts of the API provide it.
- * See {@link NdkCameraMetadataTags.h} for more details. </p>
+ * See <a href="https://cs.android.com/android/platform/superproject/+/master:frameworks/av/camera/ndk/include/camera/NdkCameraMetadataTags.h">NdkCameraMetadataTags.h</a>
+ * for more details. </p>
*/
uint32_t tag;
@@ -141,9 +144,11 @@
/**
* The tag identifying the entry.
*
- * <p> It is one of the values defined in {@link NdkCameraMetadataTags.h}, and defines how the
+ * <p> It is one of the values defined in <a href="https://cs.android.com/android/platform/superproject/+/master:frameworks/av/camera/ndk/include/camera/NdkCameraMetadataTags.h">NdkCameraMetadataTags.h</a>,
+ * and defines how the
* entry should be interpreted and which parts of the API provide it.
- * See {@link NdkCameraMetadataTags.h} for more details. </p>
+ * See <a href="https://cs.android.com/android/platform/superproject/+/master:frameworks/av/camera/ndk/include/camera/NdkCameraMetadataTags.h">NdkCameraMetadataTags.h</a>
+ * for more details. </p>
*/
uint32_t tag;
diff --git a/camera/ndk/include/camera/NdkCameraMetadataTags.h b/camera/ndk/include/camera/NdkCameraMetadataTags.h
index 0d156a5..def883b 100644
--- a/camera/ndk/include/camera/NdkCameraMetadataTags.h
+++ b/camera/ndk/include/camera/NdkCameraMetadataTags.h
@@ -2044,6 +2044,105 @@
*/
ACAMERA_CONTROL_ZOOM_RATIO = // float
ACAMERA_CONTROL_START + 47,
+ /**
+ * <p>The desired CaptureRequest settings override with which certain keys are
+ * applied earlier so that they can take effect sooner.</p>
+ *
+ * <p>Type: int32 (acamera_metadata_enum_android_control_settings_override_t)</p>
+ *
+ * <p>This tag may appear in:
+ * <ul>
+ * <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
+ * <li>ACaptureRequest</li>
+ * </ul></p>
+ *
+ * <p>There are some CaptureRequest keys which can be applied earlier than others
+ * when controls within a CaptureRequest aren't required to take effect at the same time.
+ * One such example is zoom. Zoom can be applied at a later stage of the camera pipeline.
+ * As soon as the camera device receives the CaptureRequest, it can apply the requested
+ * zoom value onto an earlier request that's already in the pipeline, thus improving zoom
+ * latency.</p>
+ * <p>This key's value in the capture result reflects whether the controls for this capture
+ * are overridden "by" a newer request. This means that if a capture request turns on
+ * settings override, the capture result of an earlier request will contain the key value
+ * of ZOOM. On the other hand, if a capture request has settings override turned on,
+ * but all newer requests have it turned off, the key's value in the capture result will
+ * be OFF because this capture isn't overridden by a newer capture. In the two examples
+ * below, the capture results columns illustrate the settingsOverride values in different
+ * scenarios.</p>
+ * <p>Assuming the zoom settings override can speed up by 1 frame, below example illustrates
+ * the speed-up at the start of capture session:</p>
+ * <pre><code>Camera session created
+ * Request 1 (zoom=1.0x, override=ZOOM) ->
+ * Request 2 (zoom=1.2x, override=ZOOM) ->
+ * Request 3 (zoom=1.4x, override=ZOOM) -> Result 1 (zoom=1.2x, override=ZOOM)
+ * Request 4 (zoom=1.6x, override=ZOOM) -> Result 2 (zoom=1.4x, override=ZOOM)
+ * Request 5 (zoom=1.8x, override=ZOOM) -> Result 3 (zoom=1.6x, override=ZOOM)
+ * -> Result 4 (zoom=1.8x, override=ZOOM)
+ * -> Result 5 (zoom=1.8x, override=OFF)
+ * </code></pre>
+ * <p>The application can turn on settings override and use zoom as normal. The example
+ * shows that the later zoom values (1.2x, 1.4x, 1.6x, and 1.8x) overwrite the zoom
+ * values (1.0x, 1.2x, 1.4x, and 1.6x) of earlier requests (#1, #2, #3, and #4).</p>
+ * <p>The application must make sure the settings override doesn't interfere with user
+ * journeys requiring simultaneous application of all controls in CaptureRequest on the
+ * requested output targets. For example, if the application takes a still capture using
+ * CameraCaptureSession#capture, and the repeating request immediately sets a different
+ * zoom value using override, the inflight still capture could have its zoom value
+ * overwritten unexpectedly.</p>
+ * <p>So the application is strongly recommended to turn off settingsOverride when taking
+ * still/burst captures, and turn it back on when there is only repeating viewfinder
+ * request and no inflight still/burst captures.</p>
+ * <p>Below is the example demonstrating the transitions in and out of the
+ * settings override:</p>
+ * <pre><code>Request 1 (zoom=1.0x, override=OFF)
+ * Request 2 (zoom=1.2x, override=OFF)
+ * Request 3 (zoom=1.4x, override=ZOOM) -> Result 1 (zoom=1.0x, override=OFF)
+ * Request 4 (zoom=1.6x, override=ZOOM) -> Result 2 (zoom=1.4x, override=ZOOM)
+ * Request 5 (zoom=1.8x, override=OFF) -> Result 3 (zoom=1.6x, override=ZOOM)
+ * -> Result 4 (zoom=1.6x, override=OFF)
+ * -> Result 5 (zoom=1.8x, override=OFF)
+ * </code></pre>
+ * <p>This example shows that:</p>
+ * <ul>
+ * <li>The application "ramps in" settings override by setting the control to ZOOM.
+ * In the example, request #3 enables zoom settings override. Because the camera device
+ * can speed up applying zoom by 1 frame, the output of request #2 has 1.4x zoom, the
+ * value specified in request #3.</li>
+ * <li>The application "ramps out" of settings override by setting the control to OFF. In
+ * the example, request #5 changes the override to OFF. Because request #4's zoom
+ * takes effect in result #3, result #4's zoom remains the same until the new value takes
+ * effect in result #5.</li>
+ * </ul>
+ */
+ ACAMERA_CONTROL_SETTINGS_OVERRIDE = // int32 (acamera_metadata_enum_android_control_settings_override_t)
+ ACAMERA_CONTROL_START + 49,
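+ /*
+ * Illustrative sketch only (not part of this change): with the NDK, an
+ * application could request the zoom override on a capture request roughly as
+ * follows, where `request` is a hypothetical ACaptureRequest*:
+ *
+ *   int32_t settingsOverride = ACAMERA_CONTROL_SETTINGS_OVERRIDE_ZOOM;
+ *   ACaptureRequest_setEntry_i32(request, ACAMERA_CONTROL_SETTINGS_OVERRIDE,
+ *                                1, &settingsOverride);  // count of 1
+ */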
+ /**
+ * <p>List of available settings overrides supported by the camera device that can
+ * be used to speed up certain controls.</p>
+ *
+ * <p>Type: int32[n]</p>
+ *
+ * <p>This tag may appear in:
+ * <ul>
+ * <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+ * </ul></p>
+ *
+ * <p>When not all controls within a CaptureRequest are required to take effect
+ * at the same time on the outputs, the camera device may apply certain request keys sooner
+ * to improve latency. This list contains such supported settings overrides. Each settings
+ * override corresponds to a set of CaptureRequest keys that can be sped up when applying.</p>
+ * <p>A supported settings override can be passed in via
+ * <a href="https://developer.android.com/reference/android/hardware/camera2/CaptureRequest.html#CONTROL_SETTINGS_OVERRIDE">CaptureRequest#CONTROL_SETTINGS_OVERRIDE</a>, and the
+ * CaptureRequest keys corresponding to the override are applied as soon as possible, not
+ * bound by per-frame synchronization. See ACAMERA_CONTROL_SETTINGS_OVERRIDE for the
+ * CaptureRequest keys for each override.</p>
+ * <p>OFF is always included in this list.</p>
+ *
+ * @see ACAMERA_CONTROL_SETTINGS_OVERRIDE
+ */
+ ACAMERA_CONTROL_AVAILABLE_SETTINGS_OVERRIDES = // int32[n]
+ ACAMERA_CONTROL_START + 50,
ACAMERA_CONTROL_END,
/**
@@ -3520,6 +3619,26 @@
*/
ACAMERA_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP = // int64[n*3] (acamera_metadata_enum_android_request_available_dynamic_range_profiles_map_t)
ACAMERA_REQUEST_START + 19,
+ /**
+ * <p>A list of all possible color space profiles supported by a camera device.</p>
+ *
+ * <p>Type: int64[n*3] (acamera_metadata_enum_android_request_available_color_space_profiles_map_t)</p>
+ *
+ * <p>This tag may appear in:
+ * <ul>
+ * <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+ * </ul></p>
+ *
+ * <p>A color space profile is a combination of a color space, an image format, and a dynamic range
+ * profile. If a camera does not support the
+ * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraCharacteristics.html#REQUEST_AVAILABLE_CAPABILITIES_DYNAMIC_RANGE_TEN_BIT">CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES_DYNAMIC_RANGE_TEN_BIT</a>
+ * capability, the dynamic range profile will always be
+ * <a href="https://developer.android.com/reference/android/hardware/camera2/params/DynamicRangeProfiles.html#STANDARD">DynamicRangeProfiles#STANDARD</a>. Camera clients can
+ * use <a href="https://developer.android.com/reference/android/hardware/camera2/params/SessionConfiguration.html#setColorSpace">SessionConfiguration#setColorSpace</a> to select
+ * a color space.</p>
+ */
+ ACAMERA_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP = // int64[n*3] (acamera_metadata_enum_android_request_available_color_space_profiles_map_t)
+ ACAMERA_REQUEST_START + 21,
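+ /*
+ * Illustrative sketch only (not part of this change): the profile entries can be
+ * read from the camera characteristics and walked three values at a time. The
+ * (color space, image format, dynamic range profile) ordering of each triple is
+ * an assumption here, and `chars` stands for a hypothetical ACameraMetadata*
+ * obtained from ACameraManager_getCameraCharacteristics:
+ *
+ *   ACameraMetadata_const_entry entry;
+ *   if (ACameraMetadata_getConstEntry(chars,
+ *           ACAMERA_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP, &entry) == ACAMERA_OK) {
+ *       for (uint32_t i = 0; i + 2 < entry.count; i += 3) {
+ *           int64_t colorSpace = entry.data.i64[i];  // first value of the triple
+ *       }
+ *   }
+ */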
ACAMERA_REQUEST_END,
/**
@@ -8475,6 +8594,40 @@
} acamera_metadata_enum_android_control_extended_scene_mode_t;
+// ACAMERA_CONTROL_SETTINGS_OVERRIDE
+typedef enum acamera_metadata_enum_acamera_control_settings_override {
+ /**
+ * <p>No keys are applied sooner than the other keys when applying CaptureRequest
+ * settings to the camera device. This is the default value.</p>
+ */
+ ACAMERA_CONTROL_SETTINGS_OVERRIDE_OFF = 0,
+
+ /**
+ * <p>Zoom related keys are applied sooner than the other keys in the CaptureRequest. The
+ * zoom related keys are:</p>
+ * <ul>
+ * <li>ACAMERA_CONTROL_ZOOM_RATIO</li>
+ * <li>ACAMERA_SCALER_CROP_REGION</li>
+ * <li>ACAMERA_CONTROL_AE_REGIONS</li>
+ * <li>ACAMERA_CONTROL_AWB_REGIONS</li>
+ * <li>ACAMERA_CONTROL_AF_REGIONS</li>
+ * </ul>
+ * <p>Even though ACAMERA_CONTROL_AE_REGIONS, ACAMERA_CONTROL_AWB_REGIONS,
+ * and ACAMERA_CONTROL_AF_REGIONS are not directly zoom related, applications
+ * typically scale these regions together with ACAMERA_SCALER_CROP_REGION to have a
+ * consistent mapping within the current field of view. In this aspect, they are
+ * related to ACAMERA_SCALER_CROP_REGION and ACAMERA_CONTROL_ZOOM_RATIO.</p>
+ *
+ * @see ACAMERA_CONTROL_AE_REGIONS
+ * @see ACAMERA_CONTROL_AF_REGIONS
+ * @see ACAMERA_CONTROL_AWB_REGIONS
+ * @see ACAMERA_CONTROL_ZOOM_RATIO
+ * @see ACAMERA_SCALER_CROP_REGION
+ */
+ ACAMERA_CONTROL_SETTINGS_OVERRIDE_ZOOM = 1,
+
+} acamera_metadata_enum_android_control_settings_override_t;
+
// ACAMERA_EDGE_MODE
@@ -9448,6 +9601,99 @@
} acamera_metadata_enum_android_request_available_dynamic_range_profiles_map_t;
+// ACAMERA_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP
+typedef enum acamera_metadata_enum_acamera_request_available_color_space_profiles_map {
+ /**
+ * <p>Default value, when not explicitly specified. The Camera device will choose the color
+ * space to employ.</p>
+ */
+ ACAMERA_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED = -1,
+
+ /**
+ * <p>RGB color space sRGB standardized as IEC 61966-2.1:1999.</p>
+ */
+ ACAMERA_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_SRGB = 0,
+
+ /**
+ * <p>RGB color space sRGB standardized as IEC 61966-2.1:1999.</p>
+ */
+ ACAMERA_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_LINEAR_SRGB = 1,
+
+ /**
+ * <p>RGB color space scRGB-nl standardized as IEC 61966-2-2:2003.</p>
+ */
+ ACAMERA_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_EXTENDED_SRGB = 2,
+
+ /**
+ * <p>RGB color space scRGB standardized as IEC 61966-2-2:2003.</p>
+ */
+ ACAMERA_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_LINEAR_EXTENDED_SRGB
+ = 3,
+
+ /**
+ * <p>RGB color space BT.709 standardized as Rec. ITU-R BT.709-5.</p>
+ */
+ ACAMERA_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_BT709 = 4,
+
+ /**
+ * <p>RGB color space BT.2020 standardized as Rec. ITU-R BT.2020-1.</p>
+ */
+ ACAMERA_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_BT2020 = 5,
+
+ /**
+ * <p>RGB color space DCI-P3 standardized as SMPTE RP 431-2-2007.</p>
+ */
+ ACAMERA_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_DCI_P3 = 6,
+
+ /**
+ * <p>RGB color space Display P3 based on SMPTE RP 431-2-2007 and IEC 61966-2.1:1999.</p>
+ */
+ ACAMERA_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_DISPLAY_P3 = 7,
+
+ /**
+ * <p>RGB color space NTSC, 1953 standard.</p>
+ */
+ ACAMERA_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_NTSC_1953 = 8,
+
+ /**
+ * <p>RGB color space SMPTE C.</p>
+ */
+ ACAMERA_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_SMPTE_C = 9,
+
+ /**
+ * <p>RGB color space Adobe RGB (1998).</p>
+ */
+ ACAMERA_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_ADOBE_RGB = 10,
+
+ /**
+ * <p>RGB color space ProPhoto RGB standardized as ROMM RGB ISO 22028-2:2013.</p>
+ */
+ ACAMERA_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_PRO_PHOTO_RGB = 11,
+
+ /**
+ * <p>RGB color space ACES standardized as SMPTE ST 2065-1:2012.</p>
+ */
+ ACAMERA_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_ACES = 12,
+
+ /**
+ * <p>RGB color space ACEScg standardized as Academy S-2014-004.</p>
+ */
+ ACAMERA_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_ACESCG = 13,
+
+ /**
+ * <p>XYZ color space CIE XYZ. This color space assumes standard illuminant D50 as its white
+ * point.</p>
+ */
+ ACAMERA_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_CIE_XYZ = 14,
+
+ /**
+ * <p>Lab color space CIE L<em>a</em>b*. This color space uses CIE XYZ D50 as a profile conversion
+ * space.</p>
+ */
+ ACAMERA_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_CIE_LAB = 15,
+
+} acamera_metadata_enum_android_request_available_color_space_profiles_map_t;
+
// ACAMERA_SCALER_AVAILABLE_STREAM_CONFIGURATIONS
typedef enum acamera_metadata_enum_acamera_scaler_available_stream_configurations {
diff --git a/camera/ndk/ndk_vendor/impl/ACameraManager.cpp b/camera/ndk/ndk_vendor/impl/ACameraManager.cpp
index 77c934a..bb4ef56 100644
--- a/camera/ndk/ndk_vendor/impl/ACameraManager.cpp
+++ b/camera/ndk/ndk_vendor/impl/ACameraManager.cpp
@@ -165,7 +165,10 @@
Mutex::Autolock _l(mLock);
if (mCameraService != nullptr) {
mCameraService->unlinkToDeath(mDeathNotifier);
- mCameraService->removeListener(mCameraServiceListener);
+ auto stat = mCameraService->removeListener(mCameraServiceListener);
+ if (!stat.isOk()) {
+ ALOGE("Failed to remove listener to camera service %s", stat.description().c_str());
+ }
}
mDeathNotifier.clear();
if (mCbLooper != nullptr) {
@@ -475,6 +478,10 @@
ALOGE("%s: Cannot find camera callback fp!", __FUNCTION__);
return;
}
+ if (cb == nullptr) {
+ // Physical camera callback is null
+ return;
+ }
found = msg->findPointer(kContextKey, &context);
if (!found) {
ALOGE("%s: Cannot find callback context!", __FUNCTION__);
diff --git a/camera/tests/fuzzer/Android.bp b/camera/tests/fuzzer/Android.bp
new file mode 100644
index 0000000..bae8706
--- /dev/null
+++ b/camera/tests/fuzzer/Android.bp
@@ -0,0 +1,150 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at:
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package {
+ // See: http://go/android-license-faq
+ // A large-scale-change added 'default_applicable_licenses' to import
+ // all of the 'license_kinds' from "frameworks_av_camera_license"
+ // to get the below license kinds:
+ // SPDX-license-identifier-Apache-2.0
+ default_applicable_licenses: ["frameworks_av_camera_license"],
+}
+
+cc_defaults {
+ name: "camera_defaults",
+ static_libs: [
+ "libcamera_client",
+ ],
+ shared_libs: [
+ "libbase",
+ "libcutils",
+ "libutils",
+ "liblog",
+ "libbinder",
+ "libgui",
+ "libcamera_metadata",
+ "libnativewindow",
+ ],
+ fuzz_config: {
+ cc: [
+ "android-media-fuzzing-reports@google.com",
+ ],
+ componentid: 155276,
+ },
+}
+
+cc_fuzz {
+ name: "camera_fuzzer",
+ srcs: [
+ "camera_fuzzer.cpp",
+ ],
+ defaults: [
+ "camera_defaults",
+ ],
+}
+
+cc_fuzz {
+ name: "camera_c2CaptureRequest_fuzzer",
+ srcs: [
+ "camera_c2CaptureRequest_fuzzer.cpp",
+ ],
+ defaults: [
+ "camera_defaults",
+ ],
+}
+
+cc_fuzz {
+ name: "camera_c2ConcurrentCamera_fuzzer",
+ srcs: [
+ "camera_c2ConcurrentCamera_fuzzer.cpp",
+ ],
+ defaults: [
+ "camera_defaults",
+ ],
+}
+
+cc_fuzz {
+ name: "camera_c2SubmitInfo_fuzzer",
+ srcs: [
+ "camera_c2SubmitInfo_fuzzer.cpp",
+ ],
+ defaults: [
+ "camera_defaults",
+ ],
+}
+
+cc_fuzz {
+ name: "camera_c2SessionConfiguration_fuzzer",
+ srcs: [
+ "camera_c2SessionConfiguration_fuzzer.cpp",
+ ],
+ defaults: [
+ "camera_defaults",
+ ],
+}
+
+cc_fuzz {
+ name: "camera_c2OutputConfiguration_fuzzer",
+ srcs: [
+ "camera_c2OutputConfiguration_fuzzer.cpp",
+ ],
+ defaults: [
+ "camera_defaults",
+ ],
+}
+
+cc_fuzz {
+ name: "camera_vendorTagDescriptor_fuzzer",
+ srcs: [
+ "camera_vendorTagDescriptor_fuzzer.cpp",
+ ],
+ defaults: [
+ "camera_defaults",
+ ],
+ include_dirs: [
+ "system/media/camera/tests",
+ "system/media/private/camera/include",
+ ],
+}
+
+cc_fuzz {
+ name: "camera_Parameters_fuzzer",
+ srcs: [
+ "camera_Parameters_fuzzer.cpp",
+ ],
+ defaults: [
+ "camera_defaults",
+ ],
+}
+
+cc_fuzz {
+ name: "camera_SessionStats_fuzzer",
+ srcs: [
+ "camera_SessionStats_fuzzer.cpp",
+ ],
+ defaults: [
+ "camera_defaults",
+ ],
+}
+
+cc_fuzz {
+ name: "camera_captureResult_fuzzer",
+ srcs: [
+ "camera_captureResult_fuzzer.cpp",
+ ],
+ defaults: [
+ "camera_defaults",
+ ],
+}
diff --git a/camera/tests/fuzzer/README.md b/camera/tests/fuzzer/README.md
new file mode 100644
index 0000000..c07ac04
--- /dev/null
+++ b/camera/tests/fuzzer/README.md
@@ -0,0 +1,74 @@
+# Fuzzers for libcamera_client
+
+## Plugin Design Considerations
+The fuzzer plugins for libcamera_client are designed based on the understanding of the
+source code and try to achieve the following:
+
+##### Maximize code coverage
+The configuration parameters are not hardcoded, but instead selected based on
+incoming data. This ensures more code paths are reached by the fuzzers.
+
+libcamera_client supports the following parameters:
+1. Command (parameter name: `cmd`)
+2. Video Buffer Mode (parameter name: `videoBufferMode`)
+3. Preview Callback Flag (parameter name: `previewCallbackFlag`)
+4. Facing (parameter name: `facing`)
+5. Orientation (parameter name: `orientation`)
+6. Format (parameter name: `format`)
+
+| Parameter| Valid Values| Configured Value|
+|------------- |-------------| ----- |
+| `cmd` | 0.`CAMERA_CMD_START_SMOOTH_ZOOM` 1.`CAMERA_CMD_STOP_SMOOTH_ZOOM` 3.`CAMERA_CMD_SET_DISPLAY_ORIENTATION` 4.`CAMERA_CMD_ENABLE_SHUTTER_SOUND` 5.`CAMERA_CMD_PLAY_RECORDING_SOUND` 6.`CAMERA_CMD_START_FACE_DETECTION` 7.`CAMERA_CMD_STOP_FACE_DETECTION` 8.`CAMERA_CMD_ENABLE_FOCUS_MOVE_MSG` 9.`CAMERA_CMD_PING` 10.`CAMERA_CMD_SET_VIDEO_BUFFER_COUNT` 11.`CAMERA_CMD_SET_VIDEO_FORMAT`| Value obtained from FuzzedDataProvider|
+| `videoBufferMode` |0. `ICamera::VIDEO_BUFFER_MODE_DATA_CALLBACK_YUV` 1.`ICamera::VIDEO_BUFFER_MODE_DATA_CALLBACK_METADATA` 2.`ICamera::VIDEO_BUFFER_MODE_BUFFER_QUEUE`| Value obtained from FuzzedDataProvider|
+| `previewCallbackFlag` | 0. `CAMERA_FRAME_CALLBACK_FLAG_ENABLE_MASK` 1.`CAMERA_FRAME_CALLBACK_FLAG_ONE_SHOT_MASK` 2.`CAMERA_FRAME_CALLBACK_FLAG_COPY_OUT_MASK` 3.`CAMERA_FRAME_CALLBACK_FLAG_NOOP` 4.`CAMERA_FRAME_CALLBACK_FLAG_CAMCORDER` 5.`CAMERA_FRAME_CALLBACK_FLAG_CAMERA` 6.`CAMERA_FRAME_CALLBACK_FLAG_BARCODE_SCANNER`| Value obtained from FuzzedDataProvider|
+| `facing` | 0.`android::hardware::CAMERA_FACING_BACK` 1.`android::hardware::CAMERA_FACING_FRONT`| Value obtained from FuzzedDataProvider|
+| `orientation` | 0.`0` 1.`90` 2.`180` 3.`270`| Value obtained from FuzzedDataProvider|
+| `format` | 0.`CameraParameters::PIXEL_FORMAT_YUV422SP` 1.`CameraParameters::PIXEL_FORMAT_YUV420SP` 2.`CameraParameters::PIXEL_FORMAT_YUV422I` 3.`CameraParameters::PIXEL_FORMAT_YUV420P` 4.`CameraParameters::PIXEL_FORMAT_RGB565` 5.`CameraParameters::PIXEL_FORMAT_RGBA8888` 6.`CameraParameters::PIXEL_FORMAT_JPEG` 7.`CameraParameters::PIXEL_FORMAT_BAYER_RGGB` 8.`CameraParameters::PIXEL_FORMAT_ANDROID_OPAQUE`| Value obtained from FuzzedDataProvider|
+
+This also ensures that the plugins are always deterministic for any given input.
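+
+For illustration, a minimal sketch of this pattern (the harness structure and the
+orientation array below are illustrative, not copied from the fuzzer sources) could
+look like:
+
+```
+#include <fuzzer/FuzzedDataProvider.h>
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
+    FuzzedDataProvider fdp(data, size);
+    // Pick one of the valid orientations listed above instead of hardcoding it.
+    const int32_t kOrientations[] = {0, 90, 180, 270};
+    int32_t orientation = fdp.PickValueInArray(kOrientations);
+    (void)orientation;  // in a real harness this is passed to the API under test
+    return 0;
+}
+```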
+
+##### Maximize utilization of input data
+The plugins feed the entire input data to the module.
+This ensures that the plugins tolerate any kind of input (empty, huge,
+malformed, etc.) and don't `exit()` on any input, thereby increasing the
+chance of identifying vulnerabilities.
+
+## Build
+
+This describes steps to build the camera_fuzzer, camera_c2CaptureRequest_fuzzer, camera_c2ConcurrentCamera_fuzzer, camera_c2SubmitInfo_fuzzer, camera_c2SessionConfiguration_fuzzer, camera_c2OutputConfiguration_fuzzer, camera_vendorTagDescriptor_fuzzer, camera_Parameters_fuzzer, camera_SessionStats_fuzzer and camera_captureResult_fuzzer binaries.
+
+### Android
+
+#### Steps to build
+Build the fuzzer
+```
+ $ mm -j$(nproc) camera_fuzzer
+ $ mm -j$(nproc) camera_c2CaptureRequest_fuzzer
+ $ mm -j$(nproc) camera_c2ConcurrentCamera_fuzzer
+ $ mm -j$(nproc) camera_c2SubmitInfo_fuzzer
+ $ mm -j$(nproc) camera_c2SessionConfiguration_fuzzer
+ $ mm -j$(nproc) camera_c2OutputConfiguration_fuzzer
+ $ mm -j$(nproc) camera_vendorTagDescriptor_fuzzer
+ $ mm -j$(nproc) camera_Parameters_fuzzer
+ $ mm -j$(nproc) camera_SessionStats_fuzzer
+ $ mm -j$(nproc) camera_captureResult_fuzzer
+```
+#### Steps to run
+To run on device
+```
+ $ adb sync data
+ $ adb shell /data/fuzz/${TARGET_ARCH}/camera_fuzzer/camera_fuzzer
+ $ adb shell /data/fuzz/${TARGET_ARCH}/camera_c2CaptureRequest_fuzzer/camera_c2CaptureRequest_fuzzer
+ $ adb shell /data/fuzz/${TARGET_ARCH}/camera_c2ConcurrentCamera_fuzzer/camera_c2ConcurrentCamera_fuzzer
+ $ adb shell /data/fuzz/${TARGET_ARCH}/camera_c2SubmitInfo_fuzzer/camera_c2SubmitInfo_fuzzer
+ $ adb shell /data/fuzz/${TARGET_ARCH}/camera_c2SessionConfiguration_fuzzer/camera_c2SessionConfiguration_fuzzer
+ $ adb shell /data/fuzz/${TARGET_ARCH}/camera_c2OutputConfiguration_fuzzer/camera_c2OutputConfiguration_fuzzer
+ $ adb shell /data/fuzz/${TARGET_ARCH}/camera_vendorTagDescriptor_fuzzer/camera_vendorTagDescriptor_fuzzer
+ $ adb shell /data/fuzz/${TARGET_ARCH}/camera_Parameters_fuzzer/camera_Parameters_fuzzer
+ $ adb shell /data/fuzz/${TARGET_ARCH}/camera_SessionStats_fuzzer/camera_SessionStats_fuzzer
+ $ adb shell /data/fuzz/${TARGET_ARCH}/camera_captureResult_fuzzer/camera_captureResult_fuzzer
+```
+
+## References:
+ * http://llvm.org/docs/LibFuzzer.html
+ * https://github.com/google/oss-fuzz
diff --git a/camera/tests/fuzzer/camera2common.h b/camera/tests/fuzzer/camera2common.h
new file mode 100644
index 0000000..14a1b1b
--- /dev/null
+++ b/camera/tests/fuzzer/camera2common.h
@@ -0,0 +1,55 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#ifndef CAMERA2COMMON_H
+#define CAMERA2COMMON_H
+
+#include <binder/Parcel.h>
+
+using namespace android;
+
+template <class type>
+void invokeReadWriteNullParcel(type* obj) {
+ Parcel* parcelNull = nullptr;
+ obj->writeToParcel(parcelNull);
+ obj->readFromParcel(parcelNull);
+}
+
+template <class type>
+void invokeReadWriteNullParcelsp(sp<type> obj) {
+ Parcel* parcelNull = nullptr;
+ obj->writeToParcel(parcelNull);
+ obj->readFromParcel(parcelNull);
+}
+
+template <class type>
+void invokeReadWriteParcel(type* obj) {
+ Parcel* parcel = new Parcel();
+ obj->writeToParcel(parcel);
+ parcel->setDataPosition(0);
+ obj->readFromParcel(parcel);
+ delete parcel;
+}
+
+template <class type>
+void invokeReadWriteParcelsp(sp<type> obj) {
+ Parcel* parcel = new Parcel();
+ obj->writeToParcel(parcel);
+ parcel->setDataPosition(0);
+ obj->readFromParcel(parcel);
+ delete parcel;
+}
+
+#endif // CAMERA2COMMON_H
diff --git a/camera/tests/fuzzer/camera_Parameters_fuzzer.cpp b/camera/tests/fuzzer/camera_Parameters_fuzzer.cpp
new file mode 100644
index 0000000..45b3526
--- /dev/null
+++ b/camera/tests/fuzzer/camera_Parameters_fuzzer.cpp
@@ -0,0 +1,163 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <CameraParameters.h>
+#include <CameraParameters2.h>
+#include <fcntl.h>
+#include <fuzzer/FuzzedDataProvider.h>
+#include <utils/String16.h>
+
+using namespace std;
+using namespace android;
+
+string kValidFormats[] = {
+ CameraParameters::PIXEL_FORMAT_YUV422SP, CameraParameters::PIXEL_FORMAT_YUV420SP,
+ CameraParameters::PIXEL_FORMAT_YUV422I, CameraParameters::PIXEL_FORMAT_YUV420P,
+ CameraParameters::PIXEL_FORMAT_RGB565, CameraParameters::PIXEL_FORMAT_RGBA8888,
+ CameraParameters::PIXEL_FORMAT_JPEG, CameraParameters::PIXEL_FORMAT_BAYER_RGGB,
+ CameraParameters::PIXEL_FORMAT_ANDROID_OPAQUE};
+
+class CameraParametersFuzzer {
+ public:
+ void process(const uint8_t* data, size_t size);
+ ~CameraParametersFuzzer() {
+ delete mCameraParameters;
+ delete mCameraParameters2;
+ }
+
+ private:
+ void invokeCameraParameters();
+ template <class type>
+ void initCameraParameters(type** obj);
+ template <class type>
+ void cameraParametersCommon(type* obj);
+ CameraParameters* mCameraParameters = nullptr;
+ CameraParameters2* mCameraParameters2 = nullptr;
+ FuzzedDataProvider* mFDP = nullptr;
+};
+
+template <class type>
+void CameraParametersFuzzer::initCameraParameters(type** obj) {
+ if (mFDP->ConsumeBool()) {
+ *obj = new type();
+ } else {
+ string params;
+ if (mFDP->ConsumeBool()) {
+ int32_t width = mFDP->ConsumeIntegral<int32_t>();
+ int32_t height = mFDP->ConsumeIntegral<int32_t>();
+ int32_t minFps = mFDP->ConsumeIntegral<int32_t>();
+ int32_t maxFps = mFDP->ConsumeIntegral<int32_t>();
+ params = CameraParameters::KEY_SUPPORTED_VIDEO_SIZES;
+ params += '=' + to_string(width) + 'x' + to_string(height) + ';';
+ if (mFDP->ConsumeBool()) {
+ params += CameraParameters::KEY_PREVIEW_FPS_RANGE;
+ params += '=' + to_string(minFps) + ',' + to_string(maxFps) + ';';
+ }
+ if (mFDP->ConsumeBool()) {
+ params += CameraParameters::KEY_SUPPORTED_PICTURE_SIZES;
+ params += '=' + to_string(width) + 'x' + to_string(height) + ';';
+ }
+ if (mFDP->ConsumeBool()) {
+ params += CameraParameters::KEY_SUPPORTED_PREVIEW_FORMATS;
+ params += '=' + mFDP->PickValueInArray(kValidFormats) + ';';
+ }
+ } else {
+ params = mFDP->ConsumeRandomLengthString();
+ }
+ *obj = new type(String8(params.c_str()));
+ }
+}
+
+template <class type>
+void CameraParametersFuzzer::cameraParametersCommon(type* obj) {
+ Vector<Size> supportedPreviewSizes;
+ obj->getSupportedPreviewSizes(supportedPreviewSizes);
+ int32_t previewWidth = mFDP->ConsumeIntegral<int32_t>();
+ int32_t previewHeight = mFDP->ConsumeIntegral<int32_t>();
+ obj->setPreviewSize(previewWidth, previewHeight);
+ obj->getPreviewSize(&previewWidth, &previewHeight);
+
+ Vector<Size> supportedVideoSizes;
+ obj->getSupportedVideoSizes(supportedVideoSizes);
+ if (supportedVideoSizes.size() != 0) {
+ int32_t videoWidth, videoHeight, preferredVideoWidth, preferredVideoHeight;
+ if (mFDP->ConsumeBool()) {
+ int32_t idx = mFDP->ConsumeIntegralInRange<int32_t>(0, supportedVideoSizes.size() - 1);
+ obj->setVideoSize(supportedVideoSizes[idx].width, supportedVideoSizes[idx].height);
+ } else {
+ videoWidth = mFDP->ConsumeIntegral<int32_t>();
+ videoHeight = mFDP->ConsumeIntegral<int32_t>();
+ obj->setVideoSize(videoWidth, videoHeight);
+ }
+ obj->getVideoSize(&videoWidth, &videoHeight);
+ obj->getPreferredPreviewSizeForVideo(&preferredVideoWidth, &preferredVideoHeight);
+ }
+
+ int32_t fps = mFDP->ConsumeIntegral<int32_t>();
+ obj->setPreviewFrameRate(fps);
+ obj->getPreviewFrameRate();
+ string previewFormat = mFDP->ConsumeBool() ? mFDP->PickValueInArray(kValidFormats)
+ : mFDP->ConsumeRandomLengthString();
+ obj->setPreviewFormat(previewFormat.c_str());
+
+ int32_t pictureWidth = mFDP->ConsumeIntegral<int32_t>();
+ int32_t pictureHeight = mFDP->ConsumeIntegral<int32_t>();
+ Vector<Size> supportedPictureSizes;
+ obj->setPictureSize(pictureWidth, pictureHeight);
+ obj->getPictureSize(&pictureWidth, &pictureHeight);
+ obj->getSupportedPictureSizes(supportedPictureSizes);
+ string pictureFormat = mFDP->ConsumeBool() ? mFDP->PickValueInArray(kValidFormats)
+ : mFDP->ConsumeRandomLengthString();
+ obj->setPictureFormat(pictureFormat.c_str());
+ obj->getPictureFormat();
+
+ if (mFDP->ConsumeBool()) {
+ obj->dump();
+ } else {
+ int32_t fd = open("/dev/null", O_CLOEXEC | O_RDWR | O_CREAT);
+ Vector<String16> args = {};
+ obj->dump(fd, args);
+ close(fd);
+ }
+}
+
+void CameraParametersFuzzer::invokeCameraParameters() {
+ initCameraParameters<CameraParameters>(&mCameraParameters);
+ cameraParametersCommon<CameraParameters>(mCameraParameters);
+ initCameraParameters<CameraParameters2>(&mCameraParameters2);
+ cameraParametersCommon<CameraParameters2>(mCameraParameters2);
+
+ int32_t minFPS, maxFPS;
+ mCameraParameters->getPreviewFpsRange(&minFPS, &maxFPS);
+ string format = mFDP->ConsumeBool() ? mFDP->PickValueInArray(kValidFormats)
+ : mFDP->ConsumeRandomLengthString();
+ mCameraParameters->previewFormatToEnum(format.c_str());
+ mCameraParameters->isEmpty();
+ Vector<int32_t> formats;
+ mCameraParameters->getSupportedPreviewFormats(formats);
+}
+
+void CameraParametersFuzzer::process(const uint8_t* data, size_t size) {
+ mFDP = new FuzzedDataProvider(data, size);
+ invokeCameraParameters();
+ delete mFDP;
+}
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
+ CameraParametersFuzzer cameraParametersFuzzer;
+ cameraParametersFuzzer.process(data, size);
+ return 0;
+}
diff --git a/camera/tests/fuzzer/camera_SessionStats_fuzzer.cpp b/camera/tests/fuzzer/camera_SessionStats_fuzzer.cpp
new file mode 100644
index 0000000..5866aaf
--- /dev/null
+++ b/camera/tests/fuzzer/camera_SessionStats_fuzzer.cpp
@@ -0,0 +1,166 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <CameraSessionStats.h>
+#include <binder/Parcel.h>
+#include <fuzzer/FuzzedDataProvider.h>
+#include "camera2common.h"
+
+using namespace std;
+using namespace android;
+using namespace android::hardware;
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
+ FuzzedDataProvider fdp = FuzzedDataProvider(data, size);
+ CameraStreamStats* cameraStreamStats = nullptr;
+ Parcel parcelCamStreamStats;
+
+ if (fdp.ConsumeBool()) {
+ cameraStreamStats = new CameraStreamStats();
+ } else {
+ int32_t width = fdp.ConsumeIntegral<int32_t>();
+ if (fdp.ConsumeBool()) {
+ parcelCamStreamStats.writeInt32(width);
+ }
+ int32_t height = fdp.ConsumeIntegral<int32_t>();
+ if (fdp.ConsumeBool()) {
+ parcelCamStreamStats.writeInt32(height);
+ }
+ int32_t format = fdp.ConsumeIntegral<int32_t>();
+ if (fdp.ConsumeBool()) {
+ parcelCamStreamStats.writeInt32(format);
+ }
+ float maxPreviewFps = fdp.ConsumeFloatingPoint<float>();
+ if (fdp.ConsumeBool()) {
+ parcelCamStreamStats.writeFloat(maxPreviewFps);
+ }
+ int32_t dataSpace = fdp.ConsumeIntegral<int32_t>();
+ if (fdp.ConsumeBool()) {
+ parcelCamStreamStats.writeInt32(dataSpace);
+ }
+ int64_t usage = fdp.ConsumeIntegral<int64_t>();
+ if (fdp.ConsumeBool()) {
+ parcelCamStreamStats.writeInt64(usage);
+ }
+ int64_t requestCount = fdp.ConsumeIntegral<int64_t>();
+ if (fdp.ConsumeBool()) {
+ parcelCamStreamStats.writeInt64(requestCount);
+ }
+ int64_t errorCount = fdp.ConsumeIntegral<int64_t>();
+ if (fdp.ConsumeBool()) {
+ parcelCamStreamStats.writeInt64(errorCount);
+ }
+ int32_t maxHalBuffers = fdp.ConsumeIntegral<int32_t>();
+ if (fdp.ConsumeBool()) {
+ parcelCamStreamStats.writeInt32(maxHalBuffers);
+ }
+ int32_t maxAppBuffers = fdp.ConsumeIntegral<int32_t>();
+ if (fdp.ConsumeBool()) {
+ parcelCamStreamStats.writeInt32(maxAppBuffers);
+ }
+ int32_t dynamicRangeProfile = fdp.ConsumeIntegral<int32_t>();
+ if (fdp.ConsumeBool()) {
+ parcelCamStreamStats.writeInt32(dynamicRangeProfile);
+ }
+ int32_t streamUseCase = fdp.ConsumeIntegral<int32_t>();
+ if (fdp.ConsumeBool()) {
+ parcelCamStreamStats.writeInt32(streamUseCase);
+ }
+ int32_t colorSpace = fdp.ConsumeIntegral<int32_t>();
+ if (fdp.ConsumeBool()) {
+ parcelCamStreamStats.writeInt32(colorSpace);
+ }
+
+ cameraStreamStats = new CameraStreamStats(width, height, format, maxPreviewFps, dataSpace,
+ usage, maxHalBuffers, maxAppBuffers,
+ dynamicRangeProfile, streamUseCase, colorSpace);
+ }
+
+ parcelCamStreamStats.setDataPosition(0);
+ cameraStreamStats->readFromParcel(&parcelCamStreamStats);
+ invokeReadWriteNullParcel<CameraStreamStats>(cameraStreamStats);
+ invokeReadWriteParcel<CameraStreamStats>(cameraStreamStats);
+
+ CameraSessionStats* cameraSessionStats = nullptr;
+ Parcel parcelCamSessionStats;
+
+ if (fdp.ConsumeBool()) {
+ cameraSessionStats = new CameraSessionStats();
+ } else {
+ string camId = fdp.ConsumeRandomLengthString();
+ String16 cameraId(camId.c_str());
+ if (fdp.ConsumeBool()) {
+ parcelCamSessionStats.writeString16(cameraId);
+ }
+ int32_t facing = fdp.ConsumeIntegral<int32_t>();
+ if (fdp.ConsumeBool()) {
+ parcelCamSessionStats.writeInt32(facing);
+ }
+ int32_t newCameraState = fdp.ConsumeIntegral<int32_t>();
+ if (fdp.ConsumeBool()) {
+ parcelCamSessionStats.writeInt32(newCameraState);
+ }
+ string name = fdp.ConsumeRandomLengthString();
+ String16 clientName(name.c_str());
+ if (fdp.ConsumeBool()) {
+ parcelCamSessionStats.writeString16(clientName);
+ }
+ int32_t apiLevel = fdp.ConsumeIntegral<int32_t>();
+ if (fdp.ConsumeBool()) {
+ parcelCamSessionStats.writeInt32(apiLevel);
+ }
+ bool isNdk = fdp.ConsumeBool();
+ if (fdp.ConsumeBool()) {
+ parcelCamSessionStats.writeBool(isNdk);
+ }
+ int32_t latencyMs = fdp.ConsumeIntegral<int32_t>();
+ if (fdp.ConsumeBool()) {
+ parcelCamSessionStats.writeInt32(latencyMs);
+ }
+
+ cameraSessionStats = new CameraSessionStats(cameraId, facing, newCameraState, clientName,
+ apiLevel, isNdk, latencyMs);
+ }
+
+ if (fdp.ConsumeBool()) {
+ int32_t internalReconfigure = fdp.ConsumeIntegral<int32_t>();
+ parcelCamSessionStats.writeInt32(internalReconfigure);
+ }
+
+ if (fdp.ConsumeBool()) {
+ int64_t requestCount = fdp.ConsumeIntegral<int64_t>();
+ parcelCamSessionStats.writeInt64(requestCount);
+ }
+
+ if (fdp.ConsumeBool()) {
+ int64_t resultErrorCount = fdp.ConsumeIntegral<int64_t>();
+ parcelCamSessionStats.writeInt64(resultErrorCount);
+ }
+
+ if (fdp.ConsumeBool()) {
+ bool deviceError = fdp.ConsumeBool();
+ parcelCamSessionStats.writeBool(deviceError);
+ }
+
+ parcelCamSessionStats.setDataPosition(0);
+ cameraSessionStats->readFromParcel(&parcelCamSessionStats);
+ invokeReadWriteNullParcel<CameraSessionStats>(cameraSessionStats);
+ invokeReadWriteParcel<CameraSessionStats>(cameraSessionStats);
+
+ delete cameraStreamStats;
+ delete cameraSessionStats;
+ return 0;
+}
diff --git a/camera/tests/fuzzer/camera_c2CaptureRequest_fuzzer.cpp b/camera/tests/fuzzer/camera_c2CaptureRequest_fuzzer.cpp
new file mode 100644
index 0000000..06215a5
--- /dev/null
+++ b/camera/tests/fuzzer/camera_c2CaptureRequest_fuzzer.cpp
@@ -0,0 +1,128 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <CameraMetadata.h>
+#include <camera2/CaptureRequest.h>
+#include <fuzzer/FuzzedDataProvider.h>
+#include <gui/Surface.h>
+#include <gui/SurfaceComposerClient.h>
+#include <gui/view/Surface.h>
+#include "camera2common.h"
+
+using namespace std;
+using namespace android;
+
+constexpr int32_t kNonZeroRangeMin = 0;
+constexpr int32_t kRangeMax = 1000;
+constexpr int32_t kSizeMin = 1;
+constexpr int32_t kSizeMax = 1000;
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
+ FuzzedDataProvider fdp = FuzzedDataProvider(data, size);
+
+ sp<CaptureRequest> captureRequest = new CaptureRequest();
+ Parcel parcelCamCaptureReq;
+
+ size_t physicalCameraSettingsSize =
+ fdp.ConsumeIntegralInRange<size_t>(kNonZeroRangeMin, kRangeMax);
+ if (fdp.ConsumeBool()) {
+ parcelCamCaptureReq.writeInt32(physicalCameraSettingsSize);
+ }
+
+ for (size_t idx = 0; idx < physicalCameraSettingsSize; ++idx) {
+ string id = fdp.ConsumeRandomLengthString();
+ if (fdp.ConsumeBool()) {
+ parcelCamCaptureReq.writeString16(String16(id.c_str()));
+ }
+ CameraMetadata cameraMetadata;
+ if (fdp.ConsumeBool()) {
+ cameraMetadata = CameraMetadata();
+ } else {
+ size_t entryCapacity = fdp.ConsumeIntegralInRange<size_t>(kNonZeroRangeMin, kRangeMax);
+ size_t dataCapacity = fdp.ConsumeIntegralInRange<size_t>(kNonZeroRangeMin, kRangeMax);
+ cameraMetadata = CameraMetadata(entryCapacity, dataCapacity);
+ }
+ captureRequest->mPhysicalCameraSettings.push_back({id, cameraMetadata});
+ if (fdp.ConsumeBool()) {
+ cameraMetadata.writeToParcel(&parcelCamCaptureReq);
+ }
+ }
+
+ captureRequest->mIsReprocess = fdp.ConsumeBool();
+ if (fdp.ConsumeBool()) {
+ parcelCamCaptureReq.writeInt32(captureRequest->mIsReprocess);
+ }
+
+ captureRequest->mSurfaceConverted = fdp.ConsumeBool();
+ if (fdp.ConsumeBool() && captureRequest->mSurfaceConverted) {
+ // 0-sized array
+ parcelCamCaptureReq.writeInt32(0);
+ }
+
+ if (!captureRequest->mSurfaceConverted) {
+ size_t surfaceListSize = fdp.ConsumeIntegralInRange<size_t>(kSizeMin, kSizeMax);
+ if (fdp.ConsumeBool()) {
+ parcelCamCaptureReq.writeInt32(surfaceListSize);
+ }
+ for (size_t idx = 0; idx < surfaceListSize; ++idx) {
+ sp<SurfaceComposerClient> composerClient = new SurfaceComposerClient;
+ sp<SurfaceControl> surfaceControl = composerClient->createSurface(
+ static_cast<String8>(fdp.ConsumeRandomLengthString().c_str()) /* name */,
+ fdp.ConsumeIntegral<uint32_t>() /* width */,
+ fdp.ConsumeIntegral<uint32_t>() /* height */,
+ fdp.ConsumeIntegral<int32_t>() /* format */,
+ fdp.ConsumeIntegral<int32_t>() /* flags */);
+ if (surfaceControl) {
+ sp<Surface> surface = surfaceControl->getSurface();
+ captureRequest->mSurfaceList.push_back(surface);
+ if (fdp.ConsumeBool()) {
+ view::Surface surfaceShim;
+ surfaceShim.name = String16((fdp.ConsumeRandomLengthString()).c_str());
+ surfaceShim.graphicBufferProducer = surface->getIGraphicBufferProducer();
+ surfaceShim.writeToParcel(&parcelCamCaptureReq);
+ }
+ surface.clear();
+ }
+ composerClient.clear();
+ surfaceControl.clear();
+ }
+ }
+
+ size_t indexListSize = fdp.ConsumeIntegralInRange<size_t>(kSizeMin, kSizeMax);
+ if (fdp.ConsumeBool()) {
+ parcelCamCaptureReq.writeInt32(indexListSize);
+ }
+
+ for (size_t idx = 0; idx < indexListSize; ++idx) {
+ int32_t streamIdx = fdp.ConsumeIntegral<int32_t>();
+ int32_t surfaceIdx = fdp.ConsumeIntegral<int32_t>();
+ captureRequest->mStreamIdxList.push_back(streamIdx);
+ captureRequest->mSurfaceIdxList.push_back(surfaceIdx);
+ if (fdp.ConsumeBool()) {
+ parcelCamCaptureReq.writeInt32(streamIdx);
+ }
+ if (fdp.ConsumeBool()) {
+ parcelCamCaptureReq.writeInt32(surfaceIdx);
+ }
+ }
+
+ invokeReadWriteParcelsp<CaptureRequest>(captureRequest);
+ invokeReadWriteNullParcelsp<CaptureRequest>(captureRequest);
+ parcelCamCaptureReq.setDataPosition(0);
+ captureRequest->readFromParcel(&parcelCamCaptureReq);
+ captureRequest.clear();
+ return 0;
+}
diff --git a/camera/tests/fuzzer/camera_c2ConcurrentCamera_fuzzer.cpp b/camera/tests/fuzzer/camera_c2ConcurrentCamera_fuzzer.cpp
new file mode 100644
index 0000000..12b5bc3
--- /dev/null
+++ b/camera/tests/fuzzer/camera_c2ConcurrentCamera_fuzzer.cpp
@@ -0,0 +1,62 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <camera2/ConcurrentCamera.h>
+#include <fuzzer/FuzzedDataProvider.h>
+#include "camera2common.h"
+
+using namespace std;
+using namespace android;
+using namespace android::hardware::camera2::utils;
+
+constexpr int32_t kRangeMin = 0;
+constexpr int32_t kRangeMax = 1000;
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
+ FuzzedDataProvider fdp = FuzzedDataProvider(data, size);
+ ConcurrentCameraIdCombination camIdCombination;
+
+ if (fdp.ConsumeBool()) {
+ size_t concurrentCameraIdSize = fdp.ConsumeIntegralInRange<size_t>(kRangeMin, kRangeMax);
+ for (size_t idx = 0; idx < concurrentCameraIdSize; ++idx) {
+ string concurrentCameraId = fdp.ConsumeRandomLengthString();
+ camIdCombination.mConcurrentCameraIds.push_back(concurrentCameraId);
+ }
+ }
+
+ invokeReadWriteNullParcel<ConcurrentCameraIdCombination>(&camIdCombination);
+ invokeReadWriteParcel<ConcurrentCameraIdCombination>(&camIdCombination);
+
+ CameraIdAndSessionConfiguration camIdAndSessionConfig;
+
+ if (fdp.ConsumeBool()) {
+ camIdAndSessionConfig.mCameraId = fdp.ConsumeRandomLengthString();
+ if (fdp.ConsumeBool()) {
+ camIdAndSessionConfig.mSessionConfiguration = SessionConfiguration();
+ } else {
+ int32_t inputWidth = fdp.ConsumeIntegral<int32_t>();
+ int32_t inputHeight = fdp.ConsumeIntegral<int32_t>();
+ int32_t inputFormat = fdp.ConsumeIntegral<int32_t>();
+ int32_t operatingMode = fdp.ConsumeIntegral<int32_t>();
+ camIdAndSessionConfig.mSessionConfiguration =
+ SessionConfiguration(inputWidth, inputHeight, inputFormat, operatingMode);
+ }
+ }
+
+ invokeReadWriteNullParcel<CameraIdAndSessionConfiguration>(&camIdAndSessionConfig);
+ invokeReadWriteParcel<CameraIdAndSessionConfiguration>(&camIdAndSessionConfig);
+ return 0;
+}
diff --git a/camera/tests/fuzzer/camera_c2OutputConfiguration_fuzzer.cpp b/camera/tests/fuzzer/camera_c2OutputConfiguration_fuzzer.cpp
new file mode 100644
index 0000000..51ac4e8
--- /dev/null
+++ b/camera/tests/fuzzer/camera_c2OutputConfiguration_fuzzer.cpp
@@ -0,0 +1,111 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <camera2/OutputConfiguration.h>
+#include <camera2/SessionConfiguration.h>
+#include <fuzzer/FuzzedDataProvider.h>
+#include <gui/IGraphicBufferProducer.h>
+#include <gui/Surface.h>
+#include <gui/SurfaceComposerClient.h>
+#include "camera2common.h"
+
+using namespace std;
+using namespace android;
+using namespace android::hardware::camera2::params;
+
+constexpr int32_t kSizeMin = 0;
+constexpr int32_t kSizeMax = 1000;
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
+ FuzzedDataProvider fdp = FuzzedDataProvider(data, size);
+
+ OutputConfiguration* outputConfiguration = nullptr;
+
+ if (fdp.ConsumeBool()) {
+ outputConfiguration = new OutputConfiguration();
+ } else {
+ int32_t rotation = fdp.ConsumeIntegral<int32_t>();
+ string phyCameraId = fdp.ConsumeRandomLengthString();
+ String16 physicalCameraId(phyCameraId.c_str());
+ int32_t surfaceSetID = fdp.ConsumeIntegral<int32_t>();
+ bool isShared = fdp.ConsumeBool();
+
+ if (fdp.ConsumeBool()) {
+ sp<IGraphicBufferProducer> iGBP = nullptr;
+ sp<SurfaceComposerClient> composerClient = new SurfaceComposerClient;
+ sp<SurfaceControl> surfaceControl = composerClient->createSurface(
+ static_cast<String8>(fdp.ConsumeRandomLengthString().c_str()) /* name */,
+ fdp.ConsumeIntegral<uint32_t>() /* width */,
+ fdp.ConsumeIntegral<uint32_t>() /* height */,
+ fdp.ConsumeIntegral<int32_t>() /* format */,
+ fdp.ConsumeIntegral<int32_t>() /* flags */);
+ if (surfaceControl) {
+ sp<Surface> surface = surfaceControl->getSurface();
+ iGBP = surface->getIGraphicBufferProducer();
+ }
+ outputConfiguration = new OutputConfiguration(iGBP, rotation, physicalCameraId,
+ surfaceSetID, isShared);
+ iGBP.clear();
+ composerClient.clear();
+ surfaceControl.clear();
+ } else {
+ size_t iGBPSize = fdp.ConsumeIntegralInRange<size_t>(kSizeMin, kSizeMax);
+ vector<sp<IGraphicBufferProducer>> iGBPs;
+ for (size_t idx = 0; idx < iGBPSize; ++idx) {
+ sp<IGraphicBufferProducer> iGBP = nullptr;
+ sp<SurfaceComposerClient> composerClient = new SurfaceComposerClient;
+ sp<SurfaceControl> surfaceControl = composerClient->createSurface(
+ static_cast<String8>(fdp.ConsumeRandomLengthString().c_str()) /* name */,
+ fdp.ConsumeIntegral<uint32_t>() /* width */,
+ fdp.ConsumeIntegral<uint32_t>() /* height */,
+ fdp.ConsumeIntegral<int32_t>() /* format */,
+ fdp.ConsumeIntegral<int32_t>() /* flags */);
+ if (surfaceControl) {
+ sp<Surface> surface = surfaceControl->getSurface();
+ iGBP = surface->getIGraphicBufferProducer();
+ iGBPs.push_back(iGBP);
+ }
+ iGBP.clear();
+ composerClient.clear();
+ surfaceControl.clear();
+ }
+ outputConfiguration = new OutputConfiguration(iGBPs, rotation, physicalCameraId,
+ surfaceSetID, isShared);
+ }
+ }
+
+ outputConfiguration->getRotation();
+ outputConfiguration->getSurfaceSetID();
+ outputConfiguration->getSurfaceType();
+ outputConfiguration->getWidth();
+ outputConfiguration->getHeight();
+ outputConfiguration->isDeferred();
+ outputConfiguration->isShared();
+ outputConfiguration->getPhysicalCameraId();
+
+ OutputConfiguration outputConfiguration2;
+ outputConfiguration->gbpsEqual(outputConfiguration2);
+ outputConfiguration->sensorPixelModesUsedEqual(outputConfiguration2);
+ outputConfiguration->gbpsLessThan(outputConfiguration2);
+ outputConfiguration->sensorPixelModesUsedLessThan(outputConfiguration2);
+ outputConfiguration->getGraphicBufferProducers();
+ sp<IGraphicBufferProducer> gbp;
+ outputConfiguration->addGraphicProducer(gbp);
+ invokeReadWriteNullParcel<OutputConfiguration>(outputConfiguration);
+ invokeReadWriteParcel<OutputConfiguration>(outputConfiguration);
+ delete outputConfiguration;
+ return 0;
+}
diff --git a/camera/tests/fuzzer/camera_c2SessionConfiguration_fuzzer.cpp b/camera/tests/fuzzer/camera_c2SessionConfiguration_fuzzer.cpp
new file mode 100644
index 0000000..b2de95d
--- /dev/null
+++ b/camera/tests/fuzzer/camera_c2SessionConfiguration_fuzzer.cpp
@@ -0,0 +1,89 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <camera2/OutputConfiguration.h>
+#include <camera2/SessionConfiguration.h>
+#include <fuzzer/FuzzedDataProvider.h>
+#include <gui/IGraphicBufferProducer.h>
+#include <gui/Surface.h>
+#include <gui/SurfaceComposerClient.h>
+#include "camera2common.h"
+
+using namespace std;
+using namespace android;
+using namespace android::hardware::camera2::params;
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
+ FuzzedDataProvider fdp = FuzzedDataProvider(data, size);
+
+ SessionConfiguration* sessionConfiguration = nullptr;
+
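+ // Create the SessionConfiguration either with defaults or with fuzzed input stream parameters.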
+ if (fdp.ConsumeBool()) {
+ sessionConfiguration = new SessionConfiguration();
+ } else {
+ int32_t inputWidth = fdp.ConsumeIntegral<int32_t>();
+ int32_t inputHeight = fdp.ConsumeIntegral<int32_t>();
+ int32_t inputFormat = fdp.ConsumeIntegral<int32_t>();
+ int32_t operatingMode = fdp.ConsumeIntegral<int32_t>();
+ sessionConfiguration =
+ new SessionConfiguration(inputWidth, inputHeight, inputFormat, operatingMode);
+ }
+
+ sessionConfiguration->getInputWidth();
+ sessionConfiguration->getInputHeight();
+ sessionConfiguration->getInputFormat();
+ sessionConfiguration->getOperatingMode();
+
+ OutputConfiguration* outputConfiguration = nullptr;
+
+ if (fdp.ConsumeBool()) {
+ outputConfiguration = new OutputConfiguration();
+ sessionConfiguration->addOutputConfiguration(*outputConfiguration);
+ } else {
+ sp<IGraphicBufferProducer> iGBP = nullptr;
+ sp<SurfaceComposerClient> composerClient = new SurfaceComposerClient;
+ sp<SurfaceControl> surfaceControl = composerClient->createSurface(
+ static_cast<String8>(fdp.ConsumeRandomLengthString().c_str()),
+ fdp.ConsumeIntegral<uint32_t>(), fdp.ConsumeIntegral<uint32_t>(),
+ fdp.ConsumeIntegral<int32_t>(), fdp.ConsumeIntegral<int32_t>());
+ if (surfaceControl) {
+ sp<Surface> surface = surfaceControl->getSurface();
+ iGBP = surface->getIGraphicBufferProducer();
+ surface.clear();
+ }
+ int32_t rotation = fdp.ConsumeIntegral<int32_t>();
+ string phyCameraId = fdp.ConsumeRandomLengthString();
+ String16 physicalCameraId(phyCameraId.c_str());
+ int32_t surfaceSetID = fdp.ConsumeIntegral<int32_t>();
+ bool isShared = fdp.ConsumeBool();
+ outputConfiguration =
+ new OutputConfiguration(iGBP, rotation, physicalCameraId, surfaceSetID, isShared);
+ sessionConfiguration->addOutputConfiguration(*outputConfiguration);
+ }
+
+ sessionConfiguration->getOutputConfigurations();
+ SessionConfiguration sessionConfiguration2;
+ sessionConfiguration->outputsEqual(sessionConfiguration2);
+ sessionConfiguration->outputsLessThan(sessionConfiguration2);
+ sessionConfiguration->inputIsMultiResolution();
+
+ invokeReadWriteNullParcel<SessionConfiguration>(sessionConfiguration);
+ invokeReadWriteParcel<SessionConfiguration>(sessionConfiguration);
+
+ delete sessionConfiguration;
+ delete outputConfiguration;
+ return 0;
+}
diff --git a/camera/tests/fuzzer/camera_c2SubmitInfo_fuzzer.cpp b/camera/tests/fuzzer/camera_c2SubmitInfo_fuzzer.cpp
new file mode 100644
index 0000000..dc40b0f
--- /dev/null
+++ b/camera/tests/fuzzer/camera_c2SubmitInfo_fuzzer.cpp
@@ -0,0 +1,32 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <camera2/SubmitInfo.h>
+#include <fuzzer/FuzzedDataProvider.h>
+#include "camera2common.h"
+
+using namespace std;
+using namespace android;
+using namespace android::hardware::camera2::utils;
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
+ FuzzedDataProvider fdp = FuzzedDataProvider(data, size);
+ SubmitInfo submitInfo;
+ submitInfo.mRequestId = fdp.ConsumeIntegral<int32_t>();
+ submitInfo.mLastFrameNumber = fdp.ConsumeIntegral<int64_t>();
+ invokeReadWriteParcel<SubmitInfo>(&submitInfo);
+ return 0;
+}
diff --git a/camera/tests/fuzzer/camera_captureResult_fuzzer.cpp b/camera/tests/fuzzer/camera_captureResult_fuzzer.cpp
new file mode 100644
index 0000000..03cf9c4
--- /dev/null
+++ b/camera/tests/fuzzer/camera_captureResult_fuzzer.cpp
@@ -0,0 +1,71 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <CaptureResult.h>
+#include <fuzzer/FuzzedDataProvider.h>
+#include "camera2common.h"
+
+using namespace std;
+using namespace android;
+using namespace android::hardware::camera2::impl;
+
+constexpr int32_t kSizeMin = 0;
+constexpr int32_t kSizeMax = 1000;
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
+ FuzzedDataProvider fdp = FuzzedDataProvider(data, size);
+ PhysicalCaptureResultInfo* physicalCaptureResultInfo = nullptr;
+
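+ // Construct either a default PhysicalCaptureResultInfo or one built from a fuzzed camera id
+ // and an empty CameraMetadata, then round-trip it through a Parcel.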
+ if (fdp.ConsumeBool()) {
+ physicalCaptureResultInfo = new PhysicalCaptureResultInfo();
+ } else {
+ string camId = fdp.ConsumeRandomLengthString();
+ String16 cameraId(camId.c_str());
+ CameraMetadata cameraMetadata = CameraMetadata();
+ physicalCaptureResultInfo = new PhysicalCaptureResultInfo(cameraId, cameraMetadata);
+ }
+
+ invokeReadWriteParcel<PhysicalCaptureResultInfo>(physicalCaptureResultInfo);
+
+ CaptureResult* captureResult = new CaptureResult();
+
+ if (fdp.ConsumeBool()) {
+ captureResult->mMetadata = CameraMetadata();
+ }
+ if (fdp.ConsumeBool()) {
+ captureResult->mResultExtras = CaptureResultExtras();
+ string errCamId = fdp.ConsumeRandomLengthString();
+ String16 errCameraId(errCamId.c_str());
+ captureResult->mResultExtras.errorPhysicalCameraId = errCameraId;
+ captureResult->mResultExtras.isValid();
+ invokeReadWriteNullParcel<CaptureResultExtras>(&(captureResult->mResultExtras));
+ }
+ if (fdp.ConsumeBool()) {
+ size_t physicalMetadatasSize = fdp.ConsumeIntegralInRange<size_t>(kSizeMin, kSizeMax);
+ for (size_t idx = 0; idx < physicalMetadatasSize; ++idx) {
+ captureResult->mPhysicalMetadatas.push_back(PhysicalCaptureResultInfo());
+ }
+ }
+
+ invokeReadWriteNullParcel<CaptureResult>(captureResult);
+ invokeReadWriteParcel<CaptureResult>(captureResult);
+ CaptureResult captureResult2(*captureResult);
+ CaptureResult captureResult3(move(captureResult2));
+
+ delete captureResult;
+ delete physicalCaptureResultInfo;
+ return 0;
+}
diff --git a/camera/tests/fuzzer/camera_fuzzer.cpp b/camera/tests/fuzzer/camera_fuzzer.cpp
new file mode 100644
index 0000000..f45500e
--- /dev/null
+++ b/camera/tests/fuzzer/camera_fuzzer.cpp
@@ -0,0 +1,404 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <Camera.h>
+#include <CameraBase.h>
+#include <CameraMetadata.h>
+#include <CameraParameters.h>
+#include <CameraUtils.h>
+#include <VendorTagDescriptor.h>
+#include <binder/IMemory.h>
+#include <binder/MemoryDealer.h>
+#include <fcntl.h>
+#include <fuzzer/FuzzedDataProvider.h>
+#include <gui/IGraphicBufferProducer.h>
+#include <gui/Surface.h>
+#include <gui/SurfaceComposerClient.h>
+#include <unistd.h>
+#include <utils/Log.h>
+#include "camera2common.h"
+#include <android/hardware/ICameraService.h>
+
+using namespace std;
+using namespace android;
+using namespace android::hardware;
+
+constexpr int32_t kFrameRateMin = 1;
+constexpr int32_t kFrameRateMax = 120;
+constexpr int32_t kCamIdMin = 0;
+constexpr int32_t kCamIdMax = 1;
+constexpr int32_t kNumMin = 0;
+constexpr int32_t kNumMax = 1024;
+constexpr int32_t kMemoryDealerSize = 1000;
+constexpr int32_t kRangeMin = 0;
+constexpr int32_t kRangeMax = 1000;
+constexpr int32_t kSizeMin = 0;
+constexpr int32_t kSizeMax = 1000;
+
+constexpr int32_t kValidCMD[] = {CAMERA_CMD_START_SMOOTH_ZOOM,
+ CAMERA_CMD_STOP_SMOOTH_ZOOM,
+ CAMERA_CMD_SET_DISPLAY_ORIENTATION,
+ CAMERA_CMD_ENABLE_SHUTTER_SOUND,
+ CAMERA_CMD_PLAY_RECORDING_SOUND,
+ CAMERA_CMD_START_FACE_DETECTION,
+ CAMERA_CMD_STOP_FACE_DETECTION,
+ CAMERA_CMD_ENABLE_FOCUS_MOVE_MSG,
+ CAMERA_CMD_PING,
+ CAMERA_CMD_SET_VIDEO_BUFFER_COUNT,
+ CAMERA_CMD_SET_VIDEO_FORMAT};
+
+constexpr int32_t kValidVideoBufferMode[] = {ICamera::VIDEO_BUFFER_MODE_DATA_CALLBACK_YUV,
+ ICamera::VIDEO_BUFFER_MODE_DATA_CALLBACK_METADATA,
+ ICamera::VIDEO_BUFFER_MODE_BUFFER_QUEUE};
+
+constexpr int32_t kValidPreviewCallbackFlag[] = {
+ CAMERA_FRAME_CALLBACK_FLAG_ENABLE_MASK, CAMERA_FRAME_CALLBACK_FLAG_ONE_SHOT_MASK,
+ CAMERA_FRAME_CALLBACK_FLAG_COPY_OUT_MASK, CAMERA_FRAME_CALLBACK_FLAG_NOOP,
+ CAMERA_FRAME_CALLBACK_FLAG_CAMCORDER, CAMERA_FRAME_CALLBACK_FLAG_CAMERA,
+ CAMERA_FRAME_CALLBACK_FLAG_BARCODE_SCANNER};
+
+constexpr int32_t kValidFacing[] = {android::hardware::CAMERA_FACING_BACK,
+ android::hardware::CAMERA_FACING_FRONT};
+
+constexpr int32_t kValidOrientation[] = {0, 90, 180, 270};
+
+class TestCameraListener : public CameraListener {
+ public:
+ virtual ~TestCameraListener() = default;
+
+ void notify(int32_t /*msgType*/, int32_t /*ext1*/, int32_t /*ext2*/) override { return; };
+ void postData(int32_t /*msgType*/, const sp<IMemory>& /*dataPtr*/,
+ camera_frame_metadata_t* /*metadata*/) override {
+ return;
+ };
+ void postDataTimestamp(nsecs_t /*timestamp*/, int32_t /*msgType*/,
+ const sp<IMemory>& /*dataPtr*/) override {
+ return;
+ };
+ void postRecordingFrameHandleTimestamp(nsecs_t /*timestamp*/,
+ native_handle_t* /*handle*/) override {
+ return;
+ };
+ void postRecordingFrameHandleTimestampBatch(
+ const std::vector<nsecs_t>& /*timestamps*/,
+ const std::vector<native_handle_t*>& /*handles*/) override {
+ return;
+ };
+};
+
+class CameraFuzzer : public ::android::hardware::BnCameraClient {
+ public:
+ void process(const uint8_t* data, size_t size);
+ ~CameraFuzzer() {
+ delete mCameraMetadata;
+ mComposerClient.clear();
+ mSurfaceControl.clear();
+ mSurface.clear();
+ mCamera.clear();
+ mMemoryDealer.clear();
+ mIMem.clear();
+ mCameraListener.clear();
+ mCameraService.clear();
+ }
+
+ private:
+ bool initCamera();
+ void initCameraMetadata();
+ void invokeCamera();
+ void invokeCameraUtils();
+ void invokeCameraBase();
+ void invokeCameraMetadata();
+ void invokeSetParameters();
+ sp<Camera> mCamera = nullptr;
+ CameraMetadata* mCameraMetadata = nullptr;
+ sp<SurfaceComposerClient> mComposerClient = nullptr;
+ sp<SurfaceControl> mSurfaceControl = nullptr;
+ sp<Surface> mSurface = nullptr;
+ sp<MemoryDealer> mMemoryDealer = nullptr;
+ sp<IMemory> mIMem = nullptr;
+ sp<TestCameraListener> mCameraListener = nullptr;
+ sp<ICameraService> mCameraService = nullptr;
+ sp<ICamera> cameraDevice = nullptr;
+ FuzzedDataProvider* mFDP = nullptr;
+
+ // CameraClient interface
+ void notifyCallback(int32_t, int32_t, int32_t) override { return; };
+ void dataCallback(int32_t, const sp<IMemory>&, camera_frame_metadata_t*) override { return; };
+ void dataCallbackTimestamp(nsecs_t, int32_t, const sp<IMemory>&) override { return; };
+ void recordingFrameHandleCallbackTimestamp(nsecs_t, native_handle_t*) override { return; };
+ void recordingFrameHandleCallbackTimestampBatch(const std::vector<nsecs_t>&,
+ const std::vector<native_handle_t*>&) override {
+ return;
+ };
+};
+
+bool CameraFuzzer::initCamera() {
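+ // Connect to the native camera service ("media.camera"); this fuzzer object itself is the
+ // ICameraClient (BnCameraClient) passed to ICameraService::connect().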
+ ProcessState::self()->startThreadPool();
+ sp<IServiceManager> sm = defaultServiceManager();
+ sp<IBinder> binder = sm->getService(String16("media.camera"));
+ mCameraService = interface_cast<ICameraService>(binder);
+ mCameraService->connect(this, mFDP->ConsumeIntegral<int32_t>() /* cameraId */,
+ String16("CAMERAFUZZ"), hardware::ICameraService::USE_CALLING_UID,
+ hardware::ICameraService::USE_CALLING_PID,
+ /*targetSdkVersion*/ __ANDROID_API_FUTURE__, &cameraDevice);
+ mCamera = Camera::create(cameraDevice);
+ if (!mCamera) {
+ return false;
+ }
+ return true;
+}
+
+void CameraFuzzer::invokeSetParameters() {
+ String8 s = mCamera->getParameters();
+ CameraParameters params(s);
+ int32_t width = mFDP->ConsumeIntegral<int32_t>();
+ int32_t height = mFDP->ConsumeIntegral<int32_t>();
+ params.setVideoSize(width, height);
+ int32_t frameRate = mFDP->ConsumeIntegralInRange<int32_t>(kFrameRateMin, kFrameRateMax);
+ params.setPreviewFrameRate(frameRate);
+ mCamera->setParameters(params.flatten());
+}
+
+void CameraFuzzer::invokeCamera() {
+ if (!initCamera()) {
+ return;
+ }
+
+ int32_t cameraId = mFDP->ConsumeIntegralInRange<int32_t>(kCamIdMin, kCamIdMax);
+ Camera::getNumberOfCameras();
+ CameraInfo cameraInfo;
+ cameraInfo.facing = mFDP->ConsumeBool() ? mFDP->PickValueInArray(kValidFacing)
+ : mFDP->ConsumeIntegral<int>();
+ cameraInfo.orientation = mFDP->ConsumeBool() ? mFDP->PickValueInArray(kValidOrientation)
+ : mFDP->ConsumeIntegral<int>();
+ Camera::getCameraInfo(cameraId, &cameraInfo);
+ mCamera->reconnect();
+
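+ // Drive the preview/recording lifecycle against a SurfaceControl-backed surface.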
+ mComposerClient = new SurfaceComposerClient;
+ mSurfaceControl = mComposerClient->createSurface(
+ static_cast<String8>(mFDP->ConsumeRandomLengthString().c_str()) /* name */,
+ mFDP->ConsumeIntegral<uint32_t>() /* width */,
+ mFDP->ConsumeIntegral<uint32_t>() /* height */,
+ mFDP->ConsumeIntegral<int32_t>() /* format */,
+ mFDP->ConsumeIntegral<int32_t>() /* flags */);
+ if (mSurfaceControl) {
+ mSurface = mSurfaceControl->getSurface();
+ mCamera->setPreviewTarget(mSurface->getIGraphicBufferProducer());
+ mCamera->startPreview();
+ mCamera->stopPreview();
+ mCamera->previewEnabled();
+ mCamera->startRecording();
+ mCamera->stopRecording();
+ }
+
+ mCamera->lock();
+ mCamera->unlock();
+ mCamera->autoFocus();
+ mCamera->cancelAutoFocus();
+
+ int32_t msgType = mFDP->ConsumeIntegral<int32_t>();
+ mCamera->takePicture(msgType);
+ invokeSetParameters();
+ int32_t cmd;
+ if (mFDP->ConsumeBool()) {
+ cmd = mFDP->PickValueInArray(kValidCMD);
+ } else {
+ cmd = mFDP->ConsumeIntegral<int32_t>();
+ }
+ int32_t arg1 = mFDP->ConsumeIntegral<int32_t>();
+ int32_t arg2 = mFDP->ConsumeIntegral<int32_t>();
+ mCamera->sendCommand(cmd, arg1, arg2);
+
+ int32_t videoBufferMode = mFDP->PickValueInArray(kValidVideoBufferMode);
+ mCamera->setVideoBufferMode(videoBufferMode);
+ if (mSurfaceControl) {
+ mSurface = mSurfaceControl->getSurface();
+ mCamera->setVideoTarget(mSurface->getIGraphicBufferProducer());
+ }
+ mCameraListener = sp<TestCameraListener>::make();
+ mCamera->setListener(mCameraListener);
+ int32_t previewCallbackFlag;
+ if (mFDP->ConsumeBool()) {
+ previewCallbackFlag = mFDP->PickValueInArray(kValidPreviewCallbackFlag);
+ } else {
+ previewCallbackFlag = mFDP->ConsumeIntegral<int32_t>();
+ }
+ mCamera->setPreviewCallbackFlags(previewCallbackFlag);
+ if (mSurfaceControl) {
+ mSurface = mSurfaceControl->getSurface();
+ mCamera->setPreviewCallbackTarget(mSurface->getIGraphicBufferProducer());
+ }
+
+ mCamera->getRecordingProxy();
+ int32_t mode = mFDP->ConsumeIntegral<int32_t>();
+ mCamera->setAudioRestriction(mode);
+ mCamera->getGlobalAudioRestriction();
+ mCamera->recordingEnabled();
+
+ mMemoryDealer = new MemoryDealer(kMemoryDealerSize);
+ mIMem = mMemoryDealer->allocate(kMemoryDealerSize);
+ mCamera->releaseRecordingFrame(mIMem);
+
+ int32_t numFds = mFDP->ConsumeIntegralInRange<int32_t>(kNumMin, kNumMax);
+ int32_t numInts = mFDP->ConsumeIntegralInRange<int32_t>(kNumMin, kNumMax);
+ native_handle_t* handle = native_handle_create(numFds, numInts);
+ mCamera->releaseRecordingFrameHandle(handle);
+
+ int32_t msgTypeNC = mFDP->ConsumeIntegral<int32_t>();
+ int32_t ext = mFDP->ConsumeIntegral<int32_t>();
+ int32_t ext2 = mFDP->ConsumeIntegral<int32_t>();
+ mCamera->notifyCallback(msgTypeNC, ext, ext2);
+
+ int64_t timestamp = mFDP->ConsumeIntegral<int64_t>();
+ mCamera->dataCallbackTimestamp(timestamp, msgTypeNC, mIMem);
+ mCamera->recordingFrameHandleCallbackTimestamp(timestamp, handle);
+}
+
+void CameraFuzzer::invokeCameraUtils() {
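+ // Build a minimal static metadata (sensor orientation + lens facing) to feed
+ // CameraUtils::getRotationTransform().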
+ CameraMetadata staticMetadata;
+ int32_t orientVal = mFDP->ConsumeBool() ? mFDP->PickValueInArray(kValidOrientation)
+ : mFDP->ConsumeIntegral<int32_t>();
+ uint8_t facingVal = mFDP->ConsumeBool() ? mFDP->PickValueInArray(kValidFacing)
+ : mFDP->ConsumeIntegral<uint8_t>();
+ staticMetadata.update(ANDROID_SENSOR_ORIENTATION, &orientVal, 1);
+ staticMetadata.update(ANDROID_LENS_FACING, &facingVal, 1);
+ int32_t transform = 0;
+ CameraUtils::getRotationTransform(
+ staticMetadata, mFDP->ConsumeIntegral<int32_t>() /* mirrorMode */, &transform /*out*/);
+ CameraUtils::isCameraServiceDisabled();
+}
+
+void CameraFuzzer::invokeCameraBase() {
+ CameraInfo cameraInfo;
+ cameraInfo.facing = mFDP->ConsumeBool() ? mFDP->PickValueInArray(kValidFacing)
+ : mFDP->ConsumeIntegral<int>();
+ cameraInfo.orientation = mFDP->ConsumeBool() ? mFDP->PickValueInArray(kValidOrientation)
+ : mFDP->ConsumeIntegral<int>();
+ invokeReadWriteParcel<CameraInfo>(&cameraInfo);
+
+ CameraStatus* cameraStatus = nullptr;
+
+ if (mFDP->ConsumeBool()) {
+ cameraStatus = new CameraStatus();
+ } else {
+ string cid = mFDP->ConsumeRandomLengthString();
+ String8 id(cid.c_str());
+ int32_t status = mFDP->ConsumeIntegral<int32_t>();
+ size_t unavailSubIdsSize = mFDP->ConsumeIntegralInRange<size_t>(kSizeMin, kSizeMax);
+ vector<String8> unavailSubIds;
+ for (size_t idx = 0; idx < unavailSubIdsSize; ++idx) {
+ string subId = mFDP->ConsumeRandomLengthString();
+ String8 unavailSubId(subId.c_str());
+ unavailSubIds.push_back(unavailSubId);
+ }
+ string clientPkg = mFDP->ConsumeRandomLengthString();
+ String8 clientPackage(clientPkg.c_str());
+ cameraStatus = new CameraStatus(id, status, unavailSubIds, clientPackage);
+ }
+
+ invokeReadWriteParcel<CameraStatus>(cameraStatus);
+ delete cameraStatus;
+}
+
+void CameraFuzzer::initCameraMetadata() {
+ if (mFDP->ConsumeBool()) {
+ mCameraMetadata = new CameraMetadata();
+ } else {
+ size_t entryCapacity = mFDP->ConsumeIntegralInRange<size_t>(kSizeMin, kSizeMax);
+ size_t dataCapacity = mFDP->ConsumeIntegralInRange<size_t>(kSizeMin, kSizeMax);
+ mCameraMetadata = new CameraMetadata(entryCapacity, dataCapacity);
+ }
+}
+
+void CameraFuzzer::invokeCameraMetadata() {
+ initCameraMetadata();
+
+ const camera_metadata_t* metadataBuffer = nullptr;
+ if (mFDP->ConsumeBool()) {
+ metadataBuffer = mCameraMetadata->getAndLock();
+ }
+
+ mCameraMetadata->entryCount();
+ mCameraMetadata->isEmpty();
+ mCameraMetadata->bufferSize();
+ mCameraMetadata->sort();
+
+ uint32_t tag = mFDP->ConsumeIntegral<uint32_t>();
+ uint8_t dataUint8 = mFDP->ConsumeIntegral<uint8_t>();
+ int32_t dataInt32 = mFDP->ConsumeIntegral<int32_t>();
+ int64_t dataInt64 = mFDP->ConsumeIntegral<int64_t>();
+ float dataFloat = mFDP->ConsumeFloatingPoint<float>();
+ double dataDouble = mFDP->ConsumeFloatingPoint<double>();
+ camera_metadata_rational dataRational;
+ dataRational.numerator = mFDP->ConsumeIntegral<int32_t>();
+ dataRational.denominator = mFDP->ConsumeIntegral<int32_t>();
+ string dataStr = mFDP->ConsumeRandomLengthString();
+ String8 dataString(dataStr.c_str());
+ size_t data_count = 1;
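+ // Update the same fuzzer-chosen tag with each of the scalar payload types and a string.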
+ mCameraMetadata->update(tag, &dataUint8, data_count);
+ mCameraMetadata->update(tag, &dataInt32, data_count);
+ mCameraMetadata->update(tag, &dataFloat, data_count);
+ mCameraMetadata->update(tag, &dataInt64, data_count);
+ mCameraMetadata->update(tag, &dataRational, data_count);
+ mCameraMetadata->update(tag, &dataDouble, data_count);
+ mCameraMetadata->update(tag, dataString);
+
+ uint32_t tagExists = mFDP->ConsumeBool() ? tag : mFDP->ConsumeIntegral<uint32_t>();
+ mCameraMetadata->exists(tagExists);
+
+ uint32_t tagFind = mFDP->ConsumeBool() ? tag : mFDP->ConsumeIntegral<uint32_t>();
+ mCameraMetadata->find(tagFind);
+
+ uint32_t tagErase = mFDP->ConsumeBool() ? tag : mFDP->ConsumeIntegral<uint32_t>();
+ mCameraMetadata->erase(tagErase);
+
+ mCameraMetadata->unlock(metadataBuffer);
+ std::vector<int32_t> tagsRemoved;
+ uint64_t vendorId = mFDP->ConsumeIntegral<uint64_t>();
+ mCameraMetadata->removePermissionEntries(vendorId, &tagsRemoved);
+
+ string name = mFDP->ConsumeRandomLengthString();
+ VendorTagDescriptor vTags;
+ uint32_t tagName = mFDP->ConsumeIntegral<uint32_t>();
+ mCameraMetadata->getTagFromName(name.c_str(), &vTags, &tagName);
+
+ invokeReadWriteNullParcel<CameraMetadata>(mCameraMetadata);
+ invokeReadWriteParcel<CameraMetadata>(mCameraMetadata);
+
+ int32_t fd = open("/dev/null", O_CLOEXEC | O_RDWR | O_CREAT);
+ int32_t verbosity = mFDP->ConsumeIntegralInRange<int32_t>(kRangeMin, kRangeMax);
+ int32_t indentation = mFDP->ConsumeIntegralInRange<int32_t>(kRangeMin, kRangeMax);
+ mCameraMetadata->dump(fd, verbosity, indentation);
+
+ CameraMetadata metadataCopy(mCameraMetadata->release());
+ CameraMetadata otherCameraMetadata;
+ mCameraMetadata->swap(otherCameraMetadata);
+ close(fd);
+}
+
+void CameraFuzzer::process(const uint8_t* data, size_t size) {
+ mFDP = new FuzzedDataProvider(data, size);
+ invokeCamera();
+ invokeCameraUtils();
+ invokeCameraBase();
+ invokeCameraMetadata();
+ delete mFDP;
+}
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
+ sp<CameraFuzzer> cameraFuzzer = new CameraFuzzer();
+ cameraFuzzer->process(data, size);
+ cameraFuzzer.clear();
+ return 0;
+}
diff --git a/camera/tests/fuzzer/camera_vendorTagDescriptor_fuzzer.cpp b/camera/tests/fuzzer/camera_vendorTagDescriptor_fuzzer.cpp
new file mode 100644
index 0000000..e14d9ce
--- /dev/null
+++ b/camera/tests/fuzzer/camera_vendorTagDescriptor_fuzzer.cpp
@@ -0,0 +1,207 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <VendorTagDescriptor.h>
+#include <binder/Parcel.h>
+#include <camera_metadata_tests_fake_vendor.h>
+#include <fcntl.h>
+#include <fuzzer/FuzzedDataProvider.h>
+#include <system/camera_vendor_tags.h>
+#include <unistd.h>
+
+#include <camera_metadata_hidden.h>
+#include "camera2common.h"
+
+using namespace std;
+using namespace android;
+
+constexpr int32_t kRangeMin = 0;
+constexpr int32_t kRangeMax = 1000;
+constexpr int32_t kVendorTagDescriptorId = -1;
+
+extern "C" {
+
+static int zero_get_tag_count(const vendor_tag_ops_t*) {
+ return 0;
+}
+
+static int default_get_tag_count(const vendor_tag_ops_t*) {
+ return VENDOR_TAG_COUNT_ERR;
+}
+
+static void default_get_all_tags(const vendor_tag_ops_t*, uint32_t*) {}
+
+static const char* default_get_section_name(const vendor_tag_ops_t*, uint32_t) {
+ return VENDOR_SECTION_NAME_ERR;
+}
+
+static const char* default_get_tag_name(const vendor_tag_ops_t*, uint32_t) {
+ return VENDOR_TAG_NAME_ERR;
+}
+
+static int default_get_tag_type(const vendor_tag_ops_t*, uint32_t) {
+ return VENDOR_TAG_TYPE_ERR;
+}
+
+} /*extern "C"*/
+
+static void FillWithDefaults(vendor_tag_ops_t* vOps) {
+ vOps->get_tag_count = default_get_tag_count;
+ vOps->get_all_tags = default_get_all_tags;
+ vOps->get_section_name = default_get_section_name;
+ vOps->get_tag_name = default_get_tag_name;
+ vOps->get_tag_type = default_get_tag_type;
+}
+
+class VendorTagDescriptorFuzzer {
+ public:
+ void process(const uint8_t* data, size_t size);
+ ~VendorTagDescriptorFuzzer() {
+ mVendorTagDescriptor.clear();
+ mVendorTagDescriptorCache.clear();
+ }
+
+ private:
+ void initVendorTagDescriptor();
+ void invokeVendorTagDescriptor();
+ void invokeVendorTagDescriptorCache();
+ void invokeVendorTagErrorConditions();
+ sp<VendorTagDescriptor> mVendorTagDescriptor = nullptr;
+ sp<VendorTagDescriptorCache> mVendorTagDescriptorCache = nullptr;
+ FuzzedDataProvider* mFDP = nullptr;
+};
+
+void VendorTagDescriptorFuzzer::initVendorTagDescriptor() {
+ if (mFDP->ConsumeBool()) {
+ mVendorTagDescriptor = new VendorTagDescriptor();
+ } else {
+ const vendor_tag_ops_t* vOps = &fakevendor_ops;
+ VendorTagDescriptor::createDescriptorFromOps(vOps, mVendorTagDescriptor);
+ }
+}
+
+void VendorTagDescriptorFuzzer::invokeVendorTagDescriptor() {
+ initVendorTagDescriptor();
+
+ sp<VendorTagDescriptor> vdesc = new VendorTagDescriptor();
+ vdesc->copyFrom(*mVendorTagDescriptor);
+ VendorTagDescriptor::setAsGlobalVendorTagDescriptor(mVendorTagDescriptor);
+ VendorTagDescriptor::getGlobalVendorTagDescriptor();
+
+ int32_t tagCount = mVendorTagDescriptor->getTagCount();
+ if (tagCount > 0) {
+ uint32_t tagArray[tagCount];
+ mVendorTagDescriptor->getTagArray(tagArray);
+ uint32_t tag;
+ for (int32_t i = 0; i < tagCount; ++i) {
+ tag = tagArray[i];
+ get_local_camera_metadata_section_name_vendor_id(tag, kVendorTagDescriptorId);
+ get_local_camera_metadata_tag_name_vendor_id(tag, kVendorTagDescriptorId);
+ get_local_camera_metadata_tag_type_vendor_id(tag, kVendorTagDescriptorId);
+ mVendorTagDescriptor->getSectionIndex(tag);
+ }
+ mVendorTagDescriptor->getAllSectionNames();
+ }
+
+ String8 name((mFDP->ConsumeRandomLengthString()).c_str());
+ String8 section((mFDP->ConsumeRandomLengthString()).c_str());
+ uint32_t lookupTag;
+ mVendorTagDescriptor->lookupTag(name, section, &lookupTag);
+
+ int32_t fd = open("/dev/null", O_CLOEXEC | O_RDWR | O_CREAT);
+ int32_t verbosity = mFDP->ConsumeIntegralInRange<int32_t>(kRangeMin, kRangeMax);
+ int32_t indentation = mFDP->ConsumeIntegralInRange<int32_t>(kRangeMin, kRangeMax);
+ mVendorTagDescriptor->dump(fd, verbosity, indentation);
+
+ invokeReadWriteParcelsp<VendorTagDescriptor>(mVendorTagDescriptor);
+ VendorTagDescriptor::clearGlobalVendorTagDescriptor();
+ vdesc.clear();
+ close(fd);
+}
+
+void VendorTagDescriptorFuzzer::invokeVendorTagDescriptorCache() {
+ mVendorTagDescriptorCache = new VendorTagDescriptorCache();
+ uint64_t id = mFDP->ConsumeIntegral<uint64_t>();
+ initVendorTagDescriptor();
+
+ mVendorTagDescriptorCache->addVendorDescriptor(id, mVendorTagDescriptor);
+ VendorTagDescriptorCache::setAsGlobalVendorTagCache(mVendorTagDescriptorCache);
+ VendorTagDescriptorCache::getGlobalVendorTagCache();
+ sp<VendorTagDescriptor> tagDesc;
+ mVendorTagDescriptorCache->getVendorTagDescriptor(id, &tagDesc);
+
+ int32_t tagCount = mVendorTagDescriptorCache->getTagCount(id);
+ if (tagCount > 0) {
+ uint32_t tagArray[tagCount];
+ mVendorTagDescriptorCache->getTagArray(tagArray, id);
+ uint32_t tag;
+ for (int32_t i = 0; i < tagCount; ++i) {
+ tag = tagArray[i];
+ get_local_camera_metadata_section_name_vendor_id(tag, id);
+ get_local_camera_metadata_tag_name_vendor_id(tag, id);
+ get_local_camera_metadata_tag_type_vendor_id(tag, id);
+ }
+ }
+
+ int32_t fd = open("/dev/null", O_CLOEXEC | O_RDWR | O_CREAT);
+ int32_t verbosity = mFDP->ConsumeIntegralInRange<int>(kRangeMin, kRangeMax);
+ int32_t indentation = mFDP->ConsumeIntegralInRange<int>(kRangeMin, kRangeMax);
+ mVendorTagDescriptorCache->dump(fd, verbosity, indentation);
+
+ invokeReadWriteParcelsp<VendorTagDescriptorCache>(mVendorTagDescriptorCache);
+ VendorTagDescriptorCache::isVendorCachePresent(id);
+ mVendorTagDescriptorCache->getVendorIdsAndTagDescriptors();
+ mVendorTagDescriptorCache->clearGlobalVendorTagCache();
+ tagDesc.clear();
+ close(fd);
+}
+
+void VendorTagDescriptorFuzzer::invokeVendorTagErrorConditions() {
+ sp<VendorTagDescriptor> vDesc;
+ vendor_tag_ops_t vOps;
+ FillWithDefaults(&vOps);
+ vOps.get_tag_count = zero_get_tag_count;
+
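+ // Exercise the error paths: either a null ops table or one that reports zero vendor tags.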
+ if (mFDP->ConsumeBool()) {
+ VendorTagDescriptor::createDescriptorFromOps(/*vOps*/ NULL, vDesc);
+ } else {
+ VendorTagDescriptor::createDescriptorFromOps(&vOps, vDesc);
+ int32_t tagCount = vDesc->getTagCount();
+ uint32_t badTag = mFDP->ConsumeIntegral<uint32_t>();
+ uint32_t badTagArray[tagCount + 1];
+ vDesc->getTagArray(badTagArray);
+ vDesc->getSectionName(badTag);
+ vDesc->getTagName(badTag);
+ vDesc->getTagType(badTag);
+ VendorTagDescriptor::clearGlobalVendorTagDescriptor();
+ VendorTagDescriptor::getGlobalVendorTagDescriptor();
+ VendorTagDescriptor::setAsGlobalVendorTagDescriptor(vDesc);
+ invokeReadWriteNullParcelsp<VendorTagDescriptor>(vDesc);
+ vDesc.clear();
+ }
+}
+
+void VendorTagDescriptorFuzzer::process(const uint8_t* data, size_t size) {
+ mFDP = new FuzzedDataProvider(data, size);
+ invokeVendorTagDescriptor();
+ invokeVendorTagDescriptorCache();
+ invokeVendorTagErrorConditions();
+ delete mFDP;
+}
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
+ VendorTagDescriptorFuzzer vendorTagDescriptorFuzzer;
+ vendorTagDescriptorFuzzer.process(data, size);
+ return 0;
+}
diff --git a/cmds/screenrecord/screenrecord.cpp b/cmds/screenrecord/screenrecord.cpp
index 79b1263..d757cd6 100644
--- a/cmds/screenrecord/screenrecord.cpp
+++ b/cmds/screenrecord/screenrecord.cpp
@@ -419,30 +419,39 @@
/*
* Saves metadata needed by Winscope to synchronize the screen recording playback with other traces.
*
- * The metadata (version 1) is written as a binary array with the following format:
+ * The metadata (version 2) is written as a binary array with the following format:
* - winscope magic string (#VV1NSC0PET1ME2#, 16B).
- * - the metadata version number (4B).
- * - Realtime-to-monotonic time offset in nanoseconds (8B).
- * - the recorded frames count (8B)
+ * - the metadata version number (4B little endian).
+ * - Realtime-to-elapsed time offset in nanoseconds (8B little endian).
+ * - the recorded frames count (4B little endian).
* - for each recorded frame:
- * - System time in monotonic clock timebase in nanoseconds (8B).
+ * - System time in elapsed clock timebase in nanoseconds (8B little endian).
*
- * All numbers are Little Endian encoded.
+ *
+ * Use elapsed time for compatibility with other UI traces (most of them):
+ * - Realtime-to-elapsed time offset (instead of realtime-to-monotonic)
+ * - Frame timestamps in elapsed clock timebase (instead of monotonic)
*/
static status_t writeWinscopeMetadata(const Vector<std::int64_t>& timestampsMonotonicUs,
const ssize_t metaTrackIdx, AMediaMuxer *muxer) {
ALOGV("Writing winscope metadata");
static constexpr auto kWinscopeMagicString = std::string_view {"#VV1NSC0PET1ME2#"};
- static constexpr std::uint32_t metadataVersion = 1;
- const std::int64_t realToMonotonicTimeOffsetNs =
- systemTime(SYSTEM_TIME_REALTIME) - systemTime(SYSTEM_TIME_MONOTONIC);
+ static constexpr std::uint32_t metadataVersion = 2;
+
+ const auto elapsedTimeNs = android::elapsedRealtimeNano();
+ const std::int64_t elapsedToMonotonicTimeOffsetNs =
+ elapsedTimeNs - systemTime(SYSTEM_TIME_MONOTONIC);
+ const std::int64_t realToElapsedTimeOffsetNs =
+ systemTime(SYSTEM_TIME_REALTIME) - elapsedTimeNs;
const std::uint32_t framesCount = static_cast<std::uint32_t>(timestampsMonotonicUs.size());
sp<ABuffer> buffer = new ABuffer(
kWinscopeMagicString.size() +
sizeof(decltype(metadataVersion)) +
- sizeof(decltype(realToMonotonicTimeOffsetNs)) +
+ sizeof(decltype(realToElapsedTimeOffsetNs)) +
sizeof(decltype(framesCount)) +
framesCount * sizeof(std::uint64_t)
);
@@ -454,14 +463,16 @@
writeValueLE(metadataVersion, pos);
pos += sizeof(decltype(metadataVersion));
- writeValueLE(realToMonotonicTimeOffsetNs, pos);
- pos += sizeof(decltype(realToMonotonicTimeOffsetNs));
+ writeValueLE(realToElapsedTimeOffsetNs, pos);
+ pos += sizeof(decltype(realToElapsedTimeOffsetNs));
writeValueLE(framesCount, pos);
pos += sizeof(decltype(framesCount));
for (const auto timestampMonotonicUs : timestampsMonotonicUs) {
- writeValueLE<std::uint64_t>(timestampMonotonicUs * 1000, pos);
+ const auto timestampElapsedNs =
+ elapsedToMonotonicTimeOffsetNs + timestampMonotonicUs * 1000;
+ writeValueLE<std::uint64_t>(timestampElapsedNs, pos);
pos += sizeof(std::uint64_t);
}
@@ -1159,13 +1170,13 @@
{ NULL, 0, NULL, 0 }
};
- std::optional<PhysicalDisplayId> displayId = SurfaceComposerClient::getInternalDisplayId();
- if (!displayId) {
- fprintf(stderr, "Failed to get ID for internal display\n");
+ const std::vector<PhysicalDisplayId> ids = SurfaceComposerClient::getPhysicalDisplayIds();
+ if (ids.empty()) {
+ fprintf(stderr, "Failed to get ID for any displays\n");
return 1;
}
- gPhysicalDisplayId = *displayId;
+ gPhysicalDisplayId = ids.front();
while (true) {
int optionIndex = 0;
diff --git a/cmds/stagefright/codec.cpp b/cmds/stagefright/codec.cpp
index beeab54..c43f8ce 100644
--- a/cmds/stagefright/codec.cpp
+++ b/cmds/stagefright/codec.cpp
@@ -411,7 +411,10 @@
composerClient = new SurfaceComposerClient;
CHECK_EQ(composerClient->initCheck(), (status_t)OK);
- const sp<IBinder> display = SurfaceComposerClient::getInternalDisplayToken();
+ const std::vector<PhysicalDisplayId> ids = SurfaceComposerClient::getPhysicalDisplayIds();
+ CHECK(!ids.empty());
+
+ const sp<IBinder> display = SurfaceComposerClient::getPhysicalDisplayToken(ids.front());
CHECK(display != nullptr);
ui::DisplayMode mode;
diff --git a/cmds/stagefright/mediafilter.cpp b/cmds/stagefright/mediafilter.cpp
index 67c68e6..f042d5e 100644
--- a/cmds/stagefright/mediafilter.cpp
+++ b/cmds/stagefright/mediafilter.cpp
@@ -749,7 +749,10 @@
composerClient = new SurfaceComposerClient;
CHECK_EQ((status_t)OK, composerClient->initCheck());
- const android::sp<IBinder> display = SurfaceComposerClient::getInternalDisplayToken();
+ const std::vector<PhysicalDisplayId> ids = SurfaceComposerClient::getPhysicalDisplayIds();
+ CHECK(!ids.empty());
+
+ const android::sp<IBinder> display = SurfaceComposerClient::getPhysicalDisplayToken(ids.front());
CHECK(display != nullptr);
ui::DisplayMode mode;
diff --git a/cmds/stagefright/muxer.cpp b/cmds/stagefright/muxer.cpp
index bc7e41e..185491f 100644
--- a/cmds/stagefright/muxer.cpp
+++ b/cmds/stagefright/muxer.cpp
@@ -78,10 +78,14 @@
int fd = open(outputFileName, O_CREAT | O_LARGEFILE | O_TRUNC | O_RDWR, S_IRUSR | S_IWUSR);
if (fd < 0) {
- ALOGE("couldn't open file");
- return fd;
+ ALOGE("couldn't open output file %s", outputFileName);
+ return 1;
}
- sp<MediaMuxer> muxer = new MediaMuxer(fd, container);
+ sp<MediaMuxer> muxer = MediaMuxer::create(fd, container);
+ if (muxer == nullptr) {
+ fprintf(stderr, "unable to instantiate muxer for format %d\n", container);
+ return 1;
+ }
close(fd);
size_t trackCount = extractor->countTracks();
diff --git a/cmds/stagefright/stream.cpp b/cmds/stagefright/stream.cpp
index 40b2392..1ffe801 100644
--- a/cmds/stagefright/stream.cpp
+++ b/cmds/stagefright/stream.cpp
@@ -318,7 +318,13 @@
sp<SurfaceComposerClient> composerClient = new SurfaceComposerClient;
CHECK_EQ(composerClient->initCheck(), (status_t)OK);
- const sp<IBinder> display = SurfaceComposerClient::getInternalDisplayToken();
+ const std::vector<PhysicalDisplayId> ids = SurfaceComposerClient::getPhysicalDisplayIds();
+ if (ids.empty()) {
+ SLOGE("Failed to get ID for any displays\n");
+ return 1;
+ }
+
+ const sp<IBinder> display = SurfaceComposerClient::getPhysicalDisplayToken(ids.front());
CHECK(display != nullptr);
ui::DisplayMode mode;
diff --git a/drm/mediadrm/plugins/clearkey/aidl/CryptoPlugin.cpp b/drm/mediadrm/plugins/clearkey/aidl/CryptoPlugin.cpp
index 201cf02..afc9b6a 100644
--- a/drm/mediadrm/plugins/clearkey/aidl/CryptoPlugin.cpp
+++ b/drm/mediadrm/plugins/clearkey/aidl/CryptoPlugin.cpp
@@ -144,6 +144,11 @@
clearDataLengths.push_back(ss.numBytesOfClearData);
encryptedDataLengths.push_back(ss.numBytesOfEncryptedData);
}
+ if (in_args.keyId.size() != kBlockSize || in_args.iv.size() != kBlockSize) {
+ android_errorWriteLog(0x534e4554, "244569759");
+ detailedError = "invalid decrypt parameter size";
+ return toNdkScopedAStatus(Status::ERROR_DRM_CANNOT_HANDLE, detailedError);
+ }
auto res =
mSession->decrypt(in_args.keyId.data(), in_args.iv.data(),
srcPtr, static_cast<uint8_t*>(destPtr),
diff --git a/drm/mediadrm/plugins/clearkey/aidl/DrmPlugin.cpp b/drm/mediadrm/plugins/clearkey/aidl/DrmPlugin.cpp
index 12aa4ea..054eabd 100644
--- a/drm/mediadrm/plugins/clearkey/aidl/DrmPlugin.cpp
+++ b/drm/mediadrm/plugins/clearkey/aidl/DrmPlugin.cpp
@@ -177,7 +177,7 @@
UNUSED(in_optionalParameters);
KeyRequestType keyRequestType = KeyRequestType::UNKNOWN;
- std::string defaultUrl("https://default.url");
+ std::string defaultUrl("");
_aidl_return->request = {};
_aidl_return->requestType = keyRequestType;
diff --git a/drm/mediadrm/plugins/clearkey/hidl/CryptoPlugin.cpp b/drm/mediadrm/plugins/clearkey/hidl/CryptoPlugin.cpp
index 3a675f6..7bc320d 100644
--- a/drm/mediadrm/plugins/clearkey/hidl/CryptoPlugin.cpp
+++ b/drm/mediadrm/plugins/clearkey/hidl/CryptoPlugin.cpp
@@ -206,6 +206,11 @@
return Void();
} else if (mode == Mode::AES_CTR) {
size_t bytesDecrypted;
+ if (keyId.size() != kBlockSize || iv.size() != kBlockSize) {
+ android_errorWriteLog(0x534e4554, "244569759");
+ _hidl_cb(Status_V1_2::ERROR_DRM_CANNOT_HANDLE, 0, "invalid decrypt parameter size");
+ return Void();
+ }
Status_V1_2 res = mSession->decrypt(keyId.data(), iv.data(), srcPtr,
static_cast<uint8_t*>(destPtr), toVector(subSamples), &bytesDecrypted);
if (res == Status_V1_2::OK) {
diff --git a/include/private/media/VideoFrame.h b/include/private/media/VideoFrame.h
index 97e0b1d..4df8783 100644
--- a/include/private/media/VideoFrame.h
+++ b/include/private/media/VideoFrame.h
@@ -42,9 +42,14 @@
mWidth(width), mHeight(height),
mDisplayWidth(displayWidth), mDisplayHeight(displayHeight),
mTileWidth(tileWidth), mTileHeight(tileHeight), mDurationUs(0),
- mRotationAngle(angle), mBytesPerPixel(bpp), mRowBytes(bpp * width),
- mSize(hasData ? (bpp * width * height) : 0),
- mIccSize(iccSize), mBitDepth(bitDepth) {
+ mRotationAngle(angle), mBytesPerPixel(bpp), mIccSize(iccSize),
+ mBitDepth(bitDepth) {
+ uint32_t multVal;
+ mRowBytes = __builtin_mul_overflow(bpp, width, &multVal) ? 0 : multVal;
+ mSize = __builtin_mul_overflow(multVal, height, &multVal) ? 0 : multVal;
+ if (hasData && (mRowBytes == 0 || mSize == 0)) {
+ ALOGE("Frame rowBytes/ size overflow %dx%d bpp %d", width, height, bpp);
+ }
}
void init(const VideoFrame& copy, const void* iccData, size_t iccSize) {
diff --git a/media/codec2/components/aac/C2SoftAacDec.cpp b/media/codec2/components/aac/C2SoftAacDec.cpp
index 4e4a9a1..d1b08bd 100644
--- a/media/codec2/components/aac/C2SoftAacDec.cpp
+++ b/media/codec2/components/aac/C2SoftAacDec.cpp
@@ -275,7 +275,8 @@
mStreamInfo(nullptr),
mSignalledError(false),
mOutputPortDelay(kDefaultOutputPortDelay),
- mOutputDelayRingBuffer(nullptr) {
+ mOutputDelayRingBuffer(nullptr),
+ mDeviceApiLevel(android_get_device_api_level()) {
}
C2SoftAacDec::~C2SoftAacDec() {
@@ -891,7 +892,7 @@
work->worklets.front()->output.configUpdate.push_back(
C2Param::Copy(currentBoostFactor));
- if (android_get_device_api_level() < __ANDROID_API_S__) {
+ if (mDeviceApiLevel < __ANDROID_API_S__) {
// We used to report DRC compression mode in the output format
// in Q and R, but stopped doing that in S
C2StreamDrcCompressionModeTuning::input currentCompressMode(0u,
diff --git a/media/codec2/components/aac/C2SoftAacDec.h b/media/codec2/components/aac/C2SoftAacDec.h
index b45f148..f85d45f 100644
--- a/media/codec2/components/aac/C2SoftAacDec.h
+++ b/media/codec2/components/aac/C2SoftAacDec.h
@@ -97,6 +97,7 @@
int32_t mOutputDelayRingBufferWritePos;
int32_t mOutputDelayRingBufferReadPos;
int32_t mOutputDelayRingBufferFilled;
+ int mDeviceApiLevel;
bool outputDelayRingBufferPutSamples(INT_PCM *samples, int numSamples);
int32_t outputDelayRingBufferGetSamples(INT_PCM *samples, int numSamples);
int32_t outputDelayRingBufferSamplesAvailable();
diff --git a/media/codec2/components/aom/Android.bp b/media/codec2/components/aom/Android.bp
index a2a79d5..257cf4e 100644
--- a/media/codec2/components/aom/Android.bp
+++ b/media/codec2/components/aom/Android.bp
@@ -23,3 +23,23 @@
srcs: ["C2SoftAomDec.cpp"],
static_libs: ["libaom"],
}
+
+cc_library {
+ name: "libcodec2_soft_av1enc",
+ defaults: [
+ "libcodec2_soft-defaults",
+ "libcodec2_soft_sanitize_all-defaults",
+ ],
+
+ static_libs: ["libaom"],
+
+ srcs: ["C2SoftAomEnc.cpp"],
+
+ export_include_dirs: ["."],
+
+ apex_available: [
+ "//apex_available:platform",
+ "com.android.media.swcodec",
+ ],
+
+}
diff --git a/media/codec2/components/aom/C2SoftAomEnc.cpp b/media/codec2/components/aom/C2SoftAomEnc.cpp
new file mode 100644
index 0000000..f49c1c4
--- /dev/null
+++ b/media/codec2/components/aom/C2SoftAomEnc.cpp
@@ -0,0 +1,870 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "C2SoftAomEnc"
+#include <log/log.h>
+
+#include <media/stagefright/foundation/AUtils.h>
+#include <media/stagefright/foundation/MediaDefs.h>
+
+#include <C2Debug.h>
+#include <C2PlatformSupport.h>
+#include <SimpleC2Interface.h>
+
+#include "C2SoftAomEnc.h"
+
+namespace android {
+
+constexpr char COMPONENT_NAME[] = "c2.android.av1.encoder";
+
+#define DEFAULT_SPEED 10
+
+C2SoftAomEnc::IntfImpl::IntfImpl(const std::shared_ptr<C2ReflectorHelper>& helper)
+ : SimpleInterface<void>::BaseParams(helper, COMPONENT_NAME, C2Component::KIND_ENCODER,
+ C2Component::DOMAIN_VIDEO, MEDIA_MIMETYPE_VIDEO_AV1) {
+ noPrivateBuffers(); // TODO: account for our buffers here
+ noInputReferences();
+ noOutputReferences();
+ noInputLatency();
+ noTimeStretch();
+ setDerivedInstance(this);
+
+ addParameter(DefineParam(mUsage, C2_PARAMKEY_INPUT_STREAM_USAGE)
+ .withConstValue(new C2StreamUsageTuning::input(
+ 0u, (uint64_t)C2MemoryUsage::CPU_READ))
+ .build());
+
+ addParameter(DefineParam(mSize, C2_PARAMKEY_PICTURE_SIZE)
+ .withDefault(new C2StreamPictureSizeInfo::input(0u, 320, 240))
+ .withFields({
+ C2F(mSize, width).inRange(2, 2048, 2),
+ C2F(mSize, height).inRange(2, 2048, 2),
+ })
+ .withSetter(SizeSetter)
+ .build());
+
+ addParameter(DefineParam(mBitrateMode, C2_PARAMKEY_BITRATE_MODE)
+ .withDefault(new C2StreamBitrateModeTuning::output(
+ 0u, C2Config::BITRATE_VARIABLE))
+ .withFields({C2F(mBitrateMode, value)
+ .oneOf({C2Config::BITRATE_CONST,
+ C2Config::BITRATE_VARIABLE})})
+ .withSetter(Setter<decltype(*mBitrateMode)>::StrictValueWithNoDeps)
+ .build());
+
+ addParameter(DefineParam(mFrameRate, C2_PARAMKEY_FRAME_RATE)
+ .withDefault(new C2StreamFrameRateInfo::output(0u, 30.))
+ // TODO: More restriction?
+ .withFields({C2F(mFrameRate, value).greaterThan(0.)})
+ .withSetter(Setter<decltype(*mFrameRate)>::StrictValueWithNoDeps)
+ .build());
+
+ addParameter(DefineParam(mSyncFramePeriod, C2_PARAMKEY_SYNC_FRAME_INTERVAL)
+ .withDefault(new C2StreamSyncFrameIntervalTuning::output(0u, 1000000))
+ .withFields({C2F(mSyncFramePeriod, value).any()})
+ .withSetter(Setter<decltype(*mSyncFramePeriod)>::StrictValueWithNoDeps)
+ .build());
+
+ addParameter(DefineParam(mBitrate, C2_PARAMKEY_BITRATE)
+ .withDefault(new C2StreamBitrateInfo::output(0u, 64000))
+ .withFields({C2F(mBitrate, value).inRange(4096, 40000000)})
+ .withSetter(BitrateSetter)
+ .build());
+
+ addParameter(DefineParam(mIntraRefresh, C2_PARAMKEY_INTRA_REFRESH)
+ .withConstValue(new C2StreamIntraRefreshTuning::output(
+ 0u, C2Config::INTRA_REFRESH_DISABLED, 0.))
+ .build());
+
+ addParameter(DefineParam(mProfileLevel, C2_PARAMKEY_PROFILE_LEVEL)
+ .withDefault(new C2StreamProfileLevelInfo::output(0u, PROFILE_AV1_0,
+ LEVEL_AV1_4_1))
+ .withFields({
+ C2F(mProfileLevel, profile).equalTo(PROFILE_AV1_0),
+ C2F(mProfileLevel, level)
+ .oneOf({LEVEL_AV1_2, LEVEL_AV1_2_1, LEVEL_AV1_2_2,
+ LEVEL_AV1_2_3, LEVEL_AV1_3, LEVEL_AV1_3_1,
+ LEVEL_AV1_3_2, LEVEL_AV1_3_3, LEVEL_AV1_4,
+ LEVEL_AV1_4_1}),
+ })
+ .withSetter(ProfileLevelSetter)
+ .build());
+
+ addParameter(DefineParam(mRequestSync, C2_PARAMKEY_REQUEST_SYNC_FRAME)
+ .withDefault(new C2StreamRequestSyncFrameTuning::output(0u, C2_FALSE))
+ .withFields({C2F(mRequestSync, value).oneOf({C2_FALSE, C2_TRUE})})
+ .withSetter(Setter<decltype(*mRequestSync)>::NonStrictValueWithNoDeps)
+ .build());
+ addParameter(
+ DefineParam(mColorAspects, C2_PARAMKEY_COLOR_ASPECTS)
+ .withDefault(new C2StreamColorAspectsInfo::input(
+ 0u, C2Color::RANGE_UNSPECIFIED, C2Color::PRIMARIES_UNSPECIFIED,
+ C2Color::TRANSFER_UNSPECIFIED, C2Color::MATRIX_UNSPECIFIED))
+ .withFields(
+ {C2F(mColorAspects, range)
+ .inRange(C2Color::RANGE_UNSPECIFIED, C2Color::RANGE_OTHER),
+ C2F(mColorAspects, primaries)
+ .inRange(C2Color::PRIMARIES_UNSPECIFIED,
+ C2Color::PRIMARIES_OTHER),
+ C2F(mColorAspects, transfer)
+ .inRange(C2Color::TRANSFER_UNSPECIFIED,
+ C2Color::TRANSFER_OTHER),
+ C2F(mColorAspects, matrix)
+ .inRange(C2Color::MATRIX_UNSPECIFIED, C2Color::MATRIX_OTHER)})
+ .withSetter(ColorAspectsSetter)
+ .build());
+
+ addParameter(
+ DefineParam(mCodedColorAspects, C2_PARAMKEY_VUI_COLOR_ASPECTS)
+ .withDefault(new C2StreamColorAspectsInfo::output(
+ 0u, C2Color::RANGE_LIMITED, C2Color::PRIMARIES_UNSPECIFIED,
+ C2Color::TRANSFER_UNSPECIFIED, C2Color::MATRIX_UNSPECIFIED))
+ .withFields(
+ {C2F(mCodedColorAspects, range)
+ .inRange(C2Color::RANGE_UNSPECIFIED, C2Color::RANGE_OTHER),
+ C2F(mCodedColorAspects, primaries)
+ .inRange(C2Color::PRIMARIES_UNSPECIFIED,
+ C2Color::PRIMARIES_OTHER),
+ C2F(mCodedColorAspects, transfer)
+ .inRange(C2Color::TRANSFER_UNSPECIFIED,
+ C2Color::TRANSFER_OTHER),
+ C2F(mCodedColorAspects, matrix)
+ .inRange(C2Color::MATRIX_UNSPECIFIED, C2Color::MATRIX_OTHER)})
+ .withSetter(CodedColorAspectsSetter, mColorAspects)
+ .build());
+}
+
+C2R C2SoftAomEnc::IntfImpl::BitrateSetter(bool mayBlock, C2P<C2StreamBitrateInfo::output>& me) {
+ (void)mayBlock;
+ C2R res = C2R::Ok();
+ if (me.v.value < 4096) {
+ me.set().value = 4096;
+ }
+ return res;
+}
+
+C2R C2SoftAomEnc::IntfImpl::SizeSetter(bool mayBlock,
+ const C2P<C2StreamPictureSizeInfo::input>& oldMe,
+ C2P<C2StreamPictureSizeInfo::input>& me) {
+ (void)mayBlock;
+ C2R res = C2R::Ok();
+ if (!me.F(me.v.width).supportsAtAll(me.v.width)) {
+ res = res.plus(C2SettingResultBuilder::BadValue(me.F(me.v.width)));
+ me.set().width = oldMe.v.width;
+ }
+ if (!me.F(me.v.height).supportsAtAll(me.v.height)) {
+ res = res.plus(C2SettingResultBuilder::BadValue(me.F(me.v.height)));
+ me.set().height = oldMe.v.height;
+ }
+ return res;
+}
+
+C2R C2SoftAomEnc::IntfImpl::ProfileLevelSetter(bool mayBlock,
+ C2P<C2StreamProfileLevelInfo::output>& me) {
+ (void)mayBlock;
+ if (!me.F(me.v.profile).supportsAtAll(me.v.profile)) {
+ me.set().profile = PROFILE_AV1_0;
+ }
+ if (!me.F(me.v.level).supportsAtAll(me.v.level)) {
+ me.set().level = LEVEL_AV1_4_1;
+ }
+ return C2R::Ok();
+}
+
+uint32_t C2SoftAomEnc::IntfImpl::getSyncFramePeriod() const {
+ if (mSyncFramePeriod->value < 0 || mSyncFramePeriod->value == INT64_MAX) {
+ return 0;
+ }
+ double period = mSyncFramePeriod->value / 1e6 * mFrameRate->value;
+ return (uint32_t)c2_max(c2_min(period + 0.5, double(UINT32_MAX)), 1.);
+}
+
+C2R C2SoftAomEnc::IntfImpl::ColorAspectsSetter(bool mayBlock,
+ C2P<C2StreamColorAspectsInfo::input>& me) {
+ (void)mayBlock;
+ if (me.v.range > C2Color::RANGE_OTHER) {
+ me.set().range = C2Color::RANGE_OTHER;
+ }
+ if (me.v.primaries > C2Color::PRIMARIES_OTHER) {
+ me.set().primaries = C2Color::PRIMARIES_OTHER;
+ }
+ if (me.v.transfer > C2Color::TRANSFER_OTHER) {
+ me.set().transfer = C2Color::TRANSFER_OTHER;
+ }
+ if (me.v.matrix > C2Color::MATRIX_OTHER) {
+ me.set().matrix = C2Color::MATRIX_OTHER;
+ }
+ return C2R::Ok();
+}
+C2R C2SoftAomEnc::IntfImpl::CodedColorAspectsSetter(
+ bool mayBlock, C2P<C2StreamColorAspectsInfo::output>& me,
+ const C2P<C2StreamColorAspectsInfo::input>& coded) {
+ (void)mayBlock;
+ me.set().range = coded.v.range;
+ me.set().primaries = coded.v.primaries;
+ me.set().transfer = coded.v.transfer;
+ me.set().matrix = coded.v.matrix;
+ return C2R::Ok();
+}
+
+C2SoftAomEnc::C2SoftAomEnc(const char* name, c2_node_id_t id,
+ const std::shared_ptr<IntfImpl>& intfImpl)
+ : SimpleC2Component(std::make_shared<SimpleInterface<IntfImpl>>(name, id, intfImpl)),
+ mIntf(intfImpl),
+ mCodecContext(nullptr),
+ mCodecConfiguration(nullptr),
+ mCodecInterface(nullptr),
+ mStrideAlign(2),
+ mBitrateControlMode(AOM_VBR),
+ mMinQuantizer(0),
+ mMaxQuantizer(0),
+ mLastTimestamp(0x7FFFFFFFFFFFFFFFull),
+ mSignalledOutputEos(false),
+ mSignalledError(false),
+ mHeadersReceived(false) {
+ ALOGV("Constructor");
+}
+
+C2SoftAomEnc::~C2SoftAomEnc() {
+ ALOGV("Destructor");
+ onRelease();
+}
+
+c2_status_t C2SoftAomEnc::onInit() {
+ ALOGV("Init");
+
+ status_t err = initEncoder();
+ return err == OK ? C2_OK : C2_CORRUPTED;
+}
+
+c2_status_t C2SoftAomEnc::onStop() {
+ onRelease();
+ return C2_OK;
+}
+
+void C2SoftAomEnc::onReset() {
+ (void)onStop();
+}
+
+void C2SoftAomEnc::onRelease() {
+ if (mCodecContext) {
+ aom_codec_destroy(mCodecContext);
+ delete mCodecContext;
+ mCodecContext = nullptr;
+ }
+
+ if (mCodecConfiguration) {
+ delete mCodecConfiguration;
+ mCodecConfiguration = nullptr;
+ }
+
+ // this one is not allocated by us
+ mCodecInterface = nullptr;
+}
+
+c2_status_t C2SoftAomEnc::onFlush_sm() {
+ return onStop();
+}
+
+aom_codec_err_t C2SoftAomEnc::setupCodecParameters() {
+ aom_codec_err_t codec_return = AOM_CODEC_OK;
+
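+ // Realtime-oriented tuning: raise encoder speed, enable row multithreading, and disable
+ // coding tools that are too costly for a realtime software encoder.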
+ codec_return = aom_codec_control(mCodecContext, AOME_SET_CPUUSED, DEFAULT_SPEED);
+ if (codec_return != AOM_CODEC_OK) goto BailOut;
+
+ codec_return = aom_codec_control(mCodecContext, AV1E_SET_ROW_MT, 1);
+ if (codec_return != AOM_CODEC_OK) goto BailOut;
+
+ codec_return = aom_codec_control(mCodecContext, AV1E_SET_ENABLE_CDEF, 1);
+ if (codec_return != AOM_CODEC_OK) goto BailOut;
+
+ codec_return = aom_codec_control(mCodecContext, AV1E_SET_ENABLE_TPL_MODEL, 0);
+ if (codec_return != AOM_CODEC_OK) goto BailOut;
+
+ codec_return = aom_codec_control(mCodecContext, AV1E_SET_DELTAQ_MODE, 0);
+ if (codec_return != AOM_CODEC_OK) goto BailOut;
+
+ codec_return = aom_codec_control(mCodecContext, AV1E_SET_ENABLE_ORDER_HINT, 0);
+ if (codec_return != AOM_CODEC_OK) goto BailOut;
+
+ codec_return = aom_codec_control(mCodecContext, AV1E_SET_AQ_MODE, 3);
+ if (codec_return != AOM_CODEC_OK) goto BailOut;
+
+ codec_return = aom_codec_control(mCodecContext, AV1E_SET_COEFF_COST_UPD_FREQ, 3);
+ if (codec_return != AOM_CODEC_OK) goto BailOut;
+
+ codec_return = aom_codec_control(mCodecContext, AV1E_SET_MODE_COST_UPD_FREQ, 3);
+ if (codec_return != AOM_CODEC_OK) goto BailOut;
+
+ codec_return = aom_codec_control(mCodecContext, AV1E_SET_MV_COST_UPD_FREQ, 3);
+ if (codec_return != AOM_CODEC_OK) goto BailOut;
+
+ codec_return = aom_codec_control(mCodecContext, AV1E_SET_ENABLE_PALETTE, 0);
+ if (codec_return != AOM_CODEC_OK) goto BailOut;
+
+ codec_return = aom_codec_control(mCodecContext, AV1E_SET_ENABLE_OBMC, 0);
+ if (codec_return != AOM_CODEC_OK) goto BailOut;
+
+ codec_return = aom_codec_control(mCodecContext, AV1E_SET_NOISE_SENSITIVITY, 0);
+ if (codec_return != AOM_CODEC_OK) goto BailOut;
+
+ codec_return = aom_codec_control(mCodecContext, AV1E_SET_ENABLE_WARPED_MOTION, 0);
+ if (codec_return != AOM_CODEC_OK) goto BailOut;
+
+ codec_return = aom_codec_control(mCodecContext, AV1E_SET_ENABLE_GLOBAL_MOTION, 0);
+ if (codec_return != AOM_CODEC_OK) goto BailOut;
+
+ codec_return = aom_codec_control(mCodecContext, AV1E_SET_ENABLE_REF_FRAME_MVS, 0);
+ if (codec_return != AOM_CODEC_OK) goto BailOut;
+
+ codec_return = aom_codec_control(mCodecContext, AV1E_SET_ENABLE_CFL_INTRA, 0);
+ if (codec_return != AOM_CODEC_OK) goto BailOut;
+
+ codec_return = aom_codec_control(mCodecContext, AV1E_SET_ENABLE_SMOOTH_INTRA, 0);
+ if (codec_return != AOM_CODEC_OK) goto BailOut;
+ codec_return = aom_codec_control(mCodecContext, AV1E_SET_ENABLE_ANGLE_DELTA, 0);
+ if (codec_return != AOM_CODEC_OK) goto BailOut;
+ codec_return = aom_codec_control(mCodecContext, AV1E_SET_ENABLE_FILTER_INTRA, 0);
+ if (codec_return != AOM_CODEC_OK) goto BailOut;
+
+ codec_return = aom_codec_control(mCodecContext, AV1E_SET_INTRA_DEFAULT_TX_ONLY, 1);
+ if (codec_return != AOM_CODEC_OK) goto BailOut;
+ codec_return = aom_codec_control(mCodecContext, AV1E_SET_DISABLE_TRELLIS_QUANT, 1);
+ if (codec_return != AOM_CODEC_OK) goto BailOut;
+
+ codec_return = aom_codec_control(mCodecContext, AV1E_SET_ENABLE_DIST_WTD_COMP, 0);
+ if (codec_return != AOM_CODEC_OK) goto BailOut;
+ codec_return = aom_codec_control(mCodecContext, AV1E_SET_ENABLE_DIFF_WTD_COMP, 0);
+ if (codec_return != AOM_CODEC_OK) goto BailOut;
+ codec_return = aom_codec_control(mCodecContext, AV1E_SET_ENABLE_DUAL_FILTER, 0);
+ if (codec_return != AOM_CODEC_OK) goto BailOut;
+ codec_return = aom_codec_control(mCodecContext, AV1E_SET_ENABLE_INTERINTRA_COMP, 0);
+ if (codec_return != AOM_CODEC_OK) goto BailOut;
+ codec_return = aom_codec_control(mCodecContext, AV1E_SET_ENABLE_INTERINTRA_WEDGE, 0);
+ if (codec_return != AOM_CODEC_OK) goto BailOut;
+ codec_return = aom_codec_control(mCodecContext, AV1E_SET_ENABLE_INTRA_EDGE_FILTER, 0);
+ if (codec_return != AOM_CODEC_OK) goto BailOut;
+ codec_return = aom_codec_control(mCodecContext, AV1E_SET_ENABLE_INTRABC, 0);
+ if (codec_return != AOM_CODEC_OK) goto BailOut;
+ codec_return = aom_codec_control(mCodecContext, AV1E_SET_ENABLE_MASKED_COMP, 0);
+ if (codec_return != AOM_CODEC_OK) goto BailOut;
+ codec_return = aom_codec_control(mCodecContext, AV1E_SET_ENABLE_PAETH_INTRA, 0);
+ if (codec_return != AOM_CODEC_OK) goto BailOut;
+ codec_return = aom_codec_control(mCodecContext, AV1E_SET_ENABLE_QM, 0);
+ if (codec_return != AOM_CODEC_OK) goto BailOut;
+ codec_return = aom_codec_control(mCodecContext, AV1E_SET_ENABLE_RECT_PARTITIONS, 0);
+ if (codec_return != AOM_CODEC_OK) goto BailOut;
+ codec_return = aom_codec_control(mCodecContext, AV1E_SET_ENABLE_RESTORATION, 0);
+ if (codec_return != AOM_CODEC_OK) goto BailOut;
+ codec_return = aom_codec_control(mCodecContext, AV1E_SET_ENABLE_SMOOTH_INTERINTRA, 0);
+ if (codec_return != AOM_CODEC_OK) goto BailOut;
+ codec_return = aom_codec_control(mCodecContext, AV1E_SET_ENABLE_TX64, 0);
+ if (codec_return != AOM_CODEC_OK) goto BailOut;
+
+ codec_return = aom_codec_control(mCodecContext, AV1E_SET_MAX_REFERENCE_FRAMES, 3);
+ if (codec_return != AOM_CODEC_OK) goto BailOut;
+
+BailOut:
+ return codec_return;
+}
+
+status_t C2SoftAomEnc::initEncoder() {
+ aom_codec_err_t codec_return;
+ status_t result = UNKNOWN_ERROR;
+ {
+ IntfImpl::Lock lock = mIntf->lock();
+ // Fetch config
+ mSize = mIntf->getSize_l();
+ mBitrate = mIntf->getBitrate_l();
+ mBitrateMode = mIntf->getBitrateMode_l();
+ mFrameRate = mIntf->getFrameRate_l();
+ mIntraRefresh = mIntf->getIntraRefresh_l();
+ mRequestSync = mIntf->getRequestSync_l();
+ }
+
+ switch (mBitrateMode->value) {
+ case C2Config::BITRATE_CONST:
+ mBitrateControlMode = AOM_CBR;
+ break;
+ case C2Config::BITRATE_VARIABLE:
+ [[fallthrough]];
+ default:
+ mBitrateControlMode = AOM_VBR;
+ break;
+ }
+
+ mCodecInterface = aom_codec_av1_cx();
+ if (!mCodecInterface) goto CleanUp;
+
+ ALOGD("AOM: initEncoder. BRMode: %u. KF: %u. QP: %u - %u", (uint32_t)mBitrateControlMode,
+ mIntf->getSyncFramePeriod(), mMinQuantizer, mMaxQuantizer);
+
+ mCodecConfiguration = new aom_codec_enc_cfg_t;
+ if (!mCodecConfiguration) goto CleanUp;
+
+ codec_return = aom_codec_enc_config_default(mCodecInterface, mCodecConfiguration,
+ AOM_USAGE_REALTIME); // RT mode
+ if (codec_return != AOM_CODEC_OK) {
+ ALOGE("Error populating default configuration for aom encoder.");
+ goto CleanUp;
+ }
+
+ mCodecConfiguration->g_w = mSize->width;
+ mCodecConfiguration->g_h = mSize->height;
+
+ mCodecConfiguration->g_threads = 0;
+ mCodecConfiguration->g_error_resilient = 0;
+
+    // timebase unit is microsecond:
+    // one g_timebase tick is 1/1000000 of a second
+ mCodecConfiguration->g_timebase.num = 1;
+ mCodecConfiguration->g_timebase.den = 1000000;
+ // rc_target_bitrate is in kbps, mBitrate in bps
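+    // (adding 500 before dividing rounds to the nearest kbps rather than truncating)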
+ mCodecConfiguration->rc_target_bitrate = (mBitrate->value + 500) / 1000;
+ mCodecConfiguration->rc_end_usage = AOM_CBR;
+ // Disable frame drop - not allowed in MediaCodec now.
+ mCodecConfiguration->rc_dropframe_thresh = 0;
+ // Disable lagged encoding.
+ mCodecConfiguration->g_lag_in_frames = 0;
+
+ // Disable spatial resizing.
+ mCodecConfiguration->rc_resize_mode = 0;
+ // Single-pass mode.
+ mCodecConfiguration->g_pass = AOM_RC_ONE_PASS;
+
+    // Maximum key frame interval - for CBR, boost it to 3000 frames
+ mCodecConfiguration->kf_max_dist = 3000;
+ // Encoder determines optimal key frame placement automatically.
+ mCodecConfiguration->kf_mode = AOM_KF_AUTO;
+ // Initial value of the buffer level in ms.
+ mCodecConfiguration->rc_buf_initial_sz = 500;
+ // Amount of data that the encoder should try to maintain in ms.
+ mCodecConfiguration->rc_buf_optimal_sz = 600;
+ // The amount of data that may be buffered by the decoding
+ // application in ms.
+ mCodecConfiguration->rc_buf_sz = 1000;
+
+ if (mBitrateControlMode == AOM_CBR) {
+ // Maximum amount of bits that can be subtracted from the target
+ // bitrate - expressed as percentage of the target bitrate.
+ mCodecConfiguration->rc_undershoot_pct = 100;
+ // Maximum amount of bits that can be added to the target
+ // bitrate - expressed as percentage of the target bitrate.
+ mCodecConfiguration->rc_overshoot_pct = 10;
+ } else {
+ // Maximum amount of bits that can be subtracted from the target
+ // bitrate - expressed as percentage of the target bitrate.
+ mCodecConfiguration->rc_undershoot_pct = 100;
+ // Maximum amount of bits that can be added to the target
+ // bitrate - expressed as percentage of the target bitrate.
+ mCodecConfiguration->rc_overshoot_pct = 25;
+ }
+
+ if (mIntf->getSyncFramePeriod() >= 0) {
+ mCodecConfiguration->kf_max_dist = mIntf->getSyncFramePeriod();
+ mCodecConfiguration->kf_min_dist = mIntf->getSyncFramePeriod();
+ mCodecConfiguration->kf_mode = AOM_KF_AUTO;
+ }
+ if (mMinQuantizer > 0) {
+ mCodecConfiguration->rc_min_quantizer = mMinQuantizer;
+ }
+ if (mMaxQuantizer > 0) {
+ mCodecConfiguration->rc_max_quantizer = mMaxQuantizer;
+ }
+
+ mCodecContext = new aom_codec_ctx_t;
+ if (!mCodecContext) goto CleanUp;
+ codec_return = aom_codec_enc_init(mCodecContext, mCodecInterface, mCodecConfiguration,
+ 0); // flags
+ if (codec_return != AOM_CODEC_OK) {
+ ALOGE("Error initializing aom encoder");
+ goto CleanUp;
+ }
+
+ codec_return = setupCodecParameters();
+ if (codec_return != AOM_CODEC_OK) {
+ ALOGE("Error setting up codec parameters");
+ goto CleanUp;
+ }
+
+ mHeadersReceived = false;
+
+ {
+ uint32_t width = mSize->width;
+ uint32_t height = mSize->height;
+ if (((uint64_t)width * height) > ((uint64_t)INT32_MAX / 3)) {
+ ALOGE("b/25812794, Buffer size is too big, width=%u, height=%u.", width, height);
+ } else {
+ uint32_t stride = (width + mStrideAlign - 1) & ~(mStrideAlign - 1);
+ uint32_t vstride = (height + mStrideAlign - 1) & ~(mStrideAlign - 1);
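+            // I420 needs 1.5 bytes per pixel: a full-size Y plane plus quarter-size U and V planes.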
+ mConversionBuffer = MemoryBlock::Allocate(stride * vstride * 3 / 2);
+ if (!mConversionBuffer.size()) {
+ ALOGE("Allocating conversion buffer failed.");
+ } else {
+ mNumInputFrames = -1;
+ return OK;
+ }
+ }
+ }
+
+CleanUp:
+ onRelease();
+ return result;
+}
+
+void C2SoftAomEnc::process(const std::unique_ptr<C2Work>& work,
+ const std::shared_ptr<C2BlockPool>& pool) {
+ // Initialize output work
+ work->result = C2_OK;
+ work->workletsProcessed = 1u;
+ work->worklets.front()->output.flags = work->input.flags;
+
+ if (mSignalledError || mSignalledOutputEos) {
+ work->result = C2_BAD_VALUE;
+ return;
+ }
+ // Initialize encoder if not already
+ if (!mCodecContext && OK != initEncoder()) {
+ ALOGE("Failed to initialize encoder");
+ mSignalledError = true;
+ work->result = C2_CORRUPTED;
+ return;
+ }
+
+ if (!mHeadersReceived) {
+ Av1Config av1_config;
+ constexpr uint32_t header_length = 2048;
+ uint8_t header[header_length];
+ size_t header_bytes;
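+        // Repackage the encoder's sequence header OBU as an av1C configuration record and
+        // emit it once as codec-specific data (CSD).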
+ aom_fixed_buf_t* obu_sequence_header = aom_codec_get_global_headers(mCodecContext);
+ int ret = 1;
+ if (obu_sequence_header) {
+ if (get_av1config_from_obu(reinterpret_cast<const uint8_t*>(obu_sequence_header->buf),
+ obu_sequence_header->sz, false, &av1_config) == 0) {
+ ret = write_av1config(&av1_config, header_length, &header_bytes, header);
+
+ } else {
+ ALOGE("Can not get config");
+ }
+ free(obu_sequence_header->buf);
+ free(obu_sequence_header);
+ }
+
+ if (ret) {
+ ALOGE("Can not write config");
+ mSignalledError = true;
+ work->result = C2_NO_MEMORY;
+ work->workletsProcessed = 1u;
+ return;
+ }
+
+ mHeadersReceived = true;
+ std::unique_ptr<C2StreamInitDataInfo::output> csd =
+ C2StreamInitDataInfo::output::AllocUnique(header_bytes, 0u);
+ if (!csd) {
+ ALOGE("CSD allocation failed");
+ mSignalledError = true;
+ work->result = C2_NO_MEMORY;
+ work->workletsProcessed = 1u;
+ return;
+ }
+ memcpy(csd->m.value, header, header_bytes);
+ work->worklets.front()->output.configUpdate.push_back(std::move(csd));
+ ALOGV("CSD Produced of size %zu bytes", header_bytes);
+ }
+
+ std::shared_ptr<const C2GraphicView> rView;
+ std::shared_ptr<C2Buffer> inputBuffer;
+ if (!work->input.buffers.empty()) {
+ inputBuffer = work->input.buffers[0];
+ rView = std::make_shared<const C2GraphicView>(
+ inputBuffer->data().graphicBlocks().front().map().get());
+ if (rView->error() != C2_OK) {
+ ALOGE("graphic view map err = %d", rView->error());
+ work->result = C2_CORRUPTED;
+ return;
+ }
+ } else {
+ ALOGV("Empty input Buffer");
+ uint32_t flags = 0;
+ if (work->input.flags & C2FrameData::FLAG_END_OF_STREAM) {
+ flags |= C2FrameData::FLAG_END_OF_STREAM;
+ }
+ work->worklets.front()->output.flags = (C2FrameData::flags_t)flags;
+ work->worklets.front()->output.buffers.clear();
+ work->worklets.front()->output.ordinal = work->input.ordinal;
+ work->workletsProcessed = 1u;
+ return;
+ }
+
+ const C2ConstGraphicBlock inBuffer = inputBuffer->data().graphicBlocks().front();
+ if (inBuffer.width() < mSize->width || inBuffer.height() < mSize->height) {
+ ALOGE("unexpected Input buffer attributes %d(%d) x %d(%d)", inBuffer.width(), mSize->width,
+ inBuffer.height(), mSize->height);
+ mSignalledError = true;
+ work->result = C2_BAD_VALUE;
+ return;
+ }
+ bool end_of_stream = ((work->input.flags & C2FrameData::FLAG_END_OF_STREAM) != 0);
+ aom_image_t raw_frame;
+ const C2PlanarLayout& layout = rView->layout();
+ uint32_t width = mSize->width;
+ uint32_t height = mSize->height;
+ if (width > 0x8000 || height > 0x8000) {
+ ALOGE("Image too big: %u x %u", width, height);
+ work->result = C2_BAD_VALUE;
+ return;
+ }
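+    // Round the dimensions up to the next multiple of mStrideAlign (a power of two).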
+ uint32_t stride = (width + mStrideAlign - 1) & ~(mStrideAlign - 1);
+ uint32_t vstride = (height + mStrideAlign - 1) & ~(mStrideAlign - 1);
+ switch (layout.type) {
+ case C2PlanarLayout::TYPE_RGB:
+ case C2PlanarLayout::TYPE_RGBA: {
+ std::shared_ptr<C2StreamColorAspectsInfo::output> colorAspects;
+ {
+ IntfImpl::Lock lock = mIntf->lock();
+ colorAspects = mIntf->getCodedColorAspects_l();
+ }
+ ConvertRGBToPlanarYUV(mConversionBuffer.data(), stride, vstride,
+ mConversionBuffer.size(), *rView.get(), colorAspects->matrix,
+ colorAspects->range);
+ aom_img_wrap(&raw_frame, AOM_IMG_FMT_I420, width, height, mStrideAlign,
+ mConversionBuffer.data());
+ break;
+ }
+ case C2PlanarLayout::TYPE_YUV: {
+ if (!IsYUV420(*rView)) {
+ ALOGE("input is not YUV420");
+ work->result = C2_BAD_VALUE;
+ return;
+ }
+
+ if (layout.planes[layout.PLANE_Y].colInc == 1 &&
+ layout.planes[layout.PLANE_U].colInc == 1 &&
+ layout.planes[layout.PLANE_V].colInc == 1) {
+ // I420 compatible - though with custom offset and stride
+ aom_img_wrap(&raw_frame, AOM_IMG_FMT_I420, width, height, mStrideAlign,
+ (uint8_t*)rView->data()[0]);
+ raw_frame.planes[1] = (uint8_t*)rView->data()[1];
+ raw_frame.planes[2] = (uint8_t*)rView->data()[2];
+ raw_frame.stride[0] = layout.planes[layout.PLANE_Y].rowInc;
+ raw_frame.stride[1] = layout.planes[layout.PLANE_U].rowInc;
+ raw_frame.stride[2] = layout.planes[layout.PLANE_V].rowInc;
+ } else {
+ // copy to I420
+ MediaImage2 img = CreateYUV420PlanarMediaImage2(width, height, stride, vstride);
+ if (mConversionBuffer.size() >= stride * vstride * 3 / 2) {
+ status_t err = ImageCopy(mConversionBuffer.data(), &img, *rView);
+ if (err != OK) {
+ ALOGE("Buffer conversion failed: %d", err);
+ work->result = C2_BAD_VALUE;
+ return;
+ }
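+                    // Wrap the aligned conversion buffer, then restrict the visible
+                    // rectangle to the true width x height.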
+ aom_img_wrap(&raw_frame, AOM_IMG_FMT_I420, stride, vstride, mStrideAlign,
+ mConversionBuffer.data());
+ aom_img_set_rect(&raw_frame, 0, 0, width, height, 0);
+ } else {
+ ALOGE("Conversion buffer is too small: %u x %u for %zu", stride, vstride,
+ mConversionBuffer.size());
+ work->result = C2_BAD_VALUE;
+ return;
+ }
+ }
+ break;
+ }
+ default:
+ ALOGE("Unrecognized plane type: %d", layout.type);
+ work->result = C2_BAD_VALUE;
+ return;
+ }
+
+ aom_enc_frame_flags_t flags = 0;
+ // handle dynamic config parameters
+ {
+ IntfImpl::Lock lock = mIntf->lock();
+ std::shared_ptr<C2StreamIntraRefreshTuning::output> intraRefresh =
+ mIntf->getIntraRefresh_l();
+ std::shared_ptr<C2StreamBitrateInfo::output> bitrate = mIntf->getBitrate_l();
+ std::shared_ptr<C2StreamRequestSyncFrameTuning::output> requestSync =
+ mIntf->getRequestSync_l();
+ lock.unlock();
+
+ if (intraRefresh != mIntraRefresh) {
+ mIntraRefresh = intraRefresh;
+ ALOGV("Got mIntraRefresh request");
+ }
+
+ if (requestSync != mRequestSync) {
+ // we can handle IDR immediately
+ if (requestSync->value) {
+ // unset request
+ C2StreamRequestSyncFrameTuning::output clearSync(0u, C2_FALSE);
+ std::vector<std::unique_ptr<C2SettingResult>> failures;
+ mIntf->config({&clearSync}, C2_MAY_BLOCK, &failures);
+ ALOGV("Got sync request");
+ flags |= AOM_EFLAG_FORCE_KF;
+ }
+ mRequestSync = requestSync;
+ }
+
+ if (bitrate != mBitrate) {
+ mBitrate = bitrate;
+ mCodecConfiguration->rc_target_bitrate = (mBitrate->value + 500) / 1000;
+ aom_codec_err_t res = aom_codec_enc_config_set(mCodecContext, mCodecConfiguration);
+ if (res != AOM_CODEC_OK) {
+ ALOGE("aom encoder failed to update bitrate: %s", aom_codec_err_to_string(res));
+ mSignalledError = true;
+ work->result = C2_CORRUPTED;
+ return;
+ }
+ }
+ }
+
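+    // Input timestamps are in microseconds (matching g_timebase); the delta between
+    // consecutive frames is used as the frame duration when timestamps are increasing.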
+ uint64_t input_timestamp = work->input.ordinal.timestamp.peekull();
+ uint32_t frame_duration;
+ if (input_timestamp > mLastTimestamp) {
+ frame_duration = (uint32_t)(input_timestamp - mLastTimestamp);
+ } else {
+ // Use default of 30 fps in case of 0 frame rate.
+ float frame_rate = mFrameRate->value;
+ if (frame_rate < 0.001) {
+ frame_rate = 30.0;
+ }
+ frame_duration = (uint32_t)(1000000 / frame_rate + 0.5);
+ }
+ mLastTimestamp = input_timestamp;
+
+ aom_codec_err_t codec_return =
+ aom_codec_encode(mCodecContext, &raw_frame, input_timestamp, frame_duration, flags);
+ if (codec_return != AOM_CODEC_OK) {
+ ALOGE("aom encoder failed to encode frame");
+ mSignalledError = true;
+ work->result = C2_CORRUPTED;
+ return;
+ }
+
+ bool populated = false;
+ aom_codec_iter_t encoded_packet_iterator = nullptr;
+ const aom_codec_cx_pkt_t* encoded_packet;
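+    // Drain every packet the encoder produced for this input; only compressed frame
+    // packets (AOM_CODEC_CX_FRAME_PKT) are forwarded as output buffers.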
+ while ((encoded_packet = aom_codec_get_cx_data(mCodecContext, &encoded_packet_iterator))) {
+ if (encoded_packet->kind == AOM_CODEC_CX_FRAME_PKT) {
+ std::shared_ptr<C2LinearBlock> block;
+ C2MemoryUsage usage = {C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE};
+ c2_status_t err = pool->fetchLinearBlock(encoded_packet->data.frame.sz, usage, &block);
+ if (err != C2_OK) {
+ ALOGE("fetchLinearBlock for Output failed with status %d", err);
+ work->result = C2_NO_MEMORY;
+ return;
+ }
+ C2WriteView wView = block->map().get();
+ if (wView.error()) {
+ ALOGE("write view map failed %d", wView.error());
+ work->result = C2_CORRUPTED;
+ return;
+ }
+
+ memcpy(wView.data(), encoded_packet->data.frame.buf, encoded_packet->data.frame.sz);
+ ++mNumInputFrames;
+
+ ALOGD("bytes generated %zu", encoded_packet->data.frame.sz);
+ uint32_t flags = 0;
+ if (end_of_stream) {
+ flags |= C2FrameData::FLAG_END_OF_STREAM;
+ }
+
+ work->worklets.front()->output.flags = (C2FrameData::flags_t)flags;
+ work->worklets.front()->output.buffers.clear();
+ std::shared_ptr<C2Buffer> buffer =
+ createLinearBuffer(block, 0, encoded_packet->data.frame.sz);
+ if (encoded_packet->data.frame.flags & AOM_FRAME_IS_KEY) {
+ buffer->setInfo(std::make_shared<C2StreamPictureTypeMaskInfo::output>(
+ 0u /* stream id */, C2Config::SYNC_FRAME));
+ }
+ work->worklets.front()->output.buffers.push_back(buffer);
+ work->worklets.front()->output.ordinal = work->input.ordinal;
+ work->worklets.front()->output.ordinal.timestamp = encoded_packet->data.frame.pts;
+ work->workletsProcessed = 1u;
+ populated = true;
+ if (end_of_stream) {
+ mSignalledOutputEos = true;
+ ALOGV("signalled End Of Stream");
+ }
+ }
+ }
+ if (!populated) {
+ work->workletsProcessed = 0u;
+ }
+}
+
+c2_status_t C2SoftAomEnc::drain(uint32_t drainMode, const std::shared_ptr<C2BlockPool>& pool) {
+ (void)pool;
+ if (drainMode == NO_DRAIN) {
+ ALOGW("drain with NO_DRAIN: no-op");
+ return C2_OK;
+ }
+ if (drainMode == DRAIN_CHAIN) {
+ ALOGW("DRAIN_CHAIN not supported");
+ return C2_OMITTED;
+ }
+
+ return C2_OK;
+}
+
+class C2SoftAomEncFactory : public C2ComponentFactory {
+ public:
+ C2SoftAomEncFactory()
+ : mHelper(std::static_pointer_cast<C2ReflectorHelper>(
+ GetCodec2PlatformComponentStore()->getParamReflector())) {}
+
+ virtual c2_status_t createComponent(c2_node_id_t id,
+ std::shared_ptr<C2Component>* const component,
+ std::function<void(C2Component*)> deleter) override {
+ *component = std::shared_ptr<C2Component>(
+ new C2SoftAomEnc(COMPONENT_NAME, id,
+ std::make_shared<C2SoftAomEnc::IntfImpl>(mHelper)),
+ deleter);
+ return C2_OK;
+ }
+
+ virtual c2_status_t createInterface(
+ c2_node_id_t id, std::shared_ptr<C2ComponentInterface>* const interface,
+ std::function<void(C2ComponentInterface*)> deleter) override {
+ *interface = std::shared_ptr<C2ComponentInterface>(
+ new SimpleInterface<C2SoftAomEnc::IntfImpl>(
+ COMPONENT_NAME, id, std::make_shared<C2SoftAomEnc::IntfImpl>(mHelper)),
+ deleter);
+ return C2_OK;
+ }
+
+ virtual ~C2SoftAomEncFactory() override = default;
+
+ private:
+ std::shared_ptr<C2ReflectorHelper> mHelper;
+};
+
+} // namespace android
+
+__attribute__((cfi_canonical_jump_table)) extern "C" ::C2ComponentFactory* CreateCodec2Factory() {
+ ALOGV("in %s", __func__);
+ return new ::android::C2SoftAomEncFactory();
+}
+
+__attribute__((cfi_canonical_jump_table)) extern "C" void DestroyCodec2Factory(
+ ::C2ComponentFactory* factory) {
+ ALOGV("in %s", __func__);
+ delete factory;
+}
diff --git a/media/codec2/components/aom/C2SoftAomEnc.h b/media/codec2/components/aom/C2SoftAomEnc.h
new file mode 100644
index 0000000..8c123d9
--- /dev/null
+++ b/media/codec2/components/aom/C2SoftAomEnc.h
@@ -0,0 +1,156 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_C2_SOFT_AV1_ENC_H_
+#define ANDROID_C2_SOFT_AV1_ENC_H_
+
+#include <inttypes.h>
+
+#include <C2PlatformSupport.h>
+#include <Codec2BufferUtils.h>
+#include <SimpleC2Component.h>
+#include <SimpleC2Interface.h>
+#include <util/C2InterfaceHelper.h>
+
+#include "aom/aom_encoder.h"
+#include "aom/aomcx.h"
+#include "common/av1_config.h"
+
+namespace android {
+struct C2SoftAomEnc : public SimpleC2Component {
+ class IntfImpl;
+
+ C2SoftAomEnc(const char* name, c2_node_id_t id, const std::shared_ptr<IntfImpl>& intfImpl);
+
+ // From SimpleC2Component
+ c2_status_t onInit() override final;
+ c2_status_t onStop() override final;
+ void onReset() override final;
+ void onRelease() override final;
+ c2_status_t onFlush_sm() override final;
+
+ void process(const std::unique_ptr<C2Work>& work,
+ const std::shared_ptr<C2BlockPool>& pool) override final;
+ c2_status_t drain(uint32_t drainMode, const std::shared_ptr<C2BlockPool>& pool) override final;
+
+ protected:
+ virtual ~C2SoftAomEnc();
+
+ private:
+ std::shared_ptr<IntfImpl> mIntf;
+
+ // Initializes aom encoder with available settings.
+ status_t initEncoder();
+
+ // aom specific opaque data structure that
+ // stores encoder state
+ aom_codec_ctx_t* mCodecContext;
+
+ // aom specific data structure that
+ // stores encoder configuration
+ aom_codec_enc_cfg_t* mCodecConfiguration;
+
+ // aom specific read-only data structure
+ // that specifies algorithm interface
+ aom_codec_iface_t* mCodecInterface;
+
+    // align stride to a power of two
+ int32_t mStrideAlign;
+
+ aom_rc_mode mBitrateControlMode;
+
+ // Minimum (best quality) quantizer
+ uint32_t mMinQuantizer;
+
+ // Maximum (worst quality) quantizer
+ uint32_t mMaxQuantizer;
+
+ // Last input buffer timestamp
+ uint64_t mLastTimestamp;
+
+ // Number of input frames
+ int64_t mNumInputFrames;
+
+    // Conversion buffer used to convert the input to
+    // yuv420 planar format.
+ MemoryBlock mConversionBuffer;
+
+ // Signalled End Of Stream
+ bool mSignalledOutputEos;
+
+ // Signalled Error
+ bool mSignalledError;
+
+ bool mHeadersReceived;
+
+ std::shared_ptr<C2StreamPictureSizeInfo::input> mSize;
+ std::shared_ptr<C2StreamIntraRefreshTuning::output> mIntraRefresh;
+ std::shared_ptr<C2StreamFrameRateInfo::output> mFrameRate;
+ std::shared_ptr<C2StreamBitrateInfo::output> mBitrate;
+ std::shared_ptr<C2StreamBitrateModeTuning::output> mBitrateMode;
+ std::shared_ptr<C2StreamRequestSyncFrameTuning::output> mRequestSync;
+
+ aom_codec_err_t setupCodecParameters();
+};
+
+class C2SoftAomEnc::IntfImpl : public SimpleInterface<void>::BaseParams {
+ public:
+ explicit IntfImpl(const std::shared_ptr<C2ReflectorHelper>& helper);
+
+ static C2R BitrateSetter(bool mayBlock, C2P<C2StreamBitrateInfo::output>& me);
+
+ static C2R SizeSetter(bool mayBlock, const C2P<C2StreamPictureSizeInfo::input>& oldMe,
+ C2P<C2StreamPictureSizeInfo::input>& me);
+
+ static C2R ProfileLevelSetter(bool mayBlock, C2P<C2StreamProfileLevelInfo::output>& me);
+
+ // unsafe getters
+ std::shared_ptr<C2StreamPictureSizeInfo::input> getSize_l() const { return mSize; }
+ std::shared_ptr<C2StreamIntraRefreshTuning::output> getIntraRefresh_l() const {
+ return mIntraRefresh;
+ }
+ std::shared_ptr<C2StreamFrameRateInfo::output> getFrameRate_l() const { return mFrameRate; }
+ std::shared_ptr<C2StreamBitrateInfo::output> getBitrate_l() const { return mBitrate; }
+ std::shared_ptr<C2StreamBitrateModeTuning::output> getBitrateMode_l() const {
+ return mBitrateMode;
+ }
+ std::shared_ptr<C2StreamRequestSyncFrameTuning::output> getRequestSync_l() const {
+ return mRequestSync;
+ }
+ std::shared_ptr<C2StreamColorAspectsInfo::output> getCodedColorAspects_l() const {
+ return mCodedColorAspects;
+ }
+ uint32_t getSyncFramePeriod() const;
+ static C2R ColorAspectsSetter(bool mayBlock, C2P<C2StreamColorAspectsInfo::input>& me);
+ static C2R CodedColorAspectsSetter(bool mayBlock, C2P<C2StreamColorAspectsInfo::output>& me,
+ const C2P<C2StreamColorAspectsInfo::input>& coded);
+
+ private:
+ std::shared_ptr<C2StreamUsageTuning::input> mUsage;
+ std::shared_ptr<C2StreamPictureSizeInfo::input> mSize;
+ std::shared_ptr<C2StreamFrameRateInfo::output> mFrameRate;
+ std::shared_ptr<C2StreamIntraRefreshTuning::output> mIntraRefresh;
+ std::shared_ptr<C2StreamRequestSyncFrameTuning::output> mRequestSync;
+ std::shared_ptr<C2StreamSyncFrameIntervalTuning::output> mSyncFramePeriod;
+ std::shared_ptr<C2StreamBitrateInfo::output> mBitrate;
+ std::shared_ptr<C2StreamBitrateModeTuning::output> mBitrateMode;
+ std::shared_ptr<C2StreamProfileLevelInfo::output> mProfileLevel;
+ std::shared_ptr<C2StreamColorAspectsInfo::input> mColorAspects;
+ std::shared_ptr<C2StreamColorAspectsInfo::output> mCodedColorAspects;
+};
+
+} // namespace android
+#endif // ANDROID_C2_SOFT_AV1_ENC_H_
diff --git a/media/codec2/components/avc/C2SoftAvcDec.cpp b/media/codec2/components/avc/C2SoftAvcDec.cpp
index 953afc5..96a4c4a 100644
--- a/media/codec2/components/avc/C2SoftAvcDec.cpp
+++ b/media/codec2/components/avc/C2SoftAvcDec.cpp
@@ -671,6 +671,9 @@
void C2SoftAvcDec::resetPlugin() {
mSignalledOutputEos = false;
mTimeStart = mTimeEnd = systemTime();
+ if (mOutBlock) {
+ mOutBlock.reset();
+ }
}
status_t C2SoftAvcDec::deleteDecoder() {
diff --git a/media/codec2/components/avc/C2SoftAvcEnc.cpp b/media/codec2/components/avc/C2SoftAvcEnc.cpp
index 28cceca..5d2856a 100644
--- a/media/codec2/components/avc/C2SoftAvcEnc.cpp
+++ b/media/codec2/components/avc/C2SoftAvcEnc.cpp
@@ -1768,17 +1768,20 @@
// }
// }
// }
- std::shared_ptr<const C2GraphicView> view;
+ std::shared_ptr<C2GraphicView> view;
std::shared_ptr<C2Buffer> inputBuffer;
if (!work->input.buffers.empty()) {
inputBuffer = work->input.buffers[0];
- view = std::make_shared<const C2GraphicView>(
+ view = std::make_shared<C2GraphicView>(
inputBuffer->data().graphicBlocks().front().map().get());
if (view->error() != C2_OK) {
ALOGE("graphic view map err = %d", view->error());
work->workletsProcessed = 1u;
return;
}
+ //(b/232396154)
+ //workaround for incorrect crop size in view when using surface mode
+ view->setCrop_be(C2Rect(mSize->width, mSize->height));
}
do {
diff --git a/media/codec2/components/gav1/C2SoftGav1Dec.h b/media/codec2/components/gav1/C2SoftGav1Dec.h
index e51c511..f0e14d7 100644
--- a/media/codec2/components/gav1/C2SoftGav1Dec.h
+++ b/media/codec2/components/gav1/C2SoftGav1Dec.h
@@ -23,8 +23,8 @@
#include <SimpleC2Component.h>
#include <C2Config.h>
-#include "libgav1/src/gav1/decoder.h"
-#include "libgav1/src/gav1/decoder_settings.h"
+#include <gav1/decoder.h>
+#include <gav1/decoder_settings.h>
namespace android {
diff --git a/media/codec2/components/hevc/C2SoftHevcDec.cpp b/media/codec2/components/hevc/C2SoftHevcDec.cpp
index a27c218..15d6dcd 100644
--- a/media/codec2/components/hevc/C2SoftHevcDec.cpp
+++ b/media/codec2/components/hevc/C2SoftHevcDec.cpp
@@ -664,6 +664,9 @@
void C2SoftHevcDec::resetPlugin() {
mSignalledOutputEos = false;
mTimeStart = mTimeEnd = systemTime();
+ if (mOutBlock) {
+ mOutBlock.reset();
+ }
}
status_t C2SoftHevcDec::deleteDecoder() {
diff --git a/media/codec2/components/hevc/C2SoftHevcEnc.cpp b/media/codec2/components/hevc/C2SoftHevcEnc.cpp
index 60d5875..9c26c02 100644
--- a/media/codec2/components/hevc/C2SoftHevcEnc.cpp
+++ b/media/codec2/components/hevc/C2SoftHevcEnc.cpp
@@ -1109,14 +1109,14 @@
}
}
- std::shared_ptr<const C2GraphicView> view;
+ std::shared_ptr<C2GraphicView> view;
std::shared_ptr<C2Buffer> inputBuffer = nullptr;
bool eos = ((work->input.flags & C2FrameData::FLAG_END_OF_STREAM) != 0);
if (eos) mSignalledEos = true;
if (!work->input.buffers.empty()) {
inputBuffer = work->input.buffers[0];
- view = std::make_shared<const C2GraphicView>(
+ view = std::make_shared<C2GraphicView>(
inputBuffer->data().graphicBlocks().front().map().get());
if (view->error() != C2_OK) {
ALOGE("graphic view map err = %d", view->error());
@@ -1125,6 +1125,9 @@
work->workletsProcessed = 1u;
return;
}
+ //(b/232396154)
+ //workaround for incorrect crop size in view when using surface mode
+ view->setCrop_be(C2Rect(mSize->width, mSize->height));
}
IHEVCE_PLUGIN_STATUS_T err = IHEVCE_EOK;
diff --git a/media/codec2/components/mpeg2/C2SoftMpeg2Dec.cpp b/media/codec2/components/mpeg2/C2SoftMpeg2Dec.cpp
index 9a41910..439323c 100644
--- a/media/codec2/components/mpeg2/C2SoftMpeg2Dec.cpp
+++ b/media/codec2/components/mpeg2/C2SoftMpeg2Dec.cpp
@@ -732,6 +732,9 @@
void C2SoftMpeg2Dec::resetPlugin() {
mSignalledOutputEos = false;
mTimeStart = mTimeEnd = systemTime();
+ if (mOutBlock) {
+ mOutBlock.reset();
+ }
}
status_t C2SoftMpeg2Dec::deleteDecoder() {
diff --git a/media/codec2/components/mpeg4_h263/C2SoftMpeg4Dec.cpp b/media/codec2/components/mpeg4_h263/C2SoftMpeg4Dec.cpp
index 54a1d0e..3bf9c48 100644
--- a/media/codec2/components/mpeg4_h263/C2SoftMpeg4Dec.cpp
+++ b/media/codec2/components/mpeg4_h263/C2SoftMpeg4Dec.cpp
@@ -256,7 +256,9 @@
mFramesConfigured = false;
mSignalledOutputEos = false;
mSignalledError = false;
-
+ if (mOutBlock) {
+ mOutBlock.reset();
+ }
return C2_OK;
}
diff --git a/media/codec2/components/mpeg4_h263/C2SoftMpeg4Enc.cpp b/media/codec2/components/mpeg4_h263/C2SoftMpeg4Enc.cpp
index 3bfec66..d5e8c56 100644
--- a/media/codec2/components/mpeg4_h263/C2SoftMpeg4Enc.cpp
+++ b/media/codec2/components/mpeg4_h263/C2SoftMpeg4Enc.cpp
@@ -464,18 +464,21 @@
}
}
- std::shared_ptr<const C2GraphicView> rView;
+ std::shared_ptr<C2GraphicView> rView;
std::shared_ptr<C2Buffer> inputBuffer;
bool eos = ((work->input.flags & C2FrameData::FLAG_END_OF_STREAM) != 0);
if (!work->input.buffers.empty()) {
inputBuffer = work->input.buffers[0];
- rView = std::make_shared<const C2GraphicView>(
+ rView = std::make_shared<C2GraphicView>(
inputBuffer->data().graphicBlocks().front().map().get());
if (rView->error() != C2_OK) {
ALOGE("graphic view map err = %d", rView->error());
work->result = rView->error();
return;
}
+ //(b/232396154)
+ //workaround for incorrect crop size in view when using surface mode
+ rView->setCrop_be(C2Rect(mSize->width, mSize->height));
} else {
fillEmptyWork(work);
if (eos) {
diff --git a/media/codec2/components/vpx/C2SoftVpxEnc.cpp b/media/codec2/components/vpx/C2SoftVpxEnc.cpp
index f99ee24..5700e5d 100644
--- a/media/codec2/components/vpx/C2SoftVpxEnc.cpp
+++ b/media/codec2/components/vpx/C2SoftVpxEnc.cpp
@@ -59,7 +59,7 @@
addParameter(
DefineParam(mSize, C2_PARAMKEY_PICTURE_SIZE)
- .withDefault(new C2StreamPictureSizeInfo::input(0u, 320, 240))
+ .withDefault(new C2StreamPictureSizeInfo::input(0u, 64, 64))
.withFields({
C2F(mSize, width).inRange(2, 2048, 2),
C2F(mSize, height).inRange(2, 2048, 2),
@@ -81,7 +81,7 @@
addParameter(
DefineParam(mFrameRate, C2_PARAMKEY_FRAME_RATE)
- .withDefault(new C2StreamFrameRateInfo::output(0u, 30.))
+ .withDefault(new C2StreamFrameRateInfo::output(0u, 1.))
// TODO: More restriction?
.withFields({C2F(mFrameRate, value).greaterThan(0.)})
.withSetter(
@@ -127,10 +127,18 @@
C2F(mProfileLevel, profile).equalTo(
PROFILE_VP9_0
),
- C2F(mProfileLevel, level).equalTo(
- LEVEL_VP9_4_1),
+ C2F(mProfileLevel, level).oneOf({
+ C2Config::LEVEL_VP9_1,
+ C2Config::LEVEL_VP9_1_1,
+ C2Config::LEVEL_VP9_2,
+ C2Config::LEVEL_VP9_2_1,
+ C2Config::LEVEL_VP9_3,
+ C2Config::LEVEL_VP9_3_1,
+ C2Config::LEVEL_VP9_4,
+ C2Config::LEVEL_VP9_4_1,
+ }),
})
- .withSetter(ProfileLevelSetter)
+ .withSetter(ProfileLevelSetter, mSize, mFrameRate, mBitrate)
.build());
#else
addParameter(
@@ -144,7 +152,7 @@
C2F(mProfileLevel, level).equalTo(
LEVEL_UNUSED),
})
- .withSetter(ProfileLevelSetter)
+ .withSetter(ProfileLevelSetter, mSize, mFrameRate, mBitrate)
.build());
#endif
addParameter(
@@ -217,14 +225,81 @@
}
C2R C2SoftVpxEnc::IntfImpl::ProfileLevelSetter(bool mayBlock,
- C2P<C2StreamProfileLevelInfo::output>& me) {
+ C2P<C2StreamProfileLevelInfo::output>& me,
+ const C2P<C2StreamPictureSizeInfo::input>& size,
+ const C2P<C2StreamFrameRateInfo::output>& frameRate,
+ const C2P<C2StreamBitrateInfo::output>& bitrate) {
(void)mayBlock;
+#ifdef VP9
if (!me.F(me.v.profile).supportsAtAll(me.v.profile)) {
me.set().profile = PROFILE_VP9_0;
}
- if (!me.F(me.v.level).supportsAtAll(me.v.level)) {
+ struct LevelLimits {
+ C2Config::level_t level;
+ float samplesPerSec;
+ uint64_t samples;
+ uint32_t bitrate;
+ size_t dimension;
+ };
+ constexpr LevelLimits kLimits[] = {
+ {LEVEL_VP9_1, 829440, 36864, 200000, 512},
+ {LEVEL_VP9_1_1, 2764800, 73728, 800000, 768},
+ {LEVEL_VP9_2, 4608000, 122880, 1800000, 960},
+ {LEVEL_VP9_2_1, 9216000, 245760, 3600000, 1344},
+ {LEVEL_VP9_3, 20736000, 552960, 7200000, 2048},
+ {LEVEL_VP9_3_1, 36864000, 983040, 12000000, 2752},
+ {LEVEL_VP9_4, 83558400, 2228224, 18000000, 4160},
+ {LEVEL_VP9_4_1, 160432128, 2228224, 30000000, 4160},
+ };
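+    // Per-level limits (max luma samples/sec, max luma picture size, max bitrate in bps,
+    // max dimension) following the VP9 level definitions.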
+
+ uint64_t samples = size.v.width * size.v.height;
+ float samplesPerSec = float(samples) * frameRate.v.value;
+ size_t dimension = std::max(size.v.width, size.v.height);
+
+ // Check if the supplied level meets the samples / bitrate requirements.
+ // If not, update the level with the lowest level meeting the requirements.
+ bool found = false;
+
+ // By default needsUpdate = false in case the supplied level does meet
+ // the requirements.
+ bool needsUpdate = false;
+ for (const LevelLimits& limit : kLimits) {
+ if (samples <= limit.samples && samplesPerSec <= limit.samplesPerSec &&
+ bitrate.v.value <= limit.bitrate && dimension <= limit.dimension) {
+ // This is the lowest level that meets the requirements, and if
+ // we haven't seen the supplied level yet, that means we don't
+ // need the update.
+ if (needsUpdate) {
+ ALOGD("Given level %x does not cover current configuration: "
+ "adjusting to %x",
+ me.v.level, limit.level);
+ me.set().level = limit.level;
+ }
+ found = true;
+ break;
+ }
+ if (me.v.level == limit.level) {
+ // We break out of the loop when the lowest feasible level is
+ // found. The fact that we're here means that our level doesn't
+ // meet the requirement and needs to be updated.
+ needsUpdate = true;
+ }
+ }
+ if (!found) {
+ // We set to the highest supported level.
me.set().level = LEVEL_VP9_4_1;
}
+#else
+ (void)size;
+ (void)frameRate;
+ (void)bitrate;
+ if (!me.F(me.v.profile).supportsAtAll(me.v.profile)) {
+ me.set().profile = PROFILE_VP8_0;
+ }
+ if (!me.F(me.v.level).supportsAtAll(me.v.level)) {
+ me.set().level = LEVEL_UNUSED;
+ }
+#endif
return C2R::Ok();
}
@@ -683,17 +758,20 @@
return;
}
- std::shared_ptr<const C2GraphicView> rView;
+ std::shared_ptr<C2GraphicView> rView;
std::shared_ptr<C2Buffer> inputBuffer;
if (!work->input.buffers.empty()) {
inputBuffer = work->input.buffers[0];
- rView = std::make_shared<const C2GraphicView>(
+ rView = std::make_shared<C2GraphicView>(
inputBuffer->data().graphicBlocks().front().map().get());
if (rView->error() != C2_OK) {
ALOGE("graphic view map err = %d", rView->error());
work->result = C2_CORRUPTED;
return;
}
+ //(b/232396154)
+ //workaround for incorrect crop size in view when using surface mode
+ rView->setCrop_be(C2Rect(mSize->width, mSize->height));
} else {
ALOGV("Empty input Buffer");
uint32_t flags = 0;
diff --git a/media/codec2/components/vpx/C2SoftVpxEnc.h b/media/codec2/components/vpx/C2SoftVpxEnc.h
index 714fadb..bfb4444 100644
--- a/media/codec2/components/vpx/C2SoftVpxEnc.h
+++ b/media/codec2/components/vpx/C2SoftVpxEnc.h
@@ -243,9 +243,10 @@
static C2R SizeSetter(bool mayBlock, const C2P<C2StreamPictureSizeInfo::input> &oldMe,
C2P<C2StreamPictureSizeInfo::input> &me);
- static C2R ProfileLevelSetter(
- bool mayBlock,
- C2P<C2StreamProfileLevelInfo::output> &me);
+ static C2R ProfileLevelSetter(bool mayBlock, C2P<C2StreamProfileLevelInfo::output>& me,
+ const C2P<C2StreamPictureSizeInfo::input>& size,
+ const C2P<C2StreamFrameRateInfo::output>& frameRate,
+ const C2P<C2StreamBitrateInfo::output>& bitrate);
static C2R LayeringSetter(bool mayBlock, C2P<C2StreamTemporalLayeringTuning::output>& me);
diff --git a/media/codec2/hidl/1.0/utils/types.cpp b/media/codec2/hidl/1.0/utils/types.cpp
index 35a3b53..319ba62 100644
--- a/media/codec2/hidl/1.0/utils/types.cpp
+++ b/media/codec2/hidl/1.0/utils/types.cpp
@@ -1613,6 +1613,7 @@
// assuming blob is const here
size_t size = blob.size();
size_t ix = 0;
+ size_t old_ix = 0;
const uint8_t *data = blob.data();
C2Param *p = nullptr;
@@ -1620,8 +1621,13 @@
p = C2ParamUtils::ParseFirst(data + ix, size - ix);
if (p) {
params->emplace_back(p);
+ old_ix = ix;
ix += p->size();
ix = align(ix, PARAMS_ALIGNMENT);
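+            // Stop parsing if the parameter failed to advance ix or runs past the end of
+            // the blob (malformed input; see b/238083570).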
+ if (ix <= old_ix || ix > size) {
+ android_errorWriteLog(0x534e4554, "238083570");
+ break;
+ }
}
} while (p);
diff --git a/media/codec2/sfplugin/CCodec.cpp b/media/codec2/sfplugin/CCodec.cpp
index 124a893..bdc4828 100644
--- a/media/codec2/sfplugin/CCodec.cpp
+++ b/media/codec2/sfplugin/CCodec.cpp
@@ -1075,8 +1075,7 @@
} else {
if ((config->mDomain & Config::IS_ENCODER) || !surface) {
if (vendorSdkVersion < __ANDROID_API_S__ &&
- (format == COLOR_FormatYUV420Flexible ||
- format == COLOR_FormatYUV420Planar ||
+ (format == COLOR_FormatYUV420Planar ||
format == COLOR_FormatYUV420PackedPlanar ||
format == COLOR_FormatYUV420SemiPlanar ||
format == COLOR_FormatYUV420PackedSemiPlanar)) {
@@ -1973,7 +1972,6 @@
}
mChannel->reset();
- mChannel->stopUseOutputSurface();
// thiz holds strong ref to this while the thread is running.
sp<CCodec> thiz(this);
std::thread([thiz, sendCallback] { thiz->release(sendCallback); }).detach();
@@ -1994,6 +1992,7 @@
comp = state->comp;
}
comp->release();
+ mChannel->stopUseOutputSurface();
{
Mutexed<State>::Locked state(mState);
diff --git a/media/codec2/sfplugin/CCodecBuffers.h b/media/codec2/sfplugin/CCodecBuffers.h
index c8e9930..6335f13 100644
--- a/media/codec2/sfplugin/CCodecBuffers.h
+++ b/media/codec2/sfplugin/CCodecBuffers.h
@@ -72,7 +72,7 @@
virtual void getArray(Vector<sp<MediaCodecBuffer>> *) const {}
/**
- * Return number of buffers the client owns.
+ * Return number of buffers owned by the client or the component.
*/
virtual size_t numActiveSlots() const = 0;
@@ -595,8 +595,7 @@
void flush();
/**
- * Return the number of buffers that are sent to the client but not released
- * yet.
+ * Return the number of buffers that are sent to the client or the component.
*/
size_t numActiveSlots() const;
@@ -716,8 +715,7 @@
void grow(size_t newSize, std::function<sp<Codec2Buffer>()> alloc);
/**
- * Return the number of buffers that are sent to the client but not released
- * yet.
+ * Return the number of buffers that are sent to the client or the component.
*/
size_t numActiveSlots() const;
diff --git a/media/codec2/sfplugin/Codec2Buffer.cpp b/media/codec2/sfplugin/Codec2Buffer.cpp
index 876c96d..b9270de 100644
--- a/media/codec2/sfplugin/Codec2Buffer.cpp
+++ b/media/codec2/sfplugin/Codec2Buffer.cpp
@@ -533,7 +533,7 @@
* align(mHeight, 64) / plane.rowSampling;
}
- if (minPtr == mView.data()[0] && (maxPtr - minPtr + 1) <= planeSize) {
+ if (minPtr == mView.data()[0] && (maxPtr - minPtr) <= planeSize) {
// FIXME: this is risky as reading/writing data out of bound results
// in an undefined behavior, but gralloc does assume a
// contiguous mapping
@@ -545,8 +545,7 @@
mediaImage->mPlane[i].mHorizSubsampling = plane.colSampling;
mediaImage->mPlane[i].mVertSubsampling = plane.rowSampling;
}
- mWrapped = new ABuffer(const_cast<uint8_t *>(minPtr),
- maxPtr - minPtr + 1);
+ mWrapped = new ABuffer(const_cast<uint8_t *>(minPtr), maxPtr - minPtr);
ALOGV("Converter: wrapped (capacity=%zu)", mWrapped->capacity());
}
}
diff --git a/media/codec2/sfplugin/utils/Codec2CommonUtils.cpp b/media/codec2/sfplugin/utils/Codec2CommonUtils.cpp
index ef5800d..332d3ac 100644
--- a/media/codec2/sfplugin/utils/Codec2CommonUtils.cpp
+++ b/media/codec2/sfplugin/utils/Codec2CommonUtils.cpp
@@ -38,7 +38,7 @@
!strcmp(deviceCodeName, "Tiramisu");
}
-bool isVendorApiOrFirstApiAtLeastT() {
+static bool isP010Allowed() {
// The first SDK the device shipped with.
static const int32_t kProductFirstApiLevel =
base::GetIntProperty<int32_t>("ro.product.first_api_level", 0);
@@ -47,6 +47,17 @@
// to signal which VSR requirements they conform to even if the first device SDK was higher.
static const int32_t kBoardFirstApiLevel =
base::GetIntProperty<int32_t>("ro.board.first_api_level", 0);
+
+ // Some devices that launched prior to Android S may not support P010 correctly, even
+ // though they may advertise it as supported.
+ if (kProductFirstApiLevel != 0 && kProductFirstApiLevel < __ANDROID_API_S__) {
+ return false;
+ }
+
+ if (kBoardFirstApiLevel != 0 && kBoardFirstApiLevel < __ANDROID_API_S__) {
+ return false;
+ }
+
static const int32_t kBoardApiLevel =
base::GetIntProperty<int32_t>("ro.board.api_level", 0);
@@ -67,7 +78,7 @@
// API alone. For now limit P010 to devices that launched with Android T or known to conform
// to Android T VSR (as opposed to simply limiting to a T vendor image).
if (format == (AHardwareBuffer_Format)HAL_PIXEL_FORMAT_YCBCR_P010 &&
- !isVendorApiOrFirstApiAtLeastT()) {
+ !isP010Allowed()) {
return false;
}
diff --git a/media/codec2/vndk/C2Store.cpp b/media/codec2/vndk/C2Store.cpp
index dfdd84d..d7a9764 100644
--- a/media/codec2/vndk/C2Store.cpp
+++ b/media/codec2/vndk/C2Store.cpp
@@ -1081,6 +1081,7 @@
emplace("libcodec2_soft_amrwbenc.so");
//emplace("libcodec2_soft_av1dec_aom.so"); // deprecated for the gav1 implementation
emplace("libcodec2_soft_av1dec_gav1.so");
+ emplace("libcodec2_soft_av1enc.so");
emplace("libcodec2_soft_avcdec.so");
emplace("libcodec2_soft_avcenc.so");
emplace("libcodec2_soft_flacdec.so");
diff --git a/media/codec2/vndk/platform/C2BqBuffer.cpp b/media/codec2/vndk/platform/C2BqBuffer.cpp
index e67e42f..9960c58 100644
--- a/media/codec2/vndk/platform/C2BqBuffer.cpp
+++ b/media/codec2/vndk/platform/C2BqBuffer.cpp
@@ -432,6 +432,10 @@
if (fence) {
static constexpr int kFenceWaitTimeMs = 10;
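+            // Drop the cached buffer for this slot since it is about to be reallocated.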
+ if (bufferNeedsReallocation) {
+ mBuffers[slot].clear();
+ }
+
status_t status = fence->wait(kFenceWaitTimeMs);
if (status == -ETIME) {
// fence is not signalled yet.
diff --git a/media/janitors/avic_OWNERS b/media/janitors/avic_OWNERS
new file mode 100644
index 0000000..eca9978
--- /dev/null
+++ b/media/janitors/avic_OWNERS
@@ -0,0 +1,6 @@
+# Bug component: 1344
+# gerrit owner/approvers in the AVIC team
+arifdikici@google.com
+dichenzhang@google.com
+kyslov@google.com
+richardxie@google.com
diff --git a/media/janitors/media_solutions_OWNERS b/media/janitors/media_solutions_OWNERS
index 69e3a5e..e0c87f7 100644
--- a/media/janitors/media_solutions_OWNERS
+++ b/media/janitors/media_solutions_OWNERS
@@ -1,10 +1,21 @@
# Bug component: 1344
# go/android-fwk-media-solutions for info on areas of ownership.
-# Main owners:
+# MediaRouter and native mirroring only:
adadukin@google.com
aquilescanta@google.com
+bishoygendy@google.com
ivanbuper@google.com
-# In case of emergency:
-andrewlewis@google.com #{LAST_RESORT_SUGGESTION}
+# MediaMuxer, MediaRecorder, and seamless transcoding only:
+andrewlewis@google.com
+claincly@google.com
+
+# Everything in go/android-fwk-media-solutions not covered above:
+bachinger@google.com
+christosts@google.com
+ibaker@google.com
+michaelkatz@google.com
+rohks@google.com
+tianyifeng@google.com
+tonihei@google.com
diff --git a/media/libaaudio/Android.bp b/media/libaaudio/Android.bp
index add28e0..4b417a7 100644
--- a/media/libaaudio/Android.bp
+++ b/media/libaaudio/Android.bp
@@ -36,6 +36,9 @@
symbol_file: "src/libaaudio.map.txt",
first_version: "26",
unversioned_until: "current",
+ export_header_libs: [
+ "libAAudio_headers",
+ ],
}
cc_library_headers {
diff --git a/media/libaaudio/TEST_MAPPING b/media/libaaudio/TEST_MAPPING
index 9aff137..5d3fb0a 100644
--- a/media/libaaudio/TEST_MAPPING
+++ b/media/libaaudio/TEST_MAPPING
@@ -1,5 +1,5 @@
{
- "postsubmit": [
+ "presubmit": [
{
"name": "CtsNativeMediaAAudioTestCases",
"options" : [
diff --git a/media/libaaudio/include/aaudio/AAudio.h b/media/libaaudio/include/aaudio/AAudio.h
index 9ca24aa..abfd8a7 100644
--- a/media/libaaudio/include/aaudio/AAudio.h
+++ b/media/libaaudio/include/aaudio/AAudio.h
@@ -277,6 +277,7 @@
AAUDIO_STREAM_STATE_CLOSED,
/**
* The stream is disconnected from audio device.
+ * @deprecated
*/
AAUDIO_STREAM_STATE_DISCONNECTED
};
@@ -886,6 +887,12 @@
* will be respected if both this function and {@link AAudioStreamBuilder_setChannelMask} are
* called.
*
+ * Note that if the channel count is two then it may get mixed to mono when the device only supports
+ * one channel. If the channel count is greater than two but the device's supported channel count is
+ * less than the requested value, the channels beyond the device's channel count will be dropped. If
+ * higher channels should be mixed or spatialized, use {@link AAudioStreamBuilder_setChannelMask}
+ * instead.
+ *
* Available since API level 26.
*
* @param builder reference provided by AAudio_createStreamBuilder()
diff --git a/media/libaaudio/src/client/AudioStreamInternal.cpp b/media/libaaudio/src/client/AudioStreamInternal.cpp
index 07a96b7..27f519b 100644
--- a/media/libaaudio/src/client/AudioStreamInternal.cpp
+++ b/media/libaaudio/src/client/AudioStreamInternal.cpp
@@ -315,11 +315,10 @@
aaudio_result_t result = AAUDIO_OK;
ALOGD("%s(): mServiceStreamHandle = 0x%08X", __func__, mServiceStreamHandle);
if (mServiceStreamHandle != AAUDIO_HANDLE_INVALID) {
- aaudio_stream_state_t currentState = getState();
// Don't release a stream while it is running. Stop it first.
// If DISCONNECTED then we should still try to stop in case the
// error callback is still running.
- if (isActive() || currentState == AAUDIO_STREAM_STATE_DISCONNECTED) {
+ if (isActive() || isDisconnected()) {
requestStop_l();
}
@@ -432,11 +431,11 @@
return AAUDIO_ERROR_INVALID_STATE;
}
- aaudio_stream_state_t originalState = getState();
- if (originalState == AAUDIO_STREAM_STATE_DISCONNECTED) {
+ if (isDisconnected()) {
ALOGD("requestStart() but DISCONNECTED");
return AAUDIO_ERROR_DISCONNECTED;
}
+ aaudio_stream_state_t originalState = getState();
setState(AAUDIO_STREAM_STATE_STARTING);
// Clear any stale timestamps from the previous run.
@@ -456,7 +455,7 @@
ALOGD("%s() error = %d, stream was probably stolen", __func__, result);
// Stealing was added in R. Coerce result to improve backward compatibility.
result = AAUDIO_ERROR_DISCONNECTED;
- setState(AAUDIO_STREAM_STATE_DISCONNECTED);
+ setDisconnected();
}
startTime = AudioClock::getNanoseconds();
@@ -473,7 +472,6 @@
result = createThread_l(periodNanos, aaudio_callback_thread_proc, this);
}
if (result != AAUDIO_OK) {
- // TODO(b/214607638): Do we want to roll back to original state or keep as disconnected?
setState(originalState);
}
return result;
@@ -499,8 +497,7 @@
// This must be called under mStreamLock.
aaudio_result_t AudioStreamInternal::stopCallback_l()
{
- if (isDataCallbackSet()
- && (isActive() || getState() == AAUDIO_STREAM_STATE_DISCONNECTED)) {
+ if (isDataCallbackSet() && (isActive() || isDisconnected())) {
mCallbackEnabled.store(false);
aaudio_result_t result = joinThread_l(nullptr); // may temporarily unlock mStreamLock
if (result == AAUDIO_ERROR_INVALID_HANDLE) {
@@ -525,7 +522,7 @@
// and the callback may have stopped the stream.
// Check to make sure the stream still needs to be stopped.
// See also AudioStream::safeStop_l().
- if (!(isActive() || getState() == AAUDIO_STREAM_STATE_DISCONNECTED)) {
+ if (!(isActive() || isDisconnected())) {
ALOGD("%s() returning early, not active or disconnected", __func__);
return AAUDIO_OK;
}
@@ -675,7 +672,7 @@
mAudioEndpoint->eraseDataMemory();
}
result = AAUDIO_ERROR_DISCONNECTED;
- setState(AAUDIO_STREAM_STATE_DISCONNECTED);
+ setDisconnected();
ALOGW("%s - AAUDIO_SERVICE_EVENT_DISCONNECTED - FIFO cleared", __func__);
break;
case AAUDIO_SERVICE_EVENT_VOLUME:
diff --git a/media/libaaudio/src/core/AudioStream.cpp b/media/libaaudio/src/core/AudioStream.cpp
index 8a5186a..c9351e0 100644
--- a/media/libaaudio/src/core/AudioStream.cpp
+++ b/media/libaaudio/src/core/AudioStream.cpp
@@ -61,10 +61,9 @@
// If the stream is deleted when OPEN or in use then audio resources will leak.
// This would indicate an internal error. So we want to find this ASAP.
LOG_ALWAYS_FATAL_IF(!(getState() == AAUDIO_STREAM_STATE_CLOSED
- || getState() == AAUDIO_STREAM_STATE_UNINITIALIZED
- || getState() == AAUDIO_STREAM_STATE_DISCONNECTED),
- "~AudioStream() - still in use, state = %s",
- AudioGlobal_convertStreamStateToText(getState()));
+ || getState() == AAUDIO_STREAM_STATE_UNINITIALIZED),
+ "~AudioStream() - still in use, state = %s disconnected = %d",
+ AudioGlobal_convertStreamStateToText(getState()), isDisconnected());
}
aaudio_result_t AudioStream::open(const AudioStreamBuilder& builder)
@@ -158,6 +157,11 @@
std::lock_guard<std::mutex> lock(mStreamLock);
+ if (isDisconnected()) {
+ ALOGW("%s() stream is disconnected", __func__);
+ return AAUDIO_ERROR_INVALID_STATE;
+ }
+
switch (getState()) {
// Is this a good time to start?
case AAUDIO_STREAM_STATE_OPEN:
@@ -176,8 +180,13 @@
AudioGlobal_convertStreamStateToText(getState()));
return AAUDIO_ERROR_INVALID_STATE;
- // Don't start when the stream is dead!
case AAUDIO_STREAM_STATE_DISCONNECTED:
+            // This must not happen after deprecating AAUDIO_STREAM_STATE_DISCONNECTED; trying to
+            // start will ultimately return ERROR_DISCONNECTED.
+ ALOGE("%s, unexpected state = AAUDIO_STREAM_STATE_DISCONNECTED", __func__);
+ return AAUDIO_ERROR_INTERNAL;
+
+ // Don't start when the stream is dead!
case AAUDIO_STREAM_STATE_CLOSING:
case AAUDIO_STREAM_STATE_CLOSED:
default:
@@ -210,7 +219,11 @@
// Proceed with pausing.
case AAUDIO_STREAM_STATE_STARTING:
case AAUDIO_STREAM_STATE_STARTED:
+ break;
+
case AAUDIO_STREAM_STATE_DISCONNECTED:
+ // This must not happen after deprecating AAUDIO_STREAM_STATE_DISCONNECTED
+ ALOGE("%s, unexpected state = AAUDIO_STREAM_STATE_DISCONNECTED", __func__);
break;
// Transition from one inactive state to another.
@@ -289,7 +302,10 @@
// Proceed with stopping.
case AAUDIO_STREAM_STATE_STARTING:
case AAUDIO_STREAM_STATE_STARTED:
+ break;
case AAUDIO_STREAM_STATE_DISCONNECTED:
+ // This must not happen after deprecating AAUDIO_STREAM_STATE_DISCONNECTED
+ ALOGE("%s, unexpected state = AAUDIO_STREAM_STATE_DISCONNECTED", __func__);
break;
// Transition from one inactive state to another.
@@ -369,13 +385,8 @@
if (state == oldState) {
return; // no change
}
- // Track transition to DISCONNECTED state.
- if (state == AAUDIO_STREAM_STATE_DISCONNECTED) {
- android::mediametrics::LogItem(mMetricsId)
- .set(AMEDIAMETRICS_PROP_EVENT, AMEDIAMETRICS_PROP_EVENT_VALUE_DISCONNECT)
- .set(AMEDIAMETRICS_PROP_STATE, AudioGlobal_convertStreamStateToText(oldState))
- .record();
- }
+ LOG_ALWAYS_FATAL_IF(state == AAUDIO_STREAM_STATE_DISCONNECTED,
+ "Disconnected state must be separated from mState");
// CLOSED is a final state
if (oldState == AAUDIO_STREAM_STATE_CLOSED) {
ALOGW("%s(%d) tried to set to %d but already CLOSED", __func__, getId(), state);
@@ -385,12 +396,6 @@
&& state != AAUDIO_STREAM_STATE_CLOSED) {
ALOGW("%s(%d) tried to set to %d but already CLOSING", __func__, getId(), state);
- // Once DISCONNECTED, we can only move to CLOSING or CLOSED state.
- } else if (oldState == AAUDIO_STREAM_STATE_DISCONNECTED
- && !(state == AAUDIO_STREAM_STATE_CLOSING
- || state == AAUDIO_STREAM_STATE_CLOSED)) {
- ALOGW("%s(%d) tried to set to %d but already DISCONNECTED", __func__, getId(), state);
-
} else {
mState.store(state);
// Wake up a wakeForStateChange thread if it exists.
@@ -398,6 +403,21 @@
}
}
+void AudioStream::setDisconnected() {
+ const bool old = isDisconnected();
+ ALOGD("%s setting disconnected, current disconnected: %d, current state: %d",
+ __func__, old, getState());
+ if (old) {
+ return; // no change, the stream is already disconnected
+ }
+ mDisconnected.store(true);
+ // Track transition to DISCONNECTED state.
+ android::mediametrics::LogItem(mMetricsId)
+ .set(AMEDIAMETRICS_PROP_EVENT, AMEDIAMETRICS_PROP_EVENT_VALUE_DISCONNECT)
+ .set(AMEDIAMETRICS_PROP_STATE, AudioGlobal_convertStreamStateToText(getState()))
+ .record();
+}
+
aaudio_result_t AudioStream::waitForStateChange(aaudio_stream_state_t currentState,
aaudio_stream_state_t *nextState,
int64_t timeoutNanoseconds)
diff --git a/media/libaaudio/src/core/AudioStream.h b/media/libaaudio/src/core/AudioStream.h
index e36928d..50f6aa0 100644
--- a/media/libaaudio/src/core/AudioStream.h
+++ b/media/libaaudio/src/core/AudioStream.h
@@ -558,6 +558,11 @@
void setState(aaudio_stream_state_t state);
+ bool isDisconnected() const {
+ return mDisconnected.load();
+ }
+ void setDisconnected();
+
void setDeviceId(int32_t deviceId) {
mDeviceId = deviceId;
}
@@ -683,6 +688,8 @@
std::atomic<aaudio_stream_state_t> mState{AAUDIO_STREAM_STATE_UNINITIALIZED};
+ std::atomic_bool mDisconnected{false};
+
// These do not change after open().
int32_t mSamplesPerFrame = AAUDIO_UNSPECIFIED;
aaudio_channel_mask_t mChannelMask = AAUDIO_UNSPECIFIED;
diff --git a/media/libaaudio/src/core/AudioStreamBuilder.cpp b/media/libaaudio/src/core/AudioStreamBuilder.cpp
index 04b4325..91fd5bf 100644
--- a/media/libaaudio/src/core/AudioStreamBuilder.cpp
+++ b/media/libaaudio/src/core/AudioStreamBuilder.cpp
@@ -24,7 +24,6 @@
#include <aaudio/AAudio.h>
#include <aaudio/AAudioTesting.h>
-#include <android/media/audio/common/AudioMMapPolicy.h>
#include <android/media/audio/common/AudioMMapPolicyInfo.h>
#include <android/media/audio/common/AudioMMapPolicyType.h>
#include <media/AudioSystem.h>
@@ -37,10 +36,10 @@
#include "core/AudioStreamBuilder.h"
#include "legacy/AudioStreamRecord.h"
#include "legacy/AudioStreamTrack.h"
+#include "utility/AAudioUtilities.h"
using namespace aaudio;
-using android::media::audio::common::AudioMMapPolicy;
using android::media::audio::common::AudioMMapPolicyInfo;
using android::media::audio::common::AudioMMapPolicyType;
@@ -95,37 +94,6 @@
return result;
}
-namespace {
-
-aaudio_policy_t aidl2legacy_aaudio_policy(AudioMMapPolicy aidl) {
- switch (aidl) {
- case AudioMMapPolicy::NEVER:
- return AAUDIO_POLICY_NEVER;
- case AudioMMapPolicy::AUTO:
- return AAUDIO_POLICY_AUTO;
- case AudioMMapPolicy::ALWAYS:
- return AAUDIO_POLICY_ALWAYS;
- case AudioMMapPolicy::UNSPECIFIED:
- default:
- return AAUDIO_UNSPECIFIED;
- }
-}
-
-// The aaudio policy will be ALWAYS, NEVER, UNSPECIFIED only when all policy info are
-// ALWAYS, NEVER or UNSPECIFIED. Otherwise, the aaudio policy will be AUTO.
-aaudio_policy_t getAAudioPolicy(
- const std::vector<AudioMMapPolicyInfo>& policyInfos) {
- if (policyInfos.empty()) return AAUDIO_POLICY_AUTO;
- for (size_t i = 1; i < policyInfos.size(); ++i) {
- if (policyInfos.at(i).mmapPolicy != policyInfos.at(0).mmapPolicy) {
- return AAUDIO_POLICY_AUTO;
- }
- }
- return aidl2legacy_aaudio_policy(policyInfos.at(0).mmapPolicy);
-}
-
-} // namespace
-
// Try to open using MMAP path if that is allowed.
// Fall back to Legacy path if MMAP not available.
// Exact behavior is controlled by MMapPolicy.
@@ -150,7 +118,7 @@
// If not specified then get from a system property.
if (mmapPolicy == AAUDIO_UNSPECIFIED && android::AudioSystem::getMmapPolicyInfo(
AudioMMapPolicyType::DEFAULT, &policyInfos) == NO_ERROR) {
- mmapPolicy = getAAudioPolicy(policyInfos);
+ mmapPolicy = AAudio_getAAudioPolicy(policyInfos);
}
// If still not specified then use the default.
if (mmapPolicy == AAUDIO_UNSPECIFIED) {
@@ -161,7 +129,7 @@
aaudio_policy_t mmapExclusivePolicy = AAUDIO_UNSPECIFIED;
if (android::AudioSystem::getMmapPolicyInfo(
AudioMMapPolicyType::EXCLUSIVE, &policyInfos) == NO_ERROR) {
- mmapExclusivePolicy = getAAudioPolicy(policyInfos);
+ mmapExclusivePolicy = AAudio_getAAudioPolicy(policyInfos);
}
if (mmapExclusivePolicy == AAUDIO_UNSPECIFIED) {
mmapExclusivePolicy = AAUDIO_MMAP_EXCLUSIVE_POLICY_DEFAULT;
diff --git a/media/libaaudio/src/legacy/AudioStreamLegacy.cpp b/media/libaaudio/src/legacy/AudioStreamLegacy.cpp
index f32ef65..8595308 100644
--- a/media/libaaudio/src/legacy/AudioStreamLegacy.cpp
+++ b/media/libaaudio/src/legacy/AudioStreamLegacy.cpp
@@ -85,7 +85,7 @@
// AudioRecord::Buffer
// TODO define our own AudioBuffer and pass it from the subclasses.
size_t written = buffer.size();
- if (getState() == AAUDIO_STREAM_STATE_DISCONNECTED) {
+ if (isDisconnected()) {
ALOGW("%s() data, stream disconnected", __func__);
// This will kill the stream and prevent it from being restarted.
// That is OK because the stream is disconnected.
@@ -150,7 +150,7 @@
// AudioRecord::Buffer
// TODO define our own AudioBuffer and pass it from the subclasses.
size_t written = buffer.size();
- if (getState() == AAUDIO_STREAM_STATE_DISCONNECTED) {
+ if (isDisconnected()) {
ALOGW("%s() data, stream disconnected", __func__);
// This will kill the stream and prevent it from being restarted.
// That is OK because the stream is disconnected.
@@ -214,11 +214,11 @@
void AudioStreamLegacy::forceDisconnect(bool errorCallbackEnabled) {
// There is no need to disconnect if already in these states.
- if (getState() != AAUDIO_STREAM_STATE_DISCONNECTED
+ if (!isDisconnected()
&& getState() != AAUDIO_STREAM_STATE_CLOSING
&& getState() != AAUDIO_STREAM_STATE_CLOSED
) {
- setState(AAUDIO_STREAM_STATE_DISCONNECTED);
+ setDisconnected();
if (errorCallbackEnabled) {
maybeCallErrorCallback(AAUDIO_ERROR_DISCONNECTED);
}
@@ -268,7 +268,7 @@
ALOGD("%s(deviceId = %d)", __func__, (int)deviceId);
if (getDeviceId() != AAUDIO_UNSPECIFIED
&& getDeviceId() != deviceId
- && getState() != AAUDIO_STREAM_STATE_DISCONNECTED
+ && !isDisconnected()
) {
// Note that isDataCallbackActive() is affected by state so call it before DISCONNECTING.
// If we have a data callback and the stream is active, then ask the data callback
diff --git a/media/libaaudio/src/legacy/AudioStreamRecord.cpp b/media/libaaudio/src/legacy/AudioStreamRecord.cpp
index 9a136a7..da152b0 100644
--- a/media/libaaudio/src/legacy/AudioStreamRecord.cpp
+++ b/media/libaaudio/src/legacy/AudioStreamRecord.cpp
@@ -403,7 +403,7 @@
return result;
}
- if (getState() == AAUDIO_STREAM_STATE_DISCONNECTED) {
+ if (isDisconnected()) {
return AAUDIO_ERROR_DISCONNECTED;
}
@@ -446,7 +446,7 @@
// In this context, a DEAD_OBJECT is more likely to be a disconnect notification due to
// AudioRecord invalidation.
if (bytesActuallyRead == DEAD_OBJECT) {
- setState(AAUDIO_STREAM_STATE_DISCONNECTED);
+ setDisconnected();
return AAUDIO_ERROR_DISCONNECTED;
}
return AAudioConvert_androidToAAudioResult(bytesActuallyRead);
diff --git a/media/libaaudio/src/legacy/AudioStreamTrack.cpp b/media/libaaudio/src/legacy/AudioStreamTrack.cpp
index fb3fcc1..10bd5f7 100644
--- a/media/libaaudio/src/legacy/AudioStreamTrack.cpp
+++ b/media/libaaudio/src/legacy/AudioStreamTrack.cpp
@@ -248,7 +248,7 @@
if (getState() != AAUDIO_STREAM_STATE_UNINITIALIZED) {
ALOGE("%s - Open canceled since state = %d", __func__, getState());
- if (getState() == AAUDIO_STREAM_STATE_DISCONNECTED)
+ if (isDisconnected())
{
ALOGE("%s - Opening while state is disconnected", __func__);
safeReleaseClose();
@@ -432,7 +432,7 @@
return result;
}
- if (getState() == AAUDIO_STREAM_STATE_DISCONNECTED) {
+ if (isDisconnected()) {
return AAUDIO_ERROR_DISCONNECTED;
}
@@ -446,7 +446,7 @@
// in this context, a DEAD_OBJECT is more likely to be a disconnect notification due to
// AudioTrack invalidation
if (bytesWritten == DEAD_OBJECT) {
- setState(AAUDIO_STREAM_STATE_DISCONNECTED);
+ setDisconnected();
return AAUDIO_ERROR_DISCONNECTED;
}
return AAudioConvert_androidToAAudioResult(bytesWritten);
diff --git a/media/libaaudio/src/utility/AAudioUtilities.cpp b/media/libaaudio/src/utility/AAudioUtilities.cpp
index a197ced..0afa11b 100644
--- a/media/libaaudio/src/utility/AAudioUtilities.cpp
+++ b/media/libaaudio/src/utility/AAudioUtilities.cpp
@@ -16,24 +16,28 @@
#define LOG_TAG "AAudio"
//#define LOG_NDEBUG 0
-#include <utils/Log.h>
-#include <cutils/properties.h>
+#include <assert.h>
+#include <math.h>
#include <stdint.h>
+
+#include <aaudio/AAudioTesting.h>
+#include <android/media/audio/common/AudioMMapPolicy.h>
+#include <cutils/properties.h>
#include <sys/types.h>
+#include <system/audio.h>
#include <utils/Errors.h>
+#include <utils/Log.h>
#include "aaudio/AAudio.h"
#include "core/AudioGlobal.h"
-#include <aaudio/AAudioTesting.h>
-#include <math.h>
-#include <system/audio.h>
-#include <assert.h>
-
#include "utility/AAudioUtilities.h"
using namespace android;
+using android::media::audio::common::AudioMMapPolicy;
+using android::media::audio::common::AudioMMapPolicyInfo;
+
status_t AAudioConvert_aaudioToAndroidStatus(aaudio_result_t result) {
// This covers the case for AAUDIO_OK and for positive results.
if (result >= 0) {
@@ -638,3 +642,31 @@
}
return result;
}
+
+namespace {
+
+aaudio_policy_t aidl2legacy_aaudio_policy(AudioMMapPolicy aidl) {
+ switch (aidl) {
+ case AudioMMapPolicy::NEVER:
+ return AAUDIO_POLICY_NEVER;
+ case AudioMMapPolicy::AUTO:
+ return AAUDIO_POLICY_AUTO;
+ case AudioMMapPolicy::ALWAYS:
+ return AAUDIO_POLICY_ALWAYS;
+ case AudioMMapPolicy::UNSPECIFIED:
+ default:
+ return AAUDIO_UNSPECIFIED;
+ }
+}
+
+} // namespace
+
+aaudio_policy_t AAudio_getAAudioPolicy(const std::vector<AudioMMapPolicyInfo>& policyInfos) {
+ if (policyInfos.empty()) return AAUDIO_POLICY_AUTO;
+ for (size_t i = 1; i < policyInfos.size(); ++i) {
+ if (policyInfos.at(i).mmapPolicy != policyInfos.at(0).mmapPolicy) {
+ return AAUDIO_POLICY_AUTO;
+ }
+ }
+ return aidl2legacy_aaudio_policy(policyInfos.at(0).mmapPolicy);
+}
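
AAudio_getAAudioPolicy() collapses to AAUDIO_POLICY_AUTO whenever the reported infos are empty or disagree with each other; only a uniform list is converted to the matching legacy policy. A small illustrative check of those cases; the makeInfo() helper and the assertions are examples, not part of the patch:

    // Illustrative expectations for the aggregation rule implemented above.
    #include <cassert>
    #include <vector>
    #include <android/media/audio/common/AudioMMapPolicy.h>
    #include <android/media/audio/common/AudioMMapPolicyInfo.h>
    #include "utility/AAudioUtilities.h"

    using android::media::audio::common::AudioMMapPolicy;
    using android::media::audio::common::AudioMMapPolicyInfo;

    static AudioMMapPolicyInfo makeInfo(AudioMMapPolicy policy) {
        AudioMMapPolicyInfo info;
        info.mmapPolicy = policy;
        return info;
    }

    static void checkPolicyAggregation() {
        // No infos reported -> AUTO.
        assert(AAudio_getAAudioPolicy({}) == AAUDIO_POLICY_AUTO);
        // Disagreeing infos -> AUTO.
        assert(AAudio_getAAudioPolicy({makeInfo(AudioMMapPolicy::NEVER),
                                       makeInfo(AudioMMapPolicy::ALWAYS)}) == AAUDIO_POLICY_AUTO);
        // Uniform infos -> converted to the matching legacy policy.
        assert(AAudio_getAAudioPolicy({makeInfo(AudioMMapPolicy::NEVER),
                                       makeInfo(AudioMMapPolicy::NEVER)}) == AAUDIO_POLICY_NEVER);
    }
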
diff --git a/media/libaaudio/src/utility/AAudioUtilities.h b/media/libaaudio/src/utility/AAudioUtilities.h
index b59ce1c..ac75306 100644
--- a/media/libaaudio/src/utility/AAudioUtilities.h
+++ b/media/libaaudio/src/utility/AAudioUtilities.h
@@ -19,14 +19,17 @@
#include <algorithm>
#include <functional>
+#include <vector>
#include <stdint.h>
#include <sys/types.h>
#include <unistd.h>
+#include <android/media/audio/common/AudioMMapPolicyInfo.h>
#include <utils/Errors.h>
#include <system/audio.h>
#include "aaudio/AAudio.h"
+#include "aaudio/AAudioTesting.h"
/**
* Convert an AAudio result into the closest matching Android status.
@@ -343,4 +346,9 @@
AAUDIO_CHANNEL_INDEX_MASK_24 = AAUDIO_CHANNEL_BIT_INDEX | (1 << 24) - 1,
};
+// The aaudio policy will be ALWAYS, NEVER, UNSPECIFIED only when all policy info are
+// ALWAYS, NEVER or UNSPECIFIED. Otherwise, the aaudio policy will be AUTO.
+aaudio_policy_t AAudio_getAAudioPolicy(
+ const std::vector<android::media::audio::common::AudioMMapPolicyInfo>& policyInfos);
+
#endif //UTILITY_AAUDIO_UTILITIES_H
diff --git a/media/libaaudio/tests/Android.bp b/media/libaaudio/tests/Android.bp
index 4b45909..438be0a 100644
--- a/media/libaaudio/tests/Android.bp
+++ b/media/libaaudio/tests/Android.bp
@@ -214,3 +214,17 @@
srcs: ["test_disconnect_race.cpp"],
shared_libs: ["libaaudio"],
}
+
+cc_test {
+ name: "aaudio_test_mmap_path",
+ defaults: [
+ "libaaudio_tests_defaults",
+ ],
+ srcs: ["test_mmap_path.cpp"],
+ shared_libs: [
+ "libaaudio",
+ "libaaudio_internal",
+ "libaudioclient",
+ "liblog",
+ ],
+}
diff --git a/media/libaaudio/tests/test_mmap_path.cpp b/media/libaaudio/tests/test_mmap_path.cpp
new file mode 100644
index 0000000..c8376f6
--- /dev/null
+++ b/media/libaaudio/tests/test_mmap_path.cpp
@@ -0,0 +1,76 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "test_mmap_path"
+
+#include <vector>
+
+#include <aaudio/AAudio.h>
+#include <aaudio/AAudioTesting.h>
+#include <android/log.h>
+#include <android/media/audio/common/AudioMMapPolicyInfo.h>
+#include <android/media/audio/common/AudioMMapPolicyType.h>
+#include <media/AudioSystem.h>
+
+#include <gtest/gtest.h>
+
+#include "utility/AAudioUtilities.h"
+
+using android::media::audio::common::AudioMMapPolicyInfo;
+using android::media::audio::common::AudioMMapPolicyType;
+
+/**
+ * Open a stream via AAudio API and set the performance mode as LOW_LATENCY. When MMAP is supported,
+ * the stream is supposed to be on MMAP path instead of legacy path. This is guaranteed on pixel
+ * devices, but may not be guaranteed on other vendor devices.
+ * @param direction the direction for the stream
+ */
+static void openStreamAndVerify(aaudio_direction_t direction) {
+ std::vector<AudioMMapPolicyInfo> policyInfos;
+ ASSERT_EQ(android::NO_ERROR, android::AudioSystem::getMmapPolicyInfo(
+ AudioMMapPolicyType::DEFAULT, &policyInfos));
+ if (AAudio_getAAudioPolicy(policyInfos) == AAUDIO_POLICY_NEVER) {
+ // Query the system MMAP policy; if it is NEVER, there is no MMAP support and the test
+ // does not need to run. The query keeps the test from accidentally being run on a
+ // device that doesn't support MMAP, such as cuttlefish.
+ ALOGD("Skip test as mmap is not supported");
+ return;
+ }
+
+ AAudioStreamBuilder *aaudioBuilder = nullptr;
+ AAudioStream *aaudioStream = nullptr;
+
+ ASSERT_EQ(AAUDIO_OK, AAudio_createStreamBuilder(&aaudioBuilder));
+
+ AAudioStreamBuilder_setDirection(aaudioBuilder, direction);
+ AAudioStreamBuilder_setPerformanceMode(aaudioBuilder, AAUDIO_PERFORMANCE_MODE_LOW_LATENCY);
+
+ EXPECT_EQ(AAUDIO_OK, AAudioStreamBuilder_openStream(aaudioBuilder, &aaudioStream));
+ EXPECT_EQ(AAUDIO_PERFORMANCE_MODE_LOW_LATENCY, AAudioStream_getPerformanceMode(aaudioStream));
+ EXPECT_TRUE(AAudioStream_isMMapUsed(aaudioStream));
+
+ AAudioStream_close(aaudioStream);
+ AAudioStreamBuilder_delete(aaudioBuilder);
+}
+
+TEST(test_mmap_path, input) {
+ openStreamAndVerify(AAUDIO_DIRECTION_INPUT);
+}
+
+TEST(test_mmap_path, output) {
+ openStreamAndVerify(AAUDIO_DIRECTION_OUTPUT);
+}
diff --git a/media/libaudioclient/Android.bp b/media/libaudioclient/Android.bp
index b6ddf56..6c198d3 100644
--- a/media/libaudioclient/Android.bp
+++ b/media/libaudioclient/Android.bp
@@ -48,7 +48,7 @@
cc_library {
name: "libaudiopolicy",
srcs: [
- "AudioAttributes.cpp",
+ "VolumeGroupAttributes.cpp",
"AudioPolicy.cpp",
"AudioProductStrategy.cpp",
"AudioVolumeGroup.cpp",
diff --git a/media/libaudioclient/AudioPolicy.cpp b/media/libaudioclient/AudioPolicy.cpp
index c2f7229..6bb0cbe 100644
--- a/media/libaudioclient/AudioPolicy.cpp
+++ b/media/libaudioclient/AudioPolicy.cpp
@@ -57,6 +57,10 @@
case RULE_EXCLUDE_USERID:
mValue.mUserId = (int) parcel->readInt32();
break;
+ case RULE_MATCH_AUDIO_SESSION_ID:
+ case RULE_EXCLUDE_AUDIO_SESSION_ID:
+ mValue.mAudioSessionId = (audio_session_t) parcel->readInt32();
+ break;
default:
ALOGE("Trying to build AudioMixMatchCriterion from unknown rule %d", mRule);
return BAD_VALUE;
@@ -71,6 +75,10 @@
return NO_ERROR;
}
+bool AudioMixMatchCriterion::isExcludeCriterion() const {
+ return mRule & RULE_EXCLUSION_MASK;
+}
+
//
// AudioMix implementation
//
@@ -91,10 +99,11 @@
if (size > MAX_CRITERIA_PER_MIX) {
size = MAX_CRITERIA_PER_MIX;
}
+ mCriteria.reserve(size);
for (size_t i = 0; i < size; i++) {
AudioMixMatchCriterion criterion;
if (criterion.readFromParcel(parcel) == NO_ERROR) {
- mCriteria.add(criterion);
+ mCriteria.push_back(criterion);
}
}
return NO_ERROR;
@@ -135,18 +144,18 @@
return NO_ERROR;
}
-void AudioMix::setExcludeUid(uid_t uid) const {
+void AudioMix::setExcludeUid(uid_t uid) {
AudioMixMatchCriterion crit;
crit.mRule = RULE_EXCLUDE_UID;
crit.mValue.mUid = uid;
- mCriteria.add(crit);
+ mCriteria.push_back(crit);
}
-void AudioMix::setMatchUid(uid_t uid) const {
+void AudioMix::setMatchUid(uid_t uid) {
AudioMixMatchCriterion crit;
crit.mRule = RULE_MATCH_UID;
crit.mValue.mUid = uid;
- mCriteria.add(crit);
+ mCriteria.push_back(crit);
}
bool AudioMix::hasUidRule(bool match, uid_t uid) const {
@@ -169,18 +178,18 @@
return false;
}
-void AudioMix::setExcludeUserId(int userId) const {
+void AudioMix::setExcludeUserId(int userId) {
AudioMixMatchCriterion crit;
crit.mRule = RULE_EXCLUDE_USERID;
crit.mValue.mUserId = userId;
- mCriteria.add(crit);
+ mCriteria.push_back(crit);
}
-void AudioMix::setMatchUserId(int userId) const {
+void AudioMix::setMatchUserId(int userId) {
AudioMixMatchCriterion crit;
crit.mRule = RULE_MATCH_USERID;
crit.mValue.mUserId = userId;
- mCriteria.add(crit);
+ mCriteria.push_back(crit);
}
bool AudioMix::hasUserIdRule(bool match, int userId) const {
diff --git a/media/libaudioclient/AudioProductStrategy.cpp b/media/libaudioclient/AudioProductStrategy.cpp
index ecd423a..381faf6 100644
--- a/media/libaudioclient/AudioProductStrategy.cpp
+++ b/media/libaudioclient/AudioProductStrategy.cpp
@@ -18,7 +18,7 @@
//#define LOG_NDEBUG 0
#include <utils/Log.h>
#include <media/AudioProductStrategy.h>
-#include <media/AudioAttributes.h>
+#include <media/VolumeGroupAttributes.h>
#include <media/PolicyAidlConversion.h>
namespace android {
@@ -42,8 +42,8 @@
aidl.name = legacy.getName();
aidl.audioAttributes = VALUE_OR_RETURN(
convertContainer<std::vector<media::AudioAttributesEx>>(
- legacy.getAudioAttributes(),
- legacy2aidl_AudioAttributes_AudioAttributesEx));
+ legacy.getVolumeGroupAttributes(),
+ legacy2aidl_VolumeGroupAttributes_AudioAttributesEx));
aidl.id = VALUE_OR_RETURN(legacy2aidl_product_strategy_t_int32_t(legacy.getId()));
return aidl;
}
@@ -53,9 +53,9 @@
return AudioProductStrategy(
aidl.name,
VALUE_OR_RETURN(
- convertContainer<std::vector<AudioAttributes>>(
+ convertContainer<std::vector<VolumeGroupAttributes>>(
aidl.audioAttributes,
- aidl2legacy_AudioAttributesEx_AudioAttributes)),
+ aidl2legacy_AudioAttributesEx_VolumeGroupAttributes)),
VALUE_OR_RETURN(aidl2legacy_int32_t_product_strategy_t(aidl.id)));
}
diff --git a/media/libaudioclient/AudioSystem.cpp b/media/libaudioclient/AudioSystem.cpp
index 05f27b0..af00ab1 100644
--- a/media/libaudioclient/AudioSystem.cpp
+++ b/media/libaudioclient/AudioSystem.cpp
@@ -1336,7 +1336,7 @@
return result.value_or(PRODUCT_STRATEGY_NONE);
}
-status_t AudioSystem::getDevicesForAttributes(const AudioAttributes& aa,
+status_t AudioSystem::getDevicesForAttributes(const audio_attributes_t& aa,
AudioDeviceTypeAddrVector* devices,
bool forVolume) {
if (devices == nullptr) {
@@ -1345,8 +1345,8 @@
const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
if (aps == 0) return PERMISSION_DENIED;
- media::AudioAttributesEx aaAidl = VALUE_OR_RETURN_STATUS(
- legacy2aidl_AudioAttributes_AudioAttributesEx(aa));
+ media::AudioAttributesInternal aaAidl = VALUE_OR_RETURN_STATUS(
+ legacy2aidl_audio_attributes_t_AudioAttributesInternal(aa));
std::vector<AudioDevice> retAidl;
RETURN_STATUS_IF_ERROR(
statusTFromBinderStatus(aps->getDevicesForAttributes(aaAidl, forVolume, &retAidl)));
@@ -2093,7 +2093,7 @@
AudioProductStrategyVector strategies;
listAudioProductStrategies(strategies);
for (const auto& strategy : strategies) {
- auto attrVect = strategy.getAudioAttributes();
+ auto attrVect = strategy.getVolumeGroupAttributes();
auto iter = std::find_if(begin(attrVect), end(attrVect), [&stream](const auto& attributes) {
return attributes.getStreamType() == stream;
});
@@ -2107,7 +2107,7 @@
audio_stream_type_t AudioSystem::attributesToStreamType(const audio_attributes_t& attr) {
product_strategy_t psId;
- status_t ret = AudioSystem::getProductStrategyFromAudioAttributes(AudioAttributes(attr), psId);
+ status_t ret = AudioSystem::getProductStrategyFromAudioAttributes(attr, psId);
if (ret != NO_ERROR) {
ALOGE("no strategy found for attributes %s", toString(attr).c_str());
return AUDIO_STREAM_MUSIC;
@@ -2116,7 +2116,7 @@
listAudioProductStrategies(strategies);
for (const auto& strategy : strategies) {
if (strategy.getId() == psId) {
- auto attrVect = strategy.getAudioAttributes();
+ auto attrVect = strategy.getVolumeGroupAttributes();
auto iter = std::find_if(begin(attrVect), end(attrVect), [&attr](const auto& refAttr) {
return AudioProductStrategy::attributesMatches(
refAttr.getAttributes(), attr);
@@ -2137,14 +2137,14 @@
return AUDIO_STREAM_MUSIC;
}
-status_t AudioSystem::getProductStrategyFromAudioAttributes(const AudioAttributes& aa,
+status_t AudioSystem::getProductStrategyFromAudioAttributes(const audio_attributes_t& aa,
product_strategy_t& productStrategy,
bool fallbackOnDefault) {
const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
if (aps == 0) return PERMISSION_DENIED;
- media::AudioAttributesEx aaAidl = VALUE_OR_RETURN_STATUS(
- legacy2aidl_AudioAttributes_AudioAttributesEx(aa));
+ media::AudioAttributesInternal aaAidl = VALUE_OR_RETURN_STATUS(
+ legacy2aidl_audio_attributes_t_AudioAttributesInternal(aa));
int32_t productStrategyAidl;
RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(
@@ -2167,14 +2167,14 @@
return OK;
}
-status_t AudioSystem::getVolumeGroupFromAudioAttributes(const AudioAttributes& aa,
+status_t AudioSystem::getVolumeGroupFromAudioAttributes(const audio_attributes_t &aa,
volume_group_t& volumeGroup,
bool fallbackOnDefault) {
const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
if (aps == 0) return PERMISSION_DENIED;
- media::AudioAttributesEx aaAidl = VALUE_OR_RETURN_STATUS(
- legacy2aidl_AudioAttributes_AudioAttributesEx(aa));
+ media::AudioAttributesInternal aaAidl = VALUE_OR_RETURN_STATUS(
+ legacy2aidl_audio_attributes_t_AudioAttributesInternal(aa));
int32_t volumeGroupAidl;
RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(
aps->getVolumeGroupFromAudioAttributes(aaAidl, fallbackOnDefault, &volumeGroupAidl)));
diff --git a/media/libaudioclient/AudioVolumeGroup.cpp b/media/libaudioclient/AudioVolumeGroup.cpp
index ab95246..978599e 100644
--- a/media/libaudioclient/AudioVolumeGroup.cpp
+++ b/media/libaudioclient/AudioVolumeGroup.cpp
@@ -23,7 +23,6 @@
#include <media/AidlConversion.h>
#include <media/AudioVolumeGroup.h>
-#include <media/AudioAttributes.h>
#include <media/PolicyAidlConversion.h>
namespace android {
diff --git a/media/libaudioclient/PolicyAidlConversion.cpp b/media/libaudioclient/PolicyAidlConversion.cpp
index 520f09c..4423eb6 100644
--- a/media/libaudioclient/PolicyAidlConversion.cpp
+++ b/media/libaudioclient/PolicyAidlConversion.cpp
@@ -158,6 +158,11 @@
convertIntegral<int>(UNION_GET(aidl, userId).value()));
*rule |= RULE_MATCH_USERID;
return legacy;
+ case media::AudioMixMatchCriterionValue::audioSessionId:
+ legacy.mAudioSessionId = VALUE_OR_RETURN(
+ aidl2legacy_int32_t_audio_session_t(UNION_GET(aidl, audioSessionId).value()));
+ *rule |= RULE_MATCH_AUDIO_SESSION_ID;
+ return legacy;
}
return unexpected(BAD_VALUE);
}
@@ -185,7 +190,10 @@
case RULE_MATCH_USERID:
UNION_SET(aidl, userId, VALUE_OR_RETURN(convertReinterpret<uint32_t>(legacy.mUserId)));
break;
-
+ case RULE_MATCH_AUDIO_SESSION_ID:
+ UNION_SET(aidl, audioSessionId,
+ VALUE_OR_RETURN(legacy2aidl_audio_session_t_int32_t(legacy.mAudioSessionId)));
+ break;
default:
return unexpected(BAD_VALUE);
}
diff --git a/media/libaudioclient/TEST_MAPPING b/media/libaudioclient/TEST_MAPPING
index 1aecfc6..d8151f5 100644
--- a/media/libaudioclient/TEST_MAPPING
+++ b/media/libaudioclient/TEST_MAPPING
@@ -23,9 +23,7 @@
},
{
"name": "audiosystem_tests"
- }
- ],
- "postsubmit": [
+ },
{
"name": "CtsNativeMediaAAudioTestCases",
"options" : [
diff --git a/media/libaudioclient/AudioAttributes.cpp b/media/libaudioclient/VolumeGroupAttributes.cpp
similarity index 73%
rename from media/libaudioclient/AudioAttributes.cpp
rename to media/libaudioclient/VolumeGroupAttributes.cpp
index 260c06c..2de4667 100644
--- a/media/libaudioclient/AudioAttributes.cpp
+++ b/media/libaudioclient/VolumeGroupAttributes.cpp
@@ -14,33 +14,33 @@
* limitations under the License.
*/
-#define LOG_TAG "AudioAttributes"
+#define LOG_TAG "VolumeGroupAttributes"
//#define LOG_NDEBUG 0
#include <utils/Log.h>
#include <binder/Parcel.h>
#include <media/AidlConversion.h>
-#include <media/AudioAttributes.h>
+#include <media/VolumeGroupAttributes.h>
#include <media/PolicyAidlConversion.h>
namespace android {
-status_t AudioAttributes::readFromParcel(const Parcel* parcel) {
+status_t VolumeGroupAttributes::readFromParcel(const Parcel* parcel) {
media::AudioAttributesEx aidl;
RETURN_STATUS_IF_ERROR(aidl.readFromParcel(parcel));
- *this = VALUE_OR_RETURN_STATUS(aidl2legacy_AudioAttributesEx_AudioAttributes(aidl));
+ *this = VALUE_OR_RETURN_STATUS(aidl2legacy_AudioAttributesEx_VolumeGroupAttributes(aidl));
return OK;
}
-status_t AudioAttributes::writeToParcel(Parcel* parcel) const {
+status_t VolumeGroupAttributes::writeToParcel(Parcel* parcel) const {
media::AudioAttributesEx aidl = VALUE_OR_RETURN_STATUS(
- legacy2aidl_AudioAttributes_AudioAttributesEx(*this));
+ legacy2aidl_VolumeGroupAttributes_AudioAttributesEx(*this));
return aidl.writeToParcel(parcel);
}
ConversionResult<media::AudioAttributesEx>
-legacy2aidl_AudioAttributes_AudioAttributesEx(const AudioAttributes& legacy) {
+legacy2aidl_VolumeGroupAttributes_AudioAttributesEx(const VolumeGroupAttributes& legacy) {
media::AudioAttributesEx aidl;
aidl.attributes = VALUE_OR_RETURN(
legacy2aidl_audio_attributes_t_AudioAttributesInternal(legacy.getAttributes()));
@@ -50,9 +50,9 @@
return aidl;
}
-ConversionResult<AudioAttributes>
-aidl2legacy_AudioAttributesEx_AudioAttributes(const media::AudioAttributesEx& aidl) {
- return AudioAttributes(VALUE_OR_RETURN(aidl2legacy_int32_t_volume_group_t(aidl.groupId)),
+ConversionResult<VolumeGroupAttributes>
+aidl2legacy_AudioAttributesEx_VolumeGroupAttributes(const media::AudioAttributesEx& aidl) {
+ return VolumeGroupAttributes(VALUE_OR_RETURN(aidl2legacy_int32_t_volume_group_t(aidl.groupId)),
VALUE_OR_RETURN(aidl2legacy_AudioStreamType_audio_stream_type_t(
aidl.streamType)),
VALUE_OR_RETURN(aidl2legacy_AudioAttributesInternal_audio_attributes_t(
diff --git a/media/libaudioclient/aidl/android/media/AudioMixMatchCriterionValue.aidl b/media/libaudioclient/aidl/android/media/AudioMixMatchCriterionValue.aidl
index 921a93a..0f373a2 100644
--- a/media/libaudioclient/aidl/android/media/AudioMixMatchCriterionValue.aidl
+++ b/media/libaudioclient/aidl/android/media/AudioMixMatchCriterionValue.aidl
@@ -28,4 +28,6 @@
/** Interpreted as uid_t. */
int uid;
int userId;
+ /** Interpreted as audio_session_t. */
+ int audioSessionId;
}
diff --git a/media/libaudioclient/aidl/android/media/IAudioPolicyService.aidl b/media/libaudioclient/aidl/android/media/IAudioPolicyService.aidl
index 8ac89a8..24b59bf 100644
--- a/media/libaudioclient/aidl/android/media/IAudioPolicyService.aidl
+++ b/media/libaudioclient/aidl/android/media/IAudioPolicyService.aidl
@@ -137,7 +137,7 @@
int /* product_strategy_t */ getStrategyForStream(AudioStreamType stream);
- AudioDevice[] getDevicesForAttributes(in AudioAttributesEx attr, boolean forVolume);
+ AudioDevice[] getDevicesForAttributes(in AudioAttributesInternal attr, boolean forVolume);
int /* audio_io_handle_t */ getOutputForEffect(in EffectDescriptor desc);
@@ -313,11 +313,11 @@
boolean isUltrasoundSupported();
AudioProductStrategy[] listAudioProductStrategies();
- int /* product_strategy_t */ getProductStrategyFromAudioAttributes(in AudioAttributesEx aa,
- boolean fallbackOnDefault);
+ int /* product_strategy_t */ getProductStrategyFromAudioAttributes(
+ in AudioAttributesInternal aa, boolean fallbackOnDefault);
AudioVolumeGroup[] listAudioVolumeGroups();
- int /* volume_group_t */ getVolumeGroupFromAudioAttributes(in AudioAttributesEx aa,
+ int /* volume_group_t */ getVolumeGroupFromAudioAttributes(in AudioAttributesInternal aa,
boolean fallbackOnDefault);
void setRttEnabled(boolean enabled);
diff --git a/media/libaudioclient/include/media/AudioEffect.h b/media/libaudioclient/include/media/AudioEffect.h
index 72c050b..291312e 100644
--- a/media/libaudioclient/include/media/AudioEffect.h
+++ b/media/libaudioclient/include/media/AudioEffect.h
@@ -648,7 +648,7 @@
{
public:
- EffectClient(AudioEffect *effect) : mEffect(effect){}
+ explicit EffectClient(const sp<AudioEffect>& effect) : mEffect(effect){}
// IEffectClient
binder::Status controlStatusChanged(bool controlGranted) override {
diff --git a/media/libaudioclient/include/media/AudioPolicy.h b/media/libaudioclient/include/media/AudioPolicy.h
index 08b3da1..61f2069 100644
--- a/media/libaudioclient/include/media/AudioPolicy.h
+++ b/media/libaudioclient/include/media/AudioPolicy.h
@@ -34,11 +34,13 @@
#define RULE_MATCH_ATTRIBUTE_CAPTURE_PRESET (0x1 << 1)
#define RULE_MATCH_UID (0x1 << 2)
#define RULE_MATCH_USERID (0x1 << 3)
+#define RULE_MATCH_AUDIO_SESSION_ID (0x1 << 4)
#define RULE_EXCLUDE_ATTRIBUTE_USAGE (RULE_EXCLUSION_MASK|RULE_MATCH_ATTRIBUTE_USAGE)
#define RULE_EXCLUDE_ATTRIBUTE_CAPTURE_PRESET \
(RULE_EXCLUSION_MASK|RULE_MATCH_ATTRIBUTE_CAPTURE_PRESET)
#define RULE_EXCLUDE_UID (RULE_EXCLUSION_MASK|RULE_MATCH_UID)
#define RULE_EXCLUDE_USERID (RULE_EXCLUSION_MASK|RULE_MATCH_USERID)
+#define RULE_EXCLUDE_AUDIO_SESSION_ID (RULE_EXCLUSION_MASK|RULE_MATCH_AUDIO_SESSION_ID)
#define MIX_TYPE_INVALID (-1)
#define MIX_TYPE_PLAYERS 0
@@ -72,11 +74,13 @@
status_t readFromParcel(Parcel *parcel);
status_t writeToParcel(Parcel *parcel) const;
+ bool isExcludeCriterion() const;
union {
audio_usage_t mUsage;
audio_source_t mSource;
uid_t mUid;
int mUserId;
+ audio_session_t mAudioSessionId;
} mValue;
uint32_t mRule;
};
@@ -88,23 +92,24 @@
static const uint32_t kCbFlagNotifyActivity = 0x1;
AudioMix() {}
- AudioMix(Vector<AudioMixMatchCriterion> criteria, uint32_t mixType, audio_config_t format,
- uint32_t routeFlags, String8 registrationId, uint32_t flags) :
+ AudioMix(const std::vector<AudioMixMatchCriterion> &criteria, uint32_t mixType,
+ audio_config_t format, uint32_t routeFlags,const String8 &registrationId,
+ uint32_t flags) :
mCriteria(criteria), mMixType(mixType), mFormat(format),
mRouteFlags(routeFlags), mDeviceAddress(registrationId), mCbFlags(flags){}
status_t readFromParcel(Parcel *parcel);
status_t writeToParcel(Parcel *parcel) const;
- void setExcludeUid(uid_t uid) const;
- void setMatchUid(uid_t uid) const;
+ void setExcludeUid(uid_t uid);
+ void setMatchUid(uid_t uid);
/** returns true if this mix has a rule to match or exclude the given uid */
bool hasUidRule(bool match, uid_t uid) const;
/** returns true if this mix has a rule for uid match (any uid) */
bool hasMatchUidRule() const;
- void setExcludeUserId(int userId) const;
- void setMatchUserId(int userId) const;
+ void setExcludeUserId(int userId);
+ void setMatchUserId(int userId);
/** returns true if this mix has a rule to match or exclude the given userId */
bool hasUserIdRule(bool match, int userId) const;
/** returns true if this mix has a rule for userId match (any userId) */
@@ -112,7 +117,7 @@
/** returns true if this mix can be used for uid-device affinity routing */
bool isDeviceAffinityCompatible() const;
- mutable Vector<AudioMixMatchCriterion> mCriteria;
+ std::vector<AudioMixMatchCriterion> mCriteria;
uint32_t mMixType;
audio_config_t mFormat;
uint32_t mRouteFlags;
@@ -137,6 +142,11 @@
== MIX_ROUTE_FLAG_LOOP_BACK_AND_RENDER;
}
+static inline bool is_mix_loopback(uint32_t routeFlags) {
+ return (routeFlags & MIX_ROUTE_FLAG_LOOP_BACK)
+ == MIX_ROUTE_FLAG_LOOP_BACK;
+}
+
}; // namespace android
#endif // ANDROID_AUDIO_POLICY_H
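
With the new RULE_MATCH_AUDIO_SESSION_ID / RULE_EXCLUDE_AUDIO_SESSION_ID rules and the mAudioSessionId union member, a dynamic policy mix can now target a specific audio session. A hedged usage sketch using only names from this header; the session-id value and the surrounding mix setup are hypothetical:

    // Illustrative only: attach a session-id criterion to an AudioMix.
    #include <media/AudioPolicy.h>

    void addSessionCriterion(android::AudioMix& mix, audio_session_t sessionId) {
        android::AudioMixMatchCriterion crit;
        crit.mRule = RULE_MATCH_AUDIO_SESSION_ID;   // or RULE_EXCLUDE_AUDIO_SESSION_ID
        crit.mValue.mAudioSessionId = sessionId;
        // isExcludeCriterion() reports false here because the exclusion bit is not set.
        mix.mCriteria.push_back(crit);              // mCriteria is now a std::vector
    }
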
diff --git a/media/libaudioclient/include/media/AudioProductStrategy.h b/media/libaudioclient/include/media/AudioProductStrategy.h
index b55b506..7bcb5aa 100644
--- a/media/libaudioclient/include/media/AudioProductStrategy.h
+++ b/media/libaudioclient/include/media/AudioProductStrategy.h
@@ -20,7 +20,7 @@
#include <android/media/AudioProductStrategy.h>
#include <media/AidlConversionUtil.h>
#include <media/AudioCommonTypes.h>
-#include <media/AudioAttributes.h>
+#include <media/VolumeGroupAttributes.h>
#include <system/audio.h>
#include <system/audio_policy.h>
#include <binder/Parcelable.h>
@@ -31,12 +31,15 @@
{
public:
AudioProductStrategy() {}
- AudioProductStrategy(const std::string &name, const std::vector<AudioAttributes> &attributes,
+ AudioProductStrategy(const std::string &name,
+ const std::vector<VolumeGroupAttributes> &attributes,
product_strategy_t id) :
- mName(name), mAudioAttributes(attributes), mId(id) {}
+ mName(name), mVolumeGroupAttributes(attributes), mId(id) {}
const std::string &getName() const { return mName; }
- std::vector<AudioAttributes> getAudioAttributes() const { return mAudioAttributes; }
+ std::vector<VolumeGroupAttributes> getVolumeGroupAttributes() const {
+ return mVolumeGroupAttributes;
+ }
product_strategy_t getId() const { return mId; }
status_t readFromParcel(const Parcel *parcel) override;
@@ -58,7 +61,7 @@
const audio_attributes_t clientAttritubes);
private:
std::string mName;
- std::vector<AudioAttributes> mAudioAttributes;
+ std::vector<VolumeGroupAttributes> mVolumeGroupAttributes;
product_strategy_t mId;
};
diff --git a/media/libaudioclient/include/media/AudioSystem.h b/media/libaudioclient/include/media/AudioSystem.h
index 1c414ec..6e6b9c8 100644
--- a/media/libaudioclient/include/media/AudioSystem.h
+++ b/media/libaudioclient/include/media/AudioSystem.h
@@ -370,7 +370,7 @@
static status_t getMinVolumeIndexForAttributes(const audio_attributes_t &attr, int &index);
static product_strategy_t getStrategyForStream(audio_stream_type_t stream);
- static status_t getDevicesForAttributes(const AudioAttributes &aa,
+ static status_t getDevicesForAttributes(const audio_attributes_t &aa,
AudioDeviceTypeAddrVector *devices,
bool forVolume);
@@ -494,7 +494,7 @@
static status_t listAudioProductStrategies(AudioProductStrategyVector &strategies);
static status_t getProductStrategyFromAudioAttributes(
- const AudioAttributes &aa, product_strategy_t &productStrategy,
+ const audio_attributes_t &aa, product_strategy_t &productStrategy,
bool fallbackOnDefault = true);
static audio_attributes_t streamTypeToAttributes(audio_stream_type_t stream);
@@ -503,7 +503,8 @@
static status_t listAudioVolumeGroups(AudioVolumeGroupVector &groups);
static status_t getVolumeGroupFromAudioAttributes(
- const AudioAttributes &aa, volume_group_t &volumeGroup, bool fallbackOnDefault = true);
+ const audio_attributes_t &aa, volume_group_t &volumeGroup,
+ bool fallbackOnDefault = true);
static status_t setRttEnabled(bool enabled);
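
Since these AudioSystem entry points now take audio_attributes_t directly instead of the old AudioAttributes wrapper, a caller builds the plain attributes struct and passes it straight through. A hedged usage sketch; the usage value chosen here is just an example:

    // Illustrative only: query the volume group for plain audio attributes.
    #include <media/AudioSystem.h>
    #include <system/audio.h>

    void exampleVolumeGroupLookup() {
        using namespace android;
        audio_attributes_t attr = AUDIO_ATTRIBUTES_INITIALIZER;
        attr.usage = AUDIO_USAGE_MEDIA;
        volume_group_t group;
        if (AudioSystem::getVolumeGroupFromAudioAttributes(attr, group) == NO_ERROR) {
            // fallbackOnDefault defaults to true, so a group is normally returned.
        }
    }
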
diff --git a/media/libaudioclient/include/media/AudioAttributes.h b/media/libaudioclient/include/media/VolumeGroupAttributes.h
similarity index 74%
rename from media/libaudioclient/include/media/AudioAttributes.h
rename to media/libaudioclient/include/media/VolumeGroupAttributes.h
index 24bd179..0859995 100644
--- a/media/libaudioclient/include/media/AudioAttributes.h
+++ b/media/libaudioclient/include/media/VolumeGroupAttributes.h
@@ -26,15 +26,20 @@
namespace android {
-class AudioAttributes : public Parcelable
+class VolumeGroupAttributes : public Parcelable
{
public:
- AudioAttributes() = default;
- AudioAttributes(const audio_attributes_t &attributes) : mAttributes(attributes) {} // NOLINT
- AudioAttributes(volume_group_t groupId,
+ VolumeGroupAttributes() = default;
+ VolumeGroupAttributes(const audio_attributes_t &attributes)
+ : mAttributes(attributes) {} // NOLINT
+ VolumeGroupAttributes(volume_group_t groupId,
audio_stream_type_t stream,
const audio_attributes_t &attributes) :
- mAttributes(attributes), mStreamType(stream), mGroupId(groupId) {}
+ mAttributes(attributes), mStreamType(stream), mGroupId(groupId) {
+ // TODO: align native & JAVA source initializer.
+ // As far as this class concerns attributes for volume group, it applies only to playback.
+ mAttributes.source = AUDIO_SOURCE_INVALID;
+ }
audio_attributes_t getAttributes() const { return mAttributes; }
@@ -61,8 +66,8 @@
// AIDL conversion routines.
ConversionResult<media::AudioAttributesEx>
-legacy2aidl_AudioAttributes_AudioAttributesEx(const AudioAttributes& legacy);
-ConversionResult<AudioAttributes>
-aidl2legacy_AudioAttributesEx_AudioAttributes(const media::AudioAttributesEx& aidl);
+legacy2aidl_VolumeGroupAttributes_AudioAttributesEx(const VolumeGroupAttributes& legacy);
+ConversionResult<VolumeGroupAttributes>
+aidl2legacy_AudioAttributesEx_VolumeGroupAttributes(const media::AudioAttributesEx& aidl);
} // namespace android
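
The renamed VolumeGroupAttributes class keeps the old AudioAttributes shape, but its three-argument constructor now clears the capture source, since volume-group attributes describe playback only. A hedged sketch of that behaviour; the group id is arbitrary:

    // Illustrative only: the three-argument constructor overwrites the capture source.
    #include <media/VolumeGroupAttributes.h>
    #include <system/audio.h>

    void exampleVolumeGroupAttributes() {
        using namespace android;
        audio_attributes_t attr = AUDIO_ATTRIBUTES_INITIALIZER;
        attr.source = AUDIO_SOURCE_MIC;  // will not survive construction
        VolumeGroupAttributes vga(static_cast<volume_group_t>(1) /* arbitrary group */,
                                  AUDIO_STREAM_MUSIC, attr);
        // vga.getAttributes().source is AUDIO_SOURCE_INVALID after construction.
    }
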
diff --git a/media/libaudioclient/tests/audio_test_utils.cpp b/media/libaudioclient/tests/audio_test_utils.cpp
index 44f0f50..850eb34 100644
--- a/media/libaudioclient/tests/audio_test_utils.cpp
+++ b/media/libaudioclient/tests/audio_test_utils.cpp
@@ -200,7 +200,7 @@
status_t AudioPlayback::waitForConsumption(bool testSeek) {
if (PLAY_STARTED != mState) return INVALID_OPERATION;
// in static buffer mode, lets not play clips with duration > 30 sec
- int retry = 30;
+ int retry = 300;
// Total number of frames in the input file.
size_t totalFrameCount = mMemCapacity / mTrack->frameSize();
while (!mStopPlaying && retry > 0) {
@@ -227,7 +227,7 @@
if (bufferPosition != setPosition) return BAD_VALUE;
mTrack->start();
}
- std::this_thread::sleep_for(std::chrono::milliseconds(300));
+ std::this_thread::sleep_for(std::chrono::milliseconds(100));
retry--;
}
if (!mStopPlaying) return TIMED_OUT;
diff --git a/media/libaudioclient/tests/audioclient_serialization_tests.cpp b/media/libaudioclient/tests/audioclient_serialization_tests.cpp
index 93baefd6..d1e3d16 100644
--- a/media/libaudioclient/tests/audioclient_serialization_tests.cpp
+++ b/media/libaudioclient/tests/audioclient_serialization_tests.cpp
@@ -66,16 +66,16 @@
decltype(audio_stream_type_from_string)>(xsdc_enum_range<xsd::AudioStreamType>{},
audio_stream_type_from_string);
-static const std::vector<uint32_t> kMixMatchRules = {
- RULE_MATCH_ATTRIBUTE_USAGE,
- RULE_EXCLUDE_ATTRIBUTE_USAGE,
- RULE_MATCH_ATTRIBUTE_CAPTURE_PRESET,
- RULE_EXCLUDE_ATTRIBUTE_CAPTURE_PRESET,
- RULE_MATCH_UID,
- RULE_EXCLUDE_UID,
- RULE_MATCH_USERID,
- RULE_EXCLUDE_USERID,
-};
+static const std::vector<uint32_t> kMixMatchRules = {RULE_MATCH_ATTRIBUTE_USAGE,
+ RULE_EXCLUDE_ATTRIBUTE_USAGE,
+ RULE_MATCH_ATTRIBUTE_CAPTURE_PRESET,
+ RULE_EXCLUDE_ATTRIBUTE_CAPTURE_PRESET,
+ RULE_MATCH_UID,
+ RULE_EXCLUDE_UID,
+ RULE_MATCH_USERID,
+ RULE_EXCLUDE_USERID,
+ RULE_MATCH_AUDIO_SESSION_ID,
+ RULE_EXCLUDE_AUDIO_SESSION_ID};
// Generates a random string.
std::string CreateRandomString(size_t n) {
@@ -119,16 +119,17 @@
TEST_F(SerializationTest, AudioProductStrategyBinderization) {
for (int j = 0; j < 512; j++) {
const std::string name{"Test APSBinderization for seed::" + std::to_string(mSeed)};
- std::vector<AudioAttributes> audioattributesvector;
+ std::vector<VolumeGroupAttributes> volumeGroupAttrVector;
for (auto i = 0; i < 16; i++) {
audio_attributes_t attributes;
fillAudioAttributes(attributes);
- AudioAttributes audioattributes{static_cast<volume_group_t>(rand()),
- kStreamtypes[rand() % kStreamtypes.size()], attributes};
- audioattributesvector.push_back(audioattributes);
+ VolumeGroupAttributes volumeGroupAttr{static_cast<volume_group_t>(rand()),
+ kStreamtypes[rand() % kStreamtypes.size()],
+ attributes};
+ volumeGroupAttrVector.push_back(volumeGroupAttr);
}
product_strategy_t psId = static_cast<product_strategy_t>(rand());
- AudioProductStrategy aps{name, audioattributesvector, psId};
+ AudioProductStrategy aps{name, volumeGroupAttrVector, psId};
Parcel p;
EXPECT_EQ(NO_ERROR, aps.writeToParcel(&p)) << name;
@@ -138,12 +139,12 @@
EXPECT_EQ(NO_ERROR, apsCopy.readFromParcel(&p)) << name;
EXPECT_EQ(apsCopy.getName(), name) << name;
EXPECT_EQ(apsCopy.getId(), psId) << name;
- auto avec = apsCopy.getAudioAttributes();
- EXPECT_EQ(avec.size(), audioattributesvector.size()) << name;
- for (int i = 0; i < audioattributesvector.size(); i++) {
- EXPECT_EQ(avec[i].getGroupId(), audioattributesvector[i].getGroupId()) << name;
- EXPECT_EQ(avec[i].getStreamType(), audioattributesvector[i].getStreamType()) << name;
- EXPECT_TRUE(avec[i].getAttributes() == audioattributesvector[i].getAttributes())
+ auto avec = apsCopy.getVolumeGroupAttributes();
+ EXPECT_EQ(avec.size(), volumeGroupAttrVector.size()) << name;
+ for (int i = 0; i < volumeGroupAttrVector.size(); i++) {
+ EXPECT_EQ(avec[i].getGroupId(), volumeGroupAttrVector[i].getGroupId()) << name;
+ EXPECT_EQ(avec[i].getStreamType(), volumeGroupAttrVector[i].getStreamType()) << name;
+ EXPECT_TRUE(avec[i].getAttributes() == volumeGroupAttrVector[i].getAttributes())
<< name;
}
}
@@ -189,12 +190,13 @@
TEST_F(SerializationTest, AudioMixBinderization) {
for (int j = 0; j < 512; j++) {
const std::string msg{"Test AMBinderization for seed::" + std::to_string(mSeed)};
- Vector<AudioMixMatchCriterion> criteria;
+ std::vector<AudioMixMatchCriterion> criteria;
+ criteria.reserve(16);
for (int i = 0; i < 16; i++) {
AudioMixMatchCriterion ammc{kUsages[rand() % kUsages.size()],
kInputSources[rand() % kInputSources.size()],
kMixMatchRules[rand() % kMixMatchRules.size()]};
- criteria.add(ammc);
+ criteria.push_back(ammc);
}
audio_config_t config{};
config.sample_rate = 48000;
@@ -292,17 +294,17 @@
audio_stream_type_t stream = mAudioStream;
audio_attributes_t attributes;
fillAudioAttributes(attributes);
- AudioAttributes audioattributes{groupId, stream, attributes};
+ VolumeGroupAttributes volumeGroupAttr{groupId, stream, attributes};
Parcel p;
- EXPECT_EQ(NO_ERROR, audioattributes.writeToParcel(&p)) << msg;
+ EXPECT_EQ(NO_ERROR, volumeGroupAttr.writeToParcel(&p)) << msg;
- AudioAttributes audioattributesCopy;
+ VolumeGroupAttributes volumeGroupAttrCopy;
p.setDataPosition(0);
- EXPECT_EQ(NO_ERROR, audioattributesCopy.readFromParcel(&p)) << msg;
- EXPECT_EQ(audioattributesCopy.getGroupId(), audioattributes.getGroupId()) << msg;
- EXPECT_EQ(audioattributesCopy.getStreamType(), audioattributes.getStreamType()) << msg;
- EXPECT_TRUE(audioattributesCopy.getAttributes() == attributes) << msg;
+ EXPECT_EQ(NO_ERROR, volumeGroupAttrCopy.readFromParcel(&p)) << msg;
+ EXPECT_EQ(volumeGroupAttrCopy.getGroupId(), volumeGroupAttr.getGroupId()) << msg;
+ EXPECT_EQ(volumeGroupAttrCopy.getStreamType(), volumeGroupAttr.getStreamType()) << msg;
+ EXPECT_TRUE(volumeGroupAttrCopy.getAttributes() == attributes) << msg;
}
// audioStream
diff --git a/media/libaudioclient/tests/audiorouting_tests.cpp b/media/libaudioclient/tests/audiorouting_tests.cpp
index 445633b..4bd81c8 100644
--- a/media/libaudioclient/tests/audiorouting_tests.cpp
+++ b/media/libaudioclient/tests/audiorouting_tests.cpp
@@ -26,31 +26,18 @@
// UNIT TEST
TEST(AudioTrackTest, TestPerformanceMode) {
- std::vector<std::string> attachedDevices;
- std::vector<MixPort> mixPorts;
- std::vector<Route> routes;
- EXPECT_EQ(OK, parse_audio_policy_configuration_xml(attachedDevices, mixPorts, routes));
- std::string output_flags_string[] = {"AUDIO_OUTPUT_FLAG_FAST", "AUDIO_OUTPUT_FLAG_DEEP_BUFFER"};
+ std::vector<struct audio_port_v7> ports;
+ ASSERT_EQ(OK, listAudioPorts(ports));
audio_output_flags_t output_flags[] = {AUDIO_OUTPUT_FLAG_FAST, AUDIO_OUTPUT_FLAG_DEEP_BUFFER};
audio_flags_mask_t flags[] = {AUDIO_FLAG_LOW_LATENCY, AUDIO_FLAG_DEEP_BUFFER};
bool hasFlag = false;
for (int i = 0; i < sizeof(flags) / sizeof(flags[0]); i++) {
hasFlag = false;
- for (int j = 0; j < mixPorts.size() && !hasFlag; j++) {
- MixPort port = mixPorts[j];
- if (port.role == "source" && port.flags.find(output_flags_string[i]) != -1) {
- for (int k = 0; k < routes.size() && !hasFlag; k++) {
- if (routes[k].sources.find(port.name) != -1 &&
- std::find(attachedDevices.begin(), attachedDevices.end(), routes[k].sink) !=
- attachedDevices.end()) {
- hasFlag = true;
- std::cerr << "found port with flag " << output_flags_string[i] << "@ "
- << " port :: name : " << port.name << " role : " << port.role
- << " port :: flags : " << port.flags
- << " connected via route name : " << routes[k].name
- << " route sources : " << routes[k].sources
- << " route sink : " << routes[k].sink << std::endl;
- }
+ for (const auto& port : ports) {
+ if (port.role == AUDIO_PORT_ROLE_SOURCE && port.type == AUDIO_PORT_TYPE_MIX) {
+ if ((port.active_config.flags.output & output_flags[i]) != 0) {
+ hasFlag = true;
+ break;
}
}
}
diff --git a/media/libaudioclient/tests/audiosystem_tests.cpp b/media/libaudioclient/tests/audiosystem_tests.cpp
index aed847c..3dd2c95 100644
--- a/media/libaudioclient/tests/audiosystem_tests.cpp
+++ b/media/libaudioclient/tests/audiosystem_tests.cpp
@@ -332,7 +332,7 @@
bool isPublicStrategy(const AudioProductStrategy& strategy) {
bool result = true;
- for (auto& attribute : strategy.getAudioAttributes()) {
+ for (auto& attribute : strategy.getVolumeGroupAttributes()) {
if (attribute.getAttributes() == AUDIO_ATTRIBUTES_INITIALIZER &&
(uint32_t(attribute.getStreamType()) >= AUDIO_STREAM_PUBLIC_CNT)) {
result = false;
@@ -371,7 +371,7 @@
for (const auto& strategy : strategies) {
if (!isPublicStrategy(strategy)) continue;
- for (const auto& att : strategy.getAudioAttributes()) {
+ for (const auto& att : strategy.getVolumeGroupAttributes()) {
if (strategy.attributesMatches(att.getAttributes(), attributes)) {
hasStrategyForMedia = true;
mediaStrategy = strategy;
diff --git a/media/libaudiofoundation/TEST_MAPPING b/media/libaudiofoundation/TEST_MAPPING
index dbae9a0..a4e271e 100644
--- a/media/libaudiofoundation/TEST_MAPPING
+++ b/media/libaudiofoundation/TEST_MAPPING
@@ -2,9 +2,7 @@
"presubmit": [
{
"name": "audiofoundation_parcelable_test"
- }
- ],
- "postsubmit": [
+ },
{
"name": "CtsNativeMediaAAudioTestCases",
"options" : [
diff --git a/media/libaudiofoundation/include/media/DeviceDescriptorBase.h b/media/libaudiofoundation/include/media/DeviceDescriptorBase.h
index 1f0c768..dc2899a 100644
--- a/media/libaudiofoundation/include/media/DeviceDescriptorBase.h
+++ b/media/libaudiofoundation/include/media/DeviceDescriptorBase.h
@@ -53,7 +53,7 @@
// AudioPortConfig
virtual sp<AudioPort> getAudioPort() const {
- return static_cast<AudioPort*>(const_cast<DeviceDescriptorBase*>(this));
+ return sp<AudioPort>::fromExisting(const_cast<DeviceDescriptorBase*>(this));
}
virtual void toAudioPortConfig(struct audio_port_config *dstConfig,
const struct audio_port_config *srcConfig = NULL) const;
diff --git a/media/libaudiohal/TEST_MAPPING b/media/libaudiohal/TEST_MAPPING
index 9aff137..5d3fb0a 100644
--- a/media/libaudiohal/TEST_MAPPING
+++ b/media/libaudiohal/TEST_MAPPING
@@ -1,5 +1,5 @@
{
- "postsubmit": [
+ "presubmit": [
{
"name": "CtsNativeMediaAAudioTestCases",
"options" : [
diff --git a/media/libaudiohal/impl/EffectHalHidl.cpp b/media/libaudiohal/impl/EffectHalHidl.cpp
index 8743c04..3956a6c 100644
--- a/media/libaudiohal/impl/EffectHalHidl.cpp
+++ b/media/libaudiohal/impl/EffectHalHidl.cpp
@@ -17,11 +17,16 @@
#define LOG_TAG "EffectHalHidl"
//#define LOG_NDEBUG 0
+#include <android/hidl/manager/1.0/IServiceManager.h>
+#include <android-base/stringprintf.h>
#include <common/all-versions/VersionUtils.h>
#include <cutils/native_handle.h>
+#include <cutils/properties.h>
#include <hwbinder/IPCThreadState.h>
#include <media/EffectsFactoryApi.h>
+#include <mediautils/SchedulingPolicyService.h>
#include <mediautils/TimeCheck.h>
+#include <system/audio_effects/effect_spatializer.h>
#include <utils/Log.h>
#include <util/EffectUtils.h>
@@ -50,6 +55,18 @@
effect_descriptor_t halDescriptor{};
if (EffectHalHidl::getDescriptor(&halDescriptor) == NO_ERROR) {
mIsInput = (halDescriptor.flags & EFFECT_FLAG_TYPE_PRE_PROC) == EFFECT_FLAG_TYPE_PRE_PROC;
+ const bool isSpatializer =
+ memcmp(&halDescriptor.type, FX_IID_SPATIALIZER, sizeof(effect_uuid_t)) == 0;
+ if (isSpatializer) {
+ constexpr int32_t kRTPriorityMin = 1;
+ constexpr int32_t kRTPriorityMax = 3;
+ const int32_t priorityBoost = property_get_int32("audio.spatializer.priority", 1);
+ if (priorityBoost >= kRTPriorityMin && priorityBoost <= kRTPriorityMax) {
+ ALOGD("%s: audio.spatializer.priority %d on effect %lld",
+ __func__, priorityBoost, (long long)effectId);
+ mHalThreadPriority = priorityBoost;
+ }
+ }
}
}
@@ -127,6 +144,8 @@
ALOGE_IF(!mEfGroup, "Event flag creation for effects failed");
return NO_INIT;
}
+
+ (void)checkHalThreadPriority();
mStatusMQ = std::move(tempStatusMQ);
return OK;
}
@@ -317,5 +336,67 @@
return result;
}
+status_t EffectHalHidl::getHalPid(pid_t *pid) const {
+ using ::android::hidl::base::V1_0::DebugInfo;
+ using ::android::hidl::manager::V1_0::IServiceManager;
+ DebugInfo debugInfo;
+ const auto ret = mEffect->getDebugInfo([&] (const auto &info) {
+ debugInfo = info;
+ });
+ if (!ret.isOk()) {
+ ALOGW("%s: cannot get effect debug info", __func__);
+ return INVALID_OPERATION;
+ }
+ if (debugInfo.pid != (int)IServiceManager::PidConstant::NO_PID) {
+ *pid = debugInfo.pid;
+ return NO_ERROR;
+ }
+ ALOGW("%s: effect debug info does not contain pid", __func__);
+ return NAME_NOT_FOUND;
+}
+
+status_t EffectHalHidl::getHalWorkerTid(pid_t *tid) {
+ int32_t reply = -1;
+ uint32_t replySize = sizeof(reply);
+ const status_t status =
+ command('gtid', 0 /* cmdSize */, nullptr /* pCmdData */, &replySize, &reply);
+ if (status == OK) {
+ *tid = (pid_t)reply;
+ } else {
+ ALOGW("%s: failed with status:%d", __func__, status);
+ }
+ return status;
+}
+
+bool EffectHalHidl::requestHalThreadPriority(pid_t threadPid, pid_t threadId) {
+ if (mHalThreadPriority == kRTPriorityDisabled) {
+ return true;
+ }
+ const int err = requestPriority(
+ threadPid, threadId,
+ mHalThreadPriority, false /*isForApp*/, true /*asynchronous*/);
+ ALOGW_IF(err, "%s: failed to set RT priority %d for pid %d tid %d; error %d",
+ __func__, mHalThreadPriority, threadPid, threadId, err);
+ // Audio will still work, but may be more susceptible to glitches.
+ return err == 0;
+}
+
+status_t EffectHalHidl::checkHalThreadPriority() {
+ if (mHalThreadPriority == kRTPriorityDisabled) return OK;
+ if (mHalThreadPriority < kRTPriorityMin
+ || mHalThreadPriority > kRTPriorityMax) return BAD_VALUE;
+
+ pid_t halPid, halWorkerTid;
+ const status_t status = getHalPid(&halPid) ?: getHalWorkerTid(&halWorkerTid);
+ const bool success = status == OK && requestHalThreadPriority(halPid, halWorkerTid);
+ ALOGD("%s: effectId %lld RT priority(%d) request %s%s",
+ __func__, (long long)mEffectId, mHalThreadPriority,
+ success ? "succeeded" : "failed",
+ status == OK
+ ? base::StringPrintf(" for pid:%d tid:%d", halPid, halWorkerTid).c_str()
+ : " (pid / tid cannot be read)");
+ return success ? OK : status != OK ? status : INVALID_OPERATION /* request failed */;
+}
+
} // namespace effect
} // namespace android
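
In checkHalThreadPriority() above, "getHalPid(&halPid) ?: getHalWorkerTid(&halWorkerTid)" uses the GCC/Clang conditional operator with an omitted middle operand: the first call's status is kept when it is non-zero (an error), and only on success is the second call evaluated. Spelled out without the extension, the equivalent would be roughly:

    // Illustrative equivalent of the GNU "?:" chaining, not part of the patch.
    pid_t halPid = -1, halWorkerTid = -1;
    status_t status = getHalPid(&halPid);   // OK is 0, so any error short-circuits
    if (status == OK) {
        status = getHalWorkerTid(&halWorkerTid);
    }
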
diff --git a/media/libaudiohal/impl/EffectHalHidl.h b/media/libaudiohal/impl/EffectHalHidl.h
index e139768..94dcd7e 100644
--- a/media/libaudiohal/impl/EffectHalHidl.h
+++ b/media/libaudiohal/impl/EffectHalHidl.h
@@ -78,6 +78,11 @@
std::unique_ptr<StatusMQ> mStatusMQ;
EventFlag* mEfGroup;
bool mIsInput = false;
+ static constexpr int32_t kRTPriorityMin = 1;
+ static constexpr int32_t kRTPriorityMax = 3;
+ static constexpr int kRTPriorityDisabled = 0;
+ // Typical RealTime mHalThreadPriority ranges from 1 (low) to 3 (high).
+ int mHalThreadPriority = kRTPriorityDisabled;
// Can not be constructed directly by clients.
EffectHalHidl(const sp<IEffect>& effect, uint64_t effectId);
@@ -93,6 +98,10 @@
uint32_t cmdCode, uint32_t cmdSize, void *pCmdData,
uint32_t *replySize, void *pReplyData);
status_t setProcessBuffers();
+ status_t getHalPid(pid_t *pid) const;
+ status_t getHalWorkerTid(pid_t *tid);
+ bool requestHalThreadPriority(pid_t threadPid, pid_t threadId);
+ status_t checkHalThreadPriority();
};
} // namespace effect
diff --git a/media/libaudioprocessing/TEST_MAPPING b/media/libaudioprocessing/TEST_MAPPING
index 9aff137..5d3fb0a 100644
--- a/media/libaudioprocessing/TEST_MAPPING
+++ b/media/libaudioprocessing/TEST_MAPPING
@@ -1,5 +1,5 @@
{
- "postsubmit": [
+ "presubmit": [
{
"name": "CtsNativeMediaAAudioTestCases",
"options" : [
diff --git a/media/libeffects/hapticgenerator/Android.bp b/media/libeffects/hapticgenerator/Android.bp
index 03ce329..ba511fe 100644
--- a/media/libeffects/hapticgenerator/Android.bp
+++ b/media/libeffects/hapticgenerator/Android.bp
@@ -46,10 +46,9 @@
shared_libs: [
"libaudioutils",
"libbase",
- "libbinder",
"liblog",
"libutils",
- "libvibrator",
+ "libvibratorutils",
],
relative_install_path: "soundfx",
diff --git a/media/libeffects/lvm/wrapper/Aidl/BundleContext.cpp b/media/libeffects/lvm/wrapper/Aidl/BundleContext.cpp
new file mode 100644
index 0000000..2defa4e
--- /dev/null
+++ b/media/libeffects/lvm/wrapper/Aidl/BundleContext.cpp
@@ -0,0 +1,326 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "BundleContext"
+#include <Utils.h>
+
+#include "BundleContext.h"
+#include "BundleTypes.h"
+
+namespace aidl::android::hardware::audio::effect {
+
+RetCode BundleContext::init() {
+ // init with pre-defined preset NORMAL
+ for (int i = 0; i < lvm::MAX_NUM_BANDS; i++) {
+ mBandGaindB[i] = lvm::kSoftPresets[0 /* normal */][i];
+ }
+
+ // allocate lvm instance
+ LVM_ReturnStatus_en status;
+ LVM_InstParams_t params = {.BufferMode = LVM_UNMANAGED_BUFFERS,
+ .MaxBlockSize = lvm::MAX_CALL_SIZE,
+ .EQNB_NumBands = lvm::MAX_NUM_BANDS,
+ .PSA_Included = LVM_PSA_ON};
+ status = LVM_GetInstanceHandle(&mInstance, &params);
+ GOTO_IF_LVM_ERROR(status, deinit, "LVM_GetInstanceHandleFailed");
+
+ // set control
+ LVM_ControlParams_t controlParams;
+ initControlParameter(controlParams);
+ status = LVM_SetControlParameters(mInstance, &controlParams);
+ GOTO_IF_LVM_ERROR(status, deinit, "LVM_SetControlParametersFailed");
+
+ /* Set the headroom parameters */
+ LVM_HeadroomParams_t headroomParams;
+ initHeadroomParameter(headroomParams);
+ status = LVM_SetHeadroomParams(mInstance, &headroomParams);
+ GOTO_IF_LVM_ERROR(status, deinit, "LVM_SetHeadroomParamsFailed");
+
+ return RetCode::SUCCESS;
+
+deinit:
+ deInit();
+ return RetCode::ERROR_EFFECT_LIB_ERROR;
+}
+
+void BundleContext::deInit() {
+ if (mInstance) {
+ LVM_DelInstanceHandle(&mInstance);
+ mInstance = nullptr;
+ }
+}
+
+RetCode BundleContext::enable() {
+ LVM_ControlParams_t params;
+ RETURN_VALUE_IF(LVM_SUCCESS != LVM_GetControlParameters(mInstance, &params),
+ RetCode::ERROR_EFFECT_LIB_ERROR, "failGetControlParams");
+ if (mType == lvm::BundleEffectType::EQUALIZER) {
+ LOG(DEBUG) << __func__ << " enable bundle EQ";
+ params.EQNB_OperatingMode = LVM_EQNB_ON;
+ }
+ RETURN_VALUE_IF(LVM_SUCCESS != LVM_SetControlParameters(mInstance, &params),
+ RetCode::ERROR_EFFECT_LIB_ERROR, "failSetControlParams");
+ mEnabled = true;
+ // LvmEffect_limitLevel(pContext);
+ return RetCode::SUCCESS;
+}
+
+RetCode BundleContext::disable() {
+ LVM_ControlParams_t params;
+ RETURN_VALUE_IF(LVM_SUCCESS != LVM_GetControlParameters(mInstance, &params),
+ RetCode::ERROR_EFFECT_LIB_ERROR, "failGetControlParams");
+ if (mType == lvm::BundleEffectType::EQUALIZER) {
+ LOG(DEBUG) << __func__ << " disable bundle EQ";
+ params.EQNB_OperatingMode = LVM_EQNB_OFF;
+ }
+ RETURN_VALUE_IF(LVM_SUCCESS != LVM_SetControlParameters(mInstance, &params),
+ RetCode::ERROR_EFFECT_LIB_ERROR, "failSetControlParams");
+ mEnabled = false;
+ // LvmEffect_limitLevel(pContext);
+ return RetCode::SUCCESS;
+}
+
+LVM_INT16 BundleContext::LVC_ToDB_s32Tos16(LVM_INT32 Lin_fix) const {
+ LVM_INT16 db_fix;
+ LVM_INT16 Shift;
+ LVM_INT16 SmallRemainder;
+ LVM_UINT32 Remainder = (LVM_UINT32)Lin_fix;
+
+ /* Count leading bits, 1 cycle in assembly*/
+ for (Shift = 0; Shift < 32; Shift++) {
+ if ((Remainder & 0x80000000U) != 0) {
+ break;
+ }
+ Remainder = Remainder << 1;
+ }
+
+ /*
+ * Based on the approximation equation (for Q11.4 format):
+ *
+ * dB = -96 * Shift + 16 * (8 * Remainder - 2 * Remainder^2)
+ */
+ db_fix = (LVM_INT16)(-96 * Shift); /* Six dB steps in Q11.4 format*/
+ SmallRemainder = (LVM_INT16)((Remainder & 0x7fffffff) >> 24);
+ db_fix = (LVM_INT16)(db_fix + SmallRemainder);
+ SmallRemainder = (LVM_INT16)(SmallRemainder * SmallRemainder);
+ db_fix = (LVM_INT16)(db_fix - (LVM_INT16)((LVM_UINT16)SmallRemainder >> 9));
+
+ /* Correct for small offset */
+ db_fix = (LVM_INT16)(db_fix - 5);
+
+ return db_fix;
+}
+
+// TODO: replace with more generic approach, like: audio_utils_power_from_amplitude
+int16_t BundleContext::VolToDb(uint32_t vol) const {
+ int16_t dB;
+
+ dB = LVC_ToDB_s32Tos16(vol << 7);
+ dB = (dB + 8) >> 4;
+ dB = (dB < -96) ? -96 : dB;
+
+ return dB;
+}
+
+RetCode BundleContext::setVolumeStereo(const Parameter::VolumeStereo& volume) {
+ LVM_ControlParams_t params;
+ LVM_ReturnStatus_en status = LVM_SUCCESS;
+
+ // Convert volume to dB
+ int leftdB = VolToDb(volume.left);
+ int rightdB = VolToDb(volume.right);
+ int maxdB = std::max(leftdB, rightdB);
+ int pandB = rightdB - leftdB;
+ // TODO: add volume effect implementation here:
+ // android::VolumeSetVolumeLevel(pContext, (int16_t)(maxdB * 100));
+ LOG(DEBUG) << __func__ << " pandB: " << pandB << " maxdB " << maxdB;
+
+ RETURN_VALUE_IF(LVM_SUCCESS != LVM_GetControlParameters(mInstance, &params),
+ RetCode::ERROR_EFFECT_LIB_ERROR, "");
+
+ params.VC_Balance = pandB;
+
+ RETURN_VALUE_IF(LVM_SUCCESS != LVM_SetControlParameters(mInstance, &params),
+ RetCode::ERROR_EFFECT_LIB_ERROR, "");
+
+ mVolumeStereo = volume;
+ return RetCode::SUCCESS;
+}
+
+RetCode BundleContext::setEqualizerPreset(const int presetIdx) {
+ if (presetIdx < 0 || presetIdx >= lvm::MAX_NUM_PRESETS) {
+ return RetCode::ERROR_ILLEGAL_PARAMETER;
+ }
+
+ std::vector<Equalizer::BandLevel> bandLevels;
+ bandLevels.reserve(lvm::MAX_NUM_BANDS);
+ for (int i = 0; i < lvm::MAX_NUM_BANDS; i++) {
+ bandLevels.emplace_back(Equalizer::BandLevel{i, lvm::kSoftPresets[presetIdx][i]});
+ }
+
+ RetCode ret = updateControlParameter(bandLevels);
+ if (RetCode::SUCCESS == ret) {
+ mCurPresetIdx = presetIdx;
+ LOG(INFO) << __func__ << " success with " << presetIdx;
+ } else {
+ LOG(ERROR) << __func__ << " failed to setPreset " << presetIdx;
+ }
+ return ret;
+}
+
+RetCode BundleContext::setEqualizerBandLevels(const std::vector<Equalizer::BandLevel>& bandLevels) {
+ RETURN_VALUE_IF(bandLevels.size() > lvm::MAX_NUM_BANDS || bandLevels.empty(),
+ RetCode::ERROR_ILLEGAL_PARAMETER, "sizeExceedMax");
+ RetCode ret = updateControlParameter(bandLevels);
+ if (RetCode::SUCCESS == ret) {
+ mCurPresetIdx = lvm::PRESET_CUSTOM;
+ LOG(INFO) << __func__ << " succeed with " << ::android::internal::ToString(bandLevels);
+ } else {
+ LOG(ERROR) << __func__ << " failed with " << ::android::internal::ToString(bandLevels);
+ }
+ return ret;
+}
+
+std::vector<Equalizer::BandLevel> BundleContext::getEqualizerBandLevels() const {
+ std::vector<Equalizer::BandLevel> bandLevels;
+ bandLevels.reserve(lvm::MAX_NUM_BANDS);
+ for (int i = 0; i < lvm::MAX_NUM_BANDS; i++) {
+ bandLevels.emplace_back(Equalizer::BandLevel{i, mBandGaindB[i]});
+ }
+ return bandLevels;
+}
+
+bool BundleContext::isBandLevelIndexInRange(
+ const std::vector<Equalizer::BandLevel>& bandLevels) const {
+ const auto [min, max] =
+ std::minmax_element(bandLevels.begin(), bandLevels.end(),
+ [](const auto& a, const auto& b) { return a.index < b.index; });
+ return min->index >= 0 && max->index < lvm::MAX_NUM_BANDS;
+}
+
+RetCode BundleContext::updateControlParameter(const std::vector<Equalizer::BandLevel>& bandLevels) {
+ RETURN_VALUE_IF(!isBandLevelIndexInRange(bandLevels), RetCode::ERROR_ILLEGAL_PARAMETER,
+ "indexOutOfRange");
+
+ std::array<int, lvm::MAX_NUM_BANDS> tempLevel;
+ for (const auto& it : bandLevels) {
+ tempLevel[it.index] = it.levelMb;
+ }
+
+ LVM_ControlParams_t params;
+ RETURN_VALUE_IF(LVM_SUCCESS != LVM_GetControlParameters(mInstance, &params),
+ RetCode::ERROR_EFFECT_LIB_ERROR, " getControlParamFailed");
+
+ for (int i = 0; i < lvm::MAX_NUM_BANDS; i++) {
+ params.pEQNB_BandDefinition[i].Frequency = lvm::kPresetsFrequencies[i];
+ params.pEQNB_BandDefinition[i].QFactor = lvm::kPresetsQFactors[i];
+ params.pEQNB_BandDefinition[i].Gain = tempLevel[i];
+ }
+
+ RETURN_VALUE_IF(LVM_SUCCESS != LVM_SetControlParameters(mInstance, &params),
+ RetCode::ERROR_EFFECT_LIB_ERROR, " setControlParamFailed");
+ mBandGaindB = tempLevel;
+ LOG(INFO) << __func__ << " update bandGain to " << ::android::internal::ToString(mBandGaindB);
+
+ return RetCode::SUCCESS;
+}
+
+void BundleContext::initControlParameter(LVM_ControlParams_t& params) const {
+ /* General parameters */
+ params.OperatingMode = LVM_MODE_ON;
+ params.SampleRate = LVM_FS_44100;
+ params.SourceFormat = LVM_STEREO;
+ params.SpeakerType = LVM_HEADPHONES;
+
+ /* Concert Sound parameters */
+ params.VirtualizerOperatingMode = LVM_MODE_OFF;
+ params.VirtualizerType = LVM_CONCERTSOUND;
+ params.VirtualizerReverbLevel = 100;
+ params.CS_EffectLevel = LVM_CS_EFFECT_NONE;
+
+ params.EQNB_OperatingMode = LVM_EQNB_OFF;
+ params.EQNB_NBands = lvm::MAX_NUM_BANDS;
+ params.pEQNB_BandDefinition = getDefaultEqualizerBandDefs();
+
+ /* Volume Control parameters */
+ params.VC_EffectLevel = 0;
+ params.VC_Balance = 0;
+
+ /* Treble Enhancement parameters */
+ params.TE_OperatingMode = LVM_TE_OFF;
+ params.TE_EffectLevel = 0;
+
+ /* PSA Control parameters */
+ params.PSA_Enable = LVM_PSA_OFF;
+ params.PSA_PeakDecayRate = (LVM_PSA_DecaySpeed_en)0;
+
+ /* Bass Enhancement parameters */
+ params.BE_OperatingMode = LVM_BE_OFF;
+ params.BE_EffectLevel = 0;
+ params.BE_CentreFreq = LVM_BE_CENTRE_90Hz;
+ params.BE_HPF = LVM_BE_HPF_ON;
+
+ /* PSA Control parameters */
+ params.PSA_Enable = LVM_PSA_OFF;
+ params.PSA_PeakDecayRate = LVM_PSA_SPEED_MEDIUM;
+
+ /* TE Control parameters */
+ params.TE_OperatingMode = LVM_TE_OFF;
+ params.TE_EffectLevel = 0;
+
+ params.NrChannels = audio_channel_count_from_out_mask(AUDIO_CHANNEL_OUT_STEREO);
+ params.ChMask = AUDIO_CHANNEL_OUT_STEREO;
+ params.SourceFormat = LVM_STEREO;
+}
+
+void BundleContext::initHeadroomParameter(LVM_HeadroomParams_t& params) const {
+ params.pHeadroomDefinition = getDefaultEqualizerHeadroomBanDefs();
+ params.NHeadroomBands = 2;
+ params.Headroom_OperatingMode = LVM_HEADROOM_OFF;
+}
+
+LVM_EQNB_BandDef_t *BundleContext::getDefaultEqualizerBandDefs() {
+ static LVM_EQNB_BandDef_t* BandDefs = []() {
+ static LVM_EQNB_BandDef_t tempDefs[lvm::MAX_NUM_BANDS];
+ /* N-Band Equaliser parameters */
+ for (int i = 0; i < lvm::MAX_NUM_BANDS; i++) {
+ tempDefs[i].Frequency = lvm::kPresetsFrequencies[i];
+ tempDefs[i].QFactor = lvm::kPresetsQFactors[i];
+ tempDefs[i].Gain = lvm::kSoftPresets[0/* normal */][i];
+ }
+ return tempDefs;
+ }();
+
+ return BandDefs;
+}
+
+LVM_HeadroomBandDef_t *BundleContext::getDefaultEqualizerHeadroomBanDefs() {
+ static LVM_HeadroomBandDef_t HeadroomBandDef[LVM_HEADROOM_MAX_NBANDS] = {
+ {
+ .Limit_Low = 20,
+ .Limit_High = 4999,
+ .Headroom_Offset = 0,
+ },
+ {
+ .Limit_Low = 5000,
+ .Limit_High = 24000,
+ .Headroom_Offset = 0,
+ },
+ };
+ return HeadroomBandDef;
+}
+
+} // namespace aidl::android::hardware::audio::effect
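
Putting the new context API together, a caller would construct a BundleContext for one of the bundle effect types, init() it, enable it, and then drive the equalizer through presets or explicit band levels. A hedged usage sketch; the statusDepth value and the Parameter::Common setup are placeholders:

    // Illustrative only: exercise the equalizer part of the new BundleContext.
    #include "BundleContext.h"
    #include "BundleTypes.h"

    using namespace aidl::android::hardware::audio::effect;

    void exampleEqualizerUse(const Parameter::Common& common) {
        BundleContext context(1 /* statusDepth, placeholder */, common,
                              lvm::BundleEffectType::EQUALIZER);
        if (context.init() != RetCode::SUCCESS) return;
        if (context.enable() != RetCode::SUCCESS) return;
        context.setEqualizerPreset(0);                    // preset 0 is the "normal" soft preset
        auto levels = context.getEqualizerBandLevels();   // one BandLevel per EQ band
        (void)levels;
        context.disable();
    }
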
diff --git a/media/libeffects/lvm/wrapper/Aidl/BundleContext.h b/media/libeffects/lvm/wrapper/Aidl/BundleContext.h
new file mode 100644
index 0000000..616ab78
--- /dev/null
+++ b/media/libeffects/lvm/wrapper/Aidl/BundleContext.h
@@ -0,0 +1,115 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <android-base/logging.h>
+#include <array>
+
+#include "BundleTypes.h"
+#include "effect-impl/EffectContext.h"
+
+namespace aidl::android::hardware::audio::effect {
+
+class BundleContext final : public EffectContext {
+ public:
+ BundleContext(int statusDepth, const Parameter::Common& common,
+ const lvm::BundleEffectType& type)
+ : EffectContext(statusDepth, common), mType(type) {
+ LOG(DEBUG) << __func__ << type;
+ }
+ ~BundleContext() override {
+ LOG(DEBUG) << __func__;
+ deInit();
+ }
+
+ RetCode init();
+ void deInit();
+ lvm::BundleEffectType getBundleType() const { return mType; }
+
+ RetCode enable();
+ RetCode disable();
+
+ LVM_Handle_t getLvmInstance() const { return mInstance; }
+
+ void setSampleRate (const int sampleRate) { mSampleRate = sampleRate; }
+ int getSampleRate() const { return mSampleRate; }
+
+ void setChannelMask(const aidl::android::media::audio::common::AudioChannelLayout& chMask) {
+ mChMask = chMask;
+ }
+ aidl::android::media::audio::common::AudioChannelLayout getChannelMask() const {
+ return mChMask;
+ }
+
+ RetCode setEqualizerPreset(const int presetIdx);
+ int getEqualizerPreset() const { return mCurPresetIdx; }
+ RetCode setEqualizerBandLevels(const std::vector<Equalizer::BandLevel>& bandLevels);
+ std::vector<Equalizer::BandLevel> getEqualizerBandLevels() const;
+
+ RetCode setVolumeStereo(const Parameter::VolumeStereo& volumeStereo) override;
+ Parameter::VolumeStereo getVolumeStereo() override { return mVolumeStereo; };
+
+ private:
+ const lvm::BundleEffectType mType;
+ bool mEnabled = false;
+ LVM_Handle_t mInstance = nullptr;
+
+ aidl::android::media::audio::common::AudioDeviceDescription mVirtualizerForcedDevice;
+ aidl::android::media::audio::common::AudioChannelLayout mChMask;
+
+ int mSampleRate = LVM_FS_44100;
+ int mSamplesPerSecond = 0;
+ int mSamplesToExitCountEq = 0;
+ int mSamplesToExitCountBb = 0;
+ int mSamplesToExitCountVirt = 0;
+ int mFrameCount = 0;
+
+ /* Bitmask whether drain is in progress due to disabling the effect.
+ The corresponding bit to an effect is set by 1 << lvm_effect_en. */
+ int mEffectInDrain = 0;
+
+ /* Bitmask whether process() was called for a particular effect.
+ The corresponding bit to an effect is set by 1 << lvm_effect_en. */
+ int mEffectProcessCalled = 0;
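+ // (Illustrative note, assuming the legacy lvm_effect_en enum carried over from the non-AIDL
+ // wrapper: disabling the equalizer would set mEffectInDrain |= 1 << LVM_EQUALIZER, and the bit
+ // would be cleared again once that effect's drain completes.)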
+ int mNumberEffectsEnabled = 0;
+ int mNumberEffectsCalled = 0;
+ bool mFirstVolume = false;
+ // Bass
+ bool mBassTempDisabled = false;
+ int mBassStrengthSaved = 0;
+ // Equalizer
+ int mCurPresetIdx = lvm::PRESET_CUSTOM; /* Current preset being used */
+ std::array<int, lvm::MAX_NUM_BANDS> mBandGaindB;
+ // Virtualizer
+ int mVirtStrengthSaved = 0; /* Conversion between Get/Set */
+ bool mVirtualizerTempDisabled = false;
+ // Volume
+ int mLevelSaved = 0; /* for when mute is set, level must be saved */
+ bool mMuteEnabled = false; /* Must store as mute = -96dB level */
+
+ void initControlParameter(LVM_ControlParams_t& params) const;
+ void initHeadroomParameter(LVM_HeadroomParams_t& params) const;
+ int16_t VolToDb(uint32_t vol) const;
+ LVM_INT16 LVC_ToDB_s32Tos16(LVM_INT32 Lin_fix) const;
+ RetCode updateControlParameter(const std::vector<Equalizer::BandLevel>& bandLevels);
+ bool isBandLevelIndexInRange(const std::vector<Equalizer::BandLevel>& bandLevels) const;
+ static LVM_EQNB_BandDef_t* getDefaultEqualizerBandDefs();
+ static LVM_HeadroomBandDef_t* getDefaultEqualizerHeadroomBandDefs();
+};
+
+} // namespace aidl::android::hardware::audio::effect
+
diff --git a/media/libeffects/lvm/wrapper/Aidl/BundleTypes.h b/media/libeffects/lvm/wrapper/Aidl/BundleTypes.h
new file mode 100644
index 0000000..1772bd1
--- /dev/null
+++ b/media/libeffects/lvm/wrapper/Aidl/BundleTypes.h
@@ -0,0 +1,163 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+#include <array>
+
+#include <aidl/android/hardware/audio/effect/BnEffect.h>
+#include "effect-impl/EffectUUID.h"
+#include "effect-impl/EffectTypes.h"
+#include "LVM.h"
+
+namespace aidl::android::hardware::audio::effect {
+namespace lvm {
+
+constexpr inline size_t MAX_NUM_PRESETS = 10;
+constexpr inline size_t MAX_NUM_BANDS = 5;
+constexpr inline size_t MAX_CALL_SIZE = 256;
+constexpr inline int BASS_BOOST_CUP_LOAD_ARM9E = 150; // Expressed in 0.1 MIPS
+constexpr inline int VIRTUALIZER_CUP_LOAD_ARM9E = 120; // Expressed in 0.1 MIPS
+constexpr inline int EQUALIZER_CUP_LOAD_ARM9E = 220; // Expressed in 0.1 MIPS
+constexpr inline int VOLUME_CUP_LOAD_ARM9E = 0; // Expressed in 0.1 MIPS
+constexpr inline int BUNDLE_MEM_USAGE = 25; // Expressed in kB
+constexpr inline int PRESET_CUSTOM = -1;
+
+static const std::vector<Equalizer::BandFrequency> kEqBandFrequency = {{0, 30000, 120000},
+ {1, 120001, 460000},
+ {2, 460001, 1800000},
+ {3, 1800001, 7000000},
+ {4, 7000001, 20000000}};
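+// (The band limits above are in millihertz, matching the AIDL Equalizer::BandFrequency fields --
+// an assumption inferred from the magnitudes, e.g. 30000 mHz == 30 Hz.)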
+
+/*
+Frequencies in Hz
+Note: If these frequencies change, please update LimitLevel values accordingly.
+*/
+constexpr inline std::array<uint16_t, MAX_NUM_BANDS> kPresetsFrequencies = {60, 230, 910, 3600,
+ 14000};
+
+/* Q factor multiplied by 100 */
+constexpr inline std::array<uint16_t, MAX_NUM_BANDS> kPresetsQFactors = {96, 96, 96, 96, 96};
+
+constexpr inline std::array<std::array<int16_t, MAX_NUM_BANDS>, MAX_NUM_PRESETS> kSoftPresets = {
+ {{3, 0, 0, 0, 3}, /* Normal Preset */
+ {5, 3, -2, 4, 4}, /* Classical Preset */
+ {6, 0, 2, 4, 1}, /* Dance Preset */
+ {0, 0, 0, 0, 0}, /* Flat Preset */
+ {3, 0, 0, 2, -1}, /* Folk Preset */
+ {4, 1, 9, 3, 0}, /* Heavy Metal Preset */
+ {5, 3, 0, 1, 3}, /* Hip Hop Preset */
+ {4, 2, -2, 2, 5}, /* Jazz Preset */
+ {-1, 2, 5, 1, -2}, /* Pop Preset */
+ {5, 3, -1, 3, 5}}}; /* Rock Preset */
+
+static const std::vector<Equalizer::Preset> kEqPresets = {
+ {0, "Normal"}, {1, "Classical"}, {2, "Dance"}, {3, "Flat"}, {4, "Folk"},
+ {5, "Heavy Metal"}, {6, "Hip Hop"}, {7, "Jazz"}, {8, "Pop"}, {9, "Rock"}};
+
+static const Equalizer::Capability kEqCap = {.bandFrequencies = kEqBandFrequency,
+ .presets = kEqPresets};
+
+static const Descriptor kEqualizerDesc = {
+ .common = {.id = {.type = EqualizerTypeUUID,
+ .uuid = EqualizerBundleImplUUID,
+ .proxy = std::nullopt},
+ .flags = {.type = Flags::Type::INSERT,
+ .insert = Flags::Insert::FIRST,
+ .volume = Flags::Volume::CTRL},
+ .name = "EqualizerBundle",
+ .implementor = "NXP Software Ltd."},
+ .capability = Capability::make<Capability::equalizer>(kEqCap)};
+
+// TODO: add descriptors for other bundle effect types here.
+static const Descriptor kVirtualizerDesc;
+static const Descriptor kBassBoostDesc;
+static const Descriptor kVolumeDesc;
+
+/* The following tables have been computed using the actual levels measured by the output of
+ * white noise or pink noise (IEC268-1) for the EQ and BassBoost Effects. These are estimates of
+ * the actual energy that 'could' be present in the given band.
+ * If the frequency values in EQNB_5BandPresetsFrequencies change, these values might need to be
+ * updated.
+ */
+constexpr inline std::array<float, MAX_NUM_BANDS> kBandEnergyCoefficient = {7.56, 9.69, 9.59, 7.37,
+ 2.88};
+
+constexpr inline std::array<float, MAX_NUM_BANDS - 1> kBandEnergyCrossCoefficient = {126.0, 115.0,
+ 125.0, 104.0};
+
+constexpr inline std::array<float, MAX_NUM_BANDS> kBassBoostEnergyCrossCoefficient = {
+ 221.21, 208.10, 28.16, 0.0, 0.0};
+
+constexpr inline float kBassBoostEnergyCoefficient = 9.00;
+
+constexpr inline float kVirtualizerContribution = 1.9;
+
+enum class BundleEffectType {
+ BASS_BOOST,
+ VIRTUALIZER,
+ EQUALIZER,
+ VOLUME,
+};
+
+inline std::ostream& operator<<(std::ostream& out, const BundleEffectType& type) {
+ switch (type) {
+ case BundleEffectType::BASS_BOOST:
+ return out << "BASS_BOOST";
+ case BundleEffectType::VIRTUALIZER:
+ return out << "VIRTUALIZER";
+ case BundleEffectType::EQUALIZER:
+ return out << "EQUALIZER";
+ case BundleEffectType::VOLUME:
+ return out << "VOLUME";
+ }
+ return out << "EnumBundleEffectTypeError";
+}
+
+inline std::ostream& operator<<(std::ostream& out, const LVM_ReturnStatus_en& status) {
+ switch (status) {
+ case LVM_SUCCESS:
+ return out << "LVM_SUCCESS";
+ case LVM_ALIGNMENTERROR:
+ return out << "LVM_ALIGNMENTERROR";
+ case LVM_NULLADDRESS:
+ return out << "LVM_NULLADDRESS";
+ case LVM_OUTOFRANGE:
+ return out << "LVM_OUTOFRANGE";
+ case LVM_INVALIDNUMSAMPLES:
+ return out << "LVM_INVALIDNUMSAMPLES";
+ case LVM_WRONGAUDIOTIME:
+ return out << "LVM_WRONGAUDIOTIME";
+ case LVM_ALGORITHMDISABLED:
+ return out << "LVM_ALGORITHMDISABLED";
+ case LVM_ALGORITHMPSA:
+ return out << "LVM_ALGORITHMPSA";
+ case LVM_RETURNSTATUS_DUMMY:
+ return out << "LVM_RETURNSTATUS_DUMMY";
+ }
+ return out << "EnumLvmRetStatusError";
+}
+
+#define GOTO_IF_LVM_ERROR(status, tag, log) \
+ do { \
+ LVM_ReturnStatus_en temp = (status); \
+ if (temp != LVM_SUCCESS) { \
+ LOG(ERROR) << __func__ << " return status: " << temp << " " << (log); \
+ goto tag; \
+ } \
+ } while (0)
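+
+// Illustrative usage sketch (not prescribed by this header): the macro pairs an LVM call with a
+// cleanup label, roughly
+//   GOTO_IF_LVM_ERROR(LVM_GetControlParameters(mInstance, &params), deinit, " getControlParamFailed");
+//   ...
+// deinit:
+//   // release any partially-initialized LVM state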
+
+} // namespace lvm
+} // namespace aidl::android::hardware::audio::effect
diff --git a/media/libeffects/lvm/wrapper/Aidl/EffectBundleAidl.cpp b/media/libeffects/lvm/wrapper/Aidl/EffectBundleAidl.cpp
new file mode 100644
index 0000000..8272462
--- /dev/null
+++ b/media/libeffects/lvm/wrapper/Aidl/EffectBundleAidl.cpp
@@ -0,0 +1,244 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "EffectBundleAidl"
+#include <Utils.h>
+#include <algorithm>
+#include <unordered_set>
+
+#include <android-base/logging.h>
+#include <fmq/AidlMessageQueue.h>
+#include <audio_effects/effect_bassboost.h>
+#include <audio_effects/effect_equalizer.h>
+#include <audio_effects/effect_virtualizer.h>
+
+#include "EffectBundleAidl.h"
+#include <LVM.h>
+#include <limits.h>
+
+using aidl::android::hardware::audio::effect::EffectBundleAidl;
+using aidl::android::hardware::audio::effect::EqualizerBundleImplUUID;
+using aidl::android::hardware::audio::effect::IEffect;
+using aidl::android::hardware::audio::effect::State;
+using aidl::android::media::audio::common::AudioUuid;
+
+extern "C" binder_exception_t createEffect(const AudioUuid* uuid,
+ std::shared_ptr<IEffect>* instanceSpp) {
+ if (uuid == nullptr || *uuid != EqualizerBundleImplUUID) {
+ LOG(ERROR) << __func__ << "uuid not supported";
+ return EX_ILLEGAL_ARGUMENT;
+ }
+ if (instanceSpp) {
+ *instanceSpp = ndk::SharedRefBase::make<EffectBundleAidl>(*uuid);
+ LOG(DEBUG) << __func__ << " instance " << instanceSpp->get() << " created";
+ return EX_NONE;
+ } else {
+ LOG(ERROR) << __func__ << " invalid input parameter!";
+ return EX_ILLEGAL_ARGUMENT;
+ }
+}
+
+extern "C" binder_exception_t destroyEffect(const std::shared_ptr<IEffect>& instanceSp) {
+ if (!instanceSp) {
+ LOG(ERROR) << __func__ << "nullInstance";
+ return EX_ILLEGAL_ARGUMENT;
+ }
+ State state;
+ ndk::ScopedAStatus status = instanceSp->getState(&state);
+ if (!status.isOk() || State::INIT != state) {
+ LOG(ERROR) << __func__ << " instance " << instanceSp.get()
+ << " in state: " << toString(state) << ", status: " << status.getDescription();
+ return EX_ILLEGAL_STATE;
+ }
+ LOG(DEBUG) << __func__ << " instance " << instanceSp.get() << " destroyed";
+ return EX_NONE;
+}
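+
+// (Stated as an assumption rather than taken from this change: the audio effect factory service
+// is expected to dlopen this library and resolve the C entry points createEffect/destroyEffect
+// above when instantiating or tearing down effects.)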
+
+namespace aidl::android::hardware::audio::effect {
+
+EffectBundleAidl::EffectBundleAidl(const AudioUuid& uuid) {
+ LOG(DEBUG) << __func__ << uuid.toString();
+ if (uuid == EqualizerBundleImplUUID) {
+ mType = lvm::BundleEffectType::EQUALIZER;
+ mDescriptor = &lvm::kEqualizerDesc;
+ } else {
+ // TODO: add other bundle effect types here.
+ LOG(ERROR) << __func__ << uuid.toString() << " not supported yet!";
+ }
+}
+
+EffectBundleAidl::~EffectBundleAidl() {
+ releaseContext();
+ LOG(DEBUG) << __func__;
+}
+
+ndk::ScopedAStatus EffectBundleAidl::getDescriptor(Descriptor* _aidl_return) {
+ RETURN_IF(!_aidl_return, EX_ILLEGAL_ARGUMENT, "Parameter:nullptr");
+ LOG(DEBUG) << _aidl_return->toString();
+ *_aidl_return = *mDescriptor;
+ return ndk::ScopedAStatus::ok();
+}
+
+ndk::ScopedAStatus EffectBundleAidl::setParameterCommon(const Parameter& param) {
+ std::lock_guard lg(mMutex);
+ RETURN_IF(!mContext, EX_NULL_POINTER, "nullContext");
+ auto tag = param.getTag();
+ switch (tag) {
+ case Parameter::common:
+ RETURN_IF(mContext->setCommon(param.get<Parameter::common>()) != RetCode::SUCCESS,
+ EX_ILLEGAL_ARGUMENT, "setCommFailed");
+ break;
+ case Parameter::deviceDescription:
+ RETURN_IF(mContext->setOutputDevice(param.get<Parameter::deviceDescription>()) !=
+ RetCode::SUCCESS,
+ EX_ILLEGAL_ARGUMENT, "setDeviceFailed");
+ break;
+ case Parameter::mode:
+ RETURN_IF(mContext->setAudioMode(param.get<Parameter::mode>()) != RetCode::SUCCESS,
+ EX_ILLEGAL_ARGUMENT, "setModeFailed");
+ break;
+ case Parameter::source:
+ RETURN_IF(mContext->setAudioSource(param.get<Parameter::source>()) != RetCode::SUCCESS,
+ EX_ILLEGAL_ARGUMENT, "setSourceFailed");
+ break;
+ case Parameter::volumeStereo:
+ RETURN_IF(mContext->setVolumeStereo(param.get<Parameter::volumeStereo>()) !=
+ RetCode::SUCCESS,
+ EX_ILLEGAL_ARGUMENT, "setVolumeStereoFailed");
+ break;
+ default: {
+ LOG(ERROR) << __func__ << " unsupportedParameterTag " << toString(tag);
+ return ndk::ScopedAStatus::fromExceptionCodeWithMessage(EX_ILLEGAL_ARGUMENT,
+ "commonParamNotSupported");
+ }
+ }
+ return ndk::ScopedAStatus::ok();
+}
+
+ndk::ScopedAStatus EffectBundleAidl::setParameterSpecific(const Parameter::Specific& specific) {
+ LOG(DEBUG) << __func__ << " specific " << specific.toString();
+ auto tag = specific.getTag();
+ RETURN_IF(tag != Parameter::Specific::equalizer, EX_ILLEGAL_ARGUMENT,
+ "specificParamNotSupported");
+ RETURN_IF(mContext == nullptr, EX_NULL_POINTER, "nullContext");
+
+ auto& eq = specific.get<Parameter::Specific::equalizer>();
+ auto eqTag = eq.getTag();
+ switch (eqTag) {
+ case Equalizer::preset:
+ RETURN_IF(mContext->setEqualizerPreset(eq.get<Equalizer::preset>()) != RetCode::SUCCESS,
+ EX_ILLEGAL_ARGUMENT, "setBandLevelsFailed");
+ break;
+ case Equalizer::bandLevels:
+ RETURN_IF(mContext->setEqualizerBandLevels(eq.get<Equalizer::bandLevels>()) !=
+ RetCode::SUCCESS,
+ EX_ILLEGAL_ARGUMENT, "setBandLevelsFailed");
+ break;
+ default:
+ LOG(ERROR) << __func__ << " unsupported parameter " << specific.toString();
+ return ndk::ScopedAStatus::fromExceptionCodeWithMessage(EX_ILLEGAL_ARGUMENT,
+ "eqTagNotSupported");
+ }
+ return ndk::ScopedAStatus::ok();
+}
+
+ndk::ScopedAStatus EffectBundleAidl::getParameterSpecific(const Parameter::Id& id,
+ Parameter::Specific* specific) {
+ auto tag = id.getTag();
+ RETURN_IF(Parameter::Id::equalizerTag != tag, EX_ILLEGAL_ARGUMENT, "wrongIdTag");
+ auto eqId = id.get<Parameter::Id::equalizerTag>();
+ auto eqIdTag = eqId.getTag();
+ switch (eqIdTag) {
+ case Equalizer::Id::commonTag:
+ return getParameterEqualizer(eqId.get<Equalizer::Id::commonTag>(), specific);
+ default:
+ LOG(ERROR) << __func__ << " tag " << toString(eqIdTag) << " not supported";
+ return ndk::ScopedAStatus::fromExceptionCodeWithMessage(EX_ILLEGAL_ARGUMENT,
+ "EqualizerTagNotSupported");
+ }
+}
+
+ndk::ScopedAStatus EffectBundleAidl::getParameterEqualizer(const Equalizer::Tag& tag,
+ Parameter::Specific* specific) {
+ std::lock_guard lg(mMutex);
+ RETURN_IF(!mContext, EX_NULL_POINTER, "nullContext");
+ Equalizer eqParam;
+ switch (tag) {
+ case Equalizer::bandLevels: {
+ eqParam.set<Equalizer::bandLevels>(mContext->getEqualizerBandLevels());
+ break;
+ }
+ case Equalizer::preset: {
+ eqParam.set<Equalizer::preset>(mContext->getEqualizerPreset());
+ break;
+ }
+ default: {
+ LOG(ERROR) << __func__ << " not handled tag: " << toString(tag);
+ return ndk::ScopedAStatus::fromExceptionCodeWithMessage(
+ EX_ILLEGAL_ARGUMENT, "unsupportedTag");
+ }
+ }
+
+ specific->set<Parameter::Specific::equalizer>(eqParam);
+ return ndk::ScopedAStatus::ok();
+}
+
+std::shared_ptr<EffectContext> EffectBundleAidl::createContext(const Parameter::Common& common) {
+ if (mContext) {
+ LOG(DEBUG) << __func__ << " context already exist";
+ return mContext;
+ }
+
+ // GlobalSession is a singleton
+ mContext =
+ GlobalSession::getGlobalSession().createSession(mType, 1 /* statusFmqDepth */, common);
+ return mContext;
+}
+
+RetCode EffectBundleAidl::releaseContext() {
+ if (mContext) {
+ GlobalSession::getGlobalSession().releaseSession(mType, mContext->getSessionId());
+ mContext.reset();
+ }
+ return RetCode::SUCCESS;
+}
+
+// Processing method running in EffectWorker thread.
+IEffect::Status EffectBundleAidl::effectProcessImpl(float* in, float* out, int sampleToProcess) {
+ LOG(DEBUG) << __func__ << " in " << in << " out " << out << " sample " << sampleToProcess;
+ if (!mContext) {
+ LOG(ERROR) << __func__ << " nullContext";
+ return {EX_NULL_POINTER, 0, 0};
+ }
+
+ auto frameSize = mContext->getInputFrameSize();
+ if (0 == frameSize) {
+ LOG(ERROR) << __func__ << " frameSizeIs0";
+ return {EX_ILLEGAL_ARGUMENT, 0, 0};
+ }
+
+ LOG(DEBUG) << __func__ << " start processing";
+ LVM_UINT16 frames = sampleToProcess * sizeof(float) / frameSize;
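+ // e.g. (sketch): with stereo float input getInputFrameSize() is 8 bytes per frame, so 480
+ // incoming float samples translate to 240 LVM frames here.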
+ LVM_ReturnStatus_en lvmStatus = LVM_Process(mContext->getLvmInstance(), in, out, frames, 0);
+ if (lvmStatus != LVM_SUCCESS) {
+ LOG(ERROR) << __func__ << " LVM_Process error: " << lvmStatus;
+ return {EX_UNSUPPORTED_OPERATION, 0, 0};
+ }
+ LOG(DEBUG) << __func__ << " done processing";
+ return {STATUS_OK, sampleToProcess, sampleToProcess};
+}
+
+} // namespace aidl::android::hardware::audio::effect
diff --git a/media/libeffects/lvm/wrapper/Aidl/EffectBundleAidl.h b/media/libeffects/lvm/wrapper/Aidl/EffectBundleAidl.h
new file mode 100644
index 0000000..f10003e
--- /dev/null
+++ b/media/libeffects/lvm/wrapper/Aidl/EffectBundleAidl.h
@@ -0,0 +1,86 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+#include <functional>
+#include <map>
+#include <memory>
+#include <mutex>
+
+#include <aidl/android/hardware/audio/effect/BnEffect.h>
+#include <android-base/logging.h>
+
+#include "effect-impl/EffectImpl.h"
+#include "effect-impl/EffectUUID.h"
+
+#include "BundleContext.h"
+#include "BundleTypes.h"
+#include "GlobalSession.h"
+
+namespace aidl::android::hardware::audio::effect {
+
+class EffectBundleAidl final : public EffectImpl {
+ public:
+ explicit EffectBundleAidl(const AudioUuid& uuid);
+ ~EffectBundleAidl() override;
+
+ ndk::ScopedAStatus getDescriptor(Descriptor* _aidl_return) override;
+ ndk::ScopedAStatus setParameterCommon(const Parameter& param) override;
+ ndk::ScopedAStatus setParameterSpecific(const Parameter::Specific& specific) override;
+ ndk::ScopedAStatus getParameterSpecific(const Parameter::Id& id,
+ Parameter::Specific* specific) override;
+ IEffect::Status effectProcessImpl(float *in, float *out, int sampleToProcess) override;
+
+ std::shared_ptr<EffectContext> createContext(const Parameter::Common& common) override;
+ RetCode releaseContext() override;
+
+ ndk::ScopedAStatus commandStart() override {
+ mContext->enable();
+ return ndk::ScopedAStatus::ok();
+ }
+ ndk::ScopedAStatus commandStop() override {
+ mContext->disable();
+ return ndk::ScopedAStatus::ok();
+ }
+ ndk::ScopedAStatus commandReset() override {
+ mContext->disable();
+ return ndk::ScopedAStatus::ok();
+ }
+
+ private:
+ const Descriptor* mDescriptor;
+ lvm::BundleEffectType mType = lvm::BundleEffectType::EQUALIZER;
+ std::shared_ptr<BundleContext> mContext;
+
+ int mPreset = lvm::PRESET_CUSTOM;
+ size_t mInputFrameSize, mOutputFrameSize;
+
+ // Equalizer
+ int mCurPresetIdx = lvm::PRESET_CUSTOM; /* Current preset being used */
+ int32_t mBandGaindB[lvm::MAX_NUM_BANDS];
+
+ RetCode setEqPreset(const int& presetIdx);
+ int getEqPreset() const { return mCurPresetIdx; }
+
+ RetCode setEqBandLevels(const std::vector<Equalizer::BandLevel>& bandLevels);
+ std::vector<Equalizer::BandLevel> getEqBandLevels() const;
+
+ IEffect::Status status(binder_status_t status, size_t consumed, size_t produced);
+ ndk::ScopedAStatus getParameterEqualizer(const Equalizer::Tag& tag,
+ Parameter::Specific* specific);
+};
+
+} // namespace aidl::android::hardware::audio::effect
diff --git a/media/libeffects/lvm/wrapper/Aidl/GlobalSession.h b/media/libeffects/lvm/wrapper/Aidl/GlobalSession.h
new file mode 100644
index 0000000..9226274
--- /dev/null
+++ b/media/libeffects/lvm/wrapper/Aidl/GlobalSession.h
@@ -0,0 +1,118 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <algorithm>
+#include <memory>
+#include <unordered_map>
+
+#include <android-base/logging.h>
+
+#include "BundleContext.h"
+#include "BundleTypes.h"
+
+namespace aidl::android::hardware::audio::effect {
+
+/**
+ * @brief Maintain all effect bundle sessions.
+ *
+ * Sessions are identified by session ID; at most MAX_BUNDLE_SESSIONS sessions are supported by
+ * the bundle implementation.
+ */
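+// Illustrative flow (sketch mirroring EffectBundleAidl::createContext/releaseContext):
+//   auto ctx = GlobalSession::getGlobalSession().createSession(type, 1 /* statusDepth */, common);
+//   ... process with ctx ...
+//   GlobalSession::getGlobalSession().releaseSession(type, common.session);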
+class GlobalSession {
+ public:
+ static GlobalSession& getGlobalSession() {
+ static GlobalSession instance;
+ return instance;
+ }
+
+ bool isSessionIdExist(int sessionId) const { return mSessionMap.count(sessionId); }
+
+ static bool findBundleTypeInList(std::vector<std::shared_ptr<BundleContext>>& list,
+ const lvm::BundleEffectType& type, bool remove = false) {
+ auto itor = std::find_if(list.begin(), list.end(),
+ [type](const std::shared_ptr<BundleContext>& bundle) {
+ return bundle ? bundle->getBundleType() == type : false;
+ });
+ if (itor == list.end()) {
+ return false;
+ }
+ if (remove && *itor) {
+ (*itor)->deInit();
+ list.erase(itor);
+ }
+ return true;
+ }
+
+ /**
+ * Create a BundleContext of the given type, held in a shared_ptr; each session must not have
+ * more than one context of a given type.
+ */
+ std::shared_ptr<BundleContext> createSession(const lvm::BundleEffectType& type, int statusDepth,
+ const Parameter::Common& common) {
+ int sessionId = common.session;
+ LOG(DEBUG) << __func__ << type << " with sessionId " << sessionId;
+ std::lock_guard lg(mMutex);
+ if (mSessionMap.count(sessionId) == 0 && mSessionMap.size() >= MAX_BUNDLE_SESSIONS) {
+ LOG(ERROR) << __func__ << " exceed max bundle session";
+ return nullptr;
+ }
+
+ if (mSessionMap.count(sessionId)) {
+ if (findBundleTypeInList(mSessionMap[sessionId], type)) {
+ LOG(ERROR) << __func__ << type << " already exist in session " << sessionId;
+ return nullptr;
+ }
+ }
+
+ auto& list = mSessionMap[sessionId];
+ auto context = std::make_shared<BundleContext>(statusDepth, common, type);
+ RETURN_VALUE_IF(!context, nullptr, "failedToCreateContext");
+
+ RetCode ret = context->init();
+ if (RetCode::SUCCESS != ret) {
+ LOG(ERROR) << __func__ << " context init ret " << ret;
+ return nullptr;
+ }
+ list.push_back(context);
+ return context;
+ }
+
+ void releaseSession(const lvm::BundleEffectType& type, int sessionId) {
+ LOG(DEBUG) << __func__ << type << " sessionId " << sessionId;
+ std::lock_guard lg(mMutex);
+ if (mSessionMap.count(sessionId)) {
+ auto& list = mSessionMap[sessionId];
+ if (!findBundleTypeInList(list, type, true /* remove */)) {
+ LOG(ERROR) << __func__ << " can't find " << type << "in session " << sessionId;
+ return;
+ }
+ if (list.size() == 0) {
+ mSessionMap.erase(sessionId);
+ }
+ }
+ }
+
+ private:
+ // Lock for mSessionMap access.
+ std::mutex mMutex;
+ // Max session number supported.
+ static constexpr int MAX_BUNDLE_SESSIONS = 32;
+ std::unordered_map<int /* session ID */, std::vector<std::shared_ptr<BundleContext>>>
+ mSessionMap GUARDED_BY(mMutex);
+};
+} // namespace aidl::android::hardware::audio::effect
diff --git a/media/libeffects/lvm/wrapper/Android.bp b/media/libeffects/lvm/wrapper/Android.bp
index 1287514..a32188a 100644
--- a/media/libeffects/lvm/wrapper/Android.bp
+++ b/media/libeffects/lvm/wrapper/Android.bp
@@ -100,3 +100,29 @@
integer_overflow: true,
},
}
+
+cc_library_shared {
+ name: "libbundleaidl",
+ srcs: [
+ "Aidl/BundleContext.cpp",
+ "Aidl/EffectBundleAidl.cpp",
+ ":effectCommonFile",
+ ],
+ static_libs: ["libmusicbundle"],
+ defaults: [
+ "aidlaudioservice_defaults",
+ "latest_android_hardware_audio_effect_ndk_shared",
+ "latest_android_media_audio_common_types_ndk_shared",
+ ],
+ local_include_dirs: ["Aidl"],
+ header_libs: [
+ "libaudioeffects",
+ "libhardware_headers",
+ ],
+ shared_libs: [
+ "liblog",
+ ],
+ visibility: [
+ "//hardware/interfaces/audio/aidl/default",
+ ],
+}
\ No newline at end of file
diff --git a/media/libheadtracking/Android.bp b/media/libheadtracking/Android.bp
index 7e2c762..f64aedf 100644
--- a/media/libheadtracking/Android.bp
+++ b/media/libheadtracking/Android.bp
@@ -83,6 +83,7 @@
"Twist-test.cpp",
],
shared_libs: [
+ "libaudioutils",
"libheadtracking",
],
}
diff --git a/media/libheadtracking/HeadTrackingProcessor.cpp b/media/libheadtracking/HeadTrackingProcessor.cpp
index ccb75af..101b825 100644
--- a/media/libheadtracking/HeadTrackingProcessor.cpp
+++ b/media/libheadtracking/HeadTrackingProcessor.cpp
@@ -164,29 +164,30 @@
std::string toString_l(unsigned level) const override {
std::string prefixSpace(level, ' ');
std::string ss = prefixSpace + "HeadTrackingProcessor:\n";
- StringAppendF(&ss, "%smaxTranslationalVelocity: %f\n", prefixSpace.c_str(),
+ StringAppendF(&ss, "%s maxTranslationalVelocity: %f meter/second\n", prefixSpace.c_str(),
mOptions.maxTranslationalVelocity);
- StringAppendF(&ss, "%smaxRotationalVelocity: %f\n", prefixSpace.c_str(),
+ StringAppendF(&ss, "%s maxRotationalVelocity: %f rad/second\n", prefixSpace.c_str(),
mOptions.maxRotationalVelocity);
- StringAppendF(&ss, "%sfreshnessTimeout: %" PRId64 "\n", prefixSpace.c_str(),
- mOptions.freshnessTimeout);
- StringAppendF(&ss, "%spredictionDuration: %f\n", prefixSpace.c_str(),
- mOptions.predictionDuration);
- StringAppendF(&ss, "%sautoRecenterWindowDuration: %" PRId64 "\n", prefixSpace.c_str(),
- mOptions.autoRecenterWindowDuration);
- StringAppendF(&ss, "%sautoRecenterTranslationalThreshold: %f\n", prefixSpace.c_str(),
+ StringAppendF(&ss, "%s freshnessTimeout: %0.4f ms\n", prefixSpace.c_str(),
+ media::nsToFloatMs(mOptions.freshnessTimeout));
+ StringAppendF(&ss, "%s predictionDuration: %0.4f ms\n", prefixSpace.c_str(),
+ media::nsToFloatMs(mOptions.predictionDuration));
+ StringAppendF(&ss, "%s autoRecenterWindowDuration: %0.4f ms\n", prefixSpace.c_str(),
+ media::nsToFloatMs(mOptions.autoRecenterWindowDuration));
+ StringAppendF(&ss, "%s autoRecenterTranslationalThreshold: %f meter\n", prefixSpace.c_str(),
mOptions.autoRecenterTranslationalThreshold);
- StringAppendF(&ss, "%sautoRecenterRotationalThreshold: %f\n", prefixSpace.c_str(),
+ StringAppendF(&ss, "%s autoRecenterRotationalThreshold: %f radians\n", prefixSpace.c_str(),
mOptions.autoRecenterRotationalThreshold);
- StringAppendF(&ss, "%sscreenStillnessWindowDuration: %" PRId64 "\n", prefixSpace.c_str(),
- mOptions.screenStillnessWindowDuration);
- StringAppendF(&ss, "%sscreenStillnessTranslationalThreshold: %f\n", prefixSpace.c_str(),
- mOptions.screenStillnessTranslationalThreshold);
- StringAppendF(&ss, "%sscreenStillnessRotationalThreshold: %f\n", prefixSpace.c_str(),
- mOptions.screenStillnessRotationalThreshold);
+ StringAppendF(&ss, "%s screenStillnessWindowDuration: %0.4f ms\n", prefixSpace.c_str(),
+ media::nsToFloatMs(mOptions.screenStillnessWindowDuration));
+ StringAppendF(&ss, "%s screenStillnessTranslationalThreshold: %f meter\n",
+ prefixSpace.c_str(), mOptions.screenStillnessTranslationalThreshold);
+ StringAppendF(&ss, "%s screenStillnessRotationalThreshold: %f radians\n",
+ prefixSpace.c_str(), mOptions.screenStillnessRotationalThreshold);
+ ss += mModeSelector.toString(level + 1);
+ ss += mRateLimiter.toString(level + 1);
ss.append(prefixSpace + "ReCenterHistory:\n");
ss += mLocalLog.dumpToString((prefixSpace + " ").c_str(), mMaxLocalLogLine);
- // TODO: 233092747 add string from PoseRateLimiter/PoseRateLimiter etc...
return ss;
}
@@ -217,5 +218,17 @@
return std::make_unique<HeadTrackingProcessorImpl>(options, initialMode);
}
+std::string toString(HeadTrackingMode mode) {
+ switch (mode) {
+ case HeadTrackingMode::STATIC:
+ return "STATIC";
+ case HeadTrackingMode::WORLD_RELATIVE:
+ return "WORLD_RELATIVE";
+ case HeadTrackingMode::SCREEN_RELATIVE:
+ return "SCREEN_RELATIVE";
+ }
+ return "EnumNotImplemented";
+}
+
} // namespace media
} // namespace android
diff --git a/media/libheadtracking/ModeSelector.cpp b/media/libheadtracking/ModeSelector.cpp
index cb3a27f..6277090 100644
--- a/media/libheadtracking/ModeSelector.cpp
+++ b/media/libheadtracking/ModeSelector.cpp
@@ -13,11 +13,13 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
+#include <android-base/stringprintf.h>
#include "ModeSelector.h"
namespace android {
namespace media {
+using android::base::StringAppendF;
ModeSelector::ModeSelector(const Options& options, HeadTrackingMode initialMode)
: mOptions(options), mDesiredMode(initialMode), mActualMode(initialMode) {}
@@ -47,12 +49,15 @@
}
void ModeSelector::calculateActualMode(int64_t timestamp) {
- bool isValidScreenToHead = mScreenToHead.has_value() &&
- timestamp - mScreenToHeadTimestamp < mOptions.freshnessTimeout;
- bool isValidWorldToHead = mWorldToHead.has_value() &&
- timestamp - mWorldToHeadTimestamp < mOptions.freshnessTimeout;
- bool isValidScreenStable = mScreenStable.has_value() &&
- timestamp - mScreenStableTimestamp < mOptions.freshnessTimeout;
+ int64_t screenToHeadGap = timestamp - mScreenToHeadTimestamp;
+ int64_t worldToHeadGap = timestamp - mWorldToHeadTimestamp;
+ int64_t screenStableGap = timestamp - mScreenStableTimestamp;
+ bool isValidScreenToHead =
+ mScreenToHead.has_value() && screenToHeadGap < mOptions.freshnessTimeout;
+ bool isValidWorldToHead =
+ mWorldToHead.has_value() && worldToHeadGap < mOptions.freshnessTimeout;
+ bool isValidScreenStable =
+ mScreenStable.has_value() && screenStableGap < mOptions.freshnessTimeout;
HeadTrackingMode mode = mDesiredMode;
@@ -70,7 +75,17 @@
}
}
- mActualMode = mode;
+ if (mode != mActualMode) {
+ mLocalLog.log(
+ "HT mode change from %s to %s, this ts %0.4f ms, lastTs+gap [ScreenToHead %0.4f + "
+ "%0.4f, WorldToHead %0.4f + %0.4f, ScreenStable %0.4f + %0.4f] ms",
+ media::toString(mActualMode).c_str(), media::toString(mode).c_str(),
+ media::nsToFloatMs(timestamp), media::nsToFloatMs(mScreenToHeadTimestamp),
+ media::nsToFloatMs(screenToHeadGap), media::nsToFloatMs(mWorldToHeadTimestamp),
+ media::nsToFloatMs(worldToHeadGap), media::nsToFloatMs(mScreenStableTimestamp),
+ media::nsToFloatMs(screenStableGap));
+ mActualMode = mode;
+ }
}
void ModeSelector::calculate(int64_t timestamp) {
@@ -99,5 +114,15 @@
return mActualMode;
}
+std::string ModeSelector::toString(unsigned level) const {
+ std::string prefixSpace(level, ' ');
+ std::string ss(prefixSpace);
+ StringAppendF(&ss, "ModeSelector: ScreenToStage %s\n",
+ mScreenToStage.toString().c_str());
+ ss.append(prefixSpace + "Mode downgrade history:\n");
+ ss += mLocalLog.dumpToString((prefixSpace + " ").c_str(), sMaxLocalLogLine);
+ return ss;
+}
+
} // namespace media
} // namespace android
diff --git a/media/libheadtracking/ModeSelector.h b/media/libheadtracking/ModeSelector.h
index e537040..2475a5b 100644
--- a/media/libheadtracking/ModeSelector.h
+++ b/media/libheadtracking/ModeSelector.h
@@ -16,6 +16,7 @@
#pragma once
#include <optional>
+#include <audio_utils/SimpleLog.h>
#include "media/HeadTrackingMode.h"
#include "media/Pose.h"
@@ -114,6 +115,8 @@
*/
HeadTrackingMode getActualMode() const;
+ std::string toString(unsigned level) const;
+
private:
const Options mOptions;
@@ -129,6 +132,9 @@
HeadTrackingMode mActualMode;
Pose3f mHeadToStage;
+ static constexpr std::size_t sMaxLocalLogLine = 10;
+ SimpleLog mLocalLog{sMaxLocalLogLine};
+
void calculateActualMode(int64_t timestamp);
};
diff --git a/media/libheadtracking/Pose.cpp b/media/libheadtracking/Pose.cpp
index ae39512..4a4b56a 100644
--- a/media/libheadtracking/Pose.cpp
+++ b/media/libheadtracking/Pose.cpp
@@ -13,6 +13,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
+#include <android-base/stringprintf.h>
#include "media/Pose.h"
#include "media/Twist.h"
@@ -21,6 +22,7 @@
namespace android {
namespace media {
+using android::base::StringAppendF;
using Eigen::Vector3f;
std::optional<Pose3f> Pose3f::fromVector(const std::vector<float>& vec) {
@@ -35,6 +37,19 @@
return {mTranslation[0], mTranslation[1], mTranslation[2], rot[0], rot[1], rot[2]};
}
+std::string Pose3f::toString() const {
+ const auto& vec = this->toVector();
+ std::string ss = "[";
+ for (auto f = vec.begin(); f != vec.end(); ++f) {
+ if (f != vec.begin()) {
+ ss.append(", ");
+ }
+ StringAppendF(&ss, "%0.2f", *f);
+ }
+ ss.append("]");
+ return ss;
+}
+
std::tuple<Pose3f, bool> moveWithRateLimit(const Pose3f& from, const Pose3f& to, float t,
float maxTranslationalVelocity,
float maxRotationalVelocity) {
diff --git a/media/libheadtracking/PoseRateLimiter.cpp b/media/libheadtracking/PoseRateLimiter.cpp
index 380e22b..060bb4b 100644
--- a/media/libheadtracking/PoseRateLimiter.cpp
+++ b/media/libheadtracking/PoseRateLimiter.cpp
@@ -13,11 +13,13 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
+#include <android-base/stringprintf.h>
#include "PoseRateLimiter.h"
namespace android {
namespace media {
+using android::base::StringAppendF;
PoseRateLimiter::PoseRateLimiter(const Options& options) : mOptions(options), mLimiting(false) {}
@@ -48,5 +50,15 @@
return pose;
}
+std::string PoseRateLimiter::toString(unsigned level) const {
+ std::string ss(level, ' ');
+ if (mLimiting) {
+ StringAppendF(&ss, "PoseRateLimiter: enabled with target: %s\n",
+ mTargetPose.has_value() ? mTargetPose.value().toString().c_str() : "NULL");
+ } else {
+ StringAppendF(&ss, "PoseRateLimiter: disabled\n");
+ }
+ return ss;
+}
} // namespace media
} // namespace android
diff --git a/media/libheadtracking/PoseRateLimiter.h b/media/libheadtracking/PoseRateLimiter.h
index aa2fe80..c673a33 100644
--- a/media/libheadtracking/PoseRateLimiter.h
+++ b/media/libheadtracking/PoseRateLimiter.h
@@ -77,6 +77,8 @@
Pose3f calculatePose(int64_t timestamp);
+ std::string toString(unsigned level) const;
+
private:
struct Point {
Pose3f pose;
diff --git a/media/libheadtracking/SensorPoseProvider.cpp b/media/libheadtracking/SensorPoseProvider.cpp
index 6c0a96d..bd8af04 100644
--- a/media/libheadtracking/SensorPoseProvider.cpp
+++ b/media/libheadtracking/SensorPoseProvider.cpp
@@ -18,9 +18,10 @@
#define LOG_TAG "SensorPoseProvider"
-#include <inttypes.h>
-
+#include <algorithm>
#include <future>
+#include <inttypes.h>
+#include <limits>
#include <map>
#include <thread>
@@ -135,7 +136,10 @@
{
std::lock_guard lock(mMutex);
- mEnabledSensorsExtra.emplace(sensor, SensorExtra{ .format = format });
+ mEnabledSensorsExtra.emplace(
+ sensor,
+ SensorExtra{.format = format,
+ .samplingPeriod = static_cast<int32_t>(samplingPeriod.count())});
}
// Enable the sensor.
@@ -173,8 +177,11 @@
StringAppendF(&ss, "%sSensors total number %zu:\n", prefixSpace.c_str(),
mEnabledSensorsExtra.size());
for (auto sensor : mEnabledSensorsExtra) {
- StringAppendF(&ss, "%s[Handle: 0x%08x, Format %s", prefixSpace.c_str(), sensor.first,
- toString(sensor.second.format).c_str());
+ StringAppendF(&ss,
+ "%s[Handle: 0x%08x, Format %s Period (set %d max %0.4f min %0.4f) ms",
+ prefixSpace.c_str(), sensor.first, toString(sensor.second.format).c_str(),
+ sensor.second.samplingPeriod, media::nsToFloatMs(sensor.second.maxPeriod),
+ media::nsToFloatMs(sensor.second.minPeriod));
if (sensor.second.discontinuityCount.has_value()) {
StringAppendF(&ss, ", DiscontinuityCount: %d",
sensor.second.discontinuityCount.value());
@@ -202,7 +209,11 @@
};
struct SensorExtra {
- DataFormat format;
+ DataFormat format = DataFormat::kUnknown;
+ int32_t samplingPeriod = 0;
+ int64_t latestTimestamp = 0;
+ int64_t maxPeriod = 0;
+ int64_t minPeriod = std::numeric_limits<int64_t>::max();
std::optional<int32_t> discontinuityCount;
};
@@ -296,6 +307,7 @@
return;
}
value = parseEvent(event, iter->second.format, &iter->second.discontinuityCount);
+ updateEventTimestamp(event, iter->second);
}
mListener->onPose(event.timestamp, event.sensor, value.pose, value.twist,
value.isNewReference);
@@ -351,6 +363,15 @@
return std::nullopt;
}
+ void updateEventTimestamp(const ASensorEvent& event, SensorExtra& extra) {
+ if (extra.latestTimestamp != 0) {
+ int64_t gap = event.timestamp - extra.latestTimestamp;
+ extra.maxPeriod = std::max(gap, extra.maxPeriod);
+ extra.minPeriod = std::min(gap, extra.minPeriod);
+ }
+ extra.latestTimestamp = event.timestamp;
+ }
+
static PoseEvent parseEvent(const ASensorEvent& event, DataFormat format,
std::optional<int32_t>* discontinutyCount) {
switch (format) {
@@ -381,7 +402,7 @@
}
}
- const std::string toString(DataFormat format) {
+ const static std::string toString(DataFormat format) {
switch (format) {
case DataFormat::kUnknown:
return "kUnknown";
diff --git a/media/libheadtracking/include/media/HeadTrackingMode.h b/media/libheadtracking/include/media/HeadTrackingMode.h
index 38496e8..92d1165 100644
--- a/media/libheadtracking/include/media/HeadTrackingMode.h
+++ b/media/libheadtracking/include/media/HeadTrackingMode.h
@@ -15,6 +15,8 @@
*/
#pragma once
+#include <string>
+
namespace android {
namespace media {
@@ -30,5 +32,7 @@
SCREEN_RELATIVE,
};
+std::string toString(HeadTrackingMode mode);
+
} // namespace media
} // namespace android
diff --git a/media/libheadtracking/include/media/Pose.h b/media/libheadtracking/include/media/Pose.h
index e660bb9..50294ed 100644
--- a/media/libheadtracking/include/media/Pose.h
+++ b/media/libheadtracking/include/media/Pose.h
@@ -16,6 +16,7 @@
#pragma once
#include <optional>
+#include <string>
#include <vector>
#include <Eigen/Geometry>
@@ -63,6 +64,9 @@
*/
std::vector<float> toVector() const;
+ // Convert instance to a string representation.
+ std::string toString() const;
+
Pose3f& operator=(const Pose3f& other) {
mTranslation = other.mTranslation;
mRotation = other.mRotation;
@@ -128,5 +132,10 @@
float maxTranslationalVelocity,
float maxRotationalVelocity);
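+// Converts a nanosecond duration to floating-point milliseconds,
+// e.g. nsToFloatMs(2'500'000) yields 2.5f (descriptive note).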
+template <typename T>
+static float nsToFloatMs(T ns) {
+ return ns * 1e-6f;
+}
+
} // namespace media
} // namespace android
diff --git a/media/libmedia/include/media/mediaplayer.h b/media/libmedia/include/media/mediaplayer.h
index de4c7db..b45dae5 100644
--- a/media/libmedia/include/media/mediaplayer.h
+++ b/media/libmedia/include/media/mediaplayer.h
@@ -195,7 +195,8 @@
INVOKE_ID_SELECT_TRACK = 4,
INVOKE_ID_UNSELECT_TRACK = 5,
INVOKE_ID_SET_VIDEO_SCALING_MODE = 6,
- INVOKE_ID_GET_SELECTED_TRACK = 7
+ INVOKE_ID_GET_SELECTED_TRACK = 7,
+ INVOKE_ID_SET_PLAYER_IID = 8,
};
// ----------------------------------------------------------------------------
diff --git a/media/libmedia/xsd/api/current.txt b/media/libmedia/xsd/api/current.txt
index 73b5f8d..35aa213 100644
--- a/media/libmedia/xsd/api/current.txt
+++ b/media/libmedia/xsd/api/current.txt
@@ -47,7 +47,9 @@
method public java.util.List<media.profiles.EncoderProfile> getEncoderProfile_optional();
method public java.util.List<media.profiles.CamcorderProfiles.ImageDecodingOptional> getImageDecoding_optional();
method public java.util.List<media.profiles.CamcorderProfiles.ImageEncodingOptional> getImageEncoding_optional();
+ method public int getStartOffsetMs();
method public void setCameraId(int);
+ method public void setStartOffsetMs(int);
}
public static class CamcorderProfiles.ImageDecodingOptional {
diff --git a/media/libmedia/xsd/media_profiles.xsd b/media/libmedia/xsd/media_profiles.xsd
index 9664456..dcc3028 100644
--- a/media/libmedia/xsd/media_profiles.xsd
+++ b/media/libmedia/xsd/media_profiles.xsd
@@ -49,6 +49,7 @@
</xs:element>
</xs:choice>
<xs:attribute name="cameraId" type="xs:int"/>
+ <xs:attribute name="startOffsetMs" type="xs:int"/>
</xs:complexType>
<xs:complexType name="EncoderProfile">
<xs:sequence>
diff --git a/media/libmediametrics/include/MediaMetricsConstants.h b/media/libmediametrics/include/MediaMetricsConstants.h
index bebd382..27f987d 100644
--- a/media/libmediametrics/include/MediaMetricsConstants.h
+++ b/media/libmediametrics/include/MediaMetricsConstants.h
@@ -87,6 +87,7 @@
#define AMEDIAMETRICS_PROP_PREFIX_EFFECTIVE "effective."
#define AMEDIAMETRICS_PROP_PREFIX_HAL "hal."
#define AMEDIAMETRICS_PROP_PREFIX_HAPTIC "haptic."
+#define AMEDIAMETRICS_PROP_PREFIX_LAST "last."
#define AMEDIAMETRICS_PROP_PREFIX_SERVER "server."
// Properties within mediametrics are string constants denoted by
diff --git a/media/libmediaplayerservice/MediaPlayerService.cpp b/media/libmediaplayerservice/MediaPlayerService.cpp
index 8e19d02..bdf1cbc 100644
--- a/media/libmediaplayerservice/MediaPlayerService.cpp
+++ b/media/libmediaplayerservice/MediaPlayerService.cpp
@@ -1805,7 +1805,8 @@
MediaPlayerService::AudioOutput::AudioOutput(audio_session_t sessionId,
const AttributionSourceState& attributionSource, const audio_attributes_t* attr,
const sp<AudioSystem::AudioDeviceCallback>& deviceCallback)
- : mCallback(NULL),
+ : mCachedPlayerIId(PLAYER_PIID_INVALID),
+ mCallback(NULL),
mCallbackCookie(NULL),
mCallbackData(NULL),
mStreamType(AUDIO_STREAM_MUSIC),
@@ -2314,6 +2315,10 @@
return t->applyVolumeShaper(shaper.mConfiguration, operationToEnd);
});
+ if (mCachedPlayerIId != PLAYER_PIID_INVALID) {
+ t->setPlayerIId(mCachedPlayerIId);
+ }
+
mSampleRateHz = sampleRate;
mFlags = flags;
mMsecsPerFrame = 1E3f / (mPlaybackRate.mSpeed * sampleRate);
@@ -2366,6 +2371,17 @@
return NO_INIT;
}
+void MediaPlayerService::AudioOutput::setPlayerIId(int32_t playerIId)
+{
+ ALOGV("setPlayerIId(%d)", playerIId);
+ Mutex::Autolock lock(mLock);
+ mCachedPlayerIId = playerIId;
+
+ if (mTrack != nullptr) {
+ mTrack->setPlayerIId(mCachedPlayerIId);
+ }
+}
+
void MediaPlayerService::AudioOutput::setNextOutput(const sp<AudioOutput>& nextOutput) {
Mutex::Autolock lock(mLock);
mNextOutput = nextOutput;
@@ -2683,7 +2699,7 @@
// This is a benign busy-wait, with the next data request generated 10 ms or more later;
// nevertheless for power reasons, we don't want to see too many of these.
- ALOGV_IF(actualSize == 0 && buffer->size > 0, "callbackwrapper: empty buffer returned");
+ ALOGV_IF(actualSize == 0 && buffer.size() > 0, "callbackwrapper: empty buffer returned");
unlock();
return actualSize;
}
diff --git a/media/libmediaplayerservice/MediaPlayerService.h b/media/libmediaplayerservice/MediaPlayerService.h
index 86be3fe..52c2f79 100644
--- a/media/libmediaplayerservice/MediaPlayerService.h
+++ b/media/libmediaplayerservice/MediaPlayerService.h
@@ -113,6 +113,8 @@
bool doNotReconnect = false,
uint32_t suggestedFrameCount = 0);
+ virtual void setPlayerIId(int32_t playerIId);
+
virtual status_t start();
virtual ssize_t write(const void* buffer, size_t size, bool blocking = true);
virtual void stop();
@@ -160,6 +162,7 @@
sp<AudioTrack> mTrack;
sp<AudioTrack> mRecycledTrack;
sp<AudioOutput> mNextOutput;
+ int mCachedPlayerIId;
AudioCallback mCallback;
void * mCallbackCookie;
sp<CallbackData> mCallbackData;
diff --git a/media/libmediaplayerservice/include/MediaPlayerInterface.h b/media/libmediaplayerservice/include/MediaPlayerInterface.h
index 1cbd8a0..fb20aab 100644
--- a/media/libmediaplayerservice/include/MediaPlayerInterface.h
+++ b/media/libmediaplayerservice/include/MediaPlayerInterface.h
@@ -123,6 +123,8 @@
bool doNotReconnect = false,
uint32_t suggestedFrameCount = 0) = 0;
+ virtual void setPlayerIId(int32_t playerIId) = 0;
+
virtual status_t start() = 0;
/* Input parameter |size| is in byte units stored in |buffer|.
diff --git a/media/libmediaplayerservice/nuplayer/Android.bp b/media/libmediaplayerservice/nuplayer/Android.bp
index 71a3168..911463b 100644
--- a/media/libmediaplayerservice/nuplayer/Android.bp
+++ b/media/libmediaplayerservice/nuplayer/Android.bp
@@ -17,6 +17,14 @@
],
}
+cc_library_headers {
+ name: "libstagefright_nuplayer_headers",
+
+ export_include_dirs: [
+ "include",
+ ],
+}
+
cc_library_static {
srcs: [
@@ -81,6 +89,7 @@
static_libs: [
"libplayerservice_datasource",
+ "libstagefright_esds",
"libstagefright_timedtext",
],
diff --git a/media/libmediaplayerservice/nuplayer/GenericSource.cpp b/media/libmediaplayerservice/nuplayer/GenericSource.cpp
index 36e4d4a..1358faa 100644
--- a/media/libmediaplayerservice/nuplayer/GenericSource.cpp
+++ b/media/libmediaplayerservice/nuplayer/GenericSource.cpp
@@ -992,6 +992,11 @@
format->setInt32("auto", !!isAutoselect);
format->setInt32("default", !!isDefault);
format->setInt32("forced", !!isForced);
+ } else if (trackType == MEDIA_TRACK_TYPE_AUDIO) {
+ int32_t hapticChannelCount;
+ if (meta->findInt32(kKeyHapticChannelCount, &hapticChannelCount)) {
+ format->setInt32("haptic-channel-count", hapticChannelCount);
+ }
}
return format;
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayer.cpp b/media/libmediaplayerservice/nuplayer/NuPlayer.cpp
index 9b4fc8f..727d68d 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayer.cpp
+++ b/media/libmediaplayerservice/nuplayer/NuPlayer.cpp
@@ -60,7 +60,7 @@
#include <gui/Surface.h>
-#include "ESDS.h"
+#include <media/esds/ESDS.h>
#include <media/stagefright/Utils.h>
namespace android {
@@ -555,6 +555,13 @@
reply->writeInt32(isAuto);
reply->writeInt32(isDefault);
reply->writeInt32(isForced);
+ } else if (trackType == MEDIA_TRACK_TYPE_AUDIO) {
+ int32_t hapticChannelCount;
+ bool hasHapticChannels = format->findInt32("haptic-channel-count", &hapticChannelCount);
+ reply->writeInt32(hasHapticChannels);
+ if (hasHapticChannels) {
+ reply->writeInt32(hapticChannelCount);
+ }
}
}
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp b/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp
index 2a50fc2..ceea2f4 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp
@@ -26,6 +26,7 @@
#include "NuPlayer.h"
#include "NuPlayerSource.h"
+#include <audiomanager/AudioManager.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/foundation/ALooper.h>
#include <media/stagefright/foundation/AUtils.h>
@@ -85,6 +86,7 @@
mMediaClock(new MediaClock),
mPlayer(new NuPlayer(pid, mMediaClock)),
mPlayerFlags(0),
+ mCachedPlayerIId(PLAYER_PIID_INVALID),
mMetricsItem(NULL),
mClientUid(-1),
mAtEOS(false),
@@ -804,6 +806,16 @@
return mPlayer->getSelectedTrack(type, reply);
}
+ case INVOKE_ID_SET_PLAYER_IID:
+ {
+ Mutex::Autolock autoLock(mAudioSinkLock);
+ mCachedPlayerIId = request.readInt32();
+ if (mAudioSink != nullptr) {
+ mAudioSink->setPlayerIId(mCachedPlayerIId);
+ }
+ return OK;
+ }
+
default:
{
return INVALID_OPERATION;
@@ -812,8 +824,12 @@
}
void NuPlayerDriver::setAudioSink(const sp<AudioSink> &audioSink) {
+ Mutex::Autolock autoLock(mAudioSinkLock);
mPlayer->setAudioSink(audioSink);
mAudioSink = audioSink;
+ if (mCachedPlayerIId != PLAYER_PIID_INVALID) {
+ mAudioSink->setPlayerIId(mCachedPlayerIId);
+ }
}
status_t NuPlayerDriver::setParameter(
@@ -1027,6 +1043,7 @@
if (mState != STATE_RESET_IN_PROGRESS) {
if (mAutoLoop) {
audio_stream_type_t streamType = AUDIO_STREAM_MUSIC;
+ Mutex::Autolock autoLock(mAudioSinkLock);
if (mAudioSink != NULL) {
streamType = mAudioSink->getAudioStreamType();
}
@@ -1037,6 +1054,7 @@
}
if (mLooping || mAutoLoop) {
mPlayer->seekToAsync(0);
+ Mutex::Autolock autoLock(mAudioSinkLock);
if (mAudioSink != NULL) {
// The renderer has stopped the sink at the end in order to play out
// the last little bit of audio. If we're looping, we need to restart it.
diff --git a/media/libmediaplayerservice/nuplayer/include/nuplayer/NuPlayerDriver.h b/media/libmediaplayerservice/nuplayer/include/nuplayer/NuPlayerDriver.h
index 55a0fad..138cd6f 100644
--- a/media/libmediaplayerservice/nuplayer/include/nuplayer/NuPlayerDriver.h
+++ b/media/libmediaplayerservice/nuplayer/include/nuplayer/NuPlayerDriver.h
@@ -140,9 +140,12 @@
sp<ALooper> mLooper;
const sp<MediaClock> mMediaClock;
const sp<NuPlayer> mPlayer;
- sp<AudioSink> mAudioSink;
uint32_t mPlayerFlags;
+ mutable Mutex mAudioSinkLock;
+ sp<AudioSink> mAudioSink GUARDED_BY(mAudioSinkLock);
+ int32_t mCachedPlayerIId GUARDED_BY(mAudioSinkLock);
+
mediametrics::Item *mMetricsItem;
mutable Mutex mMetricsLock;
uid_t mClientUid;
diff --git a/media/libmediaplayerservice/tests/DrmSessionManager_test.cpp b/media/libmediaplayerservice/tests/DrmSessionManager_test.cpp
index 2beb47f..30f6a91 100644
--- a/media/libmediaplayerservice/tests/DrmSessionManager_test.cpp
+++ b/media/libmediaplayerservice/tests/DrmSessionManager_test.cpp
@@ -25,8 +25,8 @@
#include <aidl/android/media/BnResourceManagerService.h>
#include <media/stagefright/foundation/ADebug.h>
-#include <media/stagefright/ProcessInfoInterface.h>
#include <mediadrm/DrmSessionManager.h>
+#include <mediautils/ProcessInfoInterface.h>
#include <algorithm>
#include <iostream>
diff --git a/media/libstagefright/ACodec.cpp b/media/libstagefright/ACodec.cpp
index 52c4c0f..d6028d9 100644
--- a/media/libstagefright/ACodec.cpp
+++ b/media/libstagefright/ACodec.cpp
@@ -785,7 +785,7 @@
// we cannot change the number of output buffers while OMX is running
// set up surface to the same count
- Vector<BufferInfo> &buffers = mBuffers[kPortIndexOutput];
+ std::vector<BufferInfo> &buffers = mBuffers[kPortIndexOutput];
ALOGV("setting up surface for %zu buffers", buffers.size());
err = native_window_set_buffer_count(nativeWindow, buffers.size());
@@ -825,7 +825,7 @@
// cancel undequeued buffers to new surface
if (!storingMetadataInDecodedBuffers()) {
for (size_t i = 0; i < buffers.size(); ++i) {
- BufferInfo &info = buffers.editItemAt(i);
+ BufferInfo &info = buffers[i];
if (info.mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW) {
ALOGV("canceling buffer %p", info.mGraphicBuffer->getNativeBuffer());
err = nativeWindow->cancelBuffer(
@@ -872,7 +872,7 @@
CHECK(portIndex == kPortIndexInput || portIndex == kPortIndexOutput);
CHECK(mAllocator[portIndex] == NULL);
- CHECK(mBuffers[portIndex].isEmpty());
+ CHECK(mBuffers[portIndex].empty());
status_t err;
if (mNativeWindow != NULL && portIndex == kPortIndexOutput) {
@@ -951,6 +951,7 @@
const sp<AMessage> &format =
portIndex == kPortIndexInput ? mInputFormat : mOutputFormat;
+ mBuffers[portIndex].reserve(def.nBufferCountActual);
for (OMX_U32 i = 0; i < def.nBufferCountActual && err == OK; ++i) {
hidl_memory hidlMemToken;
sp<TMemory> hidlMem;
@@ -1039,7 +1040,7 @@
}
}
- mBuffers[portIndex].push(info);
+ mBuffers[portIndex].push_back(info);
}
}
}
@@ -1250,6 +1251,7 @@
mComponentName.c_str(), bufferCount, bufferSize);
// Dequeue buffers and send them to OMX
+ mBuffers[kPortIndexOutput].reserve(bufferCount);
for (OMX_U32 i = 0; i < bufferCount; i++) {
ANativeWindowBuffer *buf;
int fenceFd;
@@ -1275,7 +1277,7 @@
info.mData = new MediaCodecBuffer(mOutputFormat, new ABuffer(bufferSize));
info.mCodecData = info.mData;
- mBuffers[kPortIndexOutput].push(info);
+ mBuffers[kPortIndexOutput].push_back(info);
IOMX::buffer_id bufferId;
err = mOMXNode->useBuffer(kPortIndexOutput, graphicBuffer, &bufferId);
@@ -1285,7 +1287,7 @@
break;
}
- mBuffers[kPortIndexOutput].editItemAt(i).mBufferID = bufferId;
+ mBuffers[kPortIndexOutput][i].mBufferID = bufferId;
ALOGV("[%s] Registered graphic buffer with ID %u (pointer = %p)",
mComponentName.c_str(),
@@ -1307,7 +1309,7 @@
}
for (OMX_U32 i = cancelStart; i < cancelEnd; i++) {
- BufferInfo *info = &mBuffers[kPortIndexOutput].editItemAt(i);
+ BufferInfo *info = &mBuffers[kPortIndexOutput][i];
if (info->mStatus == BufferInfo::OWNED_BY_US) {
status_t error = cancelBufferToNativeWindow(info);
if (err == 0) {
@@ -1336,6 +1338,7 @@
ALOGV("[%s] Allocating %u meta buffers on output port",
mComponentName.c_str(), bufferCount);
+ mBuffers[kPortIndexOutput].reserve(bufferCount);
for (OMX_U32 i = 0; i < bufferCount; i++) {
BufferInfo info;
info.mStatus = BufferInfo::OWNED_BY_NATIVE_WINDOW;
@@ -1353,7 +1356,7 @@
info.mCodecData = info.mData;
err = mOMXNode->useBuffer(kPortIndexOutput, OMXBuffer::sPreset, &info.mBufferID);
- mBuffers[kPortIndexOutput].push(info);
+ mBuffers[kPortIndexOutput].push_back(info);
ALOGV("[%s] allocated meta buffer with ID %u",
mComponentName.c_str(), info.mBufferID);
@@ -1462,7 +1465,7 @@
it != done.cend(); ++it) {
ssize_t index = it->getIndex();
if (index >= 0 && (size_t)index < mBuffers[kPortIndexOutput].size()) {
- mBuffers[kPortIndexOutput].editItemAt(index).mRenderInfo = NULL;
+ mBuffers[kPortIndexOutput][index].mRenderInfo = NULL;
} else if (index >= 0) {
// THIS SHOULD NEVER HAPPEN
ALOGE("invalid index %zd in %zu", index, mBuffers[kPortIndexOutput].size());
@@ -1502,7 +1505,7 @@
bool stale = false;
for (size_t i = mBuffers[kPortIndexOutput].size(); i > 0;) {
i--;
- BufferInfo *info = &mBuffers[kPortIndexOutput].editItemAt(i);
+ BufferInfo *info = &mBuffers[kPortIndexOutput][i];
if (info->mGraphicBuffer != NULL &&
info->mGraphicBuffer->handle == buf->handle) {
@@ -1550,8 +1553,7 @@
BufferInfo *oldest = NULL;
for (size_t i = mBuffers[kPortIndexOutput].size(); i > 0;) {
i--;
- BufferInfo *info =
- &mBuffers[kPortIndexOutput].editItemAt(i);
+ BufferInfo *info = &mBuffers[kPortIndexOutput][i];
if (info->mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW &&
(oldest == NULL ||
// avoid potential issues from counter rolling over
@@ -1608,8 +1610,7 @@
status_t err = OK;
for (size_t i = mBuffers[kPortIndexOutput].size(); i > 0;) {
i--;
- BufferInfo *info =
- &mBuffers[kPortIndexOutput].editItemAt(i);
+ BufferInfo *info = &mBuffers[kPortIndexOutput][i];
// At this time some buffers may still be with the component
// or being drained.
@@ -1626,7 +1627,7 @@
}
status_t ACodec::freeBuffer(OMX_U32 portIndex, size_t i) {
- BufferInfo *info = &mBuffers[portIndex].editItemAt(i);
+ BufferInfo *info = &mBuffers[portIndex][i];
status_t err = OK;
// there should not be any fences in the metadata
@@ -1666,14 +1667,14 @@
}
// remove buffer even if mOMXNode->freeBuffer fails
- mBuffers[portIndex].removeAt(i);
+ mBuffers[portIndex].erase(mBuffers[portIndex].begin() + i);
return err;
}
ACodec::BufferInfo *ACodec::findBufferByID(
uint32_t portIndex, IOMX::buffer_id bufferID, ssize_t *index) {
for (size_t i = 0; i < mBuffers[portIndex].size(); ++i) {
- BufferInfo *info = &mBuffers[portIndex].editItemAt(i);
+ BufferInfo *info = &mBuffers[portIndex][i];
if (info->mBufferID == bufferID) {
if (index != NULL) {
@@ -5102,7 +5103,7 @@
size_t n = 0;
for (size_t i = 0; i < mBuffers[portIndex].size(); ++i) {
- const BufferInfo &info = mBuffers[portIndex].itemAt(i);
+ const BufferInfo &info = mBuffers[portIndex][i];
if (info.mStatus == BufferInfo::OWNED_BY_COMPONENT) {
++n;
@@ -5116,7 +5117,7 @@
size_t n = 0;
for (size_t i = 0; i < mBuffers[kPortIndexOutput].size(); ++i) {
- const BufferInfo &info = mBuffers[kPortIndexOutput].itemAt(i);
+ const BufferInfo &info = mBuffers[kPortIndexOutput][i];
if (info.mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW) {
++n;
@@ -5143,7 +5144,7 @@
bool ACodec::allYourBuffersAreBelongToUs(
OMX_U32 portIndex) {
for (size_t i = 0; i < mBuffers[portIndex].size(); ++i) {
- BufferInfo *info = &mBuffers[portIndex].editItemAt(i);
+ BufferInfo *info = &mBuffers[portIndex][i];
if (info->mStatus != BufferInfo::OWNED_BY_US
&& info->mStatus != BufferInfo::OWNED_BY_NATIVE_WINDOW) {
@@ -5167,12 +5168,11 @@
}
void ACodec::processDeferredMessages() {
- List<sp<AMessage> > queue = mDeferredQueue;
+ std::list<sp<AMessage>> queue = mDeferredQueue;
mDeferredQueue.clear();
- List<sp<AMessage> >::iterator it = queue.begin();
- while (it != queue.end()) {
- onMessageReceived(*it++);
+ for (const sp<AMessage> &msg : queue) {
+ onMessageReceived(msg);
}
}
@@ -5928,19 +5928,17 @@
case ACodec::kWhatSetSurface:
{
+ sp<AReplyToken> replyID;
+ CHECK(msg->senderAwaitsResponse(&replyID));
+
sp<RefBase> obj;
CHECK(msg->findObject("surface", &obj));
status_t err = mCodec->handleSetSurface(static_cast<Surface *>(obj.get()));
- sp<AReplyToken> replyID;
- if (msg->senderAwaitsResponse(&replyID)) {
- sp<AMessage> response = new AMessage;
- response->setInt32("err", err);
- response->postReply(replyID);
- } else if (err != OK) {
- mCodec->signalError(OMX_ErrorUndefined, err);
- }
+ sp<AMessage> response = new AMessage;
+ response->setInt32("err", err);
+ response->postReply(replyID);
break;
}
@@ -6483,7 +6481,7 @@
BufferInfo *eligible = NULL;
for (size_t i = 0; i < mCodec->mBuffers[kPortIndexInput].size(); ++i) {
- BufferInfo *info = &mCodec->mBuffers[kPortIndexInput].editItemAt(i);
+ BufferInfo *info = &mCodec->mBuffers[kPortIndexInput][i];
#if 0
if (info->mStatus == BufferInfo::OWNED_BY_UPSTREAM) {
@@ -7515,7 +7513,7 @@
// submit as many buffers as there are input buffers with the codec
// in case we are in port reconfiguring
for (size_t i = 0; i < mCodec->mBuffers[kPortIndexInput].size(); ++i) {
- BufferInfo *info = &mCodec->mBuffers[kPortIndexInput].editItemAt(i);
+ BufferInfo *info = &mCodec->mBuffers[kPortIndexInput][i];
if (info->mStatus == BufferInfo::OWNED_BY_COMPONENT) {
if (mCodec->submitOutputMetadataBuffer() != OK)
@@ -7533,7 +7531,7 @@
void ACodec::ExecutingState::submitRegularOutputBuffers() {
bool failed = false;
for (size_t i = 0; i < mCodec->mBuffers[kPortIndexOutput].size(); ++i) {
- BufferInfo *info = &mCodec->mBuffers[kPortIndexOutput].editItemAt(i);
+ BufferInfo *info = &mCodec->mBuffers[kPortIndexOutput][i];
if (mCodec->mNativeWindow != NULL) {
if (info->mStatus != BufferInfo::OWNED_BY_US
@@ -7590,7 +7588,7 @@
}
for (size_t i = 0; i < mCodec->mBuffers[kPortIndexInput].size(); i++) {
- BufferInfo *info = &mCodec->mBuffers[kPortIndexInput].editItemAt(i);
+ BufferInfo *info = &mCodec->mBuffers[kPortIndexInput][i];
if (info->mStatus == BufferInfo::OWNED_BY_US) {
postFillThisBuffer(info);
}
@@ -8529,17 +8527,11 @@
case kWhatSetSurface:
{
- ALOGV("[%s] Deferring setSurface", mCodec->mComponentName.c_str());
-
- sp<AReplyToken> replyID;
- CHECK(msg->senderAwaitsResponse(&replyID));
+ ALOGD("[%s] Deferring setSurface from OutputPortSettingsChangedState",
+ mCodec->mComponentName.c_str());
mCodec->deferMessage(msg);
- sp<AMessage> response = new AMessage;
- response->setInt32("err", OK);
- response->postReply(replyID);
-
handled = true;
break;
}
@@ -8594,7 +8586,7 @@
ALOGV("[%s] Output port now disabled.", mCodec->mComponentName.c_str());
status_t err = OK;
- if (!mCodec->mBuffers[kPortIndexOutput].isEmpty()) {
+ if (!mCodec->mBuffers[kPortIndexOutput].empty()) {
ALOGE("disabled port should be empty, but has %zu buffers",
mCodec->mBuffers[kPortIndexOutput].size());
err = FAILED_TRANSACTION;
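
Note: the ACodec hunks above migrate `Vector<BufferInfo>` and `List<sp<AMessage>>` to their STL counterparts. A minimal standalone sketch of the idiom mapping, using a plain value type rather than the real ACodec structures, is below; it compiles on its own and mirrors the replacements made in the diff.

```
#include <list>
#include <vector>

struct BufferInfo { unsigned mBufferID = 0; };

int main() {
    std::vector<BufferInfo> buffers;
    buffers.reserve(4);                   // pre-size once the buffer count is known
    buffers.push_back(BufferInfo{});      // Vector::push(info)      -> push_back(info)
    buffers[0].mBufferID = 7;             // Vector::editItemAt(i)   -> operator[](i)
    const BufferInfo &ro = buffers[0];    // Vector::itemAt(i)       -> operator[](i) (const)
    (void)ro;
    buffers.erase(buffers.begin() + 0);   // Vector::removeAt(i)     -> erase(begin() + i)

    std::list<int> deferred = {1, 2, 3};
    std::list<int> queue = deferred;      // copy, then clear the member, as in processDeferredMessages()
    deferred.clear();
    for (int msg : queue) { (void)msg; }  // explicit iterator loop  -> range-for

    return buffers.empty() ? 0 : 1;       // Vector::isEmpty()       -> empty()
}
```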
diff --git a/media/libstagefright/Android.bp b/media/libstagefright/Android.bp
index 10baec4..8c469df 100644
--- a/media/libstagefright/Android.bp
+++ b/media/libstagefright/Android.bp
@@ -33,75 +33,6 @@
},
}
-cc_library_static {
- name: "libstagefright_esds",
- apex_available: [
- "//apex_available:platform",
- "com.android.media",
- ],
- min_sdk_version: "29",
-
- srcs: ["ESDS.cpp"],
-
- cflags: [
- "-Werror",
- "-Wall",
- ],
- sanitize: {
- misc_undefined: [
- "signed-integer-overflow",
- ],
- cfi: true,
- },
- shared_libs: [
- "libstagefright_foundation",
- "libutils"
- ],
- host_supported: true,
- target: {
- darwin: {
- enabled: false,
- },
- },
-}
-
-cc_library_static {
- name: "libstagefright_metadatautils",
- apex_available: [
- "//apex_available:platform",
- "com.android.media",
- ],
- min_sdk_version: "29",
-
- srcs: ["MetaDataUtils.cpp"],
-
- cflags: [
- "-Werror",
- "-Wall",
- ],
- sanitize: {
- misc_undefined: [
- "signed-integer-overflow",
- ],
- cfi: true,
- },
-
- header_libs: [
- "libaudioclient_headers",
- "libstagefright_foundation_headers",
- "media_ndk_headers",
- ],
-
- host_supported: true,
- target: {
- darwin: {
- enabled: false,
- },
- },
-
- export_include_dirs: ["include"],
-}
-
cc_library_shared {
name: "libstagefright_codecbase",
@@ -166,6 +97,10 @@
"liblog",
],
+ static_libs: [
+ "libstagefright_esds",
+ ],
+
export_include_dirs: [
"include",
],
@@ -386,7 +321,6 @@
"libstagefright_webm",
"libstagefright_timedtext",
"libogg",
- "libwebm",
"libstagefright_id3",
"framework-permission-aidl-cpp",
"libmediandk_format",
diff --git a/media/libstagefright/FrameDecoder.cpp b/media/libstagefright/FrameDecoder.cpp
index 6de112a..2370a7b 100644
--- a/media/libstagefright/FrameDecoder.cpp
+++ b/media/libstagefright/FrameDecoder.cpp
@@ -86,11 +86,14 @@
displayHeight = height;
}
- if (allocRotated && (rotationAngle == 90 || rotationAngle == 270)) {
- int32_t tmp;
- tmp = width; width = height; height = tmp;
- tmp = displayWidth; displayWidth = displayHeight; displayHeight = tmp;
- tmp = tileWidth; tileWidth = tileHeight; tileHeight = tmp;
+ if (allocRotated) {
+ if (rotationAngle == 90 || rotationAngle == 270) {
+ // swap width and height for 90 & 270 degrees rotation
+ std::swap(width, height);
+ std::swap(displayWidth, displayHeight);
+ std::swap(tileWidth, tileHeight);
+ }
+ // Rotation is already applied.
rotationAngle = 0;
}
diff --git a/media/libstagefright/MPEG2TSWriter.cpp b/media/libstagefright/MPEG2TSWriter.cpp
index 34b840e..fbc684b 100644
--- a/media/libstagefright/MPEG2TSWriter.cpp
+++ b/media/libstagefright/MPEG2TSWriter.cpp
@@ -30,7 +30,7 @@
#include <media/stagefright/MetaData.h>
#include <arpa/inet.h>
-#include "include/ESDS.h"
+#include <media/esds/ESDS.h>
namespace android {
diff --git a/media/libstagefright/MPEG4Writer.cpp b/media/libstagefright/MPEG4Writer.cpp
index 9ff2177..c93d033 100644
--- a/media/libstagefright/MPEG4Writer.cpp
+++ b/media/libstagefright/MPEG4Writer.cpp
@@ -51,7 +51,7 @@
#include <media/mediarecorder.h>
#include <cutils/properties.h>
-#include "include/ESDS.h"
+#include <media/esds/ESDS.h>
#include "include/HevcUtils.h"
#ifndef __predict_false
@@ -157,6 +157,8 @@
bool isHevc() const { return mIsHevc; }
bool isAv1() const { return mIsAv1; }
bool isHeic() const { return mIsHeic; }
+ bool isAvif() const { return mIsAvif; }
+ bool isHeif() const { return mIsHeif; }
bool isAudio() const { return mIsAudio; }
bool isMPEG4() const { return mIsMPEG4; }
bool usePrefix() const { return mIsAvc || mIsHevc || mIsHeic || mIsDovi; }
@@ -325,6 +327,8 @@
bool mIsAudio;
bool mIsVideo;
bool mIsHeic;
+ bool mIsAvif;
+ bool mIsHeif;
bool mIsMPEG4;
bool mGotStartKeyFrame;
bool mIsMalformed;
@@ -550,6 +554,7 @@
mStreamableFile = false;
mTimeScale = -1;
mHasFileLevelMeta = false;
+ mIsAvif = false;
mFileLevelMetaDataSize = 0;
mPrimaryItemId = 0;
mAssociationEntryCount = 0;
@@ -670,6 +675,8 @@
return "mett";
} else if (!strcasecmp(MEDIA_MIMETYPE_IMAGE_ANDROID_HEIC, mime)) {
return "heic";
+ } else if (!strcasecmp(MEDIA_MIMETYPE_IMAGE_AVIF, mime)) {
+ return "avif";
} else {
ALOGE("Track (%s) other than video/audio/metadata is not supported", mime);
}
@@ -714,8 +721,9 @@
Track *track = new Track(this, source, 1 + mTracks.size());
mTracks.push_back(track);
- mHasMoovBox |= !track->isHeic();
- mHasFileLevelMeta |= track->isHeic();
+ mHasMoovBox |= !track->isHeif();
+ mHasFileLevelMeta |= track->isHeif();
+ mIsAvif |= track->isAvif();
return OK;
}
@@ -797,7 +805,7 @@
for (List<Track *>::iterator it = mTracks.begin();
it != mTracks.end(); ++it) {
- if ((*it)->isHeic()) {
+ if ((*it)->isHeif()) {
metaSize += (*it)->getMetaSizeIncrease(rotation, mTracks.size());
}
}
@@ -999,8 +1007,8 @@
return err;
}
- ALOGV("muxer starting: mHasMoovBox %d, mHasFileLevelMeta %d",
- mHasMoovBox, mHasFileLevelMeta);
+ ALOGV("muxer starting: mHasMoovBox %d, mHasFileLevelMeta %d, mIsAvif %d",
+ mHasMoovBox, mHasFileLevelMeta, mIsAvif);
err = startWriterThread();
if (err != OK) {
@@ -1316,7 +1324,7 @@
}
// skip image tracks
- if ((*it)->isHeic()) continue;
+ if ((*it)->isHeif()) continue;
nonImageTrackCount++;
int64_t durationUs = (*it)->getDurationUs();
@@ -1494,7 +1502,7 @@
int64_t minCttsOffsetTimeUs = kMaxCttsOffsetTimeUs;
for (List<Track *>::iterator it = mTracks.begin();
it != mTracks.end(); ++it) {
- if (!(*it)->isHeic()) {
+ if (!(*it)->isHeif()) {
minCttsOffsetTimeUs =
std::min(minCttsOffsetTimeUs, (*it)->getMinCttsOffsetTimeUs());
}
@@ -1510,7 +1518,7 @@
for (List<Track *>::iterator it = mTracks.begin();
it != mTracks.end(); ++it) {
- if (!(*it)->isHeic()) {
+ if (!(*it)->isHeif()) {
(*it)->writeTrackHeader();
}
}
@@ -1530,17 +1538,27 @@
writeFourcc("isom");
writeFourcc("3gp4");
} else {
- // Only write "heic" as major brand if the client specified HEIF
- // AND we indeed receive some image heic tracks.
+ // Only write "heic"/"avif" as major brand if the client specified HEIF/AVIF
+ // AND we indeed receive some image heic/avif tracks.
if (fileType == OUTPUT_FORMAT_HEIF && mHasFileLevelMeta) {
- writeFourcc("heic");
+ if (mIsAvif) {
+ writeFourcc("avif");
+ } else {
+ writeFourcc("heic");
+ }
} else {
writeFourcc("mp42");
}
writeInt32(0);
if (mHasFileLevelMeta) {
- writeFourcc("mif1");
- writeFourcc("heic");
+ if (mIsAvif) {
+ writeFourcc("mif1");
+ writeFourcc("miaf");
+ writeFourcc("avif");
+ } else {
+ writeFourcc("mif1");
+ writeFourcc("heic");
+ }
}
if (mHasMoovBox) {
writeFourcc("isom");
@@ -2117,7 +2135,8 @@
for (List<Track *>::iterator it = mTracks.begin();
it != mTracks.end(); ++it) {
- if (!(*it)->isHeic() && (*it)->getDurationUs() >= mMaxFileDurationLimitUs) {
+ if (!(*it)->isHeif() &&
+ (*it)->getDurationUs() >= mMaxFileDurationLimitUs) {
return true;
}
}
@@ -2224,6 +2243,8 @@
mIsAudio = !strncasecmp(mime, "audio/", 6);
mIsVideo = !strncasecmp(mime, "video/", 6);
mIsHeic = !strcasecmp(mime, MEDIA_MIMETYPE_IMAGE_ANDROID_HEIC);
+ mIsAvif = !strcasecmp(mime, MEDIA_MIMETYPE_IMAGE_AVIF);
+ mIsHeif = mIsHeic || mIsAvif;
mIsMPEG4 = !strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_MPEG4) ||
!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AAC);
@@ -2235,7 +2256,7 @@
}
}
- if (!mIsHeic) {
+ if (!mIsHeif) {
setTimeScale();
} else {
CHECK(mMeta->findInt32(kKeyWidth, &mWidth) && (mWidth > 0));
@@ -2316,7 +2337,7 @@
void MPEG4Writer::Track::updateTrackSizeEstimate() {
mEstimatedTrackSizeBytes = mMdatSizeBytes; // media data size
- if (!isHeic() && !mOwner->isFileStreamable()) {
+ if (!isHeif() && !mOwner->isFileStreamable()) {
mEstimatedTrackSizeBytes += trackMetaDataSize();
}
}
@@ -2399,7 +2420,7 @@
bool MPEG4Writer::Track::isExifData(
MediaBufferBase *buffer, uint32_t *tiffHdrOffset) const {
- if (!mIsHeic) {
+ if (!mIsHeif) {
return false;
}
@@ -2428,12 +2449,12 @@
}
void MPEG4Writer::Track::addChunkOffset(off64_t offset) {
- CHECK(!mIsHeic);
+ CHECK(!mIsHeif);
mCo64TableEntries->add(hton64(offset));
}
void MPEG4Writer::Track::addItemOffsetAndSize(off64_t offset, size_t size, bool isExif) {
- CHECK(mIsHeic);
+ CHECK(mIsHeif);
if (offset > UINT32_MAX || size > UINT32_MAX) {
ALOGE("offset or size is out of range: %lld, %lld",
@@ -2479,8 +2500,10 @@
if (mProperties.empty()) {
mProperties.push_back(mOwner->addProperty_l({
- .type = FOURCC('h', 'v', 'c', 'C'),
- .hvcc = ABuffer::CreateAsCopy(mCodecSpecificData, mCodecSpecificDataSize)
+ .type = static_cast<uint32_t>(mIsAvif ?
+ FOURCC('a', 'v', '1', 'C') :
+ FOURCC('h', 'v', 'c', 'C')),
+ .data = ABuffer::CreateAsCopy(mCodecSpecificData, mCodecSpecificDataSize)
}));
mProperties.push_back(mOwner->addProperty_l({
@@ -2500,7 +2523,7 @@
mTileIndex++;
if (hasGrid) {
mDimgRefs.value.push_back(mOwner->addItem_l({
- .itemType = "hvc1",
+ .itemType = mIsAvif ? "av01" : "hvc1",
.itemId = mItemIdBase++,
.isPrimary = false,
.isHidden = true,
@@ -2536,7 +2559,7 @@
}
} else {
mImageItemId = mOwner->addItem_l({
- .itemType = "hvc1",
+ .itemType = mIsAvif ? "av01" : "hvc1",
.itemId = mItemIdBase++,
.isPrimary = (mIsPrimary != 0),
.isHidden = false,
@@ -2553,7 +2576,7 @@
// it affects the 'dimg' refs for tiled image, as we only have the refs after the
// last tile sample is written.
void MPEG4Writer::Track::flushItemRefs() {
- CHECK(mIsHeic);
+ CHECK(mIsHeif);
if (mImageItemId > 0) {
mOwner->addRefs_l(mImageItemId, mDimgRefs);
@@ -2654,7 +2677,8 @@
} else if (!strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_HEVC) ||
!strcasecmp(mime, MEDIA_MIMETYPE_IMAGE_ANDROID_HEIC)) {
mMeta->findData(kKeyHVCC, &type, &data, &size);
- } else if (!strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_AV1)) {
+ } else if (!strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_AV1) ||
+ !strcasecmp(mime, MEDIA_MIMETYPE_IMAGE_AVIF)) {
mMeta->findData(kKeyAV1C, &type, &data, &size);
} else if (!strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_DOLBY_VISION)) {
getDolbyVisionProfile();
@@ -2766,7 +2790,7 @@
size_t bytesWritten;
off64_t offset = addSample_l(*it, usePrefix, tiffHdrOffset, &bytesWritten);
- if (chunk->mTrack->isHeic()) {
+ if (chunk->mTrack->isHeif()) {
chunk->mTrack->addItemOffsetAndSize(offset, bytesWritten, isExif);
} else if (isFirstSample) {
chunk->mTrack->addChunkOffset(offset);
@@ -2918,11 +2942,11 @@
mStartTimeRealUs = startTimeUs;
int32_t rotationDegrees;
- if ((mIsVideo || mIsHeic) && params &&
+ if ((mIsVideo || mIsHeif) && params &&
params->findInt32(kKeyRotation, &rotationDegrees)) {
mRotation = rotationDegrees;
}
- if (mIsHeic) {
+ if (mIsHeif) {
// Reserve the item ids, so that the item ids are ordered in the same
// order that the image tracks are added.
// If we leave the item ids to be assigned when the sample is written out,
@@ -3598,7 +3622,7 @@
}
// Per-frame metadata sample's size must be smaller than max allowed.
- if (!mIsVideo && !mIsAudio && !mIsHeic &&
+ if (!mIsVideo && !mIsAudio && !mIsHeif &&
buffer->range_length() >= kMaxMetadataSize) {
ALOGW("Buffer size is %zu. Maximum metadata buffer size is %lld for %s track",
buffer->range_length(), (long long)kMaxMetadataSize, trackName);
@@ -3722,7 +3746,7 @@
mGotStartKeyFrame = true;
}
////////////////////////////////////////////////////////////////////////////////
- if (!mIsHeic) {
+ if (!mIsHeif) {
if (mStszTableEntries->count() == 0) {
mFirstSampleTimeRealUs = systemTime() / 1000;
if (timestampUs < 0 && mFirstSampleStartOffsetUs == 0) {
@@ -3942,7 +3966,7 @@
off64_t offset = mOwner->addSample_l(
copy, usePrefix, tiffHdrOffset, &bytesWritten);
- if (mIsHeic) {
+ if (mIsHeif) {
addItemOffsetAndSize(offset, bytesWritten, isExif);
} else {
if (mCo64TableEntries->count() == 0) {
@@ -3955,7 +3979,7 @@
}
mChunkSamples.push_back(copy);
- if (mIsHeic) {
+ if (mIsHeif) {
bufferChunk(0 /*timestampUs*/);
++nChunks;
} else if (interleaveDurationUs == 0) {
@@ -3993,7 +4017,7 @@
// Add final entries only for non-empty tracks.
if (mStszTableEntries->count() > 0) {
- if (mIsHeic) {
+ if (mIsHeif) {
if (!mChunkSamples.empty()) {
bufferChunk(0);
++nChunks;
@@ -4066,7 +4090,7 @@
mOwner->mStartMeta->findInt32(kKeyEmptyTrackMalFormed, &emptyTrackMalformed) &&
emptyTrackMalformed) {
// MediaRecorder(sets kKeyEmptyTrackMalFormed by default) report empty tracks as malformed.
- if (!mIsHeic && mStszTableEntries->count() == 0) { // no samples written
+ if (!mIsHeif && mStszTableEntries->count() == 0) { // no samples written
ALOGE("The number of recorded samples is 0");
mIsMalformed = true;
return true;
@@ -4229,7 +4253,7 @@
int32_t MPEG4Writer::Track::getMetaSizeIncrease(
int32_t angle, int32_t trackCount) const {
- CHECK(mIsHeic);
+ CHECK(mIsHeif);
int32_t grid = (mTileWidth > 0);
int32_t rotate = (angle > 0);
@@ -4281,7 +4305,8 @@
!strcasecmp(MEDIA_MIMETYPE_VIDEO_HEVC, mime) ||
!strcasecmp(MEDIA_MIMETYPE_VIDEO_AV1, mime) ||
!strcasecmp(MEDIA_MIMETYPE_VIDEO_DOLBY_VISION, mime) ||
- !strcasecmp(MEDIA_MIMETYPE_IMAGE_ANDROID_HEIC, mime)) {
+ !strcasecmp(MEDIA_MIMETYPE_IMAGE_ANDROID_HEIC, mime) ||
+ !strcasecmp(MEDIA_MIMETYPE_IMAGE_AVIF, mime)) {
if (!mCodecSpecificData ||
mCodecSpecificDataSize <= 0) {
ALOGE("Missing codec specific data");
@@ -4300,7 +4325,7 @@
const char *MPEG4Writer::Track::getTrackType() const {
return mIsAudio ? "Audio" :
mIsVideo ? "Video" :
- mIsHeic ? "Image" :
+ mIsHeif ? "Image" :
"Metadata";
}
@@ -5413,7 +5438,7 @@
case FOURCC('h', 'v', 'c', 'C'):
{
beginBox("hvcC");
- sp<ABuffer> hvcc = mProperties[propIndex].hvcc;
+ sp<ABuffer> hvcc = mProperties[propIndex].data;
// Patch avcc's lengthSize field to match the number
// of bytes we use to indicate the size of a nal unit.
uint8_t *ptr = (uint8_t *)hvcc->data();
@@ -5422,6 +5447,14 @@
endBox();
break;
}
+ case FOURCC('a', 'v', '1', 'C'):
+ {
+ beginBox("av1C");
+ sp<ABuffer> av1c = mProperties[propIndex].data;
+ write(av1c->data(), av1c->size());
+ endBox();
+ break;
+ }
case FOURCC('i', 's', 'p', 'e'):
{
beginBox("ispe");
@@ -5525,7 +5558,7 @@
for (List<Track *>::iterator it = mTracks.begin();
it != mTracks.end(); ++it) {
- if ((*it)->isHeic()) {
+ if ((*it)->isHeif()) {
(*it)->flushItemRefs();
}
}
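
Note: with the HEIC-only paths generalized to HEIF, an AVIF image track flows through the same writer and picks up the "avif"/"miaf" brands shown above. A hedged usage sketch follows; it relies only on APIs visible in this diff (MediaMuxer::create, OUTPUT_FORMAT_HEIF, the "image/avif" MIME), the width/height values are arbitrary, and codec-specific data plus sample writing are elided.

```
#include <utils/Errors.h>
#include <media/stagefright/MediaMuxer.h>
#include <media/stagefright/foundation/AMessage.h>

using namespace android;

status_t muxSingleAvifImage(int fd /* opened for writing, seekable */) {
    sp<MediaMuxer> muxer = MediaMuxer::create(fd, MediaMuxer::OUTPUT_FORMAT_HEIF);
    if (muxer == nullptr) {
        return NO_INIT;
    }

    sp<AMessage> format = new AMessage;
    format->setString("mime", "image/avif");   // MEDIA_MIMETYPE_IMAGE_AVIF
    format->setInt32("width", 512);
    format->setInt32("height", 512);
    // The AV1 codec config ('av1C') would normally arrive as csd-0 from the encoder.

    ssize_t track = muxer->addTrack(format);
    if (track < 0) {
        return static_cast<status_t>(track);
    }

    status_t err = muxer->start();
    // ... writeSampleData() with the encoded AV1 payload for `track` ...
    if (err == OK) {
        err = muxer->stop();
    }
    return err;
}
```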
diff --git a/media/libstagefright/MediaAppender.cpp b/media/libstagefright/MediaAppender.cpp
index 21dcfa1..2d9c651 100644
--- a/media/libstagefright/MediaAppender.cpp
+++ b/media/libstagefright/MediaAppender.cpp
@@ -308,7 +308,11 @@
ALOGE("MediaAppender::start() is called in invalid state %d", mState);
return INVALID_OPERATION;
}
- mMuxer = new (std::nothrow) MediaMuxer(mFd, mFormat);
+ mMuxer = MediaMuxer::create(mFd, mFormat);
+ if (mMuxer == nullptr) {
+ ALOGE("MediaMuxer::create failed");
+ return INVALID_OPERATION;
+ }
for (const auto& n : mFmtIndexMap) {
ssize_t muxIndex = mMuxer->addTrack(n.second);
if (muxIndex < 0) {
diff --git a/media/libstagefright/MediaCodec.cpp b/media/libstagefright/MediaCodec.cpp
index c963e19..ad6e36a 100644
--- a/media/libstagefright/MediaCodec.cpp
+++ b/media/libstagefright/MediaCodec.cpp
@@ -3355,8 +3355,8 @@
MediaCodecInfo::Attributes attr = mCodecInfo
? mCodecInfo->getAttributes()
: MediaCodecInfo::Attributes(0);
- if (!(attr & MediaCodecInfo::kFlagIsSoftwareOnly)) {
- // software codec is currently ignored.
+ if (mDomain == DOMAIN_VIDEO || !(attr & MediaCodecInfo::kFlagIsSoftwareOnly)) {
+ // software audio codecs are currently ignored.
mResourceManagerProxy->addResource(MediaResource::CodecResource(
mFlags & kFlagIsSecure, toMediaResourceSubType(mDomain)));
}
@@ -5289,7 +5289,7 @@
MediaCodec::BufferInfo *MediaCodec::peekNextPortBuffer(int32_t portIndex) {
CHECK(portIndex == kPortIndexInput || portIndex == kPortIndexOutput);
- List<size_t> *availBuffers = &mAvailPortBuffers[portIndex];
+ std::list<size_t> *availBuffers = &mAvailPortBuffers[portIndex];
if (availBuffers->empty()) {
return nullptr;
@@ -5306,7 +5306,7 @@
return -EAGAIN;
}
- List<size_t> *availBuffers = &mAvailPortBuffers[portIndex];
+ std::list<size_t> *availBuffers = &mAvailPortBuffers[portIndex];
size_t index = *availBuffers->begin();
CHECK_EQ(info, &mPortBuffers[portIndex][index]);
availBuffers->erase(availBuffers->begin());
diff --git a/media/libstagefright/MediaCodecList.cpp b/media/libstagefright/MediaCodecList.cpp
index a3040f4..78b7288 100644
--- a/media/libstagefright/MediaCodecList.cpp
+++ b/media/libstagefright/MediaCodecList.cpp
@@ -354,8 +354,19 @@
//static
void MediaCodecList::findMatchingCodecs(
- const char *mime, bool encoder, uint32_t flags, sp<AMessage> format,
+ const char *mime, bool encoder, uint32_t flags, const sp<AMessage> &format,
Vector<AString> *matches) {
+ findMatchingCodecs(mime, encoder, flags, format, matches, /* checkProfile= */ true);
+ if (matches->empty()) {
+ ALOGV("no matching codec found, retrying without profile check");
+ findMatchingCodecs(mime, encoder, flags, format, matches, /* checkProfile= */ false);
+ }
+}
+
+//static
+void MediaCodecList::findMatchingCodecs(
+ const char *mime, bool encoder, uint32_t flags, const sp<AMessage> &format,
+ Vector<AString> *matches, bool checkProfile) {
matches->clear();
const sp<IMediaCodecList> list = getInstance();
@@ -379,7 +390,7 @@
AString componentName = info->getCodecName();
- if (!codecHandlesFormat(mime, info, format)) {
+ if (!codecHandlesFormat(mime, info, format, checkProfile)) {
ALOGV("skipping codec '%s' which doesn't satisfy format %s",
componentName.c_str(), format->debugString(2).c_str());
continue;
@@ -400,9 +411,10 @@
}
}
-/*static*/
-bool MediaCodecList::codecHandlesFormat(const char *mime, sp<MediaCodecInfo> info,
- sp<AMessage> format) {
+// static
+bool MediaCodecList::codecHandlesFormat(
+ const char *mime, const sp<MediaCodecInfo> &info, const sp<AMessage> &format,
+ bool checkProfile) {
if (format == nullptr) {
ALOGD("codecHandlesFormat: no format, so no extra checks");
@@ -510,9 +522,7 @@
}
int32_t profile = -1;
- if (format->findInt32("profile", &profile)) {
- int32_t level = -1;
- format->findInt32("level", &level);
+ if (checkProfile && format->findInt32("profile", &profile)) {
Vector<MediaCodecInfo::ProfileLevel> profileLevels;
capabilities->getSupportedProfileLevels(&profileLevels);
auto it = profileLevels.begin();
@@ -520,14 +530,11 @@
if (profile != it->mProfile) {
continue;
}
- if (level > -1 && level > it->mLevel) {
- continue;
- }
break;
}
if (it == profileLevels.end()) {
- ALOGV("Codec does not support profile %d with level %d", profile, level);
+ ALOGV("Codec does not support profile %d", profile);
return false;
}
}
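
Note: the lookup now runs twice, first honoring any requested profile and then, if nothing matched, without the profile check. A hedged caller-side sketch; the format keys are the standard MediaCodec ones and the profile value is only an example.

```
#include <media/stagefright/MediaCodecList.h>
#include <media/stagefright/foundation/AMessage.h>
#include <media/stagefright/foundation/AString.h>
#include <utils/Vector.h>

using namespace android;

void pickHevcDecoders(Vector<AString> *matches) {
    sp<AMessage> format = new AMessage;
    format->setString("mime", "video/hevc");
    format->setInt32("profile", 2 /* e.g. HEVCProfileMain10 */);

    // Pass 1 filters on the requested profile; if it yields nothing, the same
    // call internally retries with checkProfile == false, so a decoder is
    // still returned even when no component advertises that exact profile.
    MediaCodecList::findMatchingCodecs(
            "video/hevc", false /* encoder */, 0 /* flags */, format, matches);
}
```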
diff --git a/media/libstagefright/MediaMuxer.cpp b/media/libstagefright/MediaMuxer.cpp
index a946f71..9768f97 100644
--- a/media/libstagefright/MediaMuxer.cpp
+++ b/media/libstagefright/MediaMuxer.cpp
@@ -46,6 +46,30 @@
format == MediaMuxer::OUTPUT_FORMAT_HEIF;
}
+MediaMuxer* MediaMuxer::create(int fd, OutputFormat format) {
+ bool isInputValid = true;
+ if (isMp4Format(format)) {
+ isInputValid = MPEG4Writer::isFdOpenModeValid(fd);
+ } else if (format == OUTPUT_FORMAT_WEBM) {
+ isInputValid = WebmWriter::isFdOpenModeValid(fd);
+ } else if (format == OUTPUT_FORMAT_OGG) {
+ isInputValid = OggWriter::isFdOpenModeValid(fd);
+ } else {
+ ALOGE("MediaMuxer does not support output format %d", format);
+ return nullptr;
+ }
+ if (!isInputValid) {
+ ALOGE("File descriptor is not suitable for format %d", format);
+ return nullptr;
+ }
+
+ MediaMuxer *muxer = new (std::nothrow) MediaMuxer(fd, (MediaMuxer::OutputFormat)format);
+ if (muxer == nullptr) {
+ ALOGE("Failed to create writer object");
+ }
+ return muxer;
+}
+
MediaMuxer::MediaMuxer(int fd, OutputFormat format)
: mFormat(format),
mState(UNINITIALIZED) {
@@ -138,7 +162,7 @@
return INVALID_OPERATION;
}
if (!isMp4Format(mFormat)) {
- ALOGE("setLocation() is only supported for .mp4, .3gp or .heic output.");
+ ALOGE("setLocation() is only supported for .mp4, .3gp, .heic or .avif output.");
return INVALID_OPERATION;
}
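
Note: callers now go through the factory rather than `new MediaMuxer(fd, format)`, so a descriptor that fails the writer's open-mode check is rejected before any track setup. A minimal sketch of the new call pattern; it mirrors the MediaMuxerFuzzer change later in this diff, and the path handling around it is illustrative only.

```
#include <fcntl.h>
#include <unistd.h>
#include <media/stagefright/MediaMuxer.h>

using namespace android;

sp<MediaMuxer> openMp4Muxer(const char *path) {
    // MP4/HEIF/OGG output needs a writable, seekable descriptor;
    // WebM additionally requires read-write mode (see WebmWriter below).
    int fd = open(path, O_CREAT | O_RDWR, 0644);
    if (fd < 0) {
        return nullptr;
    }

    sp<MediaMuxer> muxer = MediaMuxer::create(fd, MediaMuxer::OUTPUT_FORMAT_MPEG_4);
    if (muxer == nullptr) {
        // create() failed validation; the descriptor is still the caller's to close.
        close(fd);
    }
    return muxer;
}
```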
diff --git a/media/libstagefright/NuMediaExtractor.cpp b/media/libstagefright/NuMediaExtractor.cpp
index 6893324..0536f2a 100644
--- a/media/libstagefright/NuMediaExtractor.cpp
+++ b/media/libstagefright/NuMediaExtractor.cpp
@@ -20,7 +20,7 @@
#include <media/stagefright/NuMediaExtractor.h>
-#include "include/ESDS.h"
+#include <media/esds/ESDS.h>
#include <datasource/DataSourceFactory.h>
#include <datasource/FileSource.h>
@@ -72,6 +72,37 @@
}
}
+status_t NuMediaExtractor::initMediaExtractor(const sp<DataSource>& dataSource) {
+ status_t err = OK;
+
+ mImpl = MediaExtractorFactory::Create(dataSource);
+ if (mImpl == NULL) {
+ ALOGE("%s: failed to create MediaExtractor", __FUNCTION__);
+ return ERROR_UNSUPPORTED;
+ }
+
+ setEntryPointToRemoteMediaExtractor();
+
+ if (!mCasToken.empty()) {
+ err = mImpl->setMediaCas(mCasToken);
+ if (err != OK) {
+ ALOGE("%s: failed to setMediaCas (%d)", __FUNCTION__, err);
+ return err;
+ }
+ }
+
+ // Get the name of the implementation.
+ mName = mImpl->name();
+
+ // Update the duration and bitrate
+ err = updateDurationAndBitrate();
+ if (err == OK) {
+ mDataSource = dataSource;
+ }
+
+ return OK;
+}
+
status_t NuMediaExtractor::setDataSource(
const sp<MediaHTTPService> &httpService,
const char *path,
@@ -89,28 +120,8 @@
return -ENOENT;
}
- mImpl = MediaExtractorFactory::Create(dataSource);
-
- if (mImpl == NULL) {
- return ERROR_UNSUPPORTED;
- }
- setEntryPointToRemoteMediaExtractor();
-
- status_t err = OK;
- if (!mCasToken.empty()) {
- err = mImpl->setMediaCas(mCasToken);
- if (err != OK) {
- ALOGE("%s: failed to setMediaCas (%d)", __FUNCTION__, err);
- return err;
- }
- }
-
- err = updateDurationAndBitrate();
- if (err == OK) {
- mDataSource = dataSource;
- }
-
- return OK;
+ // Initialize MediaExtractor using the data source
+ return initMediaExtractor(dataSource);
}
status_t NuMediaExtractor::setDataSource(int fd, off64_t offset, off64_t size) {
@@ -131,27 +142,8 @@
return err;
}
- mImpl = MediaExtractorFactory::Create(fileSource);
-
- if (mImpl == NULL) {
- return ERROR_UNSUPPORTED;
- }
- setEntryPointToRemoteMediaExtractor();
-
- if (!mCasToken.empty()) {
- err = mImpl->setMediaCas(mCasToken);
- if (err != OK) {
- ALOGE("%s: failed to setMediaCas (%d)", __FUNCTION__, err);
- return err;
- }
- }
-
- err = updateDurationAndBitrate();
- if (err == OK) {
- mDataSource = fileSource;
- }
-
- return OK;
+ // Initialize MediaExtractor using the file source
+ return initMediaExtractor(fileSource);
}
status_t NuMediaExtractor::setDataSource(const sp<DataSource> &source) {
@@ -166,32 +158,13 @@
return err;
}
- mImpl = MediaExtractorFactory::Create(source);
-
- if (mImpl == NULL) {
- return ERROR_UNSUPPORTED;
- }
- setEntryPointToRemoteMediaExtractor();
-
- if (!mCasToken.empty()) {
- err = mImpl->setMediaCas(mCasToken);
- if (err != OK) {
- ALOGE("%s: failed to setMediaCas (%d)", __FUNCTION__, err);
- return err;
- }
- }
-
- err = updateDurationAndBitrate();
- if (err == OK) {
- mDataSource = source;
- }
-
- return err;
+ // Initialize MediaExtractor using the given data source
+ return initMediaExtractor(source);
}
const char* NuMediaExtractor::getName() const {
Mutex::Autolock autoLock(mLock);
- return mImpl == nullptr ? nullptr : mImpl->name().string();
+ return mImpl == nullptr ? nullptr : mName.string();
}
static String8 arrayToString(const std::vector<uint8_t> &array) {
diff --git a/media/libstagefright/Utils.cpp b/media/libstagefright/Utils.cpp
index 92bef2e..c5b5199 100644
--- a/media/libstagefright/Utils.cpp
+++ b/media/libstagefright/Utils.cpp
@@ -24,7 +24,7 @@
#include <utility>
#include <vector>
-#include "include/ESDS.h"
+#include <media/esds/ESDS.h>
#include "include/HevcUtils.h"
#include <cutils/properties.h>
@@ -796,6 +796,8 @@
{ "valid-samples", kKeyValidSamples },
{ "dvb-component-tag", kKeyDvbComponentTag},
{ "dvb-audio-description", kKeyDvbAudioDescription},
+ { "dvb-teletext-magazine-number", kKeyDvbTeletextMagazineNumber},
+ { "dvb-teletext-page-number", kKeyDvbTeletextPageNumber},
}
};
@@ -1014,6 +1016,16 @@
msg->setInt32("dvb-audio-description", dvbAudioDescription);
}
+ int32_t dvbTeletextMagazineNumber = 0;
+ if (meta->findInt32(kKeyDvbTeletextMagazineNumber, &dvbTeletextMagazineNumber)) {
+ msg->setInt32("dvb-teletext-magazine-number", dvbTeletextMagazineNumber);
+ }
+
+ int32_t dvbTeletextPageNumber = 0;
+ if (meta->findInt32(kKeyDvbTeletextPageNumber, &dvbTeletextPageNumber)) {
+ msg->setInt32("dvb-teletext-page-number", dvbTeletextPageNumber);
+ }
+
const char *lang;
if (meta->findCString(kKeyMediaLanguage, &lang)) {
msg->setString("language", lang);
@@ -1810,6 +1822,16 @@
meta->setInt32(kKeyDvbAudioDescription, dvbAudioDescription);
}
+ int32_t dvbTeletextMagazineNumber = 0;
+ if (msg->findInt32("dvb-teletext-magazine-number", &dvbTeletextMagazineNumber)) {
+ meta->setInt32(kKeyDvbTeletextMagazineNumber, dvbTeletextMagazineNumber);
+ }
+
+ int32_t dvbTeletextPageNumber = 0;
+ if (msg->findInt32("dvb-teletext-page-number", &dvbTeletextPageNumber)) {
+ meta->setInt32(kKeyDvbTeletextPageNumber, dvbTeletextPageNumber);
+ }
+
AString lang;
if (msg->findString("language", &lang)) {
meta->setCString(kKeyMediaLanguage, lang.c_str());
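
Note: the two new keys round-trip between the "dvb-teletext-*" format entries and MetaData exactly like the other DVB keys above. A small sketch of writing and reading them on a MetaData object directly; the magazine and page values are arbitrary.

```
#include <media/stagefright/MetaData.h>

using namespace android;

bool teletextKeysRoundTrip() {
    sp<MetaData> meta = new MetaData;
    meta->setInt32(kKeyDvbTeletextMagazineNumber, 1);   // 'ttxm'
    meta->setInt32(kKeyDvbTeletextPageNumber, 0x88);    // 'ttxp'

    int32_t magazine = 0, page = 0;
    return meta->findInt32(kKeyDvbTeletextMagazineNumber, &magazine)
            && meta->findInt32(kKeyDvbTeletextPageNumber, &page)
            && magazine == 1
            && page == 0x88;
}
```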
diff --git a/media/libstagefright/colorconversion/fuzzer/Android.bp b/media/libstagefright/colorconversion/fuzzer/Android.bp
new file mode 100644
index 0000000..76b054a
--- /dev/null
+++ b/media/libstagefright/colorconversion/fuzzer/Android.bp
@@ -0,0 +1,64 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package {
+ // See: http://go/android-license-faq
+ // A large-scale-change added 'default_applicable_licenses' to import
+ // all of the 'license_kinds' from "frameworks_av_media_libstagefright_colorconversion_license"
+ // to get the below license kinds:
+ // SPDX-license-identifier-Apache-2.0
+ default_applicable_licenses: [
+ "frameworks_av_media_libstagefright_colorconversion_license",
+ ],
+}
+
+cc_defaults {
+ name: "libcolorconversion_fuzzer_defaults",
+ static_libs: [
+ "libyuv_static",
+ "libstagefright_color_conversion",
+ "libstagefright",
+ "liblog",
+ ],
+ header_libs: [
+ "libstagefright_headers",
+ "libgui_headers",
+ ],
+ shared_libs: [
+ "libui",
+ "libnativewindow",
+ "libstagefright_codecbase",
+ "libstagefright_foundation",
+ "libutils",
+ "libgui",
+ "libbinder",
+ ],
+ fuzz_config: {
+ cc: [
+ "android-media-fuzzing-reports@google.com",
+ ],
+ componentid: 155276,
+ },
+}
+
+cc_fuzz {
+ name: "color_conversion_fuzzer",
+ srcs: [
+ "color_conversion_fuzzer.cpp",
+ ],
+ defaults: [
+ "libcolorconversion_fuzzer_defaults",
+ ],
+}
diff --git a/media/libstagefright/colorconversion/fuzzer/README.md b/media/libstagefright/colorconversion/fuzzer/README.md
new file mode 100644
index 0000000..220f749
--- /dev/null
+++ b/media/libstagefright/colorconversion/fuzzer/README.md
@@ -0,0 +1,28 @@
+# Fuzzers for libstagefright_color_conversion
+
+## Table of contents
++ [color_conversion_fuzzer](#ColorConversion)
+
+
+# <a name="ColorConversion"></a> Fuzzer for ColorConversion
+
+ColorConversion supports the following parameters:
+1. SrcColorFormatType (parameter name: "kSrcFormatType")
+2. DstColorFormatType (parameter name: "kDstFormatType")
+
+| Parameter| Valid Values| Configured Value|
+|------------- |-------------| ----- |
+|`kSrcFormatType`| 0. `OMX_COLOR_FormatYUV420Planar`<br/>1. `OMX_COLOR_FormatYUV420Planar16`<br/>2. `OMX_COLOR_FormatYUV420SemiPlanar`<br/>3. `OMX_TI_COLOR_FormatYUV420PackedSemiPlanar`<br/>4. `OMX_COLOR_FormatCbYCrY`<br/>5. `OMX_QCOM_COLOR_FormatYVU420SemiPlanar`<br/>6. `COLOR_FormatYUVP010`|Value obtained from FuzzedDataProvider|
+|`kDstFormatType`| 0. `OMX_COLOR_Format16bitRGB565`<br/>1. `OMX_COLOR_Format32BitRGBA8888`<br/>2. `OMX_COLOR_Format32bitBGRA8888`<br/>3. `OMX_COLOR_FormatYUV444Y410`<br/>4. `COLOR_Format32bitABGR2101010`|Value obtained from FuzzedDataProvider|
+
+
+#### Steps to run
+1. Build the fuzzer
+```
+ $ mm -j$(nproc) color_conversion_fuzzer
+```
+2. Run on device
+```
+ $ adb sync data
+ $ adb shell /data/fuzz/arm64/color_conversion_fuzzer/color_conversion_fuzzer
+```
diff --git a/media/libstagefright/colorconversion/fuzzer/color_conversion_fuzzer.cpp b/media/libstagefright/colorconversion/fuzzer/color_conversion_fuzzer.cpp
new file mode 100644
index 0000000..7c2bfe5
--- /dev/null
+++ b/media/libstagefright/colorconversion/fuzzer/color_conversion_fuzzer.cpp
@@ -0,0 +1,122 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#include <media/stagefright/ColorConverter.h>
+#include <media/stagefright/MediaCodecConstants.h>
+#include <media/stagefright/foundation/AMessage.h>
+#include <iostream>
+#include <vector>
+#include "fuzzer/FuzzedDataProvider.h"
+
+using namespace android;
+using ::android::sp;
+
+static constexpr int32_t kMinFrameSize = 2;
+static constexpr int32_t kMaxFrameSize = 8192;
+
+static constexpr int32_t kSrcFormatType[] = {OMX_COLOR_FormatYUV420Planar,
+ OMX_COLOR_FormatYUV420Planar16,
+ OMX_COLOR_FormatYUV420SemiPlanar,
+ OMX_TI_COLOR_FormatYUV420PackedSemiPlanar,
+ OMX_COLOR_FormatCbYCrY,
+ OMX_QCOM_COLOR_FormatYVU420SemiPlanar,
+ COLOR_FormatYUVP010};
+
+static constexpr int32_t kDstFormatType[] = {
+ OMX_COLOR_Format16bitRGB565, OMX_COLOR_Format32BitRGBA8888, OMX_COLOR_Format32bitBGRA8888,
+ OMX_COLOR_FormatYUV444Y410, COLOR_Format32bitABGR2101010};
+
+class ColorConversionFuzzer {
+ public:
+ ColorConversionFuzzer(const uint8_t* data, size_t size) : mFdp(data, size) {}
+ void process();
+
+ private:
+ FuzzedDataProvider mFdp;
+ int32_t getFrameSize(OMX_COLOR_FORMATTYPE colorFormat, int32_t stride, int32_t height);
+ bool isValidFormat(OMX_COLOR_FORMATTYPE srcFormat, OMX_COLOR_FORMATTYPE dstFormat);
+};
+
+int32_t ColorConversionFuzzer::getFrameSize(OMX_COLOR_FORMATTYPE colorFormat, int32_t stride,
+ int32_t height) {
+ int32_t frameSize;
+ switch ((int32_t)colorFormat) {
+ case OMX_COLOR_Format16bitRGB565: {
+ frameSize = 2 * stride * height;
+ break;
+ }
+ case OMX_COLOR_FormatYUV420Planar16:
+ case COLOR_FormatYUVP010:
+ case OMX_COLOR_FormatYUV444Y410: {
+ frameSize = 3 * stride * height;
+ break;
+ }
+ case OMX_COLOR_Format32bitBGRA8888:
+ case OMX_COLOR_Format32BitRGBA8888:
+ case COLOR_Format32bitABGR2101010: {
+ frameSize = 4 * stride * height;
+ break;
+ }
+ case OMX_COLOR_FormatYUV420Planar:
+ case OMX_COLOR_FormatYUV420SemiPlanar:
+ case OMX_COLOR_FormatCbYCrY:
+ case OMX_QCOM_COLOR_FormatYVU420SemiPlanar:
+ case OMX_TI_COLOR_FormatYUV420PackedSemiPlanar:
+ default: {
+ frameSize = stride * height + 2 * (((stride + 1) / 2) * ((height + 1) / 2));
+ break;
+ }
+ }
+ return frameSize;
+}
+
+void ColorConversionFuzzer::process() {
+ OMX_COLOR_FORMATTYPE srcColorFormat =
+ static_cast<OMX_COLOR_FORMATTYPE>(mFdp.PickValueInArray(kSrcFormatType));
+ OMX_COLOR_FORMATTYPE dstColorFormat =
+ static_cast<OMX_COLOR_FORMATTYPE>(mFdp.PickValueInArray(kDstFormatType));
+ std::unique_ptr<ColorConverter> converter(new ColorConverter(srcColorFormat, dstColorFormat));
+ if (converter->isValid()) {
+ int32_t srcLeft, srcTop, srcRight, srcBottom, width, height, stride;
+ width = mFdp.ConsumeIntegralInRange<int32_t>(kMinFrameSize, kMaxFrameSize);
+ height = mFdp.ConsumeIntegralInRange<int32_t>(kMinFrameSize, kMaxFrameSize);
+ stride = mFdp.ConsumeIntegralInRange<int32_t>(width, 2 * kMaxFrameSize);
+
+ srcLeft = mFdp.ConsumeIntegralInRange<int32_t>(0, width - 1);
+ srcTop = mFdp.ConsumeIntegralInRange<int32_t>(0, height - 1);
+ srcRight = mFdp.ConsumeIntegralInRange<int32_t>(srcLeft, width - 1);
+ srcBottom = mFdp.ConsumeIntegralInRange<int32_t>(srcTop, height - 1);
+
+ int32_t dstLeft, dstTop, dstRight, dstBottom;
+ dstLeft = mFdp.ConsumeIntegralInRange<int32_t>(0, width - 1);
+ dstTop = mFdp.ConsumeIntegralInRange<int32_t>(0, height - 1);
+ dstRight = mFdp.ConsumeIntegralInRange<int32_t>(dstLeft, width - 1);
+ dstBottom = mFdp.ConsumeIntegralInRange<int32_t>(dstTop, height - 1);
+
+ int32_t srcFrameSize = getFrameSize(srcColorFormat, stride, height);
+ int32_t dstFrameSize = getFrameSize(dstColorFormat, stride, height);
+ std::vector<uint8_t> srcFrame(srcFrameSize), dstFrame(dstFrameSize);
+ mFdp.ConsumeData(srcFrame.data(), srcFrameSize);
+ converter->convert(srcFrame.data(), width, height, stride, srcLeft, srcTop, srcRight,
+ srcBottom, dstFrame.data(), width, height, stride, dstLeft, dstTop,
+ dstRight, dstBottom);
+ }
+}
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
+ ColorConversionFuzzer colorConversionFuzzer(data, size);
+ colorConversionFuzzer.process();
+ return 0;
+}
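
Note: the default branch of getFrameSize() above is the usual YUV 4:2:0 size, one full-resolution luma plane plus two chroma planes at half resolution in each dimension, with odd sizes rounded up. A standalone check of that arithmetic:

```
#include <cassert>
#include <cstdint>

// Same formula as the fuzzer's default case:
// stride * height luma bytes + 2 planes of ((stride+1)/2) x ((height+1)/2) chroma bytes.
static int32_t yuv420FrameSize(int32_t stride, int32_t height) {
    return stride * height + 2 * (((stride + 1) / 2) * ((height + 1) / 2));
}

int main() {
    assert(yuv420FrameSize(8, 6) == 48 + 2 * (4 * 3));  // 48 + 24 = 72 bytes
    assert(yuv420FrameSize(7, 5) == 35 + 2 * (4 * 3));  // odd dims round up: 35 + 24 = 59 bytes
    return 0;
}
```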
diff --git a/media/libstagefright/data/media_codecs_google_video.xml b/media/libstagefright/data/media_codecs_google_video.xml
index 829f403..2c258e4 100644
--- a/media/libstagefright/data/media_codecs_google_video.xml
+++ b/media/libstagefright/data/media_codecs_google_video.xml
@@ -118,5 +118,13 @@
<Limit name="bitrate" range="1-40000000" />
<Feature name="bitrate-modes" value="VBR,CBR" />
</MediaCodec>
+ <MediaCodec name="c2.android.av1.encoder" type="video/av01">
+ <Limit name="size" min="2x2" max="2048x2048" />
+ <Limit name="alignment" value="2x2" />
+ <Limit name="block-size" value="16x16" />
+ <Limit name="block-count" range="1-3600" />
+ <Limit name="bitrate" range="1-40000000" />
+ <Feature name="bitrate-modes" value="VBR,CBR" />
+ </MediaCodec>
</Encoders>
</Included>
diff --git a/media/libstagefright/data/media_codecs_sw.xml b/media/libstagefright/data/media_codecs_sw.xml
index d667685..c966a3d 100644
--- a/media/libstagefright/data/media_codecs_sw.xml
+++ b/media/libstagefright/data/media_codecs_sw.xml
@@ -321,5 +321,13 @@
<Limit name="bitrate" range="1-40000000" />
<Feature name="bitrate-modes" value="VBR,CBR" />
</MediaCodec>
+ <MediaCodec name="c2.android.av1.encoder" type="video/av01" variant="!slow-cpu">
+ <Limit name="size" min="2x2" max="2048x2048" />
+ <Limit name="alignment" value="2x2" />
+ <Limit name="block-size" value="16x16" />
+ <Limit name="block-count" range="1-3600" />
+ <Limit name="bitrate" range="1-40000000" />
+ <Feature name="bitrate-modes" value="VBR,CBR" />
+ </MediaCodec>
</Encoders>
</MediaCodecs>
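
Note: both codec XMLs now advertise the software AV1 encoder c2.android.av1.encoder with a 2x2-2048x2048 size range and a 40 Mbps bitrate cap. A hedged sketch of instantiating and configuring it within those limits; it assumes the standard native MediaCodec API, and the chosen resolution, bitrate, and color format are only examples.

```
#include <media/stagefright/MediaCodec.h>
#include <media/stagefright/foundation/ALooper.h>
#include <media/stagefright/foundation/AMessage.h>

using namespace android;

sp<MediaCodec> createAv1SwEncoder() {
    sp<ALooper> looper = new ALooper;
    looper->start();

    sp<MediaCodec> codec = MediaCodec::CreateByComponentName(looper, "c2.android.av1.encoder");
    if (codec == nullptr) {
        return nullptr;
    }

    sp<AMessage> format = new AMessage;
    format->setString("mime", "video/av01");
    format->setInt32("width", 1280);                  // within 2x2 .. 2048x2048
    format->setInt32("height", 720);
    format->setInt32("bitrate", 4000000);             // well under the 40 Mbps cap
    format->setInt32("frame-rate", 30);
    format->setInt32("i-frame-interval", 1);
    format->setInt32("color-format", 0x7f420888);     // COLOR_FormatYUV420Flexible

    if (codec->configure(format, nullptr /* surface */, nullptr /* crypto */,
                         MediaCodec::CONFIGURE_FLAG_ENCODE) != OK) {
        codec->release();
        return nullptr;
    }
    return codec;
}
```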
diff --git a/media/libstagefright/include/media/stagefright/ACodec.h b/media/libstagefright/include/media/stagefright/ACodec.h
index 5a21755..38a4c1e 100644
--- a/media/libstagefright/include/media/stagefright/ACodec.h
+++ b/media/libstagefright/include/media/stagefright/ACodec.h
@@ -18,6 +18,8 @@
#define A_CODEC_H_
#include <stdint.h>
+#include <list>
+#include <vector>
#include <android/native_window.h>
#include <media/hardware/MetadataBufferType.h>
#include <media/MediaCodecInfo.h>
@@ -265,11 +267,11 @@
sp<AMessage> mBaseOutputFormat;
FrameRenderTracker mRenderTracker; // render information for buffers rendered by ACodec
- Vector<BufferInfo> mBuffers[2];
+ std::vector<BufferInfo> mBuffers[2];
bool mPortEOS[2];
status_t mInputEOSResult;
- List<sp<AMessage> > mDeferredQueue;
+ std::list<sp<AMessage>> mDeferredQueue;
sp<AMessage> mLastOutputFormat;
bool mIsVideo;
diff --git a/media/libstagefright/include/media/stagefright/MPEG4Writer.h b/media/libstagefright/include/media/stagefright/MPEG4Writer.h
index 7c3eca6..cf76606 100644
--- a/media/libstagefright/include/media/stagefright/MPEG4Writer.h
+++ b/media/libstagefright/include/media/stagefright/MPEG4Writer.h
@@ -196,7 +196,9 @@
typedef key_value_pair_t< const char *, Vector<uint16_t> > ItemRefs;
typedef struct _ItemInfo {
bool isGrid() const { return !strcmp("grid", itemType); }
- bool isImage() const { return !strcmp("hvc1", itemType) || isGrid(); }
+ bool isImage() const {
+ return !strcmp("hvc1", itemType) || !strcmp("av01", itemType) || isGrid();
+ }
const char *itemType;
uint16_t itemId;
bool isPrimary;
@@ -224,10 +226,11 @@
int32_t width;
int32_t height;
int32_t rotation;
- sp<ABuffer> hvcc;
+ sp<ABuffer> data;
} ItemProperty;
bool mHasFileLevelMeta;
+ bool mIsAvif; // used to differentiate HEIC and AVIF under the same OUTPUT_FORMAT_HEIF
uint64_t mFileLevelMetaDataSize;
bool mHasMoovBox;
uint32_t mPrimaryItemId;
diff --git a/media/libstagefright/include/media/stagefright/MediaCodec.h b/media/libstagefright/include/media/stagefright/MediaCodec.h
index 1d2d711..6f6a4e6 100644
--- a/media/libstagefright/include/media/stagefright/MediaCodec.h
+++ b/media/libstagefright/include/media/stagefright/MediaCodec.h
@@ -18,6 +18,7 @@
#define MEDIA_CODEC_H_
+#include <list>
#include <memory>
#include <vector>
@@ -483,7 +484,7 @@
// stop/flush/reset/release.
Mutex mBufferLock;
- List<size_t> mAvailPortBuffers[2];
+ std::list<size_t> mAvailPortBuffers[2];
std::vector<BufferInfo> mPortBuffers[2];
int32_t mDequeueInputTimeoutGeneration;
@@ -501,7 +502,7 @@
sp<IDescrambler> mDescrambler;
- List<sp<ABuffer> > mCSD;
+ std::list<sp<ABuffer> > mCSD;
sp<AMessage> mActivityNotify;
diff --git a/media/libstagefright/include/media/stagefright/MediaCodecList.h b/media/libstagefright/include/media/stagefright/MediaCodecList.h
index 3cf455c..56c6a45 100644
--- a/media/libstagefright/include/media/stagefright/MediaCodecList.h
+++ b/media/libstagefright/include/media/stagefright/MediaCodecList.h
@@ -80,11 +80,9 @@
const char *mime,
bool createEncoder,
uint32_t flags,
- sp<AMessage> format,
+ const sp<AMessage> &format,
Vector<AString> *matchingCodecs);
- static bool codecHandlesFormat(const char *mime, sp<MediaCodecInfo> info, sp<AMessage> format);
-
static bool isSoftwareCodec(const AString &componentName);
private:
@@ -115,6 +113,20 @@
MediaCodecList(const MediaCodecList&) = delete;
MediaCodecList& operator=(const MediaCodecList&) = delete;
+
+ static void findMatchingCodecs(
+ const char *mime,
+ bool createEncoder,
+ uint32_t flags,
+ const sp<AMessage> &format,
+ Vector<AString> *matchingCodecs,
+ bool checkProfile);
+
+ static bool codecHandlesFormat(
+ const char *mime,
+ const sp<MediaCodecInfo> &info,
+ const sp<AMessage> &format,
+ bool checkProfile);
};
} // namespace android
diff --git a/media/libstagefright/include/media/stagefright/MediaMuxer.h b/media/libstagefright/include/media/stagefright/MediaMuxer.h
index e97a65e..33aaf11 100644
--- a/media/libstagefright/include/media/stagefright/MediaMuxer.h
+++ b/media/libstagefright/include/media/stagefright/MediaMuxer.h
@@ -48,9 +48,13 @@
// deleting the output file after stop.
struct MediaMuxer : public MediaMuxerBase {
public:
- // Construct the muxer with the file descriptor. Note that the MediaMuxer
- // will close this file at stop().
- MediaMuxer(int fd, OutputFormat format);
+ /**
+ * Creates the muxer for a given output format.
+ * @param fd : file descriptor of the output file.
+ * @param format : output format of the muxer, e.g. webm, mp4 or ogg.
+ * @return the MediaMuxer object, or nullptr on error.
+ */
+ static MediaMuxer* create(int fd, OutputFormat format);
virtual ~MediaMuxer();
@@ -127,6 +131,11 @@
sp<AMessage> getTrackFormat(size_t idx);
private:
+ // Construct the muxer with the file descriptor. Note that the MediaMuxer
+ // will close this file at stop().
+ // This constructor is made private to ensure that MediaMuxer::create() is used instead.
+ MediaMuxer(int fd, OutputFormat format);
+
const OutputFormat mFormat;
sp<MediaWriter> mWriter;
Vector< sp<MediaAdapter> > mTrackList; // Each track has its MediaAdapter.
diff --git a/media/libstagefright/include/media/stagefright/MediaWriter.h b/media/libstagefright/include/media/stagefright/MediaWriter.h
index 9f20185..2b14811 100644
--- a/media/libstagefright/include/media/stagefright/MediaWriter.h
+++ b/media/libstagefright/include/media/stagefright/MediaWriter.h
@@ -31,6 +31,29 @@
mMaxFileDurationLimitUs(0) {
}
+ // Returns true if the file descriptor is opened using a mode
+ // which meets minimum writer/muxer requirements.
+ static bool isFdOpenModeValid(int fd) {
+ // check for invalid file descriptor.
+ int flags = fcntl(fd, F_GETFL);
+ if (flags == -1) {
+ ALOGE("Invalid File Status Flags and/or mode : %d", flags);
+ return false;
+ }
+ // fd must be in read-write mode or write-only mode.
+ if ((flags & (O_RDWR | O_WRONLY)) == 0) {
+ ALOGE("File must be writable");
+ return false;
+ }
+ // Verify fd is seekable
+ off64_t off = lseek64(fd, 0, SEEK_SET);
+ if (off < 0) {
+ ALOGE("File descriptor is not seekable");
+ return false;
+ }
+ return true;
+ }
+
virtual status_t addSource(const sp<MediaSource> &source) = 0;
virtual bool reachedEOS() = 0;
virtual status_t start(MetaData *params = NULL) = 0;
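
Note: a short sketch of what the new isFdOpenModeValid() check accepts and rejects, using plain POSIX opens; it assumes the helper is publicly accessible (it is called from MediaMuxer::create above), and WebmWriter tightens the same check to read-write only later in this diff.

```
#include <fcntl.h>
#include <unistd.h>
#include <media/stagefright/MediaWriter.h>

using namespace android;

void demonstrateFdChecks(const char *path) {
    int wr = open(path, O_CREAT | O_WRONLY, 0644);
    int rd = open(path, O_RDONLY);

    bool wrOk = MediaWriter::isFdOpenModeValid(wr);   // write-only + seekable: accepted
    bool rdOk = MediaWriter::isFdOpenModeValid(rd);   // read-only: rejected
    bool badOk = MediaWriter::isFdOpenModeValid(-1);  // invalid fd: fcntl fails, rejected
    (void)wrOk; (void)rdOk; (void)badOk;

    close(wr);
    close(rd);
}
```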
diff --git a/media/libstagefright/include/media/stagefright/MetaDataBase.h b/media/libstagefright/include/media/stagefright/MetaDataBase.h
index cdf8d35..33f224c 100644
--- a/media/libstagefright/include/media/stagefright/MetaDataBase.h
+++ b/media/libstagefright/include/media/stagefright/MetaDataBase.h
@@ -283,6 +283,12 @@
// DVB audio description
kKeyDvbAudioDescription = 'addt', // bool (int32_t), DVB audio description only defined for
// audio component
+
+ // DVB teletext magazine number
+ kKeyDvbTeletextMagazineNumber = 'ttxm', // int32_t, DVB teletext magazine number
+
+ // DVB teletext page number
+ kKeyDvbTeletextPageNumber = 'ttxp', // int32_t, DVB teletext page number
};
enum {
diff --git a/media/libstagefright/include/media/stagefright/NuMediaExtractor.h b/media/libstagefright/include/media/stagefright/NuMediaExtractor.h
index d17a480..52ea28b 100644
--- a/media/libstagefright/include/media/stagefright/NuMediaExtractor.h
+++ b/media/libstagefright/include/media/stagefright/NuMediaExtractor.h
@@ -146,6 +146,7 @@
Vector<TrackInfo> mSelectedTracks;
int64_t mTotalBitrate; // in bits/sec
int64_t mDurationUs;
+ String8 mName;
void setEntryPointToRemoteMediaExtractor();
@@ -165,6 +166,7 @@
bool getTotalBitrate(int64_t *bitRate) const;
status_t updateDurationAndBitrate();
status_t appendVorbisNumPageSamples(MediaBufferBase *mbuf, const sp<ABuffer> &buffer);
+ status_t initMediaExtractor(const sp<DataSource>& dataSource);
DISALLOW_EVIL_CONSTRUCTORS(NuMediaExtractor);
};
diff --git a/media/libstagefright/omx/OMXNodeInstance.cpp b/media/libstagefright/omx/OMXNodeInstance.cpp
index bebd516..c82a303 100644
--- a/media/libstagefright/omx/OMXNodeInstance.cpp
+++ b/media/libstagefright/omx/OMXNodeInstance.cpp
@@ -316,7 +316,7 @@
// that a new message is available on the queue. Otherwise, the message stays on the queue, but
// the listener is not notified of it. It will process this message when a subsequent message
// is posted with |realTime| set to true.
- void post(const omx_message &msg, bool realTime = true);
+ void post(const omx_message &msg, bool realTime);
bool loop();
@@ -325,18 +325,15 @@
private:
enum {
- // This is used for frame_rendered message batching, which will eventually end up in a
- // single AMessage in MediaCodec when it is signaled to the app. AMessage can contain
- // up-to 64 key-value pairs, and each frame_rendered message uses 2 keys, so the max
- // value for this would be 32. Nonetheless, limit this to 12 to which gives at least 10
- // mseconds of batching at 120Hz.
- kMaxQueueSize = 12,
+ // Don't delay non-realtime messages longer than 200ms
+ kMaxBatchedDelayNs = 200 * 1000 * 1000,
};
Mutex mLock;
sp<OMXNodeInstance> const mOwner;
bool mDone;
+ bool mHasBatchedMessages;
Condition mQueueChanged;
std::list<omx_message> mQueue;
@@ -350,7 +347,8 @@
OMXNodeInstance::CallbackDispatcher::CallbackDispatcher(const sp<OMXNodeInstance> &owner)
: mOwner(owner),
- mDone(false) {
+ mDone(false),
+ mHasBatchedMessages(false) {
mThread = new CallbackDispatcherThread(this);
mThread->run("OMXCallbackDisp", ANDROID_PRIORITY_FOREGROUND);
}
@@ -358,7 +356,6 @@
OMXNodeInstance::CallbackDispatcher::~CallbackDispatcher() {
{
Mutex::Autolock autoLock(mLock);
-
mDone = true;
mQueueChanged.signal();
}
@@ -377,8 +374,11 @@
Mutex::Autolock autoLock(mLock);
mQueue.push_back(msg);
- if (realTime || mQueue.size() >= kMaxQueueSize) {
+ if (realTime) {
mQueueChanged.signal();
+ } else if (!mHasBatchedMessages) {
+ mHasBatchedMessages = true;
+ mQueueChanged.signal(); // The first non-realtime message is not batched.
}
}
@@ -393,11 +393,16 @@
bool OMXNodeInstance::CallbackDispatcher::loop() {
for (;;) {
std::list<omx_message> messages;
+ std::list<long long> messageTimestamps;
{
Mutex::Autolock autoLock(mLock);
while (!mDone && mQueue.empty()) {
- mQueueChanged.wait(mLock);
+ if (mHasBatchedMessages) {
+ mQueueChanged.waitRelative(mLock, kMaxBatchedDelayNs);
+ } else {
+ mQueueChanged.wait(mLock);
+ }
}
if (mDone) {
@@ -2447,7 +2452,7 @@
msg.type = omx_message::EMPTY_BUFFER_DONE;
msg.fenceFd = fenceFd;
msg.u.buffer_data.buffer = instance->findBufferID(pBuffer);
- instance->mDispatcher->post(msg);
+ instance->mDispatcher->post(msg, true /* realTime */);
return OMX_ErrorNone;
}
@@ -2475,7 +2480,7 @@
msg.u.extended_buffer_data.range_length = pBuffer->nFilledLen;
msg.u.extended_buffer_data.flags = pBuffer->nFlags;
msg.u.extended_buffer_data.timestamp = pBuffer->nTimeStamp;
- instance->mDispatcher->post(msg);
+ instance->mDispatcher->post(msg, true /* realTime */);
return OMX_ErrorNone;
}
diff --git a/media/libstagefright/rtsp/AAVCAssembler.cpp b/media/libstagefright/rtsp/AAVCAssembler.cpp
index ddf797c..88f7be7 100644
--- a/media/libstagefright/rtsp/AAVCAssembler.cpp
+++ b/media/libstagefright/rtsp/AAVCAssembler.cpp
@@ -332,6 +332,11 @@
}
bool AAVCAssembler::dropFramesUntilIframe(const sp<ABuffer> &buffer) {
+ if (buffer->size() == 0) {
+ ALOGE("b/230630526 buffer->size() == 0");
+ android_errorWriteLog(0x534e4554, "230630526");
+ return false;
+ }
const uint8_t *data = buffer->data();
unsigned nalType = data[0] & 0x1f;
if (!mFirstIFrameProvided && nalType < 0x5) {
@@ -624,8 +629,7 @@
int32_t firstSeqNo = buffer->int32Data();
// This only works for FU-A type & non-start sequence
- int32_t nalType = buffer->size() >= 1 ? buffer->data()[0] & 0x1f : -1;
- if (nalType != 28 || (buffer->size() >= 2 && buffer->data()[1] & 0x80)) {
+ if (buffer->size() < 2 || (buffer->data()[0] & 0x1f) != 28 || buffer->data()[1] & 0x80) {
return firstSeqNo;
}
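
Note: the rewritten condition above is a bounds-checked test for "FU-A fragment that does not start a NAL unit"; the early return in pickStartSeq() fires exactly when the predicate below is false. For reference, a standalone version of the same H.264/RTP rule (RFC 6184: NAL type 28 in the low five bits of the FU indicator, start bit 0x80 in the FU header):

```
#include <cstddef>
#include <cstdint>

// Returns true only for an FU-A packet that continues (does not start) a fragmented NAL.
static bool isFuaContinuation(const uint8_t *data, size_t size) {
    if (size < 2) {
        return false;                  // too short to carry FU indicator + FU header
    }
    if ((data[0] & 0x1f) != 28) {
        return false;                  // not an FU-A packet at all
    }
    return (data[1] & 0x80) == 0;      // start bit clear -> continuation fragment
}
```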
diff --git a/media/libstagefright/rtsp/AHEVCAssembler.cpp b/media/libstagefright/rtsp/AHEVCAssembler.cpp
index bb42d1f..72dd981 100644
--- a/media/libstagefright/rtsp/AHEVCAssembler.cpp
+++ b/media/libstagefright/rtsp/AHEVCAssembler.cpp
@@ -629,13 +629,13 @@
int32_t AHEVCAssembler::pickStartSeq(const Queue *queue,
uint32_t first, int64_t play, int64_t jit) {
+ CHECK(!queue->empty());
// pick the first sequence number has the start bit.
sp<ABuffer> buffer = *(queue->begin());
int32_t firstSeqNo = buffer->int32Data();
// This only works for FU-A type & non-start sequence
- unsigned nalType = buffer->data()[0] & 0x1f;
- if (nalType != 28 || buffer->data()[2] & 0x80) {
+ if (buffer->size() < 3 || (buffer->data()[0] & 0x1f) != 28 || buffer->data()[2] & 0x80) {
return firstSeqNo;
}
@@ -645,7 +645,7 @@
if (rtpTime + jit >= play) {
break;
}
- if ((data[2] & 0x80)) {
+ if (it->size() >= 3 && (data[2] & 0x80)) {
const int32_t seqNo = it->int32Data();
ALOGE("finding [HEAD] pkt. \t Seq# (%d ~ )[%d", firstSeqNo, seqNo);
firstSeqNo = seqNo;
diff --git a/media/libstagefright/tests/fuzzers/MediaMuxerFuzzer.cpp b/media/libstagefright/tests/fuzzers/MediaMuxerFuzzer.cpp
index 5df3267..70d73c8 100644
--- a/media/libstagefright/tests/fuzzers/MediaMuxerFuzzer.cpp
+++ b/media/libstagefright/tests/fuzzers/MediaMuxerFuzzer.cpp
@@ -52,7 +52,10 @@
MediaMuxer::OutputFormat format =
(MediaMuxer::OutputFormat)fdp.ConsumeIntegralInRange<int32_t>(0, 4);
- sp<MediaMuxer> mMuxer(new MediaMuxer(fd, format));
+ sp<MediaMuxer> mMuxer = MediaMuxer::create(fd, format);
+ if (mMuxer == nullptr) {
+ return 0;
+ }
while (fdp.remaining_bytes() > 1) {
switch (fdp.ConsumeIntegralInRange<uint8_t>(0, 4)) {
diff --git a/media/libstagefright/webm/WebmWriter.cpp b/media/libstagefright/webm/WebmWriter.cpp
index 5eaadbd..3823c36 100644
--- a/media/libstagefright/webm/WebmWriter.cpp
+++ b/media/libstagefright/webm/WebmWriter.cpp
@@ -54,6 +54,19 @@
static const int64_t kMinStreamableFileSizeInBytes = 5 * 1024 * 1024;
+bool WebmWriter::isFdOpenModeValid(int fd) {
+ // check for invalid file descriptor.
+ if (!MediaWriter::isFdOpenModeValid(fd)) {
+ return false;
+ }
+ int flags = fcntl(fd, F_GETFL);
+ if ((flags & O_RDWR) == 0) {
+ ALOGE("File must be in read-write mode for webm writer");
+ return false;
+ }
+ return true;
+}
+
WebmWriter::WebmWriter(int fd)
: mFd(dup(fd)),
mInitCheck(mFd < 0 ? NO_INIT : OK),
diff --git a/media/libstagefright/webm/include/webm/WebmWriter.h b/media/libstagefright/webm/include/webm/WebmWriter.h
index ed5bc4c..e339add 100644
--- a/media/libstagefright/webm/include/webm/WebmWriter.h
+++ b/media/libstagefright/webm/include/webm/WebmWriter.h
@@ -36,6 +36,10 @@
class WebmWriter : public MediaWriter {
public:
+ // Returns true if the file descriptor is opened using a mode
+ // which is compatible with WebmWriter.
+ // Note that this hides the method of the same name in the base class.
+ static bool isFdOpenModeValid(int fd);
explicit WebmWriter(int fd);
~WebmWriter() { reset(); }
diff --git a/services/mediacodec/registrant/Android.bp b/media/module/codecserviceregistrant/Android.bp
similarity index 95%
rename from services/mediacodec/registrant/Android.bp
rename to media/module/codecserviceregistrant/Android.bp
index 12cc32a..f3a1723 100644
--- a/services/mediacodec/registrant/Android.bp
+++ b/media/module/codecserviceregistrant/Android.bp
@@ -4,7 +4,6 @@
// all of the 'license_kinds' from "frameworks_av_services_mediacodec_license"
// to get the below license kinds:
// SPDX-license-identifier-Apache-2.0
- default_applicable_licenses: ["frameworks_av_services_mediacodec_license"],
}
cc_library {
@@ -62,6 +61,7 @@
"libcodec2_soft_vp9dec",
// "libcodec2_soft_av1dec_aom", // replaced by the gav1 implementation
"libcodec2_soft_av1dec_gav1",
+ "libcodec2_soft_av1enc",
"libcodec2_soft_vp8enc",
"libcodec2_soft_vp9enc",
"libcodec2_soft_rawdec",
diff --git a/services/mediacodec/registrant/CodecServiceRegistrant.cpp b/media/module/codecserviceregistrant/CodecServiceRegistrant.cpp
similarity index 100%
rename from services/mediacodec/registrant/CodecServiceRegistrant.cpp
rename to media/module/codecserviceregistrant/CodecServiceRegistrant.cpp
diff --git a/services/mediacodec/registrant/fuzzer/Android.bp b/media/module/codecserviceregistrant/fuzzer/Android.bp
similarity index 94%
rename from services/mediacodec/registrant/fuzzer/Android.bp
rename to media/module/codecserviceregistrant/fuzzer/Android.bp
index 43afbf1..0b9affd 100644
--- a/services/mediacodec/registrant/fuzzer/Android.bp
+++ b/media/module/codecserviceregistrant/fuzzer/Android.bp
@@ -20,7 +20,6 @@
// all of the 'license_kinds' from "frameworks_av_services_mediacodec_license"
// to get the below license kinds:
// SPDX-license-identifier-Apache-2.0
- default_applicable_licenses: ["frameworks_av_services_mediacodec_license"],
}
cc_fuzz {
diff --git a/services/mediacodec/registrant/fuzzer/README.md b/media/module/codecserviceregistrant/fuzzer/README.md
similarity index 100%
rename from services/mediacodec/registrant/fuzzer/README.md
rename to media/module/codecserviceregistrant/fuzzer/README.md
diff --git a/services/mediacodec/registrant/fuzzer/codecServiceRegistrant_fuzzer.cpp b/media/module/codecserviceregistrant/fuzzer/codecServiceRegistrant_fuzzer.cpp
similarity index 100%
rename from services/mediacodec/registrant/fuzzer/codecServiceRegistrant_fuzzer.cpp
rename to media/module/codecserviceregistrant/fuzzer/codecServiceRegistrant_fuzzer.cpp
diff --git a/media/module/esds/Android.bp b/media/module/esds/Android.bp
new file mode 100644
index 0000000..272d4d7
--- /dev/null
+++ b/media/module/esds/Android.bp
@@ -0,0 +1,44 @@
+package {
+ // See: http://go/android-license-faq
+ // A large-scale-change added 'default_applicable_licenses' to import
+ // all of the 'license_kinds' from "frameworks_av_license"
+ // to get the below license kinds:
+ // SPDX-license-identifier-Apache-2.0
+ default_applicable_licenses: ["frameworks_av_license"],
+}
+
+cc_library_static {
+ name: "libstagefright_esds",
+ apex_available: [
+ "//apex_available:platform",
+ "com.android.media",
+ ],
+ min_sdk_version: "29",
+
+ export_include_dirs: ["include"],
+
+ local_include_dirs: ["include"],
+
+ srcs: ["ESDS.cpp"],
+
+ cflags: [
+ "-Werror",
+ "-Wall",
+ ],
+ sanitize: {
+ misc_undefined: [
+ "signed-integer-overflow",
+ ],
+ cfi: true,
+ },
+ shared_libs: [
+ "libstagefright_foundation",
+ "libutils"
+ ],
+ host_supported: true,
+ target: {
+ darwin: {
+ enabled: false,
+ },
+ },
+}
diff --git a/media/libstagefright/ESDS.cpp b/media/module/esds/ESDS.cpp
similarity index 99%
rename from media/libstagefright/ESDS.cpp
rename to media/module/esds/ESDS.cpp
index ea059e8..906250b 100644
--- a/media/libstagefright/ESDS.cpp
+++ b/media/module/esds/ESDS.cpp
@@ -20,7 +20,7 @@
#include <media/stagefright/foundation/ByteUtils.h>
-#include "include/ESDS.h"
+#include <media/esds/ESDS.h>
#include <string.h>
diff --git a/media/libstagefright/include/ESDS.h b/media/module/esds/include/media/esds/ESDS.h
similarity index 100%
rename from media/libstagefright/include/ESDS.h
rename to media/module/esds/include/media/esds/ESDS.h
diff --git a/media/libstagefright/tests/ESDS/Android.bp b/media/module/esds/tests/Android.bp
similarity index 93%
rename from media/libstagefright/tests/ESDS/Android.bp
rename to media/module/esds/tests/Android.bp
index 04e9b29..aea611e 100644
--- a/media/libstagefright/tests/ESDS/Android.bp
+++ b/media/module/esds/tests/Android.bp
@@ -20,9 +20,6 @@
// all of the 'license_kinds' from "frameworks_av_media_libstagefright_tests_license"
// to get the below license kinds:
// SPDX-license-identifier-Apache-2.0
- default_applicable_licenses: [
- "frameworks_av_media_libstagefright_tests_license",
- ],
}
cc_test {
diff --git a/media/libstagefright/tests/ESDS/AndroidTest.xml b/media/module/esds/tests/AndroidTest.xml
similarity index 100%
rename from media/libstagefright/tests/ESDS/AndroidTest.xml
rename to media/module/esds/tests/AndroidTest.xml
diff --git a/media/libstagefright/tests/ESDS/ESDSTest.cpp b/media/module/esds/tests/ESDSTest.cpp
similarity index 99%
rename from media/libstagefright/tests/ESDS/ESDSTest.cpp
rename to media/module/esds/tests/ESDSTest.cpp
index 101e00c..ea9a888 100644
--- a/media/libstagefright/tests/ESDS/ESDSTest.cpp
+++ b/media/module/esds/tests/ESDSTest.cpp
@@ -22,7 +22,7 @@
#include <string.h>
#include <fstream>
-#include <ESDS.h>
+#include <media/esds/ESDS.h>
#include <binder/ProcessState.h>
#include <datasource/FileSource.h>
#include <media/stagefright/MediaExtractorFactory.h>
diff --git a/media/libstagefright/tests/ESDS/ESDSTestEnvironment.h b/media/module/esds/tests/ESDSTestEnvironment.h
similarity index 100%
rename from media/libstagefright/tests/ESDS/ESDSTestEnvironment.h
rename to media/module/esds/tests/ESDSTestEnvironment.h
diff --git a/media/libstagefright/tests/ESDS/README.md b/media/module/esds/tests/README.md
similarity index 100%
rename from media/libstagefright/tests/ESDS/README.md
rename to media/module/esds/tests/README.md
diff --git a/media/module/extractors/fuzzers/Android.bp b/media/module/extractors/fuzzers/Android.bp
index 490e195..b3e34d2 100644
--- a/media/module/extractors/fuzzers/Android.bp
+++ b/media/module/extractors/fuzzers/Android.bp
@@ -173,7 +173,7 @@
],
static_libs: [
- "libwebm",
+ "libwebm_mkvparser",
"libstagefright_flacdec",
"libstagefright_metadatautils",
"libmkvextractor",
diff --git a/media/module/extractors/mkv/Android.bp b/media/module/extractors/mkv/Android.bp
index 98ce305..c4b67eb 100644
--- a/media/module/extractors/mkv/Android.bp
+++ b/media/module/extractors/mkv/Android.bp
@@ -33,7 +33,7 @@
"libstagefright_foundation_colorutils_ndk", // for mainline-safe ColorUtils
"libstagefright_foundation",
"libstagefright_metadatautils",
- "libwebm",
+ "libwebm_mkvparser",
"libutils",
],
diff --git a/media/module/extractors/mp4/MPEG4Extractor.cpp b/media/module/extractors/mp4/MPEG4Extractor.cpp
index 5a03992..2e889e3 100644
--- a/media/module/extractors/mp4/MPEG4Extractor.cpp
+++ b/media/module/extractors/mp4/MPEG4Extractor.cpp
@@ -34,7 +34,7 @@
#include "SampleTable.h"
#include "ItemTable.h"
-#include <ESDS.h>
+#include <media/esds/ESDS.h>
#include <ID3.h>
#include <media/stagefright/DataSourceBase.h>
#include <media/ExtractorUtils.h>
@@ -5908,12 +5908,18 @@
return -EINVAL;
}
- int32_t dataOffsetDelta;
- if (!mDataSource->getUInt32(offset, (uint32_t*)&dataOffsetDelta)) {
+ uint32_t dataOffsetDelta;
+ if (!mDataSource->getUInt32(offset, &dataOffsetDelta)) {
return ERROR_MALFORMED;
}
- dataOffset = mTrackFragmentHeaderInfo.mBaseDataOffset + dataOffsetDelta;
+ if (__builtin_add_overflow(
+ mTrackFragmentHeaderInfo.mBaseDataOffset, dataOffsetDelta, &dataOffset)) {
+ ALOGW("b/232242894 mBaseDataOffset(%" PRIu64 ") + dataOffsetDelta(%u) overflows uint64",
+ mTrackFragmentHeaderInfo.mBaseDataOffset, dataOffsetDelta);
+ android_errorWriteLog(0x534e4554, "232242894");
+ return ERROR_MALFORMED;
+ }
offset += 4;
size -= 4;
@@ -6047,7 +6053,12 @@
return NO_MEMORY;
}
- dataOffset += sampleSize;
+ if (__builtin_add_overflow(dataOffset, sampleSize, &dataOffset)) {
+ ALOGW("b/232242894 dataOffset(%" PRIu64 ") + sampleSize(%u) overflows uint64",
+ dataOffset, sampleSize);
+ android_errorWriteLog(0x534e4554, "232242894");
+ return ERROR_MALFORMED;
+ }
}
mTrackFragmentHeaderInfo.mDataOffset = dataOffset;
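
Both hunks above swap unchecked additions for __builtin_add_overflow so a crafted 'tfhd'/'trun' box cannot wrap the 64-bit data offset. The guarded-add pattern in isolation (standalone sketch, not extractor code):

    #include <cstdint>
    #include <cstdio>

    // Returns false instead of silently wrapping when base + delta does not
    // fit in a uint64_t, mirroring the ERROR_MALFORMED early returns above.
    static bool addOffsetChecked(uint64_t base, uint32_t delta, uint64_t* out) {
        if (__builtin_add_overflow(base, delta, out)) {
            fprintf(stderr, "offset %llu + %u would overflow\n",
                    (unsigned long long)base, delta);
            return false;
        }
        return true;
    }

    int main() {
        uint64_t off = 0;
        return addOffsetChecked(UINT64_MAX - 1, 4u, &off) ? 1 : 0;  // expect overflow
    }
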
diff --git a/media/module/extractors/tests/Android.bp b/media/module/extractors/tests/Android.bp
index 3c3bbdc..d6e79c7 100644
--- a/media/module/extractors/tests/Android.bp
+++ b/media/module/extractors/tests/Android.bp
@@ -55,7 +55,7 @@
"libmedia_midiiowrapper",
"libsonivoxwithoutjet",
"libvorbisidec",
- "libwebm",
+ "libwebm_mkvparser",
"libFLAC",
],
diff --git a/media/module/foundation/Android.bp b/media/module/foundation/Android.bp
index ca17117..dc8384d 100644
--- a/media/module/foundation/Android.bp
+++ b/media/module/foundation/Android.bp
@@ -110,6 +110,11 @@
"-DNO_IMEMORY",
],
},
+ host: {
+ sanitize: {
+ cfi: false,
+ },
+ },
apex: {
exclude_shared_libs: [
"libbinder",
diff --git a/media/libwatchdog/Android.bp b/media/module/libwatchdog/Android.bp
similarity index 100%
rename from media/libwatchdog/Android.bp
rename to media/module/libwatchdog/Android.bp
diff --git a/media/libwatchdog/Watchdog.cpp b/media/module/libwatchdog/Watchdog.cpp
similarity index 100%
rename from media/libwatchdog/Watchdog.cpp
rename to media/module/libwatchdog/Watchdog.cpp
diff --git a/media/libwatchdog/include/watchdog/Watchdog.h b/media/module/libwatchdog/include/watchdog/Watchdog.h
similarity index 100%
rename from media/libwatchdog/include/watchdog/Watchdog.h
rename to media/module/libwatchdog/include/watchdog/Watchdog.h
diff --git a/media/module/metadatautils/Android.bp b/media/module/metadatautils/Android.bp
new file mode 100644
index 0000000..c77474f
--- /dev/null
+++ b/media/module/metadatautils/Android.bp
@@ -0,0 +1,46 @@
+package {
+ // See: http://go/android-license-faq
+ // A large-scale-change added 'default_applicable_licenses' to import
+ // all of the 'license_kinds' from "frameworks_av_license"
+ // to get the below license kinds:
+ // SPDX-license-identifier-Apache-2.0
+ default_applicable_licenses: ["frameworks_av_license"],
+}
+
+cc_library_static {
+ name: "libstagefright_metadatautils",
+ apex_available: [
+ "//apex_available:platform",
+ "com.android.media",
+ ],
+ min_sdk_version: "29",
+
+ srcs: ["MetaDataUtils.cpp"],
+
+ cflags: [
+ "-Werror",
+ "-Wall",
+ ],
+ sanitize: {
+ misc_undefined: [
+ "signed-integer-overflow",
+ ],
+ cfi: true,
+ },
+
+ header_libs: [
+ "libaudioclient_headers",
+ "libstagefright_headers",
+ "libstagefright_foundation_headers",
+ "media_ndk_headers",
+ ],
+
+ host_supported: true,
+ target: {
+ darwin: {
+ enabled: false,
+ },
+ },
+
+ export_include_dirs: ["include"],
+}
diff --git a/media/libstagefright/MetaDataUtils.cpp b/media/module/metadatautils/MetaDataUtils.cpp
similarity index 100%
rename from media/libstagefright/MetaDataUtils.cpp
rename to media/module/metadatautils/MetaDataUtils.cpp
diff --git a/media/libstagefright/include/media/stagefright/MetaDataUtils.h b/media/module/metadatautils/include/media/stagefright/MetaDataUtils.h
similarity index 100%
rename from media/libstagefright/include/media/stagefright/MetaDataUtils.h
rename to media/module/metadatautils/include/media/stagefright/MetaDataUtils.h
diff --git a/media/libstagefright/tests/metadatautils/Android.bp b/media/module/metadatautils/test/Android.bp
similarity index 93%
rename from media/libstagefright/tests/metadatautils/Android.bp
rename to media/module/metadatautils/test/Android.bp
index ecdf89b..21f38f6 100644
--- a/media/libstagefright/tests/metadatautils/Android.bp
+++ b/media/module/metadatautils/test/Android.bp
@@ -20,9 +20,6 @@
// all of the 'license_kinds' from "frameworks_av_media_libstagefright_tests_license"
// to get the below license kinds:
// SPDX-license-identifier-Apache-2.0
- default_applicable_licenses: [
- "frameworks_av_media_libstagefright_tests_license",
- ],
}
cc_test {
diff --git a/media/libstagefright/tests/metadatautils/AndroidTest.xml b/media/module/metadatautils/test/AndroidTest.xml
similarity index 100%
rename from media/libstagefright/tests/metadatautils/AndroidTest.xml
rename to media/module/metadatautils/test/AndroidTest.xml
diff --git a/media/libstagefright/tests/metadatautils/MetaDataUtilsTest.cpp b/media/module/metadatautils/test/MetaDataUtilsTest.cpp
similarity index 99%
rename from media/libstagefright/tests/metadatautils/MetaDataUtilsTest.cpp
rename to media/module/metadatautils/test/MetaDataUtilsTest.cpp
index 9fd5fdb..08c9284 100644
--- a/media/libstagefright/tests/metadatautils/MetaDataUtilsTest.cpp
+++ b/media/module/metadatautils/test/MetaDataUtilsTest.cpp
@@ -21,7 +21,7 @@
#include <fstream>
#include <string>
-#include <ESDS.h>
+#include <media/esds/ESDS.h>
#include <media/NdkMediaFormat.h>
#include <media/stagefright/MediaCodecConstants.h>
#include <media/stagefright/MediaDefs.h>
diff --git a/media/libstagefright/tests/metadatautils/MetaDataUtilsTestEnvironment.h b/media/module/metadatautils/test/MetaDataUtilsTestEnvironment.h
similarity index 100%
rename from media/libstagefright/tests/metadatautils/MetaDataUtilsTestEnvironment.h
rename to media/module/metadatautils/test/MetaDataUtilsTestEnvironment.h
diff --git a/media/libstagefright/tests/metadatautils/README.md b/media/module/metadatautils/test/README.md
similarity index 100%
rename from media/libstagefright/tests/metadatautils/README.md
rename to media/module/metadatautils/test/README.md
diff --git a/services/minijail/Android.bp b/media/module/minijail/Android.bp
similarity index 100%
rename from services/minijail/Android.bp
rename to media/module/minijail/Android.bp
diff --git a/services/minijail/OWNERS b/media/module/minijail/OWNERS
similarity index 100%
rename from services/minijail/OWNERS
rename to media/module/minijail/OWNERS
diff --git a/services/minijail/TEST_MAPPING b/media/module/minijail/TEST_MAPPING
similarity index 100%
rename from services/minijail/TEST_MAPPING
rename to media/module/minijail/TEST_MAPPING
diff --git a/services/minijail/av_services_minijail_unittest.cpp b/media/module/minijail/av_services_minijail_unittest.cpp
similarity index 100%
rename from services/minijail/av_services_minijail_unittest.cpp
rename to media/module/minijail/av_services_minijail_unittest.cpp
diff --git a/services/minijail/minijail.cpp b/media/module/minijail/minijail.cpp
similarity index 100%
rename from services/minijail/minijail.cpp
rename to media/module/minijail/minijail.cpp
diff --git a/services/minijail/minijail.h b/media/module/minijail/minijail.h
similarity index 100%
rename from services/minijail/minijail.h
rename to media/module/minijail/minijail.h
diff --git a/media/module/mpeg2ts/Android.bp b/media/module/mpeg2ts/Android.bp
index 283df1e..bf762c6 100644
--- a/media/module/mpeg2ts/Android.bp
+++ b/media/module/mpeg2ts/Android.bp
@@ -51,6 +51,7 @@
"libmedia_datasource_headers",
"libaudioclient_headers",
"media_ndk_headers",
+ "libstagefright_headers",
"libstagefright_foundation_headers",
],
diff --git a/services/mediatranscoding/.clang-format b/media/module/service.mediatranscoding/.clang-format
similarity index 100%
rename from services/mediatranscoding/.clang-format
rename to media/module/service.mediatranscoding/.clang-format
diff --git a/services/mediatranscoding/Android.bp b/media/module/service.mediatranscoding/Android.bp
similarity index 96%
rename from services/mediatranscoding/Android.bp
rename to media/module/service.mediatranscoding/Android.bp
index fa5eb4e..37f354b 100644
--- a/services/mediatranscoding/Android.bp
+++ b/media/module/service.mediatranscoding/Android.bp
@@ -26,6 +26,10 @@
"SimulatedTranscoder.cpp",
],
+ export_include_dirs: [
+ ".",
+ ],
+
min_sdk_version: "29",
apex_available: [
"com.android.media",
diff --git a/services/mediatranscoding/MODULE_LICENSE_APACHE2 b/media/module/service.mediatranscoding/MODULE_LICENSE_APACHE2
similarity index 100%
rename from services/mediatranscoding/MODULE_LICENSE_APACHE2
rename to media/module/service.mediatranscoding/MODULE_LICENSE_APACHE2
diff --git a/services/mediatranscoding/MediaTranscodingService.cpp b/media/module/service.mediatranscoding/MediaTranscodingService.cpp
similarity index 100%
rename from services/mediatranscoding/MediaTranscodingService.cpp
rename to media/module/service.mediatranscoding/MediaTranscodingService.cpp
diff --git a/services/mediatranscoding/MediaTranscodingService.h b/media/module/service.mediatranscoding/MediaTranscodingService.h
similarity index 100%
rename from services/mediatranscoding/MediaTranscodingService.h
rename to media/module/service.mediatranscoding/MediaTranscodingService.h
diff --git a/services/mediatranscoding/NOTICE b/media/module/service.mediatranscoding/NOTICE
similarity index 100%
rename from services/mediatranscoding/NOTICE
rename to media/module/service.mediatranscoding/NOTICE
diff --git a/services/mediatranscoding/OWNERS b/media/module/service.mediatranscoding/OWNERS
similarity index 100%
rename from services/mediatranscoding/OWNERS
rename to media/module/service.mediatranscoding/OWNERS
diff --git a/services/mediatranscoding/SimulatedTranscoder.cpp b/media/module/service.mediatranscoding/SimulatedTranscoder.cpp
similarity index 100%
rename from services/mediatranscoding/SimulatedTranscoder.cpp
rename to media/module/service.mediatranscoding/SimulatedTranscoder.cpp
diff --git a/services/mediatranscoding/SimulatedTranscoder.h b/media/module/service.mediatranscoding/SimulatedTranscoder.h
similarity index 100%
rename from services/mediatranscoding/SimulatedTranscoder.h
rename to media/module/service.mediatranscoding/SimulatedTranscoder.h
diff --git a/services/mediatranscoding/main_mediatranscodingservice.cpp b/media/module/service.mediatranscoding/main_mediatranscodingservice.cpp
similarity index 100%
rename from services/mediatranscoding/main_mediatranscodingservice.cpp
rename to media/module/service.mediatranscoding/main_mediatranscodingservice.cpp
diff --git a/services/mediatranscoding/tests/Android.bp b/media/module/service.mediatranscoding/tests/Android.bp
similarity index 95%
rename from services/mediatranscoding/tests/Android.bp
rename to media/module/service.mediatranscoding/tests/Android.bp
index ae13656..97fbd4c 100644
--- a/services/mediatranscoding/tests/Android.bp
+++ b/media/module/service.mediatranscoding/tests/Android.bp
@@ -20,10 +20,6 @@
"-Wextra",
],
- include_dirs: [
- "frameworks/av/services/mediatranscoding",
- ],
-
shared_libs: [
"libactivitymanager_aidl",
"libbinder",
diff --git a/services/mediatranscoding/tests/MediaTranscodingServiceTestHelper.h b/media/module/service.mediatranscoding/tests/MediaTranscodingServiceTestHelper.h
similarity index 100%
rename from services/mediatranscoding/tests/MediaTranscodingServiceTestHelper.h
rename to media/module/service.mediatranscoding/tests/MediaTranscodingServiceTestHelper.h
diff --git a/services/mediatranscoding/tests/README.txt b/media/module/service.mediatranscoding/tests/README.txt
similarity index 100%
rename from services/mediatranscoding/tests/README.txt
rename to media/module/service.mediatranscoding/tests/README.txt
diff --git a/services/mediatranscoding/tests/TranscodingUidPolicyTestApp/Android.bp b/media/module/service.mediatranscoding/tests/TranscodingUidPolicyTestApp/Android.bp
similarity index 100%
rename from services/mediatranscoding/tests/TranscodingUidPolicyTestApp/Android.bp
rename to media/module/service.mediatranscoding/tests/TranscodingUidPolicyTestApp/Android.bp
diff --git a/services/mediatranscoding/tests/TranscodingUidPolicyTestApp/TestAppA.xml b/media/module/service.mediatranscoding/tests/TranscodingUidPolicyTestApp/TestAppA.xml
similarity index 100%
rename from services/mediatranscoding/tests/TranscodingUidPolicyTestApp/TestAppA.xml
rename to media/module/service.mediatranscoding/tests/TranscodingUidPolicyTestApp/TestAppA.xml
diff --git a/services/mediatranscoding/tests/TranscodingUidPolicyTestApp/TestAppB.xml b/media/module/service.mediatranscoding/tests/TranscodingUidPolicyTestApp/TestAppB.xml
similarity index 100%
rename from services/mediatranscoding/tests/TranscodingUidPolicyTestApp/TestAppB.xml
rename to media/module/service.mediatranscoding/tests/TranscodingUidPolicyTestApp/TestAppB.xml
diff --git a/services/mediatranscoding/tests/TranscodingUidPolicyTestApp/TestAppC.xml b/media/module/service.mediatranscoding/tests/TranscodingUidPolicyTestApp/TestAppC.xml
similarity index 100%
rename from services/mediatranscoding/tests/TranscodingUidPolicyTestApp/TestAppC.xml
rename to media/module/service.mediatranscoding/tests/TranscodingUidPolicyTestApp/TestAppC.xml
diff --git a/services/mediatranscoding/tests/TranscodingUidPolicyTestApp/src/com/android/tests/transcoding/MainActivity.java b/media/module/service.mediatranscoding/tests/TranscodingUidPolicyTestApp/src/com/android/tests/transcoding/MainActivity.java
similarity index 100%
rename from services/mediatranscoding/tests/TranscodingUidPolicyTestApp/src/com/android/tests/transcoding/MainActivity.java
rename to media/module/service.mediatranscoding/tests/TranscodingUidPolicyTestApp/src/com/android/tests/transcoding/MainActivity.java
diff --git a/services/mediatranscoding/tests/TranscodingUidPolicyTestApp/src/com/android/tests/transcoding/ResourcePolicyTestActivity.java b/media/module/service.mediatranscoding/tests/TranscodingUidPolicyTestApp/src/com/android/tests/transcoding/ResourcePolicyTestActivity.java
similarity index 100%
rename from services/mediatranscoding/tests/TranscodingUidPolicyTestApp/src/com/android/tests/transcoding/ResourcePolicyTestActivity.java
rename to media/module/service.mediatranscoding/tests/TranscodingUidPolicyTestApp/src/com/android/tests/transcoding/ResourcePolicyTestActivity.java
diff --git a/services/mediatranscoding/tests/build_and_run_all_unit_tests.sh b/media/module/service.mediatranscoding/tests/build_and_run_all_unit_tests.sh
similarity index 100%
rename from services/mediatranscoding/tests/build_and_run_all_unit_tests.sh
rename to media/module/service.mediatranscoding/tests/build_and_run_all_unit_tests.sh
diff --git a/services/mediatranscoding/tests/mediatranscodingservice_real_tests.cpp b/media/module/service.mediatranscoding/tests/mediatranscodingservice_real_tests.cpp
similarity index 100%
rename from services/mediatranscoding/tests/mediatranscodingservice_real_tests.cpp
rename to media/module/service.mediatranscoding/tests/mediatranscodingservice_real_tests.cpp
diff --git a/services/mediatranscoding/tests/mediatranscodingservice_resource_tests.cpp b/media/module/service.mediatranscoding/tests/mediatranscodingservice_resource_tests.cpp
similarity index 100%
rename from services/mediatranscoding/tests/mediatranscodingservice_resource_tests.cpp
rename to media/module/service.mediatranscoding/tests/mediatranscodingservice_resource_tests.cpp
diff --git a/services/mediatranscoding/tests/mediatranscodingservice_simulated_tests.cpp b/media/module/service.mediatranscoding/tests/mediatranscodingservice_simulated_tests.cpp
similarity index 100%
rename from services/mediatranscoding/tests/mediatranscodingservice_simulated_tests.cpp
rename to media/module/service.mediatranscoding/tests/mediatranscodingservice_simulated_tests.cpp
diff --git a/media/mtp/tests/MtpFuzzer/mtp_device_fuzzer.cpp b/media/mtp/tests/MtpFuzzer/mtp_device_fuzzer.cpp
index c4dd564..c32d28a 100644
--- a/media/mtp/tests/MtpFuzzer/mtp_device_fuzzer.cpp
+++ b/media/mtp/tests/MtpFuzzer/mtp_device_fuzzer.cpp
@@ -79,8 +79,6 @@
usb_device* device = usb_device_new(deviceName.c_str(), fd);
MtpDevice mtpDevice(device, fdp.ConsumeIntegral<int32_t>(), &descriptor.ep[0],
&descriptor.ep[1], &descriptor.ep[2]);
- MtpObjectInfo objectinfo(fdp.ConsumeIntegral<uint32_t>());
- MtpStorageInfo storageInfo(fdp.ConsumeIntegral<uint32_t>());
while (fdp.remaining_bytes()) {
auto mtpDeviceFunction = fdp.PickValueInArray<const std::function<void()>>(
{[&]() { mtpDevice.getStorageIDs(); },
@@ -190,6 +188,7 @@
},
[&]() { MtpDevice::open(deviceName.c_str(), fd); },
[&]() {
+ MtpObjectInfo objectinfo(fdp.ConsumeIntegral<uint32_t>() /* handle */);
MtpDataPacket mtpDataPacket;
MtpDevHandle devHandle;
std::vector<uint8_t> packet = fdp.ConsumeBytes<uint8_t>(kMaxBytes);
@@ -198,6 +197,7 @@
objectinfo.print();
},
[&]() {
+ MtpStorageInfo storageInfo(fdp.ConsumeIntegral<uint32_t>() /* id */);
MtpDataPacket mtpDataPacket;
MtpDevHandle devHandle;
std::vector<uint8_t> packet = fdp.ConsumeBytes<uint8_t>(kMaxBytes);
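
The fuzzer change above scopes MtpObjectInfo/MtpStorageInfo construction inside the lambdas so each dispatch parses into a freshly fuzzed object rather than reusing two objects built before the loop. The same pattern reduced to a generic sketch (Widget is an invented stand-in type, not MTP code):

    #include <fuzzer/FuzzedDataProvider.h>
    #include <cstdint>
    #include <functional>
    #include <vector>

    struct Widget {
        explicit Widget(uint32_t handle) : mHandle(handle) {}
        void parse(const std::vector<uint8_t>& bytes) { mParsed = bytes.size(); }
        uint32_t mHandle;
        size_t mParsed = 0;
    };

    extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
        FuzzedDataProvider fdp(data, size);
        while (fdp.remaining_bytes()) {
            auto op = fdp.PickValueInArray<const std::function<void()>>({
                    [&]() {
                        // Constructed per iteration: every run sees a fresh handle.
                        Widget w(fdp.ConsumeIntegral<uint32_t>());
                        w.parse(fdp.ConsumeBytes<uint8_t>(64));
                    },
            });
            op();
        }
        return 0;
    }
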
diff --git a/media/ndk/Android.bp b/media/ndk/Android.bp
index ddc71db..fded4f5 100644
--- a/media/ndk/Android.bp
+++ b/media/ndk/Android.bp
@@ -52,6 +52,9 @@
symbol_file: "libmediandk.map.txt",
first_version: "21",
unversioned_until: "current",
+ export_header_libs: [
+ "libmediandk_headers",
+ ],
}
ndk_headers {
@@ -167,7 +170,7 @@
stubs: {
symbol_file: "libmediandk.map.txt",
versions: ["29"],
- }
+ },
}
cc_library {
diff --git a/media/ndk/NdkImage.cpp b/media/ndk/NdkImage.cpp
index 0bdb41b..c2093ac 100644
--- a/media/ndk/NdkImage.cpp
+++ b/media/ndk/NdkImage.cpp
@@ -264,6 +264,13 @@
case HAL_PIXEL_FORMAT_YCrCb_420_SP:
*pixelStride = (planeIdx == 0) ? 1 : 2;
return AMEDIA_OK;
+ case HAL_PIXEL_FORMAT_YCBCR_P010:
+ if (mLockedBuffer->dataCb && mLockedBuffer->dataCr) {
+ *pixelStride = (planeIdx == 0) ? 2 : mLockedBuffer->chromaStep;
+ } else {
+ *pixelStride = (planeIdx == 0) ? 2 : 4;
+ }
+ return AMEDIA_OK;
case HAL_PIXEL_FORMAT_Y8:
*pixelStride = 1;
return AMEDIA_OK;
@@ -333,6 +340,13 @@
*rowStride = (planeIdx == 0) ? mLockedBuffer->stride
: ALIGN(mLockedBuffer->stride / 2, 16);
return AMEDIA_OK;
+ case HAL_PIXEL_FORMAT_YCBCR_P010:
+ if (mLockedBuffer->dataCb && mLockedBuffer->dataCr) {
+ *rowStride = (planeIdx == 0) ? mLockedBuffer->stride : mLockedBuffer->chromaStride;
+ } else {
+ *rowStride = mLockedBuffer->stride * 2;
+ }
+ return AMEDIA_OK;
case HAL_PIXEL_FORMAT_RAW10:
case HAL_PIXEL_FORMAT_RAW12:
// RAW10 and RAW12 are used for 10-bit and 12-bit raw data, they are single plane
@@ -490,6 +504,47 @@
: (planeIdx == 1) ? cb : cr;
dataSize = (planeIdx == 0) ? ySize : cSize;
break;
+ case HAL_PIXEL_FORMAT_YCBCR_P010:
+ if (mLockedBuffer->height % 2 != 0) {
+ ALOGE("YCBCR_P010: height (%d) should be a multiple of 2", mLockedBuffer->height);
+ return AMEDIA_ERROR_UNKNOWN;
+ }
+
+ if (mLockedBuffer->width <= 0) {
+ ALOGE("YCBCR_P010: width (%d) should be a > 0", mLockedBuffer->width);
+ return AMEDIA_ERROR_UNKNOWN;
+ }
+
+ if (mLockedBuffer->height <= 0) {
+ ALOGE("YCBCR_P010: height (%d) should be a > 0", mLockedBuffer->height);
+ return AMEDIA_ERROR_UNKNOWN;
+ }
+
+ if (mLockedBuffer->dataCb && mLockedBuffer->dataCr) {
+ pData = (planeIdx == 0) ? mLockedBuffer->data :
+ (planeIdx == 1) ? mLockedBuffer->dataCb : mLockedBuffer->dataCr;
+ // only map until last pixel
+ if (planeIdx == 0) {
+ cStride = mLockedBuffer->stride;
+ dataSize = cStride * (mLockedBuffer->height - 1) + mLockedBuffer->width * 2;
+ } else {
+ bytesPerPixel = mLockedBuffer->chromaStep;
+ cStride = mLockedBuffer->chromaStride;
+ dataSize = cStride * (mLockedBuffer->height / 2 - 1) +
+ bytesPerPixel * (mLockedBuffer->width / 2);
+ }
+ break;
+ }
+
+ cStride = mLockedBuffer->stride * 2;
+ ySize = cStride * mLockedBuffer->height;
+ cSize = ySize / 2;
+ cb = mLockedBuffer->data + ySize;
+ cr = cb + 2;
+
+ pData = (planeIdx == 0) ? mLockedBuffer->data : (planeIdx == 1) ? cb : cr;
+ dataSize = (planeIdx == 0) ? ySize : cSize;
+ break;
case HAL_PIXEL_FORMAT_Y8:
// Single plane, 8bpp.
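
In the fallback branch above (no separate chroma pointers reported by the locked buffer), P010 is treated as 16-bit luma rows followed by an interleaved CbCr plane at half vertical resolution. A standalone sanity check of those size formulas (values are illustrative):

    #include <cstdint>
    #include <cstdio>

    int main() {
        const int32_t width = 1920, height = 1080;
        const int32_t stride = width;                  // row stride in pixels
        const int64_t rowBytes = int64_t{stride} * 2;  // 2 bytes per 10-bit Y sample
        const int64_t ySize = rowBytes * height;       // cStride * height in the hunk
        const int64_t cSize = ySize / 2;               // interleaved CbCr plane
        printf("Y plane: %lld bytes, CbCr plane: %lld bytes\n",
               (long long)ySize, (long long)cSize);
        return 0;
    }
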
diff --git a/media/ndk/NdkImageReader.cpp b/media/ndk/NdkImageReader.cpp
index ac5cba8..067c8f4 100644
--- a/media/ndk/NdkImageReader.cpp
+++ b/media/ndk/NdkImageReader.cpp
@@ -76,6 +76,7 @@
case AIMAGE_FORMAT_HEIC:
case AIMAGE_FORMAT_DEPTH_JPEG:
case AIMAGE_FORMAT_RAW_DEPTH10:
+ case HAL_PIXEL_FORMAT_YCBCR_P010:
return true;
case AIMAGE_FORMAT_PRIVATE:
// For private format, cpu usage is prohibited.
@@ -89,6 +90,7 @@
AImageReader::getNumPlanesForFormat(int32_t format) {
switch (format) {
case AIMAGE_FORMAT_YUV_420_888:
+ case HAL_PIXEL_FORMAT_YCBCR_P010:
return 3;
case AIMAGE_FORMAT_RGBA_8888:
case AIMAGE_FORMAT_RGBX_8888:
diff --git a/media/ndk/NdkMediaCodec.cpp b/media/ndk/NdkMediaCodec.cpp
index ed31c02..b230df5 100644
--- a/media/ndk/NdkMediaCodec.cpp
+++ b/media/ndk/NdkMediaCodec.cpp
@@ -64,6 +64,10 @@
if (untranslated.find(err) == untranslated.end()) {
ALOGE("untranslated sf error code: %d", err);
+ char err_as_string[32];
+ snprintf(err_as_string, sizeof(err_as_string), "%d", err);
+ android_errorWriteWithInfoLog(0x534e4554, "224869524", -1,
+ err_as_string, strlen(err_as_string));
untranslated.insert(err);
}
}
diff --git a/media/ndk/NdkMediaMuxer.cpp b/media/ndk/NdkMediaMuxer.cpp
index 1965e62..9d62884 100644
--- a/media/ndk/NdkMediaMuxer.cpp
+++ b/media/ndk/NdkMediaMuxer.cpp
@@ -46,7 +46,7 @@
if (mData == nullptr) {
return nullptr;
}
- mData->mImpl = new (std::nothrow) MediaMuxer(fd, (android::MediaMuxer::OutputFormat)format);
+ mData->mImpl = MediaMuxer::create(fd, (MediaMuxer::OutputFormat)format);
if (mData->mImpl == nullptr) {
delete mData;
return nullptr;
diff --git a/media/ndk/fuzzer/NdkMediaCodecFuzzerBase.cpp b/media/ndk/fuzzer/NdkMediaCodecFuzzerBase.cpp
index c7ce950..fa81cd8 100644
--- a/media/ndk/fuzzer/NdkMediaCodecFuzzerBase.cpp
+++ b/media/ndk/fuzzer/NdkMediaCodecFuzzerBase.cpp
@@ -264,11 +264,11 @@
}
}
-AMediaFormat* NdkMediaCodecFuzzerBase::getSampleCodecFormat() {
- AMediaFormat* format = AMediaFormat_new();
+void NdkMediaCodecFuzzerBase::setCodecFormat() {
std::string value;
int32_t count = 0;
int32_t maxFormatKeys = 0;
+ AMediaFormat_clear(mFormat);
/*set mimeType*/
if (mFdp->ConsumeBool()) {
@@ -277,37 +277,36 @@
value = mFdp->PickValueInArray(kMimeTypes);
}
if (mFdp->ConsumeBool()) {
- AMediaFormat_setString(format, AMEDIAFORMAT_KEY_MIME, value.c_str());
+ AMediaFormat_setString(mFormat, AMEDIAFORMAT_KEY_MIME, value.c_str());
}
maxFormatKeys = mFdp->ConsumeIntegralInRange<int32_t>(0, std::size(kFormatStringKeys));
for (count = 0; count < maxFormatKeys; ++count) {
std::string formatKey = mFdp->PickValueInArray(kFormatStringKeys);
- formatSetString(format, formatKey.c_str(), mFdp);
+ formatSetString(mFormat, formatKey.c_str(), mFdp);
}
maxFormatKeys = mFdp->ConsumeIntegralInRange<int32_t>(0, std::size(kFormatIntKeys));
for (count = 0; count < maxFormatKeys; ++count) {
std::string formatKey = mFdp->PickValueInArray(kFormatIntKeys);
- formatSetInt(format, formatKey.c_str(), mFdp);
+ formatSetInt(mFormat, formatKey.c_str(), mFdp);
}
maxFormatKeys = mFdp->ConsumeIntegralInRange<int32_t>(0, std::size(kFormatFloatKeys));
for (count = 0; count < maxFormatKeys; ++count) {
std::string formatKey = mFdp->PickValueInArray(kFormatFloatKeys);
- formatSetFloat(format, formatKey.c_str(), mFdp);
+ formatSetFloat(mFormat, formatKey.c_str(), mFdp);
}
maxFormatKeys = mFdp->ConsumeIntegralInRange<int32_t>(0, std::size(kFormatBufferKeys));
for (count = 0; count < maxFormatKeys; ++count) {
std::string formatKey = mFdp->PickValueInArray(kFormatBufferKeys);
- formatSetBuffer(format, formatKey.c_str(), mFdp);
+ formatSetBuffer(mFormat, formatKey.c_str(), mFdp);
}
- return format;
}
AMediaCodec* NdkMediaCodecFuzzerBase::createCodec(bool isEncoder, bool isCodecForClient) {
- mFormat = getSampleCodecFormat();
+ setCodecFormat();
return (mFdp->ConsumeBool() ? createAMediaCodecByname(isEncoder, isCodecForClient)
: createAMediaCodecByType(isEncoder, isCodecForClient));
}
diff --git a/media/ndk/fuzzer/NdkMediaCodecFuzzerBase.h b/media/ndk/fuzzer/NdkMediaCodecFuzzerBase.h
index 42ef6ea..2875f9f 100644
--- a/media/ndk/fuzzer/NdkMediaCodecFuzzerBase.h
+++ b/media/ndk/fuzzer/NdkMediaCodecFuzzerBase.h
@@ -39,6 +39,7 @@
class NdkMediaCodecFuzzerBase {
public:
+ NdkMediaCodecFuzzerBase() { mFormat = AMediaFormat_new(); }
void invokeCodecFormatAPI(AMediaCodec* codec);
void invokeInputBufferOperationAPI(AMediaCodec* codec);
void invokeOutputBufferOperationAPI(AMediaCodec* codec);
@@ -46,13 +47,18 @@
AMediaCodec* createCodec(bool isEncoder, bool isCodecForClient);
AMediaFormat* getCodecFormat() { return mFormat; };
void setFdp(FuzzedDataProvider* fdp) { mFdp = fdp; }
+ ~NdkMediaCodecFuzzerBase() {
+ if (mFormat) {
+ AMediaFormat_delete(mFormat);
+ }
+ }
private:
AMediaCodec* createAMediaCodecByname(bool isEncoder, bool isCodecForClient);
AMediaCodec* createAMediaCodecByType(bool isEncoder, bool isCodecForClient);
AMediaFormat* getSampleAudioFormat();
AMediaFormat* getSampleVideoFormat();
- AMediaFormat* getSampleCodecFormat();
+ void setCodecFormat();
AMediaFormat* mFormat = nullptr;
FuzzedDataProvider* mFdp = nullptr;
};
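
mFormat is now owned by the fuzzer base for its whole lifetime: allocated in the constructor, reset in setCodecFormat(), and released in the destructor, which is why the sync-codec fuzzer below stops deleting the format it borrows. The ownership shape in miniature (Handle and its free functions are invented purely for illustration):

    struct Handle { int value = 0; };
    static Handle* handle_new() { return new Handle; }
    static void handle_clear(Handle* h) { h->value = 0; }
    static void handle_delete(Handle* h) { delete h; }

    class HandleOwner {
    public:
        HandleOwner() : mHandle(handle_new()) {}
        ~HandleOwner() { if (mHandle) handle_delete(mHandle); }
        HandleOwner(const HandleOwner&) = delete;
        HandleOwner& operator=(const HandleOwner&) = delete;
        Handle* get() const { return mHandle; }  // borrowed pointer, do not delete
        void reset() { handle_clear(mHandle); }  // reuse the same allocation
    private:
        Handle* mHandle = nullptr;
    };
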
diff --git a/media/ndk/fuzzer/ndk_sync_codec_fuzzer.cpp b/media/ndk/fuzzer/ndk_sync_codec_fuzzer.cpp
index a3f3650..d348f66 100644
--- a/media/ndk/fuzzer/ndk_sync_codec_fuzzer.cpp
+++ b/media/ndk/fuzzer/ndk_sync_codec_fuzzer.cpp
@@ -41,14 +41,13 @@
void NdkSyncCodecFuzzer::invokekSyncCodecAPIs(bool isEncoder) {
ANativeWindow* nativeWindow = nullptr;
- AMediaFormat* format = getCodecFormat();
int32_t numOfFrames = mFdp.ConsumeIntegralInRange<size_t>(kMinIterations, kMaxIterations);
int32_t count = 0;
while (++count <= numOfFrames) {
int32_t ndkcodecAPI = mFdp.ConsumeIntegralInRange<size_t>(kMinAPICase, kMaxNdkCodecAPIs);
switch (ndkcodecAPI) {
case 0: { // configure the codec
- AMediaCodec_configure(mCodec, format, nativeWindow, nullptr /* crypto */,
+ AMediaCodec_configure(mCodec, getCodecFormat(), nativeWindow, nullptr /* crypto */,
(isEncoder ? AMEDIACODEC_CONFIGURE_FLAG_ENCODE : 0));
break;
}
@@ -119,9 +118,6 @@
if (nativeWindow) {
ANativeWindow_release(nativeWindow);
}
- if (format) {
- AMediaFormat_delete(format);
- }
}
void NdkSyncCodecFuzzer::invokeSyncCodeConfigAPI() {
diff --git a/media/tests/benchmark/MediaBenchmarkTest/src/androidTest/java/com/android/media/benchmark/tests/MuxerTest.java b/media/tests/benchmark/MediaBenchmarkTest/src/androidTest/java/com/android/media/benchmark/tests/MuxerTest.java
index 21ba957..7fd2752 100644
--- a/media/tests/benchmark/MediaBenchmarkTest/src/androidTest/java/com/android/media/benchmark/tests/MuxerTest.java
+++ b/media/tests/benchmark/MediaBenchmarkTest/src/androidTest/java/com/android/media/benchmark/tests/MuxerTest.java
@@ -60,14 +60,12 @@
private static final String mStatsFile =
mContext.getExternalFilesDir(null) + "/Muxer." + System.currentTimeMillis() + ".csv";
private static final String TAG = "MuxerTest";
- private static final Map<String, Integer> mMapFormat = new Hashtable<String, Integer>() {
- {
- put("mp4", MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
- put("webm", MediaMuxer.OutputFormat.MUXER_OUTPUT_WEBM);
- put("3gpp", MediaMuxer.OutputFormat.MUXER_OUTPUT_3GPP);
- put("ogg", MediaMuxer.OutputFormat.MUXER_OUTPUT_OGG);
- }
- };
+ private static final Map<String, Integer> mMapFormat = Map.of(
+ "mp4", MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4,
+ "webm", MediaMuxer.OutputFormat.MUXER_OUTPUT_WEBM,
+ "3gpp", MediaMuxer.OutputFormat.MUXER_OUTPUT_3GPP,
+ "ogg", MediaMuxer.OutputFormat.MUXER_OUTPUT_OGG);
+
private String mInputFileName;
private String mFormat;
diff --git a/media/tests/benchmark/MediaBenchmarkTest/src/main/cpp/NativeDecoder.cpp b/media/tests/benchmark/MediaBenchmarkTest/src/main/cpp/NativeDecoder.cpp
index 043bc9e..6ea954c 100644
--- a/media/tests/benchmark/MediaBenchmarkTest/src/main/cpp/NativeDecoder.cpp
+++ b/media/tests/benchmark/MediaBenchmarkTest/src/main/cpp/NativeDecoder.cpp
@@ -79,7 +79,6 @@
vector<AMediaCodecBufferInfo> frameInfo;
AMediaCodecBufferInfo info;
uint32_t inputBufferOffset = 0;
-
// Get frame data
while (1) {
status = extractor->getFrameSample(info);
@@ -110,7 +109,7 @@
const char *statsFile = env->GetStringUTFChars(jStatsFile, nullptr);
string sInputReference = string(inputReference);
decoder->dumpStatistics(sInputReference, sCodecName, (asyncMode ? "async" : "sync"),
- statsFile);
+ (statsFile == nullptr ? "" : statsFile));
env->ReleaseStringUTFChars(jCodecName, codecName);
env->ReleaseStringUTFChars(jStatsFile, statsFile);
env->ReleaseStringUTFChars(jFileName, inputReference);
diff --git a/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/Decoder.java b/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/Decoder.java
index 66fee33..c2ed82c 100644
--- a/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/Decoder.java
+++ b/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/Decoder.java
@@ -49,7 +49,7 @@
private FileOutputStream mOutputStream;
public Decoder() { mStats = new Stats(); }
-
+ public Stats getStats() { return mStats; }
/**
* Setup of decoder
*
diff --git a/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/Stats.java b/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/Stats.java
index 7245a3a..0ebf798 100644
--- a/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/Stats.java
+++ b/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/Stats.java
@@ -33,7 +33,17 @@
private long mDeInitTimeNs;
private long mStartTimeNs;
private ArrayList<Integer> mFrameSizes;
+ /*
+ * Array for holding the wallclock time
+ * for each input buffer available.
+ */
private ArrayList<Long> mInputTimer;
+ /*
+ * Array for holding the wallclock time
+ * for each output buffer available.
+ * This is used for determining the decoded
+ * frame intervals.
+ */
private ArrayList<Long> mOutputTimer;
public Stats() {
@@ -76,9 +86,15 @@
public long getDeInitTime() { return mDeInitTimeNs; }
+ public long getStartTime() { return mStartTimeNs; }
+
+ public ArrayList<Long> getOutputTimers() { return mOutputTimer; }
+
+ public ArrayList<Long> getInputTimers() { return mInputTimer; }
+
public long getTimeDiff(long sTime, long eTime) { return (eTime - sTime); }
- private long getTotalTime() {
+ public long getTotalTime() {
if (mOutputTimer.size() == 0) {
return -1;
}
@@ -86,7 +102,7 @@
return lastTime - mStartTimeNs;
}
- private long getTotalSize() {
+ public long getTotalSize() {
long totalSize = 0;
for (long size : mFrameSizes) {
totalSize += size;
diff --git a/media/tests/benchmark/src/native/common/Stats.cpp b/media/tests/benchmark/src/native/common/Stats.cpp
index bfde125..d55a22d 100644
--- a/media/tests/benchmark/src/native/common/Stats.cpp
+++ b/media/tests/benchmark/src/native/common/Stats.cpp
@@ -35,13 +35,18 @@
* \param mode the operating mode: sync/async.
* \param statsFile the file where the stats data is to be written.
*/
-void Stats::dumpStatistics(string operation, string inputReference, int64_t durationUs,
- string componentName, string mode, string statsFile) {
+void Stats::dumpStatistics(const string& operation, const string& inputReference,
+ int64_t durationUs, const string& componentName,
+ const string& mode, const string& statsFile) {
ALOGV("In %s", __func__);
if (!mOutputTimer.size()) {
ALOGE("No output produced");
return;
}
+ if (statsFile.empty()) {
+ return uploadMetrics(operation, inputReference, durationUs, componentName,
+ mode);
+ }
nsecs_t totalTimeTakenNs = getTotalTime();
nsecs_t timeTakenPerSec = (totalTimeTakenNs * 1000000) / durationUs;
nsecs_t timeToFirstFrameNs = *mOutputTimer.begin() - mStartTimeNs;
@@ -87,3 +92,67 @@
out << rowData;
out.close();
}
+
+/**
+ * Dumps the stats of the operation for a given input media to a listener.
+ *
+ * \param operation describes the operation performed on the input media
+ * (i.e. extract/mux/decode/encode)
+ * \param inputReference input media
+ * \param durationUs duration of the input media in microseconds.
+ * \param componentName describes the codecName/muxFormat/mimeType.
+ * \param mode the operating mode: sync/async.
+ *
+ */
+
+#define LOG_METRIC(...) \
+ __android_log_print(ANDROID_LOG_INFO, "ForTimingCollector", __VA_ARGS__)
+
+void Stats::uploadMetrics(const string& operation, const string& inputReference,
+ const int64_t& durationUs, const string& componentName,
+ const string& mode) {
+
+ ALOGV("In %s", __func__);
+ (void)durationUs;
+ (void)componentName;
+ if (!mOutputTimer.size()) {
+ ALOGE("No output produced");
+ return;
+ }
+ nsecs_t totalTimeTakenNs = getTotalTime();
+ nsecs_t timeToFirstFrameNs = *mOutputTimer.begin() - mStartTimeNs;
+ int32_t size = std::accumulate(mFrameSizes.begin(), mFrameSizes.end(), 0);
+ // get min and max output intervals.
+ nsecs_t intervalNs;
+ nsecs_t minTimeTakenNs = INT64_MAX;
+ nsecs_t maxTimeTakenNs = 0;
+ nsecs_t prevIntervalNs = mStartTimeNs;
+ for (int32_t idx = 0; idx < mOutputTimer.size() - 1; idx++) {
+ intervalNs = mOutputTimer.at(idx) - prevIntervalNs;
+ prevIntervalNs = mOutputTimer.at(idx);
+ if (minTimeTakenNs > intervalNs) minTimeTakenNs = intervalNs;
+ else if (maxTimeTakenNs < intervalNs) maxTimeTakenNs = intervalNs;
+ }
+
+ // Write the stats data to file.
+ int64_t dataSize = size;
+ int64_t bytesPerSec = ((int64_t)dataSize * 1000000000) / totalTimeTakenNs;
+ (void)mode;
+ (void)operation;
+ (void)inputReference;
+ string prefix = "CodecStats_NativeDec";
+ prefix.append("_").append(componentName);
+ // Reports the time taken to initialize the codec.
+ LOG_METRIC("%s_CodecInitTimeNs:%lld", prefix.c_str(), (long long)mInitTimeNs);
+ // Reports the time taken to free the codec.
+ LOG_METRIC("%s_CodecDeInitTimeNs:%lld", prefix.c_str(), (long long)mDeInitTimeNs);
+ // Reports the min time taken between output frames from the codec
+ LOG_METRIC("%s_CodecMinTimeNs:%lld", prefix.c_str(), (long long)minTimeTakenNs);
+ // Reports the max time between the output frames from the codec
+ LOG_METRIC("%s_CodecMaxTimeNs:%lld", prefix.c_str(), (long long)maxTimeTakenNs);
+ // Reports the raw throughput (bytes/sec) of the codec for the entire media
+ LOG_METRIC("%s_ProcessedBytesPerSec:%lld", prefix.c_str(), (long long)bytesPerSec);
+ // Reports the time taken to get the first frame from the codec
+ LOG_METRIC("%s_TimeforFirstFrame:%lld", prefix.c_str(), (long long)timeToFirstFrameNs);
+
+}
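
uploadMetrics() above reduces the output-timer samples to min/max inter-frame gaps plus overall bytes/sec and prints them for a log-scraping collector. A simplified, standalone version of the same arithmetic that walks every gap between consecutive samples:

    #include <algorithm>
    #include <cstdint>
    #include <cstdio>
    #include <vector>

    int main() {
        // Illustrative wallclock timestamps (ns) of output buffers, plus start time.
        const std::vector<int64_t> outputNs = {100, 260, 390, 700};
        const int64_t startNs = 0;
        int64_t minGap = INT64_MAX, maxGap = 0, prev = startNs;
        for (int64_t t : outputNs) {
            const int64_t gap = t - prev;
            prev = t;
            minGap = std::min(minGap, gap);
            maxGap = std::max(maxGap, gap);
        }
        const int64_t totalNs = outputNs.back() - startNs;
        const int64_t bytes = 4096;  // total decoded bytes, illustrative
        printf("min %lld ns, max %lld ns, %lld bytes/sec\n", (long long)minGap,
               (long long)maxGap, (long long)(bytes * 1000000000 / totalNs));
        return 0;
    }
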
diff --git a/media/tests/benchmark/src/native/common/Stats.h b/media/tests/benchmark/src/native/common/Stats.h
index 18e4b06..0ba511f 100644
--- a/media/tests/benchmark/src/native/common/Stats.h
+++ b/media/tests/benchmark/src/native/common/Stats.h
@@ -102,8 +102,12 @@
return (*(mOutputTimer.end() - 1) - mStartTimeNs);
}
- void dumpStatistics(string operation, string inputReference, int64_t duarationUs,
- string codecName = "", string mode = "", string statsFile = "");
-};
+ void dumpStatistics(const string& operation, const string& inputReference,
+ int64_t durationUs, const string& componentName = "",
+ const string& mode = "", const string& statsFile = "");
+ void uploadMetrics(const string& operation, const string& inputReference,
+ const int64_t& durationUs, const string& componentName = "",
+ const string& mode = "");
+};
#endif // __STATS_H__
diff --git a/media/utils/Android.bp b/media/utils/Android.bp
index 04d9ed9..698752f 100644
--- a/media/utils/Android.bp
+++ b/media/utils/Android.bp
@@ -28,19 +28,35 @@
"-Werror",
"-Wextra",
],
+ sanitize: {
+ cfi: true,
+ integer_overflow: true,
+ },
+ target: {
+ host: {
+ sanitize: {
+ cfi: false,
+ },
+ },
+ },
}
filegroup {
name: "libmediautils_core_srcs",
srcs: [
"AImageReaderUtils.cpp",
+ "BatteryNotifier.cpp",
"ISchedulingPolicyService.cpp",
"Library.cpp",
"MediaUtilsDelayed.cpp",
"MethodStatistics.cpp",
"Process.cpp",
+ "ProcessInfo.cpp",
"SchedulingPolicyService.cpp",
+ "ServiceUtilities.cpp",
"ThreadSnapshot.cpp",
+ "TimeCheck.cpp",
+ "TimerThread.cpp",
],
}
@@ -55,47 +71,15 @@
export_include_dirs: ["include"],
}
-cc_library {
- name: "libmediautils_core",
- defaults: ["libmediautils_defaults"],
- host_supported: true,
- srcs: [":libmediautils_core_srcs"],
- shared_libs: [
- "libaudioutils", // for clock.h, Statistics.h
- "libbase",
- "libbinder",
- "libhidlbase",
- "liblog",
- "libpermission",
- "libutils",
- "android.hardware.graphics.bufferqueue@1.0",
- "android.hidl.token@1.0-utils",
- "packagemanager_aidl-cpp",
- ],
-
- export_shared_lib_headers: [
- "libpermission",
- ],
-
- local_include_dirs: ["include"],
- export_include_dirs: ["include"],
-}
cc_library {
name: "libmediautils",
+ host_supported: true,
defaults: ["libmediautils_defaults"],
srcs: [
":libmediautils_core_srcs",
- "BatteryNotifier.cpp",
- "MemoryLeakTrackUtil.cpp",
- "LimitProcessMemory.cpp",
- "ProcessInfo.cpp",
- "ServiceUtilities.cpp",
- "TimeCheck.cpp",
- "TimerThread.cpp",
],
static_libs: [
- "libc_malloc_debug_backtrace",
"libbatterystats_aidl",
"libprocessinfoservice_aidl",
],
@@ -119,11 +103,6 @@
logtags: ["EventLogTags.logtags"],
- header_libs: [
- "bionic_libc_platform_headers",
- "libmedia_headers",
- ],
-
export_shared_lib_headers: [
"libpermission",
],
@@ -132,16 +111,32 @@
"libmediautils_delayed", // lazy loaded
],
- include_dirs: [
- // For DEBUGGER_SIGNAL
- "system/core/debuggerd/include",
- ],
+ target: {
+ android: {
+ srcs: [
+ "LimitProcessMemory.cpp",
+ "MemoryLeakTrackUtil.cpp",
+ ],
+ static_libs: [
+ "libc_malloc_debug_backtrace",
+ ],
+ include_dirs: [
+ // For DEBUGGER_SIGNAL
+ "system/core/debuggerd/include",
+ ],
+ header_libs: [
+ "bionic_libc_platform_headers",
+ ],
+ },
+ },
+
local_include_dirs: ["include"],
export_include_dirs: ["include"],
}
cc_library {
name: "libmediautils_delayed", // match with MEDIAUTILS_DELAYED_LIBRARY_NAME
+ host_supported: true,
defaults: ["libmediautils_defaults"],
srcs: [
"MediaUtilsDelayedLibrary.cpp",
diff --git a/media/utils/MemoryLeakTrackUtil.cpp b/media/utils/MemoryLeakTrackUtil.cpp
index fdb8c4f..7451033 100644
--- a/media/utils/MemoryLeakTrackUtil.cpp
+++ b/media/utils/MemoryLeakTrackUtil.cpp
@@ -33,10 +33,8 @@
#define ABI_STRING "arm"
#elif defined(__aarch64__)
#define ABI_STRING "arm64"
-#elif defined(__mips__) && !defined(__LP64__)
-#define ABI_STRING "mips"
-#elif defined(__mips__) && defined(__LP64__)
-#define ABI_STRING "mips64"
+#elif defined(__riscv)
+#define ABI_STRING "riscv64"
#elif defined(__i386__)
#define ABI_STRING "x86"
#elif defined(__x86_64__)
diff --git a/media/utils/ProcessInfo.cpp b/media/utils/ProcessInfo.cpp
index da199c4..3baa4b4 100644
--- a/media/utils/ProcessInfo.cpp
+++ b/media/utils/ProcessInfo.cpp
@@ -18,7 +18,7 @@
#define LOG_TAG "ProcessInfo"
#include <utils/Log.h>
-#include <media/stagefright/ProcessInfo.h>
+#include <mediautils/ProcessInfo.h>
#include <binder/IPCThreadState.h>
#include <binder/IServiceManager.h>
diff --git a/media/utils/TimeCheck.cpp b/media/utils/TimeCheck.cpp
index 6823f4f..0cf5bd9 100644
--- a/media/utils/TimeCheck.cpp
+++ b/media/utils/TimeCheck.cpp
@@ -14,20 +14,36 @@
* limitations under the License.
*/
+#include <csignal>
+#include "mediautils/TimerThread.h"
#define LOG_TAG "TimeCheck"
#include <optional>
#include <android-base/logging.h>
+#include <android-base/strings.h>
#include <audio_utils/clock.h>
#include <mediautils/EventLog.h>
#include <mediautils/FixedString.h>
#include <mediautils/MethodStatistics.h>
#include <mediautils/TimeCheck.h>
+#include <mediautils/TidWrapper.h>
#include <utils/Log.h>
+
+#if defined(__ANDROID__)
#include "debuggerd/handler.h"
+#endif
+
namespace android::mediautils {
+// This function appropriately signals a pid to dump a backtrace if we are
+// running on device (and the HAL exists). If we are not running on an Android
+// device, there is no HAL to signal (so we do nothing).
+static inline void signalAudioHAL([[maybe_unused]] pid_t pid) {
+#if defined(__ANDROID__)
+ sigqueue(pid, DEBUGGER_SIGNAL, {.sival_int = 0});
+#endif
+}
/**
* Returns the std::string "HH:MM:SS.MSc" from a system_clock time_point.
@@ -136,14 +152,14 @@
std::string TimeCheck::toString() {
// note pending and retired are individually locked for maximum concurrency,
// snapshot is not instantaneous at a single time.
- return getTimeCheckThread().toString();
+ return getTimeCheckThread().getSnapshotAnalysis().toString();
}
TimeCheck::TimeCheck(std::string_view tag, OnTimerFunc&& onTimer, Duration requestedTimeoutDuration,
Duration secondChanceDuration, bool crashOnTimeout)
: mTimeCheckHandler{ std::make_shared<TimeCheckHandler>(
tag, std::move(onTimer), crashOnTimeout, requestedTimeoutDuration,
- secondChanceDuration, std::chrono::system_clock::now(), gettid()) }
+ secondChanceDuration, std::chrono::system_clock::now(), getThreadIdWrapper()) }
, mTimerHandle(requestedTimeoutDuration.count() == 0
/* for TimeCheck we don't consider a non-zero secondChanceDuration here */
? getTimeCheckThread().trackTask(mTimeCheckHandler->tag)
@@ -241,7 +257,7 @@
// Generate the TimerThread summary string early before sending signals to the
// HAL processes which can affect thread behavior.
- const std::string summary = getTimeCheckThread().toString(4 /* retiredCount */);
+ const auto snapshotAnalysis = getTimeCheckThread().getSnapshotAnalysis(4 /* retiredCount */);
// Generate audio HAL processes tombstones and allow time to complete
// before forcing restart
@@ -251,7 +267,7 @@
for (const auto& pid : pids) {
ALOGI("requesting tombstone for pid: %d", pid);
halPids.append(std::to_string(pid)).append(" ");
- sigqueue(pid, DEBUGGER_SIGNAL, {.sival_int = 0});
+ signalAudioHAL(pid);
}
sleep(1);
} else {
@@ -269,7 +285,7 @@
.append(analyzeTimeouts(requestedTimeoutMs + secondChanceMs,
elapsedSteadyMs, elapsedSystemMs)).append("\n")
.append(halPids).append("\n")
- .append(summary);
+ .append(snapshotAnalysis.toString());
// Note: LOG_ALWAYS_FATAL limits the size of the string - per log/log.h:
// Log message text may be truncated to less than an
@@ -279,7 +295,20 @@
// to avoid the size limitation. LOG(FATAL) does an abort whereas
// LOG(FATAL_WITHOUT_ABORT) does not abort.
- LOG(FATAL) << abortMessage;
+ static constexpr pid_t invalidPid = TimerThread::SnapshotAnalysis::INVALID_PID;
+ pid_t tidToAbort = invalidPid;
+ if (snapshotAnalysis.suspectTid != invalidPid) {
+ tidToAbort = snapshotAnalysis.suspectTid;
+ } else if (snapshotAnalysis.timeoutTid != invalidPid) {
+ tidToAbort = snapshotAnalysis.timeoutTid;
+ }
+
+ LOG(FATAL_WITHOUT_ABORT) << abortMessage;
+ const auto ret = abortTid(tidToAbort);
+ if (ret < 0) {
+ LOG(FATAL) << "TimeCheck thread signal failed, aborting process. "
+ "errno: " << errno << base::ErrnoNumberAsString(errno);
+ }
}
// Automatically create a TimeCheck class for a class and method.
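
The fatal path above now picks a specific thread (the suspect first, otherwise the one that timed out) and aborts it via abortTid so the tombstone's crashing stack is the blocked thread rather than the watchdog. The real helper comes from mediautils (see the TidWrapper.h include); this is only a hedged sketch of the underlying Linux mechanism:

    #include <csignal>
    #include <sys/syscall.h>
    #include <sys/types.h>
    #include <unistd.h>

    // Deliver SIGABRT to a single thread of the current process.
    // Returns -1 (with errno set) on failure; the caller then falls back to
    // aborting the whole process, as the hunk above does.
    static int abortThread(pid_t tid) {
        return static_cast<int>(syscall(SYS_tgkill, getpid(), tid, SIGABRT));
    }
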
diff --git a/media/utils/TimerThread.cpp b/media/utils/TimerThread.cpp
index 5e58a3d..b760ee2 100644
--- a/media/utils/TimerThread.cpp
+++ b/media/utils/TimerThread.cpp
@@ -22,6 +22,7 @@
#include <vector>
#include <mediautils/MediaUtilsDelayed.h>
+#include <mediautils/TidWrapper.h>
#include <mediautils/TimerThread.h>
#include <utils/Log.h>
#include <utils/ThreadDefs.h>
@@ -39,14 +40,14 @@
const auto now = std::chrono::system_clock::now();
auto request = std::make_shared<const Request>(now, now +
std::chrono::duration_cast<std::chrono::system_clock::duration>(timeoutDuration),
- secondChanceDuration, gettid(), tag);
+ secondChanceDuration, getThreadIdWrapper(), tag);
return mMonitorThread.add(std::move(request), std::move(func), timeoutDuration);
}
TimerThread::Handle TimerThread::trackTask(std::string_view tag) {
const auto now = std::chrono::system_clock::now();
auto request = std::make_shared<const Request>(now, now,
- Duration{} /* secondChanceDuration */, gettid(), tag);
+ Duration{} /* secondChanceDuration */, getThreadIdWrapper(), tag);
return mNoTimeoutMap.add(std::move(request));
}
@@ -58,39 +59,29 @@
return true;
}
-std::string TimerThread::toString(size_t retiredCount) const {
+
+std::string TimerThread::SnapshotAnalysis::toString() const {
// Note: These request queues are snapshot very close together but
// not at "identical" times as we don't use a class-wide lock.
-
- std::vector<std::shared_ptr<const Request>> timeoutRequests;
- std::vector<std::shared_ptr<const Request>> retiredRequests;
- mTimeoutQueue.copyRequests(timeoutRequests);
- mRetiredQueue.copyRequests(retiredRequests, retiredCount);
- std::vector<std::shared_ptr<const Request>> pendingRequests =
- getPendingRequests();
-
- struct Analysis analysis = analyzeTimeout(timeoutRequests, pendingRequests);
- std::string analysisSummary;
- if (!analysis.summary.empty()) {
- analysisSummary = std::string("\nanalysis [ ").append(analysis.summary).append(" ]");
- }
+ std::string analysisSummary = std::string("\nanalysis [ ").append(description).append(" ]");
std::string timeoutStack;
- if (analysis.timeoutTid != -1) {
- timeoutStack = std::string("\ntimeout(")
- .append(std::to_string(analysis.timeoutTid)).append(") callstack [\n")
- .append(getCallStackStringForTid(analysis.timeoutTid)).append("]");
- }
std::string blockedStack;
- if (analysis.HALBlockedTid != -1) {
+ if (timeoutTid != -1) {
+ timeoutStack = std::string(suspectTid == timeoutTid ? "\ntimeout/blocked(" : "\ntimeout(")
+ .append(std::to_string(timeoutTid)).append(") callstack [\n")
+ .append(getCallStackStringForTid(timeoutTid)).append("]");
+ }
+
+ if (suspectTid != -1 && suspectTid != timeoutTid) {
blockedStack = std::string("\nblocked(")
- .append(std::to_string(analysis.HALBlockedTid)).append(") callstack [\n")
- .append(getCallStackStringForTid(analysis.HALBlockedTid)).append("]");
+ .append(std::to_string(suspectTid)).append(") callstack [\n")
+ .append(getCallStackStringForTid(suspectTid)).append("]");
}
return std::string("now ")
.append(formatTime(std::chrono::system_clock::now()))
.append("\nsecondChanceCount ")
- .append(std::to_string(mMonitorThread.getSecondChanceCount()))
+ .append(std::to_string(secondChanceCount))
.append(analysisSummary)
.append("\ntimeout [ ")
.append(requestsToString(timeoutRequests))
@@ -120,16 +111,23 @@
return separatorPos != std::string::npos;
}
-/* static */
-struct TimerThread::Analysis TimerThread::analyzeTimeout(
- const std::vector<std::shared_ptr<const Request>>& timeoutRequests,
- const std::vector<std::shared_ptr<const Request>>& pendingRequests) {
-
- if (timeoutRequests.empty() || pendingRequests.empty()) return {}; // nothing to say.
-
+struct TimerThread::SnapshotAnalysis TimerThread::getSnapshotAnalysis(size_t retiredCount) const {
+ struct SnapshotAnalysis analysis{};
+ // The following snapshot of the TimerThread state will be utilized for
+ // analysis. Note, there is no lock around these calls, so there could be
+ // a state update between them.
+ mTimeoutQueue.copyRequests(analysis.timeoutRequests);
+ mRetiredQueue.copyRequests(analysis.retiredRequests, retiredCount);
+ analysis.pendingRequests = getPendingRequests();
+ analysis.secondChanceCount = mMonitorThread.getSecondChanceCount();
+ // No call has timed out, so there is no analysis to be done.
+ if (analysis.timeoutRequests.empty())
+ return analysis;
// for now look at last timeout (in our case, the only timeout)
- const std::shared_ptr<const Request> timeout = timeoutRequests.back();
-
+ const std::shared_ptr<const Request> timeout = analysis.timeoutRequests.back();
+ analysis.timeoutTid = timeout->tid;
+ if (analysis.pendingRequests.empty())
+ return analysis;
// pending Requests that are problematic.
std::vector<std::shared_ptr<const Request>> pendingExact;
std::vector<std::shared_ptr<const Request>> pendingPossible;
@@ -140,7 +138,7 @@
// such as HAL write() and read().
//
constexpr Duration kPendingDuration = 1000ms;
- for (const auto& pending : pendingRequests) {
+ for (const auto& pending : analysis.pendingRequests) {
// If the pending tid is the same as timeout tid, problem identified.
if (pending->tid == timeout->tid) {
pendingExact.emplace_back(pending);
@@ -153,29 +151,27 @@
}
}
- struct Analysis analysis{};
-
- analysis.timeoutTid = timeout->tid;
- std::string& summary = analysis.summary;
+ std::string& description = analysis.description;
if (!pendingExact.empty()) {
const auto& request = pendingExact.front();
const bool hal = isRequestFromHal(request);
if (hal) {
- summary = std::string("Blocked directly due to HAL call: ")
+ description = std::string("Blocked directly due to HAL call: ")
.append(request->toString());
+ analysis.suspectTid = request->tid;
}
}
- if (summary.empty() && !pendingPossible.empty()) {
+ if (description.empty() && !pendingPossible.empty()) {
for (const auto& request : pendingPossible) {
const bool hal = isRequestFromHal(request);
if (hal) {
// The first blocked call is the most likely one.
// Recent calls might be temporarily blocked
// calls such as write() or read() depending on kDuration.
- summary = std::string("Blocked possibly due to HAL call: ")
+ description = std::string("Blocked possibly due to HAL call: ")
.append(request->toString());
- analysis.HALBlockedTid = request->tid;
+ analysis.suspectTid = request->tid;
}
}
}
diff --git a/media/utils/fuzzers/Android.bp b/media/utils/fuzzers/Android.bp
index fc4c2f9..bd9a462 100644
--- a/media/utils/fuzzers/Android.bp
+++ b/media/utils/fuzzers/Android.bp
@@ -14,7 +14,7 @@
"libbinder",
"liblog",
"libcutils",
- "libmediautils_core",
+ "libmediautils",
"libutils",
"framework-permission-aidl-cpp",
"packagemanager_aidl-cpp",
@@ -30,32 +30,24 @@
cc_fuzz {
name: "libmediautils_fuzzer_battery_notifier",
- host_supported: false,
- shared_libs: ["libmediautils"],
defaults: ["libmediautils_fuzzer_defaults"],
srcs: ["BatteryNotifierFuzz.cpp"],
}
cc_fuzz {
name: "libmediautils_fuzzer_scheduling_policy_service",
- host_supported: false,
- shared_libs: ["libmediautils"],
defaults: ["libmediautils_fuzzer_defaults"],
srcs: ["SchedulingPolicyServiceFuzz.cpp"],
}
cc_fuzz {
name: "libmediautils_fuzzer_service_utilities",
- host_supported: false,
- shared_libs: ["libmediautils"],
defaults: ["libmediautils_fuzzer_defaults"],
srcs: ["ServiceUtilitiesFuzz.cpp"],
}
cc_fuzz {
name: "libmediautils_fuzzer_time_check",
- host_supported: false,
- shared_libs: ["libmediautils"],
defaults: ["libmediautils_fuzzer_defaults"],
srcs: ["TimeCheckFuzz.cpp"],
}
diff --git a/media/utils/fuzzers/SchedulingPolicyServiceFuzz.cpp b/media/utils/fuzzers/SchedulingPolicyServiceFuzz.cpp
index 32fc3be..d672fb0 100644
--- a/media/utils/fuzzers/SchedulingPolicyServiceFuzz.cpp
+++ b/media/utils/fuzzers/SchedulingPolicyServiceFuzz.cpp
@@ -19,6 +19,7 @@
#include <utils/String16.h>
#include <android/log.h>
#include <mediautils/SchedulingPolicyService.h>
+#include <mediautils/TidWrapper.h>
#include "fuzzer/FuzzedDataProvider.h"
using android::IBatteryStats;
using android::IBinder;
@@ -55,7 +56,8 @@
int32_t priority = data_provider.ConsumeIntegral<int32_t>();
bool is_for_app = data_provider.ConsumeBool();
bool async = data_provider.ConsumeBool();
- requestPriority(getpid(), gettid(), priority, is_for_app, async);
+ requestPriority(getpid(), android::mediautils::getThreadIdWrapper(), priority, is_for_app,
+ async);
// TODO: Verify and re-enable in AOSP (R).
// bool enable = data_provider.ConsumeBool();
// We are just using batterystats to avoid the need
diff --git a/media/libstagefright/include/media/stagefright/ProcessInfo.h b/media/utils/include/mediautils/ProcessInfo.h
similarity index 90%
rename from media/libstagefright/include/media/stagefright/ProcessInfo.h
rename to media/utils/include/mediautils/ProcessInfo.h
index 06b9c92..9afa3df 100644
--- a/media/libstagefright/include/media/stagefright/ProcessInfo.h
+++ b/media/utils/include/mediautils/ProcessInfo.h
@@ -18,8 +18,7 @@
#define PROCESS_INFO_H_
-#include <media/stagefright/foundation/ABase.h>
-#include <media/stagefright/ProcessInfoInterface.h>
+#include <mediautils/ProcessInfoInterface.h>
#include <map>
#include <mutex>
#include <utils/Condition.h>
@@ -46,7 +45,8 @@
std::mutex mOverrideLock;
std::map<int, ProcessInfoOverride> mOverrideMap GUARDED_BY(mOverrideLock);
- DISALLOW_EVIL_CONSTRUCTORS(ProcessInfo);
+ ProcessInfo(const ProcessInfo&) = delete;
+ ProcessInfo& operator=(const ProcessInfo&) = delete;
};
} // namespace android
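
Dropping ABase.h removes the DISALLOW_EVIL_CONSTRUCTORS macro, so the class now spells out the same non-copyable constraint with explicitly deleted members. The idiom in isolation:

    class NonCopyable {
    public:
        NonCopyable() = default;
        // Same constraint the old DISALLOW_EVIL_CONSTRUCTORS macro expressed:
        NonCopyable(const NonCopyable&) = delete;
        NonCopyable& operator=(const NonCopyable&) = delete;
    };
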
diff --git a/media/libstagefright/include/media/stagefright/ProcessInfoInterface.h b/media/utils/include/mediautils/ProcessInfoInterface.h
similarity index 93%
rename from media/libstagefright/include/media/stagefright/ProcessInfoInterface.h
rename to media/utils/include/mediautils/ProcessInfoInterface.h
index b7fc858..b6529fc 100644
--- a/media/libstagefright/include/media/stagefright/ProcessInfoInterface.h
+++ b/media/utils/include/mediautils/ProcessInfoInterface.h
@@ -25,8 +25,8 @@
virtual bool getPriority(int pid, int* priority) = 0;
virtual bool isPidTrusted(int pid) = 0;
virtual bool isPidUidTrusted(int pid, int uid) = 0;
- virtual bool overrideProcessInfo(int pid, int procState, int oomScore);
- virtual void removeProcessInfoOverride(int pid);
+ virtual bool overrideProcessInfo(int pid, int procState, int oomScore) = 0;
+ virtual void removeProcessInfoOverride(int pid) = 0;
protected:
virtual ~ProcessInfoInterface() {}
diff --git a/media/utils/include/mediautils/SharedMemoryAllocator.h b/media/utils/include/mediautils/SharedMemoryAllocator.h
new file mode 100644
index 0000000..17c1ac9
--- /dev/null
+++ b/media/utils/include/mediautils/SharedMemoryAllocator.h
@@ -0,0 +1,470 @@
+/*
+** Copyright 2022, The Android Open Source Project
+**
+** Licensed under the Apache License, Version 2.0 (the "License");
+** you may not use this file except in compliance with the License.
+** You may obtain a copy of the License at
+**
+** http://www.apache.org/licenses/LICENSE-2.0
+**
+** Unless required by applicable law or agreed to in writing, software
+** distributed under the License is distributed on an "AS IS" BASIS,
+** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+** See the License for the specific language governing permissions and
+** limitations under the License.
+*/
+
+#pragma once
+
+#include <sys/stat.h>
+#include <sys/types.h>
+#include <unistd.h>
+
+#include <iomanip>
+#include <limits>
+#include <sstream>
+#include <string>
+#include <type_traits>
+#include <unordered_map>
+
+#include <binder/MemoryBase.h>
+#include <binder/MemoryHeapBase.h>
+#include <log/log_main.h>
+#include <utils/StrongPointer.h>
+
+namespace std {
+template <typename T>
+struct hash<::android::wp<T>> {
+ size_t operator()(const ::android::wp<T>& x) const {
+ return std::hash<const T*>()(x.unsafe_get());
+ }
+};
+} // namespace std
+
+namespace android::mediautils {
+
+// Allocations represent owning handles to a region of shared memory (and thus
+// should not be copied, in keeping with RAII).
+// To share ownership between multiple objects, use a ref-counting solution
+// such as sp or shared_ptr, so that the dtor is called exactly once for a
+// particular block of memory.
+
+using AllocationType = ::android::sp<IMemory>;
+using WeakAllocationType = ::android::wp<IMemory>;
+
+namespace shared_allocator_impl {
+constexpr inline size_t roundup(size_t size, size_t pageSize) {
+ LOG_ALWAYS_FATAL_IF(pageSize == 0 || (pageSize & (pageSize - 1)) != 0,
+ "Page size not multiple of 2");
+ return ((size + pageSize - 1) & ~(pageSize - 1));
+}
+
+constexpr inline bool isHeapValid(const sp<IMemoryHeap>& heap) {
+ return (heap && heap->getBase() &&
+ heap->getBase() != MAP_FAILED); // TODO if not mapped locally
+}
+
+template <typename, typename = void>
+static constexpr bool has_deallocate_all = false;
+
+template <typename T>
+static constexpr bool has_deallocate_all<
+ T, std::enable_if_t<std::is_same_v<decltype(std::declval<T>().deallocate_all()), void>,
+ void>> = true;
+
+template <typename, typename = void>
+static constexpr bool has_owns = false;
+
+template <typename T>
+static constexpr bool
+ has_owns<T, std::enable_if_t<std::is_same_v<decltype(std::declval<T>().owns(
+ std::declval<const AllocationType>())),
+ bool>,
+ void>> = true;
+
+template <typename, typename = void>
+static constexpr bool has_dump = false;
+
+template <typename T>
+static constexpr bool has_dump<
+ T,
+ std::enable_if_t<std::is_same_v<decltype(std::declval<T>().dump()), std::string>, void>> =
+ true;
+
+} // namespace shared_allocator_impl
+
+struct BasicAllocRequest {
+ size_t size;
+};
+struct NamedAllocRequest : public BasicAllocRequest {
+ std::string_view name;
+};
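+
+// For example (a minimal sketch; 'allocator' stands for any allocator below
+// that returns AllocationType):
+//
+//   AllocationType mem = allocator.allocate(BasicAllocRequest{1024});
+//   AllocationType alias = mem;        // ref-counted sharing, no data copy
+//   WeakAllocationType observer{mem};  // non-owning handle, e.g. for bookkeeping
+//   mem.clear();                       // the block stays alive through 'alias'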
+
+// We are required to add a layer of indirection to hold a handle to the actual
+// block due to sp<> being unable to be created from an object once its
+// ref-count has dropped to zero. So, we have to hold onto an extra reference
+// here. We effectively want to know when the refCount of the object drops to
+// one, since we need to hold on to a reference to pass the object to interfaces
+// requiring an sp<>.
+// TODO is there some way to avoid paying this cost?
+template <typename Allocator>
+class ScopedAllocator;
+template <typename AllocationT, typename AllocatorHandleType>
+class ScopedAllocation : public BnMemory {
+ public:
+ template <typename T>
+ friend class ScopedAllocator;
+ ScopedAllocation(const AllocationT& allocation, const AllocatorHandleType& handle)
+ : mAllocation(allocation), mHandle(handle) {}
+
+ // Defer the implementation to the underlying mAllocation
+
+ virtual sp<IMemoryHeap> getMemory(ssize_t* offset = nullptr,
+ size_t* size = nullptr) const override {
+ return mAllocation->getMemory(offset, size);
+ }
+
+ private:
+ ~ScopedAllocation() override { mHandle->deallocate(mAllocation); }
+
+ const AllocationT mAllocation;
+ const AllocatorHandleType mHandle;
+};
+
+// Allocations are only deallocated when going out of scope.
+// This should almost always be the outermost allocator.
+template <typename Allocator>
+class ScopedAllocator {
+ public:
+ using HandleT = std::shared_ptr<Allocator>;
+ static constexpr size_t alignment() { return Allocator::alignment(); }
+
+ explicit ScopedAllocator(const std::shared_ptr<Allocator>& allocator) : mAllocator(allocator) {}
+
+ ScopedAllocator() : mAllocator(std::make_shared<Allocator>()) {}
+
+ template <typename T>
+ auto allocate(T&& request) {
+ const auto allocation = mAllocator->allocate(std::forward<T>(request));
+ if (!allocation) {
+ return sp<ScopedAllocation<AllocationType, HandleT>>{};
+ }
+ return sp<ScopedAllocation<AllocationType, HandleT>>::make(allocation, mAllocator);
+ }
+
+ // Deallocate and deallocate_all are implicitly unsafe due to double
+ // deallocates upon ScopedAllocation destruction. We can protect against this
+ // efficiently with a gencount (for deallocate_all) or inefficiently (for
+ // deallocate), but we choose not to.
+ //
+ // Owns is only safe to pseudo-implement due to the static_cast requirements.
+ template <typename Enable = bool>
+ auto owns(const sp<ScopedAllocation<AllocationType, HandleT>>& allocation) const
+ -> std::enable_if_t<shared_allocator_impl::has_owns<Allocator>, Enable> {
+ return mAllocator->owns(allocation->mAllocation);
+ }
+
+ template <typename Enable = std::string>
+ auto dump() const -> std::enable_if_t<shared_allocator_impl::has_dump<Allocator>, Enable> {
+ return mAllocator->dump();
+ }
+
+ private:
+ // We store a shared pointer in order to ensure that the allocator outlives
+ // its allocations (which call back into the allocator when they are deallocated).
+ const HandleT mAllocator;
+};
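+
+// Illustrative usage of ScopedAllocator (a minimal sketch; the request size is
+// arbitrary):
+//
+//   ScopedAllocator<MemoryHeapBaseAllocator> scoped;  // default-constructs the backing allocator
+//   auto block = scoped.allocate(BasicAllocRequest{1024});
+//   // 'block' is an sp<> to a ScopedAllocation; releasing the last sp<> returns
+//   // the memory to the backing allocator automatically.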
+
+// A simple policy for PolicyAllocator which enforces a pool size and an allocation
+// size range.
+template <size_t PoolSize, size_t MinAllocSize = 0,
+ size_t MaxAllocSize = std::numeric_limits<size_t>::max()>
+class SizePolicy {
+ static_assert(PoolSize > 0);
+
+ public:
+ template <typename T>
+ bool isValid(T&& request) const {
+ static_assert(std::is_base_of_v<BasicAllocRequest, std::decay_t<T>>);
+ return !(request.size > kMaxAllocSize || request.size < kMinAllocSize ||
+ mPoolSize + request.size > kPoolSize);
+ }
+
+ void allocated(const AllocationType& alloc) { mPoolSize += alloc->size(); }
+
+ void deallocated(const AllocationType& alloc) { mPoolSize -= alloc->size(); }
+
+ void deallocated_all() { mPoolSize = 0; }
+
+ static constexpr size_t kPoolSize = PoolSize;
+ static constexpr size_t kMinAllocSize = MinAllocSize;
+ static constexpr size_t kMaxAllocSize = MaxAllocSize;
+
+ private:
+ size_t mPoolSize = 0;
+};
+
+// An allocator which accepts or rejects allocation requests by a parametrized
+// policy (which can carry state).
+template <typename Allocator, typename Policy>
+class PolicyAllocator {
+ public:
+ static constexpr size_t alignment() { return Allocator::alignment(); }
+
+ PolicyAllocator(Allocator allocator, Policy policy)
+ : mAllocator(allocator), mPolicy(std::move(policy)) {}
+
+ // Default initialize the allocator and policy
+ PolicyAllocator() = default;
+
+ template <typename T>
+ AllocationType allocate(T&& request) {
+ static_assert(std::is_base_of_v<android::mediautils::BasicAllocRequest, std::decay_t<T>>);
+ request.size = shared_allocator_impl::roundup(request.size, alignment());
+ if (!mPolicy.isValid(request)) {
+ return {};
+ }
+ AllocationType val = mAllocator.allocate(std::forward<T>(request));
+ if (val == nullptr) return val;
+ mPolicy.allocated(val);
+ return val;
+ }
+
+ void deallocate(const AllocationType& allocation) {
+ if (!allocation) return;
+ mPolicy.deallocated(allocation);
+ mAllocator.deallocate(allocation);
+ }
+
+ template <typename Enable = void>
+ auto deallocate_all()
+ -> std::enable_if_t<shared_allocator_impl::has_deallocate_all<Allocator>, Enable> {
+ mAllocator.deallocate_all();
+ mPolicy.deallocated_all();
+ }
+
+ template <typename Enable = bool>
+ auto owns(const AllocationType& allocation) const
+ -> std::enable_if_t<shared_allocator_impl::has_owns<Allocator>, Enable> {
+ return mAllocator.owns(allocation);
+ }
+
+ template <typename Enable = std::string>
+ auto dump() const -> std::enable_if_t<shared_allocator_impl::has_dump<Allocator>, Enable> {
+ return mAllocator.dump();
+ }
+
+ private:
+ [[no_unique_address]] Allocator mAllocator;
+ [[no_unique_address]] Policy mPolicy;
+};
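+
+// Illustrative composition (a minimal sketch; the sizes are arbitrary):
+//
+//   // At most 64 KiB in total, each allocation between one and four pages.
+//   PolicyAllocator<MemoryHeapBaseAllocator, SizePolicy<65536, 4096, 16384>> alloc;
+//   auto ok  = alloc.allocate(BasicAllocRequest{8192});   // accepted
+//   auto bad = alloc.allocate(BasicAllocRequest{32768});  // rejected by kMaxAllocSize
+//   // 'bad' is a null AllocationType and the pool accounting is unchanged.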
+
+// An allocator which keeps track of outstanding allocations for logging and
+// querying ownership.
+template <class Allocator>
+class SnoopingAllocator {
+ public:
+ struct AllocationData {
+ std::string name;
+ size_t allocation_number;
+ };
+ static constexpr size_t alignment() { return Allocator::alignment(); }
+
+ SnoopingAllocator(Allocator allocator, std::string_view name)
+ : mName(name), mAllocator(std::move(allocator)) {}
+
+ explicit SnoopingAllocator(std::string_view name) : mName(name), mAllocator(Allocator{}) {}
+
+ explicit SnoopingAllocator(Allocator allocator) : mAllocator(std::move(allocator)) {}
+
+ // Default construct allocator and name
+ SnoopingAllocator() = default;
+
+ template <typename T>
+ AllocationType allocate(T&& request) {
+ static_assert(std::is_base_of_v<NamedAllocRequest, std::decay_t<T>>);
+ AllocationType allocation = mAllocator.allocate(request);
+ if (allocation)
+ mAllocations.insert({WeakAllocationType{allocation},
+ {std::string{request.name}, mAllocationNumber++}});
+ return allocation;
+ }
+
+ void deallocate(const AllocationType& allocation) {
+ if (!allocation) return;
+ mAllocations.erase(WeakAllocationType{allocation});
+ mAllocator.deallocate(allocation);
+ }
+
+ void deallocate_all() {
+ if constexpr (shared_allocator_impl::has_deallocate_all<Allocator>) {
+ mAllocator.deallocate_all();
+ } else {
+ for (auto& [mem, value] : mAllocations) {
+ mAllocator.deallocate(mem);
+ }
+ }
+ mAllocations.clear();
+ }
+
+ bool owns(const AllocationType& allocation) const {
+ return (mAllocations.count(WeakAllocationType{allocation}) > 0);
+ }
+
+ std::string dump() const {
+ std::ostringstream dump;
+ dump << mName << " Allocator Dump:\n";
+ dump << std::setw(8) << "HeapID" << std::setw(8) << "Size" << std::setw(8) << "Offset"
+ << std::setw(8) << "Order"
+ << " Name\n";
+ for (auto& [mem, value] : mAllocations) {
+ // TODO IMemory size and offset
+ const AllocationType handle = mem.promote();
+ if (!handle) {
+ dump << "Invalid memory lifetime!";
+ continue;
+ }
+ const auto heap = handle->getMemory();
+ dump << std::setw(8) << heap->getHeapID() << std::setw(8) << heap->getSize()
+ << std::setw(8) << heap->getOffset() << std::setw(8) << value.allocation_number
+ << " " << value.name << "\n";
+ }
+ return dump.str();
+ }
+
+ const std::unordered_map<WeakAllocationType, AllocationData>& getAllocations() {
+ return mAllocations;
+ }
+
+ private:
+ const std::string mName;
+ [[no_unique_address]] Allocator mAllocator;
+ // We don't take copies of the underlying information in an allocation,
+ // rather, the allocation information is put on the heap and referenced via
+ // a ref-counted solution. So, the address of the allocation information is
+ // appropriate to hash. In order for this block to be freed, the underlying
+ // allocation must be referenced by no one (thus deallocated).
+ std::unordered_map<WeakAllocationType, AllocationData> mAllocations;
+ // For debugging purposes, monotonic
+ size_t mAllocationNumber = 0;
+};
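+
+// Illustrative usage (a minimal sketch; the names are arbitrary):
+//
+//   SnoopingAllocator<MemoryHeapBaseAllocator> snoop{"audio pool"};
+//   auto block = snoop.allocate(NamedAllocRequest{{4096}, "track buffer"});
+//   // snoop.dump() now lists heap id, size, offset, order and "track buffer";
+//   // snoop.owns(block) is true until snoop.deallocate(block) is called.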
+
+// An allocator which passes a failed allocation request to a backup allocator.
+template <class PrimaryAllocator, class SecondaryAllocator>
+class FallbackAllocator {
+ public:
+ static_assert(PrimaryAllocator::alignment() == SecondaryAllocator::alignment());
+ static_assert(shared_allocator_impl::has_owns<PrimaryAllocator>);
+
+ static constexpr size_t alignment() { return PrimaryAllocator::alignment(); }
+
+ FallbackAllocator(const PrimaryAllocator& primary, const SecondaryAllocator& secondary)
+ : mPrimary(primary), mSecondary(secondary) {}
+
+ // Default construct primary and secondary allocator
+ FallbackAllocator() = default;
+
+ template <typename T>
+ AllocationType allocate(T&& request) {
+ AllocationType allocation = mPrimary.allocate(std::forward<T>(request));
+ if (!allocation) allocation = mSecondary.allocate(std::forward<T>(request));
+ return allocation;
+ }
+
+ void deallocate(const AllocationType& allocation) {
+ if (!allocation) return;
+ if (mPrimary.owns(allocation)) {
+ mPrimary.deallocate(allocation);
+ } else {
+ mSecondary.deallocate(allocation);
+ }
+ }
+
+ template <typename Enable = void>
+ auto deallocate_all() -> std::enable_if_t<
+ shared_allocator_impl::has_deallocate_all<PrimaryAllocator> &&
+ shared_allocator_impl::has_deallocate_all<SecondaryAllocator>,
+ Enable> {
+ mPrimary.deallocate_all();
+ mSecondary.deallocate_all();
+ }
+
+ template <typename Enable = bool>
+ auto owns(const AllocationType& allocation) const
+ -> std::enable_if_t<shared_allocator_impl::has_owns<SecondaryAllocator>, Enable> {
+ return mPrimary.owns(allocation) || mSecondary.owns(allocation);
+ }
+
+ template <typename Enable = std::string>
+ auto dump() const
+ -> std::enable_if_t<shared_allocator_impl::has_dump<PrimaryAllocator> &&
+ shared_allocator_impl::has_dump<SecondaryAllocator>,
+ Enable> {
+ return std::string("Primary: \n") + mPrimary.dump() + std::string("Secondary: \n") +
+ mSecondary.dump();
+ }
+
+ private:
+ [[no_unique_address]] PrimaryAllocator mPrimary;
+ [[no_unique_address]] SecondaryAllocator mSecondary;
+};
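+
+// Illustrative composition (a minimal sketch): prefer a small bounded pool and
+// spill into a second pool once the first rejects a request.
+//
+//   FallbackAllocator fallback{
+//           SnoopingAllocator<PolicyAllocator<MemoryHeapBaseAllocator, SizePolicy<4096>>>{"fast"},
+//           SnoopingAllocator<MemoryHeapBaseAllocator>{"overflow"}};
+//   auto a = fallback.allocate(NamedAllocRequest{{4096}, "first"});   // served by the primary
+//   auto b = fallback.allocate(NamedAllocRequest{{4096}, "second"});  // falls back to the secondary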
+
+// An allocator which is backed by a shared_ptr to an allocator, so multiple
+// allocators can share the same backing allocator (and thus the same state).
+template <typename Allocator>
+class IndirectAllocator {
+ public:
+ static constexpr size_t alignment() { return Allocator::alignment(); }
+
+ explicit IndirectAllocator(const std::shared_ptr<Allocator>& allocator)
+ : mAllocator(allocator) {}
+
+ template <typename T>
+ AllocationType allocate(T&& request) {
+ return mAllocator->allocate(std::forward<T>(request));
+ }
+
+ void deallocate(const AllocationType& allocation) {
+ if (!allocation) return;
+ mAllocator->deallocate(allocation);
+ }
+
+ // We can't implement deallocate_all/dump/owns, since we may not be the only allocator with
+ // access to the underlying allocator (making it not well-defined). If these
+ // methods are necessary, we need to wrap with a snooping allocator.
+ private:
+ const std::shared_ptr<Allocator> mAllocator;
+};
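+
+// Illustrative usage (a minimal sketch): two independent front-ends sharing one
+// backing pool, and therefore one size budget.
+//
+//   const auto pool =
+//           std::make_shared<PolicyAllocator<MemoryHeapBaseAllocator, SizePolicy<65536>>>();
+//   IndirectAllocator viewA{pool};
+//   IndirectAllocator viewB{pool};  // allocations through either view count against the same pool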
+
+// Stateless. This allocator allocates full page-aligned MemoryHeapBases (backed by
+// a shared memory mapped anonymous file) as allocations.
+class MemoryHeapBaseAllocator {
+ public:
+ static constexpr size_t alignment() { return 4096; /* PAGE_SIZE */ }
+ static constexpr unsigned FLAGS = 0; // default flags
+
+ template <typename T>
+ AllocationType allocate(T&& request) {
+ static_assert(std::is_base_of_v<BasicAllocRequest, std::decay_t<T>>);
+ auto heap =
+ sp<MemoryHeapBase>::make(shared_allocator_impl::roundup(request.size, alignment()));
+ if (!shared_allocator_impl::isHeapValid(heap)) {
+ return {};
+ }
+ return sp<MemoryBase>::make(heap, 0, heap->getSize());
+ }
+
+ // Passing a block not allocated by a HeapAllocator is undefined.
+ void deallocate(const AllocationType& allocation) {
+ if (!allocation) return;
+ const auto heap = allocation->getMemory();
+ if (!heap) return;
+ // This causes future mapped accesses (even across process boundaries)
+ // to receive SIGBUS.
+ ftruncate(heap->getHeapID(), 0);
+ // This static cast is safe, since as long as the block was originally
+ // allocated by us, the underlying IMemoryHeap was a MemoryHeapBase
+ static_cast<MemoryHeapBase&>(*heap).dispose();
+ }
+};
+} // namespace android::mediautils
diff --git a/media/utils/include/mediautils/TidWrapper.h b/media/utils/include/mediautils/TidWrapper.h
new file mode 100644
index 0000000..aeefa01
--- /dev/null
+++ b/media/utils/include/mediautils/TidWrapper.h
@@ -0,0 +1,53 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#if defined(__linux__)
+#include <signal.h>
+#include <sys/syscall.h>
+#include <unistd.h>
+#endif
+
+namespace android::mediautils {
+
+// The library wrapper for gettid is only available on bionic. If we don't link
+// against it, we syscall directly.
+inline pid_t getThreadIdWrapper() {
+#if defined(__BIONIC__)
+ return ::gettid();
+#else
+ return syscall(SYS_gettid);
+#endif
+}
+
+// Send an abort signal to a (linux) thread id.
+inline int abortTid(int tid) {
+#if defined(__linux__)
+ const pid_t pid = getpid();
+ siginfo_t siginfo = {
+ .si_code = SI_QUEUE,
+ .si_pid = pid,
+ .si_uid = getuid(),
+ };
+ return syscall(SYS_rt_tgsigqueueinfo, pid, tid, SIGABRT, &siginfo);
+#else
+ errno = ENODEV;
+ return -1;
+#endif
+}
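+
+// Illustrative usage (a minimal sketch): a watchdog that has identified a stuck
+// thread can deliver SIGABRT directly to that thread rather than to itself.
+//
+//   const pid_t suspectTid = ...;  // tid recorded earlier via getThreadIdWrapper()
+//   if (suspectTid != -1 && suspectTid != getThreadIdWrapper()) {
+//       abortTid(suspectTid);
+//   }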
+
+} // namespace android::mediautils
diff --git a/media/utils/include/mediautils/TimeCheck.h b/media/utils/include/mediautils/TimeCheck.h
index bdb5337..f9ea50c 100644
--- a/media/utils/include/mediautils/TimeCheck.h
+++ b/media/utils/include/mediautils/TimeCheck.h
@@ -123,7 +123,6 @@
const Duration secondChanceDuration;
const std::chrono::system_clock::time_point startSystemTime;
const pid_t tid;
-
void onCancel(TimerThread::Handle handle) const;
void onTimeout(TimerThread::Handle handle) const;
};
diff --git a/media/utils/include/mediautils/TimerThread.h b/media/utils/include/mediautils/TimerThread.h
index c76fa7d..d5be177 100644
--- a/media/utils/include/mediautils/TimerThread.h
+++ b/media/utils/include/mediautils/TimerThread.h
@@ -21,9 +21,11 @@
#include <deque>
#include <functional>
#include <map>
+#include <memory>
#include <mutex>
#include <string>
#include <thread>
+#include <vector>
#include <android-base/thread_annotations.h>
@@ -151,7 +153,15 @@
*/
bool cancelTask(Handle handle);
- std::string toString(size_t retiredCount = SIZE_MAX) const;
+ struct SnapshotAnalysis;
+ /**
+ * Take a snapshot of the current state of the TimerThread and determine the
+ * potential cause of a deadlock.
+ * \param retiredCount The number of successfully retired calls to capture
+ * (may be many).
+ * \return See below for a description of the SnapshotAnalysis object.
+ */
+ SnapshotAnalysis getSnapshotAnalysis(size_t retiredCount = SIZE_MAX) const;
/**
* Returns a string representation of the TimerThread queue.
@@ -202,7 +212,6 @@
return s;
}
- private:
// To minimize movement of data, we pass around shared_ptrs to Requests.
// These are allocated and deallocated outside of the lock.
// TODO(b/243839867) consider options to merge Request with the
@@ -232,6 +241,40 @@
std::string toString() const;
};
+
+ // SnapshotAnalysis contains the state and deductions captured by getSnapshotAnalysis().
+
+ struct SnapshotAnalysis {
+ // If we were unable to determine any applicable thread ids,
+ // we leave their value as INVALID_PID.
+ // Note, we use the linux thread id (not pthread), so its type is pid_t.
+ static constexpr pid_t INVALID_PID = -1;
+ // Description of likely issue and/or blocked method.
+ // Empty if no actionable info.
+ std::string description;
+ // Tid of the (latest) monitored thread which has timed out.
+ // This is the thread which the suspect is deduced with respect to.
+ // Most often, this is the thread which an abort is being triggered
+ // from.
+ pid_t timeoutTid = INVALID_PID;
+ // Tid of the (HAL) thread which has likely halted progress, selected
+ // from pendingRequests. May be the same as timeoutTid, if the timed-out
+ // thread directly called into the HAL.
+ pid_t suspectTid = INVALID_PID;
+ // Number of second chances given by the timer thread
+ size_t secondChanceCount;
+ // List of pending requests
+ std::vector<std::shared_ptr<const Request>> pendingRequests;
+ // List of timed-out requests
+ std::vector<std::shared_ptr<const Request>> timeoutRequests;
+ // List of retired requests
+ std::vector<std::shared_ptr<const Request>> retiredRequests;
+ // Dumps the information contained above as well as additional call
+ // stacks where applicable.
+ std::string toString() const;
+ };
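+
+ // Illustrative use by a watchdog (a minimal sketch):
+ //
+ //   const auto analysis = timerThread.getSnapshotAnalysis();
+ //   if (analysis.suspectTid != SnapshotAnalysis::INVALID_PID) {
+ //       // A thread other than the timed-out one (typically a HAL thread) is
+ //       // likely blocked; analysis.toString() includes the relevant stacks.
+ //   }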
+
+ private:
// Deque of requests, in order of add().
// This class is thread-safe.
class RequestQueue {
@@ -326,36 +369,11 @@
}
};
- // Analysis contains info deduced by analysisTimeout().
- //
- // Summary is the result string from checking timeoutRequests to see if
- // any might be caused by blocked calls in pendingRequests.
- //
- // Summary string is empty if there is no automatic actionable info.
- //
- // timeoutTid is the tid selected from timeoutRequests (if any).
- //
- // HALBlockedTid is the tid that is blocked from pendingRequests believed
- // to cause the timeout.
- // HALBlockedTid may be INVALID_PID if no suspected tid is found,
- // and if HALBlockedTid is valid, it will not be the same as timeoutTid.
- //
- static constexpr pid_t INVALID_PID = -1;
- struct Analysis {
- std::string summary;
- pid_t timeoutTid = INVALID_PID;
- pid_t HALBlockedTid = INVALID_PID;
- };
// A HAL method is where the substring "Hidl" is in the class name.
// The tag should look like: ... Hidl ... :: ...
static bool isRequestFromHal(const std::shared_ptr<const Request>& request);
- // Returns analysis from the requests.
- static Analysis analyzeTimeout(
- const std::vector<std::shared_ptr<const Request>>& timeoutRequests,
- const std::vector<std::shared_ptr<const Request>>& pendingRequests);
-
std::vector<std::shared_ptr<const Request>> getPendingRequests() const;
static constexpr size_t kRetiredQueueMax = 16;
diff --git a/media/utils/tests/Android.bp b/media/utils/tests/Android.bp
index 48d18b0..0689083 100644
--- a/media/utils/tests/Android.bp
+++ b/media/utils/tests/Android.bp
@@ -20,10 +20,34 @@
],
sanitize: {
- address: true,
- cfi: true,
+ undefined: true,
+ misc_undefined: [
+ "float-divide-by-zero",
+ "local-bounds",
+ ],
integer_overflow: true,
+ cfi: true,
memtag_heap: true,
+ diag: {
+ undefined: true,
+ misc_undefined: [
+ "float-divide-by-zero",
+ "local-bounds",
+ ],
+ integer_overflow: true,
+ cfi: true,
+ memtag_heap: true,
+ },
+ },
+ target: {
+ host: {
+ sanitize: {
+ cfi: false,
+ diag: {
+ cfi: false,
+ },
+ },
+ },
},
}
@@ -35,23 +59,7 @@
host_supported: true,
shared_libs: [
- "liblog",
- "libutils",
- ],
-
- static_libs: [
- "libmediautils_core",
- ],
-}
-
-cc_defaults {
- name: "libmediautils_tests_host_unavail",
-
- defaults: ["libmediautils_tests_config"],
-
- host_supported: false,
-
- shared_libs: [
+ "libbinder",
"liblog",
"libmediautils",
"libutils",
@@ -89,22 +97,17 @@
cc_test {
name: "libmediautils_test",
- defaults: ["libmediautils_tests_host_unavail"],
-
- shared_libs: [
- "libmediautils",
- ],
+ defaults: ["libmediautils_tests_defaults"],
srcs: [
"memory-test.cpp",
- "TimerThread-test.cpp",
],
}
cc_test {
name: "media_process_tests",
- defaults: ["libmediautils_tests_host_unavail"],
+ defaults: ["libmediautils_tests_defaults"],
srcs: [
"media_process_tests.cpp",
@@ -114,7 +117,7 @@
cc_test {
name: "media_synchronization_tests",
- defaults: ["libmediautils_tests_host_unavail"],
+ defaults: ["libmediautils_tests_defaults"],
srcs: [
"media_synchronization_tests.cpp",
@@ -124,7 +127,7 @@
cc_test {
name: "media_threadsnapshot_tests",
- defaults: ["libmediautils_tests_host_unavail"],
+ defaults: ["libmediautils_tests_defaults"],
srcs: [
"media_threadsnapshot_tests.cpp",
@@ -186,7 +189,7 @@
cc_test {
name: "timecheck_tests",
- defaults: ["libmediautils_tests_host_unavail"],
+ defaults: ["libmediautils_tests_defaults"],
srcs: [
"timecheck_tests.cpp",
@@ -194,6 +197,16 @@
}
cc_test {
+ name: "timerthread_tests",
+
+ defaults: ["libmediautils_tests_defaults"],
+
+ srcs: [
+ "TimerThread-test.cpp",
+ ],
+}
+
+cc_test {
name: "extended_accumulator_tests",
defaults: ["libmediautils_tests_defaults"],
@@ -209,6 +222,14 @@
defaults: ["libmediautils_tests_defaults"],
srcs: [
- "inplace_function_tests.cpp"
+ "inplace_function_tests.cpp",
+ ],
+}
+
+cc_test {
+ name: "shared_memory_allocator_tests",
+ defaults: ["libmediautils_tests_defaults"],
+ srcs: [
+ "shared_memory_allocator_tests.cpp",
],
}
diff --git a/media/utils/tests/TimerThread-test.cpp b/media/utils/tests/TimerThread-test.cpp
index 8a92a26..468deed 100644
--- a/media/utils/tests/TimerThread-test.cpp
+++ b/media/utils/tests/TimerThread-test.cpp
@@ -52,7 +52,7 @@
std::atomic<bool> taskRan = false;
TimerThread thread;
TimerThread::Handle handle =
- thread.scheduleTask("Basic", [&taskRan](TimerThread::Handle handle __unused) {
+ thread.scheduleTask("Basic", [&taskRan](TimerThread::Handle) {
taskRan = true; }, DISTRIBUTE_TIMEOUT_SECONDCHANCE_MS_FRAC(100, frac));
ASSERT_TRUE(TimerThread::isTimeoutHandle(handle));
std::this_thread::sleep_for(100ms - kJitter);
@@ -70,7 +70,7 @@
std::atomic<bool> taskRan = false;
TimerThread thread;
TimerThread::Handle handle =
- thread.scheduleTask("Cancel", [&taskRan](TimerThread::Handle handle __unused) {
+ thread.scheduleTask("Cancel", [&taskRan](TimerThread::Handle) {
taskRan = true; }, DISTRIBUTE_TIMEOUT_SECONDCHANCE_MS_FRAC(100, frac));
ASSERT_TRUE(TimerThread::isTimeoutHandle(handle));
std::this_thread::sleep_for(100ms - kJitter);
@@ -90,7 +90,7 @@
TimerThread thread;
TimerThread::Handle handle =
thread.scheduleTask("CancelAfterRun",
- [&taskRan](TimerThread::Handle handle __unused) {
+ [&taskRan](TimerThread::Handle) {
taskRan = true; },
DISTRIBUTE_TIMEOUT_SECONDCHANCE_MS_FRAC(100, frac));
ASSERT_TRUE(TimerThread::isTimeoutHandle(handle));
@@ -110,17 +110,17 @@
auto startTime = std::chrono::steady_clock::now();
- thread.scheduleTask("0", [&taskRan](TimerThread::Handle handle __unused) {
+ thread.scheduleTask("0", [&taskRan](TimerThread::Handle) {
taskRan[0] = true; }, DISTRIBUTE_TIMEOUT_SECONDCHANCE_MS_FRAC(300, frac));
- thread.scheduleTask("1", [&taskRan](TimerThread::Handle handle __unused) {
+ thread.scheduleTask("1", [&taskRan](TimerThread::Handle) {
taskRan[1] = true; }, DISTRIBUTE_TIMEOUT_SECONDCHANCE_MS_FRAC(100, frac));
- thread.scheduleTask("2", [&taskRan](TimerThread::Handle handle __unused) {
+ thread.scheduleTask("2", [&taskRan](TimerThread::Handle) {
taskRan[2] = true; }, DISTRIBUTE_TIMEOUT_SECONDCHANCE_MS_FRAC(200, frac));
- thread.scheduleTask("3", [&taskRan](TimerThread::Handle handle __unused) {
+ thread.scheduleTask("3", [&taskRan](TimerThread::Handle) {
taskRan[3] = true; }, DISTRIBUTE_TIMEOUT_SECONDCHANCE_MS_FRAC(400, frac));
- auto handle4 = thread.scheduleTask("4", [&taskRan](TimerThread::Handle handle __unused) {
+ auto handle4 = thread.scheduleTask("4", [&taskRan](TimerThread::Handle) {
taskRan[4] = true; }, DISTRIBUTE_TIMEOUT_SECONDCHANCE_MS_FRAC(200, frac));
- thread.scheduleTask("5", [&taskRan](TimerThread::Handle handle __unused) {
+ thread.scheduleTask("5", [&taskRan](TimerThread::Handle) {
taskRan[5] = true; }, DISTRIBUTE_TIMEOUT_SECONDCHANCE_MS_FRAC(200, frac));
// 6 tasks pending
diff --git a/media/utils/tests/library_tests.cpp b/media/utils/tests/library_tests.cpp
index c5c500c..f15f7f9 100644
--- a/media/utils/tests/library_tests.cpp
+++ b/media/utils/tests/library_tests.cpp
@@ -26,8 +26,9 @@
namespace {
-static int32_t here = 0; // accessed on same thread.
+[[maybe_unused]] static int32_t here = 0; // accessed on same thread.
+#if defined(__ANDROID__)
TEST(library_tests, basic) {
std::string path = android::base::GetExecutableDirectory() + "/libsharedtest.so";
// The flags to loadLibrary should not include RTLD_GLOBAL or RTLD_NODELETE
@@ -64,6 +65,7 @@
// will prevent unloading libraries.
ASSERT_EQ(1, here);
}
+#endif
TEST(library_tests, sad_library) {
std::string path = android::base::GetExecutableDirectory()
diff --git a/media/utils/tests/media_process_tests.cpp b/media/utils/tests/media_process_tests.cpp
index 6e738b1..391c6a7 100644
--- a/media/utils/tests/media_process_tests.cpp
+++ b/media/utils/tests/media_process_tests.cpp
@@ -15,6 +15,7 @@
*/
#include <mediautils/Process.h>
+#include <mediautils/TidWrapper.h>
#define LOG_TAG "media_process_tests"
@@ -33,7 +34,7 @@
}
TEST(media_process_tests, basic) {
- const std::string schedString = getThreadSchedAsString(gettid());
+ const std::string schedString = getThreadSchedAsString(getThreadIdWrapper());
(void)schedString;
// We don't test schedString, only that we haven't crashed.
diff --git a/media/utils/tests/media_threadsnapshot_tests.cpp b/media/utils/tests/media_threadsnapshot_tests.cpp
index fc9aeab..57cf698 100644
--- a/media/utils/tests/media_threadsnapshot_tests.cpp
+++ b/media/utils/tests/media_threadsnapshot_tests.cpp
@@ -15,6 +15,7 @@
*/
#include <mediautils/ThreadSnapshot.h>
+#include <mediautils/TidWrapper.h>
#define LOG_TAG "media_threadsnapshot_tests"
@@ -38,7 +39,7 @@
TEST(media_threadsnapshot_tests, basic) {
using namespace std::chrono_literals;
- ThreadSnapshot threadSnapshot(gettid());
+ ThreadSnapshot threadSnapshot(getThreadIdWrapper());
threadSnapshot.onBegin();
diff --git a/media/utils/tests/shared_memory_allocator_tests.cpp b/media/utils/tests/shared_memory_allocator_tests.cpp
new file mode 100644
index 0000000..11bc72a
--- /dev/null
+++ b/media/utils/tests/shared_memory_allocator_tests.cpp
@@ -0,0 +1,350 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "shared_memory_allocator_tests"
+
+#include <gtest/gtest.h>
+#include <mediautils/SharedMemoryAllocator.h>
+#include <sys/stat.h>
+#include <utils/Log.h>
+
+using namespace android;
+using namespace android::mediautils;
+
+namespace {
+void validate_block(const AllocationType& block) {
+ ASSERT_TRUE(block != nullptr);
+ memset(block->unsecurePointer(), 10, 4096);
+ EXPECT_EQ(*(static_cast<char*>(block->unsecurePointer()) + 100), static_cast<char>(10));
+}
+
+template <size_t N = 0, bool FatalOwn = true>
+struct ValidateForwarding {
+ static constexpr size_t alignment() { return 1337; }
+
+ bool owns(const AllocationType& allocation) const {
+ if (allocation == owned) return true;
+ if constexpr (FatalOwn) {
+ LOG_ALWAYS_FATAL_IF(allocation != not_owned, "Invalid allocation passed to allocator");
+ }
+ return false;
+ }
+
+ void deallocate_all() { deallocate_all_count++; }
+ std::string dump() const { return dump_string; }
+
+ static inline size_t deallocate_all_count = 0;
+ static inline const AllocationType owned =
+ MemoryHeapBaseAllocator().allocate(BasicAllocRequest{4096});
+ static inline const AllocationType not_owned =
+ MemoryHeapBaseAllocator().allocate(BasicAllocRequest{4096});
+ static inline const std::string dump_string = std::to_string(N) + "Test Dump Forwarding";
+};
+
+}; // namespace
+static_assert(shared_allocator_impl::has_owns<MemoryHeapBaseAllocator> == false);
+static_assert(shared_allocator_impl::has_dump<MemoryHeapBaseAllocator> == false);
+static_assert(shared_allocator_impl::has_deallocate_all<MemoryHeapBaseAllocator> == false);
+static_assert(shared_allocator_impl::has_owns<SnoopingAllocator<MemoryHeapBaseAllocator>> == true);
+static_assert(shared_allocator_impl::has_dump<SnoopingAllocator<MemoryHeapBaseAllocator>> == true);
+static_assert(
+ shared_allocator_impl::has_deallocate_all<SnoopingAllocator<MemoryHeapBaseAllocator>> ==
+ true);
+static_assert(
+ shared_allocator_impl::has_owns<
+ PolicyAllocator<SnoopingAllocator<MemoryHeapBaseAllocator>, SizePolicy<4096>>> ==
+ true);
+static_assert(
+ shared_allocator_impl::has_dump<
+ PolicyAllocator<SnoopingAllocator<MemoryHeapBaseAllocator>, SizePolicy<4096>>> ==
+ true);
+static_assert(
+ shared_allocator_impl::has_deallocate_all<
+ PolicyAllocator<SnoopingAllocator<MemoryHeapBaseAllocator>, SizePolicy<4096>>> ==
+ true);
+static_assert(shared_allocator_impl::has_owns<
+ FallbackAllocator<SnoopingAllocator<MemoryHeapBaseAllocator>,
+ SnoopingAllocator<MemoryHeapBaseAllocator>>> == true);
+
+TEST(shared_memory_allocator_tests, roundup) {
+ using namespace shared_allocator_impl;
+ EXPECT_EQ(roundup(1023, 1024), 1024ul);
+ EXPECT_EQ(roundup(1024, 1024), 1024ul);
+ EXPECT_EQ(roundup(1025, 1024), 2048ul);
+ EXPECT_DEATH(roundup(1023, 1023), "");
+ EXPECT_DEATH(roundup(1023, 0), "");
+}
+
+TEST(shared_memory_allocator_tests, mheapbase_allocator) {
+ MemoryHeapBaseAllocator allocator;
+ const auto memory = allocator.allocate(BasicAllocRequest{500});
+ ASSERT_TRUE(memory != nullptr);
+ const auto fd = dup(memory->getMemory()->getHeapID());
+ EXPECT_EQ(memory->size(), static_cast<unsigned>(4096));
+ EXPECT_EQ(memory->size(), memory->getMemory()->getSize());
+ validate_block(memory);
+ allocator.deallocate(memory);
+ // Ensures we have closed the fd
+ EXPECT_EQ(memory->unsecurePointer(), nullptr);
+ EXPECT_EQ(memory->getMemory()->getBase(), nullptr);
+ struct stat st;
+ const auto err = fstat(fd, &st);
+ EXPECT_EQ(err, 0);
+ // Ensure we reclaim pages (overly-zealous)
+ EXPECT_EQ(st.st_size, 0);
+}
+
+TEST(shared_memory_allocator_tests, mheapbase_allocator_independence) {
+ static_assert(MemoryHeapBaseAllocator::alignment() == 4096);
+ MemoryHeapBaseAllocator allocator;
+ const auto first_memory = allocator.allocate(BasicAllocRequest{500});
+ const auto second_memory = allocator.allocate(BasicAllocRequest{500});
+ ASSERT_TRUE(first_memory != nullptr && second_memory != nullptr);
+ EXPECT_NE(first_memory->getMemory()->getHeapID(), second_memory->getMemory()->getHeapID());
+ allocator.deallocate(first_memory);
+ validate_block(second_memory);
+ allocator.deallocate(second_memory);
+}
+
+TEST(shared_memory_allocator_tests, snooping_allocator) {
+ static_assert(SnoopingAllocator<ValidateForwarding<0>>::alignment() ==
+ ValidateForwarding<0>::alignment());
+
+ SnoopingAllocator<MemoryHeapBaseAllocator> allocator{"allocator"};
+ const auto first_memory = allocator.allocate(NamedAllocRequest{{500}, "allocate_1"});
+ auto second_memory = first_memory;
+ {
+ const auto tmp = allocator.allocate(NamedAllocRequest{{5000}, "allocate_2"});
+ // Test copying handle around
+ second_memory = tmp;
+ }
+ ASSERT_TRUE(first_memory && second_memory);
+ EXPECT_TRUE(allocator.owns(first_memory) && allocator.owns(second_memory));
+ const auto first_allocations = allocator.getAllocations();
+ EXPECT_EQ(first_allocations.size(), 2ull);
+ for (const auto& [key, val] : allocator.getAllocations()) {
+ if (val.allocation_number == 0) {
+ EXPECT_EQ(val.name, "allocate_1");
+ EXPECT_TRUE(first_memory == key);
+ }
+ if (val.allocation_number == 1) {
+ EXPECT_EQ(val.name, "allocate_2");
+ EXPECT_TRUE(second_memory == key);
+ }
+ }
+ // TODO test dump and deallocate forwarding
+ // EXPECT_EQ(allocator.dump(), std::string{});
+ validate_block(second_memory);
+ allocator.deallocate(second_memory);
+ EXPECT_EQ(second_memory->unsecurePointer(), nullptr);
+ EXPECT_FALSE(allocator.owns(second_memory));
+ EXPECT_TRUE(allocator.owns(first_memory));
+ const auto second_allocations = allocator.getAllocations();
+ EXPECT_EQ(second_allocations.size(), 1ul);
+ for (const auto& [key, val] : second_allocations) {
+ EXPECT_EQ(val.name, "allocate_1");
+ EXPECT_TRUE(first_memory == key);
+ }
+ // EXPECT_EQ(allocator.dump(), std::string{});
+ // TODO test deallocate_all O(1)
+}
+
+// TODO generic policy test
+TEST(shared_memory_allocator_tests, size_policy_allocator_enforcement) {
+ PolicyAllocator allocator{MemoryHeapBaseAllocator{},
+ SizePolicy<4096 * 7, 4096 * 2, 4096 * 4>{}};
+ // Violate max size
+ EXPECT_TRUE(allocator.allocate(BasicAllocRequest{4096 * 5}) == nullptr);
+ // Violate min alloc size
+ EXPECT_TRUE(allocator.allocate(BasicAllocRequest{4096}) == nullptr);
+ const auto first_memory = allocator.allocate(BasicAllocRequest{4096 * 4});
+ validate_block(first_memory);
+ // Violate pool size
+ EXPECT_TRUE(allocator.allocate(BasicAllocRequest{4096 * 4}) == nullptr);
+ const auto second_memory = allocator.allocate(BasicAllocRequest{4096 * 3});
+ validate_block(second_memory);
+ allocator.deallocate(second_memory);
+ // Check pool size update after deallocation
+ const auto new_second_memory = allocator.allocate(BasicAllocRequest{4096 * 2});
+ validate_block(new_second_memory);
+}
+
+TEST(shared_memory_allocator_tests, indirect_allocator) {
+ static_assert(IndirectAllocator<ValidateForwarding<0>>::alignment() ==
+ ValidateForwarding<0>::alignment());
+ const auto allocator_handle = std::make_shared<SnoopingAllocator<MemoryHeapBaseAllocator>>();
+ IndirectAllocator allocator{allocator_handle};
+ const auto memory = allocator.allocate(NamedAllocRequest{{4096}, "allocation"});
+ EXPECT_TRUE(allocator_handle->owns(memory));
+ EXPECT_TRUE(allocator_handle->getAllocations().size() == 1);
+ allocator.deallocate(memory);
+ EXPECT_FALSE(allocator_handle->owns(memory));
+ EXPECT_TRUE(allocator_handle->getAllocations().size() == 0);
+}
+
+TEST(shared_memory_allocator_tests, policy_allocator_forwarding) {
+ // Test appropriate forwarding of allocator, deallocate
+ const auto primary_allocator =
+ std::make_shared<SnoopingAllocator<MemoryHeapBaseAllocator>>("allocator");
+ PolicyAllocator allocator{IndirectAllocator(primary_allocator), SizePolicy<4096>{}};
+ const auto memory = allocator.allocate(NamedAllocRequest{{4096}, "allocation"});
+ EXPECT_TRUE(primary_allocator->owns(memory));
+ const auto& allocations = primary_allocator->getAllocations();
+ EXPECT_TRUE(allocations.size() == 1);
+ allocator.deallocate(memory);
+ EXPECT_TRUE(allocations.size() == 0);
+ const auto memory2 = allocator.allocate(NamedAllocRequest{{4096}, "allocation_2"});
+ EXPECT_TRUE(allocations.size() == 1);
+ EXPECT_TRUE(primary_allocator->owns(memory2));
+ allocator.deallocate(memory2);
+ EXPECT_FALSE(primary_allocator->owns(memory2));
+ EXPECT_TRUE(allocations.size() == 0);
+ // Test appropriate forwarding of own, dump, alignment, deallocate_all
+ PolicyAllocator allocator2{ValidateForwarding<0>{}, SizePolicy<4096>{}};
+ EXPECT_TRUE(allocator2.owns(ValidateForwarding<0>::owned));
+ EXPECT_FALSE(allocator2.owns(ValidateForwarding<0>::not_owned));
+ EXPECT_TRUE(allocator2.dump().find(ValidateForwarding<0>::dump_string) != std::string::npos);
+ static_assert(decltype(allocator2)::alignment() == ValidateForwarding<0>::alignment());
+ size_t prev = ValidateForwarding<0>::deallocate_all_count;
+ allocator2.deallocate_all();
+ EXPECT_EQ(ValidateForwarding<0>::deallocate_all_count, prev + 1);
+}
+
+TEST(shared_memory_allocator_tests, snooping_allocator_nullptr) {
+ SnoopingAllocator allocator{PolicyAllocator{MemoryHeapBaseAllocator{}, SizePolicy<4096 * 2>{}}};
+ const auto memory = allocator.allocate(NamedAllocRequest{{3000}, "allocation_1"});
+ validate_block(memory);
+ ASSERT_TRUE(allocator.allocate(NamedAllocRequest{{5000}, "allocation_2"}) == nullptr);
+ const auto& allocations = allocator.getAllocations();
+ EXPECT_EQ(allocations.size(), 1ul);
+ for (const auto& [key, val] : allocations) {
+ EXPECT_EQ(val.name, "allocation_1");
+ EXPECT_EQ(val.allocation_number, 0ul);
+ EXPECT_TRUE(key == memory);
+ }
+}
+
+TEST(shared_memory_allocator_tests, fallback_allocator) {
+ // Construct Fallback Allocator
+ const auto primary_allocator = std::make_shared<
+ SnoopingAllocator<PolicyAllocator<MemoryHeapBaseAllocator, SizePolicy<4096>>>>(
+ PolicyAllocator<MemoryHeapBaseAllocator, SizePolicy<4096>>{}, "primary_allocator");
+ const auto secondary_allocator =
+ std::make_shared<SnoopingAllocator<MemoryHeapBaseAllocator>>("secondary_allocator");
+
+ FallbackAllocator fallback_allocator{SnoopingAllocator{IndirectAllocator{primary_allocator}},
+ SnoopingAllocator{IndirectAllocator{secondary_allocator}}};
+ static_assert(decltype(fallback_allocator)::alignment() == 4096);
+ // Basic Allocation Test
+ const auto memory = fallback_allocator.allocate(NamedAllocRequest{{3000}, "allocation_1"});
+ validate_block(memory);
+ // Correct allocator selected
+ EXPECT_TRUE(fallback_allocator.owns(memory));
+ EXPECT_TRUE(primary_allocator->owns(memory));
+ EXPECT_FALSE(secondary_allocator->owns(memory));
+ // Test fallback allocation
+ const auto memory2 = fallback_allocator.allocate(NamedAllocRequest{{3000}, "allocation_2"});
+ validate_block(memory2);
+ // Correct allocator selected
+ EXPECT_TRUE(fallback_allocator.owns(memory2));
+ EXPECT_FALSE(primary_allocator->owns(memory2));
+ EXPECT_TRUE(secondary_allocator->owns(memory2));
+ // Allocations ended up in the correct allocators
+ const auto& primary_allocations = primary_allocator->getAllocations();
+ EXPECT_TRUE(primary_allocations.size() == 1ul);
+ ASSERT_TRUE(primary_allocations.find(memory) != primary_allocations.end());
+ EXPECT_EQ(primary_allocations.find(memory)->second.name, std::string{"allocation_1"});
+ const auto& secondary_allocations = secondary_allocator->getAllocations();
+ EXPECT_TRUE(secondary_allocations.size() == 1ul);
+ ASSERT_TRUE(secondary_allocations.find(memory2) != secondary_allocations.end());
+ EXPECT_EQ(secondary_allocations.find(memory2)->second.name, std::string{"allocation_2"});
+ // Test deallocate appropriate forwarding
+ fallback_allocator.deallocate(memory);
+ EXPECT_TRUE(primary_allocator->getAllocations().size() == 0ul);
+ EXPECT_TRUE(secondary_allocator->getAllocations().size() == 1ul);
+ // Appropriate fallback after deallocation
+ const auto memory3 = fallback_allocator.allocate(NamedAllocRequest{{3000}, "allocation_3"});
+ EXPECT_TRUE(fallback_allocator.owns(memory3));
+ EXPECT_TRUE(primary_allocator->owns(memory3));
+ EXPECT_FALSE(secondary_allocator->owns(memory3));
+ EXPECT_TRUE(primary_allocator->getAllocations().size() == 1ul);
+ // Test deallocate appropriate forwarding
+ EXPECT_TRUE(secondary_allocator->getAllocations().size() == 1ul);
+ fallback_allocator.deallocate(memory2);
+ EXPECT_TRUE(secondary_allocator->getAllocations().size() == 0ul);
+ const auto memory4 = fallback_allocator.allocate(NamedAllocRequest{{3000}, "allocation_4"});
+ EXPECT_TRUE(fallback_allocator.owns(memory4));
+ EXPECT_FALSE(primary_allocator->owns(memory4));
+ EXPECT_TRUE(secondary_allocator->owns(memory4));
+ // Allocations ended up in the correct allocators
+ EXPECT_TRUE(primary_allocator->getAllocations().size() == 1ul);
+ EXPECT_TRUE(secondary_allocator->getAllocations().size() == 1ul);
+ ASSERT_TRUE(primary_allocations.find(memory3) != primary_allocations.end());
+ EXPECT_EQ(primary_allocations.find(memory3)->second.name, std::string{"allocation_3"});
+ ASSERT_TRUE(secondary_allocations.find(memory4) != secondary_allocations.end());
+ EXPECT_EQ(secondary_allocations.find(memory4)->second.name, std::string{"allocation_4"});
+}
+
+TEST(shared_memory_allocator_tests, fallback_allocator_forwarding) {
+ // Test forwarding
+ using Alloc1 = ValidateForwarding<0, false>;
+ using Alloc2 = ValidateForwarding<1, false>;
+ FallbackAllocator forward_test{Alloc1{}, Alloc2{}};
+ EXPECT_TRUE(forward_test.dump().find(Alloc1::dump_string) != std::string::npos);
+ EXPECT_TRUE(forward_test.dump().find(Alloc2::dump_string) != std::string::npos);
+ // Test owned forwarding
+ EXPECT_TRUE(forward_test.owns(Alloc1::owned));
+ EXPECT_TRUE(forward_test.owns(Alloc2::owned));
+ EXPECT_FALSE(forward_test.owns(Alloc1::not_owned));
+ EXPECT_FALSE(forward_test.owns(Alloc2::not_owned));
+ // Test alignment forwarding
+ static_assert(FallbackAllocator<Alloc1, Alloc2>::alignment() == Alloc1::alignment());
+ // Test deallocate_all forwarding
+ size_t prev1 = Alloc1::deallocate_all_count;
+ size_t prev2 = Alloc2::deallocate_all_count;
+ forward_test.deallocate_all();
+ EXPECT_EQ(prev1 + 1, Alloc1::deallocate_all_count);
+ EXPECT_EQ(prev2 + 1, Alloc2::deallocate_all_count);
+}
+
+TEST(shared_memory_allocator_tests, scoped_allocator) {
+ const auto underlying_allocator =
+ std::make_shared<SnoopingAllocator<MemoryHeapBaseAllocator>>("Allocator");
+ ScopedAllocator allocator{underlying_allocator};
+ const auto& allocations = underlying_allocator->getAllocations();
+ {
+ decltype(allocator.allocate(NamedAllocRequest{})) copy;
+ {
+ EXPECT_EQ(allocations.size(), 0ul);
+ const auto memory = allocator.allocate(NamedAllocRequest{{3000}, "allocation_1"});
+ copy = memory;
+ EXPECT_EQ(allocations.size(), 1ul);
+ EXPECT_TRUE(allocator.owns(copy));
+ EXPECT_TRUE(allocator.owns(memory));
+ }
+ EXPECT_TRUE(allocator.owns(copy));
+ EXPECT_EQ(allocations.size(), 1ul);
+ for (const auto& [key, value] : allocations) {
+ EXPECT_EQ(value.name, std::string{"allocation_1"});
+ }
+ }
+ EXPECT_EQ(allocations.size(), 0ul);
+ // Test forwarding
+ static_assert(ScopedAllocator<ValidateForwarding<0>>::alignment() ==
+ ValidateForwarding<0>::alignment());
+ ScopedAllocator<ValidateForwarding<0>> forwarding{};
+ EXPECT_EQ(forwarding.dump(), ValidateForwarding<0>::dump_string);
+}
diff --git a/media/utils/tests/timecheck_tests.cpp b/media/utils/tests/timecheck_tests.cpp
index 8236174..bd91efa 100644
--- a/media/utils/tests/timecheck_tests.cpp
+++ b/media/utils/tests/timecheck_tests.cpp
@@ -26,7 +26,6 @@
using namespace std::chrono_literals;
namespace {
-
TEST(timecheck_tests, success) {
bool timeoutRegistered = false;
float elapsedMsRegistered = 0.f;
@@ -69,4 +68,33 @@
// Note: We do not test TimeCheck crash because TimeCheck is multithreaded and the
// EXPECT_EXIT() signal catching is imperfect due to the gtest fork.
+// Note, the following test is to manually verify the correct thread is aborted.
+// Due to difficulties with gtest and EXPECT_EXIT, this is difficult to verify
+// automatically. TODO(b/246446561) Attempt to use EXPECT_EXIT
+
+#if 0
+void threadFunction() {
+ bool timeoutRegistered = false;
+ float elapsedMsRegistered = 0.f;
+ std::atomic_bool event = false; // seq-cst implies acquire-release
+ {
+ TimeCheck timeCheck("timeout",
+ [&event, &timeoutRegistered, &elapsedMsRegistered]
+ (bool timeout, float elapsedMs) {
+ timeoutRegistered = timeout;
+ elapsedMsRegistered = elapsedMs;
+ event = true; // store-release, must be last.
+ }, 1ms /* timeoutDuration */, {} /* secondChanceDuration */, true /* crash */);
+ std::this_thread::sleep_for(100ms);
+ ADD_FAILURE();
+ }
+}
+
+TEST(timecheck_tests, death) {
+ std::thread mthread{threadFunction};
+ mthread.join();
+}
+#endif
+
} // namespace
+
diff --git a/services/audioflinger/AllocatorFactory.h b/services/audioflinger/AllocatorFactory.h
new file mode 100644
index 0000000..7534607
--- /dev/null
+++ b/services/audioflinger/AllocatorFactory.h
@@ -0,0 +1,95 @@
+/*
+**
+** Copyright 2022, The Android Open Source Project
+**
+** Licensed under the Apache License, Version 2.0 (the "License");
+** you may not use this file except in compliance with the License.
+** You may obtain a copy of the License at
+**
+** http://www.apache.org/licenses/LICENSE-2.0
+**
+** Unless required by applicable law or agreed to in writing, software
+** distributed under the License is distributed on an "AS IS" BASIS,
+** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+** See the License for the specific language governing permissions and
+** limitations under the License.
+*/
+
+#pragma once
+
+#include <mediautils/SharedMemoryAllocator.h>
+
+// TODO how do we appropriately restrict visibility of this header?
+// It should only be included in AudioFlinger.h
+// We will make everything internal linkage for now.
+namespace android {
+namespace AllocatorFactory {
+namespace {
+// TODO make sure these are appropriate
+constexpr inline size_t MAX_MEMORY_SIZE = 1024 * 1024 * 100; // 100 MiB
+constexpr inline size_t DED_SIZE = (MAX_MEMORY_SIZE * 4) / 10; // 40 MiB
+constexpr inline size_t SHARED_SIZE = MAX_MEMORY_SIZE - DED_SIZE; // 60 MiB
+constexpr inline size_t SHARED_SIZE_LARGE = (SHARED_SIZE * 4) / 6; // 40 MiB
+constexpr inline size_t SHARED_SIZE_SMALL = SHARED_SIZE - SHARED_SIZE_LARGE; // 20 MiB
+constexpr inline size_t SMALL_THRESHOLD = 1024 * 40; // 40 KiB
+
+inline auto getDedicated() {
+ using namespace mediautils;
+ static const auto allocator =
+ std::make_shared<PolicyAllocator<MemoryHeapBaseAllocator, SizePolicy<DED_SIZE>>>();
+ return allocator;
+}
+
+inline auto getSharedLarge() {
+ using namespace mediautils;
+ static const auto allocator = std::make_shared<
+ PolicyAllocator<MemoryHeapBaseAllocator, SizePolicy<SHARED_SIZE_LARGE>>>();
+ return allocator;
+}
+
+inline auto getSharedSmall() {
+ using namespace mediautils;
+ static const auto allocator =
+ std::make_shared<PolicyAllocator<MemoryHeapBaseAllocator,
+ SizePolicy<SHARED_SIZE_SMALL, 0, SMALL_THRESHOLD>>>();
+ return allocator;
+}
+
+template <typename Policy, typename Allocator>
+inline auto wrapWithPolicySnooping(Allocator allocator, std::string_view name) {
+ using namespace mediautils;
+ return SnoopingAllocator{PolicyAllocator{IndirectAllocator{allocator}, Policy{}}, name};
+}
+
+// A reasonable upper bound on how many clients we expect, and how many pieces to slice
+// the dedicated pool into.
+constexpr inline size_t CLIENT_BOUND = 32;
+// Maximum fraction of each shared pool that a single client can take (50%).
+constexpr inline size_t ADV_THRESHOLD_INV = 2;
+
+inline auto getClientAllocator() {
+ using namespace mediautils;
+ const auto makeDedPool = []() {
+ return wrapWithPolicySnooping<SizePolicy<DED_SIZE / CLIENT_BOUND>>(getDedicated(),
+ "Dedicated Pool");
+ };
+ const auto makeLargeShared = []() {
+ return wrapWithPolicySnooping<SizePolicy<SHARED_SIZE_LARGE / ADV_THRESHOLD_INV>>(
+ getSharedLarge(), "Large Shared");
+ };
+ const auto makeSmallShared = []() {
+ return wrapWithPolicySnooping<
+ SizePolicy<SHARED_SIZE_SMALL / ADV_THRESHOLD_INV>>(
+ getSharedSmall(), "Small Shared");
+ };
+
+ return ScopedAllocator{std::make_shared<
+ FallbackAllocator<decltype(makeDedPool()),
+ decltype(FallbackAllocator(makeLargeShared(), makeSmallShared()))>>(
+ makeDedPool(), FallbackAllocator{makeLargeShared(), makeSmallShared()})};
+}
+
+using ClientAllocator = decltype(getClientAllocator());
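+
+// Illustrative use (a minimal sketch mirroring AudioFlinger::Client; the
+// request name and size are arbitrary):
+//
+//   AllocatorFactory::ClientAllocator allocator = AllocatorFactory::getClientAllocator();
+//   auto cblk = allocator.allocate(mediautils::NamedAllocRequest{{1024}, "Track cblk"});
+//   // Requests draw from the per-client dedicated slice first and then fall
+//   // back to the shared pools; allocator.dump() is what the AudioFlinger
+//   // "Client Allocators:" dump section prints per client.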
+} // namespace
+} // namespace AllocatorFactory
+} // namespace android
diff --git a/services/audioflinger/AudioFlinger.cpp b/services/audioflinger/AudioFlinger.cpp
index 27b6790..12cf70d 100644
--- a/services/audioflinger/AudioFlinger.cpp
+++ b/services/audioflinger/AudioFlinger.cpp
@@ -642,21 +642,22 @@
}
/* static */
-int AudioFlinger::onExternalVibrationStart(const sp<os::ExternalVibration>& externalVibration) {
+os::HapticScale AudioFlinger::onExternalVibrationStart(
+ const sp<os::ExternalVibration>& externalVibration) {
sp<os::IExternalVibratorService> evs = getExternalVibratorService();
if (evs != nullptr) {
int32_t ret;
binder::Status status = evs->onExternalVibrationStart(*externalVibration, &ret);
if (status.isOk()) {
ALOGD("%s, start external vibration with intensity as %d", __func__, ret);
- return ret;
+ return os::ExternalVibration::externalVibrationScaleToHapticScale(ret);
}
}
ALOGD("%s, start external vibration with intensity as MUTE due to %s",
__func__,
evs == nullptr ? "external vibration service not found"
: "error when querying intensity");
- return static_cast<int>(os::HapticScale::MUTE);
+ return os::HapticScale::MUTE;
}
/* static */
@@ -731,15 +732,14 @@
{
String8 result;
- result.append("Clients:\n");
- result.append(" pid heap_size\n");
+ result.append("Client Allocators:\n");
for (size_t i = 0; i < mClients.size(); ++i) {
sp<Client> client = mClients.valueAt(i).promote();
if (client != 0) {
- result.appendFormat("%6d %12zu\n", client->pid(),
- client->heap()->getMemoryHeap()->getSize());
+ result.appendFormat("Client: %d\n", client->pid());
+ result.append(client->allocator().dump().c_str());
}
- }
+ }
result.append("Notification Clients:\n");
result.append(" pid uid name\n");
@@ -2186,12 +2186,8 @@
AudioFlinger::Client::Client(const sp<AudioFlinger>& audioFlinger, pid_t pid)
: RefBase(),
mAudioFlinger(audioFlinger),
- mPid(pid)
-{
- mMemoryDealer = new MemoryDealer(
- audioFlinger->getClientSharedHeapSize(),
- (std::string("AudioFlinger::Client(") + std::to_string(pid) + ")").c_str());
-}
+ mPid(pid),
+ mClientAllocator(AllocatorFactory::getClientAllocator()) {}
// Client destructor must be called with AudioFlinger::mClientLock held
AudioFlinger::Client::~Client()
@@ -2199,9 +2195,9 @@
mAudioFlinger->removeClient_l(mPid);
}
-sp<MemoryDealer> AudioFlinger::Client::heap() const
+AllocatorFactory::ClientAllocator& AudioFlinger::Client::allocator()
{
- return mMemoryDealer;
+ return mClientAllocator;
}
// ----------------------------------------------------------------------------
@@ -2871,13 +2867,15 @@
ALOGV("openOutput_l() created spatializer output: ID %d thread %p",
*output, thread.get());
} else if (flags & AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD) {
- thread = new OffloadThread(this, outputStream, *output, mSystemReady);
+ thread = new OffloadThread(this, outputStream, *output,
+ mSystemReady, halConfig->offload_info);
ALOGV("openOutput_l() created offload output: ID %d thread %p",
*output, thread.get());
} else if ((flags & AUDIO_OUTPUT_FLAG_DIRECT)
|| !isValidPcmSinkFormat(halConfig->format)
|| !isValidPcmSinkChannelMask(halConfig->channel_mask)) {
- thread = new DirectOutputThread(this, outputStream, *output, mSystemReady);
+ thread = new DirectOutputThread(this, outputStream, *output,
+ mSystemReady, halConfig->offload_info);
ALOGV("openOutput_l() created direct output: ID %d thread %p",
*output, thread.get());
} else {
@@ -3735,6 +3733,12 @@
using namespace std::chrono_literals;
auto inChannelMask = audio_channel_mask_out_to_in(track->channelMask());
+ if (inChannelMask == AUDIO_CHANNEL_INVALID) {
+ // The downstream PatchTrack has the proper output channel mask,
+ // so if there is no input channel mask equivalent, we can just
+ // use an index mask here to create the PatchRecord.
+ inChannelMask = audio_channel_mask_out_to_in_index_mask(track->channelMask());
+ }
sp patchRecord = new RecordThread::PatchRecord(nullptr /* thread */,
track->sampleRate(),
inChannelMask,
diff --git a/services/audioflinger/AudioFlinger.h b/services/audioflinger/AudioFlinger.h
index 08594e2..fcf19c9 100644
--- a/services/audioflinger/AudioFlinger.h
+++ b/services/audioflinger/AudioFlinger.h
@@ -75,6 +75,7 @@
#include <media/ExtendedAudioBufferProvider.h>
#include <media/VolumeShaper.h>
#include <mediautils/ServiceUtilities.h>
+#include <mediautils/SharedMemoryAllocator.h>
#include <mediautils/Synchronization.h>
#include <mediautils/ThreadSnapshot.h>
@@ -94,7 +95,7 @@
#include "NBAIO_Tee.h"
#include "ThreadMetrics.h"
#include "TrackMetrics.h"
-
+#include "AllocatorFactory.h"
#include <android/os/IPowerManager.h>
#include <media/nblog/NBLog.h>
@@ -319,7 +320,8 @@
sp<MmapStreamInterface>& interface,
audio_port_handle_t *handle);
- static int onExternalVibrationStart(const sp<os::ExternalVibration>& externalVibration);
+ static os::HapticScale onExternalVibrationStart(
+ const sp<os::ExternalVibration>& externalVibration);
static void onExternalVibrationStop(const sp<os::ExternalVibration>& externalVibration);
status_t addEffectToHal(audio_port_handle_t deviceId,
@@ -499,19 +501,19 @@
// --- Client ---
class Client : public RefBase {
- public:
- Client(const sp<AudioFlinger>& audioFlinger, pid_t pid);
+ public:
+ Client(const sp<AudioFlinger>& audioFlinger, pid_t pid);
virtual ~Client();
- sp<MemoryDealer> heap() const;
+ AllocatorFactory::ClientAllocator& allocator();
pid_t pid() const { return mPid; }
sp<AudioFlinger> audioFlinger() const { return mAudioFlinger; }
private:
DISALLOW_COPY_AND_ASSIGN(Client);
- const sp<AudioFlinger> mAudioFlinger;
- sp<MemoryDealer> mMemoryDealer;
+ const sp<AudioFlinger> mAudioFlinger;
const pid_t mPid;
+ AllocatorFactory::ClientAllocator mClientAllocator;
};
// --- Notification Client ---
diff --git a/services/audioflinger/Effects.cpp b/services/audioflinger/Effects.cpp
index fc3c07f..ca1bba6 100644
--- a/services/audioflinger/Effects.cpp
+++ b/services/audioflinger/Effects.cpp
@@ -278,8 +278,8 @@
if (!doRegister && !(registered && doEnable)) {
return NO_ERROR;
}
- mPolicyLock.lock();
}
+ mPolicyLock.lock();
ALOGV("%s name %s id %d session %d doRegister %d registered %d doEnable %d enabled %d",
__func__, mDescriptor.name, mId, mSessionId, doRegister, registered, doEnable, enabled);
if (doRegister) {
@@ -1597,7 +1597,7 @@
return isHapticGenerator(&mDescriptor.type);
}
-status_t AudioFlinger::EffectModule::setHapticIntensity(int id, int intensity)
+status_t AudioFlinger::EffectModule::setHapticIntensity(int id, os::HapticScale intensity)
{
if (mStatus != NO_ERROR) {
return mStatus;
@@ -1613,7 +1613,7 @@
param->vsize = sizeof(int32_t) * 2;
*(int32_t*)param->data = HG_PARAM_HAPTIC_INTENSITY;
*((int32_t*)param->data + 1) = id;
- *((int32_t*)param->data + 2) = intensity;
+ *((int32_t*)param->data + 2) = static_cast<int32_t>(intensity);
std::vector<uint8_t> response;
status_t status = command(EFFECT_CMD_SET_PARAM, request, sizeof(int32_t), &response);
if (status == NO_ERROR) {
@@ -1761,7 +1761,14 @@
return;
}
int bufOffset = ((sizeof(effect_param_cblk_t) - 1) / sizeof(int) + 1) * sizeof(int);
- mCblkMemory = client->heap()->allocate(EFFECT_PARAM_BUFFER_SIZE + bufOffset);
+ mCblkMemory = client->allocator().allocate(mediautils::NamedAllocRequest{
+ {static_cast<size_t>(EFFECT_PARAM_BUFFER_SIZE + bufOffset)},
+ std::string("Effect ID: ")
+ .append(std::to_string(effect->id()))
+ .append(" Session ID: ")
+ .append(std::to_string(static_cast<int>(effect->sessionId())))
+ .append(" \n")
+ });
if (mCblkMemory == 0 ||
(mCblk = static_cast<effect_param_cblk_t *>(mCblkMemory->unsecurePointer())) == NULL) {
ALOGE("not enough memory for Effect size=%zu", EFFECT_PARAM_BUFFER_SIZE +
@@ -2669,7 +2676,7 @@
return false;
}
-void AudioFlinger::EffectChain::setHapticIntensity_l(int id, int intensity)
+void AudioFlinger::EffectChain::setHapticIntensity_l(int id, os::HapticScale intensity)
{
Mutex::Autolock _l(mLock);
for (size_t i = 0; i < mEffects.size(); ++i) {
diff --git a/services/audioflinger/Effects.h b/services/audioflinger/Effects.h
index 78788df..72ec0e5 100644
--- a/services/audioflinger/Effects.h
+++ b/services/audioflinger/Effects.h
@@ -280,7 +280,7 @@
static bool isHapticGenerator(const effect_uuid_t* type);
bool isHapticGenerator() const;
- status_t setHapticIntensity(int id, int intensity);
+ status_t setHapticIntensity(int id, os::HapticScale intensity);
status_t setVibratorInfo(const media::AudioVibratorInfo& vibratorInfo);
status_t getConfigs(audio_config_base_t* inputCfg,
@@ -550,7 +550,7 @@
bool containsHapticGeneratingEffect_l();
- void setHapticIntensity_l(int id, int intensity);
+ void setHapticIntensity_l(int id, os::HapticScale intensity);
sp<EffectCallbackInterface> effectCallback() const { return mEffectCallback; }
wp<ThreadBase> thread() const { return mEffectCallback->thread(); }
diff --git a/services/audioflinger/PatchPanel.cpp b/services/audioflinger/PatchPanel.cpp
index 45dd258..b54b41f 100644
--- a/services/audioflinger/PatchPanel.cpp
+++ b/services/audioflinger/PatchPanel.cpp
@@ -313,12 +313,19 @@
patch->sources[0].config_mask & AUDIO_PORT_CONFIG_FLAGS ?
patch->sources[0].flags.input : AUDIO_INPUT_FLAG_NONE;
audio_io_handle_t input = AUDIO_IO_HANDLE_NONE;
+ audio_source_t source = AUDIO_SOURCE_MIC;
+ // For telephony patches, propagate voice communication use case to record side
+ if (patch->num_sources == 2
+ && patch->sources[1].ext.mix.usecase.stream
+ == AUDIO_STREAM_VOICE_CALL) {
+ source = AUDIO_SOURCE_VOICE_COMMUNICATION;
+ }
sp<ThreadBase> thread = mAudioFlinger.openInput_l(srcModule,
&input,
&config,
device,
address,
- AUDIO_SOURCE_MIC,
+ source,
flags,
outputDevice,
outputDeviceAddress);
@@ -516,9 +523,14 @@
audio_output_flags_t outputFlags = mAudioPatch.sinks[0].config_mask & AUDIO_PORT_CONFIG_FLAGS ?
mAudioPatch.sinks[0].flags.output : AUDIO_OUTPUT_FLAG_NONE;
audio_stream_type_t streamType = AUDIO_STREAM_PATCH;
+ audio_source_t source = AUDIO_SOURCE_DEFAULT;
if (mAudioPatch.num_sources == 2 && mAudioPatch.sources[1].type == AUDIO_PORT_TYPE_MIX) {
// "reuse one existing output mix" case
streamType = mAudioPatch.sources[1].ext.mix.usecase.stream;
+ // For telephony patches, propagate voice communication use case to record side
+ if (streamType == AUDIO_STREAM_VOICE_CALL) {
+ source = AUDIO_SOURCE_VOICE_COMMUNICATION;
+ }
}
if (mPlayback.thread()->hasFastMixer()) {
// Create a fast track if the playback thread has fast mixer to get better performance.
@@ -546,7 +558,8 @@
inChannelMask,
format,
frameCount,
- inputFlags);
+ inputFlags,
+ source);
} else {
// use a pseudo LCM between input and output framecount
int playbackShift = __builtin_ctz(playbackFrameCount);
@@ -566,7 +579,9 @@
frameCount,
nullptr,
(size_t)0 /* bufferSize */,
- inputFlags);
+ inputFlags,
+ {} /* timeout */,
+ source);
}
status = mRecord.checkTrack(tempRecordTrack.get());
if (status != NO_ERROR) {
diff --git a/services/audioflinger/RecordTracks.h b/services/audioflinger/RecordTracks.h
index e8552c4..daec57e 100644
--- a/services/audioflinger/RecordTracks.h
+++ b/services/audioflinger/RecordTracks.h
@@ -87,6 +87,10 @@
&& (flags & AUDIO_INPUT_FLAG_HW_AV_SYNC) == 0;
}
+ using SinkMetadatas = std::vector<record_track_metadata_v7_t>;
+ using MetadataInserter = std::back_insert_iterator<SinkMetadatas>;
+ virtual void copyMetadataTo(MetadataInserter& backInserter) const;
+
private:
friend class AudioFlinger; // for mState
@@ -134,7 +138,8 @@
void *buffer,
size_t bufferSize,
audio_input_flags_t flags,
- const Timeout& timeout = {});
+ const Timeout& timeout = {},
+ audio_source_t source = AUDIO_SOURCE_DEFAULT);
virtual ~PatchRecord();
virtual Source* getSource() { return nullptr; }
@@ -166,7 +171,8 @@
audio_channel_mask_t channelMask,
audio_format_t format,
size_t frameCount,
- audio_input_flags_t flags);
+ audio_input_flags_t flags,
+ audio_source_t source = AUDIO_SOURCE_DEFAULT);
Source* getSource() override { return static_cast<Source*>(this); }
diff --git a/services/audioflinger/TEST_MAPPING b/services/audioflinger/TEST_MAPPING
index 9aff137..5d3fb0a 100644
--- a/services/audioflinger/TEST_MAPPING
+++ b/services/audioflinger/TEST_MAPPING
@@ -1,5 +1,5 @@
{
- "postsubmit": [
+ "presubmit": [
{
"name": "CtsNativeMediaAAudioTestCases",
"options" : [
diff --git a/services/audioflinger/ThreadMetrics.h b/services/audioflinger/ThreadMetrics.h
index 6526655..5493b3c 100644
--- a/services/audioflinger/ThreadMetrics.h
+++ b/services/audioflinger/ThreadMetrics.h
@@ -148,7 +148,14 @@
item.set(AMEDIAMETRICS_PROP_CUMULATIVETIMENS, mCumulativeTimeNs)
.set(AMEDIAMETRICS_PROP_DEVICETIMENS, mDeviceTimeNs)
.set(AMEDIAMETRICS_PROP_EVENT, eventName)
- .set(AMEDIAMETRICS_PROP_INTERVALCOUNT, (int32_t)mIntervalCount);
+ .set(AMEDIAMETRICS_PROP_INTERVALCOUNT, (int32_t)mIntervalCount)
+ // we set "last" device to indicate the device the group was
+ // associated with (because a createPatch which is logged in ThreadMetrics
+ // could have changed the device).
+ .set(mIsOut
+ ? AMEDIAMETRICS_PROP_PREFIX_LAST AMEDIAMETRICS_PROP_OUTPUTDEVICES
+ : AMEDIAMETRICS_PROP_PREFIX_LAST AMEDIAMETRICS_PROP_INPUTDEVICES,
+ mDevices.c_str());
if (mDeviceLatencyMs.getN() > 0) {
item.set(AMEDIAMETRICS_PROP_DEVICELATENCYMS, mDeviceLatencyMs.getMean());
}
diff --git a/services/audioflinger/Threads.cpp b/services/audioflinger/Threads.cpp
index 71c24f3..69c7934 100644
--- a/services/audioflinger/Threads.cpp
+++ b/services/audioflinger/Threads.cpp
@@ -791,6 +791,7 @@
(CreateAudioPatchConfigEventData *)event->mData.get();
event->mStatus = createAudioPatch_l(&data->mPatch, &data->mHandle);
const DeviceTypeSet newDevices = getDeviceTypes();
+ configChanged = oldDevices != newDevices;
mLocalLog.log("CFG_EVENT_CREATE_AUDIO_PATCH: old device %s (%s) new device %s (%s)",
dumpDeviceTypes(oldDevices).c_str(), toString(oldDevices).c_str(),
dumpDeviceTypes(newDevices).c_str(), toString(newDevices).c_str());
@@ -801,6 +802,7 @@
(ReleaseAudioPatchConfigEventData *)event->mData.get();
event->mStatus = releaseAudioPatch_l(data->mHandle);
const DeviceTypeSet newDevices = getDeviceTypes();
+ configChanged = oldDevices != newDevices;
mLocalLog.log("CFG_EVENT_RELEASE_AUDIO_PATCH: old device %s (%s) new device %s (%s)",
dumpDeviceTypes(oldDevices).c_str(), toString(oldDevices).c_str(),
dumpDeviceTypes(newDevices).c_str(), toString(newDevices).c_str());
@@ -2742,7 +2744,7 @@
// Unlock due to VibratorService will lock for this call and will
// call Tracks.mute/unmute which also require thread's lock.
mLock.unlock();
- const int intensity = AudioFlinger::onExternalVibrationStart(
+ const os::HapticScale intensity = AudioFlinger::onExternalVibrationStart(
track->getExternalVibration());
std::optional<media::AudioVibratorInfo> vibratorInfo;
{
@@ -2752,7 +2754,7 @@
vibratorInfo = std::move(mAudioFlinger->getDefaultVibratorInfo_l());
}
mLock.lock();
- track->setHapticIntensity(static_cast<os::HapticScale>(intensity));
+ track->setHapticIntensity(intensity);
if (vibratorInfo) {
track->setHapticMaxAmplitude(vibratorInfo->maxAmplitude);
}
@@ -3435,9 +3437,15 @@
mActiveSleepTimeUs = activeSleepTimeUs();
mIdleSleepTimeUs = idleSleepTimeUs();
+ mStandbyDelayNs = AudioFlinger::mStandbyTimeInNsecs;
+ // Shorten standby delay on VOIP RX output to avoid delayed routing updates
+ // after a call due to call end tone.
+ if (mOutput != nullptr && (mOutput->flags & AUDIO_OUTPUT_FLAG_VOIP_RX) != 0) {
+ const nsecs_t NS_PER_MS = 1000000;
+ mStandbyDelayNs = std::min(mStandbyDelayNs, latency_l() * NS_PER_MS);
+ }
// make sure standby delay is not too short when connected to an A2DP sink to avoid
// truncating audio when going to standby.
- mStandbyDelayNs = AudioFlinger::mStandbyTimeInNsecs;
if (!Intersection(outDeviceTypes(), getAudioDeviceOutAllA2dpSet()).empty()) {
if (mStandbyDelayNs < kDefaultStandbyTimeInNsecs) {
mStandbyDelayNs = kDefaultStandbyTimeInNsecs;
@@ -4103,10 +4111,19 @@
mEffectBufferFormat,
mNormalFrameCount * mHapticChannelCount);
}
-
- memcpy_by_audio_format(mSinkBuffer, mFormat, effectBuffer, mEffectBufferFormat,
- mNormalFrameCount * (mChannelCount + mHapticChannelCount));
-
+ const size_t framesToCopy = mNormalFrameCount * (mChannelCount + mHapticChannelCount);
+ if (mFormat == AUDIO_FORMAT_PCM_FLOAT &&
+ mEffectBufferFormat == AUDIO_FORMAT_PCM_FLOAT) {
+ // Clamp PCM float values farther than this distance from 0 to insulate
+ // a HAL which doesn't handle NaN correctly.
+ static constexpr float HAL_FLOAT_SAMPLE_LIMIT = 2.0f;
+ memcpy_to_float_from_float_with_clamping(static_cast<float*>(mSinkBuffer),
+ static_cast<const float*>(effectBuffer),
+ framesToCopy, HAL_FLOAT_SAMPLE_LIMIT /* absMax */);
+ } else {
+ memcpy_by_audio_format(mSinkBuffer, mFormat,
+ effectBuffer, mEffectBufferFormat, framesToCopy);
+ }
// The sample data is partially interleaved when haptic channels exist,
// we need to adjust channels here.
if (mHapticChannelCount > 0) {
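The clamping path added above can be pictured with a small standalone loop; this is only an illustrative sketch using std::clamp, not the audio_utils primitive the change actually calls, and the explicit NaN-to-zero mapping is an assumption of the sketch:

#include <algorithm>
#include <cmath>
#include <cstddef>

// Sketch: copy float samples while clamping to +/-absMax, so NaN or wildly
// out-of-range effect output cannot reach a HAL that mishandles such values.
void copyWithClamp(float* dst, const float* src, size_t count, float absMax) {
    for (size_t i = 0; i < count; ++i) {
        const float v = src[i];
        // NaN compares false against everything, so map it to 0 explicitly.
        dst[i] = std::isnan(v) ? 0.0f : std::clamp(v, -absMax, absMax);
    }
}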
@@ -4488,7 +4505,7 @@
// When the track is stopped, set the haptic intensity to MUTE
// for the HapticGenerator effect.
if (chain != nullptr) {
- chain->setHapticIntensity_l(track->id(), static_cast<int>(os::HapticScale::MUTE));
+ chain->setHapticIntensity_l(track->id(), os::HapticScale::MUTE);
}
}
}
@@ -6134,8 +6151,10 @@
// ----------------------------------------------------------------------------
AudioFlinger::DirectOutputThread::DirectOutputThread(const sp<AudioFlinger>& audioFlinger,
- AudioStreamOut* output, audio_io_handle_t id, ThreadBase::type_t type, bool systemReady)
+ AudioStreamOut* output, audio_io_handle_t id, ThreadBase::type_t type, bool systemReady,
+ const audio_offload_info_t& offloadInfo)
: PlaybackThread(audioFlinger, output, id, type, systemReady)
+ , mOffloadInfo(offloadInfo)
{
setMasterBalance(audioFlinger->getMasterBalance_l());
}
@@ -6414,7 +6433,8 @@
// fill a buffer, then remove it from active list.
// Only consider last track started for mixer state control
bool isTimestampAdvancing = mIsTimestampAdvancing.check(mOutput);
- if (--(track->mRetryCount) <= 0) {
+ if (!isTunerStream() // tuner streams remain active in underrun
+ && --(track->mRetryCount) <= 0) {
if (isTimestampAdvancing) { // HAL is still playing audio, give us more time.
track->mRetryCount = kMaxTrackRetriesOffload;
} else {
@@ -6458,6 +6478,7 @@
(doHwPause || (mFlushPending && !mHwPaused && (count != 0)))) {
status_t result = mOutput->stream->pause();
ALOGE_IF(result != OK, "Error when pausing output stream: %d", result);
+ doHwResume = !doHwPause; // resume if pause is due to flush.
}
if (mFlushPending) {
flushHw_l();
@@ -6777,8 +6798,9 @@
// ----------------------------------------------------------------------------
AudioFlinger::OffloadThread::OffloadThread(const sp<AudioFlinger>& audioFlinger,
- AudioStreamOut* output, audio_io_handle_t id, bool systemReady)
- : DirectOutputThread(audioFlinger, output, id, OFFLOAD, systemReady),
+ AudioStreamOut* output, audio_io_handle_t id, bool systemReady,
+ const audio_offload_info_t& offloadInfo)
+ : DirectOutputThread(audioFlinger, output, id, OFFLOAD, systemReady, offloadInfo),
mPausedWriteLength(0), mPausedBytesRemaining(0), mKeepWakeLock(true)
{
//FIXME: mStandby should be set to true by ThreadBase constructor
@@ -6997,7 +7019,8 @@
// No buffers for this track. Give it a few chances to
// fill a buffer, then remove it from active list.
bool isTimestampAdvancing = mIsTimestampAdvancing.check(mOutput);
- if (--(track->mRetryCount) <= 0) {
+ if (!isTunerStream() // tuner streams remain active in underrun
+ && --(track->mRetryCount) <= 0) {
if (isTimestampAdvancing) { // HAL is still playing audio, give us more time.
track->mRetryCount = kMaxTrackRetriesOffload;
} else {
@@ -7026,6 +7049,7 @@
if (!mStandby && (doHwPause || (mFlushPending && !mHwPaused && (count != 0)))) {
status_t result = mOutput->stream->pause();
ALOGE_IF(result != OK, "Error when pausing output stream: %d", result);
+ doHwResume = !doHwPause; // resume if pause is due to flush.
}
if (mFlushPending) {
flushHw_l();
@@ -7345,6 +7369,27 @@
if (status != INVALID_OPERATION) {
updateHalSupportedLatencyModes_l();
}
+
+ // update priority if specified.
+ constexpr int32_t kRTPriorityMin = 1;
+ constexpr int32_t kRTPriorityMax = 3;
+ const int32_t priorityBoost =
+ property_get_int32("audio.spatializer.priority", kRTPriorityMin);
+ if (priorityBoost >= kRTPriorityMin && priorityBoost <= kRTPriorityMax) {
+ const pid_t pid = getpid();
+ const pid_t tid = getTid();
+
+ if (tid == -1) {
+ // Unusual: PlaybackThread::onFirstRef() should set the threadLoop running.
+ ALOGW("%s: audio.spatializer.priority %d ignored, thread not running",
+ __func__, priorityBoost);
+ } else {
+ ALOGD("%s: audio.spatializer.priority %d, allowing real time for pid %d tid %d",
+ __func__, priorityBoost, pid, tid);
+ sendPrioConfigEvent_l(pid, tid, priorityBoost, false /*forApp*/);
+ stream()->setHalThreadPriority(priorityBoost);
+ }
+ }
}
status_t AudioFlinger::SpatializerThread::createAudioPatch_l(const struct audio_patch *patch,
@@ -8785,21 +8830,9 @@
return; // nothing to do
}
StreamInHalInterface::SinkMetadata metadata;
+ auto backInserter = std::back_inserter(metadata.tracks);
for (const sp<RecordTrack> &track : mActiveTracks) {
- // Do not forward PatchRecord metadata to audio HAL
- if (track->isPatchTrack()) {
- continue;
- }
- // No track is invalid as this is called after prepareTrack_l in the same critical section
- record_track_metadata_v7_t trackMetadata;
- trackMetadata.base = {
- .source = track->attributes().source,
- .gain = 1, // capture tracks do not have volumes
- };
- trackMetadata.channel_mask = track->channelMask(),
- strncpy(trackMetadata.tags, track->attributes().tags, AUDIO_ATTRIBUTES_TAGS_MAX_SIZE);
-
- metadata.tracks.push_back(trackMetadata);
+ track->copyMetadataTo(backInserter);
}
mInput->stream->updateSinkMetadata(metadata);
}
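The refactor above moves per-track metadata construction behind copyMetadataTo() and hands each track a back-insert iterator; a minimal standalone sketch of that pattern follows, where the simplified Track and Metadata types are assumptions of the sketch rather than the AOSP structs:

#include <iterator>
#include <vector>

struct Metadata { int source; float gain; };

struct Track {
    int source;
    // Each track decides for itself whether it contributes metadata
    // (e.g. a patch track with an unspecified source appends nothing).
    void copyMetadataTo(std::back_insert_iterator<std::vector<Metadata>>& out) const {
        if (source == 0 /* default/unspecified */) return;
        *out++ = Metadata{source, 1.0f /* capture tracks have no volume */};
    }
};

std::vector<Metadata> collect(const std::vector<Track>& tracks) {
    std::vector<Metadata> sink;
    auto inserter = std::back_inserter(sink);
    for (const auto& t : tracks) t.copyMetadataTo(inserter);
    return sink;
}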
@@ -10273,19 +10306,22 @@
void AudioFlinger::MmapThread::checkInvalidTracks_l()
{
+ sp<MmapStreamCallback> callback;
for (const sp<MmapTrack> &track : mActiveTracks) {
if (track->isInvalid()) {
- sp<MmapStreamCallback> callback = mCallback.promote();
- if (callback != 0) {
- mLock.unlock();
- callback->onTearDown(track->portId());
- mLock.lock();
- } else if (mNoCallbackWarningCount < kMaxNoCallbackWarnings) {
- ALOGW("Could not notify MMAP stream tear down: no onTearDown callback!");
+ callback = mCallback.promote();
+ if (callback == nullptr && mNoCallbackWarningCount < kMaxNoCallbackWarnings) {
+ ALOGW("Could not notify MMAP stream tear down: no onRoutingChanged callback!");
mNoCallbackWarningCount++;
}
+ break;
}
}
+ if (callback != 0) {
+ mLock.unlock();
+ callback->onRoutingChanged(AUDIO_PORT_HANDLE_NONE);
+ mLock.lock();
+ }
}
void AudioFlinger::MmapThread::dumpInternals_l(int fd, const Vector<String16>& args __unused)
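The checkInvalidTracks_l() change also promotes the callback inside the loop but defers the call until the thread lock can be dropped; an analogous standalone sketch using std::weak_ptr and std::mutex is shown below, where the Callback type and method names are illustrative stand-ins, not the AOSP interfaces:

#include <memory>
#include <mutex>

struct Callback { void onRoutingChanged(int /*portHandle*/) {} };

class Thread {
public:
    void checkInvalid() {
        std::shared_ptr<Callback> cb;
        {
            std::lock_guard<std::mutex> lock(mMutex);
            // Promote the weak reference while holding the lock,
            // but do NOT call out to it yet.
            cb = mCallback.lock();
        }
        // Invoke the callback with the lock released to avoid
        // deadlocks / lock-order inversions in the callee.
        if (cb) cb->onRoutingChanged(-1 /* AUDIO_PORT_HANDLE_NONE analogue */);
    }
private:
    std::mutex mMutex;
    std::weak_ptr<Callback> mCallback;
};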
diff --git a/services/audioflinger/Threads.h b/services/audioflinger/Threads.h
index b477d65..c509d73 100644
--- a/services/audioflinger/Threads.h
+++ b/services/audioflinger/Threads.h
@@ -1546,8 +1546,9 @@
public:
DirectOutputThread(const sp<AudioFlinger>& audioFlinger, AudioStreamOut* output,
- audio_io_handle_t id, bool systemReady)
- : DirectOutputThread(audioFlinger, output, id, DIRECT, systemReady) { }
+ audio_io_handle_t id, bool systemReady,
+ const audio_offload_info_t& offloadInfo)
+ : DirectOutputThread(audioFlinger, output, id, DIRECT, systemReady, offloadInfo) { }
virtual ~DirectOutputThread();
@@ -1579,11 +1580,14 @@
virtual void onAddNewTrack_l();
+ const audio_offload_info_t mOffloadInfo;
bool mVolumeShaperActive = false;
DirectOutputThread(const sp<AudioFlinger>& audioFlinger, AudioStreamOut* output,
- audio_io_handle_t id, ThreadBase::type_t type, bool systemReady);
+ audio_io_handle_t id, ThreadBase::type_t type, bool systemReady,
+ const audio_offload_info_t& offloadInfo);
void processVolume_l(Track *track, bool lastTrack);
+ bool isTunerStream() const { return (mOffloadInfo.content_id > 0); }
// prepareTracks_l() tells threadLoop_mix() the name of the single active track
sp<Track> mActiveTrack;
@@ -1621,7 +1625,8 @@
public:
OffloadThread(const sp<AudioFlinger>& audioFlinger, AudioStreamOut* output,
- audio_io_handle_t id, bool systemReady);
+ audio_io_handle_t id, bool systemReady,
+ const audio_offload_info_t& offloadInfo);
virtual ~OffloadThread() {};
void flushHw_l() override;
diff --git a/services/audioflinger/Tracks.cpp b/services/audioflinger/Tracks.cpp
index 44a93c1..d7dbff3 100644
--- a/services/audioflinger/Tracks.cpp
+++ b/services/audioflinger/Tracks.cpp
@@ -162,11 +162,12 @@
}
if (client != 0) {
- mCblkMemory = client->heap()->allocate(size);
+ mCblkMemory = client->allocator().allocate(mediautils::NamedAllocRequest{{size},
+ std::string("Track ID: ").append(std::to_string(mId))});
if (mCblkMemory == 0 ||
(mCblk = static_cast<audio_track_cblk_t *>(mCblkMemory->unsecurePointer())) == NULL) {
ALOGE("%s(%d): not enough memory for AudioTrack size=%zu", __func__, mId, size);
- client->heap()->dump("AudioTrack");
+ ALOGE("%s", client->allocator().dump().c_str());
mCblkMemory.clear();
return;
}
@@ -1480,7 +1481,7 @@
}
}
- metadata.channel_mask = mChannelMask,
+ metadata.channel_mask = mChannelMask;
strncpy(metadata.tags, mAttr.tags, AUDIO_ATTRIBUTES_TAGS_MAX_SIZE);
*backInserter++ = metadata;
}
@@ -2059,7 +2060,6 @@
{
Buffer *pInBuffer;
Buffer inBuffer;
- bool outputBufferFull = false;
inBuffer.frameCount = frames;
inBuffer.raw = data;
@@ -2089,7 +2089,6 @@
ALOGV("%s(%d): thread %d no more output buffers; status %d",
__func__, mId,
(int)mThreadIoHandle, status);
- outputBufferFull = true;
break;
}
uint32_t waitTimeMs = (uint32_t)ns2ms(systemTime() - startTime);
@@ -2785,6 +2784,25 @@
}
}
+void AudioFlinger::RecordThread::RecordTrack::copyMetadataTo(MetadataInserter& backInserter) const
+{
+
+ // Do not forward PatchRecord metadata with unspecified audio source
+ if (mAttr.source == AUDIO_SOURCE_DEFAULT) {
+ return;
+ }
+
+ // No track is invalid as this is called after prepareTrack_l in the same critical section
+ record_track_metadata_v7_t metadata;
+ metadata.base = {
+ .source = mAttr.source,
+ .gain = 1, // capture tracks do not have volumes
+ };
+ metadata.channel_mask = mChannelMask;
+ strncpy(metadata.tags, mAttr.tags, AUDIO_ATTRIBUTES_TAGS_MAX_SIZE);
+
+ *backInserter++ = metadata;
+}
// ----------------------------------------------------------------------------
#undef LOG_TAG
@@ -2798,9 +2816,10 @@
void *buffer,
size_t bufferSize,
audio_input_flags_t flags,
- const Timeout& timeout)
+ const Timeout& timeout,
+ audio_source_t source)
: RecordTrack(recordThread, NULL,
- audio_attributes_t{} /* currently unused for patch track */,
+ audio_attributes_t{ .source = source } ,
sampleRate, format, channelMask, frameCount,
buffer, bufferSize, AUDIO_SESSION_NONE, getpid(),
audioServerAttributionSource(getpid()), flags, TYPE_PATCH),
@@ -2911,9 +2930,10 @@
audio_channel_mask_t channelMask,
audio_format_t format,
size_t frameCount,
- audio_input_flags_t flags)
+ audio_input_flags_t flags,
+ audio_source_t source)
: PatchRecord(recordThread, sampleRate, channelMask, format, frameCount,
- nullptr /*buffer*/, 0 /*bufferSize*/, flags),
+ nullptr /*buffer*/, 0 /*bufferSize*/, flags, {} /* timeout */, source),
mPatchRecordAudioBufferProvider(*this),
mSinkBuffer(allocAligned(32, mFrameCount * mFrameSize)),
mStubBuffer(allocAligned(32, mFrameCount * mFrameSize))
diff --git a/services/audiopolicy/AudioPolicyInterface.h b/services/audiopolicy/AudioPolicyInterface.h
index c4c27e8..b85382e 100644
--- a/services/audiopolicy/AudioPolicyInterface.h
+++ b/services/audiopolicy/AudioPolicyInterface.h
@@ -307,13 +307,13 @@
virtual status_t listAudioProductStrategies(AudioProductStrategyVector &strategies) = 0;
virtual status_t getProductStrategyFromAudioAttributes(
- const AudioAttributes &aa, product_strategy_t &productStrategy,
+ const audio_attributes_t &aa, product_strategy_t &productStrategy,
bool fallbackOnDefault) = 0;
virtual status_t listAudioVolumeGroups(AudioVolumeGroupVector &groups) = 0;
virtual status_t getVolumeGroupFromAudioAttributes(
- const AudioAttributes &aa, volume_group_t &volumeGroup, bool fallbackOnDefault) = 0;
+ const audio_attributes_t &aa, volume_group_t &volumeGroup, bool fallbackOnDefault) = 0;
virtual bool isCallScreenModeSupported() = 0;
diff --git a/services/audiopolicy/common/managerdefinitions/include/AudioPolicyMix.h b/services/audiopolicy/common/managerdefinitions/include/AudioPolicyMix.h
index bb1699e..54a143c 100644
--- a/services/audiopolicy/common/managerdefinitions/include/AudioPolicyMix.h
+++ b/services/audiopolicy/common/managerdefinitions/include/AudioPolicyMix.h
@@ -55,7 +55,7 @@
status_t getAudioPolicyMix(audio_devices_t deviceType,
const String8& address, sp<AudioPolicyMix> &policyMix) const;
- status_t registerMix(AudioMix mix, sp<SwAudioOutputDescriptor> desc);
+ status_t registerMix(const AudioMix& mix, const sp<SwAudioOutputDescriptor>& desc);
status_t unregisterMix(const AudioMix& mix);
@@ -72,13 +72,16 @@
*/
status_t getOutputForAttr(const audio_attributes_t& attributes,
const audio_config_base_t& config,
- uid_t uid, audio_output_flags_t flags,
+ uid_t uid,
+ audio_session_t session,
+ audio_output_flags_t flags,
sp<AudioPolicyMix> &primaryMix,
std::vector<sp<AudioPolicyMix>> *secondaryMixes);
- sp<DeviceDescriptor> getDeviceAndMixForInputSource(audio_source_t inputSource,
+ sp<DeviceDescriptor> getDeviceAndMixForInputSource(const audio_attributes_t& attributes,
const DeviceVector &availableDeviceTypes,
uid_t uid,
+ audio_session_t session,
sp<AudioPolicyMix> *policyMix) const;
/**
@@ -124,11 +127,13 @@
void dump(String8 *dst) const;
private:
- enum class MixMatchStatus { MATCH, NO_MATCH, INVALID_MIX };
- MixMatchStatus mixMatch(const AudioMix* mix, size_t mixIndex,
+ bool mixMatch(const AudioMix* mix, size_t mixIndex,
const audio_attributes_t& attributes,
const audio_config_base_t& config,
- uid_t uid);
+ uid_t uid,
+ audio_session_t session);
};
+std::optional<std::string> extractAddressFromAudioAttributes(const audio_attributes_t& attr);
+
} // namespace android
diff --git a/services/audiopolicy/common/managerdefinitions/src/AudioPolicyMix.cpp b/services/audiopolicy/common/managerdefinitions/src/AudioPolicyMix.cpp
index 551eab6..003dcaf 100644
--- a/services/audiopolicy/common/managerdefinitions/src/AudioPolicyMix.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/AudioPolicyMix.cpp
@@ -17,6 +17,10 @@
#define LOG_TAG "APM_AudioPolicyMix"
//#define LOG_NDEBUG 0
+#include <algorithm>
+#include <iterator>
+#include <optional>
+#include <regex>
#include "AudioPolicyMix.h"
#include "TypeConverter.h"
#include "HwModule.h"
@@ -25,6 +29,98 @@
#include <AudioOutputDescriptor.h>
namespace android {
+namespace {
+
+bool matchAddressToTags(const audio_attributes_t& attr, const String8& addr) {
+ std::optional<std::string> tagAddress = extractAddressFromAudioAttributes(attr);
+ return tagAddress.has_value() && tagAddress->compare(addr.c_str()) == 0;
+}
+
+// Returns true if the criterion matches.
+// Exclude criteria are handled the same way as positive ones:
+// only the condition itself is evaluated (the function returns the
+// same result for RULE_MATCH_X and RULE_EXCLUDE_X).
+bool isCriterionMatched(const AudioMixMatchCriterion& criterion,
+ const audio_attributes_t& attr,
+ const uid_t uid,
+ const audio_session_t session) {
+ uint32_t ruleWithoutExclusion = criterion.mRule & ~RULE_EXCLUSION_MASK;
+ switch(ruleWithoutExclusion) {
+ case RULE_MATCH_ATTRIBUTE_USAGE:
+ return criterion.mValue.mUsage == attr.usage;
+ case RULE_MATCH_ATTRIBUTE_CAPTURE_PRESET:
+ return criterion.mValue.mSource == attr.source;
+ case RULE_MATCH_UID:
+ return criterion.mValue.mUid == uid;
+ case RULE_MATCH_USERID:
+ {
+ userid_t userId = multiuser_get_user_id(uid);
+ return criterion.mValue.mUserId == userId;
+ }
+ case RULE_MATCH_AUDIO_SESSION_ID:
+ return criterion.mValue.mAudioSessionId == session;
+ }
+ ALOGE("Encountered invalid mix rule 0x%x", criterion.mRule);
+ return false;
+}
+
+// Returns true if the vector of criteria is matched:
+// - If any exclude criterion is matched, the criteria do not match.
+// - Otherwise, for each 'dimension' of positive rule present
+// (usage, capture preset, uid, userid...) at least one rule must match
+// for the criteria to match.
+bool areMixCriteriaMatched(const std::vector<AudioMixMatchCriterion>& criteria,
+ const audio_attributes_t& attr,
+ const uid_t uid,
+ const audio_session_t session) {
+ // If any of the exclusion criteria are matched the mix doesn't match.
+ auto isMatchingExcludeCriterion = [&](const AudioMixMatchCriterion& c) {
+ return c.isExcludeCriterion() && isCriterionMatched(c, attr, uid, session);
+ };
+ if (std::any_of(criteria.begin(), criteria.end(), isMatchingExcludeCriterion)) {
+ return false;
+ }
+
+ uint32_t presentPositiveRules = 0; // Bitmask of all present positive criteria.
+ uint32_t matchedPositiveRules = 0; // Bitmask of all matched positive criteria.
+ for (const auto& criterion : criteria) {
+ if (criterion.isExcludeCriterion()) {
+ continue;
+ }
+ presentPositiveRules |= criterion.mRule;
+ if (isCriterionMatched(criterion, attr, uid, session)) {
+ matchedPositiveRules |= criterion.mRule;
+ }
+ }
+ return presentPositiveRules == matchedPositiveRules;
+}
+
+// Consistency checks: for each "dimension" of rules (usage, uid...), we can
+// only have MATCH rules, or EXCLUDE rules in each dimension, not a combination.
+bool areMixCriteriaConsistent(const std::vector<AudioMixMatchCriterion>& criteria) {
+ std::set<uint32_t> positiveCriteria;
+ for (const AudioMixMatchCriterion& c : criteria) {
+ if (c.isExcludeCriterion()) {
+ continue;
+ }
+ positiveCriteria.insert(c.mRule);
+ }
+
+ auto isConflictingCriterion = [&positiveCriteria](const AudioMixMatchCriterion& c) {
+ uint32_t ruleWithoutExclusion = c.mRule & ~RULE_EXCLUSION_MASK;
+ return c.isExcludeCriterion() &&
+ (positiveCriteria.find(ruleWithoutExclusion) != positiveCriteria.end());
+ };
+ return std::none_of(criteria.begin(), criteria.end(), isConflictingCriterion);
+}
+
+template <typename Predicate>
+void EraseCriteriaIf(std::vector<AudioMixMatchCriterion>& v,
+ const Predicate& predicate) {
+ v.erase(std::remove_if(v.begin(), v.end(), predicate), v.end());
+}
+
+} // namespace
void AudioPolicyMix::dump(String8 *dst, int spaces, int index) const
{
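The present/matched bitmask logic in areMixCriteriaMatched() can be exercised with a tiny self-contained model; the simplified rule constants and Criterion struct below are assumptions of the sketch, not the real AudioMixMatchCriterion:

#include <cassert>
#include <vector>

// Simplified model: each positive rule "dimension" is one bit.
constexpr unsigned RULE_USAGE = 0x1;
constexpr unsigned RULE_UID   = 0x2;

struct Criterion { unsigned rule; int value; };

bool matches(const std::vector<Criterion>& criteria, int usage, int uid) {
    unsigned present = 0, matched = 0;
    for (const auto& c : criteria) {
        present |= c.rule;
        const int actual = (c.rule == RULE_USAGE) ? usage : uid;
        if (actual == c.value) matched |= c.rule;
    }
    // Every dimension that appears must have at least one matching rule.
    return present == matched;
}

int main() {
    const std::vector<Criterion> criteria = {
        {RULE_UID, 1000}, {RULE_UID, 1001}, {RULE_USAGE, /*MEDIA*/ 1}};
    assert(matches(criteria, /*usage=*/1, /*uid=*/1001));   // both dimensions satisfied
    assert(!matches(criteria, /*usage=*/1, /*uid=*/1002));  // uid dimension present but unmatched
}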
@@ -66,6 +162,9 @@
case RULE_MATCH_USERID:
ruleValue = std::to_string(criterion.mValue.mUserId);
break;
+ case RULE_MATCH_AUDIO_SESSION_ID:
+ ruleValue = std::to_string(criterion.mValue.mAudioSessionId);
+ break;
default:
unknownRule = true;
}
@@ -78,7 +177,8 @@
}
}
-status_t AudioPolicyMixCollection::registerMix(AudioMix mix, sp<SwAudioOutputDescriptor> desc)
+status_t AudioPolicyMixCollection::registerMix(const AudioMix& mix,
+ const sp<SwAudioOutputDescriptor>& desc)
{
for (size_t i = 0; i < size(); i++) {
const sp<AudioPolicyMix>& registeredMix = itemAt(i);
@@ -89,12 +189,17 @@
return BAD_VALUE;
}
}
- sp<AudioPolicyMix> policyMix = new AudioPolicyMix(mix);
+ if (!areMixCriteriaConsistent(mix.mCriteria)) {
+ ALOGE("registerMix(): Mix contains inconsistent criteria "
+ "(MATCH & EXCLUDE criteria of the same type)");
+ return BAD_VALUE;
+ }
+ sp<AudioPolicyMix> policyMix = sp<AudioPolicyMix>::make(mix);
add(policyMix);
ALOGD("registerMix(): adding mix for dev=0x%x addr=%s",
policyMix->mDeviceType, policyMix->mDeviceAddress.string());
- if (desc != 0) {
+ if (desc != nullptr) {
desc->mPolicyMix = policyMix;
policyMix->setOutput(desc);
}
@@ -151,7 +256,8 @@
}
status_t AudioPolicyMixCollection::getOutputForAttr(
- const audio_attributes_t& attributes, const audio_config_base_t& config, uid_t uid,
+ const audio_attributes_t& attributes, const audio_config_base_t& config, const uid_t uid,
+ const audio_session_t session,
audio_output_flags_t flags,
sp<AudioPolicyMix> &primaryMix,
std::vector<sp<AudioPolicyMix>> *secondaryMixes)
@@ -177,15 +283,9 @@
continue; // Primary output already found
}
- switch (mixMatch(policyMix.get(), i, attributes, config, uid)) {
- case MixMatchStatus::INVALID_MIX:
- // The mix has contradictory rules, ignore it
- // TODO: reject invalid mix at registration
- continue;
- case MixMatchStatus::NO_MATCH:
- ALOGV("%s: Mix %zu: does not match", __func__, i);
- continue; // skip the mix
- case MixMatchStatus::MATCH:;
+ if (!mixMatch(policyMix.get(), i, attributes, config, uid, session)) {
+ ALOGV("%s: Mix %zu: does not match", __func__, i);
+ continue; // skip the mix
}
if (primaryOutputMix) {
@@ -201,9 +301,9 @@
return NO_ERROR;
}
-AudioPolicyMixCollection::MixMatchStatus AudioPolicyMixCollection::mixMatch(
- const AudioMix* mix, size_t mixIndex, const audio_attributes_t& attributes,
- const audio_config_base_t& config, uid_t uid) {
+bool AudioPolicyMixCollection::mixMatch(const AudioMix* mix, size_t mixIndex,
+ const audio_attributes_t& attributes, const audio_config_base_t& config,
+ uid_t uid, audio_session_t session) {
if (mix->mMixType == MIX_TYPE_PLAYERS) {
// Loopback render mixes are created from a public API and thus restricted
@@ -213,171 +313,45 @@
attributes.usage == AUDIO_USAGE_MEDIA ||
attributes.usage == AUDIO_USAGE_GAME ||
attributes.usage == AUDIO_USAGE_VOICE_COMMUNICATION)) {
- return MixMatchStatus::NO_MATCH;
+ return false;
}
auto hasFlag = [](auto flags, auto flag) { return (flags & flag) == flag; };
if (hasFlag(attributes.flags, AUDIO_FLAG_NO_SYSTEM_CAPTURE)) {
- return MixMatchStatus::NO_MATCH;
+ return false;
}
if (attributes.usage == AUDIO_USAGE_VOICE_COMMUNICATION) {
if (!mix->mVoiceCommunicationCaptureAllowed) {
- return MixMatchStatus::NO_MATCH;
+ return false;
}
} else if (!mix->mAllowPrivilegedMediaPlaybackCapture &&
hasFlag(attributes.flags, AUDIO_FLAG_NO_MEDIA_PROJECTION)) {
- return MixMatchStatus::NO_MATCH;
+ return false;
}
}
// Permit match only if requested format and mix format are PCM and can be format
// adapted by the mixer, or are the same (compressed) format.
- if (!((audio_is_linear_pcm(config.format) && audio_is_linear_pcm(mix->mFormat.format)) ||
+ if (!is_mix_loopback(mix->mRouteFlags) &&
+ !((audio_is_linear_pcm(config.format) && audio_is_linear_pcm(mix->mFormat.format)) ||
(config.format == mix->mFormat.format)) &&
config.format != AUDIO_CONFIG_BASE_INITIALIZER.format) {
- return MixMatchStatus::NO_MATCH;
+ return false;
}
- int userId = (int) multiuser_get_user_id(uid);
-
- // TODO if adding more player rules (currently only 2), make rule handling "generic"
- // as there is no difference in the treatment of usage- or uid-based rules
- bool hasUsageMatchRules = false;
- bool hasUsageExcludeRules = false;
- bool usageMatchFound = false;
- bool usageExclusionFound = false;
-
- bool hasUidMatchRules = false;
- bool hasUidExcludeRules = false;
- bool uidMatchFound = false;
- bool uidExclusionFound = false;
-
- bool hasUserIdExcludeRules = false;
- bool userIdExclusionFound = false;
- bool hasUserIdMatchRules = false;
- bool userIdMatchFound = false;
-
-
- bool hasAddrMatch = false;
-
- // iterate over all mix criteria to list what rules this mix contains
- for (size_t j = 0; j < mix->mCriteria.size(); j++) {
- ALOGV(" getOutputForAttr: mix %zu: inspecting mix criteria %zu of %zu",
- mixIndex, j, mix->mCriteria.size());
-
- // if there is an address match, prioritize that match
- if (strncmp(attributes.tags, "addr=", strlen("addr=")) == 0 &&
- strncmp(attributes.tags + strlen("addr="),
- mix->mDeviceAddress.string(),
- AUDIO_ATTRIBUTES_TAGS_MAX_SIZE - strlen("addr=") - 1) == 0) {
- hasAddrMatch = true;
- break;
- }
-
- switch (mix->mCriteria[j].mRule) {
- case RULE_MATCH_ATTRIBUTE_USAGE:
- ALOGV("\tmix has RULE_MATCH_ATTRIBUTE_USAGE for usage %d",
- mix->mCriteria[j].mValue.mUsage);
- hasUsageMatchRules = true;
- if (mix->mCriteria[j].mValue.mUsage == attributes.usage) {
- // found one match against all allowed usages
- usageMatchFound = true;
- }
- break;
- case RULE_EXCLUDE_ATTRIBUTE_USAGE:
- ALOGV("\tmix has RULE_EXCLUDE_ATTRIBUTE_USAGE for usage %d",
- mix->mCriteria[j].mValue.mUsage);
- hasUsageExcludeRules = true;
- if (mix->mCriteria[j].mValue.mUsage == attributes.usage) {
- // found this usage is to be excluded
- usageExclusionFound = true;
- }
- break;
- case RULE_MATCH_UID:
- ALOGV("\tmix has RULE_MATCH_UID for uid %d", mix->mCriteria[j].mValue.mUid);
- hasUidMatchRules = true;
- if (mix->mCriteria[j].mValue.mUid == uid) {
- // found one UID match against all allowed UIDs
- uidMatchFound = true;
- }
- break;
- case RULE_EXCLUDE_UID:
- ALOGV("\tmix has RULE_EXCLUDE_UID for uid %d", mix->mCriteria[j].mValue.mUid);
- hasUidExcludeRules = true;
- if (mix->mCriteria[j].mValue.mUid == uid) {
- // found this UID is to be excluded
- uidExclusionFound = true;
- }
- break;
- case RULE_MATCH_USERID:
- ALOGV("\tmix has RULE_MATCH_USERID for userId %d",
- mix->mCriteria[j].mValue.mUserId);
- hasUserIdMatchRules = true;
- if (mix->mCriteria[j].mValue.mUserId == userId) {
- // found one userId match against all allowed userIds
- userIdMatchFound = true;
- }
- break;
- case RULE_EXCLUDE_USERID:
- ALOGV("\tmix has RULE_EXCLUDE_USERID for userId %d",
- mix->mCriteria[j].mValue.mUserId);
- hasUserIdExcludeRules = true;
- if (mix->mCriteria[j].mValue.mUserId == userId) {
- // found this userId is to be excluded
- userIdExclusionFound = true;
- }
- break;
- default:
- break;
- }
-
- // consistency checks: for each "dimension" of rules (usage, uid...), we can
- // only have MATCH rules, or EXCLUDE rules in each dimension, not a combination
- if (hasUsageMatchRules && hasUsageExcludeRules) {
- ALOGE("getOutputForAttr: invalid combination of RULE_MATCH_ATTRIBUTE_USAGE"
- " and RULE_EXCLUDE_ATTRIBUTE_USAGE in mix %zu", mixIndex);
- return MixMatchStatus::INVALID_MIX;
- }
- if (hasUidMatchRules && hasUidExcludeRules) {
- ALOGE("getOutputForAttr: invalid combination of RULE_MATCH_UID"
- " and RULE_EXCLUDE_UID in mix %zu", mixIndex);
- return MixMatchStatus::INVALID_MIX;
- }
- if (hasUserIdMatchRules && hasUserIdExcludeRules) {
- ALOGE("getOutputForAttr: invalid combination of RULE_MATCH_USERID"
- " and RULE_EXCLUDE_USERID in mix %zu", mixIndex);
- return MixMatchStatus::INVALID_MIX;
- }
-
- if ((hasUsageExcludeRules && usageExclusionFound)
- || (hasUidExcludeRules && uidExclusionFound)
- || (hasUserIdExcludeRules && userIdExclusionFound)) {
- break; // stop iterating on criteria because an exclusion was found (will fail)
- }
- }//iterate on mix criteria
-
- // determine if exiting on success (or implicit failure as desc is 0)
- if (hasAddrMatch ||
- !((hasUsageExcludeRules && usageExclusionFound) ||
- (hasUsageMatchRules && !usageMatchFound) ||
- (hasUidExcludeRules && uidExclusionFound) ||
- (hasUidMatchRules && !uidMatchFound) ||
- (hasUserIdExcludeRules && userIdExclusionFound) ||
- (hasUserIdMatchRules && !userIdMatchFound))) {
- ALOGV("\tgetOutputForAttr will use mix %zu", mixIndex);
- return MixMatchStatus::MATCH;
+ // if there is an address match, prioritize that match
+ if (matchAddressToTags(attributes, mix->mDeviceAddress)
+ || areMixCriteriaMatched(mix->mCriteria, attributes, uid, session)) {
+ ALOGV("\tgetOutputForAttr will use mix %zu", mixIndex);
+ return true;
}
-
} else if (mix->mMixType == MIX_TYPE_RECORDERS) {
if (attributes.usage == AUDIO_USAGE_VIRTUAL_SOURCE &&
- strncmp(attributes.tags, "addr=", strlen("addr=")) == 0 &&
- strncmp(attributes.tags + strlen("addr="),
- mix->mDeviceAddress.string(),
- AUDIO_ATTRIBUTES_TAGS_MAX_SIZE - strlen("addr=") - 1) == 0) {
- return MixMatchStatus::MATCH;
+ matchAddressToTags(attributes, mix->mDeviceAddress)) {
+ return true;
}
}
- return MixMatchStatus::NO_MATCH;
+ return false;
}
sp<DeviceDescriptor> AudioPolicyMixCollection::getDeviceAndMixForOutput(
@@ -398,9 +372,10 @@
}
sp<DeviceDescriptor> AudioPolicyMixCollection::getDeviceAndMixForInputSource(
- audio_source_t inputSource,
+ const audio_attributes_t& attributes,
const DeviceVector &availDevices,
uid_t uid,
+ audio_session_t session,
sp<AudioPolicyMix> *policyMix) const
{
for (size_t i = 0; i < size(); i++) {
@@ -408,28 +383,17 @@
if (mix->mMixType != MIX_TYPE_RECORDERS) {
continue;
}
- for (size_t j = 0; j < mix->mCriteria.size(); j++) {
- if ((RULE_MATCH_ATTRIBUTE_CAPTURE_PRESET == mix->mCriteria[j].mRule &&
- mix->mCriteria[j].mValue.mSource == inputSource) ||
- (RULE_EXCLUDE_ATTRIBUTE_CAPTURE_PRESET == mix->mCriteria[j].mRule &&
- mix->mCriteria[j].mValue.mSource != inputSource) ||
- (RULE_MATCH_UID == mix->mCriteria[j].mRule &&
- mix->mCriteria[j].mValue.mUid == uid) ||
- (RULE_EXCLUDE_UID == mix->mCriteria[j].mRule &&
- mix->mCriteria[j].mValue.mUid != uid)) {
- // assuming PolicyMix only for remote submix for input
- // so mix->mDeviceType can only be AUDIO_DEVICE_OUT_REMOTE_SUBMIX
- audio_devices_t device = AUDIO_DEVICE_IN_REMOTE_SUBMIX;
- auto mixDevice =
- availDevices.getDevice(device, mix->mDeviceAddress, AUDIO_FORMAT_DEFAULT);
+ if (areMixCriteriaMatched(mix->mCriteria, attributes, uid, session)) {
+ // Assuming PolicyMix only for remote submix for input
+ // so mix->mDeviceType can only be AUDIO_DEVICE_OUT_REMOTE_SUBMIX.
+ auto mixDevice = availDevices.getDevice(AUDIO_DEVICE_IN_REMOTE_SUBMIX,
+ mix->mDeviceAddress, AUDIO_FORMAT_DEFAULT);
if (mixDevice != nullptr) {
if (policyMix != nullptr) {
*policyMix = mix;
}
return mixDevice;
}
- break;
- }
}
}
return nullptr;
@@ -438,14 +402,14 @@
status_t AudioPolicyMixCollection::getInputMixForAttr(
audio_attributes_t attr, sp<AudioPolicyMix> *policyMix)
{
- if (strncmp(attr.tags, "addr=", strlen("addr=")) != 0) {
+ std::optional<std::string> address = extractAddressFromAudioAttributes(attr);
+ if (!address.has_value()) {
return BAD_VALUE;
}
- String8 address(attr.tags + strlen("addr="));
#ifdef LOG_NDEBUG
ALOGV("getInputMixForAttr looking for address %s for source %d\n mixes available:",
- address.string(), attr.source);
+ address->c_str(), attr.source);
for (size_t i = 0; i < size(); i++) {
const sp<AudioPolicyMix> audioPolicyMix = itemAt(i);
ALOGV("\tmix %zu address=%s", i, audioPolicyMix->mDeviceAddress.string());
@@ -455,20 +419,20 @@
size_t index;
for (index = 0; index < size(); index++) {
const sp<AudioPolicyMix>& registeredMix = itemAt(index);
- if (registeredMix->mDeviceAddress.compare(address) == 0) {
+ if (address->compare(registeredMix->mDeviceAddress.c_str()) == 0) {
ALOGD("getInputMixForAttr found addr=%s dev=0x%x",
registeredMix->mDeviceAddress.string(), registeredMix->mDeviceType);
break;
}
}
if (index == size()) {
- ALOGW("getInputMixForAttr() no policy for address %s", address.string());
+ ALOGW("getInputMixForAttr() no policy for address %s", address->c_str());
return BAD_VALUE;
}
const sp<AudioPolicyMix> audioPolicyMix = itemAt(index);
if (audioPolicyMix->mMixType != MIX_TYPE_PLAYERS) {
- ALOGW("getInputMixForAttr() bad policy mix type for address %s", address.string());
+ ALOGW("getInputMixForAttr() bad policy mix type for address %s", address->c_str());
return BAD_VALUE;
}
if (policyMix != nullptr) {
@@ -500,7 +464,7 @@
// AND it doesn't have a "match uid" rule
// THEN add a rule to exclude the uid
for (size_t i = 0; i < size(); i++) {
- const AudioPolicyMix *mix = itemAt(i).get();
+ AudioPolicyMix *mix = itemAt(i).get();
if (!mix->isDeviceAffinityCompatible()) {
continue;
}
@@ -530,27 +494,16 @@
status_t AudioPolicyMixCollection::removeUidDeviceAffinities(uid_t uid) {
// for each player mix: remove existing rules that match or exclude this uid
for (size_t i = 0; i < size(); i++) {
- bool foundUidRule = false;
- const AudioPolicyMix *mix = itemAt(i).get();
+ AudioPolicyMix *mix = itemAt(i).get();
if (!mix->isDeviceAffinityCompatible()) {
continue;
}
- std::vector<size_t> criteriaToRemove;
- for (size_t j = 0; j < mix->mCriteria.size(); j++) {
- const uint32_t rule = mix->mCriteria[j].mRule;
- // is this rule excluding the uid? (not considering uid match rules
- // as those are not used for uid-device affinity)
- if (rule == RULE_EXCLUDE_UID
- && uid == mix->mCriteria[j].mValue.mUid) {
- foundUidRule = true;
- criteriaToRemove.insert(criteriaToRemove.begin(), j);
- }
- }
- if (foundUidRule) {
- for (size_t j = 0; j < criteriaToRemove.size(); j++) {
- mix->mCriteria.removeAt(criteriaToRemove[j]);
- }
- }
+
+ // is this rule excluding the uid? (not considering uid match rules
+ // as those are not used for uid-device affinity)
+ EraseCriteriaIf(mix->mCriteria, [uid](const AudioMixMatchCriterion& c) {
+ return c.mRule == RULE_EXCLUDE_UID && c.mValue.mUid == uid;
+ });
}
return NO_ERROR;
}
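EraseCriteriaIf() used above is the standard erase–remove idiom wrapped for criteria vectors; a minimal generic sketch (the commented usage refers to the hypothetical simplified Criterion type from the earlier sketch):

#include <algorithm>
#include <vector>

// Remove every element satisfying the predicate, in place and in linear time.
template <typename T, typename Predicate>
void eraseIf(std::vector<T>& v, const Predicate& predicate) {
    v.erase(std::remove_if(v.begin(), v.end(), predicate), v.end());
}

// Usage sketch: drop all exclusion entries for a given uid.
// eraseIf(criteria, [uid](const Criterion& c) {
//     return c.rule == RULE_EXCLUDE_UID && c.value == uid;
// });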
@@ -585,7 +538,7 @@
// "match userId" rule for this userId, return an error
// (adding a userId-device affinity would result in contradictory rules)
for (size_t i = 0; i < size(); i++) {
- const AudioPolicyMix* mix = itemAt(i).get();
+ AudioPolicyMix* mix = itemAt(i).get();
if (!mix->isDeviceAffinityCompatible()) {
continue;
}
@@ -602,7 +555,7 @@
// AND it doesn't have a "match userId" rule
// THEN add a rule to exclude the userId
for (size_t i = 0; i < size(); i++) {
- const AudioPolicyMix *mix = itemAt(i).get();
+ AudioPolicyMix *mix = itemAt(i).get();
if (!mix->isDeviceAffinityCompatible()) {
continue;
}
@@ -632,27 +585,16 @@
status_t AudioPolicyMixCollection::removeUserIdDeviceAffinities(int userId) {
// for each player mix: remove existing rules that match or exclude this userId
for (size_t i = 0; i < size(); i++) {
- bool foundUserIdRule = false;
- const AudioPolicyMix *mix = itemAt(i).get();
+ AudioPolicyMix *mix = itemAt(i).get();
if (!mix->isDeviceAffinityCompatible()) {
continue;
}
- std::vector<size_t> criteriaToRemove;
- for (size_t j = 0; j < mix->mCriteria.size(); j++) {
- const uint32_t rule = mix->mCriteria[j].mRule;
- // is this rule excluding the userId? (not considering userId match rules
- // as those are not used for userId-device affinity)
- if (rule == RULE_EXCLUDE_USERID
- && userId == mix->mCriteria[j].mValue.mUserId) {
- foundUserIdRule = true;
- criteriaToRemove.insert(criteriaToRemove.begin(), j);
- }
- }
- if (foundUserIdRule) {
- for (size_t j = 0; j < criteriaToRemove.size(); j++) {
- mix->mCriteria.removeAt(criteriaToRemove[j]);
- }
- }
+
+ // is this rule excluding the userId? (not considering userId match rules
+ // as those are not used for userId-device affinity)
+ EraseCriteriaIf(mix->mCriteria, [userId](const AudioMixMatchCriterion& c) {
+ return c.mRule == RULE_EXCLUDE_USERID && c.mValue.mUserId == userId;
+ });
}
return NO_ERROR;
}
@@ -690,4 +632,14 @@
}
}
+std::optional<std::string> extractAddressFromAudioAttributes(const audio_attributes_t& attr) {
+ static const std::regex addrTagRegex("addr=([^;]+)");
+
+ std::cmatch match;
+ if (std::regex_search(attr.tags, match, addrTagRegex)) {
+ return match[1].str();
+ }
+ return std::nullopt;
+}
+
}; //namespace android
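A quick check of the regex used by the new extractAddressFromAudioAttributes() helper, written as a self-contained snippet; the tag strings are made-up examples:

#include <cassert>
#include <optional>
#include <regex>
#include <string>

std::optional<std::string> extractAddress(const char* tags) {
    static const std::regex addrTagRegex("addr=([^;]+)");
    std::cmatch match;
    if (std::regex_search(tags, match, addrTagRegex)) {
        return match[1].str();
    }
    return std::nullopt;
}

int main() {
    // The "addr=" value ends at the next ';' separator or at end of string.
    assert(extractAddress("addr=bus0_media_out;other=1") == "bus0_media_out");
    assert(extractAddress("addr=0") == "0");
    assert(!extractAddress("no_address_here").has_value());
}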
diff --git a/services/audiopolicy/common/managerdefinitions/src/TypeConverter.cpp b/services/audiopolicy/common/managerdefinitions/src/TypeConverter.cpp
index c5b3546..8a44547 100644
--- a/services/audiopolicy/common/managerdefinitions/src/TypeConverter.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/TypeConverter.cpp
@@ -56,10 +56,12 @@
MAKE_STRING_FROM_ENUM(RULE_MATCH_ATTRIBUTE_CAPTURE_PRESET),
MAKE_STRING_FROM_ENUM(RULE_MATCH_UID),
MAKE_STRING_FROM_ENUM(RULE_MATCH_USERID),
+ MAKE_STRING_FROM_ENUM(RULE_MATCH_AUDIO_SESSION_ID),
MAKE_STRING_FROM_ENUM(RULE_EXCLUDE_ATTRIBUTE_USAGE),
MAKE_STRING_FROM_ENUM(RULE_EXCLUDE_ATTRIBUTE_CAPTURE_PRESET),
MAKE_STRING_FROM_ENUM(RULE_EXCLUDE_UID),
MAKE_STRING_FROM_ENUM(RULE_EXCLUDE_USERID),
+ MAKE_STRING_FROM_ENUM(RULE_EXCLUDE_AUDIO_SESSION_ID),
TERMINATOR
};
diff --git a/services/audiopolicy/engine/common/include/ProductStrategy.h b/services/audiopolicy/engine/common/include/ProductStrategy.h
index 2aa2f9a..e8251e3 100644
--- a/services/audiopolicy/engine/common/include/ProductStrategy.h
+++ b/services/audiopolicy/engine/common/include/ProductStrategy.h
@@ -24,7 +24,7 @@
#include <vector>
#include <HandleGenerator.h>
-#include <media/AudioAttributes.h>
+#include <media/VolumeGroupAttributes.h>
#include <media/AudioContainers.h>
#include <media/AudioDeviceTypeAddr.h>
#include <media/AudioPolicy.h>
@@ -43,20 +43,14 @@
class ProductStrategy : public virtual RefBase, private HandleGenerator<uint32_t>
{
private:
- struct AudioAttributes {
- audio_stream_type_t mStream = AUDIO_STREAM_DEFAULT;
- volume_group_t mVolumeGroup = VOLUME_GROUP_NONE;
- audio_attributes_t mAttributes = AUDIO_ATTRIBUTES_INITIALIZER;
- };
-
- using AudioAttributesVector = std::vector<AudioAttributes>;
+ using VolumeGroupAttributesVector = std::vector<VolumeGroupAttributes>;
public:
ProductStrategy(const std::string &name);
- void addAttributes(const AudioAttributes &audioAttributes);
+ void addAttributes(const VolumeGroupAttributes &volumeGroupAttributes);
- std::vector<android::AudioAttributes> listAudioAttributes() const;
+ std::vector<android::VolumeGroupAttributes> listVolumeGroupAttributes() const;
std::string getName() const { return mName; }
AttributesVector getAudioAttributes() const;
@@ -105,7 +99,7 @@
private:
std::string mName;
- AudioAttributesVector mAttributesVector;
+ VolumeGroupAttributesVector mAttributesVector;
product_strategy_t mId;
diff --git a/services/audiopolicy/engine/common/src/EngineBase.cpp b/services/audiopolicy/engine/common/src/EngineBase.cpp
index 99507ee..9b78758 100644
--- a/services/audiopolicy/engine/common/src/EngineBase.cpp
+++ b/services/audiopolicy/engine/common/src/EngineBase.cpp
@@ -145,7 +145,7 @@
};
auto addSupportedAttributesToGroup = [](auto &group, auto &volumeGroup, auto &strategy) {
for (const auto &attr : group.attributesVect) {
- strategy->addAttributes({group.stream, volumeGroup->getId(), attr});
+ strategy->addAttributes({volumeGroup->getId(), group.stream, attr});
volumeGroup->addSupportedAttributes(attr);
}
};
@@ -284,7 +284,7 @@
for (const auto &iter : mProductStrategies) {
const auto &productStrategy = iter.second;
strategies.push_back(
- {productStrategy->getName(), productStrategy->listAudioAttributes(),
+ {productStrategy->getName(), productStrategy->listVolumeGroupAttributes(),
productStrategy->getId()});
}
return NO_ERROR;
diff --git a/services/audiopolicy/engine/common/src/EngineDefaultConfig.h b/services/audiopolicy/engine/common/src/EngineDefaultConfig.h
index 60b16ac..548a20d 100644
--- a/services/audiopolicy/engine/common/src/EngineDefaultConfig.h
+++ b/services/audiopolicy/engine/common/src/EngineDefaultConfig.h
@@ -16,6 +16,8 @@
#pragma once
+#include <EngineConfig.h>
+
#include <system/audio.h>
namespace android {
@@ -138,19 +140,19 @@
* For compatibility reasons with the apm volume config file, the volume group name is the stream type.
*/
const engineConfig::ProductStrategies gOrderedSystemStrategies = {
- {"rerouting",
+ {"STRATEGY_REROUTING",
{
{AUDIO_STREAM_REROUTING, "AUDIO_STREAM_REROUTING",
{{AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_VIRTUAL_SOURCE, AUDIO_SOURCE_DEFAULT,
- AUDIO_FLAG_NONE, ""}}
+ AUDIO_FLAG_NONE, AUDIO_TAG_APM_RESERVED_INTERNAL}}
}
},
},
- {"patch",
+ {"STRATEGY_PATCH",
{
{AUDIO_STREAM_PATCH, "AUDIO_STREAM_PATCH",
{{AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_UNKNOWN, AUDIO_SOURCE_DEFAULT,
- AUDIO_FLAG_NONE, ""}}
+ AUDIO_FLAG_NONE, AUDIO_TAG_APM_RESERVED_INTERNAL}}
}
},
}
diff --git a/services/audiopolicy/engine/common/src/ProductStrategy.cpp b/services/audiopolicy/engine/common/src/ProductStrategy.cpp
index fbfcf72..c104c97 100644
--- a/services/audiopolicy/engine/common/src/ProductStrategy.cpp
+++ b/services/audiopolicy/engine/common/src/ProductStrategy.cpp
@@ -36,16 +36,16 @@
{
}
-void ProductStrategy::addAttributes(const AudioAttributes &audioAttributes)
+void ProductStrategy::addAttributes(const VolumeGroupAttributes &volumeGroupAttributes)
{
- mAttributesVector.push_back(audioAttributes);
+ mAttributesVector.push_back(volumeGroupAttributes);
}
-std::vector<android::AudioAttributes> ProductStrategy::listAudioAttributes() const
+std::vector<android::VolumeGroupAttributes> ProductStrategy::listVolumeGroupAttributes() const
{
- std::vector<android::AudioAttributes> androidAa;
+ std::vector<android::VolumeGroupAttributes> androidAa;
for (const auto &attr : mAttributesVector) {
- androidAa.push_back({attr.mVolumeGroup, attr.mStream, attr.mAttributes});
+ androidAa.push_back({attr.getGroupId(), attr.getStreamType(), attr.getAttributes()});
}
return androidAa;
}
@@ -54,7 +54,7 @@
{
AttributesVector attrVector;
for (const auto &attrGroup : mAttributesVector) {
- attrVector.push_back(attrGroup.mAttributes);
+ attrVector.push_back(attrGroup.getAttributes());
}
if (not attrVector.empty()) {
return attrVector;
@@ -66,7 +66,7 @@
{
return std::find_if(begin(mAttributesVector), end(mAttributesVector),
[&attr](const auto &supportedAttr) {
- return AudioProductStrategy::attributesMatches(supportedAttr.mAttributes, attr);
+ return AudioProductStrategy::attributesMatches(supportedAttr.getAttributes(), attr);
}) != end(mAttributesVector);
}
@@ -75,11 +75,11 @@
{
const auto &iter = std::find_if(begin(mAttributesVector), end(mAttributesVector),
[&attr](const auto &supportedAttr) {
- return AudioProductStrategy::attributesMatches(supportedAttr.mAttributes, attr); });
+ return AudioProductStrategy::attributesMatches(supportedAttr.getAttributes(), attr); });
if (iter == end(mAttributesVector)) {
return AUDIO_STREAM_DEFAULT;
}
- audio_stream_type_t streamType = iter->mStream;
+ audio_stream_type_t streamType = iter->getStreamType();
ALOGW_IF(streamType == AUDIO_STREAM_DEFAULT,
"%s: Strategy %s supporting attributes %s has not stream type associated"
"fallback on MUSIC. Do not use stream volume API", __func__, mName.c_str(),
@@ -91,23 +91,23 @@
{
const auto iter = std::find_if(begin(mAttributesVector), end(mAttributesVector),
[&streamType](const auto &supportedAttr) {
- return supportedAttr.mStream == streamType; });
- return iter != end(mAttributesVector) ? iter->mAttributes : AUDIO_ATTRIBUTES_INITIALIZER;
+ return supportedAttr.getStreamType() == streamType; });
+ return iter != end(mAttributesVector) ? iter->getAttributes() : AUDIO_ATTRIBUTES_INITIALIZER;
}
bool ProductStrategy::isDefault() const
{
return std::find_if(begin(mAttributesVector), end(mAttributesVector), [](const auto &attr) {
- return attr.mAttributes == defaultAttr; }) != end(mAttributesVector);
+ return attr.getAttributes() == defaultAttr; }) != end(mAttributesVector);
}
StreamTypeVector ProductStrategy::getSupportedStreams() const
{
StreamTypeVector streams;
for (const auto &supportedAttr : mAttributesVector) {
- if (std::find(begin(streams), end(streams), supportedAttr.mStream) == end(streams) &&
- supportedAttr.mStream != AUDIO_STREAM_DEFAULT) {
- streams.push_back(supportedAttr.mStream);
+ if (std::find(begin(streams), end(streams), supportedAttr.getStreamType())
+ == end(streams) && supportedAttr.getStreamType() != AUDIO_STREAM_DEFAULT) {
+ streams.push_back(supportedAttr.getStreamType());
}
}
return streams;
@@ -117,14 +117,14 @@
{
return std::find_if(begin(mAttributesVector), end(mAttributesVector),
[&streamType](const auto &supportedAttr) {
- return supportedAttr.mStream == streamType; }) != end(mAttributesVector);
+ return supportedAttr.getStreamType() == streamType; }) != end(mAttributesVector);
}
volume_group_t ProductStrategy::getVolumeGroupForAttributes(const audio_attributes_t &attr) const
{
for (const auto &supportedAttr : mAttributesVector) {
- if (AudioProductStrategy::attributesMatches(supportedAttr.mAttributes, attr)) {
- return supportedAttr.mVolumeGroup;
+ if (AudioProductStrategy::attributesMatches(supportedAttr.getAttributes(), attr)) {
+ return supportedAttr.getGroupId();
}
}
return VOLUME_GROUP_NONE;
@@ -133,8 +133,8 @@
volume_group_t ProductStrategy::getVolumeGroupForStreamType(audio_stream_type_t stream) const
{
for (const auto &supportedAttr : mAttributesVector) {
- if (supportedAttr.mStream == stream) {
- return supportedAttr.mVolumeGroup;
+ if (supportedAttr.getStreamType() == stream) {
+ return supportedAttr.getGroupId();
}
}
return VOLUME_GROUP_NONE;
@@ -143,8 +143,10 @@
volume_group_t ProductStrategy::getDefaultVolumeGroup() const
{
const auto &iter = std::find_if(begin(mAttributesVector), end(mAttributesVector),
- [](const auto &attr) {return attr.mAttributes == defaultAttr;});
- return iter != end(mAttributesVector) ? iter->mVolumeGroup : VOLUME_GROUP_NONE;
+ [](const auto &attr) {
+ return attr.getAttributes() == defaultAttr;
+ });
+ return iter != end(mAttributesVector) ? iter->getGroupId() : VOLUME_GROUP_NONE;
}
void ProductStrategy::dump(String8 *dst, int spaces) const
@@ -155,11 +157,11 @@
deviceLiteral.c_str(), mDeviceAddress.c_str());
for (const auto &attr : mAttributesVector) {
- dst->appendFormat("%*sGroup: %d stream: %s\n", spaces + 3, "", attr.mVolumeGroup,
- android::toString(attr.mStream).c_str());
+ dst->appendFormat("%*sGroup: %d stream: %s\n", spaces + 3, "", attr.getGroupId(),
+ android::toString(attr.getStreamType()).c_str());
dst->appendFormat("%*s Attributes: ", spaces + 3, "");
- std::string attStr =
- attr.mAttributes == defaultAttr ? "{ Any }" : android::toString(attr.mAttributes);
+ std::string attStr = attr.getAttributes() == defaultAttr ?
+ "{ Any }" : android::toString(attr.getAttributes());
dst->appendFormat("%s\n", attStr.c_str());
}
}
diff --git a/services/audiopolicy/engine/common/src/VolumeCurve.cpp b/services/audiopolicy/engine/common/src/VolumeCurve.cpp
index 8aa4b08..fccbc60 100644
--- a/services/audiopolicy/engine/common/src/VolumeCurve.cpp
+++ b/services/audiopolicy/engine/common/src/VolumeCurve.cpp
@@ -52,7 +52,7 @@
volIdx = volIndexMin;
} else {
// This would result in a divide-by-zero below
- ALOG_ASSERT(volIndexmin != volIndexMax, "Invalid volume index range & value: 0");
+ ALOG_ASSERT(volIndexMin != volIndexMax, "Invalid volume index range & value: 0");
return NAN;
}
} else {
diff --git a/services/audiopolicy/engine/config/include/EngineConfig.h b/services/audiopolicy/engine/config/include/EngineConfig.h
index 8036eea..4de16c5 100644
--- a/services/audiopolicy/engine/config/include/EngineConfig.h
+++ b/services/audiopolicy/engine/config/include/EngineConfig.h
@@ -25,6 +25,12 @@
struct _xmlNode;
struct _xmlDoc;
+/**
+ * AudioAttributes custom tag to identify internal strategies, whose volumes are exclusively
+ * controlled by AudioPolicyManager
+ */
+#define AUDIO_TAG_APM_RESERVED_INTERNAL "reserved_internal_strategy"
+
namespace android {
namespace engineConfig {
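Note: AUDIO_TAG_APM_RESERVED_INTERNAL marks audio attributes belonging to internal strategies whose volume only AudioPolicyManager may control. The consumer of the macro is not shown in this change; a hedged sketch of how such a tag could be detected in audio_attributes_t::tags:

    #include <cstring>
    #include <system/audio.h>   // audio_attributes_t (assumption for this sketch)

    // Assumes AUDIO_TAG_APM_RESERVED_INTERNAL from the header above is in scope.
    static bool isReservedInternalStrategy(const audio_attributes_t &attr) {
        // tags is a ';'-separated list; a substring check is enough for this sketch.
        return std::strstr(attr.tags, AUDIO_TAG_APM_RESERVED_INTERNAL) != nullptr;
    }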
diff --git a/services/audiopolicy/engine/interface/EngineInterface.h b/services/audiopolicy/engine/interface/EngineInterface.h
index 518f86e..70d25fc 100644
--- a/services/audiopolicy/engine/interface/EngineInterface.h
+++ b/services/audiopolicy/engine/interface/EngineInterface.h
@@ -173,10 +173,11 @@
* @param[out] mix to be used if a mix has been installed for the given audio attributes.
* @return selected input device for the audio attributes, may be null if error.
*/
- virtual sp<DeviceDescriptor> getInputDeviceForAttributes(const audio_attributes_t &attr,
- uid_t uid = 0,
- sp<AudioPolicyMix> *mix = nullptr)
- const = 0;
+ virtual sp<DeviceDescriptor> getInputDeviceForAttributes(
+ const audio_attributes_t &attr,
+ uid_t uid = 0,
+ audio_session_t session = AUDIO_SESSION_NONE,
+ sp<AudioPolicyMix> *mix = nullptr) const = 0;
/**
* Get the legacy stream type for a given audio attributes.
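Note: getInputDeviceForAttributes() gains an audio_session_t parameter, defaulted to AUDIO_SESSION_NONE so existing callers keep compiling while new callers can feed the client's session into session-id mix rules. A hypothetical call site, assuming the usual policy headers and types shown in this interface:

    // Illustrative only; function and variable names are made up for this sketch.
    sp<DeviceDescriptor> pickInputDevice(const EngineInterface &engine,
                                         const audio_attributes_t &attr,
                                         uid_t uid, audio_session_t session) {
        sp<AudioPolicyMix> mix;
        return engine.getInputDeviceForAttributes(attr, uid, session, &mix);
    }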
diff --git a/services/audiopolicy/engineconfigurable/src/Engine.cpp b/services/audiopolicy/engineconfigurable/src/Engine.cpp
index 3d74920..9d53017 100644
--- a/services/audiopolicy/engineconfigurable/src/Engine.cpp
+++ b/services/audiopolicy/engineconfigurable/src/Engine.cpp
@@ -315,6 +315,7 @@
sp<DeviceDescriptor> Engine::getInputDeviceForAttributes(const audio_attributes_t &attr,
uid_t uid,
+ audio_session_t session,
sp<AudioPolicyMix> *mix) const
{
const auto &policyMixes = getApmObserver()->getAudioPolicyMixCollection();
@@ -333,9 +334,10 @@
return device;
}
- device = policyMixes.getDeviceAndMixForInputSource(attr.source,
+ device = policyMixes.getDeviceAndMixForInputSource(attr,
availableInputDevices,
uid,
+ session,
mix);
if (device != nullptr) {
return device;
diff --git a/services/audiopolicy/engineconfigurable/src/Engine.h b/services/audiopolicy/engineconfigurable/src/Engine.h
index 4b559f0..6ac20cd 100644
--- a/services/audiopolicy/engineconfigurable/src/Engine.h
+++ b/services/audiopolicy/engineconfigurable/src/Engine.h
@@ -63,6 +63,7 @@
sp<DeviceDescriptor> getInputDeviceForAttributes(const audio_attributes_t &attr,
uid_t uid = 0,
+ audio_session_t session = AUDIO_SESSION_NONE,
sp<AudioPolicyMix> *mix = nullptr)
const override;
diff --git a/services/audiopolicy/engineconfigurable/tools/Android.bp b/services/audiopolicy/engineconfigurable/tools/Android.bp
index 77ea507..b6089b7 100644
--- a/services/audiopolicy/engineconfigurable/tools/Android.bp
+++ b/services/audiopolicy/engineconfigurable/tools/Android.bp
@@ -21,18 +21,6 @@
default_applicable_licenses: ["frameworks_av_license"],
}
-python_defaults {
- name: "tools_default",
- version: {
- py2: {
- enabled: false,
- },
- py3: {
- enabled: true,
- },
- },
-}
-
//##################################################################################################
// Tools for audio policy engine criterion type configuration file
//
@@ -42,7 +30,6 @@
srcs: [
"buildPolicyCriterionTypes.py",
],
- defaults: ["tools_default"],
}
genrule_defaults {
@@ -77,7 +64,6 @@
srcs: [
"domainGeneratorPolicy.py",
],
- defaults: ["tools_default"],
libs: [
"EddParser.py",
"hostConfig.py",
@@ -126,7 +112,6 @@
srcs: [
"buildStrategiesStructureFile.py",
],
- defaults: ["tools_default"],
}
genrule_defaults {
@@ -154,7 +139,6 @@
srcs: [
"buildCommonTypesStructureFile.py",
],
- defaults: ["tools_default"],
}
genrule_defaults {
diff --git a/services/audiopolicy/enginedefault/src/Engine.cpp b/services/audiopolicy/enginedefault/src/Engine.cpp
index 7534984..d96ae21 100644
--- a/services/audiopolicy/enginedefault/src/Engine.cpp
+++ b/services/audiopolicy/enginedefault/src/Engine.cpp
@@ -463,7 +463,7 @@
}
if (devices.isEmpty()) {
- ALOGV("%s no device found for strategy %d", __func__, strategy);
+ ALOGI("%s no device found for strategy %d", __func__, strategy);
sp<DeviceDescriptor> defaultOutputDevice = getApmObserver()->getDefaultOutputDevice();
if (defaultOutputDevice != nullptr) {
devices.add(defaultOutputDevice);
@@ -699,6 +699,18 @@
return preferredAvailableDevVec;
}
+DeviceVector Engine::getDisabledDevicesForProductStrategy(
+ const DeviceVector &availableOutputDevices, product_strategy_t strategy) const {
+ DeviceVector disabledDevices = {};
+ AudioDeviceTypeAddrVector disabledDevicesTypeAddr;
+ const status_t status = getDevicesForRoleAndStrategy(
+ strategy, DEVICE_ROLE_DISABLED, disabledDevicesTypeAddr);
+ if (status == NO_ERROR) {
+ disabledDevices =
+ availableOutputDevices.getDevicesFromDeviceTypeAddrVec(disabledDevicesTypeAddr);
+ }
+ return disabledDevices;
+}
DeviceVector Engine::getDevicesForProductStrategy(product_strategy_t strategy) const {
const SwAudioOutputCollection& outputs = getApmObserver()->getOutputs();
@@ -722,6 +734,11 @@
return preferredAvailableDevVec;
}
+ // Remove all disabled devices from the available device list.
+ DeviceVector disabledDevVec =
+ getDisabledDevicesForProductStrategy(availableOutputDevices, strategy);
+ availableOutputDevices.remove(disabledDevVec);
+
return getDevicesForStrategyInt(legacyStrategy,
availableOutputDevices,
outputs);
@@ -761,6 +778,7 @@
sp<DeviceDescriptor> Engine::getInputDeviceForAttributes(const audio_attributes_t &attr,
uid_t uid,
+ audio_session_t session,
sp<AudioPolicyMix> *mix) const
{
const auto &policyMixes = getApmObserver()->getAudioPolicyMixCollection();
@@ -780,9 +798,10 @@
return device;
}
- device = policyMixes.getDeviceAndMixForInputSource(attr.source,
+ device = policyMixes.getDeviceAndMixForInputSource(attr,
availableInputDevices,
uid,
+ session,
mix);
if (device != nullptr) {
return device;
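Note: getDevicesForProductStrategy() now subtracts devices carrying DEVICE_ROLE_DISABLED before running the legacy strategy selection. A standalone sketch of that filtering step, using simplified stand-in types instead of DeviceVector/AudioDeviceTypeAddrVector:

    #include <algorithm>
    #include <string>
    #include <vector>

    struct Device { std::string typeAddr; };   // stand-in for a device type+address pair

    // Drop every available device whose type+address appears in the disabled list.
    std::vector<Device> removeDisabled(std::vector<Device> available,
                                       const std::vector<std::string> &disabledTypeAddrs) {
        available.erase(std::remove_if(available.begin(), available.end(),
                [&](const Device &d) {
                    return std::find(disabledTypeAddrs.begin(), disabledTypeAddrs.end(),
                                     d.typeAddr) != disabledTypeAddrs.end();
                }),
                available.end());
        return available;
    }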
diff --git a/services/audiopolicy/enginedefault/src/Engine.h b/services/audiopolicy/enginedefault/src/Engine.h
index 595e289..ab556ee 100644
--- a/services/audiopolicy/enginedefault/src/Engine.h
+++ b/services/audiopolicy/enginedefault/src/Engine.h
@@ -64,6 +64,7 @@
sp<DeviceDescriptor> getInputDeviceForAttributes(const audio_attributes_t &attr,
uid_t uid = 0,
+ audio_session_t session = AUDIO_SESSION_NONE,
sp<AudioPolicyMix> *mix = nullptr)
const override;
@@ -96,6 +97,8 @@
const DeviceVector& availableOutputDevices, legacy_strategy legacyStrategy) const;
DeviceVector getPreferredAvailableDevicesForProductStrategy(
const DeviceVector& availableOutputDevices, product_strategy_t strategy) const;
+ DeviceVector getDisabledDevicesForProductStrategy(
+ const DeviceVector& availableOutputDevices, product_strategy_t strategy) const;
DeviceStrategyMap mDevicesForStrategies;
diff --git a/services/audiopolicy/fuzzer/audiopolicy_fuzzer.cpp b/services/audiopolicy/fuzzer/audiopolicy_fuzzer.cpp
index 48f7410..28268c9 100644
--- a/services/audiopolicy/fuzzer/audiopolicy_fuzzer.cpp
+++ b/services/audiopolicy/fuzzer/audiopolicy_fuzzer.cpp
@@ -544,10 +544,11 @@
status_t AudioPolicyManagerFuzzerDynamicPolicy::addPolicyMix(
int mixType, int mixFlag, audio_devices_t deviceType, std::string mixAddress,
const audio_config_t &audioConfig, const std::vector<PolicyMixTuple> &rules) {
- Vector<AudioMixMatchCriterion> myMixMatchCriteria;
+ std::vector<AudioMixMatchCriterion> myMixMatchCriteria;
+ myMixMatchCriteria.reserve(rules.size());
for (const auto &rule : rules) {
- myMixMatchCriteria.add(
+ myMixMatchCriteria.push_back(
AudioMixMatchCriterion(std::get<0>(rule), std::get<1>(rule), std::get<2>(rule)));
}
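Note: the fuzzer switches from android::Vector to std::vector and reserves capacity before populating the criteria. The same pattern in a self-contained form, with hypothetical stand-in types:

    #include <utility>
    #include <vector>

    struct Criterion { int rule; int value; };   // stand-in for AudioMixMatchCriterion

    std::vector<Criterion> buildCriteria(const std::vector<std::pair<int, int>> &rules) {
        std::vector<Criterion> criteria;
        criteria.reserve(rules.size());          // one allocation instead of repeated growth
        for (const auto &[rule, value] : rules) {
            criteria.push_back({rule, value});
        }
        return criteria;
    }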
diff --git a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
index 8a3cd63..db2fd23 100644
--- a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
+++ b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
@@ -792,7 +792,8 @@
ALOGV("%s between source %s and sink %s", __func__,
srcDevice->toString().c_str(), sinkDevice->toString().c_str());
auto callTxSourceClientPortId = PolicyAudioPort::getNextUniqueId();
- const audio_attributes_t aa = { .source = AUDIO_SOURCE_VOICE_COMMUNICATION };
+ const auto aa = mEngine->getAttributesForStreamType(AUDIO_STREAM_VOICE_CALL);
+
struct audio_port_config source = {};
srcDevice->toAudioPortConfig(&source);
mCallTxSourceClient = new InternalSourceClientDescriptor(
@@ -1169,8 +1170,8 @@
.channel_mask = config->channel_mask,
.format = config->format,
};
- status = mPolicyMixes.getOutputForAttr(*resultAttr, clientConfig, uid, *flags, primaryMix,
- secondaryMixes);
+ status = mPolicyMixes.getOutputForAttr(*resultAttr, clientConfig, uid, session, *flags,
+ primaryMix, secondaryMixes);
if (status != OK) {
return status;
}
@@ -2504,7 +2505,7 @@
*inputType = API_INPUT_INVALID;
if (attributes.source == AUDIO_SOURCE_REMOTE_SUBMIX &&
- strncmp(attributes.tags, "addr=", strlen("addr=")) == 0) {
+ extractAddressFromAudioAttributes(attributes).has_value()) {
status = mPolicyMixes.getInputMixForAttr(attributes, &policyMix);
if (status != NO_ERROR) {
ALOGW("%s could not find input mix for attr %s",
@@ -2532,7 +2533,7 @@
} else {
// Prevent from storing invalid requested device id in clients
requestedDeviceId = AUDIO_PORT_HANDLE_NONE;
- device = mEngine->getInputDeviceForAttributes(attributes, uid, &policyMix);
+ device = mEngine->getInputDeviceForAttributes(attributes, uid, session, &policyMix);
ALOGV_IF(device != nullptr, "%s found device type is 0x%X",
__FUNCTION__, device->type());
}
@@ -2939,7 +2940,8 @@
bool close = false;
for (const auto& client : input->clientsList()) {
sp<DeviceDescriptor> device =
- mEngine->getInputDeviceForAttributes(client->attributes(), client->uid());
+ mEngine->getInputDeviceForAttributes(client->attributes(), client->uid(),
+ client->session());
if (!input->supportedDevices().contains(device)) {
close = true;
break;
@@ -4104,6 +4106,9 @@
status_t AudioPolicyManager::getDirectProfilesForAttributes(const audio_attributes_t* attr,
AudioProfileVector& audioProfilesVector) {
+ if (mEffects.isNonOffloadableEffectEnabled()) {
+ return OK;
+ }
DeviceVector devices;
status_t status = getDevicesForAttributes(*attr, devices, false /* forVolume */);
if (status != OK) {
@@ -4506,7 +4511,7 @@
// In case of Hw bridge, it is a Work Around. The mixPort used is the one declared
// in config XML to reach the sink so that it can be declared as available.
audio_io_handle_t output = AUDIO_IO_HANDLE_NONE;
- sp<SwAudioOutputDescriptor> outputDesc = nullptr;
+ sp<SwAudioOutputDescriptor> outputDesc;
if (!sourceDesc->isInternal()) {
// take care of dynamic routing for SwOutput selection,
audio_attributes_t attributes = sourceDesc->attributes();
@@ -4581,7 +4586,8 @@
audio_port_config srcMixPortConfig = {};
outputDesc->toAudioPortConfig(&srcMixPortConfig, nullptr);
// for volume control, we may need a valid stream
- srcMixPortConfig.ext.mix.usecase.stream = !sourceDesc->isInternal() ?
+ srcMixPortConfig.ext.mix.usecase.stream =
+ (!sourceDesc->isInternal() || isCallTxAudioSource(sourceDesc)) ?
mEngine->getStreamTypeForAttributes(sourceDesc->attributes()) :
AUDIO_STREAM_PATCH;
patchBuilder.addSource(srcMixPortConfig);
@@ -6331,7 +6337,7 @@
}
sp<AudioPolicyMix> primaryMix;
status_t status = mPolicyMixes.getOutputForAttr(client->attributes(), client->config(),
- client->uid(), client->flags(), primaryMix, nullptr);
+ client->uid(), client->session(), client->flags(), primaryMix, nullptr);
if (status != OK) {
continue;
}
@@ -6444,7 +6450,7 @@
sp<AudioPolicyMix> primaryMix;
std::vector<sp<AudioPolicyMix>> secondaryMixes;
status_t status = mPolicyMixes.getOutputForAttr(client->attributes(), client->config(),
- client->uid(), client->flags(), primaryMix, &secondaryMixes);
+ client->uid(), client->session(), client->flags(), primaryMix, &secondaryMixes);
std::vector<sp<SwAudioOutputDescriptor>> secondaryDescs;
for (auto &secondaryMix : secondaryMixes) {
sp<SwAudioOutputDescriptor> outputDesc = secondaryMix->getOutput();
@@ -6652,20 +6658,23 @@
// a null sp<>, causing the patch on the input stream to be released.
audio_attributes_t attributes;
uid_t uid;
+ audio_session_t session;
sp<RecordClientDescriptor> topClient = inputDesc->getHighestPriorityClient();
if (topClient != nullptr) {
attributes = topClient->attributes();
uid = topClient->uid();
+ session = topClient->session();
} else {
attributes = { .source = AUDIO_SOURCE_DEFAULT };
uid = 0;
+ session = AUDIO_SESSION_NONE;
}
if (attributes.source == AUDIO_SOURCE_DEFAULT && isInCall()) {
attributes.source = AUDIO_SOURCE_VOICE_COMMUNICATION;
}
if (attributes.source != AUDIO_SOURCE_DEFAULT) {
- device = mEngine->getInputDeviceForAttributes(attributes, uid);
+ device = mEngine->getInputDeviceForAttributes(attributes, uid, session);
}
return device;
@@ -7860,7 +7869,7 @@
// audio routing, only used for duplication for playback capture)
sp<AudioPolicyMix> policyMix;
status_t status = mPolicyMixes.getOutputForAttr(attr, AUDIO_CONFIG_BASE_INITIALIZER,
- 0 /*uid unknown here*/, AUDIO_OUTPUT_FLAG_NONE, policyMix,
+ 0 /*uid unknown here*/, AUDIO_SESSION_NONE, AUDIO_OUTPUT_FLAG_NONE, policyMix,
nullptr /* secondaryMixes */);
if (status != OK) {
return status;
diff --git a/services/audiopolicy/managerdefault/AudioPolicyManager.h b/services/audiopolicy/managerdefault/AudioPolicyManager.h
index 74460c7..a3600a0 100644
--- a/services/audiopolicy/managerdefault/AudioPolicyManager.h
+++ b/services/audiopolicy/managerdefault/AudioPolicyManager.h
@@ -356,11 +356,10 @@
}
virtual status_t getProductStrategyFromAudioAttributes(
- const AudioAttributes &aa, product_strategy_t &productStrategy,
+ const audio_attributes_t &aa, product_strategy_t &productStrategy,
bool fallbackOnDefault)
{
- productStrategy = mEngine->getProductStrategyForAttributes(
- aa.getAttributes(), fallbackOnDefault);
+ productStrategy = mEngine->getProductStrategyForAttributes(aa, fallbackOnDefault);
return (fallbackOnDefault && productStrategy == PRODUCT_STRATEGY_NONE) ?
BAD_VALUE : NO_ERROR;
}
@@ -371,10 +370,9 @@
}
virtual status_t getVolumeGroupFromAudioAttributes(
- const AudioAttributes &aa, volume_group_t &volumeGroup, bool fallbackOnDefault)
+ const audio_attributes_t &aa, volume_group_t &volumeGroup, bool fallbackOnDefault)
{
- volumeGroup = mEngine->getVolumeGroupForAttributes(
- aa.getAttributes(), fallbackOnDefault);
+ volumeGroup = mEngine->getVolumeGroupForAttributes(aa, fallbackOnDefault);
return (fallbackOnDefault && volumeGroup == VOLUME_GROUP_NONE) ?
BAD_VALUE : NO_ERROR;
}
@@ -639,6 +637,10 @@
return mCallRxSourceClient != nullptr && source == mCallRxSourceClient;
}
+ bool isCallTxAudioSource(const sp<SourceClientDescriptor> &source) {
+ return mCallTxSourceClient != nullptr && source == mCallTxSourceClient;
+ }
+
void connectTelephonyRxAudioSource();
void disconnectTelephonyAudioSource(sp<SourceClientDescriptor> &clientDesc);
diff --git a/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp b/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp
index b15b61d..e8be08f 100644
--- a/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp
+++ b/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp
@@ -1176,12 +1176,12 @@
return Status::ok();
}
-Status AudioPolicyService::getDevicesForAttributes(const media::AudioAttributesEx& attrAidl,
+Status AudioPolicyService::getDevicesForAttributes(const media::AudioAttributesInternal& attrAidl,
bool forVolume,
std::vector<AudioDevice>* _aidl_return)
{
- AudioAttributes aa = VALUE_OR_RETURN_BINDER_STATUS(
- aidl2legacy_AudioAttributesEx_AudioAttributes(attrAidl));
+ audio_attributes_t aa = VALUE_OR_RETURN_BINDER_STATUS(
+ aidl2legacy_AudioAttributesInternal_audio_attributes_t(attrAidl));
AudioDeviceTypeAddrVector devices;
if (mAudioPolicyManager == NULL) {
@@ -1190,8 +1190,7 @@
Mutex::Autolock _l(mLock);
AutoCallerClear acc;
RETURN_IF_BINDER_ERROR(binderStatusFromStatusT(
- mAudioPolicyManager->getDevicesForAttributes(
- aa.getAttributes(), &devices, forVolume)));
+ mAudioPolicyManager->getDevicesForAttributes(aa, &devices, forVolume)));
*_aidl_return = VALUE_OR_RETURN_BINDER_STATUS(
convertContainer<std::vector<AudioDevice>>(devices,
legacy2aidl_AudioDeviceTypeAddress));
@@ -2084,9 +2083,10 @@
}
Status AudioPolicyService::getProductStrategyFromAudioAttributes(
- const media::AudioAttributesEx& aaAidl, bool fallbackOnDefault, int32_t* _aidl_return) {
- AudioAttributes aa = VALUE_OR_RETURN_BINDER_STATUS(
- aidl2legacy_AudioAttributesEx_AudioAttributes(aaAidl));
+ const media::AudioAttributesInternal& aaAidl,
+ bool fallbackOnDefault, int32_t* _aidl_return) {
+ audio_attributes_t aa = VALUE_OR_RETURN_BINDER_STATUS(
+ aidl2legacy_AudioAttributesInternal_audio_attributes_t(aaAidl));
product_strategy_t productStrategy;
if (mAudioPolicyManager == NULL) {
@@ -2117,9 +2117,10 @@
}
Status AudioPolicyService::getVolumeGroupFromAudioAttributes(
- const media::AudioAttributesEx& aaAidl, bool fallbackOnDefault, int32_t* _aidl_return) {
- AudioAttributes aa = VALUE_OR_RETURN_BINDER_STATUS(
- aidl2legacy_AudioAttributesEx_AudioAttributes(aaAidl));
+ const media::AudioAttributesInternal& aaAidl,
+ bool fallbackOnDefault, int32_t* _aidl_return) {
+ audio_attributes_t aa = VALUE_OR_RETURN_BINDER_STATUS(
+ aidl2legacy_AudioAttributesInternal_audio_attributes_t(aaAidl));
volume_group_t volumeGroup;
if (mAudioPolicyManager == NULL) {
diff --git a/services/audiopolicy/service/AudioPolicyService.cpp b/services/audiopolicy/service/AudioPolicyService.cpp
index 4f3ea6c..92e1b6b 100644
--- a/services/audiopolicy/service/AudioPolicyService.cpp
+++ b/services/audiopolicy/service/AudioPolicyService.cpp
@@ -272,6 +272,11 @@
if (hasSpatializer) {
mSpatializer = Spatializer::create(this);
}
+ if (mSpatializer == nullptr) {
+ // No spatializer was created; record why: NO_INIT means creation failed, OK means none was intended.
+ const status_t createStatus = hasSpatializer ? NO_INIT : OK;
+ Spatializer::sendEmptyCreateSpatializerMetricWithStatus(createStatus);
+ }
}
AudioSystem::audioPolicyReady();
}
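Note: when no Spatializer instance is created, the service still emits a single create metric so dashboards can distinguish an intentional absence from a failed creation. The mapping reduces to a couple of lines; a sketch, assuming Android's utils/Errors.h status codes:

    #include <utils/Errors.h>   // status_t, OK, NO_INIT (Android header; assumption for this sketch)

    status_t spatializerAbsenceStatus(bool hasSpatializerConfig, bool created) {
        if (created) return OK;                       // nothing to report in this path
        return hasSpatializerConfig ? NO_INIT : OK;   // NO_INIT: creation failed; OK: never expected
    }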
diff --git a/services/audiopolicy/service/AudioPolicyService.h b/services/audiopolicy/service/AudioPolicyService.h
index 5c37f99..3a7fffa 100644
--- a/services/audiopolicy/service/AudioPolicyService.h
+++ b/services/audiopolicy/service/AudioPolicyService.h
@@ -134,7 +134,7 @@
int32_t* _aidl_return) override;
binder::Status getStrategyForStream(AudioStreamType stream,
int32_t* _aidl_return) override;
- binder::Status getDevicesForAttributes(const media::AudioAttributesEx& attr,
+ binder::Status getDevicesForAttributes(const media::AudioAttributesInternal& attr,
bool forVolume,
std::vector<AudioDevice>* _aidl_return) override;
binder::Status getOutputForEffect(const media::EffectDescriptor& desc,
@@ -224,12 +224,12 @@
binder::Status isUltrasoundSupported(bool* _aidl_return) override;
binder::Status listAudioProductStrategies(
std::vector<media::AudioProductStrategy>* _aidl_return) override;
- binder::Status getProductStrategyFromAudioAttributes(const media::AudioAttributesEx& aa,
+ binder::Status getProductStrategyFromAudioAttributes(const media::AudioAttributesInternal& aa,
bool fallbackOnDefault,
int32_t* _aidl_return) override;
binder::Status listAudioVolumeGroups(
std::vector<media::AudioVolumeGroup>* _aidl_return) override;
- binder::Status getVolumeGroupFromAudioAttributes(const media::AudioAttributesEx& aa,
+ binder::Status getVolumeGroupFromAudioAttributes(const media::AudioAttributesInternal& aa,
bool fallbackOnDefault,
int32_t* _aidl_return) override;
binder::Status setRttEnabled(bool enabled) override;
diff --git a/services/audiopolicy/service/Spatializer.cpp b/services/audiopolicy/service/Spatializer.cpp
index d411b8e..d9b856b 100644
--- a/services/audiopolicy/service/Spatializer.cpp
+++ b/services/audiopolicy/service/Spatializer.cpp
@@ -387,6 +387,18 @@
return NO_ERROR;
}
+/* static */
+void Spatializer::sendEmptyCreateSpatializerMetricWithStatus(status_t status) {
+ mediametrics::LogItem(kDefaultMetricsId)
+ .set(AMEDIAMETRICS_PROP_EVENT, AMEDIAMETRICS_PROP_EVENT_VALUE_CREATE)
+ .set(AMEDIAMETRICS_PROP_CHANNELMASKS, "")
+ .set(AMEDIAMETRICS_PROP_LEVELS, "")
+ .set(AMEDIAMETRICS_PROP_MODES, "")
+ .set(AMEDIAMETRICS_PROP_HEADTRACKINGMODES, "")
+ .set(AMEDIAMETRICS_PROP_STATUS, (int32_t)status)
+ .record();
+}
+
/** Gets the channel mask, sampling rate and format set for the spatializer input. */
audio_config_base_t Spatializer::getAudioInConfig() const {
std::lock_guard lock(mLock);
@@ -729,6 +741,17 @@
msg->post();
}
+void Spatializer::resetEngineHeadPose_l() {
+ ALOGV("%s mEngine %p", __func__, mEngine.get());
+ if (mEngine == nullptr) {
+ return;
+ }
+ const std::vector<float> headToStage(6, 0.0);
+ setEffectParameter_l(SPATIALIZER_PARAM_HEAD_TO_STAGE, headToStage);
+ setEffectParameter_l(SPATIALIZER_PARAM_HEADTRACKING_MODE,
+ std::vector<SpatializerHeadTrackingMode>{SpatializerHeadTrackingMode::DISABLED});
+}
+
void Spatializer::onHeadToStagePoseMsg(const std::vector<float>& headToStage) {
ALOGV("%s", __func__);
sp<media::ISpatializerHeadTrackingCallback> callback;
@@ -748,11 +771,9 @@
}
void Spatializer::onActualModeChange(HeadTrackingMode mode) {
- std::string modeStr = SpatializerPoseController::toString(mode);
+ std::string modeStr = media::toString(mode);
ALOGV("%s(%s)", __func__, modeStr.c_str());
- mLocalLog.log("%s with %s", __func__, modeStr.c_str());
- sp<AMessage> msg =
- new AMessage(EngineCallbackHandler::kWhatOnActualModeChange, mHandler);
+ sp<AMessage> msg = new AMessage(EngineCallbackHandler::kWhatOnActualModeChange, mHandler);
msg->setInt32(EngineCallbackHandler::kModeKey, static_cast<int>(mode));
msg->post();
}
@@ -782,12 +803,15 @@
}
mActualHeadTrackingMode = spatializerMode;
if (mEngine != nullptr) {
- setEffectParameter_l(SPATIALIZER_PARAM_HEADTRACKING_MODE,
- std::vector<SpatializerHeadTrackingMode>{spatializerMode});
+ if (spatializerMode == SpatializerHeadTrackingMode::DISABLED) {
+ resetEngineHeadPose_l();
+ } else {
+ setEffectParameter_l(SPATIALIZER_PARAM_HEADTRACKING_MODE,
+ std::vector<SpatializerHeadTrackingMode>{spatializerMode});
+ }
}
callback = mHeadTrackingCallback;
- mLocalLog.log("%s: %s, spatializerMode %s", __func__,
- SpatializerPoseController::toString(mode).c_str(),
+ mLocalLog.log("%s: %s, spatializerMode %s", __func__, media::toString(mode).c_str(),
media::toString(spatializerMode).c_str());
}
if (callback != nullptr) {
@@ -915,16 +939,25 @@
bool lowLatencySupported = mSupportedLatencyModes.empty()
|| (std::find(mSupportedLatencyModes.begin(), mSupportedLatencyModes.end(),
AUDIO_LATENCY_MODE_LOW) != mSupportedLatencyModes.end());
- if (mSupportsHeadTracking && mPoseController != nullptr) {
- if (lowLatencySupported && mNumActiveTracks > 0 && mLevel != SpatializationLevel::NONE
- && mDesiredHeadTrackingMode != HeadTrackingMode::STATIC
- && mHeadSensor != SpatializerPoseController::INVALID_SENSOR) {
- mPoseController->setHeadSensor(mHeadSensor);
- mPoseController->setScreenSensor(mScreenSensor);
- requestedLatencyMode = AUDIO_LATENCY_MODE_LOW;
+ if (mSupportsHeadTracking) {
+ if (mPoseController != nullptr) {
+ if (lowLatencySupported && mNumActiveTracks > 0 && mLevel != SpatializationLevel::NONE
+ && mDesiredHeadTrackingMode != HeadTrackingMode::STATIC
+ && mHeadSensor != SpatializerPoseController::INVALID_SENSOR) {
+ if (mEngine != nullptr) {
+ setEffectParameter_l(SPATIALIZER_PARAM_HEADTRACKING_MODE,
+ std::vector<SpatializerHeadTrackingMode>{mActualHeadTrackingMode});
+ }
+ mPoseController->setHeadSensor(mHeadSensor);
+ mPoseController->setScreenSensor(mScreenSensor);
+ requestedLatencyMode = AUDIO_LATENCY_MODE_LOW;
+ } else {
+ mPoseController->setHeadSensor(SpatializerPoseController::INVALID_SENSOR);
+ mPoseController->setScreenSensor(SpatializerPoseController::INVALID_SENSOR);
+ resetEngineHeadPose_l();
+ }
} else {
- mPoseController->setHeadSensor(SpatializerPoseController::INVALID_SENSOR);
- mPoseController->setScreenSensor(SpatializerPoseController::INVALID_SENSOR);
+ resetEngineHeadPose_l();
}
}
if (mOutput != AUDIO_IO_HANDLE_NONE) {
@@ -938,8 +971,6 @@
mEngine->setEnabled(true);
setEffectParameter_l(SPATIALIZER_PARAM_LEVEL,
std::vector<SpatializationLevel>{mLevel});
- setEffectParameter_l(SPATIALIZER_PARAM_HEADTRACKING_MODE,
- std::vector<SpatializerHeadTrackingMode>{mActualHeadTrackingMode});
} else {
setEffectParameter_l(SPATIALIZER_PARAM_LEVEL,
std::vector<SpatializationLevel>{SpatializationLevel::NONE});
@@ -961,6 +992,7 @@
mPoseController->setDisplayOrientation(mDisplayOrientation);
} else if (!isControllerNeeded && mPoseController != nullptr) {
mPoseController.reset();
+ resetEngineHeadPose_l();
}
if (mPoseController != nullptr) {
mPoseController->setDesiredMode(mDesiredHeadTrackingMode);
@@ -1009,7 +1041,7 @@
base::StringAppendF(&ss, " %s", media::toString(mode).c_str());
}
base::StringAppendF(&ss, "], Desired: %s, Actual %s\n",
- SpatializerPoseController::toString(mDesiredHeadTrackingMode).c_str(),
+ media::toString(mDesiredHeadTrackingMode).c_str(),
media::toString(mActualHeadTrackingMode).c_str());
base::StringAppendF(&ss, "%smSpatializationModes: [", prefixSpace.c_str());
@@ -1044,9 +1076,9 @@
ss.append(prefixSpace +
"Sensor data format - [rx, ry, rz, vx, vy, vz] (units-degree, "
"r-transform, v-angular velocity, x-pitch, y-roll, z-yaw):\n");
- ss.append(prefixSpace + "PerMinuteHistory:\n");
+ ss.append(prefixSpace + " PerMinuteHistory:\n");
ss += mPoseDurableRecorder.toString(level + 1);
- ss.append(prefixSpace + "PerSecondHistory:\n");
+ ss.append(prefixSpace + " PerSecondHistory:\n");
ss += mPoseRecorder.toString(level + 1);
} else {
ss.append(prefixSpace).append("SpatializerPoseController not exist\n");
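Note: resetEngineHeadPose_l() recenters the engine by pushing an all-zero head-to-stage vector (three rotation and three translation components) and forcing the DISABLED head-tracking mode. A standalone sketch of the same idea with a stub engine type; the real code goes through setEffectParameter_l:

    #include <vector>

    struct EngineStub {                       // stand-in for the spatializer effect engine
        std::vector<float> headToStage;       // 6 floats: rotation then translation
        bool headTrackingEnabled = true;
    };

    void resetHeadPose(EngineStub &engine) {
        engine.headToStage.assign(6, 0.0f);   // identity pose: zero rotation, zero translation
        engine.headTrackingEnabled = false;   // mirrors switching the engine to DISABLED
    }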
diff --git a/services/audiopolicy/service/Spatializer.h b/services/audiopolicy/service/Spatializer.h
index 586fc1f..ba60cae 100644
--- a/services/audiopolicy/service/Spatializer.h
+++ b/services/audiopolicy/service/Spatializer.h
@@ -199,6 +199,11 @@
return ss;
};
+ // If the Spatializer is not created, we send the status for metrics purposes.
+ // OK: Spatializer not expected to be created.
+ // NO_INIT: Spatializer creation failed.
+ static void sendEmptyCreateSpatializerMetricWithStatus(status_t status);
+
private:
Spatializer(effect_descriptor_t engineDescriptor,
SpatializerPolicyCallback *callback);
@@ -346,13 +351,21 @@
*/
void checkEngineState_l() REQUIRES(mLock);
+ /**
+ * Reset the head tracking mode and recenter the pose in the engine; called when head
+ * tracking is disabled.
+ */
+ void resetEngineHeadPose_l() REQUIRES(mLock);
+
/** Effect engine descriptor */
const effect_descriptor_t mEngineDescriptor;
/** Callback interface to parent audio policy service */
SpatializerPolicyCallback* const mPolicyCallback;
/** Currently there is only one version of the spatializer running */
- const std::string mMetricsId = AMEDIAMETRICS_KEY_PREFIX_AUDIO_SPATIALIZER "0";
+ static constexpr const char* kDefaultMetricsId =
+ AMEDIAMETRICS_KEY_PREFIX_AUDIO_SPATIALIZER "0";
+ const std::string mMetricsId = kDefaultMetricsId;
/** Mutex protecting internal state */
mutable std::mutex mLock;
diff --git a/services/audiopolicy/service/SpatializerPoseController.cpp b/services/audiopolicy/service/SpatializerPoseController.cpp
index 8624aa1..72dba3d 100644
--- a/services/audiopolicy/service/SpatializerPoseController.cpp
+++ b/services/audiopolicy/service/SpatializerPoseController.cpp
@@ -43,7 +43,7 @@
constexpr float kMaxTranslationalVelocity = 2;
// This is how fast, in rad/s, we allow rotation angle to shift during rate-limiting.
-constexpr float kMaxRotationalVelocity = 8;
+constexpr float kMaxRotationalVelocity = 0.8f;
// This is how far into the future we predict the head pose, using linear extrapolation based on
// twist (velocity). It should be set to a value that matches the characteristic durations of moving
@@ -312,14 +312,14 @@
}
ss += prefixSpace;
- if (mHeadSensor == media::SensorPoseProvider::INVALID_HANDLE) {
- ss.append("HeadSensor: INVALID\n");
+ if (mHeadSensor == INVALID_SENSOR) {
+ ss += "HeadSensor: INVALID\n";
} else {
base::StringAppendF(&ss, "HeadSensor: 0x%08x\n", mHeadSensor);
}
ss += prefixSpace;
- if (mScreenSensor == media::SensorPoseProvider::INVALID_HANDLE) {
+ if (mScreenSensor == INVALID_SENSOR) {
ss += "ScreenSensor: INVALID\n";
} else {
base::StringAppendF(&ss, "ScreenSensor: 0x%08x\n", mScreenSensor);
@@ -327,7 +327,7 @@
ss += prefixSpace;
if (mActualMode.has_value()) {
- base::StringAppendF(&ss, "ActualMode: %s", toString(mActualMode.value()).c_str());
+ base::StringAppendF(&ss, "ActualMode: %s\n", media::toString(mActualMode.value()).c_str());
} else {
ss += "ActualMode NOTEXIST\n";
}
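Note: lowering kMaxRotationalVelocity from 8 to 0.8 rad/s tightens the rate limiter to roughly 46 degrees of rotation per second. A simplified scalar sketch of per-frame rate limiting; the real limiter operates on the pose twist, not a single angle:

    #include <algorithm>

    constexpr float kMaxRotationalVelocity = 0.8f;  // rad/s, matching the change above

    // Clamp how far the tracked angle may move toward the target in one frame.
    float rateLimitAngle(float previousAngle, float targetAngle, float dtSeconds) {
        const float maxDelta = kMaxRotationalVelocity * dtSeconds;
        const float delta = std::clamp(targetAngle - previousAngle, -maxDelta, maxDelta);
        return previousAngle + delta;
    }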
diff --git a/services/audiopolicy/service/SpatializerPoseController.h b/services/audiopolicy/service/SpatializerPoseController.h
index 546eba0..233f94c 100644
--- a/services/audiopolicy/service/SpatializerPoseController.h
+++ b/services/audiopolicy/service/SpatializerPoseController.h
@@ -116,18 +116,6 @@
// convert fields to a printable string
std::string toString(unsigned level) const;
- static std::string toString(media::HeadTrackingMode mode) {
- switch (mode) {
- case media::HeadTrackingMode::STATIC:
- return "STATIC";
- case media::HeadTrackingMode::WORLD_RELATIVE:
- return "WORLD_RELATIVE";
- case media::HeadTrackingMode::SCREEN_RELATIVE:
- return "SCREEN_RELATIVE";
- }
- return "EnumNotImplemented";
- };
-
private:
mutable std::timed_mutex mMutex;
Listener* const mListener;
diff --git a/services/audiopolicy/tests/audiopolicymanager_tests.cpp b/services/audiopolicy/tests/audiopolicymanager_tests.cpp
index bb00c48..ba5b6b2 100644
--- a/services/audiopolicy/tests/audiopolicymanager_tests.cpp
+++ b/services/audiopolicy/tests/audiopolicymanager_tests.cpp
@@ -14,6 +14,7 @@
* limitations under the License.
*/
+#include <cstring>
#include <memory>
#include <string>
#include <sys/wait.h>
@@ -42,6 +43,40 @@
using testing::UnorderedElementsAre;
using android::content::AttributionSourceState;
+namespace {
+
+AudioMixMatchCriterion createUidCriterion(uint32_t uid, bool exclude = false) {
+ AudioMixMatchCriterion criterion;
+ criterion.mValue.mUid = uid;
+ criterion.mRule = exclude ? RULE_EXCLUDE_UID : RULE_MATCH_UID;
+ return criterion;
+}
+
+AudioMixMatchCriterion createUsageCriterion(audio_usage_t usage, bool exclude = false) {
+ AudioMixMatchCriterion criterion;
+ criterion.mValue.mUsage = usage;
+ criterion.mRule = exclude ? RULE_EXCLUDE_ATTRIBUTE_USAGE : RULE_MATCH_ATTRIBUTE_USAGE;
+ return criterion;
+}
+
+AudioMixMatchCriterion createCapturePresetCriterion(audio_source_t source, bool exclude = false) {
+ AudioMixMatchCriterion criterion;
+ criterion.mValue.mSource = source;
+ criterion.mRule = exclude ?
+ RULE_EXCLUDE_ATTRIBUTE_CAPTURE_PRESET : RULE_MATCH_ATTRIBUTE_CAPTURE_PRESET;
+ return criterion;
+}
+
+AudioMixMatchCriterion createSessionIdCriterion(audio_session_t session, bool exclude = false) {
+ AudioMixMatchCriterion criterion;
+ criterion.mValue.mAudioSessionId = session;
+ criterion.mRule = exclude ?
+ RULE_EXCLUDE_AUDIO_SESSION_ID : RULE_MATCH_AUDIO_SESSION_ID;
+ return criterion;
+}
+
+} // namespace
+
TEST(AudioPolicyManagerTestInit, EngineFailure) {
AudioPolicyTestClient client;
AudioPolicyTestManager manager(&client);
@@ -125,9 +160,11 @@
audio_output_flags_t flags = AUDIO_OUTPUT_FLAG_NONE,
audio_io_handle_t *output = nullptr,
audio_port_handle_t *portId = nullptr,
- audio_attributes_t attr = {});
+ audio_attributes_t attr = {},
+ audio_session_t session = AUDIO_SESSION_NONE);
void getInputForAttr(
const audio_attributes_t &attr,
+ audio_session_t session,
audio_unique_id_t riid,
audio_port_handle_t *selectedDeviceId,
audio_format_t format,
@@ -207,7 +244,8 @@
audio_output_flags_t flags,
audio_io_handle_t *output,
audio_port_handle_t *portId,
- audio_attributes_t attr) {
+ audio_attributes_t attr,
+ audio_session_t session) {
audio_io_handle_t localOutput;
if (!output) output = &localOutput;
*output = AUDIO_IO_HANDLE_NONE;
@@ -226,7 +264,7 @@
attributionSource.uid = 0;
attributionSource.token = sp<BBinder>::make();
ASSERT_EQ(OK, mManager->getOutputForAttr(
- &attr, output, AUDIO_SESSION_NONE, &stream, attributionSource, &config, &flags,
+ &attr, output, session, &stream, attributionSource, &config, &flags,
selectedDeviceId, portId, {}, &outputType, &isSpatialized));
ASSERT_NE(AUDIO_PORT_HANDLE_NONE, *portId);
ASSERT_NE(AUDIO_IO_HANDLE_NONE, *output);
@@ -234,6 +272,7 @@
void AudioPolicyManagerTest::getInputForAttr(
const audio_attributes_t &attr,
+ const audio_session_t session,
audio_unique_id_t riid,
audio_port_handle_t *selectedDeviceId,
audio_format_t format,
@@ -255,7 +294,7 @@
attributionSource.uid = 0;
attributionSource.token = sp<BBinder>::make();
ASSERT_EQ(OK, mManager->getInputForAttr(
- &attr, &input, riid, AUDIO_SESSION_NONE, attributionSource, &config, flags,
+ &attr, &input, riid, session, attributionSource, &config, flags,
selectedDeviceId, &inputType, portId));
ASSERT_NE(AUDIO_PORT_HANDLE_NONE, *portId);
}
@@ -888,8 +927,8 @@
audio_source_t source = AUDIO_SOURCE_VOICE_COMMUNICATION;
audio_attributes_t attr = {
AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_UNKNOWN, source, AUDIO_FLAG_NONE, ""};
- ASSERT_NO_FATAL_FAILURE(getInputForAttr(attr, 1, &selectedDeviceId, AUDIO_FORMAT_PCM_16_BIT,
- AUDIO_CHANNEL_IN_MONO, 8000, AUDIO_INPUT_FLAG_VOIP_TX, &mixPortId));
+ ASSERT_NO_FATAL_FAILURE(getInputForAttr(attr, AUDIO_SESSION_NONE, 1, &selectedDeviceId,
+ AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_IN_MONO, 8000, AUDIO_INPUT_FLAG_VOIP_TX, &mixPortId));
std::vector<audio_port_v7> ports;
ASSERT_NO_FATAL_FAILURE(
@@ -943,15 +982,13 @@
}
}
-using PolicyMixTuple = std::tuple<audio_usage_t, audio_source_t, uint32_t>;
-
class AudioPolicyManagerTestDynamicPolicy : public AudioPolicyManagerTestWithConfigurationFile {
protected:
void TearDown() override;
status_t addPolicyMix(int mixType, int mixFlag, audio_devices_t deviceType,
std::string mixAddress, const audio_config_t& audioConfig,
- const std::vector<PolicyMixTuple>& rules);
+ const std::vector<AudioMixMatchCriterion>& matchCriteria);
void clearPolicyMix();
Vector<AudioMix> mAudioMixes;
@@ -965,15 +1002,8 @@
status_t AudioPolicyManagerTestDynamicPolicy::addPolicyMix(int mixType, int mixFlag,
audio_devices_t deviceType, std::string mixAddress, const audio_config_t& audioConfig,
- const std::vector<PolicyMixTuple>& rules) {
- Vector<AudioMixMatchCriterion> myMixMatchCriteria;
-
- for(const auto &rule: rules) {
- myMixMatchCriteria.add(AudioMixMatchCriterion(
- std::get<0>(rule), std::get<1>(rule), std::get<2>(rule)));
- }
-
- AudioMix myAudioMix(myMixMatchCriteria, mixType, audioConfig, mixFlag,
+ const std::vector<AudioMixMatchCriterion>& matchCriteria = {}) {
+ AudioMix myAudioMix(matchCriteria, mixType, audioConfig, mixFlag,
String8(mixAddress.c_str()), 0);
myAudioMix.mDeviceType = deviceType;
// Clear mAudioMix before add new one to make sure we don't add already exist mixes.
@@ -1007,13 +1037,13 @@
// Only capture of playback is allowed in LOOP_BACK &RENDER mode
ret = addPolicyMix(MIX_TYPE_RECORDERS, MIX_ROUTE_FLAG_LOOP_BACK_AND_RENDER,
- AUDIO_DEVICE_OUT_REMOTE_SUBMIX, "", audioConfig, std::vector<PolicyMixTuple>());
+ AUDIO_DEVICE_OUT_REMOTE_SUBMIX, "", audioConfig);
ASSERT_EQ(INVALID_OPERATION, ret);
// Fail due to the device is already connected.
clearPolicyMix();
ret = addPolicyMix(MIX_TYPE_PLAYERS, MIX_ROUTE_FLAG_LOOP_BACK,
- AUDIO_DEVICE_OUT_REMOTE_SUBMIX, "", audioConfig, std::vector<PolicyMixTuple>());
+ AUDIO_DEVICE_OUT_REMOTE_SUBMIX, "", audioConfig);
ASSERT_EQ(INVALID_OPERATION, ret);
// The first time to register policy mixes with valid parameter should succeed.
@@ -1022,8 +1052,7 @@
audioConfig.format = AUDIO_FORMAT_PCM_16_BIT;
audioConfig.sample_rate = k48000SamplingRate;
ret = addPolicyMix(MIX_TYPE_PLAYERS, MIX_ROUTE_FLAG_LOOP_BACK,
- AUDIO_DEVICE_OUT_REMOTE_SUBMIX, mMixAddress, audioConfig,
- std::vector<PolicyMixTuple>());
+ AUDIO_DEVICE_OUT_REMOTE_SUBMIX, mMixAddress, audioConfig);
ASSERT_EQ(NO_ERROR, ret);
// Registering the same policy mixes should fail.
ret = mManager->registerPolicyMixes(mAudioMixes);
@@ -1034,19 +1063,19 @@
// This will need to be updated if earpiece is added in the test configuration file.
clearPolicyMix();
ret = addPolicyMix(MIX_TYPE_PLAYERS, MIX_ROUTE_FLAG_RENDER,
- AUDIO_DEVICE_OUT_EARPIECE, "", audioConfig, std::vector<PolicyMixTuple>());
+ AUDIO_DEVICE_OUT_EARPIECE, "", audioConfig);
ASSERT_EQ(INVALID_OPERATION, ret);
// Registration should fail due to output not found.
clearPolicyMix();
ret = addPolicyMix(MIX_TYPE_PLAYERS, MIX_ROUTE_FLAG_RENDER,
- AUDIO_DEVICE_OUT_REMOTE_SUBMIX, "", audioConfig, std::vector<PolicyMixTuple>());
+ AUDIO_DEVICE_OUT_REMOTE_SUBMIX, "", audioConfig);
ASSERT_EQ(INVALID_OPERATION, ret);
// The first time to register valid policy mixes should succeed.
clearPolicyMix();
ret = addPolicyMix(MIX_TYPE_PLAYERS, MIX_ROUTE_FLAG_RENDER,
- AUDIO_DEVICE_OUT_SPEAKER, "", audioConfig, std::vector<PolicyMixTuple>());
+ AUDIO_DEVICE_OUT_SPEAKER, "", audioConfig);
ASSERT_EQ(NO_ERROR, ret);
// Registering the same policy mixes should fail.
ret = mManager->registerPolicyMixes(mAudioMixes);
@@ -1061,8 +1090,7 @@
audioConfig.format = AUDIO_FORMAT_PCM_16_BIT;
audioConfig.sample_rate = k48000SamplingRate;
ret = addPolicyMix(MIX_TYPE_PLAYERS, MIX_ROUTE_FLAG_LOOP_BACK,
- AUDIO_DEVICE_OUT_REMOTE_SUBMIX, mMixAddress, audioConfig,
- std::vector<PolicyMixTuple>());
+ AUDIO_DEVICE_OUT_REMOTE_SUBMIX, mMixAddress, audioConfig);
ASSERT_EQ(NO_ERROR, ret);
// After successfully registering policy mixes, it should be able to unregister.
@@ -1075,6 +1103,37 @@
ASSERT_EQ(INVALID_OPERATION, ret);
}
+TEST_F(AudioPolicyManagerTestDynamicPolicy, RegisterPolicyWithConsistentMixSucceeds) {
+ audio_config_t audioConfig = AUDIO_CONFIG_INITIALIZER;
+ audioConfig.channel_mask = AUDIO_CHANNEL_OUT_STEREO;
+ audioConfig.format = AUDIO_FORMAT_PCM_16_BIT;
+ audioConfig.sample_rate = k48000SamplingRate;
+
+ std::vector<AudioMixMatchCriterion> mixMatchCriteria = {
+ createUidCriterion(/*uid=*/42),
+ createUsageCriterion(AUDIO_USAGE_MEDIA, /*exclude=*/true)};
+ status_t ret = addPolicyMix(MIX_TYPE_PLAYERS, MIX_ROUTE_FLAG_LOOP_BACK,
+ AUDIO_DEVICE_OUT_REMOTE_SUBMIX, mMixAddress, audioConfig,
+ mixMatchCriteria);
+ ASSERT_EQ(NO_ERROR, ret);
+}
+
+TEST_F(AudioPolicyManagerTestDynamicPolicy, RegisterPolicyWithInconsistentMixFails) {
+ audio_config_t audioConfig = AUDIO_CONFIG_INITIALIZER;
+ audioConfig.channel_mask = AUDIO_CHANNEL_OUT_STEREO;
+ audioConfig.format = AUDIO_FORMAT_PCM_16_BIT;
+ audioConfig.sample_rate = k48000SamplingRate;
+
+ std::vector<AudioMixMatchCriterion> mixMatchCriteria = {
+ createUidCriterion(/*uid=*/42),
+ createUidCriterion(/*uid=*/1235, /*exclude=*/true),
+ createUsageCriterion(AUDIO_USAGE_MEDIA, /*exclude=*/true)};
+ status_t ret = addPolicyMix(MIX_TYPE_PLAYERS, MIX_ROUTE_FLAG_LOOP_BACK,
+ AUDIO_DEVICE_OUT_REMOTE_SUBMIX, mMixAddress, audioConfig,
+ mixMatchCriteria);
+ ASSERT_EQ(INVALID_OPERATION, ret);
+}
+
class AudioPolicyManagerTestForHdmi
: public AudioPolicyManagerTestWithConfigurationFile,
public testing::WithParamInterface<audio_format_t> {
@@ -1299,26 +1358,52 @@
audioConfig.format = AUDIO_FORMAT_PCM_16_BIT;
audioConfig.sample_rate = k48000SamplingRate;
ret = addPolicyMix(MIX_TYPE_PLAYERS, MIX_ROUTE_FLAG_LOOP_BACK,
- AUDIO_DEVICE_OUT_REMOTE_SUBMIX, "", audioConfig, std::vector<PolicyMixTuple>());
+ AUDIO_DEVICE_OUT_REMOTE_SUBMIX, "", audioConfig);
ASSERT_EQ(INVALID_OPERATION, ret);
ret = mManager->unregisterPolicyMixes(mAudioMixes);
ASSERT_EQ(INVALID_OPERATION, ret);
}
+struct DPTestParam {
+ DPTestParam(const std::vector<AudioMixMatchCriterion>& mixCriteria,
+ bool expected_match = false)
+ : mixCriteria(mixCriteria), attributes(defaultAttr), session(AUDIO_SESSION_NONE),
+ expected_match(expected_match) {}
+
+ DPTestParam& withUsage(audio_usage_t usage) {
+ attributes.usage = usage;
+ return *this;
+ }
+
+ DPTestParam& withTags(const char *tags) {
+ std::strncpy(attributes.tags, tags, sizeof(attributes.tags));
+ return *this;
+ }
+
+ DPTestParam& withSource(audio_source_t source) {
+ attributes.source = source;
+ return *this;
+ }
+
+ DPTestParam& withSessionId(audio_session_t sessionId) {
+ session = sessionId;
+ return *this;
+ }
+
+ std::vector<AudioMixMatchCriterion> mixCriteria;
+ audio_attributes_t attributes;
+ audio_session_t session;
+ bool expected_match;
+};
+
class AudioPolicyManagerTestDPPlaybackReRouting : public AudioPolicyManagerTestDynamicPolicy,
- public testing::WithParamInterface<audio_attributes_t> {
+ public testing::WithParamInterface<DPTestParam> {
protected:
void SetUp() override;
void TearDown() override;
std::unique_ptr<RecordingActivityTracker> mTracker;
-
- std::vector<PolicyMixTuple> mUsageRules = {
- {AUDIO_USAGE_MEDIA, AUDIO_SOURCE_DEFAULT, RULE_MATCH_ATTRIBUTE_USAGE},
- {AUDIO_USAGE_ALARM, AUDIO_SOURCE_DEFAULT, RULE_MATCH_ATTRIBUTE_USAGE}
- };
-
struct audio_port_v7 mInjectionPort;
audio_port_handle_t mPortId = AUDIO_PORT_HANDLE_NONE;
};
@@ -1332,8 +1417,10 @@
audioConfig.channel_mask = AUDIO_CHANNEL_OUT_STEREO;
audioConfig.format = AUDIO_FORMAT_PCM_16_BIT;
audioConfig.sample_rate = k48000SamplingRate;
+
+ DPTestParam param = GetParam();
status_t ret = addPolicyMix(MIX_TYPE_PLAYERS, MIX_ROUTE_FLAG_LOOP_BACK,
- AUDIO_DEVICE_OUT_REMOTE_SUBMIX, mMixAddress, audioConfig, mUsageRules);
+ AUDIO_DEVICE_OUT_REMOTE_SUBMIX, mMixAddress, audioConfig, param.mixCriteria);
ASSERT_EQ(NO_ERROR, ret);
struct audio_port_v7 extractionPort;
@@ -1346,8 +1433,9 @@
AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_UNKNOWN, source, AUDIO_FLAG_NONE, ""};
std::string tags = "addr=" + mMixAddress;
strncpy(attr.tags, tags.c_str(), AUDIO_ATTRIBUTES_TAGS_MAX_SIZE - 1);
- getInputForAttr(attr, mTracker->getRiid(), &selectedDeviceId, AUDIO_FORMAT_PCM_16_BIT,
- AUDIO_CHANNEL_IN_STEREO, k48000SamplingRate, AUDIO_INPUT_FLAG_NONE, &mPortId);
+ getInputForAttr(attr, param.session, mTracker->getRiid(), &selectedDeviceId,
+ AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_IN_STEREO, k48000SamplingRate,
+ AUDIO_INPUT_FLAG_NONE, &mPortId);
ASSERT_EQ(NO_ERROR, mManager->startInput(mPortId));
ASSERT_EQ(extractionPort.id, selectedDeviceId);
@@ -1360,151 +1448,169 @@
AudioPolicyManagerTestDynamicPolicy::TearDown();
}
-TEST_F(AudioPolicyManagerTestDPPlaybackReRouting, InitSuccess) {
- // SetUp must finish with no assertions
-}
-
-TEST_F(AudioPolicyManagerTestDPPlaybackReRouting, Dump) {
- dumpToLog();
-}
-
TEST_P(AudioPolicyManagerTestDPPlaybackReRouting, PlaybackReRouting) {
- const audio_attributes_t attr = GetParam();
- const audio_usage_t usage = attr.usage;
+ const DPTestParam param = GetParam();
+ const audio_attributes_t& attr = param.attributes;
audio_port_handle_t playbackRoutedPortId = AUDIO_PORT_HANDLE_NONE;
getOutputForAttr(&playbackRoutedPortId, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
k48000SamplingRate, AUDIO_OUTPUT_FLAG_NONE, nullptr /*output*/, nullptr /*portId*/,
- attr);
- if (std::find_if(begin(mUsageRules), end(mUsageRules), [&usage](const auto &usageRule) {
- return (std::get<0>(usageRule) == usage) &&
- (std::get<2>(usageRule) == RULE_MATCH_ATTRIBUTE_USAGE);}) != end(mUsageRules) ||
- (strncmp(attr.tags, "addr=", strlen("addr=")) == 0 &&
- strncmp(attr.tags + strlen("addr="), mMixAddress.c_str(),
- AUDIO_ATTRIBUTES_TAGS_MAX_SIZE - strlen("addr=") - 1) == 0)) {
+ attr, param.session);
+ if (param.expected_match) {
EXPECT_EQ(mInjectionPort.id, playbackRoutedPortId);
} else {
EXPECT_NE(mInjectionPort.id, playbackRoutedPortId);
}
}
-INSTANTIATE_TEST_CASE_P(
- PlaybackReroutingUsageMatch,
- AudioPolicyManagerTestDPPlaybackReRouting,
- testing::Values(
- (audio_attributes_t){AUDIO_CONTENT_TYPE_MUSIC, AUDIO_USAGE_MEDIA,
- AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""},
- (audio_attributes_t){AUDIO_CONTENT_TYPE_MUSIC, AUDIO_USAGE_ALARM,
- AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""}
- )
- );
+const std::vector<AudioMixMatchCriterion> USAGE_MEDIA_ALARM_CRITERIA = {
+ createUsageCriterion(AUDIO_USAGE_MEDIA),
+ createUsageCriterion(AUDIO_USAGE_ALARM)
+};
-INSTANTIATE_TEST_CASE_P(
- PlaybackReroutingAddressPriorityMatch,
- AudioPolicyManagerTestDPPlaybackReRouting,
- testing::Values(
- (audio_attributes_t){AUDIO_CONTENT_TYPE_MUSIC, AUDIO_USAGE_MEDIA,
- AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, "addr=remote_submix_media"},
- (audio_attributes_t){AUDIO_CONTENT_TYPE_MUSIC, AUDIO_USAGE_VOICE_COMMUNICATION,
- AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, "addr=remote_submix_media"},
- (audio_attributes_t){AUDIO_CONTENT_TYPE_MUSIC,
- AUDIO_USAGE_VOICE_COMMUNICATION_SIGNALLING,
- AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, "addr=remote_submix_media"},
- (audio_attributes_t){AUDIO_CONTENT_TYPE_MUSIC, AUDIO_USAGE_ALARM,
- AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, "addr=remote_submix_media"},
- (audio_attributes_t){AUDIO_CONTENT_TYPE_MUSIC, AUDIO_USAGE_NOTIFICATION,
- AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, "addr=remote_submix_media"},
- (audio_attributes_t){AUDIO_CONTENT_TYPE_MUSIC,
- AUDIO_USAGE_NOTIFICATION_TELEPHONY_RINGTONE,
- AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, "addr=remote_submix_media"},
- (audio_attributes_t){AUDIO_CONTENT_TYPE_MUSIC,
- AUDIO_USAGE_NOTIFICATION_COMMUNICATION_REQUEST,
- AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, "addr=remote_submix_media"},
- (audio_attributes_t){AUDIO_CONTENT_TYPE_MUSIC,
- AUDIO_USAGE_NOTIFICATION_COMMUNICATION_INSTANT,
- AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, "addr=remote_submix_media"},
- (audio_attributes_t){AUDIO_CONTENT_TYPE_MUSIC,
- AUDIO_USAGE_NOTIFICATION_COMMUNICATION_DELAYED,
- AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, "addr=remote_submix_media"},
- (audio_attributes_t){AUDIO_CONTENT_TYPE_MUSIC, AUDIO_USAGE_NOTIFICATION_EVENT,
- AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, "addr=remote_submix_media"},
- (audio_attributes_t){AUDIO_CONTENT_TYPE_MUSIC,
- AUDIO_USAGE_ASSISTANCE_ACCESSIBILITY,
- AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, "addr=remote_submix_media"},
- (audio_attributes_t){AUDIO_CONTENT_TYPE_MUSIC,
- AUDIO_USAGE_ASSISTANCE_NAVIGATION_GUIDANCE,
- AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, "addr=remote_submix_media"},
- (audio_attributes_t){AUDIO_CONTENT_TYPE_MUSIC,
- AUDIO_USAGE_ASSISTANCE_SONIFICATION,
- AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, "addr=remote_submix_media"},
- (audio_attributes_t){AUDIO_CONTENT_TYPE_MUSIC, AUDIO_USAGE_GAME,
- AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, "addr=remote_submix_media"},
- (audio_attributes_t){AUDIO_CONTENT_TYPE_MUSIC, AUDIO_USAGE_VIRTUAL_SOURCE,
- AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, "addr=remote_submix_media"},
- (audio_attributes_t){AUDIO_CONTENT_TYPE_MUSIC, AUDIO_USAGE_ASSISTANT,
- AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, "addr=remote_submix_media"},
- (audio_attributes_t){AUDIO_CONTENT_TYPE_SPEECH, AUDIO_USAGE_ASSISTANT,
- AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, "addr=remote_submix_media"}
- )
- );
+INSTANTIATE_TEST_SUITE_P(
+ PlaybackReroutingUsageMatch,
+ AudioPolicyManagerTestDPPlaybackReRouting,
+ testing::Values(
+ DPTestParam(USAGE_MEDIA_ALARM_CRITERIA, /*expected_match=*/ true)
+ .withUsage(AUDIO_USAGE_MEDIA),
+ DPTestParam(USAGE_MEDIA_ALARM_CRITERIA, /*expected_match=*/ true)
+ .withUsage(AUDIO_USAGE_MEDIA).withTags("addr=other"),
+ DPTestParam(USAGE_MEDIA_ALARM_CRITERIA, /*expected_match=*/ true)
+ .withUsage(AUDIO_USAGE_ALARM),
+ DPTestParam(USAGE_MEDIA_ALARM_CRITERIA, /*expected_match=*/ false)
+ .withUsage(AUDIO_USAGE_VOICE_COMMUNICATION),
+ DPTestParam(USAGE_MEDIA_ALARM_CRITERIA, /*expected_match=*/ false)
+ .withUsage(AUDIO_USAGE_VOICE_COMMUNICATION_SIGNALLING),
+ DPTestParam(USAGE_MEDIA_ALARM_CRITERIA, /*expected_match=*/ false)
+ .withUsage(AUDIO_USAGE_NOTIFICATION),
+ DPTestParam(USAGE_MEDIA_ALARM_CRITERIA, /*expected_match=*/ false)
+ .withUsage(AUDIO_USAGE_NOTIFICATION_TELEPHONY_RINGTONE),
+ DPTestParam(USAGE_MEDIA_ALARM_CRITERIA, /*expected_match=*/ false)
+ .withUsage(AUDIO_USAGE_NOTIFICATION_COMMUNICATION_REQUEST),
+ DPTestParam(USAGE_MEDIA_ALARM_CRITERIA, /*expected_match=*/ false)
+ .withUsage(AUDIO_USAGE_NOTIFICATION_COMMUNICATION_INSTANT),
+ DPTestParam(USAGE_MEDIA_ALARM_CRITERIA, /*expected_match=*/ false)
+ .withUsage(AUDIO_USAGE_NOTIFICATION_COMMUNICATION_DELAYED),
+ DPTestParam(USAGE_MEDIA_ALARM_CRITERIA, /*expected_match=*/ false)
+ .withUsage(AUDIO_USAGE_NOTIFICATION_EVENT),
+ DPTestParam(USAGE_MEDIA_ALARM_CRITERIA, /*expected_match=*/ false)
+ .withUsage(AUDIO_USAGE_ASSISTANCE_ACCESSIBILITY),
+ DPTestParam(USAGE_MEDIA_ALARM_CRITERIA, /*expected_match=*/ false)
+ .withUsage(AUDIO_USAGE_ASSISTANCE_NAVIGATION_GUIDANCE),
+ DPTestParam(USAGE_MEDIA_ALARM_CRITERIA, /*expected_match=*/ false)
+ .withUsage(AUDIO_USAGE_ASSISTANCE_SONIFICATION),
+ DPTestParam(USAGE_MEDIA_ALARM_CRITERIA, /*expected_match=*/ false)
+ .withUsage(AUDIO_USAGE_GAME),
+ DPTestParam(USAGE_MEDIA_ALARM_CRITERIA, /*expected_match=*/ false)
+ .withUsage(AUDIO_USAGE_ASSISTANT)));
-INSTANTIATE_TEST_CASE_P(
- PlaybackReroutingUnHandledUsages,
- AudioPolicyManagerTestDPPlaybackReRouting,
- testing::Values(
- (audio_attributes_t){AUDIO_CONTENT_TYPE_MUSIC, AUDIO_USAGE_VOICE_COMMUNICATION,
- AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""},
- (audio_attributes_t){AUDIO_CONTENT_TYPE_MUSIC,
- AUDIO_USAGE_VOICE_COMMUNICATION_SIGNALLING,
- AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""},
- (audio_attributes_t){AUDIO_CONTENT_TYPE_MUSIC, AUDIO_USAGE_NOTIFICATION,
- AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""},
- (audio_attributes_t){AUDIO_CONTENT_TYPE_MUSIC,
- AUDIO_USAGE_NOTIFICATION_TELEPHONY_RINGTONE,
- AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""},
- (audio_attributes_t){AUDIO_CONTENT_TYPE_MUSIC,
- AUDIO_USAGE_NOTIFICATION_COMMUNICATION_REQUEST,
- AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""},
- (audio_attributes_t){AUDIO_CONTENT_TYPE_MUSIC,
- AUDIO_USAGE_NOTIFICATION_COMMUNICATION_INSTANT,
- AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""},
- (audio_attributes_t){AUDIO_CONTENT_TYPE_MUSIC,
- AUDIO_USAGE_NOTIFICATION_COMMUNICATION_DELAYED,
- AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""},
- (audio_attributes_t){AUDIO_CONTENT_TYPE_MUSIC, AUDIO_USAGE_NOTIFICATION_EVENT,
- AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""},
- (audio_attributes_t){AUDIO_CONTENT_TYPE_MUSIC,
- AUDIO_USAGE_ASSISTANCE_ACCESSIBILITY,
- AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""},
- (audio_attributes_t){AUDIO_CONTENT_TYPE_MUSIC,
- AUDIO_USAGE_ASSISTANCE_NAVIGATION_GUIDANCE,
- AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""},
- (audio_attributes_t){AUDIO_CONTENT_TYPE_MUSIC,
- AUDIO_USAGE_ASSISTANCE_SONIFICATION,
- AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""},
- (audio_attributes_t){AUDIO_CONTENT_TYPE_MUSIC, AUDIO_USAGE_GAME,
- AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""},
- (audio_attributes_t){AUDIO_CONTENT_TYPE_MUSIC, AUDIO_USAGE_ASSISTANT,
- AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""},
- (audio_attributes_t){AUDIO_CONTENT_TYPE_SPEECH, AUDIO_USAGE_ASSISTANT,
- AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""}
- )
- );
+INSTANTIATE_TEST_SUITE_P(
+ PlaybackReroutingAddressPriorityMatch,
+ AudioPolicyManagerTestDPPlaybackReRouting,
+ testing::Values(
+ DPTestParam(USAGE_MEDIA_ALARM_CRITERIA, /*expected_match=*/ true)
+ .withUsage(AUDIO_USAGE_MEDIA).withTags("addr=remote_submix_media"),
+ DPTestParam(USAGE_MEDIA_ALARM_CRITERIA, /*expected_match=*/ true)
+ .withUsage(AUDIO_USAGE_VOICE_COMMUNICATION).withTags("addr=remote_submix_media"),
+ DPTestParam(USAGE_MEDIA_ALARM_CRITERIA, /*expected_match=*/ true)
+ .withUsage(AUDIO_USAGE_VOICE_COMMUNICATION_SIGNALLING)
+ .withTags("addr=remote_submix_media"),
+ DPTestParam(USAGE_MEDIA_ALARM_CRITERIA, /*expected_match=*/ true)
+ .withUsage(AUDIO_USAGE_ALARM)
+ .withTags("addr=remote_submix_media"),
+ DPTestParam(USAGE_MEDIA_ALARM_CRITERIA, /*expected_match=*/ true)
+ .withUsage(AUDIO_USAGE_NOTIFICATION)
+ .withTags("addr=remote_submix_media"),
+ DPTestParam(USAGE_MEDIA_ALARM_CRITERIA, /*expected_match=*/ true)
+ .withUsage(AUDIO_USAGE_NOTIFICATION_TELEPHONY_RINGTONE)
+ .withTags("addr=remote_submix_media"),
+ DPTestParam(USAGE_MEDIA_ALARM_CRITERIA, /*expected_match=*/ true)
+ .withUsage(AUDIO_USAGE_NOTIFICATION_COMMUNICATION_REQUEST)
+ .withTags("addr=remote_submix_media"),
+ DPTestParam(USAGE_MEDIA_ALARM_CRITERIA, /*expected_match=*/ true)
+ .withUsage(AUDIO_USAGE_NOTIFICATION_COMMUNICATION_INSTANT)
+ .withTags("addr=remote_submix_media"),
+ DPTestParam(USAGE_MEDIA_ALARM_CRITERIA, /*expected_match=*/ true)
+ .withUsage(AUDIO_USAGE_NOTIFICATION_COMMUNICATION_DELAYED)
+ .withTags("addr=remote_submix_media"),
+ DPTestParam(USAGE_MEDIA_ALARM_CRITERIA, /*expected_match=*/ true)
+ .withUsage(AUDIO_USAGE_NOTIFICATION_EVENT)
+ .withTags("addr=remote_submix_media"),
+ DPTestParam(USAGE_MEDIA_ALARM_CRITERIA, /*expected_match=*/ true)
+ .withUsage(AUDIO_USAGE_ASSISTANCE_ACCESSIBILITY)
+ .withTags("addr=remote_submix_media"),
+ DPTestParam(USAGE_MEDIA_ALARM_CRITERIA, /*expected_match=*/ true)
+ .withUsage(AUDIO_USAGE_ASSISTANCE_NAVIGATION_GUIDANCE)
+ .withTags("addr=remote_submix_media"),
+ DPTestParam(USAGE_MEDIA_ALARM_CRITERIA, /*expected_match=*/ true)
+ .withUsage(AUDIO_USAGE_ASSISTANCE_SONIFICATION)
+ .withTags("addr=remote_submix_media"),
+ DPTestParam(USAGE_MEDIA_ALARM_CRITERIA, /*expected_match=*/ true)
+ .withUsage(AUDIO_USAGE_GAME)
+ .withTags("addr=remote_submix_media"),
+ DPTestParam(USAGE_MEDIA_ALARM_CRITERIA, /*expected_match=*/ true)
+ .withUsage(AUDIO_USAGE_VIRTUAL_SOURCE)
+ .withTags("addr=remote_submix_media"),
+ DPTestParam(USAGE_MEDIA_ALARM_CRITERIA, /*expected_match=*/ true)
+ .withUsage(AUDIO_USAGE_ASSISTANT)
+ .withTags("addr=remote_submix_media"),
+ DPTestParam(USAGE_MEDIA_ALARM_CRITERIA, /*expected_match=*/ true)
+ .withUsage(AUDIO_USAGE_ASSISTANT)
+ .withTags("sometag;addr=remote_submix_media;othertag=somevalue"),
+ DPTestParam(USAGE_MEDIA_ALARM_CRITERIA, /*expected_match=*/ true)
+ .withUsage(AUDIO_USAGE_ASSISTANT)
+ .withTags("addr=remote_submix_media;othertag"),
+ DPTestParam(USAGE_MEDIA_ALARM_CRITERIA, /*expected_match=*/ true)
+ .withUsage(AUDIO_USAGE_ASSISTANT)
+ .withTags("sometag;othertag;addr=remote_submix_media")));
+
+static constexpr audio_session_t TEST_SESSION_ID = static_cast<audio_session_t>(42);
+static constexpr audio_session_t OTHER_SESSION_ID = static_cast<audio_session_t>(77);
+
+INSTANTIATE_TEST_SUITE_P(
+ PlaybackReRoutingWithSessionId,
+ AudioPolicyManagerTestDPPlaybackReRouting,
+ testing::Values(
+ // Mix is matched because the session id matches the one specified by the mix rule.
+ DPTestParam(/*mixCriteria=*/ {createSessionIdCriterion(TEST_SESSION_ID)},
+ /*expected_match=*/ true)
+ .withSessionId(TEST_SESSION_ID),
+ // Mix is not matched because the session id doesn't match the one specified
+ // by the mix rule.
+ DPTestParam(/*mixCriteria=*/ {createSessionIdCriterion(TEST_SESSION_ID)},
+ /*expected_match=*/ false)
+ .withSessionId(OTHER_SESSION_ID),
+ // Mix is matched: the session id doesn't match the one specified by the rule,
+ // but an address is specified in the tags, which takes precedence.
+ DPTestParam(/*mixCriteria=*/ {createSessionIdCriterion(TEST_SESSION_ID)},
+ /*expected_match=*/ true)
+ .withSessionId(OTHER_SESSION_ID).withTags("addr=remote_submix_media"),
+ // Mix is matched, both the session id and the usage match ones specified by mix rule.
+ DPTestParam(/*mixCriteria=*/ {createSessionIdCriterion(TEST_SESSION_ID),
+ createUsageCriterion(AUDIO_USAGE_MEDIA)},
+ /*expected_match=*/ true)
+ .withSessionId(TEST_SESSION_ID).withUsage(AUDIO_USAGE_MEDIA),
+ // Mix is not matched, the session id matches the one specified by mix rule,
+ // but usage does not.
+ DPTestParam(/*mixCriteria=*/ {createSessionIdCriterion(TEST_SESSION_ID),
+ createUsageCriterion(AUDIO_USAGE_MEDIA)},
+ /*expected_match=*/ false)
+ .withSessionId(TEST_SESSION_ID).withUsage(AUDIO_USAGE_GAME),
+ // Mix is not matched, the usage matches the one specified by mix rule,
+ // but the session id is excluded.
+ DPTestParam(/*mixCriteria=*/ {createSessionIdCriterion(TEST_SESSION_ID, /*exclude=*/ true),
+ createUsageCriterion(AUDIO_USAGE_MEDIA)},
+ /*expected_match=*/ false)
+ .withSessionId(TEST_SESSION_ID).withUsage(AUDIO_USAGE_MEDIA)));
class AudioPolicyManagerTestDPMixRecordInjection : public AudioPolicyManagerTestDynamicPolicy,
- public testing::WithParamInterface<audio_attributes_t> {
+ public testing::WithParamInterface<DPTestParam> {
protected:
void SetUp() override;
void TearDown() override;
std::unique_ptr<RecordingActivityTracker> mTracker;
-
- std::vector<PolicyMixTuple> mSourceRules = {
- {AUDIO_USAGE_UNKNOWN, AUDIO_SOURCE_CAMCORDER, RULE_MATCH_ATTRIBUTE_CAPTURE_PRESET},
- {AUDIO_USAGE_UNKNOWN, AUDIO_SOURCE_MIC, RULE_MATCH_ATTRIBUTE_CAPTURE_PRESET},
- {AUDIO_USAGE_UNKNOWN, AUDIO_SOURCE_VOICE_COMMUNICATION, RULE_MATCH_ATTRIBUTE_CAPTURE_PRESET}
- };
-
struct audio_port_v7 mExtractionPort;
audio_port_handle_t mPortId = AUDIO_PORT_HANDLE_NONE;
};
@@ -1518,8 +1624,10 @@
audioConfig.channel_mask = AUDIO_CHANNEL_IN_STEREO;
audioConfig.format = AUDIO_FORMAT_PCM_16_BIT;
audioConfig.sample_rate = k48000SamplingRate;
+
+ DPTestParam param = GetParam();
status_t ret = addPolicyMix(MIX_TYPE_RECORDERS, MIX_ROUTE_FLAG_LOOP_BACK,
- AUDIO_DEVICE_IN_REMOTE_SUBMIX, mMixAddress, audioConfig, mSourceRules);
+ AUDIO_DEVICE_IN_REMOTE_SUBMIX, mMixAddress, audioConfig, param.mixCriteria);
ASSERT_EQ(NO_ERROR, ret);
struct audio_port_v7 injectionPort;
@@ -1546,72 +1654,94 @@
AudioPolicyManagerTestDynamicPolicy::TearDown();
}
-TEST_F(AudioPolicyManagerTestDPMixRecordInjection, InitSuccess) {
- // SetUp mush finish with no assertions.
-}
-
-TEST_F(AudioPolicyManagerTestDPMixRecordInjection, Dump) {
- dumpToLog();
-}
-
TEST_P(AudioPolicyManagerTestDPMixRecordInjection, RecordingInjection) {
- const audio_attributes_t attr = GetParam();
- const audio_source_t source = attr.source;
+ const DPTestParam param = GetParam();
audio_port_handle_t captureRoutedPortId = AUDIO_PORT_HANDLE_NONE;
audio_port_handle_t portId = AUDIO_PORT_HANDLE_NONE;
- getInputForAttr(attr, mTracker->getRiid(), &captureRoutedPortId, AUDIO_FORMAT_PCM_16_BIT,
- AUDIO_CHANNEL_IN_STEREO, k48000SamplingRate, AUDIO_INPUT_FLAG_NONE, &portId);
- if (std::find_if(begin(mSourceRules), end(mSourceRules), [&source](const auto &sourceRule) {
- return (std::get<1>(sourceRule) == source) &&
- (std::get<2>(sourceRule) == RULE_MATCH_ATTRIBUTE_CAPTURE_PRESET);})
- != end(mSourceRules)) {
+ getInputForAttr(param.attributes, param.session, mTracker->getRiid(), &captureRoutedPortId,
+ AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_IN_STEREO, k48000SamplingRate,
+ AUDIO_INPUT_FLAG_NONE, &portId);
+ if (param.expected_match) {
EXPECT_EQ(mExtractionPort.id, captureRoutedPortId);
} else {
EXPECT_NE(mExtractionPort.id, captureRoutedPortId);
}
}
+const std::vector<AudioMixMatchCriterion> SOURCE_CAM_MIC_VOICE_CRITERIA = {
+ createCapturePresetCriterion(AUDIO_SOURCE_CAMCORDER),
+ createCapturePresetCriterion(AUDIO_SOURCE_MIC),
+ createCapturePresetCriterion(AUDIO_SOURCE_VOICE_COMMUNICATION)
+};
+
// No address priority rule for remote recording, address is a "don't care"
INSTANTIATE_TEST_CASE_P(
- RecordInjectionSourceMatch,
+ RecordInjectionSource,
AudioPolicyManagerTestDPMixRecordInjection,
testing::Values(
- (audio_attributes_t){AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_UNKNOWN,
- AUDIO_SOURCE_CAMCORDER, AUDIO_FLAG_NONE, ""},
- (audio_attributes_t){AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_UNKNOWN,
- AUDIO_SOURCE_CAMCORDER, AUDIO_FLAG_NONE,
- "addr=remote_submix_media"},
- (audio_attributes_t){AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_UNKNOWN,
- AUDIO_SOURCE_MIC, AUDIO_FLAG_NONE,
- "addr=remote_submix_media"},
- (audio_attributes_t){AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_UNKNOWN,
- AUDIO_SOURCE_MIC, AUDIO_FLAG_NONE, ""},
- (audio_attributes_t){AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_UNKNOWN,
- AUDIO_SOURCE_VOICE_COMMUNICATION, AUDIO_FLAG_NONE, ""},
- (audio_attributes_t){AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_UNKNOWN,
- AUDIO_SOURCE_VOICE_COMMUNICATION, AUDIO_FLAG_NONE,
- "addr=remote_submix_media"}
- )
- );
+ DPTestParam(SOURCE_CAM_MIC_VOICE_CRITERIA, /*expected_match=*/ true)
+ .withSource(AUDIO_SOURCE_CAMCORDER),
+ DPTestParam(SOURCE_CAM_MIC_VOICE_CRITERIA, /*expected_match=*/ true)
+ .withSource(AUDIO_SOURCE_CAMCORDER)
+ .withTags("addr=remote_submix_media"),
+ DPTestParam(SOURCE_CAM_MIC_VOICE_CRITERIA, /*expected_match=*/ true)
+ .withSource(AUDIO_SOURCE_MIC),
+ DPTestParam(SOURCE_CAM_MIC_VOICE_CRITERIA, /*expected_match=*/ true)
+ .withSource(AUDIO_SOURCE_MIC)
+ .withTags("addr=remote_submix_media"),
+ DPTestParam(SOURCE_CAM_MIC_VOICE_CRITERIA, /*expected_match=*/ true)
+ .withSource(AUDIO_SOURCE_VOICE_COMMUNICATION),
+ DPTestParam(SOURCE_CAM_MIC_VOICE_CRITERIA, /*expected_match=*/ true)
+ .withSource(AUDIO_SOURCE_VOICE_COMMUNICATION)
+ .withTags("addr=remote_submix_media"),
+ DPTestParam(SOURCE_CAM_MIC_VOICE_CRITERIA, /*expected_match=*/ false)
+ .withSource(AUDIO_SOURCE_VOICE_RECOGNITION),
+ DPTestParam(SOURCE_CAM_MIC_VOICE_CRITERIA, /*expected_match=*/ false)
+ .withSource(AUDIO_SOURCE_VOICE_RECOGNITION)
+ .withTags("addr=remote_submix_media"),
+ DPTestParam(SOURCE_CAM_MIC_VOICE_CRITERIA, /*expected_match=*/ false)
+ .withSource(AUDIO_SOURCE_HOTWORD),
+ DPTestParam(SOURCE_CAM_MIC_VOICE_CRITERIA, /*expected_match=*/ false)
+ .withSource(AUDIO_SOURCE_HOTWORD)
+ .withTags("addr=remote_submix_media")));
-// No address priority rule for remote recording
INSTANTIATE_TEST_CASE_P(
- RecordInjectionSourceNotMatch,
+ RecordInjectionWithSessionId,
AudioPolicyManagerTestDPMixRecordInjection,
testing::Values(
- (audio_attributes_t){AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_UNKNOWN,
- AUDIO_SOURCE_VOICE_RECOGNITION, AUDIO_FLAG_NONE, ""},
- (audio_attributes_t){AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_UNKNOWN,
- AUDIO_SOURCE_HOTWORD, AUDIO_FLAG_NONE, ""},
- (audio_attributes_t){AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_UNKNOWN,
- AUDIO_SOURCE_VOICE_RECOGNITION, AUDIO_FLAG_NONE,
- "addr=remote_submix_media"},
- (audio_attributes_t){AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_UNKNOWN,
- AUDIO_SOURCE_HOTWORD, AUDIO_FLAG_NONE,
- "addr=remote_submix_media"}
- )
- );
+ // Mix is matched because the session id matches the one specified by the mix rule.
+ DPTestParam(/*mixCriteria=*/ {createSessionIdCriterion(TEST_SESSION_ID)},
+ /*expected_match=*/ true)
+ .withSessionId(TEST_SESSION_ID),
+ // Mix is not matched because the session id doesn't match the one specified
+ // by the mix rule.
+ DPTestParam(/*mixCriteria=*/ {createSessionIdCriterion(TEST_SESSION_ID)},
+ /*expected_match=*/ false)
+ .withSessionId(OTHER_SESSION_ID),
+ // Mix is not matched, the session id doesn't match the one specified by rule,
+            // but the address specified in the tags is ignored for recorder mixes.
+ DPTestParam(/*mixCriteria=*/ {createSessionIdCriterion(TEST_SESSION_ID)},
+ /*expected_match=*/ false)
+ .withSessionId(OTHER_SESSION_ID).withTags("addr=remote_submix_media"),
+ // Mix is matched, both the session id and the source match ones specified by mix rule
+ DPTestParam(/*mixCriteria=*/ {createSessionIdCriterion(TEST_SESSION_ID),
+ createCapturePresetCriterion(AUDIO_SOURCE_CAMCORDER)},
+ /*expected_match=*/ true)
+ .withSessionId(TEST_SESSION_ID).withSource(AUDIO_SOURCE_CAMCORDER),
+ // Mix is not matched, the session id matches the one specified by mix rule,
+ // but source does not.
+ DPTestParam(/*mixCriteria=*/ {createSessionIdCriterion(TEST_SESSION_ID),
+ createCapturePresetCriterion(AUDIO_SOURCE_CAMCORDER)},
+ /*expected_match=*/ false)
+ .withSessionId(TEST_SESSION_ID).withSource(AUDIO_SOURCE_MIC),
+ // Mix is not matched, the source matches the one specified by mix rule,
+ // but the session id is excluded.
+ DPTestParam(/*mixCriteria=*/ {createSessionIdCriterion(TEST_SESSION_ID,
+ /*exclude=*/ true),
+ createCapturePresetCriterion(AUDIO_SOURCE_MIC)},
+ /*expected_match=*/ false)
+ .withSessionId(TEST_SESSION_ID).withSource(AUDIO_SOURCE_MIC)));
using DeviceConnectionTestParams =
std::tuple<audio_devices_t /*type*/, std::string /*name*/, std::string /*address*/>;
@@ -1714,8 +1844,9 @@
k48000SamplingRate, AUDIO_OUTPUT_FLAG_NONE);
} else if (audio_is_input_device(type)) {
RecordingActivityTracker tracker;
- getInputForAttr({}, tracker.getRiid(), &routedPortId, AUDIO_FORMAT_PCM_16_BIT,
- AUDIO_CHANNEL_IN_STEREO, k48000SamplingRate, AUDIO_INPUT_FLAG_NONE);
+ getInputForAttr({}, AUDIO_SESSION_NONE, tracker.getRiid(), &routedPortId,
+ AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_IN_STEREO, k48000SamplingRate,
+ AUDIO_INPUT_FLAG_NONE);
}
ASSERT_EQ(devicePort.id, routedPortId);
@@ -1801,7 +1932,7 @@
audio_config_t audioConfig = AUDIO_CONFIG_INITIALIZER;
const std::string kTestBusMediaOutput = "bus0_media_out";
ret = addPolicyMix(MIX_TYPE_PLAYERS, MIX_ROUTE_FLAG_RENDER,
- AUDIO_DEVICE_OUT_BUS, kTestBusMediaOutput, audioConfig, std::vector<PolicyMixTuple>());
+ AUDIO_DEVICE_OUT_BUS, kTestBusMediaOutput, audioConfig);
ASSERT_EQ(NO_ERROR, ret);
audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
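
For reference, the session-id matching cases above are built from small helpers such as
createSessionIdCriterion(); a minimal sketch of how such a helper could look is below. The
AudioMixMatchCriterion field and RULE_* constant names are assumptions based on
android/media/AudioPolicy.h, not part of this change, and the real test helper may differ.

// Hypothetical sketch: build a criterion that matches (or excludes) a session id.
AudioMixMatchCriterion createSessionIdCriterion(audio_session_t session,
                                                bool exclude = false) {
    AudioMixMatchCriterion criterion;
    criterion.mValue.mAudioSessionId = session;
    criterion.mRule = exclude
            ? (RULE_EXCLUSION_MASK | RULE_MATCH_AUDIO_SESSION_ID)
            : RULE_MATCH_AUDIO_SESSION_ID;
    return criterion;
}
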
diff --git a/services/camera/libcameraservice/Android.bp b/services/camera/libcameraservice/Android.bp
index 981c569..f4ac2a1 100644
--- a/services/camera/libcameraservice/Android.bp
+++ b/services/camera/libcameraservice/Android.bp
@@ -156,14 +156,14 @@
"android.hardware.camera.provider@2.5",
"android.hardware.camera.provider@2.6",
"android.hardware.camera.provider@2.7",
- "android.hardware.camera.provider-V1-ndk",
+ "android.hardware.camera.provider-V2-ndk",
"android.hardware.camera.device@3.2",
"android.hardware.camera.device@3.3",
"android.hardware.camera.device@3.4",
"android.hardware.camera.device@3.5",
"android.hardware.camera.device@3.6",
"android.hardware.camera.device@3.7",
- "android.hardware.camera.device-V1-ndk",
+ "android.hardware.camera.device-V2-ndk",
"media_permission-aidl-cpp",
],
diff --git a/services/camera/libcameraservice/CameraService.cpp b/services/camera/libcameraservice/CameraService.cpp
index bc8981e..be76216 100644
--- a/services/camera/libcameraservice/CameraService.cpp
+++ b/services/camera/libcameraservice/CameraService.cpp
@@ -1767,20 +1767,68 @@
return ret;
}
+String16 CameraService::getPackageNameFromUid(int clientUid) {
+ String16 packageName("");
+
+ sp<IServiceManager> sm = defaultServiceManager();
+ sp<IBinder> binder = sm->getService(String16(kPermissionServiceName));
+ if (binder == 0) {
+ ALOGE("Cannot get permission service");
+        // Return an empty package name; further interaction with the camera
+        // will likely fail
+ return packageName;
+ }
+
+ sp<IPermissionController> permCtrl = interface_cast<IPermissionController>(binder);
+ Vector<String16> packages;
+
+ permCtrl->getPackagesForUid(clientUid, packages);
+
+ if (packages.isEmpty()) {
+ ALOGE("No packages for calling UID %d", clientUid);
+        // Return an empty package name; further interaction with the camera
+        // will likely fail
+ return packageName;
+ }
+
+ // Arbitrarily pick the first name in the list
+ packageName = packages[0];
+
+ return packageName;
+}
+
template<class CALLBACK, class CLIENT>
Status CameraService::connectHelper(const sp<CALLBACK>& cameraCb, const String8& cameraId,
- int api1CameraId, const String16& clientPackageName, bool systemNativeClient,
+ int api1CameraId, const String16& clientPackageNameMaybe, bool systemNativeClient,
const std::optional<String16>& clientFeatureId, int clientUid, int clientPid,
apiLevel effectiveApiLevel, bool shimUpdateOnly, int oomScoreOffset, int targetSdkVersion,
/*out*/sp<CLIENT>& device) {
binder::Status ret = binder::Status::ok();
+ bool isNonSystemNdk = false;
+ String16 clientPackageName;
+ if (clientPackageNameMaybe.size() <= 0) {
+ // NDK calls don't come with package names, but we need one for various cases.
+ // Generally, there's a 1:1 mapping between UID and package name, but shared UIDs
+ // do exist. For all authentication cases, all packages under the same UID get the
+ // same permissions, so picking any associated package name is sufficient. For some
+ // other cases, this may give inaccurate names for clients in logs.
+ isNonSystemNdk = true;
+ int packageUid = (clientUid == USE_CALLING_UID) ?
+ CameraThreadState::getCallingUid() : clientUid;
+ clientPackageName = getPackageNameFromUid(packageUid);
+ } else {
+ clientPackageName = clientPackageNameMaybe;
+ }
+
String8 clientName8(clientPackageName);
int originalClientPid = 0;
+ int packagePid = (clientPid == USE_CALLING_PID) ?
+ CameraThreadState::getCallingPid() : clientPid;
ALOGI("CameraService::connect call (PID %d \"%s\", camera ID %s) and "
- "Camera API version %d", clientPid, clientName8.string(), cameraId.string(),
+ "Camera API version %d", packagePid, clientName8.string(), cameraId.string(),
static_cast<int>(effectiveApiLevel));
nsecs_t openTimeNs = systemTime();
@@ -1788,7 +1836,7 @@
sp<CLIENT> client = nullptr;
int facing = -1;
int orientation = 0;
- bool isNonSystemNdk = (clientPackageName.size() == 0);
+
{
// Acquire mServiceLock and prevent other clients from connecting
std::unique_ptr<AutoConditionLock> lock =
@@ -2046,6 +2094,10 @@
onlineClientDesc->getOwnerId(), onlinePriority.getState(),
// native clients don't have offline processing support.
/*ommScoreOffset*/ 0, /*systemNativeClient*/false);
+ if (offlineClientDesc == nullptr) {
+ ALOGE("%s: Offline client descriptor was NULL", __FUNCTION__);
+ return BAD_VALUE;
+ }
// Allow only one offline device per camera
auto incompatibleClients = mActiveClientManager.getIncompatibleClients(offlineClientDesc);
@@ -3279,37 +3331,6 @@
sCameraService = cameraService;
}
- // In some cases the calling code has no access to the package it runs under.
- // For example, NDK camera API.
- // In this case we will get the packages for the calling UID and pick the first one
- // for attributing the app op. This will work correctly for runtime permissions
- // as for legacy apps we will toggle the app op for all packages in the UID.
- // The caveat is that the operation may be attributed to the wrong package and
- // stats based on app ops may be slightly off.
- if (mClientPackageName.size() <= 0) {
- sp<IServiceManager> sm = defaultServiceManager();
- sp<IBinder> binder = sm->getService(String16(kPermissionServiceName));
- if (binder == 0) {
- ALOGE("Cannot get permission service");
- // Leave mClientPackageName unchanged (empty) and the further interaction
- // with camera will fail in BasicClient::startCameraOps
- return;
- }
-
- sp<IPermissionController> permCtrl = interface_cast<IPermissionController>(binder);
- Vector<String16> packages;
-
- permCtrl->getPackagesForUid(mClientUid, packages);
-
- if (packages.isEmpty()) {
- ALOGE("No packages for calling UID");
- // Leave mClientPackageName unchanged (empty) and the further interaction
- // with camera will fail in BasicClient::startCameraOps
- return;
- }
- mClientPackageName = packages[0];
- }
-
// There are 2 scenarios in which a client won't have AppOps operations
// (both scenarios : native clients)
// 1) It's an system native client*, the package name will be empty
diff --git a/services/camera/libcameraservice/CameraService.h b/services/camera/libcameraservice/CameraService.h
index 0395475..6e2300a 100644
--- a/services/camera/libcameraservice/CameraService.h
+++ b/services/camera/libcameraservice/CameraService.h
@@ -834,10 +834,19 @@
// sorted in alpha-numeric order.
void filterAPI1SystemCameraLocked(const std::vector<std::string> &normalDeviceIds);
+ // In some cases the calling code has no access to the package it runs under.
+ // For example, NDK camera API.
+ // In this case we will get the packages for the calling UID and pick the first one
+    // for attributing the app op. This works correctly for runtime permissions, since
+    // for legacy apps we toggle the app op for all packages in the UID.
+ // The caveat is that the operation may be attributed to the wrong package and
+ // stats based on app ops may be slightly off.
+ String16 getPackageNameFromUid(int clientUid);
+
// Single implementation shared between the various connect calls
template<class CALLBACK, class CLIENT>
binder::Status connectHelper(const sp<CALLBACK>& cameraCb, const String8& cameraId,
- int api1CameraId, const String16& clientPackageName, bool systemNativeClient,
+ int api1CameraId, const String16& clientPackageNameMaybe, bool systemNativeClient,
const std::optional<String16>& clientFeatureId, int clientUid, int clientPid,
apiLevel effectiveApiLevel, bool shimUpdateOnly, int scoreOffset, int targetSdkVersion,
/*out*/sp<CLIENT>& device);
diff --git a/services/camera/libcameraservice/CameraServiceWatchdog.cpp b/services/camera/libcameraservice/CameraServiceWatchdog.cpp
index e101dd3..74497d1 100644
--- a/services/camera/libcameraservice/CameraServiceWatchdog.cpp
+++ b/services/camera/libcameraservice/CameraServiceWatchdog.cpp
@@ -41,7 +41,8 @@
tidToCycleCounterMap[currentThreadId]++;
if (tidToCycleCounterMap[currentThreadId] >= mMaxCycles) {
- ALOGW("CameraServiceWatchdog triggering abort for pid: %d", getpid());
+ ALOGW("CameraServiceWatchdog triggering abort for pid: %d tid: %d", getpid(),
+ currentThreadId);
// We use abort here so we can get a tombstone for better
// debugging.
abort();
diff --git a/services/camera/libcameraservice/api2/CameraDeviceClient.cpp b/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
index ba26ac4..c49ecb2 100644
--- a/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
+++ b/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
@@ -884,6 +884,7 @@
int64_t streamUseCase = outputConfiguration.getStreamUseCase();
int timestampBase = outputConfiguration.getTimestampBase();
int mirrorMode = outputConfiguration.getMirrorMode();
+ int32_t colorSpace = outputConfiguration.getColorSpace();
res = SessionConfigurationUtils::checkSurfaceType(numBufferProducers, deferredConsumer,
outputConfiguration.getSurfaceType());
@@ -928,7 +929,7 @@
res = SessionConfigurationUtils::createSurfaceFromGbp(streamInfo,
isStreamInfoValid, surface, bufferProducer, mCameraIdStr,
mDevice->infoPhysical(physicalCameraId), sensorPixelModesUsed, dynamicRangeProfile,
- streamUseCase, timestampBase, mirrorMode);
+ streamUseCase, timestampBase, mirrorMode, colorSpace);
if (!res.isOk())
return res;
@@ -975,7 +976,7 @@
&streamId, physicalCameraId, streamInfo.sensorPixelModesUsed, &surfaceIds,
outputConfiguration.getSurfaceSetID(), isShared, isMultiResolution,
/*consumerUsage*/0, streamInfo.dynamicRangeProfile, streamInfo.streamUseCase,
- streamInfo.timestampBase, streamInfo.mirrorMode);
+ streamInfo.timestampBase, streamInfo.mirrorMode, streamInfo.colorSpace);
}
if (err != OK) {
@@ -1027,6 +1028,7 @@
int width, height, format, surfaceType;
uint64_t consumerUsage;
android_dataspace dataSpace;
+ int32_t colorSpace;
status_t err;
binder::Status res;
@@ -1040,6 +1042,7 @@
surfaceType = outputConfiguration.getSurfaceType();
format = HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
dataSpace = android_dataspace_t::HAL_DATASPACE_UNKNOWN;
+ colorSpace = ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED;
// Hardcode consumer usage flags: SurfaceView--0x900, SurfaceTexture--0x100.
consumerUsage = GraphicBuffer::USAGE_HW_TEXTURE;
if (surfaceType == OutputConfiguration::SURFACE_TYPE_SURFACE_VIEW) {
@@ -1089,7 +1092,8 @@
outputConfiguration.getDynamicRangeProfile(),
outputConfiguration.getStreamUseCase(),
outputConfiguration.getTimestampBase(),
- outputConfiguration.getMirrorMode()));
+ outputConfiguration.getMirrorMode(),
+ colorSpace));
ALOGV("%s: Camera %s: Successfully created a new stream ID %d for a deferred surface"
" (%d x %d) stream with format 0x%x.",
@@ -1280,6 +1284,7 @@
int64_t streamUseCase = outputConfiguration.getStreamUseCase();
int timestampBase = outputConfiguration.getTimestampBase();
int64_t dynamicRangeProfile = outputConfiguration.getDynamicRangeProfile();
+ int32_t colorSpace = outputConfiguration.getColorSpace();
int mirrorMode = outputConfiguration.getMirrorMode();
for (size_t i = 0; i < newOutputsMap.size(); i++) {
@@ -1288,7 +1293,7 @@
res = SessionConfigurationUtils::createSurfaceFromGbp(outInfo,
/*isStreamInfoValid*/ false, surface, newOutputsMap.valueAt(i), mCameraIdStr,
mDevice->infoPhysical(physicalCameraId), sensorPixelModesUsed, dynamicRangeProfile,
- streamUseCase, timestampBase, mirrorMode);
+ streamUseCase, timestampBase, mirrorMode, colorSpace);
if (!res.isOk())
return res;
@@ -1646,7 +1651,8 @@
const std::vector<int32_t> &sensorPixelModesUsed =
outputConfiguration.getSensorPixelModesUsed();
int64_t dynamicRangeProfile = outputConfiguration.getDynamicRangeProfile();
- int64_t streamUseCase= outputConfiguration.getStreamUseCase();
+ int32_t colorSpace = outputConfiguration.getColorSpace();
+ int64_t streamUseCase = outputConfiguration.getStreamUseCase();
int timestampBase = outputConfiguration.getTimestampBase();
int mirrorMode = outputConfiguration.getMirrorMode();
for (auto& bufferProducer : bufferProducers) {
@@ -1662,7 +1668,7 @@
res = SessionConfigurationUtils::createSurfaceFromGbp(mStreamInfoMap[streamId],
true /*isStreamInfoValid*/, surface, bufferProducer, mCameraIdStr,
mDevice->infoPhysical(physicalId), sensorPixelModesUsed, dynamicRangeProfile,
- streamUseCase, timestampBase, mirrorMode);
+ streamUseCase, timestampBase, mirrorMode, colorSpace);
if (!res.isOk())
return res;
diff --git a/services/camera/libcameraservice/api2/HeicCompositeStream.cpp b/services/camera/libcameraservice/api2/HeicCompositeStream.cpp
index 54cc27a..71965f2 100644
--- a/services/camera/libcameraservice/api2/HeicCompositeStream.cpp
+++ b/services/camera/libcameraservice/api2/HeicCompositeStream.cpp
@@ -931,7 +931,7 @@
tempOutputFile.str().c_str(), errno);
return NO_INIT;
}
- inputFrame.muxer = new MediaMuxer(inputFrame.fileFd, MediaMuxer::OUTPUT_FORMAT_HEIF);
+ inputFrame.muxer = MediaMuxer::create(inputFrame.fileFd, MediaMuxer::OUTPUT_FORMAT_HEIF);
if (inputFrame.muxer == nullptr) {
ALOGE("%s: Failed to create MediaMuxer for file fd %d",
__FUNCTION__, inputFrame.fileFd);
diff --git a/services/camera/libcameraservice/common/Camera2ClientBase.cpp b/services/camera/libcameraservice/common/Camera2ClientBase.cpp
index f926b88..95b4050 100644
--- a/services/camera/libcameraservice/common/Camera2ClientBase.cpp
+++ b/services/camera/libcameraservice/common/Camera2ClientBase.cpp
@@ -40,8 +40,6 @@
namespace android {
-const static size_t kDisconnectTimeoutMs = 2500;
-
using namespace camera2;
// Interface used by CameraService
@@ -266,10 +264,16 @@
template <typename TClientBase>
binder::Status Camera2ClientBase<TClientBase>::disconnect() {
- if (mCameraServiceWatchdog != nullptr) {
+ if (mCameraServiceWatchdog != nullptr && mDevice != nullptr) {
+        // The timer for the disconnect call should exceed getExpectedInFlightDuration,
+        // since that duration is used by the error handling methods in the disconnect
+        // sequence; this allows the existing error handling to execute first.
+ uint64_t maxExpectedDuration =
+ ns2ms(mDevice->getExpectedInFlightDuration() + kBufferTimeDisconnectNs);
+
// Initialization from hal succeeded, time disconnect.
return mCameraServiceWatchdog->WATCH_CUSTOM_TIMER(disconnectImpl(),
- kDisconnectTimeoutMs / kCycleLengthMs, kCycleLengthMs);
+ maxExpectedDuration / kCycleLengthMs, kCycleLengthMs);
}
return disconnectImpl();
}
diff --git a/services/camera/libcameraservice/common/Camera2ClientBase.h b/services/camera/libcameraservice/common/Camera2ClientBase.h
index 6e37589..37a7200 100644
--- a/services/camera/libcameraservice/common/Camera2ClientBase.h
+++ b/services/camera/libcameraservice/common/Camera2ClientBase.h
@@ -134,6 +134,9 @@
protected:
+ // Used for watchdog timeout to monitor disconnect
+ static const nsecs_t kBufferTimeDisconnectNs = 3000000000; // 3 sec.
+
// The PID provided in the constructor call
pid_t mInitialClientPid;
bool mOverrideForPerfClass = false;
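
For reference, the disconnect watchdog budget above now scales with the expected in-flight
duration instead of the old fixed 2500 ms. A rough, self-contained sketch of the cycle-count
arithmetic is below; the 100 ms cycle length is an assumed stand-in for kCycleLengthMs, and
the helper name is hypothetical.

#include <cstdint>

// Illustrative only: mirrors maxExpectedDuration / kCycleLengthMs from the change above.
int64_t disconnectWatchdogCycles(int64_t inFlightNs,
                                 int64_t bufferTimeDisconnectNs = 3000000000LL, // 3 s
                                 int64_t cycleLengthMs = 100 /* assumed */) {
    int64_t maxExpectedDurationMs = (inFlightNs + bufferTimeDisconnectNs) / 1000000;
    return maxExpectedDurationMs / cycleLengthMs;
}
// Example: a 500 ms in-flight duration gives (500 + 3000) / 100 = 35 watchdog cycles.
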
diff --git a/services/camera/libcameraservice/common/CameraDeviceBase.h b/services/camera/libcameraservice/common/CameraDeviceBase.h
index 69514f3..89a2af8 100644
--- a/services/camera/libcameraservice/common/CameraDeviceBase.h
+++ b/services/camera/libcameraservice/common/CameraDeviceBase.h
@@ -192,7 +192,9 @@
int64_t dynamicProfile = ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD,
int64_t streamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
int timestampBase = OutputConfiguration::TIMESTAMP_BASE_DEFAULT,
- int mirrorMode = OutputConfiguration::MIRROR_MODE_AUTO) = 0;
+ int mirrorMode = OutputConfiguration::MIRROR_MODE_AUTO,
+ int32_t colorSpace = ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED)
+ = 0;
/**
* Create an output stream of the requested size, format, rotation and
@@ -213,7 +215,9 @@
int64_t dynamicProfile = ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD,
int64_t streamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
int timestampBase = OutputConfiguration::TIMESTAMP_BASE_DEFAULT,
- int mirrorMode = OutputConfiguration::MIRROR_MODE_AUTO) = 0;
+ int mirrorMode = OutputConfiguration::MIRROR_MODE_AUTO,
+ int32_t colorSpace = ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED)
+ = 0;
/**
* Create an input stream of width, height, and format.
@@ -235,11 +239,13 @@
bool dataSpaceOverridden;
android_dataspace originalDataSpace;
int64_t dynamicRangeProfile;
+ int32_t colorSpace;
StreamInfo() : width(0), height(0), format(0), formatOverridden(false), originalFormat(0),
dataSpace(HAL_DATASPACE_UNKNOWN), dataSpaceOverridden(false),
originalDataSpace(HAL_DATASPACE_UNKNOWN),
- dynamicRangeProfile(ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD){}
+ dynamicRangeProfile(ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD),
+ colorSpace(ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED) {}
/**
* Check whether the format matches the current or the original one in case
* it got overridden.
diff --git a/services/camera/libcameraservice/common/aidl/AidlProviderInfo.cpp b/services/camera/libcameraservice/common/aidl/AidlProviderInfo.cpp
index ef68f28..6d35391 100644
--- a/services/camera/libcameraservice/common/aidl/AidlProviderInfo.cpp
+++ b/services/camera/libcameraservice/common/aidl/AidlProviderInfo.cpp
@@ -550,6 +550,11 @@
"ANDROID_FLASH_INFO_STRENGTH_MAXIMUM_LEVEL tags: %s (%d)", __FUNCTION__,
strerror(-res), res);
}
+
+ // b/247038031: In case of system_server crash, camera_server is
+            // restarted as well. If the flashlight was turned on before the crash, it
+            // may be stuck on. As a workaround, set the torch mode to OFF.
+ interface->setTorchMode(false);
} else {
mHasFlashUnit = false;
}
diff --git a/services/camera/libcameraservice/device3/Camera3Device.cpp b/services/camera/libcameraservice/device3/Camera3Device.cpp
index 644f682..17a4a44 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.cpp
+++ b/services/camera/libcameraservice/device3/Camera3Device.cpp
@@ -992,7 +992,7 @@
const std::unordered_set<int32_t> &sensorPixelModesUsed,
std::vector<int> *surfaceIds, int streamSetId, bool isShared, bool isMultiResolution,
uint64_t consumerUsage, int64_t dynamicRangeProfile, int64_t streamUseCase,
- int timestampBase, int mirrorMode) {
+ int timestampBase, int mirrorMode, int32_t colorSpace) {
ATRACE_CALL();
if (consumer == nullptr) {
@@ -1006,7 +1006,7 @@
return createStream(consumers, /*hasDeferredConsumer*/ false, width, height,
format, dataSpace, rotation, id, physicalCameraId, sensorPixelModesUsed, surfaceIds,
streamSetId, isShared, isMultiResolution, consumerUsage, dynamicRangeProfile,
- streamUseCase, timestampBase, mirrorMode);
+ streamUseCase, timestampBase, mirrorMode, colorSpace);
}
static bool isRawFormat(int format) {
@@ -1027,7 +1027,7 @@
const String8& physicalCameraId, const std::unordered_set<int32_t> &sensorPixelModesUsed,
std::vector<int> *surfaceIds, int streamSetId, bool isShared, bool isMultiResolution,
uint64_t consumerUsage, int64_t dynamicRangeProfile, int64_t streamUseCase,
- int timestampBase, int mirrorMode) {
+ int timestampBase, int mirrorMode, int32_t colorSpace) {
ATRACE_CALL();
Mutex::Autolock il(mInterfaceLock);
@@ -1036,10 +1036,10 @@
ALOGV("Camera %s: Creating new stream %d: %d x %d, format %d, dataspace %d rotation %d"
" consumer usage %" PRIu64 ", isShared %d, physicalCameraId %s, isMultiResolution %d"
" dynamicRangeProfile 0x%" PRIx64 ", streamUseCase %" PRId64 ", timestampBase %d,"
- " mirrorMode %d",
+ " mirrorMode %d colorSpace %d",
mId.string(), mNextStreamId, width, height, format, dataSpace, rotation,
consumerUsage, isShared, physicalCameraId.string(), isMultiResolution,
- dynamicRangeProfile, streamUseCase, timestampBase, mirrorMode);
+ dynamicRangeProfile, streamUseCase, timestampBase, mirrorMode, colorSpace);
status_t res;
bool wasActive = false;
@@ -1110,7 +1110,7 @@
width, height, blobBufferSize, format, dataSpace, rotation,
mTimestampOffset, physicalCameraId, sensorPixelModesUsed, transport, streamSetId,
isMultiResolution, dynamicRangeProfile, streamUseCase, mDeviceTimeBaseIsRealtime,
- timestampBase, mirrorMode);
+ timestampBase, mirrorMode, colorSpace);
} else if (format == HAL_PIXEL_FORMAT_RAW_OPAQUE) {
bool maxResolution =
sensorPixelModesUsed.find(ANDROID_SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION) !=
@@ -1125,25 +1125,25 @@
width, height, rawOpaqueBufferSize, format, dataSpace, rotation,
mTimestampOffset, physicalCameraId, sensorPixelModesUsed, transport, streamSetId,
isMultiResolution, dynamicRangeProfile, streamUseCase, mDeviceTimeBaseIsRealtime,
- timestampBase, mirrorMode);
+ timestampBase, mirrorMode, colorSpace);
} else if (isShared) {
newStream = new Camera3SharedOutputStream(mNextStreamId, consumers,
width, height, format, consumerUsage, dataSpace, rotation,
mTimestampOffset, physicalCameraId, sensorPixelModesUsed, transport, streamSetId,
mUseHalBufManager, dynamicRangeProfile, streamUseCase, mDeviceTimeBaseIsRealtime,
- timestampBase, mirrorMode);
+ timestampBase, mirrorMode, colorSpace);
} else if (consumers.size() == 0 && hasDeferredConsumer) {
newStream = new Camera3OutputStream(mNextStreamId,
width, height, format, consumerUsage, dataSpace, rotation,
mTimestampOffset, physicalCameraId, sensorPixelModesUsed, transport, streamSetId,
isMultiResolution, dynamicRangeProfile, streamUseCase, mDeviceTimeBaseIsRealtime,
- timestampBase, mirrorMode);
+ timestampBase, mirrorMode, colorSpace);
} else {
newStream = new Camera3OutputStream(mNextStreamId, consumers[0],
width, height, format, dataSpace, rotation,
mTimestampOffset, physicalCameraId, sensorPixelModesUsed, transport, streamSetId,
isMultiResolution, dynamicRangeProfile, streamUseCase, mDeviceTimeBaseIsRealtime,
- timestampBase, mirrorMode);
+ timestampBase, mirrorMode, colorSpace);
}
size_t consumerCount = consumers.size();
@@ -1231,6 +1231,7 @@
streamInfo->dataSpaceOverridden = stream->isDataSpaceOverridden();
streamInfo->originalDataSpace = stream->getOriginalDataSpace();
streamInfo->dynamicRangeProfile = stream->getDynamicRangeProfile();
+ streamInfo->colorSpace = stream->getColorSpace();
return OK;
}
@@ -1744,7 +1745,7 @@
}
// Calculate expected duration for flush with additional buffer time in ms for watchdog
- uint64_t maxExpectedDuration = (getExpectedInFlightDuration() + kBaseGetBufferWait) / 1e6;
+ uint64_t maxExpectedDuration = ns2ms(getExpectedInFlightDuration() + kBaseGetBufferWait);
status_t res = mCameraServiceWatchdog->WATCH_CUSTOM_TIMER(mRequestThread->flush(),
maxExpectedDuration / kCycleLengthMs, kCycleLengthMs);
@@ -1880,7 +1881,8 @@
stream->getFormat(), streamMaxPreviewFps, stream->getDataSpace(), usage,
stream->getMaxHalBuffers(),
stream->getMaxTotalBuffers() - stream->getMaxHalBuffers(),
- stream->getDynamicRangeProfile(), streamUseCase);
+ stream->getDynamicRangeProfile(), streamUseCase,
+ stream->getColorSpace());
}
}
}
@@ -2680,7 +2682,7 @@
status_t Camera3Device::registerInFlight(uint32_t frameNumber,
int32_t numBuffers, CaptureResultExtras resultExtras, bool hasInput,
bool hasAppCallback, nsecs_t minExpectedDuration, nsecs_t maxExpectedDuration,
- const std::set<std::set<String8>>& physicalCameraIds,
+ bool isFixedFps, const std::set<std::set<String8>>& physicalCameraIds,
bool isStillCapture, bool isZslCapture, bool rotateAndCropAuto,
const std::set<std::string>& cameraIdsWithZoom,
const SurfaceMap& outputSurfaces, nsecs_t requestTimeNs) {
@@ -2689,7 +2691,7 @@
ssize_t res;
res = mInFlightMap.add(frameNumber, InFlightRequest(numBuffers, resultExtras, hasInput,
- hasAppCallback, minExpectedDuration, maxExpectedDuration, physicalCameraIds,
+ hasAppCallback, minExpectedDuration, maxExpectedDuration, isFixedFps, physicalCameraIds,
isStillCapture, isZslCapture, rotateAndCropAuto, cameraIdsWithZoom, requestTimeNs,
outputSurfaces));
if (res < 0) return res;
@@ -3249,16 +3251,18 @@
return true;
}
-std::pair<nsecs_t, nsecs_t> Camera3Device::RequestThread::calculateExpectedDurationRange(
- const camera_metadata_t *request) {
- std::pair<nsecs_t, nsecs_t> expectedRange(
+Camera3Device::RequestThread::ExpectedDurationInfo
+ Camera3Device::RequestThread::calculateExpectedDurationRange(
+ const camera_metadata_t *request) {
+ ExpectedDurationInfo expectedDurationInfo = {
InFlightRequest::kDefaultMinExpectedDuration,
- InFlightRequest::kDefaultMaxExpectedDuration);
+ InFlightRequest::kDefaultMaxExpectedDuration,
+ /*isFixedFps*/false};
camera_metadata_ro_entry_t e = camera_metadata_ro_entry_t();
find_camera_metadata_ro_entry(request,
ANDROID_CONTROL_AE_MODE,
&e);
- if (e.count == 0) return expectedRange;
+ if (e.count == 0) return expectedDurationInfo;
switch (e.data.u8[0]) {
case ANDROID_CONTROL_AE_MODE_OFF:
@@ -3266,29 +3270,32 @@
ANDROID_SENSOR_EXPOSURE_TIME,
&e);
if (e.count > 0) {
- expectedRange.first = e.data.i64[0];
- expectedRange.second = expectedRange.first;
+ expectedDurationInfo.minDuration = e.data.i64[0];
+ expectedDurationInfo.maxDuration = expectedDurationInfo.minDuration;
}
find_camera_metadata_ro_entry(request,
ANDROID_SENSOR_FRAME_DURATION,
&e);
if (e.count > 0) {
- expectedRange.first = std::max(e.data.i64[0], expectedRange.first);
- expectedRange.second = expectedRange.first;
+ expectedDurationInfo.minDuration =
+ std::max(e.data.i64[0], expectedDurationInfo.minDuration);
+ expectedDurationInfo.maxDuration = expectedDurationInfo.minDuration;
}
+ expectedDurationInfo.isFixedFps = false;
break;
default:
find_camera_metadata_ro_entry(request,
ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
&e);
if (e.count > 1) {
- expectedRange.first = 1e9 / e.data.i32[1];
- expectedRange.second = 1e9 / e.data.i32[0];
+ expectedDurationInfo.minDuration = 1e9 / e.data.i32[1];
+ expectedDurationInfo.maxDuration = 1e9 / e.data.i32[0];
}
+ expectedDurationInfo.isFixedFps = (e.data.i32[1] == e.data.i32[0]);
break;
}
- return expectedRange;
+ return expectedDurationInfo;
}
bool Camera3Device::RequestThread::skipHFRTargetFPSUpdate(int32_t tag,
@@ -3907,13 +3914,14 @@
isZslCapture = true;
}
}
- auto expectedDurationRange = calculateExpectedDurationRange(settings);
+ auto expectedDurationInfo = calculateExpectedDurationRange(settings);
res = parent->registerInFlight(halRequest->frame_number,
totalNumBuffers, captureRequest->mResultExtras,
/*hasInput*/halRequest->input_buffer != NULL,
hasCallback,
- /*min*/expectedDurationRange.first,
- /*max*/expectedDurationRange.second,
+ expectedDurationInfo.minDuration,
+ expectedDurationInfo.maxDuration,
+ expectedDurationInfo.isFixedFps,
requestedPhysicalCameras, isStillCapture, isZslCapture,
captureRequest->mRotateAndCropAuto, mPrevCameraIdsWithZoom,
(mUseHalBufManager) ? uniqueSurfaceIdMap :
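
The new ExpectedDurationInfo above is derived from the AE settings of each request: with AE
off, the exposure time and frame duration tags give a fixed duration; otherwise the
AE_TARGET_FPS_RANGE bounds give the min/max durations, and equal bounds mark the request as
fixed FPS. A stripped-down sketch of the FPS-range case (the struct and function names here
are illustrative, not the metadata-parsing code above):

#include <cstdint>

struct ExpectedDurationInfoSketch {
    int64_t minDurationNs;
    int64_t maxDurationNs;
    bool isFixedFps;
};

// Duration bounds are the reciprocals of the FPS bounds; fixed FPS means min == max.
ExpectedDurationInfoSketch fromAeTargetFpsRange(int32_t minFps, int32_t maxFps) {
    return {
        static_cast<int64_t>(1e9) / maxFps,  // shortest expected frame duration
        static_cast<int64_t>(1e9) / minFps,  // longest expected frame duration
        minFps == maxFps,
    };
}
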
diff --git a/services/camera/libcameraservice/device3/Camera3Device.h b/services/camera/libcameraservice/device3/Camera3Device.h
index d757eb9..f5e167e 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.h
+++ b/services/camera/libcameraservice/device3/Camera3Device.h
@@ -151,7 +151,9 @@
ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD,
int64_t streamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
int timestampBase = OutputConfiguration::TIMESTAMP_BASE_DEFAULT,
- int mirrorMode = OutputConfiguration::MIRROR_MODE_AUTO) override;
+ int mirrorMode = OutputConfiguration::MIRROR_MODE_AUTO,
+ int32_t colorSpace = ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED)
+ override;
status_t createStream(const std::vector<sp<Surface>>& consumers,
bool hasDeferredConsumer, uint32_t width, uint32_t height, int format,
@@ -166,7 +168,9 @@
ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD,
int64_t streamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
int timestampBase = OutputConfiguration::TIMESTAMP_BASE_DEFAULT,
- int mirrorMode = OutputConfiguration::MIRROR_MODE_AUTO) override;
+ int mirrorMode = OutputConfiguration::MIRROR_MODE_AUTO,
+ int32_t colorSpace = ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED)
+ override;
status_t createInputStream(
uint32_t width, uint32_t height, int format, bool isMultiResolution,
@@ -993,8 +997,13 @@
// send request in mNextRequests to HAL in a batch. Return true = sucssess
bool sendRequestsBatch();
- // Calculate the expected (minimum, maximum) duration range for a request
- std::pair<nsecs_t, nsecs_t> calculateExpectedDurationRange(
+ // Calculate the expected (minimum, maximum, isFixedFps) duration info for a request
+ struct ExpectedDurationInfo {
+ nsecs_t minDuration;
+ nsecs_t maxDuration;
+ bool isFixedFps;
+ };
+ ExpectedDurationInfo calculateExpectedDurationRange(
const camera_metadata_t *request);
// Check and update latest session parameters based on the current request settings.
@@ -1113,7 +1122,7 @@
status_t registerInFlight(uint32_t frameNumber,
int32_t numBuffers, CaptureResultExtras resultExtras, bool hasInput,
bool callback, nsecs_t minExpectedDuration, nsecs_t maxExpectedDuration,
- const std::set<std::set<String8>>& physicalCameraIds,
+ bool isFixedFps, const std::set<std::set<String8>>& physicalCameraIds,
bool isStillCapture, bool isZslCapture, bool rotateAndCropAuto,
const std::set<std::string>& cameraIdsWithZoom, const SurfaceMap& outputSurfaces,
nsecs_t requestTimeNs);
@@ -1365,6 +1374,9 @@
// The current minimum expected frame duration based on AE_TARGET_FPS_RANGE
nsecs_t mMinExpectedDuration = 0;
+ // Whether the camera device runs at fixed frame rate based on AE_MODE and
+ // AE_TARGET_FPS_RANGE
+ bool mIsFixedFps = false;
// Injection camera related methods.
class Camera3DeviceInjectionMethods : public virtual RefBase {
diff --git a/services/camera/libcameraservice/device3/Camera3FakeStream.h b/services/camera/libcameraservice/device3/Camera3FakeStream.h
index 8cecabd..a93d1da 100644
--- a/services/camera/libcameraservice/device3/Camera3FakeStream.h
+++ b/services/camera/libcameraservice/device3/Camera3FakeStream.h
@@ -100,7 +100,7 @@
virtual status_t setBatchSize(size_t batchSize) override;
- virtual void onMinDurationChanged(nsecs_t /*duration*/) {}
+ virtual void onMinDurationChanged(nsecs_t /*duration*/, bool /*fixedFps*/) {}
protected:
/**
diff --git a/services/camera/libcameraservice/device3/Camera3IOStreamBase.cpp b/services/camera/libcameraservice/device3/Camera3IOStreamBase.cpp
index add1483..a78d01e 100644
--- a/services/camera/libcameraservice/device3/Camera3IOStreamBase.cpp
+++ b/services/camera/libcameraservice/device3/Camera3IOStreamBase.cpp
@@ -35,14 +35,17 @@
const String8& physicalCameraId,
const std::unordered_set<int32_t> &sensorPixelModesUsed,
int setId, bool isMultiResolution, int64_t dynamicRangeProfile, int64_t streamUseCase,
- bool deviceTimeBaseIsRealtime, int timestampBase) :
+ bool deviceTimeBaseIsRealtime, int timestampBase, int32_t colorSpace) :
Camera3Stream(id, type,
width, height, maxSize, format, dataSpace, rotation,
physicalCameraId, sensorPixelModesUsed, setId, isMultiResolution,
- dynamicRangeProfile, streamUseCase, deviceTimeBaseIsRealtime, timestampBase),
+ dynamicRangeProfile, streamUseCase, deviceTimeBaseIsRealtime, timestampBase,
+ colorSpace),
mTotalBufferCount(0),
+ mMaxCachedBufferCount(0),
mHandoutTotalBufferCount(0),
mHandoutOutputBufferCount(0),
+ mCachedOutputBufferCount(0),
mFrameCount(0),
mLastTimestamp(0) {
@@ -91,12 +94,13 @@
}
lines.appendFormat(" Dynamic Range Profile: 0x%" PRIx64 "\n",
camera_stream::dynamic_range_profile);
+ lines.appendFormat(" Color Space: %d\n", camera_stream::color_space);
lines.appendFormat(" Stream use case: %" PRId64 "\n", camera_stream::use_case);
lines.appendFormat(" Timestamp base: %d\n", getTimestampBase());
lines.appendFormat(" Frames produced: %d, last timestamp: %" PRId64 " ns\n",
mFrameCount, mLastTimestamp);
- lines.appendFormat(" Total buffers: %zu, currently dequeued: %zu\n",
- mTotalBufferCount, mHandoutTotalBufferCount);
+ lines.appendFormat(" Total buffers: %zu, currently dequeued: %zu, currently cached: %zu\n",
+ mTotalBufferCount, mHandoutTotalBufferCount, mCachedOutputBufferCount);
write(fd, lines.string(), lines.size());
Camera3Stream::dump(fd, args);
@@ -135,6 +139,14 @@
return (mHandoutTotalBufferCount - mHandoutOutputBufferCount);
}
+size_t Camera3IOStreamBase::getCachedOutputBufferCountLocked() const {
+ return mCachedOutputBufferCount;
+}
+
+size_t Camera3IOStreamBase::getMaxCachedOutputBuffersLocked() const {
+ return mMaxCachedBufferCount;
+}
+
status_t Camera3IOStreamBase::disconnectLocked() {
switch (mState) {
case STATE_IN_RECONFIG:
diff --git a/services/camera/libcameraservice/device3/Camera3IOStreamBase.h b/services/camera/libcameraservice/device3/Camera3IOStreamBase.h
index f389d53..6af0875 100644
--- a/services/camera/libcameraservice/device3/Camera3IOStreamBase.h
+++ b/services/camera/libcameraservice/device3/Camera3IOStreamBase.h
@@ -41,7 +41,8 @@
int64_t dynamicProfile = ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD,
int64_t streamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
bool deviceTimeBaseIsRealtime = false,
- int timestampBase = OutputConfiguration::TIMESTAMP_BASE_DEFAULT);
+ int timestampBase = OutputConfiguration::TIMESTAMP_BASE_DEFAULT,
+ int32_t colorSpace = ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED);
public:
@@ -56,11 +57,18 @@
int getMaxTotalBuffers() const { return mTotalBufferCount; }
protected:
size_t mTotalBufferCount;
+ // The maximum number of cached buffers allowed for this stream
+ size_t mMaxCachedBufferCount;
+
// sum of input and output buffers that are currently acquired by HAL
size_t mHandoutTotalBufferCount;
// number of output buffers that are currently acquired by HAL. This will be
// Redundant when camera3 streams are no longer bidirectional streams.
size_t mHandoutOutputBufferCount;
+ // number of cached output buffers that are currently queued in the camera
+ // server but not yet queued to the buffer queue.
+ size_t mCachedOutputBufferCount;
+
uint32_t mFrameCount;
// Last received output buffer's timestamp
nsecs_t mLastTimestamp;
@@ -97,6 +105,9 @@
virtual size_t getHandoutInputBufferCountLocked();
+ virtual size_t getCachedOutputBufferCountLocked() const;
+ virtual size_t getMaxCachedOutputBuffersLocked() const;
+
virtual status_t getEndpointUsage(uint64_t *usage) const = 0;
status_t getBufferPreconditionCheckLocked() const;
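
The mMaxCachedBufferCount/mCachedOutputBufferCount pair added above tracks buffers that the
camera server holds back (for example in the preview frame spacer wired up in
Camera3OutputStream.cpp below) before they reach the consumer's buffer queue. A minimal
standalone sketch of that accounting follows; the struct and method names are illustrative
stand-ins, not the real class:

#include <cstddef>

struct CachedBufferAccountingSketch {
    size_t maxCached = 0;   // like mMaxCachedBufferCount: capacity reserved for caching
    size_t cached = 0;      // like mCachedOutputBufferCount: buffers currently parked

    bool canCacheAnother() const { return cached < maxCached; }
    void onBufferDeferred()       { ++cached; }  // buffer handed to the frame spacer
    void onCachedBufferQueued()   { --cached; }  // spacer queued it to the buffer queue
};
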
diff --git a/services/camera/libcameraservice/device3/Camera3OfflineSession.h b/services/camera/libcameraservice/device3/Camera3OfflineSession.h
index a799719..5ee6ca5 100644
--- a/services/camera/libcameraservice/device3/Camera3OfflineSession.h
+++ b/services/camera/libcameraservice/device3/Camera3OfflineSession.h
@@ -248,6 +248,9 @@
// The current minimum expected frame duration based on AE_TARGET_FPS_RANGE
nsecs_t mMinExpectedDuration = 0;
+ // Whether the camera device runs at fixed frame rate based on AE_MODE and
+ // AE_TARGET_FPS_RANGE
+ bool mIsFixedFps = false;
// SetErrorInterface
void setErrorState(const char *fmt, ...) override;
diff --git a/services/camera/libcameraservice/device3/Camera3OutputStream.cpp b/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
index 5a1ee9b..1abcd86 100644
--- a/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
+++ b/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
@@ -56,12 +56,12 @@
const std::unordered_set<int32_t> &sensorPixelModesUsed, IPCTransport transport,
int setId, bool isMultiResolution, int64_t dynamicRangeProfile,
int64_t streamUseCase, bool deviceTimeBaseIsRealtime, int timestampBase,
- int mirrorMode) :
+ int mirrorMode, int32_t colorSpace) :
Camera3IOStreamBase(id, CAMERA_STREAM_OUTPUT, width, height,
/*maxSize*/0, format, dataSpace, rotation,
physicalCameraId, sensorPixelModesUsed, setId, isMultiResolution,
dynamicRangeProfile, streamUseCase, deviceTimeBaseIsRealtime,
- timestampBase),
+ timestampBase, colorSpace),
mConsumer(consumer),
mTransform(0),
mTraceFirstBuffer(true),
@@ -91,11 +91,11 @@
const std::unordered_set<int32_t> &sensorPixelModesUsed, IPCTransport transport,
int setId, bool isMultiResolution, int64_t dynamicRangeProfile,
int64_t streamUseCase, bool deviceTimeBaseIsRealtime, int timestampBase,
- int mirrorMode) :
+ int mirrorMode, int32_t colorSpace) :
Camera3IOStreamBase(id, CAMERA_STREAM_OUTPUT, width, height, maxSize,
format, dataSpace, rotation, physicalCameraId, sensorPixelModesUsed,
setId, isMultiResolution, dynamicRangeProfile, streamUseCase,
- deviceTimeBaseIsRealtime, timestampBase),
+ deviceTimeBaseIsRealtime, timestampBase, colorSpace),
mConsumer(consumer),
mTransform(0),
mTraceFirstBuffer(true),
@@ -131,12 +131,12 @@
const std::unordered_set<int32_t> &sensorPixelModesUsed, IPCTransport transport,
int setId, bool isMultiResolution, int64_t dynamicRangeProfile,
int64_t streamUseCase, bool deviceTimeBaseIsRealtime, int timestampBase,
- int mirrorMode) :
+ int mirrorMode, int32_t colorSpace) :
Camera3IOStreamBase(id, CAMERA_STREAM_OUTPUT, width, height,
/*maxSize*/0, format, dataSpace, rotation,
physicalCameraId, sensorPixelModesUsed, setId, isMultiResolution,
dynamicRangeProfile, streamUseCase, deviceTimeBaseIsRealtime,
- timestampBase),
+ timestampBase, colorSpace),
mConsumer(nullptr),
mTransform(0),
mTraceFirstBuffer(true),
@@ -180,13 +180,13 @@
int setId, bool isMultiResolution,
int64_t dynamicRangeProfile, int64_t streamUseCase,
bool deviceTimeBaseIsRealtime, int timestampBase,
- int mirrorMode) :
+ int mirrorMode, int32_t colorSpace) :
Camera3IOStreamBase(id, type, width, height,
/*maxSize*/0,
format, dataSpace, rotation,
physicalCameraId, sensorPixelModesUsed, setId, isMultiResolution,
dynamicRangeProfile, streamUseCase, deviceTimeBaseIsRealtime,
- timestampBase),
+ timestampBase, colorSpace),
mTransform(0),
mTraceFirstBuffer(true),
mUseBufferManager(false),
@@ -419,6 +419,7 @@
mLock.unlock();
ANativeWindowBuffer *anwBuffer = container_of(buffer.buffer, ANativeWindowBuffer, handle);
+ bool bufferDeferred = false;
/**
* Return buffer back to ANativeWindow
*/
@@ -478,6 +479,7 @@
__FUNCTION__, mId, strerror(-res), res);
return res;
}
+ bufferDeferred = true;
} else {
nsecs_t presentTime = mSyncToDisplay ?
syncTimestampToDisplayLocked(captureTime) : captureTime;
@@ -501,6 +503,10 @@
}
mLock.lock();
+ if (bufferDeferred) {
+ mCachedOutputBufferCount++;
+ }
+
// Once a valid buffer has been returned to the queue, can no longer
// dequeue all buffers for preallocation.
if (buffer.status != CAMERA_BUFFER_STATUS_ERROR) {
@@ -692,10 +698,15 @@
!isVideoStream());
if (forceChoreographer || defaultToChoreographer) {
mSyncToDisplay = true;
+ // For choreographer synced stream, extra buffers aren't kept by
+ // camera service. So no need to update mMaxCachedBufferCount.
mTotalBufferCount += kDisplaySyncExtraBuffer;
} else if (defaultToSpacer) {
mPreviewFrameSpacer = new PreviewFrameSpacer(this, mConsumer);
- mTotalBufferCount ++;
+ // For preview frame spacer, the extra buffer is kept by camera
+ // service. So update mMaxCachedBufferCount.
+ mMaxCachedBufferCount = 1;
+ mTotalBufferCount += mMaxCachedBufferCount;
res = mPreviewFrameSpacer->run(String8::format("PreviewSpacer-%d", mId).string());
if (res != OK) {
ALOGE("%s: Unable to start preview spacer: %s (%d)", __FUNCTION__,
@@ -965,6 +976,14 @@
return true;
}
+void Camera3OutputStream::onCachedBufferQueued() {
+ Mutex::Autolock l(mLock);
+ mCachedOutputBufferCount--;
+ // Signal whoever is waiting for the buffer to be returned to the buffer
+ // queue.
+ mOutputBufferReturnedSignal.signal();
+}
+
status_t Camera3OutputStream::disconnectLocked() {
status_t res;
@@ -1364,9 +1383,10 @@
return OK;
}
-void Camera3OutputStream::onMinDurationChanged(nsecs_t duration) {
+void Camera3OutputStream::onMinDurationChanged(nsecs_t duration, bool fixedFps) {
Mutex::Autolock l(mLock);
mMinExpectedDuration = duration;
+ mFixedFps = fixedFps;
}
void Camera3OutputStream::returnPrefetchedBuffersLocked() {
@@ -1387,29 +1407,39 @@
}
nsecs_t Camera3OutputStream::syncTimestampToDisplayLocked(nsecs_t t) {
+ nsecs_t currentTime = systemTime();
+ if (!mFixedFps) {
+ mLastCaptureTime = t;
+ mLastPresentTime = currentTime;
+ return t;
+ }
+
ParcelableVsyncEventData parcelableVsyncEventData;
auto res = mDisplayEventReceiver.getLatestVsyncEventData(&parcelableVsyncEventData);
if (res != OK) {
ALOGE("%s: Stream %d: Error getting latest vsync event data: %s (%d)",
__FUNCTION__, mId, strerror(-res), res);
mLastCaptureTime = t;
- mLastPresentTime = t;
+ mLastPresentTime = currentTime;
return t;
}
const VsyncEventData& vsyncEventData = parcelableVsyncEventData.vsync;
- nsecs_t currentTime = systemTime();
+ nsecs_t minPresentT = mLastPresentTime + vsyncEventData.frameInterval / 2;
- // Reset capture to present time offset if:
- // - More than 1 second between frames.
- // - The frame duration deviates from multiples of vsync frame intervals.
+ // Find the best presentation time without worrying about previous frame's
+ // presentation time if capture interval is more than kSpacingResetIntervalNs.
+ //
+    // When frames are more than 50 ms apart (3 vsyncs at a 60 Hz refresh rate),
+    // there is little risk in starting over and finding the earliest vsync to latch onto.
+    // - Update the captureToPresentTime offset to be used for later frames.
+    // - Example use cases:
+    //   - the frame rate drops below 20 fps, or
+    //   - a new streaming session starts (stopPreview followed by
+    //     startPreview)
+ //
nsecs_t captureInterval = t - mLastCaptureTime;
- float captureToVsyncIntervalRatio = 1.0f * captureInterval / vsyncEventData.frameInterval;
- float ratioDeviation = std::fabs(
- captureToVsyncIntervalRatio - std::roundf(captureToVsyncIntervalRatio));
- if (captureInterval > kSpacingResetIntervalNs ||
- ratioDeviation >= kMaxIntervalRatioDeviation) {
- nsecs_t minPresentT = mLastPresentTime + vsyncEventData.frameInterval / 2;
+ if (captureInterval > kSpacingResetIntervalNs) {
for (size_t i = 0; i < VsyncEventData::kFrameTimelinesLength; i++) {
const auto& timeline = vsyncEventData.frameTimelines[i];
if (timeline.deadlineTimestamp >= currentTime &&
@@ -1431,21 +1461,54 @@
nsecs_t idealPresentT = t + mCaptureToPresentOffset;
nsecs_t expectedPresentT = mLastPresentTime;
nsecs_t minDiff = INT64_MAX;
- // Derive minimum intervals between presentation times based on minimal
+
+ // In fixed FPS case, when frame durations are close to multiples of display refresh
+ // rate, derive minimum intervals between presentation times based on minimal
// expected duration. The minimum number of Vsyncs is:
// - 0 if minFrameDuration in (0, 1.5] * vSyncInterval,
// - 1 if minFrameDuration in (1.5, 2.5] * vSyncInterval,
// - and so on.
+ //
+ // This spaces out the displaying of the frames so that the frame
+ // presentations are roughly in sync with frame captures.
int minVsyncs = (mMinExpectedDuration - vsyncEventData.frameInterval / 2) /
vsyncEventData.frameInterval;
if (minVsyncs < 0) minVsyncs = 0;
nsecs_t minInterval = minVsyncs * vsyncEventData.frameInterval;
+
+    // In the fixed-FPS case, if the frame duration deviates from multiples of the
+    // display refresh rate, find the closest Vsync without requiring a minimum
+    // number of Vsyncs.
+ //
+ // Example: (24fps camera, 60hz refresh):
+ // capture readout: | t1 | t1 | .. | t1 | .. | t1 | .. | t1 |
+ // display VSYNC: | t2 | t2 | ... | t2 | ... | t2 | ... | t2 |
+ // | : 1 frame
+ // t1 : 41.67ms
+ // t2 : 16.67ms
+ // t1/t2 = 2.5
+ //
+ // 24fps is a commonly used video frame rate. Because the capture
+    // interval is 2.5 times the display refresh interval, the minVsyncs
+    // calculation falls directly on the boundary condition. In this case,
+    // we should fall back to the basic logic of finding the closest vsync
+    // timestamp without worrying about minVsyncs.
+ float captureToVsyncIntervalRatio = 1.0f * mMinExpectedDuration / vsyncEventData.frameInterval;
+ float ratioDeviation = std::fabs(
+ captureToVsyncIntervalRatio - std::roundf(captureToVsyncIntervalRatio));
+ bool captureDeviateFromVsync = ratioDeviation >= kMaxIntervalRatioDeviation;
+ bool cameraDisplayInSync = (mFixedFps && !captureDeviateFromVsync);
+
// Find best timestamp in the vsync timelines:
- // - Only use at most 3 timelines to avoid long latency
- // - closest to the ideal present time,
+ // - Only use at most kMaxTimelines timelines to avoid long latency
+ // - closest to the ideal presentation time,
// - deadline timestamp is greater than the current time, and
- // - the candidate present time is at least minInterval in the future
- // compared to last present time.
+ // - For fixed FPS, if the capture interval doesn't deviate too much from refresh interval,
+ // the candidate presentation time is at least minInterval in the future compared to last
+ // presentation time.
+    //   - For variable FPS, or if the capture interval deviates from the refresh
+    //     interval by more than 5%, find a presentation time closest to
+    //     (lastPresentationTime + captureToPresentOffset) instead.
int maxTimelines = std::min(kMaxTimelines, (int)VsyncEventData::kFrameTimelinesLength);
float biasForShortDelay = 1.0f;
for (int i = 0; i < maxTimelines; i ++) {
@@ -1458,12 +1521,50 @@
}
if (std::abs(vsyncTime.expectedPresentationTime - idealPresentT) < minDiff &&
vsyncTime.deadlineTimestamp >= currentTime &&
- vsyncTime.expectedPresentationTime >
- mLastPresentTime + minInterval + biasForShortDelay * kTimelineThresholdNs) {
+ ((!cameraDisplayInSync && vsyncTime.expectedPresentationTime > minPresentT) ||
+ (cameraDisplayInSync && vsyncTime.expectedPresentationTime >
+ mLastPresentTime + minInterval + biasForShortDelay * kTimelineThresholdNs))) {
expectedPresentT = vsyncTime.expectedPresentationTime;
minDiff = std::abs(vsyncTime.expectedPresentationTime - idealPresentT);
}
}
+
+ if (expectedPresentT == mLastPresentTime && expectedPresentT <
+ vsyncEventData.frameTimelines[maxTimelines-1].expectedPresentationTime) {
+ // Couldn't find a reasonable presentation time. Using last frame's
+ // presentation time would cause a frame drop. The best option now
+ // is to use the next VSync as long as the last presentation time
+        // doesn't already have the maximum latency, in which case dropping the
+        // buffer is preferable to increasing latency.
+ //
+ // Example: (60fps camera, 59.9hz refresh):
+ // capture readout: | t1 | t1 | .. | t1 | .. | t1 | .. | t1 |
+ // \ \ \ \ \ \ \ \ \
+ // queue to BQ: | | | | | | | | |
+ // \ \ \ \ \ \ \ \ \
+ // display VSYNC: | t2 | t2 | ... | t2 | ... | t2 | ... | t2 |
+ //
+ // |: 1 frame
+ // t1 : 16.67ms
+ // t2 : 16.69ms
+ //
+ // It takes 833 frames for capture readout count and display VSYNC count to be off
+ // by 1.
+ // - At frames [0, 832], presentationTime is set to timeline[0]
+ // - At frames [833, 833*2-1], presentationTime is set to timeline[1]
+ // - At frames [833*2, 833*3-1] presentationTime is set to timeline[2]
+ // - At frame 833*3, no presentation time is found because we only
+ // search for timeline[0..2].
+        // - Dropping one buffer is better than further extending the presentation
+        //   time.
+ //
+ // However, if frame 833*2 arrives 16.67ms early (right after frame
+ // 833*2-1), no presentation time can be found because
+ // getLatestVsyncEventData is called early. In that case, it's better to
+        // set the presentation time by offsetting the last presentation time.
+ expectedPresentT += vsyncEventData.frameInterval;
+ }
+
mLastCaptureTime = t;
mLastPresentTime = expectedPresentT;
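
The fixed-FPS display-sync path above hinges on two quantities: the minimum number of vsyncs
that must separate consecutive presentation times, and how far the capture interval deviates
from a whole multiple of the vsync interval. A small standalone sketch of those two
computations (hypothetical helper names, nanosecond inputs), with the 24 fps / 60 Hz example
from the comments:

#include <cmath>
#include <cstdint>

// 0 vsyncs for durations up to 1.5 vsync intervals, 1 up to 2.5, and so on.
int minVsyncsBetweenFrames(int64_t minFrameDurationNs, int64_t vsyncIntervalNs) {
    int64_t minVsyncs = (minFrameDurationNs - vsyncIntervalNs / 2) / vsyncIntervalNs;
    return minVsyncs < 0 ? 0 : static_cast<int>(minVsyncs);
}

// True when the capture interval is not close to a whole multiple of the vsync interval.
bool captureDeviatesFromVsync(int64_t minFrameDurationNs, int64_t vsyncIntervalNs,
                              float maxRatioDeviation = 0.05f) {
    float ratio = 1.0f * minFrameDurationNs / vsyncIntervalNs;
    return std::fabs(ratio - std::roundf(ratio)) >= maxRatioDeviation;
}

// 24 fps capture (~41.67 ms) on a 60 Hz panel (~16.67 ms): the ratio is 2.5, the deviation
// is 0.5, so the stream is treated as out of sync with the display and falls back to the
// closest-vsync logic instead of enforcing the minVsyncs spacing.
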
diff --git a/services/camera/libcameraservice/device3/Camera3OutputStream.h b/services/camera/libcameraservice/device3/Camera3OutputStream.h
index 3587af4..0d758bc 100644
--- a/services/camera/libcameraservice/device3/Camera3OutputStream.h
+++ b/services/camera/libcameraservice/device3/Camera3OutputStream.h
@@ -96,7 +96,8 @@
int64_t streamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
bool deviceTimeBaseIsRealtime = false,
int timestampBase = OutputConfiguration::TIMESTAMP_BASE_DEFAULT,
- int mirrorMode = OutputConfiguration::MIRROR_MODE_AUTO);
+ int mirrorMode = OutputConfiguration::MIRROR_MODE_AUTO,
+ int32_t colorSpace = ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED);
/**
* Set up a stream for formats that have a variable buffer size for the same
* dimensions, such as compressed JPEG.
@@ -113,7 +114,8 @@
int64_t streamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
bool deviceTimeBaseIsRealtime = false,
int timestampBase = OutputConfiguration::TIMESTAMP_BASE_DEFAULT,
- int mirrorMode = OutputConfiguration::MIRROR_MODE_AUTO);
+ int mirrorMode = OutputConfiguration::MIRROR_MODE_AUTO,
+ int32_t colorSpace = ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED);
/**
* Set up a stream with deferred consumer for formats that have 2 dimensions, such as
* RAW and YUV. The consumer must be set before using this stream for output. A valid
@@ -129,7 +131,8 @@
int64_t streamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
bool deviceTimeBaseIsRealtime = false,
int timestampBase = OutputConfiguration::TIMESTAMP_BASE_DEFAULT,
- int mirrorMode = OutputConfiguration::MIRROR_MODE_AUTO);
+ int mirrorMode = OutputConfiguration::MIRROR_MODE_AUTO,
+ int32_t colorSpace = ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED);
virtual ~Camera3OutputStream();
@@ -247,9 +250,10 @@
virtual status_t setBatchSize(size_t batchSize = 1) override;
/**
- * Notify the stream on change of min frame durations.
+ * Notify the stream on change of min frame durations or variable/fixed
+ * frame rate.
*/
- virtual void onMinDurationChanged(nsecs_t duration) override;
+ virtual void onMinDurationChanged(nsecs_t duration, bool fixedFps) override;
/**
* Apply ZSL related consumer usage quirk.
@@ -258,6 +262,7 @@
void setImageDumpMask(int mask) { mImageDumpMask = mask; }
bool shouldLogError(status_t res);
+ void onCachedBufferQueued();
protected:
Camera3OutputStream(int id, camera_stream_type_t type,
@@ -271,7 +276,8 @@
int64_t streamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
bool deviceTimeBaseIsRealtime = false,
int timestampBase = OutputConfiguration::TIMESTAMP_BASE_DEFAULT,
- int mirrorMode = OutputConfiguration::MIRROR_MODE_AUTO);
+ int mirrorMode = OutputConfiguration::MIRROR_MODE_AUTO,
+ int32_t colorSpace = ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED);
/**
* Note that we release the lock briefly in this function
@@ -419,6 +425,7 @@
// Re-space frames by overriding timestamp to align with display Vsync.
// Default is on for SurfaceView bound streams.
+ bool mFixedFps = false;
nsecs_t mMinExpectedDuration = 0;
bool mSyncToDisplay = false;
DisplayEventReceiver mDisplayEventReceiver;
@@ -429,7 +436,7 @@
static constexpr nsecs_t kSpacingResetIntervalNs = 50000000LL; // 50 millisecond
static constexpr nsecs_t kTimelineThresholdNs = 1000000LL; // 1 millisecond
static constexpr float kMaxIntervalRatioDeviation = 0.05f;
- static constexpr int kMaxTimelines = 3;
+ static constexpr int kMaxTimelines = 2;
nsecs_t syncTimestampToDisplayLocked(nsecs_t t);
// Re-space frames by delaying queueBuffer so that frame delivery has
diff --git a/services/camera/libcameraservice/device3/Camera3OutputStreamInterface.h b/services/camera/libcameraservice/device3/Camera3OutputStreamInterface.h
index a6d4b96..dbc6fe1 100644
--- a/services/camera/libcameraservice/device3/Camera3OutputStreamInterface.h
+++ b/services/camera/libcameraservice/device3/Camera3OutputStreamInterface.h
@@ -110,12 +110,13 @@
virtual status_t setBatchSize(size_t batchSize = 1) = 0;
/**
- * Notify the output stream that the minimum frame duration has changed.
+ * Notify the output stream that the minimum frame duration has changed, or
+ * the frame rate has switched between variable and fixed.
*
* The minimum frame duration is calculated based on the upper bound of
* AE_TARGET_FPS_RANGE in the capture request.
*/
- virtual void onMinDurationChanged(nsecs_t duration) = 0;
+ virtual void onMinDurationChanged(nsecs_t duration, bool fixedFps) = 0;
};
// Helper class to organize a synchronized mapping of stream IDs to stream instances
diff --git a/services/camera/libcameraservice/device3/Camera3OutputUtils.cpp b/services/camera/libcameraservice/device3/Camera3OutputUtils.cpp
index f4e3fad..65c45ff 100644
--- a/services/camera/libcameraservice/device3/Camera3OutputUtils.cpp
+++ b/services/camera/libcameraservice/device3/Camera3OutputUtils.cpp
@@ -128,12 +128,41 @@
return res;
}
+void correctMeteringRegions(camera_metadata_t *meta) {
+ if (meta == nullptr) return;
+
+ uint32_t meteringRegionKeys[] = {
+ ANDROID_CONTROL_AE_REGIONS,
+ ANDROID_CONTROL_AWB_REGIONS,
+ ANDROID_CONTROL_AF_REGIONS };
+
+ for (uint32_t key : meteringRegionKeys) {
+ camera_metadata_entry_t entry;
+ int res = find_camera_metadata_entry(meta, key, &entry);
+ if (res != OK) continue;
+
+ for (size_t i = 0; i < entry.count; i += 5) {
+ if (entry.data.i32[0] > entry.data.i32[2]) {
+ ALOGW("%s: Invalid metering region (%d): left: %d, right: %d",
+ __FUNCTION__, key, entry.data.i32[0], entry.data.i32[2]);
+ entry.data.i32[2] = entry.data.i32[0];
+ }
+ if (entry.data.i32[1] > entry.data.i32[3]) {
+ ALOGW("%s: Invalid metering region (%d): top: %d, bottom: %d",
+ __FUNCTION__, key, entry.data.i32[1], entry.data.i32[3]);
+ entry.data.i32[3] = entry.data.i32[1];
+ }
+ }
+ }
+}
+
void insertResultLocked(CaptureOutputStates& states, CaptureResult *result, uint32_t frameNumber) {
if (result == nullptr) return;
camera_metadata_t *meta = const_cast<camera_metadata_t *>(
result->mMetadata.getAndLock());
set_camera_metadata_vendor_id(meta, states.vendorTagId);
+ correctMeteringRegions(meta);
result->mMetadata.unlock(meta);
if (result->mMetadata.update(ANDROID_REQUEST_FRAME_COUNT,
@@ -152,6 +181,7 @@
camera_metadata_t *pmeta = const_cast<camera_metadata_t *>(
physicalMetadata.mPhysicalCameraMetadata.getAndLock());
set_camera_metadata_vendor_id(pmeta, states.vendorTagId);
+ correctMeteringRegions(pmeta);
physicalMetadata.mPhysicalCameraMetadata.unlock(pmeta);
}
@@ -858,12 +888,14 @@
r.resultExtras.hasReadoutTimestamp = true;
r.resultExtras.readoutTimestamp = msg.readout_timestamp;
}
- if (r.minExpectedDuration != states.minFrameDuration) {
+ if (r.minExpectedDuration != states.minFrameDuration ||
+ r.isFixedFps != states.isFixedFps) {
for (size_t i = 0; i < states.outputStreams.size(); i++) {
auto outputStream = states.outputStreams[i];
- outputStream->onMinDurationChanged(r.minExpectedDuration);
+ outputStream->onMinDurationChanged(r.minExpectedDuration, r.isFixedFps);
}
states.minFrameDuration = r.minExpectedDuration;
+ states.isFixedFps = r.isFixedFps;
}
if (r.hasCallback) {
ALOGVV("Camera %s: %s: Shutter fired for frame %d (id %d) at %" PRId64,
diff --git a/services/camera/libcameraservice/device3/Camera3OutputUtils.h b/services/camera/libcameraservice/device3/Camera3OutputUtils.h
index d6107c2..8c71c2b 100644
--- a/services/camera/libcameraservice/device3/Camera3OutputUtils.h
+++ b/services/camera/libcameraservice/device3/Camera3OutputUtils.h
@@ -106,6 +106,7 @@
BufferRecordsInterface& bufferRecordsIntf;
bool legacyClient;
nsecs_t& minFrameDuration;
+ bool& isFixedFps;
};
void processCaptureResult(CaptureOutputStates& states, const camera_capture_result *result);
diff --git a/services/camera/libcameraservice/device3/Camera3SharedOutputStream.cpp b/services/camera/libcameraservice/device3/Camera3SharedOutputStream.cpp
index 9215f23..da45227 100644
--- a/services/camera/libcameraservice/device3/Camera3SharedOutputStream.cpp
+++ b/services/camera/libcameraservice/device3/Camera3SharedOutputStream.cpp
@@ -35,12 +35,12 @@
const std::unordered_set<int32_t> &sensorPixelModesUsed, IPCTransport transport,
int setId, bool useHalBufManager, int64_t dynamicProfile,
int64_t streamUseCase, bool deviceTimeBaseIsRealtime, int timestampBase,
- int mirrorMode) :
+ int mirrorMode, int32_t colorSpace) :
Camera3OutputStream(id, CAMERA_STREAM_OUTPUT, width, height,
format, dataSpace, rotation, physicalCameraId, sensorPixelModesUsed,
transport, consumerUsage, timestampOffset, setId,
/*isMultiResolution*/false, dynamicProfile, streamUseCase,
- deviceTimeBaseIsRealtime, timestampBase, mirrorMode),
+ deviceTimeBaseIsRealtime, timestampBase, mirrorMode, colorSpace),
mUseHalBufManager(useHalBufManager) {
size_t consumerCount = std::min(surfaces.size(), kMaxOutputs);
if (surfaces.size() > consumerCount) {
diff --git a/services/camera/libcameraservice/device3/Camera3SharedOutputStream.h b/services/camera/libcameraservice/device3/Camera3SharedOutputStream.h
index aac3c2a..5167225 100644
--- a/services/camera/libcameraservice/device3/Camera3SharedOutputStream.h
+++ b/services/camera/libcameraservice/device3/Camera3SharedOutputStream.h
@@ -45,7 +45,8 @@
int64_t streamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
bool deviceTimeBaseIsRealtime = false,
int timestampBase = OutputConfiguration::TIMESTAMP_BASE_DEFAULT,
- int mirrorMode = OutputConfiguration::MIRROR_MODE_AUTO);
+ int mirrorMode = OutputConfiguration::MIRROR_MODE_AUTO,
+ int32_t colorSpace = ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED);
virtual ~Camera3SharedOutputStream();
diff --git a/services/camera/libcameraservice/device3/Camera3Stream.cpp b/services/camera/libcameraservice/device3/Camera3Stream.cpp
index 7ad6649..4d8495f 100644
--- a/services/camera/libcameraservice/device3/Camera3Stream.cpp
+++ b/services/camera/libcameraservice/device3/Camera3Stream.cpp
@@ -55,7 +55,8 @@
const String8& physicalCameraId,
const std::unordered_set<int32_t> &sensorPixelModesUsed,
int setId, bool isMultiResolution, int64_t dynamicRangeProfile,
- int64_t streamUseCase, bool deviceTimeBaseIsRealtime, int timestampBase) :
+ int64_t streamUseCase, bool deviceTimeBaseIsRealtime, int timestampBase,
+ int32_t colorSpace) :
camera_stream(),
mId(id),
mSetId(setId),
@@ -95,6 +96,7 @@
camera_stream::sensor_pixel_modes_used = sensorPixelModesUsed;
camera_stream::dynamic_range_profile = dynamicRangeProfile;
camera_stream::use_case = streamUseCase;
+ camera_stream::color_space = colorSpace;
if ((format == HAL_PIXEL_FORMAT_BLOB || format == HAL_PIXEL_FORMAT_RAW_OPAQUE) &&
maxSize == 0) {
@@ -135,6 +137,10 @@
return camera_stream::data_space;
}
+int32_t Camera3Stream::getColorSpace() const {
+ return camera_stream::color_space;
+}
+
uint64_t Camera3Stream::getUsage() const {
return mUsage;
}
@@ -665,11 +671,19 @@
}
}
- // Wait for new buffer returned back if we are running into the limit.
+ // Wait for a new buffer to be returned if we are running into a limit. There
+ // are 2 limits:
+ // 1. The number of HAL buffers is greater than max_buffers
+ // 2. The number of HAL buffers + cached buffers is greater than max_buffers
+ // + maxCachedBuffers
size_t numOutstandingBuffers = getHandoutOutputBufferCountLocked();
- if (numOutstandingBuffers == camera_stream::max_buffers) {
- ALOGV("%s: Already dequeued max output buffers (%d), wait for next returned one.",
- __FUNCTION__, camera_stream::max_buffers);
+ size_t numCachedBuffers = getCachedOutputBufferCountLocked();
+ size_t maxNumCachedBuffers = getMaxCachedOutputBuffersLocked();
+ while (numOutstandingBuffers == camera_stream::max_buffers ||
+ numOutstandingBuffers + numCachedBuffers ==
+ camera_stream::max_buffers + maxNumCachedBuffers) {
+ ALOGV("%s: Already dequeued max output buffers (%d(+%zu)), wait for next returned one.",
+ __FUNCTION__, camera_stream::max_buffers, maxNumCachedBuffers);
nsecs_t waitStart = systemTime(SYSTEM_TIME_MONOTONIC);
if (waitBufferTimeout < kWaitForBufferDuration) {
waitBufferTimeout = kWaitForBufferDuration;
@@ -687,12 +701,16 @@
}
size_t updatedNumOutstandingBuffers = getHandoutOutputBufferCountLocked();
- if (updatedNumOutstandingBuffers >= numOutstandingBuffers) {
- ALOGE("%s: outsanding buffer count goes from %zu to %zu, "
+ size_t updatedNumCachedBuffers = getCachedOutputBufferCountLocked();
+ if (updatedNumOutstandingBuffers >= numOutstandingBuffers &&
+ updatedNumCachedBuffers == numCachedBuffers) {
+ ALOGE("%s: outstanding buffer count goes from %zu to %zu, "
"getBuffer(s) call must not run in parallel!", __FUNCTION__,
numOutstandingBuffers, updatedNumOutstandingBuffers);
return INVALID_OPERATION;
}
+ numOutstandingBuffers = updatedNumOutstandingBuffers;
+ numCachedBuffers = updatedNumCachedBuffers;
}
res = getBufferLocked(buffer, surface_ids);
@@ -1057,11 +1075,20 @@
}
size_t numOutstandingBuffers = getHandoutOutputBufferCountLocked();
- // Wait for new buffer returned back if we are running into the limit.
- while (numOutstandingBuffers + numBuffersRequested > camera_stream::max_buffers) {
- ALOGV("%s: Already dequeued %zu output buffers and requesting %zu (max is %d), waiting.",
- __FUNCTION__, numOutstandingBuffers, numBuffersRequested,
- camera_stream::max_buffers);
+ size_t numCachedBuffers = getCachedOutputBufferCountLocked();
+ size_t maxNumCachedBuffers = getMaxCachedOutputBuffersLocked();
+ // Wait for a new buffer to be returned if we are running into a limit. There
+ // are 2 limits:
+ // 1. The number of HAL buffers is greater than max_buffers
+ // 2. The number of HAL buffers + cached buffers is greater than max_buffers
+ // + maxCachedBuffers
+ while (numOutstandingBuffers + numBuffersRequested > camera_stream::max_buffers ||
+ numOutstandingBuffers + numCachedBuffers + numBuffersRequested >
+ camera_stream::max_buffers + maxNumCachedBuffers) {
+ ALOGV("%s: Already dequeued %zu(+%zu) output buffers and requesting %zu "
+ "(max is %d(+%zu)), waiting.", __FUNCTION__, numOutstandingBuffers,
+ numCachedBuffers, numBuffersRequested, camera_stream::max_buffers,
+ maxNumCachedBuffers);
nsecs_t waitStart = systemTime(SYSTEM_TIME_MONOTONIC);
if (waitBufferTimeout < kWaitForBufferDuration) {
waitBufferTimeout = kWaitForBufferDuration;
@@ -1078,13 +1105,16 @@
return res;
}
size_t updatedNumOutstandingBuffers = getHandoutOutputBufferCountLocked();
- if (updatedNumOutstandingBuffers >= numOutstandingBuffers) {
- ALOGE("%s: outsanding buffer count goes from %zu to %zu, "
+ size_t updatedNumCachedBuffers = getCachedOutputBufferCountLocked();
+ if (updatedNumOutstandingBuffers >= numOutstandingBuffers &&
+ updatedNumCachedBuffers == numCachedBuffers) {
+ ALOGE("%s: outstanding buffer count goes from %zu to %zu, "
"getBuffer(s) call must not run in parallel!", __FUNCTION__,
numOutstandingBuffers, updatedNumOutstandingBuffers);
return INVALID_OPERATION;
}
numOutstandingBuffers = updatedNumOutstandingBuffers;
+ numCachedBuffers = updatedNumCachedBuffers;
}
res = getBuffersLocked(buffers);
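As an aside, the wait conditions above now enforce two ceilings at once: HAL-held buffers alone, and HAL-held plus cached (frame-spaced) buffers. A small standalone sketch of the predicate for the single-buffer getBuffer() path follows; it uses >= where the patch compares with == (the counts never exceed the caps there), and all numbers are invented:

    #include <cstdio>

    // Returns true when getBuffer() must block, per the two limits described above:
    // 1) HAL-held buffers have reached max_buffers, or
    // 2) HAL-held + cached buffers have reached max_buffers + maxCachedBuffers.
    bool mustWaitForBuffer(size_t outstanding, size_t cached,
                           size_t maxBuffers, size_t maxCached) {
        return outstanding >= maxBuffers ||
               outstanding + cached >= maxBuffers + maxCached;
    }

    int main() {
        // Example: max_buffers = 4, one extra buffer may be cached by the spacer.
        std::printf("%d\n", mustWaitForBuffer(/*outstanding*/ 3, /*cached*/ 2,
                                              /*maxBuffers*/ 4, /*maxCached*/ 1));  // 1: wait
        std::printf("%d\n", mustWaitForBuffer(3, 1, 4, 1));                         // 0: proceed
        return 0;
    }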
diff --git a/services/camera/libcameraservice/device3/Camera3Stream.h b/services/camera/libcameraservice/device3/Camera3Stream.h
index d429e6c..f32053b 100644
--- a/services/camera/libcameraservice/device3/Camera3Stream.h
+++ b/services/camera/libcameraservice/device3/Camera3Stream.h
@@ -167,6 +167,7 @@
uint32_t getHeight() const;
int getFormat() const;
android_dataspace getDataSpace() const;
+ int32_t getColorSpace() const;
uint64_t getUsage() const;
void setUsage(uint64_t usage);
void setFormatOverride(bool formatOverriden);
@@ -509,7 +510,8 @@
const String8& physicalCameraId,
const std::unordered_set<int32_t> &sensorPixelModesUsed,
int setId, bool isMultiResolution, int64_t dynamicRangeProfile,
- int64_t streamUseCase, bool deviceTimeBaseIsRealtime, int timestampBase);
+ int64_t streamUseCase, bool deviceTimeBaseIsRealtime, int timestampBase,
+ int32_t colorSpace);
wp<Camera3StreamBufferFreedListener> mBufferFreedListener;
@@ -558,6 +560,10 @@
// Get handout input buffer count.
virtual size_t getHandoutInputBufferCountLocked() = 0;
+ // Get cached output buffer count.
+ virtual size_t getCachedOutputBufferCountLocked() const = 0;
+ virtual size_t getMaxCachedOutputBuffersLocked() const = 0;
+
// Get the usage flags for the other endpoint, or return
// INVALID_OPERATION if they cannot be obtained.
virtual status_t getEndpointUsage(uint64_t *usage) const = 0;
@@ -576,6 +582,8 @@
uint64_t mUsage;
+ Condition mOutputBufferReturnedSignal;
+
private:
// Previously configured stream properties (post HAL override)
uint64_t mOldUsage;
@@ -583,7 +591,6 @@
int mOldFormat;
android_dataspace mOldDataSpace;
- Condition mOutputBufferReturnedSignal;
Condition mInputBufferReturnedSignal;
static const nsecs_t kWaitForBufferDuration = 3000000000LL; // 3000 ms
diff --git a/services/camera/libcameraservice/device3/Camera3StreamInterface.h b/services/camera/libcameraservice/device3/Camera3StreamInterface.h
index 6812e89..823be2e 100644
--- a/services/camera/libcameraservice/device3/Camera3StreamInterface.h
+++ b/services/camera/libcameraservice/device3/Camera3StreamInterface.h
@@ -67,6 +67,7 @@
std::unordered_set<int32_t> sensor_pixel_modes_used;
int64_t dynamic_range_profile;
int64_t use_case;
+ int32_t color_space;
} camera_stream_t;
typedef struct camera_stream_buffer {
@@ -114,20 +115,24 @@
int64_t streamUseCase;
int timestampBase;
int mirrorMode;
+ int32_t colorSpace;
OutputStreamInfo() :
width(-1), height(-1), format(-1), dataSpace(HAL_DATASPACE_UNKNOWN),
consumerUsage(0),
dynamicRangeProfile(ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD),
streamUseCase(ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT),
timestampBase(OutputConfiguration::TIMESTAMP_BASE_DEFAULT),
- mirrorMode(OutputConfiguration::MIRROR_MODE_AUTO) {}
+ mirrorMode(OutputConfiguration::MIRROR_MODE_AUTO),
+ colorSpace(ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED) {}
OutputStreamInfo(int _width, int _height, int _format, android_dataspace _dataSpace,
uint64_t _consumerUsage, const std::unordered_set<int32_t>& _sensorPixelModesUsed,
- int64_t _dynamicRangeProfile, int _streamUseCase, int _timestampBase, int _mirrorMode) :
+ int64_t _dynamicRangeProfile, int _streamUseCase, int _timestampBase, int _mirrorMode,
+ int32_t _colorSpace) :
width(_width), height(_height), format(_format),
dataSpace(_dataSpace), consumerUsage(_consumerUsage),
sensorPixelModesUsed(_sensorPixelModesUsed), dynamicRangeProfile(_dynamicRangeProfile),
- streamUseCase(_streamUseCase), timestampBase(_timestampBase), mirrorMode(_mirrorMode) {}
+ streamUseCase(_streamUseCase), timestampBase(_timestampBase), mirrorMode(_mirrorMode),
+ colorSpace(_colorSpace) {}
};
// Utility class to lock and unlock a GraphicBuffer
@@ -206,6 +211,7 @@
virtual int getFormat() const = 0;
virtual int64_t getDynamicRangeProfile() const = 0;
virtual android_dataspace getDataSpace() const = 0;
+ virtual int32_t getColorSpace() const = 0;
virtual void setFormatOverride(bool formatOverriden) = 0;
virtual bool isFormatOverridden() const = 0;
virtual int getOriginalFormat() const = 0;
diff --git a/services/camera/libcameraservice/device3/InFlightRequest.h b/services/camera/libcameraservice/device3/InFlightRequest.h
index fa00495..444445b 100644
--- a/services/camera/libcameraservice/device3/InFlightRequest.h
+++ b/services/camera/libcameraservice/device3/InFlightRequest.h
@@ -152,6 +152,9 @@
// For auto-exposure modes, equal to 1/(lower end of target FPS range)
nsecs_t maxExpectedDuration;
+ // Whether the FPS range is fixed, aka, minFps == maxFps
+ bool isFixedFps;
+
// Whether the result metadata for this request is to be skipped. The
// result metadata should be skipped in the case of
// REQUEST/RESULT error.
@@ -205,6 +208,7 @@
hasCallback(true),
minExpectedDuration(kDefaultMinExpectedDuration),
maxExpectedDuration(kDefaultMaxExpectedDuration),
+ isFixedFps(false),
skipResultMetadata(false),
errorBufStrategy(ERROR_BUF_CACHE),
stillCapture(false),
@@ -215,7 +219,7 @@
}
InFlightRequest(int numBuffers, CaptureResultExtras extras, bool hasInput,
- bool hasAppCallback, nsecs_t minDuration, nsecs_t maxDuration,
+ bool hasAppCallback, nsecs_t minDuration, nsecs_t maxDuration, bool fixedFps,
const std::set<std::set<String8>>& physicalCameraIdSet, bool isStillCapture,
bool isZslCapture, bool rotateAndCropAuto, const std::set<std::string>& idsWithZoom,
nsecs_t requestNs, const SurfaceMap& outSurfaces = SurfaceMap{}) :
@@ -229,6 +233,7 @@
hasCallback(hasAppCallback),
minExpectedDuration(minDuration),
maxExpectedDuration(maxDuration),
+ isFixedFps(fixedFps),
skipResultMetadata(false),
errorBufStrategy(ERROR_BUF_CACHE),
physicalCameraIds(physicalCameraIdSet),
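A trivial illustration of the isFixedFps definition above (an assumed example, not taken from the patch): the AE target FPS range is a {min, max} pair, and a request counts as fixed-fps exactly when the two ends coincide.

    #include <array>
    #include <cstdint>
    #include <cstdio>

    int main() {
        // ANDROID_CONTROL_AE_TARGET_FPS_RANGE is a {min, max} pair; values here
        // are illustrative only.
        std::array<int32_t, 2> aeTargetFpsRange = { 30, 30 };
        bool isFixedFps = (aeTargetFpsRange[0] == aeTargetFpsRange[1]);
        std::printf("isFixedFps=%d\n", isFixedFps);
        return 0;
    }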
diff --git a/services/camera/libcameraservice/device3/PreviewFrameSpacer.cpp b/services/camera/libcameraservice/device3/PreviewFrameSpacer.cpp
index 0439501..83caa00 100644
--- a/services/camera/libcameraservice/device3/PreviewFrameSpacer.cpp
+++ b/services/camera/libcameraservice/device3/PreviewFrameSpacer.cpp
@@ -68,8 +68,10 @@
return true;
}
- // Cache the frame to match readout time interval, for up to 33ms
- nsecs_t expectedQueueTime = mLastCameraPresentTime + readoutInterval;
+ // Cache the frame to match the readout time interval, for up to kMaxFrameWaitTime.
+ // Because the code between here and queueBuffer() takes time to execute, make sure the
+ // presentationInterval is slightly shorter than readoutInterval.
+ nsecs_t expectedQueueTime = mLastCameraPresentTime + readoutInterval - kFrameAdjustThreshold;
nsecs_t frameWaitTime = std::min(kMaxFrameWaitTime, expectedQueueTime - currentTime);
if (frameWaitTime > 0 && mPendingBuffers.size() < 2) {
mBufferCond.waitRelative(mLock, frameWaitTime);
@@ -78,9 +80,9 @@
}
currentTime = systemTime();
}
- ALOGV("%s: readoutInterval %" PRId64 ", queueInterval %" PRId64 ", waited for %" PRId64
+ ALOGV("%s: readoutInterval %" PRId64 ", waited for %" PRId64
", timestamp %" PRId64, __FUNCTION__, readoutInterval,
- currentTime - mLastCameraPresentTime, frameWaitTime, buffer.timestamp);
+ mPendingBuffers.size() < 2 ? frameWaitTime : 0, buffer.timestamp);
mPendingBuffers.pop();
queueBufferToClientLocked(buffer, currentTime);
return true;
@@ -122,6 +124,7 @@
}
}
+ parent->onCachedBufferQueued();
mLastCameraPresentTime = currentTime;
mLastCameraReadoutTime = bufferHolder.readoutTimestamp;
}
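To make the timing change above concrete: the spacer now aims for mLastCameraPresentTime + readoutInterval - kFrameAdjustThreshold and caps the wait at kMaxFrameWaitTime (lowered to 10 ms in the header change below). A hedged sketch of that arithmetic with illustrative timestamps, separate from the actual PreviewFrameSpacer class:

    #include <algorithm>
    #include <cstdint>
    #include <cstdio>

    using nsecs_t = int64_t;

    // Compute how long to wait before queueing the next preview buffer,
    // following the expectedQueueTime/frameWaitTime logic above.
    nsecs_t computeFrameWaitTime(nsecs_t lastPresentTime, nsecs_t readoutInterval,
                                 nsecs_t now, nsecs_t maxFrameWaitTime,
                                 nsecs_t frameAdjustThreshold) {
        nsecs_t expectedQueueTime = lastPresentTime + readoutInterval - frameAdjustThreshold;
        return std::min(maxFrameWaitTime, expectedQueueTime - now);
    }

    int main() {
        const nsecs_t kMaxFrameWaitTime = 10000000LL;     // 10 ms (new value)
        const nsecs_t kFrameAdjustThreshold = 2000000LL;  // 2 ms
        // Illustrative timestamps: last frame presented at t=0, 33 ms readout
        // interval (30 fps), next buffer arriving 20 ms later.
        nsecs_t wait = computeFrameWaitTime(0, 33333333LL, 20000000LL,
                                            kMaxFrameWaitTime, kFrameAdjustThreshold);
        std::printf("wait %lld ns (capped at 10 ms)\n", (long long) wait);
        return 0;
    }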
diff --git a/services/camera/libcameraservice/device3/PreviewFrameSpacer.h b/services/camera/libcameraservice/device3/PreviewFrameSpacer.h
index e165768..f46de3d 100644
--- a/services/camera/libcameraservice/device3/PreviewFrameSpacer.h
+++ b/services/camera/libcameraservice/device3/PreviewFrameSpacer.h
@@ -85,7 +85,8 @@
nsecs_t mLastCameraPresentTime = 0;
static constexpr nsecs_t kWaitDuration = 5000000LL; // 50ms
static constexpr nsecs_t kFrameIntervalThreshold = 80000000LL; // 80ms
- static constexpr nsecs_t kMaxFrameWaitTime = 33333333LL; // 33ms
+ static constexpr nsecs_t kMaxFrameWaitTime = 10000000LL; // 10ms
+ static constexpr nsecs_t kFrameAdjustThreshold = 2000000LL; // 2ms
};
}; //namespace camera3
diff --git a/services/camera/libcameraservice/device3/ZoomRatioMapper.cpp b/services/camera/libcameraservice/device3/ZoomRatioMapper.cpp
index 27b00c9..515259e 100644
--- a/services/camera/libcameraservice/device3/ZoomRatioMapper.cpp
+++ b/services/camera/libcameraservice/device3/ZoomRatioMapper.cpp
@@ -354,17 +354,8 @@
if (weight == 0) {
continue;
}
- // Top left (inclusive)
- scaleCoordinates(entry.data.i32 + j, 1, zoomRatio, true /*clamp*/, arrayWidth,
+ scaleRegion(entry.data.i32 + j, zoomRatio, arrayWidth,
arrayHeight);
- // Bottom right (exclusive): Use adjacent inclusive pixel to
- // calculate.
- entry.data.i32[j+2] -= 1;
- entry.data.i32[j+3] -= 1;
- scaleCoordinates(entry.data.i32 + j + 2, 1, zoomRatio, true /*clamp*/, arrayWidth,
- arrayHeight);
- entry.data.i32[j+2] += 1;
- entry.data.i32[j+3] += 1;
}
}
@@ -401,17 +392,8 @@
if (weight == 0) {
continue;
}
- // Top-left (inclusive)
- scaleCoordinates(entry.data.i32 + j, 1, 1.0 / zoomRatio, true /*clamp*/, arrayWidth,
+ scaleRegion(entry.data.i32 + j, 1.0 / zoomRatio, arrayWidth,
arrayHeight);
- // Bottom-right (exclusive): Use adjacent inclusive pixel to
- // calculate.
- entry.data.i32[j+2] -= 1;
- entry.data.i32[j+3] -= 1;
- scaleCoordinates(entry.data.i32 + j + 2, 1, 1.0 / zoomRatio, true /*clamp*/, arrayWidth,
- arrayHeight);
- entry.data.i32[j+2] += 1;
- entry.data.i32[j+3] += 1;
}
}
for (auto rect : kRectsToCorrect) {
@@ -470,6 +452,24 @@
}
}
+void ZoomRatioMapper::scaleRegion(int32_t* region, float scaleRatio,
+ int32_t arrayWidth, int32_t arrayHeight) {
+ // Top-left (inclusive)
+ scaleCoordinates(region, 1, scaleRatio, true /*clamp*/, arrayWidth,
+ arrayHeight);
+ // Bottom-right (exclusive): Use adjacent inclusive pixel to
+ // calculate.
+ region[2] -= 1;
+ region[3] -= 1;
+ scaleCoordinates(region + 2, 1, scaleRatio, true /*clamp*/, arrayWidth,
+ arrayHeight);
+ region[2] += 1;
+ region[3] += 1;
+ // Make sure bottom-right >= top-left
+ region[2] = std::max(region[0], region[2]);
+ region[3] = std::max(region[1], region[3]);
+}
+
void ZoomRatioMapper::scaleRects(int32_t* rects, int rectCount,
float scaleRatio, int32_t arrayWidth, int32_t arrayHeight) {
for (int i = 0; i < rectCount * 4; i += 4) {
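One way to see why the max() clamp at the end of scaleRegion() matters: for a very small region, scaling the two corners independently (with the inclusive/exclusive adjustment above) can leave right < left or bottom < top, and the clamp restores a valid, possibly zero-area region. A self-contained illustration of the clamp alone, with made-up coordinates:

    #include <algorithm>
    #include <cstdint>
    #include <cstdio>

    // After scaling both corners independently, rounding can invert the region
    // order for degenerate (one-pixel) regions. This clamp repairs the order.
    void clampRegionOrder(int32_t region[4]) {
        region[2] = std::max(region[0], region[2]);
        region[3] = std::max(region[1], region[3]);
    }

    int main() {
        int32_t region[4] = { 512, 384, 511, 383 };  // inverted after scaling
        clampRegionOrder(region);
        std::printf("(%d,%d)-(%d,%d)\n", region[0], region[1], region[2], region[3]);
        return 0;
    }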
diff --git a/services/camera/libcameraservice/device3/ZoomRatioMapper.h b/services/camera/libcameraservice/device3/ZoomRatioMapper.h
index b7a9e41..1aa8e78 100644
--- a/services/camera/libcameraservice/device3/ZoomRatioMapper.h
+++ b/services/camera/libcameraservice/device3/ZoomRatioMapper.h
@@ -69,6 +69,8 @@
public: // Visible for testing. Do not use concurently.
void scaleCoordinates(int32_t* coordPairs, int coordCount,
float scaleRatio, bool clamp, int32_t arrayWidth, int32_t arrayHeight);
+ void scaleRegion(int32_t* region, float scaleRatio,
+ int32_t arrayWidth, int32_t arrayHeight);
bool isValid() { return mIsValid; }
private:
diff --git a/services/camera/libcameraservice/device3/aidl/AidlCamera3Device.cpp b/services/camera/libcameraservice/device3/aidl/AidlCamera3Device.cpp
index 973bc04..99c067e 100644
--- a/services/camera/libcameraservice/device3/aidl/AidlCamera3Device.cpp
+++ b/services/camera/libcameraservice/device3/aidl/AidlCamera3Device.cpp
@@ -51,6 +51,7 @@
#include <aidl/android/hardware/camera/device/ICameraInjectionSession.h>
#include <aidlcommonsupport/NativeHandle.h>
+#include <android/binder_ibinder_platform.h>
#include <android/hardware/camera2/ICameraDeviceUser.h>
#include "utils/CameraTraces.h"
@@ -373,7 +374,8 @@
mNumPartialResults, mVendorTagId, mDeviceInfo, mPhysicalDeviceInfoMap,
mDistortionMappers, mZoomRatioMappers, mRotateAndCropMappers,
mTagMonitor, mInputStream, mOutputStreams, mSessionStatsBuilder, listener, *this,
- *this, *(mInterface), mLegacyClient, mMinExpectedDuration}, mResultMetadataQueue
+ *this, *(mInterface), mLegacyClient, mMinExpectedDuration, mIsFixedFps},
+ mResultMetadataQueue
};
for (const auto& result : results) {
@@ -414,7 +416,8 @@
mNumPartialResults, mVendorTagId, mDeviceInfo, mPhysicalDeviceInfoMap,
mDistortionMappers, mZoomRatioMappers, mRotateAndCropMappers,
mTagMonitor, mInputStream, mOutputStreams, mSessionStatsBuilder, listener, *this,
- *this, *(mInterface), mLegacyClient, mMinExpectedDuration}, mResultMetadataQueue
+ *this, *(mInterface), mLegacyClient, mMinExpectedDuration, mIsFixedFps},
+ mResultMetadataQueue
};
for (const auto& msg : msgs) {
camera3::notify(states, msg);
@@ -671,6 +674,12 @@
return p->returnStreamBuffers(buffers);
}
+::ndk::SpAIBinder AidlCamera3Device::AidlCameraDeviceCallbacks::createBinder() {
+ auto binder = BnCameraDeviceCallback::createBinder();
+ AIBinder_setInheritRt(binder.get(), /*inheritRt*/ true);
+ return binder;
+}
+
::ndk::ScopedAStatus AidlCamera3Device::returnStreamBuffers(
const std::vector<camera::device::StreamBuffer>& buffers) {
ReturnBufferStates states {
@@ -904,6 +913,7 @@
cam3stream->getOriginalFormat() : src->format);
dst.dataSpace = mapToAidlDataspace(cam3stream->isDataSpaceOverridden() ?
cam3stream->getOriginalDataSpace() : src->data_space);
+ dst.colorSpace = src->color_space;
dst.bufferSize = bufferSizes[i];
if (src->physical_camera_id != nullptr) {
diff --git a/services/camera/libcameraservice/device3/aidl/AidlCamera3Device.h b/services/camera/libcameraservice/device3/aidl/AidlCamera3Device.h
index ecf42b4..ef5bb74 100644
--- a/services/camera/libcameraservice/device3/aidl/AidlCamera3Device.h
+++ b/services/camera/libcameraservice/device3/aidl/AidlCamera3Device.h
@@ -243,6 +243,10 @@
::ndk::ScopedAStatus returnStreamBuffers(
const std::vector<
aidl::android::hardware::camera::device::StreamBuffer>& buffers) override;
+
+ protected:
+ ::ndk::SpAIBinder createBinder() override;
+
private:
wp<AidlCamera3Device> mParent = nullptr;
};
diff --git a/services/camera/libcameraservice/device3/aidl/AidlCamera3OfflineSession.cpp b/services/camera/libcameraservice/device3/aidl/AidlCamera3OfflineSession.cpp
index 336719d..8ff0b07 100644
--- a/services/camera/libcameraservice/device3/aidl/AidlCamera3OfflineSession.cpp
+++ b/services/camera/libcameraservice/device3/aidl/AidlCamera3OfflineSession.cpp
@@ -30,6 +30,7 @@
#include <utils/Trace.h>
#include <android/hardware/camera2/ICameraDeviceCallbacks.h>
+#include <android/binder_ibinder_platform.h>
#include "device3/aidl/AidlCamera3OfflineSession.h"
#include "device3/Camera3OutputStream.h"
@@ -123,7 +124,8 @@
mNumPartialResults, mVendorTagId, mDeviceInfo, mPhysicalDeviceInfoMap,
mDistortionMappers, mZoomRatioMappers, mRotateAndCropMappers,
mTagMonitor, mInputStream, mOutputStreams, mSessionStatsBuilder, listener, *this,
- *this, mBufferRecords, /*legacyClient*/ false, mMinExpectedDuration}, mResultMetadataQueue
+ *this, mBufferRecords, /*legacyClient*/ false, mMinExpectedDuration, mIsFixedFps},
+ mResultMetadataQueue
};
std::lock_guard<std::mutex> lock(mProcessCaptureResultLock);
@@ -168,7 +170,8 @@
mNumPartialResults, mVendorTagId, mDeviceInfo, mPhysicalDeviceInfoMap,
mDistortionMappers, mZoomRatioMappers, mRotateAndCropMappers,
mTagMonitor, mInputStream, mOutputStreams, mSessionStatsBuilder, listener, *this,
- *this, mBufferRecords, /*legacyClient*/ false, mMinExpectedDuration}, mResultMetadataQueue
+ *this, mBufferRecords, /*legacyClient*/ false, mMinExpectedDuration, mIsFixedFps},
+ mResultMetadataQueue
};
for (const auto& msg : msgs) {
camera3::notify(states, msg);
@@ -218,6 +221,12 @@
return p->returnStreamBuffers(buffers);
}
+::ndk::SpAIBinder AidlCamera3OfflineSession::AidlCameraDeviceCallbacks::createBinder() {
+ auto binder = BnCameraDeviceCallback::createBinder();
+ AIBinder_setInheritRt(binder.get(), /*inheritRt*/ true);
+ return binder;
+}
+
::ndk::ScopedAStatus AidlCamera3OfflineSession::returnStreamBuffers(
const std::vector<camera::device::StreamBuffer>& buffers) {
{
diff --git a/services/camera/libcameraservice/device3/aidl/AidlCamera3OfflineSession.h b/services/camera/libcameraservice/device3/aidl/AidlCamera3OfflineSession.h
index 33de2c5..d107af6 100644
--- a/services/camera/libcameraservice/device3/aidl/AidlCamera3OfflineSession.h
+++ b/services/camera/libcameraservice/device3/aidl/AidlCamera3OfflineSession.h
@@ -97,6 +97,10 @@
::ndk::ScopedAStatus returnStreamBuffers(
const std::vector<
aidl::android::hardware::camera::device::StreamBuffer>& buffers) override;
+ protected:
+
+ ::ndk::SpAIBinder createBinder() override;
+
private:
wp<AidlCamera3OfflineSession> mParent = nullptr;
};
diff --git a/services/camera/libcameraservice/device3/hidl/HidlCamera3Device.cpp b/services/camera/libcameraservice/device3/hidl/HidlCamera3Device.cpp
index 4bb426c..9557692 100644
--- a/services/camera/libcameraservice/device3/hidl/HidlCamera3Device.cpp
+++ b/services/camera/libcameraservice/device3/hidl/HidlCamera3Device.cpp
@@ -363,7 +363,7 @@
mNumPartialResults, mVendorTagId, mDeviceInfo, mPhysicalDeviceInfoMap,
mDistortionMappers, mZoomRatioMappers, mRotateAndCropMappers,
mTagMonitor, mInputStream, mOutputStreams, mSessionStatsBuilder, listener, *this, *this,
- *mInterface, mLegacyClient, mMinExpectedDuration}, mResultMetadataQueue
+ *mInterface, mLegacyClient, mMinExpectedDuration, mIsFixedFps}, mResultMetadataQueue
};
//HidlCaptureOutputStates hidlStates {
@@ -425,7 +425,7 @@
mNumPartialResults, mVendorTagId, mDeviceInfo, mPhysicalDeviceInfoMap,
mDistortionMappers, mZoomRatioMappers, mRotateAndCropMappers,
mTagMonitor, mInputStream, mOutputStreams, mSessionStatsBuilder, listener, *this, *this,
- *mInterface, mLegacyClient, mMinExpectedDuration}, mResultMetadataQueue
+ *mInterface, mLegacyClient, mMinExpectedDuration, mIsFixedFps}, mResultMetadataQueue
};
for (const auto& result : results) {
@@ -472,7 +472,7 @@
mNumPartialResults, mVendorTagId, mDeviceInfo, mPhysicalDeviceInfoMap,
mDistortionMappers, mZoomRatioMappers, mRotateAndCropMappers,
mTagMonitor, mInputStream, mOutputStreams, mSessionStatsBuilder, listener, *this, *this,
- *mInterface, mLegacyClient, mMinExpectedDuration}, mResultMetadataQueue
+ *mInterface, mLegacyClient, mMinExpectedDuration, mIsFixedFps}, mResultMetadataQueue
};
for (const auto& msg : msgs) {
camera3::notify(states, msg);
diff --git a/services/camera/libcameraservice/device3/hidl/HidlCamera3OfflineSession.cpp b/services/camera/libcameraservice/device3/hidl/HidlCamera3OfflineSession.cpp
index 5c97f0e..2b4f8a1 100644
--- a/services/camera/libcameraservice/device3/hidl/HidlCamera3OfflineSession.cpp
+++ b/services/camera/libcameraservice/device3/hidl/HidlCamera3OfflineSession.cpp
@@ -105,7 +105,8 @@
mNumPartialResults, mVendorTagId, mDeviceInfo, mPhysicalDeviceInfoMap,
mDistortionMappers, mZoomRatioMappers, mRotateAndCropMappers,
mTagMonitor, mInputStream, mOutputStreams, mSessionStatsBuilder, listener, *this, *this,
- mBufferRecords, /*legacyClient*/ false, mMinExpectedDuration}, mResultMetadataQueue
+ mBufferRecords, /*legacyClient*/ false, mMinExpectedDuration, mIsFixedFps},
+ mResultMetadataQueue
};
std::lock_guard<std::mutex> lock(mProcessCaptureResultLock);
@@ -145,7 +146,8 @@
mNumPartialResults, mVendorTagId, mDeviceInfo, mPhysicalDeviceInfoMap,
mDistortionMappers, mZoomRatioMappers, mRotateAndCropMappers,
mTagMonitor, mInputStream, mOutputStreams, mSessionStatsBuilder, listener, *this, *this,
- mBufferRecords, /*legacyClient*/ false, mMinExpectedDuration}, mResultMetadataQueue
+ mBufferRecords, /*legacyClient*/ false, mMinExpectedDuration, mIsFixedFps},
+ mResultMetadataQueue
};
std::lock_guard<std::mutex> lock(mProcessCaptureResultLock);
@@ -180,7 +182,8 @@
mNumPartialResults, mVendorTagId, mDeviceInfo, mPhysicalDeviceInfoMap,
mDistortionMappers, mZoomRatioMappers, mRotateAndCropMappers,
mTagMonitor, mInputStream, mOutputStreams, mSessionStatsBuilder, listener, *this, *this,
- mBufferRecords, /*legacyClient*/ false, mMinExpectedDuration}, mResultMetadataQueue
+ mBufferRecords, /*legacyClient*/ false, mMinExpectedDuration, mIsFixedFps},
+ mResultMetadataQueue
};
for (const auto& msg : msgs) {
camera3::notify(states, msg);
diff --git a/services/camera/libcameraservice/hidl/VndkVersionMetadataTags.h b/services/camera/libcameraservice/hidl/VndkVersionMetadataTags.h
index 74b3700..a071989 100644
--- a/services/camera/libcameraservice/hidl/VndkVersionMetadataTags.h
+++ b/services/camera/libcameraservice/hidl/VndkVersionMetadataTags.h
@@ -73,6 +73,10 @@
ANDROID_REQUEST_RECOMMENDED_TEN_BIT_DYNAMIC_RANGE_PROFILE,
ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES,
} },
+ {34, {
+ ANDROID_CONTROL_AVAILABLE_SETTINGS_OVERRIDES,
+ ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP,
+ } },
};
/**
@@ -89,4 +93,7 @@
ANDROID_SENSOR_PIXEL_MODE,
ANDROID_SENSOR_RAW_BINNING_FACTOR_USED,
} },
+ {34, {
+ ANDROID_CONTROL_SETTINGS_OVERRIDE,
+ } },
};
diff --git a/services/camera/libcameraservice/libcameraservice_fuzzer/Android.bp b/services/camera/libcameraservice/libcameraservice_fuzzer/Android.bp
index d909624..b1bf41e 100644
--- a/services/camera/libcameraservice/libcameraservice_fuzzer/Android.bp
+++ b/services/camera/libcameraservice/libcameraservice_fuzzer/Android.bp
@@ -52,7 +52,7 @@
"android.hardware.camera.provider@2.5",
"android.hardware.camera.provider@2.6",
"android.hardware.camera.provider@2.7",
- "android.hardware.camera.provider-V1-ndk",
+ "android.hardware.camera.provider-V2-ndk",
"android.hardware.camera.device@1.0",
"android.hardware.camera.device@3.2",
"android.hardware.camera.device@3.3",
diff --git a/services/camera/libcameraservice/tests/Android.bp b/services/camera/libcameraservice/tests/Android.bp
index 4d7798c..5e2a3fb 100644
--- a/services/camera/libcameraservice/tests/Android.bp
+++ b/services/camera/libcameraservice/tests/Android.bp
@@ -49,7 +49,7 @@
"android.hardware.camera.provider@2.5",
"android.hardware.camera.provider@2.6",
"android.hardware.camera.provider@2.7",
- "android.hardware.camera.provider-V1-ndk",
+ "android.hardware.camera.provider-V2-ndk",
"android.hardware.camera.device@1.0",
"android.hardware.camera.device@3.2",
"android.hardware.camera.device@3.4",
diff --git a/services/camera/libcameraservice/tests/ZoomRatioTest.cpp b/services/camera/libcameraservice/tests/ZoomRatioTest.cpp
index ff7aafd..badd47a 100644
--- a/services/camera/libcameraservice/tests/ZoomRatioTest.cpp
+++ b/services/camera/libcameraservice/tests/ZoomRatioTest.cpp
@@ -160,11 +160,9 @@
false/*hasZoomRatioRange*/, zoomRatioRange,
usePreCorrectArray));
- size_t index = 0;
int32_t width = testActiveArraySize[2];
int32_t height = testActiveArraySize[3];
if (usePreCorrectArray) {
- index = 1;
width = testPreCorrActiveArraySize[2];
height = testPreCorrActiveArraySize[3];
}
@@ -254,6 +252,19 @@
for (size_t i = 0; i < coords.size(); i++) {
EXPECT_LE(std::abs(coords[i] - expectedZoomOutCoords[i]), kMaxAllowedPixelError);
}
+
+ // Verify region zoom scaling doesn't generate invalid metering region
+ // (width < 0, or height < 0)
+ std::array<float, 3> scaleRatios = {10.0f, 1.0f, 0.1f};
+ for (float scaleRatio : scaleRatios) {
+ for (size_t i = 0; i < originalCoords.size(); i+= 2) {
+ int32_t coordinates[] = {originalCoords[i], originalCoords[i+1],
+ originalCoords[i], originalCoords[i+1]};
+ mapper.scaleRegion(coordinates, scaleRatio, width, height);
+ EXPECT_LE(coordinates[0], coordinates[2]);
+ EXPECT_LE(coordinates[1], coordinates[3]);
+ }
+ }
}
TEST(ZoomRatioTest, scaleCoordinatesTest) {
diff --git a/services/camera/libcameraservice/utils/SessionConfigurationUtils.cpp b/services/camera/libcameraservice/utils/SessionConfigurationUtils.cpp
index ed490a8..f9afd41 100644
--- a/services/camera/libcameraservice/utils/SessionConfigurationUtils.cpp
+++ b/services/camera/libcameraservice/utils/SessionConfigurationUtils.cpp
@@ -282,6 +282,61 @@
}
}
+bool deviceReportsColorSpaces(const CameraMetadata& staticInfo) {
+ camera_metadata_ro_entry_t entry = staticInfo.find(ANDROID_REQUEST_AVAILABLE_CAPABILITIES);
+ for (size_t i = 0; i < entry.count; ++i) {
+ uint8_t capability = entry.data.u8[i];
+ if (capability == ANDROID_REQUEST_AVAILABLE_CAPABILITIES_COLOR_SPACE_PROFILES) {
+ return true;
+ }
+ }
+
+ return false;
+}
+
+bool isColorSpaceSupported(int32_t colorSpace, int32_t format, android_dataspace dataSpace,
+ int64_t dynamicRangeProfile, const CameraMetadata& staticInfo) {
+ int64_t colorSpace64 = colorSpace;
+ int64_t format64 = format;
+
+ // Translate HAL format + data space to public format
+ if (format == HAL_PIXEL_FORMAT_BLOB && dataSpace == HAL_DATASPACE_V0_JFIF) {
+ format64 = 0x100; // JPEG
+ } else if (format == HAL_PIXEL_FORMAT_BLOB
+ && dataSpace == static_cast<android_dataspace>(HAL_DATASPACE_HEIF)) {
+ format64 = 0x48454946; // HEIC
+ } else if (format == HAL_PIXEL_FORMAT_BLOB
+ && dataSpace == static_cast<android_dataspace>(HAL_DATASPACE_DYNAMIC_DEPTH)) {
+ format64 = 0x69656963; // DEPTH_JPEG
+ } else if (format == HAL_PIXEL_FORMAT_BLOB && dataSpace == HAL_DATASPACE_DEPTH) {
+ return false; // DEPTH_POINT_CLOUD, not applicable
+ } else if (format == HAL_PIXEL_FORMAT_Y16 && dataSpace == HAL_DATASPACE_DEPTH) {
+ return false; // DEPTH16, not applicable
+ } else if (format == HAL_PIXEL_FORMAT_RAW16 && dataSpace == HAL_DATASPACE_DEPTH) {
+ return false; // RAW_DEPTH, not applicable
+ } else if (format == HAL_PIXEL_FORMAT_RAW10 && dataSpace == HAL_DATASPACE_DEPTH) {
+ return false; // RAW_DEPTH10, not applicable
+ }
+
+ camera_metadata_ro_entry_t entry =
+ staticInfo.find(ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP);
+ for (size_t i = 0; i < entry.count; i += 3) {
+ bool isFormatCompatible = (format64 == entry.data.i64[i + 1]);
+ bool isDynamicProfileCompatible =
+ (dynamicRangeProfile & entry.data.i64[i + 2]) != 0;
+
+ if (colorSpace64 == entry.data.i64[i]
+ && isFormatCompatible
+ && isDynamicProfileCompatible) {
+ return true;
+ }
+ }
+
+ ALOGE("Color space %d, image format %" PRId64 ", and dynamic range 0x%" PRIx64
+ " combination not found", colorSpace, format64, dynamicRangeProfile);
+ return false;
+}
+
bool isPublicFormat(int32_t format)
{
switch(format) {
@@ -336,7 +391,8 @@
sp<Surface>& surface, const sp<IGraphicBufferProducer>& gbp,
const String8 &logicalCameraId, const CameraMetadata &physicalCameraMetadata,
const std::vector<int32_t> &sensorPixelModesUsed, int64_t dynamicRangeProfile,
- int64_t streamUseCase, int timestampBase, int mirrorMode) {
+ int64_t streamUseCase, int timestampBase, int mirrorMode,
+ int32_t colorSpace) {
// bufferProducer must be non-null
if (gbp == nullptr) {
String8 msg = String8::format("Camera %s: Surface is NULL", logicalCameraId.string());
@@ -450,6 +506,16 @@
ALOGE("%s: %s", __FUNCTION__, msg.string());
return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
}
+ if (colorSpace != ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED &&
+ SessionConfigurationUtils::deviceReportsColorSpaces(physicalCameraMetadata) &&
+ !SessionConfigurationUtils::isColorSpaceSupported(colorSpace, format, dataSpace,
+ dynamicRangeProfile, physicalCameraMetadata)) {
+ String8 msg = String8::format("Camera %s: Color space %d not supported, failed to "
+ "create output stream (pixel format %d dynamic range profile %" PRId64 ")",
+ logicalCameraId.string(), colorSpace, format, dynamicRangeProfile);
+ ALOGE("%s: %s", __FUNCTION__, msg.string());
+ return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
+ }
if (!SessionConfigurationUtils::isStreamUseCaseSupported(streamUseCase,
physicalCameraMetadata)) {
String8 msg = String8::format("Camera %s: stream use case %" PRId64 " not supported,"
@@ -483,6 +549,7 @@
streamInfo.streamUseCase = streamUseCase;
streamInfo.timestampBase = timestampBase;
streamInfo.mirrorMode = mirrorMode;
+ streamInfo.colorSpace = colorSpace;
return binder::Status::ok();
}
if (width != streamInfo.width) {
@@ -538,6 +605,7 @@
camera3::Camera3OutputStream::applyZSLUsageQuirk(streamInfo.format, &u);
stream->usage = AidlCamera3Device::mapToAidlConsumerUsage(u);
stream->dataSpace = AidlCamera3Device::mapToAidlDataspace(streamInfo.dataSpace);
+ stream->colorSpace = streamInfo.colorSpace;
stream->rotation = AidlCamera3Device::mapToAidlStreamRotation(rotation);
stream->id = -1; // Invalid stream id
stream->physicalCameraId = std::string(physicalId.string());
@@ -635,6 +703,7 @@
String8 physicalCameraId = String8(it.getPhysicalCameraId());
int64_t dynamicRangeProfile = it.getDynamicRangeProfile();
+ int32_t colorSpace = it.getColorSpace();
std::vector<int32_t> sensorPixelModesUsed = it.getSensorPixelModesUsed();
const CameraMetadata &physicalDeviceInfo = getMetadata(physicalCameraId,
overrideForPerfClass);
@@ -693,7 +762,7 @@
sp<Surface> surface;
res = createSurfaceFromGbp(streamInfo, isStreamInfoValid, surface, bufferProducer,
logicalCameraId, metadataChosen, sensorPixelModesUsed, dynamicRangeProfile,
- streamUseCase, timestampBase, mirrorMode);
+ streamUseCase, timestampBase, mirrorMode, colorSpace);
if (!res.isOk())
return res;
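The color space check added above walks ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP, which is laid out as flat int64 triples of (color space, public image format, dynamic range profile bitmask). A standalone sketch of that table walk; the table contents and constants below are placeholders, not real metadata values:

    #include <cstdint>
    #include <cstdio>

    // Walk a (colorSpace, imageFormat, dynamicRangeProfileMask) triple table and
    // report whether the requested combination is advertised.
    bool colorSpaceSupported(const int64_t* table, size_t count,
                             int64_t colorSpace, int64_t format, int64_t profile) {
        for (size_t i = 0; i + 2 < count; i += 3) {
            if (table[i] == colorSpace && table[i + 1] == format &&
                    (table[i + 2] & profile) != 0) {
                return true;
            }
        }
        return false;
    }

    int main() {
        // Hypothetical map entry: color space 0 supports format 0x100 (JPEG)
        // with dynamic range profile bit 0x1.
        const int64_t map[] = { 0, 0x100, 0x1 };
        std::printf("%d\n", colorSpaceSupported(map, 3, 0, 0x100, 0x1));  // 1
        std::printf("%d\n", colorSpaceSupported(map, 3, 0, 0x23,  0x1));  // 0
        return 0;
    }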
diff --git a/services/camera/libcameraservice/utils/SessionConfigurationUtils.h b/services/camera/libcameraservice/utils/SessionConfigurationUtils.h
index a127c7b..264045e 100644
--- a/services/camera/libcameraservice/utils/SessionConfigurationUtils.h
+++ b/services/camera/libcameraservice/utils/SessionConfigurationUtils.h
@@ -98,7 +98,8 @@
sp<Surface>& surface, const sp<IGraphicBufferProducer>& gbp,
const String8 &logicalCameraId, const CameraMetadata &physicalCameraMetadata,
const std::vector<int32_t> &sensorPixelModesUsed, int64_t dynamicRangeProfile,
- int64_t streamUseCase, int timestampBase, int mirrorMode);
+ int64_t streamUseCase, int timestampBase, int mirrorMode,
+ int32_t colorSpace);
//check if format is 10-bit output compatible
bool is10bitCompatibleFormat(int32_t format);
@@ -109,6 +110,11 @@
// Check if the device supports a given dynamicRangeProfile
bool isDynamicRangeProfileSupported(int64_t dynamicRangeProfile, const CameraMetadata& staticMeta);
+bool deviceReportsColorSpaces(const CameraMetadata& staticMeta);
+
+bool isColorSpaceSupported(int32_t colorSpace, int32_t format, android_dataspace dataSpace,
+ int64_t dynamicRangeProfile, const CameraMetadata& staticMeta);
+
bool isStreamUseCaseSupported(int64_t streamUseCase, const CameraMetadata &deviceInfo);
void mapStreamInfo(const OutputStreamInfo &streamInfo,
diff --git a/services/camera/libcameraservice/utils/SessionConfigurationUtilsHost.cpp b/services/camera/libcameraservice/utils/SessionConfigurationUtilsHost.cpp
index 1efdc60..250ac63 100644
--- a/services/camera/libcameraservice/utils/SessionConfigurationUtilsHost.cpp
+++ b/services/camera/libcameraservice/utils/SessionConfigurationUtilsHost.cpp
@@ -55,6 +55,10 @@
return ANDROID_LENS_INTRINSIC_CALIBRATION_MAXIMUM_RESOLUTION;
case ANDROID_LENS_DISTORTION:
return ANDROID_LENS_DISTORTION_MAXIMUM_RESOLUTION;
+ case ANDROID_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE:
+ return ANDROID_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE_MAXIMUM_RESOLUTION;
+ case ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE:
+ return ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE_MAXIMUM_RESOLUTION;
default:
ALOGE("%s: Tag %d doesn't have a maximum resolution counterpart", __FUNCTION__,
defaultTag);
diff --git a/services/mediaextractor/Android.bp b/services/mediaextractor/Android.bp
index 85ce110..acafe56 100644
--- a/services/mediaextractor/Android.bp
+++ b/services/mediaextractor/Android.bp
@@ -74,6 +74,9 @@
arm64: {
src: "seccomp_policy/mediaextractor-arm64.policy",
},
+ riscv64: {
+ src: "seccomp_policy/mediaextractor-riscv64.policy",
+ },
x86: {
src: "seccomp_policy/mediaextractor-x86.policy",
},
diff --git a/services/mediaextractor/seccomp_policy/mediaextractor-riscv64.policy b/services/mediaextractor/seccomp_policy/mediaextractor-riscv64.policy
new file mode 100644
index 0000000..df143dd
--- /dev/null
+++ b/services/mediaextractor/seccomp_policy/mediaextractor-riscv64.policy
@@ -0,0 +1,48 @@
+# Organized by frequency of system calls - in descending order for
+# best performance.
+ioctl: 1
+futex: 1
+prctl: 1
+write: 1
+getpriority: 1
+close: 1
+dup: 1
+mmap: 1
+munmap: 1
+openat: 1
+mprotect: 1
+madvise: 1
+getuid: 1
+fstat: 1
+fstatfs: 1
+read: 1
+setpriority: 1
+sigaltstack: 1
+clone: 1
+sched_setscheduler: 1
+lseek: 1
+newfstatat: 1
+faccessat: 1
+restart_syscall: 1
+exit: 1
+exit_group: 1
+rt_sigreturn: 1
+rt_sigprocmask: 1
+getrlimit: 1
+nanosleep: 1
+getrandom: 1
+timer_create: 1
+timer_settime: 1
+timer_delete: 1
+
+# for dynamically loading extractors
+getdents64: 1
+readlinkat: 1
+pread64: 1
+mremap: 1
+
+# Required by Sanitizers
+sched_yield: 1
+
+@include /apex/com.android.media/etc/seccomp_policy/crash_dump.riscv64.policy
+@include /apex/com.android.media/etc/seccomp_policy/code_coverage.riscv64.policy
diff --git a/services/mediametrics/Android.bp b/services/mediametrics/Android.bp
index 11534bb..e8d3f6e 100644
--- a/services/mediametrics/Android.bp
+++ b/services/mediametrics/Android.bp
@@ -169,7 +169,7 @@
"libmemunreachable",
"libprotobuf-cpp-lite",
"libstagefright_foundation",
- "libstatslog",
+ "libstats_media_metrics",
"libstatspull",
"libstatssocket",
"libutils",
@@ -177,6 +177,7 @@
],
export_shared_lib_headers: [
+ "libstats_media_metrics",
"libstatspull",
"libstatssocket",
],
@@ -200,3 +201,33 @@
"libaudioutils_headers",
],
}
+
+cc_library {
+ name: "libstats_media_metrics",
+ generated_sources: ["stats_media_metrics.cpp"],
+ generated_headers: ["stats_media_metrics.h"],
+ export_generated_headers: ["stats_media_metrics.h"],
+ shared_libs: [
+ "libcutils",
+ "libstatspull",
+ "libstatssocket",
+ ],
+}
+
+genrule {
+ name: "stats_media_metrics.h",
+ tools: ["stats-log-api-gen"],
+ cmd: "$(location stats-log-api-gen) --header $(genDir)/stats_media_metrics.h --module media_metrics --namespace android,stats,media_metrics",
+ out: [
+ "stats_media_metrics.h",
+ ],
+}
+
+genrule {
+ name: "stats_media_metrics.cpp",
+ tools: ["stats-log-api-gen"],
+ cmd: "$(location stats-log-api-gen) --cpp $(genDir)/stats_media_metrics.cpp --module media_metrics --namespace android,stats,media_metrics --importHeader stats_media_metrics.h",
+ out: [
+ "stats_media_metrics.cpp",
+ ],
+}
diff --git a/services/mediametrics/AudioAnalytics.cpp b/services/mediametrics/AudioAnalytics.cpp
index 12593ff..119bb6c 100644
--- a/services/mediametrics/AudioAnalytics.cpp
+++ b/services/mediametrics/AudioAnalytics.cpp
@@ -24,7 +24,7 @@
#include <aaudio/AAudio.h> // error codes
#include <audio_utils/clock.h> // clock conversions
#include <cutils/properties.h>
-#include <statslog.h> // statsd
+#include <stats_media_metrics.h> // statsd
#include <system/audio.h>
#include "AudioTypes.h" // string to int conversions
@@ -292,7 +292,7 @@
int result = 0;
#ifdef STATSD_ENABLE
- result = android::util::stats_write(args...);
+ result = stats::media_metrics::stats_write(args...);
#endif
return result;
}
@@ -308,7 +308,7 @@
std::stringstream ss;
#ifdef STATSD_ENABLE
- result = android::util::stats_write(args...);
+ result = stats::media_metrics::stats_write(args...);
ss << "result:" << result;
#endif
ss << " { ";
@@ -607,7 +607,7 @@
const int atom_status = types::lookup<types::STATUS, int32_t>(statusString);
// currently we only send create status events.
- const int32_t event = android::util::
+ const int32_t event = stats::media_metrics::
MEDIAMETRICS_AUDIO_RECORD_STATUS_REPORTED__EVENT__AUDIO_RECORD_EVENT_CREATE;
// The following fields should all be present in a create event.
@@ -647,7 +647,7 @@
__func__, AMEDIAMETRICS_KEY_PREFIX_AUDIO_RECORD, AMEDIAMETRICS_PROP_SAMPLERATE);
const auto [ result, str ] = sendToStatsd(AudioRecordStatusFields,
- CONDITION(android::util::MEDIAMETRICS_AUDIORECORDSTATUS_REPORTED)
+ CONDITION(stats::media_metrics::MEDIAMETRICS_AUDIORECORDSTATUS_REPORTED)
, atom_status
, message.c_str()
, subCode
@@ -661,7 +661,7 @@
, sampleRate
);
ALOGV("%s: statsd %s", __func__, str.c_str());
- mStatsdLog->log(android::util::MEDIAMETRICS_AUDIORECORDSTATUS_REPORTED, str);
+ mStatsdLog->log(stats::media_metrics::MEDIAMETRICS_AUDIORECORDSTATUS_REPORTED, str);
return true;
}
return false;
@@ -679,7 +679,7 @@
const int atom_status = types::lookup<types::STATUS, int32_t>(statusString);
// currently we only send create status events.
- const int32_t event = android::util::
+ const int32_t event = stats::media_metrics::
MEDIAMETRICS_AUDIO_TRACK_STATUS_REPORTED__EVENT__AUDIO_TRACK_EVENT_CREATE;
// The following fields should all be present in a create event.
@@ -734,7 +734,7 @@
__func__,
AMEDIAMETRICS_KEY_PREFIX_AUDIO_TRACK, AMEDIAMETRICS_PROP_PLAYBACK_PITCH);
const auto [ result, str ] = sendToStatsd(AudioTrackStatusFields,
- CONDITION(android::util::MEDIAMETRICS_AUDIOTRACKSTATUS_REPORTED)
+ CONDITION(stats::media_metrics::MEDIAMETRICS_AUDIOTRACKSTATUS_REPORTED)
, atom_status
, message.c_str()
, subCode
@@ -751,7 +751,7 @@
, (float)pitch
);
ALOGV("%s: statsd %s", __func__, str.c_str());
- mStatsdLog->log(android::util::MEDIAMETRICS_AUDIOTRACKSTATUS_REPORTED, str);
+ mStatsdLog->log(stats::media_metrics::MEDIAMETRICS_AUDIOTRACKSTATUS_REPORTED, str);
return true;
}
return false;
@@ -788,15 +788,9 @@
int32_t frameCount = 0;
mAudioAnalytics.mAnalyticsState->timeMachine().get(
key, AMEDIAMETRICS_PROP_FRAMECOUNT, &frameCount);
- std::string inputDevicePairs;
- mAudioAnalytics.mAnalyticsState->timeMachine().get(
- key, AMEDIAMETRICS_PROP_INPUTDEVICES, &inputDevicePairs);
int32_t intervalCount = 0;
mAudioAnalytics.mAnalyticsState->timeMachine().get(
key, AMEDIAMETRICS_PROP_INTERVALCOUNT, &intervalCount);
- std::string outputDevicePairs;
- mAudioAnalytics.mAnalyticsState->timeMachine().get(
- key, AMEDIAMETRICS_PROP_OUTPUTDEVICES, &outputDevicePairs);
int32_t sampleRate = 0;
mAudioAnalytics.mAnalyticsState->timeMachine().get(
key, AMEDIAMETRICS_PROP_SAMPLERATE, &sampleRate);
@@ -804,53 +798,16 @@
mAudioAnalytics.mAnalyticsState->timeMachine().get(
key, AMEDIAMETRICS_PROP_FLAGS, &flags);
- // We may have several devices.
- // Accumulate the bit flags for input and output devices.
- std::stringstream oss;
- long_enum_type_t outputDeviceBits{};
- { // compute outputDevices
- const auto devaddrvec = stringutils::getDeviceAddressPairs(outputDevicePairs);
- for (const auto& [device, addr] : devaddrvec) {
- if (oss.tellp() > 0) oss << "|"; // delimit devices with '|'.
- oss << device;
- outputDeviceBits += types::lookup<types::OUTPUT_DEVICE, long_enum_type_t>(device);
- }
- }
- const std::string outputDevices = oss.str();
-
- std::stringstream iss;
- long_enum_type_t inputDeviceBits{};
- { // compute inputDevices
- const auto devaddrvec = stringutils::getDeviceAddressPairs(inputDevicePairs);
- for (const auto& [device, addr] : devaddrvec) {
- if (iss.tellp() > 0) iss << "|"; // delimit devices with '|'.
- iss << device;
- inputDeviceBits += types::lookup<types::INPUT_DEVICE, long_enum_type_t>(device);
- }
- }
- const std::string inputDevices = iss.str();
-
- // Get connected device name if from bluetooth.
- bool isBluetooth = false;
-
- std::string inputDeviceNames; // not filled currently.
- std::string outputDeviceNames;
- if (outputDevices.find("AUDIO_DEVICE_OUT_BLUETOOTH") != std::string::npos) {
- isBluetooth = true;
- outputDeviceNames = SUPPRESSED;
-#if 0 // TODO(b/161554630) sanitize name
- mAudioAnalytics.mAnalyticsState->timeMachine().get(
- "audio.device.bt_a2dp", AMEDIAMETRICS_PROP_NAME, &outputDeviceNames);
- // Remove | if present
- stringutils::replace(outputDeviceNames, "|", '?');
- if (outputDeviceNames.size() > STATSD_DEVICE_NAME_MAX_LENGTH) {
- outputDeviceNames.resize(STATSD_DEVICE_NAME_MAX_LENGTH); // truncate
- }
-#endif
- }
-
switch (itemType) {
case RECORD: {
+ std::string inputDevicePairs;
+ mAudioAnalytics.mAnalyticsState->timeMachine().get(
+ key, AMEDIAMETRICS_PROP_INPUTDEVICES, &inputDevicePairs);
+
+ const auto [ inputDeviceStatsd, inputDevices ] =
+ stringutils::parseInputDevicePairs(inputDevicePairs);
+ const std::string inputDeviceNames; // not filled currently.
+
std::string callerName;
const bool clientCalled = mAudioAnalytics.mAnalyticsState->timeMachine().get(
key, AMEDIAMETRICS_PROP_CALLERNAME, &callerName) == OK;
@@ -886,7 +843,7 @@
LOG(LOG_LEVEL) << "key:" << key
<< " id:" << id
- << " inputDevices:" << inputDevices << "(" << inputDeviceBits
+ << " inputDevices:" << inputDevices << "(" << inputDeviceStatsd
<< ") inputDeviceNames:" << inputDeviceNames
<< " deviceTimeNs:" << deviceTimeNs
<< " encoding:" << encoding << "(" << encodingForStats
@@ -903,8 +860,8 @@
if (clientCalled // only log if client app called AudioRecord.
&& mAudioAnalytics.mDeliverStatistics) {
const auto [ result, str ] = sendToStatsd(AudioRecordDeviceUsageFields,
- CONDITION(android::util::MEDIAMETRICS_AUDIORECORDDEVICEUSAGE_REPORTED)
- , ENUM_EXTRACT(inputDeviceBits)
+ CONDITION(stats::media_metrics::MEDIAMETRICS_AUDIORECORDDEVICEUSAGE_REPORTED)
+ , ENUM_EXTRACT(inputDeviceStatsd)
, inputDeviceNames.c_str()
, deviceTimeNs
, ENUM_EXTRACT(encodingForStats)
@@ -921,7 +878,7 @@
);
ALOGV("%s: statsd %s", __func__, str.c_str());
mAudioAnalytics.mStatsdLog->log(
- android::util::MEDIAMETRICS_AUDIORECORDDEVICEUSAGE_REPORTED, str);
+ stats::media_metrics::MEDIAMETRICS_AUDIORECORDDEVICEUSAGE_REPORTED, str);
}
} break;
case THREAD: {
@@ -933,18 +890,35 @@
key, AMEDIAMETRICS_PROP_UNDERRUN, &underrun);
const bool isInput = types::isInputThreadType(type);
+
+ // get device information
+ std::string devicePairs;
+ std::string deviceStatsd;
+ std::string devices;
+ std::string deviceNames;
+ if (isInput) {
+ // Note we get the "last" device which is the one associated with group.
+ item->get(AMEDIAMETRICS_PROP_PREFIX_LAST AMEDIAMETRICS_PROP_INPUTDEVICES,
+ &devicePairs);
+ std::tie(deviceStatsd, devices) = stringutils::parseInputDevicePairs(devicePairs);
+ } else {
+ // Note we get the "last" device which is the one associated with group.
+ item->get(AMEDIAMETRICS_PROP_PREFIX_LAST AMEDIAMETRICS_PROP_OUTPUTDEVICES,
+ &devicePairs);
+ std::tie(deviceStatsd, devices) = stringutils::parseOutputDevicePairs(devicePairs);
+ deviceNames = mAudioAnalytics.getDeviceNamesFromOutputDevices(devices);
+ }
+
const auto encodingForStats = types::lookup<types::ENCODING, short_enum_type_t>(encoding);
const auto flagsForStats =
(isInput ? types::lookup<types::INPUT_FLAG, short_enum_type_t>(flags)
: types::lookup<types::OUTPUT_FLAG, short_enum_type_t>(flags));
const auto typeForStats = types::lookup<types::THREAD_TYPE, short_enum_type_t>(type);
- LOG(LOG_LEVEL) << "key:" << key
+ LOG(LOG_LEVEL) << "key:" << key
<< " id:" << id
- << " inputDevices:" << inputDevices << "(" << inputDeviceBits
- << ") outputDevices:" << outputDevices << "(" << outputDeviceBits
- << ") inputDeviceNames:" << inputDeviceNames
- << " outputDeviceNames:" << outputDeviceNames
+ << " devices:" << devices << "(" << deviceStatsd
+ << ") deviceNames:" << deviceNames
<< " deviceTimeNs:" << deviceTimeNs
<< " encoding:" << encoding << "(" << encodingForStats
<< ") frameCount:" << frameCount
@@ -956,9 +930,9 @@
<< ")";
if (mAudioAnalytics.mDeliverStatistics) {
const auto [ result, str ] = sendToStatsd(AudioThreadDeviceUsageFields,
- CONDITION(android::util::MEDIAMETRICS_AUDIOTHREADDEVICEUSAGE_REPORTED)
- , isInput ? ENUM_EXTRACT(inputDeviceBits) : ENUM_EXTRACT(outputDeviceBits)
- , isInput ? inputDeviceNames.c_str() : outputDeviceNames.c_str()
+ CONDITION(stats::media_metrics::MEDIAMETRICS_AUDIOTHREADDEVICEUSAGE_REPORTED)
+ , ENUM_EXTRACT(deviceStatsd)
+ , deviceNames.c_str()
, deviceTimeNs
, ENUM_EXTRACT(encodingForStats)
, frameCount
@@ -970,10 +944,19 @@
);
ALOGV("%s: statsd %s", __func__, str.c_str());
mAudioAnalytics.mStatsdLog->log(
- android::util::MEDIAMETRICS_AUDIOTHREADDEVICEUSAGE_REPORTED, str);
+ stats::media_metrics::MEDIAMETRICS_AUDIOTHREADDEVICEUSAGE_REPORTED, str);
}
} break;
case TRACK: {
+ std::string outputDevicePairs;
+ mAudioAnalytics.mAnalyticsState->timeMachine().get(
+ key, AMEDIAMETRICS_PROP_OUTPUTDEVICES, &outputDevicePairs);
+
+ const auto [ outputDeviceStatsd, outputDevices ] =
+ stringutils::parseOutputDevicePairs(outputDevicePairs);
+ const std::string outputDeviceNames =
+ mAudioAnalytics.getDeviceNamesFromOutputDevices(outputDevices);
+
std::string callerName;
const bool clientCalled = mAudioAnalytics.mAnalyticsState->timeMachine().get(
key, AMEDIAMETRICS_PROP_CALLERNAME, &callerName) == OK;
@@ -1041,7 +1024,7 @@
LOG(LOG_LEVEL) << "key:" << key
<< " id:" << id
- << " outputDevices:" << outputDevices << "(" << outputDeviceBits
+ << " outputDevices:" << outputDevices << "(" << outputDeviceStatsd
<< ") outputDeviceNames:" << outputDeviceNames
<< " deviceTimeNs:" << deviceTimeNs
<< " encoding:" << encoding << "(" << encodingForStats
@@ -1067,8 +1050,8 @@
if (clientCalled // only log if client app called AudioTracks
&& mAudioAnalytics.mDeliverStatistics) {
const auto [ result, str ] = sendToStatsd(AudioTrackDeviceUsageFields,
- CONDITION(android::util::MEDIAMETRICS_AUDIOTRACKDEVICEUSAGE_REPORTED)
- , ENUM_EXTRACT(outputDeviceBits)
+ CONDITION(stats::media_metrics::MEDIAMETRICS_AUDIOTRACKDEVICEUSAGE_REPORTED)
+ , ENUM_EXTRACT(outputDeviceStatsd)
, outputDeviceNames.c_str()
, deviceTimeNs
, ENUM_EXTRACT(encodingForStats)
@@ -1091,15 +1074,10 @@
);
ALOGV("%s: statsd %s", __func__, str.c_str());
mAudioAnalytics.mStatsdLog->log(
- android::util::MEDIAMETRICS_AUDIOTRACKDEVICEUSAGE_REPORTED, str);
+ stats::media_metrics::MEDIAMETRICS_AUDIOTRACKDEVICEUSAGE_REPORTED, str);
}
} break;
}
-
- // Report this as needed.
- if (isBluetooth) {
- // report this for Bluetooth
- }
}
// DeviceConnection helper class.
@@ -1158,7 +1136,7 @@
const long_enum_type_t inputDeviceBits{};
const auto [ result, str ] = sendToStatsd(AudioDeviceConnectionFields,
- CONDITION(android::util::MEDIAMETRICS_AUDIODEVICECONNECTION_REPORTED)
+ CONDITION(stats::media_metrics::MEDIAMETRICS_AUDIODEVICECONNECTION_REPORTED)
, ENUM_EXTRACT(inputDeviceBits)
, ENUM_EXTRACT(outputDeviceBits)
, mA2dpDeviceName.c_str()
@@ -1168,7 +1146,7 @@
);
ALOGV("%s: statsd %s", __func__, str.c_str());
mAudioAnalytics.mStatsdLog->log(
- android::util::MEDIAMETRICS_AUDIODEVICECONNECTION_REPORTED, str);
+ stats::media_metrics::MEDIAMETRICS_AUDIODEVICECONNECTION_REPORTED, str);
}
}
}
@@ -1212,7 +1190,7 @@
<< " deviceName:" << mA2dpDeviceName;
if (mAudioAnalytics.mDeliverStatistics) {
const auto [ result, str ] = sendToStatsd(AudioDeviceConnectionFields,
- CONDITION(android::util::MEDIAMETRICS_AUDIODEVICECONNECTION_REPORTED)
+ CONDITION(stats::media_metrics::MEDIAMETRICS_AUDIODEVICECONNECTION_REPORTED)
, ENUM_EXTRACT(inputDeviceBits)
, ENUM_EXTRACT(outputDeviceBits)
, mA2dpDeviceName.c_str()
@@ -1222,7 +1200,7 @@
);
ALOGV("%s: statsd %s", __func__, str.c_str());
mAudioAnalytics.mStatsdLog->log(
- android::util::MEDIAMETRICS_AUDIODEVICECONNECTION_REPORTED, str);
+ stats::media_metrics::MEDIAMETRICS_AUDIODEVICECONNECTION_REPORTED, str);
}
return;
}
@@ -1239,7 +1217,7 @@
<< " deviceName:" << mA2dpDeviceName;
if (mAudioAnalytics.mDeliverStatistics) {
const auto [ result, str ] = sendToStatsd(AudioDeviceConnectionFields,
- CONDITION(android::util::MEDIAMETRICS_AUDIODEVICECONNECTION_REPORTED)
+ CONDITION(stats::media_metrics::MEDIAMETRICS_AUDIODEVICECONNECTION_REPORTED)
, ENUM_EXTRACT(inputDeviceBits)
, ENUM_EXTRACT(outputDeviceBits)
, mA2dpDeviceName.c_str()
@@ -1249,7 +1227,7 @@
);
ALOGV("%s: statsd %s", __func__, str.c_str());
mAudioAnalytics.mStatsdLog->log(
- android::util::MEDIAMETRICS_AUDIODEVICECONNECTION_REPORTED, str);
+ stats::media_metrics::MEDIAMETRICS_AUDIODEVICECONNECTION_REPORTED, str);
}
}
@@ -1377,10 +1355,10 @@
<< "(" << sharingModeRequestedStr << ")";
if (mAudioAnalytics.mDeliverStatistics) {
- android::util::BytesField bf_serialized(
+ const stats::media_metrics::BytesField bf_serialized(
serializedDeviceTypes.c_str(), serializedDeviceTypes.size());
const auto result = sendToStatsd(
- CONDITION(android::util::MEDIAMETRICS_AAUDIOSTREAM_REPORTED)
+ CONDITION(stats::media_metrics::MEDIAMETRICS_AAUDIOSTREAM_REPORTED)
, path
, direction
, framesPerBurst
@@ -1403,7 +1381,7 @@
std::stringstream ss;
ss << "result:" << result;
const auto fieldsStr = printFields(AAudioStreamFields,
- CONDITION(android::util::MEDIAMETRICS_AAUDIOSTREAM_REPORTED)
+ CONDITION(stats::media_metrics::MEDIAMETRICS_AAUDIOSTREAM_REPORTED)
, path
, direction
, framesPerBurst
@@ -1426,7 +1404,7 @@
ss << " " << fieldsStr;
std::string str = ss.str();
ALOGV("%s: statsd %s", __func__, str.c_str());
- mAudioAnalytics.mStatsdLog->log(android::util::MEDIAMETRICS_AAUDIOSTREAM_REPORTED, str);
+ mAudioAnalytics.mStatsdLog->log(stats::media_metrics::MEDIAMETRICS_AAUDIOSTREAM_REPORTED, str);
}
}
@@ -1566,12 +1544,12 @@
// Classifies the setting event for statsd (use generated statsd enums.proto constants).
static int32_t classifySettingEvent(bool isSetAlready, bool withinBoot) {
if (isSetAlready) {
- return util::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__EVENT__SPATIALIZER_SETTING_EVENT_NORMAL;
+ return stats::media_metrics::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__EVENT__SPATIALIZER_SETTING_EVENT_NORMAL;
}
if (withinBoot) {
- return util::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__EVENT__SPATIALIZER_SETTING_EVENT_BOOT;
+ return stats::media_metrics::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__EVENT__SPATIALIZER_SETTING_EVENT_BOOT;
}
- return util::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__EVENT__SPATIALIZER_SETTING_EVENT_FIRST;
+ return stats::media_metrics::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__EVENT__SPATIALIZER_SETTING_EVENT_FIRST;
}
void AudioAnalytics::Spatializer::onEvent(
@@ -1620,7 +1598,7 @@
types::channelMaskVectorFromString(channelMasks);
const auto [ result, str ] = sendToStatsd(SpatializerCapabilitiesFields,
- CONDITION(android::util::MEDIAMETRICS_SPATIALIZERCAPABILITIES_REPORTED)
+ CONDITION(stats::media_metrics::MEDIAMETRICS_SPATIALIZERCAPABILITIES_REPORTED)
, headTrackingModesVector
, levelsVector
, modesVector
@@ -1628,7 +1606,7 @@
);
mAudioAnalytics.mStatsdLog->log(
- android::util::MEDIAMETRICS_SPATIALIZERCAPABILITIES_REPORTED, str);
+ stats::media_metrics::MEDIAMETRICS_SPATIALIZERCAPABILITIES_REPORTED, str);
std::lock_guard lg(mLock);
if (mFirstCreateTimeNs == 0) {
@@ -1677,13 +1655,13 @@
deviceState.enabled = enabled;
const bool enabledStatsd = enabled == "true";
const auto [ result, str ] = sendToStatsd(SpatializerDeviceEnabledFields,
- CONDITION(android::util::MEDIAMETRICS_SPATIALIZERDEVICEENABLED_REPORTED)
+ CONDITION(stats::media_metrics::MEDIAMETRICS_SPATIALIZERDEVICEENABLED_REPORTED)
, deviceTypeStatsd
, settingEventStatsd
, enabledStatsd
);
mAudioAnalytics.mStatsdLog->log(
- android::util::MEDIAMETRICS_SPATIALIZERDEVICEENABLED_REPORTED, str);
+ stats::media_metrics::MEDIAMETRICS_SPATIALIZERDEVICEENABLED_REPORTED, str);
}
}
if (!hasHeadTracker.empty()) {
@@ -1693,13 +1671,13 @@
deviceState.hasHeadTracker = hasHeadTracker;
const bool supportedStatsd = hasHeadTracker == "true";
const auto [ result, str ] = sendToStatsd(HeadTrackerDeviceSupportedFields,
- CONDITION(android::util::MEDIAMETRICS_HEADTRACKERDEVICESUPPORTED_REPORTED)
+ CONDITION(stats::media_metrics::MEDIAMETRICS_HEADTRACKERDEVICESUPPORTED_REPORTED)
, deviceTypeStatsd
, settingEventStatsd
, supportedStatsd
);
mAudioAnalytics.mStatsdLog->log(
- android::util::MEDIAMETRICS_HEADTRACKERDEVICESUPPORTED_REPORTED, str);
+ stats::media_metrics::MEDIAMETRICS_HEADTRACKERDEVICESUPPORTED_REPORTED, str);
}
}
if (!headTrackerEnabled.empty()) {
@@ -1709,13 +1687,13 @@
deviceState.headTrackerEnabled = headTrackerEnabled;
const bool enabledStatsd = headTrackerEnabled == "true";
const auto [ result, str ] = sendToStatsd(HeadTrackerDeviceEnabledFields,
- CONDITION(android::util::MEDIAMETRICS_HEADTRACKERDEVICEENABLED_REPORTED)
+ CONDITION(stats::media_metrics::MEDIAMETRICS_HEADTRACKERDEVICEENABLED_REPORTED)
, deviceTypeStatsd
, settingEventStatsd
, enabledStatsd
);
mAudioAnalytics.mStatsdLog->log(
- android::util::MEDIAMETRICS_HEADTRACKERDEVICEENABLED_REPORTED, str);
+ stats::media_metrics::MEDIAMETRICS_HEADTRACKERDEVICEENABLED_REPORTED, str);
}
}
mSimpleLog.log("%s deviceKey: %s item: %s",
@@ -1732,4 +1710,22 @@
return { s, n };
}
+// This method currently suppresses the name.
+std::string AudioAnalytics::getDeviceNamesFromOutputDevices(std::string_view devices) const {
+ std::string deviceNames;
+ if (stringutils::hasBluetoothOutputDevice(devices)) {
+ deviceNames = SUPPRESSED;
+#if 0 // TODO(b/161554630) sanitize name
+ mAudioAnalytics.mAnalyticsState->timeMachine().get(
+ "audio.device.bt_a2dp", AMEDIAMETRICS_PROP_NAME, &deviceNames);
+ // Remove | if present
+ stringutils::replace(deviceNames, "|", '?');
+ if (deviceNames.size() > STATSD_DEVICE_NAME_MAX_LENGTH) {
+ deviceNames.resize(STATSD_DEVICE_NAME_MAX_LENGTH); // truncate
+ }
+#endif
+ }
+ return deviceNames;
+}
+
} // namespace android::mediametrics
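For review context, a minimal caller-side sketch of the device-name handling added above, written as if from inside AudioAnalytics and assuming the helpers introduced in this change (stringutils::hasBluetoothOutputDevice and getDeviceNamesFromOutputDevices); the device string literal is hypothetical:
    // Sketch only: the '|'-joined internal device string comes from
    // stringutils::parseOutputDevicePairs() (see StringUtils.cpp later in this change).
    const std::string devices =
            "AUDIO_DEVICE_OUT_BLUETOOTH_A2DP|AUDIO_DEVICE_OUT_SPEAKER";  // hypothetical value
    // hasBluetoothOutputDevice() is a plain substring check for "AUDIO_DEVICE_OUT_BLUETOOTH".
    if (stringutils::hasBluetoothOutputDevice(devices)) {
        // getDeviceNamesFromOutputDevices() returns the SUPPRESSED placeholder rather than
        // the Bluetooth device's human-readable name until sanitization lands (b/161554630).
        const std::string deviceNames = getDeviceNamesFromOutputDevices(devices);
    }
The split keeps the statsd enum string and the internal device string separate, so only the enum form is fed into ENUM_EXTRACT for the statsd fields.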
diff --git a/services/mediametrics/AudioPowerUsage.cpp b/services/mediametrics/AudioPowerUsage.cpp
index 5787e9e..630a436 100644
--- a/services/mediametrics/AudioPowerUsage.cpp
+++ b/services/mediametrics/AudioPowerUsage.cpp
@@ -26,7 +26,7 @@
#include <string>
#include <audio_utils/clock.h>
#include <cutils/properties.h>
-#include <statslog.h>
+#include <stats_media_metrics.h>
#include <sys/timerfd.h>
#include <system/audio.h>
@@ -164,7 +164,7 @@
const int32_t duration_secs = (int32_t)(duration_ns / NANOS_PER_SECOND);
const int32_t min_volume_duration_secs = (int32_t)(min_volume_duration_ns / NANOS_PER_SECOND);
const int32_t max_volume_duration_secs = (int32_t)(max_volume_duration_ns / NANOS_PER_SECOND);
- const int result = android::util::stats_write(android::util::AUDIO_POWER_USAGE_DATA_REPORTED,
+ const int result = stats::media_metrics::stats_write(stats::media_metrics::AUDIO_POWER_USAGE_DATA_REPORTED,
audio_device,
duration_secs,
(float)volume,
@@ -177,7 +177,7 @@
std::stringstream log;
log << "result:" << result << " {"
<< " mediametrics_audio_power_usage_data_reported:"
- << android::util::AUDIO_POWER_USAGE_DATA_REPORTED
+ << stats::media_metrics::AUDIO_POWER_USAGE_DATA_REPORTED
<< " audio_device:" << audio_device
<< " duration_secs:" << duration_secs
<< " average_volume:" << (float)volume
@@ -187,7 +187,7 @@
<< " max_volume_duration_secs:" << max_volume_duration_secs
<< " max_volume:" << (float)max_volume
<< " }";
- mStatsdLog->log(android::util::AUDIO_POWER_USAGE_DATA_REPORTED, log.str());
+ mStatsdLog->log(stats::media_metrics::AUDIO_POWER_USAGE_DATA_REPORTED, log.str());
}
void AudioPowerUsage::updateMinMaxVolumeAndDuration(
diff --git a/services/mediametrics/AudioTypes.cpp b/services/mediametrics/AudioTypes.cpp
index d2b4aab..353ae12 100644
--- a/services/mediametrics/AudioTypes.cpp
+++ b/services/mediametrics/AudioTypes.cpp
@@ -18,7 +18,7 @@
#include "MediaMetricsConstants.h"
#include "StringUtils.h"
#include <media/TypeConverter.h> // requires libmedia_helper to get the Audio code.
-#include <statslog.h> // statsd
+#include <stats_media_metrics.h> // statsd
namespace android::mediametrics::types {
@@ -184,41 +184,41 @@
const std::unordered_map<std::string, int32_t>& getAudioDeviceInfoTypeMap() {
// DO NOT MODIFY VALUES (OK to add new ones).
static std::unordered_map<std::string, int32_t> map{
- {"unknown", util::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_UNKNOWN},
- {"earpiece", util::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_BUILTIN_EARPIECE},
- {"speaker", util::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_BUILTIN_SPEAKER},
- {"headset", util::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_WIRED_HEADSET},
- {"headphone", util::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_WIRED_HEADPHONES}, // sic
- {"bt_sco", util::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_BLUETOOTH_SCO},
- {"bt_sco_hs", util::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_BLUETOOTH_SCO},
- {"bt_sco_carkit", util::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_BLUETOOTH_SCO},
- {"bt_a2dp", util::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_BLUETOOTH_A2DP},
- {"bt_a2dp_hp", util::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_BLUETOOTH_A2DP},
- {"bt_a2dp_spk", util::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_BLUETOOTH_A2DP},
- {"aux_digital", util::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_HDMI},
- {"hdmi", util::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_HDMI},
- {"analog_dock", util::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_DOCK},
- {"digital_dock", util::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_DOCK},
- {"usb_accessory", util::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_USB_ACCESSORY},
- {"usb_device", util::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_USB_DEVICE},
- {"usb_headset", util::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_USB_HEADSET},
- {"remote_submix", util::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_REMOTE_SUBMIX},
- {"telephony_tx", util::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_TELEPHONY},
- {"line", util::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_LINE_ANALOG},
- {"hdmi_arc", util::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_HDMI_ARC},
- {"hdmi_earc", util::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_HDMI_EARC},
- {"spdif", util::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_LINE_DIGITAL},
- {"fm_transmitter", util::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_FM},
- {"aux_line", util::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_AUX_LINE},
- {"speaker_safe", util::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_BUILTIN_SPEAKER_SAFE},
- {"ip", util::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_IP},
- {"bus", util::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_BUS},
- {"proxy", util::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_UNKNOWN /* AUDIO_DEVICE_INFO_TYPE_PROXY */},
- {"hearing_aid_out", util::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_HEARING_AID},
- {"echo_canceller", util::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_ECHO_REFERENCE}, // sic
- {"ble_headset", util::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_BLE_HEADSET},
- {"ble_speaker", util::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_BLE_SPEAKER},
- {"ble_broadcast", util::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_BLE_BROADCAST},
+ {"unknown", stats::media_metrics::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_UNKNOWN},
+ {"earpiece", stats::media_metrics::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_BUILTIN_EARPIECE},
+ {"speaker", stats::media_metrics::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_BUILTIN_SPEAKER},
+ {"headset", stats::media_metrics::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_WIRED_HEADSET},
+ {"headphone", stats::media_metrics::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_WIRED_HEADPHONES}, // sic
+ {"bt_sco", stats::media_metrics::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_BLUETOOTH_SCO},
+ {"bt_sco_hs", stats::media_metrics::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_BLUETOOTH_SCO},
+ {"bt_sco_carkit", stats::media_metrics::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_BLUETOOTH_SCO},
+ {"bt_a2dp", stats::media_metrics::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_BLUETOOTH_A2DP},
+ {"bt_a2dp_hp", stats::media_metrics::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_BLUETOOTH_A2DP},
+ {"bt_a2dp_spk", stats::media_metrics::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_BLUETOOTH_A2DP},
+ {"aux_digital", stats::media_metrics::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_HDMI},
+ {"hdmi", stats::media_metrics::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_HDMI},
+ {"analog_dock", stats::media_metrics::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_DOCK},
+ {"digital_dock", stats::media_metrics::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_DOCK},
+ {"usb_accessory", stats::media_metrics::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_USB_ACCESSORY},
+ {"usb_device", stats::media_metrics::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_USB_DEVICE},
+ {"usb_headset", stats::media_metrics::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_USB_HEADSET},
+ {"remote_submix", stats::media_metrics::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_REMOTE_SUBMIX},
+ {"telephony_tx", stats::media_metrics::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_TELEPHONY},
+ {"line", stats::media_metrics::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_LINE_ANALOG},
+ {"hdmi_arc", stats::media_metrics::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_HDMI_ARC},
+ {"hdmi_earc", stats::media_metrics::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_HDMI_EARC},
+ {"spdif", stats::media_metrics::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_LINE_DIGITAL},
+ {"fm_transmitter", stats::media_metrics::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_FM},
+ {"aux_line", stats::media_metrics::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_AUX_LINE},
+ {"speaker_safe", stats::media_metrics::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_BUILTIN_SPEAKER_SAFE},
+ {"ip", stats::media_metrics::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_IP},
+ {"bus", stats::media_metrics::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_BUS},
+ {"proxy", stats::media_metrics::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_UNKNOWN /* AUDIO_DEVICE_INFO_TYPE_PROXY */},
+ {"hearing_aid_out", stats::media_metrics::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_HEARING_AID},
+ {"echo_canceller", stats::media_metrics::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_ECHO_REFERENCE}, // sic
+ {"ble_headset", stats::media_metrics::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_BLE_HEADSET},
+ {"ble_speaker", stats::media_metrics::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_BLE_SPEAKER},
+ {"ble_broadcast", stats::media_metrics::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_BLE_BROADCAST},
};
return map;
}
@@ -324,23 +324,23 @@
// DO NOT MODIFY VALUES(OK to add new ones).
static std::unordered_map<std::string, int32_t> map {
{"",
- util::MEDIAMETRICS_AUDIO_TRACK_STATUS_REPORTED__STATUS__NO_ERROR},
+ stats::media_metrics::MEDIAMETRICS_AUDIO_TRACK_STATUS_REPORTED__STATUS__NO_ERROR},
{AMEDIAMETRICS_PROP_STATUS_VALUE_OK,
- util::MEDIAMETRICS_AUDIO_TRACK_STATUS_REPORTED__STATUS__NO_ERROR},
+ stats::media_metrics::MEDIAMETRICS_AUDIO_TRACK_STATUS_REPORTED__STATUS__NO_ERROR},
{AMEDIAMETRICS_PROP_STATUS_VALUE_ARGUMENT,
- util::MEDIAMETRICS_AUDIO_TRACK_STATUS_REPORTED__STATUS__ERROR_ARGUMENT},
+ stats::media_metrics::MEDIAMETRICS_AUDIO_TRACK_STATUS_REPORTED__STATUS__ERROR_ARGUMENT},
{AMEDIAMETRICS_PROP_STATUS_VALUE_IO,
- util::MEDIAMETRICS_AUDIO_TRACK_STATUS_REPORTED__STATUS__ERROR_IO},
+ stats::media_metrics::MEDIAMETRICS_AUDIO_TRACK_STATUS_REPORTED__STATUS__ERROR_IO},
{AMEDIAMETRICS_PROP_STATUS_VALUE_MEMORY,
- util::MEDIAMETRICS_AUDIO_TRACK_STATUS_REPORTED__STATUS__ERROR_MEMORY},
+ stats::media_metrics::MEDIAMETRICS_AUDIO_TRACK_STATUS_REPORTED__STATUS__ERROR_MEMORY},
{AMEDIAMETRICS_PROP_STATUS_VALUE_SECURITY,
- util::MEDIAMETRICS_AUDIO_TRACK_STATUS_REPORTED__STATUS__ERROR_SECURITY},
+ stats::media_metrics::MEDIAMETRICS_AUDIO_TRACK_STATUS_REPORTED__STATUS__ERROR_SECURITY},
{AMEDIAMETRICS_PROP_STATUS_VALUE_STATE,
- util::MEDIAMETRICS_AUDIO_TRACK_STATUS_REPORTED__STATUS__ERROR_STATE},
+ stats::media_metrics::MEDIAMETRICS_AUDIO_TRACK_STATUS_REPORTED__STATUS__ERROR_STATE},
{AMEDIAMETRICS_PROP_STATUS_VALUE_TIMEOUT,
- util::MEDIAMETRICS_AUDIO_TRACK_STATUS_REPORTED__STATUS__ERROR_TIMEOUT},
+ stats::media_metrics::MEDIAMETRICS_AUDIO_TRACK_STATUS_REPORTED__STATUS__ERROR_TIMEOUT},
{AMEDIAMETRICS_PROP_STATUS_VALUE_UNKNOWN,
- util::MEDIAMETRICS_AUDIO_TRACK_STATUS_REPORTED__STATUS__ERROR_UNKNOWN},
+ stats::media_metrics::MEDIAMETRICS_AUDIO_TRACK_STATUS_REPORTED__STATUS__ERROR_UNKNOWN},
};
return map;
}
@@ -664,7 +664,7 @@
auto& map = getStatusMap();
auto it = map.find(status);
if (it == map.end()) {
- return util::MEDIAMETRICS_AUDIO_TRACK_STATUS_REPORTED__STATUS__ERROR_UNKNOWN;
+ return stats::media_metrics::MEDIAMETRICS_AUDIO_TRACK_STATUS_REPORTED__STATUS__ERROR_UNKNOWN;
}
return it->second;
}
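A small sketch of how the spatializer device-type map above is meant to be consulted; the key literal and the local lookup are illustrative, while the map and the UNKNOWN fallback come from this file:
    // Sketch: resolve a spatializer device key suffix to its statsd enum value.
    const auto& map = getAudioDeviceInfoTypeMap();
    const auto it = map.find("bt_a2dp");  // hypothetical key
    const int32_t deviceTypeStatsd = (it == map.end())
            ? stats::media_metrics::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_UNKNOWN
            : it->second;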
diff --git a/services/mediametrics/MediaMetricsService.cpp b/services/mediametrics/MediaMetricsService.cpp
index ceb3e6a..b4de4f4 100644
--- a/services/mediametrics/MediaMetricsService.cpp
+++ b/services/mediametrics/MediaMetricsService.cpp
@@ -33,7 +33,7 @@
#include <mediautils/MemoryLeakTrackUtil.h>
#include <memunreachable/memunreachable.h>
#include <private/android_filesystem_config.h> // UID
-#include <statslog.h>
+#include <stats_media_metrics.h>
#include <set>
@@ -546,7 +546,7 @@
if (mStatsdRegistered.test_and_set()) {
return;
}
- auto tag = android::util::MEDIA_DRM_ACTIVITY_INFO;
+ auto tag = stats::media_metrics::MEDIA_DRM_ACTIVITY_INFO;
auto cb = MediaMetricsService::pullAtomCallback;
AStatsManager_setPullAtomCallback(tag, /* metadata */ nullptr, cb, this);
}
@@ -564,7 +564,7 @@
std::string MediaMetricsService::atomTagToKey(int32_t atomTag)
{
switch (atomTag) {
- case android::util::MEDIA_DRM_ACTIVITY_INFO:
+ case stats::media_metrics::MEDIA_DRM_ACTIVITY_INFO:
return "mediadrm";
}
return {};
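A minimal sketch of the pull-atom registration path touched above, using the renamed atom constant; registerDrmPullAtom is a hypothetical wrapper around the call shown in the hunk:
    // Sketch: register the mediadrm pulled atom with the renamed constant.
    void registerDrmPullAtom(MediaMetricsService* service) {  // hypothetical helper
        const int32_t tag = stats::media_metrics::MEDIA_DRM_ACTIVITY_INFO;
        AStatsManager_setPullAtomCallback(tag, /* metadata */ nullptr,
                MediaMetricsService::pullAtomCallback, /* cookie */ service);
        // atomTagToKey(tag) now resolves to "mediadrm", which routes the pull
        // to the statsd_drm handler.
    }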
diff --git a/services/mediametrics/StringUtils.cpp b/services/mediametrics/StringUtils.cpp
index 50525bc..d1c7a18 100644
--- a/services/mediametrics/StringUtils.cpp
+++ b/services/mediametrics/StringUtils.cpp
@@ -20,6 +20,8 @@
#include "StringUtils.h"
+#include "AudioTypes.h"
+
namespace android::mediametrics::stringutils {
std::string tokenizer(std::string::const_iterator& it,
@@ -99,4 +101,30 @@
return replaced;
}
+template <types::AudioEnumCategory CATEGORY>
+std::pair<std::string /* external statsd */, std::string /* internal */>
+parseDevicePairs(const std::string& devicePairs) {
+ std::pair<std::string, std::string> result{};
+ const auto devaddrvec = stringutils::getDeviceAddressPairs(devicePairs);
+ for (const auto& [device, addr] : devaddrvec) { // addr ignored for now.
+ if (!result.second.empty()) {
+ result.second.append("|"); // delimit devices with '|'.
+ result.first.append("|");
+ }
+ result.second.append(device);
+ result.first.append(types::lookup<CATEGORY, std::string>(device));
+ }
+ return result;
+}
+
+std::pair<std::string /* external statsd */, std::string /* internal */>
+parseOutputDevicePairs(const std::string& devicePairs) {
+ return parseDevicePairs<types::OUTPUT_DEVICE>(devicePairs);
+}
+
+std::pair<std::string /* external statsd */, std::string /* internal */>
+parseInputDevicePairs(const std::string& devicePairs) {
+ return parseDevicePairs<types::INPUT_DEVICE>(devicePairs);
+}
+
} // namespace android::mediametrics::stringutils
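The expected shape of the new parse helpers, as a sketch; the pair-formatted input literal is an assumption based on getDeviceAddressPairs, and the addresses are ignored by design:
    // Sketch: split "(device, address)" pairs into a statsd-facing string and an
    // internal '|'-delimited device string.
    const auto [statsdDevices, internalDevices] = parseOutputDevicePairs(
            "(AUDIO_DEVICE_OUT_SPEAKER, )|(AUDIO_DEVICE_OUT_BLUETOOTH_A2DP, 00:11:22:33:44:55)");
    // internalDevices == "AUDIO_DEVICE_OUT_SPEAKER|AUDIO_DEVICE_OUT_BLUETOOTH_A2DP"
    // statsdDevices   holds the corresponding types::lookup<OUTPUT_DEVICE> short enum
    //                 names, also '|'-delimited, ready for ENUM_EXTRACT in AudioAnalytics.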
diff --git a/services/mediametrics/fuzzer/Android.bp b/services/mediametrics/fuzzer/Android.bp
index 84d494e..8b33f10 100644
--- a/services/mediametrics/fuzzer/Android.bp
+++ b/services/mediametrics/fuzzer/Android.bp
@@ -51,7 +51,7 @@
"libprotobuf-cpp-lite",
"libstagefright",
"libstagefright_foundation",
- "libstatslog",
+ "libstats_media_metrics",
"libstatspull",
"libstatssocket",
"libutils",
diff --git a/services/mediametrics/iface_statsd.cpp b/services/mediametrics/iface_statsd.cpp
index 776f878..f64b3ec 100644
--- a/services/mediametrics/iface_statsd.cpp
+++ b/services/mediametrics/iface_statsd.cpp
@@ -37,8 +37,6 @@
#include "MediaMetricsService.h"
#include "iface_statsd.h"
-#include <statslog.h>
-
namespace android {
// set of routines that crack a mediametrics::Item
diff --git a/services/mediametrics/include/mediametricsservice/AudioAnalytics.h b/services/mediametrics/include/mediametricsservice/AudioAnalytics.h
index 7000ba8..82e928e 100644
--- a/services/mediametrics/include/mediametricsservice/AudioAnalytics.h
+++ b/services/mediametrics/include/mediametricsservice/AudioAnalytics.h
@@ -161,6 +161,13 @@
*/
std::string getThreadFromTrack(const std::string& track) const;
+ /**
+ * Returns the device names, if present.
+ *
+ * This is currently enabled only for Bluetooth output devices.
+ */
+ std::string getDeviceNamesFromOutputDevices(std::string_view devices) const;
+
const bool mDeliverStatistics;
// Actions is individually locked
diff --git a/services/mediametrics/include/mediametricsservice/StringUtils.h b/services/mediametrics/include/mediametricsservice/StringUtils.h
index a91d37b..78c25ff 100644
--- a/services/mediametrics/include/mediametricsservice/StringUtils.h
+++ b/services/mediametrics/include/mediametricsservice/StringUtils.h
@@ -217,4 +217,14 @@
return { key, "" };
}
+std::pair<std::string /* external statsd */, std::string /* internal */>
+parseOutputDevicePairs(const std::string& outputDevicePairs);
+
+std::pair<std::string /* external statsd */, std::string /* internal */>
+parseInputDevicePairs(const std::string& inputDevicePairs);
+
+inline bool hasBluetoothOutputDevice(std::string_view devices) {
+ return devices.find("AUDIO_DEVICE_OUT_BLUETOOTH") != std::string::npos;
+}
+
} // namespace android::mediametrics::stringutils
diff --git a/services/mediametrics/statsd_audiopolicy.cpp b/services/mediametrics/statsd_audiopolicy.cpp
index 3d9376e..9a9bc1d 100644
--- a/services/mediametrics/statsd_audiopolicy.cpp
+++ b/services/mediametrics/statsd_audiopolicy.cpp
@@ -29,7 +29,7 @@
#include <sys/types.h>
#include <unistd.h>
-#include <statslog.h>
+#include <stats_media_metrics.h>
#include "MediaMetricsService.h"
#include "frameworks/proto_logging/stats/message/mediametrics_message.pb.h"
@@ -107,15 +107,16 @@
return false;
}
- android::util::BytesField bf_serialized( serialized.c_str(), serialized.size());
- int result = android::util::stats_write(android::util::MEDIAMETRICS_AUDIOPOLICY_REPORTED,
+ const stats::media_metrics::BytesField bf_serialized( serialized.c_str(), serialized.size());
+ const int result = stats::media_metrics::stats_write(
+ stats::media_metrics::MEDIAMETRICS_AUDIOPOLICY_REPORTED,
timestamp_nanos, package_name.c_str(), package_version_code,
media_apex_version,
bf_serialized);
std::stringstream log;
log << "result:" << result << " {"
<< " mediametrics_audiopolicy_reported:"
- << android::util::MEDIAMETRICS_AUDIOPOLICY_REPORTED
+ << stats::media_metrics::MEDIAMETRICS_AUDIOPOLICY_REPORTED
<< " timestamp_nanos:" << timestamp_nanos
<< " package_name:" << package_name
<< " package_version_code:" << package_version_code
@@ -131,7 +132,7 @@
<< " active_session:" << active_session
<< " active_device:" << active_device
<< " }";
- statsdLog->log(android::util::MEDIAMETRICS_AUDIOPOLICY_REPORTED, log.str());
+ statsdLog->log(stats::media_metrics::MEDIAMETRICS_AUDIOPOLICY_REPORTED, log.str());
return true;
}
diff --git a/services/mediametrics/statsd_audiorecord.cpp b/services/mediametrics/statsd_audiorecord.cpp
index 01adf7f..63c61ec 100644
--- a/services/mediametrics/statsd_audiorecord.cpp
+++ b/services/mediametrics/statsd_audiorecord.cpp
@@ -29,7 +29,7 @@
#include <sys/types.h>
#include <unistd.h>
-#include <statslog.h>
+#include <stats_media_metrics.h>
#include "MediaMetricsService.h"
#include "ValidateId.h"
@@ -147,8 +147,9 @@
(void)item->getString("android.media.audiorecord.logSessionId", &logSessionId);
const auto log_session_id = mediametrics::ValidateId::get()->validateId(logSessionId);
- android::util::BytesField bf_serialized( serialized.c_str(), serialized.size());
- int result = android::util::stats_write(android::util::MEDIAMETRICS_AUDIORECORD_REPORTED,
+ const stats::media_metrics::BytesField bf_serialized( serialized.c_str(), serialized.size());
+ const int result = stats::media_metrics::stats_write(
+ stats::media_metrics::MEDIAMETRICS_AUDIORECORD_REPORTED,
timestamp_nanos, package_name.c_str(), package_version_code,
media_apex_version,
bf_serialized,
@@ -156,7 +157,7 @@
std::stringstream log;
log << "result:" << result << " {"
<< " mediametrics_audiorecord_reported:"
- << android::util::MEDIAMETRICS_AUDIORECORD_REPORTED
+ << stats::media_metrics::MEDIAMETRICS_AUDIORECORD_REPORTED
<< " timestamp_nanos:" << timestamp_nanos
<< " package_name:" << package_name
<< " package_version_code:" << package_version_code
@@ -181,7 +182,7 @@
<< " log_session_id:" << log_session_id
<< " }";
- statsdLog->log(android::util::MEDIAMETRICS_AUDIORECORD_REPORTED, log.str());
+ statsdLog->log(stats::media_metrics::MEDIAMETRICS_AUDIORECORD_REPORTED, log.str());
return true;
}
diff --git a/services/mediametrics/statsd_audiothread.cpp b/services/mediametrics/statsd_audiothread.cpp
index e9b6dd6..3056605 100644
--- a/services/mediametrics/statsd_audiothread.cpp
+++ b/services/mediametrics/statsd_audiothread.cpp
@@ -29,7 +29,7 @@
#include <sys/types.h>
#include <unistd.h>
-#include <statslog.h>
+#include <stats_media_metrics.h>
#include "MediaMetricsService.h"
#include "frameworks/proto_logging/stats/message/mediametrics_message.pb.h"
@@ -188,15 +188,16 @@
return false;
}
- android::util::BytesField bf_serialized( serialized.c_str(), serialized.size());
- int result = android::util::stats_write(android::util::MEDIAMETRICS_AUDIOTHREAD_REPORTED,
+ const stats::media_metrics::BytesField bf_serialized( serialized.c_str(), serialized.size());
+ const int result = stats::media_metrics::stats_write(
+ stats::media_metrics::MEDIAMETRICS_AUDIOTHREAD_REPORTED,
timestamp_nanos, package_name.c_str(), package_version_code,
media_apex_version,
bf_serialized);
std::stringstream log;
log << "result:" << result << " {"
<< " mediametrics_audiothread_reported:"
- << android::util::MEDIAMETRICS_AUDIOTHREAD_REPORTED
+ << stats::media_metrics::MEDIAMETRICS_AUDIOTHREAD_REPORTED
<< " timestamp_nanos:" << timestamp_nanos
<< " package_name:" << package_name
<< " package_version_code:" << package_version_code
@@ -231,7 +232,7 @@
<< " latency_mean_millis:" << latency_mean_millis
<< " latency_stddev_millis:" << latency_stddev_millis
<< " }";
- statsdLog->log(android::util::MEDIAMETRICS_AUDIOTHREAD_REPORTED, log.str());
+ statsdLog->log(stats::media_metrics::MEDIAMETRICS_AUDIOTHREAD_REPORTED, log.str());
return true;
}
diff --git a/services/mediametrics/statsd_audiotrack.cpp b/services/mediametrics/statsd_audiotrack.cpp
index 67514e9..1fc7fb4 100644
--- a/services/mediametrics/statsd_audiotrack.cpp
+++ b/services/mediametrics/statsd_audiotrack.cpp
@@ -29,7 +29,7 @@
#include <sys/types.h>
#include <unistd.h>
-#include <statslog.h>
+#include <stats_media_metrics.h>
#include "MediaMetricsService.h"
#include "ValidateId.h"
@@ -134,8 +134,9 @@
(void)item->getString("android.media.audiotrack.logSessionId", &logSessionId);
const auto log_session_id = mediametrics::ValidateId::get()->validateId(logSessionId);
- android::util::BytesField bf_serialized( serialized.c_str(), serialized.size());
- int result = android::util::stats_write(android::util::MEDIAMETRICS_AUDIOTRACK_REPORTED,
+ const stats::media_metrics::BytesField bf_serialized( serialized.c_str(), serialized.size());
+ const int result = stats::media_metrics::stats_write(
+ stats::media_metrics::MEDIAMETRICS_AUDIOTRACK_REPORTED,
timestamp_nanos, package_name.c_str(), package_version_code,
media_apex_version,
bf_serialized,
@@ -143,7 +144,7 @@
std::stringstream log;
log << "result:" << result << " {"
<< " mediametrics_audiotrack_reported:"
- << android::util::MEDIAMETRICS_AUDIOTRACK_REPORTED
+ << stats::media_metrics::MEDIAMETRICS_AUDIOTRACK_REPORTED
<< " timestamp_nanos:" << timestamp_nanos
<< " package_name:" << package_name
<< " package_version_code:" << package_version_code
@@ -164,7 +165,7 @@
<< " log_session_id:" << log_session_id
<< " }";
- statsdLog->log(android::util::MEDIAMETRICS_AUDIOTRACK_REPORTED, log.str());
+ statsdLog->log(stats::media_metrics::MEDIAMETRICS_AUDIOTRACK_REPORTED, log.str());
return true;
}
diff --git a/services/mediametrics/statsd_codec.cpp b/services/mediametrics/statsd_codec.cpp
index a737ba0..c5957e9 100644
--- a/services/mediametrics/statsd_codec.cpp
+++ b/services/mediametrics/statsd_codec.cpp
@@ -29,7 +29,7 @@
#include <sys/types.h>
#include <unistd.h>
-#include <statslog.h>
+#include <stats_media_metrics.h>
#include <stats_event.h>
#include "cleaner.h"
@@ -46,7 +46,7 @@
if (item == nullptr) return false;
AStatsEvent* event = AStatsEvent_obtain();
- AStatsEvent_setAtomId(event, android::util::MEDIA_CODEC_REPORTED);
+ AStatsEvent_setAtomId(event, stats::media_metrics::MEDIA_CODEC_REPORTED);
const nsecs_t timestamp_nanos = MediaMetricsService::roundTime(item->getTimestamp());
AStatsEvent_writeInt64(event, timestamp_nanos);
@@ -455,8 +455,8 @@
ALOGE("Failed to serialize codec metrics");
return false;
}
- android::util::BytesField bf_serialized( serialized.c_str(), serialized.size());
- int result = android::util::stats_write(android::util::MEDIAMETRICS_CODEC_REPORTED,
+ const stats::media_metrics::BytesField bf_serialized( serialized.c_str(), serialized.size());
+ const int result = stats::media_metrics::stats_write(stats::media_metrics::MEDIAMETRICS_CODEC_REPORTED,
timestamp_nanos, package_name.c_str(), package_version_code,
media_apex_version,
bf_serialized);
@@ -464,7 +464,7 @@
std::stringstream log;
log << "result:" << result << " {"
<< " mediametrics_codec_reported:"
- << android::util::MEDIAMETRICS_CODEC_REPORTED
+ << stats::media_metrics::MEDIAMETRICS_CODEC_REPORTED
<< " timestamp_nanos:" << timestamp_nanos
<< " package_name:" << package_name
<< " package_version_code:" << package_version_code
@@ -525,7 +525,7 @@
<< " original_qp_b_min:" << qpBMinOri
<< " original_qp_b_max:" << qpBMaxOri
<< " }";
- statsdLog->log(android::util::MEDIAMETRICS_CODEC_REPORTED, log.str());
+ statsdLog->log(stats::media_metrics::MEDIAMETRICS_CODEC_REPORTED, log.str());
return true;
diff --git a/services/mediametrics/statsd_drm.cpp b/services/mediametrics/statsd_drm.cpp
index e06a605..8769efb 100644
--- a/services/mediametrics/statsd_drm.cpp
+++ b/services/mediametrics/statsd_drm.cpp
@@ -35,7 +35,7 @@
#include "StringUtils.h"
#include "iface_statsd.h"
-#include <statslog.h>
+#include <stats_media_metrics.h>
#include <array>
#include <string>
@@ -69,8 +69,9 @@
// This field is left here for backward compatibility.
// This field is not used anymore.
const std::string kUnusedField("");
- android::util::BytesField bf_serialized(kUnusedField.c_str(), kUnusedField.size());
- int result = android::util::stats_write(android::util::MEDIAMETRICS_MEDIADRM_REPORTED,
+ const stats::media_metrics::BytesField bf_serialized(kUnusedField.c_str(), kUnusedField.size());
+ const int result = stats::media_metrics::stats_write(
+ stats::media_metrics::MEDIAMETRICS_MEDIADRM_REPORTED,
timestamp_nanos, package_name.c_str(), package_version_code,
media_apex_version,
vendor.c_str(),
@@ -80,7 +81,7 @@
std::stringstream log;
log << "result:" << result << " {"
<< " mediametrics_mediadrm_reported:"
- << android::util::MEDIAMETRICS_MEDIADRM_REPORTED
+ << stats::media_metrics::MEDIAMETRICS_MEDIADRM_REPORTED
<< " timestamp_nanos:" << timestamp_nanos
<< " package_name:" << package_name
<< " package_version_code:" << package_version_code
@@ -90,7 +91,7 @@
<< " description:" << description
// omitting serialized
<< " }";
- statsdLog->log(android::util::MEDIAMETRICS_MEDIADRM_REPORTED, log.str());
+ statsdLog->log(stats::media_metrics::MEDIAMETRICS_MEDIADRM_REPORTED, log.str());
return true;
}
@@ -122,7 +123,8 @@
item->getInt64(("method"s + std::to_string(i)).c_str(), &methodCounts[i]);
}
- const int result = android::util::stats_write(android::util::MEDIAMETRICS_DRMMANAGER_REPORTED,
+ const int result = stats::media_metrics::stats_write(
+ stats::media_metrics::MEDIAMETRICS_DRMMANAGER_REPORTED,
timestamp_nanos, package_name.c_str(), package_version_code,
media_apex_version,
plugin_id.c_str(), description.c_str(),
@@ -136,7 +138,7 @@
std::stringstream log;
log << "result:" << result << " {"
<< " mediametrics_drmmanager_reported:"
- << android::util::MEDIAMETRICS_DRMMANAGER_REPORTED
+ << stats::media_metrics::MEDIAMETRICS_DRMMANAGER_REPORTED
<< " timestamp_nanos:" << timestamp_nanos
<< " package_name:" << package_name
<< " package_version_code:" << package_version_code
@@ -151,7 +153,7 @@
log << " method_" << i << ":" << methodCounts[i];
}
log << " }";
- statsdLog->log(android::util::MEDIAMETRICS_DRMMANAGER_REPORTED, log.str());
+ statsdLog->log(stats::media_metrics::MEDIAMETRICS_DRMMANAGER_REPORTED, log.str());
return true;
}
@@ -207,7 +209,7 @@
// Memory for |event| is internally managed by statsd.
AStatsEvent* event = AStatsEventList_addStatsEvent(out);
- AStatsEvent_setAtomId(event, android::util::MEDIA_DRM_ACTIVITY_INFO);
+ AStatsEvent_setAtomId(event, stats::media_metrics::MEDIA_DRM_ACTIVITY_INFO);
AStatsEvent_writeString(event, item->getPkgName().c_str());
AStatsEvent_writeInt64(event, item->getPkgVersionCode());
AStatsEvent_writeString(event, vendor.c_str());
@@ -219,7 +221,7 @@
std::stringstream log;
log << "pulled:" << " {"
<< " media_drm_activity_info:"
- << android::util::MEDIA_DRM_ACTIVITY_INFO
+ << stats::media_metrics::MEDIA_DRM_ACTIVITY_INFO
<< " package_name:" << item->getPkgName()
<< " package_version_code:" << item->getPkgVersionCode()
<< " vendor:" << vendor
@@ -227,7 +229,7 @@
<< " framework_metrics:" << mediametrics::stringutils::bytesToString(framework_raw, 8)
<< " vendor_metrics:" << mediametrics::stringutils::bytesToString(plugin_raw, 8)
<< " }";
- statsdLog->log(android::util::MEDIA_DRM_ACTIVITY_INFO, log.str());
+ statsdLog->log(stats::media_metrics::MEDIA_DRM_ACTIVITY_INFO, log.str());
return true;
}
diff --git a/services/mediametrics/statsd_extractor.cpp b/services/mediametrics/statsd_extractor.cpp
index a8bfeaa..9345df6 100644
--- a/services/mediametrics/statsd_extractor.cpp
+++ b/services/mediametrics/statsd_extractor.cpp
@@ -29,7 +29,7 @@
#include <sys/types.h>
#include <unistd.h>
-#include <statslog.h>
+#include <stats_media_metrics.h>
#include "MediaMetricsService.h"
#include "ValidateId.h"
@@ -96,15 +96,16 @@
return false;
}
- android::util::BytesField bf_serialized( serialized.c_str(), serialized.size());
- int result = android::util::stats_write(android::util::MEDIAMETRICS_EXTRACTOR_REPORTED,
+ const stats::media_metrics::BytesField bf_serialized( serialized.c_str(), serialized.size());
+ const int result = stats::media_metrics::stats_write(
+ stats::media_metrics::MEDIAMETRICS_EXTRACTOR_REPORTED,
timestamp_nanos, package_name.c_str(), package_version_code,
media_apex_version,
bf_serialized);
std::stringstream log;
log << "result:" << result << " {"
<< " mediametrics_extractor_reported:"
- << android::util::MEDIAMETRICS_EXTRACTOR_REPORTED
+ << stats::media_metrics::MEDIAMETRICS_EXTRACTOR_REPORTED
<< " timestamp_nanos:" << timestamp_nanos
<< " package_name:" << package_name
<< " package_version_code:" << package_version_code
@@ -116,7 +117,7 @@
<< " entry_point:" << entry_point_string << "(" << entry_point << ")"
<< " log_session_id:" << log_session_id
<< " }";
- statsdLog->log(android::util::MEDIAMETRICS_EXTRACTOR_REPORTED, log.str());
+ statsdLog->log(stats::media_metrics::MEDIAMETRICS_EXTRACTOR_REPORTED, log.str());
return true;
}
diff --git a/services/mediametrics/statsd_mediaparser.cpp b/services/mediametrics/statsd_mediaparser.cpp
index 67ca874b..458bd32 100644
--- a/services/mediametrics/statsd_mediaparser.cpp
+++ b/services/mediametrics/statsd_mediaparser.cpp
@@ -28,7 +28,7 @@
#include <sys/types.h>
#include <unistd.h>
-#include <statslog.h>
+#include <stats_media_metrics.h>
#include "MediaMetricsService.h"
#include "ValidateId.h"
@@ -83,7 +83,8 @@
item->getString("android.media.mediaparser.logSessionId", &logSessionId);
logSessionId = mediametrics::ValidateId::get()->validateId(logSessionId);
- int result = android::util::stats_write(android::util::MEDIAMETRICS_MEDIAPARSER_REPORTED,
+ const int result = stats::media_metrics::stats_write(
+ stats::media_metrics::MEDIAMETRICS_MEDIAPARSER_REPORTED,
timestamp_nanos,
package_name.c_str(),
package_version_code,
@@ -103,7 +104,7 @@
std::stringstream log;
log << "result:" << result << " {"
<< " mediametrics_mediaparser_reported:"
- << android::util::MEDIAMETRICS_MEDIAPARSER_REPORTED
+ << stats::media_metrics::MEDIAMETRICS_MEDIAPARSER_REPORTED
<< " timestamp_nanos:" << timestamp_nanos
<< " package_name:" << package_name
<< " package_version_code:" << package_version_code
@@ -120,7 +121,7 @@
<< " video_height:" << videoHeight
<< " log_session_id:" << logSessionId
<< " }";
- statsdLog->log(android::util::MEDIAMETRICS_MEDIAPARSER_REPORTED, log.str());
+ statsdLog->log(stats::media_metrics::MEDIAMETRICS_MEDIAPARSER_REPORTED, log.str());
return true;
}
diff --git a/services/mediametrics/statsd_nuplayer.cpp b/services/mediametrics/statsd_nuplayer.cpp
index bdee1f2..fd545f4 100644
--- a/services/mediametrics/statsd_nuplayer.cpp
+++ b/services/mediametrics/statsd_nuplayer.cpp
@@ -29,7 +29,7 @@
#include <sys/types.h>
#include <unistd.h>
-#include <statslog.h>
+#include <stats_media_metrics.h>
#include "MediaMetricsService.h"
#include "frameworks/proto_logging/stats/message/mediametrics_message.pb.h"
@@ -153,8 +153,9 @@
return false;
}
- android::util::BytesField bf_serialized( serialized.c_str(), serialized.size());
- int result = android::util::stats_write(android::util::MEDIAMETRICS_NUPLAYER_REPORTED,
+ const stats::media_metrics::BytesField bf_serialized( serialized.c_str(), serialized.size());
+ const int result = stats::media_metrics::stats_write(
+ stats::media_metrics::MEDIAMETRICS_NUPLAYER_REPORTED,
timestamp_nanos, package_name.c_str(), package_version_code,
media_apex_version,
bf_serialized);
@@ -162,7 +163,7 @@
std::stringstream log;
log << "result:" << result << " {"
<< " mediametrics_nuplayer_reported:"
- << android::util::MEDIAMETRICS_NUPLAYER_REPORTED
+ << stats::media_metrics::MEDIAMETRICS_NUPLAYER_REPORTED
<< " timestamp_nanos:" << timestamp_nanos
<< " package_name:" << package_name
<< " package_version_code:" << package_version_code
@@ -193,7 +194,7 @@
// TODO NuPlayer - add log_session_id
// << " log_session_id:" << log_session_id
<< " }";
- statsdLog->log(android::util::MEDIAMETRICS_NUPLAYER_REPORTED, log.str());
+ statsdLog->log(stats::media_metrics::MEDIAMETRICS_NUPLAYER_REPORTED, log.str());
return true;
}
diff --git a/services/mediametrics/statsd_recorder.cpp b/services/mediametrics/statsd_recorder.cpp
index 5f54a68..efa284b 100644
--- a/services/mediametrics/statsd_recorder.cpp
+++ b/services/mediametrics/statsd_recorder.cpp
@@ -29,7 +29,7 @@
#include <sys/types.h>
#include <unistd.h>
-#include <statslog.h>
+#include <stats_media_metrics.h>
#include "MediaMetricsService.h"
#include "ValidateId.h"
@@ -179,15 +179,16 @@
return false;
}
- android::util::BytesField bf_serialized( serialized.c_str(), serialized.size());
- int result = android::util::stats_write(android::util::MEDIAMETRICS_RECORDER_REPORTED,
+ const stats::media_metrics::BytesField bf_serialized( serialized.c_str(), serialized.size());
+ const int result = stats::media_metrics::stats_write(
+ stats::media_metrics::MEDIAMETRICS_RECORDER_REPORTED,
timestamp_nanos, package_name.c_str(), package_version_code,
media_apex_version,
bf_serialized);
std::stringstream log;
log << "result:" << result << " {"
<< " mediametrics_recorder_reported:"
- << android::util::MEDIAMETRICS_RECORDER_REPORTED
+ << stats::media_metrics::MEDIAMETRICS_RECORDER_REPORTED
<< " timestamp_nanos:" << timestamp_nanos
<< " package_name:" << package_name
<< " package_version_code:" << package_version_code
@@ -218,7 +219,7 @@
<< " iframe_interval:" << iframe_interval
<< " log_session_id:" << log_session_id
<< " }";
- statsdLog->log(android::util::MEDIAMETRICS_RECORDER_REPORTED, log.str());
+ statsdLog->log(stats::media_metrics::MEDIAMETRICS_RECORDER_REPORTED, log.str());
return true;
}
diff --git a/services/mediaresourcemanager/ResourceManagerService.cpp b/services/mediaresourcemanager/ResourceManagerService.cpp
index 4d18876..adf0a5e 100644
--- a/services/mediaresourcemanager/ResourceManagerService.cpp
+++ b/services/mediaresourcemanager/ResourceManagerService.cpp
@@ -26,8 +26,9 @@
#include <cutils/sched_policy.h>
#include <dirent.h>
#include <media/MediaResourcePolicy.h>
-#include <media/stagefright/ProcessInfo.h>
+#include <media/stagefright/foundation/ABase.h>
#include <mediautils/BatteryNotifier.h>
+#include <mediautils/ProcessInfo.h>
#include <mediautils/SchedulingPolicyService.h>
#include <string.h>
#include <sys/types.h>
diff --git a/services/mediaresourcemanager/fuzzer/Android.bp b/services/mediaresourcemanager/fuzzer/Android.bp
index 81c85e5..08ac90e 100644
--- a/services/mediaresourcemanager/fuzzer/Android.bp
+++ b/services/mediaresourcemanager/fuzzer/Android.bp
@@ -40,6 +40,7 @@
"libbinder",
"libbinder_ndk",
"libmedia",
+ "libmediautils",
"libutils",
],
fuzz_config: {
diff --git a/services/mediaresourcemanager/fuzzer/mediaresourcemanager_fuzzer.cpp b/services/mediaresourcemanager/fuzzer/mediaresourcemanager_fuzzer.cpp
index e4aaea0..7003dcb 100644
--- a/services/mediaresourcemanager/fuzzer/mediaresourcemanager_fuzzer.cpp
+++ b/services/mediaresourcemanager/fuzzer/mediaresourcemanager_fuzzer.cpp
@@ -22,8 +22,8 @@
#include <aidl/android/media/BnResourceManagerClient.h>
#include <media/MediaResource.h>
#include <media/MediaResourcePolicy.h>
-#include <media/stagefright/ProcessInfoInterface.h>
#include <media/stagefright/foundation/ADebug.h>
+#include <mediautils/ProcessInfoInterface.h>
#include "ResourceManagerService.h"
#include "fuzzer/FuzzedDataProvider.h"
diff --git a/services/mediaresourcemanager/test/Android.bp b/services/mediaresourcemanager/test/Android.bp
index 618626f..0366d9b 100644
--- a/services/mediaresourcemanager/test/Android.bp
+++ b/services/mediaresourcemanager/test/Android.bp
@@ -18,6 +18,7 @@
"libbinder_ndk",
"liblog",
"libmedia",
+ "libmediautils",
"libutils",
],
include_dirs: [
@@ -63,6 +64,7 @@
"libbinder_ndk",
"liblog",
"libmedia",
+ "libmediautils",
"libutils",
],
include_dirs: [
diff --git a/services/mediaresourcemanager/test/ResourceManagerServiceTestUtils.h b/services/mediaresourcemanager/test/ResourceManagerServiceTestUtils.h
index 5bf44ce..7bd9484 100644
--- a/services/mediaresourcemanager/test/ResourceManagerServiceTestUtils.h
+++ b/services/mediaresourcemanager/test/ResourceManagerServiceTestUtils.h
@@ -21,7 +21,7 @@
#include <media/MediaResource.h>
#include <media/MediaResourcePolicy.h>
#include <media/stagefright/foundation/ADebug.h>
-#include <media/stagefright/ProcessInfoInterface.h>
+#include <mediautils/ProcessInfoInterface.h>
namespace android {
diff --git a/services/tuner/Android.bp b/services/tuner/Android.bp
index 9c4f9a5..0649061 100644
--- a/services/tuner/Android.bp
+++ b/services/tuner/Android.bp
@@ -15,7 +15,7 @@
imports: [
"android.hardware.common-V2",
"android.hardware.common.fmq-V1",
- "android.hardware.tv.tuner-V1",
+ "android.hardware.tv.tuner-V2",
],
backend: {
java: {
@@ -41,7 +41,7 @@
shared_libs: [
"android.hardware.tv.tuner@1.0",
"android.hardware.tv.tuner@1.1",
- "android.hardware.tv.tuner-V1-ndk",
+ "android.hardware.tv.tuner-V2-ndk",
"libbase",
"libbinder",
"libbinder_ndk",
@@ -84,7 +84,7 @@
shared_libs: [
"android.hardware.tv.tuner@1.0",
"android.hardware.tv.tuner@1.1",
- "android.hardware.tv.tuner-V1-ndk",
+ "android.hardware.tv.tuner-V2-ndk",
"libbase",
"libbinder",
"libfmq",
diff --git a/services/tuner/TunerDvr.cpp b/services/tuner/TunerDvr.cpp
index 9a35db8..a0abc92 100644
--- a/services/tuner/TunerDvr.cpp
+++ b/services/tuner/TunerDvr.cpp
@@ -94,6 +94,21 @@
return mDvr->close();
}
+::ndk::ScopedAStatus TunerDvr::setStatusCheckIntervalHint(const int64_t milliseconds) {
+ if (milliseconds < 0L) {
+ return ::ndk::ScopedAStatus::fromServiceSpecificError(
+ static_cast<int32_t>(Result::INVALID_ARGUMENT));
+ }
+
+ ::ndk::ScopedAStatus s = mDvr->setStatusCheckIntervalHint(milliseconds);
+ if (s.getStatus() == STATUS_UNKNOWN_TRANSACTION) {
+ return ::ndk::ScopedAStatus::fromServiceSpecificError(
+ static_cast<int32_t>(Result::UNAVAILABLE));
+ }
+
+ return s;
+}
+
/////////////// IDvrCallback ///////////////////////
::ndk::ScopedAStatus TunerDvr::DvrCallback::onRecordStatus(const RecordStatus status) {
if (mTunerDvrCallback != nullptr) {
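Caller-facing behavior of the new DVR hint, sketched; here dvr stands for a hypothetical ITunerDvr proxy obtained from the tuner service:
    // Sketch: forward a status-check interval hint to the DVR HAL.
    ::ndk::ScopedAStatus s = dvr->setStatusCheckIntervalHint(100 /* milliseconds */);
    // A negative hint is rejected up front with Result::INVALID_ARGUMENT.
    // If the underlying HAL predates the method (STATUS_UNKNOWN_TRANSACTION) or is
    // the HIDL backend (TunerHidlDvr), the service reports Result::UNAVAILABLE
    // instead of surfacing a raw transport error.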
diff --git a/services/tuner/TunerDvr.h b/services/tuner/TunerDvr.h
index 1854d08..2330e7b 100644
--- a/services/tuner/TunerDvr.h
+++ b/services/tuner/TunerDvr.h
@@ -61,6 +61,7 @@
::ndk::ScopedAStatus stop() override;
::ndk::ScopedAStatus flush() override;
::ndk::ScopedAStatus close() override;
+ ::ndk::ScopedAStatus setStatusCheckIntervalHint(int64_t in_milliseconds) override;
struct DvrCallback : public BnDvrCallback {
DvrCallback(const shared_ptr<ITunerDvrCallback> tunerDvrCallback)
diff --git a/services/tuner/aidl/android/media/tv/tuner/ITunerDvr.aidl b/services/tuner/aidl/android/media/tv/tuner/ITunerDvr.aidl
index 2c01c4e..cafe075 100644
--- a/services/tuner/aidl/android/media/tv/tuner/ITunerDvr.aidl
+++ b/services/tuner/aidl/android/media/tv/tuner/ITunerDvr.aidl
@@ -66,4 +66,9 @@
* close the DVR instance to release resource for DVR.
*/
void close();
+
+ /**
+ * Set the status check time interval hint, in milliseconds.
+ */
+ void setStatusCheckIntervalHint(in long milliseconds);
}
diff --git a/services/tuner/hidl/TunerHidlDvr.cpp b/services/tuner/hidl/TunerHidlDvr.cpp
index 50d92de..8083a6e 100644
--- a/services/tuner/hidl/TunerHidlDvr.cpp
+++ b/services/tuner/hidl/TunerHidlDvr.cpp
@@ -148,6 +148,11 @@
return ::ndk::ScopedAStatus::ok();
}
+::ndk::ScopedAStatus TunerHidlDvr::setStatusCheckIntervalHint(int64_t /* in_milliseconds */) {
+ HidlResult res = HidlResult::UNAVAILABLE;
+ return ::ndk::ScopedAStatus::fromServiceSpecificError(static_cast<int32_t>(res));
+}
+
HidlDvrSettings TunerHidlDvr::getHidlDvrSettings(const DvrSettings& settings) {
HidlDvrSettings s;
switch (mType) {
diff --git a/services/tuner/hidl/TunerHidlDvr.h b/services/tuner/hidl/TunerHidlDvr.h
index a280ff7..aa86b14 100644
--- a/services/tuner/hidl/TunerHidlDvr.h
+++ b/services/tuner/hidl/TunerHidlDvr.h
@@ -63,6 +63,7 @@
::ndk::ScopedAStatus stop() override;
::ndk::ScopedAStatus flush() override;
::ndk::ScopedAStatus close() override;
+ ::ndk::ScopedAStatus setStatusCheckIntervalHint(int64_t in_milliseconds) override;
struct DvrCallback : public HidlIDvrCallback {
DvrCallback(const shared_ptr<ITunerDvrCallback> tunerDvrCallback)
diff --git a/tools/OWNERS b/tools/OWNERS
new file mode 100644
index 0000000..7598c6f
--- /dev/null
+++ b/tools/OWNERS
@@ -0,0 +1,5 @@
+# Bug component: 1344
+essick@google.com
+
+# reliability builds mainline trains, so needs to manage these scripts
+include platform/frameworks/av/:/media/janitors/reliability_mainline_OWNERS
diff --git a/tools/mainline_hook_partial.sh b/tools/mainline_hook_partial.sh
index bd82315..cd3e579 100755
--- a/tools/mainline_hook_partial.sh
+++ b/tools/mainline_hook_partial.sh
Binary files differ
diff --git a/tools/mainline_hook_project.sh b/tools/mainline_hook_project.sh
index cb5fc44..1cc3b2b 100755
--- a/tools/mainline_hook_project.sh
+++ b/tools/mainline_hook_project.sh
@@ -17,7 +17,7 @@
# tunables
DEV_BRANCH=master
-MAINLINE_BRANCH=sc-mainline-prod
+MAINLINE_BRANCH=tm-mainline-prod
###
RED=$(tput setaf 1)