Merge "Camera: Retrieve maximum duration BLOB tags" into udc-dev
diff --git a/camera/Android.bp b/camera/Android.bp
index f27eb31..b3f70f4 100644
--- a/camera/Android.bp
+++ b/camera/Android.bp
@@ -142,14 +142,15 @@
filegroup {
name: "libcamera_client_aidl",
srcs: [
+ "aidl/android/hardware/CameraExtensionSessionStats.aidl",
"aidl/android/hardware/ICameraService.aidl",
"aidl/android/hardware/ICameraServiceListener.aidl",
"aidl/android/hardware/ICameraServiceProxy.aidl",
"aidl/android/hardware/camera2/ICameraDeviceCallbacks.aidl",
"aidl/android/hardware/camera2/ICameraDeviceUser.aidl",
- "aidl/android/hardware/camera2/ICameraOfflineSession.aidl",
"aidl/android/hardware/camera2/ICameraInjectionCallback.aidl",
"aidl/android/hardware/camera2/ICameraInjectionSession.aidl",
+ "aidl/android/hardware/camera2/ICameraOfflineSession.aidl",
],
path: "aidl",
}
diff --git a/camera/CameraSessionStats.cpp b/camera/CameraSessionStats.cpp
index 3954db5..9e9793d 100644
--- a/camera/CameraSessionStats.cpp
+++ b/camera/CameraSessionStats.cpp
@@ -279,7 +279,8 @@
mResultErrorCount(0),
mDeviceError(false),
mVideoStabilizationMode(-1),
- mSessionIndex(0) {}
+ mSessionIndex(0),
+ mCameraExtensionSessionStats() {}
CameraSessionStats::CameraSessionStats(const String16& cameraId,
int facing, int newCameraState, const String16& clientName,
@@ -299,7 +300,8 @@
mResultErrorCount(0),
mDeviceError(0),
mVideoStabilizationMode(-1),
- mSessionIndex(0) {}
+ mSessionIndex(0),
+ mCameraExtensionSessionStats() {}
status_t CameraSessionStats::readFromParcel(const android::Parcel* parcel) {
if (parcel == NULL) {
@@ -417,6 +419,12 @@
return err;
}
+ CameraExtensionSessionStats extStats{};
+ if ((err = extStats.readFromParcel(parcel)) != OK) {
+ ALOGE("%s: Failed to read extension session stats from parcel", __FUNCTION__);
+ return err;
+ }
+
mCameraId = id;
mFacing = facing;
mNewCameraState = newCameraState;
@@ -435,6 +443,7 @@
mUserTag = userTag;
mVideoStabilizationMode = videoStabilizationMode;
mSessionIndex = sessionIdx;
+ mCameraExtensionSessionStats = extStats;
return OK;
}
@@ -537,6 +546,11 @@
return err;
}
+ if ((err = mCameraExtensionSessionStats.writeToParcel(parcel)) != OK) {
+ ALOGE("%s: Failed to write extension sessions stats!", __FUNCTION__);
+ return err;
+ }
+
return OK;
}
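The new field is read and written at the tail of the existing parcel layout, so both directions stay in lockstep. A minimal sketch of that invariant, using a hypothetical MyStats parcelable (field names are illustrative, not from this change):

    #include <binder/Parcel.h>
    #include <binder/Parcelable.h>
    #include <utils/String16.h>

    using android::OK;
    using android::Parcel;
    using android::status_t;

    // Hypothetical parcelable illustrating the ordering invariant relied on
    // above: readFromParcel must consume fields in exactly the order
    // writeToParcel produced them, and new fields are appended at the tail
    // of both.
    struct MyStats : public android::Parcelable {
        android::String16 cameraId;
        int32_t sessionIndex = 0;

        status_t writeToParcel(Parcel* parcel) const override {
            status_t err = parcel->writeString16(cameraId);
            if (err != OK) return err;
            return parcel->writeInt32(sessionIndex);  // appended last
        }

        status_t readFromParcel(const Parcel* parcel) override {
            status_t err = parcel->readString16(&cameraId);
            if (err != OK) return err;
            return parcel->readInt32(&sessionIndex);  // read last, same order
        }
    };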
diff --git a/camera/aidl/android/hardware/CameraExtensionSessionStats.aidl b/camera/aidl/android/hardware/CameraExtensionSessionStats.aidl
new file mode 100644
index 0000000..1c81831
--- /dev/null
+++ b/camera/aidl/android/hardware/CameraExtensionSessionStats.aidl
@@ -0,0 +1,69 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.hardware;
+
+/**
+ * Metrics specific to Extension Sessions (see CameraExtensionSession) for logging.
+ *
+ * Each Extension Session is mapped to one camera session internally, and its stats will be
+ * sent to CameraServiceProxy with IDLE/CLOSE calls.
+ * @hide
+ */
+parcelable CameraExtensionSessionStats {
+ /**
+ * Value should match {@code CameraExtensionCharacteristics#EXTENSION_*}
+ */
+ @Backing(type="int")
+ enum Type {
+ EXTENSION_NONE = -1,
+ EXTENSION_AUTOMATIC = 0,
+ EXTENSION_FACE_RETOUCH = 1,
+ EXTENSION_BOKEH = 2,
+ EXTENSION_HDR = 3,
+ EXTENSION_NIGHT = 4
+ }
+
+ /**
+ * Key to uniquely identify the session this stat is associated with. The first call to
+ * 'ICameraService.reportExtensionSessionStats' should set this to an empty string.
+ * 'ICameraService.reportExtensionSessionStats' returns the key that should be used with
+ * subsequent calls.
+ */
+ String key;
+
+ /**
+ * Camera ID for which the stats are being reported.
+ */
+ String cameraId;
+
+ /**
+ * Package name of the client using the camera
+ */
+ String clientName;
+
+
+ /**
+ * Type of extension session requested by the app. Note that EXTENSION_AUTOMATIC is reported
+ * as such.
+ */
+ Type type = Type.EXTENSION_NONE;
+
+ /**
+ * true if advanced extensions are being used, false otherwise
+ */
+ boolean isAdvanced = false;
+}
\ No newline at end of file
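For illustration, a sketch of how a native client might fill this parcelable through the cpp-backend bindings; the generated C++ field types (String16 for String) and the package name are assumptions, not part of this change:

    #include <android/hardware/CameraExtensionSessionStats.h>
    #include <utils/String16.h>

    using android::hardware::CameraExtensionSessionStats;

    CameraExtensionSessionStats makeFirstReport() {
        CameraExtensionSessionStats stats;
        stats.key = android::String16("");  // empty on the very first report
        stats.cameraId = android::String16("0");
        stats.clientName = android::String16("com.example.camera");  // hypothetical package
        stats.type = CameraExtensionSessionStats::Type::EXTENSION_NIGHT;
        stats.isAdvanced = false;  // basic (non-advanced) extension path
        return stats;
    }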
diff --git a/camera/aidl/android/hardware/ICameraService.aidl b/camera/aidl/android/hardware/ICameraService.aidl
index 9f32595..f8e1631 100644
--- a/camera/aidl/android/hardware/ICameraService.aidl
+++ b/camera/aidl/android/hardware/ICameraService.aidl
@@ -30,6 +30,7 @@
import android.hardware.ICameraServiceListener;
import android.hardware.CameraInfo;
import android.hardware.CameraStatus;
+import android.hardware.CameraExtensionSessionStats;
/**
* Binder interface for the native camera service running in mediaserver.
@@ -214,6 +215,26 @@
*/
oneway void notifyDeviceStateChange(long newState);
+ /**
+ * Report Extension specific metrics to camera service for logging. This should only be called
+ * by CameraExtensionSession to log extension metrics. All calls after the first must set
+ * CameraExtensionSessionStats.key to the value returned by this function.
+ *
+ * Each subsequent call fully overwrites the existing CameraExtensionSessionStats for the
+ * current session, so the caller is responsible for keeping the stats complete.
+ *
+ * Due to the cameraservice and cameraservice_proxy architecture, there is no guarantee that
+ * {@code stats} will be logged immediately (or at all). CameraService will log whatever
+ * extension stats it has at the time the camera session closes, which may be before the app
+ * process receives a session/device closed callback; so CameraExtensionSession
+ * should send metrics to the cameraservice periodically, and cameraservice must handle calls
+ * to this function from sessions that have not been logged yet and from sessions that have
+ * already been closed.
+ *
+ * @return the key that must be used to report updates to previously reported stats.
+ */
+ String reportExtensionSessionStats(in CameraExtensionSessionStats stats);
+
// Bitfield constants for notifyDeviceStateChange
// All bits >= 32 are for custom vendor states
// Written as ints since AIDL does not support long constants.
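The key-exchange protocol described above, sketched from the calling side; the cpp-backend method signature (out-parameter for the returned String) and the helper functions are assumptions:

    #include <android/hardware/CameraExtensionSessionStats.h>
    #include <android/hardware/ICameraService.h>
    #include <utils/String16.h>

    using android::sp;
    using android::String16;
    using android::hardware::CameraExtensionSessionStats;
    using android::hardware::ICameraService;

    // Hypothetical helpers, assumed provided by the extension session.
    bool sessionStillOpen();
    void waitForNextReportingInterval();

    // First call: stats.key is empty. Every later call echoes back the key
    // the service returned, so the service can merge updates into the same
    // session's record.
    void reportPeriodically(const sp<ICameraService>& service,
                            CameraExtensionSessionStats stats) {
        String16 key;  // empty string for the first report
        while (sessionStillOpen()) {
            stats.key = key;
            if (!service->reportExtensionSessionStats(stats, &key).isOk()) {
                break;  // service died or rejected the report
            }
            waitForNextReportingInterval();
        }
    }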
diff --git a/camera/aidl/android/hardware/ICameraServiceProxy.aidl b/camera/aidl/android/hardware/ICameraServiceProxy.aidl
index be8a00f..4faa6b4 100644
--- a/camera/aidl/android/hardware/ICameraServiceProxy.aidl
+++ b/camera/aidl/android/hardware/ICameraServiceProxy.aidl
@@ -17,6 +17,7 @@
package android.hardware;
import android.hardware.CameraSessionStats;
+import android.hardware.CameraExtensionSessionStats;
/**
* Binder interface for the camera service proxy running in system_server.
diff --git a/camera/include/camera/CameraSessionStats.h b/camera/include/camera/CameraSessionStats.h
index 895449c..071bc73 100644
--- a/camera/include/camera/CameraSessionStats.h
+++ b/camera/include/camera/CameraSessionStats.h
@@ -20,6 +20,7 @@
#include <binder/Parcelable.h>
#include <camera/CameraMetadata.h>
+#include <android/hardware/CameraExtensionSessionStats.h>
namespace android {
namespace hardware {
@@ -160,6 +161,8 @@
int mVideoStabilizationMode;
int mSessionIndex;
+ CameraExtensionSessionStats mCameraExtensionSessionStats;
+
// Constructors
CameraSessionStats();
CameraSessionStats(const String16& cameraId, int facing, int newCameraState,
diff --git a/drm/drmserver/Android.bp b/drm/drmserver/Android.bp
index ab25c65..d064123 100644
--- a/drm/drmserver/Android.bp
+++ b/drm/drmserver/Android.bp
@@ -31,7 +31,33 @@
],
}
-cc_binary {
+prebuilt_etc {
+ name: "drmserver.zygote64_32.rc",
+ src: "drmserver.zygote64_32.rc",
+ sub_dir: "init/hw",
+}
+
+prebuilt_etc {
+ name: "drmserver.zygote64.rc",
+ src: "drmserver.zygote64.rc",
+ sub_dir: "init/hw",
+}
+
+soong_config_module_type {
+ name: "drmserver_cc_binary",
+ module_type: "cc_binary",
+ config_namespace: "ANDROID",
+ bool_variables: ["TARGET_DYNAMIC_64_32_DRMSERVER"],
+ properties: [
+ "compile_multilib",
+ "init_rc",
+ "multilib.lib32.suffix",
+ "multilib.lib64.suffix",
+ "required",
+ ],
+}
+
+drmserver_cc_binary {
name: "drmserver",
srcs: [
@@ -61,7 +87,27 @@
compile_multilib: "prefer32",
- init_rc: ["drmserver.rc"],
+ soong_config_variables: {
+ TARGET_DYNAMIC_64_32_DRMSERVER: {
+ compile_multilib: "both",
+ multilib: {
+ lib32: {
+ suffix: "32",
+ },
+ lib64: {
+ suffix: "64",
+ },
+ },
+ required: [
+ "drmserver.zygote64_32.rc",
+ "drmserver.zygote64.rc",
+ ],
+ init_rc: ["drmserver_dynamic.rc"],
+ conditions_default: {
+ init_rc: ["drmserver.rc"],
+ },
+ },
+ },
}
cc_fuzz {
diff --git a/drm/drmserver/drmserver.zygote64.rc b/drm/drmserver/drmserver.zygote64.rc
new file mode 100644
index 0000000..60cd906
--- /dev/null
+++ b/drm/drmserver/drmserver.zygote64.rc
@@ -0,0 +1,6 @@
+service drm /system/bin/drmserver64
+ disabled
+ class main
+ user drm
+ group drm system inet drmrpc readproc
+ task_profiles ProcessCapacityHigh
diff --git a/drm/drmserver/drmserver.zygote64_32.rc b/drm/drmserver/drmserver.zygote64_32.rc
new file mode 100644
index 0000000..c881acf
--- /dev/null
+++ b/drm/drmserver/drmserver.zygote64_32.rc
@@ -0,0 +1,6 @@
+service drm /system/bin/drmserver32
+ disabled
+ class main
+ user drm
+ group drm system inet drmrpc readproc
+ task_profiles ProcessCapacityHigh
diff --git a/drm/drmserver/drmserver_dynamic.rc b/drm/drmserver/drmserver_dynamic.rc
new file mode 100644
index 0000000..bfaada1
--- /dev/null
+++ b/drm/drmserver/drmserver_dynamic.rc
@@ -0,0 +1,7 @@
+import /system/etc/init/hw/drmserver.${ro.zygote}.rc
+
+on property:drm.service.enabled=true
+ start drm
+
+on property:drm.service.enabled=1
+ start drm
diff --git a/media/TEST_MAPPING b/media/TEST_MAPPING
index 5b1bd91..cd5d354 100644
--- a/media/TEST_MAPPING
+++ b/media/TEST_MAPPING
@@ -44,53 +44,5 @@
],
"file_patterns": ["(?i)drm|crypto"]
}
- ],
-
- "platinum-postsubmit": [
- // runs regularly, independent of changes in this tree.
- // signals if changes elsewhere break media functionality
- // @FlakyTest: in staged-postsubmit, but not postsubmit
- {
- "name": "CtsMediaCodecTestCases",
- "options": [
- {
- "include-filter": "android.media.codec.cts.EncodeDecodeTest"
- }
- ]
- },
- {
- "name": "CtsMediaCodecTestCases",
- "options": [
- {
- "include-filter": "android.media.codec.cts.DecodeEditEncodeTest"
- },
- {
- "exclude-annotation": "androidx.test.filters.FlakyTest"
- }
- ]
- }
- ],
-
- "staged-platinum-postsubmit": [
- // runs every four hours
- {
- "name": "CtsMediaCodecTestCases",
- "options": [
- {
- "include-filter": "android.media.codec.cts.EncodeDecodeTest"
- }
- ]
- },
- {
- "name": "CtsMediaCodecTestCases",
- "options": [
- {
- "include-filter": "android.media.codec.cts.DecodeEditEncodeTest"
- }
- ]
- }
]
-
- // TODO (b/229286407) Add EncodeDecodeTest and DecodeEditEncodeTest to
- // platinum-postsubmit once issues in cuttlefish are fixed
}
diff --git a/media/audioaidlconversion/include/media/AidlConversionCppNdk-impl.h b/media/audioaidlconversion/include/media/AidlConversionCppNdk-impl.h
index ec1f75c..bc9d4d5 100644
--- a/media/audioaidlconversion/include/media/AidlConversionCppNdk-impl.h
+++ b/media/audioaidlconversion/include/media/AidlConversionCppNdk-impl.h
@@ -288,6 +288,11 @@
ConversionResult<media::audio::common::AudioOutputFlags>
legacy2aidl_audio_output_flags_t_AudioOutputFlags(audio_output_flags_t legacy);
+ConversionResult<audio_stream_type_t>
+aidl2legacy_AudioStreamType_audio_stream_type_t(media::audio::common::AudioStreamType aidl);
+ConversionResult<media::audio::common::AudioStreamType>
+legacy2aidl_audio_stream_type_t_AudioStreamType(audio_stream_type_t legacy);
+
// This type is unnamed in the original definition, thus we name it here.
using audio_port_config_mix_ext_usecase = decltype(audio_port_config_mix_ext::usecase);
ConversionResult<audio_port_config_mix_ext_usecase>
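These declarations follow the ConversionResult pattern used throughout the AIDL conversion code. A sketch of the call-site idiom (header and namespace details vary by backend and are assumed here), matching how the EffectsFactoryHalAidl change later in this patch consumes such results:

    #include <media/AidlConversionCppNdk.h>

    namespace android {

    // Illustrative call site: conversions return ConversionResult<T>;
    // check ok() before reading value().
    status_t toLegacyStreamType(media::audio::common::AudioStreamType aidl,
                                audio_stream_type_t* out) {
        auto result = aidl2legacy_AudioStreamType_audio_stream_type_t(aidl);
        if (!result.ok()) {
            return BAD_VALUE;  // conversion failed; propagate an error
        }
        *out = result.value();
        return OK;
    }

    }  // namespace android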
diff --git a/media/codec2/components/aom/C2SoftAomEnc.cpp b/media/codec2/components/aom/C2SoftAomEnc.cpp
index e8b0506..e08bf43 100644
--- a/media/codec2/components/aom/C2SoftAomEnc.cpp
+++ b/media/codec2/components/aom/C2SoftAomEnc.cpp
@@ -636,11 +636,11 @@
return;
}
- std::shared_ptr<const C2GraphicView> rView;
+ std::shared_ptr<C2GraphicView> rView;
std::shared_ptr<C2Buffer> inputBuffer;
if (!work->input.buffers.empty()) {
inputBuffer = work->input.buffers[0];
- rView = std::make_shared<const C2GraphicView>(
+ rView = std::make_shared<C2GraphicView>(
inputBuffer->data().graphicBlocks().front().map().get());
if (rView->error() != C2_OK) {
ALOGE("graphic view map err = %d", rView->error());
@@ -678,6 +678,10 @@
return;
}
+ // Workaround for incorrect crop size in the view when using surface mode (b/279387842).
+ rView->setCrop_be(C2Rect(mSize->width, mSize->height));
+
if (!mHeadersReceived) {
Av1Config av1_config;
constexpr uint32_t header_length = 2048;
diff --git a/media/codec2/components/gav1/C2SoftGav1Dec.cpp b/media/codec2/components/gav1/C2SoftGav1Dec.cpp
index 77296a4..3e4247b 100644
--- a/media/codec2/components/gav1/C2SoftGav1Dec.cpp
+++ b/media/codec2/components/gav1/C2SoftGav1Dec.cpp
@@ -29,6 +29,14 @@
#include <media/stagefright/foundation/AUtils.h>
#include <media/stagefright/foundation/MediaDefs.h>
+// libyuv version required for I410ToAB30Matrix and I210ToAB30Matrix.
+#if LIBYUV_VERSION >= 1780
+#include <algorithm>
+#define HAVE_LIBYUV_I410_I210_TO_AB30 1
+#else
+#define HAVE_LIBYUV_I410_I210_TO_AB30 0
+#endif
+
namespace android {
// codecname set and passed in as a compile flag from Android.bp
@@ -726,6 +734,24 @@
}
}
+void C2SoftGav1Dec::setError(const std::unique_ptr<C2Work> &work, c2_status_t error) {
+ mSignalledError = true;
+ work->result = error;
+ work->workletsProcessed = 1u;
+}
+
+bool C2SoftGav1Dec::allocTmpFrameBuffer(size_t size) {
+ if (size > mTmpFrameBufferSize) {
+ mTmpFrameBuffer = std::make_unique<uint16_t[]>(size);
+ if (mTmpFrameBuffer == nullptr) {
+ mTmpFrameBufferSize = 0;
+ return false;
+ }
+ mTmpFrameBufferSize = size;
+ }
+ return true;
+}
+
bool C2SoftGav1Dec::outputBuffer(const std::shared_ptr<C2BlockPool> &pool,
const std::unique_ptr<C2Work> &work) {
if (!(work && pool)) return false;
@@ -772,6 +798,7 @@
getHDRStaticParams(buffer, work);
getHDR10PlusInfoData(buffer, work);
+#if LIBYUV_VERSION < 1779
if (buffer->bitdepth == 10 &&
!(buffer->image_format == libgav1::kImageFormatYuv420 ||
buffer->image_format == libgav1::kImageFormatMonochrome400)) {
@@ -781,6 +808,7 @@
work->result = C2_CORRUPTED;
return false;
}
+#endif
const bool isMonochrome =
buffer->image_format == libgav1::kImageFormatMonochrome400;
@@ -798,6 +826,7 @@
allowRGBA1010102 = true;
}
format = getHalPixelFormatForBitDepth10(allowRGBA1010102);
+#if !HAVE_LIBYUV_I410_I210_TO_AB30
if ((format == HAL_PIXEL_FORMAT_RGBA_1010102) &&
(buffer->image_format != libgav1::kImageFormatYuv420)) {
ALOGE("Only YUV420 output is supported when targeting RGBA_1010102");
@@ -806,6 +835,7 @@
work->workletsProcessed = 1u;
return false;
}
+#endif
}
if (mHalPixelFormat != format) {
@@ -854,9 +884,6 @@
uint8_t *dstY = const_cast<uint8_t *>(wView.data()[C2PlanarLayout::PLANE_Y]);
uint8_t *dstU = const_cast<uint8_t *>(wView.data()[C2PlanarLayout::PLANE_U]);
uint8_t *dstV = const_cast<uint8_t *>(wView.data()[C2PlanarLayout::PLANE_V]);
- size_t srcYStride = buffer->stride[0];
- size_t srcUStride = buffer->stride[1];
- size_t srcVStride = buffer->stride[2];
C2PlanarLayout layout = wView.layout();
size_t dstYStride = layout.planes[C2PlanarLayout::PLANE_Y].rowInc;
@@ -867,26 +894,130 @@
const uint16_t *srcY = (const uint16_t *)buffer->plane[0];
const uint16_t *srcU = (const uint16_t *)buffer->plane[1];
const uint16_t *srcV = (const uint16_t *)buffer->plane[2];
+ size_t srcYStride = buffer->stride[0] / 2;
+ size_t srcUStride = buffer->stride[1] / 2;
+ size_t srcVStride = buffer->stride[2] / 2;
if (format == HAL_PIXEL_FORMAT_RGBA_1010102) {
- convertYUV420Planar16ToY410OrRGBA1010102(
- (uint32_t *)dstY, srcY, srcU, srcV, srcYStride / 2,
- srcUStride / 2, srcVStride / 2,
- dstYStride / sizeof(uint32_t), mWidth, mHeight,
- std::static_pointer_cast<const C2ColorAspectsStruct>(codedColorAspects));
+ bool processed = false;
+#if HAVE_LIBYUV_I410_I210_TO_AB30
+ if (buffer->image_format == libgav1::kImageFormatYuv444) {
+ libyuv::I410ToAB30Matrix(srcY, srcYStride, srcU, srcUStride, srcV, srcVStride,
+ dstY, dstYStride, &libyuv::kYuvV2020Constants,
+ mWidth, mHeight);
+ processed = true;
+ } else if (buffer->image_format == libgav1::kImageFormatYuv422) {
+ libyuv::I210ToAB30Matrix(srcY, srcYStride, srcU, srcUStride, srcV, srcVStride,
+ dstY, dstYStride, &libyuv::kYuvV2020Constants,
+ mWidth, mHeight);
+ processed = true;
+ }
+#endif // HAVE_LIBYUV_I410_I210_TO_AB30
+ if (!processed) {
+ if (isMonochrome) {
+ const size_t tmpSize = mWidth;
+ const bool needFill = tmpSize > mTmpFrameBufferSize;
+ if (!allocTmpFrameBuffer(tmpSize)) {
+ ALOGE("Error allocating temp conversion buffer (%zu bytes)", tmpSize);
+ setError(work, C2_NO_MEMORY);
+ return false;
+ }
+ srcU = srcV = mTmpFrameBuffer.get();
+ srcUStride = srcVStride = 0;
+ if (needFill) {
+ std::fill_n(mTmpFrameBuffer.get(), tmpSize, 512);
+ }
+ }
+ convertYUV420Planar16ToY410OrRGBA1010102(
+ (uint32_t *)dstY, srcY, srcU, srcV, srcYStride,
+ srcUStride, srcVStride,
+ dstYStride / sizeof(uint32_t), mWidth, mHeight,
+ std::static_pointer_cast<const C2ColorAspectsStruct>(codedColorAspects));
+ }
} else if (format == HAL_PIXEL_FORMAT_YCBCR_P010) {
+ dstYStride /= 2;
+ dstUStride /= 2;
+ dstVStride /= 2;
+#if LIBYUV_VERSION >= 1779
+ if (buffer->image_format == libgav1::kImageFormatYuv444 ||
+ buffer->image_format == libgav1::kImageFormatYuv422) {
+ // TODO(https://crbug.com/libyuv/952): replace this block with libyuv::I410ToP010 and
+ // libyuv::I210ToP010 when they are available.
+ // Note it may be safe to alias dstY in I010ToP010, but the libyuv API doesn't make any
+ // guarantees.
+ const size_t tmpSize = dstYStride * mHeight + dstUStride * align(mHeight, 2);
+ if (!allocTmpFrameBuffer(tmpSize)) {
+ ALOGE("Error allocating temp conversion buffer (%zu bytes)", tmpSize);
+ setError(work, C2_NO_MEMORY);
+ return false;
+ }
+ uint16_t *const tmpY = mTmpFrameBuffer.get();
+ uint16_t *const tmpU = tmpY + dstYStride * mHeight;
+ uint16_t *const tmpV = tmpU + dstUStride * align(mHeight, 2) / 2;
+ if (buffer->image_format == libgav1::kImageFormatYuv444) {
+ libyuv::I410ToI010(srcY, srcYStride, srcU, srcUStride, srcV, srcVStride,
+ tmpY, dstYStride, tmpU, dstUStride, tmpV, dstUStride,
+ mWidth, mHeight);
+ } else {
+ libyuv::I210ToI010(srcY, srcYStride, srcU, srcUStride, srcV, srcVStride,
+ tmpY, dstYStride, tmpU, dstUStride, tmpV, dstUStride,
+ mWidth, mHeight);
+ }
+ libyuv::I010ToP010(tmpY, dstYStride, tmpU, dstUStride, tmpV, dstVStride,
+ (uint16_t*)dstY, dstYStride, (uint16_t*)dstU, dstUStride,
+ mWidth, mHeight);
+ } else {
+ convertYUV420Planar16ToP010((uint16_t *)dstY, (uint16_t *)dstU, srcY, srcU, srcV,
+ srcYStride, srcUStride, srcVStride, dstYStride,
+ dstUStride, mWidth, mHeight, isMonochrome);
+ }
+#else // LIBYUV_VERSION < 1779
convertYUV420Planar16ToP010((uint16_t *)dstY, (uint16_t *)dstU, srcY, srcU, srcV,
- srcYStride / 2, srcUStride / 2, srcVStride / 2, dstYStride / 2,
- dstUStride / 2, mWidth, mHeight, isMonochrome);
+ srcYStride, srcUStride, srcVStride, dstYStride,
+ dstUStride, mWidth, mHeight, isMonochrome);
+#endif // LIBYUV_VERSION >= 1779
} else {
- convertYUV420Planar16ToYV12(dstY, dstU, dstV, srcY, srcU, srcV, srcYStride / 2,
- srcUStride / 2, srcVStride / 2, dstYStride, dstUStride, mWidth,
- mHeight, isMonochrome);
+#if LIBYUV_VERSION >= 1779
+ if (buffer->image_format == libgav1::kImageFormatYuv444) {
+ // TODO(https://crbug.com/libyuv/950): replace this block with libyuv::I410ToI420 when
+ // it's available.
+ const size_t tmpSize = dstYStride * mHeight + dstUStride * align(mHeight, 2);
+ if (!allocTmpFrameBuffer(tmpSize)) {
+ ALOGE("Error allocating temp conversion buffer (%zu bytes)", tmpSize);
+ setError(work, C2_NO_MEMORY);
+ return false;
+ }
+ uint16_t *const tmpY = mTmpFrameBuffer.get();
+ uint16_t *const tmpU = tmpY + dstYStride * mHeight;
+ uint16_t *const tmpV = tmpU + dstUStride * align(mHeight, 2) / 2;
+ libyuv::I410ToI010(srcY, srcYStride, srcU, srcUStride, srcV, srcVStride,
+ tmpY, dstYStride, tmpU, dstUStride, tmpV, dstVStride,
+ mWidth, mHeight);
+ libyuv::I010ToI420(tmpY, dstYStride, tmpU, dstUStride, tmpV, dstUStride,
+ dstY, dstYStride, dstU, dstUStride, dstV, dstVStride,
+ mWidth, mHeight);
+ } else if (buffer->image_format == libgav1::kImageFormatYuv422) {
+ libyuv::I210ToI420(srcY, srcYStride, srcU, srcUStride, srcV, srcVStride,
+ dstY, dstYStride, dstU, dstUStride, dstV, dstVStride,
+ mWidth, mHeight);
+ } else {
+ convertYUV420Planar16ToYV12(dstY, dstU, dstV, srcY, srcU, srcV, srcYStride,
+ srcUStride, srcVStride, dstYStride, dstUStride,
+ mWidth, mHeight, isMonochrome);
+ }
+#else // LIBYUV_VERSION < 1779
+ convertYUV420Planar16ToYV12(dstY, dstU, dstV, srcY, srcU, srcV, srcYStride,
+ srcUStride, srcVStride, dstYStride, dstUStride,
+ mWidth, mHeight, isMonochrome);
+#endif // LIBYUV_VERSION >= 1779
}
} else {
const uint8_t *srcY = (const uint8_t *)buffer->plane[0];
const uint8_t *srcU = (const uint8_t *)buffer->plane[1];
const uint8_t *srcV = (const uint8_t *)buffer->plane[2];
+ size_t srcYStride = buffer->stride[0];
+ size_t srcUStride = buffer->stride[1];
+ size_t srcVStride = buffer->stride[2];
if (buffer->image_format == libgav1::kImageFormatYuv444) {
libyuv::I444ToI420(srcY, srcYStride, srcU, srcUStride, srcV, srcVStride,
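allocTmpFrameBuffer above implements a grow-only scratch buffer: it reallocates only when a frame needs more space than any earlier frame, so steady-state decoding settles at a single allocation. Reduced to its essence with a hypothetical type, same idea:

    #include <cstddef>
    #include <cstdint>
    #include <memory>

    // Grow-only scratch buffer: capacity never shrinks, so repeated calls
    // with the same (or a smaller) size are allocation-free.
    struct ScratchBuffer {
        std::unique_ptr<uint16_t[]> data;
        size_t capacity = 0;

        bool ensure(size_t size) {
            if (size > capacity) {
                data = std::make_unique<uint16_t[]>(size);
                capacity = size;
            }
            return size == 0 || data != nullptr;
        }
    };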
diff --git a/media/codec2/components/gav1/C2SoftGav1Dec.h b/media/codec2/components/gav1/C2SoftGav1Dec.h
index f0e14d7..c3b27ea 100644
--- a/media/codec2/components/gav1/C2SoftGav1Dec.h
+++ b/media/codec2/components/gav1/C2SoftGav1Dec.h
@@ -19,6 +19,8 @@
#include <inttypes.h>
+#include <memory>
+
#include <media/stagefright/foundation/ColorUtils.h>
#include <SimpleC2Component.h>
@@ -60,6 +62,9 @@
uint32_t mHeight;
bool mSignalledOutputEos;
bool mSignalledError;
+ // Used during 10-bit I444/I422 to 10-bit P010 & 8-bit I420 conversions.
+ std::unique_ptr<uint16_t[]> mTmpFrameBuffer;
+ size_t mTmpFrameBufferSize = 0;
C2StreamHdrStaticMetadataInfo::output mHdrStaticMetadataInfo;
std::unique_ptr<C2StreamHdr10PlusInfo::output> mHdr10PlusInfo = nullptr;
@@ -97,6 +102,9 @@
void destroyDecoder();
void finishWork(uint64_t index, const std::unique_ptr<C2Work>& work,
const std::shared_ptr<C2GraphicBlock>& block);
+ // Sets |work->result| and mSignalledError.
+ void setError(const std::unique_ptr<C2Work> &work, c2_status_t error);
+ bool allocTmpFrameBuffer(size_t size);
bool outputBuffer(const std::shared_ptr<C2BlockPool>& pool,
const std::unique_ptr<C2Work>& work);
c2_status_t drainInternal(uint32_t drainMode,
diff --git a/media/codec2/core/include/C2Config.h b/media/codec2/core/include/C2Config.h
index 417b261..9a3399d 100644
--- a/media/codec2/core/include/C2Config.h
+++ b/media/codec2/core/include/C2Config.h
@@ -2016,7 +2016,8 @@
constexpr char C2_PARAMKEY_MAX_CODED_CHANNEL_COUNT[] = "coded.max-channel-count";
/**
- * Audio channel mask. Used by decoder to express audio channel mask of decoded content.
+ * Audio channel mask. Used by decoder to express audio channel mask of decoded content,
+ * or by encoder for the channel mask of the encoded content once decoded.
* Channel representation is specified according to the Java android.media.AudioFormat
* CHANNEL_OUT_* constants.
*/
diff --git a/media/codec2/hal/client/client.cpp b/media/codec2/hal/client/client.cpp
index 829a179..97c0806 100644
--- a/media/codec2/hal/client/client.cpp
+++ b/media/codec2/hal/client/client.cpp
@@ -610,16 +610,9 @@
// Codec2Client
Codec2Client::Codec2Client(sp<Base> const& base,
+ sp<IConfigurable> const& configurable,
size_t serviceIndex)
- : Configurable{
- [base]() -> sp<IConfigurable> {
- Return<sp<IConfigurable>> transResult =
- base->getConfigurable();
- return transResult.isOk() ?
- static_cast<sp<IConfigurable>>(transResult) :
- nullptr;
- }()
- },
+ : Configurable{configurable},
mBase1_0{base},
mBase1_1{Base1_1::castFrom(base)},
mBase1_2{Base1_2::castFrom(base)},
@@ -1003,7 +996,11 @@
CHECK(baseStore) << "Codec2 service \"" << name << "\""
" inaccessible for unknown reasons.";
LOG(VERBOSE) << "Client to Codec2 service \"" << name << "\" created";
- return std::make_shared<Codec2Client>(baseStore, index);
+ Return<sp<IConfigurable>> transResult = baseStore->getConfigurable();
+ CHECK(transResult.isOk()) << "Codec2 service \"" << name << "\""
+ "does not have IConfigurable.";
+ sp<IConfigurable> configurable = static_cast<sp<IConfigurable>>(transResult);
+ return std::make_shared<Codec2Client>(baseStore, configurable, index);
}
c2_status_t Codec2Client::ForAllServices(
diff --git a/media/codec2/hal/client/include/codec2/hidl/client.h b/media/codec2/hal/client/include/codec2/hidl/client.h
index efbf179..5267394 100644
--- a/media/codec2/hal/client/include/codec2/hidl/client.h
+++ b/media/codec2/hal/client/include/codec2/hidl/client.h
@@ -146,6 +146,8 @@
typedef ::android::hardware::media::c2::V1_2::IComponentStore Base1_2;
typedef Base1_0 Base;
+ typedef ::android::hardware::media::c2::V1_0::IConfigurable IConfigurable;
+
struct Listener;
typedef Codec2ConfigurableClient Configurable;
@@ -230,8 +232,11 @@
static std::shared_ptr<InputSurface> CreateInputSurface(
char const* serviceName = nullptr);
- // base cannot be null.
- Codec2Client(sp<Base> const& base, size_t serviceIndex);
+ // Neither base nor configurable can be null.
+ Codec2Client(
+ sp<Base> const& base,
+ sp<IConfigurable> const& configurable,
+ size_t serviceIndex);
protected:
sp<Base1_0> mBase1_0;
diff --git a/media/codec2/sfplugin/Android.bp b/media/codec2/sfplugin/Android.bp
index ecd5463..e4daf5c 100644
--- a/media/codec2/sfplugin/Android.bp
+++ b/media/codec2/sfplugin/Android.bp
@@ -71,10 +71,11 @@
"libstagefright_codecbase",
"libstagefright_foundation",
"libstagefright_omx",
- "libstagefright_surface_utils",
+ "libstagefright_surface_utils",
"libstagefright_xmlparser",
"libui",
"libutils",
+ "server_configurable_flags",
],
export_shared_lib_headers: [
diff --git a/media/codec2/sfplugin/CCodec.cpp b/media/codec2/sfplugin/CCodec.cpp
index 2db6f2f..5e53acc 100644
--- a/media/codec2/sfplugin/CCodec.cpp
+++ b/media/codec2/sfplugin/CCodec.cpp
@@ -2569,43 +2569,6 @@
}
void CCodec::initiateReleaseIfStuck() {
- bool tunneled = false;
- bool isMediaTypeKnown = false;
- {
- static const std::set<std::string> kKnownMediaTypes{
- MIMETYPE_VIDEO_VP8,
- MIMETYPE_VIDEO_VP9,
- MIMETYPE_VIDEO_AV1,
- MIMETYPE_VIDEO_AVC,
- MIMETYPE_VIDEO_HEVC,
- MIMETYPE_VIDEO_MPEG4,
- MIMETYPE_VIDEO_H263,
- MIMETYPE_VIDEO_MPEG2,
- MIMETYPE_VIDEO_RAW,
- MIMETYPE_VIDEO_DOLBY_VISION,
-
- MIMETYPE_AUDIO_AMR_NB,
- MIMETYPE_AUDIO_AMR_WB,
- MIMETYPE_AUDIO_MPEG,
- MIMETYPE_AUDIO_AAC,
- MIMETYPE_AUDIO_QCELP,
- MIMETYPE_AUDIO_VORBIS,
- MIMETYPE_AUDIO_OPUS,
- MIMETYPE_AUDIO_G711_ALAW,
- MIMETYPE_AUDIO_G711_MLAW,
- MIMETYPE_AUDIO_RAW,
- MIMETYPE_AUDIO_FLAC,
- MIMETYPE_AUDIO_MSGSM,
- MIMETYPE_AUDIO_AC3,
- MIMETYPE_AUDIO_EAC3,
-
- MIMETYPE_IMAGE_ANDROID_HEIC,
- };
- Mutexed<std::unique_ptr<Config>>::Locked configLocked(mConfig);
- const std::unique_ptr<Config> &config = *configLocked;
- tunneled = config->mTunneled;
- isMediaTypeKnown = (kKnownMediaTypes.count(config->mCodingMediaType) != 0);
- }
std::string name;
bool pendingDeadline = false;
{
@@ -2617,16 +2580,6 @@
pendingDeadline = true;
}
}
- if (!tunneled && isMediaTypeKnown && name.empty()) {
- constexpr std::chrono::steady_clock::duration kWorkDurationThreshold = 3s;
- std::chrono::steady_clock::duration elapsed = mChannel->elapsed();
- if (elapsed >= kWorkDurationThreshold) {
- name = "queue";
- }
- if (elapsed > 0s) {
- pendingDeadline = true;
- }
- }
if (name.empty()) {
// We're not stuck.
if (pendingDeadline) {
diff --git a/media/codec2/sfplugin/CCodecBufferChannel.cpp b/media/codec2/sfplugin/CCodecBufferChannel.cpp
index 1c86ba9..881c74e 100644
--- a/media/codec2/sfplugin/CCodecBufferChannel.cpp
+++ b/media/codec2/sfplugin/CCodecBufferChannel.cpp
@@ -34,6 +34,7 @@
#include <android/hardware/cas/native/1.0/IDescrambler.h>
#include <android/hardware/drm/1.0/types.h>
+#include <android-base/parseint.h>
#include <android-base/properties.h>
#include <android-base/stringprintf.h>
#include <binder/MemoryBase.h>
@@ -52,6 +53,7 @@
#include <media/stagefright/SurfaceUtils.h>
#include <media/MediaCodecBuffer.h>
#include <mediadrm/ICrypto.h>
+#include <server_configurable_flags/get_flags.h>
#include <system/window.h>
#include "CCodecBufferChannel.h"
@@ -75,7 +77,6 @@
namespace {
constexpr size_t kSmoothnessFactor = 4;
-constexpr size_t kRenderingDepth = 3;
// This is for keeping IGBP's buffer dropping logic in legacy mode other
// than making it non-blocking. Do not change this value.
@@ -149,10 +150,11 @@
mFirstValidFrameIndex(0u),
mIsSurfaceToDisplay(false),
mHasPresentFenceTimes(false),
+ mRenderingDepth(0u),
mMetaMode(MODE_NONE),
mInputMetEos(false),
mSendEncryptedInfoBuffer(false) {
- mOutputSurface.lock()->maxDequeueBuffers = kSmoothnessFactor + kRenderingDepth;
+ mOutputSurface.lock()->maxDequeueBuffers = kSmoothnessFactor;
{
Mutexed<Input>::Locked input(mInput);
input->buffers.reset(new DummyInputBuffers(""));
@@ -167,11 +169,15 @@
Mutexed<Output>::Locked output(mOutput);
output->outputDelay = 0u;
output->numSlots = kSmoothnessFactor;
+ output->bounded = false;
}
{
Mutexed<BlockPools>::Locked pools(mBlockPools);
pools->outputPoolId = C2BlockPool::BASIC_LINEAR;
}
+ std::string value = server_configurable_flags::GetServerConfigurableFlag(
+ "media_native", "ccodec_rendering_depth", "0");
+ android::base::ParseInt(value, &mRenderingDepth);
}
CCodecBufferChannel::~CCodecBufferChannel() {
@@ -727,7 +733,7 @@
Mutexed<Output>::Locked output(mOutput);
if (!output->buffers ||
output->buffers->hasPending() ||
- output->buffers->numActiveSlots() >= output->numSlots) {
+ (!output->bounded && output->buffers->numActiveSlots() >= output->numSlots)) {
return;
}
}
@@ -1017,7 +1023,7 @@
int hasPresentFenceTimes = 0;
window->query(window, NATIVE_WINDOW_FRAME_TIMESTAMPS_SUPPORTS_PRESENT, &hasPresentFenceTimes);
mHasPresentFenceTimes = hasPresentFenceTimes == 1;
- if (mHasPresentFenceTimes) {
+ if (!mHasPresentFenceTimes) {
ALOGI("Using latch times for frame rendered signals - present fences not supported");
}
}
@@ -1386,7 +1392,7 @@
{
Mutexed<OutputSurface>::Locked output(mOutputSurface);
maxDequeueCount = output->maxDequeueBuffers = numOutputSlots +
- reorderDepth.value + kRenderingDepth;
+ reorderDepth.value + mRenderingDepth;
outputSurface = output->surface ?
output->surface->getIGraphicBufferProducer() : nullptr;
if (outputSurface) {
@@ -1509,6 +1515,7 @@
Mutexed<Output>::Locked output(mOutput);
output->outputDelay = outputDelayValue;
output->numSlots = numOutputSlots;
+ output->bounded = bool(outputSurface);
if (graphic) {
if (outputSurface || !buffersBoundToCodec) {
output->buffers.reset(new GraphicOutputBuffers(mName));
@@ -2053,7 +2060,7 @@
{
Mutexed<OutputSurface>::Locked output(mOutputSurface);
maxDequeueCount = output->maxDequeueBuffers =
- numOutputSlots + reorderDepth + kRenderingDepth;
+ numOutputSlots + reorderDepth + mRenderingDepth;
if (output->surface) {
output->surface->setMaxDequeuedBufferCount(output->maxDequeueBuffers);
}
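The rendering depth is now fetched from the server_configurable_flags library instead of the old kRenderingDepth constant. A minimal standalone sketch of the same plumbing, matching the namespace and flag name used above:

    #include <string>

    #include <android-base/parseint.h>
    #include <server_configurable_flags/get_flags.h>

    // Reads the flag as a string and parses it; ParseInt leaves the output
    // untouched on failure, so depth stays at the initialized default of 0.
    int readRenderingDepth() {
        int depth = 0;
        std::string value = server_configurable_flags::GetServerConfigurableFlag(
                "media_native", "ccodec_rendering_depth", "0");
        android::base::ParseInt(value, &depth);
        return depth;
    }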
diff --git a/media/codec2/sfplugin/CCodecBufferChannel.h b/media/codec2/sfplugin/CCodecBufferChannel.h
index 0d25d6d..2d87aa9 100644
--- a/media/codec2/sfplugin/CCodecBufferChannel.h
+++ b/media/codec2/sfplugin/CCodecBufferChannel.h
@@ -321,6 +321,9 @@
std::unique_ptr<OutputBuffers> buffers;
size_t numSlots;
uint32_t outputDelay;
+ // true iff the underlying block pool is bounded, e.g. a BufferQueue-based
+ // block pool would be bounded by the BufferQueue.
+ bool bounded;
};
Mutexed<Output> mOutput;
Mutexed<std::list<std::unique_ptr<C2Work>>> mFlushedConfigs;
@@ -341,6 +344,7 @@
std::map<uint64_t, int> rotation;
};
Mutexed<OutputSurface> mOutputSurface;
+ int mRenderingDepth;
struct BlockPools {
C2Allocator::id_t inputAllocatorId;
diff --git a/media/codec2/sfplugin/CCodecConfig.cpp b/media/codec2/sfplugin/CCodecConfig.cpp
index a893bc0..6c10549 100644
--- a/media/codec2/sfplugin/CCodecConfig.cpp
+++ b/media/codec2/sfplugin/CCodecConfig.cpp
@@ -940,6 +940,9 @@
add(ConfigMapper(KEY_CHANNEL_MASK, C2_PARAMKEY_CHANNEL_MASK, "value")
.limitTo(D::AUDIO & D::DECODER & D::READ));
+ add(ConfigMapper(KEY_CHANNEL_MASK, C2_PARAMKEY_CHANNEL_MASK, "value")
+ .limitTo(D::AUDIO & D::ENCODER & D::CONFIG));
+
add(ConfigMapper(KEY_AAC_SBR_MODE, C2_PARAMKEY_AAC_SBR_MODE, "value")
.limitTo(D::AUDIO & D::ENCODER & (D::CONFIG | D::PARAM | D::READ))
.withMapper([](C2Value v) -> C2Value {
diff --git a/media/codec2/sfplugin/Codec2InfoBuilder.cpp b/media/codec2/sfplugin/Codec2InfoBuilder.cpp
index 453a0d2..e18dd59 100644
--- a/media/codec2/sfplugin/Codec2InfoBuilder.cpp
+++ b/media/codec2/sfplugin/Codec2InfoBuilder.cpp
@@ -637,6 +637,10 @@
if (encoder) {
attrs |= MediaCodecInfo::kFlagIsEncoder;
}
+ if (codec.quirkSet.find("attribute::enforce-xml-capabilities") !=
+ codec.quirkSet.end()) {
+ attrs |= MediaCodecInfo::kFlagIsEnforceXmlCapabilities;
+ }
if (trait.owner == "software") {
attrs |= MediaCodecInfo::kFlagIsSoftwareOnly;
} else {
diff --git a/media/libaudioclient/ToneGenerator.cpp b/media/libaudioclient/ToneGenerator.cpp
index 7c0ce57..9c4ccb8 100644
--- a/media/libaudioclient/ToneGenerator.cpp
+++ b/media/libaudioclient/ToneGenerator.cpp
@@ -1306,7 +1306,7 @@
streamType = AUDIO_STREAM_DTMF;
}
attr = AudioSystem::streamTypeToAttributes(streamType);
- attr.flags = AUDIO_FLAG_LOW_LATENCY;
+ attr.flags = static_cast<audio_flags_mask_t>(attr.flags | AUDIO_FLAG_LOW_LATENCY);
const size_t frameCount = mProcessSize;
status_t status = mpAudioTrack->set(
diff --git a/media/libaudioclient/include/media/AidlConversion.h b/media/libaudioclient/include/media/AidlConversion.h
index 5bd0114..b0d48b7 100644
--- a/media/libaudioclient/include/media/AidlConversion.h
+++ b/media/libaudioclient/include/media/AidlConversion.h
@@ -72,11 +72,6 @@
media::audio::common::AudioPortDeviceExt* aidl,
media::AudioPortDeviceExtSys* aidlDeviceExt);
-ConversionResult<audio_stream_type_t> aidl2legacy_AudioStreamType_audio_stream_type_t(
- media::audio::common::AudioStreamType aidl);
-ConversionResult<media::audio::common::AudioStreamType>
-legacy2aidl_audio_stream_type_t_AudioStreamType(audio_stream_type_t legacy);
-
ConversionResult<audio_port_config_mix_ext> aidl2legacy_AudioPortMixExt(
const media::audio::common::AudioPortMixExt& aidl, media::AudioPortRole role,
const media::AudioPortMixExtSys& aidlMixExt);
diff --git a/media/libaudiohal/impl/EffectsFactoryHalAidl.cpp b/media/libaudiohal/impl/EffectsFactoryHalAidl.cpp
index 0dcb8ee..7b9088e 100644
--- a/media/libaudiohal/impl/EffectsFactoryHalAidl.cpp
+++ b/media/libaudiohal/impl/EffectsFactoryHalAidl.cpp
@@ -40,6 +40,8 @@
using ::aidl::android::hardware::audio::effect::Descriptor;
using ::aidl::android::hardware::audio::effect::IFactory;
using ::aidl::android::hardware::audio::effect::Processing;
+using ::aidl::android::media::audio::common::AudioSource;
+using ::aidl::android::media::audio::common::AudioStreamType;
using ::aidl::android::media::audio::common::AudioUuid;
using ::android::base::unexpected;
using ::android::detail::AudioHalVersionInfo;
@@ -96,7 +98,13 @@
return list;
}()),
mEffectCount(mNonProxyDescList.size() + mProxyDescList.size()),
- mEffectProcessings(nullptr /* TODO: add AIDL implementation */) {
+ mAidlProcessings([this]() -> std::vector<Processing> {
+ std::vector<Processing> processings;
+ if (!mFactory || !mFactory->queryProcessing(std::nullopt, &processings).isOk()) {
+ ALOGE("%s queryProcessing failed", __func__);
+ }
+ return processings;
+ }()) {
ALOG_ASSERT(mFactory != nullptr, "Provided IEffectsFactory service is NULL");
ALOGI("%s with %zu nonProxyEffects and %zu proxyEffects", __func__, mNonProxyDescList.size(),
mProxyDescList.size());
@@ -274,15 +282,79 @@
}
std::shared_ptr<const effectsConfig::Processings> EffectsFactoryHalAidl::getProcessings() const {
- return mEffectProcessings;
+
+ auto getConfigEffectWithDescriptor =
+ [](const auto& desc) -> std::shared_ptr<const effectsConfig::Effect> {
+ effectsConfig::Effect effect = {.name = desc.common.name, .isProxy = false};
+ if (const auto uuid =
+ ::aidl::android::aidl2legacy_AudioUuid_audio_uuid_t(desc.common.id.uuid);
+ uuid.ok()) {
+ static_cast<effectsConfig::EffectImpl&>(effect).uuid = uuid.value();
+ return std::make_shared<const effectsConfig::Effect>(effect);
+ } else {
+ return nullptr;
+ }
+ };
+
+ auto getConfigProcessingWithAidlProcessing =
+ [&](const auto& aidlProcess, std::vector<effectsConfig::InputStream>& preprocess,
+ std::vector<effectsConfig::OutputStream>& postprocess) {
+ if (aidlProcess.type.getTag() == Processing::Type::streamType) {
+ AudioStreamType aidlType =
+ aidlProcess.type.template get<Processing::Type::streamType>();
+ const auto type =
+ ::aidl::android::aidl2legacy_AudioStreamType_audio_stream_type_t(
+ aidlType);
+ if (!type.ok()) {
+ return;
+ }
+
+ std::vector<std::shared_ptr<const effectsConfig::Effect>> effects;
+ std::transform(aidlProcess.ids.begin(), aidlProcess.ids.end(),
+ std::back_inserter(effects), getConfigEffectWithDescriptor);
+ effectsConfig::OutputStream stream = {.type = type.value(),
+ .effects = std::move(effects)};
+ postprocess.emplace_back(stream);
+ } else if (aidlProcess.type.getTag() == Processing::Type::source) {
+ AudioSource aidlType =
+ aidlProcess.type.template get<Processing::Type::source>();
+ const auto type =
+ ::aidl::android::aidl2legacy_AudioSource_audio_source_t(aidlType);
+ if (!type.ok()) {
+ return;
+ }
+
+ std::vector<std::shared_ptr<const effectsConfig::Effect>> effects;
+ std::transform(aidlProcess.ids.begin(), aidlProcess.ids.end(),
+ std::back_inserter(effects), getConfigEffectWithDescriptor);
+ effectsConfig::InputStream stream = {.type = type.value(),
+ .effects = std::move(effects)};
+ preprocess.emplace_back(stream);
+ }
+ };
+
+ static std::shared_ptr<const effectsConfig::Processings> processings(
+ [&]() -> std::shared_ptr<const effectsConfig::Processings> {
+ std::vector<effectsConfig::InputStream> preprocess;
+ std::vector<effectsConfig::OutputStream> postprocess;
+ for (const auto& processing : mAidlProcessings) {
+ getConfigProcessingWithAidlProcessing(processing, preprocess, postprocess);
+ }
+
+ if (0 == preprocess.size() && 0 == postprocess.size()) {
+ return nullptr;
+ }
+
+ return std::make_shared<const effectsConfig::Processings>(
+ effectsConfig::Processings({.preprocess = std::move(preprocess),
+ .postprocess = std::move(postprocess)}));
+ }());
+
+ return processings;
}
+// Return 0 for AIDL, as the AIDL interface is not aware of the configuration file.
::android::error::Result<size_t> EffectsFactoryHalAidl::getSkippedElements() const {
- if (!mEffectProcessings) {
- return ::android::base::unexpected(BAD_VALUE);
- }
-
- // Only return 0 for AIDL, because the AIDL interface doesn't aware of configuration file
return 0;
}
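getProcessings caches its translation in a function-local static, so the AIDL-to-config conversion runs once per process and later calls return the cached pointer; the cache therefore reflects whichever factory instance called first, which presumably matches the factory's effectively-singleton use. The idiom, reduced to a sketch with a hypothetical Config type:

    #include <memory>

    struct Config { /* expensive-to-build data */ };

    std::shared_ptr<const Config> getConfig() {
        // Initialized exactly once, on first call; C++11 guarantees this is
        // thread-safe ("magic statics"). Later calls just copy the pointer.
        static const std::shared_ptr<const Config> cached = [] {
            auto config = std::make_shared<Config>();
            // ... one-time construction work ...
            return std::shared_ptr<const Config>(std::move(config));
        }();
        return cached;
    }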
diff --git a/media/libaudiohal/impl/EffectsFactoryHalAidl.h b/media/libaudiohal/impl/EffectsFactoryHalAidl.h
index 70a7012..39beea2 100644
--- a/media/libaudiohal/impl/EffectsFactoryHalAidl.h
+++ b/media/libaudiohal/impl/EffectsFactoryHalAidl.h
@@ -21,6 +21,7 @@
#include <mutex>
#include <aidl/android/hardware/audio/effect/IFactory.h>
+#include <aidl/android/hardware/audio/effect/Processing.h>
#include <android-base/thread_annotations.h>
#include <media/audiohal/EffectsFactoryHalInterface.h>
#include <system/thread_defs.h>
@@ -82,7 +83,7 @@
// total number of effects including proxy effects
const size_t mEffectCount;
// Query result of pre and post processing from effect factory
- const std::shared_ptr<const effectsConfig::Processings> mEffectProcessings;
+ const std::vector<Processing> mAidlProcessings;
std::mutex mLock;
uint64_t mEffectIdCounter GUARDED_BY(mLock) = 0; // Align with HIDL (0 is INVALID_ID)
diff --git a/media/libaudiohal/include/media/audiohal/AudioHalVersionInfo.h b/media/libaudiohal/include/media/audiohal/AudioHalVersionInfo.h
index 6e09463..2323ed6 100644
--- a/media/libaudiohal/include/media/audiohal/AudioHalVersionInfo.h
+++ b/media/libaudiohal/include/media/audiohal/AudioHalVersionInfo.h
@@ -30,6 +30,8 @@
minor = halMinor;
}
+ bool isHidl() const { return type == Type::HIDL; }
+
Type getType() const { return type; }
int getMajorVersion() const { return major; }
diff --git a/media/libaudiohal/tests/EffectsFactoryHalInterface_test.cpp b/media/libaudiohal/tests/EffectsFactoryHalInterface_test.cpp
index c076ccc..63f895f 100644
--- a/media/libaudiohal/tests/EffectsFactoryHalInterface_test.cpp
+++ b/media/libaudiohal/tests/EffectsFactoryHalInterface_test.cpp
@@ -15,6 +15,7 @@
*/
//#define LOG_NDEBUG 0
+#include <algorithm>
#include <cstddef>
#include <cstdint>
#include <cstring>
@@ -92,6 +93,47 @@
}
}
+TEST(libAudioHalTest, getProcessings) {
+ auto factory = EffectsFactoryHalInterface::create();
+ ASSERT_NE(nullptr, factory);
+
+ const auto &processings = factory->getProcessings();
+ if (processings) {
+ EXPECT_NE(0UL, processings->preprocess.size() + processings->postprocess.size() +
+ processings->deviceprocess.size());
+
+ auto processingChecker = [](const auto& processings) {
+ if (processings.size() != 0) {
+ // any process needs at least 1 effect inside
+ std::for_each(processings.begin(), processings.end(), [](const auto& process) {
+ EXPECT_NE(0ul, process.effects.size());
+ // any effect should have a valid name string, and must not be a proxy
+ for (const auto& effect : process.effects) {
+ SCOPED_TRACE("Effect: {" +
+ (effect == nullptr
+ ? "NULL}"
+ : ("{name: " + effect->name + ", isproxy: " +
+ (effect->isProxy ? "true" : "false") + ", sw: " +
+ (effect->libSw ? "non-null" : "null") + ", hw: " +
+ (effect->libHw ? "non-null" : "null") + "}")));
+ EXPECT_NE(nullptr, effect);
+ EXPECT_NE("", effect->name);
+ EXPECT_EQ(false, effect->isProxy);
+ EXPECT_EQ(nullptr, effect->libSw);
+ EXPECT_EQ(nullptr, effect->libHw);
+ }
+ });
+ }
+ };
+
+ processingChecker(processings->preprocess);
+ processingChecker(processings->postprocess);
+ processingChecker(processings->deviceprocess);
+ } else {
+ GTEST_SKIP() << "no processing found, skipping the test";
+ }
+}
+
TEST(libAudioHalTest, getHalVersion) {
auto factory = EffectsFactoryHalInterface::create();
ASSERT_NE(nullptr, factory);
diff --git a/media/libaudiousecasevalidation/UsecaseValidator.cpp b/media/libaudiousecasevalidation/UsecaseValidator.cpp
index d62df3a..bf532de 100644
--- a/media/libaudiousecasevalidation/UsecaseValidator.cpp
+++ b/media/libaudiousecasevalidation/UsecaseValidator.cpp
@@ -142,6 +142,9 @@
bool areFlagsValid(audio_flags_mask_t flags) {
ALOGV("areFlagsValid flags: %#x", flags);
+ if ((flags & (AUDIO_FLAG_SCO|AUDIO_FLAG_AUDIBILITY_ENFORCED|AUDIO_FLAG_BEACON)) != 0) {
+ return false;
+ }
if ((flags & AUDIO_FLAG_LOW_LATENCY) != 0) {
return true;
}
diff --git a/media/libeffects/factory/Android.bp b/media/libeffects/factory/Android.bp
index f56abbd..d94093e 100644
--- a/media/libeffects/factory/Android.bp
+++ b/media/libeffects/factory/Android.bp
@@ -21,6 +21,7 @@
vendor: true,
srcs: [
"EffectsFactory.c",
+ "EffectsConfigLoader.c",
"EffectsFactoryState.c",
"EffectsXmlConfigLoader.cpp",
],
diff --git a/media/libeffects/factory/EffectsConfigLoader.c b/media/libeffects/factory/EffectsConfigLoader.c
new file mode 100644
index 0000000..e23530e
--- /dev/null
+++ b/media/libeffects/factory/EffectsConfigLoader.c
@@ -0,0 +1,439 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "EffectsFactoryConfigLoader"
+//#define LOG_NDEBUG 0
+
+#include <dlfcn.h>
+#include <stdlib.h>
+#include <unistd.h>
+
+#include <cutils/config_utils.h>
+#include <cutils/misc.h>
+#include <log/log.h>
+
+#include <system/audio_effects/audio_effects_conf.h>
+
+#include "EffectsConfigLoader.h"
+#include "EffectsFactoryState.h"
+
+/////////////////////////////////////////////////
+// Local functions prototypes
+/////////////////////////////////////////////////
+
+static int loadEffectConfigFile(const char *path);
+static int loadLibraries(cnode *root);
+static int loadLibrary(cnode *root, const char *name);
+static int loadEffects(cnode *root);
+static int loadEffect(cnode *node);
+// Gets the effect pointed to by the passed node and adds it to the gSubEffectList
+static int addSubEffect(cnode *root);
+static lib_entry_t *getLibrary(const char *path);
+
+static lib_entry_t *gCachedLibrary; // last library accessed by getLibrary()
+
+int EffectLoadEffectConfig()
+{
+ if (access(AUDIO_EFFECT_VENDOR_CONFIG_FILE, R_OK) == 0) {
+ return loadEffectConfigFile(AUDIO_EFFECT_VENDOR_CONFIG_FILE);
+ } else if (access(AUDIO_EFFECT_DEFAULT_CONFIG_FILE, R_OK) == 0) {
+ return loadEffectConfigFile(AUDIO_EFFECT_DEFAULT_CONFIG_FILE);
+ }
+ return 0;
+}
+
+int loadEffectConfigFile(const char *path)
+{
+ cnode *root;
+ char *data;
+
+ data = load_file(path, NULL);
+ if (data == NULL) {
+ return -ENODEV;
+ }
+ root = config_node("", "");
+ config_load(root, data);
+ loadLibraries(root);
+ loadEffects(root);
+ config_free(root);
+ free(root);
+ free(data);
+
+ return 0;
+}
+
+int loadLibraries(cnode *root)
+{
+ cnode *node;
+
+ node = config_find(root, LIBRARIES_TAG);
+ if (node == NULL) {
+ return -ENOENT;
+ }
+ node = node->first_child;
+ while (node) {
+ loadLibrary(node, node->name);
+ node = node->next;
+ }
+ return 0;
+}
+
+#ifdef __LP64__
+// audio_effects.conf always specifies 32 bit lib path: convert to 64 bit path if needed
+static const char *kLibraryPathRoot[] =
+ {"/odm/lib64/soundfx", "/vendor/lib64/soundfx", "/system/lib64/soundfx"};
+#else
+static const char *kLibraryPathRoot[] =
+ {"/odm/lib/soundfx", "/vendor/lib/soundfx", "/system/lib/soundfx"};
+#endif
+
+static const int kLibraryPathRootSize =
+ (sizeof(kLibraryPathRoot) / sizeof(kLibraryPathRoot[0]));
+
+// Checks whether the library path passed as lib_path_in can be opened; if not,
+// tries the standard effect library directories with just the library name and
+// returns the corrected path in lib_path_out
+int checkLibraryPath(const char *lib_path_in, char *lib_path_out) {
+ char *str;
+ const char *lib_name;
+ size_t len;
+
+ if (lib_path_in == NULL || lib_path_out == NULL) {
+ return -EINVAL;
+ }
+
+ strlcpy(lib_path_out, lib_path_in, PATH_MAX);
+
+ // Try exact path first
+ str = strstr(lib_path_out, "/lib/soundfx/");
+ if (str == NULL) {
+ return -EINVAL;
+ }
+
+ // Extract library name from input path
+ len = str - lib_path_out;
+ lib_name = lib_path_in + len + strlen("/lib/soundfx/");
+
+ // Then try with library name and standard path names in order of preference
+ for (int i = 0; i < kLibraryPathRootSize; i++) {
+ char path[PATH_MAX];
+
+ snprintf(path,
+ PATH_MAX,
+ "%s/%s",
+ kLibraryPathRoot[i],
+ lib_name);
+ if (F_OK == access(path, 0)) {
+ strcpy(lib_path_out, path);
+ ALOGW_IF(strncmp(lib_path_out, lib_path_in, PATH_MAX) != 0,
+ "checkLibraryPath() corrected library path %s to %s", lib_path_in, lib_path_out);
+ return 0;
+ }
+ }
+ return -EINVAL;
+}
+
+
+
+int loadLibrary(cnode *root, const char *name)
+{
+ cnode *node;
+ void *hdl = NULL;
+ audio_effect_library_t *desc;
+ list_elem_t *e;
+ lib_entry_t *l;
+ char path[PATH_MAX];
+
+ node = config_find(root, PATH_TAG);
+ if (node == NULL) {
+ return -EINVAL;
+ }
+
+ if (checkLibraryPath((const char *)node->value, path) != 0) {
+ ALOGW("loadLibrary() could not find library %s", path);
+ goto error;
+ }
+
+ hdl = dlopen(path, RTLD_NOW);
+ if (hdl == NULL) {
+ ALOGW("loadLibrary() failed to open %s", path);
+ goto error;
+ }
+
+ desc = (audio_effect_library_t *)dlsym(hdl, AUDIO_EFFECT_LIBRARY_INFO_SYM_AS_STR);
+ if (desc == NULL) {
+ ALOGW("loadLibrary() could not find symbol %s", AUDIO_EFFECT_LIBRARY_INFO_SYM_AS_STR);
+ goto error;
+ }
+
+ if (AUDIO_EFFECT_LIBRARY_TAG != desc->tag) {
+ ALOGW("getLibrary() bad tag %08x in lib info struct", desc->tag);
+ goto error;
+ }
+
+ if (EFFECT_API_VERSION_MAJOR(desc->version) !=
+ EFFECT_API_VERSION_MAJOR(EFFECT_LIBRARY_API_VERSION)) {
+ ALOGW("loadLibrary() bad lib version %08x", desc->version);
+ goto error;
+ }
+
+ // add entry for library in gLibraryList
+ l = malloc(sizeof(lib_entry_t));
+ l->name = strndup(name, PATH_MAX);
+ l->path = strndup(path, PATH_MAX);
+ l->handle = hdl;
+ l->desc = desc;
+ l->effects = NULL;
+ pthread_mutex_init(&l->lock, NULL);
+
+ e = malloc(sizeof(list_elem_t));
+ e->object = l;
+ pthread_mutex_lock(&gLibLock);
+ e->next = gLibraryList;
+ gLibraryList = e;
+ pthread_mutex_unlock(&gLibLock);
+ ALOGV("getLibrary() linked library %p for path %s", l, path);
+
+ return 0;
+
+error:
+ if (hdl != NULL) {
+ dlclose(hdl);
+ }
+ //add entry for library errors in gLibraryFailedList
+ lib_failed_entry_t *fl = malloc(sizeof(lib_failed_entry_t));
+ fl->name = strndup(name, PATH_MAX);
+ fl->path = strndup(path, PATH_MAX);
+
+ list_elem_t *fe = malloc(sizeof(list_elem_t));
+ fe->object = fl;
+ fe->next = gLibraryFailedList;
+ gLibraryFailedList = fe;
+ ALOGV("getLibrary() linked error in library %p for path %s", fl, path);
+
+ return -EINVAL;
+}
+
+// Finds the library and UUID tags of the sub effect pointed to by the node,
+// gets the effect descriptor and lib_entry_t, and adds the sub effect
+// (sub_effect_entry_t) to the gSubEffectList
+int addSubEffect(cnode *root)
+{
+ ALOGV("addSubEffect");
+ cnode *node;
+ effect_uuid_t uuid;
+ effect_descriptor_t *d;
+ lib_entry_t *l;
+ list_elem_t *e;
+ node = config_find(root, LIBRARY_TAG);
+ if (node == NULL) {
+ return -EINVAL;
+ }
+ l = getLibrary(node->value);
+ if (l == NULL) {
+ ALOGW("addSubEffect() could not get library %s", node->value);
+ return -EINVAL;
+ }
+ node = config_find(root, UUID_TAG);
+ if (node == NULL) {
+ return -EINVAL;
+ }
+ if (stringToUuid(node->value, &uuid) != 0) {
+ ALOGW("addSubEffect() invalid uuid %s", node->value);
+ return -EINVAL;
+ }
+ d = malloc(sizeof(effect_descriptor_t));
+ if (l->desc->get_descriptor(&uuid, d) != 0) {
+ char s[40];
+ uuidToString(&uuid, s, 40);
+ ALOGW("Error querying effect %s on lib %s", s, l->name);
+ free(d);
+ return -EINVAL;
+ }
+#if (LOG_NDEBUG==0)
+ char s[512];
+ dumpEffectDescriptor(d, s, sizeof(s), 0 /* indent */);
+ ALOGV("addSubEffect() read descriptor %p:%s",d, s);
+#endif
+ if (EFFECT_API_VERSION_MAJOR(d->apiVersion) !=
+ EFFECT_API_VERSION_MAJOR(EFFECT_CONTROL_API_VERSION)) {
+ ALOGW("Bad API version %08x on lib %s", d->apiVersion, l->name);
+ free(d);
+ return -EINVAL;
+ }
+ sub_effect_entry_t *sub_effect = malloc(sizeof(sub_effect_entry_t));
+ sub_effect->object = d;
+ // lib_entry_t is stored since the sub effects are not linked to the library
+ sub_effect->lib = l;
+ e = malloc(sizeof(list_elem_t));
+ e->object = sub_effect;
+ e->next = gSubEffectList->sub_elem;
+ gSubEffectList->sub_elem = e;
+ ALOGV("addSubEffect end");
+ return 0;
+}
+
+int loadEffects(cnode *root)
+{
+ cnode *node;
+
+ node = config_find(root, EFFECTS_TAG);
+ if (node == NULL) {
+ return -ENOENT;
+ }
+ node = node->first_child;
+ while (node) {
+ loadEffect(node);
+ node = node->next;
+ }
+ return 0;
+}
+
+int loadEffect(cnode *root)
+{
+ cnode *node;
+ effect_uuid_t uuid;
+ lib_entry_t *l;
+ effect_descriptor_t *d;
+ list_elem_t *e;
+
+ node = config_find(root, LIBRARY_TAG);
+ if (node == NULL) {
+ return -EINVAL;
+ }
+
+ l = getLibrary(node->value);
+ if (l == NULL) {
+ ALOGW("loadEffect() could not get library %s", node->value);
+ return -EINVAL;
+ }
+
+ node = config_find(root, UUID_TAG);
+ if (node == NULL) {
+ return -EINVAL;
+ }
+ if (stringToUuid(node->value, &uuid) != 0) {
+ ALOGW("loadEffect() invalid uuid %s", node->value);
+ return -EINVAL;
+ }
+ lib_entry_t *tmp;
+ bool skip = false;
+ if (findEffect(NULL, &uuid, &tmp, NULL) == 0) {
+ ALOGW("skipping duplicate uuid %s %s", node->value,
+ node->next ? "and its sub-effects" : "");
+ skip = true;
+ }
+
+ d = malloc(sizeof(effect_descriptor_t));
+ if (l->desc->get_descriptor(&uuid, d) != 0) {
+ char s[40];
+ uuidToString(&uuid, s, 40);
+ ALOGW("Error querying effect %s on lib %s", s, l->name);
+ free(d);
+ return -EINVAL;
+ }
+#if (LOG_NDEBUG==0)
+ char s[512];
+ dumpEffectDescriptor(d, s, sizeof(s), 0 /* indent */);
+ ALOGV("loadEffect() read descriptor %p:%s",d, s);
+#endif
+ if (EFFECT_API_VERSION_MAJOR(d->apiVersion) !=
+ EFFECT_API_VERSION_MAJOR(EFFECT_CONTROL_API_VERSION)) {
+ ALOGW("Bad API version %08x on lib %s", d->apiVersion, l->name);
+ free(d);
+ return -EINVAL;
+ }
+ e = malloc(sizeof(list_elem_t));
+ e->object = d;
+ if (skip) {
+ e->next = gSkippedEffects;
+ gSkippedEffects = e;
+ return -EINVAL;
+ } else {
+ e->next = l->effects;
+ l->effects = e;
+ }
+
+ // After the UUID node in the config_tree, if node->next is valid,
+ // it is a sub effect node.
+ // Find the sub effects and add them to the gSubEffectList
+ node = node->next;
+ int count = 2;
+ bool hwSubefx = false, swSubefx = false;
+ list_sub_elem_t *sube = NULL;
+ if (node != NULL) {
+ ALOGV("Adding the effect to gEffectSubList as there are sub effects");
+ sube = malloc(sizeof(list_sub_elem_t));
+ sube->object = d;
+ sube->sub_elem = NULL;
+ sube->next = gSubEffectList;
+ gSubEffectList = sube;
+ }
+ while (node != NULL && count) {
+ if (addSubEffect(node)) {
+ ALOGW("loadEffect() could not add subEffect %s", node->value);
+ // Change the gSubEffectList to point to older list;
+ gSubEffectList = sube->next;
+ free(sube->sub_elem);// Free an already added sub effect
+ sube->sub_elem = NULL;
+ free(sube);
+ return -ENOENT;
+ }
+ sub_effect_entry_t *subEntry = (sub_effect_entry_t*)gSubEffectList->sub_elem->object;
+ effect_descriptor_t *subEffectDesc = (effect_descriptor_t*)(subEntry->object);
+ // Since we return a stub descriptor for the proxy during the
+ // get_descriptor call, we replace it with the corresponding
+ // SW effect descriptor, but with the proxy UUID
+ // check for Sw desc
+ if (!((subEffectDesc->flags & EFFECT_FLAG_HW_ACC_MASK) ==
+ EFFECT_FLAG_HW_ACC_TUNNEL)) {
+ swSubefx = true;
+ *d = *subEffectDesc;
+ d->uuid = uuid;
+ ALOGV("loadEffect() Changed the Proxy desc");
+ } else
+ hwSubefx = true;
+ count--;
+ node = node->next;
+ }
+ // 1 HW and 1 SW sub effect found. Set the offload flag in the Proxy desc
+ if (hwSubefx && swSubefx) {
+ d->flags |= EFFECT_FLAG_OFFLOAD_SUPPORTED;
+ }
+ return 0;
+}
+
+lib_entry_t *getLibrary(const char *name)
+{
+ list_elem_t *e;
+
+ if (gCachedLibrary &&
+ !strncmp(gCachedLibrary->name, name, PATH_MAX)) {
+ return gCachedLibrary;
+ }
+
+ e = gLibraryList;
+ while (e) {
+ lib_entry_t *l = (lib_entry_t *)e->object;
+ if (!strcmp(l->name, name)) {
+ gCachedLibrary = l;
+ return l;
+ }
+ e = e->next;
+ }
+
+ return NULL;
+}
diff --git a/media/libeffects/factory/EffectsConfigLoader.h b/media/libeffects/factory/EffectsConfigLoader.h
new file mode 100644
index 0000000..3f82609
--- /dev/null
+++ b/media/libeffects/factory/EffectsConfigLoader.h
@@ -0,0 +1,38 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_EFFECTSCONFIGLOADER_H
+#define ANDROID_EFFECTSCONFIGLOADER_H
+
+#include <cutils/compiler.h>
+#include "EffectsFactoryState.h"
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+
+/** Parses the platform effect configuration
+ * and stores its content in the global EffectFactoryState. */
+ANDROID_API
+int EffectLoadEffectConfig();
+
+
+#ifdef __cplusplus
+} // extern "C"
+#endif
+
+#endif // ANDROID_EFFECTSCONFIGLOADER_H
diff --git a/media/libeffects/factory/EffectsFactory.c b/media/libeffects/factory/EffectsFactory.c
index 38ba4b0..dcdf634 100644
--- a/media/libeffects/factory/EffectsFactory.c
+++ b/media/libeffects/factory/EffectsFactory.c
@@ -26,6 +26,7 @@
#include <media/EffectsFactoryApi.h>
+#include "EffectsConfigLoader.h"
#include "EffectsFactoryState.h"
#include "EffectsXmlConfigLoader.h"
@@ -463,7 +464,8 @@
} else {
gConfigNbElemSkipped = EffectLoadXmlEffectConfig(NULL);
if (gConfigNbElemSkipped < 0) {
- ALOGE("Failed to load XML effect configuration with status %zd", gConfigNbElemSkipped);
+ ALOGW("Failed to load XML effect configuration, fallback to .conf");
+ EffectLoadEffectConfig();
} else if (gConfigNbElemSkipped > 0) {
ALOGE("Effect config is partially invalid, skipped %zd elements", gConfigNbElemSkipped);
}
diff --git a/media/libeffects/factory/EffectsXmlConfigLoader.cpp b/media/libeffects/factory/EffectsXmlConfigLoader.cpp
index f24c15c..9bff136 100644
--- a/media/libeffects/factory/EffectsXmlConfigLoader.cpp
+++ b/media/libeffects/factory/EffectsXmlConfigLoader.cpp
@@ -26,6 +26,7 @@
#include <media/EffectsConfig.h>
+#include "EffectsConfigLoader.h"
#include "EffectsFactoryState.h"
#include "EffectsXmlConfigLoader.h"
diff --git a/media/libeffects/factory/test/DumpConfig.cpp b/media/libeffects/factory/test/DumpConfig.cpp
index 1fecf06..0a156b4 100644
--- a/media/libeffects/factory/test/DumpConfig.cpp
+++ b/media/libeffects/factory/test/DumpConfig.cpp
@@ -16,31 +16,48 @@
#include <media/EffectsFactoryApi.h>
#include <unistd.h>
-
#include "EffectsXmlConfigLoader.h"
+#include "EffectsConfigLoader.h"
int main(int argc, char* argv[]) {
- char* path = nullptr;
- if ((argc == 2 || argc == 3) && strcmp(argv[1], "--xml") == 0) {
+ const char* path = nullptr;
+ bool legacyFormat;
+
+ if (argc == 2 && strcmp(argv[1], "--legacy") == 0) {
+ legacyFormat = true;
+ fprintf(stderr, "Dumping legacy effect config file\n");
+ } else if ((argc == 2 || argc == 3) && strcmp(argv[1], "--xml") == 0) {
+ legacyFormat = false;
if (argc == 3) {
path = argv[2];
fprintf(stderr, "Dumping XML effect config file: %s\n", path);
} else {
fprintf(stderr, "Dumping default XML effect config file.\n");
}
} else {
- fprintf(stderr, "Invalid arguments.\nUsage: %s [--xml [FILE]]\n", argv[0]);
+ fprintf(stderr, "Invalid arguments.\n"
+ "Usage: %s [--legacy|--xml [FILE]]\n", argv[0]);
return 1;
}
- ssize_t ret = EffectLoadXmlEffectConfig(path);
- if (ret < 0) {
- fprintf(stderr, "loadXmlEffectConfig failed, see logcat for detail.\n");
- return 2;
+ if (!legacyFormat) {
+ ssize_t ret = EffectLoadXmlEffectConfig(path);
+ if (ret < 0) {
+ fprintf(stderr, "loadXmlEffectConfig failed, see logcat for detail.\n");
+ return 2;
+ }
+ if (ret > 0) {
+ fprintf(stderr, "Partially failed to load config. Skipped %zu elements, "
+ "see logcat for detail.\n", (size_t)ret);
+ }
}
- if (ret > 0) {
- fprintf(stderr, "Partially failed to load config. Skipped %zu elements, "
- "see logcat for detail.\n", (size_t)ret);
+
+ if (legacyFormat) {
+ auto ret = EffectLoadEffectConfig();
+ if (ret < 0) {
+ fprintf(stderr, "loadEffectConfig failed, see logcat for detail.\n");
+ return 3;
+ }
+ fprintf(stderr, "legacy loadEffectConfig has probably succeed, see logcat to make sure.\n");
}
if (EffectDumpEffects(STDOUT_FILENO) != 0) {
diff --git a/media/libmedia/MediaCodecInfo.cpp b/media/libmedia/MediaCodecInfo.cpp
index 86ad997..3ebd13e 100644
--- a/media/libmedia/MediaCodecInfo.cpp
+++ b/media/libmedia/MediaCodecInfo.cpp
@@ -102,6 +102,111 @@
return OK;
}
+static int32_t convertToIntNoSign(const AString &str) {
+ char *end;
+ unsigned long u = strtoul(str.c_str(), &end, 10);
+ if (end == str.c_str() || *end != '\0') {
+ // malformed integer
+ return -1;
+ }
+ if (u > INT32_MAX) {
+ // The number is too big
+ return -1;
+ }
+ return static_cast<int32_t>(u);
+}
+
+static void parseSize(const AString &str, int32_t *width, int32_t *height) {
+ ssize_t ix = str.find("x");
+ if (ix == -1) {
+ ix = str.find("*");
+ if (ix == -1) {
+ return;
+ }
+ }
+ AString wStr(str, 0, ix);
+ AString hStr(str, ix + 1, str.size() - ix - 1);
+ *width = convertToIntNoSign(wStr);
+ *height = convertToIntNoSign(hStr);
+}
+
+static void parseRange(const AString &str, int32_t *min, int32_t *max) {
+ ssize_t ix = str.find("-");
+ if (ix == -1) {
+ return;
+ }
+ AString minStr(str, 0, ix);
+ AString maxStr(str, ix + 1, str.size() - ix - 1);
+ *min = convertToIntNoSign(minStr);
+ *max = convertToIntNoSign(maxStr);
+}
+
+static void parseSizeRange(const AString &str, int32_t *minWidth, int32_t *minHeight,
+ int32_t *maxWidth, int32_t *maxHeight) {
+ ssize_t ix = str.find("-");
+ if (ix == -1) {
+ return;
+ }
+ AString minSize(str, 0, ix);
+ AString maxSize(str, ix + 1, str.size() - ix - 1);
+ parseSize(minSize, minWidth, minHeight);
+ parseSize(maxSize, maxWidth, maxHeight);
+}
+
+bool MediaCodecInfo::Capabilities::isResolutionSupported(int32_t width, int32_t height) {
+ AString blockSizeStr;
+ AString blockCountStr;
+ int32_t blockWidth = -1;
+ int32_t blockHeight = -1;
+ int32_t maxBlocks = -1;
+ int32_t minBlocks = -1;
+
+ if (mDetails->findString("block-size", &blockSizeStr)) {
+ parseSize(blockSizeStr, &blockWidth, &blockHeight);
+ }
+ if (mDetails->findString("block-count-range", &blockCountStr)) {
+ parseRange(blockCountStr, &minBlocks, &maxBlocks);
+ }
+ if (maxBlocks != -1 && blockWidth != -1 && blockHeight != -1) {
+ if (maxBlocks < ((width + blockWidth - 1) / blockWidth) *
+ ((height + blockHeight - 1) / blockHeight)) {
+ return false;
+ }
+ }
+
+ AString sizeRangeStr;
+ int32_t maxWidth = -1;
+ int32_t maxHeight = -1;
+ int32_t minWidth = -1;
+ int32_t minHeight = -1;
+
+ if (mDetails->findString("size-range", &sizeRangeStr)) {
+ parseSizeRange(sizeRangeStr, &minWidth, &minHeight, &maxWidth, &maxHeight);
+ }
+
+ if (maxWidth != -1 && maxHeight != -1) {
+ // The format is unsupported if width or height falls outside the min/max limits,
+ // UNLESS the codec can swap width and height; in that case the format is unsupported
+ // only if width also falls outside the min/max height limits or height falls outside
+ // the min/max width limits.
+ if (width < minWidth || height < minHeight ||
+ width > maxWidth || height > maxHeight) {
+ int32_t swappable = 0;
+ if (!mDetails->findInt32("feature-can-swap-width-height", &swappable) ||
+ swappable == 0) {
+ return false;
+ }
+ if (width < minHeight || height < minWidth ||
+ width > maxHeight || height > maxWidth) {
+ return false;
+ }
+ }
+ }
+ return true;
+}
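+
+// A sketch of how the parsing helpers and this check combine; the detail strings and the
+// 8160-block limit are illustrative values in the style of a media_codecs.xml entry, not
+// taken from this change ('info' is a hypothetical sp<MediaCodecInfo>):
+//
+//   // details: block-size = "16x16", block-count-range = "1-8160",
+//   //          size-range = "96x96-4096x4096"
+//   sp<MediaCodecInfo::Capabilities> caps = info->getCapabilitiesFor("video/avc");
+//   caps->isResolutionSupported(1920, 1080);  // ceil(1920/16)*ceil(1080/16) = 120*68 = 8160 blocks -> true
+//   caps->isResolutionSupported(3840, 2160);  // 240*135 = 32400 blocks > 8160 -> false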
+
void MediaCodecInfo::CapabilitiesWriter::addDetail(
const char* key, const char* value) {
mCap->mDetails->setString(key, value);
diff --git a/media/libmedia/include/media/MediaCodecInfo.h b/media/libmedia/include/media/MediaCodecInfo.h
index 54f565a..855bc28 100644
--- a/media/libmedia/include/media/MediaCodecInfo.h
+++ b/media/libmedia/include/media/MediaCodecInfo.h
@@ -59,6 +59,7 @@
kFlagIsVendor = 1 << 1,
kFlagIsSoftwareOnly = 1 << 2,
kFlagIsHardwareAccelerated = 1 << 3,
+ kFlagIsEnforceXmlCapabilities = 1 << 4,
};
struct Capabilities : public RefBase {
@@ -96,6 +97,8 @@
*/
const sp<AMessage> getDetails() const;
+ bool isResolutionSupported(int32_t width, int32_t height);
+
protected:
Vector<ProfileLevel> mProfileLevels;
SortedVector<ProfileLevel> mProfileLevelsSorted;
diff --git a/media/libmediametrics/MediaMetrics.cpp b/media/libmediametrics/MediaMetrics.cpp
index a3c2f1a..2240223 100644
--- a/media/libmediametrics/MediaMetrics.cpp
+++ b/media/libmediametrics/MediaMetrics.cpp
@@ -86,6 +86,11 @@
if (item != NULL) item->setRate(attr, count, duration);
}
+void mediametrics_setString(mediametrics_handle_t handle, attr_t attr,
+ const std::string &string) {
+ mediametrics_setCString(handle, attr, string.c_str());
+}
+
void mediametrics_setCString(mediametrics_handle_t handle, attr_t attr,
const char *value) {
Item *item = (Item *) handle;
@@ -152,6 +157,14 @@
return item->getRate(attr, count, duration, rate);
}
+bool mediametrics_getString(mediametrics_handle_t handle, attr_t attr,
+ std::string *string) {
+ Item *item = (Item *) handle;
+ if (item == NULL) return false;
+
+ return item->getString(attr, string);
+}
+
// NB: caller owns the string that comes back, is responsible for freeing it
bool mediametrics_getCString(mediametrics_handle_t handle, attr_t attr,
char **value) {
diff --git a/media/libmediametrics/include/media/MediaMetrics.h b/media/libmediametrics/include/media/MediaMetrics.h
index 76abe86..58612a3 100644
--- a/media/libmediametrics/include/media/MediaMetrics.h
+++ b/media/libmediametrics/include/media/MediaMetrics.h
@@ -50,7 +50,7 @@
void mediametrics_setRate(mediametrics_handle_t handle, attr_t attr,
int64_t count, int64_t duration);
void mediametrics_setCString(mediametrics_handle_t handle, attr_t attr,
- const char * value);
+ const char * value);
// fused get/add/set; if attr wasn't there, it's a simple set.
// these do not provide atomicity or mutual exclusion, only simpler code sequences.
@@ -95,4 +95,11 @@
__END_DECLS
+#ifdef __cplusplus
+#include <string>
+void mediametrics_setString(mediametrics_handle_t handle, attr_t attr,
+ const std::string &value);
+bool mediametrics_getString(mediametrics_handle_t handle, attr_t attr, std::string *value);
+#endif // __cplusplus
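+
+// A minimal usage sketch for the std::string overloads declared above (the attribute key
+// is hypothetical; per the .cpp change, the setter simply forwards to
+// mediametrics_setCString() and the getter to the item's string accessor):
+//
+//   mediametrics_handle_t h = mediametrics_create("codec");
+//   mediametrics_setString(h, "android.media.mediacodec.mode", std::string("video"));
+//   std::string mode;
+//   if (mediametrics_getString(h, "android.media.mediacodec.mode", &mode)) {
+//       // mode == "video"
+//   }
+//   mediametrics_delete(h);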
+
#endif
diff --git a/media/libmediametrics/include/media/MediaMetricsItem.h b/media/libmediametrics/include/media/MediaMetricsItem.h
index de56665..03834d4 100644
--- a/media/libmediametrics/include/media/MediaMetricsItem.h
+++ b/media/libmediametrics/include/media/MediaMetricsItem.h
@@ -1048,6 +1048,9 @@
}
return true;
}
+ bool getString(const char *key, std::string *value) const {
+ return get(key, value);
+ }
// Caller owns the returned string
bool getCString(const char *key, char **value) const {
std::string s;
@@ -1057,9 +1060,6 @@
}
return false;
}
- bool getString(const char *key, std::string *value) const {
- return get(key, value);
- }
const Prop::Elem* get(const char *key) const {
const Prop *prop = findProp(key);
diff --git a/media/libmediaplayerservice/StagefrightRecorder.cpp b/media/libmediaplayerservice/StagefrightRecorder.cpp
index ec79b99..0365085 100644
--- a/media/libmediaplayerservice/StagefrightRecorder.cpp
+++ b/media/libmediaplayerservice/StagefrightRecorder.cpp
@@ -1986,6 +1986,10 @@
format->setString("mime", MEDIA_MIMETYPE_VIDEO_DOLBY_VISION);
break;
+ case VIDEO_ENCODER_AV1:
+ format->setString("mime", MEDIA_MIMETYPE_VIDEO_AV1);
+ break;
+
default:
CHECK(!"Should not be here, unsupported video encoding.");
break;
diff --git a/media/libmediaplayerservice/nuplayer/AWakeLock.cpp b/media/libmediaplayerservice/nuplayer/AWakeLock.cpp
index 366956c..e8556dd 100644
--- a/media/libmediaplayerservice/nuplayer/AWakeLock.cpp
+++ b/media/libmediaplayerservice/nuplayer/AWakeLock.cpp
@@ -28,11 +28,13 @@
namespace android {
+const int32_t INVALID_DISPLAY_ID = -1;
+
AWakeLock::AWakeLock() :
mPowerManager(NULL),
mWakeLockToken(NULL),
mWakeLockCount(0),
mDeathRecipient(new PMDeathRecipient(this)) {}
AWakeLock::~AWakeLock() {
if (mPowerManager != NULL) {
@@ -59,10 +61,15 @@
if (mPowerManager != NULL) {
sp<IBinder> binder = new BBinder();
int64_t token = IPCThreadState::self()->clearCallingIdentity();
- binder::Status status = mPowerManager->acquireWakeLockAsync(
- binder, POWERMANAGER_PARTIAL_WAKE_LOCK,
- String16("AWakeLock"), String16("media"),
- {} /* workSource */, {} /* historyTag */);
+ binder::Status status = mPowerManager->acquireWakeLock(
+ binder,
+ /*flags= */ POWERMANAGER_PARTIAL_WAKE_LOCK,
+ /*tag=*/ String16("AWakeLock"),
+ /*packageName=*/ String16("media"),
+ /*ws=*/ {},
+ /*historyTag=*/ {},
+ /*displayId=*/ INVALID_DISPLAY_ID,
+ /*callback=*/NULL);
IPCThreadState::self()->restoreCallingIdentity(token);
if (status.isOk()) {
mWakeLockToken = binder;
diff --git a/media/libstagefright/Android.bp b/media/libstagefright/Android.bp
index 569a25f..a26fcbe 100644
--- a/media/libstagefright/Android.bp
+++ b/media/libstagefright/Android.bp
@@ -270,6 +270,7 @@
"SurfaceUtils.cpp",
"ThrottledSource.cpp",
"Utils.cpp",
+ "VideoRenderQualityTracker.cpp",
"VideoFrameSchedulerBase.cpp",
"VideoFrameScheduler.cpp",
],
@@ -313,6 +314,7 @@
"framework-permission-aidl-cpp",
"libaudioclient_aidl_conversion",
"packagemanager_aidl-cpp",
+ "server_configurable_flags",
],
static_libs: [
diff --git a/media/libstagefright/MPEG4Writer.cpp b/media/libstagefright/MPEG4Writer.cpp
index c93d033..b84dc27 100644
--- a/media/libstagefright/MPEG4Writer.cpp
+++ b/media/libstagefright/MPEG4Writer.cpp
@@ -3581,7 +3581,7 @@
(const uint8_t *)buffer->data()
+ buffer->range_offset(),
buffer->range_length());
- } else if (mIsMPEG4) {
+ } else if (mIsMPEG4 || mIsAv1) {
err = copyCodecSpecificData((const uint8_t *)buffer->data() + buffer->range_offset(),
buffer->range_length());
}
diff --git a/media/libstagefright/MediaCodec.cpp b/media/libstagefright/MediaCodec.cpp
index 080c3d0..54ca1ea 100644
--- a/media/libstagefright/MediaCodec.cpp
+++ b/media/libstagefright/MediaCodec.cpp
@@ -19,19 +19,18 @@
#define LOG_TAG "MediaCodec"
#include <utils/Log.h>
-#include <set>
-#include <random>
-#include <stdlib.h>
-
-#include <inttypes.h>
-#include <stdlib.h>
#include <dlfcn.h>
+#include <inttypes.h>
+#include <random>
+#include <set>
+#include <stdlib.h>
+#include <string>
#include <C2Buffer.h>
#include "include/SoftwareRenderer.h"
-#include "PlaybackDurationAccumulator.h"
+#include <android/api-level.h>
#include <android/binder_manager.h>
#include <android/content/pm/IPackageManagerNative.h>
#include <android/hardware/cas/native/1.0/IDescrambler.h>
@@ -83,6 +82,7 @@
#include <media/stagefright/SurfaceUtils.h>
#include <nativeloader/dlext_namespaces.h>
#include <private/android_filesystem_config.h>
+#include <server_configurable_flags/get_flags.h>
#include <utils/Singleton.h>
namespace android {
@@ -92,6 +92,8 @@
using aidl::android::media::IResourceManagerClient;
using aidl::android::media::IResourceManagerService;
using aidl::android::media::ClientInfoParcel;
+using FreezeEvent = VideoRenderQualityTracker::FreezeEvent;
+using JudderEvent = VideoRenderQualityTracker::JudderEvent;
// key for media statistics
static const char *kCodecKeyName = "codec";
@@ -109,7 +111,9 @@
static const char *kCodecModeImage = "image";
static const char *kCodecModeUnknown = "unknown";
static const char *kCodecEncoder = "android.media.mediacodec.encoder"; /* 0,1 */
+static const char *kCodecHardware = "android.media.mediacodec.hardware"; /* 0,1 */
static const char *kCodecSecure = "android.media.mediacodec.secure"; /* 0, 1 */
+static const char *kCodecTunneled = "android.media.mediacodec.tunneled"; /* 0,1 */
static const char *kCodecWidth = "android.media.mediacodec.width"; /* 0..n */
static const char *kCodecHeight = "android.media.mediacodec.height"; /* 0..n */
static const char *kCodecRotation = "android.media.mediacodec.rotation-degrees"; /* 0/90/180/270 */
@@ -155,6 +159,7 @@
static const char *kCodecLatencyUnknown = "android.media.mediacodec.latency.unknown";
static const char *kCodecQueueSecureInputBufferError = "android.media.mediacodec.queueSecureInputBufferError";
static const char *kCodecQueueInputBufferError = "android.media.mediacodec.queueInputBufferError";
+static const char *kCodecComponentColorFormat = "android.media.mediacodec.component-color-format";
static const char *kCodecNumLowLatencyModeOn = "android.media.mediacodec.low-latency.on"; /* 0..n */
static const char *kCodecNumLowLatencyModeOff = "android.media.mediacodec.low-latency.off"; /* 0..n */
@@ -173,9 +178,9 @@
static const char *kCodecParsedColorStandard = "android.media.mediacodec.parsed-color-standard";
static const char *kCodecParsedColorRange = "android.media.mediacodec.parsed-color-range";
static const char *kCodecParsedColorTransfer = "android.media.mediacodec.parsed-color-transfer";
-static const char *kCodecHDRStaticInfo = "android.media.mediacodec.hdr-static-info";
-static const char *kCodecHDR10PlusInfo = "android.media.mediacodec.hdr10-plus-info";
-static const char *kCodecHDRFormat = "android.media.mediacodec.hdr-format";
+static const char *kCodecHdrStaticInfo = "android.media.mediacodec.hdr-static-info";
+static const char *kCodecHdr10PlusInfo = "android.media.mediacodec.hdr10-plus-info";
+static const char *kCodecHdrFormat = "android.media.mediacodec.hdr-format";
// array/sync/async/block modes
static const char *kCodecArrayMode = "android.media.mediacodec.array-mode";
static const char *kCodecOperationMode = "android.media.mediacodec.operation-mode";
@@ -196,13 +201,72 @@
static const char *kCodecRecentLatencyAvg = "android.media.mediacodec.recent.avg"; /* in us */
static const char *kCodecRecentLatencyCount = "android.media.mediacodec.recent.n";
static const char *kCodecRecentLatencyHist = "android.media.mediacodec.recent.hist"; /* in us */
-static const char *kCodecPlaybackDurationSec =
- "android.media.mediacodec.playback-duration-sec"; /* in sec */
/* -1: shaper disabled
>=0: number of fields changed */
static const char *kCodecShapingEnhanced = "android.media.mediacodec.shaped";
+// Render metrics
+static const char *kCodecPlaybackDurationSec = "android.media.mediacodec.playback-duration-sec";
+static const char *kCodecFirstRenderTimeUs = "android.media.mediacodec.first-render-time-us";
+static const char *kCodecFramesReleased = "android.media.mediacodec.frames-released";
+static const char *kCodecFramesRendered = "android.media.mediacodec.frames-rendered";
+static const char *kCodecFramesDropped = "android.media.mediacodec.frames-dropped";
+static const char *kCodecFramesSkipped = "android.media.mediacodec.frames-skipped";
+static const char *kCodecFramerateContent = "android.media.mediacodec.framerate-content";
+static const char *kCodecFramerateDesired = "android.media.mediacodec.framerate-desired";
+static const char *kCodecFramerateActual = "android.media.mediacodec.framerate-actual";
+// Freeze
+static const char *kCodecFreezeCount = "android.media.mediacodec.freeze-count";
+static const char *kCodecFreezeScore = "android.media.mediacodec.freeze-score";
+static const char *kCodecFreezeRate = "android.media.mediacodec.freeze-rate";
+static const char *kCodecFreezeDurationMsAvg = "android.media.mediacodec.freeze-duration-ms-avg";
+static const char *kCodecFreezeDurationMsMax = "android.media.mediacodec.freeze-duration-ms-max";
+static const char *kCodecFreezeDurationMsHistogram =
+ "android.media.mediacodec.freeze-duration-ms-histogram";
+static const char *kCodecFreezeDurationMsHistogramBuckets =
+ "android.media.mediacodec.freeze-duration-ms-histogram-buckets";
+static const char *kCodecFreezeDistanceMsAvg = "android.media.mediacodec.freeze-distance-ms-avg";
+static const char *kCodecFreezeDistanceMsHistogram =
+ "android.media.mediacodec.freeze-distance-ms-histogram";
+static const char *kCodecFreezeDistanceMsHistogramBuckets =
+ "android.media.mediacodec.freeze-distance-ms-histogram-buckets";
+// Judder
+static const char *kCodecJudderCount = "android.media.mediacodec.judder-count";
+static const char *kCodecJudderScore = "android.media.mediacodec.judder-score";
+static const char *kCodecJudderRate = "android.media.mediacodec.judder-rate";
+static const char *kCodecJudderScoreAvg = "android.media.mediacodec.judder-score-avg";
+static const char *kCodecJudderScoreMax = "android.media.mediacodec.judder-score-max";
+static const char *kCodecJudderScoreHistogram = "android.media.mediacodec.judder-score-histogram";
+static const char *kCodecJudderScoreHistogramBuckets =
+ "android.media.mediacodec.judder-score-histogram-buckets";
+// Freeze event
+static const char *kCodecFreezeEventCount = "android.media.mediacodec.freeze-event-count";
+static const char *kFreezeEventKeyName = "freeze";
+static const char *kFreezeEventInitialTimeUs = "android.media.mediacodec.freeze.initial-time-us";
+static const char *kFreezeEventDurationMs = "android.media.mediacodec.freeze.duration-ms";
+static const char *kFreezeEventCount = "android.media.mediacodec.freeze.count";
+static const char *kFreezeEventAvgDurationMs = "android.media.mediacodec.freeze.avg-duration-ms";
+static const char *kFreezeEventAvgDistanceMs = "android.media.mediacodec.freeze.avg-distance-ms";
+static const char *kFreezeEventDetailsDurationMs =
+ "android.media.mediacodec.freeze.details-duration-ms";
+static const char *kFreezeEventDetailsDistanceMs =
+ "android.media.mediacodec.freeze.details-distance-ms";
+// Judder event
+static const char *kCodecJudderEventCount = "android.media.mediacodec.judder-event-count";
+static const char *kJudderEventKeyName = "judder";
+static const char *kJudderEventInitialTimeUs = "android.media.mediacodec.judder.initial-time-us";
+static const char *kJudderEventDurationMs = "android.media.mediacodec.judder.duration-ms";
+static const char *kJudderEventCount = "android.media.mediacodec.judder.count";
+static const char *kJudderEventAvgScore = "android.media.mediacodec.judder.avg-score";
+static const char *kJudderEventAvgDistanceMs = "android.media.mediacodec.judder.avg-distance-ms";
+static const char *kJudderEventDetailsActualDurationUs =
+ "android.media.mediacodec.judder.details-actual-duration-us";
+static const char *kJudderEventDetailsContentDurationUs =
+ "android.media.mediacodec.judder.details-content-duration-us";
+static const char *kJudderEventDetailsDistanceMs =
+ "android.media.mediacodec.judder.details-distance-ms";
+
// XXX suppress until we get our representation right
static bool kEmitHistogram = false;
@@ -960,8 +1024,10 @@
mHaveInputSurface(false),
mHavePendingInputBuffers(false),
mCpuBoostRequested(false),
- mPlaybackDurationAccumulator(new PlaybackDurationAccumulator()),
- mIsSurfaceToScreen(false),
+ mIsSurfaceToDisplay(false),
+ mVideoRenderQualityTracker(
+ VideoRenderQualityTracker::Configuration::getFromServerConfigurableFlags(
+ server_configurable_flags::GetServerConfigurableFlag)),
mLatencyUnknown(0),
mBytesEncoded(0),
mEarliestEncodedPtsUs(INT64_MAX),
@@ -1096,6 +1162,56 @@
mediametrics_setInt32(mMetricsHandle, kCodecResolutionChangeCount,
mReliabilityContextMetrics.resolutionChangeCount);
+ // Video rendering quality metrics
+ {
+ const VideoRenderQualityMetrics &m = mVideoRenderQualityTracker.getMetrics();
+ if (m.frameReleasedCount > 0) {
+ mediametrics_setInt64(mMetricsHandle, kCodecFirstRenderTimeUs, m.firstRenderTimeUs);
+ mediametrics_setInt64(mMetricsHandle, kCodecFramesReleased, m.frameReleasedCount);
+ mediametrics_setInt64(mMetricsHandle, kCodecFramesRendered, m.frameRenderedCount);
+ mediametrics_setInt64(mMetricsHandle, kCodecFramesSkipped, m.frameSkippedCount);
+ mediametrics_setInt64(mMetricsHandle, kCodecFramesDropped, m.frameDroppedCount);
+ mediametrics_setDouble(mMetricsHandle, kCodecFramerateContent, m.contentFrameRate);
+ mediametrics_setDouble(mMetricsHandle, kCodecFramerateDesired, m.desiredFrameRate);
+ mediametrics_setDouble(mMetricsHandle, kCodecFramerateActual, m.actualFrameRate);
+ }
+ if (m.freezeDurationMsHistogram.getCount() >= 1) {
+ const MediaHistogram<int32_t> &h = m.freezeDurationMsHistogram;
+ mediametrics_setInt64(mMetricsHandle, kCodecFreezeScore, m.freezeScore);
+ mediametrics_setDouble(mMetricsHandle, kCodecFreezeRate, m.freezeRate);
+ mediametrics_setInt64(mMetricsHandle, kCodecFreezeCount, h.getCount());
+ mediametrics_setInt32(mMetricsHandle, kCodecFreezeDurationMsAvg, h.getAvg());
+ mediametrics_setInt32(mMetricsHandle, kCodecFreezeDurationMsMax, h.getMax());
+ mediametrics_setString(mMetricsHandle, kCodecFreezeDurationMsHistogram, h.emit());
+ mediametrics_setString(mMetricsHandle, kCodecFreezeDurationMsHistogramBuckets,
+ h.emitBuckets());
+ }
+ if (m.freezeDistanceMsHistogram.getCount() >= 1) {
+ const MediaHistogram<int32_t> &h = m.freezeDistanceMsHistogram;
+ mediametrics_setInt32(mMetricsHandle, kCodecFreezeDistanceMsAvg, h.getAvg());
+ mediametrics_setString(mMetricsHandle, kCodecFreezeDistanceMsHistogram, h.emit());
+ mediametrics_setString(mMetricsHandle, kCodecFreezeDistanceMsHistogramBuckets,
+ h.emitBuckets());
+ }
+ if (m.judderScoreHistogram.getCount() >= 1) {
+ const MediaHistogram<int32_t> &h = m.judderScoreHistogram;
+ mediametrics_setInt64(mMetricsHandle, kCodecJudderScore, m.judderScore);
+ mediametrics_setDouble(mMetricsHandle, kCodecJudderRate, m.judderRate);
+ mediametrics_setInt64(mMetricsHandle, kCodecJudderCount, h.getCount());
+ mediametrics_setInt32(mMetricsHandle, kCodecJudderScoreAvg, h.getAvg());
+ mediametrics_setInt32(mMetricsHandle, kCodecJudderScoreMax, h.getMax());
+ mediametrics_setString(mMetricsHandle, kCodecJudderScoreHistogram, h.emit());
+ mediametrics_setString(mMetricsHandle, kCodecJudderScoreHistogramBuckets,
+ h.emitBuckets());
+ }
+ if (m.freezeEventCount != 0) {
+ mediametrics_setInt32(mMetricsHandle, kCodecFreezeEventCount, m.freezeEventCount);
+ }
+ if (m.judderEventCount != 0) {
+ mediametrics_setInt32(mMetricsHandle, kCodecJudderEventCount, m.judderEventCount);
+ }
+ }
+
if (mLatencyHist.getCount() != 0 ) {
mediametrics_setInt64(mMetricsHandle, kCodecLatencyMax, mLatencyHist.getMax());
mediametrics_setInt64(mMetricsHandle, kCodecLatencyMin, mLatencyHist.getMin());
@@ -1111,7 +1227,7 @@
if (mLatencyUnknown > 0) {
mediametrics_setInt64(mMetricsHandle, kCodecLatencyUnknown, mLatencyUnknown);
}
- int64_t playbackDurationSec = mPlaybackDurationAccumulator->getDurationInSeconds();
+ int64_t playbackDurationSec = mPlaybackDurationAccumulator.getDurationInSeconds();
if (playbackDurationSec > 0) {
mediametrics_setInt64(mMetricsHandle, kCodecPlaybackDurationSec, playbackDurationSec);
}
@@ -1174,14 +1290,14 @@
&& ColorUtils::isHDRStaticInfoValid(&info)) {
mHdrInfoFlags |= kFlagHasHdrStaticInfo;
}
- mediametrics_setInt32(mMetricsHandle, kCodecHDRStaticInfo,
+ mediametrics_setInt32(mMetricsHandle, kCodecHdrStaticInfo,
(mHdrInfoFlags & kFlagHasHdrStaticInfo) ? 1 : 0);
sp<ABuffer> hdr10PlusInfo;
if (mOutputFormat->findBuffer("hdr10-plus-info", &hdr10PlusInfo)
&& hdr10PlusInfo != nullptr && hdr10PlusInfo->size() > 0) {
mHdrInfoFlags |= kFlagHasHdr10PlusInfo;
}
- mediametrics_setInt32(mMetricsHandle, kCodecHDR10PlusInfo,
+ mediametrics_setInt32(mMetricsHandle, kCodecHdr10PlusInfo,
(mHdrInfoFlags & kFlagHasHdr10PlusInfo) ? 1 : 0);
// hdr format
@@ -1194,7 +1310,7 @@
&& codedFormat->findInt32(KEY_PROFILE, &profile)
&& colorTransfer != -1) {
hdr_format hdrFormat = getHdrFormat(mime, profile, colorTransfer);
- mediametrics_setInt32(mMetricsHandle, kCodecHDRFormat, static_cast<int>(hdrFormat));
+ mediametrics_setInt32(mMetricsHandle, kCodecHdrFormat, static_cast<int>(hdrFormat));
}
}
@@ -1302,16 +1418,15 @@
return;
}
- Histogram recentHist;
-
// build an empty histogram
+ MediaHistogram<int64_t> recentHist;
recentHist.setup(kLatencyHistBuckets, kLatencyHistWidth, kLatencyHistFloor);
// stuff it with the samples in the ring buffer
{
Mutex::Autolock al(mRecentLock);
- for (int i=0; i<kRecentLatencyFrames; i++) {
+ for (int i = 0; i < kRecentLatencyFrames; i++) {
if (mRecentSamples[i] != kRecentSampleInvalid) {
recentHist.insert(mRecentSamples[i]);
}
@@ -1319,7 +1434,7 @@
}
// spit the data (if any) into the supplied analytics record
- if (recentHist.getCount()!= 0 ) {
+ if (recentHist.getCount() != 0 ) {
mediametrics_setInt64(item, kCodecRecentLatencyMax, recentHist.getMax());
mediametrics_setInt64(item, kCodecRecentLatencyMin, recentHist.getMin());
mediametrics_setInt64(item, kCodecRecentLatencyAvg, recentHist.getAvg());
@@ -1333,6 +1448,53 @@
}
}
+static std::string emitVector(const std::vector<int32_t> &vector) {
+ std::ostringstream sstr;
+ for (size_t i = 0; i < vector.size(); ++i) {
+ if (i != 0) {
+ sstr << ',';
+ }
+ sstr << vector[i];
+ }
+ return sstr.str();
+}
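+// e.g. emitVector({1, 20, 40}) returns "1,20,40"; an empty vector yields "".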
+
+static void reportToMediaMetricsIfValid(const FreezeEvent &e) {
+ if (e.valid) {
+ mediametrics_handle_t handle = mediametrics_create(kFreezeEventKeyName);
+ mediametrics_setInt64(handle, kFreezeEventInitialTimeUs, e.initialTimeUs);
+ mediametrics_setInt32(handle, kFreezeEventDurationMs, e.durationMs);
+ mediametrics_setInt64(handle, kFreezeEventCount, e.count);
+ mediametrics_setInt32(handle, kFreezeEventAvgDurationMs, e.sumDurationMs / e.count);
+ mediametrics_setInt32(handle, kFreezeEventAvgDistanceMs, e.sumDistanceMs / e.count);
+ mediametrics_setString(handle, kFreezeEventDetailsDurationMs,
+ emitVector(e.details.durationMs));
+ mediametrics_setString(handle, kFreezeEventDetailsDistanceMs,
+ emitVector(e.details.distanceMs));
+ mediametrics_selfRecord(handle);
+ mediametrics_delete(handle);
+ }
+}
+
+static void reportToMediaMetricsIfValid(const JudderEvent &e) {
+ if (e.valid) {
+ mediametrics_handle_t handle = mediametrics_create(kJudderEventKeyName);
+ mediametrics_setInt64(handle, kJudderEventInitialTimeUs, e.initialTimeUs);
+ mediametrics_setInt32(handle, kJudderEventDurationMs, e.durationMs);
+ mediametrics_setInt64(handle, kJudderEventCount, e.count);
+ mediametrics_setInt32(handle, kJudderEventAvgScore, e.sumScore / e.count);
+ mediametrics_setInt32(handle, kJudderEventAvgDistanceMs, e.sumDistanceMs / e.count);
+ mediametrics_setString(handle, kJudderEventDetailsActualDurationUs,
+ emitVector(e.details.actualRenderDurationUs));
+ mediametrics_setString(handle, kJudderEventDetailsContentDurationUs,
+ emitVector(e.details.contentRenderDurationUs));
+ mediametrics_setString(handle, kJudderEventDetailsDistanceMs,
+ emitVector(e.details.distanceMs));
+ mediametrics_selfRecord(handle);
+ mediametrics_delete(handle);
+ }
+}
+
void MediaCodec::flushMediametrics() {
ALOGD("flushMediametrics");
@@ -1351,6 +1513,10 @@
}
// we no longer have anything pending upload
mMetricsToUpload = false;
+
+ // Freeze and judder events are reported separately
+ reportToMediaMetricsIfValid(mVideoRenderQualityTracker.getAndResetFreezeEvent());
+ reportToMediaMetricsIfValid(mVideoRenderQualityTracker.getAndResetJudderEvent());
}
void MediaCodec::updateLowLatency(const sp<AMessage> &msg) {
@@ -1436,116 +1602,43 @@
ALOGV("TunnelPeekState: %s -> %s", asString(previousState), asString(mTunnelPeekState));
}
-void MediaCodec::updatePlaybackDuration(const sp<AMessage> &msg) {
+void MediaCodec::processRenderedFrames(const sp<AMessage> &msg) {
int what = 0;
msg->findInt32("what", &what);
if (msg->what() != kWhatCodecNotify && what != kWhatOutputFramesRendered) {
static bool logged = false;
if (!logged) {
logged = true;
- ALOGE("updatePlaybackDuration: expected kWhatOuputFramesRendered (%d)", msg->what());
+ ALOGE("processRenderedFrames: expected kWhatOutputFramesRendered (%d)", msg->what());
}
return;
}
- // Playback duration only counts if the buffers are going to the screen.
- if (!mIsSurfaceToScreen) {
- return;
- }
- int64_t renderTimeNs;
- size_t index = 0;
- while (msg->findInt64(AStringPrintf("%zu-system-nano", index++).c_str(), &renderTimeNs)) {
- mPlaybackDurationAccumulator->processRenderTime(renderTimeNs);
- }
-}
-
-bool MediaCodec::Histogram::setup(int nbuckets, int64_t width, int64_t floor)
-{
- if (nbuckets <= 0 || width <= 0) {
- return false;
- }
-
- // get histogram buckets
- if (nbuckets == mBucketCount && mBuckets != NULL) {
- // reuse our existing buffer
- memset(mBuckets, 0, sizeof(*mBuckets) * mBucketCount);
- } else {
- // get a new pre-zeroed buffer
- int64_t *newbuckets = (int64_t *)calloc(nbuckets, sizeof (*mBuckets));
- if (newbuckets == NULL) {
- goto bad;
+ // Rendered frames only matter if they're being sent to the display
+ if (mIsSurfaceToDisplay) {
+ int64_t renderTimeNs;
+ for (size_t index = 0;
+ msg->findInt64(AStringPrintf("%zu-system-nano", index).c_str(), &renderTimeNs);
+ index++) {
+ // Capture metrics for playback duration
+ mPlaybackDurationAccumulator.onFrameRendered(renderTimeNs);
+ // Capture metrics for quality
+ int64_t mediaTimeUs = 0;
+ if (!msg->findInt64(AStringPrintf("%zu-media-time-us", index).c_str(), &mediaTimeUs)) {
+ ALOGE("processRenderedFrames: no media time found");
+ continue;
+ }
+ // Tunneled frames use INT64_MAX to indicate end-of-stream, so don't report it as a
+ // rendered frame.
+ if (!mTunneled || mediaTimeUs != INT64_MAX) {
+ FreezeEvent freezeEvent;
+ JudderEvent judderEvent;
+ mVideoRenderQualityTracker.onFrameRendered(mediaTimeUs, renderTimeNs, &freezeEvent,
+ &judderEvent);
+ reportToMediaMetricsIfValid(freezeEvent);
+ reportToMediaMetricsIfValid(judderEvent);
+ }
}
- if (mBuckets != NULL)
- free(mBuckets);
- mBuckets = newbuckets;
}
-
- mWidth = width;
- mFloor = floor;
- mCeiling = floor + nbuckets * width;
- mBucketCount = nbuckets;
-
- mMin = INT64_MAX;
- mMax = INT64_MIN;
- mSum = 0;
- mCount = 0;
- mBelow = mAbove = 0;
-
- return true;
-
- bad:
- if (mBuckets != NULL) {
- free(mBuckets);
- mBuckets = NULL;
- }
-
- return false;
-}
-
-void MediaCodec::Histogram::insert(int64_t sample)
-{
- // histogram is not set up
- if (mBuckets == NULL) {
- return;
- }
-
- mCount++;
- mSum += sample;
- if (mMin > sample) mMin = sample;
- if (mMax < sample) mMax = sample;
-
- if (sample < mFloor) {
- mBelow++;
- } else if (sample >= mCeiling) {
- mAbove++;
- } else {
- int64_t slot = (sample - mFloor) / mWidth;
- CHECK(slot < mBucketCount);
- mBuckets[slot]++;
- }
- return;
-}
-
-std::string MediaCodec::Histogram::emit()
-{
- std::string value;
- char buffer[64];
-
- // emits: width,Below{bucket0,bucket1,...., bucketN}above
- // unconfigured will emit: 0,0{}0
- // XXX: is this best representation?
- snprintf(buffer, sizeof(buffer), "%" PRId64 ",%" PRId64 ",%" PRId64 "{",
- mFloor, mWidth, mBelow);
- value = buffer;
- for (int i = 0; i < mBucketCount; i++) {
- if (i != 0) {
- value = value + ",";
- }
- snprintf(buffer, sizeof(buffer), "%" PRId64, mBuckets[i]);
- value = value + buffer;
- }
- snprintf(buffer, sizeof(buffer), "}%" PRId64 , mAbove);
- value = value + buffer;
- return value;
}
// when we send a buffer to the codec;
@@ -1955,6 +2048,40 @@
return configure(format, nativeWindow, crypto, NULL, flags);
}
+bool MediaCodec::isResolutionSupported(const sp<AMessage>& format) {
+ int32_t width = -1;
+ int32_t height = -1;
+ int32_t maxWidth = -1;
+ int32_t maxHeight = -1;
+ format->findInt32("width", &width);
+ format->findInt32("height", &height);
+ format->findInt32("max-width", &maxWidth);
+ format->findInt32("max-height", &maxHeight);
+ AString mediaType;
+ if (!format->findString("mime", &mediaType)) {
+ ALOGI("Can not check mediaFormat: No MIME set.");
+ return true;
+ }
+ sp<MediaCodecInfo::Capabilities> caps = mCodecInfo->getCapabilitiesFor(mediaType.c_str());
+ if (caps == NULL) {
+ ALOGI("Can not get Capabilities for MIME %s.", mediaType.c_str());
+ return true;
+ }
+ if (width != -1 && height != -1) {
+ if (!caps->isResolutionSupported(width, height)) {
+ ALOGD("Frame resolution (%dx%d) is beyond codec capabilities", width, height);
+ return false;
+ }
+ }
+ if (maxWidth != -1 && maxHeight != -1) {
+ if (!caps->isResolutionSupported(maxWidth, maxHeight)) {
+ ALOGD("Max frame resolution (%dx%d) is beyond codec capabilities", maxWidth, maxHeight);
+ return false;
+ }
+ }
+ return true;
+}
+
status_t MediaCodec::configure(
const sp<AMessage> &format,
const sp<Surface> &surface,
@@ -2042,7 +2169,24 @@
mediametrics_delete(nextMetricsHandle);
return BAD_VALUE;
}
-
+ // For applications built with a targetSdkVersion of Android U or later (or when
+ // MediaCodec's caller is not an app), we enforce the codec's resolution capabilities
+ // whenever the codec declares the 'enforce-xml-capabilities' attribute.
+ if (android_get_application_target_sdk_version() >= __ANDROID_API_U__) {
+ if (mCodecInfo != nullptr &&
+ (mCodecInfo->getAttributes() &
+ MediaCodecInfo::kFlagIsEnforceXmlCapabilities)) {
+ if (!isResolutionSupported(format)) {
+ mErrorLog.log(LOG_TAG,
+ base::StringPrintf("The input resolution of %dx%d is not "
+ "supported for this codec; please query MediaCodecList "
+ "for the supported formats including the resolution. See "
+ "CodecCapabilities#isFormatSupported() and "
+ "VideoCapabilities#isSizeSupported()", mWidth, mHeight));
+ return BAD_VALUE;
+ }
+ }
+ }
} else {
if (nextMetricsHandle != 0) {
int32_t channelCount;
@@ -3622,8 +3766,7 @@
setState(UNINITIALIZED);
} else {
- setState(
- (mFlags & kFlagIsAsync) ? FLUSHED : STARTED);
+ setState((mFlags & kFlagIsAsync) ? FLUSHED : STARTED);
}
break;
}
@@ -3748,6 +3891,9 @@
mediametrics_setInt32(mMetricsHandle, kCodecSecure, 0);
}
+ mediametrics_setInt32(mMetricsHandle, kCodecHardware,
+ MediaCodecList::isSoftwareCodec(mComponentName) ? 0 : 1);
+
mResourceManagerProxy->addResource(MediaResource::CodecResource(
mFlags & kFlagIsSecure, toMediaResourceSubType(mDomain)));
@@ -3814,6 +3960,14 @@
if (interestingFormat->findInt32("level", &level)) {
mediametrics_setInt32(mMetricsHandle, kCodecLevel, level);
}
+ sp<AMessage> uncompressedFormat =
+ (mFlags & kFlagIsEncoder) ? mInputFormat : mOutputFormat;
+ int32_t componentColorFormat = -1;
+ if (uncompressedFormat->findInt32("android._color-format",
+ &componentColorFormat)) {
+ mediametrics_setInt32(mMetricsHandle,
+ kCodecComponentColorFormat, componentColorFormat);
+ }
updateHdrMetrics(true /* isConfig */);
int32_t codecMaxInputSize = -1;
if (mInputFormat->findInt32(KEY_MAX_INPUT_SIZE, &codecMaxInputSize)) {
@@ -3964,7 +4118,7 @@
asString(previousState),
asString(TunnelPeekState::kBufferRendered));
}
- updatePlaybackDuration(msg);
+ processRenderedFrames(msg);
// check that we have a notification set
if (mOnFrameRenderedNotification != NULL) {
sp<AMessage> notify = mOnFrameRenderedNotification->dup();
@@ -4158,6 +4312,11 @@
mState, stateString(mState).c_str());
break;
}
+
+ if (mIsSurfaceToDisplay) {
+ mVideoRenderQualityTracker.resetForDiscontinuity();
+ }
+
// Notify the RM that the codec has been stopped.
ClientConfigParcel clientConfig;
initClientConfigParcel(clientConfig);
@@ -4213,6 +4372,10 @@
break;
}
+ if (mIsSurfaceToDisplay) {
+ mVideoRenderQualityTracker.resetForDiscontinuity();
+ }
+
if (mFlags & kFlagIsAsync) {
setState(FLUSHED);
} else {
@@ -4464,6 +4627,7 @@
} else {
mTunneled = false;
}
+ mediametrics_setInt32(mMetricsHandle, kCodecTunneled, mTunneled ? 1 : 0);
int32_t background = 0;
if (format->findInt32("android._background-mode", &background) && background) {
@@ -5815,6 +5979,10 @@
}
if (err == OK) {
+ if (mTunneled && (flags & (BUFFER_FLAG_DECODE_ONLY | BUFFER_FLAG_END_OF_STREAM)) == 0) {
+ mVideoRenderQualityTracker.onTunnelFrameQueued(timeUs);
+ }
+
// synchronization boundary for getBufferAndFormat
Mutex::Autolock al(mBufferLock);
info->mOwnedByClient = false;
@@ -5897,7 +6065,7 @@
}
if (render && buffer->size() != 0) {
- int64_t mediaTimeUs = -1;
+ int64_t mediaTimeUs = INT64_MIN;
buffer->meta()->findInt64("timeUs", &mediaTimeUs);
bool noRenderTime = false;
@@ -5927,7 +6095,12 @@
// If rendering to the screen, then schedule a time in the future to poll to see if this
// frame was ever rendered to seed onFrameRendered callbacks.
- if (mIsSurfaceToScreen) {
+ if (mIsSurfaceToDisplay) {
+ if (mediaTimeUs != INT64_MIN) {
+ if (noRenderTime) {
+ mVideoRenderQualityTracker.onFrameReleased(mediaTimeUs);
+ } else {
+ mVideoRenderQualityTracker.onFrameReleased(mediaTimeUs, renderTimeNs);
+ }
+ }
// can't initialize this in the constructor because the Looper parent class needs to be
// initialized first
if (mMsgPollForRenderedBuffers == nullptr) {
@@ -5957,6 +6130,12 @@
ALOGI("rendring output error %d", err);
}
} else {
+ if (mIsSurfaceToDisplay && buffer->size() != 0) {
+ int64_t mediaTimeUs = INT64_MIN;
+ if (buffer->meta()->findInt64("timeUs", &mediaTimeUs)) {
+ mVideoRenderQualityTracker.onFrameSkipped(mediaTimeUs);
+ }
+ }
mBufferChannel->discardBuffer(buffer);
}
@@ -6023,7 +6202,7 @@
// in case we don't connect, ensure that we don't signal the surface is
// connected to the screen
- mIsSurfaceToScreen = false;
+ mIsSurfaceToDisplay = false;
err = nativeWindowConnect(surface.get(), "connectToSurface");
if (err == OK) {
@@ -6053,7 +6232,7 @@
// keep track whether or not the buffers of the connected surface go to the screen
int result = 0;
surface->query(NATIVE_WINDOW_QUEUES_TO_WINDOW_COMPOSER, &result);
- mIsSurfaceToScreen = result != 0;
+ mIsSurfaceToDisplay = result != 0;
}
}
// do not return ALREADY_EXISTS unless surfaces are the same
@@ -6071,7 +6250,7 @@
}
// assume disconnected even on error
mSurface.clear();
- mIsSurfaceToScreen = false;
+ mIsSurfaceToDisplay = false;
}
return err;
}
diff --git a/media/libstagefright/VideoRenderQualityTracker.cpp b/media/libstagefright/VideoRenderQualityTracker.cpp
new file mode 100644
index 0000000..4f12a37
--- /dev/null
+++ b/media/libstagefright/VideoRenderQualityTracker.cpp
@@ -0,0 +1,739 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "VideoRenderQualityTracker"
+#include <utils/Log.h>
+
+#include <media/stagefright/VideoRenderQualityTracker.h>
+
+#include <assert.h>
+#include <charconv>
+#include <cmath>
+#include <stdio.h>
+#include <sys/time.h>
+
+#include <android-base/parsebool.h>
+#include <android-base/parseint.h>
+
+namespace android {
+
+using android::base::ParseBoolResult;
+
+static constexpr float FRAME_RATE_UNDETERMINED = VideoRenderQualityMetrics::FRAME_RATE_UNDETERMINED;
+static constexpr float FRAME_RATE_24_3_2_PULLDOWN =
+ VideoRenderQualityMetrics::FRAME_RATE_24_3_2_PULLDOWN;
+
+typedef VideoRenderQualityTracker::Configuration::GetServerConfigurableFlagFn
+ GetServerConfigurableFlagFn;
+
+static void getServerConfigurableFlag(GetServerConfigurableFlagFn getServerConfigurableFlagFn,
+ char const *flagNameSuffix, bool *value) {
+ std::string flagName("render_metrics_");
+ flagName.append(flagNameSuffix);
+ std::string valueStr = (*getServerConfigurableFlagFn)("media_native", flagName,
+ *value ? "true" : "false");
+ switch (android::base::ParseBool(valueStr)) {
+ case ParseBoolResult::kTrue: *value = true; break;
+ case ParseBoolResult::kFalse: *value = false; break;
+ case ParseBoolResult::kError:
+ ALOGW("failed to parse server-configurable flag '%s' from '%s'", flagNameSuffix,
+ valueStr.c_str());
+ break;
+ }
+}
+
+static void getServerConfigurableFlag(GetServerConfigurableFlagFn getServerConfigurableFlagFn,
+ char const *flagNameSuffix, int32_t *value) {
+ char defaultStr[12]; // large enough for INT32_MIN ("-2147483648") plus the NUL terminator
+ snprintf(defaultStr, sizeof(defaultStr), "%d", int(*value));
+ std::string flagName("render_metrics_");
+ flagName.append(flagNameSuffix);
+ std::string valueStr = (*getServerConfigurableFlagFn)("media_native", flagName, defaultStr);
+ if (valueStr.empty() || !android::base::ParseInt(valueStr.c_str(), value)) {
+ ALOGW("failed to parse server-configurable flag '%s' from '%s'", flagNameSuffix,
+ valueStr.c_str());
+ return;
+ }
+}
+
+template<typename T>
+static void getServerConfigurableFlag(GetServerConfigurableFlagFn getServerConfigurableFlagFn,
+ char const *flagNameSuffix, std::vector<T> *value) {
+ std::stringstream sstr;
+ for (size_t i = 0; i < value->size(); ++i) {
+ if (i != 0) {
+ sstr << ",";
+ }
+ sstr << (*value)[i];
+ }
+ std::string flagName("render_metrics_");
+ flagName.append(flagNameSuffix);
+ std::string valueStr = (*getServerConfigurableFlagFn)("media_native", flagName, sstr.str());
+ if (valueStr.size() == 0) {
+ return;
+ }
+ // note: using android::base::Tokenize fails to catch parsing failures for values ending in ','
+ std::vector<T> newValues;
+ const char *p = valueStr.c_str();
+ const char *last = p + valueStr.size();
+ while (p != last) {
+ if (*p == ',') {
+ p++;
+ }
+ T parsed = -1;
+ auto [ptr, error] = std::from_chars(p, last, parsed);
+ if (error == std::errc::invalid_argument || error == std::errc::result_out_of_range) {
+ ALOGW("failed to parse server-configurable flag '%s' from '%s'", flagNameSuffix,
+ valueStr.c_str());
+ return;
+ }
+ p = ptr;
+ newValues.push_back(parsed);
+ }
+ *value = std::move(newValues);
+}
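+// For example, a server-supplied value of "1,20,40,60" parses to {1, 20, 40, 60}, while a
+// malformed value such as "1,20,x" or "1,20," logs a warning and leaves the compiled-in
+// default untouched.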
+
+VideoRenderQualityMetrics::VideoRenderQualityMetrics() {
+ clear();
+}
+
+void VideoRenderQualityMetrics::clear() {
+ firstRenderTimeUs = 0;
+ frameReleasedCount = 0;
+ frameRenderedCount = 0;
+ frameDroppedCount = 0;
+ frameSkippedCount = 0;
+ contentFrameRate = FRAME_RATE_UNDETERMINED;
+ desiredFrameRate = FRAME_RATE_UNDETERMINED;
+ actualFrameRate = FRAME_RATE_UNDETERMINED;
+ freezeEventCount = 0;
+ freezeDurationMsHistogram.clear();
+ freezeDistanceMsHistogram.clear();
+ judderEventCount = 0;
+ judderScoreHistogram.clear();
+}
+
+VideoRenderQualityTracker::Configuration
+ VideoRenderQualityTracker::Configuration::getFromServerConfigurableFlags(
+ GetServerConfigurableFlagFn getServerConfigurableFlagFn) {
+ VideoRenderQualityTracker::Configuration c;
+#define getFlag(FIELDNAME, FLAGNAME) \
+ getServerConfigurableFlag(getServerConfigurableFlagFn, FLAGNAME, &c.FIELDNAME)
+ getFlag(enabled, "enabled");
+ getFlag(areSkippedFramesDropped, "are_skipped_frames_dropped");
+ getFlag(maxExpectedContentFrameDurationUs, "max_expected_content_frame_duration_us");
+ getFlag(frameRateDetectionToleranceUs, "frame_rate_detection_tolerance_us");
+ getFlag(liveContentFrameDropToleranceUs, "live_content_frame_drop_tolerance_us");
+ getFlag(freezeDurationMsHistogramBuckets, "freeze_duration_ms_histogram_buckets");
+ getFlag(freezeDurationMsHistogramToScore, "freeze_duration_ms_histogram_to_score");
+ getFlag(freezeDistanceMsHistogramBuckets, "freeze_distance_ms_histogram_buckets");
+ getFlag(freezeEventMax, "freeze_event_max");
+ getFlag(freezeEventDetailsMax, "freeze_event_details_max");
+ getFlag(freezeEventDistanceToleranceMs, "freeze_event_distance_tolerance_ms");
+ getFlag(judderErrorToleranceUs, "judder_error_tolerance_us");
+ getFlag(judderScoreHistogramBuckets, "judder_score_histogram_buckets");
+ getFlag(judderScoreHistogramToScore, "judder_score_histogram_to_score");
+ getFlag(judderEventMax, "judder_event_max");
+ getFlag(judderEventDetailsMax, "judder_event_details_max");
+ getFlag(judderEventDistanceToleranceMs, "judder_event_distance_tolerance_ms");
+#undef getFlag
+ return c;
+}
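+// The flags above live in the "media_native" namespace and carry the "render_metrics_"
+// prefix built by getServerConfigurableFlag(), e.g. "render_metrics_enabled" or
+// "render_metrics_freeze_duration_ms_histogram_buckets".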
+
+VideoRenderQualityTracker::Configuration::Configuration() {
+ enabled = true;
+
+ // Assume that the app is skipping frames because it's detected that the frame couldn't be
+ // rendered in time.
+ areSkippedFramesDropped = true;
+
+ // 400ms is 8 frames at 20 frames per second and 24 frames at 60 frames per second
+ maxExpectedContentFrameDurationUs = 400 * 1000;
+
+ // Allow for 2 milliseconds of deviation when detecting frame rates
+ frameRateDetectionToleranceUs = 2 * 1000;
+
+ // Allow for a tolerance of 200 milliseconds for determining if we moved forward in content time
+ // because of frame drops for live content, or because the user is seeking.
+ liveContentFrameDropToleranceUs = 200 * 1000;
+
+ // Freeze configuration
+ freezeDurationMsHistogramBuckets = {1, 20, 40, 60, 80, 100, 120, 150, 175, 225, 300, 400, 500};
+ freezeDurationMsHistogramToScore = {1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1};
+ freezeDistanceMsHistogramBuckets = {0, 20, 100, 400, 1000, 2000, 3000, 4000, 8000, 15000, 30000,
+ 60000};
+ freezeEventMax = 0; // enabled only when debugging
+ freezeEventDetailsMax = 20;
+ freezeEventDistanceToleranceMs = 60000; // lump freeze occurrences together when 60s or less
+
+ // Judder configuration
+ judderErrorToleranceUs = 2000;
+ judderScoreHistogramBuckets = {1, 4, 5, 9, 11, 20, 30, 40, 50, 60, 70, 80};
+ judderScoreHistogramToScore = {1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1};
+ judderEventMax = 0; // enabled only when debugging
+ judderEventDetailsMax = 20;
+ judderEventDistanceToleranceMs = 5000; // lump judder occurrences together when 5s or less
+}
+
+VideoRenderQualityTracker::VideoRenderQualityTracker() : mConfiguration(Configuration()) {
+ configureHistograms(mMetrics, mConfiguration);
+ clear();
+}
+
+VideoRenderQualityTracker::VideoRenderQualityTracker(const Configuration &configuration) :
+ mConfiguration(configuration) {
+ configureHistograms(mMetrics, mConfiguration);
+ clear();
+}
+
+void VideoRenderQualityTracker::onTunnelFrameQueued(int64_t contentTimeUs) {
+ if (!mConfiguration.enabled) {
+ return;
+ }
+
+ // Since P-frames are queued out of order, hold onto the P-frame until we can track it in
+ // render order. This only works because today's encoding algorithms allow B-frames to
+ // refer to only ONE P-frame that comes after them. If the cardinality of P-frames
+ // in a single mini-GOP is increased, this algorithm breaks down.
+ if (mTunnelFrameQueuedContentTimeUs == -1) {
+ mTunnelFrameQueuedContentTimeUs = contentTimeUs;
+ } else if (contentTimeUs < mTunnelFrameQueuedContentTimeUs) {
+ onFrameReleased(contentTimeUs, 0);
+ } else {
+ onFrameReleased(mTunnelFrameQueuedContentTimeUs, 0);
+ mTunnelFrameQueuedContentTimeUs = contentTimeUs;
+ }
+}
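+// Example (illustrative content times, in ms): frames queued in decode order
+// 0, 100, 33, 66, 200. Frame 0 is held; queueing 100 releases 0 and holds 100; 33 and 66
+// are earlier than the held frame and are released immediately; queueing 200 releases
+// 100. The held frame is thus always released in render order: 0, 33, 66, 100.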
+
+void VideoRenderQualityTracker::onFrameSkipped(int64_t contentTimeUs) {
+ if (!mConfiguration.enabled) {
+ return;
+ }
+
+ // Frames skipped at the beginning shouldn't really be counted as skipped frames, since the
+ // app might be seeking to a starting point that isn't the first key frame.
+ if (mLastRenderTimeUs == -1) {
+ return;
+ }
+
+ resetIfDiscontinuity(contentTimeUs, -1);
+
+ // Frames skipped at the end of playback shouldn't be counted as skipped frames, since the
+ // app could be terminating the playback. The pending count will be added to the metrics if and
+ // when the next frame is rendered.
+ mPendingSkippedFrameContentTimeUsList.push_back(contentTimeUs);
+}
+
+void VideoRenderQualityTracker::onFrameReleased(int64_t contentTimeUs) {
+ onFrameReleased(contentTimeUs, nowUs() * 1000);
+}
+
+void VideoRenderQualityTracker::onFrameReleased(int64_t contentTimeUs,
+ int64_t desiredRenderTimeNs) {
+ if (!mConfiguration.enabled) {
+ return;
+ }
+
+ int64_t desiredRenderTimeUs = desiredRenderTimeNs / 1000;
+ resetIfDiscontinuity(contentTimeUs, desiredRenderTimeUs);
+ mMetrics.frameReleasedCount++;
+ mNextExpectedRenderedFrameQueue.push({contentTimeUs, desiredRenderTimeUs});
+ mLastContentTimeUs = contentTimeUs;
+}
+
+void VideoRenderQualityTracker::onFrameRendered(int64_t contentTimeUs, int64_t actualRenderTimeNs,
+ FreezeEvent *freezeEventOut,
+ JudderEvent *judderEventOut) {
+ if (!mConfiguration.enabled) {
+ return;
+ }
+
+ int64_t actualRenderTimeUs = actualRenderTimeNs / 1000;
+
+ if (mLastRenderTimeUs != -1) {
+ mRenderDurationMs += (actualRenderTimeUs - mLastRenderTimeUs) / 1000;
+ }
+ // Now that a frame has been rendered, the previously skipped frames can be processed as skipped
+ // frames since the app is not skipping them to terminate playback.
+ for (int64_t contentTimeUs : mPendingSkippedFrameContentTimeUsList) {
+ processMetricsForSkippedFrame(contentTimeUs);
+ }
+ mPendingSkippedFrameContentTimeUsList = {};
+
+ // The frame held back by onTunnelFrameQueued() may be the last frame of the video; if
+ // it is the frame that was just rendered, release it now.
+ if (contentTimeUs == mTunnelFrameQueuedContentTimeUs && mTunnelFrameQueuedContentTimeUs != -1) {
+ onFrameReleased(mTunnelFrameQueuedContentTimeUs, 0);
+ mTunnelFrameQueuedContentTimeUs = -1;
+ }
+
+ static const FrameInfo noFrame = {-1, -1};
+ FrameInfo nextExpectedFrame = noFrame;
+ while (!mNextExpectedRenderedFrameQueue.empty()) {
+ nextExpectedFrame = mNextExpectedRenderedFrameQueue.front();
+ mNextExpectedRenderedFrameQueue.pop();
+ // Happy path - the rendered frame is what we expected it to be
+ if (contentTimeUs == nextExpectedFrame.contentTimeUs) {
+ break;
+ }
+ // This isn't really supposed to happen - the next rendered frame should be the expected
+ // frame, or, if there's frame drops, it will be a frame later in the content stream
+ if (contentTimeUs < nextExpectedFrame.contentTimeUs) {
+ ALOGW("Rendered frame is earlier than the next expected frame (%lld, %lld)",
+ (long long) contentTimeUs, (long long) nextExpectedFrame.contentTimeUs);
+ break;
+ }
+ processMetricsForDroppedFrame(nextExpectedFrame.contentTimeUs,
+ nextExpectedFrame.desiredRenderTimeUs);
+ }
+ processMetricsForRenderedFrame(nextExpectedFrame.contentTimeUs,
+ nextExpectedFrame.desiredRenderTimeUs, actualRenderTimeUs,
+ freezeEventOut, judderEventOut);
+ mLastRenderTimeUs = actualRenderTimeUs;
+}
+
+VideoRenderQualityTracker::FreezeEvent VideoRenderQualityTracker::getAndResetFreezeEvent() {
+ FreezeEvent event = std::move(mFreezeEvent);
+ mFreezeEvent.valid = false;
+ return event;
+}
+
+VideoRenderQualityTracker::JudderEvent VideoRenderQualityTracker::getAndResetJudderEvent() {
+ JudderEvent event = std::move(mJudderEvent);
+ mJudderEvent.valid = false;
+ return event;
+}
+
+const VideoRenderQualityMetrics &VideoRenderQualityTracker::getMetrics() {
+ if (!mConfiguration.enabled) {
+ return mMetrics;
+ }
+
+ mMetrics.freezeScore = 0;
+ if (mConfiguration.freezeDurationMsHistogramToScore.size() ==
+ mMetrics.freezeDurationMsHistogram.size()) {
+ for (size_t i = 0; i < mMetrics.freezeDurationMsHistogram.size(); ++i) {
+ mMetrics.freezeScore += mMetrics.freezeDurationMsHistogram[i] *
+ mConfiguration.freezeDurationMsHistogramToScore[i];
+ }
+ }
+ if (mRenderDurationMs > 0) {
+ mMetrics.freezeRate = float(double(mMetrics.freezeDurationMsHistogram.getSum()) /
+ mRenderDurationMs);
+ }
+
+ mMetrics.judderScore = 0;
+ if (mConfiguration.judderScoreHistogramToScore.size() == mMetrics.judderScoreHistogram.size()) {
+ for (size_t i = 0; i < mMetrics.judderScoreHistogram.size(); ++i) {
+ mMetrics.judderScore += mMetrics.judderScoreHistogram[i] *
+ mConfiguration.judderScoreHistogramToScore[i];
+ }
+ }
+ const int64_t totalFrameCount = mMetrics.frameReleasedCount + mMetrics.frameSkippedCount;
+ if (totalFrameCount > 0) {
+ mMetrics.judderRate = float(double(mMetrics.judderScoreHistogram.getCount()) /
+ totalFrameCount);
+ }
+
+ return mMetrics;
+}
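+// Worked example with the default configuration (all per-bucket weights are 1): three
+// freezes of 25 ms and one of 90 ms put 3 counts in the [20, 40) bucket and 1 count in
+// the [80, 100) bucket, so freezeScore = 3*1 + 1*1 = 4 and
+// freezeRate = (3*25 + 90) ms / mRenderDurationMs.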
+
+void VideoRenderQualityTracker::clear() {
+ mRenderDurationMs = 0;
+ mMetrics.clear();
+ resetForDiscontinuity();
+}
+
+void VideoRenderQualityTracker::resetForDiscontinuity() {
+ mLastContentTimeUs = -1;
+ mLastRenderTimeUs = -1;
+ mLastFreezeEndTimeUs = -1;
+ mLastJudderEndTimeUs = -1;
+ mWasPreviousFrameDropped = false;
+ mFreezeEvent.valid = false;
+ mJudderEvent.valid = false;
+
+ // Don't worry about tracking frame rendering times from now up until playback catches up to the
+ // discontinuity. While stuttering or freezing could be found in the next few frames, the impact
+ // to the user is minimal, so it is better to keep things simple and not bother.
+ mNextExpectedRenderedFrameQueue = {};
+ mTunnelFrameQueuedContentTimeUs = -1;
+
+ // Ignore any frames that were skipped just prior to the discontinuity.
+ mPendingSkippedFrameContentTimeUsList = {};
+
+ // All frame durations can now be ignored, since all bets are off on what the render
+ // durations should be after the discontinuity.
+ for (int i = 0; i < FrameDurationUs::SIZE; ++i) {
+ mActualFrameDurationUs[i] = -1;
+ mDesiredFrameDurationUs[i] = -1;
+ mContentFrameDurationUs[i] = -1;
+ }
+ mActualFrameDurationUs.priorTimestampUs = -1;
+ mDesiredFrameDurationUs.priorTimestampUs = -1;
+ mContentFrameDurationUs.priorTimestampUs = -1;
+}
+
+bool VideoRenderQualityTracker::resetIfDiscontinuity(int64_t contentTimeUs,
+ int64_t desiredRenderTimeUs) {
+ if (mLastContentTimeUs == -1) {
+ resetForDiscontinuity();
+ return true;
+ }
+ if (contentTimeUs < mLastContentTimeUs) {
+ ALOGI("Video playback jumped %d ms backwards in content time (%d -> %d)",
+ int((mLastContentTimeUs - contentTimeUs) / 1000), int(mLastContentTimeUs / 1000),
+ int(contentTimeUs / 1000));
+ resetForDiscontinuity();
+ return true;
+ }
+ if (contentTimeUs - mLastContentTimeUs > mConfiguration.maxExpectedContentFrameDurationUs) {
+ // The content frame duration could be long due to frame drops for live content. This can be
+ // detected by looking at the app's desired rendering duration. If the app's rendered frame
+ // duration is roughly the same as the content's frame duration, then it is assumed that
+ // the forward discontinuity is due to frame drops for live content. A false positive can
+ // occur if the time the user spends seeking is equal to the duration of the seek. This is
+ // very unlikely to occur in practice but CAN occur - the user starts seeking forward, gets
+ // distracted, and then returns to seeking forward.
+ bool skippedForwardDueToLiveContentFrameDrops = false;
+ if (desiredRenderTimeUs != -1) {
+ int64_t contentFrameDurationUs = contentTimeUs - mLastContentTimeUs;
+ int64_t desiredFrameDurationUs = desiredRenderTimeUs - mLastRenderTimeUs;
+ skippedForwardDueToLiveContentFrameDrops =
+ abs(contentFrameDurationUs - desiredFrameDurationUs) <
+ mConfiguration.liveContentFrameDropToleranceUs;
+ }
+ if (!skippedForwardDueToLiveContentFrameDrops) {
+ ALOGI("Video playback jumped %d ms forward in content time (%d -> %d) ",
+ int((contentTimeUs - mLastContentTimeUs) / 1000), int(mLastContentTimeUs / 1000),
+ int(contentTimeUs / 1000));
+ resetForDiscontinuity();
+ return true;
+ }
+ }
+ return false;
+}
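+
+// Illustrative note (not part of the original change), using hypothetical values: suppose content
+// time jumps forward by 500,000us (beyond maxExpectedContentFrameDurationUs) while the app-desired
+// render time advances by 498,000us. The difference (2,000us) falls within
+// liveContentFrameDropToleranceUs, so the jump is attributed to live content frame drops and no
+// reset occurs. Had the desired render time advanced by only 33,333us (one frame at 30fps), the
+// jump would be treated as a seek and internal state would be reset.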
+
+void VideoRenderQualityTracker::processMetricsForSkippedFrame(int64_t contentTimeUs) {
+ mMetrics.frameSkippedCount++;
+ if (mConfiguration.areSkippedFramesDropped) {
+ processMetricsForDroppedFrame(contentTimeUs, -1);
+ return;
+ }
+ updateFrameDurations(mContentFrameDurationUs, contentTimeUs);
+ updateFrameDurations(mDesiredFrameDurationUs, -1);
+ updateFrameDurations(mActualFrameDurationUs, -1);
+ updateFrameRate(mMetrics.contentFrameRate, mContentFrameDurationUs, mConfiguration);
+ mWasPreviousFrameDropped = false;
+}
+
+void VideoRenderQualityTracker::processMetricsForDroppedFrame(int64_t contentTimeUs,
+ int64_t desiredRenderTimeUs) {
+ mMetrics.frameDroppedCount++;
+ updateFrameDurations(mContentFrameDurationUs, contentTimeUs);
+ updateFrameDurations(mDesiredFrameDurationUs, desiredRenderTimeUs);
+ updateFrameDurations(mActualFrameDurationUs, -1);
+ updateFrameRate(mMetrics.contentFrameRate, mContentFrameDurationUs, mConfiguration);
+ updateFrameRate(mMetrics.desiredFrameRate, mDesiredFrameDurationUs, mConfiguration);
+ mWasPreviousFrameDropped = true;
+}
+
+void VideoRenderQualityTracker::processMetricsForRenderedFrame(int64_t contentTimeUs,
+ int64_t desiredRenderTimeUs,
+ int64_t actualRenderTimeUs,
+ FreezeEvent *freezeEventOut,
+ JudderEvent *judderEventOut) {
+ // Capture the timestamp at which the first frame was rendered
+ if (mMetrics.firstRenderTimeUs == 0) {
+ mMetrics.firstRenderTimeUs = actualRenderTimeUs;
+ }
+
+ mMetrics.frameRenderedCount++;
+
+    // The content time is -1 when it was rendered after a discontinuity (e.g. seek) was detected.
+    // So, even though a frame was rendered, its impact on the user is insignificant, so don't do
+    // anything other than count it as a rendered frame.
+ if (contentTimeUs == -1) {
+ return;
+ }
+ updateFrameDurations(mContentFrameDurationUs, contentTimeUs);
+ updateFrameDurations(mDesiredFrameDurationUs, desiredRenderTimeUs);
+ updateFrameDurations(mActualFrameDurationUs, actualRenderTimeUs);
+ updateFrameRate(mMetrics.contentFrameRate, mContentFrameDurationUs, mConfiguration);
+ updateFrameRate(mMetrics.desiredFrameRate, mDesiredFrameDurationUs, mConfiguration);
+ updateFrameRate(mMetrics.actualFrameRate, mActualFrameDurationUs, mConfiguration);
+
+ // If the previous frame was dropped, there was a freeze if we've already rendered a frame
+ if (mWasPreviousFrameDropped && mLastRenderTimeUs != -1) {
+ processFreeze(actualRenderTimeUs, mLastRenderTimeUs, mLastFreezeEndTimeUs, mFreezeEvent,
+ mMetrics, mConfiguration);
+ mLastFreezeEndTimeUs = actualRenderTimeUs;
+ }
+ maybeCaptureFreezeEvent(actualRenderTimeUs, mLastFreezeEndTimeUs, mFreezeEvent, mMetrics,
+ mConfiguration, freezeEventOut);
+
+ // Judder is computed on the prior video frame, not the current video frame
+ int64_t judderScore = computePreviousJudderScore(mActualFrameDurationUs,
+ mContentFrameDurationUs,
+ mConfiguration);
+ if (judderScore != 0) {
+ int64_t judderTimeUs = actualRenderTimeUs - mActualFrameDurationUs[0] -
+ mActualFrameDurationUs[1];
+ processJudder(judderScore, judderTimeUs, mLastJudderEndTimeUs, mActualFrameDurationUs,
+ mContentFrameDurationUs, mJudderEvent, mMetrics, mConfiguration);
+ mLastJudderEndTimeUs = judderTimeUs + mActualFrameDurationUs[1];
+ }
+ maybeCaptureJudderEvent(actualRenderTimeUs, mLastJudderEndTimeUs, mJudderEvent, mMetrics,
+ mConfiguration, judderEventOut);
+
+ mWasPreviousFrameDropped = false;
+}
+
+void VideoRenderQualityTracker::processFreeze(int64_t actualRenderTimeUs, int64_t lastRenderTimeUs,
+ int64_t lastFreezeEndTimeUs, FreezeEvent &e,
+ VideoRenderQualityMetrics &m,
+ const Configuration &c) {
+ int32_t durationMs = int32_t((actualRenderTimeUs - lastRenderTimeUs) / 1000);
+ m.freezeDurationMsHistogram.insert(durationMs);
+ int32_t distanceMs = -1;
+ if (lastFreezeEndTimeUs != -1) {
+        // The distance to the last freeze is measured from the end of the last freeze to the
+        // start of this freeze.
+ distanceMs = int32_t((lastRenderTimeUs - lastFreezeEndTimeUs) / 1000);
+ m.freezeDistanceMsHistogram.insert(distanceMs);
+ }
+ if (c.freezeEventMax > 0) {
+ if (e.valid == false) {
+ m.freezeEventCount++;
+ e.valid = true;
+ e.initialTimeUs = lastRenderTimeUs;
+ e.durationMs = 0;
+ e.sumDurationMs = 0;
+ e.sumDistanceMs = 0;
+ e.count = 0;
+ e.details.durationMs.clear();
+ e.details.distanceMs.clear();
+            // The first occurrence in the event should not have the distance recorded as part of
+            // the event, because it belongs in a vacuum between two events. However, the distance
+            // is still recorded in the details so that the times of all details in all events can
+            // be calculated.
+ } else if (distanceMs != -1) {
+ e.durationMs += distanceMs;
+ e.sumDistanceMs += distanceMs;
+ }
+ e.durationMs += durationMs;
+ e.count++;
+ e.sumDurationMs += durationMs;
+ if (e.details.durationMs.size() < c.freezeEventDetailsMax) {
+ e.details.durationMs.push_back(durationMs);
+ e.details.distanceMs.push_back(distanceMs); // -1 for first detail in the first event
+ }
+ }
+}
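+
+// Illustrative note (not part of the original change), with hypothetical values: two freezes of
+// 100ms and 80ms separated by a 3,000ms gap (within the event distance tolerance) accumulate into
+// one event with count = 2, sumDurationMs = 180, sumDistanceMs = 3,000, and
+// durationMs = 100 + 3,000 + 80 = 3,180.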
+
+void VideoRenderQualityTracker::maybeCaptureFreezeEvent(int64_t actualRenderTimeUs,
+ int64_t lastFreezeEndTimeUs, FreezeEvent &e,
+                                                        const VideoRenderQualityMetrics &m,
+ const Configuration &c,
+ FreezeEvent *freezeEventOut) {
+ if (lastFreezeEndTimeUs == -1 || !e.valid) {
+ return;
+ }
+ // Future freeze occurrences are still pulled into the current freeze event if under tolerance
+ int64_t distanceMs = (actualRenderTimeUs - lastFreezeEndTimeUs) / 1000;
+ if (distanceMs < c.freezeEventDistanceToleranceMs) {
+ return;
+ }
+ if (freezeEventOut != nullptr && m.freezeEventCount <= c.freezeEventMax) {
+ *freezeEventOut = std::move(e);
+ }
+ // start recording a new freeze event after pushing the current one back to the caller
+ e.valid = false;
+}
+
+int64_t VideoRenderQualityTracker::computePreviousJudderScore(
+ const FrameDurationUs &actualFrameDurationUs,
+ const FrameDurationUs &contentFrameDurationUs,
+ const Configuration &c) {
+ // If the frame before or after was dropped, then don't generate a judder score, since any
+ // problems with frame drops are scored as a freeze instead.
+ if (actualFrameDurationUs[0] == -1 || actualFrameDurationUs[1] == -1 ||
+ actualFrameDurationUs[2] == -1) {
+ return 0;
+ }
+
+ // Don't score judder for when playback is paused or rebuffering (long frame duration), or if
+ // the player is intentionally playing each frame at a slow rate (e.g. half-rate). If the long
+ // frame duration was unintentional, it is assumed that this will be coupled with a later frame
+ // drop, and be scored as a freeze instead of judder.
+ if (actualFrameDurationUs[1] >= 2 * contentFrameDurationUs[1]) {
+ return 0;
+ }
+
+ // The judder score is based on the error of this frame
+ int64_t errorUs = actualFrameDurationUs[1] - contentFrameDurationUs[1];
+ // Don't score judder if the previous frame has high error, but this frame has low error
+ if (abs(errorUs) < c.judderErrorToleranceUs) {
+ return 0;
+ }
+
+ // Add a penalty if this frame has judder that amplifies the problem introduced by previous
+ // judder, instead of catching up for the previous judder (50, 16, 16, 50) vs (50, 16, 50, 16)
+ int64_t previousErrorUs = actualFrameDurationUs[2] - contentFrameDurationUs[2];
+    // Don't add the penalty for errors from the previous frame if the previous frame has low error
+ if (abs(previousErrorUs) >= c.judderErrorToleranceUs) {
+ errorUs = abs(errorUs) + abs(errorUs + previousErrorUs);
+ }
+
+ // Avoid scoring judder for 3:2 pulldown or other minimally-small frame duration errors
+ if (abs(errorUs) < contentFrameDurationUs[1] / 4) {
+ return 0;
+ }
+
+ return abs(errorUs) / 1000; // error in millis to keep numbers small
+}
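+
+// Illustrative note (not part of the original change), assuming hypothetical 30fps content
+// (contentFrameDurationUs = 33,333 everywhere) and an error tolerance well below these values:
+//  * Actual durations (..., 50,000, 50,000): errorUs = 16,667, previousErrorUs = 16,667, so the
+//    amplification penalty applies: errorUs = 16,667 + |16,667 + 16,667| = 50,001 -> score 50.
+//  * Actual durations (..., 50,000, 16,667): errorUs = -16,666, previousErrorUs = 16,667, so
+//    errorUs = 16,666 + |-16,666 + 16,667| = 16,667 -> score 16 (catching up scores lower).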
+
+void VideoRenderQualityTracker::processJudder(int32_t judderScore, int64_t judderTimeUs,
+                                              int64_t lastJudderEndTimeUs,
+                                              const FrameDurationUs &actualDurationUs,
+                                              const FrameDurationUs &contentDurationUs,
+                                              JudderEvent &e, VideoRenderQualityMetrics &m,
+                                              const Configuration &c) {
+    int32_t distanceMs = -1;
+    if (lastJudderEndTimeUs != -1) {
+        distanceMs = int32_t((judderTimeUs - lastJudderEndTimeUs) / 1000);
+    }
+ m.judderScoreHistogram.insert(judderScore);
+ if (c.judderEventMax > 0) {
+ if (!e.valid) {
+ m.judderEventCount++;
+ e.valid = true;
+ e.initialTimeUs = judderTimeUs;
+ e.durationMs = 0;
+ e.sumScore = 0;
+ e.sumDistanceMs = 0;
+ e.count = 0;
+ e.details.contentRenderDurationUs.clear();
+ e.details.actualRenderDurationUs.clear();
+ e.details.distanceMs.clear();
+            // The first occurrence in the event should not have the distance recorded as part of
+            // the event, because it belongs in a vacuum between two events. However, the distance
+            // is still recorded in the details so that the times of all details in all events can
+            // be calculated.
+ } else if (distanceMs != -1) {
+ e.durationMs += distanceMs;
+ e.sumDistanceMs += distanceMs;
+ }
+ e.durationMs += actualDurationUs[1] / 1000;
+ e.count++;
+ e.sumScore += judderScore;
+ if (e.details.contentRenderDurationUs.size() < c.judderEventDetailsMax) {
+ e.details.actualRenderDurationUs.push_back(actualDurationUs[1]);
+ e.details.contentRenderDurationUs.push_back(contentDurationUs[1]);
+ e.details.distanceMs.push_back(distanceMs); // -1 for first detail in the first event
+ }
+ }
+}
+
+void VideoRenderQualityTracker::maybeCaptureJudderEvent(int64_t actualRenderTimeUs,
+ int64_t lastJudderEndTimeUs, JudderEvent &e,
+ const VideoRenderQualityMetrics &m,
+ const Configuration &c,
+ JudderEvent *judderEventOut) {
+ if (lastJudderEndTimeUs == -1 || !e.valid) {
+ return;
+ }
+ // Future judder occurrences are still pulled into the current judder event if under tolerance
+ int64_t distanceMs = (actualRenderTimeUs - lastJudderEndTimeUs) / 1000;
+ if (distanceMs < c.judderEventDistanceToleranceMs) {
+ return;
+ }
+ if (judderEventOut != nullptr && m.judderEventCount <= c.judderEventMax) {
+ *judderEventOut = std::move(e);
+ }
+ // start recording a new judder event after pushing the current one back to the caller
+ e.valid = false;
+}
+
+void VideoRenderQualityTracker::configureHistograms(VideoRenderQualityMetrics &m,
+ const Configuration &c) {
+ m.freezeDurationMsHistogram.setup(c.freezeDurationMsHistogramBuckets);
+ m.freezeDistanceMsHistogram.setup(c.freezeDistanceMsHistogramBuckets);
+ m.judderScoreHistogram.setup(c.judderScoreHistogramBuckets);
+}
+
+int64_t VideoRenderQualityTracker::nowUs() {
+ struct timespec t;
+ t.tv_sec = t.tv_nsec = 0;
+ clock_gettime(CLOCK_MONOTONIC, &t);
+ return (t.tv_sec * 1000000000LL + t.tv_nsec) / 1000LL;
+}
+
+void VideoRenderQualityTracker::updateFrameDurations(FrameDurationUs &durationUs,
+ int64_t newTimestampUs) {
+    for (int i = FrameDurationUs::SIZE - 1; i > 0; --i) {
+ durationUs[i] = durationUs[i - 1];
+ }
+ if (newTimestampUs == -1) {
+ durationUs[0] = -1;
+ } else {
+ durationUs[0] = durationUs.priorTimestampUs == -1 ? -1 :
+ newTimestampUs - durationUs.priorTimestampUs;
+ durationUs.priorTimestampUs = newTimestampUs;
+ }
+}
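+
+// Illustrative note (not part of the original change): after observing timestamps 0, 33,333 and
+// 66,666 in three successive calls, durationUs contains {33,333, 33,333, -1, -1, -1} - the first
+// sample has no prior timestamp, so its duration is recorded as -1 and shifted down the array.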
+
+void VideoRenderQualityTracker::updateFrameRate(float &frameRate, const FrameDurationUs &durationUs,
+ const Configuration &c) {
+ float newFrameRate = detectFrameRate(durationUs, c);
+ if (newFrameRate != FRAME_RATE_UNDETERMINED) {
+ frameRate = newFrameRate;
+ }
+}
+
+float VideoRenderQualityTracker::detectFrameRate(const FrameDurationUs &durationUs,
+ const Configuration &c) {
+ // At least 3 frames are necessary to detect stable frame rates
+ assert(FrameDurationUs::SIZE >= 3);
+ if (durationUs[0] == -1 || durationUs[1] == -1 || durationUs[2] == -1) {
+ return FRAME_RATE_UNDETERMINED;
+ }
+ // Only determine frame rate if the render durations are stable across 3 frames
+ if (abs(durationUs[0] - durationUs[1]) > c.frameRateDetectionToleranceUs ||
+ abs(durationUs[0] - durationUs[2]) > c.frameRateDetectionToleranceUs) {
+ return is32pulldown(durationUs, c) ? FRAME_RATE_24_3_2_PULLDOWN : FRAME_RATE_UNDETERMINED;
+ }
+ return 1000.0 * 1000.0 / durationUs[0];
+}
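+
+// Illustrative note (not part of the original change): three consecutive durations of 16,667us
+// (within tolerance of one another) yield 1,000,000 / 16,667 ~= 60.0fps.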
+
+bool VideoRenderQualityTracker::is32pulldown(const FrameDurationUs &durationUs,
+ const Configuration &c) {
+ // At least 5 frames are necessary to detect stable 3:2 pulldown
+ assert(FrameDurationUs::SIZE >= 5);
+ if (durationUs[0] == -1 || durationUs[1] == -1 || durationUs[2] == -1 || durationUs[3] == -1 ||
+ durationUs[4] == -1) {
+ return false;
+ }
+ // 3:2 pulldown expects that every other frame has identical duration...
+ if (abs(durationUs[0] - durationUs[2]) > c.frameRateDetectionToleranceUs ||
+ abs(durationUs[1] - durationUs[3]) > c.frameRateDetectionToleranceUs ||
+ abs(durationUs[0] - durationUs[4]) > c.frameRateDetectionToleranceUs) {
+ return false;
+ }
+    // ... for either 2 vsyncs or 3 vsyncs
+ if ((abs(durationUs[0] - 33333) < c.frameRateDetectionToleranceUs &&
+ abs(durationUs[1] - 50000) < c.frameRateDetectionToleranceUs) ||
+ (abs(durationUs[0] - 50000) < c.frameRateDetectionToleranceUs &&
+ abs(durationUs[1] - 33333) < c.frameRateDetectionToleranceUs)) {
+ return true;
+ }
+ return false;
+}
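+
+// Illustrative note (not part of the original change): a duration pattern of {33,333, 50,000,
+// 33,333, 50,000, 33,333} - 24fps content alternating between 2 and 3 vsyncs on a 60Hz display -
+// returns true.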
+
+} // namespace android
diff --git a/media/libstagefright/include/media/stagefright/MediaCodec.h b/media/libstagefright/include/media/stagefright/MediaCodec.h
index 3d4b6f8..144ea53 100644
--- a/media/libstagefright/include/media/stagefright/MediaCodec.h
+++ b/media/libstagefright/include/media/stagefright/MediaCodec.h
@@ -30,6 +30,9 @@
#include <media/stagefright/foundation/AHandler.h>
#include <media/stagefright/CodecErrorLog.h>
#include <media/stagefright/FrameRenderTracker.h>
+#include <media/stagefright/MediaHistogram.h>
+#include <media/stagefright/PlaybackDurationAccumulator.h>
+#include <media/stagefright/VideoRenderQualityTracker.h>
#include <utils/Vector.h>
class C2Buffer;
@@ -63,7 +66,6 @@
struct PersistentSurface;
class SoftwareRenderer;
class Surface;
-class PlaybackDurationAccumulator;
namespace hardware {
namespace cas {
namespace native {
@@ -459,7 +461,8 @@
void onGetMetrics(const sp<AMessage>& msg);
constexpr const char *asString(TunnelPeekState state, const char *default_string="?");
void updateTunnelPeek(const sp<AMessage> &msg);
- void updatePlaybackDuration(const sp<AMessage> &msg);
+ void processRenderedFrames(const sp<AMessage> &msg);
+ bool isResolutionSupported(const sp<AMessage> &format);
inline void initClientConfigParcel(ClientConfigParcel& clientConfig);
@@ -569,8 +572,9 @@
sp<CryptoAsync> mCryptoAsync;
sp<ALooper> mCryptoLooper;
- std::unique_ptr<PlaybackDurationAccumulator> mPlaybackDurationAccumulator;
- bool mIsSurfaceToScreen;
+ bool mIsSurfaceToDisplay;
+ PlaybackDurationAccumulator mPlaybackDurationAccumulator;
+ VideoRenderQualityTracker mVideoRenderQualityTracker;
MediaCodec(
const sp<ALooper> &looper, pid_t pid, uid_t uid,
@@ -712,31 +716,8 @@
int mRecentHead;
Mutex mRecentLock;
- class Histogram {
- public:
- Histogram() : mFloor(0), mWidth(0), mBelow(0), mAbove(0),
- mMin(INT64_MAX), mMax(INT64_MIN), mSum(0), mCount(0),
- mBucketCount(0), mBuckets(NULL) {};
- ~Histogram() { clear(); };
- void clear() { if (mBuckets != NULL) free(mBuckets); mBuckets = NULL; };
- bool setup(int nbuckets, int64_t width, int64_t floor = 0);
- void insert(int64_t sample);
- int64_t getMin() const { return mMin; }
- int64_t getMax() const { return mMax; }
- int64_t getCount() const { return mCount; }
- int64_t getSum() const { return mSum; }
- int64_t getAvg() const { return mSum / (mCount == 0 ? 1 : mCount); }
- std::string emit();
- private:
- int64_t mFloor, mCeiling, mWidth;
- int64_t mBelow, mAbove;
- int64_t mMin, mMax, mSum, mCount;
+ MediaHistogram<int64_t> mLatencyHist;
- int mBucketCount;
- int64_t *mBuckets;
- };
-
- Histogram mLatencyHist;
// A unique ID for the codec - Used by the metrics.
uint64_t mCodecId = 0;
diff --git a/media/libstagefright/include/media/stagefright/MediaHistogram.h b/media/libstagefright/include/media/stagefright/MediaHistogram.h
new file mode 100644
index 0000000..50fa258
--- /dev/null
+++ b/media/libstagefright/include/media/stagefright/MediaHistogram.h
@@ -0,0 +1,211 @@
+/*
+ * Copyright 2023, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef MEDIA_HISTOGRAM_H_
+#define MEDIA_HISTOGRAM_H_
+
+#include <limits>
+#include <sstream>
+#include <string>
+#include <vector>
+
+namespace android {
+
+template<typename T>
+class MediaHistogram {
+public:
+ MediaHistogram();
+ void clear();
+ bool setup(int bucketCount, T width, T floor = 0);
+ bool setup(const std::vector<T> &bucketLimits);
+ void insert(T sample);
+ size_t size();
+ int64_t operator[](int);
+ T getMin() const { return mMin; }
+ T getMax() const { return mMax; }
+    int64_t getCount() const { return mCount; }
+ T getSum() const { return mSum; }
+ T getAvg() const { return mSum / (mCount == 0 ? 1 : mCount); }
+ T getPercentile(int) const;
+ std::string emit() const;
+ std::string emitBuckets() const;
+private:
+ MediaHistogram(const MediaHistogram &); // disallow
+
+ bool allocate(int bucketCount, bool withBucketLimits);
+
+ T mFloor, mCeiling, mWidth;
+ T mMin, mMax, mSum;
+ int64_t mBelow, mAbove, mCount;
+ std::vector<T> mBuckets;
+ std::vector<T> mBucketLimits;
+};
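+
+// A minimal usage sketch (illustrative only; not part of the original change):
+//   MediaHistogram<int64_t> h;
+//   h.setup(5 /* bucketCount */, 50 /* width */, 0 /* floor */);
+//   h.insert(25);   // lands in bucket 0: [0, 50)
+//   h.insert(125);  // lands in bucket 2: [100, 150)
+//   h.insert(999);  // at or above the ceiling (250), so counted in mAbove
+//   h.emit();       // yields "0,50,0{1,0,1,0,0}1"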
+
+template<typename T>
+MediaHistogram<T>::MediaHistogram() {
+ mWidth = mCeiling = mFloor = -1;
+ clear();
+}
+
+template<typename T>
+void MediaHistogram<T>::clear() {
+ for (int i = 0; i < mBuckets.size(); ++i) {
+ mBuckets[i] = 0;
+ }
+ mMin = std::numeric_limits<T>::max();
+ mMax = std::numeric_limits<T>::min();
+ mSum = 0;
+ mCount = 0;
+ mBelow = mAbove = 0;
+}
+
+template<typename T>
+bool MediaHistogram<T>::setup(int bucketCount, T width, T floor) {
+ if (bucketCount <= 0 || width <= 0) {
+ return false;
+ }
+ if (!allocate(bucketCount, false)) {
+ return false;
+ }
+ mWidth = width;
+ mFloor = floor;
+ mCeiling = floor + bucketCount * width;
+ clear();
+ return true;
+}
+
+template<typename T>
+bool MediaHistogram<T>::setup(const std::vector<T> &bucketLimits) {
+ if (bucketLimits.size() <= 1) {
+ return false;
+ }
+ int bucketCount = bucketLimits.size() - 1;
+ if (!allocate(bucketCount, true)) {
+ return false;
+ }
+
+ mWidth = -1;
+ mFloor = bucketLimits[0];
+ for (int i = 0; i < bucketCount; ++i) {
+ mBucketLimits[i] = bucketLimits[i + 1];
+ }
+ mCeiling = bucketLimits[bucketCount];
+ clear();
+ return true;
+}
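+
+// Illustrative note (not part of the original change): bucketLimits {0, 100, 400, 1600} produces
+// three buckets - [0, 100), [100, 400) and [400, 1600) - with mFloor = 0 and mCeiling = 1600, so
+// insert(250) increments the middle bucket.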
+
+template<typename T>
+bool MediaHistogram<T>::allocate(int bucketCount, bool withBucketLimits) {
+ assert(bucketCount > 0);
+ if (bucketCount != mBuckets.size()) {
+ mBuckets = std::vector<T>(bucketCount, 0);
+ }
+ if (withBucketLimits && mBucketLimits.size() != bucketCount) {
+ mBucketLimits = std::vector<T>(bucketCount, 0);
+ }
+ return true;
+}
+
+template<typename T>
+void MediaHistogram<T>::insert(T sample) {
+ // histogram is not set up
+ if (mBuckets.size() == 0) {
+ return;
+ }
+
+ mCount++;
+ mSum += sample;
+ if (mMin > sample) mMin = sample;
+ if (mMax < sample) mMax = sample;
+
+ if (sample < mFloor) {
+ mBelow++;
+ } else if (sample >= mCeiling) {
+ mAbove++;
+ } else if (mWidth == -1) {
+ // A binary search might be more efficient for large number of buckets, but it is expected
+ // that there will never be a large amount of buckets, so keep the code simple.
+ for (int slot = 0; slot < mBucketLimits.size(); ++slot) {
+ if (sample < mBucketLimits[slot]) {
+ mBuckets[slot]++;
+ break;
+ }
+ }
+ } else {
+ int64_t slot = (sample - mFloor) / mWidth;
+ assert(slot < mBuckets.size());
+ mBuckets[slot]++;
+ }
+}
+
+template<typename T>
+size_t MediaHistogram<T>::size() {
+ return mBuckets.size() + 1;
+}
+
+template<typename T>
+int64_t MediaHistogram<T>::operator[](int i) {
+ assert(i >= 0);
+ assert(i <= mBuckets.size());
+ if (i == mBuckets.size()) {
+ return mAbove;
+ }
+ return mBuckets[i];
+}
+
+template<typename T>
+std::string MediaHistogram<T>::emit() const {
+    // emits: floor,width,below{bucket0,bucket1,...,bucketN}above
+    // or, when custom bucket limits are used: below{bucket0,bucket1,...,bucketN}above
+    // an unconfigured histogram will emit: 0{}0
+    // XXX: is this the best representation?
+ std::stringstream ss("");
+ if (mWidth == -1) {
+ ss << mBelow << "{";
+ } else {
+ ss << mFloor << "," << mWidth << "," << mBelow << "{";
+ }
+ for (int i = 0; i < mBuckets.size(); i++) {
+ if (i != 0) {
+ ss << ",";
+ }
+ ss << mBuckets[i];
+ }
+ ss << "}" << mAbove;
+ return ss.str();
+}
+
+template<typename T>
+std::string MediaHistogram<T>::emitBuckets() const {
+ std::stringstream ss("");
+ if (mWidth == -1) {
+ ss << mFloor;
+ for (int i = 0; i < mBucketLimits.size(); ++i) {
+ ss << ',' << mBucketLimits[i];
+ }
+ } else {
+ ss << mFloor;
+ for (int i = 1; i <= mBuckets.size(); ++i) {
+ ss << ',' << (mFloor + i * mWidth);
+ }
+ }
+ return ss.str();
+}
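+
+// Illustrative note (not part of the original change): after setup(3, 100, 0), emitBuckets()
+// yields "0,100,200,300"; after setup({0, 100, 400, 1600}), it yields "0,100,400,1600".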
+
+} // namespace android
+
+#endif // MEDIA_HISTOGRAM_H_
\ No newline at end of file
diff --git a/media/libstagefright/PlaybackDurationAccumulator.h b/media/libstagefright/include/media/stagefright/PlaybackDurationAccumulator.h
similarity index 95%
rename from media/libstagefright/PlaybackDurationAccumulator.h
rename to media/libstagefright/include/media/stagefright/PlaybackDurationAccumulator.h
index cb5f0c4..bdf1171 100644
--- a/media/libstagefright/PlaybackDurationAccumulator.h
+++ b/media/libstagefright/include/media/stagefright/PlaybackDurationAccumulator.h
@@ -33,7 +33,7 @@
}
// Process a render time expressed in nanoseconds.
- void processRenderTime(int64_t newRenderTimeNs) {
+ void onFrameRendered(int64_t newRenderTimeNs) {
// If we detect wrap-around or out of order frames, just ignore the duration for this
// and the next frame.
if (newRenderTimeNs < mPreviousRenderTimeNs) {
@@ -59,7 +59,7 @@
int64_t mPreviousRenderTimeNs;
};
-}
+} // namespace android
-#endif
+#endif // PLAYBACK_DURATION_ACCUMULATOR_H_
diff --git a/media/libstagefright/include/media/stagefright/VideoRenderQualityTracker.h b/media/libstagefright/include/media/stagefright/VideoRenderQualityTracker.h
new file mode 100644
index 0000000..82ba81c
--- /dev/null
+++ b/media/libstagefright/include/media/stagefright/VideoRenderQualityTracker.h
@@ -0,0 +1,449 @@
+/*
+ * Copyright 2023, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef VIDEO_RENDER_QUALITY_TRACKER_H_
+
+#define VIDEO_RENDER_QUALITY_TRACKER_H_
+
+#include <assert.h>
+#include <list>
+#include <queue>
+
+#include <media/stagefright/MediaHistogram.h>
+
+namespace android {
+
+// A variety of video rendering quality metrics.
+struct VideoRenderQualityMetrics {
+ static constexpr float FRAME_RATE_UNDETERMINED = -1.0f;
+ static constexpr float FRAME_RATE_24_3_2_PULLDOWN = -2.0f;
+
+ VideoRenderQualityMetrics();
+
+ void clear();
+
+ // The render time of the first video frame.
+ int64_t firstRenderTimeUs;
+
+ // The number of frames released to be rendered.
+ int64_t frameReleasedCount;
+
+ // The number of frames actually rendered.
+ int64_t frameRenderedCount;
+
+ // The number of frames dropped - frames that were released but never rendered.
+ int64_t frameDroppedCount;
+
+ // The number of frames that were intentionally dropped/skipped by the app.
+ int64_t frameSkippedCount;
+
+ // The frame rate as detected by looking at the position timestamp from the content stream.
+ float contentFrameRate;
+
+ // The frame rate as detected by looking at the desired render time passed in by the app.
+ float desiredFrameRate;
+
+ // The frame rate as detected by looking at the actual render time, as returned by the system
+ // post-render.
+ float actualFrameRate;
+
+ // A histogram of the durations of freezes due to dropped/skipped frames.
+ MediaHistogram<int32_t> freezeDurationMsHistogram;
+ // The computed overall freeze score using the above histogram and score conversion table. The
+ // score is based on counts in the histogram bucket, multiplied by the value in the score
+ // conversion table for that bucket. For example, the impact of a short freeze may be minimal,
+ // but the impact of long freeze may be disproportionally worse. Therefore, the score
+ // multipliers for each bucket might increase exponentially instead of linearly. A score
+ // multiplier of zero would reflect that small freeze durations have near-zero impact to the
+ // user experience.
+ int32_t freezeScore;
+ // The computed percentage of total playback duration that was frozen.
+ float freezeRate;
+ // The number of freeze events.
+ int32_t freezeEventCount;
+
+ // A histogram of the durations between each freeze.
+ MediaHistogram<int32_t> freezeDistanceMsHistogram;
+
+ // A histogram of the judder scores - based on the error tolerance between actual render
+ // duration of each frame and the ideal render duration.
+ MediaHistogram<int32_t> judderScoreHistogram;
+ // The computed overall judder score using the above histogram and score conversion table. The
+ // score is based on counts in the histogram bucket, multiplied by the value in the score
+ // conversion table for that bucket. For example, the impact of minimal judder may be small,
+ // but the impact of large judder may be disproportionally worse. Therefore, the score
+ // multipliers for each bucket might increase exponentially instead of linearly. A score
+ // multiplier of zero would reflect that small judder errors have near-zero impact to the user
+ // experience.
+ int32_t judderScore;
+ // The computed percentage of total frames that had judder.
+ float judderRate;
+ // The number of judder events.
+ int32_t judderEventCount;
+};
+
+///////////////////////////////////////////////////////
+// This class analyzes various timestamps related to video rendering to compute a set of metrics
+// that attempt to capture the quality of the user experience during video playback.
+//
+// The following timestamps (in microseconds) are analyzed to compute these metrics:
+// * The content timestamp found in the content stream, indicating the position of each video
+// frame.
+// * The desired timestamp passed in by the app, indicating at what point in time in the future
+// the app would like the frame to be rendered.
+// * The actual timestamp passed in by the display subsystem, indicating the point in time at
+// which the frame was actually rendered.
+//
+// Core to the algorithms are deriving frame durations based on these timestamps and determining
+// the result of each video frame in the content stream:
+// * skipped: the app didn't want to render the frame
+// * dropped: the display subsystem could not render the frame in time
+// * rendered: the display subsystem rendered the frame
+//
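+// A minimal usage sketch (illustrative only; timestamps below are hypothetical):
+//
+//   VideoRenderQualityTracker tracker;
+//   tracker.onFrameReleased(0 /* contentTimeUs */);
+//   tracker.onFrameRendered(0 /* contentTimeUs */, 16666667 /* actualRenderTimeNs */);
+//   const VideoRenderQualityMetrics &metrics = tracker.getMetrics();
+//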
+class VideoRenderQualityTracker {
+public:
+ // Configurable elements of the metrics algorithms
+ class Configuration {
+ public:
+ // system/server_configurable_flags/libflags/include/get_flags.h:GetServerConfigurableFlag
+ typedef std::string (*GetServerConfigurableFlagFn)(
+ const std::string& experiment_category_name,
+ const std::string& experiment_flag_name,
+ const std::string& default_value);
+
+ static Configuration getFromServerConfigurableFlags(
+ GetServerConfigurableFlagFn getServerConfigurableFlagFn);
+
+ Configuration();
+
+ // Whether or not frame render quality is tracked.
+ bool enabled;
+
+ // Whether or not frames that are intentionally not rendered by the app should be considered
+ // as dropped.
+ bool areSkippedFramesDropped;
+
+ // How large of a jump forward in content time is allowed before it is considered a
+ // discontinuity (seek/playlist) and various internal states are reset.
+ int32_t maxExpectedContentFrameDurationUs;
+
+        // The tolerance used when comparing frame durations to determine whether or not two
+        // frames have the same frame rate.
+ int32_t frameRateDetectionToleranceUs;
+
+ // A skip forward in content time could occur during frame drops of live content. Therefore
+ // the content frame duration and the app-desired frame duration are compared using this
+ // tolerance to determine whether the app is intentionally seeking forward or whether the
+ // skip forward in content time is due to frame drops. If the app-desired frame duration is
+ // short, but the content frame duration is large, it is assumed the app is intentionally
+ // seeking forward.
+ int32_t liveContentFrameDropToleranceUs;
+
+ // Freeze configuration
+ //
+ // The values used to distribute freeze durations across a histogram.
+ std::vector<int32_t> freezeDurationMsHistogramBuckets;
+ //
+ // The values used to multiply the counts in the histogram buckets above to compute an
+ // overall score. This allows the score to reflect disproportionate impact as freeze
+ // durations increase.
+ std::vector<int64_t> freezeDurationMsHistogramToScore;
+ //
+ // The values used to distribute distances between freezes across a histogram.
+ std::vector<int32_t> freezeDistanceMsHistogramBuckets;
+ //
+ // The maximum number of freeze events to send back to the caller.
+ int32_t freezeEventMax;
+ //
+ // The maximum number of detail entries tracked per freeze event.
+ int32_t freezeEventDetailsMax;
+ //
+ // The maximum distance in time between two freeze occurrences such that both will be
+ // lumped into the same freeze event.
+ int32_t freezeEventDistanceToleranceMs;
+
+ // Judder configuration
+ //
+ // A judder error lower than this value is not scored as judder.
+ int32_t judderErrorToleranceUs;
+ //
+ // The values used to distribute judder scores across a histogram.
+ std::vector<int32_t> judderScoreHistogramBuckets;
+ //
+ // The values used to multiply the counts in the histogram buckets above to compute an
+ // overall score. This allows the score to reflect disproportionate impact as judder scores
+ // increase.
+ std::vector<int64_t> judderScoreHistogramToScore;
+ //
+ // The maximum number of judder events to send back to the caller.
+ int32_t judderEventMax;
+ //
+ // The maximum number of detail entries tracked per judder event.
+ int32_t judderEventDetailsMax;
+ //
+ // The maximum distance in time between two judder occurrences such that both will be
+ // lumped into the same judder event.
+ int32_t judderEventDistanceToleranceMs;
+ };
+
+ struct FreezeEvent {
+ // Details are captured for each freeze up to a limited number. The arrays are guaranteed to
+ // have the same size.
+ struct Details {
+            // The duration of the freeze.
+ std::vector<int32_t> durationMs;
+ // The distance between the beginning of this freeze and the end of the previous freeze.
+ std::vector<int32_t> distanceMs;
+ };
+ // Whether or not the data in this structure is valid.
+ bool valid = false;
+ // The time at which the first freeze for this event was detected.
+ int64_t initialTimeUs;
+ // The total duration from the beginning of the first freeze to the end of the last freeze
+ // in this event.
+ int32_t durationMs;
+ // The number of freezes in this event.
+ int64_t count;
+ // The sum of all durations of all freezes in this event.
+ int64_t sumDurationMs;
+ // The sum of all distances between each freeze in this event.
+ int64_t sumDistanceMs;
+ // Detailed information for the first N freezes in this event.
+ Details details;
+ };
+
+ struct JudderEvent {
+ // Details are captured for each frame judder up to a limited number. The arrays are
+ // guaranteed to have the same size.
+ struct Details {
+ // The actual render duration of the frame for this judder occurrence.
+ std::vector<int32_t> actualRenderDurationUs;
+ // The content render duration of the frame for this judder occurrence.
+ std::vector<int32_t> contentRenderDurationUs;
+ // The distance from this judder occurrence and the previous judder occurrence.
+ std::vector<int32_t> distanceMs;
+ };
+ // Whether or not the data in this structure is valid.
+ bool valid = false;
+ // The time at which the first judder occurrence for this event was detected.
+ int64_t initialTimeUs;
+ // The total duration from the first judder occurrence to the last judder occurrence in this
+ // event.
+ int32_t durationMs;
+ // The number of judder occurrences in this event.
+ int64_t count;
+ // The sum of all judder scores in this event.
+ int64_t sumScore;
+ // The sum of all distances between each judder occurrence in this event.
+ int64_t sumDistanceMs;
+ // Detailed information for the first N judder occurrences in this event.
+ Details details;
+ };
+
+ VideoRenderQualityTracker();
+ VideoRenderQualityTracker(const Configuration &configuration);
+
+ // Called when a tunnel mode frame has been queued.
+ void onTunnelFrameQueued(int64_t contentTimeUs);
+
+ // Called when the app has intentionally decided not to render this frame.
+ void onFrameSkipped(int64_t contentTimeUs);
+
+ // Called when the app has requested the frame to be rendered as soon as possible.
+ void onFrameReleased(int64_t contentTimeUs);
+
+ // Called when the app has requested the frame to be rendered at a specific point in time in the
+ // future.
+ void onFrameReleased(int64_t contentTimeUs, int64_t desiredRenderTimeNs);
+
+    // Called when the system has detected that the frame has actually been rendered to the
+    // display. Any freeze or judder events that were detected are returned through the out
+    // parameters.
+ void onFrameRendered(int64_t contentTimeUs, int64_t actualRenderTimeNs,
+ FreezeEvent *freezeEventOut = nullptr,
+ JudderEvent *judderEventOut = nullptr);
+
+ // Gets and resets data for the current freeze event.
+ FreezeEvent getAndResetFreezeEvent();
+
+ // Gets and resets data for the current judder event.
+ JudderEvent getAndResetJudderEvent();
+
+ // Retrieve the metrics.
+ const VideoRenderQualityMetrics &getMetrics();
+
+ // Called when a change in codec state will result in a content discontinuity - e.g. flush.
+ void resetForDiscontinuity();
+
+ // Clear out all metrics and tracking - e.g. codec reconfigured.
+ void clear();
+
+private:
+ // Tracking of frames that are pending to be rendered to the display.
+ struct FrameInfo {
+ int64_t contentTimeUs;
+ int64_t desiredRenderTimeUs;
+ };
+
+ // Historic tracking of frame durations
+ struct FrameDurationUs {
+ static const int SIZE = 5;
+
+ FrameDurationUs() {
+ for (int i = 0; i < SIZE; ++i) {
+ durationUs[i] = -1;
+ }
+ priorTimestampUs = -1;
+ }
+
+ int32_t &operator[](int index) {
+ assert(index < SIZE);
+ return durationUs[index];
+ }
+
+ const int32_t &operator[](int index) const {
+ assert(index < SIZE);
+ return durationUs[index];
+ }
+
+ // The duration of the past N frames.
+ int32_t durationUs[SIZE];
+
+ // The timestamp of the previous frame.
+ int64_t priorTimestampUs;
+ };
+
+ // Configure histograms for the metrics.
+ static void configureHistograms(VideoRenderQualityMetrics &m, const Configuration &c);
+
+ // The current time in microseconds.
+ static int64_t nowUs();
+
+ // A new frame has been processed, so update the frame durations based on the new frame
+ // timestamp.
+ static void updateFrameDurations(FrameDurationUs &durationUs, int64_t newTimestampUs);
+
+ // Update a frame rate if, and only if, one can be detected.
+ static void updateFrameRate(float &frameRate, const FrameDurationUs &durationUs,
+ const Configuration &c);
+
+ // Examine the past few frames to detect the frame rate based on each frame's render duration.
+ static float detectFrameRate(const FrameDurationUs &durationUs, const Configuration &c);
+
+    // Determine whether or not 3:2 pulldown for displaying 24fps content on 60Hz displays is
+    // occurring.
+ static bool is32pulldown(const FrameDurationUs &durationUs, const Configuration &c);
+
+ // Process a frame freeze.
+ static void processFreeze(int64_t actualRenderTimeUs, int64_t lastRenderTimeUs,
+ int64_t lastFreezeEndTimeUs, FreezeEvent &e,
+ VideoRenderQualityMetrics &m, const Configuration &c);
+
+ // Retrieve a freeze event if an event just finished.
+ static void maybeCaptureFreezeEvent(int64_t actualRenderTimeUs, int64_t lastFreezeEndTimeUs,
+                                        FreezeEvent &e, const VideoRenderQualityMetrics &m,
+ const Configuration &c, FreezeEvent *freezeEventOut);
+
+ // Compute a judder score for the previously-rendered frame.
+ static int64_t computePreviousJudderScore(const FrameDurationUs &actualRenderDurationUs,
+ const FrameDurationUs &contentRenderDurationUs,
+ const Configuration &c);
+
+ // Process a frame judder.
+    static void processJudder(int32_t judderScore, int64_t judderTimeUs,
+                              int64_t lastJudderEndTimeUs,
+                              const FrameDurationUs &actualDurationUs,
+                              const FrameDurationUs &contentDurationUs, JudderEvent &e,
+                              VideoRenderQualityMetrics &m, const Configuration &c);
+
+ // Retrieve a judder event if an event just finished.
+ static void maybeCaptureJudderEvent(int64_t actualRenderTimeUs, int64_t lastJudderEndTimeUs,
+                                        JudderEvent &e, const VideoRenderQualityMetrics &m,
+ const Configuration &c, JudderEvent *judderEventOut);
+
+ // Check to see if a discontinuity has occurred by examining the content time and the
+ // app-desired render time. If so, reset some internal state.
+ bool resetIfDiscontinuity(int64_t contentTimeUs, int64_t desiredRenderTimeUs);
+
+ // Update the metrics because a skipped frame was detected.
+ void processMetricsForSkippedFrame(int64_t contentTimeUs);
+
+ // Update the metrics because a dropped frame was detected.
+ void processMetricsForDroppedFrame(int64_t contentTimeUs, int64_t desiredRenderTimeUs);
+
+ // Update the metrics because a rendered frame was detected.
+ void processMetricsForRenderedFrame(int64_t contentTimeUs, int64_t desiredRenderTimeUs,
+ int64_t actualRenderTimeUs,
+ FreezeEvent *freezeEventOut, JudderEvent *judderEventOut);
+
+ // Configurable elements of the metrics algorithms.
+ const Configuration mConfiguration;
+
+ // Metrics are updated every time a frame event occurs - skipped, dropped, rendered.
+ VideoRenderQualityMetrics mMetrics;
+
+ // The most recently processed timestamp referring to the position in the content stream.
+ int64_t mLastContentTimeUs;
+
+ // The most recently processed timestamp referring to the wall clock time a frame was rendered.
+ int64_t mLastRenderTimeUs;
+
+    // The timestamp of the first frame rendered after the most recent freeze ended.
+ int64_t mLastFreezeEndTimeUs;
+
+    // The timestamp at which the most recent judder occurrence ended.
+ int64_t mLastJudderEndTimeUs;
+
+ // The render duration of the playback.
+ int64_t mRenderDurationMs;
+
+ // True if the previous frame was dropped.
+ bool mWasPreviousFrameDropped;
+
+ // The freeze event that's currently being tracked.
+ FreezeEvent mFreezeEvent;
+
+ // The judder event that's currently being tracked.
+ JudderEvent mJudderEvent;
+
+ // Frames skipped at the end of playback shouldn't really be considered skipped, therefore keep
+ // a list of the frames, and process them as skipped frames the next time a frame is rendered.
+ std::list<int64_t> mPendingSkippedFrameContentTimeUsList;
+
+ // Since the system only signals when a frame is rendered, dropped frames are detected by
+ // checking to see if the next expected frame is rendered. If not, it is considered dropped.
+ std::queue<FrameInfo> mNextExpectedRenderedFrameQueue;
+
+ // When B-frames are present in the stream, a P-frame will be queued before the B-frame even
+ // though it is rendered after. Therefore, the P-frame is held here and not inserted into
+ // mNextExpectedRenderedFrameQueue until it should be inserted to maintain render order.
+ int64_t mTunnelFrameQueuedContentTimeUs;
+
+ // Frame durations derived from timestamps encoded into the content stream. These are the
+ // durations that each frame is supposed to be rendered for.
+ FrameDurationUs mContentFrameDurationUs;
+
+ // Frame durations derived from timestamps passed in by the app, indicating the wall clock time
+ // at which the app would like to have the frame rendered.
+ FrameDurationUs mDesiredFrameDurationUs;
+
+ // Frame durations derived from timestamps captured by the display subsystem, indicating the
+    // wall clock time at which the frame is actually rendered.
+ FrameDurationUs mActualFrameDurationUs;
+};
+
+} // namespace android
+
+#endif // VIDEO_RENDER_QUALITY_TRACKER_H_
diff --git a/media/libstagefright/tests/Android.bp b/media/libstagefright/tests/Android.bp
index e6b67ce..581292e 100644
--- a/media/libstagefright/tests/Android.bp
+++ b/media/libstagefright/tests/Android.bp
@@ -55,3 +55,20 @@
"-Wall",
],
}
+
+cc_test {
+ name: "VideoRenderQualityTracker_test",
+ srcs: ["VideoRenderQualityTracker_test.cpp"],
+
+ shared_libs: [
+ "libbase",
+ "liblog",
+ "libstagefright",
+ ],
+
+ cflags: [
+ "-Werror",
+ "-Wall",
+ ],
+}
diff --git a/media/libstagefright/tests/VideoRenderQualityTracker_test.cpp b/media/libstagefright/tests/VideoRenderQualityTracker_test.cpp
new file mode 100644
index 0000000..7823922
--- /dev/null
+++ b/media/libstagefright/tests/VideoRenderQualityTracker_test.cpp
@@ -0,0 +1,1027 @@
+/*
+ * Copyright 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// #define LOG_NDEBUG 0
+#define LOG_TAG "VideoRenderQualityTracker_test"
+#include <utils/Log.h>
+
+#include <gtest/gtest.h>
+
+#include <media/stagefright/VideoRenderQualityTracker.h>
+
+namespace android {
+
+using Metrics = VideoRenderQualityMetrics;
+using Configuration = VideoRenderQualityTracker::Configuration;
+using FreezeEvent = VideoRenderQualityTracker::FreezeEvent;
+using JudderEvent = VideoRenderQualityTracker::JudderEvent;
+
+static constexpr float FRAME_RATE_UNDETERMINED = VideoRenderQualityMetrics::FRAME_RATE_UNDETERMINED;
+static constexpr float FRAME_RATE_24_3_2_PULLDOWN =
+ VideoRenderQualityMetrics::FRAME_RATE_24_3_2_PULLDOWN;
+
+class Helper {
+public:
+ Helper(double contentFrameDurationMs, const Configuration &configuration) :
+ mVideoRenderQualityTracker(configuration) {
+ mContentFrameDurationUs = int64_t(contentFrameDurationMs * 1000);
+ mMediaTimeUs = 0;
+ mClockTimeNs = 0;
+ }
+
+ void changeContentFrameDuration(double contentFrameDurationMs) {
+ mContentFrameDurationUs = int64_t(contentFrameDurationMs * 1000);
+ }
+
+ template<typename T>
+ void render(std::initializer_list<T> renderDurationMsList) {
+ for (auto renderDurationMs : renderDurationMsList) {
+ mVideoRenderQualityTracker.onFrameReleased(mMediaTimeUs);
+ mVideoRenderQualityTracker.onFrameRendered(mMediaTimeUs, mClockTimeNs, &mFreezeEvent,
+ &mJudderEvent);
+ mMediaTimeUs += mContentFrameDurationUs;
+ mClockTimeNs += int64_t(renderDurationMs * 1000 * 1000);
+ }
+ }
+
+ void render(int numFrames, float durationMs = -1) {
+ int64_t durationUs = durationMs < 0 ? mContentFrameDurationUs : durationMs * 1000;
+ for (int i = 0; i < numFrames; ++i) {
+ mVideoRenderQualityTracker.onFrameReleased(mMediaTimeUs);
+ mVideoRenderQualityTracker.onFrameRendered(mMediaTimeUs, mClockTimeNs, &mFreezeEvent,
+ &mJudderEvent);
+ mMediaTimeUs += mContentFrameDurationUs;
+ mClockTimeNs += durationUs * 1000;
+ }
+ }
+
+ void skip(int numFrames) {
+ for (int i = 0; i < numFrames; ++i) {
+ mVideoRenderQualityTracker.onFrameSkipped(mMediaTimeUs);
+ mMediaTimeUs += mContentFrameDurationUs;
+ mClockTimeNs += mContentFrameDurationUs * 1000;
+ }
+ }
+
+ void drop(int numFrames) {
+ for (int i = 0; i < numFrames; ++i) {
+ mVideoRenderQualityTracker.onFrameReleased(mMediaTimeUs);
+ mMediaTimeUs += mContentFrameDurationUs;
+ mClockTimeNs += mContentFrameDurationUs * 1000;
+ }
+ }
+
+ const Metrics & getMetrics() {
+ return mVideoRenderQualityTracker.getMetrics();
+ }
+
+ FreezeEvent getAndClearFreezeEvent() {
+ FreezeEvent e = std::move(mFreezeEvent);
+ mFreezeEvent.valid = false;
+ return e;
+ }
+
+ JudderEvent getAndClearJudderEvent() {
+ JudderEvent e = std::move(mJudderEvent);
+ mJudderEvent.valid = false;
+ return e;
+ }
+
+private:
+ VideoRenderQualityTracker mVideoRenderQualityTracker;
+ int64_t mContentFrameDurationUs;
+ int64_t mMediaTimeUs;
+ int64_t mClockTimeNs;
+ VideoRenderQualityTracker::FreezeEvent mFreezeEvent;
+ VideoRenderQualityTracker::JudderEvent mJudderEvent;
+};
+
+class VideoRenderQualityTrackerTest : public ::testing::Test {
+public:
+ VideoRenderQualityTrackerTest() {}
+};
+
+TEST_F(VideoRenderQualityTrackerTest, getFromServerConfigurableFlags_withDefaults) {
+ Configuration::GetServerConfigurableFlagFn getServerConfigurableFlagFn =
+ [](const std::string &, const std::string &, const std::string &defaultStr) -> std::string {
+ return defaultStr;
+ };
+
+ Configuration c = Configuration::getFromServerConfigurableFlags(getServerConfigurableFlagFn);
+ Configuration d; // default configuration
+ EXPECT_EQ(c.enabled, d.enabled);
+ EXPECT_EQ(c.areSkippedFramesDropped, d.areSkippedFramesDropped);
+ EXPECT_EQ(c.maxExpectedContentFrameDurationUs, d.maxExpectedContentFrameDurationUs);
+ EXPECT_EQ(c.frameRateDetectionToleranceUs, d.frameRateDetectionToleranceUs);
+ EXPECT_EQ(c.liveContentFrameDropToleranceUs, d.liveContentFrameDropToleranceUs);
+ EXPECT_EQ(c.freezeDurationMsHistogramBuckets, d.freezeDurationMsHistogramBuckets);
+ EXPECT_EQ(c.freezeDurationMsHistogramToScore, d.freezeDurationMsHistogramToScore);
+ EXPECT_EQ(c.freezeDistanceMsHistogramBuckets, d.freezeDistanceMsHistogramBuckets);
+ EXPECT_EQ(c.freezeEventMax, d.freezeEventMax);
+ EXPECT_EQ(c.freezeEventDetailsMax, d.freezeEventDetailsMax);
+ EXPECT_EQ(c.freezeEventDistanceToleranceMs, d.freezeEventDistanceToleranceMs);
+ EXPECT_EQ(c.judderErrorToleranceUs, d.judderErrorToleranceUs);
+ EXPECT_EQ(c.judderScoreHistogramBuckets, d.judderScoreHistogramBuckets);
+ EXPECT_EQ(c.judderScoreHistogramToScore, d.judderScoreHistogramToScore);
+ EXPECT_EQ(c.judderEventMax, d.judderEventMax);
+ EXPECT_EQ(c.judderEventDetailsMax, d.judderEventDetailsMax);
+ EXPECT_EQ(c.judderEventDistanceToleranceMs, d.judderEventDistanceToleranceMs);
+}
+
+TEST_F(VideoRenderQualityTrackerTest, getFromServerConfigurableFlags_withEmpty) {
+ Configuration::GetServerConfigurableFlagFn getServerConfigurableFlagFn{
+ [](const std::string &, const std::string &, const std::string &) -> std::string {
+ return "";
+ }
+ };
+ Configuration c = Configuration::getFromServerConfigurableFlags(getServerConfigurableFlagFn);
+ Configuration d; // default configuration
+ EXPECT_EQ(c.enabled, d.enabled);
+ EXPECT_EQ(c.areSkippedFramesDropped, d.areSkippedFramesDropped);
+ EXPECT_EQ(c.maxExpectedContentFrameDurationUs, d.maxExpectedContentFrameDurationUs);
+ EXPECT_EQ(c.frameRateDetectionToleranceUs, d.frameRateDetectionToleranceUs);
+ EXPECT_EQ(c.liveContentFrameDropToleranceUs, d.liveContentFrameDropToleranceUs);
+ EXPECT_EQ(c.freezeDurationMsHistogramBuckets, d.freezeDurationMsHistogramBuckets);
+ EXPECT_EQ(c.freezeDurationMsHistogramToScore, d.freezeDurationMsHistogramToScore);
+ EXPECT_EQ(c.freezeDistanceMsHistogramBuckets, d.freezeDistanceMsHistogramBuckets);
+ EXPECT_EQ(c.freezeEventMax, d.freezeEventMax);
+ EXPECT_EQ(c.freezeEventDetailsMax, d.freezeEventDetailsMax);
+ EXPECT_EQ(c.freezeEventDistanceToleranceMs, d.freezeEventDistanceToleranceMs);
+ EXPECT_EQ(c.judderErrorToleranceUs, d.judderErrorToleranceUs);
+ EXPECT_EQ(c.judderScoreHistogramBuckets, d.judderScoreHistogramBuckets);
+ EXPECT_EQ(c.judderScoreHistogramToScore, d.judderScoreHistogramToScore);
+ EXPECT_EQ(c.judderEventMax, d.judderEventMax);
+ EXPECT_EQ(c.judderEventDetailsMax, d.judderEventDetailsMax);
+ EXPECT_EQ(c.judderEventDistanceToleranceMs, d.judderEventDistanceToleranceMs);
+}
+
+TEST_F(VideoRenderQualityTrackerTest, getFromServerConfigurableFlags_withInvalid) {
+ Configuration::GetServerConfigurableFlagFn getServerConfigurableFlagFn{
+ [](const std::string &, const std::string &, const std::string &) -> std::string {
+ return "abc";
+ }
+ };
+ Configuration c = Configuration::getFromServerConfigurableFlags(getServerConfigurableFlagFn);
+ Configuration d; // default configuration
+ EXPECT_EQ(c.enabled, d.enabled);
+ EXPECT_EQ(c.areSkippedFramesDropped, d.areSkippedFramesDropped);
+ EXPECT_EQ(c.maxExpectedContentFrameDurationUs, d.maxExpectedContentFrameDurationUs);
+ EXPECT_EQ(c.frameRateDetectionToleranceUs, d.frameRateDetectionToleranceUs);
+ EXPECT_EQ(c.liveContentFrameDropToleranceUs, d.liveContentFrameDropToleranceUs);
+ EXPECT_EQ(c.freezeDurationMsHistogramBuckets, d.freezeDurationMsHistogramBuckets);
+ EXPECT_EQ(c.freezeDurationMsHistogramToScore, d.freezeDurationMsHistogramToScore);
+ EXPECT_EQ(c.freezeDistanceMsHistogramBuckets, d.freezeDistanceMsHistogramBuckets);
+ EXPECT_EQ(c.freezeEventMax, d.freezeEventMax);
+ EXPECT_EQ(c.freezeEventDetailsMax, d.freezeEventDetailsMax);
+ EXPECT_EQ(c.freezeEventDistanceToleranceMs, d.freezeEventDistanceToleranceMs);
+ EXPECT_EQ(c.judderErrorToleranceUs, d.judderErrorToleranceUs);
+ EXPECT_EQ(c.judderScoreHistogramBuckets, d.judderScoreHistogramBuckets);
+ EXPECT_EQ(c.judderScoreHistogramToScore, d.judderScoreHistogramToScore);
+ EXPECT_EQ(c.judderEventMax, d.judderEventMax);
+ EXPECT_EQ(c.judderEventDetailsMax, d.judderEventDetailsMax);
+ EXPECT_EQ(c.judderEventDistanceToleranceMs, d.judderEventDistanceToleranceMs);
+}
+
+TEST_F(VideoRenderQualityTrackerTest, getFromServerConfigurableFlags_withAlmostValid) {
+ Configuration::GetServerConfigurableFlagFn getServerConfigurableFlagFn{
+ [](const std::string &, const std::string &flag, const std::string &) -> std::string {
+ if (flag == "render_metrics_enabled") {
+ return "fals";
+ } else if (flag == "render_metrics_are_skipped_frames_dropped") {
+ return "fals";
+ } else if (flag == "render_metrics_max_expected_content_frame_duration_us") {
+ return "100a";
+ } else if (flag == "render_metrics_frame_rate_detection_tolerance_us") {
+ return "10b0";
+ } else if (flag == "render_metrics_live_content_frame_drop_tolerance_us") {
+ return "c100";
+ } else if (flag == "render_metrics_freeze_duration_ms_histogram_buckets") {
+ return "1,5300,3b400,123";
+ } else if (flag == "render_metrics_freeze_duration_ms_histogram_to_score") {
+ return "2,5300*400,132";
+ } else if (flag == "render_metrics_freeze_distance_ms_histogram_buckets") {
+ return "3,12345678901234,5,7";
+ } else if (flag == "render_metrics_freeze_event_max") {
+ return "12345678901234";
+ } else if (flag == "render_metrics_freeze_event_details_max") {
+ return "12345.11321";
+ } else if (flag == "render_metrics_freeze_event_distance_tolerance_ms") {
+ return "*!-";
+ } else if (flag == "render_metrics_judder_error_tolerance_us") {
+ return "10.5";
+ } else if (flag == "render_metrics_judder_score_histogram_buckets") {
+ return "abc";
+ } else if (flag == "render_metrics_judder_score_histogram_to_score") {
+ return "123,";
+ } else if (flag == "render_metrics_judder_event_max") {
+ return ",1234";
+ } else if (flag == "render_metrics_judder_event_details_max") {
+ return "10*10";
+ } else if (flag == "render_metrics_judder_event_distance_tolerance_ms") {
+ return "140-a";
+ }
+ return "";
+ }
+ };
+ Configuration c = Configuration::getFromServerConfigurableFlags(getServerConfigurableFlagFn);
+ Configuration d; // default configuration
+ EXPECT_EQ(c.enabled, d.enabled);
+ EXPECT_EQ(c.areSkippedFramesDropped, d.areSkippedFramesDropped);
+ EXPECT_EQ(c.maxExpectedContentFrameDurationUs, d.maxExpectedContentFrameDurationUs);
+ EXPECT_EQ(c.frameRateDetectionToleranceUs, d.frameRateDetectionToleranceUs);
+ EXPECT_EQ(c.liveContentFrameDropToleranceUs, d.liveContentFrameDropToleranceUs);
+ EXPECT_EQ(c.freezeDurationMsHistogramBuckets, d.freezeDurationMsHistogramBuckets);
+ EXPECT_EQ(c.freezeDurationMsHistogramToScore, d.freezeDurationMsHistogramToScore);
+ EXPECT_EQ(c.freezeDistanceMsHistogramBuckets, d.freezeDistanceMsHistogramBuckets);
+ EXPECT_EQ(c.freezeEventMax, d.freezeEventMax);
+ EXPECT_EQ(c.freezeEventDetailsMax, d.freezeEventDetailsMax);
+ EXPECT_EQ(c.freezeEventDistanceToleranceMs, d.freezeEventDistanceToleranceMs);
+ EXPECT_EQ(c.judderErrorToleranceUs, d.judderErrorToleranceUs);
+ EXPECT_EQ(c.judderScoreHistogramBuckets, d.judderScoreHistogramBuckets);
+ EXPECT_EQ(c.judderScoreHistogramToScore, d.judderScoreHistogramToScore);
+ EXPECT_EQ(c.judderEventMax, d.judderEventMax);
+ EXPECT_EQ(c.judderEventDetailsMax, d.judderEventDetailsMax);
+ EXPECT_EQ(c.judderEventDistanceToleranceMs, d.judderEventDistanceToleranceMs);
+}
+
+TEST_F(VideoRenderQualityTrackerTest, getFromServerConfigurableFlags_withValid) {
+ Configuration::GetServerConfigurableFlagFn getServerConfigurableFlagFn{
+ [](const std::string &, const std::string &flag, const std::string &) -> std::string {
+ if (flag == "render_metrics_enabled") {
+ return "false";
+ } else if (flag == "render_metrics_are_skipped_frames_dropped") {
+ return "false";
+ } else if (flag == "render_metrics_max_expected_content_frame_duration_us") {
+ return "2000";
+ } else if (flag == "render_metrics_frame_rate_detection_tolerance_us") {
+ return "3000";
+ } else if (flag == "render_metrics_live_content_frame_drop_tolerance_us") {
+ return "4000";
+ } else if (flag == "render_metrics_freeze_duration_ms_histogram_buckets") {
+ return "100,200,300,400";
+ } else if (flag == "render_metrics_freeze_duration_ms_histogram_to_score") {
+ return "1234567890120,1234567890121,1234567890122";
+ } else if (flag == "render_metrics_freeze_distance_ms_histogram_buckets") {
+ return "500,600,700,800,900";
+ } else if (flag == "render_metrics_freeze_event_max") {
+ return "5000";
+ } else if (flag == "render_metrics_freeze_event_details_max") {
+ return "6000";
+ } else if (flag == "render_metrics_freeze_event_distance_tolerance_ms") {
+ return "7000";
+ } else if (flag == "render_metrics_judder_error_tolerance_us") {
+ return "8000";
+ } else if (flag == "render_metrics_judder_score_histogram_buckets") {
+ return "1,2,3,4,5";
+ } else if (flag == "render_metrics_judder_score_histogram_to_score") {
+ return "-1,-2,-3,-4,-5";
+ } else if (flag == "render_metrics_judder_event_max") {
+ return "9000";
+ } else if (flag == "render_metrics_judder_event_details_max") {
+ return "10000";
+ } else if (flag == "render_metrics_judder_event_distance_tolerance_ms") {
+ return "11000";
+ }
+ return "";
+ }
+ };
+
+ Configuration c = Configuration::getFromServerConfigurableFlags(getServerConfigurableFlagFn);
+ // The default configuration here used to verify we're not configuring the values to the
+ // default - if we are accidentally configuring to the default then we're not necessarily
+ // testing the parsing.
+ Configuration d;
+ EXPECT_EQ(c.enabled, false);
+ EXPECT_NE(c.enabled, d.enabled);
+ EXPECT_EQ(c.areSkippedFramesDropped, false);
+ EXPECT_NE(c.areSkippedFramesDropped, d.areSkippedFramesDropped);
+ EXPECT_EQ(c.maxExpectedContentFrameDurationUs, 2000);
+ EXPECT_NE(c.maxExpectedContentFrameDurationUs, d.maxExpectedContentFrameDurationUs);
+ EXPECT_EQ(c.frameRateDetectionToleranceUs, 3000);
+ EXPECT_NE(c.frameRateDetectionToleranceUs, d.frameRateDetectionToleranceUs);
+ EXPECT_EQ(c.liveContentFrameDropToleranceUs, 4000);
+ EXPECT_NE(c.liveContentFrameDropToleranceUs, d.liveContentFrameDropToleranceUs);
+ {
+ std::vector<int32_t> expected({100,200,300,400});
+ EXPECT_EQ(c.freezeDurationMsHistogramBuckets, expected);
+ EXPECT_NE(c.freezeDurationMsHistogramBuckets, d.freezeDurationMsHistogramBuckets);
+ }
+ {
+ std::vector<int64_t> expected({1234567890120LL,1234567890121LL,1234567890122LL});
+ EXPECT_EQ(c.freezeDurationMsHistogramToScore, expected);
+ EXPECT_NE(c.freezeDurationMsHistogramToScore, d.freezeDurationMsHistogramToScore);
+ }
+ {
+ std::vector<int32_t> expected({500,600,700,800,900});
+ EXPECT_EQ(c.freezeDistanceMsHistogramBuckets, expected);
+ EXPECT_NE(c.freezeDistanceMsHistogramBuckets, d.freezeDistanceMsHistogramBuckets);
+ }
+ EXPECT_EQ(c.freezeEventMax, 5000);
+ EXPECT_NE(c.freezeEventMax, d.freezeEventMax);
+ EXPECT_EQ(c.freezeEventDetailsMax, 6000);
+ EXPECT_NE(c.freezeEventDetailsMax, d.freezeEventDetailsMax);
+ EXPECT_EQ(c.freezeEventDistanceToleranceMs, 7000);
+ EXPECT_NE(c.freezeEventDistanceToleranceMs, d.freezeEventDistanceToleranceMs);
+ EXPECT_EQ(c.judderErrorToleranceUs, 8000);
+ EXPECT_NE(c.judderErrorToleranceUs, d.judderErrorToleranceUs);
+ {
+ std::vector<int32_t> expected({1,2,3,4,5});
+ EXPECT_EQ(c.judderScoreHistogramBuckets, expected);
+ EXPECT_NE(c.judderScoreHistogramBuckets, d.judderScoreHistogramBuckets);
+ }
+ {
+ std::vector<int64_t> expected({-1,-2,-3,-4,-5});
+ EXPECT_EQ(c.judderScoreHistogramToScore, expected);
+ EXPECT_NE(c.judderScoreHistogramToScore, d.judderScoreHistogramToScore);
+ }
+ EXPECT_EQ(c.judderEventMax, 9000);
+ EXPECT_NE(c.judderEventMax, d.judderEventMax);
+ EXPECT_EQ(c.judderEventDetailsMax, 10000);
+ EXPECT_NE(c.judderEventDetailsMax, d.judderEventDetailsMax);
+ EXPECT_EQ(c.judderEventDistanceToleranceMs, 11000);
+ EXPECT_NE(c.judderEventDistanceToleranceMs, d.judderEventDistanceToleranceMs);
+}
+
+TEST_F(VideoRenderQualityTrackerTest, countsReleasedFrames) {
+ Configuration c;
+ Helper h(16.66, c);
+ h.drop(10);
+ h.render({16.66, 16.66, 16.66});
+ h.skip(10); // skipped frames aren't released so they are not counted
+ h.render({16.66, 16.66, 16.66, 16.66});
+ h.drop(10);
+ EXPECT_EQ(27, h.getMetrics().frameReleasedCount);
+}
+
+TEST_F(VideoRenderQualityTrackerTest, countsSkippedFrames) {
+ Configuration c;
+ Helper h(16.66, c);
+ h.drop(10); // dropped frames are not counted
+ h.skip(10); // frames skipped before rendering a frame are not counted
+ h.render({16.66, 16.66, 16.66}); // rendered frames are not counted
+ h.drop(10); // dropped frames are not counted
+ h.skip(10);
+ h.render({16.66, 16.66, 16.66, 16.66}); // rendered frames are not counted
+ h.skip(10); // frames skipped at the end of playback are not counted
+ h.drop(10); // dropped frames are not counted
+ EXPECT_EQ(10, h.getMetrics().frameSkippedCount);
+}
+
+TEST_F(VideoRenderQualityTrackerTest, whenSkippedFramesAreDropped_countsDroppedFrames) {
+ Configuration c;
+ c.areSkippedFramesDropped = true;
+ Helper h(16.66, c);
+ h.skip(10); // skipped frames at the beginning of playback are not counted
+ h.drop(10);
+ h.skip(10); // skipped frames at the beginning of playback after dropped frames are not counted
+ h.render({16.66, 16.66, 16.66}); // rendered frames are not counted
+ h.drop(10);
+ h.skip(10);
+ h.render({16.66, 16.66, 16.66, 16.66}); // rendered frames are not counted
+ h.drop(10); // dropped frames at the end of playback are not counted
+ h.skip(10); // skipped frames at the end of playback are not counted
+ EXPECT_EQ(30, h.getMetrics().frameDroppedCount);
+}
+
+TEST_F(VideoRenderQualityTrackerTest, whenNotSkippedFramesAreDropped_countsDroppedFrames) {
+ Configuration c;
+ c.areSkippedFramesDropped = false;
+ Helper h(16.66, c);
+ h.skip(10); // skipped frames at the beginning of playback are not counted
+ h.drop(10);
+ h.skip(10); // skipped frames at the beginning of playback after dropped frames are not counted
+ h.render({16.66, 16.66, 16.66}); // rendered frames are not counted
+ h.drop(10);
+ h.skip(10); // skipped frames are not counted
+ h.render({16.66, 16.66, 16.66, 16.66}); // rendered frames are not counted
+ h.drop(10); // dropped frames at the end of playback are not counted
+ h.skip(10); // skipped frames at the end of playback are not counted
+ EXPECT_EQ(20, h.getMetrics().frameDroppedCount);
+}
+
+TEST_F(VideoRenderQualityTrackerTest, countsRenderedFrames) {
+ Configuration c;
+ Helper h(16.66, c);
+ h.drop(10); // dropped frames are not counted
+ h.render({16.66, 16.66, 16.66});
+ h.skip(10); // skipped frames are not counted
+ h.render({16.66, 16.66, 16.66, 16.66});
+ h.drop(10); // dropped frames are not counted
+ EXPECT_EQ(7, h.getMetrics().frameRenderedCount);
+}
+
+TEST_F(VideoRenderQualityTrackerTest, detectsFrameRate) {
+ Configuration c;
+ c.frameRateDetectionToleranceUs = 2 * 1000; // 2 ms
+ Helper h(16.66, c);
+ h.render({16.6, 16.7, 16.6, 16.7});
+ EXPECT_NEAR(h.getMetrics().contentFrameRate, 60.0, 0.5);
+ EXPECT_NEAR(h.getMetrics().actualFrameRate, 60.0, 0.5);
+}
+
+TEST_F(VideoRenderQualityTrackerTest, handlesSeeking) {
+ Configuration c;
+ c.maxExpectedContentFrameDurationUs = 30;
+ VideoRenderQualityTracker v(c);
+ v.onFrameReleased(0, 0);
+ v.onFrameRendered(0, 0);
+ v.onFrameReleased(20, 20);
+ v.onFrameRendered(20, 20);
+ v.onFrameReleased(40, 40);
+ v.onFrameRendered(40, 40);
+ v.onFrameReleased(60, 60);
+ v.onFrameRendered(60, 60);
+ v.onFrameReleased(80, 80);
+ v.onFrameRendered(80, 80);
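+ // jump ~2 hours forward in content time (7,200,000,000us) to simulate a seek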
+ v.onFrameReleased(7200000000, 100);
+ v.onFrameRendered(7200000000, 100);
+ v.onFrameReleased(7200000020, 120);
+ v.onFrameRendered(7200000020, 120);
+ v.onFrameReleased(7200000040, 140);
+ v.onFrameRendered(7200000040, 140);
+ v.onFrameReleased(7200000060, 160);
+ v.onFrameRendered(7200000060, 160);
+ v.onFrameReleased(7200000080, 180);
+ v.onFrameRendered(7200000080, 180);
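+ // seek back to content time 0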
+ v.onFrameReleased(0, 200);
+ v.onFrameRendered(0, 200);
+ v.onFrameReleased(20, 220);
+ v.onFrameRendered(20, 220);
+ v.onFrameReleased(40, 240);
+ v.onFrameRendered(40, 240);
+ v.onFrameReleased(60, 260);
+ v.onFrameRendered(60, 260);
+ const VideoRenderQualityMetrics &m = v.getMetrics();
+ EXPECT_EQ(m.judderRate, 0); // frame durations can get corrupted during discontinuities, so
+ // judder would be expected if the discontinuity were not detected
+ EXPECT_NE(m.contentFrameRate, FRAME_RATE_UNDETERMINED);
+}
+
+TEST_F(VideoRenderQualityTrackerTest, withSkipping_handlesSeeking) {
+ Configuration c;
+ c.maxExpectedContentFrameDurationUs = 30;
+ VideoRenderQualityTracker v(c);
+ v.onFrameReleased(0, 0);
+ v.onFrameRendered(0, 0);
+ v.onFrameReleased(20, 20);
+ v.onFrameRendered(20, 20);
+ v.onFrameReleased(40, 40);
+ v.onFrameRendered(40, 40);
+ v.onFrameReleased(60, 60);
+ v.onFrameRendered(60, 60);
+ v.onFrameReleased(80, 80);
+ v.onFrameRendered(80, 80);
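+ // seek ~2 hours forward; the first frames after the seek are skipped this time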
+ v.onFrameSkipped(7200000000);
+ v.onFrameSkipped(7200000020);
+ v.onFrameReleased(7200000040, 100);
+ v.onFrameRendered(7200000040, 100);
+ v.onFrameReleased(7200000060, 120);
+ v.onFrameRendered(7200000060, 120);
+ v.onFrameReleased(7200000080, 140);
+ v.onFrameSkipped(0);
+ v.onFrameRendered(7200000080, 140);
+ v.onFrameSkipped(20);
+ v.onFrameReleased(40, 160);
+ v.onFrameRendered(40, 160);
+ v.onFrameReleased(60, 180);
+ v.onFrameRendered(60, 180);
+ v.onFrameReleased(80, 200);
+ v.onFrameRendered(80, 200);
+ const VideoRenderQualityMetrics &m = v.getMetrics();
+ EXPECT_EQ(m.judderRate, 0); // frame durations can get corrupted during discontinuities, so
+ // judder would be expected if the discontinuity were not detected
+ EXPECT_NE(m.contentFrameRate, FRAME_RATE_UNDETERMINED);
+}
+
+TEST_F(VideoRenderQualityTrackerTest, whenLowTolerance_doesntDetectFrameRate) {
+ Configuration c;
+ c.frameRateDetectionToleranceUs = 0;
+ Helper h(16.66, c);
+ h.render({16.6, 16.7, 16.6, 16.7});
+ EXPECT_NEAR(h.getMetrics().contentFrameRate, 60.0, 0.5);
+ EXPECT_EQ(h.getMetrics().actualFrameRate, FRAME_RATE_UNDETERMINED);
+}
+
+TEST_F(VideoRenderQualityTrackerTest, whenFrameRateDestabilizes_detectsFrameRate) {
+ Configuration c;
+ c.frameRateDetectionToleranceUs = 2 * 1000; // 2 ms
+ Helper h(16.66, c);
+ h.render({16.6, 16.7, 16.6, 16.7});
+ h.render({30.0, 16.6, 30.0, 16.6});
+ EXPECT_NEAR(h.getMetrics().contentFrameRate, 60.0, 0.5);
+ EXPECT_NEAR(h.getMetrics().actualFrameRate, 60.0, 0.5);
+}
+
+TEST_F(VideoRenderQualityTrackerTest, detects32Pulldown) {
+ Configuration c;
+ c.frameRateDetectionToleranceUs = 2 * 1000; // 2 ms
+ Helper h(41.66, c);
+ h.render({49.9, 33.2, 50.0, 33.4, 50.1, 33.2});
+ EXPECT_NEAR(h.getMetrics().contentFrameRate, 24.0, 0.5);
+ EXPECT_EQ(h.getMetrics().actualFrameRate, FRAME_RATE_24_3_2_PULLDOWN);
+}
+
+TEST_F(VideoRenderQualityTrackerTest, whenBad32Pulldown_doesntDetect32Pulldown) {
+ Configuration c;
+ c.frameRateDetectionToleranceUs = 2 * 1000; // 2 ms
+ Helper h(41.66, c);
+ h.render({50.0, 33.33, 33.33, 50.00, 33.33, 50.00});
+ EXPECT_NEAR(h.getMetrics().contentFrameRate, 24.0, 0.5);
+ EXPECT_EQ(h.getMetrics().actualFrameRate, FRAME_RATE_UNDETERMINED);
+}
+
+TEST_F(VideoRenderQualityTrackerTest, whenFrameRateChanges_detectsMostRecentFrameRate) {
+ Configuration c;
+ c.frameRateDetectionToleranceUs = 2 * 1000; // 2 ms
+ Helper h(16.66, c);
+ h.render({16.6, 16.7, 16.6, 16.7});
+ EXPECT_NEAR(h.getMetrics().contentFrameRate, 60.0, 0.5);
+ EXPECT_NEAR(h.getMetrics().actualFrameRate, 60.0, 0.5);
+ h.changeContentFrameDuration(41.66);
+ h.render({50.0, 33.33, 50.0, 33.33, 50.0, 33.33});
+ EXPECT_NEAR(h.getMetrics().contentFrameRate, 24.0, 0.5);
+ EXPECT_EQ(h.getMetrics().actualFrameRate, FRAME_RATE_24_3_2_PULLDOWN);
+}
+
+TEST_F(VideoRenderQualityTrackerTest, whenFrameRateIsUnstable_doesntDetectFrameRate) {
+ Configuration c;
+ c.frameRateDetectionToleranceUs = 2 * 1000; // 2 ms
+ Helper h(16.66, c);
+ h.render({16.66, 30.0, 16.66, 30.0, 16.66});
+ EXPECT_NEAR(h.getMetrics().contentFrameRate, 60.0, 0.5);
+ EXPECT_EQ(h.getMetrics().actualFrameRate, FRAME_RATE_UNDETERMINED);
+}
+
+TEST_F(VideoRenderQualityTrackerTest, capturesFreezeRate) {
+ Configuration c;
+ Helper h(20, c);
+ h.render(3);
+ EXPECT_EQ(h.getMetrics().freezeRate, 0);
+ h.drop(3);
+ h.render(3);
+ // +1 because the frame rendered just before the drops is considered frozen
+ // and then -1 because the last frame has an unknown render duration
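+ // frozen: 3 drops + 1 frame before the drops = 4; counted: 9 frames - 1 unknown = 8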
+ EXPECT_EQ(h.getMetrics().freezeRate, 4.0 / 8.0);
+}
+
+TEST_F(VideoRenderQualityTrackerTest, capturesFreezeDurationHistogram) {
+ Configuration c;
+ // +17 because freeze durations include the render time of the previous frame
+ c.freezeDurationMsHistogramBuckets = {2 * 17 + 17, 3 * 17 + 17, 6 * 17 + 17};
+ Helper h(17, c);
+ h.render(1);
+ h.drop(1); // below
+ h.render(1);
+ h.drop(3); // bucket 1
+ h.render(1);
+ h.drop(2); // bucket 0
+ h.render(1);
+ h.drop(4); // bucket 1
+ h.render(1);
+ h.drop(2); // bucket 0
+ h.render(1);
+ h.drop(5); // bucket 1
+ h.render(1);
+ h.drop(10); // above
+ h.render(1);
+ h.drop(15); // above
+ h.render(1);
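+ // emit() format: <count below first bucket>{<per-bucket counts>}<count above last bucket>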
+ EXPECT_EQ(h.getMetrics().freezeDurationMsHistogram.emit(), "1{2,3}2");
+ EXPECT_EQ(h.getMetrics().freezeDurationMsHistogram.getCount(), 8);
+ // the smallest frame drop was 1, +17 because it includes the previous frame render time
+ EXPECT_EQ(h.getMetrics().freezeDurationMsHistogram.getMin(), 1 * 17 + 17);
+ // the largest frame drop was 10, +17 because it includes the previous frame render time
+ EXPECT_EQ(h.getMetrics().freezeDurationMsHistogram.getMax(), 15 * 17 + 17);
+ // total frame drop count, multiplied by 17, plus 17 for each occurrence, divided by occurrences
+ EXPECT_EQ(h.getMetrics().freezeDurationMsHistogram.getAvg(), ((1 + 3 + 2 + 4 + 2 + 5 + 10 + 15)
+ * 17 + 8 * 17) / 8);
+}
+
+TEST_F(VideoRenderQualityTrackerTest, capturesFreezeDistanceHistogram) {
+ Configuration c;
+ c.freezeDistanceMsHistogramBuckets = {1 * 17, 5 * 17, 6 * 17};
+ Helper h(17, c);
+ h.render(1);
+ h.drop(1);
+ h.render(5); // bucket 0
+ h.drop(3);
+ h.render(3); // bucket 0
+ h.drop(2);
+ h.render(9); // above
+ h.drop(5);
+ h.render(1); // below
+ h.drop(2);
+ h.render(6); // bucket 1
+ h.drop(4);
+ h.render(12); // above
+ h.drop(2);
+ h.render(1);
+ EXPECT_EQ(h.getMetrics().freezeDistanceMsHistogram.emit(), "1{2,1}2");
+ EXPECT_EQ(h.getMetrics().freezeDistanceMsHistogram.getCount(), 6);
+ // the smallest render between drops was 1, -17 because the last frame rendered also froze
+ EXPECT_EQ(h.getMetrics().freezeDistanceMsHistogram.getMin(), 1 * 17 - 17);
+ // the largest render between drops was 12, -17 because the last frame rendered also froze
+ EXPECT_EQ(h.getMetrics().freezeDistanceMsHistogram.getMax(), 12 * 17 - 17);
+ // total render count between freezes, multiplied by 17, minus 17 for each occurrence,
+ // divided by occurrences
+ EXPECT_EQ(h.getMetrics().freezeDistanceMsHistogram.getAvg(), ((5 + 3 + 9 + 1 + 6 + 12) * 17 -
+ 6 * 17) / 6);
+}
+
+TEST_F(VideoRenderQualityTrackerTest, when60hz_hasNoJudder) {
+ Configuration c;
+ Helper h(16.66, c); // ~60Hz
+ h.render({16.66, 16.66, 16.66, 16.66, 16.66, 16.66, 16.66});
+ EXPECT_LE(h.getMetrics().judderScoreHistogram.getMax(), 0);
+ EXPECT_EQ(h.getMetrics().judderScoreHistogram.getCount(), 0);
+}
+
+TEST_F(VideoRenderQualityTrackerTest, whenSmallVariance60hz_hasNoJudder) {
+ Configuration c;
+ Helper h(16.66, c); // ~60Hz
+ h.render({14, 18, 14, 18, 14, 18, 14, 18});
+ EXPECT_LE(h.getMetrics().judderScoreHistogram.getMax(), 0);
+ EXPECT_EQ(h.getMetrics().judderScoreHistogram.getCount(), 0);
+}
+
+TEST_F(VideoRenderQualityTrackerTest, whenBadSmallVariance60Hz_hasJudder) {
+ Configuration c;
+ Helper h(16.66, c); // ~60Hz
+ h.render({14, 18, 14, /* no 18 between 14s */ 14, 18, 14, 18});
+ EXPECT_EQ(h.getMetrics().judderScoreHistogram.getCount(), 1);
+}
+
+TEST_F(VideoRenderQualityTrackerTest, when30Hz_hasNoJudder) {
+ Configuration c;
+ Helper h(33.33, c);
+ h.render({33.33, 33.33, 33.33, 33.33, 33.33, 33.33});
+ EXPECT_LE(h.getMetrics().judderScoreHistogram.getMax(), 0);
+ EXPECT_EQ(h.getMetrics().judderScoreHistogram.getCount(), 0);
+}
+
+TEST_F(VideoRenderQualityTrackerTest, whenSmallVariance30Hz_hasNoJudder) {
+ Configuration c;
+ Helper h(33.33, c);
+ h.render({29.0, 35.0, 29.0, 35.0, 29.0, 35.0});
+ EXPECT_LE(h.getMetrics().judderScoreHistogram.getMax(), 0);
+ EXPECT_EQ(h.getMetrics().judderScoreHistogram.getCount(), 0);
+}
+
+TEST_F(VideoRenderQualityTrackerTest, whenBadSmallVariance30Hz_hasJudder) {
+ Configuration c;
+ Helper h(33.33, c);
+ h.render({29.0, 35.0, 29.0, /* no 35 between 29s */ 29.0, 35.0, 29.0, 35.0});
+ EXPECT_EQ(h.getMetrics().judderScoreHistogram.getCount(), 1);
+}
+
+TEST_F(VideoRenderQualityTrackerTest, whenBad30HzTo60Hz_hasJudder) {
+ Configuration c;
+ Helper h(33.33, c);
+ h.render({33.33, 33.33, 50.0, /* frame stayed 1 vsync too long */ 16.66, 33.33, 33.33});
+ EXPECT_EQ(h.getMetrics().judderScoreHistogram.getCount(), 2); // note: 2 counts of judder
+}
+
+TEST_F(VideoRenderQualityTrackerTest, when24HzTo60Hz_hasNoJudder) {
+ Configuration c;
+ Helper h(41.66, c);
+ h.render({50.0, 33.33, 50.0, 33.33, 50.0, 33.33});
+ EXPECT_LE(h.getMetrics().judderScoreHistogram.getMax(), 0);
+ EXPECT_EQ(h.getMetrics().judderScoreHistogram.getCount(), 0);
+}
+
+TEST_F(VideoRenderQualityTrackerTest, when25HzTo60Hz_hasJudder) {
+ Configuration c;
+ Helper h(40, c);
+ h.render({33.33, 33.33, 50.0});
+ h.render({33.33, 33.33, 50.0});
+ h.render({33.33, 33.33, 50.0});
+ h.render({33.33, 33.33, 50.0});
+ h.render({33.33, 33.33, 50.0});
+ h.render({33.33, 33.33, 50.0});
+ EXPECT_GT(h.getMetrics().judderScoreHistogram.getCount(), 0);
+}
+
+TEST_F(VideoRenderQualityTrackerTest, when50HzTo60Hz_hasJudder) {
+ Configuration c;
+ Helper h(20, c);
+ h.render({16.66, 16.66, 16.66, 33.33});
+ h.render({16.66, 16.66, 16.66, 33.33});
+ h.render({16.66, 16.66, 16.66, 33.33});
+ h.render({16.66, 16.66, 16.66, 33.33});
+ h.render({16.66, 16.66, 16.66, 33.33});
+ h.render({16.66, 16.66, 16.66, 33.33});
+ EXPECT_GT(h.getMetrics().judderScoreHistogram.getCount(), 0);
+}
+
+TEST_F(VideoRenderQualityTrackerTest, when30HzTo50Hz_hasJudder) {
+ Configuration c;
+ Helper h(33.33, c);
+ h.render({40.0, 40.0, 40.0, 60.0});
+ h.render({40.0, 40.0, 40.0, 60.0});
+ h.render({40.0, 40.0, 40.0, 60.0});
+ h.render({40.0, 40.0, 40.0, 60.0});
+ h.render({40.0, 40.0, 40.0, 60.0});
+ EXPECT_GT(h.getMetrics().judderScoreHistogram.getCount(), 0);
+}
+
+TEST_F(VideoRenderQualityTrackerTest, whenSmallVariancePulldown24HzTo60Hz_hasNoJudder) {
+ Configuration c;
+ Helper h(41.66, c);
+ h.render({52.0, 31.33, 52.0, 31.33, 52.0, 31.33});
+ EXPECT_EQ(h.getMetrics().judderScoreHistogram.getCount(), 0);
+}
+
+TEST_F(VideoRenderQualityTrackerTest, whenBad24HzTo60Hz_hasJudder) {
+ Configuration c;
+ Helper h(41.66, c);
+ h.render({50.0, 33.33, 50.0, 33.33, /* no 50 between 33s */ 33.33, 50.0, 33.33});
+ EXPECT_EQ(h.getMetrics().judderScoreHistogram.getCount(), 1);
+}
+
+TEST_F(VideoRenderQualityTrackerTest, capturesJudderScoreHistogram) {
+ Configuration c;
+ c.judderErrorToleranceUs = 2000;
+ c.judderScoreHistogramBuckets = {1, 5, 8};
+ Helper h(16, c);
+ h.render({16, 16, 23, 16, 16, 10, 16, 4, 16, 20, 16, 16});
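+ // expected per-frame judder scores: |23-16|=7, |10-16|=6, |4-16|=12, |20-16|=4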
+ EXPECT_EQ(h.getMetrics().judderScoreHistogram.emit(), "0{1,2}1");
+ EXPECT_EQ(h.getMetrics().judderScoreHistogram.getCount(), 4);
+ EXPECT_EQ(h.getMetrics().judderScoreHistogram.getMin(), 4);
+ EXPECT_EQ(h.getMetrics().judderScoreHistogram.getMax(), 12);
+ EXPECT_EQ(h.getMetrics().judderScoreHistogram.getAvg(), (7 + 6 + 12 + 4) / 4);
+}
+
+TEST_F(VideoRenderQualityTrackerTest, ranksJudderScoresInOrder) {
+ // Each rendering scenario is ranked from best to worst in terms of user experience
+ Configuration c;
+ c.judderErrorToleranceUs = 2000;
+ c.judderScoreHistogramBuckets = {0, 1000};
+ int64_t previousScore = 0;
+
+ // 30fps poorly displayed at 60Hz
+ {
+ Helper h(33.33, c);
+ h.render({33.33, 33.33, 16.66, 50.0, 33.33, 33.33});
+ int64_t scoreBad30fpsTo60Hz = h.getMetrics().judderScoreHistogram.getMax();
+ EXPECT_GT(scoreBad30fpsTo60Hz, previousScore);
+ previousScore = scoreBad30fpsTo60Hz;
+ }
+
+ // 25fps displayed at 60hz
+ {
+ Helper h(40, c);
+ h.render({33.33, 33.33, 50.0});
+ h.render({33.33, 33.33, 50.0});
+ h.render({33.33, 33.33, 50.0});
+ h.render({33.33, 33.33, 50.0});
+ h.render({33.33, 33.33, 50.0});
+ h.render({33.33, 33.33, 50.0});
+ int64_t score25fpsTo60hz = h.getMetrics().judderScoreHistogram.getMax();
+ EXPECT_GT(score25fpsTo60hz, previousScore);
+ previousScore = score25fpsTo60hz;
+ }
+
+ // 50fps displayed at 60hz
+ {
+ Helper h(20, c);
+ h.render({16.66, 16.66, 16.66, 33.33});
+ h.render({16.66, 16.66, 16.66, 33.33});
+ h.render({16.66, 16.66, 16.66, 33.33});
+ h.render({16.66, 16.66, 16.66, 33.33});
+ h.render({16.66, 16.66, 16.66, 33.33});
+ h.render({16.66, 16.66, 16.66, 33.33});
+ int64_t score50fpsTo60hz = h.getMetrics().judderScoreHistogram.getMax();
+ EXPECT_GT(score50fpsTo60hz, previousScore);
+ previousScore = score50fpsTo60hz;
+ }
+
+ // 24fps poorly displayed at 60Hz
+ {
+ Helper h(41.66, c);
+ h.render({50.0, 33.33, 50.0, 33.33, 33.33, 50.0, 33.33});
+ int64_t scoreBad24HzTo60Hz = h.getMetrics().judderScoreHistogram.getMax();
+ EXPECT_GT(scoreBad24HzTo60Hz, previousScore);
+ previousScore = scoreBad24HzTo60Hz;
+ }
+
+ // 30fps displayed at 50hz
+ {
+ Helper h(33.33, c);
+ h.render({40.0, 40.0, 40.0, 60.0});
+ h.render({40.0, 40.0, 40.0, 60.0});
+ h.render({40.0, 40.0, 40.0, 60.0});
+ h.render({40.0, 40.0, 40.0, 60.0});
+ h.render({40.0, 40.0, 40.0, 60.0});
+ int64_t score30fpsTo50hz = h.getMetrics().judderScoreHistogram.getMax();
+ EXPECT_GT(score30fpsTo50hz, previousScore);
+ previousScore = score30fpsTo50hz;
+ }
+
+ // 24fps displayed at 50Hz
+ {
+ Helper h(41.66, c);
+ h.render(40.0, 11);
+ h.render(60.0, 1);
+ h.render(40.0, 11);
+ h.render(60.0, 1);
+ h.render(40.0, 11);
+ int64_t score24HzTo50Hz = h.getMetrics().judderScoreHistogram.getMax();
+ EXPECT_GT(score24HzTo50Hz, previousScore);
+ previousScore = score24HzTo50Hz;
+ }
+}
+
+TEST_F(VideoRenderQualityTrackerTest, capturesFreezeEvents) {
+ Configuration c;
+ c.freezeEventMax = 5;
+ c.freezeEventDetailsMax = 4;
+ c.freezeEventDistanceToleranceMs = 1000;
+ Helper h(20, c);
+ h.render(10);
+ EXPECT_EQ(h.getAndClearFreezeEvent().valid, false);
+ h.drop(3);
+ h.render(1000 / 20); // 49 * 20ms = 980ms between freezes, still within the 1000ms tolerance
+ EXPECT_EQ(h.getAndClearFreezeEvent().valid, false);
+ h.drop(1);
+ h.render(10);
+ EXPECT_EQ(h.getAndClearFreezeEvent().valid, false);
+ h.drop(6);
+ h.render(12);
+ EXPECT_EQ(h.getAndClearFreezeEvent().valid, false);
+ h.drop(10);
+ h.render(1000 / 20 + 1); // +1 because it's unclear if the current frame is frozen
+ EXPECT_EQ(h.getMetrics().freezeEventCount, 1);
+ FreezeEvent e = h.getAndClearFreezeEvent();
+ EXPECT_EQ(e.valid, true); // freeze event
+ // -1 because the last rendered frame is considered frozen
+ EXPECT_EQ(e.initialTimeUs, 9 * 20 * 1000);
+ // only count the last frame of the first group of rendered frames
+ EXPECT_EQ(e.durationMs, (1 + 3 + 1000 / 20 + 1 + 10 + 6 + 12 + 10) * 20);
+ EXPECT_EQ(e.count, 4);
+ // number of dropped frames
+ // +1 because the last rendered frame is considered frozen
+ EXPECT_EQ(e.sumDurationMs, (4 + 2 + 7 + 11) * 20);
+ // number of rendered frames between dropped frames
+ // -1 because the last rendered frame is considered frozen
+ EXPECT_EQ(e.sumDistanceMs, ((1000 / 20) - 1 + 9 + 11) * 20);
+ // +1 for each since the last rendered frame is considered frozen
+ ASSERT_EQ(e.details.durationMs.size(), 4);
+ EXPECT_EQ(e.details.durationMs[0], 4 * 20);
+ EXPECT_EQ(e.details.durationMs[1], 2 * 20);
+ EXPECT_EQ(e.details.durationMs[2], 7 * 20);
+ EXPECT_EQ(e.details.durationMs[3], 11 * 20);
+ // -1 for each since the last rendered frame is considered frozen
+ ASSERT_EQ(e.details.distanceMs.size(), 4);
+ EXPECT_EQ(e.details.distanceMs[0], -1);
+ EXPECT_EQ(e.details.distanceMs[1], 1000 - 20);
+ EXPECT_EQ(e.details.distanceMs[2], 9 * 20);
+ EXPECT_EQ(e.details.distanceMs[3], 11 * 20);
+ int64_t previousEventEndTimeUs = e.initialTimeUs + e.durationMs * 1000;
+ h.drop(1);
+ h.render(4);
+ h.drop(1);
+ h.render(4);
+ h.drop(1);
+ h.render(4);
+ h.drop(1);
+ h.render(4);
+ h.drop(1);
+ h.render(1000 / 20 + 1);
+ EXPECT_EQ(h.getMetrics().freezeEventCount, 2);
+ e = h.getAndClearFreezeEvent();
+ EXPECT_EQ(e.valid, true);
+ // 1000ms tolerance means 1000ms from the end of the last event to the beginning of this event
+ EXPECT_EQ(e.initialTimeUs, previousEventEndTimeUs + 1000 * 1000);
+ EXPECT_EQ(e.count, 5);
+ // 5 freezes captured in the freeze event, but only 4 details are recorded
+ EXPECT_EQ(e.details.durationMs.size(), 4);
+ EXPECT_EQ(e.details.distanceMs.size(), 4);
+ EXPECT_EQ(e.details.distanceMs[0], 1000); // same as the tolerance
+ // The duration across the entire series of freezes is captured, with only 4 details captured
+ // +1 because the first rendered frame is considered frozen (not the 1st dropped frame)
+ EXPECT_EQ(e.durationMs, (1 + 1 + 4 + 1 + 4 + 1 + 4 + 1 + 4 + 1) * 20);
+ // The durations of all 5 freezes are captured, with only 4 details captured
+ EXPECT_EQ(e.sumDurationMs, (2 + 2 + 2 + 2 + 2) * 20);
+ // The distances of all 5 freezes are captured, with only 4 details captured
+ EXPECT_EQ(e.sumDistanceMs, (3 + 3 + 3 + 3) * 20);
+ h.drop(1);
+ h.render(1000 / 20 + 1);
+ EXPECT_EQ(h.getMetrics().freezeEventCount, 3);
+ EXPECT_EQ(h.getAndClearFreezeEvent().valid, true);
+ h.drop(1);
+ h.render(1000 / 20 + 1);
+ EXPECT_EQ(h.getMetrics().freezeEventCount, 4);
+ EXPECT_EQ(h.getAndClearFreezeEvent().valid, true);
+ h.drop(1);
+ h.render(1000 / 20 + 1);
+ EXPECT_EQ(h.getMetrics().freezeEventCount, 5);
+ EXPECT_EQ(h.getAndClearFreezeEvent().valid, true);
+ h.drop(1);
+ h.render(1000 / 20 + 1);
+ // The 6th event isn't captured because it exceeds the configured limit
+ EXPECT_EQ(h.getMetrics().freezeEventCount, 6);
+ EXPECT_EQ(h.getAndClearFreezeEvent().valid, false);
+}
+
+TEST_F(VideoRenderQualityTrackerTest, capturesJudderEvents) {
+ Configuration c;
+ c.judderEventMax = 4;
+ c.judderEventDetailsMax = 3;
+ c.judderEventDistanceToleranceMs = 100;
+ Helper h(20, c);
+ h.render({19, 20, 19});
+ EXPECT_EQ(h.getAndClearJudderEvent().valid, false);
+ h.render({15, 19, 20, 19});
+ EXPECT_EQ(h.getAndClearJudderEvent().valid, false);
+ h.render({28, 20, 19});
+ EXPECT_EQ(h.getAndClearJudderEvent().valid, false);
+ h.render({13, 20, 20, 20, 20});
+ EXPECT_EQ(h.getAndClearJudderEvent().valid, false);
+ // Start with judder for the next event at the end of the sequence, because judder is scored
+ // one frame behind, and for combining judder occurrences into events, it's not clear yet if
+ // the current frame has judder or not.
+ h.render({15, 20, 20, 20, 20, 20, 15});
+ JudderEvent e = h.getAndClearJudderEvent();
+ EXPECT_EQ(e.valid, true);
+ EXPECT_EQ(e.initialTimeUs, (19 + 20 + 19) * 1000);
+ EXPECT_EQ(e.durationMs, 15 + 19 + 20 + 19 /**/ + 28 + 20 + 19 /**/ + 13 + 20 * 4 /**/ + 15);
+ EXPECT_EQ(e.count, 4);
+ EXPECT_EQ(e.sumScore, (20 - 15) + (28 - 20) + (20 - 13) + (20 - 15));
+ EXPECT_EQ(e.sumDistanceMs, 19 + 20 + 19 /**/ + 20 + 19 /**/ + 20 * 4);
+ ASSERT_EQ(e.details.actualRenderDurationUs.size(), 3); // 3 details per configured maximum
+ EXPECT_EQ(e.details.actualRenderDurationUs[0], 15 * 1000);
+ EXPECT_EQ(e.details.actualRenderDurationUs[1], 28 * 1000);
+ EXPECT_EQ(e.details.actualRenderDurationUs[2], 13 * 1000);
+ ASSERT_EQ(e.details.contentRenderDurationUs.size(), 3);
+ EXPECT_EQ(e.details.contentRenderDurationUs[0], 20 * 1000);
+ EXPECT_EQ(e.details.contentRenderDurationUs[1], 20 * 1000);
+ EXPECT_EQ(e.details.contentRenderDurationUs[2], 20 * 1000);
+ ASSERT_EQ(e.details.distanceMs.size(), 3);
+ EXPECT_EQ(e.details.distanceMs[0], -1);
+ EXPECT_EQ(e.details.distanceMs[1], 19 + 20 + 19);
+ EXPECT_EQ(e.details.distanceMs[2], 20 + 19);
+ h.render({20, 20, 20, 20, 20, 15});
+ e = h.getAndClearJudderEvent();
+ EXPECT_EQ(e.valid, true);
+ ASSERT_EQ(e.details.distanceMs.size(), 1);
+ EXPECT_EQ(e.details.distanceMs[0], 100); // same as the tolerance
+ h.render({20, 20, 20, 20, 20, 15});
+ EXPECT_EQ(h.getAndClearJudderEvent().valid, true);
+ h.render({20, 20, 20, 20, 20, 15});
+ EXPECT_EQ(h.getAndClearJudderEvent().valid, true);
+ h.render({20, 20, 20, 20, 20, 20});
+ EXPECT_EQ(h.getAndClearJudderEvent().valid, false); // max number of judder events exceeded
+}
+
+TEST_F(VideoRenderQualityTrackerTest, capturesOverallFreezeScore) {
+ Configuration c;
+ // # drops * 20ms + 20ms because current frame is frozen + 1 for bucket threshold
+ c.freezeDurationMsHistogramBuckets = {1 * 20 + 21, 5 * 20 + 21, 10 * 20 + 21};
+ c.freezeDurationMsHistogramToScore = {10, 100, 1000};
+ Helper h(20, c);
+ h.render(5);
+ h.drop(2); // bucket = 0, bucket count = 1, bucket score = 10
+ h.render(5);
+ h.drop(11); // bucket = 2, bucket count = 1, bucket score = 1000
+ h.render(5);
+ h.drop(6); // bucket = 1, bucket count = 1, bucket score = 100
+ h.render(5);
+ h.drop(1); // bucket = null
+ h.render(5);
+ h.drop(3); // bucket = 0, bucket count = 2, bucket score = 20
+ h.render(5);
+ h.drop(10); // bucket = 1, bucket count = 2, bucket score = 200
+ h.render(5);
+ h.drop(7); // bucket = 1, bucket count = 3, bucket score = 300
+ h.render(5);
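+ // the final score sums the per-bucket totals: 20 (bucket 0) + 300 (bucket 1) + 1000 (bucket 2)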
+ EXPECT_EQ(h.getMetrics().freezeScore, 20 + 300 + 1000);
+}
+
+TEST_F(VideoRenderQualityTrackerTest, capturesOverallJudderScore) {
+ Configuration c;
+ c.judderScoreHistogramBuckets = {0, 6, 10};
+ c.judderScoreHistogramToScore = {10, 100, 1000};
+ Helper h(20, c);
+ h.render({20, 20, 15, 20, 20}); // bucket = 0, bucket count = 1, bucket score = 10
+ h.render({20, 20, 11, 20, 20}); // bucket = 1, bucket count = 1, bucket score = 100
+ h.render({20, 20, 13, 20, 20}); // bucket = 1, bucket count = 2, bucket score = 200
+ h.render({20, 20, 5, 20, 20}); // bucket = 2, bucket count = 1, bucket score = 1000
+ h.render({20, 20, 14, 20, 20}); // bucket = 1, bucket count = 3, bucket score = 300
+ h.render({20, 20, 10, 20, 20}); // bucket = 2, bucket count = 2, bucket score = 2000
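+ // the final score sums the per-bucket totals: 10 (bucket 0) + 300 (bucket 1) + 2000 (bucket 2)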
+ EXPECT_EQ(h.getMetrics().judderScore, 10 + 300 + 2000);
+}
+
+} // android
diff --git a/media/libstagefright/tests/mediacodec/Android.bp b/media/libstagefright/tests/mediacodec/Android.bp
index 9cdc6d4..23882ea 100644
--- a/media/libstagefright/tests/mediacodec/Android.bp
+++ b/media/libstagefright/tests/mediacodec/Android.bp
@@ -70,4 +70,4 @@
test_suites: [
"general-tests",
],
-}
+}
\ No newline at end of file
diff --git a/media/ndk/include/media/NdkImage.h b/media/ndk/include/media/NdkImage.h
index 814a327..76270d3 100644
--- a/media/ndk/include/media/NdkImage.h
+++ b/media/ndk/include/media/NdkImage.h
@@ -829,7 +829,7 @@
/**
* Query the dataspace of the input {@link AImage}.
*
- * Available since API level 33.
+ * Available since API level 34.
*
* @param image the {@link AImage} of interest.
* @param dataSpace the dataspace of the image will be filled here if the method call succeeds.
@@ -843,7 +843,7 @@
* image has been deleted.</li></ul>
*/
media_status_t AImage_getDataSpace(const AImage* image,
- /*out*/int32_t* dataSpace) __INTRODUCED_IN(33);
+ /*out*/int32_t* dataSpace) __INTRODUCED_IN(34);
__END_DECLS
diff --git a/media/ndk/include/media/NdkImageReader.h b/media/ndk/include/media/NdkImageReader.h
index 992955b..b6dcaae 100644
--- a/media/ndk/include/media/NdkImageReader.h
+++ b/media/ndk/include/media/NdkImageReader.h
@@ -395,7 +395,7 @@
* the combination of {@code hardwareBufferFormat} and {@code dataSpace} for the
* format of the Image that the reader will produce.</p>
*
- * Available since API level 33.
+ * Available since API level 34.
*
* @param width The default width in pixels of the Images that this reader will produce.
* @param height The default height in pixels of the Images that this reader will produce.
@@ -422,7 +422,7 @@
*/
media_status_t AImageReader_newWithDataSpace(int32_t width, int32_t height, uint64_t usage,
int32_t maxImages, uint32_t hardwareBufferFormat, int32_t dataSpace,
- /*out*/ AImageReader** reader) __INTRODUCED_IN(33);
+ /*out*/ AImageReader** reader) __INTRODUCED_IN(34);
/**
* Acquire the next {@link AImage} from the image reader's queue asynchronously.
diff --git a/media/ndk/libmediandk.map.txt b/media/ndk/libmediandk.map.txt
index 4dd81ab..4f045fd 100644
--- a/media/ndk/libmediandk.map.txt
+++ b/media/ndk/libmediandk.map.txt
@@ -13,13 +13,13 @@
AImageReader_getWindow; # introduced=24
AImageReader_new; # introduced=24
AImageReader_newWithUsage; # introduced=26
- AImageReader_newWithDataSpace; # introduced=Tiramisu
+ AImageReader_newWithDataSpace; # introduced=UpsideDownCake
AImageReader_setBufferRemovedListener; # introduced=26
AImageReader_setImageListener; # introduced=24
AImage_delete; # introduced=24
AImage_deleteAsync; # introduced=26
AImage_getCropRect; # introduced=24
- AImage_getDataSpace; # introduced=Tiramisu
+ AImage_getDataSpace; # introduced=UpsideDownCake
AImage_getFormat; # introduced=24
AImage_getHardwareBuffer; # introduced=26
AImage_getHeight; # introduced=24
diff --git a/services/audioflinger/AudioFlinger.cpp b/services/audioflinger/AudioFlinger.cpp
index ab197f8..325adfa 100644
--- a/services/audioflinger/AudioFlinger.cpp
+++ b/services/audioflinger/AudioFlinger.cpp
@@ -58,6 +58,7 @@
#include <audiomanager/IAudioManager.h>
#include "AudioFlinger.h"
+#include "EffectConfiguration.h"
#include "NBAIO_Tee.h"
#include "PropertyUtils.h"
@@ -372,7 +373,7 @@
BatteryNotifier::getInstance().noteResetAudio();
mDevicesFactoryHal = DevicesFactoryHalInterface::create();
- mEffectsFactoryHal = EffectsFactoryHalInterface::create();
+ mEffectsFactoryHal = audioflinger::EffectConfiguration::getEffectsFactoryHal();
mMediaLogNotifier->run("MediaLogNotifier");
std::vector<pid_t> halPids;
@@ -841,6 +842,8 @@
for (const auto& vibratorInfo : mAudioVibratorInfos) {
dprintf(fd, " - %s\n", vibratorInfo.toString().c_str());
}
+ dprintf(fd, "Bluetooth latency modes are %senabled\n",
+ mBluetoothLatencyModesEnabled ? "" : "not ");
}
void AudioFlinger::dumpPermissionDenial(int fd, const Vector<String16>& args __unused)
diff --git a/services/audioflinger/DeviceEffectManager.cpp b/services/audioflinger/DeviceEffectManager.cpp
index 2f61a01..4fb6138 100644
--- a/services/audioflinger/DeviceEffectManager.cpp
+++ b/services/audioflinger/DeviceEffectManager.cpp
@@ -23,6 +23,7 @@
#include <audio_utils/primitives.h>
#include "AudioFlinger.h"
+#include "EffectConfiguration.h"
#include <media/audiohal/EffectsFactoryHalInterface.h>
// ----------------------------------------------------------------------------
@@ -111,14 +112,16 @@
status_t AudioFlinger::DeviceEffectManager::checkEffectCompatibility(
const effect_descriptor_t *desc) {
- sp<EffectsFactoryHalInterface> effectsFactory = mAudioFlinger.getEffectsFactory();
+ const sp<EffectsFactoryHalInterface> effectsFactory =
+ audioflinger::EffectConfiguration::getEffectsFactoryHal();
if (effectsFactory == nullptr) {
return BAD_VALUE;
}
- static AudioHalVersionInfo sMinDeviceEffectHalVersion =
+ static const AudioHalVersionInfo sMinDeviceEffectHalVersion =
AudioHalVersionInfo(AudioHalVersionInfo::Type::HIDL, 6, 0);
- AudioHalVersionInfo halVersion = effectsFactory->getHalVersion();
+ static const AudioHalVersionInfo halVersion =
+ audioflinger::EffectConfiguration::getAudioHalVersionInfo();
// We can trust AIDL generated AudioHalVersionInfo comparison operator (based on std::tie) as
// long as the type, major and minor sequence doesn't change in the definition.
@@ -137,7 +140,8 @@
const effect_uuid_t *pEffectUuid, int32_t sessionId, int32_t deviceId,
sp<EffectHalInterface> *effect) {
status_t status = NO_INIT;
- sp<EffectsFactoryHalInterface> effectsFactory = mAudioFlinger.getEffectsFactory();
+ const sp<EffectsFactoryHalInterface> effectsFactory =
+ audioflinger::EffectConfiguration::getEffectsFactoryHal();
if (effectsFactory != 0) {
status = effectsFactory->createEffect(
pEffectUuid, sessionId, AUDIO_IO_HANDLE_NONE, deviceId, effect);
diff --git a/services/audioflinger/EffectConfiguration.h b/services/audioflinger/EffectConfiguration.h
new file mode 100644
index 0000000..2f07fa2
--- /dev/null
+++ b/services/audioflinger/EffectConfiguration.h
@@ -0,0 +1,46 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <media/audiohal/EffectsFactoryHalInterface.h>
+
+namespace android::audioflinger {
+
+/**
+ * Effect Configuration abstraction and helper class.
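+ *
+ * The effects factory HAL and its version info are cached in function-local statics, so they
+ * are created once on first use and can be shared safely afterwards.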
+ */
+class EffectConfiguration {
+public:
+ static bool isHidl() {
+ static const bool isHidl = getAudioHalVersionInfo().isHidl();
+ return isHidl;
+ }
+
+ static const sp<EffectsFactoryHalInterface>& getEffectsFactoryHal() {
+ static const auto effectsFactoryHal = EffectsFactoryHalInterface::create();
+ return effectsFactoryHal;
+ }
+
+ static const detail::AudioHalVersionInfo& getAudioHalVersionInfo() {
+ static const auto audioHalVersionInfo = getEffectsFactoryHal() ?
+ getEffectsFactoryHal()->getHalVersion() : detail::AudioHalVersionInfo{
+ detail::AudioHalVersionInfo::Type::HIDL, 0 /* major */, 0 /* minor */ };
+ return audioHalVersionInfo;
+ }
+};
+
+} // namespace android::audioflinger
diff --git a/services/audioflinger/Effects.cpp b/services/audioflinger/Effects.cpp
index ce2d8f4..77aa804 100644
--- a/services/audioflinger/Effects.cpp
+++ b/services/audioflinger/Effects.cpp
@@ -44,6 +44,7 @@
#include <mediautils/TimeCheck.h>
#include "AudioFlinger.h"
+#include "EffectConfiguration.h"
// ----------------------------------------------------------------------------
@@ -65,6 +66,7 @@
namespace android {
using aidl_utils::statusTFromBinderStatus;
+using audioflinger::EffectConfiguration;
using binder::Status;
namespace {
@@ -982,6 +984,7 @@
#ifdef MULTICHANNEL_EFFECT_CHAIN
if (status != NO_ERROR &&
+ EffectConfiguration::isHidl() && // only HIDL effects support channel conversion
mIsOutput &&
(mConfig.inputCfg.channels != AUDIO_CHANNEL_OUT_STEREO
|| mConfig.outputCfg.channels != AUDIO_CHANNEL_OUT_STEREO)) {
@@ -1012,7 +1015,8 @@
mSupportsFloat = true;
}
- if (status != NO_ERROR) {
+ // only HIDL effects support integer conversion.
+ if (status != NO_ERROR && EffectConfiguration::isHidl()) {
ALOGV("EFFECT_CMD_SET_CONFIG failed with float format, retry with int16_t.");
mConfig.inputCfg.format = AUDIO_FORMAT_PCM_16_BIT;
mConfig.outputCfg.format = AUDIO_FORMAT_PCM_16_BIT;
@@ -3032,7 +3036,8 @@
const effect_uuid_t *pEffectUuid, int32_t sessionId, int32_t deviceId,
sp<EffectHalInterface> *effect) {
status_t status = NO_INIT;
- sp<EffectsFactoryHalInterface> effectsFactory = mAudioFlinger.getEffectsFactory();
+ const sp<EffectsFactoryHalInterface> effectsFactory =
+ EffectConfiguration::getEffectsFactoryHal();
if (effectsFactory != 0) {
status = effectsFactory->createEffect(pEffectUuid, sessionId, io(), deviceId, effect);
}
diff --git a/services/audioflinger/Threads.cpp b/services/audioflinger/Threads.cpp
index 64de99a..700bdd2 100644
--- a/services/audioflinger/Threads.cpp
+++ b/services/audioflinger/Threads.cpp
@@ -3571,12 +3571,7 @@
mIdleSleepTimeUs = idleSleepTimeUs();
mStandbyDelayNs = AudioFlinger::mStandbyTimeInNsecs;
- // Shorten standby delay on VOIP RX output to avoid delayed routing updates
- // after a call due to call end tone.
- if (mOutput != nullptr && (mOutput->flags & AUDIO_OUTPUT_FLAG_VOIP_RX) != 0) {
- const nsecs_t NS_PER_MS = 1000000;
- mStandbyDelayNs = std::min(mStandbyDelayNs, latency_l() * NS_PER_MS);
- }
+
// make sure standby delay is not too short when connected to an A2DP sink to avoid
// truncating audio when going to standby.
if (!Intersection(outDeviceTypes(), getAudioDeviceOutAllA2dpSet()).empty()) {
@@ -4110,8 +4105,9 @@
// signal actual start of output stream when the render position reported by the kernel
// starts moving.
- if (!mStandby && !mHalStarted && mKernelPositionOnStandby !=
- mTimestamp.mPosition[ExtendedTimestamp::LOCATION_KERNEL]) {
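+ // While suspended, the kernel position does not advance, so also treat the stream
+ // as started once data has been written to it.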
+ if (!mHalStarted && ((isSuspended() && (mBytesWritten != 0)) || (!mStandby
+ && (mKernelPositionOnStandby
+ != mTimestamp.mPosition[ExtendedTimestamp::LOCATION_KERNEL])))) {
mHalStarted = true;
mWaitHalStartCV.broadcast();
}
@@ -6320,6 +6316,12 @@
} else {
dprintf(fd, " No FastMixer\n");
}
+
+ dprintf(fd, "Bluetooth latency modes are %senabled\n",
+ mBluetoothLatencyModesEnabled ? "" : "not ");
+ dprintf(fd, "HAL does %ssupport Bluetooth latency modes\n", mOutput != nullptr &&
+ mOutput->audioHwDev->supportsBluetoothVariableLatency() ? "" : "not ");
+ dprintf(fd, "Supported latency modes: %s\n", toString(mSupportedLatencyModes).c_str());
}
uint32_t AudioFlinger::MixerThread::idleSleepTimeUs() const
diff --git a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
index ab1a050..f093e68 100644
--- a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
+++ b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
@@ -4373,6 +4373,11 @@
ALOGE("%s the requested device is currently unavailable", __func__);
return BAD_VALUE;
}
+ if (!audio_is_usb_out_device(deviceDescriptor->type())) {
+ ALOGE("%s the requested device(type=%#x) is not usb device", __func__,
+ deviceDescriptor->type());
+ return BAD_VALUE;
+ }
for (const auto& hwModule : mHwModules) {
for (const auto& curProfile : hwModule->getOutputProfiles()) {
if (curProfile->supportsDevice(deviceDescriptor)) {
diff --git a/services/audiopolicy/service/AudioPolicyEffects.cpp b/services/audiopolicy/service/AudioPolicyEffects.cpp
index 7febd2f..70a1785 100644
--- a/services/audiopolicy/service/AudioPolicyEffects.cpp
+++ b/services/audiopolicy/service/AudioPolicyEffects.cpp
@@ -48,7 +48,13 @@
mDefaultDeviceEffectFuture =
std::async(std::launch::async, &AudioPolicyEffects::initDefaultDeviceEffects, this);
} else if (loadResult < 0) {
- ALOGE("Failed to query effect configuration with status %d", loadResult);
+ ALOGW("Failed to query effect configuration, fallback to load .conf");
+ // load automatic audio effect modules
+ if (access(AUDIO_EFFECT_VENDOR_CONFIG_FILE, R_OK) == 0) {
+ loadAudioEffectConfigLegacy(AUDIO_EFFECT_VENDOR_CONFIG_FILE);
+ } else if (access(AUDIO_EFFECT_DEFAULT_CONFIG_FILE, R_OK) == 0) {
+ loadAudioEffectConfigLegacy(AUDIO_EFFECT_DEFAULT_CONFIG_FILE);
+ }
} else if (loadResult > 0) {
ALOGE("Effect config is partially invalid, skipped %d elements", loadResult);
}
@@ -947,6 +953,34 @@
return skippedElements;
}
+status_t AudioPolicyEffects::loadAudioEffectConfigLegacy(const char *path)
+{
+ cnode *root;
+ char *data;
+
+ data = (char *)load_file(path, NULL);
+ if (data == NULL) {
+ return -ENODEV;
+ }
+ root = config_node("", "");
+ config_load(root, data);
+
+ Vector <EffectDesc *> effects;
+ loadEffects(root, effects);
+ loadInputEffectConfigurations(root, effects);
+ loadStreamEffectConfigurations(root, effects);
+
+ for (size_t i = 0; i < effects.size(); i++) {
+ delete effects[i];
+ }
+
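+ // config_free() frees the parsed tree's child nodes; the root node and the raw file
+ // buffer are freed separately below.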
+ config_free(root);
+ free(root);
+ free(data);
+
+ return NO_ERROR;
+}
+
void AudioPolicyEffects::initDefaultDeviceEffects()
{
Mutex::Autolock _l(mLock);
diff --git a/services/audiopolicy/service/AudioPolicyEffects.h b/services/audiopolicy/service/AudioPolicyEffects.h
index 80b0f91..9f65a96 100644
--- a/services/audiopolicy/service/AudioPolicyEffects.h
+++ b/services/audiopolicy/service/AudioPolicyEffects.h
@@ -39,7 +39,12 @@
*
* This class manages all effects attached to input and output streams in AudioPolicyService.
* The effect configurations can be queried in several ways:
- * With HIDL HAL, the configuration file `audio_effects.xml` will be loaded by libAudioHal.
+ *
+ * With the HIDL HAL, the configuration file `audio_effects.xml` will be loaded by libAudioHal. If
+ * this file does not exist, the AudioPolicyEffects class will fall back to loading the
+ * configuration from `/vendor/etc/audio_effects.conf` (AUDIO_EFFECT_VENDOR_CONFIG_FILE). If this
+ * file also does not exist, the configuration will be loaded from
+ * `/system/etc/audio_effects.conf` (AUDIO_EFFECT_DEFAULT_CONFIG_FILE).
+ *
+ * With the AIDL HAL, the configuration will be queried with the method
+ * `IFactory::queryProcessing()`.
*/
class AudioPolicyEffects : public RefBase
@@ -47,7 +52,7 @@
public:
- // The constructor will parse audio_effects.xml
+ // The constructor will parse audio_effects.conf
// First it will look whether vendor specific file exists,
// otherwise it will parse the system default file.
explicit AudioPolicyEffects(const sp<EffectsFactoryHalInterface>& effectsFactoryHal);
@@ -116,7 +121,7 @@
void initDefaultDeviceEffects();
// class to store the description of an effects and its parameters
- // as defined in audio_effects.xml
+ // as defined in audio_effects.conf
class EffectDesc {
public:
EffectDesc(const char *name,
@@ -230,7 +235,8 @@
static const char *kStreamNames[AUDIO_STREAM_PUBLIC_CNT+1]; //+1 required as streams start from -1
audio_stream_type_t streamNameToEnum(const char *name);
- // Parse audio_effects.xml
+ // Parse audio_effects.conf
+ status_t loadAudioEffectConfigLegacy(const char *path);
status_t loadAudioEffectConfig(const sp<EffectsFactoryHalInterface>& effectsFactoryHal);
// Load all effects descriptors in configuration file
diff --git a/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp b/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp
index 0d12060..2e7b3ff 100644
--- a/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp
+++ b/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp
@@ -818,8 +818,29 @@
Mutex::Autolock _l(mLock);
+ ALOGW_IF(client->silenced, "startInput on silenced input for port %d, uid %d. Unsilencing.",
+ portIdAidl,
+ client->attributionSource.uid);
+
+ if (client->active) {
+ ALOGE("Client should never be active before startInput. Uid %d port %d",
+ client->attributionSource.uid, portId);
+ finishRecording(client->attributionSource, client->attributes.source);
+ return binderStatusFromStatusT(INVALID_OPERATION);
+ }
+
+ // Force the possibly silenced client to be unsilenced since we just called
+ // startRecording (i.e. we have assumed it is unsilenced).
+ // At this point in time, the client is inactive, so no calls to appops are sent in
+ // setAppState_l.
+ // This ensures existing clients have the same behavior as new clients (starting unsilenced).
+ // TODO(b/282076713)
+ setAppState_l(client, APP_STATE_TOP);
+
client->active = true;
client->startTimeNs = systemTime();
+ // This call updates the silenced state, and since we are active, appropriately notifies appops
+ // if we silence the track.
updateUidStates_l();
status_t status;
diff --git a/services/audiopolicy/service/AudioPolicyService.h b/services/audiopolicy/service/AudioPolicyService.h
index 6cac9f9..4710a8a 100644
--- a/services/audiopolicy/service/AudioPolicyService.h
+++ b/services/audiopolicy/service/AudioPolicyService.h
@@ -1072,7 +1072,7 @@
Mutex mNotificationClientsLock;
DefaultKeyedVector<int64_t, sp<NotificationClient>> mNotificationClients
GUARDED_BY(mNotificationClientsLock);
- // Manage all effects configured in audio_effects.xml
+ // Manage all effects configured in audio_effects.conf
// never hold AudioPolicyService::mLock when calling AudioPolicyEffects methods as
// those can call back into AudioPolicyService methods and try to acquire the mutex
sp<AudioPolicyEffects> mAudioPolicyEffects GUARDED_BY(mLock);
diff --git a/services/camera/libcameraservice/CameraService.cpp b/services/camera/libcameraservice/CameraService.cpp
index eb824de..668a51a 100644
--- a/services/camera/libcameraservice/CameraService.cpp
+++ b/services/camera/libcameraservice/CameraService.cpp
@@ -3074,6 +3074,13 @@
return binder::Status::ok();
}
+Status CameraService::reportExtensionSessionStats(
+ const hardware::CameraExtensionSessionStats& stats, String16* sessionKey /*out*/) {
+ ALOGV("%s: reported %s", __FUNCTION__, stats.toString().c_str());
+ *sessionKey = mCameraServiceProxyWrapper->updateExtensionStats(stats);
+ return Status::ok();
+}
+
void CameraService::removeByClient(const BasicClient* client) {
Mutex::Autolock lock(mServiceLock);
for (auto& i : mActiveClientManager.getAll()) {
diff --git a/services/camera/libcameraservice/CameraService.h b/services/camera/libcameraservice/CameraService.h
index d84cb00..3214d4c 100644
--- a/services/camera/libcameraservice/CameraService.h
+++ b/services/camera/libcameraservice/CameraService.h
@@ -218,6 +218,9 @@
/*out*/
sp<hardware::camera2::ICameraInjectionSession>* cameraInjectionSession);
+ virtual binder::Status reportExtensionSessionStats(
+ const hardware::CameraExtensionSessionStats& stats, String16* sessionKey /*out*/);
+
// Extra permissions checks
virtual status_t onTransact(uint32_t code, const Parcel& data,
Parcel* reply, uint32_t flags);
diff --git a/services/camera/libcameraservice/api2/HeicCompositeStream.cpp b/services/camera/libcameraservice/api2/HeicCompositeStream.cpp
index e652546..694aff3 100644
--- a/services/camera/libcameraservice/api2/HeicCompositeStream.cpp
+++ b/services/camera/libcameraservice/api2/HeicCompositeStream.cpp
@@ -120,7 +120,7 @@
camera_stream_rotation_t rotation, int *id, const String8& physicalCameraId,
const std::unordered_set<int32_t> &sensorPixelModesUsed,
std::vector<int> *surfaceIds,
- int /*streamSetId*/, bool /*isShared*/, int32_t /*colorSpace*/,
+ int /*streamSetId*/, bool /*isShared*/, int32_t colorSpace,
int64_t /*dynamicProfile*/, int64_t /*streamUseCase*/, bool useReadoutTimestamp) {
sp<CameraDeviceBase> device = mDevice.promote();
if (!device.get()) {
@@ -153,7 +153,7 @@
ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
OutputConfiguration::TIMESTAMP_BASE_DEFAULT,
OutputConfiguration::MIRROR_MODE_AUTO,
- ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED,
+ colorSpace,
useReadoutTimestamp);
if (res == OK) {
mAppSegmentSurfaceId = (*surfaceIds)[0];
@@ -196,7 +196,7 @@
ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
OutputConfiguration::TIMESTAMP_BASE_DEFAULT,
OutputConfiguration::MIRROR_MODE_AUTO,
- ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED,
+ colorSpace,
useReadoutTimestamp);
if (res == OK) {
mMainImageSurfaceId = sourceSurfaceId[0];
diff --git a/services/camera/libcameraservice/device3/Camera3Device.cpp b/services/camera/libcameraservice/device3/Camera3Device.cpp
index f5f50a5..cfe51c7 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.cpp
+++ b/services/camera/libcameraservice/device3/Camera3Device.cpp
@@ -1906,7 +1906,7 @@
camera_metadata_entry minDurations =
mDeviceInfo.find(ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS);
for (size_t i = 0; i < minDurations.count; i += 4) {
- if (minDurations.data.i64[i] == stream->getFormat()
+ if (minDurations.data.i64[i] == stream->getOriginalFormat()
&& minDurations.data.i64[i+1] == stream->getWidth()
&& minDurations.data.i64[i+2] == stream->getHeight()) {
int64_t minFrameDuration = minDurations.data.i64[i+3];
diff --git a/services/camera/libcameraservice/device3/Camera3OutputStream.cpp b/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
index f2a62fa..a387064 100644
--- a/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
+++ b/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
@@ -486,7 +486,7 @@
bufferDeferred = true;
} else {
nsecs_t presentTime = mSyncToDisplay ?
- syncTimestampToDisplayLocked(captureTime) : captureTime;
+ syncTimestampToDisplayLocked(captureTime, releaseFence->dup()) : captureTime;
setTransform(transform, true/*mayChangeMirror*/);
res = native_window_set_buffers_timestamp(mConsumer.get(), presentTime);
@@ -1410,7 +1410,7 @@
}
}
-nsecs_t Camera3OutputStream::syncTimestampToDisplayLocked(nsecs_t t) {
+nsecs_t Camera3OutputStream::syncTimestampToDisplayLocked(nsecs_t t, int releaseFence) {
nsecs_t currentTime = systemTime();
if (!mFixedFps) {
mLastCaptureTime = t;
@@ -1453,6 +1453,17 @@
mLastCaptureTime = t;
mLastPresentTime = presentT;
+ // If releaseFence is available, store the fence to check signal
+ // time later.
+ mRefVsyncData = vsyncEventData;
+ mReferenceCaptureTime = t;
+ mReferenceArrivalTime = currentTime;
+ if (releaseFence != -1) {
+ mReferenceFrameFence = new Fence(releaseFence);
+ } else {
+ mFenceSignalOffset = 0;
+ }
+
// Move the expected presentation time back by 1/3 of frame interval to
// mitigate the time drift. Due to time drift, if we directly use the
// expected presentation time, often times 2 expected presentation time
@@ -1462,6 +1473,36 @@
}
}
+ // If there is a reference frame release fence, get the signal time and
+ // update the captureToPresentOffset.
+ if (mReferenceFrameFence != nullptr) {
+ mFenceSignalOffset = 0;
+ nsecs_t signalTime = mReferenceFrameFence->getSignalTime();
+ // Now that the fence has signaled, recalculate the offsets based on
+ // the timeline which was actually latched
+ if (signalTime != INT64_MAX) {
+ for (size_t i = 0; i < mRefVsyncData.frameTimelinesLength; i++) {
+ const auto& timeline = mRefVsyncData.frameTimelines[i];
+ if (timeline.deadlineTimestamp >= signalTime) {
+ nsecs_t originalOffset = mCaptureToPresentOffset;
+ mCaptureToPresentOffset = timeline.expectedPresentationTime
+ - mReferenceCaptureTime;
+ mLastPresentTime = timeline.expectedPresentationTime;
+ mFenceSignalOffset = signalTime > mReferenceArrivalTime ?
+ signalTime - mReferenceArrivalTime : 0;
+
+ ALOGV("%s: Last deadline %" PRId64 " signalTime %" PRId64
+ " original offset %" PRId64 " new offset %" PRId64
+ " fencesignal offset %" PRId64, __FUNCTION__,
+ timeline.deadlineTimestamp, signalTime, originalOffset,
+ mCaptureToPresentOffset, mFenceSignalOffset);
+ break;
+ }
+ }
+ mReferenceFrameFence.clear();
+ }
+ }
+
nsecs_t idealPresentT = t + mCaptureToPresentOffset;
nsecs_t expectedPresentT = mLastPresentTime;
nsecs_t minDiff = INT64_MAX;
@@ -1505,6 +1546,7 @@
// Find best timestamp in the vsync timelines:
// - Only use at most kMaxTimelines timelines to avoid long latency
+ // - Add an extra timeline if display fence is used
// - closest to the ideal presentation time,
// - deadline timestamp is greater than the current time, and
// - For fixed FPS, if the capture interval doesn't deviate too much from refresh interval,
@@ -1513,7 +1555,9 @@
// - For variable FPS, or if the capture interval deviates from refresh
// interval for more than 5%, find a presentation time closest to the
// (lastPresentationTime + captureToPresentOffset) instead.
- int maxTimelines = std::min(kMaxTimelines, (int)vsyncEventData.frameTimelinesLength);
+ int fenceAdjustment = (mFenceSignalOffset > 0) ? 1 : 0;
+ int maxTimelines = std::min(kMaxTimelines + fenceAdjustment,
+ (int)vsyncEventData.frameTimelinesLength);
float biasForShortDelay = 1.0f;
for (int i = 0; i < maxTimelines; i ++) {
const auto& vsyncTime = vsyncEventData.frameTimelines[i];
@@ -1524,7 +1568,7 @@
biasForShortDelay = 1.0 - 2.0 * i / (maxTimelines - 1);
}
if (std::abs(vsyncTime.expectedPresentationTime - idealPresentT) < minDiff &&
- vsyncTime.deadlineTimestamp >= currentTime &&
+ vsyncTime.deadlineTimestamp >= currentTime + mFenceSignalOffset &&
((!cameraDisplayInSync && vsyncTime.expectedPresentationTime > minPresentT) ||
(cameraDisplayInSync && vsyncTime.expectedPresentationTime >
mLastPresentTime + minInterval +
diff --git a/services/camera/libcameraservice/device3/Camera3OutputStream.h b/services/camera/libcameraservice/device3/Camera3OutputStream.h
index 9a08485..1435081 100644
--- a/services/camera/libcameraservice/device3/Camera3OutputStream.h
+++ b/services/camera/libcameraservice/device3/Camera3OutputStream.h
@@ -446,7 +446,14 @@
static constexpr nsecs_t kTimelineThresholdNs = 1000000LL; // 1 millisecond
static constexpr float kMaxIntervalRatioDeviation = 0.05f;
static constexpr int kMaxTimelines = 2;
- nsecs_t syncTimestampToDisplayLocked(nsecs_t t);
+ nsecs_t syncTimestampToDisplayLocked(nsecs_t t, int releaseFence);
+
+ // In case of fence being used
+ sp<Fence> mReferenceFrameFence;
+ nsecs_t mReferenceCaptureTime = 0;
+ nsecs_t mReferenceArrivalTime = 0;
+ nsecs_t mFenceSignalOffset = 0;
+ VsyncEventData mRefVsyncData;
// Re-space frames by delaying queueBuffer so that frame delivery has
// the same cadence as capture. Default is on for SurfaceTexture bound
diff --git a/services/camera/libcameraservice/utils/CameraServiceProxyWrapper.cpp b/services/camera/libcameraservice/utils/CameraServiceProxyWrapper.cpp
index af5f3ee..3aff2ac 100644
--- a/services/camera/libcameraservice/utils/CameraServiceProxyWrapper.cpp
+++ b/services/camera/libcameraservice/utils/CameraServiceProxyWrapper.cpp
@@ -26,8 +26,14 @@
namespace android {
-using hardware::ICameraServiceProxy;
+using hardware::CameraExtensionSessionStats;
using hardware::CameraSessionStats;
+using hardware::ICameraServiceProxy;
+
+namespace {
+// Sentinel value returned when an extension session with a stale or invalid key is reported.
+const String16 POISON_EXT_STATS_KEY("poisoned_stats");
+} // anonymous namespace
/**
* CameraSessionStatsWrapper functions
@@ -97,10 +103,12 @@
mSessionStats.mUserTag = String16(userTag.c_str());
mSessionStats.mVideoStabilizationMode = videoStabilizationMode;
mSessionStats.mStreamStats = streamStats;
+
updateProxyDeviceState(proxyBinder);
mSessionStats.mInternalReconfigure = 0;
mSessionStats.mStreamStats.clear();
+ mSessionStats.mCameraExtensionSessionStats = {};
}
int64_t CameraServiceProxyWrapper::CameraSessionStatsWrapper::getLogId() {
@@ -108,6 +116,65 @@
return mSessionStats.mLogId;
}
+String16 CameraServiceProxyWrapper::CameraSessionStatsWrapper::updateExtensionSessionStats(
+ const hardware::CameraExtensionSessionStats& extStats) {
+ Mutex::Autolock l(mLock);
+ CameraExtensionSessionStats& currStats = mSessionStats.mCameraExtensionSessionStats;
+ if (currStats.key != extStats.key) {
+ // Mismatched keys. Extension stats likely reported for a closed session
+ ALOGW("%s: mismatched extensions stats key: current='%s' reported='%s'. Dropping stats.",
+ __FUNCTION__, String8(currStats.key).c_str(), String8(extStats.key).c_str());
+ return POISON_EXT_STATS_KEY; // return the poisoned key so future calls are
+ // definitely dropped.
+ }
+
+ // Matching keys...
+ if (currStats.key.size()) {
+ // Non-empty matching keys: overwrite.
+ ALOGV("%s: Overwriting extension session stats: %s", __FUNCTION__,
+ extStats.toString().c_str());
+ currStats = extStats;
+ return currStats.key;
+ }
+
+ // Matching empty keys...
+ if (mSessionStats.mClientName != extStats.clientName) {
+ ALOGW("%s: extension stats reported for unexpected package: current='%s' reported='%s'. "
+ "Dropping stats.", __FUNCTION__,
+ String8(mSessionStats.mClientName).c_str(),
+ String8(extStats.clientName).c_str());
+ return POISON_EXT_STATS_KEY;
+ }
+
+ // Matching empty keys for the current client...
+ if (mSessionStats.mNewCameraState == CameraSessionStats::CAMERA_STATE_OPEN ||
+ mSessionStats.mNewCameraState == CameraSessionStats::CAMERA_STATE_IDLE) {
+ // Camera is open, but not active. It is possible that the active callback hasn't
+ // occurred yet. Keep the stats, but don't associate them with any session.
+ ALOGV("%s: extension stats reported for an open, but not active camera. "
+ "Saving stats, but not generating key.", __FUNCTION__);
+ currStats = extStats;
+ return {}; // Subsequent calls will handle setting the correct key.
+ }
+
+ if (mSessionStats.mNewCameraState == CameraSessionStats::CAMERA_STATE_ACTIVE) {
+ // camera is active. First call for the session!
+ currStats = extStats;
+
+ // Generate a new key from logId and sessionIndex.
+ std::ostringstream key;
+ key << mSessionStats.mSessionIndex << '/' << mSessionStats.mLogId;
+ currStats.key = String16(key.str().c_str());
+ ALOGV("%s: New extension session stats: %s", __FUNCTION__, currStats.toString().c_str());
+ return currStats.key;
+ }
+
+ // Camera is closed. Probably a stale call.
+ ALOGW("%s: extension stats reported for closed camera id '%s'. Dropping stats.",
+ __FUNCTION__, String8(mSessionStats.mCameraId).c_str());
+ return {};
+}
+
/**
* CameraServiceProxyWrapper functions
*/
@@ -337,4 +404,21 @@
return ret;
}
+String16 CameraServiceProxyWrapper::updateExtensionStats(
+ const hardware::CameraExtensionSessionStats& extStats) {
+ std::shared_ptr<CameraSessionStatsWrapper> stats;
+ String8 cameraId = String8(extStats.cameraId);
+ {
+ Mutex::Autolock _l(mLock);
+ if (mSessionStatsMap.count(cameraId) == 0) {
+ ALOGE("%s CameraExtensionSessionStats reported for camera id that isn't open: %s",
+ __FUNCTION__, cameraId.c_str());
+ return {};
+ }
+
+ stats = mSessionStatsMap[cameraId];
+ return stats->updateExtensionSessionStats(extStats);
+ }
+}
+
} // namespace android
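
Note on the key format: updateExtensionSessionStats() above binds extension stats to a session with a key of the form "<sessionIndex>/<logId>". A minimal standalone sketch of that format (hypothetical helper, not part of the patch):

    #include <cstdint>
    #include <sstream>
    #include <string>

    // Sketch: reproduces the key format generated inline in
    // updateExtensionSessionStats(); e.g. makeExtensionStatsKey(3, 1234567890)
    // returns "3/1234567890".
    std::string makeExtensionStatsKey(int sessionIndex, int64_t logId) {
        std::ostringstream key;
        key << sessionIndex << '/' << logId;
        return key.str();
    }
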
diff --git a/services/camera/libcameraservice/utils/CameraServiceProxyWrapper.h b/services/camera/libcameraservice/utils/CameraServiceProxyWrapper.h
index d47c738..e32580c 100644
--- a/services/camera/libcameraservice/utils/CameraServiceProxyWrapper.h
+++ b/services/camera/libcameraservice/utils/CameraServiceProxyWrapper.h
@@ -65,6 +65,8 @@
const std::string& userTag, int32_t videoStabilizationMode,
const std::vector<hardware::CameraStreamStats>& streamStats);
+ String16 updateExtensionSessionStats(const hardware::CameraExtensionSessionStats& extStats);
+
// Returns the logId associated with this event.
int64_t getLogId();
};
@@ -127,6 +129,9 @@
// frameworks/av/camera/include/camera/CameraSessionStats.h for more details about this
// identifier. Returns a non-0 value on success.
int64_t getCurrentLogIdForCamera(const String8& cameraId);
+
+ // Update the stored extension stats to the latest values
+ String16 updateExtensionStats(const hardware::CameraExtensionSessionStats& extStats);
};
} // android
diff --git a/services/camera/libcameraservice/utils/TagMonitor.cpp b/services/camera/libcameraservice/utils/TagMonitor.cpp
index fe87ed6..d1e54ab 100644
--- a/services/camera/libcameraservice/utils/TagMonitor.cpp
+++ b/services/camera/libcameraservice/utils/TagMonitor.cpp
@@ -346,7 +346,7 @@
== OK) {
returnStr += value_string_tmp;
} else {
- returnStr.appendFormat("%hhu", *(data_ptr + index));
+ returnStr.appendFormat("%hhu ", *(data_ptr + index));
}
break;
case TYPE_INT32:
@@ -363,7 +363,7 @@
}
break;
case TYPE_FLOAT:
- returnStr.appendFormat("%0.8f", *(float*)(data_ptr + index));
+ returnStr.appendFormat("%0.8f ", *(float*)(data_ptr + index));
break;
case TYPE_INT64:
returnStr.appendFormat("%" PRId64 " ", *(int64_t*)(data_ptr + index));
diff --git a/services/mediametrics/MediaMetricsService.cpp b/services/mediametrics/MediaMetricsService.cpp
index adb2217..af1372b 100644
--- a/services/mediametrics/MediaMetricsService.cpp
+++ b/services/mediametrics/MediaMetricsService.cpp
@@ -524,6 +524,8 @@
"audiotrack",
// other media
"codec",
+ "freeze",
+ "judder",
"extractor",
"mediadrm",
"mediaparser",
diff --git a/services/mediametrics/StringUtils.cpp b/services/mediametrics/StringUtils.cpp
index d1c7a18..5766f1c 100644
--- a/services/mediametrics/StringUtils.cpp
+++ b/services/mediametrics/StringUtils.cpp
@@ -15,11 +15,13 @@
*/
//#define LOG_NDEBUG 0
-#define LOG_TAG "MediaMetricsService::stringutils"
+#define LOG_TAG "mediametrics::stringutils"
#include <utils/Log.h>
#include "StringUtils.h"
+#include <charconv>
+
#include "AudioTypes.h"
namespace android::mediametrics::stringutils {
@@ -54,6 +56,26 @@
}
}
+bool parseVector(const std::string &str, std::vector<int32_t> *vector) {
+ std::vector<int32_t> values;
+ const char *p = str.c_str();
+ const char *last = p + str.size();
+ while (p != last) {
+ if (*p == ',' || *p == '{' || *p == '}') {
+ p++;
+ }
+ int32_t value = -1;
+ auto [ptr, error] = std::from_chars(p, last, value);
+ if (error == std::errc::invalid_argument || error == std::errc::result_out_of_range) {
+ return false;
+ }
+ p = ptr;
+ values.push_back(value);
+ }
+ *vector = std::move(values);
+ return true;
+}
+
std::vector<std::pair<std::string, std::string>> getDeviceAddressPairs(const std::string& devices)
{
std::vector<std::pair<std::string, std::string>> result;
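
parseVector() treats ',', '{' and '}' as interchangeable separators and leaves the output vector untouched on failure. A usage sketch consistent with the unit tests added at the end of this patch:

    #include <cstdint>
    #include <vector>
    #include "StringUtils.h"

    void example() {
        std::vector<int32_t> histogram;
        // "{10,20,30}" and "10,20,30" parse identically.
        if (android::mediametrics::stringutils::parseVector("{10,20,30}", &histogram)) {
            // histogram == {10, 20, 30}
        } // on failure, histogram is left unmodified
    }
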
diff --git a/services/mediametrics/include/mediametricsservice/StatsdLog.h b/services/mediametrics/include/mediametricsservice/StatsdLog.h
index e207bac..5d5009e 100644
--- a/services/mediametrics/include/mediametricsservice/StatsdLog.h
+++ b/services/mediametrics/include/mediametricsservice/StatsdLog.h
@@ -16,11 +16,13 @@
#pragma once
-#include <audio_utils/SimpleLog.h>
#include <map>
#include <mutex>
#include <sstream>
+#include <android-base/thread_annotations.h>
+#include <audio_utils/SimpleLog.h>
+
namespace android::mediametrics {
class StatsdLog {
@@ -61,9 +63,9 @@
}
private:
+ mutable std::mutex mLock;
SimpleLog mSimpleLog; // internally locked
std::map<int /* atom */, size_t /* count */> mCountMap GUARDED_BY(mLock); // sorted
- mutable std::mutex mLock;
};
} // namespace android::mediametrics
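
The StatsdLog.h hunk moves mLock above the map it guards: members are constructed in declaration order and destroyed in reverse, so the mutex now outlives mCountMap, and the GUARDED_BY annotation refers to an already-declared member. The general pattern, as a sketch using android-base's thread annotations:

    #include <map>
    #include <mutex>
    #include <android-base/thread_annotations.h>

    class AtomCounter {
    public:
        void bump(int atom) {
            std::lock_guard<std::mutex> lock(mLock);
            ++mCounts[atom];
        }
    private:
        mutable std::mutex mLock;                         // declared first, destroyed last
        std::map<int, size_t> mCounts GUARDED_BY(mLock);  // checked by -Wthread-safety
    };
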
diff --git a/services/mediametrics/include/mediametricsservice/StringUtils.h b/services/mediametrics/include/mediametricsservice/StringUtils.h
index 78c25ff..ed2cf2e 100644
--- a/services/mediametrics/include/mediametricsservice/StringUtils.h
+++ b/services/mediametrics/include/mediametricsservice/StringUtils.h
@@ -72,6 +72,12 @@
std::vector<std::string> split(const std::string& flags, const char *delim);
/**
+ * Parses a vector of integers using ',', '{' and '}' as delimiters. Leaves the
+ * vector unmodified if parsing fails.
+ */
+bool parseVector(const std::string &str, std::vector<int32_t> *vector);
+
+/**
* Parse the devices string and return a vector of device address pairs.
*
* A failure to parse returns early with the contents that were able to be parsed.
diff --git a/services/mediametrics/include/mediametricsservice/iface_statsd.h b/services/mediametrics/include/mediametricsservice/iface_statsd.h
index 5bc293b..34d71ba 100644
--- a/services/mediametrics/include/mediametricsservice/iface_statsd.h
+++ b/services/mediametrics/include/mediametricsservice/iface_statsd.h
@@ -15,7 +15,9 @@
*/
#include <memory>
+
#include <stats_event.h>
+#include <StatsdLog.h>
namespace android {
namespace mediametrics {
diff --git a/services/mediametrics/statsd_codec.cpp b/services/mediametrics/statsd_codec.cpp
index 158914a..ea76bcd 100644
--- a/services/mediametrics/statsd_codec.cpp
+++ b/services/mediametrics/statsd_codec.cpp
@@ -23,6 +23,7 @@
#include <pthread.h>
#include <pwd.h>
#include <stdint.h>
+#include <string>
#include <string.h>
#include <sys/stat.h>
#include <sys/time.h>
@@ -32,14 +33,149 @@
#include <stats_media_metrics.h>
#include <stats_event.h>
-#include "cleaner.h"
-#include "MediaMetricsService.h"
-#include "ValidateId.h"
-#include "frameworks/proto_logging/stats/message/mediametrics_message.pb.h"
-#include "iface_statsd.h"
+#include <frameworks/proto_logging/stats/message/mediametrics_message.pb.h>
+#include <mediametricsservice/cleaner.h>
+#include <mediametricsservice/iface_statsd.h>
+#include <mediametricsservice/MediaMetricsService.h>
+#include <mediametricsservice/StringUtils.h>
+#include <mediametricsservice/ValidateId.h>
namespace android {
+using stats::media_metrics::stats_write;
+using stats::media_metrics::MEDIA_CODEC_RENDERED;
+using stats::media_metrics::MEDIA_CODEC_RENDERED__CODEC__CODEC_UNKNOWN;
+using stats::media_metrics::MEDIA_CODEC_RENDERED__RESOLUTION__RESOLUTION_INVALID;
+using stats::media_metrics::MEDIA_CODEC_RENDERED__RESOLUTION__RESOLUTION_ZERO;
+using stats::media_metrics::MEDIA_CODEC_RENDERED__CONTENT_FRAMERATE__FRAMERATE_UNKNOWN;
+using stats::media_metrics::MEDIA_CODEC_RENDERED__CONTENT_FRAMERATE__FRAMERATE_UNDETERMINED;
+using stats::media_metrics::MEDIA_CODEC_RENDERED__CONTENT_FRAMERATE__FRAMERATE_24_3_2_PULLDOWN;
+using stats::media_metrics::MEDIA_CODEC_RENDERED__HDR_FORMAT__HDR_FORMAT_NONE;
+using stats::media_metrics::MEDIA_CODEC_RENDERED__HDR_FORMAT__HDR_FORMAT_HLG;
+using stats::media_metrics::MEDIA_CODEC_RENDERED__HDR_FORMAT__HDR_FORMAT_HDR10;
+using stats::media_metrics::MEDIA_CODEC_RENDERED__HDR_FORMAT__HDR_FORMAT_HDR10_PLUS;
+using stats::media_metrics::MEDIA_CODEC_RENDERED__HDR_FORMAT__HDR_FORMAT_DOLBY_VISION;
+
+static const int BITRATE_UNKNOWN =
+ stats::media_metrics::MEDIA_CODEC_RENDERED__BITRATE__BITRATE_UNKNOWN;
+
+static const std::pair<char const *, int> CODEC_LOOKUP[] = {
+ { "avc", stats::media_metrics::MEDIA_CODEC_RENDERED__CODEC__CODEC_AVC },
+ { "h264", stats::media_metrics::MEDIA_CODEC_RENDERED__CODEC__CODEC_AVC },
+ { "hevc", stats::media_metrics::MEDIA_CODEC_RENDERED__CODEC__CODEC_HEVC },
+ { "h265", stats::media_metrics::MEDIA_CODEC_RENDERED__CODEC__CODEC_HEVC },
+ { "vp8", stats::media_metrics::MEDIA_CODEC_RENDERED__CODEC__CODEC_VP8 },
+ { "vp9", stats::media_metrics::MEDIA_CODEC_RENDERED__CODEC__CODEC_VP9 },
+ { "av1", stats::media_metrics::MEDIA_CODEC_RENDERED__CODEC__CODEC_AV1 },
+ { "av01", stats::media_metrics::MEDIA_CODEC_RENDERED__CODEC__CODEC_AV1 },
+ { "dolby-vision", stats::media_metrics::MEDIA_CODEC_RENDERED__CODEC__CODEC_HEVC },
+};
+
+static const int32_t RESOLUTION_LOOKUP[] = {
+ stats::media_metrics::MEDIA_CODEC_RENDERED__RESOLUTION__RESOLUTION_MAX_SIZE,
+ stats::media_metrics::MEDIA_CODEC_RENDERED__RESOLUTION__RESOLUTION_32K,
+ stats::media_metrics::MEDIA_CODEC_RENDERED__RESOLUTION__RESOLUTION_16K,
+ stats::media_metrics::MEDIA_CODEC_RENDERED__RESOLUTION__RESOLUTION_8K_UHD,
+ stats::media_metrics::MEDIA_CODEC_RENDERED__RESOLUTION__RESOLUTION_8K_UHD_ALMOST,
+ stats::media_metrics::MEDIA_CODEC_RENDERED__RESOLUTION__RESOLUTION_4K_UHD_ALMOST,
+ stats::media_metrics::MEDIA_CODEC_RENDERED__RESOLUTION__RESOLUTION_1440X2560,
+ stats::media_metrics::MEDIA_CODEC_RENDERED__RESOLUTION__RESOLUTION_1080X2400,
+ stats::media_metrics::MEDIA_CODEC_RENDERED__RESOLUTION__RESOLUTION_1080X2340,
+ stats::media_metrics::MEDIA_CODEC_RENDERED__RESOLUTION__RESOLUTION_1080P_FHD,
+ stats::media_metrics::MEDIA_CODEC_RENDERED__RESOLUTION__RESOLUTION_1080P_FHD_ALMOST,
+ stats::media_metrics::MEDIA_CODEC_RENDERED__RESOLUTION__RESOLUTION_720P_HD,
+ stats::media_metrics::MEDIA_CODEC_RENDERED__RESOLUTION__RESOLUTION_720P_HD_ALMOST,
+ stats::media_metrics::MEDIA_CODEC_RENDERED__RESOLUTION__RESOLUTION_576X1024,
+ stats::media_metrics::MEDIA_CODEC_RENDERED__RESOLUTION__RESOLUTION_540X960,
+ stats::media_metrics::MEDIA_CODEC_RENDERED__RESOLUTION__RESOLUTION_480X854,
+ stats::media_metrics::MEDIA_CODEC_RENDERED__RESOLUTION__RESOLUTION_480X640,
+ stats::media_metrics::MEDIA_CODEC_RENDERED__RESOLUTION__RESOLUTION_360X640,
+ stats::media_metrics::MEDIA_CODEC_RENDERED__RESOLUTION__RESOLUTION_352X640,
+ stats::media_metrics::MEDIA_CODEC_RENDERED__RESOLUTION__RESOLUTION_VERY_LOW,
+ stats::media_metrics::MEDIA_CODEC_RENDERED__RESOLUTION__RESOLUTION_SMALLEST,
+ stats::media_metrics::MEDIA_CODEC_RENDERED__RESOLUTION__RESOLUTION_ZERO,
+};
+
+static const int32_t FRAMERATE_LOOKUP[] = {
+ stats::media_metrics::MEDIA_CODEC_RENDERED__CONTENT_FRAMERATE__FRAMERATE_24,
+ stats::media_metrics::MEDIA_CODEC_RENDERED__CONTENT_FRAMERATE__FRAMERATE_25,
+ stats::media_metrics::MEDIA_CODEC_RENDERED__CONTENT_FRAMERATE__FRAMERATE_30,
+ stats::media_metrics::MEDIA_CODEC_RENDERED__CONTENT_FRAMERATE__FRAMERATE_50,
+ stats::media_metrics::MEDIA_CODEC_RENDERED__CONTENT_FRAMERATE__FRAMERATE_60,
+ stats::media_metrics::MEDIA_CODEC_RENDERED__CONTENT_FRAMERATE__FRAMERATE_120,
+};
+
+static int32_t getMetricsCodecEnum(const std::string &mime, const std::string &componentName) {
+ for (const auto & codecStrAndEnum : CODEC_LOOKUP) {
+ if (strcasestr(mime.c_str(), codecStrAndEnum.first) != nullptr ||
+ strcasestr(componentName.c_str(), codecStrAndEnum.first) != nullptr) {
+ return codecStrAndEnum.second;
+ }
+ }
+ return MEDIA_CODEC_RENDERED__CODEC__CODEC_UNKNOWN;
+}
+
+static int32_t getMetricsResolutionEnum(int32_t width, int32_t height) {
+ if (width == 0 || height == 0) {
+ return MEDIA_CODEC_RENDERED__RESOLUTION__RESOLUTION_ZERO;
+ }
+ int64_t pixels = int64_t(width) * height / 1000;
+ if (width < 0 || height < 0 || pixels > RESOLUTION_LOOKUP[0]) {
+ return MEDIA_CODEC_RENDERED__RESOLUTION__RESOLUTION_INVALID;
+ }
+ for (int32_t resolutionEnum : RESOLUTION_LOOKUP) {
+ if (pixels > resolutionEnum) {
+ return resolutionEnum;
+ }
+ }
+ return MEDIA_CODEC_RENDERED__RESOLUTION__RESOLUTION_ZERO;
+}
+
+static int32_t getMetricsFramerateEnum(float inFramerate) {
+ if (inFramerate == -1.0f) {
+ return MEDIA_CODEC_RENDERED__CONTENT_FRAMERATE__FRAMERATE_UNDETERMINED;
+ }
+ if (inFramerate == -2.0f) {
+ return MEDIA_CODEC_RENDERED__CONTENT_FRAMERATE__FRAMERATE_24_3_2_PULLDOWN;
+ }
+ int framerate = int(inFramerate * 100); // Table is in hundredths of frames per second
+ static const int framerateTolerance = 40; // 0.4 fps, expressed in hundredths like the table
+ for (int32_t framerateEnum : FRAMERATE_LOOKUP) {
+ if (abs(framerate - framerateEnum) < framerateTolerance) {
+ return framerateEnum;
+ }
+ }
+ return MEDIA_CODEC_RENDERED__CONTENT_FRAMERATE__FRAMERATE_UNKNOWN;
+}
+
+static int32_t getMetricsHdrFormatEnum(std::string &mime, std::string &componentName,
+ int32_t configColorTransfer, int32_t parsedColorTransfer,
+ int32_t hdr10StaticInfo, int32_t hdr10PlusInfo) {
+ if (hdr10PlusInfo) {
+ return MEDIA_CODEC_RENDERED__HDR_FORMAT__HDR_FORMAT_HDR10_PLUS;
+ }
+ if (hdr10StaticInfo) {
+ return MEDIA_CODEC_RENDERED__HDR_FORMAT__HDR_FORMAT_HDR10;
+ }
+ // 7 = COLOR_TRANSFER_HLG in MediaCodecConstants.h
+ if (configColorTransfer == 7 || parsedColorTransfer == 7) {
+ return MEDIA_CODEC_RENDERED__HDR_FORMAT__HDR_FORMAT_HLG;
+ }
+ if (strcasestr(mime.c_str(), "dolby-vision") != nullptr ||
+ strcasestr(componentName.c_str(), "dvhe") != nullptr ||
+ strcasestr(componentName.c_str(), "dvav") != nullptr ||
+ strcasestr(componentName.c_str(), "dav1") != nullptr) {
+ return MEDIA_CODEC_RENDERED__HDR_FORMAT__HDR_FORMAT_DOLBY_VISION;
+ }
+ return MEDIA_CODEC_RENDERED__HDR_FORMAT__HDR_FORMAT_NONE;
+}
+
+static void parseVector(const std::string &str, std::vector<int32_t> *vector) {
+ if (!mediametrics::stringutils::parseVector(str, vector)) {
+ ALOGE("failed to parse integer vector from '%s'", str.c_str());
+ }
+}
+
bool statsd_codec(const std::shared_ptr<const mediametrics::Item>& item,
const std::shared_ptr<mediametrics::StatsdLog>& statsdLog)
{
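
getMetricsFramerateEnum() above matches the measured rate against the table to within 0.4 fps. A standalone mimic with hypothetical table values (per the code comment, the real FRAMERATE_* constants encode hundredths of fps):

    #include <cstdio>
    #include <cstdlib>

    // Hypothetical hundredths-of-fps table standing in for the
    // MEDIA_CODEC_RENDERED__CONTENT_FRAMERATE__FRAMERATE_* constants.
    static int matchFramerate(float fps) {
        static const int kTable[] = {2400, 2500, 3000, 5000, 6000, 12000};
        const int hundredths = int(fps * 100);
        for (int entry : kTable) {
            if (std::abs(hundredths - entry) < 40) return entry;  // 0.4 fps tolerance
        }
        return -1;  // unknown
    }

    int main() {
        printf("%d\n", matchFramerate(29.97f));  // prints 3000: 29.97 fps maps to 30 fps
        return 0;
    }
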
@@ -48,17 +184,17 @@
AStatsEvent* event = AStatsEvent_obtain();
AStatsEvent_setAtomId(event, stats::media_metrics::MEDIA_CODEC_REPORTED);
- const nsecs_t timestamp_nanos = MediaMetricsService::roundTime(item->getTimestamp());
- AStatsEvent_writeInt64(event, timestamp_nanos);
+ const nsecs_t timestampNanos = MediaMetricsService::roundTime(item->getTimestamp());
+ AStatsEvent_writeInt64(event, timestampNanos);
- std::string package_name = item->getPkgName();
- AStatsEvent_writeString(event, package_name.c_str());
+ std::string packageName = item->getPkgName();
+ AStatsEvent_writeString(event, packageName.c_str());
- int64_t package_version_code = item->getPkgVersionCode();
- AStatsEvent_writeInt64(event, package_version_code);
+ int64_t packageVersionCode = item->getPkgVersionCode();
+ AStatsEvent_writeInt64(event, packageVersionCode);
- int64_t media_apex_version = 0;
- AStatsEvent_writeInt64(event, media_apex_version);
+ int64_t mediaApexVersion = 0;
+ AStatsEvent_writeInt64(event, mediaApexVersion);
// the rest into our own proto
//
@@ -84,17 +220,25 @@
}
AStatsEvent_writeString(event, mode.c_str());
- int32_t encoder = -1;
- if (item->getInt32("android.media.mediacodec.encoder", &encoder)) {
- metrics_proto.set_encoder(encoder);
+ int32_t isEncoder = -1;
+ if (item->getInt32("android.media.mediacodec.encoder", &isEncoder)) {
+ metrics_proto.set_encoder(isEncoder);
}
- AStatsEvent_writeInt32(event, encoder);
+ AStatsEvent_writeInt32(event, isEncoder);
- int32_t secure = -1;
- if (item->getInt32("android.media.mediacodec.secure", &secure)) {
- metrics_proto.set_secure(secure);
+ int32_t isSecure = -1;
+ if (item->getInt32("android.media.mediacodec.secure", &isSecure)) {
+ metrics_proto.set_secure(isSecure);
}
- AStatsEvent_writeInt32(event, secure);
+ AStatsEvent_writeInt32(event, isSecure);
+
+ int32_t isHardware = -1;
+ item->getInt32("android.media.mediacodec.hardware", &isHardware);
+ // not logged to MediaCodecReported or MediametricsCodecReported
+
+ int32_t isTunneled = -1;
+ item->getInt32("android.media.mediacodec.tunneled", &isTunneled);
+ // not logged to MediaCodecReported or MediametricsCodecReported
int32_t width = -1;
if (item->getInt32("android.media.mediacodec.width", &width)) {
@@ -133,79 +277,78 @@
AStatsEvent_writeInt32(event, level);
- int32_t max_width = -1;
- if ( item->getInt32("android.media.mediacodec.maxwidth", &max_width)) {
- metrics_proto.set_max_width(max_width);
+ int32_t maxWidth = -1;
+ if ( item->getInt32("android.media.mediacodec.maxwidth", &maxWidth)) {
+ metrics_proto.set_max_width(maxWidth);
}
- AStatsEvent_writeInt32(event, max_width);
+ AStatsEvent_writeInt32(event, maxWidth);
- int32_t max_height = -1;
- if ( item->getInt32("android.media.mediacodec.maxheight", &max_height)) {
- metrics_proto.set_max_height(max_height);
+ int32_t maxHeight = -1;
+ if ( item->getInt32("android.media.mediacodec.maxheight", &maxHeight)) {
+ metrics_proto.set_max_height(maxHeight);
}
- AStatsEvent_writeInt32(event, max_height);
+ AStatsEvent_writeInt32(event, maxHeight);
- int32_t error_code = -1;
- if ( item->getInt32("android.media.mediacodec.errcode", &error_code)) {
- metrics_proto.set_error_code(error_code);
+ int32_t errorCode = -1;
+ if ( item->getInt32("android.media.mediacodec.errcode", &errorCode)) {
+ metrics_proto.set_error_code(errorCode);
}
- AStatsEvent_writeInt32(event, error_code);
+ AStatsEvent_writeInt32(event, errorCode);
- std::string error_state;
- if ( item->getString("android.media.mediacodec.errstate", &error_state)) {
- metrics_proto.set_error_state(error_state);
+ std::string errorState;
+ if ( item->getString("android.media.mediacodec.errstate", &errorState)) {
+ metrics_proto.set_error_state(errorState);
}
- AStatsEvent_writeString(event, error_state.c_str());
+ AStatsEvent_writeString(event, errorState.c_str());
- int64_t latency_max = -1;
- if (item->getInt64("android.media.mediacodec.latency.max", &latency_max)) {
- metrics_proto.set_latency_max(latency_max);
+ int64_t latencyMax = -1;
+ if (item->getInt64("android.media.mediacodec.latency.max", &latencyMax)) {
+ metrics_proto.set_latency_max(latencyMax);
}
- AStatsEvent_writeInt64(event, latency_max);
+ AStatsEvent_writeInt64(event, latencyMax);
- int64_t latency_min = -1;
- if (item->getInt64("android.media.mediacodec.latency.min", &latency_min)) {
- metrics_proto.set_latency_min(latency_min);
+ int64_t latencyMin = -1;
+ if (item->getInt64("android.media.mediacodec.latency.min", &latencyMin)) {
+ metrics_proto.set_latency_min(latencyMin);
}
- AStatsEvent_writeInt64(event, latency_min);
+ AStatsEvent_writeInt64(event, latencyMin);
- int64_t latency_avg = -1;
- if (item->getInt64("android.media.mediacodec.latency.avg", &latency_avg)) {
- metrics_proto.set_latency_avg(latency_avg);
+ int64_t latencyAvg = -1;
+ if (item->getInt64("android.media.mediacodec.latency.avg", &latencyAvg)) {
+ metrics_proto.set_latency_avg(latencyAvg);
}
- AStatsEvent_writeInt64(event, latency_avg);
+ AStatsEvent_writeInt64(event, latencyAvg);
- int64_t latency_count = -1;
- if (item->getInt64("android.media.mediacodec.latency.n", &latency_count)) {
- metrics_proto.set_latency_count(latency_count);
+ int64_t latencyCount = -1;
+ if (item->getInt64("android.media.mediacodec.latency.n", &latencyCount)) {
+ metrics_proto.set_latency_count(latencyCount);
}
- AStatsEvent_writeInt64(event, latency_count);
+ AStatsEvent_writeInt64(event, latencyCount);
- int64_t latency_unknown = -1;
- if (item->getInt64("android.media.mediacodec.latency.unknown", &latency_unknown)) {
- metrics_proto.set_latency_unknown(latency_unknown);
+ int64_t latencyUnknown = -1;
+ if (item->getInt64("android.media.mediacodec.latency.unknown", &latencyUnknown)) {
+ metrics_proto.set_latency_unknown(latencyUnknown);
}
- AStatsEvent_writeInt64(event, latency_unknown);
+ AStatsEvent_writeInt64(event, latencyUnknown);
- int32_t queue_secure_input_buffer_error = -1;
+ int32_t queueSecureInputBufferError = -1;
if (item->getInt32("android.media.mediacodec.queueSecureInputBufferError",
- &queue_secure_input_buffer_error)) {
- metrics_proto.set_queue_secure_input_buffer_error(queue_secure_input_buffer_error);
+ &queueSecureInputBufferError)) {
+ metrics_proto.set_queue_secure_input_buffer_error(queueSecureInputBufferError);
}
- AStatsEvent_writeInt32(event, queue_secure_input_buffer_error);
+ AStatsEvent_writeInt32(event, queueSecureInputBufferError);
- int32_t queue_input_buffer_error = -1;
- if (item->getInt32("android.media.mediacodec.queueInputBufferError",
- &queue_input_buffer_error)) {
- metrics_proto.set_queue_input_buffer_error(queue_input_buffer_error);
+ int32_t queueInputBufferError = -1;
+ if (item->getInt32("android.media.mediacodec.queueInputBufferError", &queueInputBufferError)) {
+ metrics_proto.set_queue_input_buffer_error(queueInputBufferError);
}
- AStatsEvent_writeInt32(event, queue_input_buffer_error);
+ AStatsEvent_writeInt32(event, queueInputBufferError);
- std::string bitrate_mode;
- if (item->getString("android.media.mediacodec.bitrate_mode", &bitrate_mode)) {
- metrics_proto.set_bitrate_mode(bitrate_mode);
+ std::string bitrateMode;
+ if (item->getString("android.media.mediacodec.bitrate_mode", &bitrateMode)) {
+ metrics_proto.set_bitrate_mode(bitrateMode);
}
- AStatsEvent_writeString(event, bitrate_mode.c_str());
+ AStatsEvent_writeString(event, bitrateMode.c_str());
int32_t bitrate = -1;
if (item->getInt32("android.media.mediacodec.bitrate", &bitrate)) {
@@ -213,18 +356,18 @@
}
AStatsEvent_writeInt32(event, bitrate);
- int64_t lifetime_millis = -1;
- if (item->getInt64("android.media.mediacodec.lifetimeMs", &lifetime_millis)) {
- lifetime_millis = mediametrics::bucket_time_minutes(lifetime_millis);
- metrics_proto.set_lifetime_millis(lifetime_millis);
+ int64_t lifetimeMillis = -1;
+ if (item->getInt64("android.media.mediacodec.lifetimeMs", &lifetimeMillis)) {
+ lifetimeMillis = mediametrics::bucket_time_minutes(lifetimeMillis);
+ metrics_proto.set_lifetime_millis(lifetimeMillis);
}
- AStatsEvent_writeInt64(event, lifetime_millis);
+ AStatsEvent_writeInt64(event, lifetimeMillis);
- int64_t playback_duration_sec = -1;
- item->getInt64("android.media.mediacodec.playback-duration-sec", &playback_duration_sec);
+ int64_t playbackDurationSec = -1;
+ item->getInt64("android.media.mediacodec.playback-duration-sec", &playbackDurationSec);
// DO NOT record playback-duration in the metrics_proto - it should only
// exist in the flattened atom
- AStatsEvent_writeInt64(event, playback_duration_sec);
+ AStatsEvent_writeInt64(event, playbackDurationSec);
std::string sessionId;
if (item->getString("android.media.mediacodec.log-session-id", &sessionId)) {
@@ -505,61 +648,188 @@
}
AStatsEvent_writeInt32(event, resolutionChangeCount);
+ int32_t componentColorFormat = -1;
+ if (item->getInt32("android.media.mediacodec.component-color-format", &componentColorFormat)) {
+ metrics_proto.set_component_color_format(componentColorFormat);
+ }
+ AStatsEvent_writeInt32(event, componentColorFormat);
+
+ int64_t firstRenderTimeUs = -1;
+ item->getInt64("android.media.mediacodec.first-render-time-us", &firstRenderTimeUs);
+ int64_t framesReleased = -1;
+ item->getInt64("android.media.mediacodec.frames-released", &framesReleased);
+ int64_t framesRendered = -1;
+ item->getInt64("android.media.mediacodec.frames-rendered", &framesRendered);
+ int64_t framesDropped = -1;
+ item->getInt64("android.media.mediacodec.frames-dropped", &framesDropped);
+ int64_t framesSkipped = -1;
+ item->getInt64("android.media.mediacodec.frames-skipped", &framesSkipped);
+ double framerateContent = -1;
+ item->getDouble("android.media.mediacodec.framerate-content", &framerateContent);
+ double framerateActual = -1;
+ item->getDouble("android.media.mediacodec.framerate-actual", &framerateActual);
+ int64_t freezeScore = -1;
+ item->getInt64("android.media.mediacodec.freeze-score", &freezeScore);
+ double freezeRate = -1;
+ item->getDouble("android.media.mediacodec.freeze-rate", &freezeRate);
+ std::string freezeScoreHistogramStr;
+ item->getString("android.media.mediacodec.freeze-score-histogram", &freezeScoreHistogramStr);
+ std::string freezeScoreHistogramBucketsStr;
+ item->getString("android.media.mediacodec.freeze-score-histogram-buckets",
+ &freezeScoreHistogramBucketsStr);
+ std::string freezeDurationMsHistogramStr;
+ item->getString("android.media.mediacodec.freeze-duration-ms-histogram",
+ &freezeDurationMsHistogramStr);
+ std::string freezeDurationMsHistogramBucketsStr;
+ item->getString("android.media.mediacodec.freeze-duration-ms-histogram-buckets",
+ &freezeDurationMsHistogramBucketsStr);
+ std::string freezeDistanceMsHistogramStr;
+ item->getString("android.media.mediacodec.freeze-distance-ms-histogram",
+ &freezeDistanceMsHistogramStr);
+ std::string freezeDistanceMsHistogramBucketsStr;
+ item->getString("android.media.mediacodec.freeze-distance-ms-histogram-buckets",
+ &freezeDistanceMsHistogramBucketsStr);
+ int64_t judderScore = -1;
+ item->getInt64("android.media.mediacodec.judder-score", &judderScore);
+ double judderRate = -1;
+ item->getDouble("android.media.mediacodec.judder-rate", &judderRate);
+ std::string judderScoreHistogramStr;
+ item->getString("android.media.mediacodec.judder-score-histogram", &judderScoreHistogramStr);
+ std::string judderScoreHistogramBucketsStr;
+ item->getString("android.media.mediacodec.judder-score-histogram-buckets",
+ &judderScoreHistogramBucketsStr);
+
int err = AStatsEvent_write(event);
if (err < 0) {
ALOGE("Failed to write codec metrics to statsd (%d)", err);
}
AStatsEvent_release(event);
+ if (framesRendered > 0) {
+ int32_t statsUid = item->getUid();
+ int64_t statsCodecId = codecId;
+ char const *statsLogSessionId = sessionId.c_str();
+ int32_t statsIsHardware = isHardware;
+ int32_t statsIsSecure = isSecure;
+ int32_t statsIsTunneled = isTunneled;
+ int32_t statsCodec = getMetricsCodecEnum(mime, codec);
+ int32_t statsResolution = getMetricsResolutionEnum(width, height);
+ int32_t statsBitrate = BITRATE_UNKNOWN;
+ int32_t statsContentFramerate = getMetricsFramerateEnum(framerateContent);
+ int32_t statsActualFramerate = getMetricsFramerateEnum(framerateActual);
+ int32_t statsHdrFormat = getMetricsHdrFormatEnum(mime, codec, configColorTransfer,
+ parsedColorTransfer, hdrStaticInfo,
+ hdr10PlusInfo);
+ int64_t statsFirstRenderTimeUs = firstRenderTimeUs;
+ int64_t statsPlaybackDurationSeconds = playbackDurationSec;
+ int64_t statsFramesTotal = framesReleased + framesSkipped;
+ int64_t statsFramesReleased = framesReleased;
+ int64_t statsFramesRendered = framesRendered;
+ int64_t statsFramesDropped = framesDropped;
+ int64_t statsFramesSkipped = framesSkipped;
+ float statsFrameDropRate = float(double(framesDropped) / statsFramesTotal);
+ float statsFrameSkipRate = float(double(framesSkipped) / statsFramesTotal);
+ float statsFrameSkipDropRate = float(double(framesSkipped + framesDropped) /
+ statsFramesTotal);
+ int64_t statsFreezeScore = freezeScore;
+ float statsFreezeRate = freezeRate;
+ std::vector<int32_t> statsFreezeDurationMsHistogram;
+ parseVector(freezeDurationMsHistogramStr, &statsFreezeDurationMsHistogram);
+ std::vector<int32_t> statsFreezeDurationMsHistogramBuckets;
+ parseVector(freezeDurationMsHistogramBucketsStr, &statsFreezeDurationMsHistogramBuckets);
+ std::vector<int32_t> statsFreezeDistanceMsHistogram;
+ parseVector(freezeDistanceMsHistogramStr, &statsFreezeDistanceMsHistogram);
+ std::vector<int32_t> statsFreezeDistanceMsHistogramBuckets;
+ parseVector(freezeDistanceMsHistogramBucketsStr, &statsFreezeDistanceMsHistogramBuckets);
+ int64_t statsJudderScore = judderScore;
+ float statsJudderRate = judderRate;
+ std::vector<int32_t> statsJudderScoreHistogram;
+ parseVector(judderScoreHistogramStr, &statsJudderScoreHistogram);
+ std::vector<int32_t> statsJudderScoreHistogramBuckets;
+ parseVector(judderScoreHistogramBucketsStr, &statsJudderScoreHistogramBuckets);
+ int result = stats_write(
+ MEDIA_CODEC_RENDERED,
+ statsUid,
+ statsCodecId,
+ statsLogSessionId,
+ statsIsHardware,
+ statsIsSecure,
+ statsIsTunneled,
+ statsCodec,
+ statsResolution,
+ statsBitrate,
+ statsContentFramerate,
+ statsActualFramerate,
+ statsHdrFormat,
+ statsFirstRenderTimeUs,
+ statsPlaybackDurationSeconds,
+ statsFramesTotal,
+ statsFramesReleased,
+ statsFramesRendered,
+ statsFramesDropped,
+ statsFramesSkipped,
+ statsFrameDropRate,
+ statsFrameSkipRate,
+ statsFrameSkipDropRate,
+ statsFreezeScore,
+ statsFreezeRate,
+ statsFreezeDurationMsHistogram,
+ statsFreezeDurationMsHistogramBuckets,
+ statsFreezeDistanceMsHistogram,
+ statsFreezeDistanceMsHistogramBuckets,
+ statsJudderScore,
+ statsJudderRate,
+ statsJudderScoreHistogram,
+ statsJudderScoreHistogramBuckets);
+ ALOGE_IF(result < 0, "Failed to record MEDIA_CODEC_RENDERED atom (%d)", result);
+ }
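
Worked example for the rate math above (hypothetical counts): with framesReleased = 100, framesSkipped = 10 and framesDropped = 5, statsFramesTotal = 100 + 10 = 110, so statsFrameDropRate = 5/110 ≈ 0.045, statsFrameSkipRate = 10/110 ≈ 0.091, and statsFrameSkipDropRate = 15/110 ≈ 0.136. The block runs only when framesRendered > 0, and the released/skipped counts are expected to accompany a positive rendered count, keeping the denominator positive.
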
+
std::string serialized;
if (!metrics_proto.SerializeToString(&serialized)) {
ALOGE("Failed to serialize codec metrics");
return false;
}
- const stats::media_metrics::BytesField bf_serialized( serialized.c_str(), serialized.size());
+ const stats::media_metrics::BytesField bf_serialized(serialized.c_str(), serialized.size());
const int result = stats::media_metrics::stats_write(stats::media_metrics::MEDIAMETRICS_CODEC_REPORTED,
- timestamp_nanos, package_name.c_str(), package_version_code,
- media_apex_version,
+ timestampNanos, packageName.c_str(), packageVersionCode,
+ mediaApexVersion,
bf_serialized);
std::stringstream log;
log << "result:" << result << " {"
<< " mediametrics_codec_reported:"
<< stats::media_metrics::MEDIAMETRICS_CODEC_REPORTED
- << " timestamp_nanos:" << timestamp_nanos
- << " package_name:" << package_name
- << " package_version_code:" << package_version_code
- << " media_apex_version:" << media_apex_version
-
+ << " timestamp_nanos:" << timestampNanos
+ << " package_name:" << packageName
+ << " package_version_code:" << packageVersionCode
+ << " media_apex_version:" << mediaApexVersion
<< " codec:" << codec
<< " mime:" << mime
<< " mode:" << mode
- << " encoder:" << encoder
- << " secure:" << secure
+ << " encoder:" << isEncoder
+ << " secure:" << isSecure
<< " width:" << width
<< " height:" << height
<< " rotation:" << rotation
<< " crypto:" << crypto
<< " profile:" << profile
-
<< " level:" << level
- << " max_width:" << max_width
- << " max_height:" << max_height
- << " error_code:" << error_code
- << " error_state:" << error_state
- << " latency_max:" << latency_max
- << " latency_min:" << latency_min
- << " latency_avg:" << latency_avg
- << " latency_count:" << latency_count
- << " latency_unknown:" << latency_unknown
-
- << " queue_input_buffer_error:" << queue_input_buffer_error
- << " queue_secure_input_buffer_error:" << queue_secure_input_buffer_error
- << " bitrate_mode:" << bitrate_mode
+ << " max_width:" << maxWidth
+ << " max_height:" << maxHeight
+ << " error_code:" << errorCode
+ << " error_state:" << errorState
+ << " latency_max:" << latencyMax
+ << " latency_min:" << latencyMin
+ << " latency_avg:" << latencyAvg
+ << " latency_count:" << latencyCount
+ << " latency_unknown:" << latencyUnknown
+ << " queue_input_buffer_error:" << queueInputBufferError
+ << " queue_secure_input_buffer_error:" << queueSecureInputBufferError
+ << " bitrate_mode:" << bitrateMode
<< " bitrate:" << bitrate
<< " original_bitrate:" << originalBitrate
- << " lifetime_millis:" << lifetime_millis
- << " playback_duration_seconds:" << playback_duration_sec
+ << " lifetime_millis:" << lifetimeMillis
+ << " playback_duration_seconds:" << playbackDurationSec
<< " log_session_id:" << sessionId
<< " channel_count:" << channelCount
<< " sample_rate:" << sampleRate
@@ -572,7 +842,6 @@
<< " operating_rate:" << operatingRate
<< " priority:" << priority
<< " shaping_enhanced:" << shapingEnhanced
-
<< " qp_i_min:" << qpIMin
<< " qp_i_max:" << qpIMax
<< " qp_p_min:" << qpPMin
diff --git a/services/mediametrics/tests/mediametrics_tests.cpp b/services/mediametrics/tests/mediametrics_tests.cpp
index bc7b47b..4a6aee4 100644
--- a/services/mediametrics/tests/mediametrics_tests.cpp
+++ b/services/mediametrics/tests/mediametrics_tests.cpp
@@ -17,9 +17,10 @@
#define LOG_TAG "mediametrics_tests"
#include <utils/Log.h>
-
#include <stdio.h>
+#include <string>
#include <unordered_set>
+#include <vector>
#include <gtest/gtest.h>
#include <media/MediaMetricsItem.h>
@@ -30,6 +31,7 @@
#include <system/audio.h>
using namespace android;
+using android::mediametrics::stringutils::parseVector;
static size_t countNewlines(const char *s) {
size_t count = 0;
@@ -57,6 +59,35 @@
ASSERT_EQ(false, android::mediametrics::startsWith(s, std::string("est")));
}
+TEST(mediametrics_tests, parseVector) {
+ {
+ std::vector<int32_t> values;
+ EXPECT_EQ(true, parseVector("0{4,300,0,-112343,350}9", &values));
+ EXPECT_EQ(values, std::vector<int32_t>({0, 4, 300, 0, -112343, 350, 9}));
+ }
+ {
+ std::vector<int32_t> values;
+ EXPECT_EQ(true, parseVector("53", &values));
+ EXPECT_EQ(values, std::vector<int32_t>({53}));
+ }
+ {
+ std::vector<int32_t> values;
+ EXPECT_EQ(false, parseVector("5{3,6*3}3", &values));
+ EXPECT_EQ(values, std::vector<int32_t>({}));
+ }
+ {
+ std::vector<int32_t> values = {1}; // should still be this when parsing fails
+ std::vector<int32_t> expected = {1};
+ EXPECT_EQ(false, parseVector("51342abcd,1232", &values));
+ EXPECT_EQ(values, expected);
+ }
+ {
+ std::vector<int32_t> values = {2}; // should still be this when parsing fails
+ EXPECT_EQ(false, parseVector("12345678901234,12345678901234", &values));
+ EXPECT_EQ(values, std::vector<int32_t>({2}));
+ }
+}
+
TEST(mediametrics_tests, defer) {
bool check = false;
{