Capture basic metrics for video frame rendering
Capture and compute frame drops, frame skips and frame rates for video
playback. Report these metrics through the existing media.metrics
pipeline for bug reports.
Bug: 234833109
Test: atest VideoRenderQualityTracker_test
Test: Play YouTube video, skip forward and backward, and check 'adb
shell dumpsys media.metrics'
Change-Id: Ie8b4d2c85cbc4b94d30926868e9aa4aa5cccf729
diff --git a/media/libstagefright/Android.bp b/media/libstagefright/Android.bp
index 569a25f..f1534c9 100644
--- a/media/libstagefright/Android.bp
+++ b/media/libstagefright/Android.bp
@@ -270,6 +270,7 @@
"SurfaceUtils.cpp",
"ThrottledSource.cpp",
"Utils.cpp",
+ "VideoRenderQualityTracker.cpp",
"VideoFrameSchedulerBase.cpp",
"VideoFrameScheduler.cpp",
],
diff --git a/media/libstagefright/MediaCodec.cpp b/media/libstagefright/MediaCodec.cpp
index 080c3d0..15817d2 100644
--- a/media/libstagefright/MediaCodec.cpp
+++ b/media/libstagefright/MediaCodec.cpp
@@ -30,7 +30,6 @@
#include <C2Buffer.h>
#include "include/SoftwareRenderer.h"
-#include "PlaybackDurationAccumulator.h"
#include <android/binder_manager.h>
#include <android/content/pm/IPackageManagerNative.h>
@@ -199,6 +198,14 @@
static const char *kCodecPlaybackDurationSec =
"android.media.mediacodec.playback-duration-sec"; /* in sec */
+static const char *kCodecFramesReleased = "android.media.mediacodec.frames.released";
+static const char *kCodecFramesRendered = "android.media.mediacodec.frames.rendered";
+static const char *kCodecFramesSkipped = "android.media.mediacodec.frames.skipped";
+static const char *kCodecFramesDropped = "android.media.mediacodec.frames.dropped";
+static const char *kCodecFramerateContent = "android.media.mediacodec.framerate.content";
+static const char *kCodecFramerateDesired = "android.media.mediacodec.framerate.desired";
+static const char *kCodecFramerateActual = "android.media.mediacodec.framerate.actual";
+
/* -1: shaper disabled
>=0: number of fields changed */
static const char *kCodecShapingEnhanced = "android.media.mediacodec.shaped";
@@ -960,8 +967,7 @@
mHaveInputSurface(false),
mHavePendingInputBuffers(false),
mCpuBoostRequested(false),
- mPlaybackDurationAccumulator(new PlaybackDurationAccumulator()),
- mIsSurfaceToScreen(false),
+ mIsSurfaceToDisplay(false),
mLatencyUnknown(0),
mBytesEncoded(0),
mEarliestEncodedPtsUs(INT64_MAX),
@@ -1096,6 +1102,20 @@
mediametrics_setInt32(mMetricsHandle, kCodecResolutionChangeCount,
mReliabilityContextMetrics.resolutionChangeCount);
+ // Video rendering quality metrics
+ {
+ const VideoRenderQualityMetrics& m = mVideoRenderQualityTracker.getMetrics();
+ if (m.frameRenderedCount > 0) {
+ mediametrics_setInt64(mMetricsHandle, kCodecFramesReleased, m.frameReleasedCount);
+ mediametrics_setInt64(mMetricsHandle, kCodecFramesRendered, m.frameRenderedCount);
+ mediametrics_setInt64(mMetricsHandle, kCodecFramesSkipped, m.frameSkippedCount);
+ mediametrics_setInt64(mMetricsHandle, kCodecFramesDropped, m.frameDroppedCount);
+ mediametrics_setDouble(mMetricsHandle, kCodecFramerateContent, m.contentFrameRate);
+ mediametrics_setDouble(mMetricsHandle, kCodecFramerateDesired, m.desiredFrameRate);
+ mediametrics_setDouble(mMetricsHandle, kCodecFramerateActual, m.actualFrameRate);
+ }
+ }
+
if (mLatencyHist.getCount() != 0 ) {
mediametrics_setInt64(mMetricsHandle, kCodecLatencyMax, mLatencyHist.getMax());
mediametrics_setInt64(mMetricsHandle, kCodecLatencyMin, mLatencyHist.getMin());
@@ -1111,7 +1131,7 @@
if (mLatencyUnknown > 0) {
mediametrics_setInt64(mMetricsHandle, kCodecLatencyUnknown, mLatencyUnknown);
}
- int64_t playbackDurationSec = mPlaybackDurationAccumulator->getDurationInSeconds();
+ int64_t playbackDurationSec = mPlaybackDurationAccumulator.getDurationInSeconds();
if (playbackDurationSec > 0) {
mediametrics_setInt64(mMetricsHandle, kCodecPlaybackDurationSec, playbackDurationSec);
}
@@ -1436,25 +1456,33 @@
ALOGV("TunnelPeekState: %s -> %s", asString(previousState), asString(mTunnelPeekState));
}
-void MediaCodec::updatePlaybackDuration(const sp<AMessage> &msg) {
+void MediaCodec::processRenderedFrames(const sp<AMessage> &msg) {
int what = 0;
msg->findInt32("what", &what);
if (msg->what() != kWhatCodecNotify && what != kWhatOutputFramesRendered) {
static bool logged = false;
if (!logged) {
logged = true;
- ALOGE("updatePlaybackDuration: expected kWhatOuputFramesRendered (%d)", msg->what());
+ ALOGE("processRenderedFrames: expected kWhatOutputFramesRendered (%d)", msg->what());
}
return;
}
- // Playback duration only counts if the buffers are going to the screen.
- if (!mIsSurfaceToScreen) {
- return;
- }
- int64_t renderTimeNs;
- size_t index = 0;
- while (msg->findInt64(AStringPrintf("%zu-system-nano", index++).c_str(), &renderTimeNs)) {
- mPlaybackDurationAccumulator->processRenderTime(renderTimeNs);
+ // Rendered frames only matter if they're being sent to the display
+ if (mIsSurfaceToDisplay) {
+ int64_t renderTimeNs;
+ for (size_t index = 0;
+ msg->findInt64(AStringPrintf("%zu-system-nano", index).c_str(), &renderTimeNs);
+ index++) {
+ // Capture metrics for playback duration
+ mPlaybackDurationAccumulator.onFrameRendered(renderTimeNs);
+ // Capture metrics for quality
+ int64_t mediaTimeUs = 0;
+ if (!msg->findInt64(AStringPrintf("%zu-media-time-us", index).c_str(), &mediaTimeUs)) {
+ ALOGE("processRenderedFrames: no media time found");
+ continue;
+ }
+ mVideoRenderQualityTracker.onFrameRendered(mediaTimeUs, renderTimeNs);
+ }
}
}
@@ -3964,7 +3992,7 @@
asString(previousState),
asString(TunnelPeekState::kBufferRendered));
}
- updatePlaybackDuration(msg);
+ processRenderedFrames(msg);
// check that we have a notification set
if (mOnFrameRenderedNotification != NULL) {
sp<AMessage> notify = mOnFrameRenderedNotification->dup();
@@ -4158,6 +4186,10 @@
mState, stateString(mState).c_str());
break;
}
+
+ if (mIsSurfaceToDisplay) {
+ mVideoRenderQualityTracker.resetForDiscontinuity();
+ }
// Notify the RM that the codec has been stopped.
ClientConfigParcel clientConfig;
initClientConfigParcel(clientConfig);
@@ -4213,6 +4245,10 @@
break;
}
+ if (mIsSurfaceToDisplay) {
+ mVideoRenderQualityTracker.resetForDiscontinuity();
+ }
+
if (mFlags & kFlagIsAsync) {
setState(FLUSHED);
} else {
@@ -5927,7 +5963,9 @@
// If rendering to the screen, then schedule a time in the future to poll to see if this
// frame was ever rendered to seed onFrameRendered callbacks.
- if (mIsSurfaceToScreen) {
+ if (mIsSurfaceToDisplay) {
+ noRenderTime ? mVideoRenderQualityTracker.onFrameReleased(mediaTimeUs)
+ : mVideoRenderQualityTracker.onFrameReleased(mediaTimeUs, renderTimeNs);
// can't initialize this in the constructor because the Looper parent class needs to be
// initialized first
if (mMsgPollForRenderedBuffers == nullptr) {
@@ -5957,6 +5995,11 @@
ALOGI("rendring output error %d", err);
}
} else {
+ if (mIsSurfaceToDisplay) {
+ int64_t mediaTimeUs = -1;
+ buffer->meta()->findInt64("timeUs", &mediaTimeUs);
+ mVideoRenderQualityTracker.onFrameSkipped(mediaTimeUs);
+ }
mBufferChannel->discardBuffer(buffer);
}
@@ -6023,7 +6066,7 @@
// in case we don't connect, ensure that we don't signal the surface is
// connected to the screen
- mIsSurfaceToScreen = false;
+ mIsSurfaceToDisplay = false;
err = nativeWindowConnect(surface.get(), "connectToSurface");
if (err == OK) {
@@ -6053,7 +6096,7 @@
// keep track whether or not the buffers of the connected surface go to the screen
int result = 0;
surface->query(NATIVE_WINDOW_QUEUES_TO_WINDOW_COMPOSER, &result);
- mIsSurfaceToScreen = result != 0;
+ mIsSurfaceToDisplay = result != 0;
}
}
// do not return ALREADY_EXISTS unless surfaces are the same
@@ -6071,7 +6114,7 @@
}
// assume disconnected even on error
mSurface.clear();
- mIsSurfaceToScreen = false;
+ mIsSurfaceToDisplay = false;
}
return err;
}
diff --git a/media/libstagefright/VideoRenderQualityTracker.cpp b/media/libstagefright/VideoRenderQualityTracker.cpp
new file mode 100644
index 0000000..7224269
--- /dev/null
+++ b/media/libstagefright/VideoRenderQualityTracker.cpp
@@ -0,0 +1,299 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "VideoRenderQualityTracker"
+#include <utils/Log.h>
+
+#include <media/stagefright/VideoRenderQualityTracker.h>
+
+#include <assert.h>
+#include <cmath>
+#include <sys/time.h>
+
+namespace android {
+
+VideoRenderQualityMetrics::VideoRenderQualityMetrics() {
+ firstFrameRenderTimeUs = 0;
+ frameReleasedCount = 0;
+ frameRenderedCount = 0;
+ frameDroppedCount = 0;
+ frameSkippedCount = 0;
+ contentFrameRate = FRAME_RATE_UNDETERMINED;
+ desiredFrameRate = FRAME_RATE_UNDETERMINED;
+ actualFrameRate = FRAME_RATE_UNDETERMINED;
+}
+
+VideoRenderQualityTracker::Configuration::Configuration() {
+ // Assume that the app is skipping frames because it's detected that the frame couldn't be
+ // rendered in time.
+ areSkippedFramesDropped = true;
+
+ // 400ms is 8 frames at 20 frames per second and 24 frames at 60 frames per second
+ maxExpectedContentFrameDurationUs = 400 * 1000;
+
+ // Allow for 2 milliseconds of deviation when detecting frame rates
+ frameRateDetectionToleranceUs = 2 * 1000;
+
+ // Allow for a tolerance of 200 milliseconds for determining if we moved forward in content time
+ // because of frame drops for live content, or because the user is seeking.
+ contentTimeAdvancedForLiveContentToleranceUs = 200 * 1000;
+}
+
+VideoRenderQualityTracker::VideoRenderQualityTracker() :
+ mConfiguration(Configuration()) {
+ resetForDiscontinuity();
+}
+
+VideoRenderQualityTracker::VideoRenderQualityTracker(const Configuration &configuration) :
+ mConfiguration(configuration) {
+ resetForDiscontinuity();
+}
+
+void VideoRenderQualityTracker::onFrameSkipped(int64_t contentTimeUs) {
+ // Frames skipped at the beginning shouldn't really be counted as skipped frames, since the
+ // app might be seeking to a starting point that isn't the first key frame.
+ if (mLastRenderTimeUs == -1) {
+ return;
+ }
+ // Frames skipped at the end of playback shouldn't be counted as skipped frames, since the
+ // app could be terminating the playback. The pending count will be added to the metrics if and
+ // when the next frame is rendered.
+ mPendingSkippedFrameContentTimeUsList.push_back(contentTimeUs);
+}
+
+void VideoRenderQualityTracker::onFrameReleased(int64_t contentTimeUs) {
+ onFrameReleased(contentTimeUs, nowUs() * 1000);
+}
+
+void VideoRenderQualityTracker::onFrameReleased(int64_t contentTimeUs,
+ int64_t desiredRenderTimeNs) {
+ int64_t desiredRenderTimeUs = desiredRenderTimeNs / 1000;
+ resetIfDiscontinuity(contentTimeUs, desiredRenderTimeUs);
+ mMetrics.frameReleasedCount++;
+ mNextExpectedRenderedFrameQueue.push({contentTimeUs, desiredRenderTimeUs});
+ mLastContentTimeUs = contentTimeUs;
+}
+
+void VideoRenderQualityTracker::onFrameRendered(int64_t contentTimeUs, int64_t actualRenderTimeNs) {
+ int64_t actualRenderTimeUs = actualRenderTimeNs / 1000;
+
+ // Now that a frame has been rendered, the previously skipped frames can be processed as skipped
+ // frames since the app is not skipping them to terminate playback.
+ for (int64_t contentTimeUs : mPendingSkippedFrameContentTimeUsList) {
+ processMetricsForSkippedFrame(contentTimeUs);
+ }
+ mPendingSkippedFrameContentTimeUsList = {};
+
+ static const FrameInfo noFrame = {-1, -1};
+ FrameInfo nextExpectedFrame = noFrame;
+ while (!mNextExpectedRenderedFrameQueue.empty()) {
+ nextExpectedFrame = mNextExpectedRenderedFrameQueue.front();
+ mNextExpectedRenderedFrameQueue.pop();
+ // Happy path - the rendered frame is what we expected it to be
+ if (contentTimeUs == nextExpectedFrame.contentTimeUs) {
+ break;
+ }
+ // This isn't really supposed to happen - the next rendered frame should be the expected
+ // frame, or, if there's frame drops, it will be a frame later in the content stream
+ if (contentTimeUs < nextExpectedFrame.contentTimeUs) {
+ ALOGW("Rendered frame is earlier than the next expected frame (%lld, %lld)",
+ (long long) contentTimeUs, (long long) nextExpectedFrame.contentTimeUs);
+ break;
+ }
+ processMetricsForDroppedFrame(nextExpectedFrame.contentTimeUs,
+ nextExpectedFrame.desiredRenderTimeUs);
+ }
+ processMetricsForRenderedFrame(nextExpectedFrame.contentTimeUs,
+ nextExpectedFrame.desiredRenderTimeUs, actualRenderTimeUs);
+ mLastRenderTimeUs = actualRenderTimeUs;
+}
+
+const VideoRenderQualityMetrics &VideoRenderQualityTracker::getMetrics() const {
+ return mMetrics;
+}
+
+void VideoRenderQualityTracker::resetForDiscontinuity() {
+ mLastContentTimeUs = -1;
+ mLastRenderTimeUs = -1;
+
+ // Don't worry about tracking frame rendering times from now up until playback catches up to the
+ // discontinuity. While stuttering or freezing could be found in the next few frames, the impact
+ // to the user is minimal, so better to just keep things simple and don't bother.
+ mNextExpectedRenderedFrameQueue = {};
+
+ // Ignore any frames that were skipped just prior to the discontinuity.
+ mPendingSkippedFrameContentTimeUsList = {};
+
+ // All frame durations can be now ignored since all bets are off now on what the render
+ // durations should be after the discontinuity.
+ for (int i = 0; i < FrameDurationUs::SIZE; ++i) {
+ mActualFrameDurationUs[i] = -1;
+ mDesiredFrameDurationUs[i] = -1;
+ mContentFrameDurationUs[i] = -1;
+ }
+}
+
+bool VideoRenderQualityTracker::resetIfDiscontinuity(int64_t contentTimeUs,
+ int64_t desiredRenderTimeUs) {
+ if (mLastContentTimeUs == -1) {
+ resetForDiscontinuity();
+ return true;
+ }
+ if (contentTimeUs < mLastContentTimeUs) {
+ ALOGI("Video playback jumped %d ms backwards in content time (%d -> %d)",
+ int((mLastContentTimeUs - contentTimeUs) / 1000), int(mLastContentTimeUs / 1000),
+ int(contentTimeUs / 1000));
+ resetForDiscontinuity();
+ return true;
+ }
+ if (contentTimeUs - mLastContentTimeUs > mConfiguration.maxExpectedContentFrameDurationUs) {
+ // The content frame duration could be long due to frame drops for live content. This can be
+ // detected by looking at the app's desired rendering duration. If the app's rendered frame
+ // duration is roughly the same as the content's frame duration, then it is assumed that
+ // the forward discontinuity is due to frame drops for live content. A false positive can
+ // occur if the time the user spends seeking is equal to the duration of the seek. This is
+ // very unlikely to occur in practice but CAN occur - the user starts seeking forward, gets
+ // distracted, and then returns to seeking forward.
+ int64_t contentFrameDurationUs = contentTimeUs - mLastContentTimeUs;
+ int64_t desiredFrameDurationUs = desiredRenderTimeUs - mLastRenderTimeUs;
+ bool skippedForwardDueToLiveContentFrameDrops =
+ abs(contentFrameDurationUs - desiredFrameDurationUs) <
+ mConfiguration.contentTimeAdvancedForLiveContentToleranceUs;
+ if (!skippedForwardDueToLiveContentFrameDrops) {
+ ALOGI("Video playback jumped %d ms forward in content time (%d -> %d) ",
+ int((contentTimeUs - mLastContentTimeUs) / 1000), int(mLastContentTimeUs / 1000),
+ int(contentTimeUs / 1000));
+ resetForDiscontinuity();
+ return true;
+ }
+ }
+ return false;
+}
+
+void VideoRenderQualityTracker::processMetricsForSkippedFrame(int64_t contentTimeUs) {
+ mMetrics.frameSkippedCount++;
+ if (mConfiguration.areSkippedFramesDropped) {
+ processMetricsForDroppedFrame(contentTimeUs, -1);
+ return;
+ }
+ updateFrameDurations(mContentFrameDurationUs, contentTimeUs);
+ updateFrameDurations(mDesiredFrameDurationUs, -1);
+ updateFrameDurations(mActualFrameDurationUs, -1);
+ updateFrameRate(mMetrics.contentFrameRate, mContentFrameDurationUs, mConfiguration);
+}
+
+void VideoRenderQualityTracker::processMetricsForDroppedFrame(int64_t contentTimeUs,
+ int64_t desiredRenderTimeUs) {
+ mMetrics.frameDroppedCount++;
+ updateFrameDurations(mContentFrameDurationUs, contentTimeUs);
+ updateFrameDurations(mDesiredFrameDurationUs, desiredRenderTimeUs);
+ updateFrameDurations(mActualFrameDurationUs, -1);
+ updateFrameRate(mMetrics.contentFrameRate, mContentFrameDurationUs, mConfiguration);
+ updateFrameRate(mMetrics.desiredFrameRate, mDesiredFrameDurationUs, mConfiguration);
+}
+
+void VideoRenderQualityTracker::processMetricsForRenderedFrame(int64_t contentTimeUs,
+ int64_t desiredRenderTimeUs,
+ int64_t actualRenderTimeUs) {
+ // Capture the timestamp at which the first frame was rendered
+ if (mMetrics.firstFrameRenderTimeUs == 0) {
+ mMetrics.firstFrameRenderTimeUs = actualRenderTimeUs;
+ }
+
+ mMetrics.frameRenderedCount++;
+ // The content time is -1 when it was rendered after a discontinuity (e.g. seek) was detected.
+ // So, even though a frame was rendered, it's impact on the user is insignificant, so don't do
+ // anything other than count it as a rendered frame.
+ if (contentTimeUs == -1) {
+ return;
+ }
+ updateFrameDurations(mContentFrameDurationUs, contentTimeUs);
+ updateFrameDurations(mDesiredFrameDurationUs, desiredRenderTimeUs);
+ updateFrameDurations(mActualFrameDurationUs, actualRenderTimeUs);
+ updateFrameRate(mMetrics.contentFrameRate, mContentFrameDurationUs, mConfiguration);
+ updateFrameRate(mMetrics.desiredFrameRate, mDesiredFrameDurationUs, mConfiguration);
+ updateFrameRate(mMetrics.actualFrameRate, mActualFrameDurationUs, mConfiguration);
+}
+
+int64_t VideoRenderQualityTracker::nowUs() {
+ struct timespec t;
+ t.tv_sec = t.tv_nsec = 0;
+ clock_gettime(CLOCK_MONOTONIC, &t);
+ return (t.tv_sec * 1000000000LL + t.tv_nsec) / 1000LL;
+}
+
+void VideoRenderQualityTracker::updateFrameDurations(FrameDurationUs &durationUs,
+ int64_t newTimestampUs) {
+ for (int i = FrameDurationUs::SIZE - 1; i > 0; --i ) {
+ durationUs[i] = durationUs[i - 1];
+ }
+ if (newTimestampUs == -1) {
+ durationUs[0] = -1;
+ } else {
+ durationUs[0] = durationUs.priorTimestampUs == -1 ? -1 :
+ newTimestampUs - durationUs.priorTimestampUs;
+ durationUs.priorTimestampUs = newTimestampUs;
+ }
+}
+
+void VideoRenderQualityTracker::updateFrameRate(float &frameRate, const FrameDurationUs &durationUs,
+ const Configuration &c) {
+ float newFrameRate = detectFrameRate(durationUs, c);
+ if (newFrameRate != FRAME_RATE_UNDETERMINED) {
+ frameRate = newFrameRate;
+ }
+}
+
+float VideoRenderQualityTracker::detectFrameRate(const FrameDurationUs &durationUs,
+ const Configuration &c) {
+ // At least 3 frames are necessary to detect stable frame rates
+ assert(FrameDurationUs::SIZE >= 3);
+ if (durationUs[0] == -1 || durationUs[1] == -1 || durationUs[2] == -1) {
+ return FRAME_RATE_UNDETERMINED;
+ }
+ // Only determine frame rate if the render durations are stable across 3 frames
+ if (abs(durationUs[0] - durationUs[1]) > c.frameRateDetectionToleranceUs ||
+ abs(durationUs[0] - durationUs[2]) > c.frameRateDetectionToleranceUs) {
+ return is32pulldown(durationUs, c) ? FRAME_RATE_24HZ_3_2_PULLDOWN : FRAME_RATE_UNDETERMINED;
+ }
+ return 1000.0 * 1000.0 / durationUs[0];
+}
+
+bool VideoRenderQualityTracker::is32pulldown(const FrameDurationUs &durationUs,
+ const Configuration &c) {
+ // At least 5 frames are necessary to detect stable 3:2 pulldown
+ assert(FrameDurationUs::SIZE >= 5);
+ if (durationUs[0] == -1 || durationUs[1] == -1 || durationUs[2] == -1 || durationUs[3] == -1 ||
+ durationUs[4] == -1) {
+ return false;
+ }
+ // 3:2 pulldown expects that every other frame has identical duration...
+ if (abs(durationUs[0] - durationUs[2]) > c.frameRateDetectionToleranceUs ||
+ abs(durationUs[1] - durationUs[3]) > c.frameRateDetectionToleranceUs ||
+ abs(durationUs[0] - durationUs[4]) > c.frameRateDetectionToleranceUs) {
+ return false;
+ }
+ // ... for either 2 vsyncs or 3 vsyncs
+ if ((abs(durationUs[0] - 33333) < c.frameRateDetectionToleranceUs &&
+ abs(durationUs[1] - 50000) < c.frameRateDetectionToleranceUs) ||
+ (abs(durationUs[0] - 50000) < c.frameRateDetectionToleranceUs &&
+ abs(durationUs[1] - 33333) < c.frameRateDetectionToleranceUs)) {
+ return true;
+ }
+ return false;
+}
+
+} // namespace android
diff --git a/media/libstagefright/include/media/stagefright/MediaCodec.h b/media/libstagefright/include/media/stagefright/MediaCodec.h
index 3d4b6f8..386b177 100644
--- a/media/libstagefright/include/media/stagefright/MediaCodec.h
+++ b/media/libstagefright/include/media/stagefright/MediaCodec.h
@@ -30,6 +30,8 @@
#include <media/stagefright/foundation/AHandler.h>
#include <media/stagefright/CodecErrorLog.h>
#include <media/stagefright/FrameRenderTracker.h>
+#include <media/stagefright/PlaybackDurationAccumulator.h>
+#include <media/stagefright/VideoRenderQualityTracker.h>
#include <utils/Vector.h>
class C2Buffer;
@@ -63,7 +65,6 @@
struct PersistentSurface;
class SoftwareRenderer;
class Surface;
-class PlaybackDurationAccumulator;
namespace hardware {
namespace cas {
namespace native {
@@ -459,7 +460,7 @@
void onGetMetrics(const sp<AMessage>& msg);
constexpr const char *asString(TunnelPeekState state, const char *default_string="?");
void updateTunnelPeek(const sp<AMessage> &msg);
- void updatePlaybackDuration(const sp<AMessage> &msg);
+ void processRenderedFrames(const sp<AMessage> &msg);
inline void initClientConfigParcel(ClientConfigParcel& clientConfig);
@@ -569,8 +570,9 @@
sp<CryptoAsync> mCryptoAsync;
sp<ALooper> mCryptoLooper;
- std::unique_ptr<PlaybackDurationAccumulator> mPlaybackDurationAccumulator;
- bool mIsSurfaceToScreen;
+ bool mIsSurfaceToDisplay;
+ PlaybackDurationAccumulator mPlaybackDurationAccumulator;
+ VideoRenderQualityTracker mVideoRenderQualityTracker;
MediaCodec(
const sp<ALooper> &looper, pid_t pid, uid_t uid,
diff --git a/media/libstagefright/PlaybackDurationAccumulator.h b/media/libstagefright/include/media/stagefright/PlaybackDurationAccumulator.h
similarity index 95%
rename from media/libstagefright/PlaybackDurationAccumulator.h
rename to media/libstagefright/include/media/stagefright/PlaybackDurationAccumulator.h
index cb5f0c4..bdf1171 100644
--- a/media/libstagefright/PlaybackDurationAccumulator.h
+++ b/media/libstagefright/include/media/stagefright/PlaybackDurationAccumulator.h
@@ -33,7 +33,7 @@
}
// Process a render time expressed in nanoseconds.
- void processRenderTime(int64_t newRenderTimeNs) {
+ void onFrameRendered(int64_t newRenderTimeNs) {
// If we detect wrap-around or out of order frames, just ignore the duration for this
// and the next frame.
if (newRenderTimeNs < mPreviousRenderTimeNs) {
@@ -59,7 +59,7 @@
int64_t mPreviousRenderTimeNs;
};
-}
+} // android
-#endif
+#endif // PLAYBACK_DURATION_ACCUMULATOR_H_
diff --git a/media/libstagefright/include/media/stagefright/VideoRenderQualityTracker.h b/media/libstagefright/include/media/stagefright/VideoRenderQualityTracker.h
new file mode 100644
index 0000000..bcec783
--- /dev/null
+++ b/media/libstagefright/include/media/stagefright/VideoRenderQualityTracker.h
@@ -0,0 +1,230 @@
+/*
+ * Copyright 2023, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef VIDEO_RENDER_QUALITY_TRACKER_H_
+
+#define VIDEO_RENDER_QUALITY_TRACKER_H_
+
+#include <assert.h>
+#include <list>
+#include <queue>
+
+namespace android {
+
+static const float FRAME_RATE_UNDETERMINED = -1.0f;
+static const float FRAME_RATE_24HZ_3_2_PULLDOWN = -2.0f;
+
+// A variety of video rendering quality metrics.
+struct VideoRenderQualityMetrics {
+ VideoRenderQualityMetrics();
+
+ // The render time of the first video frame.
+ int64_t firstFrameRenderTimeUs;
+
+ // The number of frames released to be rendered.
+ int64_t frameReleasedCount;
+
+ // The number of frames actually rendered.
+ int64_t frameRenderedCount;
+
+ // The number of frames dropped - frames that were released but never rendered.
+ int64_t frameDroppedCount;
+
+ // The number of frames that were intentionally dropped/skipped by the app.
+ int64_t frameSkippedCount;
+
+ // The frame rate as detected by looking at the position timestamp from the content stream.
+ float contentFrameRate;
+
+ // The frame rate as detected by looking at the desired render time passed in by the app.
+ float desiredFrameRate;
+
+ // The frame rate as detected by looking at the actual render time, as returned by the system
+ // post-render.
+ float actualFrameRate;
+};
+
+///////////////////////////////////////////////////////
+// This class analyzes various timestamps related to video rendering to compute a set of metrics
+// that attempt to capture the quality of the user experience during video playback.
+//
+// The following timestamps (in microseconds) are analyzed to compute these metrics:
+// * The content timestamp found in the content stream, indicating the position of each video
+// frame.
+// * The desired timestamp passed in by the app, indicating at what point in time in the future
+// the app would like the frame to be rendered.
+// * The actual timestamp passed in by the display subsystem, indicating the point in time at
+// which the frame was actually rendered.
+//
+// Core to the algorithms are deriving frame durations based on these timestamps and determining
+// the result of each video frame in the content stream:
+// * skipped: the app didn't want to render the frame
+// * dropped: the display subsystem could not render the frame in time
+// * rendered: the display subsystem rendered the frame
+//
+class VideoRenderQualityTracker {
+public:
+ // Configurable elements of the metrics algorithms
+ class Configuration {
+ public:
+ Configuration();
+
+ // Whether or not frames that are intentionally not rendered by the app should be considered
+ // as dropped.
+ bool areSkippedFramesDropped;
+
+ // How large of a jump forward in content time is allowed before it is considered a
+ // discontinuity (seek/playlist) and various internal states are reset.
+ int32_t maxExpectedContentFrameDurationUs;
+
+ // How much tolerance in frame duration when considering whether or not two frames have the
+ // same frame rate.
+ int32_t frameRateDetectionToleranceUs;
+
+ // A skip forward in content time could occur during frame drops of live content. Therefore
+ // the content frame duration and the app-desired frame duration are compared using this
+ // tolerance to determine whether the app is intentionally seeking forward or whether the
+ // skip forward in content time is due to frame drops. If the app-desired frame duration is
+ // short, but the content frame duration is large, it is assumed the app is intentionally
+ // seeking forward.
+ int32_t contentTimeAdvancedForLiveContentToleranceUs;
+ };
+
+ VideoRenderQualityTracker();
+ VideoRenderQualityTracker(const Configuration &configuration);
+
+ // Called when the app has intentionally decided not to render this frame.
+ void onFrameSkipped(int64_t contentTimeUs);
+
+ // Called when the app has requested the frame to be rendered as soon as possible.
+ void onFrameReleased(int64_t contentTimeUs);
+
+ // Called when the app has requested the frame to be rendered at a specific point in time in the
+ // future.
+ void onFrameReleased(int64_t contentTimeUs, int64_t desiredRenderTimeNs);
+
+ // Called when the system has detected that the frame has actually been rendered to the display.
+ void onFrameRendered(int64_t contentTimeUs, int64_t actualRenderTimeNs);
+
+ // Retrieve the metrics.
+ const VideoRenderQualityMetrics &getMetrics() const;
+
+ // Called when a change in codec state will result in a content discontinuity - e.g. flush.
+ void resetForDiscontinuity();
+
+private:
+ // Tracking of frames that are pending to be rendered to the display.
+ struct FrameInfo {
+ int64_t contentTimeUs;
+ int64_t desiredRenderTimeUs;
+ };
+
+ // Historic tracking of frame durations
+ struct FrameDurationUs {
+ static const int SIZE = 5;
+
+ FrameDurationUs() {
+ for (int i = 0; i < SIZE; ++i) {
+ durationUs[i] = -1;
+ }
+ priorTimestampUs = -1;
+ }
+
+ int32_t &operator[](int index) {
+ assert(index < SIZE);
+ return durationUs[index];
+ }
+
+ const int32_t &operator[](int index) const {
+ assert(index < SIZE);
+ return durationUs[index];
+ }
+
+ // The duration of the past N frames.
+ int32_t durationUs[SIZE];
+
+ // The timestamp of the previous frame.
+ int64_t priorTimestampUs;
+ };
+
+ // The current time in microseconds.
+ static int64_t nowUs();
+
+ // A new frame has been processed, so update the frame durations based on the new frame
+ // timestamp.
+ static void updateFrameDurations(FrameDurationUs &durationUs, int64_t newTimestampUs);
+
+ // Update a frame rate if, and only if, one can be detected.
+ static void updateFrameRate(float &frameRate, const FrameDurationUs &durationUs,
+ const Configuration &c);
+
+ // Examine the past few frames to detect the frame rate based on each frame's render duration.
+ static float detectFrameRate(const FrameDurationUs &durationUs, const Configuration &c);
+
+ // Determine whether or not 3:2 pulldown for displaying 24fps content on 60Hz displays is
+ // occurring.
+ static bool is32pulldown(const FrameDurationUs &durationUs, const Configuration &c);
+
+ // Check to see if a discontinuity has occurred by examining the content time and the
+ // app-desired render time. If so, reset some internal state.
+ bool resetIfDiscontinuity(int64_t contentTimeUs, int64_t desiredRenderTimeUs);
+
+ // Update the metrics because a skipped frame was detected.
+ void processMetricsForSkippedFrame(int64_t contentTimeUs);
+
+ // Update the metrics because a dropped frame was detected.
+ void processMetricsForDroppedFrame(int64_t contentTimeUs, int64_t desiredRenderTimeUs);
+
+ // Update the metrics because a rendered frame was detected.
+ void processMetricsForRenderedFrame(int64_t contentTimeUs, int64_t desiredRenderTimeUs,
+ int64_t actualRenderTimeUs);
+
+ // Configurable elements of the metrics algorithms.
+ const Configuration mConfiguration;
+
+ // Metrics are updated every time a frame event occurs - skipped, dropped, rendered.
+ VideoRenderQualityMetrics mMetrics;
+
+ // The most recently processed timestamp referring to the position in the content stream.
+ int64_t mLastContentTimeUs;
+
+ // The most recently processed timestamp referring to the wall clock time a frame was rendered.
+ int64_t mLastRenderTimeUs;
+
+ // Frames skipped at the end of playback shouldn't really be considered skipped, therefore keep
+ // a list of the frames, and process them as skipped frames the next time a frame is rendered.
+ std::list<int64_t> mPendingSkippedFrameContentTimeUsList;
+
+ // Since the system only signals when a frame is rendered, dropped frames are detected by
+ // checking to see if the next expected frame is rendered. If not, it is considered dropped.
+ std::queue<FrameInfo> mNextExpectedRenderedFrameQueue;
+
+ // Frame durations derived from timestamps encoded into the content stream. These are the
+ // durations that each frame is supposed to be rendered for.
+ FrameDurationUs mContentFrameDurationUs;
+
+ // Frame durations derived from timestamps passed in by the app, indicating the wall clock time
+ // at which the app would like to have the frame rendered.
+ FrameDurationUs mDesiredFrameDurationUs;
+
+ // Frame durations derived from timestamps captured by the display subsystem, indicating the
+ // wall clock time at which the frame is actually rendered.
+ FrameDurationUs mActualFrameDurationUs;
+};
+
+} // namespace android
+
+#endif // VIDEO_RENDER_QUALITY_TRACKER_H_
diff --git a/media/libstagefright/tests/Android.bp b/media/libstagefright/tests/Android.bp
index e6b67ce..7f89605 100644
--- a/media/libstagefright/tests/Android.bp
+++ b/media/libstagefright/tests/Android.bp
@@ -55,3 +55,20 @@
"-Wall",
],
}
+
+cc_test {
+ name: "VideoRenderQualityTracker_test",
+ srcs: ["VideoRenderQualityTracker_test.cpp"],
+
+ // TODO(b/234833109): Figure out why shared_libs linkage causes stack corruption
+ static_libs: [
+ "libstagefright",
+ "liblog",
+ ],
+
+ cflags: [
+ "-Werror",
+ "-Wall",
+ ],
+
+}
diff --git a/media/libstagefright/tests/VideoRenderQualityTracker_test.cpp b/media/libstagefright/tests/VideoRenderQualityTracker_test.cpp
new file mode 100644
index 0000000..56ec788
--- /dev/null
+++ b/media/libstagefright/tests/VideoRenderQualityTracker_test.cpp
@@ -0,0 +1,221 @@
+/*
+ * Copyright 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// #define LOG_NDEBUG 0
+#define LOG_TAG "VideoRenderQualityTracker_test"
+#include <utils/Log.h>
+
+#include <gtest/gtest.h>
+
+#include <media/stagefright/VideoRenderQualityTracker.h>
+
+namespace android {
+
+using Metrics = VideoRenderQualityMetrics;
+using Configuration = VideoRenderQualityTracker::Configuration;
+
+class Helper {
+public:
+ Helper(double contentFrameDurationMs, const Configuration &configuration) :
+ mVideoRenderQualityTracker(configuration) {
+ mContentFrameDurationUs = int64_t(contentFrameDurationMs * 1000);
+ mMediaTimeUs = 0;
+ mClockTimeNs = 0;
+ }
+
+ void changeContentFrameDuration(double contentFrameDurationMs) {
+ mContentFrameDurationUs = int64_t(contentFrameDurationMs * 1000);
+ }
+
+ template<typename T>
+ void render(std::initializer_list<T> renderDurationMsList) {
+ for (auto renderDurationMs : renderDurationMsList) {
+ mVideoRenderQualityTracker.onFrameReleased(mMediaTimeUs);
+ mVideoRenderQualityTracker.onFrameRendered(mMediaTimeUs, mClockTimeNs);
+ mMediaTimeUs += mContentFrameDurationUs;
+ mClockTimeNs += int64_t(renderDurationMs * 1000 * 1000);
+ }
+ }
+
+ void skip(int numFrames) {
+ for (int i = 0; i < numFrames; ++i) {
+ mVideoRenderQualityTracker.onFrameSkipped(mMediaTimeUs);
+ mMediaTimeUs += mContentFrameDurationUs;
+ mClockTimeNs += mContentFrameDurationUs * 1000;
+ }
+ }
+
+ void drop(int numFrames) {
+ for (int i = 0; i < numFrames; ++i) {
+ mVideoRenderQualityTracker.onFrameReleased(mMediaTimeUs);
+ mMediaTimeUs += mContentFrameDurationUs;
+ mClockTimeNs += mContentFrameDurationUs * 1000;
+ }
+ }
+
+ const Metrics & getMetrics() const {
+ return mVideoRenderQualityTracker.getMetrics();
+ }
+
+private:
+ VideoRenderQualityTracker mVideoRenderQualityTracker;
+ int64_t mContentFrameDurationUs;
+ int64_t mMediaTimeUs;
+ int64_t mClockTimeNs;
+};
+
+class VideoRenderQualityTrackerTest : public ::testing::Test {
+public:
+ VideoRenderQualityTrackerTest() {}
+};
+
+TEST_F(VideoRenderQualityTrackerTest, countsReleasedFrames) {
+ Configuration c;
+ Helper h(16.66, c);
+ h.drop(10);
+ h.render({16.66, 16.66, 16.66});
+ h.skip(10); // skipped frames aren't released so they are not counted
+ h.render({16.66, 16.66, 16.66, 16.66});
+ h.drop(10);
+ EXPECT_EQ(27, h.getMetrics().frameReleasedCount);
+}
+
+TEST_F(VideoRenderQualityTrackerTest, countsSkippedFrames) {
+ Configuration c;
+ Helper h(16.66, c);
+ h.drop(10); // dropped frames are not counted
+ h.skip(10); // frames skipped before rendering a frame are not counted
+ h.render({16.66, 16.66, 16.66}); // rendered frames are not counted
+ h.drop(10); // dropped frames are not counted
+ h.skip(10);
+ h.render({16.66, 16.66, 16.66, 16.66}); // rendered frames are not counted
+ h.skip(10); // frames skipped at the end of playback are not counted
+ h.drop(10); // dropped frames are not counted
+ EXPECT_EQ(10, h.getMetrics().frameSkippedCount);
+}
+
+TEST_F(VideoRenderQualityTrackerTest, whenSkippedFramesAreDropped_countsDroppedFrames) {
+ Configuration c;
+ c.areSkippedFramesDropped = true;
+ Helper h(16.66, c);
+ h.skip(10); // skipped frames at the beginning of playback are not counted
+ h.drop(10);
+ h.skip(10); // skipped frames at the beginning of playback after dropped frames are not counted
+ h.render({16.66, 16.66, 16.66}); // rendered frames are not counted
+ h.drop(10);
+ h.skip(10);
+ h.render({16.66, 16.66, 16.66, 16.66}); // rendered frames are not counted
+ h.drop(10); // dropped frames at the end of playback are not counted
+ h.skip(10); // skipped frames at the end of playback are not counted
+ EXPECT_EQ(30, h.getMetrics().frameDroppedCount);
+}
+
+TEST_F(VideoRenderQualityTrackerTest, whenNotSkippedFramesAreDropped_countsDroppedFrames) {
+ Configuration c;
+ c.areSkippedFramesDropped = false;
+ Helper h(16.66, c);
+ h.skip(10); // skipped frames at the beginning of playback are not counted
+ h.drop(10);
+ h.skip(10); // skipped frames at the beginning of playback after dropped frames are not counted
+ h.render({16.66, 16.66, 16.66}); // rendered frames are not counted
+ h.drop(10);
+ h.skip(10); // skipped frames are not counted
+ h.render({16.66, 16.66, 16.66, 16.66}); // rendered frames are not counted
+ h.drop(10); // dropped frames at the end of playback are not counted
+ h.skip(10); // skipped frames at the end of playback are not counted
+ EXPECT_EQ(20, h.getMetrics().frameDroppedCount);
+}
+
+TEST_F(VideoRenderQualityTrackerTest, countsRenderedFrames) {
+ Configuration c;
+ Helper h(16.66, c);
+ h.drop(10); // dropped frames are not counted
+ h.render({16.66, 16.66, 16.66});
+ h.skip(10); // skipped frames are not counted
+ h.render({16.66, 16.66, 16.66, 16.66});
+ h.drop(10); // dropped frames are not counted
+ EXPECT_EQ(7, h.getMetrics().frameRenderedCount);
+}
+
+TEST_F(VideoRenderQualityTrackerTest, detectsFrameRate) {
+ Configuration c;
+ c.frameRateDetectionToleranceUs = 2 * 1000; // 2 ms
+ Helper h(16.66, c);
+ h.render({16.6, 16.7, 16.6, 16.7});
+ EXPECT_NEAR(h.getMetrics().contentFrameRate, 60.0, 0.5);
+ EXPECT_NEAR(h.getMetrics().actualFrameRate, 60.0, 0.5);
+}
+
+TEST_F(VideoRenderQualityTrackerTest, whenLowTolerance_doesntDetectFrameRate) {
+ Configuration c;
+ c.frameRateDetectionToleranceUs = 0;
+ Helper h(16.66, c);
+ h.render({16.6, 16.7, 16.6, 16.7});
+ EXPECT_NEAR(h.getMetrics().contentFrameRate, 60.0, 0.5);
+ EXPECT_EQ(h.getMetrics().actualFrameRate, FRAME_RATE_UNDETERMINED);
+}
+
+TEST_F(VideoRenderQualityTrackerTest, whenFrameRateDestabilizes_detectsFrameRate) {
+ Configuration c;
+ c.frameRateDetectionToleranceUs = 2 * 1000; // 2 ms
+ Helper h(16.66, c);
+ h.render({16.6, 16.7, 16.6, 16.7});
+ h.render({30.0, 16.6, 30.0, 16.6});
+ EXPECT_NEAR(h.getMetrics().contentFrameRate, 60.0, 0.5);
+ EXPECT_NEAR(h.getMetrics().actualFrameRate, 60.0, 0.5);
+}
+
+TEST_F(VideoRenderQualityTrackerTest, detects32Pulldown) {
+ Configuration c;
+ c.frameRateDetectionToleranceUs = 2 * 1000; // 2 ms
+ Helper h(41.66, c);
+ h.render({49.9, 33.2, 50.0, 33.4, 50.1, 33.2});
+ EXPECT_NEAR(h.getMetrics().contentFrameRate, 24.0, 0.5);
+ EXPECT_EQ(h.getMetrics().actualFrameRate, FRAME_RATE_24HZ_3_2_PULLDOWN);
+}
+
+TEST_F(VideoRenderQualityTrackerTest, whenBad32Pulldown_doesntDetect32Pulldown) {
+ Configuration c;
+ c.frameRateDetectionToleranceUs = 2 * 1000; // 2 ms
+ Helper h(41.66, c);
+ h.render({50.0, 33.33, 33.33, 50.00, 33.33, 50.00});
+ EXPECT_NEAR(h.getMetrics().contentFrameRate, 24.0, 0.5);
+ EXPECT_EQ(h.getMetrics().actualFrameRate, FRAME_RATE_UNDETERMINED);
+}
+
+TEST_F(VideoRenderQualityTrackerTest, whenFrameRateChanges_detectsMostRecentFrameRate) {
+ Configuration c;
+ c.frameRateDetectionToleranceUs = 2 * 1000; // 2 ms
+ Helper h(16.66, c);
+ h.render({16.6, 16.7, 16.6, 16.7});
+ EXPECT_NEAR(h.getMetrics().contentFrameRate, 60.0, 0.5);
+ EXPECT_NEAR(h.getMetrics().actualFrameRate, 60.0, 0.5);
+ h.changeContentFrameDuration(41.66);
+ h.render({50.0, 33.33, 50.0, 33.33, 50.0, 33.33});
+ EXPECT_NEAR(h.getMetrics().contentFrameRate, 24.0, 0.5);
+ EXPECT_EQ(h.getMetrics().actualFrameRate, FRAME_RATE_24HZ_3_2_PULLDOWN);
+}
+
+TEST_F(VideoRenderQualityTrackerTest, whenFrameRateIsUnstable_doesntDetectFrameRate) {
+ Configuration c;
+ c.frameRateDetectionToleranceUs = 2 * 1000; // 2 ms
+ Helper h(16.66, c);
+ h.render({16.66, 30.0, 16.66, 30.0, 16.66});
+ EXPECT_NEAR(h.getMetrics().contentFrameRate, 60.0, 0.5);
+ EXPECT_EQ(h.getMetrics().actualFrameRate, FRAME_RATE_UNDETERMINED);
+}
+
+} // namespace android
diff --git a/media/libstagefright/tests/mediacodec/Android.bp b/media/libstagefright/tests/mediacodec/Android.bp
index 9cdc6d4..23882ea 100644
--- a/media/libstagefright/tests/mediacodec/Android.bp
+++ b/media/libstagefright/tests/mediacodec/Android.bp
@@ -70,4 +70,4 @@
test_suites: [
"general-tests",
],
-}
+}