Record MediaCodec video playback duration
The algorithm accumulates the duration each frame is active on the
attached surface, using the onFrameRendered timestamps. Duration is
not counted while the attached surface is not being sent to the
screen, and frames that stay active for a long period of time (e.g.
during pauses, rebuffers, etc.) are not counted either.
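
For context, the accumulation rule reduces to roughly the following
sketch (illustrative only; accumulatePlaybackNs and renderTimesNs are
placeholder names, and the 500ms cap mirrors the
MAX_PRESENTATION_DURATION_NS constant added below):

  #include <cstdint>
  #include <vector>

  // Illustrative sketch only, not the class added by this change.
  // renderTimesNs: system-nano render timestamps from onFrameRendered.
  int64_t accumulatePlaybackNs(const std::vector<int64_t>& renderTimesNs) {
      constexpr int64_t kMaxGapNs = 500 * 1000 * 1000;  // 500ms
      int64_t durationNs = 0;
      int64_t prevNs = 0;
      for (int64_t renderNs : renderTimesNs) {
          if (renderNs < prevNs) {
              prevNs = 0;  // out-of-order or wrap-around: skip this gap
          }
          if (prevNs > 0 && renderNs - prevNs < kMaxGapNs) {
              durationNs += renderNs - prevNs;  // frame was actively on screen
          }
          prevNs = renderNs;
      }
      return durationNs;
  }
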
Test: Tested manually across a variety of apps by comparing
the dumpsys metrics with the duration displayed in the app UI.
The results were accurate to the second, even with multiple
fast-forwards, rewinds, rebufferings, and pauses, in both tunnel
mode and non-tunnel mode.
Bug: 184112407
Change-Id: I5d9ae2359ca5ead8d444cffc2ff36946228748cd
diff --git a/media/libstagefright/MediaCodec.cpp b/media/libstagefright/MediaCodec.cpp
index 57bdba0..89e161c 100644
--- a/media/libstagefright/MediaCodec.cpp
+++ b/media/libstagefright/MediaCodec.cpp
@@ -29,6 +29,7 @@
#include <C2Buffer.h>
#include "include/SoftwareRenderer.h"
+#include "PlaybackDurationAccumulator.h"
#include <android/hardware/cas/native/1.0/IDescrambler.h>
#include <android/hardware/media/omx/1.0/IGraphicBufferSource.h>
@@ -152,6 +153,8 @@
static const char *kCodecRecentLatencyAvg = "android.media.mediacodec.recent.avg"; /* in us */
static const char *kCodecRecentLatencyCount = "android.media.mediacodec.recent.n";
static const char *kCodecRecentLatencyHist = "android.media.mediacodec.recent.hist"; /* in us */
+static const char *kCodecPlaybackDuration =
+ "android.media.mediacodec.playback-duration"; /* in sec */
static const char *kCodecShapingEnhanced = "android.media.mediacodec.shaped"; /* 0/1 */
@@ -740,6 +743,8 @@
mHaveInputSurface(false),
mHavePendingInputBuffers(false),
mCpuBoostRequested(false),
+ mPlaybackDurationAccumulator(new PlaybackDurationAccumulator()),
+ mIsSurfaceToScreen(false),
mLatencyUnknown(0),
mBytesEncoded(0),
mEarliestEncodedPtsUs(INT64_MAX),
@@ -846,6 +851,10 @@
if (mLatencyUnknown > 0) {
mediametrics_setInt64(mMetricsHandle, kCodecLatencyUnknown, mLatencyUnknown);
}
+ int64_t playbackDuration = mPlaybackDurationAccumulator->getDurationInSeconds();
+ if (playbackDuration > 0) {
+ mediametrics_setInt64(mMetricsHandle, kCodecPlaybackDuration, playbackDuration);
+ }
if (mLifetimeStartNs > 0) {
nsecs_t lifetime = systemTime(SYSTEM_TIME_MONOTONIC) - mLifetimeStartNs;
lifetime = lifetime / (1000 * 1000); // emitted in ms, truncated not rounded
@@ -985,6 +994,28 @@
ALOGV("Ignoring tunnel-peek=%d for %s", tunnelPeek, asString(mTunnelPeekState));
}
+void MediaCodec::updatePlaybackDuration(const sp<AMessage> &msg) {
+ int what = 0;
+ msg->findInt32("what", &what);
+ if (msg->what() != kWhatCodecNotify || what != kWhatOutputFramesRendered) {
+ static bool logged = false;
+ if (!logged) {
+ logged = true;
+ ALOGE("updatePlaybackDuration: expected kWhatOutputFramesRendered (%d)", msg->what());
+ }
+ return;
+ }
+ // Playback duration only counts if the buffers are going to the screen.
+ if (!mIsSurfaceToScreen) {
+ return;
+ }
+ int64_t renderTimeNs;
+ size_t index = 0;
+ while (msg->findInt64(AStringPrintf("%zu-system-nano", index++).c_str(), &renderTimeNs)) {
+ mPlaybackDurationAccumulator->processRenderTime(renderTimeNs);
+ }
+}
+
bool MediaCodec::Histogram::setup(int nbuckets, int64_t width, int64_t floor)
{
if (nbuckets <= 0 || width <= 0) {
@@ -3199,6 +3230,7 @@
ALOGV("TunnelPeekState: %s -> %s",
asString(previousState),
asString(TunnelPeekState::kBufferRendered));
+ updatePlaybackDuration(msg);
// check that we have a notification set
if (mOnFrameRenderedNotification != NULL) {
sp<AMessage> notify = mOnFrameRenderedNotification->dup();
@@ -4905,6 +4937,10 @@
return ALREADY_EXISTS;
}
+ // in case we don't connect, ensure that we don't signal the surface is
+ // connected to the screen
+ mIsSurfaceToScreen = false;
+
err = nativeWindowConnect(surface.get(), "connectToSurface");
if (err == OK) {
// Require a fresh set of buffers after each connect by using a unique generation
@@ -4930,6 +4966,10 @@
if (!mAllowFrameDroppingBySurface) {
disableLegacyBufferDropPostQ(surface);
}
+ // keep track of whether or not the buffers of the connected surface go to the screen
+ int result = 0;
+ surface->query(NATIVE_WINDOW_QUEUES_TO_WINDOW_COMPOSER, &result);
+ mIsSurfaceToScreen = result != 0;
}
}
// do not return ALREADY_EXISTS unless surfaces are the same
@@ -4947,6 +4987,7 @@
}
// assume disconnected even on error
mSurface.clear();
+ mIsSurfaceToScreen = false;
}
return err;
}
diff --git a/media/libstagefright/PlaybackDurationAccumulator.h b/media/libstagefright/PlaybackDurationAccumulator.h
new file mode 100644
index 0000000..cb5f0c4
--- /dev/null
+++ b/media/libstagefright/PlaybackDurationAccumulator.h
@@ -0,0 +1,72 @@
+/*
+ * Copyright 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef PLAYBACK_DURATION_ACCUMULATOR_H_
+#define PLAYBACK_DURATION_ACCUMULATOR_H_
+
+namespace android {
+
+// Accumulates playback duration by processing render times of individual frames and by ignoring
+// frames rendered during inactive playbacks such as seeking, pausing, or re-buffering.
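+//
+// Typical use (illustrative; renderTimeNs is a placeholder for the
+// system-nano render timestamp reported by onFrameRendered):
+//   PlaybackDurationAccumulator accumulator;
+//   accumulator.processRenderTime(renderTimeNs); // once per rendered frame
+//   int64_t playedSeconds = accumulator.getDurationInSeconds();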
+class PlaybackDurationAccumulator {
+private:
+ // The maximum delta between consecutive render times; a larger gap means playback is not
+ // active (e.g. paused or stalled) and the gap is not counted.
+ static const int64_t MAX_PRESENTATION_DURATION_NS = 500 * 1000 * 1000;
+
+public:
+ PlaybackDurationAccumulator() {
+ mPlaybackDurationNs = 0;
+ mPreviousRenderTimeNs = 0;
+ }
+
+ // Process a render time expressed in nanoseconds.
+ void processRenderTime(int64_t newRenderTimeNs) {
+ // If we detect wrap-around or out of order frames, just ignore the duration for this
+ // and the next frame.
+ if (newRenderTimeNs < mPreviousRenderTimeNs) {
+ mPreviousRenderTimeNs = 0;
+ }
+ if (mPreviousRenderTimeNs > 0) {
+ int64_t presentationDurationNs = newRenderTimeNs - mPreviousRenderTimeNs;
+ if (presentationDurationNs < MAX_PRESENTATION_DURATION_NS) {
+ mPlaybackDurationNs += presentationDurationNs;
+ }
+ }
+ mPreviousRenderTimeNs = newRenderTimeNs;
+ }
+
+ int64_t getDurationInSeconds() {
+ return mPlaybackDurationNs / 1000 / 1000 / 1000; // Nanoseconds to seconds.
+ }
+
+private:
+ // The playback duration accumulated so far.
+ int64_t mPlaybackDurationNs;
+ // The previous render time used to compute the next presentation duration.
+ int64_t mPreviousRenderTimeNs;
+};
+
+}  // namespace android
+
+#endif  // PLAYBACK_DURATION_ACCUMULATOR_H_
+
diff --git a/media/libstagefright/include/media/stagefright/MediaCodec.h b/media/libstagefright/include/media/stagefright/MediaCodec.h
index 3517bae..0d67d7d 100644
--- a/media/libstagefright/include/media/stagefright/MediaCodec.h
+++ b/media/libstagefright/include/media/stagefright/MediaCodec.h
@@ -58,6 +58,7 @@
struct PersistentSurface;
class SoftwareRenderer;
class Surface;
+class PlaybackDurationAccumulator;
namespace hardware {
namespace cas {
namespace native {
@@ -418,6 +419,7 @@
void updateLowLatency(const sp<AMessage> &msg);
constexpr const char *asString(TunnelPeekState state, const char *default_string="?");
void updateTunnelPeek(const sp<AMessage> &msg);
+ void updatePlaybackDuration(const sp<AMessage> &msg);
sp<AMessage> mOutputFormat;
sp<AMessage> mInputFormat;
@@ -485,6 +487,9 @@
std::shared_ptr<BufferChannelBase> mBufferChannel;
+ std::unique_ptr<PlaybackDurationAccumulator> mPlaybackDurationAccumulator;
+ bool mIsSurfaceToScreen;
+
MediaCodec(
const sp<ALooper> &looper, pid_t pid, uid_t uid,
std::function<sp<CodecBase>(const AString &, const char *)> getCodecBase = nullptr,