Merge "Add media item info to editing metrics" into main
diff --git a/core/api/current.txt b/core/api/current.txt
index 0e42f80..36d7145 100644
--- a/core/api/current.txt
+++ b/core/api/current.txt
@@ -25869,6 +25869,9 @@
method public int describeContents();
method public int getErrorCode();
method public int getFinalState();
+ method @NonNull public java.util.List<android.media.metrics.MediaItemInfo> getInputMediaItemInfos();
+ method public long getOperationTypes();
+ method @Nullable public android.media.metrics.MediaItemInfo getOutputMediaItemInfo();
method public void writeToParcel(@NonNull android.os.Parcel, int);
field @NonNull public static final android.os.Parcelable.Creator<android.media.metrics.EditingEndedEvent> CREATOR;
field public static final int ERROR_CODE_AUDIO_PROCESSING_FAILED = 18; // 0x12
@@ -25893,14 +25896,25 @@
field public static final int FINAL_STATE_CANCELED = 2; // 0x2
field public static final int FINAL_STATE_ERROR = 3; // 0x3
field public static final int FINAL_STATE_SUCCEEDED = 1; // 0x1
+ field public static final long OPERATION_TYPE_AUDIO_EDIT = 8L; // 0x8L
+ field public static final long OPERATION_TYPE_AUDIO_TRANSCODE = 2L; // 0x2L
+ field public static final long OPERATION_TYPE_AUDIO_TRANSMUX = 32L; // 0x20L
+ field public static final long OPERATION_TYPE_PAUSED = 64L; // 0x40L
+ field public static final long OPERATION_TYPE_RESUMED = 128L; // 0x80L
+ field public static final long OPERATION_TYPE_VIDEO_EDIT = 4L; // 0x4L
+ field public static final long OPERATION_TYPE_VIDEO_TRANSCODE = 1L; // 0x1L
+ field public static final long OPERATION_TYPE_VIDEO_TRANSMUX = 16L; // 0x10L
field public static final int TIME_SINCE_CREATED_UNKNOWN = -1; // 0xffffffff
}
@FlaggedApi("com.android.media.editing.flags.add_media_metrics_editing") public static final class EditingEndedEvent.Builder {
ctor public EditingEndedEvent.Builder(int);
+ method @NonNull public android.media.metrics.EditingEndedEvent.Builder addInputMediaItemInfo(@NonNull android.media.metrics.MediaItemInfo);
+ method @NonNull public android.media.metrics.EditingEndedEvent.Builder addOperationType(long);
method @NonNull public android.media.metrics.EditingEndedEvent build();
method @NonNull public android.media.metrics.EditingEndedEvent.Builder setErrorCode(int);
method @NonNull public android.media.metrics.EditingEndedEvent.Builder setMetricsBundle(@NonNull android.os.Bundle);
+ method @NonNull public android.media.metrics.EditingEndedEvent.Builder setOutputMediaItemInfo(@NonNull android.media.metrics.MediaItemInfo);
method @NonNull public android.media.metrics.EditingEndedEvent.Builder setTimeSinceCreatedMillis(@IntRange(from=android.media.metrics.EditingEndedEvent.TIME_SINCE_CREATED_UNKNOWN) long);
}
@@ -25920,6 +25934,65 @@
field @NonNull public static final android.media.metrics.LogSessionId LOG_SESSION_ID_NONE;
}
+ @FlaggedApi("com.android.media.editing.flags.add_media_metrics_editing") public final class MediaItemInfo implements android.os.Parcelable {
+ method public int describeContents();
+ method public int getAudioChannelCount();
+ method public long getAudioSampleCount();
+ method public int getAudioSampleRateHz();
+ method public long getClipDurationMillis();
+ method @NonNull public java.util.List<java.lang.String> getCodecNames();
+ method @Nullable public String getContainerMimeType();
+ method public long getDataTypes();
+ method public long getDurationMillis();
+ method @NonNull public java.util.List<java.lang.String> getSampleMimeTypes();
+ method public int getSourceType();
+ method public int getVideoDataSpace();
+ method public float getVideoFrameRate();
+ method public long getVideoSampleCount();
+ method @NonNull public android.util.Size getVideoSize();
+ method public void writeToParcel(@NonNull android.os.Parcel, int);
+ field @NonNull public static final android.os.Parcelable.Creator<android.media.metrics.MediaItemInfo> CREATOR;
+ field public static final long DATA_TYPE_AUDIO = 4L; // 0x4L
+ field public static final long DATA_TYPE_CUE_POINTS = 128L; // 0x80L
+ field public static final long DATA_TYPE_DEPTH = 16L; // 0x10L
+ field public static final long DATA_TYPE_GAIN_MAP = 32L; // 0x20L
+ field public static final long DATA_TYPE_GAPLESS = 256L; // 0x100L
+ field public static final long DATA_TYPE_HIGH_DYNAMIC_RANGE_VIDEO = 1024L; // 0x400L
+ field public static final long DATA_TYPE_HIGH_FRAME_RATE = 64L; // 0x40L
+ field public static final long DATA_TYPE_IMAGE = 1L; // 0x1L
+ field public static final long DATA_TYPE_METADATA = 8L; // 0x8L
+ field public static final long DATA_TYPE_SPATIAL_AUDIO = 512L; // 0x200L
+ field public static final long DATA_TYPE_VIDEO = 2L; // 0x2L
+ field public static final int SOURCE_TYPE_CAMERA = 2; // 0x2
+ field public static final int SOURCE_TYPE_EDITING_SESSION = 3; // 0x3
+ field public static final int SOURCE_TYPE_GALLERY = 1; // 0x1
+ field public static final int SOURCE_TYPE_GENERATED = 7; // 0x7
+ field public static final int SOURCE_TYPE_LOCAL_FILE = 4; // 0x4
+ field public static final int SOURCE_TYPE_REMOTE_FILE = 5; // 0x5
+ field public static final int SOURCE_TYPE_REMOTE_LIVE_STREAM = 6; // 0x6
+ field public static final int SOURCE_TYPE_UNSPECIFIED = 0; // 0x0
+ field public static final int VALUE_UNSPECIFIED = -1; // 0xffffffff
+ }
+
+ @FlaggedApi("com.android.media.editing.flags.add_media_metrics_editing") public static final class MediaItemInfo.Builder {
+ ctor public MediaItemInfo.Builder();
+ method @NonNull public android.media.metrics.MediaItemInfo.Builder addCodecName(@NonNull String);
+ method @NonNull public android.media.metrics.MediaItemInfo.Builder addDataType(long);
+ method @NonNull public android.media.metrics.MediaItemInfo.Builder addSampleMimeType(@NonNull String);
+ method @NonNull public android.media.metrics.MediaItemInfo build();
+ method @NonNull public android.media.metrics.MediaItemInfo.Builder setAudioChannelCount(@IntRange(from=0) int);
+ method @NonNull public android.media.metrics.MediaItemInfo.Builder setAudioSampleCount(@IntRange(from=0) long);
+ method @NonNull public android.media.metrics.MediaItemInfo.Builder setAudioSampleRateHz(@IntRange(from=0) int);
+ method @NonNull public android.media.metrics.MediaItemInfo.Builder setClipDurationMillis(long);
+ method @NonNull public android.media.metrics.MediaItemInfo.Builder setContainerMimeType(@NonNull String);
+ method @NonNull public android.media.metrics.MediaItemInfo.Builder setDurationMillis(long);
+ method @NonNull public android.media.metrics.MediaItemInfo.Builder setSourceType(int);
+ method @NonNull public android.media.metrics.MediaItemInfo.Builder setVideoDataSpace(int);
+ method @NonNull public android.media.metrics.MediaItemInfo.Builder setVideoFrameRate(@FloatRange(from=0) float);
+ method @NonNull public android.media.metrics.MediaItemInfo.Builder setVideoSampleCount(@IntRange(from=0) long);
+ method @NonNull public android.media.metrics.MediaItemInfo.Builder setVideoSize(@NonNull android.util.Size);
+ }
+
public final class MediaMetricsManager {
method @NonNull public android.media.metrics.BundleSession createBundleSession();
method @NonNull public android.media.metrics.EditingSession createEditingSession();
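
Usage sketch (illustrative, not part of this change): with the API surface above, an app that already reports editing metrics could populate the new fields roughly as follows. The helper class, the concrete media values, and the final reportEditingEndedEvent() call on the existing EditingSession are assumptions made for this example.

    import android.media.metrics.EditingEndedEvent;
    import android.media.metrics.EditingSession;
    import android.media.metrics.MediaItemInfo;
    import android.media.metrics.MediaMetricsManager;
    import android.util.Size;

    /** Hypothetical reporting helper; only the android.media.metrics and android.util calls are real API. */
    final class EditingMetricsReporter {
        static void reportSuccess(MediaMetricsManager manager, long elapsedMillis) {
            MediaItemInfo input = new MediaItemInfo.Builder()
                    .setSourceType(MediaItemInfo.SOURCE_TYPE_CAMERA)
                    .addDataType(MediaItemInfo.DATA_TYPE_VIDEO)
                    .addDataType(MediaItemInfo.DATA_TYPE_AUDIO)
                    .setContainerMimeType("video/mp4")
                    .addSampleMimeType("video/avc")
                    .setVideoSize(new Size(1920, 1080))
                    .build();
            EditingEndedEvent event =
                    new EditingEndedEvent.Builder(EditingEndedEvent.FINAL_STATE_SUCCEEDED)
                            .addInputMediaItemInfo(input)
                            .addOperationType(EditingEndedEvent.OPERATION_TYPE_VIDEO_TRANSCODE)
                            .setTimeSinceCreatedMillis(elapsedMillis)
                            .build();
            EditingSession session = manager.createEditingSession();
            session.reportEditingEndedEvent(event); // Assumed to be the existing EditingSession entry point.
        }
    }
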
diff --git a/media/java/android/media/metrics/EditingEndedEvent.java b/media/java/android/media/metrics/EditingEndedEvent.java
index 5ed8d40..f1c5c9d 100644
--- a/media/java/android/media/metrics/EditingEndedEvent.java
+++ b/media/java/android/media/metrics/EditingEndedEvent.java
@@ -20,6 +20,7 @@
import android.annotation.FlaggedApi;
import android.annotation.IntDef;
import android.annotation.IntRange;
+import android.annotation.LongDef;
import android.annotation.NonNull;
import android.annotation.Nullable;
import android.os.Bundle;
@@ -27,6 +28,8 @@
import android.os.Parcelable;
import java.lang.annotation.Retention;
+import java.util.ArrayList;
+import java.util.List;
import java.util.Objects;
/** Event for an editing operation having ended. */
@@ -156,14 +159,66 @@
@SuppressWarnings("HidingField") // Hiding field from superclass as for playback events.
private final long mTimeSinceCreatedMillis;
+ private final ArrayList<MediaItemInfo> mInputMediaItemInfos;
+ @Nullable private final MediaItemInfo mOutputMediaItemInfo;
+
+ /** @hide */
+ @LongDef(
+ prefix = {"OPERATION_TYPE_"},
+ flag = true,
+ value = {
+ OPERATION_TYPE_VIDEO_TRANSCODE,
+ OPERATION_TYPE_AUDIO_TRANSCODE,
+ OPERATION_TYPE_VIDEO_EDIT,
+ OPERATION_TYPE_AUDIO_EDIT,
+ OPERATION_TYPE_VIDEO_TRANSMUX,
+ OPERATION_TYPE_AUDIO_TRANSMUX,
+ OPERATION_TYPE_PAUSED,
+ OPERATION_TYPE_RESUMED,
+ })
+ @Retention(java.lang.annotation.RetentionPolicy.SOURCE)
+ public @interface OperationType {}
+
+ /** Input video was decoded and re-encoded. */
+ public static final long OPERATION_TYPE_VIDEO_TRANSCODE = 1;
+
+ /** Input audio was decoded and re-encoded. */
+ public static final long OPERATION_TYPE_AUDIO_TRANSCODE = 1L << 1;
+
+ /** Input video was edited. */
+ public static final long OPERATION_TYPE_VIDEO_EDIT = 1L << 2;
+
+ /** Input audio was edited. */
+ public static final long OPERATION_TYPE_AUDIO_EDIT = 1L << 3;
+
+    /** Input video samples were written (muxed) directly to the output file without transcoding. */
+ public static final long OPERATION_TYPE_VIDEO_TRANSMUX = 1L << 4;
+
+ /** Input audio samples were written (muxed) directly to the output file without transcoding. */
+ public static final long OPERATION_TYPE_AUDIO_TRANSMUX = 1L << 5;
+
+ /** The editing operation was paused before it completed. */
+ public static final long OPERATION_TYPE_PAUSED = 1L << 6;
+
+ /** The editing operation resumed a previous (paused) operation. */
+ public static final long OPERATION_TYPE_RESUMED = 1L << 7;
+
+ private final @OperationType long mOperationTypes;
+
private EditingEndedEvent(
@FinalState int finalState,
@ErrorCode int errorCode,
long timeSinceCreatedMillis,
+ ArrayList<MediaItemInfo> inputMediaItemInfos,
+ @Nullable MediaItemInfo outputMediaItemInfo,
+ @OperationType long operationTypes,
@NonNull Bundle extras) {
mFinalState = finalState;
mErrorCode = errorCode;
mTimeSinceCreatedMillis = timeSinceCreatedMillis;
+ mInputMediaItemInfos = inputMediaItemInfos;
+ mOutputMediaItemInfo = outputMediaItemInfo;
+ mOperationTypes = operationTypes;
mMetricsBundle = extras.deepCopy();
}
@@ -194,6 +249,23 @@
return mTimeSinceCreatedMillis;
}
+ /** Gets information about the input media items, or an empty list if unspecified. */
+ @NonNull
+ public List<MediaItemInfo> getInputMediaItemInfos() {
+ return new ArrayList<>(mInputMediaItemInfos);
+ }
+
+ /** Gets information about the output media item, or {@code null} if unspecified. */
+ @Nullable
+ public MediaItemInfo getOutputMediaItemInfo() {
+ return mOutputMediaItemInfo;
+ }
+
+ /** Gets a set of flags describing the types of operations performed. */
+ public @OperationType long getOperationTypes() {
+ return mOperationTypes;
+ }
+
/**
* Gets metrics-related information that is not supported by dedicated methods.
*
@@ -208,7 +280,7 @@
@Override
@NonNull
public String toString() {
- return "PlaybackErrorEvent { "
+ return "EditingEndedEvent { "
+ "finalState = "
+ mFinalState
+ ", "
@@ -217,6 +289,15 @@
+ ", "
+ "timeSinceCreatedMillis = "
+ mTimeSinceCreatedMillis
+ + ", "
+ + "inputMediaItemInfos = "
+ + mInputMediaItemInfos
+ + ", "
+ + "outputMediaItemInfo = "
+ + mOutputMediaItemInfo
+ + ", "
+ + "operationTypes = "
+ + mOperationTypes
+ " }";
}
@@ -227,12 +308,21 @@
EditingEndedEvent that = (EditingEndedEvent) o;
return mFinalState == that.mFinalState
&& mErrorCode == that.mErrorCode
+ && Objects.equals(mInputMediaItemInfos, that.mInputMediaItemInfos)
+ && Objects.equals(mOutputMediaItemInfo, that.mOutputMediaItemInfo)
+ && mOperationTypes == that.mOperationTypes
&& mTimeSinceCreatedMillis == that.mTimeSinceCreatedMillis;
}
@Override
public int hashCode() {
- return Objects.hash(mFinalState, mErrorCode, mTimeSinceCreatedMillis);
+ return Objects.hash(
+ mFinalState,
+ mErrorCode,
+ mInputMediaItemInfos,
+ mOutputMediaItemInfo,
+ mOperationTypes,
+ mTimeSinceCreatedMillis);
}
@Override
@@ -240,6 +330,9 @@
dest.writeInt(mFinalState);
dest.writeInt(mErrorCode);
dest.writeLong(mTimeSinceCreatedMillis);
+ dest.writeTypedList(mInputMediaItemInfos);
+ dest.writeTypedObject(mOutputMediaItemInfo, /* parcelableFlags= */ 0);
+ dest.writeLong(mOperationTypes);
dest.writeBundle(mMetricsBundle);
}
@@ -249,15 +342,14 @@
}
private EditingEndedEvent(@NonNull Parcel in) {
- int finalState = in.readInt();
- int errorCode = in.readInt();
- long timeSinceCreatedMillis = in.readLong();
- Bundle metricsBundle = in.readBundle();
-
- mFinalState = finalState;
- mErrorCode = errorCode;
- mTimeSinceCreatedMillis = timeSinceCreatedMillis;
- mMetricsBundle = metricsBundle;
+ mFinalState = in.readInt();
+ mErrorCode = in.readInt();
+ mTimeSinceCreatedMillis = in.readLong();
+ mInputMediaItemInfos = new ArrayList<>();
+ in.readTypedList(mInputMediaItemInfos, MediaItemInfo.CREATOR);
+ mOutputMediaItemInfo = in.readTypedObject(MediaItemInfo.CREATOR);
+ mOperationTypes = in.readLong();
+ mMetricsBundle = in.readBundle();
}
public static final @NonNull Creator<EditingEndedEvent> CREATOR =
@@ -277,8 +369,11 @@
@FlaggedApi(FLAG_ADD_MEDIA_METRICS_EDITING)
public static final class Builder {
private final @FinalState int mFinalState;
+ private final ArrayList<MediaItemInfo> mInputMediaItemInfos;
private @ErrorCode int mErrorCode;
private long mTimeSinceCreatedMillis;
+ @Nullable private MediaItemInfo mOutputMediaItemInfo;
+ private @OperationType long mOperationTypes;
private Bundle mMetricsBundle;
/**
@@ -290,6 +385,7 @@
mFinalState = finalState;
mErrorCode = ERROR_CODE_NONE;
mTimeSinceCreatedMillis = TIME_SINCE_CREATED_UNKNOWN;
+ mInputMediaItemInfos = new ArrayList<>();
mMetricsBundle = new Bundle();
}
@@ -312,20 +408,49 @@
return this;
}
+ /** Adds information about a media item that was input to the editing operation. */
+ public @NonNull Builder addInputMediaItemInfo(@NonNull MediaItemInfo mediaItemInfo) {
+ mInputMediaItemInfos.add(Objects.requireNonNull(mediaItemInfo));
+ return this;
+ }
+
+ /** Sets information about the output media item. */
+ public @NonNull Builder setOutputMediaItemInfo(@NonNull MediaItemInfo mediaItemInfo) {
+ mOutputMediaItemInfo = Objects.requireNonNull(mediaItemInfo);
+ return this;
+ }
+
+ /**
+ * Adds an operation type to the set of operations performed.
+ *
+ * @param operationType A type of operation performed as part of this editing operation.
+ */
+ public @NonNull Builder addOperationType(@OperationType long operationType) {
+ mOperationTypes |= operationType;
+ return this;
+ }
+
/**
* Sets metrics-related information that is not supported by dedicated methods.
*
* <p>Used for backwards compatibility by the metrics infrastructure.
*/
public @NonNull Builder setMetricsBundle(@NonNull Bundle metricsBundle) {
- mMetricsBundle = metricsBundle;
+ mMetricsBundle = Objects.requireNonNull(metricsBundle);
return this;
}
/** Builds an instance. */
public @NonNull EditingEndedEvent build() {
return new EditingEndedEvent(
- mFinalState, mErrorCode, mTimeSinceCreatedMillis, mMetricsBundle);
+ mFinalState,
+ mErrorCode,
+ mTimeSinceCreatedMillis,
+ mInputMediaItemInfos,
+ mOutputMediaItemInfo,
+ mOperationTypes,
+ mMetricsBundle);
}
}
+
}
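
Since the OPERATION_TYPE_* constants added above are single-bit flags combined through Builder#addOperationType, a consumer of getOperationTypes() can test individual bits. A minimal sketch, with the helper name invented for illustration:

    import android.media.metrics.EditingEndedEvent;

    // Illustrative only: decode the flag set carried by a reported EditingEndedEvent.
    static boolean includesOperation(EditingEndedEvent event, long operationType) {
        return (event.getOperationTypes() & operationType) != 0;
    }

    // e.g. includesOperation(event, EditingEndedEvent.OPERATION_TYPE_VIDEO_TRANSCODE)
    //         || includesOperation(event, EditingEndedEvent.OPERATION_TYPE_PAUSED)
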
diff --git a/media/java/android/media/metrics/MediaItemInfo.java b/media/java/android/media/metrics/MediaItemInfo.java
new file mode 100644
index 0000000..63dd3cc
--- /dev/null
+++ b/media/java/android/media/metrics/MediaItemInfo.java
@@ -0,0 +1,565 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package android.media.metrics;
+
+import static com.android.media.editing.flags.Flags.FLAG_ADD_MEDIA_METRICS_EDITING;
+
+import android.annotation.FlaggedApi;
+import android.annotation.FloatRange;
+import android.annotation.IntDef;
+import android.annotation.IntRange;
+import android.annotation.LongDef;
+import android.annotation.NonNull;
+import android.annotation.Nullable;
+import android.annotation.SuppressLint;
+import android.hardware.DataSpace;
+import android.media.MediaCodec;
+import android.os.Parcel;
+import android.os.Parcelable;
+import android.util.Size;
+
+import java.lang.annotation.Retention;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Objects;
+
+/** Represents information about a piece of media (for example, an audio or video file). */
+@FlaggedApi(FLAG_ADD_MEDIA_METRICS_EDITING)
+public final class MediaItemInfo implements Parcelable {
+
+ /** @hide */
+ @IntDef(
+ prefix = {"SOURCE_TYPE_"},
+ value = {
+ SOURCE_TYPE_UNSPECIFIED,
+ SOURCE_TYPE_GALLERY,
+ SOURCE_TYPE_CAMERA,
+ SOURCE_TYPE_EDITING_SESSION,
+ SOURCE_TYPE_LOCAL_FILE,
+ SOURCE_TYPE_REMOTE_FILE,
+ SOURCE_TYPE_REMOTE_LIVE_STREAM,
+ SOURCE_TYPE_GENERATED,
+ })
+ @Retention(java.lang.annotation.RetentionPolicy.SOURCE)
+ public @interface SourceType {}
+
+ /** The media item's source is not known. */
+ public static final int SOURCE_TYPE_UNSPECIFIED = 0;
+
+ /** The media item came from the device gallery. */
+ public static final int SOURCE_TYPE_GALLERY = 1;
+
+ /** The media item came directly from camera capture. */
+ public static final int SOURCE_TYPE_CAMERA = 2;
+
+ /** The media item was output by a previous editing session. */
+ public static final int SOURCE_TYPE_EDITING_SESSION = 3;
+
+ /** The media item is stored on the local device's file system. */
+ public static final int SOURCE_TYPE_LOCAL_FILE = 4;
+
+ /** The media item is a remote file (for example, it's loaded from an HTTP server). */
+ public static final int SOURCE_TYPE_REMOTE_FILE = 5;
+
+ /** The media item is a remotely-served live stream. */
+ public static final int SOURCE_TYPE_REMOTE_LIVE_STREAM = 6;
+
+ /** The media item was generated by another system. */
+ public static final int SOURCE_TYPE_GENERATED = 7;
+
+ /** @hide */
+ @LongDef(
+ prefix = {"DATA_TYPE_"},
+ flag = true,
+ value = {
+ DATA_TYPE_IMAGE,
+ DATA_TYPE_VIDEO,
+ DATA_TYPE_AUDIO,
+ DATA_TYPE_METADATA,
+ DATA_TYPE_DEPTH,
+ DATA_TYPE_GAIN_MAP,
+ DATA_TYPE_HIGH_FRAME_RATE,
+ DATA_TYPE_CUE_POINTS,
+ DATA_TYPE_GAPLESS,
+ DATA_TYPE_SPATIAL_AUDIO,
+ DATA_TYPE_HIGH_DYNAMIC_RANGE_VIDEO,
+ })
+ @Retention(java.lang.annotation.RetentionPolicy.SOURCE)
+ public @interface DataType {}
+
+ /** The media item includes image data. */
+ public static final long DATA_TYPE_IMAGE = 1L;
+
+ /** The media item includes video data. */
+ public static final long DATA_TYPE_VIDEO = 1L << 1;
+
+ /** The media item includes audio data. */
+ public static final long DATA_TYPE_AUDIO = 1L << 2;
+
+ /** The media item includes metadata. */
+ public static final long DATA_TYPE_METADATA = 1L << 3;
+
+ /** The media item includes depth (z-distance) information. */
+ public static final long DATA_TYPE_DEPTH = 1L << 4;
+
+ /** The media item includes gain map information (for example, an Ultra HDR gain map). */
+ public static final long DATA_TYPE_GAIN_MAP = 1L << 5;
+
+ /** The media item includes high frame rate video data. */
+ public static final long DATA_TYPE_HIGH_FRAME_RATE = 1L << 6;
+
+ /** The media item includes time-dependent speed setting metadata. */
+ public static final long DATA_TYPE_CUE_POINTS = 1L << 7;
+
+ /** The media item includes gapless audio metadata. */
+ public static final long DATA_TYPE_GAPLESS = 1L << 8;
+
+ /** The media item includes spatial audio data. */
+ public static final long DATA_TYPE_SPATIAL_AUDIO = 1L << 9;
+
+ /** The media item includes high dynamic range (HDR) video. */
+ public static final long DATA_TYPE_HIGH_DYNAMIC_RANGE_VIDEO = 1L << 10;
+
+ /** Special value for numerical fields where the value was not specified. */
+ public static final int VALUE_UNSPECIFIED = -1;
+
+ private final @SourceType int mSourceType;
+ private final @DataType long mDataTypes;
+ private final long mDurationMillis;
+ private final long mClipDurationMillis;
+ @Nullable private final String mContainerMimeType;
+ private final List<String> mSampleMimeTypes;
+ private final List<String> mCodecNames;
+ private final int mAudioSampleRateHz;
+ private final int mAudioChannelCount;
+ private final long mAudioSampleCount;
+ private final Size mVideoSize;
+ private final int mVideoDataSpace;
+ private final float mVideoFrameRate;
+ private final long mVideoSampleCount;
+
+ private MediaItemInfo(
+ @SourceType int sourceType,
+ @DataType long dataTypes,
+ long durationMillis,
+ long clipDurationMillis,
+ @Nullable String containerMimeType,
+ List<String> sampleMimeTypes,
+ List<String> codecNames,
+ int audioSampleRateHz,
+ int audioChannelCount,
+ long audioSampleCount,
+ Size videoSize,
+ int videoDataSpace,
+ float videoFrameRate,
+ long videoSampleCount) {
+ mSourceType = sourceType;
+ mDataTypes = dataTypes;
+ mDurationMillis = durationMillis;
+ mClipDurationMillis = clipDurationMillis;
+ mContainerMimeType = containerMimeType;
+ mSampleMimeTypes = sampleMimeTypes;
+ mCodecNames = codecNames;
+ mAudioSampleRateHz = audioSampleRateHz;
+ mAudioChannelCount = audioChannelCount;
+ mAudioSampleCount = audioSampleCount;
+ mVideoSize = videoSize;
+ mVideoDataSpace = videoDataSpace;
+ mVideoFrameRate = videoFrameRate;
+ mVideoSampleCount = videoSampleCount;
+ }
+
+ /**
+ * Returns where the media item came from, or {@link #SOURCE_TYPE_UNSPECIFIED} if not specified.
+ */
+ public @SourceType int getSourceType() {
+ return mSourceType;
+ }
+
+ /** Returns the data types that are present in the media item. */
+ public @DataType long getDataTypes() {
+ return mDataTypes;
+ }
+
+ /**
+ * Returns the duration of the media item, in milliseconds, or {@link #VALUE_UNSPECIFIED} if not
+ * specified.
+ */
+ public long getDurationMillis() {
+ return mDurationMillis;
+ }
+
+ /**
+ * Returns the duration of the clip taken from the media item, in milliseconds, or {@link
+ * #VALUE_UNSPECIFIED} if not specified.
+ */
+ public long getClipDurationMillis() {
+ return mClipDurationMillis;
+ }
+
+ /** Returns the MIME type of the media container, or {@code null} if unspecified. */
+ @Nullable
+ public String getContainerMimeType() {
+ return mContainerMimeType;
+ }
+
+ /**
+ * Returns the MIME types of samples stored in the media container, or an empty list if not
+ * known.
+ */
+ @NonNull
+ public List<String> getSampleMimeTypes() {
+ return new ArrayList<>(mSampleMimeTypes);
+ }
+
+ /**
+ * Returns the {@linkplain MediaCodec#getName() media codec names} for codecs that were used as
+ * part of encoding/decoding this media item, or an empty list if not known or not applicable.
+ */
+ @NonNull
+ public List<String> getCodecNames() {
+ return new ArrayList<>(mCodecNames);
+ }
+
+ /**
+ * Returns the sample rate of audio, in Hertz, or {@link #VALUE_UNSPECIFIED} if not specified.
+ */
+ public int getAudioSampleRateHz() {
+ return mAudioSampleRateHz;
+ }
+
+ /** Returns the number of audio channels, or {@link #VALUE_UNSPECIFIED} if not specified. */
+ public int getAudioChannelCount() {
+ return mAudioChannelCount;
+ }
+
+ /**
+ * Returns the number of audio frames in the item, after clipping (if applicable), or {@link
+ * #VALUE_UNSPECIFIED} if not specified.
+ */
+ public long getAudioSampleCount() {
+ return mAudioSampleCount;
+ }
+
+ /**
+ * Returns the video size, in pixels, or a {@link Size} with width and height set to {@link
+ * #VALUE_UNSPECIFIED} if not specified.
+ */
+ @NonNull
+ public Size getVideoSize() {
+ return mVideoSize;
+ }
+
+ /** Returns the {@linkplain DataSpace data space} for video, as a packed integer. */
+ @SuppressLint("MethodNameUnits") // Packed integer for an android.hardware.DataSpace.
+ public int getVideoDataSpace() {
+ return mVideoDataSpace;
+ }
+
+ /**
+ * Returns the average video frame rate, in frames per second, or {@link #VALUE_UNSPECIFIED} if
+ * not specified.
+ */
+ public float getVideoFrameRate() {
+ return mVideoFrameRate;
+ }
+
+ /**
+     * Returns the number of video frames, after clipping (if applicable), or {@link
+ * #VALUE_UNSPECIFIED} if not specified.
+ */
+ public long getVideoSampleCount() {
+ return mVideoSampleCount;
+ }
+
+ /** Builder for {@link MediaItemInfo}. */
+ @FlaggedApi(FLAG_ADD_MEDIA_METRICS_EDITING)
+ public static final class Builder {
+
+ private @SourceType int mSourceType;
+ private @DataType long mDataTypes;
+ private long mDurationMillis;
+ private long mClipDurationMillis;
+ @Nullable private String mContainerMimeType;
+ private final ArrayList<String> mSampleMimeTypes;
+ private final ArrayList<String> mCodecNames;
+ private int mAudioSampleRateHz;
+ private int mAudioChannelCount;
+ private long mAudioSampleCount;
+ @Nullable private Size mVideoSize;
+ private int mVideoDataSpace;
+ private float mVideoFrameRate;
+ private long mVideoSampleCount;
+
+ /** Creates a new builder. */
+ public Builder() {
+ mSourceType = SOURCE_TYPE_UNSPECIFIED;
+ mDurationMillis = VALUE_UNSPECIFIED;
+ mClipDurationMillis = VALUE_UNSPECIFIED;
+ mSampleMimeTypes = new ArrayList<>();
+ mCodecNames = new ArrayList<>();
+ mAudioSampleRateHz = VALUE_UNSPECIFIED;
+ mAudioChannelCount = VALUE_UNSPECIFIED;
+ mAudioSampleCount = VALUE_UNSPECIFIED;
+ mVideoSize = new Size(VALUE_UNSPECIFIED, VALUE_UNSPECIFIED);
+ mVideoFrameRate = VALUE_UNSPECIFIED;
+ mVideoSampleCount = VALUE_UNSPECIFIED;
+ }
+
+ /** Sets where the media item came from. */
+ public @NonNull Builder setSourceType(@SourceType int sourceType) {
+ mSourceType = sourceType;
+ return this;
+ }
+
+ /** Adds an additional data type represented as part of the media item. */
+ public @NonNull Builder addDataType(@DataType long dataType) {
+ mDataTypes |= dataType;
+ return this;
+ }
+
+ /** Sets the duration of the media item, in milliseconds. */
+ public @NonNull Builder setDurationMillis(long durationMillis) {
+ mDurationMillis = durationMillis;
+ return this;
+ }
+
+ /** Sets the duration of the clip taken from the media item, in milliseconds. */
+ public @NonNull Builder setClipDurationMillis(long clipDurationMillis) {
+ mClipDurationMillis = clipDurationMillis;
+ return this;
+ }
+
+ /** Sets the MIME type of the media container. */
+ public @NonNull Builder setContainerMimeType(@NonNull String containerMimeType) {
+ mContainerMimeType = Objects.requireNonNull(containerMimeType);
+ return this;
+ }
+
+ /** Adds a sample MIME type stored in the media container. */
+ public @NonNull Builder addSampleMimeType(@NonNull String mimeType) {
+ mSampleMimeTypes.add(Objects.requireNonNull(mimeType));
+ return this;
+ }
+
+ /**
+         * Adds a {@linkplain MediaCodec#getName() media codec name} that was used as part of
+ * decoding/encoding this media item.
+ */
+ public @NonNull Builder addCodecName(@NonNull String codecName) {
+ mCodecNames.add(Objects.requireNonNull(codecName));
+ return this;
+ }
+
+ /** Sets the sample rate of audio, in Hertz. */
+ public @NonNull Builder setAudioSampleRateHz(@IntRange(from = 0) int audioSampleRateHz) {
+ mAudioSampleRateHz = audioSampleRateHz;
+ return this;
+ }
+
+ /** Sets the number of audio channels. */
+ public @NonNull Builder setAudioChannelCount(@IntRange(from = 0) int audioChannelCount) {
+ mAudioChannelCount = audioChannelCount;
+ return this;
+ }
+
+ /** Sets the number of audio frames in the item, after clipping (if applicable). */
+ public @NonNull Builder setAudioSampleCount(@IntRange(from = 0) long audioSampleCount) {
+ mAudioSampleCount = audioSampleCount;
+ return this;
+ }
+
+ /** Sets the video size, in pixels. */
+ public @NonNull Builder setVideoSize(@NonNull Size videoSize) {
+ mVideoSize = Objects.requireNonNull(videoSize);
+ return this;
+ }
+
+ /**
+ * Sets the {@link DataSpace} of video frames.
+ *
+ * @param videoDataSpace The data space, returned by {@link DataSpace#pack(int, int, int)}.
+ */
+ public @NonNull Builder setVideoDataSpace(int videoDataSpace) {
+ mVideoDataSpace = videoDataSpace;
+ return this;
+ }
+
+ /** Sets the average video frame rate, in frames per second. */
+ public @NonNull Builder setVideoFrameRate(@FloatRange(from = 0) float videoFrameRate) {
+ mVideoFrameRate = videoFrameRate;
+ return this;
+ }
+
+ /** Sets the number of video frames, after clipping (if applicable). */
+ public @NonNull Builder setVideoSampleCount(@IntRange(from = 0) long videoSampleCount) {
+ mVideoSampleCount = videoSampleCount;
+ return this;
+ }
+
+ /** Builds an instance. */
+ @NonNull
+ public MediaItemInfo build() {
+ return new MediaItemInfo(
+ mSourceType,
+ mDataTypes,
+ mDurationMillis,
+ mClipDurationMillis,
+ mContainerMimeType,
+ mSampleMimeTypes,
+ mCodecNames,
+ mAudioSampleRateHz,
+ mAudioChannelCount,
+ mAudioSampleCount,
+ mVideoSize,
+ mVideoDataSpace,
+ mVideoFrameRate,
+ mVideoSampleCount);
+ }
+ }
+
+ @Override
+ @NonNull
+ public String toString() {
+ return "MediaItemInfo { "
+ + "sourceType = "
+ + mSourceType
+ + ", "
+ + "dataTypes = "
+ + mDataTypes
+ + ", "
+ + "durationMillis = "
+ + mDurationMillis
+ + ", "
+ + "clipDurationMillis = "
+ + mClipDurationMillis
+ + ", "
+ + "containerMimeType = "
+ + mContainerMimeType
+ + ", "
+ + "sampleMimeTypes = "
+ + mSampleMimeTypes
+ + ", "
+ + "codecNames = "
+ + mCodecNames
+ + ", "
+ + "audioSampleRateHz = "
+ + mAudioSampleRateHz
+ + ", "
+ + "audioChannelCount = "
+ + mAudioChannelCount
+ + ", "
+ + "audioSampleCount = "
+ + mAudioSampleCount
+ + ", "
+ + "videoSize = "
+ + mVideoSize
+ + ", "
+ + "videoDataSpace = "
+ + mVideoDataSpace
+ + ", "
+ + "videoFrameRate = "
+ + mVideoFrameRate
+ + ", "
+ + "videoSampleCount = "
+ + mVideoSampleCount
+ + " }";
+ }
+
+ @Override
+ public boolean equals(@Nullable Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ MediaItemInfo that = (MediaItemInfo) o;
+ return mSourceType == that.mSourceType
+ && mDataTypes == that.mDataTypes
+ && mDurationMillis == that.mDurationMillis
+ && mClipDurationMillis == that.mClipDurationMillis
+ && Objects.equals(mContainerMimeType, that.mContainerMimeType)
+ && mSampleMimeTypes.equals(that.mSampleMimeTypes)
+ && mCodecNames.equals(that.mCodecNames)
+ && mAudioSampleRateHz == that.mAudioSampleRateHz
+ && mAudioChannelCount == that.mAudioChannelCount
+ && mAudioSampleCount == that.mAudioSampleCount
+ && Objects.equals(mVideoSize, that.mVideoSize)
+                && mVideoDataSpace == that.mVideoDataSpace
+ && mVideoFrameRate == that.mVideoFrameRate
+ && mVideoSampleCount == that.mVideoSampleCount;
+ }
+
+ @Override
+ public int hashCode() {
+        return Objects.hash(
+                mSourceType, mDataTypes, mDurationMillis, mClipDurationMillis,
+                mContainerMimeType, mSampleMimeTypes, mCodecNames, mAudioSampleRateHz,
+                mAudioChannelCount, mAudioSampleCount, mVideoSize, mVideoDataSpace,
+                mVideoFrameRate, mVideoSampleCount);
+ }
+
+ @Override
+ public void writeToParcel(@NonNull Parcel dest, int flags) {
+ dest.writeInt(mSourceType);
+ dest.writeLong(mDataTypes);
+ dest.writeLong(mDurationMillis);
+ dest.writeLong(mClipDurationMillis);
+ dest.writeString(mContainerMimeType);
+ dest.writeStringList(mSampleMimeTypes);
+ dest.writeStringList(mCodecNames);
+ dest.writeInt(mAudioSampleRateHz);
+ dest.writeInt(mAudioChannelCount);
+ dest.writeLong(mAudioSampleCount);
+ dest.writeInt(mVideoSize.getWidth());
+ dest.writeInt(mVideoSize.getHeight());
+ dest.writeInt(mVideoDataSpace);
+ dest.writeFloat(mVideoFrameRate);
+ dest.writeLong(mVideoSampleCount);
+ }
+
+ @Override
+ public int describeContents() {
+ return 0;
+ }
+
+ private MediaItemInfo(@NonNull Parcel in) {
+ mSourceType = in.readInt();
+ mDataTypes = in.readLong();
+ mDurationMillis = in.readLong();
+ mClipDurationMillis = in.readLong();
+ mContainerMimeType = in.readString();
+ mSampleMimeTypes = new ArrayList<>();
+ in.readStringList(mSampleMimeTypes);
+ mCodecNames = new ArrayList<>();
+ in.readStringList(mCodecNames);
+ mAudioSampleRateHz = in.readInt();
+ mAudioChannelCount = in.readInt();
+ mAudioSampleCount = in.readLong();
+ int videoSizeWidth = in.readInt();
+ int videoSizeHeight = in.readInt();
+ mVideoSize = new Size(videoSizeWidth, videoSizeHeight);
+ mVideoDataSpace = in.readInt();
+ mVideoFrameRate = in.readFloat();
+ mVideoSampleCount = in.readLong();
+ }
+
+ public static final @NonNull Creator<MediaItemInfo> CREATOR =
+ new Creator<>() {
+ @Override
+ public MediaItemInfo[] newArray(int size) {
+ return new MediaItemInfo[size];
+ }
+
+ @Override
+ public MediaItemInfo createFromParcel(@NonNull Parcel in) {
+ return new MediaItemInfo(in);
+ }
+ };
+}
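
The new Parcelable reads its fields back in exactly the order writeToParcel() writes them, so a parcel round trip preserves equality. A test-body sketch of that invariant (illustrative only; the builder values are arbitrary):

    import android.media.metrics.MediaItemInfo;
    import android.os.Parcel;

    MediaItemInfo original = new MediaItemInfo.Builder()
            .setSourceType(MediaItemInfo.SOURCE_TYPE_GALLERY)
            .setDurationMillis(30_000)
            .addSampleMimeType("audio/mp4a-latm")
            .build();
    Parcel parcel = Parcel.obtain();
    try {
        original.writeToParcel(parcel, /* flags= */ 0);
        parcel.setDataPosition(0);
        MediaItemInfo restored = MediaItemInfo.CREATOR.createFromParcel(parcel);
        // equals() compares every field, so restored.equals(original) is expected to hold.
    } finally {
        parcel.recycle();
    }
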
diff --git a/services/core/java/com/android/server/media/metrics/MediaMetricsManagerService.java b/services/core/java/com/android/server/media/metrics/MediaMetricsManagerService.java
index bbe6d3a..2cd8fe0 100644
--- a/services/core/java/com/android/server/media/metrics/MediaMetricsManagerService.java
+++ b/services/core/java/com/android/server/media/metrics/MediaMetricsManagerService.java
@@ -18,10 +18,13 @@
import android.content.Context;
import android.content.pm.PackageManager;
+import android.hardware.DataSpace;
import android.media.MediaMetrics;
+import android.media.codec.Enums;
import android.media.metrics.BundleSession;
import android.media.metrics.EditingEndedEvent;
import android.media.metrics.IMediaMetricsManager;
+import android.media.metrics.MediaItemInfo;
import android.media.metrics.NetworkEvent;
import android.media.metrics.PlaybackErrorEvent;
import android.media.metrics.PlaybackMetrics;
@@ -31,7 +34,9 @@
import android.os.PersistableBundle;
import android.provider.DeviceConfig;
import android.provider.DeviceConfig.Properties;
+import android.text.TextUtils;
import android.util.Base64;
+import android.util.Size;
import android.util.Slog;
import android.util.StatsEvent;
import android.util.StatsLog;
@@ -72,7 +77,14 @@
private static final String mMetricsId = MediaMetrics.Name.METRICS_MANAGER;
private static final String FAILED_TO_GET = "failed_to_get";
+
+ private static final MediaItemInfo EMPTY_MEDIA_ITEM_INFO = new MediaItemInfo.Builder().build();
+ private static final int DURATION_BUCKETS_BELOW_ONE_MINUTE = 8;
+ private static final int DURATION_BUCKETS_COUNT = 13;
+ private static final String AUDIO_MIME_TYPE_PREFIX = "audio/";
+ private static final String VIDEO_MIME_TYPE_PREFIX = "video/";
private final SecureRandom mSecureRandom;
+
@GuardedBy("mLock")
private Integer mMode = null;
@GuardedBy("mLock")
@@ -353,6 +365,51 @@
if (level == LOGGING_LEVEL_BLOCKED) {
return;
}
+ MediaItemInfo inputMediaItemInfo =
+ event.getInputMediaItemInfos().isEmpty()
+ ? EMPTY_MEDIA_ITEM_INFO
+ : event.getInputMediaItemInfos().get(0);
+ String inputAudioSampleMimeType =
+ getFilteredFirstMimeType(
+ inputMediaItemInfo.getSampleMimeTypes(), AUDIO_MIME_TYPE_PREFIX);
+ String inputVideoSampleMimeType =
+ getFilteredFirstMimeType(
+ inputMediaItemInfo.getSampleMimeTypes(), VIDEO_MIME_TYPE_PREFIX);
+ Size inputVideoSize = inputMediaItemInfo.getVideoSize();
+ int inputVideoResolution = getVideoResolutionEnum(inputVideoSize);
+ if (inputVideoResolution == Enums.RESOLUTION_UNKNOWN) {
+ // Try swapping width/height in case it's a portrait video.
+ inputVideoResolution =
+ getVideoResolutionEnum(
+ new Size(inputVideoSize.getHeight(), inputVideoSize.getWidth()));
+ }
+ List<String> inputCodecNames = inputMediaItemInfo.getCodecNames();
+ String inputFirstCodecName = !inputCodecNames.isEmpty() ? inputCodecNames.get(0) : "";
+ String inputSecondCodecName = inputCodecNames.size() > 1 ? inputCodecNames.get(1) : "";
+
+ MediaItemInfo outputMediaItemInfo =
+ event.getOutputMediaItemInfo() == null
+ ? EMPTY_MEDIA_ITEM_INFO
+ : event.getOutputMediaItemInfo();
+ String outputAudioSampleMimeType =
+ getFilteredFirstMimeType(
+ outputMediaItemInfo.getSampleMimeTypes(), AUDIO_MIME_TYPE_PREFIX);
+ String outputVideoSampleMimeType =
+ getFilteredFirstMimeType(
+ outputMediaItemInfo.getSampleMimeTypes(), VIDEO_MIME_TYPE_PREFIX);
+ Size outputVideoSize = outputMediaItemInfo.getVideoSize();
+ int outputVideoResolution = getVideoResolutionEnum(outputVideoSize);
+ if (outputVideoResolution == Enums.RESOLUTION_UNKNOWN) {
+ // Try swapping width/height in case it's a portrait video.
+ outputVideoResolution =
+ getVideoResolutionEnum(
+ new Size(outputVideoSize.getHeight(), outputVideoSize.getWidth()));
+ }
+ List<String> outputCodecNames = outputMediaItemInfo.getCodecNames();
+ String outputFirstCodecName =
+ !outputCodecNames.isEmpty() ? outputCodecNames.get(0) : "";
+ String outputSecondCodecName =
+ outputCodecNames.size() > 1 ? outputCodecNames.get(1) : "";
StatsEvent statsEvent =
StatsEvent.newBuilder()
.setAtomId(798)
@@ -360,6 +417,66 @@
.writeInt(event.getFinalState())
.writeInt(event.getErrorCode())
.writeLong(event.getTimeSinceCreatedMillis())
+ .writeInt(getThroughputFps(event))
+ .writeInt(event.getInputMediaItemInfos().size())
+ .writeInt(inputMediaItemInfo.getSourceType())
+ .writeLong(
+ getBucketedDurationMillis(
+ inputMediaItemInfo.getDurationMillis()))
+ .writeLong(
+ getBucketedDurationMillis(
+ inputMediaItemInfo.getClipDurationMillis()))
+ .writeString(
+ getFilteredMimeType(inputMediaItemInfo.getContainerMimeType()))
+ .writeString(inputAudioSampleMimeType)
+ .writeString(inputVideoSampleMimeType)
+ .writeInt(getCodecEnum(inputVideoSampleMimeType))
+ .writeInt(
+ getFilteredAudioSampleRateHz(
+ inputMediaItemInfo.getAudioSampleRateHz()))
+ .writeInt(inputMediaItemInfo.getAudioChannelCount())
+ .writeInt(inputVideoSize.getWidth())
+ .writeInt(inputVideoSize.getHeight())
+ .writeInt(inputVideoResolution)
+ .writeInt(getVideoResolutionAspectRatioEnum(inputVideoSize))
+ .writeInt(inputMediaItemInfo.getVideoDataSpace())
+ .writeInt(
+ getVideoHdrFormatEnum(
+ inputMediaItemInfo.getVideoDataSpace(),
+ inputVideoSampleMimeType))
+ .writeInt(Math.round(inputMediaItemInfo.getVideoFrameRate()))
+ .writeInt(getVideoFrameRateEnum(inputMediaItemInfo.getVideoFrameRate()))
+ .writeString(inputFirstCodecName)
+ .writeString(inputSecondCodecName)
+ .writeLong(
+ getBucketedDurationMillis(
+ outputMediaItemInfo.getDurationMillis()))
+ .writeLong(
+ getBucketedDurationMillis(
+ outputMediaItemInfo.getClipDurationMillis()))
+ .writeString(
+ getFilteredMimeType(outputMediaItemInfo.getContainerMimeType()))
+ .writeString(outputAudioSampleMimeType)
+ .writeString(outputVideoSampleMimeType)
+ .writeInt(getCodecEnum(outputVideoSampleMimeType))
+ .writeInt(
+ getFilteredAudioSampleRateHz(
+ outputMediaItemInfo.getAudioSampleRateHz()))
+ .writeInt(outputMediaItemInfo.getAudioChannelCount())
+ .writeInt(outputVideoSize.getWidth())
+ .writeInt(outputVideoSize.getHeight())
+ .writeInt(outputVideoResolution)
+ .writeInt(getVideoResolutionAspectRatioEnum(outputVideoSize))
+ .writeInt(outputMediaItemInfo.getVideoDataSpace())
+ .writeInt(
+ getVideoHdrFormatEnum(
+ outputMediaItemInfo.getVideoDataSpace(),
+ outputVideoSampleMimeType))
+ .writeInt(Math.round(outputMediaItemInfo.getVideoFrameRate()))
+ .writeInt(
+ getVideoFrameRateEnum(outputMediaItemInfo.getVideoFrameRate()))
+ .writeString(outputFirstCodecName)
+ .writeString(outputSecondCodecName)
.usePooledBuffer()
.build();
StatsLog.write(statsEvent);
@@ -511,4 +628,215 @@
}
}
}
+
+ private static int getThroughputFps(EditingEndedEvent event) {
+ MediaItemInfo outputMediaItemInfo = event.getOutputMediaItemInfo();
+ if (outputMediaItemInfo == null) {
+ return -1;
+ }
+ long videoSampleCount = outputMediaItemInfo.getVideoSampleCount();
+ if (videoSampleCount == MediaItemInfo.VALUE_UNSPECIFIED) {
+ return -1;
+ }
+ long elapsedTimeMs = event.getTimeSinceCreatedMillis();
+ if (elapsedTimeMs == EditingEndedEvent.TIME_SINCE_CREATED_UNKNOWN) {
+ return -1;
+ }
+ return (int)
+ Math.min(Integer.MAX_VALUE, Math.round(1000.0 * videoSampleCount / elapsedTimeMs));
+ }
+
+ private static long getBucketedDurationMillis(long durationMillis) {
+ if (durationMillis == MediaItemInfo.VALUE_UNSPECIFIED || durationMillis <= 0) {
+ return -1;
+ }
+ // Bucket values in an exponential distribution to reduce the precision that's stored:
+ // bucket index -> range -> bucketed duration
+ // 1 -> [0, 469 ms) -> 235 ms
+ // 2 -> [469 ms, 938 ms) -> 469 ms
+ // 3 -> [938 ms, 1875 ms) -> 938 ms
+ // 4 -> [1875 ms, 3750 ms) -> 1875 ms
+ // 5 -> [3750 ms, 7500 ms) -> 3750 ms
+ // [...]
+ // 13 -> [960000 ms, max) -> 960000 ms
+ int bucketIndex =
+ (int)
+ Math.floor(
+ DURATION_BUCKETS_BELOW_ONE_MINUTE
+ + Math.log((durationMillis + 1) / 60_000.0) / Math.log(2));
+ // Clamp to range [0, DURATION_BUCKETS_COUNT].
+ bucketIndex = Math.min(DURATION_BUCKETS_COUNT, Math.max(0, bucketIndex));
+ // Map back onto the representative value for the bucket.
+ return (long)
+ Math.ceil(Math.pow(2, bucketIndex - DURATION_BUCKETS_BELOW_ONE_MINUTE) * 60_000.0);
+ }
+
+ /**
+ * Returns the first entry in {@code mimeTypes} with the given prefix, if it matches the
+ * filtering allowlist. If no entries match the prefix or if the first matching entry is not on
+ * the allowlist, returns an empty string.
+ */
+ private static String getFilteredFirstMimeType(List<String> mimeTypes, String prefix) {
+ int size = mimeTypes.size();
+ for (int i = 0; i < size; i++) {
+ String mimeType = mimeTypes.get(i);
+ if (mimeType.startsWith(prefix)) {
+ return getFilteredMimeType(mimeType);
+ }
+ }
+ return "";
+ }
+
+ private static String getFilteredMimeType(String mimeType) {
+ if (TextUtils.isEmpty(mimeType)) {
+ return "";
+ }
+ // Discard all inputs that aren't allowlisted MIME types.
+ return switch (mimeType) {
+ case "video/mp4",
+ "video/x-matroska",
+ "video/webm",
+ "video/3gpp",
+ "video/avc",
+ "video/hevc",
+ "video/x-vnd.on2.vp8",
+ "video/x-vnd.on2.vp9",
+ "video/av01",
+ "video/mp2t",
+ "video/mp4v-es",
+ "video/mpeg",
+ "video/x-flv",
+ "video/dolby-vision",
+ "video/raw",
+ "audio/mp4",
+ "audio/mp4a-latm",
+ "audio/x-matroska",
+ "audio/webm",
+ "audio/mpeg",
+ "audio/mpeg-L1",
+ "audio/mpeg-L2",
+ "audio/ac3",
+ "audio/eac3",
+ "audio/eac3-joc",
+ "audio/av4",
+ "audio/true-hd",
+ "audio/vnd.dts",
+ "audio/vnd.dts.hd",
+ "audio/vorbis",
+ "audio/opus",
+ "audio/flac",
+ "audio/ogg",
+ "audio/wav",
+ "audio/midi",
+ "audio/raw",
+ "application/mp4",
+ "application/webm",
+ "application/x-matroska",
+ "application/dash+xml",
+ "application/x-mpegURL",
+ "application/vnd.ms-sstr+xml" ->
+ mimeType;
+ default -> "";
+ };
+ }
+
+ private static int getCodecEnum(String mimeType) {
+ if (TextUtils.isEmpty(mimeType)) {
+ return Enums.CODEC_UNKNOWN;
+ }
+ return switch (mimeType) {
+ case "video/avc" -> Enums.CODEC_AVC;
+ case "video/hevc" -> Enums.CODEC_HEVC;
+ case "video/x-vnd.on2.vp8" -> Enums.CODEC_VP8;
+ case "video/x-vnd.on2.vp9" -> Enums.CODEC_VP9;
+ case "video/av01" -> Enums.CODEC_AV1;
+ default -> Enums.CODEC_UNKNOWN;
+ };
+ }
+
+ private static int getFilteredAudioSampleRateHz(int sampleRateHz) {
+ return switch (sampleRateHz) {
+ case 8000, 11025, 16000, 22050, 44100, 48000, 96000, 192000 -> sampleRateHz;
+ default -> -1;
+ };
+ }
+
+ private static int getVideoResolutionEnum(Size size) {
+ int width = size.getWidth();
+ int height = size.getHeight();
+ if (width == 352 && height == 640) {
+ return Enums.RESOLUTION_352X640;
+ } else if (width == 360 && height == 640) {
+ return Enums.RESOLUTION_360X640;
+ } else if (width == 480 && height == 640) {
+ return Enums.RESOLUTION_480X640;
+ } else if (width == 480 && height == 854) {
+ return Enums.RESOLUTION_480X854;
+ } else if (width == 540 && height == 960) {
+ return Enums.RESOLUTION_540X960;
+ } else if (width == 576 && height == 1024) {
+ return Enums.RESOLUTION_576X1024;
+ } else if (width == 1280 && height == 720) {
+ return Enums.RESOLUTION_720P_HD;
+ } else if (width == 1920 && height == 1080) {
+ return Enums.RESOLUTION_1080P_FHD;
+ } else if (width == 1440 && height == 2560) {
+ return Enums.RESOLUTION_1440X2560;
+ } else if (width == 3840 && height == 2160) {
+ return Enums.RESOLUTION_4K_UHD;
+ } else if (width == 7680 && height == 4320) {
+ return Enums.RESOLUTION_8K_UHD;
+ } else {
+ return Enums.RESOLUTION_UNKNOWN;
+ }
+ }
+
+ private static int getVideoResolutionAspectRatioEnum(Size size) {
+ int width = size.getWidth();
+ int height = size.getHeight();
+ if (width <= 0 || height <= 0) {
+ return android.media.editing.Enums.RESOLUTION_ASPECT_RATIO_UNSPECIFIED;
+ } else if (width < height) {
+ return android.media.editing.Enums.RESOLUTION_ASPECT_RATIO_PORTRAIT;
+ } else if (height < width) {
+ return android.media.editing.Enums.RESOLUTION_ASPECT_RATIO_LANDSCAPE;
+ } else {
+ return android.media.editing.Enums.RESOLUTION_ASPECT_RATIO_SQUARE;
+ }
+ }
+
+ private static int getVideoHdrFormatEnum(int dataSpace, String mimeType) {
+ if (dataSpace == DataSpace.DATASPACE_UNKNOWN) {
+ return Enums.HDR_FORMAT_UNKNOWN;
+ }
+ if (mimeType.equals("video/dolby-vision")) {
+ return Enums.HDR_FORMAT_DOLBY_VISION;
+ }
+ int standard = DataSpace.getStandard(dataSpace);
+ int transfer = DataSpace.getTransfer(dataSpace);
+ if (standard == DataSpace.STANDARD_BT2020 && transfer == DataSpace.TRANSFER_HLG) {
+ return Enums.HDR_FORMAT_HLG;
+ }
+ if (standard == DataSpace.STANDARD_BT2020 && transfer == DataSpace.TRANSFER_ST2084) {
+ // We don't currently distinguish HDR10+ from HDR10.
+ return Enums.HDR_FORMAT_HDR10;
+ }
+ return Enums.HDR_FORMAT_NONE;
+ }
+
+ private static int getVideoFrameRateEnum(float frameRate) {
+ int frameRateInt = Math.round(frameRate);
+ return switch (frameRateInt) {
+ case 24 -> Enums.FRAMERATE_24;
+ case 25 -> Enums.FRAMERATE_25;
+ case 30 -> Enums.FRAMERATE_30;
+ case 50 -> Enums.FRAMERATE_50;
+ case 60 -> Enums.FRAMERATE_60;
+ case 120 -> Enums.FRAMERATE_120;
+ case 240 -> Enums.FRAMERATE_240;
+ case 480 -> Enums.FRAMERATE_480;
+ case 960 -> Enums.FRAMERATE_960;
+ default -> Enums.FRAMERATE_UNKNOWN;
+ };
+ }
}
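
For reference, the new private helpers map concrete values as follows. The standalone method below mirrors getBucketedDurationMillis() with its two constants inlined, purely so the mapping is easy to check by hand; it is not part of the change.

    // Mirror of getBucketedDurationMillis(): 8 buckets below one minute, index clamped to 13.
    static long bucketDurationMillis(long durationMillis) {
        if (durationMillis <= 0) {
            return -1;
        }
        int bucketIndex =
                (int) Math.floor(8 + Math.log((durationMillis + 1) / 60_000.0) / Math.log(2));
        bucketIndex = Math.min(13, Math.max(0, bucketIndex));
        return (long) Math.ceil(Math.pow(2, bucketIndex - 8) * 60_000.0);
    }

    // bucketDurationMillis(90_000)  == 60_000   (90 s lands in roughly the [60 s, 120 s) bucket)
    // bucketDurationMillis(120_000) == 120_000
    // Throughput: an output item with 900 video frames and timeSinceCreatedMillis == 30_000
    // is reported as Math.round(1000.0 * 900 / 30_000) = 30 fps by getThroughputFps().
    // MIME filtering: getFilteredFirstMimeType(List.of("audio/opus", "video/avc"), "video/")
    // returns "video/avc", since it is the first "video/" entry and is on the allowlist.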