Merge "VT: fixed an infinite loop of RR sending." into sc-v2-dev am: d833cf079b
Original change: https://googleplex-android-review.googlesource.com/c/platform/frameworks/av/+/16663697
Change-Id: I01e832cc52d627260d8737d1d2d1234fea48d979
diff --git a/.clang-format b/.clang-format
new file mode 100644
index 0000000..a7614d2
--- /dev/null
+++ b/.clang-format
@@ -0,0 +1,13 @@
+BasedOnStyle: Google
+Standard: Cpp11
+AccessModifierOffset: -2
+AllowShortFunctionsOnASingleLine: Inline
+ColumnLimit: 100
+CommentPragmas: NOLINT:.*
+DerivePointerAlignment: false
+IncludeBlocks: Preserve
+IndentWidth: 4
+ContinuationIndentWidth: 8
+PointerAlignment: Left
+TabWidth: 4
+UseTab: Never
diff --git a/Android.bp b/Android.bp
index 60f0ff1..ee609e1 100644
--- a/Android.bp
+++ b/Android.bp
@@ -57,7 +57,7 @@
min_sdk_version: "29",
apex_available: [
"//apex_available:platform",
- "com.android.bluetooth.updatable",
+ "com.android.bluetooth",
"com.android.media",
"com.android.media.swcodec",
],
@@ -86,7 +86,7 @@
min_sdk_version: "29",
apex_available: [
"//apex_available:platform",
- "com.android.bluetooth.updatable",
+ "com.android.bluetooth",
"com.android.media",
"com.android.media.swcodec",
],
diff --git a/OWNERS b/OWNERS
index 0be1196..40c65e7 100644
--- a/OWNERS
+++ b/OWNERS
@@ -1,7 +1,6 @@
# Bug component: 1344
elaurent@google.com
etalvala@google.com
-hkuang@google.com
lajos@google.com
# go/android-fwk-media-solutions for info on areas of ownership.
diff --git a/apex/OWNERS b/apex/OWNERS
index 5587f5f..54802d4 100644
--- a/apex/OWNERS
+++ b/apex/OWNERS
@@ -1,6 +1,7 @@
-chz@google.com
-dwkang@google.com
+essick@google.com
jiyong@google.com
lajos@google.com
-marcone@google.com
-wjia@google.com
+nchalko@google.com
+
+include platform/packages/modules/common:/MODULES_OWNERS
+
diff --git a/apex/mediaswcodec.32rc b/apex/mediaswcodec.32rc
new file mode 100644
index 0000000..79aef36
--- /dev/null
+++ b/apex/mediaswcodec.32rc
@@ -0,0 +1,6 @@
+service media.swcodec /apex/com.android.media.swcodec/bin/mediaswcodec
+ class main
+ user mediacodec
+ group camera drmrpc mediadrm
+ ioprio rt 4
+ task_profiles ProcessCapacityHigh
diff --git a/apex/mediatranscoding.32rc b/apex/mediatranscoding.32rc
new file mode 100644
index 0000000..5169462
--- /dev/null
+++ b/apex/mediatranscoding.32rc
@@ -0,0 +1,12 @@
+# The media.transcoding service is defined in the com.android.media apex, which goes back
+# to API 29, but we only want it started on API 31+ devices. So we declare it as
+# "disabled" and start it explicitly on boot.
+service media.transcoding /apex/com.android.media/bin/mediatranscoding
+ class main
+ user media
+ group media
+ ioprio rt 4
+ # Restrict to little cores only with system-background cpuset.
+ task_profiles ServiceCapacityLow
+ interface aidl media.transcoding
+ disabled
diff --git a/camera/Android.bp b/camera/Android.bp
index 6878c20..4ed3269 100644
--- a/camera/Android.bp
+++ b/camera/Android.bp
@@ -43,6 +43,10 @@
],
}
+cc_library_headers {
+ name: "camera_headers",
+ export_include_dirs: ["include"],
+}
cc_library_shared {
name: "libcamera_client",
diff --git a/camera/OWNERS b/camera/OWNERS
index 1b548e4..2a1d523 100644
--- a/camera/OWNERS
+++ b/camera/OWNERS
@@ -1,3 +1,4 @@
+
# Bug component: 41727
etalvala@google.com
arakesh@google.com
@@ -5,4 +6,3 @@
jchowdhary@google.com
shuzhenwang@google.com
ruchamk@google.com
-
diff --git a/camera/ndk/impl/ACameraManager.h b/camera/ndk/impl/ACameraManager.h
index ccbfaa9..da887a2 100644
--- a/camera/ndk/impl/ACameraManager.h
+++ b/camera/ndk/impl/ACameraManager.h
@@ -139,6 +139,8 @@
return !(*this == other);
}
bool operator < (const Callback& other) const {
+#pragma GCC diagnostic push
+#pragma GCC diagnostic ignored "-Wordered-compare-function-pointers"
if (*this == other) return false;
if (mContext != other.mContext) return mContext < other.mContext;
if (mPhysicalCamAvailable != other.mPhysicalCamAvailable) {
@@ -152,6 +154,7 @@
}
if (mAvailable != other.mAvailable) return mAvailable < other.mAvailable;
return mUnavailable < other.mUnavailable;
+#pragma GCC diagnostic pop
}
bool operator > (const Callback& other) const {
return (*this != other && !(*this < other));
diff --git a/camera/ndk/impl/ACameraMetadata.cpp b/camera/ndk/impl/ACameraMetadata.cpp
index dab2fef..05124c0 100644
--- a/camera/ndk/impl/ACameraMetadata.cpp
+++ b/camera/ndk/impl/ACameraMetadata.cpp
@@ -182,7 +182,7 @@
int64_t format = entry.data.i64[i + STREAM_FORMAT_OFFSET];
int64_t width = entry.data.i64[i + STREAM_WIDTH_OFFSET];
int64_t height = entry.data.i64[i + STREAM_HEIGHT_OFFSET];
- int64_t duration = entry.data.i32[i + STREAM_DURATION_OFFSET];
+ int64_t duration = entry.data.i64[i + STREAM_DURATION_OFFSET];
// Leave the unfiltered format in so apps depending on previous wrong
// filter behavior continue to work
diff --git a/camera/ndk/ndk_vendor/impl/ACameraManager.h b/camera/ndk/ndk_vendor/impl/ACameraManager.h
index 8359bb1..4663529 100644
--- a/camera/ndk/ndk_vendor/impl/ACameraManager.h
+++ b/camera/ndk/ndk_vendor/impl/ACameraManager.h
@@ -136,6 +136,8 @@
return !(*this == other);
}
bool operator < (const Callback& other) const {
+#pragma GCC diagnostic push
+#pragma GCC diagnostic ignored "-Wordered-compare-function-pointers"
if (*this == other) return false;
if (mContext != other.mContext) return mContext < other.mContext;
if (mAvailable != other.mAvailable) return mAvailable < other.mAvailable;
@@ -146,6 +148,7 @@
if (mPhysicalCamUnavailable != other.mPhysicalCamUnavailable)
return mPhysicalCamUnavailable < other.mPhysicalCamUnavailable;
return mUnavailable < other.mUnavailable;
+#pragma GCC diagnostic pop
}
bool operator > (const Callback& other) const {
return (*this != other && !(*this < other));
diff --git a/cmds/OWNERS b/cmds/OWNERS
index 0d32aac..a48c37a 100644
--- a/cmds/OWNERS
+++ b/cmds/OWNERS
@@ -1,3 +1,3 @@
elaurent@google.com
+essick@google.com
lajos@google.com
-marcone@google.com
diff --git a/cmds/stagefright/Android.bp b/cmds/stagefright/Android.bp
new file mode 100644
index 0000000..c4783d3
--- /dev/null
+++ b/cmds/stagefright/Android.bp
@@ -0,0 +1,278 @@
+package {
+ default_applicable_licenses: ["frameworks_av_cmds_stagefright_license"],
+}
+
+// Added automatically by a large-scale-change
+// See: http://go/android-license-faq
+license {
+ name: "frameworks_av_cmds_stagefright_license",
+ visibility: [":__subpackages__"],
+ license_kinds: [
+ "SPDX-license-identifier-Apache-2.0",
+ ],
+ license_text: [
+ "NOTICE",
+ ],
+}
+
+cc_binary {
+ name: "stagefright",
+
+ srcs: [
+ "AudioPlayer.cpp",
+ "stagefright.cpp",
+ "jpeg.cpp",
+ "SineSource.cpp",
+ ],
+
+ header_libs: [
+ "libmediametrics_headers",
+ "libstagefright_headers",
+ ],
+
+ shared_libs: [
+ "libstagefright",
+ "libmedia",
+ "libmedia_codeclist",
+ "libutils",
+ "libbinder",
+ "libstagefright_foundation",
+ "libjpeg",
+ "libui",
+ "libgui",
+ "libcutils",
+ "liblog",
+ "libhidlbase",
+ "libdatasource",
+ "libaudioclient",
+ "android.hardware.media.omx@1.0",
+ "framework-permission-aidl-cpp",
+ ],
+
+ static_libs: ["framework-permission-aidl-cpp"],
+
+ cflags: [
+ "-Wno-multichar",
+ ],
+
+ system_ext_specific: true,
+}
+
+cc_binary {
+ name: "record",
+
+ srcs: [
+ "AudioPlayer.cpp",
+ "SineSource.cpp",
+ "record.cpp",
+ ],
+
+ header_libs: [
+ "libmediametrics_headers",
+ "libstagefright_headers",
+ "camera_headers",
+ ],
+
+ shared_libs: [
+ "libstagefright",
+ "libmedia",
+ "liblog",
+ "libutils",
+ "libbinder",
+ "libstagefright_foundation",
+ "libdatasource",
+ "libaudioclient",
+ "framework-permission-aidl-cpp",
+ ],
+
+ cflags: [
+ "-Wno-multichar",
+ ],
+}
+
+cc_binary {
+ name: "recordvideo",
+
+ srcs: [
+ "AudioPlayer.cpp",
+ "recordvideo.cpp",
+ ],
+
+ header_libs: [
+ "libmediametrics_headers",
+ "libstagefright_headers",
+ ],
+
+ shared_libs: [
+ "libstagefright",
+ "libmedia",
+ "liblog",
+ "libutils",
+ "libbinder",
+ "libstagefright_foundation",
+ "libaudioclient",
+ "framework-permission-aidl-cpp",
+ ],
+
+ cflags: [
+ "-Wno-multichar",
+ ],
+}
+
+cc_binary {
+ name: "audioloop",
+
+ srcs: [
+ "AudioPlayer.cpp",
+ "SineSource.cpp",
+ "audioloop.cpp",
+ ],
+
+ header_libs: [
+ "libmediametrics_headers",
+ "libstagefright_headers",
+ ],
+
+ shared_libs: [
+ "libstagefright",
+ "libmedia",
+ "liblog",
+ "libutils",
+ "libbinder",
+ "libstagefright_foundation",
+ "libaudioclient",
+ "framework-permission-aidl-cpp",
+ ],
+
+ cflags: [
+ "-Wno-multichar",
+ ],
+}
+
+cc_binary {
+ name: "stream",
+
+ srcs: ["stream.cpp"],
+
+ header_libs: [
+ "libmediametrics_headers",
+ "libstagefright_headers",
+ ],
+
+ shared_libs: [
+ "libstagefright",
+ "liblog",
+ "libutils",
+ "libbinder",
+ "libui",
+ "libgui",
+ "libstagefright_foundation",
+ "libmedia",
+ "libcutils",
+ "libdatasource",
+ ],
+
+ cflags: [
+ "-Wno-multichar",
+ ],
+}
+
+cc_binary {
+ name: "codec",
+
+ srcs: [
+ "codec.cpp",
+ "SimplePlayer.cpp",
+ ],
+
+ header_libs: [
+ "libmediadrm_headers",
+ "libmediametrics_headers",
+ "libstagefright_headers",
+ ],
+
+ shared_libs: [
+ "libstagefright",
+ "liblog",
+ "libutils",
+ "libbinder",
+ "libstagefright_foundation",
+ "libmedia",
+ "libmedia_omx",
+ "libaudioclient",
+ "libui",
+ "libgui",
+ "libcutils",
+ ],
+
+ cflags: [
+ "-Wno-multichar",
+ ],
+}
+
+cc_binary {
+ name: "mediafilter",
+
+ srcs: [
+ "filters/argbtorgba.rscript",
+ "filters/nightvision.rscript",
+ "filters/saturation.rscript",
+ "mediafilter.cpp",
+ ],
+
+ header_libs: [
+ "libmediadrm_headers",
+ "libmediametrics_headers",
+ "libstagefright_headers",
+ "rs-headers",
+ ],
+
+ include_dirs: ["frameworks/av/media/libstagefright"],
+
+ shared_libs: [
+ "libstagefright",
+ "liblog",
+ "libutils",
+ "libbinder",
+ "libstagefright_foundation",
+ "libmedia_omx",
+ "libui",
+ "libgui",
+ "libRScpp",
+ ],
+
+ static_libs: ["libstagefright_mediafilter"],
+
+ cflags: [
+ "-Wno-multichar",
+ ],
+
+ sanitize: {
+ cfi: true,
+ },
+}
+
+cc_binary {
+ name: "muxer",
+
+ srcs: ["muxer.cpp"],
+
+ header_libs: [
+ "libmediametrics_headers",
+ "libstagefright_headers",
+ ],
+
+ shared_libs: [
+ "libstagefright",
+ "liblog",
+ "libutils",
+ "libbinder",
+ "libstagefright_foundation",
+ "libcutils",
+ "libc",
+ ],
+
+ cflags: [
+ "-Wno-multichar",
+ ],
+}
diff --git a/cmds/stagefright/Android.mk b/cmds/stagefright/Android.mk
deleted file mode 100644
index 803c4a4..0000000
--- a/cmds/stagefright/Android.mk
+++ /dev/null
@@ -1,276 +0,0 @@
-LOCAL_PATH:= $(call my-dir)
-
-include $(CLEAR_VARS)
-
-LOCAL_SRC_FILES:= \
- AudioPlayer.cpp \
- stagefright.cpp \
- jpeg.cpp \
- SineSource.cpp
-
-LOCAL_HEADER_LIBRARIES := \
- libmediametrics_headers \
-
-LOCAL_SHARED_LIBRARIES := \
- libstagefright libmedia libmedia_codeclist libutils libbinder \
- libstagefright_foundation libjpeg libui libgui libcutils liblog \
- libhidlbase libdatasource libaudioclient \
- android.hardware.media.omx@1.0 \
- framework-permission-aidl-cpp
-
-LOCAL_STATIC_LIBRARIES := framework-permission-aidl-cpp
-
-LOCAL_C_INCLUDES:= \
- frameworks/av/media/libstagefright \
- frameworks/av/media/libstagefright/include \
- frameworks/native/include/media/openmax \
-
-LOCAL_CFLAGS += -Wno-multichar -Werror -Wall
-
-LOCAL_MODULE_TAGS := optional
-
-LOCAL_SYSTEM_EXT_MODULE:= true
-LOCAL_MODULE:= stagefright
-LOCAL_LICENSE_KINDS:= SPDX-license-identifier-Apache-2.0
-LOCAL_LICENSE_CONDITIONS:= notice
-LOCAL_NOTICE_FILE:= $(LOCAL_PATH)/NOTICE
-
-include $(BUILD_EXECUTABLE)
-
-################################################################################
-
-include $(CLEAR_VARS)
-
-LOCAL_SRC_FILES:= \
- AudioPlayer.cpp \
- SineSource.cpp \
- record.cpp
-
-LOCAL_HEADER_LIBRARIES := \
- libmediametrics_headers \
-
-LOCAL_SHARED_LIBRARIES := \
- libstagefright libmedia liblog libutils libbinder \
- libstagefright_foundation libdatasource libaudioclient \
- framework-permission-aidl-cpp
-
-LOCAL_C_INCLUDES:= \
- frameworks/av/camera/include \
- frameworks/av/media/libstagefright \
- frameworks/native/include/media/openmax \
- frameworks/native/include/media/hardware
-
-LOCAL_CFLAGS += -Wno-multichar -Werror -Wall
-
-LOCAL_MODULE_TAGS := optional
-
-LOCAL_MODULE:= record
-LOCAL_LICENSE_KINDS:= SPDX-license-identifier-Apache-2.0
-LOCAL_LICENSE_CONDITIONS:= notice
-LOCAL_NOTICE_FILE:= $(LOCAL_PATH)/NOTICE
-
-include $(BUILD_EXECUTABLE)
-
-################################################################################
-
-include $(CLEAR_VARS)
-
-LOCAL_SRC_FILES:= \
- AudioPlayer.cpp \
- recordvideo.cpp
-
-LOCAL_HEADER_LIBRARIES := \
- libmediametrics_headers \
-
-LOCAL_SHARED_LIBRARIES := \
- libstagefright libmedia liblog libutils libbinder \
- libstagefright_foundation libaudioclient
-
-LOCAL_C_INCLUDES:= \
- frameworks/av/media/libstagefright \
- frameworks/native/include/media/openmax \
- frameworks/native/include/media/hardware \
- framework-permission-aidl-cpp
-
-LOCAL_CFLAGS += -Wno-multichar -Werror -Wall
-
-LOCAL_MODULE_TAGS := optional
-
-LOCAL_MODULE:= recordvideo
-LOCAL_LICENSE_KINDS:= SPDX-license-identifier-Apache-2.0
-LOCAL_LICENSE_CONDITIONS:= notice
-LOCAL_NOTICE_FILE:= $(LOCAL_PATH)/NOTICE
-
-include $(BUILD_EXECUTABLE)
-
-
-################################################################################
-
-include $(CLEAR_VARS)
-
-LOCAL_SRC_FILES:= \
- AudioPlayer.cpp \
- SineSource.cpp \
- audioloop.cpp
-
-LOCAL_HEADER_LIBRARIES := \
- libmediametrics_headers \
-
-LOCAL_SHARED_LIBRARIES := \
- libstagefright libmedia liblog libutils libbinder \
- libstagefright_foundation libaudioclient \
- framework-permission-aidl-cpp
-
-LOCAL_C_INCLUDES:= \
- frameworks/av/media/libstagefright \
- frameworks/native/include/media/openmax
-
-LOCAL_CFLAGS += -Wno-multichar -Werror -Wall
-
-LOCAL_MODULE_TAGS := optional
-
-LOCAL_MODULE:= audioloop
-LOCAL_LICENSE_KINDS:= SPDX-license-identifier-Apache-2.0
-LOCAL_LICENSE_CONDITIONS:= notice
-LOCAL_NOTICE_FILE:= $(LOCAL_PATH)/NOTICE
-
-include $(BUILD_EXECUTABLE)
-
-################################################################################
-
-include $(CLEAR_VARS)
-
-LOCAL_SRC_FILES:= \
- stream.cpp \
-
-LOCAL_HEADER_LIBRARIES := \
- libmediametrics_headers \
-
-LOCAL_SHARED_LIBRARIES := \
- libstagefright liblog libutils libbinder libui libgui \
- libstagefright_foundation libmedia libcutils libdatasource
-
-LOCAL_C_INCLUDES:= \
- frameworks/av/media/libstagefright \
- frameworks/native/include/media/openmax
-
-LOCAL_CFLAGS += -Wno-multichar -Werror -Wall
-
-LOCAL_MODULE_TAGS := optional
-
-LOCAL_MODULE:= stream
-LOCAL_LICENSE_KINDS:= SPDX-license-identifier-Apache-2.0
-LOCAL_LICENSE_CONDITIONS:= notice
-LOCAL_NOTICE_FILE:= $(LOCAL_PATH)/NOTICE
-
-include $(BUILD_EXECUTABLE)
-
-################################################################################
-
-include $(CLEAR_VARS)
-
-LOCAL_SRC_FILES:= \
- codec.cpp \
- SimplePlayer.cpp \
-
-LOCAL_HEADER_LIBRARIES := \
- libmediadrm_headers \
- libmediametrics_headers \
-
-LOCAL_SHARED_LIBRARIES := \
- libstagefright liblog libutils libbinder libstagefright_foundation \
- libmedia libmedia_omx libaudioclient libui libgui libcutils
-
-LOCAL_C_INCLUDES:= \
- frameworks/av/media/libstagefright \
- frameworks/native/include/media/openmax
-
-LOCAL_CFLAGS += -Wno-multichar -Werror -Wall
-
-LOCAL_MODULE_TAGS := optional
-
-LOCAL_MODULE:= codec
-LOCAL_LICENSE_KINDS:= SPDX-license-identifier-Apache-2.0
-LOCAL_LICENSE_CONDITIONS:= notice
-LOCAL_NOTICE_FILE:= $(LOCAL_PATH)/NOTICE
-
-include $(BUILD_EXECUTABLE)
-
-################################################################################
-
-include $(CLEAR_VARS)
-
-LOCAL_SRC_FILES:= \
- filters/argbtorgba.rscript \
- filters/nightvision.rscript \
- filters/saturation.rscript \
- mediafilter.cpp \
-
-LOCAL_HEADER_LIBRARIES := \
- libmediadrm_headers \
- libmediametrics_headers \
-
-LOCAL_SHARED_LIBRARIES := \
- libstagefright \
- liblog \
- libutils \
- libbinder \
- libstagefright_foundation \
- libmedia_omx \
- libui \
- libgui \
- libRScpp \
-
-LOCAL_C_INCLUDES:= \
- frameworks/av/media/libstagefright \
- frameworks/native/include/media/openmax \
- frameworks/rs/cpp \
- frameworks/rs \
-
-intermediates := $(call intermediates-dir-for,STATIC_LIBRARIES,libRS,TARGET,)
-LOCAL_C_INCLUDES += $(intermediates)
-
-LOCAL_STATIC_LIBRARIES:= \
- libstagefright_mediafilter
-
-LOCAL_CFLAGS += -Wno-multichar -Werror -Wall
-
-LOCAL_MODULE_TAGS := optional
-
-LOCAL_MODULE:= mediafilter
-LOCAL_LICENSE_KINDS:= SPDX-license-identifier-Apache-2.0
-LOCAL_LICENSE_CONDITIONS:= notice
-LOCAL_NOTICE_FILE:= $(LOCAL_PATH)/NOTICE
-
-LOCAL_SANITIZE := cfi
-
-include $(BUILD_EXECUTABLE)
-
-################################################################################
-
-include $(CLEAR_VARS)
-
-LOCAL_SRC_FILES:= \
- muxer.cpp \
-
-LOCAL_HEADER_LIBRARIES := \
- libmediametrics_headers \
-
-LOCAL_SHARED_LIBRARIES := \
- libstagefright liblog libutils libbinder libstagefright_foundation \
- libcutils libc
-
-LOCAL_C_INCLUDES:= \
- frameworks/av/media/libstagefright \
- frameworks/native/include/media/openmax
-
-LOCAL_CFLAGS += -Wno-multichar -Werror -Wall
-
-LOCAL_MODULE_TAGS := optional
-
-LOCAL_MODULE:= muxer
-LOCAL_LICENSE_KINDS:= SPDX-license-identifier-Apache-2.0
-LOCAL_LICENSE_CONDITIONS:= notice
-LOCAL_NOTICE_FILE:= $(LOCAL_PATH)/NOTICE
-
-include $(BUILD_EXECUTABLE)
diff --git a/cmds/stagefright/stagefright.cpp b/cmds/stagefright/stagefright.cpp
index c430f05..6d1263e 100644
--- a/cmds/stagefright/stagefright.cpp
+++ b/cmds/stagefright/stagefright.cpp
@@ -70,6 +70,10 @@
using namespace android;
+namespace {
+ constexpr static int PIXEL_FORMAT_RGBA_1010102_AS_8888 = -HAL_PIXEL_FORMAT_RGBA_1010102;
+}
+
static long gNumRepetitions;
static long gMaxNumFrames; // 0 means decode all available.
static long gReproduceBug; // if not -1.
@@ -629,7 +633,14 @@
fprintf(stderr, " -m max-number-of-frames-to-decode in each pass\n");
fprintf(stderr, " -b bug to reproduce\n");
fprintf(stderr, " -i(nfo) dump codec info (profiles and color formats supported, details)\n");
- fprintf(stderr, " -t(humbnail) extract video thumbnail or album art\n");
+ fprintf(stderr, " -t(humbnail) extract video thumbnail or album art (/sdcard/out.jpg)\n");
+ fprintf(stderr, " -P(ixelFormat) pixel format to use for raw thumbnail "
+ "(/sdcard/out.raw)\n");
+ fprintf(stderr, " %d: RGBA_565\n", HAL_PIXEL_FORMAT_RGB_565);
+ fprintf(stderr, " %d: RGBA_8888\n", HAL_PIXEL_FORMAT_RGBA_8888);
+ fprintf(stderr, " %d: BGRA_8888\n", HAL_PIXEL_FORMAT_BGRA_8888);
+ fprintf(stderr, " %d: RGBA_1010102\n", HAL_PIXEL_FORMAT_RGBA_1010102);
+ fprintf(stderr, " %d: RGBA_1010102 as RGBA_8888\n", PIXEL_FORMAT_RGBA_1010102_AS_8888);
fprintf(stderr, " -s(oftware) prefer software codec\n");
fprintf(stderr, " -r(hardware) force to use hardware codec\n");
fprintf(stderr, " -o playback audio\n");
@@ -787,6 +798,7 @@
bool useSurfaceTexAlloc = false;
bool dumpStream = false;
bool dumpPCMStream = false;
+ int32_t pixelFormat = 0; // thumbnail pixel format
String8 dumpStreamFilename;
gNumRepetitions = 1;
gMaxNumFrames = 0;
@@ -800,7 +812,7 @@
sp<android::ALooper> looper;
int res;
- while ((res = getopt(argc, argv, "vhaqn:lm:b:itsrow:kN:xSTd:D:")) >= 0) {
+ while ((res = getopt(argc, argv, "vhaqn:lm:b:itsrow:kN:xSTd:D:P:")) >= 0) {
switch (res) {
case 'a':
{
@@ -841,6 +853,7 @@
break;
}
+ case 'P':
case 'm':
case 'n':
case 'b':
@@ -856,6 +869,8 @@
gNumRepetitions = x;
} else if (res == 'm') {
gMaxNumFrames = x;
+ } else if (res == 'P') {
+ pixelFormat = x;
} else {
CHECK_EQ(res, 'b');
gReproduceBug = x;
@@ -978,24 +993,71 @@
close(fd);
fd = -1;
+ uint32_t retrieverPixelFormat = HAL_PIXEL_FORMAT_RGB_565;
+ if (pixelFormat == PIXEL_FORMAT_RGBA_1010102_AS_8888) {
+ retrieverPixelFormat = HAL_PIXEL_FORMAT_RGBA_1010102;
+ } else if (pixelFormat) {
+ retrieverPixelFormat = pixelFormat;
+ }
sp<IMemory> mem =
retriever->getFrameAtTime(-1,
MediaSource::ReadOptions::SEEK_PREVIOUS_SYNC,
- HAL_PIXEL_FORMAT_RGB_565,
- false /*metaOnly*/);
+ retrieverPixelFormat, false /*metaOnly*/);
if (mem != NULL) {
failed = false;
- printf("getFrameAtTime(%s) => OK\n", filename);
+ printf("getFrameAtTime(%s) format=%d => OK\n", filename, retrieverPixelFormat);
VideoFrame *frame = (VideoFrame *)mem->unsecurePointer();
- CHECK_EQ(writeJpegFile("/sdcard/out.jpg",
- frame->getFlattenedData(),
- frame->mWidth, frame->mHeight), 0);
+ if (pixelFormat) {
+ int bpp = 0;
+ switch (pixelFormat) {
+ case HAL_PIXEL_FORMAT_RGB_565:
+ bpp = 2;
+ break;
+ case PIXEL_FORMAT_RGBA_1010102_AS_8888:
+ // convert RGBA_1010102 to RGBA_8888
+ {
+ uint32_t *data = (uint32_t *)frame->getFlattenedData();
+ uint32_t *end = data + frame->mWidth * frame->mHeight;
+ for (; data < end; ++data) {
+ *data =
+ // pick out 8-bit R, G, B values and move them to the
+ // correct position
+ ( (*data & 0x3fc) >> 2) | // R
+ ( (*data & 0xff000) >> 4) | // G
+ ( (*data & 0x3fc00000) >> 6) | // B
+ // pick out 2-bit A and expand to 8-bits
+ (((*data & 0xc0000000) >> 6) * 0x55);
+ }
+ }
+
+ FALLTHROUGH_INTENDED;
+
+ case HAL_PIXEL_FORMAT_RGBA_1010102:
+ case HAL_PIXEL_FORMAT_RGBA_8888:
+ case HAL_PIXEL_FORMAT_BGRA_8888:
+ bpp = 4;
+ break;
+ }
+ if (bpp) {
+ FILE *out = fopen("/sdcard/out.raw", "wb");
+ fwrite(frame->getFlattenedData(), bpp * frame->mWidth, frame->mHeight, out);
+ fclose(out);
+
+ printf("write out %d x %d x %dbpp\n", frame->mWidth, frame->mHeight, bpp);
+ } else {
+ printf("unknown pixel format.\n");
+ }
+ } else {
+ CHECK_EQ(writeJpegFile("/sdcard/out.jpg",
+ frame->getFlattenedData(),
+ frame->mWidth, frame->mHeight), 0);
+ }
}
- {
+ if (!pixelFormat) {
mem = retriever->extractAlbumArt();
if (mem != NULL) {
diff --git a/drm/OWNERS b/drm/OWNERS
index e788754..090c021 100644
--- a/drm/OWNERS
+++ b/drm/OWNERS
@@ -1 +1,3 @@
jtinker@google.com
+kelzhan@google.com
+robertshih@google.com
diff --git a/drm/drmserver/drmserver.rc b/drm/drmserver/drmserver.rc
index eb176c1..0319ff9 100644
--- a/drm/drmserver/drmserver.rc
+++ b/drm/drmserver/drmserver.rc
@@ -3,7 +3,7 @@
class main
user drm
group drm system inet drmrpc readproc
- writepid /dev/cpuset/foreground/tasks
+ task_profiles ProcessCapacityHigh
on property:drm.service.enabled=true
start drm
diff --git a/drm/libmediadrm/Android.bp b/drm/libmediadrm/Android.bp
index 0ffe626..71df58c 100644
--- a/drm/libmediadrm/Android.bp
+++ b/drm/libmediadrm/Android.bp
@@ -66,7 +66,7 @@
],
static_libs: [
- "resourcemanager_aidl_interface-ndk_platform",
+ "resourcemanager_aidl_interface-ndk",
],
export_shared_lib_headers: [
diff --git a/drm/libmediadrm/fuzzer/Android.bp b/drm/libmediadrm/fuzzer/Android.bp
index 7281066..49bbad4 100644
--- a/drm/libmediadrm/fuzzer/Android.bp
+++ b/drm/libmediadrm/fuzzer/Android.bp
@@ -35,7 +35,7 @@
static_libs: [
"libmediadrm",
"liblog",
- "resourcemanager_aidl_interface-ndk_platform",
+ "resourcemanager_aidl_interface-ndk",
],
header_libs: [
"libmedia_headers",
diff --git a/drm/mediadrm/plugins/clearkey/hidl/android.hardware.drm@1.2-service-lazy.clearkey.rc b/drm/mediadrm/plugins/clearkey/hidl/android.hardware.drm@1.2-service-lazy.clearkey.rc
index 9afd3d7..ec4517d 100644
--- a/drm/mediadrm/plugins/clearkey/hidl/android.hardware.drm@1.2-service-lazy.clearkey.rc
+++ b/drm/mediadrm/plugins/clearkey/hidl/android.hardware.drm@1.2-service-lazy.clearkey.rc
@@ -11,4 +11,4 @@
user media
group media mediadrm
ioprio rt 4
- writepid /dev/cpuset/foreground/tasks
+ task_profiles ProcessCapacityHigh
diff --git a/drm/mediadrm/plugins/clearkey/hidl/android.hardware.drm@1.2-service.clearkey.rc b/drm/mediadrm/plugins/clearkey/hidl/android.hardware.drm@1.2-service.clearkey.rc
index c1abe7f..3b48cf2 100644
--- a/drm/mediadrm/plugins/clearkey/hidl/android.hardware.drm@1.2-service.clearkey.rc
+++ b/drm/mediadrm/plugins/clearkey/hidl/android.hardware.drm@1.2-service.clearkey.rc
@@ -10,4 +10,4 @@
user media
group media mediadrm
ioprio rt 4
- writepid /dev/cpuset/foreground/tasks
+ task_profiles ProcessCapacityHigh
diff --git a/drm/mediadrm/plugins/clearkey/hidl/android.hardware.drm@1.3-service-lazy.clearkey.rc b/drm/mediadrm/plugins/clearkey/hidl/android.hardware.drm@1.3-service-lazy.clearkey.rc
index 1e0d431..6e64978 100644
--- a/drm/mediadrm/plugins/clearkey/hidl/android.hardware.drm@1.3-service-lazy.clearkey.rc
+++ b/drm/mediadrm/plugins/clearkey/hidl/android.hardware.drm@1.3-service-lazy.clearkey.rc
@@ -13,4 +13,4 @@
user media
group media mediadrm
ioprio rt 4
- writepid /dev/cpuset/foreground/tasks
+ task_profiles ProcessCapacityHigh
diff --git a/drm/mediadrm/plugins/clearkey/hidl/android.hardware.drm@1.3-service.clearkey.rc b/drm/mediadrm/plugins/clearkey/hidl/android.hardware.drm@1.3-service.clearkey.rc
index 8130511..e302e1b 100644
--- a/drm/mediadrm/plugins/clearkey/hidl/android.hardware.drm@1.3-service.clearkey.rc
+++ b/drm/mediadrm/plugins/clearkey/hidl/android.hardware.drm@1.3-service.clearkey.rc
@@ -11,4 +11,4 @@
user media
group media mediadrm
ioprio rt 4
- writepid /dev/cpuset/foreground/tasks
+ task_profiles ProcessCapacityHigh
diff --git a/drm/mediadrm/plugins/clearkey/hidl/android.hardware.drm@1.4-service-lazy.clearkey.rc b/drm/mediadrm/plugins/clearkey/hidl/android.hardware.drm@1.4-service-lazy.clearkey.rc
index 46aba88..84a63a1 100644
--- a/drm/mediadrm/plugins/clearkey/hidl/android.hardware.drm@1.4-service-lazy.clearkey.rc
+++ b/drm/mediadrm/plugins/clearkey/hidl/android.hardware.drm@1.4-service-lazy.clearkey.rc
@@ -15,4 +15,4 @@
user media
group media mediadrm
ioprio rt 4
- writepid /dev/cpuset/foreground/tasks
+ task_profiles ProcessCapacityHigh
diff --git a/drm/mediadrm/plugins/clearkey/hidl/android.hardware.drm@1.4-service.clearkey.rc b/drm/mediadrm/plugins/clearkey/hidl/android.hardware.drm@1.4-service.clearkey.rc
index 8186933..649599e 100644
--- a/drm/mediadrm/plugins/clearkey/hidl/android.hardware.drm@1.4-service.clearkey.rc
+++ b/drm/mediadrm/plugins/clearkey/hidl/android.hardware.drm@1.4-service.clearkey.rc
@@ -13,4 +13,4 @@
user media
group media mediadrm
ioprio rt 4
- writepid /dev/cpuset/foreground/tasks
+ task_profiles ProcessCapacityHigh
diff --git a/include/OWNERS b/include/OWNERS
index d6bd998..88de595 100644
--- a/include/OWNERS
+++ b/include/OWNERS
@@ -1,6 +1,5 @@
elaurent@google.com
-gkasten@google.com
hunga@google.com
jtinker@google.com
lajos@google.com
-marcone@google.com
+essick@google.com
diff --git a/media/OWNERS b/media/OWNERS
index 4cf4870..4a25b68 100644
--- a/media/OWNERS
+++ b/media/OWNERS
@@ -1,7 +1,6 @@
# Bug component: 1344
elaurent@google.com
essick@google.com
-hkuang@google.com
hunga@google.com
jiabin@google.com
jmtrivi@google.com
@@ -15,6 +14,7 @@
robertshih@google.com
taklee@google.com
wonsik@google.com
+ytai@google.com
# go/android-fwk-media-solutions for info on areas of ownership.
include platform/frameworks/av:/media/janitors/media_solutions_OWNERS
diff --git a/media/TEST_MAPPING b/media/TEST_MAPPING
index 5bc7262..41fe080 100644
--- a/media/TEST_MAPPING
+++ b/media/TEST_MAPPING
@@ -3,18 +3,18 @@
"presubmit-large": [
// runs whenever we change something in this tree
{
- "name": "CtsMediaTestCases",
+ "name": "CtsMediaCodecTestCases",
"options": [
{
- "include-filter": "android.media.cts.EncodeDecodeTest"
+ "include-filter": "android.media.codec.cts.EncodeDecodeTest"
}
]
},
{
- "name": "CtsMediaTestCases",
+ "name": "CtsMediaCodecTestCases",
"options": [
{
- "include-filter": "android.media.cts.DecodeEditEncodeTest"
+ "include-filter": "android.media.codec.cts.DecodeEditEncodeTest"
}
]
}
@@ -46,18 +46,18 @@
// runs regularly, independent of changes in this tree.
// signals if changes elsewhere break media functionality
{
- "name": "CtsMediaTestCases",
+ "name": "CtsMediaCodecTestCases",
"options": [
{
- "include-filter": "android.media.cts.EncodeDecodeTest"
+ "include-filter": "android.media.codec.cts.EncodeDecodeTest"
}
]
},
{
- "name": "CtsMediaTestCases",
+ "name": "CtsMediaCodecTestCases",
"options": [
{
- "include-filter": "android.media.cts.DecodeEditEncodeTest"
+ "include-filter": "android.media.codec.cts.DecodeEditEncodeTest"
}
]
}
diff --git a/media/audioserver/Android.bp b/media/audioserver/Android.bp
index be25ffb..e5f9907 100644
--- a/media/audioserver/Android.bp
+++ b/media/audioserver/Android.bp
@@ -25,6 +25,7 @@
],
shared_libs: [
+ "packagemanager_aidl-cpp",
"libaaudioservice",
"libaudioflinger",
"libaudiopolicyservice",
@@ -41,7 +42,6 @@
"libpowermanager",
"libutils",
"libvibrator",
-
],
// TODO check if we still need all of these include directories
diff --git a/media/audioserver/main_audioserver.cpp b/media/audioserver/main_audioserver.cpp
index 8ee1efb..c5ac7f9 100644
--- a/media/audioserver/main_audioserver.cpp
+++ b/media/audioserver/main_audioserver.cpp
@@ -73,10 +73,8 @@
IPCThreadState::self()->joinThreadPool();
for (;;) {
siginfo_t info;
- int ret = waitid(P_PID, childPid, &info, WEXITED | WSTOPPED | WCONTINUED);
- if (ret == EINTR) {
- continue;
- }
+ int ret = TEMP_FAILURE_RETRY(waitid(P_PID, childPid, &info,
+ WEXITED | WSTOPPED | WCONTINUED));
if (ret < 0) {
break;
}
diff --git a/media/codec2/TEST_MAPPING b/media/codec2/TEST_MAPPING
index 6ac4210..f477f1c 100644
--- a/media/codec2/TEST_MAPPING
+++ b/media/codec2/TEST_MAPPING
@@ -15,13 +15,68 @@
},
{
"exclude-annotation": "android.platform.test.annotations.RequiresDevice"
+ }
+ ]
+ },
+ {
+ "name": "CtsMediaAudioTestCases",
+ "options": [
+ {
+ "include-annotation": "android.platform.test.annotations.Presubmit"
+ },
+ {
+ "exclude-annotation": "android.platform.test.annotations.RequiresDevice"
},
// TODO: b/149314419
{
- "exclude-filter": "android.media.cts.AudioPlaybackCaptureTest"
+ "exclude-filter": "android.media.audio.cts.AudioPlaybackCaptureTest"
},
{
- "exclude-filter": "android.media.cts.AudioRecordTest"
+ "exclude-filter": "android.media.audio.cts.AudioRecordTest"
+ }
+ ]
+ },
+ {
+ "name": "CtsMediaDecoderTestCases",
+ "options": [
+ {
+ "include-annotation": "android.platform.test.annotations.Presubmit"
+ },
+ {
+ "exclude-annotation": "android.platform.test.annotations.RequiresDevice"
+ }
+ ]
+ },
+ {
+ "name": "CtsMediaEncoderTestCases",
+ "options": [
+ {
+ "include-annotation": "android.platform.test.annotations.Presubmit"
+ },
+ {
+ "exclude-annotation": "android.platform.test.annotations.RequiresDevice"
+ }
+ ]
+ },
+ {
+ "name": "CtsMediaCodecTestCases",
+ "options": [
+ {
+ "include-annotation": "android.platform.test.annotations.Presubmit"
+ },
+ {
+ "exclude-annotation": "android.platform.test.annotations.RequiresDevice"
+ }
+ ]
+ },
+ {
+ "name": "CtsMediaPlayerTestCases",
+ "options": [
+ {
+ "include-annotation": "android.platform.test.annotations.Presubmit"
+ },
+ {
+ "exclude-annotation": "android.platform.test.annotations.RequiresDevice"
}
]
}
diff --git a/media/codec2/components/aom/C2SoftAomDec.cpp b/media/codec2/components/aom/C2SoftAomDec.cpp
index c08cd59..c7985ca 100644
--- a/media/codec2/components/aom/C2SoftAomDec.cpp
+++ b/media/codec2/components/aom/C2SoftAomDec.cpp
@@ -505,124 +505,6 @@
}
}
-static void copyOutputBufferToYuvPlanarFrame(
- uint8_t *dstY, uint8_t *dstU, uint8_t *dstV,
- const uint8_t *srcY, const uint8_t *srcU, const uint8_t *srcV,
- size_t srcYStride, size_t srcUStride, size_t srcVStride,
- size_t dstYStride, size_t dstUVStride,
- uint32_t width, uint32_t height) {
-
- for (size_t i = 0; i < height; ++i) {
- memcpy(dstY, srcY, width);
- srcY += srcYStride;
- dstY += dstYStride;
- }
-
- for (size_t i = 0; i < height / 2; ++i) {
- memcpy(dstV, srcV, width / 2);
- srcV += srcVStride;
- dstV += dstUVStride;
- }
-
- for (size_t i = 0; i < height / 2; ++i) {
- memcpy(dstU, srcU, width / 2);
- srcU += srcUStride;
- dstU += dstUVStride;
- }
-}
-
-static void convertYUV420Planar16ToY410(uint32_t *dst,
- const uint16_t *srcY, const uint16_t *srcU, const uint16_t *srcV,
- size_t srcYStride, size_t srcUStride, size_t srcVStride,
- size_t dstStride, size_t width, size_t height) {
-
- // Converting two lines at a time, slightly faster
- for (size_t y = 0; y < height; y += 2) {
- uint32_t *dstTop = (uint32_t *) dst;
- uint32_t *dstBot = (uint32_t *) (dst + dstStride);
- uint16_t *ySrcTop = (uint16_t*) srcY;
- uint16_t *ySrcBot = (uint16_t*) (srcY + srcYStride);
- uint16_t *uSrc = (uint16_t*) srcU;
- uint16_t *vSrc = (uint16_t*) srcV;
-
- uint32_t u01, v01, y01, y23, y45, y67, uv0, uv1;
- size_t x = 0;
- for (; x < width - 3; x += 4) {
-
- u01 = *((uint32_t*)uSrc); uSrc += 2;
- v01 = *((uint32_t*)vSrc); vSrc += 2;
-
- y01 = *((uint32_t*)ySrcTop); ySrcTop += 2;
- y23 = *((uint32_t*)ySrcTop); ySrcTop += 2;
- y45 = *((uint32_t*)ySrcBot); ySrcBot += 2;
- y67 = *((uint32_t*)ySrcBot); ySrcBot += 2;
-
- uv0 = (u01 & 0x3FF) | ((v01 & 0x3FF) << 20);
- uv1 = (u01 >> 16) | ((v01 >> 16) << 20);
-
- *dstTop++ = 3 << 30 | ((y01 & 0x3FF) << 10) | uv0;
- *dstTop++ = 3 << 30 | ((y01 >> 16) << 10) | uv0;
- *dstTop++ = 3 << 30 | ((y23 & 0x3FF) << 10) | uv1;
- *dstTop++ = 3 << 30 | ((y23 >> 16) << 10) | uv1;
-
- *dstBot++ = 3 << 30 | ((y45 & 0x3FF) << 10) | uv0;
- *dstBot++ = 3 << 30 | ((y45 >> 16) << 10) | uv0;
- *dstBot++ = 3 << 30 | ((y67 & 0x3FF) << 10) | uv1;
- *dstBot++ = 3 << 30 | ((y67 >> 16) << 10) | uv1;
- }
-
- // There should be at most 2 more pixels to process. Note that we don't
- // need to consider odd case as the buffer is always aligned to even.
- if (x < width) {
- u01 = *uSrc;
- v01 = *vSrc;
- y01 = *((uint32_t*)ySrcTop);
- y45 = *((uint32_t*)ySrcBot);
- uv0 = (u01 & 0x3FF) | ((v01 & 0x3FF) << 20);
- *dstTop++ = ((y01 & 0x3FF) << 10) | uv0;
- *dstTop++ = ((y01 >> 16) << 10) | uv0;
- *dstBot++ = ((y45 & 0x3FF) << 10) | uv0;
- *dstBot++ = ((y45 >> 16) << 10) | uv0;
- }
-
- srcY += srcYStride * 2;
- srcU += srcUStride;
- srcV += srcVStride;
- dst += dstStride * 2;
- }
-
- return;
-}
-
-static void convertYUV420Planar16ToYUV420Planar(
- uint8_t *dstY, uint8_t *dstU, uint8_t *dstV,
- const uint16_t *srcY, const uint16_t *srcU, const uint16_t *srcV,
- size_t srcYStride, size_t srcUStride, size_t srcVStride,
- size_t dstYStride, size_t dstUVStride,
- size_t width, size_t height) {
-
- for (size_t y = 0; y < height; ++y) {
- for (size_t x = 0; x < width; ++x) {
- dstY[x] = (uint8_t)(srcY[x] >> 2);
- }
-
- srcY += srcYStride;
- dstY += dstYStride;
- }
-
- for (size_t y = 0; y < (height + 1) / 2; ++y) {
- for (size_t x = 0; x < (width + 1) / 2; ++x) {
- dstU[x] = (uint8_t)(srcU[x] >> 2);
- dstV[x] = (uint8_t)(srcV[x] >> 2);
- }
-
- srcU += srcUStride;
- srcV += srcVStride;
- dstU += dstUVStride;
- dstV += dstUVStride;
- }
- return;
-}
bool C2SoftAomDec::outputBuffer(
const std::shared_ptr<C2BlockPool> &pool,
const std::unique_ptr<C2Work> &work)
@@ -711,21 +593,16 @@
dstYStride / sizeof(uint32_t),
mWidth, mHeight);
} else {
- convertYUV420Planar16ToYUV420Planar(dstY, dstU, dstV,
- srcY, srcU, srcV,
- srcYStride / 2, srcUStride / 2, srcVStride / 2,
- dstYStride, dstUVStride,
- mWidth, mHeight);
+ convertYUV420Planar16ToYV12(dstY, dstU, dstV, srcY, srcU, srcV, srcYStride / 2,
+ srcUStride / 2, srcVStride / 2, dstYStride, dstUVStride,
+ mWidth, mHeight);
}
} else {
const uint8_t *srcY = (const uint8_t *)img->planes[AOM_PLANE_Y];
const uint8_t *srcU = (const uint8_t *)img->planes[AOM_PLANE_U];
const uint8_t *srcV = (const uint8_t *)img->planes[AOM_PLANE_V];
- copyOutputBufferToYuvPlanarFrame(
- dstY, dstU, dstV, srcY, srcU, srcV,
- srcYStride, srcUStride, srcVStride,
- dstYStride, dstUVStride,
- mWidth, mHeight);
+ convertYUV420Planar8ToYV12(dstY, dstU, dstV, srcY, srcU, srcV, srcYStride, srcUStride,
+ srcVStride, dstYStride, dstUVStride, mWidth, mHeight);
}
finishWork(*(int64_t*)img->user_priv, work, std::move(block));
block = nullptr;
diff --git a/media/codec2/components/avc/C2SoftAvcDec.cpp b/media/codec2/components/avc/C2SoftAvcDec.cpp
index e8287f9..cc4517d 100644
--- a/media/codec2/components/avc/C2SoftAvcDec.cpp
+++ b/media/codec2/components/avc/C2SoftAvcDec.cpp
@@ -511,7 +511,7 @@
status_t C2SoftAvcDec::initDecoder() {
if (OK != createDecoder()) return UNKNOWN_ERROR;
mNumCores = MIN(getCpuCoreCount(), MAX_NUM_CORES);
- mStride = ALIGN32(mWidth);
+ mStride = ALIGN128(mWidth);
mSignalledError = false;
resetPlugin();
(void) setNumCores();
@@ -777,20 +777,20 @@
return C2_CORRUPTED;
}
if (mOutBlock &&
- (mOutBlock->width() != ALIGN32(mWidth) || mOutBlock->height() != mHeight)) {
+ (mOutBlock->width() != ALIGN128(mWidth) || mOutBlock->height() != mHeight)) {
mOutBlock.reset();
}
if (!mOutBlock) {
uint32_t format = HAL_PIXEL_FORMAT_YV12;
C2MemoryUsage usage = { C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE };
c2_status_t err =
- pool->fetchGraphicBlock(ALIGN32(mWidth), mHeight, format, usage, &mOutBlock);
+ pool->fetchGraphicBlock(ALIGN128(mWidth), mHeight, format, usage, &mOutBlock);
if (err != C2_OK) {
ALOGE("fetchGraphicBlock for Output failed with status %d", err);
return err;
}
ALOGV("provided (%dx%d) required (%dx%d)",
- mOutBlock->width(), mOutBlock->height(), ALIGN32(mWidth), mHeight);
+ mOutBlock->width(), mOutBlock->height(), ALIGN128(mWidth), mHeight);
}
return C2_OK;
@@ -928,7 +928,7 @@
if (0 < ps_decode_op->u4_pic_wd && 0 < ps_decode_op->u4_pic_ht) {
if (mHeaderDecoded == false) {
mHeaderDecoded = true;
- mStride = ALIGN32(ps_decode_op->u4_pic_wd);
+ mStride = ALIGN128(ps_decode_op->u4_pic_wd);
setParams(mStride, IVD_DECODE_FRAME);
}
if (ps_decode_op->u4_pic_wd != mWidth || ps_decode_op->u4_pic_ht != mHeight) {
diff --git a/media/codec2/components/avc/C2SoftAvcDec.h b/media/codec2/components/avc/C2SoftAvcDec.h
index 5c07d29..59d5184 100644
--- a/media/codec2/components/avc/C2SoftAvcDec.h
+++ b/media/codec2/components/avc/C2SoftAvcDec.h
@@ -38,7 +38,7 @@
#define ivdext_ctl_set_num_cores_op_t ih264d_ctl_set_num_cores_op_t
#define ivdext_ctl_get_vui_params_ip_t ih264d_ctl_get_vui_params_ip_t
#define ivdext_ctl_get_vui_params_op_t ih264d_ctl_get_vui_params_op_t
-#define ALIGN32(x) ((((x) + 31) >> 5) << 5)
+#define ALIGN128(x) ((((x) + 127) >> 7) << 7)
#define MAX_NUM_CORES 4
#define IVDEXT_CMD_CTL_SET_NUM_CORES \
(IVD_CONTROL_API_COMMAND_TYPE_T)IH264D_CMD_CTL_SET_NUM_CORES
diff --git a/media/codec2/components/base/SimpleC2Component.cpp b/media/codec2/components/base/SimpleC2Component.cpp
index 6c4b7d9..434246f 100644
--- a/media/codec2/components/base/SimpleC2Component.cpp
+++ b/media/codec2/components/base/SimpleC2Component.cpp
@@ -29,7 +29,179 @@
#include <SimpleC2Component.h>
namespace android {
+constexpr uint8_t kNeutralUVBitDepth8 = 128;
+constexpr uint16_t kNeutralUVBitDepth10 = 512;
+void convertYUV420Planar8ToYV12(uint8_t *dstY, uint8_t *dstU, uint8_t *dstV, const uint8_t *srcY,
+ const uint8_t *srcU, const uint8_t *srcV, size_t srcYStride,
+ size_t srcUStride, size_t srcVStride, size_t dstYStride,
+ size_t dstUVStride, uint32_t width, uint32_t height,
+ bool isMonochrome) {
+ for (size_t i = 0; i < height; ++i) {
+ memcpy(dstY, srcY, width);
+ srcY += srcYStride;
+ dstY += dstYStride;
+ }
+
+ if (isMonochrome) {
+ // Fill with neutral U/V values.
+ for (size_t i = 0; i < height / 2; ++i) {
+ memset(dstV, kNeutralUVBitDepth8, width / 2);
+ memset(dstU, kNeutralUVBitDepth8, width / 2);
+ dstV += dstUVStride;
+ dstU += dstUVStride;
+ }
+ return;
+ }
+
+ for (size_t i = 0; i < height / 2; ++i) {
+ memcpy(dstV, srcV, width / 2);
+ srcV += srcVStride;
+ dstV += dstUVStride;
+ }
+
+ for (size_t i = 0; i < height / 2; ++i) {
+ memcpy(dstU, srcU, width / 2);
+ srcU += srcUStride;
+ dstU += dstUVStride;
+ }
+}
+
+void convertYUV420Planar16ToY410(uint32_t *dst, const uint16_t *srcY, const uint16_t *srcU,
+ const uint16_t *srcV, size_t srcYStride, size_t srcUStride,
+ size_t srcVStride, size_t dstStride, size_t width, size_t height) {
+ // Converting two lines at a time, slightly faster
+ for (size_t y = 0; y < height; y += 2) {
+ uint32_t *dstTop = (uint32_t *)dst;
+ uint32_t *dstBot = (uint32_t *)(dst + dstStride);
+ uint16_t *ySrcTop = (uint16_t *)srcY;
+ uint16_t *ySrcBot = (uint16_t *)(srcY + srcYStride);
+ uint16_t *uSrc = (uint16_t *)srcU;
+ uint16_t *vSrc = (uint16_t *)srcV;
+
+ uint32_t u01, v01, y01, y23, y45, y67, uv0, uv1;
+ size_t x = 0;
+ for (; x < width - 3; x += 4) {
+ u01 = *((uint32_t *)uSrc);
+ uSrc += 2;
+ v01 = *((uint32_t *)vSrc);
+ vSrc += 2;
+
+ y01 = *((uint32_t *)ySrcTop);
+ ySrcTop += 2;
+ y23 = *((uint32_t *)ySrcTop);
+ ySrcTop += 2;
+ y45 = *((uint32_t *)ySrcBot);
+ ySrcBot += 2;
+ y67 = *((uint32_t *)ySrcBot);
+ ySrcBot += 2;
+
+ uv0 = (u01 & 0x3FF) | ((v01 & 0x3FF) << 20);
+ uv1 = (u01 >> 16) | ((v01 >> 16) << 20);
+
+ *dstTop++ = 3 << 30 | ((y01 & 0x3FF) << 10) | uv0;
+ *dstTop++ = 3 << 30 | ((y01 >> 16) << 10) | uv0;
+ *dstTop++ = 3 << 30 | ((y23 & 0x3FF) << 10) | uv1;
+ *dstTop++ = 3 << 30 | ((y23 >> 16) << 10) | uv1;
+
+ *dstBot++ = 3 << 30 | ((y45 & 0x3FF) << 10) | uv0;
+ *dstBot++ = 3 << 30 | ((y45 >> 16) << 10) | uv0;
+ *dstBot++ = 3 << 30 | ((y67 & 0x3FF) << 10) | uv1;
+ *dstBot++ = 3 << 30 | ((y67 >> 16) << 10) | uv1;
+ }
+
+ // There should be at most 2 more pixels to process. Note that we don't
+ // need to consider the odd case as the buffer is always aligned to an even width.
+ if (x < width) {
+ u01 = *uSrc;
+ v01 = *vSrc;
+ y01 = *((uint32_t *)ySrcTop);
+ y45 = *((uint32_t *)ySrcBot);
+ uv0 = (u01 & 0x3FF) | ((v01 & 0x3FF) << 20);
+ *dstTop++ = ((y01 & 0x3FF) << 10) | uv0;
+ *dstTop++ = ((y01 >> 16) << 10) | uv0;
+ *dstBot++ = ((y45 & 0x3FF) << 10) | uv0;
+ *dstBot++ = ((y45 >> 16) << 10) | uv0;
+ }
+
+ srcY += srcYStride * 2;
+ srcU += srcUStride;
+ srcV += srcVStride;
+ dst += dstStride * 2;
+ }
+}
+
+void convertYUV420Planar16ToYV12(uint8_t *dstY, uint8_t *dstU, uint8_t *dstV, const uint16_t *srcY,
+ const uint16_t *srcU, const uint16_t *srcV, size_t srcYStride,
+ size_t srcUStride, size_t srcVStride, size_t dstYStride,
+ size_t dstUVStride, size_t width, size_t height,
+ bool isMonochrome) {
+ for (size_t y = 0; y < height; ++y) {
+ for (size_t x = 0; x < width; ++x) {
+ dstY[x] = (uint8_t)(srcY[x] >> 2);
+ }
+ srcY += srcYStride;
+ dstY += dstYStride;
+ }
+
+ if (isMonochrome) {
+ // Fill with neutral U/V values.
+ for (size_t y = 0; y < (height + 1) / 2; ++y) {
+ memset(dstV, kNeutralUVBitDepth8, (width + 1) / 2);
+ memset(dstU, kNeutralUVBitDepth8, (width + 1) / 2);
+ dstV += dstUVStride;
+ dstU += dstUVStride;
+ }
+ return;
+ }
+
+ for (size_t y = 0; y < (height + 1) / 2; ++y) {
+ for (size_t x = 0; x < (width + 1) / 2; ++x) {
+ dstU[x] = (uint8_t)(srcU[x] >> 2);
+ dstV[x] = (uint8_t)(srcV[x] >> 2);
+ }
+ srcU += srcUStride;
+ srcV += srcVStride;
+ dstU += dstUVStride;
+ dstV += dstUVStride;
+ }
+}
+
+void convertYUV420Planar16ToP010(uint16_t *dstY, uint16_t *dstUV, const uint16_t *srcY,
+ const uint16_t *srcU, const uint16_t *srcV, size_t srcYStride,
+ size_t srcUStride, size_t srcVStride, size_t dstYStride,
+ size_t dstUVStride, size_t width, size_t height,
+ bool isMonochrome) {
+ for (size_t y = 0; y < height; ++y) {
+ for (size_t x = 0; x < width; ++x) {
+ dstY[x] = srcY[x] << 6;
+ }
+ srcY += srcYStride;
+ dstY += dstYStride;
+ }
+
+ if (isMonochrome) {
+ // Fill with neutral U/V values.
+ for (size_t y = 0; y < (height + 1) / 2; ++y) {
+ for (size_t x = 0; x < (width + 1) / 2; ++x) {
+ dstUV[2 * x] = kNeutralUVBitDepth10 << 6;
+ dstUV[2 * x + 1] = kNeutralUVBitDepth10 << 6;
+ }
+ dstUV += dstUVStride;
+ }
+ return;
+ }
+
+ for (size_t y = 0; y < (height + 1) / 2; ++y) {
+ for (size_t x = 0; x < (width + 1) / 2; ++x) {
+ dstUV[2 * x] = srcU[x] << 6;
+ dstUV[2 * x + 1] = srcV[x] << 6;
+ }
+ srcU += srcUStride;
+ srcV += srcVStride;
+ dstUV += dstUVStride;
+ }
+}
std::unique_ptr<C2Work> SimpleC2Component::WorkQueue::pop_front() {
std::unique_ptr<C2Work> work = std::move(mQueue.front().work);
mQueue.pop_front();
@@ -591,6 +763,43 @@
return hasQueuedWork;
}
+int SimpleC2Component::getHalPixelFormatForBitDepth10(bool allowRGBA1010102) {
+ // Save supported hal pixel formats for bit depth of 10, the first time this is called
+ if (!mBitDepth10HalPixelFormats.size()) {
+ std::vector<int> halPixelFormats;
+ // TODO(b/178229371) Enable HAL_PIXEL_FORMAT_YCBCR_P010 once framework supports it
+ // halPixelFormats.push_back(HAL_PIXEL_FORMAT_YCBCR_P010);
+
+ // since allowRGBA1010102 can change in each call, but mBitDepth10HalPixelFormats
+ // is populated only once, allowRGBA1010102 is not considered at this stage.
+ halPixelFormats.push_back(HAL_PIXEL_FORMAT_RGBA_1010102);
+
+ for (int halPixelFormat : halPixelFormats) {
+ std::shared_ptr<C2GraphicBlock> block;
+
+ uint32_t gpuConsumerFlags = halPixelFormat == HAL_PIXEL_FORMAT_RGBA_1010102
+ ? C2AndroidMemoryUsage::HW_TEXTURE_READ
+ : 0;
+ C2MemoryUsage usage = {C2MemoryUsage::CPU_READ | gpuConsumerFlags,
+ C2MemoryUsage::CPU_WRITE};
+ // TODO(b/214411172) Use AHardwareBuffer_isSupported once it supports P010
+ c2_status_t status =
+ mOutputBlockPool->fetchGraphicBlock(320, 240, halPixelFormat, usage, &block);
+ if (status == C2_OK) {
+ mBitDepth10HalPixelFormats.push_back(halPixelFormat);
+ }
+ }
+ // Add YV12 at the end as a fall-back option
+ mBitDepth10HalPixelFormats.push_back(HAL_PIXEL_FORMAT_YV12);
+ }
+ // When RGBA1010102 is not allowed and the first supported hal pixel format is
+ // HAL_PIXEL_FORMAT_RGBA_1010102, then return HAL_PIXEL_FORMAT_YV12
+ if (!allowRGBA1010102 && mBitDepth10HalPixelFormats[0] == HAL_PIXEL_FORMAT_RGBA_1010102) {
+ return HAL_PIXEL_FORMAT_YV12;
+ }
+ // Return the first entry from supported formats
+ return mBitDepth10HalPixelFormats[0];
+}
std::shared_ptr<C2Buffer> SimpleC2Component::createLinearBuffer(
const std::shared_ptr<C2LinearBlock> &block, size_t offset, size_t size) {
return C2Buffer::CreateLinearBuffer(block->share(offset, size, ::C2Fence()));
diff --git a/media/codec2/components/base/include/SimpleC2Component.h b/media/codec2/components/base/include/SimpleC2Component.h
index e5e16d8..d244f45 100644
--- a/media/codec2/components/base/include/SimpleC2Component.h
+++ b/media/codec2/components/base/include/SimpleC2Component.h
@@ -28,6 +28,24 @@
namespace android {
+void convertYUV420Planar8ToYV12(uint8_t *dstY, uint8_t *dstU, uint8_t *dstV, const uint8_t *srcY,
+ const uint8_t *srcU, const uint8_t *srcV, size_t srcYStride,
+ size_t srcUStride, size_t srcVStride, size_t dstYStride,
+ size_t dstUVStride, uint32_t width, uint32_t height,
+ bool isMonochrome = false);
+void convertYUV420Planar16ToY410(uint32_t *dst, const uint16_t *srcY, const uint16_t *srcU,
+ const uint16_t *srcV, size_t srcYStride, size_t srcUStride,
+ size_t srcVStride, size_t dstStride, size_t width, size_t height);
+void convertYUV420Planar16ToYV12(uint8_t *dstY, uint8_t *dstU, uint8_t *dstV, const uint16_t *srcY,
+ const uint16_t *srcU, const uint16_t *srcV, size_t srcYStride,
+ size_t srcUStride, size_t srcVStride, size_t dstYStride,
+ size_t dstUVStride, size_t width, size_t height,
+ bool isMonochrome = false);
+void convertYUV420Planar16ToP010(uint16_t *dstY, uint16_t *dstUV, const uint16_t *srcY,
+ const uint16_t *srcU, const uint16_t *srcV, size_t srcYStride,
+ size_t srcUStride, size_t srcVStride, size_t dstYStride,
+ size_t dstUVStride, size_t width, size_t height,
+ bool isMonochrome = false);
class SimpleC2Component
: public C2Component, public std::enable_shared_from_this<SimpleC2Component> {
public:
@@ -149,6 +167,7 @@
static constexpr uint32_t NO_DRAIN = ~0u;
C2ReadView mDummyReadView;
+ int getHalPixelFormatForBitDepth10(bool allowRGBA1010102);
private:
const std::shared_ptr<C2ComponentInterface> mIntf;
@@ -232,6 +251,7 @@
class BlockingBlockPool;
std::shared_ptr<BlockingBlockPool> mOutputBlockPool;
+ std::vector<int> mBitDepth10HalPixelFormats;
SimpleC2Component() = delete;
};
diff --git a/media/codec2/components/flac/C2SoftFlacEnc.cpp b/media/codec2/components/flac/C2SoftFlacEnc.cpp
index 6fead3a..182edfb 100644
--- a/media/codec2/components/flac/C2SoftFlacEnc.cpp
+++ b/media/codec2/components/flac/C2SoftFlacEnc.cpp
@@ -439,9 +439,6 @@
}
FLAC__bool ok = FLAC__stream_encoder_finish(mFlacStreamEncoder);
if (!ok) return C2_CORRUPTED;
- mIsFirstFrame = true;
- mAnchorTimeStamp = 0ull;
- mProcessedSamples = 0u;
return C2_OK;
}
diff --git a/media/codec2/components/gav1/C2SoftGav1Dec.cpp b/media/codec2/components/gav1/C2SoftGav1Dec.cpp
index f857e87..0f59d76 100644
--- a/media/codec2/components/gav1/C2SoftGav1Dec.cpp
+++ b/media/codec2/components/gav1/C2SoftGav1Dec.cpp
@@ -20,17 +20,14 @@
#include <C2Debug.h>
#include <C2PlatformSupport.h>
+#include <Codec2BufferUtils.h>
+#include <Codec2Mapper.h>
#include <SimpleC2Interface.h>
#include <log/log.h>
#include <media/stagefright/foundation/AUtils.h>
#include <media/stagefright/foundation/MediaDefs.h>
namespace android {
-namespace {
-
-constexpr uint8_t NEUTRAL_UV_VALUE = 128;
-
-} // namespace
// codecname set and passed in as a compile flag from Android.bp
constexpr char COMPONENT_NAME[] = CODECNAME;
@@ -156,6 +153,42 @@
.withSetter(DefaultColorAspectsSetter)
.build());
+ addParameter(
+ DefineParam(mCodedColorAspects, C2_PARAMKEY_VUI_COLOR_ASPECTS)
+ .withDefault(new C2StreamColorAspectsInfo::input(
+ 0u, C2Color::RANGE_LIMITED, C2Color::PRIMARIES_UNSPECIFIED,
+ C2Color::TRANSFER_UNSPECIFIED, C2Color::MATRIX_UNSPECIFIED))
+ .withFields({
+ C2F(mCodedColorAspects, range).inRange(
+ C2Color::RANGE_UNSPECIFIED, C2Color::RANGE_OTHER),
+ C2F(mCodedColorAspects, primaries).inRange(
+ C2Color::PRIMARIES_UNSPECIFIED, C2Color::PRIMARIES_OTHER),
+ C2F(mCodedColorAspects, transfer).inRange(
+ C2Color::TRANSFER_UNSPECIFIED, C2Color::TRANSFER_OTHER),
+ C2F(mCodedColorAspects, matrix).inRange(
+ C2Color::MATRIX_UNSPECIFIED, C2Color::MATRIX_OTHER)
+ })
+ .withSetter(CodedColorAspectsSetter)
+ .build());
+
+ addParameter(
+ DefineParam(mColorAspects, C2_PARAMKEY_COLOR_ASPECTS)
+ .withDefault(new C2StreamColorAspectsInfo::output(
+ 0u, C2Color::RANGE_UNSPECIFIED, C2Color::PRIMARIES_UNSPECIFIED,
+ C2Color::TRANSFER_UNSPECIFIED, C2Color::MATRIX_UNSPECIFIED))
+ .withFields({
+ C2F(mColorAspects, range).inRange(
+ C2Color::RANGE_UNSPECIFIED, C2Color::RANGE_OTHER),
+ C2F(mColorAspects, primaries).inRange(
+ C2Color::PRIMARIES_UNSPECIFIED, C2Color::PRIMARIES_OTHER),
+ C2F(mColorAspects, transfer).inRange(
+ C2Color::TRANSFER_UNSPECIFIED, C2Color::TRANSFER_OTHER),
+ C2F(mColorAspects, matrix).inRange(
+ C2Color::MATRIX_UNSPECIFIED, C2Color::MATRIX_OTHER)
+ })
+ .withSetter(ColorAspectsSetter, mDefaultColorAspects, mCodedColorAspects)
+ .build());
+
// TODO: support more formats?
addParameter(DefineParam(mPixelFormat, C2_PARAMKEY_PIXEL_FORMAT)
.withConstValue(new C2StreamPixelFormatInfo::output(
@@ -218,6 +251,37 @@
return C2R::Ok();
}
+ static C2R CodedColorAspectsSetter(bool mayBlock, C2P<C2StreamColorAspectsInfo::input> &me) {
+ (void)mayBlock;
+ if (me.v.range > C2Color::RANGE_OTHER) {
+ me.set().range = C2Color::RANGE_OTHER;
+ }
+ if (me.v.primaries > C2Color::PRIMARIES_OTHER) {
+ me.set().primaries = C2Color::PRIMARIES_OTHER;
+ }
+ if (me.v.transfer > C2Color::TRANSFER_OTHER) {
+ me.set().transfer = C2Color::TRANSFER_OTHER;
+ }
+ if (me.v.matrix > C2Color::MATRIX_OTHER) {
+ me.set().matrix = C2Color::MATRIX_OTHER;
+ }
+ return C2R::Ok();
+ }
+
+ static C2R ColorAspectsSetter(bool mayBlock, C2P<C2StreamColorAspectsInfo::output> &me,
+ const C2P<C2StreamColorAspectsTuning::output> &def,
+ const C2P<C2StreamColorAspectsInfo::input> &coded) {
+ (void)mayBlock;
+ // take default values for all unspecified fields, and coded values for specified ones
+ me.set().range = coded.v.range == RANGE_UNSPECIFIED ? def.v.range : coded.v.range;
+ me.set().primaries = coded.v.primaries == PRIMARIES_UNSPECIFIED
+ ? def.v.primaries : coded.v.primaries;
+ me.set().transfer = coded.v.transfer == TRANSFER_UNSPECIFIED
+ ? def.v.transfer : coded.v.transfer;
+ me.set().matrix = coded.v.matrix == MATRIX_UNSPECIFIED ? def.v.matrix : coded.v.matrix;
+ return C2R::Ok();
+ }
+
static C2R ProfileLevelSetter(
bool mayBlock, C2P<C2StreamProfileLevelInfo::input> &me,
const C2P<C2StreamPictureSizeInfo::output> &size) {
@@ -232,6 +296,10 @@
return mDefaultColorAspects;
}
+ std::shared_ptr<C2StreamColorAspectsInfo::output> getColorAspects_l() {
+ return mColorAspects;
+ }
+
static C2R Hdr10PlusInfoInputSetter(bool mayBlock,
C2P<C2StreamHdr10PlusInfo::input> &me) {
(void)mayBlock;
@@ -254,6 +322,8 @@
std::shared_ptr<C2StreamColorInfo::output> mColorInfo;
std::shared_ptr<C2StreamPixelFormatInfo::output> mPixelFormat;
std::shared_ptr<C2StreamColorAspectsTuning::output> mDefaultColorAspects;
+ std::shared_ptr<C2StreamColorAspectsInfo::input> mCodedColorAspects;
+ std::shared_ptr<C2StreamColorAspectsInfo::output> mColorAspects;
std::shared_ptr<C2StreamHdr10PlusInfo::input> mHdr10PlusInfoInput;
std::shared_ptr<C2StreamHdr10PlusInfo::output> mHdr10PlusInfoOutput;
};
@@ -371,6 +441,10 @@
const std::shared_ptr<C2GraphicBlock> &block) {
std::shared_ptr<C2Buffer> buffer =
createGraphicBuffer(block, C2Rect(mWidth, mHeight));
+ {
+ IntfImpl::Lock lock = mIntf->lock();
+ buffer->setInfo(mIntf->getColorAspects_l());
+ }
auto fillWork = [buffer, index](const std::unique_ptr<C2Work> &work) {
uint32_t flags = 0;
if ((work->input.flags & C2FrameData::FLAG_END_OF_STREAM) &&
@@ -465,148 +539,36 @@
}
}
-static void copyOutputBufferToYV12Frame(uint8_t *dstY, uint8_t *dstU, uint8_t *dstV,
- const uint8_t *srcY, const uint8_t *srcU, const uint8_t *srcV,
- size_t srcYStride, size_t srcUStride, size_t srcVStride,
- size_t dstYStride, size_t dstUVStride,
- uint32_t width, uint32_t height,
- bool isMonochrome) {
+void C2SoftGav1Dec::getVuiParams(const libgav1::DecoderBuffer *buffer) {
+ VuiColorAspects vuiColorAspects;
+ vuiColorAspects.primaries = buffer->color_primary;
+ vuiColorAspects.transfer = buffer->transfer_characteristics;
+ vuiColorAspects.coeffs = buffer->matrix_coefficients;
+ vuiColorAspects.fullRange = buffer->color_range;
- for (size_t i = 0; i < height; ++i) {
- memcpy(dstY, srcY, width);
- srcY += srcYStride;
- dstY += dstYStride;
- }
-
- if (isMonochrome) {
- // Fill with neutral U/V values.
- for (size_t i = 0; i < height / 2; ++i) {
- memset(dstV, NEUTRAL_UV_VALUE, width / 2);
- memset(dstU, NEUTRAL_UV_VALUE, width / 2);
- dstV += dstUVStride;
- dstU += dstUVStride;
+ // convert vui aspects to C2 values if changed
+ if (!(vuiColorAspects == mBitstreamColorAspects)) {
+ mBitstreamColorAspects = vuiColorAspects;
+ ColorAspects sfAspects;
+ C2StreamColorAspectsInfo::input codedAspects = { 0u };
+ ColorUtils::convertIsoColorAspectsToCodecAspects(
+ vuiColorAspects.primaries, vuiColorAspects.transfer, vuiColorAspects.coeffs,
+ vuiColorAspects.fullRange, sfAspects);
+ if (!C2Mapper::map(sfAspects.mPrimaries, &codedAspects.primaries)) {
+ codedAspects.primaries = C2Color::PRIMARIES_UNSPECIFIED;
+ }
+ if (!C2Mapper::map(sfAspects.mRange, &codedAspects.range)) {
+ codedAspects.range = C2Color::RANGE_UNSPECIFIED;
+ }
+ if (!C2Mapper::map(sfAspects.mMatrixCoeffs, &codedAspects.matrix)) {
+ codedAspects.matrix = C2Color::MATRIX_UNSPECIFIED;
+ }
+ if (!C2Mapper::map(sfAspects.mTransfer, &codedAspects.transfer)) {
+ codedAspects.transfer = C2Color::TRANSFER_UNSPECIFIED;
+ }
+ std::vector<std::unique_ptr<C2SettingResult>> failures;
+ mIntf->config({&codedAspects}, C2_MAY_BLOCK, &failures);
}
- return;
- }
-
- for (size_t i = 0; i < height / 2; ++i) {
- memcpy(dstV, srcV, width / 2);
- srcV += srcVStride;
- dstV += dstUVStride;
- }
-
- for (size_t i = 0; i < height / 2; ++i) {
- memcpy(dstU, srcU, width / 2);
- srcU += srcUStride;
- dstU += dstUVStride;
- }
-}
-
-static void convertYUV420Planar16ToY410(uint32_t *dst, const uint16_t *srcY,
- const uint16_t *srcU,
- const uint16_t *srcV, size_t srcYStride,
- size_t srcUStride, size_t srcVStride,
- size_t dstStride, size_t width,
- size_t height) {
- // Converting two lines at a time, slightly faster
- for (size_t y = 0; y < height; y += 2) {
- uint32_t *dstTop = (uint32_t *)dst;
- uint32_t *dstBot = (uint32_t *)(dst + dstStride);
- uint16_t *ySrcTop = (uint16_t *)srcY;
- uint16_t *ySrcBot = (uint16_t *)(srcY + srcYStride);
- uint16_t *uSrc = (uint16_t *)srcU;
- uint16_t *vSrc = (uint16_t *)srcV;
-
- uint32_t u01, v01, y01, y23, y45, y67, uv0, uv1;
- size_t x = 0;
- for (; x < width - 3; x += 4) {
- u01 = *((uint32_t *)uSrc);
- uSrc += 2;
- v01 = *((uint32_t *)vSrc);
- vSrc += 2;
-
- y01 = *((uint32_t *)ySrcTop);
- ySrcTop += 2;
- y23 = *((uint32_t *)ySrcTop);
- ySrcTop += 2;
- y45 = *((uint32_t *)ySrcBot);
- ySrcBot += 2;
- y67 = *((uint32_t *)ySrcBot);
- ySrcBot += 2;
-
- uv0 = (u01 & 0x3FF) | ((v01 & 0x3FF) << 20);
- uv1 = (u01 >> 16) | ((v01 >> 16) << 20);
-
- *dstTop++ = 3 << 30 | ((y01 & 0x3FF) << 10) | uv0;
- *dstTop++ = 3 << 30 | ((y01 >> 16) << 10) | uv0;
- *dstTop++ = 3 << 30 | ((y23 & 0x3FF) << 10) | uv1;
- *dstTop++ = 3 << 30 | ((y23 >> 16) << 10) | uv1;
-
- *dstBot++ = 3 << 30 | ((y45 & 0x3FF) << 10) | uv0;
- *dstBot++ = 3 << 30 | ((y45 >> 16) << 10) | uv0;
- *dstBot++ = 3 << 30 | ((y67 & 0x3FF) << 10) | uv1;
- *dstBot++ = 3 << 30 | ((y67 >> 16) << 10) | uv1;
- }
-
- // There should be at most 2 more pixels to process. Note that we don't
- // need to consider odd case as the buffer is always aligned to even.
- if (x < width) {
- u01 = *uSrc;
- v01 = *vSrc;
- y01 = *((uint32_t *)ySrcTop);
- y45 = *((uint32_t *)ySrcBot);
- uv0 = (u01 & 0x3FF) | ((v01 & 0x3FF) << 20);
- *dstTop++ = ((y01 & 0x3FF) << 10) | uv0;
- *dstTop++ = ((y01 >> 16) << 10) | uv0;
- *dstBot++ = ((y45 & 0x3FF) << 10) | uv0;
- *dstBot++ = ((y45 >> 16) << 10) | uv0;
- }
-
- srcY += srcYStride * 2;
- srcU += srcUStride;
- srcV += srcVStride;
- dst += dstStride * 2;
- }
-}
-
-static void convertYUV420Planar16ToYUV420Planar(
- uint8_t *dstY, uint8_t *dstU, uint8_t *dstV,
- const uint16_t *srcY, const uint16_t *srcU, const uint16_t *srcV,
- size_t srcYStride, size_t srcUStride, size_t srcVStride,
- size_t dstYStride, size_t dstUVStride,
- size_t width, size_t height, bool isMonochrome) {
-
- for (size_t y = 0; y < height; ++y) {
- for (size_t x = 0; x < width; ++x) {
- dstY[x] = (uint8_t)(srcY[x] >> 2);
- }
-
- srcY += srcYStride;
- dstY += dstYStride;
- }
-
- if (isMonochrome) {
- // Fill with neutral U/V values.
- for (size_t y = 0; y < (height + 1) / 2; ++y) {
- memset(dstV, NEUTRAL_UV_VALUE, (width + 1) / 2);
- memset(dstU, NEUTRAL_UV_VALUE, (width + 1) / 2);
- dstV += dstUVStride;
- dstU += dstUVStride;
- }
- return;
- }
-
- for (size_t y = 0; y < (height + 1) / 2; ++y) {
- for (size_t x = 0; x < (width + 1) / 2; ++x) {
- dstU[x] = (uint8_t)(srcU[x] >> 2);
- dstV[x] = (uint8_t)(srcV[x] >> 2);
- }
-
- srcU += srcUStride;
- srcV += srcVStride;
- dstU += dstUVStride;
- dstV += dstUVStride;
- }
}
bool C2SoftGav1Dec::outputBuffer(const std::shared_ptr<C2BlockPool> &pool,
@@ -651,6 +613,7 @@
}
}
+ getVuiParams(buffer);
if (!(buffer->image_format == libgav1::kImageFormatYuv420 ||
buffer->image_format == libgav1::kImageFormatMonochrome400)) {
ALOGE("image_format %d not supported", buffer->image_format);
@@ -666,20 +629,22 @@
uint32_t format = HAL_PIXEL_FORMAT_YV12;
if (buffer->bitdepth == 10) {
IntfImpl::Lock lock = mIntf->lock();
- std::shared_ptr<C2StreamColorAspectsTuning::output> defaultColorAspects =
- mIntf->getDefaultColorAspects_l();
-
- if (defaultColorAspects->primaries == C2Color::PRIMARIES_BT2020 &&
- defaultColorAspects->matrix == C2Color::MATRIX_BT2020 &&
- defaultColorAspects->transfer == C2Color::TRANSFER_ST2084) {
- if (buffer->image_format != libgav1::kImageFormatYuv420) {
+ std::shared_ptr<C2StreamColorAspectsInfo::output> codedColorAspects =
+ mIntf->getColorAspects_l();
+ bool allowRGBA1010102 = false;
+ if (codedColorAspects->primaries == C2Color::PRIMARIES_BT2020 &&
+ codedColorAspects->matrix == C2Color::MATRIX_BT2020 &&
+ codedColorAspects->transfer == C2Color::TRANSFER_ST2084) {
+ allowRGBA1010102 = true;
+ }
+ format = getHalPixelFormatForBitDepth10(allowRGBA1010102);
+ if ((format == HAL_PIXEL_FORMAT_RGBA_1010102) &&
+ (buffer->image_format != libgav1::kImageFormatYuv420)) {
ALOGE("Only YUV420 output is supported when targeting RGBA_1010102");
- mSignalledError = true;
- work->result = C2_OMITTED;
- work->workletsProcessed = 1u;
- return false;
- }
- format = HAL_PIXEL_FORMAT_RGBA_1010102;
+ mSignalledError = true;
+ work->result = C2_OMITTED;
+ work->workletsProcessed = 1u;
+ return false;
}
}
C2MemoryUsage usage = {C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE};
@@ -721,22 +686,24 @@
const uint16_t *srcV = (const uint16_t *)buffer->plane[2];
if (format == HAL_PIXEL_FORMAT_RGBA_1010102) {
- convertYUV420Planar16ToY410(
- (uint32_t *)dstY, srcY, srcU, srcV, srcYStride / 2, srcUStride / 2,
- srcVStride / 2, dstYStride / sizeof(uint32_t), mWidth, mHeight);
+ convertYUV420Planar16ToY410((uint32_t *)dstY, srcY, srcU, srcV, srcYStride / 2,
+ srcUStride / 2, srcVStride / 2, dstYStride / sizeof(uint32_t),
+ mWidth, mHeight);
+ } else if (format == HAL_PIXEL_FORMAT_YCBCR_P010) {
+ convertYUV420Planar16ToP010((uint16_t *)dstY, (uint16_t *)dstU, srcY, srcU, srcV,
+ srcYStride / 2, srcUStride / 2, srcVStride / 2, dstYStride / 2,
+ dstUVStride / 2, mWidth, mHeight, isMonochrome);
} else {
- convertYUV420Planar16ToYUV420Planar(
- dstY, dstU, dstV, srcY, srcU, srcV, srcYStride / 2, srcUStride / 2,
- srcVStride / 2, dstYStride, dstUVStride, mWidth, mHeight,
- isMonochrome);
+ convertYUV420Planar16ToYV12(dstY, dstU, dstV, srcY, srcU, srcV, srcYStride / 2,
+ srcUStride / 2, srcVStride / 2, dstYStride, dstUVStride, mWidth,
+ mHeight, isMonochrome);
}
} else {
const uint8_t *srcY = (const uint8_t *)buffer->plane[0];
const uint8_t *srcU = (const uint8_t *)buffer->plane[1];
const uint8_t *srcV = (const uint8_t *)buffer->plane[2];
- copyOutputBufferToYV12Frame(
- dstY, dstU, dstV, srcY, srcU, srcV, srcYStride, srcUStride, srcVStride,
- dstYStride, dstUVStride, mWidth, mHeight, isMonochrome);
+ convertYUV420Planar8ToYV12(dstY, dstU, dstV, srcY, srcU, srcV, srcYStride, srcUStride,
+ srcVStride, dstYStride, dstUVStride, mWidth, mHeight, isMonochrome);
}
finishWork(buffer->user_private_data, work, std::move(block));
block = nullptr;
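
Editor's note: the getVuiParams() flow above converts ISO VUI color values to Codec 2.0 aspects and falls back to the UNSPECIFIED constants whenever C2Mapper::map() cannot translate a value. A minimal, self-contained sketch of that fallback-mapping pattern; the mapIsoPrimaries helper and enum names below are illustrative, not part of the patch:

    #include <cstdio>
    #include <optional>

    // Hypothetical ISO -> framework enum mapping; returns nullopt when unknown.
    enum class Primaries { Unspecified, BT709, BT2020 };
    static std::optional<Primaries> mapIsoPrimaries(int isoValue) {
        switch (isoValue) {
            case 1: return Primaries::BT709;
            case 9: return Primaries::BT2020;
            default: return std::nullopt;  // unmappable -> caller picks a default
        }
    }

    int main() {
        // Fall back to Unspecified, just as the decoder falls back to
        // C2Color::PRIMARIES_UNSPECIFIED when the mapping fails.
        Primaries p = mapIsoPrimaries(9).value_or(Primaries::Unspecified);
        std::printf("mapped primaries: %d\n", static_cast<int>(p));
        return 0;
    }
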
diff --git a/media/codec2/components/gav1/C2SoftGav1Dec.h b/media/codec2/components/gav1/C2SoftGav1Dec.h
index 555adc9..134fa0d 100644
--- a/media/codec2/components/gav1/C2SoftGav1Dec.h
+++ b/media/codec2/components/gav1/C2SoftGav1Dec.h
@@ -17,7 +17,10 @@
#ifndef ANDROID_C2_SOFT_GAV1_DEC_H_
#define ANDROID_C2_SOFT_GAV1_DEC_H_
+#include <media/stagefright/foundation/ColorUtils.h>
+
#include <SimpleC2Component.h>
+#include <C2Config.h>
#include "libgav1/src/gav1/decoder.h"
#include "libgav1/src/gav1/decoder_settings.h"
@@ -56,10 +59,32 @@
bool mSignalledOutputEos;
bool mSignalledError;
+ // Color aspects. These are ISO values and are meant to detect changes in aspects to avoid
+ // converting them to C2 values for each frame
+ struct VuiColorAspects {
+ uint8_t primaries;
+ uint8_t transfer;
+ uint8_t coeffs;
+ uint8_t fullRange;
+
+ // default color aspects
+ VuiColorAspects()
+ : primaries(C2Color::PRIMARIES_UNSPECIFIED),
+ transfer(C2Color::TRANSFER_UNSPECIFIED),
+ coeffs(C2Color::MATRIX_UNSPECIFIED),
+ fullRange(C2Color::RANGE_UNSPECIFIED) { }
+
+ bool operator==(const VuiColorAspects &o) {
+ return primaries == o.primaries && transfer == o.transfer && coeffs == o.coeffs
+ && fullRange == o.fullRange;
+ }
+ } mBitstreamColorAspects;
+
struct timeval mTimeStart; // Time at the start of decode()
struct timeval mTimeEnd; // Time at the end of decode()
bool initDecoder();
+ void getVuiParams(const libgav1::DecoderBuffer *buffer);
void destroyDecoder();
void finishWork(uint64_t index, const std::unique_ptr<C2Work>& work,
const std::shared_ptr<C2GraphicBlock>& block);
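
Editor's note: mBitstreamColorAspects caches the last-seen VUI values so the ISO-to-C2 conversion and the mIntf->config() call only run when the bitstream aspects actually change. A standalone sketch of that change-detection pattern, with illustrative values (9/16/9 correspond to BT.2020 primaries, PQ transfer, BT.2020 matrix in ISO coding):

    #include <cstdint>
    #include <cstdio>

    struct VuiColorAspects {
        uint8_t primaries, transfer, coeffs, fullRange;
        bool operator==(const VuiColorAspects& o) const {
            return primaries == o.primaries && transfer == o.transfer &&
                   coeffs == o.coeffs && fullRange == o.fullRange;
        }
    };

    struct Tracker {
        VuiColorAspects last{2, 2, 2, 0};  // 2 == "unspecified" in ISO coding
        bool update(const VuiColorAspects& cur) {
            if (cur == last) return false;  // unchanged: skip the config update
            last = cur;
            return true;                    // changed: caller reconfigures the interface
        }
    };

    int main() {
        Tracker t;
        std::printf("%d\n", t.update({9, 16, 9, 0}));  // 1: new aspects seen
        std::printf("%d\n", t.update({9, 16, 9, 0}));  // 0: identical values ignored
        return 0;
    }
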
diff --git a/media/codec2/components/hevc/C2SoftHevcDec.cpp b/media/codec2/components/hevc/C2SoftHevcDec.cpp
index 6bcf3a2..5f5b2ef 100644
--- a/media/codec2/components/hevc/C2SoftHevcDec.cpp
+++ b/media/codec2/components/hevc/C2SoftHevcDec.cpp
@@ -502,7 +502,7 @@
status_t C2SoftHevcDec::initDecoder() {
if (OK != createDecoder()) return UNKNOWN_ERROR;
mNumCores = MIN(getCpuCoreCount(), MAX_NUM_CORES);
- mStride = ALIGN32(mWidth);
+ mStride = ALIGN128(mWidth);
mSignalledError = false;
resetPlugin();
(void) setNumCores();
@@ -768,20 +768,20 @@
return C2_CORRUPTED;
}
if (mOutBlock &&
- (mOutBlock->width() != ALIGN32(mWidth) || mOutBlock->height() != mHeight)) {
+ (mOutBlock->width() != ALIGN128(mWidth) || mOutBlock->height() != mHeight)) {
mOutBlock.reset();
}
if (!mOutBlock) {
uint32_t format = HAL_PIXEL_FORMAT_YV12;
C2MemoryUsage usage = { C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE };
c2_status_t err =
- pool->fetchGraphicBlock(ALIGN32(mWidth), mHeight, format, usage, &mOutBlock);
+ pool->fetchGraphicBlock(ALIGN128(mWidth), mHeight, format, usage, &mOutBlock);
if (err != C2_OK) {
ALOGE("fetchGraphicBlock for Output failed with status %d", err);
return err;
}
ALOGV("provided (%dx%d) required (%dx%d)",
- mOutBlock->width(), mOutBlock->height(), ALIGN32(mWidth), mHeight);
+ mOutBlock->width(), mOutBlock->height(), ALIGN128(mWidth), mHeight);
}
return C2_OK;
@@ -917,7 +917,7 @@
if (0 < ps_decode_op->u4_pic_wd && 0 < ps_decode_op->u4_pic_ht) {
if (mHeaderDecoded == false) {
mHeaderDecoded = true;
- setParams(ALIGN32(ps_decode_op->u4_pic_wd), IVD_DECODE_FRAME);
+ setParams(ALIGN128(ps_decode_op->u4_pic_wd), IVD_DECODE_FRAME);
}
if (ps_decode_op->u4_pic_wd != mWidth || ps_decode_op->u4_pic_ht != mHeight) {
mWidth = ps_decode_op->u4_pic_wd;
diff --git a/media/codec2/components/hevc/C2SoftHevcDec.h b/media/codec2/components/hevc/C2SoftHevcDec.h
index b9b0a48..b9296e9 100644
--- a/media/codec2/components/hevc/C2SoftHevcDec.h
+++ b/media/codec2/components/hevc/C2SoftHevcDec.h
@@ -36,7 +36,7 @@
#define ivdext_ctl_set_num_cores_op_t ihevcd_cxa_ctl_set_num_cores_op_t
#define ivdext_ctl_get_vui_params_ip_t ihevcd_cxa_ctl_get_vui_params_ip_t
#define ivdext_ctl_get_vui_params_op_t ihevcd_cxa_ctl_get_vui_params_op_t
-#define ALIGN32(x) ((((x) + 31) >> 5) << 5)
+#define ALIGN128(x) ((((x) + 127) >> 7) << 7)
#define MAX_NUM_CORES 4
#define IVDEXT_CMD_CTL_SET_NUM_CORES \
(IVD_CONTROL_API_COMMAND_TYPE_T)IHEVCD_CXA_CMD_CTL_SET_NUM_CORES
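
Editor's note: the stride alignment moves from 32 to 128 here; ALIGN128(x) rounds x up to the next multiple of 128 with the same shift-and-add trick as the old ALIGN32, just with a wider granule. A quick check of the macro's behavior:

    #include <cassert>
    #include <cstdio>

    #define ALIGN128(x) ((((x) + 127) >> 7) << 7)   // same definition as the patch

    int main() {
        assert(ALIGN128(1)    == 128);
        assert(ALIGN128(128)  == 128);
        assert(ALIGN128(1920) == 1920);   // already a multiple of 128
        assert(ALIGN128(1921) == 2048);
        std::printf("ALIGN128 ok\n");
        return 0;
    }
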
diff --git a/media/codec2/components/hevc/C2SoftHevcEnc.cpp b/media/codec2/components/hevc/C2SoftHevcEnc.cpp
index 4bc1777..b7a5686 100644
--- a/media/codec2/components/hevc/C2SoftHevcEnc.cpp
+++ b/media/codec2/components/hevc/C2SoftHevcEnc.cpp
@@ -245,6 +245,19 @@
})
.withSetter(CodedColorAspectsSetter, mColorAspects)
.build());
+
+ addParameter(
+ DefineParam(mPictureQuantization, C2_PARAMKEY_PICTURE_QUANTIZATION)
+ .withDefault(C2StreamPictureQuantizationTuning::output::AllocShared(
+ 0 /* flexCount */, 0u /* stream */))
+ .withFields({C2F(mPictureQuantization, m.values[0].type_).oneOf(
+ {C2Config::picture_type_t(I_FRAME),
+ C2Config::picture_type_t(P_FRAME),
+ C2Config::picture_type_t(B_FRAME)}),
+ C2F(mPictureQuantization, m.values[0].min).any(),
+ C2F(mPictureQuantization, m.values[0].max).any()})
+ .withSetter(PictureQuantizationSetter)
+ .build());
}
static C2R InputDelaySetter(
@@ -464,9 +477,69 @@
me.set().matrix = coded.v.matrix;
return C2R::Ok();
}
+ static C2R PictureQuantizationSetter(bool mayBlock,
+ C2P<C2StreamPictureQuantizationTuning::output> &me) {
+ (void)mayBlock;
+
+ // these are the ones we're going to set, so we want them to default
+ // to the DEFAULT values for the codec
+ int32_t iMin = HEVC_QP_MIN, pMin = HEVC_QP_MIN, bMin = HEVC_QP_MIN;
+ int32_t iMax = HEVC_QP_MAX, pMax = HEVC_QP_MAX, bMax = HEVC_QP_MAX;
+
+ for (size_t i = 0; i < me.v.flexCount(); ++i) {
+ const C2PictureQuantizationStruct &layer = me.v.m.values[i];
+
+ // layerMin is clamped to [HEVC_QP_MIN, layerMax] to avoid error
+ // cases where layer.min > layer.max
+ int32_t layerMax = std::clamp(layer.max, HEVC_QP_MIN, HEVC_QP_MAX);
+ int32_t layerMin = std::clamp(layer.min, HEVC_QP_MIN, layerMax);
+ if (layer.type_ == C2Config::picture_type_t(I_FRAME)) {
+ iMax = layerMax;
+ iMin = layerMin;
+ ALOGV("iMin %d iMax %d", iMin, iMax);
+ } else if (layer.type_ == C2Config::picture_type_t(P_FRAME)) {
+ pMax = layerMax;
+ pMin = layerMin;
+ ALOGV("pMin %d pMax %d", pMin, pMax);
+ } else if (layer.type_ == C2Config::picture_type_t(B_FRAME)) {
+ bMax = layerMax;
+ bMin = layerMin;
+ ALOGV("bMin %d bMax %d", bMin, bMax);
+ }
+ }
+
+ ALOGV("PictureQuantizationSetter(entry): i %d-%d p %d-%d b %d-%d",
+ iMin, iMax, pMin, pMax, bMin, bMax);
+
+ int32_t maxFrameQP = std::min(std::min(iMax, pMax), bMax);
+ int32_t minFrameQP = std::max(std::max(iMin, pMin), bMin);
+ if (minFrameQP > maxFrameQP) {
+ minFrameQP = maxFrameQP;
+ }
+
+ // put them back into the structure
+ for (size_t i = 0; i < me.v.flexCount(); ++i) {
+ const C2PictureQuantizationStruct &layer = me.v.m.values[i];
+
+ if (layer.type_ == C2Config::picture_type_t(I_FRAME) ||
+ layer.type_ == C2Config::picture_type_t(P_FRAME) ||
+ layer.type_ == C2Config::picture_type_t(B_FRAME)) {
+ me.set().m.values[i].max = maxFrameQP;
+ me.set().m.values[i].min = minFrameQP;
+ }
+ }
+
+ ALOGV("PictureQuantizationSetter(exit): i = p = b = %d-%d",
+ minFrameQP, maxFrameQP);
+
+ return C2R::Ok();
+ }
std::shared_ptr<C2StreamColorAspectsInfo::output> getCodedColorAspects_l() {
return mCodedColorAspects;
}
+ std::shared_ptr<C2StreamPictureQuantizationTuning::output> getPictureQuantization_l() const {
+ return mPictureQuantization;
+ }
private:
std::shared_ptr<C2StreamUsageTuning::input> mUsage;
@@ -482,6 +555,7 @@
std::shared_ptr<C2StreamGopTuning::output> mGop;
std::shared_ptr<C2StreamColorAspectsInfo::input> mColorAspects;
std::shared_ptr<C2StreamColorAspectsInfo::output> mCodedColorAspects;
+ std::shared_ptr<C2StreamPictureQuantizationTuning::output> mPictureQuantization;
};
static size_t GetCPUCoreCount() {
@@ -654,12 +728,41 @@
mEncParams.s_coding_tools_prms.i4_max_temporal_layers = 3;
}
- switch (mBitrateMode->value) {
- case C2Config::BITRATE_IGNORE:
- mEncParams.s_config_prms.i4_rate_control_mode = 3;
- mEncParams.s_tgt_lyr_prms.as_tgt_params[0].ai4_frame_qp[0] =
- getQpFromQuality(mQuality->value);
+ // we resolved out-of-bound and unspecified values in PictureQuantizationSetter()
+ // so we can start with defaults that are overridden as needed.
+ int32_t maxFrameQP = mEncParams.s_config_prms.i4_max_frame_qp;
+ int32_t minFrameQP = mEncParams.s_config_prms.i4_min_frame_qp;
+
+ for (size_t i = 0; i < mQpBounds->flexCount(); ++i) {
+ const C2PictureQuantizationStruct &layer = mQpBounds->m.values[i];
+
+ // no need to loop, hevc library takes same range for I/P/B picture type
+ if (layer.type_ == C2Config::picture_type_t(I_FRAME) ||
+ layer.type_ == C2Config::picture_type_t(P_FRAME) ||
+ layer.type_ == C2Config::picture_type_t(B_FRAME)) {
+
+ maxFrameQP = layer.max;
+ minFrameQP = layer.min;
break;
+ }
+ }
+ mEncParams.s_config_prms.i4_max_frame_qp = maxFrameQP;
+ mEncParams.s_config_prms.i4_min_frame_qp = minFrameQP;
+
+ ALOGV("MaxFrameQp: %d MinFrameQp: %d", maxFrameQP, minFrameQP);
+
+ mEncParams.s_tgt_lyr_prms.as_tgt_params[0].ai4_frame_qp[0] =
+ std::clamp(kDefaultInitQP, minFrameQP, maxFrameQP);
+
+ switch (mBitrateMode->value) {
+ case C2Config::BITRATE_IGNORE: {
+ mEncParams.s_config_prms.i4_rate_control_mode = 3;
+ // ensure initial qp values are within our newly configured bounds
+ int32_t frameQp = getQpFromQuality(mQuality->value);
+ mEncParams.s_tgt_lyr_prms.as_tgt_params[0].ai4_frame_qp[0] =
+ std::clamp(frameQp, minFrameQP, maxFrameQP);
+ break;
+ }
case C2Config::BITRATE_CONST:
mEncParams.s_config_prms.i4_rate_control_mode = 5;
break;
@@ -723,6 +826,7 @@
mGop = mIntf->getGop_l();
mRequestSync = mIntf->getRequestSync_l();
mColorAspects = mIntf->getCodedColorAspects_l();
+ mQpBounds = mIntf->getPictureQuantization_l();
}
c2_status_t status = initEncParams();
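
Editor's note: PictureQuantizationSetter() clamps each layer's QP range into [HEVC_QP_MIN, HEVC_QP_MAX] and then collapses the I/P/B ranges into a single frame-level range, since the HEVC library takes one min/max pair. A compact sketch of that collapse, with example values chosen for illustration:

    #include <algorithm>
    #include <cstdio>

    int main() {
        constexpr int kQpMin = 1, kQpMax = 51;   // HEVC_QP_MIN / HEVC_QP_MAX in the patch
        // Per-picture-type requests, possibly out of range or inverted.
        struct { int min, max; } layers[] = {{-3, 60}, {20, 35}, {25, 30}};  // I, P, B

        int minFrameQP = kQpMin, maxFrameQP = kQpMax;
        for (auto& l : layers) {
            int lMax = std::clamp(l.max, kQpMin, kQpMax);
            int lMin = std::clamp(l.min, kQpMin, lMax);   // also guards min > max
            maxFrameQP = std::min(maxFrameQP, lMax);
            minFrameQP = std::max(minFrameQP, lMin);
        }
        if (minFrameQP > maxFrameQP) minFrameQP = maxFrameQP;
        std::printf("frame QP range: [%d, %d]\n", minFrameQP, maxFrameQP);  // [25, 30]
        return 0;
    }
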
diff --git a/media/codec2/components/hevc/C2SoftHevcEnc.h b/media/codec2/components/hevc/C2SoftHevcEnc.h
index 9dbf682..4217a8b 100644
--- a/media/codec2/components/hevc/C2SoftHevcEnc.h
+++ b/media/codec2/components/hevc/C2SoftHevcEnc.h
@@ -42,6 +42,11 @@
#define DEFAULT_B_FRAMES 0
#define DEFAULT_RC_LOOKAHEAD 0
+#define HEVC_QP_MIN 1
+#define HEVC_QP_MAX 51
+
+constexpr int32_t kDefaultInitQP = 32;
+
struct C2SoftHevcEnc : public SimpleC2Component {
class IntfImpl;
@@ -90,6 +95,7 @@
std::shared_ptr<C2StreamGopTuning::output> mGop;
std::shared_ptr<C2StreamRequestSyncFrameTuning::output> mRequestSync;
std::shared_ptr<C2StreamColorAspectsInfo::output> mColorAspects;
+ std::shared_ptr<C2StreamPictureQuantizationTuning::output> mQpBounds;
#ifdef FILE_DUMP_ENABLE
char mInFile[200];
char mOutFile[200];
diff --git a/media/codec2/components/mp3/C2SoftMp3Dec.cpp b/media/codec2/components/mp3/C2SoftMp3Dec.cpp
index e394670..149c6ee 100644
--- a/media/codec2/components/mp3/C2SoftMp3Dec.cpp
+++ b/media/codec2/components/mp3/C2SoftMp3Dec.cpp
@@ -321,6 +321,13 @@
return C2_OK;
}
+static void fillEmptyWork(const std::unique_ptr<C2Work> &work) {
+ work->worklets.front()->output.flags = work->input.flags;
+ work->worklets.front()->output.buffers.clear();
+ work->worklets.front()->output.ordinal = work->input.ordinal;
+ work->workletsProcessed = 1u;
+}
+
// TODO: Can overall error checking be improved? As in the check for validity of
// work, pool ptr, work->input.buffers.size() == 1, ...
// TODO: Blind removal of 529 samples from the output may not work. Because
@@ -486,17 +493,17 @@
}
}
- int64_t outTimeStamp = mProcessedSamples * 1000000ll / samplingRate;
- mProcessedSamples += ((outSize - outOffset) / (numChannels * sizeof(int16_t)));
- ALOGV("out buffer attr. offset %d size %d timestamp %" PRId64 " ", outOffset,
- outSize - outOffset, mAnchorTimeStamp + outTimeStamp);
- decodedSizes.clear();
- work->worklets.front()->output.flags = work->input.flags;
- work->worklets.front()->output.buffers.clear();
- work->worklets.front()->output.buffers.push_back(
- createLinearBuffer(block, outOffset, outSize - outOffset));
- work->worklets.front()->output.ordinal = work->input.ordinal;
- work->worklets.front()->output.ordinal.timestamp = mAnchorTimeStamp + outTimeStamp;
+ fillEmptyWork(work);
+ if (samplingRate && numChannels) {
+ int64_t outTimeStamp = mProcessedSamples * 1000000ll / samplingRate;
+ mProcessedSamples += ((outSize - outOffset) / (numChannels * sizeof(int16_t)));
+ ALOGV("out buffer attr. offset %d size %d timestamp %" PRId64 " ", outOffset,
+ outSize - outOffset, mAnchorTimeStamp + outTimeStamp);
+ decodedSizes.clear();
+ work->worklets.front()->output.buffers.push_back(
+ createLinearBuffer(block, outOffset, outSize - outOffset));
+ work->worklets.front()->output.ordinal.timestamp = mAnchorTimeStamp + outTimeStamp;
+ }
if (eos) {
mSignalledOutputEos = true;
ALOGV("signalled EOS");
diff --git a/media/codec2/components/mpeg2/C2SoftMpeg2Dec.cpp b/media/codec2/components/mpeg2/C2SoftMpeg2Dec.cpp
index b1cf388..5f9b30b 100644
--- a/media/codec2/components/mpeg2/C2SoftMpeg2Dec.cpp
+++ b/media/codec2/components/mpeg2/C2SoftMpeg2Dec.cpp
@@ -572,7 +572,7 @@
if (OK != createDecoder()) return UNKNOWN_ERROR;
mNumCores = MIN(getCpuCoreCount(), MAX_NUM_CORES);
- mStride = ALIGN32(mWidth);
+ mStride = ALIGN128(mWidth);
mSignalledError = false;
resetPlugin();
(void) setNumCores();
@@ -845,20 +845,20 @@
return C2_CORRUPTED;
}
if (mOutBlock &&
- (mOutBlock->width() != ALIGN32(mWidth) || mOutBlock->height() != mHeight)) {
+ (mOutBlock->width() != ALIGN128(mWidth) || mOutBlock->height() != mHeight)) {
mOutBlock.reset();
}
if (!mOutBlock) {
uint32_t format = HAL_PIXEL_FORMAT_YV12;
C2MemoryUsage usage = { C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE };
c2_status_t err =
- pool->fetchGraphicBlock(ALIGN32(mWidth), mHeight, format, usage, &mOutBlock);
+ pool->fetchGraphicBlock(ALIGN128(mWidth), mHeight, format, usage, &mOutBlock);
if (err != C2_OK) {
ALOGE("fetchGraphicBlock for Output failed with status %d", err);
return err;
}
ALOGV("provided (%dx%d) required (%dx%d)",
- mOutBlock->width(), mOutBlock->height(), ALIGN32(mWidth), mHeight);
+ mOutBlock->width(), mOutBlock->height(), ALIGN128(mWidth), mHeight);
}
return C2_OK;
diff --git a/media/codec2/components/mpeg2/C2SoftMpeg2Dec.h b/media/codec2/components/mpeg2/C2SoftMpeg2Dec.h
index fd66304a..8a29c14 100644
--- a/media/codec2/components/mpeg2/C2SoftMpeg2Dec.h
+++ b/media/codec2/components/mpeg2/C2SoftMpeg2Dec.h
@@ -37,7 +37,7 @@
#define ivdext_ctl_set_num_cores_op_t impeg2d_ctl_set_num_cores_op_t
#define ivdext_ctl_get_seq_info_ip_t impeg2d_ctl_get_seq_info_ip_t
#define ivdext_ctl_get_seq_info_op_t impeg2d_ctl_get_seq_info_op_t
-#define ALIGN32(x) ((((x) + 31) >> 5) << 5)
+#define ALIGN128(x) ((((x) + 127) >> 7) << 7)
#define MAX_NUM_CORES 4
#define IVDEXT_CMD_CTL_SET_NUM_CORES \
(IVD_CONTROL_API_COMMAND_TYPE_T)IMPEG2D_CMD_CTL_SET_NUM_CORES
diff --git a/media/codec2/components/mpeg4_h263/C2SoftMpeg4Dec.cpp b/media/codec2/components/mpeg4_h263/C2SoftMpeg4Dec.cpp
index ddd312f..54a1d0e 100644
--- a/media/codec2/components/mpeg4_h263/C2SoftMpeg4Dec.cpp
+++ b/media/codec2/components/mpeg4_h263/C2SoftMpeg4Dec.cpp
@@ -228,7 +228,6 @@
const std::shared_ptr<IntfImpl> &intfImpl)
: SimpleC2Component(std::make_shared<SimpleInterface<IntfImpl>>(name, id, intfImpl)),
mIntf(intfImpl),
- mDecHandle(nullptr),
mOutputBuffer{},
mInitialized(false) {
}
@@ -244,9 +243,7 @@
c2_status_t C2SoftMpeg4Dec::onStop() {
if (mInitialized) {
- if (mDecHandle) {
- PVCleanUpVideoDecoder(mDecHandle);
- }
+ PVCleanUpVideoDecoder(&mVideoDecControls);
mInitialized = false;
}
for (int32_t i = 0; i < kNumOutputBuffers; ++i) {
@@ -269,28 +266,15 @@
}
void C2SoftMpeg4Dec::onRelease() {
- if (mInitialized) {
- if (mDecHandle) {
- PVCleanUpVideoDecoder(mDecHandle);
- delete mDecHandle;
- mDecHandle = nullptr;
- }
- mInitialized = false;
- }
+ (void)onStop();
if (mOutBlock) {
mOutBlock.reset();
}
- for (int32_t i = 0; i < kNumOutputBuffers; ++i) {
- if (mOutputBuffer[i]) {
- free(mOutputBuffer[i]);
- mOutputBuffer[i] = nullptr;
- }
- }
}
c2_status_t C2SoftMpeg4Dec::onFlush_sm() {
if (mInitialized) {
- if (PV_TRUE != PVResetVideoDecoder(mDecHandle)) {
+ if (PV_TRUE != PVResetVideoDecoder(&mVideoDecControls)) {
return C2_CORRUPTED;
}
}
@@ -305,14 +289,8 @@
#else
mIsMpeg4 = false;
#endif
- if (!mDecHandle) {
- mDecHandle = new tagvideoDecControls;
- }
- if (!mDecHandle) {
- ALOGE("mDecHandle is null");
- return NO_MEMORY;
- }
- memset(mDecHandle, 0, sizeof(tagvideoDecControls));
+
+ memset(&mVideoDecControls, 0, sizeof(tagvideoDecControls));
/* TODO: bring these values to 352 and 288. It cannot be done as of now
* because, h263 doesn't seem to allow port reconfiguration. In OMX, the
@@ -368,10 +346,6 @@
}
c2_status_t C2SoftMpeg4Dec::ensureDecoderState(const std::shared_ptr<C2BlockPool> &pool) {
- if (!mDecHandle) {
- ALOGE("not supposed to be here, invalid decoder context");
- return C2_CORRUPTED;
- }
mOutputBufferSize = align(mIntf->getMaxWidth(), 16) * align(mIntf->getMaxHeight(), 16) * 3 / 2;
for (int32_t i = 0; i < kNumOutputBuffers; ++i) {
@@ -402,10 +376,10 @@
bool C2SoftMpeg4Dec::handleResChange(const std::unique_ptr<C2Work> &work) {
uint32_t disp_width, disp_height;
- PVGetVideoDimensions(mDecHandle, (int32 *)&disp_width, (int32 *)&disp_height);
+ PVGetVideoDimensions(&mVideoDecControls, (int32 *)&disp_width, (int32 *)&disp_height);
uint32_t buf_width, buf_height;
- PVGetBufferDimensions(mDecHandle, (int32 *)&buf_width, (int32 *)&buf_height);
+ PVGetBufferDimensions(&mVideoDecControls, (int32 *)&buf_width, (int32 *)&buf_height);
CHECK_LE(disp_width, buf_width);
CHECK_LE(disp_height, buf_height);
@@ -426,13 +400,14 @@
}
if (!mIsMpeg4) {
- PVCleanUpVideoDecoder(mDecHandle);
+ PVCleanUpVideoDecoder(&mVideoDecControls);
uint8_t *vol_data[1]{};
int32_t vol_size = 0;
if (!PVInitVideoDecoder(
- mDecHandle, vol_data, &vol_size, 1, mIntf->getMaxWidth(), mIntf->getMaxHeight(), H263_MODE)) {
+ &mVideoDecControls, vol_data, &vol_size, 1, mIntf->getMaxWidth(),
+ mIntf->getMaxHeight(), H263_MODE)) {
ALOGE("Error in PVInitVideoDecoder H263_MODE while resChanged was set to true");
mSignalledError = true;
work->result = C2_CORRUPTED;
@@ -444,40 +419,6 @@
return resChanged;
}
-/* TODO: can remove temporary copy after library supports writing to display
- * buffer Y, U and V plane pointers using stride info. */
-static void copyOutputBufferToYuvPlanarFrame(
- uint8_t *dstY, uint8_t *dstU, uint8_t *dstV, uint8_t *src,
- size_t dstYStride, size_t dstUVStride,
- size_t srcYStride, uint32_t width,
- uint32_t height) {
- size_t srcUVStride = srcYStride / 2;
- uint8_t *srcStart = src;
-
- size_t vStride = align(height, 16);
- for (size_t i = 0; i < height; ++i) {
- memcpy(dstY, src, width);
- src += srcYStride;
- dstY += dstYStride;
- }
-
- /* U buffer */
- src = srcStart + vStride * srcYStride;
- for (size_t i = 0; i < height / 2; ++i) {
- memcpy(dstU, src, width / 2);
- src += srcUVStride;
- dstU += dstUVStride;
- }
-
- /* V buffer */
- src = srcStart + vStride * srcYStride * 5 / 4;
- for (size_t i = 0; i < height / 2; ++i) {
- memcpy(dstV, src, width / 2);
- src += srcUVStride;
- dstV += dstUVStride;
- }
-}
-
void C2SoftMpeg4Dec::process(
const std::unique_ptr<C2Work> &work,
const std::shared_ptr<C2BlockPool> &pool) {
@@ -522,7 +463,7 @@
uint32_t *start_code = (uint32_t *)bitstream;
bool volHeader = *start_code == 0xB0010000;
if (volHeader) {
- PVCleanUpVideoDecoder(mDecHandle);
+ PVCleanUpVideoDecoder(&mVideoDecControls);
mInitialized = false;
}
@@ -537,7 +478,7 @@
}
MP4DecodingMode mode = (mIsMpeg4) ? MPEG4_MODE : H263_MODE;
if (!PVInitVideoDecoder(
- mDecHandle, vol_data, &vol_size, 1,
+ &mVideoDecControls, vol_data, &vol_size, 1,
mIntf->getMaxWidth(), mIntf->getMaxHeight(), mode)) {
ALOGE("PVInitVideoDecoder failed. Unsupported content?");
mSignalledError = true;
@@ -545,7 +486,7 @@
return;
}
mInitialized = true;
- MP4DecodingMode actualMode = PVGetDecBitstreamMode(mDecHandle);
+ MP4DecodingMode actualMode = PVGetDecBitstreamMode(&mVideoDecControls);
if (mode != actualMode) {
ALOGE("Decoded mode not same as actual mode of the decoder");
mSignalledError = true;
@@ -553,7 +494,7 @@
return;
}
- PVSetPostProcType(mDecHandle, 0);
+ PVSetPostProcType(&mVideoDecControls, 0);
if (handleResChange(work)) {
ALOGI("Setting width and height");
C2StreamPictureSizeInfo::output size(0u, mWidth, mHeight);
@@ -590,7 +531,7 @@
return;
}
- uint32_t yFrameSize = sizeof(uint8) * mDecHandle->size;
+ uint32_t yFrameSize = sizeof(uint8) * mVideoDecControls.size;
if (mOutputBufferSize < yFrameSize * 3 / 2){
ALOGE("Too small output buffer: %zu bytes", mOutputBufferSize);
mSignalledError = true;
@@ -599,7 +540,7 @@
}
if (!mFramesConfigured) {
- PVSetReferenceYUV(mDecHandle,mOutputBuffer[1]);
+ PVSetReferenceYUV(&mVideoDecControls,mOutputBuffer[1]);
mFramesConfigured = true;
}
@@ -610,7 +551,7 @@
uint8_t *bitstreamTmp = bitstream;
uint32_t timestamp = workIndex;
if (PVDecodeVopHeader(
- mDecHandle, &bitstreamTmp, &timestamp, &tmpInSize,
+ &mVideoDecControls, &bitstreamTmp, &timestamp, &tmpInSize,
&header_info, &useExtTimestamp,
mOutputBuffer[mNumSamplesOutput & 1]) != PV_TRUE) {
ALOGE("failed to decode vop header.");
@@ -642,7 +583,7 @@
continue;
}
- if (PVDecodeVopBody(mDecHandle, &tmpInSize) != PV_TRUE) {
+ if (PVDecodeVopBody(&mVideoDecControls, &tmpInSize) != PV_TRUE) {
ALOGE("failed to decode video frame.");
mSignalledError = true;
work->result = C2_CORRUPTED;
@@ -661,11 +602,17 @@
C2PlanarLayout layout = wView.layout();
size_t dstYStride = layout.planes[C2PlanarLayout::PLANE_Y].rowInc;
size_t dstUVStride = layout.planes[C2PlanarLayout::PLANE_U].rowInc;
- (void)copyOutputBufferToYuvPlanarFrame(
- outputBufferY, outputBufferU, outputBufferV,
- mOutputBuffer[mNumSamplesOutput & 1],
- dstYStride, dstUVStride,
- align(mWidth, 16), mWidth, mHeight);
+ size_t srcYStride = align(mWidth, 16);
+ size_t srcUStride = srcYStride / 2;
+ size_t srcVStride = srcYStride / 2;
+ size_t vStride = align(mHeight, 16);
+ const uint8_t *srcY = (const uint8_t *)mOutputBuffer[mNumSamplesOutput & 1];
+ const uint8_t *srcU = (const uint8_t *)srcY + vStride * srcYStride;
+ const uint8_t *srcV = (const uint8_t *)srcY + vStride * srcYStride * 5 / 4;
+
+ convertYUV420Planar8ToYV12(outputBufferY, outputBufferU, outputBufferV, srcY, srcU, srcV,
+ srcYStride, srcUStride, srcVStride, dstYStride, dstUVStride,
+ mWidth, mHeight);
inPos += inSize - (size_t)tmpInSize;
finishWork(workIndex, work);
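
Editor's note: the replacement conversion derives the U and V plane pointers from a single contiguous, 16-aligned YUV420 buffer: U starts after alignedHeight * yStride bytes of Y, and V one quarter-plane later, matching the removed copy helper. A quick sketch of that offset arithmetic:

    #include <cstddef>
    #include <cstdio>

    constexpr size_t align16(size_t v) { return (v + 15) & ~size_t(15); }

    int main() {
        size_t width = 176, height = 144;
        size_t yStride = align16(width);                  // luma row stride
        size_t vStride = align16(height);                 // aligned plane height
        size_t uOffset = vStride * yStride;               // end of the Y plane
        size_t vOffset = vStride * yStride * 5 / 4;       // Y plane + one chroma plane
        std::printf("Y stride %zu, U at %zu, V at %zu\n", yStride, uOffset, vOffset);
        return 0;
    }
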
diff --git a/media/codec2/components/mpeg4_h263/C2SoftMpeg4Dec.h b/media/codec2/components/mpeg4_h263/C2SoftMpeg4Dec.h
index 716a095..fed04c9 100644
--- a/media/codec2/components/mpeg4_h263/C2SoftMpeg4Dec.h
+++ b/media/codec2/components/mpeg4_h263/C2SoftMpeg4Dec.h
@@ -19,8 +19,8 @@
#include <SimpleC2Component.h>
+#include <mp4dec_api.h>
-struct tagvideoDecControls;
namespace android {
@@ -54,7 +54,7 @@
bool handleResChange(const std::unique_ptr<C2Work> &work);
std::shared_ptr<IntfImpl> mIntf;
- tagvideoDecControls *mDecHandle;
+ tagvideoDecControls mVideoDecControls;
std::shared_ptr<C2GraphicBlock> mOutBlock;
uint8_t *mOutputBuffer[kNumOutputBuffers];
size_t mOutputBufferSize;
diff --git a/media/codec2/components/mpeg4_h263/C2SoftMpeg4Enc.cpp b/media/codec2/components/mpeg4_h263/C2SoftMpeg4Enc.cpp
index 3c87531..3bfec66 100644
--- a/media/codec2/components/mpeg4_h263/C2SoftMpeg4Enc.cpp
+++ b/media/codec2/components/mpeg4_h263/C2SoftMpeg4Enc.cpp
@@ -436,16 +436,18 @@
}
++mNumInputFrames;
- std::unique_ptr<C2StreamInitDataInfo::output> csd =
- C2StreamInitDataInfo::output::AllocUnique(outputSize, 0u);
- if (!csd) {
- ALOGE("CSD allocation failed");
- mSignalledError = true;
- work->result = C2_NO_MEMORY;
- return;
+ if (outputSize) {
+ std::unique_ptr<C2StreamInitDataInfo::output> csd =
+ C2StreamInitDataInfo::output::AllocUnique(outputSize, 0u);
+ if (!csd) {
+ ALOGE("CSD allocation failed");
+ mSignalledError = true;
+ work->result = C2_NO_MEMORY;
+ return;
+ }
+ memcpy(csd->m.value, outPtr, outputSize);
+ work->worklets.front()->output.configUpdate.push_back(std::move(csd));
}
- memcpy(csd->m.value, outPtr, outputSize);
- work->worklets.front()->output.configUpdate.push_back(std::move(csd));
}
// handle dynamic bitrate change
diff --git a/media/codec2/components/tests/Android.bp b/media/codec2/components/tests/Android.bp
index 3c68eee..be2abf2 100644
--- a/media/codec2/components/tests/Android.bp
+++ b/media/codec2/components/tests/Android.bp
@@ -9,44 +9,13 @@
cc_defaults {
name: "C2SoftCodecTest-defaults",
+ defaults: [ "libcodec2-static-defaults" ],
gtest: true,
host_supported: false,
srcs: [
"C2SoftCodecTest.cpp",
],
- static_libs: [
- "liblog",
- "libion",
- "libfmq",
- "libbase",
- "libutils",
- "libcutils",
- "libcodec2",
- "libhidlbase",
- "libdmabufheap",
- "libcodec2_vndk",
- "libnativewindow",
- "libcodec2_soft_common",
- "libsfplugin_ccodec_utils",
- "libstagefright_foundation",
- "libstagefright_bufferpool@2.0.1",
- "android.hardware.graphics.mapper@2.0",
- "android.hardware.graphics.mapper@3.0",
- "android.hardware.media.bufferpool@2.0",
- "android.hardware.graphics.allocator@2.0",
- "android.hardware.graphics.allocator@3.0",
- "android.hardware.graphics.bufferqueue@2.0",
- ],
-
- shared_libs: [
- "libui",
- "libdl",
- "libhardware",
- "libvndksupport",
- "libprocessgroup",
- ],
-
cflags: [
"-Wall",
"-Werror",
diff --git a/media/codec2/components/vpx/C2SoftVpxDec.cpp b/media/codec2/components/vpx/C2SoftVpxDec.cpp
index 2953d90..5fc89be 100644
--- a/media/codec2/components/vpx/C2SoftVpxDec.cpp
+++ b/media/codec2/components/vpx/C2SoftVpxDec.cpp
@@ -25,6 +25,7 @@
#include <C2Debug.h>
#include <C2PlatformSupport.h>
+#include <Codec2BufferUtils.h>
#include <SimpleC2Interface.h>
#include "C2SoftVpxDec.h"
@@ -149,8 +150,16 @@
#else
addParameter(
DefineParam(mProfileLevel, C2_PARAMKEY_PROFILE_LEVEL)
- .withConstValue(new C2StreamProfileLevelInfo::input(0u,
- C2Config::PROFILE_UNUSED, C2Config::LEVEL_UNUSED))
+ .withDefault(new C2StreamProfileLevelInfo::input(0u,
+ C2Config::PROFILE_VP8_0, C2Config::LEVEL_UNUSED))
+ .withFields({
+ C2F(mProfileLevel, profile).equalTo(
+ PROFILE_VP8_0
+ ),
+ C2F(mProfileLevel, level).equalTo(
+ LEVEL_UNUSED),
+ })
+ .withSetter(ProfileLevelSetter, mSize)
.build());
#endif
@@ -630,125 +639,6 @@
}
}
-static void copyOutputBufferToYuvPlanarFrame(
- uint8_t *dstY, uint8_t *dstU, uint8_t *dstV,
- const uint8_t *srcY, const uint8_t *srcU, const uint8_t *srcV,
- size_t srcYStride, size_t srcUStride, size_t srcVStride,
- size_t dstYStride, size_t dstUVStride,
- uint32_t width, uint32_t height) {
-
- for (size_t i = 0; i < height; ++i) {
- memcpy(dstY, srcY, width);
- srcY += srcYStride;
- dstY += dstYStride;
- }
-
- for (size_t i = 0; i < height / 2; ++i) {
- memcpy(dstV, srcV, width / 2);
- srcV += srcVStride;
- dstV += dstUVStride;
- }
-
- for (size_t i = 0; i < height / 2; ++i) {
- memcpy(dstU, srcU, width / 2);
- srcU += srcUStride;
- dstU += dstUVStride;
- }
-
-}
-
-static void convertYUV420Planar16ToY410(uint32_t *dst,
- const uint16_t *srcY, const uint16_t *srcU, const uint16_t *srcV,
- size_t srcYStride, size_t srcUStride, size_t srcVStride,
- size_t dstStride, size_t width, size_t height) {
-
- // Converting two lines at a time, slightly faster
- for (size_t y = 0; y < height; y += 2) {
- uint32_t *dstTop = (uint32_t *) dst;
- uint32_t *dstBot = (uint32_t *) (dst + dstStride);
- uint16_t *ySrcTop = (uint16_t*) srcY;
- uint16_t *ySrcBot = (uint16_t*) (srcY + srcYStride);
- uint16_t *uSrc = (uint16_t*) srcU;
- uint16_t *vSrc = (uint16_t*) srcV;
-
- uint32_t u01, v01, y01, y23, y45, y67, uv0, uv1;
- size_t x = 0;
- for (; x < width - 3; x += 4) {
-
- u01 = *((uint32_t*)uSrc); uSrc += 2;
- v01 = *((uint32_t*)vSrc); vSrc += 2;
-
- y01 = *((uint32_t*)ySrcTop); ySrcTop += 2;
- y23 = *((uint32_t*)ySrcTop); ySrcTop += 2;
- y45 = *((uint32_t*)ySrcBot); ySrcBot += 2;
- y67 = *((uint32_t*)ySrcBot); ySrcBot += 2;
-
- uv0 = (u01 & 0x3FF) | ((v01 & 0x3FF) << 20);
- uv1 = (u01 >> 16) | ((v01 >> 16) << 20);
-
- *dstTop++ = 3 << 30 | ((y01 & 0x3FF) << 10) | uv0;
- *dstTop++ = 3 << 30 | ((y01 >> 16) << 10) | uv0;
- *dstTop++ = 3 << 30 | ((y23 & 0x3FF) << 10) | uv1;
- *dstTop++ = 3 << 30 | ((y23 >> 16) << 10) | uv1;
-
- *dstBot++ = 3 << 30 | ((y45 & 0x3FF) << 10) | uv0;
- *dstBot++ = 3 << 30 | ((y45 >> 16) << 10) | uv0;
- *dstBot++ = 3 << 30 | ((y67 & 0x3FF) << 10) | uv1;
- *dstBot++ = 3 << 30 | ((y67 >> 16) << 10) | uv1;
- }
-
- // There should be at most 2 more pixels to process. Note that we don't
- // need to consider odd case as the buffer is always aligned to even.
- if (x < width) {
- u01 = *uSrc;
- v01 = *vSrc;
- y01 = *((uint32_t*)ySrcTop);
- y45 = *((uint32_t*)ySrcBot);
- uv0 = (u01 & 0x3FF) | ((v01 & 0x3FF) << 20);
- *dstTop++ = ((y01 & 0x3FF) << 10) | uv0;
- *dstTop++ = ((y01 >> 16) << 10) | uv0;
- *dstBot++ = ((y45 & 0x3FF) << 10) | uv0;
- *dstBot++ = ((y45 >> 16) << 10) | uv0;
- }
-
- srcY += srcYStride * 2;
- srcU += srcUStride;
- srcV += srcVStride;
- dst += dstStride * 2;
- }
-
- return;
-}
-
-static void convertYUV420Planar16ToYUV420Planar(
- uint8_t *dstY, uint8_t *dstU, uint8_t *dstV,
- const uint16_t *srcY, const uint16_t *srcU, const uint16_t *srcV,
- size_t srcYStride, size_t srcUStride, size_t srcVStride,
- size_t dstYStride, size_t dstUVStride,
- size_t width, size_t height) {
-
- for (size_t y = 0; y < height; ++y) {
- for (size_t x = 0; x < width; ++x) {
- dstY[x] = (uint8_t)(srcY[x] >> 2);
- }
-
- srcY += srcYStride;
- dstY += dstYStride;
- }
-
- for (size_t y = 0; y < (height + 1) / 2; ++y) {
- for (size_t x = 0; x < (width + 1) / 2; ++x) {
- dstU[x] = (uint8_t)(srcU[x] >> 2);
- dstV[x] = (uint8_t)(srcV[x] >> 2);
- }
-
- srcU += srcUStride;
- srcV += srcVStride;
- dstU += dstUVStride;
- dstV += dstUVStride;
- }
- return;
-}
status_t C2SoftVpxDec::outputBuffer(
const std::shared_ptr<C2BlockPool> &pool,
const std::unique_ptr<C2Work> &work)
@@ -792,12 +682,13 @@
if (img->fmt == VPX_IMG_FMT_I42016) {
IntfImpl::Lock lock = mIntf->lock();
std::shared_ptr<C2StreamColorAspectsTuning::output> defaultColorAspects = mIntf->getDefaultColorAspects_l();
-
+ bool allowRGBA1010102 = false;
if (defaultColorAspects->primaries == C2Color::PRIMARIES_BT2020 &&
defaultColorAspects->matrix == C2Color::MATRIX_BT2020 &&
defaultColorAspects->transfer == C2Color::TRANSFER_ST2084) {
- format = HAL_PIXEL_FORMAT_RGBA_1010102;
+ allowRGBA1010102 = true;
}
+ format = getHalPixelFormatForBitDepth10(allowRGBA1010102);
}
C2MemoryUsage usage = { C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE };
c2_status_t err = pool->fetchGraphicBlock(align(mWidth, 16), mHeight, format, usage, &block);
@@ -859,24 +750,22 @@
queue->cond.signal();
queue.waitForCondition(queue->cond);
}
+ } else if (format == HAL_PIXEL_FORMAT_YCBCR_P010) {
+ convertYUV420Planar16ToP010((uint16_t *)dstY, (uint16_t *)dstU, srcY, srcU, srcV,
+ srcYStride / 2, srcUStride / 2, srcVStride / 2,
+ dstYStride / 2, dstUVStride / 2, mWidth, mHeight);
} else {
- convertYUV420Planar16ToYUV420Planar(dstY, dstU, dstV,
- srcY, srcU, srcV,
- srcYStride / 2, srcUStride / 2, srcVStride / 2,
- dstYStride, dstUVStride,
- mWidth, mHeight);
+ convertYUV420Planar16ToYV12(dstY, dstU, dstV, srcY, srcU, srcV, srcYStride / 2,
+ srcUStride / 2, srcVStride / 2, dstYStride, dstUVStride,
+ mWidth, mHeight);
}
} else {
const uint8_t *srcY = (const uint8_t *)img->planes[VPX_PLANE_Y];
const uint8_t *srcU = (const uint8_t *)img->planes[VPX_PLANE_U];
const uint8_t *srcV = (const uint8_t *)img->planes[VPX_PLANE_V];
- copyOutputBufferToYuvPlanarFrame(
- dstY, dstU, dstV,
- srcY, srcU, srcV,
- srcYStride, srcUStride, srcVStride,
- dstYStride, dstUVStride,
- mWidth, mHeight);
+ convertYUV420Planar8ToYV12(dstY, dstU, dstV, srcY, srcU, srcV, srcYStride, srcUStride,
+ srcVStride, dstYStride, dstUVStride, mWidth, mHeight);
}
finishWork(((c2_cntr64_t *)img->user_priv)->peekull(), work, std::move(block));
return OK;
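
Editor's note: instead of hard-coding RGBA_1010102 for BT.2020/PQ streams, the decoder now asks getHalPixelFormatForBitDepth10(allowRGBA1010102) for the 10-bit output format and dispatches to the matching conversion (Y410, P010, or YV12). The helper's internals are not part of this patch; the sketch below only illustrates the dispatch shape with a placeholder selector and hypothetical constants:

    #include <cstdint>
    #include <cstdio>

    enum Format : uint32_t { YV12, P010, RGBA_1010102 };

    // Placeholder for getHalPixelFormatForBitDepth10(); the real selection also
    // depends on device support and lives outside this patch.
    Format pickBitDepth10Format(bool allowRGBA1010102) {
        return allowRGBA1010102 ? RGBA_1010102 : P010;
    }

    void convert(Format f) {
        switch (f) {
            case RGBA_1010102: std::printf("convertYUV420Planar16ToY410\n"); break;
            case P010:         std::printf("convertYUV420Planar16ToP010\n"); break;
            default:           std::printf("convertYUV420Planar16ToYV12\n"); break;
        }
    }

    int main() {
        convert(pickBitDepth10Format(/*allowRGBA1010102=*/true));
        convert(pickBitDepth10Format(false));
        return 0;
    }
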
diff --git a/media/codec2/components/vpx/C2SoftVpxEnc.cpp b/media/codec2/components/vpx/C2SoftVpxEnc.cpp
index 7486d27..617769b 100644
--- a/media/codec2/components/vpx/C2SoftVpxEnc.cpp
+++ b/media/codec2/components/vpx/C2SoftVpxEnc.cpp
@@ -31,6 +31,255 @@
namespace android {
+C2SoftVpxEnc::IntfImpl::IntfImpl(const std::shared_ptr<C2ReflectorHelper> &helper)
+ : SimpleInterface<void>::BaseParams(
+ helper,
+ COMPONENT_NAME,
+ C2Component::KIND_ENCODER,
+ C2Component::DOMAIN_VIDEO,
+ MEDIA_MIMETYPE_VIDEO) {
+ noPrivateBuffers(); // TODO: account for our buffers here
+ noInputReferences();
+ noOutputReferences();
+ noInputLatency();
+ noTimeStretch();
+ setDerivedInstance(this);
+
+ addParameter(
+ DefineParam(mAttrib, C2_PARAMKEY_COMPONENT_ATTRIBUTES)
+ .withConstValue(new C2ComponentAttributesSetting(
+ C2Component::ATTRIB_IS_TEMPORAL))
+ .build());
+
+ addParameter(
+ DefineParam(mUsage, C2_PARAMKEY_INPUT_STREAM_USAGE)
+ .withConstValue(new C2StreamUsageTuning::input(
+ 0u, (uint64_t)C2MemoryUsage::CPU_READ))
+ .build());
+
+ addParameter(
+ DefineParam(mSize, C2_PARAMKEY_PICTURE_SIZE)
+ .withDefault(new C2StreamPictureSizeInfo::input(0u, 320, 240))
+ .withFields({
+ C2F(mSize, width).inRange(2, 2048, 2),
+ C2F(mSize, height).inRange(2, 2048, 2),
+ })
+ .withSetter(SizeSetter)
+ .build());
+
+ addParameter(
+ DefineParam(mBitrateMode, C2_PARAMKEY_BITRATE_MODE)
+ .withDefault(new C2StreamBitrateModeTuning::output(
+ 0u, C2Config::BITRATE_VARIABLE))
+ .withFields({
+ C2F(mBitrateMode, value).oneOf({
+ C2Config::BITRATE_CONST, C2Config::BITRATE_VARIABLE })
+ })
+ .withSetter(
+ Setter<decltype(*mBitrateMode)>::StrictValueWithNoDeps)
+ .build());
+
+ addParameter(
+ DefineParam(mFrameRate, C2_PARAMKEY_FRAME_RATE)
+ .withDefault(new C2StreamFrameRateInfo::output(0u, 30.))
+ // TODO: More restriction?
+ .withFields({C2F(mFrameRate, value).greaterThan(0.)})
+ .withSetter(
+ Setter<decltype(*mFrameRate)>::StrictValueWithNoDeps)
+ .build());
+
+ addParameter(
+ DefineParam(mLayering, C2_PARAMKEY_TEMPORAL_LAYERING)
+ .withDefault(C2StreamTemporalLayeringTuning::output::AllocShared(0u, 0, 0, 0))
+ .withFields({
+ C2F(mLayering, m.layerCount).inRange(0, 4),
+ C2F(mLayering, m.bLayerCount).inRange(0, 0),
+ C2F(mLayering, m.bitrateRatios).inRange(0., 1.)
+ })
+ .withSetter(LayeringSetter)
+ .build());
+
+ addParameter(
+ DefineParam(mSyncFramePeriod, C2_PARAMKEY_SYNC_FRAME_INTERVAL)
+ .withDefault(new C2StreamSyncFrameIntervalTuning::output(0u, 1000000))
+ .withFields({C2F(mSyncFramePeriod, value).any()})
+ .withSetter(Setter<decltype(*mSyncFramePeriod)>::StrictValueWithNoDeps)
+ .build());
+
+ addParameter(
+ DefineParam(mBitrate, C2_PARAMKEY_BITRATE)
+ .withDefault(new C2StreamBitrateInfo::output(0u, 64000))
+ .withFields({C2F(mBitrate, value).inRange(4096, 40000000)})
+ .withSetter(BitrateSetter)
+ .build());
+
+ addParameter(
+ DefineParam(mIntraRefresh, C2_PARAMKEY_INTRA_REFRESH)
+ .withConstValue(new C2StreamIntraRefreshTuning::output(
+ 0u, C2Config::INTRA_REFRESH_DISABLED, 0.))
+ .build());
+#ifdef VP9
+ addParameter(
+ DefineParam(mProfileLevel, C2_PARAMKEY_PROFILE_LEVEL)
+ .withDefault(new C2StreamProfileLevelInfo::output(
+ 0u, PROFILE_VP9_0, LEVEL_VP9_4_1))
+ .withFields({
+ C2F(mProfileLevel, profile).equalTo(
+ PROFILE_VP9_0
+ ),
+ C2F(mProfileLevel, level).equalTo(
+ LEVEL_VP9_4_1),
+ })
+ .withSetter(ProfileLevelSetter)
+ .build());
+#else
+ addParameter(
+ DefineParam(mProfileLevel, C2_PARAMKEY_PROFILE_LEVEL)
+ .withDefault(new C2StreamProfileLevelInfo::output(
+ 0u, PROFILE_VP8_0, LEVEL_UNUSED))
+ .withFields({
+ C2F(mProfileLevel, profile).equalTo(
+ PROFILE_VP8_0
+ ),
+ C2F(mProfileLevel, level).equalTo(
+ LEVEL_UNUSED),
+ })
+ .withSetter(ProfileLevelSetter)
+ .build());
+#endif
+ addParameter(
+ DefineParam(mRequestSync, C2_PARAMKEY_REQUEST_SYNC_FRAME)
+ .withDefault(new C2StreamRequestSyncFrameTuning::output(0u, C2_FALSE))
+ .withFields({C2F(mRequestSync, value).oneOf({ C2_FALSE, C2_TRUE }) })
+ .withSetter(Setter<decltype(*mRequestSync)>::NonStrictValueWithNoDeps)
+ .build());
+
+ addParameter(
+ DefineParam(mColorAspects, C2_PARAMKEY_COLOR_ASPECTS)
+ .withDefault(new C2StreamColorAspectsInfo::input(
+ 0u, C2Color::RANGE_UNSPECIFIED, C2Color::PRIMARIES_UNSPECIFIED,
+ C2Color::TRANSFER_UNSPECIFIED, C2Color::MATRIX_UNSPECIFIED))
+ .withFields({
+ C2F(mColorAspects, range).inRange(
+ C2Color::RANGE_UNSPECIFIED, C2Color::RANGE_OTHER),
+ C2F(mColorAspects, primaries).inRange(
+ C2Color::PRIMARIES_UNSPECIFIED, C2Color::PRIMARIES_OTHER),
+ C2F(mColorAspects, transfer).inRange(
+ C2Color::TRANSFER_UNSPECIFIED, C2Color::TRANSFER_OTHER),
+ C2F(mColorAspects, matrix).inRange(
+ C2Color::MATRIX_UNSPECIFIED, C2Color::MATRIX_OTHER)
+ })
+ .withSetter(ColorAspectsSetter)
+ .build());
+
+ addParameter(
+ DefineParam(mCodedColorAspects, C2_PARAMKEY_VUI_COLOR_ASPECTS)
+ .withDefault(new C2StreamColorAspectsInfo::output(
+ 0u, C2Color::RANGE_LIMITED, C2Color::PRIMARIES_UNSPECIFIED,
+ C2Color::TRANSFER_UNSPECIFIED, C2Color::MATRIX_UNSPECIFIED))
+ .withFields({
+ C2F(mCodedColorAspects, range).inRange(
+ C2Color::RANGE_UNSPECIFIED, C2Color::RANGE_OTHER),
+ C2F(mCodedColorAspects, primaries).inRange(
+ C2Color::PRIMARIES_UNSPECIFIED, C2Color::PRIMARIES_OTHER),
+ C2F(mCodedColorAspects, transfer).inRange(
+ C2Color::TRANSFER_UNSPECIFIED, C2Color::TRANSFER_OTHER),
+ C2F(mCodedColorAspects, matrix).inRange(
+ C2Color::MATRIX_UNSPECIFIED, C2Color::MATRIX_OTHER)
+ })
+ .withSetter(CodedColorAspectsSetter, mColorAspects)
+ .build());
+}
+
+C2R C2SoftVpxEnc::IntfImpl::BitrateSetter(bool mayBlock, C2P<C2StreamBitrateInfo::output> &me) {
+ (void)mayBlock;
+ C2R res = C2R::Ok();
+ if (me.v.value < 4096) {
+ me.set().value = 4096;
+ }
+ return res;
+}
+
+C2R C2SoftVpxEnc::IntfImpl::SizeSetter(bool mayBlock,
+ const C2P<C2StreamPictureSizeInfo::input>& oldMe,
+ C2P<C2StreamPictureSizeInfo::input>& me) {
+ (void)mayBlock;
+ C2R res = C2R::Ok();
+ if (!me.F(me.v.width).supportsAtAll(me.v.width)) {
+ res = res.plus(C2SettingResultBuilder::BadValue(me.F(me.v.width)));
+ me.set().width = oldMe.v.width;
+ }
+ if (!me.F(me.v.height).supportsAtAll(me.v.height)) {
+ res = res.plus(C2SettingResultBuilder::BadValue(me.F(me.v.height)));
+ me.set().height = oldMe.v.height;
+ }
+ return res;
+}
+
+C2R C2SoftVpxEnc::IntfImpl::ProfileLevelSetter(bool mayBlock,
+ C2P<C2StreamProfileLevelInfo::output>& me) {
+ (void)mayBlock;
+ if (!me.F(me.v.profile).supportsAtAll(me.v.profile)) {
+ me.set().profile = PROFILE_VP9_0;
+ }
+ if (!me.F(me.v.level).supportsAtAll(me.v.level)) {
+ me.set().level = LEVEL_VP9_4_1;
+ }
+ return C2R::Ok();
+}
+
+C2R C2SoftVpxEnc::IntfImpl::LayeringSetter(bool mayBlock,
+ C2P<C2StreamTemporalLayeringTuning::output>& me) {
+ (void)mayBlock;
+ C2R res = C2R::Ok();
+ if (me.v.m.layerCount > 4) {
+ me.set().m.layerCount = 4;
+ }
+ me.set().m.bLayerCount = 0;
+ // ensure ratios are monotonic and clamped between 0 and 1
+ for (size_t ix = 0; ix < me.v.flexCount(); ++ix) {
+ me.set().m.bitrateRatios[ix] = c2_clamp(
+ ix > 0 ? me.v.m.bitrateRatios[ix - 1] : 0, me.v.m.bitrateRatios[ix], 1.);
+ }
+ ALOGI("setting temporal layering %u + %u", me.v.m.layerCount, me.v.m.bLayerCount);
+ return res;
+}
+
+uint32_t C2SoftVpxEnc::IntfImpl::getSyncFramePeriod() const {
+ if (mSyncFramePeriod->value < 0 || mSyncFramePeriod->value == INT64_MAX) {
+ return 0;
+ }
+ double period = mSyncFramePeriod->value / 1e6 * mFrameRate->value;
+ return (uint32_t)c2_max(c2_min(period + 0.5, double(UINT32_MAX)), 1.);
+}
+C2R C2SoftVpxEnc::IntfImpl::ColorAspectsSetter(bool mayBlock,
+ C2P<C2StreamColorAspectsInfo::input>& me) {
+ (void)mayBlock;
+ if (me.v.range > C2Color::RANGE_OTHER) {
+ me.set().range = C2Color::RANGE_OTHER;
+ }
+ if (me.v.primaries > C2Color::PRIMARIES_OTHER) {
+ me.set().primaries = C2Color::PRIMARIES_OTHER;
+ }
+ if (me.v.transfer > C2Color::TRANSFER_OTHER) {
+ me.set().transfer = C2Color::TRANSFER_OTHER;
+ }
+ if (me.v.matrix > C2Color::MATRIX_OTHER) {
+ me.set().matrix = C2Color::MATRIX_OTHER;
+ }
+ return C2R::Ok();
+}
+C2R C2SoftVpxEnc::IntfImpl::CodedColorAspectsSetter(
+ bool mayBlock, C2P<C2StreamColorAspectsInfo::output>& me,
+ const C2P<C2StreamColorAspectsInfo::input>& coded) {
+ (void)mayBlock;
+ me.set().range = coded.v.range;
+ me.set().primaries = coded.v.primaries;
+ me.set().transfer = coded.v.transfer;
+ me.set().matrix = coded.v.matrix;
+ return C2R::Ok();
+}
+
#if 0
static size_t getCpuCoreCount() {
long cpuCoreCount = 1;
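
Editor's note: getSyncFramePeriod(), moved out of the header in this patch, converts a sync-frame interval in microseconds plus a frame rate into key-frame spacing in frames, rounding to the nearest frame and never returning less than 1 (0 means "unset"). The arithmetic, stand-alone:

    #include <algorithm>
    #include <cstdint>
    #include <cstdio>

    uint32_t syncFramePeriodFrames(int64_t periodUs, float fps) {
        if (periodUs < 0 || periodUs == INT64_MAX) return 0;   // unset / infinite
        double frames = periodUs / 1e6 * fps;
        return (uint32_t)std::max(std::min(frames + 0.5, double(UINT32_MAX)), 1.0);
    }

    int main() {
        std::printf("%u\n", syncFramePeriodFrames(1000000, 30.f));  // 30 frames ~ 1s at 30fps
        std::printf("%u\n", syncFramePeriodFrames(0, 30.f));        // clamped up to 1
        return 0;
    }
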
diff --git a/media/codec2/components/vpx/C2SoftVpxEnc.h b/media/codec2/components/vpx/C2SoftVpxEnc.h
index c98b802..e296c8f 100644
--- a/media/codec2/components/vpx/C2SoftVpxEnc.h
+++ b/media/codec2/components/vpx/C2SoftVpxEnc.h
@@ -237,245 +237,38 @@
class C2SoftVpxEnc::IntfImpl : public SimpleInterface<void>::BaseParams {
public:
- explicit IntfImpl(const std::shared_ptr<C2ReflectorHelper> &helper)
- : SimpleInterface<void>::BaseParams(
- helper,
- COMPONENT_NAME,
- C2Component::KIND_ENCODER,
- C2Component::DOMAIN_VIDEO,
- MEDIA_MIMETYPE_VIDEO) {
- noPrivateBuffers(); // TODO: account for our buffers here
- noInputReferences();
- noOutputReferences();
- noInputLatency();
- noTimeStretch();
- setDerivedInstance(this);
-
- addParameter(
- DefineParam(mAttrib, C2_PARAMKEY_COMPONENT_ATTRIBUTES)
- .withConstValue(new C2ComponentAttributesSetting(
- C2Component::ATTRIB_IS_TEMPORAL))
- .build());
-
- addParameter(
- DefineParam(mUsage, C2_PARAMKEY_INPUT_STREAM_USAGE)
- .withConstValue(new C2StreamUsageTuning::input(
- 0u, (uint64_t)C2MemoryUsage::CPU_READ))
- .build());
-
- addParameter(
- DefineParam(mSize, C2_PARAMKEY_PICTURE_SIZE)
- .withDefault(new C2StreamPictureSizeInfo::input(0u, 320, 240))
- .withFields({
- C2F(mSize, width).inRange(2, 2048, 2),
- C2F(mSize, height).inRange(2, 2048, 2),
- })
- .withSetter(SizeSetter)
- .build());
-
- addParameter(
- DefineParam(mBitrateMode, C2_PARAMKEY_BITRATE_MODE)
- .withDefault(new C2StreamBitrateModeTuning::output(
- 0u, C2Config::BITRATE_VARIABLE))
- .withFields({
- C2F(mBitrateMode, value).oneOf({
- C2Config::BITRATE_CONST, C2Config::BITRATE_VARIABLE })
- })
- .withSetter(
- Setter<decltype(*mBitrateMode)>::StrictValueWithNoDeps)
- .build());
-
- addParameter(
- DefineParam(mFrameRate, C2_PARAMKEY_FRAME_RATE)
- .withDefault(new C2StreamFrameRateInfo::output(0u, 30.))
- // TODO: More restriction?
- .withFields({C2F(mFrameRate, value).greaterThan(0.)})
- .withSetter(
- Setter<decltype(*mFrameRate)>::StrictValueWithNoDeps)
- .build());
-
- addParameter(
- DefineParam(mLayering, C2_PARAMKEY_TEMPORAL_LAYERING)
- .withDefault(C2StreamTemporalLayeringTuning::output::AllocShared(0u, 0, 0, 0))
- .withFields({
- C2F(mLayering, m.layerCount).inRange(0, 4),
- C2F(mLayering, m.bLayerCount).inRange(0, 0),
- C2F(mLayering, m.bitrateRatios).inRange(0., 1.)
- })
- .withSetter(LayeringSetter)
- .build());
-
- addParameter(
- DefineParam(mSyncFramePeriod, C2_PARAMKEY_SYNC_FRAME_INTERVAL)
- .withDefault(new C2StreamSyncFrameIntervalTuning::output(0u, 1000000))
- .withFields({C2F(mSyncFramePeriod, value).any()})
- .withSetter(Setter<decltype(*mSyncFramePeriod)>::StrictValueWithNoDeps)
- .build());
-
- addParameter(
- DefineParam(mBitrate, C2_PARAMKEY_BITRATE)
- .withDefault(new C2StreamBitrateInfo::output(0u, 64000))
- .withFields({C2F(mBitrate, value).inRange(4096, 40000000)})
- .withSetter(BitrateSetter)
- .build());
-
- addParameter(
- DefineParam(mIntraRefresh, C2_PARAMKEY_INTRA_REFRESH)
- .withConstValue(new C2StreamIntraRefreshTuning::output(
- 0u, C2Config::INTRA_REFRESH_DISABLED, 0.))
- .build());
-
- addParameter(
- DefineParam(mProfileLevel, C2_PARAMKEY_PROFILE_LEVEL)
- .withDefault(new C2StreamProfileLevelInfo::output(
- 0u, PROFILE_VP9_0, LEVEL_VP9_4_1))
- .withFields({
- C2F(mProfileLevel, profile).equalTo(
- PROFILE_VP9_0
- ),
- C2F(mProfileLevel, level).equalTo(
- LEVEL_VP9_4_1),
- })
- .withSetter(ProfileLevelSetter)
- .build());
-
- addParameter(
- DefineParam(mRequestSync, C2_PARAMKEY_REQUEST_SYNC_FRAME)
- .withDefault(new C2StreamRequestSyncFrameTuning::output(0u, C2_FALSE))
- .withFields({C2F(mRequestSync, value).oneOf({ C2_FALSE, C2_TRUE }) })
- .withSetter(Setter<decltype(*mRequestSync)>::NonStrictValueWithNoDeps)
- .build());
-
- addParameter(
- DefineParam(mColorAspects, C2_PARAMKEY_COLOR_ASPECTS)
- .withDefault(new C2StreamColorAspectsInfo::input(
- 0u, C2Color::RANGE_UNSPECIFIED, C2Color::PRIMARIES_UNSPECIFIED,
- C2Color::TRANSFER_UNSPECIFIED, C2Color::MATRIX_UNSPECIFIED))
- .withFields({
- C2F(mColorAspects, range).inRange(
- C2Color::RANGE_UNSPECIFIED, C2Color::RANGE_OTHER),
- C2F(mColorAspects, primaries).inRange(
- C2Color::PRIMARIES_UNSPECIFIED, C2Color::PRIMARIES_OTHER),
- C2F(mColorAspects, transfer).inRange(
- C2Color::TRANSFER_UNSPECIFIED, C2Color::TRANSFER_OTHER),
- C2F(mColorAspects, matrix).inRange(
- C2Color::MATRIX_UNSPECIFIED, C2Color::MATRIX_OTHER)
- })
- .withSetter(ColorAspectsSetter)
- .build());
-
- addParameter(
- DefineParam(mCodedColorAspects, C2_PARAMKEY_VUI_COLOR_ASPECTS)
- .withDefault(new C2StreamColorAspectsInfo::output(
- 0u, C2Color::RANGE_LIMITED, C2Color::PRIMARIES_UNSPECIFIED,
- C2Color::TRANSFER_UNSPECIFIED, C2Color::MATRIX_UNSPECIFIED))
- .withFields({
- C2F(mCodedColorAspects, range).inRange(
- C2Color::RANGE_UNSPECIFIED, C2Color::RANGE_OTHER),
- C2F(mCodedColorAspects, primaries).inRange(
- C2Color::PRIMARIES_UNSPECIFIED, C2Color::PRIMARIES_OTHER),
- C2F(mCodedColorAspects, transfer).inRange(
- C2Color::TRANSFER_UNSPECIFIED, C2Color::TRANSFER_OTHER),
- C2F(mCodedColorAspects, matrix).inRange(
- C2Color::MATRIX_UNSPECIFIED, C2Color::MATRIX_OTHER)
- })
- .withSetter(CodedColorAspectsSetter, mColorAspects)
- .build());
- }
-
- static C2R BitrateSetter(bool mayBlock, C2P<C2StreamBitrateInfo::output> &me) {
- (void)mayBlock;
- C2R res = C2R::Ok();
- if (me.v.value <= 4096) {
- me.set().value = 4096;
- }
- return res;
- }
+ explicit IntfImpl(const std::shared_ptr<C2ReflectorHelper> &helper);
+ static C2R BitrateSetter(bool mayBlock, C2P<C2StreamBitrateInfo::output> &me);
static C2R SizeSetter(bool mayBlock, const C2P<C2StreamPictureSizeInfo::input> &oldMe,
- C2P<C2StreamPictureSizeInfo::input> &me) {
- (void)mayBlock;
- C2R res = C2R::Ok();
- if (!me.F(me.v.width).supportsAtAll(me.v.width)) {
- res = res.plus(C2SettingResultBuilder::BadValue(me.F(me.v.width)));
- me.set().width = oldMe.v.width;
- }
- if (!me.F(me.v.height).supportsAtAll(me.v.height)) {
- res = res.plus(C2SettingResultBuilder::BadValue(me.F(me.v.height)));
- me.set().height = oldMe.v.height;
- }
- return res;
- }
+ C2P<C2StreamPictureSizeInfo::input> &me);
static C2R ProfileLevelSetter(
bool mayBlock,
- C2P<C2StreamProfileLevelInfo::output> &me) {
- (void)mayBlock;
- if (!me.F(me.v.profile).supportsAtAll(me.v.profile)) {
- me.set().profile = PROFILE_VP9_0;
- }
- if (!me.F(me.v.level).supportsAtAll(me.v.level)) {
- me.set().level = LEVEL_VP9_4_1;
- }
- return C2R::Ok();
- }
+ C2P<C2StreamProfileLevelInfo::output> &me);
- static C2R LayeringSetter(bool mayBlock, C2P<C2StreamTemporalLayeringTuning::output>& me) {
- (void)mayBlock;
- C2R res = C2R::Ok();
- if (me.v.m.layerCount > 4) {
- me.set().m.layerCount = 4;
- }
- me.set().m.bLayerCount = 0;
- // ensure ratios are monotonic and clamped between 0 and 1
- for (size_t ix = 0; ix < me.v.flexCount(); ++ix) {
- me.set().m.bitrateRatios[ix] = c2_clamp(
- ix > 0 ? me.v.m.bitrateRatios[ix - 1] : 0, me.v.m.bitrateRatios[ix], 1.);
- }
- ALOGI("setting temporal layering %u + %u", me.v.m.layerCount, me.v.m.bLayerCount);
- return res;
- }
+ static C2R LayeringSetter(bool mayBlock, C2P<C2StreamTemporalLayeringTuning::output>& me);
// unsafe getters
std::shared_ptr<C2StreamPictureSizeInfo::input> getSize_l() const { return mSize; }
- std::shared_ptr<C2StreamIntraRefreshTuning::output> getIntraRefresh_l() const { return mIntraRefresh; }
+ std::shared_ptr<C2StreamIntraRefreshTuning::output> getIntraRefresh_l() const {
+ return mIntraRefresh;
+ }
std::shared_ptr<C2StreamFrameRateInfo::output> getFrameRate_l() const { return mFrameRate; }
std::shared_ptr<C2StreamBitrateInfo::output> getBitrate_l() const { return mBitrate; }
- std::shared_ptr<C2StreamBitrateModeTuning::output> getBitrateMode_l() const { return mBitrateMode; }
- std::shared_ptr<C2StreamRequestSyncFrameTuning::output> getRequestSync_l() const { return mRequestSync; }
- std::shared_ptr<C2StreamTemporalLayeringTuning::output> getTemporalLayers_l() const { return mLayering; }
- uint32_t getSyncFramePeriod() const {
- if (mSyncFramePeriod->value < 0 || mSyncFramePeriod->value == INT64_MAX) {
- return 0;
- }
- double period = mSyncFramePeriod->value / 1e6 * mFrameRate->value;
- return (uint32_t)c2_max(c2_min(period + 0.5, double(UINT32_MAX)), 1.);
+ std::shared_ptr<C2StreamBitrateModeTuning::output> getBitrateMode_l() const {
+ return mBitrateMode;
}
- static C2R ColorAspectsSetter(bool mayBlock, C2P<C2StreamColorAspectsInfo::input> &me) {
- (void)mayBlock;
- if (me.v.range > C2Color::RANGE_OTHER) {
- me.set().range = C2Color::RANGE_OTHER;
- }
- if (me.v.primaries > C2Color::PRIMARIES_OTHER) {
- me.set().primaries = C2Color::PRIMARIES_OTHER;
- }
- if (me.v.transfer > C2Color::TRANSFER_OTHER) {
- me.set().transfer = C2Color::TRANSFER_OTHER;
- }
- if (me.v.matrix > C2Color::MATRIX_OTHER) {
- me.set().matrix = C2Color::MATRIX_OTHER;
- }
- return C2R::Ok();
+ std::shared_ptr<C2StreamRequestSyncFrameTuning::output> getRequestSync_l() const {
+ return mRequestSync;
}
+ std::shared_ptr<C2StreamTemporalLayeringTuning::output> getTemporalLayers_l() const {
+ return mLayering;
+ }
+ uint32_t getSyncFramePeriod() const;
+ static C2R ColorAspectsSetter(bool mayBlock, C2P<C2StreamColorAspectsInfo::input> &me);
static C2R CodedColorAspectsSetter(bool mayBlock, C2P<C2StreamColorAspectsInfo::output> &me,
- const C2P<C2StreamColorAspectsInfo::input> &coded) {
- (void)mayBlock;
- me.set().range = coded.v.range;
- me.set().primaries = coded.v.primaries;
- me.set().transfer = coded.v.transfer;
- me.set().matrix = coded.v.matrix;
- return C2R::Ok();
- }
+ const C2P<C2StreamColorAspectsInfo::input> &coded);
private:
std::shared_ptr<C2StreamUsageTuning::input> mUsage;
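
Editor's note: LayeringSetter(), now defined out-of-line in the .cpp, forces the per-layer bitrate ratios to be monotonically non-decreasing and within [0, 1] by clamping each ratio between its predecessor and 1. A small sketch of that pass using std::clamp in place of c2_clamp:

    #include <algorithm>
    #include <cstdio>
    #include <vector>

    int main() {
        std::vector<float> ratios = {0.4f, 0.2f, 1.5f};  // out of order and out of range
        for (size_t i = 0; i < ratios.size(); ++i) {
            float lo = (i > 0) ? ratios[i - 1] : 0.f;    // previous ratio is the floor
            ratios[i] = std::clamp(ratios[i], lo, 1.f);
        }
        for (float r : ratios) std::printf("%.2f ", r);  // 0.40 0.40 1.00
        std::printf("\n");
        return 0;
    }
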
diff --git a/media/codec2/core/include/C2Config.h b/media/codec2/core/include/C2Config.h
index 2cc7ab7..70e742c 100644
--- a/media/codec2/core/include/C2Config.h
+++ b/media/codec2/core/include/C2Config.h
@@ -59,6 +59,7 @@
enum drc_compression_mode_t : int32_t; ///< DRC compression mode
enum drc_effect_type_t : int32_t; ///< DRC effect type
enum drc_album_mode_t : int32_t; ///< DRC album mode
+ enum hdr_dynamic_metadata_type_t : uint32_t; ///< HDR dynamic metadata type
enum intra_refresh_mode_t : uint32_t; ///< intra refresh modes
enum level_t : uint32_t; ///< coding level
enum ordinal_key_t : uint32_t; ///< work ordering keys
@@ -189,10 +190,13 @@
kParamIndexPictureTypeMask,
kParamIndexPictureType,
+ // deprecated
kParamIndexHdr10PlusMetadata,
kParamIndexPictureQuantization,
+ kParamIndexHdrDynamicMetadata,
+
/* ------------------------------------ video components ------------------------------------ */
kParamIndexFrameRate = C2_PARAM_INDEX_VIDEO_PARAM_START,
@@ -270,6 +274,9 @@
// encoding quality requirements
kParamIndexEncodingQualityLevel, // encoders, enum
+
+ // encoding statistics, average block qp of a frame
+ kParamIndexAverageBlockQuantization, // int32
};
}
@@ -680,6 +687,9 @@
LEVEL_DV_MAIN_UHD_30, ///< Dolby Vision main tier uhd30
LEVEL_DV_MAIN_UHD_48, ///< Dolby Vision main tier uhd48
LEVEL_DV_MAIN_UHD_60, ///< Dolby Vision main tier uhd60
+ LEVEL_DV_MAIN_UHD_120, ///< Dolby Vision main tier uhd120
+ LEVEL_DV_MAIN_8K_30, ///< Dolby Vision main tier 8k30
+ LEVEL_DV_MAIN_8K_60, ///< Dolby Vision main tier 8k60
LEVEL_DV_HIGH_HD_24 = _C2_PL_DV_BASE + 0x100, ///< Dolby Vision high tier hd24
LEVEL_DV_HIGH_HD_30, ///< Dolby Vision high tier hd30
@@ -690,6 +700,9 @@
LEVEL_DV_HIGH_UHD_30, ///< Dolby Vision high tier uhd30
LEVEL_DV_HIGH_UHD_48, ///< Dolby Vision high tier uhd48
LEVEL_DV_HIGH_UHD_60, ///< Dolby Vision high tier uhd60
+ LEVEL_DV_HIGH_UHD_120, ///< Dolby Vision high tier uhd120
+ LEVEL_DV_HIGH_8K_30, ///< Dolby Vision high tier 8k30
+ LEVEL_DV_HIGH_8K_60, ///< Dolby Vision high tier 8k60
// AV1 levels
LEVEL_AV1_2 = _C2_PL_AV1_BASE , ///< AV1 Level 2
@@ -1602,16 +1615,54 @@
C2FIELD(maxFall, "max-fall")
};
typedef C2StreamParam<C2Info, C2HdrStaticMetadataStruct, kParamIndexHdrStaticMetadata>
- C2StreamHdrStaticInfo;
+ C2StreamHdrStaticMetadataInfo;
+typedef C2StreamParam<C2Info, C2HdrStaticMetadataStruct, kParamIndexHdrStaticMetadata>
+ C2StreamHdrStaticInfo; // deprecated
constexpr char C2_PARAMKEY_HDR_STATIC_INFO[] = "raw.hdr-static-info";
/**
* HDR10+ Metadata Info.
+ *
+ * Deprecated. Use C2StreamHdrDynamicMetadataInfo with
+ * HDR_DYNAMIC_METADATA_TYPE_SMPTE_2094_40
*/
typedef C2StreamParam<C2Info, C2BlobValue, kParamIndexHdr10PlusMetadata>
- C2StreamHdr10PlusInfo;
-constexpr char C2_PARAMKEY_INPUT_HDR10_PLUS_INFO[] = "input.hdr10-plus-info";
-constexpr char C2_PARAMKEY_OUTPUT_HDR10_PLUS_INFO[] = "output.hdr10-plus-info";
+ C2StreamHdr10PlusInfo; // deprecated
+constexpr char C2_PARAMKEY_INPUT_HDR10_PLUS_INFO[] = "input.hdr10-plus-info"; // deprecated
+constexpr char C2_PARAMKEY_OUTPUT_HDR10_PLUS_INFO[] = "output.hdr10-plus-info"; // deprecated
+
+/**
+ * HDR dynamic metadata types
+ */
+C2ENUM(C2Config::hdr_dynamic_metadata_type_t, uint32_t,
+ HDR_DYNAMIC_METADATA_TYPE_SMPTE_2094_10, ///< SMPTE ST 2094-10
+ HDR_DYNAMIC_METADATA_TYPE_SMPTE_2094_40, ///< SMPTE ST 2094-40
+)
+
+struct C2HdrDynamicMetadataStruct {
+ inline C2HdrDynamicMetadataStruct() { memset(this, 0, sizeof(*this)); }
+
+ inline C2HdrDynamicMetadataStruct(
+ size_t flexCount, C2Config::hdr_dynamic_metadata_type_t type)
+ : type_(type) {
+ memset(data, 0, flexCount);
+ }
+
+ C2Config::hdr_dynamic_metadata_type_t type_;
+ uint8_t data[];
+
+ DEFINE_AND_DESCRIBE_FLEX_C2STRUCT(HdrDynamicMetadata, data)
+ C2FIELD(type_, "type")
+ C2FIELD(data, "data")
+};
+
+/**
+ * Dynamic HDR Metadata Info.
+ */
+typedef C2StreamParam<C2Info, C2HdrDynamicMetadataStruct, kParamIndexHdrDynamicMetadata>
+ C2StreamHdrDynamicMetadataInfo;
+constexpr char C2_PARAMKEY_INPUT_HDR_DYNAMIC_INFO[] = "input.hdr-dynamic-info";
+constexpr char C2_PARAMKEY_OUTPUT_HDR_DYNAMIC_INFO[] = "output.hdr-dynamic-info";
/* ------------------------------------ block-based coding ----------------------------------- */
@@ -1673,7 +1724,7 @@
SYNC_FRAME = (1 << 0), ///< sync frame, e.g. IDR
I_FRAME = (1 << 1), ///< intra frame that is completely encoded
P_FRAME = (1 << 2), ///< inter predicted frame from previous frames
- B_FRAME = (1 << 3), ///< backward predicted (out-of-order) frame
+ B_FRAME = (1 << 3), ///< bidirectional predicted (out-of-order) frame
)
/**
@@ -2411,6 +2462,17 @@
S_HANDHELD = 1 // corresponds to VMAF=70
);
+/**
+ * Video Encoding Statistics Export
+ */
+
+/**
+ * Average block QP exported from video encoder.
+ */
+typedef C2StreamParam<C2Info, C2SimpleValueStruct<int32_t>, kParamIndexAverageBlockQuantization>
+ C2AndroidStreamAverageBlockQuantizationInfo;
+constexpr char C2_PARAMKEY_AVERAGE_QP[] = "coded.average-qp";
+
/// @}
#endif // C2CONFIG_H_
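
The flexible C2StreamHdrDynamicMetadataInfo parameter added above is sized by its byte payload. A minimal C++ sketch of filling it from an SMPTE ST 2094-40 blob, mirroring the CCodecBufferChannel change later in this patch (`payload` and `payloadSize` are hypothetical inputs):

    // Sketch only: flexCount is the payload size in bytes; stream index 0.
    std::shared_ptr<C2StreamHdrDynamicMetadataInfo::output> info =
            C2StreamHdrDynamicMetadataInfo::output::AllocShared(
                    payloadSize, 0u, C2Config::HDR_DYNAMIC_METADATA_TYPE_SMPTE_2094_40);
    memcpy(info->m.data, payload, payloadSize);  // raw SMPTE 2094-40 bytes
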
diff --git a/media/codec2/fuzzer/Android.bp b/media/codec2/fuzzer/Android.bp
index bd1fac6..3adc212 100644
--- a/media/codec2/fuzzer/Android.bp
+++ b/media/codec2/fuzzer/Android.bp
@@ -28,43 +28,12 @@
cc_defaults {
name: "C2Fuzzer-defaults",
+ defaults: [ "libcodec2-static-defaults" ],
+
srcs: [
"C2Fuzzer.cpp",
],
- static_libs: [
- "liblog",
- "libion",
- "libfmq",
- "libbase",
- "libutils",
- "libcutils",
- "libcodec2",
- "libhidlbase",
- "libdmabufheap",
- "libcodec2_vndk",
- "libnativewindow",
- "libcodec2_soft_common",
- "libsfplugin_ccodec_utils",
- "libstagefright_foundation",
- "libstagefright_bufferpool@2.0.1",
- "android.hardware.graphics.mapper@2.0",
- "android.hardware.graphics.mapper@3.0",
- "android.hardware.media.bufferpool@2.0",
- "android.hardware.graphics.allocator@2.0",
- "android.hardware.graphics.allocator@3.0",
- "android.hardware.graphics.bufferqueue@2.0",
- ],
-
- shared_libs: [
- "libui",
- "libdl",
- "libbinder",
- "libhardware",
- "libvndksupport",
- "libprocessgroup",
- ],
-
cflags: [
"-Wall",
"-Werror",
diff --git a/media/codec2/fuzzer/C2Fuzzer.cpp b/media/codec2/fuzzer/C2Fuzzer.cpp
index 51e1013..e469d8b 100644
--- a/media/codec2/fuzzer/C2Fuzzer.cpp
+++ b/media/codec2/fuzzer/C2Fuzzer.cpp
@@ -194,12 +194,12 @@
}
std::vector<C2Param*> configParams;
+ C2StreamPictureSizeInfo::input inputSize(0u, kWidthOfVideo, kHeightOfVideo);
+ C2StreamSampleRateInfo::output sampleRateInfo(0u, kSamplingRateOfAudio);
+ C2StreamChannelCountInfo::output channelCountInfo(0u, kChannelsOfAudio);
if (domain.value == DOMAIN_VIDEO) {
- C2StreamPictureSizeInfo::input inputSize(0u, kWidthOfVideo, kHeightOfVideo);
configParams.push_back(&inputSize);
} else if (domain.value == DOMAIN_AUDIO) {
- C2StreamSampleRateInfo::output sampleRateInfo(0u, kSamplingRateOfAudio);
- C2StreamChannelCountInfo::output channelCountInfo(0u, kChannelsOfAudio);
configParams.push_back(&sampleRateInfo);
configParams.push_back(&channelCountInfo);
}
@@ -239,17 +239,17 @@
}
void Codec2Fuzzer::decodeFrames(const uint8_t* data, size_t size) {
- mBufferSource = new BufferSource(data, size);
- if (!mBufferSource) {
+ std::unique_ptr<BufferSource> bufferSource = std::make_unique<BufferSource>(data, size);
+ if (!bufferSource) {
return;
}
- mBufferSource->parse();
+ bufferSource->parse();
c2_status_t status = C2_OK;
size_t numFrames = 0;
- while (!mBufferSource->isEos()) {
+ while (!bufferSource->isEos()) {
uint8_t* frame = nullptr;
size_t frameSize = 0;
- FrameData frameData = mBufferSource->getFrame();
+ FrameData frameData = bufferSource->getFrame();
frame = std::get<0>(frameData);
frameSize = std::get<1>(frameData);
@@ -298,7 +298,6 @@
mConditionalVariable.wait_for(waitForDecodeComplete, kC2FuzzerTimeOut, [this] { return mEos; });
std::list<std::unique_ptr<C2Work>> c2flushedWorks;
mComponent->flush_sm(C2Component::FLUSH_COMPONENT, &c2flushedWorks);
- delete mBufferSource;
}
extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
diff --git a/media/codec2/fuzzer/C2Fuzzer.h b/media/codec2/fuzzer/C2Fuzzer.h
index d5ac81a..da76885 100644
--- a/media/codec2/fuzzer/C2Fuzzer.h
+++ b/media/codec2/fuzzer/C2Fuzzer.h
@@ -104,7 +104,6 @@
static constexpr size_t kMarkerSuffixSize = 3;
};
- BufferSource* mBufferSource;
bool mEos = false;
C2BlockPool::local_id_t mBlockPoolId;
diff --git a/media/codec2/hidl/1.0/vts/.clang-format b/media/codec2/hidl/1.0/vts/.clang-format
deleted file mode 120000
index 136279c..0000000
--- a/media/codec2/hidl/1.0/vts/.clang-format
+++ /dev/null
@@ -1 +0,0 @@
-../../../../../../../build/soong/scripts/system-clang-format
\ No newline at end of file
diff --git a/media/codec2/hidl/1.0/vts/OWNERS b/media/codec2/hidl/1.0/vts/OWNERS
index dbe89cf..32b11b8 100644
--- a/media/codec2/hidl/1.0/vts/OWNERS
+++ b/media/codec2/hidl/1.0/vts/OWNERS
@@ -1,8 +1,5 @@
+# Bug component: 25690
# Media team
lajos@google.com
-pawin@google.com
taklee@google.com
wonsik@google.com
-
-# VTS team
-dshi@google.com
diff --git a/media/codec2/hidl/1.0/vts/functional/audio/VtsHalMediaC2V1_0TargetAudioDecTest.cpp b/media/codec2/hidl/1.0/vts/functional/audio/VtsHalMediaC2V1_0TargetAudioDecTest.cpp
index 9e3a823..d47ef67 100644
--- a/media/codec2/hidl/1.0/vts/functional/audio/VtsHalMediaC2V1_0TargetAudioDecTest.cpp
+++ b/media/codec2/hidl/1.0/vts/functional/audio/VtsHalMediaC2V1_0TargetAudioDecTest.cpp
@@ -465,6 +465,11 @@
if (mMime.find("raw") != std::string::npos) {
bitStreamInfo[0] = 8000;
bitStreamInfo[1] = 1;
+ } else if ((mMime.find("g711-alaw") != std::string::npos) ||
+ (mMime.find("g711-mlaw") != std::string::npos)) {
+ // g711 test data is all 1-channel and has no embedded config info.
+ bitStreamInfo[0] = 8000;
+ bitStreamInfo[1] = 1;
} else {
ASSERT_NO_FATAL_FAILURE(getInputChannelInfo(mComponent, mMime, bitStreamInfo));
}
diff --git a/media/codec2/hidl/1.0/vts/functional/component/VtsHalMediaC2V1_0TargetComponentTest.cpp b/media/codec2/hidl/1.0/vts/functional/component/VtsHalMediaC2V1_0TargetComponentTest.cpp
index ffec897..275a721 100644
--- a/media/codec2/hidl/1.0/vts/functional/component/VtsHalMediaC2V1_0TargetComponentTest.cpp
+++ b/media/codec2/hidl/1.0/vts/functional/component/VtsHalMediaC2V1_0TargetComponentTest.cpp
@@ -61,6 +61,7 @@
public:
virtual void SetUp() override {
getParams();
+ mDisableTest = false;
mEos = false;
mClient = android::Codec2Client::CreateFromService(mInstanceName.c_str());
ASSERT_NE(mClient, nullptr);
@@ -73,6 +74,14 @@
for (int i = 0; i < MAX_INPUT_BUFFERS; ++i) {
mWorkQueue.emplace_back(new C2Work);
}
+
+ C2SecureModeTuning secureModeTuning{};
+ mComponent->query({&secureModeTuning}, {}, C2_MAY_BLOCK, nullptr);
+ if (secureModeTuning.value != C2Config::SM_UNPROTECTED) {
+ mDisableTest = true;
+ }
+
+ if (mDisableTest) std::cout << "[ WARN ] Test Disabled \n";
}
virtual void TearDown() override {
@@ -105,6 +114,7 @@
std::string mInstanceName;
std::string mComponentName;
bool mEos;
+ bool mDisableTest;
std::mutex mQueueLock;
std::condition_variable mQueueCondition;
std::list<std::unique_ptr<C2Work>> mWorkQueue;
@@ -324,6 +334,7 @@
};
TEST_P(Codec2ComponentInputTests, InputBufferTest) {
+ if (mDisableTest) GTEST_SKIP() << "Test is disabled";
description("Tests for different inputs");
uint32_t flags = std::get<2>(GetParam());
diff --git a/media/codec2/hidl/plugin/FilterWrapper.cpp b/media/codec2/hidl/plugin/FilterWrapper.cpp
index 70c63f2..d5124fd 100644
--- a/media/codec2/hidl/plugin/FilterWrapper.cpp
+++ b/media/codec2/hidl/plugin/FilterWrapper.cpp
@@ -49,6 +49,11 @@
std::weak_ptr<FilterWrapper> filterWrapper)
: mIntf(intf), mFilterWrapper(filterWrapper) {
takeFilters(std::move(filters));
+ for (size_t i = 0; i < mFilters.size(); ++i) {
+ mControlParamTypes.insert(
+ mFilters[i].desc.controlParams.begin(),
+ mFilters[i].desc.controlParams.end());
+ }
}
~WrappedDecoderInterface() override = default;
@@ -187,7 +192,12 @@
}
std::vector<C2Param *> stackParamsForIntf;
- std::copy_n(stackParamsList.begin(), stackParamsList.size(), stackParamsForIntf.begin());
+ for (C2Param *param : stackParamsList) {
+ if (mControlParamTypes.count(param->type()) != 0) {
+ continue;
+ }
+ stackParamsForIntf.push_back(param);
+ }
// Gather heap params that did not get queried from the filter interfaces above.
// These need to be queried from the decoder interface.
@@ -197,6 +207,9 @@
if (mTypeToIndexForQuery.find(type) != mTypeToIndexForQuery.end()) {
continue;
}
+ if (mControlParamTypes.count(type) != 0) {
+ continue;
+ }
heapParamIndicesForIntf.push_back(heapParamIndices[j]);
}
@@ -251,11 +264,14 @@
std::vector<C2Param *> paramsForFilter;
for (C2Param* param : params) {
auto it = mTypeToIndexForConfig.find(param->type().type());
- if (it != mTypeToIndexForConfig.end() && it->second != i) {
+ if (it == mTypeToIndexForConfig.end() || it->second != i) {
continue;
}
paramsForFilter.push_back(param);
}
+ if (paramsForFilter.empty()) {
+ continue;
+ }
c2_status_t err = filter->config_vb(paramsForFilter, mayBlock, &filterFailures);
if (err != C2_OK) {
LOG(err == C2_BAD_INDEX ? VERBOSE : WARNING)
@@ -356,6 +372,7 @@
std::weak_ptr<FilterWrapper> mFilterWrapper;
std::map<uint32_t, size_t> mTypeToIndexForQuery;
std::map<uint32_t, size_t> mTypeToIndexForConfig;
+ std::set<C2Param::Type> mControlParamTypes;
c2_status_t transferParams_l(
const std::shared_ptr<C2ComponentInterface> &curr,
@@ -430,6 +447,10 @@
LOG(DEBUG) << "WrappedDecoderInterface: FilterWrapper not found";
return C2_OK;
}
+ if (!filterWrapper->isFilteringEnabled(next)) {
+ LOG(VERBOSE) << "WrappedDecoderInterface: filtering not enabled";
+ return C2_OK;
+ }
std::vector<std::unique_ptr<C2Param>> params;
    c2_status_t err = filterWrapper->queryParamsForPreviousComponent(next, &params);
if (err != C2_OK) {
@@ -594,6 +615,8 @@
}
}
mRunningFilters.clear();
+ std::vector<FilterWrapper::Component> filters(mFilters);
+ mIntf->takeFilters(std::move(filters));
return result;
}
diff --git a/media/codec2/hidl/services/Android.bp b/media/codec2/hidl/services/Android.bp
index bb9f51f..b36e80a 100644
--- a/media/codec2/hidl/services/Android.bp
+++ b/media/codec2/hidl/services/Android.bp
@@ -52,7 +52,7 @@
// minijail is used to protect against unexpected system calls.
shared_libs: [
- "libavservices_minijail_vendor",
+ "libavservices_minijail",
"libbinder",
],
required: ["android.hardware.media.c2@1.2-default-seccomp_policy"],
diff --git a/media/codec2/hidl/services/android.hardware.media.c2@1.2-default-service.rc b/media/codec2/hidl/services/android.hardware.media.c2@1.2-default-service.rc
index 03f6e3d..12da593 100644
--- a/media/codec2/hidl/services/android.hardware.media.c2@1.2-default-service.rc
+++ b/media/codec2/hidl/services/android.hardware.media.c2@1.2-default-service.rc
@@ -3,5 +3,5 @@
user mediacodec
group camera mediadrm drmrpc
ioprio rt 4
- writepid /dev/cpuset/foreground/tasks
+ task_profiles ProcessCapacityHigh
diff --git a/media/codec2/sfplugin/Android.bp b/media/codec2/sfplugin/Android.bp
index 2bc748f..feeddb5 100644
--- a/media/codec2/sfplugin/Android.bp
+++ b/media/codec2/sfplugin/Android.bp
@@ -42,12 +42,14 @@
"android.hardware.drm@1.0",
"android.hardware.media.c2@1.0",
"android.hardware.media.omx@1.0",
+ "android.hardware.graphics.mapper@4.0",
"libbase",
"libbinder",
"libcodec2",
"libcodec2_client",
"libcodec2_vndk",
"libcutils",
+ "libgralloctypes",
"libgui",
"libhidlallocatorutils",
"libhidlbase",
diff --git a/media/codec2/sfplugin/C2OMXNode.cpp b/media/codec2/sfplugin/C2OMXNode.cpp
index c049187..ed7d69c 100644
--- a/media/codec2/sfplugin/C2OMXNode.cpp
+++ b/media/codec2/sfplugin/C2OMXNode.cpp
@@ -42,6 +42,7 @@
#include "utils/Codec2Mapper.h"
#include "C2OMXNode.h"
+#include "Codec2Buffer.h"
namespace android {
@@ -466,6 +467,18 @@
new Buffer2D(block->share(
C2Rect(block->width(), block->height()), ::C2Fence())));
work->input.buffers.push_back(c2Buffer);
+ std::shared_ptr<C2StreamHdrStaticInfo::input> staticInfo;
+ std::shared_ptr<C2StreamHdrDynamicMetadataInfo::input> dynamicInfo;
+ GetHdrMetadataFromGralloc4Handle(
+ block->handle(),
+ &staticInfo,
+ &dynamicInfo);
+ if (staticInfo && *staticInfo) {
+ c2Buffer->setInfo(staticInfo);
+ }
+ if (dynamicInfo && *dynamicInfo) {
+ c2Buffer->setInfo(dynamicInfo);
+ }
}
work->worklets.clear();
work->worklets.emplace_back(new C2Worklet);
diff --git a/media/codec2/sfplugin/CCodec.cpp b/media/codec2/sfplugin/CCodec.cpp
index 82460c9..5df28f0 100644
--- a/media/codec2/sfplugin/CCodec.cpp
+++ b/media/codec2/sfplugin/CCodec.cpp
@@ -1018,29 +1018,31 @@
} else {
pixelFormatInfo = nullptr;
}
- std::optional<uint32_t> flexPixelFormat{};
- std::optional<uint32_t> flexPlanarPixelFormat{};
- std::optional<uint32_t> flexSemiPlanarPixelFormat{};
+ // bit depth -> format
+ std::map<uint32_t, uint32_t> flexPixelFormat;
+ std::map<uint32_t, uint32_t> flexPlanarPixelFormat;
+ std::map<uint32_t, uint32_t> flexSemiPlanarPixelFormat;
if (pixelFormatInfo && *pixelFormatInfo) {
for (size_t i = 0; i < pixelFormatInfo->flexCount(); ++i) {
const C2FlexiblePixelFormatDescriptorStruct &desc =
pixelFormatInfo->m.values[i];
- if (desc.bitDepth != 8
- || desc.subsampling != C2Color::YUV_420
+ if (desc.subsampling != C2Color::YUV_420
// TODO(b/180076105): some device report wrong layout
// || desc.layout == C2Color::INTERLEAVED_PACKED
// || desc.layout == C2Color::INTERLEAVED_ALIGNED
|| desc.layout == C2Color::UNKNOWN_LAYOUT) {
continue;
}
- if (!flexPixelFormat) {
- flexPixelFormat = desc.pixelFormat;
+ if (flexPixelFormat.count(desc.bitDepth) == 0) {
+ flexPixelFormat.emplace(desc.bitDepth, desc.pixelFormat);
}
- if (desc.layout == C2Color::PLANAR_PACKED && !flexPlanarPixelFormat) {
- flexPlanarPixelFormat = desc.pixelFormat;
+ if (desc.layout == C2Color::PLANAR_PACKED
+ && flexPlanarPixelFormat.count(desc.bitDepth) == 0) {
+ flexPlanarPixelFormat.emplace(desc.bitDepth, desc.pixelFormat);
}
- if (desc.layout == C2Color::SEMIPLANAR_PACKED && !flexSemiPlanarPixelFormat) {
- flexSemiPlanarPixelFormat = desc.pixelFormat;
+ if (desc.layout == C2Color::SEMIPLANAR_PACKED
+ && flexSemiPlanarPixelFormat.count(desc.bitDepth) == 0) {
+ flexSemiPlanarPixelFormat.emplace(desc.bitDepth, desc.pixelFormat);
}
}
}
@@ -1050,7 +1052,7 @@
if (!(config->mDomain & Config::IS_ENCODER)) {
if (surface == nullptr) {
const char *prefix = "";
- if (flexSemiPlanarPixelFormat) {
+ if (flexSemiPlanarPixelFormat.count(8) != 0) {
format = COLOR_FormatYUV420SemiPlanar;
prefix = "semi-";
} else {
@@ -1067,17 +1069,34 @@
if ((config->mDomain & Config::IS_ENCODER) || !surface) {
switch (format) {
case COLOR_FormatYUV420Flexible:
- format = flexPixelFormat.value_or(COLOR_FormatYUV420Planar);
+ format = COLOR_FormatYUV420Planar;
+ if (flexPixelFormat.count(8) != 0) {
+ format = flexPixelFormat[8];
+ }
break;
case COLOR_FormatYUV420Planar:
case COLOR_FormatYUV420PackedPlanar:
- format = flexPlanarPixelFormat.value_or(
- flexPixelFormat.value_or(format));
+ if (flexPlanarPixelFormat.count(8) != 0) {
+ format = flexPlanarPixelFormat[8];
+ } else if (flexPixelFormat.count(8) != 0) {
+ format = flexPixelFormat[8];
+ }
break;
case COLOR_FormatYUV420SemiPlanar:
case COLOR_FormatYUV420PackedSemiPlanar:
- format = flexSemiPlanarPixelFormat.value_or(
- flexPixelFormat.value_or(format));
+ if (flexSemiPlanarPixelFormat.count(8) != 0) {
+ format = flexSemiPlanarPixelFormat[8];
+ } else if (flexPixelFormat.count(8) != 0) {
+ format = flexPixelFormat[8];
+ }
+ break;
+ case COLOR_FormatYUVP010:
+ format = COLOR_FormatYUVP010;
+ if (flexSemiPlanarPixelFormat.count(10) != 0) {
+ format = flexSemiPlanarPixelFormat[10];
+ } else if (flexPixelFormat.count(10) != 0) {
+ format = flexPixelFormat[10];
+ }
break;
default:
// No-op
@@ -1213,11 +1232,25 @@
std::initializer_list<C2Param::Index> indices {
colorAspectsRequestIndex.withStream(0u),
};
- c2_status_t c2err = comp->query(
- { &usage, &maxInputSize, &prepend },
- indices,
- C2_DONT_BLOCK,
-            &params);
+ int32_t colorTransferRequest = 0;
+ if (config->mDomain & (Config::IS_IMAGE | Config::IS_VIDEO)
+ && !sdkParams->findInt32("color-transfer-request", &colorTransferRequest)) {
+ colorTransferRequest = 0;
+ }
+ c2_status_t c2err = C2_OK;
+ if (colorTransferRequest != 0) {
+ c2err = comp->query(
+ { &usage, &maxInputSize, &prepend },
+ indices,
+ C2_DONT_BLOCK,
+                &params);
+ } else {
+ c2err = comp->query(
+ { &usage, &maxInputSize, &prepend },
+ {},
+ C2_DONT_BLOCK,
+                &params);
+ }
if (c2err != C2_OK && c2err != C2_BAD_INDEX) {
ALOGE("Failed to query component interface: %d", c2err);
return UNKNOWN_ERROR;
@@ -1332,8 +1365,8 @@
}
}
- // set channel-mask
if (config->mDomain & Config::IS_AUDIO) {
+ // set channel-mask
int32_t mask;
if (msg->findInt32(KEY_CHANNEL_MASK, &mask)) {
if (config->mDomain & Config::IS_ENCODER) {
@@ -1342,6 +1375,15 @@
config->mOutputFormat->setInt32(KEY_CHANNEL_MASK, mask);
}
}
+
+ // set PCM encoding
+ int32_t pcmEncoding = kAudioEncodingPcm16bit;
+ msg->findInt32(KEY_PCM_ENCODING, &pcmEncoding);
+ if (encoder) {
+ config->mInputFormat->setInt32("android._config-pcm-encoding", pcmEncoding);
+ } else {
+ config->mOutputFormat->setInt32("android._config-pcm-encoding", pcmEncoding);
+ }
}
std::unique_ptr<C2Param> colorTransferRequestParam;
@@ -1351,11 +1393,6 @@
colorTransferRequestParam = std::move(param);
}
}
- int32_t colorTransferRequest = 0;
- if (config->mDomain & (Config::IS_IMAGE | Config::IS_VIDEO)
- && !sdkParams->findInt32("color-transfer-request", &colorTransferRequest)) {
- colorTransferRequest = 0;
- }
if (colorTransferRequest != 0) {
if (colorTransferRequestParam && *colorTransferRequestParam) {
@@ -1421,6 +1458,31 @@
}
}
+ if (config->mTunneled) {
+ config->mOutputFormat->setInt32("android._tunneled", 1);
+ }
+
+ // Convert an encoding statistics level to corresponding encoding statistics
+ // kinds
+ int32_t encodingStatisticsLevel = VIDEO_ENCODING_STATISTICS_LEVEL_NONE;
+ if ((config->mDomain & Config::IS_ENCODER)
+ && (config->mDomain & Config::IS_VIDEO)
+ && msg->findInt32(KEY_VIDEO_ENCODING_STATISTICS_LEVEL, &encodingStatisticsLevel)) {
+        // Higher levels include all the encoding statistics that belong to lower levels.
+        switch (encodingStatisticsLevel) {
+        // case VIDEO_ENCODING_STATISTICS_LEVEL_2:
+        //     // Reserved for a future level 2 with more encoding statistics kinds.
+        //     // Extended statistics for level 2 should be added here.
+ case VIDEO_ENCODING_STATISTICS_LEVEL_1:
+ config->subscribeToConfigUpdate(comp,
+ {kParamIndexAverageBlockQuantization, kParamIndexPictureType});
+ break;
+ case VIDEO_ENCODING_STATISTICS_LEVEL_NONE:
+ break;
+ }
+ }
+ ALOGD("encoding statistics level = %d", encodingStatisticsLevel);
+
ALOGD("setup formats input: %s",
config->mInputFormat->debugString().c_str());
ALOGD("setup formats output: %s",
@@ -1896,9 +1958,11 @@
{
Mutexed<std::unique_ptr<Config>>::Locked configLocked(mConfig);
const std::unique_ptr<Config> &config = *configLocked;
+ sp<ANativeWindow> nativeWindow = static_cast<ANativeWindow *>(surface.get());
+ status_t err = OK;
+
if (config->mTunneled && config->mSidebandHandle != nullptr) {
- sp<ANativeWindow> nativeWindow = static_cast<ANativeWindow *>(surface.get());
- status_t err = native_window_set_sideband_stream(
+ err = native_window_set_sideband_stream(
nativeWindow.get(),
const_cast<native_handle_t *>(config->mSidebandHandle->handle()));
if (err != OK) {
@@ -1906,6 +1970,15 @@
nativeWindow.get(), config->mSidebandHandle->handle(), err);
return err;
}
+ } else {
+ // Explicitly reset the sideband handle of the window for
+ // non-tunneled video in case the window was previously used
+ // for a tunneled video playback.
+ err = native_window_set_sideband_stream(nativeWindow.get(), nullptr);
+ if (err != OK) {
+ ALOGE("native_window_set_sideband_stream(nullptr) failed! (err %d).", err);
+ return err;
+ }
}
}
return mChannel->setSurface(surface);
diff --git a/media/codec2/sfplugin/CCodecBufferChannel.cpp b/media/codec2/sfplugin/CCodecBufferChannel.cpp
index 6f7b7f7..99aa593 100644
--- a/media/codec2/sfplugin/CCodecBufferChannel.cpp
+++ b/media/codec2/sfplugin/CCodecBufferChannel.cpp
@@ -254,7 +254,7 @@
bool released = input->buffers->releaseBuffer(buffer, nullptr, true);
ALOGV("[%s] queueInputBuffer: buffer copied; %sreleased",
mName, released ? "" : "not ");
- buffer.clear();
+ buffer = copy;
} else {
ALOGW("[%s] queueInputBuffer: failed to copy a buffer; this may cause input "
"buffer starvation on component.", mName);
@@ -282,6 +282,12 @@
}
}
} else if (eos) {
+ Mutexed<Input>::Locked input(mInput);
+ if (input->frameReassembler) {
+ usesFrameReassembler = true;
+ // drain any pending items with eos
+ input->frameReassembler.process(buffer, &items);
+ }
flags |= C2FrameData::FLAG_END_OF_STREAM;
}
if (usesFrameReassembler) {
@@ -341,10 +347,10 @@
} else {
Mutexed<Input>::Locked input(mInput);
bool released = false;
- if (buffer) {
- released = input->buffers->releaseBuffer(buffer, nullptr, true);
- } else if (copy) {
+ if (copy) {
released = input->extraBuffers.releaseSlot(copy, nullptr, true);
+ } else if (buffer) {
+ released = input->buffers->releaseBuffer(buffer, nullptr, true);
}
ALOGV("[%s] queueInputBuffer: buffer%s %sreleased",
mName, (buffer == nullptr) ? "(copy)" : "", released ? "" : "not ");
@@ -834,6 +840,35 @@
hdr10PlusInfo.reset();
}
+ // HDR dynamic info
+ std::shared_ptr<const C2StreamHdrDynamicMetadataInfo::output> hdrDynamicInfo =
+ std::static_pointer_cast<const C2StreamHdrDynamicMetadataInfo::output>(
+ c2Buffer->getInfo(C2StreamHdrDynamicMetadataInfo::output::PARAM_TYPE));
+ // TODO: make this sticky & enable unset
+ if (hdrDynamicInfo && hdrDynamicInfo->flexCount() == 0) {
+ hdrDynamicInfo.reset();
+ }
+
+ if (hdr10PlusInfo) {
+ // C2StreamHdr10PlusInfo is deprecated; components should use
+ // C2StreamHdrDynamicMetadataInfo
+ // TODO: #metric
+ if (hdrDynamicInfo) {
+ // It is unexpected that C2StreamHdr10PlusInfo and
+            // C2StreamHdrDynamicMetadataInfo are both present.
+ // C2StreamHdrDynamicMetadataInfo takes priority.
+ // TODO: #metric
+ } else {
+ std::shared_ptr<C2StreamHdrDynamicMetadataInfo::output> info =
+ C2StreamHdrDynamicMetadataInfo::output::AllocShared(
+ hdr10PlusInfo->flexCount(),
+ 0u,
+ C2Config::HDR_DYNAMIC_METADATA_TYPE_SMPTE_2094_40);
+ memcpy(info->m.data, hdr10PlusInfo->m.value, hdr10PlusInfo->flexCount());
+ hdrDynamicInfo = info;
+ }
+ }
+
std::vector<C2ConstGraphicBlock> blocks = c2Buffer->data().graphicBlocks();
if (blocks.size() != 1u) {
ALOGD("[%s] expected 1 graphic block, but got %zu", mName, blocks.size());
@@ -853,7 +888,7 @@
videoScalingMode,
transform,
Fence::NO_FENCE, 0);
- if (hdrStaticInfo || hdr10PlusInfo) {
+ if (hdrStaticInfo || hdrDynamicInfo) {
HdrMetadata hdr;
if (hdrStaticInfo) {
// If mastering max and min luminance fields are 0, do not use them.
@@ -890,13 +925,16 @@
hdr.cta8613 = cta861_meta;
}
}
- if (hdr10PlusInfo) {
+ if (hdrDynamicInfo
+ && hdrDynamicInfo->m.type_ == C2Config::HDR_DYNAMIC_METADATA_TYPE_SMPTE_2094_40) {
hdr.validTypes |= HdrMetadata::HDR10PLUS;
hdr.hdr10plus.assign(
- hdr10PlusInfo->m.value,
- hdr10PlusInfo->m.value + hdr10PlusInfo->flexCount());
+ hdrDynamicInfo->m.data,
+ hdrDynamicInfo->m.data + hdrDynamicInfo->flexCount());
}
qbi.setHdrMetadata(hdr);
+
+ SetHdrMetadataToGralloc4Handle(hdrStaticInfo, hdrDynamicInfo, block.handle());
}
// we don't have dirty regions
qbi.setSurfaceDamage(Region::INVALID_REGION);
@@ -1382,6 +1420,12 @@
}
}
}
+
+ int32_t tunneled = 0;
+ if (!outputFormat->findInt32("android._tunneled", &tunneled)) {
+ tunneled = 0;
+ }
+ mTunneled = (tunneled != 0);
}
// Set up pipeline control. This has to be done after mInputBuffers and
@@ -1899,10 +1943,21 @@
}
}
+ bool drop = false;
+ if (worklet->output.flags & C2FrameData::FLAG_DROP_FRAME) {
+ ALOGV("[%s] onWorkDone: drop buffer but keep metadata", mName);
+ drop = true;
+ }
+
if (notifyClient && !buffer && !flags) {
- ALOGV("[%s] onWorkDone: Not reporting output buffer (%lld)",
- mName, work->input.ordinal.frameIndex.peekull());
- notifyClient = false;
+ if (mTunneled && drop && outputFormat) {
+ ALOGV("[%s] onWorkDone: Keep tunneled, drop frame with format change (%lld)",
+ mName, work->input.ordinal.frameIndex.peekull());
+ } else {
+ ALOGV("[%s] onWorkDone: Not reporting output buffer (%lld)",
+ mName, work->input.ordinal.frameIndex.peekull());
+ notifyClient = false;
+ }
}
if (buffer) {
diff --git a/media/codec2/sfplugin/CCodecBufferChannel.h b/media/codec2/sfplugin/CCodecBufferChannel.h
index 4db69cb..26eef30 100644
--- a/media/codec2/sfplugin/CCodecBufferChannel.h
+++ b/media/codec2/sfplugin/CCodecBufferChannel.h
@@ -324,6 +324,8 @@
return mCrypto != nullptr || mDescrambler != nullptr;
}
std::atomic_bool mSendEncryptedInfoBuffer;
+
+ std::atomic_bool mTunneled;
};
// Conversion of a c2_status_t value to a status_t value may depend on the
diff --git a/media/codec2/sfplugin/CCodecBuffers.cpp b/media/codec2/sfplugin/CCodecBuffers.cpp
index 333a2ca..97e1a01 100644
--- a/media/codec2/sfplugin/CCodecBuffers.cpp
+++ b/media/codec2/sfplugin/CCodecBuffers.cpp
@@ -21,6 +21,7 @@
#include <C2PlatformSupport.h>
#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/foundation/MediaDefs.h>
#include <media/stagefright/MediaCodec.h>
#include <media/stagefright/MediaCodecConstants.h>
#include <media/stagefright/SkipCutBuffer.h>
@@ -132,6 +133,7 @@
if (!copy->copy(c2buffer)) {
return nullptr;
}
+ copy->meta()->extend(buffer->meta());
return copy;
}
@@ -199,6 +201,56 @@
mSkipCutBuffer = new SkipCutBuffer(skip, cut, mChannelCount);
}
+bool OutputBuffers::convert(
+ const std::shared_ptr<C2Buffer> &src, sp<Codec2Buffer> *dst) {
+ if (!src || src->data().type() != C2BufferData::LINEAR) {
+ return false;
+ }
+ int32_t configEncoding = kAudioEncodingPcm16bit;
+ int32_t codecEncoding = kAudioEncodingPcm16bit;
+ if (mFormat->findInt32("android._codec-pcm-encoding", &codecEncoding)
+ && mFormat->findInt32("android._config-pcm-encoding", &configEncoding)) {
+ if (mSrcEncoding != codecEncoding || mDstEncoding != configEncoding) {
+ if (codecEncoding != configEncoding) {
+ mDataConverter = AudioConverter::Create(
+ (AudioEncoding)codecEncoding, (AudioEncoding)configEncoding);
+ ALOGD_IF(mDataConverter, "[%s] Converter created from %d to %d",
+ mName, codecEncoding, configEncoding);
+ mFormatWithConverter = mFormat->dup();
+ mFormatWithConverter->setInt32(KEY_PCM_ENCODING, configEncoding);
+ } else {
+ mDataConverter = nullptr;
+ mFormatWithConverter = nullptr;
+ }
+ mSrcEncoding = codecEncoding;
+ mDstEncoding = configEncoding;
+ }
+        if (int encoding; !mFormat->findInt32(KEY_PCM_ENCODING, &encoding)
+                || encoding != mDstEncoding) {
+            // Presumed intent (the body was empty here): keep the output
+            // format's PCM encoding in sync with the configured encoding.
+            mFormat->setInt32(KEY_PCM_ENCODING, *mDstEncoding);
+        }
+ }
+ if (!mDataConverter) {
+ return false;
+ }
+ sp<MediaCodecBuffer> srcBuffer = ConstLinearBlockBuffer::Allocate(mFormat, src);
+ if (!srcBuffer) {
+ return false;
+ }
+ if (!*dst) {
+ *dst = new Codec2Buffer(
+ mFormat,
+ new ABuffer(mDataConverter->targetSize(srcBuffer->size())));
+ }
+ sp<MediaCodecBuffer> dstBuffer = *dst;
+ status_t err = mDataConverter->convert(srcBuffer, dstBuffer);
+ if (err != OK) {
+ ALOGD("[%s] buffer conversion failed: %d", mName, err);
+ return false;
+ }
+ dstBuffer->setFormat(mFormatWithConverter);
+ return true;
+}
+
void OutputBuffers::clearStash() {
mPending.clear();
mReorderStash.clear();
@@ -1078,7 +1130,7 @@
return err;
}
c2Buffer->setFormat(mFormat);
- if (!c2Buffer->copy(buffer)) {
+ if (!convert(buffer, &c2Buffer) && !c2Buffer->copy(buffer)) {
ALOGD("[%s] copy buffer failed", mName);
return WOULD_BLOCK;
}
@@ -1194,9 +1246,12 @@
const std::shared_ptr<C2Buffer> &buffer,
size_t *index,
sp<MediaCodecBuffer> *clientBuffer) {
- sp<Codec2Buffer> newBuffer = wrap(buffer);
- if (newBuffer == nullptr) {
- return NO_MEMORY;
+ sp<Codec2Buffer> newBuffer;
+ if (!convert(buffer, &newBuffer)) {
+ newBuffer = wrap(buffer);
+ if (newBuffer == nullptr) {
+ return NO_MEMORY;
+ }
}
newBuffer->setFormat(mFormat);
*index = mImpl.assignSlot(newBuffer);
diff --git a/media/codec2/sfplugin/CCodecBuffers.h b/media/codec2/sfplugin/CCodecBuffers.h
index 995d3a4..c8e9930 100644
--- a/media/codec2/sfplugin/CCodecBuffers.h
+++ b/media/codec2/sfplugin/CCodecBuffers.h
@@ -18,9 +18,11 @@
#define CCODEC_BUFFERS_H_
+#include <optional>
#include <string>
#include <C2Config.h>
+#include <DataConverter.h>
#include <media/stagefright/foundation/AMessage.h>
#include <media/MediaCodecBuffer.h>
@@ -382,6 +384,14 @@
*/
void submit(const sp<MediaCodecBuffer> &buffer);
+ /**
+ * Apply DataConverter from |src| to |*dst| if needed. If |*dst| is nullptr,
+ * a new buffer is allocated.
+ *
+ * Returns true if conversion was needed and executed; false otherwise.
+ */
+ bool convert(const std::shared_ptr<C2Buffer> &src, sp<Codec2Buffer> *dst);
+
private:
// SkipCutBuffer
int32_t mDelay;
@@ -391,6 +401,12 @@
void setSkipCutBuffer(int32_t skip, int32_t cut);
+ // DataConverter
+ sp<DataConverter> mDataConverter;
+ sp<AMessage> mFormatWithConverter;
+ std::optional<int32_t> mSrcEncoding;
+ std::optional<int32_t> mDstEncoding;
+
// Output stash
// Struct for an entry in the output stash (mPending and mReorderStash)
diff --git a/media/codec2/sfplugin/CCodecConfig.cpp b/media/codec2/sfplugin/CCodecConfig.cpp
index 09e24be..dd37c4b 100644
--- a/media/codec2/sfplugin/CCodecConfig.cpp
+++ b/media/codec2/sfplugin/CCodecConfig.cpp
@@ -657,24 +657,29 @@
add(ConfigMapper(KEY_SAMPLE_RATE, C2_PARAMKEY_CODED_SAMPLE_RATE, "value")
.limitTo(D::AUDIO & D::CODED));
- add(ConfigMapper(KEY_PCM_ENCODING, C2_PARAMKEY_PCM_ENCODING, "value")
+ auto pcmEncodingMapper = [](C2Value v) -> C2Value {
+ int32_t value;
+ C2Config::pcm_encoding_t to;
+ if (v.get(&value) && C2Mapper::map(value, &to)) {
+ return to;
+ }
+ return C2Value();
+ };
+ auto pcmEncodingReverse = [](C2Value v) -> C2Value {
+ C2Config::pcm_encoding_t value;
+ int32_t to;
+ using C2ValueType=typename _c2_reduce_enum_to_underlying_type<decltype(value)>::type;
+ if (v.get((C2ValueType*)&value) && C2Mapper::map(value, &to)) {
+ return to;
+ }
+ return C2Value();
+ };
+ add(ConfigMapper(KEY_PCM_ENCODING, C2_PARAMKEY_PCM_ENCODING, "value")
.limitTo(D::AUDIO)
- .withMappers([](C2Value v) -> C2Value {
- int32_t value;
- C2Config::pcm_encoding_t to;
- if (v.get(&value) && C2Mapper::map(value, &to)) {
- return to;
- }
- return C2Value();
- }, [](C2Value v) -> C2Value {
- C2Config::pcm_encoding_t value;
- int32_t to;
- using C2ValueType=typename _c2_reduce_enum_to_underlying_type<decltype(value)>::type;
- if (v.get((C2ValueType*)&value) && C2Mapper::map(value, &to)) {
- return to;
- }
- return C2Value();
- }));
+ .withMappers(pcmEncodingMapper, pcmEncodingReverse));
+ add(ConfigMapper("android._codec-pcm-encoding", C2_PARAMKEY_PCM_ENCODING, "value")
+ .limitTo(D::AUDIO & D::READ)
+ .withMappers(pcmEncodingMapper, pcmEncodingReverse));
add(ConfigMapper(KEY_IS_ADTS, C2_PARAMKEY_AAC_PACKAGING, "value")
.limitTo(D::AUDIO & D::CODED)
@@ -951,6 +956,12 @@
return value == 0 ? C2_FALSE : C2_TRUE;
}));
+ add(ConfigMapper(KEY_VIDEO_QP_AVERAGE, C2_PARAMKEY_AVERAGE_QP, "value")
+ .limitTo(D::ENCODER & D::VIDEO & D::READ));
+
+ add(ConfigMapper(KEY_PICTURE_TYPE, C2_PARAMKEY_PICTURE_TYPE, "value")
+ .limitTo(D::ENCODER & D::VIDEO & D::READ));
+
/* still to do
constexpr char KEY_PUSH_BLANK_BUFFERS_ON_STOP[] = "push-blank-buffers-on-shutdown";
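
With the two read-only mappings above in place, and KEY_VIDEO_ENCODING_STATISTICS_LEVEL set to level 1 on a video encoder (see the CCodec.cpp change earlier in this patch), per-frame statistics are expected to surface on the output format. A hedged C++ sketch of reading them back (assumed flow, not part of this change; `outputFormat` is the per-buffer output format message):

    // Sketch only: read back per-frame encoder statistics surfaced via the
    // mappings above.
    int32_t averageQp = 0;    // mapped from C2_PARAMKEY_AVERAGE_QP ("coded.average-qp")
    int32_t pictureType = 0;  // mapped from C2_PARAMKEY_PICTURE_TYPE
    outputFormat->findInt32(KEY_VIDEO_QP_AVERAGE, &averageQp);
    outputFormat->findInt32(KEY_PICTURE_TYPE, &pictureType);
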
diff --git a/media/codec2/sfplugin/CCodecConfig.h b/media/codec2/sfplugin/CCodecConfig.h
index 417b773..88e6239 100644
--- a/media/codec2/sfplugin/CCodecConfig.h
+++ b/media/codec2/sfplugin/CCodecConfig.h
@@ -363,11 +363,6 @@
const std::vector<std::string> &names,
c2_blocking_t blocking = C2_DONT_BLOCK);
-private:
-
- /// initializes the standard MediaCodec to Codec 2.0 params mapping
- void initializeStandardParams();
-
/// Adds indices to the subscribed indices, and updated subscription to component
/// \param blocking blocking mode to use with the component
status_t subscribeToConfigUpdate(
@@ -375,6 +370,11 @@
const std::vector<C2Param::Index> &indices,
c2_blocking_t blocking = C2_DONT_BLOCK);
+private:
+
+ /// initializes the standard MediaCodec to Codec 2.0 params mapping
+ void initializeStandardParams();
+
/// Gets SDK format from codec 2.0 reflected configuration
/// \param domain input/output bitmask
sp<AMessage> getFormatForDomain(
diff --git a/media/codec2/sfplugin/Codec2Buffer.cpp b/media/codec2/sfplugin/Codec2Buffer.cpp
index 4070478..2d3c70a 100644
--- a/media/codec2/sfplugin/Codec2Buffer.cpp
+++ b/media/codec2/sfplugin/Codec2Buffer.cpp
@@ -18,9 +18,14 @@
#define LOG_TAG "Codec2Buffer"
#include <utils/Log.h>
+#include <aidl/android/hardware/graphics/common/Cta861_3.h>
+#include <aidl/android/hardware/graphics/common/Smpte2086.h>
#include <android-base/properties.h>
#include <android/hardware/cas/native/1.0/types.h>
#include <android/hardware/drm/1.0/types.h>
+#include <android/hardware/graphics/common/1.2/types.h>
+#include <android/hardware/graphics/mapper/4.0/IMapper.h>
+#include <gralloctypes/Gralloc4.h>
#include <hidlmemory/FrameworkUtils.h>
#include <media/hardware/HardwareAPI.h>
#include <media/stagefright/CodecBase.h>
@@ -358,21 +363,22 @@
break;
case COLOR_FormatYUVP010:
+ // stride is in bytes
mediaImage->mPlane[mediaImage->Y].mOffset = 0;
mediaImage->mPlane[mediaImage->Y].mColInc = 2;
- mediaImage->mPlane[mediaImage->Y].mRowInc = stride * 2;
+ mediaImage->mPlane[mediaImage->Y].mRowInc = stride;
mediaImage->mPlane[mediaImage->Y].mHorizSubsampling = 1;
mediaImage->mPlane[mediaImage->Y].mVertSubsampling = 1;
- mediaImage->mPlane[mediaImage->U].mOffset = stride * vStride * 2;
+ mediaImage->mPlane[mediaImage->U].mOffset = stride * vStride;
mediaImage->mPlane[mediaImage->U].mColInc = 4;
- mediaImage->mPlane[mediaImage->U].mRowInc = stride * 2;
+ mediaImage->mPlane[mediaImage->U].mRowInc = stride;
mediaImage->mPlane[mediaImage->U].mHorizSubsampling = 2;
mediaImage->mPlane[mediaImage->U].mVertSubsampling = 2;
- mediaImage->mPlane[mediaImage->V].mOffset = stride * vStride * 2 + 2;
+ mediaImage->mPlane[mediaImage->V].mOffset = stride * vStride + 2;
mediaImage->mPlane[mediaImage->V].mColInc = 4;
- mediaImage->mPlane[mediaImage->V].mRowInc = stride * 2;
+ mediaImage->mPlane[mediaImage->V].mRowInc = stride;
mediaImage->mPlane[mediaImage->V].mHorizSubsampling = 2;
mediaImage->mPlane[mediaImage->V].mVertSubsampling = 2;
if (tryWrapping) {
@@ -533,8 +539,8 @@
mInitCheck = BAD_VALUE;
return;
}
- bufferSize += stride * vStride
- / plane.rowSampling / plane.colSampling * divUp(mAllocatedDepth, 8u);
+ // stride is in bytes
+ bufferSize += stride * vStride / plane.rowSampling / plane.colSampling;
}
mBackBufferSize = bufferSize;
@@ -787,8 +793,14 @@
ALOGD("format had no width / height");
return nullptr;
}
- // NOTE: we currently only support YUV420 formats for byte-buffer mode.
- sp<ABuffer> aBuffer(alloc(align(width, 16) * align(height, 16) * 3 / 2));
+ int32_t colorFormat = COLOR_FormatYUV420Flexible;
+ int32_t bpp = 12; // 8(Y) + 2(U) + 2(V)
+ if (format->findInt32(KEY_COLOR_FORMAT, &colorFormat)) {
+ if (colorFormat == COLOR_FormatYUVP010) {
+ bpp = 24; // 16(Y) + 4(U) + 4(V)
+ }
+ }
+ sp<ABuffer> aBuffer(alloc(align(width, 16) * align(height, 16) * bpp / 8));
return new ConstGraphicBlockBuffer(
format,
aBuffer,
@@ -941,4 +953,218 @@
return const_cast<native_handle_t *>(mBlock->handle());
}
+using ::aidl::android::hardware::graphics::common::Cta861_3;
+using ::aidl::android::hardware::graphics::common::Smpte2086;
+
+using ::android::gralloc4::MetadataType_Cta861_3;
+using ::android::gralloc4::MetadataType_Smpte2086;
+using ::android::gralloc4::MetadataType_Smpte2094_40;
+
+using ::android::hardware::Return;
+using ::android::hardware::hidl_vec;
+
+using Error4 = ::android::hardware::graphics::mapper::V4_0::Error;
+using IMapper4 = ::android::hardware::graphics::mapper::V4_0::IMapper;
+
+namespace {
+
+sp<IMapper4> GetMapper4() {
+ static sp<IMapper4> sMapper = IMapper4::getService();
+ return sMapper;
+}
+
+class NativeHandleDeleter {
+public:
+ explicit NativeHandleDeleter(native_handle_t *handle) : mHandle(handle) {}
+ ~NativeHandleDeleter() {
+ if (mHandle) {
+ native_handle_delete(mHandle);
+ }
+ }
+private:
+ native_handle_t *mHandle;
+};
+
+}  // namespace
+
+c2_status_t GetHdrMetadataFromGralloc4Handle(
+ const C2Handle *const handle,
+ std::shared_ptr<C2StreamHdrStaticMetadataInfo::input> *staticInfo,
+ std::shared_ptr<C2StreamHdrDynamicMetadataInfo::input> *dynamicInfo) {
+ c2_status_t err = C2_OK;
+ native_handle_t *nativeHandle = UnwrapNativeCodec2GrallocHandle(handle);
+ if (nativeHandle == nullptr) {
+ // Nothing to do
+ return err;
+ }
+ // TRICKY: UnwrapNativeCodec2GrallocHandle creates a new handle but
+ // does not clone the fds. Thus we need to delete the handle
+ // without closing it when going out of scope.
+ // NativeHandle cannot solve this problem, as it would close and
+    //         delete the handle, while we only need to delete it.
+ NativeHandleDeleter nhd(nativeHandle);
+ sp<IMapper4> mapper = GetMapper4();
+ if (!mapper) {
+ // Gralloc4 not supported; nothing to do
+ return err;
+ }
+ Error4 mapperErr = Error4::NONE;
+ if (staticInfo) {
+ staticInfo->reset(new C2StreamHdrStaticMetadataInfo::input(0u));
+ memset(&(*staticInfo)->mastering, 0, sizeof((*staticInfo)->mastering));
+ (*staticInfo)->maxCll = 0;
+ (*staticInfo)->maxFall = 0;
+ IMapper4::get_cb cb = [&mapperErr, staticInfo](Error4 err, const hidl_vec<uint8_t> &vec) {
+ mapperErr = err;
+ if (err != Error4::NONE) {
+ return;
+ }
+
+ std::optional<Smpte2086> smpte2086;
+ gralloc4::decodeSmpte2086(vec, &smpte2086);
+ if (smpte2086) {
+ (*staticInfo)->mastering.red.x = smpte2086->primaryRed.x;
+ (*staticInfo)->mastering.red.y = smpte2086->primaryRed.y;
+ (*staticInfo)->mastering.green.x = smpte2086->primaryGreen.x;
+ (*staticInfo)->mastering.green.y = smpte2086->primaryGreen.y;
+ (*staticInfo)->mastering.blue.x = smpte2086->primaryBlue.x;
+ (*staticInfo)->mastering.blue.y = smpte2086->primaryBlue.y;
+ (*staticInfo)->mastering.white.x = smpte2086->whitePoint.x;
+ (*staticInfo)->mastering.white.y = smpte2086->whitePoint.y;
+
+ (*staticInfo)->mastering.maxLuminance = smpte2086->maxLuminance;
+ (*staticInfo)->mastering.minLuminance = smpte2086->minLuminance;
+ } else {
+ mapperErr = Error4::BAD_VALUE;
+ }
+ };
+ Return<void> ret = mapper->get(nativeHandle, MetadataType_Smpte2086, cb);
+ if (!ret.isOk()) {
+ err = C2_REFUSED;
+ } else if (mapperErr != Error4::NONE) {
+ err = C2_CORRUPTED;
+ }
+ cb = [&mapperErr, staticInfo](Error4 err, const hidl_vec<uint8_t> &vec) {
+ mapperErr = err;
+ if (err != Error4::NONE) {
+ return;
+ }
+
+ std::optional<Cta861_3> cta861_3;
+ gralloc4::decodeCta861_3(vec, &cta861_3);
+ if (cta861_3) {
+ (*staticInfo)->maxCll = cta861_3->maxContentLightLevel;
+ (*staticInfo)->maxFall = cta861_3->maxFrameAverageLightLevel;
+ } else {
+ mapperErr = Error4::BAD_VALUE;
+ }
+ };
+ ret = mapper->get(nativeHandle, MetadataType_Cta861_3, cb);
+ if (!ret.isOk()) {
+ err = C2_REFUSED;
+ } else if (mapperErr != Error4::NONE) {
+ err = C2_CORRUPTED;
+ }
+ }
+ if (dynamicInfo) {
+ dynamicInfo->reset();
+ IMapper4::get_cb cb = [&mapperErr, dynamicInfo](Error4 err, const hidl_vec<uint8_t> &vec) {
+ mapperErr = err;
+ if (err != Error4::NONE) {
+ return;
+ }
+ if (!dynamicInfo) {
+ return;
+ }
+ *dynamicInfo = C2StreamHdrDynamicMetadataInfo::input::AllocShared(
+ vec.size(), 0u, C2Config::HDR_DYNAMIC_METADATA_TYPE_SMPTE_2094_40);
+ memcpy((*dynamicInfo)->m.data, vec.data(), vec.size());
+ };
+ Return<void> ret = mapper->get(nativeHandle, MetadataType_Smpte2094_40, cb);
+ if (!ret.isOk() || mapperErr != Error4::NONE) {
+ dynamicInfo->reset();
+ }
+ }
+
+ return err;
+}
+
+c2_status_t SetHdrMetadataToGralloc4Handle(
+ const std::shared_ptr<const C2StreamHdrStaticMetadataInfo::output> &staticInfo,
+ const std::shared_ptr<const C2StreamHdrDynamicMetadataInfo::output> &dynamicInfo,
+ const C2Handle *const handle) {
+ c2_status_t err = C2_OK;
+ native_handle_t *nativeHandle = UnwrapNativeCodec2GrallocHandle(handle);
+ if (nativeHandle == nullptr) {
+ // Nothing to do
+ return err;
+ }
+ // TRICKY: UnwrapNativeCodec2GrallocHandle creates a new handle but
+ // does not clone the fds. Thus we need to delete the handle
+ // without closing it when going out of scope.
+ NativeHandleDeleter nhd(nativeHandle);
+ sp<IMapper4> mapper = GetMapper4();
+ if (!mapper) {
+ // Gralloc4 not supported; nothing to do
+ return err;
+ }
+ if (staticInfo && *staticInfo) {
+ std::optional<Smpte2086> smpte2086 = Smpte2086{
+ {staticInfo->mastering.red.x, staticInfo->mastering.red.y},
+ {staticInfo->mastering.green.x, staticInfo->mastering.green.y},
+ {staticInfo->mastering.blue.x, staticInfo->mastering.blue.y},
+ {staticInfo->mastering.white.x, staticInfo->mastering.white.y},
+ staticInfo->mastering.maxLuminance,
+ staticInfo->mastering.minLuminance,
+ };
+ hidl_vec<uint8_t> vec;
+ if (gralloc4::encodeSmpte2086(smpte2086, &vec) == OK) {
+ Return<Error4> ret = mapper->set(nativeHandle, MetadataType_Smpte2086, vec);
+ if (!ret.isOk()) {
+ err = C2_REFUSED;
+ } else if (ret != Error4::NONE) {
+ err = C2_CORRUPTED;
+ }
+ }
+ std::optional<Cta861_3> cta861_3 = Cta861_3{
+ staticInfo->maxCll,
+ staticInfo->maxFall,
+ };
+ if (gralloc4::encodeCta861_3(cta861_3, &vec) == OK) {
+ Return<Error4> ret = mapper->set(nativeHandle, MetadataType_Cta861_3, vec);
+ if (!ret.isOk()) {
+ err = C2_REFUSED;
+ } else if (ret != Error4::NONE) {
+ err = C2_CORRUPTED;
+ }
+ }
+ }
+ if (dynamicInfo && *dynamicInfo) {
+ hidl_vec<uint8_t> vec;
+ vec.resize(dynamicInfo->flexCount());
+ memcpy(vec.data(), dynamicInfo->m.data, dynamicInfo->flexCount());
+ std::optional<IMapper4::MetadataType> metadataType;
+ switch (dynamicInfo->m.type_) {
+ case C2Config::HDR_DYNAMIC_METADATA_TYPE_SMPTE_2094_10:
+ // TODO
+ break;
+ case C2Config::HDR_DYNAMIC_METADATA_TYPE_SMPTE_2094_40:
+ metadataType = MetadataType_Smpte2094_40;
+ break;
+ }
+ if (metadataType) {
+ Return<Error4> ret = mapper->set(nativeHandle, *metadataType, vec);
+ if (!ret.isOk()) {
+ err = C2_REFUSED;
+ } else if (ret != Error4::NONE) {
+ err = C2_CORRUPTED;
+ }
+ } else {
+ err = C2_BAD_VALUE;
+ }
+ }
+
+ return err;
+}
+
} // namespace android
diff --git a/media/codec2/sfplugin/Codec2Buffer.h b/media/codec2/sfplugin/Codec2Buffer.h
index dc788cd..b02b042 100644
--- a/media/codec2/sfplugin/Codec2Buffer.h
+++ b/media/codec2/sfplugin/Codec2Buffer.h
@@ -19,6 +19,7 @@
#define CODEC2_BUFFER_H_
#include <C2Buffer.h>
+#include <C2Config.h>
#include <binder/IMemory.h>
#include <media/hardware/VideoAPI.h>
@@ -391,6 +392,36 @@
int32_t mHeapSeqNum;
};
+/**
+ * Get HDR metadata from Gralloc4 handle.
+ *
+ * \param[in] handle handle of the allocation
+ * \param[out] staticInfo HDR static info to be filled. Ignored if null;
+ * if |handle| is invalid or does not contain the metadata,
+ * the shared_ptr is reset.
+ * \param[out] dynamicInfo HDR dynamic info to be filled. Ignored if null;
+ * if |handle| is invalid or does not contain the metadata,
+ * the shared_ptr is reset.
+ * \return C2_OK if successful
+ */
+c2_status_t GetHdrMetadataFromGralloc4Handle(
+ const C2Handle *const handle,
+ std::shared_ptr<C2StreamHdrStaticMetadataInfo::input> *staticInfo,
+ std::shared_ptr<C2StreamHdrDynamicMetadataInfo::input> *dynamicInfo);
+
+/**
+ * Set HDR metadata to Gralloc4 handle.
+ *
+ * \param[in] staticInfo HDR static info to set. Ignored if null or invalid.
+ * \param[in] dynamicInfo HDR dynamic info to set. Ignored if null or invalid.
+ * \param[out] handle handle of the allocation.
+ * \return C2_OK if successful
+ */
+c2_status_t SetHdrMetadataToGralloc4Handle(
+ const std::shared_ptr<const C2StreamHdrStaticMetadataInfo::output> &staticInfo,
+ const std::shared_ptr<const C2StreamHdrDynamicMetadataInfo::output> &dynamicInfo,
+ const C2Handle *const handle);
+
} // namespace android
#endif // CODEC2_BUFFER_H_
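
A short usage sketch for the helpers declared above, mirroring the C2OMXNode change earlier in this patch (`block` and `c2Buffer` are as in that hunk):

    std::shared_ptr<C2StreamHdrStaticMetadataInfo::input> staticInfo;
    std::shared_ptr<C2StreamHdrDynamicMetadataInfo::input> dynamicInfo;
    GetHdrMetadataFromGralloc4Handle(block->handle(), &staticInfo, &dynamicInfo);
    if (staticInfo && *staticInfo) {
        c2Buffer->setInfo(staticInfo);   // SMPTE 2086 / CTA 861.3 static metadata
    }
    if (dynamicInfo && *dynamicInfo) {
        c2Buffer->setInfo(dynamicInfo);  // SMPTE 2094-40 dynamic metadata
    }
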
diff --git a/media/codec2/sfplugin/Codec2InfoBuilder.cpp b/media/codec2/sfplugin/Codec2InfoBuilder.cpp
index 7c4bfb6..63bd64b 100644
--- a/media/codec2/sfplugin/Codec2InfoBuilder.cpp
+++ b/media/codec2/sfplugin/Codec2InfoBuilder.cpp
@@ -54,6 +54,9 @@
using Traits = C2Component::Traits;
+// HAL pixel format -> framework color format
+typedef std::map<uint32_t, int32_t> PixelFormatMap;
+
namespace /* unnamed */ {
bool hasPrefix(const std::string& s, const char* prefix) {
@@ -67,6 +70,26 @@
s.compare(s.size() - suffixLen, suffixLen, suffix) == 0;
}
+std::optional<int32_t> findFrameworkColorFormat(
+ const C2FlexiblePixelFormatDescriptorStruct &desc) {
+ switch (desc.bitDepth) {
+ case 8u:
+ if (desc.layout == C2Color::PLANAR_PACKED
+ || desc.layout == C2Color::SEMIPLANAR_PACKED) {
+ return COLOR_FormatYUV420Flexible;
+ }
+ break;
+ case 10u:
+ if (desc.layout == C2Color::SEMIPLANAR_PACKED) {
+ return COLOR_FormatYUVP010;
+ }
+ break;
+ default:
+ break;
+ }
+ return std::nullopt;
+}
+
// returns true if component advertised supported profile level(s)
bool addSupportedProfileLevels(
std::shared_ptr<Codec2Client::Interface> intf,
@@ -96,9 +119,12 @@
return false;
}
- // determine if codec supports HDR
+    // determine if codec supports HDR; implies 10-bit support
bool supportsHdr = false;
+    // determine if codec supports HDR10Plus; implies 10-bit support
bool supportsHdr10Plus = false;
+ // determine if codec supports 10-bit format
+ bool supports10Bit = false;
std::vector<std::shared_ptr<C2ParamDescriptor>> paramDescs;
    c2_status_t err1 = intf->querySupportedParams(&paramDescs);
@@ -126,6 +152,10 @@
supportsHdr |= (mediaType == MIMETYPE_VIDEO_VP9);
supportsHdr |= (mediaType == MIMETYPE_VIDEO_AV1);
+ // HDR support implies 10-bit support.
+ // TODO: directly check this from the component interface
+ supports10Bit = (supportsHdr || supportsHdr10Plus);
+
bool added = false;
for (C2Value::Primitive profile : profileQuery[0].values.values) {
@@ -165,6 +195,12 @@
}
}
}
+ if (supports10Bit) {
+ auto bitnessMapper = C2Mapper::GetBitDepthProfileLevelMapper(trait.mediaType, 10);
+ if (bitnessMapper && bitnessMapper->mapProfile(pl.profile, &sdkProfile)) {
+ caps->addProfileLevel((uint32_t)sdkProfile, (uint32_t)sdkLevel);
+ }
+ }
} else if (!mapper) {
caps->addProfileLevel(pl.profile, pl.level);
}
@@ -198,27 +234,69 @@
void addSupportedColorFormats(
std::shared_ptr<Codec2Client::Interface> intf,
MediaCodecInfo::CapabilitiesWriter *caps,
- const Traits& trait, const std::string &mediaType) {
- (void)intf;
-
+ const Traits& trait, const std::string &mediaType,
+ const PixelFormatMap &pixelFormatMap) {
// TODO: get this from intf() as well, but how do we map them to
// MediaCodec color formats?
bool encoder = trait.kind == C2Component::KIND_ENCODER;
if (mediaType.find("video") != std::string::npos
|| mediaType.find("image") != std::string::npos) {
+
+ std::vector<C2FieldSupportedValuesQuery> query;
+ if (encoder) {
+ C2StreamPixelFormatInfo::input pixelFormat;
+ query.push_back(C2FieldSupportedValuesQuery::Possible(
+ C2ParamField::Make(pixelFormat, pixelFormat.value)));
+ } else {
+ C2StreamPixelFormatInfo::output pixelFormat;
+ query.push_back(C2FieldSupportedValuesQuery::Possible(
+ C2ParamField::Make(pixelFormat, pixelFormat.value)));
+ }
+ std::list<int32_t> supportedColorFormats;
+ if (intf->querySupportedValues(query, C2_DONT_BLOCK) == C2_OK) {
+ if (query[0].status == C2_OK) {
+ const C2FieldSupportedValues &fsv = query[0].values;
+ if (fsv.type == C2FieldSupportedValues::VALUES) {
+ for (C2Value::Primitive value : fsv.values) {
+ auto it = pixelFormatMap.find(value.u32);
+ if (it != pixelFormatMap.end()) {
+ auto it2 = std::find(
+ supportedColorFormats.begin(),
+ supportedColorFormats.end(),
+ it->second);
+ if (it2 == supportedColorFormats.end()) {
+ supportedColorFormats.push_back(it->second);
+ }
+ }
+ }
+ }
+ }
+ }
+ auto addDefaultColorFormat = [caps, &supportedColorFormats](int32_t colorFormat) {
+ caps->addColorFormat(colorFormat);
+ auto it = std::find(
+ supportedColorFormats.begin(), supportedColorFormats.end(), colorFormat);
+ if (it != supportedColorFormats.end()) {
+ supportedColorFormats.erase(it);
+ }
+ };
+
// vendor video codecs prefer opaque format
if (trait.name.find("android") == std::string::npos) {
- caps->addColorFormat(COLOR_FormatSurface);
+ addDefaultColorFormat(COLOR_FormatSurface);
}
- caps->addColorFormat(COLOR_FormatYUV420Flexible);
- caps->addColorFormat(COLOR_FormatYUV420Planar);
- caps->addColorFormat(COLOR_FormatYUV420SemiPlanar);
- caps->addColorFormat(COLOR_FormatYUV420PackedPlanar);
- caps->addColorFormat(COLOR_FormatYUV420PackedSemiPlanar);
+ addDefaultColorFormat(COLOR_FormatYUV420Flexible);
+ addDefaultColorFormat(COLOR_FormatYUV420Planar);
+ addDefaultColorFormat(COLOR_FormatYUV420SemiPlanar);
+ addDefaultColorFormat(COLOR_FormatYUV420PackedPlanar);
+ addDefaultColorFormat(COLOR_FormatYUV420PackedSemiPlanar);
// framework video encoders must support surface format, though it is unclear
// that they will be able to map it if it is opaque
if (encoder && trait.name.find("android") != std::string::npos) {
- caps->addColorFormat(COLOR_FormatSurface);
+ addDefaultColorFormat(COLOR_FormatSurface);
+ }
+ for (int32_t colorFormat : supportedColorFormats) {
+ caps->addColorFormat(colorFormat);
}
}
}
@@ -410,6 +488,7 @@
}
}
+ std::map<std::string, PixelFormatMap> nameToPixelFormatMap;
for (const Traits& trait : traits) {
C2Component::rank_t rank = trait.rank;
@@ -423,8 +502,9 @@
nameAndAliases.insert(nameAndAliases.begin(), trait.name);
for (const std::string &nameOrAlias : nameAndAliases) {
bool isAlias = trait.name != nameOrAlias;
+ std::shared_ptr<Codec2Client> client;
std::shared_ptr<Codec2Client::Interface> intf =
- Codec2Client::CreateInterfaceByName(nameOrAlias.c_str());
+ Codec2Client::CreateInterfaceByName(nameOrAlias.c_str(), &client);
if (!intf) {
ALOGD("could not create interface for %s'%s'",
isAlias ? "alias " : "",
@@ -618,7 +698,40 @@
caps->addProfileLevel(VP8ProfileMain, VP8Level_Version0);
}
}
- addSupportedColorFormats(intf, caps.get(), trait, mediaType);
+
+ auto it = nameToPixelFormatMap.find(client->getServiceName());
+ if (it == nameToPixelFormatMap.end()) {
+ it = nameToPixelFormatMap.try_emplace(client->getServiceName()).first;
+ PixelFormatMap &pixelFormatMap = it->second;
+ pixelFormatMap[HAL_PIXEL_FORMAT_YCBCR_420_888] = COLOR_FormatYUV420Flexible;
+ pixelFormatMap[HAL_PIXEL_FORMAT_YCBCR_P010] = COLOR_FormatYUVP010;
+ pixelFormatMap[HAL_PIXEL_FORMAT_RGBA_1010102] = COLOR_Format32bitABGR2101010;
+ pixelFormatMap[HAL_PIXEL_FORMAT_RGBA_FP16] = COLOR_Format64bitABGRFloat;
+
+ std::shared_ptr<C2StoreFlexiblePixelFormatDescriptorsInfo> pixelFormatInfo;
+ std::vector<std::unique_ptr<C2Param>> heapParams;
+ if (client->query(
+ {},
+ {C2StoreFlexiblePixelFormatDescriptorsInfo::PARAM_TYPE},
+ C2_MAY_BLOCK,
+ &heapParams) == C2_OK
+ && heapParams.size() == 1u) {
+ pixelFormatInfo.reset(C2StoreFlexiblePixelFormatDescriptorsInfo::From(
+ heapParams[0].release()));
+ }
+ if (pixelFormatInfo && *pixelFormatInfo) {
+ for (size_t i = 0; i < pixelFormatInfo->flexCount(); ++i) {
+ C2FlexiblePixelFormatDescriptorStruct &desc =
+ pixelFormatInfo->m.values[i];
+ std::optional<int32_t> colorFormat = findFrameworkColorFormat(desc);
+ if (colorFormat) {
+ pixelFormatMap[desc.pixelFormat] = *colorFormat;
+ }
+ }
+ }
+ }
+ addSupportedColorFormats(
+ intf, caps.get(), trait, mediaType, it->second);
}
}
}
diff --git a/media/codec2/sfplugin/FrameReassembler.cpp b/media/codec2/sfplugin/FrameReassembler.cpp
index af054c7..cb8b6ab 100644
--- a/media/codec2/sfplugin/FrameReassembler.cpp
+++ b/media/codec2/sfplugin/FrameReassembler.cpp
@@ -88,8 +88,7 @@
const sp<MediaCodecBuffer> &buffer,
std::list<std::unique_ptr<C2Work>> *items) {
int64_t timeUs;
- if (buffer->size() == 0u
- || !buffer->meta()->findInt64("timeUs", &timeUs)) {
+ if (!buffer->meta()->findInt64("timeUs", &timeUs)) {
return C2_BAD_VALUE;
}
diff --git a/media/codec2/sfplugin/tests/CCodecConfig_test.cpp b/media/codec2/sfplugin/tests/CCodecConfig_test.cpp
index 7c660dc..3615289 100644
--- a/media/codec2/sfplugin/tests/CCodecConfig_test.cpp
+++ b/media/codec2/sfplugin/tests/CCodecConfig_test.cpp
@@ -224,6 +224,17 @@
Copy<C2StreamBitrateInfo::output, C2StreamBitrateInfo::input>,
mInputBitrate)
.build());
+
+ addParameter(
+ DefineParam(mOutputProfileLevel, C2_PARAMKEY_PROFILE_LEVEL)
+ .withDefault(new C2StreamProfileLevelInfo::output(
+ 0u, PROFILE_UNUSED, LEVEL_UNUSED))
+ .withFields({
+ C2F(mOutputProfileLevel, profile).any(),
+ C2F(mOutputProfileLevel, level).any(),
+ })
+ .withSetter(Setter<C2StreamProfileLevelInfo::output>)
+ .build());
}
// TODO: more SDK params
@@ -241,6 +252,8 @@
std::shared_ptr<C2StreamPixelAspectRatioInfo::output> mPixelAspectRatio;
std::shared_ptr<C2StreamBitrateInfo::input> mInputBitrate;
std::shared_ptr<C2StreamBitrateInfo::output> mOutputBitrate;
+ std::shared_ptr<C2StreamProfileLevelInfo::input> mInputProfileLevel;
+ std::shared_ptr<C2StreamProfileLevelInfo::output> mOutputProfileLevel;
template<typename T>
static C2R Setter(bool, C2P<T> &) {
@@ -576,4 +589,51 @@
<< "mOutputFormat = " << mConfig.mOutputFormat->debugString().c_str();
}
+typedef std::tuple<std::string, C2Config::profile_t, int32_t> HdrProfilesParams;
+
+class HdrProfilesTest
+ : public CCodecConfigTest,
+ public ::testing::WithParamInterface<HdrProfilesParams> {
+};
+
+TEST_P(HdrProfilesTest, SetFromSdk) {
+ HdrProfilesParams params = GetParam();
+ std::string mediaType = std::get<0>(params);
+ C2Config::profile_t c2Profile = std::get<1>(params);
+ int32_t sdkProfile = std::get<2>(params);
+
+ init(C2Component::DOMAIN_VIDEO, C2Component::KIND_ENCODER, mediaType.c_str());
+
+ ASSERT_EQ(OK, mConfig.initialize(mReflector, mConfigurable));
+
+ sp<AMessage> format{new AMessage};
+ format->setInt32(KEY_PROFILE, sdkProfile);
+
+ std::vector<std::unique_ptr<C2Param>> configUpdate;
+ ASSERT_EQ(OK, mConfig.getConfigUpdateFromSdkParams(
+ mConfigurable, format, D::ALL, C2_MAY_BLOCK, &configUpdate));
+
+ ASSERT_EQ(1u, configUpdate.size());
+ C2StreamProfileLevelInfo::input *pl =
+ FindParam<std::remove_pointer<decltype(pl)>::type>(configUpdate);
+ ASSERT_NE(nullptr, pl);
+ ASSERT_EQ(c2Profile, pl->profile);
+}
+
+HdrProfilesParams kHdrProfilesParams[] = {
+ std::make_tuple(MIMETYPE_VIDEO_HEVC, PROFILE_HEVC_MAIN_10, HEVCProfileMain10HDR10),
+ std::make_tuple(MIMETYPE_VIDEO_HEVC, PROFILE_HEVC_MAIN_10, HEVCProfileMain10HDR10Plus),
+ std::make_tuple(MIMETYPE_VIDEO_VP9, PROFILE_VP9_2, VP9Profile2HDR),
+ std::make_tuple(MIMETYPE_VIDEO_VP9, PROFILE_VP9_2, VP9Profile2HDR10Plus),
+ std::make_tuple(MIMETYPE_VIDEO_VP9, PROFILE_VP9_3, VP9Profile3HDR),
+ std::make_tuple(MIMETYPE_VIDEO_VP9, PROFILE_VP9_3, VP9Profile3HDR10Plus),
+ std::make_tuple(MIMETYPE_VIDEO_AV1, PROFILE_AV1_0, AV1ProfileMain10HDR10),
+ std::make_tuple(MIMETYPE_VIDEO_AV1, PROFILE_AV1_0, AV1ProfileMain10HDR10Plus),
+};
+
+INSTANTIATE_TEST_SUITE_P(
+ CCodecConfig,
+ HdrProfilesTest,
+ ::testing::ValuesIn(kHdrProfilesParams));
+
} // namespace android
diff --git a/media/codec2/sfplugin/tests/FrameReassembler_test.cpp b/media/codec2/sfplugin/tests/FrameReassembler_test.cpp
index 6738ee7..0be934a 100644
--- a/media/codec2/sfplugin/tests/FrameReassembler_test.cpp
+++ b/media/codec2/sfplugin/tests/FrameReassembler_test.cpp
@@ -53,7 +53,8 @@
C2Config::pcm_encoding_t encoding,
size_t inputFrameSizeInBytes,
size_t count,
- size_t expectedOutputSize) {
+ size_t expectedOutputSize,
+ bool separateEos) {
FrameReassembler frameReassembler;
frameReassembler.init(
mPool,
@@ -67,7 +68,7 @@
size_t inputIndex = 0, outputIndex = 0;
size_t expectCount = 0;
- for (size_t i = 0; i < count; ++i) {
+ for (size_t i = 0; i < count + (separateEos ? 1 : 0); ++i) {
sp<MediaCodecBuffer> buffer = new MediaCodecBuffer(
new AMessage, new ABuffer(inputFrameSizeInBytes));
buffer->setRange(0, inputFrameSizeInBytes);
@@ -77,8 +78,12 @@
if (i == count - 1) {
buffer->meta()->setInt32("eos", 1);
}
- for (size_t j = 0; j < inputFrameSizeInBytes; ++j, ++inputIndex) {
- buffer->base()[j] = (inputIndex & 0xFF);
+ if (i == count && separateEos) {
+ buffer->setRange(0, 0);
+ } else {
+ for (size_t j = 0; j < inputFrameSizeInBytes; ++j, ++inputIndex) {
+ buffer->base()[j] = (inputIndex & 0xFF);
+ }
}
std::list<std::unique_ptr<C2Work>> items;
ASSERT_EQ(C2_OK, frameReassembler.process(buffer, &items));
@@ -105,7 +110,8 @@
ASSERT_EQ(encoderFrameSize * BytesPerSample(encoding), view.capacity());
for (size_t j = 0; j < view.capacity(); ++j, ++outputIndex) {
ASSERT_TRUE(outputIndex < inputIndex
- || inputIndex == inputFrameSizeInBytes * count);
+ || inputIndex == inputFrameSizeInBytes * count)
+ << "inputIndex = " << inputIndex << " outputIndex = " << outputIndex;
uint8_t expected = outputIndex < inputIndex ? (outputIndex & 0xFF) : 0;
if (expectCount < 10) {
++expectCount;
@@ -137,204 +143,239 @@
// Push frames with exactly the same size as the encoder requested.
TEST_F(FrameReassemblerTest, PushExactFrameSize) {
ASSERT_EQ(OK, initStatus());
- testPushSameSize(
- 1024 /* frame size in samples */,
- 48000 /* sample rate */,
- 1 /* channel count */,
- PCM_8,
- 1024 /* input frame size in bytes = 1024 samples * 1 channel * 1 bytes/sample */,
- 10 /* count */,
- 10240 /* expected output size = 10 * 1024 bytes/frame */);
- testPushSameSize(
- 1024 /* frame size in samples */,
- 48000 /* sample rate */,
- 1 /* channel count */,
- PCM_16,
- 2048 /* input frame size in bytes = 1024 samples * 1 channel * 2 bytes/sample */,
- 10 /* count */,
- 20480 /* expected output size = 10 * 2048 bytes/frame */);
- testPushSameSize(
- 1024 /* frame size in samples */,
- 48000 /* sample rate */,
- 1 /* channel count */,
- PCM_FLOAT,
- 4096 /* input frame size in bytes = 1024 samples * 1 channel * 4 bytes/sample */,
- 10 /* count */,
- 40960 /* expected output size = 10 * 4096 bytes/frame */);
+ for (bool separateEos : {false, true}) {
+ testPushSameSize(
+ 1024 /* frame size in samples */,
+ 48000 /* sample rate */,
+ 1 /* channel count */,
+ PCM_8,
+ 1024 /* input frame size in bytes = 1024 samples * 1 channel * 1 bytes/sample */,
+ 10 /* count */,
+ 10240 /* expected output size = 10 * 1024 bytes/frame */,
+ separateEos);
+ testPushSameSize(
+ 1024 /* frame size in samples */,
+ 48000 /* sample rate */,
+ 1 /* channel count */,
+ PCM_16,
+ 2048 /* input frame size in bytes = 1024 samples * 1 channel * 2 bytes/sample */,
+ 10 /* count */,
+ 20480 /* expected output size = 10 * 2048 bytes/frame */,
+ separateEos);
+ testPushSameSize(
+ 1024 /* frame size in samples */,
+ 48000 /* sample rate */,
+ 1 /* channel count */,
+ PCM_FLOAT,
+ 4096 /* input frame size in bytes = 1024 samples * 1 channel * 4 bytes/sample */,
+ 10 /* count */,
+ 40960 /* expected output size = 10 * 4096 bytes/frame */,
+ separateEos);
+ }
}
// Push frames with half the size that the encoder requested.
TEST_F(FrameReassemblerTest, PushHalfFrameSize) {
ASSERT_EQ(OK, initStatus());
- testPushSameSize(
- 1024 /* frame size in samples */,
- 48000 /* sample rate */,
- 1 /* channel count */,
- PCM_8,
- 512 /* input frame size in bytes = 512 samples * 1 channel * 1 bytes per sample */,
- 10 /* count */,
- 5120 /* expected output size = 5 * 1024 bytes/frame */);
- testPushSameSize(
- 1024 /* frame size in samples */,
- 48000 /* sample rate */,
- 1 /* channel count */,
- PCM_16,
- 1024 /* input frame size in bytes = 512 samples * 1 channel * 2 bytes per sample */,
- 10 /* count */,
- 10240 /* expected output size = 5 * 2048 bytes/frame */);
- testPushSameSize(
- 1024 /* frame size in samples */,
- 48000 /* sample rate */,
- 1 /* channel count */,
- PCM_FLOAT,
- 2048 /* input frame size in bytes = 512 samples * 1 channel * 4 bytes per sample */,
- 10 /* count */,
- 20480 /* expected output size = 5 * 4096 bytes/frame */);
+ for (bool separateEos : {false, true}) {
+ testPushSameSize(
+ 1024 /* frame size in samples */,
+ 48000 /* sample rate */,
+ 1 /* channel count */,
+ PCM_8,
+ 512 /* input frame size in bytes = 512 samples * 1 channel * 1 bytes/sample */,
+ 10 /* count */,
+ 5120 /* expected output size = 5 * 1024 bytes/frame */,
+ separateEos);
+ testPushSameSize(
+ 1024 /* frame size in samples */,
+ 48000 /* sample rate */,
+ 1 /* channel count */,
+ PCM_16,
+ 1024 /* input frame size in bytes = 512 samples * 1 channel * 2 bytes/sample */,
+ 10 /* count */,
+ 10240 /* expected output size = 5 * 2048 bytes/frame */,
+ separateEos);
+ testPushSameSize(
+ 1024 /* frame size in samples */,
+ 48000 /* sample rate */,
+ 1 /* channel count */,
+ PCM_FLOAT,
+ 2048 /* input frame size in bytes = 512 samples * 1 channel * 4 bytes/sample */,
+ 10 /* count */,
+ 20480 /* expected output size = 5 * 4096 bytes/frame */,
+ separateEos);
+ }
}
// Push frames with twice the size that the encoder requested.
TEST_F(FrameReassemblerTest, PushDoubleFrameSize) {
ASSERT_EQ(OK, initStatus());
- testPushSameSize(
- 1024 /* frame size in samples */,
- 48000 /* sample rate */,
- 1 /* channel count */,
- PCM_8,
- 2048 /* input frame size in bytes = 2048 samples * 1 channel * 1 bytes per sample */,
- 10 /* count */,
- 20480 /* expected output size = 20 * 1024 bytes/frame */);
- testPushSameSize(
- 1024 /* frame size in samples */,
- 48000 /* sample rate */,
- 1 /* channel count */,
- PCM_16,
- 4096 /* input frame size in bytes = 2048 samples * 1 channel * 2 bytes per sample */,
- 10 /* count */,
- 40960 /* expected output size = 20 * 2048 bytes/frame */);
- testPushSameSize(
- 1024 /* frame size in samples */,
- 48000 /* sample rate */,
- 1 /* channel count */,
- PCM_FLOAT,
- 8192 /* input frame size in bytes = 2048 samples * 1 channel * 4 bytes per sample */,
- 10 /* count */,
- 81920 /* expected output size = 20 * 4096 bytes/frame */);
+ for (bool separateEos : {false, true}) {
+ testPushSameSize(
+ 1024 /* frame size in samples */,
+ 48000 /* sample rate */,
+ 1 /* channel count */,
+ PCM_8,
+ 2048 /* input frame size in bytes = 2048 samples * 1 channel * 1 bytes/sample */,
+ 10 /* count */,
+ 20480 /* expected output size = 20 * 1024 bytes/frame */,
+ separateEos);
+ testPushSameSize(
+ 1024 /* frame size in samples */,
+ 48000 /* sample rate */,
+ 1 /* channel count */,
+ PCM_16,
+ 4096 /* input frame size in bytes = 2048 samples * 1 channel * 2 bytes/sample */,
+ 10 /* count */,
+ 40960 /* expected output size = 20 * 2048 bytes/frame */,
+ separateEos);
+ testPushSameSize(
+ 1024 /* frame size in samples */,
+ 48000 /* sample rate */,
+ 1 /* channel count */,
+ PCM_FLOAT,
+ 8192 /* input frame size in bytes = 2048 samples * 1 channel * 4 bytes/sample */,
+ 10 /* count */,
+ 81920 /* expected output size = 20 * 4096 bytes/frame */,
+ separateEos);
+ }
}
// Push frames with a little bit larger (+5 samples) than the requested size.
TEST_F(FrameReassemblerTest, PushLittleLargerFrameSize) {
ASSERT_EQ(OK, initStatus());
- testPushSameSize(
- 1024 /* frame size in samples */,
- 48000 /* sample rate */,
- 1 /* channel count */,
- PCM_8,
- 1029 /* input frame size in bytes = 1029 samples * 1 channel * 1 bytes per sample */,
- 10 /* count */,
- 11264 /* expected output size = 11 * 1024 bytes/frame */);
- testPushSameSize(
- 1024 /* frame size in samples */,
- 48000 /* sample rate */,
- 1 /* channel count */,
- PCM_16,
- 2058 /* input frame size in bytes = 1029 samples * 1 channel * 2 bytes per sample */,
- 10 /* count */,
- 22528 /* expected output size = 11 * 2048 bytes/frame */);
- testPushSameSize(
- 1024 /* frame size in samples */,
- 48000 /* sample rate */,
- 1 /* channel count */,
- PCM_FLOAT,
- 4116 /* input frame size in bytes = 1029 samples * 1 channel * 4 bytes per sample */,
- 10 /* count */,
- 45056 /* expected output size = 11 * 4096 bytes/frame */);
+ for (bool separateEos : {false, true}) {
+ testPushSameSize(
+ 1024 /* frame size in samples */,
+ 48000 /* sample rate */,
+ 1 /* channel count */,
+ PCM_8,
+ 1029 /* input frame size in bytes = 1029 samples * 1 channel * 1 bytes/sample */,
+ 10 /* count */,
+ 11264 /* expected output size = 11 * 1024 bytes/frame */,
+ separateEos);
+ testPushSameSize(
+ 1024 /* frame size in samples */,
+ 48000 /* sample rate */,
+ 1 /* channel count */,
+ PCM_16,
+ 2058 /* input frame size in bytes = 1029 samples * 1 channel * 2 bytes/sample */,
+ 10 /* count */,
+ 22528 /* expected output size = 11 * 2048 bytes/frame */,
+ separateEos);
+ testPushSameSize(
+ 1024 /* frame size in samples */,
+ 48000 /* sample rate */,
+ 1 /* channel count */,
+ PCM_FLOAT,
+ 4116 /* input frame size in bytes = 1029 samples * 1 channel * 4 bytes/sample */,
+ 10 /* count */,
+ 45056 /* expected output size = 11 * 4096 bytes/frame */,
+ separateEos);
+ }
}
// Push frames with a little bit smaller (-5 samples) than the requested size.
TEST_F(FrameReassemblerTest, PushLittleSmallerFrameSize) {
ASSERT_EQ(OK, initStatus());
- testPushSameSize(
- 1024 /* frame size in samples */,
- 48000 /* sample rate */,
- 1 /* channel count */,
- PCM_8,
- 1019 /* input frame size in bytes = 1019 samples * 1 channel * 1 bytes per sample */,
- 10 /* count */,
- 10240 /* expected output size = 10 * 1024 bytes/frame */);
- testPushSameSize(
- 1024 /* frame size in samples */,
- 48000 /* sample rate */,
- 1 /* channel count */,
- PCM_16,
- 2038 /* input frame size in bytes = 1019 samples * 1 channel * 2 bytes per sample */,
- 10 /* count */,
- 20480 /* expected output size = 10 * 2048 bytes/frame */);
- testPushSameSize(
- 1024 /* frame size in samples */,
- 48000 /* sample rate */,
- 1 /* channel count */,
- PCM_FLOAT,
- 4076 /* input frame size in bytes = 1019 samples * 1 channel * 4 bytes per sample */,
- 10 /* count */,
- 40960 /* expected output size = 10 * 4096 bytes/frame */);
+ for (bool separateEos : {false, true}) {
+ testPushSameSize(
+ 1024 /* frame size in samples */,
+ 48000 /* sample rate */,
+ 1 /* channel count */,
+ PCM_8,
+ 1019 /* input frame size in bytes = 1019 samples * 1 channel * 1 bytes/sample */,
+ 10 /* count */,
+ 10240 /* expected output size = 10 * 1024 bytes/frame */,
+ separateEos);
+ testPushSameSize(
+ 1024 /* frame size in samples */,
+ 48000 /* sample rate */,
+ 1 /* channel count */,
+ PCM_16,
+ 2038 /* input frame size in bytes = 1019 samples * 1 channel * 2 bytes/sample */,
+ 10 /* count */,
+ 20480 /* expected output size = 10 * 2048 bytes/frame */,
+ separateEos);
+ testPushSameSize(
+ 1024 /* frame size in samples */,
+ 48000 /* sample rate */,
+ 1 /* channel count */,
+ PCM_FLOAT,
+ 4076 /* input frame size in bytes = 1019 samples * 1 channel * 4 bytes/sample */,
+ 10 /* count */,
+ 40960 /* expected output size = 10 * 4096 bytes/frame */,
+ separateEos);
+ }
}
// Push single-byte frames
TEST_F(FrameReassemblerTest, PushSingleByte) {
ASSERT_EQ(OK, initStatus());
- testPushSameSize(
- 1024 /* frame size in samples */,
- 48000 /* sample rate */,
- 1 /* channel count */,
- PCM_8,
- 1 /* input frame size in bytes */,
- 100000 /* count */,
- 100352 /* expected output size = 98 * 1024 bytes/frame */);
- testPushSameSize(
- 1024 /* frame size in samples */,
- 48000 /* sample rate */,
- 1 /* channel count */,
- PCM_16,
- 1 /* input frame size in bytes */,
- 100000 /* count */,
- 100352 /* expected output size = 49 * 2048 bytes/frame */);
- testPushSameSize(
- 1024 /* frame size in samples */,
- 48000 /* sample rate */,
- 1 /* channel count */,
- PCM_FLOAT,
- 1 /* input frame size in bytes */,
- 100000 /* count */,
- 102400 /* expected output size = 25 * 4096 bytes/frame */);
+ for (bool separateEos : {false, true}) {
+ testPushSameSize(
+ 1024 /* frame size in samples */,
+ 48000 /* sample rate */,
+ 1 /* channel count */,
+ PCM_8,
+ 1 /* input frame size in bytes */,
+ 100000 /* count */,
+ 100352 /* expected output size = 98 * 1024 bytes/frame */,
+ separateEos);
+ testPushSameSize(
+ 1024 /* frame size in samples */,
+ 48000 /* sample rate */,
+ 1 /* channel count */,
+ PCM_16,
+ 1 /* input frame size in bytes */,
+ 100000 /* count */,
+ 100352 /* expected output size = 49 * 2048 bytes/frame */,
+ separateEos);
+ testPushSameSize(
+ 1024 /* frame size in samples */,
+ 48000 /* sample rate */,
+ 1 /* channel count */,
+ PCM_FLOAT,
+ 1 /* input frame size in bytes */,
+ 100000 /* count */,
+ 102400 /* expected output size = 25 * 4096 bytes/frame */,
+ separateEos);
+ }
}
// Push one big chunk.
TEST_F(FrameReassemblerTest, PushBigChunk) {
ASSERT_EQ(OK, initStatus());
- testPushSameSize(
- 1024 /* frame size in samples */,
- 48000 /* sample rate */,
- 1 /* channel count */,
- PCM_8,
- 100000 /* input frame size in bytes */,
- 1 /* count */,
- 100352 /* expected output size = 98 * 1024 bytes/frame */);
- testPushSameSize(
- 1024 /* frame size in samples */,
- 48000 /* sample rate */,
- 1 /* channel count */,
- PCM_16,
- 100000 /* input frame size in bytes */,
- 1 /* count */,
- 100352 /* expected output size = 49 * 2048 bytes/frame */);
- testPushSameSize(
- 1024 /* frame size in samples */,
- 48000 /* sample rate */,
- 1 /* channel count */,
- PCM_FLOAT,
- 100000 /* input frame size in bytes */,
- 1 /* count */,
- 102400 /* expected output size = 25 * 4096 bytes/frame */);
+ for (bool separateEos : {false, true}) {
+ testPushSameSize(
+ 1024 /* frame size in samples */,
+ 48000 /* sample rate */,
+ 1 /* channel count */,
+ PCM_8,
+ 100000 /* input frame size in bytes */,
+ 1 /* count */,
+ 100352 /* expected output size = 98 * 1024 bytes/frame */,
+ separateEos);
+ testPushSameSize(
+ 1024 /* frame size in samples */,
+ 48000 /* sample rate */,
+ 1 /* channel count */,
+ PCM_16,
+ 100000 /* input frame size in bytes */,
+ 1 /* count */,
+ 100352 /* expected output size = 49 * 2048 bytes/frame */,
+ separateEos);
+ testPushSameSize(
+ 1024 /* frame size in samples */,
+ 48000 /* sample rate */,
+ 1 /* channel count */,
+ PCM_FLOAT,
+ 100000 /* input frame size in bytes */,
+ 1 /* count */,
+ 102400 /* expected output size = 25 * 4096 bytes/frame */,
+ separateEos);
+ }
}
} // namespace android
diff --git a/media/codec2/sfplugin/utils/Codec2BufferUtils.cpp b/media/codec2/sfplugin/utils/Codec2BufferUtils.cpp
index 5f87c66..bff9db5 100644
--- a/media/codec2/sfplugin/utils/Codec2BufferUtils.cpp
+++ b/media/codec2/sfplugin/utils/Codec2BufferUtils.cpp
@@ -88,7 +88,7 @@
uint32_t planeW = img->mWidth / plane.colSampling;
uint32_t planeH = img->mHeight / plane.rowSampling;
- bool canCopyByRow = (plane.colInc == 1) && (img->mPlane[i].mColInc == 1);
+ bool canCopyByRow = (plane.colInc == bpp) && (img->mPlane[i].mColInc == bpp);
bool canCopyByPlane = canCopyByRow && (plane.rowInc == img->mPlane[i].mRowInc);
if (canCopyByPlane) {
MemCopier<ToMediaImage, 0>::copy(imgRow, viewRow, plane.rowInc * planeH);
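The canCopyByRow fix above only allows a whole row to be bulk-copied when the column increment equals the sample size in bytes (bpp), which matters once 16-bit formats such as P010 are involved. A minimal stand-alone sketch of that criterion, not part of the patch; all names below are illustrative:

    #include <cstdint>
    #include <cstring>

    // Copy one plane row: bulk-copy when samples are contiguous at the sample
    // stride (colInc == bpp), otherwise copy sample by sample.
    void copyRow(uint8_t* dst, const uint8_t* src,
                 uint32_t widthInSamples, uint32_t bpp, int32_t srcColInc) {
        if (srcColInc == static_cast<int32_t>(bpp)) {
            std::memcpy(dst, src, static_cast<size_t>(widthInSamples) * bpp);
        } else {
            for (uint32_t x = 0; x < widthInSamples; ++x) {
                std::memcpy(dst + static_cast<size_t>(x) * bpp,
                            src + static_cast<size_t>(x) * srcColInc, bpp);
            }
        }
    }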
diff --git a/media/codec2/sfplugin/utils/Codec2Mapper.cpp b/media/codec2/sfplugin/utils/Codec2Mapper.cpp
index 4d939fa..93f29ca 100644
--- a/media/codec2/sfplugin/utils/Codec2Mapper.cpp
+++ b/media/codec2/sfplugin/utils/Codec2Mapper.cpp
@@ -18,6 +18,9 @@
#define LOG_TAG "Codec2Mapper"
#include <utils/Log.h>
+#include <map>
+#include <optional>
+
#include <media/stagefright/MediaCodecConstants.h>
#include <media/stagefright/SurfaceUtils.h>
#include <media/stagefright/foundation/ALookup.h>
@@ -167,6 +170,9 @@
{ C2Config::LEVEL_DV_MAIN_UHD_30, DolbyVisionLevelUhd30 },
{ C2Config::LEVEL_DV_MAIN_UHD_48, DolbyVisionLevelUhd48 },
{ C2Config::LEVEL_DV_MAIN_UHD_60, DolbyVisionLevelUhd60 },
+ { C2Config::LEVEL_DV_MAIN_UHD_120, DolbyVisionLevelUhd120 },
+ { C2Config::LEVEL_DV_MAIN_8K_30, DolbyVisionLevel8k30 },
+ { C2Config::LEVEL_DV_MAIN_8K_60, DolbyVisionLevel8k60 },
// high tiers are not yet supported on android, for now map them to main tier
{ C2Config::LEVEL_DV_HIGH_HD_24, DolbyVisionLevelHd24 },
@@ -178,6 +184,9 @@
{ C2Config::LEVEL_DV_HIGH_UHD_30, DolbyVisionLevelUhd30 },
{ C2Config::LEVEL_DV_HIGH_UHD_48, DolbyVisionLevelUhd48 },
{ C2Config::LEVEL_DV_HIGH_UHD_60, DolbyVisionLevelUhd60 },
+ { C2Config::LEVEL_DV_HIGH_UHD_120, DolbyVisionLevelUhd120 },
+ { C2Config::LEVEL_DV_HIGH_8K_30, DolbyVisionLevel8k30 },
+ { C2Config::LEVEL_DV_HIGH_8K_60, DolbyVisionLevel8k60 },
};
ALookup<C2Config::profile_t, int32_t> sDolbyVisionProfiles = {
@@ -255,6 +264,8 @@
{ C2Config::PROFILE_HEVC_MAIN_STILL, HEVCProfileMainStill },
{ C2Config::PROFILE_HEVC_MAIN_INTRA, HEVCProfileMain },
{ C2Config::PROFILE_HEVC_MAIN_10_INTRA, HEVCProfileMain10 },
+ { C2Config::PROFILE_HEVC_MAIN_10, HEVCProfileMain10HDR10 },
+ { C2Config::PROFILE_HEVC_MAIN_10, HEVCProfileMain10HDR10Plus },
};
ALookup<C2Config::profile_t, int32_t> sHevcHdrProfiles = {
@@ -381,15 +392,17 @@
{ C2Config::LEVEL_AV1_7_3, AV1Level73 },
};
-
ALookup<C2Config::profile_t, int32_t> sAv1Profiles = {
- // TODO: will need to disambiguate between Main8 and Main10
{ C2Config::PROFILE_AV1_0, AV1ProfileMain8 },
{ C2Config::PROFILE_AV1_0, AV1ProfileMain10 },
{ C2Config::PROFILE_AV1_0, AV1ProfileMain10HDR10 },
{ C2Config::PROFILE_AV1_0, AV1ProfileMain10HDR10Plus },
};
+ALookup<C2Config::profile_t, int32_t> sAv1TenbitProfiles = {
+ { C2Config::PROFILE_AV1_0, AV1ProfileMain10 },
+};
+
ALookup<C2Config::profile_t, int32_t> sAv1HdrProfiles = {
{ C2Config::PROFILE_AV1_0, AV1ProfileMain10HDR10 },
};
@@ -398,6 +411,30 @@
{ C2Config::PROFILE_AV1_0, AV1ProfileMain10HDR10Plus },
};
+// HAL_PIXEL_FORMAT_* -> COLOR_Format*
+ALookup<uint32_t, int32_t> sPixelFormats = {
+ { HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, COLOR_FormatSurface },
+
+ // YCBCR_420_888 maps to YUV420Flexible and vice versa
+ { HAL_PIXEL_FORMAT_YCBCR_420_888, COLOR_FormatYUV420Flexible },
+
+ // Fallback matches for YCBCR_420_888
+ { HAL_PIXEL_FORMAT_YCBCR_420_888, COLOR_FormatYUV420Planar },
+ { HAL_PIXEL_FORMAT_YCBCR_420_888, COLOR_FormatYUV420SemiPlanar },
+ { HAL_PIXEL_FORMAT_YCBCR_420_888, COLOR_FormatYUV420PackedPlanar },
+ { HAL_PIXEL_FORMAT_YCBCR_420_888, COLOR_FormatYUV420PackedSemiPlanar },
+
+ // Fallback matches for YUV420Flexible
+ { HAL_PIXEL_FORMAT_YCRCB_420_SP, COLOR_FormatYUV420Flexible },
+ { HAL_PIXEL_FORMAT_YV12, COLOR_FormatYUV420Flexible },
+
+ { HAL_PIXEL_FORMAT_YCBCR_422_SP, COLOR_FormatYUV422PackedSemiPlanar },
+ { HAL_PIXEL_FORMAT_YCBCR_422_I, COLOR_FormatYUV422PackedPlanar },
+ { HAL_PIXEL_FORMAT_YCBCR_P010, COLOR_FormatYUVP010 },
+ { HAL_PIXEL_FORMAT_RGBA_1010102, COLOR_Format32bitABGR2101010 },
+ { HAL_PIXEL_FORMAT_RGBA_FP16, COLOR_Format64bitABGRFloat },
+};
+
/**
* A helper that passes through vendor extension profile and level values.
*/
@@ -603,9 +640,9 @@
};
struct Av1ProfileLevelMapper : ProfileLevelMapperHelper {
- Av1ProfileLevelMapper(bool isHdr = false, bool isHdr10Plus = false) :
+ Av1ProfileLevelMapper(bool isHdr = false, bool isHdr10Plus = false, int32_t bitDepth = 8) :
ProfileLevelMapperHelper(),
- mIsHdr(isHdr), mIsHdr10Plus(isHdr10Plus) {}
+ mIsHdr(isHdr), mIsHdr10Plus(isHdr10Plus), mBitDepth(bitDepth) {}
virtual bool simpleMap(C2Config::level_t from, int32_t *to) {
return sAv1Levels.map(from, to);
@@ -614,19 +651,22 @@
return sAv1Levels.map(from, to);
}
virtual bool simpleMap(C2Config::profile_t from, int32_t *to) {
- return mIsHdr10Plus ? sAv1Hdr10PlusProfiles.map(from, to) :
- mIsHdr ? sAv1HdrProfiles.map(from, to) :
- sAv1Profiles.map(from, to);
+ return (mBitDepth == 10) ? sAv1TenbitProfiles.map(from, to) :
+ mIsHdr10Plus ? sAv1Hdr10PlusProfiles.map(from, to) :
+ mIsHdr ? sAv1HdrProfiles.map(from, to) :
+ sAv1Profiles.map(from, to);
}
virtual bool simpleMap(int32_t from, C2Config::profile_t *to) {
- return mIsHdr10Plus ? sAv1Hdr10PlusProfiles.map(from, to) :
- mIsHdr ? sAv1HdrProfiles.map(from, to) :
- sAv1Profiles.map(from, to);
+ return (mBitDepth == 10) ? sAv1TenbitProfiles.map(from, to) :
+ mIsHdr10Plus ? sAv1Hdr10PlusProfiles.map(from, to) :
+ mIsHdr ? sAv1HdrProfiles.map(from, to) :
+ sAv1Profiles.map(from, to);
}
private:
bool mIsHdr;
bool mIsHdr10Plus;
+ int32_t mBitDepth;
};
} // namespace
@@ -674,6 +714,18 @@
}
// static
+std::shared_ptr<C2Mapper::ProfileLevelMapper>
+C2Mapper::GetBitDepthProfileLevelMapper(std::string mediaType, int32_t bitDepth) {
+ std::transform(mediaType.begin(), mediaType.end(), mediaType.begin(), ::tolower);
+ if (bitDepth == 8) {
+ return GetProfileLevelMapper(mediaType);
+ } else if (mediaType == MIMETYPE_VIDEO_AV1 && bitDepth == 10) {
+ return std::make_shared<Av1ProfileLevelMapper>(false, false, bitDepth);
+ }
+ return nullptr;
+}
+
+// static
bool C2Mapper::map(C2Config::bitrate_mode_t from, int32_t *to) {
return sBitrateModes.map(from, to);
}
@@ -956,41 +1008,19 @@
// static
bool C2Mapper::mapPixelFormatFrameworkToCodec(
int32_t frameworkValue, uint32_t *c2Value) {
- switch (frameworkValue) {
- case COLOR_FormatSurface:
- *c2Value = HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
- return true;
- case COLOR_FormatYUV420Flexible:
- case COLOR_FormatYUV420Planar:
- case COLOR_FormatYUV420SemiPlanar:
- case COLOR_FormatYUV420PackedPlanar:
- case COLOR_FormatYUV420PackedSemiPlanar:
- *c2Value = HAL_PIXEL_FORMAT_YCBCR_420_888;
- return true;
- default:
- // Passthrough
- *c2Value = uint32_t(frameworkValue);
- return true;
+ if (!sPixelFormats.map(frameworkValue, c2Value)) {
+ // passthrough if not mapped
+ *c2Value = uint32_t(frameworkValue);
}
+ return true;
}
// static
bool C2Mapper::mapPixelFormatCodecToFramework(
uint32_t c2Value, int32_t *frameworkValue) {
- switch (c2Value) {
- case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
- *frameworkValue = COLOR_FormatSurface;
- return true;
- case HAL_PIXEL_FORMAT_YCBCR_422_SP:
- case HAL_PIXEL_FORMAT_YCRCB_420_SP:
- case HAL_PIXEL_FORMAT_YCBCR_422_I:
- case HAL_PIXEL_FORMAT_YCBCR_420_888:
- case HAL_PIXEL_FORMAT_YV12:
- *frameworkValue = COLOR_FormatYUV420Flexible;
- return true;
- default:
- // Passthrough
- *frameworkValue = int32_t(c2Value);
- return true;
+ if (!sPixelFormats.map(c2Value, frameworkValue)) {
+ // passthrough if not mapped
+ *frameworkValue = int32_t(c2Value);
}
+ return true;
}
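The two switch statements above are replaced by lookups in the sPixelFormats table, falling through to a passthrough of the raw value when no entry matches. A minimal sketch of that first-match, two-way lookup; the Entry alias and function names are illustrative and not the ALookup API:

    #include <cstdint>
    #include <utility>
    #include <vector>

    // HAL pixel format <-> COLOR_Format* pairs; table contents are placeholders here.
    using Entry = std::pair<uint32_t, int32_t>;

    // Forward lookup: the first matching entry wins, the raw value passes through otherwise.
    int32_t mapHalToFramework(const std::vector<Entry>& table, uint32_t hal) {
        for (const Entry& e : table) {
            if (e.first == hal) {
                return e.second;
            }
        }
        return static_cast<int32_t>(hal);  // passthrough if not mapped
    }

    // Reverse lookup works the same way, e.g. every YUV420* variant folds back to
    // the single YCBCR_420_888 entry that lists it.
    uint32_t mapFrameworkToHal(const std::vector<Entry>& table, int32_t framework) {
        for (const Entry& e : table) {
            if (e.second == framework) {
                return e.first;
            }
        }
        return static_cast<uint32_t>(framework);
    }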
diff --git a/media/codec2/sfplugin/utils/Codec2Mapper.h b/media/codec2/sfplugin/utils/Codec2Mapper.h
index 797c8a8..33d305e 100644
--- a/media/codec2/sfplugin/utils/Codec2Mapper.h
+++ b/media/codec2/sfplugin/utils/Codec2Mapper.h
@@ -43,6 +43,9 @@
static std::shared_ptr<ProfileLevelMapper>
GetHdrProfileLevelMapper(std::string mediaType, bool isHdr10Plus = false);
+ static std::shared_ptr<ProfileLevelMapper>
+ GetBitDepthProfileLevelMapper(std::string mediaType, int32_t bitDepth = 8);
+
// convert between bitrates
static bool map(C2Config::bitrate_mode_t, int32_t*);
static bool map(int32_t, C2Config::bitrate_mode_t*);
diff --git a/media/codec2/vndk/Android.bp b/media/codec2/vndk/Android.bp
index be81c84..27cd1f8 100644
--- a/media/codec2/vndk/Android.bp
+++ b/media/codec2/vndk/Android.bp
@@ -73,11 +73,12 @@
"libbase",
"libcutils",
"libdl",
+ "libdmabufheap",
+ "libfmq",
+ "libgralloctypes",
"libhardware",
"libhidlbase",
"libion",
- "libdmabufheap",
- "libfmq",
"liblog",
"libnativewindow",
"libstagefright_foundation",
@@ -92,6 +93,44 @@
],
}
+// public dependency for statically linking to libcodec2_vndk for unit tests
+cc_defaults {
+ name: "libcodec2-static-defaults",
+
+ static_libs: [
+ "liblog",
+ "libion",
+ "libfmq",
+ "libbase",
+ "libutils",
+ "libcutils",
+ "libcodec2",
+ "libhidlbase",
+ "libdmabufheap",
+ "libcodec2_vndk",
+ "libnativewindow",
+ "libcodec2_soft_common",
+ "libsfplugin_ccodec_utils",
+ "libstagefright_foundation",
+ "libstagefright_bufferpool@2.0.1",
+ "libgralloctypes",
+ "android.hardware.graphics.mapper@2.0",
+ "android.hardware.graphics.mapper@3.0",
+ "android.hardware.media.bufferpool@2.0",
+ "android.hardware.graphics.allocator@2.0",
+ "android.hardware.graphics.allocator@3.0",
+ "android.hardware.graphics.bufferqueue@2.0",
+ ],
+
+ shared_libs: [
+ "libui",
+ "libdl",
+ "libhardware",
+ "libvndksupport",
+ "libprocessgroup",
+ ],
+}
+
// public dependency for implementing Codec 2 components
cc_defaults {
name: "libcodec2-impl-defaults",
diff --git a/media/codec2/vndk/C2AllocatorGralloc.cpp b/media/codec2/vndk/C2AllocatorGralloc.cpp
index 6a7f19c..b5200a5 100644
--- a/media/codec2/vndk/C2AllocatorGralloc.cpp
+++ b/media/codec2/vndk/C2AllocatorGralloc.cpp
@@ -20,8 +20,10 @@
#include <mutex>
+#include <aidl/android/hardware/graphics/common/PlaneLayoutComponentType.h>
#include <android/hardware/graphics/common/1.2/types.h>
#include <cutils/native_handle.h>
+#include <gralloctypes/Gralloc4.h>
#include <hardware/gralloc.h>
#include <ui/GraphicBufferAllocator.h>
#include <ui/GraphicBufferMapper.h>
@@ -29,6 +31,7 @@
#include <C2AllocatorGralloc.h>
#include <C2Buffer.h>
+#include <C2Debug.h>
#include <C2PlatformSupport.h>
using ::android::hardware::hidl_handle;
@@ -230,8 +233,89 @@
}
};
+static
+c2_status_t Gralloc4Mapper_lock(native_handle_t *handle, uint64_t usage, const Rect& bounds,
+ C2PlanarLayout *layout, uint8_t **addr) {
+ GraphicBufferMapper &mapper = GraphicBufferMapper::get();
+
+ std::vector<ui::PlaneLayout> planes;
+ // this method is only supported on Gralloc 4 or later
+ status_t err = mapper.getPlaneLayouts(handle, &planes);
+ if (err != NO_ERROR || planes.empty()) {
+ return C2_CANNOT_DO;
+ }
+
+ uint8_t *pointer = nullptr;
+ err = mapper.lock(handle, usage, bounds, (void **)&pointer, nullptr, nullptr);
+ if (err != NO_ERROR || pointer == nullptr) {
+ return C2_CORRUPTED;
+ }
+
+ using aidl::android::hardware::graphics::common::PlaneLayoutComponentType;
+ using aidl::android::hardware::graphics::common::PlaneLayoutComponent;
+
+ layout->type = C2PlanarLayout::TYPE_YUV;
+ layout->numPlanes = 0;
+ layout->rootPlanes = 0;
+
+ for (const ui::PlaneLayout &plane : planes) {
+ layout->rootPlanes++;
+ uint32_t lastOffsetInBits = 0;
+ uint32_t rootIx = 0;
+
+ for (const PlaneLayoutComponent &component : plane.components) {
+ if (!gralloc4::isStandardPlaneLayoutComponentType(component.type)) {
+ return C2_CANNOT_DO;
+ }
+
+ uint32_t rightShiftBits = component.offsetInBits - lastOffsetInBits;
+ uint32_t allocatedDepthInBits = component.sizeInBits + rightShiftBits;
+ C2PlanarLayout::plane_index_t planeId;
+ C2PlaneInfo::channel_t channel;
+
+ switch (static_cast<PlaneLayoutComponentType>(component.type.value)) {
+ case PlaneLayoutComponentType::Y:
+ planeId = C2PlanarLayout::PLANE_Y;
+ channel = C2PlaneInfo::CHANNEL_Y;
+ break;
+ case PlaneLayoutComponentType::CB:
+ planeId = C2PlanarLayout::PLANE_U;
+ channel = C2PlaneInfo::CHANNEL_CB;
+ break;
+ case PlaneLayoutComponentType::CR:
+ planeId = C2PlanarLayout::PLANE_V;
+ channel = C2PlaneInfo::CHANNEL_CR;
+ break;
+ default:
+ return C2_CORRUPTED;
+ }
+
+ addr[planeId] = pointer + plane.offsetInBytes + (component.offsetInBits / 8);
+ layout->planes[planeId] = {
+ channel, // channel
+ static_cast<int32_t>(plane.sampleIncrementInBits / 8), // colInc
+ static_cast<int32_t>(plane.strideInBytes), // rowInc
+ static_cast<uint32_t>(plane.horizontalSubsampling), // mColSampling
+ static_cast<uint32_t>(plane.verticalSubsampling), // mRowSampling
+ allocatedDepthInBits, // allocatedDepth (bits)
+ static_cast<uint32_t>(component.sizeInBits), // bitDepth (bits)
+ rightShiftBits, // rightShift (bits)
+ C2PlaneInfo::NATIVE, // endianness
+ rootIx, // rootIx
+ static_cast<uint32_t>(component.offsetInBits / 8), // offset (bytes)
+ };
+
+ layout->numPlanes++;
+ lastOffsetInBits = component.offsetInBits + component.sizeInBits;
+ rootIx++;
+ }
+ }
+ return C2_OK;
+}
+
} // unnamed namespace
+
native_handle_t *UnwrapNativeCodec2GrallocHandle(const C2Handle *const handle) {
return C2HandleGralloc::UnwrapNativeHandle(handle);
}
@@ -385,6 +469,10 @@
mBuffer, mWidth, mHeight, mFormat, mGrallocUsage,
mStride, generation, igbp_id, igbp_slot);
}
+
+ // 'NATIVE' on Android means LITTLE_ENDIAN
+ constexpr C2PlaneInfo::endianness_t kEndianness = C2PlaneInfo::NATIVE;
+
switch (mFormat) {
case static_cast<uint32_t>(PixelFormat4::RGBA_1010102): {
// TRICKY: this is used for media as YUV444 in the case when it is queued directly to a
@@ -646,7 +734,7 @@
16, // allocatedDepth
10, // bitDepth
6, // rightShift
- C2PlaneInfo::LITTLE_END, // endianness
+ kEndianness, // endianness
C2PlanarLayout::PLANE_Y, // rootIx
0, // offset
};
@@ -659,7 +747,7 @@
16, // allocatedDepth
10, // bitDepth
6, // rightShift
- C2PlaneInfo::LITTLE_END, // endianness
+ kEndianness, // endianness
C2PlanarLayout::PLANE_U, // rootIx
0, // offset
};
@@ -672,7 +760,7 @@
16, // allocatedDepth
10, // bitDepth
6, // rightShift
- C2PlaneInfo::LITTLE_END, // endianness
+ kEndianness, // endianness
C2PlanarLayout::PLANE_U, // rootIx
2, // offset
};
@@ -680,9 +768,15 @@
}
default: {
- // We don't know what it is, but let's try to lock it.
+            // We don't know what it is; try to lock it with gralloc4 first
android_ycbcr ycbcrLayout;
+ c2_status_t status = Gralloc4Mapper_lock(
+ const_cast<native_handle_t*>(mBuffer), grallocUsage, rect, layout, addr);
+ if (status == C2_OK) {
+ break;
+ }
+            // fall back to lockYCbCr
status_t err = GraphicBufferMapper::get().lockYCbCr(
const_cast<native_handle_t*>(mBuffer), grallocUsage, rect, &ycbcrLayout);
if (err == OK && ycbcrLayout.y && ycbcrLayout.cb && ycbcrLayout.cr
diff --git a/media/codec2/vndk/C2Store.cpp b/media/codec2/vndk/C2Store.cpp
index 1660c38..dfdd84d 100644
--- a/media/codec2/vndk/C2Store.cpp
+++ b/media/codec2/vndk/C2Store.cpp
@@ -301,13 +301,21 @@
std::lock_guard<std::mutex> lock(_mComponentStoreReadLock);
_mComponentStore = store;
}
- std::shared_ptr<C2AllocatorIon> allocator;
+ std::shared_ptr<C2AllocatorIon> ionAllocator;
{
std::lock_guard<std::mutex> lock(gIonAllocatorMutex);
- allocator = gIonAllocator.lock();
+ ionAllocator = gIonAllocator.lock();
}
- if (allocator) {
- UseComponentStoreForIonAllocator(allocator, store);
+ if (ionAllocator) {
+ UseComponentStoreForIonAllocator(ionAllocator, store);
+ }
+ std::shared_ptr<C2DmaBufAllocator> dmaAllocator;
+ {
+ std::lock_guard<std::mutex> lock(gDmaBufAllocatorMutex);
+ dmaAllocator = gDmaBufAllocator.lock();
+ }
+ if (dmaAllocator) {
+ UseComponentStoreForDmaBufAllocator(dmaAllocator, store);
}
}
diff --git a/media/codec2/vndk/platform/C2BqBuffer.cpp b/media/codec2/vndk/platform/C2BqBuffer.cpp
index 169de0c..01995fd 100644
--- a/media/codec2/vndk/platform/C2BqBuffer.cpp
+++ b/media/codec2/vndk/platform/C2BqBuffer.cpp
@@ -16,6 +16,7 @@
//#define LOG_NDEBUG 0
#define LOG_TAG "C2BqBuffer"
+#include <android/hardware_buffer.h>
#include <utils/Log.h>
#include <ui/BufferQueueDefs.h>
@@ -171,6 +172,91 @@
return stamp;
}
+// Do not rely on the AHardwareBuffer module for GraphicBuffer handling: the AHardwareBuffer
+// module is linked against the framework, which could carry a different implementation of
+// GraphicBuffer than the mainline/vndk implementation. (See b/203347494.)
+//
+// b2h/h2b between HardwareBuffer and GraphicBuffer cannot be used. (b2h/h2b depend on
+// AHardwareBuffer module for the conversion between HardwareBuffer and GraphicBuffer.)
+// hgbp_ prefixed methods are added to be used instead of b2h/h2b.
+//
+// TODO: Remove the dependency on the existing AHwB module. Also clean up the conversions
+// (the conversions here and the h2b/b2h conversions).
+const GraphicBuffer* hgbp_AHBuffer_to_GraphicBuffer(const AHardwareBuffer* buffer) {
+ return GraphicBuffer::fromAHardwareBuffer(buffer);
+}
+
+int hgbp_createFromHandle(const AHardwareBuffer_Desc* desc,
+ const native_handle_t* handle,
+ sp<GraphicBuffer> *outBuffer) {
+
+ if (!desc || !handle || !outBuffer) return ::android::BAD_VALUE;
+ if (desc->rfu0 != 0 || desc->rfu1 != 0) return ::android::BAD_VALUE;
+ if (desc->format == AHARDWAREBUFFER_FORMAT_BLOB && desc->height != 1)
+ return ::android::BAD_VALUE;
+
+ const int format = uint32_t(desc->format);
+ const uint64_t usage = uint64_t(desc->usage);
+ sp<GraphicBuffer> gbuffer(new GraphicBuffer(handle,
+ GraphicBuffer::HandleWrapMethod::CLONE_HANDLE,
+ desc->width, desc->height,
+ format, desc->layers, usage, desc->stride));
+ status_t err = gbuffer->initCheck();
+ if (err != 0 || gbuffer->handle == 0) return err;
+
+ *outBuffer = gbuffer;
+
+ return ::android::NO_ERROR;
+}
+
+void hgbp_describe(const AHardwareBuffer* buffer,
+ AHardwareBuffer_Desc* outDesc) {
+ if (!buffer || !outDesc) return;
+
+ const GraphicBuffer* gbuffer = hgbp_AHBuffer_to_GraphicBuffer(buffer);
+
+ outDesc->width = gbuffer->getWidth();
+ outDesc->height = gbuffer->getHeight();
+ outDesc->layers = gbuffer->getLayerCount();
+ outDesc->format = uint32_t(gbuffer->getPixelFormat());
+ outDesc->usage = uint64_t(gbuffer->getUsage());
+ outDesc->stride = gbuffer->getStride();
+ outDesc->rfu0 = 0;
+ outDesc->rfu1 = 0;
+}
+
+
+bool hgbp_h2b(HBuffer const& from, sp<GraphicBuffer>* to) {
+ AHardwareBuffer_Desc const* desc =
+ reinterpret_cast<AHardwareBuffer_Desc const*>(
+ from.description.data());
+ native_handle_t const* handle = from.nativeHandle;
+ if (hgbp_createFromHandle(desc, handle, to) != ::android::OK) {
+ return false;
+ }
+ return true;
+}
+
+bool hgbp_b2h(sp<GraphicBuffer> const& from, HBuffer* to,
+ uint32_t* toGenerationNumber) {
+ if (!from) {
+ return false;
+ }
+ AHardwareBuffer* hwBuffer = from->toAHardwareBuffer();
+ to->nativeHandle.setTo(
+ const_cast<native_handle_t*>(from->handle),
+ false);
+ hgbp_describe(
+ hwBuffer,
+ reinterpret_cast<AHardwareBuffer_Desc*>(to->description.data()));
+ if (toGenerationNumber) {
+ *toGenerationNumber = from->getGenerationNumber();
+ }
+ return true;
+}
+
+// End of hgbp methods for GraphicBuffer creation.
+
bool getGenerationNumberAndUsage(const sp<HGraphicBufferProducer> &producer,
uint32_t *generation, uint64_t *usage) {
status_t status{};
@@ -211,7 +297,7 @@
HBuffer const& hBuffer,
uint32_t generationNumber){
if (h2b(hStatus, &status) &&
- h2b(hBuffer, &slotBuffer) &&
+ hgbp_h2b(hBuffer, &slotBuffer) &&
slotBuffer) {
*generation = generationNumber;
*usage = slotBuffer->getUsage();
@@ -402,7 +488,7 @@
HBuffer const& hBuffer,
uint32_t generationNumber){
if (h2b(hStatus, &status) &&
- h2b(hBuffer, &slotBuffer) &&
+ hgbp_h2b(hBuffer, &slotBuffer) &&
slotBuffer) {
slotBuffer->setGenerationNumber(generationNumber);
outGeneration = generationNumber;
@@ -804,7 +890,7 @@
HBuffer hBuffer{};
uint32_t hGenerationNumber{};
- if (!b2h(graphicBuffer, &hBuffer, &hGenerationNumber)) {
+ if (!hgbp_b2h(graphicBuffer, &hBuffer, &hGenerationNumber)) {
ALOGD("I to O conversion failed");
return -1;
}
diff --git a/media/codecs/m4v_h263/enc/src/fastidct.cpp b/media/codecs/m4v_h263/enc/src/fastidct.cpp
index 688effc..ec1b28f 100644
--- a/media/codecs/m4v_h263/enc/src/fastidct.cpp
+++ b/media/codecs/m4v_h263/enc/src/fastidct.cpp
@@ -76,6 +76,8 @@
return ;
}
+/* Ignoring overflows as idct function expects and uses overflows */
+__attribute__((no_sanitize("signed-integer-overflow")))
void idct_col2(Short *blk)
{
int32 x0, x1, x3, x5, x7;//, x8;
@@ -102,6 +104,8 @@
return ;
}
+/* Ignoring overflows as idct function expects and uses overflows */
+__attribute__((no_sanitize("signed-integer-overflow")))
void idct_col3(Short *blk)
{
int32 x0, x1, x2, x3, x4, x5, x6, x7, x8;
@@ -137,6 +141,8 @@
return ;
}
+/* Ignoring overflows as idct function expects and uses overflows */
+__attribute__((no_sanitize("signed-integer-overflow")))
void idct_col4(Short *blk)
{
int32 x0, x1, x2, x3, x4, x5, x6, x7, x8;
@@ -180,6 +186,8 @@
}
#ifndef SMALL_DCT
+/* Ignoring overflows as idct function expects and uses overflows */
+__attribute__((no_sanitize("signed-integer-overflow")))
void idct_col0x40(Short *blk)
{
int32 x1, x3, x5, x7;//, x8;
@@ -230,6 +238,8 @@
return ;
}
+/* Ignoring overflows as idct function expects and uses overflows */
+__attribute__((no_sanitize("signed-integer-overflow")))
void idct_col0x10(Short *blk)
{
int32 x1, x3, x5, x7;
@@ -256,6 +266,8 @@
#endif /* SMALL_DCT */
+/* Ignoring overflows as idct function expects and uses overflows */
+__attribute__((no_sanitize("signed-integer-overflow")))
void idct_col(Short *blk)
{
int32 x0, x1, x2, x3, x4, x5, x6, x7, x8;
@@ -368,6 +380,8 @@
return;
}
+/* Ignoring overflows as idct function expects and uses overflows */
+__attribute__((no_sanitize("signed-integer-overflow")))
void idct_row2Inter(Short *blk, UChar *rec, Int lx)
{
int32 x0, x1, x2, x4, x5;
@@ -427,6 +441,8 @@
return ;
}
+/* Ignoring overflows as idct function expects and uses overflows */
+__attribute__((no_sanitize("signed-integer-overflow")))
void idct_row3Inter(Short *blk, UChar *rec, Int lx)
{
int32 x0, x1, x2, x3, x4, x5, x6, x7, x8;
@@ -497,6 +513,8 @@
return ;
}
+/* Ignoring overflows as idct function expects and uses overflows */
+__attribute__((no_sanitize("signed-integer-overflow")))
void idct_row4Inter(Short *blk, UChar *rec, Int lx)
{
int32 x0, x1, x2, x3, x4, x5, x6, x7, x8;
@@ -573,6 +591,8 @@
}
#ifndef SMALL_DCT
+/* Ignoring overflows as idct function expects and uses overflows */
+__attribute__((no_sanitize("signed-integer-overflow")))
void idct_row0x40Inter(Short *blk, UChar *rec, Int lx)
{
int32 x1, x2, x4, x5;
@@ -686,6 +706,8 @@
return ;
}
+/* Ignoring overflows as idct function expects and uses overflows */
+__attribute__((no_sanitize("signed-integer-overflow")))
void idct_row0x10Inter(Short *blk, UChar *rec, Int lx)
{
int32 x1, x3, x5, x7;
@@ -741,6 +763,8 @@
#endif /* SMALL_DCT */
+/* Ignoring overflows as idct function expects and uses overflows */
+__attribute__((no_sanitize("signed-integer-overflow")))
void idct_rowInter(Short *blk, UChar *rec, Int lx)
{
int32 x0, x1, x2, x3, x4, x5, x6, x7, x8;
@@ -864,6 +888,8 @@
return;
}
+/* Ignoring overflows as idct function expects and uses overflows */
+__attribute__((no_sanitize("signed-integer-overflow")))
void idct_row2Intra(Short *blk, UChar *rec, Int lx)
{
int32 x0, x1, x2, x4, x5;
@@ -919,6 +945,8 @@
return ;
}
+/* Ignoring overflows as idct function expects and uses overflows */
+__attribute__((no_sanitize("signed-integer-overflow")))
void idct_row3Intra(Short *blk, UChar *rec, Int lx)
{
int32 x0, x1, x2, x3, x4, x5, x6, x7, x8;
@@ -985,6 +1013,8 @@
return ;
}
+/* Ignoring overflows as idct function expects and uses overflows */
+__attribute__((no_sanitize("signed-integer-overflow")))
void idct_row4Intra(Short *blk, UChar *rec, Int lx)
{
int32 x0, x1, x2, x3, x4, x5, x6, x7, x8;
@@ -1058,6 +1088,8 @@
}
#ifndef SMALL_DCT
+/* Ignoring overflows as idct function expects and uses overflows */
+__attribute__((no_sanitize("signed-integer-overflow")))
void idct_row0x40Intra(Short *blk, UChar *rec, Int lx)
{
int32 x1, x2, x4, x5;
@@ -1166,6 +1198,8 @@
return ;
}
+/* Ignoring overflows as idct function expects and uses overflows */
+__attribute__((no_sanitize("signed-integer-overflow")))
void idct_row0x10Intra(Short *blk, UChar *rec, Int lx)
{
int32 x1, x3, x5, x7;
@@ -1218,6 +1252,8 @@
}
#endif /* SMALL_DCT */
+/* Ignoring overflows as idct function expects and uses overflows */
+__attribute__((no_sanitize("signed-integer-overflow")))
void idct_rowIntra(Short *blk, UChar *rec, Int lx)
{
int32 x0, x1, x2, x3, x4, x5, x6, x7, x8;
@@ -1364,6 +1400,8 @@
return;
}
+/* Ignoring overflows as idct function expects and uses overflows */
+__attribute__((no_sanitize("signed-integer-overflow")))
void idct_row2zmv(Short *blk, UChar *rec, UChar *pred, Int lx)
{
int32 x0, x1, x2, x4, x5;
@@ -1424,6 +1462,8 @@
return ;
}
+/* Ignoring overflows as idct function expects and uses overflows */
+__attribute__((no_sanitize("signed-integer-overflow")))
void idct_row3zmv(Short *blk, UChar *rec, UChar *pred, Int lx)
{
int32 x0, x1, x2, x3, x4, x5, x6, x7, x8;
@@ -1495,6 +1535,8 @@
return ;
}
+/* Ignoring overflows as idct function expects and uses overflows */
+__attribute__((no_sanitize("signed-integer-overflow")))
void idct_row4zmv(Short *blk, UChar *rec, UChar *pred, Int lx)
{
int32 x0, x1, x2, x3, x4, x5, x6, x7, x8;
@@ -1572,6 +1614,8 @@
}
#ifndef SMALL_DCT
+/* Ignoring overflows as idct function expects and uses overflows */
+__attribute__((no_sanitize("signed-integer-overflow")))
void idct_row0x40zmv(Short *blk, UChar *rec, UChar *pred, Int lx)
{
int32 x1, x2, x4, x5;
@@ -1687,6 +1731,8 @@
return ;
}
+/* Ignoring overflows as idct function expects and uses overflows */
+__attribute__((no_sanitize("signed-integer-overflow")))
void idct_row0x10zmv(Short *blk, UChar *rec, UChar *pred, Int lx)
{
int32 x1, x3, x5, x7;
@@ -1743,6 +1789,8 @@
#endif /* SMALL_DCT */
+/* Ignoring overflows as idct function expects and uses overflows */
+__attribute__((no_sanitize("signed-integer-overflow")))
void idct_rowzmv(Short *blk, UChar *rec, UChar *pred, Int lx)
{
int32 x0, x1, x2, x3, x4, x5, x6, x7, x8;
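The annotations above use the Clang/GCC no_sanitize function attribute to exempt only the idct kernels, which overflow by design, from the signed-integer-overflow sanitizer while leaving the rest of the build instrumented. A small self-contained sketch of the same pattern; the macro and function below are illustrative:

    #include <cstdint>
    #include <cstdio>

    // Expands to the attribute only where the compiler supports it, mirroring the
    // __has_attribute guard used in pvmp3_stereo_proc.cpp.
    #if defined(__has_attribute)
    #if __has_attribute(no_sanitize)
    #define NO_OVERFLOW_SANITIZE __attribute__((no_sanitize("signed-integer-overflow")))
    #endif
    #endif
    #ifndef NO_OVERFLOW_SANITIZE
    #define NO_OVERFLOW_SANITIZE
    #endif

    // Wrap-around is intentional here, so only this function opts out of the
    // overflow check; the rest of the translation unit stays instrumented.
    NO_OVERFLOW_SANITIZE
    int32_t wrappingAdd(int32_t a, int32_t b) {
        return a + b;  // may wrap, as the idct kernels do by design
    }

    int main() {
        std::printf("%d\n", static_cast<int>(wrappingAdd(INT32_MAX, 1)));
        return 0;
    }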
diff --git a/media/codecs/m4v_h263/enc/src/mp4enc_api.cpp b/media/codecs/m4v_h263/enc/src/mp4enc_api.cpp
index 00b2ab6..b295258 100644
--- a/media/codecs/m4v_h263/enc/src/mp4enc_api.cpp
+++ b/media/codecs/m4v_h263/enc/src/mp4enc_api.cpp
@@ -501,13 +501,16 @@
/* check frame rate */
for (i = 0; i < encParams->nLayers; i++)
{
+ if (encOption->encFrameRate[i] <= 0. || encOption->encFrameRate[i] > 120)
+ {
+ goto CLEAN_UP;
+ }
encParams->LayerFrameRate[i] = encOption->encFrameRate[i];
}
if (encParams->nLayers > 1)
{
- if (encOption->encFrameRate[0] == encOption->encFrameRate[1] ||
- encOption->encFrameRate[0] == 0. || encOption->encFrameRate[1] == 0.) /* 7/31/03 */
+ if (encOption->encFrameRate[0] == encOption->encFrameRate[1])
goto CLEAN_UP;
}
/* set max frame rate */
diff --git a/media/codecs/m4v_h263/fuzzer/mpeg4_h263_dec_fuzzer.cpp b/media/codecs/m4v_h263/fuzzer/mpeg4_h263_dec_fuzzer.cpp
index 912c821..5e613d9 100644
--- a/media/codecs/m4v_h263/fuzzer/mpeg4_h263_dec_fuzzer.cpp
+++ b/media/codecs/m4v_h263/fuzzer/mpeg4_h263_dec_fuzzer.cpp
@@ -50,7 +50,7 @@
private:
tagvideoDecControls *mDecHandle = nullptr;
- uint8_t *mOutputBuffer[kNumOutputBuffers];
+ uint8_t *mOutputBuffer[kNumOutputBuffers] = {};
bool mInitialized = false;
bool mFramesConfigured = false;
#ifdef MPEG4
diff --git a/media/codecs/mp3dec/src/pvmp3_stereo_proc.cpp b/media/codecs/mp3dec/src/pvmp3_stereo_proc.cpp
index 4338c43..c04f7f3 100644
--- a/media/codecs/mp3dec/src/pvmp3_stereo_proc.cpp
+++ b/media/codecs/mp3dec/src/pvmp3_stereo_proc.cpp
@@ -219,6 +219,9 @@
; FUNCTION CODE
----------------------------------------------------------------------------*/
+#if __has_attribute(no_sanitize)
+__attribute__((no_sanitize("integer")))
+#endif
void pvmp3_st_intensity(int32 xr[SUBBANDS_NUMBER*FILTERBANK_BANDS],
int32 xl[SUBBANDS_NUMBER*FILTERBANK_BANDS],
int32 is_pos,
diff --git a/media/extractors/flac/Android.bp b/media/extractors/flac/Android.bp
index 9a2a76b..f6ce969 100644
--- a/media/extractors/flac/Android.bp
+++ b/media/extractors/flac/Android.bp
@@ -19,7 +19,7 @@
cc_library {
name: "libflacextractor",
- defaults: ["extractor-defaults", "libbinder_ndk_host_user"],
+ defaults: ["extractor-defaults"],
srcs: ["FLACExtractor.cpp"],
diff --git a/media/extractors/mp4/MPEG4Extractor.cpp b/media/extractors/mp4/MPEG4Extractor.cpp
index 8836c47..fb935b6 100644
--- a/media/extractors/mp4/MPEG4Extractor.cpp
+++ b/media/extractors/mp4/MPEG4Extractor.cpp
@@ -1127,10 +1127,10 @@
void *data;
size_t size;
- if (AMediaFormat_getBuffer(mLastTrack->meta, AMEDIAFORMAT_KEY_CSD_2,
+ if (AMediaFormat_getBuffer(mLastTrack->meta, AMEDIAFORMAT_KEY_CSD_0,
&data, &size)
- && size >= 5) {
- const uint8_t *ptr = (const uint8_t *)data;
+ && size >= 24) {
+ const uint8_t *ptr = (const uint8_t *)data + (size - 24);
const uint8_t profile = ptr[2] >> 1;
const uint8_t bl_compatibility_id = (ptr[4]) >> 4;
bool create_two_tracks = false;
@@ -1147,13 +1147,15 @@
track_b->timescale = mLastTrack->timescale;
track_b->sampleTable = mLastTrack->sampleTable;
- track_b->includes_expensive_metadata = mLastTrack->includes_expensive_metadata;
+ track_b->includes_expensive_metadata =
+ mLastTrack->includes_expensive_metadata;
track_b->skipTrack = mLastTrack->skipTrack;
track_b->elst_needs_processing = mLastTrack->elst_needs_processing;
track_b->elst_media_time = mLastTrack->elst_media_time;
track_b->elst_segment_duration = mLastTrack->elst_segment_duration;
track_b->elst_shift_start_ticks = mLastTrack->elst_shift_start_ticks;
- track_b->elst_initial_empty_edit_ticks = mLastTrack->elst_initial_empty_edit_ticks;
+ track_b->elst_initial_empty_edit_ticks =
+ mLastTrack->elst_initial_empty_edit_ticks;
track_b->subsample_encryption = mLastTrack->subsample_encryption;
track_b->mTx3gBuffer = mLastTrack->mTx3gBuffer;
@@ -1166,11 +1168,11 @@
mLastTrack->next = track_b;
track_b->next = NULL;
- // we want to remove the csd-2 key from the metadata, but
+ // we want to remove the csd-0 key from the metadata, but
// don't have an AMediaFormat_* function to do so. Settle
- // for replacing this csd-2 with an empty csd-2.
+ // for replacing this csd-0 with an empty csd-0.
uint8_t emptybuffer[8] = {};
- AMediaFormat_setBuffer(track_b->meta, AMEDIAFORMAT_KEY_CSD_2,
+ AMediaFormat_setBuffer(track_b->meta, AMEDIAFORMAT_KEY_CSD_0,
emptybuffer, 0);
if (4 == profile || 7 == profile || 8 == profile ) {
@@ -1182,6 +1184,8 @@
} else if (10 == profile) {
AMediaFormat_setString(track_b->meta,
AMEDIAFORMAT_KEY_MIME, MEDIA_MIMETYPE_VIDEO_AV1);
+ AMediaFormat_setBuffer(track_b->meta, AMEDIAFORMAT_KEY_CSD_0,
+ data, size - 24);
} // Should never get to else part
mLastTrack = track_b;
@@ -2591,9 +2595,11 @@
*offset += chunk_size;
break;
}
- case FOURCC("dvcC"):
- case FOURCC("dvvC"): {
+ case FOURCC("dvcC"):
+ case FOURCC("dvvC"):
+ case FOURCC("dvwC"):
+ {
if (chunk_data_size != 24) {
return ERROR_MALFORMED;
}
@@ -2612,14 +2618,29 @@
if (mLastTrack == NULL)
return ERROR_MALFORMED;
- AMediaFormat_setBuffer(mLastTrack->meta, AMEDIAFORMAT_KEY_CSD_2,
- buffer.get(), chunk_data_size);
+ void *data = nullptr;
+ size_t size = 0;
+ if (AMediaFormat_getBuffer(mLastTrack->meta, AMEDIAFORMAT_KEY_CSD_0, &data, &size)) {
+                // if csd-0 is already present, append the dvcC payload to it
+ auto csd0_dvcc = heapbuffer<uint8_t>(size + chunk_data_size);
+
+ memcpy(csd0_dvcc.get(), data, size);
+ memcpy(csd0_dvcc.get() + size, buffer.get(), chunk_data_size);
+
+ AMediaFormat_setBuffer(mLastTrack->meta, AMEDIAFORMAT_KEY_CSD_0,
+ csd0_dvcc.get(), size + chunk_data_size);
+ } else {
+                // if csd-0 is not present, set it directly
+ AMediaFormat_setBuffer(mLastTrack->meta, AMEDIAFORMAT_KEY_CSD_0,
+ buffer.get(), chunk_data_size);
+ }
AMediaFormat_setString(mLastTrack->meta, AMEDIAFORMAT_KEY_MIME,
MEDIA_MIMETYPE_VIDEO_DOLBY_VISION);
*offset += chunk_size;
break;
}
+
case FOURCC("d263"):
{
*offset += chunk_size;
@@ -4458,7 +4479,6 @@
if (!AMediaFormat_getString(track->meta, AMEDIAFORMAT_KEY_MIME, &mime)) {
return NULL;
}
-
sp<ItemTable> itemTable;
if (!strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_AVC)) {
void *data;
@@ -4491,14 +4511,14 @@
} else if (!strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_DOLBY_VISION)) {
void *data;
size_t size;
- if (!AMediaFormat_getBuffer(track->meta, AMEDIAFORMAT_KEY_CSD_2, &data, &size)) {
+ if (!AMediaFormat_getBuffer(track->meta, AMEDIAFORMAT_KEY_CSD_0, &data, &size)
+ || size < 24) {
return NULL;
}
- const uint8_t *ptr = (const uint8_t *)data;
-
+ const uint8_t *ptr = (const uint8_t *)data + (size - 24);
// dv_major.dv_minor Should be 1.0 or 2.1
- if (size != 24 || ((ptr[0] != 1 || ptr[1] != 0) && (ptr[0] != 2 || ptr[1] != 1))) {
+ if ((ptr[0] != 1 || ptr[1] != 0) && (ptr[0] != 2 || ptr[1] != 1)) {
return NULL;
}
} else if (!strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_AV1)
@@ -4576,7 +4596,7 @@
return ERROR_MALFORMED;
}
} else if (!strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_DOLBY_VISION)) {
- if (!AMediaFormat_getBuffer(track->meta, AMEDIAFORMAT_KEY_CSD_2, &data, &size)) {
+ if (!AMediaFormat_getBuffer(track->meta, AMEDIAFORMAT_KEY_CSD_0, &data, &size)) {
return ERROR_MALFORMED;
}
} else if (!strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_AV1)) {
@@ -5152,11 +5172,11 @@
ALOGV("%s DolbyVision stream detected", __FUNCTION__);
void *data;
size_t size;
- CHECK(AMediaFormat_getBuffer(format, AMEDIAFORMAT_KEY_CSD_2, &data, &size));
+ CHECK(AMediaFormat_getBuffer(format, AMEDIAFORMAT_KEY_CSD_0, &data, &size));
- const uint8_t *ptr = (const uint8_t *)data;
+ const uint8_t *ptr = (const uint8_t *)data + (size - 24);
- CHECK(size == 24);
+ CHECK(size >= 24);
// dv_major.dv_minor Should be 1.0 or 2.1
CHECK(!((ptr[0] != 1 || ptr[1] != 0) && (ptr[0] != 2 || ptr[1] != 1)));
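With this change the 24-byte Dolby Vision configuration record (dvcC/dvvC/dvwC) is appended to whatever csd-0 already holds, and readers take the record from the last 24 bytes of csd-0 (profile in ptr[2] >> 1). A minimal sketch of that layout using plain std::vector; the helper names are illustrative and not part of the extractor:

    #include <cstddef>
    #include <cstdint>
    #include <vector>

    constexpr size_t kDvccSize = 24;

    // Append a 24-byte Dolby Vision configuration record to an existing csd-0 blob.
    std::vector<uint8_t> appendDvcc(const std::vector<uint8_t>& csd0,
                                    const uint8_t (&dvcc)[kDvccSize]) {
        std::vector<uint8_t> out(csd0);
        out.insert(out.end(), dvcc, dvcc + kDvccSize);
        return out;
    }

    // Readers locate the record at the tail of csd-0; the profile sits at ptr[2] >> 1.
    const uint8_t* dvccFromCsd0(const std::vector<uint8_t>& csd0) {
        if (csd0.size() < kDvccSize) {
            return nullptr;
        }
        return csd0.data() + (csd0.size() - kDvccSize);
    }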
diff --git a/media/extractors/tests/Android.bp b/media/extractors/tests/Android.bp
index 5d97d9a..23c74f7 100644
--- a/media/extractors/tests/Android.bp
+++ b/media/extractors/tests/Android.bp
@@ -45,14 +45,11 @@
"libdatasource",
"libwatchdog",
- "libstagefright",
"libstagefright_id3",
"libstagefright_flacdec",
"libstagefright_esds",
"libstagefright_mpeg2support",
- "libstagefright_mpeg2extractor",
"libstagefright_foundation_colorutils_ndk",
- "libstagefright_foundation",
"libstagefright_metadatautils",
"libmedia_midiiowrapper",
@@ -74,6 +71,8 @@
"libcutils",
"libmediandk",
"libmedia",
+ "libstagefright",
+ "libstagefright_foundation",
"libcrypto",
"libhidlmemory",
"libhidlbase",
diff --git a/media/extractors/wav/Android.bp b/media/extractors/wav/Android.bp
index cc5e1c7..76546b8 100644
--- a/media/extractors/wav/Android.bp
+++ b/media/extractors/wav/Android.bp
@@ -18,7 +18,7 @@
cc_library {
name: "libwavextractor",
- defaults: ["extractor-defaults", "libbinder_ndk_host_user"],
+ defaults: ["extractor-defaults"],
srcs: ["WAVExtractor.cpp"],
diff --git a/media/janitors/codec_OWNERS b/media/janitors/codec_OWNERS
index e201399..d4ee51b 100644
--- a/media/janitors/codec_OWNERS
+++ b/media/janitors/codec_OWNERS
@@ -2,4 +2,4 @@
# differentiated from plugins connecting those codecs to either omx or codec2 infrastructure
essick@google.com
lajos@google.com
-marcone@google.com
+wonsik@google.com
diff --git a/media/janitors/reliability_mainline_OWNERS b/media/janitors/reliability_mainline_OWNERS
new file mode 100644
index 0000000..cced19c
--- /dev/null
+++ b/media/janitors/reliability_mainline_OWNERS
@@ -0,0 +1,5 @@
+# Bug component: 1051309
+# go/android-media-reliability
+
+essick@google.com
+nchalko@google.com
diff --git a/media/libaudioclient/AidlConversion.cpp b/media/libaudioclient/AidlConversion.cpp
index acab774..efc870a 100644
--- a/media/libaudioclient/AidlConversion.cpp
+++ b/media/libaudioclient/AidlConversion.cpp
@@ -584,6 +584,8 @@
return AUDIO_OUTPUT_FLAG_INCALL_MUSIC;
case media::AudioOutputFlags::GAPLESS_OFFLOAD:
return AUDIO_OUTPUT_FLAG_GAPLESS_OFFLOAD;
+ case media::AudioOutputFlags::SPATIALIZER:
+ return AUDIO_OUTPUT_FLAG_SPATIALIZER;
}
return unexpected(BAD_VALUE);
}
@@ -625,6 +627,8 @@
return media::AudioOutputFlags::INCALL_MUSIC;
case AUDIO_OUTPUT_FLAG_GAPLESS_OFFLOAD:
return media::AudioOutputFlags::GAPLESS_OFFLOAD;
+ case AUDIO_OUTPUT_FLAG_SPATIALIZER:
+ return media::AudioOutputFlags::SPATIALIZER;
}
return unexpected(BAD_VALUE);
}
diff --git a/media/libaudioclient/Android.bp b/media/libaudioclient/Android.bp
index 3c3d340..a00cb79 100644
--- a/media/libaudioclient/Android.bp
+++ b/media/libaudioclient/Android.bp
@@ -41,7 +41,7 @@
},
}
-cc_library_shared {
+cc_library {
name: "libaudiopolicy",
srcs: [
"AudioAttributes.cpp",
@@ -136,6 +136,7 @@
"libutils",
"libvibrator",
"framework-permission-aidl-cpp",
+ "packagemanager_aidl-cpp",
],
export_shared_lib_headers: [
"audioflinger-aidl-cpp",
@@ -201,7 +202,7 @@
],
apex_available: [
"//apex_available:platform",
- "com.android.bluetooth.updatable",
+ "com.android.bluetooth",
"com.android.media",
"com.android.media.swcodec",
],
diff --git a/media/libaudioclient/AudioRecord.cpp b/media/libaudioclient/AudioRecord.cpp
index da21771..22f0295 100644
--- a/media/libaudioclient/AudioRecord.cpp
+++ b/media/libaudioclient/AudioRecord.cpp
@@ -205,16 +205,18 @@
// Otherwise the callback thread will never exit.
stop();
if (mAudioRecordThread != 0) {
- mProxy->interrupt();
mAudioRecordThread->requestExit(); // see comment in AudioRecord.h
+ mProxy->interrupt();
mAudioRecordThread->requestExitAndWait();
mAudioRecordThread.clear();
}
- // No lock here: worst case we remove a NULL callback which will be a nop
+
+ AutoMutex lock(mLock);
if (mDeviceCallback != 0 && mInput != AUDIO_IO_HANDLE_NONE) {
// This may not stop all of these device callbacks!
// TODO: Add some sort of protection.
AudioSystem::removeAudioDeviceCallback(this, mInput, mPortId);
+ mDeviceCallback.clear();
}
}
status_t AudioRecord::set(
diff --git a/media/libaudioclient/AudioSystem.cpp b/media/libaudioclient/AudioSystem.cpp
index c7967e5..139d931 100644
--- a/media/libaudioclient/AudioSystem.cpp
+++ b/media/libaudioclient/AudioSystem.cpp
@@ -1966,8 +1966,8 @@
return result.value_or(false);
}
-status_t AudioSystem::getHwOffloadEncodingFormatsSupportedForA2DP(
- std::vector<audio_format_t>* formats) {
+status_t AudioSystem::getHwOffloadFormatsSupportedForBluetoothMedia(
+ audio_devices_t device, std::vector<audio_format_t>* formats) {
if (formats == nullptr) {
return BAD_VALUE;
}
@@ -1977,8 +1977,10 @@
if (aps == 0) return PERMISSION_DENIED;
std::vector<media::audio::common::AudioFormat> formatsAidl;
+ int32_t deviceAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_devices_t_int32_t(device));
+
RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(
- aps->getHwOffloadEncodingFormatsSupportedForA2DP(&formatsAidl)));
+ aps->getHwOffloadFormatsSupportedForBluetoothMedia(deviceAidl, &formatsAidl)));
*formats = VALUE_OR_RETURN_STATUS(
convertContainer<std::vector<audio_format_t>>(formatsAidl,
aidl2legacy_AudioFormat_audio_format_t));
diff --git a/media/libaudioclient/AudioTrack.cpp b/media/libaudioclient/AudioTrack.cpp
index 54d186e..e96cf87 100644
--- a/media/libaudioclient/AudioTrack.cpp
+++ b/media/libaudioclient/AudioTrack.cpp
@@ -357,12 +357,13 @@
// Otherwise the callback thread will never exit.
stop();
if (mAudioTrackThread != 0) { // not thread safe
- mProxy->interrupt();
mAudioTrackThread->requestExit(); // see comment in AudioTrack.h
+ mProxy->interrupt();
mAudioTrackThread->requestExitAndWait();
mAudioTrackThread.clear();
}
- // No lock here: worst case we remove a NULL callback which will be a nop
+
+ AutoMutex lock(mLock);
if (mDeviceCallback != 0 && mOutput != AUDIO_IO_HANDLE_NONE) {
// This may not stop all of these device callbacks!
// TODO: Add some sort of protection.
diff --git a/media/libaudioclient/AudioTrackShared.cpp b/media/libaudioclient/AudioTrackShared.cpp
index 35719be..e3b79b2 100644
--- a/media/libaudioclient/AudioTrackShared.cpp
+++ b/media/libaudioclient/AudioTrackShared.cpp
@@ -409,7 +409,7 @@
android_atomic_or(CBLK_FUTEX_WAKE, &cblk->mFutex);
// it seems that a FUTEX_WAKE_PRIVATE will not wake a FUTEX_WAIT, even within same process
(void) syscall(__NR_futex, &cblk->mFutex, mClientInServer ? FUTEX_WAKE_PRIVATE : FUTEX_WAKE,
- 1);
+ INT_MAX);
}
}
@@ -419,7 +419,7 @@
if (!(android_atomic_or(CBLK_INTERRUPT, &cblk->mFlags) & CBLK_INTERRUPT)) {
android_atomic_or(CBLK_FUTEX_WAKE, &cblk->mFutex);
(void) syscall(__NR_futex, &cblk->mFutex, mClientInServer ? FUTEX_WAKE_PRIVATE : FUTEX_WAKE,
- 1);
+ INT_MAX);
}
}
@@ -490,6 +490,8 @@
status_t AudioTrackClientProxy::waitStreamEndDone(const struct timespec *requested)
{
struct timespec total; // total elapsed time spent waiting
+ struct timespec before;
+ bool beforeIsValid = false;
total.tv_sec = 0;
total.tv_nsec = 0;
audio_track_cblk_t* cblk = mCblk;
@@ -570,17 +572,38 @@
}
int32_t old = android_atomic_and(~CBLK_FUTEX_WAKE, &cblk->mFutex);
if (!(old & CBLK_FUTEX_WAKE)) {
+ if (!beforeIsValid) {
+ clock_gettime(CLOCK_MONOTONIC, &before);
+ beforeIsValid = true;
+ }
errno = 0;
(void) syscall(__NR_futex, &cblk->mFutex,
mClientInServer ? FUTEX_WAIT_PRIVATE : FUTEX_WAIT, old & ~CBLK_FUTEX_WAKE, ts);
- switch (errno) {
+ status_t error = errno; // clock_gettime can affect errno
+ {
+ struct timespec after;
+ clock_gettime(CLOCK_MONOTONIC, &after);
+ total.tv_sec += after.tv_sec - before.tv_sec;
+ // Use auto instead of long to avoid the google-runtime-int warning.
+ auto deltaNs = after.tv_nsec - before.tv_nsec;
+ if (deltaNs < 0) {
+ deltaNs += 1000000000;
+ total.tv_sec--;
+ }
+ if ((total.tv_nsec += deltaNs) >= 1000000000) {
+ total.tv_nsec -= 1000000000;
+ total.tv_sec++;
+ }
+ before = after;
+ }
+ switch (error) {
case 0: // normal wakeup by server, or by binderDied()
case EWOULDBLOCK: // benign race condition with server
case EINTR: // wait was interrupted by signal or other spurious wakeup
case ETIMEDOUT: // time-out expired
break;
default:
- status = errno;
+ status = error;
ALOGE("%s unexpected error %s", __func__, strerror(status));
goto end;
}
@@ -747,7 +770,7 @@
int32_t old = android_atomic_or(CBLK_FUTEX_WAKE, &cblk->mFutex);
if (!(old & CBLK_FUTEX_WAKE)) {
(void) syscall(__NR_futex, &cblk->mFutex,
- mClientInServer ? FUTEX_WAKE_PRIVATE : FUTEX_WAKE, 1);
+ mClientInServer ? FUTEX_WAKE_PRIVATE : FUTEX_WAKE, INT_MAX);
}
}
mFlushed += (newFront - front) & mask;
@@ -917,7 +940,7 @@
int32_t old = android_atomic_or(CBLK_FUTEX_WAKE, &cblk->mFutex);
if (!(old & CBLK_FUTEX_WAKE)) {
(void) syscall(__NR_futex, &cblk->mFutex,
- mClientInServer ? FUTEX_WAKE_PRIVATE : FUTEX_WAKE, 1);
+ mClientInServer ? FUTEX_WAKE_PRIVATE : FUTEX_WAKE, INT_MAX);
}
}
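
Several hunks in this file change the futex wake count from 1 to INT_MAX so that every thread parked on the control block's mFutex is released rather than a single one. The following self-contained Linux sketch (illustrative names, not AudioTrack code) shows the difference: waking with a count of 1 would leave the remaining waiters blocked, while INT_MAX releases them all.

// Illustration only, assuming Linux; demonstrates FUTEX_WAKE with INT_MAX.
#include <linux/futex.h>
#include <sys/syscall.h>
#include <unistd.h>
#include <atomic>
#include <climits>
#include <cstdint>
#include <cstdio>
#include <thread>
#include <vector>

static std::atomic<uint32_t> gWord{0};

static void waiter(int id) {
    // Park while the word is still 0; re-check after every (possibly spurious) wakeup.
    while (gWord.load() == 0) {
        syscall(SYS_futex, reinterpret_cast<uint32_t*>(&gWord),
                FUTEX_WAIT, 0 /* expected value */, nullptr, nullptr, 0);
    }
    std::printf("waiter %d released\n", id);
}

int main() {
    std::vector<std::thread> threads;
    for (int i = 0; i < 4; ++i) threads.emplace_back(waiter, i);
    sleep(1);        // let the waiters park on the futex
    gWord.store(1);  // change the word so post-wake re-checks fall through
    // A count of 1 here would release only one thread and main() would hang in
    // join(); INT_MAX wakes every waiter, which is what the changes above rely on.
    syscall(SYS_futex, reinterpret_cast<uint32_t*>(&gWord),
            FUTEX_WAKE, INT_MAX, nullptr, nullptr, 0);
    for (auto& t : threads) t.join();
    return 0;
}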
diff --git a/media/libaudioclient/PolicyAidlConversion.cpp b/media/libaudioclient/PolicyAidlConversion.cpp
index 25fdb49..dcfde8b 100644
--- a/media/libaudioclient/PolicyAidlConversion.cpp
+++ b/media/libaudioclient/PolicyAidlConversion.cpp
@@ -26,16 +26,6 @@
using base::unexpected;
-ConversionResult<volume_group_t>
-aidl2legacy_int32_t_volume_group_t(int32_t aidl) {
- return convertReinterpret<volume_group_t>(aidl);
-}
-
-ConversionResult<int32_t>
-legacy2aidl_volume_group_t_int32_t(volume_group_t legacy) {
- return convertReinterpret<int32_t>(legacy);
-}
-
ConversionResult<uint32_t>
aidl2legacy_AudioMixType_uint32_t(media::AudioMixType aidl) {
switch (aidl) {
diff --git a/media/libaudioclient/aidl/android/media/AudioOutputFlags.aidl b/media/libaudioclient/aidl/android/media/AudioOutputFlags.aidl
index cebd8f0..a0cf95d 100644
--- a/media/libaudioclient/aidl/android/media/AudioOutputFlags.aidl
+++ b/media/libaudioclient/aidl/android/media/AudioOutputFlags.aidl
@@ -36,4 +36,5 @@
VOIP_RX = 13,
INCALL_MUSIC = 14,
GAPLESS_OFFLOAD = 15,
+ SPATIALIZER = 16,
}
diff --git a/media/libaudioclient/aidl/android/media/IAudioPolicyService.aidl b/media/libaudioclient/aidl/android/media/IAudioPolicyService.aidl
index 5f0a1de..6140a64 100644
--- a/media/libaudioclient/aidl/android/media/IAudioPolicyService.aidl
+++ b/media/libaudioclient/aidl/android/media/IAudioPolicyService.aidl
@@ -295,7 +295,7 @@
void getReportedSurroundFormats(inout Int count,
out AudioFormat[] formats);
- AudioFormat[] getHwOffloadEncodingFormatsSupportedForA2DP();
+ AudioFormat[] getHwOffloadFormatsSupportedForBluetoothMedia(int /* audio_devices_t */ device);
void setSurroundFormatEnabled(AudioFormat audioFormat, boolean enabled);
diff --git a/media/libaudioclient/include/media/AudioSystem.h b/media/libaudioclient/include/media/AudioSystem.h
index 869bd6e..4d85f7a 100644
--- a/media/libaudioclient/include/media/AudioSystem.h
+++ b/media/libaudioclient/include/media/AudioSystem.h
@@ -420,8 +420,8 @@
static status_t getMicrophones(std::vector<media::MicrophoneInfo> *microphones);
- static status_t getHwOffloadEncodingFormatsSupportedForA2DP(
- std::vector<audio_format_t> *formats);
+ static status_t getHwOffloadFormatsSupportedForBluetoothMedia(
+ audio_devices_t device, std::vector<audio_format_t> *formats);
// numSurroundFormats holds the maximum number of formats and bool value allowed in the array.
// When numSurroundFormats is 0, surroundFormats and surroundFormatsEnabled will not be
diff --git a/media/libaudioclient/include/media/PolicyAidlConversion.h b/media/libaudioclient/include/media/PolicyAidlConversion.h
index 873f27a..2cfa438 100644
--- a/media/libaudioclient/include/media/PolicyAidlConversion.h
+++ b/media/libaudioclient/include/media/PolicyAidlConversion.h
@@ -39,11 +39,6 @@
namespace android {
-ConversionResult<volume_group_t>
-aidl2legacy_int32_t_volume_group_t(int32_t aidl);
-ConversionResult<int32_t>
-legacy2aidl_volume_group_t_int32_t(volume_group_t legacy);
-
ConversionResult<product_strategy_t>
aidl2legacy_int32_t_product_strategy_t(int32_t aidl);
ConversionResult<int32_t>
diff --git a/media/libaudiofoundation/AudioContainers.cpp b/media/libaudiofoundation/AudioContainers.cpp
index 31257d5..3df9378 100644
--- a/media/libaudiofoundation/AudioContainers.cpp
+++ b/media/libaudiofoundation/AudioContainers.cpp
@@ -63,6 +63,13 @@
return audioDeviceInAllUsbSet;
}
+const DeviceTypeSet& getAudioDeviceOutAllBleSet() {
+ static const DeviceTypeSet audioDeviceOutAllBleSet = DeviceTypeSet(
+ std::begin(AUDIO_DEVICE_OUT_ALL_BLE_ARRAY),
+ std::end(AUDIO_DEVICE_OUT_ALL_BLE_ARRAY));
+ return audioDeviceOutAllBleSet;
+}
+
bool deviceTypesToString(const DeviceTypeSet &deviceTypes, std::string &str) {
if (deviceTypes.empty()) {
str = "Empty device types";
diff --git a/media/libaudiofoundation/include/media/AudioContainers.h b/media/libaudiofoundation/include/media/AudioContainers.h
index 204b365..60b42fb 100644
--- a/media/libaudiofoundation/include/media/AudioContainers.h
+++ b/media/libaudiofoundation/include/media/AudioContainers.h
@@ -40,6 +40,7 @@
const DeviceTypeSet& getAudioDeviceOutAllUsbSet();
const DeviceTypeSet& getAudioDeviceInAllSet();
const DeviceTypeSet& getAudioDeviceInAllUsbSet();
+const DeviceTypeSet& getAudioDeviceOutAllBleSet();
template<typename T>
static std::vector<T> Intersection(const std::set<T>& a, const std::set<T>& b) {
diff --git a/media/libaudiohal/Android.bp b/media/libaudiohal/Android.bp
index bd24c84..b32c735 100644
--- a/media/libaudiohal/Android.bp
+++ b/media/libaudiohal/Android.bp
@@ -27,6 +27,7 @@
"libaudiohal@5.0",
"libaudiohal@6.0",
"libaudiohal@7.0",
+ "libaudiohal@7.1",
],
shared_libs: [
diff --git a/media/libaudiohal/FactoryHalHidl.cpp b/media/libaudiohal/FactoryHalHidl.cpp
index e420d07..804edcc 100644
--- a/media/libaudiohal/FactoryHalHidl.cpp
+++ b/media/libaudiohal/FactoryHalHidl.cpp
@@ -31,6 +31,7 @@
/** Supported HAL versions, in order of preference.
*/
const char* sAudioHALVersions[] = {
+ "7.1",
"7.0",
"6.0",
"5.0",
@@ -94,7 +95,7 @@
} // namespace
void* createPreferredImpl(const std::string& package, const std::string& interface) {
- for (auto version = detail::sAudioHALVersions; version != nullptr; ++version) {
+ for (auto version = detail::sAudioHALVersions; *version != nullptr; ++version) {
void* rawInterface = nullptr;
if (hasHalService(package, *version, interface)
&& createHalService(*version, interface, &rawInterface)) {
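
The one-character fix above matters because sAudioHALVersions is a nullptr-terminated array of C strings: the loop has to stop when the element it points at is null, not when the iterator pointer itself is null (which never happens). A tiny self-contained illustration of the sentinel-terminated iteration, with a made-up table name:

// Illustration only; mimics walking a nullptr-terminated table such as
// sAudioHALVersions. kVersions is a hypothetical stand-in.
#include <cstdio>

static const char* kVersions[] = {"7.1", "7.0", "6.0", "5.0", nullptr};

int main() {
    // Correct: dereference the element and stop at the nullptr sentinel.
    for (const char* const* v = kVersions; *v != nullptr; ++v) {
        std::printf("trying HAL %s\n", *v);
    }
    // The pre-fix form, 'v != nullptr', never becomes false, so the loop would
    // walk past the sentinel and dereference whatever lies beyond the array.
    return 0;
}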
diff --git a/media/libaudiohal/impl/Android.bp b/media/libaudiohal/impl/Android.bp
index a2c6e8a..090bd21 100644
--- a/media/libaudiohal/impl/Android.bp
+++ b/media/libaudiohal/impl/Android.bp
@@ -7,22 +7,33 @@
default_applicable_licenses: ["frameworks_av_license"],
}
-cc_defaults {
- name: "libaudiohal_default",
-
+filegroup {
+ name: "audio_core_hal_client_sources",
srcs: [
"DeviceHalLocal.cpp",
"DevicesFactoryHalHybrid.cpp",
"DevicesFactoryHalLocal.cpp",
- "StreamHalLocal.cpp",
-
- "ConversionHelperHidl.cpp",
"DeviceHalHidl.cpp",
"DevicesFactoryHalHidl.cpp",
+ "StreamHalLocal.cpp",
+ "StreamHalHidl.cpp",
+ ],
+}
+
+filegroup {
+ name: "audio_effect_hal_client_sources",
+ srcs: [
"EffectBufferHalHidl.cpp",
"EffectHalHidl.cpp",
"EffectsFactoryHalHidl.cpp",
- "StreamHalHidl.cpp",
+ ],
+}
+
+cc_defaults {
+ name: "libaudiohal_default",
+
+ srcs: [
+ "ConversionHelperHidl.cpp",
],
cflags: [
@@ -65,6 +76,10 @@
cc_library_shared {
name: "libaudiohal@4.0",
defaults: ["libaudiohal_default"],
+ srcs: [
+ ":audio_core_hal_client_sources",
+ ":audio_effect_hal_client_sources",
+ ],
shared_libs: [
"android.hardware.audio.common@4.0",
"android.hardware.audio.common@4.0-util",
@@ -83,6 +98,10 @@
cc_library_shared {
name: "libaudiohal@5.0",
defaults: ["libaudiohal_default"],
+ srcs: [
+ ":audio_core_hal_client_sources",
+ ":audio_effect_hal_client_sources",
+ ],
shared_libs: [
"android.hardware.audio.common@5.0",
"android.hardware.audio.common@5.0-util",
@@ -101,6 +120,10 @@
cc_library_shared {
name: "libaudiohal@6.0",
defaults: ["libaudiohal_default"],
+ srcs: [
+ ":audio_core_hal_client_sources",
+ ":audio_effect_hal_client_sources",
+ ],
shared_libs: [
"android.hardware.audio.common@6.0",
"android.hardware.audio.common@6.0-util",
@@ -119,6 +142,10 @@
cc_library_shared {
name: "libaudiohal@7.0",
defaults: ["libaudiohal_default"],
+ srcs: [
+ ":audio_core_hal_client_sources",
+ ":audio_effect_hal_client_sources",
+ ],
shared_libs: [
"android.hardware.audio.common@7.0",
"android.hardware.audio.common@7.0-util",
@@ -133,3 +160,25 @@
"-include common/all-versions/VersionMacro.h",
]
}
+
+cc_library_shared {
+ name: "libaudiohal@7.1",
+ defaults: ["libaudiohal_default"],
+ srcs: [
+ ":audio_core_hal_client_sources",
+ ],
+ shared_libs: [
+ "android.hardware.audio.common@7.0",
+ "android.hardware.audio.common@7.0-util",
+ "android.hardware.audio@7.0",
+ "android.hardware.audio@7.1",
+ "android.hardware.audio@7.0-util",
+ ],
+ cflags: [
+ "-DMAJOR_VERSION=7",
+ "-DMINOR_VERSION=1",
+ "-DCOMMON_TYPES_MINOR_VERSION=0",
+ "-DCORE_TYPES_MINOR_VERSION=0",
+ "-include common/all-versions/VersionMacro.h",
+ ]
+}
diff --git a/media/libaudiohal/impl/ConversionHelperHidl.cpp b/media/libaudiohal/impl/ConversionHelperHidl.cpp
index 0503698..1d34814 100644
--- a/media/libaudiohal/impl/ConversionHelperHidl.cpp
+++ b/media/libaudiohal/impl/ConversionHelperHidl.cpp
@@ -24,10 +24,9 @@
#include "ConversionHelperHidl.h"
namespace android {
-namespace CPP_VERSION {
-using namespace ::android::hardware::audio::common::CPP_VERSION;
-using namespace ::android::hardware::audio::CPP_VERSION;
+using namespace ::android::hardware::audio::common::COMMON_TYPES_CPP_VERSION;
+using namespace ::android::hardware::audio::CORE_TYPES_CPP_VERSION;
// static
status_t ConversionHelperHidl::keysFromHal(const String8& keys, hidl_vec<hidl_string> *hidlKeys) {
@@ -129,5 +128,4 @@
ALOGE("%s %p %s: %s (from rpc)", mClassName, this, funcName, description);
}
-} // namespace CPP_VERSION
} // namespace android
diff --git a/media/libaudiohal/impl/ConversionHelperHidl.h b/media/libaudiohal/impl/ConversionHelperHidl.h
index 2909013..9368551 100644
--- a/media/libaudiohal/impl/ConversionHelperHidl.h
+++ b/media/libaudiohal/impl/ConversionHelperHidl.h
@@ -17,22 +17,21 @@
#ifndef ANDROID_HARDWARE_CONVERSION_HELPER_HIDL_H
#define ANDROID_HARDWARE_CONVERSION_HELPER_HIDL_H
-#include PATH(android/hardware/audio/FILE_VERSION/types.h)
+#include PATH(android/hardware/audio/CORE_TYPES_FILE_VERSION/types.h)
#include <hidl/HidlSupport.h>
#include <system/audio.h>
#include <utils/String8.h>
#include <utils/String16.h>
#include <utils/Vector.h>
-using ::android::hardware::audio::CPP_VERSION::ParameterValue;
-using CoreResult = ::android::hardware::audio::CPP_VERSION::Result;
+using ::android::hardware::audio::CORE_TYPES_CPP_VERSION::ParameterValue;
+using CoreResult = ::android::hardware::audio::CORE_TYPES_CPP_VERSION::Result;
using ::android::hardware::Return;
using ::android::hardware::hidl_string;
using ::android::hardware::hidl_vec;
namespace android {
-namespace CPP_VERSION {
class ConversionHelperHidl {
protected:
@@ -85,7 +84,6 @@
void emitError(const char* funcName, const char* description);
};
-} // namespace CPP_VERSION
} // namespace android
#endif // ANDROID_HARDWARE_CONVERSION_HELPER_HIDL_H
diff --git a/media/libaudiohal/impl/DeviceHalHidl.cpp b/media/libaudiohal/impl/DeviceHalHidl.cpp
index aa94eea..50311e5 100644
--- a/media/libaudiohal/impl/DeviceHalHidl.cpp
+++ b/media/libaudiohal/impl/DeviceHalHidl.cpp
@@ -30,27 +30,40 @@
#include <util/CoreUtils.h>
#include "DeviceHalHidl.h"
-#include "EffectHalHidl.h"
#include "ParameterUtils.h"
#include "StreamHalHidl.h"
-using ::android::hardware::audio::common::CPP_VERSION::implementation::HidlUtils;
+using ::android::hardware::audio::common::COMMON_TYPES_CPP_VERSION::implementation::HidlUtils;
using ::android::hardware::audio::common::utils::EnumBitfield;
-using ::android::hardware::audio::CPP_VERSION::implementation::CoreUtils;
+using ::android::hardware::audio::CORE_TYPES_CPP_VERSION::implementation::CoreUtils;
using ::android::hardware::hidl_string;
using ::android::hardware::hidl_vec;
namespace android {
-namespace CPP_VERSION {
-using namespace ::android::hardware::audio::common::CPP_VERSION;
-using namespace ::android::hardware::audio::CPP_VERSION;
+using namespace ::android::hardware::audio::common::COMMON_TYPES_CPP_VERSION;
+using namespace ::android::hardware::audio::CORE_TYPES_CPP_VERSION;
-using EffectHalHidl = ::android::effect::CPP_VERSION::EffectHalHidl;
+DeviceHalHidl::DeviceHalHidl(const sp<::android::hardware::audio::CPP_VERSION::IDevice>& device)
+ : ConversionHelperHidl("Device"), mDevice(device) {
+}
-DeviceHalHidl::DeviceHalHidl(const sp<IDevice>& device)
- : ConversionHelperHidl("Device"), mDevice(device),
- mPrimaryDevice(IPrimaryDevice::castFrom(device)) {
+DeviceHalHidl::DeviceHalHidl(
+ const sp<::android::hardware::audio::CPP_VERSION::IPrimaryDevice>& device)
+ : ConversionHelperHidl("Device"),
+#if MAJOR_VERSION <= 6 || (MAJOR_VERSION == 7 && MINOR_VERSION == 0)
+ mDevice(device),
+#endif
+ mPrimaryDevice(device) {
+#if MAJOR_VERSION == 7 && MINOR_VERSION == 1
+ auto getDeviceRet = mPrimaryDevice->getDevice();
+ if (getDeviceRet.isOk()) {
+ mDevice = getDeviceRet;
+ } else {
+ ALOGE("Call to IPrimaryDevice.getDevice has failed: %s",
+ getDeviceRet.description().c_str());
+ }
+#endif
}
DeviceHalHidl::~DeviceHalHidl() {
@@ -209,12 +222,17 @@
return status;
}
Result retval = Result::NOT_INITIALIZED;
+#if MAJOR_VERSION == 7 && MINOR_VERSION == 1
+ Return<void> ret = mDevice->openOutputStream_7_1(
+#else
Return<void> ret = mDevice->openOutputStream(
+#endif
handle, hidlDevice, hidlConfig, hidlFlags,
#if MAJOR_VERSION >= 4
{} /* metadata */,
#endif
- [&](Result r, const sp<IStreamOut>& result, const AudioConfig& suggestedConfig) {
+ [&](Result r, const sp<::android::hardware::audio::CPP_VERSION::IStreamOut>& result,
+ const AudioConfig& suggestedConfig) {
retval = r;
if (retval == Result::OK) {
*outStream = new StreamOutHalHidl(result);
@@ -282,9 +300,14 @@
sinkMetadata.tracks[0].destination.device(std::move(hidlOutputDevice));
}
#endif
+#if MAJOR_VERSION == 7 && MINOR_VERSION == 1
+ Return<void> ret = mDevice->openInputStream_7_1(
+#else
Return<void> ret = mDevice->openInputStream(
+#endif
handle, hidlDevice, hidlConfig, hidlFlags, sinkMetadata,
- [&](Result r, const sp<IStreamIn>& result, const AudioConfig& suggestedConfig) {
+ [&](Result r, const sp<::android::hardware::audio::CPP_VERSION::IStreamIn>& result,
+ const AudioConfig& suggestedConfig) {
retval = r;
if (retval == Result::OK) {
*inStream = new StreamInHalHidl(result);
@@ -432,8 +455,7 @@
audio_port_handle_t device, sp<EffectHalInterface> effect) {
if (mDevice == 0) return NO_INIT;
return processReturn("addDeviceEffect", mDevice->addDeviceEffect(
- static_cast<AudioPortHandle>(device),
- static_cast<EffectHalHidl*>(effect.get())->effectId()));
+ static_cast<AudioPortHandle>(device), effect->effectId()));
}
#else
status_t DeviceHalHidl::addDeviceEffect(
@@ -447,8 +469,7 @@
audio_port_handle_t device, sp<EffectHalInterface> effect) {
if (mDevice == 0) return NO_INIT;
return processReturn("removeDeviceEffect", mDevice->removeDeviceEffect(
- static_cast<AudioPortHandle>(device),
- static_cast<EffectHalHidl*>(effect.get())->effectId()));
+ static_cast<AudioPortHandle>(device), effect->effectId()));
}
#else
status_t DeviceHalHidl::removeDeviceEffect(
@@ -480,5 +501,4 @@
return processReturn("dump", ret);
}
-} // namespace CPP_VERSION
} // namespace android
diff --git a/media/libaudiohal/impl/DeviceHalHidl.h b/media/libaudiohal/impl/DeviceHalHidl.h
index 2694ab3..d2b31d4 100644
--- a/media/libaudiohal/impl/DeviceHalHidl.h
+++ b/media/libaudiohal/impl/DeviceHalHidl.h
@@ -20,15 +20,11 @@
#include PATH(android/hardware/audio/FILE_VERSION/IDevice.h)
#include PATH(android/hardware/audio/FILE_VERSION/IPrimaryDevice.h)
#include <media/audiohal/DeviceHalInterface.h>
+#include <media/audiohal/EffectHalInterface.h>
#include "ConversionHelperHidl.h"
-using ::android::hardware::audio::CPP_VERSION::IDevice;
-using ::android::hardware::audio::CPP_VERSION::IPrimaryDevice;
-using ::android::hardware::Return;
-
namespace android {
-namespace CPP_VERSION {
class DeviceHalHidl : public DeviceHalInterface, public ConversionHelperHidl
{
@@ -123,11 +119,14 @@
private:
friend class DevicesFactoryHalHidl;
- sp<IDevice> mDevice;
- sp<IPrimaryDevice> mPrimaryDevice; // Null if it's not a primary device.
+ sp<::android::hardware::audio::CPP_VERSION::IDevice> mDevice;
+ sp<::android::hardware::audio::CPP_VERSION::IPrimaryDevice> mPrimaryDevice;
+ // Null if it's not a primary device.
// Can not be constructed directly by clients.
- explicit DeviceHalHidl(const sp<IDevice>& device);
+ explicit DeviceHalHidl(const sp<::android::hardware::audio::CPP_VERSION::IDevice>& device);
+ explicit DeviceHalHidl(
+ const sp<::android::hardware::audio::CPP_VERSION::IPrimaryDevice>& device);
// The destructor automatically closes the device.
virtual ~DeviceHalHidl();
@@ -135,7 +134,6 @@
template <typename HalPort> status_t getAudioPortImpl(HalPort *port);
};
-} // namespace CPP_VERSION
} // namespace android
#endif // ANDROID_HARDWARE_DEVICE_HAL_HIDL_H
diff --git a/media/libaudiohal/impl/DeviceHalLocal.cpp b/media/libaudiohal/impl/DeviceHalLocal.cpp
index e0304af..1384c1e 100644
--- a/media/libaudiohal/impl/DeviceHalLocal.cpp
+++ b/media/libaudiohal/impl/DeviceHalLocal.cpp
@@ -23,7 +23,6 @@
#include "StreamHalLocal.h"
namespace android {
-namespace CPP_VERSION {
DeviceHalLocal::DeviceHalLocal(audio_hw_device_t *dev)
: mDev(dev) {
@@ -245,5 +244,4 @@
mDev->close_input_stream(mDev, stream_in);
}
-} // namespace CPP_VERSION
} // namespace android
diff --git a/media/libaudiohal/impl/DeviceHalLocal.h b/media/libaudiohal/impl/DeviceHalLocal.h
index 2fde936..a75ce2e 100644
--- a/media/libaudiohal/impl/DeviceHalLocal.h
+++ b/media/libaudiohal/impl/DeviceHalLocal.h
@@ -21,7 +21,6 @@
#include <media/audiohal/DeviceHalInterface.h>
namespace android {
-namespace CPP_VERSION {
class DeviceHalLocal : public DeviceHalInterface
{
@@ -131,7 +130,6 @@
virtual ~DeviceHalLocal();
};
-} // namespace CPP_VERSION
} // namespace android
#endif // ANDROID_HARDWARE_DEVICE_HAL_LOCAL_H
diff --git a/media/libaudiohal/impl/DevicesFactoryHalHidl.cpp b/media/libaudiohal/impl/DevicesFactoryHalHidl.cpp
index 1c0eacb..f475729 100644
--- a/media/libaudiohal/impl/DevicesFactoryHalHidl.cpp
+++ b/media/libaudiohal/impl/DevicesFactoryHalHidl.cpp
@@ -31,14 +31,13 @@
#include "DevicesFactoryHalHidl.h"
using ::android::hardware::audio::CPP_VERSION::IDevice;
-using ::android::hardware::audio::CPP_VERSION::Result;
+using ::android::hardware::audio::CORE_TYPES_CPP_VERSION::Result;
using ::android::hardware::Return;
using ::android::hardware::Void;
using ::android::hidl::manager::V1_0::IServiceManager;
using ::android::hidl::manager::V1_0::IServiceNotification;
namespace android {
-namespace CPP_VERSION {
class ServiceNotificationListener : public IServiceNotification {
public:
@@ -115,14 +114,37 @@
if (status != OK) return status;
Result retval = Result::NOT_INITIALIZED;
for (const auto& factory : factories) {
- Return<void> ret = factory->openDevice(
- hidlId,
- [&](Result r, const sp<IDevice>& result) {
- retval = r;
- if (retval == Result::OK) {
- *device = new DeviceHalHidl(result);
- }
- });
+ Return<void> ret;
+ if (strcmp(name, AUDIO_HARDWARE_MODULE_ID_PRIMARY) == 0) {
+ // In V7.1 it's not possible to cast IDevice back to IPrimaryDevice,
+ // thus openPrimaryDevice must be used.
+#if MAJOR_VERSION == 7 && MINOR_VERSION == 1
+ ret = factory->openPrimaryDevice_7_1(
+#else
+ ret = factory->openPrimaryDevice(
+#endif
+ [&](Result r,
+ const sp<::android::hardware::audio::CPP_VERSION::IPrimaryDevice>& result) {
+ retval = r;
+ if (retval == Result::OK) {
+ *device = new DeviceHalHidl(result);
+ }
+ });
+ } else {
+#if MAJOR_VERSION == 7 && MINOR_VERSION == 1
+ ret = factory->openDevice_7_1(
+#else
+ ret = factory->openDevice(
+#endif
+ hidlId,
+ [&](Result r,
+ const sp<::android::hardware::audio::CPP_VERSION::IDevice>& result) {
+ retval = r;
+ if (retval == Result::OK) {
+ *device = new DeviceHalHidl(result);
+ }
+ });
+ }
if (!ret.isOk()) return FAILED_TRANSACTION;
switch (retval) {
// Device was found and was initialized successfully.
@@ -178,7 +200,8 @@
return NO_ERROR;
}
-void DevicesFactoryHalHidl::addDeviceFactory(sp<IDevicesFactory> factory, bool needToNotify) {
+void DevicesFactoryHalHidl::addDeviceFactory(
+ sp<::android::hardware::audio::CPP_VERSION::IDevicesFactory> factory, bool needToNotify) {
// It is assumed that the DevicesFactoryHalInterface instance is owned
// by AudioFlinger and thus have the same lifespan.
factory->linkToDeath(HalDeathHandler::getInstance(), 0 /*cookie*/);
@@ -198,10 +221,10 @@
}
}
-std::vector<sp<IDevicesFactory>> DevicesFactoryHalHidl::copyDeviceFactories() {
+std::vector<sp<::android::hardware::audio::CPP_VERSION::IDevicesFactory>>
+ DevicesFactoryHalHidl::copyDeviceFactories() {
std::lock_guard<std::mutex> lock(mLock);
return mDeviceFactories;
}
-} // namespace CPP_VERSION
} // namespace android
diff --git a/media/libaudiohal/impl/DevicesFactoryHalHidl.h b/media/libaudiohal/impl/DevicesFactoryHalHidl.h
index 6f84efe..fd8dbc4 100644
--- a/media/libaudiohal/impl/DevicesFactoryHalHidl.h
+++ b/media/libaudiohal/impl/DevicesFactoryHalHidl.h
@@ -30,7 +30,6 @@
using ::android::hardware::audio::CPP_VERSION::IDevicesFactory;
namespace android {
-namespace CPP_VERSION {
class DevicesFactoryHalHidl : public DevicesFactoryHalInterface
{
@@ -59,7 +58,6 @@
virtual ~DevicesFactoryHalHidl() = default;
};
-} // namespace CPP_VERSION
} // namespace android
#endif // ANDROID_HARDWARE_DEVICES_FACTORY_HAL_HIDL_H
diff --git a/media/libaudiohal/impl/DevicesFactoryHalHybrid.cpp b/media/libaudiohal/impl/DevicesFactoryHalHybrid.cpp
index cde8d85..d684c27 100644
--- a/media/libaudiohal/impl/DevicesFactoryHalHybrid.cpp
+++ b/media/libaudiohal/impl/DevicesFactoryHalHybrid.cpp
@@ -22,7 +22,6 @@
#include "DevicesFactoryHalLocal.h"
namespace android {
-namespace CPP_VERSION {
DevicesFactoryHalHybrid::DevicesFactoryHalHybrid(sp<IDevicesFactory> hidlFactory)
: mLocalFactory(new DevicesFactoryHalLocal()),
@@ -51,11 +50,9 @@
return INVALID_OPERATION;
}
-} // namespace CPP_VERSION
-
extern "C" __attribute__((visibility("default"))) void* createIDevicesFactory() {
auto service = hardware::audio::CPP_VERSION::IDevicesFactory::getService();
- return service ? new CPP_VERSION::DevicesFactoryHalHybrid(service) : nullptr;
+ return service ? new DevicesFactoryHalHybrid(service) : nullptr;
}
} // namespace android
diff --git a/media/libaudiohal/impl/DevicesFactoryHalHybrid.h b/media/libaudiohal/impl/DevicesFactoryHalHybrid.h
index 568a1fb..6b2b845 100644
--- a/media/libaudiohal/impl/DevicesFactoryHalHybrid.h
+++ b/media/libaudiohal/impl/DevicesFactoryHalHybrid.h
@@ -25,7 +25,6 @@
using ::android::hardware::audio::CPP_VERSION::IDevicesFactory;
namespace android {
-namespace CPP_VERSION {
class DevicesFactoryHalHybrid : public DevicesFactoryHalInterface
{
@@ -45,7 +44,6 @@
sp<DevicesFactoryHalInterface> mHidlFactory;
};
-} // namespace CPP_VERSION
} // namespace android
#endif // ANDROID_HARDWARE_DEVICES_FACTORY_HAL_HYBRID_H
diff --git a/media/libaudiohal/impl/DevicesFactoryHalLocal.cpp b/media/libaudiohal/impl/DevicesFactoryHalLocal.cpp
index af67ff5..13a9acd 100644
--- a/media/libaudiohal/impl/DevicesFactoryHalLocal.cpp
+++ b/media/libaudiohal/impl/DevicesFactoryHalLocal.cpp
@@ -26,7 +26,6 @@
#include "DevicesFactoryHalLocal.h"
namespace android {
-namespace CPP_VERSION {
static status_t load_audio_interface(const char *if_name, audio_hw_device_t **dev)
{
@@ -67,5 +66,4 @@
return rc;
}
-} // namespace CPP_VERSION
} // namespace android
diff --git a/media/libaudiohal/impl/DevicesFactoryHalLocal.h b/media/libaudiohal/impl/DevicesFactoryHalLocal.h
index 32bf362..eacf109 100644
--- a/media/libaudiohal/impl/DevicesFactoryHalLocal.h
+++ b/media/libaudiohal/impl/DevicesFactoryHalLocal.h
@@ -24,7 +24,6 @@
#include "DeviceHalLocal.h"
namespace android {
-namespace CPP_VERSION {
class DevicesFactoryHalLocal : public DevicesFactoryHalInterface
{
@@ -50,7 +49,6 @@
virtual ~DevicesFactoryHalLocal() {}
};
-} // namespace CPP_VERSION
} // namespace android
#endif // ANDROID_HARDWARE_DEVICES_FACTORY_HAL_LOCAL_H
diff --git a/media/libaudiohal/impl/EffectBufferHalHidl.cpp b/media/libaudiohal/impl/EffectBufferHalHidl.cpp
index 5367972..65297af 100644
--- a/media/libaudiohal/impl/EffectBufferHalHidl.cpp
+++ b/media/libaudiohal/impl/EffectBufferHalHidl.cpp
@@ -31,7 +31,6 @@
namespace android {
namespace effect {
-namespace CPP_VERSION {
// static
uint64_t EffectBufferHalHidl::makeUniqueId() {
@@ -144,5 +143,4 @@
}
} // namespace effect
-} // namespace CPP_VERSION
} // namespace android
diff --git a/media/libaudiohal/impl/EffectBufferHalHidl.h b/media/libaudiohal/impl/EffectBufferHalHidl.h
index 4826813..a9df68b 100644
--- a/media/libaudiohal/impl/EffectBufferHalHidl.h
+++ b/media/libaudiohal/impl/EffectBufferHalHidl.h
@@ -28,7 +28,6 @@
namespace android {
namespace effect {
-namespace CPP_VERSION {
using namespace ::android::hardware::audio::effect::CPP_VERSION;
@@ -74,7 +73,6 @@
status_t init();
};
-} // namespace CPP_VERSION
} // namespace effect
} // namespace android
diff --git a/media/libaudiohal/impl/EffectHalHidl.cpp b/media/libaudiohal/impl/EffectHalHidl.cpp
index 51ad146..1bb1e5f 100644
--- a/media/libaudiohal/impl/EffectHalHidl.cpp
+++ b/media/libaudiohal/impl/EffectHalHidl.cpp
@@ -36,7 +36,6 @@
namespace android {
namespace effect {
-namespace CPP_VERSION {
using namespace ::android::hardware::audio::common::CPP_VERSION;
using namespace ::android::hardware::audio::effect::CPP_VERSION;
@@ -310,6 +309,5 @@
return result;
}
-} // namespace CPP_VERSION
} // namespace effect
} // namespace android
diff --git a/media/libaudiohal/impl/EffectHalHidl.h b/media/libaudiohal/impl/EffectHalHidl.h
index 8e46638..07745db 100644
--- a/media/libaudiohal/impl/EffectHalHidl.h
+++ b/media/libaudiohal/impl/EffectHalHidl.h
@@ -28,7 +28,6 @@
namespace android {
namespace effect {
-namespace CPP_VERSION {
using namespace ::android::hardware::audio::effect::CPP_VERSION;
@@ -63,7 +62,7 @@
virtual status_t dump(int fd);
- uint64_t effectId() const { return mEffectId; }
+ virtual uint64_t effectId() const { return mEffectId; }
private:
friend class EffectsFactoryHalHidl;
@@ -96,7 +95,6 @@
status_t setProcessBuffers();
};
-} // namespace CPP_VERSION
} // namespace effect
} // namespace android
diff --git a/media/libaudiohal/impl/EffectsFactoryHalHidl.cpp b/media/libaudiohal/impl/EffectsFactoryHalHidl.cpp
index ffe0d72..90954b2 100644
--- a/media/libaudiohal/impl/EffectsFactoryHalHidl.cpp
+++ b/media/libaudiohal/impl/EffectsFactoryHalHidl.cpp
@@ -33,7 +33,6 @@
namespace android {
namespace effect {
-namespace CPP_VERSION {
using namespace ::android::hardware::audio::common::CPP_VERSION;
using namespace ::android::hardware::audio::effect::CPP_VERSION;
@@ -204,12 +203,11 @@
return EffectBufferHalHidl::mirror(external, size, buffer);
}
-} // namespace CPP_VERSION
} // namespace effect
extern "C" __attribute__((visibility("default"))) void* createIEffectsFactory() {
auto service = hardware::audio::effect::CPP_VERSION::IEffectsFactory::getService();
- return service ? new effect::CPP_VERSION::EffectsFactoryHalHidl(service) : nullptr;
+ return service ? new effect::EffectsFactoryHalHidl(service) : nullptr;
}
} // namespace android
diff --git a/media/libaudiohal/impl/EffectsFactoryHalHidl.h b/media/libaudiohal/impl/EffectsFactoryHalHidl.h
index ff26d9f..7491133 100644
--- a/media/libaudiohal/impl/EffectsFactoryHalHidl.h
+++ b/media/libaudiohal/impl/EffectsFactoryHalHidl.h
@@ -24,10 +24,9 @@
namespace android {
namespace effect {
-namespace CPP_VERSION {
using ::android::hardware::hidl_vec;
-using ::android::CPP_VERSION::ConversionHelperHidl;
+using ::android::ConversionHelperHidl;
using namespace ::android::hardware::audio::effect::CPP_VERSION;
class EffectsFactoryHalHidl : public EffectsFactoryHalInterface, public ConversionHelperHidl
@@ -70,7 +69,6 @@
status_t queryAllDescriptors();
};
-} // namespace CPP_VERSION
} // namespace effect
} // namespace android
diff --git a/media/libaudiohal/impl/ParameterUtils.h b/media/libaudiohal/impl/ParameterUtils.h
index 9cab72e..b5dcb9d 100644
--- a/media/libaudiohal/impl/ParameterUtils.h
+++ b/media/libaudiohal/impl/ParameterUtils.h
@@ -16,17 +16,16 @@
#pragma once
-#include PATH(android/hardware/audio/FILE_VERSION/types.h)
+#include PATH(android/hardware/audio/CORE_TYPES_FILE_VERSION/types.h)
#include <hidl/HidlSupport.h>
-using ::android::hardware::audio::CPP_VERSION::ParameterValue;
-using ::android::hardware::audio::CPP_VERSION::Result;
+using ::android::hardware::audio::CORE_TYPES_CPP_VERSION::ParameterValue;
+using ::android::hardware::audio::CORE_TYPES_CPP_VERSION::Result;
using ::android::hardware::Return;
using ::android::hardware::hidl_vec;
using ::android::hardware::hidl_string;
namespace android {
-namespace CPP_VERSION {
namespace utils {
#if MAJOR_VERSION == 2
@@ -56,5 +55,4 @@
#endif
} // namespace utils
-} // namespace CPP_VERSION
} // namespace android
diff --git a/media/libaudiohal/impl/StreamHalHidl.cpp b/media/libaudiohal/impl/StreamHalHidl.cpp
index e63aded..b2f1cf3 100644
--- a/media/libaudiohal/impl/StreamHalHidl.cpp
+++ b/media/libaudiohal/impl/StreamHalHidl.cpp
@@ -23,29 +23,26 @@
#include <mediautils/SchedulingPolicyService.h>
#include <utils/Log.h>
-#include PATH(android/hardware/audio/FILE_VERSION/IStreamOutCallback.h)
+#include PATH(android/hardware/audio/CORE_TYPES_FILE_VERSION/IStreamOutCallback.h)
#include <HidlUtils.h>
#include <util/CoreUtils.h>
#include "DeviceHalHidl.h"
-#include "EffectHalHidl.h"
#include "ParameterUtils.h"
#include "StreamHalHidl.h"
-using ::android::hardware::audio::common::CPP_VERSION::implementation::HidlUtils;
-using ::android::hardware::audio::CPP_VERSION::implementation::CoreUtils;
+using ::android::hardware::audio::common::COMMON_TYPES_CPP_VERSION::implementation::HidlUtils;
+using ::android::hardware::audio::CORE_TYPES_CPP_VERSION::implementation::CoreUtils;
using ::android::hardware::MQDescriptorSync;
using ::android::hardware::Return;
using ::android::hardware::Void;
namespace android {
-namespace CPP_VERSION {
-using EffectHalHidl = ::android::effect::CPP_VERSION::EffectHalHidl;
using ReadCommand = ::android::hardware::audio::CPP_VERSION::IStreamIn::ReadCommand;
-using namespace ::android::hardware::audio::common::CPP_VERSION;
-using namespace ::android::hardware::audio::CPP_VERSION;
+using namespace ::android::hardware::audio::common::COMMON_TYPES_CPP_VERSION;
+using namespace ::android::hardware::audio::CORE_TYPES_CPP_VERSION;
StreamHalHidl::StreamHalHidl(IStream *stream)
: ConversionHelperHidl("Stream"),
@@ -137,14 +134,12 @@
status_t StreamHalHidl::addEffect(sp<EffectHalInterface> effect) {
if (!mStream) return NO_INIT;
- return processReturn("addEffect", mStream->addEffect(
- static_cast<EffectHalHidl*>(effect.get())->effectId()));
+ return processReturn("addEffect", mStream->addEffect(effect->effectId()));
}
status_t StreamHalHidl::removeEffect(sp<EffectHalInterface> effect) {
if (!mStream) return NO_INIT;
- return processReturn("removeEffect", mStream->removeEffect(
- static_cast<EffectHalHidl*>(effect.get())->effectId()));
+ return processReturn("removeEffect", mStream->removeEffect(effect->effectId()));
}
status_t StreamHalHidl::standby() {
@@ -328,7 +323,8 @@
} // namespace
-StreamOutHalHidl::StreamOutHalHidl(const sp<IStreamOut>& stream)
+StreamOutHalHidl::StreamOutHalHidl(
+ const sp<::android::hardware::audio::CPP_VERSION::IStreamOut>& stream)
: StreamHalHidl(stream.get()), mStream(stream), mWriterClient(0), mEfGroup(nullptr) {
}
@@ -644,7 +640,11 @@
#elif MAJOR_VERSION >= 4
status_t StreamOutHalHidl::updateSourceMetadata(
const StreamOutHalInterface::SourceMetadata& sourceMetadata) {
- CPP_VERSION::SourceMetadata hidlMetadata;
+#if MAJOR_VERSION == 4
+ ::android::hardware::audio::CORE_TYPES_CPP_VERSION::SourceMetadata hidlMetadata;
+#else
+ ::android::hardware::audio::common::COMMON_TYPES_CPP_VERSION::SourceMetadata hidlMetadata;
+#endif
if (status_t status = CoreUtils::sourceMetadataFromHalV7(
sourceMetadata.tracks, true /*ignoreNonVendorTags*/, &hidlMetadata);
status != OK) {
@@ -686,6 +686,7 @@
// Codec format callback is supported starting from audio HAL V6.0
return INVALID_OPERATION;
}
+
#else
status_t StreamOutHalHidl::getDualMonoMode(audio_dual_mono_mode_t* mode) {
@@ -755,7 +756,7 @@
static_cast<TimestretchFallbackMode>(playbackRate.mFallbackMode)}));
}
-#include PATH(android/hardware/audio/FILE_VERSION/IStreamOutEventCallback.h)
+#include PATH(android/hardware/audio/CORE_TYPES_FILE_VERSION/IStreamOutEventCallback.h)
namespace {
@@ -791,6 +792,84 @@
}
#endif
+#if MAJOR_VERSION == 7 && MINOR_VERSION == 1
+using hardware::audio::V7_1::LatencyMode;
+
+status_t StreamOutHalHidl::setLatencyMode(audio_latency_mode_t mode) {
+ if (mStream == 0) return NO_INIT;
+ return processReturn(
+ "setLatencyMode", mStream->setLatencyMode(static_cast<LatencyMode>(mode)));
+};
+
+status_t StreamOutHalHidl::getRecommendedLatencyModes(std::vector<audio_latency_mode_t> *modes) {
+ if (!mStream) return NO_INIT;
+ Result retval;
+ Return<void> ret = mStream->getRecommendedLatencyModes(
+ [&](Result r, hidl_vec<LatencyMode> hidlModes) {
+ retval = r;
+ for (size_t i = 0; i < hidlModes.size(); i++) {
+ modes->push_back(static_cast<audio_latency_mode_t>(hidlModes[i]));
+ }
+ });
+ return processReturn("getRecommendedLatencyModes", ret, retval);
+};
+
+#include PATH(android/hardware/audio/FILE_VERSION/IStreamOutLatencyModeCallback.h)
+
+using hardware::audio::V7_1::IStreamOutLatencyModeCallback;
+
+namespace {
+struct StreamOutLatencyModeCallback : public IStreamOutLatencyModeCallback {
+ StreamOutLatencyModeCallback(const wp<StreamOutHalHidl>& stream) : mStream(stream) {}
+
+ // IStreamOutLatencyModeCallback implementation
+ Return<void> onRecommendedLatencyModeChanged(const hidl_vec<LatencyMode>& hidlModes) override {
+ sp<StreamOutHalHidl> stream = mStream.promote();
+ if (stream != nullptr) {
+ std::vector<audio_latency_mode_t> modes;
+ for (size_t i = 0; i < hidlModes.size(); i++) {
+ modes.push_back(static_cast<audio_latency_mode_t>(hidlModes[i]));
+ }
+ stream->onRecommendedLatencyModeChanged(modes);
+ }
+ return Void();
+ }
+
+ private:
+ wp<StreamOutHalHidl> mStream;
+};
+} // namespace
+
+status_t StreamOutHalHidl::setLatencyModeCallback(
+ const sp<StreamOutHalInterfaceLatencyModeCallback>& callback) {
+
+ if (mStream == nullptr) return NO_INIT;
+ mLatencyModeCallback = callback;
+ status_t status = processReturn(
+ "setLatencyModeCallback",
+ mStream->setLatencyModeCallback(
+ callback.get() == nullptr ? nullptr : new StreamOutLatencyModeCallback(this)));
+ return status;
+};
+
+#else
+
+status_t StreamOutHalHidl::setLatencyMode(audio_latency_mode_t mode __unused) {
+ return INVALID_OPERATION;
+};
+
+status_t StreamOutHalHidl::getRecommendedLatencyModes(
+ std::vector<audio_latency_mode_t> *modes __unused) {
+ return INVALID_OPERATION;
+};
+
+status_t StreamOutHalHidl::setLatencyModeCallback(
+ const sp<StreamOutHalInterfaceLatencyModeCallback>& callback __unused) {
+ return INVALID_OPERATION;
+};
+
+#endif
+
void StreamOutHalHidl::onWriteReady() {
sp<StreamOutHalInterfaceCallback> callback = mCallback.load().promote();
if (callback == 0) return;
@@ -819,8 +898,16 @@
callback->onCodecFormatChanged(metadataBs);
}
+void StreamOutHalHidl::onRecommendedLatencyModeChanged(
+ const std::vector<audio_latency_mode_t>& modes) {
+ sp<StreamOutHalInterfaceLatencyModeCallback> callback = mLatencyModeCallback.load().promote();
+ if (callback == nullptr) return;
+ callback->onRecommendedLatencyModeChanged(modes);
+}
-StreamInHalHidl::StreamInHalHidl(const sp<IStreamIn>& stream)
+
+StreamInHalHidl::StreamInHalHidl(
+ const sp<::android::hardware::audio::CPP_VERSION::IStreamIn>& stream)
: StreamHalHidl(stream.get()), mStream(stream), mReaderClient(0), mEfGroup(nullptr) {
}
@@ -1033,7 +1120,11 @@
status_t StreamInHalHidl::updateSinkMetadata(const
StreamInHalInterface::SinkMetadata& sinkMetadata) {
- CPP_VERSION::SinkMetadata hidlMetadata;
+#if MAJOR_VERSION == 4
+ ::android::hardware::audio::CORE_TYPES_CPP_VERSION::SinkMetadata hidlMetadata;
+#else
+ ::android::hardware::audio::common::COMMON_TYPES_CPP_VERSION::SinkMetadata hidlMetadata;
+#endif
if (status_t status = CoreUtils::sinkMetadataFromHalV7(
sinkMetadata.tracks, true /*ignoreNonVendorTags*/, &hidlMetadata);
status != OK) {
@@ -1068,5 +1159,4 @@
}
#endif
-} // namespace CPP_VERSION
} // namespace android
diff --git a/media/libaudiohal/impl/StreamHalHidl.h b/media/libaudiohal/impl/StreamHalHidl.h
index 6f5dd04..03342ef 100644
--- a/media/libaudiohal/impl/StreamHalHidl.h
+++ b/media/libaudiohal/impl/StreamHalHidl.h
@@ -19,20 +19,19 @@
#include <atomic>
-#include PATH(android/hardware/audio/FILE_VERSION/IStream.h)
+#include PATH(android/hardware/audio/CORE_TYPES_FILE_VERSION/IStream.h)
#include PATH(android/hardware/audio/FILE_VERSION/IStreamIn.h)
#include PATH(android/hardware/audio/FILE_VERSION/IStreamOut.h)
#include <fmq/EventFlag.h>
#include <fmq/MessageQueue.h>
+#include <media/audiohal/EffectHalInterface.h>
#include <media/audiohal/StreamHalInterface.h>
#include <mediautils/Synchronization.h>
#include "ConversionHelperHidl.h"
#include "StreamPowerLog.h"
-using ::android::hardware::audio::CPP_VERSION::IStream;
-using ::android::hardware::audio::CPP_VERSION::IStreamIn;
-using ::android::hardware::audio::CPP_VERSION::IStreamOut;
+using ::android::hardware::audio::CORE_TYPES_CPP_VERSION::IStream;
using ::android::hardware::EventFlag;
using ::android::hardware::MessageQueue;
using ::android::hardware::Return;
@@ -42,7 +41,6 @@
using WriteStatus = ::android::hardware::audio::CPP_VERSION::IStreamOut::WriteStatus;
namespace android {
-namespace CPP_VERSION {
class DeviceHalHidl;
@@ -191,6 +189,13 @@
// Methods used by StreamCodecFormatCallback (HIDL).
void onCodecFormatChanged(const std::basic_string<uint8_t>& metadataBs);
+ status_t setLatencyMode(audio_latency_mode_t mode) override;
+ status_t getRecommendedLatencyModes(std::vector<audio_latency_mode_t> *modes) override;
+ status_t setLatencyModeCallback(
+ const sp<StreamOutHalInterfaceLatencyModeCallback>& callback) override;
+
+ void onRecommendedLatencyModeChanged(const std::vector<audio_latency_mode_t>& modes);
+
private:
friend class DeviceHalHidl;
typedef MessageQueue<WriteCommand, hardware::kSynchronizedReadWrite> CommandMQ;
@@ -199,7 +204,9 @@
mediautils::atomic_wp<StreamOutHalInterfaceCallback> mCallback;
mediautils::atomic_wp<StreamOutHalInterfaceEventCallback> mEventCallback;
- const sp<IStreamOut> mStream;
+ mediautils::atomic_wp<StreamOutHalInterfaceLatencyModeCallback> mLatencyModeCallback;
+
+ const sp<::android::hardware::audio::CPP_VERSION::IStreamOut> mStream;
std::unique_ptr<CommandMQ> mCommandMQ;
std::unique_ptr<DataMQ> mDataMQ;
std::unique_ptr<StatusMQ> mStatusMQ;
@@ -207,7 +214,7 @@
EventFlag* mEfGroup;
// Can not be constructed directly by clients.
- StreamOutHalHidl(const sp<IStreamOut>& stream);
+ StreamOutHalHidl(const sp<::android::hardware::audio::CPP_VERSION::IStreamOut>& stream);
virtual ~StreamOutHalHidl();
@@ -255,7 +262,7 @@
typedef MessageQueue<uint8_t, hardware::kSynchronizedReadWrite> DataMQ;
typedef MessageQueue<ReadStatus, hardware::kSynchronizedReadWrite> StatusMQ;
- const sp<IStreamIn> mStream;
+ const sp<::android::hardware::audio::CPP_VERSION::IStreamIn> mStream;
std::unique_ptr<CommandMQ> mCommandMQ;
std::unique_ptr<DataMQ> mDataMQ;
std::unique_ptr<StatusMQ> mStatusMQ;
@@ -263,7 +270,7 @@
EventFlag* mEfGroup;
// Can not be constructed directly by clients.
- StreamInHalHidl(const sp<IStreamIn>& stream);
+ StreamInHalHidl(const sp<::android::hardware::audio::CPP_VERSION::IStreamIn>& stream);
virtual ~StreamInHalHidl();
@@ -273,7 +280,6 @@
status_t prepareForReading(size_t bufferSize);
};
-} // namespace CPP_VERSION
} // namespace android
#endif // ANDROID_HARDWARE_STREAM_HAL_HIDL_H
diff --git a/media/libaudiohal/impl/StreamHalLocal.cpp b/media/libaudiohal/impl/StreamHalLocal.cpp
index 11fac61..477f510 100644
--- a/media/libaudiohal/impl/StreamHalLocal.cpp
+++ b/media/libaudiohal/impl/StreamHalLocal.cpp
@@ -27,7 +27,6 @@
#include "StreamHalLocal.h"
namespace android {
-namespace CPP_VERSION {
StreamHalLocal::StreamHalLocal(audio_stream_t *stream, sp<DeviceHalLocal> device)
: mDevice(device),
@@ -518,7 +517,4 @@
}
#endif
-} // namespace CPP_VERSION
} // namespace android
-
-
diff --git a/media/libaudiohal/impl/StreamHalLocal.h b/media/libaudiohal/impl/StreamHalLocal.h
index 493c521..770137f 100644
--- a/media/libaudiohal/impl/StreamHalLocal.h
+++ b/media/libaudiohal/impl/StreamHalLocal.h
@@ -21,7 +21,6 @@
#include "StreamPowerLog.h"
namespace android {
-namespace CPP_VERSION {
class DeviceHalLocal;
@@ -169,6 +168,18 @@
status_t setEventCallback(const sp<StreamOutHalInterfaceEventCallback>& callback) override;
+ status_t setLatencyMode(audio_latency_mode_t mode __unused) override {
+ return INVALID_OPERATION;
+ }
+ status_t getRecommendedLatencyModes(
+ std::vector<audio_latency_mode_t> *modes __unused) override {
+ return INVALID_OPERATION;
+ }
+ status_t setLatencyModeCallback(
+ const sp<StreamOutHalInterfaceLatencyModeCallback>& callback __unused) override {
+ return INVALID_OPERATION;
+ }
+
private:
audio_stream_out_t *mStream;
wp<StreamOutHalInterfaceCallback> mCallback;
@@ -246,7 +257,6 @@
void doUpdateSinkMetadataV7(const SinkMetadata& sinkMetadata);
};
-} // namespace CPP_VERSION
} // namespace android
#endif // ANDROID_HARDWARE_STREAM_HAL_LOCAL_H
diff --git a/media/libaudiohal/impl/StreamPowerLog.h b/media/libaudiohal/impl/StreamPowerLog.h
index f6a554b..c08ee47 100644
--- a/media/libaudiohal/impl/StreamPowerLog.h
+++ b/media/libaudiohal/impl/StreamPowerLog.h
@@ -24,7 +24,6 @@
#include <system/audio.h>
namespace android {
-namespace CPP_VERSION {
class StreamPowerLog {
public:
@@ -99,7 +98,6 @@
size_t mFrameSize;
};
-} // namespace CPP_VERSION
} // namespace android
#endif // ANDROID_HARDWARE_STREAM_POWER_LOG_H
diff --git a/media/libaudiohal/include/media/audiohal/EffectHalInterface.h b/media/libaudiohal/include/media/audiohal/EffectHalInterface.h
index 03165bd..2969c92 100644
--- a/media/libaudiohal/include/media/audiohal/EffectHalInterface.h
+++ b/media/libaudiohal/include/media/audiohal/EffectHalInterface.h
@@ -57,6 +57,9 @@
virtual status_t dump(int fd) = 0;
+ // Unique effect ID to use with the core HAL.
+ virtual uint64_t effectId() const = 0;
+
protected:
// Subclasses can not be constructed directly by clients.
EffectHalInterface() {}
diff --git a/media/libaudiohal/include/media/audiohal/StreamHalInterface.h b/media/libaudiohal/include/media/audiohal/StreamHalInterface.h
index 2b5b2db..e12fe77 100644
--- a/media/libaudiohal/include/media/audiohal/StreamHalInterface.h
+++ b/media/libaudiohal/include/media/audiohal/StreamHalInterface.h
@@ -117,6 +117,18 @@
virtual ~StreamOutHalInterfaceEventCallback() {}
};
+class StreamOutHalInterfaceLatencyModeCallback : public virtual RefBase {
+public:
+ /**
+ * Called with the new list of supported latency modes when a change occurs.
+ */
+ virtual void onRecommendedLatencyModeChanged(std::vector<audio_latency_mode_t> modes) = 0;
+
+protected:
+ StreamOutHalInterfaceLatencyModeCallback() {}
+ virtual ~StreamOutHalInterfaceLatencyModeCallback() {}
+};
+
class StreamOutHalInterface : public virtual StreamHalInterface {
public:
// Return the audio hardware driver estimated latency in milliseconds.
@@ -194,6 +206,42 @@
virtual status_t setEventCallback(const sp<StreamOutHalInterfaceEventCallback>& callback) = 0;
+ /**
+ * Indicates the requested latency mode for this output stream.
+ *
+ * The requested mode can be one of the modes returned by the
+ * getRecommendedLatencyModes() API.
+ *
+ * @param mode the requested latency mode.
+ * @return operation completion status.
+ */
+ virtual status_t setLatencyMode(audio_latency_mode_t mode) = 0;
+
+ /**
+ * Indicates which latency modes are currently supported on this output stream.
+ * If the transport protocol (e.g. Bluetooth A2DP) used by this output stream to reach
+ * the output device supports variable latency modes, the HAL indicates which
+ * modes are currently supported.
+ * The framework can then call setLatencyMode() with one of the supported modes to select
+ * the desired operation mode.
+ *
+ * @param modes currently supported latency modes.
+ * @return operation completion status.
+ */
+ virtual status_t getRecommendedLatencyModes(std::vector<audio_latency_mode_t> *modes) = 0;
+
+ /**
+ * Set the callback interface for notifying changes in supported latency modes.
+ *
+ * Calling this method with a null pointer will result in releasing
+ * the callback.
+ *
+ * @param callback the registered callback or null to unregister.
+ * @return operation completion status.
+ */
+ virtual status_t setLatencyModeCallback(
+ const sp<StreamOutHalInterfaceLatencyModeCallback>& callback) = 0;
+
protected:
virtual ~StreamOutHalInterface() {}
};
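
To make the call flow of the new latency-mode methods concrete, here is a minimal mock in plain C++. The types below (MockStreamOut, Client, LatencyMode) are hypothetical stand-ins for the HAL interfaces and for audio_latency_mode_t, not framework code; they only demonstrate the register-callback / receive-recommendations / select-mode sequence the doc comments above describe.

// Hypothetical mock of the latency-mode flow; not the real HAL types.
#include <cstdio>
#include <memory>
#include <utility>
#include <vector>

enum class LatencyMode { FREE, LOW };  // stand-in for audio_latency_mode_t

struct LatencyModeCallback {
    virtual ~LatencyModeCallback() = default;
    virtual void onRecommendedLatencyModeChanged(std::vector<LatencyMode> modes) = 0;
};

struct MockStreamOut {
    std::shared_ptr<LatencyModeCallback> callback;
    std::vector<LatencyMode> recommended{LatencyMode::FREE};

    void setLatencyModeCallback(std::shared_ptr<LatencyModeCallback> cb) {
        callback = std::move(cb);      // passing null would unregister, per the docs
    }
    std::vector<LatencyMode> getRecommendedLatencyModes() const { return recommended; }
    void setLatencyMode(LatencyMode mode) {
        std::printf("stream now using latency mode %d\n", static_cast<int>(mode));
    }
    // Simulates the HAL reporting that low-latency output has become possible.
    void simulateTransportChange() {
        recommended = {LatencyMode::FREE, LatencyMode::LOW};
        if (callback) callback->onRecommendedLatencyModeChanged(recommended);
    }
};

struct Client : LatencyModeCallback {
    MockStreamOut* stream;
    explicit Client(MockStreamOut* s) : stream(s) {}
    void onRecommendedLatencyModeChanged(std::vector<LatencyMode> modes) override {
        stream->setLatencyMode(modes.back());  // pick the lowest-latency recommended mode
    }
};

int main() {
    MockStreamOut stream;
    auto client = std::make_shared<Client>(&stream);
    stream.setLatencyModeCallback(client);
    stream.simulateTransportChange();
    return 0;
}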
diff --git a/media/libeffects/lvm/lib/Eq/src/LVEQNB_Control.cpp b/media/libeffects/lvm/lib/Eq/src/LVEQNB_Control.cpp
index 1eadd27..ccef5ab 100644
--- a/media/libeffects/lvm/lib/Eq/src/LVEQNB_Control.cpp
+++ b/media/libeffects/lvm/lib/Eq/src/LVEQNB_Control.cpp
@@ -135,7 +135,6 @@
LVM_UINT32 fs =
(LVM_UINT32)LVEQNB_SampleRateTab[(LVM_UINT16)pParams->SampleRate]; /* Sample rate */
LVM_UINT32 fc; /* Filter centre frequency */
- LVM_INT16 QFactor; /* Filter Q factor */
pInstance->NBands = pParams->NBands;
@@ -144,7 +143,6 @@
* Get the filter settings
*/
fc = (LVM_UINT32)pParams->pBandDefinition[i].Frequency; /* Get the band centre frequency */
- QFactor = (LVM_INT16)pParams->pBandDefinition[i].QFactor; /* Get the band Q factor */
pInstance->pBiquadType[i] = LVEQNB_SinglePrecision_Float; /* Default to single precision */
diff --git a/media/libeffects/lvm/lib/SpectrumAnalyzer/src/LVPSA_Control.cpp b/media/libeffects/lvm/lib/SpectrumAnalyzer/src/LVPSA_Control.cpp
index 8e63502..ffed6d4 100644
--- a/media/libeffects/lvm/lib/SpectrumAnalyzer/src/LVPSA_Control.cpp
+++ b/media/libeffects/lvm/lib/SpectrumAnalyzer/src/LVPSA_Control.cpp
@@ -421,7 +421,6 @@
* Intermediate variables and temporary values
*/
LVM_FLOAT T0;
- LVM_FLOAT D;
LVM_FLOAT A0;
LVM_FLOAT B1;
LVM_FLOAT B2;
@@ -444,9 +443,6 @@
* Calculating the intermediate values
*/
T0 = Frequency * LVPSA_Float_TwoPiOnFsTable[Fs]; /* T0 = 2 * Pi * Fc / Fs */
- D = 3200; /* Floating point value 1.000000 (1*100*2^5) */
- /* Force D = 1 : the function was originally used for a peaking filter.
- The D parameter do not exist for a BandPass filter coefficients */
/*
* Calculate the B2 coefficient
@@ -535,7 +531,6 @@
* Intermediate variables and temporary values
*/
LVM_FLOAT T0;
- LVM_FLOAT D;
LVM_FLOAT A0;
LVM_FLOAT B1;
LVM_FLOAT B2;
@@ -558,9 +553,6 @@
* Calculating the intermediate values
*/
T0 = Frequency * LVPSA_Float_TwoPiOnFsTable[Fs]; /* T0 = 2 * Pi * Fc / Fs */
- D = 3200; /* Floating point value 1.000000 (1*100*2^5) */
- /* Force D = 1 : the function was originally used for a peaking filter.
- The D parameter do not exist for a BandPass filter coefficients */
/*
* Calculate the B2 coefficient
diff --git a/media/libeffects/preprocessing/.clang-format b/media/libeffects/preprocessing/.clang-format
deleted file mode 120000
index f1b4f69..0000000
--- a/media/libeffects/preprocessing/.clang-format
+++ /dev/null
@@ -1 +0,0 @@
-../../../../../build/soong/scripts/system-clang-format
\ No newline at end of file
diff --git a/media/libeffects/preprocessing/tests/correlation.cpp b/media/libeffects/preprocessing/tests/correlation.cpp
index eb56fc3..0853673 100644
--- a/media/libeffects/preprocessing/tests/correlation.cpp
+++ b/media/libeffects/preprocessing/tests/correlation.cpp
@@ -36,7 +36,7 @@
const int16_t* sigY, int len,
int16_t enableCrossCorr) {
float maxCorrVal = 0.f, prevCorrVal = 0.f;
- int delay = 0, peakIndex = 0, flag = 0;
+ int peakIndex = 0, flag = 0;
int loopLim = (1 == enableCrossCorr) ? len : kMinLoopLimitValue;
std::vector<int> peakIndexVect(kNumPeaks, 0);
std::vector<float> peakValueVect(kNumPeaks, 0.f);
@@ -47,7 +47,6 @@
}
corrVal /= len - i;
if (corrVal > maxCorrVal) {
- delay = i;
maxCorrVal = corrVal;
}
// Correlation peaks are expected to be observed at equal intervals. The interval length is
diff --git a/media/libheif/HeifDecoderImpl.cpp b/media/libheif/HeifDecoderImpl.cpp
index 4a96e7b..50f1bf2 100644
--- a/media/libheif/HeifDecoderImpl.cpp
+++ b/media/libheif/HeifDecoderImpl.cpp
@@ -15,6 +15,7 @@
*/
//#define LOG_NDEBUG 0
+#include "include/HeifDecoderAPI.h"
#define LOG_TAG "HeifDecoderImpl"
#include "HeifDecoderImpl.h"
@@ -471,7 +472,7 @@
}
bool HeifDecoderImpl::setOutputColor(HeifColorFormat heifColor) {
- if (heifColor == mOutputColor) {
+ if (heifColor == (HeifColorFormat)mOutputColor) {
return true;
}
diff --git a/media/libmedia/Android.bp b/media/libmedia/Android.bp
index e98d7d8..9c1b563 100644
--- a/media/libmedia/Android.bp
+++ b/media/libmedia/Android.bp
@@ -378,12 +378,12 @@
],
static_libs: [
- "resourcemanager_aidl_interface-ndk_platform",
+ "resourcemanager_aidl_interface-ndk",
"framework-permission-aidl-cpp",
],
export_static_lib_headers: [
- "resourcemanager_aidl_interface-ndk_platform",
+ "resourcemanager_aidl_interface-ndk",
"framework-permission-aidl-cpp",
],
diff --git a/media/libmedia/mediaplayer.cpp b/media/libmedia/mediaplayer.cpp
index 1c9b9e4..5215c1b 100644
--- a/media/libmedia/mediaplayer.cpp
+++ b/media/libmedia/mediaplayer.cpp
@@ -949,6 +949,9 @@
mVideoWidth = ext1;
mVideoHeight = ext2;
break;
+ case MEDIA_STARTED:
+ ALOGV("Received media started message");
+ break;
case MEDIA_NOTIFY_TIME:
ALOGV("Received notify time message");
break;
diff --git a/media/libmedia/xsd/vts/OWNERS b/media/libmedia/xsd/vts/OWNERS
new file mode 100644
index 0000000..9af2eba
--- /dev/null
+++ b/media/libmedia/xsd/vts/OWNERS
@@ -0,0 +1,2 @@
+# Bug component: 151862
+sundongahn@google.com
diff --git a/media/libmediahelper/Android.bp b/media/libmediahelper/Android.bp
index 9b54199..a433fc6 100644
--- a/media/libmediahelper/Android.bp
+++ b/media/libmediahelper/Android.bp
@@ -20,7 +20,7 @@
},
apex_available: [
"//apex_available:platform",
- "com.android.bluetooth.updatable",
+ "com.android.bluetooth",
"com.android.media",
"com.android.media.swcodec",
],
diff --git a/media/libmediahelper/AudioValidator.cpp b/media/libmediahelper/AudioValidator.cpp
index 7eddbe1..5a0d517 100644
--- a/media/libmediahelper/AudioValidator.cpp
+++ b/media/libmediahelper/AudioValidator.cpp
@@ -47,8 +47,7 @@
const effect_descriptor_t& desc, std::string_view bugNumber)
{
status_t status = NO_ERROR;
- if (checkStringOverflow(desc.name)
- | /* always */ checkStringOverflow(desc.implementor)) {
+ if (checkStringOverflow(desc.name) || checkStringOverflow(desc.implementor)) {
status = BAD_VALUE;
}
return safetyNetLog(status, bugNumber);
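
The hunk above replaces a deliberate bitwise '|' (annotated "always" because it forced both overflow checks to run) with a logical '||', which short-circuits once the first check reports an overflow. A throwaway two-call demo of the difference, unrelated to the real checkStringOverflow() helpers:

// Demonstrates | versus || over bool-returning calls; names are throwaway.
#include <cstdio>

static bool check(const char* label) {
    std::printf("evaluated %s\n", label);
    return true;  // pretend this check "fired"
}

int main() {
    bool a = check("name") |  check("implementor");  // both calls always run
    bool b = check("name") || check("implementor");  // second call is skipped
    return (a && b) ? 0 : 1;
}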
diff --git a/media/libmediaplayerservice/Android.bp b/media/libmediaplayerservice/Android.bp
index f55678d..c416a51 100644
--- a/media/libmediaplayerservice/Android.bp
+++ b/media/libmediaplayerservice/Android.bp
@@ -65,6 +65,7 @@
"libstagefright_foundation",
"libstagefright_httplive",
"libutils",
+ "packagemanager_aidl-cpp",
],
header_libs: [
diff --git a/media/libmediaplayerservice/StagefrightMetadataRetriever.cpp b/media/libmediaplayerservice/StagefrightMetadataRetriever.cpp
index 2aabd53..8c86e16 100644
--- a/media/libmediaplayerservice/StagefrightMetadataRetriever.cpp
+++ b/media/libmediaplayerservice/StagefrightMetadataRetriever.cpp
@@ -225,10 +225,26 @@
"media.stagefright.thumbnail.prefer_hw_codecs", false);
uint32_t flags = preferhw ? 0 : MediaCodecList::kPreferSoftwareCodecs;
Vector<AString> matchingCodecs;
+ sp<AMessage> format = new AMessage;
+ status_t err = convertMetaDataToMessage(trackMeta, &format);
+ if (err != OK) {
+ format = NULL;
+ }
+
+ // If decoding a thumbnail, check that the decoder supports the thumbnail dimensions instead
+ int32_t thumbHeight, thumbWidth;
+ if (thumbnail && format != NULL
+ && trackMeta->findInt32(kKeyThumbnailHeight, &thumbHeight)
+ && trackMeta->findInt32(kKeyThumbnailWidth, &thumbWidth)) {
+ format->setInt32("height", thumbHeight);
+ format->setInt32("width", thumbWidth);
+ }
+
MediaCodecList::findMatchingCodecs(
mime,
false, /* encoder */
flags,
+ format,
&matchingCodecs);
for (size_t i = 0; i < matchingCodecs.size(); ++i) {
@@ -348,11 +364,18 @@
bool preferhw = property_get_bool(
"media.stagefright.thumbnail.prefer_hw_codecs", false);
uint32_t flags = preferhw ? 0 : MediaCodecList::kPreferSoftwareCodecs;
+ sp<AMessage> format = new AMessage;
+ status_t err = convertMetaDataToMessage(trackMeta, &format);
+ if (err != OK) {
+ format = NULL;
+ }
+
Vector<AString> matchingCodecs;
MediaCodecList::findMatchingCodecs(
mime,
false, /* encoder */
flags,
+ format,
&matchingCodecs);
for (size_t i = 0; i < matchingCodecs.size(); ++i) {
diff --git a/media/libmediaplayerservice/StagefrightRecorder.cpp b/media/libmediaplayerservice/StagefrightRecorder.cpp
index bffd7b3..6347b7a 100644
--- a/media/libmediaplayerservice/StagefrightRecorder.cpp
+++ b/media/libmediaplayerservice/StagefrightRecorder.cpp
@@ -16,6 +16,8 @@
//#define LOG_NDEBUG 0
#define LOG_TAG "StagefrightRecorder"
+#define ATRACE_TAG ATRACE_TAG_VIDEO
+#include <utils/Trace.h>
#include <inttypes.h>
// TODO/workaround: including base logging now as it conflicts with ADebug.h
// and it must be included first.
@@ -1856,6 +1858,7 @@
// Set up the appropriate MediaSource depending on the chosen option
status_t StagefrightRecorder::setupMediaSource(
sp<MediaSource> *mediaSource) {
+ ATRACE_CALL();
if (mVideoSource == VIDEO_SOURCE_DEFAULT
|| mVideoSource == VIDEO_SOURCE_CAMERA) {
sp<CameraSource> cameraSource;
@@ -1936,6 +1939,7 @@
status_t StagefrightRecorder::setupVideoEncoder(
const sp<MediaSource> &cameraSource,
sp<MediaCodecSource> *source) {
+ ATRACE_CALL();
source->clear();
sp<AMessage> format = new AMessage();
@@ -2114,6 +2118,7 @@
}
status_t StagefrightRecorder::setupAudioEncoder(const sp<MediaWriter>& writer) {
+ ATRACE_CALL();
status_t status = BAD_VALUE;
if (OK != (status = checkAudioEncoderCapabilities())) {
return status;
diff --git a/media/libmediaplayerservice/tests/Android.bp b/media/libmediaplayerservice/tests/Android.bp
index 98626fd..99202b8 100644
--- a/media/libmediaplayerservice/tests/Android.bp
+++ b/media/libmediaplayerservice/tests/Android.bp
@@ -30,7 +30,7 @@
],
static_libs: [
- "resourcemanager_aidl_interface-ndk_platform",
+ "resourcemanager_aidl_interface-ndk",
],
include_dirs: [
diff --git a/media/libmediatranscoding/Android.bp b/media/libmediatranscoding/Android.bp
index 042850c..937650f 100644
--- a/media/libmediatranscoding/Android.bp
+++ b/media/libmediatranscoding/Android.bp
@@ -106,8 +106,8 @@
export_include_dirs: ["include"],
static_libs: [
- "mediatranscoding_aidl_interface-ndk_platform",
- "resourceobserver_aidl_interface-V1-ndk_platform",
+ "mediatranscoding_aidl_interface-ndk",
+ "resourceobserver_aidl_interface-V1-ndk",
"libstatslog_media",
],
diff --git a/media/libmediatranscoding/include/media/ControllerClientInterface.h b/media/libmediatranscoding/include/media/ControllerClientInterface.h
index 9311e2e..ea63da8 100644
--- a/media/libmediatranscoding/include/media/ControllerClientInterface.h
+++ b/media/libmediatranscoding/include/media/ControllerClientInterface.h
@@ -66,7 +66,7 @@
* Returns false if the session doesn't exist, or the client is already requesting the
* session. Returns true otherwise.
*/
- virtual bool addClientUid(ClientIdType clientId, SessionIdType sessionId, uid_t clientUid);
+ virtual bool addClientUid(ClientIdType clientId, SessionIdType sessionId, uid_t clientUid) = 0;
/**
* Retrieves the (unsorted) list of all clients requesting the session identified by
@@ -81,7 +81,7 @@
* Returns false if the session doesn't exist. Returns true otherwise.
*/
virtual bool getClientUids(ClientIdType clientId, SessionIdType sessionId,
- std::vector<int32_t>* out_clientUids);
+ std::vector<int32_t>* out_clientUids) = 0;
protected:
virtual ~ControllerClientInterface() = default;
diff --git a/media/libmediatranscoding/tests/Android.bp b/media/libmediatranscoding/tests/Android.bp
index 603611a..7a6980f 100644
--- a/media/libmediatranscoding/tests/Android.bp
+++ b/media/libmediatranscoding/tests/Android.bp
@@ -31,7 +31,7 @@
],
static_libs: [
- "mediatranscoding_aidl_interface-ndk_platform",
+ "mediatranscoding_aidl_interface-ndk",
"libmediatranscoding",
],
diff --git a/media/libstagefright/ACodec.cpp b/media/libstagefright/ACodec.cpp
index 2c8d8de..7917395 100644
--- a/media/libstagefright/ACodec.cpp
+++ b/media/libstagefright/ACodec.cpp
@@ -3307,10 +3307,12 @@
if (err != OK) {
ALOGE("native_window_set_sideband_stream(%p) failed! (err %d).",
sidebandHandle, err);
- return err;
}
- return OK;
+ native_handle_close(sidebandHandle);
+ native_handle_delete(sidebandHandle);
+
+ return err;
}
status_t ACodec::setVideoPortFormatType(
@@ -5434,6 +5436,7 @@
notify->setInt32("channel-count", params.nChannels);
notify->setInt32("sample-rate", params.nSampleRate);
notify->setInt32("bitrate", params.nBitRate);
+ notify->setInt32("aac-profile", params.eAACProfile);
break;
}
@@ -9208,4 +9211,19 @@
return OK;
}
+status_t ACodec::querySupportedParameters(std::vector<std::string> *names) {
+ if (!names) {
+ return BAD_VALUE;
+ }
+ return OK;
+}
+
+status_t ACodec::subscribeToParameters([[maybe_unused]] const std::vector<std::string> &names) {
+ return OK;
+}
+
+status_t ACodec::unsubscribeFromParameters([[maybe_unused]] const std::vector<std::string> &names) {
+ return OK;
+}
+
} // namespace android
diff --git a/media/libstagefright/Android.bp b/media/libstagefright/Android.bp
index a052a70..e47e7ff 100644
--- a/media/libstagefright/Android.bp
+++ b/media/libstagefright/Android.bp
@@ -109,6 +109,7 @@
srcs: [
"CodecBase.cpp",
+ "DataConverter.cpp",
"FrameRenderTracker.cpp",
"MediaCodecListWriter.cpp",
"SkipCutBuffer.cpp",
@@ -125,6 +126,7 @@
],
shared_libs: [
+ "libaudioutils",
"libgui",
"libhidlallocatorutils",
"liblog",
@@ -266,7 +268,6 @@
"CallbackMediaSource.cpp",
"CameraSource.cpp",
"CameraSourceTimeLapse.cpp",
- "DataConverter.cpp",
"FrameDecoder.cpp",
"HevcUtils.cpp",
"InterfaceUtils.cpp",
@@ -340,6 +341,7 @@
"android.hardware.media.omx@1.0",
"framework-permission-aidl-cpp",
"libaudioclient_aidl_conversion",
+ "packagemanager_aidl-cpp",
],
static_libs: [
diff --git a/media/libstagefright/FrameDecoder.cpp b/media/libstagefright/FrameDecoder.cpp
index a241e29..5da32c9 100644
--- a/media/libstagefright/FrameDecoder.cpp
+++ b/media/libstagefright/FrameDecoder.cpp
@@ -35,6 +35,7 @@
#include <media/stagefright/FrameCaptureProcessor.h>
#include <media/stagefright/MediaBuffer.h>
#include <media/stagefright/MediaCodec.h>
+#include <media/stagefright/MediaCodecConstants.h>
#include <media/stagefright/MediaDefs.h>
#include <media/stagefright/MediaErrors.h>
#include <media/stagefright/Utils.h>
@@ -192,6 +193,13 @@
*dstBpp = 4;
return true;
}
+ case HAL_PIXEL_FORMAT_RGBA_1010102:
+ {
+ *dstFormat = (OMX_COLOR_FORMATTYPE)COLOR_Format32bitABGR2101010;
+ *captureFormat = ui::PixelFormat::RGBA_1010102;
+ *dstBpp = 4;
+ return true;
+ }
default:
{
ALOGE("Unsupported color format: %d", colorFormat);
@@ -262,13 +270,10 @@
}
bool isHDR(const sp<AMessage> &format) {
- uint32_t standard, range, transfer;
+ uint32_t standard, transfer;
if (!format->findInt32("color-standard", (int32_t*)&standard)) {
standard = 0;
}
- if (!format->findInt32("color-range", (int32_t*)&range)) {
- range = 0;
- }
if (!format->findInt32("color-transfer", (int32_t*)&transfer)) {
transfer = 0;
}
@@ -526,8 +531,12 @@
return NULL;
}
- // TODO: Use Flexible color instead
- videoFormat->setInt32("color-format", OMX_COLOR_FormatYUV420Planar);
+ if (dstFormat() == COLOR_Format32bitABGR2101010) {
+ videoFormat->setInt32("color-format", COLOR_FormatYUVP010);
+ } else {
+ // TODO: Use Flexible color instead
+ videoFormat->setInt32("color-format", OMX_COLOR_FormatYUV420Planar);
+ }
// For the thumbnail extraction case, try to allocate single buffer in both
// input and output ports, if seeking to a sync frame. NOTE: This request may
@@ -635,6 +644,11 @@
crop_bottom = height - 1;
}
+ int32_t slice_height;
+ if (outputFormat->findInt32("slice-height", &slice_height) && slice_height > 0) {
+ height = slice_height;
+ }
+
if (mFrame == NULL) {
sp<IMemory> frameMem = allocVideoFrame(
trackMeta(),
@@ -834,8 +848,12 @@
return NULL;
}
- // TODO: Use Flexible color instead
- videoFormat->setInt32("color-format", OMX_COLOR_FormatYUV420Planar);
+ if (dstFormat() == COLOR_Format32bitABGR2101010) {
+ videoFormat->setInt32("color-format", COLOR_FormatYUVP010);
+ } else {
+ // TODO: Use Flexible color instead
+ videoFormat->setInt32("color-format", OMX_COLOR_FormatYUV420Planar);
+ }
if ((mGridRows == 1) && (mGridCols == 1)) {
videoFormat->setInt32("android._num-input-buffers", 1);
@@ -941,6 +959,11 @@
crop_bottom = height - 1;
}
+ int32_t slice_height;
+ if (outputFormat->findInt32("slice-height", &slice_height) && slice_height > 0) {
+ height = slice_height;
+ }
+
int32_t crop_width, crop_height;
crop_width = crop_right - crop_left + 1;
crop_height = crop_bottom - crop_top + 1;
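The FrameDecoder hunks above add a 10-bit path (HAL_PIXEL_FORMAT_RGBA_1010102 mapped to COLOR_Format32bitABGR2101010, with COLOR_FormatYUVP010 requested from the decoder) and grow the frame allocation to the codec-reported slice-height. A small standalone sketch of those two decisions; the numeric constants are assumptions used only so the sketch compiles, the real values come from MediaCodecConstants.h.

    #include <cstdint>

    constexpr int32_t kColorFormatYUV420Planar = 19;   // assumed values, illustration only
    constexpr int32_t kColorFormatYUVP010      = 54;
    constexpr int32_t kColorFormat10BitRGBA    = -1;   // placeholder for COLOR_Format32bitABGR2101010

    // A 10-bit RGBA destination needs a 10-bit YUV intermediate; everything else keeps
    // the legacy planar 4:2:0 request.
    int32_t pickDecoderColorFormat(int32_t dstFormat) {
        return dstFormat == kColorFormat10BitRGBA ? kColorFormatYUVP010 : kColorFormatYUV420Planar;
    }

    // Allocate with the codec's slice-height when it reports one, so copying the decoded
    // rows is not truncated to the display height.
    int32_t pickAllocHeight(int32_t height, int32_t sliceHeight) {
        return sliceHeight > 0 ? sliceHeight : height;
    }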
diff --git a/media/libstagefright/MPEG4Writer.cpp b/media/libstagefright/MPEG4Writer.cpp
index 7c7fcac..1f18377 100644
--- a/media/libstagefright/MPEG4Writer.cpp
+++ b/media/libstagefright/MPEG4Writer.cpp
@@ -156,7 +156,7 @@
bool isHeic() const { return mIsHeic; }
bool isAudio() const { return mIsAudio; }
bool isMPEG4() const { return mIsMPEG4; }
- bool usePrefix() const { return mIsAvc || mIsHevc || mIsHeic; }
+ bool usePrefix() const { return mIsAvc || mIsHevc || mIsHeic || mIsDovi; }
bool isExifData(MediaBufferBase *buffer, uint32_t *tiffHdrOffset) const;
void addChunkOffset(off64_t offset);
void addItemOffsetAndSize(off64_t offset, size_t size, bool isExif);
@@ -164,6 +164,7 @@
TrackId& getTrackId() { return mTrackId; }
status_t dump(int fd, const Vector<String16>& args) const;
static const char *getFourCCForMime(const char *mime);
+ const char *getDoviFourCC() const;
const char *getTrackType() const;
void resetInternal();
int64_t trackMetaDataSize();
@@ -316,6 +317,7 @@
volatile bool mStarted;
bool mIsAvc;
bool mIsHevc;
+ bool mIsDovi;
bool mIsAudio;
bool mIsVideo;
bool mIsHeic;
@@ -370,6 +372,10 @@
uint8_t mProfileCompatible;
uint8_t mLevelIdc;
+ uint8_t mDoviProfile;
+ void *mDoviConfigData;
+ size_t mDoviConfigDataSize;
+
void *mCodecSpecificData;
size_t mCodecSpecificDataSize;
bool mGotAllCodecSpecificData;
@@ -422,6 +428,8 @@
status_t parseHEVCCodecSpecificData(
const uint8_t *data, size_t size, HevcParameterSets ¶mSets);
+ status_t makeDoviCodecSpecificData();
+
// Track authoring progress status
void trackProgressStatus(int64_t timeUs, status_t err = OK);
void initTrackingProgressStatus(MetaData *params);
@@ -459,6 +467,7 @@
void writePaspBox();
void writeAvccBox();
void writeHvccBox();
+ void writeDoviConfigBox();
void writeUrlBox();
void writeDrefBox();
void writeDinfBox();
@@ -617,6 +626,17 @@
return OK;
}
+const char *MPEG4Writer::Track::getDoviFourCC() const {
+ if (mDoviProfile == 5) {
+ return "dvh1";
+ } else if (mDoviProfile == 8) {
+ return "hvc1";
+ } else if (mDoviProfile == 9 || mDoviProfile == 32) {
+ return "avc1";
+ }
+ return (const char*)NULL;
+}
+
// static
const char *MPEG4Writer::Track::getFourCCForMime(const char *mime) {
if (mime == NULL) {
@@ -671,7 +691,9 @@
mIsBackgroundMode |= isBackgroundMode;
}
- if (Track::getFourCCForMime(mime) == NULL) {
+ if (!strcmp(mime, MEDIA_MIMETYPE_VIDEO_DOLBY_VISION)) {
+ ALOGV("Add source mime '%s'", mime);
+ } else if (Track::getFourCCForMime(mime) == NULL) {
ALOGE("Unsupported mime '%s'", mime);
return ERROR_UNSUPPORTED;
}
@@ -2150,6 +2172,8 @@
mMinCttsOffsetTimeUs(0),
mMinCttsOffsetTicks(0),
mMaxCttsOffsetTicks(0),
+ mDoviConfigData(NULL),
+ mDoviConfigDataSize(0),
mCodecSpecificData(NULL),
mCodecSpecificDataSize(0),
mGotAllCodecSpecificData(false),
@@ -2176,6 +2200,7 @@
mMeta->findCString(kKeyMIMEType, &mime);
mIsAvc = !strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_AVC);
mIsHevc = !strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_HEVC);
+ mIsDovi = !strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_DOLBY_VISION);
mIsAudio = !strncasecmp(mime, "audio/", 6);
mIsVideo = !strncasecmp(mime, "video/", 6);
mIsHeic = !strcasecmp(mime, MEDIA_MIMETYPE_IMAGE_ANDROID_HEIC);
@@ -2610,7 +2635,12 @@
!strcasecmp(mime, MEDIA_MIMETYPE_IMAGE_ANDROID_HEIC)) {
mMeta->findData(kKeyHVCC, &type, &data, &size);
} else if (!strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_DOLBY_VISION)) {
- mMeta->findData(kKeyDVCC, &type, &data, &size);
+ makeDoviCodecSpecificData();
+ if (!mMeta->findData(kKeyAVCC, &type, &data, &size) &&
+ !mMeta->findData(kKeyHVCC, &type, &data, &size)) {
+ ALOGE("Failed: No HVCC/AVCC for Dolby Vision ..\n");
+ return;
+ }
} else if (!strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_MPEG4) ||
!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AAC)) {
if (mMeta->findData(kKeyESDS, &type, &data, &size)) {
@@ -2651,6 +2681,11 @@
free(mCodecSpecificData);
mCodecSpecificData = NULL;
}
+
+ if (mDoviConfigData != NULL) {
+ free(mDoviConfigData);
+ mDoviConfigData = NULL;
+ }
}
void MPEG4Writer::Track::initTrackingProgressStatus(MetaData *params) {
@@ -3329,6 +3364,37 @@
return OK;
}
+status_t MPEG4Writer::Track::makeDoviCodecSpecificData() {
+ uint32_t type;
+ const void *data = NULL;
+ size_t size = 0;
+
+ if (mDoviConfigData != NULL) {
+ ALOGE("Already have Dolby Vision codec specific data");
+ return OK;
+ }
+
+ if (!mMeta->findData(kKeyDVCC, &type, &data, &size)
+ && !mMeta->findData(kKeyDVVC, &type, &data, &size)
+ && !mMeta->findData(kKeyDVWC, &type, &data, &size)) {
+ ALOGE("Failed getting Dovi config for Dolby Vision %d", (int)size);
+ return ERROR_MALFORMED;
+ }
+
+ mDoviConfigData = malloc(size);
+ if (mDoviConfigData == NULL) {
+ ALOGE("Failed allocating Dolby Vision config data");
+ return ERROR_MALFORMED;
+ }
+
+ mDoviConfigDataSize = size;
+ memcpy(mDoviConfigData, data, size);
+
+ mDoviProfile = (((char *)data)[2] >> 1) & 0x7f; //getting profile info
+
+ return OK;
+}
+
/*
* Updates the drift time from the audio track so that
* the video track can get the updated drift time information
@@ -3474,6 +3540,23 @@
err = copyCodecSpecificData((const uint8_t *)buffer->data() + buffer->range_offset(),
buffer->range_length());
}
+ if (mIsDovi) {
+ err = makeDoviCodecSpecificData();
+
+ const void *data = NULL;
+ size_t size = 0;
+
+ uint32_t type = 0;
+ if (mDoviProfile == 9) {
+ mMeta->findData(kKeyAVCC, &type, &data, &size);
+ } else if (mDoviProfile < 9) {
+ mMeta->findData(kKeyHVCC, &type, &data, &size);
+ }
+
+ if (data != NULL && copyCodecSpecificData((uint8_t *)data, size) == OK) {
+ mGotAllCodecSpecificData = true;
+ }
+ }
}
buffer->release();
@@ -4173,6 +4256,7 @@
!strcasecmp(MEDIA_MIMETYPE_VIDEO_MPEG4, mime) ||
!strcasecmp(MEDIA_MIMETYPE_VIDEO_AVC, mime) ||
!strcasecmp(MEDIA_MIMETYPE_VIDEO_HEVC, mime) ||
+ !strcasecmp(MEDIA_MIMETYPE_VIDEO_DOLBY_VISION, mime) ||
!strcasecmp(MEDIA_MIMETYPE_IMAGE_ANDROID_HEIC, mime)) {
if (!mCodecSpecificData ||
mCodecSpecificDataSize <= 0) {
@@ -4297,7 +4381,13 @@
const char *mime;
bool success = mMeta->findCString(kKeyMIMEType, &mime);
CHECK(success);
- const char *fourcc = getFourCCForMime(mime);
+ const char *fourcc;
+ if (!strcmp(mime, MEDIA_MIMETYPE_VIDEO_DOLBY_VISION)) {
+ fourcc = getDoviFourCC();
+ } else {
+ fourcc = getFourCCForMime(mime);
+ }
+
if (fourcc == NULL) {
ALOGE("Unknown mime type '%s'.", mime);
TRESPASS();
@@ -4337,6 +4427,13 @@
writeAvccBox();
} else if (!strcasecmp(MEDIA_MIMETYPE_VIDEO_HEVC, mime)) {
writeHvccBox();
+ } else if (!strcasecmp(MEDIA_MIMETYPE_VIDEO_DOLBY_VISION, mime)) {
+ if (mDoviProfile <= 8) {
+ writeHvccBox();
+ } else if (mDoviProfile == 9 || mDoviProfile == 32) {
+ writeAvccBox();
+ }
+ writeDoviConfigBox();
}
writePaspBox();
@@ -4349,30 +4446,31 @@
memset(&aspects, 0, sizeof(aspects));
// Color metadata may have changed.
sp<MetaData> meta = mSource->getFormat();
- // TRICKY: using | instead of || because we want to execute all findInt32-s
- if (meta->findInt32(kKeyColorPrimaries, (int32_t*)&aspects.mPrimaries)
- | meta->findInt32(kKeyTransferFunction, (int32_t*)&aspects.mTransfer)
- | meta->findInt32(kKeyColorMatrix, (int32_t*)&aspects.mMatrixCoeffs)
- | meta->findInt32(kKeyColorRange, (int32_t*)&aspects.mRange)) {
- int32_t primaries, transfer, coeffs;
- bool fullRange;
- ALOGV("primaries=%s transfer=%s matrix=%s range=%s",
- asString(aspects.mPrimaries),
- asString(aspects.mTransfer),
- asString(aspects.mMatrixCoeffs),
- asString(aspects.mRange));
- ColorUtils::convertCodecColorAspectsToIsoAspects(
- aspects, &primaries, &transfer, &coeffs, &fullRange);
- mOwner->beginBox("colr");
- mOwner->writeFourcc("nclx");
- mOwner->writeInt16(primaries);
- mOwner->writeInt16(transfer);
- mOwner->writeInt16(coeffs);
- mOwner->writeInt8(int8_t(fullRange ? 0x80 : 0x0));
- mOwner->endBox(); // colr
- } else {
+ bool findPrimaries = meta->findInt32(kKeyColorPrimaries, (int32_t*)&aspects.mPrimaries);
+ bool findTransfer = meta->findInt32(kKeyTransferFunction, (int32_t*)&aspects.mTransfer);
+ bool findMatrix = meta->findInt32(kKeyColorMatrix, (int32_t*)&aspects.mMatrixCoeffs);
+ bool findRange = meta->findInt32(kKeyColorRange, (int32_t*)&aspects.mRange);
+ if (!findPrimaries && !findTransfer && !findMatrix && !findRange) {
ALOGV("no color information");
+ return;
}
+
+ int32_t primaries, transfer, coeffs;
+ bool fullRange;
+ ALOGV("primaries=%s transfer=%s matrix=%s range=%s",
+ asString(aspects.mPrimaries),
+ asString(aspects.mTransfer),
+ asString(aspects.mMatrixCoeffs),
+ asString(aspects.mRange));
+ ColorUtils::convertCodecColorAspectsToIsoAspects(
+ aspects, &primaries, &transfer, &coeffs, &fullRange);
+ mOwner->beginBox("colr");
+ mOwner->writeFourcc("nclx");
+ mOwner->writeInt16(primaries);
+ mOwner->writeInt16(transfer);
+ mOwner->writeInt16(coeffs);
+ mOwner->writeInt8(int8_t(fullRange ? 0x80 : 0x0));
+ mOwner->endBox(); // colr
}
void MPEG4Writer::Track::writeAudioFourCCBox() {
@@ -4829,12 +4927,11 @@
mOwner->endBox(); // avcC
}
-
void MPEG4Writer::Track::writeHvccBox() {
CHECK(mCodecSpecificData);
CHECK_GE(mCodecSpecificDataSize, 5u);
- // Patch avcc's lengthSize field to match the number
+ // Patch hvcc's lengthSize field to match the number
// of bytes we use to indicate the size of a nal unit.
uint8_t *ptr = (uint8_t *)mCodecSpecificData;
ptr[21] = (ptr[21] & 0xfc) | (mOwner->useNalLengthFour() ? 3 : 1);
@@ -4843,6 +4940,24 @@
mOwner->endBox(); // hvcC
}
+void MPEG4Writer::Track::writeDoviConfigBox() {
+ CHECK(mDoviConfigData);
+ CHECK_EQ(mDoviConfigDataSize, 24u);
+
+ uint8_t *ptr = (uint8_t *)mDoviConfigData;
+ uint8_t profile = (ptr[2] >> 1) & 0x7f;
+
+ if (profile > 10) {
+ mOwner->beginBox("dvwC");
+ } else if (profile > 7) {
+ mOwner->beginBox("dvvC");
+ } else {
+ mOwner->beginBox("dvcC");
+ }
+ mOwner->write(mDoviConfigData, mDoviConfigDataSize);
+ mOwner->endBox(); // dvwC/dvvC/dvcC
+}
+
void MPEG4Writer::Track::writeD263Box() {
mOwner->beginBox("d263");
mOwner->writeInt32(0); // vendor
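The MPEG4Writer hunks above thread Dolby Vision through the existing AVC/HEVC muxing paths: the profile is read from byte 2 of the 24-byte configuration record, and both the sample-entry fourcc and the dvcC/dvvC/dvwC box name are chosen from that profile. Below is a standalone sketch of the byte extraction and the two selections, a condensation of what the hunks do rather than a general Dolby Vision muxer.

    #include <cstdint>

    // Dolby Vision configuration record: the profile lives in the top 7 bits of byte 2.
    uint8_t doviProfileFromConfig(const uint8_t* cfg /* 24-byte dvcC/dvvC/dvwC payload */) {
        return (cfg[2] >> 1) & 0x7f;
    }

    // Config box name, mirroring writeDoviConfigBox(): dvwC for profiles above 10,
    // dvvC for 8..10, dvcC for the legacy profiles.
    const char* doviConfigBoxName(uint8_t profile) {
        if (profile > 10) return "dvwC";
        if (profile > 7)  return "dvvC";
        return "dvcC";
    }

    // Sample-entry fourcc, mirroring getDoviFourCC(): profile 5 has no cross-compatible
    // base layer, 8 is HEVC-based, 9 and 32 are AVC-based.
    const char* doviSampleEntry(uint8_t profile) {
        if (profile == 5) return "dvh1";
        if (profile == 8) return "hvc1";
        if (profile == 9 || profile == 32) return "avc1";
        return nullptr;
    }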
diff --git a/media/libstagefright/MediaCodec.cpp b/media/libstagefright/MediaCodec.cpp
index da581fb..2ef64b3 100644
--- a/media/libstagefright/MediaCodec.cpp
+++ b/media/libstagefright/MediaCodec.cpp
@@ -112,6 +112,13 @@
static const char *kCodecCaptureRate = "android.media.mediacodec.capture-rate";
static const char *kCodecOperatingRate = "android.media.mediacodec.operating-rate";
static const char *kCodecPriority = "android.media.mediacodec.priority";
+static const char *kCodecConfigColorStandard = "android.media.mediacodec.config-color-standard";
+static const char *kCodecConfigColorRange = "android.media.mediacodec.config-color-range";
+static const char *kCodecConfigColorTransfer = "android.media.mediacodec.config-color-transfer";
+static const char *kCodecParsedColorStandard = "android.media.mediacodec.parsed-color-standard";
+static const char *kCodecParsedColorRange = "android.media.mediacodec.parsed-color-range";
+static const char *kCodecParsedColorTransfer = "android.media.mediacodec.parsed-color-transfer";
+static const char *kCodecHDRMetadataFlags = "android.media.mediacodec.hdr-metadata-flags";
// Min/Max QP before shaping
static const char *kCodecOriginalVideoQPIMin = "android.media.mediacodec.original-video-qp-i-min";
@@ -748,6 +755,7 @@
mVideoWidth(0),
mVideoHeight(0),
mRotationDegrees(0),
+ mHDRMetadataFlags(0),
mDequeueInputTimeoutGeneration(0),
mDequeueInputReplyID(0),
mDequeueOutputTimeoutGeneration(0),
@@ -898,6 +906,8 @@
mediametrics_setInt64(mMetricsHandle, kCodecFirstFrameIndexLowLatencyModeOn,
mIndexOfFirstFrameWhenLowLatencyOn);
}
+
+ mediametrics_setInt32(mMetricsHandle, kCodecHDRMetadataFlags, mHDRMetadataFlags);
#if 0
// enable for short term, only while debugging
updateEphemeralMediametrics(mMetricsHandle);
@@ -1568,6 +1578,23 @@
if (format->findInt32("priority", &priority)) {
mediametrics_setInt32(mMetricsHandle, kCodecPriority, priority);
}
+ int32_t colorStandard = -1;
+ if (format->findInt32(KEY_COLOR_STANDARD, &colorStandard)) {
+ mediametrics_setInt32(mMetricsHandle, kCodecConfigColorStandard, colorStandard);
+ }
+ int32_t colorRange = -1;
+ if (format->findInt32(KEY_COLOR_RANGE, &colorRange)) {
+ mediametrics_setInt32(mMetricsHandle, kCodecConfigColorRange, colorRange);
+ }
+ int32_t colorTransfer = -1;
+ if (format->findInt32(KEY_COLOR_TRANSFER, &colorTransfer)) {
+ mediametrics_setInt32(mMetricsHandle, kCodecConfigColorTransfer, colorTransfer);
+ }
+ HDRStaticInfo info;
+ if (ColorUtils::getHDRStaticInfoFromFormat(format, &info)
+ && ColorUtils::isHDRStaticInfoValid(&info)) {
+ mHDRMetadataFlags |= kFlagHDRStaticInfo;
+ }
}
// Prevent possible integer overflow in downstream code.
@@ -3187,8 +3214,11 @@
mediametrics_setInt32(mMetricsHandle, kCodecSecure, 0);
}
- if (mIsVideo) {
- // audio codec is currently ignored.
+ MediaCodecInfo::Attributes attr = mCodecInfo
+ ? mCodecInfo->getAttributes()
+ : MediaCodecInfo::Attributes(0);
+ if (!(attr & MediaCodecInfo::kFlagIsSoftwareOnly)) {
+ // software codec is currently ignored.
mResourceManagerProxy->addResource(
MediaResource::CodecResource(mFlags & kFlagIsSecure, mIsVideo));
}
@@ -3504,6 +3534,20 @@
case kWhatDrainThisBuffer:
{
+ if ((mFlags & kFlagUseBlockModel) == 0 && mTunneled) {
+ sp<RefBase> obj;
+ CHECK(msg->findObject("buffer", &obj));
+ sp<MediaCodecBuffer> buffer = static_cast<MediaCodecBuffer *>(obj.get());
+ if (mFlags & kFlagIsAsync) {
+ // In asynchronous mode, output format change is processed immediately.
+ handleOutputFormatChangeIfNeeded(buffer);
+ } else {
+ postActivityNotificationIfPossible();
+ }
+ mBufferChannel->discardBuffer(buffer);
+ break;
+ }
+
/* size_t index = */updateBuffers(kPortIndexOutput, msg);
if (mState == FLUSHING
@@ -4501,6 +4545,9 @@
HDRStaticInfo info;
if (ColorUtils::getHDRStaticInfoFromFormat(mOutputFormat, &info)) {
setNativeWindowHdrMetadata(mSurface.get(), &info);
+ if (ColorUtils::isHDRStaticInfoValid(&info)) {
+ mHDRMetadataFlags |= kFlagHDRStaticInfo;
+ }
}
}
@@ -4509,6 +4556,7 @@
&& hdr10PlusInfo != nullptr && hdr10PlusInfo->size() > 0) {
native_window_set_buffers_hdr10_plus_metadata(mSurface.get(),
hdr10PlusInfo->size(), hdr10PlusInfo->data());
+ mHDRMetadataFlags |= kFlagHDR10PlusInfo;
}
if (mime.startsWithIgnoreCase("video/")) {
@@ -4553,6 +4601,21 @@
mCrypto->notifyResolution(width, height);
}
}
+
+ if (mMetricsHandle != 0) {
+ int32_t colorStandard = -1;
+ if (format->findInt32(KEY_COLOR_STANDARD, &colorStandard)) {
+ mediametrics_setInt32(mMetricsHandle, kCodecParsedColorStandard, colorStandard);
+ }
+ int32_t colorRange = -1;
+ if (format->findInt32( KEY_COLOR_RANGE, &colorRange)) {
+ mediametrics_setInt32(mMetricsHandle, kCodecParsedColorRange, colorRange);
+ }
+ int32_t colorTransfer = -1;
+ if (format->findInt32(KEY_COLOR_TRANSFER, &colorTransfer)) {
+ mediametrics_setInt32(mMetricsHandle, kCodecParsedColorTransfer, colorTransfer);
+ }
+ }
}
void MediaCodec::extractCSD(const sp<AMessage> &format) {
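The MediaCodec hunks above record configured and parsed color aspects in mediametrics and accumulate an HDR metadata bitmask: one flag when valid HDR static info is seen at configure time or on an output-format change, another when per-frame HDR10+ payloads arrive. A minimal sketch of that accumulation; the bit values are assumptions, the real kFlagHDRStaticInfo/kFlagHDR10PlusInfo constants are defined in the MediaCodec headers.

    #include <cstdint>

    enum : uint32_t {
        kFlagHDRStaticInfoSketch = 1u << 0,   // assumed bit positions, illustration only
        kFlagHDR10PlusInfoSketch = 1u << 1,
    };

    struct HdrMetrics {
        uint32_t flags = 0;

        void onValidStaticInfo()  { flags |= kFlagHDRStaticInfoSketch; }  // configure() or format change
        void onHdr10PlusPayload() { flags |= kFlagHDR10PlusInfoSketch; }  // per-frame dynamic metadata

        // Reported once, alongside the other codec metrics, when the codec is torn down.
        uint32_t reportAndReset() { uint32_t f = flags; flags = 0; return f; }
    };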
diff --git a/media/libstagefright/MediaCodecList.cpp b/media/libstagefright/MediaCodecList.cpp
index 6243828..2ffe728 100644
--- a/media/libstagefright/MediaCodecList.cpp
+++ b/media/libstagefright/MediaCodecList.cpp
@@ -509,6 +509,29 @@
}
}
}
+
+ int32_t profile = -1;
+ if (format->findInt32("profile", &profile)) {
+ int32_t level = -1;
+ format->findInt32("level", &level);
+ Vector<MediaCodecInfo::ProfileLevel> profileLevels;
+ capabilities->getSupportedProfileLevels(&profileLevels);
+ auto it = profileLevels.begin();
+ for (; it != profileLevels.end(); ++it) {
+ if (profile != it->mProfile) {
+ continue;
+ }
+ if (level > -1 && level > it->mLevel) {
+ continue;
+ }
+ break;
+ }
+
+ if (it == profileLevels.end()) {
+ ALOGV("Codec does not support profile %d with level %d", profile, level);
+ return false;
+ }
+ }
}
// haven't found a reason to discard this one
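The new MediaCodecList check above drops a codec when the requested format names a profile (and optionally a level) that none of the codec's advertised ProfileLevel pairs can satisfy: a pair matches when its profile is equal and, if a level was requested, its level is at least the requested one. The same rule as a standalone sketch:

    #include <cstdint>
    #include <vector>

    struct ProfileLevel { int32_t profile; int32_t level; };

    // Returns true when some advertised pair has the requested profile and, if a level
    // was requested (level > -1), a level at least as high.
    bool supportsProfileLevel(const std::vector<ProfileLevel>& supported,
                              int32_t profile, int32_t level) {
        for (const auto& pl : supported) {
            if (pl.profile != profile) continue;
            if (level > -1 && level > pl.level) continue;
            return true;
        }
        return false;
    }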
diff --git a/media/libstagefright/MediaCodecSource.cpp b/media/libstagefright/MediaCodecSource.cpp
index 0107c32..b07f8f7 100644
--- a/media/libstagefright/MediaCodecSource.cpp
+++ b/media/libstagefright/MediaCodecSource.cpp
@@ -943,10 +943,17 @@
sp<MediaCodecBuffer> outbuf;
status_t err = mEncoder->getOutputBuffer(index, &outbuf);
- if (err != OK || outbuf == NULL || outbuf->data() == NULL
- || outbuf->size() == 0) {
+ if (err != OK || outbuf == NULL || outbuf->data() == NULL) {
signalEOS();
break;
+ } else if (outbuf->size() == 0) {
+ // Zero length CSD buffers are not treated as an error
+ if (flags & MediaCodec::BUFFER_FLAG_CODECCONFIG) {
+ mEncoder->releaseOutputBuffer(index);
+ } else {
+ signalEOS();
+ }
+ break;
}
MediaBufferBase *mbuf = new MediaBuffer(outbuf->size());
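The MediaCodecSource hunk above narrows the end-of-stream condition: an empty output buffer that carries the codec-config flag is simply released back to the encoder, while an empty data buffer (or a failed getOutputBuffer()) still ends the stream. A hedged sketch of that classification; the flag value mirrors MediaCodec::BUFFER_FLAG_CODECCONFIG and is assumed here.

    #include <cstddef>
    #include <cstdint>

    constexpr uint32_t kBufferFlagCodecConfig = 2; // assumed to match BUFFER_FLAG_CODECCONFIG

    enum class OutputAction { kForward, kRelease, kSignalEos };

    // Decide what to do with a dequeued encoder output buffer.
    OutputAction classifyOutput(bool getBufferFailed, size_t size, uint32_t flags) {
        if (getBufferFailed) return OutputAction::kSignalEos;
        if (size == 0) {
            // Empty codec-config buffers are harmless; only empty data buffers end the stream.
            return (flags & kBufferFlagCodecConfig) ? OutputAction::kRelease
                                                    : OutputAction::kSignalEos;
        }
        return OutputAction::kForward;
    }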
diff --git a/media/libstagefright/SkipCutBuffer.cpp b/media/libstagefright/SkipCutBuffer.cpp
index ee9016d..de91533 100644
--- a/media/libstagefright/SkipCutBuffer.cpp
+++ b/media/libstagefright/SkipCutBuffer.cpp
@@ -145,7 +145,19 @@
if (available < num) {
int32_t newcapacity = mCapacity + (num - available);
char * newbuffer = new char[newcapacity];
- memcpy(newbuffer, mCutBuffer, mCapacity);
+ if (mWriteHead < mReadHead) {
+ // data isn't contiguous; two memcpy calls are needed
+ // to move the existing data into the new buffer.
+ size_t copyLeft = mCapacity - mReadHead;
+ memcpy(newbuffer, mCutBuffer + mReadHead, copyLeft);
+ memcpy(newbuffer + copyLeft, mCutBuffer, mWriteHead);
+ mReadHead = 0;
+ mWriteHead += copyLeft;
+ } else {
+ memcpy(newbuffer, mCutBuffer + mReadHead, mWriteHead - mReadHead);
+ mWriteHead -= mReadHead;
+ mReadHead = 0;
+ }
delete [] mCutBuffer;
mCapacity = newcapacity;
mCutBuffer = newbuffer;
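The SkipCutBuffer fix above handles growth when the ring buffer has wrapped: with the write head behind the read head, the live bytes sit in two runs, so they are copied tail-first and then head, after which the read head resets to zero. The same flattening step as a standalone sketch:

    #include <cstddef>
    #include <cstring>

    // Copy the live contents of a ring buffer (capacity 'cap', data between readHead and
    // writeHead) into 'dst' as one contiguous run. Returns the number of bytes copied,
    // which becomes the new writeHead once readHead is reset to 0.
    size_t flattenRing(const char* ring, size_t cap, size_t readHead, size_t writeHead, char* dst) {
        if (writeHead < readHead) {
            // Wrapped: the tail [readHead, cap) comes first, then the head [0, writeHead).
            size_t tail = cap - readHead;
            memcpy(dst, ring + readHead, tail);
            memcpy(dst + tail, ring, writeHead);
            return tail + writeHead;
        }
        // Not wrapped: a single contiguous run.
        memcpy(dst, ring + readHead, writeHead - readHead);
        return writeHead - readHead;
    }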
diff --git a/media/libstagefright/TEST_MAPPING b/media/libstagefright/TEST_MAPPING
index 7ce2968..91a44d1 100644
--- a/media/libstagefright/TEST_MAPPING
+++ b/media/libstagefright/TEST_MAPPING
@@ -20,13 +20,68 @@
},
{
"exclude-annotation": "android.platform.test.annotations.RequiresDevice"
+ }
+ ]
+ },
+ {
+ "name": "CtsMediaAudioTestCases",
+ "options": [
+ {
+ "include-annotation": "android.platform.test.annotations.Presubmit"
+ },
+ {
+ "exclude-annotation": "android.platform.test.annotations.RequiresDevice"
},
// TODO: b/149314419
{
- "exclude-filter": "android.media.cts.AudioPlaybackCaptureTest"
+ "exclude-filter": "android.media.audio.cts.AudioPlaybackCaptureTest"
},
{
- "exclude-filter": "android.media.cts.AudioRecordTest"
+ "exclude-filter": "android.media.audio.cts.AudioRecordTest"
+ }
+ ]
+ },
+ {
+ "name": "CtsMediaDecoderTestCases",
+ "options": [
+ {
+ "include-annotation": "android.platform.test.annotations.Presubmit"
+ },
+ {
+ "exclude-annotation": "android.platform.test.annotations.RequiresDevice"
+ }
+ ]
+ },
+ {
+ "name": "CtsMediaEncoderTestCases",
+ "options": [
+ {
+ "include-annotation": "android.platform.test.annotations.Presubmit"
+ },
+ {
+ "exclude-annotation": "android.platform.test.annotations.RequiresDevice"
+ }
+ ]
+ },
+ {
+ "name": "CtsMediaCodecTestCases",
+ "options": [
+ {
+ "include-annotation": "android.platform.test.annotations.Presubmit"
+ },
+ {
+ "exclude-annotation": "android.platform.test.annotations.RequiresDevice"
+ }
+ ]
+ },
+ {
+ "name": "CtsMediaPlayerTestCases",
+ "options": [
+ {
+ "include-annotation": "android.platform.test.annotations.Presubmit"
+ },
+ {
+ "exclude-annotation": "android.platform.test.annotations.RequiresDevice"
}
]
}
diff --git a/media/libstagefright/Utils.cpp b/media/libstagefright/Utils.cpp
index 4c18f87..1854588 100644
--- a/media/libstagefright/Utils.cpp
+++ b/media/libstagefright/Utils.cpp
@@ -28,9 +28,6 @@
#include "include/HevcUtils.h"
#include <cutils/properties.h>
-#include <media/openmax/OMX_Audio.h>
-#include <media/openmax/OMX_Video.h>
-#include <media/openmax/OMX_VideoExt.h>
#include <media/stagefright/CodecBase.h>
#include <media/stagefright/foundation/ABuffer.h>
#include <media/stagefright/foundation/ADebug.h>
@@ -57,6 +54,14 @@
#define AMEDIAFORMAT_KEY_MPEGH_COMPATIBLE_SETS \
"mpegh-compatible-sets"
+namespace {
+ // TODO: this should possibly be handled in an else
+ constexpr static int32_t AACObjectNull = 0;
+
+ // TODO: decide if we should just not transmit the level in this case
+ constexpr static int32_t DolbyVisionLevelUnknown = 0;
+}
+
namespace android {
static status_t copyNALUToABuffer(sp<ABuffer> *buffer, const uint8_t *ptr, size_t length) {
@@ -156,21 +161,22 @@
audioObjectType >>= 11;
}
- const static ALookup<uint16_t, OMX_AUDIO_AACPROFILETYPE> profiles {
- { 1, OMX_AUDIO_AACObjectMain },
- { 2, OMX_AUDIO_AACObjectLC },
- { 3, OMX_AUDIO_AACObjectSSR },
- { 4, OMX_AUDIO_AACObjectLTP },
- { 5, OMX_AUDIO_AACObjectHE },
- { 6, OMX_AUDIO_AACObjectScalable },
- { 17, OMX_AUDIO_AACObjectERLC },
- { 23, OMX_AUDIO_AACObjectLD },
- { 29, OMX_AUDIO_AACObjectHE_PS },
- { 39, OMX_AUDIO_AACObjectELD },
- { 42, OMX_AUDIO_AACObjectXHE },
+
+ const static ALookup<uint16_t, int32_t> profiles {
+ { 1, AACObjectMain },
+ { 2, AACObjectLC },
+ { 3, AACObjectSSR },
+ { 4, AACObjectLTP },
+ { 5, AACObjectHE },
+ { 6, AACObjectScalable },
+ { 17, AACObjectERLC },
+ { 23, AACObjectLD },
+ { 29, AACObjectHE_PS },
+ { 39, AACObjectELD },
+ { 42, AACObjectXHE },
};
- OMX_AUDIO_AACPROFILETYPE profile;
+ int32_t profile;
if (profiles.map(audioObjectType, &profile)) {
format->setInt32("profile", profile);
}
@@ -184,53 +190,53 @@
const uint8_t constraints = ptr[2];
const uint8_t level = ptr[3];
- const static ALookup<uint8_t, OMX_VIDEO_AVCLEVELTYPE> levels {
- { 9, OMX_VIDEO_AVCLevel1b }, // technically, 9 is only used for High+ profiles
- { 10, OMX_VIDEO_AVCLevel1 },
- { 11, OMX_VIDEO_AVCLevel11 }, // prefer level 1.1 for the value 11
- { 11, OMX_VIDEO_AVCLevel1b },
- { 12, OMX_VIDEO_AVCLevel12 },
- { 13, OMX_VIDEO_AVCLevel13 },
- { 20, OMX_VIDEO_AVCLevel2 },
- { 21, OMX_VIDEO_AVCLevel21 },
- { 22, OMX_VIDEO_AVCLevel22 },
- { 30, OMX_VIDEO_AVCLevel3 },
- { 31, OMX_VIDEO_AVCLevel31 },
- { 32, OMX_VIDEO_AVCLevel32 },
- { 40, OMX_VIDEO_AVCLevel4 },
- { 41, OMX_VIDEO_AVCLevel41 },
- { 42, OMX_VIDEO_AVCLevel42 },
- { 50, OMX_VIDEO_AVCLevel5 },
- { 51, OMX_VIDEO_AVCLevel51 },
- { 52, OMX_VIDEO_AVCLevel52 },
- { 60, OMX_VIDEO_AVCLevel6 },
- { 61, OMX_VIDEO_AVCLevel61 },
- { 62, OMX_VIDEO_AVCLevel62 },
+ const static ALookup<uint8_t, int32_t> levels {
+ { 9, AVCLevel1b }, // technically, 9 is only used for High+ profiles
+ { 10, AVCLevel1 },
+ { 11, AVCLevel11 }, // prefer level 1.1 for the value 11
+ { 11, AVCLevel1b },
+ { 12, AVCLevel12 },
+ { 13, AVCLevel13 },
+ { 20, AVCLevel2 },
+ { 21, AVCLevel21 },
+ { 22, AVCLevel22 },
+ { 30, AVCLevel3 },
+ { 31, AVCLevel31 },
+ { 32, AVCLevel32 },
+ { 40, AVCLevel4 },
+ { 41, AVCLevel41 },
+ { 42, AVCLevel42 },
+ { 50, AVCLevel5 },
+ { 51, AVCLevel51 },
+ { 52, AVCLevel52 },
+ { 60, AVCLevel6 },
+ { 61, AVCLevel61 },
+ { 62, AVCLevel62 },
};
- const static ALookup<uint8_t, OMX_VIDEO_AVCPROFILETYPE> profiles {
- { 66, OMX_VIDEO_AVCProfileBaseline },
- { 77, OMX_VIDEO_AVCProfileMain },
- { 88, OMX_VIDEO_AVCProfileExtended },
- { 100, OMX_VIDEO_AVCProfileHigh },
- { 110, OMX_VIDEO_AVCProfileHigh10 },
- { 122, OMX_VIDEO_AVCProfileHigh422 },
- { 244, OMX_VIDEO_AVCProfileHigh444 },
+ const static ALookup<uint8_t, int32_t> profiles {
+ { 66, AVCProfileBaseline },
+ { 77, AVCProfileMain },
+ { 88, AVCProfileExtended },
+ { 100, AVCProfileHigh },
+ { 110, AVCProfileHigh10 },
+ { 122, AVCProfileHigh422 },
+ { 244, AVCProfileHigh444 },
};
// set profile & level if they are recognized
- OMX_VIDEO_AVCPROFILETYPE codecProfile;
- OMX_VIDEO_AVCLEVELTYPE codecLevel;
+ int32_t codecProfile;
+ int32_t codecLevel;
if (profiles.map(profile, &codecProfile)) {
if (profile == 66 && (constraints & 0x40)) {
- codecProfile = (OMX_VIDEO_AVCPROFILETYPE)OMX_VIDEO_AVCProfileConstrainedBaseline;
+ codecProfile = AVCProfileConstrainedBaseline;
} else if (profile == 100 && (constraints & 0x0C) == 0x0C) {
- codecProfile = (OMX_VIDEO_AVCPROFILETYPE)OMX_VIDEO_AVCProfileConstrainedHigh;
+ codecProfile = AVCProfileConstrainedHigh;
}
format->setInt32("profile", codecProfile);
if (levels.map(level, &codecLevel)) {
// for 9 && 11 decide level based on profile and constraint_set3 flag
if (level == 11 && (profile == 66 || profile == 77 || profile == 88)) {
- codecLevel = (constraints & 0x10) ? OMX_VIDEO_AVCLevel1b : OMX_VIDEO_AVCLevel11;
+ codecLevel = (constraints & 0x10) ? AVCLevel1b : AVCLevel11;
}
format->setInt32("level", codecLevel);
}
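The Utils.cpp hunks above replace the OMX_VIDEO_* enums with the framework's plain int32_t profile/level constants while keeping the ALookup tables and the constrained-profile overrides driven by the AVC constraint_set bits. Below is a sketch of that lookup-plus-override flow using std::map in place of ALookup; the numeric constants are assumptions standing in for the MediaCodecInfo values.

    #include <cstdint>
    #include <map>
    #include <optional>

    // Illustrative subset of the AVC profile_idc -> framework constant mapping.
    std::optional<int32_t> mapAvcProfile(uint8_t profileIdc, uint8_t constraints) {
        static const std::map<uint8_t, int32_t> kProfiles = {
            {66, 0x01 /* AVCProfileBaseline, assumed value */},
            {77, 0x02 /* AVCProfileMain */},
            {100, 0x08 /* AVCProfileHigh */},
        };
        auto it = kProfiles.find(profileIdc);
        if (it == kProfiles.end()) return std::nullopt;
        int32_t profile = it->second;
        // constraint_set1 turns Baseline into Constrained Baseline; constraint_set4+5
        // turn High into Constrained High (constant values again assumed).
        if (profileIdc == 66 && (constraints & 0x40)) profile = 0x10000;          // ConstrainedBaseline
        if (profileIdc == 100 && (constraints & 0x0C) == 0x0C) profile = 0x80000; // ConstrainedHigh
        return profile;
    }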
@@ -256,41 +262,44 @@
// All Dolby Profiles will have profile and level info in MediaFormat
// Profile 8 and 9 will have bl_compatibility_id too.
- const static ALookup<uint8_t, OMX_VIDEO_DOLBYVISIONPROFILETYPE> profiles{
- {1, OMX_VIDEO_DolbyVisionProfileDvavPen},
- {3, OMX_VIDEO_DolbyVisionProfileDvheDen},
- {4, OMX_VIDEO_DolbyVisionProfileDvheDtr},
- {5, OMX_VIDEO_DolbyVisionProfileDvheStn},
- {6, OMX_VIDEO_DolbyVisionProfileDvheDth},
- {7, OMX_VIDEO_DolbyVisionProfileDvheDtb},
- {8, OMX_VIDEO_DolbyVisionProfileDvheSt},
- {9, OMX_VIDEO_DolbyVisionProfileDvavSe},
- {10, OMX_VIDEO_DolbyVisionProfileDvav110},
+ const static ALookup<uint8_t, int32_t> profiles{
+ {1, DolbyVisionProfileDvavPen},
+ {3, DolbyVisionProfileDvheDen},
+ {4, DolbyVisionProfileDvheDtr},
+ {5, DolbyVisionProfileDvheStn},
+ {6, DolbyVisionProfileDvheDth},
+ {7, DolbyVisionProfileDvheDtb},
+ {8, DolbyVisionProfileDvheSt},
+ {9, DolbyVisionProfileDvavSe},
+ {10, DolbyVisionProfileDvav110},
};
- const static ALookup<uint8_t, OMX_VIDEO_DOLBYVISIONLEVELTYPE> levels{
- {0, OMX_VIDEO_DolbyVisionLevelUnknown},
- {1, OMX_VIDEO_DolbyVisionLevelHd24},
- {2, OMX_VIDEO_DolbyVisionLevelHd30},
- {3, OMX_VIDEO_DolbyVisionLevelFhd24},
- {4, OMX_VIDEO_DolbyVisionLevelFhd30},
- {5, OMX_VIDEO_DolbyVisionLevelFhd60},
- {6, OMX_VIDEO_DolbyVisionLevelUhd24},
- {7, OMX_VIDEO_DolbyVisionLevelUhd30},
- {8, OMX_VIDEO_DolbyVisionLevelUhd48},
- {9, OMX_VIDEO_DolbyVisionLevelUhd60},
+ const static ALookup<uint8_t, int32_t> levels{
+ {0, DolbyVisionLevelUnknown},
+ {1, DolbyVisionLevelHd24},
+ {2, DolbyVisionLevelHd30},
+ {3, DolbyVisionLevelFhd24},
+ {4, DolbyVisionLevelFhd30},
+ {5, DolbyVisionLevelFhd60},
+ {6, DolbyVisionLevelUhd24},
+ {7, DolbyVisionLevelUhd30},
+ {8, DolbyVisionLevelUhd48},
+ {9, DolbyVisionLevelUhd60},
+ {10, DolbyVisionLevelUhd120},
+ {11, DolbyVisionLevel8k30},
+ {12, DolbyVisionLevel8k60},
};
// set rpuAssoc
if (rpu_present_flag && el_present_flag && !bl_present_flag) {
format->setInt32("rpuAssoc", 1);
}
// set profile & level if they are recognized
- OMX_VIDEO_DOLBYVISIONPROFILETYPE codecProfile;
- OMX_VIDEO_DOLBYVISIONLEVELTYPE codecLevel;
+ int32_t codecProfile;
+ int32_t codecLevel;
if (profiles.map(profile, &codecProfile)) {
format->setInt32("profile", codecProfile);
- if (codecProfile == OMX_VIDEO_DolbyVisionProfileDvheSt ||
- codecProfile == OMX_VIDEO_DolbyVisionProfileDvavSe) {
+ if (codecProfile == DolbyVisionProfileDvheSt ||
+ codecProfile == DolbyVisionProfileDvavSe) {
format->setInt32("bl_compatibility_id", bl_compatibility_id);
}
if (levels.map(level, &codecLevel)) {
@@ -307,32 +316,32 @@
const uint8_t profile = ptr[6];
const uint8_t level = ptr[5];
- const static ALookup<uint8_t, OMX_VIDEO_H263PROFILETYPE> profiles {
- { 0, OMX_VIDEO_H263ProfileBaseline },
- { 1, OMX_VIDEO_H263ProfileH320Coding },
- { 2, OMX_VIDEO_H263ProfileBackwardCompatible },
- { 3, OMX_VIDEO_H263ProfileISWV2 },
- { 4, OMX_VIDEO_H263ProfileISWV3 },
- { 5, OMX_VIDEO_H263ProfileHighCompression },
- { 6, OMX_VIDEO_H263ProfileInternet },
- { 7, OMX_VIDEO_H263ProfileInterlace },
- { 8, OMX_VIDEO_H263ProfileHighLatency },
+ const static ALookup<uint8_t, int32_t> profiles {
+ { 0, H263ProfileBaseline },
+ { 1, H263ProfileH320Coding },
+ { 2, H263ProfileBackwardCompatible },
+ { 3, H263ProfileISWV2 },
+ { 4, H263ProfileISWV3 },
+ { 5, H263ProfileHighCompression },
+ { 6, H263ProfileInternet },
+ { 7, H263ProfileInterlace },
+ { 8, H263ProfileHighLatency },
};
- const static ALookup<uint8_t, OMX_VIDEO_H263LEVELTYPE> levels {
- { 10, OMX_VIDEO_H263Level10 },
- { 20, OMX_VIDEO_H263Level20 },
- { 30, OMX_VIDEO_H263Level30 },
- { 40, OMX_VIDEO_H263Level40 },
- { 45, OMX_VIDEO_H263Level45 },
- { 50, OMX_VIDEO_H263Level50 },
- { 60, OMX_VIDEO_H263Level60 },
- { 70, OMX_VIDEO_H263Level70 },
+ const static ALookup<uint8_t, int32_t> levels {
+ { 10, H263Level10 },
+ { 20, H263Level20 },
+ { 30, H263Level30 },
+ { 40, H263Level40 },
+ { 45, H263Level45 },
+ { 50, H263Level50 },
+ { 60, H263Level60 },
+ { 70, H263Level70 },
};
// set profile & level if they are recognized
- OMX_VIDEO_H263PROFILETYPE codecProfile;
- OMX_VIDEO_H263LEVELTYPE codecLevel;
+ int32_t codecProfile;
+ int32_t codecLevel;
if (profiles.map(profile, &codecProfile)) {
format->setInt32("profile", codecProfile);
if (levels.map(level, &codecLevel)) {
@@ -350,59 +359,59 @@
const uint8_t tier = (ptr[1] & 0x20) >> 5;
const uint8_t level = ptr[12];
- const static ALookup<std::pair<uint8_t, uint8_t>, OMX_VIDEO_HEVCLEVELTYPE> levels {
- { { 0, 30 }, OMX_VIDEO_HEVCMainTierLevel1 },
- { { 0, 60 }, OMX_VIDEO_HEVCMainTierLevel2 },
- { { 0, 63 }, OMX_VIDEO_HEVCMainTierLevel21 },
- { { 0, 90 }, OMX_VIDEO_HEVCMainTierLevel3 },
- { { 0, 93 }, OMX_VIDEO_HEVCMainTierLevel31 },
- { { 0, 120 }, OMX_VIDEO_HEVCMainTierLevel4 },
- { { 0, 123 }, OMX_VIDEO_HEVCMainTierLevel41 },
- { { 0, 150 }, OMX_VIDEO_HEVCMainTierLevel5 },
- { { 0, 153 }, OMX_VIDEO_HEVCMainTierLevel51 },
- { { 0, 156 }, OMX_VIDEO_HEVCMainTierLevel52 },
- { { 0, 180 }, OMX_VIDEO_HEVCMainTierLevel6 },
- { { 0, 183 }, OMX_VIDEO_HEVCMainTierLevel61 },
- { { 0, 186 }, OMX_VIDEO_HEVCMainTierLevel62 },
- { { 1, 30 }, OMX_VIDEO_HEVCHighTierLevel1 },
- { { 1, 60 }, OMX_VIDEO_HEVCHighTierLevel2 },
- { { 1, 63 }, OMX_VIDEO_HEVCHighTierLevel21 },
- { { 1, 90 }, OMX_VIDEO_HEVCHighTierLevel3 },
- { { 1, 93 }, OMX_VIDEO_HEVCHighTierLevel31 },
- { { 1, 120 }, OMX_VIDEO_HEVCHighTierLevel4 },
- { { 1, 123 }, OMX_VIDEO_HEVCHighTierLevel41 },
- { { 1, 150 }, OMX_VIDEO_HEVCHighTierLevel5 },
- { { 1, 153 }, OMX_VIDEO_HEVCHighTierLevel51 },
- { { 1, 156 }, OMX_VIDEO_HEVCHighTierLevel52 },
- { { 1, 180 }, OMX_VIDEO_HEVCHighTierLevel6 },
- { { 1, 183 }, OMX_VIDEO_HEVCHighTierLevel61 },
- { { 1, 186 }, OMX_VIDEO_HEVCHighTierLevel62 },
+ const static ALookup<std::pair<uint8_t, uint8_t>, int32_t> levels {
+ { { 0, 30 }, HEVCMainTierLevel1 },
+ { { 0, 60 }, HEVCMainTierLevel2 },
+ { { 0, 63 }, HEVCMainTierLevel21 },
+ { { 0, 90 }, HEVCMainTierLevel3 },
+ { { 0, 93 }, HEVCMainTierLevel31 },
+ { { 0, 120 }, HEVCMainTierLevel4 },
+ { { 0, 123 }, HEVCMainTierLevel41 },
+ { { 0, 150 }, HEVCMainTierLevel5 },
+ { { 0, 153 }, HEVCMainTierLevel51 },
+ { { 0, 156 }, HEVCMainTierLevel52 },
+ { { 0, 180 }, HEVCMainTierLevel6 },
+ { { 0, 183 }, HEVCMainTierLevel61 },
+ { { 0, 186 }, HEVCMainTierLevel62 },
+ { { 1, 30 }, HEVCHighTierLevel1 },
+ { { 1, 60 }, HEVCHighTierLevel2 },
+ { { 1, 63 }, HEVCHighTierLevel21 },
+ { { 1, 90 }, HEVCHighTierLevel3 },
+ { { 1, 93 }, HEVCHighTierLevel31 },
+ { { 1, 120 }, HEVCHighTierLevel4 },
+ { { 1, 123 }, HEVCHighTierLevel41 },
+ { { 1, 150 }, HEVCHighTierLevel5 },
+ { { 1, 153 }, HEVCHighTierLevel51 },
+ { { 1, 156 }, HEVCHighTierLevel52 },
+ { { 1, 180 }, HEVCHighTierLevel6 },
+ { { 1, 183 }, HEVCHighTierLevel61 },
+ { { 1, 186 }, HEVCHighTierLevel62 },
};
- const static ALookup<uint8_t, OMX_VIDEO_HEVCPROFILETYPE> profiles {
- { 1, OMX_VIDEO_HEVCProfileMain },
- { 2, OMX_VIDEO_HEVCProfileMain10 },
+ const static ALookup<uint8_t, int32_t> profiles {
+ { 1, HEVCProfileMain },
+ { 2, HEVCProfileMain10 },
// use Main for Main Still Picture decoding
- { 3, OMX_VIDEO_HEVCProfileMain },
+ { 3, HEVCProfileMain },
};
// set profile & level if they are recognized
- OMX_VIDEO_HEVCPROFILETYPE codecProfile;
- OMX_VIDEO_HEVCLEVELTYPE codecLevel;
+ int32_t codecProfile;
+ int32_t codecLevel;
if (!profiles.map(profile, &codecProfile)) {
if (ptr[2] & 0x40 /* general compatibility flag 1 */) {
// Note that this case covers Main Still Picture too
- codecProfile = OMX_VIDEO_HEVCProfileMain;
+ codecProfile = HEVCProfileMain;
} else if (ptr[2] & 0x20 /* general compatibility flag 2 */) {
- codecProfile = OMX_VIDEO_HEVCProfileMain10;
+ codecProfile = HEVCProfileMain10;
} else {
return;
}
}
// bump to HDR profile
- if (isHdr(format) && codecProfile == OMX_VIDEO_HEVCProfileMain10) {
- codecProfile = OMX_VIDEO_HEVCProfileMain10HDR10;
+ if (isHdr(format) && codecProfile == HEVCProfileMain10) {
+ codecProfile = HEVCProfileMain10HDR10;
}
format->setInt32("profile", codecProfile);
@@ -422,36 +431,36 @@
}
const uint8_t indication = ((seq[4] & 0xF) << 4) | ((seq[5] & 0xF0) >> 4);
- const static ALookup<uint8_t, OMX_VIDEO_MPEG2PROFILETYPE> profiles {
- { 0x50, OMX_VIDEO_MPEG2ProfileSimple },
- { 0x40, OMX_VIDEO_MPEG2ProfileMain },
- { 0x30, OMX_VIDEO_MPEG2ProfileSNR },
- { 0x20, OMX_VIDEO_MPEG2ProfileSpatial },
- { 0x10, OMX_VIDEO_MPEG2ProfileHigh },
+ const static ALookup<uint8_t, int32_t> profiles {
+ { 0x50, MPEG2ProfileSimple },
+ { 0x40, MPEG2ProfileMain },
+ { 0x30, MPEG2ProfileSNR },
+ { 0x20, MPEG2ProfileSpatial },
+ { 0x10, MPEG2ProfileHigh },
};
- const static ALookup<uint8_t, OMX_VIDEO_MPEG2LEVELTYPE> levels {
- { 0x0A, OMX_VIDEO_MPEG2LevelLL },
- { 0x08, OMX_VIDEO_MPEG2LevelML },
- { 0x06, OMX_VIDEO_MPEG2LevelH14 },
- { 0x04, OMX_VIDEO_MPEG2LevelHL },
- { 0x02, OMX_VIDEO_MPEG2LevelHP },
+ const static ALookup<uint8_t, int32_t> levels {
+ { 0x0A, MPEG2LevelLL },
+ { 0x08, MPEG2LevelML },
+ { 0x06, MPEG2LevelH14 },
+ { 0x04, MPEG2LevelHL },
+ { 0x02, MPEG2LevelHP },
};
const static ALookup<uint8_t,
- std::pair<OMX_VIDEO_MPEG2PROFILETYPE, OMX_VIDEO_MPEG2LEVELTYPE>> escapes {
+ std::pair<int32_t, int32_t>> escapes {
/* unsupported
- { 0x8E, { XXX_MPEG2ProfileMultiView, OMX_VIDEO_MPEG2LevelLL } },
- { 0x8D, { XXX_MPEG2ProfileMultiView, OMX_VIDEO_MPEG2LevelML } },
- { 0x8B, { XXX_MPEG2ProfileMultiView, OMX_VIDEO_MPEG2LevelH14 } },
- { 0x8A, { XXX_MPEG2ProfileMultiView, OMX_VIDEO_MPEG2LevelHL } }, */
- { 0x85, { OMX_VIDEO_MPEG2Profile422, OMX_VIDEO_MPEG2LevelML } },
- { 0x82, { OMX_VIDEO_MPEG2Profile422, OMX_VIDEO_MPEG2LevelHL } },
+ { 0x8E, { XXX_MPEG2ProfileMultiView, MPEG2LevelLL } },
+ { 0x8D, { XXX_MPEG2ProfileMultiView, MPEG2LevelML } },
+ { 0x8B, { XXX_MPEG2ProfileMultiView, MPEG2LevelH14 } },
+ { 0x8A, { XXX_MPEG2ProfileMultiView, MPEG2LevelHL } }, */
+ { 0x85, { MPEG2Profile422, MPEG2LevelML } },
+ { 0x82, { MPEG2Profile422, MPEG2LevelHL } },
};
- OMX_VIDEO_MPEG2PROFILETYPE profile;
- OMX_VIDEO_MPEG2LEVELTYPE level;
- std::pair<OMX_VIDEO_MPEG2PROFILETYPE, OMX_VIDEO_MPEG2LEVELTYPE> profileLevel;
+ int32_t profile;
+ int32_t level;
+ std::pair<int32_t, int32_t> profileLevel;
if (escapes.map(indication, &profileLevel)) {
format->setInt32("profile", profileLevel.first);
format->setInt32("level", profileLevel.second);
@@ -468,16 +477,16 @@
// esds seems to only contain the profile for MPEG-2
uint8_t objType;
if (esds.getObjectTypeIndication(&objType) == OK) {
- const static ALookup<uint8_t, OMX_VIDEO_MPEG2PROFILETYPE> profiles{
- { 0x60, OMX_VIDEO_MPEG2ProfileSimple },
- { 0x61, OMX_VIDEO_MPEG2ProfileMain },
- { 0x62, OMX_VIDEO_MPEG2ProfileSNR },
- { 0x63, OMX_VIDEO_MPEG2ProfileSpatial },
- { 0x64, OMX_VIDEO_MPEG2ProfileHigh },
- { 0x65, OMX_VIDEO_MPEG2Profile422 },
+ const static ALookup<uint8_t, int32_t> profiles{
+ { 0x60, MPEG2ProfileSimple },
+ { 0x61, MPEG2ProfileMain },
+ { 0x62, MPEG2ProfileSNR },
+ { 0x63, MPEG2ProfileSpatial },
+ { 0x64, MPEG2ProfileHigh },
+ { 0x65, MPEG2Profile422 },
};
- OMX_VIDEO_MPEG2PROFILETYPE profile;
+ int32_t profile;
if (profiles.map(objType, &profile)) {
format->setInt32("profile", profile);
}
@@ -492,82 +501,82 @@
const uint8_t indication = seq[4];
const static ALookup<uint8_t,
- std::pair<OMX_VIDEO_MPEG4PROFILETYPE, OMX_VIDEO_MPEG4LEVELTYPE>> table {
- { 0b00000001, { OMX_VIDEO_MPEG4ProfileSimple, OMX_VIDEO_MPEG4Level1 } },
- { 0b00000010, { OMX_VIDEO_MPEG4ProfileSimple, OMX_VIDEO_MPEG4Level2 } },
- { 0b00000011, { OMX_VIDEO_MPEG4ProfileSimple, OMX_VIDEO_MPEG4Level3 } },
- { 0b00000100, { OMX_VIDEO_MPEG4ProfileSimple, OMX_VIDEO_MPEG4Level4a } },
- { 0b00000101, { OMX_VIDEO_MPEG4ProfileSimple, OMX_VIDEO_MPEG4Level5 } },
- { 0b00000110, { OMX_VIDEO_MPEG4ProfileSimple, OMX_VIDEO_MPEG4Level6 } },
- { 0b00001000, { OMX_VIDEO_MPEG4ProfileSimple, OMX_VIDEO_MPEG4Level0 } },
- { 0b00001001, { OMX_VIDEO_MPEG4ProfileSimple, OMX_VIDEO_MPEG4Level0b } },
- { 0b00010000, { OMX_VIDEO_MPEG4ProfileSimpleScalable, OMX_VIDEO_MPEG4Level0 } },
- { 0b00010001, { OMX_VIDEO_MPEG4ProfileSimpleScalable, OMX_VIDEO_MPEG4Level1 } },
- { 0b00010010, { OMX_VIDEO_MPEG4ProfileSimpleScalable, OMX_VIDEO_MPEG4Level2 } },
+ std::pair<int32_t, int32_t>> table {
+ { 0b00000001, { MPEG4ProfileSimple, MPEG4Level1 } },
+ { 0b00000010, { MPEG4ProfileSimple, MPEG4Level2 } },
+ { 0b00000011, { MPEG4ProfileSimple, MPEG4Level3 } },
+ { 0b00000100, { MPEG4ProfileSimple, MPEG4Level4a } },
+ { 0b00000101, { MPEG4ProfileSimple, MPEG4Level5 } },
+ { 0b00000110, { MPEG4ProfileSimple, MPEG4Level6 } },
+ { 0b00001000, { MPEG4ProfileSimple, MPEG4Level0 } },
+ { 0b00001001, { MPEG4ProfileSimple, MPEG4Level0b } },
+ { 0b00010000, { MPEG4ProfileSimpleScalable, MPEG4Level0 } },
+ { 0b00010001, { MPEG4ProfileSimpleScalable, MPEG4Level1 } },
+ { 0b00010010, { MPEG4ProfileSimpleScalable, MPEG4Level2 } },
/* unsupported
- { 0b00011101, { XXX_MPEG4ProfileSimpleScalableER, OMX_VIDEO_MPEG4Level0 } },
- { 0b00011110, { XXX_MPEG4ProfileSimpleScalableER, OMX_VIDEO_MPEG4Level1 } },
- { 0b00011111, { XXX_MPEG4ProfileSimpleScalableER, OMX_VIDEO_MPEG4Level2 } }, */
- { 0b00100001, { OMX_VIDEO_MPEG4ProfileCore, OMX_VIDEO_MPEG4Level1 } },
- { 0b00100010, { OMX_VIDEO_MPEG4ProfileCore, OMX_VIDEO_MPEG4Level2 } },
- { 0b00110010, { OMX_VIDEO_MPEG4ProfileMain, OMX_VIDEO_MPEG4Level2 } },
- { 0b00110011, { OMX_VIDEO_MPEG4ProfileMain, OMX_VIDEO_MPEG4Level3 } },
- { 0b00110100, { OMX_VIDEO_MPEG4ProfileMain, OMX_VIDEO_MPEG4Level4 } },
+ { 0b00011101, { XXX_MPEG4ProfileSimpleScalableER, MPEG4Level0 } },
+ { 0b00011110, { XXX_MPEG4ProfileSimpleScalableER, MPEG4Level1 } },
+ { 0b00011111, { XXX_MPEG4ProfileSimpleScalableER, MPEG4Level2 } }, */
+ { 0b00100001, { MPEG4ProfileCore, MPEG4Level1 } },
+ { 0b00100010, { MPEG4ProfileCore, MPEG4Level2 } },
+ { 0b00110010, { MPEG4ProfileMain, MPEG4Level2 } },
+ { 0b00110011, { MPEG4ProfileMain, MPEG4Level3 } },
+ { 0b00110100, { MPEG4ProfileMain, MPEG4Level4 } },
/* deprecated
- { 0b01000010, { OMX_VIDEO_MPEG4ProfileNbit, OMX_VIDEO_MPEG4Level2 } }, */
- { 0b01010001, { OMX_VIDEO_MPEG4ProfileScalableTexture, OMX_VIDEO_MPEG4Level1 } },
- { 0b01100001, { OMX_VIDEO_MPEG4ProfileSimpleFace, OMX_VIDEO_MPEG4Level1 } },
- { 0b01100010, { OMX_VIDEO_MPEG4ProfileSimpleFace, OMX_VIDEO_MPEG4Level2 } },
- { 0b01100011, { OMX_VIDEO_MPEG4ProfileSimpleFBA, OMX_VIDEO_MPEG4Level1 } },
- { 0b01100100, { OMX_VIDEO_MPEG4ProfileSimpleFBA, OMX_VIDEO_MPEG4Level2 } },
- { 0b01110001, { OMX_VIDEO_MPEG4ProfileBasicAnimated, OMX_VIDEO_MPEG4Level1 } },
- { 0b01110010, { OMX_VIDEO_MPEG4ProfileBasicAnimated, OMX_VIDEO_MPEG4Level2 } },
- { 0b10000001, { OMX_VIDEO_MPEG4ProfileHybrid, OMX_VIDEO_MPEG4Level1 } },
- { 0b10000010, { OMX_VIDEO_MPEG4ProfileHybrid, OMX_VIDEO_MPEG4Level2 } },
- { 0b10010001, { OMX_VIDEO_MPEG4ProfileAdvancedRealTime, OMX_VIDEO_MPEG4Level1 } },
- { 0b10010010, { OMX_VIDEO_MPEG4ProfileAdvancedRealTime, OMX_VIDEO_MPEG4Level2 } },
- { 0b10010011, { OMX_VIDEO_MPEG4ProfileAdvancedRealTime, OMX_VIDEO_MPEG4Level3 } },
- { 0b10010100, { OMX_VIDEO_MPEG4ProfileAdvancedRealTime, OMX_VIDEO_MPEG4Level4 } },
- { 0b10100001, { OMX_VIDEO_MPEG4ProfileCoreScalable, OMX_VIDEO_MPEG4Level1 } },
- { 0b10100010, { OMX_VIDEO_MPEG4ProfileCoreScalable, OMX_VIDEO_MPEG4Level2 } },
- { 0b10100011, { OMX_VIDEO_MPEG4ProfileCoreScalable, OMX_VIDEO_MPEG4Level3 } },
- { 0b10110001, { OMX_VIDEO_MPEG4ProfileAdvancedCoding, OMX_VIDEO_MPEG4Level1 } },
- { 0b10110010, { OMX_VIDEO_MPEG4ProfileAdvancedCoding, OMX_VIDEO_MPEG4Level2 } },
- { 0b10110011, { OMX_VIDEO_MPEG4ProfileAdvancedCoding, OMX_VIDEO_MPEG4Level3 } },
- { 0b10110100, { OMX_VIDEO_MPEG4ProfileAdvancedCoding, OMX_VIDEO_MPEG4Level4 } },
- { 0b11000001, { OMX_VIDEO_MPEG4ProfileAdvancedCore, OMX_VIDEO_MPEG4Level1 } },
- { 0b11000010, { OMX_VIDEO_MPEG4ProfileAdvancedCore, OMX_VIDEO_MPEG4Level2 } },
- { 0b11010001, { OMX_VIDEO_MPEG4ProfileAdvancedScalable, OMX_VIDEO_MPEG4Level1 } },
- { 0b11010010, { OMX_VIDEO_MPEG4ProfileAdvancedScalable, OMX_VIDEO_MPEG4Level2 } },
- { 0b11010011, { OMX_VIDEO_MPEG4ProfileAdvancedScalable, OMX_VIDEO_MPEG4Level3 } },
+ { 0b01000010, { MPEG4ProfileNbit, MPEG4Level2 } }, */
+ { 0b01010001, { MPEG4ProfileScalableTexture, MPEG4Level1 } },
+ { 0b01100001, { MPEG4ProfileSimpleFace, MPEG4Level1 } },
+ { 0b01100010, { MPEG4ProfileSimpleFace, MPEG4Level2 } },
+ { 0b01100011, { MPEG4ProfileSimpleFBA, MPEG4Level1 } },
+ { 0b01100100, { MPEG4ProfileSimpleFBA, MPEG4Level2 } },
+ { 0b01110001, { MPEG4ProfileBasicAnimated, MPEG4Level1 } },
+ { 0b01110010, { MPEG4ProfileBasicAnimated, MPEG4Level2 } },
+ { 0b10000001, { MPEG4ProfileHybrid, MPEG4Level1 } },
+ { 0b10000010, { MPEG4ProfileHybrid, MPEG4Level2 } },
+ { 0b10010001, { MPEG4ProfileAdvancedRealTime, MPEG4Level1 } },
+ { 0b10010010, { MPEG4ProfileAdvancedRealTime, MPEG4Level2 } },
+ { 0b10010011, { MPEG4ProfileAdvancedRealTime, MPEG4Level3 } },
+ { 0b10010100, { MPEG4ProfileAdvancedRealTime, MPEG4Level4 } },
+ { 0b10100001, { MPEG4ProfileCoreScalable, MPEG4Level1 } },
+ { 0b10100010, { MPEG4ProfileCoreScalable, MPEG4Level2 } },
+ { 0b10100011, { MPEG4ProfileCoreScalable, MPEG4Level3 } },
+ { 0b10110001, { MPEG4ProfileAdvancedCoding, MPEG4Level1 } },
+ { 0b10110010, { MPEG4ProfileAdvancedCoding, MPEG4Level2 } },
+ { 0b10110011, { MPEG4ProfileAdvancedCoding, MPEG4Level3 } },
+ { 0b10110100, { MPEG4ProfileAdvancedCoding, MPEG4Level4 } },
+ { 0b11000001, { MPEG4ProfileAdvancedCore, MPEG4Level1 } },
+ { 0b11000010, { MPEG4ProfileAdvancedCore, MPEG4Level2 } },
+ { 0b11010001, { MPEG4ProfileAdvancedScalable, MPEG4Level1 } },
+ { 0b11010010, { MPEG4ProfileAdvancedScalable, MPEG4Level2 } },
+ { 0b11010011, { MPEG4ProfileAdvancedScalable, MPEG4Level3 } },
/* unsupported
- { 0b11100001, { XXX_MPEG4ProfileSimpleStudio, OMX_VIDEO_MPEG4Level1 } },
- { 0b11100010, { XXX_MPEG4ProfileSimpleStudio, OMX_VIDEO_MPEG4Level2 } },
- { 0b11100011, { XXX_MPEG4ProfileSimpleStudio, OMX_VIDEO_MPEG4Level3 } },
- { 0b11100100, { XXX_MPEG4ProfileSimpleStudio, OMX_VIDEO_MPEG4Level4 } },
- { 0b11100101, { XXX_MPEG4ProfileCoreStudio, OMX_VIDEO_MPEG4Level1 } },
- { 0b11100110, { XXX_MPEG4ProfileCoreStudio, OMX_VIDEO_MPEG4Level2 } },
- { 0b11100111, { XXX_MPEG4ProfileCoreStudio, OMX_VIDEO_MPEG4Level3 } },
- { 0b11101000, { XXX_MPEG4ProfileCoreStudio, OMX_VIDEO_MPEG4Level4 } },
- { 0b11101011, { XXX_MPEG4ProfileSimpleStudio, OMX_VIDEO_MPEG4Level5 } },
- { 0b11101100, { XXX_MPEG4ProfileSimpleStudio, OMX_VIDEO_MPEG4Level6 } }, */
- { 0b11110000, { OMX_VIDEO_MPEG4ProfileAdvancedSimple, OMX_VIDEO_MPEG4Level0 } },
- { 0b11110001, { OMX_VIDEO_MPEG4ProfileAdvancedSimple, OMX_VIDEO_MPEG4Level1 } },
- { 0b11110010, { OMX_VIDEO_MPEG4ProfileAdvancedSimple, OMX_VIDEO_MPEG4Level2 } },
- { 0b11110011, { OMX_VIDEO_MPEG4ProfileAdvancedSimple, OMX_VIDEO_MPEG4Level3 } },
- { 0b11110100, { OMX_VIDEO_MPEG4ProfileAdvancedSimple, OMX_VIDEO_MPEG4Level4 } },
- { 0b11110101, { OMX_VIDEO_MPEG4ProfileAdvancedSimple, OMX_VIDEO_MPEG4Level5 } },
- { 0b11110111, { OMX_VIDEO_MPEG4ProfileAdvancedSimple, OMX_VIDEO_MPEG4Level3b } },
+ { 0b11100001, { XXX_MPEG4ProfileSimpleStudio, MPEG4Level1 } },
+ { 0b11100010, { XXX_MPEG4ProfileSimpleStudio, MPEG4Level2 } },
+ { 0b11100011, { XXX_MPEG4ProfileSimpleStudio, MPEG4Level3 } },
+ { 0b11100100, { XXX_MPEG4ProfileSimpleStudio, MPEG4Level4 } },
+ { 0b11100101, { XXX_MPEG4ProfileCoreStudio, MPEG4Level1 } },
+ { 0b11100110, { XXX_MPEG4ProfileCoreStudio, MPEG4Level2 } },
+ { 0b11100111, { XXX_MPEG4ProfileCoreStudio, MPEG4Level3 } },
+ { 0b11101000, { XXX_MPEG4ProfileCoreStudio, MPEG4Level4 } },
+ { 0b11101011, { XXX_MPEG4ProfileSimpleStudio, MPEG4Level5 } },
+ { 0b11101100, { XXX_MPEG4ProfileSimpleStudio, MPEG4Level6 } }, */
+ { 0b11110000, { MPEG4ProfileAdvancedSimple, MPEG4Level0 } },
+ { 0b11110001, { MPEG4ProfileAdvancedSimple, MPEG4Level1 } },
+ { 0b11110010, { MPEG4ProfileAdvancedSimple, MPEG4Level2 } },
+ { 0b11110011, { MPEG4ProfileAdvancedSimple, MPEG4Level3 } },
+ { 0b11110100, { MPEG4ProfileAdvancedSimple, MPEG4Level4 } },
+ { 0b11110101, { MPEG4ProfileAdvancedSimple, MPEG4Level5 } },
+ { 0b11110111, { MPEG4ProfileAdvancedSimple, MPEG4Level3b } },
/* deprecated
- { 0b11111000, { XXX_MPEG4ProfileFineGranularityScalable, OMX_VIDEO_MPEG4Level0 } },
- { 0b11111001, { XXX_MPEG4ProfileFineGranularityScalable, OMX_VIDEO_MPEG4Level1 } },
- { 0b11111010, { XXX_MPEG4ProfileFineGranularityScalable, OMX_VIDEO_MPEG4Level2 } },
- { 0b11111011, { XXX_MPEG4ProfileFineGranularityScalable, OMX_VIDEO_MPEG4Level3 } },
- { 0b11111100, { XXX_MPEG4ProfileFineGranularityScalable, OMX_VIDEO_MPEG4Level4 } },
- { 0b11111101, { XXX_MPEG4ProfileFineGranularityScalable, OMX_VIDEO_MPEG4Level5 } }, */
+ { 0b11111000, { XXX_MPEG4ProfileFineGranularityScalable, MPEG4Level0 } },
+ { 0b11111001, { XXX_MPEG4ProfileFineGranularityScalable, MPEG4Level1 } },
+ { 0b11111010, { XXX_MPEG4ProfileFineGranularityScalable, MPEG4Level2 } },
+ { 0b11111011, { XXX_MPEG4ProfileFineGranularityScalable, MPEG4Level3 } },
+ { 0b11111100, { XXX_MPEG4ProfileFineGranularityScalable, MPEG4Level4 } },
+ { 0b11111101, { XXX_MPEG4ProfileFineGranularityScalable, MPEG4Level5 } }, */
};
- std::pair<OMX_VIDEO_MPEG4PROFILETYPE, OMX_VIDEO_MPEG4LEVELTYPE> profileLevel;
+ std::pair<int32_t, int32_t> profileLevel;
if (table.map(indication, &profileLevel)) {
format->setInt32("profile", profileLevel.first);
format->setInt32("level", profileLevel.second);
@@ -590,19 +599,19 @@
switch (id) {
case 1 /* profileId */:
if (length >= 1) {
- const static ALookup<uint8_t, OMX_VIDEO_VP9PROFILETYPE> profiles {
- { 0, OMX_VIDEO_VP9Profile0 },
- { 1, OMX_VIDEO_VP9Profile1 },
- { 2, OMX_VIDEO_VP9Profile2 },
- { 3, OMX_VIDEO_VP9Profile3 },
+ const static ALookup<uint8_t, int32_t> profiles {
+ { 0, VP9Profile0 },
+ { 1, VP9Profile1 },
+ { 2, VP9Profile2 },
+ { 3, VP9Profile3 },
};
- const static ALookup<OMX_VIDEO_VP9PROFILETYPE, OMX_VIDEO_VP9PROFILETYPE> toHdr {
- { OMX_VIDEO_VP9Profile2, OMX_VIDEO_VP9Profile2HDR },
- { OMX_VIDEO_VP9Profile3, OMX_VIDEO_VP9Profile3HDR },
+ const static ALookup<int32_t, int32_t> toHdr {
+ { VP9Profile2, VP9Profile2HDR },
+ { VP9Profile3, VP9Profile3HDR },
};
- OMX_VIDEO_VP9PROFILETYPE profile;
+ int32_t profile;
if (profiles.map(data[0], &profile)) {
// convert to HDR profile
if (isHdr(format)) {
@@ -615,24 +624,24 @@
break;
case 2 /* levelId */:
if (length >= 1) {
- const static ALookup<uint8_t, OMX_VIDEO_VP9LEVELTYPE> levels {
- { 10, OMX_VIDEO_VP9Level1 },
- { 11, OMX_VIDEO_VP9Level11 },
- { 20, OMX_VIDEO_VP9Level2 },
- { 21, OMX_VIDEO_VP9Level21 },
- { 30, OMX_VIDEO_VP9Level3 },
- { 31, OMX_VIDEO_VP9Level31 },
- { 40, OMX_VIDEO_VP9Level4 },
- { 41, OMX_VIDEO_VP9Level41 },
- { 50, OMX_VIDEO_VP9Level5 },
- { 51, OMX_VIDEO_VP9Level51 },
- { 52, OMX_VIDEO_VP9Level52 },
- { 60, OMX_VIDEO_VP9Level6 },
- { 61, OMX_VIDEO_VP9Level61 },
- { 62, OMX_VIDEO_VP9Level62 },
+ const static ALookup<uint8_t, int32_t> levels {
+ { 10, VP9Level1 },
+ { 11, VP9Level11 },
+ { 20, VP9Level2 },
+ { 21, VP9Level21 },
+ { 30, VP9Level3 },
+ { 31, VP9Level31 },
+ { 40, VP9Level4 },
+ { 41, VP9Level41 },
+ { 50, VP9Level5 },
+ { 51, VP9Level51 },
+ { 52, VP9Level52 },
+ { 60, VP9Level6 },
+ { 61, VP9Level61 },
+ { 62, VP9Level62 },
};
- OMX_VIDEO_VP9LEVELTYPE level;
+ int32_t level;
if (levels.map(data[0], &level)) {
format->setInt32("level", level);
}
@@ -1504,7 +1513,30 @@
msg->setBuffer("csd-0", buffer);
}
- if (meta->findData(kKeyDVCC, &type, &data, &size)) {
+ if (meta->findData(kKeyDVCC, &type, &data, &size)
+ || meta->findData(kKeyDVVC, &type, &data, &size)
+ || meta->findData(kKeyDVWC, &type, &data, &size)) {
+ sp<ABuffer> buffer, csdOrg;
+ if (msg->findBuffer("csd-0", &csdOrg)) {
+ buffer = new (std::nothrow) ABuffer(size + csdOrg->size());
+ if (buffer.get() == NULL || buffer->base() == NULL) {
+ return NO_MEMORY;
+ }
+
+ memcpy(buffer->data(), csdOrg->data(), csdOrg->size());
+ memcpy(buffer->data() + csdOrg->size(), data, size);
+ } else {
+ buffer = new (std::nothrow) ABuffer(size);
+ if (buffer.get() == NULL || buffer->base() == NULL) {
+ return NO_MEMORY;
+ }
+ memcpy(buffer->data(), data, size);
+ }
+
+ buffer->meta()->setInt32("csd", true);
+ buffer->meta()->setInt64("timeUs", 0);
+ msg->setBuffer("csd-0", buffer);
+
const uint8_t *ptr = (const uint8_t *)data;
ALOGV("DV: calling parseDolbyVisionProfileLevelFromDvcc with data size %zu", size);
parseDolbyVisionProfileLevelFromDvcc(ptr, size, msg);
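The hunk above builds a csd-0 that is the original AVC/HEVC codec-specific data with the 24-byte dvcC/dvvC/dvwC record appended (or just the record when there was no prior csd-0), and then parses profile/level out of that record. The concatenation as a standalone sketch:

    #include <cstdint>
    #include <cstring>
    #include <vector>

    // Build the csd-0 payload handed to the codec: the original codec-specific data
    // (if any) followed by the 24-byte Dolby Vision configuration record.
    std::vector<uint8_t> appendDoviConfig(const std::vector<uint8_t>& csd0,
                                          const uint8_t* dvConfig, size_t dvSize /* 24 */) {
        std::vector<uint8_t> out;
        out.reserve(csd0.size() + dvSize);
        out.insert(out.end(), csd0.begin(), csd0.end());     // keep the AVC/HEVC csd first
        out.insert(out.end(), dvConfig, dvConfig + dvSize);  // DV config goes at the tail
        return out;
    }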
@@ -1759,24 +1791,39 @@
if (mime.startsWith("video/") || mime.startsWith("image/")) {
int32_t width;
int32_t height;
- if (msg->findInt32("width", &width) && msg->findInt32("height", &height)) {
- meta->setInt32(kKeyWidth, width);
- meta->setInt32(kKeyHeight, height);
- } else {
+ if (!msg->findInt32("width", &width) || !msg->findInt32("height", &height)) {
ALOGV("did not find width and/or height");
return BAD_VALUE;
}
+ if (width <= 0 || height <= 0) {
+ ALOGE("Invalid value of width: %d and/or height: %d", width, height);
+ return BAD_VALUE;
+ }
+ meta->setInt32(kKeyWidth, width);
+ meta->setInt32(kKeyHeight, height);
- int32_t sarWidth, sarHeight;
- if (msg->findInt32("sar-width", &sarWidth)
- && msg->findInt32("sar-height", &sarHeight)) {
+ int32_t sarWidth = -1, sarHeight = -1;
+ bool foundWidth, foundHeight;
+ foundWidth = msg->findInt32("sar-width", &sarWidth);
+ foundHeight = msg->findInt32("sar-height", &sarHeight);
+ if (foundWidth || foundHeight) {
+ if (sarWidth <= 0 || sarHeight <= 0) {
+ ALOGE("Invalid value of sarWidth: %d and/or sarHeight: %d", sarWidth, sarHeight);
+ return BAD_VALUE;
+ }
meta->setInt32(kKeySARWidth, sarWidth);
meta->setInt32(kKeySARHeight, sarHeight);
}
- int32_t displayWidth, displayHeight;
- if (msg->findInt32("display-width", &displayWidth)
- && msg->findInt32("display-height", &displayHeight)) {
+ int32_t displayWidth = -1, displayHeight = -1;
+ foundWidth = msg->findInt32("display-width", &displayWidth);
+ foundHeight = msg->findInt32("display-height", &displayHeight);
+ if (foundWidth || foundHeight) {
+ if (displayWidth <= 0 || displayHeight <= 0) {
+ ALOGE("Invalid value of displayWidth: %d and/or displayHeight: %d",
+ displayWidth, displayHeight);
+ return BAD_VALUE;
+ }
meta->setInt32(kKeyDisplayWidth, displayWidth);
meta->setInt32(kKeyDisplayHeight, displayHeight);
}
@@ -1786,17 +1833,29 @@
if (msg->findInt32("is-default", &isPrimary) && isPrimary) {
meta->setInt32(kKeyTrackIsDefault, 1);
}
- int32_t tileWidth, tileHeight, gridRows, gridCols;
- if (msg->findInt32("tile-width", &tileWidth)) {
+ int32_t tileWidth = -1, tileHeight = -1;
+ foundWidth = msg->findInt32("tile-width", &tileWidth);
+ foundHeight = msg->findInt32("tile-height", &tileHeight);
+ if (foundWidth || foundHeight) {
+ if (tileWidth <= 0 || tileHeight <= 0) {
+ ALOGE("Invalid value of tileWidth: %d and/or tileHeight: %d",
+ tileWidth, tileHeight);
+ return BAD_VALUE;
+ }
meta->setInt32(kKeyTileWidth, tileWidth);
- }
- if (msg->findInt32("tile-height", &tileHeight)) {
meta->setInt32(kKeyTileHeight, tileHeight);
}
- if (msg->findInt32("grid-rows", &gridRows)) {
+ int32_t gridRows = -1, gridCols = -1;
+ bool foundRows, foundCols;
+ foundRows = msg->findInt32("grid-rows", &gridRows);
+ foundCols = msg->findInt32("grid-cols", &gridCols);
+ if (foundRows || foundCols) {
+ if (gridRows <= 0 || gridCols <= 0) {
+ ALOGE("Invalid value of gridRows: %d and/or gridCols: %d",
+ gridRows, gridCols);
+ return BAD_VALUE;
+ }
meta->setInt32(kKeyGridRows, gridRows);
- }
- if (msg->findInt32("grid-cols", &gridCols)) {
meta->setInt32(kKeyGridCols, gridCols);
}
}
@@ -1812,6 +1871,14 @@
&cropTop,
&cropRight,
&cropBottom)) {
+ if (cropLeft < 0 || cropLeft > cropRight || cropRight >= width) {
+ ALOGE("Invalid value of cropLeft: %d and/or cropRight: %d", cropLeft, cropRight);
+ return BAD_VALUE;
+ }
+ if (cropTop < 0 || cropTop > cropBottom || cropBottom >= height) {
+ ALOGE("Invalid value of cropTop: %d and/or cropBottom: %d", cropTop, cropBottom);
+ return BAD_VALUE;
+ }
meta->setRect(kKeyCropRect, cropLeft, cropTop, cropRight, cropBottom);
}
@@ -1855,9 +1922,16 @@
ALOGV("did not find channel-count and/or sample-rate");
return BAD_VALUE;
}
+ // channel count can be zero in some cases, e.g. MPEG-H
+ if (sampleRate <= 0 || numChannels < 0) {
+ ALOGE("Invalid value of channel-count: %d and/or sample-rate: %d",
+ numChannels, sampleRate);
+ return BAD_VALUE;
+ }
meta->setInt32(kKeyChannelCount, numChannels);
meta->setInt32(kKeySampleRate, sampleRate);
int32_t bitsPerSample;
+ // TODO:(b/204430952) add appropriate bound check for bitsPerSample
if (msg->findInt32("bits-per-sample", &bitsPerSample)) {
meta->setInt32(kKeyBitsPerSample, bitsPerSample);
}
@@ -1967,30 +2041,134 @@
mime == MEDIA_MIMETYPE_IMAGE_AVIF) {
meta->setData(kKeyAV1C, 0, csd0->data(), csd0->size());
} else if (mime == MEDIA_MIMETYPE_VIDEO_DOLBY_VISION) {
- if (msg->findBuffer("csd-2", &csd2)) {
- //dvcc should be 24
- if (csd2->size() == 24) {
- meta->setData(kKeyDVCC, kTypeDVCC, csd2->data(), csd2->size());
- uint8_t *dvcc = csd2->data();
- const uint8_t profile = dvcc[2] >> 1;
- if (profile > 1 && profile < 9) {
+ int32_t needCreateDoviCSD = 0;
+ int32_t profile = 0;
+ uint8_t bl_compatibility = 0;
+ if (msg->findInt32("profile", &profile)) {
+ if (profile == DolbyVisionProfileDvheSt) {
+ profile = 8;
+ bl_compatibility = 4;
+ } else if (profile == DolbyVisionProfileDvavSe) {
+ profile = 9;
+ bl_compatibility = 2;
+ }
+ if (profile == 8 || profile == 9) {
+ needCreateDoviCSD = 1;
+ }
+ } else {
+ ALOGW("did not find dolby vision profile");
+ }
+ // No dovi csd data, need to create it
+ if (needCreateDoviCSD) {
+ uint8_t dvcc[24];
+ int32_t level = 0;
+ uint8_t level_val = 0;
+
+ if (msg->findInt32("level", &level)) {
+ const static ALookup<int32_t, uint8_t> levels {
+ {DolbyVisionLevelUnknown, 0},
+ {DolbyVisionLevelHd24, 1},
+ {DolbyVisionLevelHd30, 2},
+ {DolbyVisionLevelFhd24, 3},
+ {DolbyVisionLevelFhd30, 4},
+ {DolbyVisionLevelFhd60, 5},
+ {DolbyVisionLevelUhd24, 6},
+ {DolbyVisionLevelUhd30, 7},
+ {DolbyVisionLevelUhd48, 8},
+ {DolbyVisionLevelUhd60, 9},
+ {DolbyVisionLevelUhd120, 10},
+ {DolbyVisionLevel8k30, 11},
+ {DolbyVisionLevel8k60, 12},
+ };
+ levels.map(level, &level_val);
+ ALOGV("found dolby vision level: %d, value: %d", level, level_val);
+ }
+
+ dvcc[0] = 1; // major version
+ dvcc[1] = 0; // minor version
+ dvcc[2] = (uint8_t)((profile & 0x7f) << 1);// dolby vision profile
+ dvcc[2] = (uint8_t)((dvcc[2] | (uint8_t)((level_val >> 5) & 0x1)) & 0xff);
+ dvcc[3] = (uint8_t)((level_val & 0x1f) << 3); // dolby vision level
+ dvcc[3] = (uint8_t)(dvcc[3] | (1 << 2)); // rpu_present_flag
+ dvcc[3] = (uint8_t)(dvcc[3] | (1)); // bl_present_flag
+ dvcc[4] = (uint8_t)(bl_compatibility << 4);// bl_compatibility id
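+ // Worked example: profile 8 (DvheSt) with bl_compatibility 4 and level
+ // Fhd30 (level_val 4) yields dvcc[0..4] = 0x01 0x00 0x10 0x25 0x40.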
+
+ std::vector<uint8_t> dvcc_data(24);
+ memcpy(dvcc_data.data(), dvcc, 24);
+ if (profile > 10) {
+ meta->setData(kKeyDVWC, kTypeDVWC, dvcc_data.data(), 24);
+ } else if (profile > 7) {
+ meta->setData(kKeyDVVC, kTypeDVVC, dvcc_data.data(), 24);
+ } else {
+ meta->setData(kKeyDVCC, kTypeDVCC, dvcc_data.data(), 24);
+ }
+ } else if (csd0size >= 24) { // have dovi csd, just send it out...
+ uint8_t *dvconfig = csd0->data() + (csd0size -24);
+ profile = dvconfig[2] >> 1;
+ if (profile > 10) {
+ meta->setData(kKeyDVWC, kTypeDVWC, dvconfig, 24);
+ } else if (profile > 7) {
+ meta->setData(kKeyDVVC, kTypeDVVC, dvconfig, 24);
+ } else {
+ meta->setData(kKeyDVCC, kTypeDVCC, dvconfig, 24);
+ }
+ } else {
+ return BAD_VALUE;
+ }
+
+ // Send the avc/hevc/av1 csd data...
+ if (csd0size >= 24) {
+ sp<ABuffer> csd;
+ if ( profile > 1 && profile < 9) {
+ if (msg->findBuffer("csd-hevc", &csd)) {
+ meta->setData(kKeyHVCC, kTypeHVCC, csd->data(), csd->size());
+ } else if (csd0size > 24) {
std::vector<uint8_t> hvcc(csd0size + 1024);
size_t outsize = reassembleHVCC(csd0, hvcc.data(), hvcc.size(), 4);
meta->setData(kKeyHVCC, kTypeHVCC, hvcc.data(), outsize);
- } else if (DolbyVisionProfileDvav110 == profile) {
- meta->setData(kKeyAV1C, 0, csd0->data(), csd0->size());
- } else {
- sp<ABuffer> csd1;
- if (msg->findBuffer("csd-1", &csd1)) {
- std::vector<char> avcc(csd0size + csd1->size() + 1024);
- size_t outsize = reassembleAVCC(csd0, csd1, avcc.data());
- meta->setData(kKeyAVCC, kTypeAVCC, avcc.data(), outsize);
- }
}
+ } else if (profile == 9) {
+ sp<ABuffer> csd1;
+ if (msg->findBuffer("csd-avc", &csd)) {
+ meta->setData(kKeyAVCC, kTypeAVCC, csd->data(), csd->size());
+ } else if (msg->findBuffer("csd-1", &csd1)) {
+ std::vector<char> avcc(csd0size + csd1->size() + 1024);
+ size_t outsize = reassembleAVCC(csd0, csd1, avcc.data());
+ meta->setData(kKeyAVCC, kTypeAVCC, avcc.data(), outsize);
+ } else { // for dolby vision avc, csd0 also holds csd1
+ size_t i = 0;
+ int csd0realsize = 0;
+ do {
+ i = findNextNalStartCode(csd0->data() + i,
+ csd0->size() - i) - csd0->data();
+ if (i > 0) {
+ csd0realsize = i;
+ break;
+ }
+ i += 4;
+ } while(i < csd0->size());
+ // buffer0 -> csd0
+ sp<ABuffer> buffer0 = new (std::nothrow) ABuffer(csd0realsize);
+ if (buffer0.get() == NULL || buffer0->base() == NULL) {
+ return NO_MEMORY;
+ }
+ memcpy(buffer0->data(), csd0->data(), csd0realsize);
+ // buffer1 -> csd1
+ sp<ABuffer> buffer1 = new (std::nothrow)
+ ABuffer(csd0->size() - csd0realsize);
+ if (buffer1.get() == NULL || buffer1->base() == NULL) {
+ return NO_MEMORY;
+ }
+ memcpy(buffer1->data(), csd0->data()+csd0realsize,
+ csd0->size() - csd0realsize);
+
+ std::vector<char> avcc(csd0->size() + 1024);
+ size_t outsize = reassembleAVCC(buffer0, buffer1, avcc.data());
+ meta->setData(kKeyAVCC, kTypeAVCC, avcc.data(), outsize);
+ }
+ } else if (profile == 10) {
+ meta->setData(kKeyAV1C, 0, csd0->data(), csd0->size() - 24);
}
- } else {
- ALOGE("We need csd-2!!. %s", msg->debugString().c_str());
- return BAD_VALUE;
}
} else if (mime == MEDIA_MIMETYPE_VIDEO_VP9) {
meta->setData(kKeyVp9CodecPrivate, 0, csd0->data(), csd0->size());
@@ -2038,17 +2216,6 @@
meta->setData(kKeyStreamHeader, 'mdat', csd0->data(), csd0->size());
} else if (msg->findBuffer("d263", &csd0)) {
meta->setData(kKeyD263, kTypeD263, csd0->data(), csd0->size());
- } else if (mime == MEDIA_MIMETYPE_VIDEO_DOLBY_VISION && msg->findBuffer("csd-2", &csd2)) {
- meta->setData(kKeyDVCC, kTypeDVCC, csd2->data(), csd2->size());
-
- // Remove CSD-2 from the data here to avoid duplicate data in meta
- meta->remove(kKeyOpaqueCSD2);
-
- if (msg->findBuffer("csd-avc", &csd0)) {
- meta->setData(kKeyAVCC, kTypeAVCC, csd0->data(), csd0->size());
- } else if (msg->findBuffer("csd-hevc", &csd0)) {
- meta->setData(kKeyHVCC, kTypeHVCC, csd0->data(), csd0->size());
- }
}
// XXX TODO add whatever other keys there are
@@ -2131,29 +2298,29 @@
}
struct aac_format_conv_t {
- OMX_AUDIO_AACPROFILETYPE eAacProfileType;
+ int32_t eAacProfileType;
audio_format_t format;
};
static const struct aac_format_conv_t profileLookup[] = {
- { OMX_AUDIO_AACObjectMain, AUDIO_FORMAT_AAC_MAIN},
- { OMX_AUDIO_AACObjectLC, AUDIO_FORMAT_AAC_LC},
- { OMX_AUDIO_AACObjectSSR, AUDIO_FORMAT_AAC_SSR},
- { OMX_AUDIO_AACObjectLTP, AUDIO_FORMAT_AAC_LTP},
- { OMX_AUDIO_AACObjectHE, AUDIO_FORMAT_AAC_HE_V1},
- { OMX_AUDIO_AACObjectScalable, AUDIO_FORMAT_AAC_SCALABLE},
- { OMX_AUDIO_AACObjectERLC, AUDIO_FORMAT_AAC_ERLC},
- { OMX_AUDIO_AACObjectLD, AUDIO_FORMAT_AAC_LD},
- { OMX_AUDIO_AACObjectHE_PS, AUDIO_FORMAT_AAC_HE_V2},
- { OMX_AUDIO_AACObjectELD, AUDIO_FORMAT_AAC_ELD},
- { OMX_AUDIO_AACObjectXHE, AUDIO_FORMAT_AAC_XHE},
- { OMX_AUDIO_AACObjectNull, AUDIO_FORMAT_AAC},
+ { AACObjectMain, AUDIO_FORMAT_AAC_MAIN},
+ { AACObjectLC, AUDIO_FORMAT_AAC_LC},
+ { AACObjectSSR, AUDIO_FORMAT_AAC_SSR},
+ { AACObjectLTP, AUDIO_FORMAT_AAC_LTP},
+ { AACObjectHE, AUDIO_FORMAT_AAC_HE_V1},
+ { AACObjectScalable, AUDIO_FORMAT_AAC_SCALABLE},
+ { AACObjectERLC, AUDIO_FORMAT_AAC_ERLC},
+ { AACObjectLD, AUDIO_FORMAT_AAC_LD},
+ { AACObjectHE_PS, AUDIO_FORMAT_AAC_HE_V2},
+ { AACObjectELD, AUDIO_FORMAT_AAC_ELD},
+ { AACObjectXHE, AUDIO_FORMAT_AAC_XHE},
+ { AACObjectNull, AUDIO_FORMAT_AAC},
};
void mapAACProfileToAudioFormat( audio_format_t& format, uint64_t eAacProfile)
{
-const struct aac_format_conv_t* p = &profileLookup[0];
- while (p->eAacProfileType != OMX_AUDIO_AACObjectNull) {
+ const struct aac_format_conv_t* p = &profileLookup[0];
+ while (p->eAacProfileType != AACObjectNull) {
if (eAacProfile == p->eAacProfileType) {
format = p->format;
return;
@@ -2193,7 +2360,7 @@
// Offloading depends on audio DSP capabilities.
int32_t aacaot = -1;
if (meta->findInt32(kKeyAACAOT, &aacaot)) {
- mapAACProfileToAudioFormat(info->format,(OMX_AUDIO_AACPROFILETYPE) aacaot);
+ mapAACProfileToAudioFormat(info->format, aacaot);
}
int32_t srate = -1;
diff --git a/media/libstagefright/codecs/m4v_h263/enc/SoftMPEG4Encoder.cpp b/media/libstagefright/codecs/m4v_h263/enc/SoftMPEG4Encoder.cpp
index fb6c4e2..bb1cb0b 100644
--- a/media/libstagefright/codecs/m4v_h263/enc/SoftMPEG4Encoder.cpp
+++ b/media/libstagefright/codecs/m4v_h263/enc/SoftMPEG4Encoder.cpp
@@ -354,7 +354,7 @@
}
if (mpeg4type->eProfile != OMX_VIDEO_MPEG4ProfileCore ||
- mpeg4type->eLevel != OMX_VIDEO_MPEG4Level2 ||
+ mpeg4type->eLevel > OMX_VIDEO_MPEG4Level2 ||
(mpeg4type->nAllowedPictureTypes & OMX_VIDEO_PictureTypeB) ||
mpeg4type->nBFrames != 0 ||
mpeg4type->nIDCVLCThreshold != 0 ||
diff --git a/media/libstagefright/colorconversion/ColorConverter.cpp b/media/libstagefright/colorconversion/ColorConverter.cpp
index c7dc415..6004cf8 100644
--- a/media/libstagefright/colorconversion/ColorConverter.cpp
+++ b/media/libstagefright/colorconversion/ColorConverter.cpp
@@ -23,6 +23,7 @@
#include <media/stagefright/foundation/ALooper.h>
#include <media/stagefright/foundation/ColorUtils.h>
#include <media/stagefright/ColorConverter.h>
+#include <media/stagefright/MediaCodecConstants.h>
#include <media/stagefright/MediaErrors.h>
#include "libyuv/convert_from.h"
@@ -51,13 +52,17 @@
static bool isRGB(OMX_COLOR_FORMATTYPE colorFormat) {
return colorFormat == OMX_COLOR_Format16bitRGB565
|| colorFormat == OMX_COLOR_Format32BitRGBA8888
- || colorFormat == OMX_COLOR_Format32bitBGRA8888;
+ || colorFormat == OMX_COLOR_Format32bitBGRA8888
+ || colorFormat == COLOR_Format32bitABGR2101010;
}
bool ColorConverter::ColorSpace::isBt709() {
return (mStandard == ColorUtils::kColorStandardBT709);
}
+bool ColorConverter::ColorSpace::isBt2020() {
+ return (mStandard == ColorUtils::kColorStandardBT2020);
+}
bool ColorConverter::ColorSpace::isJpeg() {
return ((mStandard == ColorUtils::kColorStandardBT601_625)
@@ -70,16 +75,19 @@
: mSrcFormat(from),
mDstFormat(to),
mSrcColorSpace({0, 0, 0}),
- mClip(NULL) {
+ mClip(NULL),
+ mClip10Bit(NULL) {
}
ColorConverter::~ColorConverter() {
delete[] mClip;
mClip = NULL;
+ delete[] mClip10Bit;
+ mClip10Bit = NULL;
}
bool ColorConverter::isValid() const {
- switch (mSrcFormat) {
+ switch ((int32_t)mSrcFormat) {
case OMX_COLOR_FormatYUV420Planar16:
if (mDstFormat == OMX_COLOR_FormatYUV444Y410) {
return true;
@@ -102,6 +110,8 @@
#else
return mDstFormat == OMX_COLOR_Format16bitRGB565;
#endif
+ case COLOR_FormatYUVP010:
+ return mDstFormat == COLOR_Format32bitABGR2101010;
default:
return false;
@@ -143,9 +153,10 @@
mCropTop(cropTop),
mCropRight(cropRight),
mCropBottom(cropBottom) {
- switch(mColorFormat) {
+ switch((int32_t)mColorFormat) {
case OMX_COLOR_Format16bitRGB565:
case OMX_COLOR_FormatYUV420Planar16:
+ case COLOR_FormatYUVP010:
case OMX_COLOR_FormatCbYCrY:
mBpp = 2;
mStride = 2 * mWidth;
@@ -153,6 +164,7 @@
case OMX_COLOR_Format32bitBGRA8888:
case OMX_COLOR_Format32BitRGBA8888:
+ case COLOR_Format32bitABGR2101010:
case OMX_COLOR_FormatYUV444Y410:
mBpp = 4;
mStride = 4 * mWidth;
@@ -213,7 +225,7 @@
status_t err;
- switch (mSrcFormat) {
+ switch ((int32_t)mSrcFormat) {
case OMX_COLOR_FormatYUV420Planar:
#ifdef USE_LIBYUV
err = convertYUV420PlanarUseLibYUV(src, dst);
@@ -235,6 +247,19 @@
break;
}
+ case COLOR_FormatYUVP010:
+ {
+#if PERF_PROFILING
+ int64_t startTimeUs = ALooper::GetNowUs();
+#endif
+ err = convertYUVP010(src, dst);
+#if PERF_PROFILING
+ int64_t endTimeUs = ALooper::GetNowUs();
+ ALOGD("convertYUVP010 took %lld us", (long long) (endTimeUs - startTimeUs));
+#endif
+ break;
+ }
+
case OMX_COLOR_FormatCbYCrY:
err = convertCbYCrY(src, dst);
break;
@@ -439,23 +464,23 @@
}
std::function<void (void *, bool, signed, signed, signed, signed, signed, signed)>
-getWriteToDst(OMX_COLOR_FORMATTYPE dstFormat, uint8_t *kAdjustedClip) {
- switch (dstFormat) {
+getWriteToDst(OMX_COLOR_FORMATTYPE dstFormat, void *kAdjustedClip) {
+ switch ((int)dstFormat) {
case OMX_COLOR_Format16bitRGB565:
{
return [kAdjustedClip](void *dst_ptr, bool uncropped,
signed r1, signed g1, signed b1,
signed r2, signed g2, signed b2) {
uint32_t rgb1 =
- ((kAdjustedClip[r1] >> 3) << 11)
- | ((kAdjustedClip[g1] >> 2) << 5)
- | (kAdjustedClip[b1] >> 3);
+ ((((uint8_t *)kAdjustedClip)[r1] >> 3) << 11)
+ | ((((uint8_t *)kAdjustedClip)[g1] >> 2) << 5)
+ | (((uint8_t *)kAdjustedClip)[b1] >> 3);
if (uncropped) {
uint32_t rgb2 =
- ((kAdjustedClip[r2] >> 3) << 11)
- | ((kAdjustedClip[g2] >> 2) << 5)
- | (kAdjustedClip[b2] >> 3);
+ ((((uint8_t *)kAdjustedClip)[r2] >> 3) << 11)
+ | ((((uint8_t *)kAdjustedClip)[g2] >> 2) << 5)
+ | (((uint8_t *)kAdjustedClip)[b2] >> 3);
*(uint32_t *)dst_ptr = (rgb2 << 16) | rgb1;
} else {
@@ -469,16 +494,16 @@
signed r1, signed g1, signed b1,
signed r2, signed g2, signed b2) {
((uint32_t *)dst_ptr)[0] =
- (kAdjustedClip[r1])
- | (kAdjustedClip[g1] << 8)
- | (kAdjustedClip[b1] << 16)
+ (((uint8_t *)kAdjustedClip)[r1])
+ | (((uint8_t *)kAdjustedClip)[g1] << 8)
+ | (((uint8_t *)kAdjustedClip)[b1] << 16)
| (0xFF << 24);
if (uncropped) {
((uint32_t *)dst_ptr)[1] =
- (kAdjustedClip[r2])
- | (kAdjustedClip[g2] << 8)
- | (kAdjustedClip[b2] << 16)
+ (((uint8_t *)kAdjustedClip)[r2])
+ | (((uint8_t *)kAdjustedClip)[g2] << 8)
+ | (((uint8_t *)kAdjustedClip)[b2] << 16)
| (0xFF << 24);
}
};
@@ -489,20 +514,41 @@
signed r1, signed g1, signed b1,
signed r2, signed g2, signed b2) {
((uint32_t *)dst_ptr)[0] =
- (kAdjustedClip[b1])
- | (kAdjustedClip[g1] << 8)
- | (kAdjustedClip[r1] << 16)
+ (((uint8_t *)kAdjustedClip)[b1])
+ | (((uint8_t *)kAdjustedClip)[g1] << 8)
+ | (((uint8_t *)kAdjustedClip)[r1] << 16)
| (0xFF << 24);
if (uncropped) {
((uint32_t *)dst_ptr)[1] =
- (kAdjustedClip[b2])
- | (kAdjustedClip[g2] << 8)
- | (kAdjustedClip[r2] << 16)
+ (((uint8_t *)kAdjustedClip)[b2])
+ | (((uint8_t *)kAdjustedClip)[g2] << 8)
+ | (((uint8_t *)kAdjustedClip)[r2] << 16)
| (0xFF << 24);
}
};
}
+ case COLOR_Format32bitABGR2101010:
+ {
+ return [kAdjustedClip](void *dst_ptr, bool uncropped,
+ signed r1, signed g1, signed b1,
+ signed r2, signed g2, signed b2) {
+ ((uint32_t *)dst_ptr)[0] =
+ (((uint16_t *)kAdjustedClip)[r1])
+ | (((uint16_t *)kAdjustedClip)[g1] << 10)
+ | (((uint16_t *)kAdjustedClip)[b1] << 20)
+ | (3 << 30);
+
+ if (uncropped) {
+ ((uint32_t *)dst_ptr)[1] =
+ (((uint16_t *)kAdjustedClip)[r2])
+ | (((uint16_t *)kAdjustedClip)[g2] << 10)
+ | (((uint16_t *)kAdjustedClip)[b2] << 20)
+ | (3 << 30);
+ }
+ };
+ }
+
default:
TRESPASS();
}
@@ -514,7 +560,7 @@
uint8_t *kAdjustedClip = initClip();
auto readFromSrc = getReadFromSrc(mSrcFormat);
- auto writeToDst = getWriteToDst(mDstFormat, kAdjustedClip);
+ auto writeToDst = getWriteToDst(mDstFormat, (void *)kAdjustedClip);
uint8_t *dst_ptr = (uint8_t *)dst.mBits
+ dst.mCropTop * dst.mStride + dst.mCropLeft * dst.mBpp;
@@ -591,34 +637,116 @@
return convertYUV420Planar(src, dst);
}
-/*
- * Pack 10-bit YUV into RGBA_1010102.
- *
- * Media sends 10-bit YUV in a RGBA_1010102 format buffer. SF will handle
- * the conversion to RGB using RenderEngine fallback.
- *
- * We do not perform a YUV->RGB conversion here, however the conversion with
- * BT2020 to Full range is below for reference:
- *
- * B = 1.168 *(Y - 64) + 2.148 *(U - 512)
- * G = 1.168 *(Y - 64) - 0.652 *(V - 512) - 0.188 *(U - 512)
- * R = 1.168 *(Y - 64) + 1.683 *(V - 512)
- *
- * B = 1196/1024 *(Y - 64) + 2200/1024 *(U - 512)
- * G = .................... - 668/1024 *(V - 512) - 192/1024 *(U - 512)
- * R = .................... + 1723/1024 *(V - 512)
- *
- * min_B = (1196 *(- 64) + 2200 *(- 512)) / 1024 = -1175
- * min_G = (1196 *(- 64) - 668 *(1023 - 512) - 192 *(1023 - 512)) / 1024 = -504
- * min_R = (1196 *(- 64) + 1723 *(- 512)) / 1024 = -937
- *
- * max_B = (1196 *(1023 - 64) + 2200 *(1023 - 512)) / 1024 = 2218
- * max_G = (1196 *(1023 - 64) - 668 *(- 512) - 192 *(- 512)) / 1024 = 1551
- * max_R = (1196 *(1023 - 64) + 1723 *(1023 - 512)) / 1024 = 1980
- *
- * clip range -1175 .. 2218
- *
- */
+status_t ColorConverter::convertYUVP010(
+ const BitmapParams &src, const BitmapParams &dst) {
+ if (mDstFormat == COLOR_Format32bitABGR2101010) {
+ return convertYUVP010ToRGBA1010102(src, dst);
+ }
+
+ return ERROR_UNSUPPORTED;
+}
+
+status_t ColorConverter::convertYUVP010ToRGBA1010102(
+ const BitmapParams &src, const BitmapParams &dst) {
+ uint16_t *kAdjustedClip10bit = initClip10Bit();
+
+// auto readFromSrc = getReadFromSrc(mSrcFormat);
+ auto writeToDst = getWriteToDst(mDstFormat, (void *)kAdjustedClip10bit);
+
+ uint8_t *dst_ptr = (uint8_t *)dst.mBits
+ + dst.mCropTop * dst.mStride + dst.mCropLeft * dst.mBpp;
+
+ uint16_t *src_y = (uint16_t *)((uint8_t *)src.mBits
+ + src.mCropTop * src.mStride + src.mCropLeft * src.mBpp);
+
+ uint16_t *src_uv = (uint16_t *)((uint8_t *)src.mBits
+ + src.mStride * src.mHeight
+ + (src.mCropTop / 2) * src.mStride + src.mCropLeft * src.mBpp);
+
+ // BT.2020 Limited Range conversion
+
+ // B = 1.168 *(Y - 64) + 2.148 *(U - 512)
+ // G = 1.168 *(Y - 64) - 0.652 *(V - 512) - 0.188 *(U - 512)
+ // R = 1.168 *(Y - 64) + 1.683 *(V - 512)
+
+ // B = 1196/1024 *(Y - 64) + 2200/1024 *(U - 512)
+ // G = .................... - 668/1024 *(V - 512) - 192/1024 *(U - 512)
+ // R = .................... + 1723/1024 *(V - 512)
+
+ // min_B = (1196 *(- 64) + 2200 *(- 512)) / 1024 = -1175
+ // min_G = (1196 *(- 64) - 668 *(1023 - 512) - 192 *(1023 - 512)) / 1024 = -504
+ // min_R = (1196 *(- 64) + 1723 *(- 512)) / 1024 = -937
+
+ // max_B = (1196 *(1023 - 64) + 2200 *(1023 - 512)) / 1024 = 2218
+ // max_G = (1196 *(1023 - 64) - 668 *(- 512) - 192 *(- 512)) / 1024 = 1551
+ // max_R = (1196 *(1023 - 64) + 1723 *(1023 - 512)) / 1024 = 1980
+
+ // clip range -1175 .. 2218
+
+ // BT.709 Limited Range conversion
+
+ // B = 1.164 * (Y - 64) + 2.018 * (U - 512)
+ // G = 1.164 * (Y - 64) - 0.813 * (V - 512) - 0.391 * (U - 512)
+ // R = 1.164 * (Y - 64) + 1.596 * (V - 512)
+
+ // B = 1192/1024 * (Y - 64) + 2068/1024 * (U - 512)
+ // G = .................... - 832/1024 * (V - 512) - 400/1024 * (U - 512)
+ // R = .................... + 1636/1024 * (V - 512)
+
+ // min_B = (1192 * (- 64) + 2068 * (- 512)) / 1024 = -1108
+
+ // max_B = (1192 * (1023 - 64) + 2068 * (1023 - 512)) / 1024 = 2148
+
+ // clip range -1108 .. 2148
+
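+ // Worked example (BT.2020 path): a full-white limited-range pixel has
+ // Y = 940 and U = V = 512, so the chroma terms vanish and
+ // R = G = B = 1196 * (940 - 64) / 1024, which is approximately 1023 (peak 10-bit output).
+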
+ signed mY = 1196, mU_B = 2200, mV_G = -668, mV_R = 1723, mU_G = -192;
+ if (!mSrcColorSpace.isBt2020()) {
+ mY = 1192;
+ mU_B = 2068;
+ mV_G = -832;
+ mV_R = 1636;
+ mU_G = -400;
+ }
+ for (size_t y = 0; y < src.cropHeight(); ++y) {
+ for (size_t x = 0; x < src.cropWidth(); x += 2) {
+ signed y1, y2, u, v;
+ y1 = (src_y[x] >> 6) - 64;
+ y2 = (src_y[x + 1] >> 6) - 64;
+ u = int(src_uv[x] >> 6) - 512;
+ v = int(src_uv[x + 1] >> 6) - 512;
+
+ signed u_b = u * mU_B;
+ signed u_g = u * mU_G;
+ signed v_g = v * mV_G;
+ signed v_r = v * mV_R;
+
+ signed tmp1 = y1 * mY;
+ signed b1 = (tmp1 + u_b) / 1024;
+ signed g1 = (tmp1 + v_g + u_g) / 1024;
+ signed r1 = (tmp1 + v_r) / 1024;
+
+ signed tmp2 = y2 * mY;
+ signed b2 = (tmp2 + u_b) / 1024;
+ signed g2 = (tmp2 + v_g + u_g) / 1024;
+ signed r2 = (tmp2 + v_r) / 1024;
+
+ bool uncropped = x + 1 < src.cropWidth();
+
+ writeToDst(dst_ptr + x * dst.mBpp, uncropped, r1, g1, b1, r2, g2, b2);
+ }
+
+ src_y += src.mStride / 2;
+
+ if (y & 1) {
+ src_uv += src.mStride / 2;
+ }
+
+ dst_ptr += dst.mStride;
+ }
+
+ return OK;
+}
+
#if !USE_NEON_Y410
@@ -1033,4 +1161,19 @@
return &mClip[-kClipMin];
}
+uint16_t *ColorConverter::initClip10Bit() {
+ static const signed kClipMin = -1176;
+ static const signed kClipMax = 2219;
+
+ if (mClip10Bit == NULL) {
+ mClip10Bit = new uint16_t[kClipMax - kClipMin + 1];
+
+ for (signed i = kClipMin; i <= kClipMax; ++i) {
+ mClip10Bit[i - kClipMin] = (i < 0) ? 0 : (i > 1023) ? 1023 : (uint16_t)i;
+ }
+ }
+
+ return &mClip10Bit[-kClipMin];
+}
+
} // namespace android
diff --git a/media/libstagefright/foundation/AMessage.cpp b/media/libstagefright/foundation/AMessage.cpp
index c2114b3..5c99cc9 100644
--- a/media/libstagefright/foundation/AMessage.cpp
+++ b/media/libstagefright/foundation/AMessage.cpp
@@ -33,7 +33,7 @@
#include <media/stagefright/foundation/hexdump.h>
-#if !defined(__ANDROID_VNDK__) && !defined(__ANDROID_APEX__)
+#if defined(__ANDROID__) && !defined(__ANDROID_VNDK__) && !defined(__ANDROID_APEX__)
#include <binder/Parcel.h>
#endif
@@ -659,7 +659,7 @@
return s;
}
-#if !defined(__ANDROID_VNDK__) && !defined(__ANDROID_APEX__)
+#if defined(__ANDROID__) && !defined(__ANDROID_VNDK__) && !defined(__ANDROID_APEX__)
// static
sp<AMessage> AMessage::FromParcel(const Parcel &parcel, size_t maxNestingLevel) {
int32_t what = parcel.readInt32();
@@ -825,7 +825,7 @@
}
}
}
-#endif // !defined(__ANDROID_VNDK__) && !defined(__ANDROID_APEX__)
+#endif // defined(__ANDROID__) && !defined(__ANDROID_VNDK__) && !defined(__ANDROID_APEX__)
sp<AMessage> AMessage::changesFrom(const sp<const AMessage> &other, bool deep) const {
if (other == NULL) {
diff --git a/media/libstagefright/foundation/AString.cpp b/media/libstagefright/foundation/AString.cpp
index b1ed077..a5e0ff8 100644
--- a/media/libstagefright/foundation/AString.cpp
+++ b/media/libstagefright/foundation/AString.cpp
@@ -27,7 +27,7 @@
#include "ADebug.h"
#include "AString.h"
-#if !defined(__ANDROID_VNDK__) && !defined(__ANDROID_APEX__)
+#if defined(__ANDROID__) && !defined(__ANDROID_VNDK__) && !defined(__ANDROID_APEX__)
#include <binder/Parcel.h>
#endif
@@ -365,7 +365,7 @@
return !strcasecmp(mData + mSize - suffixLen, suffix);
}
-#if !defined(__ANDROID_VNDK__) && !defined(__ANDROID_APEX__)
+#if defined(__ANDROID__) && !defined(__ANDROID_VNDK__) && !defined(__ANDROID_APEX__)
// static
AString AString::FromParcel(const Parcel &parcel) {
size_t size = static_cast<size_t>(parcel.readInt32());
@@ -380,7 +380,7 @@
}
return err;
}
-#endif // !defined(__ANDROID_VNDK__) && !defined(__ANDROID_APEX__)
+#endif // defined(__ANDROID__) && !defined(__ANDROID_VNDK__) && !defined(__ANDROID_APEX__)
AString AStringPrintf(const char *format, ...) {
va_list ap;
diff --git a/media/libstagefright/foundation/ColorUtils.cpp b/media/libstagefright/foundation/ColorUtils.cpp
index fa722b5..6dc8157 100644
--- a/media/libstagefright/foundation/ColorUtils.cpp
+++ b/media/libstagefright/foundation/ColorUtils.cpp
@@ -590,9 +590,10 @@
uint32_t gfxRange = range;
uint32_t gfxStandard = standard;
uint32_t gfxTransfer = transfer;
- // TRICKY: use & to ensure all three mappings are completed
- if (!(sGfxRanges.map(range, &gfxRange) & sGfxStandards.map(standard, &gfxStandard)
- & sGfxTransfers.map(transfer, &gfxTransfer))) {
+ bool mappedRange = sGfxRanges.map(range, &gfxRange);
+ bool mappedStandard = sGfxStandards.map(standard, &gfxStandard);
+ bool mappedTransfer = sGfxTransfers.map(transfer, &gfxTransfer);
+ if (! (mappedRange && mappedStandard && mappedTransfer)) {
ALOGW("could not safely map platform color aspects (R:%u(%s) S:%u(%s) T:%u(%s) to "
"graphics dataspace (R:%u S:%u T:%u)",
range, asString(range), standard, asString(standard), transfer, asString(transfer),
@@ -626,9 +627,10 @@
CU::ColorRange cuRange = CU::kColorRangeUnspecified;
CU::ColorStandard cuStandard = CU::kColorStandardUnspecified;
CU::ColorTransfer cuTransfer = CU::kColorTransferUnspecified;
- // TRICKY: use & to ensure all three mappings are completed
- if (!(sGfxRanges.map(gfxRange, &cuRange) & sGfxStandards.map(gfxStandard, &cuStandard)
- & sGfxTransfers.map(gfxTransfer, &cuTransfer))) {
+ bool mappedRange = sGfxRanges.map(gfxRange, &cuRange);
+ bool mappedStandard = sGfxStandards.map(gfxStandard, &cuStandard);
+ bool mappedTransfer = sGfxTransfers.map(gfxTransfer, &cuTransfer);
+ if (! (mappedRange && mappedStandard && mappedTransfer)) {
ALOGW("could not safely map graphics dataspace (R:%u S:%u T:%u) to "
"platform color aspects (R:%u(%s) S:%u(%s) T:%u(%s)",
gfxRange, gfxStandard, gfxTransfer,
@@ -781,5 +783,14 @@
return true;
}
+// static
+bool ColorUtils::isHDRStaticInfoValid(HDRStaticInfo *info) {
+ if (info->sType1.mMaxDisplayLuminance > 0.0f
+ && info->sType1.mMinDisplayLuminance > 0.0f) return true;
+ if (info->sType1.mMaxContentLightLevel > 0.0f
+ && info->sType1.mMaxFrameAverageLightLevel > 0.0f) return true;
+ return false;
+}
+
} // namespace android
diff --git a/media/libstagefright/foundation/MetaData.cpp b/media/libstagefright/foundation/MetaData.cpp
index 7f48cfd..77913d5 100644
--- a/media/libstagefright/foundation/MetaData.cpp
+++ b/media/libstagefright/foundation/MetaData.cpp
@@ -28,7 +28,7 @@
#include <media/stagefright/foundation/hexdump.h>
#include <media/stagefright/MetaData.h>
-#if !defined(__ANDROID_VNDK__) && !defined(__ANDROID_APEX__)
+#if defined(__ANDROID__) && !defined(__ANDROID_VNDK__) && !defined(__ANDROID_APEX__)
#include <binder/Parcel.h>
#endif
@@ -48,7 +48,7 @@
MetaData::~MetaData() {
}
-#if !defined(__ANDROID_VNDK__) && !defined(__ANDROID_APEX__)
+#if defined(__ANDROID__) && !defined(__ANDROID_VNDK__) && !defined(__ANDROID_APEX__)
/* static */
sp<MetaData> MetaData::createFromParcel(const Parcel &parcel) {
diff --git a/media/libstagefright/foundation/MetaDataBase.cpp b/media/libstagefright/foundation/MetaDataBase.cpp
index 3f050ea..980eb22 100644
--- a/media/libstagefright/foundation/MetaDataBase.cpp
+++ b/media/libstagefright/foundation/MetaDataBase.cpp
@@ -28,7 +28,7 @@
#include <media/stagefright/foundation/hexdump.h>
#include <media/stagefright/MetaDataBase.h>
-#if !defined(__ANDROID_VNDK__) && !defined(__ANDROID_APEX__)
+#if defined(__ANDROID__) && !defined(__ANDROID_VNDK__) && !defined(__ANDROID_APEX__)
#include <binder/Parcel.h>
#endif
@@ -452,7 +452,7 @@
}
}
-#if !defined(__ANDROID_VNDK__) && !defined(__ANDROID_APEX__)
+#if defined(__ANDROID__) && !defined(__ANDROID_VNDK__) && !defined(__ANDROID_APEX__)
status_t MetaDataBase::writeToParcel(Parcel &parcel) {
status_t ret;
size_t numItems = mInternalData->mItems.size();
@@ -532,7 +532,7 @@
ALOGW("no metadata in parcel");
return UNKNOWN_ERROR;
}
-#endif // !defined(__ANDROID_VNDK__) && !defined(__ANDROID_APEX__)
+#endif // defined(__ANDROID__) && !defined(__ANDROID_VNDK__) && !defined(__ANDROID_APEX__)
} // namespace android
diff --git a/media/libstagefright/foundation/include/media/stagefright/foundation/ColorUtils.h b/media/libstagefright/foundation/include/media/stagefright/foundation/ColorUtils.h
index a2b6c4f..72c8074 100644
--- a/media/libstagefright/foundation/include/media/stagefright/foundation/ColorUtils.h
+++ b/media/libstagefright/foundation/include/media/stagefright/foundation/ColorUtils.h
@@ -193,6 +193,9 @@
static void setHDRStaticInfoIntoAMediaFormat(const HDRStaticInfo &info, AMediaFormat *format);
// (internal) used by the setHDRStaticInfoInfo* routines
static void fillHdrStaticInfoBuffer( const HDRStaticInfo &info, uint8_t *data);
+
+ // determine whether HDR static info is valid
+ static bool isHDRStaticInfoValid(HDRStaticInfo *info);
};
inline static const char *asString(android::ColorUtils::ColorStandard i, const char *def = "??") {
diff --git a/media/libstagefright/httplive/Android.bp b/media/libstagefright/httplive/Android.bp
index 0b0acbf..7acf735 100644
--- a/media/libstagefright/httplive/Android.bp
+++ b/media/libstagefright/httplive/Android.bp
@@ -29,7 +29,6 @@
],
include_dirs: [
- "frameworks/av/media/libstagefright",
"frameworks/native/include/media/openmax",
],
@@ -65,6 +64,8 @@
header_libs: [
"libbase_headers",
+ "libstagefright_headers",
+ "libstagefright_httplive_headers",
],
static_libs: [
@@ -74,3 +75,8 @@
],
}
+
+cc_library_headers {
+ name: "libstagefright_httplive_headers",
+ export_include_dirs: ["."],
+}
diff --git a/media/libstagefright/httplive/LiveSession.cpp b/media/libstagefright/httplive/LiveSession.cpp
index 3bad015..0d7cadd 100644
--- a/media/libstagefright/httplive/LiveSession.cpp
+++ b/media/libstagefright/httplive/LiveSession.cpp
@@ -23,7 +23,7 @@
#include "M3UParser.h"
#include "PlaylistFetcher.h"
-#include "mpeg2ts/AnotherPacketSource.h"
+#include <AnotherPacketSource.h>
#include <cutils/properties.h>
#include <media/MediaHTTPService.h>
diff --git a/media/libstagefright/httplive/LiveSession.h b/media/libstagefright/httplive/LiveSession.h
index 7a6d487..ceea41d 100644
--- a/media/libstagefright/httplive/LiveSession.h
+++ b/media/libstagefright/httplive/LiveSession.h
@@ -24,7 +24,7 @@
#include <utils/String8.h>
-#include "mpeg2ts/ATSParser.h"
+#include <ATSParser.h>
namespace android {
diff --git a/media/libstagefright/httplive/PlaylistFetcher.cpp b/media/libstagefright/httplive/PlaylistFetcher.cpp
index b23aa8a..907b326 100644
--- a/media/libstagefright/httplive/PlaylistFetcher.cpp
+++ b/media/libstagefright/httplive/PlaylistFetcher.cpp
@@ -24,9 +24,9 @@
#include "HTTPDownloader.h"
#include "LiveSession.h"
#include "M3UParser.h"
-#include "include/ID3.h"
-#include "mpeg2ts/AnotherPacketSource.h"
-#include "mpeg2ts/HlsSampleDecryptor.h"
+#include <ID3.h>
+#include <AnotherPacketSource.h>
+#include <HlsSampleDecryptor.h>
#include <datasource/DataURISource.h>
#include <media/stagefright/foundation/ABitReader.h>
diff --git a/media/libstagefright/httplive/PlaylistFetcher.h b/media/libstagefright/httplive/PlaylistFetcher.h
index 5d3f9c1..2e28164 100644
--- a/media/libstagefright/httplive/PlaylistFetcher.h
+++ b/media/libstagefright/httplive/PlaylistFetcher.h
@@ -21,7 +21,7 @@
#include <media/stagefright/foundation/AHandler.h>
#include <openssl/aes.h>
-#include "mpeg2ts/ATSParser.h"
+#include <ATSParser.h>
#include "LiveSession.h"
namespace android {
diff --git a/media/libstagefright/httplive/fuzzer/Android.bp b/media/libstagefright/httplive/fuzzer/Android.bp
new file mode 100644
index 0000000..14097b0
--- /dev/null
+++ b/media/libstagefright/httplive/fuzzer/Android.bp
@@ -0,0 +1,66 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package {
+ // See: http://go/android-license-faq
+ // A large-scale-change added 'default_applicable_licenses' to import
+ // all of the 'license_kinds' from "frameworks_av_media_libstagefright_httplive_license"
+ // to get the below license kinds:
+ // SPDX-license-identifier-Apache-2.0
+ default_applicable_licenses: [
+ "frameworks_av_media_libstagefright_httplive_license",
+ ],
+}
+
+cc_fuzz {
+ name: "httplive_fuzzer",
+ srcs: [
+ "httplive_fuzzer.cpp",
+ ],
+ static_libs: [
+ "libstagefright_httplive",
+ "libstagefright_id3",
+ "libstagefright_metadatautils",
+ "libstagefright_mpeg2support",
+ "liblog",
+ "libcutils",
+ "libdatasource",
+ "libmedia",
+ "libstagefright",
+ "libutils",
+ ],
+ header_libs: [
+ "libbase_headers",
+ "libstagefright_foundation_headers",
+ "libstagefright_headers",
+ "libstagefright_httplive_headers",
+ ],
+ shared_libs: [
+ "libcrypto",
+ "libstagefright_foundation",
+ "libhidlbase",
+ "libhidlmemory",
+ "android.hidl.allocator@1.0",
+ ],
+ corpus: ["corpus/*"],
+ dictionary: "httplive_fuzzer.dict",
+ fuzz_config: {
+ cc: [
+ "android-media-fuzzing-reports@google.com",
+ ],
+ componentid: 155276,
+ },
+}
diff --git a/media/libstagefright/httplive/fuzzer/README.md b/media/libstagefright/httplive/fuzzer/README.md
new file mode 100644
index 0000000..3a64ea4
--- /dev/null
+++ b/media/libstagefright/httplive/fuzzer/README.md
@@ -0,0 +1,56 @@
+# Fuzzer for libstagefright_httplive
+
+## Plugin Design Considerations
+The fuzzer plugin for libstagefright_httplive is designed based on an understanding of the library and tries to achieve the following:
+
+##### Maximize code coverage
+The configuration parameters are not hardcoded, but instead selected based on
+incoming data. Also, several .m3u8 files are hand-crafted and added to the corpus directory to increase code coverage. This ensures more code paths are reached by the fuzzer.
+
+libstagefright_httplive supports the following parameters:
+1. Final Result (parameter name: `finalResult`)
+2. Flags (parameter name: `flags`)
+3. Time Us (parameter name: `timeUs`)
+4. Track Index (parameter name: `trackIndex`)
+5. Index (parameter name: `index`)
+6. Select (parameter name: `select`)
+
+| Parameter | Valid Values | Configured Value |
+|-----------|--------------|------------------|
+| `finalResult` | `-34` to `-1` | Value obtained from FuzzedDataProvider|
+| `flags` | `0` to `1` | Value obtained from FuzzedDataProvider|
+| `timeUs` | `0` to `10000000` | Value obtained from FuzzedDataProvider|
+| `trackIndex` | `0` to `UINT32_MAX` | Value obtained from FuzzedDataProvider|
+| `index` | `0` to `UINT32_MAX` | Value obtained from FuzzedDataProvider|
+| `select` | `True` or `False` | Value obtained from FuzzedDataProvider|
+
+This also ensures that the plugin is always deterministic for any given input.
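+
+A minimal sketch (assuming the standard `FuzzedDataProvider` API; this is not the exact fuzzer source) of how such values are drawn from the input bytes:
+```
+#include <fuzzer/FuzzedDataProvider.h>
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t *data, size_t size) {
+  FuzzedDataProvider fdp(data, size);
+  // Each parameter comes from the fuzz input, so a given input is deterministic.
+  int32_t finalResult = fdp.ConsumeIntegralInRange<int32_t>(-34, -1);
+  int64_t timeUs = fdp.ConsumeIntegralInRange<int64_t>(0, 10000000);
+  bool select = fdp.ConsumeBool();
+  (void)finalResult; (void)timeUs; (void)select; // handed to the httplive APIs in the real fuzzer
+  return 0;
+}
+```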
+
+##### Maximize utilization of input data
+The plugin feeds the entire input data to the httplive module.
+This ensures that the plugin tolerates any kind of input (empty, huge,
+malformed, etc.) and doesn't `exit()` on any input, thereby increasing the
+chance of identifying vulnerabilities.
+
+## Build
+
+This describes the steps to build the httplive_fuzzer binary.
+
+### Android
+
+#### Steps to build
+Build the fuzzer
+```
+ $ mm -j$(nproc) httplive_fuzzer
+```
+#### Steps to run
+To run on device
+```
+ $ adb push $ANDROID_PRODUCT_OUT/data/fuzz/${TARGET_ARCH}/lib /data/fuzz/${TARGET_ARCH}/lib
+ $ adb push $ANDROID_PRODUCT_OUT/data/fuzz/${TARGET_ARCH}/httplive_fuzzer /data/fuzz/${TARGET_ARCH}/httplive_fuzzer
+ $ adb shell /data/fuzz/${TARGET_ARCH}/httplive_fuzzer/httplive_fuzzer /data/fuzz/${TARGET_ARCH}/httplive_fuzzer/corpus
+```
+
+## References:
+ * http://llvm.org/docs/LibFuzzer.html
+ * https://github.com/google/oss-fuzz
diff --git a/media/libstagefright/httplive/fuzzer/corpus/crypt.key b/media/libstagefright/httplive/fuzzer/corpus/crypt.key
new file mode 100644
index 0000000..f9d5d7f
--- /dev/null
+++ b/media/libstagefright/httplive/fuzzer/corpus/crypt.key
@@ -0,0 +1,2 @@
+Û
+ÏüÐ5Ð_xïHÎ3
diff --git a/media/libstagefright/httplive/fuzzer/corpus/encrypted.m3u8 b/media/libstagefright/httplive/fuzzer/corpus/encrypted.m3u8
new file mode 100644
index 0000000..32b0eac
--- /dev/null
+++ b/media/libstagefright/httplive/fuzzer/corpus/encrypted.m3u8
@@ -0,0 +1,12 @@
+#EXTM3U
+#EXT-X-TARGETDURATION:10
+#EXT-X-ALLOW-CACHE:YES
+#EXT-X-PLAYLIST-TYPE:VOD
+#EXT-X-VERSION:3
+#EXT-X-MEDIA-SEQUENCE:1
+#EXT-X-KEY:METHOD=AES-128,URI="../../fuzz/arm64/httplive_fuzzer/corpus/crypt.key"
+#EXTINF:10.000,
+https://non.existentsite.com/test-doesnt-dereference-these-paths/fileSequence1.ts
+#EXTINF:5.092,
+https://non.existentsite.com/test-doesnt-dereference-these-paths/fileSequence1.ts
+#EXT-X-ENDLIST
diff --git a/media/libstagefright/httplive/fuzzer/corpus/hls.m3u8 b/media/libstagefright/httplive/fuzzer/corpus/hls.m3u8
new file mode 100644
index 0000000..9338e04
--- /dev/null
+++ b/media/libstagefright/httplive/fuzzer/corpus/hls.m3u8
@@ -0,0 +1,8 @@
+#EXTM3U
+#EXT-X-TARGETDURATION:10
+#EXT-X-MEDIA-SEQUENCE:0
+#EXTINF:10, no desc
+https://non.existentsite.com/test-doesnt-dereference-these-paths/fileSequence1.ts
+#EXTINF:10, no desc
+https://non.existentsite.com/test-doesnt-dereference-these-paths/fileSequence2.ts
+#EXT-X-ENDLIST
diff --git a/media/libstagefright/httplive/fuzzer/corpus/index1.m3u8 b/media/libstagefright/httplive/fuzzer/corpus/index1.m3u8
new file mode 100644
index 0000000..e1eff58
--- /dev/null
+++ b/media/libstagefright/httplive/fuzzer/corpus/index1.m3u8
@@ -0,0 +1,14 @@
+#EXTM3U
+#EXT-X-VERSION:4
+#EXT-X-TARGETDURATION:5
+#EXT-X-KEY:METHOD=NONE
+#EXT-X-DISCONTINUITY-SEQUENCE:0
+#EXT-X-PLAYLIST-TYPE:VOD
+#EXTINF:5,
+https://non.existentsite.com/test-doesnt-dereference-these-paths/fileSequence0.ts
+#EXT-X-DISCONTINUITY
+#EXTINF:5,
+https://non.existentsite.com/test-doesnt-dereference-these-paths/fileSequence1.ts
+#EXTINF:5,
+https://non.existentsite.com/test-doesnt-dereference-these-paths/fileSequence2.ts
+#EXT-X-ENDLIST
diff --git a/media/libstagefright/httplive/fuzzer/corpus/index2.m3u8 b/media/libstagefright/httplive/fuzzer/corpus/index2.m3u8
new file mode 100644
index 0000000..37a0189
--- /dev/null
+++ b/media/libstagefright/httplive/fuzzer/corpus/index2.m3u8
@@ -0,0 +1,6 @@
+#EXTM3U
+#EXT-X-INDEPENDENT-SEGMENTS
+#EXT-X-STREAM-INF:CLOSED-CAPTIONS=NONE,BANDWIDTH=165340,RESOLUTION=256x144,CODECS="mp4a.40.5,avc1.42c00b"
+https://non.existentsite.com/test-doesnt-dereference-these-paths/prog_index.m3u8
+#EXT-X-STREAM-INF:CLOSED-CAPTIONS=NONE,BANDWIDTH=344388,RESOLUTION=426x240,CODECS="mp4a.40.5,avc1.4d4015"
+https://non.existentsite.com/test-doesnt-dereference-these-paths/prog_index1.m3u8
diff --git a/media/libstagefright/httplive/fuzzer/corpus/index3.m3u8 b/media/libstagefright/httplive/fuzzer/corpus/index3.m3u8
new file mode 100644
index 0000000..1b7f489
--- /dev/null
+++ b/media/libstagefright/httplive/fuzzer/corpus/index3.m3u8
@@ -0,0 +1,13 @@
+#EXTM3U
+#EXT-X-VERSION:4
+#EXT-X-TARGETDURATION:5
+#EXT-X-KEY:METHOD=AES-128,URI="https://demo.unified-streaming.com/video/tears-of-steel/aes.key",IV=0X99b74007b6254e4bd1c6e03631cad15b
+#EXT-X-PLAYLIST-TYPE:VOD
+#EXTINF:5,
+https://non.existentsite.com/test-doesnt-dereference-these-paths/fileSequence1.ts
+#EXT-X-DISCONTINUITY
+#EXTINF:5,
+https://non.existentsite.com/test-doesnt-dereference-these-paths/fileSequence2.ts
+#EXTINF:5,
+https://non.existentsite.com/test-doesnt-dereference-these-paths/fileSequence3.ts
+#EXT-X-ENDLIST
diff --git a/media/libstagefright/httplive/fuzzer/corpus/index4.m3u8 b/media/libstagefright/httplive/fuzzer/corpus/index4.m3u8
new file mode 100644
index 0000000..89ba37c
--- /dev/null
+++ b/media/libstagefright/httplive/fuzzer/corpus/index4.m3u8
@@ -0,0 +1,15 @@
+#EXTM3U
+#EXT-X-VERSION:4
+#EXT-X-TARGETDURATION:5
+#EXT-X-KEY:METHOD=SAMPLE-AES,URI="data:text/plain;charset=utf-8,a4cd9995a1aa91e1",IV=0X99b74007b6254e4bd1c6e03631cad15b
+#EXT-X-DISCONTINUITY-SEQUENCE:0
+#EXT-X-PLAYLIST-TYPE:VOD
+#EXT-X-DISCONTINUITY
+#EXTINF:5,
+https://non.existentsite.com/test-doesnt-dereference-these-paths/fileSequence0.ts
+#EXTINF:5,
+https://non.existentsite.com/test-doesnt-dereference-these-paths/fileSequence1.ts
+#EXT-X-DISCONTINUITY
+#EXTINF:5,
+https://non.existentsite.com/test-doesnt-dereference-these-paths/fileSequence2.ts
+#EXT-X-ENDLIST
diff --git a/media/libstagefright/httplive/fuzzer/corpus/index5.m3u8 b/media/libstagefright/httplive/fuzzer/corpus/index5.m3u8
new file mode 100644
index 0000000..2120de4
--- /dev/null
+++ b/media/libstagefright/httplive/fuzzer/corpus/index5.m3u8
@@ -0,0 +1,14 @@
+#EXTM3U
+#EXT-X-TARGETDURATION:11
+#EXT-X-KEY:METHOD=NONE
+#EXT-X-MEDIA-SEQUENCE:0
+#EXT-X-VERSION:4
+#EXTINF:10.0,
+#EXT-X-BYTERANGE:10@0
+https://non.existentsite.com/test-doesnt-dereference-these-paths/fileSequence1.ts
+#EXTINF:10.0,
+#EXT-X-BYTERANGE:20@10
+https://non.existentsite.com/test-doesnt-dereference-these-paths/fileSequence1.ts
+#EXTINF:10.0,
+#EXT-X-BYTERANGE:80
+https://non.existentsite.com/test-doesnt-dereference-these-paths/fileSequence1.ts
diff --git a/media/libstagefright/httplive/fuzzer/corpus/index6.m3u8 b/media/libstagefright/httplive/fuzzer/corpus/index6.m3u8
new file mode 100644
index 0000000..588368a
--- /dev/null
+++ b/media/libstagefright/httplive/fuzzer/corpus/index6.m3u8
@@ -0,0 +1,12 @@
+#EXTM3U
+#EXT-X-VERSION:4
+#EXT-X-TARGETDURATION:5
+#EXT-X-KEY:METHOD=AES-128,URI="data:text/plain;charset=utf-8,a4cd9995a1aa91e1",IV=0x30303030303030303030303030303030
+#EXT-X-PLAYLIST-TYPE:VOD
+#EXTINF:5,
+https://non.existentsite.com/test-doesnt-dereference-these-paths/fileSequence1.ts
+#EXTINF:5,
+https://non.existentsite.com/test-doesnt-dereference-these-paths/fileSequence2.ts
+#EXTINF:5,
+https://non.existentsite.com/test-doesnt-dereference-these-paths/fileSequence3.ts
+#EXT-X-ENDLIST
diff --git a/media/libstagefright/httplive/fuzzer/corpus/index7.m3u8 b/media/libstagefright/httplive/fuzzer/corpus/index7.m3u8
new file mode 100644
index 0000000..b09948e
--- /dev/null
+++ b/media/libstagefright/httplive/fuzzer/corpus/index7.m3u8
@@ -0,0 +1,46 @@
+#EXTM3U
+#EXT-X-VERSION:4
+## Created with Unified Streaming Platform (version=1.11.3-24438)
+#EXT-X-SESSION-KEY:METHOD=AES-128,URI="https://demo.unified-streaming.com/video/tears-of-steel/aes.key"
+
+# AUDIO groups
+#EXT-X-MEDIA:TYPE=AUDIO,GROUP-ID="audio-aacl-64",LANGUAGE="en",NAME="English",DEFAULT=YES,AUTOSELECT=YES,CHANNELS="2"
+#EXT-X-MEDIA:TYPE=AUDIO,GROUP-ID="audio-aacl-128",LANGUAGE="en",NAME="English",DEFAULT=YES,AUTOSELECT=YES,CHANNELS="2"
+
+# SUBTITLES groups
+#EXT-X-MEDIA:TYPE=SUBTITLES,GROUP-ID="textstream",LANGUAGE="en",NAME="English",DEFAULT=YES,AUTOSELECT=YES,URI="tears-of-steel-aes-textstream_eng=1000.m3u8"
+#EXT-X-MEDIA:TYPE=SUBTITLES,GROUP-ID="textstream",LANGUAGE="ru",NAME="Russian",AUTOSELECT=YES,URI="tears-of-steel-aes-textstream_rus=1000.m3u8"
+
+# variants
+#EXT-X-STREAM-INF:BANDWIDTH=494000,CODECS="mp4a.40.2,avc1.42C00D",RESOLUTION=224x100,FRAME-RATE=24,AUDIO="audio-aacl-64",SUBTITLES="textstream",CLOSED-CAPTIONS=NONE
+tears-of-steel-aes-audio_eng=64008-video_eng=401000.m3u8
+#EXT-X-STREAM-INF:BANDWIDTH=933000,CODECS="mp4a.40.2,avc1.42C016",RESOLUTION=448x200,FRAME-RATE=24,AUDIO="audio-aacl-128",SUBTITLES="textstream",CLOSED-CAPTIONS=NONE
+tears-of-steel-aes-audio_eng=128002-video_eng=751000.m3u8
+#EXT-X-STREAM-INF:BANDWIDTH=1198000,CODECS="mp4a.40.2,avc1.4D401F",RESOLUTION=784x350,FRAME-RATE=24,AUDIO="audio-aacl-128",SUBTITLES="textstream",CLOSED-CAPTIONS=NONE
+tears-of-steel-aes-audio_eng=128002-video_eng=1001000.m3u8
+#EXT-X-STREAM-INF:BANDWIDTH=1728000,CODECS="mp4a.40.2,avc1.640028",RESOLUTION=1680x750,FRAME-RATE=24,VIDEO-RANGE=SDR,AUDIO="audio-aacl-128",SUBTITLES="textstream",CLOSED-CAPTIONS=NONE
+tears-of-steel-aes-audio_eng=128002-video_eng=1501000.m3u8
+#EXT-X-STREAM-INF:BANDWIDTH=2469000,CODECS="mp4a.40.2,avc1.640028",RESOLUTION=1680x750,FRAME-RATE=24,VIDEO-RANGE=SDR,AUDIO="audio-aacl-128",SUBTITLES="textstream",CLOSED-CAPTIONS=NONE
+tears-of-steel-aes-audio_eng=128002-video_eng=2200000.m3u8
+
+# variants
+#EXT-X-STREAM-INF:BANDWIDTH=1025000,CODECS="mp4a.40.2,hvc1.1.6.L150.90",RESOLUTION=1680x750,FRAME-RATE=24,VIDEO-RANGE=SDR,AUDIO="audio-aacl-64",SUBTITLES="textstream",CLOSED-CAPTIONS=NONE
+tears-of-steel-aes-audio_eng=64008-video_eng_1=902000.m3u8
+#EXT-X-STREAM-INF:BANDWIDTH=1368000,CODECS="mp4a.40.2,hvc1.1.6.L150.90",RESOLUTION=2576x1150,FRAME-RATE=24,VIDEO-RANGE=SDR,AUDIO="audio-aacl-128",SUBTITLES="textstream",CLOSED-CAPTIONS=NONE
+tears-of-steel-aes-audio_eng=128002-video_eng_1=1161000.m3u8
+#EXT-X-STREAM-INF:BANDWIDTH=1815000,CODECS="mp4a.40.2,hvc1.1.6.L150.90",RESOLUTION=3360x1500,FRAME-RATE=24,VIDEO-RANGE=SDR,AUDIO="audio-aacl-128",SUBTITLES="textstream",CLOSED-CAPTIONS=NONE
+tears-of-steel-aes-audio_eng=128002-video_eng_1=1583000.m3u8
+
+# variants
+#EXT-X-STREAM-INF:BANDWIDTH=69000,CODECS="mp4a.40.2",AUDIO="audio-aacl-64",SUBTITLES="textstream"
+tears-of-steel-aes-audio_eng=64008.m3u8
+#EXT-X-STREAM-INF:BANDWIDTH=137000,CODECS="mp4a.40.2",AUDIO="audio-aacl-128",SUBTITLES="textstream"
+tears-of-steel-aes-audio_eng=128002.m3u8
+
+# keyframes
+#EXT-X-I-FRAME-STREAM-INF:BANDWIDTH=54000,CODECS="avc1.42C00D",RESOLUTION=224x100,URI="keyframes/tears-of-steel-aes-video_eng=401000.m3u8"
+#EXT-X-I-FRAME-STREAM-INF:BANDWIDTH=100000,CODECS="avc1.42C016",RESOLUTION=448x200,URI="keyframes/tears-of-steel-aes-video_eng=751000.m3u8"
+#EXT-X-I-FRAME-STREAM-INF:BANDWIDTH=133000,CODECS="avc1.4D401F",RESOLUTION=784x350,URI="keyframes/tears-of-steel-aes-video_eng=1001000.m3u8"
+#EXT-X-I-FRAME-STREAM-INF:BANDWIDTH=120000,CODECS="hvc1.1.6.L150.90",RESOLUTION=1680x750,VIDEO-RANGE=SDR,URI="keyframes/tears-of-steel-aes-video_eng_1=902000.m3u8"
+#EXT-X-I-FRAME-STREAM-INF:BANDWIDTH=154000,CODECS="hvc1.1.6.L150.90",RESOLUTION=2576x1150,VIDEO-RANGE=SDR,URI="keyframes/tears-of-steel-aes-video_eng_1=1161000.m3u8"
+#EXT-X-I-FRAME-STREAM-INF:BANDWIDTH=210000,CODECS="hvc1.1.6.L150.90",RESOLUTION=3360x1500,VIDEO-RANGE=SDR,URI="keyframes/tears-of-steel-aes-video_eng_1=1583000.m3u8"
diff --git a/media/libstagefright/httplive/fuzzer/corpus/index8.m3u8 b/media/libstagefright/httplive/fuzzer/corpus/index8.m3u8
new file mode 100644
index 0000000..353d589
--- /dev/null
+++ b/media/libstagefright/httplive/fuzzer/corpus/index8.m3u8
@@ -0,0 +1,13 @@
+#EXTM3U
+#EXT-X-VERSION:5
+
+#EXT-X-MEDIA:TYPE=AUDIO,GROUP-ID="audio",NAME="English stereo",LANGUAGE="en",AUTOSELECT=YES,URI="../../fuzz/arm64/httplive_fuzzer/index1.m3u8"
+
+#EXT-X-STREAM-INF:BANDWIDTH=628000,CODECS="avc1.42c00d,mp4a.40.2",RESOLUTION=320x180,AUDIO="audio"
+../../fuzz/arm64/httplive_fuzzer/index1.m3u8
+#EXT-X-STREAM-INF:BANDWIDTH=928000,CODECS="avc1.42c00d,mp4a.40.2",RESOLUTION=480x270,AUDIO="audio"
+../../fuzz/arm64/httplive_fuzzer/index2.m3u8
+#EXT-X-STREAM-INF:BANDWIDTH=1728000,CODECS="avc1.42c00d,mp4a.40.2",RESOLUTION=640x360,AUDIO="audio"
+../../fuzz/arm64/httplive_fuzzer/index3.m3u8
+#EXT-X-STREAM-INF:BANDWIDTH=2528000,CODECS="avc1.42c00d,mp4a.40.2",RESOLUTION=960x540,AUDIO="audio"
+../../fuzz/arm64/httplive_fuzzer/index1.m3u8
diff --git a/media/libstagefright/httplive/fuzzer/corpus/prog_index.m3u8 b/media/libstagefright/httplive/fuzzer/corpus/prog_index.m3u8
new file mode 100644
index 0000000..eb88422
--- /dev/null
+++ b/media/libstagefright/httplive/fuzzer/corpus/prog_index.m3u8
@@ -0,0 +1,17 @@
+#EXTM3U
+#EXT-X-MEDIA:TYPE=AUDIO,GROUP-ID="audio",LANGUAGE="eng",NAME="English",AUTOSELECT=YES,DEFAULT=YES,URI="corpus/index1.m3u8"
+#EXT-X-MEDIA:TYPE=AUDIO,GROUP-ID="audio",LANGUAGE="fre",NAME="Français",AUTOSELECT=YES,DEFAULT=NO,URI="corpus/index1.m3u8"
+#EXT-X-MEDIA:TYPE=AUDIO,GROUP-ID="audio",LANGUAGE="sp",NAME="Espanol",AUTOSELECT=YES,DEFAULT=NO,URI="corpus/index1.m3u8"
+#EXT-X-VERSION:4
+#EXT-X-TARGETDURATION:5
+#EXT-X-KEY:METHOD=NONE
+#EXT-X-DISCONTINUITY-SEQUENCE:0
+#EXT-X-PLAYLIST-TYPE:VOD
+#EXTINF:5,
+https://non.existentsite.com/test-doesnt-dereference-these-paths/fileSequence1.ts
+#EXT-X-DISCONTINUITY
+#EXTINF:5,
+https://non.existentsite.com/test-doesnt-dereference-these-paths/fileSequence1.ts
+#EXTINF:5,
+https://non.existentsite.com/test-doesnt-dereference-these-paths/fileSequence1.ts
+#EXT-X-ENDLIST
diff --git a/media/libstagefright/httplive/fuzzer/httplive_fuzzer.cpp b/media/libstagefright/httplive/fuzzer/httplive_fuzzer.cpp
new file mode 100644
index 0000000..aa777b3
--- /dev/null
+++ b/media/libstagefright/httplive/fuzzer/httplive_fuzzer.cpp
@@ -0,0 +1,298 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <fstream>
+#include <fuzzer/FuzzedDataProvider.h>
+#include <LiveDataSource.h>
+#include <LiveSession.h>
+#include <media/MediaHTTPConnection.h>
+#include <media/MediaHTTPService.h>
+#include <media/mediaplayer_common.h>
+#include <media/stagefright/foundation/ABuffer.h>
+#include <media/stagefright/foundation/AHandler.h>
+#include <media/stagefright/foundation/ALooper.h>
+#include <media/stagefright/foundation/ALooperRoster.h>
+#include <string>
+#include <utils/Log.h>
+
+using namespace std;
+using namespace android;
+
+constexpr char kFileNamePrefix[] = "/data/local/tmp/httplive-";
+constexpr char kFileNameSuffix[] = ".m3u8";
+constexpr char kFileUrlPrefix[] = "file://";
+constexpr int64_t kOffSet = 0;
+constexpr int32_t kReadyMarkMs = 5000;
+constexpr int32_t kPrepareMarkMs = 1500;
+constexpr int32_t kErrorNoMax = -1;
+constexpr int32_t kErrorNoMin = -34;
+constexpr int32_t kMaxTimeUs = 1000;
+constexpr int32_t kRandomStringLength = 64;
+constexpr int32_t kRangeMin = 0;
+constexpr int32_t kRangeMax = 1000;
+
+constexpr LiveSession::StreamType kValidStreamType[] = {
+ LiveSession::STREAMTYPE_AUDIO, LiveSession::STREAMTYPE_VIDEO,
+ LiveSession::STREAMTYPE_SUBTITLES, LiveSession::STREAMTYPE_METADATA};
+
+constexpr MediaSource::ReadOptions::SeekMode kValidSeekMode[] = {
+ MediaSource::ReadOptions::SeekMode::SEEK_PREVIOUS_SYNC,
+ MediaSource::ReadOptions::SeekMode::SEEK_NEXT_SYNC,
+ MediaSource::ReadOptions::SeekMode::SEEK_CLOSEST_SYNC,
+ MediaSource::ReadOptions::SeekMode::SEEK_CLOSEST,
+ MediaSource::ReadOptions::SeekMode::SEEK_FRAME_INDEX};
+
+constexpr media_track_type kValidMediaTrackType[] = {
+ MEDIA_TRACK_TYPE_UNKNOWN, MEDIA_TRACK_TYPE_VIDEO,
+ MEDIA_TRACK_TYPE_AUDIO, MEDIA_TRACK_TYPE_TIMEDTEXT,
+ MEDIA_TRACK_TYPE_SUBTITLE, MEDIA_TRACK_TYPE_METADATA};
+
+struct TestAHandler : public AHandler {
+public:
+ TestAHandler(std::function<void()> signalEosFunction)
+ : mSignalEosFunction(signalEosFunction) {}
+ virtual ~TestAHandler() {}
+
+protected:
+ void onMessageReceived(const sp<AMessage> &msg) override {
+ int32_t what = -1;
+ msg->findInt32("what", &what);
+ switch (what) {
+ case LiveSession::kWhatError:
+ case LiveSession::kWhatPrepared:
+ case LiveSession::kWhatPreparationFailed: {
+ mSignalEosFunction();
+ break;
+ }
+ }
+ return;
+ }
+
+private:
+ std::function<void()> mSignalEosFunction;
+};
+
+struct TestMediaHTTPConnection : public MediaHTTPConnection {
+public:
+ TestMediaHTTPConnection() {}
+ virtual ~TestMediaHTTPConnection() {}
+
+ virtual bool connect(const char * /*uri*/,
+ const KeyedVector<String8, String8> * /*headers*/) {
+ return true;
+ }
+
+ virtual void disconnect() { return; }
+
+ virtual ssize_t readAt(off64_t /*offset*/, void * /*data*/, size_t size) {
+ return size;
+ }
+
+ virtual off64_t getSize() { return 0; }
+ virtual status_t getMIMEType(String8 * /*mimeType*/) { return NO_ERROR; }
+ virtual status_t getUri(String8 * /*uri*/) { return NO_ERROR; }
+
+private:
+ DISALLOW_EVIL_CONSTRUCTORS(TestMediaHTTPConnection);
+};
+
+struct TestMediaHTTPService : public MediaHTTPService {
+public:
+ TestMediaHTTPService() {}
+ ~TestMediaHTTPService(){};
+
+ virtual sp<MediaHTTPConnection> makeHTTPConnection() {
+ mediaHTTPConnection = sp<TestMediaHTTPConnection>::make();
+ return mediaHTTPConnection;
+ }
+
+private:
+ sp<TestMediaHTTPConnection> mediaHTTPConnection = nullptr;
+ DISALLOW_EVIL_CONSTRUCTORS(TestMediaHTTPService);
+};
+
+class HttpLiveFuzzer {
+public:
+ void process(const uint8_t *data, size_t size);
+ void deInitLiveSession();
+ ~HttpLiveFuzzer() { deInitLiveSession(); }
+
+private:
+ void invokeLiveDataSource();
+ void createM3U8File(const uint8_t *data, size_t size);
+ void initLiveDataSource();
+ void invokeLiveSession();
+ void initLiveSession();
+ void invokeDequeueAccessUnit();
+ void invokeConnectAsync();
+ void invokeSeekTo();
+ void invokeGetConfig();
+ void signalEos();
+ string generateFileName();
+ sp<LiveDataSource> mLiveDataSource = nullptr;
+ sp<LiveSession> mLiveSession = nullptr;
+ sp<ALooper> mLiveLooper = nullptr;
+ sp<TestMediaHTTPService> httpService = nullptr;
+ sp<TestAHandler> mHandler = nullptr;
+ FuzzedDataProvider *mFDP = nullptr;
+ bool mEosReached = false;
+ std::mutex mDownloadCompleteMutex;
+ std::condition_variable mConditionalVariable;
+};
+
+string HttpLiveFuzzer::generateFileName() {
+ return kFileNamePrefix + to_string(getpid()) + kFileNameSuffix;
+}
+
+void HttpLiveFuzzer::createM3U8File(const uint8_t *data, size_t size) {
+ ofstream m3u8File;
+ string currentFileName = generateFileName();
+ m3u8File.open(currentFileName, ios::out | ios::binary);
+ m3u8File.write((char *)data, size);
+ m3u8File.close();
+}
+
+void HttpLiveFuzzer::initLiveDataSource() {
+ mLiveDataSource = sp<LiveDataSource>::make();
+}
+
+void HttpLiveFuzzer::invokeLiveDataSource() {
+ initLiveDataSource();
+ size_t size = mFDP->ConsumeIntegralInRange<size_t>(kRangeMin, kRangeMax);
+ sp<ABuffer> buffer = new ABuffer(size);
+ mLiveDataSource->queueBuffer(buffer);
+ uint8_t *data = new uint8_t[size];
+ mLiveDataSource->readAtNonBlocking(kOffSet, data, size);
+ int32_t finalResult = mFDP->ConsumeIntegralInRange(kErrorNoMin, kErrorNoMax);
+ mLiveDataSource->queueEOS(finalResult);
+ mLiveDataSource->reset();
+ mLiveDataSource->countQueuedBuffers();
+ mLiveDataSource->initCheck();
+ delete[] data;
+}
+
+void HttpLiveFuzzer::initLiveSession() {
+ ALooperRoster looperRoster;
+ mHandler =
+ sp<TestAHandler>::make(std::bind(&HttpLiveFuzzer::signalEos, this));
+ mLiveLooper = sp<ALooper>::make();
+ mLiveLooper->setName("http live");
+ mLiveLooper->start();
+ sp<AMessage> notify = sp<AMessage>::make(0, mHandler);
+ httpService = new TestMediaHTTPService();
+ uint32_t flags = mFDP->ConsumeIntegral<uint32_t>();
+ mLiveSession = sp<LiveSession>::make(notify, flags, httpService);
+ mLiveLooper->registerHandler(mLiveSession);
+ looperRoster.registerHandler(mLiveLooper, mHandler);
+}
+
+void HttpLiveFuzzer::invokeDequeueAccessUnit() {
+ LiveSession::StreamType stream = mFDP->PickValueInArray(kValidStreamType);
+ sp<ABuffer> buffer;
+ mLiveSession->dequeueAccessUnit(stream, &buffer);
+}
+
+void HttpLiveFuzzer::invokeSeekTo() {
+ int64_t timeUs = mFDP->ConsumeIntegralInRange<int64_t>(0, kMaxTimeUs);
+ MediaSource::ReadOptions::SeekMode mode =
+ mFDP->PickValueInArray(kValidSeekMode);
+ mLiveSession->seekTo(timeUs, mode);
+}
+
+void HttpLiveFuzzer::invokeGetConfig() {
+ mLiveSession->getTrackCount();
+ size_t trackIndex = mFDP->ConsumeIntegral<size_t>();
+ mLiveSession->getTrackInfo(trackIndex);
+ media_track_type type = mFDP->PickValueInArray(kValidMediaTrackType);
+ mLiveSession->getSelectedTrack(type);
+ sp<MetaData> meta;
+ LiveSession::StreamType stream = mFDP->PickValueInArray(kValidStreamType);
+ mLiveSession->getStreamFormatMeta(stream, &meta);
+ mLiveSession->getKeyForStream(stream);
+ if (stream != LiveSession::STREAMTYPE_SUBTITLES) {
+ mLiveSession->getSourceTypeForStream(stream);
+ }
+}
+
+void HttpLiveFuzzer::invokeConnectAsync() {
+ string currentFileName = generateFileName();
+ string url = kFileUrlPrefix + currentFileName;
+ string str_1 = mFDP->ConsumeRandomLengthString(kRandomStringLength);
+ string str_2 = mFDP->ConsumeRandomLengthString(kRandomStringLength);
+
+ KeyedVector<String8, String8> headers;
+ headers.add(String8(str_1.c_str()), String8(str_2.c_str()));
+ mLiveSession->connectAsync(url.c_str(), &headers);
+}
+
+void HttpLiveFuzzer::invokeLiveSession() {
+ initLiveSession();
+ BufferingSettings bufferingSettings;
+ bufferingSettings.mInitialMarkMs = kPrepareMarkMs;
+ bufferingSettings.mResumePlaybackMarkMs = kReadyMarkMs;
+ mLiveSession->setBufferingSettings(bufferingSettings);
+ invokeConnectAsync();
+ std::unique_lock waitForDownloadComplete(mDownloadCompleteMutex);
+ mConditionalVariable.wait(waitForDownloadComplete,
+ [this] { return mEosReached; });
+ if (mLiveSession->isSeekable()) {
+ invokeSeekTo();
+ }
+ invokeDequeueAccessUnit();
+ size_t index = mFDP->ConsumeIntegral<size_t>();
+ bool select = mFDP->ConsumeBool();
+ mLiveSession->selectTrack(index, select);
+ mLiveSession->hasDynamicDuration();
+ int64_t firstTimeUs =
+ mFDP->ConsumeIntegralInRange<int64_t>(kRangeMin, kRangeMax);
+ int64_t timeUs = mFDP->ConsumeIntegralInRange<int64_t>(kRangeMin, kRangeMax);
+ int32_t discontinuitySeq = mFDP->ConsumeIntegral<int32_t>();
+ mLiveSession->calculateMediaTimeUs(firstTimeUs, timeUs, discontinuitySeq);
+ invokeGetConfig();
+}
+
+void HttpLiveFuzzer::process(const uint8_t *data, size_t size) {
+ mFDP = new FuzzedDataProvider(data, size);
+ createM3U8File(data, size);
+ invokeLiveDataSource();
+ invokeLiveSession();
+ delete mFDP;
+}
+
+void HttpLiveFuzzer::deInitLiveSession() {
+ if (mLiveSession != nullptr) {
+ mLiveSession->disconnect();
+ mLiveLooper->unregisterHandler(mLiveSession->id());
+ mLiveLooper->stop();
+ }
+ mLiveSession.clear();
+ mLiveLooper.clear();
+}
+
+void HttpLiveFuzzer::signalEos() {
+ mEosReached = true;
+ {
+ std::lock_guard<std::mutex> waitForDownloadComplete(mDownloadCompleteMutex);
+ }
+ mConditionalVariable.notify_one();
+ return;
+}
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t *data, size_t size) {
+ HttpLiveFuzzer httpliveFuzzer;
+ httpliveFuzzer.process(data, size);
+ return 0;
+}
diff --git a/media/libstagefright/httplive/fuzzer/httplive_fuzzer.dict b/media/libstagefright/httplive/fuzzer/httplive_fuzzer.dict
new file mode 100644
index 0000000..703cc7e
--- /dev/null
+++ b/media/libstagefright/httplive/fuzzer/httplive_fuzzer.dict
@@ -0,0 +1,15 @@
+#m3u8-Tags
+kw1="#EXTM3U"
+kw2="#EXT-X-VERSION:"
+kw3="#EXT-X-TARGETDURATION:"
+kw4="#EXT-X-PLAYLIST-TYPE:"
+kw5="#EXTINF:"
+kw6="#EXT-X-ENDLIST"
+kw7="#EXT-X-MEDIA-SEQUENCE:"
+kw8="#EXT-X-KEY:METHOD=NONE"
+kw9="#EXT-X-DISCONTINUITY:"
+kw10="#EXT-X-DISCONTINUITY-SEQUENCE:0"
+kw11="#EXT-X-STREAM-INF:BANDWIDTH="
+kw12="#EXT-X-STREAM-INF:CODECS="
+kw13="#EXT-X-BYTERANGE:"
+kw14="#EXT-X-MEDIA"
diff --git a/media/libstagefright/include/media/stagefright/ACodec.h b/media/libstagefright/include/media/stagefright/ACodec.h
index c84cc10..632b32c 100644
--- a/media/libstagefright/include/media/stagefright/ACodec.h
+++ b/media/libstagefright/include/media/stagefright/ACodec.h
@@ -71,6 +71,9 @@
virtual void initiateSetInputSurface(const sp<PersistentSurface> &surface);
virtual void initiateStart();
virtual void initiateShutdown(bool keepComponentAllocated = false);
+ virtual status_t querySupportedParameters(std::vector<std::string> *names) override;
+ virtual status_t subscribeToParameters(const std::vector<std::string> &names) override;
+ virtual status_t unsubscribeFromParameters(const std::vector<std::string> &names) override;
status_t queryCapabilities(
const char* owner, const char* name,
diff --git a/media/libstagefright/include/media/stagefright/ColorConverter.h b/media/libstagefright/include/media/stagefright/ColorConverter.h
index 75b0d8e..1d86a22 100644
--- a/media/libstagefright/include/media/stagefright/ColorConverter.h
+++ b/media/libstagefright/include/media/stagefright/ColorConverter.h
@@ -54,6 +54,7 @@
uint32_t mTransfer;
bool isBt709();
+ bool isBt2020();
bool isJpeg();
};
@@ -78,8 +79,10 @@
OMX_COLOR_FORMATTYPE mSrcFormat, mDstFormat;
ColorSpace mSrcColorSpace;
uint8_t *mClip;
+ uint16_t *mClip10Bit;
uint8_t *initClip();
+ uint16_t *initClip10Bit();
status_t convertCbYCrY(
const BitmapParams &src, const BitmapParams &dst);
@@ -111,6 +114,12 @@
status_t convertTIYUV420PackedSemiPlanar(
const BitmapParams &src, const BitmapParams &dst);
+ status_t convertYUVP010(
+ const BitmapParams &src, const BitmapParams &dst);
+
+ status_t convertYUVP010ToRGBA1010102(
+ const BitmapParams &src, const BitmapParams &dst);
+
ColorConverter(const ColorConverter &);
ColorConverter &operator=(const ColorConverter &);
};
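
Reviewer note: the new convertYUVP010ToRGBA1010102() path implies unpacking the 10-bit samples that P010 stores in the high bits of each 16-bit word and repacking them into a 2-10-10-10 pixel. The snippet below sketches only that bit manipulation, with hypothetical helper names and an assumed RGBA1010102 channel layout; the real conversion additionally applies the YUV-to-RGB matrix (BT.2020 per isBt2020()) and the 10-bit clip table declared above.

    #include <cstdint>

    // P010 keeps each 10-bit sample in the top 10 bits of a uint16_t.
    static inline uint16_t unpackP010(uint16_t v) { return v >> 6; }

    // Assumed RGBA1010102 layout: R in bits 0-9, G in 10-19, B in 20-29, A in 30-31.
    static inline uint32_t packRGBA1010102(uint16_t r, uint16_t g, uint16_t b) {
        return (uint32_t)(r & 0x3FF)
             | ((uint32_t)(g & 0x3FF) << 10)
             | ((uint32_t)(b & 0x3FF) << 20)
             | (0x3u << 30);  // opaque alpha
    }
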
diff --git a/media/libstagefright/include/media/stagefright/MediaBuffer.h b/media/libstagefright/include/media/stagefright/MediaBuffer.h
index 2c03f27..f070aac 100644
--- a/media/libstagefright/include/media/stagefright/MediaBuffer.h
+++ b/media/libstagefright/include/media/stagefright/MediaBuffer.h
@@ -105,7 +105,6 @@
if (mMemory.get() == nullptr || mMemory->unsecurePointer() == nullptr) return 0;
int32_t remoteRefcount =
reinterpret_cast<SharedControl *>(mMemory->unsecurePointer())->getRemoteRefcount();
- // Sanity check so that remoteRefCount() is non-negative.
return remoteRefcount >= 0 ? remoteRefcount : 0; // do not allow corrupted data.
#else
return 0;
diff --git a/media/libstagefright/include/media/stagefright/MediaCodec.h b/media/libstagefright/include/media/stagefright/MediaCodec.h
index d372140..ce3b0d0 100644
--- a/media/libstagefright/include/media/stagefright/MediaCodec.h
+++ b/media/libstagefright/include/media/stagefright/MediaCodec.h
@@ -445,6 +445,12 @@
int32_t mRotationDegrees;
int32_t mAllowFrameDroppingBySurface;
+ uint32_t mHDRMetadataFlags; /* bitmask of kFlagHDR* */
+ enum {
+ kFlagHDRStaticInfo = 1 << 0,
+ kFlagHDR10PlusInfo = 1 << 1,
+ };
+
// initial create parameters
AString mInitName;
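
Reviewer note: mHDRMetadataFlags is a plain bitmask over the kFlagHDR* enumerators added above. A trivial, illustrative sketch of how such a field is set and tested (names reused from the header, the surrounding logic is not shown here):

    uint32_t hdrFlags = 0;
    hdrFlags |= kFlagHDRStaticInfo;   // static HDR metadata was seen
    hdrFlags |= kFlagHDR10PlusInfo;   // dynamic HDR10+ metadata was seen
    const bool hasHdr10Plus = (hdrFlags & kFlagHDR10PlusInfo) != 0;
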
diff --git a/media/libstagefright/include/media/stagefright/MediaCodecConstants.h b/media/libstagefright/include/media/stagefright/MediaCodecConstants.h
index c2c79dd..84653eb 100644
--- a/media/libstagefright/include/media/stagefright/MediaCodecConstants.h
+++ b/media/libstagefright/include/media/stagefright/MediaCodecConstants.h
@@ -364,7 +364,7 @@
inline static const char *asString_AV1Profile(int32_t i, const char *def = "??") {
switch (i) {
case AV1ProfileMain8: return "Main8";
- case AV1ProfileMain10: return "Main10HDR";
+ case AV1ProfileMain10: return "Main10";
case AV1ProfileMain10HDR10: return "Main10HDR10";
case AV1ProfileMain10HDR10Plus: return "Main10HDRPlus";
default: return def;
@@ -540,6 +540,9 @@
constexpr int32_t DolbyVisionLevelUhd30 = 0x40;
constexpr int32_t DolbyVisionLevelUhd48 = 0x80;
constexpr int32_t DolbyVisionLevelUhd60 = 0x100;
+constexpr int32_t DolbyVisionLevelUhd120 = 0x200;
+constexpr int32_t DolbyVisionLevel8k30 = 0x400;
+constexpr int32_t DolbyVisionLevel8k60 = 0x800;
inline static const char *asString_DolbyVisionLevel(int32_t i, const char *def = "??") {
switch (i) {
@@ -552,6 +555,9 @@
case DolbyVisionLevelUhd30: return "Uhd30";
case DolbyVisionLevelUhd48: return "Uhd48";
case DolbyVisionLevelUhd60: return "Uhd60";
+ case DolbyVisionLevelUhd120: return "Uhd120";
+ case DolbyVisionLevel8k30: return "8k30";
+ case DolbyVisionLevel8k60: return "8k60";
default: return def;
}
}
@@ -586,9 +592,11 @@
constexpr int32_t COLOR_Format24bitBGR888 = 12;
constexpr int32_t COLOR_Format24bitRGB888 = 11;
constexpr int32_t COLOR_Format25bitARGB1888 = 14;
+constexpr int32_t COLOR_Format32bitABGR2101010 = 0x7F00AAA2;
constexpr int32_t COLOR_Format32bitABGR8888 = 0x7F00A000;
constexpr int32_t COLOR_Format32bitARGB8888 = 16;
constexpr int32_t COLOR_Format32bitBGRA8888 = 15;
+constexpr int32_t COLOR_Format64bitABGRFloat = 0x7F000F16;
constexpr int32_t COLOR_Format8bitRGB332 = 2;
constexpr int32_t COLOR_FormatCbYCrY = 27;
constexpr int32_t COLOR_FormatCrYCbY = 28;
@@ -642,9 +650,11 @@
case COLOR_Format24bitBGR888: return "24bitBGR888";
case COLOR_Format24bitRGB888: return "24bitRGB888";
case COLOR_Format25bitARGB1888: return "25bitARGB1888";
+ case COLOR_Format32bitABGR2101010: return "32bitABGR2101010";
case COLOR_Format32bitABGR8888: return "32bitABGR8888";
case COLOR_Format32bitARGB8888: return "32bitARGB8888";
case COLOR_Format32bitBGRA8888: return "32bitBGRA8888";
+ case COLOR_Format64bitABGRFloat: return "64bitABGRFloat";
case COLOR_Format8bitRGB332: return "8bitRGB332";
case COLOR_FormatCbYCrY: return "CbYCrY";
case COLOR_FormatCrYCbY: return "CrYCbY";
@@ -677,6 +687,7 @@
case COLOR_FormatYUV422SemiPlanar: return "YUV422SemiPlanar";
case COLOR_FormatYUV444Flexible: return "YUV444Flexible";
case COLOR_FormatYUV444Interleaved: return "YUV444Interleaved";
+ case COLOR_FormatYUVP010: return "YUVP010";
case COLOR_QCOM_FormatYUV420SemiPlanar: return "QCOM_YUV420SemiPlanar";
case COLOR_TI_FormatYUV420PackedSemiPlanar: return "TI_YUV420PackedSemiPlanar";
default: return def;
@@ -684,6 +695,7 @@
}
constexpr char FEATURE_AdaptivePlayback[] = "adaptive-playback";
+constexpr char FEATURE_EncodingStatistics[] = "encoding-statistics";
constexpr char FEATURE_IntraRefresh[] = "intra-refresh";
constexpr char FEATURE_PartialFrame[] = "partial-frame";
constexpr char FEATURE_QpBounds[] = "qp-bounds";
@@ -737,6 +749,14 @@
constexpr int32_t COLOR_TRANSFER_SDR_VIDEO = 3;
constexpr int32_t COLOR_TRANSFER_ST2084 = 6;
+constexpr int32_t PICTURE_TYPE_I = 1;
+constexpr int32_t PICTURE_TYPE_P = 2;
+constexpr int32_t PICTURE_TYPE_B = 3;
+constexpr int32_t PICTURE_TYPE_UNKNOWN = 0;
+
+constexpr int32_t VIDEO_ENCODING_STATISTICS_LEVEL_1 = 1;
+constexpr int32_t VIDEO_ENCODING_STATISTICS_LEVEL_NONE = 0;
+
constexpr char KEY_AAC_DRC_ALBUM_MODE[] = "aac-drc-album-mode";
constexpr char KEY_AAC_DRC_ATTENUATION_FACTOR[] = "aac-drc-cut-level";
constexpr char KEY_AAC_DRC_BOOST_FACTOR[] = "aac-drc-boost-level";
@@ -796,6 +816,7 @@
constexpr char KEY_OPERATING_RATE[] = "operating-rate";
constexpr char KEY_OUTPUT_REORDER_DEPTH[] = "output-reorder-depth";
constexpr char KEY_PCM_ENCODING[] = "pcm-encoding";
+constexpr char KEY_PICTURE_TYPE[] = "picture_type";
constexpr char KEY_PIXEL_ASPECT_RATIO_HEIGHT[] = "sar-height";
constexpr char KEY_PIXEL_ASPECT_RATIO_WIDTH[] = "sar-width";
constexpr char KEY_PREPEND_HEADER_TO_SYNC_FRAMES[] = "prepend-sps-pps-to-idr-frames";
@@ -812,6 +833,8 @@
constexpr char KEY_TILE_HEIGHT[] = "tile-height";
constexpr char KEY_TILE_WIDTH[] = "tile-width";
constexpr char KEY_TRACK_ID[] = "track-id";
+constexpr char KEY_VIDEO_ENCODING_STATISTICS_LEVEL[] = "video-encoding-statistics-level";
+constexpr char KEY_VIDEO_QP_AVERAGE[] = "video-qp-average";
constexpr char KEY_VIDEO_QP_B_MAX[] = "video-qp-b-max";
constexpr char KEY_VIDEO_QP_B_MIN[] = "video-qp-b-min";
constexpr char KEY_VIDEO_QP_I_MAX[] = "video-qp-i-max";
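
Reviewer note: the new KEY_VIDEO_ENCODING_STATISTICS_LEVEL, KEY_VIDEO_QP_AVERAGE and KEY_PICTURE_TYPE strings follow the existing AMessage-keyed format convention in this header. A hedged sketch of how framework code might opt in and read the values back; the key strings come from the constants above, but the flow itself is illustrative and outputFormat is a hypothetical sp<AMessage> taken from the codec's output.

    // configure time: ask the encoder for per-frame statistics
    sp<AMessage> format = new AMessage;
    format->setInt32(KEY_VIDEO_ENCODING_STATISTICS_LEVEL, VIDEO_ENCODING_STATISTICS_LEVEL_1);

    // output side: read back average QP and picture type if the codec reported them
    int32_t avgQp = 0, pictureType = PICTURE_TYPE_UNKNOWN;
    if (outputFormat->findInt32(KEY_VIDEO_QP_AVERAGE, &avgQp)
            && outputFormat->findInt32(KEY_PICTURE_TYPE, &pictureType)) {
        // avgQp / pictureType now hold the encoder-reported statistics for this frame
    }
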
diff --git a/media/libstagefright/include/media/stagefright/MetaDataBase.h b/media/libstagefright/include/media/stagefright/MetaDataBase.h
index c80012e..88c1f3f 100644
--- a/media/libstagefright/include/media/stagefright/MetaDataBase.h
+++ b/media/libstagefright/include/media/stagefright/MetaDataBase.h
@@ -60,6 +60,8 @@
kKeyAVCC = 'avcc', // raw data
kKeyHVCC = 'hvcc', // raw data
kKeyDVCC = 'dvcc', // raw data
+ kKeyDVVC = 'dvvc', // raw data
+ kKeyDVWC = 'dvwc', // raw data
kKeyAV1C = 'av1c', // raw data
kKeyThumbnailHVCC = 'thvc', // raw data
kKeyThumbnailAV1C = 'tav1', // raw data
@@ -283,6 +285,8 @@
kTypeHVCC = 'hvcc',
kTypeAV1C = 'av1c',
kTypeDVCC = 'dvcc',
+ kTypeDVVC = 'dvvc',
+ kTypeDVWC = 'dvwc',
kTypeD263 = 'd263',
kTypeHCOS = 'hcos',
};
diff --git a/media/libstagefright/tests/fuzzers/Android.bp b/media/libstagefright/tests/fuzzers/Android.bp
index 0097830..ea17a4d 100644
--- a/media/libstagefright/tests/fuzzers/Android.bp
+++ b/media/libstagefright/tests/fuzzers/Android.bp
@@ -86,9 +86,6 @@
dictionary: "dictionaries/formats.dict",
defaults: ["libstagefright_fuzzer_defaults"],
static_libs: [
- "libstagefright_webm",
"libdatasource",
- "libstagefright_esds",
- "libogg",
],
}
diff --git a/media/libwatchdog/Android.bp b/media/libwatchdog/Android.bp
index 411c206..5506a73 100644
--- a/media/libwatchdog/Android.bp
+++ b/media/libwatchdog/Android.bp
@@ -39,7 +39,7 @@
darwin: {
enabled: false,
},
- linux_glibc: {
+ glibc: {
cflags: [
"-Dsigev_notify_thread_id=_sigev_un._tid",
],
diff --git a/media/mtp/OWNERS b/media/mtp/OWNERS
index 1928ba8..54d3d4a 100644
--- a/media/mtp/OWNERS
+++ b/media/mtp/OWNERS
@@ -1,6 +1,5 @@
set noparent
-marcone@google.com
jsharkey@android.com
jameswei@google.com
rmojumder@google.com
diff --git a/media/ndk/Android.bp b/media/ndk/Android.bp
index 8d527e9..94e5d1f 100644
--- a/media/ndk/Android.bp
+++ b/media/ndk/Android.bp
@@ -177,10 +177,6 @@
"NdkMediaDataSourceCallbacks.cpp",
],
- include_dirs: [
- "frameworks/av/media/libstagefright/include",
- "frameworks/av/media/ndk/include",
- ],
export_include_dirs: [
"include",
@@ -193,6 +189,7 @@
],
header_libs: [
+ "libstagefright_headers",
"libmedia_headers",
],
@@ -223,6 +220,7 @@
"libcutils",
"android.hardware.graphics.bufferqueue@1.0",
],
+
header_libs: [
"libstagefright_foundation_headers",
],
@@ -230,9 +228,6 @@
cflags: [
"-D__ANDROID_VNDK__",
],
- include_dirs: [
- "frameworks/av/media/ndk/",
- ],
}
cc_library_static {
diff --git a/media/ndk/NdkMediaCodec.cpp b/media/ndk/NdkMediaCodec.cpp
index 1ae2b44..227459a 100644
--- a/media/ndk/NdkMediaCodec.cpp
+++ b/media/ndk/NdkMediaCodec.cpp
@@ -158,8 +158,7 @@
}
Mutex::Autolock _l(mCodec->mAsyncCallbackLock);
- if (mCodec->mAsyncCallbackUserData != NULL
- || mCodec->mAsyncCallback.onAsyncInputAvailable != NULL) {
+ if (mCodec->mAsyncCallback.onAsyncInputAvailable != NULL) {
mCodec->mAsyncCallback.onAsyncInputAvailable(
mCodec,
mCodec->mAsyncCallbackUserData,
@@ -205,8 +204,7 @@
(uint32_t)flags};
Mutex::Autolock _l(mCodec->mAsyncCallbackLock);
- if (mCodec->mAsyncCallbackUserData != NULL
- || mCodec->mAsyncCallback.onAsyncOutputAvailable != NULL) {
+ if (mCodec->mAsyncCallback.onAsyncOutputAvailable != NULL) {
mCodec->mAsyncCallback.onAsyncOutputAvailable(
mCodec,
mCodec->mAsyncCallbackUserData,
@@ -234,8 +232,7 @@
AMediaFormat *aMediaFormat = AMediaFormat_fromMsg(©);
Mutex::Autolock _l(mCodec->mAsyncCallbackLock);
- if (mCodec->mAsyncCallbackUserData != NULL
- || mCodec->mAsyncCallback.onAsyncFormatChanged != NULL) {
+ if (mCodec->mAsyncCallback.onAsyncFormatChanged != NULL) {
mCodec->mAsyncCallback.onAsyncFormatChanged(
mCodec,
mCodec->mAsyncCallbackUserData,
@@ -263,8 +260,7 @@
err, actionCode, detail.c_str());
Mutex::Autolock _l(mCodec->mAsyncCallbackLock);
- if (mCodec->mAsyncCallbackUserData != NULL
- || mCodec->mAsyncCallback.onAsyncError != NULL) {
+ if (mCodec->mAsyncCallback.onAsyncError != NULL) {
mCodec->mAsyncCallback.onAsyncError(
mCodec,
mCodec->mAsyncCallbackUserData,
@@ -452,17 +448,19 @@
uint32_t flags) {
sp<AMessage> nativeFormat;
AMediaFormat_getFormat(format, &nativeFormat);
- ALOGV("configure with format: %s", nativeFormat->debugString(0).c_str());
+ // create our shallow copy, so we aren't affected by any later changes.
+ sp<AMessage> dupNativeFormat = nativeFormat->dup();
+ ALOGV("configure with format: %s", dupNativeFormat->debugString(0).c_str());
sp<Surface> surface = NULL;
if (window != NULL) {
surface = (Surface*) window;
}
- status_t err = mData->mCodec->configure(nativeFormat, surface,
+ status_t err = mData->mCodec->configure(dupNativeFormat, surface,
crypto ? crypto->mCrypto : NULL, flags);
if (err != OK) {
ALOGE("configure: err(%d), failed with format: %s",
- err, nativeFormat->debugString(0).c_str());
+ err, dupNativeFormat->debugString(0).c_str());
}
return translate_error(err);
}
@@ -472,16 +470,20 @@
AMediaCodec *mData,
AMediaCodecOnAsyncNotifyCallback callback,
void *userdata) {
- if (mData->mAsyncNotify == NULL && userdata != NULL) {
- mData->mAsyncNotify = new AMessage(kWhatAsyncNotify, mData->mHandler);
- status_t err = mData->mCodec->setCallback(mData->mAsyncNotify);
- if (err != OK) {
- ALOGE("setAsyncNotifyCallback: err(%d), failed to set async callback", err);
- return translate_error(err);
- }
- }
Mutex::Autolock _l(mData->mAsyncCallbackLock);
+
+ if (mData->mAsyncNotify == NULL) {
+ mData->mAsyncNotify = new AMessage(kWhatAsyncNotify, mData->mHandler);
+ }
+
+ // always call setCallback(): the codec may have been reset/re-configured since the last call.
+ status_t err = mData->mCodec->setCallback(mData->mAsyncNotify);
+ if (err != OK) {
+ ALOGE("setAsyncNotifyCallback: err(%d), failed to set async callback", err);
+ return translate_error(err);
+ }
+
mData->mAsyncCallback = callback;
mData->mAsyncCallbackUserData = userdata;
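
Reviewer note: the hunks above replace the old "userdata != NULL || callback != NULL" guards with a check on the callback pointer alone; since userdata may legitimately be null while a callback slot is unset, the old OR condition could call through a null function pointer. setCallback() is also moved under the lock and re-armed on every call, since the codec may have been reset or re-configured since the previous registration. A tiny illustrative sketch of the safe dispatch rule (cb, codec, userdata and index are stand-ins):

    // only the presence of the callback decides dispatch; userdata is allowed to be null
    if (cb.onAsyncInputAvailable != nullptr) {
        cb.onAsyncInputAvailable(codec, userdata /* may legitimately be null */, index);
    }
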
diff --git a/media/ndk/NdkMediaFormat.cpp b/media/ndk/NdkMediaFormat.cpp
index 69ab242..923453a 100644
--- a/media/ndk/NdkMediaFormat.cpp
+++ b/media/ndk/NdkMediaFormat.cpp
@@ -361,6 +361,7 @@
"mpegh-reference-channel-layout";
EXPORT const char* AMEDIAFORMAT_KEY_OPERATING_RATE = "operating-rate";
EXPORT const char* AMEDIAFORMAT_KEY_PCM_ENCODING = "pcm-encoding";
+EXPORT const char* AMEDIAFORMAT_KEY_PICTURE_TYPE = "picture-type";
EXPORT const char* AMEDIAFORMAT_KEY_PRIORITY = "priority";
EXPORT const char* AMEDIAFORMAT_KEY_PROFILE = "profile";
EXPORT const char* AMEDIAFORMAT_KEY_PCM_BIG_ENDIAN = "pcm-big-endian";
@@ -394,6 +395,9 @@
EXPORT const char* AMEDIAFORMAT_KEY_TRACK_ID = "track-id";
EXPORT const char* AMEDIAFORMAT_KEY_TRACK_INDEX = "track-index";
EXPORT const char* AMEDIAFORMAT_KEY_VALID_SAMPLES = "valid-samples";
+EXPORT const char* AMEDIAFORMAT_KEY_VIDEO_ENCODING_STATISTICS_LEVEL =
+ "video-encoding-statistics-level";
+EXPORT const char* AMEDIAFORMAT_KEY_VIDEO_QP_AVERAGE = "video-qp-average";
EXPORT const char* AMEDIAFORMAT_VIDEO_QP_B_MAX = "video-qp-b-max";
EXPORT const char* AMEDIAFORMAT_VIDEO_QP_B_MIN = "video-qp-b-min";
EXPORT const char* AMEDIAFORMAT_VIDEO_QP_I_MAX = "video-qp-i-max";
diff --git a/media/ndk/OWNERS b/media/ndk/OWNERS
index 9dc441e..83644f0 100644
--- a/media/ndk/OWNERS
+++ b/media/ndk/OWNERS
@@ -1,3 +1,4 @@
-marcone@google.com
+essick@google.com
+lajos@google.com
# For AImage/AImageReader
include platform/frameworks/av:/camera/OWNERS
diff --git a/media/ndk/include/media/NdkMediaFormat.h b/media/ndk/include/media/NdkMediaFormat.h
index 2d2fcc0..2195657 100644
--- a/media/ndk/include/media/NdkMediaFormat.h
+++ b/media/ndk/include/media/NdkMediaFormat.h
@@ -311,6 +311,10 @@
extern const char* AMEDIAFORMAT_KEY_LAST_SAMPLE_INDEX_IN_CHUNK __INTRODUCED_IN(31);
extern const char* AMEDIAFORMAT_KEY_SAMPLE_TIME_BEFORE_APPEND __INTRODUCED_IN(31);
+extern const char* AMEDIAFORMAT_KEY_PICTURE_TYPE __INTRODUCED_IN(33);
+extern const char* AMEDIAFORMAT_KEY_VIDEO_ENCODING_STATISTICS_LEVEL __INTRODUCED_IN(33);
+extern const char* AMEDIAFORMAT_KEY_VIDEO_QP_AVERAGE __INTRODUCED_IN(33);
+
extern const char* AMEDIAFORMAT_VIDEO_QP_B_MAX __INTRODUCED_IN(31);
extern const char* AMEDIAFORMAT_VIDEO_QP_B_MIN __INTRODUCED_IN(31);
extern const char* AMEDIAFORMAT_VIDEO_QP_I_MAX __INTRODUCED_IN(31);
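
Reviewer note: these are the NDK counterparts of the encoding-statistics keys added to MediaCodecConstants.h. A hedged sketch of how an NDK client targeting API 33 could use them, relying only on existing AMediaFormat calls; outFmt is a hypothetical output-format handle obtained from the codec, and error handling is omitted.

    #include <media/NdkMediaFormat.h>

    // configure time: request per-frame encoding statistics
    AMediaFormat* fmt = AMediaFormat_new();
    AMediaFormat_setInt32(fmt, AMEDIAFORMAT_KEY_VIDEO_ENCODING_STATISTICS_LEVEL, 1);

    // after dequeuing an output buffer: query the per-frame values, if present
    int32_t avgQp = 0, picType = 0;
    if (AMediaFormat_getInt32(outFmt, AMEDIAFORMAT_KEY_VIDEO_QP_AVERAGE, &avgQp) &&
        AMediaFormat_getInt32(outFmt, AMEDIAFORMAT_KEY_PICTURE_TYPE, &picType)) {
        // avgQp / picType hold the encoder-reported statistics for this frame
    }
    AMediaFormat_delete(fmt);
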
diff --git a/media/tests/SampleVideoEncoder/app/src/main/AndroidManifest.xml b/media/tests/SampleVideoEncoder/app/src/main/AndroidManifest.xml
index b17541d..75d73bf 100644
--- a/media/tests/SampleVideoEncoder/app/src/main/AndroidManifest.xml
+++ b/media/tests/SampleVideoEncoder/app/src/main/AndroidManifest.xml
@@ -30,7 +30,8 @@
android:roundIcon="@mipmap/ic_launcher_round"
android:supportsRtl="true"
android:theme="@style/AppTheme">
- <activity android:name="com.android.media.samplevideoencoder.MainActivity">
+ <activity android:name="com.android.media.samplevideoencoder.MainActivity"
+ android:exported="true">
<intent-filter>
<action android:name="android.intent.action.MAIN" />
<category android:name="android.intent.category.LAUNCHER" />
@@ -42,4 +43,4 @@
android:targetPackage="com.android.media.samplevideoencoder"
android:label="SampleVideoEncoder Test"/>
-</manifest>
\ No newline at end of file
+</manifest>
diff --git a/media/tests/benchmark/MediaBenchmarkTest/Android.bp b/media/tests/benchmark/MediaBenchmarkTest/Android.bp
index 2e06da5..4b44dcf 100644
--- a/media/tests/benchmark/MediaBenchmarkTest/Android.bp
+++ b/media/tests/benchmark/MediaBenchmarkTest/Android.bp
@@ -69,7 +69,6 @@
java_defaults {
name: "MediaBenchmark-defaults",
- sdk_version: "system_current",
min_sdk_version: "28",
- target_sdk_version: "29",
+ target_sdk_version: "30",
}
diff --git a/media/tests/benchmark/MediaBenchmarkTest/build.gradle b/media/tests/benchmark/MediaBenchmarkTest/build.gradle
index b2aee1a..b222d47 100644
--- a/media/tests/benchmark/MediaBenchmarkTest/build.gradle
+++ b/media/tests/benchmark/MediaBenchmarkTest/build.gradle
@@ -17,21 +17,21 @@
buildscript {
repositories {
google()
- jcenter()
+ mavenCentral()
}
dependencies {
- classpath 'com.android.tools.build:gradle:3.5.0'
+ classpath 'com.android.tools.build:gradle:4.2.1'
}
}
apply plugin: 'com.android.application'
android {
- compileSdkVersion 29
+ compileSdkVersion 30
defaultConfig {
applicationId "com.android.media.benchmark"
minSdkVersion 28
- targetSdkVersion 29
+ targetSdkVersion 30
versionCode 1
versionName "1.0"
testInstrumentationRunner "androidx.test.runner.AndroidJUnitRunner"
@@ -57,20 +57,20 @@
externalNativeBuild {
cmake {
path "src/main/cpp/CMakeLists.txt"
- version "3.10.2"
+ version "3.18.1"
}
}
}
repositories {
google()
- jcenter()
+ mavenCentral()
}
dependencies {
implementation fileTree(dir: 'libs', include: ['*.jar'])
- implementation 'androidx.appcompat:appcompat:1.1.0'
- testImplementation 'junit:junit:4.12'
- androidTestImplementation 'androidx.test:runner:1.2.0'
- androidTestImplementation 'androidx.test.ext:junit:1.1.1'
+ implementation 'androidx.appcompat:appcompat:1.3.0'
+ testImplementation 'junit:junit:4.13.2'
+ androidTestImplementation 'androidx.test:runner:1.3.0'
+ androidTestImplementation 'androidx.test.ext:junit:1.1.2'
}
\ No newline at end of file
diff --git a/media/tests/benchmark/MediaBenchmarkTest/src/main/cpp/Android.bp b/media/tests/benchmark/MediaBenchmarkTest/src/main/cpp/Android.bp
index af92424..0192d68 100644
--- a/media/tests/benchmark/MediaBenchmarkTest/src/main/cpp/Android.bp
+++ b/media/tests/benchmark/MediaBenchmarkTest/src/main/cpp/Android.bp
@@ -9,7 +9,6 @@
cc_test_library {
name: "libmediabenchmark_jni",
- sdk_version: "current",
defaults: [
"libmediabenchmark_common-defaults",
diff --git a/media/tests/benchmark/src/native/common/Android.bp b/media/tests/benchmark/src/native/common/Android.bp
index 6b54c6a..718d217 100644
--- a/media/tests/benchmark/src/native/common/Android.bp
+++ b/media/tests/benchmark/src/native/common/Android.bp
@@ -55,7 +55,6 @@
cc_defaults {
name: "libmediabenchmark-defaults",
- sdk_version: "current",
stl: "c++_shared",
shared_libs: [
diff --git a/media/tests/benchmark/src/native/extractor/Extractor.cpp b/media/tests/benchmark/src/native/extractor/Extractor.cpp
index f0bb3b9..3bdfbad 100644
--- a/media/tests/benchmark/src/native/extractor/Extractor.cpp
+++ b/media/tests/benchmark/src/native/extractor/Extractor.cpp
@@ -124,9 +124,7 @@
int64_t sTime = mStats->getCurTime();
if (mExtractor) {
- // TODO: (b/140128505) Multiple calls result in DoS.
- // Uncomment call to AMediaExtractor_delete() once this is resolved
- // AMediaExtractor_delete(mExtractor);
+ AMediaExtractor_delete(mExtractor);
mExtractor = nullptr;
}
int64_t eTime = mStats->getCurTime();
diff --git a/media/tests/benchmark/tests/Android.bp b/media/tests/benchmark/tests/Android.bp
index 0fbd20d..9a8caa3 100644
--- a/media/tests/benchmark/tests/Android.bp
+++ b/media/tests/benchmark/tests/Android.bp
@@ -33,7 +33,12 @@
srcs: ["ExtractorTest.cpp"],
- static_libs: ["libmediabenchmark_extractor"]
+ static_libs: ["libmediabenchmark_extractor"],
+
+ shared_libs: [
+ "libbase",
+ "libbinder_ndk",
+ ],
}
cc_test {
@@ -50,6 +55,11 @@
"libmediabenchmark_extractor",
"libmediabenchmark_decoder",
],
+
+ shared_libs: [
+ "libbase",
+ "libbinder_ndk",
+ ],
}
cc_test {
diff --git a/media/tests/benchmark/tests/DecoderTest.cpp b/media/tests/benchmark/tests/DecoderTest.cpp
index 81ef02a..3666724 100644
--- a/media/tests/benchmark/tests/DecoderTest.cpp
+++ b/media/tests/benchmark/tests/DecoderTest.cpp
@@ -21,6 +21,8 @@
#include <iostream>
#include <limits>
+#include <android/binder_process.h>
+
#include "BenchmarkTestEnvironment.h"
#include "Decoder.h"
@@ -175,6 +177,7 @@
"c2.android.hevc.decoder", true)));
int main(int argc, char **argv) {
+ ABinderProcess_startThreadPool();
gEnv = new BenchmarkTestEnvironment();
::testing::AddGlobalTestEnvironment(gEnv);
::testing::InitGoogleTest(&argc, argv);
diff --git a/media/tests/benchmark/tests/ExtractorTest.cpp b/media/tests/benchmark/tests/ExtractorTest.cpp
index d14d15b..27ee9ba 100644
--- a/media/tests/benchmark/tests/ExtractorTest.cpp
+++ b/media/tests/benchmark/tests/ExtractorTest.cpp
@@ -19,6 +19,8 @@
#include <gtest/gtest.h>
+#include <android/binder_process.h>
+
#include "BenchmarkTestEnvironment.h"
#include "Extractor.h"
@@ -73,6 +75,7 @@
0)));
int main(int argc, char **argv) {
+ ABinderProcess_startThreadPool();
gEnv = new BenchmarkTestEnvironment();
::testing::AddGlobalTestEnvironment(gEnv);
::testing::InitGoogleTest(&argc, argv);
diff --git a/media/utils/Android.bp b/media/utils/Android.bp
index 73c4e3b..88b822d 100644
--- a/media/utils/Android.bp
+++ b/media/utils/Android.bp
@@ -51,6 +51,7 @@
"libpermission",
"android.hardware.graphics.bufferqueue@1.0",
"android.hidl.token@1.0-utils",
+ "packagemanager_aidl-cpp",
],
export_static_lib_headers: [
"libbatterystats_aidl",
diff --git a/media/utils/ServiceUtilities.cpp b/media/utils/ServiceUtilities.cpp
index 9c7b863..42f48a5 100644
--- a/media/utils/ServiceUtilities.cpp
+++ b/media/utils/ServiceUtilities.cpp
@@ -101,7 +101,11 @@
AttributionSourceState myAttributionSource;
myAttributionSource.uid = VALUE_OR_FATAL(android::legacy2aidl_uid_t_int32_t(getuid()));
myAttributionSource.pid = VALUE_OR_FATAL(android::legacy2aidl_pid_t_int32_t(getpid()));
- myAttributionSource.token = sp<BBinder>::make();
+ if (callerAttributionSource.token != nullptr) {
+ myAttributionSource.token = callerAttributionSource.token;
+ } else {
+ myAttributionSource.token = sp<BBinder>::make();
+ }
myAttributionSource.next.push_back(nextAttributionSource);
return std::optional<AttributionSourceState>{myAttributionSource};
diff --git a/media/utils/fuzzers/Android.bp b/media/utils/fuzzers/Android.bp
index c1698dc..c4dc24f 100644
--- a/media/utils/fuzzers/Android.bp
+++ b/media/utils/fuzzers/Android.bp
@@ -18,6 +18,7 @@
"libutils",
"libbinder",
"framework-permission-aidl-cpp",
+ "packagemanager_aidl-cpp",
],
cflags: [
diff --git a/services/OWNERS b/services/OWNERS
index f0b5e2f..17e605d 100644
--- a/services/OWNERS
+++ b/services/OWNERS
@@ -1,9 +1,6 @@
-chz@google.com
elaurent@google.com
essick@google.com
etalvala@google.com
-gkasten@google.com
hunga@google.com
-marcone@google.com
nchalko@google.com
quxiangfang@google.com
diff --git a/services/audioflinger/Android.bp b/services/audioflinger/Android.bp
index b91f302..fecc183 100644
--- a/services/audioflinger/Android.bp
+++ b/services/audioflinger/Android.bp
@@ -81,6 +81,7 @@
"libmedia_helper",
"libshmemcompat",
"libvibrator",
+ "packagemanager_aidl-cpp",
],
static_libs: [
diff --git a/services/audioflinger/AudioFlinger.cpp b/services/audioflinger/AudioFlinger.cpp
index 3a3fb5e..0718643 100644
--- a/services/audioflinger/AudioFlinger.cpp
+++ b/services/audioflinger/AudioFlinger.cpp
@@ -998,8 +998,9 @@
}
}
}
-
- setAudioHwSyncForSession_l(thread, sessionId);
+ if ((output.flags & AUDIO_OUTPUT_FLAG_HW_AV_SYNC) == AUDIO_OUTPUT_FLAG_HW_AV_SYNC) {
+ setAudioHwSyncForSession_l(thread, sessionId);
+ }
}
if (lStatus != NO_ERROR) {
diff --git a/services/audioflinger/OWNERS b/services/audioflinger/OWNERS
index 034d161..17d4c37 100644
--- a/services/audioflinger/OWNERS
+++ b/services/audioflinger/OWNERS
@@ -1,4 +1,4 @@
-gkasten@google.com
hunga@google.com
jmtrivi@google.com
mnaganov@google.com
+philburk@google.com
diff --git a/services/audioflinger/PatchPanel.cpp b/services/audioflinger/PatchPanel.cpp
index 93118b8..45dd258 100644
--- a/services/audioflinger/PatchPanel.cpp
+++ b/services/audioflinger/PatchPanel.cpp
@@ -575,6 +575,12 @@
// create a special playback track to render to playback thread.
// this track is given the same buffer as the PatchRecord buffer
+
+ // Default behavior is to start as soon as possible to have the lowest possible latency,
+ // even if it might glitch.
+ // Disable this behavior for the FM tuner source if no fast capture/mixer is available.
+ const bool isFmBridge = mAudioPatch.sources[0].ext.device.type == AUDIO_DEVICE_IN_FM_TUNER;
+ const size_t frameCountToBeReady = isFmBridge && !usePassthruPatchRecord ? frameCount / 4 : 1;
sp<PlaybackThread::PatchTrack> tempPatchTrack = new PlaybackThread::PatchTrack(
mPlayback.thread().get(),
streamType,
@@ -584,7 +590,9 @@
frameCount,
tempRecordTrack->buffer(),
tempRecordTrack->bufferSize(),
- outputFlags);
+ outputFlags,
+ {} /*timeout*/,
+ frameCountToBeReady);
status = mPlayback.checkTrack(tempPatchTrack.get());
if (status != NO_ERROR) {
return status;
diff --git a/services/audioflinger/PlaybackTracks.h b/services/audioflinger/PlaybackTracks.h
index 3cce998..aecd4d3 100644
--- a/services/audioflinger/PlaybackTracks.h
+++ b/services/audioflinger/PlaybackTracks.h
@@ -193,6 +193,12 @@
}
}
+ static bool checkServerLatencySupported(
+ audio_format_t format, audio_output_flags_t flags) {
+ return audio_is_linear_pcm(format)
+ && (flags & AUDIO_OUTPUT_FLAG_HW_AV_SYNC) == 0;
+ }
+
audio_output_flags_t getOutputFlags() const { return mFlags; }
float getSpeed() const { return mSpeed; }
diff --git a/services/audioflinger/Threads.cpp b/services/audioflinger/Threads.cpp
index e0da037..09e4078 100644
--- a/services/audioflinger/Threads.cpp
+++ b/services/audioflinger/Threads.cpp
@@ -5879,6 +5879,20 @@
return trackCount;
}
+bool AudioFlinger::PlaybackThread::checkRunningTimestamp()
+{
+ uint64_t position = 0;
+ struct timespec unused;
+ const status_t ret = mOutput->getPresentationPosition(&position, &unused);
+ if (ret == NO_ERROR) {
+ if (position != mLastCheckedTimestampPosition) {
+ mLastCheckedTimestampPosition = position;
+ return true;
+ }
+ }
+ return false;
+}
+
// isTrackAllowed_l() must be called with ThreadBase::mLock held
bool AudioFlinger::MixerThread::isTrackAllowed_l(
audio_channel_mask_t channelMask, audio_format_t format,
@@ -6307,19 +6321,24 @@
// fill a buffer, then remove it from active list.
// Only consider last track started for mixer state control
if (--(track->mRetryCount) <= 0) {
- ALOGV("BUFFER TIMEOUT: remove track(%d) from active list", trackId);
- tracksToRemove->add(track);
- // indicate to client process that the track was disabled because of underrun;
- // it will then automatically call start() when data is available
- track->disable();
- // only do hw pause when track is going to be removed due to BUFFER TIMEOUT.
- // unlike mixerthread, HAL can be paused for direct output
- ALOGW("pause because of UNDERRUN, framesReady = %zu,"
- "minFrames = %u, mFormat = %#x",
- framesReady, minFrames, mFormat);
- if (last && mHwSupportsPause && !mHwPaused && !mStandby) {
- doHwPause = true;
- mHwPaused = true;
+ const bool running = checkRunningTimestamp();
+ if (running) { // still running, give us more time.
+ track->mRetryCount = kMaxTrackRetriesOffload;
+ } else {
+ ALOGV("BUFFER TIMEOUT: remove track(%d) from active list", trackId);
+ tracksToRemove->add(track);
+ // indicate to client process that the track was disabled because of
+ // underrun; it will then automatically call start() when data is available
+ track->disable();
+ // only do hw pause when track is going to be removed due to BUFFER TIMEOUT.
+ // unlike mixerthread, HAL can be paused for direct output
+ ALOGW("pause because of UNDERRUN, framesReady = %zu,"
+ "minFrames = %u, mFormat = %#x",
+ framesReady, minFrames, mFormat);
+ if (last && mHwSupportsPause && !mHwPaused && !mStandby) {
+ doHwPause = true;
+ mHwPaused = true;
+ }
}
} else if (last) {
mixerStatus = MIXER_TRACKS_ENABLED;
@@ -6530,6 +6549,7 @@
void AudioFlinger::DirectOutputThread::flushHw_l()
{
+ PlaybackThread::flushHw_l();
mOutput->flush();
mHwPaused = false;
mFlushPending = false;
@@ -6665,8 +6685,7 @@
AudioFlinger::OffloadThread::OffloadThread(const sp<AudioFlinger>& audioFlinger,
AudioStreamOut* output, audio_io_handle_t id, bool systemReady)
: DirectOutputThread(audioFlinger, output, id, OFFLOAD, systemReady),
- mPausedWriteLength(0), mPausedBytesRemaining(0), mKeepWakeLock(true),
- mOffloadUnderrunPosition(~0LL)
+ mPausedWriteLength(0), mPausedBytesRemaining(0), mKeepWakeLock(true)
{
//FIXME: mStandby should be set to true by ThreadBase constructor
mStandby = true;
@@ -6883,19 +6902,7 @@
// No buffers for this track. Give it a few chances to
// fill a buffer, then remove it from active list.
if (--(track->mRetryCount) <= 0) {
- bool running = false;
- uint64_t position = 0;
- struct timespec unused;
- // The running check restarts the retry counter at least once.
- status_t ret = mOutput->stream->getPresentationPosition(&position, &unused);
- if (ret == NO_ERROR && position != mOffloadUnderrunPosition) {
- running = true;
- mOffloadUnderrunPosition = position;
- }
- if (ret == NO_ERROR) {
- ALOGVV("underrun counter, running(%d): %lld vs %lld", running,
- (long long)position, (long long)mOffloadUnderrunPosition);
- }
+ const bool running = checkRunningTimestamp();
if (running) { // still running, give us more time.
track->mRetryCount = kMaxTrackRetriesOffload;
} else {
@@ -6966,7 +6973,6 @@
mPausedBytesRemaining = 0;
// reset bytes written count to reflect that DSP buffers are empty after flush.
mBytesWritten = 0;
- mOffloadUnderrunPosition = ~0LL;
if (mUseAsyncWrite) {
// discard any pending drain or write ack by incrementing sequence
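
Reviewer note: the per-OffloadThread mOffloadUnderrunPosition bookkeeping is hoisted into PlaybackThread as checkRunningTimestamp()/mLastCheckedTimestampPosition, so DirectOutputThread gets the same "is the HAL still consuming frames?" retry logic on buffer timeout. The base-class flushHw_l() now resets the cached position, which is why the overrides chain up before doing device-specific work. A minimal sketch of that chaining pattern, with hypothetical class names:

    #include <cstdint>

    struct Base {
        virtual ~Base() = default;
        // base resets shared bookkeeping; overrides are expected to chain up
        virtual void flushHw_l() { mLastPos = ~0ULL; }
        uint64_t mLastPos = ~0ULL;
    };

    struct Derived : Base {
        void flushHw_l() override {
            Base::flushHw_l();   // keep the timestamp check consistent after a flush
            // ... device-specific flush work ...
        }
    };
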
diff --git a/services/audioflinger/Threads.h b/services/audioflinger/Threads.h
index 8561de3..04ad20e 100644
--- a/services/audioflinger/Threads.h
+++ b/services/audioflinger/Threads.h
@@ -1375,6 +1375,14 @@
struct audio_patch mDownStreamPatch;
std::atomic_bool mCheckOutputStageEffects{};
+
+ // A differential check on the timestamps to see if the timestamp frame position
+ // has changed since the last call to checkRunningTimestamp().
+ uint64_t mLastCheckedTimestampPosition = ~0LL;
+
+ bool checkRunningTimestamp();
+
+ virtual void flushHw_l() { mLastCheckedTimestampPosition = ~0LL; }
};
class MixerThread : public PlaybackThread {
@@ -1492,7 +1500,7 @@
virtual bool checkForNewParameter_l(const String8& keyValuePair,
status_t& status);
- virtual void flushHw_l();
+ void flushHw_l() override;
void setMasterBalance(float balance) override;
@@ -1557,7 +1565,7 @@
OffloadThread(const sp<AudioFlinger>& audioFlinger, AudioStreamOut* output,
audio_io_handle_t id, bool systemReady);
virtual ~OffloadThread() {};
- virtual void flushHw_l();
+ void flushHw_l() override;
protected:
// threadLoop snippets
@@ -1574,10 +1582,6 @@
size_t mPausedWriteLength; // length in bytes of write interrupted by pause
size_t mPausedBytesRemaining; // bytes still waiting in mixbuffer after resume
bool mKeepWakeLock; // keep wake lock while waiting for write callback
- uint64_t mOffloadUnderrunPosition; // Current frame position for offloaded playback
- // used and valid only during underrun. ~0 if
- // no underrun has occurred during playback and
- // is not reset on standby.
};
class AsyncCallbackThread : public Thread {
diff --git a/services/audioflinger/Tracks.cpp b/services/audioflinger/Tracks.cpp
index e0c5fa5..233865f 100644
--- a/services/audioflinger/Tracks.cpp
+++ b/services/audioflinger/Tracks.cpp
@@ -713,8 +713,7 @@
thread->mFastTrackAvailMask &= ~(1 << i);
}
- mServerLatencySupported = thread->type() == ThreadBase::MIXER
- || thread->type() == ThreadBase::DUPLICATING;
+ mServerLatencySupported = checkServerLatencySupported(format, flags);
#ifdef TEE_SINK
mTee.setId(std::string("_") + std::to_string(mThreadIoHandle)
+ "_" + std::to_string(mId) + "_T");
@@ -1405,6 +1404,60 @@
.content_type = mAttr.content_type,
.gain = mFinalVolume,
};
+
+ // When attributes are undefined, derive default values from stream type.
+ // See AudioAttributes.java, usageForStreamType() and Builder.setInternalLegacyStreamType()
+ if (mAttr.usage == AUDIO_USAGE_UNKNOWN) {
+ switch (mStreamType) {
+ case AUDIO_STREAM_VOICE_CALL:
+ metadata.base.usage = AUDIO_USAGE_VOICE_COMMUNICATION;
+ metadata.base.content_type = AUDIO_CONTENT_TYPE_SPEECH;
+ break;
+ case AUDIO_STREAM_SYSTEM:
+ metadata.base.usage = AUDIO_USAGE_ASSISTANCE_SONIFICATION;
+ metadata.base.content_type = AUDIO_CONTENT_TYPE_SONIFICATION;
+ break;
+ case AUDIO_STREAM_RING:
+ metadata.base.usage = AUDIO_USAGE_NOTIFICATION_TELEPHONY_RINGTONE;
+ metadata.base.content_type = AUDIO_CONTENT_TYPE_SONIFICATION;
+ break;
+ case AUDIO_STREAM_MUSIC:
+ metadata.base.usage = AUDIO_USAGE_MEDIA;
+ metadata.base.content_type = AUDIO_CONTENT_TYPE_MUSIC;
+ break;
+ case AUDIO_STREAM_ALARM:
+ metadata.base.usage = AUDIO_USAGE_ALARM;
+ metadata.base.content_type = AUDIO_CONTENT_TYPE_SONIFICATION;
+ break;
+ case AUDIO_STREAM_NOTIFICATION:
+ metadata.base.usage = AUDIO_USAGE_NOTIFICATION;
+ metadata.base.content_type = AUDIO_CONTENT_TYPE_SONIFICATION;
+ break;
+ case AUDIO_STREAM_DTMF:
+ metadata.base.usage = AUDIO_USAGE_VOICE_COMMUNICATION_SIGNALLING;
+ metadata.base.content_type = AUDIO_CONTENT_TYPE_SONIFICATION;
+ break;
+ case AUDIO_STREAM_ACCESSIBILITY:
+ metadata.base.usage = AUDIO_USAGE_ASSISTANCE_ACCESSIBILITY;
+ metadata.base.content_type = AUDIO_CONTENT_TYPE_SPEECH;
+ break;
+ case AUDIO_STREAM_ASSISTANT:
+ metadata.base.usage = AUDIO_USAGE_ASSISTANT;
+ metadata.base.content_type = AUDIO_CONTENT_TYPE_SPEECH;
+ break;
+ case AUDIO_STREAM_REROUTING:
+ metadata.base.usage = AUDIO_USAGE_VIRTUAL_SOURCE;
+ // unknown content type
+ break;
+ case AUDIO_STREAM_CALL_ASSISTANT:
+ metadata.base.usage = AUDIO_USAGE_CALL_ASSISTANT;
+ metadata.base.content_type = AUDIO_CONTENT_TYPE_SPEECH;
+ break;
+ default:
+ break;
+ }
+ }
+
metadata.channel_mask = mChannelMask,
strncpy(metadata.tags, mAttr.tags, AUDIO_ATTRIBUTES_TAGS_MAX_SIZE);
*backInserter++ = metadata;
diff --git a/services/audiopolicy/AudioPolicyInterface.h b/services/audiopolicy/AudioPolicyInterface.h
index 4078278..09523d0 100644
--- a/services/audiopolicy/AudioPolicyInterface.h
+++ b/services/audiopolicy/AudioPolicyInterface.h
@@ -285,8 +285,8 @@
virtual bool isHapticPlaybackSupported() = 0;
- virtual status_t getHwOffloadEncodingFormatsSupportedForA2DP(
- std::vector<audio_format_t> *formats) = 0;
+ virtual status_t getHwOffloadFormatsSupportedForBluetoothMedia(
+ audio_devices_t device, std::vector<audio_format_t> *formats) = 0;
virtual void setAppState(audio_port_handle_t portId, app_state_t state) = 0;
diff --git a/services/audiopolicy/common/include/Volume.h b/services/audiopolicy/common/include/Volume.h
index 736f8b2..f0636a0 100644
--- a/services/audiopolicy/common/include/Volume.h
+++ b/services/audiopolicy/common/include/Volume.h
@@ -127,6 +127,7 @@
case AUDIO_DEVICE_OUT_BLUETOOTH_A2DP_HEADPHONES:
case AUDIO_DEVICE_OUT_USB_HEADSET:
case AUDIO_DEVICE_OUT_BLE_HEADSET:
+ case AUDIO_DEVICE_OUT_BLE_BROADCAST:
return DEVICE_CATEGORY_HEADSET;
case AUDIO_DEVICE_OUT_HEARING_AID:
return DEVICE_CATEGORY_HEARING_AID;
diff --git a/services/audiopolicy/common/managerdefinitions/src/DeviceDescriptor.cpp b/services/audiopolicy/common/managerdefinitions/src/DeviceDescriptor.cpp
index 1722032..c9c8ede 100644
--- a/services/audiopolicy/common/managerdefinitions/src/DeviceDescriptor.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/DeviceDescriptor.cpp
@@ -58,15 +58,6 @@
mDeclaredAddress(DeviceDescriptorBase::address())
{
mCurrentEncodedFormat = AUDIO_FORMAT_DEFAULT;
- /* If framework runs against a pre 5.0 Audio HAL, encoded formats are absent from the config.
- * FIXME: APM should know the version of the HAL and don't add the formats for V5.0.
- * For now, the workaround to remove AC3 and IEC61937 support on HDMI is to declare
- * something like 'encodedFormats="AUDIO_FORMAT_PCM_16_BIT"' on the HDMI devicePort.
- */
- if (mDeviceTypeAddr.mType == AUDIO_DEVICE_OUT_HDMI && mEncodedFormats.empty()) {
- mEncodedFormats.push_back(AUDIO_FORMAT_AC3);
- mEncodedFormats.push_back(AUDIO_FORMAT_IEC61937);
- }
}
void DeviceDescriptor::attach(const sp<HwModule>& module)
diff --git a/services/audiopolicy/common/managerdefinitions/src/PolicyAudioPort.cpp b/services/audiopolicy/common/managerdefinitions/src/PolicyAudioPort.cpp
index 8c61b90..5986069 100644
--- a/services/audiopolicy/common/managerdefinitions/src/PolicyAudioPort.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/PolicyAudioPort.cpp
@@ -30,9 +30,9 @@
// --- PolicyAudioPort class implementation
void PolicyAudioPort::attach(const sp<HwModule>& module)
{
+ mModule = module;
ALOGV("%s: attaching module %s to port %s",
__FUNCTION__, getModuleName(), asAudioPort()->getName().c_str());
- mModule = module;
}
void PolicyAudioPort::detach()
diff --git a/services/audiopolicy/common/managerdefinitions/src/Serializer.cpp b/services/audiopolicy/common/managerdefinitions/src/Serializer.cpp
index 84ed656..a631963 100644
--- a/services/audiopolicy/common/managerdefinitions/src/Serializer.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/Serializer.cpp
@@ -861,10 +861,10 @@
ALOGE("%s: No version found in root node %s", __func__, rootName);
return BAD_VALUE;
}
- if (version == "7.0") {
+ if (version == "7.0" || version == "7.1") {
mChannelMasksSeparator = mSamplingRatesSeparator = mFlagsSeparator = " ";
} else if (version != "1.0") {
- ALOGE("%s: Version does not match; expected \"1.0\" or \"7.0\" got \"%s\"",
+ ALOGE("%s: Version does not match; expected \"1.0\", \"7.0\", or \"7.1\" got \"%s\"",
__func__, version.c_str());
return BAD_VALUE;
}
diff --git a/services/audiopolicy/config/bluetooth_audio_policy_configuration.xml b/services/audiopolicy/config/bluetooth_audio_policy_configuration.xml
index 98415b7..22ff954 100644
--- a/services/audiopolicy/config/bluetooth_audio_policy_configuration.xml
+++ b/services/audiopolicy/config/bluetooth_audio_policy_configuration.xml
@@ -22,6 +22,17 @@
samplingRates="8000,16000,24000,32000,44100,48000"
channelMasks="AUDIO_CHANNEL_OUT_MONO,AUDIO_CHANNEL_OUT_STEREO"/>
</mixPort>
+ <mixPort name="le audio input" role="sink">
+ <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+ samplingRates="8000,16000,24000,32000,44100,48000"
+ channelMasks="AUDIO_CHANNEL_IN_MONO,AUDIO_CHANNEL_IN_STEREO"/>
+ <profile name="" format="AUDIO_FORMAT_PCM_24_BIT_PACKED"
+ samplingRates="8000,16000,24000,32000,44100,48000"
+ channelMasks="AUDIO_CHANNEL_IN_MONO,AUDIO_CHANNEL_IN_STEREO"/>
+ <profile name="" format="AUDIO_FORMAT_PCM_32_BIT"
+ samplingRates="8000,16000,24000,32000,44100,48000"
+ channelMasks="AUDIO_CHANNEL_IN_MONO,AUDIO_CHANNEL_IN_STEREO"/>
+ </mixPort>
</mixPorts>
<devicePorts>
<!-- A2DP Audio Ports -->
@@ -49,6 +60,7 @@
-->
<devicePort tagName="BLE Headset Out" type="AUDIO_DEVICE_OUT_BLE_HEADSET" role="sink"/>
<devicePort tagName="BLE Speaker Out" type="AUDIO_DEVICE_OUT_BLE_SPEAKER" role="sink"/>
+ <devicePort tagName="BLE Headset In" type="AUDIO_DEVICE_IN_BLE_HEADSET" role="source"/>
</devicePorts>
<routes>
<route type="mix" sink="BT A2DP Out"
@@ -61,6 +73,8 @@
sources="hearing aid output"/>
<route type="mix" sink="BLE Headset Out"
sources="le audio output"/>
+ <route type="mix" sink="le audio input"
+ sources="BLE Headset In"/>
<route type="mix" sink="BLE Speaker Out"
sources="le audio output"/>
</routes>
diff --git a/services/audiopolicy/config/bluetooth_audio_policy_configuration_7_0.xml b/services/audiopolicy/config/bluetooth_audio_policy_configuration_7_0.xml
index fbe7571..aad00d6 100644
--- a/services/audiopolicy/config/bluetooth_audio_policy_configuration_7_0.xml
+++ b/services/audiopolicy/config/bluetooth_audio_policy_configuration_7_0.xml
@@ -22,6 +22,17 @@
samplingRates="8000 16000 24000 32000 44100 48000"
channelMasks="AUDIO_CHANNEL_OUT_MONO AUDIO_CHANNEL_OUT_STEREO"/>
</mixPort>
+ <mixPort name="le audio input" role="sink">
+ <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+ samplingRates="8000 16000 24000 32000 44100 48000"
+ channelMasks="AUDIO_CHANNEL_IN_MONO AUDIO_CHANNEL_IN_STEREO"/>
+ <profile name="" format="AUDIO_FORMAT_PCM_24_BIT_PACKED"
+ samplingRates="8000 16000 24000 32000 44100 48000"
+ channelMasks="AUDIO_CHANNEL_IN_MONO AUDIO_CHANNEL_IN_STEREO"/>
+ <profile name="" format="AUDIO_FORMAT_PCM_32_BIT"
+ samplingRates="8000 16000 24000 32000 44100 48000"
+ channelMasks="AUDIO_CHANNEL_IN_MONO AUDIO_CHANNEL_IN_STEREO"/>
+ </mixPort>
</mixPorts>
<devicePorts>
<!-- A2DP Audio Ports -->
@@ -45,6 +56,7 @@
<!-- BLE Audio Ports -->
<devicePort tagName="BLE Headset Out" type="AUDIO_DEVICE_OUT_BLE_HEADSET" role="sink"/>
<devicePort tagName="BLE Speaker Out" type="AUDIO_DEVICE_OUT_BLE_SPEAKER" role="sink"/>
+ <devicePort tagName="BLE Headset In" type="AUDIO_DEVICE_IN_BLE_HEADSET" role="source"/>
</devicePorts>
<routes>
<route type="mix" sink="BT A2DP Out"
@@ -57,6 +69,8 @@
sources="hearing aid output"/>
<route type="mix" sink="BLE Headset Out"
sources="le audio output"/>
+ <route type="mix" sink="le audio input"
+ sources="BLE Headset In"/>
<route type="mix" sink="BLE Speaker Out"
sources="le audio output"/>
</routes>
diff --git a/services/audiopolicy/config/le_audio_policy_configuration.xml b/services/audiopolicy/config/le_audio_policy_configuration.xml
index a3dc72b..dcdd805 100644
--- a/services/audiopolicy/config/le_audio_policy_configuration.xml
+++ b/services/audiopolicy/config/le_audio_policy_configuration.xml
@@ -7,13 +7,20 @@
samplingRates="8000,16000,24000,32000,44100,48000"
channelMasks="AUDIO_CHANNEL_OUT_MONO,AUDIO_CHANNEL_OUT_STEREO"/>
</mixPort>
+ <mixPort name="le audio input" role="sink">
+ <profile name="" format="AUDIO_FORMAT_PCM_16_BIT,AUDIO_FORMAT_PCM_24_BIT,AUDIO_FORMAT_PCM_32_BIT"
+ samplingRates="8000,16000,24000,32000,44100,48000"
+ channelMasks="AUDIO_CHANNEL_IN_MONO,AUDIO_CHANNEL_IN_STEREO"/>
+ </mixPort>
</mixPorts>
<devicePorts>
<devicePort tagName="BLE Headset Out" type="AUDIO_DEVICE_OUT_BLE_HEADSET" role="sink"/>
<devicePort tagName="BLE Speaker Out" type="AUDIO_DEVICE_OUT_BLE_SPEAKER" role="sink"/>
+ <devicePort tagName="BLE Headset In" type="AUDIO_DEVICE_IN_BLE_HEADSET" role="source"/>
</devicePorts>
<routes>
<route type="mix" sink="BLE Headset Out" sources="le audio output"/>
<route type="mix" sink="BLE Speaker Out" sources="le audio output"/>
+ <route type="mix" sink="le audio input" sources="BLE Headset In"/>
</routes>
</module>
diff --git a/services/audiopolicy/engine/common/src/EngineDefaultConfig.h b/services/audiopolicy/engine/common/src/EngineDefaultConfig.h
index d39eff6..665c2dd 100644
--- a/services/audiopolicy/engine/common/src/EngineDefaultConfig.h
+++ b/services/audiopolicy/engine/common/src/EngineDefaultConfig.h
@@ -69,12 +69,6 @@
{
{AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_NOTIFICATION, AUDIO_SOURCE_DEFAULT,
AUDIO_FLAG_NONE, ""},
- {AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_NOTIFICATION_COMMUNICATION_REQUEST,
- AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""},
- {AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_NOTIFICATION_COMMUNICATION_INSTANT,
- AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""},
- {AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_NOTIFICATION_COMMUNICATION_DELAYED,
- AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""},
{AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_NOTIFICATION_EVENT,
AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""}
}
diff --git a/services/audiopolicy/engine/common/src/LastRemovableMediaDevices.cpp b/services/audiopolicy/engine/common/src/LastRemovableMediaDevices.cpp
index b3f8947..06cc799 100644
--- a/services/audiopolicy/engine/common/src/LastRemovableMediaDevices.cpp
+++ b/services/audiopolicy/engine/common/src/LastRemovableMediaDevices.cpp
@@ -85,6 +85,7 @@
case AUDIO_DEVICE_OUT_HEARING_AID:
case AUDIO_DEVICE_OUT_BLE_HEADSET:
case AUDIO_DEVICE_OUT_BLE_SPEAKER:
+ case AUDIO_DEVICE_OUT_BLE_BROADCAST:
return GROUP_BT_A2DP;
default:
return GROUP_NONE;
diff --git a/services/audiopolicy/engineconfigurable/config/example/phone/audio_policy_engine_product_strategies.xml b/services/audiopolicy/engineconfigurable/config/example/phone/audio_policy_engine_product_strategies.xml
index bc32416..0ddf66d 100644
--- a/services/audiopolicy/engineconfigurable/config/example/phone/audio_policy_engine_product_strategies.xml
+++ b/services/audiopolicy/engineconfigurable/config/example/phone/audio_policy_engine_product_strategies.xml
@@ -57,9 +57,6 @@
<ProductStrategy name="STRATEGY_SONIFICATION_RESPECTFUL">
<AttributesGroup streamType="AUDIO_STREAM_NOTIFICATION" volumeGroup="notification">
<Attributes> <Usage value="AUDIO_USAGE_NOTIFICATION"/> </Attributes>
- <Attributes> <Usage value="AUDIO_USAGE_NOTIFICATION_COMMUNICATION_REQUEST"/> </Attributes>
- <Attributes> <Usage value="AUDIO_USAGE_NOTIFICATION_COMMUNICATION_INSTANT"/> </Attributes>
- <Attributes> <Usage value="AUDIO_USAGE_NOTIFICATION_COMMUNICATION_DELAYED"/> </Attributes>
<Attributes> <Usage value="AUDIO_USAGE_NOTIFICATION_EVENT"/> </Attributes>
</AttributesGroup>
</ProductStrategy>
diff --git a/services/audiopolicy/engineconfigurable/src/Engine.cpp b/services/audiopolicy/engineconfigurable/src/Engine.cpp
index b0c376a..9a61a05 100644
--- a/services/audiopolicy/engineconfigurable/src/Engine.cpp
+++ b/services/audiopolicy/engineconfigurable/src/Engine.cpp
@@ -299,8 +299,13 @@
if (device != nullptr) {
return DeviceVector(device);
}
+ return fromCache? getCachedDevices(strategy) : getDevicesForProductStrategy(strategy);
+}
- return fromCache? mDevicesForStrategies.at(strategy) : getDevicesForProductStrategy(strategy);
+DeviceVector Engine::getCachedDevices(product_strategy_t ps) const
+{
+ return mDevicesForStrategies.find(ps) != mDevicesForStrategies.end() ?
+ mDevicesForStrategies.at(ps) : DeviceVector{};
}
DeviceVector Engine::getOutputDevicesForStream(audio_stream_type_t stream, bool fromCache) const
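
Reviewer note: mDevicesForStrategies.at(strategy) throws std::out_of_range when a strategy has not been cached yet (which typically aborts in builds with exceptions disabled); getCachedDevices() guards the lookup with find() and falls back to an empty DeviceVector. A generic sketch of the same guard, using a stand-in container type rather than the real DeviceVector:

    #include <map>
    #include <vector>

    using Devices = std::vector<int>;  // stand-in for DeviceVector

    Devices getCached(const std::map<int, Devices>& cache, int key) {
        auto it = cache.find(key);
        return it != cache.end() ? it->second : Devices{};  // never calls at() on a missing key
    }
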
diff --git a/services/audiopolicy/engineconfigurable/src/Engine.h b/services/audiopolicy/engineconfigurable/src/Engine.h
index d8e2742..f665da5 100644
--- a/services/audiopolicy/engineconfigurable/src/Engine.h
+++ b/services/audiopolicy/engineconfigurable/src/Engine.h
@@ -126,6 +126,7 @@
status_t loadAudioPolicyEngineConfig();
DeviceVector getDevicesForProductStrategy(product_strategy_t strategy) const;
+ DeviceVector getCachedDevices(product_strategy_t ps) const;
/**
* Policy Parameter Manager hidden through a wrapper.
diff --git a/services/audiopolicy/enginedefault/config/example/phone/audio_policy_engine_product_strategies.xml b/services/audiopolicy/enginedefault/config/example/phone/audio_policy_engine_product_strategies.xml
index bc32416..0ddf66d 100644
--- a/services/audiopolicy/enginedefault/config/example/phone/audio_policy_engine_product_strategies.xml
+++ b/services/audiopolicy/enginedefault/config/example/phone/audio_policy_engine_product_strategies.xml
@@ -57,9 +57,6 @@
<ProductStrategy name="STRATEGY_SONIFICATION_RESPECTFUL">
<AttributesGroup streamType="AUDIO_STREAM_NOTIFICATION" volumeGroup="notification">
<Attributes> <Usage value="AUDIO_USAGE_NOTIFICATION"/> </Attributes>
- <Attributes> <Usage value="AUDIO_USAGE_NOTIFICATION_COMMUNICATION_REQUEST"/> </Attributes>
- <Attributes> <Usage value="AUDIO_USAGE_NOTIFICATION_COMMUNICATION_INSTANT"/> </Attributes>
- <Attributes> <Usage value="AUDIO_USAGE_NOTIFICATION_COMMUNICATION_DELAYED"/> </Attributes>
<Attributes> <Usage value="AUDIO_USAGE_NOTIFICATION_EVENT"/> </Attributes>
</AttributesGroup>
</ProductStrategy>
diff --git a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
index e334532..d0f605c 100644
--- a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
+++ b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
@@ -525,10 +525,10 @@
return NO_ERROR;
}
-status_t AudioPolicyManager::getHwOffloadEncodingFormatsSupportedForA2DP(
- std::vector<audio_format_t> *formats)
+status_t AudioPolicyManager::getHwOffloadFormatsSupportedForBluetoothMedia(
+ audio_devices_t device, std::vector<audio_format_t> *formats)
{
- ALOGV("getHwOffloadEncodingFormatsSupportedForA2DP()");
+ ALOGV("getHwOffloadFormatsSupportedForBluetoothMedia()");
status_t status = NO_ERROR;
std::unordered_set<audio_format_t> formatSet;
sp<HwModule> primaryModule =
@@ -537,8 +537,23 @@
ALOGE("%s() unable to get primary module", __func__);
return NO_INIT;
}
+
+ DeviceTypeSet audioDeviceSet;
+
+ switch(device) {
+ case AUDIO_DEVICE_OUT_BLUETOOTH_A2DP:
+ audioDeviceSet = getAudioDeviceOutAllA2dpSet();
+ break;
+ case AUDIO_DEVICE_OUT_BLE_HEADSET:
+ audioDeviceSet = getAudioDeviceOutAllBleSet();
+ break;
+ default:
+ ALOGE("%s() device type 0x%08x not supported", __func__, device);
+ return BAD_VALUE;
+ }
+
DeviceVector declaredDevices = primaryModule->getDeclaredDevices().getDevicesFromTypes(
- getAudioDeviceOutAllA2dpSet());
+ audioDeviceSet);
for (const auto& device : declaredDevices) {
formatSet.insert(device->encodedFormats().begin(), device->encodedFormats().end());
}
@@ -2182,7 +2197,7 @@
}
// Explicit routing?
- sp<DeviceDescriptor> explicitRoutingDevice =
+ sp<DeviceDescriptor> explicitRoutingDevice =
mAvailableInputDevices.getDeviceFromId(*selectedDeviceId);
// special case for mmap capture: if an input IO handle is specified, we reuse this input if
@@ -2368,7 +2383,7 @@
profileFlags = AUDIO_INPUT_FLAG_NONE; // retry
} else { // fail
ALOGW("%s could not find profile for device %s, sampling rate %u, format %#x, "
- "channel mask 0x%X, flags %#x", __func__, device->toString().c_str(),
+ "channel mask 0x%X, flags %#x", __func__, device->toString().c_str(),
config->sample_rate, config->format, config->channel_mask, flags);
return input;
}
@@ -5189,9 +5204,8 @@
continue;
}
mHwModules.push_back(hwModule);
- // open all output streams needed to access attached devices
- // except for direct output streams that are only opened when they are actually
- // required by an app.
+ // open all output streams needed to access attached devices.
+ // Direct outputs are closed immediately after checking the availability of attached devices.
// This also validates mAvailableOutputDevices list
for (const auto& outProfile : hwModule->getOutputProfiles()) {
if (!outProfile->canOpenNewIo()) {
@@ -5588,7 +5602,7 @@
} // endif input != 0
if (input == AUDIO_IO_HANDLE_NONE) {
- ALOGW("%s could not open input for device %s", __func__,
+ ALOGW("%s could not open input for device %s", __func__,
device->toString().c_str());
profiles.removeAt(profile_index);
profile_index--;
@@ -6080,7 +6094,8 @@
return hasVoiceStream(streams) && (outputDesc == mPrimaryOutput ||
outputDesc->isActive(toVolumeSource(AUDIO_STREAM_VOICE_CALL))) &&
(isInCall() ||
- mOutputs.isStrategyActiveOnSameModule(productStrategy, outputDesc));
+ mOutputs.isStrategyActiveOnSameModule(productStrategy, outputDesc)) &&
+ !isStreamActive(AUDIO_STREAM_ENFORCED_AUDIBLE, 0);
};
// With low-latency playing on speaker, music on WFD, when the first low-latency
diff --git a/services/audiopolicy/managerdefault/AudioPolicyManager.h b/services/audiopolicy/managerdefault/AudioPolicyManager.h
index 967aa10..3758429 100644
--- a/services/audiopolicy/managerdefault/AudioPolicyManager.h
+++ b/services/audiopolicy/managerdefault/AudioPolicyManager.h
@@ -320,8 +320,8 @@
audio_format_t *surroundFormats);
virtual status_t setSurroundFormatEnabled(audio_format_t audioFormat, bool enabled);
- virtual status_t getHwOffloadEncodingFormatsSupportedForA2DP(
- std::vector<audio_format_t> *formats);
+ virtual status_t getHwOffloadFormatsSupportedForBluetoothMedia(
+ audio_devices_t device, std::vector<audio_format_t> *formats);
virtual void setAppState(audio_port_handle_t portId, app_state_t state);
diff --git a/services/audiopolicy/service/Android.bp b/services/audiopolicy/service/Android.bp
index 197f183..f3d4f2f 100644
--- a/services/audiopolicy/service/Android.bp
+++ b/services/audiopolicy/service/Android.bp
@@ -55,6 +55,7 @@
"audiopolicy-types-aidl-cpp",
"capture_state_listener-aidl-cpp",
"framework-permission-aidl-cpp",
+ "packagemanager_aidl-cpp",
"spatializer-aidl-cpp",
],
diff --git a/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp b/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp
index 4bd1260..ff1e674 100644
--- a/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp
+++ b/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp
@@ -1885,8 +1885,8 @@
return Status::ok();
}
-Status AudioPolicyService::getHwOffloadEncodingFormatsSupportedForA2DP(
- std::vector<media::audio::common::AudioFormat>* _aidl_return) {
+Status AudioPolicyService::getHwOffloadFormatsSupportedForBluetoothMedia(
+ int32_t deviceAidl, std::vector<media::audio::common::AudioFormat>* _aidl_return) {
std::vector<audio_format_t> formats;
if (mAudioPolicyManager == NULL) {
@@ -1894,8 +1894,10 @@
}
Mutex::Autolock _l(mLock);
AutoCallerClear acc;
+ audio_devices_t device = VALUE_OR_RETURN_BINDER_STATUS(
+ aidl2legacy_int32_t_audio_devices_t(deviceAidl));
RETURN_IF_BINDER_ERROR(binderStatusFromStatusT(
- mAudioPolicyManager->getHwOffloadEncodingFormatsSupportedForA2DP(&formats)));
+ mAudioPolicyManager->getHwOffloadFormatsSupportedForBluetoothMedia(device, &formats)));
*_aidl_return = VALUE_OR_RETURN_BINDER_STATUS(
convertContainer<std::vector<media::audio::common::AudioFormat>>(
formats,
diff --git a/services/audiopolicy/service/AudioPolicyService.cpp b/services/audiopolicy/service/AudioPolicyService.cpp
index 102b376..cd83900 100644
--- a/services/audiopolicy/service/AudioPolicyService.cpp
+++ b/services/audiopolicy/service/AudioPolicyService.cpp
@@ -476,7 +476,7 @@
}
}
-void AudioPolicyService::NotificationClient::onAudioVolumeGroupChanged(volume_group_t group,
+void AudioPolicyService::NotificationClient::onAudioVolumeGroupChanged(volume_group_t group,
int flags)
{
if (mAudioPolicyServiceClient != 0 && mAudioVolumeGroupCallbacksEnabled) {
@@ -1037,7 +1037,7 @@
case TRANSACTION_removeUidDeviceAffinities:
case TRANSACTION_setUserIdDeviceAffinities:
case TRANSACTION_removeUserIdDeviceAffinities:
- case TRANSACTION_getHwOffloadEncodingFormatsSupportedForA2DP:
+ case TRANSACTION_getHwOffloadFormatsSupportedForBluetoothMedia:
case TRANSACTION_listAudioVolumeGroups:
case TRANSACTION_getVolumeGroupFromAudioAttributes:
case TRANSACTION_acquireSoundTriggerSession:
diff --git a/services/audiopolicy/service/AudioPolicyService.h b/services/audiopolicy/service/AudioPolicyService.h
index 27c4e1c..d189224 100644
--- a/services/audiopolicy/service/AudioPolicyService.h
+++ b/services/audiopolicy/service/AudioPolicyService.h
@@ -197,8 +197,8 @@
std::vector<bool>* formatsEnabled) override;
binder::Status getReportedSurroundFormats(
media::Int* count, std::vector<media::audio::common::AudioFormat>* formats) override;
- binder::Status getHwOffloadEncodingFormatsSupportedForA2DP(
- std::vector<media::audio::common::AudioFormat>* _aidl_return) override;
+ binder::Status getHwOffloadFormatsSupportedForBluetoothMedia(
+ int32_t device, std::vector<media::audio::common::AudioFormat>* _aidl_return) override;
binder::Status setSurroundFormatEnabled(media::audio::common::AudioFormat audioFormat,
bool enabled) override;
binder::Status setAssistantUid(int32_t uid) override;
diff --git a/services/audiopolicy/tests/audiopolicymanager_tests.cpp b/services/audiopolicy/tests/audiopolicymanager_tests.cpp
index a16ab7d..9d2d2b3 100644
--- a/services/audiopolicy/tests/audiopolicymanager_tests.cpp
+++ b/services/audiopolicy/tests/audiopolicymanager_tests.cpp
@@ -1375,7 +1375,8 @@
if (type == AUDIO_DEVICE_OUT_HDMI) {
// Set device connection state failed due to no device descriptor found
// For HDMI case, it is easier to simulate device descriptor not found error
- // by using a undeclared encoded format.
+ // by using an encoded format which isn't listed in the 'encodedFormats'
+ // attribute for this devicePort.
ASSERT_EQ(INVALID_OPERATION, mManager->setDeviceConnectionState(
type, AUDIO_POLICY_DEVICE_STATE_AVAILABLE,
address.c_str(), name.c_str(), AUDIO_FORMAT_MAT_2_1));
diff --git a/services/audiopolicy/tests/resources/test_audio_policy_configuration.xml b/services/audiopolicy/tests/resources/test_audio_policy_configuration.xml
index 87f0ab9..41ed70c 100644
--- a/services/audiopolicy/tests/resources/test_audio_policy_configuration.xml
+++ b/services/audiopolicy/tests/resources/test_audio_policy_configuration.xml
@@ -50,7 +50,8 @@
</devicePort>
<devicePort tagName="Built-In Mic" type="AUDIO_DEVICE_IN_BUILTIN_MIC" role="source">
</devicePort>
- <devicePort tagName="Hdmi" type="AUDIO_DEVICE_OUT_HDMI" role="sink">
+ <devicePort tagName="Hdmi" type="AUDIO_DEVICE_OUT_HDMI" role="sink"
+ encodedFormats="AUDIO_FORMAT_AC3">
</devicePort>
<devicePort tagName="Hdmi-In Mic" type="AUDIO_DEVICE_IN_HDMI" role="source">
</devicePort>
diff --git a/services/camera/libcameraservice/CameraService.cpp b/services/camera/libcameraservice/CameraService.cpp
index 4dfbb6f..229964c 100644
--- a/services/camera/libcameraservice/CameraService.cpp
+++ b/services/camera/libcameraservice/CameraService.cpp
@@ -363,16 +363,18 @@
std::string cameraId(id.c_str());
hardware::camera::common::V1_0::CameraResourceCost cost;
status_t res = mCameraProviderManager->getResourceCost(cameraId, &cost);
- SystemCameraKind deviceKind = SystemCameraKind::PUBLIC;
if (res != OK) {
ALOGE("Failed to query device resource cost: %s (%d)", strerror(-res), res);
return;
}
+ SystemCameraKind deviceKind = SystemCameraKind::PUBLIC;
res = mCameraProviderManager->getSystemCameraKind(cameraId, &deviceKind);
if (res != OK) {
ALOGE("Failed to query device kind: %s (%d)", strerror(-res), res);
return;
}
+ std::vector<std::string> physicalCameraIds;
+ mCameraProviderManager->isLogicalCamera(cameraId, &physicalCameraIds);
std::set<String8> conflicting;
for (size_t i = 0; i < cost.conflictingDevices.size(); i++) {
conflicting.emplace(String8(cost.conflictingDevices[i].c_str()));
@@ -381,7 +383,7 @@
{
Mutex::Autolock lock(mCameraStatesLock);
mCameraStates.emplace(id, std::make_shared<CameraState>(id, cost.resourceCost,
- conflicting, deviceKind));
+ conflicting, deviceKind, physicalCameraIds));
}
if (mFlashlight->hasFlashUnit(id)) {
@@ -560,6 +562,13 @@
onTorchStatusChangedLocked(cameraId, newStatus, systemCameraKind);
}
+
+void CameraService::onTorchStatusChanged(const String8& cameraId,
+ TorchModeStatus newStatus, SystemCameraKind systemCameraKind) {
+ Mutex::Autolock al(mTorchStatusMutex);
+ onTorchStatusChangedLocked(cameraId, newStatus, systemCameraKind);
+}
+
void CameraService::onTorchStatusChangedLocked(const String8& cameraId,
TorchModeStatus newStatus, SystemCameraKind systemCameraKind) {
ALOGI("%s: Torch status changed for cameraId=%s, newStatus=%d",
@@ -3682,9 +3691,10 @@
// ----------------------------------------------------------------------------
CameraService::CameraState::CameraState(const String8& id, int cost,
- const std::set<String8>& conflicting, SystemCameraKind systemCameraKind) : mId(id),
+ const std::set<String8>& conflicting, SystemCameraKind systemCameraKind,
+ const std::vector<std::string>& physicalCameras) : mId(id),
mStatus(StatusInternal::NOT_PRESENT), mCost(cost), mConflicting(conflicting),
- mSystemCameraKind(systemCameraKind) {}
+ mSystemCameraKind(systemCameraKind), mPhysicalCameras(physicalCameras) {}
CameraService::CameraState::~CameraState() {}
@@ -3723,6 +3733,11 @@
return mSystemCameraKind;
}
+bool CameraService::CameraState::containsPhysicalCamera(const std::string& physicalCameraId) const {
+ return std::find(mPhysicalCameras.begin(), mPhysicalCameras.end(), physicalCameraId)
+ != mPhysicalCameras.end();
+}
+
bool CameraService::CameraState::addUnavailablePhysicalId(const String8& physicalId) {
Mutex::Autolock lock(mStatusLock);
auto result = mUnavailablePhysicalIds.insert(physicalId);
@@ -4341,18 +4356,9 @@
std::list<String16> retList;
Mutex::Autolock lock(mCameraStatesLock);
for (const auto& state : mCameraStates) {
- std::vector<std::string> physicalCameraIds;
- if (!mCameraProviderManager->isLogicalCamera(state.first.c_str(), &physicalCameraIds)) {
- // This is not a logical multi-camera.
- continue;
+ if (state.second->containsPhysicalCamera(physicalCameraId.c_str())) {
+ retList.emplace_back(String16(state.first));
}
- if (std::find(physicalCameraIds.begin(), physicalCameraIds.end(), physicalCameraId.c_str())
- == physicalCameraIds.end()) {
- // cameraId is not a physical camera of this logical multi-camera.
- continue;
- }
-
- retList.emplace_back(String16(state.first));
}
return retList;
}
diff --git a/services/camera/libcameraservice/CameraService.h b/services/camera/libcameraservice/CameraService.h
index bc2e347..d5feeeb 100644
--- a/services/camera/libcameraservice/CameraService.h
+++ b/services/camera/libcameraservice/CameraService.h
@@ -110,8 +110,16 @@
virtual void onDeviceStatusChanged(const String8 &cameraId,
const String8 &physicalCameraId,
hardware::camera::common::V1_0::CameraDeviceStatus newHalStatus) override;
+ // This method may hold CameraProviderManager::mInterfaceMutex as part
+ // of calling getSystemCameraKind() internally. Care should be taken not to
+ // call this, directly or indirectly, from callers that also hold
+ // mInterfaceMutex.
virtual void onTorchStatusChanged(const String8& cameraId,
hardware::camera::common::V1_0::TorchModeStatus newStatus) override;
+ // Does not hold CameraProviderManager::mInterfaceMutex.
+ virtual void onTorchStatusChanged(const String8& cameraId,
+ hardware::camera::common::V1_0::TorchModeStatus newStatus,
+ SystemCameraKind kind) override;
virtual void onNewProviderRegistered() override;
/////////////////////////////////////////////////////////////////////
@@ -563,7 +571,7 @@
* returned in the HAL's camera_info struct for each device.
*/
CameraState(const String8& id, int cost, const std::set<String8>& conflicting,
- SystemCameraKind deviceKind);
+ SystemCameraKind deviceKind, const std::vector<std::string>& physicalCameras);
virtual ~CameraState();
/**
@@ -621,6 +629,12 @@
SystemCameraKind getSystemCameraKind() const;
/**
+ * Return whether this camera is a logical multi-camera and has a
+ * particular physical sub-camera.
+ */
+ bool containsPhysicalCamera(const std::string& physicalCameraId) const;
+
+ /**
* Add/Remove the unavailable physical camera ID.
*/
bool addUnavailablePhysicalId(const String8& physicalId);
@@ -641,6 +655,7 @@
mutable Mutex mStatusLock;
CameraParameters mShimParams;
const SystemCameraKind mSystemCameraKind;
+ const std::vector<std::string> mPhysicalCameras; // Empty if not a logical multi-camera
}; // class CameraState
// Observer for UID lifecycle enforcing that UIDs in idle
diff --git a/services/camera/libcameraservice/api2/CameraDeviceClient.cpp b/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
index 83b8e95..971628a 100644
--- a/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
+++ b/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
@@ -1696,7 +1696,7 @@
bool isCompositeStream = false;
for (const auto& gbp : mConfiguredOutputs[streamId].getGraphicBufferProducers()) {
sp<Surface> s = new Surface(gbp, false /*controlledByApp*/);
- isCompositeStream = camera3::DepthCompositeStream::isDepthCompositeStream(s) |
+ isCompositeStream = camera3::DepthCompositeStream::isDepthCompositeStream(s) ||
camera3::HeicCompositeStream::isHeicCompositeStream(s);
if (isCompositeStream) {
auto compositeIdx = mCompositeStreamMap.indexOfKey(IInterface::asBinder(gbp));
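
The `|` to `||` change above (and the matching one in Camera3OutputStream.cpp further down) does not change the resulting value when both operands are `bool`, but it restores short-circuit evaluation and makes the intent explicit: with `|`, the right-hand side is always evaluated even when the left-hand side already decides the result. A small standalone illustration, unrelated to the camera code itself:

```
#include <iostream>

bool expensiveCheck() {
    std::cout << "expensiveCheck evaluated\n";
    return true;
}

int main() {
    bool a = true;
    // Bitwise OR: expensiveCheck() runs even though 'a' already settles the result.
    bool viaBitwise = a | expensiveCheck();
    // Logical OR: the right-hand side is skipped once the result is known.
    bool viaLogical = a || expensiveCheck();
    std::cout << viaBitwise << " " << viaLogical << "\n";  // prints "1 1"
    return 0;
}
```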
diff --git a/services/camera/libcameraservice/common/CameraProviderManager.cpp b/services/camera/libcameraservice/common/CameraProviderManager.cpp
index 2f74df9..0cce2ca 100644
--- a/services/camera/libcameraservice/common/CameraProviderManager.cpp
+++ b/services/camera/libcameraservice/common/CameraProviderManager.cpp
@@ -1960,16 +1960,19 @@
const hardware::hidl_string& cameraDeviceName,
TorchModeStatus newStatus) {
sp<StatusListener> listener;
+ SystemCameraKind systemCameraKind = SystemCameraKind::PUBLIC;
std::string id;
+ bool known = false;
{
- std::lock_guard<std::mutex> lock(mManager->mStatusListenerMutex);
- bool known = false;
+ // Hold mLock for accessing mDevices
+ std::lock_guard<std::mutex> lock(mLock);
for (auto& deviceInfo : mDevices) {
if (deviceInfo->mName == cameraDeviceName) {
ALOGI("Camera device %s torch status is now %s", cameraDeviceName.c_str(),
torchStatusToString(newStatus));
id = deviceInfo->mId;
known = true;
+ systemCameraKind = deviceInfo->mSystemCameraKind;
if (TorchModeStatus::AVAILABLE_ON != newStatus) {
mManager->removeRef(DeviceMode::TORCH, id);
}
@@ -1981,11 +1984,19 @@
mProviderName.c_str(), cameraDeviceName.c_str(), newStatus);
return hardware::Void();
}
+ // No lock needed: the listener is set up only once during
+ // CameraProviderManager initialization and then never changed until it is
+ // destroyed.
listener = mManager->getStatusListener();
- }
+ }
// Call without lock held to allow reentrancy into provider manager
+ // The problem with holding mLock here is that we might be limiting
+ // re-entrancy: CameraService::onTorchStatusChanged calls back into
+ // CameraProviderManager, which may try to hold mLock again (e.g. in
+ // findDeviceInfo, which is expected to hold mLock while iterating through
+ // each provider's devices).
if (listener != nullptr) {
- listener->onTorchStatusChanged(String8(id.c_str()), newStatus);
+ listener->onTorchStatusChanged(String8(id.c_str()), newStatus, systemCameraKind);
}
return hardware::Void();
}
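
The restructuring above is the standard way to avoid re-entrant deadlocks in listener callbacks: copy everything needed (the id, the system-camera kind, and the listener itself) while `mLock` is held, then drop the lock before invoking the callback so it can safely call back into CameraProviderManager. A minimal sketch of that snapshot-under-lock, call-outside-lock pattern, using illustrative names rather than the real classes:

```
#include <functional>
#include <iostream>
#include <mutex>
#include <string>

class TorchNotifier {
public:
    using Listener = std::function<void(const std::string& /*id*/, int /*status*/)>;

    void setListener(Listener l) {
        std::lock_guard<std::mutex> lock(mLock);
        mListener = std::move(l);
    }

    void onStatusChanged(const std::string& id, int status) {
        Listener listener;
        {
            // Snapshot everything we need while holding the lock...
            std::lock_guard<std::mutex> lock(mLock);
            listener = mListener;
        }
        // ...then invoke the callback with the lock released, so the callback
        // may call back into this object without deadlocking.
        if (listener) listener(id, status);
    }

private:
    std::mutex mLock;
    Listener mListener;
};

int main() {
    TorchNotifier n;
    n.setListener([](const std::string& id, int s) {
        std::cout << "camera " << id << " torch status " << s << "\n";
    });
    n.onStatusChanged("0", 1);
    return 0;
}
```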
diff --git a/services/camera/libcameraservice/common/CameraProviderManager.h b/services/camera/libcameraservice/common/CameraProviderManager.h
index e3763a1..fdb2673 100644
--- a/services/camera/libcameraservice/common/CameraProviderManager.h
+++ b/services/camera/libcameraservice/common/CameraProviderManager.h
@@ -155,6 +155,9 @@
const String8 &physicalCameraId,
hardware::camera::common::V1_0::CameraDeviceStatus newStatus) = 0;
virtual void onTorchStatusChanged(const String8 &cameraId,
+ hardware::camera::common::V1_0::TorchModeStatus newStatus,
+ SystemCameraKind kind) = 0;
+ virtual void onTorchStatusChanged(const String8 &cameraId,
hardware::camera::common::V1_0::TorchModeStatus newStatus) = 0;
virtual void onNewProviderRegistered() = 0;
};
@@ -329,8 +332,6 @@
// All private members, unless otherwise noted, expect mInterfaceMutex to be locked before use
mutable std::mutex mInterfaceMutex;
- // the status listener update callbacks will lock mStatusMutex
- mutable std::mutex mStatusListenerMutex;
wp<StatusListener> mListener;
ServiceInteractionProxy* mServiceProxy;
diff --git a/services/camera/libcameraservice/device3/Camera3OutputStream.cpp b/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
index 3738d01..e60fdb3 100644
--- a/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
+++ b/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
@@ -550,7 +550,7 @@
mHandoutTotalBufferCount = 0;
mFrameCount = 0;
mLastTimestamp = 0;
- mUseMonoTimestamp = (isConsumedByHWComposer() | isVideoStream());
+ mUseMonoTimestamp = (isConsumedByHWComposer() || isVideoStream());
res = native_window_set_buffer_count(mConsumer.get(),
mTotalBufferCount);
diff --git a/services/camera/libcameraservice/device3/Camera3OutputUtils.cpp b/services/camera/libcameraservice/device3/Camera3OutputUtils.cpp
index d765b02..5c54dc7 100644
--- a/services/camera/libcameraservice/device3/Camera3OutputUtils.cpp
+++ b/services/camera/libcameraservice/device3/Camera3OutputUtils.cpp
@@ -472,7 +472,7 @@
// Note down the just completed frame number
if (request.hasInputBuffer) {
states.lastCompletedReprocessFrameNumber = frameNumber;
- } else if (request.zslCapture) {
+ } else if (request.zslCapture && request.stillCapture) {
states.lastCompletedZslFrameNumber = frameNumber;
} else {
states.lastCompletedRegularFrameNumber = frameNumber;
@@ -969,7 +969,8 @@
void returnAndRemovePendingOutputBuffers(bool useHalBufManager,
sp<NotificationListener> listener, InFlightRequest& request,
SessionStatsBuilder& sessionStatsBuilder) {
- bool timestampIncreasing = !(request.zslCapture || request.hasInputBuffer);
+ bool timestampIncreasing =
+ !((request.zslCapture && request.stillCapture) || request.hasInputBuffer);
returnOutputBuffers(useHalBufManager, listener,
request.pendingOutputBuffers.array(),
request.pendingOutputBuffers.size(),
diff --git a/services/camera/libcameraservice/tests/CameraProviderManagerTest.cpp b/services/camera/libcameraservice/tests/CameraProviderManagerTest.cpp
index a74fd9d..c8a6b32 100644
--- a/services/camera/libcameraservice/tests/CameraProviderManagerTest.cpp
+++ b/services/camera/libcameraservice/tests/CameraProviderManagerTest.cpp
@@ -274,6 +274,8 @@
hardware::camera::common::V1_0::CameraDeviceStatus) override {}
void onTorchStatusChanged(const String8 &,
hardware::camera::common::V1_0::TorchModeStatus) override {}
+ void onTorchStatusChanged(const String8 &,
+ hardware::camera::common::V1_0::TorchModeStatus, SystemCameraKind) override {}
void onNewProviderRegistered() override {}
};
diff --git a/services/mediacodec/OWNERS b/services/mediacodec/OWNERS
index c716cce..3453a76 100644
--- a/services/mediacodec/OWNERS
+++ b/services/mediacodec/OWNERS
@@ -1,2 +1,3 @@
jeffv@google.com
-marcone@google.com
+essick@google.com
+wonsik@google.com
diff --git a/services/mediacodec/android.hardware.media.omx@1.0-service.rc b/services/mediacodec/android.hardware.media.omx@1.0-service.rc
index 3ef9a85..845e5cc 100644
--- a/services/mediacodec/android.hardware.media.omx@1.0-service.rc
+++ b/services/mediacodec/android.hardware.media.omx@1.0-service.rc
@@ -3,4 +3,4 @@
user mediacodec
group camera drmrpc mediadrm
ioprio rt 4
- writepid /dev/cpuset/foreground/tasks
+ task_profiles ProcessCapacityHigh
diff --git a/services/mediacodec/registrant/Android.bp b/services/mediacodec/registrant/Android.bp
index 696b967..d10e339 100644
--- a/services/mediacodec/registrant/Android.bp
+++ b/services/mediacodec/registrant/Android.bp
@@ -7,7 +7,7 @@
default_applicable_licenses: ["frameworks_av_services_mediacodec_license"],
}
-cc_library_shared {
+cc_library {
name: "libmedia_codecserviceregistrant",
vendor_available: true,
srcs: [
diff --git a/services/mediacodec/registrant/fuzzer/Android.bp b/services/mediacodec/registrant/fuzzer/Android.bp
new file mode 100644
index 0000000..43afbf1
--- /dev/null
+++ b/services/mediacodec/registrant/fuzzer/Android.bp
@@ -0,0 +1,46 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at:
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package {
+ // See: http://go/android-license-faq
+ // A large-scale-change added 'default_applicable_licenses' to import
+ // all of the 'license_kinds' from "frameworks_av_services_mediacodec_license"
+ // to get the below license kinds:
+ // SPDX-license-identifier-Apache-2.0
+ default_applicable_licenses: ["frameworks_av_services_mediacodec_license"],
+}
+
+cc_fuzz {
+ name: "codecServiceRegistrant_fuzzer",
+ srcs: [
+ "codecServiceRegistrant_fuzzer.cpp",
+ ],
+ static_libs: [
+ "libmedia_codecserviceregistrant",
+ ],
+ header_libs: [
+ "libmedia_headers",
+ ],
+ defaults: [
+ "libcodec2-hidl-defaults",
+ ],
+ fuzz_config: {
+ cc: [
+ "android-media-fuzzing-reports@google.com",
+ ],
+ componentid: 155276,
+ },
+}
diff --git a/services/mediacodec/registrant/fuzzer/README.md b/services/mediacodec/registrant/fuzzer/README.md
new file mode 100644
index 0000000..0ffa063
--- /dev/null
+++ b/services/mediacodec/registrant/fuzzer/README.md
@@ -0,0 +1,56 @@
+# Fuzzer for libmedia_codecserviceregistrant
+
+## Plugin Design Considerations
+The fuzzer plugin for libmedia_codecserviceregistrant is designed based on an understanding of the library and tries to achieve the following:
+
+##### Maximize code coverage
+The configuration parameters are not hardcoded, but instead selected based on
+incoming data. This ensures more code paths are reached by the fuzzer.
+
+libmedia_codecserviceregistrant supports the following parameters:
+1. C2String (parameter name: `c2String`)
+2. Width (parameter name: `width`)
+3. Height (parameter name: `height`)
+4. SamplingRate (parameter name: `samplingRate`)
+5. Channels (parameter name: `channels`)
+6. Stream (parameter name: `stream`)
+
+| Parameter | Valid Values | Configured Value |
+|-----------|--------------|------------------|
+| `c2String` | `String` | Value obtained from FuzzedDataProvider |
+| `width` | `0` to `UINT32_MAX` | Value obtained from FuzzedDataProvider |
+| `height` | `0` to `UINT32_MAX` | Value obtained from FuzzedDataProvider |
+| `samplingRate` | `0` to `UINT32_MAX` | Value obtained from FuzzedDataProvider |
+| `channels` | `0` to `UINT32_MAX` | Value obtained from FuzzedDataProvider |
+| `stream` | `0` to `UINT32_MAX` | Value obtained from FuzzedDataProvider |
+
+This also ensures that the plugin is always deterministic for any given input.
+
+##### Maximize utilization of input data
+The plugin feeds the entire input data to the libmedia_codecserviceregistrant module.
+This ensures that the plugin tolerates any kind of input (empty, huge,
+malformed, etc.) and doesn't `exit()` on any input, thereby increasing the
+chance of identifying vulnerabilities.
+
+## Build
+
+This describes the steps to build the codecServiceRegistrant_fuzzer binary.
+
+### Android
+
+#### Steps to build
+Build the fuzzer
+```
+ $ mm -j$(nproc) codecServiceRegistrant_fuzzer
+```
+#### Steps to run
+
+To run on device
+```
+ $ adb sync data
+ $ adb shell /data/fuzz/${TARGET_ARCH}/codecServiceRegistrant_fuzzer/codecServiceRegistrant_fuzzer
+```
+
+## References:
+ * http://llvm.org/docs/LibFuzzer.html
+ * https://github.com/google/oss-fuzz
diff --git a/services/mediacodec/registrant/fuzzer/codecServiceRegistrant_fuzzer.cpp b/services/mediacodec/registrant/fuzzer/codecServiceRegistrant_fuzzer.cpp
new file mode 100644
index 0000000..e5983e4
--- /dev/null
+++ b/services/mediacodec/registrant/fuzzer/codecServiceRegistrant_fuzzer.cpp
@@ -0,0 +1,158 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#include "../CodecServiceRegistrant.cpp"
+#include "fuzzer/FuzzedDataProvider.h"
+#include <C2Config.h>
+#include <C2Param.h>
+
+using namespace std;
+
+constexpr char kServiceName[] = "software";
+
+class CodecServiceRegistrantFuzzer {
+public:
+ void process(const uint8_t *data, size_t size);
+ ~CodecServiceRegistrantFuzzer() {
+ delete mH2C2;
+ if (mInputSize) {
+ delete mInputSize;
+ }
+ if (mSampleRateInfo) {
+ delete mSampleRateInfo;
+ }
+ if (mChannelCountInfo) {
+ delete mChannelCountInfo;
+ }
+ }
+
+private:
+ void initH2C2ComponentStore();
+ void invokeH2C2ComponentStore();
+ void invokeConfigSM();
+ void invokeQuerySM();
+ H2C2ComponentStore *mH2C2 = nullptr;
+ C2StreamPictureSizeInfo::input *mInputSize = nullptr;
+ C2StreamSampleRateInfo::output *mSampleRateInfo = nullptr;
+ C2StreamChannelCountInfo::output *mChannelCountInfo = nullptr;
+ C2Param::Index mIndex = C2StreamProfileLevelInfo::output::PARAM_TYPE;
+ C2StreamFrameRateInfo::output mFrameRate;
+ FuzzedDataProvider *mFDP = nullptr;
+};
+
+void CodecServiceRegistrantFuzzer::initH2C2ComponentStore() {
+ using namespace ::android::hardware::media::c2;
+ shared_ptr<C2ComponentStore> store =
+ android::GetCodec2PlatformComponentStore();
+ if (!store) {
+ return;
+ }
+ android::sp<V1_1::IComponentStore> storeV1_1 =
+ new V1_1::utils::ComponentStore(store);
+ if (storeV1_1->registerAsService(string(kServiceName)) != android::OK) {
+ return;
+ }
+ string const preferredStoreName = string(kServiceName);
+ sp<IComponentStore> preferredStore =
+ IComponentStore::getService(preferredStoreName.c_str());
+ mH2C2 = new H2C2ComponentStore(preferredStore);
+}
+
+void CodecServiceRegistrantFuzzer::invokeConfigSM() {
+ vector<C2Param *> configParams;
+ uint32_t width = mFDP->ConsumeIntegral<uint32_t>();
+ uint32_t height = mFDP->ConsumeIntegral<uint32_t>();
+ uint32_t samplingRate = mFDP->ConsumeIntegral<uint32_t>();
+ uint32_t channels = mFDP->ConsumeIntegral<uint32_t>();
+ if (mFDP->ConsumeBool()) {
+ mInputSize = new C2StreamPictureSizeInfo::input(0u, width, height);
+ configParams.push_back(mInputSize);
+ } else {
+ if (mFDP->ConsumeBool()) {
+ mSampleRateInfo = new C2StreamSampleRateInfo::output(0u, samplingRate);
+ configParams.push_back(mSampleRateInfo);
+ }
+ if (mFDP->ConsumeBool()) {
+ mChannelCountInfo = new C2StreamChannelCountInfo::output(0u, channels);
+ configParams.push_back(mChannelCountInfo);
+ }
+ }
+ vector<unique_ptr<C2SettingResult>> failures;
+ mH2C2->config_sm(configParams, &failures);
+}
+
+void CodecServiceRegistrantFuzzer::invokeQuerySM() {
+ vector<C2Param *> stackParams;
+ vector<C2Param::Index> heapParamIndices;
+ if (mFDP->ConsumeBool()) {
+ stackParams = {};
+ heapParamIndices = {};
+ } else {
+ uint32_t stream = mFDP->ConsumeIntegral<uint32_t>();
+ mFrameRate.setStream(stream);
+ stackParams.push_back(&mFrameRate);
+ heapParamIndices.push_back(mIndex);
+ }
+ vector<unique_ptr<C2Param>> heapParams;
+ mH2C2->query_sm(stackParams, heapParamIndices, &heapParams);
+}
+
+void CodecServiceRegistrantFuzzer::invokeH2C2ComponentStore() {
+ initH2C2ComponentStore();
+ shared_ptr<C2Component> component;
+ shared_ptr<C2ComponentInterface> interface;
+ string c2String = mFDP->ConsumeRandomLengthString();
+ mH2C2->createComponent(c2String, &component);
+ mH2C2->createInterface(c2String, &interface);
+ invokeConfigSM();
+ invokeQuerySM();
+
+ vector<shared_ptr<C2ParamDescriptor>> params;
+ mH2C2->querySupportedParams_nb(&params);
+
+ C2StoreIonUsageInfo usageInfo;
+ std::vector<C2FieldSupportedValuesQuery> query = {
+ C2FieldSupportedValuesQuery::Possible(
+ C2ParamField::Make(usageInfo, usageInfo.usage)),
+ C2FieldSupportedValuesQuery::Possible(
+ C2ParamField::Make(usageInfo, usageInfo.capacity)),
+ };
+ mH2C2->querySupportedValues_sm(query);
+
+ mH2C2->getName();
+ shared_ptr<C2ParamReflector> paramReflector = mH2C2->getParamReflector();
+ if (paramReflector) {
+ paramReflector->describe(C2ComponentDomainSetting::CORE_INDEX);
+ }
+ mH2C2->listComponents();
+ shared_ptr<C2GraphicBuffer> src;
+ shared_ptr<C2GraphicBuffer> dst;
+ mH2C2->copyBuffer(src, dst);
+}
+
+void CodecServiceRegistrantFuzzer::process(const uint8_t *data, size_t size) {
+ mFDP = new FuzzedDataProvider(data, size);
+ invokeH2C2ComponentStore();
+ /** RegisterCodecServices is called here to improve code coverage, */
+ /** since it is not currently called by codecServiceRegistrant. */
+ RegisterCodecServices();
+ delete mFDP;
+}
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t *data, size_t size) {
+ CodecServiceRegistrantFuzzer codecServiceRegistrantFuzzer;
+ codecServiceRegistrantFuzzer.process(data, size);
+ return 0;
+}
diff --git a/services/mediacodec/seccomp_policy/mediaswcodec-arm.policy b/services/mediacodec/seccomp_policy/mediaswcodec-arm.policy
index 9058f10..41efce0 100644
--- a/services/mediacodec/seccomp_policy/mediaswcodec-arm.policy
+++ b/services/mediacodec/seccomp_policy/mediaswcodec-arm.policy
@@ -84,5 +84,6 @@
getgid32: 1
getegid32: 1
getgroups32: 1
+sysinfo: 1
@include /apex/com.android.media.swcodec/etc/seccomp_policy/code_coverage.arm.policy
diff --git a/services/mediacodec/seccomp_policy/mediaswcodec-arm64.policy b/services/mediacodec/seccomp_policy/mediaswcodec-arm64.policy
index 4c51a9c..e151a06 100644
--- a/services/mediacodec/seccomp_policy/mediaswcodec-arm64.policy
+++ b/services/mediacodec/seccomp_policy/mediaswcodec-arm64.policy
@@ -78,5 +78,6 @@
getgid: 1
getegid: 1
getgroups: 1
+sysinfo: 1
@include /apex/com.android.media.swcodec/etc/seccomp_policy/code_coverage.arm64.policy
diff --git a/services/mediaextractor/OWNERS b/services/mediaextractor/OWNERS
index c716cce..2a779c2 100644
--- a/services/mediaextractor/OWNERS
+++ b/services/mediaextractor/OWNERS
@@ -1,2 +1,3 @@
jeffv@google.com
-marcone@google.com
+essick@google.com
+aquilescanta@google.com
diff --git a/services/mediaextractor/mediaextractor.rc b/services/mediaextractor/mediaextractor.rc
index 5fc2941..4fb50d0 100644
--- a/services/mediaextractor/mediaextractor.rc
+++ b/services/mediaextractor/mediaextractor.rc
@@ -3,4 +3,4 @@
user mediaex
group drmrpc mediadrm
ioprio rt 4
- writepid /dev/cpuset/foreground/tasks
+ task_profiles ProcessCapacityHigh
diff --git a/services/medialog/Android.bp b/services/medialog/Android.bp
index cfc4c40..8088ef0 100644
--- a/services/medialog/Android.bp
+++ b/services/medialog/Android.bp
@@ -26,6 +26,7 @@
"libmediautils",
"libnblog",
"libutils",
+ "packagemanager_aidl-cpp",
],
cflags: [
diff --git a/services/mediametrics/Android.bp b/services/mediametrics/Android.bp
index b2c9465..c98d5fc 100644
--- a/services/mediametrics/Android.bp
+++ b/services/mediametrics/Android.bp
@@ -114,6 +114,7 @@
"libmediautils",
"libutils",
"mediametricsservice-aidl-cpp",
+ "packagemanager_aidl-cpp",
],
header_libs: [
"libaudioutils_headers",
@@ -172,6 +173,7 @@
"libstatspull",
"libstatssocket",
"libutils",
+ "packagemanager_aidl-cpp",
],
export_shared_lib_headers: [
diff --git a/services/mediametrics/TransactionLog.h b/services/mediametrics/TransactionLog.h
index 0ca4639..fd42518 100644
--- a/services/mediametrics/TransactionLog.h
+++ b/services/mediametrics/TransactionLog.h
@@ -158,7 +158,7 @@
++it) {
if (ll <= 0) break;
if (prefix != nullptr && !startsWith(it->first, prefix)) break;
- auto [s, l] = dumpMapTimeItem(it->second, ll - 1, sinceNs, prefix);
+ std::tie(s, l) = dumpMapTimeItem(it->second, ll - 1, sinceNs, prefix);
if (l == 0) continue; // don't show empty groups (due to sinceNs).
ss << " " << it->first << "\n" << s;
ll -= l + 1;
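
The TransactionLog.h change swaps a structured binding for `std::tie`, presumably so that `s` and `l` refer to variables declared earlier in the function rather than fresh shadowing copies: `auto [s, l] = ...` always declares brand-new names, while `std::tie(s, l) = ...` assigns into the existing ones, so later statements keep seeing the updated values. A tiny standalone example of the difference, not taken from the mediametrics code:

```
#include <iostream>
#include <tuple>
#include <utility>

std::pair<int, int> next() { return {1, 2}; }

int main() {
    int a = 0;
    int b = 0;

    auto [x, y] = next();     // declares brand-new variables x and y
    std::tie(a, b) = next();  // assigns into the existing a and b

    std::cout << a << " " << b << " " << x << " " << y << "\n";  // prints "1 2 1 2"
    return 0;
}
```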
diff --git a/services/mediametrics/fuzzer/Android.bp b/services/mediametrics/fuzzer/Android.bp
index b03e518..9da7282 100644
--- a/services/mediametrics/fuzzer/Android.bp
+++ b/services/mediametrics/fuzzer/Android.bp
@@ -56,6 +56,7 @@
"libstatssocket",
"libutils",
"mediametricsservice-aidl-cpp",
+ "packagemanager_aidl-cpp",
],
include_dirs: [
diff --git a/services/mediametrics/fuzzer/mediametrics_service_fuzzer.cpp b/services/mediametrics/fuzzer/mediametrics_service_fuzzer.cpp
index 8b0b479..06ab16e 100644
--- a/services/mediametrics/fuzzer/mediametrics_service_fuzzer.cpp
+++ b/services/mediametrics/fuzzer/mediametrics_service_fuzzer.cpp
@@ -48,6 +48,7 @@
void invokeAudioAnalytics(const uint8_t *data, size_t size);
void invokeTimedAction(const uint8_t *data, size_t size);
void process(const uint8_t *data, size_t size);
+ std::atomic_int mValue = 0;
};
void MediaMetricsServiceFuzzer::invokeStartsWith(const uint8_t *data, size_t size) {
@@ -342,11 +343,10 @@
void MediaMetricsServiceFuzzer::invokeTimedAction(const uint8_t *data, size_t size) {
FuzzedDataProvider fdp = FuzzedDataProvider(data, size);
android::mediametrics::TimedAction timedAction;
- std::atomic_int value = 0;
while (fdp.remaining_bytes()) {
timedAction.postIn(std::chrono::seconds(fdp.ConsumeIntegral<int32_t>()),
- [&value] { ++value; });
+ [this] { ++mValue; });
timedAction.size();
}
}
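
Promoting the counter from a local `std::atomic_int` to the member `mValue` in the mediametrics fuzzer removes a potential use-after-scope: the lambda posted via `postIn` may run on another thread, and tying the counter to the fuzzer object (rather than a stack variable) guarantees it outlives any deferred invocation. A minimal sketch of the hazard and the fix, using a generic deferred executor rather than the actual TimedAction class:

```
#include <atomic>
#include <functional>
#include <vector>

struct DeferredExecutor {
    std::vector<std::function<void()>> pending;
    void post(std::function<void()> f) { pending.push_back(std::move(f)); }
    void runAll() { for (auto& f : pending) f(); }
};

struct Fuzzer {
    std::atomic_int mValue{0};   // lives as long as the fuzzer object
    DeferredExecutor executor;

    void schedule() {
        // Capturing `this` and bumping the member is safe for deferred work;
        // capturing a local counter by reference here would dangle once
        // schedule() returns.
        executor.post([this] { ++mValue; });
    }
};

int main() {
    Fuzzer f;
    f.schedule();
    f.executor.runAll();   // runs after schedule() returned; mValue is still valid
    return f.mValue.load() == 1 ? 0 : 1;
}
```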
diff --git a/services/mediametrics/statsd_codec.cpp b/services/mediametrics/statsd_codec.cpp
index 8581437..17a3a5f 100644
--- a/services/mediametrics/statsd_codec.cpp
+++ b/services/mediametrics/statsd_codec.cpp
@@ -390,6 +390,48 @@
}
AStatsEvent_writeInt32(event, qpBMaxOri);
+ // int32_t configColorStandard = -1;
+ // if (item->getInt32("android.media.mediacodec.config-color-standard", &configColorStandard)) {
+ // metrics_proto.set_config_color_standard(configColorStandard);
+ // }
+ // AStatsEvent_writeInt32(event, configColorStandard);
+
+ // int32_t configColorRange = -1;
+ // if (item->getInt32("android.media.mediacodec.config-color-range", &configColorRange)) {
+ // metrics_proto.set_config_color_range(configColorRange);
+ // }
+ // AStatsEvent_writeInt32(event, configColorRange);
+
+ // int32_t configColorTransfer = -1;
+ // if (item->getInt32("android.media.mediacodec.config-color-transfer", &configColorTransfer)) {
+ // metrics_proto.set_config_color_transfer(configColorTransfer);
+ // }
+ // AStatsEvent_writeInt32(event, configColorTransfer);
+
+ // int32_t parsedColorStandard = -1;
+ // if (item->getInt32("android.media.mediacodec.parsed-color-standard", &parsedColorStandard)) {
+ // metrics_proto.set_parsed_color_standard(parsedColorStandard);
+ // }
+ // AStatsEvent_writeInt32(event, parsedColorStandard);
+
+ // int32_t parsedColorRange = -1;
+ // if (item->getInt32("android.media.mediacodec.parsed-color-range", &parsedColorRange)) {
+ // metrics_proto.set_parsed_color_range(parsedColorRange);
+ // }
+ // AStatsEvent_writeInt32(event, parsedColorRange);
+
+ // int32_t parsedColorTransfer = -1;
+ // if (item->getInt32("android.media.mediacodec.parsed-color-transfer", &parsedColorTransfer)) {
+ // metrics_proto.set_parsed_color_transfer(parsedColorTransfer);
+ // }
+ // AStatsEvent_writeInt32(event, parsedColorTransfer);
+
+ // int32_t hdrMetadataFlags = -1;
+ // if (item->getInt32("android.media.mediacodec.hdr-metadata-flags", &hdrMetadataFlags)) {
+ // metrics_proto.set_hdr_metadata_flags(hdrMetadataFlags);
+ // }
+ // AStatsEvent_writeInt32(event, hdrMetadataFlags);
+
int err = AStatsEvent_write(event);
if (err < 0) {
ALOGE("Failed to write codec metrics to statsd (%d)", err);
diff --git a/services/mediametrics/tests/Android.bp b/services/mediametrics/tests/Android.bp
index 3baf739..f46fbad 100644
--- a/services/mediametrics/tests/Android.bp
+++ b/services/mediametrics/tests/Android.bp
@@ -33,6 +33,7 @@
"libmediautils",
"libutils",
"mediametricsservice-aidl-cpp",
+ "packagemanager_aidl-cpp",
],
header_libs: [
diff --git a/services/mediaresourcemanager/Android.bp b/services/mediaresourcemanager/Android.bp
index f31202b..5d80744 100644
--- a/services/mediaresourcemanager/Android.bp
+++ b/services/mediaresourcemanager/Android.bp
@@ -90,7 +90,7 @@
],
static_libs: [
- "resourceobserver_aidl_interface-V1-ndk_platform",
+ "resourceobserver_aidl_interface-V1-ndk",
],
include_dirs: ["frameworks/av/include"],
diff --git a/services/mediaresourcemanager/test/Android.bp b/services/mediaresourcemanager/test/Android.bp
index ec4ba58..618626f 100644
--- a/services/mediaresourcemanager/test/Android.bp
+++ b/services/mediaresourcemanager/test/Android.bp
@@ -56,7 +56,7 @@
test_suites: ["device-tests"],
static_libs: [
"libresourcemanagerservice",
- "resourceobserver_aidl_interface-V1-ndk_platform",
+ "resourceobserver_aidl_interface-V1-ndk",
],
shared_libs: [
"libbinder",
diff --git a/services/mediatranscoding/Android.bp b/services/mediatranscoding/Android.bp
index a9fd14f..fa5eb4e 100644
--- a/services/mediatranscoding/Android.bp
+++ b/services/mediatranscoding/Android.bp
@@ -47,7 +47,7 @@
],
static_libs: [
- "mediatranscoding_aidl_interface-ndk_platform",
+ "mediatranscoding_aidl_interface-ndk",
],
cflags: [
@@ -80,7 +80,7 @@
],
static_libs: [
- "mediatranscoding_aidl_interface-ndk_platform",
+ "mediatranscoding_aidl_interface-ndk",
],
cflags: [
diff --git a/services/mediatranscoding/tests/Android.bp b/services/mediatranscoding/tests/Android.bp
index cb180ec..ae13656 100644
--- a/services/mediatranscoding/tests/Android.bp
+++ b/services/mediatranscoding/tests/Android.bp
@@ -34,8 +34,8 @@
],
static_libs: [
- "mediatranscoding_aidl_interface-ndk_platform",
- "resourcemanager_aidl_interface-ndk_platform",
+ "mediatranscoding_aidl_interface-ndk",
+ "resourcemanager_aidl_interface-ndk",
"libmediatranscodingservice",
],
diff --git a/services/mediatranscoding/tests/MediaTranscodingServiceTestHelper.h b/services/mediatranscoding/tests/MediaTranscodingServiceTestHelper.h
index 0cb2fad..8e17f55 100644
--- a/services/mediatranscoding/tests/MediaTranscodingServiceTestHelper.h
+++ b/services/mediatranscoding/tests/MediaTranscodingServiceTestHelper.h
@@ -259,9 +259,7 @@
static constexpr bool success = true;
static constexpr bool fail = false;
-struct TestClientCallback : public BnTranscodingClientCallback,
- public EventTracker,
- public std::enable_shared_from_this<TestClientCallback> {
+struct TestClientCallback : public BnTranscodingClientCallback, public EventTracker {
TestClientCallback(const char* packageName, int32_t id)
: mClientId(id), mClientPid(PID(id)), mClientUid(UID(id)), mPackageName(packageName) {
ALOGI("TestClientCallback %d created: pid %d, uid %d", id, PID(id), UID(id));
@@ -348,8 +346,8 @@
ALOGD("registering %s with uid %d", packageName, mClientUid);
std::shared_ptr<ITranscodingClient> client;
- Status status =
- service->registerClient(shared_from_this(), kClientName, packageName, &client);
+ Status status = service->registerClient(ref<TestClientCallback>(), kClientName, packageName,
+ &client);
mClient = status.isOk() ? client : nullptr;
return status;
diff --git a/services/minijail/Android.bp b/services/minijail/Android.bp
index 3a89e12..038197f 100644
--- a/services/minijail/Android.bp
+++ b/services/minijail/Android.bp
@@ -31,17 +31,6 @@
export_include_dirs: ["."],
}
-// By adding "vendor_available: true" to "libavservices_minijail", we don't
-// need to have "libavservices_minijail_vendor" any longer.
-// "libavservices_minijail_vendor" will be removed, once we replace it with
-// "libavservices_minijail" in all vendor modules. (b/146313710)
-cc_library_shared {
- name: "libavservices_minijail_vendor",
- vendor: true,
- defaults: ["libavservices_minijail_defaults"],
- export_include_dirs: ["."],
-}
-
// Unit tests.
cc_test {
name: "libavservices_minijail_unittest",
diff --git a/services/minijail/OWNERS b/services/minijail/OWNERS
index 19f4f9f..9ebf41e 100644
--- a/services/minijail/OWNERS
+++ b/services/minijail/OWNERS
@@ -1,2 +1,2 @@
jorgelo@google.com
-marcone@google.com
+essick@google.com
diff --git a/services/oboeservice/Android.bp b/services/oboeservice/Android.bp
index 4c58040..3563d66 100644
--- a/services/oboeservice/Android.bp
+++ b/services/oboeservice/Android.bp
@@ -68,6 +68,7 @@
"aaudio-aidl-cpp",
"framework-permission-aidl-cpp",
"libaudioclient_aidl_conversion",
+ "packagemanager_aidl-cpp",
],
export_shared_lib_headers: [
diff --git a/services/tuner/Android.bp b/services/tuner/Android.bp
index cd11c88..1dcfe53 100644
--- a/services/tuner/Android.bp
+++ b/services/tuner/Android.bp
@@ -89,13 +89,14 @@
"liblog",
"libmedia",
"libutils",
- "tv_tuner_aidl_interface-ndk_platform",
- "tv_tuner_resource_manager_aidl_interface-ndk_platform",
+ "packagemanager_aidl-cpp",
+ "tv_tuner_aidl_interface-ndk",
+ "tv_tuner_resource_manager_aidl_interface-ndk",
"tv_tuner_resource_manager_aidl_interface-cpp",
],
static_libs: [
- "android.hardware.common.fmq-V1-ndk_platform",
+ "android.hardware.common.fmq-V1-ndk",
"libaidlcommonsupport",
],
@@ -128,12 +129,12 @@
"liblog",
"libtunerservice",
"libutils",
- "tv_tuner_resource_manager_aidl_interface-ndk_platform",
+ "tv_tuner_resource_manager_aidl_interface-ndk",
"tv_tuner_resource_manager_aidl_interface-cpp",
],
static_libs: [
- "tv_tuner_aidl_interface-ndk_platform",
+ "tv_tuner_aidl_interface-ndk",
],
init_rc: ["mediatuner.rc"],