Merge "CCodecConfig: map KEY_HDR10_PLUS_INFO to C2StreamHdrDynamicMetadataInfo"
diff --git a/apex/manifest.json b/apex/manifest.json
index 5d72031..4b75b04 100644
--- a/apex/manifest.json
+++ b/apex/manifest.json
@@ -1,6 +1,6 @@
{
"name": "com.android.media",
- "version": 319999900,
+ "version": 339990000,
"requireNativeLibs": [
"libandroid.so",
"libbinder_ndk.so",
diff --git a/apex/manifest_codec.json b/apex/manifest_codec.json
index b0d962d..fbcbb69 100644
--- a/apex/manifest_codec.json
+++ b/apex/manifest_codec.json
@@ -1,6 +1,6 @@
{
"name": "com.android.media.swcodec",
- "version": 319999900,
+ "version": 339990000,
"requireNativeLibs": [
":sphal"
]
diff --git a/media/codec2/components/base/Android.bp b/media/codec2/components/base/Android.bp
index 160e250..809f942 100644
--- a/media/codec2/components/base/Android.bp
+++ b/media/codec2/components/base/Android.bp
@@ -27,6 +27,11 @@
"libsfplugin_ccodec_utils",
],
+ header_libs: [
+ "libarect_headers",
+ "libnativewindow_headers",
+ ],
+
shared_libs: [
"libcutils", // for properties
"liblog", // for ALOG
@@ -61,6 +66,11 @@
"libsfplugin_ccodec_utils",
],
+ header_libs: [
+ "libarect_headers",
+ "libnativewindow_headers",
+ ],
+
shared_libs: [
"libcodec2_soft_common",
"libcutils", // for properties
diff --git a/media/codec2/components/base/SimpleC2Component.cpp b/media/codec2/components/base/SimpleC2Component.cpp
index 1eec8f9..01f5ffb 100644
--- a/media/codec2/components/base/SimpleC2Component.cpp
+++ b/media/codec2/components/base/SimpleC2Component.cpp
@@ -18,6 +18,7 @@
#define LOG_TAG "SimpleC2Component"
#include <log/log.h>
+#include <android/hardware_buffer.h>
#include <cutils/properties.h>
#include <media/stagefright/foundation/AMessage.h>
@@ -26,19 +27,14 @@
#include <C2Config.h>
#include <C2Debug.h>
#include <C2PlatformSupport.h>
+#include <Codec2BufferUtils.h>
+#include <Codec2CommonUtils.h>
#include <SimpleC2Component.h>
namespace android {
constexpr uint8_t kNeutralUVBitDepth8 = 128;
constexpr uint16_t kNeutralUVBitDepth10 = 512;
-bool isAtLeastT() {
- char deviceCodeName[PROP_VALUE_MAX];
- __system_property_get("ro.build.version.codename", deviceCodeName);
- return android_get_device_api_level() >= __ANDROID_API_T__ ||
- !strcmp(deviceCodeName, "Tiramisu");
-}
-
void convertYUV420Planar8ToYV12(uint8_t *dstY, uint8_t *dstU, uint8_t *dstV, const uint8_t *srcY,
const uint8_t *srcU, const uint8_t *srcV, size_t srcYStride,
size_t srcUStride, size_t srcVStride, size_t dstYStride,
@@ -885,25 +881,14 @@
// Save supported hal pixel formats for bit depth of 10, the first time this is called
if (!mBitDepth10HalPixelFormats.size()) {
std::vector<int> halPixelFormats;
- if (isAtLeastT()) {
- halPixelFormats.push_back(HAL_PIXEL_FORMAT_YCBCR_P010);
- }
+ halPixelFormats.push_back(HAL_PIXEL_FORMAT_YCBCR_P010);
+
// since allowRGBA1010102 can chance in each call, but mBitDepth10HalPixelFormats
// is populated only once, allowRGBA1010102 is not considered at this stage.
halPixelFormats.push_back(HAL_PIXEL_FORMAT_RGBA_1010102);
for (int halPixelFormat : halPixelFormats) {
- std::shared_ptr<C2GraphicBlock> block;
-
- uint32_t gpuConsumerFlags = halPixelFormat == HAL_PIXEL_FORMAT_RGBA_1010102
- ? C2AndroidMemoryUsage::HW_TEXTURE_READ
- : 0;
- C2MemoryUsage usage = {C2MemoryUsage::CPU_READ | gpuConsumerFlags,
- C2MemoryUsage::CPU_WRITE};
- // TODO(b/214411172) Use AHardwareBuffer_isSupported once it supports P010
- c2_status_t status =
- mOutputBlockPool->fetchGraphicBlock(320, 240, halPixelFormat, usage, &block);
- if (status == C2_OK) {
+ if (isHalPixelFormatSupported((AHardwareBuffer_Format)halPixelFormat)) {
mBitDepth10HalPixelFormats.push_back(halPixelFormat);
}
}
diff --git a/media/codec2/components/base/include/SimpleC2Component.h b/media/codec2/components/base/include/SimpleC2Component.h
index 3172f29..cc23b60 100644
--- a/media/codec2/components/base/include/SimpleC2Component.h
+++ b/media/codec2/components/base/include/SimpleC2Component.h
@@ -27,7 +27,6 @@
#include <media/stagefright/foundation/Mutexed.h>
namespace android {
-bool isAtLeastT();
void convertYUV420Planar8ToYV12(uint8_t *dstY, uint8_t *dstU, uint8_t *dstV, const uint8_t *srcY,
const uint8_t *srcU, const uint8_t *srcV, size_t srcYStride,
size_t srcUStride, size_t srcVStride, size_t dstYStride,
diff --git a/media/codec2/components/gav1/C2SoftGav1Dec.cpp b/media/codec2/components/gav1/C2SoftGav1Dec.cpp
index 701c22c..386723d 100644
--- a/media/codec2/components/gav1/C2SoftGav1Dec.cpp
+++ b/media/codec2/components/gav1/C2SoftGav1Dec.cpp
@@ -21,6 +21,7 @@
#include <C2Debug.h>
#include <C2PlatformSupport.h>
#include <Codec2BufferUtils.h>
+#include <Codec2CommonUtils.h>
#include <Codec2Mapper.h>
#include <SimpleC2Interface.h>
#include <log/log.h>
@@ -190,7 +191,7 @@
.build());
std::vector<uint32_t> pixelFormats = {HAL_PIXEL_FORMAT_YCBCR_420_888};
- if (isAtLeastT()) {
+ if (isHalPixelFormatSupported((AHardwareBuffer_Format)HAL_PIXEL_FORMAT_YCBCR_P010)) {
pixelFormats.push_back(HAL_PIXEL_FORMAT_YCBCR_P010);
}
// TODO: support more formats?
diff --git a/media/codec2/components/vpx/C2SoftVpxDec.cpp b/media/codec2/components/vpx/C2SoftVpxDec.cpp
index c2ccfa0..d969f2e 100644
--- a/media/codec2/components/vpx/C2SoftVpxDec.cpp
+++ b/media/codec2/components/vpx/C2SoftVpxDec.cpp
@@ -25,6 +25,7 @@
#include <C2Debug.h>
#include <C2PlatformSupport.h>
#include <Codec2BufferUtils.h>
+#include <Codec2CommonUtils.h>
#include <SimpleC2Interface.h>
#include "C2SoftVpxDec.h"
@@ -219,7 +220,7 @@
// TODO: support more formats?
std::vector<uint32_t> pixelFormats = {HAL_PIXEL_FORMAT_YCBCR_420_888};
#ifdef VP9
- if (isAtLeastT()) {
+ if (isHalPixelFormatSupported((AHardwareBuffer_Format)HAL_PIXEL_FORMAT_YCBCR_P010)) {
pixelFormats.push_back(HAL_PIXEL_FORMAT_YCBCR_P010);
}
#endif
diff --git a/media/codec2/core/include/C2Config.h b/media/codec2/core/include/C2Config.h
index 70e742c..fca8f4f 100644
--- a/media/codec2/core/include/C2Config.h
+++ b/media/codec2/core/include/C2Config.h
@@ -60,6 +60,7 @@
enum drc_effect_type_t : int32_t; ///< DRC effect type
enum drc_album_mode_t : int32_t; ///< DRC album mode
enum hdr_dynamic_metadata_type_t : uint32_t; ///< HDR dynamic metadata type
+ enum hdr_format_t : uint32_t; ///< HDR format
enum intra_refresh_mode_t : uint32_t; ///< intra refresh modes
enum level_t : uint32_t; ///< coding level
enum ordinal_key_t : uint32_t; ///< work ordering keys
@@ -192,10 +193,9 @@
kParamIndexPictureType,
// deprecated
kParamIndexHdr10PlusMetadata,
-
kParamIndexPictureQuantization,
-
kParamIndexHdrDynamicMetadata,
+ kParamIndexHdrFormat,
/* ------------------------------------ video components ------------------------------------ */
@@ -1664,6 +1664,34 @@
constexpr char C2_PARAMKEY_INPUT_HDR_DYNAMIC_INFO[] = "input.hdr-dynamic-info";
constexpr char C2_PARAMKEY_OUTPUT_HDR_DYNAMIC_INFO[] = "output.hdr-dynamic-info";
+/**
+ * HDR Format
+ */
+C2ENUM(C2Config::hdr_format_t, uint32_t,
+ UNKNOWN, ///< HDR format not known (default)
+ SDR, ///< not HDR (SDR)
+ HLG, ///< HLG
+ HDR10, ///< HDR10
+ HDR10_PLUS, ///< HDR10+
+);
+
+/**
+ * HDR Format Info
+ *
+ * This information may be present during configuration to allow encoders to
+ * prepare encoding certain HDR formats. When this information is not present
+ * before start, encoders should determine the HDR format based on the available
+ * HDR metadata on the first input frame.
+ *
+ * While this information is optional, it is not a hint. When present, encoders
+ * that do not support dynamic reconfiguration do not need to switch to the HDR
+ * format based on the metadata on the first input frame.
+ */
+typedef C2StreamParam<C2Info, C2SimpleValueStruct<C2EasyEnum<C2Config::hdr_format_t>>,
+ kParamIndexHdrFormat>
+ C2StreamHdrFormatInfo;
+constexpr char C2_PARAMKEY_HDR_FORMAT[] = "coded.hdr-format";
+
/* ------------------------------------ block-based coding ----------------------------------- */
/**
diff --git a/media/codec2/hidl/1.0/utils/types.cpp b/media/codec2/hidl/1.0/utils/types.cpp
index 72f7c43..5c24bd7 100644
--- a/media/codec2/hidl/1.0/utils/types.cpp
+++ b/media/codec2/hidl/1.0/utils/types.cpp
@@ -26,6 +26,7 @@
#include <C2BlockInternal.h>
#include <C2Buffer.h>
#include <C2Component.h>
+#include <C2FenceFactory.h>
#include <C2Param.h>
#include <C2ParamInternal.h>
#include <C2PlatformSupport.h>
@@ -759,17 +760,14 @@
// Note: File descriptors are not duplicated. The original file descriptor must
// not be closed before the transaction is complete.
bool objcpy(hidl_handle* d, const C2Fence& s) {
- (void)s; // TODO: implement s.fd()
- int fenceFd = -1;
d->setTo(nullptr);
- if (fenceFd >= 0) {
- native_handle_t *handle = native_handle_create(1, 0);
- if (!handle) {
- LOG(ERROR) << "Failed to create a native handle.";
- return false;
- }
- handle->data[0] = fenceFd;
+ native_handle_t* handle = _C2FenceFactory::CreateNativeHandle(s);
+ if (handle) {
d->setTo(handle, true /* owns */);
+// } else if (!s.ready()) {
+// // TODO: we should wait for unmarshallable fences but this may not be
+// // the best place for it. We can safely ignore here as at this time
+// // all fences used here are marshallable.
}
return true;
}
@@ -1184,9 +1182,8 @@
// Note: File descriptors are not duplicated. The original file descriptor must
// not be closed before the transaction is complete.
bool objcpy(C2Fence* d, const hidl_handle& s) {
- // TODO: Implement.
- (void)s;
- *d = C2Fence();
+ const native_handle_t* handle = s.getNativeHandle();
+ *d = _C2FenceFactory::CreateFromNativeHandle(handle);
return true;
}
diff --git a/media/codec2/hidl/plugin/samples/SampleFilterPlugin.cpp b/media/codec2/hidl/plugin/samples/SampleFilterPlugin.cpp
index b942be7..5c13b0e 100644
--- a/media/codec2/hidl/plugin/samples/SampleFilterPlugin.cpp
+++ b/media/codec2/hidl/plugin/samples/SampleFilterPlugin.cpp
@@ -807,7 +807,8 @@
// affectedParams
{
C2StreamHdrStaticInfo::output::PARAM_TYPE,
- C2StreamHdr10PlusInfo::output::PARAM_TYPE,
+ C2StreamHdr10PlusInfo::output::PARAM_TYPE, // will be deprecated
+ C2StreamHdrDynamicMetadataInfo::output::PARAM_TYPE,
C2StreamColorAspectsInfo::output::PARAM_TYPE,
},
};
diff --git a/media/codec2/sfplugin/Android.bp b/media/codec2/sfplugin/Android.bp
index 39fa4fc..134bc53 100644
--- a/media/codec2/sfplugin/Android.bp
+++ b/media/codec2/sfplugin/Android.bp
@@ -37,6 +37,10 @@
"media_ndk_headers",
],
+ static_libs: [
+ "SurfaceFlingerProperties",
+ ],
+
shared_libs: [
"android.hardware.cas.native@1.0",
"android.hardware.drm@1.0",
diff --git a/media/codec2/sfplugin/CCodec.cpp b/media/codec2/sfplugin/CCodec.cpp
index c0a6816..94aa4ae 100644
--- a/media/codec2/sfplugin/CCodec.cpp
+++ b/media/codec2/sfplugin/CCodec.cpp
@@ -2396,7 +2396,8 @@
C2StreamColorAspectsInfo::output::PARAM_TYPE,
C2StreamDataSpaceInfo::output::PARAM_TYPE,
C2StreamHdrStaticInfo::output::PARAM_TYPE,
- C2StreamHdr10PlusInfo::output::PARAM_TYPE,
+ C2StreamHdr10PlusInfo::output::PARAM_TYPE, // will be deprecated
+ C2StreamHdrDynamicMetadataInfo::output::PARAM_TYPE,
C2StreamPixelAspectRatioInfo::output::PARAM_TYPE,
C2StreamSurfaceScalingInfo::output::PARAM_TYPE
};
diff --git a/media/codec2/sfplugin/CCodecBufferChannel.cpp b/media/codec2/sfplugin/CCodecBufferChannel.cpp
index 62a1d02..a00adde 100644
--- a/media/codec2/sfplugin/CCodecBufferChannel.cpp
+++ b/media/codec2/sfplugin/CCodecBufferChannel.cpp
@@ -880,6 +880,19 @@
return UNKNOWN_ERROR;
}
const C2ConstGraphicBlock &block = blocks.front();
+ C2Fence c2fence = block.fence();
+ sp<Fence> fence = Fence::NO_FENCE;
+ // TODO: it's not sufficient to just check isHW() and then construct android::fence from it.
+ // Once C2Fence::type() is added, check the exact C2Fence type
+ if (c2fence.isHW()) {
+ int fenceFd = c2fence.fd();
+ fence = sp<Fence>::make(fenceFd);
+ if (!fence) {
+ ALOGE("[%s] Failed to allocate a fence", mName);
+ close(fenceFd);
+ return NO_MEMORY;
+ }
+ }
// TODO: revisit this after C2Fence implementation.
android::IGraphicBufferProducer::QueueBufferInput qbi(
@@ -892,7 +905,7 @@
blocks.front().crop().bottom()),
videoScalingMode,
transform,
- Fence::NO_FENCE, 0);
+ fence, 0);
if (hdrStaticInfo || hdrDynamicInfo) {
HdrMetadata hdr;
if (hdrStaticInfo) {
diff --git a/media/codec2/sfplugin/CCodecConfig.cpp b/media/codec2/sfplugin/CCodecConfig.cpp
index 696198c..ae14ae7 100644
--- a/media/codec2/sfplugin/CCodecConfig.cpp
+++ b/media/codec2/sfplugin/CCodecConfig.cpp
@@ -400,10 +400,10 @@
// Rotation
// Note: SDK rotation is clock-wise, while C2 rotation is counter-clock-wise
add(ConfigMapper(KEY_ROTATION, C2_PARAMKEY_VUI_ROTATION, "value")
- .limitTo(D::VIDEO & D::CODED)
+ .limitTo((D::VIDEO | D::IMAGE) & D::CODED)
.withMappers(negate, negate));
add(ConfigMapper(KEY_ROTATION, C2_PARAMKEY_ROTATION, "value")
- .limitTo(D::VIDEO & D::RAW)
+ .limitTo((D::VIDEO | D::IMAGE) & D::RAW)
.withMappers(negate, negate));
// android 'video-scaling'
@@ -513,6 +513,9 @@
add(ConfigMapper("cta861.max-fall", C2_PARAMKEY_HDR_STATIC_INFO, "max-fall")
.limitTo((D::VIDEO | D::IMAGE) & D::RAW));
+ add(ConfigMapper(C2_PARAMKEY_HDR_FORMAT, C2_PARAMKEY_HDR_FORMAT, "value")
+ .limitTo((D::VIDEO | D::IMAGE) & D::CODED & D::CONFIG));
+
add(ConfigMapper(std::string(KEY_FEATURE_) + FEATURE_SecurePlayback,
C2_PARAMKEY_SECURE_MODE, "value"));
@@ -1670,6 +1673,27 @@
params->setFloat(C2_PARAMKEY_INPUT_TIME_STRETCH, captureRate / frameRate);
}
}
+
+ // add HDR format for video encoding
+ if (configDomain == IS_CONFIG) {
+ // don't assume here that transfer is set for HDR, only require it for HLG
+ int transfer = 0;
+ params->findInt32(KEY_COLOR_TRANSFER, &transfer);
+
+ int profile;
+ if (params->findInt32(KEY_PROFILE, &profile)) {
+ std::shared_ptr<C2Mapper::ProfileLevelMapper> mapper =
+ C2Mapper::GetProfileLevelMapper(mCodingMediaType);
+ C2Config::hdr_format_t c2 = C2Config::hdr_format_t::UNKNOWN;
+ if (mapper && mapper->mapHdrFormat(profile, &c2)) {
+ if (c2 == C2Config::hdr_format_t::HLG &&
+ transfer != COLOR_TRANSFER_HLG) {
+ c2 = C2Config::hdr_format_t::UNKNOWN;
+ }
+ params->setInt32(C2_PARAMKEY_HDR_FORMAT, c2);
+ }
+ }
+ }
}
{ // reflect temporal layering into a binary blob
diff --git a/media/codec2/sfplugin/Codec2Buffer.cpp b/media/codec2/sfplugin/Codec2Buffer.cpp
index c2405e8..6084ee3 100644
--- a/media/codec2/sfplugin/Codec2Buffer.cpp
+++ b/media/codec2/sfplugin/Codec2Buffer.cpp
@@ -1178,9 +1178,6 @@
}
if (dynamicInfo && *dynamicInfo && dynamicInfo->flexCount() > 0) {
ALOGV("Setting dynamic HDR info as gralloc4 metadata");
- hidl_vec<uint8_t> vec;
- vec.resize(dynamicInfo->flexCount());
- memcpy(vec.data(), dynamicInfo->m.data, dynamicInfo->flexCount());
std::optional<IMapper4::MetadataType> metadataType;
switch (dynamicInfo->m.type_) {
case C2Config::HDR_DYNAMIC_METADATA_TYPE_SMPTE_2094_10:
@@ -1190,12 +1187,20 @@
metadataType = MetadataType_Smpte2094_40;
break;
}
+
if (metadataType) {
- Return<Error4> ret = mapper->set(buffer.get(), *metadataType, vec);
- if (!ret.isOk()) {
- err = C2_REFUSED;
- } else if (ret != Error4::NONE) {
- err = C2_CORRUPTED;
+ std::vector<uint8_t> smpte2094_40;
+ smpte2094_40.resize(dynamicInfo->flexCount());
+ memcpy(smpte2094_40.data(), dynamicInfo->m.data, dynamicInfo->flexCount());
+
+ hidl_vec<uint8_t> vec;
+ if (gralloc4::encodeSmpte2094_40({ smpte2094_40 }, &vec) == OK) {
+ Return<Error4> ret = mapper->set(buffer.get(), *metadataType, vec);
+ if (!ret.isOk()) {
+ err = C2_REFUSED;
+ } else if (ret != Error4::NONE) {
+ err = C2_CORRUPTED;
+ }
}
} else {
err = C2_BAD_VALUE;
diff --git a/media/codec2/sfplugin/Codec2InfoBuilder.cpp b/media/codec2/sfplugin/Codec2InfoBuilder.cpp
index 2b8a160..e9adac4 100644
--- a/media/codec2/sfplugin/Codec2InfoBuilder.cpp
+++ b/media/codec2/sfplugin/Codec2InfoBuilder.cpp
@@ -33,6 +33,7 @@
#include <OMX_Video.h>
#include <OMX_VideoExt.h>
#include <OMX_AsString.h>
+#include <SurfaceFlingerProperties.sysprop.h>
#include <android/hardware/media/omx/1.0/IOmx.h>
#include <android/hardware/media/omx/1.0/IOmxObserver.h>
@@ -136,7 +137,9 @@
continue;
}
switch (type.coreIndex()) {
- case C2StreamHdr10PlusInfo::CORE_INDEX:
+ case C2StreamHdrDynamicMetadataInfo::CORE_INDEX:
+ [[fallthrough]];
+ case C2StreamHdr10PlusInfo::CORE_INDEX: // will be deprecated
supportsHdr10Plus = true;
break;
case C2StreamHdrStaticInfo::CORE_INDEX:
@@ -156,6 +159,12 @@
// TODO: directly check this from the component interface
supports10Bit = (supportsHdr || supportsHdr10Plus);
+ // If the device doesn't support HDR display, then no codec on the device
+ // can advertise support for HDR profiles.
+ // Default to true to maintain backward compatibility
+ auto ret = sysprop::SurfaceFlingerProperties::has_HDR_display();
+ bool hasHDRDisplay = ret.has_value() ? *ret : true;
+
bool added = false;
for (C2Value::Primitive profile : profileQuery[0].values.values) {
@@ -181,8 +190,8 @@
if (mapper && mapper->mapProfile(pl.profile, &sdkProfile)
&& mapper->mapLevel(pl.level, &sdkLevel)) {
caps->addProfileLevel((uint32_t)sdkProfile, (uint32_t)sdkLevel);
- // also list HDR profiles if component supports HDR
- if (supportsHdr) {
+ // also list HDR profiles if component supports HDR and device has HDR display
+ if (supportsHdr && hasHDRDisplay) {
auto hdrMapper = C2Mapper::GetHdrProfileLevelMapper(trait.mediaType);
if (hdrMapper && hdrMapper->mapProfile(pl.profile, &sdkProfile)) {
caps->addProfileLevel((uint32_t)sdkProfile, (uint32_t)sdkLevel);
diff --git a/media/codec2/sfplugin/utils/Android.bp b/media/codec2/sfplugin/utils/Android.bp
index 2f4d6b1..5c2f110 100644
--- a/media/codec2/sfplugin/utils/Android.bp
+++ b/media/codec2/sfplugin/utils/Android.bp
@@ -15,6 +15,7 @@
srcs: [
"Codec2BufferUtils.cpp",
+ "Codec2CommonUtils.cpp",
"Codec2Mapper.cpp",
],
diff --git a/media/codec2/sfplugin/utils/Codec2CommonUtils.cpp b/media/codec2/sfplugin/utils/Codec2CommonUtils.cpp
new file mode 100644
index 0000000..e432dd5
--- /dev/null
+++ b/media/codec2/sfplugin/utils/Codec2CommonUtils.cpp
@@ -0,0 +1,61 @@
+/*
+ * Copyright 2018, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "Codec2CommonUtils"
+#define ATRACE_TAG ATRACE_TAG_VIDEO
+#include <utils/Log.h>
+
+#include <android/hardware_buffer.h>
+#include <cutils/properties.h>
+#include <media/hardware/HardwareAPI.h>
+#include <system/graphics.h>
+
+#include <C2Debug.h>
+
+#include "Codec2CommonUtils.h"
+
+namespace android {
+
+bool isAtLeastT() {
+ char deviceCodeName[PROP_VALUE_MAX];
+ __system_property_get("ro.build.version.codename", deviceCodeName);
+ return android_get_device_api_level() >= __ANDROID_API_T__ ||
+ !strcmp(deviceCodeName, "Tiramisu");
+}
+
+bool isHalPixelFormatSupported(AHardwareBuffer_Format format) {
+ // HAL_PIXEL_FORMAT_YCBCR_P010 was added in Android T, return false for older versions
+ if (format == (AHardwareBuffer_Format)HAL_PIXEL_FORMAT_YCBCR_P010 && !isAtLeastT()) {
+ return false;
+ }
+
+ const AHardwareBuffer_Desc desc = {
+ .width = 320,
+ .height = 240,
+ .format = format,
+ .layers = 1,
+ .usage = AHARDWAREBUFFER_USAGE_CPU_READ_RARELY | AHARDWAREBUFFER_USAGE_CPU_WRITE_OFTEN |
+ AHARDWAREBUFFER_USAGE_GPU_SAMPLED_IMAGE,
+ .stride = 0,
+ .rfu0 = 0,
+ .rfu1 = 0,
+ };
+
+ return AHardwareBuffer_isSupported(&desc);
+}
+
+} // namespace android
diff --git a/media/codec2/sfplugin/utils/Codec2CommonUtils.h b/media/codec2/sfplugin/utils/Codec2CommonUtils.h
new file mode 100644
index 0000000..dfc8551
--- /dev/null
+++ b/media/codec2/sfplugin/utils/Codec2CommonUtils.h
@@ -0,0 +1,36 @@
+/*
+ * Copyright 2022, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef CODEC2_COMMON_UTILS_H_
+#define CODEC2_COMMON_UTILS_H_
+
+#include <android/hardware_buffer.h>
+
+namespace android {
+
+bool isAtLeastT();
+
+/**
+ * Check if a given pixel format is supported.
+ * enums listed in android_pixel_format_t, android_pixel_format_v1_1_t
+ * and so on can be passed as these enums have an equivalent definition in
+ * AHardwareBuffer_Format as well.
+ */
+bool isHalPixelFormatSupported(AHardwareBuffer_Format format);
+
+} // namespace android
+
+#endif // CODEC2_COMMON_UTILS_H_
diff --git a/media/codec2/sfplugin/utils/Codec2Mapper.cpp b/media/codec2/sfplugin/utils/Codec2Mapper.cpp
index 3a94016..c606d6f 100644
--- a/media/codec2/sfplugin/utils/Codec2Mapper.cpp
+++ b/media/codec2/sfplugin/utils/Codec2Mapper.cpp
@@ -276,6 +276,13 @@
{ C2Config::PROFILE_HEVC_MAIN_10, HEVCProfileMain10HDR10Plus },
};
+ALookup<C2Config::hdr_format_t, int32_t> sHevcHdrFormats = {
+ { C2Config::hdr_format_t::SDR, HEVCProfileMain },
+ { C2Config::hdr_format_t::HLG, HEVCProfileMain10 },
+ { C2Config::hdr_format_t::HDR10, HEVCProfileMain10HDR10 },
+ { C2Config::hdr_format_t::HDR10_PLUS, HEVCProfileMain10HDR10Plus },
+};
+
ALookup<C2Config::level_t, int32_t> sMpeg2Levels = {
{ C2Config::LEVEL_MP2V_LOW, MPEG2LevelLL },
{ C2Config::LEVEL_MP2V_MAIN, MPEG2LevelML },
@@ -365,6 +372,17 @@
{ C2Config::PROFILE_VP9_3, VP9Profile3HDR10Plus },
};
+ALookup<C2Config::hdr_format_t, int32_t> sVp9HdrFormats = {
+ { C2Config::hdr_format_t::SDR, VP9Profile0 },
+ { C2Config::hdr_format_t::SDR, VP9Profile1 },
+ { C2Config::hdr_format_t::HLG, VP9Profile2 },
+ { C2Config::hdr_format_t::HLG, VP9Profile3 },
+ { C2Config::hdr_format_t::HDR10, VP9Profile2HDR },
+ { C2Config::hdr_format_t::HDR10, VP9Profile3HDR },
+ { C2Config::hdr_format_t::HDR10_PLUS, VP9Profile2HDR10Plus },
+ { C2Config::hdr_format_t::HDR10_PLUS, VP9Profile3HDR10Plus },
+};
+
ALookup<C2Config::level_t, int32_t> sAv1Levels = {
{ C2Config::LEVEL_AV1_2, AV1Level2 },
{ C2Config::LEVEL_AV1_2_1, AV1Level21 },
@@ -411,6 +429,13 @@
{ C2Config::PROFILE_AV1_0, AV1ProfileMain10HDR10Plus },
};
+ALookup<C2Config::hdr_format_t, int32_t> sAv1HdrFormats = {
+ { C2Config::hdr_format_t::SDR, AV1ProfileMain8 },
+ { C2Config::hdr_format_t::HLG, AV1ProfileMain10 },
+ { C2Config::hdr_format_t::HDR10, AV1ProfileMain10HDR10 },
+ { C2Config::hdr_format_t::HDR10_PLUS, AV1ProfileMain10HDR10Plus },
+};
+
// HAL_PIXEL_FORMAT_* -> COLOR_Format*
ALookup<uint32_t, int32_t> sPixelFormats = {
{ HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, COLOR_FormatSurface },
@@ -494,6 +519,10 @@
virtual bool simpleMap(int32_t from, C2Config::profile_t *to) {
return sAacProfiles.map(from, to);
}
+ // AAC does not have HDR format
+ virtual bool mapHdrFormat(int32_t, C2Config::hdr_format_t*) override {
+ return false;
+ }
};
struct AvcProfileLevelMapper : ProfileLevelMapperHelper {
@@ -524,6 +553,12 @@
virtual bool simpleMap(int32_t from, C2Config::profile_t *to) {
return sDolbyVisionProfiles.map(from, to);
}
+ // Dolby Vision is always HDR and the profile is fully expressive so use unknown
+ // HDR format
+ virtual bool mapHdrFormat(int32_t, C2Config::hdr_format_t *to) override {
+ *to = C2Config::hdr_format_t::UNKNOWN;
+ return true;
+ }
};
struct H263ProfileLevelMapper : ProfileLevelMapperHelper {
@@ -562,6 +597,9 @@
mIsHdr ? sHevcHdrProfiles.map(from, to) :
sHevcProfiles.map(from, to);
}
+ virtual bool mapHdrFormat(int32_t from, C2Config::hdr_format_t *to) override {
+ return sHevcHdrFormats.map(from, to);
+ }
private:
bool mIsHdr;
@@ -640,6 +678,9 @@
mIsHdr ? sVp9HdrProfiles.map(from, to) :
sVp9Profiles.map(from, to);
}
+ virtual bool mapHdrFormat(int32_t from, C2Config::hdr_format_t *to) override {
+ return sVp9HdrFormats.map(from, to);
+ }
private:
bool mIsHdr;
@@ -669,6 +710,9 @@
mIsHdr ? sAv1HdrProfiles.map(from, to) :
sAv1Profiles.map(from, to);
}
+ virtual bool mapHdrFormat(int32_t from, C2Config::hdr_format_t *to) override {
+ return sAv1HdrFormats.map(from, to);
+ }
private:
bool mIsHdr;
@@ -678,6 +722,13 @@
} // namespace
+// the default mapper is used for media types that do not support HDR
+bool C2Mapper::ProfileLevelMapper::mapHdrFormat(int32_t, C2Config::hdr_format_t *to) {
+ // by default map all (including vendor) profiles to SDR
+ *to = C2Config::hdr_format_t::SDR;
+ return true;
+}
+
// static
std::shared_ptr<C2Mapper::ProfileLevelMapper>
C2Mapper::GetProfileLevelMapper(std::string mediaType) {
diff --git a/media/codec2/sfplugin/utils/Codec2Mapper.h b/media/codec2/sfplugin/utils/Codec2Mapper.h
index 33d305e..c8e9e13 100644
--- a/media/codec2/sfplugin/utils/Codec2Mapper.h
+++ b/media/codec2/sfplugin/utils/Codec2Mapper.h
@@ -34,6 +34,16 @@
virtual bool mapProfile(int32_t, C2Config::profile_t*) = 0;
virtual bool mapLevel(C2Config::level_t, int32_t*) = 0;
virtual bool mapLevel(int32_t, C2Config::level_t*) = 0;
+
+ /**
+ * Mapper method that maps a MediaCodec profile to the supported
+ * HDR format for that profile. Since 10-bit profiles are used for
+ * HLG, this method will return HLG for all 10-bit profiles, but
+ * the caller should also verify that the transfer function is
+ * indeed HLG.
+ */
+ // not an abstract method as we have a default implementation for SDR
+ virtual bool mapHdrFormat(int32_t, C2Config::hdr_format_t *hdr);
virtual ~ProfileLevelMapper() = default;
};
diff --git a/media/codec2/vndk/C2Fence.cpp b/media/codec2/vndk/C2Fence.cpp
index 9c5183e..de18df89 100644
--- a/media/codec2/vndk/C2Fence.cpp
+++ b/media/codec2/vndk/C2Fence.cpp
@@ -16,13 +16,24 @@
//#define LOG_NDEBUG 0
#define LOG_TAG "C2FenceFactory"
+#include <cutils/native_handle.h>
#include <utils/Log.h>
+#include <ui/Fence.h>
#include <C2FenceFactory.h>
#include <C2SurfaceSyncObj.h>
+#define MAX_FENCE_FDS 1
+
class C2Fence::Impl {
public:
+ enum type_t : uint32_t {
+ INVALID_FENCE,
+ NULL_FENCE,
+ SURFACE_FENCE,
+ SYNC_FENCE,
+ };
+
virtual c2_status_t wait(c2_nsecs_t timeoutNs) = 0;
virtual bool valid() const = 0;
@@ -33,9 +44,26 @@
virtual bool isHW() const = 0;
+ virtual type_t type() const = 0;
+
+ /**
+ * Create a native handle for the fence so it can be marshalled.
+ * The native handle must store fence type in the first integer.
+ *
+ * \return a valid native handle if the fence can be marshalled, otherwise return null.
+ */
+ virtual native_handle_t *createNativeHandle() const = 0;
+
virtual ~Impl() = default;
Impl() = default;
+
+ static type_t GetTypeFromNativeHandle(const native_handle_t* nh) {
+ if (nh && nh->numFds >= 0 && nh->numFds <= MAX_FENCE_FDS && nh->numInts > 0) {
+ return static_cast<type_t>(nh->data[nh->numFds]);
+ }
+ return INVALID_FENCE;
+ }
};
c2_status_t C2Fence::wait(c2_nsecs_t timeoutNs) {
@@ -115,6 +143,15 @@
return false;
}
+ virtual type_t type() const {
+ return SURFACE_FENCE;
+ }
+
+ virtual native_handle_t *createNativeHandle() const {
+ ALOG_ASSERT(false, "Cannot create native handle from surface fence");
+ return nullptr;
+ }
+
virtual ~SurfaceFenceImpl() {};
SurfaceFenceImpl(std::shared_ptr<C2SurfaceSyncMemory> syncMem, uint32_t waitId) :
@@ -143,3 +180,119 @@
}
return C2Fence();
}
+
+using namespace android;
+
+class _C2FenceFactory::SyncFenceImpl : public C2Fence::Impl {
+public:
+ virtual c2_status_t wait(c2_nsecs_t timeoutNs) {
+ c2_nsecs_t timeoutMs = timeoutNs / 1000;
+ if (timeoutMs > INT_MAX) {
+ timeoutMs = INT_MAX;
+ }
+
+ switch (mFence->wait((int)timeoutMs)) {
+ case NO_ERROR:
+ return C2_OK;
+ case -ETIME:
+ return C2_TIMED_OUT;
+ default:
+ return C2_CORRUPTED;
+ }
+ }
+
+ virtual bool valid() const {
+ return mFence->getStatus() != Fence::Status::Invalid;
+ }
+
+ virtual bool ready() const {
+ return mFence->getStatus() == Fence::Status::Signaled;
+ }
+
+ virtual int fd() const {
+ return mFence->dup();
+ }
+
+ virtual bool isHW() const {
+ return true;
+ }
+
+ virtual type_t type() const {
+ return SYNC_FENCE;
+ }
+
+ virtual native_handle_t *createNativeHandle() const {
+ native_handle_t* nh = native_handle_create(1, 1);
+ if (!nh) {
+ ALOGE("Failed to allocate native handle for sync fence");
+ return nullptr;
+ }
+ nh->data[0] = fd();
+ nh->data[1] = type();
+ return nh;
+ }
+
+ virtual ~SyncFenceImpl() {};
+
+ SyncFenceImpl(int fenceFd) :
+ mFence(sp<Fence>::make(fenceFd)) {}
+
+ static std::shared_ptr<SyncFenceImpl> CreateFromNativeHandle(const native_handle_t* nh) {
+ if (!nh || nh->numFds != 1 || nh->numInts != 1) {
+ ALOGE("Invalid handle for sync fence");
+ return nullptr;
+ }
+ int fd = dup(nh->data[0]);
+ std::shared_ptr<SyncFenceImpl> p = std::make_shared<SyncFenceImpl>(fd);
+ if (!p) {
+ ALOGE("Failed to allocate sync fence impl");
+ close(fd);
+ }
+ return p;
+ }
+
+private:
+ const sp<Fence> mFence;
+};
+
+C2Fence _C2FenceFactory::CreateSyncFence(int fenceFd) {
+ std::shared_ptr<C2Fence::Impl> p;
+ if (fenceFd >= 0) {
+ p = std::make_shared<_C2FenceFactory::SyncFenceImpl>(fenceFd);
+ if (!p) {
+ ALOGE("Failed to allocate sync fence impl");
+ close(fenceFd);
+ }
+ if (!p->valid()) {
+ p.reset();
+ }
+ } else {
+ ALOGE("Create sync fence from invalid fd");
+ }
+ return C2Fence(p);
+}
+
+native_handle_t* _C2FenceFactory::CreateNativeHandle(const C2Fence& fence) {
+ return fence.mImpl ? fence.mImpl->createNativeHandle() : nullptr;
+}
+
+C2Fence _C2FenceFactory::CreateFromNativeHandle(const native_handle_t* handle) {
+ if (!handle) {
+ return C2Fence();
+ }
+ C2Fence::Impl::type_t type = C2Fence::Impl::GetTypeFromNativeHandle(handle);
+ std::shared_ptr<C2Fence::Impl> p;
+ switch (type) {
+ case C2Fence::Impl::SYNC_FENCE:
+ p = SyncFenceImpl::CreateFromNativeHandle(handle);
+ break;
+ default:
+ ALOGW("Unsupported fence type %d", type);
+ break;
+ }
+ if (p && !p->valid()) {
+ p.reset();
+ }
+ return C2Fence(p);
+}
+
diff --git a/media/codec2/vndk/include/C2FenceFactory.h b/media/codec2/vndk/include/C2FenceFactory.h
index d4bed26..4944115 100644
--- a/media/codec2/vndk/include/C2FenceFactory.h
+++ b/media/codec2/vndk/include/C2FenceFactory.h
@@ -28,6 +28,7 @@
struct _C2FenceFactory {
class SurfaceFenceImpl;
+ class SyncFenceImpl;
/*
* Create C2Fence for BufferQueueBased blockpool.
@@ -38,6 +39,30 @@
static C2Fence CreateSurfaceFence(
std::shared_ptr<C2SurfaceSyncMemory> syncMem,
uint32_t waitId);
+
+ /*
+ * Create C2Fence from a fence file fd.
+ *
+ * \param fenceFd Fence file descriptor.
+ * It will be owned and closed by the returned fence object.
+ */
+ static C2Fence CreateSyncFence(int fenceFd);
+
+ /**
+ * Create a native handle from fence for marshalling
+ *
+ * \return a non-null pointer if the fence can be marshalled, otherwise return nullptr
+ */
+ static native_handle_t* CreateNativeHandle(const C2Fence& fence);
+
+ /*
+ * Create C2Fence from a native handle.
+ *
+ * \param handle A native handle representing a fence
+ * The fd in the native handle will be duplicated, so the caller will
+ * still own the handle and have to close it.
+ */
+ static C2Fence CreateFromNativeHandle(const native_handle_t* handle);
};
diff --git a/media/extractors/mp4/MPEG4Extractor.cpp b/media/extractors/mp4/MPEG4Extractor.cpp
index eccbf46..5ca874e 100644
--- a/media/extractors/mp4/MPEG4Extractor.cpp
+++ b/media/extractors/mp4/MPEG4Extractor.cpp
@@ -1969,26 +1969,8 @@
}
if (chunk_type == FOURCC("fLaC")) {
-
- // From https://github.com/xiph/flac/blob/master/doc/isoflac.txt
- // 4 for mime, 4 for blockType and BlockLen, 34 for metadata
- uint8_t flacInfo[4 + 4 + 34];
- // skipping dFla, version
- data_offset += sizeof(buffer) + 12;
- size_t flacOffset = 4;
- // Add flaC header mime type to CSD
- strncpy((char *)flacInfo, "fLaC", 4);
- if (mDataSource->readAt(
- data_offset, flacInfo + flacOffset, sizeof(flacInfo) - flacOffset) <
- (ssize_t)sizeof(flacInfo) - flacOffset) {
- return ERROR_IO;
- }
- data_offset += sizeof(flacInfo) - flacOffset;
-
- AMediaFormat_setBuffer(mLastTrack->meta, AMEDIAFORMAT_KEY_CSD_0, flacInfo,
- sizeof(flacInfo));
+ data_offset += sizeof(buffer);
*offset = data_offset;
- CHECK_EQ(*offset, stop_offset);
}
while (*offset < stop_offset) {
@@ -2521,6 +2503,35 @@
break;
}
+ case FOURCC("dfLa"):
+ {
+ *offset += chunk_size;
+
+ // From https://github.com/xiph/flac/blob/master/doc/isoflac.txt
+ // 4 for mediaType, 4 for blockType and BlockLen, 34 for metadata
+ uint8_t flacInfo[4 + 4 + 34];
+
+ if (chunk_data_size != sizeof(flacInfo)) {
+ return ERROR_MALFORMED;
+ }
+
+ data_offset += 4;
+ size_t flacOffset = 4;
+ // Add flaC header mediaType to CSD
+ strncpy((char *)flacInfo, "fLaC", 4);
+
+ ssize_t bytesToRead = sizeof(flacInfo) - flacOffset;
+ if (mDataSource->readAt(
+ data_offset, flacInfo + flacOffset, bytesToRead) < bytesToRead) {
+ return ERROR_IO;
+ }
+
+ data_offset += bytesToRead;
+ AMediaFormat_setBuffer(mLastTrack->meta, AMEDIAFORMAT_KEY_CSD_0, flacInfo,
+ sizeof(flacInfo));
+ break;
+ }
+
case FOURCC("avcC"):
{
*offset += chunk_size;
diff --git a/media/libstagefright/MediaCodec.cpp b/media/libstagefright/MediaCodec.cpp
index 382f4a7..6f1974e 100644
--- a/media/libstagefright/MediaCodec.cpp
+++ b/media/libstagefright/MediaCodec.cpp
@@ -4099,26 +4099,29 @@
break;
}
- if (asyncNotify != nullptr) {
- if (mSurface != NULL) {
- if (!mReleaseSurface) {
- uint64_t usage = 0;
- if (mSurface->getConsumerUsage(&usage) != OK) {
- usage = 0;
- }
- mReleaseSurface.reset(new ReleaseSurface(usage));
+ bool forceSync = false;
+ if (asyncNotify != nullptr && mSurface != NULL) {
+ if (!mReleaseSurface) {
+ uint64_t usage = 0;
+ if (mSurface->getConsumerUsage(&usage) != OK) {
+ usage = 0;
}
- if (mSurface != mReleaseSurface->getSurface()) {
- status_t err = connectToSurface(mReleaseSurface->getSurface());
- ALOGW_IF(err != OK, "error connecting to release surface: err = %d", err);
- if (err == OK && !(mFlags & kFlagUsesSoftwareRenderer)) {
- err = mCodec->setSurface(mReleaseSurface->getSurface());
- ALOGW_IF(err != OK, "error setting release surface: err = %d", err);
- }
- if (err == OK) {
- (void)disconnectFromSurface();
- mSurface = mReleaseSurface->getSurface();
- }
+ mReleaseSurface.reset(new ReleaseSurface(usage));
+ }
+ if (mSurface != mReleaseSurface->getSurface()) {
+ status_t err = connectToSurface(mReleaseSurface->getSurface());
+ ALOGW_IF(err != OK, "error connecting to release surface: err = %d", err);
+ if (err == OK && !(mFlags & kFlagUsesSoftwareRenderer)) {
+ err = mCodec->setSurface(mReleaseSurface->getSurface());
+ ALOGW_IF(err != OK, "error setting release surface: err = %d", err);
+ }
+ if (err == OK) {
+ (void)disconnectFromSurface();
+ mSurface = mReleaseSurface->getSurface();
+ } else {
+ // We were not able to switch the surface, so force
+ // synchronous release.
+ forceSync = true;
}
}
}
@@ -4142,8 +4145,10 @@
}
if (asyncNotify != nullptr) {
- mResourceManagerProxy->markClientForPendingRemoval();
- postPendingRepliesAndDeferredMessages("kWhatRelease:async");
+ if (!forceSync) {
+ mResourceManagerProxy->markClientForPendingRemoval();
+ postPendingRepliesAndDeferredMessages("kWhatRelease:async");
+ }
asyncNotifyPost.clear();
mAsyncReleaseCompleteNotification = asyncNotify;
}
diff --git a/media/libstagefright/Utils.cpp b/media/libstagefright/Utils.cpp
index 4b6470a..a443ed9 100644
--- a/media/libstagefright/Utils.cpp
+++ b/media/libstagefright/Utils.cpp
@@ -126,14 +126,10 @@
}
}
-static bool isHdr(const sp<AMessage> &format) {
- // if CSD specifies HDR transfer(s), we assume HDR. Otherwise, if it specifies non-HDR
- // transfers, we must assume non-HDR. This is because CSD trumps any color-transfer key
- // in the format.
- int32_t isHdr;
- if (format->findInt32("android._is-hdr", &isHdr)) {
- return isHdr;
- }
+/**
+ * Returns true if, and only if, the given format corresponds to HDR10 or HDR10+.
+ */
+static bool isHdr10or10Plus(const sp<AMessage> &format) {
// if user/container supplied HDR static info without transfer set, assume true
if ((format->contains("hdr-static-info") || format->contains("hdr10-plus-info"))
@@ -143,8 +139,7 @@
// otherwise, verify that an HDR transfer function is set
int32_t transfer;
if (format->findInt32("color-transfer", &transfer)) {
- return transfer == ColorUtils::kColorTransferST2084
- || transfer == ColorUtils::kColorTransferHLG;
+ return transfer == ColorUtils::kColorTransferST2084;
}
return false;
}
@@ -419,8 +414,12 @@
}
// bump to HDR profile
- if (isHdr(format) && codecProfile == HEVCProfileMain10) {
- codecProfile = HEVCProfileMain10HDR10;
+ if (isHdr10or10Plus(format) && codecProfile == HEVCProfileMain10) {
+ if (format->contains("hdr10-plus-info")) {
+ codecProfile = HEVCProfileMain10HDR10Plus;
+ } else {
+ codecProfile = HEVCProfileMain10HDR10;
+ }
}
format->setInt32("profile", codecProfile);
@@ -615,16 +614,25 @@
{ 3, VP9Profile3 },
};
- const static ALookup<int32_t, int32_t> toHdr {
+ const static ALookup<int32_t, int32_t> toHdr10 {
{ VP9Profile2, VP9Profile2HDR },
{ VP9Profile3, VP9Profile3HDR },
};
+ const static ALookup<int32_t, int32_t> toHdr10Plus {
+ { VP9Profile2, VP9Profile2HDR10Plus },
+ { VP9Profile3, VP9Profile3HDR10Plus },
+ };
+
int32_t profile;
if (profiles.map(data[0], &profile)) {
// convert to HDR profile
- if (isHdr(format)) {
- toHdr.lookup(profile, &profile);
+ if (isHdr10or10Plus(format)) {
+ if (format->contains("hdr10-plus-info")) {
+ toHdr10Plus.lookup(profile, &profile);
+ } else {
+ toHdr10.lookup(profile, &profile);
+ }
}
format->setInt32("profile", profile);
@@ -684,7 +692,7 @@
int32_t profile;
if (profiles.map(std::make_pair(highBitDepth, profileData), &profile)) {
// bump to HDR profile
- if (isHdr(format) && profile == AV1ProfileMain10) {
+ if (isHdr10or10Plus(format) && profile == AV1ProfileMain10) {
if (format->contains("hdr10-plus-info")) {
profile = AV1ProfileMain10HDR10Plus;
} else {
diff --git a/services/camera/libcameraservice/device3/Camera3Device.cpp b/services/camera/libcameraservice/device3/Camera3Device.cpp
index de418da..f7fd5d6 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.cpp
+++ b/services/camera/libcameraservice/device3/Camera3Device.cpp
@@ -4091,6 +4091,26 @@
// Verify buffer caches
std::vector<uint64_t> bufIds(offlineStream.circulatingBufferIds.begin(),
offlineStream.circulatingBufferIds.end());
+ {
+ // Due to timing it is possible that we may not have any remaining pending capture
+ // requests that can update the caches on Hal side. This can result in buffer cache
+ // mismatch between the service and the Hal and must be accounted for.
+ std::lock_guard<std::mutex> l(mFreedBuffersLock);
+ for (const auto& it : mFreedBuffers) {
+ if (it.first == id) {
+ ALOGV("%s: stream ID %d buffer id %" PRIu64 " cache removal still pending",
+ __FUNCTION__, id, it.second);
+ const auto& cachedEntry = std::find(bufIds.begin(), bufIds.end(), it.second);
+ if (cachedEntry != bufIds.end()) {
+ bufIds.erase(cachedEntry);
+ } else {
+ ALOGE("%s: stream ID %d buffer id %" PRIu64 " cache removal still pending "
+ "however buffer is no longer in the offline stream info!",
+ __FUNCTION__, id, it.second);
+ }
+ }
+ }
+ }
if (!verifyBufferIds(id, bufIds)) {
ALOGE("%s: stream ID %d buffer cache records mismatch!", __FUNCTION__, id);
return UNKNOWN_ERROR;