Merge "Spatializer: Fix spurious sensor setting" into tm-dev
diff --git a/drm/libmediadrm/CryptoHalAidl.cpp b/drm/libmediadrm/CryptoHalAidl.cpp
index bda664a..8b9d1de 100644
--- a/drm/libmediadrm/CryptoHalAidl.cpp
+++ b/drm/libmediadrm/CryptoHalAidl.cpp
@@ -219,6 +219,7 @@
     }
 
     mPlugin.reset();
+    mInitCheck = NO_INIT;
     return OK;
 }
 
@@ -372,6 +373,10 @@
 
     Mutex::Autolock autoLock(mLock);
 
+    if (mInitCheck != OK) {
+        return -1;
+    }
+
     int32_t seqNum = mHeapSeqNum++;
     uint32_t bufferId = static_cast<uint32_t>(seqNum);
     mHeapSizes.add(seqNum, heap->size());
diff --git a/drm/libmediadrm/CryptoHalHidl.cpp b/drm/libmediadrm/CryptoHalHidl.cpp
index a290704..55364b5 100644
--- a/drm/libmediadrm/CryptoHalHidl.cpp
+++ b/drm/libmediadrm/CryptoHalHidl.cpp
@@ -190,6 +190,7 @@
 
     mPlugin.clear();
     mPluginV1_2.clear();
+    mInitCheck = NO_INIT;
     return OK;
 }
 
@@ -221,6 +222,10 @@
 
     Mutex::Autolock autoLock(mLock);
 
+    if (mInitCheck != OK) {
+        return -1;
+    }
+
     int32_t seqNum = mHeapSeqNum++;
     uint32_t bufferId = static_cast<uint32_t>(seqNum);
     mHeapSizes.add(seqNum, heap->size());
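
The two CryptoHal hunks above apply the same fix: destroyPlugin() now drops the init state, and the heap-registration path bails out instead of touching a dead plugin. A minimal, self-contained sketch of that guard pattern follows; the class, the kNoInit sentinel, and the return values are illustrative stand-ins, not the real CryptoHal types.

    #include <mutex>

    // Illustrative stand-in for the CryptoHal guard, not the real classes.
    struct PluginHolder {
        static constexpr int kOk = 0;
        static constexpr int kNoInit = -1;   // stands in for NO_INIT

        std::mutex mLock;
        int mInitCheck = kNoInit;
        int mHeapSeqNum = 0;

        int destroyPlugin() {
            std::lock_guard<std::mutex> lock(mLock);
            mInitCheck = kNoInit;            // mirrors "mInitCheck = NO_INIT"
            return kOk;
        }

        int registerHeap() {
            std::lock_guard<std::mutex> lock(mLock);
            if (mInitCheck != kOk) {         // mirrors the new early return
                return -1;                   // invalid seqNum, as in the patch
            }
            return mHeapSeqNum++;            // normal path: hand out a seqNum
        }
    };
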
diff --git a/media/codec2/core/include/C2Config.h b/media/codec2/core/include/C2Config.h
index 07c2864..31840a2 100644
--- a/media/codec2/core/include/C2Config.h
+++ b/media/codec2/core/include/C2Config.h
@@ -60,6 +60,7 @@
     enum drc_effect_type_t : int32_t;       ///< DRC effect type
     enum drc_album_mode_t : int32_t;        ///< DRC album mode
     enum hdr_dynamic_metadata_type_t : uint32_t;  ///< HDR dynamic metadata type
+    enum hdr_format_t : uint32_t;           ///< HDR format
     enum intra_refresh_mode_t : uint32_t;   ///< intra refresh modes
     enum level_t : uint32_t;                ///< coding level
     enum ordinal_key_t : uint32_t;          ///< work ordering keys
@@ -192,10 +193,9 @@
     kParamIndexPictureType,
     // deprecated
     kParamIndexHdr10PlusMetadata,
-
     kParamIndexPictureQuantization,
-
     kParamIndexHdrDynamicMetadata,
+    kParamIndexHdrFormat,
 
     /* ------------------------------------ video components ------------------------------------ */
 
@@ -1667,6 +1667,34 @@
 constexpr char C2_PARAMKEY_INPUT_HDR_DYNAMIC_INFO[] = "input.hdr-dynamic-info";
 constexpr char C2_PARAMKEY_OUTPUT_HDR_DYNAMIC_INFO[] = "output.hdr-dynamic-info";
 
+/**
+ * HDR Format
+ */
+C2ENUM(C2Config::hdr_format_t, uint32_t,
+    UNKNOWN,     ///< HDR format not known (default)
+    SDR,         ///< not HDR (SDR)
+    HLG,         ///< HLG
+    HDR10,       ///< HDR10
+    HDR10_PLUS,  ///< HDR10+
+);
+
+/**
+ * HDR Format Info
+ *
+ * This information may be present during configuration to allow encoders to
+ * prepare encoding certain HDR formats. When this information is not present
+ * before start, encoders should determine the HDR format based on the available
+ * HDR metadata on the first input frame.
+ *
+ * While this information is optional, it is not a hint. When present, encoders
+ * that do not support dynamic reconfiguration do not need to switch to the HDR
+ * format based on the metadata on the first input frame.
+ */
+typedef C2StreamParam<C2Info, C2SimpleValueStruct<C2EasyEnum<C2Config::hdr_format_t>>,
+                kParamIndexHdrFormat>
+        C2StreamHdrFormatInfo;
+constexpr char C2_PARAMKEY_HDR_FORMAT[] = "coded.hdr-format";
+
 /* ------------------------------------ block-based coding ----------------------------------- */
 
 /**
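
For context, a hedged sketch of how a client could declare the new C2StreamHdrFormatInfo before start; it assumes the Codec2 headers from this change, the usual simple-value-param constructor shape, and a C2ComponentInterface named intf. Error handling is elided.

    #include <memory>
    #include <vector>
    #include <C2Component.h>
    #include <C2Config.h>

    // Declare that the encoder output will be HDR10 (sketch only).
    c2_status_t declareHdr10(const std::shared_ptr<C2ComponentInterface> &intf) {
        C2StreamHdrFormatInfo::output hdrFormat(0u /* stream */, C2Config::hdr_format_t::HDR10);
        std::vector<std::unique_ptr<C2SettingResult>> failures;
        return intf->config_vb({&hdrFormat}, C2_MAY_BLOCK, &failures);
    }
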
diff --git a/media/codec2/hidl/1.0/utils/types.cpp b/media/codec2/hidl/1.0/utils/types.cpp
index 72f7c43..5c24bd7 100644
--- a/media/codec2/hidl/1.0/utils/types.cpp
+++ b/media/codec2/hidl/1.0/utils/types.cpp
@@ -26,6 +26,7 @@
 #include <C2BlockInternal.h>
 #include <C2Buffer.h>
 #include <C2Component.h>
+#include <C2FenceFactory.h>
 #include <C2Param.h>
 #include <C2ParamInternal.h>
 #include <C2PlatformSupport.h>
@@ -759,17 +760,14 @@
 // Note: File descriptors are not duplicated. The original file descriptor must
 // not be closed before the transaction is complete.
 bool objcpy(hidl_handle* d, const C2Fence& s) {
-    (void)s; // TODO: implement s.fd()
-    int fenceFd = -1;
     d->setTo(nullptr);
-    if (fenceFd >= 0) {
-        native_handle_t *handle = native_handle_create(1, 0);
-        if (!handle) {
-            LOG(ERROR) << "Failed to create a native handle.";
-            return false;
-        }
-        handle->data[0] = fenceFd;
+    native_handle_t* handle = _C2FenceFactory::CreateNativeHandle(s);
+    if (handle) {
         d->setTo(handle, true /* owns */);
+//  } else if (!s.ready()) {
+//      // TODO: we should wait for unmarshallable fences but this may not be
+//      // the best place for it. We can safely ignore here as at this time
+//      // all fences used here are marshallable.
     }
     return true;
 }
@@ -1184,9 +1182,8 @@
 // Note: File descriptors are not duplicated. The original file descriptor must
 // not be closed before the transaction is complete.
 bool objcpy(C2Fence* d, const hidl_handle& s) {
-    // TODO: Implement.
-    (void)s;
-    *d = C2Fence();
+    const native_handle_t* handle = s.getNativeHandle();
+    *d = _C2FenceFactory::CreateFromNativeHandle(handle);
     return true;
 }
 
diff --git a/media/codec2/sfplugin/CCodec.cpp b/media/codec2/sfplugin/CCodec.cpp
index 19bb206..93b79c3 100644
--- a/media/codec2/sfplugin/CCodec.cpp
+++ b/media/codec2/sfplugin/CCodec.cpp
@@ -1435,7 +1435,7 @@
                 int64_t blockUsage =
                     usage.value | C2MemoryUsage::CPU_READ | C2MemoryUsage::CPU_WRITE;
                 std::shared_ptr<C2GraphicBlock> block = FetchGraphicBlock(
-                        width, height, pixelFormat, blockUsage, {comp->getName()});
+                        width, height, componentColorFormat, blockUsage, {comp->getName()});
                 sp<GraphicBlockBuffer> buffer;
                 if (block) {
                     buffer = GraphicBlockBuffer::Allocate(
diff --git a/media/codec2/sfplugin/CCodecBufferChannel.cpp b/media/codec2/sfplugin/CCodecBufferChannel.cpp
index 159e885..cb2ffa0 100644
--- a/media/codec2/sfplugin/CCodecBufferChannel.cpp
+++ b/media/codec2/sfplugin/CCodecBufferChannel.cpp
@@ -880,6 +880,19 @@
         return UNKNOWN_ERROR;
     }
     const C2ConstGraphicBlock &block = blocks.front();
+    C2Fence c2fence = block.fence();
+    sp<Fence> fence = Fence::NO_FENCE;
+    // TODO: it's not sufficient to just check isHW() and then construct android::fence from it.
+    // Once C2Fence::type() is added, check the exact C2Fence type
+    if (c2fence.isHW()) {
+        int fenceFd = c2fence.fd();
+        fence = sp<Fence>::make(fenceFd);
+        if (!fence) {
+            ALOGE("[%s] Failed to allocate a fence", mName);
+            close(fenceFd);
+            return NO_MEMORY;
+        }
+    }
 
     // TODO: revisit this after C2Fence implementation.
     android::IGraphicBufferProducer::QueueBufferInput qbi(
@@ -892,7 +905,7 @@
                  blocks.front().crop().bottom()),
             videoScalingMode,
             transform,
-            Fence::NO_FENCE, 0);
+            fence, 0);
     if (hdrStaticInfo || hdrDynamicInfo) {
         HdrMetadata hdr;
         if (hdrStaticInfo) {
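
The hunk above threads the block's fence into the queue call instead of always passing Fence::NO_FENCE. The ownership contract it relies on, in a small sketch (the function name is illustrative): C2Fence::fd() returns a duplicated descriptor for sync fences (see SyncFenceImpl::fd() added in C2Fence.cpp below), and android::Fence takes ownership of the fd it is constructed with, so the descriptor is closed exactly once.

    #include <ui/Fence.h>
    #include <utils/StrongPointer.h>

    android::sp<android::Fence> makeRenderFence(int dupedFenceFd) {
        if (dupedFenceFd < 0) {
            return android::Fence::NO_FENCE;                 // nothing to wait on
        }
        // Fence takes ownership of dupedFenceFd and closes it when released.
        return android::sp<android::Fence>::make(dupedFenceFd);
    }
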
diff --git a/media/codec2/sfplugin/CCodecConfig.cpp b/media/codec2/sfplugin/CCodecConfig.cpp
index 87fa917..316c70d 100644
--- a/media/codec2/sfplugin/CCodecConfig.cpp
+++ b/media/codec2/sfplugin/CCodecConfig.cpp
@@ -513,6 +513,9 @@
     add(ConfigMapper("cta861.max-fall", C2_PARAMKEY_HDR_STATIC_INFO, "max-fall")
         .limitTo((D::VIDEO | D::IMAGE) & D::RAW));
 
+    add(ConfigMapper(C2_PARAMKEY_HDR_FORMAT, C2_PARAMKEY_HDR_FORMAT, "value")
+        .limitTo((D::VIDEO | D::IMAGE) & D::CODED & D::CONFIG));
+
     add(ConfigMapper(std::string(KEY_FEATURE_) + FEATURE_SecurePlayback,
                      C2_PARAMKEY_SECURE_MODE, "value"));
 
@@ -1624,6 +1627,27 @@
                 params->setFloat(C2_PARAMKEY_INPUT_TIME_STRETCH, captureRate / frameRate);
             }
         }
+
+        // add HDR format for video encoding
+        if (configDomain == IS_CONFIG) {
+            // don't assume here that transfer is set for HDR, only require it for HLG
+            int transfer = 0;
+            params->findInt32(KEY_COLOR_TRANSFER, &transfer);
+
+            int profile;
+            if (params->findInt32(KEY_PROFILE, &profile)) {
+                std::shared_ptr<C2Mapper::ProfileLevelMapper> mapper =
+                    C2Mapper::GetProfileLevelMapper(mCodingMediaType);
+                C2Config::hdr_format_t c2 = C2Config::hdr_format_t::UNKNOWN;
+                if (mapper && mapper->mapHdrFormat(profile, &c2)) {
+                    if (c2 == C2Config::hdr_format_t::HLG &&
+                        transfer != COLOR_TRANSFER_HLG) {
+                        c2 = C2Config::hdr_format_t::UNKNOWN;
+                    }
+                    params->setInt32(C2_PARAMKEY_HDR_FORMAT, c2);
+                }
+            }
+        }
     }
 
     {   // reflect temporal layering into a binary blob
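
The new IS_CONFIG block maps the configured profile to an HDR format but only trusts the HLG answer when KEY_COLOR_TRANSFER really is HLG (10-bit profiles default to HLG in the mappers below). A standalone restatement of that decision, with illustrative types rather than the framework's own:

    enum class HdrFormat { UNKNOWN, SDR, HLG, HDR10, HDR10_PLUS };

    HdrFormat deriveHdrFormat(HdrFormat fromProfile, bool transferIsHlg) {
        // A 10-bit profile alone is not proof of HLG; require the matching transfer.
        if (fromProfile == HdrFormat::HLG && !transferIsHlg) {
            return HdrFormat::UNKNOWN;
        }
        return fromProfile;
    }
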
diff --git a/media/codec2/sfplugin/utils/Codec2Mapper.cpp b/media/codec2/sfplugin/utils/Codec2Mapper.cpp
index 93f29ca..89c9a07 100644
--- a/media/codec2/sfplugin/utils/Codec2Mapper.cpp
+++ b/media/codec2/sfplugin/utils/Codec2Mapper.cpp
@@ -276,6 +276,13 @@
     { C2Config::PROFILE_HEVC_MAIN_10, HEVCProfileMain10HDR10Plus },
 };
 
+ALookup<C2Config::hdr_format_t, int32_t> sHevcHdrFormats = {
+    { C2Config::hdr_format_t::SDR, HEVCProfileMain },
+    { C2Config::hdr_format_t::HLG, HEVCProfileMain10 },
+    { C2Config::hdr_format_t::HDR10, HEVCProfileMain10HDR10 },
+    { C2Config::hdr_format_t::HDR10_PLUS, HEVCProfileMain10HDR10Plus },
+};
+
 ALookup<C2Config::level_t, int32_t> sMpeg2Levels = {
     { C2Config::LEVEL_MP2V_LOW,         MPEG2LevelLL },
     { C2Config::LEVEL_MP2V_MAIN,        MPEG2LevelML },
@@ -365,6 +372,17 @@
     { C2Config::PROFILE_VP9_3, VP9Profile3HDR10Plus },
 };
 
+ALookup<C2Config::hdr_format_t, int32_t> sVp9HdrFormats = {
+    { C2Config::hdr_format_t::SDR, VP9Profile0 },
+    { C2Config::hdr_format_t::SDR, VP9Profile1 },
+    { C2Config::hdr_format_t::HLG, VP9Profile2 },
+    { C2Config::hdr_format_t::HLG, VP9Profile3 },
+    { C2Config::hdr_format_t::HDR10, VP9Profile2HDR },
+    { C2Config::hdr_format_t::HDR10, VP9Profile3HDR },
+    { C2Config::hdr_format_t::HDR10_PLUS, VP9Profile2HDR10Plus },
+    { C2Config::hdr_format_t::HDR10_PLUS, VP9Profile3HDR10Plus },
+};
+
 ALookup<C2Config::level_t, int32_t> sAv1Levels = {
     { C2Config::LEVEL_AV1_2,    AV1Level2  },
     { C2Config::LEVEL_AV1_2_1,  AV1Level21 },
@@ -411,6 +429,13 @@
     { C2Config::PROFILE_AV1_0, AV1ProfileMain10HDR10Plus },
 };
 
+ALookup<C2Config::hdr_format_t, int32_t> sAv1HdrFormats = {
+    { C2Config::hdr_format_t::SDR, AV1ProfileMain8 },
+    { C2Config::hdr_format_t::HLG, AV1ProfileMain10 },
+    { C2Config::hdr_format_t::HDR10, AV1ProfileMain10HDR10 },
+    { C2Config::hdr_format_t::HDR10_PLUS, AV1ProfileMain10HDR10Plus },
+};
+
 // HAL_PIXEL_FORMAT_* -> COLOR_Format*
 ALookup<uint32_t, int32_t> sPixelFormats = {
     { HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, COLOR_FormatSurface },
@@ -487,6 +512,10 @@
     virtual bool simpleMap(int32_t from, C2Config::profile_t *to) {
         return sAacProfiles.map(from, to);
     }
+    // AAC does not have HDR format
+    virtual bool mapHdrFormat(int32_t, C2Config::hdr_format_t*) override {
+        return false;
+    }
 };
 
 struct AvcProfileLevelMapper : ProfileLevelMapperHelper {
@@ -517,6 +546,12 @@
     virtual bool simpleMap(int32_t from, C2Config::profile_t *to) {
         return sDolbyVisionProfiles.map(from, to);
     }
+    // Dolby Vision is always HDR and the profile is fully expressive so use unknown
+    // HDR format
+    virtual bool mapHdrFormat(int32_t, C2Config::hdr_format_t *to) override {
+        *to = C2Config::hdr_format_t::UNKNOWN;
+        return true;
+    }
 };
 
 struct H263ProfileLevelMapper : ProfileLevelMapperHelper {
@@ -555,6 +590,9 @@
                      mIsHdr ? sHevcHdrProfiles.map(from, to) :
                               sHevcProfiles.map(from, to);
     }
+    virtual bool mapHdrFormat(int32_t from, C2Config::hdr_format_t *to) override {
+        return sHevcHdrFormats.map(from, to);
+    }
 
 private:
     bool mIsHdr;
@@ -633,6 +671,9 @@
                      mIsHdr ? sVp9HdrProfiles.map(from, to) :
                               sVp9Profiles.map(from, to);
     }
+    virtual bool mapHdrFormat(int32_t from, C2Config::hdr_format_t *to) override {
+        return sVp9HdrFormats.map(from, to);
+    }
 
 private:
     bool mIsHdr;
@@ -662,6 +703,9 @@
                           mIsHdr ? sAv1HdrProfiles.map(from, to) :
                                    sAv1Profiles.map(from, to);
     }
+    virtual bool mapHdrFormat(int32_t from, C2Config::hdr_format_t *to) override {
+        return sAv1HdrFormats.map(from, to);
+    }
 
 private:
     bool mIsHdr;
@@ -671,6 +715,13 @@
 
 } // namespace
 
+// the default mapper is used for media types that do not support HDR
+bool C2Mapper::ProfileLevelMapper::mapHdrFormat(int32_t, C2Config::hdr_format_t *to) {
+    // by default map all (including vendor) profiles to SDR
+    *to = C2Config::hdr_format_t::SDR;
+    return true;
+}
+
 // static
 std::shared_ptr<C2Mapper::ProfileLevelMapper>
 C2Mapper::GetProfileLevelMapper(std::string mediaType) {
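
A usage sketch for the new mapHdrFormat() hook; the include path and the caller's media-type string are assumptions, and as the header comment notes, an HLG result still needs a separate transfer-function check.

    #include <memory>
    #include <string>
    #include <Codec2Mapper.h>   // include path assumed

    C2Config::hdr_format_t hdrFormatForProfile(const std::string &mediaType, int32_t sdkProfile) {
        std::shared_ptr<android::C2Mapper::ProfileLevelMapper> mapper =
                android::C2Mapper::GetProfileLevelMapper(mediaType);
        C2Config::hdr_format_t format = C2Config::hdr_format_t::UNKNOWN;
        if (mapper && mapper->mapHdrFormat(sdkProfile, &format)) {
            return format;    // e.g. an HEVC Main10 profile maps to HLG here
        }
        return C2Config::hdr_format_t::UNKNOWN;
    }
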
diff --git a/media/codec2/sfplugin/utils/Codec2Mapper.h b/media/codec2/sfplugin/utils/Codec2Mapper.h
index 33d305e..c8e9e13 100644
--- a/media/codec2/sfplugin/utils/Codec2Mapper.h
+++ b/media/codec2/sfplugin/utils/Codec2Mapper.h
@@ -34,6 +34,16 @@
             virtual bool mapProfile(int32_t, C2Config::profile_t*) = 0;
             virtual bool mapLevel(C2Config::level_t, int32_t*) = 0;
             virtual bool mapLevel(int32_t, C2Config::level_t*) = 0;
+
+            /**
+             * Mapper method that maps a MediaCodec profile to the supported
+             * HDR format for that profile. Since 10-bit profiles are used for
+             * HLG, this method will return HLG for all 10-bit profiles, but
+             * the caller should also verify that the transfer function is
+             * indeed HLG.
+             */
+            // not an abstract method as we have a default implementation for SDR
+            virtual bool mapHdrFormat(int32_t, C2Config::hdr_format_t *hdr);
             virtual ~ProfileLevelMapper() = default;
         };
 
diff --git a/media/codec2/vndk/C2Fence.cpp b/media/codec2/vndk/C2Fence.cpp
index 9c5183e..6f98d11 100644
--- a/media/codec2/vndk/C2Fence.cpp
+++ b/media/codec2/vndk/C2Fence.cpp
@@ -16,13 +16,24 @@
 
 //#define LOG_NDEBUG 0
 #define LOG_TAG "C2FenceFactory"
+#include <cutils/native_handle.h>
 #include <utils/Log.h>
+#include <ui/Fence.h>
 
 #include <C2FenceFactory.h>
 #include <C2SurfaceSyncObj.h>
 
+#define MAX_FENCE_FDS 1
+
 class C2Fence::Impl {
 public:
+    enum type_t : uint32_t {
+        INVALID_FENCE,
+        NULL_FENCE,
+        SURFACE_FENCE,
+        SYNC_FENCE,
+    };
+
     virtual c2_status_t wait(c2_nsecs_t timeoutNs) = 0;
 
     virtual bool valid() const = 0;
@@ -33,9 +44,26 @@
 
     virtual bool isHW() const = 0;
 
+    virtual type_t type() const = 0;
+
+    /**
+     * Create a native handle for the fence so it can be marshalled.
+     * The native handle must store fence type in the first integer.
+     *
+     * \return a valid native handle if the fence can be marshalled, otherwise return null.
+     */
+    virtual native_handle_t *createNativeHandle() const = 0;
+
     virtual ~Impl() = default;
 
     Impl() = default;
+
+    static type_t GetTypeFromNativeHandle(const native_handle_t* nh) {
+        if (nh && nh->numFds >= 0 && nh->numFds <= MAX_FENCE_FDS && nh->numInts > 0) {
+            return static_cast<type_t>(nh->data[nh->numFds]);
+        }
+        return INVALID_FENCE;
+    }
 };
 
 c2_status_t C2Fence::wait(c2_nsecs_t timeoutNs) {
@@ -115,6 +143,15 @@
         return false;
     }
 
+    virtual type_t type() const {
+        return SURFACE_FENCE;
+    }
+
+    virtual native_handle_t *createNativeHandle() const {
+        ALOG_ASSERT(false, "Cannot create native handle from surface fence");
+        return nullptr;
+    }
+
     virtual ~SurfaceFenceImpl() {};
 
     SurfaceFenceImpl(std::shared_ptr<C2SurfaceSyncMemory> syncMem, uint32_t waitId) :
@@ -143,3 +180,119 @@
     }
     return C2Fence();
 }
+
+using namespace android;
+
+class _C2FenceFactory::SyncFenceImpl : public C2Fence::Impl {
+public:
+    virtual c2_status_t wait(c2_nsecs_t timeoutNs) {
+        c2_nsecs_t timeoutMs = timeoutNs / 1000000; // ns -> ms for Fence::wait()
+        if (timeoutMs > INT_MAX) {
+            timeoutMs = INT_MAX;
+        }
+
+        switch (mFence->wait((int)timeoutMs)) {
+            case NO_ERROR:
+                return C2_OK;
+            case -ETIME:
+                return C2_TIMED_OUT;
+            default:
+                return C2_CORRUPTED;
+        }
+    }
+
+    virtual bool valid() const {
+        return mFence->getStatus() != Fence::Status::Invalid;
+    }
+
+    virtual bool ready() const {
+        return mFence->getStatus() == Fence::Status::Signaled;
+    }
+
+    virtual int fd() const {
+        return mFence->dup();
+    }
+
+    virtual bool isHW() const {
+        return true;
+    }
+
+    virtual type_t type() const {
+        return SYNC_FENCE;
+    }
+
+    virtual native_handle_t *createNativeHandle() const {
+        native_handle_t* nh = native_handle_create(1, 1);
+        if (!nh) {
+            ALOGE("Failed to allocate native handle for sync fence");
+            return nullptr;
+        }
+        nh->data[0] = fd();
+        nh->data[1] = type();
+        return nh;
+    }
+
+    virtual ~SyncFenceImpl() {};
+
+    SyncFenceImpl(int fenceFd) :
+            mFence(sp<Fence>::make(fenceFd)) {}
+
+    static std::shared_ptr<SyncFenceImpl> CreateFromNativeHandle(const native_handle_t* nh) {
+        if (!nh || nh->numFds != 1 || nh->numInts != 1) {
+            ALOGE("Invalid handle for sync fence");
+            return nullptr;
+        }
+        int fd = dup(nh->data[0]);
+        std::shared_ptr<SyncFenceImpl> p = std::make_shared<SyncFenceImpl>(fd);
+        if (!p) {
+            ALOGE("Failed to allocate sync fence impl");
+            close(fd);
+        }
+        return p;
+    }
+
+private:
+    const sp<Fence> mFence;
+};
+
+C2Fence _C2FenceFactory::CreateSyncFence(int fenceFd) {
+    std::shared_ptr<C2Fence::Impl> p;
+    if (fenceFd >= 0) {
+        p = std::make_shared<_C2FenceFactory::SyncFenceImpl>(fenceFd);
+        if (!p) {
+            ALOGE("Failed to allocate sync fence impl");
+            close(fenceFd);
+        }
+        if (!p->valid()) {
+            p.reset();
+        }
+    } else {
+        ALOGE("Create sync fence from invalid fd");
+    }
+    return C2Fence(p);
+}
+
+native_handle_t* _C2FenceFactory::CreateNativeHandle(const C2Fence& fence) {
+    return fence.mImpl? fence.mImpl->createNativeHandle() : nullptr;
+}
+
+C2Fence _C2FenceFactory::CreateFromNativeHandle(const native_handle_t* handle) {
+    if (!handle) {
+        return C2Fence();
+    }
+    C2Fence::Impl::type_t type = C2Fence::Impl::GetTypeFromNativeHandle(handle);
+    std::shared_ptr<C2Fence::Impl> p;
+    switch (type) {
+        case C2Fence::Impl::SYNC_FENCE:
+            p = SyncFenceImpl::CreateFromNativeHandle(handle);
+            break;
+        default:
+            ALOG_ASSERT(false, "Unsupported fence type %d", type);
+            break;
+    }
+    if (p && !p->valid()) {
+        p.reset();
+    }
+    return C2Fence(p);
+}
+
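
A round-trip sketch of the new sync-fence marshalling path, using only the factory functions added in this change; fenceFd is an illustrative sync-file descriptor obtained elsewhere.

    #include <cutils/native_handle.h>
    #include <C2FenceFactory.h>

    C2Fence marshalRoundTrip(int fenceFd) {
        C2Fence local = _C2FenceFactory::CreateSyncFence(fenceFd);   // takes ownership of fenceFd
        native_handle_t *nh = _C2FenceFactory::CreateNativeHandle(local);
        C2Fence remote;
        if (nh) {
            // CreateFromNativeHandle dups the fd, so the sender still owns nh
            // and must close and delete it after the transaction.
            remote = _C2FenceFactory::CreateFromNativeHandle(nh);
            native_handle_close(nh);
            native_handle_delete(nh);
        }
        return remote;
    }
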
diff --git a/media/codec2/vndk/include/C2FenceFactory.h b/media/codec2/vndk/include/C2FenceFactory.h
index d4bed26..4944115 100644
--- a/media/codec2/vndk/include/C2FenceFactory.h
+++ b/media/codec2/vndk/include/C2FenceFactory.h
@@ -28,6 +28,7 @@
 struct _C2FenceFactory {
 
     class SurfaceFenceImpl;
+    class SyncFenceImpl;
 
     /*
      * Create C2Fence for BufferQueueBased blockpool.
@@ -38,6 +39,30 @@
     static C2Fence CreateSurfaceFence(
             std::shared_ptr<C2SurfaceSyncMemory> syncMem,
             uint32_t waitId);
+
+    /*
+     * Create C2Fence from a fence file fd.
+     *
+     * \param fenceFd           Fence file descriptor.
+     *                          It will be owned and closed by the returned fence object.
+     */
+    static C2Fence CreateSyncFence(int fenceFd);
+
+    /**
+     * Create a native handle from fence for marshalling
+     *
+     * \return a non-null pointer if the fence can be marshalled, otherwise return nullptr
+     */
+    static native_handle_t* CreateNativeHandle(const C2Fence& fence);
+
+    /*
+     * Create C2Fence from a native handle.
+     *
+     * \param handle           A native handle representing a fence
+     *                         The fd in the native handle will be duplicated, so the caller will
+     *                         still own the handle and have to close it.
+     */
+    static C2Fence CreateFromNativeHandle(const native_handle_t* handle);
 };
 
 
diff --git a/media/libaaudio/src/flowgraph/FlowgraphUtilities.h b/media/libaaudio/src/flowgraph/FlowgraphUtilities.h
index ce2bc82..5e90588 100644
--- a/media/libaaudio/src/flowgraph/FlowgraphUtilities.h
+++ b/media/libaaudio/src/flowgraph/FlowgraphUtilities.h
@@ -39,9 +39,9 @@
     static const float limneg = -1.;
 
     if (f <= limneg) {
-        return -0x80000000; /* or 0x80000000 */
+        return INT32_MIN;
     } else if (f >= limpos) {
-        return 0x7fffffff;
+        return INT32_MAX;
     }
     f *= scale;
     /* integer conversion is through truncation (though int to float is not).
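
A short note on why the literals were replaced: on targets where int is 32 bits (every Android ABI), 0x80000000 does not fit in int, so the literal is unsigned and -0x80000000 is a positive unsigned value rather than the intended minimum; INT32_MIN and INT32_MAX state the intent without relying on that conversion. A compile-time check of the claim (sketch):

    #include <cstdint>
    #include <type_traits>

    static_assert(std::is_unsigned_v<decltype(0x80000000)>,
                  "the hex literal promotes to an unsigned type on 32-bit-int targets");
    static_assert(-0x80000000 > 0,
                  "negating the unsigned literal yields a positive unsigned value");
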
diff --git a/media/libaaudio/tests/test_flowgraph.cpp b/media/libaaudio/tests/test_flowgraph.cpp
index 913feb0..66b77eb 100644
--- a/media/libaaudio/tests/test_flowgraph.cpp
+++ b/media/libaaudio/tests/test_flowgraph.cpp
@@ -16,6 +16,9 @@
 
 /*
  * Test FlowGraph
+ *
+ * This file also tests a few different conversion techniques because
+ * they have sometimes caused compiler bugs.
  */
 
 #include <iostream>
@@ -30,6 +33,7 @@
 #include "flowgraph/SinkFloat.h"
 #include "flowgraph/SinkI16.h"
 #include "flowgraph/SinkI24.h"
+#include "flowgraph/SinkI32.h"
 #include "flowgraph/SourceI16.h"
 #include "flowgraph/SourceI24.h"
 
@@ -37,6 +41,22 @@
 
 constexpr int kBytesPerI24Packed = 3;
 
+constexpr int kNumSamples = 8;
+constexpr std::array<float, kNumSamples> kInputFloat = {
+    1.0f, 0.5f, -0.25f, -1.0f,
+    0.0f, 53.9f, -87.2f, -1.02f};
+
+// Corresponding PCM values as integers.
+constexpr std::array<int16_t, kNumSamples>  kExpectedI16 = {
+    INT16_MAX, 1 << 14, INT16_MIN / 4, INT16_MIN,
+    0, INT16_MAX, INT16_MIN, INT16_MIN};
+
+constexpr std::array<int32_t, kNumSamples>  kExpectedI32 = {
+    INT32_MAX, 1 << 30, INT32_MIN / 4, INT32_MIN,
+    0, INT32_MAX, INT32_MIN, INT32_MIN};
+
+// =================================== FLOAT to I16 ==============
+
 // Simple test that tries to reproduce a Clang compiler bug.
 __attribute__((noinline))
 void local_convert_float_to_int16(const float *input,
@@ -49,18 +69,11 @@
 }
 
 TEST(test_flowgraph, local_convert_float_int16) {
-    static constexpr int kNumSamples = 8;
-    static constexpr std::array<float, kNumSamples> input = {
-        1.0f, 0.5f, -0.25f, -1.0f,
-        0.0f, 53.9f, -87.2f, -1.02f};
-    static constexpr std::array<int16_t, kNumSamples>  expected = {
-        32767, 16384, -8192, -32768,
-        0, 32767, -32768, -32768};
     std::array<int16_t, kNumSamples> output;
 
     // Do it inline, which will probably work even with the buggy compiler.
     // This validates the expected data.
-    const float *in = input.data();
+    const float *in = kInputFloat.data();
     int16_t *out = output.data();
     output.fill(777);
     for (int i = 0; i < kNumSamples; i++) {
@@ -68,38 +81,106 @@
         *out++ = std::min(INT16_MAX, std::max(INT16_MIN, n)); // clip
     }
     for (int i = 0; i < kNumSamples; i++) {
-        EXPECT_EQ(expected.at(i), output.at(i)) << ", i = " << i;
+        EXPECT_EQ(kExpectedI16.at(i), output.at(i)) << ", i = " << i;
     }
 
     // Convert audio signal using the function.
     output.fill(777);
-    local_convert_float_to_int16(input.data(), output.data(), kNumSamples);
+    local_convert_float_to_int16(kInputFloat.data(), output.data(), kNumSamples);
     for (int i = 0; i < kNumSamples; i++) {
-        EXPECT_EQ(expected.at(i), output.at(i)) << ", i = " << i;
+        EXPECT_EQ(kExpectedI16.at(i), output.at(i)) << ", i = " << i;
     }
 }
 
 TEST(test_flowgraph, module_sinki16) {
     static constexpr int kNumSamples = 8;
-    static constexpr std::array<float, kNumSamples> input = {
-        1.0f, 0.5f, -0.25f, -1.0f,
-        0.0f, 53.9f, -87.2f, -1.02f};
-    static constexpr std::array<int16_t, kNumSamples>  expected = {
-        32767, 16384, -8192, -32768,
-        0, 32767, -32768, -32768};
     std::array<int16_t, kNumSamples + 10> output; // larger than input
 
     SourceFloat sourceFloat{1};
     SinkI16 sinkI16{1};
 
-    sourceFloat.setData(input.data(), kNumSamples);
+    sourceFloat.setData(kInputFloat.data(), kNumSamples);
     sourceFloat.output.connect(&sinkI16.input);
 
     output.fill(777);
     int32_t numRead = sinkI16.read(output.data(), output.size());
     ASSERT_EQ(kNumSamples, numRead);
     for (int i = 0; i < numRead; i++) {
-        EXPECT_EQ(expected.at(i), output.at(i)) << ", i = " << i;
+        EXPECT_EQ(kExpectedI16.at(i), output.at(i)) << ", i = " << i;
+    }
+}
+
+// =================================== FLOAT to I32 ==============
+// Simple test that tries to reproduce a Clang compiler bug.
+__attribute__((noinline))
+static int32_t clamp32FromFloat(float f)
+{
+    static const float scale = (float)(1UL << 31);
+    static const float limpos = 1.;
+    static const float limneg = -1.;
+
+    if (f <= limneg) {
+        return INT32_MIN;
+    } else if (f >= limpos) {
+        return INT32_MAX;
+    }
+    f *= scale;
+    /* integer conversion is through truncation (though int to float is not).
+     * ensure that we round to nearest, ties away from 0.
+     */
+    return f > 0 ? f + 0.5 : f - 0.5;
+}
+
+void local_convert_float_to_int32(const float *input,
+                                  int32_t *output,
+                                  int count) {
+    for (int i = 0; i < count; i++) {
+        *output++ = clamp32FromFloat(*input++);
+    }
+}
+
+TEST(test_flowgraph, simple_convert_float_int32) {
+    std::array<int32_t, kNumSamples> output;
+
+    // Do it inline, which will probably work even with a buggy compiler.
+    // This validates the expected data.
+    const float *in = kInputFloat.data();
+    output.fill(777);
+    int32_t *out = output.data();
+    for (int i = 0; i < kNumSamples; i++) {
+        int64_t n = (int64_t) (*in++ * 2147483648.0f);
+        *out++ = (int32_t)std::min((int64_t)INT32_MAX,
+                                   std::max((int64_t)INT32_MIN, n)); // clip
+    }
+    for (int i = 0; i < kNumSamples; i++) {
+        EXPECT_EQ(kExpectedI32.at(i), output.at(i)) << ", i = " << i;
+    }
+}
+
+TEST(test_flowgraph, local_convert_float_int32) {
+    std::array<int32_t, kNumSamples> output;
+    // Convert audio signal using the function.
+    output.fill(777);
+    local_convert_float_to_int32(kInputFloat.data(), output.data(), kNumSamples);
+    for (int i = 0; i < kNumSamples; i++) {
+        EXPECT_EQ(kExpectedI32.at(i), output.at(i)) << ", i = " << i;
+    }
+}
+
+TEST(test_flowgraph, module_sinki32) {
+    std::array<int32_t, kNumSamples + 10> output; // larger than input
+
+    SourceFloat sourceFloat{1};
+    SinkI32 sinkI32{1};
+
+    sourceFloat.setData(kInputFloat.data(), kNumSamples);
+    sourceFloat.output.connect(&sinkI32.input);
+
+    output.fill(777);
+    int32_t numRead = sinkI32.read(output.data(), output.size());
+    ASSERT_EQ(kNumSamples, numRead);
+    for (int i = 0; i < numRead; i++) {
+        EXPECT_EQ(kExpectedI32.at(i), output.at(i)) << ", i = " << i;
     }
 }
 
diff --git a/media/libmediametrics/MediaMetricsItem.cpp b/media/libmediametrics/MediaMetricsItem.cpp
index 57fc49d..ecb248d 100644
--- a/media/libmediametrics/MediaMetricsItem.cpp
+++ b/media/libmediametrics/MediaMetricsItem.cpp
@@ -26,6 +26,7 @@
 #include <unordered_map>
 
 #include <binder/Parcel.h>
+#include <cutils/multiuser.h>
 #include <cutils/properties.h>
 #include <utils/Errors.h>
 #include <utils/Log.h>
@@ -343,7 +344,8 @@
         // now.
         // TODO(b/190151205): Either allow the HotwordDetectionService to access MediaMetrics or
         // make this disabling specific to that process.
-        if (uid >= AID_ISOLATED_START && uid <= AID_ISOLATED_END) {
+        uid_t appid = multiuser_get_app_id(uid);
+        if (appid >= AID_ISOLATED_START && appid <= AID_ISOLATED_END) {
             return false;
         }
         break;
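
The fix matters because Android uids encode both a user and an app id (uid = userId * AID_USER_OFFSET + appId, with AID_USER_OFFSET being 100000), so an isolated process of a secondary user, e.g. uid 1090123, only falls inside the AID_ISOLATED range once the app id (90123) is extracted. A minimal sketch of the corrected check; the header providing the AID_* constants is an assumption.

    #include <cutils/multiuser.h>
    #include <private/android_filesystem_config.h>   // AID_ISOLATED_START/_END (assumed)

    bool isIsolatedUid(uid_t uid) {
        const uid_t appid = multiuser_get_app_id(uid);   // strips the user portion
        return appid >= AID_ISOLATED_START && appid <= AID_ISOLATED_END;
    }
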
diff --git a/media/libmediaplayerservice/MediaPlayerService.cpp b/media/libmediaplayerservice/MediaPlayerService.cpp
index a71631a..8e19d02 100644
--- a/media/libmediaplayerservice/MediaPlayerService.cpp
+++ b/media/libmediaplayerservice/MediaPlayerService.cpp
@@ -611,29 +611,42 @@
                 IPCThreadState::self()->getCallingUid());
         result.append(buffer);
     } else {
-        Mutex::Autolock lock(mLock);
-        for (int i = 0, n = mClients.size(); i < n; ++i) {
-            sp<Client> c = mClients[i].promote();
-            if (c != 0) c->dump(fd, args);
-            clients.add(c);
-        }
-        if (mMediaRecorderClients.size() == 0) {
-                result.append(" No media recorder client\n\n");
-        } else {
+        {
+            // capture clients under lock
+            Mutex::Autolock lock(mLock);
+            for (int i = 0, n = mClients.size(); i < n; ++i) {
+                sp<Client> c = mClients[i].promote();
+                if (c != nullptr) {
+                    clients.add(c);
+                }
+            }
+
             for (int i = 0, n = mMediaRecorderClients.size(); i < n; ++i) {
                 sp<MediaRecorderClient> c = mMediaRecorderClients[i].promote();
-                if (c != 0) {
-                    snprintf(buffer, 255, " MediaRecorderClient pid(%d)\n",
-                            c->mAttributionSource.pid);
-                    result.append(buffer);
-                    write(fd, result.string(), result.size());
-                    result = "\n";
-                    c->dump(fd, args);
+                if (c != nullptr) {
                     mediaRecorderClients.add(c);
                 }
             }
         }
 
+        // dump clients outside of lock
+        for (const sp<Client> &c : clients) {
+            c->dump(fd, args);
+        }
+        if (mediaRecorderClients.size() == 0) {
+            result.append(" No media recorder client\n\n");
+        } else {
+            for (const sp<MediaRecorderClient> &c : mediaRecorderClients) {
+                snprintf(buffer, 255, " MediaRecorderClient pid(%d)\n",
+                        c->mAttributionSource.pid);
+                result.append(buffer);
+                write(fd, result.string(), result.size());
+                result = "\n";
+                c->dump(fd, args);
+
+            }
+        }
+
         result.append(" Files opened and/or mapped:\n");
         snprintf(buffer, SIZE - 1, "/proc/%d/maps", getpid());
         FILE *f = fopen(buffer, "r");
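
The dump() rewrite follows the usual "snapshot under the lock, do the slow work after releasing it" pattern, which keeps client dumps from blocking other service calls on mLock. A generic sketch of the shape (names are illustrative, not the service's own):

    #include <memory>
    #include <mutex>
    #include <vector>

    struct Worker { void dump() const { /* potentially slow */ } };

    class Registry {
        std::mutex mLock;
        std::vector<std::weak_ptr<Worker>> mWorkers;
    public:
        void dumpAll() {
            std::vector<std::shared_ptr<Worker>> alive;
            {
                std::lock_guard<std::mutex> lock(mLock);   // held only while copying
                for (const auto &weak : mWorkers) {
                    if (auto strong = weak.lock()) alive.push_back(std::move(strong));
                }
            }
            for (const auto &worker : alive) worker->dump();   // lock already released
        }
    };
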
diff --git a/media/libstagefright/MediaCodec.cpp b/media/libstagefright/MediaCodec.cpp
index 5a27362..4b35830 100644
--- a/media/libstagefright/MediaCodec.cpp
+++ b/media/libstagefright/MediaCodec.cpp
@@ -120,7 +120,9 @@
 static const char *kCodecParsedColorStandard = "android.media.mediacodec.parsed-color-standard";
 static const char *kCodecParsedColorRange = "android.media.mediacodec.parsed-color-range";
 static const char *kCodecParsedColorTransfer = "android.media.mediacodec.parsed-color-transfer";
-static const char *kCodecHDRMetadataFlags = "android.media.mediacodec.hdr-metadata-flags";
+static const char *kCodecHDRStaticInfo = "android.media.mediacodec.hdr-static-info";
+static const char *kCodecHDR10PlusInfo = "android.media.mediacodec.hdr10-plus-info";
+static const char *kCodecHDRFormat = "android.media.mediacodec.hdr-format";
 
 // Min/Max QP before shaping
 static const char *kCodecOriginalVideoQPIMin = "android.media.mediacodec.original-video-qp-i-min";
@@ -805,7 +807,9 @@
       mWidth(0),
       mHeight(0),
       mRotationDegrees(0),
-      mHDRMetadataFlags(0),
+      mConfigColorTransfer(-1),
+      mHDRStaticInfo(false),
+      mHDR10PlusInfo(false),
       mDequeueInputTimeoutGeneration(0),
       mDequeueInputReplyID(0),
       mDequeueOutputTimeoutGeneration(0),
@@ -951,13 +955,71 @@
                               mIndexOfFirstFrameWhenLowLatencyOn);
     }
 
-    mediametrics_setInt32(mMetricsHandle, kCodecHDRMetadataFlags, mHDRMetadataFlags);
+    mediametrics_setInt32(mMetricsHandle, kCodecHDRStaticInfo, mHDRStaticInfo ? 1 : 0);
+    mediametrics_setInt32(mMetricsHandle, kCodecHDR10PlusInfo, mHDR10PlusInfo ? 1 : 0);
 #if 0
     // enable for short term, only while debugging
     updateEphemeralMediametrics(mMetricsHandle);
 #endif
 }
 
+void MediaCodec::updateHDRFormatMetric() {
+    int32_t profile = -1;
+    AString mediaType;
+    if (mOutputFormat->findInt32(KEY_PROFILE, &profile)
+            && mOutputFormat->findString("mime", &mediaType)) {
+        hdr_format hdrFormat = getHDRFormat(profile, mConfigColorTransfer, mediaType);
+        mediametrics_setInt32(mMetricsHandle, kCodecHDRFormat, static_cast<int>(hdrFormat));
+    }
+}
+
+hdr_format MediaCodec::getHDRFormat(const int32_t profile, const int32_t transfer,
+        const AString &mediaType) {
+    switch (transfer) {
+        case COLOR_TRANSFER_ST2084:
+            if (mediaType.equalsIgnoreCase(MEDIA_MIMETYPE_VIDEO_VP9)) {
+                switch (profile) {
+                    case VP9Profile2HDR:
+                        return HDR_FORMAT_HDR10;
+                    case VP9Profile2HDR10Plus:
+                        return HDR_FORMAT_HDR10PLUS;
+                    default:
+                        return HDR_FORMAT_NONE;
+                }
+            } else if (mediaType.equalsIgnoreCase(MEDIA_MIMETYPE_VIDEO_AV1)) {
+                switch (profile) {
+                    case AV1ProfileMain10HDR10:
+                        return HDR_FORMAT_HDR10;
+                    case AV1ProfileMain10HDR10Plus:
+                        return HDR_FORMAT_HDR10PLUS;
+                    default:
+                        return HDR_FORMAT_NONE;
+                }
+            } else if (mediaType.equalsIgnoreCase(MEDIA_MIMETYPE_VIDEO_HEVC)) {
+                switch (profile) {
+                    case HEVCProfileMain10HDR10:
+                        return HDR_FORMAT_HDR10;
+                    case HEVCProfileMain10HDR10Plus:
+                        return HDR_FORMAT_HDR10PLUS;
+                    default:
+                        return HDR_FORMAT_NONE;
+                }
+            } else {
+                return HDR_FORMAT_NONE;
+            }
+        case COLOR_TRANSFER_HLG:
+            if (!mediaType.equalsIgnoreCase(MEDIA_MIMETYPE_VIDEO_DOLBY_VISION)) {
+                return HDR_FORMAT_HLG;
+            } else {
+                // TODO: DOLBY format
+                return HDR_FORMAT_NONE;
+            }
+        default:
+            return HDR_FORMAT_NONE;
+    }
+}
+
+
 void MediaCodec::updateEphemeralMediametrics(mediametrics_handle_t item) {
     ALOGD("MediaCodec::updateEphemeralMediametrics()");
 
@@ -1647,12 +1709,13 @@
             }
             int32_t colorTransfer = -1;
             if (format->findInt32(KEY_COLOR_TRANSFER, &colorTransfer)) {
+                mConfigColorTransfer = colorTransfer;
                 mediametrics_setInt32(mMetricsHandle, kCodecConfigColorTransfer, colorTransfer);
             }
             HDRStaticInfo info;
             if (ColorUtils::getHDRStaticInfoFromFormat(format, &info)
                     && ColorUtils::isHDRStaticInfoValid(&info)) {
-                mHDRMetadataFlags |= kFlagHDRStaticInfo;
+                mHDRStaticInfo = true;
             }
         }
 
@@ -3307,6 +3370,8 @@
                     CHECK(msg->findMessage("input-format", &mInputFormat));
                     CHECK(msg->findMessage("output-format", &mOutputFormat));
 
+                    updateHDRFormatMetric();
+
                     // limit to confirming the opt-in behavior to minimize any behavioral change
                     if (mSurface != nullptr && !mAllowFrameDroppingBySurface) {
                         // signal frame dropping mode in the input format as this may also be
@@ -3388,6 +3453,7 @@
                                 mComponentName.c_str(),
                                 mInputFormat->debugString(4).c_str(),
                                 mOutputFormat->debugString(4).c_str());
+                        updateHDRFormatMetric();
                         CHECK(obj != NULL);
                         response->setObject("input-surface", obj);
                         mHaveInputSurface = true;
@@ -3412,6 +3478,7 @@
                     if (!msg->findInt32("err", &err)) {
                         CHECK(msg->findMessage("input-format", &mInputFormat));
                         CHECK(msg->findMessage("output-format", &mOutputFormat));
+                        updateHDRFormatMetric();
                         mHaveInputSurface = true;
                     } else {
                         response->setInt32("err", err);
@@ -4583,6 +4650,7 @@
         buffer->meta()->setObject("changedKeys", changedKeys);
     }
     mOutputFormat = format;
+    updateHDRFormatMetric();
     mapFormat(mComponentName, format, nullptr, true);
     ALOGV("[%s] output format changed to: %s",
             mComponentName.c_str(), mOutputFormat->debugString(4).c_str());
@@ -4609,7 +4677,7 @@
             if (ColorUtils::getHDRStaticInfoFromFormat(mOutputFormat, &info)) {
                 setNativeWindowHdrMetadata(mSurface.get(), &info);
                 if (ColorUtils::isHDRStaticInfoValid(&info)) {
-                    mHDRMetadataFlags |= kFlagHDRStaticInfo;
+                    mHDRStaticInfo = true;
                 }
             }
         }
@@ -4619,7 +4687,7 @@
                 && hdr10PlusInfo != nullptr && hdr10PlusInfo->size() > 0) {
             native_window_set_buffers_hdr10_plus_metadata(mSurface.get(),
                     hdr10PlusInfo->size(), hdr10PlusInfo->data());
-            mHDRMetadataFlags |= kFlagHDR10PlusInfo;
+            mHDR10PlusInfo = true;
         }
 
         if (mime.startsWithIgnoreCase("video/")) {
diff --git a/media/libstagefright/include/media/stagefright/MediaCodec.h b/media/libstagefright/include/media/stagefright/MediaCodec.h
index f5af50d..a00a3e6 100644
--- a/media/libstagefright/include/media/stagefright/MediaCodec.h
+++ b/media/libstagefright/include/media/stagefright/MediaCodec.h
@@ -25,6 +25,7 @@
 #include <media/hardware/CryptoAPI.h>
 #include <media/MediaCodecInfo.h>
 #include <media/MediaMetrics.h>
+#include <media/MediaProfiles.h>
 #include <media/stagefright/foundation/AHandler.h>
 #include <media/stagefright/FrameRenderTracker.h>
 #include <utils/Vector.h>
@@ -451,11 +452,12 @@
     int32_t mRotationDegrees;
     int32_t mAllowFrameDroppingBySurface;
 
-    uint32_t mHDRMetadataFlags; /* bitmask of kFlagHDR* */
-    enum {
-        kFlagHDRStaticInfo = 1 << 0,
-        kFlagHDR10PlusInfo = 1 << 1,
-    };
+    int32_t mConfigColorTransfer;
+    bool mHDRStaticInfo;
+    bool mHDR10PlusInfo;
+    void updateHDRFormatMetric();
+    hdr_format getHDRFormat(const int32_t profile, const int32_t transfer,
+            const AString &mediaType);
 
     // initial create parameters
     AString mInitName;
diff --git a/services/camera/libcameraservice/device3/Camera3Device.cpp b/services/camera/libcameraservice/device3/Camera3Device.cpp
index b133263..dec439f 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.cpp
+++ b/services/camera/libcameraservice/device3/Camera3Device.cpp
@@ -4265,8 +4265,8 @@
                 if (parent != nullptr) {
                     parent->mRequestBufferSM.onRequestThreadPaused();
                 }
-                mRequestClearing = false;
             }
+            mRequestClearing = false;
             // Stop waiting for now and let thread management happen
             return NULL;
         }
diff --git a/services/camera/libcameraservice/device3/Camera3OutputStream.cpp b/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
index 4a9b259..d2167e3 100644
--- a/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
+++ b/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
@@ -1406,9 +1406,13 @@
     nsecs_t expectedPresentT = mLastPresentTime;
     nsecs_t minDiff = INT64_MAX;
     // Derive minimum intervals between presentation times based on minimal
-    // expected duration.
-    size_t minVsyncs = (mMinExpectedDuration + vsyncEventData.frameInterval - 1) /
-            vsyncEventData.frameInterval - 1;
+    // expected duration. The minimum number of Vsyncs is:
+    // - 0 if minFrameDuration in (0, 1.5] * vSyncInterval,
+    // - 1 if minFrameDuration in (1.5, 2.5] * vSyncInterval,
+    // - and so on.
+    int minVsyncs = (mMinExpectedDuration - vsyncEventData.frameInterval / 2) /
+            vsyncEventData.frameInterval;
+    if (minVsyncs < 0) minVsyncs = 0;
     nsecs_t minInterval = minVsyncs * vsyncEventData.frameInterval + kTimelineThresholdNs;
     // Find best timestamp in the vsync timeline:
     // - closest to the ideal present time,
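
Worked numbers for the new rounding, with an assumed 60 Hz vsync: a 33 ms minimum frame duration gives (33 - 8.3) / 16.7 ≈ 1.5, truncated to 1 extra vsync, while a 16.7 ms duration gives 0, matching the (0, 1.5] and (1.5, 2.5] buckets in the comment. The same arithmetic as a tiny standalone check:

    #include <cstdint>
    #include <cstdio>

    int64_t minVsyncsFor(int64_t minExpectedDurationNs, int64_t frameIntervalNs) {
        int64_t minVsyncs = (minExpectedDurationNs - frameIntervalNs / 2) / frameIntervalNs;
        return minVsyncs < 0 ? 0 : minVsyncs;
    }

    int main() {
        const int64_t vsyncNs = 16666667;   // ~60 Hz
        std::printf("%lld\n", (long long) minVsyncsFor(33333333, vsyncNs));   // prints 1
        std::printf("%lld\n", (long long) minVsyncsFor(16666667, vsyncNs));   // prints 0
        return 0;
    }
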
diff --git a/services/mediametrics/statsd_codec.cpp b/services/mediametrics/statsd_codec.cpp
index e322d62..a737ba0 100644
--- a/services/mediametrics/statsd_codec.cpp
+++ b/services/mediametrics/statsd_codec.cpp
@@ -390,47 +390,59 @@
     }
     AStatsEvent_writeInt32(event, qpBMaxOri);
 
-    // int32_t configColorStandard = -1;
-    // if (item->getInt32("android.media.mediacodec.config-color-standard", &configColorStandard)) {
-    //     metrics_proto.set_config_color_standard(configColorStandard);
-    // }
-    // AStatsEvent_writeInt32(event, configColorStandard);
+    int32_t configColorStandard = -1;
+    if (item->getInt32("android.media.mediacodec.config-color-standard", &configColorStandard)) {
+        metrics_proto.set_config_color_standard(configColorStandard);
+    }
+    AStatsEvent_writeInt32(event, configColorStandard);
 
-    // int32_t configColorRange = -1;
-    // if (item->getInt32("android.media.mediacodec.config-color-range", &configColorRange)) {
-    //     metrics_proto.set_config_color_range(configColorRange);
-    // }
-    // AStatsEvent_writeInt32(event, configColorRange);
+    int32_t configColorRange = -1;
+    if (item->getInt32("android.media.mediacodec.config-color-range", &configColorRange)) {
+        metrics_proto.set_config_color_range(configColorRange);
+    }
+    AStatsEvent_writeInt32(event, configColorRange);
 
-    // int32_t configColorTransfer = -1;
-    // if (item->getInt32("android.media.mediacodec.config-color-transfer", &configColorTransfer)) {
-    //     metrics_proto.set_config_color_transfer(configColorTransfer);
-    // }
-    // AStatsEvent_writeInt32(event, configColorTransfer);
+    int32_t configColorTransfer = -1;
+    if (item->getInt32("android.media.mediacodec.config-color-transfer", &configColorTransfer)) {
+        metrics_proto.set_config_color_transfer(configColorTransfer);
+    }
+    AStatsEvent_writeInt32(event, configColorTransfer);
 
-    // int32_t parsedColorStandard = -1;
-    // if (item->getInt32("android.media.mediacodec.parsed-color-standard", &parsedColorStandard)) {
-    //     metrics_proto.set_parsed_color_standard(parsedColorStandard);
-    // }
-    // AStatsEvent_writeInt32(event, parsedColorStandard);
+    int32_t parsedColorStandard = -1;
+    if (item->getInt32("android.media.mediacodec.parsed-color-standard", &parsedColorStandard)) {
+        metrics_proto.set_parsed_color_standard(parsedColorStandard);
+    }
+    AStatsEvent_writeInt32(event, parsedColorStandard);
 
-    // int32_t parsedColorRange = -1;
-    // if (item->getInt32("android.media.mediacodec.parsed-color-range", &parsedColorRange)) {
-    //     metrics_proto.set_parsed_color_range(parsedColorRange);
-    // }
-    // AStatsEvent_writeInt32(event, parsedColorRange);
+    int32_t parsedColorRange = -1;
+    if (item->getInt32("android.media.mediacodec.parsed-color-range", &parsedColorRange)) {
+        metrics_proto.set_parsed_color_range(parsedColorRange);
+    }
+    AStatsEvent_writeInt32(event, parsedColorRange);
 
-    // int32_t parsedColorTransfer = -1;
-    // if (item->getInt32("android.media.mediacodec.parsed-color-transfer", &parsedColorTransfer)) {
-    //     metrics_proto.set_parsed_color_transfer(parsedColorTransfer);
-    // }
-    // AStatsEvent_writeInt32(event, parsedColorTransfer);
+    int32_t parsedColorTransfer = -1;
+    if (item->getInt32("android.media.mediacodec.parsed-color-transfer", &parsedColorTransfer)) {
+        metrics_proto.set_parsed_color_transfer(parsedColorTransfer);
+    }
+    AStatsEvent_writeInt32(event, parsedColorTransfer);
 
-    // int32_t hdrMetadataFlags = -1;
-    // if (item->getInt32("android.media.mediacodec.hdr-metadata-flags", &hdrMetadataFlags)) {
-    //     metrics_proto.set_hdr_metadata_flags(hdrMetadataFlags);
-    // }
-    // AStatsEvent_writeInt32(event, hdrMetadataFlags);
+    int32_t hdrStaticInfo = -1;
+    if (item->getInt32("android.media.mediacodec.hdr-static-info", &hdrStaticInfo)) {
+        metrics_proto.set_hdr_static_info(hdrStaticInfo);
+    }
+    AStatsEvent_writeInt32(event, hdrStaticInfo);
+
+    int32_t hdr10PlusInfo = -1;
+    if (item->getInt32("android.media.mediacodec.hdr10-plus-info", &hdr10PlusInfo)) {
+        metrics_proto.set_hdr10_plus_info(hdr10PlusInfo);
+    }
+    AStatsEvent_writeInt32(event, hdr10PlusInfo);
+
+    int32_t hdrFormat = -1;
+    if (item->getInt32("android.media.mediacodec.hdr-format", &hdrFormat)) {
+        metrics_proto.set_hdr_format(hdrFormat);
+    }
+    AStatsEvent_writeInt32(event, hdrFormat);
 
     int err = AStatsEvent_write(event);
     if (err < 0) {