Merge "camera_service_fuzzer: Bug Fix"
diff --git a/camera/cameraserver/Android.bp b/camera/cameraserver/Android.bp
index 22bb234..094a3c1 100644
--- a/camera/cameraserver/Android.bp
+++ b/camera/cameraserver/Android.bp
@@ -47,7 +47,6 @@
         "android.hardware.camera.device@1.0",
         "android.hardware.camera.device@3.2",
         "android.hardware.camera.device@3.4",
-        "android.hardware.camera.device@3.8",
     ],
     compile_multilib: "first",
     cflags: [
diff --git a/drm/libmediadrm/CryptoHalAidl.cpp b/drm/libmediadrm/CryptoHalAidl.cpp
index bda664a..8b9d1de 100644
--- a/drm/libmediadrm/CryptoHalAidl.cpp
+++ b/drm/libmediadrm/CryptoHalAidl.cpp
@@ -219,6 +219,7 @@
     }
 
     mPlugin.reset();
+    mInitCheck = NO_INIT;
     return OK;
 }
 
@@ -372,6 +373,10 @@
 
     Mutex::Autolock autoLock(mLock);
 
+    if (mInitCheck != OK) {
+        return -1;
+    }
+
     int32_t seqNum = mHeapSeqNum++;
     uint32_t bufferId = static_cast<uint32_t>(seqNum);
     mHeapSizes.add(seqNum, heap->size());
diff --git a/drm/libmediadrm/CryptoHalHidl.cpp b/drm/libmediadrm/CryptoHalHidl.cpp
index a290704..55364b5 100644
--- a/drm/libmediadrm/CryptoHalHidl.cpp
+++ b/drm/libmediadrm/CryptoHalHidl.cpp
@@ -190,6 +190,7 @@
 
     mPlugin.clear();
     mPluginV1_2.clear();
+    mInitCheck = NO_INIT;
     return OK;
 }
 
@@ -221,6 +222,10 @@
 
     Mutex::Autolock autoLock(mLock);
 
+    if (mInitCheck != OK) {
+        return -1;
+    }
+
     int32_t seqNum = mHeapSeqNum++;
     uint32_t bufferId = static_cast<uint32_t>(seqNum);
     mHeapSizes.add(seqNum, heap->size());
diff --git a/media/TEST_MAPPING b/media/TEST_MAPPING
index 3b7a314..9a2aa0d 100644
--- a/media/TEST_MAPPING
+++ b/media/TEST_MAPPING
@@ -1,24 +1,7 @@
 // for frameworks/av/media
 {
-    "presubmit-large": [
-        // runs whenever we change something in this tree
-        {
-            "name": "CtsMediaCodecTestCases",
-            "options": [
-                {
-                    "include-filter": "android.media.codec.cts.EncodeDecodeTest"
-                }
-            ]
-        },
-        {
-            "name": "CtsMediaCodecTestCases",
-            "options": [
-                {
-                    "include-filter": "android.media.codec.cts.DecodeEditEncodeTest"
-                }
-            ]
-        }
-    ],
+    // TODO (b/229286407) Add EncodeDecodeTest and DecodeEditEncodeTest to
+    // presubmit-large once issues in cuttlefish are fixed
     "presubmit": [
         {
             "name": "GtsMediaTestCases",
diff --git a/media/codec2/components/aac/C2SoftAacDec.cpp b/media/codec2/components/aac/C2SoftAacDec.cpp
index 57cdcd0..4e4a9a1 100644
--- a/media/codec2/components/aac/C2SoftAacDec.cpp
+++ b/media/codec2/components/aac/C2SoftAacDec.cpp
@@ -221,6 +221,12 @@
                 .withFields({C2F(mDrcOutputLoudness, value).inRange(-57.75, 0.25)})
                 .withSetter(Setter<decltype(*mDrcOutputLoudness)>::StrictValueWithNoDeps)
                 .build());
+
+        addParameter(DefineParam(mChannelMask, C2_PARAMKEY_CHANNEL_MASK)
+                .withDefault(new C2StreamChannelMaskInfo::output(0u, 0))
+                .withFields({C2F(mChannelMask, value).inRange(0, 4294967292)})
+                .withSetter(Setter<decltype(*mChannelMask)>::StrictValueWithNoDeps)
+                .build());
     }
 
     bool isAdts() const { return mAacFormat->value == C2Config::AAC_PACKAGING_ADTS; }
@@ -255,6 +261,7 @@
     std::shared_ptr<C2StreamDrcAlbumModeTuning::input> mDrcAlbumMode;
     std::shared_ptr<C2StreamMaxChannelCountInfo::input> mMaxChannelCount;
     std::shared_ptr<C2StreamDrcOutputLoudnessTuning::output> mDrcOutputLoudness;
+    std::shared_ptr<C2StreamChannelMaskInfo::output> mChannelMask;
     // TODO Add : C2StreamAacSbrModeTuning
 };
 
@@ -829,9 +836,11 @@
 
                 C2StreamSampleRateInfo::output sampleRateInfo(0u, mStreamInfo->sampleRate);
                 C2StreamChannelCountInfo::output channelCountInfo(0u, mStreamInfo->numChannels);
+                C2StreamChannelMaskInfo::output channelMaskInfo(0u,
+                        maskFromCount(mStreamInfo->numChannels));
                 std::vector<std::unique_ptr<C2SettingResult>> failures;
                 c2_status_t err = mIntf->config(
-                        { &sampleRateInfo, &channelCountInfo },
+                        { &sampleRateInfo, &channelCountInfo, &channelMaskInfo },
                         C2_MAY_BLOCK,
                         &failures);
                 if (err == OK) {
@@ -840,6 +849,7 @@
                     C2FrameData &output = work->worklets.front()->output;
                     output.configUpdate.push_back(C2Param::Copy(sampleRateInfo));
                     output.configUpdate.push_back(C2Param::Copy(channelCountInfo));
+                    output.configUpdate.push_back(C2Param::Copy(channelMaskInfo));
                 } else {
                     ALOGE("Config Update failed");
                     mSignalledError = true;
@@ -1056,6 +1066,47 @@
     }
 }
 
+// definitions based on android.media.AudioFormat.CHANNEL_OUT_*
+#define CHANNEL_OUT_FL  0x4
+#define CHANNEL_OUT_FR  0x8
+#define CHANNEL_OUT_FC  0x10
+#define CHANNEL_OUT_LFE 0x20
+#define CHANNEL_OUT_BL  0x40
+#define CHANNEL_OUT_BR  0x80
+#define CHANNEL_OUT_SL  0x800
+#define CHANNEL_OUT_SR  0x1000
+
+uint32_t C2SoftAacDec::maskFromCount(uint32_t channelCount) {
+    // KEY_CHANNEL_MASK expects masks formatted according to Java android.media.AudioFormat
+    // where the two least significant bits are 0 for an output channel mask
+    switch (channelCount) {
+        case 1: // mono is front left
+            return (CHANNEL_OUT_FL);
+        case 2: // stereo
+            return (CHANNEL_OUT_FL | CHANNEL_OUT_FR);
+        case 4: // 4.0 = stereo with backs
+            return (CHANNEL_OUT_FL | CHANNEL_OUT_FR
+                    | CHANNEL_OUT_BL | CHANNEL_OUT_BR);
+        case 5: // 5.0
+            return (CHANNEL_OUT_FL | CHANNEL_OUT_FC | CHANNEL_OUT_FR
+                    | CHANNEL_OUT_BL | CHANNEL_OUT_BR);
+        case 6: // 5.1 = 5.0 + LFE
+            return (CHANNEL_OUT_FL | CHANNEL_OUT_FC | CHANNEL_OUT_FR
+                    | CHANNEL_OUT_BL | CHANNEL_OUT_BR
+                    | CHANNEL_OUT_LFE);
+        case 7: // 7.0 = 5.0 + Sides
+            return (CHANNEL_OUT_FL | CHANNEL_OUT_FC | CHANNEL_OUT_FR
+                    | CHANNEL_OUT_BL | CHANNEL_OUT_BR
+                    | CHANNEL_OUT_SL | CHANNEL_OUT_SR);
+        case 8: // 7.1 = 7.0 + LFE
+            return (CHANNEL_OUT_FL | CHANNEL_OUT_FC | CHANNEL_OUT_FR
+                    | CHANNEL_OUT_BL | CHANNEL_OUT_BR | CHANNEL_OUT_SL | CHANNEL_OUT_SR
+                    | CHANNEL_OUT_LFE);
+        default:
+            return 0;
+    }
+}
+
 class C2SoftAacDecFactory : public C2ComponentFactory {
 public:
     C2SoftAacDecFactory() : mHelper(std::static_pointer_cast<C2ReflectorHelper>(
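
As a quick cross-check of the mapping above: the masks that maskFromCount() builds out of the CHANNEL_OUT_* defines are the same composite values Java exposes in android.media.AudioFormat. A minimal standalone sketch (the 0xC and 0xFC expectations are AudioFormat.CHANNEL_OUT_STEREO and CHANNEL_OUT_5POINT1; everything else is restated from the patch):

#include <cassert>
#include <cstdint>

int main() {
    // same values as the CHANNEL_OUT_* defines added in C2SoftAacDec.cpp
    constexpr uint32_t FL = 0x4, FR = 0x8, FC = 0x10, LFE = 0x20, BL = 0x40, BR = 0x80;
    assert((FL | FR) == 0xCu);                       // stereo == AudioFormat.CHANNEL_OUT_STEREO
    assert((FL | FR | FC | LFE | BL | BR) == 0xFCu); // 5.1 == AudioFormat.CHANNEL_OUT_5POINT1
    return 0;
}
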
diff --git a/media/codec2/components/aac/C2SoftAacDec.h b/media/codec2/components/aac/C2SoftAacDec.h
index a03fc70..b45f148 100644
--- a/media/codec2/components/aac/C2SoftAacDec.h
+++ b/media/codec2/components/aac/C2SoftAacDec.h
@@ -101,6 +101,7 @@
     int32_t outputDelayRingBufferGetSamples(INT_PCM *samples, int numSamples);
     int32_t outputDelayRingBufferSamplesAvailable();
     int32_t outputDelayRingBufferSpaceLeft();
+    uint32_t maskFromCount(uint32_t channelCount);
 
     C2_DO_NOT_COPY(C2SoftAacDec);
 };
diff --git a/media/codec2/components/aom/C2SoftAomDec.cpp b/media/codec2/components/aom/C2SoftAomDec.cpp
index d65488e..96b81d7 100644
--- a/media/codec2/components/aom/C2SoftAomDec.cpp
+++ b/media/codec2/components/aom/C2SoftAomDec.cpp
@@ -536,9 +536,10 @@
 
     std::shared_ptr<C2GraphicBlock> block;
     uint32_t format = HAL_PIXEL_FORMAT_YV12;
+    std::shared_ptr<C2StreamColorAspectsTuning::output> defaultColorAspects;
     if (img->fmt == AOM_IMG_FMT_I42016) {
         IntfImpl::Lock lock = mIntf->lock();
-        std::shared_ptr<C2StreamColorAspectsTuning::output> defaultColorAspects = mIntf->getDefaultColorAspects_l();
+        defaultColorAspects = mIntf->getDefaultColorAspects_l();
 
         if (defaultColorAspects->primaries == C2Color::PRIMARIES_BT2020 &&
             defaultColorAspects->matrix == C2Color::MATRIX_BT2020 &&
@@ -587,7 +588,8 @@
         if (format == HAL_PIXEL_FORMAT_RGBA_1010102) {
             convertYUV420Planar16ToY410OrRGBA1010102(
                     (uint32_t *)dstY, srcY, srcU, srcV, srcYStride / 2, srcUStride / 2,
-                    srcVStride / 2, dstYStride / sizeof(uint32_t), mWidth, mHeight);
+                    srcVStride / 2, dstYStride / sizeof(uint32_t), mWidth, mHeight,
+                    std::static_pointer_cast<const C2ColorAspectsStruct>(defaultColorAspects));
         } else {
             convertYUV420Planar16ToYV12(dstY, dstU, dstV, srcY, srcU, srcV, srcYStride / 2,
                                         srcUStride / 2, srcVStride / 2, dstYStride, dstUVStride,
diff --git a/media/codec2/components/base/Android.bp b/media/codec2/components/base/Android.bp
index 8c7f8db..664647a 100644
--- a/media/codec2/components/base/Android.bp
+++ b/media/codec2/components/base/Android.bp
@@ -37,6 +37,11 @@
         "libsfplugin_ccodec_utils",
     ],
 
+    header_libs: [
+        "libarect_headers",
+        "libnativewindow_headers",
+    ],
+
     shared_libs: [
         "libcutils", // for properties
         "liblog", // for ALOG
@@ -77,6 +82,11 @@
         "libsfplugin_ccodec_utils",
     ],
 
+    header_libs: [
+        "libarect_headers",
+        "libnativewindow_headers",
+    ],
+
     shared_libs: [
         "libcodec2_soft_common",
         "libcutils", // for properties
diff --git a/media/codec2/components/base/SimpleC2Component.cpp b/media/codec2/components/base/SimpleC2Component.cpp
index 678c269..9d4f049 100644
--- a/media/codec2/components/base/SimpleC2Component.cpp
+++ b/media/codec2/components/base/SimpleC2Component.cpp
@@ -18,6 +18,7 @@
 #define LOG_TAG "SimpleC2Component"
 #include <log/log.h>
 
+#include <android/hardware_buffer.h>
 #include <cutils/properties.h>
 #include <media/stagefright/foundation/AMessage.h>
 
@@ -26,19 +27,14 @@
 #include <C2Config.h>
 #include <C2Debug.h>
 #include <C2PlatformSupport.h>
+#include <Codec2BufferUtils.h>
+#include <Codec2CommonUtils.h>
 #include <SimpleC2Component.h>
 
 namespace android {
 constexpr uint8_t kNeutralUVBitDepth8 = 128;
 constexpr uint16_t kNeutralUVBitDepth10 = 512;
 
-bool isAtLeastT() {
-    char deviceCodeName[PROP_VALUE_MAX];
-    __system_property_get("ro.build.version.codename", deviceCodeName);
-    return android_get_device_api_level() >= __ANDROID_API_T__ ||
-           !strcmp(deviceCodeName, "Tiramisu");
-}
-
 void convertYUV420Planar8ToYV12(uint8_t *dstY, uint8_t *dstU, uint8_t *dstV, const uint8_t *srcY,
                                 const uint8_t *srcU, const uint8_t *srcV, size_t srcYStride,
                                 size_t srcUStride, size_t srcVStride, size_t dstYStride,
@@ -137,11 +133,126 @@
         dst += dstStride * 2;
     }
 }
+
+namespace {
+
+static C2ColorAspectsStruct FillMissingColorAspects(
+        std::shared_ptr<const C2ColorAspectsStruct> aspects,
+        int32_t width, int32_t height) {
+    C2ColorAspectsStruct _aspects;
+    if (aspects) {
+        _aspects = *aspects;
+    }
+
+    // use matrix for conversion
+    if (_aspects.matrix == C2Color::MATRIX_UNSPECIFIED) {
+        // if not specified, deduce matrix from primaries
+        if (_aspects.primaries == C2Color::PRIMARIES_UNSPECIFIED) {
+            // if those are also not specified, deduce primaries first from transfer, then from
+            // width and height
+            if (_aspects.transfer == C2Color::TRANSFER_ST2084
+                    || _aspects.transfer == C2Color::TRANSFER_HLG) {
+                _aspects.primaries = C2Color::PRIMARIES_BT2020;
+            } else if (width >= 3840 || height >= 3840 || width * (int64_t)height >= 3840 * 1634) {
+                // TODO: stagefright defaults to BT.2020 for UHD, but perhaps we should default to
+                // BT.709 for non-HDR 10-bit UHD content
+                // (see media/libstagefright/foundation/ColorUtils.cpp)
+                _aspects.primaries = C2Color::PRIMARIES_BT2020;
+            } else if ((width <= 720 && height <= 576)
+                    || (height <= 720 && width <= 576)) {
+                // note: it does not actually matter whether to use 525 or 625 here as the
+                // conversion is the same
+                _aspects.primaries = C2Color::PRIMARIES_BT601_625;
+            } else {
+                _aspects.primaries = C2Color::PRIMARIES_BT709;
+            }
+        }
+
+        switch (_aspects.primaries) {
+        case C2Color::PRIMARIES_BT601_525:
+        case C2Color::PRIMARIES_BT601_625:
+            _aspects.matrix = C2Color::MATRIX_BT601;
+            break;
+
+        case C2Color::PRIMARIES_BT709:
+            _aspects.matrix = C2Color::MATRIX_BT709;
+            break;
+
+        case C2Color::PRIMARIES_BT2020:
+        default:
+            _aspects.matrix = C2Color::MATRIX_BT2020;
+        }
+    }
+
+    return _aspects;
+}
+
+// matrix conversion coefficients
+// (see media/libstagefright/colorconverter/ColorConverter.cpp for more details)
+struct Coeffs {
+    int32_t _y, _r_v, _g_u, _g_v, _b_u, _c16;
+};
+
+static const struct Coeffs GetCoeffsForAspects(const C2ColorAspectsStruct &aspects) {
+    bool isFullRange = aspects.range == C2Color::RANGE_FULL;
+
+    switch (aspects.matrix) {
+    case C2Color::MATRIX_BT601:
+        /**
+         * BT.601:  K_R = 0.299;  K_B = 0.114
+         */
+        if (isFullRange) {
+            return Coeffs { 1024, 1436, 352, 731, 1815, 0 };
+        } else {
+            return Coeffs { 1196, 1639, 402, 835, 2072, 64 };
+        }
+        break;
+
+    case C2Color::MATRIX_BT709:
+        /**
+         * BT.709:  K_R = 0.2126;  K_B = 0.0722
+         */
+        if (isFullRange) {
+            return Coeffs { 1024, 1613, 192, 479, 1900, 0 };
+        } else {
+            return Coeffs { 1196, 1841, 219, 547, 2169, 64 };
+        }
+        break;
+
+    case C2Color::MATRIX_BT2020:
+    default:
+        /**
+         * BT.2020:  K_R = 0.2627;  K_B = 0.0593
+         */
+        if (isFullRange) {
+            return Coeffs { 1024, 1510, 169, 585, 1927, 0 };
+        } else {
+            return Coeffs { 1196, 1724, 192, 668, 2200, 64 };
+        }
+    }
+}
+
+}
+
 #define CLIP3(min, v, max) (((v) < (min)) ? (min) : (((max) > (v)) ? (v) : (max)))
-void convertYUV420Planar16ToRGBA1010102(uint32_t *dst, const uint16_t *srcY, const uint16_t *srcU,
-                                        const uint16_t *srcV, size_t srcYStride, size_t srcUStride,
-                                        size_t srcVStride, size_t dstStride, size_t width,
-                                        size_t height) {
+void convertYUV420Planar16ToRGBA1010102(
+        uint32_t *dst, const uint16_t *srcY, const uint16_t *srcU,
+        const uint16_t *srcV, size_t srcYStride, size_t srcUStride,
+        size_t srcVStride, size_t dstStride, size_t width,
+        size_t height,
+        std::shared_ptr<const C2ColorAspectsStruct> aspects) {
+
+    C2ColorAspectsStruct _aspects = FillMissingColorAspects(aspects, width, height);
+
+    struct Coeffs coeffs = GetCoeffsForAspects(_aspects);
+
+    int32_t _y = coeffs._y;
+    int32_t _b_u = coeffs._b_u;
+    int32_t _neg_g_u = -coeffs._g_u;
+    int32_t _neg_g_v = -coeffs._g_v;
+    int32_t _r_v = coeffs._r_v;
+    int32_t _c16 = coeffs._c16;
+
     // Converting two lines at a time, slightly faster
     for (size_t y = 0; y < height; y += 2) {
         uint32_t *dstTop = (uint32_t *)dst;
@@ -151,25 +262,6 @@
         uint16_t *uSrc = (uint16_t *)srcU;
         uint16_t *vSrc = (uint16_t *)srcV;
 
-        // BT.2020 Limited Range conversion
-
-        // B = 1.168  *(Y - 64) + 2.148  *(U - 512)
-        // G = 1.168  *(Y - 64) - 0.652  *(V - 512) - 0.188  *(U - 512)
-        // R = 1.168  *(Y - 64) + 1.683  *(V - 512)
-
-        // B = 1196/1024  *(Y - 64) + 2200/1024  *(U - 512)
-        // G = .................... -  668/1024  *(V - 512) - 192/1024  *(U - 512)
-        // R = .................... + 1723/1024  *(V - 512)
-
-        // min_B = (1196  *(- 64) + 2200  *(- 512)) / 1024 = -1175
-        // min_G = (1196  *(- 64) - 668  *(1023 - 512) - 192  *(1023 - 512)) / 1024 = -504
-        // min_R = (1196  *(- 64) + 1723  *(- 512)) / 1024 = -937
-
-        // max_B = (1196  *(1023 - 64) + 2200  *(1023 - 512)) / 1024 = 2218
-        // max_G = (1196  *(1023 - 64) - 668  *(- 512) - 192  *(- 512)) / 1024 = 1551
-        // max_R = (1196  *(1023 - 64) + 1723  *(1023 - 512)) / 1024 = 1980
-
-        int32_t mY = 1196, mU_B = 2200, mV_G = -668, mV_R = 1723, mU_G = -192;
         for (size_t x = 0; x < width; x += 2) {
             int32_t u, v, y00, y01, y10, y11;
             u = *uSrc - 512;
@@ -177,22 +269,22 @@
             v = *vSrc - 512;
             vSrc += 1;
 
-            y00 = *ySrcTop - 64;
+            y00 = *ySrcTop - _c16;
             ySrcTop += 1;
-            y01 = *ySrcTop - 64;
+            y01 = *ySrcTop - _c16;
             ySrcTop += 1;
-            y10 = *ySrcBot - 64;
+            y10 = *ySrcBot - _c16;
             ySrcBot += 1;
-            y11 = *ySrcBot - 64;
+            y11 = *ySrcBot - _c16;
             ySrcBot += 1;
 
-            int32_t u_b = u * mU_B;
-            int32_t u_g = u * mU_G;
-            int32_t v_g = v * mV_G;
-            int32_t v_r = v * mV_R;
+            int32_t u_b = u * _b_u;
+            int32_t u_g = u * _neg_g_u;
+            int32_t v_g = v * _neg_g_v;
+            int32_t v_r = v * _r_v;
 
             int32_t yMult, b, g, r;
-            yMult = y00 * mY;
+            yMult = y00 * _y + 512;
             b = (yMult + u_b) / 1024;
             g = (yMult + v_g + u_g) / 1024;
             r = (yMult + v_r) / 1024;
@@ -201,7 +293,7 @@
             r = CLIP3(0, r, 1023);
             *dstTop++ = 3 << 30 | (b << 20) | (g << 10) | r;
 
-            yMult = y01 * mY;
+            yMult = y01 * _y + 512;
             b = (yMult + u_b) / 1024;
             g = (yMult + v_g + u_g) / 1024;
             r = (yMult + v_r) / 1024;
@@ -210,7 +302,7 @@
             r = CLIP3(0, r, 1023);
             *dstTop++ = 3 << 30 | (b << 20) | (g << 10) | r;
 
-            yMult = y10 * mY;
+            yMult = y10 * _y + 512;
             b = (yMult + u_b) / 1024;
             g = (yMult + v_g + u_g) / 1024;
             r = (yMult + v_r) / 1024;
@@ -219,7 +311,7 @@
             r = CLIP3(0, r, 1023);
             *dstBot++ = 3 << 30 | (b << 20) | (g << 10) | r;
 
-            yMult = y11 * mY;
+            yMult = y11 * _y + 512;
             b = (yMult + u_b) / 1024;
             g = (yMult + v_g + u_g) / 1024;
             r = (yMult + v_r) / 1024;
@@ -236,19 +328,21 @@
     }
 }
 
-void convertYUV420Planar16ToY410OrRGBA1010102(uint32_t *dst, const uint16_t *srcY,
-                                              const uint16_t *srcU, const uint16_t *srcV,
-                                              size_t srcYStride, size_t srcUStride,
-                                              size_t srcVStride, size_t dstStride, size_t width,
-                                              size_t height) {
+void convertYUV420Planar16ToY410OrRGBA1010102(
+        uint32_t *dst, const uint16_t *srcY,
+        const uint16_t *srcU, const uint16_t *srcV,
+        size_t srcYStride, size_t srcUStride,
+        size_t srcVStride, size_t dstStride, size_t width, size_t height,
+        std::shared_ptr<const C2ColorAspectsStruct> aspects) {
     if (isAtLeastT()) {
         convertYUV420Planar16ToRGBA1010102(dst, srcY, srcU, srcV, srcYStride, srcUStride,
-                                           srcVStride, dstStride, width, height);
+                                           srcVStride, dstStride, width, height, aspects);
     } else {
         convertYUV420Planar16ToY410(dst, srcY, srcU, srcV, srcYStride, srcUStride, srcVStride,
                                     dstStride, width, height);
     }
 }
+
 void convertYUV420Planar16ToYV12(uint8_t *dstY, uint8_t *dstU, uint8_t *dstV, const uint16_t *srcY,
                                  const uint16_t *srcU, const uint16_t *srcV, size_t srcYStride,
                                  size_t srcUStride, size_t srcVStride, size_t dstYStride,
@@ -885,25 +979,14 @@
     // Save supported hal pixel formats for bit depth of 10, the first time this is called
     if (!mBitDepth10HalPixelFormats.size()) {
         std::vector<int> halPixelFormats;
-        if (isAtLeastT()) {
-            halPixelFormats.push_back(HAL_PIXEL_FORMAT_YCBCR_P010);
-        }
+        halPixelFormats.push_back(HAL_PIXEL_FORMAT_YCBCR_P010);
+
        // since allowRGBA1010102 can change in each call, but mBitDepth10HalPixelFormats
         // is populated only once, allowRGBA1010102 is not considered at this stage.
         halPixelFormats.push_back(HAL_PIXEL_FORMAT_RGBA_1010102);
 
         for (int halPixelFormat : halPixelFormats) {
-            std::shared_ptr<C2GraphicBlock> block;
-
-            uint32_t gpuConsumerFlags = halPixelFormat == HAL_PIXEL_FORMAT_RGBA_1010102
-                                                ? C2AndroidMemoryUsage::HW_TEXTURE_READ
-                                                : 0;
-            C2MemoryUsage usage = {C2MemoryUsage::CPU_READ | gpuConsumerFlags,
-                                   C2MemoryUsage::CPU_WRITE};
-            // TODO(b/214411172) Use AHardwareBuffer_isSupported once it supports P010
-            c2_status_t status =
-                    mOutputBlockPool->fetchGraphicBlock(320, 240, halPixelFormat, usage, &block);
-            if (status == C2_OK) {
+            if (isHalPixelFormatSupported((AHardwareBuffer_Format)halPixelFormat)) {
                 mBitDepth10HalPixelFormats.push_back(halPixelFormat);
             }
         }
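
The fixed-point tables in GetCoeffsForAspects() above can be reproduced from the K_R/K_B values quoted in its comments. A minimal derivation sketch for the limited-range BT.709 row, assuming the usual 10-bit limited ranges (luma 64..940, chroma 64..960), which is what the /1024 scaling and the _c16 = 64 offset imply:

#include <cmath>
#include <cstdio>

int main() {
    const double kR = 0.2126, kB = 0.0722, kG = 1.0 - kR - kB;  // BT.709
    const double yScale = 1023.0 / 876.0;  // stretch limited-range luma (940 - 64) to full scale
    const double cScale = 1023.0 / 896.0;  // stretch limited-range chroma (960 - 64) to full scale

    printf("_y   = %ld\n", std::lround(1024 * yScale));                           // 1196
    printf("_r_v = %ld\n", std::lround(1024 * 2 * (1 - kR) * cScale));            // 1841
    printf("_g_u = %ld\n", std::lround(1024 * 2 * (1 - kB) * kB / kG * cScale));  // 219
    printf("_g_v = %ld\n", std::lround(1024 * 2 * (1 - kR) * kR / kG * cScale));  // 547
    printf("_b_u = %ld\n", std::lround(1024 * 2 * (1 - kB) * cScale));            // 2169
    printf("_c16 = 64 (10-bit limited-range black level)\n");
    return 0;
}
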
diff --git a/media/codec2/components/base/include/SimpleC2Component.h b/media/codec2/components/base/include/SimpleC2Component.h
index 3172f29..7600c5b 100644
--- a/media/codec2/components/base/include/SimpleC2Component.h
+++ b/media/codec2/components/base/include/SimpleC2Component.h
@@ -26,28 +26,35 @@
 #include <media/stagefright/foundation/ALooper.h>
 #include <media/stagefright/foundation/Mutexed.h>
 
+struct C2ColorAspectsStruct;
+
 namespace android {
-bool isAtLeastT();
+
 void convertYUV420Planar8ToYV12(uint8_t *dstY, uint8_t *dstU, uint8_t *dstV, const uint8_t *srcY,
                                 const uint8_t *srcU, const uint8_t *srcV, size_t srcYStride,
                                 size_t srcUStride, size_t srcVStride, size_t dstYStride,
                                 size_t dstUVStride, uint32_t width, uint32_t height,
                                 bool isMonochrome = false);
-void convertYUV420Planar16ToY410OrRGBA1010102(uint32_t *dst, const uint16_t *srcY,
-                                              const uint16_t *srcU, const uint16_t *srcV,
-                                              size_t srcYStride, size_t srcUStride,
-                                              size_t srcVStride, size_t dstStride, size_t width,
-                                              size_t height);
+
+void convertYUV420Planar16ToY410OrRGBA1010102(
+        uint32_t *dst, const uint16_t *srcY,
+        const uint16_t *srcU, const uint16_t *srcV,
+        size_t srcYStride, size_t srcUStride,
+        size_t srcVStride, size_t dstStride, size_t width, size_t height,
+        std::shared_ptr<const C2ColorAspectsStruct> aspects = nullptr);
+
 void convertYUV420Planar16ToYV12(uint8_t *dstY, uint8_t *dstU, uint8_t *dstV, const uint16_t *srcY,
                                  const uint16_t *srcU, const uint16_t *srcV, size_t srcYStride,
                                  size_t srcUStride, size_t srcVStride, size_t dstYStride,
                                  size_t dstUVStride, size_t width, size_t height,
                                  bool isMonochrome = false);
+
 void convertYUV420Planar16ToP010(uint16_t *dstY, uint16_t *dstUV, const uint16_t *srcY,
                                  const uint16_t *srcU, const uint16_t *srcV, size_t srcYStride,
                                  size_t srcUStride, size_t srcVStride, size_t dstYStride,
                                  size_t dstUVStride, size_t width, size_t height,
                                  bool isMonochrome = false);
+
 class SimpleC2Component
         : public C2Component, public std::enable_shared_from_this<SimpleC2Component> {
 public:
diff --git a/media/codec2/components/gav1/C2SoftGav1Dec.cpp b/media/codec2/components/gav1/C2SoftGav1Dec.cpp
index a22c750..4dec57f 100644
--- a/media/codec2/components/gav1/C2SoftGav1Dec.cpp
+++ b/media/codec2/components/gav1/C2SoftGav1Dec.cpp
@@ -21,6 +21,7 @@
 #include <C2Debug.h>
 #include <C2PlatformSupport.h>
 #include <Codec2BufferUtils.h>
+#include <Codec2CommonUtils.h>
 #include <Codec2Mapper.h>
 #include <SimpleC2Interface.h>
 #include <log/log.h>
@@ -191,9 +192,14 @@
               .build());
 
     std::vector<uint32_t> pixelFormats = {HAL_PIXEL_FORMAT_YCBCR_420_888};
-    if (isAtLeastT()) {
+    if (isHalPixelFormatSupported((AHardwareBuffer_Format)HAL_PIXEL_FORMAT_YCBCR_P010)) {
         pixelFormats.push_back(HAL_PIXEL_FORMAT_YCBCR_P010);
     }
+    // If the surface color format isn't added to the supported formats, there is no way
+    // to know when the color format is configured to surface. This is necessary to be
+    // able to choose a 10-bit format while decoding 10-bit clips in surface mode.
+    pixelFormats.push_back(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED);
+
     // TODO: support more formats?
     addParameter(
             DefineParam(mPixelFormat, C2_PARAMKEY_PIXEL_FORMAT)
@@ -322,6 +328,9 @@
     return C2R::Ok();
   }
 
+  // unsafe getters
+  std::shared_ptr<C2StreamPixelFormatInfo::output> getPixelFormat_l() const { return mPixelFormat; }
+
  private:
   std::shared_ptr<C2StreamProfileLevelInfo::input> mProfileLevel;
   std::shared_ptr<C2StreamPictureSizeInfo::output> mSize;
@@ -410,6 +419,10 @@
   mSignalledError = false;
   mSignalledOutputEos = false;
   mHalPixelFormat = HAL_PIXEL_FORMAT_YV12;
+  {
+      IntfImpl::Lock lock = mIntf->lock();
+      mPixelFormatInfo = mIntf->getPixelFormat_l();
+  }
   mCodecCtx.reset(new libgav1::Decoder());
 
   if (mCodecCtx == nullptr) {
@@ -633,10 +646,10 @@
 
   std::shared_ptr<C2GraphicBlock> block;
   uint32_t format = HAL_PIXEL_FORMAT_YV12;
-  if (buffer->bitdepth == 10) {
+  std::shared_ptr<C2StreamColorAspectsInfo::output> codedColorAspects;
+  if (buffer->bitdepth == 10 && mPixelFormatInfo->value != HAL_PIXEL_FORMAT_YCBCR_420_888) {
     IntfImpl::Lock lock = mIntf->lock();
-    std::shared_ptr<C2StreamColorAspectsInfo::output> codedColorAspects =
-        mIntf->getColorAspects_l();
+    codedColorAspects = mIntf->getColorAspects_l();
     bool allowRGBA1010102 = false;
     if (codedColorAspects->primaries == C2Color::PRIMARIES_BT2020 &&
         codedColorAspects->matrix == C2Color::MATRIX_BT2020 &&
@@ -714,9 +727,11 @@
     const uint16_t *srcV = (const uint16_t *)buffer->plane[2];
 
     if (format == HAL_PIXEL_FORMAT_RGBA_1010102) {
-        convertYUV420Planar16ToY410OrRGBA1010102((uint32_t *)dstY, srcY, srcU, srcV, srcYStride / 2,
-                                                 srcUStride / 2, srcVStride / 2,
-                                                 dstYStride / sizeof(uint32_t), mWidth, mHeight);
+        convertYUV420Planar16ToY410OrRGBA1010102(
+                (uint32_t *)dstY, srcY, srcU, srcV, srcYStride / 2,
+                srcUStride / 2, srcVStride / 2,
+                dstYStride / sizeof(uint32_t), mWidth, mHeight,
+                std::static_pointer_cast<const C2ColorAspectsStruct>(codedColorAspects));
     } else if (format == HAL_PIXEL_FORMAT_YCBCR_P010) {
         convertYUV420Planar16ToP010((uint16_t *)dstY, (uint16_t *)dstU, srcY, srcU, srcV,
                                     srcYStride / 2, srcUStride / 2, srcVStride / 2, dstYStride / 2,
diff --git a/media/codec2/components/gav1/C2SoftGav1Dec.h b/media/codec2/components/gav1/C2SoftGav1Dec.h
index 4b13fef..3d4db55 100644
--- a/media/codec2/components/gav1/C2SoftGav1Dec.h
+++ b/media/codec2/components/gav1/C2SoftGav1Dec.h
@@ -51,6 +51,10 @@
   std::shared_ptr<IntfImpl> mIntf;
   std::unique_ptr<libgav1::Decoder> mCodecCtx;
 
+  // configurations used by the component during processing
+  // (TODO: keep this in intf but make them internal only)
+  std::shared_ptr<C2StreamPixelFormatInfo::output> mPixelFormatInfo;
+
   uint32_t mHalPixelFormat;
   uint32_t mWidth;
   uint32_t mHeight;
diff --git a/media/codec2/components/vpx/C2SoftVpxDec.cpp b/media/codec2/components/vpx/C2SoftVpxDec.cpp
index 5d7ef5c..18cd1bf 100644
--- a/media/codec2/components/vpx/C2SoftVpxDec.cpp
+++ b/media/codec2/components/vpx/C2SoftVpxDec.cpp
@@ -25,6 +25,7 @@
 #include <C2Debug.h>
 #include <C2PlatformSupport.h>
 #include <Codec2BufferUtils.h>
+#include <Codec2CommonUtils.h>
 #include <SimpleC2Interface.h>
 
 #include "C2SoftVpxDec.h"
@@ -219,9 +220,13 @@
         // TODO: support more formats?
         std::vector<uint32_t> pixelFormats = {HAL_PIXEL_FORMAT_YCBCR_420_888};
 #ifdef VP9
-        if (isAtLeastT()) {
+        if (isHalPixelFormatSupported((AHardwareBuffer_Format)HAL_PIXEL_FORMAT_YCBCR_P010)) {
             pixelFormats.push_back(HAL_PIXEL_FORMAT_YCBCR_P010);
         }
+        // If the surface color format isn't added to the supported formats, there is no
+        // way to know when the color format is configured to surface. This is necessary
+        // to be able to choose a 10-bit format while decoding 10-bit clips in surface mode.
+        pixelFormats.push_back(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED);
 #endif
         addParameter(
                 DefineParam(mPixelFormat, C2_PARAMKEY_PIXEL_FORMAT)
@@ -306,6 +311,11 @@
         return C2R::Ok();
     }
 
+    // unsafe getters
+    std::shared_ptr<C2StreamPixelFormatInfo::output> getPixelFormat_l() const {
+        return mPixelFormat;
+    }
+
 private:
     std::shared_ptr<C2StreamProfileLevelInfo::input> mProfileLevel;
     std::shared_ptr<C2StreamPictureSizeInfo::output> mSize;
@@ -433,6 +443,11 @@
     mMode = MODE_VP8;
 #endif
     mHalPixelFormat = HAL_PIXEL_FORMAT_YV12;
+    {
+        IntfImpl::Lock lock = mIntf->lock();
+        mPixelFormatInfo = mIntf->getPixelFormat_l();
+    }
+
     mWidth = 320;
     mHeight = 240;
     mFrameParallelMode = false;
@@ -687,9 +702,11 @@
 
     std::shared_ptr<C2GraphicBlock> block;
     uint32_t format = HAL_PIXEL_FORMAT_YV12;
-    if (img->fmt == VPX_IMG_FMT_I42016) {
+    std::shared_ptr<C2StreamColorAspectsTuning::output> defaultColorAspects;
+    if (img->fmt == VPX_IMG_FMT_I42016 &&
+            mPixelFormatInfo->value != HAL_PIXEL_FORMAT_YCBCR_420_888) {
         IntfImpl::Lock lock = mIntf->lock();
-        std::shared_ptr<C2StreamColorAspectsTuning::output> defaultColorAspects = mIntf->getDefaultColorAspects_l();
+        defaultColorAspects = mIntf->getDefaultColorAspects_l();
         bool allowRGBA1010102 = false;
         if (defaultColorAspects->primaries == C2Color::PRIMARIES_BT2020 &&
             defaultColorAspects->matrix == C2Color::MATRIX_BT2020 &&
@@ -764,11 +781,14 @@
                 queue->entries.push_back(
                         [dstY, srcY, srcU, srcV,
                          srcYStride, srcUStride, srcVStride, dstYStride,
-                         width = mWidth, height = std::min(mHeight - i, kHeight)] {
+                         width = mWidth, height = std::min(mHeight - i, kHeight),
+                         defaultColorAspects] {
                             convertYUV420Planar16ToY410OrRGBA1010102(
                                     (uint32_t *)dstY, srcY, srcU, srcV, srcYStride / 2,
                                     srcUStride / 2, srcVStride / 2, dstYStride / sizeof(uint32_t),
-                                    width, height);
+                                    width, height,
+                                    std::static_pointer_cast<const C2ColorAspectsStruct>(
+                                            defaultColorAspects));
                         });
                 srcY += srcYStride / 2 * kHeight;
                 srcU += srcUStride / 2 * (kHeight / 2);
diff --git a/media/codec2/components/vpx/C2SoftVpxDec.h b/media/codec2/components/vpx/C2SoftVpxDec.h
index 5564766..e9d6dc9 100644
--- a/media/codec2/components/vpx/C2SoftVpxDec.h
+++ b/media/codec2/components/vpx/C2SoftVpxDec.h
@@ -63,6 +63,10 @@
         std::shared_ptr<Mutexed<ConversionQueue>> mQueue;
     };
 
+    // configurations used by the component during processing
+    // (TODO: keep this in intf but make them internal only)
+    std::shared_ptr<C2StreamPixelFormatInfo::output> mPixelFormatInfo;
+
     std::shared_ptr<IntfImpl> mIntf;
     vpx_codec_ctx_t *mCodecCtx;
     bool mFrameParallelMode;  // Frame parallel is only supported by VP9 decoder.
diff --git a/media/codec2/core/include/C2Config.h b/media/codec2/core/include/C2Config.h
index 07c2864..6ff3dbc 100644
--- a/media/codec2/core/include/C2Config.h
+++ b/media/codec2/core/include/C2Config.h
@@ -60,6 +60,7 @@
     enum drc_effect_type_t : int32_t;       ///< DRC effect type
     enum drc_album_mode_t : int32_t;        ///< DRC album mode
     enum hdr_dynamic_metadata_type_t : uint32_t;  ///< HDR dynamic metadata type
+    enum hdr_format_t : uint32_t;           ///< HDR format
     enum intra_refresh_mode_t : uint32_t;   ///< intra refresh modes
     enum level_t : uint32_t;                ///< coding level
     enum ordinal_key_t : uint32_t;          ///< work ordering keys
@@ -78,6 +79,7 @@
 
 struct C2PlatformConfig {
     enum encoding_quality_level_t : uint32_t; ///< encoding quality level
+    enum tunnel_peek_mode_t: uint32_t;      ///< tunnel peek mode
 };
 
 namespace {
@@ -192,10 +194,9 @@
     kParamIndexPictureType,
     // deprecated
     kParamIndexHdr10PlusMetadata,
-
     kParamIndexPictureQuantization,
-
     kParamIndexHdrDynamicMetadata,
+    kParamIndexHdrFormat,
 
     /* ------------------------------------ video components ------------------------------------ */
 
@@ -280,6 +281,9 @@
 
     // channel mask for decoded audio
     kParamIndexAndroidChannelMask, // uint32
+
+    // allow tunnel peek behavior to be unspecified for app compatibility
+    kParamIndexTunnelPeekMode, // tunnel mode, enum
 };
 
 }
@@ -1667,6 +1671,34 @@
 constexpr char C2_PARAMKEY_INPUT_HDR_DYNAMIC_INFO[] = "input.hdr-dynamic-info";
 constexpr char C2_PARAMKEY_OUTPUT_HDR_DYNAMIC_INFO[] = "output.hdr-dynamic-info";
 
+/**
+ * HDR Format
+ */
+C2ENUM(C2Config::hdr_format_t, uint32_t,
+    UNKNOWN,     ///< HDR format not known (default)
+    SDR,         ///< not HDR (SDR)
+    HLG,         ///< HLG
+    HDR10,       ///< HDR10
+    HDR10_PLUS,  ///< HDR10+
+);
+
+/**
+ * HDR Format Info
+ *
+ * This information may be present during configuration to allow encoders to
+ * prepare encoding certain HDR formats. When this information is not present
+ * before start, encoders should determine the HDR format based on the available
+ * HDR metadata on the first input frame.
+ *
+ * While this information is optional, it is not a hint. When present, encoders
+ * that do not support dynamic reconfiguration do not need to switch to the HDR
+ * format based on the metadata on the first input frame.
+ */
+typedef C2StreamParam<C2Info, C2SimpleValueStruct<C2EasyEnum<C2Config::hdr_format_t>>,
+                kParamIndexHdrFormat>
+        C2StreamHdrFormatInfo;
+constexpr char C2_PARAMKEY_HDR_FORMAT[] = "coded.hdr-format";
+
 /* ------------------------------------ block-based coding ----------------------------------- */
 
 /**
@@ -2454,6 +2486,28 @@
         C2StreamTunnelStartRender;
 constexpr char C2_PARAMKEY_TUNNEL_START_RENDER[] = "output.tunnel-start-render";
 
+/** Tunnel Peek Mode. */
+C2ENUM(C2PlatformConfig::tunnel_peek_mode_t, uint32_t,
+    UNSPECIFIED_PEEK = 0,
+    SPECIFIED_PEEK = 1
+);
+
+/**
+ * Tunnel Peek Mode Tuning parameter.
+ *
+ * If set to UNSPECIFIED_PEEK, the decoder is free to ignore the
+ * C2StreamTunnelHoldRender and C2StreamTunnelStartRender flags and associated
+ * features. Additionally, it is up to the decoder whether to display any frames
+ * before receiving synchronization information.
+ *
+ * Note: This parameter allows a decoder to ignore the video peek machinery and
+ * to revert to its preferred behavior.
+ */
+typedef C2StreamParam<C2Tuning, C2EasyEnum<C2PlatformConfig::tunnel_peek_mode_t>,
+        kParamIndexTunnelPeekMode> C2StreamTunnelPeekModeTuning;
+constexpr char C2_PARAMKEY_TUNNEL_PEEK_MODE[] =
+        "output.tunnel-peek-mode";
+
 /**
  * Encoding quality level signaling.
  *
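
For a sense of how the two parameters defined above would be instantiated, here is a minimal sketch. The stream index 0u and the ::output direction follow the pattern of other stream params in this patch; the (stream, value) constructor form is assumed to match other simple-value stream params, so treat this as illustrative rather than normative:

#include <C2Config.h>

void sketchNewParams() {
    // Declare up front that the coded stream will be HDR10 so an encoder that
    // cannot reconfigure dynamically does not have to wait for frame metadata.
    C2StreamHdrFormatInfo::output hdrFormat(0u, C2Config::hdr_format_t::HDR10);

    // Let a tunneled decoder fall back to its preferred (legacy) peek behavior.
    C2StreamTunnelPeekModeTuning::output tunnelPeek(
            0u, C2PlatformConfig::UNSPECIFIED_PEEK);

    // Either would then be handed to the component through its normal config
    // path, e.g. as entries in a std::vector<C2Param*>.
    (void)hdrFormat;
    (void)tunnelPeek;
}
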
diff --git a/media/codec2/hidl/1.0/utils/types.cpp b/media/codec2/hidl/1.0/utils/types.cpp
index 72f7c43..5c24bd7 100644
--- a/media/codec2/hidl/1.0/utils/types.cpp
+++ b/media/codec2/hidl/1.0/utils/types.cpp
@@ -26,6 +26,7 @@
 #include <C2BlockInternal.h>
 #include <C2Buffer.h>
 #include <C2Component.h>
+#include <C2FenceFactory.h>
 #include <C2Param.h>
 #include <C2ParamInternal.h>
 #include <C2PlatformSupport.h>
@@ -759,17 +760,14 @@
 // Note: File descriptors are not duplicated. The original file descriptor must
 // not be closed before the transaction is complete.
 bool objcpy(hidl_handle* d, const C2Fence& s) {
-    (void)s; // TODO: implement s.fd()
-    int fenceFd = -1;
     d->setTo(nullptr);
-    if (fenceFd >= 0) {
-        native_handle_t *handle = native_handle_create(1, 0);
-        if (!handle) {
-            LOG(ERROR) << "Failed to create a native handle.";
-            return false;
-        }
-        handle->data[0] = fenceFd;
+    native_handle_t* handle = _C2FenceFactory::CreateNativeHandle(s);
+    if (handle) {
         d->setTo(handle, true /* owns */);
+//  } else if (!s.ready()) {
+//      // TODO: we should wait for unmarshallable fences but this may not be
+//      // the best place for it. We can safely ignore here as at this time
+//      // all fences used here are marshallable.
     }
     return true;
 }
@@ -1184,9 +1182,8 @@
 // Note: File descriptors are not duplicated. The original file descriptor must
 // not be closed before the transaction is complete.
 bool objcpy(C2Fence* d, const hidl_handle& s) {
-    // TODO: Implement.
-    (void)s;
-    *d = C2Fence();
+    const native_handle_t* handle = s.getNativeHandle();
+    *d = _C2FenceFactory::CreateFromNativeHandle(handle);
     return true;
 }
 
diff --git a/media/codec2/sfplugin/Android.bp b/media/codec2/sfplugin/Android.bp
index c36ae94..5a652a3 100644
--- a/media/codec2/sfplugin/Android.bp
+++ b/media/codec2/sfplugin/Android.bp
@@ -43,6 +43,10 @@
         "media_ndk_headers",
     ],
 
+    static_libs: [
+        "SurfaceFlingerProperties",
+    ],
+
     shared_libs: [
         "android.hardware.cas.native@1.0",
         "android.hardware.drm@1.0",
diff --git a/media/codec2/sfplugin/CCodec.cpp b/media/codec2/sfplugin/CCodec.cpp
index 19bb206..529ee36 100644
--- a/media/codec2/sfplugin/CCodec.cpp
+++ b/media/codec2/sfplugin/CCodec.cpp
@@ -1435,7 +1435,7 @@
                 int64_t blockUsage =
                     usage.value | C2MemoryUsage::CPU_READ | C2MemoryUsage::CPU_WRITE;
                 std::shared_ptr<C2GraphicBlock> block = FetchGraphicBlock(
-                        width, height, pixelFormat, blockUsage, {comp->getName()});
+                        width, height, componentColorFormat, blockUsage, {comp->getName()});
                 sp<GraphicBlockBuffer> buffer;
                 if (block) {
                     buffer = GraphicBlockBuffer::Allocate(
@@ -1826,13 +1826,20 @@
         return;
     }
 
+    // Preparation of input buffers may not succeed due to a lack of memory;
+    // returning the correct error code (NO_MEMORY) allows MediaCodec to try to
+    // reclaim resources and restart the codec gracefully.
+    std::map<size_t, sp<MediaCodecBuffer>> clientInputBuffers;
+    err2 = mChannel->prepareInitialInputBuffers(&clientInputBuffers);
+    if (err2 != OK) {
+        ALOGE("Initial preparation for Input Buffers failed");
+        mCallback->onError(err2, ACTION_CODE_FATAL);
+        return;
+    }
+
     mCallback->onStartCompleted();
 
-    err2 = mChannel->requestInitialInputBuffers();
-    if (err2 != OK) {
-        ALOGE("Initial request for Input Buffers failed");
-        mCallback->onError(err2, ACTION_CODE_FATAL);
-    }
+    mChannel->requestInitialInputBuffers(std::move(clientInputBuffers));
 }
 
 void CCodec::initiateShutdown(bool keepComponentAllocated) {
@@ -2126,11 +2133,14 @@
         state->set(RUNNING);
     }
 
-    status_t err = mChannel->requestInitialInputBuffers();
+    std::map<size_t, sp<MediaCodecBuffer>> clientInputBuffers;
+    status_t err = mChannel->prepareInitialInputBuffers(&clientInputBuffers);
     if (err != OK) {
         ALOGE("Resume request for Input Buffers failed");
         mCallback->onError(err, ACTION_CODE_FATAL);
+        return;
     }
+    mChannel->requestInitialInputBuffers(std::move(clientInputBuffers));
 }
 
 void CCodec::signalSetParameters(const sp<AMessage> &msg) {
@@ -2415,7 +2425,8 @@
                         C2StreamColorAspectsInfo::output::PARAM_TYPE,
                         C2StreamDataSpaceInfo::output::PARAM_TYPE,
                         C2StreamHdrStaticInfo::output::PARAM_TYPE,
-                        C2StreamHdr10PlusInfo::output::PARAM_TYPE,
+                        C2StreamHdr10PlusInfo::output::PARAM_TYPE,  // will be deprecated
+                        C2StreamHdrDynamicMetadataInfo::output::PARAM_TYPE,
                         C2StreamPixelAspectRatioInfo::output::PARAM_TYPE,
                         C2StreamSurfaceScalingInfo::output::PARAM_TYPE
                     };
diff --git a/media/codec2/sfplugin/CCodecBufferChannel.cpp b/media/codec2/sfplugin/CCodecBufferChannel.cpp
index 159e885..f27cc21 100644
--- a/media/codec2/sfplugin/CCodecBufferChannel.cpp
+++ b/media/codec2/sfplugin/CCodecBufferChannel.cpp
@@ -432,6 +432,10 @@
     for (size_t i = 0; i < numSubSamples; ++i) {
         size += subSamples[i].mNumBytesOfClearData + subSamples[i].mNumBytesOfEncryptedData;
     }
+    if (size == 0) {
+        buffer->setRange(0, 0);
+        return OK;
+    }
     std::shared_ptr<C2BlockPool> pool = mBlockPools.lock()->inputPool;
     std::shared_ptr<C2LinearBlock> block;
     c2_status_t err = pool->fetchLinearBlock(
@@ -439,6 +443,8 @@
             secure ? kSecureUsage : kDefaultReadWriteUsage,
             &block);
     if (err != C2_OK) {
+        ALOGI("[%s] attachEncryptedBuffer: fetchLinearBlock failed: size = %zu (%s) err = %d",
+              mName, size, secure ? "secure" : "non-secure", err);
         return NO_MEMORY;
     }
     if (!secure) {
@@ -463,18 +469,9 @@
                 key, iv, mode, pattern, src, 0, subSamples, numSubSamples,
                 dst, &errorDetailMsg);
         if (result < 0) {
+            ALOGI("[%s] attachEncryptedBuffer: decrypt failed: result = %zd", mName, result);
             return result;
         }
-        if (dst.type == DrmBufferType::SHARED_MEMORY) {
-            C2WriteView view = block->map().get();
-            if (view.error() != C2_OK) {
-                return false;
-            }
-            if (view.size() < result) {
-                return false;
-            }
-            memcpy(view.data(), mDecryptDestination->unsecurePointer(), result);
-        }
     } else {
         // Here we cast CryptoPlugin::SubSample to hardware::cas::native::V1_0::SubSample
         // directly, the structure definitions should match as checked in DescramblerImpl.cpp.
@@ -523,16 +520,22 @@
         }
 
         if (result < codecDataOffset) {
-            ALOGD("invalid codec data offset: %zd, result %zd", codecDataOffset, result);
+            ALOGD("[%s] invalid codec data offset: %zd, result %zd",
+                  mName, codecDataOffset, result);
             return BAD_VALUE;
         }
     }
     if (!secure) {
         C2WriteView view = block->map().get();
         if (view.error() != C2_OK) {
+            ALOGI("[%s] attachEncryptedBuffer: block map error: %d (non-secure)",
+                  mName, view.error());
             return UNKNOWN_ERROR;
         }
         if (view.size() < result) {
+            ALOGI("[%s] attachEncryptedBuffer: block size too small: size=%u result=%zd "
+                  "(non-secure)",
+                  mName, view.size(), result);
             return UNKNOWN_ERROR;
         }
         memcpy(view.data(), mDecryptDestination->unsecurePointer(), result);
@@ -540,6 +543,7 @@
     std::shared_ptr<C2Buffer> c2Buffer{C2Buffer::CreateLinearBuffer(
             block->share(codecDataOffset, result - codecDataOffset, C2Fence{}))};
     if (!buffer->copy(c2Buffer)) {
+        ALOGI("[%s] attachEncryptedBuffer: buffer copy failed", mName);
         return -ENOSYS;
     }
     return OK;
@@ -880,6 +884,19 @@
         return UNKNOWN_ERROR;
     }
     const C2ConstGraphicBlock &block = blocks.front();
+    C2Fence c2fence = block.fence();
+    sp<Fence> fence = Fence::NO_FENCE;
+    // TODO: it's not sufficient to just check isHW() and then construct an android::Fence from it.
+    // Once C2Fence::type() is added, check the exact C2Fence type.
+    if (c2fence.isHW()) {
+        int fenceFd = c2fence.fd();
+        fence = sp<Fence>::make(fenceFd);
+        if (!fence) {
+            ALOGE("[%s] Failed to allocate a fence", mName);
+            close(fenceFd);
+            return NO_MEMORY;
+        }
+    }
 
     // TODO: revisit this after C2Fence implementation.
     android::IGraphicBufferProducer::QueueBufferInput qbi(
@@ -892,7 +909,7 @@
                  blocks.front().crop().bottom()),
             videoScalingMode,
             transform,
-            Fence::NO_FENCE, 0);
+            fence, 0);
     if (hdrStaticInfo || hdrDynamicInfo) {
         HdrMetadata hdr;
         if (hdrStaticInfo) {
@@ -1457,54 +1474,47 @@
     return OK;
 }
 
-status_t CCodecBufferChannel::requestInitialInputBuffers() {
+status_t CCodecBufferChannel::prepareInitialInputBuffers(
+        std::map<size_t, sp<MediaCodecBuffer>> *clientInputBuffers) {
     if (mInputSurface) {
         return OK;
     }
 
+    size_t numInputSlots = mInput.lock()->numSlots;
+
+    {
+        Mutexed<Input>::Locked input(mInput);
+        while (clientInputBuffers->size() < numInputSlots) {
+            size_t index;
+            sp<MediaCodecBuffer> buffer;
+            if (!input->buffers->requestNewBuffer(&index, &buffer)) {
+                break;
+            }
+            clientInputBuffers->emplace(index, buffer);
+        }
+    }
+    if (clientInputBuffers->empty()) {
+        ALOGW("[%s] start: cannot allocate memory at all", mName);
+        return NO_MEMORY;
+    } else if (clientInputBuffers->size() < numInputSlots) {
+        ALOGD("[%s] start: cannot allocate memory for all slots, "
+              "only %zu buffers allocated",
+              mName, clientInputBuffers->size());
+    } else {
+        ALOGV("[%s] %zu initial input buffers available",
+              mName, clientInputBuffers->size());
+    }
+    return OK;
+}
+
+status_t CCodecBufferChannel::requestInitialInputBuffers(
+        std::map<size_t, sp<MediaCodecBuffer>> &&clientInputBuffers) {
     C2StreamBufferTypeSetting::output oStreamFormat(0u);
     C2PrependHeaderModeSetting prepend(PREPEND_HEADER_TO_NONE);
     c2_status_t err = mComponent->query({ &oStreamFormat, &prepend }, {}, C2_DONT_BLOCK, nullptr);
     if (err != C2_OK && err != C2_BAD_INDEX) {
         return UNKNOWN_ERROR;
     }
-    size_t numInputSlots = mInput.lock()->numSlots;
-
-    struct ClientInputBuffer {
-        size_t index;
-        sp<MediaCodecBuffer> buffer;
-        size_t capacity;
-    };
-    std::list<ClientInputBuffer> clientInputBuffers;
-
-    {
-        Mutexed<Input>::Locked input(mInput);
-        while (clientInputBuffers.size() < numInputSlots) {
-            ClientInputBuffer clientInputBuffer;
-            if (!input->buffers->requestNewBuffer(&clientInputBuffer.index,
-                                                  &clientInputBuffer.buffer)) {
-                break;
-            }
-            clientInputBuffer.capacity = clientInputBuffer.buffer->capacity();
-            clientInputBuffers.emplace_back(std::move(clientInputBuffer));
-        }
-    }
-    if (clientInputBuffers.empty()) {
-        ALOGW("[%s] start: cannot allocate memory at all", mName);
-        return NO_MEMORY;
-    } else if (clientInputBuffers.size() < numInputSlots) {
-        ALOGD("[%s] start: cannot allocate memory for all slots, "
-              "only %zu buffers allocated",
-              mName, clientInputBuffers.size());
-    } else {
-        ALOGV("[%s] %zu initial input buffers available",
-              mName, clientInputBuffers.size());
-    }
-    // Sort input buffers by their capacities in increasing order.
-    clientInputBuffers.sort(
-            [](const ClientInputBuffer& a, const ClientInputBuffer& b) {
-                return a.capacity < b.capacity;
-            });
 
     std::list<std::unique_ptr<C2Work>> flushedConfigs;
     mFlushedConfigs.lock()->swap(flushedConfigs);
@@ -1526,25 +1536,31 @@
         }
     }
     if (oStreamFormat.value == C2BufferData::LINEAR &&
-            (!prepend || prepend.value == PREPEND_HEADER_TO_NONE)) {
-        sp<MediaCodecBuffer> buffer = clientInputBuffers.front().buffer;
+            (!prepend || prepend.value == PREPEND_HEADER_TO_NONE) &&
+            !clientInputBuffers.empty()) {
+        size_t minIndex = clientInputBuffers.begin()->first;
+        sp<MediaCodecBuffer> minBuffer = clientInputBuffers.begin()->second;
+        for (const auto &[index, buffer] : clientInputBuffers) {
+            if (minBuffer->capacity() > buffer->capacity()) {
+                minIndex = index;
+                minBuffer = buffer;
+            }
+        }
         // WORKAROUND: Some apps expect CSD available without queueing
         //             any input. Queue an empty buffer to get the CSD.
-        buffer->setRange(0, 0);
-        buffer->meta()->clear();
-        buffer->meta()->setInt64("timeUs", 0);
-        if (queueInputBufferInternal(buffer) != OK) {
+        minBuffer->setRange(0, 0);
+        minBuffer->meta()->clear();
+        minBuffer->meta()->setInt64("timeUs", 0);
+        if (queueInputBufferInternal(minBuffer) != OK) {
             ALOGW("[%s] Error while queueing an empty buffer to get CSD",
                   mName);
             return UNKNOWN_ERROR;
         }
-        clientInputBuffers.pop_front();
+        clientInputBuffers.erase(minIndex);
     }
 
-    for (const ClientInputBuffer& clientInputBuffer: clientInputBuffers) {
-        mCallback->onInputBufferAvailable(
-                clientInputBuffer.index,
-                clientInputBuffer.buffer);
+    for (const auto &[index, buffer] : clientInputBuffers) {
+        mCallback->onInputBufferAvailable(index, buffer);
     }
 
     return OK;
diff --git a/media/codec2/sfplugin/CCodecBufferChannel.h b/media/codec2/sfplugin/CCodecBufferChannel.h
index b3a5f4b..f29a225 100644
--- a/media/codec2/sfplugin/CCodecBufferChannel.h
+++ b/media/codec2/sfplugin/CCodecBufferChannel.h
@@ -130,9 +130,23 @@
             bool buffersBoundToCodec);
 
     /**
-     * Request initial input buffers to be filled by client.
+     * Prepare initial input buffers to be filled by client.
+     *
+     * \param clientInputBuffers[out]   pointer to slot index -> buffer map.
+     *                                  On success, it contains prepared
+     *                                  initial input buffers.
      */
-    status_t requestInitialInputBuffers();
+    status_t prepareInitialInputBuffers(
+            std::map<size_t, sp<MediaCodecBuffer>> *clientInputBuffers);
+
+    /**
+     * Request initial input buffers as prepared in clientInputBuffers.
+     *
+     * \param clientInputBuffers[in]    slot index -> buffer map with prepared
+     *                                  initial input buffers.
+     */
+    status_t requestInitialInputBuffers(
+            std::map<size_t, sp<MediaCodecBuffer>> &&clientInputBuffers);
 
     /**
      * Stop queueing buffers to the component. This object should never queue
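
The intended call pattern for the split above is the one CCodec::start() adopts earlier in this patch; repeated here as a short fragment (not standalone: channel and callback stand in for the CCodecBufferChannel and its client callback):

std::map<size_t, sp<MediaCodecBuffer>> clientInputBuffers;
status_t err = channel->prepareInitialInputBuffers(&clientInputBuffers);
if (err != OK) {
    // preparation failed up front (typically NO_MEMORY), so the codec can be
    // reclaimed and restarted instead of silently starting without input buffers
    callback->onError(err, ACTION_CODE_FATAL);
    return;
}
// only after successful preparation are the prepared buffers handed out
channel->requestInitialInputBuffers(std::move(clientInputBuffers));
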
diff --git a/media/codec2/sfplugin/CCodecBuffers.cpp b/media/codec2/sfplugin/CCodecBuffers.cpp
index 57c70c1..0f4a8d8 100644
--- a/media/codec2/sfplugin/CCodecBuffers.cpp
+++ b/media/codec2/sfplugin/CCodecBuffers.cpp
@@ -208,7 +208,7 @@
 
 bool OutputBuffers::convert(
         const std::shared_ptr<C2Buffer> &src, sp<Codec2Buffer> *dst) {
-    if (!src || src->data().type() != C2BufferData::LINEAR) {
+    if (src && src->data().type() != C2BufferData::LINEAR) {
         return false;
     }
     int32_t configEncoding = kAudioEncodingPcm16bit;
@@ -237,7 +237,12 @@
     if (!mDataConverter) {
         return false;
     }
-    sp<MediaCodecBuffer> srcBuffer = ConstLinearBlockBuffer::Allocate(mFormat, src);
+    sp<MediaCodecBuffer> srcBuffer;
+    if (src) {
+        srcBuffer = ConstLinearBlockBuffer::Allocate(mFormat, src);
+    } else {
+        srcBuffer = new MediaCodecBuffer(mFormat, new ABuffer(0));
+    }
     if (!srcBuffer) {
         return false;
     }
@@ -1259,8 +1264,8 @@
         if (newBuffer == nullptr) {
             return NO_MEMORY;
         }
+        newBuffer->setFormat(mFormat);
     }
-    newBuffer->setFormat(mFormat);
     *index = mImpl.assignSlot(newBuffer);
     handleImageData(newBuffer);
     *clientBuffer = newBuffer;
diff --git a/media/codec2/sfplugin/CCodecConfig.cpp b/media/codec2/sfplugin/CCodecConfig.cpp
index 3c41949..b7ee2f0 100644
--- a/media/codec2/sfplugin/CCodecConfig.cpp
+++ b/media/codec2/sfplugin/CCodecConfig.cpp
@@ -400,10 +400,10 @@
     // Rotation
     // Note: SDK rotation is clock-wise, while C2 rotation is counter-clock-wise
     add(ConfigMapper(KEY_ROTATION, C2_PARAMKEY_VUI_ROTATION, "value")
-        .limitTo(D::VIDEO & D::CODED)
+        .limitTo((D::VIDEO | D::IMAGE) & D::CODED)
         .withMappers(negate, negate));
     add(ConfigMapper(KEY_ROTATION, C2_PARAMKEY_ROTATION, "value")
-        .limitTo(D::VIDEO & D::RAW)
+        .limitTo((D::VIDEO | D::IMAGE) & D::RAW)
         .withMappers(negate, negate));
 
     // android 'video-scaling'
@@ -513,6 +513,9 @@
     add(ConfigMapper("cta861.max-fall", C2_PARAMKEY_HDR_STATIC_INFO, "max-fall")
         .limitTo((D::VIDEO | D::IMAGE) & D::RAW));
 
+    add(ConfigMapper(C2_PARAMKEY_HDR_FORMAT, C2_PARAMKEY_HDR_FORMAT, "value")
+        .limitTo((D::VIDEO | D::IMAGE) & D::CODED & D::CONFIG));
+
     add(ConfigMapper(std::string(KEY_FEATURE_) + FEATURE_SecurePlayback,
                      C2_PARAMKEY_SECURE_MODE, "value"));
 
@@ -612,10 +615,30 @@
     add(ConfigMapper("csd-0",           C2_PARAMKEY_INIT_DATA,       "value")
         .limitTo(D::OUTPUT & D::READ));
 
-    add(ConfigMapper(KEY_HDR10_PLUS_INFO, C2_PARAMKEY_INPUT_HDR10_PLUS_INFO, "value")
+    deprecated(ConfigMapper(KEY_HDR10_PLUS_INFO, C2_PARAMKEY_INPUT_HDR10_PLUS_INFO, "value")
         .limitTo(D::VIDEO & D::PARAM & D::INPUT));
 
-    add(ConfigMapper(KEY_HDR10_PLUS_INFO, C2_PARAMKEY_OUTPUT_HDR10_PLUS_INFO, "value")
+    deprecated(ConfigMapper(KEY_HDR10_PLUS_INFO, C2_PARAMKEY_OUTPUT_HDR10_PLUS_INFO, "value")
+        .limitTo(D::VIDEO & D::OUTPUT));
+
+    add(ConfigMapper(
+            std::string(C2_PARAMKEY_INPUT_HDR_DYNAMIC_INFO) + ".type",
+            C2_PARAMKEY_INPUT_HDR_DYNAMIC_INFO, "type")
+        .limitTo(D::VIDEO & D::PARAM & D::INPUT));
+
+    add(ConfigMapper(
+            std::string(C2_PARAMKEY_INPUT_HDR_DYNAMIC_INFO) + ".data",
+            C2_PARAMKEY_INPUT_HDR_DYNAMIC_INFO, "data")
+        .limitTo(D::VIDEO & D::PARAM & D::INPUT));
+
+    add(ConfigMapper(
+            std::string(C2_PARAMKEY_OUTPUT_HDR_DYNAMIC_INFO) + ".type",
+            C2_PARAMKEY_OUTPUT_HDR_DYNAMIC_INFO, "type")
+        .limitTo(D::VIDEO & D::OUTPUT));
+
+    add(ConfigMapper(
+            std::string(C2_PARAMKEY_OUTPUT_HDR_DYNAMIC_INFO) + ".data",
+            C2_PARAMKEY_OUTPUT_HDR_DYNAMIC_INFO, "data")
         .limitTo(D::VIDEO & D::OUTPUT));
 
     add(ConfigMapper(C2_PARAMKEY_TEMPORAL_LAYERING, C2_PARAMKEY_TEMPORAL_LAYERING, "")
@@ -962,6 +985,16 @@
             return value == 0 ? C2_FALSE : C2_TRUE;
         }));
 
+    add(ConfigMapper("android._tunnel-peek-set-legacy", C2_PARAMKEY_TUNNEL_PEEK_MODE, "value")
+        .limitTo(D::PARAM & D::VIDEO & D::DECODER)
+        .withMapper([](C2Value v) -> C2Value {
+          int32_t value = 0;
+          (void)v.get(&value);
+          return value == 0
+              ? C2Value(C2PlatformConfig::SPECIFIED_PEEK)
+              : C2Value(C2PlatformConfig::UNSPECIFIED_PEEK);
+        }));
+
     add(ConfigMapper(KEY_VIDEO_QP_AVERAGE, C2_PARAMKEY_AVERAGE_QP, "value")
         .limitTo(D::ENCODER & D::VIDEO & D::READ));
 
@@ -1557,6 +1590,22 @@
             msg->removeEntryAt(msg->findEntryByName("cta861.max-cll"));
             msg->removeEntryAt(msg->findEntryByName("cta861.max-fall"));
         }
+
+        // HDR dynamic info
+        std::string keyPrefix = input ? C2_PARAMKEY_INPUT_HDR_DYNAMIC_INFO
+                                      : C2_PARAMKEY_OUTPUT_HDR_DYNAMIC_INFO;
+        std::string typeKey = keyPrefix + ".type";
+        std::string dataKey = keyPrefix + ".data";
+        int32_t type;
+        sp<ABuffer> data;
+        if (msg->findInt32(typeKey.c_str(), &type)
+                && msg->findBuffer(dataKey.c_str(), &data)) {
+            if (type == HDR_DYNAMIC_METADATA_TYPE_SMPTE_2094_40) {
+                msg->setBuffer(KEY_HDR10_PLUS_INFO, data);
+                msg->removeEntryAt(msg->findEntryByName(typeKey.c_str()));
+                msg->removeEntryAt(msg->findEntryByName(dataKey.c_str()));
+            }
+        }
     }
 
     ALOGV("converted to SDK values as %s", msg->debugString().c_str());
@@ -1640,6 +1689,27 @@
                 params->setFloat(C2_PARAMKEY_INPUT_TIME_STRETCH, captureRate / frameRate);
             }
         }
+
+        // add HDR format for video encoding
+        if (configDomain == IS_CONFIG) {
+            // don't assume here that transfer is set for HDR, only require it for HLG
+            int transfer = 0;
+            params->findInt32(KEY_COLOR_TRANSFER, &transfer);
+
+            int profile;
+            if (params->findInt32(KEY_PROFILE, &profile)) {
+                std::shared_ptr<C2Mapper::ProfileLevelMapper> mapper =
+                    C2Mapper::GetProfileLevelMapper(mCodingMediaType);
+                C2Config::hdr_format_t c2 = C2Config::hdr_format_t::UNKNOWN;
+                if (mapper && mapper->mapHdrFormat(profile, &c2)) {
+                    if (c2 == C2Config::hdr_format_t::HLG &&
+                        transfer != COLOR_TRANSFER_HLG) {
+                        c2 = C2Config::hdr_format_t::UNKNOWN;
+                    }
+                    params->setInt32(C2_PARAMKEY_HDR_FORMAT, c2);
+                }
+            }
+        }
     }
 
     {   // reflect temporal layering into a binary blob
@@ -1737,6 +1807,16 @@
                 params->setFloat("cta861.max-fall", meta->sType1.mMaxFrameAverageLightLevel);
             }
         }
+
+        sp<ABuffer> hdrDynamicInfo;
+        if (params->findBuffer(KEY_HDR10_PLUS_INFO, &hdrDynamicInfo)) {
+            for (const std::string &prefix : { C2_PARAMKEY_INPUT_HDR_DYNAMIC_INFO,
+                                               C2_PARAMKEY_OUTPUT_HDR_DYNAMIC_INFO }) {
+                params->setInt32((prefix + ".type").c_str(),
+                                 HDR_DYNAMIC_METADATA_TYPE_SMPTE_2094_40);
+                params->setBuffer((prefix + ".data").c_str(), hdrDynamicInfo);
+            }
+        }
     }
 
     // this is to verify that we set proper signedness for standard parameters
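As a concrete illustration of the encoder branch above (hedged, based only on the mappings in this change): configuring KEY_PROFILE to HEVCProfileMain10 with KEY_COLOR_TRANSFER set to COLOR_TRANSFER_HLG records C2_PARAMKEY_HDR_FORMAT as HLG, while the same profile without an HLG transfer records it as UNKNOWN, because mapHdrFormat() (added in Codec2Mapper.cpp below) reports HLG for all 10-bit profiles and the transfer check here downgrades it.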
diff --git a/media/codec2/sfplugin/Codec2Buffer.cpp b/media/codec2/sfplugin/Codec2Buffer.cpp
index c2405e8..6084ee3 100644
--- a/media/codec2/sfplugin/Codec2Buffer.cpp
+++ b/media/codec2/sfplugin/Codec2Buffer.cpp
@@ -1178,9 +1178,6 @@
     }
     if (dynamicInfo && *dynamicInfo && dynamicInfo->flexCount() > 0) {
         ALOGV("Setting dynamic HDR info as gralloc4 metadata");
-        hidl_vec<uint8_t> vec;
-        vec.resize(dynamicInfo->flexCount());
-        memcpy(vec.data(), dynamicInfo->m.data, dynamicInfo->flexCount());
         std::optional<IMapper4::MetadataType> metadataType;
         switch (dynamicInfo->m.type_) {
         case C2Config::HDR_DYNAMIC_METADATA_TYPE_SMPTE_2094_10:
@@ -1190,12 +1187,20 @@
             metadataType = MetadataType_Smpte2094_40;
             break;
         }
+
         if (metadataType) {
-            Return<Error4> ret = mapper->set(buffer.get(), *metadataType, vec);
-            if (!ret.isOk()) {
-                err = C2_REFUSED;
-            } else if (ret != Error4::NONE) {
-                err = C2_CORRUPTED;
+            std::vector<uint8_t> smpte2094_40;
+            smpte2094_40.resize(dynamicInfo->flexCount());
+            memcpy(smpte2094_40.data(), dynamicInfo->m.data, dynamicInfo->flexCount());
+
+            hidl_vec<uint8_t> vec;
+            if (gralloc4::encodeSmpte2094_40({ smpte2094_40 }, &vec) == OK) {
+                Return<Error4> ret = mapper->set(buffer.get(), *metadataType, vec);
+                if (!ret.isOk()) {
+                    err = C2_REFUSED;
+                } else if (ret != Error4::NONE) {
+                    err = C2_CORRUPTED;
+                }
             }
         } else {
             err = C2_BAD_VALUE;
diff --git a/media/codec2/sfplugin/Codec2InfoBuilder.cpp b/media/codec2/sfplugin/Codec2InfoBuilder.cpp
index 2b8a160..1c362ae 100644
--- a/media/codec2/sfplugin/Codec2InfoBuilder.cpp
+++ b/media/codec2/sfplugin/Codec2InfoBuilder.cpp
@@ -33,6 +33,7 @@
 #include <OMX_Video.h>
 #include <OMX_VideoExt.h>
 #include <OMX_AsString.h>
+#include <SurfaceFlingerProperties.sysprop.h>
 
 #include <android/hardware/media/omx/1.0/IOmx.h>
 #include <android/hardware/media/omx/1.0/IOmxObserver.h>
@@ -136,7 +137,9 @@
                 continue;
             }
             switch (type.coreIndex()) {
-            case C2StreamHdr10PlusInfo::CORE_INDEX:
+            case C2StreamHdrDynamicMetadataInfo::CORE_INDEX:
+                [[fallthrough]];
+            case C2StreamHdr10PlusInfo::CORE_INDEX:  // will be deprecated
                 supportsHdr10Plus = true;
                 break;
             case C2StreamHdrStaticInfo::CORE_INDEX:
@@ -148,14 +151,21 @@
         }
     }
 
-    // For VP9/AV1, the static info is always propagated by framework.
+    // VP9 does not support HDR metadata in the bitstream and static metadata
+    // can always be carried by the framework. (The framework does not propagate
+    // dynamic metadata as that needs to be frame accurate.)
     supportsHdr |= (mediaType == MIMETYPE_VIDEO_VP9);
-    supportsHdr |= (mediaType == MIMETYPE_VIDEO_AV1);
 
     // HDR support implies 10-bit support.
     // TODO: directly check this from the component interface
     supports10Bit = (supportsHdr || supportsHdr10Plus);
 
+    // If the device doesn't support HDR display, then no codec on the device
+    // can advertise support for HDR profiles.
+    // Default to true to maintain backward compatibility
+    auto ret = sysprop::SurfaceFlingerProperties::has_HDR_display();
+    bool hasHDRDisplay = ret.has_value() ? *ret : true;
+
     bool added = false;
 
     for (C2Value::Primitive profile : profileQuery[0].values.values) {
@@ -181,8 +191,8 @@
         if (mapper && mapper->mapProfile(pl.profile, &sdkProfile)
                 && mapper->mapLevel(pl.level, &sdkLevel)) {
             caps->addProfileLevel((uint32_t)sdkProfile, (uint32_t)sdkLevel);
-            // also list HDR profiles if component supports HDR
-            if (supportsHdr) {
+            // also list HDR profiles if component supports HDR and device has HDR display
+            if (supportsHdr && hasHDRDisplay) {
                 auto hdrMapper = C2Mapper::GetHdrProfileLevelMapper(trait.mediaType);
                 if (hdrMapper && hdrMapper->mapProfile(pl.profile, &sdkProfile)) {
                     caps->addProfileLevel((uint32_t)sdkProfile, (uint32_t)sdkLevel);
@@ -299,8 +309,13 @@
         if (trait.name.find("android") != std::string::npos) {
             addDefaultColorFormat(COLOR_FormatSurface);
         }
-        for (int32_t colorFormat : supportedColorFormats) {
-            caps->addColorFormat(colorFormat);
+
+        static const int kVendorSdkVersion = ::android::base::GetIntProperty(
+                "ro.vendor.build.version.sdk", android_get_device_api_level());
+        if (kVendorSdkVersion >= __ANDROID_API_T__) {
+            for (int32_t colorFormat : supportedColorFormats) {
+                caps->addColorFormat(colorFormat);
+            }
         }
     }
 }
diff --git a/media/codec2/sfplugin/tests/CCodecBuffers_test.cpp b/media/codec2/sfplugin/tests/CCodecBuffers_test.cpp
index 41e4fff..a471291 100644
--- a/media/codec2/sfplugin/tests/CCodecBuffers_test.cpp
+++ b/media/codec2/sfplugin/tests/CCodecBuffers_test.cpp
@@ -861,4 +861,57 @@
                     + std::to_string(std::get<3>(info.param));
         });
 
+TEST(LinearOutputBuffersTest, PcmConvertFormat) {
+    // Prepare LinearOutputBuffers
+    std::shared_ptr<LinearOutputBuffers> buffers =
+        std::make_shared<LinearOutputBuffers>("test");
+    sp<AMessage> format{new AMessage};
+    format->setInt32(KEY_CHANNEL_COUNT, 1);
+    format->setInt32(KEY_SAMPLE_RATE, 8000);
+    format->setInt32(KEY_PCM_ENCODING, kAudioEncodingPcmFloat);
+    format->setInt32("android._config-pcm-encoding", kAudioEncodingPcm16bit);
+    format->setInt32("android._codec-pcm-encoding", kAudioEncodingPcmFloat);
+    buffers->setFormat(format);
+
+    // Prepare a linear C2Buffer
+    std::shared_ptr<C2BlockPool> pool;
+    ASSERT_EQ(OK, GetCodec2BlockPool(C2BlockPool::BASIC_LINEAR, nullptr, &pool));
+
+    std::shared_ptr<C2LinearBlock> block;
+    ASSERT_EQ(OK, pool->fetchLinearBlock(
+            1024, C2MemoryUsage{C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE}, &block));
+    std::shared_ptr<C2Buffer> c2Buffer =
+        C2Buffer::CreateLinearBuffer(block->share(0, 1024, C2Fence()));
+
+    // Test regular buffer convert
+    size_t index;
+    sp<MediaCodecBuffer> clientBuffer;
+    ASSERT_EQ(OK, buffers->registerBuffer(c2Buffer, &index, &clientBuffer));
+    int32_t pcmEncoding = 0;
+    ASSERT_TRUE(clientBuffer->format()->findInt32(KEY_PCM_ENCODING, &pcmEncoding));
+    EXPECT_EQ(kAudioEncodingPcm16bit, pcmEncoding);
+    ASSERT_TRUE(buffers->releaseBuffer(clientBuffer, &c2Buffer));
+
+    // Test null buffer convert
+    ASSERT_EQ(OK, buffers->registerBuffer(nullptr, &index, &clientBuffer));
+    ASSERT_TRUE(clientBuffer->format()->findInt32(KEY_PCM_ENCODING, &pcmEncoding));
+    EXPECT_EQ(kAudioEncodingPcm16bit, pcmEncoding);
+    ASSERT_TRUE(buffers->releaseBuffer(clientBuffer, &c2Buffer));
+
+    // Do the same test in the array mode
+    std::shared_ptr<OutputBuffersArray> array = buffers->toArrayMode(8);
+
+    // Test regular buffer convert
+    ASSERT_EQ(OK, buffers->registerBuffer(c2Buffer, &index, &clientBuffer));
+    ASSERT_TRUE(clientBuffer->format()->findInt32(KEY_PCM_ENCODING, &pcmEncoding));
+    EXPECT_EQ(kAudioEncodingPcm16bit, pcmEncoding);
+    ASSERT_TRUE(buffers->releaseBuffer(clientBuffer, &c2Buffer));
+
+    // Test null buffer convert
+    ASSERT_EQ(OK, buffers->registerBuffer(nullptr, &index, &clientBuffer));
+    ASSERT_TRUE(clientBuffer->format()->findInt32(KEY_PCM_ENCODING, &pcmEncoding));
+    EXPECT_EQ(kAudioEncodingPcm16bit, pcmEncoding);
+    ASSERT_TRUE(buffers->releaseBuffer(clientBuffer, &c2Buffer));
+}
+
 } // namespace android
diff --git a/media/codec2/sfplugin/utils/Android.bp b/media/codec2/sfplugin/utils/Android.bp
index fe63651..54a6fb1 100644
--- a/media/codec2/sfplugin/utils/Android.bp
+++ b/media/codec2/sfplugin/utils/Android.bp
@@ -28,6 +28,7 @@
 
     srcs: [
         "Codec2BufferUtils.cpp",
+        "Codec2CommonUtils.cpp",
         "Codec2Mapper.cpp",
     ],
 
diff --git a/media/codec2/sfplugin/utils/Codec2BufferUtils.cpp b/media/codec2/sfplugin/utils/Codec2BufferUtils.cpp
index 7fc4c27..a3a023a 100644
--- a/media/codec2/sfplugin/utils/Codec2BufferUtils.cpp
+++ b/media/codec2/sfplugin/utils/Codec2BufferUtils.cpp
@@ -532,12 +532,14 @@
 // Matrix coefficient to convert RGB to Planar YUV data.
 // Each sub-array represents the 3X3 coeff used with R, G and B
 static const int16_t bt601Matrix[2][3][3] = {
-    { { 76, 150, 29 }, { -43, -85, 128 }, { 128, -107, -21 } }, /* RANGE_FULL */
+    { { 77, 150, 29 }, { -43, -85, 128 }, { 128, -107, -21 } }, /* RANGE_FULL */
     { { 66, 129, 25 }, { -38, -74, 112 }, { 112, -94, -18 } },  /* RANGE_LIMITED */
 };
 
 static const int16_t bt709Matrix[2][3][3] = {
-    { { 54, 183, 18 }, { -29, -99, 128 }, { 128, -116, -12 } }, /* RANGE_FULL */
+    // TRICKY: 18 is adjusted to 19 so that sum of row 1 is 256
+    { { 54, 183, 19 }, { -29, -99, 128 }, { 128, -116, -12 } }, /* RANGE_FULL */
+    // TRICKY: -87 is adjusted to -86 so that sum of row 2 is 0
     { { 47, 157, 16 }, { -26, -86, 112 }, { 112, -102, -10 } }, /* RANGE_LIMITED */
 };
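The adjusted coefficients can be checked mechanically. A standalone sanity check (illustrative, not part of the patch): each full-range luma row should sum to 256 (unity gain at the 8-bit fixed-point scale) and each chroma row should sum to 0 (no bias for grey input).

    static_assert(77 + 150 + 29 == 256, "BT.601 full-range luma row must sum to 256");
    static_assert(54 + 183 + 19 == 256, "BT.709 full-range luma row must sum to 256");
    static_assert(-43 - 85 + 128 == 0 && 128 - 107 - 21 == 0, "BT.601 chroma rows must sum to 0");
    static_assert(-29 - 99 + 128 == 0 && 128 - 116 - 12 == 0, "BT.709 chroma rows must sum to 0");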
 
diff --git a/media/codec2/sfplugin/utils/Codec2CommonUtils.cpp b/media/codec2/sfplugin/utils/Codec2CommonUtils.cpp
new file mode 100644
index 0000000..ef5800d
--- /dev/null
+++ b/media/codec2/sfplugin/utils/Codec2CommonUtils.cpp
@@ -0,0 +1,91 @@
+/*
+ * Copyright 2018, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "Codec2BufferUtils"
+#define ATRACE_TAG  ATRACE_TAG_VIDEO
+#include <utils/Log.h>
+
+#include <android/hardware_buffer.h>
+#include <android-base/properties.h>
+#include <cutils/properties.h>
+#include <media/hardware/HardwareAPI.h>
+#include <system/graphics.h>
+
+#include <C2Debug.h>
+
+#include "Codec2CommonUtils.h"
+
+namespace android {
+
+bool isAtLeastT() {
+    char deviceCodeName[PROP_VALUE_MAX];
+    __system_property_get("ro.build.version.codename", deviceCodeName);
+    return android_get_device_api_level() >= __ANDROID_API_T__ ||
+           !strcmp(deviceCodeName, "Tiramisu");
+}
+
+bool isVendorApiOrFirstApiAtLeastT() {
+    // The first SDK the device shipped with.
+    static const int32_t kProductFirstApiLevel =
+        base::GetIntProperty<int32_t>("ro.product.first_api_level", 0);
+
+    // GRF devices (introduced in Android 11) list the first and possibly the current api levels
+    // to signal which VSR requirements they conform to even if the first device SDK was higher.
+    static const int32_t kBoardFirstApiLevel =
+        base::GetIntProperty<int32_t>("ro.board.first_api_level", 0);
+    static const int32_t kBoardApiLevel =
+        base::GetIntProperty<int32_t>("ro.board.api_level", 0);
+
+    // For non-GRF devices, use the first SDK version of the product.
+    static const int32_t kFirstApiLevel =
+        kBoardApiLevel != 0 ? kBoardApiLevel :
+        kBoardFirstApiLevel != 0 ? kBoardFirstApiLevel :
+        kProductFirstApiLevel;
+
+    return kFirstApiLevel >= __ANDROID_API_T__;
+}
+
+bool isHalPixelFormatSupported(AHardwareBuffer_Format format) {
+    // HAL_PIXEL_FORMAT_YCBCR_P010 requirement was added in T VSR, although it could have been
+    // supported prior to this.
+    //
+    // Unfortunately, we cannot detect if P010 is properly supported using AHardwareBuffer
+    // API alone. For now limit P010 to devices that launched with Android T or known to conform
+    // to Android T VSR (as opposed to simply limiting to a T vendor image).
+    if (format == (AHardwareBuffer_Format)HAL_PIXEL_FORMAT_YCBCR_P010 &&
+            !isVendorApiOrFirstApiAtLeastT()) {
+        return false;
+    }
+
+    const AHardwareBuffer_Desc desc = {
+            .width = 320,
+            .height = 240,
+            .format = format,
+            .layers = 1,
+            .usage = AHARDWAREBUFFER_USAGE_CPU_READ_RARELY |
+                     AHARDWAREBUFFER_USAGE_CPU_WRITE_OFTEN |
+                     AHARDWAREBUFFER_USAGE_GPU_SAMPLED_IMAGE |
+                     AHARDWAREBUFFER_USAGE_COMPOSER_OVERLAY,
+            .stride = 0,
+            .rfu0 = 0,
+            .rfu1 = 0,
+    };
+
+    return AHardwareBuffer_isSupported(&desc);
+}
+
+}  // namespace android
diff --git a/media/codec2/sfplugin/utils/Codec2CommonUtils.h b/media/codec2/sfplugin/utils/Codec2CommonUtils.h
new file mode 100644
index 0000000..98dd65b
--- /dev/null
+++ b/media/codec2/sfplugin/utils/Codec2CommonUtils.h
@@ -0,0 +1,38 @@
+/*
+ * Copyright 2022, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef CODEC2_COMMON_UTILS_H_
+#define CODEC2_COMMON_UTILS_H_
+
+#include <android/hardware_buffer.h>
+
+namespace android {
+
+bool isAtLeastT();
+
+bool isVendorApiOrFirstApiAtLeastT();
+
+/**
+ * Check if a given pixel format is supported.
+ * enums listed in android_pixel_format_t, android_pixel_format_v1_1_t
+ * and so on can be passed as these enums have an equivalent definition in
+ * AHardwareBuffer_Format as well.
+ */
+bool isHalPixelFormatSupported(AHardwareBuffer_Format format);
+
+} // namespace android
+
+#endif  // CODEC2_COMMON_UTILS_H_
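A hedged usage sketch of the helpers declared above; the function name pickPixelFormat is hypothetical, and the pixel-format constants come from system/graphics.h:

    #include <system/graphics.h>
    #include "Codec2CommonUtils.h"

    // Prefer a 10-bit output only when the device can actually allocate P010 buffers.
    static uint32_t pickPixelFormat() {
        if (android::isHalPixelFormatSupported(
                static_cast<AHardwareBuffer_Format>(HAL_PIXEL_FORMAT_YCBCR_P010))) {
            return HAL_PIXEL_FORMAT_YCBCR_P010;
        }
        return HAL_PIXEL_FORMAT_YCBCR_420_888;  // 8-bit fallback
    }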
diff --git a/media/codec2/sfplugin/utils/Codec2Mapper.cpp b/media/codec2/sfplugin/utils/Codec2Mapper.cpp
index 3a94016..c606d6f 100644
--- a/media/codec2/sfplugin/utils/Codec2Mapper.cpp
+++ b/media/codec2/sfplugin/utils/Codec2Mapper.cpp
@@ -276,6 +276,13 @@
     { C2Config::PROFILE_HEVC_MAIN_10, HEVCProfileMain10HDR10Plus },
 };
 
+ALookup<C2Config::hdr_format_t, int32_t> sHevcHdrFormats = {
+    { C2Config::hdr_format_t::SDR, HEVCProfileMain },
+    { C2Config::hdr_format_t::HLG, HEVCProfileMain10 },
+    { C2Config::hdr_format_t::HDR10, HEVCProfileMain10HDR10 },
+    { C2Config::hdr_format_t::HDR10_PLUS, HEVCProfileMain10HDR10Plus },
+};
+
 ALookup<C2Config::level_t, int32_t> sMpeg2Levels = {
     { C2Config::LEVEL_MP2V_LOW,         MPEG2LevelLL },
     { C2Config::LEVEL_MP2V_MAIN,        MPEG2LevelML },
@@ -365,6 +372,17 @@
     { C2Config::PROFILE_VP9_3, VP9Profile3HDR10Plus },
 };
 
+ALookup<C2Config::hdr_format_t, int32_t> sVp9HdrFormats = {
+    { C2Config::hdr_format_t::SDR, VP9Profile0 },
+    { C2Config::hdr_format_t::SDR, VP9Profile1 },
+    { C2Config::hdr_format_t::HLG, VP9Profile2 },
+    { C2Config::hdr_format_t::HLG, VP9Profile3 },
+    { C2Config::hdr_format_t::HDR10, VP9Profile2HDR },
+    { C2Config::hdr_format_t::HDR10, VP9Profile3HDR },
+    { C2Config::hdr_format_t::HDR10_PLUS, VP9Profile2HDR10Plus },
+    { C2Config::hdr_format_t::HDR10_PLUS, VP9Profile3HDR10Plus },
+};
+
 ALookup<C2Config::level_t, int32_t> sAv1Levels = {
     { C2Config::LEVEL_AV1_2,    AV1Level2  },
     { C2Config::LEVEL_AV1_2_1,  AV1Level21 },
@@ -411,6 +429,13 @@
     { C2Config::PROFILE_AV1_0, AV1ProfileMain10HDR10Plus },
 };
 
+ALookup<C2Config::hdr_format_t, int32_t> sAv1HdrFormats = {
+    { C2Config::hdr_format_t::SDR, AV1ProfileMain8 },
+    { C2Config::hdr_format_t::HLG, AV1ProfileMain10 },
+    { C2Config::hdr_format_t::HDR10, AV1ProfileMain10HDR10 },
+    { C2Config::hdr_format_t::HDR10_PLUS, AV1ProfileMain10HDR10Plus },
+};
+
 // HAL_PIXEL_FORMAT_* -> COLOR_Format*
 ALookup<uint32_t, int32_t> sPixelFormats = {
     { HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, COLOR_FormatSurface },
@@ -494,6 +519,10 @@
     virtual bool simpleMap(int32_t from, C2Config::profile_t *to) {
         return sAacProfiles.map(from, to);
     }
+    // AAC does not have HDR format
+    virtual bool mapHdrFormat(int32_t, C2Config::hdr_format_t*) override {
+        return false;
+    }
 };
 
 struct AvcProfileLevelMapper : ProfileLevelMapperHelper {
@@ -524,6 +553,12 @@
     virtual bool simpleMap(int32_t from, C2Config::profile_t *to) {
         return sDolbyVisionProfiles.map(from, to);
     }
+    // Dolby Vision is always HDR and the profile is fully expressive so use unknown
+    // HDR format
+    virtual bool mapHdrFormat(int32_t, C2Config::hdr_format_t *to) override {
+        *to = C2Config::hdr_format_t::UNKNOWN;
+        return true;
+    }
 };
 
 struct H263ProfileLevelMapper : ProfileLevelMapperHelper {
@@ -562,6 +597,9 @@
                      mIsHdr ? sHevcHdrProfiles.map(from, to) :
                               sHevcProfiles.map(from, to);
     }
+    virtual bool mapHdrFormat(int32_t from, C2Config::hdr_format_t *to) override {
+        return sHevcHdrFormats.map(from, to);
+    }
 
 private:
     bool mIsHdr;
@@ -640,6 +678,9 @@
                      mIsHdr ? sVp9HdrProfiles.map(from, to) :
                               sVp9Profiles.map(from, to);
     }
+    virtual bool mapHdrFormat(int32_t from, C2Config::hdr_format_t *to) override {
+        return sVp9HdrFormats.map(from, to);
+    }
 
 private:
     bool mIsHdr;
@@ -669,6 +710,9 @@
                           mIsHdr ? sAv1HdrProfiles.map(from, to) :
                                    sAv1Profiles.map(from, to);
     }
+    virtual bool mapHdrFormat(int32_t from, C2Config::hdr_format_t *to) override {
+        return sAv1HdrFormats.map(from, to);
+    }
 
 private:
     bool mIsHdr;
@@ -678,6 +722,13 @@
 
 } // namespace
 
+// the default mapper is used for media types that do not support HDR
+bool C2Mapper::ProfileLevelMapper::mapHdrFormat(int32_t, C2Config::hdr_format_t *to) {
+    // by default map all (including vendor) profiles to SDR
+    *to = C2Config::hdr_format_t::SDR;
+    return true;
+}
+
 // static
 std::shared_ptr<C2Mapper::ProfileLevelMapper>
 C2Mapper::GetProfileLevelMapper(std::string mediaType) {
diff --git a/media/codec2/sfplugin/utils/Codec2Mapper.h b/media/codec2/sfplugin/utils/Codec2Mapper.h
index 33d305e..c8e9e13 100644
--- a/media/codec2/sfplugin/utils/Codec2Mapper.h
+++ b/media/codec2/sfplugin/utils/Codec2Mapper.h
@@ -34,6 +34,16 @@
             virtual bool mapProfile(int32_t, C2Config::profile_t*) = 0;
             virtual bool mapLevel(C2Config::level_t, int32_t*) = 0;
             virtual bool mapLevel(int32_t, C2Config::level_t*) = 0;
+
+            /**
+             * Mapper method that maps a MediaCodec profile to the supported
+             * HDR format for that profile. Since 10-bit profiles are used for
+             * HLG, this method will return HLG for all 10-bit profiles, but
+             * the caller should also verify that the transfer function is
+             * indeed HLG.
+             */
+            // not an abstract method as we have a default implementation for SDR
+            virtual bool mapHdrFormat(int32_t, C2Config::hdr_format_t *hdr);
             virtual ~ProfileLevelMapper() = default;
         };
 
diff --git a/media/codec2/vndk/C2Fence.cpp b/media/codec2/vndk/C2Fence.cpp
index 9c5183e..0b556aa 100644
--- a/media/codec2/vndk/C2Fence.cpp
+++ b/media/codec2/vndk/C2Fence.cpp
@@ -16,13 +16,24 @@
 
 //#define LOG_NDEBUG 0
 #define LOG_TAG "C2FenceFactory"
+#include <cutils/native_handle.h>
 #include <utils/Log.h>
+#include <ui/Fence.h>
 
 #include <C2FenceFactory.h>
 #include <C2SurfaceSyncObj.h>
 
+#define MAX_FENCE_FDS 1
+
 class C2Fence::Impl {
 public:
+    enum type_t : uint32_t {
+        INVALID_FENCE,
+        NULL_FENCE,
+        SURFACE_FENCE,
+        SYNC_FENCE,
+    };
+
     virtual c2_status_t wait(c2_nsecs_t timeoutNs) = 0;
 
     virtual bool valid() const = 0;
@@ -33,9 +44,26 @@
 
     virtual bool isHW() const = 0;
 
+    virtual type_t type() const = 0;
+
+    /**
+     * Create a native handle for the fence so it can be marshalled.
+     * The native handle must store fence type in the first integer.
+     *
+     * \return a valid native handle if the fence can be marshalled, otherwise return null.
+     */
+    virtual native_handle_t *createNativeHandle() const = 0;
+
     virtual ~Impl() = default;
 
     Impl() = default;
+
+    static type_t GetTypeFromNativeHandle(const native_handle_t* nh) {
+        if (nh && nh->numFds >= 0 && nh->numFds <= MAX_FENCE_FDS && nh->numInts > 0) {
+            return static_cast<type_t>(nh->data[nh->numFds]);
+        }
+        return INVALID_FENCE;
+    }
 };
 
 c2_status_t C2Fence::wait(c2_nsecs_t timeoutNs) {
@@ -115,6 +143,15 @@
         return false;
     }
 
+    virtual type_t type() const {
+        return SURFACE_FENCE;
+    }
+
+    virtual native_handle_t *createNativeHandle() const {
+        ALOGD("Cannot create native handle from surface fence");
+        return nullptr;
+    }
+
     virtual ~SurfaceFenceImpl() {};
 
     SurfaceFenceImpl(std::shared_ptr<C2SurfaceSyncMemory> syncMem, uint32_t waitId) :
@@ -143,3 +180,120 @@
     }
     return C2Fence();
 }
+
+using namespace android;
+
+class _C2FenceFactory::SyncFenceImpl : public C2Fence::Impl {
+public:
+    virtual c2_status_t wait(c2_nsecs_t timeoutNs) {
+        c2_nsecs_t timeoutMs = timeoutNs / 1000000; // convert nanoseconds to milliseconds
+        if (timeoutMs > INT_MAX) {
+            timeoutMs = INT_MAX;
+        }
+
+        switch (mFence->wait((int)timeoutMs)) {
+            case NO_ERROR:
+                return C2_OK;
+            case -ETIME:
+                return C2_TIMED_OUT;
+            default:
+                return C2_CORRUPTED;
+        }
+    }
+
+    virtual bool valid() const {
+        return mFence->getStatus() != Fence::Status::Invalid;
+    }
+
+    virtual bool ready() const {
+        return mFence->getStatus() == Fence::Status::Signaled;
+    }
+
+    virtual int fd() const {
+        return mFence->dup();
+    }
+
+    virtual bool isHW() const {
+        return true;
+    }
+
+    virtual type_t type() const {
+        return SYNC_FENCE;
+    }
+
+    virtual native_handle_t *createNativeHandle() const {
+        native_handle_t* nh = native_handle_create(1, 1);
+        if (!nh) {
+            ALOGE("Failed to allocate native handle for sync fence");
+            return nullptr;
+        }
+        nh->data[0] = fd();
+        nh->data[1] = type();
+        return nh;
+    }
+
+    virtual ~SyncFenceImpl() {};
+
+    SyncFenceImpl(int fenceFd) :
+            mFence(sp<Fence>::make(fenceFd)) {}
+
+    static std::shared_ptr<SyncFenceImpl> CreateFromNativeHandle(const native_handle_t* nh) {
+        if (!nh || nh->numFds != 1 || nh->numInts != 1) {
+            ALOGE("Invalid handle for sync fence");
+            return nullptr;
+        }
+        int fd = dup(nh->data[0]);
+        std::shared_ptr<SyncFenceImpl> p = std::make_shared<SyncFenceImpl>(fd);
+        if (!p) {
+            ALOGE("Failed to allocate sync fence impl");
+            close(fd);
+        }
+        return p;
+    }
+
+private:
+    const sp<Fence> mFence;
+};
+
+C2Fence _C2FenceFactory::CreateSyncFence(int fenceFd) {
+    std::shared_ptr<C2Fence::Impl> p;
+    if (fenceFd >= 0) {
+        p = std::make_shared<_C2FenceFactory::SyncFenceImpl>(fenceFd);
+        if (!p) {
+            ALOGE("Failed to allocate sync fence impl");
+            close(fenceFd);
+        }
+        if (!p->valid()) {
+            p.reset();
+        }
+    } else {
+        ALOGE("Create sync fence from invalid fd");
+    }
+    return C2Fence(p);
+}
+
+native_handle_t* _C2FenceFactory::CreateNativeHandle(const C2Fence& fence) {
+    return fence.mImpl? fence.mImpl->createNativeHandle() : nullptr;
+}
+
+C2Fence _C2FenceFactory::CreateFromNativeHandle(const native_handle_t* handle) {
+    if (!handle) {
+        return C2Fence();
+    }
+    C2Fence::Impl::type_t type = C2Fence::Impl::GetTypeFromNativeHandle(handle);
+    std::shared_ptr<C2Fence::Impl> p;
+    switch (type) {
+        case C2Fence::Impl::SYNC_FENCE:
+            p = SyncFenceImpl::CreateFromNativeHandle(handle);
+            break;
+        default:
+            ALOGD("Unsupported fence type %d", type);
+            // return a null-fence in this case
+            break;
+    }
+    if (p && !p->valid()) {
+        p.reset();
+    }
+    return C2Fence(p);
+}
+
diff --git a/media/codec2/vndk/include/C2FenceFactory.h b/media/codec2/vndk/include/C2FenceFactory.h
index d4bed26..4944115 100644
--- a/media/codec2/vndk/include/C2FenceFactory.h
+++ b/media/codec2/vndk/include/C2FenceFactory.h
@@ -28,6 +28,7 @@
 struct _C2FenceFactory {
 
     class SurfaceFenceImpl;
+    class SyncFenceImpl;
 
     /*
      * Create C2Fence for BufferQueueBased blockpool.
@@ -38,6 +39,30 @@
     static C2Fence CreateSurfaceFence(
             std::shared_ptr<C2SurfaceSyncMemory> syncMem,
             uint32_t waitId);
+
+    /*
+     * Create C2Fence from a fence file fd.
+     *
+     * \param fenceFd           Fence file descriptor.
+     *                          It will be owned and closed by the returned fence object.
+     */
+    static C2Fence CreateSyncFence(int fenceFd);
+
+    /**
+     * Create a native handle from fence for marshalling
+     *
+     * \return a non-null pointer if the fence can be marshalled, otherwise return nullptr
+     */
+    static native_handle_t* CreateNativeHandle(const C2Fence& fence);
+
+    /*
+     * Create C2Fence from a native handle.
+     *
+     * \param handle           A native handle representing a fence
+     *                         The fd in the native handle will be duplicated, so the caller will
+     *                         still own the handle and have to close it.
+     */
+    static C2Fence CreateFromNativeHandle(const native_handle_t* handle);
 };
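A minimal marshalling sketch under the ownership rules documented above; rawFd is a hypothetical sync-fence fd that the caller wants to keep, hence the dup():

    C2Fence fence = _C2FenceFactory::CreateSyncFence(dup(rawFd));      // factory owns this dup
    if (native_handle_t *nh = _C2FenceFactory::CreateNativeHandle(fence)) {
        C2Fence remote = _C2FenceFactory::CreateFromNativeHandle(nh);  // dups the stored fd
        native_handle_close(nh);   // close the fd held by the handle
        native_handle_delete(nh);
        (void)remote.wait(1000000 /* 1 ms, in ns */);
    }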
 
 
diff --git a/media/extractors/mp4/MPEG4Extractor.cpp b/media/extractors/mp4/MPEG4Extractor.cpp
index eccbf46..5a03992 100644
--- a/media/extractors/mp4/MPEG4Extractor.cpp
+++ b/media/extractors/mp4/MPEG4Extractor.cpp
@@ -1969,26 +1969,8 @@
             }
 
             if (chunk_type == FOURCC("fLaC")) {
-
-                // From https://github.com/xiph/flac/blob/master/doc/isoflac.txt
-                // 4 for mime, 4 for blockType and BlockLen, 34 for metadata
-                uint8_t flacInfo[4 + 4 + 34];
-                // skipping dFla, version
-                data_offset += sizeof(buffer) + 12;
-                size_t flacOffset = 4;
-                // Add flaC header mime type to CSD
-                strncpy((char *)flacInfo, "fLaC", 4);
-                if (mDataSource->readAt(
-                        data_offset, flacInfo + flacOffset, sizeof(flacInfo) - flacOffset) <
-                        (ssize_t)sizeof(flacInfo) - flacOffset) {
-                    return ERROR_IO;
-                }
-                data_offset += sizeof(flacInfo) - flacOffset;
-
-                AMediaFormat_setBuffer(mLastTrack->meta, AMEDIAFORMAT_KEY_CSD_0, flacInfo,
-                                       sizeof(flacInfo));
+                data_offset += sizeof(buffer);
                 *offset = data_offset;
-                CHECK_EQ(*offset, stop_offset);
             }
 
             while (*offset < stop_offset) {
@@ -2521,6 +2503,35 @@
             break;
         }
 
+        case FOURCC("dfLa"):
+        {
+            *offset += chunk_size;
+
+            // From https://github.com/xiph/flac/blob/master/doc/isoflac.txt
+            // 4 for mediaType, 4 for blockType and BlockLen, 34 for metadata
+            uint8_t flacInfo[4 + 4 + 34];
+
+            if (chunk_data_size != sizeof(flacInfo)) {
+                return ERROR_MALFORMED;
+            }
+
+            data_offset += 4;
+            size_t flacOffset = 4;
+            // Add flaC header mediaType to CSD
+            strncpy((char *)flacInfo, "fLaC", 4);
+
+            ssize_t bytesToRead = sizeof(flacInfo) - flacOffset;
+            if (mDataSource->readAt(
+                    data_offset, flacInfo + flacOffset, bytesToRead) < bytesToRead) {
+                return ERROR_IO;
+            }
+
+            data_offset += bytesToRead;
+            AMediaFormat_setBuffer(mLastTrack->meta, AMEDIAFORMAT_KEY_CSD_0, flacInfo,
+                                    sizeof(flacInfo));
+            break;
+        }
+
         case FOURCC("avcC"):
         {
             *offset += chunk_size;
@@ -4775,7 +4786,7 @@
         if (len2 == 0) {
             return ERROR_MALFORMED;
         }
-        if (offset >= csd_size || csd[offset] != 0x01) {
+        if (offset + len1 > csd_size || csd[offset] != 0x01) {
             return ERROR_MALFORMED;
         }
 
diff --git a/media/libaaudio/src/flowgraph/FlowgraphUtilities.h b/media/libaaudio/src/flowgraph/FlowgraphUtilities.h
index ce2bc82..5e90588 100644
--- a/media/libaaudio/src/flowgraph/FlowgraphUtilities.h
+++ b/media/libaaudio/src/flowgraph/FlowgraphUtilities.h
@@ -39,9 +39,9 @@
     static const float limneg = -1.;
 
     if (f <= limneg) {
-        return -0x80000000; /* or 0x80000000 */
+        return INT32_MIN;
     } else if (f >= limpos) {
-        return 0x7fffffff;
+        return INT32_MAX;
     }
     f *= scale;
     /* integer conversion is through truncation (though int to float is not).
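For context (hedged reasoning, not stated in the change itself): 0x80000000 does not fit in int on Android's ABIs, so the literal has unsigned type and -0x80000000 evaluates to 2147483648u; converting that back to the int32_t return type is implementation-defined, which is what the old "or 0x80000000" comment was hinting at. INT32_MIN and INT32_MAX express the intended limits exactly.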
diff --git a/media/libaaudio/tests/test_flowgraph.cpp b/media/libaaudio/tests/test_flowgraph.cpp
index 913feb0..66b77eb 100644
--- a/media/libaaudio/tests/test_flowgraph.cpp
+++ b/media/libaaudio/tests/test_flowgraph.cpp
@@ -16,6 +16,9 @@
 
 /*
  * Test FlowGraph
+ *
+ * This file also tests a few different conversion techniques because
+ * sometimes they have caused compiler bugs.
  */
 
 #include <iostream>
@@ -30,6 +33,7 @@
 #include "flowgraph/SinkFloat.h"
 #include "flowgraph/SinkI16.h"
 #include "flowgraph/SinkI24.h"
+#include "flowgraph/SinkI32.h"
 #include "flowgraph/SourceI16.h"
 #include "flowgraph/SourceI24.h"
 
@@ -37,6 +41,22 @@
 
 constexpr int kBytesPerI24Packed = 3;
 
+constexpr int kNumSamples = 8;
+constexpr std::array<float, kNumSamples> kInputFloat = {
+    1.0f, 0.5f, -0.25f, -1.0f,
+    0.0f, 53.9f, -87.2f, -1.02f};
+
+// Corresponding PCM values as integers.
+constexpr std::array<int16_t, kNumSamples>  kExpectedI16 = {
+    INT16_MAX, 1 << 14, INT16_MIN / 4, INT16_MIN,
+    0, INT16_MAX, INT16_MIN, INT16_MIN};
+
+constexpr std::array<int32_t, kNumSamples>  kExpectedI32 = {
+    INT32_MAX, 1 << 30, INT32_MIN / 4, INT32_MIN,
+    0, INT32_MAX, INT32_MIN, INT32_MIN};
+
+// =================================== FLOAT to I16 ==============
+
 // Simple test that tries to reproduce a Clang compiler bug.
 __attribute__((noinline))
 void local_convert_float_to_int16(const float *input,
@@ -49,18 +69,11 @@
 }
 
 TEST(test_flowgraph, local_convert_float_int16) {
-    static constexpr int kNumSamples = 8;
-    static constexpr std::array<float, kNumSamples> input = {
-        1.0f, 0.5f, -0.25f, -1.0f,
-        0.0f, 53.9f, -87.2f, -1.02f};
-    static constexpr std::array<int16_t, kNumSamples>  expected = {
-        32767, 16384, -8192, -32768,
-        0, 32767, -32768, -32768};
     std::array<int16_t, kNumSamples> output;
 
     // Do it inline, which will probably work even with the buggy compiler.
     // This validates the expected data.
-    const float *in = input.data();
+    const float *in = kInputFloat.data();
     int16_t *out = output.data();
     output.fill(777);
     for (int i = 0; i < kNumSamples; i++) {
@@ -68,38 +81,106 @@
         *out++ = std::min(INT16_MAX, std::max(INT16_MIN, n)); // clip
     }
     for (int i = 0; i < kNumSamples; i++) {
-        EXPECT_EQ(expected.at(i), output.at(i)) << ", i = " << i;
+        EXPECT_EQ(kExpectedI16.at(i), output.at(i)) << ", i = " << i;
     }
 
     // Convert audio signal using the function.
     output.fill(777);
-    local_convert_float_to_int16(input.data(), output.data(), kNumSamples);
+    local_convert_float_to_int16(kInputFloat.data(), output.data(), kNumSamples);
     for (int i = 0; i < kNumSamples; i++) {
-        EXPECT_EQ(expected.at(i), output.at(i)) << ", i = " << i;
+        EXPECT_EQ(kExpectedI16.at(i), output.at(i)) << ", i = " << i;
     }
 }
 
 TEST(test_flowgraph, module_sinki16) {
     static constexpr int kNumSamples = 8;
-    static constexpr std::array<float, kNumSamples> input = {
-        1.0f, 0.5f, -0.25f, -1.0f,
-        0.0f, 53.9f, -87.2f, -1.02f};
-    static constexpr std::array<int16_t, kNumSamples>  expected = {
-        32767, 16384, -8192, -32768,
-        0, 32767, -32768, -32768};
     std::array<int16_t, kNumSamples + 10> output; // larger than input
 
     SourceFloat sourceFloat{1};
     SinkI16 sinkI16{1};
 
-    sourceFloat.setData(input.data(), kNumSamples);
+    sourceFloat.setData(kInputFloat.data(), kNumSamples);
     sourceFloat.output.connect(&sinkI16.input);
 
     output.fill(777);
     int32_t numRead = sinkI16.read(output.data(), output.size());
     ASSERT_EQ(kNumSamples, numRead);
     for (int i = 0; i < numRead; i++) {
-        EXPECT_EQ(expected.at(i), output.at(i)) << ", i = " << i;
+        EXPECT_EQ(kExpectedI16.at(i), output.at(i)) << ", i = " << i;
+    }
+}
+
+// =================================== FLOAT to I32 ==============
+// Simple test that tries to reproduce a Clang compiler bug.
+__attribute__((noinline))
+static int32_t clamp32FromFloat(float f)
+{
+    static const float scale = (float)(1UL << 31);
+    static const float limpos = 1.;
+    static const float limneg = -1.;
+
+    if (f <= limneg) {
+        return INT32_MIN;
+    } else if (f >= limpos) {
+        return INT32_MAX;
+    }
+    f *= scale;
+    /* integer conversion is through truncation (though int to float is not).
+     * ensure that we round to nearest, ties away from 0.
+     */
+    return f > 0 ? f + 0.5 : f - 0.5;
+}
+
+void local_convert_float_to_int32(const float *input,
+                                  int32_t *output,
+                                  int count) {
+    for (int i = 0; i < count; i++) {
+        *output++ = clamp32FromFloat(*input++);
+    }
+}
+
+TEST(test_flowgraph, simple_convert_float_int32) {
+    std::array<int32_t, kNumSamples> output;
+
+    // Do it inline, which will probably work even with a buggy compiler.
+    // This validates the expected data.
+    const float *in = kInputFloat.data();
+    output.fill(777);
+    int32_t *out = output.data();
+    for (int i = 0; i < kNumSamples; i++) {
+        int64_t n = (int64_t) (*in++ * 2147483648.0f);
+        *out++ = (int32_t)std::min((int64_t)INT32_MAX,
+                                   std::max((int64_t)INT32_MIN, n)); // clip
+    }
+    for (int i = 0; i < kNumSamples; i++) {
+        EXPECT_EQ(kExpectedI32.at(i), output.at(i)) << ", i = " << i;
+    }
+}
+
+TEST(test_flowgraph, local_convert_float_int32) {
+    std::array<int32_t, kNumSamples> output;
+    // Convert audio signal using the function.
+    output.fill(777);
+    local_convert_float_to_int32(kInputFloat.data(), output.data(), kNumSamples);
+    for (int i = 0; i < kNumSamples; i++) {
+        EXPECT_EQ(kExpectedI32.at(i), output.at(i)) << ", i = " << i;
+    }
+}
+
+TEST(test_flowgraph, module_sinki32) {
+    std::array<int32_t, kNumSamples + 10> output; // larger than input
+
+    SourceFloat sourceFloat{1};
+    SinkI32 sinkI32{1};
+
+    sourceFloat.setData(kInputFloat.data(), kNumSamples);
+    sourceFloat.output.connect(&sinkI32.input);
+
+    output.fill(777);
+    int32_t numRead = sinkI32.read(output.data(), output.size());
+    ASSERT_EQ(kNumSamples, numRead);
+    for (int i = 0; i < numRead; i++) {
+        EXPECT_EQ(kExpectedI32.at(i), output.at(i)) << ", i = " << i;
     }
 }
 
diff --git a/media/libaudioclient/TEST_MAPPING b/media/libaudioclient/TEST_MAPPING
index d36cf10..888d592 100644
--- a/media/libaudioclient/TEST_MAPPING
+++ b/media/libaudioclient/TEST_MAPPING
@@ -13,11 +13,9 @@
           "include-filter": "android.nativemedia.aaudio.AAudioTests#AAudioBasic.*"
         }
       ]
-    }
-  ],
-  "postsubmit": [
+    },
     {
-      "name": "audieorecord_tests"
+      "name": "audiorecord_tests"
     },
     {
       "name": "audioeffect_tests"
diff --git a/media/libaudioclient/ToneGenerator.cpp b/media/libaudioclient/ToneGenerator.cpp
index 4662247..f968a4b 100644
--- a/media/libaudioclient/ToneGenerator.cpp
+++ b/media/libaudioclient/ToneGenerator.cpp
@@ -867,6 +867,11 @@
                         { .duration = 0 , .waveFreq = { 0 }, 0, 0}},
           .repeatCnt = ToneGenerator::TONEGEN_INF,
           .repeatSegment = 0 },                               // TONE_TW_RINGTONE
+        { .segments = { { .duration = 200, .waveFreq = { 400, 0 }, 0, 0 },
+                        { .duration = 3000, .waveFreq = { 0 }, 0, 0 },
+                        { .duration = 0 , .waveFreq = { 0 }, 0, 0}},
+          .repeatCnt = 3,
+          .repeatSegment = 0 },                              // TONE_NZ_CALL_WAITING
 };
 
 // Used by ToneGenerator::getToneForRegion() to convert user specified supervisory tone type
@@ -961,6 +966,16 @@
             TONE_SUP_ERROR,              // TONE_SUP_ERROR
             TONE_SUP_CALL_WAITING,       // TONE_SUP_CALL_WAITING
             TONE_TW_RINGTONE             // TONE_SUP_RINGTONE
+        },
+        {   // NEW ZEALAND
+            TONE_JAPAN_DIAL,              // TONE_SUP_DIAL
+            TONE_JAPAN_BUSY,              // TONE_SUP_BUSY
+            TONE_SUP_CONGESTION,          // TONE_SUP_CONGESTION
+            TONE_SUP_RADIO_ACK,           // TONE_SUP_RADIO_ACK
+            TONE_SUP_RADIO_NOTAVAIL,      // TONE_SUP_RADIO_NOTAVAIL
+            TONE_SUP_ERROR,               // TONE_SUP_ERROR
+            TONE_NZ_CALL_WAITING,         // TONE_SUP_CALL_WAITING
+            TONE_GB_RINGTONE              // TONE_SUP_RINGTONE
         }
 };
 
@@ -1038,6 +1053,8 @@
         mRegion = INDIA;
     } else if (strstr(value, "tw") != NULL) {
         mRegion = TAIWAN;
+    } else if (strstr(value, "nz") != NULL) {
+        mRegion = NZ;
     } else {
         mRegion = CEPT;
     }
diff --git a/media/libaudioclient/include/media/AidlConversionUtil.h b/media/libaudioclient/include/media/AidlConversionUtil.h
index 9f294cb..8817c35 100644
--- a/media/libaudioclient/include/media/AidlConversionUtil.h
+++ b/media/libaudioclient/include/media/AidlConversionUtil.h
@@ -279,6 +279,20 @@
     return std::find(er.begin(), er.end(), value) != er.end();
 }
 
+// T is a "container" of enum binder types with a toString().
+template <typename T>
+std::string enumsToString(const T& t) {
+    std::string s;
+    for (const auto item : t) {
+        if (s.empty()) {
+            s = toString(item);
+        } else {
+            s.append("|").append(toString(item));
+        }
+    }
+    return s;
+}
+
 /**
  * Return the equivalent Android status_t from a binder exception code.
  *
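A self-contained sketch of the helper's contract; AIDL-generated enums supply the toString() overload it relies on, so a local stand-in enum is used here purely for illustration:

    #include <string>
    #include <vector>

    namespace demo {
    enum class Mode { LOW, HIGH };
    inline std::string toString(Mode m) { return m == Mode::LOW ? "LOW" : "HIGH"; }
    }  // namespace demo

    // Joins with '|': yields "LOW|HIGH"; an empty container yields "".
    const std::string joined = android::enumsToString(
            std::vector<demo::Mode>{demo::Mode::LOW, demo::Mode::HIGH});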
diff --git a/media/libaudioclient/include/media/AudioTrack.h b/media/libaudioclient/include/media/AudioTrack.h
index b6ee483..32576c2 100644
--- a/media/libaudioclient/include/media/AudioTrack.h
+++ b/media/libaudioclient/include/media/AudioTrack.h
@@ -1411,7 +1411,7 @@
 
     audio_session_t         mSessionId;
     int                     mAuxEffectId;
-    audio_port_handle_t     mPortId;                    // Id from Audio Policy Manager
+    audio_port_handle_t     mPortId = AUDIO_PORT_HANDLE_NONE; // Id from Audio Policy Manager
 
     /**
      * mPlayerIId is the player id of the AudioTrack used by AudioManager.
diff --git a/media/libaudioclient/include/media/ToneGenerator.h b/media/libaudioclient/include/media/ToneGenerator.h
index d00dfd2..46e9501 100644
--- a/media/libaudioclient/include/media/ToneGenerator.h
+++ b/media/libaudioclient/include/media/ToneGenerator.h
@@ -225,7 +225,11 @@
         TONE_INDIA_CONGESTION,      // Congestion tone: 400 Hz, 250ms ON, 250ms OFF...
         TONE_INDIA_CALL_WAITING,    // Call waiting tone: 400 Hz, tone repeated in a 0.2s on, 0.1s off, 0.2s on, 7.5s off pattern.
         TONE_INDIA_RINGTONE,        // Ring tone: 400 Hz tone modulated with 25Hz, 0.4 on 0.2 off 0.4 on 2..0 off
+         // TAIWAN supervisory tones
         TONE_TW_RINGTONE,           // Ring Tone: 440 Hz + 480 Hz repeated with pattern 1s on, 3s off.
+         // NEW ZEALAND supervisory tones
+        TONE_NZ_CALL_WAITING,       // Call waiting tone: 400 Hz,  0.2s ON, 3s OFF,
+                                    //        0.2s ON, 3s OFF, 0.2s ON, 3s OFF, 0.2s ON
         NUM_ALTERNATE_TONES
     };
 
@@ -239,6 +243,7 @@
         IRELAND,
         INDIA,
         TAIWAN,
+        NZ,
         CEPT,
         NUM_REGIONS
     };
diff --git a/media/libaudioclient/tests/audio_test_utils.cpp b/media/libaudioclient/tests/audio_test_utils.cpp
index d7ce014..018d920 100644
--- a/media/libaudioclient/tests/audio_test_utils.cpp
+++ b/media/libaudioclient/tests/audio_test_utils.cpp
@@ -48,14 +48,15 @@
 AudioPlayback::AudioPlayback(uint32_t sampleRate, audio_format_t format,
                              audio_channel_mask_t channelMask, audio_output_flags_t flags,
                              audio_session_t sessionId, AudioTrack::transfer_type transferType,
-                             audio_attributes_t* attributes)
+                             audio_attributes_t* attributes, audio_offload_info_t* info)
     : mSampleRate(sampleRate),
       mFormat(format),
       mChannelMask(channelMask),
       mFlags(flags),
       mSessionId(sessionId),
       mTransferType(transferType),
-      mAttributes(attributes) {
+      mAttributes(attributes),
+      mOffloadInfo(info) {
     mStopPlaying = false;
     mBytesUsedSoFar = 0;
     mState = PLAY_NO_INIT;
@@ -81,13 +82,14 @@
         mTrack->set(AUDIO_STREAM_MUSIC, mSampleRate, mFormat, mChannelMask, 0 /* frameCount */,
                     mFlags, nullptr /* callback */, 0 /* notificationFrames */,
                     nullptr /* sharedBuffer */, false /*canCallJava */, mSessionId, mTransferType,
-                    nullptr /* offloadInfo */, attributionSource, mAttributes);
+                    mOffloadInfo, attributionSource, mAttributes);
     } else if (mTransferType == AudioTrack::TRANSFER_SHARED) {
         mTrack = new AudioTrack(AUDIO_STREAM_MUSIC, mSampleRate, mFormat, mChannelMask, mMemory,
                                 mFlags, wp<AudioTrack::IAudioTrackCallback>::fromExisting(this), 0,
                                 mSessionId, mTransferType, nullptr, attributionSource, mAttributes);
     } else {
-        ALOGE("Required Transfer type not existed");
+        ALOGE("Test application is not handling transfer type %s",
+              AudioTrack::convertTransferToText(mTransferType));
         return INVALID_OPERATION;
     }
     mTrack->setCallerName(packageName);
@@ -229,9 +231,15 @@
     std::unique_lock<std::mutex> lock{mMutex};
     mStopPlaying = true;
     if (mState != PLAY_STOPPED) {
+        int32_t msec = 0;
+        (void)mTrack->pendingDuration(&msec);
         mTrack->stopAndJoinCallbacks();
         LOG_FATAL_IF(true != mTrack->stopped());
         mState = PLAY_STOPPED;
+        if (msec > 0) {
+            ALOGD("deleting recycled track, waiting for data drain (%d msec)", msec);
+            usleep(msec * 1000LL);
+        }
     }
 }
 
diff --git a/media/libaudioclient/tests/audio_test_utils.h b/media/libaudioclient/tests/audio_test_utils.h
index fc269ed..526d5c4 100644
--- a/media/libaudioclient/tests/audio_test_utils.h
+++ b/media/libaudioclient/tests/audio_test_utils.h
@@ -64,12 +64,13 @@
 
 // Simple AudioPlayback class.
 class AudioPlayback : public AudioTrack::IAudioTrackCallback {
-  friend sp<AudioPlayback>;
+    friend sp<AudioPlayback>;
     AudioPlayback(uint32_t sampleRate, audio_format_t format, audio_channel_mask_t channelMask,
                   audio_output_flags_t flags = AUDIO_OUTPUT_FLAG_NONE,
                   audio_session_t sessionId = AUDIO_SESSION_NONE,
                   AudioTrack::transfer_type transferType = AudioTrack::TRANSFER_SHARED,
-                  audio_attributes_t* attributes = nullptr);
+                  audio_attributes_t* attributes = nullptr, audio_offload_info_t* info = nullptr);
+
   public:
     status_t loadResource(const char* name);
     status_t create();
@@ -101,6 +102,7 @@
     const audio_session_t mSessionId;
     const AudioTrack::transfer_type mTransferType;
     const audio_attributes_t* mAttributes;
+    const audio_offload_info_t* mOffloadInfo;
 
     size_t mBytesUsedSoFar;
     State mState;
diff --git a/media/libaudioclient/tests/audioeffect_tests.cpp b/media/libaudioclient/tests/audioeffect_tests.cpp
index 346f4fd..93fe306 100644
--- a/media/libaudioclient/tests/audioeffect_tests.cpp
+++ b/media/libaudioclient/tests/audioeffect_tests.cpp
@@ -39,7 +39,7 @@
     attributionSource.pid = VALUE_OR_FATAL(legacy2aidl_pid_t_int32_t(getpid()));
     attributionSource.token = sp<BBinder>::make();
     sp<AudioEffect> effect = new AudioEffect(attributionSource);
-    effect->set(type, nullptr, priority, nullptr, nullptr, sessionId);
+    effect->set(type, nullptr /* uuid */, priority, nullptr /* callback */, sessionId);
     return effect->initCheck() == ALREADY_EXISTS;
 }
 
diff --git a/media/libaudioclient/tests/audiotrack_tests.cpp b/media/libaudioclient/tests/audiotrack_tests.cpp
index a49af96..1b42a49 100644
--- a/media/libaudioclient/tests/audiotrack_tests.cpp
+++ b/media/libaudioclient/tests/audiotrack_tests.cpp
@@ -47,6 +47,78 @@
     ap->stop();
 }
 
+TEST(AudioTrackTest, OffloadOrDirectPlayback) {
+    audio_offload_info_t info = AUDIO_INFO_INITIALIZER;
+    info.sample_rate = 44100;
+    info.channel_mask = AUDIO_CHANNEL_OUT_STEREO;
+    info.format = AUDIO_FORMAT_MP3;
+    info.stream_type = AUDIO_STREAM_MUSIC;
+    info.bit_rate = 192;
+    info.duration_us = 120 * 1000000;  // 120 sec
+
+    audio_config_base_t config = {/* .sample_rate = */ info.sample_rate,
+                                  /* .channel_mask = */ info.channel_mask,
+                                  /* .format = */ AUDIO_FORMAT_PCM_16_BIT};
+    audio_attributes_t attributes = AUDIO_ATTRIBUTES_INITIALIZER;
+    attributes.content_type = AUDIO_CONTENT_TYPE_MUSIC;
+    attributes.usage = AUDIO_USAGE_MEDIA;
+    attributes.flags = AUDIO_FLAG_NONE;
+
+    if (!AudioTrack::isDirectOutputSupported(config, attributes) &&
+        AUDIO_OFFLOAD_NOT_SUPPORTED == AudioSystem::getOffloadSupport(info)) {
+        GTEST_SKIP() << "offload or direct playback is not supported";
+    }
+    sp<AudioPlayback> ap = nullptr;
+    if (AUDIO_OFFLOAD_NOT_SUPPORTED != AudioSystem::getOffloadSupport(info)) {
+        ap = sp<AudioPlayback>::make(info.sample_rate, info.format, info.channel_mask,
+                                     AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD, AUDIO_SESSION_NONE,
+                                     AudioTrack::TRANSFER_OBTAIN, nullptr, &info);
+    } else {
+        ap = sp<AudioPlayback>::make(config.sample_rate, config.format, config.channel_mask,
+                                     AUDIO_OUTPUT_FLAG_DIRECT, AUDIO_SESSION_NONE,
+                                     AudioTrack::TRANSFER_OBTAIN);
+    }
+    ASSERT_NE(nullptr, ap);
+    EXPECT_EQ(OK, ap->create()) << "track creation failed";
+    audio_dual_mono_mode_t mode;
+    if (OK != ap->getAudioTrackHandle()->getDualMonoMode(&mode)) {
+        std::cerr << "no dual mono presentation is available" << std::endl;
+    }
+    if (OK != ap->getAudioTrackHandle()->setDualMonoMode(AUDIO_DUAL_MONO_MODE_LR)) {
+        std::cerr << "no dual mono presentation is available" << std::endl;
+    } else {
+        EXPECT_EQ(OK, ap->getAudioTrackHandle()->getDualMonoMode(&mode));
+        EXPECT_EQ(AUDIO_DUAL_MONO_MODE_LR, mode);
+    }
+    float leveldB;
+    if (OK != ap->getAudioTrackHandle()->getAudioDescriptionMixLevel(&leveldB)) {
+        std::cerr << "Audio Description mixing is unavailable" << std::endl;
+    }
+    if (OK != ap->getAudioTrackHandle()->setAudioDescriptionMixLevel(3.14f)) {
+        std::cerr << "Audio Description mixing is unavailable" << std::endl;
+    } else {
+        EXPECT_EQ(OK, ap->getAudioTrackHandle()->getAudioDescriptionMixLevel(&leveldB));
+        EXPECT_EQ(3.14f, leveldB);
+    }
+    AudioPlaybackRate audioRate;
+    audioRate = ap->getAudioTrackHandle()->getPlaybackRate();
+    std::cerr << "playback speed :: " << audioRate.mSpeed << std::endl
+              << "playback pitch :: " << audioRate.mPitch << std::endl;
+    audioRate.mSpeed = 2.0f;
+    audioRate.mPitch = 2.0f;
+    audioRate.mStretchMode = AUDIO_TIMESTRETCH_STRETCH_VOICE;
+    audioRate.mFallbackMode = AUDIO_TIMESTRETCH_FALLBACK_MUTE;
+    EXPECT_TRUE(isAudioPlaybackRateValid(audioRate));
+    if (OK != ap->getAudioTrackHandle()->setPlaybackRate(audioRate)) {
+        std::cerr << "unable to set playback rate parameters" << std::endl;
+    } else {
+        AudioPlaybackRate audioRateLocal;
+        audioRateLocal = ap->getAudioTrackHandle()->getPlaybackRate();
+        EXPECT_TRUE(isAudioPlaybackRateEqual(audioRate, audioRateLocal));
+    }
+    ap->stop();
+}
+
 TEST(AudioTrackTest, TestAudioCbNotifier) {
     const auto ap = sp<AudioPlayback>::make(0 /* sampleRate */, AUDIO_FORMAT_PCM_16_BIT,
                                             AUDIO_CHANNEL_OUT_STEREO, AUDIO_OUTPUT_FLAG_FAST,
@@ -68,6 +140,13 @@
     EXPECT_EQ(AUDIO_PORT_HANDLE_NONE, cbOld->mDeviceId);
     EXPECT_NE(AUDIO_IO_HANDLE_NONE, cb->mAudioIo);
     EXPECT_NE(AUDIO_PORT_HANDLE_NONE, cb->mDeviceId);
+    EXPECT_EQ(cb->mAudioIo, ap->getAudioTrackHandle()->getOutput());
+    EXPECT_EQ(cb->mDeviceId, ap->getAudioTrackHandle()->getRoutedDeviceId());
+    String8 keys;
+    keys = ap->getAudioTrackHandle()->getParameters(keys);
+    if (!keys.isEmpty()) {
+        std::cerr << "track parameters :: " << keys << std::endl;
+    }
     EXPECT_TRUE(checkPatchPlayback(cb->mAudioIo, cb->mDeviceId));
     EXPECT_EQ(BAD_VALUE, ap->getAudioTrackHandle()->removeAudioDeviceCallback(nullptr));
     EXPECT_EQ(INVALID_OPERATION, ap->getAudioTrackHandle()->removeAudioDeviceCallback(cbOld));
diff --git a/media/libheadtracking/Android.bp b/media/libheadtracking/Android.bp
index 1d41889..9d63f9b 100644
--- a/media/libheadtracking/Android.bp
+++ b/media/libheadtracking/Android.bp
@@ -46,6 +46,7 @@
     ],
     export_shared_lib_headers: [
         "libheadtracking",
+        "libsensor",
     ],
 }
 
diff --git a/media/libheadtracking/SensorPoseProvider.cpp b/media/libheadtracking/SensorPoseProvider.cpp
index 4884ae4..8ebaf6e 100644
--- a/media/libheadtracking/SensorPoseProvider.cpp
+++ b/media/libheadtracking/SensorPoseProvider.cpp
@@ -26,7 +26,6 @@
 
 #include <android-base/thread_annotations.h>
 #include <log/log_main.h>
-#include <sensor/Sensor.h>
 #include <sensor/SensorEventQueue.h>
 #include <sensor/SensorManager.h>
 #include <utils/Looper.h>
@@ -285,7 +284,7 @@
         return DataFormat::kUnknown;
     }
 
-    std::optional<const Sensor> getSensorByHandle(int32_t handle) {
+    std::optional<const Sensor> getSensorByHandle(int32_t handle) override {
         const Sensor* const* list;
         ssize_t size;
 
diff --git a/media/libheadtracking/include/media/SensorPoseProvider.h b/media/libheadtracking/include/media/SensorPoseProvider.h
index d2a6b77..0f42074 100644
--- a/media/libheadtracking/include/media/SensorPoseProvider.h
+++ b/media/libheadtracking/include/media/SensorPoseProvider.h
@@ -20,6 +20,7 @@
 #include <optional>
 
 #include <android/sensor.h>
+#include <sensor/Sensor.h>
 
 #include "Pose.h"
 #include "Twist.h"
@@ -91,6 +92,14 @@
      * @param handle The sensor handle, as provided to startSensor().
      */
     virtual void stopSensor(int32_t handle) = 0;
+
+    /**
+     * Returns the sensor or nullopt if it does not exist.
+     *
+     * The Sensor object has const methods that can be used to
+     * discover properties of the sensor.
+     */
+    virtual std::optional<const Sensor> getSensorByHandle(int32_t handle) = 0;
 };
 
 }  // namespace media
diff --git a/media/libmediametrics/MediaMetricsItem.cpp b/media/libmediametrics/MediaMetricsItem.cpp
index 57fc49d..ecb248d 100644
--- a/media/libmediametrics/MediaMetricsItem.cpp
+++ b/media/libmediametrics/MediaMetricsItem.cpp
@@ -26,6 +26,7 @@
 #include <unordered_map>
 
 #include <binder/Parcel.h>
+#include <cutils/multiuser.h>
 #include <cutils/properties.h>
 #include <utils/Errors.h>
 #include <utils/Log.h>
@@ -343,7 +344,8 @@
         // now.
         // TODO(b/190151205): Either allow the HotwordDetectionService to access MediaMetrics or
         // make this disabling specific to that process.
-        if (uid >= AID_ISOLATED_START && uid <= AID_ISOLATED_END) {
+        uid_t appid = multiuser_get_app_id(uid);
+        if (appid >= AID_ISOLATED_START && appid <= AID_ISOLATED_END) {
             return false;
         }
         break;
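
The app-id extraction matters because an Android uid encodes both a user and an app id; with the raw-uid comparison, isolated processes of secondary users slipped through the check. A standalone sketch of the fixed logic (the include paths are the conventional locations for these helpers and constants, assumed here):

```cpp
// Sketch: uid = userId * AID_USER_OFFSET + appId (AID_USER_OFFSET is 100000), so an
// isolated service of user 10 has e.g. uid 1099001. The raw uid is outside
// [AID_ISOLATED_START, AID_ISOLATED_END] = [99000, 99999], but its app id 99001 is inside.
#include <cutils/multiuser.h>
#include <private/android_filesystem_config.h>

static bool isIsolatedUid(uid_t uid) {
    const uid_t appid = multiuser_get_app_id(uid);   // strip the user part
    return appid >= AID_ISOLATED_START && appid <= AID_ISOLATED_END;
}
```
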
diff --git a/media/libmediametrics/include/MediaMetricsConstants.h b/media/libmediametrics/include/MediaMetricsConstants.h
index 90472eb..1c30510 100644
--- a/media/libmediametrics/include/MediaMetricsConstants.h
+++ b/media/libmediametrics/include/MediaMetricsConstants.h
@@ -51,6 +51,12 @@
 // The AudioRecord key appends the "trackId" to the prefix.
 #define AMEDIAMETRICS_KEY_PREFIX_AUDIO_RECORD AMEDIAMETRICS_KEY_PREFIX_AUDIO "record."
 
+// The Audio Sensor key appends the sensor handle integer.
+#define AMEDIAMETRICS_KEY_PREFIX_AUDIO_SENSOR AMEDIAMETRICS_KEY_PREFIX_AUDIO "sensor."
+
+// The Audio Spatializer key appends the spatializerId (currently 0).
+#define AMEDIAMETRICS_KEY_PREFIX_AUDIO_SPATIALIZER AMEDIAMETRICS_KEY_PREFIX_AUDIO "spatializer."
+
 // The AudioStream key appends the "streamId" to the prefix.
 #define AMEDIAMETRICS_KEY_PREFIX_AUDIO_STREAM  AMEDIAMETRICS_KEY_PREFIX_AUDIO "stream."
 
@@ -135,13 +141,17 @@
 #define AMEDIAMETRICS_PROP_FLAGS          "flags"
 
 #define AMEDIAMETRICS_PROP_FRAMECOUNT     "frameCount"     // int32
+#define AMEDIAMETRICS_PROP_HEADTRACKINGMODES "headTrackingModes" // string, |-delimited (same format as modes)
 #define AMEDIAMETRICS_PROP_INPUTDEVICES   "inputDevices"   // string value
 #define AMEDIAMETRICS_PROP_INTERNALTRACKID "internalTrackId" // int32
 #define AMEDIAMETRICS_PROP_INTERVALCOUNT  "intervalCount"  // int32
 #define AMEDIAMETRICS_PROP_LATENCYMS      "latencyMs"      // double value
+#define AMEDIAMETRICS_PROP_LEVELS         "levels"          // string, |-delimited levels
 #define AMEDIAMETRICS_PROP_LOGSESSIONID   "logSessionId"   // hex string, "" none
 #define AMEDIAMETRICS_PROP_METHODCODE     "methodCode"     // int64_t an int indicating method
 #define AMEDIAMETRICS_PROP_METHODNAME     "methodName"     // string method name
+#define AMEDIAMETRICS_PROP_MODE           "mode"           // string
+#define AMEDIAMETRICS_PROP_MODES          "modes"          // string, |-delimited modes
 #define AMEDIAMETRICS_PROP_NAME           "name"           // string value
 #define AMEDIAMETRICS_PROP_ORIGINALFLAGS  "originalFlags"  // int32
 #define AMEDIAMETRICS_PROP_OUTPUTDEVICES  "outputDevices"  // string value
@@ -241,6 +251,11 @@
 #define AMEDIAMETRICS_PROP_CALLERNAME_VALUE_TONEGENERATOR "tonegenerator"  // dial tones
 #define AMEDIAMETRICS_PROP_CALLERNAME_VALUE_UNKNOWN       "unknown"        // callerName not set
 
+// Many properties are available for the sensor.
+// The mode describes how the sensor is currently being used.
+#define AMEDIAMETRICS_PROP_MODE_VALUE_HEAD          "head"        // used for head tracking
+#define AMEDIAMETRICS_PROP_MODE_VALUE_SCREEN        "screen"      // used for screen
+
 // MediaMetrics errors are expected to cover the following sources:
 // https://docs.oracle.com/javase/7/docs/api/java/lang/RuntimeException.html
 // https://docs.oracle.com/javase/7/docs/api/java/lang/Exception.html
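
As the comments above note, the new prefixes are meant to be suffixed with an id. A small sketch (not from this change) of what the resulting keys look like; the include path for the constants header is assumed.

```cpp
// Sketch: forming item keys from the new prefixes.
#include <string>
#include <media/MediaMetricsConstants.h>   // assumed include path for the macros above

std::string audioSensorKey(int32_t sensorHandle) {
    // e.g. "audio.sensor.42"
    return std::string(AMEDIAMETRICS_KEY_PREFIX_AUDIO_SENSOR) + std::to_string(sensorHandle);
}

std::string audioSpatializerKey(int32_t spatializerId = 0) {
    // e.g. "audio.spatializer.0"
    return std::string(AMEDIAMETRICS_KEY_PREFIX_AUDIO_SPATIALIZER) + std::to_string(spatializerId);
}
```
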
diff --git a/media/libmediaplayerservice/MediaPlayerService.cpp b/media/libmediaplayerservice/MediaPlayerService.cpp
index a71631a..8e19d02 100644
--- a/media/libmediaplayerservice/MediaPlayerService.cpp
+++ b/media/libmediaplayerservice/MediaPlayerService.cpp
@@ -611,29 +611,42 @@
                 IPCThreadState::self()->getCallingUid());
         result.append(buffer);
     } else {
-        Mutex::Autolock lock(mLock);
-        for (int i = 0, n = mClients.size(); i < n; ++i) {
-            sp<Client> c = mClients[i].promote();
-            if (c != 0) c->dump(fd, args);
-            clients.add(c);
-        }
-        if (mMediaRecorderClients.size() == 0) {
-                result.append(" No media recorder client\n\n");
-        } else {
+        {
+            // capture clients under lock
+            Mutex::Autolock lock(mLock);
+            for (int i = 0, n = mClients.size(); i < n; ++i) {
+                sp<Client> c = mClients[i].promote();
+                if (c != nullptr) {
+                    clients.add(c);
+                }
+            }
+
             for (int i = 0, n = mMediaRecorderClients.size(); i < n; ++i) {
                 sp<MediaRecorderClient> c = mMediaRecorderClients[i].promote();
-                if (c != 0) {
-                    snprintf(buffer, 255, " MediaRecorderClient pid(%d)\n",
-                            c->mAttributionSource.pid);
-                    result.append(buffer);
-                    write(fd, result.string(), result.size());
-                    result = "\n";
-                    c->dump(fd, args);
+                if (c != nullptr) {
                     mediaRecorderClients.add(c);
                 }
             }
         }
 
+        // dump clients outside of lock
+        for (const sp<Client> &c : clients) {
+            c->dump(fd, args);
+        }
+        if (mediaRecorderClients.size() == 0) {
+            result.append(" No media recorder client\n\n");
+        } else {
+            for (const sp<MediaRecorderClient> &c : mediaRecorderClients) {
+                snprintf(buffer, 255, " MediaRecorderClient pid(%d)\n",
+                        c->mAttributionSource.pid);
+                result.append(buffer);
+                write(fd, result.string(), result.size());
+                result = "\n";
+                c->dump(fd, args);
+
+            }
+        }
+
         result.append(" Files opened and/or mapped:\n");
         snprintf(buffer, SIZE - 1, "/proc/%d/maps", getpid());
         FILE *f = fopen(buffer, "r");
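
The reshuffling above follows a common Binder-service pattern: promote the weak pointers and copy the strong references while holding mLock, then run the potentially slow dump() calls with the lock released. A generic sketch of the pattern (types and names are illustrative, not the service's own):

```cpp
// Sketch: snapshot-under-lock, dump-outside-lock.
#include <mutex>
#include <vector>
#include <utils/RefBase.h>

struct Client : public android::RefBase {
    void dump(int fd) const { (void)fd; /* may be slow or call back into the service */ }
};

class Service {
public:
    void dumpAll(int fd) {
        std::vector<android::sp<Client>> snapshot;
        {
            std::lock_guard<std::mutex> guard(mLock);   // held only while copying
            for (const auto& weak : mClients) {
                android::sp<Client> strong = weak.promote();
                if (strong != nullptr) {
                    snapshot.push_back(strong);
                }
            }
        }   // mLock released before any dump() runs
        for (const auto& c : snapshot) {
            c->dump(fd);
        }
    }

private:
    std::mutex mLock;
    std::vector<android::wp<Client>> mClients;
};
```
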
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayer.cpp b/media/libmediaplayerservice/nuplayer/NuPlayer.cpp
index c6b22a6..9b4fc8f 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayer.cpp
+++ b/media/libmediaplayerservice/nuplayer/NuPlayer.cpp
@@ -2893,6 +2893,38 @@
             in.writeInt32(recvTimeUs & 0xFFFFFFFF);
             break;
         }
+        case ARTPSource::RTCP_RR:
+        {
+            int64_t recvTimeUs;
+            int32_t senderId;
+            int32_t ssrc;
+            int32_t fraction;
+            int32_t lost;
+            int32_t lastSeq;
+            int32_t jitter;
+            int32_t lsr;
+            int32_t dlsr;
+            CHECK(msg->findInt64("recv-time-us", &recvTimeUs));
+            CHECK(msg->findInt32("rtcp-rr-ssrc", &senderId));
+            CHECK(msg->findInt32("rtcp-rrb-ssrc", &ssrc));
+            CHECK(msg->findInt32("rtcp-rrb-fraction", &fraction));
+            CHECK(msg->findInt32("rtcp-rrb-lost", &lost));
+            CHECK(msg->findInt32("rtcp-rrb-lastSeq", &lastSeq));
+            CHECK(msg->findInt32("rtcp-rrb-jitter", &jitter));
+            CHECK(msg->findInt32("rtcp-rrb-lsr", &lsr));
+            CHECK(msg->findInt32("rtcp-rrb-dlsr", &dlsr));
+            in.writeInt32(recvTimeUs >> 32);
+            in.writeInt32(recvTimeUs & 0xFFFFFFFF);
+            in.writeInt32(senderId);
+            in.writeInt32(ssrc);
+            in.writeInt32(fraction);
+            in.writeInt32(lost);
+            in.writeInt32(lastSeq);
+            in.writeInt32(jitter);
+            in.writeInt32(lsr);
+            in.writeInt32(dlsr);
+            break;
+        }
         case ARTPSource::RTCP_TSFB:   // RTCP TSFB
         case ARTPSource::RTCP_PSFB:   // RTCP PSFB
         case ARTPSource::RTP_AUTODOWN:
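
The receive time is carried as two int32 fields, matching the convention used by the other RTCP cases in this switch. For reference, a small sketch of the split and the corresponding reassembly on the reading side (the reader helper is hypothetical, not part of this change):

```cpp
// Sketch: pack a 64-bit timestamp into two int32 Parcel fields and rebuild it.
#include <binder/Parcel.h>

void writeTimeUs(android::Parcel* out, int64_t timeUs) {
    out->writeInt32(static_cast<int32_t>(timeUs >> 32));         // high word
    out->writeInt32(static_cast<int32_t>(timeUs & 0xFFFFFFFF));  // low word
}

int64_t readTimeUs(const android::Parcel& parcel) {
    const int64_t hi = parcel.readInt32();
    const uint32_t lo = static_cast<uint32_t>(parcel.readInt32());  // keep the low word unsigned
    return (hi << 32) | lo;
}
```
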
diff --git a/media/libstagefright/ACodec.cpp b/media/libstagefright/ACodec.cpp
index 7917395..52c4c0f 100644
--- a/media/libstagefright/ACodec.cpp
+++ b/media/libstagefright/ACodec.cpp
@@ -77,6 +77,7 @@
 namespace {
 
 constexpr char TUNNEL_PEEK_KEY[] = "android._trigger-tunnel-peek";
+constexpr char TUNNEL_PEEK_SET_LEGACY_KEY[] = "android._tunnel-peek-set-legacy";
 
 }
 
@@ -2483,17 +2484,39 @@
         return BAD_VALUE;
     }
 
-    OMX_CONFIG_BOOLEANTYPE config;
-    InitOMXParams(&config);
-    config.bEnabled = (OMX_BOOL)(tunnelPeek != 0);
+    OMX_CONFIG_BOOLEANTYPE tunnelPeekConfig;
+    InitOMXParams(&tunnelPeekConfig);
+    tunnelPeekConfig.bEnabled = (OMX_BOOL)(tunnelPeek != 0);
     status_t err = mOMXNode->setConfig(
             (OMX_INDEXTYPE)OMX_IndexConfigAndroidTunnelPeek,
-            &config, sizeof(config));
+            &tunnelPeekConfig, sizeof(tunnelPeekConfig));
     if (err != OK) {
         ALOGE("decoder cannot set %s to %d (err %d)",
-              TUNNEL_PEEK_KEY, tunnelPeek, err);
+                TUNNEL_PEEK_KEY, tunnelPeek, err);
+    }
+    return err;
+}
+
+status_t ACodec::setTunnelPeekLegacy(int32_t isLegacy) {
+    if (mIsEncoder) {
+        ALOGE("encoder does not support %s", TUNNEL_PEEK_SET_LEGACY_KEY);
+        return BAD_VALUE;
+    }
+    if (!mTunneled) {
+        ALOGE("%s is only supported in tunnel mode", TUNNEL_PEEK_SET_LEGACY_KEY);
+        return BAD_VALUE;
     }
 
+    OMX_CONFIG_BOOLEANTYPE tunnelPeekLegacyModeConfig;
+    InitOMXParams(&tunnelPeekLegacyModeConfig);
+    tunnelPeekLegacyModeConfig.bEnabled = (OMX_BOOL)(isLegacy != 0);
+    status_t err = mOMXNode->setConfig(
+            (OMX_INDEXTYPE)OMX_IndexConfigAndroidTunnelPeekLegacyMode,
+            &tunnelPeekLegacyModeConfig, sizeof(tunnelPeekLegacyModeConfig));
+    if (err != OK) {
+        ALOGE("decoder cannot set video peek legacy mode to %d (err %d)",
+                isLegacy,  err);
+    }
     return err;
 }
 
@@ -7934,11 +7957,22 @@
         }
     }
 
-    int32_t tunnelPeek = 0;
-    if (params->findInt32(TUNNEL_PEEK_KEY, &tunnelPeek)) {
-        status_t err = setTunnelPeek(tunnelPeek);
-        if (err != OK) {
-            return err;
+    {
+        int32_t tunnelPeek = 0;
+        if (params->findInt32(TUNNEL_PEEK_KEY, &tunnelPeek)) {
+            status_t err = setTunnelPeek(tunnelPeek);
+            if (err != OK) {
+                return err;
+            }
+        }
+    }
+    {
+        int32_t tunnelPeekSetLegacy = 0;
+        if (params->findInt32(TUNNEL_PEEK_SET_LEGACY_KEY, &tunnelPeekSetLegacy)) {
+            status_t err = setTunnelPeekLegacy(tunnelPeekSetLegacy);
+            if (err != OK) {
+                return err;
+            }
         }
     }
 
diff --git a/media/libstagefright/HevcUtils.cpp b/media/libstagefright/HevcUtils.cpp
index 5f9c20e..60df162 100644
--- a/media/libstagefright/HevcUtils.cpp
+++ b/media/libstagefright/HevcUtils.cpp
@@ -102,10 +102,11 @@
 static bool findParam(uint32_t key, T *param,
         KeyedVector<uint32_t, uint64_t> &params) {
     CHECK(param);
-    if (params.indexOfKey(key) < 0) {
+    ssize_t index = params.indexOfKey(key);
+    if (index < 0) {
         return false;
     }
-    *param = (T) params[key];
+    *param = (T) params[index];
     return true;
 }
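
The bug fixed here is subtle: KeyedVector's operator[] indexes by position (like valueAt()), not by key, so the old params[key] read whatever slot happened to sit at that numeric position. A short sketch of the intended lookup, spelled out with indexOfKey()/valueAt():

```cpp
// Sketch: look a value up by key in a KeyedVector without accidentally indexing
// by position. indexOfKey() returns the position (or a negative error), and
// valueAt()/operator[] take that position.
#include <utils/KeyedVector.h>

template <typename T>
static bool lookup(const android::KeyedVector<uint32_t, uint64_t>& params,
                   uint32_t key, T* out) {
    const ssize_t index = params.indexOfKey(key);
    if (index < 0) {
        return false;   // key not present
    }
    *out = static_cast<T>(params.valueAt(index));
    return true;
}
```
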
 
diff --git a/media/libstagefright/MediaCodec.cpp b/media/libstagefright/MediaCodec.cpp
index 79e1ab1..e50880a 100644
--- a/media/libstagefright/MediaCodec.cpp
+++ b/media/libstagefright/MediaCodec.cpp
@@ -120,7 +120,9 @@
 static const char *kCodecParsedColorStandard = "android.media.mediacodec.parsed-color-standard";
 static const char *kCodecParsedColorRange = "android.media.mediacodec.parsed-color-range";
 static const char *kCodecParsedColorTransfer = "android.media.mediacodec.parsed-color-transfer";
-static const char *kCodecHDRMetadataFlags = "android.media.mediacodec.hdr-metadata-flags";
+static const char *kCodecHDRStaticInfo = "android.media.mediacodec.hdr-static-info";
+static const char *kCodecHDR10PlusInfo = "android.media.mediacodec.hdr10-plus-info";
+static const char *kCodecHDRFormat = "android.media.mediacodec.hdr-format";
 
 // Min/Max QP before shaping
 static const char *kCodecOriginalVideoQPIMin = "android.media.mediacodec.original-video-qp-i-min";
@@ -805,7 +807,9 @@
       mWidth(0),
       mHeight(0),
       mRotationDegrees(0),
-      mHDRMetadataFlags(0),
+      mConfigColorTransfer(-1),
+      mHDRStaticInfo(false),
+      mHDR10PlusInfo(false),
       mDequeueInputTimeoutGeneration(0),
       mDequeueInputReplyID(0),
       mDequeueOutputTimeoutGeneration(0),
@@ -813,7 +817,7 @@
       mTunneledInputWidth(0),
       mTunneledInputHeight(0),
       mTunneled(false),
-      mTunnelPeekState(TunnelPeekState::kEnabledNoBuffer),
+      mTunnelPeekState(TunnelPeekState::kLegacyMode),
       mHaveInputSurface(false),
       mHavePendingInputBuffers(false),
       mCpuBoostRequested(false),
@@ -951,13 +955,71 @@
                               mIndexOfFirstFrameWhenLowLatencyOn);
     }
 
-    mediametrics_setInt32(mMetricsHandle, kCodecHDRMetadataFlags, mHDRMetadataFlags);
+    mediametrics_setInt32(mMetricsHandle, kCodecHDRStaticInfo, mHDRStaticInfo ? 1 : 0);
+    mediametrics_setInt32(mMetricsHandle, kCodecHDR10PlusInfo, mHDR10PlusInfo ? 1 : 0);
 #if 0
     // enable for short term, only while debugging
     updateEphemeralMediametrics(mMetricsHandle);
 #endif
 }
 
+void MediaCodec::updateHDRFormatMetric() {
+    int32_t profile = -1;
+    AString mediaType;
+    if (mOutputFormat->findInt32(KEY_PROFILE, &profile)
+            && mOutputFormat->findString("mime", &mediaType)) {
+        hdr_format hdrFormat = getHDRFormat(profile, mConfigColorTransfer, mediaType);
+        mediametrics_setInt32(mMetricsHandle, kCodecHDRFormat, static_cast<int>(hdrFormat));
+    }
+}
+
+hdr_format MediaCodec::getHDRFormat(const int32_t profile, const int32_t transfer,
+        const AString &mediaType) {
+    switch (transfer) {
+        case COLOR_TRANSFER_ST2084:
+            if (mediaType.equalsIgnoreCase(MEDIA_MIMETYPE_VIDEO_VP9)) {
+                switch (profile) {
+                    case VP9Profile2HDR:
+                        return HDR_FORMAT_HDR10;
+                    case VP9Profile2HDR10Plus:
+                        return HDR_FORMAT_HDR10PLUS;
+                    default:
+                        return HDR_FORMAT_NONE;
+                }
+            } else if (mediaType.equalsIgnoreCase(MEDIA_MIMETYPE_VIDEO_AV1)) {
+                switch (profile) {
+                    case AV1ProfileMain10HDR10:
+                        return HDR_FORMAT_HDR10;
+                    case AV1ProfileMain10HDR10Plus:
+                        return HDR_FORMAT_HDR10PLUS;
+                    default:
+                        return HDR_FORMAT_NONE;
+                }
+            } else if (mediaType.equalsIgnoreCase(MEDIA_MIMETYPE_VIDEO_HEVC)) {
+                switch (profile) {
+                    case HEVCProfileMain10HDR10:
+                        return HDR_FORMAT_HDR10;
+                    case HEVCProfileMain10HDR10Plus:
+                        return HDR_FORMAT_HDR10PLUS;
+                    default:
+                        return HDR_FORMAT_NONE;
+                }
+            } else {
+                return HDR_FORMAT_NONE;
+            }
+        case COLOR_TRANSFER_HLG:
+            if (!mediaType.equalsIgnoreCase(MEDIA_MIMETYPE_VIDEO_DOLBY_VISION)) {
+                return HDR_FORMAT_HLG;
+            } else {
+                // TODO: DOLBY format
+                return HDR_FORMAT_NONE;
+            }
+        default:
+            return HDR_FORMAT_NONE;
+    }
+}
+
+
 void MediaCodec::updateEphemeralMediametrics(mediametrics_handle_t item) {
     ALOGD("MediaCodec::updateEphemeralMediametrics()");
 
@@ -1025,6 +1087,8 @@
 
 constexpr const char *MediaCodec::asString(TunnelPeekState state, const char *default_string){
     switch(state) {
+        case TunnelPeekState::kLegacyMode:
+            return "LegacyMode";
         case TunnelPeekState::kEnabledNoBuffer:
             return "EnabledNoBuffer";
         case TunnelPeekState::kDisabledNoBuffer:
@@ -1051,6 +1115,9 @@
     TunnelPeekState previousState = mTunnelPeekState;
     if(tunnelPeek == 0){
         switch (mTunnelPeekState) {
+            case TunnelPeekState::kLegacyMode:
+                msg->setInt32("android._tunnel-peek-set-legacy", 0);
+                [[fallthrough]];
             case TunnelPeekState::kEnabledNoBuffer:
                 mTunnelPeekState = TunnelPeekState::kDisabledNoBuffer;
                 break;
@@ -1063,6 +1130,9 @@
         }
     } else {
         switch (mTunnelPeekState) {
+            case TunnelPeekState::kLegacyMode:
+                msg->setInt32("android._tunnel-peek-set-legacy", 0);
+                [[fallthrough]];
             case TunnelPeekState::kDisabledNoBuffer:
                 mTunnelPeekState = TunnelPeekState::kEnabledNoBuffer;
                 break;
@@ -1647,12 +1717,13 @@
             }
             int32_t colorTransfer = -1;
             if (format->findInt32(KEY_COLOR_TRANSFER, &colorTransfer)) {
+                mConfigColorTransfer = colorTransfer;
                 mediametrics_setInt32(mMetricsHandle, kCodecConfigColorTransfer, colorTransfer);
             }
             HDRStaticInfo info;
             if (ColorUtils::getHDRStaticInfoFromFormat(format, &info)
                     && ColorUtils::isHDRStaticInfoValid(&info)) {
-                mHDRMetadataFlags |= kFlagHDRStaticInfo;
+                mHDRStaticInfo = true;
             }
         }
 
@@ -2551,7 +2622,9 @@
     msg->setObject("c2buffer", obj);
     msg->setInt64("timeUs", presentationTimeUs);
     msg->setInt32("flags", flags);
-    msg->setMessage("tunings", tunings);
+    if (tunings && tunings->countEntries() > 0) {
+        msg->setMessage("tunings", tunings);
+    }
     msg->setPointer("errorDetailMsg", errorDetailMsg);
 
     sp<AMessage> response;
@@ -2593,7 +2666,9 @@
     msg->setInt32("skipBlocks", pattern.mSkipBlocks);
     msg->setInt64("timeUs", presentationTimeUs);
     msg->setInt32("flags", flags);
-    msg->setMessage("tunings", tunings);
+    if (tunings && tunings->countEntries() > 0) {
+        msg->setMessage("tunings", tunings);
+    }
     msg->setPointer("errorDetailMsg", errorDetailMsg);
 
     sp<AMessage> response;
@@ -3307,6 +3382,8 @@
                     CHECK(msg->findMessage("input-format", &mInputFormat));
                     CHECK(msg->findMessage("output-format", &mOutputFormat));
 
+                    updateHDRFormatMetric();
+
                     // limit to confirming the opt-in behavior to minimize any behavioral change
                     if (mSurface != nullptr && !mAllowFrameDroppingBySurface) {
                         // signal frame dropping mode in the input format as this may also be
@@ -3388,6 +3465,7 @@
                                 mComponentName.c_str(),
                                 mInputFormat->debugString(4).c_str(),
                                 mOutputFormat->debugString(4).c_str());
+                        updateHDRFormatMetric();
                         CHECK(obj != NULL);
                         response->setObject("input-surface", obj);
                         mHaveInputSurface = true;
@@ -3412,6 +3490,7 @@
                     if (!msg->findInt32("err", &err)) {
                         CHECK(msg->findMessage("input-format", &mInputFormat));
                         CHECK(msg->findMessage("output-format", &mOutputFormat));
+                        updateHDRFormatMetric();
                         mHaveInputSurface = true;
                     } else {
                         response->setInt32("err", err);
@@ -3472,10 +3551,12 @@
                         break;
                     }
                     TunnelPeekState previousState = mTunnelPeekState;
-                    mTunnelPeekState = TunnelPeekState::kBufferRendered;
-                    ALOGV("TunnelPeekState: %s -> %s",
-                          asString(previousState),
-                          asString(TunnelPeekState::kBufferRendered));
+                    if (mTunnelPeekState != TunnelPeekState::kLegacyMode) {
+                        mTunnelPeekState = TunnelPeekState::kBufferRendered;
+                        ALOGV("TunnelPeekState: %s -> %s",
+                                asString(previousState),
+                                asString(TunnelPeekState::kBufferRendered));
+                    }
                     updatePlaybackDuration(msg);
                     // check that we have a notification set
                     if (mOnFrameRenderedNotification != NULL) {
@@ -3892,6 +3973,14 @@
                 mTunneled = false;
             }
 
+            // If mTunnelPeekState is still in kLegacyMode at this point,
+            // configure the codec in legacy mode
+            if (mTunneled && (mTunnelPeekState == TunnelPeekState::kLegacyMode)) {
+                sp<AMessage> params = new AMessage;
+                params->setInt32("android._tunnel-peek-set-legacy", 1);
+                onSetParameters(params);
+            }
+
             int32_t background = 0;
             if (format->findInt32("android._background-mode", &background) && background) {
                 androidSetThreadPriority(gettid(), ANDROID_PRIORITY_BACKGROUND);
@@ -4010,10 +4099,12 @@
             sp<AReplyToken> replyID;
             CHECK(msg->senderAwaitsResponse(&replyID));
             TunnelPeekState previousState = mTunnelPeekState;
-            mTunnelPeekState = TunnelPeekState::kEnabledNoBuffer;
-            ALOGV("TunnelPeekState: %s -> %s",
-                  asString(previousState),
-                  asString(TunnelPeekState::kEnabledNoBuffer));
+            if (previousState != TunnelPeekState::kLegacyMode) {
+                mTunnelPeekState = TunnelPeekState::kEnabledNoBuffer;
+                ALOGV("TunnelPeekState: %s -> %s",
+                        asString(previousState),
+                        asString(TunnelPeekState::kEnabledNoBuffer));
+            }
 
             mReplyID = replyID;
             setState(STARTING);
@@ -4454,10 +4545,12 @@
             mCodec->signalFlush();
             returnBuffersToCodec();
             TunnelPeekState previousState = mTunnelPeekState;
-            mTunnelPeekState = TunnelPeekState::kEnabledNoBuffer;
-            ALOGV("TunnelPeekState: %s -> %s",
-                  asString(previousState),
-                  asString(TunnelPeekState::kEnabledNoBuffer));
+            if (previousState != TunnelPeekState::kLegacyMode) {
+                mTunnelPeekState = TunnelPeekState::kEnabledNoBuffer;
+                ALOGV("TunnelPeekState: %s -> %s",
+                        asString(previousState),
+                        asString(TunnelPeekState::kEnabledNoBuffer));
+            }
             break;
         }
 
@@ -4588,6 +4681,7 @@
         buffer->meta()->setObject("changedKeys", changedKeys);
     }
     mOutputFormat = format;
+    updateHDRFormatMetric();
     mapFormat(mComponentName, format, nullptr, true);
     ALOGV("[%s] output format changed to: %s",
             mComponentName.c_str(), mOutputFormat->debugString(4).c_str());
@@ -4614,7 +4708,7 @@
             if (ColorUtils::getHDRStaticInfoFromFormat(mOutputFormat, &info)) {
                 setNativeWindowHdrMetadata(mSurface.get(), &info);
                 if (ColorUtils::isHDRStaticInfoValid(&info)) {
-                    mHDRMetadataFlags |= kFlagHDRStaticInfo;
+                    mHDRStaticInfo = true;
                 }
             }
         }
@@ -4624,7 +4718,7 @@
                 && hdr10PlusInfo != nullptr && hdr10PlusInfo->size() > 0) {
             native_window_set_buffers_hdr10_plus_metadata(mSurface.get(),
                     hdr10PlusInfo->size(), hdr10PlusInfo->data());
-            mHDRMetadataFlags |= kFlagHDR10PlusInfo;
+            mHDR10PlusInfo = true;
         }
 
         if (mime.startsWithIgnoreCase("video/")) {
@@ -4774,12 +4868,10 @@
         sp<WrapperObject<std::shared_ptr<C2Buffer>>> obj{
             new WrapperObject<std::shared_ptr<C2Buffer>>{c2Buffer}};
         msg->setObject("c2buffer", obj);
-        msg->setMessage("tunings", new AMessage);
     } else if (memory) {
         sp<WrapperObject<sp<hardware::HidlMemory>>> obj{
             new WrapperObject<sp<hardware::HidlMemory>>{memory}};
         msg->setObject("memory", obj);
-        msg->setMessage("tunings", new AMessage);
     }
 
     return onQueueInputBuffer(msg);
@@ -4959,9 +5051,10 @@
     sp<MediaCodecBuffer> buffer = info->mData;
 
     if (c2Buffer || memory) {
-        sp<AMessage> tunings;
-        CHECK(msg->findMessage("tunings", &tunings));
-        onSetParameters(tunings);
+        sp<AMessage> tunings = NULL;
+        if (msg->findMessage("tunings", &tunings) && tunings != NULL) {
+            onSetParameters(tunings);
+        }
 
         status_t err = OK;
         if (c2Buffer) {
@@ -4993,6 +5086,8 @@
         offset = buffer->offset();
         size = buffer->size();
         if (err != OK) {
+            ALOGI("block model buffer attach failed: err = %s (%d)",
+                  StrMediaError(err).c_str(), err);
             return err;
         }
     }
diff --git a/media/libstagefright/Utils.cpp b/media/libstagefright/Utils.cpp
index 4b6470a..a443ed9 100644
--- a/media/libstagefright/Utils.cpp
+++ b/media/libstagefright/Utils.cpp
@@ -126,14 +126,10 @@
     }
 }
 
-static bool isHdr(const sp<AMessage> &format) {
-    // if CSD specifies HDR transfer(s), we assume HDR. Otherwise, if it specifies non-HDR
-    // transfers, we must assume non-HDR. This is because CSD trumps any color-transfer key
-    // in the format.
-    int32_t isHdr;
-    if (format->findInt32("android._is-hdr", &isHdr)) {
-        return isHdr;
-    }
+/**
+ * Returns true if, and only if, the given format corresponds to HDR10 or HDR10+.
+ */
+static bool isHdr10or10Plus(const sp<AMessage> &format) {
 
     // if user/container supplied HDR static info without transfer set, assume true
     if ((format->contains("hdr-static-info") || format->contains("hdr10-plus-info"))
@@ -143,8 +139,7 @@
     // otherwise, verify that an HDR transfer function is set
     int32_t transfer;
     if (format->findInt32("color-transfer", &transfer)) {
-        return transfer == ColorUtils::kColorTransferST2084
-                || transfer == ColorUtils::kColorTransferHLG;
+        return transfer == ColorUtils::kColorTransferST2084;
     }
     return false;
 }
@@ -419,8 +414,12 @@
     }
 
     // bump to HDR profile
-    if (isHdr(format) && codecProfile == HEVCProfileMain10) {
-        codecProfile = HEVCProfileMain10HDR10;
+    if (isHdr10or10Plus(format) && codecProfile == HEVCProfileMain10) {
+        if (format->contains("hdr10-plus-info")) {
+            codecProfile = HEVCProfileMain10HDR10Plus;
+        } else {
+            codecProfile = HEVCProfileMain10HDR10;
+        }
     }
 
     format->setInt32("profile", codecProfile);
@@ -615,16 +614,25 @@
                         { 3, VP9Profile3 },
                     };
 
-                    const static ALookup<int32_t, int32_t> toHdr {
+                    const static ALookup<int32_t, int32_t> toHdr10 {
                         { VP9Profile2, VP9Profile2HDR },
                         { VP9Profile3, VP9Profile3HDR },
                     };
 
+                    const static ALookup<int32_t, int32_t> toHdr10Plus {
+                        { VP9Profile2, VP9Profile2HDR10Plus },
+                        { VP9Profile3, VP9Profile3HDR10Plus },
+                    };
+
                     int32_t profile;
                     if (profiles.map(data[0], &profile)) {
                         // convert to HDR profile
-                        if (isHdr(format)) {
-                            toHdr.lookup(profile, &profile);
+                        if (isHdr10or10Plus(format)) {
+                            if (format->contains("hdr10-plus-info")) {
+                                toHdr10Plus.lookup(profile, &profile);
+                            } else {
+                                toHdr10.lookup(profile, &profile);
+                            }
                         }
 
                         format->setInt32("profile", profile);
@@ -684,7 +692,7 @@
     int32_t profile;
     if (profiles.map(std::make_pair(highBitDepth, profileData), &profile)) {
         // bump to HDR profile
-        if (isHdr(format) && profile == AV1ProfileMain10) {
+        if (isHdr10or10Plus(format) && profile == AV1ProfileMain10) {
             if (format->contains("hdr10-plus-info")) {
                 profile = AV1ProfileMain10HDR10Plus;
             } else {
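
To make the new branching concrete: a Main 10 stream whose format reports an ST2084 transfer is now bumped to the HDR10 profile, and to the HDR10+ profile when an hdr10-plus-info blob is also attached. A sketch of a format that would take the HDR10+ path (the blob variable is assumed; only the keys tested above matter):

```cpp
// Sketch: a format that isHdr10or10Plus() accepts and that selects the "+" profile.
#include <media/stagefright/foundation/ABuffer.h>
#include <media/stagefright/foundation/AMessage.h>
#include <media/stagefright/foundation/ColorUtils.h>

static android::sp<android::AMessage> makeHdr10PlusFormat(
        const android::sp<android::ABuffer>& hdr10PlusBlob /* assumed metadata payload */) {
    android::sp<android::AMessage> format = new android::AMessage;
    format->setInt32("color-transfer", android::ColorUtils::kColorTransferST2084);
    format->setBuffer("hdr10-plus-info", hdr10PlusBlob);  // presence selects the HDR10+ profile
    // Without the "hdr10-plus-info" entry the same format would map to plain HDR10.
    return format;
}
```
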
diff --git a/media/libstagefright/colorconversion/ColorConverter.cpp b/media/libstagefright/colorconversion/ColorConverter.cpp
index 6004cf8..4b4f65f 100644
--- a/media/libstagefright/colorconversion/ColorConverter.cpp
+++ b/media/libstagefright/colorconversion/ColorConverter.cpp
@@ -56,20 +56,161 @@
             || colorFormat == COLOR_Format32bitABGR2101010;
 }
 
-bool ColorConverter::ColorSpace::isBt709() {
-    return (mStandard == ColorUtils::kColorStandardBT709);
-}
-
-bool ColorConverter::ColorSpace::isBt2020() {
+bool ColorConverter::ColorSpace::isBt2020() const {
     return (mStandard == ColorUtils::kColorStandardBT2020);
 }
 
-bool ColorConverter::ColorSpace::isJpeg() {
+bool ColorConverter::ColorSpace::isH420() const {
+    return (mStandard == ColorUtils::kColorStandardBT709)
+            && (mRange == ColorUtils::kColorRangeLimited);
+}
+
+// the matrix coefficients are the same for both 601.625 and 601.525 standards
+bool ColorConverter::ColorSpace::isI420() const {
+    return ((mStandard == ColorUtils::kColorStandardBT601_625)
+            || (mStandard == ColorUtils::kColorStandardBT601_525))
+            && (mRange == ColorUtils::kColorRangeLimited);
+}
+
+bool ColorConverter::ColorSpace::isJ420() const {
     return ((mStandard == ColorUtils::kColorStandardBT601_625)
             || (mStandard == ColorUtils::kColorStandardBT601_525))
             && (mRange == ColorUtils::kColorRangeFull);
 }
 
+/**
+ * This class approximates the standard YUV to RGB conversions by factoring the matrix
+ * coefficients to multiples of 1/256 (dividing by 256 is a simple right shift). The chosen value
+ * of 256 is somewhat arbitrary and is not dependent on the bit depth, but it does limit the
+ * precision of the matrix coefficients derived from KR & KB.
+ *
+ * The maximum color error after clipping from using 256 is a distance of:
+ *   0.4 (8-bit) / 1.4 (10-bit) for greens in BT.601
+ *   0.5 (8-bit) / 1.9 (10-bit) for cyans in BT.709, and
+ *   0.3 (8-bit) / 1.3 (10-bit) for violets in BT.2020 (it is 0.4 for 10-bit BT.2020 limited)
+ *
+ * Note for reference: libyuv is using a divisor of 64 instead of 256 to ensure no overflow in
+ * 16-bit math. The maximum color error for libyuv is 3.5 / 14.
+ *
+ * The clamping is done using a lookup vector where negative indices are mapped to 0
+ * and indices > 255 are mapped to 255. (For 10-bit these are clamped to 0 to 1023)
+ *
+ * The matrices are assumed to be of the following format (note the sign on the 2nd row):
+ *
+ * [ R ]     [ _y     0    _r_v ]   [ Y -  C16 ]
+ * [ G ]  =  [ _y  -_g_u  -_g_v ] * [ U - C128 ]
+ * [ B ]     [ _y   _b_u     0  ]   [ V - C128 ]
+ *
+ * C16 is 1 << (bitdepth - 4) for limited range, and 0 for full range
+ * C128 is 1 << (bitdepth - 1)
+ * C255 is (1 << bitdepth) - 1
+ *
+ * The min and max values from these equations determine the clip range needed for clamping:
+ *
+ * min = - (_y * C16 + max((_g_u + _g_v) * (C255-C128), max(_r_v, _b_u) * C128)) / 256
+ * max = (_y * (C255 - C16) + max((_g_u + _g_v) * C128, max(_r_v, _b_u) * (C255-C128)) + 128) / 256
+ */
+
+struct ColorConverter::Coeffs {
+    int32_t _y;
+    int32_t _r_v;
+    int32_t _g_u;
+    int32_t _g_v;
+    int32_t _b_u;
+};
+
+/*
+
+Color conversion rules are dictated by ISO (e.g. ISO/IEC 23008-2).
+
+Limited range means Y is in [16, 235] and U and V are in [16, 240] (a 224-wide range), corresponding to [-0.5, 0.5].
+
+Full range means Y is in [0, 255] and U and V are in [0.5, 255.5], corresponding to [-0.5, 0.5].
+
+RGB is always in full range ([0, 255])
+
+The color primaries determine the KR and KB values:
+
+
+For full range (assuming 8-bits) ISO defines:
+
+(   Y   )   (  KR      1-KR-KB       KB  )
+(       )   (                            )   (R)
+(       )   (-KR/2   -(1-KR-KB)/2        )   ( )
+(U - 128) = (-----   ------------    0.5 ) * (G)
+(       )   ((1-KB)     (1-KB)           )   ( )
+(       )   (                            )   (B)
+(       )   (        -(1-KR-KB)/2  -KB/2 )
+(V - 128)   ( 0.5    ------------  ----- )
+            (           (1-KR)     (1-KR))
+
+(the math is rounded, 128 is (1 << (bitdepth - 1)) )
+
+From this
+
+(R)      ( 1       0        2*(1-KR)   )   (   Y   )
+( )      (                             )   (       )
+( )      (    2*KB*(KB-1)  2*KR*(KR-1) )   (       )
+(G)  =   ( 1  -----------  ----------- ) * (U - 128)
+( )      (      1-KR-KB      1-KR-KB   )   (       )
+( )      (                             )   (       )
+(B)      ( 1   2*(1-KB)         0      )   (V - 128)
+
+For limited range, this becomes
+
+(R)      ( 1       0        2*(1-KR)   )   (255/219  0  0)   (Y -  16)
+( )      (                             )   (             )   (       )
+( )      (    2*KB*(KB-1)  2*KR*(KR-1) )   (             )   (       )
+(G)  =   ( 1  -----------  ----------- ) * (0  255/224  0) * (U - 128)
+( )      (      1-KR-KB      1-KR-KB   )   (             )   (       )
+( )      (                             )   (             )   (       )
+(B)      ( 1   2*(1-KB)         0      )   (0  0  255/224)   (V - 128)
+
+( For non-8-bit, 16 is (1 << (bitdepth - 4)), 128 is (1 << (bitdepth - 1)),
+  255 is ((1 << bitdepth) - 1), 219 is (219 << (bitdepth - 8)) and
+  224 is (224 << (bitdepth - 8)), so the matrix coefficients slightly change. )
+
+*/
+
+namespace {
+
+/**
+ * BT.601:  K_R = 0.299;  K_B = 0.114
+ *
+ * clip range 8-bit: [-277, 535], 10-bit: [-1111, 2155]
+ */
+const struct ColorConverter::Coeffs BT601_FULL      = { 256, 359,  88, 183, 454 };
+const struct ColorConverter::Coeffs BT601_LIMITED   = { 298, 409, 100, 208, 516 };
+const struct ColorConverter::Coeffs BT601_LTD_10BIT = { 299, 410, 101, 209, 518 };
+
+/**
+ * BT.709:  K_R = 0.2126; K_B = 0.0722
+ *
+ * clip range 8-bit: [-289, 547], 10-bit: [-1159, 2202]
+ */
+const struct ColorConverter::Coeffs BT709_FULL      = { 256, 403,  48, 120, 475 };
+const struct ColorConverter::Coeffs BT709_LIMITED   = { 298, 459,  55, 136, 541 };
+const struct ColorConverter::Coeffs BT709_LTD_10BIT = { 290, 460,  55, 137, 542 };
+
+/**
+ * BT.2020:  K_R = 0.2627; K_B = 0.0593
+ *
+ * clip range 8-bit: [-294, 552], 10-bit: [-1175, 2218]
+ *
+ * This is the largest clip range.
+ */
+const struct ColorConverter::Coeffs BT2020_FULL      = { 256, 377,  42, 146, 482 };
+const struct ColorConverter::Coeffs BT2020_LIMITED   = { 298, 430,  48, 167, 548 };
+const struct ColorConverter::Coeffs BT2020_LTD_10BIT = { 299, 431,  48, 167, 550 };
+
+constexpr int CLIP_RANGE_MIN_8BIT = -294;
+constexpr int CLIP_RANGE_MAX_8BIT = 552;
+
+constexpr int CLIP_RANGE_MIN_10BIT = -1175;
+constexpr int CLIP_RANGE_MAX_10BIT = 2218;
+
+}
+
 ColorConverter::ColorConverter(
         OMX_COLOR_FORMATTYPE from, OMX_COLOR_FORMATTYPE to)
     : mSrcFormat(from),
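
As a sanity check on the tables above, the coefficients can be re-derived from KR/KB and the range scaling described in the comment block. The small standalone sketch below (not part of the patch) reproduces, for example, BT601_FULL = { 256, 359, 88, 183, 454 } and BT601_LIMITED = { 298, 409, 100, 208, 516 } up to rounding.

```cpp
// Sketch: derive the fixed-point coefficients from KR/KB. Limited range rescales
// Y by max/(219 << (bd - 8)) and chroma by max/(224 << (bd - 8)), as described above.
#include <cmath>
#include <cstdio>

static void derive(const char* name, double kr, double kb, bool limited, int bd) {
    const double kg = 1.0 - kr - kb;
    const double maxVal = (1 << bd) - 1;
    const double yScale = limited ? maxVal / (219 << (bd - 8)) : 1.0;
    const double cScale = limited ? maxVal / (224 << (bd - 8)) : 1.0;
    printf("%s = { %.0f, %.0f, %.0f, %.0f, %.0f }\n", name,
           std::round(256 * yScale),                            // _y
           std::round(256 * cScale * 2 * (1 - kr)),             // _r_v
           std::round(256 * cScale * 2 * kb * (1 - kb) / kg),   // _g_u
           std::round(256 * cScale * 2 * kr * (1 - kr) / kg),   // _g_v
           std::round(256 * cScale * 2 * (1 - kb)));            // _b_u
}

int main() {
    derive("BT601_FULL",      0.299, 0.114, false,  8);   // { 256, 359,  88, 183, 454 }
    derive("BT601_LIMITED",   0.299, 0.114, true,   8);   // { 298, 409, 100, 208, 516 }
    derive("BT601_LTD_10BIT", 0.299, 0.114, true,  10);   // { 299, 410, 101, 209, 518 }
    return 0;
}
```
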
@@ -106,7 +247,8 @@
         case OMX_COLOR_FormatYUV420SemiPlanar:
 #ifdef USE_LIBYUV
             return mDstFormat == OMX_COLOR_Format16bitRGB565
-                    || mDstFormat == OMX_COLOR_Format32BitRGBA8888;
+                    || mDstFormat == OMX_COLOR_Format32BitRGBA8888
+                    || mDstFormat == OMX_COLOR_Format32bitBGRA8888;
 #else
             return mDstFormat == OMX_COLOR_Format16bitRGB565;
 #endif
@@ -290,10 +432,53 @@
     return err;
 }
 
+const struct ColorConverter::Coeffs *ColorConverter::getMatrix() const {
+    const bool isFullRange = mSrcColorSpace.mRange == ColorUtils::kColorRangeFull;
+    const bool is10Bit = (mSrcFormat == COLOR_FormatYUVP010
+            || mSrcFormat == OMX_COLOR_FormatYUV420Planar16);
+
+    switch (mSrcColorSpace.mStandard) {
+    case ColorUtils::kColorStandardBT601_525:
+    case ColorUtils::kColorStandardBT601_625:
+        return (isFullRange ? &BT601_FULL :
+                is10Bit ? &BT601_LTD_10BIT : &BT601_LIMITED);
+
+    case ColorUtils::kColorStandardBT709:
+        return (isFullRange ? &BT709_FULL :
+                is10Bit ? &BT709_LTD_10BIT : &BT709_LIMITED);
+
+    case ColorUtils::kColorStandardBT2020:
+        return (isFullRange ? &BT2020_FULL :
+                is10Bit ? &BT2020_LTD_10BIT : &BT2020_LIMITED);
+
+    default:
+        // for now use the default matrices for unhandled color spaces
+        // TODO: fail?
+        // return nullptr;
+        [[fallthrough]];
+
+    case ColorUtils::kColorStandardUnspecified:
+        return is10Bit ? &BT2020_LTD_10BIT : &BT601_LIMITED;
+
+    }
+}
+
 status_t ColorConverter::convertCbYCrY(
         const BitmapParams &src, const BitmapParams &dst) {
     // XXX Untested
 
+    const struct Coeffs *matrix = getMatrix();
+    if (!matrix) {
+        return ERROR_UNSUPPORTED;
+    }
+
+    signed _b_u = matrix->_b_u;
+    signed _neg_g_u = -matrix->_g_u;
+    signed _neg_g_v = -matrix->_g_v;
+    signed _r_v = matrix->_r_v;
+    signed _y = matrix->_y;
+    signed _c16 = mSrcColorSpace.mRange == ColorUtils::kColorRangeLimited ? 16 : 0;
+
     uint8_t *kAdjustedClip = initClip();
 
     uint16_t *dst_ptr = (uint16_t *)dst.mBits
@@ -304,22 +489,22 @@
 
     for (size_t y = 0; y < src.cropHeight(); ++y) {
         for (size_t x = 0; x < src.cropWidth(); x += 2) {
-            signed y1 = (signed)src_ptr[2 * x + 1] - 16;
-            signed y2 = (signed)src_ptr[2 * x + 3] - 16;
+            signed y1 = (signed)src_ptr[2 * x + 1] - _c16;
+            signed y2 = (signed)src_ptr[2 * x + 3] - _c16;
             signed u = (signed)src_ptr[2 * x] - 128;
             signed v = (signed)src_ptr[2 * x + 2] - 128;
 
-            signed u_b = u * 517;
-            signed u_g = -u * 100;
-            signed v_g = -v * 208;
-            signed v_r = v * 409;
+            signed u_b = u * _b_u;
+            signed u_g = u * _neg_g_u;
+            signed v_g = v * _neg_g_v;
+            signed v_r = v * _r_v;
 
-            signed tmp1 = y1 * 298;
+            signed tmp1 = y1 * _y + 128;
             signed b1 = (tmp1 + u_b) / 256;
             signed g1 = (tmp1 + v_g + u_g) / 256;
             signed r1 = (tmp1 + v_r) / 256;
 
-            signed tmp2 = y2 * 298;
+            signed tmp2 = y2 * _y + 128;
             signed b2 = (tmp2 + u_b) / 256;
             signed g2 = (tmp2 + v_g + u_g) / 256;
             signed r2 = (tmp2 + v_r) / 256;
@@ -348,15 +533,32 @@
     return OK;
 }
 
+/*
+    libyuv supports the following color spaces:
+
+    I420: BT.601 limited range
+    J420: BT.601 full range (jpeg)
+    H420: BT.709 limited range
+
+*/
+
 #define DECLARE_YUV2RGBFUNC(func, rgb) int (*func)(     \
-        const uint8_t*, int, const uint8_t*, int,           \
-        const uint8_t*, int, uint8_t*, int, int, int)       \
-        = mSrcColorSpace.isBt709() ? libyuv::H420To##rgb \
-        : mSrcColorSpace.isJpeg() ? libyuv::J420To##rgb  \
+        const uint8_t*, int, const uint8_t*, int,       \
+        const uint8_t*, int, uint8_t*, int, int, int)   \
+        = mSrcColorSpace.isH420() ? libyuv::H420To##rgb \
+        : mSrcColorSpace.isJ420() ? libyuv::J420To##rgb \
         : libyuv::I420To##rgb
 
 status_t ColorConverter::convertYUV420PlanarUseLibYUV(
         const BitmapParams &src, const BitmapParams &dst) {
+    // Fall back to our conversion if libyuv does not support the color space.
+    // I420 (BT.601 limited) is default, so don't fall back if we end up using it anyway.
+    if (!mSrcColorSpace.isH420() && !mSrcColorSpace.isJ420()
+            // && !mSrcColorSpace.isI420() /* same as line below */
+            && getMatrix() != &BT601_LIMITED) {
+        return convertYUV420Planar(src, dst);
+    }
+
     uint8_t *dst_ptr = (uint8_t *)dst.mBits
         + dst.mCropTop * dst.mStride + dst.mCropLeft * dst.mBpp;
 
@@ -404,6 +606,13 @@
 
 status_t ColorConverter::convertYUV420SemiPlanarUseLibYUV(
         const BitmapParams &src, const BitmapParams &dst) {
+    // Fall back to our conversion if libyuv does not support the color space.
+    // libyuv only supports BT.601 limited range NV12. Don't fall back if we end up using it anyway.
+    if (// !mSrcColorSpace.isI420() && /* same as below */
+        getMatrix() != &BT601_LIMITED) {
+        return convertYUV420SemiPlanar(src, dst);
+    }
+
     uint8_t *dst_ptr = (uint8_t *)dst.mBits
         + dst.mCropTop * dst.mStride + dst.mCropLeft * dst.mBpp;
 
@@ -444,16 +653,16 @@
     case OMX_COLOR_FormatYUV420Planar:
         return [](void *src_y, void *src_u, void *src_v, size_t x,
                   signed *y1, signed *y2, signed *u, signed *v) {
-            *y1 = ((uint8_t*)src_y)[x] - 16;
-            *y2 = ((uint8_t*)src_y)[x + 1] - 16;
+            *y1 = ((uint8_t*)src_y)[x];
+            *y2 = ((uint8_t*)src_y)[x + 1];
             *u = ((uint8_t*)src_u)[x / 2] - 128;
             *v = ((uint8_t*)src_v)[x / 2] - 128;
         };
     case OMX_COLOR_FormatYUV420Planar16:
         return [](void *src_y, void *src_u, void *src_v, size_t x,
                 signed *y1, signed *y2, signed *u, signed *v) {
-            *y1 = (signed)(((uint16_t*)src_y)[x] >> 2) - 16;
-            *y2 = (signed)(((uint16_t*)src_y)[x + 1] >> 2) - 16;
+            *y1 = (signed)(((uint16_t*)src_y)[x] >> 2);
+            *y2 = (signed)(((uint16_t*)src_y)[x + 1] >> 2);
             *u = (signed)(((uint16_t*)src_u)[x / 2] >> 2) - 128;
             *v = (signed)(((uint16_t*)src_v)[x / 2] >> 2) - 128;
         };
@@ -463,6 +672,8 @@
     return nullptr;
 }
 
+// TRICKY: this method only supports RGBA_1010102 output for 10-bit sources, and the other outputs
+// only for 8-bit sources, because the type of kAdjustedClip is chosen from the output format, not the input.
 std::function<void (void *, bool, signed, signed, signed, signed, signed, signed)>
 getWriteToDst(OMX_COLOR_FORMATTYPE dstFormat, void *kAdjustedClip) {
     switch ((int)dstFormat) {
@@ -557,6 +768,18 @@
 
 status_t ColorConverter::convertYUV420Planar(
         const BitmapParams &src, const BitmapParams &dst) {
+    const struct Coeffs *matrix = getMatrix();
+    if (!matrix) {
+        return ERROR_UNSUPPORTED;
+    }
+
+    signed _b_u = matrix->_b_u;
+    signed _neg_g_u = -matrix->_g_u;
+    signed _neg_g_v = -matrix->_g_v;
+    signed _r_v = matrix->_r_v;
+    signed _y = matrix->_y;
+    signed _c16 = mSrcColorSpace.mRange == ColorUtils::kColorRangeLimited ? 16 : 0;
+
     uint8_t *kAdjustedClip = initClip();
 
     auto readFromSrc = getReadFromSrc(mSrcFormat);
@@ -575,38 +798,20 @@
 
     for (size_t y = 0; y < src.cropHeight(); ++y) {
         for (size_t x = 0; x < src.cropWidth(); x += 2) {
-            // B = 1.164 * (Y - 16) + 2.018 * (U - 128)
-            // G = 1.164 * (Y - 16) - 0.813 * (V - 128) - 0.391 * (U - 128)
-            // R = 1.164 * (Y - 16) + 1.596 * (V - 128)
-
-            // B = 298/256 * (Y - 16) + 517/256 * (U - 128)
-            // G = .................. - 208/256 * (V - 128) - 100/256 * (U - 128)
-            // R = .................. + 409/256 * (V - 128)
-
-            // min_B = (298 * (- 16) + 517 * (- 128)) / 256 = -277
-            // min_G = (298 * (- 16) - 208 * (255 - 128) - 100 * (255 - 128)) / 256 = -172
-            // min_R = (298 * (- 16) + 409 * (- 128)) / 256 = -223
-
-            // max_B = (298 * (255 - 16) + 517 * (255 - 128)) / 256 = 534
-            // max_G = (298 * (255 - 16) - 208 * (- 128) - 100 * (- 128)) / 256 = 432
-            // max_R = (298 * (255 - 16) + 409 * (255 - 128)) / 256 = 481
-
-            // clip range -278 .. 535
-
             signed y1, y2, u, v;
             readFromSrc(src_y, src_u, src_v, x, &y1, &y2, &u, &v);
 
-            signed u_b = u * 517;
-            signed u_g = -u * 100;
-            signed v_g = -v * 208;
-            signed v_r = v * 409;
+            signed u_b = u * _b_u;
+            signed u_g = u * _neg_g_u;
+            signed v_g = v * _neg_g_v;
+            signed v_r = v * _r_v;
 
-            signed tmp1 = y1 * 298;
+            signed tmp1 = (y1 - _c16) * _y + 128;
             signed b1 = (tmp1 + u_b) / 256;
             signed g1 = (tmp1 + v_g + u_g) / 256;
             signed r1 = (tmp1 + v_r) / 256;
 
-            signed tmp2 = y2 * 298;
+            signed tmp2 = (y2 - _c16) * _y + 128;
             signed b2 = (tmp2 + u_b) / 256;
             signed g2 = (tmp2 + v_g + u_g) / 256;
             signed r2 = (tmp2 + v_r) / 256;
@@ -648,6 +853,18 @@
 
 status_t ColorConverter::convertYUVP010ToRGBA1010102(
         const BitmapParams &src, const BitmapParams &dst) {
+    const struct Coeffs *matrix = getMatrix();
+    if (!matrix) {
+        return ERROR_UNSUPPORTED;
+    }
+
+    signed _b_u = matrix->_b_u;
+    signed _neg_g_u = -matrix->_g_u;
+    signed _neg_g_v = -matrix->_g_v;
+    signed _r_v = matrix->_r_v;
+    signed _y = matrix->_y;
+    signed _c16 = mSrcColorSpace.mRange == ColorUtils::kColorRangeLimited ? 64 : 0;
+
     uint16_t *kAdjustedClip10bit = initClip10Bit();
 
 //    auto readFromSrc = getReadFromSrc(mSrcFormat);
@@ -663,72 +880,28 @@
             + src.mStride * src.mHeight
             + (src.mCropTop / 2) * src.mStride + src.mCropLeft * src.mBpp);
 
-    // BT.2020 Limited Range conversion
-
-    // B = 1.168  *(Y - 64) + 2.148  *(U - 512)
-    // G = 1.168  *(Y - 64) - 0.652  *(V - 512) - 0.188  *(U - 512)
-    // R = 1.168  *(Y - 64) + 1.683  *(V - 512)
-
-    // B = 1196/1024  *(Y - 64) + 2200/1024  *(U - 512)
-    // G = .................... -  668/1024  *(V - 512) - 192/1024  *(U - 512)
-    // R = .................... + 1723/1024  *(V - 512)
-
-    // min_B = (1196  *(- 64) + 2200  *(- 512)) / 1024 = -1175
-    // min_G = (1196  *(- 64) - 668  *(1023 - 512) - 192  *(1023 - 512)) / 1024 = -504
-    // min_R = (1196  *(- 64) + 1723  *(- 512)) / 1024 = -937
-
-    // max_B = (1196  *(1023 - 64) + 2200  *(1023 - 512)) / 1024 = 2218
-    // max_G = (1196  *(1023 - 64) - 668  *(- 512) - 192  *(- 512)) / 1024 = 1551
-    // max_R = (1196  *(1023 - 64) + 1723  *(1023 - 512)) / 1024 = 1980
-
-    // clip range -1175 .. 2218
-
-    // BT.709 Limited Range conversion
-
-    // B = 1.164 * (Y - 64) + 2.018 * (U - 512)
-    // G = 1.164 * (Y - 64) - 0.813 * (V - 512) - 0.391 * (U - 512)
-    // R = 1.164 * (Y - 64) + 1.596 * (V - 512)
-
-    // B = 1192/1024 * (Y - 64) + 2068/1024 * (U - 512)
-    // G = .................... -  832/1024 * (V - 512) - 400/1024 * (U - 512)
-    // R = .................... + 1636/1024 * (V - 512)
-
-    // min_B = (1192 * (- 64) + 2068 * (- 512)) / 1024 = -1108
-
-    // max_B = (1192 * (1023 - 64) + 517 * (1023 - 512)) / 1024 = 2148
-
-    // clip range -1108 .. 2148
-
-    signed mY = 1196, mU_B = 2200, mV_G = -668, mV_R = 1723, mU_G = -192;
-    if (!mSrcColorSpace.isBt2020()) {
-        mY = 1192;
-        mU_B = 2068;
-        mV_G = -832;
-        mV_R = 1636;
-        mU_G = -400;
-    }
     for (size_t y = 0; y < src.cropHeight(); ++y) {
         for (size_t x = 0; x < src.cropWidth(); x += 2) {
             signed y1, y2, u, v;
-            y1 = (src_y[x] >> 6) - 64;
-            y2 = (src_y[x + 1] >> 6) - 64;
+            y1 = (src_y[x] >> 6) - _c16;
+            y2 = (src_y[x + 1] >> 6) - _c16;
             u = int(src_uv[x] >> 6) - 512;
             v = int(src_uv[x + 1] >> 6) - 512;
 
-            signed u_b = u * mU_B;
-            signed u_g = u * mU_G;
-            signed v_g = v * mV_G;
-            signed v_r = v * mV_R;
+            signed u_b = u * _b_u;
+            signed u_g = u * _neg_g_u;
+            signed v_g = v * _neg_g_v;
+            signed v_r = v * _r_v;
 
-            signed tmp1 = y1 * mY;
-            signed b1 = (tmp1 + u_b) / 1024;
-            signed g1 = (tmp1 + v_g + u_g) / 1024;
-            signed r1 = (tmp1 + v_r) / 1024;
+            signed tmp1 = y1 * _y + 128;
+            signed b1 = (tmp1 + u_b) / 256;
+            signed g1 = (tmp1 + v_g + u_g) / 256;
+            signed r1 = (tmp1 + v_r) / 256;
 
-            signed tmp2 = y2 * mY;
-            signed b2 = (tmp2 + u_b) / 1024;
-            signed g2 = (tmp2 + v_g + u_g) / 1024;
-            signed r2 = (tmp2 + v_r) / 1024;
+            signed tmp2 = y2 * _y + 128;
+            signed b2 = (tmp2 + u_b) / 256;
+            signed g2 = (tmp2 + v_g + u_g) / 256;
+            signed r2 = (tmp2 + v_r) / 256;
 
             bool uncropped = x + 1 < src.cropWidth();
 
@@ -949,11 +1122,6 @@
 
 status_t ColorConverter::convertQCOMYUV420SemiPlanar(
         const BitmapParams &src, const BitmapParams &dst) {
-    uint8_t *kAdjustedClip = initClip();
-
-    uint16_t *dst_ptr = (uint16_t *)dst.mBits
-        + dst.mCropTop * dst.mWidth + dst.mCropLeft;
-
     const uint8_t *src_y =
         (const uint8_t *)src.mBits + src.mCropTop * src.mWidth + src.mCropLeft;
 
@@ -961,67 +1129,25 @@
         (const uint8_t *)src_y + src.mWidth * src.mHeight
         + src.mCropTop * src.mWidth + src.mCropLeft;
 
-    for (size_t y = 0; y < src.cropHeight(); ++y) {
-        for (size_t x = 0; x < src.cropWidth(); x += 2) {
-            signed y1 = (signed)src_y[x] - 16;
-            signed y2 = (signed)src_y[x + 1] - 16;
+    /* QCOMYUV420SemiPlanar is NV21, while MediaCodec uses NV12 */
+    return convertYUV420SemiPlanarBase(
+            src, dst, src_y, src_u, src.mWidth /* row_inc */, true /* isNV21 */);
+}
 
-            signed u = (signed)src_u[x & ~1] - 128;
-            signed v = (signed)src_u[(x & ~1) + 1] - 128;
+status_t ColorConverter::convertTIYUV420PackedSemiPlanar(
+        const BitmapParams &src, const BitmapParams &dst) {
+    const uint8_t *src_y =
+        (const uint8_t *)src.mBits + src.mCropTop * src.mWidth + src.mCropLeft;
 
-            signed u_b = u * 517;
-            signed u_g = -u * 100;
-            signed v_g = -v * 208;
-            signed v_r = v * 409;
+    const uint8_t *src_u =
+        (const uint8_t *)src_y + src.mWidth * (src.mHeight - src.mCropTop / 2);
 
-            signed tmp1 = y1 * 298;
-            signed b1 = (tmp1 + u_b) / 256;
-            signed g1 = (tmp1 + v_g + u_g) / 256;
-            signed r1 = (tmp1 + v_r) / 256;
-
-            signed tmp2 = y2 * 298;
-            signed b2 = (tmp2 + u_b) / 256;
-            signed g2 = (tmp2 + v_g + u_g) / 256;
-            signed r2 = (tmp2 + v_r) / 256;
-
-            uint32_t rgb1 =
-                ((kAdjustedClip[b1] >> 3) << 11)
-                | ((kAdjustedClip[g1] >> 2) << 5)
-                | (kAdjustedClip[r1] >> 3);
-
-            uint32_t rgb2 =
-                ((kAdjustedClip[b2] >> 3) << 11)
-                | ((kAdjustedClip[g2] >> 2) << 5)
-                | (kAdjustedClip[r2] >> 3);
-
-            if (x + 1 < src.cropWidth()) {
-                *(uint32_t *)(&dst_ptr[x]) = (rgb2 << 16) | rgb1;
-            } else {
-                dst_ptr[x] = rgb1;
-            }
-        }
-
-        src_y += src.mWidth;
-
-        if (y & 1) {
-            src_u += src.mWidth;
-        }
-
-        dst_ptr += dst.mWidth;
-    }
-
-    return OK;
+    return convertYUV420SemiPlanarBase(
+            src, dst, src_y, src_u, src.mWidth /* row_inc */);
 }
 
 status_t ColorConverter::convertYUV420SemiPlanar(
         const BitmapParams &src, const BitmapParams &dst) {
-    // XXX Untested
-
-    uint8_t *kAdjustedClip = initClip();
-
-    uint16_t *dst_ptr = (uint16_t *)((uint8_t *)
-            dst.mBits + dst.mCropTop * dst.mStride + dst.mCropLeft * dst.mBpp);
-
     const uint8_t *src_y =
         (const uint8_t *)src.mBits + src.mCropTop * src.mStride + src.mCropLeft;
 
@@ -1029,90 +1155,49 @@
         (const uint8_t *)src.mBits + src.mHeight * src.mStride +
         (src.mCropTop / 2) * src.mStride + src.mCropLeft;
 
-    for (size_t y = 0; y < src.cropHeight(); ++y) {
-        for (size_t x = 0; x < src.cropWidth(); x += 2) {
-            signed y1 = (signed)src_y[x] - 16;
-            signed y2 = (signed)src_y[x + 1] - 16;
-
-            signed v = (signed)src_u[x & ~1] - 128;
-            signed u = (signed)src_u[(x & ~1) + 1] - 128;
-
-            signed u_b = u * 517;
-            signed u_g = -u * 100;
-            signed v_g = -v * 208;
-            signed v_r = v * 409;
-
-            signed tmp1 = y1 * 298;
-            signed b1 = (tmp1 + u_b) / 256;
-            signed g1 = (tmp1 + v_g + u_g) / 256;
-            signed r1 = (tmp1 + v_r) / 256;
-
-            signed tmp2 = y2 * 298;
-            signed b2 = (tmp2 + u_b) / 256;
-            signed g2 = (tmp2 + v_g + u_g) / 256;
-            signed r2 = (tmp2 + v_r) / 256;
-
-            uint32_t rgb1 =
-                ((kAdjustedClip[b1] >> 3) << 11)
-                | ((kAdjustedClip[g1] >> 2) << 5)
-                | (kAdjustedClip[r1] >> 3);
-
-            uint32_t rgb2 =
-                ((kAdjustedClip[b2] >> 3) << 11)
-                | ((kAdjustedClip[g2] >> 2) << 5)
-                | (kAdjustedClip[r2] >> 3);
-
-            if (x + 1 < src.cropWidth()) {
-                *(uint32_t *)(&dst_ptr[x]) = (rgb2 << 16) | rgb1;
-            } else {
-                dst_ptr[x] = rgb1;
-            }
-        }
-
-        src_y += src.mStride;
-
-        if (y & 1) {
-            src_u += src.mStride;
-        }
-
-        dst_ptr = (uint16_t*)((uint8_t*)dst_ptr + dst.mStride);
-    }
-
-    return OK;
+    return convertYUV420SemiPlanarBase(
+            src, dst, src_y, src_u, src.mStride /* row_inc */);
 }
 
-status_t ColorConverter::convertTIYUV420PackedSemiPlanar(
-        const BitmapParams &src, const BitmapParams &dst) {
+status_t ColorConverter::convertYUV420SemiPlanarBase(
+        const BitmapParams &src, const BitmapParams &dst,
+        const uint8_t *src_y, const uint8_t *src_u, size_t row_inc, bool isNV21) {
+    const struct Coeffs *matrix = getMatrix();
+    if (!matrix) {
+        return ERROR_UNSUPPORTED;
+    }
+
+    signed _b_u = matrix->_b_u;
+    signed _neg_g_u = -matrix->_g_u;
+    signed _neg_g_v = -matrix->_g_v;
+    signed _r_v = matrix->_r_v;
+    signed _y = matrix->_y;
+    signed _c16 = mSrcColorSpace.mRange == ColorUtils::kColorRangeLimited ? 16 : 0;
+
     uint8_t *kAdjustedClip = initClip();
 
-    uint16_t *dst_ptr = (uint16_t *)dst.mBits
-        + dst.mCropTop * dst.mWidth + dst.mCropLeft;
-
-    const uint8_t *src_y =
-        (const uint8_t *)src.mBits + src.mCropTop * src.mWidth + src.mCropLeft;
-
-    const uint8_t *src_u =
-        (const uint8_t *)src_y + src.mWidth * (src.mHeight - src.mCropTop / 2);
+    uint16_t *dst_ptr = (uint16_t *)((uint8_t *)
+            dst.mBits + dst.mCropTop * dst.mStride + dst.mCropLeft * dst.mBpp);
 
     for (size_t y = 0; y < src.cropHeight(); ++y) {
         for (size_t x = 0; x < src.cropWidth(); x += 2) {
-            signed y1 = (signed)src_y[x] - 16;
-            signed y2 = (signed)src_y[x + 1] - 16;
+            signed y1 = (signed)src_y[x] - _c16;
+            signed y2 = (signed)src_y[x + 1] - _c16;
 
-            signed u = (signed)src_u[x & ~1] - 128;
-            signed v = (signed)src_u[(x & ~1) + 1] - 128;
+            signed u = (signed)src_u[(x & ~1) + isNV21] - 128;
+            signed v = (signed)src_u[(x & ~1) + !isNV21] - 128;
 
-            signed u_b = u * 517;
-            signed u_g = -u * 100;
-            signed v_g = -v * 208;
-            signed v_r = v * 409;
+            signed u_b = u * _b_u;
+            signed u_g = u * _neg_g_u;
+            signed v_g = v * _neg_g_v;
+            signed v_r = v * _r_v;
 
-            signed tmp1 = y1 * 298;
+            signed tmp1 = y1 * _y + 128;
             signed b1 = (tmp1 + u_b) / 256;
             signed g1 = (tmp1 + v_g + u_g) / 256;
             signed r1 = (tmp1 + v_r) / 256;
 
-            signed tmp2 = y2 * 298;
+            signed tmp2 = y2 * _y + 128;
             signed b2 = (tmp2 + u_b) / 256;
             signed g2 = (tmp2 + v_g + u_g) / 256;
             signed r2 = (tmp2 + v_r) / 256;
@@ -1134,46 +1219,40 @@
             }
         }
 
-        src_y += src.mWidth;
+        src_y += row_inc;
 
         if (y & 1) {
-            src_u += src.mWidth;
+            src_u += row_inc;
         }
 
-        dst_ptr += dst.mWidth;
+        dst_ptr = (uint16_t*)((uint8_t*)dst_ptr + dst.mStride);
     }
 
     return OK;
 }
 
 uint8_t *ColorConverter::initClip() {
-    static const signed kClipMin = -278;
-    static const signed kClipMax = 535;
-
     if (mClip == NULL) {
-        mClip = new uint8_t[kClipMax - kClipMin + 1];
+        mClip = new uint8_t[CLIP_RANGE_MAX_8BIT - CLIP_RANGE_MIN_8BIT + 1];
 
-        for (signed i = kClipMin; i <= kClipMax; ++i) {
-            mClip[i - kClipMin] = (i < 0) ? 0 : (i > 255) ? 255 : (uint8_t)i;
+        for (signed i = CLIP_RANGE_MIN_8BIT; i <= CLIP_RANGE_MAX_8BIT; ++i) {
+            mClip[i - CLIP_RANGE_MIN_8BIT] = (i < 0) ? 0 : (i > 255) ? 255 : (uint8_t)i;
         }
     }
 
-    return &mClip[-kClipMin];
+    return &mClip[-CLIP_RANGE_MIN_8BIT];
 }
 
 uint16_t *ColorConverter::initClip10Bit() {
-    static const signed kClipMin = -1176;
-    static const signed kClipMax = 2219;
-
     if (mClip10Bit == NULL) {
-        mClip10Bit = new uint16_t[kClipMax - kClipMin + 1];
+        mClip10Bit = new uint16_t[CLIP_RANGE_MAX_10BIT - CLIP_RANGE_MIN_10BIT + 1];
 
-        for (signed i = kClipMin; i <= kClipMax; ++i) {
-            mClip10Bit[i - kClipMin] = (i < 0) ? 0 : (i > 1023) ? 1023 : (uint16_t)i;
+        for (signed i = CLIP_RANGE_MIN_10BIT; i <= CLIP_RANGE_MAX_10BIT; ++i) {
+            mClip10Bit[i - CLIP_RANGE_MIN_10BIT] = (i < 0) ? 0 : (i > 1023) ? 1023 : (uint16_t)i;
         }
     }
 
-    return &mClip10Bit[-kClipMin];
+    return &mClip10Bit[-CLIP_RANGE_MIN_10BIT];
 }
 
 }  // namespace android
diff --git a/media/libstagefright/include/media/stagefright/ACodec.h b/media/libstagefright/include/media/stagefright/ACodec.h
index 632b32c..5a21755 100644
--- a/media/libstagefright/include/media/stagefright/ACodec.h
+++ b/media/libstagefright/include/media/stagefright/ACodec.h
@@ -522,6 +522,7 @@
     status_t setLatency(uint32_t latency);
     status_t getLatency(uint32_t *latency);
     status_t setTunnelPeek(int32_t tunnelPeek);
+    status_t setTunnelPeekLegacy(int32_t isLegacy);
     status_t setAudioPresentation(int32_t presentationId, int32_t programId);
     status_t setOperatingRate(float rateFloat, bool isVideo);
     status_t getIntraRefreshPeriod(uint32_t *intraRefreshPeriod);
diff --git a/media/libstagefright/include/media/stagefright/ColorConverter.h b/media/libstagefright/include/media/stagefright/ColorConverter.h
index 1d86a22..7a05f00 100644
--- a/media/libstagefright/include/media/stagefright/ColorConverter.h
+++ b/media/libstagefright/include/media/stagefright/ColorConverter.h
@@ -47,15 +47,20 @@
             size_t dstCropLeft, size_t dstCropTop,
             size_t dstCropRight, size_t dstCropBottom);
 
+    struct Coeffs; // matrix coefficients
+
 private:
     struct ColorSpace {
         uint32_t mStandard;
         uint32_t mRange;
         uint32_t mTransfer;
 
-        bool isBt709();
-        bool isBt2020();
-        bool isJpeg();
+        bool isBt2020() const;
+
+        // libyuv helper methods
+        bool isH420() const;
+        bool isI420() const;
+        bool isJ420() const;
     };
 
     struct BitmapParams {
@@ -84,6 +89,9 @@
     uint8_t *initClip();
     uint16_t *initClip10Bit();
 
+    // returns the YUV2RGB matrix coefficients according to the color aspects and bit depth
+    const struct Coeffs *getMatrix() const;
+
     status_t convertCbYCrY(
             const BitmapParams &src, const BitmapParams &dst);
 
@@ -111,6 +119,10 @@
     status_t convertYUV420SemiPlanar(
             const BitmapParams &src, const BitmapParams &dst);
 
+    status_t convertYUV420SemiPlanarBase(
+            const BitmapParams &src, const BitmapParams &dst,
+            const uint8_t *src_y, const uint8_t *src_u, size_t row_inc, bool isNV21 = false);
+
     status_t convertTIYUV420PackedSemiPlanar(
             const BitmapParams &src, const BitmapParams &dst);
 
diff --git a/media/libstagefright/include/media/stagefright/MediaCodec.h b/media/libstagefright/include/media/stagefright/MediaCodec.h
index f5af50d..1d2d711 100644
--- a/media/libstagefright/include/media/stagefright/MediaCodec.h
+++ b/media/libstagefright/include/media/stagefright/MediaCodec.h
@@ -25,6 +25,7 @@
 #include <media/hardware/CryptoAPI.h>
 #include <media/MediaCodecInfo.h>
 #include <media/MediaMetrics.h>
+#include <media/MediaProfiles.h>
 #include <media/stagefright/foundation/AHandler.h>
 #include <media/stagefright/FrameRenderTracker.h>
 #include <utils/Vector.h>
@@ -397,6 +398,7 @@
     // <all states>     -> EnabledNoBuffer  when flush
     // <all states>     -> EnabledNoBuffer  when stop then configure then start
     enum struct TunnelPeekState {
+        kLegacyMode,
         kDisabledNoBuffer,
         kEnabledNoBuffer,
         kDisabledQueued,
@@ -451,11 +453,12 @@
     int32_t mRotationDegrees;
     int32_t mAllowFrameDroppingBySurface;
 
-    uint32_t mHDRMetadataFlags; /* bitmask of kFlagHDR* */
-    enum {
-        kFlagHDRStaticInfo = 1 << 0,
-        kFlagHDR10PlusInfo = 1 << 1,
-    };
+    int32_t mConfigColorTransfer;
+    bool mHDRStaticInfo;
+    bool mHDR10PlusInfo;
+    void updateHDRFormatMetric();
+    hdr_format getHDRFormat(const int32_t profile, const int32_t transfer,
+            const AString &mediaType);
 
     // initial create parameters
     AString mInitName;
diff --git a/media/libstagefright/omx/OMXStore.cpp b/media/libstagefright/omx/OMXStore.cpp
index 7e33f09..4827d9e 100644
--- a/media/libstagefright/omx/OMXStore.cpp
+++ b/media/libstagefright/omx/OMXStore.cpp
@@ -28,6 +28,8 @@
 #include <dlfcn.h>
 #include <fcntl.h>
 
+#include <sstream>
+
 namespace android {
 
 OMXStore::OMXStore() {
@@ -108,9 +110,26 @@
     return android::base::GetIntProperty("ro.product.first_api_level", __ANDROID_API_T__);
 }
 
+static bool isTV() {
+    static const bool kIsTv = []() {
+        std::string characteristics = android::base::GetProperty("ro.build.characteristics", "");
+        std::stringstream ss(characteristics);
+        for (std::string item; std::getline(ss, item, ','); ) {
+            if (item == "tv") {
+                return true;
+            }
+        }
+        return false;
+    }();
+    return kIsTv;
+}
+
 void OMXStore::addPlugin(OMXPluginBase *plugin) {
     Mutex::Autolock autoLock(mLock);
 
+    bool typeTV = isTV();
+    int firstApiLevel = getFirstApiLevel();
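+    // Summary of the skip logic below (per this change): devices that launched with
+    // Android T or later skip registering OMX video and audio codecs; non-TV devices
+    // that launched with Android S skip OMX video codecs.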
+
     OMX_U32 index = 0;
 
     char name[128];
@@ -125,13 +144,16 @@
             bool skip = false;
             for (String8 role : roles) {
                 if (role.find("video_decoder") != -1 || role.find("video_encoder") != -1) {
-                    if (getFirstApiLevel() >= __ANDROID_API_S__) {
+                    if (firstApiLevel >= __ANDROID_API_T__) {
+                        skip = true;
+                        break;
+                    } else if (!typeTV && firstApiLevel >= __ANDROID_API_S__) {
                         skip = true;
                         break;
                     }
                 }
                 if (role.find("audio_decoder") != -1 || role.find("audio_encoder") != -1) {
-                    if (getFirstApiLevel() >= __ANDROID_API_T__) {
+                    if (firstApiLevel >= __ANDROID_API_T__) {
                         skip = true;
                         break;
                     }
diff --git a/media/libstagefright/rtsp/ARTPConnection.cpp b/media/libstagefright/rtsp/ARTPConnection.cpp
index 7d72510..a61f48f 100644
--- a/media/libstagefright/rtsp/ARTPConnection.cpp
+++ b/media/libstagefright/rtsp/ARTPConnection.cpp
@@ -18,6 +18,7 @@
 #define LOG_TAG "ARTPConnection"
 #include <utils/Log.h>
 
+#include <media/stagefright/rtsp/ARTPAssembler.h>
 #include <media/stagefright/rtsp/ARTPConnection.h>
 #include <media/stagefright/rtsp/ARTPSource.h>
 #include <media/stagefright/rtsp/ASessionDescription.h>
@@ -41,6 +42,10 @@
     return data[0] << 8 | data[1];
 }
 
+static uint32_t u24at(const uint8_t *data) {
+    return u16at(data) << 16 | data[2];
+}
+
 static uint32_t u32at(const uint8_t *data) {
     return u16at(data) << 16 | u16at(&data[2]);
 }
@@ -877,11 +882,15 @@
         switch (data[1]) {
             case 200:
             {
-                parseSR(s, data, headerLength);
+                parseSenderReport(s, data, headerLength);
                 break;
             }
 
             case 201:  // RR
+            {
+                parseReceiverReport(s, data, headerLength);
+                break;
+            }
             case 202:  // SDES
             case 204:  // APP
                 break;
@@ -940,18 +949,44 @@
     return OK;
 }
 
-status_t ARTPConnection::parseSR(
+status_t ARTPConnection::parseSenderReport(
         StreamInfo *s, const uint8_t *data, size_t size) {
-    size_t RC = data[0] & 0x1f;
-
-    if (size < (7 + RC * 6) * 4) {
-        // Packet too short for the minimal SR header.
+    ALOG_ASSERT(size >= 1, "parseSenderReport: invalid packet size.");
+    size_t receptionReportCount = data[0] & 0x1f;
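+    // An RTCP SR consists of a 28-byte (7-word) header followed by one 24-byte
+    // (6-word) reception report block per reported source (see RFC 3550).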
+    if (size < (7 + (receptionReportCount * 6)) * 4) {
+        // Packet too short for the minimal sender report header.
         return -1;
     }
 
-    uint32_t id = u32at(&data[4]);
+    int64_t recvTimeUs = ALooper::GetNowUs();
+    uint32_t senderId = u32at(&data[4]);
     uint64_t ntpTime = u64at(&data[8]);
     uint32_t rtpTime = u32at(&data[16]);
+    uint32_t pktCount = u32at(&data[20]);
+    uint32_t octCount = u32at(&data[24]);
+
+    ALOGD("SR received: ssrc=0x%08x, rtpTime%u == ntpTime %llu, pkt=%u, oct=%u",
+            senderId, rtpTime, (unsigned long long)ntpTime, pktCount, octCount);
+
+    sp<ARTPSource> source = findSource(s, senderId);
+    source->timeUpdate(recvTimeUs, rtpTime, ntpTime);
+
+    for (int32_t i = 0; i < receptionReportCount; i++) {
+        int32_t offset = 28 + (i * 24);
+        parseReceptionReportBlock(s, recvTimeUs, senderId, data + offset, size - offset);
+    }
+
+    return 0;
+}
+
+status_t ARTPConnection::parseReceiverReport(
+        StreamInfo *s, const uint8_t *data, size_t size) {
+    ALOG_ASSERT(size >= 1, "parseReceiverReport: invalid packet size.");
+    size_t receptionReportCount = data[0] & 0x1f;
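+    // An RTCP RR consists of an 8-byte (2-word) header followed by one 24-byte
+    // (6-word) reception report block per reported source (see RFC 3550).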
+    if (size < (2 + (receptionReportCount * 6)) * 4) {
+        // Packet too short for the minimal receiver report header.
+        return -1;
+    }
 
 #if 0
     ALOGI("XXX timeUpdate: ssrc=0x%08x, rtpTime %u == ntpTime %.3f",
@@ -959,10 +994,40 @@
          rtpTime,
          (ntpTime >> 32) + (double)(ntpTime & 0xffffffff) / (1ll << 32));
 #endif
+    int64_t recvTimeUs = ALooper::GetNowUs();
+    uint32_t senderId = u32at(&data[4]);
 
-    sp<ARTPSource> source = findSource(s, id);
+    for (int i = 0; i < receptionReportCount; i++) {
+        int32_t offset = 8 + (i * 24);
+        parseReceptionReportBlock(s, recvTimeUs, senderId, data + offset, size - offset);
+    }
 
-    source->timeUpdate(rtpTime, ntpTime);
+    return 0;
+}
+
+status_t ARTPConnection::parseReceptionReportBlock(
+        StreamInfo *s, int64_t recvTimeUs, uint32_t senderId, const uint8_t *data, size_t size) {
+    ALOG_ASSERT(size >= 24, "parseReceptionReportBlock: invalid packet size.");
+    if (size < 24) {
+        // remaining size is smaller than reception report block size.
+        return -1;
+    }
+
+    uint32_t rbId = u32at(&data[0]);
+    uint32_t fLost = data[4];
+    int32_t cumLost = u24at(&data[5]);
+    uint32_t ehSeq = u32at(&data[8]);
+    uint32_t jitter = u32at(&data[12]);
+    uint32_t lsr = u32at(&data[16]);
+    uint32_t dlsr = u32at(&data[20]);
+
+    ALOGD("Reception Report Block: t:%llu sid:%u rid:%u fl:%u cl:%u hs:%u jt:%u lsr:%u dlsr:%u",
+            (unsigned long long)recvTimeUs, senderId, rbId, fLost, cumLost,
+            ehSeq, jitter, lsr, dlsr);
+    sp<ARTPSource> source = findSource(s, senderId);
+    sp<ReceptionReportBlock> rrb = new ReceptionReportBlock(
+            rbId, fLost, cumLost, ehSeq, jitter, lsr, dlsr);
+    source->processReceptionReportBlock(recvTimeUs, senderId, rrb);
 
     return 0;
 }
diff --git a/media/libstagefright/rtsp/ARTPSource.cpp b/media/libstagefright/rtsp/ARTPSource.cpp
index 5f62b9d..717d8af 100644
--- a/media/libstagefright/rtsp/ARTPSource.cpp
+++ b/media/libstagefright/rtsp/ARTPSource.cpp
@@ -132,10 +132,10 @@
     }
 }
 
-void ARTPSource::timeUpdate(uint32_t rtpTime, uint64_t ntpTime) {
+void ARTPSource::timeUpdate(int64_t recvTimeUs, uint32_t rtpTime, uint64_t ntpTime) {
     mLastSrRtpTime = rtpTime;
     mLastSrNtpTime = ntpTime;
-    mLastSrUpdateTimeUs = ALooper::GetNowUs();
+    mLastSrUpdateTimeUs = recvTimeUs;
 
     sp<AMessage> notify = mNotify->dup();
     notify->setInt32("time-update", true);
@@ -143,7 +143,30 @@
     notify->setInt64("ntp-time", ntpTime);
     notify->setInt32("rtcp-event", 1);
     notify->setInt32("payload-type", RTCP_SR);
-    notify->setInt64("recv-time-us", mLastSrUpdateTimeUs);
+    notify->setInt64("recv-time-us", recvTimeUs);
+    notify->post();
+}
+
+void ARTPSource::processReceptionReportBlock(
+        int64_t recvTimeUs, uint32_t senderId, sp<ReceptionReportBlock> rrb) {
+    mLastRrUpdateTimeUs = recvTimeUs;
+
+    sp<AMessage> notify = mNotify->dup();
+    notify->setInt32("rtcp-event", 1);
+    // A Reception Report Block (RRB) can be included in both a Sender Report and a
+    // Receiver Report; in either case it carries packet reception statistics.
+    // We therefore report an RRB as an RR, since there is no meaningful difference
+    // between an RRB (Reception Report Block) and an RR (Receiver Report).
+    notify->setInt32("payload-type", RTCP_RR);
+    notify->setInt64("recv-time-us", recvTimeUs);
+    notify->setInt32("rtcp-rr-ssrc", senderId);
+    notify->setInt32("rtcp-rrb-ssrc", rrb->ssrc);
+    notify->setInt32("rtcp-rrb-fraction", rrb->fraction);
+    notify->setInt32("rtcp-rrb-lost", rrb->lost);
+    notify->setInt32("rtcp-rrb-lastSeq", rrb->lastSeq);
+    notify->setInt32("rtcp-rrb-jitter", rrb->jitter);
+    notify->setInt32("rtcp-rrb-lsr", rrb->lsr);
+    notify->setInt32("rtcp-rrb-dlsr", rrb->dlsr);
     notify->post();
 }
 
@@ -453,7 +476,8 @@
     data[18] = (mHighestSeqNumber >> 8) & 0xff;
     data[19] = mHighestSeqNumber & 0xff;
 
-    uint32_t jitterTime = 0;
+    uint32_t jitterTimeMs = (uint32_t)getInterArrivalJitterTimeMs();
+    uint32_t jitterTime = jitterTimeMs * mClockRate / 1000;
     data[20] = jitterTime >> 24;    // Interarrival jitter
     data[21] = (jitterTime >> 16) & 0xff;
     data[22] = (jitterTime >> 8) & 0xff;
diff --git a/media/libstagefright/rtsp/include/media/stagefright/rtsp/ARTPAssembler.h b/media/libstagefright/rtsp/include/media/stagefright/rtsp/ARTPAssembler.h
index f959c40..39161b6 100644
--- a/media/libstagefright/rtsp/include/media/stagefright/rtsp/ARTPAssembler.h
+++ b/media/libstagefright/rtsp/include/media/stagefright/rtsp/ARTPAssembler.h
@@ -105,6 +105,27 @@
             (long long)rtp, (long long)play, (long long)exp, isExp);
 }
 
+struct ReceptionReportBlock : public RefBase {
+    uint32_t ssrc;       // ssrc of data source being reported
+    uint32_t fraction;   // fraction lost since last SR/RR
+    int32_t lost;        // cumul. no. pkts lost (signed!)
+    uint32_t lastSeq;    // extended last seq. no. received
+    uint32_t jitter;     // interarrival jitter
+    uint32_t lsr;        // last SR packet from this source
+    uint32_t dlsr;       // delay since last SR packet
+
+    ReceptionReportBlock(uint32_t ssrc, uint32_t fraction, int32_t lost, uint32_t lastSeq,
+            uint32_t jitter, uint32_t lsr, uint32_t dlsr) {
+        this->ssrc = ssrc;
+        this->fraction = fraction;
+        this->lost = lost;
+        this->lastSeq = lastSeq;
+        this->jitter = jitter;
+        this->lsr = lsr;
+        this->dlsr = dlsr;
+    }
+};
+
 }  // namespace android
 
 #endif  // A_RTP_ASSEMBLER_H_
diff --git a/media/libstagefright/rtsp/include/media/stagefright/rtsp/ARTPConnection.h b/media/libstagefright/rtsp/include/media/stagefright/rtsp/ARTPConnection.h
index 36cca31..73d2866 100644
--- a/media/libstagefright/rtsp/include/media/stagefright/rtsp/ARTPConnection.h
+++ b/media/libstagefright/rtsp/include/media/stagefright/rtsp/ARTPConnection.h
@@ -110,7 +110,10 @@
     status_t parseRTP(StreamInfo *info, const sp<ABuffer> &buffer);
     status_t parseRTPExt(StreamInfo *s, const uint8_t *extData, size_t extLen, int32_t *cvoDegrees);
     status_t parseRTCP(StreamInfo *info, const sp<ABuffer> &buffer);
-    status_t parseSR(StreamInfo *info, const uint8_t *data, size_t size);
+    status_t parseSenderReport(StreamInfo *info, const uint8_t *data, size_t size);
+    status_t parseReceiverReport(StreamInfo *info, const uint8_t *data, size_t size);
+    status_t parseReceptionReportBlock(StreamInfo *info,
+            int64_t recvTimeUs, uint32_t senderId, const uint8_t *data, size_t size);
     status_t parseTSFB(StreamInfo *info, const uint8_t *data, size_t size);
     status_t parsePSFB(StreamInfo *info, const uint8_t *data, size_t size);
     status_t parseBYE(StreamInfo *info, const uint8_t *data, size_t size);
diff --git a/media/libstagefright/rtsp/include/media/stagefright/rtsp/ARTPSource.h b/media/libstagefright/rtsp/include/media/stagefright/rtsp/ARTPSource.h
index 4984e91..e9b4942 100644
--- a/media/libstagefright/rtsp/include/media/stagefright/rtsp/ARTPSource.h
+++ b/media/libstagefright/rtsp/include/media/stagefright/rtsp/ARTPSource.h
@@ -36,6 +36,7 @@
 struct ABuffer;
 struct AMessage;
 struct ARTPAssembler;
+struct ReceptionReportBlock;
 struct ASessionDescription;
 
 struct ARTPSource : public RefBase {
@@ -59,8 +60,10 @@
 
     void processRTPPacket(const sp<ABuffer> &buffer);
     void processRTPPacket();
+    void processReceptionReportBlock(
+            int64_t recvTimeUs, uint32_t senderId, sp<ReceptionReportBlock> rrb);
     void timeReset();
-    void timeUpdate(uint32_t rtpTime, uint64_t ntpTime);
+    void timeUpdate(int64_t recvTimeUs, uint32_t rtpTime, uint64_t ntpTime);
     void byeReceived();
 
     List<sp<ABuffer> > *queue() { return &mQueue; }
@@ -135,6 +138,8 @@
     uint64_t mLastSrNtpTime;
     int64_t mLastSrUpdateTimeUs;
 
+    int64_t mLastRrUpdateTimeUs;
+
     bool mIsFirstRtpRtcpGap;
     double mAvgRtpRtcpGapMs;
     double mAvgUnderlineDelayMs;
diff --git a/media/ndk/fuzzer/Android.bp b/media/ndk/fuzzer/Android.bp
new file mode 100644
index 0000000..82d03e5
--- /dev/null
+++ b/media/ndk/fuzzer/Android.bp
@@ -0,0 +1,57 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+cc_defaults {
+     name: "libmediandk_fuzzer_defaults",
+     shared_libs: [
+        "libandroid_runtime_lazy",
+        "libbase",
+        "libdatasource",
+        "libmedia",
+        "libmediadrm",
+        "libmedia_omx",
+        "libmedia_jni_utils",
+        "libstagefright",
+        "libstagefright_foundation",
+        "liblog",
+        "libutils",
+        "libcutils",
+        "libnativewindow",
+        "libhidlbase",
+        "libgui",
+        "libui",
+        "libmediandk",
+     ],
+     static_libs: [
+        "libmediandk_utils",
+        "libnativehelper_lazy",
+     ],
+     header_libs: [
+         "media_ndk_headers",
+     ],
+     fuzz_config: {
+        cc: [
+            "android-media-fuzzing-reports@google.com",
+        ],
+        componentid: 155276,
+    },
+}
+
+cc_fuzz {
+    name: "ndk_crypto_fuzzer",
+    srcs: ["ndk_crypto_fuzzer.cpp"],
+    defaults: ["libmediandk_fuzzer_defaults"],
+}
diff --git a/media/ndk/fuzzer/README.md b/media/ndk/fuzzer/README.md
new file mode 100644
index 0000000..4f78e4a
--- /dev/null
+++ b/media/ndk/fuzzer/README.md
@@ -0,0 +1,24 @@
+# Fuzzers for libmediandk
+
+## Table of contents
++ [ndk_crypto_fuzzer](#NdkCrypto)
+
+# <a name="NdkCrypto"></a> Fuzzer for NdkCrypto
+
+NdkCrypto supports the following parameters:
+1. UniversalIdentifier (parameter name: "uuid")
+
+| Parameter | Valid Values | Configured Value |
+|-----------|--------------|------------------|
+| `uuid`    | `Array`      | Value obtained from FuzzedDataProvider |
+
+#### Steps to run
+1. Build the fuzzer
+```
+  $ mm -j$(nproc) ndk_crypto_fuzzer
+```
+2. Run on device
+```
+  $ adb sync data
+  $ adb shell /data/fuzz/arm64/ndk_crypto_fuzzer/ndk_crypto_fuzzer
+```
diff --git a/media/ndk/fuzzer/ndk_crypto_fuzzer.cpp b/media/ndk/fuzzer/ndk_crypto_fuzzer.cpp
new file mode 100644
index 0000000..2b22f0f
--- /dev/null
+++ b/media/ndk/fuzzer/ndk_crypto_fuzzer.cpp
@@ -0,0 +1,46 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <fuzzer/FuzzedDataProvider.h>
+#include <media/NdkMediaCrypto.h>
+
+constexpr size_t kMaxString = 256;
+constexpr size_t kMinBytes = 0;
+constexpr size_t kMaxBytes = 1000;
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
+    FuzzedDataProvider fdp(data, size);
+    AMediaUUID uuid = {};
+    size_t maxLen = fdp.ConsumeIntegralInRange<size_t>(kMinBytes, sizeof(AMediaUUID));
+    for (size_t idx = 0; idx < maxLen; ++idx) {
+        uuid[idx] = fdp.ConsumeIntegral<uint8_t>();
+    }
+    std::vector<uint8_t> initData =
+            fdp.ConsumeBytes<uint8_t>(fdp.ConsumeIntegralInRange<size_t>(kMinBytes, kMaxBytes));
+    AMediaCrypto* crypto = AMediaCrypto_new(uuid, initData.data(), initData.size());
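+    // Repeatedly exercise the NDK crypto query APIs with fuzzed inputs until the
+    // fuzzer input is exhausted.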
+    while (fdp.remaining_bytes()) {
+        auto invokeNdkCryptoFuzzer = fdp.PickValueInArray<const std::function<void()>>({
+                [&]() {
+                    AMediaCrypto_requiresSecureDecoderComponent(
+                            fdp.ConsumeRandomLengthString(kMaxString).c_str());
+                },
+                [&]() { AMediaCrypto_isCryptoSchemeSupported(uuid); },
+        });
+        invokeNdkCryptoFuzzer();
+    }
+    AMediaCrypto_delete(crypto);
+    return 0;
+}
diff --git a/media/utils/Android.bp b/media/utils/Android.bp
index a38ef57..04d9ed9 100644
--- a/media/utils/Android.bp
+++ b/media/utils/Android.bp
@@ -21,23 +21,76 @@
     default_applicable_licenses: ["frameworks_av_license"],
 }
 
-cc_library {
-    name: "libmediautils",
+cc_defaults {
+    name: "libmediautils_defaults",
+    cflags: [
+        "-Wall",
+        "-Werror",
+        "-Wextra",
+    ],
+}
 
+filegroup {
+    name: "libmediautils_core_srcs",
     srcs: [
         "AImageReaderUtils.cpp",
-        "BatteryNotifier.cpp",
         "ISchedulingPolicyService.cpp",
         "Library.cpp",
-        "LimitProcessMemory.cpp",
         "MediaUtilsDelayed.cpp",
-        "MemoryLeakTrackUtil.cpp",
         "MethodStatistics.cpp",
         "Process.cpp",
-        "ProcessInfo.cpp",
         "SchedulingPolicyService.cpp",
-        "ServiceUtilities.cpp",
         "ThreadSnapshot.cpp",
+    ],
+}
+
+cc_library_headers {
+    name: "libmediautils_headers",
+    host_supported: true,
+    vendor_available: true, // required for platform/hardware/interfaces
+    shared_libs: [
+        "liblog",
+    ],
+    local_include_dirs: ["include"],
+    export_include_dirs: ["include"],
+}
+
+cc_library {
+    name: "libmediautils_core",
+    defaults: ["libmediautils_defaults"],
+    host_supported: true,
+    srcs: [":libmediautils_core_srcs"],
+    shared_libs: [
+        "libaudioutils", // for clock.h, Statistics.h
+        "libbase",
+        "libbinder",
+        "libhidlbase",
+        "liblog",
+        "libpermission",
+        "libutils",
+        "android.hardware.graphics.bufferqueue@1.0",
+        "android.hidl.token@1.0-utils",
+        "packagemanager_aidl-cpp",
+    ],
+
+    export_shared_lib_headers: [
+        "libpermission",
+    ],
+
+    local_include_dirs: ["include"],
+    export_include_dirs: ["include"],
+}
+
+cc_library {
+    name: "libmediautils",
+    defaults: ["libmediautils_defaults"],
+    srcs: [
+        ":libmediautils_core_srcs",
+        "BatteryNotifier.cpp",
+        "MemoryLeakTrackUtil.cpp",
+        "LimitProcessMemory.cpp",
+        "ProcessInfo.cpp",
+        "ServiceUtilities.cpp",
         "TimeCheck.cpp",
         "TimerThread.cpp",
     ],
@@ -49,12 +102,13 @@
     shared_libs: [
         "libaudioclient_aidl_conversion",
         "libaudioutils", // for clock.h, Statistics.h
+        "libbase",
         "libbinder",
         "libcutils",
-        "liblog",
-        "libutils",
         "libhidlbase",
+        "liblog",
         "libpermission",
+        "libutils",
         "android.hardware.graphics.bufferqueue@1.0",
         "android.hidl.token@1.0-utils",
         "packagemanager_aidl-cpp",
@@ -65,12 +119,6 @@
 
     logtags: ["EventLogTags.logtags"],
 
-    cflags: [
-        "-Wall",
-        "-Wextra",
-        "-Werror",
-    ],
-
     header_libs: [
         "bionic_libc_platform_headers",
         "libmedia_headers",
@@ -81,7 +129,7 @@
     ],
 
     required: [
-        "libmediautils_delayed",  // lazy loaded
+        "libmediautils_delayed", // lazy loaded
     ],
 
     include_dirs: [
@@ -94,14 +142,10 @@
 
 cc_library {
     name: "libmediautils_delayed", // match with MEDIAUTILS_DELAYED_LIBRARY_NAME
+    defaults: ["libmediautils_defaults"],
     srcs: [
         "MediaUtilsDelayedLibrary.cpp",
     ],
-    cflags: [
-        "-Wall",
-        "-Werror",
-        "-Wextra",
-    ],
     shared_libs: [
         "liblog",
         "libutils",
@@ -111,16 +155,12 @@
 
 cc_library {
     name: "libmediautils_vendor",
-    vendor_available: true,  // required for platform/hardware/interfaces
+    defaults: ["libmediautils_defaults"],
+    vendor_available: true, // required for platform/hardware/interfaces
     srcs: [
         "MemoryLeakTrackUtil.cpp",
     ],
 
-    cflags: [
-        "-Wall",
-        "-Wextra",
-        "-Werror",
-    ],
     shared_libs: [
         "liblog",
         "libutils",
@@ -137,23 +177,3 @@
     local_include_dirs: ["include"],
     export_include_dirs: ["include"],
 }
-
-
-cc_library_headers {
-    name: "libmediautils_headers",
-    vendor_available: true,  // required for platform/hardware/interfaces
-
-    export_include_dirs: ["include"],
-}
-
-cc_test {
-    name: "libmediautils_test",
-    srcs: [
-        "memory-test.cpp",
-        "TimerThread-test.cpp",
-    ],
-    shared_libs: [
-      "libmediautils",
-      "libutils",
-    ]
-}
diff --git a/media/utils/fuzzers/Android.bp b/media/utils/fuzzers/Android.bp
index d26e6c2..fc4c2f9 100644
--- a/media/utils/fuzzers/Android.bp
+++ b/media/utils/fuzzers/Android.bp
@@ -9,14 +9,13 @@
 
 cc_defaults {
     name: "libmediautils_fuzzer_defaults",
+    host_supported: true,
     shared_libs: [
-        "libbatterystats_aidl",
         "libbinder",
-        "libcutils",
         "liblog",
-        "libmediautils",
+        "libcutils",
+        "libmediautils_core",
         "libutils",
-        "libbinder",
         "framework-permission-aidl-cpp",
         "packagemanager_aidl-cpp",
     ],
@@ -27,33 +26,36 @@
         "-Werror",
         "-Wno-c++2a-extensions",
     ],
-
-    header_libs: [
-        "bionic_libc_platform_headers",
-        "libmedia_headers",
-    ],
 }
 
 cc_fuzz {
     name: "libmediautils_fuzzer_battery_notifier",
+    host_supported: false,
+    shared_libs: ["libmediautils"],
     defaults: ["libmediautils_fuzzer_defaults"],
     srcs: ["BatteryNotifierFuzz.cpp"],
 }
 
 cc_fuzz {
     name: "libmediautils_fuzzer_scheduling_policy_service",
+    host_supported: false,
+    shared_libs: ["libmediautils"],
     defaults: ["libmediautils_fuzzer_defaults"],
     srcs: ["SchedulingPolicyServiceFuzz.cpp"],
 }
 
 cc_fuzz {
     name: "libmediautils_fuzzer_service_utilities",
+    host_supported: false,
+    shared_libs: ["libmediautils"],
     defaults: ["libmediautils_fuzzer_defaults"],
     srcs: ["ServiceUtilitiesFuzz.cpp"],
 }
 
 cc_fuzz {
     name: "libmediautils_fuzzer_time_check",
+    host_supported: false,
+    shared_libs: ["libmediautils"],
     defaults: ["libmediautils_fuzzer_defaults"],
     srcs: ["TimeCheckFuzz.cpp"],
 }
diff --git a/media/utils/include/mediautils/ExtendedAccumulator.h b/media/utils/include/mediautils/ExtendedAccumulator.h
new file mode 100644
index 0000000..7e3e170
--- /dev/null
+++ b/media/utils/include/mediautils/ExtendedAccumulator.h
@@ -0,0 +1,90 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <atomic>
+#include <cstdint>
+#include <tuple>
+#include <type_traits>
+
+#include <log/log.h>
+
+namespace android::mediautils {
+
+// The goal of this class is to detect and accumulate wraparound occurrences on a
+// smaller-sized integer.
+
+// This class assumes that the underlying unsigned type is either incremented or
+// decremented by at most the underlying signed type between any two subsequent
+// polls (or construction). This is well-defined as the modular nature of
+// unsigned arithmetic ensures that every new value maps 1-1 to an
+// increment/decrement over the same sized signed type. It also ensures that our
+// counter will be equivalent mod the size of the integer even if the underlying
+// type is modified outside of this range.
+//
+// For convenience, this class is thread compatible. Additionally, it is safe
+// as long as there is only one writer.
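+//
+// Illustrative usage (a sketch only; readHwFrameCount() is a hypothetical source
+// of a wrapping 32-bit counter):
+//
+//   ExtendedAccumulator<uint32_t, uint64_t> frames;
+//   const auto [delta, wrap] = frames.poll(readHwFrameCount());
+//   if (wrap == decltype(frames)::Wrap::OVERFLOW) {
+//       // The 32-bit counter wrapped past UINT32_MAX since the last poll.
+//   }
+//   const uint64_t total = frames.getValue();  // 64-bit accumulated count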
+template <typename Integral = uint32_t, typename AccumulatingType = uint64_t>
+class ExtendedAccumulator {
+    static_assert(sizeof(Integral) < sizeof(AccumulatingType),
+                  "Accumulating type should be larger than underlying type");
+    static_assert(std::is_integral_v<Integral> && std::is_unsigned_v<Integral>,
+                  "Wraparound behavior is only well-defiend for unsigned ints");
+    static_assert(std::is_integral_v<AccumulatingType>);
+
+  public:
+    enum class Wrap {
+        NORMAL = 0,
+        UNDERFLOW = 1,
+        OVERFLOW = 2,
+    };
+
+    using UnsignedInt = Integral;
+    using SignedInt = std::make_signed_t<UnsignedInt>;
+
+    explicit ExtendedAccumulator(AccumulatingType initial = 0) : mAccumulated(initial) {}
+
+    // Returns a pair of the calculated change on the accumulating value, and a
+    // Wrap type representing the type of wraparound (if any) which occurred.
+    std::pair<SignedInt, Wrap> poll(UnsignedInt value) {
+        auto acc = mAccumulated.load(std::memory_order_relaxed);
+        const auto bottom_bits = static_cast<UnsignedInt>(acc);
+        std::pair<SignedInt, Wrap> res = {0, Wrap::NORMAL};
+        const bool overflow = __builtin_sub_overflow(value, bottom_bits, &res.first);
+
+        if (overflow) {
+            res.second = (res.first > 0) ? Wrap::OVERFLOW : Wrap::UNDERFLOW;
+        }
+
+        const bool acc_overflow = __builtin_add_overflow(acc, res.first, &acc);
+        // If our *accumulating* type overflows or underflows (depending on its
+        // signedness), we should abort.
+        if (acc_overflow) LOG_ALWAYS_FATAL("Unexpected overflow/underflow in %s", __func__);
+
+        mAccumulated.store(acc, std::memory_order_relaxed);
+        return res;
+    }
+
+    AccumulatingType getValue() const { return mAccumulated.load(std::memory_order_relaxed); }
+
+  private:
+    // Invariant - the bottom underlying bits of accumulated are the same as the
+    // last value provided to poll.
+    std::atomic<AccumulatingType> mAccumulated;
+};
+
+}  // namespace android::mediautils
diff --git a/media/utils/include/mediautils/ServiceUtilities.h b/media/utils/include/mediautils/ServiceUtilities.h
index de20d55..3d7981a 100644
--- a/media/utils/include/mediautils/ServiceUtilities.h
+++ b/media/utils/include/mediautils/ServiceUtilities.h
@@ -130,7 +130,7 @@
     std::optional<bool> doIsAllowed(uid_t uid);
     sp<content::pm::IPackageManagerNative> retrievePackageManager();
     sp<content::pm::IPackageManagerNative> mPackageManager; // To check apps manifest
-    uint_t mPackageManagerErrors = 0;
+    unsigned int mPackageManagerErrors = 0;
     struct Package {
         std::string name;
         bool playbackCaptureAllowed = false;
diff --git a/media/utils/tests/Android.bp b/media/utils/tests/Android.bp
index 1024018..759768a 100644
--- a/media/utils/tests/Android.bp
+++ b/media/utils/tests/Android.bp
@@ -7,78 +7,93 @@
     default_applicable_licenses: ["frameworks_av_license"],
 }
 
-cc_test_library {
-    name: "libsharedtest",
+cc_defaults {
+    name: "libmediautils_tests_defaults",
+
+    host_supported: true,
+
     cflags: [
         "-Wall",
         "-Werror",
         "-Wextra",
     ],
 
-    sanitize:{
-       address: true,
-       cfi: true,
-       integer_overflow: true,
-       memtag_heap: true,
+    sanitize: {
+        address: true,
+        cfi: true,
+        integer_overflow: true,
+        memtag_heap: true,
     },
 
     shared_libs: [
         "liblog",
+        "libmediautils_core",
+        "libutils",
     ],
 
+}
+
+cc_defaults {
+    name: "libmediautils_tests_host_unavail",
+
+    defaults: ["libmediautils_tests_defaults"],
+
+    host_supported: false,
+
+    shared_libs: [
+        "libmediautils",
+    ],
+}
+
+cc_test_library {
+    name: "libsharedtest",
+
+    defaults: ["libmediautils_tests_defaults"],
+
     srcs: [
         "sharedtest.cpp",
-    ]
+    ],
 }
 
 cc_test {
     name: "library_tests",
 
-    cflags: [
-        "-Wall",
-        "-Werror",
-        "-Wextra",
-    ],
-
-    sanitize:{
-       address: true,
-       cfi: true,
-       integer_overflow: true,
-       memtag_heap: true,
-    },
-
-    shared_libs: [
-        "libbase",
-        "liblog",
-        "libmediautils",
-        "libutils",
-    ],
+    defaults: ["libmediautils_tests_defaults"],
 
     data_libs: [
         "libsharedtest",
     ],
 
+    shared_libs: [
+        "libbase",
+    ],
+
     srcs: [
         "library_tests.cpp",
     ],
 }
 
 cc_test {
-    name: "media_process_tests",
+    name: "libmediautils_test",
 
-    cflags: [
-        "-Wall",
-        "-Werror",
-        "-Wextra",
-    ],
+    defaults: ["libmediautils_tests_host_unavail"],
 
     shared_libs: [
-        "liblog",
         "libmediautils",
-        "libutils",
     ],
 
     srcs: [
+        "memory-test.cpp",
+        "TimerThread-test.cpp",
+    ],
+}
+
+cc_test {
+    name: "media_process_tests",
+
+    defaults: ["libmediautils_tests_host_unavail"],
+
+    srcs: [
         "media_process_tests.cpp",
     ],
 }
@@ -86,17 +101,7 @@
 cc_test {
     name: "media_synchronization_tests",
 
-    cflags: [
-        "-Wall",
-        "-Werror",
-        "-Wextra",
-    ],
-
-    shared_libs: [
-        "liblog",
-        "libmediautils",
-        "libutils",
-    ],
+    defaults: ["libmediautils_tests_host_unavail"],
 
     srcs: [
         "media_synchronization_tests.cpp",
@@ -106,17 +111,7 @@
 cc_test {
     name: "media_threadsnapshot_tests",
 
-    cflags: [
-        "-Wall",
-        "-Werror",
-        "-Wextra",
-    ],
-
-    shared_libs: [
-        "liblog",
-        "libmediautils",
-        "libutils",
-    ],
+    defaults: ["libmediautils_tests_host_unavail"],
 
     srcs: [
         "media_threadsnapshot_tests.cpp",
@@ -126,17 +121,10 @@
 cc_test {
     name: "mediautils_scopedstatistics_tests",
 
-    cflags: [
-        "-Wall",
-        "-Werror",
-        "-Wextra",
-    ],
+    defaults: ["libmediautils_tests_defaults"],
 
     shared_libs: [
         "libaudioutils",
-        "liblog",
-        "libmediautils",
-        "libutils",
     ],
 
     srcs: [
@@ -147,17 +135,10 @@
 cc_test {
     name: "methodstatistics_tests",
 
-    cflags: [
-        "-Wall",
-        "-Werror",
-        "-Wextra",
-    ],
+    defaults: ["libmediautils_tests_defaults"],
 
     shared_libs: [
         "libaudioutils",
-        "liblog",
-        "libmediautils",
-        "libutils",
     ],
 
     srcs: [
@@ -168,26 +149,19 @@
 cc_test {
     name: "timecheck_tests",
 
-    cflags: [
-        "-Wall",
-        "-Werror",
-        "-Wextra",
-    ],
-
-    sanitize:{
-       address: true,
-       cfi: true,
-       integer_overflow: true,
-       memtag_heap: true,
-    },
-
-    shared_libs: [
-        "liblog",
-        "libmediautils",
-        "libutils",
-    ],
+    defaults: ["libmediautils_tests_host_unavail"],
 
     srcs: [
         "timecheck_tests.cpp",
     ],
 }
+
+cc_test {
+    name: "extended_accumulator_tests",
+
+    defaults: ["libmediautils_tests_defaults"],
+
+    srcs: [
+        "extended_accumulator_tests.cpp",
+    ],
+}
diff --git a/media/utils/TimerThread-test.cpp b/media/utils/tests/TimerThread-test.cpp
similarity index 70%
rename from media/utils/TimerThread-test.cpp
rename to media/utils/tests/TimerThread-test.cpp
index 93cd64c..1fbe894 100644
--- a/media/utils/TimerThread-test.cpp
+++ b/media/utils/tests/TimerThread-test.cpp
@@ -40,8 +40,10 @@
     std::this_thread::sleep_for(100ms - kJitter);
     ASSERT_FALSE(taskRan);
     std::this_thread::sleep_for(2 * kJitter);
-    ASSERT_TRUE(taskRan);
-    ASSERT_EQ(1, countChars(thread.retiredToString(), REQUEST_START));
+    ASSERT_TRUE(taskRan); // timeout handler ran.
+    ASSERT_EQ(1ul, countChars(thread.timeoutToString(), REQUEST_START));
+    // nothing cancelled
+    ASSERT_EQ(0ul, countChars(thread.retiredToString(), REQUEST_START));
 }
 
 TEST(TimerThread, Cancel) {
@@ -53,8 +55,10 @@
     ASSERT_FALSE(taskRan);
     ASSERT_TRUE(thread.cancelTask(handle));
     std::this_thread::sleep_for(2 * kJitter);
-    ASSERT_FALSE(taskRan);
-    ASSERT_EQ(1, countChars(thread.retiredToString(), REQUEST_START));
+    ASSERT_FALSE(taskRan); // timeout handler did not run.
+    ASSERT_EQ(0ul, countChars(thread.timeoutToString(), REQUEST_START));
+    // task cancelled.
+    ASSERT_EQ(1ul, countChars(thread.retiredToString(), REQUEST_START));
 }
 
 TEST(TimerThread, CancelAfterRun) {
@@ -63,9 +67,11 @@
     TimerThread::Handle handle =
             thread.scheduleTask("CancelAfterRun", [&taskRan] { taskRan = true; }, 100ms);
     std::this_thread::sleep_for(100ms + kJitter);
-    ASSERT_TRUE(taskRan);
+    ASSERT_TRUE(taskRan); // timeout handler ran.
     ASSERT_FALSE(thread.cancelTask(handle));
-    ASSERT_EQ(1, countChars(thread.retiredToString(), REQUEST_START));
+    ASSERT_EQ(1ul, countChars(thread.timeoutToString(), REQUEST_START));
+    // nothing actually cancelled
+    ASSERT_EQ(0ul, countChars(thread.retiredToString(), REQUEST_START));
 }
 
 TEST(TimerThread, MultipleTasks) {
@@ -82,9 +88,9 @@
     thread.scheduleTask("5", [&taskRan] { taskRan[5] = true; }, 200ms);
 
     // 6 tasks pending
-    ASSERT_EQ(6, countChars(thread.pendingToString(), REQUEST_START));
+    ASSERT_EQ(6ul, countChars(thread.pendingToString(), REQUEST_START));
     // 0 tasks completed
-    ASSERT_EQ(0, countChars(thread.retiredToString(), REQUEST_START));
+    ASSERT_EQ(0ul, countChars(thread.retiredToString(), REQUEST_START));
 
     // Task 1 should trigger around 100ms.
     std::this_thread::sleep_until(startTime + 100ms - kJitter);
@@ -141,9 +147,10 @@
     ASSERT_TRUE(taskRan[5]);
 
     // 1 task pending
-    ASSERT_EQ(1, countChars(thread.pendingToString(), REQUEST_START));
-    // 4 tasks ran and 1 cancelled
-    ASSERT_EQ(4 + 1, countChars(thread.retiredToString(), REQUEST_START));
+    ASSERT_EQ(1ul, countChars(thread.pendingToString(), REQUEST_START));
+    // 4 tasks called on timeout and 1 cancelled
+    ASSERT_EQ(4ul, countChars(thread.timeoutToString(), REQUEST_START));
+    ASSERT_EQ(1ul, countChars(thread.retiredToString(), REQUEST_START));
 
     // Task 3 should trigger around 400ms.
     std::this_thread::sleep_until(startTime + 400ms - kJitter);
@@ -154,8 +161,9 @@
     ASSERT_FALSE(taskRan[4]);
     ASSERT_TRUE(taskRan[5]);
 
-    // 4 tasks ran and 1 cancelled
-    ASSERT_EQ(4 + 1, countChars(thread.retiredToString(), REQUEST_START));
+    // 4 tasks called on timeout and 1 cancelled
+    ASSERT_EQ(4ul, countChars(thread.timeoutToString(), REQUEST_START));
+    ASSERT_EQ(1ul, countChars(thread.retiredToString(), REQUEST_START));
 
     std::this_thread::sleep_until(startTime + 400ms + kJitter);
     ASSERT_TRUE(taskRan[0]);
@@ -166,9 +174,10 @@
     ASSERT_TRUE(taskRan[5]);
 
     // 0 tasks pending
-    ASSERT_EQ(0, countChars(thread.pendingToString(), REQUEST_START));
-    // 5 tasks ran and 1 cancelled
-    ASSERT_EQ(5 + 1, countChars(thread.retiredToString(), REQUEST_START));
+    ASSERT_EQ(0ul, countChars(thread.pendingToString(), REQUEST_START));
+    // 5 tasks called on timeout and 1 cancelled
+    ASSERT_EQ(5ul, countChars(thread.timeoutToString(), REQUEST_START));
+    ASSERT_EQ(1ul, countChars(thread.retiredToString(), REQUEST_START));
 }
 
 TEST(TimerThread, TrackedTasks) {
@@ -179,47 +188,47 @@
     auto handle2 = thread.trackTask("2");
 
     // 3 tasks pending
-    ASSERT_EQ(3, countChars(thread.pendingToString(), REQUEST_START));
+    ASSERT_EQ(3ul, countChars(thread.pendingToString(), REQUEST_START));
     // 0 tasks retired
-    ASSERT_EQ(0, countChars(thread.retiredToString(), REQUEST_START));
+    ASSERT_EQ(0ul, countChars(thread.retiredToString(), REQUEST_START));
 
     ASSERT_TRUE(thread.cancelTask(handle0));
     ASSERT_TRUE(thread.cancelTask(handle1));
 
     // 1 task pending
-    ASSERT_EQ(1, countChars(thread.pendingToString(), REQUEST_START));
+    ASSERT_EQ(1ul, countChars(thread.pendingToString(), REQUEST_START));
     // 2 tasks retired
-    ASSERT_EQ(2, countChars(thread.retiredToString(), REQUEST_START));
+    ASSERT_EQ(2ul, countChars(thread.retiredToString(), REQUEST_START));
 
     // handle1 is stale, cancel returns false.
     ASSERT_FALSE(thread.cancelTask(handle1));
 
     // 1 task pending
-    ASSERT_EQ(1, countChars(thread.pendingToString(), REQUEST_START));
+    ASSERT_EQ(1ul, countChars(thread.pendingToString(), REQUEST_START));
     // 2 tasks retired
-    ASSERT_EQ(2, countChars(thread.retiredToString(), REQUEST_START));
+    ASSERT_EQ(2ul, countChars(thread.retiredToString(), REQUEST_START));
 
     // Add another tracked task.
     auto handle3 = thread.trackTask("3");
 
     // 2 tasks pending
-    ASSERT_EQ(2, countChars(thread.pendingToString(), REQUEST_START));
+    ASSERT_EQ(2ul, countChars(thread.pendingToString(), REQUEST_START));
     // 2 tasks retired
-    ASSERT_EQ(2, countChars(thread.retiredToString(), REQUEST_START));
+    ASSERT_EQ(2ul, countChars(thread.retiredToString(), REQUEST_START));
 
     ASSERT_TRUE(thread.cancelTask(handle2));
 
     // 1 tasks pending
-    ASSERT_EQ(1, countChars(thread.pendingToString(), REQUEST_START));
+    ASSERT_EQ(1ul, countChars(thread.pendingToString(), REQUEST_START));
     // 3 tasks retired
-    ASSERT_EQ(3, countChars(thread.retiredToString(), REQUEST_START));
+    ASSERT_EQ(3ul, countChars(thread.retiredToString(), REQUEST_START));
 
     ASSERT_TRUE(thread.cancelTask(handle3));
 
     // 0 tasks pending
-    ASSERT_EQ(0, countChars(thread.pendingToString(), REQUEST_START));
+    ASSERT_EQ(0ul, countChars(thread.pendingToString(), REQUEST_START));
     // 4 tasks retired
-    ASSERT_EQ(4, countChars(thread.retiredToString(), REQUEST_START));
+    ASSERT_EQ(4ul, countChars(thread.retiredToString(), REQUEST_START));
 }
 
 }  // namespace
diff --git a/media/utils/tests/extended_accumulator_tests.cpp b/media/utils/tests/extended_accumulator_tests.cpp
new file mode 100644
index 0000000..e243e7e
--- /dev/null
+++ b/media/utils/tests/extended_accumulator_tests.cpp
@@ -0,0 +1,101 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "extended_accumulator_tests"
+
+#include <mediautils/ExtendedAccumulator.h>
+
+#include <type_traits>
+#include <cstdint>
+#include <limits.h>
+
+#include <gtest/gtest.h>
+#include <log/log.h>
+
+using namespace android;
+using namespace android::mediautils;
+
+// Conditionally choose a base accumulating counter value in order to prevent
+// unsigned underflow on the accumulator from aborting the tests.
+template <typename TType, typename CType>
+static constexpr CType getBase() {
+    static_assert(sizeof(TType) < sizeof(CType));
+    if constexpr (std::is_unsigned_v<CType>) {
+        return std::numeric_limits<TType>::max() + 1;
+    } else {
+        return 0;
+    }
+}
+
+// Since the entire state of this utility is the previous value, and the
+// behavior is isomorphic mod the underlying type on the previous value, we can
+// test combinations of the previous value of the underlying type and a
+// hypothetical signed update to that type and ensure the accumulator moves
+// correctly and reports overflow correctly.
+template <typename TestUInt, typename CType>
+void testPair(TestUInt prevVal, std::make_signed_t<TestUInt> delta) {
+    using TestDetect = ExtendedAccumulator<TestUInt, CType>;
+    using TestInt = typename TestDetect::SignedInt;
+    static_assert(std::is_same_v<typename TestDetect::UnsignedInt, TestUInt>);
+    static_assert(std::is_same_v<TestInt, std::make_signed_t<TestUInt>>);
+    static_assert(sizeof(TestUInt) < sizeof(CType));
+
+    // To safely detect underflow/overflow for testing
+    // Should be 0 mod TestUInt, max + 1 is convenient
+    static constexpr CType base = getBase<TestUInt, CType>();
+    const CType prev = base + prevVal;
+    TestDetect test{prev};
+    EXPECT_EQ(test.getValue(), prev);
+    // Prevent unsigned wraparound abort
+    CType next;
+    const auto err = __builtin_add_overflow(prev, delta, &next);
+    LOG_ALWAYS_FATAL_IF(err, "Unexpected wrap in tests");
+    const auto [result, status] = test.poll(static_cast<TestUInt>(next));
+    EXPECT_EQ(test.getValue(), next);
+    EXPECT_EQ(result, delta);
+
+    // Test overflow/underflow event reporting.
+    if (next < base) EXPECT_EQ(TestDetect::Wrap::UNDERFLOW, status);
+    else if (next > base + std::numeric_limits<TestUInt>::max())
+        EXPECT_EQ(TestDetect::Wrap::OVERFLOW, status);
+    else EXPECT_EQ(TestDetect::Wrap::NORMAL, status);
+}
+
+// Test this utility on every combination of prior and update value for the
+// type uint8_t, with an unsigned containing type.
+TEST(wraparound_tests, cover_u8_u64) {
+    using TType = uint8_t;
+    using CType = uint64_t;
+    static constexpr CType max = std::numeric_limits<TType>::max();
+    for (CType i = 0; i <= max; i++) {
+        for (CType j = 0; j <= max; j++) {
+            testPair<TType, CType>(i, static_cast<int64_t>(j));
+        }
+    }
+}
+
+// Test this utility on every combination of prior and update value for the
+// type uint8_t, with a signed containing type.
+TEST(wraparound_tests, cover_u8_s64) {
+    using TType = uint8_t;
+    using CType = int64_t;
+    static constexpr CType max = std::numeric_limits<TType>::max();
+    for (CType i = 0; i <= max; i++) {
+        for (CType j = 0; j <= max; j++) {
+            testPair<TType, CType>(i, static_cast<int64_t>(j));
+        }
+    }
+}
diff --git a/media/utils/memory-test.cpp b/media/utils/tests/memory-test.cpp
similarity index 100%
rename from media/utils/memory-test.cpp
rename to media/utils/tests/memory-test.cpp
diff --git a/services/audioflinger/Android.bp b/services/audioflinger/Android.bp
index 763c070..a08879e 100644
--- a/services/audioflinger/Android.bp
+++ b/services/audioflinger/Android.bp
@@ -78,7 +78,6 @@
         "libnblog",
         "libpermission",
         "libpowermanager",
-        "libmediautils",
         "libmemunreachable",
         "libmedia_helper",
         "libshmemcompat",
diff --git a/services/audioflinger/AudioFlinger.cpp b/services/audioflinger/AudioFlinger.cpp
index 5661206..5fbb4ed 100644
--- a/services/audioflinger/AudioFlinger.cpp
+++ b/services/audioflinger/AudioFlinger.cpp
@@ -3895,17 +3895,24 @@
             goto Exit;
         }
     } else if (sessionId == AUDIO_SESSION_OUTPUT_STAGE) {
-        if (!isAudioServerUid(callingUid)) {
-            ALOGE("%s: only APM can create using AUDIO_SESSION_OUTPUT_STAGE", __func__);
-            lStatus = PERMISSION_DENIED;
-            goto Exit;
-        }
-
         if (io == AUDIO_IO_HANDLE_NONE) {
             ALOGE("%s: APM must specify output when using AUDIO_SESSION_OUTPUT_STAGE", __func__);
             lStatus = BAD_VALUE;
             goto Exit;
         }
+        PlaybackThread *thread = checkPlaybackThread_l(io);
+        if (thread == nullptr) {
+            ALOGE("%s: invalid output %d specified for AUDIO_SESSION_OUTPUT_STAGE", __func__, io);
+            lStatus = BAD_VALUE;
+            goto Exit;
+        }
+        if (!modifyDefaultAudioEffectsAllowed(adjAttributionSource)
+                && !isAudioServerUid(callingUid)) {
+            ALOGE("%s: effect on AUDIO_SESSION_OUTPUT_STAGE not granted for uid %d",
+                    __func__, callingUid);
+            lStatus = PERMISSION_DENIED;
+            goto Exit;
+        }
     } else if (sessionId == AUDIO_SESSION_DEVICE) {
         if (!modifyDefaultAudioEffectsAllowed(adjAttributionSource)) {
             ALOGE("%s: device effect permission denied for uid %d", __func__, callingUid);
diff --git a/services/audioflinger/TrackBase.h b/services/audioflinger/TrackBase.h
index 2677ab3..20bfbb0 100644
--- a/services/audioflinger/TrackBase.h
+++ b/services/audioflinger/TrackBase.h
@@ -397,6 +397,8 @@
     int64_t             mLogStartFrames = 0;    // Timestamp frames at start()
     double              mLogLatencyMs = 0.;     // Track the last log latency
 
+    bool                mLogForceVolumeUpdate = true; // force volume update to TrackMetrics.
+
     TrackMetrics        mTrackMetrics;
 
     bool                mServerLatencySupported = false;
diff --git a/services/audioflinger/TrackMetrics.h b/services/audioflinger/TrackMetrics.h
index 30d69ab..6fc70d6 100644
--- a/services/audioflinger/TrackMetrics.h
+++ b/services/audioflinger/TrackMetrics.h
@@ -64,7 +64,6 @@
                     AMEDIAMETRICS_PROP_EVENT_VALUE_BEGINAUDIOINTERVALGROUP, devices.c_str());
         }
         ++mIntervalCount;
-        mIntervalStartTimeNs = systemTime();
     }
 
     void logConstructor(pid_t creatorPid, uid_t creatorUid, int32_t internalTrackId,
@@ -90,11 +89,9 @@
     // Called when we are removed from the Thread.
     void logEndInterval() {
         std::lock_guard l(mLock);
-        if (mIntervalStartTimeNs != 0) {
-            const int64_t elapsedTimeNs = systemTime() - mIntervalStartTimeNs;
-            mIntervalStartTimeNs = 0;
-            mCumulativeTimeNs += elapsedTimeNs;
-            mDeviceTimeNs += elapsedTimeNs;
+        if (mLastVolumeChangeTimeNs != 0) {
+            logVolume_l(mVolume); // flush out the last volume.
+            mLastVolumeChangeTimeNs = 0;
         }
     }
 
@@ -133,20 +130,8 @@
 
     // may be called multiple times during an interval
     void logVolume(float volume) {
-        const int64_t timeNs = systemTime();
         std::lock_guard l(mLock);
-        if (mStartVolumeTimeNs == 0) {
-            mDeviceVolume = mVolume = volume;
-            mLastVolumeChangeTimeNs = mStartVolumeTimeNs = timeNs;
-            updateMinMaxVolume(0, mVolume);
-            return;
-        }
-        const int64_t durationNs = timeNs - mLastVolumeChangeTimeNs;
-        updateMinMaxVolume(durationNs, mVolume);
-        mDeviceVolume = (mDeviceVolume * (mLastVolumeChangeTimeNs - mStartVolumeTimeNs) +
-            mVolume * durationNs) / (timeNs - mStartVolumeTimeNs);
-        mVolume = volume;
-        mLastVolumeChangeTimeNs = timeNs;
+        logVolume_l(volume);
     }
 
     // Use absolute numbers returned by AudioTrackShared.
@@ -158,6 +143,7 @@
     }
 
 private:
+
     // no lock required - all arguments and constants.
     void deliverDeviceMetrics(const char *eventName, const char *devices) const {
         mediametrics::LogItem(mMetricsId)
@@ -167,6 +153,23 @@
            .record();
     }
 
+    void logVolume_l(float volume) REQUIRES(mLock) {
+        const int64_t timeNs = systemTime();
+        const int64_t durationNs = mLastVolumeChangeTimeNs == 0
+                ? 0 : timeNs - mLastVolumeChangeTimeNs;
+        if (durationNs > 0) {
+            // See West's algorithm for weighted averages
+            // https://en.wikipedia.org/wiki/Algorithms_for_calculating_variance
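+            // i.e. fold the previous volume into the running time-weighted mean:
+            //   new mean = (mDeviceVolume * mDeviceTimeNs + mVolume * durationNs)
+            //              / (mDeviceTimeNs + durationNs), computed incrementally below.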
+            mDeviceVolume += (mVolume - mDeviceVolume) * durationNs
+                      / (durationNs + mDeviceTimeNs);
+            mDeviceTimeNs += durationNs;
+            mCumulativeTimeNs += durationNs;
+        }
+        updateMinMaxVolume(durationNs, mVolume); // always update.
+        mVolume = volume;
+        mLastVolumeChangeTimeNs = timeNs;
+    }
+
     void deliverCumulativeMetrics(const char *eventName) const REQUIRES(mLock) {
         if (mIntervalCount > 0) {
             mediametrics::LogItem item(mMetricsId);
@@ -199,14 +202,12 @@
         // mDevices is not reset by resetIntervalGroupMetrics.
 
         mIntervalCount = 0;
-        mIntervalStartTimeNs = 0;
         // mCumulativeTimeNs is not reset by resetIntervalGroupMetrics.
         mDeviceTimeNs = 0;
 
         mVolume = 0.f;
         mDeviceVolume = 0.f;
-        mStartVolumeTimeNs = 0;
-        mLastVolumeChangeTimeNs = 0;
+        mLastVolumeChangeTimeNs = 0;  // last time volume logged, cleared on endInterval
         mMinVolume = AMEDIAMETRICS_INITIAL_MIN_VOLUME;
         mMaxVolume = AMEDIAMETRICS_INITIAL_MAX_VOLUME;
         mMinVolumeDurationNs = 0;
@@ -230,14 +231,12 @@
 
     // Number of intervals and playing time
     int32_t           mIntervalCount GUARDED_BY(mLock) = 0;
-    int64_t           mIntervalStartTimeNs GUARDED_BY(mLock) = 0;
-    int64_t           mCumulativeTimeNs GUARDED_BY(mLock) = 0;
-    int64_t           mDeviceTimeNs GUARDED_BY(mLock) = 0;
+    int64_t           mCumulativeTimeNs GUARDED_BY(mLock) = 0; // total time.
+    int64_t           mDeviceTimeNs GUARDED_BY(mLock) = 0;     // time on device.
 
     // Average volume
-    double            mVolume GUARDED_BY(mLock) = 0.f;
-    double            mDeviceVolume GUARDED_BY(mLock) = 0.f;
-    int64_t           mStartVolumeTimeNs GUARDED_BY(mLock) = 0;
+    double            mVolume GUARDED_BY(mLock) = 0.f;       // last set volume.
+    double            mDeviceVolume GUARDED_BY(mLock) = 0.f; // running average volume.
     int64_t           mLastVolumeChangeTimeNs GUARDED_BY(mLock) = 0;
 
     // Min/Max volume
diff --git a/services/audioflinger/Tracks.cpp b/services/audioflinger/Tracks.cpp
index a9720da..30301a8 100644
--- a/services/audioflinger/Tracks.cpp
+++ b/services/audioflinger/Tracks.cpp
@@ -1123,6 +1123,7 @@
                     .mPosition[ExtendedTimestamp::LOCATION_KERNEL];
             mLogLatencyMs = 0.;
         }
+        mLogForceVolumeUpdate = true;  // at least one volume logged for metrics when starting.
 
         if (status == NO_ERROR || status == ALREADY_EXISTS) {
             // for streaming tracks, remove the buffer read stop limit.
@@ -1394,7 +1395,11 @@
     if (mFinalVolume != volume) { // Compare to an epsilon if too many meaningless updates
         mFinalVolume = volume;
         setMetadataHasChanged();
-        mTrackMetrics.logVolume(volume);
+        mLogForceVolumeUpdate = true;
+    }
+    if (mLogForceVolumeUpdate) {
+        mLogForceVolumeUpdate = false;
+        mTrackMetrics.logVolume(mFinalVolume);
     }
 }
 
diff --git a/services/audiopolicy/service/AudioPolicyEffects.cpp b/services/audiopolicy/service/AudioPolicyEffects.cpp
index 70fdfcb..c7a60c2 100644
--- a/services/audiopolicy/service/AudioPolicyEffects.cpp
+++ b/services/audiopolicy/service/AudioPolicyEffects.cpp
@@ -127,7 +127,8 @@
             attributionSource.packageName = "android";
             attributionSource.token = sp<BBinder>::make();
             sp<AudioEffect> fx = new AudioEffect(attributionSource);
-            fx->set(NULL, &effect->mUuid, -1, 0, 0, audioSession, input);
+            fx->set(nullptr /*type */, &effect->mUuid, -1 /* priority */, nullptr /* callback */,
+                    audioSession, input);
             status_t status = fx->initCheck();
             if (status != NO_ERROR && status != ALREADY_EXISTS) {
                 ALOGW("addInputEffects(): failed to create Fx %s on source %d",
@@ -279,7 +280,8 @@
             attributionSource.packageName = "android";
             attributionSource.token = sp<BBinder>::make();
             sp<AudioEffect> fx = new AudioEffect(attributionSource);
-            fx->set(NULL, &effect->mUuid, 0, 0, 0, audioSession, output);
+            fx->set(nullptr /* type */, &effect->mUuid, 0 /* priority */, nullptr /* callback */,
+                    audioSession, output);
             status_t status = fx->initCheck();
             if (status != NO_ERROR && status != ALREADY_EXISTS) {
                 ALOGE("addOutputSessionEffects(): failed to create Fx  %s on session %d",
@@ -984,8 +986,8 @@
             attributionSource.packageName = "android";
             attributionSource.token = sp<BBinder>::make();
             sp<AudioEffect> fx = new AudioEffect(attributionSource);
-            fx->set(EFFECT_UUID_NULL, &effectDesc->mUuid, 0, nullptr,
-                    nullptr, AUDIO_SESSION_DEVICE, AUDIO_IO_HANDLE_NONE,
+            fx->set(EFFECT_UUID_NULL, &effectDesc->mUuid, 0 /* priority */, nullptr /* callback */,
+                    AUDIO_SESSION_DEVICE, AUDIO_IO_HANDLE_NONE,
                     AudioDeviceTypeAddr{deviceEffects->getDeviceType(),
                                         deviceEffects->getDeviceAddress()});
             status_t status = fx->initCheck();
diff --git a/services/audiopolicy/service/Spatializer.cpp b/services/audiopolicy/service/Spatializer.cpp
index 9baaf93..a98d474 100644
--- a/services/audiopolicy/service/Spatializer.cpp
+++ b/services/audiopolicy/service/Spatializer.cpp
@@ -31,6 +31,7 @@
 #include <media/audiohal/EffectsFactoryHalInterface.h>
 #include <media/stagefright/foundation/AHandler.h>
 #include <media/stagefright/foundation/AMessage.h>
+#include <media/MediaMetricsItem.h>
 #include <media/ShmemCompat.h>
 #include <mediautils/ServiceUtilities.h>
 #include <utils/Thread.h>
@@ -57,6 +58,19 @@
        if (!_tmp.ok()) return aidl_utils::binderStatusFromStatusT(_tmp.error()); \
        std::move(_tmp.value()); })
 
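+// Returns the channel mask with the largest channel count in 'masks',
+// or AUDIO_CHANNEL_NONE if the vector is empty.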
+audio_channel_mask_t getMaxChannelMask(std::vector<audio_channel_mask_t> masks) {
+    uint32_t maxCount = 0;
+    audio_channel_mask_t maxMask = AUDIO_CHANNEL_NONE;
+    for (auto mask : masks) {
+        const size_t count = audio_channel_count_from_out_mask(mask);
+        if (count > maxCount) {
+            maxMask = mask;
+            maxCount = count;
+        }
+    }
+    return maxMask;
+}
+
 // ---------------------------------------------------------------------------
 
 class Spatializer::EngineCallbackHandler : public AHandler {
@@ -286,6 +300,21 @@
         ALOGW("%s: SPATIALIZER_PARAM_SUPPORTED_CHANNEL_MASKS reports empty", __func__);
         return BAD_VALUE;
     }
+
+    // Currently we expose only RELATIVE_WORLD.
+    // This is a limitation of the head tracking library based on a UX choice.
+    mHeadTrackingModes.push_back(SpatializerHeadTrackingMode::DISABLED);
+    if (mSupportsHeadTracking) {
+        mHeadTrackingModes.push_back(SpatializerHeadTrackingMode::RELATIVE_WORLD);
+    }
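+    // Log a one-time "create" metrics item describing the spatializer capabilities.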
+    mediametrics::LogItem(mMetricsId)
+        .set(AMEDIAMETRICS_PROP_EVENT, AMEDIAMETRICS_PROP_EVENT_VALUE_CREATE)
+        .set(AMEDIAMETRICS_PROP_CHANNELMASK, (int32_t)getMaxChannelMask(mChannelMasks))
+        .set(AMEDIAMETRICS_PROP_LEVELS, aidl_utils::enumsToString(mLevels))
+        .set(AMEDIAMETRICS_PROP_MODES, aidl_utils::enumsToString(mSpatializationModes))
+        .set(AMEDIAMETRICS_PROP_HEADTRACKINGMODES, aidl_utils::enumsToString(mHeadTrackingModes))
+        .set(AMEDIAMETRICS_PROP_STATUS, (int32_t)status)
+        .record();
     return NO_ERROR;
 }
 
@@ -294,14 +323,7 @@
     std::lock_guard lock(mLock);
     audio_config_base_t config = AUDIO_CONFIG_BASE_INITIALIZER;
     // For now use highest supported channel count
-    uint32_t maxCount = 0;
-    for ( auto mask : mChannelMasks) {
-        const size_t count = audio_channel_count_from_out_mask(mask);
-        if (count > maxCount) {
-            config.channel_mask = mask;
-            maxCount = count;
-        }
-    }
+    config.channel_mask = getMaxChannelMask(mChannelMasks);
     return config;
 }
 
@@ -338,7 +360,7 @@
     if (levels == nullptr) {
         return binderStatusFromStatusT(BAD_VALUE);
     }
-    levels->push_back(SpatializationLevel::NONE);
+    // The effect must already report SpatializationLevel::NONE, otherwise we don't load it.
     levels->insert(levels->end(), mLevels.begin(), mLevels.end());
     return Status::ok();
 }
@@ -399,11 +421,7 @@
     if (modes == nullptr) {
         return binderStatusFromStatusT(BAD_VALUE);
     }
-
-    modes->push_back(SpatializerHeadTrackingMode::DISABLED);
-    if (mSupportsHeadTracking) {
-        modes->push_back(SpatializerHeadTrackingMode::RELATIVE_WORLD);
-    }
+    modes->insert(modes->end(), mHeadTrackingModes.begin(), mHeadTrackingModes.end());
     return Status::ok();
 }
 
@@ -501,9 +519,11 @@
         return binderStatusFromStatusT(INVALID_OPERATION);
     }
     std::lock_guard lock(mLock);
-    mHeadSensor = sensorHandle;
-    checkPoseController_l();
-    checkSensorsState_l();
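+    // Only act on an actual change to avoid redundant pose controller checks and recentering.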
+    if (mHeadSensor != sensorHandle) {
+        mHeadSensor = sensorHandle;
+        checkPoseController_l();
+        checkSensorsState_l();
+    }
     return Status::ok();
 }
 
@@ -513,8 +533,13 @@
         return binderStatusFromStatusT(INVALID_OPERATION);
     }
     std::lock_guard lock(mLock);
-    mScreenSensor = sensorHandle;
-    checkSensorsState_l();
+    if (mScreenSensor != sensorHandle) {
+        mScreenSensor = sensorHandle;
+        // TODO: consider a new method setHeadAndScreenSensor()
+        // because we generally set both at the same time.
+        // This will avoid duplicated work and recentering.
+        checkSensorsState_l();
+    }
     return Status::ok();
 }
 
@@ -693,9 +718,10 @@
         // create FX instance on output
         AttributionSourceState attributionSource = AttributionSourceState();
         mEngine = new AudioEffect(attributionSource);
-        mEngine->set(nullptr, &mEngineDescriptor.uuid, 0, Spatializer::engineCallback /* cbf */,
-                     this /* user */, AUDIO_SESSION_OUTPUT_STAGE, output, {} /* device */,
-                     false /* probe */, true /* notifyFramesProcessed */);
+        mEngine->set(nullptr /* type */, &mEngineDescriptor.uuid, 0 /* priority */,
+                     wp<AudioEffect::IAudioEffectCallback>::fromExisting(this),
+                     AUDIO_SESSION_OUTPUT_STAGE, output, {} /* device */, false /* probe */,
+                     true /* notifyFramesProcessed */);
         status_t status = mEngine->initCheck();
         ALOGV("%s mEngine create status %d", __func__, (int)status);
         if (status != NO_ERROR) {
@@ -812,27 +838,10 @@
     }
 }
 
-void Spatializer::engineCallback(int32_t event, void *user, void *info) {
-    if (user == nullptr) {
-        return;
-    }
-    Spatializer* const me = reinterpret_cast<Spatializer *>(user);
-    switch (event) {
-        case AudioEffect::EVENT_FRAMES_PROCESSED: {
-            int frames = info == nullptr ? 0 : *(int*)info;
-            ALOGV("%s frames processed %d for me %p", __func__, frames, me);
-            me->postFramesProcessedMsg(frames);
-        } break;
-        default:
-            ALOGV("%s event %d", __func__, event);
-            break;
-    }
-}
-
-void Spatializer::postFramesProcessedMsg(int frames) {
+void Spatializer::onFramesProcessed(int32_t framesProcessed) {
     sp<AMessage> msg =
             new AMessage(EngineCallbackHandler::kWhatOnFramesProcessed, mHandler);
-    msg->setInt32(EngineCallbackHandler::kNumFramesKey, frames);
+    msg->setInt32(EngineCallbackHandler::kNumFramesKey, framesProcessed);
     msg->post();
 }
 
diff --git a/services/audiopolicy/service/Spatializer.h b/services/audiopolicy/service/Spatializer.h
index a36ba61..ad45fa9 100644
--- a/services/audiopolicy/service/Spatializer.h
+++ b/services/audiopolicy/service/Spatializer.h
@@ -84,6 +84,7 @@
  * spatializer mixer thread is destroyed.
  */
 class Spatializer : public media::BnSpatializer,
+                    public AudioEffect::IAudioEffectCallback,
                     public IBinder::DeathRecipient,
                     private SpatializerPoseController::Listener {
   public:
@@ -274,7 +275,7 @@
         return NO_ERROR;
     }
 
-    void postFramesProcessedMsg(int frames);
+    virtual void onFramesProcessed(int32_t framesProcessed) override;
 
     /**
      * Checks if head and screen sensors must be actively monitored based on
@@ -298,7 +299,10 @@
     /** Effect engine descriptor */
     const effect_descriptor_t mEngineDescriptor;
     /** Callback interface to parent audio policy service */
-    SpatializerPolicyCallback* mPolicyCallback;
+    SpatializerPolicyCallback* const mPolicyCallback;
+
+    /** Currently there is only one version of the spatializer running */
+    const std::string mMetricsId = AMEDIAMETRICS_KEY_PREFIX_AUDIO_SPATIALIZER "0";
 
     /** Mutex protecting internal state */
     mutable std::mutex mLock;
@@ -339,6 +343,7 @@
     float mDisplayOrientation GUARDED_BY(mLock) = kDisplayOrientationInvalid;
 
     std::vector<media::SpatializationLevel> mLevels;
+    std::vector<media::SpatializerHeadTrackingMode> mHeadTrackingModes;
     std::vector<media::SpatializationMode> mSpatializationModes;
     std::vector<audio_channel_mask_t> mChannelMasks;
     bool mSupportsHeadTracking;
diff --git a/services/audiopolicy/service/SpatializerPoseController.cpp b/services/audiopolicy/service/SpatializerPoseController.cpp
index 0a9f4d9..304d44a 100644
--- a/services/audiopolicy/service/SpatializerPoseController.cpp
+++ b/services/audiopolicy/service/SpatializerPoseController.cpp
@@ -18,6 +18,7 @@
 #define LOG_TAG "SpatializerPoseController"
 //#define LOG_NDEBUG 0
 #include <sensor/Sensor.h>
+#include <media/MediaMetricsItem.h>
 #include <utils/Log.h>
 #include <utils/SystemClock.h>
 
@@ -75,6 +76,10 @@
 // How many ticks in a second.
 constexpr auto kTicksPerSecond = Ticks::period::den;
 
+std::string getSensorMetricsId(int32_t sensorId) {
+    return std::string(AMEDIAMETRICS_KEY_PREFIX_AUDIO_SENSOR).append(std::to_string(sensorId));
+}
+
 }  // namespace
 
 SpatializerPoseController::SpatializerPoseController(Listener* listener,
@@ -144,9 +149,16 @@
 
 void SpatializerPoseController::setHeadSensor(int32_t sensor) {
     std::lock_guard lock(mMutex);
+    if (sensor == mHeadSensor) return;
+    ALOGV("%s: new sensor:%d  mHeadSensor:%d  mScreenSensor:%d",
+            __func__, sensor, mHeadSensor, mScreenSensor);
+
     // Stop current sensor, if valid and different from the other sensor.
     if (mHeadSensor != INVALID_SENSOR && mHeadSensor != mScreenSensor) {
         mPoseProvider->stopSensor(mHeadSensor);
+        mediametrics::LogItem(getSensorMetricsId(mHeadSensor))
+            .set(AMEDIAMETRICS_PROP_EVENT, AMEDIAMETRICS_PROP_EVENT_VALUE_STOP)
+            .record();
     }
 
     if (sensor != INVALID_SENSOR) {
@@ -154,6 +166,15 @@
             // Start new sensor.
             mHeadSensor =
                     mPoseProvider->startSensor(sensor, mSensorPeriod) ? sensor : INVALID_SENSOR;
+            if (mHeadSensor != INVALID_SENSOR) {
+                auto sensor = mPoseProvider->getSensorByHandle(mHeadSensor);
+                std::string stringType = sensor ? sensor->getStringType().c_str() : "";
+                mediametrics::LogItem(getSensorMetricsId(mHeadSensor))
+                    .set(AMEDIAMETRICS_PROP_EVENT, AMEDIAMETRICS_PROP_EVENT_VALUE_START)
+                    .set(AMEDIAMETRICS_PROP_MODE, AMEDIAMETRICS_PROP_MODE_VALUE_HEAD)
+                    .set(AMEDIAMETRICS_PROP_TYPE, stringType)
+                    .record();
+            }
         } else {
             // Sensor is already enabled.
             mHeadSensor = mScreenSensor;
@@ -162,14 +183,21 @@
         mHeadSensor = INVALID_SENSOR;
     }
 
-    mProcessor->recenter(true, false);
+    mProcessor->recenter(true /* recenterHead */, false /* recenterScreen */);
 }
 
 void SpatializerPoseController::setScreenSensor(int32_t sensor) {
     std::lock_guard lock(mMutex);
+    if (sensor == mScreenSensor) return;
+    ALOGV("%s: new sensor:%d  mHeadSensor:%d  mScreenSensor:%d",
+            __func__, sensor, mHeadSensor, mScreenSensor);
+
     // Stop current sensor, if valid and different from the other sensor.
     if (mScreenSensor != INVALID_SENSOR && mScreenSensor != mHeadSensor) {
         mPoseProvider->stopSensor(mScreenSensor);
+        mediametrics::LogItem(getSensorMetricsId(mScreenSensor))
+            .set(AMEDIAMETRICS_PROP_EVENT, AMEDIAMETRICS_PROP_EVENT_VALUE_STOP)
+            .record();
     }
 
     if (sensor != INVALID_SENSOR) {
@@ -177,6 +205,13 @@
             // Start new sensor.
             mScreenSensor =
                     mPoseProvider->startSensor(sensor, mSensorPeriod) ? sensor : INVALID_SENSOR;
+            if (mScreenSensor != INVALID_SENSOR) {
+                auto sensor = mPoseProvider->getSensorByHandle(mScreenSensor);
+                std::string stringType = sensor ? sensor->getStringType().c_str() : "";
+                mediametrics::LogItem(getSensorMetricsId(mScreenSensor))
+                    .set(AMEDIAMETRICS_PROP_EVENT, AMEDIAMETRICS_PROP_EVENT_VALUE_START)
+                    .set(AMEDIAMETRICS_PROP_MODE, AMEDIAMETRICS_PROP_MODE_VALUE_SCREEN)
+                    .set(AMEDIAMETRICS_PROP_TYPE, stringType)
+                    .record();
+            }
         } else {
             // Sensor is already enabled.
             mScreenSensor = mHeadSensor;
@@ -185,7 +220,7 @@
         mScreenSensor = INVALID_SENSOR;
     }
 
-    mProcessor->recenter(false, true);
+    mProcessor->recenter(false /* recenterHead */, true /* recenterScreen */);
 }
 
 void SpatializerPoseController::setDesiredMode(HeadTrackingMode mode) {
diff --git a/services/camera/libcameraservice/Android.bp b/services/camera/libcameraservice/Android.bp
index e98975e..981c569 100644
--- a/services/camera/libcameraservice/Android.bp
+++ b/services/camera/libcameraservice/Android.bp
@@ -163,7 +163,6 @@
         "android.hardware.camera.device@3.5",
         "android.hardware.camera.device@3.6",
         "android.hardware.camera.device@3.7",
-        "android.hardware.camera.device@3.8",
         "android.hardware.camera.device-V1-ndk",
         "media_permission-aidl-cpp",
     ],
diff --git a/services/camera/libcameraservice/CameraService.cpp b/services/camera/libcameraservice/CameraService.cpp
index 9aa6b82..a2e7f7e 100644
--- a/services/camera/libcameraservice/CameraService.cpp
+++ b/services/camera/libcameraservice/CameraService.cpp
@@ -142,7 +142,10 @@
 // Set to keep track of logged service error events.
 static std::set<String8> sServiceErrorEventSet;
 
-CameraService::CameraService() :
+CameraService::CameraService(
+        std::shared_ptr<CameraServiceProxyWrapper> cameraServiceProxyWrapper) :
+        mCameraServiceProxyWrapper(cameraServiceProxyWrapper == nullptr ?
+                std::make_shared<CameraServiceProxyWrapper>() : cameraServiceProxyWrapper),
         mEventLog(DEFAULT_EVENT_LOG_LENGTH),
         mNumberOfCameras(0),
         mNumberOfCamerasWithoutSystemCamera(0),
@@ -195,7 +198,7 @@
 
     // This needs to be last call in this function, so that it's as close to
     // ServiceManager::addService() as possible.
-    CameraServiceProxyWrapper::pingCameraServiceProxy();
+    mCameraServiceProxyWrapper->pingCameraServiceProxy();
     ALOGI("CameraService pinged cameraservice proxy");
 }
 
@@ -897,29 +900,37 @@
     BasicClient::BasicClient::sCameraService = nullptr;
 }
 
-int CameraService::getDeviceVersion(const String8& cameraId, int* facing, int* orientation) {
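+// Returns a {device version, IPC transport} pair, or {-1, IPCTransport::INVALID} on failure.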
+std::pair<int, IPCTransport> CameraService::getDeviceVersion(const String8& cameraId, int* facing,
+        int* orientation) {
     ATRACE_CALL();
 
     int deviceVersion = 0;
 
     status_t res;
     hardware::hidl_version maxVersion{0,0};
+    IPCTransport transport = IPCTransport::INVALID;
     res = mCameraProviderManager->getHighestSupportedVersion(cameraId.string(),
-            &maxVersion);
-    if (res != OK) return -1;
+            &maxVersion, &transport);
+    if (res != OK || transport == IPCTransport::INVALID) {
+        ALOGE("%s: Unable to get highest supported version for camera id %s", __FUNCTION__,
+                cameraId.string());
+        return std::make_pair(-1, IPCTransport::INVALID);
+    }
     deviceVersion = HARDWARE_DEVICE_API_VERSION(maxVersion.get_major(), maxVersion.get_minor());
 
     hardware::CameraInfo info;
     if (facing) {
         res = mCameraProviderManager->getCameraInfo(cameraId.string(), &info);
-        if (res != OK) return -1;
+        if (res != OK) {
+            return std::make_pair(-1, IPCTransport::INVALID);
+        }
         *facing = info.facing;
         if (orientation) {
             *orientation = info.orientation;
         }
     }
 
-    return deviceVersion;
+    return std::make_pair(deviceVersion, transport);
 }
 
 Status CameraService::filterGetInfoErrorCode(status_t err) {
@@ -943,45 +954,48 @@
         const sp<IInterface>& cameraCb, const String16& packageName, bool systemNativeClient,
         const std::optional<String16>& featureId,  const String8& cameraId,
         int api1CameraId, int facing, int sensorOrientation, int clientPid, uid_t clientUid,
-        int servicePid, int deviceVersion, apiLevel effectiveApiLevel, bool overrideForPerfClass,
-        /*out*/sp<BasicClient>* client) {
-
-    // Create CameraClient based on device version reported by the HAL.
-    switch(deviceVersion) {
-        case CAMERA_DEVICE_API_VERSION_1_0:
-            ALOGE("Camera using old HAL version: %d", deviceVersion);
-            return STATUS_ERROR_FMT(ERROR_DEPRECATED_HAL,
-                    "Camera device \"%s\" HAL version %d no longer supported",
-                    cameraId.string(), deviceVersion);
-            break;
-        case CAMERA_DEVICE_API_VERSION_3_0:
-        case CAMERA_DEVICE_API_VERSION_3_1:
-        case CAMERA_DEVICE_API_VERSION_3_2:
-        case CAMERA_DEVICE_API_VERSION_3_3:
-        case CAMERA_DEVICE_API_VERSION_3_4:
-        case CAMERA_DEVICE_API_VERSION_3_5:
-        case CAMERA_DEVICE_API_VERSION_3_6:
-        case CAMERA_DEVICE_API_VERSION_3_7:
-        case CAMERA_DEVICE_API_VERSION_3_8:
-            if (effectiveApiLevel == API_1) { // Camera1 API route
-                sp<ICameraClient> tmp = static_cast<ICameraClient*>(cameraCb.get());
-                *client = new Camera2Client(cameraService, tmp, packageName, featureId,
-                        cameraId, api1CameraId, facing, sensorOrientation, clientPid, clientUid,
-                        servicePid, overrideForPerfClass);
-            } else { // Camera2 API route
-                sp<hardware::camera2::ICameraDeviceCallbacks> tmp =
-                        static_cast<hardware::camera2::ICameraDeviceCallbacks*>(cameraCb.get());
-                *client = new CameraDeviceClient(cameraService, tmp, packageName,
-                        systemNativeClient, featureId, cameraId, facing, sensorOrientation,
-                        clientPid, clientUid, servicePid, overrideForPerfClass);
-            }
-            break;
-        default:
-            // Should not be reachable
-            ALOGE("Unknown camera device HAL version: %d", deviceVersion);
-            return STATUS_ERROR_FMT(ERROR_INVALID_OPERATION,
-                    "Camera device \"%s\" has unknown HAL version %d",
-                    cameraId.string(), deviceVersion);
+        int servicePid, std::pair<int, IPCTransport> deviceVersionAndTransport,
+        apiLevel effectiveApiLevel, bool overrideForPerfClass, /*out*/sp<BasicClient>* client) {
+    // For HIDL devices
+    if (deviceVersionAndTransport.second == IPCTransport::HIDL) {
+        // Create CameraClient based on device version reported by the HAL.
+        int deviceVersion = deviceVersionAndTransport.first;
+        switch(deviceVersion) {
+            case CAMERA_DEVICE_API_VERSION_1_0:
+                ALOGE("Camera using old HAL version: %d", deviceVersion);
+                return STATUS_ERROR_FMT(ERROR_DEPRECATED_HAL,
+                        "Camera device \"%s\" HAL version %d no longer supported",
+                        cameraId.string(), deviceVersion);
+                break;
+            case CAMERA_DEVICE_API_VERSION_3_0:
+            case CAMERA_DEVICE_API_VERSION_3_1:
+            case CAMERA_DEVICE_API_VERSION_3_2:
+            case CAMERA_DEVICE_API_VERSION_3_3:
+            case CAMERA_DEVICE_API_VERSION_3_4:
+            case CAMERA_DEVICE_API_VERSION_3_5:
+            case CAMERA_DEVICE_API_VERSION_3_6:
+            case CAMERA_DEVICE_API_VERSION_3_7:
+                break;
+            default:
+                // Should not be reachable
+                ALOGE("Unknown camera device HAL version: %d", deviceVersion);
+                return STATUS_ERROR_FMT(ERROR_INVALID_OPERATION,
+                        "Camera device \"%s\" has unknown HAL version %d",
+                        cameraId.string(), deviceVersion);
+        }
+    }
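+    // From this point on, HIDL and AIDL devices share the same client creation path.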
+    if (effectiveApiLevel == API_1) { // Camera1 API route
+        sp<ICameraClient> tmp = static_cast<ICameraClient*>(cameraCb.get());
+        *client = new Camera2Client(cameraService, tmp, cameraService->mCameraServiceProxyWrapper,
+                packageName, featureId, cameraId, api1CameraId, facing, sensorOrientation,
+                clientPid, clientUid, servicePid, overrideForPerfClass);
+    } else { // Camera2 API route
+        sp<hardware::camera2::ICameraDeviceCallbacks> tmp =
+                static_cast<hardware::camera2::ICameraDeviceCallbacks*>(cameraCb.get());
+        *client = new CameraDeviceClient(cameraService, tmp,
+                cameraService->mCameraServiceProxyWrapper, packageName, systemNativeClient,
+                featureId, cameraId, facing, sensorOrientation, clientPid, clientUid, servicePid,
+                overrideForPerfClass);
     }
     return Status::ok();
 }
@@ -1698,7 +1712,7 @@
         return STATUS_ERROR(ERROR_ILLEGAL_ARGUMENT, msg.string());
     }
 
-    if (CameraServiceProxyWrapper::isCameraDisabled()) {
+    if (mCameraServiceProxyWrapper->isCameraDisabled()) {
         String8 msg =
                 String8::format("Camera disabled by device policy");
         ALOGE("%s: %s", __FUNCTION__, msg.string());
@@ -1707,7 +1721,8 @@
 
     // enforce system camera permissions
     if (oomScoreOffset > 0 &&
-            !hasPermissionsForSystemCamera(callingPid, CameraThreadState::getCallingUid())) {
+            !hasPermissionsForSystemCamera(callingPid, CameraThreadState::getCallingUid()) &&
+            !isTrustedCallingUid(CameraThreadState::getCallingUid())) {
         String8 msg =
                 String8::format("Cannot change the priority of a client %s pid %d for "
                         "camera id %s without SYSTEM_CAMERA permissions",
@@ -1832,7 +1847,8 @@
         // give flashlight a chance to close devices if necessary.
         mFlashlight->prepareDeviceOpen(cameraId);
 
-        int deviceVersion = getDeviceVersion(cameraId, /*out*/&facing, /*out*/&orientation);
+        auto deviceVersionAndTransport =
+                getDeviceVersion(cameraId, /*out*/&facing, /*out*/&orientation);
         if (facing == -1) {
             ALOGE("%s: Unable to get camera device \"%s\"  facing", __FUNCTION__, cameraId.string());
             return STATUS_ERROR_FMT(ERROR_INVALID_OPERATION,
@@ -1845,7 +1861,7 @@
         if(!(ret = makeClient(this, cameraCb, clientPackageName, systemNativeClient,
                 clientFeatureId, cameraId, api1CameraId, facing, orientation,
                 clientPid, clientUid, getpid(),
-                deviceVersion, effectiveApiLevel, overrideForPerfClass,
+                deviceVersionAndTransport, effectiveApiLevel, overrideForPerfClass,
                 /*out*/&tmp)).isOk()) {
             return ret;
         }
@@ -1908,7 +1924,7 @@
             client->setRotateAndCropOverride(mOverrideRotateAndCropMode);
         } else {
           client->setRotateAndCropOverride(
-              CameraServiceProxyWrapper::getRotateAndCropOverride(
+              mCameraServiceProxyWrapper->getRotateAndCropOverride(
                   clientPackageName, facing, multiuser_get_user_id(clientUid)));
         }
 
@@ -1958,7 +1974,7 @@
     device = client;
 
     int32_t openLatencyMs = ns2ms(systemTime() - openTimeNs);
-    CameraServiceProxyWrapper::logOpen(cameraId, facing, clientPackageName,
+    mCameraServiceProxyWrapper->logOpen(cameraId, facing, clientPackageName,
             effectiveApiLevel, isNonSystemNdk, openLatencyMs);
 
     {
@@ -2444,7 +2460,7 @@
             const auto basicClient = current->getValue();
             if (basicClient.get() != nullptr) {
               basicClient->setRotateAndCropOverride(
-                  CameraServiceProxyWrapper::getRotateAndCropOverride(
+                  mCameraServiceProxyWrapper->getRotateAndCropOverride(
                       basicClient->getPackageName(),
                       basicClient->getCameraFacing(),
                       multiuser_get_user_id(basicClient->getClientUid())));
@@ -2719,45 +2735,48 @@
             return STATUS_ERROR(ERROR_ILLEGAL_ARGUMENT, msg.string());
     }
 
-    int deviceVersion = getDeviceVersion(id);
-    switch (deviceVersion) {
-        case CAMERA_DEVICE_API_VERSION_1_0:
-        case CAMERA_DEVICE_API_VERSION_3_0:
-        case CAMERA_DEVICE_API_VERSION_3_1:
-            if (apiVersion == API_VERSION_2) {
-                ALOGV("%s: Camera id %s uses HAL version %d <3.2, doesn't support api2 without shim",
-                        __FUNCTION__, id.string(), deviceVersion);
-                *isSupported = false;
-            } else { // if (apiVersion == API_VERSION_1) {
-                ALOGV("%s: Camera id %s uses older HAL before 3.2, but api1 is always supported",
+    auto deviceVersionAndTransport = getDeviceVersion(id);
+    if (deviceVersionAndTransport.first == -1) {
+        String8 msg = String8::format("Unknown camera ID %s", id.string());
+        ALOGE("%s: %s", __FUNCTION__, msg.string());
+        return STATUS_ERROR(ERROR_ILLEGAL_ARGUMENT, msg.string());
+    }
+    if (deviceVersionAndTransport.second == IPCTransport::HIDL) {
+        int deviceVersion = deviceVersionAndTransport.first;
+        switch (deviceVersion) {
+            case CAMERA_DEVICE_API_VERSION_1_0:
+            case CAMERA_DEVICE_API_VERSION_3_0:
+            case CAMERA_DEVICE_API_VERSION_3_1:
+                if (apiVersion == API_VERSION_2) {
+                    ALOGV("%s: Camera id %s uses HAL version %d <3.2, doesn't support api2 without "
+                            "shim", __FUNCTION__, id.string(), deviceVersion);
+                    *isSupported = false;
+                } else { // if (apiVersion == API_VERSION_1) {
+                    ALOGV("%s: Camera id %s uses older HAL before 3.2, but api1 is always "
+                            "supported", __FUNCTION__, id.string());
+                    *isSupported = true;
+                }
+                break;
+            case CAMERA_DEVICE_API_VERSION_3_2:
+            case CAMERA_DEVICE_API_VERSION_3_3:
+            case CAMERA_DEVICE_API_VERSION_3_4:
+            case CAMERA_DEVICE_API_VERSION_3_5:
+            case CAMERA_DEVICE_API_VERSION_3_6:
+            case CAMERA_DEVICE_API_VERSION_3_7:
+                ALOGV("%s: Camera id %s uses HAL3.2 or newer, supports api1/api2 directly",
                         __FUNCTION__, id.string());
                 *isSupported = true;
+                break;
+            default: {
+                String8 msg = String8::format("Unknown device version %x for device %s",
+                        deviceVersion, id.string());
+                ALOGE("%s: %s", __FUNCTION__, msg.string());
+                return STATUS_ERROR(ERROR_INVALID_OPERATION, msg.string());
             }
-            break;
-        case CAMERA_DEVICE_API_VERSION_3_2:
-        case CAMERA_DEVICE_API_VERSION_3_3:
-        case CAMERA_DEVICE_API_VERSION_3_4:
-        case CAMERA_DEVICE_API_VERSION_3_5:
-        case CAMERA_DEVICE_API_VERSION_3_6:
-        case CAMERA_DEVICE_API_VERSION_3_7:
-        case CAMERA_DEVICE_API_VERSION_3_8:
-            ALOGV("%s: Camera id %s uses HAL3.2 or newer, supports api1/api2 directly",
-                    __FUNCTION__, id.string());
-            *isSupported = true;
-            break;
-        case -1: {
-            String8 msg = String8::format("Unknown camera ID %s", id.string());
-            ALOGE("%s: %s", __FUNCTION__, msg.string());
-            return STATUS_ERROR(ERROR_ILLEGAL_ARGUMENT, msg.string());
         }
-        default: {
-            String8 msg = String8::format("Unknown device version %x for device %s",
-                    deviceVersion, id.string());
-            ALOGE("%s: %s", __FUNCTION__, msg.string());
-            return STATUS_ERROR(ERROR_INVALID_OPERATION, msg.string());
-        }
+    } else {
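+        // Non-HIDL (AIDL) devices always report api1/api2 as supported.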
+        *isSupported = true;
     }
-
     return Status::ok();
 }
 
diff --git a/services/camera/libcameraservice/CameraService.h b/services/camera/libcameraservice/CameraService.h
index 30c00a5..0395475 100644
--- a/services/camera/libcameraservice/CameraService.h
+++ b/services/camera/libcameraservice/CameraService.h
@@ -47,6 +47,8 @@
 #include "media/RingBuffer.h"
 #include "utils/AutoConditionLock.h"
 #include "utils/ClientManager.h"
+#include "utils/IPCTransport.h"
+#include "utils/CameraServiceProxyWrapper.h"
 
 #include <set>
 #include <string>
@@ -99,7 +101,10 @@
     // Implementation of BinderService<T>
     static char const* getServiceName() { return "media.camera"; }
 
-                        CameraService();
+                        // A non-null cameraServiceProxyWrapper should be provided only for
+                        // testing purposes.
+                        CameraService(std::shared_ptr<CameraServiceProxyWrapper>
+                                cameraServiceProxyWrapper = nullptr);
     virtual             ~CameraService();
 
     /////////////////////////////////////////////////////////////////////
@@ -242,7 +247,7 @@
 
     /////////////////////////////////////////////////////////////////////
     // CameraDeviceFactory functionality
-    int                 getDeviceVersion(const String8& cameraId, int* facing = nullptr,
+    std::pair<int, IPCTransport>    getDeviceVersion(const String8& cameraId, int* facing = nullptr,
             int* orientation = nullptr);
 
     /////////////////////////////////////////////////////////////////////
@@ -771,6 +776,8 @@
 
     sp<SensorPrivacyPolicy> mSensorPrivacyPolicy;
 
+    std::shared_ptr<CameraServiceProxyWrapper> mCameraServiceProxyWrapper;
+
     // Delay-load the Camera HAL module
     virtual void onFirstRef();
 
@@ -1272,8 +1279,9 @@
             const sp<IInterface>& cameraCb, const String16& packageName,
             bool systemNativeClient, const std::optional<String16>& featureId,
             const String8& cameraId, int api1CameraId, int facing, int sensorOrientation,
-            int clientPid, uid_t clientUid, int servicePid, int deviceVersion,
-            apiLevel effectiveApiLevel, bool overrideForPerfClass, /*out*/sp<BasicClient>* client);
+            int clientPid, uid_t clientUid, int servicePid,
+            std::pair<int, IPCTransport> deviceVersionAndIPCTransport, apiLevel effectiveApiLevel,
+            bool overrideForPerfClass, /*out*/sp<BasicClient>* client);
 
     status_t checkCameraAccess(const String16& opPackageName);
 
diff --git a/services/camera/libcameraservice/api1/Camera2Client.cpp b/services/camera/libcameraservice/api1/Camera2Client.cpp
index 74806a4..65523bc 100644
--- a/services/camera/libcameraservice/api1/Camera2Client.cpp
+++ b/services/camera/libcameraservice/api1/Camera2Client.cpp
@@ -52,6 +52,7 @@
 
 Camera2Client::Camera2Client(const sp<CameraService>& cameraService,
         const sp<hardware::ICameraClient>& cameraClient,
+        std::shared_ptr<CameraServiceProxyWrapper> cameraServiceProxyWrapper,
         const String16& clientPackageName,
         const std::optional<String16>& clientFeatureId,
         const String8& cameraDeviceId,
@@ -62,7 +63,7 @@
         uid_t clientUid,
         int servicePid,
         bool overrideForPerfClass):
-        Camera2ClientBase(cameraService, cameraClient, clientPackageName,
+        Camera2ClientBase(cameraService, cameraClient, cameraServiceProxyWrapper, clientPackageName,
                 false/*systemNativeClient - since no ndk for api1*/, clientFeatureId,
                 cameraDeviceId, api1CameraId, cameraFacing, sensorOrientation, clientPid,
                 clientUid, servicePid, overrideForPerfClass, /*legacyClient*/ true),
@@ -112,7 +113,7 @@
     {
         SharedParameters::Lock l(mParameters);
 
-        res = l.mParameters.initialize(mDevice.get(), mDeviceVersion);
+        res = l.mParameters.initialize(mDevice.get());
         if (res != OK) {
             ALOGE("%s: Camera %d: unable to build defaults: %s (%d)",
                     __FUNCTION__, mCameraId, strerror(-res), res);
@@ -478,7 +479,7 @@
     CameraService::Client::disconnect();
 
     int32_t closeLatencyMs = ns2ms(systemTime() - startTime);
-    CameraServiceProxyWrapper::logClose(mCameraIdStr, closeLatencyMs);
+    mCameraServiceProxyWrapper->logClose(mCameraIdStr, closeLatencyMs);
 
     return res;
 }
@@ -1689,12 +1690,15 @@
                 __FUNCTION__, mCameraId, degrees);
         return BAD_VALUE;
     }
-    SharedParameters::Lock l(mParameters);
-    if (mRotateAndCropMode != ANDROID_SCALER_ROTATE_AND_CROP_NONE) {
-        ALOGI("%s: Rotate and crop set to: %d, skipping display orientation!", __FUNCTION__,
-                mRotateAndCropMode);
-        transform = mRotateAndCropPreviewTransform;
+    {
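+        // Read the rotate-and-crop state under mRotateAndCropLock before locking mParameters.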
+        Mutex::Autolock icl(mRotateAndCropLock);
+        if (mRotateAndCropMode != ANDROID_SCALER_ROTATE_AND_CROP_NONE) {
+            ALOGI("%s: Rotate and crop set to: %d, skipping display orientation!", __FUNCTION__,
+                    mRotateAndCropMode);
+            transform = mRotateAndCropPreviewTransform;
+        }
     }
+    SharedParameters::Lock l(mParameters);
     if (transform != l.mParameters.previewTransform &&
             getPreviewStreamId() != NO_STREAM) {
         mDevice->setStreamTransform(getPreviewStreamId(), transform);
@@ -2321,7 +2325,7 @@
     if (rotateAndCrop > ANDROID_SCALER_ROTATE_AND_CROP_AUTO) return BAD_VALUE;
 
     {
-        Mutex::Autolock icl(mBinderSerializationLock);
+        Mutex::Autolock icl(mRotateAndCropLock);
         if (mRotateAndCropIsSupported) {
             mRotateAndCropMode = rotateAndCrop;
         } else {
diff --git a/services/camera/libcameraservice/api1/Camera2Client.h b/services/camera/libcameraservice/api1/Camera2Client.h
index da49f56..fe91cba 100644
--- a/services/camera/libcameraservice/api1/Camera2Client.h
+++ b/services/camera/libcameraservice/api1/Camera2Client.h
@@ -98,6 +98,7 @@
 
     Camera2Client(const sp<CameraService>& cameraService,
             const sp<hardware::ICameraClient>& cameraClient,
+            std::shared_ptr<CameraServiceProxyWrapper> cameraServiceProxyWrapper,
             const String16& clientPackageName,
             const std::optional<String16>& clientFeatureId,
             const String8& cameraDeviceId,
@@ -242,6 +243,8 @@
     bool isZslEnabledInStillTemplate();
     // The current rotate & crop mode passed by camera service
     uint8_t mRotateAndCropMode;
+    // Synchronize access to 'mRotateAndCropMode'
+    mutable Mutex mRotateAndCropLock;
     // Contains the preview stream transformation that would normally be applied
     // when the display rotation is 0
     int mRotateAndCropPreviewTransform;
diff --git a/services/camera/libcameraservice/api1/client2/Parameters.cpp b/services/camera/libcameraservice/api1/client2/Parameters.cpp
index 9a7ada2..123cd75 100644
--- a/services/camera/libcameraservice/api1/client2/Parameters.cpp
+++ b/services/camera/libcameraservice/api1/client2/Parameters.cpp
@@ -50,7 +50,7 @@
 Parameters::~Parameters() {
 }
 
-status_t Parameters::initialize(CameraDeviceBase *device, int deviceVersion) {
+status_t Parameters::initialize(CameraDeviceBase *device) {
     status_t res;
     if (device == nullptr) {
         ALOGE("%s: device is null!", __FUNCTION__);
@@ -63,7 +63,6 @@
         return BAD_VALUE;
     }
     Parameters::info = &info;
-    mDeviceVersion = deviceVersion;
 
     res = buildFastInfo(device);
     if (res != OK) return res;
diff --git a/services/camera/libcameraservice/api1/client2/Parameters.h b/services/camera/libcameraservice/api1/client2/Parameters.h
index 263025e..cbe62a7 100644
--- a/services/camera/libcameraservice/api1/client2/Parameters.h
+++ b/services/camera/libcameraservice/api1/client2/Parameters.h
@@ -269,7 +269,7 @@
     ~Parameters();
 
     // Sets up default parameters
-    status_t initialize(CameraDeviceBase *device, int deviceVersion);
+    status_t initialize(CameraDeviceBase *device);
 
     // Build fast-access device static info from static info
     status_t buildFastInfo(CameraDeviceBase *device);
@@ -459,7 +459,6 @@
     // Helper function to get the suggested video sizes
     Vector<Size> getPreferredVideoSizes() const;
 
-    int mDeviceVersion;
     uint8_t mDefaultSceneMode;
 };
 
diff --git a/services/camera/libcameraservice/api2/CameraDeviceClient.cpp b/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
index a27d7ed..8787e03 100644
--- a/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
+++ b/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
@@ -31,7 +31,6 @@
 #include "device3/Camera3Device.h"
 #include "device3/Camera3OutputStream.h"
 #include "api2/CameraDeviceClient.h"
-#include "utils/CameraServiceProxyWrapper.h"
 
 #include <camera_metadata_hidden.h>
 
@@ -87,6 +86,7 @@
 
 CameraDeviceClient::CameraDeviceClient(const sp<CameraService>& cameraService,
         const sp<hardware::camera2::ICameraDeviceCallbacks>& remoteCallback,
+        std::shared_ptr<CameraServiceProxyWrapper> cameraServiceProxyWrapper,
         const String16& clientPackageName,
         bool systemNativeClient,
         const std::optional<String16>& clientFeatureId,
@@ -97,9 +97,9 @@
         uid_t clientUid,
         int servicePid,
         bool overrideForPerfClass) :
-    Camera2ClientBase(cameraService, remoteCallback, clientPackageName, systemNativeClient,
-                clientFeatureId, cameraId, /*API1 camera ID*/ -1, cameraFacing, sensorOrientation,
-                clientPid, clientUid, servicePid, overrideForPerfClass),
+    Camera2ClientBase(cameraService, remoteCallback, cameraServiceProxyWrapper, clientPackageName,
+            systemNativeClient, clientFeatureId, cameraId, /*API1 camera ID*/ -1, cameraFacing,
+            sensorOrientation, clientPid, clientUid, servicePid, overrideForPerfClass),
     mInputStream(),
     mStreamingRequestId(REQUEST_ID_NONE),
     mRequestIdCounter(0),
@@ -168,9 +168,8 @@
                                 __FUNCTION__, mCameraIdStr.c_str(), entry.data.i64[i]);
                     }
                 }
-                mDynamicProfileMap.emplace(
-                        ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD,
-                        standardBitmap);
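+                // Unlike emplace(), operator[] overwrites an existing STANDARD entry.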
+                mDynamicProfileMap[ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD] =
+                        standardBitmap;
             } else {
                 ALOGE("%s: Device %s supports 10-bit output but doesn't include a dynamic range"
                         " profile map!", __FUNCTION__, mCameraIdStr.c_str());
@@ -693,7 +692,7 @@
 
         nsecs_t configureEnd = systemTime();
         int32_t configureDurationMs = ns2ms(configureEnd) - startTimeMs;
-        CameraServiceProxyWrapper::logStreamConfigured(mCameraIdStr, operatingMode,
+        mCameraServiceProxyWrapper->logStreamConfigured(mCameraIdStr, operatingMode,
                 false /*internalReconfig*/, configureDurationMs);
     }
 
@@ -2062,7 +2061,7 @@
     Camera2ClientBase::detachDevice();
 
     int32_t closeLatencyMs = ns2ms(systemTime() - startTime);
-    CameraServiceProxyWrapper::logClose(mCameraIdStr, closeLatencyMs);
+    mCameraServiceProxyWrapper->logClose(mCameraIdStr, closeLatencyMs);
 }
 
 /** Device-related methods */
diff --git a/services/camera/libcameraservice/api2/CameraDeviceClient.h b/services/camera/libcameraservice/api2/CameraDeviceClient.h
index 45915ba..06844c6 100644
--- a/services/camera/libcameraservice/api2/CameraDeviceClient.h
+++ b/services/camera/libcameraservice/api2/CameraDeviceClient.h
@@ -29,6 +29,7 @@
 #include "common/FrameProcessorBase.h"
 #include "common/Camera2ClientBase.h"
 #include "CompositeStream.h"
+#include "utils/CameraServiceProxyWrapper.h"
 #include "utils/SessionConfigurationUtils.h"
 
 using android::camera3::OutputStreamInfo;
@@ -178,6 +179,7 @@
 
     CameraDeviceClient(const sp<CameraService>& cameraService,
             const sp<hardware::camera2::ICameraDeviceCallbacks>& remoteCallback,
+            std::shared_ptr<CameraServiceProxyWrapper> cameraServiceProxyWrapper,
             const String16& clientPackageName,
             bool clientPackageOverride,
             const std::optional<String16>& clientFeatureId,
diff --git a/services/camera/libcameraservice/common/Camera2ClientBase.cpp b/services/camera/libcameraservice/common/Camera2ClientBase.cpp
index 49a9760..633746a 100644
--- a/services/camera/libcameraservice/common/Camera2ClientBase.cpp
+++ b/services/camera/libcameraservice/common/Camera2ClientBase.cpp
@@ -37,7 +37,6 @@
 #include "device3/aidl/AidlCamera3Device.h"
 #include "device3/hidl/HidlCamera3Device.h"
 #include "utils/CameraThreadState.h"
-#include "utils/CameraServiceProxyWrapper.h"
 
 namespace android {
 using namespace camera2;
@@ -48,6 +47,7 @@
 Camera2ClientBase<TClientBase>::Camera2ClientBase(
         const sp<CameraService>& cameraService,
         const sp<TCamCallbacks>& remoteCallback,
+        std::shared_ptr<CameraServiceProxyWrapper> cameraServiceProxyWrapper,
         const String16& clientPackageName,
         bool systemNativeClient,
         const std::optional<String16>& clientFeatureId,
@@ -64,7 +64,7 @@
                 clientFeatureId, cameraId, api1CameraId, cameraFacing, sensorOrientation, clientPid,
                 clientUid, servicePid),
         mSharedCameraCallbacks(remoteCallback),
-        mDeviceVersion(cameraService->getDeviceVersion(TClientBase::mCameraIdStr)),
+        mCameraServiceProxyWrapper(cameraServiceProxyWrapper),
         mDeviceActive(false), mApi1CameraId(api1CameraId)
 {
     ALOGI("Camera %s: Opened. Client: %s (PID %d, UID %d)", cameraId.string(),
@@ -116,13 +116,13 @@
     switch (providerTransport) {
         case IPCTransport::HIDL:
             mDevice =
-                    new HidlCamera3Device(TClientBase::mCameraIdStr, mOverrideForPerfClass,
-                            mLegacyClient);
+                    new HidlCamera3Device(mCameraServiceProxyWrapper,
+                            TClientBase::mCameraIdStr, mOverrideForPerfClass, mLegacyClient);
             break;
         case IPCTransport::AIDL:
             mDevice =
-                    new AidlCamera3Device(TClientBase::mCameraIdStr, mOverrideForPerfClass,
-                            mLegacyClient);
+                    new AidlCamera3Device(mCameraServiceProxyWrapper,
+                            TClientBase::mCameraIdStr, mOverrideForPerfClass, mLegacyClient);
              break;
         default:
             ALOGE("%s Invalid transport for camera id %s", __FUNCTION__,
@@ -326,7 +326,7 @@
                     TClientBase::mCameraIdStr.string(), res);
             return res;
         }
-        CameraServiceProxyWrapper::logActive(TClientBase::mCameraIdStr, maxPreviewFps);
+        mCameraServiceProxyWrapper->logActive(TClientBase::mCameraIdStr, maxPreviewFps);
     }
     mDeviceActive = true;
 
@@ -345,7 +345,7 @@
             ALOGE("%s: Camera %s: Error finishing streaming ops: %d", __FUNCTION__,
                     TClientBase::mCameraIdStr.string(), res);
         }
-        CameraServiceProxyWrapper::logIdle(TClientBase::mCameraIdStr,
+        mCameraServiceProxyWrapper->logIdle(TClientBase::mCameraIdStr,
                 requestCount, resultErrorCount, deviceError, userTag, videoStabilizationMode,
                 streamStats);
     }
@@ -423,11 +423,6 @@
 }
 
 template <typename TClientBase>
-int Camera2ClientBase<TClientBase>::getCameraDeviceVersion() const {
-    return mDeviceVersion;
-}
-
-template <typename TClientBase>
 const sp<CameraDeviceBase>& Camera2ClientBase<TClientBase>::getCameraDevice() {
     return mDevice;
 }
diff --git a/services/camera/libcameraservice/common/Camera2ClientBase.h b/services/camera/libcameraservice/common/Camera2ClientBase.h
index ec33f46..0dad50f 100644
--- a/services/camera/libcameraservice/common/Camera2ClientBase.h
+++ b/services/camera/libcameraservice/common/Camera2ClientBase.h
@@ -19,6 +19,7 @@
 
 #include "common/CameraDeviceBase.h"
 #include "camera/CaptureResult.h"
+#include "utils/CameraServiceProxyWrapper.h"
 
 namespace android {
 
@@ -47,6 +48,7 @@
     // TODO: too many params, move into a ClientArgs<T>
     Camera2ClientBase(const sp<CameraService>& cameraService,
                       const sp<TCamCallbacks>& remoteCallback,
+                      std::shared_ptr<CameraServiceProxyWrapper> cameraServiceProxyWrapper,
                       const String16& clientPackageName,
                       bool systemNativeClient,
                       const std::optional<String16>& clientFeatureId,
@@ -135,6 +137,7 @@
     pid_t mInitialClientPid;
     bool mOverrideForPerfClass = false;
     bool mLegacyClient = false;
+    std::shared_ptr<CameraServiceProxyWrapper> mCameraServiceProxyWrapper;
 
     virtual sp<IBinder> asBinderWrapper() {
         return IInterface::asBinder(this);
@@ -152,8 +155,6 @@
 
     /** CameraDeviceBase instance wrapping HAL3+ entry */
 
-    const int mDeviceVersion;
-
     // Note: This was previously set to const to avoid mDevice being updated -
     // b/112639939 (update of sp<> is racy) during dumpDevice (which is important to be lock free
     // for debugging purpose). The const has been removed since CameraDeviceBase
diff --git a/services/camera/libcameraservice/common/CameraProviderManager.cpp b/services/camera/libcameraservice/common/CameraProviderManager.cpp
index 54c826f..cd23250 100644
--- a/services/camera/libcameraservice/common/CameraProviderManager.cpp
+++ b/services/camera/libcameraservice/common/CameraProviderManager.cpp
@@ -21,7 +21,6 @@
 #include "CameraProviderManager.h"
 
 #include <aidl/android/hardware/camera/device/ICameraDevice.h>
-#include <android/hardware/camera/device/3.8/ICameraDevice.h>
 
 #include <algorithm>
 #include <chrono>
@@ -61,11 +60,6 @@
 
 const float CameraProviderManager::kDepthARTolerance = .1f;
 
-// AIDL Devices start with major version 1, offset added to bring them up to HIDL.
-const uint16_t kAidlDeviceMajorOffset = 2;
-// AIDL Devices start with minor version 1, offset added to bring them up to HIDL.
-const uint16_t kAidlDeviceMinorOffset = 7;
-
 CameraProviderManager::HidlServiceInteractionProxyImpl
 CameraProviderManager::sHidlServiceInteractionProxy{};
 
@@ -280,15 +274,13 @@
     return deviceIds;
 }
 
-bool CameraProviderManager::isValidDevice(const std::string &id, uint16_t majorVersion) const {
-    std::lock_guard<std::mutex> lock(mInterfaceMutex);
-    return isValidDeviceLocked(id, majorVersion);
-}
-
-bool CameraProviderManager::isValidDeviceLocked(const std::string &id, uint16_t majorVersion) const {
+bool CameraProviderManager::isValidDeviceLocked(const std::string &id, uint16_t majorVersion,
+        IPCTransport transport) const {
     for (auto& provider : mProviders) {
+        IPCTransport providerTransport = provider->getIPCTransport();
         for (auto& deviceInfo : provider->mDevices) {
-            if (deviceInfo->mId == id && deviceInfo->mVersion.get_major() == majorVersion) {
+            if (deviceInfo->mId == id && deviceInfo->mVersion.get_major() == majorVersion &&
+                    transport == providerTransport) {
                 return true;
             }
         }
@@ -369,29 +361,32 @@
     return getCameraCharacteristicsLocked(id, overrideForPerfClass, characteristics);
 }
 
-// Till hidl is removed from the android source tree, we use this for aidl as
-// well. We artificially give aidl camera device version 1 a major version 3 and minor
-// version 8.
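+// Returns the highest supported device interface version for the given camera id,
+// along with the IPC transport (HIDL or AIDL) of the provider that exposes it.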
 status_t CameraProviderManager::getHighestSupportedVersion(const std::string &id,
-        hardware::hidl_version *v) {
+        hardware::hidl_version *v, IPCTransport *transport) {
+    if (v == nullptr || transport == nullptr) {
+        return BAD_VALUE;
+    }
     std::lock_guard<std::mutex> lock(mInterfaceMutex);
 
     hardware::hidl_version maxVersion{0,0};
     bool found = false;
+    IPCTransport providerTransport = IPCTransport::INVALID;
     for (auto& provider : mProviders) {
         for (auto& deviceInfo : provider->mDevices) {
             if (deviceInfo->mId == id) {
                 if (deviceInfo->mVersion > maxVersion) {
                     maxVersion = deviceInfo->mVersion;
+                    providerTransport = provider->getIPCTransport();
                     found = true;
                 }
             }
         }
     }
-    if (!found) {
+    if (!found || providerTransport == IPCTransport::INVALID) {
         return NAME_NOT_FOUND;
     }
     *v = maxVersion;
+    *transport = providerTransport;
     return OK;
 }
 
@@ -442,9 +437,10 @@
 bool CameraProviderManager::supportSetTorchMode(const std::string &id) const {
     std::lock_guard<std::mutex> lock(mInterfaceMutex);
     for (auto& provider : mProviders) {
-        auto deviceInfo = findDeviceInfoLocked(id);
-        if (deviceInfo != nullptr) {
-            return provider->mSetTorchModeSupported;
+        for (auto& deviceInfo : provider->mDevices) {
+            if (deviceInfo->mId == id) {
+                return provider->mSetTorchModeSupported;
+            }
         }
     }
     return false;
@@ -583,7 +579,11 @@
     std::lock_guard<std::mutex> lock(mInterfaceMutex);
     mDeviceState = newState;
     status_t res = OK;
-    for (auto& provider : mProviders) {
+    // Make a copy of mProviders because we unlock mInterfaceMutex temporarily
+    // within the loop. It's possible that during the time mInterfaceMutex is
+    // unlocked, mProviders has changed.
+    auto providers = mProviders;
+    for (auto& provider : providers) {
         ALOGV("%s: Notifying %s for new state 0x%" PRIx64,
                 __FUNCTION__, provider->mProviderName.c_str(), newState);
         // b/199240726 Camera providers can for example try to add/remove
@@ -613,8 +613,7 @@
 
     std::lock_guard<std::mutex> lock(mInterfaceMutex);
 
-    auto deviceInfo = findDeviceInfoLocked(id,
-            /*minVersion*/ {3,0}, /*maxVersion*/ {4,0});
+    auto deviceInfo = findDeviceInfoLocked(id);
     if (deviceInfo == nullptr) return NAME_NOT_FOUND;
 
     auto *aidlDeviceInfo3 = static_cast<AidlProviderInfo::AidlDeviceInfo3*>(deviceInfo);
@@ -695,8 +694,7 @@
 
     std::lock_guard<std::mutex> lock(mInterfaceMutex);
 
-    auto deviceInfo = findDeviceInfoLocked(id,
-            /*minVersion*/ {3,0}, /*maxVersion*/ {4,0});
+    auto deviceInfo = findDeviceInfoLocked(id);
     if (deviceInfo == nullptr) return NAME_NOT_FOUND;
 
     auto *hidlDeviceInfo3 = static_cast<HidlProviderInfo::HidlDeviceInfo3*>(deviceInfo);
@@ -897,9 +895,16 @@
 }
 
 CameraProviderManager::ProviderInfo::DeviceInfo* CameraProviderManager::findDeviceInfoLocked(
-        const std::string& id,
-        hardware::hidl_version minVersion, hardware::hidl_version maxVersion) const {
+        const std::string& id) const {
     for (auto& provider : mProviders) {
+        using hardware::hidl_version;
+        IPCTransport transport = provider->getIPCTransport();
+        // AIDL min version starts at major: 1 minor: 1
+        hidl_version minVersion =
+                (transport == IPCTransport::HIDL) ? hidl_version{3, 2} : hidl_version{1, 1};
+        hidl_version maxVersion =
+                (transport == IPCTransport::HIDL) ? hidl_version{3, 7} : hidl_version{1000, 0};
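+        // For AIDL, {1000, 0} serves as an effectively unbounded upper limit.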
+
         for (auto& deviceInfo : provider->mDevices) {
             if (deviceInfo->mId == id &&
                     minVersion <= deviceInfo->mVersion && maxVersion >= deviceInfo->mVersion) {
@@ -911,16 +916,13 @@
 }
 
 metadata_vendor_id_t CameraProviderManager::getProviderTagIdLocked(
-        const std::string& id, hardware::hidl_version minVersion,
-        hardware::hidl_version maxVersion) const {
+        const std::string& id) const {
     metadata_vendor_id_t ret = CAMERA_METADATA_INVALID_VENDOR_ID;
 
     std::lock_guard<std::mutex> lock(mInterfaceMutex);
     for (auto& provider : mProviders) {
         for (auto& deviceInfo : provider->mDevices) {
-            if (deviceInfo->mId == id &&
-                    minVersion <= deviceInfo->mVersion &&
-                    maxVersion >= deviceInfo->mVersion) {
+            if (deviceInfo->mId == id) {
                 return provider->mProviderTagid;
             }
         }
@@ -1274,9 +1276,14 @@
 status_t CameraProviderManager::ProviderInfo::DeviceInfo3::fixupMonochromeTags() {
     status_t res = OK;
     auto& c = mCameraCharacteristics;
-
+    sp<ProviderInfo> parentProvider = mParentProvider.promote();
+    if (parentProvider == nullptr) {
+        return DEAD_OBJECT;
+    }
+    IPCTransport ipcTransport = parentProvider->getIPCTransport();
     // Override static metadata for MONOCHROME camera with older device version
-    if (mVersion.get_major() == 3 && mVersion.get_minor() < 5) {
+    if (ipcTransport == IPCTransport::HIDL &&
+            (mVersion.get_major() == 3 && mVersion.get_minor() < 5)) {
         camera_metadata_entry cap = c.find(ANDROID_REQUEST_AVAILABLE_CAPABILITIES);
         for (size_t i = 0; i < cap.count; i++) {
             if (cap.data.u8[i] == ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MONOCHROME) {
@@ -1405,7 +1412,8 @@
     return res;
 }
 
-status_t CameraProviderManager::ProviderInfo::DeviceInfo3::addReadoutTimestampTag() {
+status_t CameraProviderManager::ProviderInfo::DeviceInfo3::addReadoutTimestampTag(
+        bool readoutTimestampSupported) {
     status_t res = OK;
     auto& c = mCameraCharacteristics;
 
@@ -1416,9 +1424,7 @@
     }
 
     uint8_t readoutTimestamp = ANDROID_SENSOR_READOUT_TIMESTAMP_NOT_SUPPORTED;
-    int deviceVersion = HARDWARE_DEVICE_API_VERSION(
-            mVersion.get_major(), mVersion.get_minor());
-    if (deviceVersion >= CAMERA_DEVICE_API_VERSION_3_8) {
+    if (readoutTimestampSupported) {
         readoutTimestamp = ANDROID_SENSOR_READOUT_TIMESTAMP_HARDWARE;
     }
 
@@ -1658,6 +1664,7 @@
     }
 
     for (auto& provider : mProviders) {
+        IPCTransport transport = provider->getIPCTransport();
         for (auto& deviceInfo : provider->mDevices) {
             std::vector<std::string> physicalIds;
             if (deviceInfo->mIsLogicalCamera) {
@@ -1665,7 +1672,8 @@
                         cameraId) != deviceInfo->mPhysicalIds.end()) {
                     int deviceVersion = HARDWARE_DEVICE_API_VERSION(
                             deviceInfo->mVersion.get_major(), deviceInfo->mVersion.get_minor());
-                    if (deviceVersion < CAMERA_DEVICE_API_VERSION_3_5) {
+                    if (transport == IPCTransport::HIDL &&
+                            deviceVersion < CAMERA_DEVICE_API_VERSION_3_5) {
                         ALOGE("%s: Wrong deviceVersion %x for hiddenPhysicalCameraId %s",
                                 __FUNCTION__, deviceVersion, cameraId.c_str());
                         return falseRet;
@@ -1883,39 +1891,49 @@
 
     uint16_t major, minor;
     std::string type, id;
+    IPCTransport transport = getIPCTransport();
 
     status_t res = parseDeviceName(name, &major, &minor, &type, &id);
     if (res != OK) {
         return res;
     }
-    if (getIPCTransport() == IPCTransport::AIDL) {
-        // Till HIDL support exists, map AIDL versions to HIDL.
-        // TODO:b/196432585 Explore if we can get rid of this.
-        major += kAidlDeviceMajorOffset;
-        minor += kAidlDeviceMinorOffset;
-    }
 
     if (type != mType) {
         ALOGE("%s: Device type %s does not match provider type %s", __FUNCTION__,
                 type.c_str(), mType.c_str());
         return BAD_VALUE;
     }
-    if (mManager->isValidDeviceLocked(id, major)) {
+    if (mManager->isValidDeviceLocked(id, major, transport)) {
         ALOGE("%s: Device %s: ID %s is already in use for device major version %d", __FUNCTION__,
                 name.c_str(), id.c_str(), major);
         return BAD_VALUE;
     }
 
     std::unique_ptr<DeviceInfo> deviceInfo;
-    switch (major) {
-        case 3:
-            deviceInfo = initializeDeviceInfo(name, mProviderTagid, id, minor);
+    switch (transport) {
+        case IPCTransport::HIDL:
+            switch (major) {
+                case 3:
+                    break;
+                default:
+                    ALOGE("%s: Device %s: Unsupported HIDL device HAL major version %d:",
+                          __FUNCTION__,  name.c_str(), major);
+                    return BAD_VALUE;
+            }
+            break;
+        case IPCTransport::AIDL:
+            if (major != 1) {
+                ALOGE("%s: Device %s: Unsupported AIDL device HAL major version %d:", __FUNCTION__,
+                        name.c_str(), major);
+                return BAD_VALUE;
+            }
             break;
         default:
-            ALOGE("%s: Device %s: Unsupported IDL device HAL major version %d:", __FUNCTION__,
-                    name.c_str(), major);
+            ALOGE("%s Invalid transport %d", __FUNCTION__, transport);
             return BAD_VALUE;
     }
+
+    deviceInfo = initializeDeviceInfo(name, mProviderTagid, id, minor);
     if (deviceInfo == nullptr) return BAD_VALUE;
     deviceInfo->notifyDeviceStateChange(getDeviceState());
     deviceInfo->mStatus = initialStatus;
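
With addDevice keyed on the provider's transport, the supported major versions differ per IPC flavor: HIDL providers are expected to report camera.device@3.x, while AIDL providers report device HAL major version 1. A minimal sketch of that validation rule, using a local stand-in for the IPCTransport enum (assumed to match utils/IPCTransport.h):

    #include <cstdint>

    // Sketch only: mirrors the per-transport major-version check above.
    // IPCTransport here is a local stand-in, not the real header's definition.
    enum class IPCTransport { HIDL, AIDL, INVALID };

    static bool isSupportedDeviceMajorVersion(IPCTransport transport, uint16_t major) {
        switch (transport) {
            case IPCTransport::HIDL: return major == 3;  // only camera.device@3.x over HIDL
            case IPCTransport::AIDL: return major == 1;  // AIDL camera device HAL major version 1
            default:                 return false;       // unknown transport: reject
        }
    }
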
@@ -2697,7 +2715,7 @@
 
 status_t CameraProviderManager::getCameraCharacteristicsLocked(const std::string &id,
         bool overrideForPerfClass, CameraMetadata* characteristics) const {
-    auto deviceInfo = findDeviceInfoLocked(id, /*minVersion*/ {3, 0}, /*maxVersion*/ {5, 0});
+    auto deviceInfo = findDeviceInfoLocked(id);
     if (deviceInfo != nullptr) {
         return deviceInfo->getCameraCharacteristics(overrideForPerfClass, characteristics);
     }
diff --git a/services/camera/libcameraservice/common/CameraProviderManager.h b/services/camera/libcameraservice/common/CameraProviderManager.h
index 352a6e3..d049aff 100644
--- a/services/camera/libcameraservice/common/CameraProviderManager.h
+++ b/services/camera/libcameraservice/common/CameraProviderManager.h
@@ -41,7 +41,6 @@
 #include <android/hardware/camera/provider/2.6/ICameraProvider.h>
 #include <android/hardware/camera/provider/2.7/ICameraProvider.h>
 #include <android/hardware/camera/device/3.7/types.h>
-#include <android/hardware/camera/device/3.8/types.h>
 #include <android/hidl/manager/1.0/IServiceNotification.h>
 #include <binder/IServiceManager.h>
 #include <camera/VendorTagDescriptor.h>
@@ -96,7 +95,6 @@
 #define CAMERA_DEVICE_API_VERSION_3_5 HARDWARE_DEVICE_API_VERSION(3, 5)
 #define CAMERA_DEVICE_API_VERSION_3_6 HARDWARE_DEVICE_API_VERSION(3, 6)
 #define CAMERA_DEVICE_API_VERSION_3_7 HARDWARE_DEVICE_API_VERSION(3, 7)
-#define CAMERA_DEVICE_API_VERSION_3_8 HARDWARE_DEVICE_API_VERSION(3, 8)
 
 /**
  * The vendor tag descriptor class that takes HIDL/AIDL vendor tag information as
@@ -233,11 +231,6 @@
     std::vector<std::string> getAPI1CompatibleCameraDeviceIds() const;
 
     /**
-     * Return true if a device with a given ID and major version exists
-     */
-    bool isValidDevice(const std::string &id, uint16_t majorVersion) const;
-
-    /**
      * Return true if a device with a given ID has a flash unit. Returns false
      * for devices that are unknown.
      */
@@ -286,7 +279,7 @@
      * Return the highest supported device interface version for this ID
      */
     status_t getHighestSupportedVersion(const std::string &id,
-            hardware::hidl_version *v);
+            hardware::hidl_version *v, IPCTransport *transport);
 
     /**
      * Check if a given camera device support setTorchMode API.
@@ -392,9 +385,7 @@
     /*
      * Return provider type for a specific device.
      */
-    metadata_vendor_id_t getProviderTagIdLocked(const std::string& id,
-            hardware::hidl_version minVersion = hardware::hidl_version{0,0},
-            hardware::hidl_version maxVersion = hardware::hidl_version{1000,0}) const;
+    metadata_vendor_id_t getProviderTagIdLocked(const std::string& id) const;
 
     /*
      * Check if a camera is a logical camera. And if yes, return
@@ -672,7 +663,7 @@
             status_t deriveHeicTags(bool maxResolution = false);
             status_t addRotateCropTags();
             status_t addPreCorrectionActiveArraySize();
-            status_t addReadoutTimestampTag();
+            status_t addReadoutTimestampTag(bool readoutTimestampSupported = true);
 
             static void getSupportedSizes(const CameraMetadata& ch, uint32_t tag,
                     android_pixel_format_t format,
@@ -797,12 +788,8 @@
 
     // Utility to find a DeviceInfo by ID; pointer is only valid while mInterfaceMutex is held
     // and the calling code doesn't mutate the list of providers or their lists of devices.
-    // Finds the first device of the given ID that falls within the requested version range
-    //   minVersion <= deviceVersion < maxVersion
     // No guarantees on the order of traversal
-    ProviderInfo::DeviceInfo* findDeviceInfoLocked(const std::string& id,
-            hardware::hidl_version minVersion = hardware::hidl_version{0,0},
-            hardware::hidl_version maxVersion = hardware::hidl_version{1000,0}) const;
+    ProviderInfo::DeviceInfo* findDeviceInfoLocked(const std::string& id) const;
 
     // Map external providers to USB devices in order to handle USB hotplug
     // events for lazy HALs
@@ -829,7 +816,8 @@
     status_t removeProvider(const std::string& provider);
     sp<StatusListener> getStatusListener() const;
 
-    bool isValidDeviceLocked(const std::string &id, uint16_t majorVersion) const;
+    bool isValidDeviceLocked(const std::string &id, uint16_t majorVersion,
+            IPCTransport transport) const;
 
     size_t mProviderInstanceId = 0;
     std::vector<sp<ProviderInfo>> mProviders;
diff --git a/services/camera/libcameraservice/common/hidl/HidlProviderInfo.cpp b/services/camera/libcameraservice/common/hidl/HidlProviderInfo.cpp
index d986f52..d60565f 100644
--- a/services/camera/libcameraservice/common/hidl/HidlProviderInfo.cpp
+++ b/services/camera/libcameraservice/common/hidl/HidlProviderInfo.cpp
@@ -27,7 +27,6 @@
 #include <utils/Trace.h>
 
 #include <android/hardware/camera/device/3.7/ICameraDevice.h>
-#include <android/hardware/camera/device/3.8/ICameraDevice.h>
 
 namespace {
 const bool kEnableLazyHal(property_get_bool("ro.camera.enableLazyHal", false));
@@ -656,7 +655,7 @@
         ALOGE("%s: Unable to override zoomRatio related tags: %s (%d)",
                 __FUNCTION__, strerror(-res), res);
     }
-    res = addReadoutTimestampTag();
+    res = addReadoutTimestampTag(/*readoutTimestampSupported*/false);
     if (OK != res) {
         ALOGE("%s: Unable to add sensorReadoutTimestamp tag: %s (%d)",
                 __FUNCTION__, strerror(-res), res);
@@ -771,74 +770,14 @@
 }
 
 status_t HidlProviderInfo::HidlDeviceInfo3::turnOnTorchWithStrengthLevel(
-        int32_t torchStrength) {
-    const sp<hardware::camera::device::V3_2::ICameraDevice> interface = startDeviceInterface();
-    if (interface == nullptr) {
-        return DEAD_OBJECT;
-    }
-    sp<hardware::camera::device::V3_8::ICameraDevice> interface_3_8 = nullptr;
-    auto castResult_3_8 = device::V3_8::ICameraDevice::castFrom(interface);
-    if (castResult_3_8.isOk()) {
-        interface_3_8 = castResult_3_8;
-    }
-
-    if (interface_3_8 == nullptr) {
-        return INVALID_OPERATION;
-    }
-
-    Status s = interface_3_8->turnOnTorchWithStrengthLevel(torchStrength);
-    if (s == Status::OK) {
-        mTorchStrengthLevel = torchStrength;
-    }
-    return mapToStatusT(s);
+        int32_t /*torchStrengthLevel*/) {
+    ALOGE("%s HIDL does not support turning on torch with variable strength", __FUNCTION__);
+    return INVALID_OPERATION;
 }
 
-status_t HidlProviderInfo::HidlDeviceInfo3::getTorchStrengthLevel(int32_t *torchStrength) {
-    if (torchStrength == nullptr) {
-        return BAD_VALUE;
-    }
-    const sp<hardware::camera::device::V3_2::ICameraDevice> interface = startDeviceInterface();
-    if (interface == nullptr) {
-        return DEAD_OBJECT;
-    }
-    auto castResult_3_8 = device::V3_8::ICameraDevice::castFrom(interface);
-    sp<hardware::camera::device::V3_8::ICameraDevice> interface_3_8 = nullptr;
-    if (castResult_3_8.isOk()) {
-        interface_3_8 = castResult_3_8;
-    }
-
-    if (interface_3_8 == nullptr) {
-        return INVALID_OPERATION;
-    }
-
-    Status callStatus;
-    status_t res;
-    hardware::Return<void> ret = interface_3_8->getTorchStrengthLevel([&callStatus, &torchStrength]
-        (Status status, const int32_t& torchStrengthLevel) {
-        callStatus = status;
-        if (status == Status::OK) {
-             *torchStrength = torchStrengthLevel;
-        } });
-
-    if (ret.isOk()) {
-        switch (callStatus) {
-            case Status::OK:
-                // Expected case, do nothing.
-                res = OK;
-                break;
-            case Status::METHOD_NOT_SUPPORTED:
-                res = INVALID_OPERATION;
-                break;
-            default:
-                ALOGE("%s: Get torch strength level failed: %d", __FUNCTION__, callStatus);
-                res = UNKNOWN_ERROR;
-        }
-    } else {
-        ALOGE("%s: Unexpected binder error: %s", __FUNCTION__, ret.description().c_str());
-        res = UNKNOWN_ERROR;
-    }
-
-    return res;
+status_t HidlProviderInfo::HidlDeviceInfo3::getTorchStrengthLevel(int32_t * /*torchStrength*/) {
+    ALOGE("%s HIDL does not support variable torch strength level", __FUNCTION__);
+    return INVALID_OPERATION;
 }
 
 sp<hardware::camera::device::V3_2::ICameraDevice>
@@ -893,11 +832,11 @@
         const SessionConfiguration &configuration, bool overrideForPerfClass,
         metadataGetter getMetadata, bool *status) {
 
-    hardware::camera::device::V3_8::StreamConfiguration streamConfiguration;
+    hardware::camera::device::V3_7::StreamConfiguration configuration_3_7;
     bool earlyExit = false;
     auto bRes = SessionConfigurationUtils::convertToHALStreamCombination(configuration,
             String8(mId.c_str()), mCameraCharacteristics, getMetadata, mPhysicalIds,
-            streamConfiguration, overrideForPerfClass, &earlyExit);
+            configuration_3_7, overrideForPerfClass, &earlyExit);
 
     if (!bRes.isOk()) {
         return UNKNOWN_ERROR;
@@ -919,8 +858,6 @@
     sp<hardware::camera::device::V3_5::ICameraDevice> interface_3_5 = castResult_3_5;
     auto castResult_3_7 = device::V3_7::ICameraDevice::castFrom(interface);
     sp<hardware::camera::device::V3_7::ICameraDevice> interface_3_7 = castResult_3_7;
-    auto castResult_3_8 = device::V3_8::ICameraDevice::castFrom(interface);
-    sp<hardware::camera::device::V3_8::ICameraDevice> interface_3_8 = castResult_3_8;
 
     status_t res;
     Status callStatus;
@@ -930,27 +867,11 @@
                 callStatus = s;
                 *status = combStatus;
             };
-    if (interface_3_8 != nullptr) {
-        ret = interface_3_8->isStreamCombinationSupported_3_8(streamConfiguration, halCb);
-    } else if (interface_3_7 != nullptr) {
-        hardware::camera::device::V3_7::StreamConfiguration configuration_3_7;
-        bool success = SessionConfigurationUtils::convertHALStreamCombinationFromV38ToV37(
-                configuration_3_7, streamConfiguration);
-        if (!success) {
-            *status = false;
-            return OK;
-        }
+    if (interface_3_7 != nullptr) {
         ret = interface_3_7->isStreamCombinationSupported_3_7(configuration_3_7, halCb);
     } else if (interface_3_5 != nullptr) {
-        hardware::camera::device::V3_7::StreamConfiguration configuration_3_7;
-        bool success = SessionConfigurationUtils::convertHALStreamCombinationFromV38ToV37(
-                configuration_3_7, streamConfiguration);
-        if (!success) {
-            *status = false;
-            return OK;
-        }
         hardware::camera::device::V3_4::StreamConfiguration configuration_3_4;
-        success = SessionConfigurationUtils::convertHALStreamCombinationFromV37ToV34(
+        bool success = SessionConfigurationUtils::convertHALStreamCombinationFromV37ToV34(
                 configuration_3_4, configuration_3_7);
         if (!success) {
             *status = false;
@@ -993,7 +914,7 @@
     status_t res = OK;
     for (auto &cameraIdAndSessionConfig : cameraIdsAndSessionConfigs) {
         const std::string& cameraId = cameraIdAndSessionConfig.mCameraId;
-        hardware::camera::device::V3_8::StreamConfiguration streamConfiguration;
+        hardware::camera::device::V3_7::StreamConfiguration streamConfiguration;
         CameraMetadata deviceInfo;
         bool overrideForPerfClass =
                 SessionConfigurationUtils::targetPerfClassPrimaryCamera(
@@ -1027,8 +948,7 @@
         }
         CameraIdAndStreamCombination halCameraIdAndStream;
         halCameraIdAndStream.cameraId = cameraId;
-        SessionConfigurationUtils::convertHALStreamCombinationFromV38ToV37(
-                halCameraIdAndStream.streamConfiguration, streamConfiguration);
+        halCameraIdAndStream.streamConfiguration = streamConfiguration;
         halCameraIdsAndStreamsV.push_back(halCameraIdAndStream);
     }
     *halCameraIdsAndStreamCombinations = halCameraIdsAndStreamsV;
diff --git a/services/camera/libcameraservice/device3/Camera3Device.cpp b/services/camera/libcameraservice/device3/Camera3Device.cpp
index 943c0af..ebfa1d6 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.cpp
+++ b/services/camera/libcameraservice/device3/Camera3Device.cpp
@@ -63,7 +63,6 @@
 #include "utils/CameraThreadState.h"
 #include "utils/SessionConfigurationUtils.h"
 #include "utils/TraceHFR.h"
-#include "utils/CameraServiceProxyWrapper.h"
 
 #include <algorithm>
 #include <tuple>
@@ -73,7 +72,9 @@
 
 namespace android {
 
-Camera3Device::Camera3Device(const String8 &id, bool overrideForPerfClass, bool legacyClient):
+Camera3Device::Camera3Device(std::shared_ptr<CameraServiceProxyWrapper>& cameraServiceProxyWrapper,
+        const String8 &id, bool overrideForPerfClass, bool legacyClient):
+        mCameraServiceProxyWrapper(cameraServiceProxyWrapper),
         mId(id),
         mLegacyClient(legacyClient),
         mOperatingMode(NO_MODE),
@@ -2257,7 +2258,7 @@
         ALOGE("%s: Failed to pause streaming: %d", __FUNCTION__, rc);
     }
 
-    CameraServiceProxyWrapper::logStreamConfigured(mId, mOperatingMode, true /*internalReconfig*/,
+    mCameraServiceProxyWrapper->logStreamConfigured(mId, mOperatingMode, true /*internalReconfig*/,
         ns2ms(systemTime() - startTime));
 
     if (markClientActive) {
@@ -4276,8 +4277,8 @@
                 if (parent != nullptr) {
                     parent->mRequestBufferSM.onRequestThreadPaused();
                 }
-                mRequestClearing = false;
             }
+            mRequestClearing = false;
             // Stop waiting for now and let thread management happen
             return NULL;
         }
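
Threading the CameraServiceProxyWrapper through the Camera3Device constructor replaces the former static accessors with an injected dependency, which is what lets the new CameraPermissionsTest below substitute its own proxy. A minimal sketch of the same constructor-injection pattern, with hypothetical ProxyWrapper and Device types:

    #include <memory>
    #include <utility>

    // Sketch only: constructor injection of a proxy wrapper so tests can pass a fake.
    struct ProxyWrapper {
        virtual ~ProxyWrapper() = default;
        virtual void logStreamConfigured(int operatingMode) { (void) operatingMode; }
    };

    class Device {
      public:
        explicit Device(std::shared_ptr<ProxyWrapper> proxy) : mProxy(std::move(proxy)) {}
        void configureStreams(int mode) { mProxy->logStreamConfigured(mode); }  // no static singleton
      private:
        std::shared_ptr<ProxyWrapper> mProxy;
    };
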
diff --git a/services/camera/libcameraservice/device3/Camera3Device.h b/services/camera/libcameraservice/device3/Camera3Device.h
index 3c5cb78..d757eb9 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.h
+++ b/services/camera/libcameraservice/device3/Camera3Device.h
@@ -49,6 +49,7 @@
 #include "utils/TagMonitor.h"
 #include "utils/IPCTransport.h"
 #include "utils/LatencyHistogram.h"
+#include "utils/CameraServiceProxyWrapper.h"
 #include <camera_metadata_hidden.h>
 
 using android::camera3::camera_capture_request_t;
@@ -82,7 +83,8 @@
   friend class AidlCamera3Device;
   public:
 
-    explicit Camera3Device(const String8& id, bool overrideForPerfClass, bool legacyClient = false);
+    explicit Camera3Device(std::shared_ptr<CameraServiceProxyWrapper>& cameraServiceProxyWrapper,
+            const String8& id, bool overrideForPerfClass, bool legacyClient = false);
 
     virtual ~Camera3Device();
     // Delete and optionally close native handles and clear the input vector afterward
@@ -325,6 +327,8 @@
     // Constant to use for stream ID when one doesn't exist
     static const int           NO_STREAM = -1;
 
+    std::shared_ptr<CameraServiceProxyWrapper> mCameraServiceProxyWrapper;
+
     // A lock to enforce serialization on the input/configure side
     // of the public interface.
     // Not locked by methods guarded by mOutputLock, since they may act
@@ -458,6 +462,28 @@
                 // Verify buffer caches
                 std::vector<uint64_t> bufIds(offlineStream.circulatingBufferIds.begin(),
                         offlineStream.circulatingBufferIds.end());
+                {
+                    // Due to timing, there may be no remaining pending capture requests that
+                    // can update the caches on the HAL side. This can result in a buffer
+                    // cache mismatch between the service and the HAL, which must be
+                    // accounted for.
+                    std::lock_guard<std::mutex> l(mFreedBuffersLock);
+                    for (const auto& it : mFreedBuffers) {
+                        if (it.first == id) {
+                            ALOGV("%s: stream ID %d buffer id %" PRIu64 " cache removal still "
+                                    "pending", __FUNCTION__, id, it.second);
+                            const auto& cachedEntry = std::find(bufIds.begin(), bufIds.end(),
+                                    it.second);
+                            if (cachedEntry != bufIds.end()) {
+                                bufIds.erase(cachedEntry);
+                            } else {
+                                ALOGE("%s: stream ID %d buffer id %" PRIu64 " cache removal still "
+                                        "pending however buffer is no longer in the offline stream "
+                                        "info!", __FUNCTION__, id, it.second);
+                            }
+                        }
+                    }
+                }
                 if (!verifyBufferIds(id, bufIds)) {
                     ALOGE("%s: stream ID %d buffer cache records mismatch!", __FUNCTION__, id);
                     return UNKNOWN_ERROR;
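
The new block filters out buffer IDs whose cache-removal notification is still pending for this stream before verifyBufferIds() runs, so a late free is not reported as a cache mismatch when switching to offline mode. A minimal standalone sketch of that pruning step, assuming a simple (streamId, bufferId) freed-buffer list:

    #include <algorithm>
    #include <cstdint>
    #include <utility>
    #include <vector>

    // Sketch only: drop buffer IDs that are still pending cache removal for this
    // stream before comparing the remaining IDs against the HAL's view.
    void pruneFreedBuffers(int streamId,
                           const std::vector<std::pair<int, uint64_t>>& freedBuffers,
                           std::vector<uint64_t>* bufIds) {
        for (const auto& freed : freedBuffers) {
            if (freed.first != streamId) continue;
            auto it = std::find(bufIds->begin(), bufIds->end(), freed.second);
            if (it != bufIds->end()) {
                bufIds->erase(it);  // cache removal still pending; don't count it as a mismatch
            }
        }
    }
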
diff --git a/services/camera/libcameraservice/device3/Camera3OutputStream.cpp b/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
index 1e12fae..8e4ff13 100644
--- a/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
+++ b/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
@@ -379,9 +379,11 @@
     }
 
     // Fill in JPEG header
-    CameraBlob *aidlBlobHeader = reinterpret_cast<CameraBlob *>(aidlHeaderStart);
-    aidlBlobHeader->blobId = blobId;
-    aidlBlobHeader->blobSizeBytes = blobSizeBytes;
+    CameraBlob aidlHeader = {
+            .blobId = blobId,
+            .blobSizeBytes = static_cast<int32_t>(blobSizeBytes)
+    };
+    memcpy(aidlHeaderStart, &aidlHeader, sizeof(CameraBlob));
     graphicBuffer->unlock();
     return OK;
 }
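
Writing the header with memcpy instead of reinterpret_cast avoids dereferencing a possibly misaligned pointer into the locked gralloc buffer and sidesteps strict-aliasing concerns. A minimal sketch of the same pattern, with a hypothetical BlobHeader standing in for the AIDL CameraBlob:

    #include <cstdint>
    #include <cstring>

    // Sketch only: BlobHeader stands in for the AIDL CameraBlob type.
    struct BlobHeader {
        int32_t blobId;
        int32_t blobSizeBytes;
    };

    // Copy the header into the (possibly unaligned) tail of the JPEG buffer.
    void writeBlobHeader(uint8_t* headerStart, int32_t blobId, int32_t blobSizeBytes) {
        BlobHeader header = { blobId, blobSizeBytes };
        std::memcpy(headerStart, &header, sizeof(header));  // no aliasing/alignment assumptions
    }
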
@@ -1416,9 +1418,13 @@
     nsecs_t expectedPresentT = mLastPresentTime;
     nsecs_t minDiff = INT64_MAX;
     // Derive minimum intervals between presentation times based on minimal
-    // expected duration.
-    size_t minVsyncs = (mMinExpectedDuration + vsyncEventData.frameInterval - 1) /
-            vsyncEventData.frameInterval - 1;
+    // expected duration. The minimum number of Vsyncs is:
+    // - 0 if minFrameDuration in (0, 1.5] * vSyncInterval,
+    // - 1 if minFrameDuration in (1.5, 2.5] * vSyncInterval,
+    // - and so on.
+    int minVsyncs = (mMinExpectedDuration - vsyncEventData.frameInterval / 2) /
+            vsyncEventData.frameInterval;
+    if (minVsyncs < 0) minVsyncs = 0;
     nsecs_t minInterval = minVsyncs * vsyncEventData.frameInterval + kTimelineThresholdNs;
     // Find best timestamp in the vsync timeline:
     // - closest to the ideal present time,
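
With this rounding, minVsyncs = max(0, (minExpectedDuration - frameInterval / 2) / frameInterval) in integer arithmetic: at a 60 Hz vsync (about 16.7 ms), a 33.3 ms minimum frame duration yields 1, while durations under roughly 1.5 vsync intervals yield 0. A small standalone check of that arithmetic (assumed example values, not part of the patch):

    #include <cassert>
    #include <cstdint>

    // Sketch only: mirrors the rounding used for minVsyncs above.
    int64_t minVsyncsFor(int64_t minExpectedDurationNs, int64_t frameIntervalNs) {
        int64_t minVsyncs = (minExpectedDurationNs - frameIntervalNs / 2) / frameIntervalNs;
        return minVsyncs < 0 ? 0 : minVsyncs;
    }

    int main() {
        const int64_t vsync = 16666667;              // ~60 Hz vsync period in ns
        assert(minVsyncsFor(16666667, vsync) == 0);  // 60 fps stream: present every vsync
        assert(minVsyncsFor(33333333, vsync) == 1);  // 30 fps stream: one vsync apart
        assert(minVsyncsFor(8333333, vsync) == 0);   // ~120 fps duration: still 0
        return 0;
    }
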
diff --git a/services/camera/libcameraservice/device3/Camera3OutputUtils.h b/services/camera/libcameraservice/device3/Camera3OutputUtils.h
index dd01408..d6107c2 100644
--- a/services/camera/libcameraservice/device3/Camera3OutputUtils.h
+++ b/services/camera/libcameraservice/device3/Camera3OutputUtils.h
@@ -26,8 +26,6 @@
 
 #include <common/CameraDeviceBase.h>
 
-#include <android/hardware/camera/device/3.8/ICameraDeviceCallback.h>
-
 #include "device3/BufferUtils.h"
 #include "device3/DistortionMapper.h"
 #include "device3/ZoomRatioMapper.h"
diff --git a/services/camera/libcameraservice/device3/aidl/AidlCamera3Device.cpp b/services/camera/libcameraservice/device3/aidl/AidlCamera3Device.cpp
index f05520f..973bc04 100644
--- a/services/camera/libcameraservice/device3/aidl/AidlCamera3Device.cpp
+++ b/services/camera/libcameraservice/device3/aidl/AidlCamera3Device.cpp
@@ -161,9 +161,11 @@
     return (uint64_t)usage;
 }
 
-AidlCamera3Device::AidlCamera3Device(const String8& id, bool overrideForPerfClass,
-            bool legacyClient) : Camera3Device(id, overrideForPerfClass, legacyClient) {
-        mCallbacks = ndk::SharedRefBase::make<AidlCameraDeviceCallbacks>(this);
+AidlCamera3Device::AidlCamera3Device(
+        std::shared_ptr<CameraServiceProxyWrapper>& cameraServiceProxyWrapper,
+        const String8& id, bool overrideForPerfClass, bool legacyClient) :
+        Camera3Device(cameraServiceProxyWrapper, id, overrideForPerfClass, legacyClient) {
+    mCallbacks = ndk::SharedRefBase::make<AidlCameraDeviceCallbacks>(this);
 }
 
 status_t AidlCamera3Device::initialize(sp<CameraProviderManager> manager,
diff --git a/services/camera/libcameraservice/device3/aidl/AidlCamera3Device.h b/services/camera/libcameraservice/device3/aidl/AidlCamera3Device.h
index d20a7eb..ecf42b4 100644
--- a/services/camera/libcameraservice/device3/aidl/AidlCamera3Device.h
+++ b/services/camera/libcameraservice/device3/aidl/AidlCamera3Device.h
@@ -39,8 +39,9 @@
     using AidlRequestMetadataQueue = AidlMessageQueue<int8_t, SynchronizedReadWrite>;
     class AidlCameraDeviceCallbacks;
     friend class AidlCameraDeviceCallbacks;
-    explicit AidlCamera3Device(const String8& id, bool overrideForPerfClass,
-            bool legacyClient = false);
+    explicit AidlCamera3Device(
+            std::shared_ptr<CameraServiceProxyWrapper>& cameraServiceProxyWrapper,
+            const String8& id, bool overrideForPerfClass, bool legacyClient = false);
 
     virtual ~AidlCamera3Device() { }
 
diff --git a/services/camera/libcameraservice/device3/aidl/AidlCamera3OutputUtils.cpp b/services/camera/libcameraservice/device3/aidl/AidlCamera3OutputUtils.cpp
index b462d44..b2accc1 100644
--- a/services/camera/libcameraservice/device3/aidl/AidlCamera3OutputUtils.cpp
+++ b/services/camera/libcameraservice/device3/aidl/AidlCamera3OutputUtils.cpp
@@ -245,6 +245,8 @@
                             __FUNCTION__, streamId, strerror(-res), res);
                     if (res == TIMED_OUT || res == NO_MEMORY) {
                         bufRet.val.set<Tag::error>(StreamBufferRequestError::NO_BUFFER_AVAILABLE);
+                    } else if (res == INVALID_OPERATION) {
+                        bufRet.val.set<Tag::error>(StreamBufferRequestError::MAX_BUFFER_EXCEEDED);
                     } else {
                         bufRet.val.set<Tag::error>(StreamBufferRequestError::UNKNOWN_ERROR);
                     }
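
The added branch distinguishes a stream that is already at its maximum buffer count (INVALID_OPERATION from the allocation path, reported as MAX_BUFFER_EXCEEDED) from a transient shortage or a generic failure. A minimal sketch of that status-to-error mapping, with a local enum standing in for the AIDL StreamBufferRequestError:

    #include <utils/Errors.h>

    // Sketch only: RequestError stands in for the AIDL StreamBufferRequestError type.
    enum class RequestError { NO_BUFFER_AVAILABLE, MAX_BUFFER_EXCEEDED, UNKNOWN_ERROR };

    RequestError mapBufferRequestError(android::status_t res) {
        if (res == android::TIMED_OUT || res == android::NO_MEMORY) {
            return RequestError::NO_BUFFER_AVAILABLE;   // transient: try again later
        } else if (res == android::INVALID_OPERATION) {
            return RequestError::MAX_BUFFER_EXCEEDED;   // stream already at its max buffer count
        }
        return RequestError::UNKNOWN_ERROR;
    }
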
diff --git a/services/camera/libcameraservice/device3/hidl/HidlCamera3Device.cpp b/services/camera/libcameraservice/device3/hidl/HidlCamera3Device.cpp
index 6d76687..4bb426c 100644
--- a/services/camera/libcameraservice/device3/hidl/HidlCamera3Device.cpp
+++ b/services/camera/libcameraservice/device3/hidl/HidlCamera3Device.cpp
@@ -65,16 +65,9 @@
 using namespace android::hardware::camera;
 using namespace android::hardware::camera::device::V3_2;
 using android::hardware::camera::metadata::V3_6::CameraMetadataEnumAndroidSensorPixelMode;
-using android::hardware::camera::metadata::V3_8::CameraMetadataEnumAndroidScalerAvailableStreamUseCases;
 
 namespace android {
 
-CameraMetadataEnumAndroidRequestAvailableDynamicRangeProfilesMap
-HidlCamera3Device::mapToHidlDynamicProfile(int64_t dynamicRangeProfile) {
-    return static_cast<CameraMetadataEnumAndroidRequestAvailableDynamicRangeProfilesMap>(
-            dynamicRangeProfile);
-}
-
 hardware::graphics::common::V1_0::PixelFormat HidlCamera3Device::mapToPixelFormat(
         int frameworkFormat) {
     return (hardware::graphics::common::V1_0::PixelFormat) frameworkFormat;
@@ -285,7 +278,8 @@
     // Metadata tags needs fixup for monochrome camera device version less
     // than 3.5.
     hardware::hidl_version maxVersion{0,0};
-    res = manager->getHighestSupportedVersion(mId.string(), &maxVersion);
+    IPCTransport transport = IPCTransport::HIDL;
+    res = manager->getHighestSupportedVersion(mId.string(), &maxVersion, &transport);
     if (res != OK) {
         ALOGE("%s: Error in getting camera device version id: %s (%d)",
                 __FUNCTION__, strerror(-res), res);
@@ -446,11 +440,6 @@
     return notifyHelper<hardware::camera::device::V3_2::NotifyMsg>(msgs);
 }
 
-hardware::Return<void> HidlCamera3Device::notify_3_8(
-        const hardware::hidl_vec<hardware::camera::device::V3_8::NotifyMsg>& msgs) {
-    return notifyHelper<hardware::camera::device::V3_8::NotifyMsg>(msgs);
-}
-
 template<typename NotifyMsgType>
 hardware::Return<void> HidlCamera3Device::notifyHelper(
         const hardware::hidl_vec<NotifyMsgType>& msgs) {
@@ -735,10 +724,6 @@
         mRequestMetadataQueue(queue) {
     // Check with hardware service manager if we can downcast these interfaces
     // Somewhat expensive, so cache the results at startup
-    auto castResult_3_8 = device::V3_8::ICameraDeviceSession::castFrom(mHidlSession);
-    if (castResult_3_8.isOk()) {
-        mHidlSession_3_8 = castResult_3_8;
-    }
     auto castResult_3_7 = device::V3_7::ICameraDeviceSession::castFrom(mHidlSession);
     if (castResult_3_7.isOk()) {
         mHidlSession_3_7 = castResult_3_7;
@@ -766,7 +751,6 @@
 }
 
 void HidlCamera3Device::HidlHalInterface::clear() {
-    mHidlSession_3_8.clear();
     mHidlSession_3_7.clear();
     mHidlSession_3_6.clear();
     mHidlSession_3_5.clear();
@@ -905,16 +889,13 @@
     device::V3_2::StreamConfiguration requestedConfiguration3_2;
     device::V3_4::StreamConfiguration requestedConfiguration3_4;
     device::V3_7::StreamConfiguration requestedConfiguration3_7;
-    device::V3_8::StreamConfiguration requestedConfiguration3_8;
     requestedConfiguration3_2.streams.resize(config->num_streams);
     requestedConfiguration3_4.streams.resize(config->num_streams);
     requestedConfiguration3_7.streams.resize(config->num_streams);
-    requestedConfiguration3_8.streams.resize(config->num_streams);
     for (size_t i = 0; i < config->num_streams; i++) {
         device::V3_2::Stream &dst3_2 = requestedConfiguration3_2.streams[i];
         device::V3_4::Stream &dst3_4 = requestedConfiguration3_4.streams[i];
         device::V3_7::Stream &dst3_7 = requestedConfiguration3_7.streams[i];
-        device::V3_8::Stream &dst3_8 = requestedConfiguration3_8.streams[i];
         camera3::camera_stream_t *src = config->streams[i];
 
         Camera3Stream* cam3stream = Camera3Stream::cast(src);
@@ -963,23 +944,17 @@
             dst3_7.sensorPixelModesUsed[j++] =
                     static_cast<CameraMetadataEnumAndroidSensorPixelMode>(mode);
         }
-        if ((src->dynamic_range_profile !=
-                    ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD) &&
-                (mHidlSession_3_8 == nullptr)) {
+        if (src->dynamic_range_profile !=
+                    ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD) {
             ALOGE("%s: Camera device doesn't support non-standard dynamic range profiles: %" PRIx64,
                     __FUNCTION__, src->dynamic_range_profile);
             return BAD_VALUE;
         }
-        if (src->use_case != ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT &&
-                mHidlSession_3_8 == nullptr) {
+        if (src->use_case != ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT) {
             ALOGE("%s: Camera device doesn't support non-default stream use case %" PRId64 "!",
                     __FUNCTION__, src->use_case);
             return BAD_VALUE;
         }
-        dst3_8.v3_7 = dst3_7;
-        dst3_8.dynamicRangeProfile = mapToHidlDynamicProfile(src->dynamic_range_profile);
-        dst3_8.useCase =
-                static_cast<CameraMetadataEnumAndroidScalerAvailableStreamUseCases>(src->use_case);
         activeStreams.insert(streamId);
         // Create Buffer ID map if necessary
         mBufferRecords.tryCreateBufferCache(streamId);
@@ -1004,10 +979,6 @@
     requestedConfiguration3_7.sessionParams.setToExternal(
             reinterpret_cast<uint8_t*>(const_cast<camera_metadata_t*>(sessionParams)),
             sessionParamSize);
-    requestedConfiguration3_8.operationMode = operationMode;
-    requestedConfiguration3_8.sessionParams.setToExternal(
-            reinterpret_cast<uint8_t*>(const_cast<camera_metadata_t*>(sessionParams)),
-            sessionParamSize);
 
     // Invoke configureStreams
     device::V3_3::HalStreamConfiguration finalConfiguration;
@@ -1053,18 +1024,7 @@
                 return OK;
             };
 
-    // See which version of HAL we have
-    if (mHidlSession_3_8 != nullptr) {
-        ALOGV("%s: v3.8 device found", __FUNCTION__);
-        requestedConfiguration3_8.streamConfigCounter = mNextStreamConfigCounter++;
-        requestedConfiguration3_8.multiResolutionInputImage = config->input_is_multi_resolution;
-        auto err = mHidlSession_3_8->configureStreams_3_8(requestedConfiguration3_8,
-                configStream36Cb);
-        res = postprocConfigStream36(err);
-        if (res != OK) {
-            return res;
-        }
-    } else if (mHidlSession_3_7 != nullptr) {
+    if (mHidlSession_3_7 != nullptr) {
         ALOGV("%s: v3.7 device found", __FUNCTION__);
         requestedConfiguration3_7.streamConfigCounter = mNextStreamConfigCounter++;
         requestedConfiguration3_7.multiResolutionInputImage = config->input_is_multi_resolution;
@@ -1474,15 +1434,10 @@
     return OK;
 }
 
-status_t HidlCamera3Device::HidlHalInterface::repeatingRequestEnd(uint32_t frameNumber,
-        const std::vector<int32_t> &streamIds) {
+status_t HidlCamera3Device::HidlHalInterface::repeatingRequestEnd(uint32_t /*frameNumber*/,
+        const std::vector<int32_t> &/*streamIds*/) {
     ATRACE_NAME("CameraHal::repeatingRequestEnd");
-    if (!valid()) return INVALID_OPERATION;
-
-    if (mHidlSession_3_8.get() != nullptr) {
-        mHidlSession_3_8->repeatingRequestEnd(frameNumber, streamIds);
-    }
-    return OK;
+    return INVALID_OPERATION;
 }
 
 status_t HidlCamera3Device::HidlHalInterface::close() {
diff --git a/services/camera/libcameraservice/device3/hidl/HidlCamera3Device.h b/services/camera/libcameraservice/device3/hidl/HidlCamera3Device.h
index 56c999a..faac83f 100644
--- a/services/camera/libcameraservice/device3/hidl/HidlCamera3Device.h
+++ b/services/camera/libcameraservice/device3/hidl/HidlCamera3Device.h
@@ -22,18 +22,18 @@
 
 namespace android {
 
-using android::hardware::camera::metadata::V3_8::CameraMetadataEnumAndroidRequestAvailableDynamicRangeProfilesMap;
 
 /**
  * CameraDevice for HIDL HAL devices with version CAMERA_DEVICE_API_VERSION_3_0 or higher.
  */
 class HidlCamera3Device :
-            virtual public hardware::camera::device::V3_8::ICameraDeviceCallback,
+            virtual public hardware::camera::device::V3_5::ICameraDeviceCallback,
             public Camera3Device {
   public:
 
-   explicit HidlCamera3Device(const String8& id, bool overrideForPerfClass,
-          bool legacyClient = false) : Camera3Device(id, overrideForPerfClass, legacyClient) { }
+   explicit HidlCamera3Device(std::shared_ptr<CameraServiceProxyWrapper>& cameraServiceProxyWrapper,
+        const String8& id, bool overrideForPerfClass, bool legacyClient = false) :
+        Camera3Device(cameraServiceProxyWrapper, id, overrideForPerfClass, legacyClient) { }
 
     virtual ~HidlCamera3Device() {}
 
@@ -44,8 +44,6 @@
     static hardware::camera::device::V3_2::DataspaceFlags mapToHidlDataspace(
             android_dataspace dataSpace);
     static hardware::camera::device::V3_2::BufferUsageFlags mapToConsumerUsage(uint64_t usage);
-    static CameraMetadataEnumAndroidRequestAvailableDynamicRangeProfilesMap mapToHidlDynamicProfile(
-                    int64_t dynamicRangeProfile);
     static hardware::camera::device::V3_2::StreamRotation mapToStreamRotation(
             camera_stream_rotation_t rotation);
     // Returns a negative error code if the passed-in operation mode is not valid.
@@ -84,10 +82,6 @@
             const hardware::hidl_vec<
                     hardware::camera::device::V3_2::StreamBuffer>& buffers) override;
 
-    hardware::Return<void> notify_3_8(
-            const hardware::hidl_vec<
-                    hardware::camera::device::V3_8::NotifyMsg>& msgs) override;
-
     // Handle one notify message
     void notify(const hardware::camera::device::V3_2::NotifyMsg& msg);
 
@@ -164,8 +158,6 @@
         sp<hardware::camera::device::V3_6::ICameraDeviceSession> mHidlSession_3_6;
         // Valid if ICameraDeviceSession is @3.7 or newer
         sp<hardware::camera::device::V3_7::ICameraDeviceSession> mHidlSession_3_7;
-        // Valid if ICameraDeviceSession is @3.7 or newer
-        sp<hardware::camera::device::V3_8::ICameraDeviceSession> mHidlSession_3_8;
 
         std::shared_ptr<RequestMetadataQueue> mRequestMetadataQueue;
 
diff --git a/services/camera/libcameraservice/device3/hidl/HidlCamera3OutputUtils.cpp b/services/camera/libcameraservice/device3/hidl/HidlCamera3OutputUtils.cpp
index a9a6c66..ff6fc17 100644
--- a/services/camera/libcameraservice/device3/hidl/HidlCamera3OutputUtils.cpp
+++ b/services/camera/libcameraservice/device3/hidl/HidlCamera3OutputUtils.cpp
@@ -63,29 +63,7 @@
 }
 
 void notify(CaptureOutputStates& states,
-        const hardware::camera::device::V3_8::NotifyMsg& msg) {
-    using android::hardware::camera::device::V3_2::MsgType;
-
-    hardware::camera::device::V3_2::NotifyMsg msg_3_2;
-    msg_3_2.type = msg.type;
-    bool hasReadoutTime = false;
-    uint64_t readoutTime = 0;
-    switch (msg.type) {
-        case MsgType::ERROR:
-            msg_3_2.msg.error = msg.msg.error;
-            break;
-        case MsgType::SHUTTER:
-            msg_3_2.msg.shutter = msg.msg.shutter.v3_2;
-            hasReadoutTime = true;
-            readoutTime = msg.msg.shutter.readoutTimestamp;
-            break;
-    }
-    notify(states, msg_3_2, hasReadoutTime, readoutTime);
-}
-
-void notify(CaptureOutputStates& states,
-        const hardware::camera::device::V3_2::NotifyMsg& msg,
-        bool hasReadoutTime, uint64_t readoutTime) {
+        const hardware::camera::device::V3_2::NotifyMsg& msg) {
 
     using android::hardware::camera::device::V3_2::MsgType;
     using android::hardware::camera::device::V3_2::ErrorCode;
@@ -127,8 +105,8 @@
             m.type = CAMERA_MSG_SHUTTER;
             m.message.shutter.frame_number = msg.msg.shutter.frameNumber;
             m.message.shutter.timestamp = msg.msg.shutter.timestamp;
-            m.message.shutter.readout_timestamp_valid = hasReadoutTime;
-            m.message.shutter.readout_timestamp = readoutTime;
+            m.message.shutter.readout_timestamp_valid = false;
+            m.message.shutter.readout_timestamp = 0LL;
             break;
     }
     notify(states, &m);
diff --git a/services/camera/libcameraservice/device3/hidl/HidlCamera3OutputUtils.h b/services/camera/libcameraservice/device3/hidl/HidlCamera3OutputUtils.h
index 583d738..5e6cba6 100644
--- a/services/camera/libcameraservice/device3/hidl/HidlCamera3OutputUtils.h
+++ b/services/camera/libcameraservice/device3/hidl/HidlCamera3OutputUtils.h
@@ -26,18 +26,12 @@
 
 #include <common/CameraDeviceBase.h>
 
-#include <android/hardware/camera/device/3.8/ICameraDeviceCallback.h>
+#include <android/hardware/camera/device/3.5/ICameraDeviceCallback.h>
 
 #include "device3/BufferUtils.h"
-//#include "device3/DistortionMapper.h"
-//#include "device3/ZoomRatioMapper.h"
-//#include "device3/RotateAndCropMapper.h"
 #include "device3/InFlightRequest.h"
 #include "device3/Camera3Stream.h"
-//#include "device3/Camera3OutputStreamInterface.h"
 #include "device3/Camera3OutputUtils.h"
-//#include "utils/SessionStatsBuilder.h"
-//#include "utils/TagMonitor.h"
 
 namespace android {
 
@@ -65,11 +59,7 @@
 
     // Handle one notify message
     void notify(CaptureOutputStates& states,
-            const hardware::camera::device::V3_2::NotifyMsg& msg,
-            bool hasReadoutTime = false, uint64_t readoutTime = 0LL);
-    void notify(CaptureOutputStates& states,
-            const hardware::camera::device::V3_8::NotifyMsg& msg);
-
+            const hardware::camera::device::V3_2::NotifyMsg& msg);
     void requestStreamBuffers(RequestBufferStates& states,
             const hardware::hidl_vec<hardware::camera::device::V3_5::BufferRequest>& bufReqs,
             hardware::camera::device::V3_5::ICameraDeviceCallback::requestStreamBuffers_cb
diff --git a/services/camera/libcameraservice/hidl/VndkVersionMetadataTags.h b/services/camera/libcameraservice/hidl/VndkVersionMetadataTags.h
index d3377f4..74b3700 100644
--- a/services/camera/libcameraservice/hidl/VndkVersionMetadataTags.h
+++ b/services/camera/libcameraservice/hidl/VndkVersionMetadataTags.h
@@ -31,47 +31,47 @@
 std::map<int, std::vector<camera_metadata_tag>> static_api_level_to_keys{
       {30, {
           ANDROID_CONTROL_AVAILABLE_EXTENDED_SCENE_MODE_MAX_SIZES,
+          ANDROID_CONTROL_AVAILABLE_EXTENDED_SCENE_MODE_ZOOM_RATIO_RANGES,
           ANDROID_CONTROL_ZOOM_RATIO_RANGE,
           ANDROID_SCALER_AVAILABLE_ROTATE_AND_CROP_MODES,
-          ANDROID_CONTROL_AVAILABLE_EXTENDED_SCENE_MODE_ZOOM_RATIO_RANGES,
         } },
       {31, {
-          ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE_MAXIMUM_RESOLUTION,
-          ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION,
-          ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE_MAXIMUM_RESOLUTION,
-          ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS_MAXIMUM_RESOLUTION,
-          ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP_MAXIMUM_RESOLUTION,
-          ANDROID_SCALER_AVAILABLE_STALL_DURATIONS_MAXIMUM_RESOLUTION,
-          ANDROID_HEIC_AVAILABLE_HEIC_STALL_DURATIONS_MAXIMUM_RESOLUTION,
-          ANDROID_DEPTH_AVAILABLE_DYNAMIC_DEPTH_STALL_DURATIONS_MAXIMUM_RESOLUTION,
           ANDROID_CONTROL_AVAILABLE_HIGH_SPEED_VIDEO_CONFIGURATIONS_MAXIMUM_RESOLUTION,
-          ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS_MAXIMUM_RESOLUTION,
-          ANDROID_LENS_INTRINSIC_CALIBRATION_MAXIMUM_RESOLUTION,
-          ANDROID_DEPTH_AVAILABLE_DYNAMIC_DEPTH_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION,
-          ANDROID_HEIC_AVAILABLE_HEIC_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION,
-          ANDROID_SCALER_PHYSICAL_CAMERA_MULTI_RESOLUTION_STREAM_CONFIGURATIONS,
-          ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION,
-          ANDROID_SCALER_MULTI_RESOLUTION_STREAM_SUPPORTED,
-          ANDROID_DEPTH_AVAILABLE_DYNAMIC_DEPTH_MIN_FRAME_DURATIONS_MAXIMUM_RESOLUTION,
-          ANDROID_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE_MAXIMUM_RESOLUTION,
           ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS_MAXIMUM_RESOLUTION,
-          ANDROID_LENS_DISTORTION_MAXIMUM_RESOLUTION,
-          ANDROID_SCALER_DEFAULT_SECURE_IMAGE_SIZE,
+          ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS_MAXIMUM_RESOLUTION,
+          ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION,
+          ANDROID_DEPTH_AVAILABLE_DYNAMIC_DEPTH_MIN_FRAME_DURATIONS_MAXIMUM_RESOLUTION,
+          ANDROID_DEPTH_AVAILABLE_DYNAMIC_DEPTH_STALL_DURATIONS_MAXIMUM_RESOLUTION,
+          ANDROID_DEPTH_AVAILABLE_DYNAMIC_DEPTH_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION,
           ANDROID_HEIC_AVAILABLE_HEIC_MIN_FRAME_DURATIONS_MAXIMUM_RESOLUTION,
-          ANDROID_SENSOR_OPAQUE_RAW_SIZE_MAXIMUM_RESOLUTION,
+          ANDROID_HEIC_AVAILABLE_HEIC_STALL_DURATIONS_MAXIMUM_RESOLUTION,
+          ANDROID_HEIC_AVAILABLE_HEIC_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION,
+          ANDROID_LENS_DISTORTION_MAXIMUM_RESOLUTION,
+          ANDROID_LENS_INTRINSIC_CALIBRATION_MAXIMUM_RESOLUTION,
+          ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP_MAXIMUM_RESOLUTION,
+          ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS_MAXIMUM_RESOLUTION,
+          ANDROID_SCALER_AVAILABLE_STALL_DURATIONS_MAXIMUM_RESOLUTION,
+          ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION,
+          ANDROID_SCALER_DEFAULT_SECURE_IMAGE_SIZE,
+          ANDROID_SCALER_MULTI_RESOLUTION_STREAM_SUPPORTED,
+          ANDROID_SCALER_PHYSICAL_CAMERA_MULTI_RESOLUTION_STREAM_CONFIGURATIONS,
+          ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE_MAXIMUM_RESOLUTION,
           ANDROID_SENSOR_INFO_BINNING_FACTOR,
+          ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE_MAXIMUM_RESOLUTION,
+          ANDROID_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE_MAXIMUM_RESOLUTION,
+          ANDROID_SENSOR_OPAQUE_RAW_SIZE_MAXIMUM_RESOLUTION,
         } },
       {32, {
           ANDROID_INFO_DEVICE_STATE_ORIENTATIONS,
         } },
       {33, {
-          ANDROID_FLASH_INFO_STRENGTH_DEFAULT_LEVEL,
           ANDROID_AUTOMOTIVE_LENS_FACING,
           ANDROID_AUTOMOTIVE_LOCATION,
+          ANDROID_FLASH_INFO_STRENGTH_DEFAULT_LEVEL,
+          ANDROID_FLASH_INFO_STRENGTH_MAXIMUM_LEVEL,
+          ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP,
           ANDROID_REQUEST_RECOMMENDED_TEN_BIT_DYNAMIC_RANGE_PROFILE,
           ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES,
-          ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP,
-          ANDROID_FLASH_INFO_STRENGTH_MAXIMUM_LEVEL,
         } },
 };
 
@@ -81,9 +81,9 @@
  */
 std::map<int, std::vector<camera_metadata_tag>> dynamic_api_level_to_keys{
       {30, {
+          ANDROID_CONTROL_EXTENDED_SCENE_MODE,
           ANDROID_CONTROL_ZOOM_RATIO,
           ANDROID_SCALER_ROTATE_AND_CROP,
-          ANDROID_CONTROL_EXTENDED_SCENE_MODE,
         }  },
       {31, {
           ANDROID_SENSOR_PIXEL_MODE,
diff --git a/services/camera/libcameraservice/libcameraservice_fuzzer/Android.bp b/services/camera/libcameraservice/libcameraservice_fuzzer/Android.bp
index 1df730d..d909624 100644
--- a/services/camera/libcameraservice/libcameraservice_fuzzer/Android.bp
+++ b/services/camera/libcameraservice/libcameraservice_fuzzer/Android.bp
@@ -60,7 +60,6 @@
         "android.hardware.camera.device@3.5",
         "android.hardware.camera.device@3.6",
         "android.hardware.camera.device@3.7",
-        "android.hardware.camera.device@3.8",
     ],
     fuzz_config: {
         cc: [
diff --git a/services/camera/libcameraservice/tests/Android.bp b/services/camera/libcameraservice/tests/Android.bp
index 4928faf..4d7798c 100644
--- a/services/camera/libcameraservice/tests/Android.bp
+++ b/services/camera/libcameraservice/tests/Android.bp
@@ -27,8 +27,13 @@
         "external/dynamic_depth/internal",
     ],
 
+    header_libs: [
+        "libmedia_headers",
+    ],
+
     shared_libs: [
         "libbase",
+        "libbinder",
         "libcutils",
         "libcameraservice",
         "libhidlbase",
@@ -49,7 +54,6 @@
         "android.hardware.camera.device@3.2",
         "android.hardware.camera.device@3.4",
         "android.hardware.camera.device@3.7",
-        "android.hardware.camera.device@3.8",
         "android.hidl.token@1.0-utils",
     ],
 
@@ -58,6 +62,7 @@
     ],
 
     srcs: [
+        "CameraPermissionsTest.cpp",
         "CameraProviderManagerTest.cpp",
         "ClientManagerTest.cpp",
         "DepthProcessorTest.cpp",
diff --git a/services/camera/libcameraservice/tests/CameraPermissionsTest.cpp b/services/camera/libcameraservice/tests/CameraPermissionsTest.cpp
new file mode 100644
index 0000000..4359f9b
--- /dev/null
+++ b/services/camera/libcameraservice/tests/CameraPermissionsTest.cpp
@@ -0,0 +1,298 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <android/hardware/BnCameraServiceListener.h>
+#include <android/hardware/BnCameraServiceProxy.h>
+#include <android/hardware/camera2/BnCameraDeviceCallbacks.h>
+#include <android/hardware/ICameraService.h>
+
+#include <private/android_filesystem_config.h>
+
+#include "../CameraService.h"
+#include "../utils/CameraServiceProxyWrapper.h"
+
+#include <gtest/gtest.h>
+
+#include <memory>
+#include <vector>
+
+using namespace android;
+using namespace android::hardware::camera;
+
+// Empty service listener.
+class TestCameraServiceListener : public hardware::BnCameraServiceListener {
+public:
+    virtual ~TestCameraServiceListener() {};
+
+    virtual binder::Status onStatusChanged(int32_t, const String16&) {
+        return binder::Status::ok();
+    };
+
+    virtual binder::Status onPhysicalCameraStatusChanged(int32_t /*status*/,
+            const String16& /*cameraId*/, const String16& /*physicalCameraId*/) {
+        // No op
+        return binder::Status::ok();
+    };
+
+    virtual binder::Status onTorchStatusChanged(int32_t /*status*/, const String16& /*cameraId*/) {
+        return binder::Status::ok();
+    };
+
+    virtual binder::Status onCameraAccessPrioritiesChanged() {
+        // No op
+        return binder::Status::ok();
+    }
+
+    virtual binder::Status onCameraOpened(const String16& /*cameraId*/,
+            const String16& /*clientPackageName*/) {
+        // No op
+        return binder::Status::ok();
+    }
+
+    virtual binder::Status onCameraClosed(const String16& /*cameraId*/) {
+        // No op
+        return binder::Status::ok();
+    }
+
+    virtual binder::Status onTorchStrengthLevelChanged(const String16& /*cameraId*/,
+            int32_t /*torchStrength*/) {
+        // No op
+        return binder::Status::ok();
+    }
+};
+
+// Empty device callback.
+class TestCameraDeviceCallbacks : public hardware::camera2::BnCameraDeviceCallbacks {
+public:
+    TestCameraDeviceCallbacks() {}
+
+    virtual ~TestCameraDeviceCallbacks() {}
+
+    virtual binder::Status onDeviceError(int /*errorCode*/,
+            const CaptureResultExtras& /*resultExtras*/) {
+        return binder::Status::ok();
+    }
+
+    virtual binder::Status onDeviceIdle() {
+        return binder::Status::ok();
+    }
+
+    virtual binder::Status onCaptureStarted(const CaptureResultExtras& /*resultExtras*/,
+            int64_t /*timestamp*/) {
+        return binder::Status::ok();
+    }
+
+    virtual binder::Status onResultReceived(const CameraMetadata& /*metadata*/,
+            const CaptureResultExtras& /*resultExtras*/,
+            const std::vector<PhysicalCaptureResultInfo>& /*physicalResultInfos*/) {
+        return binder::Status::ok();
+    }
+
+    virtual binder::Status onPrepared(int /*streamId*/) {
+        return binder::Status::ok();
+    }
+
+    virtual binder::Status onRepeatingRequestError(
+            int64_t /*lastFrameNumber*/, int32_t /*stoppedSequenceId*/) {
+        return binder::Status::ok();
+    }
+
+    virtual binder::Status onRequestQueueEmpty() {
+        return binder::Status::ok();
+    }
+};
+
+// Override isCameraDisabled from the CameraServiceProxy with a flag.
+class CameraServiceProxyOverride : public ::android::hardware::BnCameraServiceProxy {
+public:
+    CameraServiceProxyOverride() :
+            mCameraServiceProxy(CameraServiceProxyWrapper::getDefaultCameraServiceProxy()),
+            mCameraDisabled(false), mOverrideCameraDisabled(false)
+    { }
+
+    virtual binder::Status getRotateAndCropOverride(const String16& packageName, int lensFacing,
+            int userId, int *ret) override {
+        return mCameraServiceProxy->getRotateAndCropOverride(packageName, lensFacing,
+                userId, ret);
+    }
+
+    virtual binder::Status pingForUserUpdate() override {
+        return mCameraServiceProxy->pingForUserUpdate();
+    }
+
+    virtual binder::Status notifyCameraState(
+            const hardware::CameraSessionStats& cameraSessionStats) override {
+        return mCameraServiceProxy->notifyCameraState(cameraSessionStats);
+    }
+
+    virtual binder::Status isCameraDisabled(bool *ret) override {
+        if (mOverrideCameraDisabled) {
+            *ret = mCameraDisabled;
+            return binder::Status::ok();
+        }
+        return mCameraServiceProxy->isCameraDisabled(ret);
+    }
+
+    void setCameraDisabled(bool cameraDisabled) {
+        mCameraDisabled = cameraDisabled;
+    }
+
+    void setOverrideCameraDisabled(bool overrideCameraDisabled) {
+        mOverrideCameraDisabled = overrideCameraDisabled;
+    }
+
+protected:
+    sp<hardware::ICameraServiceProxy> mCameraServiceProxy;
+    bool mCameraDisabled;
+    bool mOverrideCameraDisabled;
+};
+
+class AutoDisconnectDevice {
+public:
+    AutoDisconnectDevice(sp<hardware::camera2::ICameraDeviceUser> device) :
+            mDevice(device)
+    { }
+
+    ~AutoDisconnectDevice() {
+        if (mDevice != nullptr) {
+            mDevice->disconnect();
+        }
+    }
+
+private:
+    sp<hardware::camera2::ICameraDeviceUser> mDevice;
+};
+
+class CameraPermissionsTest : public ::testing::Test {
+protected:
+    static sp<CameraService> sCameraService;
+    static sp<CameraServiceProxyOverride> sCameraServiceProxy;
+    static std::shared_ptr<CameraServiceProxyWrapper> sCameraServiceProxyWrapper;
+    static uid_t sOldUid;
+
+    static void SetUpTestSuite() {
+        sOldUid = getuid();
+        setuid(AID_CAMERASERVER);
+        sCameraServiceProxy = new CameraServiceProxyOverride();
+        sCameraServiceProxyWrapper =
+            std::make_shared<CameraServiceProxyWrapper>(sCameraServiceProxy);
+        sCameraService = new CameraService(sCameraServiceProxyWrapper);
+        sCameraService->clearCachedVariables();
+    }
+
+    static void TearDownTestSuite() {
+        sCameraServiceProxyWrapper = nullptr;
+        sCameraServiceProxy = nullptr;
+        sCameraService = nullptr;
+        setuid(sOldUid);
+    }
+};
+
+sp<CameraService> CameraPermissionsTest::sCameraService = nullptr;
+sp<CameraServiceProxyOverride> CameraPermissionsTest::sCameraServiceProxy = nullptr;
+std::shared_ptr<CameraServiceProxyWrapper>
+CameraPermissionsTest::sCameraServiceProxyWrapper = nullptr;
+uid_t CameraPermissionsTest::sOldUid = 0;
+
+// Test that camera connections fail with ERROR_DISABLED when the camera is disabled via device
+// policy, and succeed when it isn't.
+TEST_F(CameraPermissionsTest, TestCameraDisabled) {
+    std::vector<hardware::CameraStatus> statuses;
+    sp<TestCameraServiceListener> serviceListener = new TestCameraServiceListener();
+    sCameraService->addListenerTest(serviceListener, &statuses);
+    sCameraServiceProxy->setOverrideCameraDisabled(true);
+
+    sCameraServiceProxy->setCameraDisabled(true);
+    for (auto s : statuses) {
+        sp<TestCameraDeviceCallbacks> callbacks = new TestCameraDeviceCallbacks();
+        sp<hardware::camera2::ICameraDeviceUser> device;
+        binder::Status status =
+                sCameraService->connectDevice(callbacks, String16(s.cameraId), String16(), {},
+                android::CameraService::USE_CALLING_UID, 0/*oomScoreDiff*/,
+                /*targetSdkVersion*/__ANDROID_API_FUTURE__, &device);
+        AutoDisconnectDevice autoDisconnect(device);
+        ASSERT_TRUE(!status.isOk()) << "connectDevice returned OK status";
+        ASSERT_EQ(status.serviceSpecificErrorCode(), hardware::ICameraService::ERROR_DISABLED)
+                << "connectDevice returned exception code " << status.exceptionCode();
+    }
+
+    sCameraServiceProxy->setCameraDisabled(false);
+    for (auto s : statuses) {
+        sp<TestCameraDeviceCallbacks> callbacks = new TestCameraDeviceCallbacks();
+        sp<hardware::camera2::ICameraDeviceUser> device;
+        binder::Status status =
+                sCameraService->connectDevice(callbacks, String16(s.cameraId), String16(), {},
+                android::CameraService::USE_CALLING_UID, 0/*oomScoreDiff*/,
+                /*targetSdkVersion*/__ANDROID_API_FUTURE__, &device);
+        AutoDisconnectDevice autoDisconnect(device);
+        ASSERT_TRUE(status.isOk());
+    }
+}
+
+// Test that consecutive camera connections succeed.
+TEST_F(CameraPermissionsTest, TestConsecutiveConnections) {
+    std::vector<hardware::CameraStatus> statuses;
+    sp<TestCameraServiceListener> serviceListener = new TestCameraServiceListener();
+    sCameraService->addListenerTest(serviceListener, &statuses);
+    sCameraServiceProxy->setOverrideCameraDisabled(false);
+
+    for (auto s : statuses) {
+        sp<TestCameraDeviceCallbacks> callbacks = new TestCameraDeviceCallbacks();
+        sp<hardware::camera2::ICameraDeviceUser> deviceA, deviceB;
+        binder::Status status =
+                sCameraService->connectDevice(callbacks, String16(s.cameraId), String16(), {},
+                android::CameraService::USE_CALLING_UID, 0/*oomScoreDiff*/,
+                /*targetSdkVersion*/__ANDROID_API_FUTURE__, &deviceA);
+        AutoDisconnectDevice autoDisconnectA(deviceA);
+        ASSERT_TRUE(status.isOk()) << "Exception code " << status.exceptionCode() <<
+                " service specific error code " << status.serviceSpecificErrorCode();
+        status =
+                sCameraService->connectDevice(callbacks, String16(s.cameraId), String16(), {},
+                android::CameraService::USE_CALLING_UID, 0/*oomScoreDiff*/,
+                /*targetSdkVersion*/__ANDROID_API_FUTURE__, &deviceB);
+        AutoDisconnectDevice autoDisconnectB(deviceB);
+        ASSERT_TRUE(status.isOk()) << "Exception code " << status.exceptionCode() <<
+                " service specific error code " << status.serviceSpecificErrorCode();
+    }
+}
+
+// Test that consecutive camera connections succeed even when a nonzero oomScoreOffset is provided
+// in the second call.
+TEST_F(CameraPermissionsTest, TestConflictingOomScoreOffset) {
+    std::vector<hardware::CameraStatus> statuses;
+    sp<TestCameraServiceListener> serviceListener = new TestCameraServiceListener();
+    sCameraService->addListenerTest(serviceListener, &statuses);
+    sCameraServiceProxy->setOverrideCameraDisabled(false);
+
+    for (auto s : statuses) {
+        sp<TestCameraDeviceCallbacks> callbacks = new TestCameraDeviceCallbacks();
+        sp<hardware::camera2::ICameraDeviceUser> deviceA, deviceB;
+        binder::Status status =
+                sCameraService->connectDevice(callbacks, String16(s.cameraId), String16(), {},
+                android::CameraService::USE_CALLING_UID, 0/*oomScoreDiff*/,
+                /*targetSdkVersion*/__ANDROID_API_FUTURE__, &deviceA);
+        AutoDisconnectDevice autoDisconnectA(deviceA);
+        ASSERT_TRUE(status.isOk()) << "Exception code " << status.exceptionCode() <<
+                " service specific error code " << status.serviceSpecificErrorCode();
+        status =
+                sCameraService->connectDevice(callbacks, String16(s.cameraId), String16(), {},
+                android::CameraService::USE_CALLING_UID, 1/*oomScoreDiff*/,
+                /*targetSdkVersion*/__ANDROID_API_FUTURE__, &deviceB);
+        AutoDisconnectDevice autoDisconnectB(deviceB);
+        ASSERT_TRUE(status.isOk()) << "Exception code " << status.exceptionCode() <<
+                " service specific error code " << status.serviceSpecificErrorCode();
+    }
+}
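
AutoDisconnectDevice is a small RAII guard so each opened device is disconnected even when an ASSERT_* macro returns early from the test body. The same idea as a reusable scope guard, sketched with a hypothetical template:

    #include <utility>

    // Sketch only: a generic RAII guard in the spirit of AutoDisconnectDevice above.
    template <typename F>
    class ScopeGuard {
      public:
        explicit ScopeGuard(F onExit) : mOnExit(std::move(onExit)) {}
        ~ScopeGuard() { mOnExit(); }
        ScopeGuard(const ScopeGuard&) = delete;
        ScopeGuard& operator=(const ScopeGuard&) = delete;
      private:
        F mOnExit;
    };

    // Usage (C++17 CTAD): the guard runs when the enclosing test scope unwinds.
    // ScopeGuard guard([&] { if (device != nullptr) device->disconnect(); });
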
diff --git a/services/camera/libcameraservice/utils/CameraServiceProxyWrapper.cpp b/services/camera/libcameraservice/utils/CameraServiceProxyWrapper.cpp
index 69175cc..733ecd9 100644
--- a/services/camera/libcameraservice/utils/CameraServiceProxyWrapper.cpp
+++ b/services/camera/libcameraservice/utils/CameraServiceProxyWrapper.cpp
@@ -29,29 +29,29 @@
 using hardware::ICameraServiceProxy;
 using hardware::CameraSessionStats;
 
-Mutex CameraServiceProxyWrapper::sProxyMutex;
-sp<hardware::ICameraServiceProxy> CameraServiceProxyWrapper::sCameraServiceProxy;
-
-Mutex CameraServiceProxyWrapper::mLock;
-std::map<String8, std::shared_ptr<CameraServiceProxyWrapper::CameraSessionStatsWrapper>>
-        CameraServiceProxyWrapper::mSessionStatsMap;
-
 /**
  * CameraSessionStatsWrapper functions
  */
 
-void CameraServiceProxyWrapper::CameraSessionStatsWrapper::onOpen() {
-    Mutex::Autolock l(mLock);
-
-    updateProxyDeviceState(mSessionStats);
+void CameraServiceProxyWrapper::CameraSessionStatsWrapper::updateProxyDeviceState(
+        sp<hardware::ICameraServiceProxy>& proxyBinder) {
+    if (proxyBinder == nullptr) return;
+    proxyBinder->notifyCameraState(mSessionStats);
 }
 
-void CameraServiceProxyWrapper::CameraSessionStatsWrapper::onClose(int32_t latencyMs) {
+void CameraServiceProxyWrapper::CameraSessionStatsWrapper::onOpen(
+        sp<hardware::ICameraServiceProxy>& proxyBinder) {
+    Mutex::Autolock l(mLock);
+    updateProxyDeviceState(proxyBinder);
+}
+
+void CameraServiceProxyWrapper::CameraSessionStatsWrapper::onClose(
+    sp<hardware::ICameraServiceProxy>& proxyBinder, int32_t latencyMs) {
     Mutex::Autolock l(mLock);
 
     mSessionStats.mNewCameraState = CameraSessionStats::CAMERA_STATE_CLOSED;
     mSessionStats.mLatencyMs = latencyMs;
-    updateProxyDeviceState(mSessionStats);
+    updateProxyDeviceState(proxyBinder);
 }
 
 void CameraServiceProxyWrapper::CameraSessionStatsWrapper::onStreamConfigured(
@@ -66,12 +66,13 @@
     }
 }
 
-void CameraServiceProxyWrapper::CameraSessionStatsWrapper::onActive(float maxPreviewFps) {
+void CameraServiceProxyWrapper::CameraSessionStatsWrapper::onActive(
+    sp<hardware::ICameraServiceProxy>& proxyBinder, float maxPreviewFps) {
     Mutex::Autolock l(mLock);
 
     mSessionStats.mNewCameraState = CameraSessionStats::CAMERA_STATE_ACTIVE;
     mSessionStats.mMaxPreviewFps = maxPreviewFps;
-    updateProxyDeviceState(mSessionStats);
+    updateProxyDeviceState(proxyBinder);
 
     // Reset mCreationDuration to -1 to distinguish between 1st session
     // after configuration, and all other sessions after configuration.
@@ -79,6 +80,7 @@
 }
 
 void CameraServiceProxyWrapper::CameraSessionStatsWrapper::onIdle(
+        sp<hardware::ICameraServiceProxy>& proxyBinder,
         int64_t requestCount, int64_t resultErrorCount, bool deviceError,
         const std::string& userTag, int32_t videoStabilizationMode,
         const std::vector<hardware::CameraStreamStats>& streamStats) {
@@ -91,7 +93,7 @@
     mSessionStats.mUserTag = String16(userTag.c_str());
     mSessionStats.mVideoStabilizationMode = videoStabilizationMode;
     mSessionStats.mStreamStats = streamStats;
-    updateProxyDeviceState(mSessionStats);
+    updateProxyDeviceState(proxyBinder);
 
     mSessionStats.mInternalReconfigure = 0;
     mSessionStats.mStreamStats.clear();
@@ -103,19 +105,26 @@
 
 sp<ICameraServiceProxy> CameraServiceProxyWrapper::getCameraServiceProxy() {
 #ifndef __BRILLO__
-    Mutex::Autolock al(sProxyMutex);
-    if (sCameraServiceProxy == nullptr) {
-        sp<IServiceManager> sm = defaultServiceManager();
-        // Use checkService because cameraserver normally starts before the
-        // system server and the proxy service. So the long timeout that getService
-        // has before giving up is inappropriate.
-        sp<IBinder> binder = sm->checkService(String16("media.camera.proxy"));
-        if (binder != nullptr) {
-            sCameraServiceProxy = interface_cast<ICameraServiceProxy>(binder);
-        }
+    Mutex::Autolock al(mProxyMutex);
+    if (mCameraServiceProxy == nullptr) {
+        mCameraServiceProxy = getDefaultCameraServiceProxy();
     }
 #endif
-    return sCameraServiceProxy;
+    return mCameraServiceProxy;
+}
+
+sp<hardware::ICameraServiceProxy> CameraServiceProxyWrapper::getDefaultCameraServiceProxy() {
+#ifndef __BRILLO__
+    sp<IServiceManager> sm = defaultServiceManager();
+    // Use checkService because cameraserver normally starts before the
+    // system server and the proxy service. So the long timeout that getService
+    // has before giving up is inappropriate.
+    sp<IBinder> binder = sm->checkService(String16("media.camera.proxy"));
+    if (binder != nullptr) {
+        return interface_cast<ICameraServiceProxy>(binder);
+    }
+#endif
+    return nullptr;
 }
 
 void CameraServiceProxyWrapper::pingCameraServiceProxy() {
@@ -138,12 +147,6 @@
     return ret;
 }
 
-void CameraServiceProxyWrapper::updateProxyDeviceState(const CameraSessionStats& sessionStats) {
-    sp<ICameraServiceProxy> proxyBinder = getCameraServiceProxy();
-    if (proxyBinder == nullptr) return;
-    proxyBinder->notifyCameraState(sessionStats);
-}
-
 void CameraServiceProxyWrapper::logStreamConfigured(const String8& id,
         int operatingMode, bool internalConfig, int32_t latencyMs) {
     std::shared_ptr<CameraSessionStatsWrapper> sessionStats;
@@ -175,7 +178,8 @@
     }
 
     ALOGV("%s: id %s", __FUNCTION__, id.c_str());
-    sessionStats->onActive(maxPreviewFps);
+    sp<hardware::ICameraServiceProxy> proxyBinder = getCameraServiceProxy();
+    sessionStats->onActive(proxyBinder, maxPreviewFps);
 }
 
 void CameraServiceProxyWrapper::logIdle(const String8& id,
@@ -205,7 +209,8 @@
                 streamStats[i].mStartLatencyMs);
     }
 
-    sessionStats->onIdle(requestCount, resultErrorCount, deviceError, userTag,
+    sp<hardware::ICameraServiceProxy> proxyBinder = getCameraServiceProxy();
+    sessionStats->onIdle(proxyBinder, requestCount, resultErrorCount, deviceError, userTag,
             videoStabilizationMode, streamStats);
 }
 
@@ -235,7 +240,8 @@
 
     ALOGV("%s: id %s, facing %d, effectiveApiLevel %d, isNdk %d, latencyMs %d",
             __FUNCTION__, id.c_str(), facing, effectiveApiLevel, isNdk, latencyMs);
-    sessionStats->onOpen();
+    sp<hardware::ICameraServiceProxy> proxyBinder = getCameraServiceProxy();
+    sessionStats->onOpen(proxyBinder);
 }
 
 void CameraServiceProxyWrapper::logClose(const String8& id, int32_t latencyMs) {
@@ -259,7 +265,8 @@
     }
 
     ALOGV("%s: id %s, latencyMs %d", __FUNCTION__, id.c_str(), latencyMs);
-    sessionStats->onClose(latencyMs);
+    sp<hardware::ICameraServiceProxy> proxyBinder = getCameraServiceProxy();
+    sessionStats->onClose(proxyBinder, latencyMs);
 }
 
 bool CameraServiceProxyWrapper::isCameraDisabled() {
diff --git a/services/camera/libcameraservice/utils/CameraServiceProxyWrapper.h b/services/camera/libcameraservice/utils/CameraServiceProxyWrapper.h
index e34a8f0..6af56c3 100644
--- a/services/camera/libcameraservice/utils/CameraServiceProxyWrapper.h
+++ b/services/camera/libcameraservice/utils/CameraServiceProxyWrapper.h
@@ -32,72 +32,80 @@
 class CameraServiceProxyWrapper {
 private:
     // Guard mCameraServiceProxy
-    static Mutex sProxyMutex;
+    Mutex mProxyMutex;
     // Cached interface to the camera service proxy in system service
-    static sp<hardware::ICameraServiceProxy> sCameraServiceProxy;
+    sp<hardware::ICameraServiceProxy> mCameraServiceProxy;
 
-    struct CameraSessionStatsWrapper {
+    class CameraSessionStatsWrapper {
+    private:
         hardware::CameraSessionStats mSessionStats;
         Mutex mLock; // lock for per camera session stats
 
+        /**
+         * Update the session stats of a given camera device (open/close/active/idle) with
+         * the camera proxy service in the system service
+         */
+        void updateProxyDeviceState(sp<hardware::ICameraServiceProxy>& proxyBinder);
+
+    public:
         CameraSessionStatsWrapper(const String16& cameraId, int facing, int newCameraState,
                 const String16& clientName, int apiLevel, bool isNdk, int32_t latencyMs) :
             mSessionStats(cameraId, facing, newCameraState, clientName, apiLevel, isNdk, latencyMs)
-            {}
+            { }
 
-        void onOpen();
-        void onClose(int32_t latencyMs);
+        void onOpen(sp<hardware::ICameraServiceProxy>& proxyBinder);
+        void onClose(sp<hardware::ICameraServiceProxy>& proxyBinder, int32_t latencyMs);
         void onStreamConfigured(int operatingMode, bool internalReconfig, int32_t latencyMs);
-        void onActive(float maxPreviewFps);
-        void onIdle(int64_t requestCount, int64_t resultErrorCount, bool deviceError,
+        void onActive(sp<hardware::ICameraServiceProxy>& proxyBinder, float maxPreviewFps);
+        void onIdle(sp<hardware::ICameraServiceProxy>& proxyBinder,
+                int64_t requestCount, int64_t resultErrorCount, bool deviceError,
                 const std::string& userTag, int32_t videoStabilizationMode,
                 const std::vector<hardware::CameraStreamStats>& streamStats);
     };
 
     // Lock for camera session stats map
-    static Mutex mLock;
+    Mutex mLock;
     // Map from camera id to the camera's session statistics
-    static std::map<String8, std::shared_ptr<CameraSessionStatsWrapper>> mSessionStatsMap;
+    std::map<String8, std::shared_ptr<CameraSessionStatsWrapper>> mSessionStatsMap;
 
-    /**
-     * Update the session stats of a given camera device (open/close/active/idle) with
-     * the camera proxy service in the system service
-     */
-    static void updateProxyDeviceState(
-            const hardware::CameraSessionStats& sessionStats);
-
-    static sp<hardware::ICameraServiceProxy> getCameraServiceProxy();
+    sp<hardware::ICameraServiceProxy> getCameraServiceProxy();
 
 public:
+    CameraServiceProxyWrapper(sp<hardware::ICameraServiceProxy> serviceProxy = nullptr) :
+            mCameraServiceProxy(serviceProxy)
+    { }
+
+    static sp<hardware::ICameraServiceProxy> getDefaultCameraServiceProxy();
+
     // Open
-    static void logOpen(const String8& id, int facing,
+    void logOpen(const String8& id, int facing,
             const String16& clientPackageName, int apiLevel, bool isNdk,
             int32_t latencyMs);
 
     // Close
-    static void logClose(const String8& id, int32_t latencyMs);
+    void logClose(const String8& id, int32_t latencyMs);
 
     // Stream configuration
-    static void logStreamConfigured(const String8& id, int operatingMode, bool internalReconfig,
+    void logStreamConfigured(const String8& id, int operatingMode, bool internalReconfig,
             int32_t latencyMs);
 
     // Session state becomes active
-    static void logActive(const String8& id, float maxPreviewFps);
+    void logActive(const String8& id, float maxPreviewFps);
 
     // Session state becomes idle
-    static void logIdle(const String8& id,
+    void logIdle(const String8& id,
             int64_t requestCount, int64_t resultErrorCount, bool deviceError,
             const std::string& userTag, int32_t videoStabilizationMode,
             const std::vector<hardware::CameraStreamStats>& streamStats);
 
     // Ping camera service proxy for user update
-    static void pingCameraServiceProxy();
+    void pingCameraServiceProxy();
 
     // Return the current top activity rotate and crop override.
-    static int getRotateAndCropOverride(String16 packageName, int lensFacing, int userId);
+    int getRotateAndCropOverride(String16 packageName, int lensFacing, int userId);
 
     // Detect if the camera is disabled by device policy.
-    static bool isCameraDisabled();
+    bool isCameraDisabled();
 };
 
 } // android
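
The net effect of this header change is that CameraServiceProxyWrapper stops being a bag of statics and becomes an injectable instance, with getDefaultCameraServiceProxy() exposed for callers that still want the system proxy. A minimal usage sketch, assuming a hypothetical FakeCameraServiceProxy test double (the real wiring in CameraService and the tests may differ):

```cpp
// Minimal sketch of the instance-based API; FakeCameraServiceProxy is a
// hypothetical ICameraServiceProxy implementation used only for illustration.
static void exampleUsage() {
    sp<hardware::ICameraServiceProxy> fakeProxy = new FakeCameraServiceProxy();
    auto wrapper = std::make_shared<CameraServiceProxyWrapper>(fakeProxy);
    wrapper->logOpen(String8("0"), /*facing*/ 0, String16("com.example.app"),
            /*apiLevel*/ 2, /*isNdk*/ false, /*latencyMs*/ 25);

    // Default construction keeps the old behavior: getCameraServiceProxy()
    // lazily falls back to getDefaultCameraServiceProxy(), which looks up the
    // "media.camera.proxy" binder via checkService().
    auto defaultWrapper = std::make_shared<CameraServiceProxyWrapper>();
    defaultWrapper->pingCameraServiceProxy();
}
```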
diff --git a/services/camera/libcameraservice/utils/ClientManager.h b/services/camera/libcameraservice/utils/ClientManager.h
index d164885..074c84d 100644
--- a/services/camera/libcameraservice/utils/ClientManager.h
+++ b/services/camera/libcameraservice/utils/ClientManager.h
@@ -527,12 +527,7 @@
         if (!returnIncompatibleClients) {
             // Find evicted clients
 
-            if (conflicting && curPriority < priority) {
-                // Pre-existing conflicting client with higher priority exists
-                evictList.clear();
-                evictList.push_back(client);
-                return evictList;
-            } else if (conflicting && owner == curOwner) {
+            if (conflicting && owner == curOwner) {
                 // Pre-existing conflicting client with the same client owner exists
                 // Open the same device twice -> most recent open wins
                 // Otherwise let the existing client win to avoid behavior differences
@@ -546,6 +541,11 @@
                     evictList.push_back(client);
                     return evictList;
                 }
+            } else if (conflicting && curPriority < priority) {
+                // Pre-existing conflicting client with higher priority exists
+                evictList.clear();
+                evictList.push_back(client);
+                return evictList;
             } else if (conflicting || ((totalCost > mMaxCost && curCost > 0) &&
                     (curPriority >= priority) &&
                     !(highestPriorityOwner == owner && owner == curOwner))) {
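
The reorder above is easiest to read as a sequence of early-exit rules: same-owner conflicts are now resolved by "most recent open wins" before the priority comparison is ever consulted. A hedged, simplified sketch of that ordering (the helper and its int parameters are hypothetical, not the ClientManager API, where priorities are ClientPriority objects):

```cpp
// Hypothetical simplification of the decision order shown in the hunk above.
// Per the patch's comments, curPriority < priority means the pre-existing
// client has the higher priority.
enum class ConflictRule { SameOwnerMostRecentWins, HigherPriorityExistingWins, Other };

static ConflictRule classifyConflict(bool conflicting, int owner, int curOwner,
                                     int priority, int curPriority) {
    if (conflicting && owner == curOwner) {
        // Same owner opened the same device twice: most recent open wins.
        return ConflictRule::SameOwnerMostRecentWins;
    }
    if (conflicting && curPriority < priority) {
        // The existing conflicting client outranks the incoming one.
        return ConflictRule::HigherPriorityExistingWins;
    }
    return ConflictRule::Other;  // falls through to the cost/priority checks
}
```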
diff --git a/services/camera/libcameraservice/utils/SessionConfigurationUtils.cpp b/services/camera/libcameraservice/utils/SessionConfigurationUtils.cpp
index 6493d79..7dde268 100644
--- a/services/camera/libcameraservice/utils/SessionConfigurationUtils.cpp
+++ b/services/camera/libcameraservice/utils/SessionConfigurationUtils.cpp
@@ -19,7 +19,6 @@
 #include "SessionConfigurationUtils.h"
 #include "../api2/DepthCompositeStream.h"
 #include "../api2/HeicCompositeStream.h"
-#include "android/hardware/camera/metadata/3.8/types.h"
 #include "common/CameraDeviceBase.h"
 #include "common/HalConversionsTemplated.h"
 #include "../CameraService.h"
@@ -88,7 +87,7 @@
 int32_t PERF_CLASS_LEVEL =
         property_get_int32("ro.odm.build.media_performance_class", 0);
 
-bool IS_PERF_CLASS = (PERF_CLASS_LEVEL == SDK_VERSION_S);
+bool IS_PERF_CLASS = (PERF_CLASS_LEVEL >= SDK_VERSION_S);
 
 camera3::Size getMaxJpegResolution(const CameraMetadata &metadata,
         bool ultraHighResolution) {
diff --git a/services/camera/libcameraservice/utils/SessionConfigurationUtils.h b/services/camera/libcameraservice/utils/SessionConfigurationUtils.h
index 97ca6b7..a127c7b 100644
--- a/services/camera/libcameraservice/utils/SessionConfigurationUtils.h
+++ b/services/camera/libcameraservice/utils/SessionConfigurationUtils.h
@@ -21,11 +21,9 @@
 #include <camera/camera2/OutputConfiguration.h>
 #include <camera/camera2/SessionConfiguration.h>
 #include <camera/camera2/SubmitInfo.h>
-#include <android/hardware/camera/device/3.8/types.h>
 #include <aidl/android/hardware/camera/device/ICameraDevice.h>
 #include <android/hardware/camera/device/3.4/ICameraDeviceSession.h>
 #include <android/hardware/camera/device/3.7/ICameraDeviceSession.h>
-#include <android/hardware/camera/device/3.8/ICameraDeviceSession.h>
 
 #include <device3/Camera3StreamInterface.h>
 #include <utils/IPCTransport.h>
diff --git a/services/camera/libcameraservice/utils/SessionConfigurationUtilsHidl.cpp b/services/camera/libcameraservice/utils/SessionConfigurationUtilsHidl.cpp
index 07b55e1..5444f2a 100644
--- a/services/camera/libcameraservice/utils/SessionConfigurationUtilsHidl.cpp
+++ b/services/camera/libcameraservice/utils/SessionConfigurationUtilsHidl.cpp
@@ -27,8 +27,6 @@
 using android::camera3::OutputStreamInfo;
 using android::hardware::camera2::ICameraDeviceUser;
 using android::hardware::camera::metadata::V3_6::CameraMetadataEnumAndroidSensorPixelMode;
-using android::hardware::camera::metadata::V3_8::CameraMetadataEnumAndroidRequestAvailableDynamicRangeProfilesMap;
-using android::hardware::camera::metadata::V3_8::CameraMetadataEnumAndroidScalerAvailableStreamUseCases;
 
 namespace android {
 namespace camera3 {
@@ -36,9 +34,9 @@
 namespace SessionConfigurationUtils {
 
 status_t
-convertAidlToHidl38StreamCombination(
+convertAidlToHidl37StreamCombination(
         const aidl::android::hardware::camera::device::StreamConfiguration &aidl,
-        hardware::camera::device::V3_8::StreamConfiguration &hidl) {
+        hardware::camera::device::V3_7::StreamConfiguration &hidl) {
     hidl.operationMode =
         static_cast<hardware::camera::device::V3_2::StreamConfigurationMode>(aidl.operationMode);
     if (aidl.streamConfigCounter < 0) {
@@ -50,97 +48,66 @@
     hidl.streams.resize(aidl.streams.size());
     size_t i = 0;
     for (const auto &stream : aidl.streams) {
-        //hidlv3_8
-        hidl.streams[i].dynamicRangeProfile =
-                static_cast<
-                        CameraMetadataEnumAndroidRequestAvailableDynamicRangeProfilesMap>
-                                (stream.dynamicRangeProfile);
-        hidl.streams[i].useCase =
-                static_cast<
-                        CameraMetadataEnumAndroidScalerAvailableStreamUseCases>
-                                (stream.useCase);
+        if (static_cast<int>(stream.dynamicRangeProfile) !=
+                ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD) {
+            ALOGE("%s Dynamic range profile %" PRId64 " not supported by HIDL", __FUNCTION__,
+                    stream.dynamicRangeProfile);
+            return BAD_VALUE;
+        }
+
+        if (static_cast<int>(stream.useCase) != ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT) {
+            ALOGE("%s Stream use case %" PRId64 "not supported by HIDL", __FUNCTION__,
+                    stream.useCase);
+            return BAD_VALUE;
+        }
 
         // hidl v3_7
-        hidl.streams[i].v3_7.groupId = stream.groupId;
-        hidl.streams[i].v3_7.sensorPixelModesUsed.resize(stream.sensorPixelModesUsed.size());
+        hidl.streams[i].groupId = stream.groupId;
+        hidl.streams[i].sensorPixelModesUsed.resize(stream.sensorPixelModesUsed.size());
         size_t j = 0;
         for (const auto &mode : stream.sensorPixelModesUsed) {
-            hidl.streams[i].v3_7.sensorPixelModesUsed[j] =
+            hidl.streams[i].sensorPixelModesUsed[j] =
                     static_cast<CameraMetadataEnumAndroidSensorPixelMode>(mode);
             j++;
         }
 
         //hidl v3_4
-        hidl.streams[i].v3_7.v3_4.physicalCameraId = stream.physicalCameraId;
+        hidl.streams[i].v3_4.physicalCameraId = stream.physicalCameraId;
 
         if (stream.bufferSize < 0) {
             return BAD_VALUE;
         }
-        hidl.streams[i].v3_7.v3_4.bufferSize = static_cast<uint32_t>(stream.bufferSize);
+        hidl.streams[i].v3_4.bufferSize = static_cast<uint32_t>(stream.bufferSize);
 
         // hidl v3_2
-        hidl.streams[i].v3_7.v3_4.v3_2.id = stream.id;
-        hidl.streams[i].v3_7.v3_4.v3_2.format =
+        hidl.streams[i].v3_4.v3_2.id = stream.id;
+        hidl.streams[i].v3_4.v3_2.format =
                 static_cast<hardware::graphics::common::V1_0::PixelFormat>(stream.format);
 
         if (stream.width < 0 || stream.height < 0) {
             return BAD_VALUE;
         }
-        hidl.streams[i].v3_7.v3_4.v3_2.width = static_cast<uint32_t>(stream.width);
-        hidl.streams[i].v3_7.v3_4.v3_2.height = static_cast<uint32_t>(stream.height);
-        hidl.streams[i].v3_7.v3_4.v3_2.usage =
+        hidl.streams[i].v3_4.v3_2.width = static_cast<uint32_t>(stream.width);
+        hidl.streams[i].v3_4.v3_2.height = static_cast<uint32_t>(stream.height);
+        hidl.streams[i].v3_4.v3_2.usage =
                 static_cast<hardware::camera::device::V3_2::BufferUsageFlags>(stream.usage);
-        hidl.streams[i].v3_7.v3_4.v3_2.streamType =
+        hidl.streams[i].v3_4.v3_2.streamType =
                 static_cast<hardware::camera::device::V3_2::StreamType>(stream.streamType);
-        hidl.streams[i].v3_7.v3_4.v3_2.dataSpace =
+        hidl.streams[i].v3_4.v3_2.dataSpace =
                 static_cast<hardware::camera::device::V3_2::DataspaceFlags>(stream.dataSpace);
-        hidl.streams[i].v3_7.v3_4.v3_2.rotation =
+        hidl.streams[i].v3_4.v3_2.rotation =
                 static_cast<hardware::camera::device::V3_2::StreamRotation>(stream.rotation);
         i++;
     }
     return OK;
 }
 
-void mapStreamInfo(const OutputStreamInfo &streamInfo,
-            camera3::camera_stream_rotation_t rotation, String8 physicalId,
-            int32_t groupId, hardware::camera::device::V3_8::Stream *stream /*out*/) {
-    if (stream == nullptr) {
-        return;
-    }
-
-    stream->v3_7.v3_4.v3_2.streamType = hardware::camera::device::V3_2::StreamType::OUTPUT;
-    stream->v3_7.v3_4.v3_2.width = streamInfo.width;
-    stream->v3_7.v3_4.v3_2.height = streamInfo.height;
-    stream->v3_7.v3_4.v3_2.format = HidlCamera3Device::mapToPixelFormat(streamInfo.format);
-    auto u = streamInfo.consumerUsage;
-    camera3::Camera3OutputStream::applyZSLUsageQuirk(streamInfo.format, &u);
-    stream->v3_7.v3_4.v3_2.usage = HidlCamera3Device::mapToConsumerUsage(u);
-    stream->v3_7.v3_4.v3_2.dataSpace = HidlCamera3Device::mapToHidlDataspace(streamInfo.dataSpace);
-    stream->v3_7.v3_4.v3_2.rotation = HidlCamera3Device::mapToStreamRotation(rotation);
-    stream->v3_7.v3_4.v3_2.id = -1; // Invalid stream id
-    stream->v3_7.v3_4.physicalCameraId = std::string(physicalId.string());
-    stream->v3_7.v3_4.bufferSize = 0;
-    stream->v3_7.groupId = groupId;
-    stream->v3_7.sensorPixelModesUsed.resize(streamInfo.sensorPixelModesUsed.size());
-
-    size_t idx = 0;
-    for (auto mode : streamInfo.sensorPixelModesUsed) {
-        stream->v3_7.sensorPixelModesUsed[idx++] =
-                static_cast<CameraMetadataEnumAndroidSensorPixelMode>(mode);
-    }
-    stream->dynamicRangeProfile =
-        static_cast<CameraMetadataEnumAndroidRequestAvailableDynamicRangeProfilesMap> (
-                streamInfo.dynamicRangeProfile);
-    stream->useCase = static_cast<CameraMetadataEnumAndroidScalerAvailableStreamUseCases>(
-            streamInfo.streamUseCase);
-}
-
 binder::Status
 convertToHALStreamCombination(
         const SessionConfiguration& sessionConfiguration,
         const String8 &logicalCameraId, const CameraMetadata &deviceInfo,
         metadataGetter getMetadata, const std::vector<std::string> &physicalCameraIds,
-        hardware::camera::device::V3_8::StreamConfiguration &streamConfiguration,
+        hardware::camera::device::V3_7::StreamConfiguration &streamConfiguration,
         bool overrideForPerfClass, bool *earlyExit) {
     aidl::android::hardware::camera::device::StreamConfiguration aidlStreamConfiguration;
     auto ret = convertToHALStreamCombination(sessionConfiguration, logicalCameraId, deviceInfo,
@@ -153,38 +120,14 @@
         return binder::Status::ok();
     }
 
-    if (convertAidlToHidl38StreamCombination(aidlStreamConfiguration, streamConfiguration) != OK) {
+    if (convertAidlToHidl37StreamCombination(aidlStreamConfiguration, streamConfiguration) != OK) {
         return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT,
-                "Invalid AIDL->HIDL3.8 conversion");
+                "Invalid AIDL->HIDL3.7 conversion");
     }
 
     return binder::Status::ok();
 }
 
-bool convertHALStreamCombinationFromV38ToV37(
-        hardware::camera::device::V3_7::StreamConfiguration &streamConfigV37,
-        const hardware::camera::device::V3_8::StreamConfiguration &streamConfigV38) {
-    streamConfigV37.streams.resize(streamConfigV38.streams.size());
-    for (size_t i = 0; i < streamConfigV38.streams.size(); i++) {
-        if (static_cast<int64_t>(streamConfigV38.streams[i].dynamicRangeProfile) !=
-                ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD) {
-            // ICameraDevice older than 3.8 doesn't support 10-bit dynamic range profiles
-            // image
-            return false;
-        }
-        if (static_cast<int64_t>(streamConfigV38.streams[i].useCase) !=
-                ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT) {
-            // ICameraDevice older than 3.8 doesn't support stream use case
-            return false;
-        }
-        streamConfigV37.streams[i] = streamConfigV38.streams[i].v3_7;
-    }
-    streamConfigV37.operationMode = streamConfigV38.operationMode;
-    streamConfigV37.sessionParams = streamConfigV38.sessionParams;
-
-    return true;
-}
-
 bool convertHALStreamCombinationFromV37ToV34(
         hardware::camera::device::V3_4::StreamConfiguration &streamConfigV34,
         const hardware::camera::device::V3_7::StreamConfiguration &streamConfigV37) {
diff --git a/services/camera/libcameraservice/utils/SessionConfigurationUtilsHidl.h b/services/camera/libcameraservice/utils/SessionConfigurationUtilsHidl.h
index 66956c5..c47abe0 100644
--- a/services/camera/libcameraservice/utils/SessionConfigurationUtilsHidl.h
+++ b/services/camera/libcameraservice/utils/SessionConfigurationUtilsHidl.h
@@ -16,10 +16,8 @@
 #ifndef ANDROID_SERVERS_CAMERA_SESSION_CONFIGURATION_UTILS_HIDL_H
 #define ANDROID_SERVERS_CAMERA_SESSION_CONFIGURATION_UTILS_HIDL_H
 
-#include <android/hardware/camera/device/3.8/types.h>
 #include <android/hardware/camera/device/3.4/ICameraDeviceSession.h>
 #include <android/hardware/camera/device/3.7/ICameraDeviceSession.h>
-#include <android/hardware/camera/device/3.8/ICameraDeviceSession.h>
 
 #include <utils/SessionConfigurationUtils.h>
 
@@ -30,10 +28,6 @@
 
 namespace SessionConfigurationUtils {
 
-void mapStreamInfo(const camera3::OutputStreamInfo &streamInfo,
-        camera3::camera_stream_rotation_t rotation, String8 physicalId, int32_t groupId,
-        hardware::camera::device::V3_8::Stream *stream /*out*/);
-
 // utility function to convert AIDL SessionConfiguration to HIDL
 // streamConfiguration. Also checks for validity of SessionConfiguration and
 // returns a non-ok binder::Status if the passed in session configuration
@@ -42,16 +36,9 @@
 convertToHALStreamCombination(const SessionConfiguration& sessionConfiguration,
         const String8 &cameraId, const CameraMetadata &deviceInfo,
         metadataGetter getMetadata, const std::vector<std::string> &physicalCameraIds,
-        hardware::camera::device::V3_8::StreamConfiguration &streamConfiguration,
+        hardware::camera::device::V3_7::StreamConfiguration &streamConfiguration,
         bool overrideForPerfClass, bool *earlyExit);
 
-// Utility function to convert a V3_8::StreamConfiguration to
-// V3_7::StreamConfiguration. Return false if the original V3_8 configuration cannot
-// be used by older version HAL.
-bool convertHALStreamCombinationFromV38ToV37(
-        hardware::camera::device::V3_7::StreamConfiguration &streamConfigV37,
-        const hardware::camera::device::V3_8::StreamConfiguration &streamConfigV38);
-
 // Utility function to convert a V3_7::StreamConfiguration to
 // V3_4::StreamConfiguration. Return false if the original V3_7 configuration cannot
 // be used by older version HAL.
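
With the 3.8 types removed, the HIDL path converts AIDL stream combinations straight to V3_7 and rejects anything V3_7 cannot express. A hedged sketch of that gating rule in isolation (the constants are hypothetical stand-ins for the metadata enums checked in convertAidlToHidl37StreamCombination()):

```cpp
#include <cstdint>

// Hypothetical stand-ins for ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD
// and ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT; illustrative values only.
constexpr int64_t kDynamicRangeProfileStandard = 1;
constexpr int64_t kStreamUseCaseDefault = 0;

// A stream can only take the HIDL (<= 3.7) path when it uses the standard
// dynamic range profile and the default stream use case; anything else is
// rejected with BAD_VALUE before reaching the HAL.
static bool representableInHidl37(int64_t dynamicRangeProfile, int64_t useCase) {
    return dynamicRangeProfile == kDynamicRangeProfileStandard &&
           useCase == kStreamUseCaseDefault;
}
```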
diff --git a/services/mediametrics/statsd_codec.cpp b/services/mediametrics/statsd_codec.cpp
index e322d62..a737ba0 100644
--- a/services/mediametrics/statsd_codec.cpp
+++ b/services/mediametrics/statsd_codec.cpp
@@ -390,47 +390,59 @@
     }
     AStatsEvent_writeInt32(event, qpBMaxOri);
 
-    // int32_t configColorStandard = -1;
-    // if (item->getInt32("android.media.mediacodec.config-color-standard", &configColorStandard)) {
-    //     metrics_proto.set_config_color_standard(configColorStandard);
-    // }
-    // AStatsEvent_writeInt32(event, configColorStandard);
+    int32_t configColorStandard = -1;
+    if (item->getInt32("android.media.mediacodec.config-color-standard", &configColorStandard)) {
+        metrics_proto.set_config_color_standard(configColorStandard);
+    }
+    AStatsEvent_writeInt32(event, configColorStandard);
 
-    // int32_t configColorRange = -1;
-    // if (item->getInt32("android.media.mediacodec.config-color-range", &configColorRange)) {
-    //     metrics_proto.set_config_color_range(configColorRange);
-    // }
-    // AStatsEvent_writeInt32(event, configColorRange);
+    int32_t configColorRange = -1;
+    if (item->getInt32("android.media.mediacodec.config-color-range", &configColorRange)) {
+        metrics_proto.set_config_color_range(configColorRange);
+    }
+    AStatsEvent_writeInt32(event, configColorRange);
 
-    // int32_t configColorTransfer = -1;
-    // if (item->getInt32("android.media.mediacodec.config-color-transfer", &configColorTransfer)) {
-    //     metrics_proto.set_config_color_transfer(configColorTransfer);
-    // }
-    // AStatsEvent_writeInt32(event, configColorTransfer);
+    int32_t configColorTransfer = -1;
+    if (item->getInt32("android.media.mediacodec.config-color-transfer", &configColorTransfer)) {
+        metrics_proto.set_config_color_transfer(configColorTransfer);
+    }
+    AStatsEvent_writeInt32(event, configColorTransfer);
 
-    // int32_t parsedColorStandard = -1;
-    // if (item->getInt32("android.media.mediacodec.parsed-color-standard", &parsedColorStandard)) {
-    //     metrics_proto.set_parsed_color_standard(parsedColorStandard);
-    // }
-    // AStatsEvent_writeInt32(event, parsedColorStandard);
+    int32_t parsedColorStandard = -1;
+    if (item->getInt32("android.media.mediacodec.parsed-color-standard", &parsedColorStandard)) {
+        metrics_proto.set_parsed_color_standard(parsedColorStandard);
+    }
+    AStatsEvent_writeInt32(event, parsedColorStandard);
 
-    // int32_t parsedColorRange = -1;
-    // if (item->getInt32("android.media.mediacodec.parsed-color-range", &parsedColorRange)) {
-    //     metrics_proto.set_parsed_color_range(parsedColorRange);
-    // }
-    // AStatsEvent_writeInt32(event, parsedColorRange);
+    int32_t parsedColorRange = -1;
+    if (item->getInt32("android.media.mediacodec.parsed-color-range", &parsedColorRange)) {
+        metrics_proto.set_parsed_color_range(parsedColorRange);
+    }
+    AStatsEvent_writeInt32(event, parsedColorRange);
 
-    // int32_t parsedColorTransfer = -1;
-    // if (item->getInt32("android.media.mediacodec.parsed-color-transfer", &parsedColorTransfer)) {
-    //     metrics_proto.set_parsed_color_transfer(parsedColorTransfer);
-    // }
-    // AStatsEvent_writeInt32(event, parsedColorTransfer);
+    int32_t parsedColorTransfer = -1;
+    if (item->getInt32("android.media.mediacodec.parsed-color-transfer", &parsedColorTransfer)) {
+        metrics_proto.set_parsed_color_transfer(parsedColorTransfer);
+    }
+    AStatsEvent_writeInt32(event, parsedColorTransfer);
 
-    // int32_t hdrMetadataFlags = -1;
-    // if (item->getInt32("android.media.mediacodec.hdr-metadata-flags", &hdrMetadataFlags)) {
-    //     metrics_proto.set_hdr_metadata_flags(hdrMetadataFlags);
-    // }
-    // AStatsEvent_writeInt32(event, hdrMetadataFlags);
+    int32_t hdrStaticInfo = -1;
+    if (item->getInt32("android.media.mediacodec.hdr-static-info", &hdrStaticInfo)) {
+        metrics_proto.set_hdr_static_info(hdrStaticInfo);
+    }
+    AStatsEvent_writeInt32(event, hdrStaticInfo);
+
+    int32_t hdr10PlusInfo = -1;
+    if (item->getInt32("android.media.mediacodec.hdr10-plus-info", &hdr10PlusInfo)) {
+        metrics_proto.set_hdr10_plus_info(hdr10PlusInfo);
+    }
+    AStatsEvent_writeInt32(event, hdr10PlusInfo);
+
+    int32_t hdrFormat = -1;
+    if (item->getInt32("android.media.mediacodec.hdr-format", &hdrFormat)) {
+        metrics_proto.set_hdr_format(hdrFormat);
+    }
+    AStatsEvent_writeInt32(event, hdrFormat);
 
     int err = AStatsEvent_write(event);
     if (err < 0) {