Merge "GraphicsTracker: ensure migrated buffers are overwritten" into main
diff --git a/drm/mediadrm/plugins/clearkey/aidl/DrmPlugin.cpp b/drm/mediadrm/plugins/clearkey/aidl/DrmPlugin.cpp
index 31cb7c0..8a93132 100644
--- a/drm/mediadrm/plugins/clearkey/aidl/DrmPlugin.cpp
+++ b/drm/mediadrm/plugins/clearkey/aidl/DrmPlugin.cpp
@@ -37,7 +37,6 @@
 const int kSecureStopIdStart = 100;
 const std::string kOfflineLicense("\"type\":\"persistent-license\"");
 const std::string kStreaming("Streaming");
-const std::string kTemporaryLicense("\"type\":\"temporary\"");
 const std::string kTrue("True");
 
 const std::string kQueryKeyLicenseType("LicenseType");
diff --git a/drm/mediadrm/plugins/clearkey/common/JsonWebKey.cpp b/drm/mediadrm/plugins/clearkey/common/JsonWebKey.cpp
index ddbc594..cd129ac 100644
--- a/drm/mediadrm/plugins/clearkey/common/JsonWebKey.cpp
+++ b/drm/mediadrm/plugins/clearkey/common/JsonWebKey.cpp
@@ -27,10 +27,7 @@
 const std::string kKeyTypeTag("kty");
 const std::string kKeyTag("k");
 const std::string kKeyIdTag("kid");
-const std::string kMediaSessionType("type");
-const std::string kPersistentLicenseSession("persistent-license");
 const std::string kSymmetricKeyValue("oct");
-const std::string kTemporaryLicenseSession("temporary");
 }  // namespace
 
 namespace clearkeydrm {
diff --git a/media/audioserver/Android.bp b/media/audioserver/Android.bp
index e74fb91..47b48e3 100644
--- a/media/audioserver/Android.bp
+++ b/media/audioserver/Android.bp
@@ -20,12 +20,6 @@
         "-Werror",
     ],
 
-    header_libs: [
-        "libaudiohal_headers",
-        "libmedia_headers",
-        "libmediametrics_headers",
-    ],
-
     defaults: [
         "latest_android_hardware_audio_core_sounddose_ndk_shared",
         "latest_android_media_audio_common_types_cpp_shared",
@@ -39,39 +33,10 @@
         "libaudioflinger",
         "libaudiopolicyservice",
         "libmedialogservice",
-        "libnbaio",
     ],
 
     shared_libs: [
-        "libaudioclient",
-        "libaudioprocessing",
-        "libbinder",
-        "libcutils",
-        "libhidlbase",
-        "liblog",
-        "libmedia",
-        "libmediautils",
-        "libnblog",
-        "libpowermanager",
-        "libutils",
-        "libvibrator",
-    ],
-
-    // TODO check if we still need all of these include directories
-    include_dirs: [
-        "external/sonic",
-        "frameworks/av/media/libaaudio/include",
-        "frameworks/av/media/libaaudio/src",
-        "frameworks/av/media/libaaudio/src/binding",
-        "frameworks/av/services/audioflinger",
-        "frameworks/av/services/audiopolicy",
-        "frameworks/av/services/audiopolicy/common/include",
-        "frameworks/av/services/audiopolicy/common/managerdefinitions/include",
-        "frameworks/av/services/audiopolicy/engine/interface",
-        "frameworks/av/services/audiopolicy/service",
-        "frameworks/av/services/medialog",
-        "frameworks/av/services/oboeservice", // TODO oboeservice is the old folder name for aaudioservice. It will be changed.
-
+        "libhidlbase", // required for threadpool config.
     ],
 
     init_rc: ["audioserver.rc"],
diff --git a/media/codec2/hal/common/MultiAccessUnitHelper.cpp b/media/codec2/hal/common/MultiAccessUnitHelper.cpp
index 55bf7a9..b287b91 100644
--- a/media/codec2/hal/common/MultiAccessUnitHelper.cpp
+++ b/media/codec2/hal/common/MultiAccessUnitHelper.cpp
@@ -327,10 +327,10 @@
                 newWork->worklets.front()->component = inWork->worklets.front()->component;
                 std::vector<std::unique_ptr<C2Tuning>> tunings;
                 for (std::unique_ptr<C2Tuning>& tuning : inWork->worklets.front()->tunings) {
-                    tunings.push_back(std::move(
+                    tunings.push_back(
                             std::unique_ptr<C2Tuning>(
                                     static_cast<C2Tuning*>(
-                                            C2Param::Copy(*(tuning.get())).release()))));
+                                            C2Param::Copy(*(tuning.get())).release())));
                 }
                 newWork->worklets.front()->tunings = std::move(tunings);
             }
@@ -344,7 +344,7 @@
                     << inputOrdinal.frameIndex.peekull()
                     << ") -> newFrameIndex " << newFrameIdx
                     <<" : input ts " << inputOrdinal.timestamp.peekull();
-            sliceWork.push_back(std::move(cloneInputWork(w, w->input.flags)));
+            sliceWork.push_back(cloneInputWork(w, w->input.flags));
             if (!w->input.buffers.empty() && w->input.buffers.front() != nullptr) {
                 sliceWork.back()->input.buffers = std::move(w->input.buffers);
             }
@@ -853,4 +853,4 @@
     mLargeWork.reset();
 }
 
-}  // namespace android
\ No newline at end of file
+}  // namespace android
diff --git a/media/codec2/hal/hidl/1.0/vts/functional/video/VtsHalMediaC2V1_0TargetVideoDecTest.cpp b/media/codec2/hal/hidl/1.0/vts/functional/video/VtsHalMediaC2V1_0TargetVideoDecTest.cpp
index f8fd425..4a956f5 100644
--- a/media/codec2/hal/hidl/1.0/vts/functional/video/VtsHalMediaC2V1_0TargetVideoDecTest.cpp
+++ b/media/codec2/hal/hidl/1.0/vts/functional/video/VtsHalMediaC2V1_0TargetVideoDecTest.cpp
@@ -139,6 +139,20 @@
         mReorderDepth = -1;
         mTimestampDevTest = false;
         mMd5Offset = 0;
+        mIsTunneledCodec = false;
+
+        // For C2 codecs that support tunneling by default, the default value of
+        // C2PortTunneledModeTuning::mode should (!= NONE). Otherwise VTS
+        // can assume that the codec can support regular (non-tunneled decode)
+        queried.clear();
+        c2err = mComponent->query(
+                {}, {C2PortTunneledModeTuning::output::PARAM_TYPE}, C2_MAY_BLOCK, &queried);
+        if (c2err == C2_OK && !queried.empty() && queried.front() != nullptr) {
+            C2TunneledModeStruct::mode_t tunneledMode =
+                    ((C2PortTunneledModeTuning::output*)queried.front().get())->m.mode;
+            mIsTunneledCodec = (tunneledMode != C2TunneledModeStruct::NONE);
+        }
+
         mMd5Enable = false;
         mRefMd5 = nullptr;
 
@@ -308,6 +322,7 @@
 
     bool mEos;
     bool mDisableTest;
+    bool mIsTunneledCodec;
     bool mMd5Enable;
     bool mTimestampDevTest;
     uint64_t mTimestampUs;
@@ -612,11 +627,14 @@
 
     bool signalEOS = std::get<3>(GetParam());
     surfaceMode_t surfMode = std::get<4>(GetParam());
-    mTimestampDevTest = true;
+    // Disable checking timestamp as tunneled codecs doesn't populate
+    // output buffers in C2Work.
+    mTimestampDevTest = !mIsTunneledCodec;
 
     android::Vector<FrameInfo> Info;
 
-    mMd5Enable = true;
+    // Disable md5 checks as tunneled codecs doesn't populate output buffers in C2Work
+    mMd5Enable = !mIsTunneledCodec;
     if (!mChksumFile.compare(sResourceDir)) mMd5Enable = false;
 
     uint32_t format = HAL_PIXEL_FORMAT_YCBCR_420_888;
@@ -712,7 +730,9 @@
     typedef std::unique_lock<std::mutex> ULock;
     ASSERT_EQ(mComponent->start(), C2_OK);
 
-    mTimestampDevTest = true;
+    // Disable checking timestamp as tunneled codecs doesn't populate
+    // output buffers in C2Work.
+    mTimestampDevTest = !mIsTunneledCodec;
     uint32_t timestampOffset = 0;
     uint32_t offset = 0;
     android::Vector<FrameInfo> Info;
diff --git a/media/codec2/sfplugin/CCodec.cpp b/media/codec2/sfplugin/CCodec.cpp
index 0aae23c..373f86e 100644
--- a/media/codec2/sfplugin/CCodec.cpp
+++ b/media/codec2/sfplugin/CCodec.cpp
@@ -222,19 +222,20 @@
     ~HGraphicBufferSourceWrapper() override = default;
 
     status_t connect(const std::shared_ptr<Codec2Client::Component> &comp) override {
-        mNode = new C2OMXNode(comp);
-        mOmxNode = new hardware::media::omx::V1_0::utils::TWOmxNode(mNode);
-        mNode->setFrameSize(mWidth, mHeight);
+        Mutexed<sp<C2OMXNode>>::Locked node(mNode);
+        *node = new C2OMXNode(comp);
+        mOmxNode = new hardware::media::omx::V1_0::utils::TWOmxNode(*node);
+        (*node)->setFrameSize(mWidth, mHeight);
         // Usage is queried during configure(), so setting it beforehand.
         // 64 bit set parameter is existing only in C2OMXNode.
         OMX_U64 usage64 = mConfig.mUsage;
-        status_t res = mNode->setParameter(
+        status_t res = (*node)->setParameter(
                 (OMX_INDEXTYPE)OMX_IndexParamConsumerUsageBits64,
                 &usage64, sizeof(usage64));
 
         if (res != OK) {
             OMX_U32 usage = mConfig.mUsage & 0xFFFFFFFF;
-            (void)mNode->setParameter(
+            (void)(*node)->setParameter(
                     (OMX_INDEXTYPE)OMX_IndexParamConsumerUsageBits,
                     &usage, sizeof(usage));
         }
@@ -244,17 +245,18 @@
     }
 
     void disconnect() override {
-        if (mNode == nullptr) {
+        Mutexed<sp<C2OMXNode>>::Locked node(mNode);
+        if ((*node) == nullptr) {
             return;
         }
-        sp<IOMXBufferSource> source = mNode->getSource();
+        sp<IOMXBufferSource> source = (*node)->getSource();
         if (source == nullptr) {
             ALOGD("GBSWrapper::disconnect: node is not configured with OMXBufferSource.");
             return;
         }
         source->onOmxIdle();
         source->onOmxLoaded();
-        mNode.clear();
+        node->clear();
         mOmxNode.clear();
     }
 
@@ -268,7 +270,11 @@
     }
 
     status_t start() override {
-        sp<IOMXBufferSource> source = mNode->getSource();
+        Mutexed<sp<C2OMXNode>>::Locked node(mNode);
+        if ((*node) == nullptr) {
+            return NO_INIT;
+        }
+        sp<IOMXBufferSource> source = (*node)->getSource();
         if (source == nullptr) {
             return NO_INIT;
         }
@@ -278,7 +284,7 @@
 
         OMX_PARAM_PORTDEFINITIONTYPE param;
         param.nPortIndex = kPortIndexInput;
-        status_t err = mNode->getParameter(OMX_IndexParamPortDefinition,
+        status_t err = (*node)->getParameter(OMX_IndexParamPortDefinition,
                                            &param, sizeof(param));
         if (err == OK) {
             numSlots = param.nBufferCountActual;
@@ -297,6 +303,7 @@
     }
 
     status_t configure(Config &config) {
+        Mutexed<sp<C2OMXNode>>::Locked node(mNode);
         std::stringstream status;
         status_t err = OK;
 
@@ -317,7 +324,7 @@
 
         // pts gap
         if (config.mMinAdjustedFps > 0 || config.mFixedAdjustedFps > 0) {
-            if (mNode != nullptr) {
+            if ((*node) != nullptr) {
                 OMX_PARAM_U32TYPE ptrGapParam = {};
                 ptrGapParam.nSize = sizeof(OMX_PARAM_U32TYPE);
                 float gap = (config.mMinAdjustedFps > 0)
@@ -326,7 +333,7 @@
                 // float -> uint32_t is undefined if the value is negative.
                 // First convert to int32_t to ensure the expected behavior.
                 ptrGapParam.nU32 = int32_t(gap);
-                (void)mNode->setParameter(
+                (void)(*node)->setParameter(
                         (OMX_INDEXTYPE)OMX_IndexParamMaxFrameDurationForBitrateControl,
                         &ptrGapParam, sizeof(ptrGapParam));
             }
@@ -426,8 +433,8 @@
 
         // priority
         if (mConfig.mPriority != config.mPriority) {
-            if (config.mPriority != INT_MAX) {
-                mNode->setPriority(config.mPriority);
+            if (config.mPriority != INT_MAX && (*node) != nullptr) {
+                (*node)->setPriority(config.mPriority);
             }
             mConfig.mPriority = config.mPriority;
         }
@@ -441,24 +448,40 @@
     }
 
     void onInputBufferDone(c2_cntr64_t index) override {
-        mNode->onInputBufferDone(index);
+        Mutexed<sp<C2OMXNode>>::Locked node(mNode);
+        if ((*node) == nullptr) {
+            return;
+        }
+        (*node)->onInputBufferDone(index);
     }
 
     void onInputBufferEmptied() override {
-        mNode->onInputBufferEmptied();
+        Mutexed<sp<C2OMXNode>>::Locked node(mNode);
+        if ((*node) == nullptr) {
+            return;
+        }
+        (*node)->onInputBufferEmptied();
     }
 
     android_dataspace getDataspace() override {
-        return mNode->getDataspace();
+        Mutexed<sp<C2OMXNode>>::Locked node(mNode);
+        if ((*node) == nullptr) {
+            return HAL_DATASPACE_UNKNOWN;
+        }
+        return (*node)->getDataspace();
     }
 
     uint32_t getPixelFormat() override {
-        return mNode->getPixelFormat();
+        Mutexed<sp<C2OMXNode>>::Locked node(mNode);
+        if ((*node) == nullptr) {
+            return PIXEL_FORMAT_UNKNOWN;
+        }
+        return (*node)->getPixelFormat();
     }
 
 private:
     sp<HGraphicBufferSource> mSource;
-    sp<C2OMXNode> mNode;
+    Mutexed<sp<C2OMXNode>> mNode;
     sp<hardware::media::omx::V1_0::IOmxNode> mOmxNode;
     uint32_t mWidth;
     uint32_t mHeight;
@@ -479,33 +502,39 @@
     ~AGraphicBufferSourceWrapper() override = default;
 
     status_t connect(const std::shared_ptr<Codec2Client::Component> &comp) override {
-        mNode = ::ndk::SharedRefBase::make<C2AidlNode>(comp);
-        mNode->setFrameSize(mWidth, mHeight);
+        Mutexed<std::shared_ptr<C2AidlNode>>::Locked node(mNode);
+        *node = ::ndk::SharedRefBase::make<C2AidlNode>(comp);
+        (*node)->setFrameSize(mWidth, mHeight);
         // Usage is queried during configure(), so setting it beforehand.
         uint64_t usage = mConfig.mUsage;
-        (void)mNode->setConsumerUsage((int64_t)usage);
+        (void)(*node)->setConsumerUsage((int64_t)usage);
 
         return fromAidlStatus(mSource->configure(
-                mNode, static_cast<::aidl::android::hardware::graphics::common::Dataspace>(
+                (*node), static_cast<::aidl::android::hardware::graphics::common::Dataspace>(
                         mDataSpace)));
     }
 
     void disconnect() override {
-        if (mNode == nullptr) {
+        Mutexed<std::shared_ptr<C2AidlNode>>::Locked node(mNode);
+        if ((*node) == nullptr) {
             return;
         }
-        std::shared_ptr<IAidlBufferSource> source = mNode->getSource();
+        std::shared_ptr<IAidlBufferSource> source = (*node)->getSource();
         if (source == nullptr) {
             ALOGD("GBSWrapper::disconnect: node is not configured with OMXBufferSource.");
             return;
         }
         (void)source->onStop();
         (void)source->onRelease();
-        mNode.reset();
+        node->reset();
     }
 
     status_t start() override {
-        std::shared_ptr<IAidlBufferSource> source = mNode->getSource();
+        Mutexed<std::shared_ptr<C2AidlNode>>::Locked node(mNode);
+        if ((*node) == nullptr) {
+            return NO_INIT;
+        }
+        std::shared_ptr<IAidlBufferSource> source = (*node)->getSource();
         if (source == nullptr) {
             return NO_INIT;
         }
@@ -513,7 +542,7 @@
         size_t numSlots = 16;
 
         IAidlNode::InputBufferParams param;
-        status_t err = fromAidlStatus(mNode->getInputBufferParams(&param));
+        status_t err = fromAidlStatus((*node)->getInputBufferParams(&param));
         if (err == OK) {
             numSlots = param.bufferCountActual;
         }
@@ -531,6 +560,7 @@
     }
 
     status_t configure(Config &config) {
+        Mutexed<std::shared_ptr<C2AidlNode>>::Locked node(mNode);
         std::stringstream status;
         status_t err = OK;
 
@@ -551,14 +581,14 @@
 
         // pts gap
         if (config.mMinAdjustedFps > 0 || config.mFixedAdjustedFps > 0) {
-            if (mNode != nullptr) {
+            if ((*node) != nullptr) {
                 float gap = (config.mMinAdjustedFps > 0)
                         ? c2_min(INT32_MAX + 0., 1e6 / config.mMinAdjustedFps + 0.5)
                         : c2_max(0. - INT32_MAX, -1e6 / config.mFixedAdjustedFps - 0.5);
                 // float -> uint32_t is undefined if the value is negative.
                 // First convert to int32_t to ensure the expected behavior.
                 int32_t gapUs = int32_t(gap);
-                (void)mNode->setAdjustTimestampGapUs(gapUs);
+                (void)(*node)->setAdjustTimestampGapUs(gapUs);
             }
         }
 
@@ -650,7 +680,7 @@
         // priority
         if (mConfig.mPriority != config.mPriority) {
             if (config.mPriority != INT_MAX) {
-                mNode->setPriority(config.mPriority);
+                (*node)->setPriority(config.mPriority);
             }
             mConfig.mPriority = config.mPriority;
         }
@@ -664,24 +694,40 @@
     }
 
     void onInputBufferDone(c2_cntr64_t index) override {
-        mNode->onInputBufferDone(index);
+        Mutexed<std::shared_ptr<C2AidlNode>>::Locked node(mNode);
+        if ((*node) == nullptr) {
+            return;
+        }
+        (*node)->onInputBufferDone(index);
     }
 
     void onInputBufferEmptied() override {
-        mNode->onInputBufferEmptied();
+        Mutexed<std::shared_ptr<C2AidlNode>>::Locked node(mNode);
+        if ((*node) == nullptr) {
+            return;
+        }
+        (*node)->onInputBufferEmptied();
     }
 
     android_dataspace getDataspace() override {
-        return mNode->getDataspace();
+        Mutexed<std::shared_ptr<C2AidlNode>>::Locked node(mNode);
+        if ((*node) == nullptr) {
+            return HAL_DATASPACE_UNKNOWN;
+        }
+        return (*node)->getDataspace();
     }
 
     uint32_t getPixelFormat() override {
-        return mNode->getPixelFormat();
+        Mutexed<std::shared_ptr<C2AidlNode>>::Locked node(mNode);
+        if ((*node) == nullptr) {
+            return PIXEL_FORMAT_UNKNOWN;
+        }
+        return (*node)->getPixelFormat();
     }
 
 private:
     std::shared_ptr<AGraphicBufferSource> mSource;
-    std::shared_ptr<C2AidlNode> mNode;
+    Mutexed<std::shared_ptr<C2AidlNode>> mNode;
     uint32_t mWidth;
     uint32_t mHeight;
     Config mConfig;
diff --git a/media/codec2/sfplugin/CCodecBufferChannel.cpp b/media/codec2/sfplugin/CCodecBufferChannel.cpp
index 1348ef0..d829523 100644
--- a/media/codec2/sfplugin/CCodecBufferChannel.cpp
+++ b/media/codec2/sfplugin/CCodecBufferChannel.cpp
@@ -228,15 +228,17 @@
 status_t CCodecBufferChannel::setInputSurface(
         const std::shared_ptr<InputSurfaceWrapper> &surface) {
     ALOGV("[%s] setInputSurface", mName);
-    mInputSurface = surface;
-    return mInputSurface->connect(mComponent);
+    Mutexed<std::shared_ptr<InputSurfaceWrapper>>::Locked inputSurface(mInputSurface);
+    *inputSurface = surface;
+    return (*inputSurface)->connect(mComponent);
 }
 
 status_t CCodecBufferChannel::signalEndOfInputStream() {
-    if (mInputSurface == nullptr) {
+    Mutexed<std::shared_ptr<InputSurfaceWrapper>>::Locked inputSurface(mInputSurface);
+    if ((*inputSurface) == nullptr) {
         return INVALID_OPERATION;
     }
-    return mInputSurface->signalEndOfInputStream();
+    return (*inputSurface)->signalEndOfInputStream();
 }
 
 status_t CCodecBufferChannel::queueInputBufferInternal(
@@ -1069,9 +1071,11 @@
             return;
         }
     }
-    if (android::media::codec::provider_->input_surface_throttle()
-            && mInputSurface != nullptr) {
-        mInputSurface->onInputBufferEmptied();
+    if (android::media::codec::provider_->input_surface_throttle()) {
+        Mutexed<std::shared_ptr<InputSurfaceWrapper>>::Locked inputSurface(mInputSurface);
+        if ((*inputSurface) != nullptr) {
+            (*inputSurface)->onInputBufferEmptied();
+        }
     }
     size_t numActiveSlots = 0;
     while (!mPipelineWatcher.lock()->pipelineFull()) {
@@ -1700,7 +1704,7 @@
                 && (hasCryptoOrDescrambler() || conforming)) {
             input->buffers.reset(new SlotInputBuffers(mName));
         } else if (graphic) {
-            if (mInputSurface) {
+            if (mInputSurface.lock()->get()) {
                 input->buffers.reset(new DummyInputBuffers(mName));
             } else if (mMetaMode == MODE_ANW) {
                 input->buffers.reset(new GraphicMetadataInputBuffers(mName));
@@ -1983,7 +1987,7 @@
 
 status_t CCodecBufferChannel::prepareInitialInputBuffers(
         std::map<size_t, sp<MediaCodecBuffer>> *clientInputBuffers, bool retry) {
-    if (mInputSurface) {
+    if (mInputSurface.lock()->get()) {
         return OK;
     }
 
@@ -2109,9 +2113,7 @@
 
 void CCodecBufferChannel::reset() {
     stop();
-    if (mInputSurface != nullptr) {
-        mInputSurface.reset();
-    }
+    mInputSurface.lock()->reset();
     mPipelineWatcher.lock()->flush();
     {
         Mutexed<Input>::Locked input(mInput);
@@ -2206,7 +2208,7 @@
 
 void CCodecBufferChannel::onInputBufferDone(
         uint64_t frameIndex, size_t arrayIndex) {
-    if (mInputSurface) {
+    if (mInputSurface.lock()->get()) {
         return;
     }
     std::shared_ptr<C2Buffer> buffer =
@@ -2263,7 +2265,8 @@
         notifyClient = false;
     }
 
-    if (mInputSurface == nullptr && (work->worklets.size() != 1u
+    bool hasInputSurface = (mInputSurface.lock()->get() != nullptr);
+    if (!hasInputSurface && (work->worklets.size() != 1u
             || !work->worklets.front()
             || !(work->worklets.front()->output.flags &
                  C2FrameData::FLAG_INCOMPLETE))) {
@@ -2472,7 +2475,7 @@
     c2_cntr64_t timestamp =
         worklet->output.ordinal.timestamp + work->input.ordinal.customOrdinal
                 - work->input.ordinal.timestamp;
-    if (mInputSurface != nullptr) {
+    if (hasInputSurface) {
         // When using input surface we need to restore the original input timestamp.
         timestamp = work->input.ordinal.customOrdinal;
     }
@@ -2799,7 +2802,7 @@
 }
 
 void CCodecBufferChannel::setInfoBuffer(const std::shared_ptr<C2InfoBuffer> &buffer) {
-    if (mInputSurface == nullptr) {
+    if (mInputSurface.lock()->get() == nullptr) {
         mInfoBuffers.push_back(buffer);
     } else {
         std::list<std::unique_ptr<C2Work>> items;
@@ -2807,7 +2810,6 @@
         work->input.infoBuffers.emplace_back(*buffer);
         work->worklets.emplace_back(new C2Worklet);
         items.push_back(std::move(work));
-        c2_status_t err = mComponent->queue(&items);
     }
 }
 
diff --git a/media/codec2/sfplugin/CCodecBufferChannel.h b/media/codec2/sfplugin/CCodecBufferChannel.h
index e62742b..f5e268a 100644
--- a/media/codec2/sfplugin/CCodecBufferChannel.h
+++ b/media/codec2/sfplugin/CCodecBufferChannel.h
@@ -391,7 +391,7 @@
     };
     Mutexed<BlockPools> mBlockPools;
 
-    std::shared_ptr<InputSurfaceWrapper> mInputSurface;
+    Mutexed<std::shared_ptr<InputSurfaceWrapper>> mInputSurface;
 
     MetaMode mMetaMode;
 
diff --git a/media/codec2/sfplugin/Codec2Buffer.cpp b/media/codec2/sfplugin/Codec2Buffer.cpp
index 2550dcf..164a1e0 100644
--- a/media/codec2/sfplugin/Codec2Buffer.cpp
+++ b/media/codec2/sfplugin/Codec2Buffer.cpp
@@ -43,6 +43,7 @@
 #include <C2Debug.h>
 
 #include "Codec2Buffer.h"
+#include "Codec2BufferUtils.h"
 
 namespace android {
 
@@ -215,482 +216,6 @@
     mBufferRef.reset();
 }
 
-// GraphicView2MediaImageConverter
-
-namespace {
-
-class GraphicView2MediaImageConverter {
-public:
-    /**
-     * Creates a C2GraphicView <=> MediaImage converter
-     *
-     * \param view C2GraphicView object
-     * \param format buffer format
-     * \param copy whether the converter is used for copy or not
-     */
-    GraphicView2MediaImageConverter(
-            const C2GraphicView &view, const sp<AMessage> &format, bool copy)
-        : mInitCheck(NO_INIT),
-          mView(view),
-          mWidth(view.width()),
-          mHeight(view.height()),
-          mAllocatedDepth(0),
-          mBackBufferSize(0),
-          mMediaImage(new ABuffer(sizeof(MediaImage2))) {
-        ATRACE_CALL();
-        if (!format->findInt32(KEY_COLOR_FORMAT, &mClientColorFormat)) {
-            mClientColorFormat = COLOR_FormatYUV420Flexible;
-        }
-        if (!format->findInt32("android._color-format", &mComponentColorFormat)) {
-            mComponentColorFormat = COLOR_FormatYUV420Flexible;
-        }
-        if (view.error() != C2_OK) {
-            ALOGD("Converter: view.error() = %d", view.error());
-            mInitCheck = BAD_VALUE;
-            return;
-        }
-        MediaImage2 *mediaImage = (MediaImage2 *)mMediaImage->base();
-        const C2PlanarLayout &layout = view.layout();
-        if (layout.numPlanes == 0) {
-            ALOGD("Converter: 0 planes");
-            mInitCheck = BAD_VALUE;
-            return;
-        }
-        memset(mediaImage, 0, sizeof(*mediaImage));
-        mAllocatedDepth = layout.planes[0].allocatedDepth;
-        uint32_t bitDepth = layout.planes[0].bitDepth;
-
-        // align width and height to support subsampling cleanly
-        uint32_t stride = align(view.crop().width, 2) * divUp(layout.planes[0].allocatedDepth, 8u);
-        uint32_t vStride = align(view.crop().height, 2);
-
-        bool tryWrapping = !copy;
-
-        switch (layout.type) {
-            case C2PlanarLayout::TYPE_YUV: {
-                mediaImage->mType = MediaImage2::MEDIA_IMAGE_TYPE_YUV;
-                if (layout.numPlanes != 3) {
-                    ALOGD("Converter: %d planes for YUV layout", layout.numPlanes);
-                    mInitCheck = BAD_VALUE;
-                    return;
-                }
-                std::optional<int> clientBitDepth = {};
-                switch (mClientColorFormat) {
-                    case COLOR_FormatYUVP010:
-                        clientBitDepth = 10;
-                        break;
-                    case COLOR_FormatYUV411PackedPlanar:
-                    case COLOR_FormatYUV411Planar:
-                    case COLOR_FormatYUV420Flexible:
-                    case COLOR_FormatYUV420PackedPlanar:
-                    case COLOR_FormatYUV420PackedSemiPlanar:
-                    case COLOR_FormatYUV420Planar:
-                    case COLOR_FormatYUV420SemiPlanar:
-                    case COLOR_FormatYUV422Flexible:
-                    case COLOR_FormatYUV422PackedPlanar:
-                    case COLOR_FormatYUV422PackedSemiPlanar:
-                    case COLOR_FormatYUV422Planar:
-                    case COLOR_FormatYUV422SemiPlanar:
-                    case COLOR_FormatYUV444Flexible:
-                    case COLOR_FormatYUV444Interleaved:
-                        clientBitDepth = 8;
-                        break;
-                    default:
-                        // no-op; used with optional
-                        break;
-
-                }
-                // conversion fails if client bit-depth and the component bit-depth differs
-                if ((clientBitDepth) && (bitDepth != clientBitDepth.value())) {
-                    ALOGD("Bit depth of client: %d and component: %d differs",
-                        *clientBitDepth, bitDepth);
-                    mInitCheck = BAD_VALUE;
-                    return;
-                }
-                C2PlaneInfo yPlane = layout.planes[C2PlanarLayout::PLANE_Y];
-                C2PlaneInfo uPlane = layout.planes[C2PlanarLayout::PLANE_U];
-                C2PlaneInfo vPlane = layout.planes[C2PlanarLayout::PLANE_V];
-                if (yPlane.channel != C2PlaneInfo::CHANNEL_Y
-                        || uPlane.channel != C2PlaneInfo::CHANNEL_CB
-                        || vPlane.channel != C2PlaneInfo::CHANNEL_CR) {
-                    ALOGD("Converter: not YUV layout");
-                    mInitCheck = BAD_VALUE;
-                    return;
-                }
-                bool yuv420888 = yPlane.rowSampling == 1 && yPlane.colSampling == 1
-                        && uPlane.rowSampling == 2 && uPlane.colSampling == 2
-                        && vPlane.rowSampling == 2 && vPlane.colSampling == 2;
-                if (yuv420888) {
-                    for (uint32_t i = 0; i < 3; ++i) {
-                        const C2PlaneInfo &plane = layout.planes[i];
-                        if (plane.allocatedDepth != 8 || plane.bitDepth != 8) {
-                            yuv420888 = false;
-                            break;
-                        }
-                    }
-                    yuv420888 = yuv420888 && yPlane.colInc == 1 && uPlane.rowInc == vPlane.rowInc;
-                }
-                int32_t copyFormat = mClientColorFormat;
-                if (yuv420888 && mClientColorFormat == COLOR_FormatYUV420Flexible) {
-                    if (uPlane.colInc == 2 && vPlane.colInc == 2
-                            && yPlane.rowInc == uPlane.rowInc) {
-                        copyFormat = COLOR_FormatYUV420PackedSemiPlanar;
-                    } else if (uPlane.colInc == 1 && vPlane.colInc == 1
-                            && yPlane.rowInc == uPlane.rowInc * 2) {
-                        copyFormat = COLOR_FormatYUV420PackedPlanar;
-                    }
-                }
-                ALOGV("client_fmt=0x%x y:{colInc=%d rowInc=%d} u:{colInc=%d rowInc=%d} "
-                        "v:{colInc=%d rowInc=%d}",
-                        mClientColorFormat,
-                        yPlane.colInc, yPlane.rowInc,
-                        uPlane.colInc, uPlane.rowInc,
-                        vPlane.colInc, vPlane.rowInc);
-                switch (copyFormat) {
-                    case COLOR_FormatYUV420Flexible:
-                    case COLOR_FormatYUV420Planar:
-                    case COLOR_FormatYUV420PackedPlanar:
-                        mediaImage->mPlane[mediaImage->Y].mOffset = 0;
-                        mediaImage->mPlane[mediaImage->Y].mColInc = 1;
-                        mediaImage->mPlane[mediaImage->Y].mRowInc = stride;
-                        mediaImage->mPlane[mediaImage->Y].mHorizSubsampling = 1;
-                        mediaImage->mPlane[mediaImage->Y].mVertSubsampling = 1;
-
-                        mediaImage->mPlane[mediaImage->U].mOffset = stride * vStride;
-                        mediaImage->mPlane[mediaImage->U].mColInc = 1;
-                        mediaImage->mPlane[mediaImage->U].mRowInc = stride / 2;
-                        mediaImage->mPlane[mediaImage->U].mHorizSubsampling = 2;
-                        mediaImage->mPlane[mediaImage->U].mVertSubsampling = 2;
-
-                        mediaImage->mPlane[mediaImage->V].mOffset = stride * vStride * 5 / 4;
-                        mediaImage->mPlane[mediaImage->V].mColInc = 1;
-                        mediaImage->mPlane[mediaImage->V].mRowInc = stride / 2;
-                        mediaImage->mPlane[mediaImage->V].mHorizSubsampling = 2;
-                        mediaImage->mPlane[mediaImage->V].mVertSubsampling = 2;
-
-                        if (tryWrapping && mClientColorFormat != COLOR_FormatYUV420Flexible) {
-                            tryWrapping = yuv420888 && uPlane.colInc == 1 && vPlane.colInc == 1
-                                    && yPlane.rowInc == uPlane.rowInc * 2
-                                    && view.data()[0] < view.data()[1]
-                                    && view.data()[1] < view.data()[2];
-                        }
-                        break;
-
-                    case COLOR_FormatYUV420SemiPlanar:
-                    case COLOR_FormatYUV420PackedSemiPlanar:
-                        mediaImage->mPlane[mediaImage->Y].mOffset = 0;
-                        mediaImage->mPlane[mediaImage->Y].mColInc = 1;
-                        mediaImage->mPlane[mediaImage->Y].mRowInc = stride;
-                        mediaImage->mPlane[mediaImage->Y].mHorizSubsampling = 1;
-                        mediaImage->mPlane[mediaImage->Y].mVertSubsampling = 1;
-
-                        mediaImage->mPlane[mediaImage->U].mOffset = stride * vStride;
-                        mediaImage->mPlane[mediaImage->U].mColInc = 2;
-                        mediaImage->mPlane[mediaImage->U].mRowInc = stride;
-                        mediaImage->mPlane[mediaImage->U].mHorizSubsampling = 2;
-                        mediaImage->mPlane[mediaImage->U].mVertSubsampling = 2;
-
-                        mediaImage->mPlane[mediaImage->V].mOffset = stride * vStride + 1;
-                        mediaImage->mPlane[mediaImage->V].mColInc = 2;
-                        mediaImage->mPlane[mediaImage->V].mRowInc = stride;
-                        mediaImage->mPlane[mediaImage->V].mHorizSubsampling = 2;
-                        mediaImage->mPlane[mediaImage->V].mVertSubsampling = 2;
-
-                        if (tryWrapping && mClientColorFormat != COLOR_FormatYUV420Flexible) {
-                            tryWrapping = yuv420888 && uPlane.colInc == 2 && vPlane.colInc == 2
-                                    && yPlane.rowInc == uPlane.rowInc
-                                    && view.data()[0] < view.data()[1]
-                                    && view.data()[1] < view.data()[2];
-                        }
-                        break;
-
-                    case COLOR_FormatYUVP010:
-                        // stride is in bytes
-                        mediaImage->mPlane[mediaImage->Y].mOffset = 0;
-                        mediaImage->mPlane[mediaImage->Y].mColInc = 2;
-                        mediaImage->mPlane[mediaImage->Y].mRowInc = stride;
-                        mediaImage->mPlane[mediaImage->Y].mHorizSubsampling = 1;
-                        mediaImage->mPlane[mediaImage->Y].mVertSubsampling = 1;
-
-                        mediaImage->mPlane[mediaImage->U].mOffset = stride * vStride;
-                        mediaImage->mPlane[mediaImage->U].mColInc = 4;
-                        mediaImage->mPlane[mediaImage->U].mRowInc = stride;
-                        mediaImage->mPlane[mediaImage->U].mHorizSubsampling = 2;
-                        mediaImage->mPlane[mediaImage->U].mVertSubsampling = 2;
-
-                        mediaImage->mPlane[mediaImage->V].mOffset = stride * vStride + 2;
-                        mediaImage->mPlane[mediaImage->V].mColInc = 4;
-                        mediaImage->mPlane[mediaImage->V].mRowInc = stride;
-                        mediaImage->mPlane[mediaImage->V].mHorizSubsampling = 2;
-                        mediaImage->mPlane[mediaImage->V].mVertSubsampling = 2;
-                        if (tryWrapping) {
-                            tryWrapping = yPlane.allocatedDepth == 16
-                                    && uPlane.allocatedDepth == 16
-                                    && vPlane.allocatedDepth == 16
-                                    && yPlane.bitDepth == 10
-                                    && uPlane.bitDepth == 10
-                                    && vPlane.bitDepth == 10
-                                    && yPlane.rightShift == 6
-                                    && uPlane.rightShift == 6
-                                    && vPlane.rightShift == 6
-                                    && yPlane.rowSampling == 1 && yPlane.colSampling == 1
-                                    && uPlane.rowSampling == 2 && uPlane.colSampling == 2
-                                    && vPlane.rowSampling == 2 && vPlane.colSampling == 2
-                                    && yPlane.colInc == 2
-                                    && uPlane.colInc == 4
-                                    && vPlane.colInc == 4
-                                    && yPlane.rowInc == uPlane.rowInc
-                                    && yPlane.rowInc == vPlane.rowInc;
-                        }
-                        break;
-
-                    default: {
-                        // default to fully planar format --- this will be overridden if wrapping
-                        // TODO: keep interleaved format
-                        int32_t colInc = divUp(mAllocatedDepth, 8u);
-                        int32_t rowInc = stride * colInc / yPlane.colSampling;
-                        mediaImage->mPlane[mediaImage->Y].mOffset = 0;
-                        mediaImage->mPlane[mediaImage->Y].mColInc = colInc;
-                        mediaImage->mPlane[mediaImage->Y].mRowInc = rowInc;
-                        mediaImage->mPlane[mediaImage->Y].mHorizSubsampling = yPlane.colSampling;
-                        mediaImage->mPlane[mediaImage->Y].mVertSubsampling = yPlane.rowSampling;
-                        int32_t offset = rowInc * vStride / yPlane.rowSampling;
-
-                        rowInc = stride * colInc / uPlane.colSampling;
-                        mediaImage->mPlane[mediaImage->U].mOffset = offset;
-                        mediaImage->mPlane[mediaImage->U].mColInc = colInc;
-                        mediaImage->mPlane[mediaImage->U].mRowInc = rowInc;
-                        mediaImage->mPlane[mediaImage->U].mHorizSubsampling = uPlane.colSampling;
-                        mediaImage->mPlane[mediaImage->U].mVertSubsampling = uPlane.rowSampling;
-                        offset += rowInc * vStride / uPlane.rowSampling;
-
-                        rowInc = stride * colInc / vPlane.colSampling;
-                        mediaImage->mPlane[mediaImage->V].mOffset = offset;
-                        mediaImage->mPlane[mediaImage->V].mColInc = colInc;
-                        mediaImage->mPlane[mediaImage->V].mRowInc = rowInc;
-                        mediaImage->mPlane[mediaImage->V].mHorizSubsampling = vPlane.colSampling;
-                        mediaImage->mPlane[mediaImage->V].mVertSubsampling = vPlane.rowSampling;
-                        break;
-                    }
-                }
-                break;
-            }
-
-            case C2PlanarLayout::TYPE_YUVA:
-                ALOGD("Converter: unrecognized color format "
-                        "(client %d component %d) for YUVA layout",
-                        mClientColorFormat, mComponentColorFormat);
-                mInitCheck = NO_INIT;
-                return;
-            case C2PlanarLayout::TYPE_RGB:
-                mediaImage->mType = MediaImage2::MEDIA_IMAGE_TYPE_RGB;
-                // TODO: support MediaImage layout
-                switch (mClientColorFormat) {
-                    case COLOR_FormatSurface:
-                    case COLOR_FormatRGBFlexible:
-                    case COLOR_Format24bitBGR888:
-                    case COLOR_Format24bitRGB888:
-                        ALOGD("Converter: accept color format "
-                                "(client %d component %d) for RGB layout",
-                                mClientColorFormat, mComponentColorFormat);
-                        break;
-                    default:
-                        ALOGD("Converter: unrecognized color format "
-                                "(client %d component %d) for RGB layout",
-                                mClientColorFormat, mComponentColorFormat);
-                        mInitCheck = BAD_VALUE;
-                        return;
-                }
-                if (layout.numPlanes != 3) {
-                    ALOGD("Converter: %d planes for RGB layout", layout.numPlanes);
-                    mInitCheck = BAD_VALUE;
-                    return;
-                }
-                break;
-            case C2PlanarLayout::TYPE_RGBA:
-                mediaImage->mType = MediaImage2::MEDIA_IMAGE_TYPE_RGBA;
-                // TODO: support MediaImage layout
-                switch (mClientColorFormat) {
-                    case COLOR_FormatSurface:
-                    case COLOR_FormatRGBAFlexible:
-                    case COLOR_Format32bitABGR8888:
-                    case COLOR_Format32bitARGB8888:
-                    case COLOR_Format32bitBGRA8888:
-                        ALOGD("Converter: accept color format "
-                                "(client %d component %d) for RGBA layout",
-                                mClientColorFormat, mComponentColorFormat);
-                        break;
-                    default:
-                        ALOGD("Converter: unrecognized color format "
-                                "(client %d component %d) for RGBA layout",
-                                mClientColorFormat, mComponentColorFormat);
-                        mInitCheck = BAD_VALUE;
-                        return;
-                }
-                if (layout.numPlanes != 4) {
-                    ALOGD("Converter: %d planes for RGBA layout", layout.numPlanes);
-                    mInitCheck = BAD_VALUE;
-                    return;
-                }
-                break;
-            default:
-                mediaImage->mType = MediaImage2::MEDIA_IMAGE_TYPE_UNKNOWN;
-                if (layout.numPlanes == 1) {
-                    const C2PlaneInfo &plane = layout.planes[0];
-                    if (plane.colInc < 0 || plane.rowInc < 0) {
-                        // Copy-only if we have negative colInc/rowInc
-                        tryWrapping = false;
-                    }
-                    mediaImage->mPlane[0].mOffset = 0;
-                    mediaImage->mPlane[0].mColInc = std::abs(plane.colInc);
-                    mediaImage->mPlane[0].mRowInc = std::abs(plane.rowInc);
-                    mediaImage->mPlane[0].mHorizSubsampling = plane.colSampling;
-                    mediaImage->mPlane[0].mVertSubsampling = plane.rowSampling;
-                } else {
-                    ALOGD("Converter: unrecognized layout: color format (client %d component %d)",
-                            mClientColorFormat, mComponentColorFormat);
-                    mInitCheck = NO_INIT;
-                    return;
-                }
-                break;
-        }
-        if (tryWrapping) {
-            // try to map directly. check if the planes are near one another
-            const uint8_t *minPtr = mView.data()[0];
-            const uint8_t *maxPtr = mView.data()[0];
-            int32_t planeSize = 0;
-            for (uint32_t i = 0; i < layout.numPlanes; ++i) {
-                const C2PlaneInfo &plane = layout.planes[i];
-                int64_t planeStride = std::abs(plane.rowInc / plane.colInc);
-                ssize_t minOffset = plane.minOffset(
-                        mWidth / plane.colSampling, mHeight / plane.rowSampling);
-                ssize_t maxOffset = plane.maxOffset(
-                        mWidth / plane.colSampling, mHeight / plane.rowSampling);
-                if (minPtr > mView.data()[i] + minOffset) {
-                    minPtr = mView.data()[i] + minOffset;
-                }
-                if (maxPtr < mView.data()[i] + maxOffset) {
-                    maxPtr = mView.data()[i] + maxOffset;
-                }
-                planeSize += planeStride * divUp(mAllocatedDepth, 8u)
-                        * align(mHeight, 64) / plane.rowSampling;
-            }
-
-            if (minPtr == mView.data()[0] && (maxPtr - minPtr) <= planeSize) {
-                // FIXME: this is risky as reading/writing data out of bound results
-                //        in an undefined behavior, but gralloc does assume a
-                //        contiguous mapping
-                for (uint32_t i = 0; i < layout.numPlanes; ++i) {
-                    const C2PlaneInfo &plane = layout.planes[i];
-                    mediaImage->mPlane[i].mOffset = mView.data()[i] - minPtr;
-                    mediaImage->mPlane[i].mColInc = plane.colInc;
-                    mediaImage->mPlane[i].mRowInc = plane.rowInc;
-                    mediaImage->mPlane[i].mHorizSubsampling = plane.colSampling;
-                    mediaImage->mPlane[i].mVertSubsampling = plane.rowSampling;
-                }
-                mWrapped = new ABuffer(const_cast<uint8_t *>(minPtr), maxPtr - minPtr);
-                ALOGV("Converter: wrapped (capacity=%zu)", mWrapped->capacity());
-            }
-        }
-        mediaImage->mNumPlanes = layout.numPlanes;
-        mediaImage->mWidth = view.crop().width;
-        mediaImage->mHeight = view.crop().height;
-        mediaImage->mBitDepth = bitDepth;
-        mediaImage->mBitDepthAllocated = mAllocatedDepth;
-
-        uint32_t bufferSize = 0;
-        for (uint32_t i = 0; i < layout.numPlanes; ++i) {
-            const C2PlaneInfo &plane = layout.planes[i];
-            if (plane.allocatedDepth < plane.bitDepth
-                    || plane.rightShift != plane.allocatedDepth - plane.bitDepth) {
-                ALOGD("rightShift value of %u unsupported", plane.rightShift);
-                mInitCheck = BAD_VALUE;
-                return;
-            }
-            if (plane.allocatedDepth > 8 && plane.endianness != C2PlaneInfo::NATIVE) {
-                ALOGD("endianness value of %u unsupported", plane.endianness);
-                mInitCheck = BAD_VALUE;
-                return;
-            }
-            if (plane.allocatedDepth != mAllocatedDepth || plane.bitDepth != bitDepth) {
-                ALOGD("different allocatedDepth/bitDepth per plane unsupported");
-                mInitCheck = BAD_VALUE;
-                return;
-            }
-            // stride is in bytes
-            bufferSize += stride * vStride / plane.rowSampling / plane.colSampling;
-        }
-
-        mBackBufferSize = bufferSize;
-        mInitCheck = OK;
-    }
-
-    status_t initCheck() const { return mInitCheck; }
-
-    uint32_t backBufferSize() const { return mBackBufferSize; }
-
-    /**
-     * Wrap C2GraphicView using a MediaImage2. Note that if not wrapped, the content is not mapped
-     * in this function --- the caller should use CopyGraphicView2MediaImage() function to copy the
-     * data into a backing buffer explicitly.
-     *
-     * \return media buffer. This is null if wrapping failed.
-     */
-    sp<ABuffer> wrap() const {
-        if (mBackBuffer == nullptr) {
-            return mWrapped;
-        }
-        return nullptr;
-    }
-
-    bool setBackBuffer(const sp<ABuffer> &backBuffer) {
-        if (backBuffer == nullptr) {
-            return false;
-        }
-        if (backBuffer->capacity() < mBackBufferSize) {
-            return false;
-        }
-        backBuffer->setRange(0, mBackBufferSize);
-        mBackBuffer = backBuffer;
-        return true;
-    }
-
-    /**
-     * Copy C2GraphicView to MediaImage2.
-     */
-    status_t copyToMediaImage() {
-        ATRACE_CALL();
-        if (mInitCheck != OK) {
-            return mInitCheck;
-        }
-        return ImageCopy(mBackBuffer->base(), getMediaImage(), mView);
-    }
-
-    const sp<ABuffer> &imageData() const { return mMediaImage; }
-
-private:
-    status_t mInitCheck;
-
-    const C2GraphicView mView;
-    uint32_t mWidth;
-    uint32_t mHeight;
-    int32_t mClientColorFormat;  ///< SDK color format for MediaImage
-    int32_t mComponentColorFormat;  ///< SDK color format from component
-    sp<ABuffer> mWrapped;  ///< wrapped buffer (if we can map C2Buffer to an ABuffer)
-    uint32_t mAllocatedDepth;
-    uint32_t mBackBufferSize;
-    sp<ABuffer> mMediaImage;
-    std::function<sp<ABuffer>(size_t)> mAlloc;
-
-    sp<ABuffer> mBackBuffer;    ///< backing buffer if we have to copy C2Buffer <=> ABuffer
-
-    MediaImage2 *getMediaImage() {
-        return (MediaImage2 *)mMediaImage->base();
-    }
-};
-
-}  // namespace
-
 // GraphicBlockBuffer
 
 // static
diff --git a/media/codec2/sfplugin/Codec2Buffer.h b/media/codec2/sfplugin/Codec2Buffer.h
index 5e96921..8c5e909 100644
--- a/media/codec2/sfplugin/Codec2Buffer.h
+++ b/media/codec2/sfplugin/Codec2Buffer.h
@@ -44,28 +44,6 @@
 }  // namespace drm
 }  // namespace hardware
 
-/**
- * Copies a graphic view into a media image.
- *
- * \param imgBase base of MediaImage
- * \param img MediaImage data
- * \param view graphic view
- *
- * \return OK on success
- */
-status_t ImageCopy(uint8_t *imgBase, const MediaImage2 *img, const C2GraphicView &view);
-
-/**
- * Copies a media image into a graphic view.
- *
- * \param view graphic view
- * \param imgBase base of MediaImage
- * \param img MediaImage data
- *
- * \return OK on success
- */
-status_t ImageCopy(C2GraphicView &view, const uint8_t *imgBase, const MediaImage2 *img);
-
 class Codec2Buffer : public MediaCodecBuffer {
 public:
     using MediaCodecBuffer::MediaCodecBuffer;
diff --git a/media/codec2/sfplugin/utils/Codec2BufferUtils.cpp b/media/codec2/sfplugin/utils/Codec2BufferUtils.cpp
index 75e9bbc..574f1b9 100644
--- a/media/codec2/sfplugin/utils/Codec2BufferUtils.cpp
+++ b/media/codec2/sfplugin/utils/Codec2BufferUtils.cpp
@@ -27,7 +27,10 @@
 
 #include <android/hardware_buffer.h>
 #include <media/hardware/HardwareAPI.h>
+#include <media/stagefright/foundation/ABuffer.h>
+#include <media/stagefright/foundation/AMessage.h>
 #include <media/stagefright/foundation/AUtils.h>
+#include <media/stagefright/MediaCodecConstants.h>
 
 #include <C2Debug.h>
 
@@ -787,4 +790,438 @@
     return MemoryBlockPool().fetch(size);
 }
 
+GraphicView2MediaImageConverter::GraphicView2MediaImageConverter(
+        const C2GraphicView &view, const sp<AMessage> &format, bool copy)
+    : mInitCheck(NO_INIT),
+        mView(view),
+        mWidth(view.width()),
+        mHeight(view.height()),
+        mAllocatedDepth(0),
+        mBackBufferSize(0),
+        mMediaImage(new ABuffer(sizeof(MediaImage2))) {
+    ATRACE_CALL();
+    if (!format->findInt32(KEY_COLOR_FORMAT, &mClientColorFormat)) {
+        mClientColorFormat = COLOR_FormatYUV420Flexible;
+    }
+    if (!format->findInt32("android._color-format", &mComponentColorFormat)) {
+        mComponentColorFormat = COLOR_FormatYUV420Flexible;
+    }
+    if (view.error() != C2_OK) {
+        ALOGD("Converter: view.error() = %d", view.error());
+        mInitCheck = BAD_VALUE;
+        return;
+    }
+    MediaImage2 *mediaImage = (MediaImage2 *)mMediaImage->base();
+    const C2PlanarLayout &layout = view.layout();
+    if (layout.numPlanes == 0) {
+        ALOGD("Converter: 0 planes");
+        mInitCheck = BAD_VALUE;
+        return;
+    }
+    memset(mediaImage, 0, sizeof(*mediaImage));
+    mAllocatedDepth = layout.planes[0].allocatedDepth;
+    uint32_t bitDepth = layout.planes[0].bitDepth;
+
+    // align width and height to support subsampling cleanly
+    uint32_t stride = align(view.crop().width, 2) * divUp(layout.planes[0].allocatedDepth, 8u);
+    uint32_t vStride = align(view.crop().height, 2);
+
+    bool tryWrapping = !copy;
+
+    switch (layout.type) {
+        case C2PlanarLayout::TYPE_YUV: {
+            mediaImage->mType = MediaImage2::MEDIA_IMAGE_TYPE_YUV;
+            if (layout.numPlanes != 3) {
+                ALOGD("Converter: %d planes for YUV layout", layout.numPlanes);
+                mInitCheck = BAD_VALUE;
+                return;
+            }
+            std::optional<int> clientBitDepth = {};
+            switch (mClientColorFormat) {
+                case COLOR_FormatYUVP010:
+                    clientBitDepth = 10;
+                    break;
+                case COLOR_FormatYUV411PackedPlanar:
+                case COLOR_FormatYUV411Planar:
+                case COLOR_FormatYUV420Flexible:
+                case COLOR_FormatYUV420PackedPlanar:
+                case COLOR_FormatYUV420PackedSemiPlanar:
+                case COLOR_FormatYUV420Planar:
+                case COLOR_FormatYUV420SemiPlanar:
+                case COLOR_FormatYUV422Flexible:
+                case COLOR_FormatYUV422PackedPlanar:
+                case COLOR_FormatYUV422PackedSemiPlanar:
+                case COLOR_FormatYUV422Planar:
+                case COLOR_FormatYUV422SemiPlanar:
+                case COLOR_FormatYUV444Flexible:
+                case COLOR_FormatYUV444Interleaved:
+                    clientBitDepth = 8;
+                    break;
+                default:
+                    // no-op; used with optional
+                    break;
+
+            }
+            // conversion fails if client bit-depth and the component bit-depth differs
+            if ((clientBitDepth) && (bitDepth != clientBitDepth.value())) {
+                ALOGD("Bit depth of client: %d and component: %d differs",
+                    *clientBitDepth, bitDepth);
+                mInitCheck = BAD_VALUE;
+                return;
+            }
+            C2PlaneInfo yPlane = layout.planes[C2PlanarLayout::PLANE_Y];
+            C2PlaneInfo uPlane = layout.planes[C2PlanarLayout::PLANE_U];
+            C2PlaneInfo vPlane = layout.planes[C2PlanarLayout::PLANE_V];
+            if (yPlane.channel != C2PlaneInfo::CHANNEL_Y
+                    || uPlane.channel != C2PlaneInfo::CHANNEL_CB
+                    || vPlane.channel != C2PlaneInfo::CHANNEL_CR) {
+                ALOGD("Converter: not YUV layout");
+                mInitCheck = BAD_VALUE;
+                return;
+            }
+            bool yuv420888 = yPlane.rowSampling == 1 && yPlane.colSampling == 1
+                    && uPlane.rowSampling == 2 && uPlane.colSampling == 2
+                    && vPlane.rowSampling == 2 && vPlane.colSampling == 2;
+            if (yuv420888) {
+                for (uint32_t i = 0; i < 3; ++i) {
+                    const C2PlaneInfo &plane = layout.planes[i];
+                    if (plane.allocatedDepth != 8 || plane.bitDepth != 8) {
+                        yuv420888 = false;
+                        break;
+                    }
+                }
+                yuv420888 = yuv420888 && yPlane.colInc == 1 && uPlane.rowInc == vPlane.rowInc;
+            }
+            int32_t copyFormat = mClientColorFormat;
+            if (yuv420888 && mClientColorFormat == COLOR_FormatYUV420Flexible) {
+                if (uPlane.colInc == 2 && vPlane.colInc == 2
+                        && yPlane.rowInc == uPlane.rowInc) {
+                    copyFormat = COLOR_FormatYUV420PackedSemiPlanar;
+                } else if (uPlane.colInc == 1 && vPlane.colInc == 1
+                        && yPlane.rowInc == uPlane.rowInc * 2) {
+                    copyFormat = COLOR_FormatYUV420PackedPlanar;
+                }
+            }
+            ALOGV("client_fmt=0x%x y:{colInc=%d rowInc=%d} u:{colInc=%d rowInc=%d} "
+                    "v:{colInc=%d rowInc=%d}",
+                    mClientColorFormat,
+                    yPlane.colInc, yPlane.rowInc,
+                    uPlane.colInc, uPlane.rowInc,
+                    vPlane.colInc, vPlane.rowInc);
+            switch (copyFormat) {
+                case COLOR_FormatYUV420Flexible:
+                case COLOR_FormatYUV420Planar:
+                case COLOR_FormatYUV420PackedPlanar:
+                    mediaImage->mPlane[mediaImage->Y].mOffset = 0;
+                    mediaImage->mPlane[mediaImage->Y].mColInc = 1;
+                    mediaImage->mPlane[mediaImage->Y].mRowInc = stride;
+                    mediaImage->mPlane[mediaImage->Y].mHorizSubsampling = 1;
+                    mediaImage->mPlane[mediaImage->Y].mVertSubsampling = 1;
+
+                    mediaImage->mPlane[mediaImage->U].mOffset = stride * vStride;
+                    mediaImage->mPlane[mediaImage->U].mColInc = 1;
+                    mediaImage->mPlane[mediaImage->U].mRowInc = stride / 2;
+                    mediaImage->mPlane[mediaImage->U].mHorizSubsampling = 2;
+                    mediaImage->mPlane[mediaImage->U].mVertSubsampling = 2;
+
+                    mediaImage->mPlane[mediaImage->V].mOffset = stride * vStride * 5 / 4;
+                    mediaImage->mPlane[mediaImage->V].mColInc = 1;
+                    mediaImage->mPlane[mediaImage->V].mRowInc = stride / 2;
+                    mediaImage->mPlane[mediaImage->V].mHorizSubsampling = 2;
+                    mediaImage->mPlane[mediaImage->V].mVertSubsampling = 2;
+
+                    if (tryWrapping && mClientColorFormat != COLOR_FormatYUV420Flexible) {
+                        tryWrapping = yuv420888 && uPlane.colInc == 1 && vPlane.colInc == 1
+                                && yPlane.rowInc == uPlane.rowInc * 2
+                                && view.data()[0] < view.data()[1]
+                                && view.data()[1] < view.data()[2];
+                    }
+                    break;
+
+                case COLOR_FormatYUV420SemiPlanar:
+                case COLOR_FormatYUV420PackedSemiPlanar:
+                    mediaImage->mPlane[mediaImage->Y].mOffset = 0;
+                    mediaImage->mPlane[mediaImage->Y].mColInc = 1;
+                    mediaImage->mPlane[mediaImage->Y].mRowInc = stride;
+                    mediaImage->mPlane[mediaImage->Y].mHorizSubsampling = 1;
+                    mediaImage->mPlane[mediaImage->Y].mVertSubsampling = 1;
+
+                    mediaImage->mPlane[mediaImage->U].mOffset = stride * vStride;
+                    mediaImage->mPlane[mediaImage->U].mColInc = 2;
+                    mediaImage->mPlane[mediaImage->U].mRowInc = stride;
+                    mediaImage->mPlane[mediaImage->U].mHorizSubsampling = 2;
+                    mediaImage->mPlane[mediaImage->U].mVertSubsampling = 2;
+
+                    mediaImage->mPlane[mediaImage->V].mOffset = stride * vStride + 1;
+                    mediaImage->mPlane[mediaImage->V].mColInc = 2;
+                    mediaImage->mPlane[mediaImage->V].mRowInc = stride;
+                    mediaImage->mPlane[mediaImage->V].mHorizSubsampling = 2;
+                    mediaImage->mPlane[mediaImage->V].mVertSubsampling = 2;
+
+                    if (tryWrapping && mClientColorFormat != COLOR_FormatYUV420Flexible) {
+                        tryWrapping = yuv420888 && uPlane.colInc == 2 && vPlane.colInc == 2
+                                && yPlane.rowInc == uPlane.rowInc
+                                && view.data()[0] < view.data()[1]
+                                && view.data()[1] < view.data()[2];
+                    }
+                    break;
+
+                case COLOR_FormatYUVP010:
+                    // stride is in bytes
+                    mediaImage->mPlane[mediaImage->Y].mOffset = 0;
+                    mediaImage->mPlane[mediaImage->Y].mColInc = 2;
+                    mediaImage->mPlane[mediaImage->Y].mRowInc = stride;
+                    mediaImage->mPlane[mediaImage->Y].mHorizSubsampling = 1;
+                    mediaImage->mPlane[mediaImage->Y].mVertSubsampling = 1;
+
+                    mediaImage->mPlane[mediaImage->U].mOffset = stride * vStride;
+                    mediaImage->mPlane[mediaImage->U].mColInc = 4;
+                    mediaImage->mPlane[mediaImage->U].mRowInc = stride;
+                    mediaImage->mPlane[mediaImage->U].mHorizSubsampling = 2;
+                    mediaImage->mPlane[mediaImage->U].mVertSubsampling = 2;
+
+                    mediaImage->mPlane[mediaImage->V].mOffset = stride * vStride + 2;
+                    mediaImage->mPlane[mediaImage->V].mColInc = 4;
+                    mediaImage->mPlane[mediaImage->V].mRowInc = stride;
+                    mediaImage->mPlane[mediaImage->V].mHorizSubsampling = 2;
+                    mediaImage->mPlane[mediaImage->V].mVertSubsampling = 2;
+                    if (tryWrapping) {
+                        tryWrapping = yPlane.allocatedDepth == 16
+                                && uPlane.allocatedDepth == 16
+                                && vPlane.allocatedDepth == 16
+                                && yPlane.bitDepth == 10
+                                && uPlane.bitDepth == 10
+                                && vPlane.bitDepth == 10
+                                && yPlane.rightShift == 6
+                                && uPlane.rightShift == 6
+                                && vPlane.rightShift == 6
+                                && yPlane.rowSampling == 1 && yPlane.colSampling == 1
+                                && uPlane.rowSampling == 2 && uPlane.colSampling == 2
+                                && vPlane.rowSampling == 2 && vPlane.colSampling == 2
+                                && yPlane.colInc == 2
+                                && uPlane.colInc == 4
+                                && vPlane.colInc == 4
+                                && yPlane.rowInc == uPlane.rowInc
+                                && yPlane.rowInc == vPlane.rowInc;
+                    }
+                    break;
+
+                default: {
+                    // default to fully planar format --- this will be overridden if wrapping
+                    // TODO: keep interleaved format
+                    int32_t colInc = divUp(mAllocatedDepth, 8u);
+                    int32_t rowInc = stride * colInc / yPlane.colSampling;
+                    mediaImage->mPlane[mediaImage->Y].mOffset = 0;
+                    mediaImage->mPlane[mediaImage->Y].mColInc = colInc;
+                    mediaImage->mPlane[mediaImage->Y].mRowInc = rowInc;
+                    mediaImage->mPlane[mediaImage->Y].mHorizSubsampling = yPlane.colSampling;
+                    mediaImage->mPlane[mediaImage->Y].mVertSubsampling = yPlane.rowSampling;
+                    int32_t offset = rowInc * vStride / yPlane.rowSampling;
+
+                    rowInc = stride * colInc / uPlane.colSampling;
+                    mediaImage->mPlane[mediaImage->U].mOffset = offset;
+                    mediaImage->mPlane[mediaImage->U].mColInc = colInc;
+                    mediaImage->mPlane[mediaImage->U].mRowInc = rowInc;
+                    mediaImage->mPlane[mediaImage->U].mHorizSubsampling = uPlane.colSampling;
+                    mediaImage->mPlane[mediaImage->U].mVertSubsampling = uPlane.rowSampling;
+                    offset += rowInc * vStride / uPlane.rowSampling;
+
+                    rowInc = stride * colInc / vPlane.colSampling;
+                    mediaImage->mPlane[mediaImage->V].mOffset = offset;
+                    mediaImage->mPlane[mediaImage->V].mColInc = colInc;
+                    mediaImage->mPlane[mediaImage->V].mRowInc = rowInc;
+                    mediaImage->mPlane[mediaImage->V].mHorizSubsampling = vPlane.colSampling;
+                    mediaImage->mPlane[mediaImage->V].mVertSubsampling = vPlane.rowSampling;
+                    break;
+                }
+            }
+            break;
+        }
+
+        case C2PlanarLayout::TYPE_YUVA:
+            ALOGD("Converter: unrecognized color format "
+                    "(client %d component %d) for YUVA layout",
+                    mClientColorFormat, mComponentColorFormat);
+            mInitCheck = NO_INIT;
+            return;
+        case C2PlanarLayout::TYPE_RGB:
+            mediaImage->mType = MediaImage2::MEDIA_IMAGE_TYPE_RGB;
+            // TODO: support MediaImage layout
+            switch (mClientColorFormat) {
+                case COLOR_FormatSurface:
+                case COLOR_FormatRGBFlexible:
+                case COLOR_Format24bitBGR888:
+                case COLOR_Format24bitRGB888:
+                    ALOGD("Converter: accept color format "
+                            "(client %d component %d) for RGB layout",
+                            mClientColorFormat, mComponentColorFormat);
+                    break;
+                default:
+                    ALOGD("Converter: unrecognized color format "
+                            "(client %d component %d) for RGB layout",
+                            mClientColorFormat, mComponentColorFormat);
+                    mInitCheck = BAD_VALUE;
+                    return;
+            }
+            if (layout.numPlanes != 3) {
+                ALOGD("Converter: %d planes for RGB layout", layout.numPlanes);
+                mInitCheck = BAD_VALUE;
+                return;
+            }
+            break;
+        case C2PlanarLayout::TYPE_RGBA:
+            mediaImage->mType = MediaImage2::MEDIA_IMAGE_TYPE_RGBA;
+            // TODO: support MediaImage layout
+            switch (mClientColorFormat) {
+                case COLOR_FormatSurface:
+                case COLOR_FormatRGBAFlexible:
+                case COLOR_Format32bitABGR8888:
+                case COLOR_Format32bitARGB8888:
+                case COLOR_Format32bitBGRA8888:
+                    ALOGD("Converter: accept color format "
+                            "(client %d component %d) for RGBA layout",
+                            mClientColorFormat, mComponentColorFormat);
+                    break;
+                default:
+                    ALOGD("Converter: unrecognized color format "
+                            "(client %d component %d) for RGBA layout",
+                            mClientColorFormat, mComponentColorFormat);
+                    mInitCheck = BAD_VALUE;
+                    return;
+            }
+            if (layout.numPlanes != 4) {
+                ALOGD("Converter: %d planes for RGBA layout", layout.numPlanes);
+                mInitCheck = BAD_VALUE;
+                return;
+            }
+            break;
+        default:
+            mediaImage->mType = MediaImage2::MEDIA_IMAGE_TYPE_UNKNOWN;
+            if (layout.numPlanes == 1) {
+                const C2PlaneInfo &plane = layout.planes[0];
+                if (plane.colInc < 0 || plane.rowInc < 0) {
+                    // Copy-only if we have negative colInc/rowInc
+                    tryWrapping = false;
+                }
+                mediaImage->mPlane[0].mOffset = 0;
+                mediaImage->mPlane[0].mColInc = std::abs(plane.colInc);
+                mediaImage->mPlane[0].mRowInc = std::abs(plane.rowInc);
+                mediaImage->mPlane[0].mHorizSubsampling = plane.colSampling;
+                mediaImage->mPlane[0].mVertSubsampling = plane.rowSampling;
+            } else {
+                ALOGD("Converter: unrecognized layout: color format (client %d component %d)",
+                        mClientColorFormat, mComponentColorFormat);
+                mInitCheck = NO_INIT;
+                return;
+            }
+            break;
+    }
+    if (tryWrapping) {
+        // try to map directly. check if the planes are near one another
+        const uint8_t *minPtr = mView.data()[0];
+        const uint8_t *maxPtr = mView.data()[0];
+        int32_t planeSize = 0;
+        for (uint32_t i = 0; i < layout.numPlanes; ++i) {
+            const C2PlaneInfo &plane = layout.planes[i];
+            int64_t planeStride = std::abs(plane.rowInc / plane.colInc);
+            ssize_t minOffset = plane.minOffset(
+                    mWidth / plane.colSampling, mHeight / plane.rowSampling);
+            ssize_t maxOffset = plane.maxOffset(
+                    mWidth / plane.colSampling, mHeight / plane.rowSampling);
+            if (minPtr > mView.data()[i] + minOffset) {
+                minPtr = mView.data()[i] + minOffset;
+            }
+            if (maxPtr < mView.data()[i] + maxOffset) {
+                maxPtr = mView.data()[i] + maxOffset;
+            }
+            planeSize += planeStride * divUp(mAllocatedDepth, 8u)
+                    * align(mHeight, 64) / plane.rowSampling;
+        }
+
+        if (minPtr == mView.data()[0] && (maxPtr - minPtr) <= planeSize) {
+            // FIXME: this is risky as reading/writing data out of bound results
+            //        in an undefined behavior, but gralloc does assume a
+            //        contiguous mapping
+            for (uint32_t i = 0; i < layout.numPlanes; ++i) {
+                const C2PlaneInfo &plane = layout.planes[i];
+                mediaImage->mPlane[i].mOffset = mView.data()[i] - minPtr;
+                mediaImage->mPlane[i].mColInc = plane.colInc;
+                mediaImage->mPlane[i].mRowInc = plane.rowInc;
+                mediaImage->mPlane[i].mHorizSubsampling = plane.colSampling;
+                mediaImage->mPlane[i].mVertSubsampling = plane.rowSampling;
+            }
+            mWrapped = new ABuffer(const_cast<uint8_t *>(minPtr), maxPtr - minPtr);
+            ALOGV("Converter: wrapped (capacity=%zu)", mWrapped->capacity());
+        }
+    }
+    mediaImage->mNumPlanes = layout.numPlanes;
+    mediaImage->mWidth = view.crop().width;
+    mediaImage->mHeight = view.crop().height;
+    mediaImage->mBitDepth = bitDepth;
+    mediaImage->mBitDepthAllocated = mAllocatedDepth;
+
+    uint32_t bufferSize = 0;
+    for (uint32_t i = 0; i < layout.numPlanes; ++i) {
+        const C2PlaneInfo &plane = layout.planes[i];
+        if (plane.allocatedDepth < plane.bitDepth
+                || plane.rightShift != plane.allocatedDepth - plane.bitDepth) {
+            ALOGD("rightShift value of %u unsupported", plane.rightShift);
+            mInitCheck = BAD_VALUE;
+            return;
+        }
+        if (plane.allocatedDepth > 8 && plane.endianness != C2PlaneInfo::NATIVE) {
+            ALOGD("endianness value of %u unsupported", plane.endianness);
+            mInitCheck = BAD_VALUE;
+            return;
+        }
+        if (plane.allocatedDepth != mAllocatedDepth || plane.bitDepth != bitDepth) {
+            ALOGD("different allocatedDepth/bitDepth per plane unsupported");
+            mInitCheck = BAD_VALUE;
+            return;
+        }
+        // stride is in bytes
+        bufferSize += stride * vStride / plane.rowSampling / plane.colSampling;
+    }
+
+    mBackBufferSize = bufferSize;
+    mInitCheck = OK;
+}
+
+status_t GraphicView2MediaImageConverter::initCheck() const { return mInitCheck; }
+
+uint32_t GraphicView2MediaImageConverter::backBufferSize() const { return mBackBufferSize; }
+
+sp<ABuffer> GraphicView2MediaImageConverter::wrap() const {
+    if (mBackBuffer == nullptr) {
+        return mWrapped;
+    }
+    return nullptr;
+}
+
+bool GraphicView2MediaImageConverter::setBackBuffer(const sp<ABuffer> &backBuffer) {
+    if (backBuffer == nullptr) {
+        return false;
+    }
+    if (backBuffer->capacity() < mBackBufferSize) {
+        return false;
+    }
+    backBuffer->setRange(0, mBackBufferSize);
+    mBackBuffer = backBuffer;
+    return true;
+}
+
+status_t GraphicView2MediaImageConverter::copyToMediaImage() {
+    ATRACE_CALL();
+    if (mInitCheck != OK) {
+        return mInitCheck;
+    }
+    return ImageCopy(mBackBuffer->base(), getMediaImage(), mView);
+}
+
+const sp<ABuffer> &GraphicView2MediaImageConverter::imageData() const { return mMediaImage; }
+
+MediaImage2 *GraphicView2MediaImageConverter::getMediaImage() {
+    return (MediaImage2 *)mMediaImage->base();
+}
+
 }  // namespace android
diff --git a/media/codec2/sfplugin/utils/Codec2BufferUtils.h b/media/codec2/sfplugin/utils/Codec2BufferUtils.h
index 6b0ba7f..8daf3d8 100644
--- a/media/codec2/sfplugin/utils/Codec2BufferUtils.h
+++ b/media/codec2/sfplugin/utils/Codec2BufferUtils.h
@@ -22,6 +22,7 @@
 #include <C2ParamDef.h>
 
 #include <media/hardware/VideoAPI.h>
+#include <utils/StrongPointer.h>
 #include <utils/Errors.h>
 
 namespace android {
@@ -194,6 +195,61 @@
     std::shared_ptr<Impl> mImpl;
 };
 
+struct ABuffer;
+struct AMessage;
+
+class GraphicView2MediaImageConverter {
+public:
+    /**
+     * Creates a C2GraphicView <=> MediaImage converter
+     *
+     * \param view C2GraphicView object
+     * \param format buffer format
+     * \param copy whether the converter is used for copy or not
+     */
+    GraphicView2MediaImageConverter(
+            const C2GraphicView &view, const sp<AMessage> &format, bool copy);
+
+    status_t initCheck() const;
+
+    uint32_t backBufferSize() const;
+
+    /**
+     * Wrap C2GraphicView using a MediaImage2. Note that if not wrapped, the content is not mapped
+     * in this function --- the caller should use CopyGraphicView2MediaImage() function to copy the
+     * data into a backing buffer explicitly.
+     *
+     * \return media buffer. This is null if wrapping failed.
+     */
+    sp<ABuffer> wrap() const;
+
+    bool setBackBuffer(const sp<ABuffer> &backBuffer);
+
+    /**
+     * Copy C2GraphicView to MediaImage2.
+     */
+    status_t copyToMediaImage();
+
+    const sp<ABuffer> &imageData() const;
+
+private:
+    status_t mInitCheck;
+
+    const C2GraphicView mView;
+    uint32_t mWidth;
+    uint32_t mHeight;
+    int32_t mClientColorFormat;  ///< SDK color format for MediaImage
+    int32_t mComponentColorFormat;  ///< SDK color format from component
+    sp<ABuffer> mWrapped;  ///< wrapped buffer (if we can map C2Buffer to an ABuffer)
+    uint32_t mAllocatedDepth;
+    uint32_t mBackBufferSize;
+    sp<ABuffer> mMediaImage;
+
+    sp<ABuffer> mBackBuffer;    ///< backing buffer if we have to copy C2Buffer <=> ABuffer
+
+    MediaImage2 *getMediaImage();
+};
+
 } // namespace android
 
 #endif  // CODEC2_BUFFER_UTILS_H_
diff --git a/media/libaudioclient/aidl/fuzzer/Android.bp b/media/libaudioclient/aidl/fuzzer/Android.bp
index bb06bff..8cb1beb 100644
--- a/media/libaudioclient/aidl/fuzzer/Android.bp
+++ b/media/libaudioclient/aidl/fuzzer/Android.bp
@@ -33,17 +33,9 @@
     ],
     shared_libs: [
         "android.hardware.audio.common-util",
-        "av-types-aidl-cpp",
-        "effect-aidl-cpp",
         "libaudioflinger",
-        "libaudiomanager",
         "libaudiopolicyservice",
-        "libaudioprocessing",
         "libdl",
-        "libnbaio",
-        "libnblog",
-        "libpowermanager",
-        "libvibrator",
         "libvndksupport",
         "mediametricsservice-aidl-cpp",
     ],
@@ -73,6 +65,7 @@
         "latest_android_hardware_audio_core_sounddose_ndk_shared",
         "latest_android_hardware_audio_effect_ndk_shared",
         "libaudioclient_aidl_fuzzer_defaults",
+        "libaudioflinger_dependencies",
         "libaudiopolicyservice_dependencies",
         "service_fuzzer_defaults",
     ],
diff --git a/media/libaudiohal/impl/DeviceHalAidl.cpp b/media/libaudiohal/impl/DeviceHalAidl.cpp
index 86dd663..2f2f69f 100644
--- a/media/libaudiohal/impl/DeviceHalAidl.cpp
+++ b/media/libaudiohal/impl/DeviceHalAidl.cpp
@@ -487,8 +487,11 @@
     *outStream = stream;
     /* StreamOutHalInterface* */ void* cbCookie = (*outStream).get();
     {
-        std::lock_guard l(mLock);
+        std::lock_guard l(mCallbacksLock);
         mCallbacks.emplace(cbCookie, Callbacks{});
+    }
+    {
+        std::lock_guard l(mLock);
         mMapper.addStream(*outStream, mixPortConfig.id, aidlPatch.id);
     }
     if (streamCb) {
@@ -1328,7 +1331,7 @@
 }
 
 void DeviceHalAidl::clearCallbacks(void* cookie) {
-    std::lock_guard l(mLock);
+    std::lock_guard l(mCallbacksLock);
     mCallbacks.erase(cookie);
 }
 
@@ -1361,18 +1364,21 @@
     setCallbackImpl(cookie, &Callbacks::latency, cb);
 }
 
-template<class C>
+template <class C>
 sp<C> DeviceHalAidl::getCallbackImpl(void* cookie, wp<C> DeviceHalAidl::Callbacks::* field) {
-    std::lock_guard l(mLock);
-    if (auto it = mCallbacks.find(cookie); it != mCallbacks.end()) {
-        return ((it->second).*field).promote();
+    wp<C> result;
+    {
+        std::lock_guard l(mCallbacksLock);
+        if (auto it = mCallbacks.find(cookie); it != mCallbacks.end()) {
+            result = (it->second).*field;
+        }
     }
-    return nullptr;
+    return result.promote();
 }
 template<class C>
 void DeviceHalAidl::setCallbackImpl(
         void* cookie, wp<C> DeviceHalAidl::Callbacks::* field, const sp<C>& cb) {
-    std::lock_guard l(mLock);
+    std::lock_guard l(mCallbacksLock);
     if (auto it = mCallbacks.find(cookie); it != mCallbacks.end()) {
         (it->second).*field = cb;
     }
diff --git a/media/libaudiohal/impl/DeviceHalAidl.h b/media/libaudiohal/impl/DeviceHalAidl.h
index d925b46..1778881 100644
--- a/media/libaudiohal/impl/DeviceHalAidl.h
+++ b/media/libaudiohal/impl/DeviceHalAidl.h
@@ -242,8 +242,11 @@
     const std::shared_ptr<::aidl::android::hardware::audio::core::IBluetoothLe> mBluetoothLe;
     const std::shared_ptr<::aidl::android::hardware::audio::core::sounddose::ISoundDose> mSoundDose;
 
+    std::mutex mCallbacksLock;
+    // Use 'mCallbacksLock' only to implement exclusive access to 'mCallbacks'. Never hold it
+    // while making any calls.
+    std::map<void*, Callbacks> mCallbacks GUARDED_BY(mCallbacksLock);
     std::mutex mLock;
-    std::map<void*, Callbacks> mCallbacks GUARDED_BY(mLock);
     std::set<audio_port_handle_t> mDeviceDisconnectionNotified GUARDED_BY(mLock);
     Hal2AidlMapper mMapper GUARDED_BY(mLock);
     LockedAccessor<Hal2AidlMapper> mMapperAccessor;
diff --git a/media/libaudiohal/impl/StreamHalAidl.cpp b/media/libaudiohal/impl/StreamHalAidl.cpp
index 94de8ea..7a80a6a 100644
--- a/media/libaudiohal/impl/StreamHalAidl.cpp
+++ b/media/libaudiohal/impl/StreamHalAidl.cpp
@@ -417,8 +417,16 @@
     ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
     TIME_CHECK();
     if (!mStream) return NO_INIT;
-    return sendCommand(makeHalCommand<HalCommand::Tag::pause>(), reply,
-            true /*safeFromNonWorkerThread*/);  // The workers stops its I/O activity first.
+
+    if (const auto state = getState(); isInPlayOrRecordState(state)) {
+        return sendCommand(
+                makeHalCommand<HalCommand::Tag::pause>(), reply,
+                true /*safeFromNonWorkerThread*/);  // The workers stops its I/O activity first.
+    } else {
+        ALOGD("%s: already stream in one of the PAUSED kind of states, current state: %s", __func__,
+              toString(state).c_str());
+        return OK;
+    }
 }
 
 status_t StreamHalAidl::resume(StreamDescriptor::Reply* reply) {
@@ -473,8 +481,19 @@
     ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
     TIME_CHECK();
     if (!mStream) return NO_INIT;
-    return sendCommand(makeHalCommand<HalCommand::Tag::flush>(), reply,
-            true /*safeFromNonWorkerThread*/);  // The workers stops its I/O activity first.
+
+    if (const auto state = getState(); isInPausedState(state)) {
+        return sendCommand(
+                makeHalCommand<HalCommand::Tag::flush>(), reply,
+                true /*safeFromNonWorkerThread*/);  // The workers stops its I/O activity first.
+    } else if (isInPlayOrRecordState(state)) {
+        ALOGE("%s: found stream in non-flushable state: %s", __func__, toString(state).c_str());
+        return INVALID_OPERATION;
+    } else {
+        ALOGD("%s: already stream in one of the flushable state: current state: %s", __func__,
+              toString(state).c_str());
+        return OK;
+    }
 }
 
 status_t StreamHalAidl::exit() {
@@ -798,9 +817,9 @@
 status_t StreamOutHalAidl::drain(bool earlyNotify) {
     if (!mStream) return NO_INIT;
 
-    if(const auto state = getState(); state == StreamDescriptor::State::IDLE) {
-        ALOGD("%p %s stream already in IDLE state", this, __func__);
-        if(mContext.isAsynchronous()) onDrainReady();
+    if (const auto state = getState(); isInDrainedState(state)) {
+        ALOGD("%p %s stream already in %s", this, __func__, toString(state).c_str());
+        if (mContext.isAsynchronous()) onDrainReady();
         return OK;
     }
 
diff --git a/media/libaudiohal/impl/StreamHalAidl.h b/media/libaudiohal/impl/StreamHalAidl.h
index 0587640..baf4ac0 100644
--- a/media/libaudiohal/impl/StreamHalAidl.h
+++ b/media/libaudiohal/impl/StreamHalAidl.h
@@ -220,6 +220,41 @@
         return mLastReply.state;
     }
 
+    bool isInDrainedState(
+            const ::aidl::android::hardware::audio::core::StreamDescriptor::State state) {
+        if (state == ::aidl::android::hardware::audio::core::StreamDescriptor::State::IDLE ||
+            state == ::aidl::android::hardware::audio::core::StreamDescriptor::State::STANDBY) {
+            // drain equivalent states
+            return true;
+        }
+        return false;
+    }
+
+    bool isInPlayOrRecordState(
+            const ::aidl::android::hardware::audio::core::StreamDescriptor::State state) {
+        if (state == ::aidl::android::hardware::audio::core::StreamDescriptor::State::ACTIVE ||
+            state ==
+                    ::aidl::android::hardware::audio::core::StreamDescriptor::State::TRANSFERRING ||
+            state == ::aidl::android::hardware::audio::core::StreamDescriptor::State::DRAINING) {
+            // play or record equivalent states
+            return true;
+        }
+        return false;
+    }
+
+    bool isInPausedState(
+            const ::aidl::android::hardware::audio::core::StreamDescriptor::State& state) {
+        if (state == ::aidl::android::hardware::audio::core::StreamDescriptor::State::PAUSED ||
+            state ==
+                    ::aidl::android::hardware::audio::core::StreamDescriptor::State::DRAIN_PAUSED ||
+            state == ::aidl::android::hardware::audio::core::StreamDescriptor::State::
+                             TRANSFER_PAUSED) {
+            // pause equivalent states
+            return true;
+        }
+        return false;
+    }
+
     status_t getLatency(uint32_t *latency);
 
     // Always returns non-negative values.
diff --git a/media/libaudiohal/tests/CoreAudioHalAidl_test.cpp b/media/libaudiohal/tests/CoreAudioHalAidl_test.cpp
index 50b748e..dbd1f60 100644
--- a/media/libaudiohal/tests/CoreAudioHalAidl_test.cpp
+++ b/media/libaudiohal/tests/CoreAudioHalAidl_test.cpp
@@ -18,6 +18,7 @@
 #include <memory>
 #include <mutex>
 #include <string>
+#include <thread>
 #include <vector>
 
 #define LOG_TAG "CoreAudioHalAidlTest"
@@ -28,6 +29,7 @@
 #include <StreamHalAidl.h>
 #include <aidl/android/hardware/audio/core/BnModule.h>
 #include <aidl/android/hardware/audio/core/BnStreamCommon.h>
+#include <aidl/android/hardware/audio/core/BnStreamOut.h>
 #include <aidl/android/media/audio/BnHalAdapterVendorExtension.h>
 #include <aidl/android/media/audio/common/AudioGainMode.h>
 #include <aidl/android/media/audio/common/Int.h>
@@ -64,13 +66,13 @@
     const std::vector<VendorParameter>& getSyncParameters() const { return mSyncParameters; }
 
   protected:
-    ndk::ScopedAStatus getVendorParametersImpl(const std::vector<std::string>& in_parameterIds) {
+    ndk::ScopedAStatus getVendorParameters(const std::vector<std::string>& in_parameterIds) {
         mGetParameterIds.insert(mGetParameterIds.end(), in_parameterIds.begin(),
                                 in_parameterIds.end());
         return ndk::ScopedAStatus::ok();
     }
-    ndk::ScopedAStatus setVendorParametersImpl(const std::vector<VendorParameter>& in_parameters,
-                                               bool async) {
+    ndk::ScopedAStatus setVendorParameters(const std::vector<VendorParameter>& in_parameters,
+                                           bool async) {
         if (async) {
             mAsyncParameters.insert(mAsyncParameters.end(), in_parameters.begin(),
                                     in_parameters.end());
@@ -187,6 +189,11 @@
     speakerOutDevice.profiles = standardPcmAudioProfiles;
     c.ports.push_back(speakerOutDevice);
 
+    AudioPort primaryOutMix =
+            createPort(c.nextPortId++, "primary output", 0, false, createPortMixExt(1, 1));
+    primaryOutMix.profiles = standardPcmAudioProfiles;
+    c.ports.push_back(primaryOutMix);
+
     AudioPort btOutDevice =
             createPort(c.nextPortId++, "BT A2DP Out", 0, false,
                        createPortDeviceExt(AudioDeviceType::OUT_DEVICE, 0,
@@ -200,11 +207,141 @@
     c.ports.push_back(btOutMix);
 
     c.routes.push_back(createRoute({micInDevice, micInBackDevice}, primaryInMix));
+    c.routes.push_back(createRoute({primaryOutMix}, speakerOutDevice));
     c.routes.push_back(createRoute({btOutMix}, btOutDevice));
 
     return c;
 }
 
+class StreamCommonMock : public ::aidl::android::hardware::audio::core::BnStreamCommon,
+                         public VendorParameterMock {
+    ndk::ScopedAStatus close() override { return ndk::ScopedAStatus::ok(); }
+    ndk::ScopedAStatus prepareToClose() override { return ndk::ScopedAStatus::ok(); }
+    ndk::ScopedAStatus updateHwAvSyncId(int32_t) override { return ndk::ScopedAStatus::ok(); }
+    ndk::ScopedAStatus getVendorParameters(const std::vector<std::string>& in_parameterIds,
+                                           std::vector<VendorParameter>*) override {
+        return VendorParameterMock::getVendorParameters(in_parameterIds);
+    }
+    ndk::ScopedAStatus setVendorParameters(const std::vector<VendorParameter>& in_parameters,
+                                           bool async) override {
+        return VendorParameterMock::setVendorParameters(in_parameters, async);
+    }
+    ndk::ScopedAStatus addEffect(
+            const std::shared_ptr<::aidl::android::hardware::audio::effect::IEffect>&) override {
+        return ndk::ScopedAStatus::ok();
+    }
+    ndk::ScopedAStatus removeEffect(
+            const std::shared_ptr<::aidl::android::hardware::audio::effect::IEffect>&) override {
+        return ndk::ScopedAStatus::ok();
+    }
+};
+
+class StreamContext {
+  public:
+    using Descriptor = ::aidl::android::hardware::audio::core::StreamDescriptor;
+    typedef ::android::AidlMessageQueue<
+            Descriptor::Command, ::aidl::android::hardware::common::fmq::SynchronizedReadWrite>
+            CommandMQ;
+    typedef ::android::AidlMessageQueue<
+            Descriptor::Reply, ::aidl::android::hardware::common::fmq::SynchronizedReadWrite>
+            ReplyMQ;
+    typedef ::android::AidlMessageQueue<
+            int8_t, ::aidl::android::hardware::common::fmq::SynchronizedReadWrite>
+            DataMQ;
+
+    StreamContext() = default;
+    StreamContext(std::unique_ptr<CommandMQ> commandMQ, std::unique_ptr<ReplyMQ> replyMQ,
+                  std::unique_ptr<DataMQ> dataMQ)
+        : mCommandMQ(std::move(commandMQ)),
+          mReplyMQ(std::move(replyMQ)),
+          mDataMQ(std::move(dataMQ)) {}
+    void fillDescriptor(Descriptor* desc) {
+        if (mCommandMQ) {
+            desc->command = mCommandMQ->dupeDesc();
+        }
+        if (mReplyMQ) {
+            desc->reply = mReplyMQ->dupeDesc();
+        }
+        if (mDataMQ) {
+            desc->frameSizeBytes = 2;
+            desc->bufferSizeFrames = 48;
+            desc->audio.set<Descriptor::AudioBuffer::Tag::fmq>(mDataMQ->dupeDesc());
+        }
+    }
+
+  private:
+    std::unique_ptr<CommandMQ> mCommandMQ =
+            std::make_unique<CommandMQ>(1, true /*configureEventFlagWord*/);
+    std::unique_ptr<ReplyMQ> mReplyMQ =
+            std::make_unique<ReplyMQ>(1, true /*configureEventFlagWord*/);
+    std::unique_ptr<DataMQ> mDataMQ = std::make_unique<DataMQ>(96);
+};
+
+class StreamOutMock : public ::aidl::android::hardware::audio::core::BnStreamOut {
+  public:
+    explicit StreamOutMock(StreamContext&& ctx) : mContext(std::move(ctx)) {}
+
+  private:
+    ndk::ScopedAStatus getStreamCommon(
+            std::shared_ptr<::aidl::android::hardware::audio::core::IStreamCommon>* _aidl_return)
+            override {
+        if (!mCommon) {
+            mCommon = ndk::SharedRefBase::make<StreamCommonMock>();
+        }
+        *_aidl_return = mCommon;
+        return ndk::ScopedAStatus::ok();
+    }
+    ndk::ScopedAStatus updateMetadata(
+            const ::aidl::android::hardware::audio::common::SourceMetadata&) override {
+        return ndk::ScopedAStatus::ok();
+    }
+    ndk::ScopedAStatus updateOffloadMetadata(
+            const ::aidl::android::hardware::audio::common::AudioOffloadMetadata&) override {
+        return ndk::ScopedAStatus::ok();
+    }
+    ndk::ScopedAStatus getHwVolume(std::vector<float>*) override {
+        return ndk::ScopedAStatus::ok();
+    }
+    ndk::ScopedAStatus setHwVolume(const std::vector<float>&) override {
+        return ndk::ScopedAStatus::ok();
+    }
+    ndk::ScopedAStatus getAudioDescriptionMixLevel(float*) override {
+        return ndk::ScopedAStatus::ok();
+    }
+    ndk::ScopedAStatus setAudioDescriptionMixLevel(float) override {
+        return ndk::ScopedAStatus::ok();
+    }
+    ndk::ScopedAStatus getDualMonoMode(
+            ::aidl::android::media::audio::common::AudioDualMonoMode*) override {
+        return ndk::ScopedAStatus::ok();
+    }
+    ndk::ScopedAStatus setDualMonoMode(
+            ::aidl::android::media::audio::common::AudioDualMonoMode) override {
+        return ndk::ScopedAStatus::ok();
+    }
+    ndk::ScopedAStatus getRecommendedLatencyModes(
+            std::vector<::aidl::android::media::audio::common::AudioLatencyMode>*) override {
+        return ndk::ScopedAStatus::ok();
+    }
+    ndk::ScopedAStatus setLatencyMode(
+            ::aidl::android::media::audio::common::AudioLatencyMode) override {
+        return ndk::ScopedAStatus::ok();
+    }
+    ndk::ScopedAStatus getPlaybackRateParameters(
+            ::aidl::android::media::audio::common::AudioPlaybackRate*) override {
+        return ndk::ScopedAStatus::ok();
+    }
+    ndk::ScopedAStatus setPlaybackRateParameters(
+            const ::aidl::android::media::audio::common::AudioPlaybackRate&) override {
+        return ndk::ScopedAStatus::ok();
+    }
+    ndk::ScopedAStatus selectPresentation(int32_t, int32_t) override {
+        return ndk::ScopedAStatus::ok();
+    }
+    StreamContext mContext;
+    std::shared_ptr<StreamCommonMock> mCommon;
+};
+
 class ModuleMock : public ::aidl::android::hardware::audio::core::BnModule,
                    public VendorParameterMock {
   public:
@@ -339,7 +476,10 @@
         return ndk::ScopedAStatus::ok();
     }
     ndk::ScopedAStatus openOutputStream(const OpenOutputStreamArguments&,
-                                        OpenOutputStreamReturn*) override {
+                                        OpenOutputStreamReturn* _aidl_return) override {
+        StreamContext context;
+        context.fillDescriptor(&_aidl_return->desc);
+        _aidl_return->stream = ndk::SharedRefBase::make<StreamOutMock>(std::move(context));
         return ndk::ScopedAStatus::ok();
     }
     ndk::ScopedAStatus getSupportedPlaybackRateFactors(SupportedPlaybackRateFactors*) override {
@@ -351,6 +491,7 @@
         if (requested.id == 0) {
             *patch = requested;
             patch->id = mConfig.nextPatchId++;
+            patch->latenciesMs.push_back(100);
             mConfig.patches.push_back(*patch);
             ALOGD("%s: returning %s", __func__, patch->toString().c_str());
         } else {
@@ -437,11 +578,11 @@
     ndk::ScopedAStatus generateHwAvSyncId(int32_t*) override { return ndk::ScopedAStatus::ok(); }
     ndk::ScopedAStatus getVendorParameters(const std::vector<std::string>& in_parameterIds,
                                            std::vector<VendorParameter>*) override {
-        return getVendorParametersImpl(in_parameterIds);
+        return VendorParameterMock::getVendorParameters(in_parameterIds);
     }
     ndk::ScopedAStatus setVendorParameters(const std::vector<VendorParameter>& in_parameters,
                                            bool async) override {
-        return setVendorParametersImpl(in_parameters, async);
+        return VendorParameterMock::setVendorParameters(in_parameters, async);
     }
     ndk::ScopedAStatus addDeviceEffect(
             int32_t,
@@ -474,29 +615,6 @@
     ScreenRotation mScreenRotation = ScreenRotation::DEG_0;
 };
 
-class StreamCommonMock : public ::aidl::android::hardware::audio::core::BnStreamCommon,
-                         public VendorParameterMock {
-    ndk::ScopedAStatus close() override { return ndk::ScopedAStatus::ok(); }
-    ndk::ScopedAStatus prepareToClose() override { return ndk::ScopedAStatus::ok(); }
-    ndk::ScopedAStatus updateHwAvSyncId(int32_t) override { return ndk::ScopedAStatus::ok(); }
-    ndk::ScopedAStatus getVendorParameters(const std::vector<std::string>& in_parameterIds,
-                                           std::vector<VendorParameter>*) override {
-        return getVendorParametersImpl(in_parameterIds);
-    }
-    ndk::ScopedAStatus setVendorParameters(const std::vector<VendorParameter>& in_parameters,
-                                           bool async) override {
-        return setVendorParametersImpl(in_parameters, async);
-    }
-    ndk::ScopedAStatus addEffect(
-            const std::shared_ptr<::aidl::android::hardware::audio::effect::IEffect>&) override {
-        return ndk::ScopedAStatus::ok();
-    }
-    ndk::ScopedAStatus removeEffect(
-            const std::shared_ptr<::aidl::android::hardware::audio::effect::IEffect>&) override {
-        return ndk::ScopedAStatus::ok();
-    }
-};
-
 VendorParameter makeVendorParameter(const std::string& id, int value) {
     VendorParameter result{.id = id};
     // Note: in real life, a parcelable type defined by vendor must be used,
@@ -708,7 +826,7 @@
 class DeviceHalAidlTest : public testing::Test {
   public:
     void SetUp() override {
-        mModule = ndk::SharedRefBase::make<ModuleMock>();
+        mModule = ndk::SharedRefBase::make<ModuleMock>(getTestConfiguration());
         mDevice = sp<DeviceHalAidl>::make("test", mModule, nullptr /*vext*/);
     }
     void TearDown() override {
@@ -750,6 +868,46 @@
     EXPECT_EQ(ScreenRotation::DEG_0, mModule->getScreenRotation());
 }
 
+// See http://b/357487484#comment6
+TEST_F(DeviceHalAidlTest, StreamReleaseOnMapperCleanup) {
+    ASSERT_EQ(OK, mDevice->initCheck());
+    // Since the test is in effect probabilistic, try multiple times.
+    for (int i = 0; i < 100; ++i) {
+        sp<StreamOutHalInterface> stream1;
+        struct audio_config config = AUDIO_CONFIG_INITIALIZER;
+        config.sample_rate = 48000;
+        config.channel_mask = AUDIO_CHANNEL_OUT_STEREO;
+        config.format = AUDIO_FORMAT_PCM_16_BIT;
+        ASSERT_EQ(OK, mDevice->openOutputStream(42 /*handle*/, AUDIO_DEVICE_OUT_SPEAKER,
+                                                AUDIO_OUTPUT_FLAG_NONE, &config, "" /*address*/,
+                                                &stream1));
+        ASSERT_EQ(1, stream1->getStrongCount());
+        std::atomic<bool> stopReleaser = false;
+        // Try to catch the moment when Hal2AidlMapper promotes its wp<StreamHalInterface> to sp<>
+        // in Hal2AidlMapper::resetUnusedPatchesAndPortConfigs and release on our side in order to
+        // make Hal2AidlMapper the sole owner via a temporary sp and enforce destruction of the
+        // stream while the DeviceHalAidl::mLock is held.
+        std::thread releaser([&stream1, &stopReleaser]() {
+            while (!stopReleaser) {
+                if (stream1->getStrongCount() > 1) {
+                    stream1.clear();
+                    break;
+                }
+                std::this_thread::yield();
+            }
+        });
+        sp<StreamOutHalInterface> stream2;
+        // Opening another stream triggers a call to
+        // Hal2AidlMapper::resetUnusedPatchesAndPortConfigs.  It must not cause a deadlock of the
+        // test (main) thread.
+        ASSERT_EQ(OK, mDevice->openOutputStream(43 /*handle*/, AUDIO_DEVICE_OUT_SPEAKER,
+                                                AUDIO_OUTPUT_FLAG_NONE, &config, "" /*address*/,
+                                                &stream2));
+        stopReleaser = true;
+        releaser.join();
+    }
+}
+
 class DeviceHalAidlVendorParametersTest : public testing::Test {
   public:
     void SetUp() override {
diff --git a/media/libmediaplayerservice/fuzzer/Android.bp b/media/libmediaplayerservice/fuzzer/Android.bp
index b1e6db2..b1f9d18 100644
--- a/media/libmediaplayerservice/fuzzer/Android.bp
+++ b/media/libmediaplayerservice/fuzzer/Android.bp
@@ -64,6 +64,7 @@
         "mediarecorder_fuzzer.cpp",
     ],
     defaults: [
+        "libaudioflinger_dependencies",
         "libmediaplayerserviceFuzzer_defaults",
     ],
     static_libs: [
@@ -76,12 +77,10 @@
     ],
     shared_libs: [
         "android.hardware.media.omx@1.0",
-        "av-types-aidl-cpp",
         "media_permission-aidl-cpp",
-        "libaudioclient_aidl_conversion",
         "libactivitymanager_aidl",
         "libandroid_net",
-        "libaudioclient",
+        "libaudioflinger",
         "libcamera_client",
         "libcodec2_client",
         "libcrypto",
@@ -89,24 +88,13 @@
         "libdrmframework",
         "libgui",
         "libhidlbase",
-        "liblog",
         "libmedia_codeclist",
         "libmedia_omx",
         "libmediadrm",
-        "libmediametrics",
-        "libmediautils",
-        "libmemunreachable",
         "libnetd_client",
-        "libpowermanager",
         "libstagefright_httplive",
         "packagemanager_aidl-cpp",
         "libfakeservicemanager",
-        "libvibrator",
-        "libnbaio",
-        "libnblog",
-        "libpowermanager",
-        "libaudioprocessing",
-        "libaudioflinger",
         "libresourcemanagerservice",
         "libmediametricsservice",
         "mediametricsservice-aidl-cpp",
@@ -122,10 +110,6 @@
         "android.hardware.camera.device@3.4",
         "libaudiohal@7.0",
     ],
-    header_libs: [
-        "libaudiohal_headers",
-        "libaudioflinger_headers",
-    ],
 }
 
 cc_fuzz {
diff --git a/media/libstagefright/MediaCodec.cpp b/media/libstagefright/MediaCodec.cpp
index 04364a8..717106b 100644
--- a/media/libstagefright/MediaCodec.cpp
+++ b/media/libstagefright/MediaCodec.cpp
@@ -487,7 +487,7 @@
                                     .id = getId(mClient),
                                     .name = mCodecName,
                                     .importance = mImportance};
-        return std::move(clientInfo);
+        return clientInfo;
     }
 
 private:
diff --git a/media/libstagefright/data/media_codecs_sw.xml b/media/libstagefright/data/media_codecs_sw.xml
index d50bc1e..c2ff41f 100644
--- a/media/libstagefright/data/media_codecs_sw.xml
+++ b/media/libstagefright/data/media_codecs_sw.xml
@@ -332,7 +332,7 @@
                 <!-- profiles and levels:  ProfileBaseline : Level3 -->
                 <Limit name="block-count" range="1-1620" />
                 <Limit name="blocks-per-second" range="1-40500" />
-                <Limit name="bitrate" range="1-2000000" />
+                <Limit name="bitrate" range="1-10000000" />
             </Variant>
             <Feature name="intra-refresh" />
             <!-- Video Quality control -->
diff --git a/media/mediaserver/Android.bp b/media/mediaserver/Android.bp
index 2341af1..d0f1c1b 100644
--- a/media/mediaserver/Android.bp
+++ b/media/mediaserver/Android.bp
@@ -51,6 +51,11 @@
     ],
 }
 
+vintf_fragment {
+    name: "manifest_media_c2_software_aidl.xml",
+    src: "manifest_media_c2_software_aidl.xml",
+}
+
 mediaserver_cc_binary {
     name: "mediaserver",
 
@@ -92,7 +97,7 @@
     vintf_fragments: ["manifest_media_c2_software_hidl.xml"],
     product_variables: {
         release_aidl_use_unfrozen: {
-            vintf_fragments: ["manifest_media_c2_software_aidl.xml"],
+            vintf_fragment_modules: ["manifest_media_c2_software_aidl.xml"],
         },
     },
 
diff --git a/media/module/bufferpool/2.0/BufferPoolClient.cpp b/media/module/bufferpool/2.0/BufferPoolClient.cpp
index cda23ff..66d11fa 100644
--- a/media/module/bufferpool/2.0/BufferPoolClient.cpp
+++ b/media/module/bufferpool/2.0/BufferPoolClient.cpp
@@ -762,6 +762,10 @@
     } else {
         connection = mRemoteConnection;
     }
+    if (!connection) {
+        ALOGE("connection null: fetchBufferHandle()");
+        return ResultStatus::CRITICAL_ERROR;
+    }
     ResultStatus status;
     Return<void> transResult = connection->fetch(
             transactionId, bufferId,
diff --git a/media/utils/TimeCheck.cpp b/media/utils/TimeCheck.cpp
index ec68de7..658191e 100644
--- a/media/utils/TimeCheck.cpp
+++ b/media/utils/TimeCheck.cpp
@@ -23,6 +23,7 @@
 #include <android-base/logging.h>
 #include <android-base/strings.h>
 #include <audio_utils/clock.h>
+#include <cutils/properties.h>
 #include <mediautils/EventLog.h>
 #include <mediautils/FixedString.h>
 #include <mediautils/MethodStatistics.h>
@@ -36,6 +37,46 @@
 
 
 namespace android::mediautils {
+
+// Note: The sum of kDefaultTimeoutDurationMs and kDefaultSecondChanceDurationMs
+// should be no less than 2 seconds, otherwise spurious timeouts
+// may occur with system suspend.
+static constexpr int kDefaultTimeoutDurationMs = 3000;
+
+// Due to suspend abort not incrementing the monotonic clock,
+// we allow another second chance timeout after the first timeout expires.
+//
+// The total timeout is therefore kDefaultTimeoutDurationMs + kDefaultSecondChanceDurationMs,
+// and the result is more stable when the monotonic clock increments during suspend.
+//
+static constexpr int kDefaultSecondChanceDurationMs = 2000;
+
+/* static */
+TimeCheck::Duration TimeCheck::getDefaultTimeoutDuration() {
+    static constinit std::atomic<int> defaultTimeoutDurationMs{};
+    auto defaultMs = defaultTimeoutDurationMs.load(std::memory_order_relaxed);
+    if (defaultMs == 0) {
+        defaultMs = property_get_int32(
+                "audio.timecheck.timeout_duration_ms", kDefaultTimeoutDurationMs);
+        if (defaultMs < 1) defaultMs = kDefaultTimeoutDurationMs;
+        defaultTimeoutDurationMs.store(defaultMs, std::memory_order_relaxed);
+    }
+    return std::chrono::milliseconds(defaultMs);
+}
+
+/* static */
+TimeCheck::Duration TimeCheck::getDefaultSecondChanceDuration() {
+    static constinit std::atomic<int> defaultSecondChanceDurationMs{};
+    auto defaultMs = defaultSecondChanceDurationMs.load(std::memory_order_relaxed);
+    if (defaultMs == 0) {
+        defaultMs = property_get_int32(
+                "audio.timecheck.second_chance_duration_ms", kDefaultSecondChanceDurationMs);
+        if (defaultMs < 1) defaultMs = kDefaultSecondChanceDurationMs;
+        defaultSecondChanceDurationMs.store(defaultMs, std::memory_order_relaxed);
+    }
+    return std::chrono::milliseconds(defaultMs);
+}
+
 // This function appropriately signals a pid to dump a backtrace if we are
 // running on device (and the HAL exists). If we are not running on an Android
 // device, there is no HAL to signal (so we do nothing).
@@ -182,23 +223,25 @@
 
 /* static */
 std::string TimeCheck::analyzeTimeouts(
-        float requestedTimeoutMs, float elapsedSteadyMs, float elapsedSystemMs) {
+        float requestedTimeoutMs, float secondChanceMs,
+        float elapsedSteadyMs, float elapsedSystemMs) {
     // Track any OS clock issues with suspend.
     // It is possible that the elapsedSystemMs is much greater than elapsedSteadyMs if
     // a suspend occurs; however, we always expect the timeout ms should always be slightly
     // less than the elapsed steady ms regardless of whether a suspend occurs or not.
 
-    std::string s("Timeout ms ");
-    s.append(std::to_string(requestedTimeoutMs))
-        .append(" elapsed steady ms ").append(std::to_string(elapsedSteadyMs))
-        .append(" elapsed system ms ").append(std::to_string(elapsedSystemMs));
+    const float totalTimeoutMs = requestedTimeoutMs + secondChanceMs;
+    std::string s = std::format(
+            "Timeout ms {:.2f} ({:.2f} + {:.2f})"
+            " elapsed steady ms {:.4f} elapsed system ms {:.4f}",
+            totalTimeoutMs, requestedTimeoutMs, secondChanceMs, elapsedSteadyMs, elapsedSystemMs);
 
     // Is there something unusual?
     static constexpr float TOLERANCE_CONTEXT_SWITCH_MS = 200.f;
 
-    if (requestedTimeoutMs > elapsedSteadyMs || requestedTimeoutMs > elapsedSystemMs) {
+    if (totalTimeoutMs > elapsedSteadyMs || totalTimeoutMs > elapsedSystemMs) {
         s.append("\nError: early expiration - "
-                "requestedTimeoutMs should be less than elapsed time");
+                "totalTimeoutMs should be less than elapsed time");
     }
 
     if (elapsedSteadyMs > elapsedSystemMs + TOLERANCE_CONTEXT_SWITCH_MS) {
@@ -206,13 +249,13 @@
     }
 
     // This has been found in suspend stress testing.
-    if (elapsedSteadyMs > requestedTimeoutMs + TOLERANCE_CONTEXT_SWITCH_MS) {
+    if (elapsedSteadyMs > totalTimeoutMs + TOLERANCE_CONTEXT_SWITCH_MS) {
         s.append("\nWarning: steady time significantly exceeds timeout "
                 "- possible thread stall or aborted suspend");
     }
 
     // This has been found in suspend stress testing.
-    if (elapsedSystemMs > requestedTimeoutMs + TOLERANCE_CONTEXT_SWITCH_MS) {
+    if (elapsedSystemMs > totalTimeoutMs + TOLERANCE_CONTEXT_SWITCH_MS) {
         s.append("\nInformation: system time significantly exceeds timeout "
                 "- possible suspend");
     }
@@ -282,7 +325,7 @@
             .append(tag)
             .append(" scheduled ").append(formatTime(startSystemTime))
             .append(" on thread ").append(std::to_string(tid)).append("\n")
-            .append(analyzeTimeouts(requestedTimeoutMs + secondChanceMs,
+            .append(analyzeTimeouts(requestedTimeoutMs, secondChanceMs,
                     elapsedSteadyMs, elapsedSystemMs)).append("\n")
             .append(halPids).append("\n")
             .append(snapshotAnalysis.toString());
diff --git a/media/utils/include/mediautils/TimeCheck.h b/media/utils/include/mediautils/TimeCheck.h
index f1d572f..3e8d35d 100644
--- a/media/utils/include/mediautils/TimeCheck.h
+++ b/media/utils/include/mediautils/TimeCheck.h
@@ -42,19 +42,29 @@
     //  float elapsedMs (the elapsed time to this event).
     using OnTimerFunc = std::function<void(bool /* timeout */, float /* elapsedMs */ )>;
 
-    // The default timeout is chosen to be less than system server watchdog timeout
-    // Note: kDefaultTimeOutMs should be no less than 2 seconds, otherwise spurious timeouts
-    // may occur with system suspend.
-    static constexpr TimeCheck::Duration kDefaultTimeoutDuration = std::chrono::milliseconds(3000);
+    /**
+     * Returns the default timeout to use for TimeCheck.
+     *
+     * The default timeout of 3000ms (kDefaultTimeoutDurationMs) is chosen to be less than
+     * the system server watchdog timeout, and can be changed by the sysprop
+     * audio.timecheck.timeout_duration_ms.
+     * A second chance wait may be set to extend the check.
+     */
+    static TimeCheck::Duration getDefaultTimeoutDuration();
 
-    // Due to suspend abort not incrementing the monotonic clock,
-    // we allow another second chance timeout after the first timeout expires.
-    //
-    // The total timeout is therefore kDefaultTimeoutDuration + kDefaultSecondChanceDuration,
-    // and the result is more stable when the monotonic clock increments during suspend.
-    //
-    static constexpr TimeCheck::Duration kDefaultSecondChanceDuration =
-            std::chrono::milliseconds(2000);
+    /**
+     * Returns the second chance timeout to use for TimeCheck.
+     *
+     * Due to suspend abort not incrementing the monotonic clock,
+     * we allow another second chance timeout after the first timeout expires.
+     * The second chance timeout default of 2000ms (kDefaultSecondChanceDurationMs)
+     * may be changed by the sysprop audio.timecheck.second_chance_duration_ms.
+     *
+     * The total timeout is therefore
+     * getDefaultTimeoutDuration() + getDefaultSecondChanceDuration(),
+     * and the result is more stable when the monotonic clock increments during suspend.
+     */
+    static TimeCheck::Duration getDefaultSecondChanceDuration();
 
     /**
      * TimeCheck is a RAII object which will notify a callback
@@ -130,7 +140,8 @@
     // Returns a string that represents the timeout vs elapsed time,
     // and diagnostics if there are any potential issues.
     static std::string analyzeTimeouts(
-            float timeoutMs, float elapsedSteadyMs, float elapsedSystemMs);
+            float timeoutMs, float secondChanceMs,
+            float elapsedSteadyMs, float elapsedSystemMs);
 
     static TimerThread& getTimeCheckThread();
     static void accessAudioHalPids(std::vector<pid_t>* pids, bool update);
diff --git a/services/audioflinger/Android.bp b/services/audioflinger/Android.bp
index 00ac800..5b21aa1 100644
--- a/services/audioflinger/Android.bp
+++ b/services/audioflinger/Android.bp
@@ -181,11 +181,6 @@
         "libvibrator",
         "packagemanager_aidl-cpp",
     ],
-
-    static_libs: [
-        "libaudiospdif",
-        "libmedialogservice",
-    ],
 }
 
 cc_library {
@@ -217,15 +212,13 @@
     ],
 
     static_libs: [
+        "libaudiospdif",
         "libcpustats",
-        "libpermission",
+        "libmedialogservice",
     ],
 
     header_libs: [
         "libaaudio_headers",
-        "libaudioclient_headers",
-        "libaudiohal_headers",
-        "libaudioutils_headers",
         "libmedia_headers",
     ],
 
diff --git a/services/audioflinger/AudioFlinger.cpp b/services/audioflinger/AudioFlinger.cpp
index 1f4e16e..3b71758 100644
--- a/services/audioflinger/AudioFlinger.cpp
+++ b/services/audioflinger/AudioFlinger.cpp
@@ -4491,7 +4491,7 @@
             // TODO(b/184194057): Use the vibrator information from the vibrator that will be used
             // for the HapticGenerator.
             const std::optional<media::AudioVibratorInfo> defaultVibratorInfo =
-                    std::move(getDefaultVibratorInfo_l());
+                    getDefaultVibratorInfo_l();
             if (defaultVibratorInfo) {
                 // Only set the vibrator info when it is a valid one.
                 audio_utils::lock_guard _cl(chain->mutex());
@@ -5104,8 +5104,8 @@
         } else {
             getIAudioFlingerStatistics().event(code, elapsedMs);
         }
-    }, mediautils::TimeCheck::kDefaultTimeoutDuration,
-    mediautils::TimeCheck::kDefaultSecondChanceDuration,
+    }, mediautils::TimeCheck::getDefaultTimeoutDuration(),
+    mediautils::TimeCheck::getDefaultSecondChanceDuration(),
     true /* crashOnTimeout */);
 
     return delegate();
diff --git a/services/audioflinger/Threads.cpp b/services/audioflinger/Threads.cpp
index a9fd5ea..89a3951 100644
--- a/services/audioflinger/Threads.cpp
+++ b/services/audioflinger/Threads.cpp
@@ -1695,7 +1695,7 @@
             // TODO(b/184194057): Use the vibrator information from the vibrator that will be used
             // for the HapticGenerator.
             const std::optional<media::AudioVibratorInfo> defaultVibratorInfo =
-                    std::move(mAfThreadCallback->getDefaultVibratorInfo_l());
+                    mAfThreadCallback->getDefaultVibratorInfo_l();
             if (defaultVibratorInfo) {
                 audio_utils::lock_guard _cl(chain->mutex());
                 // Only set the vibrator info when it is a valid one.
@@ -2916,7 +2916,7 @@
                 // TODO(b/184194780): Use the vibrator information from the vibrator that will be
                 // used to play this track.
                  audio_utils::lock_guard _l(mAfThreadCallback->mutex());
-                vibratorInfo = std::move(mAfThreadCallback->getDefaultVibratorInfo_l());
+                vibratorInfo = mAfThreadCallback->getDefaultVibratorInfo_l();
             }
             mutex().lock();
             track->setHapticScale(hapticScale);
diff --git a/services/audiopolicy/fuzzer/aidl/Android.bp b/services/audiopolicy/fuzzer/aidl/Android.bp
index 0ae4dc5..680f76d 100644
--- a/services/audiopolicy/fuzzer/aidl/Android.bp
+++ b/services/audiopolicy/fuzzer/aidl/Android.bp
@@ -25,14 +25,7 @@
     shared_libs: [
         "libaudioflinger",
         "libaudiopolicyservice",
-        "libaudioprocessing",
-        "libhidlbase",
         "libmediaplayerservice",
-        "libnbaio",
-        "libnblog",
-        "libpowermanager",
-        "libvibrator",
-        "packagemanager_aidl-cpp",
     ],
     static_libs: [
         "libaudiomockhal",
@@ -63,6 +56,7 @@
         "latest_android_hardware_audio_core_ndk_shared",
         "latest_android_hardware_audio_core_sounddose_ndk_shared",
         "latest_android_hardware_audio_effect_ndk_shared",
+        "libaudioflinger_dependencies",
         "libaudiopolicyservice_dependencies",
         "service_fuzzer_defaults",
     ],
diff --git a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
index 811ca12..b7201f2 100644
--- a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
+++ b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
@@ -7127,7 +7127,8 @@
             if (status != OK) {
                 continue;
             }
-            if (client->getPrimaryMix() != primaryMix || client->hasLostPrimaryMix()) {
+            if ((client->getPrimaryMix() && client->getPrimaryMix() != primaryMix)
+                    || client->hasLostPrimaryMix()) {
                 if (desc->isStrategyActive(psId) && maxLatency < desc->latency()) {
                     maxLatency = desc->latency();
                 }
diff --git a/services/audiopolicy/service/AudioPolicyService.cpp b/services/audiopolicy/service/AudioPolicyService.cpp
index 7f0c902..5e8bf34 100644
--- a/services/audiopolicy/service/AudioPolicyService.cpp
+++ b/services/audiopolicy/service/AudioPolicyService.cpp
@@ -1380,8 +1380,8 @@
         } else {
             getIAudioPolicyServiceStatistics().event(code, elapsedMs);
         }
-    }, mediautils::TimeCheck::kDefaultTimeoutDuration,
-    mediautils::TimeCheck::kDefaultSecondChanceDuration,
+    }, mediautils::TimeCheck::getDefaultTimeoutDuration(),
+    mediautils::TimeCheck::getDefaultSecondChanceDuration(),
     true /* crashOnTimeout */);
 
     switch (code) {
diff --git a/services/audiopolicy/tests/Android.bp b/services/audiopolicy/tests/Android.bp
index 2f91e94..df0c3a1 100644
--- a/services/audiopolicy/tests/Android.bp
+++ b/services/audiopolicy/tests/Android.bp
@@ -12,6 +12,7 @@
     name: "audiopolicy_tests",
 
     defaults: [
+        "aconfig_lib_cc_shared_link.defaults",
         "latest_android_media_audio_common_types_cpp_static",
     ],
 
diff --git a/services/camera/libcameraservice/CameraService.cpp b/services/camera/libcameraservice/CameraService.cpp
index 2d55f39..d99962d 100644
--- a/services/camera/libcameraservice/CameraService.cpp
+++ b/services/camera/libcameraservice/CameraService.cpp
@@ -1570,7 +1570,7 @@
             s << ", " << std::to_string(i);
         }
     }
-    return std::move(s.str());
+    return s.str();
 }
 
 int32_t CameraService::mapToInterface(TorchModeStatus status) {
@@ -5155,7 +5155,7 @@
     }
     if (hasAny) ret << "\n";
     ret << "]\n";
-    return std::move(ret.str());
+    return ret.str();
 }
 
 CameraService::DescriptorPtr CameraService::CameraClientManager::makeClientDescriptor(
diff --git a/services/camera/libcameraservice/api1/Camera2Client.cpp b/services/camera/libcameraservice/api1/Camera2Client.cpp
index caa6424..6347863 100644
--- a/services/camera/libcameraservice/api1/Camera2Client.cpp
+++ b/services/camera/libcameraservice/api1/Camera2Client.cpp
@@ -419,7 +419,7 @@
         result << "    none\n";
     }
 
-    std::string resultStr = std::move(result.str());
+    std::string resultStr = result.str();
 
     write(fd, resultStr.c_str(), resultStr.size());
 
diff --git a/services/camera/libcameraservice/device3/Camera3BufferManager.cpp b/services/camera/libcameraservice/device3/Camera3BufferManager.cpp
index c42e51a..65fee7d 100644
--- a/services/camera/libcameraservice/device3/Camera3BufferManager.cpp
+++ b/services/camera/libcameraservice/device3/Camera3BufferManager.cpp
@@ -485,7 +485,7 @@
                     streamId, bufferCount);
         }
     }
-    std::string linesStr = std::move(lines.str());
+    std::string linesStr = lines.str();
     write(fd, linesStr.c_str(), linesStr.size());
 }
 
diff --git a/services/camera/libcameraservice/device3/Camera3IOStreamBase.cpp b/services/camera/libcameraservice/device3/Camera3IOStreamBase.cpp
index 152687e..13b97b6 100644
--- a/services/camera/libcameraservice/device3/Camera3IOStreamBase.cpp
+++ b/services/camera/libcameraservice/device3/Camera3IOStreamBase.cpp
@@ -104,7 +104,7 @@
     lines << fmt::sprintf("      Total buffers: %zu, currently dequeued: %zu, "
             "currently cached: %zu\n", mTotalBufferCount, mHandoutTotalBufferCount,
             mCachedOutputBufferCount);
-    std::string linesStr = std::move(lines.str());
+    std::string linesStr = lines.str();
     write(fd, linesStr.c_str(), linesStr.size());
 
     Camera3Stream::dump(fd, args);
diff --git a/services/camera/libcameraservice/utils/TagMonitor.cpp b/services/camera/libcameraservice/utils/TagMonitor.cpp
index 38de93a..5258c0e 100644
--- a/services/camera/libcameraservice/utils/TagMonitor.cpp
+++ b/services/camera/libcameraservice/utils/TagMonitor.cpp
@@ -384,7 +384,7 @@
         }
         returnStr << "]\n";
     }
-    return std::move(returnStr.str());
+    return returnStr.str();
 }
 
 template<typename T>
diff --git a/services/camera/virtualcamera/tests/VirtualCameraDeviceTest.cc b/services/camera/virtualcamera/tests/VirtualCameraDeviceTest.cc
index ad9d83b..b030b10 100644
--- a/services/camera/virtualcamera/tests/VirtualCameraDeviceTest.cc
+++ b/services/camera/virtualcamera/tests/VirtualCameraDeviceTest.cc
@@ -57,8 +57,6 @@
 constexpr int kCameraId = 42;
 constexpr int kQvgaWidth = 320;
 constexpr int kQvgaHeight = 240;
-constexpr int k360pWidth = 640;
-constexpr int k360pHeight = 360;
 constexpr int kVgaWidth = 640;
 constexpr int kVgaHeight = 480;
 constexpr int kHdWidth = 1280;
diff --git a/services/mediaresourcemanager/ResourceManagerMetrics.cpp b/services/mediaresourcemanager/ResourceManagerMetrics.cpp
index a8a1de1..8b3711c 100644
--- a/services/mediaresourcemanager/ResourceManagerMetrics.cpp
+++ b/services/mediaresourcemanager/ResourceManagerMetrics.cpp
@@ -821,7 +821,7 @@
     metricsLog += getAppsPixelCount(mProcessPixelsMap);
     metricsLog += getAppsCodecUsageMetrics(mProcessConcurrentCodecsMap);
 
-    return std::move(metricsLog);
+    return metricsLog;
 }
 
 } // namespace android
diff --git a/services/mediaresourcemanager/ResourceManagerServiceUtils.cpp b/services/mediaresourcemanager/ResourceManagerServiceUtils.cpp
index 679ab13..49f68e9 100644
--- a/services/mediaresourcemanager/ResourceManagerServiceUtils.cpp
+++ b/services/mediaresourcemanager/ResourceManagerServiceUtils.cpp
@@ -94,7 +94,7 @@
         str.append("\n");
     }
 
-    return std::move(str);
+    return str;
 }
 
 bool ResourceList::operator==(const ResourceList& rhs) const {