Merge "Remove test mode from VirtualCameraRenderThread." into main
diff --git a/camera/camera_platform.aconfig b/camera/camera_platform.aconfig
index ab161d6..fe10e12 100644
--- a/camera/camera_platform.aconfig
+++ b/camera/camera_platform.aconfig
@@ -237,3 +237,13 @@
         purpose: PURPOSE_BUGFIX
     }
 }
+
+flag {
+    namespace: "camera_platform"
+    name: "multi_res_raw_reprocessing"
+    description: "Allow multi-resolution raw reprocessing without reprocessing capability"
+    bug: "336922859"
+    metadata {
+        purpose: PURPOSE_BUGFIX
+    }
+}
diff --git a/cmds/screenrecord/screenrecord.cpp b/cmds/screenrecord/screenrecord.cpp
index 28670b1..03c765a 100644
--- a/cmds/screenrecord/screenrecord.cpp
+++ b/cmds/screenrecord/screenrecord.cpp
@@ -362,8 +362,8 @@
         const ui::DisplayState& displayState,
         const sp<IGraphicBufferProducer>& bufferProducer,
         sp<IBinder>* pDisplayHandle, sp<SurfaceControl>* mirrorRoot) {
-    sp<IBinder> dpy = SurfaceComposerClient::createDisplay(
-            String8("ScreenRecorder"), gSecureDisplay);
+    static const std::string kDisplayName("ScreenRecorder");
+    sp<IBinder> dpy = SurfaceComposerClient::createVirtualDisplay(kDisplayName, gSecureDisplay);
     SurfaceComposerClient::Transaction t;
     t.setDisplaySurface(dpy, bufferProducer);
     setDisplayProjection(t, dpy, displayState);
@@ -797,7 +797,7 @@
     sp<Overlay> overlay;
 
     ~RecordingData() {
-        if (dpy != nullptr) SurfaceComposerClient::destroyDisplay(dpy);
+        if (dpy != nullptr) SurfaceComposerClient::destroyVirtualDisplay(dpy);
         if (overlay != nullptr) overlay->stop();
         if (encoder != nullptr) {
             encoder->stop();
diff --git a/media/codec2/hal/plugin/FilterWrapper.cpp b/media/codec2/hal/plugin/FilterWrapper.cpp
index 197d6e7..b926150 100644
--- a/media/codec2/hal/plugin/FilterWrapper.cpp
+++ b/media/codec2/hal/plugin/FilterWrapper.cpp
@@ -49,11 +49,6 @@
             std::weak_ptr<FilterWrapper> filterWrapper)
         : mIntf(intf), mFilterWrapper(filterWrapper) {
         takeFilters(std::move(filters));
-        for (size_t i = 0; i < mFilters.size(); ++i) {
-            mControlParamTypes.insert(
-                    mFilters[i].desc.controlParams.begin(),
-                    mFilters[i].desc.controlParams.end());
-        }
     }
 
     ~WrappedDecoderInterface() override = default;
@@ -91,6 +86,12 @@
 
         // TODO: documentation
         mFilters = std::move(filters);
+        mControlParamTypes.clear();
+        for (size_t i = 0; i < mFilters.size(); ++i) {
+            mControlParamTypes.insert(
+                    mFilters[i].desc.controlParams.begin(),
+                    mFilters[i].desc.controlParams.end());
+        }
         mTypeToIndexForQuery.clear();
         mTypeToIndexForConfig.clear();
         for (size_t i = 0; i < mFilters.size(); ++i) {
diff --git a/media/codec2/sfplugin/CCodec.cpp b/media/codec2/sfplugin/CCodec.cpp
index 20b6d7f..a897fa0 100644
--- a/media/codec2/sfplugin/CCodec.cpp
+++ b/media/codec2/sfplugin/CCodec.cpp
@@ -2229,9 +2229,15 @@
     // See also b/300350761.
     //
     // The workaround is no longer needed with fetchGraphicBlock & C2Fence changes.
-    // so we are reverting back to the logical sequence of the operations.
+    // so we are reverting back to the logical sequence of the operations when
+    // AIDL HALs are selected.
+    // When the HIDL HALs are selected, we retain the workaround (the reversed
+    // order) by default in order to keep the legacy behavior.
+    bool stopHalBeforeSurface =
+            Codec2Client::IsAidlSelected() ||
+            property_get_bool("debug.codec2.stop_hal_before_surface", false);
     status_t err = C2_OK;
-    if (android::media::codec::provider_->stop_hal_before_surface()) {
+    if (stopHalBeforeSurface && android::media::codec::provider_->stop_hal_before_surface()) {
         err = comp->stop();
         mChannel->stopUseOutputSurface(pushBlankBuffer);
     } else {
@@ -2334,8 +2340,14 @@
     // See also b/300350761.
     //
     // The workaround is no longer needed with fetchGraphicBlock & C2Fence changes.
-    // so we are reverting back to the logical sequence of the operations.
-    if (android::media::codec::provider_->stop_hal_before_surface()) {
+    // so we are reverting back to the logical sequence of the operations when
+    // AIDL HALs are selected.
+    // When the HIDL HALs are selected, we retain the workaround (the reversed
+    // order) by default in order to keep the legacy behavior.
+    bool stopHalBeforeSurface =
+            Codec2Client::IsAidlSelected() ||
+            property_get_bool("debug.codec2.stop_hal_before_surface", false);
+    if (stopHalBeforeSurface && android::media::codec::provider_->stop_hal_before_surface()) {
         comp->release();
         mChannel->stopUseOutputSurface(pushBlankBuffer);
     } else {
diff --git a/media/codec2/sfplugin/CCodecBufferChannel.cpp b/media/codec2/sfplugin/CCodecBufferChannel.cpp
index 3984b83..c7ab82f 100644
--- a/media/codec2/sfplugin/CCodecBufferChannel.cpp
+++ b/media/codec2/sfplugin/CCodecBufferChannel.cpp
@@ -2784,7 +2784,16 @@
 }
 
 void CCodecBufferChannel::setInfoBuffer(const std::shared_ptr<C2InfoBuffer> &buffer) {
-    mInfoBuffers.push_back(buffer);
+    if (mInputSurface == nullptr) {
+        mInfoBuffers.push_back(buffer);
+    } else {
+        std::list<std::unique_ptr<C2Work>> items;
+        std::unique_ptr<C2Work> work(new C2Work);
+        work->input.infoBuffers.emplace_back(*buffer);
+        work->worklets.emplace_back(new C2Worklet);
+        items.push_back(std::move(work));
+        c2_status_t err = mComponent->queue(&items);
+    }
 }
 
 status_t toStatusT(c2_status_t c2s, c2_operation_t c2op) {
diff --git a/media/libaudioclient/Android.bp b/media/libaudioclient/Android.bp
index 04a8a45..e667964 100644
--- a/media/libaudioclient/Android.bp
+++ b/media/libaudioclient/Android.bp
@@ -315,6 +315,7 @@
         "aidl/android/media/DeviceConnectedState.aidl",
         "aidl/android/media/EffectDescriptor.aidl",
         "aidl/android/media/SurroundSoundConfig.aidl",
+        "aidl/android/media/TrackInternalMuteInfo.aidl",
         "aidl/android/media/TrackSecondaryOutputInfo.aidl",
     ],
     defaults: [
diff --git a/media/libaudioclient/IAudioFlinger.cpp b/media/libaudioclient/IAudioFlinger.cpp
index aa5c840..cf3b43a 100644
--- a/media/libaudioclient/IAudioFlinger.cpp
+++ b/media/libaudioclient/IAudioFlinger.cpp
@@ -918,6 +918,11 @@
     return OK;
 }
 
+status_t AudioFlingerClientAdapter::setTracksInternalMute(
+        const std::vector<media::TrackInternalMuteInfo>& tracksInternalMuted) {
+    return statusTFromBinderStatus(mDelegate->setTracksInternalMute(tracksInternalMuted));
+}
+
 ////////////////////////////////////////////////////////////////////////////////////////////////////
 // AudioFlingerServerAdapter
 AudioFlingerServerAdapter::AudioFlingerServerAdapter(
@@ -1477,4 +1482,9 @@
     return Status::ok();
 }
 
+Status AudioFlingerServerAdapter::setTracksInternalMute(
+        const std::vector<media::TrackInternalMuteInfo>& tracksInternalMute) {
+    return Status::fromStatusT(mDelegate->setTracksInternalMute(tracksInternalMute));
+}
+
 } // namespace android
diff --git a/media/libaudioclient/aidl/android/media/IAudioFlingerService.aidl b/media/libaudioclient/aidl/android/media/IAudioFlingerService.aidl
index 31d3af5..e8fcf77 100644
--- a/media/libaudioclient/aidl/android/media/IAudioFlingerService.aidl
+++ b/media/libaudioclient/aidl/android/media/IAudioFlingerService.aidl
@@ -41,6 +41,7 @@
 import android.media.ISoundDoseCallback;
 import android.media.MicrophoneInfoFw;
 import android.media.RenderPosition;
+import android.media.TrackInternalMuteInfo;
 import android.media.TrackSecondaryOutputInfo;
 import android.media.audio.common.AudioChannelLayout;
 import android.media.audio.common.AudioFormatDescription;
@@ -293,6 +294,11 @@
      */
     AudioPortFw getAudioMixPort(in AudioPortFw devicePort, in AudioPortFw mixPort);
 
+    /**
+     * Set internal mute for a list of tracks.
+     */
+    void setTracksInternalMute(in TrackInternalMuteInfo[] tracksInternalMute);
+
     // When adding a new method, please review and update
     // IAudioFlinger.h AudioFlingerServerAdapter::Delegate::TransactionCode
     // AudioFlinger.cpp AudioFlinger::onTransactWrapper()
diff --git a/media/libaudioclient/aidl/android/media/TrackInternalMuteInfo.aidl b/media/libaudioclient/aidl/android/media/TrackInternalMuteInfo.aidl
new file mode 100644
index 0000000..05b1fa4
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/TrackInternalMuteInfo.aidl
@@ -0,0 +1,23 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+parcelable TrackInternalMuteInfo {
+    /* Interpreted as audio_port_handle_t. */
+    int portId;
+    boolean muted;
+}
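
The setTracksInternalMute() entry point introduced in this change (plumbed through IAudioFlinger below) takes a vector of these parcelables. A minimal caller-side sketch, assuming an already-obtained sp<IAudioFlinger> proxy and a placeholder port id; this is illustrative only and not part of the change:

    #include <vector>
    #include <android/media/TrackInternalMuteInfo.h>
    #include <media/IAudioFlinger.h>

    // Sketch only: mute a single track by its port id through the new entry point.
    // 'af' is an already-obtained IAudioFlinger proxy; '42' is a placeholder port id.
    android::status_t muteOneTrack(const android::sp<android::IAudioFlinger>& af) {
        std::vector<android::media::TrackInternalMuteInfo> tracks(1);
        tracks[0].portId = 42;   // interpreted as audio_port_handle_t by AudioFlinger
        tracks[0].muted = true;
        return af->setTracksInternalMute(tracks);
    }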
diff --git a/media/libaudioclient/include/media/IAudioFlinger.h b/media/libaudioclient/include/media/IAudioFlinger.h
index 5a1e037..860a0bc 100644
--- a/media/libaudioclient/include/media/IAudioFlinger.h
+++ b/media/libaudioclient/include/media/IAudioFlinger.h
@@ -60,6 +60,7 @@
 #include "android/media/OpenInputResponse.h"
 #include "android/media/OpenOutputRequest.h"
 #include "android/media/OpenOutputResponse.h"
+#include "android/media/TrackInternalMuteInfo.h"
 #include "android/media/TrackSecondaryOutputInfo.h"
 
 namespace android {
@@ -388,6 +389,9 @@
 
     virtual status_t getAudioMixPort(const struct audio_port_v7 *devicePort,
                                      struct audio_port_v7 *mixPort) const = 0;
+
+    virtual status_t setTracksInternalMute(
+            const std::vector<media::TrackInternalMuteInfo>& tracksInternalMute) = 0;
 };
 
 /**
@@ -504,6 +508,8 @@
     status_t getAudioPolicyConfig(media::AudioPolicyConfig* output) override;
     status_t getAudioMixPort(const struct audio_port_v7 *devicePort,
                              struct audio_port_v7 *mixPort) const override;
+    status_t setTracksInternalMute(
+            const std::vector<media::TrackInternalMuteInfo>& tracksInternalMute) override;
 
 private:
     const sp<media::IAudioFlingerService> mDelegate;
@@ -606,6 +612,7 @@
             GET_AUDIO_POLICY_CONFIG =
                     media::BnAudioFlingerService::TRANSACTION_getAudioPolicyConfig,
             GET_AUDIO_MIX_PORT = media::BnAudioFlingerService::TRANSACTION_getAudioMixPort,
+            SET_TRACKS_INTERNAL_MUTE = media::BnAudioFlingerService::TRANSACTION_setTracksInternalMute,
         };
 
     protected:
@@ -742,6 +749,8 @@
     Status getAudioMixPort(const media::AudioPortFw& devicePort,
                            const media::AudioPortFw& mixPort,
                            media::AudioPortFw* _aidl_return) override;
+    Status setTracksInternalMute(
+            const std::vector<media::TrackInternalMuteInfo>& tracksInternalMute) override;
 private:
     const sp<AudioFlingerServerAdapter::Delegate> mDelegate;
 };
diff --git a/media/libaudiohal/impl/StreamHalAidl.cpp b/media/libaudiohal/impl/StreamHalAidl.cpp
index 5aff5f2..84c1a8a 100644
--- a/media/libaudiohal/impl/StreamHalAidl.cpp
+++ b/media/libaudiohal/impl/StreamHalAidl.cpp
@@ -285,11 +285,12 @@
     return OK;
 }
 
-status_t StreamHalAidl::getObservablePosition(int64_t *frames, int64_t *timestamp) {
+status_t StreamHalAidl::getObservablePosition(int64_t* frames, int64_t* timestamp,
+        StatePositions* statePositions) {
     ALOGV("%p %s::%s", this, getClassName().c_str(), __func__);
     if (!mStream) return NO_INIT;
     StreamDescriptor::Reply reply;
-    RETURN_STATUS_IF_ERROR(updateCountersIfNeeded(&reply));
+    RETURN_STATUS_IF_ERROR(updateCountersIfNeeded(&reply, statePositions));
     *frames = std::max<int64_t>(0, reply.observable.frames);
     *timestamp = std::max<int64_t>(0, reply.observable.timeNs);
     return OK;
@@ -446,8 +447,12 @@
     if (auto state = getState(); state == StreamDescriptor::State::DRAINING) {
         // Retrieve the current state together with position counters unconditionally
         // to ensure that the state on our side gets updated.
-        sendCommand(makeHalCommand<HalCommand::Tag::getStatus>(),
-                nullptr, true /*safeFromNonWorkerThread */);
+        sendCommand(makeHalCommand<HalCommand::Tag::getStatus>(), nullptr,
+                    true /*safeFromNonWorkerThread */);
+        // For compatibility with HIDL behavior, apply a "soft" position reset
+        // after receiving the "drain ready" callback.
+        std::lock_guard l(mLock);
+        mStatePositions.framesAtFlushOrDrain = mLastReply.observable.frames;
     } else {
         ALOGW("%s: unexpected onDrainReady in the state %s", __func__, toString(state).c_str());
     }
@@ -455,15 +460,8 @@
 
 void StreamHalAidl::onAsyncError() {
     std::lock_guard l(mLock);
-    if (mLastReply.state == StreamDescriptor::State::IDLE ||
-        mLastReply.state == StreamDescriptor::State::DRAINING ||
-        mLastReply.state == StreamDescriptor::State::TRANSFERRING) {
-        mLastReply.state = StreamDescriptor::State::ERROR;
-        ALOGW("%s: onError received", __func__);
-    } else {
-        ALOGW("%s: unexpected onError in the state %s", __func__,
-                toString(mLastReply.state).c_str());
-    }
+    ALOGW("%s: received in the state %s", __func__, toString(mLastReply.state).c_str());
+    mLastReply.state = StreamDescriptor::State::ERROR;
 }
 
 status_t StreamHalAidl::createMmapBuffer(int32_t minSizeFrames __unused,
@@ -514,9 +512,9 @@
 }
 
 status_t StreamHalAidl::sendCommand(
-        const ::aidl::android::hardware::audio::core::StreamDescriptor::Command &command,
+        const ::aidl::android::hardware::audio::core::StreamDescriptor::Command& command,
         ::aidl::android::hardware::audio::core::StreamDescriptor::Reply* reply,
-        bool safeFromNonWorkerThread) {
+        bool safeFromNonWorkerThread, StatePositions* statePositions) {
     // TIME_CHECK();  // TODO(b/243839867) reenable only when optimized.
     if (!safeFromNonWorkerThread) {
         const pid_t workerTid = mWorkerTid.load(std::memory_order_acquire);
@@ -548,6 +546,23 @@
             }
             mLastReply = *reply;
             mLastReplyExpirationNs = uptimeNanos() + mLastReplyLifeTimeNs;
+            if (!mIsInput && reply->status == STATUS_OK) {
+                if (command.getTag() == StreamDescriptor::Command::standby &&
+                        reply->state == StreamDescriptor::State::STANDBY) {
+                    mStatePositions.framesAtStandby = reply->observable.frames;
+                } else if (command.getTag() == StreamDescriptor::Command::flush &&
+                           reply->state == StreamDescriptor::State::IDLE) {
+                    mStatePositions.framesAtFlushOrDrain = reply->observable.frames;
+                } else if (!mContext.isAsynchronous() &&
+                        command.getTag() == StreamDescriptor::Command::drain &&
+                        (reply->state == StreamDescriptor::State::IDLE ||
+                                reply->state == StreamDescriptor::State::DRAINING)) {
+                    mStatePositions.framesAtFlushOrDrain = reply->observable.frames;
+                } // for asynchronous drain, the frame count is saved in 'onAsyncDrainReady'
+            }
+            if (statePositions != nullptr) {
+                *statePositions = mStatePositions;
+            }
         }
     }
     switch (reply->status) {
@@ -563,7 +578,8 @@
 }
 
 status_t StreamHalAidl::updateCountersIfNeeded(
-        ::aidl::android::hardware::audio::core::StreamDescriptor::Reply* reply) {
+        ::aidl::android::hardware::audio::core::StreamDescriptor::Reply* reply,
+        StatePositions* statePositions) {
     bool doUpdate = false;
     {
         std::lock_guard l(mLock);
@@ -573,10 +589,13 @@
         // Since updates are paced, it is OK to perform them from any thread, they should
         // not interfere with I/O operations of the worker.
         return sendCommand(makeHalCommand<HalCommand::Tag::getStatus>(),
-                reply, true /*safeFromNonWorkerThread */);
+                reply, true /*safeFromNonWorkerThread */, statePositions);
     } else if (reply != nullptr) {  // provide cached reply
         std::lock_guard l(mLock);
         *reply = mLastReply;
+        if (statePositions != nullptr) {
+            *statePositions = mStatePositions;
+        }
     }
     return OK;
 }
@@ -668,8 +687,19 @@
         return BAD_VALUE;
     }
     int64_t aidlFrames = 0, aidlTimestamp = 0;
-    RETURN_STATUS_IF_ERROR(getObservablePosition(&aidlFrames, &aidlTimestamp));
-    *dspFrames = aidlFrames;
+    StatePositions statePositions{};
+    RETURN_STATUS_IF_ERROR(
+            getObservablePosition(&aidlFrames, &aidlTimestamp, &statePositions));
+    // Number of audio frames since the stream has exited standby.
+    // See the table at the start of 'StreamOutHalInterface' for when it needs to reset.
+    int64_t mostRecentResetPoint;
+    if (!mContext.isAsynchronous() && audio_has_proportional_frames(mConfig.format)) {
+        mostRecentResetPoint = statePositions.framesAtStandby;
+    } else {
+        mostRecentResetPoint =
+                std::max(statePositions.framesAtStandby, statePositions.framesAtFlushOrDrain);
+    }
+    *dspFrames = aidlFrames <= mostRecentResetPoint ? 0 : aidlFrames - mostRecentResetPoint;
     return OK;
 }
 
@@ -726,8 +756,16 @@
         return BAD_VALUE;
     }
     int64_t aidlFrames = 0, aidlTimestamp = 0;
-    RETURN_STATUS_IF_ERROR(getObservablePosition(&aidlFrames, &aidlTimestamp));
-    *frames = aidlFrames;
+    StatePositions statePositions{};
+    RETURN_STATUS_IF_ERROR(getObservablePosition(&aidlFrames, &aidlTimestamp, &statePositions));
+    // See the table at the start of 'StreamHalInterface'.
+    if (!mContext.isAsynchronous() && audio_has_proportional_frames(mConfig.format)) {
+        *frames = aidlFrames;
+    } else {
+        const int64_t mostRecentResetPoint =
+                std::max(statePositions.framesAtStandby, statePositions.framesAtFlushOrDrain);
+        *frames = aidlFrames <= mostRecentResetPoint ? 0 : aidlFrames - mostRecentResetPoint;
+    }
     timestamp->tv_sec = aidlTimestamp / NANOS_PER_SECOND;
     timestamp->tv_nsec = aidlTimestamp - timestamp->tv_sec * NANOS_PER_SECOND;
     return OK;
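
Both position queries above rebase the observable frame count against the most recent reset point and clamp at zero. A self-contained sketch of that arithmetic, assuming only the StatePositions shape introduced in StreamHalAidl.h below:

    #include <algorithm>
    #include <cstdint>

    struct StatePositions {               // mirrors StreamHalAidl::StatePositions
        int64_t framesAtFlushOrDrain;     // observable frames captured on flush / drain
        int64_t framesAtStandby;          // observable frames captured on standby
    };

    // Observable frame count rebased against the most recent reset point,
    // clamped at zero, as done for non-proportional (e.g. offloaded) streams above.
    int64_t framesSinceMostRecentReset(int64_t observableFrames, const StatePositions& p) {
        const int64_t mostRecentResetPoint =
                std::max(p.framesAtStandby, p.framesAtFlushOrDrain);
        return observableFrames <= mostRecentResetPoint
                ? 0 : observableFrames - mostRecentResetPoint;
    }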
diff --git a/media/libaudiohal/impl/StreamHalAidl.h b/media/libaudiohal/impl/StreamHalAidl.h
index 8a398d8..fff7a92 100644
--- a/media/libaudiohal/impl/StreamHalAidl.h
+++ b/media/libaudiohal/impl/StreamHalAidl.h
@@ -194,6 +194,11 @@
     // For tests.
     friend class sp<StreamHalAidl>;
 
+    struct StatePositions {
+        int64_t framesAtFlushOrDrain;
+        int64_t framesAtStandby;
+    };
+
     template<class T>
     static std::shared_ptr<::aidl::android::hardware::audio::core::IStreamCommon> getStreamCommon(
             const std::shared_ptr<T>& stream);
@@ -212,7 +217,8 @@
     status_t getLatency(uint32_t *latency);
 
     // Always returns non-negative values.
-    status_t getObservablePosition(int64_t *frames, int64_t *timestamp);
+    status_t getObservablePosition(int64_t* frames, int64_t* timestamp,
+            StatePositions* statePositions = nullptr);
 
     // Always returns non-negative values.
     status_t getHardwarePosition(int64_t *frames, int64_t *timestamp);
@@ -268,11 +274,13 @@
     // Note: Since `sendCommand` takes mLock while holding mCommandReplyLock, never call
     // it with `mLock` being held.
     status_t sendCommand(
-            const ::aidl::android::hardware::audio::core::StreamDescriptor::Command &command,
+            const ::aidl::android::hardware::audio::core::StreamDescriptor::Command& command,
             ::aidl::android::hardware::audio::core::StreamDescriptor::Reply* reply = nullptr,
-            bool safeFromNonWorkerThread = false);
+            bool safeFromNonWorkerThread = false,
+            StatePositions* statePositions = nullptr);
     status_t updateCountersIfNeeded(
-            ::aidl::android::hardware::audio::core::StreamDescriptor::Reply* reply = nullptr);
+            ::aidl::android::hardware::audio::core::StreamDescriptor::Reply* reply = nullptr,
+            StatePositions* statePositions = nullptr);
 
     const std::shared_ptr<::aidl::android::hardware::audio::core::IStreamCommon> mStream;
     const std::shared_ptr<::aidl::android::media::audio::IHalAdapterVendorExtension> mVendorExt;
@@ -280,6 +288,9 @@
     std::mutex mLock;
     ::aidl::android::hardware::audio::core::StreamDescriptor::Reply mLastReply GUARDED_BY(mLock);
     int64_t mLastReplyExpirationNs GUARDED_BY(mLock) = 0;
+    // Cached values of observable positions when the stream last entered certain state.
+    // Updated for output streams only.
+    StatePositions mStatePositions GUARDED_BY(mLock) = {};
     // mStreamPowerLog is used for audio signal power logging.
     StreamPowerLog mStreamPowerLog;
     std::atomic<pid_t> mWorkerTid = -1;
@@ -328,10 +339,14 @@
     // Requests notification when data buffered by the driver/hardware has been played.
     status_t drain(bool earlyNotify) override;
 
-    // Notifies to the audio driver to flush the queued data.
+    // Notifies to the audio driver to flush (that is, drop) the queued data. Stream must
+    // already be paused before calling 'flush'.
     status_t flush() override;
 
     // Return a recent count of the number of audio frames presented to an external observer.
+    // This excludes frames which have been written but are still in the pipeline. See the
+    // table at the start of the 'StreamOutHalInterface' for the specification of the frame
+    // count behavior w.r.t. 'flush', 'drain' and 'standby' operations.
     status_t getPresentationPosition(uint64_t *frames, struct timespec *timestamp) override;
 
     // Notifies the HAL layer that the framework considers the current playback as completed.
@@ -413,6 +428,7 @@
 
     // Return a recent count of the number of audio frames received and
     // the clock time associated with that frame count.
+    // The count must not reset to zero when a PCM input enters standby.
     status_t getCapturePosition(int64_t *frames, int64_t *time) override;
 
     // Get active microphones
diff --git a/media/libaudiohal/impl/StreamHalHidl.h b/media/libaudiohal/impl/StreamHalHidl.h
index 80379d0..433e0a3 100644
--- a/media/libaudiohal/impl/StreamHalHidl.h
+++ b/media/libaudiohal/impl/StreamHalHidl.h
@@ -161,10 +161,14 @@
     // Requests notification when data buffered by the driver/hardware has been played.
     virtual status_t drain(bool earlyNotify);
 
-    // Notifies to the audio driver to flush the queued data.
+    // Notifies to the audio driver to flush (that is, drop) the queued data. Stream must
+    // already be paused before calling 'flush'.
     virtual status_t flush();
 
     // Return a recent count of the number of audio frames presented to an external observer.
+    // This excludes frames which have been written but are still in the pipeline. See the
+    // table at the start of the 'StreamOutHalInterface' for the specification of the frame
+    // count behavior w.r.t. 'flush', 'drain' and 'standby' operations.
     virtual status_t getPresentationPosition(uint64_t *frames, struct timespec *timestamp);
 
     // Notifies the HAL layer that the framework considers the current playback as completed.
@@ -259,6 +263,7 @@
 
     // Return a recent count of the number of audio frames received and
     // the clock time associated with that frame count.
+    // The count must not reset to zero when a PCM input enters standby.
     virtual status_t getCapturePosition(int64_t *frames, int64_t *time);
 
     // Get active microphones
diff --git a/media/libaudiohal/include/media/audiohal/StreamHalInterface.h b/media/libaudiohal/include/media/audiohal/StreamHalInterface.h
index eb14f6b..585a895 100644
--- a/media/libaudiohal/include/media/audiohal/StreamHalInterface.h
+++ b/media/libaudiohal/include/media/audiohal/StreamHalInterface.h
@@ -135,6 +135,38 @@
     virtual ~StreamOutHalInterfaceLatencyModeCallback() = default;
 };
 
+/**
+ * On position reporting. There are two methods: 'getRenderPosition' and
+ * 'getPresentationPosition'. The first difference is that they may have a
+ * time offset because "render" position relates to what happens between
+ * ADSP and DAC, while "observable" position is relative to the external
+ * observer. The second difference is that 'getRenderPosition' always
+ * resets on standby (for all types of stream data) according to its
+ * definition. Since the original C definition of 'getRenderPosition' used
+ * 32-bit frame counters, and also because in complex playback chains that
+ * include wireless devices the "observable" position has more practical
+ * meaning, 'getRenderPosition' does not exist in the AIDL HAL interface.
+ * The table below summarizes frame count behavior for 'getPresentationPosition':
+ *
+ *               | Mixed      | Direct       | Direct
+ *               |            | non-offload  | offload
+ * ==============|============|==============|==============
+ *  PCM and      | Continuous |              |
+ *  encapsulated |            |              |
+ *  bitstream    |            |              |
+ * --------------|------------| Continuous†  |
+ *  Bitstream    |            |              | Reset on
+ *  encapsulated |            |              | flush, drain
+ *  into PCM     |            |              | and standby
+ *               | Not        |              |
+ * --------------| supported  |--------------|
+ *  Bitstream    |            | Reset on     |
+ *               |            | flush, drain |
+ *               |            | and standby  |
+ *               |            |              |
+ *
+ * † - on standby, reset of the frame count happens at the framework level.
+ */
 class StreamOutHalInterface : public virtual StreamHalInterface {
   public:
     // Return the audio hardware driver estimated latency in milliseconds.
@@ -173,10 +205,14 @@
     // Requests notification when data buffered by the driver/hardware has been played.
     virtual status_t drain(bool earlyNotify) = 0;
 
-    // Notifies to the audio driver to flush the queued data.
+    // Notifies to the audio driver to flush (that is, drop) the queued data. Stream must
+    // already be paused before calling 'flush'.
     virtual status_t flush() = 0;
 
     // Return a recent count of the number of audio frames presented to an external observer.
+    // This excludes frames which have been written but are still in the pipeline. See the
+    // table at the start of the 'StreamOutHalInterface' for the specification of the frame
+    // count behavior w.r.t. 'flush', 'drain' and 'standby' operations.
     virtual status_t getPresentationPosition(uint64_t *frames, struct timespec *timestamp) = 0;
 
     // Notifies the HAL layer that the framework considers the current playback as completed.
@@ -270,6 +306,7 @@
 
     // Return a recent count of the number of audio frames received and
     // the clock time associated with that frame count.
+    // The count must not reset to zero when a PCM input enters standby.
     virtual status_t getCapturePosition(int64_t *frames, int64_t *time) = 0;
 
     // Get active microphones
diff --git a/media/libmediaplayerservice/fuzzer/Android.bp b/media/libmediaplayerservice/fuzzer/Android.bp
index 7a1411d..0031855 100644
--- a/media/libmediaplayerservice/fuzzer/Android.bp
+++ b/media/libmediaplayerservice/fuzzer/Android.bp
@@ -44,7 +44,7 @@
     ],
     fuzz_config: {
         cc: [
-            "android-media-fuzzing-reports@google.com",
+            "android-media-playback+bugs@google.com",
         ],
         componentid: 155276,
         hotlists: [
diff --git a/media/libstagefright/data/media_codecs_sw.xml b/media/libstagefright/data/media_codecs_sw.xml
index d50bc1e..6fb9232 100644
--- a/media/libstagefright/data/media_codecs_sw.xml
+++ b/media/libstagefright/data/media_codecs_sw.xml
@@ -203,8 +203,7 @@
             <Feature name="dynamic-color-aspects" />
             <Attribute name="software-codec" />
         </MediaCodec>
-        <MediaCodec name="c2.android.av1.decoder" type="video/av01" variant="slow-cpu,!slow-cpu">
-            <!-- TODO: implement a mechanism to prevent AV1 Decoder usage on pre-U devices -->
+        <MediaCodec name="c2.android.av1-dav1d.decoder" type="video/av01" variant="slow-cpu,!slow-cpu">
             <Limit name="alignment" value="1x1" />
             <Limit name="block-size" value="16x16" />
             <Variant name="!slow-cpu">
@@ -224,7 +223,8 @@
             <Feature name="low-latency" />
             <Attribute name="software-codec" />
         </MediaCodec>
-        <MediaCodec name="c2.android.av1-dav1d.decoder" type="video/av01" variant="slow-cpu,!slow-cpu" rank="1024">
+        <MediaCodec name="c2.android.av1.decoder" type="video/av01" variant="slow-cpu,!slow-cpu" rank="1024">
+            <!-- TODO: implement a mechanism to prevent AV1 Decoder usage on pre-U devices -->
             <Limit name="alignment" value="1x1" />
             <Limit name="block-size" value="16x16" />
             <Variant name="!slow-cpu">
diff --git a/media/module/extractors/mp4/MPEG4Extractor.cpp b/media/module/extractors/mp4/MPEG4Extractor.cpp
index cb2994e..b3707c8 100644
--- a/media/module/extractors/mp4/MPEG4Extractor.cpp
+++ b/media/module/extractors/mp4/MPEG4Extractor.cpp
@@ -1615,6 +1615,39 @@
 
             mLastTrack->timescale = ntohl(timescale);
 
+            // 14496-12 says all ones means indeterminate, but some files seem to use
+            // 0 instead. We treat both the same.
+            int64_t duration = 0;
+            if (version == 1) {
+                if (mDataSource->readAt(
+                            timescale_offset + 4, &duration, sizeof(duration))
+                        < (ssize_t)sizeof(duration)) {
+                    return ERROR_IO;
+                }
+                if (duration != -1) {
+                    duration = ntoh64(duration);
+                }
+            } else {
+                uint32_t duration32;
+                if (mDataSource->readAt(
+                            timescale_offset + 4, &duration32, sizeof(duration32))
+                        < (ssize_t)sizeof(duration32)) {
+                    return ERROR_IO;
+                }
+                if (duration32 != 0xffffffff) {
+                    duration = ntohl(duration32);
+                }
+            }
+            if (duration != 0 && mLastTrack->timescale != 0) {
+                long double durationUs = ((long double)duration * 1000000) / mLastTrack->timescale;
+                if (durationUs < 0 || durationUs > INT64_MAX) {
+                    ALOGE("cannot represent %lld * 1000000 / %lld in 64 bits",
+                          (long long) duration, (long long) mLastTrack->timescale);
+                    return ERROR_MALFORMED;
+                }
+                AMediaFormat_setInt64(mLastTrack->meta, AMEDIAFORMAT_KEY_DURATION, durationUs);
+            }
+
             uint8_t lang[2];
             off64_t lang_offset;
             if (version == 1) {
@@ -3874,18 +3907,17 @@
     }
 
     int32_t id;
-    int64_t duration;
 
     if (version == 1) {
         // we can get ctime value from U64_AT(&buffer[4])
         // we can get mtime value from U64_AT(&buffer[12])
         id = U32_AT(&buffer[20]);
-        duration = U64_AT(&buffer[28]);
+        // we can get duration value from U64_AT(&buffer[28])
     } else if (version == 0) {
         // we can get ctime value from U32_AT(&buffer[4])
         // we can get mtime value from U32_AT(&buffer[8])
         id = U32_AT(&buffer[12]);
-        duration = U32_AT(&buffer[20]);
+        // we can get duration value from U32_AT(&buffer[20])
     } else {
         return ERROR_UNSUPPORTED;
     }
@@ -3894,15 +3926,6 @@
         return ERROR_MALFORMED;
 
     AMediaFormat_setInt32(mLastTrack->meta, AMEDIAFORMAT_KEY_TRACK_ID, id);
-    if (duration != 0 && mHeaderTimescale != 0) {
-        long double durationUs = ((long double)duration * 1000000) / mHeaderTimescale;
-        if (durationUs < 0 || durationUs > INT64_MAX) {
-            ALOGE("cannot represent %lld * 1000000 / %lld in 64 bits",
-                  (long long) duration, (long long) mHeaderTimescale);
-            return ERROR_MALFORMED;
-        }
-        AMediaFormat_setInt64(mLastTrack->meta, AMEDIAFORMAT_KEY_DURATION, durationUs);
-    }
 
     size_t matrixOffset = dynSize + 16;
     int32_t a00 = U32_AT(&buffer[matrixOffset]);
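
The mdhd-based duration added above (replacing the tkhd-based value removed in the later hunks) reduces to a timescale-to-microseconds conversion with an overflow guard. A standalone sketch of just that conversion, returning -1 where the extractor returns ERROR_MALFORMED:

    #include <cstdint>
    #include <limits>

    // Media duration expressed in 'timescale' units -> microseconds.
    // Returns 0 for an unknown duration and -1 on overflow.
    int64_t mediaDurationUs(int64_t duration, uint32_t timescale) {
        if (duration == 0 || timescale == 0) return 0;
        const long double durationUs = ((long double)duration * 1000000) / timescale;
        if (durationUs < 0 || durationUs > (long double)std::numeric_limits<int64_t>::max()) {
            return -1;
        }
        return (int64_t)durationUs;
    }
    // e.g. duration = 441000 at timescale 44100 -> 10000000 us (10 seconds).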
diff --git a/media/ndk/Android.bp b/media/ndk/Android.bp
index 9ec7700..3d873df 100644
--- a/media/ndk/Android.bp
+++ b/media/ndk/Android.bp
@@ -192,7 +192,6 @@
     header_libs: [
         "libstagefright_headers",
         "libmedia_headers",
-        "libstagefright_headers",
     ],
 
     shared_libs: [
diff --git a/services/audioflinger/AudioFlinger.cpp b/services/audioflinger/AudioFlinger.cpp
index d5d778f..af67a56 100644
--- a/services/audioflinger/AudioFlinger.cpp
+++ b/services/audioflinger/AudioFlinger.cpp
@@ -1935,10 +1935,11 @@
     if (mPrimaryHardwareDev == nullptr) {
         return 0;
     }
+    if (mInputBufferSizeOrderedDevs.empty()) {
+        return 0;
+    }
     mHardwareStatus = AUDIO_HW_GET_INPUT_BUFFER_SIZE;
 
-    sp<DeviceHalInterface> dev = mPrimaryHardwareDev.load()->hwDevice();
-
     std::vector<audio_channel_mask_t> channelMasks = {channelMask};
     if (channelMask != AUDIO_CHANNEL_IN_MONO) {
         channelMasks.push_back(AUDIO_CHANNEL_IN_MONO);
@@ -1968,6 +1969,22 @@
 
     mHardwareStatus = AUDIO_HW_IDLE;
 
+    auto getInputBufferSize = [](const sp<DeviceHalInterface>& dev, audio_config_t config,
+                                 size_t* bytes) -> status_t {
+        if (!dev) {
+            return BAD_VALUE;
+        }
+        status_t result = dev->getInputBufferSize(&config, bytes);
+        if (result == BAD_VALUE) {
+            // Retry with the config suggested by the HAL.
+            result = dev->getInputBufferSize(&config, bytes);
+        }
+        if (result != OK || *bytes == 0) {
+            return BAD_VALUE;
+        }
+        return result;
+    };
+
     // Change parameters of the configuration each iteration until we find a
     // configuration that the device will support, or HAL suggests what it supports.
     audio_config_t config = AUDIO_CONFIG_INITIALIZER;
@@ -1979,16 +1996,15 @@
                 config.sample_rate = testSampleRate;
 
                 size_t bytes = 0;
-                audio_config_t loopConfig = config;
-                status_t result = dev->getInputBufferSize(&config, &bytes);
-                if (result == BAD_VALUE) {
-                    // Retry with the config suggested by the HAL.
-                    result = dev->getInputBufferSize(&config, &bytes);
+                ret = BAD_VALUE;
+                for (const AudioHwDevice* dev : mInputBufferSizeOrderedDevs) {
+                    ret = getInputBufferSize(dev->hwDevice(), config, &bytes);
+                    if (ret == OK) {
+                        break;
+                    }
                 }
-                if (result != OK || bytes == 0) {
-                    config = loopConfig;
-                    continue;
-                }
+                if (ret == BAD_VALUE) continue;
+
                 if (config.sample_rate != sampleRate || config.channel_mask != channelMask ||
                     config.format != format) {
                     uint32_t dstChannelCount = audio_channel_count_from_in_mask(channelMask);
@@ -2610,12 +2626,43 @@
     }
 
     mAudioHwDevs.add(handle, audioDevice);
+    if (strcmp(name, AUDIO_HARDWARE_MODULE_ID_STUB) != 0) {
+        mInputBufferSizeOrderedDevs.insert(audioDevice);
+    }
 
     ALOGI("loadHwModule() Loaded %s audio interface, handle %d", name, handle);
 
     return audioDevice;
 }
 
+// Sort the AudioHwDevices so that the getInputBufferSize call traverses them in the following order:
+// Primary, Usb, Bluetooth, A2DP, other modules, remote submix.
+/* static */
+bool AudioFlinger::inputBufferSizeDevsCmp(const AudioHwDevice* lhs, const AudioHwDevice* rhs) {
+    static const std::map<std::string_view, int> kPriorities = {
+        { AUDIO_HARDWARE_MODULE_ID_PRIMARY, 0 }, { AUDIO_HARDWARE_MODULE_ID_USB, 1 },
+        { AUDIO_HARDWARE_MODULE_ID_BLUETOOTH, 2 }, { AUDIO_HARDWARE_MODULE_ID_A2DP, 3 },
+        { AUDIO_HARDWARE_MODULE_ID_REMOTE_SUBMIX, std::numeric_limits<int>::max() }
+    };
+
+    const std::string_view lhsName = lhs->moduleName();
+    const std::string_view rhsName = rhs->moduleName();
+
+    auto lhsPriority = std::numeric_limits<int>::max() - 1;
+    if (const auto lhsIt = kPriorities.find(lhsName); lhsIt != kPriorities.end()) {
+        lhsPriority = lhsIt->second;
+    }
+    auto rhsPriority = std::numeric_limits<int>::max() - 1;
+    if (const auto rhsIt = kPriorities.find(rhsName); rhsIt != kPriorities.end()) {
+        rhsPriority = rhsIt->second;
+    }
+
+    if (lhsPriority != rhsPriority) {
+        return lhsPriority < rhsPriority;
+    }
+    return lhsName < rhsName;
+}
+
 // ----------------------------------------------------------------------------
 
 uint32_t AudioFlinger::getPrimaryOutputSamplingRate() const
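
The ordering that inputBufferSizeDevsCmp() imposes on mInputBufferSizeOrderedDevs above can be reproduced with a plain std::set over module-name strings. An illustrative, self-contained sketch using string literals in place of the AUDIO_HARDWARE_MODULE_ID_* constants:

    #include <iostream>
    #include <limits>
    #include <map>
    #include <set>
    #include <string>
    #include <string_view>

    // Priority order: primary, usb, bluetooth, a2dp, other modules (alphabetically),
    // remote submix last; ties are broken by name.
    static bool moduleCmp(const std::string& lhs, const std::string& rhs) {
        static const std::map<std::string_view, int> kPriorities = {
            {"primary", 0}, {"usb", 1}, {"bluetooth", 2}, {"a2dp", 3},
            {"r_submix", std::numeric_limits<int>::max()},
        };
        const auto prio = [](std::string_view name) {
            const auto it = kPriorities.find(name);
            return it != kPriorities.end() ? it->second
                                           : std::numeric_limits<int>::max() - 1;
        };
        const int lp = prio(lhs), rp = prio(rhs);
        return lp != rp ? lp < rp : lhs < rhs;
    }

    int main() {
        std::set<std::string, decltype(&moduleCmp)> devs(moduleCmp);
        devs.insert({"r_submix", "msd", "a2dp", "primary", "usb"});
        for (const auto& name : devs) std::cout << name << '\n';  // primary, usb, a2dp, msd, r_submix
    }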
@@ -4915,6 +4962,23 @@
     return mPatchPanel->getAudioMixPort_l(devicePort, mixPort);
 }
 
+status_t AudioFlinger::setTracksInternalMute(
+        const std::vector<media::TrackInternalMuteInfo>& tracksInternalMute) {
+    audio_utils::lock_guard _l(mutex());
+    ALOGV("%s", __func__);
+
+    std::map<audio_port_handle_t, bool> tracksInternalMuteMap;
+    for (const auto& trackInternalMute : tracksInternalMute) {
+        audio_port_handle_t portId = VALUE_OR_RETURN_STATUS(
+                aidl2legacy_int32_t_audio_port_handle_t(trackInternalMute.portId));
+        tracksInternalMuteMap.emplace(portId, trackInternalMute.muted);
+    }
+    for (size_t i = 0; i < mPlaybackThreads.size() && !tracksInternalMuteMap.empty(); i++) {
+        mPlaybackThreads.valueAt(i)->setTracksInternalMute(&tracksInternalMuteMap);
+    }
+    return NO_ERROR;
+}
+
 // ----------------------------------------------------------------------------
 
 status_t AudioFlinger::onTransactWrapper(TransactionCode code,
@@ -4949,6 +5013,7 @@
         case TransactionCode::INVALIDATE_TRACKS:
         case TransactionCode::GET_AUDIO_POLICY_CONFIG:
         case TransactionCode::GET_AUDIO_MIX_PORT:
+        case TransactionCode::SET_TRACKS_INTERNAL_MUTE:
             ALOGW("%s: transaction %d received from PID %d",
                   __func__, static_cast<int>(code), IPCThreadState::self()->getCallingPid());
             // return status only for non void methods
diff --git a/services/audioflinger/AudioFlinger.h b/services/audioflinger/AudioFlinger.h
index 4e46bea..4711dc5 100644
--- a/services/audioflinger/AudioFlinger.h
+++ b/services/audioflinger/AudioFlinger.h
@@ -259,6 +259,10 @@
     status_t getAudioMixPort(const struct audio_port_v7* devicePort,
                              struct audio_port_v7* mixPort) const final EXCLUDES_AudioFlinger_Mutex;
 
+    status_t setTracksInternalMute(
+            const std::vector<media::TrackInternalMuteInfo>& tracksInternalMute) final
+            EXCLUDES_AudioFlinger_Mutex;
+
     status_t onTransactWrapper(TransactionCode code, const Parcel& data, uint32_t flags,
             const std::function<status_t()>& delegate) final EXCLUDES_AudioFlinger_Mutex;
 
@@ -630,6 +634,10 @@
     DefaultKeyedVector<audio_module_handle_t, AudioHwDevice*> mAudioHwDevs
             GUARDED_BY(hardwareMutex()) {nullptr /* defValue */};
 
+    static bool inputBufferSizeDevsCmp(const AudioHwDevice* lhs, const AudioHwDevice* rhs);
+    std::set<AudioHwDevice*, decltype(&inputBufferSizeDevsCmp)>
+            mInputBufferSizeOrderedDevs GUARDED_BY(hardwareMutex()) {inputBufferSizeDevsCmp};
+
      const sp<DevicesFactoryHalInterface> mDevicesFactoryHal =
              DevicesFactoryHalInterface::create();
      /* const */ sp<DevicesFactoryHalCallback> mDevicesFactoryHalCallback;  // set onFirstRef().
diff --git a/services/audioflinger/IAfThread.h b/services/audioflinger/IAfThread.h
index 4b6ab89..204cc63 100644
--- a/services/audioflinger/IAfThread.h
+++ b/services/audioflinger/IAfThread.h
@@ -536,6 +536,9 @@
     virtual const std::atomic<int64_t>& framesWritten() const = 0;
 
     virtual bool usesHwAvSync() const = 0;
+
+    virtual void setTracksInternalMute(std::map<audio_port_handle_t, bool>* tracksInternalMute)
+            EXCLUDES_ThreadBase_Mutex = 0;
 };
 
 class IAfDirectOutputThread : public virtual IAfPlaybackThread {
diff --git a/services/audioflinger/IAfTrack.h b/services/audioflinger/IAfTrack.h
index a0d20ac..168bcca 100644
--- a/services/audioflinger/IAfTrack.h
+++ b/services/audioflinger/IAfTrack.h
@@ -427,6 +427,10 @@
     virtual FillingStatus& fillingStatus() = 0;
     virtual int8_t& retryCount() = 0;
     virtual FastTrackUnderruns& fastTrackUnderruns() = 0;
+
+    // Internal mute; currently only used for bit-perfect playback
+    virtual bool getInternalMute() const = 0;
+    virtual void setInternalMute(bool muted) = 0;
 };
 
 // playback track, used by DuplicatingThread
diff --git a/services/audioflinger/PlaybackTracks.h b/services/audioflinger/PlaybackTracks.h
index 89ee5c0..1e1b6d2 100644
--- a/services/audioflinger/PlaybackTracks.h
+++ b/services/audioflinger/PlaybackTracks.h
@@ -220,6 +220,8 @@
      */
     void processMuteEvent_l(const sp<IAudioManager>& audioManager, mute_state_t muteState) final;
 
+    bool getInternalMute() const final { return mInternalMute; }
+    void setInternalMute(bool muted) final { mInternalMute = muted; }
 protected:
 
     DISALLOW_COPY_AND_ASSIGN(Track);
@@ -401,6 +403,8 @@
     // access these two variables only when holding player thread lock.
     std::unique_ptr<os::PersistableBundle> mMuteEventExtras;
     mute_state_t        mMuteState;
+
+    bool                mInternalMute = false;
 };  // end of Track
 
 
diff --git a/services/audioflinger/Threads.cpp b/services/audioflinger/Threads.cpp
index 7c248dc..d59697d 100644
--- a/services/audioflinger/Threads.cpp
+++ b/services/audioflinger/Threads.cpp
@@ -33,6 +33,7 @@
 #include <afutils/Vibrator.h>
 #include <audio_utils/MelProcessor.h>
 #include <audio_utils/Metadata.h>
+#include <com_android_media_audioserver.h>
 #ifdef DEBUG_CPU_USAGE
 #include <audio_utils/Statistics.h>
 #include <cpustats/ThreadCpuUsage.h>
@@ -2696,14 +2697,17 @@
             }
         }
 
-        // Set DIRECT flag if current thread is DirectOutputThread. This can
-        // happen when the playback is rerouted to direct output thread by
+        // Set DIRECT/OFFLOAD flag if current thread is DirectOutputThread/OffloadThread.
+        // This can happen when the playback is rerouted to direct output/offload thread by
         // dynamic audio policy.
         // Do NOT report the flag changes back to client, since the client
-        // doesn't explicitly request a direct flag.
+        // doesn't explicitly request a direct/offload flag.
         audio_output_flags_t trackFlags = *flags;
         if (mType == DIRECT) {
             trackFlags = static_cast<audio_output_flags_t>(trackFlags | AUDIO_OUTPUT_FLAG_DIRECT);
+        } else if (mType == OFFLOAD) {
+            trackFlags = static_cast<audio_output_flags_t>(trackFlags |
+                                   AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD | AUDIO_OUTPUT_FLAG_DIRECT);
         }
         *afTrackFlags = trackFlags;
 
@@ -5771,6 +5775,11 @@
                 vlf *= volume;
                 vrf *= volume;
 
+                if (track->getInternalMute()) {
+                    vlf = 0.f;
+                    vrf = 0.f;
+                }
+
                 track->setFinalVolume(vlf, vrf);
                 ++fastTracks;
             } else {
@@ -5970,6 +5979,11 @@
                 vaf = v * sendLevel * (1. / MAX_GAIN_INT);
             }
 
+            if (track->getInternalMute()) {
+                vrf = 0.f;
+                vlf = 0.f;
+            }
+
             track->setFinalVolume(vlf, vrf);
 
             // Delegate volume control to effect in track effect chain if needed
@@ -11353,14 +11367,15 @@
     // If there is only one active track and it is bit-perfect, enable tee buffer.
     float volumeLeft = 1.0f;
     float volumeRight = 1.0f;
-    if (mActiveTracks.size() == 1 && mActiveTracks[0]->isBitPerfect()) {
-        const int trackId = mActiveTracks[0]->id();
+    if (sp<IAfTrack> bitPerfectTrack = getTrackToStreamBitPerfectly_l();
+        bitPerfectTrack != nullptr) {
+        const int trackId = bitPerfectTrack->id();
         mAudioMixer->setParameter(
                     trackId, AudioMixer::TRACK, AudioMixer::TEE_BUFFER, (void *)mSinkBuffer);
         mAudioMixer->setParameter(
                     trackId, AudioMixer::TRACK, AudioMixer::TEE_BUFFER_FRAME_COUNT,
                     (void *)(uintptr_t)mNormalFrameCount);
-        mActiveTracks[0]->getFinalVolume(&volumeLeft, &volumeRight);
+        bitPerfectTrack->getFinalVolume(&volumeLeft, &volumeRight);
         mIsBitPerfect = true;
     } else {
         mIsBitPerfect = false;
@@ -11385,4 +11400,39 @@
     mHasDataCopiedToSinkBuffer = mIsBitPerfect;
 }
 
+void BitPerfectThread::setTracksInternalMute(
+        std::map<audio_port_handle_t, bool>* tracksInternalMute) {
+    for (auto& track : mTracks) {
+        if (auto it = tracksInternalMute->find(track->portId()); it != tracksInternalMute->end()) {
+            track->setInternalMute(it->second);
+            tracksInternalMute->erase(it);
+        }
+    }
+}
+
+sp<IAfTrack> BitPerfectThread::getTrackToStreamBitPerfectly_l() {
+    if (com::android::media::audioserver::
+                fix_concurrent_playback_behavior_with_bit_perfect_client()) {
+        sp<IAfTrack> bitPerfectTrack = nullptr;
+        bool allOtherTracksMuted = true;
+        // Return the bit perfect track if all other tracks are muted
+        for (const auto& track : mActiveTracks) {
+            if (track->isBitPerfect()) {
+                bitPerfectTrack = track;
+            } else if (track->getFinalVolume() != 0.f) {
+                allOtherTracksMuted = false;
+                if (bitPerfectTrack != nullptr) {
+                    break;
+                }
+            }
+        }
+        return allOtherTracksMuted ? bitPerfectTrack : nullptr;
+    } else {
+        if (mActiveTracks.size() == 1 && mActiveTracks[0]->isBitPerfect()) {
+            return mActiveTracks[0];
+        }
+    }
+    return nullptr;
+}
+
 } // namespace android
diff --git a/services/audioflinger/Threads.h b/services/audioflinger/Threads.h
index 86e1894..7e9bef1 100644
--- a/services/audioflinger/Threads.h
+++ b/services/audioflinger/Threads.h
@@ -1200,6 +1200,11 @@
                     }
                     return mHalStarted;
                 }
+
+    void setTracksInternalMute(std::map<audio_port_handle_t, bool>* /* tracksInternalMute */)
+            override EXCLUDES_ThreadBase_Mutex {
+        // Do nothing. This is only used by the bit-perfect thread.
+    }
 protected:
     // updated by readOutputParameters_l()
     size_t                          mNormalFrameCount;  // normal mixer and effects
@@ -2449,12 +2454,17 @@
     BitPerfectThread(const sp<IAfThreadCallback>& afThreadCallback, AudioStreamOut *output,
                      audio_io_handle_t id, bool systemReady);
 
+    void setTracksInternalMute(std::map<audio_port_handle_t, bool>* tracksInternalMuted)
+            final EXCLUDES_ThreadBase_Mutex;
+
 protected:
     mixer_state prepareTracks_l(Vector<sp<IAfTrack>>* tracksToRemove) final
             REQUIRES(mutex(), ThreadBase_ThreadLoop);
     void threadLoop_mix() final REQUIRES(ThreadBase_ThreadLoop);
 
 private:
+    sp<IAfTrack> getTrackToStreamBitPerfectly_l() REQUIRES(mutex());
+
     // These variables are only accessed on the threadLoop; hence need no mutex.
     bool mIsBitPerfect GUARDED_BY(ThreadBase_ThreadLoop) = false;
     float mVolumeLeft GUARDED_BY(ThreadBase_ThreadLoop) = 0.f;
diff --git a/services/audioflinger/Tracks.cpp b/services/audioflinger/Tracks.cpp
index c40f795..f8cfc12 100644
--- a/services/audioflinger/Tracks.cpp
+++ b/services/audioflinger/Tracks.cpp
@@ -913,7 +913,7 @@
                         "  Format Chn mask  SRate "
                         "ST Usg CT "
                         " G db  L dB  R dB  VS dB "
-                        "  Server FrmCnt  FrmRdy F Underruns  Flushed BitPerfect"
+                        "  Server FrmCnt  FrmRdy F Underruns  Flushed BitPerfect InternalMute"
                         "%s\n",
                         isServerLatencySupported() ? "   Latency" : "");
 }
@@ -999,7 +999,7 @@
                         "%08X %08X %6u "
                         "%2u %3x %2x "
                         "%5.2g %5.2g %5.2g %5.2g%c "
-                        "%08X %6zu%c %6zu %c %9u%c %7u %10s",
+                        "%08X %6zu%c %6zu %c %9u%c %7u %10s %12s",
             active ? "yes" : "no",
             (mClient == 0) ? getpid() : mClient->pid(),
             mSessionId,
@@ -1029,7 +1029,8 @@
             mAudioTrackServerProxy->getUnderrunFrames(),
             nowInUnderrun,
             (unsigned)mAudioTrackServerProxy->framesFlushed() % 10000000,
-            isBitPerfect() ? "true" : "false"
+            isBitPerfect() ? "true" : "false",
+            getInternalMute() ? "true" : "false"
             );
 
     if (isServerLatencySupported()) {
diff --git a/services/audiopolicy/AudioPolicyInterface.h b/services/audiopolicy/AudioPolicyInterface.h
index 8f17ffc..de6eed8 100644
--- a/services/audiopolicy/AudioPolicyInterface.h
+++ b/services/audiopolicy/AudioPolicyInterface.h
@@ -18,6 +18,7 @@
 #define ANDROID_AUDIOPOLICY_INTERFACE_H
 
 #include <android/media/DeviceConnectedState.h>
+#include <android/media/TrackInternalMuteInfo.h>
 #include <media/AudioCommonTypes.h>
 #include <media/AudioContainers.h>
 #include <media/AudioDeviceTypeAddr.h>
@@ -585,6 +586,9 @@
     // Get the attributes of the mix port when connecting to the given device port.
     virtual status_t getAudioMixPort(const struct audio_port_v7 *devicePort,
                                      struct audio_port_v7 *mixPort) = 0;
+
+    virtual status_t setTracksInternalMute(
+            const std::vector<media::TrackInternalMuteInfo>& tracksInternalMute) = 0;
 };
 
     // These are the signatures of createAudioPolicyManager/destroyAudioPolicyManager
diff --git a/services/audiopolicy/common/include/policy.h b/services/audiopolicy/common/include/policy.h
index 4643bd1..04d5362 100644
--- a/services/audiopolicy/common/include/policy.h
+++ b/services/audiopolicy/common/include/policy.h
@@ -17,6 +17,7 @@
 #pragma once
 
 #include <system/audio.h>
+#include <set>
 #include <vector>
 
 #include <media/AudioContainers.h>
@@ -27,6 +28,10 @@
 
 static const audio_attributes_t defaultAttr = AUDIO_ATTRIBUTES_INITIALIZER;
 
+static const std::set<audio_usage_t > gHighPriorityUseCases = {
+        AUDIO_USAGE_ALARM, AUDIO_USAGE_NOTIFICATION_TELEPHONY_RINGTONE
+};
+
 } // namespace android
 
 static const audio_format_t gDynamicFormat = AUDIO_FORMAT_DEFAULT;
diff --git a/services/audiopolicy/common/managerdefinitions/include/AudioOutputDescriptor.h b/services/audiopolicy/common/managerdefinitions/include/AudioOutputDescriptor.h
index 00958aa..914f3fe 100644
--- a/services/audiopolicy/common/managerdefinitions/include/AudioOutputDescriptor.h
+++ b/services/audiopolicy/common/managerdefinitions/include/AudioOutputDescriptor.h
@@ -29,6 +29,7 @@
 #include "ClientDescriptor.h"
 #include "DeviceDescriptor.h"
 #include "PolicyAudioPort.h"
+#include "PreferredMixerAttributesInfo.h"
 #include <vector>
 
 namespace android {
@@ -477,6 +478,16 @@
 
     PortHandleVector getClientsForStream(audio_stream_type_t streamType) const;
 
+    bool isBitPerfect() const {
+        return (getFlags().output & AUDIO_OUTPUT_FLAG_BIT_PERFECT) != AUDIO_OUTPUT_FLAG_NONE;
+    }
+
+    /**
+     * Return true if there is any client with the given usage active on the given device.
+     * When the given device is null, any active client with the given usage counts.
+     */
+    bool isUsageActiveOnDevice(audio_usage_t usage, sp<DeviceDescriptor> device) const;
+
     virtual std::string info() const override;
 
     const sp<IOProfile> mProfile;          // I/O profile this output derives from
@@ -489,7 +500,7 @@
     audio_session_t mDirectClientSession; // session id of the direct output client
     bool mPendingReopenToQueryProfiles = false;
     audio_channel_mask_t mMixerChannelMask = AUDIO_CHANNEL_NONE;
-    bool mUsePreferredMixerAttributes = false;
+    sp<PreferredMixerAttributesInfo> mPreferredAttrInfo = nullptr;
 };
 
 // Audio output driven by an input device directly.
@@ -616,6 +627,8 @@
      */
     bool isAnyDeviceTypeActive(const DeviceTypeSet& deviceTypes) const;
 
+    bool isUsageActiveOnDevice(audio_usage_t usage, sp<DeviceDescriptor> device) const;
+
     void dump(String8 *dst) const;
 };
 
diff --git a/services/audiopolicy/common/managerdefinitions/include/ClientDescriptor.h b/services/audiopolicy/common/managerdefinitions/include/ClientDescriptor.h
index fe90a1e..a596c43 100644
--- a/services/audiopolicy/common/managerdefinitions/include/ClientDescriptor.h
+++ b/services/audiopolicy/common/managerdefinitions/include/ClientDescriptor.h
@@ -165,6 +165,18 @@
         mIsInvalid = true;
     }
 
+    bool getInternalMute() const { return mInternalMute; }
+
+    /**
+     * Set the internal mute for a client. Return true if the existing value is different from
+     * the given value.
+     */
+    bool setInternalMute(bool muted) {
+        const bool result = (mInternalMute != muted);
+        mInternalMute = muted;
+        return result;
+    }
+
 private:
     const audio_stream_type_t mStream;
     const product_strategy_t mStrategy;
@@ -178,6 +190,7 @@
      */
     uint32_t mActivityCount = 0;
     bool mIsInvalid = false;
+    bool mInternalMute = false;
 };
 
 class RecordClientDescriptor: public ClientDescriptor
diff --git a/services/audiopolicy/common/managerdefinitions/include/PreferredMixerAttributesInfo.h b/services/audiopolicy/common/managerdefinitions/include/PreferredMixerAttributesInfo.h
index 9472481..a493e3c 100644
--- a/services/audiopolicy/common/managerdefinitions/include/PreferredMixerAttributesInfo.h
+++ b/services/audiopolicy/common/managerdefinitions/include/PreferredMixerAttributesInfo.h
@@ -44,6 +44,17 @@
 
     void increaseActiveClient() { mActiveClientsCount++; }
     void decreaseActiveClient() { mActiveClientsCount--; }
+    void resetActiveClient() { mActiveClientsCount = 0; }
+
+    bool isBitPerfect() const {
+        return (getFlags() & AUDIO_OUTPUT_FLAG_BIT_PERFECT) != AUDIO_OUTPUT_FLAG_NONE;
+    }
+
+    bool configMatches(const audio_config_t& config) const {
+        return config.format == mMixerAttributes.config.format &&
+                config.channel_mask == mMixerAttributes.config.channel_mask &&
+                config.sample_rate == mMixerAttributes.config.sample_rate;
+    }
 
     void dump(String8 *dst);
 
diff --git a/services/audiopolicy/common/managerdefinitions/src/AudioOutputDescriptor.cpp b/services/audiopolicy/common/managerdefinitions/src/AudioOutputDescriptor.cpp
index 6537a00..68b2e7b 100644
--- a/services/audiopolicy/common/managerdefinitions/src/AudioOutputDescriptor.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/AudioOutputDescriptor.cpp
@@ -561,6 +561,7 @@
             audio_port_config config = {};
             devicePort->toAudioPortConfig(&config);
             config.config_mask = AUDIO_PORT_CONFIG_GAIN;
+            config.gain.mode = gains[0]->getMode();
             config.gain.values[0] = gainValueMb;
             return mClientInterface->setAudioPortConfig(&config, 0) == NO_ERROR;
         }
@@ -791,6 +792,16 @@
     mDevices = devices;
 }
 
+bool SwAudioOutputDescriptor::isUsageActiveOnDevice(audio_usage_t usage,
+                                                    sp<android::DeviceDescriptor> device) const {
+    if (device != nullptr && !mDevices.contains(device)) {
+        return false;
+    }
+    return std::any_of(mActiveClients.begin(), mActiveClients.end(),
+                       [usage](sp<TrackClientDescriptor> client) {
+                           return client->attributes().usage == usage; });
+}
+
 // HwAudioOutputDescriptor implementation
 HwAudioOutputDescriptor::HwAudioOutputDescriptor(const sp<SourceClientDescriptor>& source,
                                                  AudioPolicyClientInterface *clientInterface)
@@ -1016,6 +1027,17 @@
     return clientsForStream;
 }
 
+bool SwAudioOutputCollection::isUsageActiveOnDevice(audio_usage_t usage,
+                                                    sp<android::DeviceDescriptor> device) const {
+    for (size_t i = 0; i < this->size(); i++) {
+        const sp<SwAudioOutputDescriptor> outputDesc = this->valueAt(i);
+        if (outputDesc->isUsageActiveOnDevice(usage, device)) {
+            return true;
+        }
+    }
+    return false;
+}
+
 std::string SwAudioOutputDescriptor::info() const {
     std::string result;
     result.append("[" );
diff --git a/services/audiopolicy/common/managerdefinitions/src/ClientDescriptor.cpp b/services/audiopolicy/common/managerdefinitions/src/ClientDescriptor.cpp
index 2aee501..667c189 100644
--- a/services/audiopolicy/common/managerdefinitions/src/ClientDescriptor.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/ClientDescriptor.cpp
@@ -57,8 +57,8 @@
 void TrackClientDescriptor::dump(String8 *dst, int spaces) const
 {
     ClientDescriptor::dump(dst, spaces);
-    dst->appendFormat("%*sStream: %d; Flags: %08x; Refcount: %d\n", spaces, "",
-            mStream, mFlags, mActivityCount);
+    dst->appendFormat("%*sStream: %d; Flags: %08x; Refcount: %d; InternalMute: %s\n",
+            spaces, "", mStream, mFlags, mActivityCount, mInternalMute ? "Yes" : "No");
     dst->appendFormat("%*sDAP Primary Mix: %p\n", spaces, "", mPrimaryMix.promote().get());
     if (!mSecondaryOutputs.empty()) {
         dst->appendFormat("%*sDAP Secondary Outputs: ", spaces - 2, "");
diff --git a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
index 7f4be79..3e693ec 100644
--- a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
+++ b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
@@ -338,7 +338,7 @@
                         && (!device_distinguishes_on_address(device->type())
                                 // always force when disconnecting (a non-duplicated device)
                                 || (state == AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE));
-                if (desc->mUsePreferredMixerAttributes && newDevices != desc->devices()) {
+                if (desc->mPreferredAttrInfo != nullptr && newDevices != desc->devices()) {
                     // If the device is using preferred mixer attributes, the output need to reopen
                     // with default configuration when the new selected devices are different from
                     // current routing devices
@@ -917,15 +917,15 @@
     for (size_t i = 0; i < mOutputs.size(); i++) {
         sp<SwAudioOutputDescriptor> desc = mOutputs.valueAt(i);
         DeviceVector newDevices = getNewOutputDevices(desc, true /*fromCache*/);
+        if (state != AUDIO_MODE_NORMAL && oldState == AUDIO_MODE_NORMAL
+                && desc->mPreferredAttrInfo != nullptr) {
+            // If the output is using preferred mixer attributes and the audio mode is not normal,
+            // the output needs to be reopened with the default configuration.
+            outputsToReopen.emplace(mOutputs.keyAt(i), newDevices);
+            continue;
+        }
         if (state != AUDIO_MODE_IN_CALL || (desc != mPrimaryOutput && !isTelephonyRxOrTx(desc))) {
             bool forceRouting = !newDevices.isEmpty();
-            if (desc->mUsePreferredMixerAttributes && newDevices != desc->devices()) {
-                // If the device is using preferred mixer attributes, the output need to reopen
-                // with default configuration when the new selected devices are different from
-                // current routing devices.
-                outputsToReopen.emplace(mOutputs.keyAt(i), newDevices);
-                continue;
-            }
             setOutputDevices(__func__, desc, newDevices, forceRouting, 0 /*delayMs*/, nullptr,
                              true /*requiresMuteCheck*/, !forceRouting /*requiresVolumeCheck*/);
         }
@@ -1337,23 +1337,40 @@
             // Only use preferred mixer if the uid matches or the preferred mixer is bit-perfect
             // and it is currently active.
             if (info != nullptr && info->getUid() != uid &&
-                ((info->getFlags() & AUDIO_OUTPUT_FLAG_BIT_PERFECT) == AUDIO_OUTPUT_FLAG_NONE ||
-                        info->getActiveClientCount() == 0)) {
+                (!info->isBitPerfect() || info->getActiveClientCount() == 0)) {
                 info = nullptr;
             }
+            if (com::android::media::audioserver::
+                    fix_concurrent_playback_behavior_with_bit_perfect_client()) {
+                if (info != nullptr && info->getUid() == uid &&
+                    info->configMatches(*config) &&
+                    (mEngine->getPhoneState() != AUDIO_MODE_NORMAL ||
+                            std::any_of(gHighPriorityUseCases.begin(), gHighPriorityUseCases.end(),
+                                        [this, &outputDevices](audio_usage_t usage) {
+                                            return mOutputs.isUsageActiveOnDevice(
+                                                    usage, outputDevices[0]); }))) {
+                    // Bit-perfect request is not allowed when the phone mode is not normal or
+                    // any higher priority use case is active.
+                    return INVALID_OPERATION;
+                }
+            }
         }
         *output = getOutputForDevices(outputDevices, session, resultAttr, config,
                 flags, isSpatialized, info, resultAttr->flags & AUDIO_FLAG_MUTE_HAPTIC);
         // The client will be active if the client is currently preferred mixer owner and the
         // requested configuration matches the preferred mixer configuration.
         *isBitPerfect = (info != nullptr
-                && (info->getFlags() & AUDIO_OUTPUT_FLAG_BIT_PERFECT) != AUDIO_OUTPUT_FLAG_NONE
+                && info->isBitPerfect()
                 && info->getUid() == uid
                 && *output != AUDIO_IO_HANDLE_NONE
                 // When bit-perfect output is selected for the preferred mixer attributes owner,
                 // only need to consider the config matches.
                 && mOutputs.valueFor(*output)->isConfigurationMatched(
                         clientConfig, AUDIO_OUTPUT_FLAG_NONE));
+
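+        // When the bit-perfect path is selected, advertise it through the returned output flags
+        // so the client is attached with AUDIO_OUTPUT_FLAG_BIT_PERFECT.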
+        if (*isBitPerfect) {
+            *flags = (audio_output_flags_t)(*flags | AUDIO_OUTPUT_FLAG_BIT_PERFECT);
+        }
     }
     if (*output == AUDIO_IO_HANDLE_NONE) {
         AudioProfileVector profiles;
@@ -1708,6 +1725,24 @@
             // at this stage we should ignore the DIRECT flag as no direct output could be
             // found earlier
             *flags = (audio_output_flags_t) (*flags & ~AUDIO_OUTPUT_FLAG_DIRECT);
+            if (com::android::media::audioserver::
+                    fix_concurrent_playback_behavior_with_bit_perfect_client()) {
+                // If the preferred mixer attributes info is null, do not select a bit-perfect
+                // output unless it is the only available output.
+                // A bit-perfect output can exist while the passed-in preferred mixer attributes
+                // info is null when the client is a high priority one. High priority clients are
+                // ringtone or alarm, which are not bit-perfect use cases.
+                size_t i = 0;
+                while (i < outputs.size() && outputs.size() > 1) {
+                    auto desc = mOutputs.valueFor(outputs[i]);
+                    // The output descriptor must not be null here.
+                    if (desc->isBitPerfect()) {
+                        outputs.removeItemsAt(i);
+                    } else {
+                        i += 1;
+                    }
+                }
+            }
             output = selectOutput(
                     outputs, *flags, config->format, channelMask, config->sample_rate, session);
         }
@@ -2191,6 +2226,20 @@
     ALOGV("startOutput() output %d, stream %d, session %d",
           outputDesc->mIoHandle, client->stream(), client->session());
 
+    if (com::android::media::audioserver::fix_concurrent_playback_behavior_with_bit_perfect_client()
+            && gHighPriorityUseCases.count(client->attributes().usage) != 0
+            && outputDesc->isBitPerfect()) {
+        // Usually, APM selects a bit-perfect output for high priority use cases only when the
+        // bit-perfect output is the only output that can be routed to the selected device.
+        // However, there is no need to play high priority use cases such as ringtone and alarm
+        // on the bit-perfect path. Reopen the output and return DEAD_OBJECT so that the client
+        // can attach to the new output.
+        ALOGD("%s: reopen bit-perfect output as high priority use case(%d) is starting",
+              __func__, client->stream());
+        reopenOutput(outputDesc, nullptr /*config*/, AUDIO_OUTPUT_FLAG_NONE, __func__);
+        return DEAD_OBJECT;
+    }
+
     status_t status = outputDesc->start();
     if (status != NO_ERROR) {
         return status;
@@ -2209,7 +2258,6 @@
                 ALOGE("%s unable to open output with default config", __func__);
                 return status;
             }
-            desc->mUsePreferredMixerAttributes = true;
         }
         return status;
     }
@@ -2233,14 +2281,13 @@
                 if (desc == nullptr) {
                     return BAD_VALUE;
                 }
-                desc->mUsePreferredMixerAttributes = true;
+                desc->mPreferredAttrInfo = info;
                 // Intentionally return error to let the client side resending request for
                 // creating and starting.
                 return DEAD_OBJECT;
             }
             info->increaseActiveClient();
-            if (info->getActiveClientCount() == 1 &&
-                (info->getFlags() & AUDIO_OUTPUT_FLAG_BIT_PERFECT) != AUDIO_OUTPUT_FLAG_NONE) {
+            if (info->getActiveClientCount() == 1 && info->isBitPerfect()) {
                 // If it is first bit-perfect client, reroute all clients that will be routed to
                 // the bit-perfect sink so that it is guaranteed only bit-perfect stream is active.
                 PortHandleVector clientsToInvalidate;
@@ -2270,6 +2317,15 @@
         usleep(delayMs * 1000);
     }
 
+    if (status == NO_ERROR &&
+        outputDesc->mPreferredAttrInfo != nullptr &&
+        outputDesc->isBitPerfect() &&
+        com::android::media::audioserver::
+                fix_concurrent_playback_behavior_with_bit_perfect_client()) {
+        // A new client has started on the bit-perfect output; update all clients' internal mute.
+        updateClientsInternalMute(outputDesc);
+    }
+
     return status;
 }
 
@@ -2374,6 +2430,11 @@
              followsSameRouting(clientAttr, attributes_initializer(AUDIO_USAGE_NOTIFICATION)) ||
              (beaconMuteLatency > 0));
         uint32_t waitMs = beaconMuteLatency;
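+        // High priority use cases (e.g. ringtone, alarm) should not be mixed on the bit-perfect
+        // path; bit-perfect outputs are collected here and reopened with the default
+        // configuration below.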
+        const bool needToCloseBitPerfectOutput =
+                (com::android::media::audioserver::
+                        fix_concurrent_playback_behavior_with_bit_perfect_client() &&
+                gHighPriorityUseCases.count(clientAttr.usage) != 0);
+        std::vector<sp<SwAudioOutputDescriptor>> outputsToReopen;
         for (size_t i = 0; i < mOutputs.size(); i++) {
             sp<SwAudioOutputDescriptor> desc = mOutputs.valueAt(i);
             if (desc != outputDesc) {
@@ -2410,15 +2471,22 @@
                 // Note restoring AudioTracks onto this output needs to invoke
                 // a volume ramp if there is no mute.
                 requiresMuteCheck |= sharedDevice && isActive;
+
+                if (needToCloseBitPerfectOutput && desc->isBitPerfect()) {
+                    outputsToReopen.push_back(desc);
+                }
             }
         }
 
-        if (outputDesc->mUsePreferredMixerAttributes && devices != outputDesc->devices()) {
+        if (outputDesc->mPreferredAttrInfo != nullptr && devices != outputDesc->devices()) {
             // If the output is open with preferred mixer attributes, but the routed device is
             // changed when calling this function, returning DEAD_OBJECT to indicate routing
             // changed.
             return DEAD_OBJECT;
         }
+        for (auto& outputToReopen : outputsToReopen) {
+            reopenOutput(outputToReopen, nullptr /*config*/, AUDIO_OUTPUT_FLAG_NONE, __func__);
+        }
         const uint32_t muteWaitMs =
                 setOutputDevices(__func__, outputDesc, devices, force, 0, nullptr,
                                  requiresMuteCheck);
@@ -2497,7 +2565,7 @@
                                     getAudioDeviceOutLeAudioUnicastSet()).isEmpty()))) {
                 DeviceVector newDevices = getNewOutputDevices(desc, false /*fromCache*/);
                 bool force = desc->devices() != newDevices;
-                if (desc->mUsePreferredMixerAttributes && force) {
+                if (desc->mPreferredAttrInfo != nullptr && force) {
                     // If the device is using preferred mixer attributes, the output need to reopen
                     // with default configuration when the new selected devices are different from
                     // current routing devices.
@@ -2545,12 +2613,21 @@
     if (outputDesc->devices().size() == 1) {
         sp<PreferredMixerAttributesInfo> info = getPreferredMixerAttributesInfo(
                 outputDesc->devices()[0]->getId(), client->strategy());
+        bool outputReopened = false;
         if (info != nullptr && info->getUid() == client->uid()) {
             info->decreaseActiveClient();
             if (info->getActiveClientCount() == 0) {
                 reopenOutput(outputDesc, nullptr /*config*/, AUDIO_OUTPUT_FLAG_NONE, __func__);
+                outputReopened = true;
             }
         }
+        if (com::android::media::audioserver::
+                    fix_concurrent_playback_behavior_with_bit_perfect_client() &&
+            !outputReopened && outputDesc->isBitPerfect()) {
+            // Only need to update the clients' internal mute when the output is bit-perfect and it
+            // is not reopened.
+            updateClientsInternalMute(outputDesc);
+        }
     }
     return status;
 }
@@ -2622,7 +2699,7 @@
                     DeviceVector newDevices2 = getNewOutputDevices(desc, false /*fromCache*/);
                     bool force = desc->devices() != newDevices2;
 
-                    if (desc->mUsePreferredMixerAttributes && force) {
+                    if (desc->mPreferredAttrInfo != nullptr && force) {
                         // If the device is using preferred mixer attributes, the output need to
                         // reopen with default configuration when the new selected devices are
                         // different from current routing devices.
@@ -4114,7 +4191,7 @@
             // preventing the force re-routing in case of default dev that distinguishes on address.
             // Let's give back to engine full device choice decision however.
             bool forceRouting = !newDevices.isEmpty();
-            if (outputDesc->mUsePreferredMixerAttributes && newDevices != outputDesc->devices()) {
+            if (outputDesc->mPreferredAttrInfo != nullptr && newDevices != outputDesc->devices()) {
                 // If the device is using preferred mixer attributes, the output need to reopen
                 // with default configuration when the new selected devices are different from
                 // current routing devices.
@@ -4715,7 +4792,7 @@
         const auto output = mOutputs.valueAt(i);
         if (output->mProfile == profile && output->devices().onlyContainsDevice(deviceDescriptor)) {
             if (output->isConfigurationMatched(mixerAttributes->config, flags)) {
-                output->mUsePreferredMixerAttributes = true;
+                output->mPreferredAttrInfo = mixerAttrInfo;
             } else {
                 for (const auto &client: output->getActiveClients()) {
                     if (client->uid() == uid && client->strategy() == strategy) {
@@ -4737,7 +4814,7 @@
             ALOGE("%s, failed to reopen output with preferred mixer attributes", __func__);
             continue;
         }
-        desc->mUsePreferredMixerAttributes = true;
+        desc->mPreferredAttrInfo = mixerAttrInfo;
     }
 
     return NO_ERROR;
@@ -4753,8 +4830,7 @@
     }
     if (activeBitPerfectPreferred) {
         for (auto [strategy, info] : it->second) {
-            if ((info->getFlags() & AUDIO_OUTPUT_FLAG_BIT_PERFECT) != AUDIO_OUTPUT_FLAG_NONE
-                && info->getActiveClientCount() != 0) {
+            if (info->isBitPerfect() && info->getActiveClientCount() != 0) {
                 return info;
             }
         }
@@ -5554,7 +5630,7 @@
             invalidateStreams(mEngine->getStreamTypesForProductStrategy(ps));
         } else {
             DeviceVector newDevices = getNewOutputDevices(outputDesc, false /*fromCache*/);
-            if (outputDesc->mUsePreferredMixerAttributes && outputDesc->devices() != newDevices) {
+            if (outputDesc->mPreferredAttrInfo != nullptr && outputDesc->devices() != newDevices) {
                 // If the device is using preferred mixer attributes, the output need to reopen
                 // with default configuration when the new selected devices are different from
                 // current routing devices.
@@ -6925,8 +7001,7 @@
         closingOutput->stop();
     }
     closingOutput->close();
-    if ((closingOutput->getFlags().output & AUDIO_OUTPUT_FLAG_BIT_PERFECT)
-            == AUDIO_OUTPUT_FLAG_BIT_PERFECT) {
+    if (closingOutput->isBitPerfect()) {
         for (const auto device : closingOutput->devices()) {
             device->setPreferredConfig(nullptr);
         }
@@ -6958,6 +7033,10 @@
             setMsdOutputPatches();
         }
     }
+
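+    // The output is closing, so no client can remain active on its preferred mixer attributes.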
+    if (closingOutput->mPreferredAttrInfo != nullptr) {
+        closingOutput->mPreferredAttrInfo->resetActiveClient();
+    }
 }
 
 void AudioPolicyManager::closeInput(audio_io_handle_t input)
@@ -8815,4 +8894,60 @@
     mpClientInterface->invalidateTracks(clients);
 }
 
+void AudioPolicyManager::updateClientsInternalMute(
+        const sp<android::SwAudioOutputDescriptor> &desc) {
+    if (!desc->isBitPerfect() ||
+        !com::android::media::audioserver::
+                fix_concurrent_playback_behavior_with_bit_perfect_client()) {
+        // This is only used for bit-perfect outputs for now.
+        return;
+    }
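+    // On a bit-perfect output, system sounds are muted internally so that only the bit-perfect
+    // stream is audible. If any other non-system client is active, the bit-perfect client is
+    // muted internally instead, as the mix is no longer bit-perfect.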
+    sp<TrackClientDescriptor> bitPerfectClient = nullptr;
+    bool bitPerfectClientInternalMute = false;
+    std::vector<media::TrackInternalMuteInfo> clientsInternalMute;
+    for (const sp<TrackClientDescriptor>& client : desc->getActiveClients()) {
+        if ((client->flags() & AUDIO_OUTPUT_FLAG_BIT_PERFECT) != AUDIO_OUTPUT_FLAG_NONE) {
+            bitPerfectClient = client;
+            continue;
+        }
+        bool muted = false;
+        if (client->stream() == AUDIO_STREAM_SYSTEM) {
+            // System sound is muted.
+            muted = true;
+        } else {
+            bitPerfectClientInternalMute = true;
+        }
+        if (client->setInternalMute(muted)) {
+            auto result = legacy2aidl_audio_port_handle_t_int32_t(client->portId());
+            if (!result.ok()) {
+                ALOGE("%s, failed to convert port id(%d) to aidl", __func__, client->portId());
+                continue;
+            }
+            media::TrackInternalMuteInfo info;
+            info.portId = result.value();
+            info.muted = client->getInternalMute();
+            clientsInternalMute.push_back(std::move(info));
+        }
+    }
+    if (bitPerfectClient != nullptr &&
+        bitPerfectClient->setInternalMute(bitPerfectClientInternalMute)) {
+        auto result = legacy2aidl_audio_port_handle_t_int32_t(bitPerfectClient->portId());
+        if (result.ok()) {
+            media::TrackInternalMuteInfo info;
+            info.portId = result.value();
+            info.muted = bitPerfectClient->getInternalMute();
+            clientsInternalMute.push_back(std::move(info));
+        } else {
+            ALOGE("%s, failed to convert port id(%d) of bit perfect client to aidl",
+                  __func__, bitPerfectClient->portId());
+        }
+    }
+    if (!clientsInternalMute.empty()) {
+        if (status_t status = mpClientInterface->setTracksInternalMute(clientsInternalMute);
+                status != NO_ERROR) {
+            ALOGE("%s, failed to update tracks internal mute, err=%d", __func__, status);
+        }
+    }
+}
+
 } // namespace android
diff --git a/services/audiopolicy/managerdefault/AudioPolicyManager.h b/services/audiopolicy/managerdefault/AudioPolicyManager.h
index 2d015d3..83597d8 100644
--- a/services/audiopolicy/managerdefault/AudioPolicyManager.h
+++ b/services/audiopolicy/managerdefault/AudioPolicyManager.h
@@ -1386,6 +1386,8 @@
 
         bool checkHapticCompatibilityOnSpatializerOutput(const audio_config_t* config,
                                                          audio_session_t sessionId) const;
+
+        void updateClientsInternalMute(const sp<SwAudioOutputDescriptor>& desc);
 };
 
 };
diff --git a/services/audiopolicy/service/AudioPolicyClientImpl.cpp b/services/audiopolicy/service/AudioPolicyClientImpl.cpp
index 6de71a3..f70dc52 100644
--- a/services/audiopolicy/service/AudioPolicyClientImpl.cpp
+++ b/services/audiopolicy/service/AudioPolicyClientImpl.cpp
@@ -352,4 +352,13 @@
     return af->getAudioMixPort(devicePort, port);
 }
 
+status_t AudioPolicyService::AudioPolicyClient::setTracksInternalMute(
+        const std::vector<media::TrackInternalMuteInfo>& tracksInternalMute) {
+    sp<IAudioFlinger> af = AudioSystem::get_audio_flinger();
+    if (af == nullptr) {
+        return PERMISSION_DENIED;
+    }
+    return af->setTracksInternalMute(tracksInternalMute);
+}
+
 } // namespace android
diff --git a/services/audiopolicy/service/AudioPolicyService.h b/services/audiopolicy/service/AudioPolicyService.h
index 5297e47..eb9d081 100644
--- a/services/audiopolicy/service/AudioPolicyService.h
+++ b/services/audiopolicy/service/AudioPolicyService.h
@@ -872,6 +872,9 @@
         status_t getAudioMixPort(const struct audio_port_v7 *devicePort,
                                  struct audio_port_v7 *port) override;
 
+        status_t setTracksInternalMute(
+                const std::vector<media::TrackInternalMuteInfo>& tracksInternalMute) override;
+
      private:
         AudioPolicyService *mAudioPolicyService;
     };
diff --git a/services/audiopolicy/tests/Android.bp b/services/audiopolicy/tests/Android.bp
index fc349ee..dc61115 100644
--- a/services/audiopolicy/tests/Android.bp
+++ b/services/audiopolicy/tests/Android.bp
@@ -40,6 +40,7 @@
     static_libs: [
         "android.media.audiopolicy-aconfig-cc",
         "audioclient-types-aidl-cpp",
+        "com.android.media.audioserver-aconfig-cc",
         "libaudiopolicycomponents",
         "libflagtest",
         "libgmock",
diff --git a/services/audiopolicy/tests/AudioPolicyManagerTestClient.h b/services/audiopolicy/tests/AudioPolicyManagerTestClient.h
index 7ef0266..30894c1 100644
--- a/services/audiopolicy/tests/AudioPolicyManagerTestClient.h
+++ b/services/audiopolicy/tests/AudioPolicyManagerTestClient.h
@@ -221,6 +221,15 @@
         return NO_ERROR;
     }
 
+    status_t setTracksInternalMute(
+            const std::vector<media::TrackInternalMuteInfo>& tracksInternalMute) override {
+        for (const auto& trackInternalMute : tracksInternalMute) {
+            mTracksInternalMute[(audio_port_handle_t)trackInternalMute.portId] =
+                    trackInternalMute.muted;
+        }
+        return NO_ERROR;
+    }
+
     void addSupportedFormat(audio_format_t format) {
         mSupportedFormats.insert(format);
     }
@@ -229,6 +238,11 @@
         mSupportedChannelMasks.insert(channelMask);
     }
 
+    bool getTrackInternalMute(audio_port_handle_t portId) {
+        auto it = mTracksInternalMute.find(portId);
+        return it == mTracksInternalMute.end() ? false : it->second;
+    }
+
 private:
     audio_module_handle_t mNextModuleHandle = AUDIO_MODULE_HANDLE_NONE + 1;
     audio_io_handle_t mNextIoHandle = AUDIO_IO_HANDLE_NONE + 1;
@@ -241,6 +255,7 @@
     std::vector<struct audio_port_v7> mDisconnectedDevicePorts;
     std::set<audio_format_t> mSupportedFormats;
     std::set<audio_channel_mask_t> mSupportedChannelMasks;
+    std::map<audio_port_handle_t, bool> mTracksInternalMute;
 };
 
 } // namespace android
diff --git a/services/audiopolicy/tests/AudioPolicyTestClient.h b/services/audiopolicy/tests/AudioPolicyTestClient.h
index e55e935..c15adcb 100644
--- a/services/audiopolicy/tests/AudioPolicyTestClient.h
+++ b/services/audiopolicy/tests/AudioPolicyTestClient.h
@@ -110,6 +110,11 @@
                              struct audio_port_v7 *mixPort __unused) override {
         return INVALID_OPERATION;
     }
+
+    status_t setTracksInternalMute(
+            const std::vector<media::TrackInternalMuteInfo>& /*tracksInternalMute*/) override {
+        return INVALID_OPERATION;
+    }
 };
 
 } // namespace android
diff --git a/services/audiopolicy/tests/audiopolicymanager_tests.cpp b/services/audiopolicy/tests/audiopolicymanager_tests.cpp
index 5dc039c..4f9e98e 100644
--- a/services/audiopolicy/tests/audiopolicymanager_tests.cpp
+++ b/services/audiopolicy/tests/audiopolicymanager_tests.cpp
@@ -29,6 +29,7 @@
 #include <android-base/properties.h>
 #include <android/content/AttributionSourceState.h>
 #include <android_media_audiopolicy.h>
+#include <com_android_media_audioserver.h>
 #include <flag_macros.h>
 #include <hardware/audio_effect.h>
 #include <media/AudioPolicy.h>
@@ -1155,129 +1156,6 @@
                                                            "", "", AUDIO_FORMAT_LDAC));
 }
 
-TEST_F(AudioPolicyManagerTestWithConfigurationFile, BitPerfectPlayback) {
-    const audio_format_t bitPerfectFormat = AUDIO_FORMAT_PCM_16_BIT;
-    const audio_channel_mask_t bitPerfectChannelMask = AUDIO_CHANNEL_OUT_QUAD;
-    const uint32_t bitPerfectSampleRate = 48000;
-    mClient->addSupportedFormat(bitPerfectFormat);
-    mClient->addSupportedChannelMask(bitPerfectChannelMask);
-    ASSERT_EQ(NO_ERROR, mManager->setDeviceConnectionState(AUDIO_DEVICE_OUT_USB_DEVICE,
-                                                           AUDIO_POLICY_DEVICE_STATE_AVAILABLE,
-                                                           "", "", AUDIO_FORMAT_DEFAULT));
-    auto devices = mManager->getAvailableOutputDevices();
-    audio_port_handle_t usbPortId = AUDIO_PORT_HANDLE_NONE;
-    for (auto device : devices) {
-        if (device->type() == AUDIO_DEVICE_OUT_USB_DEVICE) {
-            usbPortId = device->getId();
-            break;
-        }
-    }
-    EXPECT_NE(AUDIO_PORT_HANDLE_NONE, usbPortId);
-
-    const uid_t uid = 1234;
-    const uid_t anotherUid = 5678;
-    const audio_attributes_t mediaAttr = {
-            .content_type = AUDIO_CONTENT_TYPE_MUSIC,
-            .usage = AUDIO_USAGE_MEDIA,
-    };
-
-    std::vector<audio_mixer_attributes_t> mixerAttributes;
-    EXPECT_EQ(NO_ERROR, mManager->getSupportedMixerAttributes(usbPortId, mixerAttributes));
-    EXPECT_GT(mixerAttributes.size(), 0);
-    size_t bitPerfectIndex = 0;
-    for (; bitPerfectIndex < mixerAttributes.size(); ++bitPerfectIndex) {
-        if (mixerAttributes[bitPerfectIndex].mixer_behavior == AUDIO_MIXER_BEHAVIOR_BIT_PERFECT) {
-            break;
-        }
-    }
-    EXPECT_LT(bitPerfectIndex, mixerAttributes.size());
-    EXPECT_EQ(bitPerfectFormat, mixerAttributes[bitPerfectIndex].config.format);
-    EXPECT_EQ(bitPerfectChannelMask, mixerAttributes[bitPerfectIndex].config.channel_mask);
-    EXPECT_EQ(bitPerfectSampleRate, mixerAttributes[bitPerfectIndex].config.sample_rate);
-    EXPECT_EQ(NO_ERROR,
-              mManager->setPreferredMixerAttributes(
-                      &mediaAttr, usbPortId, uid, &mixerAttributes[bitPerfectIndex]));
-
-    audio_io_handle_t bitPerfectOutput = AUDIO_IO_HANDLE_NONE;
-    audio_io_handle_t output = AUDIO_IO_HANDLE_NONE;
-    audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
-    audio_port_handle_t bitPerfectPortId = AUDIO_PORT_HANDLE_NONE;
-    audio_port_handle_t portId = AUDIO_PORT_HANDLE_NONE;
-    bool isBitPerfect;
-
-    // When there is no active bit-perfect playback, the output selection will follow default
-    // routing strategy.
-    getOutputForAttr(&selectedDeviceId, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
-            48000, AUDIO_OUTPUT_FLAG_NONE, &output, &portId, mediaAttr, AUDIO_SESSION_NONE,
-            uid, &isBitPerfect);
-    EXPECT_FALSE(isBitPerfect);
-    EXPECT_NE(AUDIO_IO_HANDLE_NONE, output);
-    const auto outputDesc = mManager->getOutputs().valueFor(output);
-    EXPECT_NE(nullptr, outputDesc);
-    EXPECT_NE(AUDIO_OUTPUT_FLAG_BIT_PERFECT, outputDesc->mFlags & AUDIO_OUTPUT_FLAG_BIT_PERFECT);
-
-    // Start bit-perfect playback
-    getOutputForAttr(&selectedDeviceId, bitPerfectFormat, bitPerfectChannelMask,
-            bitPerfectSampleRate, AUDIO_OUTPUT_FLAG_NONE, &bitPerfectOutput, &bitPerfectPortId,
-            mediaAttr, AUDIO_SESSION_NONE, uid, &isBitPerfect);
-    status_t status = mManager->startOutput(bitPerfectPortId);
-    if (status == DEAD_OBJECT) {
-        getOutputForAttr(&selectedDeviceId, bitPerfectFormat, bitPerfectChannelMask,
-                bitPerfectSampleRate, AUDIO_OUTPUT_FLAG_NONE, &bitPerfectOutput, &bitPerfectPortId,
-                mediaAttr, AUDIO_SESSION_NONE, uid, &isBitPerfect);
-        status = mManager->startOutput(bitPerfectPortId);
-    }
-    EXPECT_EQ(NO_ERROR, status);
-    EXPECT_TRUE(isBitPerfect);
-    EXPECT_NE(AUDIO_IO_HANDLE_NONE, bitPerfectOutput);
-    const auto bitPerfectOutputDesc = mManager->getOutputs().valueFor(bitPerfectOutput);
-    EXPECT_NE(nullptr, bitPerfectOutputDesc);
-    EXPECT_EQ(AUDIO_OUTPUT_FLAG_BIT_PERFECT,
-              bitPerfectOutputDesc->mFlags & AUDIO_OUTPUT_FLAG_BIT_PERFECT);
-
-    // If the playback is from preferred mixer attributes owner but the request doesn't match
-    // preferred mixer attributes, it will not be bit-perfect.
-    getOutputForAttr(&selectedDeviceId, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
-            48000, AUDIO_OUTPUT_FLAG_NONE, &output, &portId, mediaAttr, AUDIO_SESSION_NONE,
-            uid, &isBitPerfect);
-    EXPECT_FALSE(isBitPerfect);
-    EXPECT_EQ(bitPerfectOutput, output);
-
-    // When bit-perfect playback is active, all other playback will be routed to bit-perfect output.
-    getOutputForAttr(&selectedDeviceId, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
-            48000, AUDIO_OUTPUT_FLAG_NONE, &output, &portId, mediaAttr, AUDIO_SESSION_NONE,
-            anotherUid, &isBitPerfect);
-    EXPECT_FALSE(isBitPerfect);
-    EXPECT_EQ(bitPerfectOutput, output);
-
-    const audio_attributes_t dtmfAttr = {
-            .content_type = AUDIO_CONTENT_TYPE_UNKNOWN,
-            .usage = AUDIO_USAGE_VOICE_COMMUNICATION_SIGNALLING,
-    };
-    audio_io_handle_t dtmfOutput = AUDIO_IO_HANDLE_NONE;
-    selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
-    portId = AUDIO_PORT_HANDLE_NONE;
-    getOutputForAttr(&selectedDeviceId, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
-            48000, AUDIO_OUTPUT_FLAG_NONE, &dtmfOutput, &portId, dtmfAttr,
-            AUDIO_SESSION_NONE, anotherUid, &isBitPerfect);
-    EXPECT_FALSE(isBitPerfect);
-    EXPECT_EQ(bitPerfectOutput, dtmfOutput);
-
-    // When configuration matches preferred mixer attributes, which is bit-perfect, but the client
-    // is not the owner of preferred mixer attributes, the playback will not be bit-perfect.
-    getOutputForAttr(&selectedDeviceId, bitPerfectFormat, bitPerfectChannelMask,
-            bitPerfectSampleRate, AUDIO_OUTPUT_FLAG_NONE, &output, &portId, mediaAttr,
-            AUDIO_SESSION_NONE, anotherUid, &isBitPerfect);
-    EXPECT_FALSE(isBitPerfect);
-    EXPECT_EQ(bitPerfectOutput, output);
-
-    EXPECT_EQ(NO_ERROR,
-              mManager->clearPreferredMixerAttributes(&mediaAttr, usbPortId, uid));
-    ASSERT_EQ(NO_ERROR, mManager->setDeviceConnectionState(AUDIO_DEVICE_OUT_USB_DEVICE,
-                                                           AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE,
-                                                           "", "", AUDIO_FORMAT_LDAC));
-}
-
 TEST_F(AudioPolicyManagerTestWithConfigurationFile, PreferExactConfigForInput) {
     const audio_channel_mask_t deviceChannelMask = AUDIO_CHANNEL_IN_3POINT1;
     mClient->addSupportedFormat(AUDIO_FORMAT_PCM_16_BIT);
@@ -3612,3 +3490,314 @@
     // unregister effect should succeed since effect shall have been restore on the client session
     ASSERT_EQ(NO_ERROR, mManager->unregisterEffect(effectId));
 }
+
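+// Common fixture for bit-perfect tests: connects a USB output device that supports the
+// bit-perfect mixer configuration and registers it as the preferred mixer for media playback
+// from mUid.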
+class AudioPolicyManagerTestBitPerfectBase : public AudioPolicyManagerTestWithConfigurationFile {
+protected:
+    void SetUp() override;
+    void TearDown() override;
+
+    void startBitPerfectOutput();
+    void reset();
+    void getBitPerfectOutput(status_t expected);
+
+    const audio_format_t mBitPerfectFormat = AUDIO_FORMAT_PCM_16_BIT;
+    const audio_channel_mask_t mBitPerfectChannelMask = AUDIO_CHANNEL_OUT_STEREO;
+    const uint32_t mBitPerfectSampleRate = 48000;
+    const uid_t mUid = 1234;
+    audio_port_handle_t mUsbPortId = AUDIO_PORT_HANDLE_NONE;
+
+    audio_io_handle_t mBitPerfectOutput = AUDIO_IO_HANDLE_NONE;
+    audio_port_handle_t mSelectedDeviceId = AUDIO_PORT_HANDLE_NONE;
+    audio_port_handle_t mBitPerfectPortId = AUDIO_PORT_HANDLE_NONE;
+
+    static constexpr audio_attributes_t sMediaAttr = {
+            .content_type = AUDIO_CONTENT_TYPE_MUSIC,
+            .usage = AUDIO_USAGE_MEDIA,
+    };
+};
+
+void AudioPolicyManagerTestBitPerfectBase::SetUp() {
+    ASSERT_NO_FATAL_FAILURE(AudioPolicyManagerTestWithConfigurationFile::SetUp());
+
+    mClient->addSupportedFormat(mBitPerfectFormat);
+    mClient->addSupportedChannelMask(mBitPerfectChannelMask);
+    ASSERT_EQ(NO_ERROR, mManager->setDeviceConnectionState(AUDIO_DEVICE_OUT_USB_DEVICE,
+                                                           AUDIO_POLICY_DEVICE_STATE_AVAILABLE,
+                                                           "", "", AUDIO_FORMAT_DEFAULT));
+    auto devices = mManager->getAvailableOutputDevices();
+    mUsbPortId = AUDIO_PORT_HANDLE_NONE;
+    for (auto device : devices) {
+        if (device->type() == AUDIO_DEVICE_OUT_USB_DEVICE) {
+            mUsbPortId = device->getId();
+            break;
+        }
+    }
+    EXPECT_NE(AUDIO_PORT_HANDLE_NONE, mUsbPortId);
+
+    std::vector<audio_mixer_attributes_t> mixerAttributes;
+    EXPECT_EQ(NO_ERROR, mManager->getSupportedMixerAttributes(mUsbPortId, mixerAttributes));
+    EXPECT_GT(mixerAttributes.size(), 0);
+    size_t bitPerfectIndex = 0;
+    for (; bitPerfectIndex < mixerAttributes.size(); ++bitPerfectIndex) {
+        if (mixerAttributes[bitPerfectIndex].mixer_behavior == AUDIO_MIXER_BEHAVIOR_BIT_PERFECT) {
+            break;
+        }
+    }
+    EXPECT_LT(bitPerfectIndex, mixerAttributes.size());
+    EXPECT_EQ(mBitPerfectFormat, mixerAttributes[bitPerfectIndex].config.format);
+    EXPECT_EQ(mBitPerfectChannelMask, mixerAttributes[bitPerfectIndex].config.channel_mask);
+    EXPECT_EQ(mBitPerfectSampleRate, mixerAttributes[bitPerfectIndex].config.sample_rate);
+    EXPECT_EQ(NO_ERROR,
+              mManager->setPreferredMixerAttributes(
+                      &sMediaAttr, mUsbPortId, mUid, &mixerAttributes[bitPerfectIndex]));
+}
+
+void AudioPolicyManagerTestBitPerfectBase::TearDown() {
+    EXPECT_EQ(NO_ERROR,
+              mManager->clearPreferredMixerAttributes(&sMediaAttr, mUsbPortId, mUid));
+    ASSERT_EQ(NO_ERROR, mManager->setDeviceConnectionState(AUDIO_DEVICE_OUT_USB_DEVICE,
+                                                           AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE,
+                                                           "", "", AUDIO_FORMAT_LDAC));
+
+    ASSERT_NO_FATAL_FAILURE(AudioPolicyManagerTestWithConfigurationFile::TearDown());
+}
+
+void AudioPolicyManagerTestBitPerfectBase::startBitPerfectOutput() {
+    reset();
+    bool isBitPerfect;
+
+    getOutputForAttr(&mSelectedDeviceId, mBitPerfectFormat, mBitPerfectChannelMask,
+                     mBitPerfectSampleRate, AUDIO_OUTPUT_FLAG_NONE, &mBitPerfectOutput,
+                     &mBitPerfectPortId, sMediaAttr, AUDIO_SESSION_NONE, mUid, &isBitPerfect);
+    status_t status = mManager->startOutput(mBitPerfectPortId);
+    if (status == DEAD_OBJECT) {
+        getOutputForAttr(&mSelectedDeviceId, mBitPerfectFormat, mBitPerfectChannelMask,
+                         mBitPerfectSampleRate, AUDIO_OUTPUT_FLAG_NONE, &mBitPerfectOutput,
+                         &mBitPerfectPortId, sMediaAttr, AUDIO_SESSION_NONE, mUid, &isBitPerfect);
+        status = mManager->startOutput(mBitPerfectPortId);
+    }
+    EXPECT_EQ(NO_ERROR, status);
+    EXPECT_TRUE(isBitPerfect);
+    EXPECT_NE(AUDIO_IO_HANDLE_NONE, mBitPerfectOutput);
+    const auto bitPerfectOutputDesc = mManager->getOutputs().valueFor(mBitPerfectOutput);
+    EXPECT_NE(nullptr, bitPerfectOutputDesc);
+    EXPECT_EQ(AUDIO_OUTPUT_FLAG_BIT_PERFECT,
+              bitPerfectOutputDesc->mFlags & AUDIO_OUTPUT_FLAG_BIT_PERFECT);
+}
+
+void AudioPolicyManagerTestBitPerfectBase::reset() {
+    mBitPerfectOutput = AUDIO_IO_HANDLE_NONE;
+    mSelectedDeviceId = AUDIO_PORT_HANDLE_NONE;
+    mBitPerfectPortId = AUDIO_PORT_HANDLE_NONE;
+}
+
+void AudioPolicyManagerTestBitPerfectBase::getBitPerfectOutput(status_t expected) {
+    reset();
+    audio_stream_type_t stream = AUDIO_STREAM_DEFAULT;
+    AttributionSourceState attributionSource = createAttributionSourceState(mUid);
+    audio_config_t config = AUDIO_CONFIG_INITIALIZER;
+    config.sample_rate = mBitPerfectSampleRate;
+    config.channel_mask = mBitPerfectChannelMask;
+    config.format = mBitPerfectFormat;
+    audio_output_flags_t flags = AUDIO_OUTPUT_FLAG_BIT_PERFECT;
+    AudioPolicyInterface::output_type_t outputType;
+    bool isSpatialized;
+    bool isBitPerfect;
+    EXPECT_EQ(expected,
+              mManager->getOutputForAttr(&sMediaAttr, &mBitPerfectOutput, AUDIO_SESSION_NONE,
+                                         &stream, attributionSource, &config, &flags,
+                                         &mSelectedDeviceId, &mBitPerfectPortId, {}, &outputType,
+                                         &isSpatialized, &isBitPerfect));
+}
+
+class AudioPolicyManagerTestBitPerfect : public AudioPolicyManagerTestBitPerfectBase {
+};
+
+TEST_F(AudioPolicyManagerTestBitPerfect, UseBitPerfectOutput) {
+    const uid_t anotherUid = 5678;
+    audio_io_handle_t output = AUDIO_IO_HANDLE_NONE;
+    audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
+    audio_port_handle_t portId = AUDIO_PORT_HANDLE_NONE;
+    bool isBitPerfect;
+
+    // When there is no active bit-perfect playback, the output selection will follow default
+    // routing strategy.
+    getOutputForAttr(&selectedDeviceId, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_QUAD,
+                     48000, AUDIO_OUTPUT_FLAG_NONE, &output, &portId, sMediaAttr,
+                     AUDIO_SESSION_NONE, mUid, &isBitPerfect);
+    EXPECT_FALSE(isBitPerfect);
+    EXPECT_NE(AUDIO_IO_HANDLE_NONE, output);
+    const auto outputDesc = mManager->getOutputs().valueFor(output);
+    EXPECT_NE(nullptr, outputDesc);
+    EXPECT_NE(AUDIO_OUTPUT_FLAG_BIT_PERFECT, outputDesc->mFlags & AUDIO_OUTPUT_FLAG_BIT_PERFECT);
+
+    // Start bit-perfect playback
+    ASSERT_NO_FATAL_FAILURE(startBitPerfectOutput());
+
+    // If the playback is from preferred mixer attributes owner but the request doesn't match
+    // preferred mixer attributes, it will not be bit-perfect.
+    getOutputForAttr(&selectedDeviceId, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_QUAD,
+                     48000, AUDIO_OUTPUT_FLAG_NONE, &output, &portId, sMediaAttr,
+                     AUDIO_SESSION_NONE, mUid, &isBitPerfect);
+    EXPECT_FALSE(isBitPerfect);
+    EXPECT_EQ(mBitPerfectOutput, output);
+
+    // When bit-perfect playback is active, all other playback will be routed to bit-perfect output.
+    getOutputForAttr(&selectedDeviceId, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
+                     48000, AUDIO_OUTPUT_FLAG_NONE, &output, &portId, sMediaAttr,
+                     AUDIO_SESSION_NONE, anotherUid, &isBitPerfect);
+    EXPECT_FALSE(isBitPerfect);
+    EXPECT_EQ(mBitPerfectOutput, output);
+
+    // When bit-perfect playback is active, DTMF will also be routed to the bit-perfect output.
+    const audio_attributes_t dtmfAttr = {
+            .content_type = AUDIO_CONTENT_TYPE_UNKNOWN,
+            .usage = AUDIO_USAGE_VOICE_COMMUNICATION_SIGNALLING,
+    };
+    audio_io_handle_t dtmfOutput = AUDIO_IO_HANDLE_NONE;
+    selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
+    portId = AUDIO_PORT_HANDLE_NONE;
+    getOutputForAttr(&selectedDeviceId, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
+                     48000, AUDIO_OUTPUT_FLAG_NONE, &dtmfOutput, &portId, dtmfAttr,
+                     AUDIO_SESSION_NONE, anotherUid, &isBitPerfect);
+    EXPECT_FALSE(isBitPerfect);
+    EXPECT_EQ(mBitPerfectOutput, dtmfOutput);
+
+    // When configuration matches preferred mixer attributes, which is bit-perfect, but the client
+    // is not the owner of preferred mixer attributes, the playback will not be bit-perfect.
+    getOutputForAttr(&selectedDeviceId, mBitPerfectFormat, mBitPerfectChannelMask,
+                     mBitPerfectSampleRate, AUDIO_OUTPUT_FLAG_NONE, &output, &portId, sMediaAttr,
+                     AUDIO_SESSION_NONE, anotherUid, &isBitPerfect);
+    EXPECT_FALSE(isBitPerfect);
+    EXPECT_EQ(mBitPerfectOutput, output);
+}
+
+TEST_F_WITH_FLAGS(
+        AudioPolicyManagerTestBitPerfect,
+        InternalMuteWhenBitPerfectClientIsActive,
+        REQUIRES_FLAGS_ENABLED(
+                ACONFIG_FLAG(com::android::media::audioserver,
+                             fix_concurrent_playback_behavior_with_bit_perfect_client))
+) {
+    ASSERT_NO_FATAL_FAILURE(startBitPerfectOutput());
+
+    // When bit-perfect playback is active, the system sound will be routed to bit-perfect output.
+    // The system sound will be muted internally in this case. The bit-perfect client will be
+    // played normally.
+    const uint32_t anotherSampleRate = 44100;
+    audio_port_handle_t systemSoundPortId = AUDIO_PORT_HANDLE_NONE;
+    audio_io_handle_t systemSoundOutput = AUDIO_IO_HANDLE_NONE;
+    const audio_attributes_t systemSoundAttr = {
+            .content_type = AUDIO_CONTENT_TYPE_SONIFICATION,
+            .usage = AUDIO_USAGE_ASSISTANCE_SONIFICATION,
+    };
+    audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
+    bool isBitPerfect;
+    getOutputForAttr(&selectedDeviceId, mBitPerfectFormat, mBitPerfectChannelMask,
+                     anotherSampleRate, AUDIO_OUTPUT_FLAG_NONE, &systemSoundOutput,
+                     &systemSoundPortId, systemSoundAttr, AUDIO_SESSION_NONE, mUid, &isBitPerfect);
+    EXPECT_FALSE(isBitPerfect);
+    EXPECT_EQ(mBitPerfectOutput, systemSoundOutput);
+    EXPECT_EQ(NO_ERROR, mManager->startOutput(systemSoundPortId));
+    EXPECT_TRUE(mClient->getTrackInternalMute(systemSoundPortId));
+    EXPECT_FALSE(mClient->getTrackInternalMute(mBitPerfectPortId));
+    EXPECT_EQ(NO_ERROR, mManager->stopOutput(systemSoundPortId));
+    EXPECT_FALSE(mClient->getTrackInternalMute(mBitPerfectPortId));
+
+    // When bit-perfect playback is active, the notification will be routed to bit-perfect output.
+    // The notification sound will be played normally while the bit-perfect client will be muted
+    // internally.
+    audio_port_handle_t notificationPortId = AUDIO_PORT_HANDLE_NONE;
+    audio_io_handle_t notificationOutput = AUDIO_IO_HANDLE_NONE;
+    const audio_attributes_t notificationAttr = {
+            .content_type = AUDIO_CONTENT_TYPE_SONIFICATION,
+            .usage = AUDIO_USAGE_NOTIFICATION,
+    };
+    getOutputForAttr(&selectedDeviceId, mBitPerfectFormat, mBitPerfectChannelMask,
+                     anotherSampleRate, AUDIO_OUTPUT_FLAG_NONE, &notificationOutput,
+                     &notificationPortId, notificationAttr, AUDIO_SESSION_NONE, mUid,
+                     &isBitPerfect);
+    EXPECT_FALSE(isBitPerfect);
+    EXPECT_EQ(mBitPerfectOutput, notificationOutput);
+    EXPECT_EQ(NO_ERROR, mManager->startOutput(notificationPortId));
+    EXPECT_FALSE(mClient->getTrackInternalMute(notificationPortId));
+    EXPECT_TRUE(mClient->getTrackInternalMute(mBitPerfectPortId));
+    EXPECT_EQ(NO_ERROR, mManager->stopOutput(notificationPortId));
+    EXPECT_FALSE(mClient->getTrackInternalMute(mBitPerfectPortId));
+
+    EXPECT_EQ(NO_ERROR, mManager->stopOutput(mBitPerfectPortId));
+}
+
+class AudioPolicyManagerTestBitPerfectPhoneMode : public AudioPolicyManagerTestBitPerfectBase,
+        public testing::WithParamInterface<audio_mode_t> {
+};
+
+TEST_P(AudioPolicyManagerTestBitPerfectPhoneMode, RejectBitPerfectWhenPhoneModeIsNotNormal) {
+    if (!com::android::media::audioserver::
+            fix_concurrent_playback_behavior_with_bit_perfect_client()) {
+        GTEST_SKIP()
+                << "Flag fix_concurrent_playback_behavior_with_bit_perfect_client is not enabled";
+    }
+
+    ASSERT_NO_FATAL_FAILURE(startBitPerfectOutput());
+
+    audio_mode_t mode = GetParam();
+    mManager->setPhoneState(mode);
+    // When the phone mode is not normal, the bit-perfect output will be reopened.
+    EXPECT_EQ(nullptr, mManager->getOutputs().valueFor(mBitPerfectOutput));
+
+    // When the phone mode is not normal, the bit-perfect request will be rejected.
+    ASSERT_NO_FATAL_FAILURE(getBitPerfectOutput(INVALID_OPERATION));
+
+    mManager->setPhoneState(AUDIO_MODE_NORMAL);
+}
+
+INSTANTIATE_TEST_CASE_P(
+        PhoneMode,
+        AudioPolicyManagerTestBitPerfectPhoneMode,
+        testing::Values(AUDIO_MODE_IN_CALL,
+                        AUDIO_MODE_RINGTONE,
+                        AUDIO_MODE_IN_COMMUNICATION,
+                        AUDIO_MODE_CALL_SCREEN)
+);
+
+class AudioPolicyManagerTestBitPerfectHigherPriorityUseCaseActive :
+        public AudioPolicyManagerTestBitPerfectBase,
+        public testing::WithParamInterface<audio_usage_t> {
+};
+
+TEST_P(AudioPolicyManagerTestBitPerfectHigherPriorityUseCaseActive,
+       RejectBitPerfectWhenHigherPriorityUseCaseIsActive) {
+    if (!com::android::media::audioserver::
+                fix_concurrent_playback_behavior_with_bit_perfect_client()) {
+        GTEST_SKIP()
+                << "Flag fix_concurrent_playback_behavior_with_bit_perfect_client is not enabled";
+    }
+
+    ASSERT_NO_FATAL_FAILURE(startBitPerfectOutput());
+
+    audio_attributes_t attr = {
+            .usage = GetParam(),
+            .content_type = AUDIO_CONTENT_TYPE_UNKNOWN
+    };
+    audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
+    audio_port_handle_t portId = AUDIO_PORT_HANDLE_NONE;
+    audio_io_handle_t output = AUDIO_IO_HANDLE_NONE;
+    ASSERT_NO_FATAL_FAILURE(
+            getOutputForAttr(&selectedDeviceId, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
+                   48000, AUDIO_OUTPUT_FLAG_NONE, &output, &portId, attr));
+    EXPECT_NE(mBitPerfectOutput, output);
+    EXPECT_EQ(NO_ERROR, mManager->startOutput(portId));
+    // When a high priority use case is active, the bit-perfect output will be closed.
+    EXPECT_EQ(nullptr, mManager->getOutputs().valueFor(mBitPerfectOutput));
+
+    // When any higher priority use case is active, the bit-perfect request will be rejected.
+    ASSERT_NO_FATAL_FAILURE(getBitPerfectOutput(INVALID_OPERATION));
+}
+
+INSTANTIATE_TEST_CASE_P(
+        HigherPriorityUseCases,
+        AudioPolicyManagerTestBitPerfectHigherPriorityUseCaseActive,
+        testing::Values(AUDIO_USAGE_NOTIFICATION_TELEPHONY_RINGTONE,
+                        AUDIO_USAGE_ALARM)
+);
diff --git a/services/camera/libcameraservice/api2/DepthCompositeStream.cpp b/services/camera/libcameraservice/api2/DepthCompositeStream.cpp
index 1bd0b85..8b41d00 100644
--- a/services/camera/libcameraservice/api2/DepthCompositeStream.cpp
+++ b/services/camera/libcameraservice/api2/DepthCompositeStream.cpp
@@ -517,6 +517,15 @@
     return false;
 }
 
+bool DepthCompositeStream::isDepthCompositeStreamInfo(const OutputStreamInfo& streamInfo) {
+    if ((streamInfo.dataSpace == static_cast<android_dataspace_t>(HAL_DATASPACE_DYNAMIC_DEPTH)) &&
+            (streamInfo.format == HAL_PIXEL_FORMAT_BLOB)) {
+        return true;
+    }
+
+    return false;
+}
+
 static bool setContains(std::unordered_set<int32_t> containerSet, int32_t value) {
     return containerSet.find(value) != containerSet.end();
 }
diff --git a/services/camera/libcameraservice/api2/DepthCompositeStream.h b/services/camera/libcameraservice/api2/DepthCompositeStream.h
index f797f9c..3f8f6a2 100644
--- a/services/camera/libcameraservice/api2/DepthCompositeStream.h
+++ b/services/camera/libcameraservice/api2/DepthCompositeStream.h
@@ -46,6 +46,7 @@
     ~DepthCompositeStream() override;
 
     static bool isDepthCompositeStream(const sp<Surface> &surface);
+    static bool isDepthCompositeStreamInfo(const OutputStreamInfo& streamInfo);
 
     // CompositeStream overrides
     status_t createInternalStreams(const std::vector<sp<Surface>>& consumers,
diff --git a/services/camera/libcameraservice/api2/HeicCompositeStream.cpp b/services/camera/libcameraservice/api2/HeicCompositeStream.cpp
index 68e9ad4..225d7f5 100644
--- a/services/camera/libcameraservice/api2/HeicCompositeStream.cpp
+++ b/services/camera/libcameraservice/api2/HeicCompositeStream.cpp
@@ -94,6 +94,11 @@
     mMainImageSurface.clear();
 }
 
+bool HeicCompositeStream::isHeicCompositeStreamInfo(const OutputStreamInfo& streamInfo) {
+    return ((streamInfo.dataSpace == static_cast<android_dataspace_t>(HAL_DATASPACE_HEIF)) &&
+            (streamInfo.format == HAL_PIXEL_FORMAT_BLOB));
+}
+
 bool HeicCompositeStream::isHeicCompositeStream(const sp<Surface> &surface) {
     ANativeWindow *anw = surface.get();
     status_t err;
diff --git a/services/camera/libcameraservice/api2/HeicCompositeStream.h b/services/camera/libcameraservice/api2/HeicCompositeStream.h
index b539cdd..9cfd78b 100644
--- a/services/camera/libcameraservice/api2/HeicCompositeStream.h
+++ b/services/camera/libcameraservice/api2/HeicCompositeStream.h
@@ -42,6 +42,7 @@
     ~HeicCompositeStream() override;
 
     static bool isHeicCompositeStream(const sp<Surface> &surface);
+    static bool isHeicCompositeStreamInfo(const OutputStreamInfo& streamInfo);
 
     status_t createInternalStreams(const std::vector<sp<Surface>>& consumers,
             bool hasDeferredConsumer, uint32_t width, uint32_t height, int format,
diff --git a/services/camera/libcameraservice/api2/JpegRCompositeStream.cpp b/services/camera/libcameraservice/api2/JpegRCompositeStream.cpp
index a1b9383..dadefcc 100644
--- a/services/camera/libcameraservice/api2/JpegRCompositeStream.cpp
+++ b/services/camera/libcameraservice/api2/JpegRCompositeStream.cpp
@@ -520,6 +520,15 @@
     return false;
 }
 
+bool JpegRCompositeStream::isJpegRCompositeStreamInfo(const OutputStreamInfo& streamInfo) {
+    if ((streamInfo.format == HAL_PIXEL_FORMAT_BLOB) &&
+            (streamInfo.dataSpace == static_cast<int>(kJpegRDataSpace))) {
+        return true;
+    }
+
+    return false;
+}
+
 void JpegRCompositeStream::deriveDynamicRangeAndDataspace(int64_t dynamicProfile,
         int64_t* /*out*/dynamicRange, int64_t* /*out*/dataSpace) {
     if ((dynamicRange == nullptr) || (dataSpace == nullptr)) {
@@ -832,8 +841,8 @@
     (*compositeOutput)[0].colorSpace =
         ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED;
 
-    if (CameraProviderManager::isConcurrentDynamicRangeCaptureSupported(staticInfo,
-                streamInfo.dynamicRangeProfile,
+    if (CameraProviderManager::isConcurrentDynamicRangeCaptureSupported(
+                staticInfo, dynamicRange,
                 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD)) {
         compositeOutput->push_back({});
         (*compositeOutput)[1].width = streamInfo.width;
diff --git a/services/camera/libcameraservice/api2/JpegRCompositeStream.h b/services/camera/libcameraservice/api2/JpegRCompositeStream.h
index 016d57c..6669739 100644
--- a/services/camera/libcameraservice/api2/JpegRCompositeStream.h
+++ b/services/camera/libcameraservice/api2/JpegRCompositeStream.h
@@ -43,6 +43,7 @@
     ~JpegRCompositeStream() override;
 
     static bool isJpegRCompositeStream(const sp<Surface> &surface);
+    static bool isJpegRCompositeStreamInfo(const OutputStreamInfo& streamInfo);
 
     // CompositeStream overrides
     status_t createInternalStreams(const std::vector<sp<Surface>>& consumers,
diff --git a/services/camera/libcameraservice/utils/SessionConfigurationUtils.cpp b/services/camera/libcameraservice/utils/SessionConfigurationUtils.cpp
index a7a2b5e..40ca276 100644
--- a/services/camera/libcameraservice/utils/SessionConfigurationUtils.cpp
+++ b/services/camera/libcameraservice/utils/SessionConfigurationUtils.cpp
@@ -21,6 +21,7 @@
 #include "../api2/HeicCompositeStream.h"
 #include "aidl/android/hardware/graphics/common/Dataspace.h"
 #include "api2/JpegRCompositeStream.h"
+#include "binder/Status.h"
 #include "common/CameraDeviceBase.h"
 #include "common/HalConversionsTemplated.h"
 #include "../CameraService.h"
@@ -679,6 +680,67 @@
     stream->useCase = static_cast<StreamUseCases>(streamInfo.streamUseCase);
 }
 
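+// Maps a single output stream into the HAL stream configuration. Depth, HEIC and Jpeg/R
+// composite streams may expand into several internal streams; *streamIdx is advanced by the
+// number of streams appended and *earlyExit is set when a composite stream is not supported.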
+binder::Status mapStream(const OutputStreamInfo& streamInfo, bool isCompositeJpegRDisabled,
+        const CameraMetadata& deviceInfo, camera_stream_rotation_t rotation,
+        size_t* streamIdx/*out*/, const std::string &physicalId, int32_t groupId,
+        const std::string& logicalCameraId,
+        aidl::android::hardware::camera::device::StreamConfiguration &streamConfiguration /*out*/,
+        bool *earlyExit /*out*/) {
+    bool isDepthCompositeStream =
+            camera3::DepthCompositeStream::isDepthCompositeStreamInfo(streamInfo);
+    bool isHeicCompositeStream =
+            camera3::HeicCompositeStream::isHeicCompositeStreamInfo(streamInfo);
+    bool isJpegRCompositeStream =
+            camera3::JpegRCompositeStream::isJpegRCompositeStreamInfo(streamInfo) &&
+            !isCompositeJpegRDisabled;
+    if (isDepthCompositeStream || isHeicCompositeStream || isJpegRCompositeStream) {
+        // We need to take into account that composite streams can have
+        // additional internal camera streams.
+        std::vector<OutputStreamInfo> compositeStreams;
+        status_t ret;
+        if (isDepthCompositeStream) {
+          // TODO: Take care of composite streams.
+            ret = camera3::DepthCompositeStream::getCompositeStreamInfo(streamInfo,
+                    deviceInfo, &compositeStreams);
+        } else if (isHeicCompositeStream) {
+            ret = camera3::HeicCompositeStream::getCompositeStreamInfo(streamInfo,
+                deviceInfo, &compositeStreams);
+        } else {
+            ret = camera3::JpegRCompositeStream::getCompositeStreamInfo(streamInfo,
+                deviceInfo, &compositeStreams);
+        }
+
+        if (ret != OK) {
+            std::string msg = fmt::sprintf(
+                    "Camera %s: Failed adding composite streams: %s (%d)",
+                    logicalCameraId.c_str(), strerror(-ret), ret);
+            ALOGE("%s: %s", __FUNCTION__, msg.c_str());
+            return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
+        }
+
+        if (compositeStreams.size() == 0) {
+            // No internal streams means composite stream not
+            // supported.
+            *earlyExit = true;
+            return binder::Status::ok();
+        } else if (compositeStreams.size() > 1) {
+            size_t streamCount = streamConfiguration.streams.size() + compositeStreams.size() - 1;
+            streamConfiguration.streams.resize(streamCount);
+        }
+
+        for (const auto& compositeStream : compositeStreams) {
+            mapStreamInfo(compositeStream, rotation,
+                    physicalId, groupId,
+                    &streamConfiguration.streams[(*streamIdx)++]);
+        }
+    } else {
+        mapStreamInfo(streamInfo, rotation,
+                physicalId, groupId, &streamConfiguration.streams[(*streamIdx)++]);
+    }
+
+    return binder::Status::ok();
+}
+
 binder::Status
 convertToHALStreamCombination(
         const SessionConfiguration& sessionConfiguration,
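Note: the mapStream() helper added above factors out the composite-stream expansion that convertToHALStreamCombination() previously duplicated for the deferred-surface and per-surface paths (see the second hunk below). Its contract, sketched here with hypothetical local variables, is: advance *streamIdx once per HAL stream appended, set *earlyExit when the composite stream reports no internal streams (i.e. is unsupported), and report mapping failures through the returned binder::Status.

    // Caller sketch (assumed names, not copied verbatim from the patch):
    bool earlyExit = false;
    size_t streamIdx = 0;
    binder::Status status = mapStream(streamInfo, isCompositeJpegRDisabled, deviceInfo,
            camera3::CAMERA_STREAM_ROTATION_0, &streamIdx, physicalCameraId, groupId,
            logicalCameraId, streamConfiguration, &earlyExit);
    if (earlyExit || !status.isOk()) {
        return status;  // unsupported composite stream, or an error while mapping it
    }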
@@ -831,8 +893,13 @@
                                 "Deferred surface sensor pixel modes not valid");
             }
             streamInfo.streamUseCase = streamUseCase;
-            mapStreamInfo(streamInfo, camera3::CAMERA_STREAM_ROTATION_0, physicalCameraId, groupId,
-                    &streamConfiguration.streams[streamIdx++]);
+            auto status = mapStream(streamInfo, isCompositeJpegRDisabled, deviceInfo,
+                    camera3::CAMERA_STREAM_ROTATION_0, &streamIdx, physicalCameraId, groupId,
+                    logicalCameraId, streamConfiguration, earlyExit);
+            if (*earlyExit || !status.isOk()) {
+                return status;
+            }
+
             isStreamInfoValid = true;
 
             if (numBufferProducers == 0) {
@@ -851,57 +918,11 @@
                 return res;
 
             if (!isStreamInfoValid) {
-                bool isDepthCompositeStream =
-                        camera3::DepthCompositeStream::isDepthCompositeStream(surface);
-                bool isHeicCompositeStream =
-                        camera3::HeicCompositeStream::isHeicCompositeStream(surface);
-                bool isJpegRCompositeStream =
-                        camera3::JpegRCompositeStream::isJpegRCompositeStream(surface) &&
-                        !isCompositeJpegRDisabled;
-                if (isDepthCompositeStream || isHeicCompositeStream || isJpegRCompositeStream) {
-                    // We need to take in to account that composite streams can have
-                    // additional internal camera streams.
-                    std::vector<OutputStreamInfo> compositeStreams;
-                    if (isDepthCompositeStream) {
-                      // TODO: Take care of composite streams.
-                        ret = camera3::DepthCompositeStream::getCompositeStreamInfo(streamInfo,
-                                deviceInfo, &compositeStreams);
-                    } else if (isHeicCompositeStream) {
-                        ret = camera3::HeicCompositeStream::getCompositeStreamInfo(streamInfo,
-                            deviceInfo, &compositeStreams);
-                    } else {
-                        ret = camera3::JpegRCompositeStream::getCompositeStreamInfo(streamInfo,
-                            deviceInfo, &compositeStreams);
-                    }
-
-                    if (ret != OK) {
-                        std::string msg = fmt::sprintf(
-                                "Camera %s: Failed adding composite streams: %s (%d)",
-                                logicalCameraId.c_str(), strerror(-ret), ret);
-                        ALOGE("%s: %s", __FUNCTION__, msg.c_str());
-                        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
-                    }
-
-                    if (compositeStreams.size() == 0) {
-                        // No internal streams means composite stream not
-                        // supported.
-                        *earlyExit = true;
-                        return binder::Status::ok();
-                    } else if (compositeStreams.size() > 1) {
-                        streamCount += compositeStreams.size() - 1;
-                        streamConfiguration.streams.resize(streamCount);
-                    }
-
-                    for (const auto& compositeStream : compositeStreams) {
-                        mapStreamInfo(compositeStream,
-                                static_cast<camera_stream_rotation_t> (it.getRotation()),
-                                physicalCameraId, groupId,
-                                &streamConfiguration.streams[streamIdx++]);
-                    }
-                } else {
-                    mapStreamInfo(streamInfo,
-                            static_cast<camera_stream_rotation_t> (it.getRotation()),
-                            physicalCameraId, groupId, &streamConfiguration.streams[streamIdx++]);
+                auto status = mapStream(streamInfo, isCompositeJpegRDisabled, deviceInfo,
+                        static_cast<camera_stream_rotation_t> (it.getRotation()), &streamIdx,
+                        physicalCameraId, groupId, logicalCameraId, streamConfiguration, earlyExit);
+                if (*earlyExit || !status.isOk()) {
+                    return status;
                 }
                 isStreamInfoValid = true;
             }
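Note: inside mapStream() the stream count is now recomputed from streamConfiguration.streams.size() rather than from the local streamCount that the old inline code maintained; assuming the streams vector was presized to the expected output count earlier in convertToHALStreamCombination() (as in the pre-existing code), the two are equivalent. The toy program below illustrates the arithmetic: one slot is already reserved for the requested output itself, so a composite stream that expands into N internal streams only needs N - 1 extra slots.

    // Toy model of the resize bookkeeping (hypothetical values, not AOSP code):
    #include <cassert>
    #include <cstddef>
    #include <vector>

    int main() {
        std::vector<int> streams(4);            // 4 requested outputs, presized
        const std::size_t internalStreams = 3;  // e.g. a composite expanding to 3 internals
        streams.resize(streams.size() + internalStreams - 1);
        assert(streams.size() == 6);            // 3 plain outputs + 3 internal streams
        return 0;
    }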
diff --git a/services/mediametrics/include/mediametricsservice/AudioTypes.h b/services/mediametrics/include/mediametricsservice/AudioTypes.h
index b5fe28b..59654bf 100644
--- a/services/mediametrics/include/mediametricsservice/AudioTypes.h
+++ b/services/mediametrics/include/mediametricsservice/AudioTypes.h
@@ -18,6 +18,7 @@
 
 #include <string>
 #include <unordered_map>
+#include <vector>
 
 namespace android::mediametrics::types {
 
diff --git a/services/mediaresourcemanager/fuzzer/Android.bp b/services/mediaresourcemanager/fuzzer/Android.bp
index 5bac062..3f04f69 100644
--- a/services/mediaresourcemanager/fuzzer/Android.bp
+++ b/services/mediaresourcemanager/fuzzer/Android.bp
@@ -47,7 +47,7 @@
     ],
     fuzz_config: {
         cc: [
-            "android-media-fuzzing-reports@google.com",
+            "girishshetty@google.com",
         ],
         componentid: 155276,
         hotlists: [