Merge "Audio Policy: add flag for input stream sharing logic fix" into main
diff --git a/aidl/com/android/media/permission/UidPackageState.aidl b/aidl/com/android/media/permission/UidPackageState.aidl
index 41b8c65..747a7ef 100644
--- a/aidl/com/android/media/permission/UidPackageState.aidl
+++ b/aidl/com/android/media/permission/UidPackageState.aidl
@@ -20,6 +20,7 @@
  * Entity representing the package names associated with a particular uid/app-id
  * {@hide}
  */
+@JavaDerive(equals = true, toString = true)
 parcelable UidPackageState {
     int uid;
     @utf8InCpp List<String> packageNames;
diff --git a/camera/ndk/include/camera/NdkCameraMetadataTags.h b/camera/ndk/include/camera/NdkCameraMetadataTags.h
index cba26da..645a3d4 100644
--- a/camera/ndk/include/camera/NdkCameraMetadataTags.h
+++ b/camera/ndk/include/camera/NdkCameraMetadataTags.h
@@ -2429,29 +2429,33 @@
      *
      * <p>Flash strength level to use in capture mode i.e. when the applications control
      * flash with either SINGLE or TORCH mode.</p>
-     * <p>Use android.flash.info.singleStrengthMaxLevel and
-     * android.flash.info.torchStrengthMaxLevel to check whether the device supports
+     * <p>Use ACAMERA_FLASH_SINGLE_STRENGTH_MAX_LEVEL and
+     * ACAMERA_FLASH_TORCH_STRENGTH_MAX_LEVEL to check whether the device supports
      * flash strength control or not.
      * If the values of android.flash.info.singleStrengthMaxLevel and
-     * android.flash.info.torchStrengthMaxLevel are greater than 1,
+     * ACAMERA_FLASH_TORCH_STRENGTH_MAX_LEVEL are greater than 1,
      * then the device supports manual flash strength control.</p>
      * <p>If the ACAMERA_FLASH_MODE <code>==</code> TORCH the value must be &gt;= 1
-     * and &lt;= android.flash.info.torchStrengthMaxLevel.
+     * and &lt;= ACAMERA_FLASH_TORCH_STRENGTH_MAX_LEVEL.
      * If the application doesn't set the key and
-     * android.flash.info.torchStrengthMaxLevel &gt; 1,
+     * ACAMERA_FLASH_TORCH_STRENGTH_MAX_LEVEL &gt; 1,
      * then the flash will be fired at the default level set by HAL in
-     * android.flash.info.torchStrengthDefaultLevel.
+     * ACAMERA_FLASH_TORCH_STRENGTH_DEFAULT_LEVEL.
      * If the ACAMERA_FLASH_MODE <code>==</code> SINGLE, then the value must be &gt;= 1
-     * and &lt;= android.flash.info.singleStrengthMaxLevel.
+     * and &lt;= ACAMERA_FLASH_SINGLE_STRENGTH_MAX_LEVEL.
      * If the application does not set this key and
-     * android.flash.info.singleStrengthMaxLevel &gt; 1,
+     * ACAMERA_FLASH_SINGLE_STRENGTH_MAX_LEVEL &gt; 1,
      * then the flash will be fired at the default level set by HAL
-     * in android.flash.info.singleStrengthDefaultLevel.
+     * in ACAMERA_FLASH_SINGLE_STRENGTH_DEFAULT_LEVEL.
      * If ACAMERA_CONTROL_AE_MODE is set to any of ON_AUTO_FLASH, ON_ALWAYS_FLASH,
      * ON_AUTO_FLASH_REDEYE, ON_EXTERNAL_FLASH values, then the strengthLevel will be ignored.</p>
      *
      * @see ACAMERA_CONTROL_AE_MODE
      * @see ACAMERA_FLASH_MODE
+     * @see ACAMERA_FLASH_SINGLE_STRENGTH_DEFAULT_LEVEL
+     * @see ACAMERA_FLASH_SINGLE_STRENGTH_MAX_LEVEL
+     * @see ACAMERA_FLASH_TORCH_STRENGTH_DEFAULT_LEVEL
+     * @see ACAMERA_FLASH_TORCH_STRENGTH_MAX_LEVEL
      */
     ACAMERA_FLASH_STRENGTH_LEVEL =                              // int32
             ACAMERA_FLASH_START + 6,
@@ -2487,9 +2491,11 @@
      * </ul></p>
      *
      * <p>If flash unit is available this will be greater than or equal to 1 and less
-     * or equal to <code>android.flash.info.singleStrengthMaxLevel</code>.
+     * or equal to ACAMERA_FLASH_SINGLE_STRENGTH_MAX_LEVEL.
      * Note for devices that do not support the manual flash strength control
      * feature, this level will always be equal to 1.</p>
+     *
+     * @see ACAMERA_FLASH_SINGLE_STRENGTH_MAX_LEVEL
      */
     ACAMERA_FLASH_SINGLE_STRENGTH_DEFAULT_LEVEL =               // int32
             ACAMERA_FLASH_START + 8,
@@ -2509,12 +2515,14 @@
      * otherwise the value will be equal to 1.</p>
      * <p>Note that this level is just a number of supported levels(the granularity of control).
      * There is no actual physical power units tied to this level.
-     * There is no relation between android.flash.info.torchStrengthMaxLevel and
-     * android.flash.info.singleStrengthMaxLevel i.e. the ratio of
-     * android.flash.info.torchStrengthMaxLevel:android.flash.info.singleStrengthMaxLevel
+     * There is no relation between ACAMERA_FLASH_TORCH_STRENGTH_MAX_LEVEL and
+     * ACAMERA_FLASH_SINGLE_STRENGTH_MAX_LEVEL i.e. the ratio of
+     * ACAMERA_FLASH_TORCH_STRENGTH_MAX_LEVEL:ACAMERA_FLASH_SINGLE_STRENGTH_MAX_LEVEL
      * is not guaranteed to be the ratio of actual brightness.</p>
      *
      * @see ACAMERA_FLASH_MODE
+     * @see ACAMERA_FLASH_SINGLE_STRENGTH_MAX_LEVEL
+     * @see ACAMERA_FLASH_TORCH_STRENGTH_MAX_LEVEL
      */
     ACAMERA_FLASH_TORCH_STRENGTH_MAX_LEVEL =                    // int32
             ACAMERA_FLASH_START + 9,
@@ -2529,9 +2537,11 @@
      * </ul></p>
      *
      * <p>If flash unit is available this will be greater than or equal to 1 and less
-     * or equal to android.flash.info.torchStrengthMaxLevel.
+     * or equal to ACAMERA_FLASH_TORCH_STRENGTH_MAX_LEVEL.
      * Note for the devices that do not support the manual flash strength control feature,
      * this level will always be equal to 1.</p>
+     *
+     * @see ACAMERA_FLASH_TORCH_STRENGTH_MAX_LEVEL
      */
     ACAMERA_FLASH_TORCH_STRENGTH_DEFAULT_LEVEL =                // int32
             ACAMERA_FLASH_START + 10,
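
[Reviewer note] The doc-comment cleanup above replaces raw android.flash.info.* names with the NDK tag constants they correspond to. For orientation, here is a minimal sketch of how an application might consult those tags before setting ACAMERA_FLASH_STRENGTH_LEVEL; the camera characteristics and capture request objects are assumed to come from the usual NDK setup and are not part of this change.

    // Sketch only: check manual flash strength support, then set a TORCH strength level.
    #include <camera/NdkCameraMetadata.h>
    #include <camera/NdkCaptureRequest.h>

    static void maybeSetTorchStrength(const ACameraMetadata* chars, ACaptureRequest* request) {
        ACameraMetadata_const_entry entry = {};
        if (ACameraMetadata_getConstEntry(chars, ACAMERA_FLASH_TORCH_STRENGTH_MAX_LEVEL,
                                          &entry) != ACAMERA_OK || entry.count < 1) {
            return;  // tag not reported by this device
        }
        int32_t maxLevel = entry.data.i32[0];
        if (maxLevel > 1) {  // a maximum above 1 means manual strength control is supported
            uint8_t mode = ACAMERA_FLASH_MODE_TORCH;
            ACaptureRequest_setEntry_u8(request, ACAMERA_FLASH_MODE, 1, &mode);
            int32_t level = maxLevel / 2;  // any value in [1, maxLevel]
            ACaptureRequest_setEntry_i32(request, ACAMERA_FLASH_STRENGTH_LEVEL, 1, &level);
        }
    }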
diff --git a/camera/tests/fuzzer/camera_fuzzer.cpp b/camera/tests/fuzzer/camera_fuzzer.cpp
index 0812096..b0f59f1 100644
--- a/camera/tests/fuzzer/camera_fuzzer.cpp
+++ b/camera/tests/fuzzer/camera_fuzzer.cpp
@@ -89,6 +89,7 @@
     bool initCamera();
     void invokeCamera();
     void invokeSetParameters();
+    native_handle_t* createNativeHandle();
     sp<Camera> mCamera = nullptr;
     FuzzedDataProvider* mFDP = nullptr;
 
@@ -103,6 +104,18 @@
     };
 };
 
+native_handle_t* CameraFuzzer::createNativeHandle() {
+    int32_t numFds = mFDP->ConsumeIntegralInRange<int32_t>(kMinElements, kMaxElements);
+    int32_t numInts = mFDP->ConsumeIntegralInRange<int32_t>(kNumMin, kNumMax);
+    native_handle_t* handle = native_handle_create(numFds, numInts);
+    for (int32_t i = 0; i < numFds; ++i) {
+        std::string filename = mFDP->ConsumeRandomLengthString(kMaxBytes);
+        int32_t fd = open(filename.c_str(), O_RDWR | O_CREAT | O_TRUNC, 0644);
+        handle->data[i] = fd;
+    }
+    return handle;
+}
+
 bool CameraFuzzer::initCamera() {
     ProcessState::self()->startThreadPool();
     sp<IServiceManager> sm = defaultServiceManager();
@@ -291,15 +304,11 @@
                 },
                 [&]() {
                     int64_t timestamp = mFDP->ConsumeIntegral<int64_t>();
-                    int32_t numFds = mFDP->ConsumeIntegralInRange<int32_t>(kNumMin, kNumMax);
-                    int32_t numInts = mFDP->ConsumeIntegralInRange<int32_t>(kNumMin, kNumMax);
-                    native_handle_t* handle = native_handle_create(numFds, numInts);
+                    native_handle_t* handle = createNativeHandle();
                     mCamera->recordingFrameHandleCallbackTimestamp(timestamp, handle);
                 },
                 [&]() {
-                    int32_t numFds = mFDP->ConsumeIntegralInRange<int32_t>(kNumMin, kNumMax);
-                    int32_t numInts = mFDP->ConsumeIntegralInRange<int32_t>(kNumMin, kNumMax);
-                    native_handle_t* handle = native_handle_create(numFds, numInts);
+                    native_handle_t* handle = createNativeHandle();
                     mCamera->releaseRecordingFrameHandle(handle);
                 },
                 [&]() { mCamera->releaseRecordingFrame(iMem); },
@@ -308,9 +317,7 @@
                     for (int8_t i = 0;
                          i < mFDP->ConsumeIntegralInRange<int8_t>(kMinElements, kMaxElements);
                          ++i) {
-                        int32_t numFds = mFDP->ConsumeIntegralInRange<int32_t>(kNumMin, kNumMax);
-                        int32_t numInts = mFDP->ConsumeIntegralInRange<int32_t>(kNumMin, kNumMax);
-                        native_handle_t* handle = native_handle_create(numFds, numInts);
+                        native_handle_t* handle = createNativeHandle();
                         handles.push_back(handle);
                     }
                     mCamera->releaseRecordingFrameHandleBatch(handles);
@@ -320,9 +327,7 @@
                     for (int8_t i = 0;
                          i < mFDP->ConsumeIntegralInRange<int8_t>(kMinElements, kMaxElements);
                          ++i) {
-                        int32_t numFds = mFDP->ConsumeIntegralInRange<int32_t>(kNumMin, kNumMax);
-                        int32_t numInts = mFDP->ConsumeIntegralInRange<int32_t>(kNumMin, kNumMax);
-                        native_handle_t* handle = native_handle_create(numFds, numInts);
+                        native_handle_t* handle = createNativeHandle();
                         handles.push_back(handle);
                     }
                     std::vector<nsecs_t> timestamps;
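
[Reviewer note] The new createNativeHandle() helper consolidates the fuzzed handle creation and now actually populates the fd slots, which the old inline native_handle_create() calls never did. A hedged sketch of the matching teardown a caller is expected to perform (the fuzzer's cleanup path is not shown in this hunk):

    // Assumed lifecycle for handles produced by createNativeHandle(); not part of this diff.
    #include <cutils/native_handle.h>

    static void destroyFuzzedHandle(native_handle_t* handle) {
        if (handle == nullptr) return;
        native_handle_close(handle);   // closes every fd stored in handle->data[0..numFds)
        native_handle_delete(handle);  // frees the native_handle_t allocation itself
    }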
diff --git a/media/aconfig/codec_fwk.aconfig b/media/aconfig/codec_fwk.aconfig
index 9f64a28..d662585 100644
--- a/media/aconfig/codec_fwk.aconfig
+++ b/media/aconfig/codec_fwk.aconfig
@@ -44,6 +44,16 @@
 }
 
 flag {
+  name: "input_surface_throttle"
+  namespace: "codec_fwk"
+  description: "Bugfix flag for input surface throttle"
+  bug: "342269852"
+  metadata {
+    purpose: PURPOSE_BUGFIX
+  }
+}
+
+flag {
   name: "large_audio_frame_finish"
   namespace: "codec_fwk"
   description: "Implementation flag for large audio frame finishing tasks"
diff --git a/media/audio/aconfig/aaudio.aconfig b/media/audio/aconfig/aaudio.aconfig
index c160109..f9fb4c7 100644
--- a/media/audio/aconfig/aaudio.aconfig
+++ b/media/audio/aconfig/aaudio.aconfig
@@ -11,3 +11,10 @@
     description: "Enable the AAudio sample rate converter."
     bug: "219533889"
 }
+
+flag {
+    name: "start_stop_client_from_command_thread"
+    namespace: "media_audio"
+    description: "Start or stop client from command thread."
+    bug: "341627085"
+}
diff --git a/media/codec2/components/flac/C2SoftFlacEnc.cpp b/media/codec2/components/flac/C2SoftFlacEnc.cpp
index 7b63e75..780660e 100644
--- a/media/codec2/components/flac/C2SoftFlacEnc.cpp
+++ b/media/codec2/components/flac/C2SoftFlacEnc.cpp
@@ -155,7 +155,7 @@
     mSignalledError = false;
     mSignalledOutputEos = false;
     mIsFirstFrame = true;
-    mAnchorTimeStamp = 0ull;
+    mAnchorTimeStamp = 0;
     mProcessedSamples = 0u;
     mEncoderWriteData = false;
     mEncoderReturnedNbBytes = 0;
@@ -186,7 +186,7 @@
     mSignalledError = false;
     mSignalledOutputEos = false;
     mIsFirstFrame = true;
-    mAnchorTimeStamp = 0ull;
+    mAnchorTimeStamp = 0;
     mProcessedSamples = 0u;
     mEncoderWriteData = false;
     mEncoderReturnedNbBytes = 0;
@@ -236,7 +236,7 @@
               inSize, (int)work->input.ordinal.timestamp.peeku(),
               (int)work->input.ordinal.frameIndex.peeku(), work->input.flags);
     if (mIsFirstFrame && inSize) {
-        mAnchorTimeStamp = work->input.ordinal.timestamp.peekull();
+        mAnchorTimeStamp = work->input.ordinal.timestamp.peekll();
         mIsFirstFrame = false;
     }
 
@@ -405,7 +405,7 @@
     C2WriteView wView = mOutputBlock->map().get();
     uint8_t* outData = wView.data();
     const uint32_t sampleRate = mIntf->getSampleRate();
-    const uint64_t outTimeStamp = mProcessedSamples * 1000000ll / sampleRate;
+    const int64_t outTimeStamp = mProcessedSamples * 1000000ll / sampleRate;
     ALOGV("writing %zu bytes of encoded data on output", bytes);
     // increment mProcessedSamples to maintain audio synchronization during
     // play back
diff --git a/media/codec2/components/flac/C2SoftFlacEnc.h b/media/codec2/components/flac/C2SoftFlacEnc.h
index 1f3be3c..ed9c298 100644
--- a/media/codec2/components/flac/C2SoftFlacEnc.h
+++ b/media/codec2/components/flac/C2SoftFlacEnc.h
@@ -72,7 +72,7 @@
     bool mSignalledOutputEos;
     uint32_t mBlockSize;
     bool mIsFirstFrame;
-    uint64_t mAnchorTimeStamp;
+    int64_t mAnchorTimeStamp;
     uint64_t mProcessedSamples;
     // should the data received by the callback be written to the output port
     bool mEncoderWriteData;
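
[Reviewer note] The anchor timestamp changes from uint64_t/peekull() to int64_t/peekll() so that negative input timestamps, which C2 ordinals permit, propagate as negative values instead of wrapping to huge unsigned ones. A small illustration of the arithmetic (values are made up; the real code combines the anchor with the per-block offset elsewhere):

    // Why the anchor must be signed: with uint64_t, a negative first timestamp wraps.
    #include <cstdint>
    #include <cstdio>

    int main() {
        int64_t anchorUs = -21333;          // first input timestamp in microseconds, may be negative
        uint64_t processedSamples = 4608;   // samples processed so far (illustrative)
        int64_t blockOffsetUs = processedSamples * 1000000ll / 48000;           // 96000 us
        std::printf("pts = %lld us\n", (long long)(anchorUs + blockOffsetUs));  // 74667 us
        return 0;
    }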
diff --git a/media/codec2/components/mpeg2/Android.bp b/media/codec2/components/mpeg2/Android.bp
index a58044c..e644ee3 100644
--- a/media/codec2/components/mpeg2/Android.bp
+++ b/media/codec2/components/mpeg2/Android.bp
@@ -14,6 +14,10 @@
         "libcodec2_soft_sanitize_signed-defaults",
     ],
 
+    cflags: [
+        "-DKEEP_THREADS_ACTIVE=0",
+    ],
+
     srcs: ["C2SoftMpeg2Dec.cpp"],
 
     static_libs: ["libmpeg2dec"],
diff --git a/media/codec2/components/mpeg2/C2SoftMpeg2Dec.cpp b/media/codec2/components/mpeg2/C2SoftMpeg2Dec.cpp
index 491098d..562dcf5 100644
--- a/media/codec2/components/mpeg2/C2SoftMpeg2Dec.cpp
+++ b/media/codec2/components/mpeg2/C2SoftMpeg2Dec.cpp
@@ -16,6 +16,9 @@
 
 //#define LOG_NDEBUG 0
 #define LOG_TAG "C2SoftMpeg2Dec"
+#ifndef KEEP_THREADS_ACTIVE
+#define KEEP_THREADS_ACTIVE 0
+#endif
 #include <log/log.h>
 
 #include <media/stagefright/foundation/MediaDefs.h>
@@ -433,7 +436,7 @@
 
     s_fill_mem_ip.s_ivd_fill_mem_rec_ip_t.u4_size = sizeof(ivdext_fill_mem_rec_ip_t);
     s_fill_mem_ip.u4_share_disp_buf = 0;
-    s_fill_mem_ip.u4_keep_threads_active = 1;
+    s_fill_mem_ip.u4_keep_threads_active = KEEP_THREADS_ACTIVE;
     s_fill_mem_ip.e_output_format = mIvColorformat;
     s_fill_mem_ip.u4_deinterlace = 1;
     s_fill_mem_ip.s_ivd_fill_mem_rec_ip_t.e_cmd = IV_CMD_FILL_NUM_MEM_REC;
@@ -475,7 +478,7 @@
     s_init_ip.s_ivd_init_ip_t.u4_frm_max_ht = mHeight;
     s_init_ip.u4_share_disp_buf = 0;
     s_init_ip.u4_deinterlace = 1;
-    s_init_ip.u4_keep_threads_active = 1;
+    s_init_ip.u4_keep_threads_active = KEEP_THREADS_ACTIVE;
     s_init_ip.s_ivd_init_ip_t.u4_num_mem_rec = mNumMemRecords;
     s_init_ip.s_ivd_init_ip_t.e_output_format = mIvColorformat;
     s_init_op.s_ivd_init_op_t.u4_size = sizeof(ivdext_init_op_t);
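
[Reviewer note] KEEP_THREADS_ACTIVE is now a build-time switch: Android.bp passes -DKEEP_THREADS_ACTIVE=0 and the source keeps an #ifndef fallback so other build configurations still compile. The general pattern, for reference (this block is illustration, not additional tree code):

    // Compile-time switch with a safe in-source default, overridable from the build file
    // (e.g. cflags: ["-DKEEP_THREADS_ACTIVE=1"]).
    #ifndef KEEP_THREADS_ACTIVE
    #define KEEP_THREADS_ACTIVE 0
    #endif

    static constexpr bool kKeepThreadsActive = (KEEP_THREADS_ACTIVE != 0);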
diff --git a/media/codec2/core/Android.bp b/media/codec2/core/Android.bp
index 7d5740b..c205dcd 100644
--- a/media/codec2/core/Android.bp
+++ b/media/codec2/core/Android.bp
@@ -26,9 +26,6 @@
         "//apex_available:platform",
         "com.android.media.swcodec",
     ],
-    vndk: {
-        enabled: true,
-    },
     double_loadable: true,
 
     srcs: ["C2.cpp"],
diff --git a/media/codec2/sfplugin/C2AidlNode.cpp b/media/codec2/sfplugin/C2AidlNode.cpp
index 93c9d8b..0f23205 100644
--- a/media/codec2/sfplugin/C2AidlNode.cpp
+++ b/media/codec2/sfplugin/C2AidlNode.cpp
@@ -105,6 +105,10 @@
     return mImpl->onInputBufferDone(index);
 }
 
+void C2AidlNode::onInputBufferEmptied() {
+    return mImpl->onInputBufferEmptied();
+}
+
 android_dataspace C2AidlNode::getDataspace() {
     return mImpl->getDataspace();
 }
diff --git a/media/codec2/sfplugin/C2AidlNode.h b/media/codec2/sfplugin/C2AidlNode.h
index 365a41d..9dd3504 100644
--- a/media/codec2/sfplugin/C2AidlNode.h
+++ b/media/codec2/sfplugin/C2AidlNode.h
@@ -68,13 +68,19 @@
     void setFrameSize(uint32_t width, uint32_t height);
 
     /**
-     * Clean up work item reference.
+     * Notify that the input buffer reference is no longer needed by the component.
+     * Clean up if necessary.
      *
      * \param index input work index
      */
     void onInputBufferDone(c2_cntr64_t index);
 
     /**
+     * Notify that an input buffer has been emptied.
+     */
+    void onInputBufferEmptied();
+
+    /**
      * Returns dataspace information from GraphicBufferSource.
      */
     android_dataspace getDataspace();
diff --git a/media/codec2/sfplugin/C2NodeImpl.cpp b/media/codec2/sfplugin/C2NodeImpl.cpp
index 6f53e0f..585072d 100644
--- a/media/codec2/sfplugin/C2NodeImpl.cpp
+++ b/media/codec2/sfplugin/C2NodeImpl.cpp
@@ -25,6 +25,7 @@
 #include <C2Debug.h>
 #include <C2PlatformSupport.h>
 
+#include <android_media_codec.h>
 #include <android/fdsan.h>
 #include <media/stagefright/foundation/ColorUtils.h>
 #include <ui/Fence.h>
@@ -373,7 +374,10 @@
     }
     work->worklets.clear();
     work->worklets.emplace_back(new C2Worklet);
-    mBufferIdsInUse.lock()->emplace(work->input.ordinal.frameIndex.peeku(), buffer);
+    {
+        Mutexed<BuffersTracker>::Locked buffers(mBuffersTracker);
+        buffers->mIdsInUse.emplace(work->input.ordinal.frameIndex.peeku(), buffer);
+    }
     mQueueThread->queue(comp, fenceFd, std::move(work), std::move(fd0), std::move(fd1));
 
     return OK;
@@ -405,29 +409,74 @@
 }
 
 void C2NodeImpl::onInputBufferDone(c2_cntr64_t index) {
-    if (mAidlHal) {
-        if (!mAidlBufferSource) {
-            ALOGD("Buffer source not set (index=%llu)", index.peekull());
-            return;
-        }
-    } else {
-        if (!mBufferSource) {
-            ALOGD("Buffer source not set (index=%llu)", index.peekull());
-            return;
-        }
-    }
-
-    int32_t bufferId = 0;
-    {
-        decltype(mBufferIdsInUse)::Locked bufferIds(mBufferIdsInUse);
-        auto it = bufferIds->find(index.peeku());
-        if (it == bufferIds->end()) {
+    if (android::media::codec::provider_->input_surface_throttle()) {
+        Mutexed<BuffersTracker>::Locked buffers(mBuffersTracker);
+        auto it = buffers->mIdsInUse.find(index.peeku());
+        if (it == buffers->mIdsInUse.end()) {
             ALOGV("Untracked input index %llu (maybe already removed)", index.peekull());
             return;
         }
-        bufferId = it->second;
-        (void)bufferIds->erase(it);
+        int32_t bufferId = it->second;
+        (void)buffers->mIdsInUse.erase(it);
+        buffers->mAvailableIds.push_back(bufferId);
+    } else {
+        if (!hasBufferSource()) {
+            return;
+        }
+        int32_t bufferId = 0;
+        {
+            Mutexed<BuffersTracker>::Locked buffers(mBuffersTracker);
+            auto it = buffers->mIdsInUse.find(index.peeku());
+            if (it == buffers->mIdsInUse.end()) {
+                ALOGV("Untracked input index %llu (maybe already removed)", index.peekull());
+                return;
+            }
+            bufferId = it->second;
+            (void)buffers->mIdsInUse.erase(it);
+        }
+        notifyInputBufferEmptied(bufferId);
     }
+}
+
+void C2NodeImpl::onInputBufferEmptied() {
+    if (!android::media::codec::provider_->input_surface_throttle()) {
+        ALOGE("onInputBufferEmptied should not be called "
+              "when input_surface_throttle is false");
+        return;
+    }
+    if (!hasBufferSource()) {
+        return;
+    }
+    int32_t bufferId = 0;
+    {
+        Mutexed<BuffersTracker>::Locked buffers(mBuffersTracker);
+        if (buffers->mAvailableIds.empty()) {
+            ALOGV("The codec is ready to take more input buffers "
+                    "but no input buffers are ready yet.");
+            return;
+        }
+        bufferId = buffers->mAvailableIds.front();
+        buffers->mAvailableIds.pop_front();
+    }
+    notifyInputBufferEmptied(bufferId);
+}
+
+bool C2NodeImpl::hasBufferSource() {
+    if (mAidlHal) {
+        if (!mAidlBufferSource) {
+            ALOGD("Buffer source not set");
+            return false;
+        }
+    } else {
+        if (!mBufferSource) {
+            ALOGD("Buffer source not set");
+            return false;
+        }
+    }
+    return true;
+}
+
+void C2NodeImpl::notifyInputBufferEmptied(int32_t bufferId) {
     if (mAidlHal) {
         ::ndk::ScopedFileDescriptor nullFence;
         (void)mAidlBufferSource->onInputBufferEmptied(bufferId, nullFence);
diff --git a/media/codec2/sfplugin/C2NodeImpl.h b/media/codec2/sfplugin/C2NodeImpl.h
index e060fd8..cc826b4 100644
--- a/media/codec2/sfplugin/C2NodeImpl.h
+++ b/media/codec2/sfplugin/C2NodeImpl.h
@@ -73,13 +73,19 @@
     void setFrameSize(uint32_t width, uint32_t height);
 
     /**
-     * Clean up work item reference.
+     * Notify that the input buffer reference is no longer needed by the component.
+     * Clean up if necessary.
      *
      * \param index input work index
      */
     void onInputBufferDone(c2_cntr64_t index);
 
     /**
+     * Notify that an input buffer has been emptied.
+     */
+    void onInputBufferEmptied();
+
+    /**
      * Returns dataspace information from GraphicBufferSource.
      */
     android_dataspace getDataspace();
@@ -118,12 +124,24 @@
     c2_cntr64_t mPrevInputTimestamp; // input timestamp for previous frame
     c2_cntr64_t mPrevCodecTimestamp; // adjusted (codec) timestamp for previous frame
 
-    Mutexed<std::map<uint64_t, uint32_t>> mBufferIdsInUse;
+    // Tracks the status of buffers
+    struct BuffersTracker {
+        BuffersTracker() = default;
+
+        // Keeps track of buffers that are used by the component. Maps timestamp -> ID
+        std::map<uint64_t, uint32_t> mIdsInUse;
+        // Keeps track of the buffer IDs that are available after being released from the component.
+        std::list<uint32_t> mAvailableIds;
+    };
+    Mutexed<BuffersTracker> mBuffersTracker;
 
     class QueueThread;
     sp<QueueThread> mQueueThread;
 
     bool mAidlHal;
+
+    bool hasBufferSource();
+    void notifyInputBufferEmptied(int32_t bufferId);
 };
 
 }  // namespace android
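
[Reviewer note] With input_surface_throttle enabled, onInputBufferDone() no longer notifies GraphicBufferSource directly; it parks the freed buffer ID in mAvailableIds, and onInputBufferEmptied() (invoked from CCodecBufferChannel's feed-input path once the pipeline has room) pops one ID and performs the actual notification. A simplified, self-contained model of that hand-off, for illustration only:

    // Simplified model of the BuffersTracker hand-off; the real code lives in C2NodeImpl
    // and is guarded by the input_surface_throttle flag.
    #include <cstdint>
    #include <list>
    #include <map>
    #include <optional>

    struct BuffersTrackerModel {
        std::map<uint64_t, uint32_t> idsInUse;  // frame index -> buffer ID held by the component
        std::list<uint32_t> availableIds;       // released IDs waiting for pipeline room

        void onInputBufferDone(uint64_t frameIndex) {     // component released its reference
            auto it = idsInUse.find(frameIndex);
            if (it == idsInUse.end()) return;             // untracked, maybe already removed
            availableIds.push_back(it->second);
            idsInUse.erase(it);
        }
        std::optional<uint32_t> onInputBufferEmptied() {  // pipeline has room: pick one ID
            if (availableIds.empty()) return std::nullopt;
            uint32_t id = availableIds.front();           // caller notifies the buffer source
            availableIds.pop_front();
            return id;
        }
    };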
diff --git a/media/codec2/sfplugin/C2OMXNode.cpp b/media/codec2/sfplugin/C2OMXNode.cpp
index ce02c88..98e25e2 100644
--- a/media/codec2/sfplugin/C2OMXNode.cpp
+++ b/media/codec2/sfplugin/C2OMXNode.cpp
@@ -291,6 +291,10 @@
     return mImpl->onInputBufferDone(index);
 }
 
+void C2OMXNode::onInputBufferEmptied() {
+    return mImpl->onInputBufferEmptied();
+}
+
 android_dataspace C2OMXNode::getDataspace() {
     return mImpl->getDataspace();
 }
diff --git a/media/codec2/sfplugin/C2OMXNode.h b/media/codec2/sfplugin/C2OMXNode.h
index d077202..5549b88 100644
--- a/media/codec2/sfplugin/C2OMXNode.h
+++ b/media/codec2/sfplugin/C2OMXNode.h
@@ -86,13 +86,19 @@
     void setFrameSize(uint32_t width, uint32_t height);
 
     /**
-     * Clean up work item reference.
+     * Notify that the input buffer reference is no longer needed by the component.
+     * Clean up if necessary.
      *
      * \param index input work index
      */
     void onInputBufferDone(c2_cntr64_t index);
 
     /**
+     * Notify that an input buffer has been emptied.
+     */
+    void onInputBufferEmptied();
+
+    /**
      * Returns dataspace information from GraphicBufferSource.
      */
     android_dataspace getDataspace();
diff --git a/media/codec2/sfplugin/CCodec.cpp b/media/codec2/sfplugin/CCodec.cpp
index a897fa0..ca0aabb 100644
--- a/media/codec2/sfplugin/CCodec.cpp
+++ b/media/codec2/sfplugin/CCodec.cpp
@@ -444,6 +444,10 @@
         mNode->onInputBufferDone(index);
     }
 
+    void onInputBufferEmptied() override {
+        mNode->onInputBufferEmptied();
+    }
+
     android_dataspace getDataspace() override {
         return mNode->getDataspace();
     }
@@ -663,6 +667,10 @@
         mNode->onInputBufferDone(index);
     }
 
+    void onInputBufferEmptied() override {
+        mNode->onInputBufferEmptied();
+    }
+
     android_dataspace getDataspace() override {
         return mNode->getDataspace();
     }
diff --git a/media/codec2/sfplugin/CCodecBufferChannel.cpp b/media/codec2/sfplugin/CCodecBufferChannel.cpp
index c7ab82f..f0a4180 100644
--- a/media/codec2/sfplugin/CCodecBufferChannel.cpp
+++ b/media/codec2/sfplugin/CCodecBufferChannel.cpp
@@ -1069,6 +1069,10 @@
             return;
         }
     }
+    if (android::media::codec::provider_->input_surface_throttle()
+            && mInputSurface != nullptr) {
+        mInputSurface->onInputBufferEmptied();
+    }
     size_t numActiveSlots = 0;
     while (!mPipelineWatcher.lock()->pipelineFull()) {
         sp<MediaCodecBuffer> inBuffer;
diff --git a/media/codec2/sfplugin/InputSurfaceWrapper.h b/media/codec2/sfplugin/InputSurfaceWrapper.h
index 4bf6cd0..c158c5b 100644
--- a/media/codec2/sfplugin/InputSurfaceWrapper.h
+++ b/media/codec2/sfplugin/InputSurfaceWrapper.h
@@ -102,6 +102,7 @@
     }
 
     /**
+     * Notify that the input buffer reference is no longer needed.
      * Clean up C2Work related references if necessary. No-op by default.
      *
      * \param index index of input work.
@@ -109,6 +110,12 @@
     virtual void onInputBufferDone(c2_cntr64_t /* index */) {}
 
     /**
+     * Signal one input buffer as emptied.
+     * No-op by default.
+     */
+    virtual void onInputBufferEmptied() {}
+
+    /**
      * Returns dataspace information from GraphicBufferSource.
      */
     virtual android_dataspace getDataspace() { return mDataSpace; }
diff --git a/media/libaaudio/fuzzer/Android.bp b/media/libaaudio/fuzzer/Android.bp
index fc8ad77..ba231c1 100644
--- a/media/libaaudio/fuzzer/Android.bp
+++ b/media/libaaudio/fuzzer/Android.bp
@@ -48,6 +48,7 @@
         "aaudio-aidl-cpp",
         "audioclient-types-aidl-cpp",
         "audioflinger-aidl-cpp",
+        "audio-permission-aidl-cpp",
         "audiopolicy-aidl-cpp",
         "audiopolicy-types-aidl-cpp",
         "av-types-aidl-cpp",
diff --git a/media/libaudioclient/Android.bp b/media/libaudioclient/Android.bp
index 04a8a45..5785537 100644
--- a/media/libaudioclient/Android.bp
+++ b/media/libaudioclient/Android.bp
@@ -315,6 +315,7 @@
         "aidl/android/media/DeviceConnectedState.aidl",
         "aidl/android/media/EffectDescriptor.aidl",
         "aidl/android/media/SurroundSoundConfig.aidl",
+        "aidl/android/media/TrackInternalMuteInfo.aidl",
         "aidl/android/media/TrackSecondaryOutputInfo.aidl",
     ],
     defaults: [
@@ -457,6 +458,7 @@
         "latest_android_media_audio_common_types_import_interface",
     ],
     imports: [
+        "audio-permission-aidl",
         "audioclient-types-aidl",
         "audiopolicy-types-aidl",
         "capture_state_listener-aidl",
diff --git a/media/libaudioclient/AudioSystem.cpp b/media/libaudioclient/AudioSystem.cpp
index aa51652..3f4fcfd 100644
--- a/media/libaudioclient/AudioSystem.cpp
+++ b/media/libaudioclient/AudioSystem.cpp
@@ -1289,6 +1289,21 @@
     (void) status;
 }
 
+status_t AudioSystem::setDeviceAbsoluteVolumeEnabled(audio_devices_t deviceType,
+                                                     const char *address,
+                                                     bool enabled,
+                                                     audio_stream_type_t streamToDriveAbs) {
+    const sp<IAudioPolicyService> aps = get_audio_policy_service();
+    if (aps == nullptr) return PERMISSION_DENIED;
+
+    AudioDevice deviceAidl = VALUE_OR_RETURN_STATUS(
+            legacy2aidl_audio_device_AudioDevice(deviceType, address));
+    AudioStreamType streamToDriveAbsAidl = VALUE_OR_RETURN_STATUS(
+            legacy2aidl_audio_stream_type_t_AudioStreamType(streamToDriveAbs));
+    return statusTFromBinderStatus(
+            aps->setDeviceAbsoluteVolumeEnabled(deviceAidl, enabled, streamToDriveAbsAidl));
+}
+
 status_t AudioSystem::initStreamVolume(audio_stream_type_t stream,
                                        int indexMin,
                                        int indexMax) {
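
[Reviewer note] setDeviceAbsoluteVolumeEnabled() follows the usual AudioSystem pattern of converting legacy types to AIDL and forwarding to IAudioPolicyService (the AIDL method is added further down). A hedged usage sketch; the device, address, and stream values are placeholders:

    // Illustrative call into the new API; argument values are placeholders.
    #include <media/AudioSystem.h>
    #include <system/audio.h>

    static android::status_t enableA2dpAbsoluteVolume() {
        return android::AudioSystem::setDeviceAbsoluteVolumeEnabled(
                AUDIO_DEVICE_OUT_BLUETOOTH_A2DP, "00:11:22:33:44:55" /* address */,
                true /* enabled */, AUDIO_STREAM_MUSIC /* streamToDriveAbs */);
    }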
diff --git a/media/libaudioclient/IAudioFlinger.cpp b/media/libaudioclient/IAudioFlinger.cpp
index aa5c840..cf3b43a 100644
--- a/media/libaudioclient/IAudioFlinger.cpp
+++ b/media/libaudioclient/IAudioFlinger.cpp
@@ -918,6 +918,11 @@
     return OK;
 }
 
+status_t AudioFlingerClientAdapter::setTracksInternalMute(
+        const std::vector<media::TrackInternalMuteInfo>& tracksInternalMuted) {
+    return statusTFromBinderStatus(mDelegate->setTracksInternalMute(tracksInternalMuted));
+}
+
 ////////////////////////////////////////////////////////////////////////////////////////////////////
 // AudioFlingerServerAdapter
 AudioFlingerServerAdapter::AudioFlingerServerAdapter(
@@ -1477,4 +1482,9 @@
     return Status::ok();
 }
 
+Status AudioFlingerServerAdapter::setTracksInternalMute(
+        const std::vector<media::TrackInternalMuteInfo>& tracksInternalMute) {
+    return Status::fromStatusT(mDelegate->setTracksInternalMute(tracksInternalMute));
+}
+
 } // namespace android
diff --git a/media/libaudioclient/aidl/android/media/IAudioFlingerService.aidl b/media/libaudioclient/aidl/android/media/IAudioFlingerService.aidl
index 31d3af5..e8fcf77 100644
--- a/media/libaudioclient/aidl/android/media/IAudioFlingerService.aidl
+++ b/media/libaudioclient/aidl/android/media/IAudioFlingerService.aidl
@@ -41,6 +41,7 @@
 import android.media.ISoundDoseCallback;
 import android.media.MicrophoneInfoFw;
 import android.media.RenderPosition;
+import android.media.TrackInternalMuteInfo;
 import android.media.TrackSecondaryOutputInfo;
 import android.media.audio.common.AudioChannelLayout;
 import android.media.audio.common.AudioFormatDescription;
@@ -293,6 +294,11 @@
      */
     AudioPortFw getAudioMixPort(in AudioPortFw devicePort, in AudioPortFw mixPort);
 
+    /**
+     * Set internal mute for a list of tracks.
+     */
+    void setTracksInternalMute(in TrackInternalMuteInfo[] tracksInternalMute);
+
     // When adding a new method, please review and update
     // IAudioFlinger.h AudioFlingerServerAdapter::Delegate::TransactionCode
     // AudioFlinger.cpp AudioFlinger::onTransactWrapper()
diff --git a/media/libaudioclient/aidl/android/media/IAudioPolicyService.aidl b/media/libaudioclient/aidl/android/media/IAudioPolicyService.aidl
index 633493c..ac42ea9 100644
--- a/media/libaudioclient/aidl/android/media/IAudioPolicyService.aidl
+++ b/media/libaudioclient/aidl/android/media/IAudioPolicyService.aidl
@@ -58,6 +58,8 @@
 import android.media.audio.common.AudioUuid;
 import android.media.audio.common.Int;
 
+import com.android.media.permission.INativePermissionController;
+
 /**
  * IAudioPolicyService interface (see AudioPolicyInterface for method descriptions).
  *
@@ -114,6 +116,10 @@
 
     void releaseInput(int /* audio_port_handle_t */ portId);
 
+    oneway void setDeviceAbsoluteVolumeEnabled(in AudioDevice device,
+                                               boolean enabled,
+                                               AudioStreamType streamToDriveAbs);
+
     void initStreamVolume(AudioStreamType stream,
                           int indexMin,
                           int indexMax);
@@ -471,6 +477,11 @@
                                        int /* uid_t */ uid);
 
 
+    /**
+     * Get the native permission controller for audioserver, to push package and permission info
+     * required to control audio access.
+     */
+    INativePermissionController getPermissionController();
     // When adding a new method, please review and update
     // AudioPolicyService.cpp AudioPolicyService::onTransact()
     // AudioPolicyService.cpp IAUDIOPOLICYSERVICE_BINDER_METHOD_MACRO_LIST
diff --git a/media/libaudioclient/aidl/android/media/TrackInternalMuteInfo.aidl b/media/libaudioclient/aidl/android/media/TrackInternalMuteInfo.aidl
new file mode 100644
index 0000000..05b1fa4
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/TrackInternalMuteInfo.aidl
@@ -0,0 +1,23 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+parcelable TrackInternalMuteInfo {
+    /* Interpreted as audio_port_handle_t. */
+    int portId;
+    boolean muted;
+}
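
[Reviewer note] A hedged sketch of how a client could populate the new parcelable and call the setTracksInternalMute() entry point added to IAudioFlinger in this patch; the port ID is a placeholder and error handling is omitted:

    // Sketch only: mute a single track by port ID via the new IAudioFlinger method.
    #include <android/media/TrackInternalMuteInfo.h>
    #include <media/IAudioFlinger.h>

    static android::status_t muteOneTrack(const android::sp<android::IAudioFlinger>& af,
                                          int32_t portId) {
        android::media::TrackInternalMuteInfo info;
        info.portId = portId;  // interpreted as audio_port_handle_t
        info.muted = true;
        return af->setTracksInternalMute({info});
    }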
diff --git a/media/libaudioclient/aidl/fuzzer/Android.bp b/media/libaudioclient/aidl/fuzzer/Android.bp
index a0c535d..11955c9 100644
--- a/media/libaudioclient/aidl/fuzzer/Android.bp
+++ b/media/libaudioclient/aidl/fuzzer/Android.bp
@@ -22,6 +22,7 @@
     name: "libaudioclient_aidl_fuzzer_defaults",
     static_libs: [
         "android.hardware.audio.common@7.0-enums",
+        "audiopermissioncontroller",
         "libaudiomockhal",
         "libcgrouprc",
         "libcgrouprc_format",
diff --git a/media/libaudioclient/include/media/AudioSystem.h b/media/libaudioclient/include/media/AudioSystem.h
index 5c9a7c6..9cfd540 100644
--- a/media/libaudioclient/include/media/AudioSystem.h
+++ b/media/libaudioclient/include/media/AudioSystem.h
@@ -374,9 +374,13 @@
     static status_t startInput(audio_port_handle_t portId);
     static status_t stopInput(audio_port_handle_t portId);
     static void releaseInput(audio_port_handle_t portId);
+    static status_t setDeviceAbsoluteVolumeEnabled(audio_devices_t deviceType,
+                                                   const char *address,
+                                                   bool enabled,
+                                                   audio_stream_type_t streamToDriveAbs);
     static status_t initStreamVolume(audio_stream_type_t stream,
-                                      int indexMin,
-                                      int indexMax);
+                                     int indexMin,
+                                     int indexMax);
     static status_t setStreamVolumeIndex(audio_stream_type_t stream,
                                          int index,
                                          audio_devices_t device);
diff --git a/media/libaudioclient/include/media/IAudioFlinger.h b/media/libaudioclient/include/media/IAudioFlinger.h
index 5a1e037..860a0bc 100644
--- a/media/libaudioclient/include/media/IAudioFlinger.h
+++ b/media/libaudioclient/include/media/IAudioFlinger.h
@@ -60,6 +60,7 @@
 #include "android/media/OpenInputResponse.h"
 #include "android/media/OpenOutputRequest.h"
 #include "android/media/OpenOutputResponse.h"
+#include "android/media/TrackInternalMuteInfo.h"
 #include "android/media/TrackSecondaryOutputInfo.h"
 
 namespace android {
@@ -388,6 +389,9 @@
 
     virtual status_t getAudioMixPort(const struct audio_port_v7 *devicePort,
                                      struct audio_port_v7 *mixPort) const = 0;
+
+    virtual status_t setTracksInternalMute(
+            const std::vector<media::TrackInternalMuteInfo>& tracksInternalMute) = 0;
 };
 
 /**
@@ -504,6 +508,8 @@
     status_t getAudioPolicyConfig(media::AudioPolicyConfig* output) override;
     status_t getAudioMixPort(const struct audio_port_v7 *devicePort,
                              struct audio_port_v7 *mixPort) const override;
+    status_t setTracksInternalMute(
+            const std::vector<media::TrackInternalMuteInfo>& tracksInternalMute) override;
 
 private:
     const sp<media::IAudioFlingerService> mDelegate;
@@ -606,6 +612,7 @@
             GET_AUDIO_POLICY_CONFIG =
                     media::BnAudioFlingerService::TRANSACTION_getAudioPolicyConfig,
             GET_AUDIO_MIX_PORT = media::BnAudioFlingerService::TRANSACTION_getAudioMixPort,
+            SET_TRACKS_INTERNAL_MUTE = media::BnAudioFlingerService::TRANSACTION_setTracksInternalMute,
         };
 
     protected:
@@ -742,6 +749,8 @@
     Status getAudioMixPort(const media::AudioPortFw& devicePort,
                            const media::AudioPortFw& mixPort,
                            media::AudioPortFw* _aidl_return) override;
+    Status setTracksInternalMute(
+            const std::vector<media::TrackInternalMuteInfo>& tracksInternalMute) override;
 private:
     const sp<AudioFlingerServerAdapter::Delegate> mDelegate;
 };
diff --git a/media/libaudioclient/tests/Android.bp b/media/libaudioclient/tests/Android.bp
index 055da5b..9c3ad44 100644
--- a/media/libaudioclient/tests/Android.bp
+++ b/media/libaudioclient/tests/Android.bp
@@ -122,6 +122,7 @@
     ],
     static_libs: [
         "android.hardware.audio.common@7.0-enums",
+        "audio-permission-aidl-cpp",
         "audioclient-types-aidl-cpp",
         "audioflinger-aidl-cpp",
         "audiopolicy-aidl-cpp",
diff --git a/media/libaudiohal/impl/DeviceHalAidl.cpp b/media/libaudiohal/impl/DeviceHalAidl.cpp
index 032533c..1990858 100644
--- a/media/libaudiohal/impl/DeviceHalAidl.cpp
+++ b/media/libaudiohal/impl/DeviceHalAidl.cpp
@@ -387,7 +387,7 @@
         return runCb([](CbRef cb) { cb->onWriteReady(); });
     }
     ndk::ScopedAStatus onError() override {
-        return runCb([](CbRef cb) { cb->onError(); });
+        return runCb([](CbRef cb) { cb->onError(true /*isHardError*/); });
     }
     ndk::ScopedAStatus onDrainReady() override {
         return runCb([](CbRef cb) { cb->onDrainReady(); });
diff --git a/media/libaudiohal/impl/Hal2AidlMapper.cpp b/media/libaudiohal/impl/Hal2AidlMapper.cpp
index 453f9e2..cbade70 100644
--- a/media/libaudiohal/impl/Hal2AidlMapper.cpp
+++ b/media/libaudiohal/impl/Hal2AidlMapper.cpp
@@ -136,8 +136,8 @@
     // 'sinks' will not be updated because 'setAudioPatch' only needs IDs. Here we log
     // the source arguments, where only the audio configuration and device specifications
     // are relevant.
-    ALOGD("%s: [disregard IDs] sources: %s, sinks: %s",
-            __func__, ::android::internal::ToString(sources).c_str(),
+    ALOGD("%s: patch ID: %d, [disregard IDs] sources: %s, sinks: %s",
+            __func__, *patchId, ::android::internal::ToString(sources).c_str(),
             ::android::internal::ToString(sinks).c_str());
     auto fillPortConfigs = [&](
             const std::vector<AudioPortConfig>& configs,
@@ -209,11 +209,24 @@
         // that there can only be one patch for an I/O thread.
         PatchMatch match = sourceIsDevice && sinkIsDevice ?
                 MATCH_BOTH : (sourceIsDevice ? MATCH_SINKS : MATCH_SOURCES);
+        auto requestedPatch = patch;
         RETURN_STATUS_IF_ERROR(findOrCreatePatch(patch, match,
                                                  &patch, &created));
         // No cleanup of the patch is needed, it is managed by the framework.
         *patchId = patch.id;
         if (!created) {
+            requestedPatch.id = patch.id;
+            if (patch != requestedPatch) {
+                ALOGI("%s: Updating transient patch. Current: %s, new: %s",
+                        __func__, patch.toString().c_str(), requestedPatch.toString().c_str());
+                // Since matching may be done by mix port only, update the patch if the device port
+                // config has changed.
+                patch = requestedPatch;
+                RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(
+                                mModule->setAudioPatch(patch, &patch)));
+                existingPatchIt = mPatches.find(patch.id);
+                existingPatchIt->second = patch;
+            }
             // The framework might have "created" a patch which already existed due to
             // stream creation. Need to release the ownership from the stream.
             for (auto& s : mStreams) {
diff --git a/media/libaudiohal/impl/StreamHalAidl.cpp b/media/libaudiohal/impl/StreamHalAidl.cpp
index 84c1a8a..2fb4756 100644
--- a/media/libaudiohal/impl/StreamHalAidl.cpp
+++ b/media/libaudiohal/impl/StreamHalAidl.cpp
@@ -257,20 +257,71 @@
     ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
     TIME_CHECK();
     if (!mStream) return NO_INIT;
-    const auto state = getState();
-    StreamDescriptor::Reply reply;
-    if (state == StreamDescriptor::State::STANDBY) {
-        RETURN_STATUS_IF_ERROR(sendCommand(makeHalCommand<HalCommand::Tag::start>(), &reply, true));
-        return sendCommand(makeHalCommand<HalCommand::Tag::burst>(0), &reply, true);
+    if (!mContext.isMmapped()) {
+        return BAD_VALUE;
     }
-
-    return INVALID_OPERATION;
+    StreamDescriptor::Reply reply;
+    RETURN_STATUS_IF_ERROR(updateCountersIfNeeded(&reply));
+    switch (reply.state) {
+        case StreamDescriptor::State::STANDBY:
+            RETURN_STATUS_IF_ERROR(
+                    sendCommand(makeHalCommand<HalCommand::Tag::start>(), &reply, true));
+            if (reply.state != StreamDescriptor::State::IDLE) {
+                ALOGE("%s: unexpected stream state: %s (expected IDLE)",
+                        __func__, toString(reply.state).c_str());
+                return INVALID_OPERATION;
+            }
+            FALLTHROUGH_INTENDED;
+        case StreamDescriptor::State::IDLE:
+            RETURN_STATUS_IF_ERROR(
+                    sendCommand(makeHalCommand<HalCommand::Tag::burst>(0), &reply, true));
+            if (reply.state != StreamDescriptor::State::ACTIVE) {
+                ALOGE("%s: unexpected stream state: %s (expected ACTIVE)",
+                        __func__, toString(reply.state).c_str());
+                return INVALID_OPERATION;
+            }
+            FALLTHROUGH_INTENDED;
+        case StreamDescriptor::State::ACTIVE:
+            return OK;
+        case StreamDescriptor::State::DRAINING:
+            RETURN_STATUS_IF_ERROR(
+                    sendCommand(makeHalCommand<HalCommand::Tag::start>(), &reply, true));
+            if (reply.state != StreamDescriptor::State::ACTIVE) {
+                ALOGE("%s: unexpected stream state: %s (expected ACTIVE)",
+                        __func__, toString(reply.state).c_str());
+                return INVALID_OPERATION;
+            }
+            return OK;
+        default:
+            ALOGE("%s: not supported from %s stream state %s",
+                    __func__, mIsInput ? "input" : "output", toString(reply.state).c_str());
+            return INVALID_OPERATION;
+    }
 }
 
 status_t StreamHalAidl::stop() {
     ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
+    TIME_CHECK();
     if (!mStream) return NO_INIT;
-    return standby();
+    if (!mContext.isMmapped()) {
+        return BAD_VALUE;
+    }
+    StreamDescriptor::Reply reply;
+    RETURN_STATUS_IF_ERROR(updateCountersIfNeeded(&reply));
+    if (const auto state = reply.state; state == StreamDescriptor::State::ACTIVE) {
+        return drain(false /*earlyNotify*/, nullptr);
+    } else if (state == StreamDescriptor::State::DRAINING) {
+        RETURN_STATUS_IF_ERROR(pause());
+        return flush();
+    } else if (state == StreamDescriptor::State::PAUSED) {
+        return flush();
+    } else if (state != StreamDescriptor::State::IDLE &&
+            state != StreamDescriptor::State::STANDBY) {
+        ALOGE("%s: not supported from %s stream state %s",
+                __func__, mIsInput ? "input" : "output", toString(state).c_str());
+        return INVALID_OPERATION;
+    }
+    return OK;
 }
 
 status_t StreamHalAidl::getLatency(uint32_t *latency) {
@@ -905,10 +956,10 @@
     }
 }
 
-void StreamOutHalAidl::onError() {
+void StreamOutHalAidl::onError(bool isHardError) {
     onAsyncError();
     if (auto clientCb = mClientCallback.load().promote(); clientCb != nullptr) {
-        clientCb->onError();
+        clientCb->onError(isHardError);
     }
 }
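
[Reviewer note] start() and stop() are now restricted to MMAP streams (BAD_VALUE otherwise) and walk the StreamDescriptor state machine explicitly instead of only handling STANDBY. A compressed summary of the transitions implemented by the hunk above, for reference:

    // State ladder implemented by the new MMAP-only start()/stop() (summary of the code
    // above, not additional tree code):
    //
    //   start(): STANDBY  --start-->    IDLE  --burst(0)-->  ACTIVE
    //            IDLE     --burst(0)--> ACTIVE
    //            ACTIVE   -> already running, return OK
    //            DRAINING --start-->    ACTIVE
    //            any other state -> INVALID_OPERATION
    //
    //   stop():  ACTIVE   -> drain(full)
    //            DRAINING -> pause(), then flush()
    //            PAUSED   -> flush()
    //            IDLE or STANDBY -> nothing to do, return OK
    //            any other state -> INVALID_OPERATION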
 
diff --git a/media/libaudiohal/impl/StreamHalAidl.h b/media/libaudiohal/impl/StreamHalAidl.h
index fff7a92..9cb2cff 100644
--- a/media/libaudiohal/impl/StreamHalAidl.h
+++ b/media/libaudiohal/impl/StreamHalAidl.h
@@ -95,7 +95,8 @@
     size_t getBufferSizeBytes() const { return mFrameSizeBytes * mBufferSizeFrames; }
     size_t getBufferSizeFrames() const { return mBufferSizeFrames; }
     size_t getBufferDurationMs(int32_t sampleRate) const {
-        return sampleRate != 0 ? mBufferSizeFrames * MILLIS_PER_SECOND / sampleRate : 0;
+        auto bufferSize = mIsMmapped ? getMmapBurstSize() : mBufferSizeFrames;
+        return sampleRate != 0 ? bufferSize * MILLIS_PER_SECOND / sampleRate : 0;
     }
     CommandMQ* getCommandMQ() const { return mCommandMQ.get(); }
     DataMQ* getDataMQ() const { return mDataMQ.get(); }
@@ -104,7 +105,7 @@
     bool isAsynchronous() const { return mIsAsynchronous; }
     bool isMmapped() const { return mIsMmapped; }
     const MmapBufferDescriptor& getMmapBufferDescriptor() const { return mMmapBufferDescriptor; }
-
+    size_t getMmapBurstSize() const { return mMmapBufferDescriptor.burstSizeFrames; }
   private:
     static std::unique_ptr<DataMQ> maybeCreateDataMQ(
             const ::aidl::android::hardware::audio::core::StreamDescriptor& descriptor) {
@@ -385,7 +386,7 @@
     // StreamOutHalInterfaceCallback
     void onWriteReady() override;
     void onDrainReady() override;
-    void onError() override;
+    void onError(bool isHardError) override;
 
   private:
     friend class sp<StreamOutHalAidl>;
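
[Reviewer note] getBufferDurationMs() now reports the burst duration for MMAP streams rather than the full buffer duration. A worked example with illustrative numbers:

    // Updated duration math (illustrative values):
    //   non-MMAP: mBufferSizeFrames = 960 at 48000 Hz -> 960 * 1000 / 48000 = 20 ms
    //   MMAP:     burstSizeFrames   = 240 at 48000 Hz -> 240 * 1000 / 48000 =  5 ms
    static size_t bufferDurationMs(size_t frames, int32_t sampleRate) {
        return sampleRate != 0 ? frames * 1000 /* MILLIS_PER_SECOND */ / sampleRate : 0;
    }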
diff --git a/media/libaudiohal/impl/StreamHalHidl.cpp b/media/libaudiohal/impl/StreamHalHidl.cpp
index 9e22700..a931fdd 100644
--- a/media/libaudiohal/impl/StreamHalHidl.cpp
+++ b/media/libaudiohal/impl/StreamHalHidl.cpp
@@ -997,7 +997,7 @@
     sp<StreamOutHalInterfaceCallback> callback = mCallback.load().promote();
     if (callback == 0) return;
     ALOGV("asyncCallback onError");
-    callback->onError();
+    callback->onError(false /*isHardError*/);
 }
 
 void StreamOutHalHidl::onCodecFormatChanged(const std::vector<uint8_t>& metadataBs) {
diff --git a/media/libaudiohal/include/media/audiohal/StreamHalInterface.h b/media/libaudiohal/include/media/audiohal/StreamHalInterface.h
index 585a895..4bd7e3d 100644
--- a/media/libaudiohal/include/media/audiohal/StreamHalInterface.h
+++ b/media/libaudiohal/include/media/audiohal/StreamHalInterface.h
@@ -107,7 +107,7 @@
   public:
     virtual void onWriteReady() {}
     virtual void onDrainReady() {}
-    virtual void onError() {}
+    virtual void onError(bool /*isHardError*/) {}
 
   protected:
     StreamOutHalInterfaceCallback() = default;
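
[Reviewer note] StreamOutHalInterfaceCallback::onError() now carries an isHardError bit: the AIDL HAL path reports hard errors (see DeviceHalAidl.cpp above) while the HIDL path keeps the previous soft behavior, and AudioFlinger uses the hard case to release tracks (see AudioFlinger::onHardError below). A hedged sketch of a callback override; the handling policy shown is illustrative, not the framework's:

    #define LOG_TAG "MyStreamCallback"
    #include <media/audiohal/StreamHalInterface.h>
    #include <utils/Log.h>

    struct MyStreamCallback : public android::StreamOutHalInterfaceCallback {
        void onError(bool isHardError) override {
            ALOGW("output stream error (%s)",
                  isHardError ? "hard: tear down and recreate tracks" : "soft: retry");
        }
    };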
diff --git a/media/libaudiohal/tests/CoreAudioHalAidl_test.cpp b/media/libaudiohal/tests/CoreAudioHalAidl_test.cpp
index 5106874..0bd6fb0 100644
--- a/media/libaudiohal/tests/CoreAudioHalAidl_test.cpp
+++ b/media/libaudiohal/tests/CoreAudioHalAidl_test.cpp
@@ -39,6 +39,7 @@
 using ::aidl::android::hardware::audio::core::VendorParameter;
 using ::aidl::android::media::audio::common::AudioChannelLayout;
 using ::aidl::android::media::audio::common::AudioConfig;
+using ::aidl::android::media::audio::common::AudioDevice;
 using ::aidl::android::media::audio::common::AudioDeviceDescription;
 using ::aidl::android::media::audio::common::AudioDeviceType;
 using ::aidl::android::media::audio::common::AudioFormatDescription;
@@ -160,6 +161,24 @@
             createProfile(PcmType::INT_16_BIT, {AudioChannelLayout::LAYOUT_STEREO}, {48000})};
     Configuration c;
 
+    AudioPort micInDevice =
+            createPort(c.nextPortId++, "Built-In Mic", 0, true,
+                       createPortDeviceExt(AudioDeviceType::IN_MICROPHONE,
+                                           1 << AudioPortDeviceExt::FLAG_INDEX_DEFAULT_DEVICE));
+    micInDevice.profiles = standardPcmAudioProfiles;
+    c.ports.push_back(micInDevice);
+
+    AudioPort micInBackDevice =
+            createPort(c.nextPortId++, "Built-In Back Mic", 0, true,
+                       createPortDeviceExt(AudioDeviceType::IN_MICROPHONE_BACK, 0));
+    micInBackDevice.profiles = standardPcmAudioProfiles;
+    c.ports.push_back(micInBackDevice);
+
+    AudioPort primaryInMix =
+            createPort(c.nextPortId++, "primary input", 0, true, createPortMixExt(0, 1));
+    primaryInMix.profiles = standardPcmAudioProfiles;
+    c.ports.push_back(primaryInMix);
+
     AudioPort btOutDevice =
             createPort(c.nextPortId++, "BT A2DP Out", 0, false,
                        createPortDeviceExt(AudioDeviceType::OUT_DEVICE, 0,
@@ -172,6 +191,7 @@
     btOutMix.profiles = standardPcmAudioProfiles;
     c.ports.push_back(btOutMix);
 
+    c.routes.push_back(createRoute({micInDevice, micInBackDevice}, primaryInMix));
     c.routes.push_back(createRoute({btOutMix}, btOutDevice));
 
     return c;
@@ -184,6 +204,11 @@
     explicit ModuleMock(const Configuration& config) : mConfig(config) {}
     bool isScreenTurnedOn() const { return mIsScreenTurnedOn; }
     ScreenRotation getScreenRotation() const { return mScreenRotation; }
+    std::vector<AudioPatch> getPatches() {
+        std::vector<AudioPatch> result;
+        getAudioPatches(&result);
+        return result;
+    }
 
   private:
     ndk::ScopedAStatus setModuleDebug(
@@ -1141,3 +1166,51 @@
     EXPECT_EQ(0, mMapper->findFwkPatch(mPatch.id));
     EXPECT_EQ(0, mMapper->findFwkPatch(newPatchId));
 }
+
+TEST_F(Hal2AidlMapperTest, ChangeTransientPatchDevice) {
+    std::mutex mutex;  // Only needed for cleanups.
+    auto mapperAccessor = std::make_unique<LockedAccessor<Hal2AidlMapper>>(*mMapper, mutex);
+    Hal2AidlMapper::Cleanups cleanups(*mapperAccessor);
+    AudioConfig config;
+    config.base.channelMask = AudioChannelLayout::make<AudioChannelLayout::layoutMask>(
+            AudioChannelLayout::LAYOUT_STEREO);
+    config.base.format =
+            AudioFormatDescription{.type = AudioFormatType::PCM, .pcm = PcmType::INT_16_BIT};
+    config.base.sampleRate = 48000;
+    AudioDevice defaultDevice;
+    defaultDevice.type.type = AudioDeviceType::IN_DEFAULT;
+    AudioPortConfig mixPortConfig;
+    AudioPatch transientPatch;
+    ASSERT_EQ(OK, mMapper->prepareToOpenStream(43 /*ioHandle*/, defaultDevice,
+                                               AudioIoFlags::make<AudioIoFlags::input>(0),
+                                               AudioSource::DEFAULT, &cleanups, &config,
+                                               &mixPortConfig, &transientPatch));
+    cleanups.disarmAll();
+    ASSERT_NE(0, transientPatch.id);
+    ASSERT_NE(0, mixPortConfig.id);
+    sp<StreamHalInterface> stream = sp<StreamHalMock>::make();
+    mMapper->addStream(stream, mixPortConfig.id, transientPatch.id);
+
+    AudioPatch patch{};
+    int32_t patchId;
+    AudioPortConfig backMicPortConfig;
+    backMicPortConfig.channelMask = config.base.channelMask;
+    backMicPortConfig.format = config.base.format;
+    backMicPortConfig.sampleRate = aidl::android::media::audio::common::Int{config.base.sampleRate};
+    backMicPortConfig.flags = AudioIoFlags::make<AudioIoFlags::input>(0);
+    backMicPortConfig.ext = createPortDeviceExt(AudioDeviceType::IN_MICROPHONE_BACK, 0);
+    ASSERT_EQ(OK, mMapper->createOrUpdatePatch({backMicPortConfig}, {mixPortConfig}, &patchId,
+                                               &cleanups));
+    cleanups.disarmAll();
+    ASSERT_EQ(android::OK,
+              mMapper->findPortConfig(backMicPortConfig.ext.get<AudioPortExt::device>().device,
+                                      &backMicPortConfig));
+    EXPECT_NE(0, backMicPortConfig.id);
+
+    EXPECT_EQ(transientPatch.id, patchId);
+    auto patches = mModule->getPatches();
+    auto patchIt = findById(patches, patchId);
+    ASSERT_NE(patchIt, patches.end());
+    EXPECT_EQ(std::vector<int32_t>{backMicPortConfig.id}, patchIt->sourcePortConfigIds);
+    EXPECT_EQ(std::vector<int32_t>{mixPortConfig.id}, patchIt->sinkPortConfigIds);
+}
diff --git a/media/libeffects/lvm/wrapper/Reverb/aidl/ReverbContext.cpp b/media/libeffects/lvm/wrapper/Reverb/aidl/ReverbContext.cpp
index 44ea2a4..3ae3edc 100644
--- a/media/libeffects/lvm/wrapper/Reverb/aidl/ReverbContext.cpp
+++ b/media/libeffects/lvm/wrapper/Reverb/aidl/ReverbContext.cpp
@@ -19,6 +19,7 @@
 #define LOG_TAG "ReverbContext"
 #include <android-base/logging.h>
 #include <Utils.h>
+#include <audio_utils/primitives.h>
 
 #include "ReverbContext.h"
 #include "VectorArithmetic.h"
@@ -347,6 +348,15 @@
             mCommon.output.base.channelMask);
     int frameCount = mCommon.input.frameCount;
 
+    if (mBypass) {
+        if (isAuxiliary()) {
+            memset(out, 0, getOutputFrameSize() * frameCount);
+        } else {
+            memcpy_to_float_from_float_with_clamping(out, in, samples, 1);
+        }
+        return {STATUS_OK, samples, outChannels * frameCount};
+    }
+
     // Reverb only effects the stereo channels in multichannel source.
     if (channels < 1 || channels > LVM_MAX_CHANNELS) {
         LOG(ERROR) << __func__ << " process invalid PCM channels " << channels;
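
[Reviewer note] In the new bypass branch the auxiliary variant zeroes its output while the insert variant copies input to output through the audio_utils clamping copy. A rough equivalent of that copy, assuming memcpy_to_float_from_float_with_clamping clamps each sample to [-absMax, absMax]:

    // Rough equivalent of the clamped copy used in the bypass path (assumption about
    // the helper's semantics; illustration only).
    #include <algorithm>
    #include <cstddef>

    static void clampedCopy(float* out, const float* in, size_t samples, float absMax = 1.0f) {
        for (size_t i = 0; i < samples; ++i) {
            out[i] = std::clamp(in[i], -absMax, absMax);
        }
    }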
diff --git a/media/libmedia/Android.bp b/media/libmedia/Android.bp
index 840897f..9cd0e6e 100644
--- a/media/libmedia/Android.bp
+++ b/media/libmedia/Android.bp
@@ -81,9 +81,6 @@
 cc_library_shared {
     name: "libmedia_omx",
     vendor_available: true,
-    vndk: {
-        enabled: true,
-    },
     double_loadable: true,
 
     srcs: [
diff --git a/media/libmediahelper/Android.bp b/media/libmediahelper/Android.bp
index 649f813..b5867a6 100644
--- a/media/libmediahelper/Android.bp
+++ b/media/libmediahelper/Android.bp
@@ -30,9 +30,6 @@
     name: "libmedia_helper",
     vendor_available: true,
     min_sdk_version: "29",
-    vndk: {
-        enabled: true,
-    },
     double_loadable: true,
     srcs: [
         "AudioParameter.cpp",
diff --git a/media/libmediaplayerservice/StagefrightRecorder.cpp b/media/libmediaplayerservice/StagefrightRecorder.cpp
index 3ab32f0..f0e1b9e 100644
--- a/media/libmediaplayerservice/StagefrightRecorder.cpp
+++ b/media/libmediaplayerservice/StagefrightRecorder.cpp
@@ -2119,6 +2119,11 @@
 
     if (tsLayers > 1) {
         uint32_t bLayers = std::min(2u, tsLayers - 1); // use up-to 2 B-layers
+        // TODO(b/341121900): Remove this once B frames are handled correctly in the screen
+        // recorder use case when only the mic is used as the audio source
+        if (mAudioSource == AUDIO_SOURCE_MIC && mVideoSource == VIDEO_SOURCE_SURFACE) {
+            bLayers = 0;
+        }
         uint32_t pLayers = tsLayers - bLayers;
         format->setString(
                 "ts-schema", AStringPrintf("android.generic.%u+%u", pLayers, bLayers));
diff --git a/media/libmediaplayerservice/fuzzer/Android.bp b/media/libmediaplayerservice/fuzzer/Android.bp
index 0031855..78163e4 100644
--- a/media/libmediaplayerservice/fuzzer/Android.bp
+++ b/media/libmediaplayerservice/fuzzer/Android.bp
@@ -141,7 +141,6 @@
         "libplayerservice_datasource",
     ],
     shared_libs: [
-        "libmediaplayerservice",
         "libdatasource",
         "libdrmframework",
         "libstagefright_httplive",
diff --git a/media/libstagefright/data/media_codecs_sw.xml b/media/libstagefright/data/media_codecs_sw.xml
index d50bc1e..6fb9232 100644
--- a/media/libstagefright/data/media_codecs_sw.xml
+++ b/media/libstagefright/data/media_codecs_sw.xml
@@ -203,8 +203,7 @@
             <Feature name="dynamic-color-aspects" />
             <Attribute name="software-codec" />
         </MediaCodec>
-        <MediaCodec name="c2.android.av1.decoder" type="video/av01" variant="slow-cpu,!slow-cpu">
-            <!-- TODO: implement a mechanism to prevent AV1 Decoder usage on pre-U devices -->
+        <MediaCodec name="c2.android.av1-dav1d.decoder" type="video/av01" variant="slow-cpu,!slow-cpu">
             <Limit name="alignment" value="1x1" />
             <Limit name="block-size" value="16x16" />
             <Variant name="!slow-cpu">
@@ -224,7 +223,8 @@
             <Feature name="low-latency" />
             <Attribute name="software-codec" />
         </MediaCodec>
-        <MediaCodec name="c2.android.av1-dav1d.decoder" type="video/av01" variant="slow-cpu,!slow-cpu" rank="1024">
+        <MediaCodec name="c2.android.av1.decoder" type="video/av01" variant="slow-cpu,!slow-cpu" rank="1024">
+            <!-- TODO: implement a mechanism to prevent AV1 Decoder usage on pre-U devices -->
             <Limit name="alignment" value="1x1" />
             <Limit name="block-size" value="16x16" />
             <Variant name="!slow-cpu">
diff --git a/media/libstagefright/omx/Android.bp b/media/libstagefright/omx/Android.bp
index 79ab009..630817c 100644
--- a/media/libstagefright/omx/Android.bp
+++ b/media/libstagefright/omx/Android.bp
@@ -20,9 +20,6 @@
 cc_library_shared {
     name: "libstagefright_omx",
     vendor_available: true,
-    vndk: {
-        enabled: true,
-    },
     double_loadable: true,
 
     srcs: [
@@ -218,9 +215,6 @@
 cc_library_shared {
     name: "libstagefright_omx_utils",
     vendor_available: true,
-    vndk: {
-        enabled: true,
-    },
     double_loadable: true,
     srcs: ["OMXUtils.cpp"],
     export_include_dirs: [
diff --git a/media/libstagefright/renderfright/Android.bp b/media/libstagefright/renderfright/Android.bp
index 22b13f6..bb850ca 100644
--- a/media/libstagefright/renderfright/Android.bp
+++ b/media/libstagefright/renderfright/Android.bp
@@ -84,9 +84,6 @@
     name: "librenderfright",
     defaults: ["librenderfright_defaults"],
     vendor_available: true,
-    vndk: {
-        enabled: true,
-    },
     double_loadable: true,
 
     cflags: [
diff --git a/media/libstagefright/xmlparser/Android.bp b/media/libstagefright/xmlparser/Android.bp
index 2f204f9..2c5e81a 100644
--- a/media/libstagefright/xmlparser/Android.bp
+++ b/media/libstagefright/xmlparser/Android.bp
@@ -16,9 +16,6 @@
 cc_library_shared {
     name: "libstagefright_xmlparser",
     vendor_available: true,
-    vndk: {
-        enabled: true,
-    },
     double_loadable: true,
 
     srcs: [
diff --git a/media/module/bqhelper/Android.bp b/media/module/bqhelper/Android.bp
index c4dadd0..f9b7dea 100644
--- a/media/module/bqhelper/Android.bp
+++ b/media/module/bqhelper/Android.bp
@@ -69,9 +69,6 @@
     name: "libstagefright_bufferqueue_helper",
     defaults: ["libstagefright_bufferqueue-defaults"],
     vendor_available: true,
-    vndk: {
-        enabled: true,
-    },
     min_sdk_version: "29",
 
     shared_libs: [ "libgui" ],
diff --git a/media/module/bufferpool/2.0/Android.bp b/media/module/bufferpool/2.0/Android.bp
index 930b026..bdab103 100644
--- a/media/module/bufferpool/2.0/Android.bp
+++ b/media/module/bufferpool/2.0/Android.bp
@@ -60,7 +60,4 @@
     vendor_available: true,
     // TODO: b/147147992
     double_loadable: true,
-    vndk: {
-        enabled: true,
-    },
 }
diff --git a/media/module/foundation/Android.bp b/media/module/foundation/Android.bp
index dc8384d..edf4cb5 100644
--- a/media/module/foundation/Android.bp
+++ b/media/module/foundation/Android.bp
@@ -33,9 +33,6 @@
 cc_defaults {
     name: "libstagefright_foundation_defaults",
     vendor_available: true,
-    vndk: {
-        enabled: true,
-    },
     host_supported: true,
     double_loadable: true,
 
diff --git a/media/utils/Android.bp b/media/utils/Android.bp
index 5b7319a..e340b40 100644
--- a/media/utils/Android.bp
+++ b/media/utils/Android.bp
@@ -113,6 +113,7 @@
 
     export_shared_lib_headers: [
         "libpermission",
+        "packagemanager_aidl-cpp",
     ],
 
     required: [
diff --git a/services/audioflinger/AudioFlinger.cpp b/services/audioflinger/AudioFlinger.cpp
index e8da710..c1d1a63 100644
--- a/services/audioflinger/AudioFlinger.cpp
+++ b/services/audioflinger/AudioFlinger.cpp
@@ -2189,6 +2189,13 @@
     }
 }
 
+void AudioFlinger::onHardError(std::set<audio_port_handle_t>& trackPortIds) {
+    ALOGI("releasing tracks due to a hard error occurred on an I/O thread");
+    for (const auto portId : trackPortIds) {
+        AudioSystem::releaseOutput(portId);
+    }
+}
+
 // removeClient_l() must be called with AudioFlinger::clientMutex() held
 void AudioFlinger::removeClient_l(pid_t pid)
 {
@@ -4962,6 +4969,23 @@
     return mPatchPanel->getAudioMixPort_l(devicePort, mixPort);
 }
 
+status_t AudioFlinger::setTracksInternalMute(
+        const std::vector<media::TrackInternalMuteInfo>& tracksInternalMute) {
+    audio_utils::lock_guard _l(mutex());
+    ALOGV("%s", __func__);
+
+    std::map<audio_port_handle_t, bool> tracksInternalMuteMap;
+    for (const auto& trackInternalMute : tracksInternalMute) {
+        audio_port_handle_t portId = VALUE_OR_RETURN_STATUS(
+                aidl2legacy_int32_t_audio_port_handle_t(trackInternalMute.portId));
+        tracksInternalMuteMap.emplace(portId, trackInternalMute.muted);
+    }
+    for (size_t i = 0; i < mPlaybackThreads.size() && !tracksInternalMuteMap.empty(); i++) {
+        mPlaybackThreads.valueAt(i)->setTracksInternalMute(&tracksInternalMuteMap);
+    }
+    return NO_ERROR;
+}
+
 // ----------------------------------------------------------------------------
 
 status_t AudioFlinger::onTransactWrapper(TransactionCode code,
@@ -4996,6 +5020,7 @@
         case TransactionCode::INVALIDATE_TRACKS:
         case TransactionCode::GET_AUDIO_POLICY_CONFIG:
         case TransactionCode::GET_AUDIO_MIX_PORT:
+        case TransactionCode::SET_TRACKS_INTERNAL_MUTE:
             ALOGW("%s: transaction %d received from PID %d",
                   __func__, static_cast<int>(code), IPCThreadState::self()->getCallingPid());
             // return status only for non void methods
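
A hedged sketch of the data shape the new setTracksInternalMute() entry point consumes; the struct below is a hypothetical stand-in for the media::TrackInternalMuteInfo parcelable, mirroring only the two fields this patch uses (portId, muted):

    #include <cstdint>
    #include <map>
    #include <vector>

    // Hypothetical stand-in for media::TrackInternalMuteInfo (portId, muted only).
    struct TrackInternalMuteInfoSketch {
        int32_t portId;
        bool muted;
    };

    int main() {
        std::vector<TrackInternalMuteInfoSketch> request{{/*portId=*/42, /*muted=*/true}};
        // AudioFlinger converts the list into a portId -> muted map and offers it
        // to each playback thread until every entry has been claimed.
        std::map<int32_t, bool> perPort;
        for (const auto& item : request) {
            perPort.emplace(item.portId, item.muted);
        }
        return perPort.at(42) ? 0 : 1;
    }
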
diff --git a/services/audioflinger/AudioFlinger.h b/services/audioflinger/AudioFlinger.h
index 6acd194..a8f983b 100644
--- a/services/audioflinger/AudioFlinger.h
+++ b/services/audioflinger/AudioFlinger.h
@@ -259,6 +259,10 @@
     status_t getAudioMixPort(const struct audio_port_v7* devicePort,
                              struct audio_port_v7* mixPort) const final EXCLUDES_AudioFlinger_Mutex;
 
+    status_t setTracksInternalMute(
+            const std::vector<media::TrackInternalMuteInfo>& tracksInternalMute) final
+            EXCLUDES_AudioFlinger_Mutex;
+
     status_t onTransactWrapper(TransactionCode code, const Parcel& data, uint32_t flags,
             const std::function<status_t()>& delegate) final EXCLUDES_AudioFlinger_Mutex;
 
@@ -397,6 +401,8 @@
     void onSupportedLatencyModesChanged(
             audio_io_handle_t output, const std::vector<audio_latency_mode_t>& modes) final
             EXCLUDES_AudioFlinger_ClientMutex;
+    void onHardError(std::set<audio_port_handle_t>& trackPortIds) final
+            EXCLUDES_AudioFlinger_ClientMutex;
 
     // ---- end of IAfThreadCallback interface
 
diff --git a/services/audioflinger/IAfThread.h b/services/audioflinger/IAfThread.h
index 4b6ab89..4518d48 100644
--- a/services/audioflinger/IAfThread.h
+++ b/services/audioflinger/IAfThread.h
@@ -116,9 +116,11 @@
             const sp<AudioIoDescriptor>& ioDesc,
             pid_t pid = 0) EXCLUDES_AudioFlinger_ClientMutex = 0;
     virtual void onNonOffloadableGlobalEffectEnable() EXCLUDES_AudioFlinger_Mutex = 0;
-    virtual void onSupportedLatencyModesChanged(
-            audio_io_handle_t output, const std::vector<audio_latency_mode_t>& modes)
+    virtual void onSupportedLatencyModesChanged(audio_io_handle_t output,
+                                                const std::vector<audio_latency_mode_t>& modes)
             EXCLUDES_AudioFlinger_ClientMutex = 0;
+
+    virtual void onHardError(std::set<audio_port_handle_t>& trackPortIds) = 0;
 };
 
 class IAfThreadBase : public virtual RefBase {
@@ -536,6 +538,9 @@
     virtual const std::atomic<int64_t>& framesWritten() const = 0;
 
     virtual bool usesHwAvSync() const = 0;
+
+    virtual void setTracksInternalMute(std::map<audio_port_handle_t, bool>* tracksInternalMute)
+            EXCLUDES_ThreadBase_Mutex = 0;
 };
 
 class IAfDirectOutputThread : public virtual IAfPlaybackThread {
diff --git a/services/audioflinger/IAfTrack.h b/services/audioflinger/IAfTrack.h
index a0d20ac..168bcca 100644
--- a/services/audioflinger/IAfTrack.h
+++ b/services/audioflinger/IAfTrack.h
@@ -427,6 +427,10 @@
     virtual FillingStatus& fillingStatus() = 0;
     virtual int8_t& retryCount() = 0;
     virtual FastTrackUnderruns& fastTrackUnderruns() = 0;
+
+    // Internal mute; currently only used for bit-perfect playback.
+    virtual bool getInternalMute() const = 0;
+    virtual void setInternalMute(bool muted) = 0;
 };
 
 // playback track, used by DuplicatingThread
diff --git a/services/audioflinger/PlaybackTracks.h b/services/audioflinger/PlaybackTracks.h
index 89ee5c0..1e1b6d2 100644
--- a/services/audioflinger/PlaybackTracks.h
+++ b/services/audioflinger/PlaybackTracks.h
@@ -220,6 +220,8 @@
      */
     void processMuteEvent_l(const sp<IAudioManager>& audioManager, mute_state_t muteState) final;
 
+    bool getInternalMute() const final { return mInternalMute; }
+    void setInternalMute(bool muted) final { mInternalMute = muted; }
 protected:
 
     DISALLOW_COPY_AND_ASSIGN(Track);
@@ -401,6 +403,8 @@
     // access these two variables only when holding player thread lock.
     std::unique_ptr<os::PersistableBundle> mMuteEventExtras;
     mute_state_t        mMuteState;
+
+    bool                mInternalMute = false;
 };  // end of Track
 
 
diff --git a/services/audioflinger/Threads.cpp b/services/audioflinger/Threads.cpp
index d45fca1..66e89e4 100644
--- a/services/audioflinger/Threads.cpp
+++ b/services/audioflinger/Threads.cpp
@@ -33,6 +33,7 @@
 #include <afutils/Vibrator.h>
 #include <audio_utils/MelProcessor.h>
 #include <audio_utils/Metadata.h>
+#include <com_android_media_audioserver.h>
 #ifdef DEBUG_CPU_USAGE
 #include <audio_utils/Statistics.h>
 #include <cpustats/ThreadCpuUsage.h>
@@ -3006,6 +3007,23 @@
     }
 }
 
+std::set<audio_port_handle_t> PlaybackThread::getTrackPortIds_l()
+{
+    std::set<int32_t> result;
+    for (const auto& t : mTracks) {
+        if (t->isExternalTrack()) {
+            result.insert(t->portId());
+        }
+    }
+    return result;
+}
+
+std::set<audio_port_handle_t> PlaybackThread::getTrackPortIds()
+{
+    audio_utils::lock_guard _l(mutex());
+    return getTrackPortIds_l();
+}
+
 String8 PlaybackThread::getParameters(const String8& keys)
 {
     audio_utils::lock_guard _l(mutex());
@@ -3059,9 +3077,9 @@
     mCallbackThread->resetDraining();
 }
 
-void PlaybackThread::onError()
+void PlaybackThread::onError(bool isHardError)
 {
-    mCallbackThread->setAsyncError();
+    mCallbackThread->setAsyncError(isHardError);
 }
 
 void PlaybackThread::onCodecFormatChanged(
@@ -5427,11 +5445,15 @@
     broadcast_l();
 }
 
-void PlaybackThread::onAsyncError()
+void PlaybackThread::onAsyncError(bool isHardError)
 {
+    auto allTrackPortIds = getTrackPortIds();
     for (int i = AUDIO_STREAM_SYSTEM; i < (int)AUDIO_STREAM_CNT; i++) {
         invalidateTracks((audio_stream_type_t)i);
     }
+    if (isHardError) {
+        mAfThreadCallback->onHardError(allTrackPortIds);
+    }
 }
 
 void MixerThread::threadLoop_mix()
@@ -5774,6 +5796,11 @@
                 vlf *= volume;
                 vrf *= volume;
 
+                if (track->getInternalMute()) {
+                    vlf = 0.f;
+                    vrf = 0.f;
+                }
+
                 track->setFinalVolume(vlf, vrf);
                 ++fastTracks;
             } else {
@@ -5973,6 +6000,11 @@
                 vaf = v * sendLevel * (1. / MAX_GAIN_INT);
             }
 
+            if (track->getInternalMute()) {
+                vrf = 0.f;
+                vlf = 0.f;
+            }
+
             track->setFinalVolume(vlf, vrf);
 
             // Delegate volume control to effect in track effect chain if needed
@@ -7178,7 +7210,7 @@
         mPlaybackThread(playbackThread),
         mWriteAckSequence(0),
         mDrainSequence(0),
-        mAsyncError(false)
+        mAsyncError(ASYNC_ERROR_NONE)
 {
 }
 
@@ -7192,7 +7224,7 @@
     while (!exitPending()) {
         uint32_t writeAckSequence;
         uint32_t drainSequence;
-        bool asyncError;
+        AsyncError asyncError;
 
         {
             audio_utils::unique_lock _l(mutex());
@@ -7213,7 +7245,7 @@
             drainSequence = mDrainSequence;
             mDrainSequence &= ~1;
             asyncError = mAsyncError;
-            mAsyncError = false;
+            mAsyncError = ASYNC_ERROR_NONE;
         }
         {
             const sp<PlaybackThread> playbackThread = mPlaybackThread.promote();
@@ -7224,8 +7256,8 @@
                 if (drainSequence & 1) {
                     playbackThread->resetDraining(drainSequence >> 1);
                 }
-                if (asyncError) {
-                    playbackThread->onAsyncError();
+                if (asyncError != ASYNC_ERROR_NONE) {
+                    playbackThread->onAsyncError(asyncError == ASYNC_ERROR_HARD);
                 }
             }
         }
@@ -7275,10 +7307,10 @@
     }
 }
 
-void AsyncCallbackThread::setAsyncError()
+void AsyncCallbackThread::setAsyncError(bool isHardError)
 {
     audio_utils::lock_guard _l(mutex());
-    mAsyncError = true;
+    mAsyncError = isHardError ? ASYNC_ERROR_HARD : ASYNC_ERROR_SOFT;
     mWaitWorkCV.notify_one();
 }
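
For context, a minimal sketch (illustrative, not the framework code) of the bool-to-enum change: the callback thread now distinguishes soft errors, which only invalidate tracks, from hard errors, which additionally release the tracks' outputs via onHardError():

    #include <cstdio>

    enum AsyncError { ASYNC_ERROR_NONE, ASYNC_ERROR_SOFT, ASYNC_ERROR_HARD };

    // Mirrors PlaybackThread::onAsyncError(bool): tracks are always invalidated,
    // outputs are released only for hard errors.
    static void handleAsyncError(bool isHardError) {
        printf("invalidate tracks%s\n", isHardError ? " + release outputs" : "");
    }

    int main() {
        AsyncError pending = ASYNC_ERROR_HARD;  // as set by setAsyncError(true)
        if (pending != ASYNC_ERROR_NONE) {
            handleAsyncError(pending == ASYNC_ERROR_HARD);
        }
        return 0;
    }
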
 
@@ -11356,14 +11388,15 @@
     // If there is only one active track and it is bit-perfect, enable tee buffer.
     float volumeLeft = 1.0f;
     float volumeRight = 1.0f;
-    if (mActiveTracks.size() == 1 && mActiveTracks[0]->isBitPerfect()) {
-        const int trackId = mActiveTracks[0]->id();
+    if (sp<IAfTrack> bitPerfectTrack = getTrackToStreamBitPerfectly_l();
+        bitPerfectTrack != nullptr) {
+        const int trackId = bitPerfectTrack->id();
         mAudioMixer->setParameter(
                     trackId, AudioMixer::TRACK, AudioMixer::TEE_BUFFER, (void *)mSinkBuffer);
         mAudioMixer->setParameter(
                     trackId, AudioMixer::TRACK, AudioMixer::TEE_BUFFER_FRAME_COUNT,
                     (void *)(uintptr_t)mNormalFrameCount);
-        mActiveTracks[0]->getFinalVolume(&volumeLeft, &volumeRight);
+        bitPerfectTrack->getFinalVolume(&volumeLeft, &volumeRight);
         mIsBitPerfect = true;
     } else {
         mIsBitPerfect = false;
@@ -11388,4 +11421,39 @@
     mHasDataCopiedToSinkBuffer = mIsBitPerfect;
 }
 
+void BitPerfectThread::setTracksInternalMute(
+        std::map<audio_port_handle_t, bool>* tracksInternalMute) {
+    for (auto& track : mTracks) {
+        if (auto it = tracksInternalMute->find(track->portId()); it != tracksInternalMute->end()) {
+            track->setInternalMute(it->second);
+            tracksInternalMute->erase(it);
+        }
+    }
+}
+
+sp<IAfTrack> BitPerfectThread::getTrackToStreamBitPerfectly_l() {
+    if (com::android::media::audioserver::
+                fix_concurrent_playback_behavior_with_bit_perfect_client()) {
+        sp<IAfTrack> bitPerfectTrack = nullptr;
+        bool allOtherTracksMuted = true;
+        // Return the bit perfect track if all other tracks are muted
+        for (const auto& track : mActiveTracks) {
+            if (track->isBitPerfect()) {
+                bitPerfectTrack = track;
+            } else if (track->getFinalVolume() != 0.f) {
+                allOtherTracksMuted = false;
+                if (bitPerfectTrack != nullptr) {
+                    break;
+                }
+            }
+        }
+        return allOtherTracksMuted ? bitPerfectTrack : nullptr;
+    } else {
+        if (mActiveTracks.size() == 1 && mActiveTracks[0]->isBitPerfect()) {
+            return mActiveTracks[0];
+        }
+    }
+    return nullptr;
+}
+
 } // namespace android
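
A hedged sketch of the selection rule implemented in getTrackToStreamBitPerfectly_l() above: with the flag enabled, the bit-perfect track is streamed directly only while every other active track is effectively silent (types below are illustrative):

    #include <vector>

    struct TrackSketch {          // illustrative stand-in for an active track
        bool bitPerfect;
        float finalVolume;
    };

    static const TrackSketch* pickBitPerfectTrack(const std::vector<TrackSketch>& active) {
        const TrackSketch* candidate = nullptr;
        for (const auto& track : active) {
            if (track.bitPerfect) {
                candidate = &track;
            } else if (track.finalVolume != 0.f) {
                return nullptr;   // an audible mixed track forces the normal path
            }
        }
        return candidate;
    }

    int main() {
        std::vector<TrackSketch> active{{true, 1.f}, {false, 0.f}};
        return pickBitPerfectTrack(active) != nullptr ? 0 : 1;  // bit-perfect path chosen
    }
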
diff --git a/services/audioflinger/Threads.h b/services/audioflinger/Threads.h
index 86e1894..4b2ade4 100644
--- a/services/audioflinger/Threads.h
+++ b/services/audioflinger/Threads.h
@@ -948,7 +948,7 @@
     // StreamOutHalInterfaceCallback implementation
     virtual     void        onWriteReady();
     virtual     void        onDrainReady();
-    virtual     void        onError();
+    virtual     void        onError(bool /*isHardError*/);
 
 public: // AsyncCallbackThread
                 void        resetWriteBlocked(uint32_t sequence);
@@ -960,7 +960,7 @@
     virtual bool shouldStandby_l() REQUIRES(mutex(), ThreadBase_ThreadLoop);
     virtual void onAddNewTrack_l() REQUIRES(mutex());
 public:  // AsyncCallbackThread
-                void        onAsyncError(); // error reported by AsyncCallbackThread
+                void        onAsyncError(bool isHardError); // error reported by AsyncCallbackThread
 protected:
     // StreamHalInterfaceCodecFormatCallback implementation
                 void        onCodecFormatChanged(
@@ -1200,6 +1200,11 @@
                     }
                     return mHalStarted;
                 }
+
+    void setTracksInternalMute(std::map<audio_port_handle_t, bool>* /* tracksInternalMute */)
+            override EXCLUDES_ThreadBase_Mutex {
+        // Do nothing. This is only used by the bit-perfect thread.
+    }
 protected:
     // updated by readOutputParameters_l()
     size_t                          mNormalFrameCount;  // normal mixer and effects
@@ -1373,6 +1378,8 @@
     bool destroyTrack_l(const sp<IAfTrack>& track) final REQUIRES(mutex());
 
     void removeTrack_l(const sp<IAfTrack>& track) REQUIRES(mutex());
+    std::set<audio_port_handle_t> getTrackPortIds_l() REQUIRES(mutex());
+    std::set<audio_port_handle_t> getTrackPortIds();
 
     void readOutputParameters_l() REQUIRES(mutex());
     MetadataUpdate updateMetadata_l() final REQUIRES(mutex());
@@ -1836,7 +1843,7 @@
             void        resetWriteBlocked();
             void        setDraining(uint32_t sequence);
             void        resetDraining();
-            void        setAsyncError();
+            void        setAsyncError(bool isHardError);
 
 private:
     const wp<PlaybackThread>   mPlaybackThread;
@@ -1850,7 +1857,8 @@
     uint32_t                   mDrainSequence;
     audio_utils::condition_variable mWaitWorkCV;
     mutable audio_utils::mutex mMutex{audio_utils::MutexOrder::kAsyncCallbackThread_Mutex};
-    bool                       mAsyncError;
+    enum AsyncError { ASYNC_ERROR_NONE, ASYNC_ERROR_SOFT, ASYNC_ERROR_HARD };
+    AsyncError                 mAsyncError;
 
     audio_utils::mutex& mutex() const RETURN_CAPABILITY(audio_utils::AsyncCallbackThread_Mutex) {
         return mMutex;
@@ -2449,12 +2457,17 @@
     BitPerfectThread(const sp<IAfThreadCallback>& afThreadCallback, AudioStreamOut *output,
                      audio_io_handle_t id, bool systemReady);
 
+    void setTracksInternalMute(std::map<audio_port_handle_t, bool>* tracksInternalMuted)
+            final EXCLUDES_ThreadBase_Mutex;
+
 protected:
     mixer_state prepareTracks_l(Vector<sp<IAfTrack>>* tracksToRemove) final
             REQUIRES(mutex(), ThreadBase_ThreadLoop);
     void threadLoop_mix() final REQUIRES(ThreadBase_ThreadLoop);
 
 private:
+    sp<IAfTrack> getTrackToStreamBitPerfectly_l() REQUIRES(mutex());
+
     // These variables are only accessed on the threadLoop; hence need no mutex.
     bool mIsBitPerfect GUARDED_BY(ThreadBase_ThreadLoop) = false;
     float mVolumeLeft GUARDED_BY(ThreadBase_ThreadLoop) = 0.f;
diff --git a/services/audioflinger/Tracks.cpp b/services/audioflinger/Tracks.cpp
index c40f795..f8cfc12 100644
--- a/services/audioflinger/Tracks.cpp
+++ b/services/audioflinger/Tracks.cpp
@@ -913,7 +913,7 @@
                         "  Format Chn mask  SRate "
                         "ST Usg CT "
                         " G db  L dB  R dB  VS dB "
-                        "  Server FrmCnt  FrmRdy F Underruns  Flushed BitPerfect"
+                        "  Server FrmCnt  FrmRdy F Underruns  Flushed BitPerfect InternalMute"
                         "%s\n",
                         isServerLatencySupported() ? "   Latency" : "");
 }
@@ -999,7 +999,7 @@
                         "%08X %08X %6u "
                         "%2u %3x %2x "
                         "%5.2g %5.2g %5.2g %5.2g%c "
-                        "%08X %6zu%c %6zu %c %9u%c %7u %10s",
+                        "%08X %6zu%c %6zu %c %9u%c %7u %10s %12s",
             active ? "yes" : "no",
             (mClient == 0) ? getpid() : mClient->pid(),
             mSessionId,
@@ -1029,7 +1029,8 @@
             mAudioTrackServerProxy->getUnderrunFrames(),
             nowInUnderrun,
             (unsigned)mAudioTrackServerProxy->framesFlushed() % 10000000,
-            isBitPerfect() ? "true" : "false"
+            isBitPerfect() ? "true" : "false",
+            getInternalMute() ? "true" : "false"
             );
 
     if (isServerLatencySupported()) {
diff --git a/services/audiopolicy/AudioPolicyInterface.h b/services/audiopolicy/AudioPolicyInterface.h
index 8f17ffc..e91e2a3 100644
--- a/services/audiopolicy/AudioPolicyInterface.h
+++ b/services/audiopolicy/AudioPolicyInterface.h
@@ -18,6 +18,7 @@
 #define ANDROID_AUDIOPOLICY_INTERFACE_H
 
 #include <android/media/DeviceConnectedState.h>
+#include <android/media/TrackInternalMuteInfo.h>
 #include <media/AudioCommonTypes.h>
 #include <media/AudioContainers.h>
 #include <media/AudioDeviceTypeAddr.h>
@@ -179,10 +180,16 @@
     // volume control functions
     //
 
+    // notifies the audio policy manager that the absolute volume mode is enabled/disabled on
+    // the passed device. Also specifies the stream that is controlling the absolute volume.
+    virtual status_t setDeviceAbsoluteVolumeEnabled(audio_devices_t device,
+                                                    const char *address,
+                                                    bool enabled,
+                                                    audio_stream_type_t streamToDriveAbs) = 0;
     // initialises stream volume conversion parameters by specifying volume index range.
     virtual void initStreamVolume(audio_stream_type_t stream,
-                                      int indexMin,
-                                      int indexMax) = 0;
+                                  int indexMin,
+                                  int indexMax) = 0;
 
     // sets the new stream volume at a level corresponding to the supplied index for the
     // supplied device. By convention, specifying AUDIO_DEVICE_OUT_DEFAULT_FOR_VOLUME means
@@ -585,6 +592,9 @@
     // Get the attributes of the mix port when connecting to the given device port.
     virtual status_t getAudioMixPort(const struct audio_port_v7 *devicePort,
                                      struct audio_port_v7 *mixPort) = 0;
+
+    virtual status_t setTracksInternalMute(
+            const std::vector<media::TrackInternalMuteInfo>& tracksInternalMute) = 0;
 };
 
     // These are the signatures of createAudioPolicyManager/destroyAudioPolicyManager
diff --git a/services/audiopolicy/common/include/policy.h b/services/audiopolicy/common/include/policy.h
index 4643bd1..04d5362 100644
--- a/services/audiopolicy/common/include/policy.h
+++ b/services/audiopolicy/common/include/policy.h
@@ -17,6 +17,7 @@
 #pragma once
 
 #include <system/audio.h>
+#include <set>
 #include <vector>
 
 #include <media/AudioContainers.h>
@@ -27,6 +28,10 @@
 
 static const audio_attributes_t defaultAttr = AUDIO_ATTRIBUTES_INITIALIZER;
 
+static const std::set<audio_usage_t> gHighPriorityUseCases = {
+        AUDIO_USAGE_ALARM, AUDIO_USAGE_NOTIFICATION_TELEPHONY_RINGTONE
+};
+
 } // namespace android
 
 static const audio_format_t gDynamicFormat = AUDIO_FORMAT_DEFAULT;
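
A small sketch of how the new gHighPriorityUseCases set is consulted later in this patch (e.g. in startOutput); the enum below is illustrative:

    #include <set>

    enum UsageSketch { USAGE_MEDIA, USAGE_ALARM, USAGE_RINGTONE };  // illustrative

    static const std::set<UsageSketch> kHighPriorityUseCases = {USAGE_ALARM, USAGE_RINGTONE};

    int main() {
        // Membership is a plain set lookup, just like gHighPriorityUseCases.count(usage).
        return kHighPriorityUseCases.count(USAGE_ALARM) != 0 ? 0 : 1;
    }
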
diff --git a/services/audiopolicy/common/managerdefinitions/include/AudioOutputDescriptor.h b/services/audiopolicy/common/managerdefinitions/include/AudioOutputDescriptor.h
index 00958aa..914f3fe 100644
--- a/services/audiopolicy/common/managerdefinitions/include/AudioOutputDescriptor.h
+++ b/services/audiopolicy/common/managerdefinitions/include/AudioOutputDescriptor.h
@@ -29,6 +29,7 @@
 #include "ClientDescriptor.h"
 #include "DeviceDescriptor.h"
 #include "PolicyAudioPort.h"
+#include "PreferredMixerAttributesInfo.h"
 #include <vector>
 
 namespace android {
@@ -477,6 +478,16 @@
 
     PortHandleVector getClientsForStream(audio_stream_type_t streamType) const;
 
+    bool isBitPerfect() const {
+        return (getFlags().output & AUDIO_OUTPUT_FLAG_BIT_PERFECT) != AUDIO_OUTPUT_FLAG_NONE;
+    }
+
+    /**
+     * Return true if any client with the given usage is active on the given device.
+     * When the given device is null, return true if any client with the given usage is active.
+     */
+    bool isUsageActiveOnDevice(audio_usage_t usage, sp<DeviceDescriptor> device) const;
+
     virtual std::string info() const override;
 
     const sp<IOProfile> mProfile;          // I/O profile this output derives from
@@ -489,7 +500,7 @@
     audio_session_t mDirectClientSession; // session id of the direct output client
     bool mPendingReopenToQueryProfiles = false;
     audio_channel_mask_t mMixerChannelMask = AUDIO_CHANNEL_NONE;
-    bool mUsePreferredMixerAttributes = false;
+    sp<PreferredMixerAttributesInfo> mPreferredAttrInfo = nullptr;
 };
 
 // Audio output driven by an input device directly.
@@ -616,6 +627,8 @@
      */
     bool isAnyDeviceTypeActive(const DeviceTypeSet& deviceTypes) const;
 
+    bool isUsageActiveOnDevice(audio_usage_t usage, sp<DeviceDescriptor> device) const;
+
     void dump(String8 *dst) const;
 };
 
diff --git a/services/audiopolicy/common/managerdefinitions/include/ClientDescriptor.h b/services/audiopolicy/common/managerdefinitions/include/ClientDescriptor.h
index fe90a1e..a596c43 100644
--- a/services/audiopolicy/common/managerdefinitions/include/ClientDescriptor.h
+++ b/services/audiopolicy/common/managerdefinitions/include/ClientDescriptor.h
@@ -165,6 +165,18 @@
         mIsInvalid = true;
     }
 
+    bool getInternalMute() const { return mInternalMute; }
+
+    /**
+     * Set the internal mute for a client. Return true if the existing value is different from
+     * the given value.
+     */
+    bool setInternalMute(bool muted) {
+        const bool result = (mInternalMute != muted);
+        mInternalMute = muted;
+        return result;
+    }
+
 private:
     const audio_stream_type_t mStream;
     const product_strategy_t mStrategy;
@@ -178,6 +190,7 @@
      */
     uint32_t mActivityCount = 0;
     bool mIsInvalid = false;
+    bool mInternalMute = false;
 };
 
 class RecordClientDescriptor: public ClientDescriptor
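
A hedged sketch of why setInternalMute() returns a bool: the return value acts as a "changed" flag so that only clients whose state actually changed are reported to AudioFlinger (see updateClientsInternalMute later in this patch). Names below are illustrative:

    #include <vector>

    struct ClientSketch {                  // illustrative stand-in for a client
        bool internalMute = false;
        bool setInternalMute(bool muted) { // returns true only when the value changes
            const bool changed = (internalMute != muted);
            internalMute = muted;
            return changed;
        }
    };

    int main() {
        std::vector<int> dirtyPortIds;
        ClientSketch client;
        if (client.setInternalMute(true)) dirtyPortIds.push_back(/*portId=*/7);
        if (client.setInternalMute(true)) dirtyPortIds.push_back(7);  // no change, not reported
        return dirtyPortIds.size() == 1 ? 0 : 1;
    }
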
diff --git a/services/audiopolicy/common/managerdefinitions/include/PreferredMixerAttributesInfo.h b/services/audiopolicy/common/managerdefinitions/include/PreferredMixerAttributesInfo.h
index 9472481..a493e3c 100644
--- a/services/audiopolicy/common/managerdefinitions/include/PreferredMixerAttributesInfo.h
+++ b/services/audiopolicy/common/managerdefinitions/include/PreferredMixerAttributesInfo.h
@@ -44,6 +44,17 @@
 
     void increaseActiveClient() { mActiveClientsCount++; }
     void decreaseActiveClient() { mActiveClientsCount--; }
+    void resetActiveClient() { mActiveClientsCount = 0; }
+
+    bool isBitPerfect() const {
+        return (getFlags() & AUDIO_OUTPUT_FLAG_BIT_PERFECT) != AUDIO_OUTPUT_FLAG_NONE;
+    }
+
+    bool configMatches(const audio_config_t& config) const {
+        return config.format == mMixerAttributes.config.format &&
+                config.channel_mask == mMixerAttributes.config.channel_mask &&
+                config.sample_rate == mMixerAttributes.config.sample_rate;
+    }
 
     void dump(String8 *dst);
 
diff --git a/services/audiopolicy/common/managerdefinitions/src/AudioOutputDescriptor.cpp b/services/audiopolicy/common/managerdefinitions/src/AudioOutputDescriptor.cpp
index acd0ff5..68b2e7b 100644
--- a/services/audiopolicy/common/managerdefinitions/src/AudioOutputDescriptor.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/AudioOutputDescriptor.cpp
@@ -792,6 +792,16 @@
     mDevices = devices;
 }
 
+bool SwAudioOutputDescriptor::isUsageActiveOnDevice(audio_usage_t usage,
+                                                    sp<android::DeviceDescriptor> device) const {
+    if (device != nullptr && !mDevices.contains(device)) {
+        return false;
+    }
+    return std::any_of(mActiveClients.begin(), mActiveClients.end(),
+                       [usage](sp<TrackClientDescriptor> client) {
+                           return client->attributes().usage == usage; });
+}
+
 // HwAudioOutputDescriptor implementation
 HwAudioOutputDescriptor::HwAudioOutputDescriptor(const sp<SourceClientDescriptor>& source,
                                                  AudioPolicyClientInterface *clientInterface)
@@ -1017,6 +1027,17 @@
     return clientsForStream;
 }
 
+bool SwAudioOutputCollection::isUsageActiveOnDevice(audio_usage_t usage,
+                                                    sp<android::DeviceDescriptor> device) const {
+    for (size_t i = 0; i < this->size(); i++) {
+        const sp<SwAudioOutputDescriptor> outputDesc = this->valueAt(i);
+        if (outputDesc->isUsageActiveOnDevice(usage, device)) {
+            return true;
+        }
+    }
+    return false;
+}
+
 std::string SwAudioOutputDescriptor::info() const {
     std::string result;
     result.append("[" );
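
A hedged sketch of the activity query added above: an output reports a usage as active when any of its active clients carries that usage, and the collection-level query simply ORs this over all outputs (containers below are illustrative):

    #include <algorithm>
    #include <vector>

    struct ClientSketch { int usage; };                 // illustrative
    struct OutputSketch {
        std::vector<ClientSketch> activeClients;
        bool isUsageActive(int usage) const {
            return std::any_of(activeClients.begin(), activeClients.end(),
                               [usage](const ClientSketch& c) { return c.usage == usage; });
        }
    };

    int main() {
        OutputSketch speaker;
        speaker.activeClients = {{/*usage=*/1}, {/*usage=*/4}};
        OutputSketch headset;
        headset.activeClients = {{/*usage=*/2}};
        const std::vector<OutputSketch> outputs{speaker, headset};
        const bool anyAlarm = std::any_of(outputs.begin(), outputs.end(),
                                          [](const OutputSketch& o) { return o.isUsageActive(4); });
        return anyAlarm ? 0 : 1;
    }
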
diff --git a/services/audiopolicy/common/managerdefinitions/src/ClientDescriptor.cpp b/services/audiopolicy/common/managerdefinitions/src/ClientDescriptor.cpp
index 2aee501..667c189 100644
--- a/services/audiopolicy/common/managerdefinitions/src/ClientDescriptor.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/ClientDescriptor.cpp
@@ -57,8 +57,8 @@
 void TrackClientDescriptor::dump(String8 *dst, int spaces) const
 {
     ClientDescriptor::dump(dst, spaces);
-    dst->appendFormat("%*sStream: %d; Flags: %08x; Refcount: %d\n", spaces, "",
-            mStream, mFlags, mActivityCount);
+    dst->appendFormat("%*sStream: %d; Flags: %08x; Refcount: %d; InternalMute: %s\n",
+            spaces, "", mStream, mFlags, mActivityCount, mInternalMute ? "Yes" : "No");
     dst->appendFormat("%*sDAP Primary Mix: %p\n", spaces, "", mPrimaryMix.promote().get());
     if (!mSecondaryOutputs.empty()) {
         dst->appendFormat("%*sDAP Secondary Outputs: ", spaces - 2, "");
diff --git a/services/audiopolicy/fuzzer/aidl/Android.bp b/services/audiopolicy/fuzzer/aidl/Android.bp
index 2c85955..a91ecd7 100644
--- a/services/audiopolicy/fuzzer/aidl/Android.bp
+++ b/services/audiopolicy/fuzzer/aidl/Android.bp
@@ -44,6 +44,7 @@
         "packagemanager_aidl-cpp",
     ],
     static_libs: [
+        "audiopermissioncontroller",
         "libaudiomockhal",
         "libfakeservicemanager",
         "libmediaplayerservice",
diff --git a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
index 7f4be79..7ec5f48 100644
--- a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
+++ b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
@@ -338,7 +338,7 @@
                         && (!device_distinguishes_on_address(device->type())
                                 // always force when disconnecting (a non-duplicated device)
                                 || (state == AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE));
-                if (desc->mUsePreferredMixerAttributes && newDevices != desc->devices()) {
+                if (desc->mPreferredAttrInfo != nullptr && newDevices != desc->devices()) {
                     // If the device is using preferred mixer attributes, the output need to reopen
                     // with default configuration when the new selected devices are different from
                     // current routing devices
@@ -563,15 +563,31 @@
         }
     }
     auto musicStrategy = streamToStrategy(AUDIO_STREAM_MUSIC);
+    uint32_t muteWaitMs = 0;
     for (size_t i = 0; i < mOutputs.size(); i++) {
        sp<SwAudioOutputDescriptor> desc = mOutputs.valueAt(i);
-       // mute media strategies and delay device switch by the largest
-       // This avoid sending the music tail into the earpiece or headset.
+       // mute media strategies to avoid sending the music tail into
+       // the earpiece or headset.
+       if (desc->isStrategyActive(musicStrategy)) {
+           uint32_t tempRecommendedMuteDuration = desc->getRecommendedMuteDurationMs();
+           uint32_t tempMuteDurationMs = tempRecommendedMuteDuration > 0 ?
+                        tempRecommendedMuteDuration : desc->latency() * 4;
+           if (muteWaitMs < tempMuteDurationMs) {
+               muteWaitMs = tempMuteDurationMs;
+           }
+       }
        setStrategyMute(musicStrategy, true, desc);
        setStrategyMute(musicStrategy, false, desc, MUTE_TIME_MS,
           mEngine->getOutputDevicesForAttributes(attributes_initializer(AUDIO_USAGE_MEDIA),
                                               nullptr, true /*fromCache*/).types());
     }
+    // Wait for the muted audio to propagate down the audio path; see checkDeviceMuteStrategies().
+    // We assume that MUTE_TIME_MS is way larger than muteWaitMs so that unmuting still
+    // happens after the actual device switch.
+    if (muteWaitMs > 0) {
+        ALOGW_IF(MUTE_TIME_MS < muteWaitMs * 2, "%s excessive mute wait %d", __func__, muteWaitMs);
+        usleep(muteWaitMs * 1000);
+    }
     // Toggle the device state: UNAVAILABLE -> AVAILABLE
     // This will force reading again the device configuration
     status_t status = setDeviceConnectionState(device,
@@ -917,15 +933,15 @@
     for (size_t i = 0; i < mOutputs.size(); i++) {
         sp<SwAudioOutputDescriptor> desc = mOutputs.valueAt(i);
         DeviceVector newDevices = getNewOutputDevices(desc, true /*fromCache*/);
+        if (state != AUDIO_MODE_NORMAL && oldState == AUDIO_MODE_NORMAL
+                && desc->mPreferredAttrInfo != nullptr) {
+            // If the output is using preferred mixer attributes and the audio mode is not normal,
+            // the output needs to be reopened with the default configuration.
+            outputsToReopen.emplace(mOutputs.keyAt(i), newDevices);
+            continue;
+        }
         if (state != AUDIO_MODE_IN_CALL || (desc != mPrimaryOutput && !isTelephonyRxOrTx(desc))) {
             bool forceRouting = !newDevices.isEmpty();
-            if (desc->mUsePreferredMixerAttributes && newDevices != desc->devices()) {
-                // If the device is using preferred mixer attributes, the output need to reopen
-                // with default configuration when the new selected devices are different from
-                // current routing devices.
-                outputsToReopen.emplace(mOutputs.keyAt(i), newDevices);
-                continue;
-            }
             setOutputDevices(__func__, desc, newDevices, forceRouting, 0 /*delayMs*/, nullptr,
                              true /*requiresMuteCheck*/, !forceRouting /*requiresVolumeCheck*/);
         }
@@ -1337,23 +1353,40 @@
             // Only use preferred mixer if the uid matches or the preferred mixer is bit-perfect
             // and it is currently active.
             if (info != nullptr && info->getUid() != uid &&
-                ((info->getFlags() & AUDIO_OUTPUT_FLAG_BIT_PERFECT) == AUDIO_OUTPUT_FLAG_NONE ||
-                        info->getActiveClientCount() == 0)) {
+                (!info->isBitPerfect() || info->getActiveClientCount() == 0)) {
                 info = nullptr;
             }
+            if (com::android::media::audioserver::
+                    fix_concurrent_playback_behavior_with_bit_perfect_client()) {
+                if (info != nullptr && info->getUid() == uid &&
+                    info->configMatches(*config) &&
+                    (mEngine->getPhoneState() != AUDIO_MODE_NORMAL ||
+                            std::any_of(gHighPriorityUseCases.begin(), gHighPriorityUseCases.end(),
+                                        [this, &outputDevices](audio_usage_t usage) {
+                                            return mOutputs.isUsageActiveOnDevice(
+                                                    usage, outputDevices[0]); }))) {
+                    // Bit-perfect request is not allowed when the phone mode is not normal or
+                    // there is any higher priority use case active.
+                    return INVALID_OPERATION;
+                }
+            }
         }
         *output = getOutputForDevices(outputDevices, session, resultAttr, config,
                 flags, isSpatialized, info, resultAttr->flags & AUDIO_FLAG_MUTE_HAPTIC);
         // The client will be active if the client is currently preferred mixer owner and the
         // requested configuration matches the preferred mixer configuration.
         *isBitPerfect = (info != nullptr
-                && (info->getFlags() & AUDIO_OUTPUT_FLAG_BIT_PERFECT) != AUDIO_OUTPUT_FLAG_NONE
+                && info->isBitPerfect()
                 && info->getUid() == uid
                 && *output != AUDIO_IO_HANDLE_NONE
                 // When bit-perfect output is selected for the preferred mixer attributes owner,
                 // only need to consider the config matches.
                 && mOutputs.valueFor(*output)->isConfigurationMatched(
                         clientConfig, AUDIO_OUTPUT_FLAG_NONE));
+
+        if (*isBitPerfect) {
+            *flags = (audio_output_flags_t)(*flags | AUDIO_OUTPUT_FLAG_BIT_PERFECT);
+        }
     }
     if (*output == AUDIO_IO_HANDLE_NONE) {
         AudioProfileVector profiles;
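
A hedged, simplified sketch of the two new decisions in the hunk above: a matching bit-perfect request is refused while the phone is in a non-normal mode or a high-priority usage is active on the target device, and when the request is granted the BIT_PERFECT output flag is ORed into the client's flags (values and names below are illustrative):

    #include <cstdint>

    constexpr uint32_t FLAG_NONE = 0x0;
    constexpr uint32_t FLAG_BIT_PERFECT = 0x1;            // illustrative flag values

    // Returns the adjusted flags, or ~0u to signal refusal (analogous to INVALID_OPERATION).
    static uint32_t resolveBitPerfectRequest(bool phoneModeNormal, bool highPriorityActive,
                                             bool configMatches, uint32_t flags) {
        if (!phoneModeNormal || highPriorityActive) {
            return ~0u;
        }
        return configMatches ? (flags | FLAG_BIT_PERFECT) : flags;
    }

    int main() {
        const uint32_t flags = resolveBitPerfectRequest(/*phoneModeNormal=*/true,
                                                        /*highPriorityActive=*/false,
                                                        /*configMatches=*/true, FLAG_NONE);
        return (flags & FLAG_BIT_PERFECT) ? 0 : 1;
    }
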
@@ -1708,6 +1741,24 @@
             // at this stage we should ignore the DIRECT flag as no direct output could be
             // found earlier
             *flags = (audio_output_flags_t) (*flags & ~AUDIO_OUTPUT_FLAG_DIRECT);
+            if (com::android::media::audioserver::
+                    fix_concurrent_playback_behavior_with_bit_perfect_client()) {
+                // If the preferred mixer attributes info is null, do not select the bit-perfect
+                // output unless it is the only output.
+                // The bit-perfect output can exist while the passed-in preferred mixer attributes
+                // info is null when the client is a high priority one, such as ringtone or alarm,
+                // which are not bit-perfect use cases.
+                size_t i = 0;
+                while (i < outputs.size() && outputs.size() > 1) {
+                    auto desc = mOutputs.valueFor(outputs[i]);
+                    // The output descriptor must not be null here.
+                    if (desc->isBitPerfect()) {
+                        outputs.removeItemsAt(i);
+                    } else {
+                        i += 1;
+                    }
+                }
+            }
             output = selectOutput(
                     outputs, *flags, config->format, channelMask, config->sample_rate, session);
         }
@@ -2191,6 +2242,20 @@
     ALOGV("startOutput() output %d, stream %d, session %d",
           outputDesc->mIoHandle, client->stream(), client->session());
 
+    if (com::android::media::audioserver::fix_concurrent_playback_behavior_with_bit_perfect_client()
+            && gHighPriorityUseCases.count(client->attributes().usage) != 0
+            && outputDesc->isBitPerfect()) {
+        // Usually, APM selects bit-perfect output for high priority use cases only when
+        // bit-perfect output is the only output that can be routed to the selected device.
+        // However, there is no need to play high priority use cases such as ringtone and alarm
+        // on the bit-perfect path. Reopen the output and return DEAD_OBJECT so that the client
+        // can attach to a new output.
+        ALOGD("%s: reopen bit-perfect output as high priority use case(%d) is starting",
+              __func__, client->stream());
+        reopenOutput(outputDesc, nullptr /*config*/, AUDIO_OUTPUT_FLAG_NONE, __func__);
+        return DEAD_OBJECT;
+    }
+
     status_t status = outputDesc->start();
     if (status != NO_ERROR) {
         return status;
@@ -2209,7 +2274,6 @@
                 ALOGE("%s unable to open output with default config", __func__);
                 return status;
             }
-            desc->mUsePreferredMixerAttributes = true;
         }
         return status;
     }
@@ -2233,14 +2297,13 @@
                 if (desc == nullptr) {
                     return BAD_VALUE;
                 }
-                desc->mUsePreferredMixerAttributes = true;
+                desc->mPreferredAttrInfo = info;
                 // Intentionally return error to let the client side resending request for
                 // creating and starting.
                 return DEAD_OBJECT;
             }
             info->increaseActiveClient();
-            if (info->getActiveClientCount() == 1 &&
-                (info->getFlags() & AUDIO_OUTPUT_FLAG_BIT_PERFECT) != AUDIO_OUTPUT_FLAG_NONE) {
+            if (info->getActiveClientCount() == 1 && info->isBitPerfect()) {
                 // If it is first bit-perfect client, reroute all clients that will be routed to
                 // the bit-perfect sink so that it is guaranteed only bit-perfect stream is active.
                 PortHandleVector clientsToInvalidate;
@@ -2270,6 +2333,15 @@
         usleep(delayMs * 1000);
     }
 
+    if (status == NO_ERROR &&
+        outputDesc->mPreferredAttrInfo != nullptr &&
+        outputDesc->isBitPerfect() &&
+        com::android::media::audioserver::
+                fix_concurrent_playback_behavior_with_bit_perfect_client()) {
+        // A new client has started on the bit-perfect output; update all clients' internal mute.
+        updateClientsInternalMute(outputDesc);
+    }
+
     return status;
 }
 
@@ -2374,6 +2446,11 @@
              followsSameRouting(clientAttr, attributes_initializer(AUDIO_USAGE_NOTIFICATION)) ||
              (beaconMuteLatency > 0));
         uint32_t waitMs = beaconMuteLatency;
+        const bool needToCloseBitPerfectOutput =
+                (com::android::media::audioserver::
+                        fix_concurrent_playback_behavior_with_bit_perfect_client() &&
+                gHighPriorityUseCases.count(clientAttr.usage) != 0);
+        std::vector<sp<SwAudioOutputDescriptor>> outputsToReopen;
         for (size_t i = 0; i < mOutputs.size(); i++) {
             sp<SwAudioOutputDescriptor> desc = mOutputs.valueAt(i);
             if (desc != outputDesc) {
@@ -2410,15 +2487,22 @@
                 // Note restoring AudioTracks onto this output needs to invoke
                 // a volume ramp if there is no mute.
                 requiresMuteCheck |= sharedDevice && isActive;
+
+                if (needToCloseBitPerfectOutput && desc->isBitPerfect()) {
+                    outputsToReopen.push_back(desc);
+                }
             }
         }
 
-        if (outputDesc->mUsePreferredMixerAttributes && devices != outputDesc->devices()) {
+        if (outputDesc->mPreferredAttrInfo != nullptr && devices != outputDesc->devices()) {
             // If the output is open with preferred mixer attributes, but the routed device is
             // changed when calling this function, returning DEAD_OBJECT to indicate routing
             // changed.
             return DEAD_OBJECT;
         }
+        for (auto& outputToReopen : outputsToReopen) {
+            reopenOutput(outputToReopen, nullptr /*config*/, AUDIO_OUTPUT_FLAG_NONE, __func__);
+        }
         const uint32_t muteWaitMs =
                 setOutputDevices(__func__, outputDesc, devices, force, 0, nullptr,
                                  requiresMuteCheck);
@@ -2497,7 +2581,7 @@
                                     getAudioDeviceOutLeAudioUnicastSet()).isEmpty()))) {
                 DeviceVector newDevices = getNewOutputDevices(desc, false /*fromCache*/);
                 bool force = desc->devices() != newDevices;
-                if (desc->mUsePreferredMixerAttributes && force) {
+                if (desc->mPreferredAttrInfo != nullptr && force) {
                     // If the device is using preferred mixer attributes, the output need to reopen
                     // with default configuration when the new selected devices are different from
                     // current routing devices.
@@ -2545,12 +2629,21 @@
     if (outputDesc->devices().size() == 1) {
         sp<PreferredMixerAttributesInfo> info = getPreferredMixerAttributesInfo(
                 outputDesc->devices()[0]->getId(), client->strategy());
+        bool outputReopened = false;
         if (info != nullptr && info->getUid() == client->uid()) {
             info->decreaseActiveClient();
             if (info->getActiveClientCount() == 0) {
                 reopenOutput(outputDesc, nullptr /*config*/, AUDIO_OUTPUT_FLAG_NONE, __func__);
+                outputReopened = true;
             }
         }
+        if (com::android::media::audioserver::
+                    fix_concurrent_playback_behavior_with_bit_perfect_client() &&
+            !outputReopened && outputDesc->isBitPerfect()) {
+            // Only need to update the clients' internal mute when the output is bit-perfect and it
+            // is not reopened.
+            updateClientsInternalMute(outputDesc);
+        }
     }
     return status;
 }
@@ -2622,7 +2715,7 @@
                     DeviceVector newDevices2 = getNewOutputDevices(desc, false /*fromCache*/);
                     bool force = desc->devices() != newDevices2;
 
-                    if (desc->mUsePreferredMixerAttributes && force) {
+                    if (desc->mPreferredAttrInfo != nullptr && force) {
                         // If the device is using preferred mixer attributes, the output need to
                         // reopen with default configuration when the new selected devices are
                         // different from current routing devices.
@@ -3281,6 +3374,27 @@
     }
 }
 
+status_t AudioPolicyManager::setDeviceAbsoluteVolumeEnabled(audio_devices_t deviceType,
+                                                            const char *address __unused,
+                                                            bool enabled,
+                                                            audio_stream_type_t streamToDriveAbs)
+{
+    audio_attributes_t attributesToDriveAbs = mEngine->getAttributesForStreamType(streamToDriveAbs);
+    if (attributesToDriveAbs == AUDIO_ATTRIBUTES_INITIALIZER) {
+        ALOGW("%s: no attributes for stream %s, bailing out", __func__,
+              toString(streamToDriveAbs).c_str());
+        return BAD_VALUE;
+    }
+
+    if (enabled) {
+        mAbsoluteVolumeDrivingStreams[deviceType] = attributesToDriveAbs;
+    } else {
+        mAbsoluteVolumeDrivingStreams.erase(deviceType);
+    }
+
+    return NO_ERROR;
+}
+
 void AudioPolicyManager::initStreamVolume(audio_stream_type_t stream, int indexMin, int indexMax)
 {
     ALOGV("initStreamVolume() stream %d, min %d, max %d", stream , indexMin, indexMax);
@@ -4114,7 +4228,7 @@
             // preventing the force re-routing in case of default dev that distinguishes on address.
             // Let's give back to engine full device choice decision however.
             bool forceRouting = !newDevices.isEmpty();
-            if (outputDesc->mUsePreferredMixerAttributes && newDevices != outputDesc->devices()) {
+            if (outputDesc->mPreferredAttrInfo != nullptr && newDevices != outputDesc->devices()) {
                 // If the device is using preferred mixer attributes, the output need to reopen
                 // with default configuration when the new selected devices are different from
                 // current routing devices.
@@ -4394,6 +4508,13 @@
 
     dst->appendFormat("\nPolicy Engine dump:\n");
     mEngine->dump(dst);
+
+    dst->appendFormat("\nAbsolute volume devices with driving streams:\n");
+    for (const auto it : mAbsoluteVolumeDrivingStreams) {
+        dst->appendFormat("   - device type: %s, driving stream %d\n",
+                          dumpDeviceTypes({it.first}).c_str(),
+                          mEngine->getVolumeGroupForAttributes(it.second));
+    }
 }
 
 status_t AudioPolicyManager::dump(int fd)
@@ -4715,7 +4836,7 @@
         const auto output = mOutputs.valueAt(i);
         if (output->mProfile == profile && output->devices().onlyContainsDevice(deviceDescriptor)) {
             if (output->isConfigurationMatched(mixerAttributes->config, flags)) {
-                output->mUsePreferredMixerAttributes = true;
+                output->mPreferredAttrInfo = mixerAttrInfo;
             } else {
                 for (const auto &client: output->getActiveClients()) {
                     if (client->uid() == uid && client->strategy() == strategy) {
@@ -4737,7 +4858,7 @@
             ALOGE("%s, failed to reopen output with preferred mixer attributes", __func__);
             continue;
         }
-        desc->mUsePreferredMixerAttributes = true;
+        desc->mPreferredAttrInfo = mixerAttrInfo;
     }
 
     return NO_ERROR;
@@ -4753,8 +4874,7 @@
     }
     if (activeBitPerfectPreferred) {
         for (auto [strategy, info] : it->second) {
-            if ((info->getFlags() & AUDIO_OUTPUT_FLAG_BIT_PERFECT) != AUDIO_OUTPUT_FLAG_NONE
-                && info->getActiveClientCount() != 0) {
+            if (info->isBitPerfect() && info->getActiveClientCount() != 0) {
                 return info;
             }
         }
@@ -5554,7 +5674,7 @@
             invalidateStreams(mEngine->getStreamTypesForProductStrategy(ps));
         } else {
             DeviceVector newDevices = getNewOutputDevices(outputDesc, false /*fromCache*/);
-            if (outputDesc->mUsePreferredMixerAttributes && outputDesc->devices() != newDevices) {
+            if (outputDesc->mPreferredAttrInfo != nullptr && outputDesc->devices() != newDevices) {
                 // If the device is using preferred mixer attributes, the output need to reopen
                 // with default configuration when the new selected devices are different from
                 // current routing devices.
@@ -6925,8 +7045,7 @@
         closingOutput->stop();
     }
     closingOutput->close();
-    if ((closingOutput->getFlags().output & AUDIO_OUTPUT_FLAG_BIT_PERFECT)
-            == AUDIO_OUTPUT_FLAG_BIT_PERFECT) {
+    if (closingOutput->isBitPerfect()) {
         for (const auto device : closingOutput->devices()) {
             device->setPreferredConfig(nullptr);
         }
@@ -6958,6 +7077,10 @@
             setMsdOutputPatches();
         }
     }
+
+    if (closingOutput->mPreferredAttrInfo != nullptr) {
+        closingOutput->mPreferredAttrInfo->resetActiveClient();
+    }
 }
 
 void AudioPolicyManager::closeInput(audio_io_handle_t input)
@@ -7891,14 +8014,57 @@
     return nullptr;
 }
 
+float AudioPolicyManager::adjustDeviceAttenuationForAbsVolume(IVolumeCurves &curves,
+                                                              VolumeSource volumeSource,
+                                                              int index,
+                                                              const DeviceTypeSet &deviceTypes)
+{
+    audio_devices_t volumeDevice = Volume::getDeviceForVolume(deviceTypes);
+    device_category deviceCategory = Volume::getDeviceCategory({volumeDevice});
+    float volumeDb = curves.volIndexToDb(deviceCategory, index);
+
+    if (com_android_media_audio_abs_volume_index_fix()) {
+        if (mAbsoluteVolumeDrivingStreams.find(volumeDevice) !=
+            mAbsoluteVolumeDrivingStreams.end()) {
+            audio_attributes_t attributesToDriveAbs = mAbsoluteVolumeDrivingStreams[volumeDevice];
+            auto groupToDriveAbs = mEngine->getVolumeGroupForAttributes(attributesToDriveAbs);
+            if (groupToDriveAbs == VOLUME_GROUP_NONE) {
+                ALOGD("%s: no group matching with %s", __FUNCTION__,
+                      toString(attributesToDriveAbs).c_str());
+                return volumeDb;
+            }
+
+            float volumeDbMax = curves.volIndexToDb(deviceCategory, curves.getVolumeIndexMax());
+            VolumeSource vsToDriveAbs = toVolumeSource(groupToDriveAbs);
+            if (vsToDriveAbs == volumeSource) {
+                // attenuation is applied by the abs volume controller
+                return volumeDbMax;
+            } else {
+                IVolumeCurves &curvesAbs = getVolumeCurves(vsToDriveAbs);
+                int indexAbs = curvesAbs.getVolumeIndex({volumeDevice});
+                float volumeDbAbs = curvesAbs.volIndexToDb(deviceCategory, indexAbs);
+                float volumeDbAbsMax = curvesAbs.volIndexToDb(deviceCategory,
+                                                              curvesAbs.getVolumeIndexMax());
+                float newVolumeDb = fminf(volumeDb + volumeDbAbsMax - volumeDbAbs, volumeDbMax);
+                ALOGV("%s: abs vol stream %d with attenuation %f is adjusting stream %d from "
+                      "attenuation %f to attenuation %f %f", __func__, vsToDriveAbs, volumeDbAbs,
+                      volumeSource, volumeDb, newVolumeDb, volumeDbMax);
+                return newVolumeDb;
+            }
+        }
+        return volumeDb;
+    } else {
+        return volumeDb;
+    }
+}
+
 float AudioPolicyManager::computeVolume(IVolumeCurves &curves,
                                         VolumeSource volumeSource,
                                         int index,
                                         const DeviceTypeSet& deviceTypes,
                                         bool computeInternalInteraction)
 {
-    float volumeDb = curves.volIndexToDb(Volume::getDeviceCategory(deviceTypes), index);
-
+    float volumeDb = adjustDeviceAttenuationForAbsVolume(curves, volumeSource, index, deviceTypes);
     ALOGV("%s volume source %d, index %d,  devices %s, compute internal %b ", __func__,
           volumeSource, index, dumpDeviceTypes(deviceTypes).c_str(), computeInternalInteraction);
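
A worked sketch of the adjustment computed in adjustDeviceAttenuationForAbsVolume() above for a stream that is not driving the absolute volume (numbers are made up): the stream is shifted by the headroom the driving stream has left, capped at the stream's own maximum curve value.

    #include <algorithm>
    #include <cstdio>

    int main() {
        const float volumeDb       = -20.0f;  // curve value for the stream being computed
        const float volumeDbMax    =   0.0f;  // its curve value at the maximum index
        const float volumeDbAbs    = -12.0f;  // current value of the abs-volume driving stream
        const float volumeDbAbsMax =   0.0f;  // driving stream's value at the maximum index

        const float adjusted = std::min(volumeDb + volumeDbAbsMax - volumeDbAbs, volumeDbMax);
        printf("adjusted attenuation: %.1f dB\n", adjusted);  // prints -8.0 dB
        return 0;
    }
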
 
@@ -8815,4 +8981,60 @@
     mpClientInterface->invalidateTracks(clients);
 }
 
+void AudioPolicyManager::updateClientsInternalMute(
+        const sp<android::SwAudioOutputDescriptor> &desc) {
+    if (!desc->isBitPerfect() ||
+        !com::android::media::audioserver::
+                fix_concurrent_playback_behavior_with_bit_perfect_client()) {
+        // This is only used for bit perfect output now.
+        return;
+    }
+    sp<TrackClientDescriptor> bitPerfectClient = nullptr;
+    bool bitPerfectClientInternalMute = false;
+    std::vector<media::TrackInternalMuteInfo> clientsInternalMute;
+    for (const sp<TrackClientDescriptor>& client : desc->getActiveClients()) {
+        if ((client->flags() & AUDIO_OUTPUT_FLAG_BIT_PERFECT) != AUDIO_OUTPUT_FLAG_NONE) {
+            bitPerfectClient = client;
+            continue;
+        }
+        bool muted = false;
+        if (client->stream() == AUDIO_STREAM_SYSTEM) {
+            // System sound is muted.
+            muted = true;
+        } else {
+            bitPerfectClientInternalMute = true;
+        }
+        if (client->setInternalMute(muted)) {
+            auto result = legacy2aidl_audio_port_handle_t_int32_t(client->portId());
+            if (!result.ok()) {
+                ALOGE("%s, failed to convert port id(%d) to aidl", __func__, client->portId());
+                continue;
+            }
+            media::TrackInternalMuteInfo info;
+            info.portId = result.value();
+            info.muted = client->getInternalMute();
+            clientsInternalMute.push_back(std::move(info));
+        }
+    }
+    if (bitPerfectClient != nullptr &&
+        bitPerfectClient->setInternalMute(bitPerfectClientInternalMute)) {
+        auto result = legacy2aidl_audio_port_handle_t_int32_t(bitPerfectClient->portId());
+        if (result.ok()) {
+            media::TrackInternalMuteInfo info;
+            info.portId = result.value();
+            info.muted = bitPerfectClient->getInternalMute();
+            clientsInternalMute.push_back(std::move(info));
+        } else {
+            ALOGE("%s, failed to convert port id(%d) of bit perfect client to aidl",
+                  __func__, bitPerfectClient->portId());
+        }
+    }
+    if (!clientsInternalMute.empty()) {
+        if (status_t status = mpClientInterface->setTracksInternalMute(clientsInternalMute);
+                status != NO_ERROR) {
+            ALOGE("%s, failed to update tracks internal mute, err=%d", __func__, status);
+        }
+    }
+}
+
 } // namespace android
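
A hedged sketch of the policy encoded in updateClientsInternalMute() above: on a bit-perfect output, system-sound clients are internally muted, and any other concurrent client causes the bit-perfect client itself to be internally muted (the enum is an illustrative stand-in for the real stream types):

    #include <cstdio>
    #include <vector>

    enum class ClientKind { BitPerfect, SystemSound, Other };  // illustrative

    int main() {
        const std::vector<ClientKind> activeClients{
                ClientKind::BitPerfect, ClientKind::SystemSound, ClientKind::Other};
        bool muteBitPerfectClient = false;
        for (ClientKind kind : activeClients) {
            if (kind == ClientKind::SystemSound) {
                printf("system sound client -> internally muted\n");
            } else if (kind == ClientKind::Other) {
                muteBitPerfectClient = true;   // a normal mixed client is audible instead
            }
        }
        printf("bit-perfect client internally muted: %s\n",
               muteBitPerfectClient ? "yes" : "no");
        return 0;
    }
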
diff --git a/services/audiopolicy/managerdefault/AudioPolicyManager.h b/services/audiopolicy/managerdefault/AudioPolicyManager.h
index 2d015d3..585f982 100644
--- a/services/audiopolicy/managerdefault/AudioPolicyManager.h
+++ b/services/audiopolicy/managerdefault/AudioPolicyManager.h
@@ -103,7 +103,7 @@
         virtual status_t setDeviceConnectionState(audio_policy_dev_state_t state,
                 const android::media::audio::common::AudioPort& port, audio_format_t encodedFormat);
         virtual audio_policy_dev_state_t getDeviceConnectionState(audio_devices_t device,
-                                                                              const char *device_address);
+                                                                  const char *device_address);
         virtual status_t handleDeviceConfigChange(audio_devices_t device,
                                                   const char *device_address,
                                                   const char *device_name,
@@ -151,6 +151,10 @@
         virtual status_t stopInput(audio_port_handle_t portId);
         virtual void releaseInput(audio_port_handle_t portId);
         virtual void checkCloseInputs();
+        virtual status_t setDeviceAbsoluteVolumeEnabled(audio_devices_t deviceType,
+                                                        const char *address,
+                                                        bool enabled,
+                                                        audio_stream_type_t streamToDriveAbs);
         /**
          * @brief initStreamVolume: even if the engine volume files provides min and max, keep this
          * api for compatibility reason.
@@ -1386,6 +1390,17 @@
 
         bool checkHapticCompatibilityOnSpatializerOutput(const audio_config_t* config,
                                                          audio_session_t sessionId) const;
+
+        void updateClientsInternalMute(const sp<SwAudioOutputDescriptor>& desc);
+
+        float adjustDeviceAttenuationForAbsVolume(IVolumeCurves &curves,
+                                                  VolumeSource volumeSource,
+                                                  int index,
+                                                  const DeviceTypeSet &deviceTypes);
+
+        // For devices that support absolute volume, maps the device type to the audio
+        // attributes of the stream that is driving the volume changes
+        std::unordered_map<audio_devices_t, audio_attributes_t> mAbsoluteVolumeDrivingStreams;
 };
 
 };
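
A note on the new mAbsoluteVolumeDrivingStreams member: the patch only shows the declaration here, so the following is an illustrative guess at the bookkeeping such a map implies (device type keyed to the attributes of the stream driving absolute volume). The class and helper names are hypothetical and not part of the change.

// Illustration only: a possible shape for device -> driving-attributes bookkeeping.
// Stand-in types replace audio_devices_t / audio_attributes_t to keep this self-contained.
#include <unordered_map>

enum DeviceType { kDeviceA2dp = 1, kDeviceUsbHeadset = 2 };
struct Attributes { int usage; };

class AbsVolumeBook {
  public:
    void setEnabled(DeviceType device, bool enabled, Attributes drivingAttr) {
        if (enabled) {
            mDrivingStreams[device] = drivingAttr;  // remember which stream drives abs volume
        } else {
            mDrivingStreams.erase(device);          // back to regular volume curves
        }
    }
    bool isAbsVolumeDevice(DeviceType device) const {
        return mDrivingStreams.count(device) != 0;
    }

  private:
    std::unordered_map<DeviceType, Attributes> mDrivingStreams;
};
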
diff --git a/services/audiopolicy/permission/Android.bp b/services/audiopolicy/permission/Android.bp
new file mode 100644
index 0000000..1aa29e0
--- /dev/null
+++ b/services/audiopolicy/permission/Android.bp
@@ -0,0 +1,102 @@
+package {
+    default_team: "trendy_team_android_media_audio_framework",
+    default_applicable_licenses: ["Android-Apache-2.0"],
+}
+
+cc_library {
+    name: "audiopermissioncontroller",
+
+    srcs: [
+        "NativePermissionController.cpp",
+    ],
+    export_include_dirs: [
+        "include",
+    ],
+
+    header_libs: [
+        "libcutils_headers",
+        "liberror_headers",
+    ],
+    export_header_lib_headers: [
+        "liberror_headers",
+    ],
+    static_libs: [
+        "audio-permission-aidl-cpp",
+    ],
+    shared_libs: [
+        "libbase",
+        "libbinder",
+        "libutils",
+        "liblog",
+    ],
+
+    host_supported: true,
+    sanitize: {
+        integer_overflow: true,
+    },
+    cflags: [
+        "-Wall",
+        "-Wdeprecated",
+        "-Wextra",
+        "-Werror=format",
+        "-Wextra-semi",
+        "-Wthread-safety",
+        "-Wconditional-uninitialized",
+        "-Wimplicit-fallthrough",
+        "-Wreorder-init-list",
+        "-Werror=reorder-init-list",
+        "-Wshadow-all",
+        "-Wunreachable-code-aggressive",
+        "-Werror",
+        "-DANDROID_BASE_UNIQUE_FD_DISABLE_IMPLICIT_CONVERSION",
+        "-DANDROID_UTILS_REF_BASE_DISABLE_IMPLICIT_CONSTRUCTION",
+    ],
+    tidy: true,
+    tidy_checks: [
+        "android-*",
+        "bugprone-*",
+        "cert-*",
+        "clang-analyzer-security*",
+        "google-*",
+        "misc-*",
+        "modernize-*",
+        "performance-*",
+    ],
+    tidy_checks_as_errors: [
+        "android-*",
+        "bugprone-*",
+        "cert-*",
+        "clang-analyzer-security*",
+        "google-*",
+        "misc-*",
+        "modernize-*",
+        "performance-*",
+    ],
+}
+
+cc_test {
+    name: "audiopermissioncontroller_test",
+    host_supported: true,
+    defaults: [
+        "libmediautils_tests_config",
+    ],
+    static_libs: [
+        "audio-permission-aidl-cpp",
+        "audiopermissioncontroller",
+        "framework-permission-aidl-cpp",
+        "libgmock",
+    ],
+    shared_libs: [
+        "libbase",
+        "libbinder",
+        "liblog",
+        "libutils",
+    ],
+    srcs: [
+        "tests/NativePermissionControllerTest.cpp",
+    ],
+    test_options: {
+        unit_test: true,
+    },
+    test_suites: ["general-tests"],
+}
diff --git a/services/audiopolicy/permission/NativePermissionController.cpp b/services/audiopolicy/permission/NativePermissionController.cpp
new file mode 100644
index 0000000..d88c69f
--- /dev/null
+++ b/services/audiopolicy/permission/NativePermissionController.cpp
@@ -0,0 +1,123 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <media/NativePermissionController.h>
+
+#include <algorithm>
+#include <optional>
+#include <utility>
+
+#include <android-base/expected.h>
+#include <cutils/android_filesystem_config.h>
+#include <utils/Errors.h>
+
+using ::android::base::unexpected;
+using ::android::binder::Status;
+using ::android::error::Result;
+
+namespace com::android::media::permission {
+static std::optional<std::string> getFixedPackageName(uid_t uid) {
+    // These values are in sync with AppOpsService
+    switch (uid % AID_USER_OFFSET) {
+        case AID_ROOT:
+            return "root";
+        case AID_SYSTEM:
+            return "system";
+        case AID_SHELL:
+            return "shell";
+        case AID_MEDIA:
+            return "media";
+        case AID_AUDIOSERVER:
+            return "audioserver";
+        case AID_CAMERASERVER:
+            return "cameraserver";
+        // These packages are not handled by AppOps, but labeling may be useful for us
+        case AID_RADIO:
+            return "telephony";
+        case AID_BLUETOOTH:
+            return "bluetooth";
+        default:
+            return std::nullopt;
+    }
+}
+
+// -- Begin Binder methods
+Status NativePermissionController::populatePackagesForUids(
+        const std::vector<UidPackageState>& initialPackageStates) {
+    std::lock_guard l{m_};
+    is_package_populated_ = true;
+    package_map_.clear();
+    std::transform(initialPackageStates.begin(), initialPackageStates.end(),
+                   std::inserter(package_map_, package_map_.end()),
+                   [](const auto& x) -> std::pair<uid_t, std::vector<std::string>> {
+                       return {x.uid, x.packageNames};
+                   });
+    std::erase_if(package_map_, [](const auto& x) { return x.second.empty(); });
+    return Status::ok();
+}
+
+Status NativePermissionController::updatePackagesForUid(const UidPackageState& newPackageState) {
+    std::lock_guard l{m_};
+    // An empty package list removes the entry; otherwise the entry is created or replaced.
+    if (newPackageState.packageNames.empty()) {
+        package_map_.erase(newPackageState.uid);
+    } else {
+        package_map_.insert_or_assign(newPackageState.uid, newPackageState.packageNames);
+    }
+    return Status::ok();
+}
+
+// -- End Binder methods
+
+Result<std::vector<std::string>> NativePermissionController::getPackagesForUid(uid_t uid) const {
+    uid = uid % AID_USER_OFFSET;
+    const auto fixed_package_opt = getFixedPackageName(uid);
+    if (fixed_package_opt.has_value()) {
+        return Result<std::vector<std::string>>{std::in_place_t{}, {fixed_package_opt.value()}};
+    }
+    std::lock_guard l{m_};
+    if (!is_package_populated_) return unexpected{::android::NO_INIT};
+    const auto cursor = package_map_.find(uid);
+    if (cursor != package_map_.end()) {
+        return cursor->second;
+    } else {
+        return unexpected{::android::BAD_VALUE};
+    }
+}
+
+Result<bool> NativePermissionController::validateUidPackagePair(
+        uid_t uid, const std::string& packageName) const {
+    uid = uid % AID_USER_OFFSET;
+    const auto fixed_package_opt = getFixedPackageName(uid);
+    if (fixed_package_opt.has_value()) {
+        return packageName == fixed_package_opt.value();
+    }
+    std::lock_guard l{m_};
+    if (!is_package_populated_) return unexpected{::android::NO_INIT};
+    const auto cursor = package_map_.find(uid);
+    return (cursor != package_map_.end()) &&
+           (std::find(cursor->second.begin(), cursor->second.end(), packageName) !=
+            cursor->second.end());
+}
+}  // namespace com::android::media::permission
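
As an aside, the lookup order used above (reduce the uid to an app-id, then try the fixed-name table before the populated map) can be summarized with a small standalone sketch; the constant values below mirror android_filesystem_config.h and are assumptions, not taken from this patch.

// Standalone illustration of the uid -> app-id reduction and the fixed-name fallback.
#include <cassert>
#include <cstdint>
#include <optional>
#include <string>

constexpr uint32_t kAidUserOffset = 100000;  // assumed value of AID_USER_OFFSET
constexpr uint32_t kAidSystem = 1000;        // assumed value of AID_SYSTEM

std::optional<std::string> fixedPackageFor(uint32_t appId) {
    if (appId == kAidSystem) return "system";  // remaining well-known app-ids elided
    return std::nullopt;
}

int main() {
    // The same app installed for user 0 and user 10 reduces to one app-id.
    assert(10123 % kAidUserOffset == 1010123 % kAidUserOffset);
    // AID_SYSTEM resolves to a fixed name even before the uid/package map is populated.
    assert(fixedPackageFor(1000 % kAidUserOffset).value() == "system");
    return 0;
}
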
diff --git a/services/audiopolicy/permission/include/media/IPermissionProvider.h b/services/audiopolicy/permission/include/media/IPermissionProvider.h
new file mode 100644
index 0000000..9184ffb
--- /dev/null
+++ b/services/audiopolicy/permission/include/media/IPermissionProvider.h
@@ -0,0 +1,43 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <sys/types.h>
+
+#include <optional>
+#include <vector>
+
+#include <error/Result.h>
+
+namespace com::android::media::permission {
+
+class IPermissionProvider {
+  public:
+    // Get all package names which run under a certain app-id. Returns a non-empty list.
+    // Not user specific, since packages are shared across users. Special app-ids (system,
+    // shell, etc.) are handled. Fails if the provider does not know about the app-id.
+    virtual ::android::error::Result<std::vector<std::string>> getPackagesForUid(
+            uid_t uid) const = 0;
+    // True iff the provided package name runs under the app-id of uid.
+    // Special app-ids (system, shell, etc.) are handled.
+    // Fails if the provider does not know about the app-id.
+    virtual ::android::error::Result<bool> validateUidPackagePair(
+            uid_t uid, const std::string& packageName) const = 0;
+    virtual ~IPermissionProvider() = default;
+};
+}  // namespace com::android::media::permission
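
The contract documented above has two distinct failure modes in the implementation (NO_INIT before the controller has been populated, BAD_VALUE for an unknown app-id). Below is a minimal sketch of a fail-closed caller, using a std::variant stand-in for ::android::error::Result<bool>; the names and the fail-closed policy are assumptions for illustration only.

// Illustration only: models the error contract of validateUidPackagePair.
#include <cassert>
#include <variant>

enum StatusT { NO_INIT = 1, BAD_VALUE = 2 };     // stand-ins for android status codes
using BoolResult = std::variant<bool, StatusT>;  // either an answer or an error

bool failClosed(const BoolResult& res) {
    // Deny while the provider is not populated (NO_INIT) or the app-id is unknown (BAD_VALUE).
    if (std::holds_alternative<StatusT>(res)) return false;
    return std::get<bool>(res);
}

int main() {
    assert(!failClosed(BoolResult{NO_INIT}));
    assert(!failClosed(BoolResult{BAD_VALUE}));
    assert(failClosed(BoolResult{true}));
    return 0;
}
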
diff --git a/services/audiopolicy/permission/include/media/NativePermissionController.h b/services/audiopolicy/permission/include/media/NativePermissionController.h
new file mode 100644
index 0000000..c0e717c
--- /dev/null
+++ b/services/audiopolicy/permission/include/media/NativePermissionController.h
@@ -0,0 +1,48 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <mutex>
+#include <optional>
+#include <unordered_map>
+
+#include "IPermissionProvider.h"
+
+#include <android-base/thread_annotations.h>
+#include <com/android/media/permission/BnNativePermissionController.h>
+
+namespace com::android::media::permission {
+
+class NativePermissionController : public BnNativePermissionController, public IPermissionProvider {
+    using Status = ::android::binder::Status;
+
+  public:
+    Status populatePackagesForUids(const std::vector<UidPackageState>& initialPackageStates) final;
+    Status updatePackagesForUid(const UidPackageState& newPackageState) final;
+    // end binder methods
+
+    ::android::error::Result<std::vector<std::string>> getPackagesForUid(uid_t uid) const final;
+    ::android::error::Result<bool> validateUidPackagePair(
+            uid_t uid, const std::string& packageName) const final;
+
+  private:
+    mutable std::mutex m_;
+    // map of app-ids to the package names which could run under them (typically just one)
+    std::unordered_map<uid_t, std::vector<std::string>> package_map_ GUARDED_BY(m_);
+    bool is_package_populated_ GUARDED_BY(m_) = false;
+};
+}  // namespace com::android::media::permission
diff --git a/services/audiopolicy/permission/tests/NativePermissionControllerTest.cpp b/services/audiopolicy/permission/tests/NativePermissionControllerTest.cpp
new file mode 100644
index 0000000..f23a8b3
--- /dev/null
+++ b/services/audiopolicy/permission/tests/NativePermissionControllerTest.cpp
@@ -0,0 +1,179 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <media/NativePermissionController.h>
+
+#include <gmock/gmock.h>
+#include <gtest/gtest.h>
+
+#include <android-base/expected.h>
+
+using ::android::base::unexpected;
+using ::android::binder::Status;
+using com::android::media::permission::NativePermissionController;
+using com::android::media::permission::UidPackageState;
+
+class NativePermissionControllerTest : public ::testing::Test {
+  protected:
+    android::sp<NativePermissionController> holder_ =
+            android::sp<NativePermissionController>::make();
+    NativePermissionController& controller_ = *holder_;
+};
+static UidPackageState createState(uid_t uid, std::vector<std::string> packageNames) {
+    UidPackageState out{};
+    out.uid = uid;
+    out.packageNames = std::move(packageNames);
+    return out;
+}
+
+static std::vector<std::string> makeVector(const char* one) {
+    return {one};
+}
+
+static std::vector<std::string> makeVector(const char* one, const char* two) {
+    return {one, two};
+}
+
+#define UNWRAP_EQ(expr, desired_expr)                         \
+    do {                                                      \
+        auto tmp_ = (expr);                                   \
+        EXPECT_TRUE(tmp_.has_value());                        \
+        if (tmp_.has_value()) EXPECT_EQ(*tmp_, desired_expr); \
+    } while (0)
+
+// ---  Tests for non-populated ----
+TEST_F(NativePermissionControllerTest, getPackagesForUid_NotPopulated) {
+    // Verify errors are returned
+    EXPECT_EQ(controller_.getPackagesForUid(10000), unexpected{android::NO_INIT});
+    EXPECT_EQ(controller_.getPackagesForUid(10001), unexpected{android::NO_INIT});
+
+    // fixed uids should work
+    UNWRAP_EQ(controller_.getPackagesForUid(1000), makeVector("system"));
+}
+
+TEST_F(NativePermissionControllerTest, validateUidPackagePair_NotPopulated) {
+    // Verify errors are returned
+    EXPECT_EQ(controller_.validateUidPackagePair(10000, "com.package"),
+              unexpected{android::NO_INIT});
+
+    // fixed uids should work
+    UNWRAP_EQ(controller_.validateUidPackagePair(1000, "system"), true);
+}
+// ---  Tests for populatePackagesForUids ----
+TEST_F(NativePermissionControllerTest, populatePackages_EmptyInput) {
+    std::vector<UidPackageState> input;
+
+    // succeeds
+    EXPECT_TRUE(controller_.populatePackagesForUids(input).isOk());
+
+    // Verify unknown uid behavior
+    const auto res1 = controller_.getPackagesForUid(10000);
+    ASSERT_FALSE(res1.has_value());
+    EXPECT_EQ(res1.error(), ::android::BAD_VALUE);
+}
+
+TEST_F(NativePermissionControllerTest, populatePackages_ValidInput) {
+    std::vector<UidPackageState> input{
+            createState(10000, {"com.example.app1", "com.example.app2"}),
+            createState(10001, {"com.example2.app1"}),
+    };
+
+    EXPECT_TRUE(controller_.populatePackagesForUids(input).isOk());
+
+    UNWRAP_EQ(controller_.getPackagesForUid(10000),
+              makeVector("com.example.app1", "com.example.app2"));
+    UNWRAP_EQ(controller_.getPackagesForUid(10001), makeVector("com.example2.app1"));
+}
+
+// --- Tests for updatePackagesForUid ---
+TEST_F(NativePermissionControllerTest, updatePackages_NewUid) {
+    std::vector<UidPackageState> input{
+            createState(10000, {"com.example.app1", "com.example.app2"}),
+            createState(10001, {"com.example2.app1"}),
+    };
+    UidPackageState newState = createState(12000, {"com.example.other"});
+
+    EXPECT_TRUE(controller_.populatePackagesForUids(input).isOk());
+    EXPECT_TRUE(controller_.updatePackagesForUid(newState).isOk());
+
+    // Verify the results: only the updated package should be changed
+    UNWRAP_EQ(controller_.getPackagesForUid(10000),
+              makeVector("com.example.app1", "com.example.app2"));
+    UNWRAP_EQ(controller_.getPackagesForUid(10001), makeVector("com.example2.app1"));
+    UNWRAP_EQ(controller_.getPackagesForUid(12000), makeVector("com.example.other"));
+}
+
+TEST_F(NativePermissionControllerTest, updatePackages_ExistingUid) {
+    std::vector<UidPackageState> input{
+            createState(10000, {"com.example.app1", "com.example.app2", "com.example.app3"}),
+            createState(10001, {"com.example2.app1"}),
+    };
+
+    EXPECT_TRUE(controller_.populatePackagesForUids(input).isOk());
+    // Update packages for existing uid
+    UidPackageState newState = createState(10000, {"com.example.other", "com.example.new"});
+    EXPECT_TRUE(controller_.updatePackagesForUid(newState).isOk());
+
+    // Verify update
+    UNWRAP_EQ(controller_.getPackagesForUid(10000),
+              makeVector("com.example.other", "com.example.new"));
+}
+
+TEST_F(NativePermissionControllerTest, updatePackages_EmptyRemovesEntry) {
+    std::vector<UidPackageState> input{
+            createState(10000, {"com.example.app1"}),
+    };
+
+    EXPECT_TRUE(controller_.populatePackagesForUids(input).isOk());
+
+    UidPackageState newState{};  // Empty package list
+    newState.uid = 10000;
+    EXPECT_TRUE(controller_.updatePackagesForUid(newState).isOk());
+    // getPackages for unknown UID should error out
+    const auto res = controller_.getPackagesForUid(10000);
+    ASSERT_FALSE(res.has_value());
+    EXPECT_EQ(res.error(), ::android::BAD_VALUE);
+}
+
+TEST_F(NativePermissionControllerTest, validateUidPackagePair_ValidPair) {
+    std::vector<UidPackageState> input{
+            createState(10000, {"com.example.app1", "com.example.app2"}),
+    };
+
+    EXPECT_TRUE(controller_.populatePackagesForUids(input).isOk());
+
+    UNWRAP_EQ(controller_.validateUidPackagePair(10000, "com.example.app1"), true);
+}
+
+TEST_F(NativePermissionControllerTest, validateUidPackagePair_InvalidPackage) {
+    std::vector<UidPackageState> input{
+            createState(10000, {"com.example.app1", "com.example.app2"}),
+    };
+
+    EXPECT_TRUE(controller_.populatePackagesForUids(input).isOk());
+
+    UNWRAP_EQ(controller_.validateUidPackagePair(10000, "com.example.other"), false);
+}
+
+TEST_F(NativePermissionControllerTest, validateUidPackagePair_UnknownUid) {
+    std::vector<UidPackageState> input{
+            createState(10000, {"com.example.app1", "com.example.app2"}),
+    };
+
+    EXPECT_TRUE(controller_.populatePackagesForUids(input).isOk());
+
+    UNWRAP_EQ(controller_.validateUidPackagePair(12000, "any.package"), false);
+}
diff --git a/services/audiopolicy/service/Android.bp b/services/audiopolicy/service/Android.bp
index 9b7a470..f23ec94 100644
--- a/services/audiopolicy/service/Android.bp
+++ b/services/audiopolicy/service/Android.bp
@@ -52,6 +52,8 @@
     ],
 
     static_libs: [
+        "audio-permission-aidl-cpp",
+        "audiopermissioncontroller",
         "libaudiopolicycomponents",
         "libeffectsconfig",
     ],
@@ -81,7 +83,7 @@
     ],
 
     static_libs: [
-        "framework-permission-aidl-cpp",
+        "framework-permission-aidl-cpp", // TODO remove when unnnecessary
     ],
 
     header_libs: [
diff --git a/services/audiopolicy/service/AudioPolicyClientImpl.cpp b/services/audiopolicy/service/AudioPolicyClientImpl.cpp
index 6de71a3..f70dc52 100644
--- a/services/audiopolicy/service/AudioPolicyClientImpl.cpp
+++ b/services/audiopolicy/service/AudioPolicyClientImpl.cpp
@@ -352,4 +352,13 @@
     return af->getAudioMixPort(devicePort, port);
 }
 
+status_t AudioPolicyService::AudioPolicyClient::setTracksInternalMute(
+        const std::vector<media::TrackInternalMuteInfo>& tracksInternalMute) {
+    sp<IAudioFlinger> af = AudioSystem::get_audio_flinger();
+    if (af == nullptr) {
+        return PERMISSION_DENIED;
+    }
+    return af->setTracksInternalMute(tracksInternalMute);
+}
+
 } // namespace android
diff --git a/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp b/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp
index a862037..8ba2814 100644
--- a/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp
+++ b/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp
@@ -14,12 +14,14 @@
  * limitations under the License.
  */
 
-#define LOG_TAG "AudioPolicyIntefaceImpl"
+#define LOG_TAG "AudioPolicyInterfaceImpl"
 //#define LOG_NDEBUG 0
 
 #include "AudioPolicyService.h"
 #include "AudioRecordClient.h"
 #include "TypeConverter.h"
+
+#include <android/content/AttributionSourceState.h>
 #include <android_media_audiopolicy.h>
 #include <media/AidlConversion.h>
 #include <media/AudioPolicy.h>
@@ -27,7 +29,6 @@
 #include <media/MediaMetricsItem.h>
 #include <media/PolicyAidlConversion.h>
 #include <utils/Log.h>
-#include <android/content/AttributionSourceState.h>
 
 #define VALUE_OR_RETURN_BINDER_STATUS(x) \
     ({ auto _tmp = (x); \
@@ -49,6 +50,7 @@
 namespace audiopolicy_flags = android::media::audiopolicy;
 using binder::Status;
 using aidl_utils::binderStatusFromStatusT;
+using com::android::media::permission::NativePermissionController;
 using content::AttributionSourceState;
 using media::audio::common::AudioConfig;
 using media::audio::common::AudioConfigBase;
@@ -1020,6 +1022,32 @@
     return Status::ok();
 }
 
+Status AudioPolicyService::setDeviceAbsoluteVolumeEnabled(const AudioDevice& deviceAidl,
+                                                          bool enabled,
+                                                          AudioStreamType streamToDriveAbsAidl) {
+    audio_stream_type_t streamToDriveAbs = VALUE_OR_RETURN_BINDER_STATUS(
+            aidl2legacy_AudioStreamType_audio_stream_type_t(streamToDriveAbsAidl));
+    audio_devices_t deviceType;
+    std::string address;
+    RETURN_BINDER_STATUS_IF_ERROR(
+            aidl2legacy_AudioDevice_audio_device(deviceAidl, &deviceType, &address));
+
+    if (mAudioPolicyManager == nullptr) {
+        return binderStatusFromStatusT(NO_INIT);
+    }
+    if (!settingsAllowed()) {
+        return binderStatusFromStatusT(PERMISSION_DENIED);
+    }
+    if (uint32_t(streamToDriveAbs) >= AUDIO_STREAM_PUBLIC_CNT) {
+        return binderStatusFromStatusT(BAD_VALUE);
+    }
+    audio_utils::lock_guard _l(mMutex);
+    AutoCallerClear acc;
+    return binderStatusFromStatusT(
+            mAudioPolicyManager->setDeviceAbsoluteVolumeEnabled(deviceType, address.c_str(),
+                                                                enabled, streamToDriveAbs));
+}
+
 Status AudioPolicyService::initStreamVolume(AudioStreamType streamAidl,
                                             int32_t indexMinAidl,
                                             int32_t indexMaxAidl) {
@@ -2643,4 +2671,9 @@
             mAudioPolicyManager->clearPreferredMixerAttributes(&attr, portId, uid));
 }
 
+Status AudioPolicyService::getPermissionController(sp<INativePermissionController>* out) {
+    *out = mPermissionController;
+    return Status::ok();
+}
+
 } // namespace android
diff --git a/services/audiopolicy/service/AudioPolicyService.cpp b/services/audiopolicy/service/AudioPolicyService.cpp
index f372c71..73c4a15 100644
--- a/services/audiopolicy/service/AudioPolicyService.cpp
+++ b/services/audiopolicy/service/AudioPolicyService.cpp
@@ -79,6 +79,7 @@
 BINDER_METHOD_ENTRY(startInput) \
 BINDER_METHOD_ENTRY(stopInput) \
 BINDER_METHOD_ENTRY(releaseInput) \
+BINDER_METHOD_ENTRY(setDeviceAbsoluteVolumeEnabled) \
 BINDER_METHOD_ENTRY(initStreamVolume) \
 BINDER_METHOD_ENTRY(setStreamVolumeIndex) \
 BINDER_METHOD_ENTRY(getStreamVolumeIndex) \
@@ -164,6 +165,7 @@
 BINDER_METHOD_ENTRY(getPreferredMixerAttributes) \
 BINDER_METHOD_ENTRY(clearPreferredMixerAttributes) \
 BINDER_METHOD_ENTRY(getRegisteredPolicyMixes) \
+BINDER_METHOD_ENTRY(getPermissionController) \
                                                      \
 // singleton for Binder Method Statistics for IAudioPolicyService
 static auto& getIAudioPolicyServiceStatistics() {
@@ -226,7 +228,9 @@
       mCaptureStateNotifier(false),
       mCreateAudioPolicyManager(createAudioPolicyManager),
       mDestroyAudioPolicyManager(destroyAudioPolicyManager),
-      mUsecaseValidator(media::createUsecaseValidator()) {
+      mUsecaseValidator(media::createUsecaseValidator()),
+      mPermissionController(sp<NativePermissionController>::make())
+{
       setMinSchedulerPolicy(SCHED_NORMAL, ANDROID_PRIORITY_AUDIO);
       setInheritRt(true);
 }
@@ -1321,6 +1325,7 @@
         case TRANSACTION_setPhoneState:
 //FIXME: Allow setForceUse calls from system apps until a better use case routing API is available
 //      case TRANSACTION_setForceUse:
+        case TRANSACTION_setDeviceAbsoluteVolumeEnabled:
         case TRANSACTION_initStreamVolume:
         case TRANSACTION_setStreamVolumeIndex:
         case TRANSACTION_setVolumeIndexForAttributes:
@@ -1382,6 +1387,17 @@
             break;
     }
 
+    switch (code) {
+        case TRANSACTION_getPermissionController: {
+            if (!isAudioServerOrSystemServerUid(IPCThreadState::self()->getCallingUid())) {
+                ALOGW("%s: transaction %d received from PID %d with unauthorized UID %d",
+                      __func__, code, IPCThreadState::self()->getCallingPid(),
+                      IPCThreadState::self()->getCallingUid());
+                return INVALID_OPERATION;
+            }
+        }
+    }
+
     const std::string methodName = getIAudioPolicyServiceStatistics().getMethodForCode(code);
     mediautils::TimeCheck check(
             std::string("IAudioPolicyService::").append(methodName),
diff --git a/services/audiopolicy/service/AudioPolicyService.h b/services/audiopolicy/service/AudioPolicyService.h
index 5297e47..41b28c5 100644
--- a/services/audiopolicy/service/AudioPolicyService.h
+++ b/services/audiopolicy/service/AudioPolicyService.h
@@ -21,6 +21,7 @@
 #include <android/media/GetSpatializerResponse.h>
 #include <android-base/thread_annotations.h>
 #include <audio_utils/mutex.h>
+#include <com/android/media/permission/INativePermissionController.h>
 #include <cutils/misc.h>
 #include <cutils/config_utils.h>
 #include <cutils/compiler.h>
@@ -35,6 +36,7 @@
 #include <media/ToneGenerator.h>
 #include <media/AudioEffect.h>
 #include <media/AudioPolicy.h>
+#include <media/NativePermissionController.h>
 #include <media/UsecaseValidator.h>
 #include <mediautils/ServiceUtilities.h>
 #include "AudioPolicyEffects.h"
@@ -68,6 +70,8 @@
 }
 
 using ::android::media::audiopolicy::AudioRecordClient;
+using ::com::android::media::permission::INativePermissionController;
+using ::com::android::media::permission::NativePermissionController;
 
 class AudioPolicyService :
     public BinderService<AudioPolicyService>,
@@ -121,6 +125,9 @@
     binder::Status startInput(int32_t portId) override;
     binder::Status stopInput(int32_t portId) override;
     binder::Status releaseInput(int32_t portId) override;
+    binder::Status setDeviceAbsoluteVolumeEnabled(const AudioDevice& device,
+                                                  bool enabled,
+                                                  AudioStreamType streamToDriveAbs) override;
     binder::Status initStreamVolume(AudioStreamType stream, int32_t indexMin,
                                     int32_t indexMax) override;
     binder::Status setStreamVolumeIndex(AudioStreamType stream,
@@ -314,6 +321,9 @@
     binder::Status getRegisteredPolicyMixes(
             std::vector <::android::media::AudioMix>* mixes) override;
 
+    // Should only be called by AudioService to push permission data down to audioserver
+    binder::Status getPermissionController(sp<INativePermissionController>* out) override;
+
     status_t onTransact(uint32_t code, const Parcel& data, Parcel* reply, uint32_t flags) override;
 
     // IBinder::DeathRecipient
@@ -872,6 +882,9 @@
         status_t getAudioMixPort(const struct audio_port_v7 *devicePort,
                                  struct audio_port_v7 *port) override;
 
+        status_t setTracksInternalMute(
+                const std::vector<media::TrackInternalMuteInfo>& tracksInternalMute) override;
+
      private:
         AudioPolicyService *mAudioPolicyService;
     };
@@ -1045,6 +1058,7 @@
     CreateAudioPolicyManagerInstance mCreateAudioPolicyManager;
     DestroyAudioPolicyManagerInstance mDestroyAudioPolicyManager;
     std::unique_ptr<media::UsecaseValidator> mUsecaseValidator;
+    const sp<NativePermissionController> mPermissionController;
 };
 
 } // namespace android
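
One design point worth spelling out: the same NativePermissionController instance is exported over binder for writes (getPermissionController, populated by the system side) and consumed in-process as a read-only IPermissionProvider. Here is a toy, self-contained model of that one-object/two-interfaces shape; the class names below are illustrative, not the patch's types.

// Toy model: one object exposes a remote-facing write interface and a local read interface.
#include <memory>
#include <string>
#include <vector>

struct IWriteSide {                     // plays the role of INativePermissionController
    virtual void push(std::vector<std::string> pkgs) = 0;
    virtual ~IWriteSide() = default;
};

struct IReadSide {                      // plays the role of IPermissionProvider
    virtual const std::vector<std::string>& snapshot() const = 0;
    virtual ~IReadSide() = default;
};

class Controller : public IWriteSide, public IReadSide {
  public:
    void push(std::vector<std::string> pkgs) override { pkgs_ = std::move(pkgs); }
    const std::vector<std::string>& snapshot() const override { return pkgs_; }

  private:
    std::vector<std::string> pkgs_;
};

int main() {
    auto controller = std::make_shared<Controller>();
    std::shared_ptr<IWriteSide> writer = controller;  // handed out to the system side
    std::shared_ptr<IReadSide> reader = controller;   // used by in-process services
    writer->push({"com.example.app"});
    return reader->snapshot().size() == 1 ? 0 : 1;
}
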
diff --git a/services/audiopolicy/tests/Android.bp b/services/audiopolicy/tests/Android.bp
index fc349ee..dc61115 100644
--- a/services/audiopolicy/tests/Android.bp
+++ b/services/audiopolicy/tests/Android.bp
@@ -40,6 +40,7 @@
     static_libs: [
         "android.media.audiopolicy-aconfig-cc",
         "audioclient-types-aidl-cpp",
+        "com.android.media.audioserver-aconfig-cc",
         "libaudiopolicycomponents",
         "libflagtest",
         "libgmock",
diff --git a/services/audiopolicy/tests/AudioPolicyManagerTestClient.h b/services/audiopolicy/tests/AudioPolicyManagerTestClient.h
index 7ef0266..30894c1 100644
--- a/services/audiopolicy/tests/AudioPolicyManagerTestClient.h
+++ b/services/audiopolicy/tests/AudioPolicyManagerTestClient.h
@@ -221,6 +221,15 @@
         return NO_ERROR;
     }
 
+    status_t setTracksInternalMute(
+            const std::vector<media::TrackInternalMuteInfo>& tracksInternalMute) override {
+        for (const auto& trackInternalMute : tracksInternalMute) {
+            mTracksInternalMute[(audio_port_handle_t)trackInternalMute.portId] =
+                    trackInternalMute.muted;
+        }
+        return NO_ERROR;
+    }
+
     void addSupportedFormat(audio_format_t format) {
         mSupportedFormats.insert(format);
     }
@@ -229,6 +238,11 @@
         mSupportedChannelMasks.insert(channelMask);
     }
 
+    bool getTrackInternalMute(audio_port_handle_t portId) {
+        auto it = mTracksInternalMute.find(portId);
+        return it == mTracksInternalMute.end() ? false : it->second;
+    }
+
 private:
     audio_module_handle_t mNextModuleHandle = AUDIO_MODULE_HANDLE_NONE + 1;
     audio_io_handle_t mNextIoHandle = AUDIO_IO_HANDLE_NONE + 1;
@@ -241,6 +255,7 @@
     std::vector<struct audio_port_v7> mDisconnectedDevicePorts;
     std::set<audio_format_t> mSupportedFormats;
     std::set<audio_channel_mask_t> mSupportedChannelMasks;
+    std::map<audio_port_handle_t, bool> mTracksInternalMute;
 };
 
 } // namespace android
diff --git a/services/audiopolicy/tests/AudioPolicyTestClient.h b/services/audiopolicy/tests/AudioPolicyTestClient.h
index e55e935..c15adcb 100644
--- a/services/audiopolicy/tests/AudioPolicyTestClient.h
+++ b/services/audiopolicy/tests/AudioPolicyTestClient.h
@@ -110,6 +110,11 @@
                              struct audio_port_v7 *mixPort __unused) override {
         return INVALID_OPERATION;
     }
+
+    status_t setTracksInternalMute(
+            const std::vector<media::TrackInternalMuteInfo>& /*tracksInternalMute*/) override {
+        return INVALID_OPERATION;
+    }
 };
 
 } // namespace android
diff --git a/services/audiopolicy/tests/audiopolicymanager_tests.cpp b/services/audiopolicy/tests/audiopolicymanager_tests.cpp
index 5dc039c..4f9e98e 100644
--- a/services/audiopolicy/tests/audiopolicymanager_tests.cpp
+++ b/services/audiopolicy/tests/audiopolicymanager_tests.cpp
@@ -29,6 +29,7 @@
 #include <android-base/properties.h>
 #include <android/content/AttributionSourceState.h>
 #include <android_media_audiopolicy.h>
+#include <com_android_media_audioserver.h>
 #include <flag_macros.h>
 #include <hardware/audio_effect.h>
 #include <media/AudioPolicy.h>
@@ -1155,129 +1156,6 @@
                                                            "", "", AUDIO_FORMAT_LDAC));
 }
 
-TEST_F(AudioPolicyManagerTestWithConfigurationFile, BitPerfectPlayback) {
-    const audio_format_t bitPerfectFormat = AUDIO_FORMAT_PCM_16_BIT;
-    const audio_channel_mask_t bitPerfectChannelMask = AUDIO_CHANNEL_OUT_QUAD;
-    const uint32_t bitPerfectSampleRate = 48000;
-    mClient->addSupportedFormat(bitPerfectFormat);
-    mClient->addSupportedChannelMask(bitPerfectChannelMask);
-    ASSERT_EQ(NO_ERROR, mManager->setDeviceConnectionState(AUDIO_DEVICE_OUT_USB_DEVICE,
-                                                           AUDIO_POLICY_DEVICE_STATE_AVAILABLE,
-                                                           "", "", AUDIO_FORMAT_DEFAULT));
-    auto devices = mManager->getAvailableOutputDevices();
-    audio_port_handle_t usbPortId = AUDIO_PORT_HANDLE_NONE;
-    for (auto device : devices) {
-        if (device->type() == AUDIO_DEVICE_OUT_USB_DEVICE) {
-            usbPortId = device->getId();
-            break;
-        }
-    }
-    EXPECT_NE(AUDIO_PORT_HANDLE_NONE, usbPortId);
-
-    const uid_t uid = 1234;
-    const uid_t anotherUid = 5678;
-    const audio_attributes_t mediaAttr = {
-            .content_type = AUDIO_CONTENT_TYPE_MUSIC,
-            .usage = AUDIO_USAGE_MEDIA,
-    };
-
-    std::vector<audio_mixer_attributes_t> mixerAttributes;
-    EXPECT_EQ(NO_ERROR, mManager->getSupportedMixerAttributes(usbPortId, mixerAttributes));
-    EXPECT_GT(mixerAttributes.size(), 0);
-    size_t bitPerfectIndex = 0;
-    for (; bitPerfectIndex < mixerAttributes.size(); ++bitPerfectIndex) {
-        if (mixerAttributes[bitPerfectIndex].mixer_behavior == AUDIO_MIXER_BEHAVIOR_BIT_PERFECT) {
-            break;
-        }
-    }
-    EXPECT_LT(bitPerfectIndex, mixerAttributes.size());
-    EXPECT_EQ(bitPerfectFormat, mixerAttributes[bitPerfectIndex].config.format);
-    EXPECT_EQ(bitPerfectChannelMask, mixerAttributes[bitPerfectIndex].config.channel_mask);
-    EXPECT_EQ(bitPerfectSampleRate, mixerAttributes[bitPerfectIndex].config.sample_rate);
-    EXPECT_EQ(NO_ERROR,
-              mManager->setPreferredMixerAttributes(
-                      &mediaAttr, usbPortId, uid, &mixerAttributes[bitPerfectIndex]));
-
-    audio_io_handle_t bitPerfectOutput = AUDIO_IO_HANDLE_NONE;
-    audio_io_handle_t output = AUDIO_IO_HANDLE_NONE;
-    audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
-    audio_port_handle_t bitPerfectPortId = AUDIO_PORT_HANDLE_NONE;
-    audio_port_handle_t portId = AUDIO_PORT_HANDLE_NONE;
-    bool isBitPerfect;
-
-    // When there is no active bit-perfect playback, the output selection will follow default
-    // routing strategy.
-    getOutputForAttr(&selectedDeviceId, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
-            48000, AUDIO_OUTPUT_FLAG_NONE, &output, &portId, mediaAttr, AUDIO_SESSION_NONE,
-            uid, &isBitPerfect);
-    EXPECT_FALSE(isBitPerfect);
-    EXPECT_NE(AUDIO_IO_HANDLE_NONE, output);
-    const auto outputDesc = mManager->getOutputs().valueFor(output);
-    EXPECT_NE(nullptr, outputDesc);
-    EXPECT_NE(AUDIO_OUTPUT_FLAG_BIT_PERFECT, outputDesc->mFlags & AUDIO_OUTPUT_FLAG_BIT_PERFECT);
-
-    // Start bit-perfect playback
-    getOutputForAttr(&selectedDeviceId, bitPerfectFormat, bitPerfectChannelMask,
-            bitPerfectSampleRate, AUDIO_OUTPUT_FLAG_NONE, &bitPerfectOutput, &bitPerfectPortId,
-            mediaAttr, AUDIO_SESSION_NONE, uid, &isBitPerfect);
-    status_t status = mManager->startOutput(bitPerfectPortId);
-    if (status == DEAD_OBJECT) {
-        getOutputForAttr(&selectedDeviceId, bitPerfectFormat, bitPerfectChannelMask,
-                bitPerfectSampleRate, AUDIO_OUTPUT_FLAG_NONE, &bitPerfectOutput, &bitPerfectPortId,
-                mediaAttr, AUDIO_SESSION_NONE, uid, &isBitPerfect);
-        status = mManager->startOutput(bitPerfectPortId);
-    }
-    EXPECT_EQ(NO_ERROR, status);
-    EXPECT_TRUE(isBitPerfect);
-    EXPECT_NE(AUDIO_IO_HANDLE_NONE, bitPerfectOutput);
-    const auto bitPerfectOutputDesc = mManager->getOutputs().valueFor(bitPerfectOutput);
-    EXPECT_NE(nullptr, bitPerfectOutputDesc);
-    EXPECT_EQ(AUDIO_OUTPUT_FLAG_BIT_PERFECT,
-              bitPerfectOutputDesc->mFlags & AUDIO_OUTPUT_FLAG_BIT_PERFECT);
-
-    // If the playback is from preferred mixer attributes owner but the request doesn't match
-    // preferred mixer attributes, it will not be bit-perfect.
-    getOutputForAttr(&selectedDeviceId, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
-            48000, AUDIO_OUTPUT_FLAG_NONE, &output, &portId, mediaAttr, AUDIO_SESSION_NONE,
-            uid, &isBitPerfect);
-    EXPECT_FALSE(isBitPerfect);
-    EXPECT_EQ(bitPerfectOutput, output);
-
-    // When bit-perfect playback is active, all other playback will be routed to bit-perfect output.
-    getOutputForAttr(&selectedDeviceId, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
-            48000, AUDIO_OUTPUT_FLAG_NONE, &output, &portId, mediaAttr, AUDIO_SESSION_NONE,
-            anotherUid, &isBitPerfect);
-    EXPECT_FALSE(isBitPerfect);
-    EXPECT_EQ(bitPerfectOutput, output);
-
-    const audio_attributes_t dtmfAttr = {
-            .content_type = AUDIO_CONTENT_TYPE_UNKNOWN,
-            .usage = AUDIO_USAGE_VOICE_COMMUNICATION_SIGNALLING,
-    };
-    audio_io_handle_t dtmfOutput = AUDIO_IO_HANDLE_NONE;
-    selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
-    portId = AUDIO_PORT_HANDLE_NONE;
-    getOutputForAttr(&selectedDeviceId, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
-            48000, AUDIO_OUTPUT_FLAG_NONE, &dtmfOutput, &portId, dtmfAttr,
-            AUDIO_SESSION_NONE, anotherUid, &isBitPerfect);
-    EXPECT_FALSE(isBitPerfect);
-    EXPECT_EQ(bitPerfectOutput, dtmfOutput);
-
-    // When configuration matches preferred mixer attributes, which is bit-perfect, but the client
-    // is not the owner of preferred mixer attributes, the playback will not be bit-perfect.
-    getOutputForAttr(&selectedDeviceId, bitPerfectFormat, bitPerfectChannelMask,
-            bitPerfectSampleRate, AUDIO_OUTPUT_FLAG_NONE, &output, &portId, mediaAttr,
-            AUDIO_SESSION_NONE, anotherUid, &isBitPerfect);
-    EXPECT_FALSE(isBitPerfect);
-    EXPECT_EQ(bitPerfectOutput, output);
-
-    EXPECT_EQ(NO_ERROR,
-              mManager->clearPreferredMixerAttributes(&mediaAttr, usbPortId, uid));
-    ASSERT_EQ(NO_ERROR, mManager->setDeviceConnectionState(AUDIO_DEVICE_OUT_USB_DEVICE,
-                                                           AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE,
-                                                           "", "", AUDIO_FORMAT_LDAC));
-}
-
 TEST_F(AudioPolicyManagerTestWithConfigurationFile, PreferExactConfigForInput) {
     const audio_channel_mask_t deviceChannelMask = AUDIO_CHANNEL_IN_3POINT1;
     mClient->addSupportedFormat(AUDIO_FORMAT_PCM_16_BIT);
@@ -3612,3 +3490,314 @@
     // unregister effect should succeed since effect shall have been restore on the client session
     ASSERT_EQ(NO_ERROR, mManager->unregisterEffect(effectId));
 }
+
+class AudioPolicyManagerTestBitPerfectBase : public AudioPolicyManagerTestWithConfigurationFile {
+protected:
+    void SetUp() override;
+    void TearDown() override;
+
+    void startBitPerfectOutput();
+    void reset();
+    void getBitPerfectOutput(status_t expected);
+
+    const audio_format_t mBitPerfectFormat = AUDIO_FORMAT_PCM_16_BIT;
+    const audio_channel_mask_t mBitPerfectChannelMask = AUDIO_CHANNEL_OUT_STEREO;
+    const uint32_t mBitPerfectSampleRate = 48000;
+    const uid_t mUid = 1234;
+    audio_port_handle_t mUsbPortId = AUDIO_PORT_HANDLE_NONE;
+
+    audio_io_handle_t mBitPerfectOutput = AUDIO_IO_HANDLE_NONE;
+    audio_port_handle_t mSelectedDeviceId = AUDIO_PORT_HANDLE_NONE;
+    audio_port_handle_t mBitPerfectPortId = AUDIO_PORT_HANDLE_NONE;
+
+    static constexpr audio_attributes_t sMediaAttr = {
+            .content_type = AUDIO_CONTENT_TYPE_MUSIC,
+            .usage = AUDIO_USAGE_MEDIA,
+    };
+};
+
+void AudioPolicyManagerTestBitPerfectBase::SetUp() {
+    ASSERT_NO_FATAL_FAILURE(AudioPolicyManagerTestWithConfigurationFile::SetUp());
+
+    mClient->addSupportedFormat(mBitPerfectFormat);
+    mClient->addSupportedChannelMask(mBitPerfectChannelMask);
+    ASSERT_EQ(NO_ERROR, mManager->setDeviceConnectionState(AUDIO_DEVICE_OUT_USB_DEVICE,
+                                                           AUDIO_POLICY_DEVICE_STATE_AVAILABLE,
+                                                           "", "", AUDIO_FORMAT_DEFAULT));
+    auto devices = mManager->getAvailableOutputDevices();
+    mUsbPortId = AUDIO_PORT_HANDLE_NONE;
+    for (auto device : devices) {
+        if (device->type() == AUDIO_DEVICE_OUT_USB_DEVICE) {
+            mUsbPortId = device->getId();
+            break;
+        }
+    }
+    EXPECT_NE(AUDIO_PORT_HANDLE_NONE, mUsbPortId);
+
+    std::vector<audio_mixer_attributes_t> mixerAttributes;
+    EXPECT_EQ(NO_ERROR, mManager->getSupportedMixerAttributes(mUsbPortId, mixerAttributes));
+    EXPECT_GT(mixerAttributes.size(), 0);
+    size_t bitPerfectIndex = 0;
+    for (; bitPerfectIndex < mixerAttributes.size(); ++bitPerfectIndex) {
+        if (mixerAttributes[bitPerfectIndex].mixer_behavior == AUDIO_MIXER_BEHAVIOR_BIT_PERFECT) {
+            break;
+        }
+    }
+    EXPECT_LT(bitPerfectIndex, mixerAttributes.size());
+    EXPECT_EQ(mBitPerfectFormat, mixerAttributes[bitPerfectIndex].config.format);
+    EXPECT_EQ(mBitPerfectChannelMask, mixerAttributes[bitPerfectIndex].config.channel_mask);
+    EXPECT_EQ(mBitPerfectSampleRate, mixerAttributes[bitPerfectIndex].config.sample_rate);
+    EXPECT_EQ(NO_ERROR,
+              mManager->setPreferredMixerAttributes(
+                      &sMediaAttr, mUsbPortId, mUid, &mixerAttributes[bitPerfectIndex]));
+}
+
+void AudioPolicyManagerTestBitPerfectBase::TearDown() {
+    EXPECT_EQ(NO_ERROR,
+              mManager->clearPreferredMixerAttributes(&sMediaAttr, mUsbPortId, mUid));
+    ASSERT_EQ(NO_ERROR, mManager->setDeviceConnectionState(AUDIO_DEVICE_OUT_USB_DEVICE,
+                                                           AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE,
+                                                           "", "", AUDIO_FORMAT_LDAC));
+
+    ASSERT_NO_FATAL_FAILURE(AudioPolicyManagerTestWithConfigurationFile::TearDown());
+}
+
+void AudioPolicyManagerTestBitPerfectBase::startBitPerfectOutput() {
+    reset();
+    bool isBitPerfect;
+
+    getOutputForAttr(&mSelectedDeviceId, mBitPerfectFormat, mBitPerfectChannelMask,
+                     mBitPerfectSampleRate, AUDIO_OUTPUT_FLAG_NONE, &mBitPerfectOutput,
+                     &mBitPerfectPortId, sMediaAttr, AUDIO_SESSION_NONE, mUid, &isBitPerfect);
+    status_t status = mManager->startOutput(mBitPerfectPortId);
+    if (status == DEAD_OBJECT) {
+        getOutputForAttr(&mSelectedDeviceId, mBitPerfectFormat, mBitPerfectChannelMask,
+                         mBitPerfectSampleRate, AUDIO_OUTPUT_FLAG_NONE, &mBitPerfectOutput,
+                         &mBitPerfectPortId, sMediaAttr, AUDIO_SESSION_NONE, mUid, &isBitPerfect);
+        status = mManager->startOutput(mBitPerfectPortId);
+    }
+    EXPECT_EQ(NO_ERROR, status);
+    EXPECT_TRUE(isBitPerfect);
+    EXPECT_NE(AUDIO_IO_HANDLE_NONE, mBitPerfectOutput);
+    const auto bitPerfectOutputDesc = mManager->getOutputs().valueFor(mBitPerfectOutput);
+    EXPECT_NE(nullptr, bitPerfectOutputDesc);
+    EXPECT_EQ(AUDIO_OUTPUT_FLAG_BIT_PERFECT,
+              bitPerfectOutputDesc->mFlags & AUDIO_OUTPUT_FLAG_BIT_PERFECT);
+}
+
+void AudioPolicyManagerTestBitPerfectBase::reset() {
+    mBitPerfectOutput = AUDIO_IO_HANDLE_NONE;
+    mSelectedDeviceId = AUDIO_PORT_HANDLE_NONE;
+    mBitPerfectPortId = AUDIO_PORT_HANDLE_NONE;
+}
+
+void AudioPolicyManagerTestBitPerfectBase::getBitPerfectOutput(status_t expected) {
+    reset();
+    audio_stream_type_t stream = AUDIO_STREAM_DEFAULT;
+    AttributionSourceState attributionSource = createAttributionSourceState(mUid);
+    audio_config_t config = AUDIO_CONFIG_INITIALIZER;
+    config.sample_rate = mBitPerfectSampleRate;
+    config.channel_mask = mBitPerfectChannelMask;
+    config.format = mBitPerfectFormat;
+    audio_output_flags_t flags = AUDIO_OUTPUT_FLAG_BIT_PERFECT;
+    AudioPolicyInterface::output_type_t outputType;
+    bool isSpatialized;
+    bool isBitPerfect;
+    EXPECT_EQ(expected,
+              mManager->getOutputForAttr(&sMediaAttr, &mBitPerfectOutput, AUDIO_SESSION_NONE,
+                                         &stream, attributionSource, &config, &flags,
+                                         &mSelectedDeviceId, &mBitPerfectPortId, {}, &outputType,
+                                         &isSpatialized, &isBitPerfect));
+}
+
+class AudioPolicyManagerTestBitPerfect : public AudioPolicyManagerTestBitPerfectBase {
+};
+
+TEST_F(AudioPolicyManagerTestBitPerfect, UseBitPerfectOutput) {
+    const uid_t anotherUid = 5678;
+    audio_io_handle_t output = AUDIO_IO_HANDLE_NONE;
+    audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
+    audio_port_handle_t portId = AUDIO_PORT_HANDLE_NONE;
+    bool isBitPerfect;
+
+    // When there is no active bit-perfect playback, the output selection will follow default
+    // routing strategy.
+    getOutputForAttr(&selectedDeviceId, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_QUAD,
+                     48000, AUDIO_OUTPUT_FLAG_NONE, &output, &portId, sMediaAttr,
+                     AUDIO_SESSION_NONE, mUid, &isBitPerfect);
+    EXPECT_FALSE(isBitPerfect);
+    EXPECT_NE(AUDIO_IO_HANDLE_NONE, output);
+    const auto outputDesc = mManager->getOutputs().valueFor(output);
+    EXPECT_NE(nullptr, outputDesc);
+    EXPECT_NE(AUDIO_OUTPUT_FLAG_BIT_PERFECT, outputDesc->mFlags & AUDIO_OUTPUT_FLAG_BIT_PERFECT);
+
+    // Start bit-perfect playback
+    ASSERT_NO_FATAL_FAILURE(startBitPerfectOutput());
+
+    // If the playback is from preferred mixer attributes owner but the request doesn't match
+    // preferred mixer attributes, it will not be bit-perfect.
+    getOutputForAttr(&selectedDeviceId, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_QUAD,
+                     48000, AUDIO_OUTPUT_FLAG_NONE, &output, &portId, sMediaAttr,
+                     AUDIO_SESSION_NONE, mUid, &isBitPerfect);
+    EXPECT_FALSE(isBitPerfect);
+    EXPECT_EQ(mBitPerfectOutput, output);
+
+    // When bit-perfect playback is active, all other playback will be routed to bit-perfect output.
+    getOutputForAttr(&selectedDeviceId, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
+                     48000, AUDIO_OUTPUT_FLAG_NONE, &output, &portId, sMediaAttr,
+                     AUDIO_SESSION_NONE, anotherUid, &isBitPerfect);
+    EXPECT_FALSE(isBitPerfect);
+    EXPECT_EQ(mBitPerfectOutput, output);
+
+    // When bit-perfect playback is active, DTMF will also be routed to the bit-perfect output.
+    const audio_attributes_t dtmfAttr = {
+            .content_type = AUDIO_CONTENT_TYPE_UNKNOWN,
+            .usage = AUDIO_USAGE_VOICE_COMMUNICATION_SIGNALLING,
+    };
+    audio_io_handle_t dtmfOutput = AUDIO_IO_HANDLE_NONE;
+    selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
+    portId = AUDIO_PORT_HANDLE_NONE;
+    getOutputForAttr(&selectedDeviceId, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
+                     48000, AUDIO_OUTPUT_FLAG_NONE, &dtmfOutput, &portId, dtmfAttr,
+                     AUDIO_SESSION_NONE, anotherUid, &isBitPerfect);
+    EXPECT_FALSE(isBitPerfect);
+    EXPECT_EQ(mBitPerfectOutput, dtmfOutput);
+
+    // When configuration matches preferred mixer attributes, which is bit-perfect, but the client
+    // is not the owner of preferred mixer attributes, the playback will not be bit-perfect.
+    getOutputForAttr(&selectedDeviceId, mBitPerfectFormat, mBitPerfectChannelMask,
+                     mBitPerfectSampleRate, AUDIO_OUTPUT_FLAG_NONE, &output, &portId, sMediaAttr,
+                     AUDIO_SESSION_NONE, anotherUid, &isBitPerfect);
+    EXPECT_FALSE(isBitPerfect);
+    EXPECT_EQ(mBitPerfectOutput, output);
+}
+
+TEST_F_WITH_FLAGS(
+        AudioPolicyManagerTestBitPerfect,
+        InternalMuteWhenBitPerfectClientIsActive,
+        REQUIRES_FLAGS_ENABLED(
+                ACONFIG_FLAG(com::android::media::audioserver,
+                             fix_concurrent_playback_behavior_with_bit_perfect_client))
+) {
+    ASSERT_NO_FATAL_FAILURE(startBitPerfectOutput());
+
+    // When bit-perfect playback is active, the system sound will be routed to bit-perfect output.
+    // The system sound will be muted internally in this case. The bit-perfect client will be
+    // played normally.
+    const uint32_t anotherSampleRate = 44100;
+    audio_port_handle_t systemSoundPortId = AUDIO_PORT_HANDLE_NONE;
+    audio_io_handle_t systemSoundOutput = AUDIO_IO_HANDLE_NONE;
+    const audio_attributes_t systemSoundAttr = {
+            .content_type = AUDIO_CONTENT_TYPE_SONIFICATION,
+            .usage = AUDIO_USAGE_ASSISTANCE_SONIFICATION,
+    };
+    audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
+    bool isBitPerfect;
+    getOutputForAttr(&selectedDeviceId, mBitPerfectFormat, mBitPerfectChannelMask,
+                     anotherSampleRate, AUDIO_OUTPUT_FLAG_NONE, &systemSoundOutput,
+                     &systemSoundPortId, systemSoundAttr, AUDIO_SESSION_NONE, mUid, &isBitPerfect);
+    EXPECT_FALSE(isBitPerfect);
+    EXPECT_EQ(mBitPerfectOutput, systemSoundOutput);
+    EXPECT_EQ(NO_ERROR, mManager->startOutput(systemSoundPortId));
+    EXPECT_TRUE(mClient->getTrackInternalMute(systemSoundPortId));
+    EXPECT_FALSE(mClient->getTrackInternalMute(mBitPerfectPortId));
+    EXPECT_EQ(NO_ERROR, mManager->stopOutput(systemSoundPortId));
+    EXPECT_FALSE(mClient->getTrackInternalMute(mBitPerfectPortId));
+
+    // When bit-perfect playback is active, the notification will be routed to bit-perfect output.
+    // The notification sound will be played normally while the bit-perfect client will be muted
+    // internally.
+    audio_port_handle_t notificationPortId = AUDIO_PORT_HANDLE_NONE;
+    audio_io_handle_t notificationOutput = AUDIO_IO_HANDLE_NONE;
+    const audio_attributes_t notificationAttr = {
+            .content_type = AUDIO_CONTENT_TYPE_SONIFICATION,
+            .usage = AUDIO_USAGE_NOTIFICATION,
+    };
+    getOutputForAttr(&selectedDeviceId, mBitPerfectFormat, mBitPerfectChannelMask,
+                     anotherSampleRate, AUDIO_OUTPUT_FLAG_NONE, &notificationOutput,
+                     &notificationPortId, notificationAttr, AUDIO_SESSION_NONE, mUid,
+                     &isBitPerfect);
+    EXPECT_FALSE(isBitPerfect);
+    EXPECT_EQ(mBitPerfectOutput, notificationOutput);
+    EXPECT_EQ(NO_ERROR, mManager->startOutput(notificationPortId));
+    EXPECT_FALSE(mClient->getTrackInternalMute(notificationPortId));
+    EXPECT_TRUE(mClient->getTrackInternalMute(mBitPerfectPortId));
+    EXPECT_EQ(NO_ERROR, mManager->stopOutput(notificationPortId));
+    EXPECT_FALSE(mClient->getTrackInternalMute(mBitPerfectPortId));
+
+    EXPECT_EQ(NO_ERROR, mManager->stopOutput(mBitPerfectPortId));
+}
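
To restate what the test above asserts, here is a compact sketch of the internal-mute policy from the client's perspective: a low-priority sound (system sonification) that lands on the bit-perfect output is muted internally, while a higher-priority sound (notification) plays and the bit-perfect track is muted instead. The enum and the priority split below are distilled from the expectations in this test and are illustrative, not the policy manager's implementation.

// Illustration of the mute decisions checked above; not the real policy code.
#include <cassert>

enum class Usage { SystemSonification, Notification };

struct MuteDecision {
    bool muteBitPerfectClient;
    bool muteOtherClient;
};

// Decide who is internally muted when `other` shares the bit-perfect output.
MuteDecision decide(Usage other) {
    switch (other) {
        case Usage::SystemSonification:
            return {/*muteBitPerfectClient=*/false, /*muteOtherClient=*/true};
        case Usage::Notification:
            return {/*muteBitPerfectClient=*/true, /*muteOtherClient=*/false};
    }
    return {false, false};  // unreachable with the cases above
}

int main() {
    assert(decide(Usage::SystemSonification).muteOtherClient);
    assert(!decide(Usage::SystemSonification).muteBitPerfectClient);
    assert(decide(Usage::Notification).muteBitPerfectClient);
    assert(!decide(Usage::Notification).muteOtherClient);
    return 0;
}
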
+
+class AudioPolicyManagerTestBitPerfectPhoneMode : public AudioPolicyManagerTestBitPerfectBase,
+        public testing::WithParamInterface<audio_mode_t> {
+};
+
+TEST_P(AudioPolicyManagerTestBitPerfectPhoneMode, RejectBitPerfectWhenPhoneModeIsNotNormal) {
+    if (!com::android::media::audioserver::
+            fix_concurrent_playback_behavior_with_bit_perfect_client()) {
+        GTEST_SKIP()
+                << "Flag fix_concurrent_playback_behavior_with_bit_perfect_client is not enabled";
+    }
+
+    ASSERT_NO_FATAL_FAILURE(startBitPerfectOutput());
+
+    audio_mode_t mode = GetParam();
+    mManager->setPhoneState(mode);
+    // When the phone mode is not normal, the bit-perfect output is closed and reopened,
+    // so the previous handle is no longer valid.
+    EXPECT_EQ(nullptr, mManager->getOutputs().valueFor(mBitPerfectOutput));
+
+    // While the phone mode is not normal, requests for a bit-perfect output are rejected.
+    ASSERT_NO_FATAL_FAILURE(getBitPerfectOutput(INVALID_OPERATION));
+
+    mManager->setPhoneState(AUDIO_MODE_NORMAL);
+}
+
+INSTANTIATE_TEST_CASE_P(
+        PhoneMode,
+        AudioPolicyManagerTestBitPerfectPhoneMode,
+        testing::Values(AUDIO_MODE_IN_CALL,
+                        AUDIO_MODE_RINGTONE,
+                        AUDIO_MODE_IN_COMMUNICATION,
+                        AUDIO_MODE_CALL_SCREEN)
+);
+
+class AudioPolicyManagerTestBitPerfectHigherPriorityUseCaseActive :
+        public AudioPolicyManagerTestBitPerfectBase,
+        public testing::WithParamInterface<audio_usage_t> {
+};
+
+TEST_P(AudioPolicyManagerTestBitPerfectHigherPriorityUseCaseActive,
+       RejectBitPerfectWhenHigherPriorityUseCaseIsActive) {
+    if (!com::android::media::audioserver::
+                fix_concurrent_playback_behavior_with_bit_perfect_client()) {
+        GTEST_SKIP()
+                << "Flag fix_concurrent_playback_behavior_with_bit_perfect_client is not enabled";
+    }
+
+    ASSERT_NO_FATAL_FAILURE(startBitPerfectOutput());
+
+    audio_attributes_t attr = {
+            .content_type = AUDIO_CONTENT_TYPE_UNKNOWN,
+            .usage = GetParam(),
+    };
+    audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
+    audio_port_handle_t portId = AUDIO_PORT_HANDLE_NONE;
+    audio_io_handle_t output = AUDIO_IO_HANDLE_NONE;
+    ASSERT_NO_FATAL_FAILURE(
+            getOutputForAttr(&selectedDeviceId, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
+                   48000, AUDIO_OUTPUT_FLAG_NONE, &output, &portId, attr));
+    EXPECT_NE(mBitPerfectOutput, output);
+    EXPECT_EQ(NO_ERROR, mManager->startOutput(portId));
+    // When a high priority use case is active, the bit-perfect output will be closed.
+    EXPECT_EQ(nullptr, mManager->getOutputs().valueFor(mBitPerfectOutput));
+
+    // When any higher priority use case is active, the bit-perfect request will be rejected.
+    ASSERT_NO_FATAL_FAILURE(getBitPerfectOutput(INVALID_OPERATION));
+}
+
+INSTANTIATE_TEST_CASE_P(
+        HigherPriorityUseCases,
+        AudioPolicyManagerTestBitPerfectHigherPriorityUseCaseActive,
+        testing::Values(AUDIO_USAGE_NOTIFICATION_TELEPHONY_RINGTONE,
+                        AUDIO_USAGE_ALARM)
+);
diff --git a/services/camera/libcameraservice/api2/DepthCompositeStream.cpp b/services/camera/libcameraservice/api2/DepthCompositeStream.cpp
index 1bd0b85..8b41d00 100644
--- a/services/camera/libcameraservice/api2/DepthCompositeStream.cpp
+++ b/services/camera/libcameraservice/api2/DepthCompositeStream.cpp
@@ -517,6 +517,15 @@
     return false;
 }
 
+bool DepthCompositeStream::isDepthCompositeStreamInfo(const OutputStreamInfo& streamInfo) {
+    if ((streamInfo.dataSpace == static_cast<android_dataspace_t>(HAL_DATASPACE_DYNAMIC_DEPTH)) &&
+            (streamInfo.format == HAL_PIXEL_FORMAT_BLOB)) {
+        return true;
+    }
+
+    return false;
+}
+
 static bool setContains(std::unordered_set<int32_t> containerSet, int32_t value) {
     return containerSet.find(value) != containerSet.end();
 }
diff --git a/services/camera/libcameraservice/api2/DepthCompositeStream.h b/services/camera/libcameraservice/api2/DepthCompositeStream.h
index f797f9c..3f8f6a2 100644
--- a/services/camera/libcameraservice/api2/DepthCompositeStream.h
+++ b/services/camera/libcameraservice/api2/DepthCompositeStream.h
@@ -46,6 +46,7 @@
     ~DepthCompositeStream() override;
 
     static bool isDepthCompositeStream(const sp<Surface> &surface);
+    static bool isDepthCompositeStreamInfo(const OutputStreamInfo& streamInfo);
 
     // CompositeStream overrides
     status_t createInternalStreams(const std::vector<sp<Surface>>& consumers,
diff --git a/services/camera/libcameraservice/api2/HeicCompositeStream.cpp b/services/camera/libcameraservice/api2/HeicCompositeStream.cpp
index 68e9ad4..225d7f5 100644
--- a/services/camera/libcameraservice/api2/HeicCompositeStream.cpp
+++ b/services/camera/libcameraservice/api2/HeicCompositeStream.cpp
@@ -94,6 +94,11 @@
     mMainImageSurface.clear();
 }
 
+bool HeicCompositeStream::isHeicCompositeStreamInfo(const OutputStreamInfo& streamInfo) {
+    return ((streamInfo.dataSpace == static_cast<android_dataspace_t>(HAL_DATASPACE_HEIF)) &&
+            (streamInfo.format == HAL_PIXEL_FORMAT_BLOB));
+}
+
 bool HeicCompositeStream::isHeicCompositeStream(const sp<Surface> &surface) {
     ANativeWindow *anw = surface.get();
     status_t err;
diff --git a/services/camera/libcameraservice/api2/HeicCompositeStream.h b/services/camera/libcameraservice/api2/HeicCompositeStream.h
index b539cdd..9cfd78b 100644
--- a/services/camera/libcameraservice/api2/HeicCompositeStream.h
+++ b/services/camera/libcameraservice/api2/HeicCompositeStream.h
@@ -42,6 +42,7 @@
     ~HeicCompositeStream() override;
 
     static bool isHeicCompositeStream(const sp<Surface> &surface);
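+    // Returns true if the stream info describes a HEIC stream
+    // (BLOB format with the HEIF dataspace).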
+    static bool isHeicCompositeStreamInfo(const OutputStreamInfo& streamInfo);
 
     status_t createInternalStreams(const std::vector<sp<Surface>>& consumers,
             bool hasDeferredConsumer, uint32_t width, uint32_t height, int format,
diff --git a/services/camera/libcameraservice/api2/JpegRCompositeStream.cpp b/services/camera/libcameraservice/api2/JpegRCompositeStream.cpp
index a1b9383..dadefcc 100644
--- a/services/camera/libcameraservice/api2/JpegRCompositeStream.cpp
+++ b/services/camera/libcameraservice/api2/JpegRCompositeStream.cpp
@@ -520,6 +520,15 @@
     return false;
 }
 
+bool JpegRCompositeStream::isJpegRCompositeStreamInfo(const OutputStreamInfo& streamInfo) {
+    if ((streamInfo.format == HAL_PIXEL_FORMAT_BLOB) &&
+            (streamInfo.dataSpace == static_cast<int>(kJpegRDataSpace))) {
+        return true;
+    }
+
+    return false;
+}
+
 void JpegRCompositeStream::deriveDynamicRangeAndDataspace(int64_t dynamicProfile,
         int64_t* /*out*/dynamicRange, int64_t* /*out*/dataSpace) {
     if ((dynamicRange == nullptr) || (dataSpace == nullptr)) {
@@ -832,8 +841,8 @@
     (*compositeOutput)[0].colorSpace =
         ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED;
 
-    if (CameraProviderManager::isConcurrentDynamicRangeCaptureSupported(staticInfo,
-                streamInfo.dynamicRangeProfile,
+    if (CameraProviderManager::isConcurrentDynamicRangeCaptureSupported(
+                staticInfo, dynamicRange,
                 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD)) {
         compositeOutput->push_back({});
         (*compositeOutput)[1].width = streamInfo.width;
diff --git a/services/camera/libcameraservice/api2/JpegRCompositeStream.h b/services/camera/libcameraservice/api2/JpegRCompositeStream.h
index 016d57c..6669739 100644
--- a/services/camera/libcameraservice/api2/JpegRCompositeStream.h
+++ b/services/camera/libcameraservice/api2/JpegRCompositeStream.h
@@ -43,6 +43,7 @@
     ~JpegRCompositeStream() override;
 
     static bool isJpegRCompositeStream(const sp<Surface> &surface);
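+    // Returns true if the stream info describes a Jpeg/R stream
+    // (BLOB format with the Jpeg/R dataspace).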
+    static bool isJpegRCompositeStreamInfo(const OutputStreamInfo& streamInfo);
 
     // CompositeStream overrides
     status_t createInternalStreams(const std::vector<sp<Surface>>& consumers,
diff --git a/services/camera/libcameraservice/utils/SessionConfigurationUtils.cpp b/services/camera/libcameraservice/utils/SessionConfigurationUtils.cpp
index a7a2b5e..40ca276 100644
--- a/services/camera/libcameraservice/utils/SessionConfigurationUtils.cpp
+++ b/services/camera/libcameraservice/utils/SessionConfigurationUtils.cpp
@@ -21,6 +21,7 @@
 #include "../api2/HeicCompositeStream.h"
 #include "aidl/android/hardware/graphics/common/Dataspace.h"
 #include "api2/JpegRCompositeStream.h"
+#include "binder/Status.h"
 #include "common/CameraDeviceBase.h"
 #include "common/HalConversionsTemplated.h"
 #include "../CameraService.h"
@@ -679,6 +680,67 @@
     stream->useCase = static_cast<StreamUseCases>(streamInfo.streamUseCase);
 }
 
+binder::Status mapStream(const OutputStreamInfo& streamInfo, bool isCompositeJpegRDisabled,
+        const CameraMetadata& deviceInfo, camera_stream_rotation_t rotation,
+        size_t* streamIdx/*out*/, const std::string &physicalId, int32_t groupId,
+        const std::string& logicalCameraId,
+        aidl::android::hardware::camera::device::StreamConfiguration &streamConfiguration /*out*/,
+        bool *earlyExit /*out*/) {
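+    // Composite streams (depth, HEIC, Jpeg/R) are expanded into the internal
+    // streams they require; all other streams map 1:1. *streamIdx is advanced
+    // for every entry written into streamConfiguration, and *earlyExit is set
+    // when a composite stream is not supported by the device.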
+    bool isDepthCompositeStream =
+            camera3::DepthCompositeStream::isDepthCompositeStreamInfo(streamInfo);
+    bool isHeicCompositeStream =
+            camera3::HeicCompositeStream::isHeicCompositeStreamInfo(streamInfo);
+    bool isJpegRCompositeStream =
+            camera3::JpegRCompositeStream::isJpegRCompositeStreamInfo(streamInfo) &&
+            !isCompositeJpegRDisabled;
+    if (isDepthCompositeStream || isHeicCompositeStream || isJpegRCompositeStream) {
+        // We need to take into account that composite streams can have
+        // additional internal camera streams.
+        std::vector<OutputStreamInfo> compositeStreams;
+        status_t ret;
+        if (isDepthCompositeStream) {
+            // TODO: Take care of composite streams.
+            ret = camera3::DepthCompositeStream::getCompositeStreamInfo(streamInfo,
+                    deviceInfo, &compositeStreams);
+        } else if (isHeicCompositeStream) {
+            ret = camera3::HeicCompositeStream::getCompositeStreamInfo(streamInfo,
+                deviceInfo, &compositeStreams);
+        } else {
+            ret = camera3::JpegRCompositeStream::getCompositeStreamInfo(streamInfo,
+                deviceInfo, &compositeStreams);
+        }
+
+        if (ret != OK) {
+            std::string msg = fmt::sprintf(
+                    "Camera %s: Failed adding composite streams: %s (%d)",
+                    logicalCameraId.c_str(), strerror(-ret), ret);
+            ALOGE("%s: %s", __FUNCTION__, msg.c_str());
+            return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
+        }
+
+        if (compositeStreams.size() == 0) {
+            // No internal streams means the composite stream is not supported.
+            *earlyExit = true;
+            return binder::Status::ok();
+        } else if (compositeStreams.size() > 1) {
+            size_t streamCount = streamConfiguration.streams.size() + compositeStreams.size() - 1;
+            streamConfiguration.streams.resize(streamCount);
+        }
+
+        for (const auto& compositeStream : compositeStreams) {
+            mapStreamInfo(compositeStream, rotation,
+                    physicalId, groupId,
+                    &streamConfiguration.streams[(*streamIdx)++]);
+        }
+    } else {
+        mapStreamInfo(streamInfo, rotation,
+                physicalId, groupId, &streamConfiguration.streams[(*streamIdx)++]);
+    }
+
+    return binder::Status::ok();
+}
+
 binder::Status
 convertToHALStreamCombination(
         const SessionConfiguration& sessionConfiguration,
@@ -831,8 +893,13 @@
                                 "Deferred surface sensor pixel modes not valid");
             }
             streamInfo.streamUseCase = streamUseCase;
-            mapStreamInfo(streamInfo, camera3::CAMERA_STREAM_ROTATION_0, physicalCameraId, groupId,
-                    &streamConfiguration.streams[streamIdx++]);
+            auto status = mapStream(streamInfo, isCompositeJpegRDisabled, deviceInfo,
+                    camera3::CAMERA_STREAM_ROTATION_0, &streamIdx, physicalCameraId, groupId,
+                    logicalCameraId, streamConfiguration, earlyExit);
+            if (*earlyExit || !status.isOk()) {
+                return status;
+            }
+
             isStreamInfoValid = true;
 
             if (numBufferProducers == 0) {
@@ -851,57 +918,11 @@
                 return res;
 
             if (!isStreamInfoValid) {
-                bool isDepthCompositeStream =
-                        camera3::DepthCompositeStream::isDepthCompositeStream(surface);
-                bool isHeicCompositeStream =
-                        camera3::HeicCompositeStream::isHeicCompositeStream(surface);
-                bool isJpegRCompositeStream =
-                        camera3::JpegRCompositeStream::isJpegRCompositeStream(surface) &&
-                        !isCompositeJpegRDisabled;
-                if (isDepthCompositeStream || isHeicCompositeStream || isJpegRCompositeStream) {
-                    // We need to take in to account that composite streams can have
-                    // additional internal camera streams.
-                    std::vector<OutputStreamInfo> compositeStreams;
-                    if (isDepthCompositeStream) {
-                      // TODO: Take care of composite streams.
-                        ret = camera3::DepthCompositeStream::getCompositeStreamInfo(streamInfo,
-                                deviceInfo, &compositeStreams);
-                    } else if (isHeicCompositeStream) {
-                        ret = camera3::HeicCompositeStream::getCompositeStreamInfo(streamInfo,
-                            deviceInfo, &compositeStreams);
-                    } else {
-                        ret = camera3::JpegRCompositeStream::getCompositeStreamInfo(streamInfo,
-                            deviceInfo, &compositeStreams);
-                    }
-
-                    if (ret != OK) {
-                        std::string msg = fmt::sprintf(
-                                "Camera %s: Failed adding composite streams: %s (%d)",
-                                logicalCameraId.c_str(), strerror(-ret), ret);
-                        ALOGE("%s: %s", __FUNCTION__, msg.c_str());
-                        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
-                    }
-
-                    if (compositeStreams.size() == 0) {
-                        // No internal streams means composite stream not
-                        // supported.
-                        *earlyExit = true;
-                        return binder::Status::ok();
-                    } else if (compositeStreams.size() > 1) {
-                        streamCount += compositeStreams.size() - 1;
-                        streamConfiguration.streams.resize(streamCount);
-                    }
-
-                    for (const auto& compositeStream : compositeStreams) {
-                        mapStreamInfo(compositeStream,
-                                static_cast<camera_stream_rotation_t> (it.getRotation()),
-                                physicalCameraId, groupId,
-                                &streamConfiguration.streams[streamIdx++]);
-                    }
-                } else {
-                    mapStreamInfo(streamInfo,
-                            static_cast<camera_stream_rotation_t> (it.getRotation()),
-                            physicalCameraId, groupId, &streamConfiguration.streams[streamIdx++]);
+                auto status = mapStream(streamInfo, isCompositeJpegRDisabled, deviceInfo,
+                        static_cast<camera_stream_rotation_t> (it.getRotation()), &streamIdx,
+                        physicalCameraId, groupId, logicalCameraId, streamConfiguration, earlyExit);
+                if (*earlyExit || !status.isOk()) {
+                    return status;
                 }
                 isStreamInfoValid = true;
             }
diff --git a/services/camera/virtualcamera/Android.bp b/services/camera/virtualcamera/Android.bp
index 90530f6..fc186fb 100644
--- a/services/camera/virtualcamera/Android.bp
+++ b/services/camera/virtualcamera/Android.bp
@@ -50,7 +50,6 @@
         "util/JpegUtil.cc",
         "util/MetadataUtil.cc",
         "util/Util.cc",
-        "util/TestPatternHelper.cc",
         "util/EglDisplayContext.cc",
         "util/EglFramebuffer.cc",
         "util/EglProgram.cc",
@@ -72,6 +71,7 @@
         "VirtualCameraStream.cc",
         "VirtualCameraService.cc",
         "VirtualCameraSessionContext.cc",
+        "VirtualCameraTestInstance.cc",
         "VirtualCameraRenderThread.cc",
     ],
     defaults: [
diff --git a/services/camera/virtualcamera/VirtualCameraDevice.cc b/services/camera/virtualcamera/VirtualCameraDevice.cc
index fe9e0ed..ba4ea6b 100644
--- a/services/camera/virtualcamera/VirtualCameraDevice.cc
+++ b/services/camera/virtualcamera/VirtualCameraDevice.cc
@@ -73,13 +73,14 @@
 
 constexpr int32_t kMaxJpegSize = 3 * 1024 * 1024 /*3MiB*/;
 
-constexpr int32_t kMinFps = 15;
-
 constexpr std::chrono::nanoseconds kMaxFrameDuration =
-    std::chrono::duration_cast<std::chrono::nanoseconds>(1e9ns / kMinFps);
+    std::chrono::duration_cast<std::chrono::nanoseconds>(
+        1e9ns / VirtualCameraDevice::kMinFps);
 
 constexpr uint8_t kPipelineMaxDepth = 2;
 
+constexpr int k30Fps = 30;
+
 constexpr MetadataBuilder::ControlRegion kDefaultEmptyControlRegion{};
 
 const std::array<Resolution, 5> kStandardJpegThumbnailSizes{
@@ -130,16 +131,20 @@
   std::set<FpsRange> availableRanges;
 
   for (const SupportedStreamConfiguration& config : configs) {
-    availableRanges.insert({.minFps = kMinFps, .maxFps = config.maxFps});
+    availableRanges.insert(
+        {.minFps = VirtualCameraDevice::kMinFps, .maxFps = config.maxFps});
     availableRanges.insert({.minFps = config.maxFps, .maxFps = config.maxFps});
   }
 
   if (std::any_of(configs.begin(), configs.end(),
                   [](const SupportedStreamConfiguration& config) {
-                    return config.maxFps >= 30;
+                    return config.maxFps >= k30Fps;
                   })) {
-    availableRanges.insert({.minFps = kMinFps, .maxFps = 30});
-    availableRanges.insert({.minFps = 30, .maxFps = 30});
+    // Extend the set of available ranges with a (minFps <= 15, 30) range and a
+    // (30, 30) range, as required by the CDD.
+    availableRanges.insert(
+        {.minFps = VirtualCameraDevice::kMinFps, .maxFps = k30Fps});
+    availableRanges.insert({.minFps = k30Fps, .maxFps = k30Fps});
   }
 
   return std::vector<FpsRange>(availableRanges.begin(), availableRanges.end());
diff --git a/services/camera/virtualcamera/VirtualCameraDevice.h b/services/camera/virtualcamera/VirtualCameraDevice.h
index cba0674..53dcc4d 100644
--- a/services/camera/virtualcamera/VirtualCameraDevice.h
+++ b/services/camera/virtualcamera/VirtualCameraDevice.h
@@ -126,6 +126,9 @@
   // Default JPEG orientation.
   static constexpr uint8_t kDefaultJpegOrientation = 0;
 
+  // Lowest min fps advertised in supported fps ranges.
+  static constexpr int kMinFps = 1;
+
   // Default Make and Model for Exif
   static constexpr char kDefaultMakeAndModel[] = "Android Virtual Camera";
 
diff --git a/services/camera/virtualcamera/VirtualCameraRenderThread.cc b/services/camera/virtualcamera/VirtualCameraRenderThread.cc
index f5cf092..9a5bd1e 100644
--- a/services/camera/virtualcamera/VirtualCameraRenderThread.cc
+++ b/services/camera/virtualcamera/VirtualCameraRenderThread.cc
@@ -51,7 +51,6 @@
 #include "util/EglFramebuffer.h"
 #include "util/JpegUtil.h"
 #include "util/MetadataUtil.h"
-#include "util/TestPatternHelper.h"
 #include "util/Util.h"
 #include "utils/Errors.h"
 
@@ -278,6 +277,16 @@
   return app1Data;
 }
 
+std::chrono::nanoseconds getMaxFrameDuration(
+    const RequestSettings& requestSettings) {
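+  // The longest we can wait for a new input frame: 1 / minFps of the requested
+  // fps range, falling back to 1 / VirtualCameraDevice::kMinFps.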
+  if (requestSettings.fpsRange.has_value()) {
+    return std::chrono::nanoseconds(static_cast<uint64_t>(
+        1e9 / std::max(1, requestSettings.fpsRange->minFps)));
+  }
+  return std::chrono::nanoseconds(
+      static_cast<uint64_t>(1e9 / VirtualCameraDevice::kMinFps));
+}
+
 }  // namespace
 
 CaptureRequestBuffer::CaptureRequestBuffer(int streamId, int bufferId,
@@ -300,12 +309,12 @@
 VirtualCameraRenderThread::VirtualCameraRenderThread(
     VirtualCameraSessionContext& sessionContext,
     const Resolution inputSurfaceSize, const Resolution reportedSensorSize,
-    std::shared_ptr<ICameraDeviceCallback> cameraDeviceCallback, bool testMode)
+    std::shared_ptr<ICameraDeviceCallback> cameraDeviceCallback)
     : mCameraDeviceCallback(cameraDeviceCallback),
       mInputSurfaceSize(inputSurfaceSize),
       mReportedSensorSize(reportedSensorSize),
-      mTestMode(testMode),
-      mSessionContext(sessionContext) {
+      mSessionContext(sessionContext),
+      mInputSurfaceFuture(mInputSurfacePromise.get_future()) {
 }
 
 VirtualCameraRenderThread::~VirtualCameraRenderThread() {
@@ -365,7 +374,7 @@
 }
 
 sp<Surface> VirtualCameraRenderThread::getInputSurface() {
-  return mInputSurfacePromise.get_future().get();
+  return mInputSurfaceFuture.get();
 }
 
 std::unique_ptr<ProcessCaptureRequestTask>
@@ -401,10 +410,6 @@
   mEglSurfaceTexture = std::make_unique<EglSurfaceTexture>(
       mInputSurfaceSize.width, mInputSurfaceSize.height);
 
-  sp<Surface> inputSurface = mEglSurfaceTexture->getSurface();
-  if (mTestMode) {
-    inputSurface->connect(NATIVE_WINDOW_API_CPU, false, nullptr);
-  }
   mInputSurfacePromise.set_value(mEglSurfaceTexture->getSurface());
 
   while (std::unique_ptr<ProcessCaptureRequestTask> task = dequeueTask()) {
@@ -422,16 +427,10 @@
 
 void VirtualCameraRenderThread::processCaptureRequest(
     const ProcessCaptureRequestTask& request) {
-  if (mTestMode) {
-    // In test mode let's just render something to the Surface ourselves.
-    renderTestPatternYCbCr420(mEglSurfaceTexture->getSurface(),
-                              request.getFrameNumber());
-  }
-
   std::chrono::nanoseconds timestamp =
       std::chrono::duration_cast<std::chrono::nanoseconds>(
           std::chrono::steady_clock::now().time_since_epoch());
-  std::chrono::nanoseconds lastAcquisitionTimestamp(
+  const std::chrono::nanoseconds lastAcquisitionTimestamp(
       mLastAcquisitionTimestampNanoseconds.exchange(timestamp.count(),
                                                     std::memory_order_relaxed));
 
@@ -461,6 +460,29 @@
     }
   }
 
+  // Calculate the maximal amount of time we can afford to wait for next frame.
+  const std::chrono::nanoseconds maxFrameDuration =
+      getMaxFrameDuration(request.getRequestSettings());
+  const std::chrono::nanoseconds elapsedDuration =
+      timestamp - lastAcquisitionTimestamp;
+  if (elapsedDuration < maxFrameDuration) {
+    // We can afford to wait for next frame.
+    // Note that if there's already a new frame in the input Surface, the call
+    // below returns immediately.
+    bool gotNewFrame = mEglSurfaceTexture->waitForNextFrame(maxFrameDuration -
+                                                            elapsedDuration);
+    timestamp = std::chrono::duration_cast<std::chrono::nanoseconds>(
+        std::chrono::steady_clock::now().time_since_epoch());
+    if (!gotNewFrame) {
+      ALOGV(
+          "%s: No new frame received on input surface after waiting for "
+          "%" PRIu64 "ns, repeating last frame.",
+          __func__,
+          static_cast<uint64_t>((timestamp - lastAcquisitionTimestamp).count()));
+    }
+    mLastAcquisitionTimestampNanoseconds.store(timestamp.count(),
+                                               std::memory_order_relaxed);
+  }
   // Acquire new (most recent) image from the Surface.
   mEglSurfaceTexture->updateTexture();
 
diff --git a/services/camera/virtualcamera/VirtualCameraRenderThread.h b/services/camera/virtualcamera/VirtualCameraRenderThread.h
index dfb6f7b..b23c30c 100644
--- a/services/camera/virtualcamera/VirtualCameraRenderThread.h
+++ b/services/camera/virtualcamera/VirtualCameraRenderThread.h
@@ -111,8 +111,7 @@
       Resolution reportedSensorSize,
       std::shared_ptr<
           ::aidl::android::hardware::camera::device::ICameraDeviceCallback>
-          cameraDeviceCallback,
-      bool testMode = false);
+          cameraDeviceCallback);
 
   ~VirtualCameraRenderThread();
 
@@ -184,7 +183,6 @@
 
   const Resolution mInputSurfaceSize;
   const Resolution mReportedSensorSize;
-  const int mTestMode;
 
   VirtualCameraSessionContext& mSessionContext;
 
@@ -206,6 +204,7 @@
   std::unique_ptr<EglSurfaceTexture> mEglSurfaceTexture;
 
   std::promise<sp<Surface>> mInputSurfacePromise;
+  std::shared_future<sp<Surface>> mInputSurfaceFuture;
 };
 
 }  // namespace virtualcamera
diff --git a/services/camera/virtualcamera/VirtualCameraService.cc b/services/camera/virtualcamera/VirtualCameraService.cc
index b5b07f0..724ec62 100644
--- a/services/camera/virtualcamera/VirtualCameraService.cc
+++ b/services/camera/virtualcamera/VirtualCameraService.cc
@@ -30,10 +30,12 @@
 
 #include "VirtualCameraDevice.h"
 #include "VirtualCameraProvider.h"
+#include "VirtualCameraTestInstance.h"
 #include "aidl/android/companion/virtualcamera/Format.h"
 #include "aidl/android/companion/virtualcamera/LensFacing.h"
 #include "aidl/android/companion/virtualcamera/VirtualCameraConfiguration.h"
 #include "android/binder_auto_utils.h"
+#include "android/binder_interface_utils.h"
 #include "android/binder_libbinder.h"
 #include "android/binder_status.h"
 #include "binder/Status.h"
@@ -64,6 +66,7 @@
 constexpr int kVgaWidth = 640;
 constexpr int kVgaHeight = 480;
 constexpr int kMaxFps = 60;
+constexpr int kTestCameraDefaultInputFps = 30;
 constexpr char kEnableTestCameraCmd[] = "enable_test_camera";
 constexpr char kDisableTestCameraCmd[] = "disable_test_camera";
 constexpr char kHelp[] = "help";
@@ -75,6 +78,7 @@
      Options:
        --camera_id=(ID) - override numerical ID for test camera instance
        --lens_facing=(front|back|external) - specifies lens facing for test camera instance
+       --input_fps=(fps) - specify input fps for test camera, valid values are from 1 to 1000
  * disable_test_camera
 )";
 constexpr char kCreateVirtualDevicePermission[] =
@@ -94,6 +98,13 @@
         Status::EX_ILLEGAL_ARGUMENT);
   }
 
+  if (configuration.virtualCameraCallback == nullptr) {
+    ALOGE("%s: Input configuration is missing virtual camera callback",
+          __func__);
+    return ndk::ScopedAStatus::fromServiceSpecificError(
+        Status::EX_ILLEGAL_ARGUMENT);
+  }
+
   for (const SupportedStreamConfiguration& config :
        configuration.supportedStreamConfigs) {
     if (!isFormatSupportedForInput(config.width, config.height,
@@ -411,6 +422,18 @@
     }
   }
 
+  std::optional<int> inputFps;
+  it = options.find("input_fps");
+  if (it != options.end()) {
+    inputFps = parseInt(it->second);
+    if (!inputFps.has_value() || inputFps.value() < 1 ||
+        inputFps.value() > 1000) {
+      dprintf(err, "Invalid input fps: %s, must be an integer in <1,1000> range.\n",
+              it->second.c_str());
+      return STATUS_BAD_VALUE;
+    }
+  }
+
   sp<BBinder> token = sp<BBinder>::make();
   mTestCameraToken.set(AIBinder_fromPlatformBinder(token));
 
@@ -418,9 +441,12 @@
   VirtualCameraConfiguration configuration;
   configuration.supportedStreamConfigs.push_back({.width = kVgaWidth,
                                                   .height = kVgaHeight,
-                                                  Format::YUV_420_888,
+                                                  Format::RGBA_8888,
                                                   .maxFps = kMaxFps});
   configuration.lensFacing = lensFacing.value_or(LensFacing::EXTERNAL);
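+  // The test camera supplies its own frames: VirtualCameraTestInstance renders
+  // a test pattern to the surface it receives in onStreamConfigured() at the
+  // requested fps.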
+  configuration.virtualCameraCallback =
+      ndk::SharedRefBase::make<VirtualCameraTestInstance>(
+          inputFps.value_or(kTestCameraDefaultInputFps));
   registerCamera(mTestCameraToken, configuration, cameraId.value_or(sNextId++),
                  kDefaultDeviceId, &ret);
   if (ret) {
diff --git a/services/camera/virtualcamera/VirtualCameraSession.cc b/services/camera/virtualcamera/VirtualCameraSession.cc
index 28fa495..7f0adc3 100644
--- a/services/camera/virtualcamera/VirtualCameraSession.cc
+++ b/services/camera/virtualcamera/VirtualCameraSession.cc
@@ -66,7 +66,6 @@
 #include "util/EglProgram.h"
 #include "util/JpegUtil.h"
 #include "util/MetadataUtil.h"
-#include "util/TestPatternHelper.h"
 #include "util/Util.h"
 
 namespace android {
@@ -360,12 +359,9 @@
       return cameraStatus(Status::ILLEGAL_ARGUMENT);
     }
     if (mRenderThread == nullptr) {
-      // If there's no client callback, start camera in test mode.
-      const bool testMode = mVirtualCameraClientCallback == nullptr;
       mRenderThread = std::make_unique<VirtualCameraRenderThread>(
           mSessionContext, resolutionFromInputConfig(*inputConfig),
-          virtualCamera->getMaxInputResolution(), mCameraDeviceCallback,
-          testMode);
+          virtualCamera->getMaxInputResolution(), mCameraDeviceCallback);
       mRenderThread->start();
       inputSurface = mRenderThread->getInputSurface();
     }
diff --git a/services/camera/virtualcamera/VirtualCameraTestInstance.cc b/services/camera/virtualcamera/VirtualCameraTestInstance.cc
new file mode 100644
index 0000000..bae871d
--- /dev/null
+++ b/services/camera/virtualcamera/VirtualCameraTestInstance.cc
@@ -0,0 +1,154 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// #define LOG_NDEBUG 0
+#define LOG_TAG "VirtualCameraTestInstance"
+
+#include "VirtualCameraTestInstance.h"
+
+#include <atomic>
+#include <chrono>
+#include <memory>
+#include <mutex>
+#include <ratio>
+#include <thread>
+
+#include "GLES/gl.h"
+#include "android/binder_auto_utils.h"
+#include "android/native_window.h"
+#include "log/log.h"
+#include "util/EglDisplayContext.h"
+#include "util/EglProgram.h"
+
+namespace android {
+namespace companion {
+namespace virtualcamera {
+
+using ::aidl::android::companion::virtualcamera::Format;
+using ::aidl::android::view::Surface;
+using ::ndk::ScopedAStatus;
+
+namespace {
+
+std::shared_ptr<ANativeWindow> nativeWindowFromSurface(const Surface& surface) {
+  ANativeWindow* nativeWindow = surface.get();
+  if (nativeWindow != nullptr) {
+    ANativeWindow_acquire(nativeWindow);
+  }
+  return std::shared_ptr<ANativeWindow>(nativeWindow, ANativeWindow_release);
+}
+
+std::chrono::nanoseconds getCurrentTimestamp() {
+  return std::chrono::duration_cast<std::chrono::nanoseconds>(
+      std::chrono::steady_clock::now().time_since_epoch());
+}
+
+}  // namespace
+
+TestPatternRenderer::TestPatternRenderer(
+    std::shared_ptr<ANativeWindow> nativeWindow, int fps)
+    : mFps(fps), mNativeWindow(nativeWindow) {
+}
+
+void TestPatternRenderer::start() {
+  std::lock_guard<std::mutex> lock(mLock);
+  if (mRunning.exchange(true, std::memory_order_relaxed)) {
+    ALOGW("Render thread already started.");
+    return;
+  }
+  mThread =
+      std::thread(&TestPatternRenderer::renderThreadLoop, this, mNativeWindow);
+}
+
+void TestPatternRenderer::stop() {
+  std::lock_guard<std::mutex> lock(mLock);
+  if (!mRunning.exchange(false, std::memory_order_relaxed)) {
+    ALOGW("Render thread already stopped.");
+    return;
+  }
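+  // The render thread holds a shared_ptr to this instance (see
+  // renderThreadLoop), so it is safe to detach and let it exit on its own.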
+  mThread.detach();
+  mRunning = false;
+}
+
+void TestPatternRenderer::renderThreadLoop(
+    std::shared_ptr<ANativeWindow> nativeWindow) {
+  // Prevent destruction of this instance until the thread terminates.
+  std::shared_ptr<TestPatternRenderer> thiz = shared_from_this();
+
+  ALOGV("Starting test client render loop");
+
+  EglDisplayContext eglDisplayContext(nativeWindow);
+  EglTestPatternProgram testPatternProgram;
+
+  const std::chrono::nanoseconds frameDuration(
+      static_cast<uint64_t>(1e9 / mFps));
+
+  std::chrono::nanoseconds lastFrameTs(0);
+  int frameNumber = 0;
+  while (mRunning) {
+    // Wait for appropriate amount of time to meet configured FPS.
+    std::chrono::nanoseconds ts = getCurrentTimestamp();
+    std::chrono::nanoseconds currentDuration = ts - lastFrameTs;
+    if (currentDuration < frameDuration) {
+      std::this_thread::sleep_for(frameDuration - currentDuration);
+    }
+
+    // Render the test pattern and update timestamp.
+    testPatternProgram.draw(frameNumber++);
+    eglDisplayContext.swapBuffers();
+    lastFrameTs = getCurrentTimestamp();
+  }
+
+  ALOGV("Terminating test client render loop");
+}
+
+VirtualCameraTestInstance::VirtualCameraTestInstance(const int fps)
+    : mFps(fps) {
+}
+
+ScopedAStatus VirtualCameraTestInstance::onStreamConfigured(
+    const int32_t streamId, const Surface& surface, const int32_t width,
+    const int32_t height, const Format pixelFormat) {
+  ALOGV("%s: streamId %d, %dx%d pixFmt=%s", __func__, streamId, width, height,
+        toString(pixelFormat).c_str());
+
+  std::lock_guard<std::mutex> lock(mLock);
+  mRenderer = std::make_shared<TestPatternRenderer>(
+      nativeWindowFromSurface(surface), mFps);
+  mRenderer->start();
+
+  return ScopedAStatus::ok();
+}
+
+ScopedAStatus VirtualCameraTestInstance::onProcessCaptureRequest(
+    const int32_t /*in_streamId*/, const int32_t /*in_frameId*/) {
+  return ScopedAStatus::ok();
+}
+
+ScopedAStatus VirtualCameraTestInstance::onStreamClosed(const int32_t streamId) {
+  ALOGV("%s: streamId %d", __func__, streamId);
+
+  std::lock_guard<std::mutex> lock(mLock);
+  if (mRenderer != nullptr) {
+    mRenderer->stop();
+    mRenderer.reset();
+  }
+  return ScopedAStatus::ok();
+}
+
+}  // namespace virtualcamera
+}  // namespace companion
+}  // namespace android
diff --git a/services/camera/virtualcamera/VirtualCameraTestInstance.h b/services/camera/virtualcamera/VirtualCameraTestInstance.h
new file mode 100644
index 0000000..43e33d5
--- /dev/null
+++ b/services/camera/virtualcamera/VirtualCameraTestInstance.h
@@ -0,0 +1,90 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#ifndef ANDROID_COMPANION_VIRTUALCAMERA_VIRTUALCAMERATESTINSTANCE_H
+#define ANDROID_COMPANION_VIRTUALCAMERA_VIRTUALCAMERATESTINSTANCE_H
+
+#include <atomic>
+#include <condition_variable>
+#include <memory>
+#include <thread>
+
+#include "aidl/android/companion/virtualcamera/BnVirtualCameraCallback.h"
+#include "android/native_window.h"
+#include "utils/Mutex.h"
+
+namespace android {
+namespace companion {
+namespace virtualcamera {
+
+// Wraps a render loop running in a dedicated thread, rendering a test pattern
+// to the provided Surface (a.k.a. native window) at the configured FPS.
+class TestPatternRenderer
+    : public std::enable_shared_from_this<TestPatternRenderer> {
+ public:
+  TestPatternRenderer(std::shared_ptr<ANativeWindow> nativeWindow, int fps);
+
+  // Start rendering.
+  void start() EXCLUDES(mLock);
+
+  // Stop rendering.
+  // The call returns immediately; the render thread might take some time
+  // (up to one frame) to finish rendering and terminate.
+  void stop() EXCLUDES(mLock);
+
+ private:
+  // Render thread entry point.
+  void renderThreadLoop(std::shared_ptr<ANativeWindow> nativeWindow);
+
+  const int mFps;
+
+  std::shared_ptr<ANativeWindow> mNativeWindow;
+
+  std::mutex mLock;
+  std::atomic_bool mRunning = false;
+  std::thread mThread GUARDED_BY(mLock);
+};
+
+// VirtualCamera callback implementation for test camera.
+//
+// For every stream configuration call, starts rendering a test pattern to the
+// provided surface.
+class VirtualCameraTestInstance
+    : public aidl::android::companion::virtualcamera::BnVirtualCameraCallback {
+ public:
+  explicit VirtualCameraTestInstance(int fps = 30);
+
+  ::ndk::ScopedAStatus onStreamConfigured(
+      int32_t streamId, const ::aidl::android::view::Surface& surface,
+      int32_t width, int32_t height,
+      ::aidl::android::companion::virtualcamera::Format pixelFormat) override
+      EXCLUDES(mLock);
+
+  ::ndk::ScopedAStatus onProcessCaptureRequest(int32_t in_streamId,
+                                               int32_t in_frameId) override;
+
+  ::ndk::ScopedAStatus onStreamClosed(int32_t streamId) override EXCLUDES(mLock);
+
+ private:
+  const int mFps;
+
+  std::mutex mLock;
+  std::shared_ptr<TestPatternRenderer> mRenderer GUARDED_BY(mLock);
+};
+
+}  // namespace virtualcamera
+}  // namespace companion
+}  // namespace android
+
+#endif  // ANDROID_COMPANION_VIRTUALCAMERA_VIRTUALCAMERATESTINSTANCE_H
diff --git a/services/camera/virtualcamera/main.cc b/services/camera/virtualcamera/main.cc
index 43b0219..3db9d9c 100644
--- a/services/camera/virtualcamera/main.cc
+++ b/services/camera/virtualcamera/main.cc
@@ -38,7 +38,7 @@
 }  // namespace
 
 int main() {
-  ALOGI("CameraProvider: virtual webcam service is starting.");
+  ALOGI("virtual_camera service is starting.");
 
   ABinderProcess_setThreadPoolMaxThreadCount(HWBINDER_THREAD_COUNT);
 
diff --git a/services/camera/virtualcamera/tests/EglUtilTest.cc b/services/camera/virtualcamera/tests/EglUtilTest.cc
index 589e312..813be75 100644
--- a/services/camera/virtualcamera/tests/EglUtilTest.cc
+++ b/services/camera/virtualcamera/tests/EglUtilTest.cc
@@ -55,6 +55,11 @@
 };
 
 TEST_F(EglTest, EglTestPatternProgramSuccessfulInit) {
+  if (!isGlExtensionSupported(kGlExtYuvTarget)) {
+    GTEST_SKIP() << "Skipping test because of missing required GL extension "
+                 << kGlExtYuvTarget;
+  }
+
   EglTestPatternProgram eglTestPatternProgram;
 
   // Verify the shaders compiled and linked successfully.
diff --git a/services/camera/virtualcamera/tests/VirtualCameraServiceTest.cc b/services/camera/virtualcamera/tests/VirtualCameraServiceTest.cc
index 5927b05..50977d8 100644
--- a/services/camera/virtualcamera/tests/VirtualCameraServiceTest.cc
+++ b/services/camera/virtualcamera/tests/VirtualCameraServiceTest.cc
@@ -15,6 +15,7 @@
  */
 
 #include <algorithm>
+#include <cstdint>
 #include <cstdio>
 #include <iterator>
 #include <memory>
@@ -75,6 +76,17 @@
 
 const VirtualCameraConfiguration kEmptyVirtualCameraConfiguration;
 
+class MockVirtualCameraCallback : public BnVirtualCameraCallback {
+ public:
+  MOCK_METHOD(ndk::ScopedAStatus, onStreamConfigured,
+              (int32_t, const ::aidl::android::view::Surface&, int, int,
+               ::aidl::android::companion::virtualcamera::Format pixelFormat),
+              (override));
+  MOCK_METHOD(ndk::ScopedAStatus, onProcessCaptureRequest, (int32_t, int32_t),
+              (override));
+  MOCK_METHOD(ndk::ScopedAStatus, onStreamClosed, (int32_t), (override));
+};
+
 VirtualCameraConfiguration createConfiguration(const int width, const int height,
                                                const Format format,
                                                const int maxFps) {
@@ -85,6 +97,8 @@
                                                   .maxFps = maxFps});
   configuration.sensorOrientation = kSensorOrientation;
   configuration.lensFacing = kLensFacing;
+  configuration.virtualCameraCallback =
+      ndk::SharedRefBase::make<MockVirtualCameraCallback>();
   return configuration;
 }
 
@@ -247,6 +261,24 @@
   EXPECT_THAT(getCameraIds(), IsEmpty());
 }
 
+TEST_F(VirtualCameraServiceTest,
+       ConfigurationWithoutVirtualCameraCallbackFails) {
+  sp<BBinder> token = sp<BBinder>::make();
+  ndk::SpAIBinder ndkToken(AIBinder_fromPlatformBinder(token));
+  bool aidlRet;
+
+  VirtualCameraConfiguration config =
+      createConfiguration(kVgaWidth, kVgaHeight, Format::RGBA_8888, kMaxFps);
+  config.virtualCameraCallback = nullptr;
+
+  ASSERT_FALSE(mCameraService
+                   ->registerCamera(ndkToken, config, kDefaultDeviceId, &aidlRet)
+                   .isOk());
+
+  EXPECT_FALSE(aidlRet);
+  EXPECT_THAT(getCameraIds(), IsEmpty());
+}
+
 TEST_F(VirtualCameraServiceTest, ConfigurationWithUnsupportedPixelFormatFails) {
   bool aidlRet;
 
@@ -456,6 +488,23 @@
               Eq(STATUS_BAD_VALUE));
 }
 
+TEST_F(VirtualCameraServiceTest, TestCameraShellCmdWithInputFps) {
+  EXPECT_THAT(execute_shell_command("enable_test_camera --input_fps=15"),
+              Eq(NO_ERROR));
+
+  std::vector<std::string> cameraIds = getCameraIds();
+  ASSERT_THAT(cameraIds, SizeIs(1));
+}
+
+TEST_F(VirtualCameraServiceTest, TestCameraShellCmdWithInvalidInputFps) {
+  EXPECT_THAT(execute_shell_command("enable_test_camera --input_fps=1001"),
+              Eq(STATUS_BAD_VALUE));
+  EXPECT_THAT(execute_shell_command("enable_test_camera --input_fps=0"),
+              Eq(STATUS_BAD_VALUE));
+  EXPECT_THAT(execute_shell_command("enable_test_camera --input_fps=foo"),
+              Eq(STATUS_BAD_VALUE));
+}
+
 }  // namespace
 }  // namespace virtualcamera
 }  // namespace companion
diff --git a/services/camera/virtualcamera/util/EglDisplayContext.cc b/services/camera/virtualcamera/util/EglDisplayContext.cc
index 6d343a2..166ac75 100644
--- a/services/camera/virtualcamera/util/EglDisplayContext.cc
+++ b/services/camera/virtualcamera/util/EglDisplayContext.cc
@@ -30,8 +30,9 @@
 namespace companion {
 namespace virtualcamera {
 
-EglDisplayContext::EglDisplayContext()
+EglDisplayContext::EglDisplayContext(std::shared_ptr<ANativeWindow> nativeWindow)
     : mEglDisplay(EGL_NO_DISPLAY),
+      mEglSurface(EGL_NO_SURFACE),
       mEglContext(EGL_NO_CONTEXT),
       mEglConfig(nullptr) {
   EGLBoolean result;
@@ -52,8 +53,10 @@
 
   EGLint numConfigs = 0;
   EGLint configAttribs[] = {
-      EGL_SURFACE_TYPE, EGL_PBUFFER_BIT, EGL_RENDERABLE_TYPE,
-      EGL_OPENGL_ES2_BIT, EGL_RED_SIZE, 8, EGL_GREEN_SIZE, 8, EGL_BLUE_SIZE, 8,
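+      // Request a window-renderable config when a native window is provided,
+      // otherwise fall back to an off-screen pbuffer config.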
+      EGL_SURFACE_TYPE,
+      nativeWindow == nullptr ? EGL_PBUFFER_BIT : EGL_WINDOW_BIT,
+      EGL_RENDERABLE_TYPE, EGL_OPENGL_ES2_BIT, EGL_RED_SIZE, 8, EGL_GREEN_SIZE,
+      8, EGL_BLUE_SIZE, 8,
       // no alpha
       EGL_NONE};
 
@@ -72,6 +75,14 @@
     return;
   }
 
+  if (nativeWindow != nullptr) {
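+    // Create a window surface backed by the provided native window so that
+    // swapBuffers() can post rendered frames to it.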
+    mEglSurface = eglCreateWindowSurface(mEglDisplay, mEglConfig,
+                                         nativeWindow.get(), NULL);
+    if (mEglSurface == EGL_NO_SURFACE) {
+      ALOGE("eglCreateWindowSurface error: %#x", eglGetError());
+    }
+  }
+
   if (!makeCurrent()) {
     ALOGE(
         "Failed to set newly initialized EGLContext and EGLDisplay connection "
@@ -82,6 +93,9 @@
 }
 
 EglDisplayContext::~EglDisplayContext() {
+  if (mEglSurface != EGL_NO_SURFACE) {
+    eglDestroySurface(mEglDisplay, mEglSurface);
+  }
   if (mEglDisplay != EGL_NO_DISPLAY) {
     eglTerminate(mEglDisplay);
   }
@@ -99,8 +113,14 @@
   return mEglContext != EGL_NO_CONTEXT && mEglDisplay != EGL_NO_DISPLAY;
 }
 
+void EglDisplayContext::swapBuffers() const {
+  if (mEglSurface != EGL_NO_SURFACE) {
+    eglSwapBuffers(mEglDisplay, mEglSurface);
+  }
+}
+
 bool EglDisplayContext::makeCurrent() {
-  if (!eglMakeCurrent(mEglDisplay, EGL_NO_SURFACE, EGL_NO_SURFACE, mEglContext)) {
+  if (!eglMakeCurrent(mEglDisplay, mEglSurface, mEglSurface, mEglContext)) {
     ALOGE("eglMakeCurrent failed: %#x", eglGetError());
     return false;
   }
diff --git a/services/camera/virtualcamera/util/EglDisplayContext.h b/services/camera/virtualcamera/util/EglDisplayContext.h
index 402ca3c..6dc3080 100644
--- a/services/camera/virtualcamera/util/EglDisplayContext.h
+++ b/services/camera/virtualcamera/util/EglDisplayContext.h
@@ -17,7 +17,10 @@
 #ifndef ANDROID_COMPANION_VIRTUALCAMERA_EGLDISPLAYCONTEXT_H
 #define ANDROID_COMPANION_VIRTUALCAMERA_EGLDISPLAYCONTEXT_H
 
+#include <memory>
+
 #include "EGL/egl.h"
+#include "system/window.h"
 
 namespace android {
 namespace companion {
@@ -30,7 +33,7 @@
 // out of scope.
 class EglDisplayContext {
  public:
-  EglDisplayContext();
+  EglDisplayContext(std::shared_ptr<ANativeWindow> nativeWindow = nullptr);
   ~EglDisplayContext();
 
   // Sets EGLDisplay & EGLContext for current thread.
@@ -44,8 +47,13 @@
   // EGLDisplay & EGLContext.
   bool isInitialized() const;
 
+  void swapBuffers() const;
+
  private:
+  std::shared_ptr<ANativeWindow> mNativeWindow;
+
   EGLDisplay mEglDisplay;
+  EGLSurface mEglSurface;
   EGLContext mEglContext;
   EGLConfig mEglConfig;
 };
diff --git a/services/camera/virtualcamera/util/EglProgram.cc b/services/camera/virtualcamera/util/EglProgram.cc
index 7554a67..7498fbc 100644
--- a/services/camera/virtualcamera/util/EglProgram.cc
+++ b/services/camera/virtualcamera/util/EglProgram.cc
@@ -35,19 +35,28 @@
 
 constexpr char kGlExtYuvTarget[] = "GL_EXT_YUV_target";
 
-constexpr char kIdentityVertexShader[] = R"(
-    attribute vec4 vPosition;
+constexpr char kJuliaFractalVertexShader[] = R"(#version 300 es
+    in vec4 aPosition;
+    in vec2 aTextureCoord;
+    out vec2 vFractalCoord;
+    out vec2 vUVCoord;
     void main() {
-      gl_Position = vPosition;
+      gl_Position = aPosition;
+      vUVCoord = aTextureCoord;
+      vFractalCoord = vec2(aTextureCoord.x - 0.5, aTextureCoord.y - 0.5) * 4.0;
     })";
 
-constexpr char kJuliaFractalFragmentShader[] = R"(
+constexpr char kJuliaFractalFragmentShader[] = R"(#version 300 es
+    #extension GL_EXT_YUV_target : require
     precision mediump float;
-    uniform vec2 uResolution;
-    uniform vec2 uC;
-    uniform vec2 uUV;
+
     const float kIter = 64.0;
 
+    in vec2 vFractalCoord;
+    in vec2 vUVCoord;
+    out vec4 fragColor;
+    uniform vec2 uC;
+
     vec2 imSq(vec2 n){
       return vec2(pow(n.x,2.0)-pow(n.y,2.0), 2.0*n.x*n.y);
     }
@@ -62,9 +71,8 @@
     }
 
     void main() {
-      vec2 uv = vec2(gl_FragCoord.x / uResolution.x - 0.5, gl_FragCoord.y / uResolution.y - 0.5);
-      float juliaVal = julia(uv * 4.0, uC);
-      gl_FragColor = vec4( juliaVal,uUV.x,uUV.y,0.0);
+      float juliaVal = julia(vFractalCoord, uC);
+      fragColor = vec4(yuv_2_rgb(vec3(juliaVal, vUVCoord.x, vUVCoord.y), itu_601_full_range), 0.0);
     })";
 
 constexpr char kExternalTextureVertexShader[] = R"(#version 300 es
@@ -89,7 +97,7 @@
     })";
 
 constexpr char kExternalRgbaTextureFragmentShader[] = R"(#version 300 es
-    #extension GL_OES_EGL_image_external : require
+    #extension GL_OES_EGL_image_external_essl3 : require
     #extension GL_EXT_YUV_target : require
     precision mediump float;
     in vec2 vTextureCoord;
@@ -200,17 +208,28 @@
 }
 
 EglTestPatternProgram::EglTestPatternProgram() {
-  if (initialize(kIdentityVertexShader, kJuliaFractalFragmentShader)) {
+  if (initialize(kJuliaFractalVertexShader, kJuliaFractalFragmentShader)) {
     ALOGV("Successfully initialized EGL shaders for test pattern program.");
   } else {
     ALOGE("Test pattern EGL shader program initialization failed.");
   }
+
+  mCHandle = glGetUniformLocation(mProgram, "uC");
+  mPositionHandle = glGetAttribLocation(mProgram, "aPosition");
+  mTextureCoordHandle = glGetAttribLocation(mProgram, "aTextureCoord");
+
+  // Pass vertex array to draw.
+  glEnableVertexAttribArray(mPositionHandle);
+  // Prepare the triangle coordinate data.
+  glVertexAttribPointer(mPositionHandle, kCoordsPerVertex, GL_FLOAT, false,
+                        kSquareCoords.size(), kSquareCoords.data());
+
+  glEnableVertexAttribArray(mTextureCoordHandle);
+  glVertexAttribPointer(mTextureCoordHandle, 2, GL_FLOAT, false,
+                        kTextureCoords.size(), kTextureCoords.data());
 }
 
-bool EglTestPatternProgram::draw(int width, int height, int frameNumber) {
-  glViewport(0, 0, static_cast<GLsizei>(width), static_cast<GLsizei>(height));
-  checkEglError("glViewport");
-
+bool EglTestPatternProgram::draw(int frameNumber) {
   // Load compiled shader.
   glUseProgram(mProgram);
   checkEglError("glUseProgram");
@@ -219,28 +238,8 @@
   float time = float(frameNumber) / 120.0f;
   const std::complex<float> c(std::sin(time) * 0.78f, std::cos(time) * 0.78f);
 
-  // Pass uniform values to the shader.
-  int resolutionHandle = glGetUniformLocation(mProgram, "uResolution");
-  checkEglError("glGetUniformLocation -> uResolution");
-  glUniform2f(resolutionHandle, static_cast<float>(width),
-              static_cast<float>(height));
-  checkEglError("glUniform2f -> uResolution");
-
   // Pass "C" constant value determining the Julia set to the shader.
-  int cHandle = glGetUniformLocation(mProgram, "uC");
-  glUniform2f(cHandle, c.imag(), c.real());
-
-  // Pass chroma value to the shader.
-  int uvHandle = glGetUniformLocation(mProgram, "uUV");
-  glUniform2f(uvHandle, (c.imag() + 1.f) / 2.f, (c.real() + 1.f) / 2.f);
-
-  // Pass vertex array to draw.
-  int positionHandle = glGetAttribLocation(mProgram, "vPosition");
-  glEnableVertexAttribArray(positionHandle);
-
-  // Prepare the triangle coordinate data.
-  glVertexAttribPointer(positionHandle, kCoordsPerVertex, GL_FLOAT, false,
-                        kSquareCoords.size(), kSquareCoords.data());
+  glUniform2f(mCHandle, c.imag(), c.real());
 
   // Draw triangle strip forming a square filling the viewport.
   glDrawElements(GL_TRIANGLES, kDrawOrder.size(), GL_UNSIGNED_BYTE,
diff --git a/services/camera/virtualcamera/util/EglProgram.h b/services/camera/virtualcamera/util/EglProgram.h
index c695cbb..d09f11e 100644
--- a/services/camera/virtualcamera/util/EglProgram.h
+++ b/services/camera/virtualcamera/util/EglProgram.h
@@ -46,7 +46,12 @@
  public:
   EglTestPatternProgram();
 
-  bool draw(int width, int height, int frameNumber);
+  bool draw(int frameNumber);
+
+ private:
+  int mPositionHandle = -1;
+  int mTextureCoordHandle = -1;
+  int mCHandle = -1;
 };
 
 // Shader program to  draw texture.
diff --git a/services/camera/virtualcamera/util/EglSurfaceTexture.cc b/services/camera/virtualcamera/util/EglSurfaceTexture.cc
index 9f26e19..7de5020 100644
--- a/services/camera/virtualcamera/util/EglSurfaceTexture.cc
+++ b/services/camera/virtualcamera/util/EglSurfaceTexture.cc
@@ -15,6 +15,7 @@
  */
 
 // #define LOG_NDEBUG 0
+#include "utils/Timers.h"
 #define LOG_TAG "EglSurfaceTexture"
 
 #include <cstdint>
@@ -63,6 +64,11 @@
   return mGlConsumer->getCurrentBuffer();
 }
 
+bool EglSurfaceTexture::waitForNextFrame(const std::chrono::nanoseconds timeout) {
+  return mSurface->waitForNextFrame(mGlConsumer->getFrameNumber(),
+                                    static_cast<nsecs_t>(timeout.count()));
+}
+
 GLuint EglSurfaceTexture::updateTexture() {
   mGlConsumer->updateTexImage();
   return mTextureId;
diff --git a/services/camera/virtualcamera/util/EglSurfaceTexture.h b/services/camera/virtualcamera/util/EglSurfaceTexture.h
index faad7c4..b9c5126 100644
--- a/services/camera/virtualcamera/util/EglSurfaceTexture.h
+++ b/services/camera/virtualcamera/util/EglSurfaceTexture.h
@@ -17,6 +17,7 @@
 #ifndef ANDROID_COMPANION_VIRTUALCAMERA_EGLSURFACETEXTURE_H
 #define ANDROID_COMPANION_VIRTUALCAMERA_EGLSURFACETEXTURE_H
 
+#include <chrono>
 #include <cstdint>
 
 #include "GLES/gl.h"
@@ -51,6 +52,12 @@
   // Get height of surface / texture.
   uint32_t getHeight() const;
 
+  // Wait until timeout for the next frame to become available in the surface.
+  //
+  // Returns false on timeout, true if a new frame was received before the timeout.
+  bool waitForNextFrame(std::chrono::nanoseconds timeout);
+
   // Update the texture with the most recent submitted buffer.
   // Must be called on a thread with an EGL context.
   //
diff --git a/services/camera/virtualcamera/util/TestPatternHelper.cc b/services/camera/virtualcamera/util/TestPatternHelper.cc
deleted file mode 100644
index 274996a..0000000
--- a/services/camera/virtualcamera/util/TestPatternHelper.cc
+++ /dev/null
@@ -1,152 +0,0 @@
-/*
- * Copyright (C) 2023 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-// #define LOG_NDEBUG 0
-
-#define LOG_TAG "TestPatternHelper"
-
-#include "TestPatternHelper.h"
-
-#include <complex>
-#include <cstdint>
-
-#include "log/log.h"
-#include "nativebase/nativebase.h"
-#include "system/graphics.h"
-#include "ui/GraphicBuffer.h"
-#include "utils/Errors.h"
-
-namespace android {
-namespace companion {
-namespace virtualcamera {
-
-namespace {
-
-using namespace std::chrono_literals;
-
-static constexpr std::chrono::milliseconds kAcquireFenceTimeout = 500ms;
-
-uint8_t julia(const std::complex<float> n, const std::complex<float> c) {
-  std::complex<float> z = n;
-  for (int i = 0; i < 64; i++) {
-    z = z * z + c;
-    if (std::abs(z) > 2.0) return i * 4;
-  }
-  return 0xff;
-}
-
-uint8_t pixelToFractal(const int x, const int y, const int width,
-                       const int height, const std::complex<float> c) {
-  std::complex<float> n(float(x) / float(width) - 0.5,
-                        float(y) / float(height) - 0.5);
-  return julia(n * 5.f, c);
-}
-
-void renderTestPatternYcbCr420(const android_ycbcr& ycbr, const int width,
-                               const int height, const int frameNumber) {
-  float time = float(frameNumber) / 120.0f;
-  const std::complex<float> c(std::sin(time), std::cos(time));
-
-  uint8_t* y = reinterpret_cast<uint8_t*>(ycbr.y);
-  uint8_t* cb = reinterpret_cast<uint8_t*>(ycbr.cb);
-  uint8_t* cr = reinterpret_cast<uint8_t*>(ycbr.cr);
-
-  for (int row = 0; row < height; row++) {
-    for (int col = 0; col < width; col++) {
-      y[row * ycbr.ystride + col] =
-          pixelToFractal(col, row, width, height, c * 0.78f);
-    }
-  }
-
-  int cWidth = width / 2;
-  int cHeight = height / 2;
-  for (int row = 0; row < cHeight; row++) {
-    for (int col = 0; col < cWidth; col++) {
-      cb[row * ycbr.cstride + col * ycbr.chroma_step] =
-          static_cast<uint8_t>((float(col) / float(cWidth)) * 255.f);
-      cr[row * ycbr.cstride + col * ycbr.chroma_step] =
-          static_cast<uint8_t>((float(row) / float(cHeight)) * 255.f);
-    }
-  }
-}
-
-}  // namespace
-
-void renderTestPatternYCbCr420(sp<Surface> surface, int frameNumber) {
-  if (surface == nullptr) {
-    ALOGE("%s: null surface, skipping render", __func__);
-    return;
-  }
-
-  ANativeWindowBuffer* buffer;
-  int fenceFd;
-  int ret = ANativeWindow_dequeueBuffer(surface.get(), &buffer, &fenceFd);
-  if (ret != NO_ERROR) {
-    ALOGE(
-        "%s: Error while deuqueing buffer from surface, "
-        "ANativeWindow_dequeueBuffer returned %d",
-        __func__, ret);
-    return;
-  }
-
-  if (buffer == nullptr) {
-    ALOGE("%s: ANativeWindowBuffer is null after dequeing", __func__);
-    return;
-  }
-
-  sp<Fence> fence = sp<Fence>::make(fenceFd);
-  if (fence->isValid()) {
-    ret = fence->wait(kAcquireFenceTimeout.count());
-    if (ret != NO_ERROR) {
-      ALOGE("%s: Timeout while waiting for the fence to clear", __func__);
-      ANativeWindow_queueBuffer(surface.get(), buffer, fence->dup());
-      return;
-    }
-  }
-
-  sp<GraphicBuffer> gBuffer = GraphicBuffer::from(buffer);
-  android_ycbcr ycbr;
-
-  ret = gBuffer->lockAsyncYCbCr(GraphicBuffer::USAGE_SW_WRITE_OFTEN, &ycbr,
-                                fence->dup());
-  if (ret != NO_ERROR) {
-    ALOGE("%s: Failed to lock buffer retrieved from surface, ret %d", __func__,
-          ret);
-    return;
-  }
-
-  renderTestPatternYcbCr420(ycbr, gBuffer->getWidth(), gBuffer->getHeight(),
-                            frameNumber);
-
-  ret = gBuffer->unlock();
-  if (ret != NO_ERROR) {
-    ALOGE("%s: Failed to unlock buffer, ret %d", __func__, ret);
-    return;
-  }
-
-  ret = ANativeWindow_queueBuffer(surface.get(), buffer, /*fenceFd=*/-1);
-  if (ret != NO_ERROR) {
-    ALOGE(
-        "%s: Error while queing buffer to surface, ANativeWindow_queueBuffer "
-        "returned %d",
-        __func__, ret);
-    return;
-  }
-}
-
-}  // namespace virtualcamera
-}  // namespace companion
-}  // namespace android
diff --git a/services/camera/virtualcamera/util/TestPatternHelper.h b/services/camera/virtualcamera/util/TestPatternHelper.h
deleted file mode 100644
index f842b29..0000000
--- a/services/camera/virtualcamera/util/TestPatternHelper.h
+++ /dev/null
@@ -1,33 +0,0 @@
-/*
- * Copyright (C) 2023 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef ANDROID_COMPANION_VIRTUALCAMERA_TESTPATTERNHELPER_H
-#define ANDROID_COMPANION_VIRTUALCAMERA_TESTPATTERNHELPER_H
-
-#include "gui/Surface.h"
-
-namespace android {
-namespace companion {
-namespace virtualcamera {
-
-// Helper function for rendering test pattern into Surface.
-void renderTestPatternYCbCr420(sp<Surface> surface, int frameNumber);
-
-}  // namespace virtualcamera
-}  // namespace companion
-}  // namespace android
-
-#endif  // ANDROID_COMPANION_VIRTUALCAMERA_TESTPATTERNHELPER_H
diff --git a/services/mediametrics/include/mediametricsservice/AudioTypes.h b/services/mediametrics/include/mediametricsservice/AudioTypes.h
index b5fe28b..59654bf 100644
--- a/services/mediametrics/include/mediametricsservice/AudioTypes.h
+++ b/services/mediametrics/include/mediametricsservice/AudioTypes.h
@@ -18,6 +18,7 @@
 
 #include <string>
 #include <unordered_map>
+#include <vector>
 
 namespace android::mediametrics::types {
 
diff --git a/services/oboeservice/AAudioServiceStreamBase.cpp b/services/oboeservice/AAudioServiceStreamBase.cpp
index adbfc21..4e46bbf 100644
--- a/services/oboeservice/AAudioServiceStreamBase.cpp
+++ b/services/oboeservice/AAudioServiceStreamBase.cpp
@@ -22,6 +22,7 @@
 #include <iostream>
 #include <mutex>
 
+#include <com_android_media_aaudio.h>
 #include <media/MediaMetricsItem.h>
 #include <media/TypeConverter.h>
 #include <mediautils/SchedulingPolicyService.h>
@@ -219,7 +220,7 @@
     return closeAndClear();
 }
 
-aaudio_result_t AAudioServiceStreamBase::startDevice() {
+aaudio_result_t AAudioServiceStreamBase::startDevice_l() {
     mClientHandle = AUDIO_PORT_HANDLE_NONE;
     sp<AAudioServiceEndpoint> endpoint = mServiceEndpointWeak.promote();
     if (endpoint == nullptr) {
@@ -274,7 +275,7 @@
     mAtomicStreamTimestamp.clear();
 
     mClientHandle = AUDIO_PORT_HANDLE_NONE;
-    result = startDevice();
+    result = startDevice_l();
     if (result != AAUDIO_OK) goto error;
 
     // This should happen at the end of the start.
@@ -520,6 +521,18 @@
                                                        : exitStandby_l(param->mParcelable);
                     standbyTime = AudioClock::getNanoseconds() + IDLE_TIMEOUT_NANOS;
                 } break;
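+                // START_CLIENT/STOP_CLIENT are posted from sendStartClientCommand()
+                // and sendStopClientCommand() so the work runs on the command thread.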
+                case START_CLIENT: {
+                    auto param = (StartClientParam *) command->parameter.get();
+                    command->result = param == nullptr ? AAUDIO_ERROR_ILLEGAL_ARGUMENT
+                                                       : startClient_l(param->mClient,
+                                                                       param->mAttr,
+                                                                       param->mClientHandle);
+                } break;
+                case STOP_CLIENT: {
+                    auto param = (StopClientParam *) command->parameter.get();
+                    command->result = param == nullptr ? AAUDIO_ERROR_ILLEGAL_ARGUMENT
+                                                       : stopClient_l(param->mClientHandle);
+                } break;
                 default:
                     ALOGE("Invalid command op code: %d", command->operationCode);
                     break;
@@ -732,6 +745,26 @@
     return mCommandQueue.sendCommand(command);
 }
 
+aaudio_result_t AAudioServiceStreamBase::sendStartClientCommand(const android::AudioClient &client,
+                                                                const audio_attributes_t *attr,
+                                                                audio_port_handle_t *clientHandle) {
+    auto command = std::make_shared<AAudioCommand>(
+            START_CLIENT,
+            std::make_shared<StartClientParam>(client, attr, clientHandle),
+            true /*waitForReply*/,
+            TIMEOUT_NANOS);
+    return mCommandQueue.sendCommand(command);
+}
+
+aaudio_result_t AAudioServiceStreamBase::sendStopClientCommand(audio_port_handle_t clientHandle) {
+    auto command = std::make_shared<AAudioCommand>(
+            STOP_CLIENT,
+            std::make_shared<StopClientParam>(clientHandle),
+            true /*waitForReply*/,
+            TIMEOUT_NANOS);
+    return mCommandQueue.sendCommand(command);
+}
+
 void AAudioServiceStreamBase::onVolumeChanged(float volume) {
     sendServiceEvent(AAUDIO_SERVICE_EVENT_VOLUME, volume);
 }
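
The hunks above route START_CLIENT and STOP_CLIENT through the stream's command queue: the caller packages its arguments into a parameter object, enqueues a command with waitForReply set, and blocks until the command thread has executed the locked implementation and stored the result. The stand-alone sketch below mirrors only that general pattern under simplified assumptions; every name in it (sketch::CommandQueue, Command, the placeholder startClient_l/stopClient_l, and so on) is invented for illustration and none of it is the real AAudio code.

    #include <condition_variable>
    #include <cstdint>
    #include <deque>
    #include <memory>
    #include <mutex>

    namespace sketch {

    enum OpCode : int32_t { START_CLIENT, STOP_CLIENT };

    struct Command {
        Command(OpCode op, int32_t arg) : opCode(op), argument(arg) {}
        const OpCode opCode;
        const int32_t argument;   // stands in for StartClientParam / StopClientParam
        int32_t result = 0;       // filled in by the command thread
        bool done = false;
        std::mutex lock;
        std::condition_variable cond;
    };

    class CommandQueue {
    public:
        // Called from any thread; blocks until the command thread has replied.
        int32_t sendCommand(const std::shared_ptr<Command>& command) {
            {
                std::lock_guard<std::mutex> guard(mLock);
                mCommands.push_back(command);
            }
            mWorkAvailable.notify_one();
            std::unique_lock<std::mutex> guard(command->lock);
            command->cond.wait(guard, [&] { return command->done; });
            return command->result;
        }

        // Body of the command thread: take one command, run the matching
        // locked implementation, then wake the sender with the result.
        void processOneCommand() {
            std::shared_ptr<Command> command;
            {
                std::unique_lock<std::mutex> guard(mLock);
                mWorkAvailable.wait(guard, [&] { return !mCommands.empty(); });
                command = mCommands.front();
                mCommands.pop_front();
            }
            int32_t result = 0;
            switch (command->opCode) {
                case START_CLIENT: result = startClient_l(command->argument); break;
                case STOP_CLIENT:  result = stopClient_l(command->argument);  break;
            }
            std::lock_guard<std::mutex> guard(command->lock);
            command->result = result;
            command->done = true;
            command->cond.notify_all();
        }

    private:
        // Placeholder "locked" work; the real service forwards to the endpoint.
        int32_t startClient_l(int32_t clientHandle) { return clientHandle; }
        int32_t stopClient_l(int32_t clientHandle)  { return clientHandle; }

        std::mutex mLock;
        std::condition_variable mWorkAvailable;
        std::deque<std::shared_ptr<Command>> mCommands;
    };

    }  // namespace sketch

A dedicated worker thread looping on processOneCommand() plays the role the stream's command thread plays in the change above.
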
diff --git a/services/oboeservice/AAudioServiceStreamBase.h b/services/oboeservice/AAudioServiceStreamBase.h
index 96a6d44..8057f87 100644
--- a/services/oboeservice/AAudioServiceStreamBase.h
+++ b/services/oboeservice/AAudioServiceStreamBase.h
@@ -279,7 +279,7 @@
      * Device specific startup.
      * @return AAUDIO_OK or negative error.
      */
-    virtual aaudio_result_t startDevice();
+    virtual aaudio_result_t startDevice_l() REQUIRES(mLock);
 
     aaudio_result_t writeUpMessageQueue(AAudioServiceMessage *command)
             EXCLUDES(mUpMessageQueueLock);
@@ -288,6 +288,12 @@
 
     aaudio_result_t sendXRunCount(int32_t xRunCount);
 
+    aaudio_result_t sendStartClientCommand(const android::AudioClient& client,
+                                           const audio_attributes_t *attr,
+                                           audio_port_handle_t *clientHandle) EXCLUDES(mLock);
+
+    aaudio_result_t sendStopClientCommand(audio_port_handle_t clientHandle) EXCLUDES(mLock);
+
     /**
      * @param positionFrames
      * @param timeNanos
@@ -342,6 +348,40 @@
     }
     virtual void reportData_l() REQUIRES(mLock) { return; }
 
+    class StartClientParam : public AAudioCommandParam {
+    public:
+        StartClientParam(const android::AudioClient& client, const audio_attributes_t* attr,
+                         audio_port_handle_t* clientHandle)
+                : AAudioCommandParam(), mClient(client), mAttr(attr), mClientHandle(clientHandle) {
+        }
+        ~StartClientParam() override = default;
+
+        android::AudioClient mClient;
+        const audio_attributes_t* mAttr;
+        audio_port_handle_t* mClientHandle;
+    };
+    virtual aaudio_result_t startClient_l(
+            const android::AudioClient& client,
+            const audio_attributes_t *attr __unused,
+            audio_port_handle_t *clientHandle __unused) REQUIRES(mLock) {
+        ALOGD("AAudioServiceStreamBase::startClient_l(%p, ...) AAUDIO_ERROR_UNAVAILABLE", &client);
+        return AAUDIO_ERROR_UNAVAILABLE;
+    }
+
+    class StopClientParam : public AAudioCommandParam {
+    public:
+        explicit StopClientParam(audio_port_handle_t clientHandle)
+                : AAudioCommandParam(), mClientHandle(clientHandle) {
+        }
+        ~StopClientParam() override = default;
+
+        audio_port_handle_t mClientHandle;
+    };
+    virtual aaudio_result_t stopClient_l(audio_port_handle_t clientHandle) REQUIRES(mLock) {
+        ALOGD("AAudioServiceStreamBase::stopClient_l(%d) AAUDIO_ERROR_UNAVAILABLE", clientHandle);
+        return AAUDIO_ERROR_UNAVAILABLE;
+    }
+
     pid_t                   mRegisteredClientThread = ILLEGAL_THREAD_ID;
 
     std::mutex              mUpMessageQueueLock;
@@ -358,6 +398,8 @@
         UNREGISTER_AUDIO_THREAD,
         GET_DESCRIPTION,
         EXIT_STANDBY,
+        START_CLIENT,
+        STOP_CLIENT,
     };
     AAudioThread            mCommandThread;
     std::atomic_bool        mThreadEnabled{false};
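
The header changes above follow the existing convention: an _l suffix plus REQUIRES(mLock) for methods that must run with the stream lock held, and EXCLUDES(mLock) on the public senders that must not already hold it. The self-contained sketch below shows how such clang thread-safety annotations are wired up and checked (build with clang++ -Wthread-safety); the macro definitions and the SketchMutex/SketchStream names are illustrative stand-ins, not the Android headers.

    #include <mutex>

    // Minimal stand-ins for the thread-safety annotation macros.
    #define CAPABILITY(x)   __attribute__((capability(x)))
    #define GUARDED_BY(x)   __attribute__((guarded_by(x)))
    #define REQUIRES(...)   __attribute__((requires_capability(__VA_ARGS__)))
    #define EXCLUDES(...)   __attribute__((locks_excluded(__VA_ARGS__)))
    #define ACQUIRE(...)    __attribute__((acquire_capability(__VA_ARGS__)))
    #define RELEASE(...)    __attribute__((release_capability(__VA_ARGS__)))

    // A mutex wrapper the analysis can reason about.
    class CAPABILITY("mutex") SketchMutex {
    public:
        void lock() ACQUIRE() { mImpl.lock(); }
        void unlock() RELEASE() { mImpl.unlock(); }
    private:
        std::mutex mImpl;
    };

    class SketchStream {
    public:
        // Public entry point: must be called WITHOUT mLock; takes it itself.
        int startClient(int handle) EXCLUDES(mLock) {
            mLock.lock();
            const int result = startClient_l(handle);
            mLock.unlock();
            return result;
        }

    private:
        // Locked implementation: the caller must already hold mLock, which is
        // exactly what the _l suffix documents in the service headers.
        int startClient_l(int handle) REQUIRES(mLock) {
            mClientHandle = handle;
            return 0;
        }

        SketchMutex mLock;
        int mClientHandle GUARDED_BY(mLock) = -1;
    };

With these annotations, calling startClient_l() from a path that does not hold mLock, or touching mClientHandle outside the lock, produces a compile-time warning instead of a latent race.
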
diff --git a/services/oboeservice/AAudioServiceStreamMMAP.cpp b/services/oboeservice/AAudioServiceStreamMMAP.cpp
index 89f6e33..fc53949 100644
--- a/services/oboeservice/AAudioServiceStreamMMAP.cpp
+++ b/services/oboeservice/AAudioServiceStreamMMAP.cpp
@@ -24,6 +24,7 @@
 #include <iostream>
 #include <stdint.h>
 
+#include <com_android_media_aaudio.h>
 #include <utils/String16.h>
 #include <media/nbaio/AudioStreamOutSink.h>
 #include <media/MmapStreamInterface.h>
@@ -83,11 +84,12 @@
 }
 
 // Start the flow of data.
-aaudio_result_t AAudioServiceStreamMMAP::startDevice() {
-    aaudio_result_t result = AAudioServiceStreamBase::startDevice();
+aaudio_result_t AAudioServiceStreamMMAP::startDevice_l() {
+    aaudio_result_t result = AAudioServiceStreamBase::startDevice_l();
     if (!mInService && result == AAUDIO_OK) {
         // Note that this can sometimes take 200 to 300 msec for a cold start!
-        result = startClient(mMmapClient, nullptr /*const audio_attributes_t* */, &mClientHandle);
+        result = startClient_l(
+                mMmapClient, nullptr /*const audio_attributes_t* */, &mClientHandle);
     }
     return result;
 }
@@ -100,7 +102,7 @@
     aaudio_result_t result = AAudioServiceStreamBase::pause_l();
     // TODO put before base::pause()?
     if (!mInService) {
-        (void) stopClient(mClientHandle);
+        (void) stopClient_l(mClientHandle);
     }
     return result;
 }
@@ -112,7 +114,7 @@
     aaudio_result_t result = AAudioServiceStreamBase::stop_l();
     // TODO put before base::stop()?
     if (!mInService) {
-        (void) stopClient(mClientHandle);
+        (void) stopClient_l(mClientHandle);
     }
     return result;
 }
@@ -149,6 +151,37 @@
 aaudio_result_t AAudioServiceStreamMMAP::startClient(const android::AudioClient& client,
                                                      const audio_attributes_t *attr,
                                                      audio_port_handle_t *clientHandle) {
+    if (com::android::media::aaudio::start_stop_client_from_command_thread()) {
+        return sendStartClientCommand(client, attr, clientHandle);
+    } else {
+        sp<AAudioServiceEndpoint> endpoint = mServiceEndpointWeak.promote();
+        if (endpoint == nullptr) {
+            ALOGE("%s() has no endpoint", __func__);
+            return AAUDIO_ERROR_INVALID_STATE;
+        }
+        // Start the client on behalf of the application. Generate a new port handle.
+        aaudio_result_t result = endpoint->startClient(client, attr, clientHandle);
+        return result;
+    }
+}
+
+aaudio_result_t AAudioServiceStreamMMAP::stopClient(audio_port_handle_t clientHandle) {
+    if (com::android::media::aaudio::start_stop_client_from_command_thread()) {
+        return sendStopClientCommand(clientHandle);
+    } else {
+        sp<AAudioServiceEndpoint> endpoint = mServiceEndpointWeak.promote();
+        if (endpoint == nullptr) {
+            ALOGE("%s() has no endpoint", __func__);
+            return AAUDIO_ERROR_INVALID_STATE;
+        }
+        aaudio_result_t result = endpoint->stopClient(clientHandle);
+        return result;
+    }
+}
+
+aaudio_result_t AAudioServiceStreamMMAP::startClient_l(const android::AudioClient& client,
+                                                       const audio_attributes_t *attr,
+                                                       audio_port_handle_t *clientHandle) {
     sp<AAudioServiceEndpoint> endpoint = mServiceEndpointWeak.promote();
     if (endpoint == nullptr) {
         ALOGE("%s() has no endpoint", __func__);
@@ -159,7 +192,7 @@
     return result;
 }
 
-aaudio_result_t AAudioServiceStreamMMAP::stopClient(audio_port_handle_t clientHandle) {
+aaudio_result_t AAudioServiceStreamMMAP::stopClient_l(audio_port_handle_t clientHandle) {
     sp<AAudioServiceEndpoint> endpoint = mServiceEndpointWeak.promote();
     if (endpoint == nullptr) {
         ALOGE("%s() has no endpoint", __func__);
diff --git a/services/oboeservice/AAudioServiceStreamMMAP.h b/services/oboeservice/AAudioServiceStreamMMAP.h
index 42032d7..f4ce83d 100644
--- a/services/oboeservice/AAudioServiceStreamMMAP.h
+++ b/services/oboeservice/AAudioServiceStreamMMAP.h
@@ -93,7 +93,13 @@
      * Device specific startup.
      * @return AAUDIO_OK or negative error.
      */
-    aaudio_result_t startDevice() override;
+    aaudio_result_t startDevice_l() REQUIRES(mLock) override;
+
+    aaudio_result_t startClient_l(const android::AudioClient& client,
+                                  const audio_attributes_t *attr,
+                                  audio_port_handle_t *clientHandle) REQUIRES(mLock) override;
+
+    aaudio_result_t stopClient_l(audio_port_handle_t clientHandle) REQUIRES(mLock) override;
 
 private: