Merge "Enable global effect on Spatializer thread" into main
diff --git a/cmds/screenrecord/FrameOutput.cpp b/cmds/screenrecord/FrameOutput.cpp
index ee7ace6..6388518 100644
--- a/cmds/screenrecord/FrameOutput.cpp
+++ b/cmds/screenrecord/FrameOutput.cpp
@@ -15,11 +15,14 @@
  */
 
 #define LOG_TAG "ScreenRecord"
-//#define LOG_NDEBUG 0
-#include <utils/Log.h>
 
+//#define LOG_NDEBUG 0
+
+#include <com_android_graphics_libgui_flags.h>
+#include <gui/Surface.h>
 #include <GLES2/gl2.h>
 #include <GLES2/gl2ext.h>
+#include <utils/Log.h>
 
 #include "FrameOutput.h"
 
@@ -67,11 +70,17 @@
         return UNKNOWN_ERROR;
     }
 
+#if COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
+    mGlConsumer = new GLConsumer(mExtTextureName, GL_TEXTURE_EXTERNAL_OES, /*useFenceSync=*/true,
+                                 /*isControlledByApp=*/false);
+    auto producer = mGlConsumer->getSurface()->getIGraphicBufferProducer();
+#else
     sp<IGraphicBufferProducer> producer;
     sp<IGraphicBufferConsumer> consumer;
     BufferQueue::createBufferQueue(&producer, &consumer);
     mGlConsumer = new GLConsumer(consumer, mExtTextureName,
                 GL_TEXTURE_EXTERNAL_OES, true, false);
+#endif  // COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
     mGlConsumer->setName(String8("virtual display"));
     mGlConsumer->setDefaultBufferSize(width, height);
     producer->setMaxDequeuedBufferCount(4);
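
Reviewer note: the WB_CONSUMER_BASE_OWNS_BQ migration in this hunk is the same pattern applied to Overlay.cpp, stagefright.cpp, the C2 VTS decode test and CameraSource.cpp further down. A minimal standalone sketch of the two paths, using only the APIs visible in this patch (illustration, not part of the change):

    #include <GLES2/gl2.h>
    #include <GLES2/gl2ext.h>
    #include <com_android_graphics_libgui_flags.h>
    #include <gui/BufferQueue.h>
    #include <gui/GLConsumer.h>
    #include <gui/Surface.h>

    using namespace android;

    // Create a GLConsumer and obtain the matching producer under either flag state.
    static void makeGlConsumer(uint32_t texName, sp<GLConsumer>* outConsumer,
                               sp<IGraphicBufferProducer>* outProducer) {
    #if COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
        // New path: the consumer creates and owns its BufferQueue; the producer is
        // reached through the consumer's Surface.
        *outConsumer = new GLConsumer(texName, GL_TEXTURE_EXTERNAL_OES,
                                      /*useFenceSync=*/true, /*isControlledByApp=*/false);
        *outProducer = (*outConsumer)->getSurface()->getIGraphicBufferProducer();
    #else
        // Legacy path: the caller wires up both ends of the BufferQueue explicitly.
        sp<IGraphicBufferConsumer> consumer;
        BufferQueue::createBufferQueue(outProducer, &consumer);
        *outConsumer = new GLConsumer(consumer, texName, GL_TEXTURE_EXTERNAL_OES,
                                      /*useFenceSync=*/true, /*isControlledByApp=*/false);
    #endif
    }
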
diff --git a/cmds/screenrecord/Overlay.cpp b/cmds/screenrecord/Overlay.cpp
index a19ef8e..727f16a 100644
--- a/cmds/screenrecord/Overlay.cpp
+++ b/cmds/screenrecord/Overlay.cpp
@@ -172,10 +172,16 @@
         return UNKNOWN_ERROR;
     }
 
+#if COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
+    mGlConsumer = new GLConsumer(mExtTextureName, GL_TEXTURE_EXTERNAL_OES, /*useFenceSync=*/true,
+                                 /*isControlledByApp=*/false);
+    mProducer = mGlConsumer->getSurface()->getIGraphicBufferProducer();
+#else
     sp<IGraphicBufferConsumer> consumer;
     BufferQueue::createBufferQueue(&mProducer, &consumer);
     mGlConsumer = new GLConsumer(consumer, mExtTextureName,
                 GL_TEXTURE_EXTERNAL_OES, true, false);
+#endif  // COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
     mGlConsumer->setName(String8("virtual display"));
     mGlConsumer->setDefaultBufferSize(width, height);
     mProducer->setMaxDequeuedBufferCount(4);
diff --git a/cmds/screenrecord/screenrecord.cpp b/cmds/screenrecord/screenrecord.cpp
index 03c765a..de925b8 100644
--- a/cmds/screenrecord/screenrecord.cpp
+++ b/cmds/screenrecord/screenrecord.cpp
@@ -362,13 +362,26 @@
         const ui::DisplayState& displayState,
         const sp<IGraphicBufferProducer>& bufferProducer,
         sp<IBinder>* pDisplayHandle, sp<SurfaceControl>* mirrorRoot) {
-    static const std::string kDisplayName("ScreenRecorder");
+    std::string displayName = gPhysicalDisplayId
+      ? "ScreenRecorder " + to_string(*gPhysicalDisplayId)
+      : "ScreenRecorder";
+    static const std::string kDisplayName(displayName);
+
     sp<IBinder> dpy = SurfaceComposerClient::createVirtualDisplay(kDisplayName, gSecureDisplay);
     SurfaceComposerClient::Transaction t;
     t.setDisplaySurface(dpy, bufferProducer);
     setDisplayProjection(t, dpy, displayState);
+
+    // Ensure the randomly assigned layer stack of the virtual display changes
+    // between calls. If a list of displays with their layer stacks becomes
+    // available, it should be used to guarantee a new layer stack here.
+    std::srand(
+        std::chrono::duration_cast<std::chrono::milliseconds>(
+            std::chrono::system_clock::now().time_since_epoch())
+            .count());
     ui::LayerStack layerStack = ui::LayerStack::fromValue(std::rand());
     t.setDisplayLayerStack(dpy, layerStack);
+
     PhysicalDisplayId displayId;
     status_t err = getPhysicalDisplayId(displayId);
     if (err != NO_ERROR) {
@@ -1224,6 +1237,8 @@
         "    see \"dumpsys SurfaceFlinger --display-id\" for valid display IDs.\n"
         "--verbose\n"
         "    Display interesting information on stdout.\n"
+        "--version\n"
+        "    Show Android screenrecord version.\n"
         "--help\n"
         "    Show this message.\n"
         "\n"
@@ -1255,6 +1270,7 @@
         { "bframes",            required_argument,  NULL, 'B' },
         { "display-id",         required_argument,  NULL, 'd' },
         { "capture-secure",     no_argument,        NULL, 'S' },
+        { "version",            no_argument,        NULL, 'x' },
         { NULL,                 0,                  NULL, 0 }
     };
 
@@ -1377,6 +1393,9 @@
         case 'S':
             gSecureDisplay = true;
             break;
+        case 'x':
+            fprintf(stderr, "%d.%d\n", kVersionMajor, kVersionMinor);
+            return 0;
         default:
             if (ic != '?') {
                 fprintf(stderr, "getopt_long returned unexpected value 0x%x\n", ic);
diff --git a/cmds/screenrecord/screenrecord.h b/cmds/screenrecord/screenrecord.h
index cec7c13..57826b0 100644
--- a/cmds/screenrecord/screenrecord.h
+++ b/cmds/screenrecord/screenrecord.h
@@ -18,6 +18,6 @@
 #define SCREENRECORD_SCREENRECORD_H
 
 #define kVersionMajor 1
-#define kVersionMinor 3
+#define kVersionMinor 4
 
 #endif /*SCREENRECORD_SCREENRECORD_H*/
diff --git a/cmds/stagefright/stagefright.cpp b/cmds/stagefright/stagefright.cpp
index f26e3a8..1a6e5e8 100644
--- a/cmds/stagefright/stagefright.cpp
+++ b/cmds/stagefright/stagefright.cpp
@@ -60,6 +60,7 @@
 
 #include <private/media/VideoFrame.h>
 
+#include <com_android_graphics_libgui_flags.h>
 #include <gui/GLConsumer.h>
 #include <gui/Surface.h>
 #include <gui/SurfaceComposerClient.h>
@@ -1133,7 +1134,12 @@
             CHECK(gSurface != NULL);
         } else {
             CHECK(useSurfaceTexAlloc);
-
+#if COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
+            sp<GLConsumer> texture =
+                    new GLConsumer(0 /* tex */, GLConsumer::TEXTURE_EXTERNAL,
+                                   true /* useFenceSync */, false /* isControlledByApp */);
+            gSurface = texture->getSurface();
+#else
             sp<IGraphicBufferProducer> producer;
             sp<IGraphicBufferConsumer> consumer;
             BufferQueue::createBufferQueue(&producer, &consumer);
@@ -1141,6 +1147,7 @@
                     GLConsumer::TEXTURE_EXTERNAL, true /* useFenceSync */,
                     false /* isControlledByApp */);
             gSurface = new Surface(producer);
+#endif  // COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
         }
     }
 
diff --git a/drm/libmediadrmrkp/Android.bp b/drm/libmediadrmrkp/Android.bp
index b1a01e4..88f0571 100644
--- a/drm/libmediadrmrkp/Android.bp
+++ b/drm/libmediadrmrkp/Android.bp
@@ -14,6 +14,7 @@
     ],
     static_libs: [
         "android.hardware.common-V2-ndk",
+        "android.hardware.drm.common-V1-ndk",
         "android.hardware.drm-V1-ndk",
         "android.hardware.security.rkp-V3-ndk",
         "libbase",
@@ -39,6 +40,7 @@
     ],
     static_libs: [
         "android.hardware.common-V2-ndk",
+        "android.hardware.drm.common-V1-ndk",
         "android.hardware.drm-V1-ndk",
         "android.hardware.security.rkp-V3-ndk",
         "libbase",
diff --git a/drm/mediadrm/plugins/clearkey/aidl/Android.bp b/drm/mediadrm/plugins/clearkey/aidl/Android.bp
index 8e8e57d..1bb3da6 100644
--- a/drm/mediadrm/plugins/clearkey/aidl/Android.bp
+++ b/drm/mediadrm/plugins/clearkey/aidl/Android.bp
@@ -40,6 +40,7 @@
 
     static_libs: [
         "android.hardware.common-V2-ndk",
+        "android.hardware.drm.common-V1-ndk",
         "android.hardware.drm-V1-ndk",
         "libbase",
         "libclearkeybase",
diff --git a/media/aconfig/codec_fwk.aconfig b/media/aconfig/codec_fwk.aconfig
index d662585..362e98e 100644
--- a/media/aconfig/codec_fwk.aconfig
+++ b/media/aconfig/codec_fwk.aconfig
@@ -133,3 +133,10 @@
   description: "Feature flag to track teamfood population"
   bug: "328770262"
 }
+
+flag {
+  name: "thumbnail_block_model"
+  namespace: "codec_fwk"
+  description: "Feature flag for using block model decoder in thumbnail generation"
+  bug: "329521645"
+}
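
Reviewer note: a hedged sketch of how the new flag is typically read from native code. The generated accessor name depends on the package declared at the top of codec_fwk.aconfig (not shown in this hunk); the header below matches the <android_media_codec.h> include that FrameDecoder.cpp gains later in this patch, and the call shape mirrors how the audioflinger fuzzer below checks com::android::media::audioserver::portid_volume_management().

    #include <android_media_codec.h>  // aconfig-generated accessors (assumed package android.media.codec)

    // Sketch only: gate the block-model thumbnail path on the new flag.
    static bool useBlockModelForThumbnail() {
        // aconfig generates one bool function per flag, named after the flag.
        return android::media::codec::thumbnail_block_model();
    }
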
diff --git a/media/audioaidlconversion/AidlConversionNdk.cpp b/media/audioaidlconversion/AidlConversionNdk.cpp
index 7ab616a..5f7260d 100644
--- a/media/audioaidlconversion/AidlConversionNdk.cpp
+++ b/media/audioaidlconversion/AidlConversionNdk.cpp
@@ -197,7 +197,8 @@
 
 // static
 ConversionResult<SourceMetadata>
-legacy2aidl_SourceMetadata(const std::vector<playback_track_metadata_v7_t>& legacy) {
+legacy2aidl_playback_track_metadata_v7_SourceMetadata(
+        const std::vector<playback_track_metadata_v7_t>& legacy) {
     SourceMetadata aidl;
     aidl.tracks = VALUE_OR_RETURN(
             convertContainer<std::vector<PlaybackTrackMetadata>>(
diff --git a/media/audioaidlconversion/include/media/AidlConversionNdk.h b/media/audioaidlconversion/include/media/AidlConversionNdk.h
index b5888b3..b8a3110 100644
--- a/media/audioaidlconversion/include/media/AidlConversionNdk.h
+++ b/media/audioaidlconversion/include/media/AidlConversionNdk.h
@@ -58,7 +58,8 @@
 legacy2aidl_record_track_metadata_v7_RecordTrackMetadata(const record_track_metadata_v7& legacy);
 
 ConversionResult<hardware::audio::common::SourceMetadata>
-legacy2aidl_SourceMetadata(const std::vector<playback_track_metadata_v7_t>& legacy);
+legacy2aidl_playback_track_metadata_v7_SourceMetadata(
+        const std::vector<playback_track_metadata_v7_t>& legacy);
 
 }  // namespace android
 }  // namespace aidl
diff --git a/media/codec2/hal/client/GraphicsTracker.cpp b/media/codec2/hal/client/GraphicsTracker.cpp
index e1193cc..bdfc409 100644
--- a/media/codec2/hal/client/GraphicsTracker.cpp
+++ b/media/codec2/hal/client/GraphicsTracker.cpp
@@ -34,7 +34,7 @@
 
 c2_status_t retrieveAHardwareBufferId(const C2ConstGraphicBlock &blk, uint64_t *bid) {
     std::shared_ptr<const _C2BlockPoolData> bpData = _C2BlockFactory::GetGraphicBlockPoolData(blk);
-    if (bpData->getType() != _C2BlockPoolData::TYPE_AHWBUFFER) {
+    if (!bpData || bpData->getType() != _C2BlockPoolData::TYPE_AHWBUFFER) {
         return C2_BAD_VALUE;
     }
     if (__builtin_available(android __ANDROID_API_T__, *)) {
@@ -824,6 +824,10 @@
     std::shared_ptr<BufferCache> cache;
     int slotId;
     sp<Fence> rFence;
+    if (mStopped.load() == true) {
+        ALOGE("cannot deallocate due to being stopped");
+        return C2_BAD_STATE;
+    }
     c2_status_t res = requestDeallocate(bid, fence, &completed, &updateDequeue,
                                         &cache, &slotId, &rFence);
     if (res != C2_OK) {
diff --git a/media/codec2/hal/client/include/codec2/hidl/output.h b/media/codec2/hal/client/include/codec2/hidl/output.h
index ddb9855..108f0a6 100644
--- a/media/codec2/hal/client/include/codec2/hidl/output.h
+++ b/media/codec2/hal/client/include/codec2/hidl/output.h
@@ -96,6 +96,7 @@
     uint64_t mBqId;
     int32_t mMaxDequeueBufferCount;
     std::shared_ptr<int> mOwner;
+    std::shared_ptr<int> mConsumerAttachCount;
     // To migrate existing buffers
     sp<GraphicBuffer> mBuffers[BufferQueueDefs::NUM_BUFFER_SLOTS]; // find a better way
     std::weak_ptr<_C2BlockPoolData> mPoolDatas[BufferQueueDefs::NUM_BUFFER_SLOTS];
diff --git a/media/codec2/hal/client/output.cpp b/media/codec2/hal/client/output.cpp
index 54d78a0..2eb381b 100644
--- a/media/codec2/hal/client/output.cpp
+++ b/media/codec2/hal/client/output.cpp
@@ -261,6 +261,7 @@
         mGeneration = generation;
         mBqId = bqId;
         mOwner = std::make_shared<int>(0);
+        mConsumerAttachCount = std::make_shared<int>(0);
         mMaxDequeueBufferCount = maxDequeueBufferCount;
         if (igbp == nullptr) {
             return false;
@@ -522,6 +523,7 @@
     std::shared_ptr<C2SurfaceSyncMemory> syncMem;
     sp<IGraphicBufferProducer> outputIgbp;
     uint32_t outputGeneration = 0;
+    std::shared_ptr<int> consumerAttachCount;
     {
         std::unique_lock<std::mutex> l(mMutex);
         if (mStopped) {
@@ -529,6 +531,7 @@
         }
         outputIgbp = mIgbp;
         outputGeneration = mGeneration;
+        consumerAttachCount = mConsumerAttachCount;
         syncMem = mSyncMem;
     }
 
@@ -536,15 +539,42 @@
         auto syncVar = syncMem ? syncMem->mem() : nullptr;
         if (syncVar) {
             syncVar->lock();
-            syncVar->notifyQueuedLocked();
+            if (consumerAttachCount && *consumerAttachCount > 0) {
+                (*consumerAttachCount)--;
+            } else {
+                syncVar->notifyQueuedLocked();
+            }
             syncVar->unlock();
         }
     }
 }
 
 void OutputBufferQueue::onBufferAttached(uint32_t generation) {
-    // TODO
-    (void) generation;
+    std::shared_ptr<C2SurfaceSyncMemory> syncMem;
+    sp<IGraphicBufferProducer> outputIgbp;
+    uint32_t outputGeneration = 0;
+    std::shared_ptr<int> consumerAttachCount;
+    {
+        std::unique_lock<std::mutex> l(mMutex);
+        if (mStopped) {
+            return;
+        }
+        outputIgbp = mIgbp;
+        outputGeneration = mGeneration;
+        consumerAttachCount = mConsumerAttachCount;
+        syncMem = mSyncMem;
+    }
+
+    if (outputIgbp && generation == outputGeneration) {
+        auto syncVar = syncMem ? syncMem->mem() : nullptr;
+        if (syncVar) {
+            syncVar->lock();
+            if (consumerAttachCount) {
+                (*consumerAttachCount)++;
+            }
+            syncVar->unlock();
+        }
+    }
 }
 
 void OutputBufferQueue::pollForRenderedFrames(FrameEventHistoryDelta* delta) {
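
Reviewer note: the new mConsumerAttachCount bookkeeping reads as "every consumer-side attach pre-pays one producer-side queue notification": onBufferAttached() increments the counter under the sync lock, and the next onBufferQueued() for the same generation decrements it instead of calling notifyQueuedLocked(). A stripped-down sketch of that invariant (hypothetical names, illustration only):

    #include <mutex>

    // Hypothetical illustration of the attach/queue accounting added in output.cpp.
    class AttachAwareNotifier {
    public:
        void onBufferAttached() {
            std::lock_guard<std::mutex> lock(mLock);
            ++mPendingAttaches;      // a future queued event is already accounted for
        }
        // Returns true when the caller should still emit the "queued" notification.
        bool onBufferQueued() {
            std::lock_guard<std::mutex> lock(mLock);
            if (mPendingAttaches > 0) {
                --mPendingAttaches;  // swallow one notification per prior attach
                return false;
            }
            return true;
        }
    private:
        std::mutex mLock;
        int mPendingAttaches = 0;
    };
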
diff --git a/media/codec2/hal/hidl/1.0/vts/functional/video/VtsHalMediaC2V1_0TargetVideoDecTest.cpp b/media/codec2/hal/hidl/1.0/vts/functional/video/VtsHalMediaC2V1_0TargetVideoDecTest.cpp
index 4a956f5..90d1874 100644
--- a/media/codec2/hal/hidl/1.0/vts/functional/video/VtsHalMediaC2V1_0TargetVideoDecTest.cpp
+++ b/media/codec2/hal/hidl/1.0/vts/functional/video/VtsHalMediaC2V1_0TargetVideoDecTest.cpp
@@ -31,6 +31,7 @@
 #include <C2Debug.h>
 #include <codec2/common/HalSelection.h>
 #include <codec2/hidl/client.h>
+#include <com_android_graphics_libgui_flags.h>
 #include <gui/BufferQueue.h>
 #include <gui/IConsumerListener.h>
 #include <gui/IProducerListener.h>
@@ -423,7 +424,6 @@
                       surfaceMode_t surfMode) {
     using namespace android;
     sp<IGraphicBufferProducer> producer = nullptr;
-    sp<IGraphicBufferConsumer> consumer = nullptr;
     sp<GLConsumer> texture = nullptr;
     sp<ANativeWindow> surface = nullptr;
     static std::atomic_uint32_t surfaceGeneration{0};
@@ -442,6 +442,16 @@
     }
 
     if (surfMode == SURFACE) {
+#if COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
+        texture = new GLConsumer(0 /* tex */, GLConsumer::TEXTURE_EXTERNAL, true /* useFenceSync */,
+                                 false /* isControlledByApp */);
+        sp<Surface> s = texture->getSurface();
+        surface = s;
+        ASSERT_NE(surface, nullptr) << "failed to create Surface object";
+
+        producer = s->getIGraphicBufferProducer();
+#else
+        sp<IGraphicBufferConsumer> consumer = nullptr;
         BufferQueue::createBufferQueue(&producer, &consumer);
         ASSERT_NE(producer, nullptr) << "createBufferQueue returned invalid producer";
         ASSERT_NE(consumer, nullptr) << "createBufferQueue returned invalid consumer";
@@ -452,6 +462,7 @@
 
         surface = new Surface(producer);
         ASSERT_NE(surface, nullptr) << "failed to create Surface object";
+#endif // COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
 
         producer->setGenerationNumber(generation);
     }
diff --git a/media/codec2/sfplugin/CCodec.cpp b/media/codec2/sfplugin/CCodec.cpp
index 373f86e..68f1dda 100644
--- a/media/codec2/sfplugin/CCodec.cpp
+++ b/media/codec2/sfplugin/CCodec.cpp
@@ -96,11 +96,14 @@
 
 public:
     static sp<CCodecWatchdog> getInstance() {
-        static sp<CCodecWatchdog> instance(new CCodecWatchdog);
-        static std::once_flag flag;
-        // Call Init() only once.
-        std::call_once(flag, Init, instance);
-        return instance;
+        static sp<CCodecWatchdog> sInstance = [] {
+            sp<CCodecWatchdog> instance = new CCodecWatchdog;
+            // the instance should never get destructed
+            instance->incStrong((void *)CCodecWatchdog::getInstance);
+            instance->init();
+            return instance;
+        }();
+        return sInstance;
     }
 
     ~CCodecWatchdog() = default;
@@ -146,11 +149,11 @@
 private:
     CCodecWatchdog() : mLooper(new ALooper) {}
 
-    static void Init(const sp<CCodecWatchdog> &thiz) {
-        ALOGV("Init");
-        thiz->mLooper->setName("CCodecWatchdog");
-        thiz->mLooper->registerHandler(thiz);
-        thiz->mLooper->start();
+    void init() {
+        ALOGV("init");
+        mLooper->setName("CCodecWatchdog");
+        mLooper->registerHandler(this);
+        mLooper->start();
     }
 
     sp<ALooper> mLooper;
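
Reviewer note: the getInstance() rewrite replaces the static instance plus std::call_once pair with a single function-local static initialized by a lambda, and the extra incStrong() deliberately leaks one strong reference so the watchdog can never be destructed during static teardown. A generic sketch of this idiom for RefBase-derived singletons (hypothetical class, illustration only):

    #include <utils/RefBase.h>

    using android::sp;

    class Watchdog : public android::RefBase {
    public:
        static sp<Watchdog> getInstance() {
            // Function-local statics are initialized exactly once, thread-safely (C++11).
            static sp<Watchdog> sInstance = [] {
                sp<Watchdog> instance = new Watchdog();
                // Deliberately leak one strong reference so the singleton outlives
                // every client and is never destructed during static teardown.
                instance->incStrong((void*)Watchdog::getInstance);
                instance->init();
                return instance;
            }();
            return sInstance;
        }
    private:
        Watchdog() = default;
        void init() { /* start looper/handler here */ }
    };
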
diff --git a/media/codec2/sfplugin/CCodecConfig.cpp b/media/codec2/sfplugin/CCodecConfig.cpp
index 36725ec..a943626 100644
--- a/media/codec2/sfplugin/CCodecConfig.cpp
+++ b/media/codec2/sfplugin/CCodecConfig.cpp
@@ -1896,7 +1896,8 @@
                 std::vector<C2QpOffsetRectStruct> c2QpOffsetRects;
                 char mutableStrQpOffsetRects[strlen(qpOffsetRects.c_str()) + 1];
                 strcpy(mutableStrQpOffsetRects, qpOffsetRects.c_str());
-                char* box = strtok(mutableStrQpOffsetRects, ";");
+                char* savePtr;
+                char* box = strtok_r(mutableStrQpOffsetRects, ";", &savePtr);
                 while (box != nullptr) {
                     int top, left, bottom, right, offset;
                     if (sscanf(box, "%d,%d-%d,%d=%d", &top, &left, &bottom, &right, &offset) == 5) {
@@ -1914,7 +1915,7 @@
                     } else {
                         ALOGE("Rects configuration %s doesn't follow the string pattern.", box);
                     }
-                    box = strtok(nullptr, ";");
+                    box = strtok_r(nullptr, ";", &savePtr);
                 }
                 if (c2QpOffsetRects.size() != 0) {
                     const std::unique_ptr<C2StreamQpOffsetRects::output> regions =
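
Reviewer note: strtok() keeps its scan position in hidden static storage, so concurrent or nested tokenizations can corrupt each other; strtok_r() threads that state through the caller-supplied save pointer. A minimal standalone sketch of the same ';'-separated parse loop (simplified from the hunk above; printf stands in for the real handling):

    #include <cstdio>
    #include <cstring>

    // Parse "top,left-bottom,right=offset;..." with the reentrant tokenizer, as
    // CCodecConfig now does; no hidden global state is involved.
    static void parseQpOffsetRects(char* mutableStr) {
        char* savePtr = nullptr;  // per-call tokenizer state
        for (char* box = strtok_r(mutableStr, ";", &savePtr); box != nullptr;
                box = strtok_r(nullptr, ";", &savePtr)) {
            int top, left, bottom, right, offset;
            if (sscanf(box, "%d,%d-%d,%d=%d", &top, &left, &bottom, &right, &offset) == 5) {
                printf("rect (%d,%d)-(%d,%d) qp offset %d\n", top, left, bottom, right, offset);
            }
        }
    }
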
diff --git a/media/codec2/vndk/C2Buffer.cpp b/media/codec2/vndk/C2Buffer.cpp
index 7b9b80d..bff953d 100644
--- a/media/codec2/vndk/C2Buffer.cpp
+++ b/media/codec2/vndk/C2Buffer.cpp
@@ -1311,8 +1311,7 @@
                 for (size_t planeIx = 0; planeIx < mLayout.numPlanes; ++planeIx) {
                     const uint32_t colSampling = mLayout.planes[planeIx].colSampling;
                     const uint32_t rowSampling = mLayout.planes[planeIx].rowSampling;
-                    if (crop.left % colSampling || crop.right() % colSampling
-                            || crop.top % rowSampling || crop.bottom() % rowSampling) {
+                    if (crop.left % colSampling || crop.top % rowSampling) {
                         // cannot calculate data pointer
                         mImpl->getAllocation()->unmap(mData, crop, nullptr);
                         memset(&mLayout, 0, sizeof(mLayout));
diff --git a/media/libaudioclient/AudioRecord.cpp b/media/libaudioclient/AudioRecord.cpp
index f729e1b..7f5a165 100644
--- a/media/libaudioclient/AudioRecord.cpp
+++ b/media/libaudioclient/AudioRecord.cpp
@@ -256,6 +256,13 @@
 
     mTracker.reset(new RecordingActivityTracker());
 
+    sp<IBinder> binder = defaultServiceManager()->checkService(String16("audio"));
+    if (binder != nullptr) {
+        // Barrier to ensure runtime permission update propagates to audioflinger
+        // Must be client-side
+        interface_cast<IAudioManager>(binder)->permissionUpdateBarrier();
+    }
+
     mSelectedDeviceId = selectedDeviceId;
     mSelectedMicDirection = selectedMicDirection;
     mSelectedMicFieldDimension = microphoneFieldDimension;
@@ -680,6 +687,17 @@
     AutoMutex lock(mLock);
     ALOGV("%s(%d): deviceId=%d mSelectedDeviceId=%d",
             __func__, mPortId, deviceId, mSelectedDeviceId);
+    const int64_t beginNs = systemTime();
+    mediametrics::Defer defer([&] {
+        mediametrics::LogItem(mMetricsId)
+                .set(AMEDIAMETRICS_PROP_CALLERNAME,
+                     mCallerName.empty()
+                     ? AMEDIAMETRICS_PROP_CALLERNAME_VALUE_UNKNOWN
+                     : mCallerName.c_str())
+                .set(AMEDIAMETRICS_PROP_EVENT, AMEDIAMETRICS_PROP_EVENT_VALUE_SETPREFERREDDEVICE)
+                .set(AMEDIAMETRICS_PROP_EXECUTIONTIMENS, (int64_t)(systemTime() - beginNs))
+                .set(AMEDIAMETRICS_PROP_SELECTEDDEVICEID, (int32_t)deviceId)
+                .record(); });
 
     if (mSelectedDeviceId != deviceId) {
         mSelectedDeviceId = deviceId;
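
Reviewer note: mediametrics::Defer runs the captured lambda when it goes out of scope, so the metrics item is recorded on every return path of the setter and EXECUTIONTIMENS covers the whole call (the same pattern is added to AudioTrack.cpp below). A generic scope-exit sketch of the idea; the Defer and LogItem names come from the diff, everything else is illustrative:

    #include <functional>
    #include <utility>

    // Minimal scope-exit helper in the spirit of mediametrics::Defer: the lambda
    // passed at construction runs when the object is destroyed, i.e. on every
    // return path of the enclosing function.
    class ScopeExit {
    public:
        explicit ScopeExit(std::function<void()> f) : mF(std::move(f)) {}
        ~ScopeExit() { if (mF) mF(); }
        ScopeExit(const ScopeExit&) = delete;
        ScopeExit& operator=(const ScopeExit&) = delete;
    private:
        std::function<void()> mF;
    };

    // Usage mirroring the diff (recordMetric is a hypothetical stand-in for the
    // mediametrics::LogItem chain):
    //     const int64_t beginNs = systemTime();
    //     ScopeExit defer([&] { recordMetric(systemTime() - beginNs, deviceId); });
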
diff --git a/media/libaudioclient/AudioSystem.cpp b/media/libaudioclient/AudioSystem.cpp
index 3602e94..55f74e1 100644
--- a/media/libaudioclient/AudioSystem.cpp
+++ b/media/libaudioclient/AudioSystem.cpp
@@ -322,6 +322,18 @@
     return NO_ERROR;
 }
 
+status_t AudioSystem::setPortsVolume(
+        const std::vector<audio_port_handle_t>& portIds, float volume, audio_io_handle_t output) {
+    const sp<IAudioFlinger> af = get_audio_flinger();
+    if (af == 0) return PERMISSION_DENIED;
+    std::vector<int32_t> portIdsAidl = VALUE_OR_RETURN_STATUS(
+            convertContainer<std::vector<int32_t>>(
+                    portIds, legacy2aidl_audio_port_handle_t_int32_t));
+    int32_t outputAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_io_handle_t_int32_t(output));
+    af->setPortsVolume(portIdsAidl, volume, outputAidl);
+    return NO_ERROR;
+}
+
 status_t AudioSystem::setMode(audio_mode_t mode) {
     if (uint32_t(mode) >= AUDIO_MODE_CNT) return BAD_VALUE;
     const sp<IAudioFlinger> af = get_audio_flinger();
@@ -1081,7 +1093,8 @@
                                        audio_port_handle_t* portId,
                                        std::vector<audio_io_handle_t>* secondaryOutputs,
                                        bool *isSpatialized,
-                                       bool *isBitPerfect) {
+                                       bool *isBitPerfect,
+                                       float *volume) {
     if (attr == nullptr) {
         ALOGE("%s NULL audio attributes", __func__);
         return BAD_VALUE;
@@ -1147,6 +1160,7 @@
     *isBitPerfect = responseAidl.isBitPerfect;
     *attr = VALUE_OR_RETURN_STATUS(
             aidl2legacy_AudioAttributes_audio_attributes_t(responseAidl.attr));
+    *volume = responseAidl.volume;
 
     return OK;
 }
diff --git a/media/libaudioclient/AudioTrack.cpp b/media/libaudioclient/AudioTrack.cpp
index 161c4d5..336af36 100644
--- a/media/libaudioclient/AudioTrack.cpp
+++ b/media/libaudioclient/AudioTrack.cpp
@@ -1676,6 +1676,18 @@
     AutoMutex lock(mLock);
     ALOGV("%s(%d): deviceId=%d mSelectedDeviceId=%d",
             __func__, mPortId, deviceId, mSelectedDeviceId);
+    const int64_t beginNs = systemTime();
+    mediametrics::Defer defer([&] {
+        mediametrics::LogItem(mMetricsId)
+                .set(AMEDIAMETRICS_PROP_CALLERNAME,
+                     mCallerName.empty()
+                     ? AMEDIAMETRICS_PROP_CALLERNAME_VALUE_UNKNOWN
+                     : mCallerName.c_str())
+                .set(AMEDIAMETRICS_PROP_EVENT, AMEDIAMETRICS_PROP_EVENT_VALUE_SETPREFERREDDEVICE)
+                .set(AMEDIAMETRICS_PROP_EXECUTIONTIMENS, (int64_t)(systemTime() - beginNs))
+                .set(AMEDIAMETRICS_PROP_SELECTEDDEVICEID, (int32_t)deviceId)
+                .record(); });
+
     if (mSelectedDeviceId != deviceId) {
         mSelectedDeviceId = deviceId;
         if (mStatus == NO_ERROR) {
diff --git a/media/libaudioclient/IAudioFlinger.cpp b/media/libaudioclient/IAudioFlinger.cpp
index e0dca2d..9241973 100644
--- a/media/libaudioclient/IAudioFlinger.cpp
+++ b/media/libaudioclient/IAudioFlinger.cpp
@@ -350,6 +350,15 @@
     return statusTFromBinderStatus(mDelegate->setStreamMute(streamAidl, muted));
 }
 
+status_t AudioFlingerClientAdapter::setPortsVolume(
+        const std::vector<audio_port_handle_t>& portIds, float volume, audio_io_handle_t output) {
+    std::vector<int32_t> portIdsAidl = VALUE_OR_RETURN_STATUS(
+            convertContainer<std::vector<int32_t>>(
+                    portIds, legacy2aidl_audio_port_handle_t_int32_t));
+    int32_t outputAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_io_handle_t_int32_t(output));
+    return statusTFromBinderStatus(mDelegate->setPortsVolume(portIdsAidl, volume, outputAidl));
+}
+
 status_t AudioFlingerClientAdapter::setMode(audio_mode_t mode) {
     AudioMode modeAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_mode_t_AudioMode(mode));
     return statusTFromBinderStatus(mDelegate->setMode(modeAidl));
@@ -1012,6 +1021,16 @@
     return Status::fromStatusT(mDelegate->setStreamMute(streamLegacy, muted));
 }
 
+Status AudioFlingerServerAdapter::setPortsVolume(
+        const std::vector<int32_t>& portIds, float volume, int32_t output) {
+    std::vector<audio_port_handle_t> portIdsLegacy = VALUE_OR_RETURN_BINDER(
+            convertContainer<std::vector<audio_port_handle_t>>(
+                    portIds, aidl2legacy_int32_t_audio_port_handle_t));
+    audio_io_handle_t outputLegacy = VALUE_OR_RETURN_BINDER(
+            aidl2legacy_int32_t_audio_io_handle_t(output));
+    return Status::fromStatusT(mDelegate->setPortsVolume(portIdsLegacy, volume, outputLegacy));
+}
+
 Status AudioFlingerServerAdapter::setMode(AudioMode mode) {
     audio_mode_t modeLegacy = VALUE_OR_RETURN_BINDER(aidl2legacy_AudioMode_audio_mode_t(mode));
     return Status::fromStatusT(mDelegate->setMode(modeLegacy));
diff --git a/media/libaudioclient/aidl/android/media/GetOutputForAttrResponse.aidl b/media/libaudioclient/aidl/android/media/GetOutputForAttrResponse.aidl
index b814b85..4b26d5b 100644
--- a/media/libaudioclient/aidl/android/media/GetOutputForAttrResponse.aidl
+++ b/media/libaudioclient/aidl/android/media/GetOutputForAttrResponse.aidl
@@ -39,4 +39,6 @@
     boolean isBitPerfect;
     /** The corrected audio attributes. **/
     AudioAttributes attr;
+    /** Initial port volume for the new audio track. */
+    float volume;
 }
diff --git a/media/libaudioclient/aidl/android/media/IAudioFlingerService.aidl b/media/libaudioclient/aidl/android/media/IAudioFlingerService.aidl
index 29de9c2..1c825bc 100644
--- a/media/libaudioclient/aidl/android/media/IAudioFlingerService.aidl
+++ b/media/libaudioclient/aidl/android/media/IAudioFlingerService.aidl
@@ -100,6 +100,13 @@
     void setStreamVolume(AudioStreamType stream, float value, int /* audio_io_handle_t */ output);
     void setStreamMute(AudioStreamType stream, boolean muted);
 
+    /*
+     * Set the volume on the given AudioTrack port ids. This is the new way for
+     * AudioPolicyManager to control per-port volume in AudioFlinger.
+     */
+    void setPortsVolume(in int[] /* audio_port_handle_t[] */ portIds, float volume,
+            int /* audio_io_handle_t */ output);
+
     // set audio mode.
     void setMode(AudioMode mode);
 
diff --git a/media/libaudioclient/aidl/fuzzer/corpus/seed-2024-08-29-0 b/media/libaudioclient/aidl/fuzzer/corpus/seed-2024-08-29-0
new file mode 100644
index 0000000..c1e1de5
--- /dev/null
+++ b/media/libaudioclient/aidl/fuzzer/corpus/seed-2024-08-29-0
Binary files differ
diff --git a/media/libaudioclient/aidl/fuzzer/corpus/seed-2024-08-29-1 b/media/libaudioclient/aidl/fuzzer/corpus/seed-2024-08-29-1
new file mode 100644
index 0000000..8e49acd
--- /dev/null
+++ b/media/libaudioclient/aidl/fuzzer/corpus/seed-2024-08-29-1
Binary files differ
diff --git a/media/libaudioclient/aidl/fuzzer/corpus/seed-2024-08-29-2 b/media/libaudioclient/aidl/fuzzer/corpus/seed-2024-08-29-2
new file mode 100644
index 0000000..a8ffcae
--- /dev/null
+++ b/media/libaudioclient/aidl/fuzzer/corpus/seed-2024-08-29-2
Binary files differ
diff --git a/media/libaudioclient/aidl/fuzzer/corpus/seed-2024-08-29-3 b/media/libaudioclient/aidl/fuzzer/corpus/seed-2024-08-29-3
new file mode 100644
index 0000000..7c25f6e
--- /dev/null
+++ b/media/libaudioclient/aidl/fuzzer/corpus/seed-2024-08-29-3
Binary files differ
diff --git a/media/libaudioclient/fuzzer/audioflinger_fuzzer.cpp b/media/libaudioclient/fuzzer/audioflinger_fuzzer.cpp
index 4c94974..710a656 100644
--- a/media/libaudioclient/fuzzer/audioflinger_fuzzer.cpp
+++ b/media/libaudioclient/fuzzer/audioflinger_fuzzer.cpp
@@ -26,6 +26,7 @@
 #include <android/content/AttributionSourceState.h>
 #include <binder/IServiceManager.h>
 #include <binder/MemoryDealer.h>
+#include <com_android_media_audioserver.h>
 #include <media/AidlConversion.h>
 #include <media/AudioEffect.h>
 #include <media/AudioRecord.h>
@@ -41,6 +42,8 @@
 constexpr int32_t kMaxSampleRateHz = 192000;
 constexpr int32_t kSampleRateUnspecified = 0;
 
+namespace audioserver_flags = com::android::media::audioserver;
+
 using namespace std;
 using namespace android;
 
@@ -501,13 +504,19 @@
     AudioSystem::getMasterMute(&state);
     AudioSystem::isMicrophoneMuted(&state);
 
-    audio_stream_type_t stream = getValue(&mFdp, kStreamtypes);
-    AudioSystem::setStreamMute(getValue(&mFdp, kStreamtypes), mFdp.ConsumeBool());
+    audio_stream_type_t stream;
+    if (!audioserver_flags::portid_volume_management()) {
+        stream = getValue(&mFdp, kStreamtypes);
+        AudioSystem::setStreamMute(getValue(&mFdp, kStreamtypes), mFdp.ConsumeBool());
 
-    stream = getValue(&mFdp, kStreamtypes);
-    AudioSystem::setStreamVolume(stream, mFdp.ConsumeFloatingPoint<float>(),
-                                 mFdp.ConsumeIntegral<int32_t>());
-
+        stream = getValue(&mFdp, kStreamtypes);
+        AudioSystem::setStreamVolume(stream, mFdp.ConsumeFloatingPoint<float>(),
+                                     mFdp.ConsumeIntegral<int32_t>());
+    } else {
+        std::vector<audio_port_handle_t> portsForVolumeChange{};
+        AudioSystem::setPortsVolume(portsForVolumeChange, mFdp.ConsumeFloatingPoint<float>(),
+                                    mFdp.ConsumeIntegral<int32_t>());
+    }
     audio_mode_t mode = getValue(&mFdp, kModes);
     AudioSystem::setMode(mode);
 
diff --git a/media/libaudioclient/include/media/AudioSystem.h b/media/libaudioclient/include/media/AudioSystem.h
index 67b3dcd..40e5673 100644
--- a/media/libaudioclient/include/media/AudioSystem.h
+++ b/media/libaudioclient/include/media/AudioSystem.h
@@ -131,6 +131,16 @@
     // mute/unmute stream
     static status_t setStreamMute(audio_stream_type_t stream, bool mute);
 
+    /**
+     * Set the volume for the given AudioTrack port ids on the specified output.
+     * @param portIds AudioTrack port ids to apply the volume to
+     * @param volume the volume to apply
+     * @param output the output (audio_io_handle_t) the ports are attached to
+     * @return NO_ERROR if successful
+     */
+    static status_t setPortsVolume(const std::vector<audio_port_handle_t>& portIds,
+                                   float volume, audio_io_handle_t output);
+
     // set audio mode in audio hardware
     static status_t setMode(audio_mode_t mode);
 
@@ -334,7 +344,8 @@
                                      audio_port_handle_t *portId,
                                      std::vector<audio_io_handle_t> *secondaryOutputs,
                                      bool *isSpatialized,
-                                     bool *isBitPerfect);
+                                     bool *isBitPerfect,
+                                     float *volume);
     static status_t startOutput(audio_port_handle_t portId);
     static status_t stopOutput(audio_port_handle_t portId);
     static void releaseOutput(audio_port_handle_t portId);
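
Reviewer note: a hedged caller-side sketch of the new per-port volume API, using only the signature declared above; the port ids and output handle are placeholders that would come from AudioPolicyManager in the real flow.

    #include <media/AudioSystem.h>

    #include <vector>

    // Apply one volume to a group of AudioTrack ports that share an output.
    static android::status_t applyPortsVolume(const std::vector<audio_port_handle_t>& portIds,
                                              float volume, audio_io_handle_t output) {
        // Forwards to AudioFlinger via the new IAudioFlingerService::setPortsVolume().
        return android::AudioSystem::setPortsVolume(portIds, volume, output);
    }
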
diff --git a/media/libaudioclient/include/media/IAudioFlinger.h b/media/libaudioclient/include/media/IAudioFlinger.h
index 667e9ae..a5f3217 100644
--- a/media/libaudioclient/include/media/IAudioFlinger.h
+++ b/media/libaudioclient/include/media/IAudioFlinger.h
@@ -229,6 +229,16 @@
                                     audio_io_handle_t output) = 0;
     virtual     status_t    setStreamMute(audio_stream_type_t stream, bool muted) = 0;
 
+    /**
+     * Set the volume for the given AudioTrack port ids on the specified output.
+     * @param portIds AudioTrack port ids to apply the volume to
+     * @param volume the volume to apply
+     * @param output the output (audio_io_handle_t) the ports are attached to
+     * @return NO_ERROR if successful
+     */
+    virtual status_t setPortsVolume(const std::vector<audio_port_handle_t>& portIds, float volume,
+            audio_io_handle_t output) = 0;
+
     // set audio mode
     virtual     status_t    setMode(audio_mode_t mode) = 0;
 
@@ -420,6 +430,8 @@
     status_t setStreamVolume(audio_stream_type_t stream, float value,
                              audio_io_handle_t output) override;
     status_t setStreamMute(audio_stream_type_t stream, bool muted) override;
+    status_t setPortsVolume(const std::vector<audio_port_handle_t>& portIds, float volume,
+            audio_io_handle_t output) override;
     status_t setMode(audio_mode_t mode) override;
     status_t setMicMute(bool state) override;
     bool getMicMute() const override;
@@ -542,6 +554,7 @@
             MASTER_MUTE = media::BnAudioFlingerService::TRANSACTION_masterMute,
             SET_STREAM_VOLUME = media::BnAudioFlingerService::TRANSACTION_setStreamVolume,
             SET_STREAM_MUTE = media::BnAudioFlingerService::TRANSACTION_setStreamMute,
+            SET_PORTS_VOLUME = media::BnAudioFlingerService::TRANSACTION_setPortsVolume,
             SET_MODE = media::BnAudioFlingerService::TRANSACTION_setMode,
             SET_MIC_MUTE = media::BnAudioFlingerService::TRANSACTION_setMicMute,
             GET_MIC_MUTE = media::BnAudioFlingerService::TRANSACTION_getMicMute,
@@ -664,6 +677,8 @@
     Status setStreamVolume(media::audio::common::AudioStreamType stream,
                            float value, int32_t output) override;
     Status setStreamMute(media::audio::common::AudioStreamType stream, bool muted) override;
+    Status setPortsVolume(const std::vector<int32_t>& portIds, float volume, int32_t output)
+            override;
     Status setMode(media::audio::common::AudioMode mode) override;
     Status setMicMute(bool state) override;
     Status getMicMute(bool* _aidl_return) override;
diff --git a/media/libaudiohal/impl/AidlUtils.h b/media/libaudiohal/impl/AidlUtils.h
index fcd7a01..97a5bba 100644
--- a/media/libaudiohal/impl/AidlUtils.h
+++ b/media/libaudiohal/impl/AidlUtils.h
@@ -25,32 +25,6 @@
 
 namespace android {
 
-/*
- * Helper macro to add instance name, function name in logs
- * classes should provide getInstanceName and getClassName API to use these macros.
- * print function names along with instance name.
- *
- * Usage:
- *  AUGMENT_LOG(I, "hello!");
- *  AUGMENT_LOG(W, "value: %d", value);
- *
- * AUGMENT_LOG_IF(D, value < 0, "negative");
- * AUGMENT_LOG_IF(E, value < 0, "bad value: %d", value);
- */
-
-#define AUGMENT_LOG(level, ...)                                                              \
-    ALOG##level("[%s] %s: " __android_second(0, __VA_ARGS__, ""), getInstanceName().c_str(), \
-                __func__ __android_rest(__VA_ARGS__))
-
-#define AUGMENT_LOG_IF(level, cond, ...)                                     \
-    ALOG##level##_IF(cond, "[%s] %s: " __android_second(0, __VA_ARGS__, ""), \
-                     getInstanceName().c_str(), __func__ __android_rest(__VA_ARGS__))
-// Used to register an entry into the function
-#define LOG_ENTRY() ALOGD("[%s] %s", getInstanceName().c_str(), __func__)
-
-// entry logging as verbose level
-#define LOG_ENTRY_V() ALOGV("[%s] %s", getInstanceName().c_str(), __func__)
-
 class HalDeathHandler {
   public:
     static HalDeathHandler& getInstance();
diff --git a/media/libaudiohal/impl/ConversionHelperAidl.h b/media/libaudiohal/impl/ConversionHelperAidl.h
index 2d19bee..fe00fb2 100644
--- a/media/libaudiohal/impl/ConversionHelperAidl.h
+++ b/media/libaudiohal/impl/ConversionHelperAidl.h
@@ -32,6 +32,28 @@
 
 namespace android {
 
+/*
+ * Helper macros that prefix log messages with the instance name and the
+ * calling function name. Classes must provide a getInstanceName() API
+ * to use these macros.
+ *
+ * Usage:
+ *  AUGMENT_LOG(D);
+ *  AUGMENT_LOG(I, "hello!");
+ *  AUGMENT_LOG(W, "value: %d", value);
+ *
+ *  AUGMENT_LOG_IF(D, value < 0, "negative");
+ *  AUGMENT_LOG_IF(E, value < 0, "bad value: %d", value);
+ */
+
+#define AUGMENT_LOG(level, ...)                                                  \
+    ALOG##level("[%s] %s" __VA_OPT__(": " __android_second(0, __VA_ARGS__, "")), \
+                getInstanceName().c_str(), __func__ __VA_OPT__(__android_rest(__VA_ARGS__)))
+
+#define AUGMENT_LOG_IF(level, cond, ...)                                                    \
+    ALOG##level##_IF(cond, "[%s] %s" __VA_OPT__(": " __android_second(0, __VA_ARGS__, "")), \
+                     getInstanceName().c_str(), __func__ __VA_OPT__(__android_rest(__VA_ARGS__)))
+
 class Args {
   public:
     explicit Args(const Vector<String16>& args)
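
Reviewer note: a hedged sketch of what a class needs in order to use the relocated macros: a getInstanceName() accessor plus the usual liblog setup (LOG_TAG and the ALOG##/__android_second/__android_rest machinery the macros expand to). Hypothetical class, illustration only:

    #define LOG_TAG "ConversionHelperAidlExample"
    #include <log/log.h>

    #include <string>
    #include <utility>

    class StreamWrapper {
    public:
        explicit StreamWrapper(std::string name) : mInstanceName(std::move(name)) {}
        void start(int retries) {
            AUGMENT_LOG(D);                                      // logs "[<instance>] start"
            AUGMENT_LOG_IF(W, retries > 3, "retries: %d", retries);
        }
    private:
        // Contract required by AUGMENT_LOG / AUGMENT_LOG_IF.
        const std::string& getInstanceName() const { return mInstanceName; }
        std::string mInstanceName;
    };
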
diff --git a/media/libaudiohal/impl/DeviceHalAidl.cpp b/media/libaudiohal/impl/DeviceHalAidl.cpp
index 86e2ca5..3cc923d 100644
--- a/media/libaudiohal/impl/DeviceHalAidl.cpp
+++ b/media/libaudiohal/impl/DeviceHalAidl.cpp
@@ -141,7 +141,7 @@
 }
 
 status_t DeviceHalAidl::getSupportedModes(std::vector<media::audio::common::AudioMode> *modes) {
-    LOG_ENTRY();
+    AUGMENT_LOG(D);
     TIME_CHECK();
     RETURN_IF_MODULE_NOT_INIT(NO_INIT);
     RETURN_IF_TELEPHONY_NOT_INIT(INVALID_OPERATION);
@@ -165,7 +165,7 @@
 }
 
 status_t DeviceHalAidl::initCheck() {
-    LOG_ENTRY();
+    AUGMENT_LOG(D);
     TIME_CHECK();
     RETURN_IF_MODULE_NOT_INIT(NO_INIT);
     std::lock_guard l(mLock);
@@ -197,7 +197,7 @@
 }
 
 status_t DeviceHalAidl::getMasterVolume(float *volume) {
-    LOG_ENTRY();
+    AUGMENT_LOG(D);
     TIME_CHECK();
     RETURN_IF_MODULE_NOT_INIT(NO_INIT);
     if (volume == nullptr) {
@@ -228,7 +228,7 @@
 }
 
 status_t DeviceHalAidl::getMicMute(bool *state) {
-    LOG_ENTRY();
+    AUGMENT_LOG(D);
 
     TIME_CHECK();
     RETURN_IF_MODULE_NOT_INIT(NO_INIT);
@@ -248,7 +248,7 @@
 }
 
 status_t DeviceHalAidl::getMasterMute(bool *state) {
-    LOG_ENTRY();
+    AUGMENT_LOG(D);
 
     TIME_CHECK();
     RETURN_IF_MODULE_NOT_INIT(NO_INIT);
@@ -307,7 +307,7 @@
 }
 
 status_t DeviceHalAidl::getInputBufferSize(struct audio_config* config, size_t* size) {
-    LOG_ENTRY();
+    AUGMENT_LOG(D);
 
     TIME_CHECK();
     RETURN_IF_MODULE_NOT_INIT(NO_INIT);
@@ -463,6 +463,7 @@
         sp<StreamOutHalInterface>* outStream,
         const std::vector<playback_track_metadata_v7_t>& sourceMetadata) {
     AUGMENT_LOG(D, "handle: %d devices %0x flags %0x", handle, devices, flags);
+
     TIME_CHECK();
     RETURN_IF_MODULE_NOT_INIT(NO_INIT);
     if (outStream == nullptr || config == nullptr) {
@@ -479,7 +480,7 @@
     int32_t aidlOutputFlags = VALUE_OR_RETURN_STATUS(
             ::aidl::android::legacy2aidl_audio_output_flags_t_int32_t_mask(flags));
     SourceMetadata aidlMetadata = VALUE_OR_RETURN_STATUS(
-            ::aidl::android::legacy2aidl_SourceMetadata(sourceMetadata));
+            ::aidl::android::legacy2aidl_playback_track_metadata_v7_SourceMetadata(sourceMetadata));
     AudioIoFlags aidlFlags = AudioIoFlags::make<AudioIoFlags::Tag::output>(aidlOutputFlags);
     AudioPortConfig mixPortConfig;
     AudioPatch aidlPatch;
@@ -612,7 +613,7 @@
 }
 
 status_t DeviceHalAidl::supportsAudioPatches(bool* supportsPatches) {
-    LOG_ENTRY_V();
+    AUGMENT_LOG(V);
     RETURN_IF_MODULE_NOT_INIT(NO_INIT);
     if (supportsPatches == nullptr) {
         AUGMENT_LOG(E, "uninitialized supportsPatches");
@@ -728,7 +729,7 @@
 }
 
 status_t DeviceHalAidl::getAudioPort(struct audio_port* port) {
-    LOG_ENTRY_V();
+    AUGMENT_LOG(V);
     RETURN_IF_MODULE_NOT_INIT(NO_INIT);
     if (port == nullptr) {
         AUGMENT_LOG(E, "port not initialized");
@@ -741,7 +742,7 @@
 }
 
 status_t DeviceHalAidl::getAudioPort(struct audio_port_v7 *port) {
-    LOG_ENTRY();
+    AUGMENT_LOG(D);
 
     TIME_CHECK();
     RETURN_IF_MODULE_NOT_INIT(NO_INIT);
@@ -773,7 +774,7 @@
 
 status_t DeviceHalAidl::getAudioMixPort(const struct audio_port_v7 *devicePort,
                                         struct audio_port_v7 *mixPort) {
-    LOG_ENTRY();
+    AUGMENT_LOG(D);
     TIME_CHECK();
     RETURN_IF_MODULE_NOT_INIT(NO_INIT);
 
@@ -797,7 +798,7 @@
 }
 
 status_t DeviceHalAidl::setAudioPortConfig(const struct audio_port_config* config) {
-    LOG_ENTRY();
+    AUGMENT_LOG(D);
 
     TIME_CHECK();
     RETURN_IF_MODULE_NOT_INIT(NO_INIT);
@@ -816,7 +817,7 @@
 }
 
 MicrophoneInfoProvider::Info const* DeviceHalAidl::getMicrophoneInfo() {
-    LOG_ENTRY();
+    AUGMENT_LOG(D);
 
     TIME_CHECK();
     RETURN_IF_MODULE_NOT_INIT({});
@@ -843,7 +844,7 @@
 
 status_t DeviceHalAidl::getMicrophones(
         std::vector<audio_microphone_characteristic_t>* microphones) {
-    LOG_ENTRY();
+    AUGMENT_LOG(D);
 
     TIME_CHECK();
     RETURN_IF_MODULE_NOT_INIT(NO_INIT);
@@ -867,7 +868,7 @@
 
 status_t DeviceHalAidl::addDeviceEffect(
         const struct audio_port_config *device, sp<EffectHalInterface> effect) {
-    LOG_ENTRY();
+    AUGMENT_LOG(D);
 
     TIME_CHECK();
     RETURN_IF_MODULE_NOT_INIT(NO_INIT);
@@ -900,7 +901,7 @@
 }
 status_t DeviceHalAidl::removeDeviceEffect(
         const struct audio_port_config *device, sp<EffectHalInterface> effect) {
-    LOG_ENTRY();
+    AUGMENT_LOG(D);
     TIME_CHECK();
     RETURN_IF_MODULE_NOT_INIT(NO_INIT);
     if (device == nullptr || effect == nullptr) {
@@ -932,7 +933,7 @@
 status_t DeviceHalAidl::getMmapPolicyInfos(
         media::audio::common::AudioMMapPolicyType policyType,
         std::vector<media::audio::common::AudioMMapPolicyInfo>* policyInfos) {
-    LOG_ENTRY();
+    AUGMENT_LOG(D);
 
     TIME_CHECK();
     RETURN_IF_MODULE_NOT_INIT(NO_INIT);
@@ -954,7 +955,7 @@
 }
 
 int32_t DeviceHalAidl::getAAudioMixerBurstCount() {
-    LOG_ENTRY();
+    AUGMENT_LOG(D);
 
     TIME_CHECK();
     RETURN_IF_MODULE_NOT_INIT(NO_INIT);
@@ -966,7 +967,7 @@
 }
 
 int32_t DeviceHalAidl::getAAudioHardwareBurstMinUsec() {
-    LOG_ENTRY();
+    AUGMENT_LOG(D);
 
     TIME_CHECK();
     RETURN_IF_MODULE_NOT_INIT(NO_INIT);
@@ -978,7 +979,7 @@
 }
 
 error::Result<audio_hw_sync_t> DeviceHalAidl::getHwAvSync() {
-    LOG_ENTRY();
+    AUGMENT_LOG(D);
 
     TIME_CHECK();
     RETURN_IF_MODULE_NOT_INIT(NO_INIT);
@@ -997,7 +998,7 @@
 }
 
 status_t DeviceHalAidl::supportsBluetoothVariableLatency(bool* supports) {
-    LOG_ENTRY();
+    AUGMENT_LOG(D);
 
     TIME_CHECK();
     RETURN_IF_MODULE_NOT_INIT(NO_INIT);
@@ -1009,7 +1010,7 @@
 
 status_t DeviceHalAidl::getSoundDoseInterface([[maybe_unused]] const std::string& module,
                                               ::ndk::SpAIBinder* soundDoseBinder) {
-    LOG_ENTRY_V();
+    AUGMENT_LOG(V);
     RETURN_IF_MODULE_NOT_INIT(NO_INIT);
 
     if (soundDoseBinder == nullptr) {
@@ -1031,7 +1032,7 @@
 }
 
 status_t DeviceHalAidl::prepareToDisconnectExternalDevice(const struct audio_port_v7* port) {
-    LOG_ENTRY_V();
+    AUGMENT_LOG(V);
     TIME_CHECK();
     RETURN_IF_MODULE_NOT_INIT(NO_INIT);
     if (port == nullptr) {
@@ -1070,7 +1071,7 @@
 }
 
 status_t DeviceHalAidl::setConnectedState(const struct audio_port_v7 *port, bool connected) {
-    LOG_ENTRY_V();
+    AUGMENT_LOG(V);
     TIME_CHECK();
     RETURN_IF_MODULE_NOT_INIT(NO_INIT);
     if (port == nullptr) {
@@ -1102,7 +1103,7 @@
 }
 
 status_t DeviceHalAidl::setSimulateDeviceConnections(bool enabled) {
-    LOG_ENTRY_V();
+    AUGMENT_LOG(V);
     TIME_CHECK();
     RETURN_IF_MODULE_NOT_INIT(NO_INIT);
     {
diff --git a/media/libaudiohal/impl/DeviceHalHidl.cpp b/media/libaudiohal/impl/DeviceHalHidl.cpp
index b48c7ed..0a262e4 100644
--- a/media/libaudiohal/impl/DeviceHalHidl.cpp
+++ b/media/libaudiohal/impl/DeviceHalHidl.cpp
@@ -280,11 +280,9 @@
     ::android::hardware::audio::common::COMMON_TYPES_CPP_VERSION::SourceMetadata hidlMetadata;
 #endif
 
-    if (status_t status = CoreUtils::sourceMetadataFromHalV7(
-                sourceMetadata, true /*ignoreNonVendorTags*/, &hidlMetadata);
-            status != OK) {
-        return status;
-    }
+    RETURN_STATUS_IF_ERROR(
+            CoreUtils::sourceMetadataFromHalV7(
+                    sourceMetadata, true /*ignoreNonVendorTags*/, &hidlMetadata));
 
 #if !(MAJOR_VERSION == 7 && MINOR_VERSION == 1)
     //TODO: b/193496180 use spatializer flag at audio HAL when available
diff --git a/media/libaudiohal/impl/StreamHalAidl.cpp b/media/libaudiohal/impl/StreamHalAidl.cpp
index 99e2c66..63be1bc 100644
--- a/media/libaudiohal/impl/StreamHalAidl.cpp
+++ b/media/libaudiohal/impl/StreamHalAidl.cpp
@@ -91,7 +91,7 @@
                            mContext.getBufferDurationMs(mConfig.sample_rate))
                   * NANOS_PER_MILLISECOND)
 {
-    LOG_ENTRY();
+    AUGMENT_LOG(D);
     {
         std::lock_guard l(mLock);
         mLastReply.latencyMs = nominalLatency;
@@ -106,7 +106,7 @@
 }
 
 StreamHalAidl::~StreamHalAidl() {
-    LOG_ENTRY();
+    AUGMENT_LOG(D);
     if (mStream != nullptr) {
         ndk::ScopedAStatus status = mStream->close();
         AUGMENT_LOG_IF(E, !status.isOk(), "status %s", status.getDescription().c_str());
@@ -114,7 +114,7 @@
 }
 
 status_t StreamHalAidl::getBufferSize(size_t *size) {
-    LOG_ENTRY();
+    AUGMENT_LOG(D);
     if (size == nullptr) {
         return BAD_VALUE;
     }
@@ -128,7 +128,7 @@
 }
 
 status_t StreamHalAidl::getAudioProperties(audio_config_base_t *configBase) {
-    LOG_ENTRY();
+    AUGMENT_LOG(D);
     if (configBase == nullptr) {
         return BAD_VALUE;
     }
@@ -138,7 +138,7 @@
 }
 
 status_t StreamHalAidl::setParameters(const String8& kvPairs) {
-    LOG_ENTRY_V();
+    AUGMENT_LOG(V);
     TIME_CHECK();
     if (!mStream) return NO_INIT;
     AudioParameter parameters(kvPairs);
@@ -153,7 +153,7 @@
 }
 
 status_t StreamHalAidl::getParameters(const String8& keys __unused, String8 *values) {
-    LOG_ENTRY_V();
+    AUGMENT_LOG(V);
     TIME_CHECK();
     if (!mStream) return NO_INIT;
     if (values == nullptr) {
@@ -165,7 +165,7 @@
 }
 
 status_t StreamHalAidl::getFrameSize(size_t *size) {
-    LOG_ENTRY();
+    AUGMENT_LOG(D);
     if (size == nullptr) {
         return BAD_VALUE;
     }
@@ -177,7 +177,7 @@
 }
 
 status_t StreamHalAidl::addEffect(sp<EffectHalInterface> effect) {
-    LOG_ENTRY();
+    AUGMENT_LOG(D);
     TIME_CHECK();
     if (!mStream) return NO_INIT;
     if (effect == nullptr) {
@@ -188,7 +188,7 @@
 }
 
 status_t StreamHalAidl::removeEffect(sp<EffectHalInterface> effect) {
-    LOG_ENTRY();
+    AUGMENT_LOG(D);
     TIME_CHECK();
     if (!mStream) return NO_INIT;
     if (effect == nullptr) {
@@ -199,7 +199,7 @@
 }
 
 status_t StreamHalAidl::standby() {
-    LOG_ENTRY();
+    AUGMENT_LOG(D);
     TIME_CHECK();
     if (!mStream) return NO_INIT;
     const auto state = getState();
@@ -247,7 +247,7 @@
 }
 
 status_t StreamHalAidl::dump(int fd, const Vector<String16>& args) {
-    LOG_ENTRY();
+    AUGMENT_LOG(D);
     TIME_CHECK();
     if (!mStream) return NO_INIT;
     Vector<String16> newArgs = args;
@@ -258,7 +258,7 @@
 }
 
 status_t StreamHalAidl::start() {
-    LOG_ENTRY();
+    AUGMENT_LOG(D);
     TIME_CHECK();
     if (!mStream) return NO_INIT;
     if (!mContext.isMmapped()) {
@@ -304,7 +304,7 @@
 }
 
 status_t StreamHalAidl::stop() {
-    LOG_ENTRY();
+    AUGMENT_LOG(D);
     TIME_CHECK();
     if (!mStream) return NO_INIT;
     if (!mContext.isMmapped()) {
@@ -329,7 +329,7 @@
 }
 
 status_t StreamHalAidl::getLatency(uint32_t *latency) {
-    LOG_ENTRY_V();
+    AUGMENT_LOG(V);
     if (!mStream) return NO_INIT;
     StreamDescriptor::Reply reply;
     RETURN_STATUS_IF_ERROR(updateCountersIfNeeded(&reply));
@@ -342,7 +342,7 @@
 
 status_t StreamHalAidl::getObservablePosition(int64_t* frames, int64_t* timestamp,
         StatePositions* statePositions) {
-    LOG_ENTRY_V();
+    AUGMENT_LOG(V);
     if (!mStream) return NO_INIT;
     StreamDescriptor::Reply reply;
     RETURN_STATUS_IF_ERROR(updateCountersIfNeeded(&reply, statePositions));
@@ -352,7 +352,7 @@
 }
 
 status_t StreamHalAidl::getHardwarePosition(int64_t *frames, int64_t *timestamp) {
-    LOG_ENTRY_V();
+    AUGMENT_LOG(V);
     if (!mStream) return NO_INIT;
     StreamDescriptor::Reply reply;
     RETURN_STATUS_IF_ERROR(updateCountersIfNeeded(&reply));
@@ -362,7 +362,7 @@
 }
 
 status_t StreamHalAidl::getXruns(int32_t *frames) {
-    LOG_ENTRY_V();
+    AUGMENT_LOG(V);
     if (!mStream) return NO_INIT;
     StreamDescriptor::Reply reply;
     RETURN_STATUS_IF_ERROR(updateCountersIfNeeded(&reply));
@@ -371,7 +371,7 @@
 }
 
 status_t StreamHalAidl::transfer(void *buffer, size_t bytes, size_t *transferred) {
-    LOG_ENTRY_V();
+    AUGMENT_LOG(V);
     // TIME_CHECK();  // TODO(b/243839867) reenable only when optimized.
     if (!mStream || mContext.getDataMQ() == nullptr) return NO_INIT;
     mWorkerTid.store(gettid(), std::memory_order_release);
@@ -422,7 +422,7 @@
 }
 
 status_t StreamHalAidl::pause(StreamDescriptor::Reply* reply) {
-    LOG_ENTRY();
+    AUGMENT_LOG(D);
     TIME_CHECK();
     if (!mStream) return NO_INIT;
 
@@ -438,7 +438,7 @@
 }
 
 status_t StreamHalAidl::resume(StreamDescriptor::Reply* reply) {
-    LOG_ENTRY();
+    AUGMENT_LOG(D);
     TIME_CHECK();
     if (!mStream) return NO_INIT;
     if (mIsInput) {
@@ -475,7 +475,7 @@
 }
 
 status_t StreamHalAidl::drain(bool earlyNotify, StreamDescriptor::Reply* reply) {
-    LOG_ENTRY();
+    AUGMENT_LOG(D);
     TIME_CHECK();
     if (!mStream) return NO_INIT;
     return sendCommand(makeHalCommand<HalCommand::Tag::drain>(
@@ -486,7 +486,7 @@
 }
 
 status_t StreamHalAidl::flush(StreamDescriptor::Reply* reply) {
-    LOG_ENTRY();
+    AUGMENT_LOG(D);
     TIME_CHECK();
     if (!mStream) return NO_INIT;
 
@@ -505,7 +505,7 @@
 }
 
 status_t StreamHalAidl::exit() {
-    LOG_ENTRY();
+    AUGMENT_LOG(D);
     TIME_CHECK();
     if (!mStream) return NO_INIT;
     return statusTFromBinderStatus(mStream->prepareToClose());
@@ -545,7 +545,7 @@
 
 status_t StreamHalAidl::createMmapBuffer(int32_t minSizeFrames __unused,
                                          struct audio_mmap_buffer_info *info) {
-    LOG_ENTRY();
+    AUGMENT_LOG(D);
     TIME_CHECK();
     if (!mStream) return NO_INIT;
     if (!mContext.isMmapped()) {
@@ -783,7 +783,7 @@
 }
 
 status_t StreamOutHalAidl::setCallback(wp<StreamOutHalInterfaceCallback> callback) {
-    LOG_ENTRY();
+    AUGMENT_LOG(D);
     TIME_CHECK();
     if (!mStream) return NO_INIT;
     if (!mContext.isAsynchronous()) {
@@ -859,7 +859,7 @@
 }
 
 status_t StreamOutHalAidl::presentationComplete() {
-    LOG_ENTRY();
+    AUGMENT_LOG(D);
     return OK;
 }
 
diff --git a/media/libmediametrics/include/MediaMetricsConstants.h b/media/libmediametrics/include/MediaMetricsConstants.h
index f367a3e..98c3382 100644
--- a/media/libmediametrics/include/MediaMetricsConstants.h
+++ b/media/libmediametrics/include/MediaMetricsConstants.h
@@ -267,6 +267,7 @@
 #define AMEDIAMETRICS_PROP_EVENT_VALUE_SETLOGSESSIONID  "setLogSessionId" // AudioTrack, Record
 #define AMEDIAMETRICS_PROP_EVENT_VALUE_SETPLAYBACKPARAM "setPlaybackParam" // AudioTrack
 #define AMEDIAMETRICS_PROP_EVENT_VALUE_SETPLAYERIID "setPlayerIId" // AudioTrack
+#define AMEDIAMETRICS_PROP_EVENT_VALUE_SETPREFERREDDEVICE "setPreferredDevice" // AudioTrack, Record
 #define AMEDIAMETRICS_PROP_EVENT_VALUE_SETSAMPLERATE "setSampleRate" // AudioTrack
 #define AMEDIAMETRICS_PROP_EVENT_VALUE_SETSTARTTHRESHOLD "setStartThreshold" // AudioTrack
 #define AMEDIAMETRICS_PROP_EVENT_VALUE_SETVOICEVOLUME   "setVoiceVolume" // AudioFlinger
diff --git a/media/libmediaplayerservice/fuzzer/corpus/seed-2024-08-29-0 b/media/libmediaplayerservice/fuzzer/corpus/seed-2024-08-29-0
new file mode 100644
index 0000000..aae78ae
--- /dev/null
+++ b/media/libmediaplayerservice/fuzzer/corpus/seed-2024-08-29-0
Binary files differ
diff --git a/media/libstagefright/CameraSource.cpp b/media/libstagefright/CameraSource.cpp
index 26b8d0c..e26f189 100644
--- a/media/libstagefright/CameraSource.cpp
+++ b/media/libstagefright/CameraSource.cpp
@@ -33,6 +33,7 @@
 #include <camera/Camera.h>
 #include <camera/CameraParameters.h>
 #include <camera/StringUtils.h>
+#include <com_android_graphics_libgui_flags.h>
 #include <gui/Surface.h>
 #include <utils/String8.h>
 #include <cutils/properties.h>
@@ -471,11 +472,13 @@
         ALOGE("%s: Buffer queue already exists", __FUNCTION__);
         return ALREADY_EXISTS;
     }
-
+#if !COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
     // Create a buffer queue.
     sp<IGraphicBufferProducer> producer;
     sp<IGraphicBufferConsumer> consumer;
     BufferQueue::createBufferQueue(&producer, &consumer);
+#endif // !COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
+
 
     uint32_t usage = GRALLOC_USAGE_SW_READ_OFTEN;
     if (format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED) {
@@ -484,9 +487,15 @@
 
     bufferCount += kConsumerBufferCount;
 
+#if COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
+    mVideoBufferConsumer = new BufferItemConsumer(usage, bufferCount);
+    mVideoBufferConsumer->setName(String8::format("StageFright-CameraSource"));
+    mVideoBufferProducer = mVideoBufferConsumer->getSurface()->getIGraphicBufferProducer();
+#else
     mVideoBufferConsumer = new BufferItemConsumer(consumer, usage, bufferCount);
     mVideoBufferConsumer->setName(String8::format("StageFright-CameraSource"));
     mVideoBufferProducer = producer;
+#endif  // COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
 
     status_t res = mVideoBufferConsumer->setDefaultBufferSize(width, height);
     if (res != OK) {
diff --git a/media/libstagefright/FrameDecoder.cpp b/media/libstagefright/FrameDecoder.cpp
index 46703bb..893b442 100644
--- a/media/libstagefright/FrameDecoder.cpp
+++ b/media/libstagefright/FrameDecoder.cpp
@@ -18,19 +18,14 @@
 #define LOG_TAG "FrameDecoder"
 #define ATRACE_TAG  ATRACE_TAG_VIDEO
 #include "include/FrameDecoder.h"
-#include "include/FrameCaptureLayer.h"
-#include "include/HevcUtils.h"
+#include <android_media_codec.h>
 #include <binder/MemoryBase.h>
 #include <binder/MemoryHeapBase.h>
 #include <gui/Surface.h>
 #include <inttypes.h>
-#include <mediadrm/ICrypto.h>
 #include <media/IMediaSource.h>
 #include <media/MediaCodecBuffer.h>
-#include <media/stagefright/foundation/avc_utils.h>
-#include <media/stagefright/foundation/ADebug.h>
-#include <media/stagefright/foundation/AMessage.h>
-#include <media/stagefright/foundation/ColorUtils.h>
+#include <media/stagefright/CodecBase.h>
 #include <media/stagefright/ColorConverter.h>
 #include <media/stagefright/FrameCaptureProcessor.h>
 #include <media/stagefright/MediaBuffer.h>
@@ -39,13 +34,24 @@
 #include <media/stagefright/MediaDefs.h>
 #include <media/stagefright/MediaErrors.h>
 #include <media/stagefright/Utils.h>
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/foundation/AMessage.h>
+#include <media/stagefright/foundation/ColorUtils.h>
+#include <media/stagefright/foundation/avc_utils.h>
+#include <mediadrm/ICrypto.h>
 #include <private/media/VideoFrame.h>
 #include <utils/Log.h>
 #include <utils/Trace.h>
+#include "include/FrameCaptureLayer.h"
+#include "include/HevcUtils.h"
+
+#include <C2Buffer.h>
+#include <Codec2BufferUtils.h>
 
 namespace android {
 
 static const int64_t kBufferTimeOutUs = 10000LL; // 10 msec
+static const int64_t kAsyncBufferTimeOutUs = 2000000LL; // 2000 msec
 static const size_t kRetryCount = 100; // must be >0
 static const int64_t kDefaultSampleDurationUs = 33333LL; // 33ms
 // For codec, 0 is the highest importance; higher the number lesser important.
@@ -232,6 +238,104 @@
     return false;
 }
 
+AsyncCodecHandler::AsyncCodecHandler(const wp<FrameDecoder>& frameDecoder) {
+    mFrameDecoder = frameDecoder;
+}
+
+void AsyncCodecHandler::onMessageReceived(const sp<AMessage>& msg) {
+    switch (msg->what()) {
+        case FrameDecoder::kWhatCallbackNotify:
+            int32_t callbackId;
+            if (!msg->findInt32("callbackID", &callbackId)) {
+                ALOGE("kWhatCallbackNotify: callbackID is expected.");
+                break;
+            }
+            switch (callbackId) {
+                case MediaCodec::CB_INPUT_AVAILABLE: {
+                    int32_t index;
+                    if (!msg->findInt32("index", &index)) {
+                        ALOGE("CB_INPUT_AVAILABLE: index is expected.");
+                        break;
+                    }
+                    ALOGD("CB_INPUT_AVAILABLE received, index is %d", index);
+                    sp<FrameDecoder> frameDecoder = mFrameDecoder.promote();
+                    if (frameDecoder != nullptr) {
+                        frameDecoder->handleInputBufferAsync(index);
+                    }
+                    break;
+                }
+                case MediaCodec::CB_OUTPUT_AVAILABLE: {
+                    int32_t index;
+                    int64_t timeUs;
+                    CHECK(msg->findInt32("index", &index));
+                    CHECK(msg->findInt64("timeUs", &timeUs));
+                    ALOGD("CB_OUTPUT_AVAILABLE received, index is %d", index);
+                    sp<FrameDecoder> frameDecoder = mFrameDecoder.promote();
+                    if (frameDecoder != nullptr) {
+                        frameDecoder->handleOutputBufferAsync(index, timeUs);
+                    }
+                    break;
+                }
+                case MediaCodec::CB_OUTPUT_FORMAT_CHANGED: {
+                    ALOGD("CB_OUTPUT_FORMAT_CHANGED received");
+                    sp<AMessage> format;
+                    if (!msg->findMessage("format", &format) || format == nullptr) {
+                        ALOGE("CB_OUTPUT_FORMAT_CHANGED: format is expected.");
+                        break;
+                    }
+                    sp<FrameDecoder> frameDecoder = mFrameDecoder.promote();
+                    if (frameDecoder != nullptr) {
+                        frameDecoder->handleOutputFormatChangeAsync(format);
+                    }
+                    break;
+                }
+                case MediaCodec::CB_ERROR: {
+                    status_t err;
+                    int32_t actionCode;
+                    AString detail;
+                    if (!msg->findInt32("err", &err)) {
+                        ALOGE("CB_ERROR: err is expected.");
+                        break;
+                    }
+                    if (!msg->findInt32("actionCode", &actionCode)) {
+                        ALOGE("CB_ERROR: actionCode is expected.");
+                        break;
+                    }
+                    msg->findString("detail", &detail);
+                    ALOGE("Codec reported error(0x%x/%s), actionCode(%d), detail(%s)", err,
+                          StrMediaError(err).c_str(), actionCode, detail.c_str());
+                    break;
+                }
+                default:
+                    ALOGE("kWhatCallbackNotify: callbackID(%d) is unexpected.", callbackId);
+                    break;
+            }
+            break;
+        default:
+            ALOGE("unexpected message received: %s", msg->debugString().c_str());
+            break;
+    }
+}
+
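+// Thread-safe FIFO of input buffer indices reported via CB_INPUT_AVAILABLE: the codec callback
+// thread enqueues indices and the block-model extraction path dequeues them with a timeout.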
+void InputBufferIndexQueue::enqueue(int32_t index) {
+    std::scoped_lock<std::mutex> lock(mMutex);
+    mQueue.push(index);
+    mCondition.notify_one();
+}
+
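+// Blocks for up to timeOutUs microseconds waiting for an index; returns false on timeout.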
+bool InputBufferIndexQueue::dequeue(int32_t* index, int32_t timeOutUs) {
+    std::unique_lock<std::mutex> lock(mMutex);
+    bool hasAvailableIndex = mCondition.wait_for(lock, std::chrono::microseconds(timeOutUs),
+                                                 [this] { return !mQueue.empty(); });
+    if (hasAvailableIndex) {
+        *index = mQueue.front();
+        mQueue.pop();
+        return true;
+    } else {
+        return false;
+    }
+}
+
 //static
 sp<IMemory> FrameDecoder::getMetadataOnly(
         const sp<MetaData> &trackMeta, int colorFormat, bool thumbnail, uint32_t bitDepth) {
@@ -281,6 +385,7 @@
         const sp<MetaData> &trackMeta,
         const sp<IMediaSource> &source)
     : mComponentName(componentName),
+      mUseBlockModel(false),
       mTrackMeta(trackMeta),
       mSource(source),
       mDstFormat(OMX_COLOR_Format16bitRGB565),
@@ -290,6 +395,10 @@
 }
 
 FrameDecoder::~FrameDecoder() {
+    if (mHandler != NULL) {
+        mAsyncLooper->stop();
+        mAsyncLooper->unregisterHandler(mHandler->id());
+    }
     if (mDecoder != NULL) {
         mDecoder->release();
         mSource->stop();
@@ -333,8 +442,18 @@
         return (decoder.get() == NULL) ? NO_MEMORY : err;
     }
 
+    if (mUseBlockModel) {
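+        // Block model requires asynchronous MediaCodec operation; route the codec callbacks
+        // to a dedicated looper/handler.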
+        mAsyncLooper = new ALooper;
+        mAsyncLooper->start();
+        mHandler = new AsyncCodecHandler(wp<FrameDecoder>(this));
+        mAsyncLooper->registerHandler(mHandler);
+        sp<AMessage> callbackMsg = new AMessage(kWhatCallbackNotify, mHandler);
+        decoder->setCallback(callbackMsg);
+    }
+
     err = decoder->configure(
-            videoFormat, mSurface, NULL /* crypto */, 0 /* flags */);
+            videoFormat, mSurface, NULL /* crypto */,
+            mUseBlockModel ? MediaCodec::CONFIGURE_FLAG_USE_BLOCK_MODEL : 0 /* flags */);
     if (err != OK) {
         ALOGW("configure returned error %d (%s)", err, asString(err));
         decoder->release();
@@ -362,10 +481,18 @@
 sp<IMemory> FrameDecoder::extractFrame(FrameRect *rect) {
     ScopedTrace trace(ATRACE_TAG, "FrameDecoder::ExtractFrame");
     status_t err = onExtractRect(rect);
-    if (err == OK) {
+    if (err != OK) {
+        ALOGE("onExtractRect error %d", err);
+        return NULL;
+    }
+
+    if (!mUseBlockModel) {
         err = extractInternal();
+    } else {
+        err = extractInternalUsingBlockModel();
     }
     if (err != OK) {
+        ALOGE("extractInternal error %d", err);
         return NULL;
     }
 
@@ -380,6 +507,7 @@
         ALOGE("decoder is not initialized");
         return NO_INIT;
     }
+
     do {
         size_t index;
         int64_t ptsUs = 0LL;
@@ -433,7 +561,8 @@
                         (const uint8_t*)mediaBuffer->data() + mediaBuffer->range_offset(),
                         mediaBuffer->range_length());
 
-                onInputReceived(codecBuffer, mediaBuffer->meta_data(), mFirstSample, &flags);
+                onInputReceived(codecBuffer->data(), codecBuffer->size(), mediaBuffer->meta_data(),
+                                mFirstSample, &flags);
                 mFirstSample = false;
             }
 
@@ -487,11 +616,14 @@
                         ALOGE("failed to get output buffer %zu", index);
                         break;
                     }
+                    uint8_t* frameData = videoFrameBuffer->data();
+                    sp<ABuffer> imageData;
+                    videoFrameBuffer->meta()->findBuffer("image-data", &imageData);
                     if (mSurface != nullptr) {
                         mDecoder->renderOutputBufferAndRelease(index);
-                        err = onOutputReceived(videoFrameBuffer, mOutputFormat, ptsUs, &done);
+                        err = onOutputReceived(frameData, imageData, mOutputFormat, ptsUs, &done);
                     } else {
-                        err = onOutputReceived(videoFrameBuffer, mOutputFormat, ptsUs, &done);
+                        err = onOutputReceived(frameData, imageData, mOutputFormat, ptsUs, &done);
                         mDecoder->releaseOutputBuffer(index);
                     }
                 } else {
@@ -510,6 +642,73 @@
     return err;
 }
 
+status_t FrameDecoder::extractInternalUsingBlockModel() {
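+    // Block-model (C2) path: copy a single access unit into a C2LinearBlock, queue it with EOS,
+    // and wait for handleOutputBufferAsync() to deliver the decoded frame.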
+    status_t err = OK;
+    MediaBufferBase* mediaBuffer = NULL;
+    int64_t ptsUs = 0LL;
+    uint32_t flags = 0;
+    int32_t index;
+    mHandleOutputBufferAsyncDone = false;
+
+    err = mSource->read(&mediaBuffer, &mReadOptions);
+    mReadOptions.clearSeekTo();
+    if (err != OK) {
+        ALOGW("Input Error: err=%d", err);
+        if (mediaBuffer != NULL) {
+            mediaBuffer->release();
+        }
+        return err;
+    }
+
+    size_t inputSize = mediaBuffer->range_length();
+    std::shared_ptr<C2LinearBlock> block =
+            MediaCodec::FetchLinearBlock(inputSize, {std::string{mComponentName.c_str()}});
+    C2WriteView view{block->map().get()};
+    if (view.error() != C2_OK) {
+        ALOGE("Fatal error: failed to allocate and map a block");
+        mediaBuffer->release();
+        return NO_MEMORY;
+    }
+    if (inputSize > view.capacity()) {
+        ALOGE("Fatal error: allocated block is too small "
+              "(input size %zu; block cap %u)",
+              inputSize, view.capacity());
+        mediaBuffer->release();
+        return BAD_VALUE;
+    }
+    CHECK(mediaBuffer->meta_data().findInt64(kKeyTime, &ptsUs));
+    memcpy(view.base(), (const uint8_t*)mediaBuffer->data() + mediaBuffer->range_offset(),
+           inputSize);
+    std::shared_ptr<C2Buffer> c2Buffer =
+            C2Buffer::CreateLinearBuffer(block->share(0, inputSize, C2Fence{}));
+    onInputReceived(view.base(), inputSize, mediaBuffer->meta_data(), true /* firstSample */,
+                    &flags);
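+    // A single access unit is enough for frame extraction, so mark it as end of stream.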
+    flags |= MediaCodec::BUFFER_FLAG_EOS;
+    mediaBuffer->release();
+
+    std::vector<AccessUnitInfo> infoVec;
+    infoVec.emplace_back(flags, inputSize, ptsUs);
+    sp<BufferInfosWrapper> infos = new BufferInfosWrapper{std::move(infoVec)};
+
+    if (!mInputBufferIndexQueue.dequeue(&index, kAsyncBufferTimeOutUs)) {
+        ALOGE("No available input buffer index for async mode.");
+        return TIMED_OUT;
+    }
+
+    AString errorDetailMsg;
+    ALOGD("QueueLinearBlock: index=%d size=%zu ts=%" PRId64 " us flags=%x",
+            index, inputSize, ptsUs, flags);
+    err = mDecoder->queueBuffer(index, c2Buffer, infos, nullptr, &errorDetailMsg);
+    if (err != OK) {
+        ALOGE("failed to queueBuffer (err %d): %s", err, errorDetailMsg.c_str());
+        return err;
+    }
+
+    // wait for handleOutputBufferAsync() to finish
+    std::unique_lock _lk(mMutex);
+    mOutputFramePending.wait_for(_lk, std::chrono::microseconds(kAsyncBufferTimeOutUs),
+                                 [this] { return mHandleOutputBufferAsyncDone; });
+    return mHandleOutputBufferAsyncDone ? OK : TIMED_OUT;
+}
+
 //////////////////////////////////////////////////////////////////////
 
 VideoFrameDecoder::VideoFrameDecoder(
@@ -525,6 +724,81 @@
       mDefaultSampleDurationUs(0) {
 }
 
+status_t FrameDecoder::handleOutputFormatChangeAsync(sp<AMessage> format) {
+    // Here format is MediaCodec's internal copy of output format.
+    // Make a copy since the client might modify it.
+    mOutputFormat = format->dup();
+    ALOGD("receive output format in async mode: %s", mOutputFormat->debugString().c_str());
+    return OK;
+}
+
+status_t FrameDecoder::handleInputBufferAsync(int32_t index) {
+    mInputBufferIndexQueue.enqueue(index);
+    return OK;
+}
+
+status_t FrameDecoder::handleOutputBufferAsync(int32_t index, int64_t timeUs) {
+    if (mHandleOutputBufferAsyncDone) {
+        // we have already processed an output buffer, skip others
+        return OK;
+    }
+
+    status_t err = OK;
+    sp<MediaCodecBuffer> videoFrameBuffer;
+    err = mDecoder->getOutputBuffer(index, &videoFrameBuffer);
+    if (err != OK || videoFrameBuffer == nullptr) {
+        ALOGE("failed to get output buffer %d", index);
+        return err;
+    }
+
+    bool onOutputReceivedDone = false;
+    if (mSurface != nullptr) {
+        mDecoder->renderOutputBufferAndRelease(index);
+        // frameData and imgObj will be fetched by captureSurface() inside onOutputReceived(),
+        // so explicitly pass null for both here.
+        err = onOutputReceived(nullptr, nullptr, mOutputFormat, timeUs, &onOutputReceivedDone);
+    } else {
+        // get stride and frame data for block model buffer
+        std::shared_ptr<C2Buffer> c2buffer = videoFrameBuffer->asC2Buffer();
+        if (!c2buffer
+                || c2buffer->data().type() != C2BufferData::GRAPHIC
+                || c2buffer->data().graphicBlocks().size() == 0u) {
+            ALOGE("C2Buffer precond fail");
+            return ERROR_MALFORMED;
+        }
+
+        std::unique_ptr<const C2GraphicView> view(std::make_unique<const C2GraphicView>(
+            c2buffer->data().graphicBlocks()[0].map().get()));
+        GraphicView2MediaImageConverter converter(*view, mOutputFormat, false /* copy */);
+        if (converter.initCheck() != OK) {
+            ALOGE("Converter init failed: %d", converter.initCheck());
+            return NO_INIT;
+        }
+
+        uint8_t* frameData = converter.wrap()->data();
+        sp<ABuffer> imageData = converter.imageData();
+        if (imageData != nullptr) {
+            mOutputFormat->setBuffer("image-data", imageData);
+            MediaImage2 *img = (MediaImage2*) imageData->data();
+            if (img->mNumPlanes > 0 && img->mType != img->MEDIA_IMAGE_TYPE_UNKNOWN) {
+                int32_t stride = img->mPlane[0].mRowInc;
+                mOutputFormat->setInt32(KEY_STRIDE, stride);
+                ALOGD("updating stride = %d", stride);
+            }
+        }
+
+        err = onOutputReceived(frameData, imageData, mOutputFormat, timeUs, &onOutputReceivedDone);
+        mDecoder->releaseOutputBuffer(index);
+    }
+
+    if (err == OK && onOutputReceivedDone) {
+        std::lock_guard _lm(mMutex);
+        mHandleOutputBufferAsyncDone = true;
+        mOutputFramePending.notify_one();
+    }
+    return err;
+}
+
 sp<AMessage> VideoFrameDecoder::onGetFormatAndSeekOptions(
         int64_t frameTimeUs, int seekMode,
         MediaSource::ReadOptions *options,
@@ -575,8 +849,13 @@
     bool isSeekingClosest = (mSeekMode == MediaSource::ReadOptions::SEEK_CLOSEST)
             || (mSeekMode == MediaSource::ReadOptions::SEEK_FRAME_INDEX);
     if (!isSeekingClosest) {
-        videoFormat->setInt32("android._num-input-buffers", 1);
-        videoFormat->setInt32("android._num-output-buffers", 1);
+        if (mComponentName.startsWithIgnoreCase("c2.")) {
+            mUseBlockModel = android::media::codec::provider_->thumbnail_block_model();
+        } else {
+            // OMX Codec
+            videoFormat->setInt32("android._num-input-buffers", 1);
+            videoFormat->setInt32("android._num-output-buffers", 1);
+        }
     }
 
     if (isHDR(videoFormat)) {
@@ -601,9 +880,8 @@
     return videoFormat;
 }
 
-status_t VideoFrameDecoder::onInputReceived(
-        const sp<MediaCodecBuffer> &codecBuffer,
-        MetaDataBase &sampleMeta, bool firstSample, uint32_t *flags) {
+status_t VideoFrameDecoder::onInputReceived(uint8_t* data, size_t size, MetaDataBase& sampleMeta,
+                                            bool firstSample, uint32_t* flags) {
     bool isSeekingClosest = (mSeekMode == MediaSource::ReadOptions::SEEK_CLOSEST)
             || (mSeekMode == MediaSource::ReadOptions::SEEK_FRAME_INDEX);
 
@@ -612,10 +890,7 @@
         ALOGV("Seeking closest: targetTimeUs=%lld", (long long)mTargetTimeUs);
     }
 
-    if (!isSeekingClosest
-            && ((mIsAvc && IsIDR(codecBuffer->data(), codecBuffer->size()))
-            || (mIsHevc && IsIDR(
-            codecBuffer->data(), codecBuffer->size())))) {
+    if (!isSeekingClosest && ((mIsAvc && IsIDR(data, size)) || (mIsHevc && IsIDR(data, size)))) {
         // Only need to decode one IDR frame, unless we're seeking with CLOSEST
         // option, in which case we need to actually decode to targetTimeUs.
         *flags |= MediaCodec::BUFFER_FLAG_EOS;
@@ -630,7 +905,8 @@
 }
 
 status_t VideoFrameDecoder::onOutputReceived(
-        const sp<MediaCodecBuffer> &videoFrameBuffer,
+        uint8_t* frameData,
+        sp<ABuffer> imgObj,
         const sp<AMessage> &outputFormat,
         int64_t timeUs, bool *done) {
     int64_t durationUs = mDefaultSampleDurationUs;
@@ -703,7 +979,6 @@
         }
 
         mFrame = static_cast<VideoFrame*>(frameMem->unsecurePointer());
-
         setFrame(frameMem);
     }
 
@@ -712,7 +987,7 @@
     if (mCaptureLayer != nullptr) {
         return captureSurface();
     }
-    ColorConverter converter((OMX_COLOR_FORMATTYPE)srcFormat, dstFormat());
+    ColorConverter colorConverter((OMX_COLOR_FORMATTYPE)srcFormat, dstFormat());
 
     uint32_t standard, range, transfer;
     if (!outputFormat->findInt32("color-standard", (int32_t*)&standard)) {
@@ -724,22 +999,25 @@
     if (!outputFormat->findInt32("color-transfer", (int32_t*)&transfer)) {
         transfer = 0;
     }
-    sp<ABuffer> imgObj;
-    if (videoFrameBuffer->meta()->findBuffer("image-data", &imgObj)) {
+
+    if (imgObj != nullptr) {
         MediaImage2 *imageData = nullptr;
         imageData = (MediaImage2 *)(imgObj.get()->data());
         if (imageData != nullptr) {
-            converter.setSrcMediaImage2(*imageData);
+            colorConverter.setSrcMediaImage2(*imageData);
         }
     }
     if (srcFormat == COLOR_FormatYUV420Flexible && imgObj.get() == nullptr) {
         return ERROR_UNSUPPORTED;
     }
-    converter.setSrcColorSpace(standard, range, transfer);
-    if (converter.isValid()) {
+    colorConverter.setSrcColorSpace(standard, range, transfer);
+    if (colorConverter.isValid()) {
         ScopedTrace trace(ATRACE_TAG, "FrameDecoder::ColorConverter");
-        converter.convert(
-                (const uint8_t *)videoFrameBuffer->data(),
+        if (frameData == nullptr) {
+            ALOGD("frameData is null for ColorConverter");
+        }
+        colorConverter.convert(
+                (const uint8_t *)frameData,
                 width, height, stride,
                 crop_left, crop_top, crop_right, crop_bottom,
                 mFrame->getFlattenedData(),
@@ -955,7 +1233,8 @@
 }
 
 status_t MediaImageDecoder::onOutputReceived(
-        const sp<MediaCodecBuffer> &videoFrameBuffer,
+        uint8_t* frameData,
+        sp<ABuffer> imgObj,
         const sp<AMessage> &outputFormat, int64_t /*timeUs*/, bool *done) {
     if (outputFormat == NULL) {
         return ERROR_MALFORMED;
@@ -1008,8 +1287,8 @@
     if (!outputFormat->findInt32("color-transfer", (int32_t*)&transfer)) {
         transfer = 0;
     }
-    sp<ABuffer> imgObj;
-    if (videoFrameBuffer->meta()->findBuffer("image-data", &imgObj)) {
+
+    if (imgObj != nullptr) {
         MediaImage2 *imageData = nullptr;
         imageData = (MediaImage2 *)(imgObj.get()->data());
         if (imageData != nullptr) {
@@ -1058,7 +1337,7 @@
 
     if (converter.isValid()) {
         converter.convert(
-                (const uint8_t *)videoFrameBuffer->data(),
+                (const uint8_t *)frameData,
                 width, height, stride,
                 crop_left, crop_top, crop_right, crop_bottom,
                 mFrame->getFlattenedData(),
diff --git a/media/libstagefright/MPEG4Writer.cpp b/media/libstagefright/MPEG4Writer.cpp
index 15188b0..76b6aa6 100644
--- a/media/libstagefright/MPEG4Writer.cpp
+++ b/media/libstagefright/MPEG4Writer.cpp
@@ -3776,6 +3776,12 @@
             if (mStszTableEntries->count() == 0) {
                 mFirstSampleTimeRealUs = systemTime() / 1000;
                 if (timestampUs < 0 && mFirstSampleStartOffsetUs == 0) {
+                    if (WARN_UNLESS(timestampUs != INT64_MIN, "for %s track", trackName)) {
+                        copy->release();
+                        mSource->stop();
+                        mIsMalformed = true;
+                        break;
+                    }
                     mFirstSampleStartOffsetUs = -timestampUs;
                     timestampUs = 0;
                 }
diff --git a/media/libstagefright/MediaCodec.cpp b/media/libstagefright/MediaCodec.cpp
index b380ade..1422868 100644
--- a/media/libstagefright/MediaCodec.cpp
+++ b/media/libstagefright/MediaCodec.cpp
@@ -6699,8 +6699,8 @@
     if (!mDetachedSurface) {
         uint64_t usage = 0;
         if (!mSurface || mSurface->getConsumerUsage(&usage) != OK) {
-            // TODO: should we use a/the default consumer usage?
-            usage = 0;
+            // By default prepare buffer to be displayed on any of the common surfaces
+            usage = (GRALLOC_USAGE_HW_TEXTURE | GRALLOC_USAGE_HW_COMPOSER);
         }
         mDetachedSurface.reset(new ReleaseSurface(usage));
     }
diff --git a/media/libstagefright/include/FrameDecoder.h b/media/libstagefright/include/FrameDecoder.h
index e417324..94c201f 100644
--- a/media/libstagefright/include/FrameDecoder.h
+++ b/media/libstagefright/include/FrameDecoder.h
@@ -18,12 +18,15 @@
 #define FRAME_DECODER_H_
 
 #include <memory>
+#include <mutex>
+#include <queue>
 #include <vector>
 
-#include <media/stagefright/foundation/AString.h>
-#include <media/stagefright/foundation/ABase.h>
-#include <media/stagefright/MediaSource.h>
 #include <media/openmax/OMX_Video.h>
+#include <media/stagefright/MediaSource.h>
+#include <media/stagefright/foundation/ABase.h>
+#include <media/stagefright/foundation/AHandler.h>
+#include <media/stagefright/foundation/AString.h>
 #include <ui/GraphicTypes.h>
 
 namespace android {
@@ -34,11 +37,23 @@
 class MediaCodecBuffer;
 class Surface;
 class VideoFrame;
+struct AsyncCodecHandler;
 
 struct FrameRect {
     int32_t left, top, right, bottom;
 };
 
+struct InputBufferIndexQueue {
+public:
+    void enqueue(int32_t index);
+    bool dequeue(int32_t* index, int32_t timeOutUs);
+
+private:
+    std::queue<int32_t> mQueue;
+    std::mutex mMutex;
+    std::condition_variable mCondition;
+};
+
 struct FrameDecoder : public RefBase {
     FrameDecoder(
             const AString &componentName,
@@ -53,7 +68,19 @@
             const sp<MetaData> &trackMeta, int colorFormat,
             bool thumbnail = false, uint32_t bitDepth = 0);
 
+    status_t handleInputBufferAsync(int32_t index);
+    status_t handleOutputBufferAsync(int32_t index, int64_t timeUs);
+    status_t handleOutputFormatChangeAsync(sp<AMessage> format);
+
+    enum {
+        kWhatCallbackNotify,
+    };
+
 protected:
+    AString mComponentName;
+    sp<AMessage> mOutputFormat;
+    bool mUseBlockModel;
+
     virtual ~FrameDecoder();
 
     virtual sp<AMessage> onGetFormatAndSeekOptions(
@@ -64,14 +91,12 @@
 
     virtual status_t onExtractRect(FrameRect *rect) = 0;
 
-    virtual status_t onInputReceived(
-            const sp<MediaCodecBuffer> &codecBuffer,
-            MetaDataBase &sampleMeta,
-            bool firstSample,
-            uint32_t *flags) = 0;
+    virtual status_t onInputReceived(uint8_t* data, size_t size, MetaDataBase& sampleMeta,
+                                     bool firstSample, uint32_t* flags) = 0;
 
     virtual status_t onOutputReceived(
-            const sp<MediaCodecBuffer> &videoFrameBuffer,
+            uint8_t* data,
+            sp<ABuffer> imgObj,
             const sp<AMessage> &outputFormat,
             int64_t timeUs,
             bool *done) = 0;
@@ -83,7 +108,6 @@
     void setFrame(const sp<IMemory> &frameMem) { mFrameMemory = frameMem; }
 
 private:
-    AString mComponentName;
     sp<MetaData> mTrackMeta;
     sp<IMediaSource> mSource;
     OMX_COLOR_FORMATTYPE mDstFormat;
@@ -92,17 +116,32 @@
     sp<IMemory> mFrameMemory;
     MediaSource::ReadOptions mReadOptions;
     sp<MediaCodec> mDecoder;
-    sp<AMessage> mOutputFormat;
+    sp<AsyncCodecHandler> mHandler;
+    sp<ALooper> mAsyncLooper;
     bool mHaveMoreInputs;
     bool mFirstSample;
+    bool mHandleOutputBufferAsyncDone;
     sp<Surface> mSurface;
+    std::mutex mMutex;
+    std::condition_variable mOutputFramePending;
+    InputBufferIndexQueue mInputBufferIndexQueue;
 
     status_t extractInternal();
+    status_t extractInternalUsingBlockModel();
 
     DISALLOW_EVIL_CONSTRUCTORS(FrameDecoder);
 };
 struct FrameCaptureLayer;
 
+struct AsyncCodecHandler : public AHandler {
+public:
+    explicit AsyncCodecHandler(const wp<FrameDecoder>& frameDecoder);
+    virtual void onMessageReceived(const sp<AMessage>& msg);
+
+private:
+    wp<FrameDecoder> mFrameDecoder;
+};
+
 struct VideoFrameDecoder : public FrameDecoder {
     VideoFrameDecoder(
             const AString &componentName,
@@ -121,14 +160,12 @@
         return (rect == NULL) ? OK : ERROR_UNSUPPORTED;
     }
 
-    virtual status_t onInputReceived(
-            const sp<MediaCodecBuffer> &codecBuffer,
-            MetaDataBase &sampleMeta,
-            bool firstSample,
-            uint32_t *flags) override;
+    virtual status_t onInputReceived(uint8_t* data, size_t size, MetaDataBase& sampleMeta,
+                                     bool firstSample, uint32_t* flags) override;
 
     virtual status_t onOutputReceived(
-            const sp<MediaCodecBuffer> &videoFrameBuffer,
+            uint8_t* data,
+            sp<ABuffer> imgObj,
             const sp<AMessage> &outputFormat,
             int64_t timeUs,
             bool *done) override;
@@ -162,14 +199,13 @@
 
     virtual status_t onExtractRect(FrameRect *rect) override;
 
-    virtual status_t onInputReceived(
-            const sp<MediaCodecBuffer> &codecBuffer __unused,
-            MetaDataBase &sampleMeta __unused,
-            bool firstSample __unused,
-            uint32_t *flags __unused) override { return OK; }
+    virtual status_t onInputReceived(uint8_t* __unused, size_t __unused,
+                                     MetaDataBase& sampleMeta __unused, bool firstSample __unused,
+                                     uint32_t* flags __unused) override { return OK; }
 
     virtual status_t onOutputReceived(
-            const sp<MediaCodecBuffer> &videoFrameBuffer,
+            uint8_t* data,
+            sp<ABuffer> imgObj,
             const sp<AMessage> &outputFormat,
             int64_t timeUs,
             bool *done) override;
diff --git a/media/mtp/MtpFfsHandle.cpp b/media/mtp/MtpFfsHandle.cpp
index 5d68890..979edab 100644
--- a/media/mtp/MtpFfsHandle.cpp
+++ b/media/mtp/MtpFfsHandle.cpp
@@ -297,9 +297,10 @@
 }
 
 void MtpFfsHandle::close() {
-    auto timeout = std::chrono::seconds(2);
-    std::unique_lock lk(m);
-    cv.wait_for(lk, timeout ,[this]{return child_threads==0;});
+    // Join all child threads before destruction
+    for (auto& thread : mChildThreads) {
+        thread.join();
+    }
 
     io_destroy(mCtx);
     closeEndpoints();
@@ -677,12 +678,10 @@
     memcpy(temp, me.data, me.length);
     me.data = temp;
 
-    std::unique_lock lk(m);
-    child_threads++;
-    lk.unlock();
-
     std::thread t([this, me]() { return this->doSendEvent(me); });
-    t.detach();
+
+    // Store the thread object for later joining
+    mChildThreads.emplace_back(std::move(t));
     return 0;
 }
 
@@ -692,11 +691,6 @@
     if (static_cast<unsigned>(ret) != length)
         PLOG(ERROR) << "Mtp error sending event thread!";
     delete[] reinterpret_cast<char*>(me.data);
-
-    std::unique_lock lk(m);
-    child_threads--;
-    lk.unlock();
-    cv.notify_one();
 }
 
 } // namespace android
diff --git a/media/mtp/MtpFfsHandle.h b/media/mtp/MtpFfsHandle.h
index 51cdef0..8f4b769 100644
--- a/media/mtp/MtpFfsHandle.h
+++ b/media/mtp/MtpFfsHandle.h
@@ -60,9 +60,7 @@
     bool mCanceled;
     bool mBatchCancel;
 
-    std::mutex m;
-    std::condition_variable cv;
-    std::atomic<int> child_threads{0};
+    std::vector<std::thread> mChildThreads;
 
     android::base::unique_fd mControl;
     // "in" from the host's perspective => sink for mtp server
diff --git a/media/ndk/NdkImageReader.cpp b/media/ndk/NdkImageReader.cpp
index 7b19ac0..995c674 100644
--- a/media/ndk/NdkImageReader.cpp
+++ b/media/ndk/NdkImageReader.cpp
@@ -22,13 +22,15 @@
 #include "NdkImagePriv.h"
 #include "NdkImageReaderPriv.h"
 
-#include <cutils/atomic.h>
-#include <utils/Log.h>
 #include <android_media_Utils.h>
-#include <ui/PublicFormat.h>
-#include <private/android/AHardwareBufferHelpers.h>
+#include <com_android_graphics_libgui_flags.h>
 #include <grallocusage/GrallocUsageConversion.h>
 #include <gui/bufferqueue/1.0/WGraphicBufferProducer.h>
+#include <private/android/AHardwareBufferHelpers.h>
+#include <ui/PublicFormat.h>
+#include <utils/Log.h>
+
+#include <cutils/atomic.h>
 
 using namespace android;
 
@@ -291,22 +293,30 @@
 AImageReader::init() {
     mHalUsage = AHardwareBuffer_convertToGrallocUsageBits(mUsage);
 
+#if !COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
     sp<IGraphicBufferProducer> gbProducer;
     sp<IGraphicBufferConsumer> gbConsumer;
     BufferQueue::createBufferQueue(&gbProducer, &gbConsumer);
+#endif  // !COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
 
     String8 consumerName = String8::format("ImageReader-%dx%df%xu%" PRIu64 "m%d-%d-%d",
             mWidth, mHeight, mFormat, mUsage, mMaxImages, getpid(),
             createProcessUniqueId());
 
+#if COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
+    mBufferItemConsumer = new BufferItemConsumer(mHalUsage, mMaxImages, /*controlledByApp*/ true);
+#else
     mBufferItemConsumer =
             new BufferItemConsumer(gbConsumer, mHalUsage, mMaxImages, /*controlledByApp*/ true);
+#endif  // COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
     if (mBufferItemConsumer == nullptr) {
         ALOGE("Failed to allocate BufferItemConsumer");
         return AMEDIA_ERROR_UNKNOWN;
     }
 
+#if !COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
     mProducer = gbProducer;
+#endif  // !COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
     mBufferItemConsumer->setName(consumerName);
     mBufferItemConsumer->setFrameAvailableListener(mFrameListener);
     mBufferItemConsumer->setBufferFreedListener(mBufferRemovedListener);
@@ -328,10 +338,18 @@
         return AMEDIA_ERROR_UNKNOWN;
     }
     if (mUsage & AHARDWAREBUFFER_USAGE_PROTECTED_CONTENT) {
+#if COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
+        mBufferItemConsumer->setConsumerIsProtected(true);
+#else
         gbConsumer->setConsumerIsProtected(true);
+#endif  // COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
     }
 
+#if COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
+    mSurface = mBufferItemConsumer->getSurface();
+#else
     mSurface = new Surface(mProducer, /*controlledByApp*/true);
+#endif  // COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
     if (mSurface == nullptr) {
         ALOGE("Failed to create surface");
         return AMEDIA_ERROR_UNKNOWN;
@@ -578,8 +596,13 @@
         *handle = mWindowHandle;
         return AMEDIA_OK;
     }
+#if COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
+    sp<HGraphicBufferProducer> hgbp = new TWGraphicBufferProducer<HGraphicBufferProducer>(
+            mSurface->getIGraphicBufferProducer());
+#else
     sp<HGraphicBufferProducer> hgbp =
         new TWGraphicBufferProducer<HGraphicBufferProducer>(mProducer);
+#endif  // COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
     HalToken halToken;
     if (!createHalToken(hgbp, &halToken)) {
         return AMEDIA_ERROR_UNKNOWN;
diff --git a/media/ndk/NdkImageReaderPriv.h b/media/ndk/NdkImageReaderPriv.h
index 0199616..985f42b 100644
--- a/media/ndk/NdkImageReaderPriv.h
+++ b/media/ndk/NdkImageReaderPriv.h
@@ -25,6 +25,7 @@
 #include <utils/Mutex.h>
 #include <utils/StrongPointer.h>
 
+#include <com_android_graphics_libgui_flags.h>
 #include <gui/BufferItem.h>
 #include <gui/BufferItemConsumer.h>
 #include <gui/Surface.h>
@@ -161,7 +162,9 @@
 
     uint64_t mHalUsage;
 
+#if !COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
     sp<IGraphicBufferProducer> mProducer;
+#endif  // !COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
     sp<Surface>                mSurface;
     sp<BufferItemConsumer>     mBufferItemConsumer;
     sp<ANativeWindow>          mWindow;
diff --git a/media/utils/include/mediautils/jthread.h b/media/utils/include/mediautils/jthread.h
new file mode 100644
index 0000000..17532a4
--- /dev/null
+++ b/media/utils/include/mediautils/jthread.h
@@ -0,0 +1,92 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <atomic>
+#include <thread>
+#include <utility>
+
+namespace android::mediautils {
+
+namespace impl {
+class stop_source;
+/**
+ * Const view on stop source, which the running thread uses and an interface
+ * for cancellation.
+ */
+class stop_token {
+  public:
+    stop_token(const stop_source& source) : stop_source_(source) {}
+    bool stop_requested() const;
+
+  private:
+    const stop_source& stop_source_;
+};
+
+class stop_source {
+  public:
+    stop_token get_token() { return stop_token{*this}; }
+    bool stop_requested() const { return cancellation_signal_.load(); }
+    bool request_stop() {
+        auto f = false;
+        return cancellation_signal_.compare_exchange_strong(f, true);
+    }
+
+  private:
+    std::atomic_bool cancellation_signal_ = false;
+};
+
+inline bool stop_token::stop_requested() const {
+    return stop_source_.stop_requested();
+}
+}  // namespace impl
+
+using stop_token = impl::stop_token;
+/**
+ * Just a jthread, since std::jthread is still experimental in our toolchain.
+ * Implements a subset of essential functionality (co-op cancellation and join on dtor).
+ * If jthread gets picked up, usage can be cut over.
+ */
+class jthread {
+  public:
+    /**
+     * Construct/launch and thread with a callable which consumes a stop_token.
+     * The callable must be cooperatively cancellable via stop_token::stop_requested(), and will be
+     * automatically stopped then joined on destruction.
+     * Example:
+     * jthread([](stop_token stok) {
+     *     while (!stok.stop_requested()) {
+     *         // do work
+     *     }
+     * });
+     */
+    template <typename F>
+    jthread(F&& f) : stop_source_{}, thread_{std::forward<F>(f), stop_source_.get_token()} {}
+
+    ~jthread() {
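+        // Request cooperative cancellation, then block until the thread exits
+        // (same behavior as std::jthread's destructor).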
+        stop_source_.request_stop();
+        thread_.join();
+    }
+
+    bool request_stop() { return stop_source_.request_stop(); }
+
+  private:
+    // Order matters: stop_source_ must be declared before thread_, since the thread is
+    // constructed with a token that references it.
+    impl::stop_source stop_source_;
+    std::thread thread_;
+};
+}  // namespace android::mediautils
diff --git a/media/utils/tests/Android.bp b/media/utils/tests/Android.bp
index a68569a..ff11b42 100644
--- a/media/utils/tests/Android.bp
+++ b/media/utils/tests/Android.bp
@@ -237,3 +237,11 @@
         "shared_memory_allocator_tests.cpp",
     ],
 }
+
+cc_test {
+    name: "jthread_tests",
+    defaults: ["libmediautils_tests_defaults"],
+    srcs: [
+        "jthread_tests.cpp",
+    ],
+}
diff --git a/media/utils/tests/jthread_tests.cpp b/media/utils/tests/jthread_tests.cpp
new file mode 100644
index 0000000..ed77c27
--- /dev/null
+++ b/media/utils/tests/jthread_tests.cpp
@@ -0,0 +1,70 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "jthread_tests"
+
+#include <mediautils/jthread.h>
+
+#include <gtest/gtest.h>
+
+#include <atomic>
+
+using namespace android::mediautils;
+
+namespace {
+TEST(jthread_tests, dtor) {
+    std::atomic_int x = 0;
+    std::atomic_bool is_stopped = false;
+    {
+        auto jt = jthread([&](stop_token stok) {
+            while (!stok.stop_requested()) {
+                if (x.load() < std::numeric_limits<int>::max())
+                    x++;
+            }
+            is_stopped = true;
+        });
+        while (x.load() < 1000)
+            ;
+    }
+    // Check we triggered a stop on dtor
+    ASSERT_TRUE(is_stopped.load());
+    // Check we actually ran
+    ASSERT_GE(x.load(), 1000);
+}
+TEST(jthread_tests, request_stop) {
+    std::atomic_int x = 0;
+    std::atomic_bool is_stopped = false;
+    auto jt = jthread([&](stop_token stok) {
+        while (!stok.stop_requested()) {
+            if (x.load() < std::numeric_limits<int>::max())
+                x++;
+        }
+        is_stopped = true;
+    });
+    while (x.load() < 1000)
+        ;
+    // request stop manually
+    ASSERT_TRUE(jt.request_stop());
+    // busy loop till thread acks
+    while (!is_stopped.load())
+        ;
+    // Check the thread acknowledged the manual stop request
+    ASSERT_TRUE(is_stopped.load());
+    // Check we actually ran
+    ASSERT_GE(x.load(), 1000);
+}
+
+}  // namespace
diff --git a/services/audioflinger/Android.bp b/services/audioflinger/Android.bp
index bf2915a..264fc4f 100644
--- a/services/audioflinger/Android.bp
+++ b/services/audioflinger/Android.bp
@@ -152,6 +152,7 @@
         "audiopermissioncontroller",
         "av-types-aidl-cpp",
         "com.android.media.audio-aconfig-cc",
+        "com.android.media.audioserver-aconfig-cc",
         "effect-aidl-cpp",
         "libactivitymanager_aidl",
         "libaudioclient",
diff --git a/services/audioflinger/AudioFlinger.cpp b/services/audioflinger/AudioFlinger.cpp
index 77b342a..37fc1fc 100644
--- a/services/audioflinger/AudioFlinger.cpp
+++ b/services/audioflinger/AudioFlinger.cpp
@@ -187,6 +187,7 @@
 BINDER_METHOD_ENTRY(masterMute) \
 BINDER_METHOD_ENTRY(setStreamVolume) \
 BINDER_METHOD_ENTRY(setStreamMute) \
+BINDER_METHOD_ENTRY(setPortsVolume) \
 BINDER_METHOD_ENTRY(setMode) \
 BINDER_METHOD_ENTRY(setMicMute) \
 BINDER_METHOD_ENTRY(getMicMute) \
@@ -617,6 +618,7 @@
         std::vector<audio_io_handle_t> secondaryOutputs;
         bool isSpatialized;
         bool isBitPerfect;
+        float volume;
         ret = AudioSystem::getOutputForAttr(&localAttr, &io,
                                             actualSessionId,
                                             &streamType, adjAttributionSource,
@@ -624,7 +626,8 @@
                                             (audio_output_flags_t)(AUDIO_OUTPUT_FLAG_MMAP_NOIRQ |
                                                     AUDIO_OUTPUT_FLAG_DIRECT),
                                             deviceId, &portId, &secondaryOutputs, &isSpatialized,
-                                            &isBitPerfect);
+                                            &isBitPerfect,
+                                            &volume);
         if (ret != NO_ERROR) {
             config->sample_rate = fullConfig.sample_rate;
             config->channel_mask = fullConfig.channel_mask;
@@ -748,9 +751,11 @@
 
     result.append("Notification Clients:\n");
     result.append("   pid    uid  name\n");
-    for (const auto& [ _, client ] : mNotificationClients) {
-        result.appendFormat("%6d %6u  %s\n",
-                client->getPid(), client->getUid(), client->getPackageName().c_str());
+    for (size_t i = 0; i < mNotificationClients.size(); ++i) {
+        const pid_t pid = mNotificationClients[i]->getPid();
+        const uid_t uid = mNotificationClients[i]->getUid();
+        const mediautils::UidInfo::Info info = mUidInfo.getInfo(uid);
+        result.appendFormat("%6d %6u  %s\n", pid, uid, info.package.c_str());
     }
 
     result.append("Global session refs:\n");
@@ -1059,6 +1064,7 @@
     std::vector<audio_io_handle_t> secondaryOutputs;
     bool isSpatialized = false;
     bool isBitPerfect = false;
+    float volume;
 
     audio_io_handle_t effectThreadId = AUDIO_IO_HANDLE_NONE;
     std::vector<int> effectIds;
@@ -1119,7 +1125,7 @@
     lStatus = AudioSystem::getOutputForAttr(&localAttr, &output.outputId, sessionId, &streamType,
                                             adjAttributionSource, &input.config, input.flags,
                                             &output.selectedDeviceId, &portId, &secondaryOutputs,
-                                            &isSpatialized, &isBitPerfect);
+                                            &isSpatialized, &isBitPerfect, &volume);
 
     if (lStatus != NO_ERROR || output.outputId == AUDIO_IO_HANDLE_NONE) {
         ALOGE("createTrack() getOutputForAttr() return error %d or invalid output handle", lStatus);
@@ -1176,7 +1182,7 @@
         if (effectThread == nullptr) {
             effectChain = getOrphanEffectChain_l(sessionId);
         }
-        ALOGV("createTrack() sessionId: %d", sessionId);
+        ALOGV("createTrack() sessionId: %d volume: %f", sessionId, volume);
 
         output.sampleRate = input.config.sample_rate;
         output.frameCount = input.frameCount;
@@ -1191,7 +1197,7 @@
                                       input.sharedBuffer, sessionId, &output.flags,
                                       callingPid, adjAttributionSource, input.clientInfo.clientTid,
                                       &lStatus, portId, input.audioTrackCallback, isSpatialized,
-                                      isBitPerfect, &output.afTrackFlags);
+                                      isBitPerfect, &output.afTrackFlags, volume);
         LOG_ALWAYS_FATAL_IF((lStatus == NO_ERROR) && (track == 0));
         // we don't abort yet if lStatus != NO_ERROR; there is still work to be done regardless
 
@@ -1642,6 +1648,33 @@
     return NO_ERROR;
 }
 
+status_t AudioFlinger::setPortsVolume(
+        const std::vector<audio_port_handle_t>& ports, float volume, audio_io_handle_t output)
+{
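+    // Validate the port handles, volume range, and output handle before forwarding the volume
+    // to the playback or MMAP output thread that owns the given output.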
+    for (const auto& port : ports) {
+        if (port == AUDIO_PORT_HANDLE_NONE) {
+            return BAD_VALUE;
+        }
+    }
+    if (isnan(volume) || volume > 1.0f || volume < 0.0f) {
+        return BAD_VALUE;
+    }
+    if (output == AUDIO_IO_HANDLE_NONE) {
+        return BAD_VALUE;
+    }
+    audio_utils::lock_guard lock(mutex());
+    IAfPlaybackThread *thread = checkPlaybackThread_l(output);
+    if (thread != nullptr) {
+        return thread->setPortsVolume(ports, volume);
+    }
+    const sp<IAfMmapThread> mmapThread = checkMmapThread_l(output);
+    if (mmapThread != nullptr && mmapThread->isOutput()) {
+        IAfMmapPlaybackThread *mmapPlaybackThread = mmapThread->asIAfMmapPlaybackThread().get();
+        return mmapPlaybackThread->setPortsVolume(ports, volume);
+    }
+    return BAD_VALUE;
+}
+
 status_t AudioFlinger::setRequestedLatencyMode(
         audio_io_handle_t output, audio_latency_mode_t mode) {
     if (output == AUDIO_IO_HANDLE_NONE) {
@@ -2129,23 +2162,24 @@
 
 void AudioFlinger::registerClient(const sp<media::IAudioFlingerClient>& client)
 {
+    audio_utils::lock_guard _l(mutex());
     if (client == 0) {
         return;
     }
-    const pid_t pid = IPCThreadState::self()->getCallingPid();
+    pid_t pid = IPCThreadState::self()->getCallingPid();
     const uid_t uid = IPCThreadState::self()->getCallingUid();
-
-    audio_utils::lock_guard _l(mutex());
     {
         audio_utils::lock_guard _cl(clientMutex());
-        if (mNotificationClients.count(pid) == 0) {
-            const mediautils::UidInfo::Info info = mUidInfo.getInfo(uid);
-            sp<NotificationClient> notificationClient = sp<NotificationClient>::make(
-                    this, client, pid, uid, info.package);
-            ALOGV("registerClient() pid %d, uid %u, package %s",
-                    pid, uid, info.package.c_str());
+        if (mNotificationClients.indexOfKey(pid) < 0) {
+            sp<NotificationClient> notificationClient = new NotificationClient(this,
+                                                                                client,
+                                                                                pid,
+                                                                                uid);
+            ALOGV("registerClient() client %p, pid %d, uid %u",
+                    notificationClient.get(), pid, uid);
 
-            mNotificationClients[pid] = notificationClient;
+            mNotificationClients.add(pid, notificationClient);
+
             sp<IBinder> binder = IInterface::asBinder(client);
             binder->linkToDeath(notificationClient);
         }
@@ -2172,7 +2206,7 @@
         audio_utils::lock_guard _l(mutex());
         {
             audio_utils::lock_guard _cl(clientMutex());
-            mNotificationClients.erase(pid);
+            mNotificationClients.removeItem(pid);
         }
 
         ALOGV("%d died, releasing its sessions", pid);
@@ -2213,13 +2247,11 @@
             legacy2aidl_AudioIoDescriptor_AudioIoDescriptor(ioDesc));
 
     audio_utils::lock_guard _l(clientMutex());
-    if (pid != 0) {
-        if (auto it = mNotificationClients.find(pid); it != mNotificationClients.end()) {
-            it->second->audioFlingerClient()->ioConfigChanged(eventAidl, descAidl);
-        }
-    } else {
-        for (const auto& [ client_pid, client] : mNotificationClients) {
-            client->audioFlingerClient()->ioConfigChanged(eventAidl, descAidl);
+    size_t size = mNotificationClients.size();
+    for (size_t i = 0; i < size; i++) {
+        if ((pid == 0) || (mNotificationClients.keyAt(i) == pid)) {
+            mNotificationClients.valueAt(i)->audioFlingerClient()->ioConfigChanged(eventAidl,
+                                                                                   descAidl);
         }
     }
 }
@@ -2233,8 +2265,9 @@
 
     audio_utils::lock_guard _l(clientMutex());
     size_t size = mNotificationClients.size();
-    for (const auto& [_, client] : mNotificationClients) {
-        client->audioFlingerClient()->onSupportedLatencyModesChanged(outputAidl, modesAidl);
+    for (size_t i = 0; i < size; i++) {
+        mNotificationClients.valueAt(i)->audioFlingerClient()
+                ->onSupportedLatencyModesChanged(outputAidl, modesAidl);
     }
 }
 
@@ -2291,10 +2324,8 @@
 AudioFlinger::NotificationClient::NotificationClient(const sp<AudioFlinger>& audioFlinger,
                                                      const sp<media::IAudioFlingerClient>& client,
                                                      pid_t pid,
-        uid_t uid,
-        std::string_view packageName)
-    : mAudioFlinger(audioFlinger), mPid(pid), mUid(uid)
-    , mPackageName(packageName), mAudioFlingerClient(client)
+                                                     uid_t uid)
+    : mAudioFlinger(audioFlinger), mPid(pid), mUid(uid), mAudioFlingerClient(client)
 {
 }
 
@@ -2304,7 +2335,7 @@
 
 void AudioFlinger::NotificationClient::binderDied(const wp<IBinder>& who __unused)
 {
-    const auto keep = sp<NotificationClient>::fromExisting(this);
+    sp<NotificationClient> keep(this);
     mAudioFlinger->removeNotificationClient(mPid);
 }
 
@@ -3571,7 +3602,7 @@
         // is likely proxied by mediaserver (e.g CameraService) and releaseAudioSessionId() can be
         // called from a different pid leaving a stale session reference.  Also we don't know how
         // to clear this reference if the client process dies.
-        if (mNotificationClients.count(caller) == 0) {
+        if (mNotificationClients.indexOfKey(caller) < 0) {
             ALOGW("acquireAudioSessionId() unknown client %d for session %d", caller, audioSession);
             return;
         }
@@ -3832,8 +3863,7 @@
 
 
 // checkPlaybackThread_l() must be called with AudioFlinger::mutex() held
-sp<VolumeInterface> AudioFlinger::getVolumeInterface_l(audio_io_handle_t output) const
-{
+sp<VolumeInterface> AudioFlinger::getVolumeInterface_l(audio_io_handle_t output) const {
     sp<VolumeInterface> volumeInterface = mPlaybackThreads.valueFor(output).get();
     if (volumeInterface == nullptr) {
         IAfMmapThread* const mmapThread = mMmapThreads.valueFor(output).get();
@@ -4032,7 +4062,8 @@
                                                        outputFlags,
                                                        0ns /* timeout */,
                                                        frameCountToBeReady,
-                                                       track->getSpeed());
+                                                       track->getSpeed(),
+                                                       1.f /* volume */);
         status = patchTrack->initCheck();
         if (status != NO_ERROR) {
             ALOGE("Secondary output patchTrack init failed: %d", status);
@@ -5131,6 +5162,7 @@
         case TransactionCode::GET_AUDIO_MIX_PORT:
         case TransactionCode::SET_TRACKS_INTERNAL_MUTE:
         case TransactionCode::RESET_REFERENCES_FOR_TEST:
+        case TransactionCode::SET_PORTS_VOLUME:
             ALOGW("%s: transaction %d received from PID %d",
                   __func__, static_cast<int>(code), IPCThreadState::self()->getCallingPid());
             // return status only for non void methods
@@ -5218,7 +5250,7 @@
         }
     }, mediautils::TimeCheck::getDefaultTimeoutDuration(),
     mediautils::TimeCheck::getDefaultSecondChanceDuration(),
-    true /* crashOnTimeout */);
+    !property_get_bool("audio.timecheck.disabled", false) /* crashOnTimeout */);
 
     return delegate();
 }
diff --git a/services/audioflinger/AudioFlinger.h b/services/audioflinger/AudioFlinger.h
index b57a355..545fa36 100644
--- a/services/audioflinger/AudioFlinger.h
+++ b/services/audioflinger/AudioFlinger.h
@@ -96,6 +96,9 @@
     status_t setStreamMute(audio_stream_type_t stream, bool muted) final
             EXCLUDES_AudioFlinger_Mutex;
 
+    status_t setPortsVolume(const std::vector<audio_port_handle_t>& portIds, float volume,
+            audio_io_handle_t output) final EXCLUDES_AudioFlinger_Mutex;
+
     status_t setMode(audio_mode_t mode) final EXCLUDES_AudioFlinger_Mutex;
 
     status_t setMicMute(bool state) final EXCLUDES_AudioFlinger_Mutex;
@@ -479,14 +482,12 @@
                             NotificationClient(const sp<AudioFlinger>& audioFlinger,
                                                 const sp<media::IAudioFlingerClient>& client,
                                                 pid_t pid,
-                uid_t uid,
-                std::string_view packageName);
+                                                uid_t uid);
         virtual             ~NotificationClient();
 
                 sp<media::IAudioFlingerClient> audioFlingerClient() const { return mAudioFlingerClient; }
                 pid_t getPid() const { return mPid; }
                 uid_t getUid() const { return mUid; }
-                const std::string& getPackageName() const { return mPackageName; }
 
                 // IBinder::DeathRecipient
                 virtual     void        binderDied(const wp<IBinder>& who);
@@ -497,7 +498,6 @@
         const sp<AudioFlinger>  mAudioFlinger;
         const pid_t             mPid;
         const uid_t             mUid;
-        const std::string mPackageName;
         const sp<media::IAudioFlingerClient> mAudioFlingerClient;
     };
 
@@ -555,6 +555,7 @@
     IAfPlaybackThread* checkMixerThread_l(audio_io_handle_t output) const REQUIRES(mutex());
 
     sp<VolumeInterface> getVolumeInterface_l(audio_io_handle_t output) const REQUIRES(mutex());
+
     std::vector<sp<VolumeInterface>> getAllVolumeInterfaces_l() const REQUIRES(mutex());
 
 
@@ -697,7 +698,8 @@
 
     DefaultKeyedVector<audio_io_handle_t, sp<IAfRecordThread>> mRecordThreads GUARDED_BY(mutex());
 
-    std::map<pid_t, sp<NotificationClient>> mNotificationClients GUARDED_BY(clientMutex());
+    DefaultKeyedVector<pid_t, sp<NotificationClient>> mNotificationClients
+            GUARDED_BY(clientMutex());
 
                 // updated by atomic_fetch_add_explicit
     volatile atomic_uint_fast32_t mNextUniqueIds[AUDIO_UNIQUE_ID_USE_MAX];  // ctor init
diff --git a/services/audioflinger/IAfThread.h b/services/audioflinger/IAfThread.h
index 4d26aa0..8596acb 100644
--- a/services/audioflinger/IAfThread.h
+++ b/services/audioflinger/IAfThread.h
@@ -26,6 +26,7 @@
 #include <datapath/AudioStreamIn.h>
 #include <datapath/AudioStreamOut.h>
 #include <datapath/VolumeInterface.h>
+#include <datapath/VolumePortInterface.h>
 #include <fastpath/FastMixerDumpState.h>
 #include <media/DeviceDescriptorBase.h>
 #include <media/MmapStreamInterface.h>
@@ -479,7 +480,8 @@
             const sp<media::IAudioTrackCallback>& callback,
             bool isSpatialized,
             bool isBitPerfect,
-            audio_output_flags_t* afTrackFlags)
+            audio_output_flags_t* afTrackFlags,
+            float volume)
             REQUIRES(audio_utils::AudioFlinger_Mutex) = 0;
 
     virtual status_t addTrack_l(const sp<IAfTrack>& track) REQUIRES(mutex()) = 0;
@@ -555,6 +557,9 @@
 
     virtual void setTracksInternalMute(std::map<audio_port_handle_t, bool>* tracksInternalMute)
             EXCLUDES_ThreadBase_Mutex = 0;
+
+    virtual status_t setPortsVolume(const std::vector<audio_port_handle_t>& portIds, float volume)
+            EXCLUDES_ThreadBase_Mutex = 0;
 };
 
 class IAfDirectOutputThread : public virtual IAfPlaybackThread {
@@ -694,6 +699,9 @@
             AudioHwDevice* hwDev, AudioStreamOut* output, bool systemReady);
 
     virtual AudioStreamOut* clearOutput() EXCLUDES_ThreadBase_Mutex = 0;
+
+    virtual status_t setPortsVolume(const std::vector<audio_port_handle_t>& portIds, float volume)
+            EXCLUDES_ThreadBase_Mutex = 0;
 };
 
 class IAfMmapCaptureThread : public virtual IAfMmapThread {
diff --git a/services/audioflinger/IAfTrack.h b/services/audioflinger/IAfTrack.h
index a9c87ad..ee834d6 100644
--- a/services/audioflinger/IAfTrack.h
+++ b/services/audioflinger/IAfTrack.h
@@ -21,6 +21,7 @@
 #include <audio_utils/mutex.h>
 #include <audiomanager/IAudioManager.h>
 #include <binder/IMemory.h>
+#include <datapath/VolumePortInterface.h>
 #include <fastpath/FastMixerDumpState.h>
 #include <media/AudioSystem.h>
 #include <media/VolumeShaper.h>
@@ -254,7 +255,7 @@
 };
 
 // Common interface for Playback tracks.
-class IAfTrack : public virtual IAfTrackBase {
+class IAfTrack : public virtual IAfTrackBase, public virtual VolumePortInterface {
 public:
     // FillingStatus is used for suppressing volume ramp at begin of playing
     enum FillingStatus { FS_INVALID, FS_FILLING, FS_FILLED, FS_ACTIVE };
@@ -289,7 +290,8 @@
             size_t frameCountToBeReady = SIZE_MAX,
             float speed = 1.0f,
             bool isSpatialized = false,
-            bool isBitPerfect = false);
+            bool isBitPerfect = false,
+            float volume = 0.0f);
 
     virtual void pause() = 0;
     virtual void flush() = 0;
@@ -452,7 +454,7 @@
     virtual ExtendedTimestamp getClientProxyTimestamp() const = 0;
 };
 
-class IAfMmapTrack : public virtual IAfTrackBase {
+class IAfMmapTrack : public virtual IAfTrackBase, public virtual VolumePortInterface {
 public:
     static sp<IAfMmapTrack> create(IAfThreadBase* thread,
             const audio_attributes_t& attr,
@@ -463,7 +465,8 @@
             bool isOut,
             const android::content::AttributionSourceState& attributionSource,
             pid_t creatorPid,
-            audio_port_handle_t portId = AUDIO_PORT_HANDLE_NONE);
+            audio_port_handle_t portId = AUDIO_PORT_HANDLE_NONE,
+            float volume = 0.0f);
 
     // protected by MMapThread::mLock
     virtual void setSilenced_l(bool silenced) = 0;
@@ -583,7 +586,8 @@
                                              *  as soon as possible to have
                                              *  the lowest possible latency
                                              *  even if it might glitch. */
-            float speed = 1.0f);
+            float speed = 1.0f,
+            float volume = 1.0f);
 };
 
 class IAfPatchRecord : public virtual IAfRecordTrack, public virtual IAfPatchTrackBase {
diff --git a/services/audioflinger/MmapTracks.h b/services/audioflinger/MmapTracks.h
index 85ce142..8758bd0 100644
--- a/services/audioflinger/MmapTracks.h
+++ b/services/audioflinger/MmapTracks.h
@@ -35,7 +35,8 @@
                             bool isOut,
                             const android::content::AttributionSourceState& attributionSource,
                             pid_t creatorPid,
-                            audio_port_handle_t portId = AUDIO_PORT_HANDLE_NONE);
+                            audio_port_handle_t portId = AUDIO_PORT_HANDLE_NONE,
+                            float volume = 0.0f);
     ~MmapTrack() override;
 
     status_t initCheck() const final;
@@ -65,6 +66,13 @@
     void processMuteEvent_l(const sp<IAudioManager>& audioManager,
                             mute_state_t muteState)
                             /* REQUIRES(MmapPlaybackThread::mLock) */ final;
+
+    // VolumePortInterface implementation
+    void setPortVolume(float volume) override {
+        mVolume = volume;
+    }
+    float getPortVolume() const override { return mVolume; }
+
 private:
     DISALLOW_COPY_AND_ASSIGN(MmapTrack);
 
@@ -87,6 +95,8 @@
             /* GUARDED_BY(MmapPlaybackThread::mLock) */;
     mute_state_t mMuteState
             /* GUARDED_BY(MmapPlaybackThread::mLock) */;
+
+    float mVolume = 0.0f;
 };  // end of Track
 
 } // namespace android
\ No newline at end of file
diff --git a/services/audioflinger/PatchPanel.cpp b/services/audioflinger/PatchPanel.cpp
index 35f17c1..02d4fc2 100644
--- a/services/audioflinger/PatchPanel.cpp
+++ b/services/audioflinger/PatchPanel.cpp
@@ -649,7 +649,8 @@
                                            outputFlags,
                                            {} /*timeout*/,
                                            frameCountToBeReady,
-                                           1.0f);
+                                           1.0f /*speed*/,
+                                           1.0f /*volume*/);
     status = mPlayback.checkTrack(tempPatchTrack.get());
     if (status != NO_ERROR) {
         return status;
diff --git a/services/audioflinger/PlaybackTracks.h b/services/audioflinger/PlaybackTracks.h
index 2cc6236..84758a4 100644
--- a/services/audioflinger/PlaybackTracks.h
+++ b/services/audioflinger/PlaybackTracks.h
@@ -96,7 +96,8 @@
                                 size_t frameCountToBeReady = SIZE_MAX,
                                 float speed = 1.0f,
                                 bool isSpatialized = false,
-                                bool isBitPerfect = false);
+                                bool isBitPerfect = false,
+                                float volume = 0.0f);
     ~Track() override;
     status_t initCheck() const final;
     void appendDumpHeader(String8& result) const final;
@@ -222,6 +223,11 @@
 
     bool getInternalMute() const final { return mInternalMute; }
     void setInternalMute(bool muted) final { mInternalMute = muted; }
+
+    // VolumePortInterface implementation
+    void setPortVolume(float volume) override;
+    float getPortVolume() const override { return mVolume; }
+
 protected:
 
     DISALLOW_COPY_AND_ASSIGN(Track);
@@ -362,6 +368,8 @@
         for (auto& tp : mTeePatches) { f(tp.patchTrack); }
     };
 
+    void                populateUsageAndContentTypeFromStreamType();
+
     size_t              mPresentationCompleteFrames = 0; // (Used for Mixed tracks)
                                     // The number of frames written to the
                                     // audio HAL when this track is considered fully rendered.
@@ -403,8 +411,8 @@
     // access these two variables only when holding player thread lock.
     std::unique_ptr<os::PersistableBundle> mMuteEventExtras;
     mute_state_t        mMuteState;
-
     bool                mInternalMute = false;
+    std::atomic<float> mVolume = 0.0f;
 };  // end of Track
 
 
@@ -501,7 +509,8 @@
                                                                     *  as soon as possible to have
                                                                     *  the lowest possible latency
                                                                     *  even if it might glitch. */
-                                   float speed = 1.0f);
+                                   float speed = 1.0f,
+                                   float volume = 1.0f);
     ~PatchTrack() override;
 
     size_t framesReady() const final;
diff --git a/services/audioflinger/Threads.cpp b/services/audioflinger/Threads.cpp
index 400613a..56f4e18 100644
--- a/services/audioflinger/Threads.cpp
+++ b/services/audioflinger/Threads.cpp
@@ -49,6 +49,7 @@
 #include <binder/IServiceManager.h>
 #include <binder/PersistableBundle.h>
 #include <com_android_media_audio.h>
+#include <com_android_media_audioserver.h>
 #include <cutils/bitops.h>
 #include <cutils/properties.h>
 #include <fastpath/AutoPark.h>
@@ -122,6 +123,7 @@
 }
 
 using com::android::media::permission::ValidatedAttributionSourceState;
+namespace audioserver_flags = com::android::media::audioserver;
 
 namespace android {
 
@@ -2216,17 +2218,18 @@
                 (int64_t)(mIsMsdDevice ? AUDIO_DEVICE_OUT_BUS // turn on by default for MSD
                                        : AUDIO_DEVICE_NONE));
     }
-
-    for (int i = AUDIO_STREAM_MIN; i < AUDIO_STREAM_FOR_POLICY_CNT; ++i) {
-        const audio_stream_type_t stream{static_cast<audio_stream_type_t>(i)};
-        mStreamTypes[stream].volume = 0.0f;
-        mStreamTypes[stream].mute = mAfThreadCallback->streamMute_l(stream);
+    if (!audioserver_flags::portid_volume_management()) {
+        for (int i = AUDIO_STREAM_MIN; i < AUDIO_STREAM_FOR_POLICY_CNT; ++i) {
+            const audio_stream_type_t stream{static_cast<audio_stream_type_t>(i)};
+            mStreamTypes[stream].volume = 0.0f;
+            mStreamTypes[stream].mute = mAfThreadCallback->streamMute_l(stream);
+        }
+        // Audio patch and call assistant volume are always max
+        mStreamTypes[AUDIO_STREAM_PATCH].volume = 1.0f;
+        mStreamTypes[AUDIO_STREAM_PATCH].mute = false;
+        mStreamTypes[AUDIO_STREAM_CALL_ASSISTANT].volume = 1.0f;
+        mStreamTypes[AUDIO_STREAM_CALL_ASSISTANT].mute = false;
     }
-    // Audio patch and call assistant volume are always max
-    mStreamTypes[AUDIO_STREAM_PATCH].volume = 1.0f;
-    mStreamTypes[AUDIO_STREAM_PATCH].mute = false;
-    mStreamTypes[AUDIO_STREAM_CALL_ASSISTANT].volume = 1.0f;
-    mStreamTypes[AUDIO_STREAM_CALL_ASSISTANT].mute = false;
 }
 
 PlaybackThread::~PlaybackThread()
@@ -2277,16 +2280,17 @@
 void PlaybackThread::dumpTracks_l(int fd, const Vector<String16>& /* args */)
 {
     String8 result;
-
-    result.appendFormat("  Stream volumes in dB: ");
-    for (int i = 0; i < AUDIO_STREAM_CNT; ++i) {
-        const stream_type_t *st = &mStreamTypes[i];
-        if (i > 0) {
-            result.appendFormat(", ");
-        }
-        result.appendFormat("%d:%.2g", i, 20.0 * log10(st->volume));
-        if (st->mute) {
-            result.append("M");
+    if (!audioserver_flags::portid_volume_management()) {
+        result.appendFormat("  Stream volumes in dB: ");
+        for (int i = 0; i < AUDIO_STREAM_CNT; ++i) {
+            const stream_type_t *st = &mStreamTypes[i];
+            if (i > 0) {
+                result.appendFormat(", ");
+            }
+            result.appendFormat("%d:%.2g", i, 20.0 * log10(st->volume));
+            if (st->mute) {
+                result.append("M");
+            }
         }
     }
     result.append("\n");
@@ -2394,7 +2398,8 @@
         const sp<media::IAudioTrackCallback>& callback,
         bool isSpatialized,
         bool isBitPerfect,
-        audio_output_flags_t *afTrackFlags)
+        audio_output_flags_t *afTrackFlags,
+        float volume)
 {
     size_t frameCount = *pFrameCount;
     size_t notificationFrameCount = *pNotificationFrameCount;
@@ -2723,7 +2728,7 @@
                           nullptr /* buffer */, (size_t)0 /* bufferSize */, sharedBuffer,
                           sessionId, creatorPid, attributionSource, trackFlags,
                           IAfTrackBase::TYPE_DEFAULT, portId, SIZE_MAX /*frameCountToBeReady*/,
-                          speed, isSpatialized, isBitPerfect);
+                          speed, isSpatialized, isBitPerfect, volume);
 
         lStatus = track != 0 ? track->initCheck() : (status_t) NO_MEMORY;
         if (lStatus != NO_ERROR) {
@@ -2851,6 +2856,22 @@
     return mStreamTypes[stream].volume;
 }
 
+status_t PlaybackThread::setPortsVolume(
+        const std::vector<audio_port_handle_t>& portIds, float volume) {
+    audio_utils::lock_guard _l(mutex());
+    for (const auto& portId : portIds) {
+        for (size_t i = 0; i < mTracks.size(); i++) {
+            sp<IAfTrack> track = mTracks[i].get();
+            if (portId == track->portId()) {
+                track->setPortVolume(volume);
+                break;
+            }
+        }
+    }
+    broadcast_l();
+    return NO_ERROR;
+}
+
 void PlaybackThread::setVolumeForOutput_l(float left, float right) const
 {
     mOutput->stream->setVolume(left, right);
@@ -5794,12 +5815,19 @@
                 }
                 sp<AudioTrackServerProxy> proxy = track->audioTrackServerProxy();
                 float volume;
-                if (track->isPlaybackRestricted() || mStreamTypes[track->streamType()].mute) {
-                    volume = 0.f;
+                if (!audioserver_flags::portid_volume_management()) {
+                    if (track->isPlaybackRestricted() || mStreamTypes[track->streamType()].mute) {
+                        volume = 0.f;
+                    } else {
+                        volume = masterVolume * mStreamTypes[track->streamType()].volume;
+                    }
                 } else {
-                    volume = masterVolume * mStreamTypes[track->streamType()].volume;
+                    if (track->isPlaybackRestricted()) {
+                        volume = 0.f;
+                    } else {
+                        volume = masterVolume * track->getPortVolume();
+                    }
                 }
-
                 handleVoipVolume_l(&volume);
 
                 // cache the combined master volume and stream type volume for fast mixer; this
@@ -5811,15 +5839,23 @@
                 gain_minifloat_packed_t vlr = proxy->getVolumeLR();
                 float vlf = float_from_gain(gain_minifloat_unpack_left(vlr));
                 float vrf = float_from_gain(gain_minifloat_unpack_right(vlr));
-
-                track->processMuteEvent_l(mAfThreadCallback->getOrCreateAudioManager(),
-                    /*muteState=*/{masterVolume == 0.f,
-                                   mStreamTypes[track->streamType()].volume == 0.f,
-                                   mStreamTypes[track->streamType()].mute,
-                                   track->isPlaybackRestricted(),
-                                   vlf == 0.f && vrf == 0.f,
-                                   vh == 0.f});
-
+                if (!audioserver_flags::portid_volume_management()) {
+                    track->processMuteEvent_l(mAfThreadCallback->getOrCreateAudioManager(),
+                            /*muteState=*/{masterVolume == 0.f,
+                                           mStreamTypes[track->streamType()].volume == 0.f,
+                                           mStreamTypes[track->streamType()].mute,
+                                           track->isPlaybackRestricted(),
+                                           vlf == 0.f && vrf == 0.f,
+                                           vh == 0.f});
+                } else {
+                    track->processMuteEvent_l(mAfThreadCallback->getOrCreateAudioManager(),
+                            /*muteState=*/{masterVolume == 0.f,
+                                           track->getPortVolume() == 0.f,
+                                           /* muteFromStreamMuted= */ false,
+                                           track->isPlaybackRestricted(),
+                                           vlf == 0.f && vrf == 0.f,
+                                           vh == 0.f});
+                }
                 vlf *= volume;
                 vrf *= volume;
 
@@ -5970,16 +6006,22 @@
             uint32_t vl, vr;       // in U8.24 integer format
             float vlf, vrf, vaf;   // in [0.0, 1.0] float format
             // read original volumes with volume control
-            float v = masterVolume * mStreamTypes[track->streamType()].volume;
             // Always fetch volumeshaper volume to ensure state is updated.
             const sp<AudioTrackServerProxy> proxy = track->audioTrackServerProxy();
             const float vh = track->getVolumeHandler()->getVolume(
                     track->audioTrackServerProxy()->framesReleased()).first;
-
-            if (mStreamTypes[track->streamType()].mute || track->isPlaybackRestricted()) {
-                v = 0;
+            float v;
+            if (!audioserver_flags::portid_volume_management()) {
+                v = masterVolume * mStreamTypes[track->streamType()].volume;
+                if (mStreamTypes[track->streamType()].mute || track->isPlaybackRestricted()) {
+                    v = 0;
+                }
+            } else {
+                v = masterVolume * track->getPortVolume();
+                if (track->isPlaybackRestricted()) {
+                    v = 0;
+                }
             }
-
             handleVoipVolume_l(&v);
 
             if (track->isPausing()) {
@@ -5999,15 +6041,23 @@
                     ALOGV("Track right volume out of range: %.3g", vrf);
                     vrf = GAIN_FLOAT_UNITY;
                 }
-
-                track->processMuteEvent_l(mAfThreadCallback->getOrCreateAudioManager(),
-                    /*muteState=*/{masterVolume == 0.f,
-                                   mStreamTypes[track->streamType()].volume == 0.f,
-                                   mStreamTypes[track->streamType()].mute,
-                                   track->isPlaybackRestricted(),
-                                   vlf == 0.f && vrf == 0.f,
-                                   vh == 0.f});
-
+                if (!audioserver_flags::portid_volume_management()) {
+                    track->processMuteEvent_l(mAfThreadCallback->getOrCreateAudioManager(),
+                            /*muteState=*/{masterVolume == 0.f,
+                                           mStreamTypes[track->streamType()].volume == 0.f,
+                                           mStreamTypes[track->streamType()].mute,
+                                           track->isPlaybackRestricted(),
+                                           vlf == 0.f && vrf == 0.f,
+                                           vh == 0.f});
+                } else {
+                    track->processMuteEvent_l(mAfThreadCallback->getOrCreateAudioManager(),
+                            /*muteState=*/{masterVolume == 0.f,
+                                           track->getPortVolume() == 0.f,
+                                           /* muteFromStreamMuted= */ false,
+                                           track->isPlaybackRestricted(),
+                                           vlf == 0.f && vrf == 0.f,
+                                           vh == 0.f});
+                }
                 // now apply the master volume and stream type volume and shaper volume
                 vlf *= v * vh;
                 vrf *= v * vh;
@@ -6733,34 +6783,64 @@
 
     const bool clientVolumeMute = (left == 0.f && right == 0.f);
 
-    if (mMasterMute || mStreamTypes[track->streamType()].mute || track->isPlaybackRestricted()) {
-        left = right = 0;
-    } else {
-        float typeVolume = mStreamTypes[track->streamType()].volume;
-        const float v = mMasterVolume * typeVolume * shaperVolume;
+    if (!audioserver_flags::portid_volume_management()) {
+        if (mMasterMute || mStreamTypes[track->streamType()].mute ||
+            track->isPlaybackRestricted()) {
+            left = right = 0;
+        } else {
+            float typeVolume = mStreamTypes[track->streamType()].volume;
+            const float v = mMasterVolume * typeVolume * shaperVolume;
 
-        if (left > GAIN_FLOAT_UNITY) {
-            left = GAIN_FLOAT_UNITY;
-        }
-        if (right > GAIN_FLOAT_UNITY) {
-            right = GAIN_FLOAT_UNITY;
-        }
-        left *= v;
-        right *= v;
-        if (mAfThreadCallback->getMode() != AUDIO_MODE_IN_COMMUNICATION
+            if (left > GAIN_FLOAT_UNITY) {
+                left = GAIN_FLOAT_UNITY;
+            }
+            if (right > GAIN_FLOAT_UNITY) {
+                right = GAIN_FLOAT_UNITY;
+            }
+            left *= v;
+            right *= v;
+            if (mAfThreadCallback->getMode() != AUDIO_MODE_IN_COMMUNICATION
                 || audio_channel_count_from_out_mask(mChannelMask) > 1) {
-            left *= mMasterBalanceLeft; // DirectOutputThread balance applied as track volume
-            right *= mMasterBalanceRight;
+                left *= mMasterBalanceLeft; // DirectOutputThread balance applied as track volume
+                right *= mMasterBalanceRight;
+            }
         }
-    }
+        track->processMuteEvent_l(mAfThreadCallback->getOrCreateAudioManager(),
+                /*muteState=*/{mMasterMute,
+                               mStreamTypes[track->streamType()].volume == 0.f,
+                               mStreamTypes[track->streamType()].mute,
+                               track->isPlaybackRestricted(),
+                               clientVolumeMute,
+                               shaperVolume == 0.f});
+    } else {
+        if (mMasterMute || track->isPlaybackRestricted()) {
+            left = right = 0;
+        } else {
+            float typeVolume = track->getPortVolume();
+            const float v = mMasterVolume * typeVolume * shaperVolume;
 
-    track->processMuteEvent_l(mAfThreadCallback->getOrCreateAudioManager(),
-        /*muteState=*/{mMasterMute,
-                       mStreamTypes[track->streamType()].volume == 0.f,
-                       mStreamTypes[track->streamType()].mute,
-                       track->isPlaybackRestricted(),
-                       clientVolumeMute,
-                       shaperVolume == 0.f});
+            if (left > GAIN_FLOAT_UNITY) {
+                left = GAIN_FLOAT_UNITY;
+            }
+            if (right > GAIN_FLOAT_UNITY) {
+                right = GAIN_FLOAT_UNITY;
+            }
+            left *= v;
+            right *= v;
+            if (mAfThreadCallback->getMode() != AUDIO_MODE_IN_COMMUNICATION
+                || audio_channel_count_from_out_mask(mChannelMask) > 1) {
+                left *= mMasterBalanceLeft; // DirectOutputThread balance applied as track volume
+                right *= mMasterBalanceRight;
+            }
+        }
+        track->processMuteEvent_l(mAfThreadCallback->getOrCreateAudioManager(),
+                /*muteState=*/{mMasterMute,
+                               track->getPortVolume() == 0.f,
+                               /* muteFromStreamMuted= */ false,
+                               track->isPlaybackRestricted(),
+                               clientVolumeMute,
+                               shaperVolume == 0.f});
+    }
 
     if (lastTrack) {
         track->setFinalVolume(left, right);
@@ -7854,7 +7934,9 @@
         ALOGE("addOutputTrack() initCheck failed %d", status);
         return;
     }
-    thread->setStreamVolume(AUDIO_STREAM_PATCH, 1.0f);
+    if (!audioserver_flags::portid_volume_management()) {
+        thread->setStreamVolume(AUDIO_STREAM_PATCH, 1.0f);
+    }
     mOutputTracks.add(outputTrack);
     ALOGV("addOutputTrack() track %p, on thread %p", outputTrack.get(), thread);
     updateWaitTime_l();
@@ -10341,6 +10423,7 @@
 
     const auto localSessionId = mSessionId;
     auto localAttr = mAttr;
+    float volume = 0.0f;
     if (isOutput()) {
         audio_config_t config = AUDIO_CONFIG_INITIALIZER;
         config.sample_rate = mSampleRate;
@@ -10364,7 +10447,8 @@
                                             &portId,
                                             &secondaryOutputs,
                                             &isSpatialized,
-                                            &isBitPerfect);
+                                            &isBitPerfect,
+                                            &volume);
         mutex().lock();
         mAttr = localAttr;
         ALOGD_IF(!secondaryOutputs.empty(),
@@ -10433,7 +10517,8 @@
             this, attr == nullptr ? mAttr : *attr, mSampleRate, mFormat,
                                         mChannelMask, mSessionId, isOutput(),
                                         client.attributionSource,
-                                        IPCThreadState::self()->getCallingPid(), portId);
+                                        IPCThreadState::self()->getCallingPid(), portId,
+                                        volume);
     if (!isOutput()) {
         track->setSilenced_l(isClientSilenced_l(portId));
     }
@@ -11018,18 +11103,18 @@
     mChannelCount = audio_channel_count_from_out_mask(mChannelMask);
     mMasterVolume = afThreadCallback->masterVolume_l();
     mMasterMute = afThreadCallback->masterMute_l();
-
-    for (int i = AUDIO_STREAM_MIN; i < AUDIO_STREAM_FOR_POLICY_CNT; ++i) {
-        const audio_stream_type_t stream{static_cast<audio_stream_type_t>(i)};
-        mStreamTypes[stream].volume = 0.0f;
-        mStreamTypes[stream].mute = mAfThreadCallback->streamMute_l(stream);
+    if (!audioserver_flags::portid_volume_management()) {
+        for (int i = AUDIO_STREAM_MIN; i < AUDIO_STREAM_FOR_POLICY_CNT; ++i) {
+            const audio_stream_type_t stream{static_cast<audio_stream_type_t>(i)};
+            mStreamTypes[stream].volume = 0.0f;
+            mStreamTypes[stream].mute = mAfThreadCallback->streamMute_l(stream);
+        }
+        // Audio patch and call assistant volume are always max
+        mStreamTypes[AUDIO_STREAM_PATCH].volume = 1.0f;
+        mStreamTypes[AUDIO_STREAM_PATCH].mute = false;
+        mStreamTypes[AUDIO_STREAM_CALL_ASSISTANT].volume = 1.0f;
+        mStreamTypes[AUDIO_STREAM_CALL_ASSISTANT].mute = false;
     }
-    // Audio patch and call assistant volume are always max
-    mStreamTypes[AUDIO_STREAM_PATCH].volume = 1.0f;
-    mStreamTypes[AUDIO_STREAM_PATCH].mute = false;
-    mStreamTypes[AUDIO_STREAM_CALL_ASSISTANT].volume = 1.0f;
-    mStreamTypes[AUDIO_STREAM_CALL_ASSISTANT].mute = false;
-
     if (mAudioHwDev) {
         if (mAudioHwDev->canSetMasterVolume()) {
             mMasterVolume = 1.0;
@@ -11108,6 +11193,21 @@
     }
 }
 
+status_t MmapPlaybackThread::setPortsVolume(
+        const std::vector<audio_port_handle_t>& portIds, float volume) {
+    audio_utils::lock_guard _l(mutex());
+    for (const auto& portId : portIds) {
+        for (const sp<IAfMmapTrack>& track : mActiveTracks) {
+            if (portId == track->portId()) {
+                track->setPortVolume(volume);
+                break;
+            }
+        }
+    }
+    broadcast_l();
+    return NO_ERROR;
+}
+
 void MmapPlaybackThread::invalidateTracks(audio_stream_type_t streamType)
 {
     audio_utils::lock_guard _l(mutex());
@@ -11141,14 +11241,26 @@
 void MmapPlaybackThread::processVolume_l()
 NO_THREAD_SAFETY_ANALYSIS // access of track->processMuteEvent_l
 {
-    float volume;
-
-    if (mMasterMute || streamMuted_l()) {
-        volume = 0;
+    float volume = 0;
+    if (!audioserver_flags::portid_volume_management()) {
+        if (mMasterMute || streamMuted_l()) {
+            volume = 0;
+        } else {
+            volume = mMasterVolume * streamVolume_l();
+        }
     } else {
-        volume = mMasterVolume * streamVolume_l();
+        if (mMasterMute) {
+            volume = 0;
+        } else {
+            // All mmap tracks are declared with the same audio attributes to the audio policy
+            // manager. Hence, they follow the same routing / volume group, and any volume change
+            // is broadcast to all of them, so arbitrarily take the first track's volume.
+            size_t numtracks = mActiveTracks.size();
+            if (numtracks) {
+                volume = mMasterVolume * mActiveTracks[0]->getPortVolume();
+            }
+        }
     }
-
     if (volume != mHalVolFloat) {
         // Convert volumes from float to 8.24
         uint32_t vol = (uint32_t)(volume * (1 << 24));
@@ -11181,14 +11293,25 @@
         }
         for (const sp<IAfMmapTrack>& track : mActiveTracks) {
             track->setMetadataHasChanged();
-            track->processMuteEvent_l(mAfThreadCallback->getOrCreateAudioManager(),
-                /*muteState=*/{mMasterMute,
-                               streamVolume_l() == 0.f,
-                               streamMuted_l(),
-                               // TODO(b/241533526): adjust logic to include mute from AppOps
-                               false /*muteFromPlaybackRestricted*/,
-                               false /*muteFromClientVolume*/,
-                               false /*muteFromVolumeShaper*/});
+            if (!audioserver_flags::portid_volume_management()) {
+                track->processMuteEvent_l(mAfThreadCallback->getOrCreateAudioManager(),
+                        /*muteState=*/{mMasterMute,
+                        streamVolume_l() == 0.f,
+                        streamMuted_l(),
+                        // TODO(b/241533526): adjust logic to include mute from AppOps
+                        false /*muteFromPlaybackRestricted*/,
+                        false /*muteFromClientVolume*/,
+                        false /*muteFromVolumeShaper*/});
+            } else {
+                track->processMuteEvent_l(mAfThreadCallback->getOrCreateAudioManager(),
+                    /*muteState=*/{mMasterMute,
+                                   track->getPortVolume() == 0.f,
+                                   /* muteFromStreamMuted= */ false,
+                                   // TODO(b/241533526): adjust logic to include mute from AppOps
+                                   false /*muteFromPlaybackRestricted*/,
+                                   false /*muteFromClientVolume*/,
+                                   false /*muteFromVolumeShaper*/});
+            }
         }
     }
 }
@@ -11295,9 +11418,13 @@
 void MmapPlaybackThread::dumpInternals_l(int fd, const Vector<String16>& args)
 {
     MmapThread::dumpInternals_l(fd, args);
-
-    dprintf(fd, "  Stream type: %d Stream volume: %f HAL volume: %f Stream mute %d\n",
-            mStreamType, streamVolume_l(), mHalVolFloat, streamMuted_l());
+    if (!audioserver_flags::portid_volume_management()) {
+        dprintf(fd, "  Stream type: %d Stream volume: %f HAL volume: %f Stream mute %d",
+                mStreamType, streamVolume_l(), mHalVolFloat, streamMuted_l());
+    } else {
+        dprintf(fd, "  HAL volume: %f", mHalVolFloat);
+    }
+    dprintf(fd, "\n");
     dprintf(fd, "  Master volume: %f Master mute %d\n", mMasterVolume, mMasterMute);
 }
 
diff --git a/services/audioflinger/Threads.h b/services/audioflinger/Threads.h
index 10a77ef..a7a2630 100644
--- a/services/audioflinger/Threads.h
+++ b/services/audioflinger/Threads.h
@@ -836,6 +836,12 @@
                     typename SortedVector<sp<T>>::iterator end() {
                         return mActiveTracks.end();
                     }
+                    typename SortedVector<const sp<T>>::iterator begin() const {
+                        return mActiveTracks.begin();
+                    }
+                    typename SortedVector<const sp<T>>::iterator end() const {
+                        return mActiveTracks.end();
+                    }
 
                     // Due to Binder recursion optimization, clear() and updatePowerState()
                     // cannot be called from a Binder thread because they may call back into
@@ -1011,6 +1017,9 @@
     void setStreamVolume(audio_stream_type_t stream, float value) final EXCLUDES_ThreadBase_Mutex;
     void setStreamMute(audio_stream_type_t stream, bool muted) final EXCLUDES_ThreadBase_Mutex;
     float streamVolume(audio_stream_type_t stream) const final EXCLUDES_ThreadBase_Mutex;
+    status_t setPortsVolume(const std::vector<audio_port_handle_t>& portIds, float volume)
+            final EXCLUDES_ThreadBase_Mutex;
+
     void setVolumeForOutput_l(float left, float right) const final;
 
     sp<IAfTrack> createTrack_l(
@@ -1035,7 +1044,8 @@
                                 const sp<media::IAudioTrackCallback>& callback,
                                 bool isSpatialized,
                                 bool isBitPerfect,
-                                audio_output_flags_t* afTrackFlags) final
+                                audio_output_flags_t* afTrackFlags,
+                                float volume) final
             REQUIRES(audio_utils::AudioFlinger_Mutex);
 
     bool isTrackActive(const sp<IAfTrack>& track) const final {
@@ -2385,6 +2395,8 @@
     void setStreamVolume(audio_stream_type_t stream, float value) final EXCLUDES_ThreadBase_Mutex;
     void setStreamMute(audio_stream_type_t stream, bool muted) final EXCLUDES_ThreadBase_Mutex;
     float streamVolume(audio_stream_type_t stream) const final EXCLUDES_ThreadBase_Mutex;
+    status_t setPortsVolume(const std::vector<audio_port_handle_t>& portIds, float volume)
+            final EXCLUDES_ThreadBase_Mutex;
 
     void setMasterMute_l(bool muted) REQUIRES(mutex()) { mMasterMute = muted; }
 
diff --git a/services/audioflinger/TrackBase.h b/services/audioflinger/TrackBase.h
index a0b85f7..1342b7b 100644
--- a/services/audioflinger/TrackBase.h
+++ b/services/audioflinger/TrackBase.h
@@ -347,7 +347,7 @@
     size_t              mBufferSize; // size of mBuffer in bytes
     // we don't really need a lock for these
     MirroredVariable<track_state>  mState;
-    const audio_attributes_t mAttr;
+    audio_attributes_t  mAttr;
     const uint32_t      mSampleRate;    // initial sample rate only; for tracks which
                         // support dynamic rates, the current value is in control block
     const audio_format_t mFormat;
diff --git a/services/audioflinger/Tracks.cpp b/services/audioflinger/Tracks.cpp
index f5f11cc..5aa58a2 100644
--- a/services/audioflinger/Tracks.cpp
+++ b/services/audioflinger/Tracks.cpp
@@ -715,7 +715,8 @@
         size_t frameCountToBeReady,
         float speed,
         bool isSpatialized,
-        bool isBitPerfect) {
+        bool isBitPerfect,
+        float volume) {
     return sp<Track>::make(thread,
             client,
             streamType,
@@ -736,7 +737,8 @@
             frameCountToBeReady,
             speed,
             isSpatialized,
-            isBitPerfect);
+            isBitPerfect,
+            volume);
 }
 
 // Track constructor must be called with AudioFlinger::mLock and ThreadBase::mLock held
@@ -761,7 +763,8 @@
             size_t frameCountToBeReady,
             float speed,
             bool isSpatialized,
-            bool isBitPerfect)
+            bool isBitPerfect,
+            float volume)
     :   TrackBase(thread, client, attr, sampleRate, format, channelMask, frameCount,
                   // TODO: Using unsecurePointer() has some associated security pitfalls
                   //       (see declaration for details).
@@ -797,7 +800,8 @@
     mFlags(flags),
     mSpeed(speed),
     mIsSpatialized(isSpatialized),
-    mIsBitPerfect(isBitPerfect)
+    mIsBitPerfect(isBitPerfect),
+    mVolume(volume)
 {
     // client == 0 implies sharedBuffer == 0
     ALOG_ASSERT(!(client == 0 && sharedBuffer != 0));
@@ -843,6 +847,14 @@
         thread->fastTrackAvailMask_l() &= ~(1 << i);
     }
 
+    populateUsageAndContentTypeFromStreamType();
+
+    // Audio patch and call assistant volume are always max
+    if (mAttr.usage == AUDIO_USAGE_CALL_ASSISTANT
+            || mAttr.usage == AUDIO_USAGE_VIRTUAL_SOURCE) {
+        mVolume = 1.0f;
+    }
+
     mServerLatencySupported = checkServerLatencySupported(format, flags);
 #ifdef TEE_SINK
     mTee.setId(std::string("_") + std::to_string(mThreadIoHandle)
@@ -865,6 +877,62 @@
     mTrackMetrics.logConstructor(creatorPid, uid, id(), traits, streamType);
 }
 
+// When attributes are undefined, derive default values from stream type.
+// See AudioAttributes.java, usageForStreamType() and Builder.setInternalLegacyStreamType()
+void Track::populateUsageAndContentTypeFromStreamType() {
+    if (mAttr.usage == AUDIO_USAGE_UNKNOWN) {
+        switch (mStreamType) {
+        case AUDIO_STREAM_VOICE_CALL:
+            mAttr.usage = AUDIO_USAGE_VOICE_COMMUNICATION;
+            mAttr.content_type = AUDIO_CONTENT_TYPE_SPEECH;
+            break;
+        case AUDIO_STREAM_SYSTEM:
+            mAttr.usage = AUDIO_USAGE_ASSISTANCE_SONIFICATION;
+            mAttr.content_type = AUDIO_CONTENT_TYPE_SONIFICATION;
+            break;
+        case AUDIO_STREAM_RING:
+            mAttr.usage = AUDIO_USAGE_NOTIFICATION_TELEPHONY_RINGTONE;
+            mAttr.content_type = AUDIO_CONTENT_TYPE_SONIFICATION;
+            break;
+        case AUDIO_STREAM_MUSIC:
+            mAttr.usage = AUDIO_USAGE_MEDIA;
+            mAttr.content_type = AUDIO_CONTENT_TYPE_MUSIC;
+            break;
+        case AUDIO_STREAM_ALARM:
+            mAttr.usage = AUDIO_USAGE_ALARM;
+            mAttr.content_type = AUDIO_CONTENT_TYPE_SONIFICATION;
+            break;
+        case AUDIO_STREAM_NOTIFICATION:
+            mAttr.usage = AUDIO_USAGE_NOTIFICATION;
+            mAttr.content_type = AUDIO_CONTENT_TYPE_SONIFICATION;
+            break;
+        case AUDIO_STREAM_DTMF:
+            mAttr.usage = AUDIO_USAGE_VOICE_COMMUNICATION_SIGNALLING;
+            mAttr.content_type = AUDIO_CONTENT_TYPE_SONIFICATION;
+            break;
+        case AUDIO_STREAM_ACCESSIBILITY:
+            mAttr.usage = AUDIO_USAGE_ASSISTANCE_ACCESSIBILITY;
+            mAttr.content_type = AUDIO_CONTENT_TYPE_SPEECH;
+            break;
+        case AUDIO_STREAM_ASSISTANT:
+            mAttr.usage = AUDIO_USAGE_ASSISTANT;
+            mAttr.content_type = AUDIO_CONTENT_TYPE_SPEECH;
+            break;
+        case AUDIO_STREAM_REROUTING:
+        case AUDIO_STREAM_PATCH:
+            mAttr.usage = AUDIO_USAGE_VIRTUAL_SOURCE;
+            // unknown content type
+            break;
+        case AUDIO_STREAM_CALL_ASSISTANT:
+            mAttr.usage = AUDIO_USAGE_CALL_ASSISTANT;
+            mAttr.content_type = AUDIO_CONTENT_TYPE_SPEECH;
+            break;
+        default:
+            break;
+        }
+    }
+}
+
 Track::~Track()
 {
     ALOGV("%s(%d)", __func__, mId);
@@ -923,7 +991,7 @@
     result.appendFormat("Type     Id Active Client Session Port Id S  Flags "
                         "  Format Chn mask  SRate "
                         "ST Usg CT "
-                        " G db  L dB  R dB  VS dB "
+                        " G db  L dB  R dB  VS dB  PortVol dB "
                         "  Server FrmCnt  FrmRdy F Underruns  Flushed BitPerfect InternalMute"
                         "%s\n",
                         isServerLatencySupported() ? "   Latency" : "");
@@ -1009,7 +1077,7 @@
     result.appendFormat("%7s %6u %7u %7u %2s 0x%03X "
                         "%08X %08X %6u "
                         "%2u %3x %2x "
-                        "%5.2g %5.2g %5.2g %5.2g%c "
+                        "%5.2g %5.2g %5.2g %5.2g%c %11.2g "
                         "%08X %6zu%c %6zu %c %9u%c %7u %10s %12s",
             active ? "yes" : "no",
             (mClient == 0) ? getpid() : mClient->pid(),
@@ -1031,6 +1099,7 @@
             20.0 * log10(float_from_gain(gain_minifloat_unpack_right(vlr))),
             20.0 * log10(vsVolume.first), // VolumeShaper(s) total volume
             vsVolume.second ? 'A' : ' ',  // if any VolumeShapers active
+            20.0 * log10(mVolume),
 
             mCblk->mServer,
             bufferSizeInFrames,
@@ -1532,6 +1601,16 @@
     return INVALID_OPERATION;
 }
 
+void Track::setPortVolume(float volume) {
+    mVolume = volume;
+    if (mType != TYPE_PATCH) {
+        // Do not recursively propagate a PatchTrack setPortVolume to
+        // downstream PatchTracks.
+        forEachTeePatchTrack_l([volume](const auto& patchTrack) {
+                patchTrack->setPortVolume(volume); });
+    }
+}
+
 VolumeShaper::Status Track::applyVolumeShaper(
         const sp<VolumeShaper::Configuration>& configuration,
         const sp<VolumeShaper::Operation>& operation)
@@ -1587,59 +1666,6 @@
             .gain = mFinalVolume,
     };
 
-    // When attributes are undefined, derive default values from stream type.
-    // See AudioAttributes.java, usageForStreamType() and Builder.setInternalLegacyStreamType()
-    if (mAttr.usage == AUDIO_USAGE_UNKNOWN) {
-        switch (mStreamType) {
-        case AUDIO_STREAM_VOICE_CALL:
-            metadata.base.usage = AUDIO_USAGE_VOICE_COMMUNICATION;
-            metadata.base.content_type = AUDIO_CONTENT_TYPE_SPEECH;
-            break;
-        case AUDIO_STREAM_SYSTEM:
-            metadata.base.usage = AUDIO_USAGE_ASSISTANCE_SONIFICATION;
-            metadata.base.content_type = AUDIO_CONTENT_TYPE_SONIFICATION;
-            break;
-        case AUDIO_STREAM_RING:
-            metadata.base.usage = AUDIO_USAGE_NOTIFICATION_TELEPHONY_RINGTONE;
-            metadata.base.content_type = AUDIO_CONTENT_TYPE_SONIFICATION;
-            break;
-        case AUDIO_STREAM_MUSIC:
-            metadata.base.usage = AUDIO_USAGE_MEDIA;
-            metadata.base.content_type = AUDIO_CONTENT_TYPE_MUSIC;
-            break;
-        case AUDIO_STREAM_ALARM:
-            metadata.base.usage = AUDIO_USAGE_ALARM;
-            metadata.base.content_type = AUDIO_CONTENT_TYPE_SONIFICATION;
-            break;
-        case AUDIO_STREAM_NOTIFICATION:
-            metadata.base.usage = AUDIO_USAGE_NOTIFICATION;
-            metadata.base.content_type = AUDIO_CONTENT_TYPE_SONIFICATION;
-            break;
-        case AUDIO_STREAM_DTMF:
-            metadata.base.usage = AUDIO_USAGE_VOICE_COMMUNICATION_SIGNALLING;
-            metadata.base.content_type = AUDIO_CONTENT_TYPE_SONIFICATION;
-            break;
-        case AUDIO_STREAM_ACCESSIBILITY:
-            metadata.base.usage = AUDIO_USAGE_ASSISTANCE_ACCESSIBILITY;
-            metadata.base.content_type = AUDIO_CONTENT_TYPE_SPEECH;
-            break;
-        case AUDIO_STREAM_ASSISTANT:
-            metadata.base.usage = AUDIO_USAGE_ASSISTANT;
-            metadata.base.content_type = AUDIO_CONTENT_TYPE_SPEECH;
-            break;
-        case AUDIO_STREAM_REROUTING:
-            metadata.base.usage = AUDIO_USAGE_VIRTUAL_SOURCE;
-            // unknown content type
-            break;
-        case AUDIO_STREAM_CALL_ASSISTANT:
-            metadata.base.usage = AUDIO_USAGE_CALL_ASSISTANT;
-            metadata.base.content_type = AUDIO_CONTENT_TYPE_SPEECH;
-            break;
-        default:
-            break;
-        }
-    }
-
     metadata.channel_mask = mChannelMask;
     strncpy(metadata.tags, mAttr.tags, AUDIO_ATTRIBUTES_TAGS_MAX_SIZE);
     *backInserter++ = metadata;
@@ -2191,14 +2217,13 @@
             size_t frameCount,
             const AttributionSourceState& attributionSource)
     :   Track(playbackThread, NULL, AUDIO_STREAM_PATCH,
-              audio_attributes_t{} /* currently unused for output track */,
+              AUDIO_ATTRIBUTES_INITIALIZER,
               sampleRate, format, channelMask, frameCount,
               nullptr /* buffer */, (size_t)0 /* bufferSize */, nullptr /* sharedBuffer */,
               AUDIO_SESSION_NONE, getpid(), attributionSource, AUDIO_OUTPUT_FLAG_NONE,
               TYPE_OUTPUT),
     mActive(false), mSourceThread(sourceThread)
 {
-
     if (mCblk != NULL) {
         mOutBuffer.frameCount = 0;
         playbackThread->addOutputTrack_l(this);
@@ -2464,7 +2489,8 @@
                                          *  as soon as possible to have
                                          *  the lowest possible latency
                                          *  even if it might glitch. */
-        float speed)
+        float speed,
+        float volume)
 {
     return sp<PatchTrack>::make(
             playbackThread,
@@ -2478,7 +2504,8 @@
             flags,
             timeout,
             frameCountToBeReady,
-            speed);
+            speed,
+            volume);
 }
 
 PatchTrack::PatchTrack(IAfPlaybackThread* playbackThread,
@@ -2492,13 +2519,15 @@
                                                      audio_output_flags_t flags,
                                                      const Timeout& timeout,
                                                      size_t frameCountToBeReady,
-                                                     float speed)
+                                                     float speed,
+                                                     float volume)
     :   Track(playbackThread, NULL, streamType,
-              audio_attributes_t{} /* currently unused for patch track */,
+              AUDIO_ATTRIBUTES_INITIALIZER,
               sampleRate, format, channelMask, frameCount,
               buffer, bufferSize, nullptr /* sharedBuffer */,
               AUDIO_SESSION_NONE, getpid(), audioServerAttributionSource(getpid()), flags,
-              TYPE_PATCH, AUDIO_PORT_HANDLE_NONE, frameCountToBeReady, speed),
+              TYPE_PATCH, AUDIO_PORT_HANDLE_NONE, frameCountToBeReady, speed,
+              false /*isSpatialized*/, false /*isBitPerfect*/, volume),
         PatchTrackBase(mCblk ? new AudioTrackClientProxy(mCblk, mBuffer, frameCount, mFrameSize,
                         true /*clientInServer*/) : nullptr,
                        playbackThread, timeout)
@@ -3482,7 +3511,8 @@
           bool isOut,
           const android::content::AttributionSourceState& attributionSource,
           pid_t creatorPid,
-          audio_port_handle_t portId)
+          audio_port_handle_t portId,
+          float volume)
 {
     return sp<MmapTrack>::make(
             thread,
@@ -3494,7 +3524,8 @@
             isOut,
             attributionSource,
             creatorPid,
-            portId);
+            portId,
+            volume);
 }
 
 MmapTrack::MmapTrack(IAfThreadBase* thread,
@@ -3506,7 +3537,8 @@
         bool isOut,
         const AttributionSourceState& attributionSource,
         pid_t creatorPid,
-        audio_port_handle_t portId)
+        audio_port_handle_t portId,
+        float volume)
     :   TrackBase(thread, NULL, attr, sampleRate, format,
                   channelMask, (size_t)0 /* frameCount */,
                   nullptr /* buffer */, (size_t)0 /* bufferSize */,
@@ -3517,10 +3549,15 @@
                   TYPE_DEFAULT, portId,
                   std::string(AMEDIAMETRICS_KEY_PREFIX_AUDIO_MMAP) + std::to_string(portId)),
         mPid(VALUE_OR_FATAL(aidl2legacy_int32_t_uid_t(attributionSource.pid))),
-            mSilenced(false), mSilencedNotified(false)
+            mSilenced(false), mSilencedNotified(false), mVolume(volume)
 {
     // Once this item is logged by the server, the client can add properties.
     mTrackMetrics.logConstructor(creatorPid, uid(), id());
+    if (isOut && (attr.usage == AUDIO_USAGE_CALL_ASSISTANT
+            || attr.usage == AUDIO_USAGE_VIRTUAL_SOURCE)) {
+        // Audio patch and call assistant volume are always max
+        mVolume = 1.0f;
+    }
 }
 
 MmapTrack::~MmapTrack()
@@ -3599,8 +3636,8 @@
 
 void MmapTrack::appendDumpHeader(String8& result) const
 {
-    result.appendFormat("Client Session Port Id  Format Chn mask  SRate Flags %s\n",
-                        isOut() ? "Usg CT": "Source");
+    result.appendFormat("Client Session Port Id  Format Chn mask  SRate Flags %s  %s\n",
+                        isOut() ? "Usg CT": "Source", isOut() ? "PortVol dB" : "");
 }
 
 void MmapTrack::appendDump(String8& result, bool active __unused) const
@@ -3615,6 +3652,7 @@
             mAttr.flags);
     if (isOut()) {
         result.appendFormat("%3x %2x", mAttr.usage, mAttr.content_type);
+        result.appendFormat("%11.2g", 20.0 * log10(mVolume));
     } else {
         result.appendFormat("%6x", mAttr.source);
     }
diff --git a/services/audioflinger/datapath/VolumePortInterface.h b/services/audioflinger/datapath/VolumePortInterface.h
new file mode 100644
index 0000000..fb1c463
--- /dev/null
+++ b/services/audioflinger/datapath/VolumePortInterface.h
@@ -0,0 +1,30 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <system/audio.h>
+#include <utils/RefBase.h>
+
+namespace android {
+
+class VolumePortInterface : public virtual RefBase {
+public:
+    virtual void setPortVolume(float volume) = 0;
+    virtual float getPortVolume() const = 0;
+};
+
+}  // namespace android
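
For orientation, a minimal sketch of a class implementing this new interface; the class name and default value are illustrative only, and the std::atomic<float> member mirrors how the playback Track stores its port volume elsewhere in this change.

#include <atomic>

#include <datapath/VolumePortInterface.h>
#include <utils/RefBase.h>

namespace android {

// Hypothetical implementer, not part of the patch: remembers the last volume
// applied through setPortVolume() and hands it back to whatever mixes or dumps it.
class ExamplePortVolumeHolder : public VolumePortInterface {
public:
    void setPortVolume(float volume) override { mVolume = volume; }
    float getPortVolume() const override { return mVolume; }

private:
    std::atomic<float> mVolume = 1.0f;  // illustrative default
};

}  // namespace android
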
diff --git a/services/audiopolicy/AudioPolicyInterface.h b/services/audiopolicy/AudioPolicyInterface.h
index 1bac259..e849bb4 100644
--- a/services/audiopolicy/AudioPolicyInterface.h
+++ b/services/audiopolicy/AudioPolicyInterface.h
@@ -147,7 +147,8 @@
                                       std::vector<audio_io_handle_t> *secondaryOutputs,
                                       output_type_t *outputType,
                                       bool *isSpatialized,
-                                      bool *isBitPerfect) = 0;
+                                      bool *isBitPerfect,
+                                      float *volume) = 0;
     // indicates to the audio policy manager that the output starts being used by corresponding
     // stream.
     virtual status_t startOutput(audio_port_handle_t portId) = 0;
@@ -515,6 +516,18 @@
     // for each output (destination device) it is attached to.
     virtual status_t setStreamVolume(audio_stream_type_t stream, float volume,
                                      audio_io_handle_t output, int delayMs = 0) = 0;
+    /**
+     * Set the volume on the given AudioTrack port ids for a particular output.
+     * For the same user setting, a given volume group and its associated port ids
+     * can have different volumes for each output (destination device) they are attached to.
+     * @param ports the AudioTrack port ids to apply the volume to
+     * @param volume the volume to apply
+     * @param output the output (io handle) the ports are attached to
+     * @param delayMs the delay in milliseconds before applying the volume
+     * @return NO_ERROR if successful
+     */
+    virtual status_t setPortsVolume(const std::vector<audio_port_handle_t>& ports, float volume,
+            audio_io_handle_t output, int delayMs = 0) = 0;
 
     // function enabling to send proprietary informations directly from audio policy manager to
     // audio hardware interface.
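
A minimal usage sketch of the new client-interface method, assuming a valid AudioPolicyClientInterface implementation; the function name, include paths, and port id values are assumptions for the sketch, and Volume::DbToAmpl is the same dB-to-amplitude conversion the policy code uses elsewhere in this change.

#include <vector>

#include "AudioPolicyInterface.h"  // assumed include path for this sketch
#include "Volume.h"                // assumed include path for Volume::DbToAmpl

namespace android {

// Hypothetical call site: apply one gain to every AudioTrack port id that
// follows a given volume source on one output, with no extra delay.
void exampleSetPortsVolume(AudioPolicyClientInterface& client, audio_io_handle_t output) {
    const std::vector<audio_port_handle_t> ports = {42, 43};  // illustrative port ids
    const float volumeAmpl = Volume::DbToAmpl(-12.0f);        // -12 dB as a linear gain
    client.setPortsVolume(ports, volumeAmpl, output, /*delayMs=*/0);
}

}  // namespace android
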
diff --git a/services/audiopolicy/common/managerdefinitions/Android.bp b/services/audiopolicy/common/managerdefinitions/Android.bp
index 051e975..4dedcd6 100644
--- a/services/audiopolicy/common/managerdefinitions/Android.bp
+++ b/services/audiopolicy/common/managerdefinitions/Android.bp
@@ -39,6 +39,8 @@
         "android.media.audiopolicy-aconfig-cc",
         "audioclient-types-aidl-cpp",
         "audiopolicy-types-aidl-cpp",
+        "com.android.media.audioserver-aconfig-cc",
+        "libaconfig_storage_read_api_cc",
         "libaudioclient_aidl_conversion",
         "libaudiofoundation",
         "libaudiopolicy",
@@ -51,6 +53,7 @@
         "libmedia_helper",
         "libutils",
         "libxml2",
+        "server_configurable_flags",
     ],
     export_shared_lib_headers: [
         "libaudiofoundation",
diff --git a/services/audiopolicy/common/managerdefinitions/include/AudioOutputDescriptor.h b/services/audiopolicy/common/managerdefinitions/include/AudioOutputDescriptor.h
index bfb28a5..3c296e7 100644
--- a/services/audiopolicy/common/managerdefinitions/include/AudioOutputDescriptor.h
+++ b/services/audiopolicy/common/managerdefinitions/include/AudioOutputDescriptor.h
@@ -491,6 +491,13 @@
 
     virtual std::string info() const override;
 
+    /**
+     * Finds the port ids of all clients on this output that follow the given volume source.
+     * @param vs the volume source to be considered
+     * @return vector of port ids of the clients following the given volume source.
+     */
+    std::vector<audio_port_handle_t> getPortsForVolumeSource(const VolumeSource& vs);
+
     const sp<IOProfile> mProfile;          // I/O profile this output derives from
     audio_io_handle_t mIoHandle;           // output handle
     uint32_t mLatency;                  //
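
For orientation, a hedged sketch of how the lookup declared above is meant to feed the per-port volume path (the actual implementation and call sites are in the AudioOutputDescriptor.cpp hunks later in this change); the helper name and include paths are assumptions for the sketch.

#include <vector>

#include "AudioOutputDescriptor.h"  // assumed include path for this sketch
#include "AudioPolicyInterface.h"   // assumed include path for this sketch

namespace android {

// Illustrative only: gather the port ids of the clients that follow one volume
// source on this output, then push a single precomputed gain to all of them.
void examplePushGainForVolumeSource(SwAudioOutputDescriptor& desc,
                                    AudioPolicyClientInterface& client,
                                    const VolumeSource& vs,
                                    float volumeAmpl,
                                    int delayMs) {
    const std::vector<audio_port_handle_t> ports = desc.getPortsForVolumeSource(vs);
    if (!ports.empty()) {
        client.setPortsVolume(ports, volumeAmpl, desc.mIoHandle, delayMs);
    }
}

}  // namespace android
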
diff --git a/services/audiopolicy/common/managerdefinitions/src/AudioOutputDescriptor.cpp b/services/audiopolicy/common/managerdefinitions/src/AudioOutputDescriptor.cpp
index 6fef215..848051c 100644
--- a/services/audiopolicy/common/managerdefinitions/src/AudioOutputDescriptor.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/AudioOutputDescriptor.cpp
@@ -27,6 +27,7 @@
 #include "HwModule.h"
 #include "TypeConverter.h"
 #include "policy.h"
+#include <com_android_media_audioserver.h>
 #include <media/AudioGain.h>
 #include <media/AudioParameter.h>
 #include <media/AudioPolicy.h>
@@ -34,6 +35,8 @@
 // A device mask for all audio output devices that are considered "remote" when evaluating
 // active output devices in isStreamActiveRemotely()
 
+namespace audioserver_flags = com::android::media::audioserver;
+
 namespace android {
 
 static const DeviceTypeSet& getAllOutRemoteDevices() {
@@ -498,17 +501,33 @@
         const DeviceTypeSet& deviceTypes, uint32_t delayMs) {
     // volume source active and more than one volume source is active, otherwise, no-op or let
     // setVolume controlling SW and/or HW Gains
-    if (!streamTypes.empty() && isActive(vs) && (getActiveVolumeSources().size() > 1)) {
-        for (const auto& devicePort : devices()) {
-            if (isSingleDeviceType(deviceTypes, devicePort->type()) &&
+    if (!audioserver_flags::portid_volume_management()) {
+        if (!streamTypes.empty() && isActive(vs) && (getActiveVolumeSources().size() > 1)) {
+            for (const auto& devicePort : devices()) {
+                if (isSingleDeviceType(deviceTypes, devicePort->type()) &&
                     devicePort->hasGainController(true /*canUseForVolume*/)) {
-                float volumeAmpl = muted ? 0.0f : Volume::DbToAmpl(0);
-                ALOGV("%s: output: %d, vs: %d, muted: %d, active vs count: %zu", __func__,
-                      mIoHandle, vs, muted, getActiveVolumeSources().size());
-                for (const auto &stream : streamTypes) {
-                    mClientInterface->setStreamVolume(stream, volumeAmpl, mIoHandle, delayMs);
+                    float volumeAmpl = muted ? 0.0f : Volume::DbToAmpl(0);
+                    ALOGV("%s: output: %d, vs: %d, muted: %d, active vs count: %zu", __func__,
+                          mIoHandle, vs, muted, getActiveVolumeSources().size());
+                    for (const auto &stream : streamTypes) {
+                        mClientInterface->setStreamVolume(stream, volumeAmpl, mIoHandle, delayMs);
+                    }
+                    return;
                 }
-                return;
+            }
+        }
+    } else {
+        if (isActive(vs) && (getActiveVolumeSources().size() > 1)) {
+            for (const auto &devicePort: devices()) {
+                if (isSingleDeviceType(deviceTypes, devicePort->type()) &&
+                    devicePort->hasGainController(true /*canUseForVolume*/)) {
+                    float volumeAmpl = muted ? 0.0f : Volume::DbToAmpl(0);
+                    ALOGV("%s: output: %d, vs: %d, muted: %d, active vs count: %zu", __func__,
+                          mIoHandle, vs, muted, getActiveVolumeSources().size());
+                    mClientInterface->setPortsVolume(
+                            getPortsForVolumeSource(vs), volumeAmpl, mIoHandle, delayMs);
+                    return;
+                }
             }
         }
     }
@@ -528,8 +547,14 @@
             VolumeSource callVolSrc = getVoiceSource();
             if (callVolSrc != VOLUME_SOURCE_NONE && volumeDb != getCurVolume(callVolSrc)) {
                 setCurVolume(callVolSrc, volumeDb, true);
-                mClientInterface->setStreamVolume(
-                        AUDIO_STREAM_VOICE_CALL, Volume::DbToAmpl(volumeDb), mIoHandle, delayMs);
+                float volumeAmpl = Volume::DbToAmpl(volumeDb);
+                if (audioserver_flags::portid_volume_management()) {
+                    mClientInterface->setPortsVolume(getPortsForVolumeSource(callVolSrc),
+                            volumeAmpl, mIoHandle, delayMs);
+                } else {
+                    mClientInterface->setStreamVolume(AUDIO_STREAM_VOICE_CALL,
+                            volumeAmpl, mIoHandle, delayMs);
+                }
             }
         }
         return false;
@@ -539,25 +564,34 @@
     }
     for (const auto& devicePort : devices()) {
         // APM loops on all group, so filter on active group to set the port gain,
-        // let the other groups set the stream volume as per legacy
+        // let the other groups set the sw volume as per legacy
         // TODO: Pass in the device address and check against it.
         if (isSingleDeviceType(deviceTypes, devicePort->type()) &&
                 devicePort->hasGainController(true) && isActive(vs)) {
             ALOGV("%s: device %s has gain controller", __func__, devicePort->toString().c_str());
             // @todo: here we might be in trouble if the SwOutput has several active clients with
             // different Volume Source (or if we allow several curves within same volume group)
-            //
-            // @todo: default stream volume to max (0) when using HW Port gain?
-            // Allows to set SW Gain on AudioFlinger if:
-            //    -volume group has explicit stream(s) associated
-            //    -volume group with no explicit stream(s) is the only active source on this output
-            // Allows to mute SW Gain on AudioFlinger only for volume group with explicit stream(s)
-            if (!streamTypes.empty() || (getActiveVolumeSources().size() == 1)) {
-                const bool canMute = muted && (volumeDb != 0.0f) && !streamTypes.empty();
-                float volumeAmpl = canMute ? 0.0f : Volume::DbToAmpl(0);
-                for (const auto &stream : streams) {
-                    mClientInterface->setStreamVolume(stream, volumeAmpl, mIoHandle, delayMs);
+            if (!audioserver_flags::portid_volume_management()) {
+                // @todo: default stream volume to max (0) when using HW Port gain?
+                // Allows setting the SW Gain on AudioFlinger if:
+                //    - the volume group has explicit stream(s) associated
+                //    - the volume group with no explicit stream(s) is the only active source on
+                //      this output
+                // Allows muting the SW Gain on AudioFlinger only for volume groups with explicit
+                // stream(s)
+                if (!streamTypes.empty() || (getActiveVolumeSources().size() == 1)) {
+                    const bool canMute = muted && (volumeDb != 0.0f) && !streamTypes.empty();
+                    float volumeAmpl = canMute ? 0.0f : Volume::DbToAmpl(0);
+                    for (const auto &stream: streams) {
+                        mClientInterface->setStreamVolume(stream, volumeAmpl, mIoHandle, delayMs);
+                    }
                 }
+            } else {
+                float volumeAmpl = (muted && volumeDb != 0.0f) ? 0.0f : Volume::DbToAmpl(0);
+                ALOGV("%s: output: %d, vs: %d, active vs count: %zu", __func__,
+                      mIoHandle, vs, getActiveVolumeSources().size());
+                mClientInterface->setPortsVolume(
+                        getPortsForVolumeSource(vs), volumeAmpl, mIoHandle, delayMs);
             }
             AudioGains gains = devicePort->getGains();
             int gainMinValueInMb = gains[0]->getMinValueInMb();
@@ -577,20 +611,47 @@
     // Force VOICE_CALL to track BLUETOOTH_SCO stream volume when bluetooth audio is enabled
     float volumeAmpl = Volume::DbToAmpl(getCurVolume(vs));
     if (hasStream(streams, AUDIO_STREAM_BLUETOOTH_SCO)) {
-        mClientInterface->setStreamVolume(AUDIO_STREAM_VOICE_CALL, volumeAmpl, mIoHandle, delayMs);
         VolumeSource callVolSrc = getVoiceSource();
+        if (audioserver_flags::portid_volume_management()) {
+            if (callVolSrc != VOLUME_SOURCE_NONE) {
+                mClientInterface->setPortsVolume(getPortsForVolumeSource(callVolSrc), volumeAmpl,
+                        mIoHandle, delayMs);
+            }
+        } else {
+            mClientInterface->setStreamVolume(AUDIO_STREAM_VOICE_CALL, volumeAmpl, mIoHandle,
+                    delayMs);
+        }
         if (callVolSrc != VOLUME_SOURCE_NONE) {
             setCurVolume(callVolSrc, getCurVolume(vs), true);
         }
     }
-    for (const auto &stream : streams) {
-        ALOGV("%s output %d for volumeSource %d, volume %f, delay %d stream=%s", __func__,
-              mIoHandle, vs, volumeDb, delayMs, toString(stream).c_str());
-        mClientInterface->setStreamVolume(stream, volumeAmpl, mIoHandle, delayMs);
+    if (audioserver_flags::portid_volume_management()) {
+        ALOGV("%s output %d for volumeSource %d, volume %f, delay %d active=%d", __func__,
+              mIoHandle, vs, volumeDb, delayMs, isActive(vs));
+        mClientInterface->setPortsVolume(getPortsForVolumeSource(vs), volumeAmpl, mIoHandle,
+                                         delayMs);
+    } else {
+        for (const auto &stream : streams) {
+            ALOGV("%s output %d for volumeSource %d, volume %f, delay %d stream=%s", __func__,
+                  mIoHandle, vs, volumeDb, delayMs, toString(stream).c_str());
+            mClientInterface->setStreamVolume(stream, volumeAmpl, mIoHandle, delayMs);
+        }
     }
     return true;
 }
 
+std::vector<audio_port_handle_t> SwAudioOutputDescriptor::getPortsForVolumeSource(
+        const VolumeSource& vs)
+{
+    std::vector<audio_port_handle_t> portsForVolumeSource;
+    for (const auto& client : getClientIterable()) {
+        if (client->volumeSource() == vs) {
+            portsForVolumeSource.push_back(client->portId());
+        }
+    }
+    return portsForVolumeSource;
+}
+
 status_t SwAudioOutputDescriptor::open(const audio_config_t *halConfig,
                                        const audio_config_base_t *mixerConfig,
                                        const DeviceVector &devices,
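
For context on the new per-port volume path: the getPortsForVolumeSource() helper added above simply collects the port handle of every active client whose volume source matches, so the whole group can be addressed in one setPortsVolume() call. A minimal, standalone sketch of the same grouping idea, using a hypothetical Client type and plain int handles rather than the AOSP classes:

```cpp
#include <vector>

// Hypothetical stand-ins for the real types: a client carries the port handle of its
// track and the volume source (volume group) it was attached with.
struct Client {
    int portId;
    int volumeSource;
};

// Collect the port handles of every client that belongs to a given volume source,
// mirroring the shape of SwAudioOutputDescriptor::getPortsForVolumeSource() above.
std::vector<int> portsForVolumeSource(const std::vector<Client>& clients, int vs) {
    std::vector<int> ports;
    for (const auto& client : clients) {
        if (client.volumeSource == vs) {
            ports.push_back(client.portId);
        }
    }
    return ports;
}
```
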
diff --git a/services/audiopolicy/engineconfigurable/parameter-framework/plugin/Android.bp b/services/audiopolicy/engineconfigurable/parameter-framework/plugin/Android.bp
index 3dc2229..c9a77a4 100644
--- a/services/audiopolicy/engineconfigurable/parameter-framework/plugin/Android.bp
+++ b/services/audiopolicy/engineconfigurable/parameter-framework/plugin/Android.bp
@@ -38,6 +38,8 @@
     shared_libs: [
         "libaudiopolicycomponents",
         "libaudiopolicyengineconfigurable",
+        "libbase",
+        "libcutils",
         "liblog",
         "libmedia_helper",
         "libparameter",
diff --git a/services/audiopolicy/fuzzer/aidl/corpus/seed-2024-08-29-0 b/services/audiopolicy/fuzzer/aidl/corpus/seed-2024-08-29-0
new file mode 100644
index 0000000..4d539b7
--- /dev/null
+++ b/services/audiopolicy/fuzzer/aidl/corpus/seed-2024-08-29-0
Binary files differ
diff --git a/services/audiopolicy/fuzzer/aidl/corpus/seed-2024-08-29-1 b/services/audiopolicy/fuzzer/aidl/corpus/seed-2024-08-29-1
new file mode 100644
index 0000000..8af7d2f
--- /dev/null
+++ b/services/audiopolicy/fuzzer/aidl/corpus/seed-2024-08-29-1
Binary files differ
diff --git a/services/audiopolicy/fuzzer/aidl/corpus/seed-2024-08-29-2 b/services/audiopolicy/fuzzer/aidl/corpus/seed-2024-08-29-2
new file mode 100644
index 0000000..b89b77e
--- /dev/null
+++ b/services/audiopolicy/fuzzer/aidl/corpus/seed-2024-08-29-2
Binary files differ
diff --git a/services/audiopolicy/fuzzer/aidl/corpus/seed-2024-08-29-3 b/services/audiopolicy/fuzzer/aidl/corpus/seed-2024-08-29-3
new file mode 100644
index 0000000..6e966e9
--- /dev/null
+++ b/services/audiopolicy/fuzzer/aidl/corpus/seed-2024-08-29-3
Binary files differ
diff --git a/services/audiopolicy/fuzzer/aidl/corpus/seed-2024-08-29-4 b/services/audiopolicy/fuzzer/aidl/corpus/seed-2024-08-29-4
new file mode 100644
index 0000000..8ccf24d
--- /dev/null
+++ b/services/audiopolicy/fuzzer/aidl/corpus/seed-2024-08-29-4
Binary files differ
diff --git a/services/audiopolicy/fuzzer/aidl/corpus/seed-2024-08-29-5 b/services/audiopolicy/fuzzer/aidl/corpus/seed-2024-08-29-5
new file mode 100644
index 0000000..223d1df
--- /dev/null
+++ b/services/audiopolicy/fuzzer/aidl/corpus/seed-2024-08-29-5
Binary files differ
diff --git a/services/audiopolicy/fuzzer/aidl/corpus/seed-2024-08-29-6 b/services/audiopolicy/fuzzer/aidl/corpus/seed-2024-08-29-6
new file mode 100644
index 0000000..ad54b83
--- /dev/null
+++ b/services/audiopolicy/fuzzer/aidl/corpus/seed-2024-08-29-6
Binary files differ
diff --git a/services/audiopolicy/fuzzer/aidl/corpus/seed-2024-08-29-7 b/services/audiopolicy/fuzzer/aidl/corpus/seed-2024-08-29-7
new file mode 100644
index 0000000..f4eabf4
--- /dev/null
+++ b/services/audiopolicy/fuzzer/aidl/corpus/seed-2024-08-29-7
Binary files differ
diff --git a/services/audiopolicy/fuzzer/audiopolicy_fuzzer.cpp b/services/audiopolicy/fuzzer/audiopolicy_fuzzer.cpp
index 6416a47..fd40c04 100644
--- a/services/audiopolicy/fuzzer/audiopolicy_fuzzer.cpp
+++ b/services/audiopolicy/fuzzer/audiopolicy_fuzzer.cpp
@@ -265,6 +265,7 @@
     AudioPolicyInterface::output_type_t outputType;
     bool isSpatialized;
     bool isBitPerfect;
+    float volume;
 
     // TODO b/182392769: use attribution source util
     AttributionSourceState attributionSource;
@@ -272,7 +273,7 @@
     attributionSource.token = sp<BBinder>::make();
     if (mManager->getOutputForAttr(&attr, output, AUDIO_SESSION_NONE, &stream, attributionSource,
             &config, &flags, selectedDeviceId, portId, {}, &outputType, &isSpatialized,
-            &isBitPerfect) != OK) {
+            &isBitPerfect, &volume) != OK) {
         return false;
     }
     if (*output == AUDIO_IO_HANDLE_NONE || *portId == AUDIO_PORT_HANDLE_NONE) {
diff --git a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
index 5a23b44..718f958 100644
--- a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
+++ b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
@@ -1489,7 +1489,8 @@
                                               std::vector<audio_io_handle_t> *secondaryOutputs,
                                               output_type_t *outputType,
                                               bool *isSpatialized,
-                                              bool *isBitPerfect)
+                                              bool *isBitPerfect,
+                                              float *volume)
 {
     // The supplied portId must be AUDIO_PORT_HANDLE_NONE
     if (*portId != AUDIO_PORT_HANDLE_NONE) {
@@ -1545,6 +1546,8 @@
                                   outputDesc->mPolicyMix);
     outputDesc->addClient(clientDesc);
 
+    *volume = Volume::DbToAmpl(outputDesc->getCurVolume(toVolumeSource(resultAttr)));
+
     ALOGV("%s() returns output %d requestedPortId %d selectedDeviceId %d for port ID %d", __func__,
           *output, requestedPortId, *selectedDeviceId, *portId);
 
@@ -7391,29 +7394,35 @@
     std::vector<sp<SwAudioOutputDescriptor>> invalidatedOutputs;
     // take into account dynamic audio policies related changes: if a client is now associated
     // to a different policy mix than at creation time, invalidate corresponding stream
+    // Also invalidate clients on outputs that do not support all of the newly selected devices
+    // for the strategy.
     for (size_t i = 0; i < mPreviousOutputs.size(); i++) {
         const sp<SwAudioOutputDescriptor>& desc = mPreviousOutputs.valueAt(i);
-        if (desc->isDuplicated()) {
+        if (desc->isDuplicated() || desc->getClientCount() == 0) {
             continue;
         }
+
         for (const sp<TrackClientDescriptor>& client : desc->getClientIterable()) {
             if (mEngine->getProductStrategyForAttributes(client->attributes()) != psId) {
                 continue;
             }
+            if (!desc->supportsAllDevices(newDevices)) {
+                invalidatedOutputs.push_back(desc);
+                break;
+            }
             sp<AudioPolicyMix> primaryMix;
             status_t status = mPolicyMixes.getOutputForAttr(client->attributes(), client->config(),
                     client->uid(), client->session(), client->flags(), mAvailableOutputDevices,
                     nullptr /* requestedDevice */, primaryMix, nullptr /* secondaryMixes */,
                     unneededUsePrimaryOutputFromPolicyMixes);
-            if (status != OK) {
-                continue;
-            }
-            if ((client->getPrimaryMix() && client->getPrimaryMix() != primaryMix)
-                    || client->hasLostPrimaryMix()) {
-                if (desc->isStrategyActive(psId) && maxLatency < desc->latency()) {
-                    maxLatency = desc->latency();
+            if (status == OK) {
+                if (client->getPrimaryMix() != primaryMix || client->hasLostPrimaryMix()) {
+                    if (desc->isStrategyActive(psId) && maxLatency < desc->latency()) {
+                        maxLatency = desc->latency();
+                    }
+                    invalidatedOutputs.push_back(desc);
+                    break;
                 }
-                invalidatedOutputs.push_back(desc);
             }
         }
     }
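
The reworked loop above invalidates a whole output as soon as one of its clients follows the strategy and the output cannot reach every newly selected device. A rough sketch of that "supports all devices" check, assuming plain sorted device-id lists instead of the real DeviceVector:

```cpp
#include <algorithm>
#include <vector>

// Returns true if every newly selected device id is also supported by the output.
// This mirrors the role of supportsAllDevices() in the invalidation loop above,
// with std::includes standing in for the real DeviceVector containment logic.
bool supportsAllDevices(std::vector<int> supported, std::vector<int> newDevices) {
    std::sort(supported.begin(), supported.end());
    std::sort(newDevices.begin(), newDevices.end());
    return std::includes(supported.begin(), supported.end(),
                         newDevices.begin(), newDevices.end());
}
```
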
diff --git a/services/audiopolicy/managerdefault/AudioPolicyManager.h b/services/audiopolicy/managerdefault/AudioPolicyManager.h
index c0a5012..4736980 100644
--- a/services/audiopolicy/managerdefault/AudioPolicyManager.h
+++ b/services/audiopolicy/managerdefault/AudioPolicyManager.h
@@ -128,7 +128,8 @@
                                   std::vector<audio_io_handle_t> *secondaryOutputs,
                                   output_type_t *outputType,
                                   bool *isSpatialized,
-                                  bool *isBitPerfect) override;
+                                  bool *isBitPerfect,
+                                  float *volume) override;
         virtual status_t startOutput(audio_port_handle_t portId);
         virtual status_t stopOutput(audio_port_handle_t portId);
         virtual bool releaseOutput(audio_port_handle_t portId);
diff --git a/services/audiopolicy/service/AudioPolicyClientImpl.cpp b/services/audiopolicy/service/AudioPolicyClientImpl.cpp
index 22fc151..9a5365c 100644
--- a/services/audiopolicy/service/AudioPolicyClientImpl.cpp
+++ b/services/audiopolicy/service/AudioPolicyClientImpl.cpp
@@ -191,6 +191,16 @@
                                                delay_ms);
 }
 
+status_t AudioPolicyService::AudioPolicyClient::setPortsVolume(
+        const std::vector<audio_port_handle_t> &ports, float volume, audio_io_handle_t output,
+        int delayMs)
+{
+    if (ports.empty()) {
+        return NO_ERROR;
+    }
+    return mAudioPolicyService->setPortsVolume(ports, volume, output, delayMs);
+}
+
 void AudioPolicyService::AudioPolicyClient::setParameters(audio_io_handle_t io_handle,
                    const String8& keyValuePairs,
                    int delay_ms)
diff --git a/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp b/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp
index e598897..bb41d00 100644
--- a/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp
+++ b/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp
@@ -423,6 +423,7 @@
     AudioPolicyInterface::output_type_t outputType;
     bool isSpatialized = false;
     bool isBitPerfect = false;
+    float volume;
     status_t result = mAudioPolicyManager->getOutputForAttr(&attr, &output, session,
                                                             &stream,
                                                             attributionSource,
@@ -431,7 +432,8 @@
                                                             &secondaryOutputs,
                                                             &outputType,
                                                             &isSpatialized,
-                                                            &isBitPerfect);
+                                                            &isBitPerfect,
+                                                            &volume);
 
     // FIXME: Introduce a way to check for the the telephony device before opening the output
     if (result == NO_ERROR) {
@@ -495,6 +497,7 @@
         _aidl_return->isBitPerfect = isBitPerfect;
         _aidl_return->attr = VALUE_OR_RETURN_BINDER_STATUS(
                 legacy2aidl_audio_attributes_t_AudioAttributes(attr));
+        _aidl_return->volume = volume;
     } else {
         _aidl_return->configBase.format = VALUE_OR_RETURN_BINDER_STATUS(
                 legacy2aidl_audio_format_t_AudioFormatDescription(config.format));
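
getOutputForAttr() now also reports the current volume of the selected output, converted to linear amplitude via Volume::DbToAmpl() on the manager side, so the client can apply an initial gain before the first setPortsVolume() command lands. As a hedged illustration of the dB-to-amplitude conversion involved, here is a self-contained helper of my own (not the AOSP Volume class):

```cpp
#include <cmath>
#include <cstdio>

// Convert a volume expressed in decibels to a linear amplitude factor.
// 0 dB maps to 1.0 (unity gain); -20 dB maps to 0.1, and so on.
static float dbToAmpl(float db) {
    return std::pow(10.0f, db / 20.0f);
}

int main() {
    // Example: suppose a volume curve returned -18 dB for the current index/device.
    float volumeDb = -18.0f;
    float volume = dbToAmpl(volumeDb);  // roughly 0.126
    std::printf("initial track volume: %f\n", volume);
    return 0;
}
```
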
diff --git a/services/audiopolicy/service/AudioPolicyService.cpp b/services/audiopolicy/service/AudioPolicyService.cpp
index cbc0b41..2e0aa3c 100644
--- a/services/audiopolicy/service/AudioPolicyService.cpp
+++ b/services/audiopolicy/service/AudioPolicyService.cpp
@@ -1418,7 +1418,7 @@
         }
     }, mediautils::TimeCheck::getDefaultTimeoutDuration(),
     mediautils::TimeCheck::getDefaultSecondChanceDuration(),
-    true /* crashOnTimeout */);
+    !property_get_bool("audio.timecheck.disabled", false) /* crashOnTimeout */);
 
     switch (code) {
         case SHELL_COMMAND_TRANSACTION: {
@@ -1816,6 +1816,16 @@
                                                                     data->mIO);
                     ul.lock();
                     }break;
+                case SET_PORTS_VOLUME: {
+                    VolumePortsData *data = (VolumePortsData *)command->mParam.get();
+                    ALOGV("AudioCommandThread() processing set volume ports %s volume %f, "
+                            "output %d", data->dumpPorts().c_str(), data->mVolume, data->mIO);
+                    ul.unlock();
+                    command->mStatus = AudioSystem::setPortsVolume(data->mPorts,
+                                                                   data->mVolume,
+                                                                   data->mIO);
+                    ul.lock();
+                    } break;
                 case SET_PARAMETERS: {
                     ParametersData *data = (ParametersData *)command->mParam.get();
                     ALOGV("AudioCommandThread() processing set parameters string %s, io %d",
@@ -2128,6 +2138,23 @@
     return sendCommand(command, delayMs);
 }
 
+status_t AudioPolicyService::AudioCommandThread::volumePortsCommand(
+        const std::vector<audio_port_handle_t> &ports, float volume, audio_io_handle_t output,
+        int delayMs)
+{
+    sp<AudioCommand> command = new AudioCommand();
+    command->mCommand = SET_PORTS_VOLUME;
+    sp<VolumePortsData> data = new VolumePortsData();
+    data->mPorts = ports;
+    data->mVolume = volume;
+    data->mIO = output;
+    command->mParam = data;
+    command->mWaitStatus = true;
+    ALOGV("AudioCommandThread() adding set volume ports %s, volume %f, output %d",
+            data->dumpPorts().c_str(), volume, output);
+    return sendCommand(command, delayMs);
+}
+
 status_t AudioPolicyService::AudioCommandThread::parametersCommand(audio_io_handle_t ioHandle,
                                                                    const char *keyValuePairs,
                                                                    int delayMs)
@@ -2458,6 +2485,31 @@
             delayMs = 1;
         } break;
 
+        case SET_PORTS_VOLUME: {
+            VolumePortsData *data = (VolumePortsData *)command->mParam.get();
+            VolumePortsData *data2 = (VolumePortsData *)command2->mParam.get();
+            if (data->mIO != data2->mIO) break;
+            // Can remove the command only if the port id lists are identical; otherwise, remove
+            // from command 2 all ports whose volume will be replaced by command 1's volume.
+            std::vector<audio_port_handle_t> portsOnlyInCommand2{};
+            std::copy_if(data2->mPorts.begin(), data2->mPorts.end(),
+                    std::back_inserter(portsOnlyInCommand2), [&](const auto &portId) {
+                return std::find(data->mPorts.begin(), data->mPorts.end(), portId) ==
+                        data->mPorts.end();
+            });
+            if (!portsOnlyInCommand2.empty()) {
+                data2->mPorts = portsOnlyInCommand2;
+                break;
+            }
+            ALOGV("Filtering out volume command on output %d for ports %s",
+                    data->mIO, data->dumpPorts().c_str());
+            removedCommands.add(command2);
+            command->mTime = command2->mTime;
+            // force delayMs to a non-zero value so that the code below does not request to wait
+            // for the command status, as the command is now delayed
+            delayMs = 1;
+        } break;
+
         case SET_VOICE_VOLUME: {
             VoiceVolumeData *data = (VoiceVolumeData *)command->mParam.get();
             VoiceVolumeData *data2 = (VoiceVolumeData *)command2->mParam.get();
@@ -2604,6 +2656,12 @@
                                                    output, delayMs);
 }
 
+int AudioPolicyService::setPortsVolume(const std::vector<audio_port_handle_t> &ports, float volume,
+                                       audio_io_handle_t output, int delayMs)
+{
+    return (int)mAudioCommandThread->volumePortsCommand(ports, volume, output, delayMs);
+}
+
 int AudioPolicyService::setVoiceVolume(float volume, int delayMs)
 {
     return (int)mAudioCommandThread->voiceVolumeCommand(volume, delayMs);
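
For the SET_PORTS_VOLUME coalescing in insertCommand_l() above: an older queued command is dropped outright only when the newer command addresses the exact same port set; otherwise the older command keeps just the ports the newer one does not touch. A small standalone illustration of that filtering, using plain ints in place of audio_port_handle_t:

```cpp
#include <algorithm>
#include <iterator>
#include <vector>

// Keep, from the older command's ports, only those not covered by the newer command.
// If nothing is left, the older command is redundant and can be removed from the queue.
std::vector<int> portsOnlyInOlder(const std::vector<int>& newer, const std::vector<int>& older) {
    std::vector<int> remaining;
    std::copy_if(older.begin(), older.end(), std::back_inserter(remaining),
                 [&](int portId) {
                     return std::find(newer.begin(), newer.end(), portId) == newer.end();
                 });
    return remaining;
}

// Usage: newer = {1, 2, 3}, older = {2, 3} -> remaining is empty, drop the older command.
//        newer = {1, 2},    older = {2, 4} -> remaining = {4}, keep it with the trimmed list.
```
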
diff --git a/services/audiopolicy/service/AudioPolicyService.h b/services/audiopolicy/service/AudioPolicyService.h
index 92c162f..d5d4f97 100644
--- a/services/audiopolicy/service/AudioPolicyService.h
+++ b/services/audiopolicy/service/AudioPolicyService.h
@@ -47,6 +47,7 @@
 #include <android/hardware/BnSensorPrivacyListener.h>
 #include <android/content/AttributionSourceState.h>
 
+#include <numeric>
 #include <unordered_map>
 
 namespace android {
@@ -354,6 +355,21 @@
                                      float volume,
                                      audio_io_handle_t output,
                                      int delayMs = 0);
+
+    /**
+     * Set a volume on AudioTrack port id(s) for a particular output.
+     * For the same user setting, a volume group (and the ports of its associated client
+     * tracks) can have different volumes for each output (destination device) it is
+     * attached to.
+     *
+     * @param ports to consider
+     * @param volume to set
+     * @param output to consider
+     * @param delayMs to use
+     * @return NO_ERROR if successful
+     */
+    virtual status_t setPortsVolume(const std::vector<audio_port_handle_t> &ports, float volume,
+            audio_io_handle_t output, int delayMs = 0);
     virtual status_t setVoiceVolume(float volume, int delayMs = 0);
 
     void doOnNewAudioModulesAvailable();
@@ -577,6 +593,7 @@
         // commands for tone AudioCommand
         enum {
             SET_VOLUME,
+            SET_PORTS_VOLUME,
             SET_PARAMETERS,
             SET_VOICE_VOLUME,
             STOP_OUTPUT,
@@ -610,6 +627,8 @@
                     void        exit();
                     status_t    volumeCommand(audio_stream_type_t stream, float volume,
                                             audio_io_handle_t output, int delayMs = 0);
+                    status_t    volumePortsCommand(const std::vector<audio_port_handle_t> &ports,
+                            float volume, audio_io_handle_t output, int delayMs = 0);
                     status_t    parametersCommand(audio_io_handle_t ioHandle,
                                             const char *keyValuePairs, int delayMs = 0);
                     status_t    voiceVolumeCommand(float volume, int delayMs = 0);
@@ -684,6 +703,20 @@
             audio_io_handle_t mIO;
         };
 
+        class VolumePortsData : public AudioCommandData {
+        public:
+            std::vector<audio_port_handle_t> mPorts;
+            float mVolume;
+            audio_io_handle_t mIO;
+            std::string dumpPorts() {
+                return std::string("volume ") + std::to_string(mVolume) + " on IO " +
+                        std::to_string(mIO) + " and ports " +
+                        std::accumulate(std::begin(mPorts), std::end(mPorts), std::string{},
+                                       [] (const std::string& ls, int rs) {
+                                return ls + std::to_string(rs) + " "; });
+            }
+        };
+
         class ParametersData : public AudioCommandData {
         public:
             audio_io_handle_t mIO;
@@ -824,6 +857,19 @@
         // set a stream volume for a particular output. For the same user setting, a given stream type can have different volumes
         // for each output (destination device) it is attached to.
         virtual status_t setStreamVolume(audio_stream_type_t stream, float volume, audio_io_handle_t output, int delayMs = 0);
+        /**
+         * Set a volume on port(s) for a particular output. For the same user setting, a volume
+         * group (and the ports of its associated client tracks) can have different volumes for
+         * each output (destination device) it is attached to.
+         *
+         * @param ports to consider
+         * @param volume to set
+         * @param output to consider
+         * @param delayMs to use
+         * @return NO_ERROR if successful
+         */
+        status_t setPortsVolume(const std::vector<audio_port_handle_t> &ports, float volume,
+                audio_io_handle_t output, int delayMs = 0) override;
 
         // function enabling to send proprietary informations directly from audio policy manager to audio hardware interface.
         virtual void setParameters(audio_io_handle_t ioHandle, const String8& keyValuePairs, int delayMs = 0);
diff --git a/services/audiopolicy/service/AudioRecordClient.cpp b/services/audiopolicy/service/AudioRecordClient.cpp
index 6d8b3cf..733f0d6 100644
--- a/services/audiopolicy/service/AudioRecordClient.cpp
+++ b/services/audiopolicy/service/AudioRecordClient.cpp
@@ -18,15 +18,17 @@
 
 #include "AudioRecordClient.h"
 #include "AudioPolicyService.h"
+#include "binder/AppOpsManager.h"
 #include <android_media_audiopolicy.h>
 
+#include <algorithm>
+
 namespace android::media::audiopolicy {
 namespace audiopolicy_flags = android::media::audiopolicy;
 using android::AudioPolicyService;
 
 namespace {
-bool isAppOpSource(audio_source_t source)
-{
+bool isAppOpSource(audio_source_t source) {
     switch (source) {
         case AUDIO_SOURCE_FM_TUNER:
         case AUDIO_SOURCE_ECHO_REFERENCE:
@@ -55,7 +57,40 @@
 bool doesPackageTargetAtLeastU(std::string_view packageName) {
     return getTargetSdkForPackageName(packageName) >= __ANDROID_API_U__;
 }
-}
+
+class AttrSourceItr {
+  public:
+    using iterator_category = std::forward_iterator_tag;
+    using difference_type = std::ptrdiff_t;
+    using value_type = AttributionSourceState;
+    using pointer = const value_type*;
+    using reference = const value_type&;
+
+    AttrSourceItr() : mAttr(nullptr) {}
+
+    AttrSourceItr(const AttributionSourceState& attr) : mAttr(&attr) {}
+
+    reference operator*() const { return *mAttr; }
+    pointer operator->() const { return mAttr; }
+
+    AttrSourceItr& operator++() {
+        mAttr = !mAttr->next.empty() ? mAttr->next.data() : nullptr;
+        return *this;
+    }
+
+    AttrSourceItr operator++(int) {
+        AttrSourceItr tmp = *this;
+        ++(*this);
+        return tmp;
+    }
+
+    friend bool operator==(const AttrSourceItr& a, const AttrSourceItr& b) = default;
+
+    static AttrSourceItr end() { return AttrSourceItr{}; }
+private:
+    const AttributionSourceState * mAttr;
+};
+}  // anonymous namespace
 
 // static
 sp<OpRecordAudioMonitor>
@@ -110,15 +145,24 @@
     mOpCallback = new RecordAudioOpCallback(this);
     ALOGV("start watching op %d for %s", mAppOp, mAttributionSource.toString().c_str());
 
-    int flags = doesPackageTargetAtLeastU(
-            mAttributionSource.packageName.value_or("")) ?
-            AppOpsManager::WATCH_FOREGROUND_CHANGES : 0;
-    // TODO: We need to always watch AppOpsManager::OP_RECORD_AUDIO too
-    // since it controls the mic permission for legacy apps.
-    mAppOpsManager.startWatchingMode(mAppOp, VALUE_OR_FATAL(aidl2legacy_string_view_String16(
-        mAttributionSource.packageName.value_or(""))),
-        flags,
-        mOpCallback);
+    int flags = doesPackageTargetAtLeastU(mAttributionSource.packageName.value_or(""))
+                        ? AppOpsManager::WATCH_FOREGROUND_CHANGES
+                        : 0;
+
+    const auto reg = [&](int32_t op) {
+        std::for_each(AttrSourceItr{mAttributionSource}, AttrSourceItr::end(),
+                      [&](const auto& attr) {
+                          mAppOpsManager.startWatchingMode(
+                                  op,
+                                  VALUE_OR_FATAL(aidl2legacy_string_view_String16(
+                                          attr.packageName.value_or(""))),
+                                  flags, mOpCallback);
+                      });
+    };
+    reg(mAppOp);
+    if (mAppOp != AppOpsManager::OP_RECORD_AUDIO) {
+        reg(AppOpsManager::OP_RECORD_AUDIO);
+    }
 }
 
 bool OpRecordAudioMonitor::hasOp() const {
@@ -131,14 +175,20 @@
 // due to the UID in createIfNeeded(). As a result for those record track, it's:
 // - not called from constructor,
 // - not called from RecordAudioOpCallback because the callback is not installed in this case
-void OpRecordAudioMonitor::checkOp(bool updateUidStates)
-{
-    // TODO: We need to always check AppOpsManager::OP_RECORD_AUDIO too
-    // since it controls the mic permission for legacy apps.
-    const int32_t mode = mAppOpsManager.checkOp(mAppOp,
-            mAttributionSource.uid, VALUE_OR_FATAL(aidl2legacy_string_view_String16(
-                mAttributionSource.packageName.value_or(""))));
-    bool hasIt = (mode == AppOpsManager::MODE_ALLOWED);
+void OpRecordAudioMonitor::checkOp(bool updateUidStates) {
+    const auto check = [&](int32_t op) -> bool {
+        return std::all_of(
+                AttrSourceItr{mAttributionSource}, AttrSourceItr::end(), [&](const auto& x) {
+                    return mAppOpsManager.checkOp(op, x.uid,
+                                                  VALUE_OR_FATAL(aidl2legacy_string_view_String16(
+                                                          x.packageName.value_or("")))) ==
+                           AppOpsManager::MODE_ALLOWED;
+                });
+    };
+    bool hasIt = check(mAppOp);
+    if (mAppOp != AppOpsManager::OP_RECORD_AUDIO) {
+        hasIt = hasIt && check(AppOpsManager::OP_RECORD_AUDIO);
+    }
 
     if (audiopolicy_flags::record_audio_device_aware_permission()) {
         const bool canRecord = recordingAllowed(mAttributionSource, mVirtualDeviceId, mAttr.source);
@@ -173,4 +223,4 @@
     }
 }
 
-} // android::media::audiopolicy::internal
+}  // namespace android::media::audiopolicy
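
AttrSourceItr above turns the AttributionSourceState::next chain into a forward range so the app-op checks can run std::all_of / std::for_each over every link (the caller plus any apps it attributes work to). A standalone sketch of the same idea with a hypothetical node type, not the real AIDL struct:

```cpp
#include <string>
#include <vector>

// Hypothetical stand-in for AttributionSourceState: each link names a package and
// optionally chains to the next attribution link via a 0- or 1-element vector.
struct Attribution {
    std::string packageName;
    std::vector<Attribution> next;  // empty, or a single next link (C++17: incomplete type ok)
};

// Walk the chain and require that a predicate holds for every link, the same shape
// as the std::all_of over AttrSourceItr in OpRecordAudioMonitor::checkOp() above.
template <typename Pred>
bool allLinksAllowed(const Attribution& head, Pred allowed) {
    const Attribution* cur = &head;
    while (cur != nullptr) {
        if (!allowed(*cur)) {
            return false;
        }
        cur = cur->next.empty() ? nullptr : cur->next.data();
    }
    return true;
}

// Usage: allLinksAllowed(attr, [](const Attribution& a) { return a.packageName != "denied"; });
```
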
diff --git a/services/audiopolicy/tests/Android.bp b/services/audiopolicy/tests/Android.bp
index 4006489..c600fb6 100644
--- a/services/audiopolicy/tests/Android.bp
+++ b/services/audiopolicy/tests/Android.bp
@@ -107,7 +107,7 @@
         "-Werror",
     ],
 
-    test_suites: ["device-tests"],
+    test_suites: ["general-tests"],
 
 }
 
diff --git a/services/audiopolicy/tests/AudioPolicyTestClient.h b/services/audiopolicy/tests/AudioPolicyTestClient.h
index 0299160..6f63721 100644
--- a/services/audiopolicy/tests/AudioPolicyTestClient.h
+++ b/services/audiopolicy/tests/AudioPolicyTestClient.h
@@ -58,6 +58,10 @@
                              float /*volume*/,
                              audio_io_handle_t /*output*/,
                              int /*delayMs*/) override { return NO_INIT; }
+
+    status_t setPortsVolume(const std::vector<audio_port_handle_t>& /*ports*/, float /*volume*/,
+            audio_io_handle_t /*output*/, int /*delayMs*/) override { return NO_INIT; }
+
     void setParameters(audio_io_handle_t /*ioHandle*/,
                        const String8& /*keyValuePairs*/,
                        int /*delayMs*/) override { }
diff --git a/services/audiopolicy/tests/audiopolicymanager_tests.cpp b/services/audiopolicy/tests/audiopolicymanager_tests.cpp
index f66b911..43b451f 100644
--- a/services/audiopolicy/tests/audiopolicymanager_tests.cpp
+++ b/services/audiopolicy/tests/audiopolicymanager_tests.cpp
@@ -299,11 +299,12 @@
     AudioPolicyInterface::output_type_t outputType;
     bool isSpatialized;
     bool isBitPerfectInternal;
+    float volume;
     AttributionSourceState attributionSource = createAttributionSourceState(uid);
     ASSERT_EQ(OK, mManager->getOutputForAttr(
                     &attr, output, session, &stream, attributionSource, &config, &flags,
                     selectedDeviceId, portId, {}, &outputType, &isSpatialized,
-                    isBitPerfect == nullptr ? &isBitPerfectInternal : isBitPerfect));
+                    isBitPerfect == nullptr ? &isBitPerfectInternal : isBitPerfect, &volume));
     ASSERT_NE(AUDIO_PORT_HANDLE_NONE, *portId);
     ASSERT_NE(AUDIO_IO_HANDLE_NONE, *output);
 }
@@ -2065,6 +2066,7 @@
     audio_attributes_t attr = AUDIO_ATTRIBUTES_INITIALIZER;
     bool mIsSpatialized;
     bool mIsBitPerfect;
+    float mVolume;
 };
 
 TEST_P(AudioPolicyManagerTestMMapPlaybackRerouting, MmapPlaybackStreamMatchingLoopbackDapMixFails) {
@@ -2083,7 +2085,7 @@
               mManager->getOutputForAttr(&attr, &mOutput, AUDIO_SESSION_NONE, &mStream,
                                          createAttributionSourceState(testUid), &audioConfig,
                                          &outputFlags, &mSelectedDeviceId, &mPortId, {},
-                                         &mOutputType, &mIsSpatialized, &mIsBitPerfect));
+                                         &mOutputType, &mIsSpatialized, &mIsBitPerfect, &mVolume));
 }
 
 TEST_P(AudioPolicyManagerTestMMapPlaybackRerouting,
@@ -2102,7 +2104,7 @@
               mManager->getOutputForAttr(&attr, &mOutput, AUDIO_SESSION_NONE, &mStream,
                                          createAttributionSourceState(testUid), &audioConfig,
                                          &outputFlags, &mSelectedDeviceId, &mPortId, {},
-                                         &mOutputType, &mIsSpatialized, &mIsBitPerfect));
+                                         &mOutputType, &mIsSpatialized, &mIsBitPerfect, &mVolume));
 }
 
 TEST_F(AudioPolicyManagerTestMMapPlaybackRerouting,
@@ -2133,7 +2135,7 @@
               mManager->getOutputForAttr(&attr, &mOutput, AUDIO_SESSION_NONE, &mStream,
                                          createAttributionSourceState(testUid), &audioConfig,
                                          &outputFlags, &mSelectedDeviceId, &mPortId, {},
-                                         &mOutputType, &mIsSpatialized, &mIsBitPerfect));
+                                         &mOutputType, &mIsSpatialized, &mIsBitPerfect, &mVolume));
     ASSERT_EQ(usbDevicePort.id, mSelectedDeviceId);
     auto outputDesc = mManager->getOutputs().valueFor(mOutput);
     ASSERT_NE(nullptr, outputDesc);
@@ -2149,7 +2151,7 @@
               mManager->getOutputForAttr(&attr, &mOutput, AUDIO_SESSION_NONE, &mStream,
                                          createAttributionSourceState(testUid), &audioConfig,
                                          &outputFlags, &mSelectedDeviceId, &mPortId, {},
-                                         &mOutputType, &mIsSpatialized, &mIsBitPerfect));
+                                         &mOutputType, &mIsSpatialized, &mIsBitPerfect, &mVolume));
     ASSERT_EQ(usbDevicePort.id, mSelectedDeviceId);
     outputDesc = mManager->getOutputs().valueFor(mOutput);
     ASSERT_NE(nullptr, outputDesc);
@@ -2178,7 +2180,7 @@
               mManager->getOutputForAttr(&attr, &mOutput, AUDIO_SESSION_NONE, &mStream,
                                          createAttributionSourceState(testUid), &audioConfig,
                                          &outputFlags, &mSelectedDeviceId, &mPortId, {},
-                                         &mOutputType, &mIsSpatialized, &mIsBitPerfect));
+                                         &mOutputType, &mIsSpatialized, &mIsBitPerfect, &mVolume));
 }
 
 INSTANTIATE_TEST_SUITE_P(
@@ -3634,11 +3636,12 @@
     AudioPolicyInterface::output_type_t outputType;
     bool isSpatialized;
     bool isBitPerfect;
+    float volume;
     EXPECT_EQ(expected,
               mManager->getOutputForAttr(&sMediaAttr, &mBitPerfectOutput, AUDIO_SESSION_NONE,
                                          &stream, attributionSource, &config, &flags,
                                          &mSelectedDeviceId, &mBitPerfectPortId, {}, &outputType,
-                                         &isSpatialized, &isBitPerfect));
+                                         &isSpatialized, &isBitPerfect, &volume));
 }
 
 class AudioPolicyManagerTestBitPerfect : public AudioPolicyManagerTestBitPerfectBase {
diff --git a/services/camera/libcameraservice/Android.bp b/services/camera/libcameraservice/Android.bp
index 38476a4..a74b6d6 100644
--- a/services/camera/libcameraservice/Android.bp
+++ b/services/camera/libcameraservice/Android.bp
@@ -58,6 +58,7 @@
         "libcamera_metadata",
         "libfmq",
         "libgui",
+        "libguiflags",
         "libhardware",
         "libhidlbase",
         "libimage_io",
@@ -169,6 +170,7 @@
         "device3/Camera3OutputStreamInterface.cpp",
         "device3/Camera3OutputUtils.cpp",
         "device3/Camera3DeviceInjectionMethods.cpp",
+        "device3/deprecated/DeprecatedCamera3StreamSplitter.cpp",
         "device3/UHRCropAndMeteringRegionMapper.cpp",
         "device3/PreviewFrameSpacer.cpp",
         "device3/hidl/HidlCamera3Device.cpp",
diff --git a/services/camera/libcameraservice/api2/HeicCompositeStream.cpp b/services/camera/libcameraservice/api2/HeicCompositeStream.cpp
index 206c879..3af673b 100644
--- a/services/camera/libcameraservice/api2/HeicCompositeStream.cpp
+++ b/services/camera/libcameraservice/api2/HeicCompositeStream.cpp
@@ -1311,7 +1311,9 @@
         if (firstPendingFrame != mPendingInputFrames.end()) {
             updateCodecQualityLocked(firstPendingFrame->second.quality);
         } else {
-            markTrackerIdle();
+            if (mSettingsByFrameNumber.size() == 0) {
+                markTrackerIdle();
+            }
         }
     }
 }
@@ -1737,7 +1739,9 @@
                     // removed, they are simply skipped.
                     mPendingInputFrames.erase(failingFrameNumber);
                     if (mPendingInputFrames.size() == 0) {
-                        markTrackerIdle();
+                        if (mSettingsByFrameNumber.size() == 0) {
+                            markTrackerIdle();
+                        }
                     }
                     return true;
                 }
diff --git a/services/camera/libcameraservice/common/DepthPhotoProcessor.cpp b/services/camera/libcameraservice/common/DepthPhotoProcessor.cpp
index 719ff2c..57df314 100644
--- a/services/camera/libcameraservice/common/DepthPhotoProcessor.cpp
+++ b/services/camera/libcameraservice/common/DepthPhotoProcessor.cpp
@@ -417,7 +417,8 @@
 int processDepthPhotoFrame(DepthPhotoInputFrame inputFrame, size_t depthPhotoBufferSize,
         void* depthPhotoBuffer /*out*/, size_t* depthPhotoActualSize /*out*/) {
     if ((inputFrame.mMainJpegBuffer == nullptr) || (inputFrame.mDepthMapBuffer == nullptr) ||
-            (depthPhotoBuffer == nullptr) || (depthPhotoActualSize == nullptr)) {
+            (depthPhotoBuffer == nullptr) || (depthPhotoActualSize == nullptr) ||
+            (inputFrame.mMaxJpegSize < MIN_JPEG_BUFFER_SIZE)) {
         return BAD_VALUE;
     }
 
diff --git a/services/camera/libcameraservice/common/DepthPhotoProcessor.h b/services/camera/libcameraservice/common/DepthPhotoProcessor.h
index 09b6935..9e79fc0 100644
--- a/services/camera/libcameraservice/common/DepthPhotoProcessor.h
+++ b/services/camera/libcameraservice/common/DepthPhotoProcessor.h
@@ -23,6 +23,9 @@
 namespace android {
 namespace camera3 {
 
+// minimal jpeg buffer size: 256KB. Blob header is not included.
+constexpr const size_t MIN_JPEG_BUFFER_SIZE = 256 * 1024;
+
 enum DepthPhotoOrientation {
     DEPTH_ORIENTATION_0_DEGREES   = 0,
     DEPTH_ORIENTATION_90_DEGREES  = 90,
diff --git a/services/camera/libcameraservice/device3/Camera3Device.h b/services/camera/libcameraservice/device3/Camera3Device.h
index 6f87ca3..e5ccbae 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.h
+++ b/services/camera/libcameraservice/device3/Camera3Device.h
@@ -36,6 +36,7 @@
 #include <aidl/android/hardware/camera/device/CameraBlob.h>
 
 #include "common/CameraDeviceBase.h"
+#include "common/DepthPhotoProcessor.h"
 #include "device3/BufferUtils.h"
 #include "device3/StatusTracker.h"
 #include "device3/Camera3BufferManager.h"
@@ -376,8 +377,8 @@
 
     struct                     RequestTrigger;
     // minimal jpeg buffer size: 256KB + blob header
-    static const ssize_t       kMinJpegBufferSize =
-            256 * 1024 + sizeof(aidl::android::hardware::camera::device::CameraBlob);
+    static const ssize_t       kMinJpegBufferSize = camera3::MIN_JPEG_BUFFER_SIZE +
+            sizeof(aidl::android::hardware::camera::device::CameraBlob);
     // Constant to use for stream ID when one doesn't exist
     static const int           NO_STREAM = -1;
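
Camera3Device::kMinJpegBufferSize is now composed from the shared camera3::MIN_JPEG_BUFFER_SIZE plus the size of the transport blob header, so the HAL-facing minimum and the depth-photo processor's new size check stay in sync. A hedged sketch of that composition, with a made-up header struct standing in for the real AIDL CameraBlob:

```cpp
#include <cstddef>
#include <cstdint>

// Minimal JPEG payload the stack will allocate for: 256 KiB, header excluded.
constexpr std::size_t kMinJpegPayloadSize = 256 * 1024;

// Hypothetical stand-in for the blob trailer appended to JPEG buffers.
struct BlobHeader {
    int32_t blobId;
    int32_t blobSizeBytes;
};

// Overall minimum buffer size: payload plus the blob header, mirroring how
// kMinJpegBufferSize is derived from camera3::MIN_JPEG_BUFFER_SIZE above.
constexpr std::size_t kMinJpegBufferSize = kMinJpegPayloadSize + sizeof(BlobHeader);

static_assert(kMinJpegBufferSize > kMinJpegPayloadSize, "header must add to the minimum");
```
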
 
diff --git a/services/camera/libcameraservice/device3/Camera3OutputStream.cpp b/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
index 54d55a1..04867b9 100644
--- a/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
+++ b/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
@@ -732,7 +732,11 @@
         if (res == OK) {
             // Disable buffer allocation for this BufferQueue, buffer manager will take over
             // the buffer allocation responsibility.
+#if COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_PLATFORM_API_IMPROVEMENTS)
+            mConsumer->allowAllocation(false);
+#else
             mConsumer->getIGraphicBufferProducer()->allowAllocation(false);
+#endif
             mUseBufferManager = true;
         } else {
             ALOGE("%s: Unable to register stream %d to camera3 buffer manager, "
diff --git a/services/camera/libcameraservice/device3/Camera3SharedOutputStream.cpp b/services/camera/libcameraservice/device3/Camera3SharedOutputStream.cpp
index 485f3f0..187bd93 100644
--- a/services/camera/libcameraservice/device3/Camera3SharedOutputStream.cpp
+++ b/services/camera/libcameraservice/device3/Camera3SharedOutputStream.cpp
@@ -18,6 +18,8 @@
 #define ATRACE_TAG ATRACE_TAG_CAMERA
 //#define LOG_NDEBUG 0
 
+#include "Flags.h"
+
 #include "Camera3SharedOutputStream.h"
 
 namespace android {
@@ -59,7 +61,11 @@
 status_t Camera3SharedOutputStream::connectStreamSplitterLocked() {
     status_t res = OK;
 
-    mStreamSplitter = new Camera3StreamSplitter(mUseHalBufManager);
+#if USE_NEW_STREAM_SPLITTER
+    mStreamSplitter = sp<Camera3StreamSplitter>::make(mUseHalBufManager);
+#else
+    mStreamSplitter = sp<DeprecatedCamera3StreamSplitter>::make(mUseHalBufManager);
+#endif  // USE_NEW_STREAM_SPLITTER
 
     uint64_t usage = 0;
     getEndpointUsage(&usage);
@@ -90,7 +96,11 @@
     // Attach the buffer to the splitter output queues. This could block if
     // the output queue doesn't have any empty slot. So unlock during the course
     // of attachBufferToOutputs.
+#if USE_NEW_STREAM_SPLITTER
     sp<Camera3StreamSplitter> splitter = mStreamSplitter;
+#else
+    sp<DeprecatedCamera3StreamSplitter> splitter = mStreamSplitter;
+#endif  // USE_NEW_STREAM_SPLITTER
     mLock.unlock();
     res = splitter->attachBufferToOutputs(anb, surface_ids);
     mLock.lock();
diff --git a/services/camera/libcameraservice/device3/Camera3SharedOutputStream.h b/services/camera/libcameraservice/device3/Camera3SharedOutputStream.h
index 818ce17..ae11507 100644
--- a/services/camera/libcameraservice/device3/Camera3SharedOutputStream.h
+++ b/services/camera/libcameraservice/device3/Camera3SharedOutputStream.h
@@ -18,9 +18,17 @@
 #define ANDROID_SERVERS_CAMERA3_SHARED_OUTPUT_STREAM_H
 
 #include <array>
-#include "Camera3StreamSplitter.h"
+
+#include "Flags.h"
+
 #include "Camera3OutputStream.h"
 
+#if USE_NEW_STREAM_SPLITTER
+#include "Camera3StreamSplitter.h"
+#else
+#include "deprecated/DeprecatedCamera3StreamSplitter.h"
+#endif  // USE_NEW_STREAM_SPLITTER
+
 namespace android {
 
 namespace camera3 {
@@ -106,8 +114,11 @@
      * The Camera3StreamSplitter object this stream uses for stream
      * sharing.
      */
+#if USE_NEW_STREAM_SPLITTER
     sp<Camera3StreamSplitter> mStreamSplitter;
-
+#else
+    sp<DeprecatedCamera3StreamSplitter> mStreamSplitter;
+#endif  // USE_NEW_STREAM_SPLITTER
     /**
      * Initialize stream splitter.
      */
diff --git a/services/camera/libcameraservice/device3/Flags.h b/services/camera/libcameraservice/device3/Flags.h
new file mode 100644
index 0000000..ca0006b
--- /dev/null
+++ b/services/camera/libcameraservice/device3/Flags.h
@@ -0,0 +1,27 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <com_android_graphics_libgui_flags.h>
+
+#ifndef USE_NEW_STREAM_SPLITTER
+
+#define USE_NEW_STREAM_SPLITTER                              \
+    COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_STREAM_SPLITTER) && \
+            COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_PLATFORM_API_IMPROVEMENTS)
+
+#endif  // USE_NEW_STREAM_SPLITTER
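
Flags.h collapses the two libgui flags into a single USE_NEW_STREAM_SPLITTER switch so callers only branch once. A short, self-contained sketch of how such a compile-time switch is consumed (the default definition here is a stand-in for this sketch only; the real value comes from the libgui flag macros):

```cpp
// Hypothetical consumer of the combined feature flag, mirroring how
// USE_NEW_STREAM_SPLITTER gates code in Camera3SharedOutputStream.
#ifndef USE_NEW_STREAM_SPLITTER
#define USE_NEW_STREAM_SPLITTER 0  // stand-in default so this sketch builds on its own
#endif

#include <cstdio>

int main() {
#if USE_NEW_STREAM_SPLITTER
    std::puts("using the new Camera3StreamSplitter path");
#else
    std::puts("using the DeprecatedCamera3StreamSplitter path");
#endif
    return 0;
}
```

Since the combined macro expands to an unparenthesized `A && B`, it is safest to test it on its own in an `#if`, which is how the camera code above uses it.
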
diff --git a/services/camera/libcameraservice/device3/deprecated/DeprecatedCamera3StreamSplitter.cpp b/services/camera/libcameraservice/device3/deprecated/DeprecatedCamera3StreamSplitter.cpp
new file mode 100644
index 0000000..c1113e5
--- /dev/null
+++ b/services/camera/libcameraservice/device3/deprecated/DeprecatedCamera3StreamSplitter.cpp
@@ -0,0 +1,820 @@
+/*
+ * Copyright 2014,2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <inttypes.h>
+
+#define LOG_TAG "DeprecatedCamera3StreamSplitter"
+#define ATRACE_TAG ATRACE_TAG_CAMERA
+// #define LOG_NDEBUG 0
+
+#include <camera/StringUtils.h>
+#include <gui/BufferItem.h>
+#include <gui/BufferQueue.h>
+#include <gui/IGraphicBufferConsumer.h>
+#include <gui/IGraphicBufferProducer.h>
+#include <gui/Surface.h>
+
+#include <ui/GraphicBuffer.h>
+
+#include <binder/ProcessState.h>
+
+#include <utils/Trace.h>
+
+#include <cutils/atomic.h>
+
+#include "../Camera3Stream.h"
+
+#include "DeprecatedCamera3StreamSplitter.h"
+
+namespace android {
+
+status_t DeprecatedCamera3StreamSplitter::connect(
+        const std::unordered_map<size_t, sp<Surface>>& surfaces, uint64_t consumerUsage,
+        uint64_t producerUsage, size_t halMaxBuffers, uint32_t width, uint32_t height,
+        android::PixelFormat format, sp<Surface>* consumer, int64_t dynamicRangeProfile) {
+    ATRACE_CALL();
+    if (consumer == nullptr) {
+        SP_LOGE("%s: consumer pointer is NULL", __FUNCTION__);
+        return BAD_VALUE;
+    }
+
+    Mutex::Autolock lock(mMutex);
+    status_t res = OK;
+
+    if (mOutputs.size() > 0 || mConsumer != nullptr) {
+        SP_LOGE("%s: already connected", __FUNCTION__);
+        return BAD_VALUE;
+    }
+    if (mBuffers.size() > 0) {
+        SP_LOGE("%s: still has %zu pending buffers", __FUNCTION__, mBuffers.size());
+        return BAD_VALUE;
+    }
+
+    mMaxHalBuffers = halMaxBuffers;
+    mConsumerName = getUniqueConsumerName();
+    mDynamicRangeProfile = dynamicRangeProfile;
+    // Add output surfaces. This has to be before creating internal buffer queue
+    // in order to get max consumer side buffers.
+    for (auto& it : surfaces) {
+        if (it.second == nullptr) {
+            SP_LOGE("%s: Fatal: surface is NULL", __FUNCTION__);
+            return BAD_VALUE;
+        }
+        res = addOutputLocked(it.first, it.second);
+        if (res != OK) {
+            SP_LOGE("%s: Failed to add output surface: %s(%d)", __FUNCTION__, strerror(-res), res);
+            return res;
+        }
+    }
+
+    // Create BufferQueue for input
+    BufferQueue::createBufferQueue(&mProducer, &mConsumer);
+
+    // Allocate 1 extra buffer to handle the case where all buffers are detached
+    // from input, and attached to the outputs. In this case, the input queue's
+    // dequeueBuffer can still allocate 1 extra buffer before being blocked by
+    // the output's attachBuffer().
+    mMaxConsumerBuffers++;
+    mBufferItemConsumer = new BufferItemConsumer(mConsumer, consumerUsage, mMaxConsumerBuffers);
+    if (mBufferItemConsumer == nullptr) {
+        return NO_MEMORY;
+    }
+    mConsumer->setConsumerName(toString8(mConsumerName));
+
+    *consumer = new Surface(mProducer);
+    if (*consumer == nullptr) {
+        return NO_MEMORY;
+    }
+
+    res = mProducer->setAsyncMode(true);
+    if (res != OK) {
+        SP_LOGE("%s: Failed to enable input queue async mode: %s(%d)", __FUNCTION__, strerror(-res),
+                res);
+        return res;
+    }
+
+    res = mConsumer->consumerConnect(this, /* controlledByApp */ false);
+
+    mWidth = width;
+    mHeight = height;
+    mFormat = format;
+    mProducerUsage = producerUsage;
+    mAcquiredInputBuffers = 0;
+
+    SP_LOGV("%s: connected", __FUNCTION__);
+    return res;
+}
+
+status_t DeprecatedCamera3StreamSplitter::getOnFrameAvailableResult() {
+    ATRACE_CALL();
+    return mOnFrameAvailableRes.load();
+}
+
+void DeprecatedCamera3StreamSplitter::disconnect() {
+    ATRACE_CALL();
+    Mutex::Autolock lock(mMutex);
+
+    for (auto& notifier : mNotifiers) {
+        sp<IGraphicBufferProducer> producer = notifier.first;
+        sp<OutputListener> listener = notifier.second;
+        IInterface::asBinder(producer)->unlinkToDeath(listener);
+    }
+    mNotifiers.clear();
+
+    for (auto& output : mOutputs) {
+        if (output.second != nullptr) {
+            output.second->disconnect(NATIVE_WINDOW_API_CAMERA);
+        }
+    }
+    mOutputs.clear();
+    mOutputSurfaces.clear();
+    mOutputSlots.clear();
+    mConsumerBufferCount.clear();
+
+    if (mConsumer.get() != nullptr) {
+        mConsumer->consumerDisconnect();
+    }
+
+    if (mBuffers.size() > 0) {
+        SP_LOGW("%zu buffers still being tracked", mBuffers.size());
+        mBuffers.clear();
+    }
+
+    mMaxHalBuffers = 0;
+    mMaxConsumerBuffers = 0;
+    mAcquiredInputBuffers = 0;
+    SP_LOGV("%s: Disconnected", __FUNCTION__);
+}
+
+DeprecatedCamera3StreamSplitter::DeprecatedCamera3StreamSplitter(bool useHalBufManager)
+    : mUseHalBufManager(useHalBufManager) {}
+
+DeprecatedCamera3StreamSplitter::~DeprecatedCamera3StreamSplitter() {
+    disconnect();
+}
+
+status_t DeprecatedCamera3StreamSplitter::addOutput(size_t surfaceId,
+                                                    const sp<Surface>& outputQueue) {
+    ATRACE_CALL();
+    Mutex::Autolock lock(mMutex);
+    status_t res = addOutputLocked(surfaceId, outputQueue);
+
+    if (res != OK) {
+        SP_LOGE("%s: addOutputLocked failed %d", __FUNCTION__, res);
+        return res;
+    }
+
+    if (mMaxConsumerBuffers > mAcquiredInputBuffers) {
+        res = mConsumer->setMaxAcquiredBufferCount(mMaxConsumerBuffers);
+    }
+
+    return res;
+}
+
+void DeprecatedCamera3StreamSplitter::setHalBufferManager(bool enabled) {
+    Mutex::Autolock lock(mMutex);
+    mUseHalBufManager = enabled;
+}
+
+status_t DeprecatedCamera3StreamSplitter::addOutputLocked(size_t surfaceId,
+                                                          const sp<Surface>& outputQueue) {
+    ATRACE_CALL();
+    if (outputQueue == nullptr) {
+        SP_LOGE("addOutput: outputQueue must not be NULL");
+        return BAD_VALUE;
+    }
+
+    if (mOutputs[surfaceId] != nullptr) {
+        SP_LOGE("%s: surfaceId: %u already taken!", __FUNCTION__, (unsigned)surfaceId);
+        return BAD_VALUE;
+    }
+
+    status_t res = native_window_set_buffers_dimensions(outputQueue.get(), mWidth, mHeight);
+    if (res != NO_ERROR) {
+        SP_LOGE("addOutput: failed to set buffer dimensions (%d)", res);
+        return res;
+    }
+    res = native_window_set_buffers_format(outputQueue.get(), mFormat);
+    if (res != OK) {
+        ALOGE("%s: Unable to configure stream buffer format %#x for surfaceId %zu", __FUNCTION__,
+              mFormat, surfaceId);
+        return res;
+    }
+
+    sp<IGraphicBufferProducer> gbp = outputQueue->getIGraphicBufferProducer();
+    // Connect to the buffer producer
+    sp<OutputListener> listener(new OutputListener(this, gbp));
+    IInterface::asBinder(gbp)->linkToDeath(listener);
+    res = outputQueue->connect(NATIVE_WINDOW_API_CAMERA, listener);
+    if (res != NO_ERROR) {
+        SP_LOGE("addOutput: failed to connect (%d)", res);
+        return res;
+    }
+
+    // Query consumer side buffer count, and update overall buffer count
+    int maxConsumerBuffers = 0;
+    res = static_cast<ANativeWindow*>(outputQueue.get())
+                  ->query(outputQueue.get(), NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS,
+                          &maxConsumerBuffers);
+    if (res != OK) {
+        SP_LOGE("%s: Unable to query consumer undequeued buffer count"
+                " for surface",
+                __FUNCTION__);
+        return res;
+    }
+
+    SP_LOGV("%s: Consumer wants %d buffers, Producer wants %zu", __FUNCTION__, maxConsumerBuffers,
+            mMaxHalBuffers);
+    // The output slot count requirement can change depending on the current amount
+    // of outputs and incoming buffer consumption rate. To avoid any issues with
+    // insufficient slots, set their count to the maximum supported. The output
+    // surface buffer allocation is disabled so no real buffers will get allocated.
+    size_t totalBufferCount = BufferQueue::NUM_BUFFER_SLOTS;
+    res = native_window_set_buffer_count(outputQueue.get(), totalBufferCount);
+    if (res != OK) {
+        SP_LOGE("%s: Unable to set buffer count for surface %p", __FUNCTION__, outputQueue.get());
+        return res;
+    }
+
+    // Set dequeueBuffer/attachBuffer timeout if the consumer is not hw composer or hw texture.
+    // We need to skip these cases as the timeout will disable the non-blocking (async) mode.
+    uint64_t usage = 0;
+    res = native_window_get_consumer_usage(static_cast<ANativeWindow*>(outputQueue.get()), &usage);
+    if (!(usage & (GRALLOC_USAGE_HW_COMPOSER | GRALLOC_USAGE_HW_TEXTURE))) {
+        nsecs_t timeout =
+                mUseHalBufManager ? kHalBufMgrDequeueBufferTimeout : kNormalDequeueBufferTimeout;
+        outputQueue->setDequeueTimeout(timeout);
+    }
+
+    res = gbp->allowAllocation(false);
+    if (res != OK) {
+        SP_LOGE("%s: Failed to turn off allocation for outputQueue", __FUNCTION__);
+        return res;
+    }
+
+    // Add new entry into mOutputs
+    mOutputs[surfaceId] = gbp;
+    mOutputSurfaces[surfaceId] = outputQueue;
+    mConsumerBufferCount[surfaceId] = maxConsumerBuffers;
+    if (mConsumerBufferCount[surfaceId] > mMaxHalBuffers) {
+        SP_LOGW("%s: Consumer buffer count %zu larger than max. Hal buffers: %zu", __FUNCTION__,
+                mConsumerBufferCount[surfaceId], mMaxHalBuffers);
+    }
+    mNotifiers[gbp] = listener;
+    mOutputSlots[gbp] = std::make_unique<OutputSlots>(totalBufferCount);
+
+    mMaxConsumerBuffers += maxConsumerBuffers;
+    return NO_ERROR;
+}
+
+status_t DeprecatedCamera3StreamSplitter::removeOutput(size_t surfaceId) {
+    ATRACE_CALL();
+    Mutex::Autolock lock(mMutex);
+
+    status_t res = removeOutputLocked(surfaceId);
+    if (res != OK) {
+        SP_LOGE("%s: removeOutputLocked failed %d", __FUNCTION__, res);
+        return res;
+    }
+
+    if (mAcquiredInputBuffers < mMaxConsumerBuffers) {
+        res = mConsumer->setMaxAcquiredBufferCount(mMaxConsumerBuffers);
+        if (res != OK) {
+            SP_LOGE("%s: setMaxAcquiredBufferCount failed %d", __FUNCTION__, res);
+            return res;
+        }
+    }
+
+    return res;
+}
+
+status_t DeprecatedCamera3StreamSplitter::removeOutputLocked(size_t surfaceId) {
+    if (mOutputs[surfaceId] == nullptr) {
+        SP_LOGE("%s: output surface is not present!", __FUNCTION__);
+        return BAD_VALUE;
+    }
+
+    sp<IGraphicBufferProducer> gbp = mOutputs[surfaceId];
+    // Search and decrement the ref. count of any buffers that are
+    // still attached to the removed surface.
+    std::vector<uint64_t> pendingBufferIds;
+    auto& outputSlots = *mOutputSlots[gbp];
+    for (size_t i = 0; i < outputSlots.size(); i++) {
+        if (outputSlots[i] != nullptr) {
+            pendingBufferIds.push_back(outputSlots[i]->getId());
+            auto rc = gbp->detachBuffer(i);
+            if (rc != NO_ERROR) {
+                // Buffers that fail to detach here will be scheduled for detach in the
+                // input buffer queue and the rest of the registered outputs instead.
+                // This will help ensure that camera stops accessing buffers that still
+                // can get referenced by the disconnected output.
+                mDetachedBuffers.emplace(outputSlots[i]->getId());
+            }
+        }
+    }
+    mOutputs[surfaceId] = nullptr;
+    mOutputSurfaces[surfaceId] = nullptr;
+    mOutputSlots[gbp] = nullptr;
+    for (const auto& id : pendingBufferIds) {
+        decrementBufRefCountLocked(id, surfaceId);
+    }
+
+    auto res = IInterface::asBinder(gbp)->unlinkToDeath(mNotifiers[gbp]);
+    if (res != OK) {
+        SP_LOGE("%s: Failed to unlink producer death listener: %d ", __FUNCTION__, res);
+        return res;
+    }
+
+    res = gbp->disconnect(NATIVE_WINDOW_API_CAMERA);
+    if (res != OK) {
+        SP_LOGE("%s: Unable to disconnect from producer interface: %d ", __FUNCTION__, res);
+        return res;
+    }
+
+    mNotifiers[gbp] = nullptr;
+    mMaxConsumerBuffers -= mConsumerBufferCount[surfaceId];
+    mConsumerBufferCount[surfaceId] = 0;
+
+    return res;
+}
+
+status_t DeprecatedCamera3StreamSplitter::outputBufferLocked(
+        const sp<IGraphicBufferProducer>& output, const BufferItem& bufferItem, size_t surfaceId) {
+    ATRACE_CALL();
+    status_t res;
+    IGraphicBufferProducer::QueueBufferInput queueInput(
+            bufferItem.mTimestamp, bufferItem.mIsAutoTimestamp, bufferItem.mDataSpace,
+            bufferItem.mCrop, static_cast<int32_t>(bufferItem.mScalingMode), bufferItem.mTransform,
+            bufferItem.mFence);
+
+    IGraphicBufferProducer::QueueBufferOutput queueOutput;
+
+    uint64_t bufferId = bufferItem.mGraphicBuffer->getId();
+    const BufferTracker& tracker = *(mBuffers[bufferId]);
+    int slot = getSlotForOutputLocked(output, tracker.getBuffer());
+
+    if (mOutputSurfaces[surfaceId] != nullptr) {
+        sp<ANativeWindow> anw = mOutputSurfaces[surfaceId];
+        camera3::Camera3Stream::queueHDRMetadata(
+                bufferItem.mGraphicBuffer->getNativeBuffer()->handle, anw, mDynamicRangeProfile);
+    } else {
+        SP_LOGE("%s: Invalid surface id: %zu!", __FUNCTION__, surfaceId);
+    }
+
+    // In case the output BufferQueue has its own lock, if we hold the splitter lock while calling
+    // queueBuffer (which will try to acquire the output lock), the output could be holding its
+    // own lock while calling releaseBuffer (which will try to acquire the splitter lock), running
+    // into a circular locking situation (deadlock).
+    mMutex.unlock();
+    res = output->queueBuffer(slot, queueInput, &queueOutput);
+    mMutex.lock();
+
+    SP_LOGV("%s: Queuing buffer to buffer queue %p slot %d returns %d", __FUNCTION__, output.get(),
+            slot, res);
+    // 'mMutex' is not held during queueBuffer, which makes the removal of "output" possible.
+    // Check whether this is the case and return early.
+    if (mOutputSlots[output] == nullptr) {
+        return res;
+    }
+    if (res != OK) {
+        if (res != NO_INIT && res != DEAD_OBJECT) {
+            SP_LOGE("Queuing buffer to output failed (%d)", res);
+        }
+        // If we just discovered that this output has been abandoned, note
+        // that, increment the release count so that we still release this
+        // buffer eventually, and move on to the next output
+        onAbandonedLocked();
+        decrementBufRefCountLocked(bufferItem.mGraphicBuffer->getId(), surfaceId);
+        return res;
+    }
+
+    // If the queued buffer replaces a pending buffer in the async
+    // queue, no onBufferReleased is called by the buffer queue.
+    // Proactively trigger the callback to avoid buffer loss.
+    if (queueOutput.bufferReplaced) {
+        onBufferReplacedLocked(output, surfaceId);
+    }
+
+    return res;
+}
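The comment in outputBufferLocked above (and again in attachBufferToOutputs below) describes a general pattern: drop the splitter lock around a call that may take the output's own lock, then re-validate the shared state after re-locking, because the map entry may have disappeared in the meantime. Below is a minimal standalone sketch of that pattern using only standard-library types; every name in it (Splitter, Output, queueToOutput) is illustrative and not part of the patch, and while the real code keeps the producer alive through a strong pointer across the unlocked call, this single-threaded sketch only shows the control flow.

// Minimal sketch: unlock around a blocking call, then re-validate state.
#include <iostream>
#include <memory>
#include <mutex>
#include <unordered_map>

// Simulates a callee that takes its own lock and may block.
struct Output {
    std::mutex lock;
    bool queueBuffer(int slot) {
        std::lock_guard<std::mutex> guard(lock);
        return slot >= 0;
    }
};

class Splitter {
  public:
    void addOutput(int id) {
        std::lock_guard<std::mutex> guard(mMutex);
        mOutputs[id] = std::make_unique<Output>();
    }

    void removeOutput(int id) {
        std::lock_guard<std::mutex> guard(mMutex);
        mOutputs.erase(id);
        mQueuedCount.erase(id);
    }

    bool queueToOutput(int id, int slot) {
        std::unique_lock<std::mutex> guard(mMutex);
        Output* out = lookupLocked(id);
        if (out == nullptr) return false;

        // Drop our lock before calling into the output, which takes its own
        // lock; holding both across the call is what creates the circular
        // lock risk described in the patch.
        guard.unlock();
        bool ok = out->queueBuffer(slot);
        guard.lock();

        // While the lock was dropped the output may have been removed, so
        // re-validate before touching per-output bookkeeping.
        if (ok && lookupLocked(id) != nullptr) {
            ++mQueuedCount[id];
        }
        return ok;
    }

  private:
    Output* lookupLocked(int id) {
        auto it = mOutputs.find(id);
        return it == mOutputs.end() ? nullptr : it->second.get();
    }

    std::mutex mMutex;
    std::unordered_map<int, std::unique_ptr<Output>> mOutputs;
    std::unordered_map<int, int> mQueuedCount;
};

int main() {
    Splitter splitter;
    splitter.addOutput(0);
    std::cout << std::boolalpha << splitter.queueToOutput(0, 3) << std::endl;  // true
    splitter.removeOutput(0);
    std::cout << std::boolalpha << splitter.queueToOutput(0, 3) << std::endl;  // false
    return 0;
}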
+
+std::string DeprecatedCamera3StreamSplitter::getUniqueConsumerName() {
+    static volatile int32_t counter = 0;
+    return fmt::sprintf("DeprecatedCamera3StreamSplitter-%d", android_atomic_inc(&counter));
+}
+
+status_t DeprecatedCamera3StreamSplitter::notifyBufferReleased(const sp<GraphicBuffer>& buffer) {
+    ATRACE_CALL();
+
+    Mutex::Autolock lock(mMutex);
+
+    uint64_t bufferId = buffer->getId();
+    std::unique_ptr<BufferTracker> tracker_ptr = std::move(mBuffers[bufferId]);
+    mBuffers.erase(bufferId);
+
+    return OK;
+}
+
+status_t DeprecatedCamera3StreamSplitter::attachBufferToOutputs(
+        ANativeWindowBuffer* anb, const std::vector<size_t>& surface_ids) {
+    ATRACE_CALL();
+    status_t res = OK;
+
+    Mutex::Autolock lock(mMutex);
+
+    sp<GraphicBuffer> gb(static_cast<GraphicBuffer*>(anb));
+    uint64_t bufferId = gb->getId();
+
+    // Initialize buffer tracker for this input buffer
+    auto tracker = std::make_unique<BufferTracker>(gb, surface_ids);
+
+    for (auto& surface_id : surface_ids) {
+        sp<IGraphicBufferProducer>& gbp = mOutputs[surface_id];
+        if (gbp.get() == nullptr) {
+            // Output surface was likely removed by the client.
+            continue;
+        }
+        int slot = getSlotForOutputLocked(gbp, gb);
+        if (slot != BufferItem::INVALID_BUFFER_SLOT) {
+            // Buffer is already attached to this output surface.
+            continue;
+        }
+        // Temporarily unlock the mutex when trying to attachBuffer to the output
+        // queue, because attachBuffer could block in case of a slow consumer. If
+        // we block while holding the lock, onFrameAvailable and onBufferReleased
+        // will block as well because they need to acquire the same lock.
+        mMutex.unlock();
+        res = gbp->attachBuffer(&slot, gb);
+        mMutex.lock();
+        if (res != OK) {
+            SP_LOGE("%s: Cannot attachBuffer from GraphicBufferProducer %p: %s (%d)", __FUNCTION__,
+                    gbp.get(), strerror(-res), res);
+            // TODO: might need to detach/cleanup the already attached buffers before return?
+            return res;
+        }
+        if ((slot < 0) || (slot > BufferQueue::NUM_BUFFER_SLOTS)) {
+            SP_LOGE("%s: Slot received %d either bigger than expected maximum %d or negative!",
+                    __FUNCTION__, slot, BufferQueue::NUM_BUFFER_SLOTS);
+            return BAD_VALUE;
+        }
+        // While the buffer is being attached, 'mMutex' is not held, which makes the removal of
+        // "gbp" possible. Check whether this is the case and continue.
+        if (mOutputSlots[gbp] == nullptr) {
+            continue;
+        }
+        auto& outputSlots = *mOutputSlots[gbp];
+        if (static_cast<size_t>(slot + 1) > outputSlots.size()) {
+            outputSlots.resize(slot + 1);
+        }
+        if (outputSlots[slot] != nullptr) {
+            // If the buffer is attached to a slot which already contains a buffer,
+            // the previous buffer will be removed from the output queue. Decrement
+            // the reference count accordingly.
+            decrementBufRefCountLocked(outputSlots[slot]->getId(), surface_id);
+        }
+        SP_LOGV("%s: Attached buffer %p to slot %d on output %p.", __FUNCTION__, gb.get(), slot,
+                gbp.get());
+        outputSlots[slot] = gb;
+    }
+
+    mBuffers[bufferId] = std::move(tracker);
+
+    return res;
+}
+
+void DeprecatedCamera3StreamSplitter::onFrameAvailable(const BufferItem& /*item*/) {
+    ATRACE_CALL();
+    Mutex::Autolock lock(mMutex);
+
+    // Acquire and detach the buffer from the input
+    BufferItem bufferItem;
+    status_t res = mConsumer->acquireBuffer(&bufferItem, /* presentWhen */ 0);
+    if (res != NO_ERROR) {
+        SP_LOGE("%s: Acquiring buffer from input failed (%d)", __FUNCTION__, res);
+        mOnFrameAvailableRes.store(res);
+        return;
+    }
+
+    uint64_t bufferId;
+    if (bufferItem.mGraphicBuffer != nullptr) {
+        mInputSlots[bufferItem.mSlot] = bufferItem;
+    } else if (bufferItem.mAcquireCalled) {
+        bufferItem.mGraphicBuffer = mInputSlots[bufferItem.mSlot].mGraphicBuffer;
+        mInputSlots[bufferItem.mSlot].mFrameNumber = bufferItem.mFrameNumber;
+    } else {
+        SP_LOGE("%s: Invalid input graphic buffer!", __FUNCTION__);
+        mOnFrameAvailableRes.store(BAD_VALUE);
+        return;
+    }
+    bufferId = bufferItem.mGraphicBuffer->getId();
+
+    if (mBuffers.find(bufferId) == mBuffers.end()) {
+        SP_LOGE("%s: Acquired buffer doesn't exist in attached buffer map", __FUNCTION__);
+        mOnFrameAvailableRes.store(INVALID_OPERATION);
+        return;
+    }
+
+    mAcquiredInputBuffers++;
+    SP_LOGV("acquired buffer %" PRId64 " from input at slot %d", bufferItem.mGraphicBuffer->getId(),
+            bufferItem.mSlot);
+
+    if (bufferItem.mTransformToDisplayInverse) {
+        bufferItem.mTransform |= NATIVE_WINDOW_TRANSFORM_INVERSE_DISPLAY;
+    }
+
+    // Attach and queue the buffer to each of the outputs
+    BufferTracker& tracker = *(mBuffers[bufferId]);
+
+    SP_LOGV("%s: BufferTracker for buffer %" PRId64 ", number of requests %zu", __FUNCTION__,
+            bufferItem.mGraphicBuffer->getId(), tracker.requestedSurfaces().size());
+    for (const auto id : tracker.requestedSurfaces()) {
+        if (mOutputs[id] == nullptr) {
+            // Output surface was likely removed by the client.
+            continue;
+        }
+
+        res = outputBufferLocked(mOutputs[id], bufferItem, id);
+        if (res != OK) {
+            SP_LOGE("%s: outputBufferLocked failed %d", __FUNCTION__, res);
+            mOnFrameAvailableRes.store(res);
+            // If we fail to send buffer to certain output, keep sending to
+            // other outputs.
+            continue;
+        }
+    }
+
+    mOnFrameAvailableRes.store(res);
+}
+
+void DeprecatedCamera3StreamSplitter::onFrameReplaced(const BufferItem& item) {
+    ATRACE_CALL();
+    onFrameAvailable(item);
+}
+
+void DeprecatedCamera3StreamSplitter::decrementBufRefCountLocked(uint64_t id, size_t surfaceId) {
+    ATRACE_CALL();
+
+    if (mBuffers[id] == nullptr) {
+        return;
+    }
+
+    size_t referenceCount = mBuffers[id]->decrementReferenceCountLocked(surfaceId);
+    if (referenceCount > 0) {
+        return;
+    }
+
+    // We no longer need to track the buffer now that it is being returned to the
+    // input. Note that this should happen before we unlock the mutex and call
+    // releaseBuffer, to avoid the case where the same bufferId is acquired in
+    // attachBufferToOutputs and a new BufferTracker with the same bufferId
+    // overwrites the current one.
+    std::unique_ptr<BufferTracker> tracker_ptr = std::move(mBuffers[id]);
+    mBuffers.erase(id);
+
+    uint64_t bufferId = tracker_ptr->getBuffer()->getId();
+    int consumerSlot = -1;
+    uint64_t frameNumber;
+    auto inputSlot = mInputSlots.begin();
+    for (; inputSlot != mInputSlots.end(); inputSlot++) {
+        if (inputSlot->second.mGraphicBuffer->getId() == bufferId) {
+            consumerSlot = inputSlot->second.mSlot;
+            frameNumber = inputSlot->second.mFrameNumber;
+            break;
+        }
+    }
+    if (consumerSlot == -1) {
+        SP_LOGE("%s: Buffer missing inside input slots!", __FUNCTION__);
+        return;
+    }
+
+    auto detachBuffer = mDetachedBuffers.find(bufferId);
+    bool detach = (detachBuffer != mDetachedBuffers.end());
+    if (detach) {
+        mDetachedBuffers.erase(detachBuffer);
+        mInputSlots.erase(inputSlot);
+    }
+    // Temporarily unlock the mutex to avoid a circular lock:
+    // 1. This function holds the splitter lock and calls releaseBuffer, which triggers
+    // onBufferReleased in Camera3OutputStream. onBufferReleased waits on the
+    // OutputStream lock.
+    // 2. Camera3SharedOutputStream::getBufferLocked calls
+    // attachBufferToOutputs, which holds the stream lock, and waits for the
+    // splitter lock.
+    sp<IGraphicBufferConsumer> consumer(mConsumer);
+    mMutex.unlock();
+    int res = NO_ERROR;
+    if (consumer != nullptr) {
+        if (detach) {
+            res = consumer->detachBuffer(consumerSlot);
+        } else {
+            res = consumer->releaseBuffer(consumerSlot, frameNumber, EGL_NO_DISPLAY,
+                                          EGL_NO_SYNC_KHR, tracker_ptr->getMergedFence());
+        }
+    } else {
+        SP_LOGE("%s: consumer has become null!", __FUNCTION__);
+    }
+    mMutex.lock();
+
+    if (res != NO_ERROR) {
+        if (detach) {
+            SP_LOGE("%s: detachBuffer returns %d", __FUNCTION__, res);
+        } else {
+            SP_LOGE("%s: releaseBuffer returns %d", __FUNCTION__, res);
+        }
+    } else {
+        if (mAcquiredInputBuffers == 0) {
+            ALOGW("%s: Acquired input buffer count already at zero!", __FUNCTION__);
+        } else {
+            mAcquiredInputBuffers--;
+        }
+    }
+}
+
+void DeprecatedCamera3StreamSplitter::onBufferReleasedByOutput(
+        const sp<IGraphicBufferProducer>& from) {
+    ATRACE_CALL();
+    sp<Fence> fence;
+
+    int slot = BufferItem::INVALID_BUFFER_SLOT;
+    auto res = from->dequeueBuffer(&slot, &fence, mWidth, mHeight, mFormat, mProducerUsage, nullptr,
+                                   nullptr);
+    Mutex::Autolock lock(mMutex);
+    handleOutputDequeueStatusLocked(res, slot);
+    if (res != OK) {
+        return;
+    }
+
+    size_t surfaceId = 0;
+    bool found = false;
+    for (const auto& it : mOutputs) {
+        if (it.second == from) {
+            found = true;
+            surfaceId = it.first;
+            break;
+        }
+    }
+    if (!found) {
+        SP_LOGV("%s: output surface not registered anymore!", __FUNCTION__);
+        return;
+    }
+
+    returnOutputBufferLocked(fence, from, surfaceId, slot);
+}
+
+void DeprecatedCamera3StreamSplitter::onBufferReplacedLocked(const sp<IGraphicBufferProducer>& from,
+                                                             size_t surfaceId) {
+    ATRACE_CALL();
+    sp<Fence> fence;
+
+    int slot = BufferItem::INVALID_BUFFER_SLOT;
+    auto res = from->dequeueBuffer(&slot, &fence, mWidth, mHeight, mFormat, mProducerUsage, nullptr,
+                                   nullptr);
+    handleOutputDequeueStatusLocked(res, slot);
+    if (res != OK) {
+        return;
+    }
+
+    returnOutputBufferLocked(fence, from, surfaceId, slot);
+}
+
+void DeprecatedCamera3StreamSplitter::returnOutputBufferLocked(
+        const sp<Fence>& fence, const sp<IGraphicBufferProducer>& from, size_t surfaceId,
+        int slot) {
+    sp<GraphicBuffer> buffer;
+
+    if (mOutputSlots[from] == nullptr) {
+        // Output surface was likely removed by the client.
+        return;
+    }
+
+    auto outputSlots = *mOutputSlots[from];
+    buffer = outputSlots[slot];
+    BufferTracker& tracker = *(mBuffers[buffer->getId()]);
+    // Merge the release fence of the incoming buffer so that the fence we send
+    // back to the input includes all of the outputs' fences
+    if (fence != nullptr && fence->isValid()) {
+        tracker.mergeFence(fence);
+    }
+
+    auto detachBuffer = mDetachedBuffers.find(buffer->getId());
+    bool detach = (detachBuffer != mDetachedBuffers.end());
+    if (detach) {
+        auto res = from->detachBuffer(slot);
+        if (res == NO_ERROR) {
+            outputSlots[slot] = nullptr;
+        } else {
+            SP_LOGE("%s: detach buffer from output failed (%d)", __FUNCTION__, res);
+        }
+    }
+
+    // Check to see if this is the last outstanding reference to this buffer
+    decrementBufRefCountLocked(buffer->getId(), surfaceId);
+}
+
+void DeprecatedCamera3StreamSplitter::handleOutputDequeueStatusLocked(status_t res, int slot) {
+    if (res == NO_INIT) {
+        // If we just discovered that this output has been abandoned, note that,
+        // but we can't do anything else, since the buffer is invalid
+        onAbandonedLocked();
+    } else if (res == IGraphicBufferProducer::BUFFER_NEEDS_REALLOCATION) {
+        SP_LOGE("%s: Producer needs to re-allocate buffer!", __FUNCTION__);
+        SP_LOGE("%s: This should not happen with buffer allocation disabled!", __FUNCTION__);
+    } else if (res == IGraphicBufferProducer::RELEASE_ALL_BUFFERS) {
+        SP_LOGE("%s: All slot->buffer mapping should be released!", __FUNCTION__);
+        SP_LOGE("%s: This should not happen with buffer allocation disabled!", __FUNCTION__);
+    } else if (res == NO_MEMORY) {
+        SP_LOGE("%s: No free buffers", __FUNCTION__);
+    } else if (res == WOULD_BLOCK) {
+        SP_LOGE("%s: Dequeue call will block", __FUNCTION__);
+    } else if (res != OK || (slot == BufferItem::INVALID_BUFFER_SLOT)) {
+        SP_LOGE("%s: dequeue buffer from output failed (%d)", __FUNCTION__, res);
+    }
+}
+
+void DeprecatedCamera3StreamSplitter::onAbandonedLocked() {
+    // If this is called from binderDied callback, it means the app process
+    // holding the binder has died. CameraService will be notified of the binder
+    // death, and camera device will be closed, which in turn calls
+    // disconnect().
+    //
+    // If this is called from onBufferReleasedByOutput or onFrameAvailable, one
+    // consumer being abandoned shouldn't impact the other consumers. So we won't
+    // stop the buffer flow.
+    //
+    // In both cases, we don't need to do anything here.
+    SP_LOGV("One of my outputs has abandoned me");
+}
+
+int DeprecatedCamera3StreamSplitter::getSlotForOutputLocked(const sp<IGraphicBufferProducer>& gbp,
+                                                            const sp<GraphicBuffer>& gb) {
+    auto& outputSlots = *mOutputSlots[gbp];
+
+    for (size_t i = 0; i < outputSlots.size(); i++) {
+        if (outputSlots[i] == gb) {
+            return (int)i;
+        }
+    }
+
+    SP_LOGV("%s: Cannot find slot for gb %p on output %p", __FUNCTION__, gb.get(), gbp.get());
+    return BufferItem::INVALID_BUFFER_SLOT;
+}
+
+DeprecatedCamera3StreamSplitter::OutputListener::OutputListener(
+        wp<DeprecatedCamera3StreamSplitter> splitter, wp<IGraphicBufferProducer> output)
+    : mSplitter(splitter), mOutput(output) {}
+
+void DeprecatedCamera3StreamSplitter::OutputListener::onBufferReleased() {
+    ATRACE_CALL();
+    sp<DeprecatedCamera3StreamSplitter> splitter = mSplitter.promote();
+    sp<IGraphicBufferProducer> output = mOutput.promote();
+    if (splitter != nullptr && output != nullptr) {
+        splitter->onBufferReleasedByOutput(output);
+    }
+}
+
+void DeprecatedCamera3StreamSplitter::OutputListener::binderDied(const wp<IBinder>& /* who */) {
+    sp<DeprecatedCamera3StreamSplitter> splitter = mSplitter.promote();
+    if (splitter != nullptr) {
+        Mutex::Autolock lock(splitter->mMutex);
+        splitter->onAbandonedLocked();
+    }
+}
+
+DeprecatedCamera3StreamSplitter::BufferTracker::BufferTracker(
+        const sp<GraphicBuffer>& buffer, const std::vector<size_t>& requestedSurfaces)
+    : mBuffer(buffer),
+      mMergedFence(Fence::NO_FENCE),
+      mRequestedSurfaces(requestedSurfaces),
+      mReferenceCount(requestedSurfaces.size()) {}
+
+void DeprecatedCamera3StreamSplitter::BufferTracker::mergeFence(const sp<Fence>& with) {
+    mMergedFence = Fence::merge(String8("DeprecatedCamera3StreamSplitter"), mMergedFence, with);
+}
+
+size_t DeprecatedCamera3StreamSplitter::BufferTracker::decrementReferenceCountLocked(
+        size_t surfaceId) {
+    const auto& it = std::find(mRequestedSurfaces.begin(), mRequestedSurfaces.end(), surfaceId);
+    if (it == mRequestedSurfaces.end()) {
+        return mReferenceCount;
+    } else {
+        mRequestedSurfaces.erase(it);
+    }
+
+    if (mReferenceCount > 0) --mReferenceCount;
+    return mReferenceCount;
+}
+
+}  // namespace android
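BufferTracker above keeps one reference per requested surface, and the splitter returns a buffer to the input only once that count reaches zero; decrementReferenceCountLocked also ignores surface ids that were never requested or were already counted. Below is a small standalone model of that behaviour using plain standard-library types; the RefTracker name and the surface ids are illustrative, not taken from the patch.

#include <algorithm>
#include <cassert>
#include <cstddef>
#include <vector>

// Simplified stand-in for BufferTracker: one reference per requested surface,
// and a decrement for an unknown (or already-counted) surface id is a no-op.
class RefTracker {
  public:
    explicit RefTracker(std::vector<std::size_t> requestedSurfaces)
        : mRequestedSurfaces(std::move(requestedSurfaces)),
          mReferenceCount(mRequestedSurfaces.size()) {}

    // Returns the remaining reference count, mirroring
    // decrementReferenceCountLocked above.
    std::size_t decrement(std::size_t surfaceId) {
        auto it = std::find(mRequestedSurfaces.begin(), mRequestedSurfaces.end(), surfaceId);
        if (it == mRequestedSurfaces.end()) {
            return mReferenceCount;  // Not requested or already counted: ignore.
        }
        mRequestedSurfaces.erase(it);
        if (mReferenceCount > 0) --mReferenceCount;
        return mReferenceCount;
    }

  private:
    std::vector<std::size_t> mRequestedSurfaces;
    std::size_t mReferenceCount;
};

int main() {
    RefTracker tracker({0, 2});          // Buffer requested for surfaces 0 and 2.
    assert(tracker.decrement(1) == 2);   // Surface 1 was never requested: ignored.
    assert(tracker.decrement(0) == 1);
    assert(tracker.decrement(0) == 1);   // A second release from surface 0 is ignored.
    assert(tracker.decrement(2) == 0);   // Zero: the buffer goes back to the input queue.
    return 0;
}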
diff --git a/services/camera/libcameraservice/device3/deprecated/DeprecatedCamera3StreamSplitter.h b/services/camera/libcameraservice/device3/deprecated/DeprecatedCamera3StreamSplitter.h
new file mode 100644
index 0000000..4610985
--- /dev/null
+++ b/services/camera/libcameraservice/device3/deprecated/DeprecatedCamera3StreamSplitter.h
@@ -0,0 +1,299 @@
+/*
+ * Copyright 2014,2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_SERVERS_STREAMSPLITTER_H
+#define ANDROID_SERVERS_STREAMSPLITTER_H
+
+#include <unordered_set>
+
+#include <camera/CameraMetadata.h>
+
+#include <gui/BufferItemConsumer.h>
+#include <gui/IConsumerListener.h>
+#include <gui/Surface.h>
+
+#include <utils/Condition.h>
+#include <utils/Mutex.h>
+#include <utils/StrongPointer.h>
+#include <utils/Timers.h>
+
+#define SP_LOGV(x, ...) ALOGV("[%s] " x, mConsumerName.c_str(), ##__VA_ARGS__)
+#define SP_LOGI(x, ...) ALOGI("[%s] " x, mConsumerName.c_str(), ##__VA_ARGS__)
+#define SP_LOGW(x, ...) ALOGW("[%s] " x, mConsumerName.c_str(), ##__VA_ARGS__)
+#define SP_LOGE(x, ...) ALOGE("[%s] " x, mConsumerName.c_str(), ##__VA_ARGS__)
+
+namespace android {
+
+class GraphicBuffer;
+class IGraphicBufferConsumer;
+class IGraphicBufferProducer;
+
+// DeprecatedCamera3StreamSplitter is an autonomous class that manages one input BufferQueue
+// and multiple output BufferQueues. By using the buffer attach and detach logic
+// in BufferQueue, it is able to present the illusion of a single split
+// BufferQueue, where each buffer queued to the input is available to be
+// acquired by each of the outputs, and is able to be dequeued by the input
+// again only once all of the outputs have released it.
+class DeprecatedCamera3StreamSplitter : public BnConsumerListener {
+  public:
+    // Constructor
+    DeprecatedCamera3StreamSplitter(bool useHalBufManager = false);
+
+    // Connect to the stream splitter by creating buffer queue and connecting it
+    // with output surfaces.
+    status_t connect(const std::unordered_map<size_t, sp<Surface>>& surfaces,
+                     uint64_t consumerUsage, uint64_t producerUsage, size_t halMaxBuffers,
+                     uint32_t width, uint32_t height, android::PixelFormat format,
+                     sp<Surface>* consumer, int64_t dynamicRangeProfile);
+
+    // addOutput adds an output BufferQueue to the splitter. The splitter
+    // connects to outputQueue as a CPU producer, and any buffers queued
+    // to the input will be queued to each output. If any output is abandoned
+    // by its consumer, the splitter will abandon its input queue (see onAbandoned).
+    //
+    // A return value other than NO_ERROR means that an error has occurred and
+    // outputQueue has not been added to the splitter. BAD_VALUE is returned if
+    // outputQueue is NULL. See IGraphicBufferProducer::connect for explanations
+    // of other error codes.
+    status_t addOutput(size_t surfaceId, const sp<Surface>& outputQueue);
+
+    // removeOutput will remove a BufferQueue that was previously added to
+    // the splitter outputs. Any pending buffers in the BufferQueue will get
+    // reclaimed.
+    status_t removeOutput(size_t surfaceId);
+
+    // Notification that the graphic buffer has been released to the input
+    // BufferQueue. The buffer should be reused by the camera device instead of
+    // queuing to the outputs.
+    status_t notifyBufferReleased(const sp<GraphicBuffer>& buffer);
+
+    // Attach a buffer to the specified outputs. This call reserves a buffer
+    // slot in the output queue.
+    status_t attachBufferToOutputs(ANativeWindowBuffer* anb,
+                                   const std::vector<size_t>& surface_ids);
+
+    // Get the return value of onFrameAvailable, to work around the problem that
+    // onFrameAvailable returns void. This function should be called by the producer
+    // right after calling queueBuffer().
+    status_t getOnFrameAvailableResult();
+
+    // Disconnect the buffer queue from output surfaces.
+    void disconnect();
+
+    void setHalBufferManager(bool enabled);
+
+  private:
+    // From IConsumerListener
+    //
+    // During this callback, we store some tracking information, detach the
+    // buffer from the input, and attach it to each of the outputs. This call
+    // can block if there are too many outstanding buffers. If it blocks, it
+    // will resume when onBufferReleasedByOutput releases a buffer back to the
+    // input.
+    void onFrameAvailable(const BufferItem& item) override;
+
+    // From IConsumerListener
+    //
+    // Similar to onFrameAvailable, but the buffer item is replacing a buffer
+    // already in the buffer queue. This can happen when the buffer queue is in
+    // droppable mode.
+    void onFrameReplaced(const BufferItem& item) override;
+
+    // From IConsumerListener
+    // We don't care about released buffers because we detach each buffer as
+    // soon as we acquire it. See the comment for onBufferReleased below for
+    // some clarifying notes about the name.
+    void onBuffersReleased() override {}
+
+    // From IConsumerListener
+    // We don't care about sideband streams, since we won't be splitting them
+    void onSidebandStreamChanged() override {}
+
+    // This is the implementation of the onBufferReleased callback from
+    // IProducerListener. It gets called from an OutputListener (see below), and
+    // 'from' indicates the producer interface from which the callback was received.
+    //
+    // During this callback, we detach the buffer from the output queue that
+    // generated the callback, update our state tracking to see if this is the
+    // last output releasing the buffer, and if so, release it to the input.
+    // If we release the buffer to the input, we allow a blocked
+    // onFrameAvailable call to proceed.
+    void onBufferReleasedByOutput(const sp<IGraphicBufferProducer>& from);
+
+    // Called by outputBufferLocked when a buffer in the async buffer queue got replaced.
+    void onBufferReplacedLocked(const sp<IGraphicBufferProducer>& from, size_t surfaceId);
+
+    // When this is called, the splitter disconnects from (i.e., abandons) its
+    // input queue and signals any waiting onFrameAvailable calls to wake up.
+    // It still processes callbacks from other outputs, but only detaches their
+    // buffers so they can continue operating until they run out of buffers to
+    // acquire. This must be called with mMutex locked.
+    void onAbandonedLocked();
+
+    // Decrement the buffer's reference count. Once the reference count becomes
+    // 0, return the buffer back to the input BufferQueue.
+    void decrementBufRefCountLocked(uint64_t id, size_t surfaceId);
+
+    // Check for and handle any output surface dequeue errors.
+    void handleOutputDequeueStatusLocked(status_t res, int slot);
+
+    // Handles released output surface buffers.
+    void returnOutputBufferLocked(const sp<Fence>& fence, const sp<IGraphicBufferProducer>& from,
+                                  size_t surfaceId, int slot);
+
+    // This is a thin wrapper class that lets us determine which BufferQueue
+    // the IProducerListener::onBufferReleased callback is associated with. We
+    // create one of these per output BufferQueue, and then pass the producer
+    // into onBufferReleasedByOutput above.
+    class OutputListener : public SurfaceListener, public IBinder::DeathRecipient {
+      public:
+        OutputListener(wp<DeprecatedCamera3StreamSplitter> splitter,
+                       wp<IGraphicBufferProducer> output);
+        virtual ~OutputListener() = default;
+
+        // From IProducerListener
+        void onBufferReleased() override;
+        bool needsReleaseNotify() override { return true; };
+        void onBuffersDiscarded(const std::vector<sp<GraphicBuffer>>& /*buffers*/) override {};
+        void onBufferDetached(int /*slot*/) override {}
+
+        // From IBinder::DeathRecipient
+        void binderDied(const wp<IBinder>& who) override;
+
+      private:
+        wp<DeprecatedCamera3StreamSplitter> mSplitter;
+        wp<IGraphicBufferProducer> mOutput;
+    };
+
+    class BufferTracker {
+      public:
+        BufferTracker(const sp<GraphicBuffer>& buffer,
+                      const std::vector<size_t>& requestedSurfaces);
+        ~BufferTracker() = default;
+
+        const sp<GraphicBuffer>& getBuffer() const { return mBuffer; }
+        const sp<Fence>& getMergedFence() const { return mMergedFence; }
+
+        void mergeFence(const sp<Fence>& with);
+
+        // Returns the new value
+        // Only called while mMutex is held
+        size_t decrementReferenceCountLocked(size_t surfaceId);
+
+        const std::vector<size_t> requestedSurfaces() const { return mRequestedSurfaces; }
+
+      private:
+        // Disallow copying
+        BufferTracker(const BufferTracker& other);
+        BufferTracker& operator=(const BufferTracker& other);
+
+        sp<GraphicBuffer> mBuffer;  // One instance that holds this native handle
+        sp<Fence> mMergedFence;
+
+        // Requested surfaces for a particular buffer. When the buffer becomes
+        // available from the input queue, these surfaces are used to decide
+        // which outputs the buffer is sent to.
+        std::vector<size_t> mRequestedSurfaces;
+        size_t mReferenceCount;
+    };
+
+    // Must be accessed through RefBase
+    virtual ~DeprecatedCamera3StreamSplitter();
+
+    status_t addOutputLocked(size_t surfaceId, const sp<Surface>& outputQueue);
+
+    status_t removeOutputLocked(size_t surfaceId);
+
+    // Send a buffer to a particular output, and increment the reference count
+    // of the buffer. If this output is abandoned, the buffer's reference count
+    // won't be incremented.
+    status_t outputBufferLocked(const sp<IGraphicBufferProducer>& output,
+                                const BufferItem& bufferItem, size_t surfaceId);
+
+    // Get unique name for the buffer queue consumer
+    std::string getUniqueConsumerName();
+
+    // Helper function to get the BufferQueue slot where a particular buffer is attached to.
+    int getSlotForOutputLocked(const sp<IGraphicBufferProducer>& gbp, const sp<GraphicBuffer>& gb);
+
+    // Sum of max consumer buffers for all outputs
+    size_t mMaxConsumerBuffers = 0;
+    size_t mMaxHalBuffers = 0;
+    uint32_t mWidth = 0;
+    uint32_t mHeight = 0;
+    android::PixelFormat mFormat = android::PIXEL_FORMAT_NONE;
+    uint64_t mProducerUsage = 0;
+    int mDynamicRangeProfile = ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD;
+
+    // The attachBuffer call happens on a different thread depending on mUseHalBufManager and has a
+    // different timing constraint.
+    static const nsecs_t kNormalDequeueBufferTimeout = s2ns(1);      // 1 sec
+    static const nsecs_t kHalBufMgrDequeueBufferTimeout = ms2ns(1);  // 1 msec
+
+    Mutex mMutex;
+
+    sp<IGraphicBufferProducer> mProducer;
+    sp<IGraphicBufferConsumer> mConsumer;
+    sp<BufferItemConsumer> mBufferItemConsumer;
+    sp<Surface> mSurface;
+
+    // Map input buffer queue slots -> buffer items
+    std::unordered_map<uint64_t, BufferItem> mInputSlots;
+
+    // Map surface ids -> gbp outputs
+    std::unordered_map<int, sp<IGraphicBufferProducer>> mOutputs;
+
+    // Map surface ids -> output surfaces
+    std::unordered_map<int, sp<Surface>> mOutputSurfaces;
+
+    // Map surface ids -> consumer buffer count
+    std::unordered_map<int, size_t> mConsumerBufferCount;
+
+    // Map of GraphicBuffer IDs (GraphicBuffer::getId()) to buffer tracking
+    // objects (which are mostly for counting how many outputs have released the
+    // buffer, but also contain merged release fences).
+    std::unordered_map<uint64_t, std::unique_ptr<BufferTracker>> mBuffers;
+
+    struct GBPHash {
+        std::size_t operator()(const sp<IGraphicBufferProducer>& producer) const {
+            return std::hash<IGraphicBufferProducer*>{}(producer.get());
+        }
+    };
+
+    std::unordered_map<sp<IGraphicBufferProducer>, sp<OutputListener>, GBPHash> mNotifiers;
+
+    typedef std::vector<sp<GraphicBuffer>> OutputSlots;
+    std::unordered_map<sp<IGraphicBufferProducer>, std::unique_ptr<OutputSlots>, GBPHash>
+            mOutputSlots;
+
+    // A set of buffers that could potentially stay in some of the outputs after removal
+    // and therefore should be detached from the input queue.
+    std::unordered_set<uint64_t> mDetachedBuffers;
+
+    // Latest onFrameAvailable return value
+    std::atomic<status_t> mOnFrameAvailableRes{0};
+
+    // Currently acquired input buffers
+    size_t mAcquiredInputBuffers;
+
+    std::string mConsumerName;
+
+    bool mUseHalBufManager;
+};
+
+}  // namespace android
+
+#endif
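The GBPHash functor in the header keys unordered_maps by the producer strong pointer and hashes the raw pointer it manages; the smart pointer's default equality then compares those same raw pointers, so no custom comparator is needed. Below is a standalone sketch of the same idea with std::shared_ptr, since sp<> is Android-specific; the Producer and ProducerHash names are illustrative.

#include <iostream>
#include <memory>
#include <string>
#include <unordered_map>

struct Producer {
    std::string name;
};

// Hash a strong pointer by the address of the object it manages, mirroring
// the GBPHash functor declared in the header above.
struct ProducerHash {
    std::size_t operator()(const std::shared_ptr<Producer>& producer) const {
        return std::hash<Producer*>{}(producer.get());
    }
};

int main() {
    std::unordered_map<std::shared_ptr<Producer>, int, ProducerHash> slotCounts;

    auto a = std::make_shared<Producer>();
    a->name = "output-a";
    auto b = std::make_shared<Producer>();
    b->name = "output-b";

    slotCounts[a] = 4;
    slotCounts[b] = 2;

    // The default std::equal_to for shared_ptr compares the managed pointers,
    // so a lookup with the same pointer value finds the same entry.
    std::cout << a->name << " -> " << slotCounts[a] << std::endl;
    std::cout << b->name << " -> " << slotCounts[b] << std::endl;
    return 0;
}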
diff --git a/services/camera/libcameraservice/libcameraservice_fuzzer/Android.bp b/services/camera/libcameraservice/libcameraservice_fuzzer/Android.bp
index 53234f0..3858410 100644
--- a/services/camera/libcameraservice/libcameraservice_fuzzer/Android.bp
+++ b/services/camera/libcameraservice/libcameraservice_fuzzer/Android.bp
@@ -32,7 +32,13 @@
 cc_defaults {
     name: "camera_service_fuzzer_defaults",
     header_libs: [
+        "libaudioflinger_headers",
+        "libaudiohal_headers",
+        "libaudioutils_headers",
+        "libbinder_headers",
         "libmedia_headers",
+        "libmediautils_headers",
+        "mediautils_headers",
     ],
     shared_libs: [
         "framework-permission-aidl-cpp",
@@ -59,7 +65,22 @@
         "android.hardware.camera.device@3.6",
         "android.hardware.camera.device@3.7",
         "camera_platform_flags_c_lib",
+        "libactivitymanager_aidl",
+        "libaudioclient",
+        "libaudioflinger",
+        "libaudiohal",
+        "libaudioprocessing",
+        "libmediaplayerservice",
+        "libmediautils",
+        "libnbaio",
+        "libnblog",
+        "libpermission",
+        "libpowermanager",
+        "libsensorprivacy",
+        "libvibrator",
+        "packagemanager_aidl-cpp",
     ],
+    static_libs: ["libbinder_random_parcel"],
     fuzz_config: {
         cc: [
             "android-camera-fwk-eng@google.com",
@@ -85,6 +106,9 @@
     srcs: [
         "camera_service_fuzzer.cpp",
     ],
+    static_libs: [
+        "libfakeservicemanager",
+    ],
     defaults: [
         "camera_service_fuzzer_defaults",
     ],
diff --git a/services/camera/libcameraservice/libcameraservice_fuzzer/camera_service_fuzzer.cpp b/services/camera/libcameraservice/libcameraservice_fuzzer/camera_service_fuzzer.cpp
index 718e1d6..12ac33f 100644
--- a/services/camera/libcameraservice/libcameraservice_fuzzer/camera_service_fuzzer.cpp
+++ b/services/camera/libcameraservice/libcameraservice_fuzzer/camera_service_fuzzer.cpp
@@ -21,20 +21,29 @@
 #define LOG_TAG "CameraServiceFuzzer"
 //#define LOG_NDEBUG 0
 
+#include <AudioFlinger.h>
 #include <CameraService.h>
+#include <ISchedulingPolicyService.h>
+#include <MediaPlayerService.h>
+#include <android-base/logging.h>
 #include <android/content/AttributionSourceState.h>
 #include <android/hardware/BnCameraServiceListener.h>
 #include <android/hardware/ICameraServiceListener.h>
 #include <android/hardware/camera2/BnCameraDeviceCallbacks.h>
 #include <android/hardware/camera2/ICameraDeviceUser.h>
+#include <binder/IActivityManager.h>
+#include <binder/IAppOpsService.h>
 #include <camera/CameraUtils.h>
 #include <camera/camera2/OutputConfiguration.h>
 #include <com_android_graphics_libgui_flags.h>
 #include <device3/Camera3StreamInterface.h>
+#include <fakeservicemanager/FakeServiceManager.h>
+#include <fuzzbinder/random_binder.h>
 #include <gui/BufferItemConsumer.h>
 #include <gui/IGraphicBufferProducer.h>
 #include <gui/Surface.h>
 #include <gui/SurfaceComposerClient.h>
+#include <media/IAudioFlinger.h>
 #include <private/android_filesystem_config.h>
 #include "fuzzer/FuzzedDataProvider.h"
 
@@ -100,6 +109,196 @@
 const size_t kNumCameraMsg = size(kCameraMsg);
 const size_t kNumSoundKind = size(kSoundKind);
 const size_t kNumShellCmd = size(kShellCmd);
+static std::once_flag gSmOnce;
+sp<CameraService> gCameraService;
+
+void addService(const String16& serviceName, const sp<FakeServiceManager>& fakeServiceManager,
+                FuzzedDataProvider* fdp) {
+    sp<IBinder> binder = getRandomBinder(fdp);
+    if (!binder) {
+        return;
+    }
+
+    CHECK_EQ(NO_ERROR, fakeServiceManager->addService(serviceName, binder));
+    return;
+}
+
+class FuzzerActivityManager : public BnInterface<IActivityManager> {
+  public:
+    int32_t openContentUri(const String16& /*stringUri*/) override { return 0; }
+
+    status_t registerUidObserver(const sp<IUidObserver>& /*observer*/, const int32_t /*event*/,
+                                 const int32_t /*cutpoint*/,
+                                 const String16& /*callingPackage*/) override {
+        return OK;
+    }
+
+    status_t unregisterUidObserver(const sp<IUidObserver>& /*observer*/) override { return OK; }
+
+    status_t registerUidObserverForUids(const sp<IUidObserver>& /*observer*/,
+                                        const int32_t /*event*/, const int32_t /*cutpoint*/,
+                                        const String16& /*callingPackage*/,
+                                        const int32_t* /*uids[]*/, size_t /*nUids*/,
+                                        /*out*/ sp<IBinder>& /*observerToken*/) override {
+        return OK;
+    }
+
+    status_t addUidToObserver(const sp<IBinder>& /*observerToken*/,
+                              const String16& /*callingPackage*/, int32_t /*uid*/) override {
+        return OK;
+    }
+
+    status_t removeUidFromObserver(const sp<IBinder>& /*observerToken*/,
+                                   const String16& /*callingPackage*/, int32_t /*uid*/) override {
+        return OK;
+    }
+
+    bool isUidActive(const uid_t /*uid*/, const String16& /*callingPackage*/) override {
+        return true;
+    }
+
+    int32_t getUidProcessState(const uid_t /*uid*/, const String16& /*callingPackage*/) override {
+        return ActivityManager::PROCESS_STATE_UNKNOWN;
+    }
+
+    status_t checkPermission(const String16& /*permission*/, const pid_t /*pid*/,
+                             const uid_t /*uid*/, int32_t* /*outResult*/) override {
+        return NO_ERROR;
+    }
+
+    status_t logFgsApiBegin(int32_t /*apiType*/, int32_t /*appUid*/, int32_t /*appPid*/) override {
+        return OK;
+    }
+    status_t logFgsApiEnd(int32_t /*apiType*/, int32_t /*appUid*/, int32_t /*appPid*/) override {
+        return OK;
+    }
+    status_t logFgsApiStateChanged(int32_t /*apiType*/, int32_t /*state*/, int32_t /*appUid*/,
+                                   int32_t /*appPid*/) override {
+        return OK;
+    }
+};
+
+class FuzzerSensorPrivacyManager : public BnInterface<hardware::ISensorPrivacyManager> {
+  public:
+    binder::Status supportsSensorToggle(int32_t /*toggleType*/, int32_t /*sensor*/,
+                                        bool* /*_aidl_return*/) override {
+        return binder::Status::fromStatusT(UNKNOWN_TRANSACTION);
+    }
+    binder::Status addSensorPrivacyListener(
+            const sp<hardware::ISensorPrivacyListener>& /*listener*/) override {
+        return binder::Status::fromStatusT(::android::UNKNOWN_TRANSACTION);
+    }
+    binder::Status addToggleSensorPrivacyListener(
+            const sp<hardware::ISensorPrivacyListener>& /*listener*/) override {
+        return binder::Status::fromStatusT(UNKNOWN_TRANSACTION);
+    }
+    binder::Status removeSensorPrivacyListener(
+            const sp<hardware::ISensorPrivacyListener>& /*listener*/) override {
+        return binder::Status::fromStatusT(::android::UNKNOWN_TRANSACTION);
+    }
+    binder::Status removeToggleSensorPrivacyListener(
+            const sp<hardware::ISensorPrivacyListener>& /*listener*/) override {
+        return binder::Status::fromStatusT(::android::UNKNOWN_TRANSACTION);
+    }
+    binder::Status isSensorPrivacyEnabled(bool* /*_aidl_return*/) override {
+        return binder::Status::fromStatusT(UNKNOWN_TRANSACTION);
+    }
+    binder::Status isCombinedToggleSensorPrivacyEnabled(int32_t /*sensor*/,
+                                                        bool* /*_aidl_return*/) override {
+        return binder::Status::fromStatusT(UNKNOWN_TRANSACTION);
+    }
+    binder::Status isToggleSensorPrivacyEnabled(int32_t /*toggleType*/, int32_t /*sensor*/,
+                                                bool* /*_aidl_return*/) override {
+        return binder::Status::fromStatusT(UNKNOWN_TRANSACTION);
+    }
+    binder::Status setSensorPrivacy(bool /*enable*/) override {
+        return binder::Status::fromStatusT(UNKNOWN_TRANSACTION);
+    }
+    binder::Status setToggleSensorPrivacy(int32_t /*userId*/, int32_t /*source*/,
+                                          int32_t /*sensor*/, bool /*enable*/) override {
+        return binder::Status::fromStatusT(UNKNOWN_TRANSACTION);
+    }
+    binder::Status setToggleSensorPrivacyForProfileGroup(int32_t /*userId*/, int32_t /*source*/,
+                                                         int32_t /*sensor*/,
+                                                         bool /*enable*/) override {
+        return binder::Status::fromStatusT(UNKNOWN_TRANSACTION);
+    }
+    binder::Status getCameraPrivacyAllowlist(
+            ::std::vector<::android::String16>* /*_aidl_return*/) override {
+        return binder::Status::fromStatusT(UNKNOWN_TRANSACTION);
+    }
+    binder::Status getToggleSensorPrivacyState(int32_t /*toggleType*/, int32_t /*sensor*/,
+                                               int32_t* /* _aidl_return*/) override {
+        return binder::Status::fromStatusT(UNKNOWN_TRANSACTION);
+    }
+    binder::Status setToggleSensorPrivacyState(int32_t /*userId*/, int32_t /*source*/,
+                                               int32_t /*sensor*/, int32_t /*state*/) override {
+        return binder::Status::fromStatusT(UNKNOWN_TRANSACTION);
+    }
+    binder::Status setToggleSensorPrivacyStateForProfileGroup(int32_t /*userId*/,
+                                                              int32_t /*source*/,
+                                                              int32_t /*sensor*/,
+                                                              int32_t /*state*/) override {
+        return binder::Status::fromStatusT(UNKNOWN_TRANSACTION);
+    }
+    binder::Status isCameraPrivacyEnabled(const ::android::String16& /*packageName*/,
+                                          bool* /*_aidl_return*/) override {
+        return binder::Status::fromStatusT(UNKNOWN_TRANSACTION);
+    }
+};
+
+class FuzzAppOpsService : public BnAppOpsService {
+  public:
+    int32_t checkOperation(int32_t /*code*/, int32_t /*uid*/,
+                           const String16& /*packageName*/) override {
+        return 0;
+    }
+
+    int32_t noteOperation(int32_t /*code*/, int32_t /*uid*/, const String16& /*packageName*/,
+                          const std::optional<String16>& /*attributionTag*/,
+                          bool /*shouldCollectAsyncNotedOp*/, const String16& /*message*/,
+                          bool /*shouldCollectMessage*/) override {
+        return 0;
+    }
+
+    void startWatchingModeWithFlags(int32_t /*op*/, const String16& /*packageName*/,
+                                    int32_t /*flags*/,
+                                    const sp<IAppOpsCallback>& /*callback*/) override {
+        return;
+    }
+
+    int32_t startOperation(const sp<IBinder>& /*token*/, int32_t /*code*/, int32_t /*uid*/,
+                           const String16& /*packageName*/,
+                           const std::optional<String16>& /*attributionTag*/,
+                           bool /*startIfModeDefault*/, bool /*shouldCollectAsyncNotedOp*/,
+                           const String16& /*message*/, bool /*shouldCollectMessage*/) override {
+        return 0;
+    }
+
+    void finishOperation(const sp<IBinder>& /*token*/, int32_t /*code*/, int32_t /*uid*/,
+                         const String16& /*packageName*/,
+                         const std::optional<String16>& /*attributionTag*/) override {
+        return;
+    }
+
+    void startWatchingMode(int32_t /*op*/, const String16& /*packageName*/,
+                           const sp<IAppOpsCallback>& /*callback*/) override {
+        return;
+    }
+
+    void stopWatchingMode(const sp<IAppOpsCallback>& /*callback*/) override { return; }
+
+    int32_t permissionToOpCode(const String16& /*permission*/) override { return 0; }
+
+    int32_t checkAudioOperation(int32_t /*code*/, int32_t /*usage*/, int32_t /*uid*/,
+                                const String16& /*packageName*/) override {
+        return 0;
+    }
+
+    void setCameraAudioRestriction(int32_t /*mode*/) override { return; }
+
+    bool shouldCollectNotes(int32_t /*opCode*/) override { return true; }
+};
 
 class CameraFuzzer : public ::android::hardware::BnCameraClient {
    public:
@@ -687,14 +886,38 @@
     }
     setuid(AID_CAMERASERVER);
     std::shared_ptr<FuzzedDataProvider> fp = std::make_shared<FuzzedDataProvider>(data, size);
-    sp<CameraService> cs = new CameraService();
-    cs->clearCachedVariables();
-    sp<CameraFuzzer> camerafuzzer = new CameraFuzzer(cs, fp);
+
+    std::call_once(gSmOnce, [&] {
+        /* Create a FakeServiceManager instance and add required services */
+        sp<FakeServiceManager> fsm = sp<FakeServiceManager>::make();
+        setDefaultServiceManager(fsm);
+        for (const char* service :
+             {"sensor_privacy", "permission", "media.camera.proxy", "batterystats", "media.metrics",
+              "media.extractor", "drm.drmManager", "permission_checker"}) {
+            addService(String16(service), fsm, fp.get());
+        }
+        const auto audioFlinger = sp<AudioFlinger>::make();
+        const auto afAdapter = sp<AudioFlingerServerAdapter>::make(audioFlinger);
+        CHECK_EQ(NO_ERROR,
+                 fsm->addService(String16(IAudioFlinger::DEFAULT_SERVICE_NAME),
+                                 IInterface::asBinder(afAdapter), false /* allowIsolated */,
+                                 IServiceManager::DUMP_FLAG_PRIORITY_DEFAULT));
+        sp<FuzzerActivityManager> am = new FuzzerActivityManager();
+        CHECK_EQ(NO_ERROR, fsm->addService(String16("activity"), IInterface::asBinder(am)));
+        sp<FuzzerSensorPrivacyManager> sensorPrivacyManager = new FuzzerSensorPrivacyManager();
+        CHECK_EQ(NO_ERROR, fsm->addService(String16("sensor_privacy"),
+                                           IInterface::asBinder(sensorPrivacyManager)));
+        sp<FuzzAppOpsService> appops = new FuzzAppOpsService();
+        CHECK_EQ(NO_ERROR, fsm->addService(String16("appops"), IInterface::asBinder(appops)));
+        MediaPlayerService::instantiate();
+        gCameraService = new CameraService();
+    });
+    sp<CameraFuzzer> camerafuzzer = new CameraFuzzer(gCameraService, fp);
     if (!camerafuzzer) {
         return 0;
     }
     camerafuzzer->process();
-    Camera2Fuzzer camera2fuzzer(cs, fp);
+    Camera2Fuzzer camera2fuzzer(gCameraService, fp);
     camera2fuzzer.process();
     return 0;
 }
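The fuzzer entry point above guards its fake-service-manager and CameraService setup with std::call_once on a file-scope std::once_flag, so the expensive initialization runs exactly once even though LLVMFuzzerTestOneInput is invoked for every input. Below is a minimal standalone sketch of that one-time-setup pattern; Environment and runOneInput are illustrative names, not from the patch.

#include <iostream>
#include <memory>
#include <mutex>

// Stand-in for process-wide state such as the fake service manager and the
// CameraService instance created in the fuzzer's call_once block.
struct Environment {
    Environment() { std::cout << "expensive one-time setup" << std::endl; }
};

static std::once_flag gOnce;
static std::unique_ptr<Environment> gEnv;

// Stand-in for LLVMFuzzerTestOneInput: called once per fuzz input.
void runOneInput(int input) {
    // Thread-safe, and the lambda runs at most once across all invocations.
    std::call_once(gOnce, [] { gEnv = std::make_unique<Environment>(); });
    std::cout << "processing input " << input << std::endl;
}

int main() {
    runOneInput(1);
    runOneInput(2);  // The setup is not repeated.
    return 0;
}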
diff --git a/services/camera/libcameraservice/libcameraservice_fuzzer/corpus/seed-2024-08-29-0 b/services/camera/libcameraservice/libcameraservice_fuzzer/corpus/seed-2024-08-29-0
new file mode 100644
index 0000000..4c56959
--- /dev/null
+++ b/services/camera/libcameraservice/libcameraservice_fuzzer/corpus/seed-2024-08-29-0
Binary files differ
diff --git a/services/camera/libcameraservice/libcameraservice_fuzzer/corpus/seed-2024-08-29-1 b/services/camera/libcameraservice/libcameraservice_fuzzer/corpus/seed-2024-08-29-1
new file mode 100644
index 0000000..fc0e371
--- /dev/null
+++ b/services/camera/libcameraservice/libcameraservice_fuzzer/corpus/seed-2024-08-29-1
Binary files differ
diff --git a/services/camera/libcameraservice/libcameraservice_fuzzer/corpus/seed-2024-08-29-10 b/services/camera/libcameraservice/libcameraservice_fuzzer/corpus/seed-2024-08-29-10
new file mode 100644
index 0000000..1266b3e
--- /dev/null
+++ b/services/camera/libcameraservice/libcameraservice_fuzzer/corpus/seed-2024-08-29-10
Binary files differ
diff --git a/services/camera/libcameraservice/libcameraservice_fuzzer/corpus/seed-2024-08-29-11 b/services/camera/libcameraservice/libcameraservice_fuzzer/corpus/seed-2024-08-29-11
new file mode 100644
index 0000000..cb1c0e4
--- /dev/null
+++ b/services/camera/libcameraservice/libcameraservice_fuzzer/corpus/seed-2024-08-29-11
Binary files differ
diff --git a/services/camera/libcameraservice/libcameraservice_fuzzer/corpus/seed-2024-08-29-12 b/services/camera/libcameraservice/libcameraservice_fuzzer/corpus/seed-2024-08-29-12
new file mode 100644
index 0000000..ab820a4
--- /dev/null
+++ b/services/camera/libcameraservice/libcameraservice_fuzzer/corpus/seed-2024-08-29-12
Binary files differ
diff --git a/services/camera/libcameraservice/libcameraservice_fuzzer/corpus/seed-2024-08-29-13 b/services/camera/libcameraservice/libcameraservice_fuzzer/corpus/seed-2024-08-29-13
new file mode 100644
index 0000000..6051e9a
--- /dev/null
+++ b/services/camera/libcameraservice/libcameraservice_fuzzer/corpus/seed-2024-08-29-13
Binary files differ
diff --git a/services/camera/libcameraservice/libcameraservice_fuzzer/corpus/seed-2024-08-29-14 b/services/camera/libcameraservice/libcameraservice_fuzzer/corpus/seed-2024-08-29-14
new file mode 100644
index 0000000..596e55b
--- /dev/null
+++ b/services/camera/libcameraservice/libcameraservice_fuzzer/corpus/seed-2024-08-29-14
Binary files differ
diff --git a/services/camera/libcameraservice/libcameraservice_fuzzer/corpus/seed-2024-08-29-15 b/services/camera/libcameraservice/libcameraservice_fuzzer/corpus/seed-2024-08-29-15
new file mode 100644
index 0000000..20d7dcb
--- /dev/null
+++ b/services/camera/libcameraservice/libcameraservice_fuzzer/corpus/seed-2024-08-29-15
Binary files differ
diff --git a/services/camera/libcameraservice/libcameraservice_fuzzer/corpus/seed-2024-08-29-2 b/services/camera/libcameraservice/libcameraservice_fuzzer/corpus/seed-2024-08-29-2
new file mode 100644
index 0000000..5bbfa56
--- /dev/null
+++ b/services/camera/libcameraservice/libcameraservice_fuzzer/corpus/seed-2024-08-29-2
Binary files differ
diff --git a/services/camera/libcameraservice/libcameraservice_fuzzer/corpus/seed-2024-08-29-3 b/services/camera/libcameraservice/libcameraservice_fuzzer/corpus/seed-2024-08-29-3
new file mode 100644
index 0000000..cd148f6
--- /dev/null
+++ b/services/camera/libcameraservice/libcameraservice_fuzzer/corpus/seed-2024-08-29-3
Binary files differ
diff --git a/services/camera/libcameraservice/libcameraservice_fuzzer/corpus/seed-2024-08-29-4 b/services/camera/libcameraservice/libcameraservice_fuzzer/corpus/seed-2024-08-29-4
new file mode 100644
index 0000000..e4ddb50
--- /dev/null
+++ b/services/camera/libcameraservice/libcameraservice_fuzzer/corpus/seed-2024-08-29-4
Binary files differ
diff --git a/services/camera/libcameraservice/libcameraservice_fuzzer/corpus/seed-2024-08-29-5 b/services/camera/libcameraservice/libcameraservice_fuzzer/corpus/seed-2024-08-29-5
new file mode 100644
index 0000000..3be3ce1
--- /dev/null
+++ b/services/camera/libcameraservice/libcameraservice_fuzzer/corpus/seed-2024-08-29-5
Binary files differ
diff --git a/services/camera/libcameraservice/libcameraservice_fuzzer/corpus/seed-2024-08-29-6 b/services/camera/libcameraservice/libcameraservice_fuzzer/corpus/seed-2024-08-29-6
new file mode 100644
index 0000000..3b51e41
--- /dev/null
+++ b/services/camera/libcameraservice/libcameraservice_fuzzer/corpus/seed-2024-08-29-6
Binary files differ
diff --git a/services/camera/libcameraservice/libcameraservice_fuzzer/corpus/seed-2024-08-29-7 b/services/camera/libcameraservice/libcameraservice_fuzzer/corpus/seed-2024-08-29-7
new file mode 100644
index 0000000..3b929df
--- /dev/null
+++ b/services/camera/libcameraservice/libcameraservice_fuzzer/corpus/seed-2024-08-29-7
Binary files differ
diff --git a/services/camera/libcameraservice/libcameraservice_fuzzer/corpus/seed-2024-08-29-8 b/services/camera/libcameraservice/libcameraservice_fuzzer/corpus/seed-2024-08-29-8
new file mode 100644
index 0000000..f92337b
--- /dev/null
+++ b/services/camera/libcameraservice/libcameraservice_fuzzer/corpus/seed-2024-08-29-8
Binary files differ
diff --git a/services/camera/libcameraservice/libcameraservice_fuzzer/corpus/seed-2024-08-29-9 b/services/camera/libcameraservice/libcameraservice_fuzzer/corpus/seed-2024-08-29-9
new file mode 100644
index 0000000..0fe0f06
--- /dev/null
+++ b/services/camera/libcameraservice/libcameraservice_fuzzer/corpus/seed-2024-08-29-9
Binary files differ
diff --git a/services/camera/libcameraservice/tests/Camera3StreamSplitterTest.cpp b/services/camera/libcameraservice/tests/Camera3StreamSplitterTest.cpp
index 3d49ae5..5e32482 100644
--- a/services/camera/libcameraservice/tests/Camera3StreamSplitterTest.cpp
+++ b/services/camera/libcameraservice/tests/Camera3StreamSplitterTest.cpp
@@ -17,10 +17,9 @@
 #define LOG_TAG "Camera3StreamSplitterTest"
 // #define LOG_NDEBUG 0
 
-#include "../device3/Camera3StreamSplitter.h"
-
 #include <android/hardware_buffer.h>
 #include <com_android_graphics_libgui_flags.h>
+#include <com_android_internal_camera_flags.h>
 #include <gui/BufferItemConsumer.h>
 #include <gui/IGraphicBufferConsumer.h>
 #include <gui/IGraphicBufferProducer.h>
@@ -35,6 +34,14 @@
 
 #include <gtest/gtest.h>
 
+#include "../device3/Flags.h"
+
+#if USE_NEW_STREAM_SPLITTER
+#include "../device3/Camera3StreamSplitter.h"
+#else
+#include "../device3/deprecated/DeprecatedCamera3StreamSplitter.h"
+#endif  // USE_NEW_STREAM_SPLITTER
+
 using namespace android;
 
 namespace {
@@ -62,10 +69,20 @@
 
 class Camera3StreamSplitterTest : public testing::Test {
   public:
-    void SetUp() override { mSplitter = sp<Camera3StreamSplitter>::make(); }
+    void SetUp() override {
+#if USE_NEW_STREAM_SPLITTER
+        mSplitter = sp<Camera3StreamSplitter>::make();
+#else
+        mSplitter = sp<DeprecatedCamera3StreamSplitter>::make();
+#endif  // USE_NEW_STREAM_SPLITTER
+    }
 
   protected:
+#if USE_NEW_STREAM_SPLITTER
     sp<Camera3StreamSplitter> mSplitter;
+#else
+    sp<DeprecatedCamera3StreamSplitter> mSplitter;
+#endif  // USE_NEW_STREAM_SPLITTER
 };
 
 class TestSurfaceListener : public SurfaceListener {
@@ -102,7 +119,7 @@
 
 }  // namespace
 
-TEST_F(Camera3StreamSplitterTest, TestWithoutSurfaces_NoBuffersConsumed) {
+TEST_F(Camera3StreamSplitterTest, WithoutSurfaces_NoBuffersConsumed) {
     sp<Surface> consumer;
     EXPECT_EQ(OK, mSplitter->connect({}, kConsumerUsage, kProducerUsage, kHalMaxBuffers, kWidth,
                                      kHeight, kFormat, &consumer, kDynamicRangeProfile));
diff --git a/services/camera/libcameraservice/tests/DepthProcessorTest.cpp b/services/camera/libcameraservice/tests/DepthProcessorTest.cpp
index 673c149..75cf21d 100644
--- a/services/camera/libcameraservice/tests/DepthProcessorTest.cpp
+++ b/services/camera/libcameraservice/tests/DepthProcessorTest.cpp
@@ -78,30 +78,30 @@
 }
 
 TEST(DepthProcessorTest, BadInput) {
+    static const size_t badInputBufferWidth = 17;
+    static const size_t badInputBufferHeight = 3;
+    static const size_t badInputJpegSize = 63;
+    static const size_t badInputBufferDepthSize = (badInputBufferWidth * badInputBufferHeight);
     int jpegQuality = 95;
 
     DepthPhotoInputFrame inputFrame;
+    std::vector<uint8_t> colorJpegBuffer(badInputJpegSize);
+    inputFrame.mMainJpegSize = colorJpegBuffer.size();
     // Worst case both depth and confidence maps have the same size as the main color image.
     inputFrame.mMaxJpegSize = inputFrame.mMainJpegSize * 3;
 
-    std::vector<uint8_t> colorJpegBuffer;
-    generateColorJpegBuffer(jpegQuality, ExifOrientation::ORIENTATION_UNDEFINED,
-            /*includeExif*/ false, /*switchDimensions*/ false, &colorJpegBuffer);
-
-    std::array<uint16_t, kTestBufferDepthSize> depth16Buffer;
-    generateDepth16Buffer(&depth16Buffer);
+    std::array<uint16_t, badInputBufferDepthSize> depth16Buffer;
 
     std::vector<uint8_t> depthPhotoBuffer(inputFrame.mMaxJpegSize);
     size_t actualDepthPhotoSize = 0;
 
-    inputFrame.mMainJpegWidth = kTestBufferWidth;
-    inputFrame.mMainJpegHeight = kTestBufferHeight;
+    inputFrame.mMainJpegWidth = badInputBufferWidth;
+    inputFrame.mMainJpegHeight = badInputBufferHeight;
     inputFrame.mJpegQuality = jpegQuality;
     ASSERT_NE(processDepthPhotoFrame(inputFrame, depthPhotoBuffer.size(), depthPhotoBuffer.data(),
                 &actualDepthPhotoSize), 0);
 
     inputFrame.mMainJpegBuffer = reinterpret_cast<const char*> (colorJpegBuffer.data());
-    inputFrame.mMainJpegSize = colorJpegBuffer.size();
     ASSERT_NE(processDepthPhotoFrame(inputFrame, depthPhotoBuffer.size(), depthPhotoBuffer.data(),
                 &actualDepthPhotoSize), 0);
 
@@ -113,6 +113,9 @@
 
     ASSERT_NE(processDepthPhotoFrame(inputFrame, depthPhotoBuffer.size(), depthPhotoBuffer.data(),
                 nullptr), 0);
+
+    ASSERT_NE(processDepthPhotoFrame(inputFrame, depthPhotoBuffer.size(), depthPhotoBuffer.data(),
+                &actualDepthPhotoSize), 0);
 }
 
 TEST(DepthProcessorTest, BasicDepthPhotoValidation) {
diff --git a/services/camera/virtualcamera/util/Util.cc b/services/camera/virtualcamera/util/Util.cc
index 4aff60f..26015d1 100644
--- a/services/camera/virtualcamera/util/Util.cc
+++ b/services/camera/virtualcamera/util/Util.cc
@@ -89,13 +89,16 @@
     return;
   }
 
-  const int32_t rawFence = fence != nullptr ? fence->get() : -1;
+  const int32_t rawFence = fence != nullptr ? dup(fence->get()) : -1;
   mLockStatus = static_cast<status_t>(AHardwareBuffer_lockPlanes(
       hwBuffer.get(), usageFlags, rawFence, nullptr, &mPlanes));
   if (mLockStatus != OK) {
     ALOGE("%s: Failed to lock graphic buffer: %s", __func__,
           statusToString(mLockStatus).c_str());
   }
+  if (rawFence >= 0) {
+    close(rawFence);
+  }
 }
 
 PlanesLockGuard::~PlanesLockGuard() {
diff --git a/services/mediametrics/fuzzer/corpus/seed-2024-08-29-0 b/services/mediametrics/fuzzer/corpus/seed-2024-08-29-0
new file mode 100644
index 0000000..802c2b5
--- /dev/null
+++ b/services/mediametrics/fuzzer/corpus/seed-2024-08-29-0
Binary files differ
diff --git a/services/mediametrics/fuzzer/corpus/seed-2024-08-29-1 b/services/mediametrics/fuzzer/corpus/seed-2024-08-29-1
new file mode 100644
index 0000000..9ee6a15
--- /dev/null
+++ b/services/mediametrics/fuzzer/corpus/seed-2024-08-29-1
Binary files differ
diff --git a/services/mediametrics/fuzzer/corpus/seed-2024-08-29-10 b/services/mediametrics/fuzzer/corpus/seed-2024-08-29-10
new file mode 100644
index 0000000..95006c8
--- /dev/null
+++ b/services/mediametrics/fuzzer/corpus/seed-2024-08-29-10
Binary files differ
diff --git a/services/mediametrics/fuzzer/corpus/seed-2024-08-29-11 b/services/mediametrics/fuzzer/corpus/seed-2024-08-29-11
new file mode 100644
index 0000000..853be96
--- /dev/null
+++ b/services/mediametrics/fuzzer/corpus/seed-2024-08-29-11
Binary files differ
diff --git a/services/mediametrics/fuzzer/corpus/seed-2024-08-29-12 b/services/mediametrics/fuzzer/corpus/seed-2024-08-29-12
new file mode 100644
index 0000000..c3e9848
--- /dev/null
+++ b/services/mediametrics/fuzzer/corpus/seed-2024-08-29-12
Binary files differ
diff --git a/services/mediametrics/fuzzer/corpus/seed-2024-08-29-13 b/services/mediametrics/fuzzer/corpus/seed-2024-08-29-13
new file mode 100644
index 0000000..08b7f0d
--- /dev/null
+++ b/services/mediametrics/fuzzer/corpus/seed-2024-08-29-13
Binary files differ
diff --git a/services/mediametrics/fuzzer/corpus/seed-2024-08-29-14 b/services/mediametrics/fuzzer/corpus/seed-2024-08-29-14
new file mode 100644
index 0000000..20e5e80
--- /dev/null
+++ b/services/mediametrics/fuzzer/corpus/seed-2024-08-29-14
Binary files differ
diff --git a/services/mediametrics/fuzzer/corpus/seed-2024-08-29-15 b/services/mediametrics/fuzzer/corpus/seed-2024-08-29-15
new file mode 100644
index 0000000..4e54f0b
--- /dev/null
+++ b/services/mediametrics/fuzzer/corpus/seed-2024-08-29-15
Binary files differ
diff --git a/services/mediametrics/fuzzer/corpus/seed-2024-08-29-2 b/services/mediametrics/fuzzer/corpus/seed-2024-08-29-2
new file mode 100644
index 0000000..2b2495d
--- /dev/null
+++ b/services/mediametrics/fuzzer/corpus/seed-2024-08-29-2
Binary files differ
diff --git a/services/mediametrics/fuzzer/corpus/seed-2024-08-29-3 b/services/mediametrics/fuzzer/corpus/seed-2024-08-29-3
new file mode 100644
index 0000000..753594d
--- /dev/null
+++ b/services/mediametrics/fuzzer/corpus/seed-2024-08-29-3
Binary files differ
diff --git a/services/mediametrics/fuzzer/corpus/seed-2024-08-29-4 b/services/mediametrics/fuzzer/corpus/seed-2024-08-29-4
new file mode 100644
index 0000000..0ed2010
--- /dev/null
+++ b/services/mediametrics/fuzzer/corpus/seed-2024-08-29-4
Binary files differ
diff --git a/services/mediametrics/fuzzer/corpus/seed-2024-08-29-5 b/services/mediametrics/fuzzer/corpus/seed-2024-08-29-5
new file mode 100644
index 0000000..f6141d1
--- /dev/null
+++ b/services/mediametrics/fuzzer/corpus/seed-2024-08-29-5
Binary files differ
diff --git a/services/mediametrics/fuzzer/corpus/seed-2024-08-29-6 b/services/mediametrics/fuzzer/corpus/seed-2024-08-29-6
new file mode 100644
index 0000000..b93f618
--- /dev/null
+++ b/services/mediametrics/fuzzer/corpus/seed-2024-08-29-6
Binary files differ
diff --git a/services/mediametrics/fuzzer/corpus/seed-2024-08-29-7 b/services/mediametrics/fuzzer/corpus/seed-2024-08-29-7
new file mode 100644
index 0000000..f8f296d
--- /dev/null
+++ b/services/mediametrics/fuzzer/corpus/seed-2024-08-29-7
Binary files differ
diff --git a/services/mediametrics/fuzzer/corpus/seed-2024-08-29-8 b/services/mediametrics/fuzzer/corpus/seed-2024-08-29-8
new file mode 100644
index 0000000..29bdbc1
--- /dev/null
+++ b/services/mediametrics/fuzzer/corpus/seed-2024-08-29-8
Binary files differ
diff --git a/services/mediametrics/fuzzer/corpus/seed-2024-08-29-9 b/services/mediametrics/fuzzer/corpus/seed-2024-08-29-9
new file mode 100644
index 0000000..315f25e
--- /dev/null
+++ b/services/mediametrics/fuzzer/corpus/seed-2024-08-29-9
Binary files differ
diff --git a/services/mediaresourcemanager/ResourceManagerService.cpp b/services/mediaresourcemanager/ResourceManagerService.cpp
index 9c2fb7c..f12a5d6 100644
--- a/services/mediaresourcemanager/ResourceManagerService.cpp
+++ b/services/mediaresourcemanager/ResourceManagerService.cpp
@@ -310,6 +310,7 @@
     mServiceLog->add(log);
 
     std::scoped_lock lock{mLock};
+    ClientInfoParcel updatedClientInfo = clientInfo;
     if (!mProcessInfo->isPidUidTrusted(pid, uid)) {
         pid_t callingPid = IPCThreadState::self()->getCallingPid();
         uid_t callingUid = IPCThreadState::self()->getCallingUid();
@@ -317,6 +318,8 @@
                 __FUNCTION__, pid, uid, callingPid, callingUid);
         pid = callingPid;
         uid = callingUid;
+        updatedClientInfo.pid = callingPid;
+        updatedClientInfo.uid = callingUid;
     }
     ResourceInfos& infos = getResourceInfosForEdit(pid, mMap);
     ResourceInfo& info = getResourceInfoForEdit(clientInfo, client, infos);
@@ -342,7 +345,7 @@
     }
     if (info.deathNotifier == nullptr && client != nullptr) {
         info.deathNotifier = DeathNotifier::Create(
-            client, ref<ResourceManagerService>(), clientInfo);
+            client, ref<ResourceManagerService>(), updatedClientInfo);
     }
     if (mObserverService != nullptr && !resourceAdded.empty()) {
         mObserverService->onResourceAdded(uid, pid, resourceAdded);